From 4ed53dea73538e752ffc1c4d1abfa226ef6fa3ef Mon Sep 17 00:00:00 2001
From: ThibaultBee <37510686+ThibaultBee@users.noreply.github.com>
Date: Sat, 4 Jan 2025 21:24:48 +0100
Subject: [PATCH] refactor(*): introduce streamer pipeline to enable multiple outputs

---
 .../core/elements/endpoints/DummyEndpoint.kt | 63 ++
 .../core/pipelines/StreamerPipelineTest.kt | 7 +
 .../pipelines/outputs/DummyPipelineOutput.kt | 68 ++
 .../encoding/EncodingPipelineOutputTest.kt | 23 +
 .../utils/ConfigurationUtils.kt | 2 +-
 .../core/{ => pipelines}/utils/FileUtils.kt | 2 +-
 .../pipelines/utils/SourceConfigUtilsTest.kt | 89 +++
 .../dual/file/CameraDualStreamerFileTest.kt | 185 +++++
 .../file/CameraSingleStreamerFileTest.kt} | 11 +-
 .../CameraSingleStreamerMultiStreamTest.kt} | 11 +-
 .../AudioOnlySingleStreamerStateTest.kt} | 40 +-
 .../state/CameraSingleStreamerStateTest.kt} | 11 +-
 .../state/SingleStreamerStateTest.kt} | 28 +-
 .../streamer/{file => utils}/StreamerUtils.kt | 30 +-
 .../streamer/{file => utils}/VideoUtils.kt | 20 +-
 .../encoders/mediacodec/MediaCodecEncoder.kt | 2 +
 .../elements/endpoints/MediaContainerType.kt | 4 +-
 .../processing/video/SurfaceProcessor.kt | 8 +-
 .../video/outputs/AbstractSurfaceOutput.kt | 4 +-
 .../processing/video/outputs/SurfaceOutput.kt | 8 +-
 ...ovider.kt => DefaultSourceInfoProvider.kt} | 24 +-
 .../video/source/ISourceInfoProvider.kt | 31 +-
 .../elements/sources/video/IVideoSource.kt | 3 +-
 .../video/camera/CameraInfoProvider.kt | 44 +-
 .../sources/video/camera/CameraSource.kt | 10 +-
 .../MediaProjectionVideoSource.kt | 9 +-
 .../utils/extensions/CodecConfigExtensions.kt | 35 +-
 .../utils/extensions/StateFlowExtensions.kt | 14 +
 .../core/pipelines/StreamerPipeline.kt | 737 ++++++++++++++++++
 .../core/pipelines/outputs/IPipelineOutput.kt | 148 ++++
 .../encoding/EncodingPipelineOutput.kt | 670 ++++++++++++++++
 .../encoding/IEncodingPipelineOutput.kt | 224 ++++++
 .../core/pipelines/utils/SourceConfigUtils.kt | 17 +
 .../controllers/BitrateRegulatorController.kt | 5 +-
 .../DefaultBitrateRegulatorController.kt | 35 +-
 .../IBitrateRegulatorController.kt | 6 +-
 .../core/streamers/dual/CameraDualStreamer.kt | 180 +++++
 .../core/streamers/dual/DualStreamer.kt | 213 +++++
 .../core/streamers/dual/IDualStreamer.kt | 230 ++++++
 .../core/streamers/interfaces/IStreamer.kt | 203 +++++
 .../StreamerActivityLifeCycleObserver.kt | 14 +-
 .../StreamerViewModelLifeCycleObserver.kt | 17 +-
 .../single/AudioOnlySingleStreamer.kt | 69 +-
 .../streamers/single/CameraSingleStreamer.kt | 22 +-
 .../core/streamers/single/ISingleStreamer.kt | 207 +----
 .../single/ScreenRecorderSingleStreamer.kt | 32 +-
 .../core/streamers/single/SingleStreamer.kt | 688 +++-------------
 .../callbacks/CallbackSingleStreamer.kt | 42 +-
 .../callbacks/CameraCallbackSingleStreamer.kt | 16 +
 .../app/ui/main/PreviewViewModel.kt | 18 +-
 .../ui/main/usecases/BuildStreamerUseCase.kt | 22 +-
 .../app/ui/settings/SettingsFragment.kt | 28 +-
 .../src/main/res/xml/root_preferences.xml | 5 -
 .../streampack/screenrecorder/MainActivity.kt | 136 ++--
 .../RtmpSingleStreamerTest.kt} | 9 +-
 .../SrtSingleStreamerTest.kt} | 9 +-
 .../DefaultSrtBitrateRegulatorController.kt | 25 +-
 .../services/DefaultScreenRecorderService.kt | 2 +-
 58 files changed, 3661 insertions(+), 1154 deletions(-)
 create mode 100644 core/src/androidTest/java/io/github/thibaultbee/streampack/core/elements/endpoints/DummyEndpoint.kt
 create mode 100644 core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/StreamerPipelineTest.kt
 create mode 100644
core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/outputs/DummyPipelineOutput.kt create mode 100644 core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/EncodingPipelineOutputTest.kt rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/{ => pipelines}/utils/ConfigurationUtils.kt (97%) rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/{ => pipelines}/utils/FileUtils.kt (93%) create mode 100644 core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/SourceConfigUtilsTest.kt create mode 100644 core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/dual/file/CameraDualStreamerFileTest.kt rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/{file/CameraStreamerFileTest.kt => single/file/CameraSingleStreamerFileTest.kt} (94%) rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/{file/CameraStreamerMultiStreamTest.kt => single/file/CameraSingleStreamerMultiStreamTest.kt} (91%) rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/{state/AudioOnlyStreamerStateTest.kt => single/state/AudioOnlySingleStreamerStateTest.kt} (80%) rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/{state/CameraStreamerStateTest.kt => single/state/CameraSingleStreamerStateTest.kt} (93%) rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/{state/StreamerStateTest.kt => single/state/SingleStreamerStateTest.kt} (85%) rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/{file => utils}/StreamerUtils.kt (51%) rename core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/{file => utils}/VideoUtils.kt (86%) rename core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/{AbstractSourceInfoProvider.kt => DefaultSourceInfoProvider.kt} (65%) create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/elements/utils/extensions/StateFlowExtensions.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/StreamerPipeline.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/IPipelineOutput.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/EncodingPipelineOutput.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/IEncodingPipelineOutput.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/utils/SourceConfigUtils.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/CameraDualStreamer.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/DualStreamer.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/IDualStreamer.kt create mode 100644 core/src/main/java/io/github/thibaultbee/streampack/core/streamers/interfaces/IStreamer.kt rename extensions/rtmp/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/{RtmpStreamerTest.kt => single/RtmpSingleStreamerTest.kt} (91%) rename extensions/srt/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/{SrtStreamerTest.kt => single/SrtSingleStreamerTest.kt} (91%) diff --git 
a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/elements/endpoints/DummyEndpoint.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/elements/endpoints/DummyEndpoint.kt
new file mode 100644
index 000000000..ca632b001
--- /dev/null
+++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/elements/endpoints/DummyEndpoint.kt
@@ -0,0 +1,63 @@
+package io.github.thibaultbee.streampack.core.elements.endpoints
+
+import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor
+import io.github.thibaultbee.streampack.core.elements.data.Frame
+import io.github.thibaultbee.streampack.core.elements.encoders.CodecConfig
+import kotlinx.coroutines.flow.MutableStateFlow
+import kotlinx.coroutines.flow.StateFlow
+
+class DummyEndpoint : IEndpointInternal {
+    private val _isOpen = MutableStateFlow(false)
+    override val isOpen: StateFlow<Boolean> = _isOpen
+
+    var numOfAudioFramesWritten = 0
+        private set
+    var numOfVideoFramesWritten = 0
+        private set
+    val numOfFramesWritten: Int
+        get() = numOfAudioFramesWritten + numOfVideoFramesWritten
+
+    private val _isStreaming = MutableStateFlow(false)
+    val isStreaming: StateFlow<Boolean> = _isStreaming
+
+    override val info: IEndpoint.IEndpointInfo
+        get() = TODO("Not yet implemented")
+
+    override fun getInfo(type: MediaDescriptor.Type): IEndpoint.IEndpointInfo {
+        TODO("Not yet implemented")
+    }
+
+    override val metrics: Any
+        get() = TODO("Not yet implemented")
+
+    override suspend fun open(descriptor: MediaDescriptor) {
+        _isOpen.emit(true)
+    }
+
+    override suspend fun close() {
+        _isOpen.emit(false)
+    }
+
+    override suspend fun write(frame: Frame, streamPid: Int) {
+        when {
+            frame.isAudio -> numOfAudioFramesWritten++
+            frame.isVideo -> numOfVideoFramesWritten++
+        }
+    }
+
+    override fun addStreams(streamConfigs: List<CodecConfig>): Map<CodecConfig, Int> {
+        return streamConfigs.associateWith { it.hashCode() }
+    }
+
+    override fun addStream(streamConfig: CodecConfig): Int {
+        return streamConfig.hashCode()
+    }
+
+    override suspend fun startStream() {
+        _isStreaming.emit(true)
+    }
+
+    override suspend fun stopStream() {
+        _isStreaming.emit(false)
+    }
+}
\ No newline at end of file
diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/StreamerPipelineTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/StreamerPipelineTest.kt
new file mode 100644
index 000000000..2b5cf54d0
--- /dev/null
+++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/StreamerPipelineTest.kt
@@ -0,0 +1,7 @@
+package io.github.thibaultbee.streampack.core.pipelines
+
+import org.junit.Assert.*
+
+class StreamerPipelineTest {
+
+}
\ No newline at end of file
diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/outputs/DummyPipelineOutput.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/outputs/DummyPipelineOutput.kt
new file mode 100644
index 000000000..a707aa318
--- /dev/null
+++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/outputs/DummyPipelineOutput.kt
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2025 Thibault B.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.github.thibaultbee.streampack.core.pipelines.outputs
+
+import io.github.thibaultbee.streampack.core.elements.encoders.IEncoderInternal
+import io.github.thibaultbee.streampack.core.logger.Logger
+import kotlinx.coroutines.flow.MutableStateFlow
+import kotlinx.coroutines.flow.StateFlow
+
+class DummyVideoPipelineOutput : DummyPipelineOutput(hasAudio = false, hasVideo = true),
+    IVideoPipelineOutputInternal {
+    companion object {
+        private const val TAG = "DummyVideoPipelineOutput"
+    }
+
+    override var targetRotation: Int = 0
+    override val surface: StateFlow<Surface?>
+        get() = TODO("Not yet implemented")
+    override var videoSourceTimestampOffset: Long
+        get() = TODO("Not yet implemented")
+        set(value) {}
+    override var videoFrameRequestedListener: IEncoderInternal.IByteBufferInput.OnFrameRequestedListener?
+        get() = TODO("Not yet implemented")
+        set(value) {}
+}
+
+open class DummyPipelineOutput(override val hasAudio: Boolean, override val hasVideo: Boolean) :
+    IPipelineOutputInternal {
+
+    override var streamListener: IPipelineOutputInternal.Listener? = null
+
+    private val _throwable = MutableStateFlow<Throwable?>(null)
+    override val throwable: StateFlow<Throwable?> = _throwable
+
+    private val _isStreaming = MutableStateFlow(false)
+    override val isStreaming: StateFlow<Boolean> = _isStreaming
+
+    override suspend fun startStream() {
+        Logger.i(TAG, "Start stream")
+        _isStreaming.emit(true)
+    }
+
+    override suspend fun stopStream() {
+        Logger.i(TAG, "Stop stream")
+        _isStreaming.emit(false)
+    }
+
+    override suspend fun release() {
+        Logger.i(TAG, "Release")
+        _isStreaming.emit(false)
+    }
+
+    companion object {
+        private const val TAG = "DummyPipelineOutput"
+    }
+}
\ No newline at end of file
diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/EncodingPipelineOutputTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/EncodingPipelineOutputTest.kt
new file mode 100644
index 000000000..cf9c6b077
--- /dev/null
+++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/EncodingPipelineOutputTest.kt
@@ -0,0 +1,23 @@
+package io.github.thibaultbee.streampack.core.pipelines.outputs.encoding
+
+import android.content.Context
+import androidx.test.platform.app.InstrumentationRegistry
+import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor
+import io.github.thibaultbee.streampack.core.elements.endpoints.DummyEndpoint
+import junit.framework.TestCase.assertTrue
+import kotlinx.coroutines.test.runTest
+import org.junit.Assert.assertFalse
+import org.junit.Test
+
+class EncodingPipelineOutputTest {
+    private val context: Context = InstrumentationRegistry.getInstrumentation().context
+
+    @Test
+    fun testOpenClose() = runTest {
+        val output = EncodingPipelineOutput(context, endpointInternal = DummyEndpoint())
+        output.open(UriMediaDescriptor("file://test.mp4"))
+        assertTrue(output.isOpen.value)
+        output.close()
+        assertFalse(output.isOpen.value)
+    }
+}
\ No newline at end of file
diff --git
a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/utils/ConfigurationUtils.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/ConfigurationUtils.kt
similarity index 97%
rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/utils/ConfigurationUtils.kt
rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/ConfigurationUtils.kt
index 1259f082b..f62e47006 100644
--- a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/utils/ConfigurationUtils.kt
+++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/ConfigurationUtils.kt
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.github.thibaultbee.streampack.core.utils
+package io.github.thibaultbee.streampack.core.pipelines.utils
 
 import android.media.MediaFormat
 import android.util.Size
diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/utils/FileUtils.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/FileUtils.kt
similarity index 93%
rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/utils/FileUtils.kt
rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/FileUtils.kt
index b9d1fbb20..2b1c62223 100644
--- a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/utils/FileUtils.kt
+++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/FileUtils.kt
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.github.thibaultbee.streampack.core.utils
+package io.github.thibaultbee.streampack.core.pipelines.utils
 
 import androidx.test.platform.app.InstrumentationRegistry
 import java.io.File
diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/SourceConfigUtilsTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/SourceConfigUtilsTest.kt
new file mode 100644
index 000000000..b75b3eec4
--- /dev/null
+++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/pipelines/utils/SourceConfigUtilsTest.kt
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2025 Thibault B.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.github.thibaultbee.streampack.core.pipelines.utils
+
+import android.media.MediaFormat
+import android.util.Size
+import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig
+import org.junit.Assert.assertEquals
+import org.junit.Assert.fail
+import org.junit.Test
+
+class SourceConfigUtilsTest {
+
+    @Test
+    fun videoSourceConfigFromEmpty() {
+        try {
+            SourceConfigUtils.buildVideoSourceConfig(emptySet())
+            fail("Video codec configs must not be empty")
+        } catch (_: Exception) {
+        }
+    }
+
+    @Test
+    fun buildVideoSourceConfigWithSimple() {
+        // Given
+        val videoCodecConfigs = setOf(
+            VideoCodecConfig(
+                MediaFormat.MIMETYPE_VIDEO_AVC,
+                resolution = Size(1280, 720),
+                fps = 30
+            ),
+            VideoCodecConfig(MediaFormat.MIMETYPE_VIDEO_AVC, resolution = Size(1280, 720), fps = 30)
+        )
+
+        // When
+        val videoSourceConfig = SourceConfigUtils.buildVideoSourceConfig(videoCodecConfigs)
+
+        // Then
+        assertEquals(1280, videoSourceConfig.resolution.width)
+        assertEquals(720, videoSourceConfig.resolution.height)
+        assertEquals(30, videoSourceConfig.fps)
+    }
+
+    @Test
+    fun buildVideoSourceConfigWithDifferentResolution() {
+        // Given
+        val videoCodecConfigs = setOf(
+            VideoCodecConfig(MediaFormat.MIMETYPE_VIDEO_AVC, resolution = Size(1280, 720)),
+            VideoCodecConfig(MediaFormat.MIMETYPE_VIDEO_AVC, resolution = Size(1920, 1080))
+        )
+
+        // When
+        val videoSourceConfig = SourceConfigUtils.buildVideoSourceConfig(videoCodecConfigs)
+
+        // Then
+        assertEquals(1920, videoSourceConfig.resolution.width)
+        assertEquals(1080, videoSourceConfig.resolution.height)
+    }
+
+    @Test
+    fun videoSourceConfigWithDifferentFps() {
+        // Given
+        val videoCodecConfigs = setOf(
+            VideoCodecConfig(MediaFormat.MIMETYPE_VIDEO_AVC, fps = 30),
+            VideoCodecConfig(MediaFormat.MIMETYPE_VIDEO_AVC, fps = 25)
+        )
+
+        // When
+        try {
+            SourceConfigUtils.buildVideoSourceConfig(videoCodecConfigs)
+            fail("All video codec configs must have the same fps")
+        } catch (e: IllegalArgumentException) {
+            assertEquals("All video codec configs must have the same fps", e.message)
+        }
+    }
+}
\ No newline at end of file
diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/dual/file/CameraDualStreamerFileTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/dual/file/CameraDualStreamerFileTest.kt
new file mode 100644
index 000000000..f5312dcad
--- /dev/null
+++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/dual/file/CameraDualStreamerFileTest.kt
@@ -0,0 +1,185 @@
+/*
+ * Copyright (C) 2024 Thibault B.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package io.github.thibaultbee.streampack.core.streamer.single.file + +import android.Manifest +import android.content.Context +import android.media.MediaFormat.MIMETYPE_AUDIO_AAC +import android.media.MediaFormat.MIMETYPE_AUDIO_OPUS +import android.media.MediaFormat.MIMETYPE_VIDEO_AVC +import android.media.MediaFormat.MIMETYPE_VIDEO_VP9 +import android.util.Size +import androidx.core.net.toUri +import androidx.test.filters.LargeTest +import androidx.test.platform.app.InstrumentationRegistry +import androidx.test.rule.GrantPermissionRule +import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor +import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor +import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpointInternal +import io.github.thibaultbee.streampack.core.elements.endpoints.composites.CompositeEndpoint +import io.github.thibaultbee.streampack.core.elements.endpoints.composites.muxers.mp4.Mp4Muxer +import io.github.thibaultbee.streampack.core.elements.endpoints.composites.sinks.FileSink +import io.github.thibaultbee.streampack.core.pipelines.utils.FileUtils +import io.github.thibaultbee.streampack.core.streamer.utils.StreamerUtils +import io.github.thibaultbee.streampack.core.streamer.utils.VideoUtils +import io.github.thibaultbee.streampack.core.streamers.dual.CameraDualStreamer +import io.github.thibaultbee.streampack.core.streamers.dual.DualStreamerAudioConfig +import io.github.thibaultbee.streampack.core.streamers.dual.DualStreamerVideoConfig +import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig +import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer +import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig +import kotlinx.coroutines.test.runTest +import org.junit.Rule +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.Parameterized +import kotlin.time.Duration.Companion.milliseconds +import kotlin.time.Duration.Companion.seconds + +@LargeTest +@RunWith(Parameterized::class) +class CameraDualStreamerFileTest( + private val firstDescriptor: MediaDescriptor, + private val verifyFirst: Boolean, + firstEndpoint: IEndpointInternal?, + private val secondDescriptor: MediaDescriptor, + private val verifySecond: Boolean, + secondEndpoint: IEndpointInternal?, +) { + private val context: Context = InstrumentationRegistry.getInstrumentation().context + private val streamer = if (firstEndpoint != null && secondEndpoint != null) { + CameraDualStreamer( + context, firstEndpointInternal = firstEndpoint, secondEndpointInternal = secondEndpoint + ) + } else if (firstEndpoint != null) { + CameraDualStreamer(context, firstEndpointInternal = firstEndpoint) + } else if (secondEndpoint != null) { + CameraDualStreamer(context, secondEndpointInternal = secondEndpoint) + } else { + CameraDualStreamer(context) + } + + // TODO: temp + private val info = CameraSingleStreamer(context).getInfo(firstDescriptor) + + private val videoCodec = if (info.video.supportedEncoders.contains(MIMETYPE_VIDEO_AVC)) { + MIMETYPE_VIDEO_AVC + } else if (info.video.supportedEncoders.contains(MIMETYPE_VIDEO_VP9)) { + MIMETYPE_VIDEO_VP9 + } else { + throw IllegalArgumentException("No supported video codec") + } + + private val audioCodec = if (info.audio.supportedEncoders.contains(MIMETYPE_AUDIO_AAC)) { + MIMETYPE_AUDIO_AAC + } else if (info.audio.supportedEncoders.contains(MIMETYPE_AUDIO_OPUS)) { + MIMETYPE_AUDIO_OPUS + } else { + throw 
IllegalArgumentException("No supported audio codec") + } + + private val audioSampleRate = + if (info.audio.getSupportedSampleRates(audioCodec).contains(44_100)) { + 44100 + } else if (info.audio.getSupportedSampleRates(audioCodec).contains(48_000)) { + 48_000 + } else { + throw IllegalArgumentException("No supported audio sample rate for $audioCodec") + } + + @get:Rule + val runtimePermissionRule: GrantPermissionRule = + GrantPermissionRule.grant(Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO) + + @Test + fun writeToFile() = runTest(timeout = 200.seconds) { + val audioConfig = AudioConfig(mimeType = audioCodec, sampleRate = audioSampleRate) + val videoConfig = + VideoConfig(mimeType = videoCodec, resolution = Size(VIDEO_WIDTH, VIDEO_HEIGHT)) + + // Run stream + streamer.setConfig( + DualStreamerAudioConfig(audioConfig), DualStreamerVideoConfig(videoConfig) + ) + + // Run stream + StreamerUtils.runDualStream( + streamer, + firstDescriptor, + secondDescriptor, + STREAM_DURATION_MS.milliseconds, + STREAM_POLLING_MS.milliseconds + ) + streamer.release() + + // Verify + VideoUtils.verifyFile( + context, + firstDescriptor.uri, + verifyFirst, + audioConfig, + videoConfig + ) + VideoUtils.verifyFile( + context, + secondDescriptor.uri, + verifySecond, + audioConfig, + videoConfig + ) + } + + companion object { + private const val STREAM_DURATION_MS = 30_000L + private const val STREAM_POLLING_MS = 1_000L + + private const val VIDEO_WIDTH = 1280 + private const val VIDEO_HEIGHT = 720 + + @JvmStatic + @Parameterized.Parameters( + name = "First mediaDescriptor: {0} - first verify: {1} - first endpoint: {2} - second mediaDescriptor: {3} - second verify: {4} - second endpoint: {5}" + ) + fun getMediaDescriptor(): Iterable> { + return arrayListOf( + arrayOf( + UriMediaDescriptor(FileUtils.createCacheFile("video.ts").toUri()), + true, + null, + UriMediaDescriptor(FileUtils.createCacheFile("video.mp4").toUri()), + true, + null + ), + arrayOf( + UriMediaDescriptor(FileUtils.createCacheFile("video.mp4").toUri()), + true, + null, + UriMediaDescriptor(FileUtils.createCacheFile("video.flv").toUri()), + false, + null + ), + arrayOf( + UriMediaDescriptor(FileUtils.createCacheFile("video.webm").toUri()), + true, + null, + UriMediaDescriptor(FileUtils.createCacheFile("video.mp4").toUri()), + true, + CompositeEndpoint(Mp4Muxer(), FileSink()) + ), + ) + } + } +} \ No newline at end of file diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/CameraStreamerFileTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/file/CameraSingleStreamerFileTest.kt similarity index 94% rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/CameraStreamerFileTest.kt rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/file/CameraSingleStreamerFileTest.kt index f409e25c2..382c1d753 100644 --- a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/CameraStreamerFileTest.kt +++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/file/CameraSingleStreamerFileTest.kt @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.github.thibaultbee.streampack.core.streamer.file +package io.github.thibaultbee.streampack.core.streamer.single.file import android.Manifest import android.content.Context @@ -36,8 +36,9 @@ import io.github.thibaultbee.streampack.core.elements.endpoints.composites.sinks import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig -import io.github.thibaultbee.streampack.core.streamers.single.setConfig -import io.github.thibaultbee.streampack.core.utils.FileUtils +import io.github.thibaultbee.streampack.core.pipelines.utils.FileUtils +import io.github.thibaultbee.streampack.core.streamer.utils.StreamerUtils +import io.github.thibaultbee.streampack.core.streamer.utils.VideoUtils import kotlinx.coroutines.test.runTest import org.junit.Rule import org.junit.Test @@ -48,7 +49,7 @@ import kotlin.time.Duration.Companion.seconds @LargeTest @RunWith(Parameterized::class) -class CameraStreamerFileTest( +class CameraSingleStreamerFileTest( private val descriptor: MediaDescriptor, private val verify: Boolean, endpoint: IEndpointInternal? @@ -105,7 +106,7 @@ class CameraStreamerFileTest( ) // Run stream - StreamerUtils.runStream( + StreamerUtils.runSingleStream( streamer, descriptor, STREAM_DURATION_MS.milliseconds, diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/CameraStreamerMultiStreamTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/file/CameraSingleStreamerMultiStreamTest.kt similarity index 91% rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/CameraStreamerMultiStreamTest.kt rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/file/CameraSingleStreamerMultiStreamTest.kt index fd50dbb95..f8e44e179 100644 --- a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/CameraStreamerMultiStreamTest.kt +++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/file/CameraSingleStreamerMultiStreamTest.kt @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.github.thibaultbee.streampack.core.streamer.file +package io.github.thibaultbee.streampack.core.streamer.single.file import android.Manifest import android.content.Context @@ -25,11 +25,12 @@ import androidx.test.filters.LargeTest import androidx.test.platform.app.InstrumentationRegistry import androidx.test.rule.GrantPermissionRule import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor +import io.github.thibaultbee.streampack.core.pipelines.utils.FileUtils +import io.github.thibaultbee.streampack.core.streamer.utils.StreamerUtils +import io.github.thibaultbee.streampack.core.streamer.utils.VideoUtils import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig -import io.github.thibaultbee.streampack.core.streamers.single.setConfig -import io.github.thibaultbee.streampack.core.utils.FileUtils import kotlinx.coroutines.test.runTest import org.junit.Rule import org.junit.Test @@ -40,7 +41,7 @@ import kotlin.time.Duration.Companion.seconds * Test [CameraSingleStreamer] with multiple streams. */ @LargeTest -class CameraStreamerMultiStreamTest { +class CameraSingleStreamerMultiStreamTest { private val context: Context = InstrumentationRegistry.getInstrumentation().context private val streamer = CameraSingleStreamer(context) @@ -81,7 +82,7 @@ class CameraStreamerMultiStreamTest { videoConfig: VideoConfig ) { // Run stream - StreamerUtils.runStream( + StreamerUtils.runSingleStream( streamer, descriptor, STREAM_DURATION_MS.milliseconds, diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/AudioOnlyStreamerStateTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/AudioOnlySingleStreamerStateTest.kt similarity index 80% rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/AudioOnlyStreamerStateTest.kt rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/AudioOnlySingleStreamerStateTest.kt index 66b593cd8..432c59708 100644 --- a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/AudioOnlyStreamerStateTest.kt +++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/AudioOnlySingleStreamerStateTest.kt @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.github.thibaultbee.streampack.core.streamer.state +package io.github.thibaultbee.streampack.core.streamer.single.state import android.Manifest import android.content.Context @@ -22,11 +22,10 @@ import androidx.test.platform.app.InstrumentationRegistry import androidx.test.rule.GrantPermissionRule import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor +import io.github.thibaultbee.streampack.core.pipelines.utils.ConfigurationUtils +import io.github.thibaultbee.streampack.core.pipelines.utils.FileUtils import io.github.thibaultbee.streampack.core.streamers.single.AudioOnlySingleStreamer -import io.github.thibaultbee.streampack.core.streamers.single.setConfig import io.github.thibaultbee.streampack.core.streamers.single.startStream -import io.github.thibaultbee.streampack.core.utils.ConfigurationUtils -import io.github.thibaultbee.streampack.core.utils.FileUtils import kotlinx.coroutines.test.runTest import org.junit.Assert.fail import org.junit.Rule @@ -35,7 +34,7 @@ import org.junit.runner.RunWith import org.junit.runners.Parameterized @RunWith(Parameterized::class) -class AudioOnlyStreamerStateTest(private val descriptor: MediaDescriptor) { +class AudioOnlySingleStreamerStateTest(private val descriptor: MediaDescriptor) { private val context: Context = InstrumentationRegistry.getInstrumentation().context private val streamer = AudioOnlySingleStreamer(context) @@ -55,37 +54,14 @@ class AudioOnlyStreamerStateTest(private val descriptor: MediaDescriptor) { // Single method calls @Test - fun configureAudioOnlyTest() { + fun configureAudioOnlyTest() = runTest { streamer.setAudioConfig( ConfigurationUtils.dummyValidAudioConfig() ) } @Test - fun configureVideoOnlyTest() { - try { - streamer.setVideoConfig( - ConfigurationUtils.dummyValidVideoConfig() - ) - fail("Must not be possible to configure video") - } catch (_: Throwable) { - } - } - - @Test - fun configureTest() { - try { - streamer.setConfig( - ConfigurationUtils.dummyValidAudioConfig(), - ConfigurationUtils.dummyValidVideoConfig() - ) - fail("Must not be possible to configure video") - } catch (_: Throwable) { - } - } - - @Test - fun configureErrorTest() { + fun configureErrorTest() = runTest { try { streamer.setAudioConfig( ConfigurationUtils.dummyInvalidAudioConfig() @@ -105,7 +81,7 @@ class AudioOnlyStreamerStateTest(private val descriptor: MediaDescriptor) { } @Test - fun configureReleaseTest() { + fun configureReleaseTest() = runTest { streamer.setAudioConfig( ConfigurationUtils.dummyValidAudioConfig() ) @@ -151,7 +127,7 @@ class AudioOnlyStreamerStateTest(private val descriptor: MediaDescriptor) { } @Test - fun multipleConfigureTest() { + fun multipleConfigureTest() = runTest { (0..10).forEach { _ -> streamer.setAudioConfig( ConfigurationUtils.dummyValidAudioConfig() diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/CameraStreamerStateTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/CameraSingleStreamerStateTest.kt similarity index 93% rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/CameraStreamerStateTest.kt rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/CameraSingleStreamerStateTest.kt index f44bd74da..2e2213b9d 100644 --- 
a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/CameraStreamerStateTest.kt +++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/CameraSingleStreamerStateTest.kt @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package io.github.thibaultbee.streampack.core.streamer.state +package io.github.thibaultbee.streampack.core.streamer.single.state import android.Manifest import android.content.Context @@ -23,14 +23,13 @@ import androidx.test.platform.app.InstrumentationRegistry import androidx.test.rule.GrantPermissionRule import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor +import io.github.thibaultbee.streampack.core.pipelines.utils.ConfigurationUtils +import io.github.thibaultbee.streampack.core.pipelines.utils.FileUtils import io.github.thibaultbee.streampack.core.streamer.surface.SurfaceUtils import io.github.thibaultbee.streampack.core.streamer.surface.SurfaceViewTestActivity import io.github.thibaultbee.streampack.core.streamers.interfaces.startPreview import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer -import io.github.thibaultbee.streampack.core.streamers.single.setConfig import io.github.thibaultbee.streampack.core.streamers.single.startStream -import io.github.thibaultbee.streampack.core.utils.ConfigurationUtils -import io.github.thibaultbee.streampack.core.utils.FileUtils import kotlinx.coroutines.test.runTest import org.junit.Rule import org.junit.Test @@ -38,8 +37,8 @@ import org.junit.runner.RunWith import org.junit.runners.Parameterized @RunWith(Parameterized::class) -class CameraStreamerStateTest(descriptor: MediaDescriptor) : - StreamerStateTest( +class CameraSingleStreamerStateTest(descriptor: MediaDescriptor) : + SingleStreamerStateTest( descriptor ) { private val context: Context = InstrumentationRegistry.getInstrumentation().context diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/StreamerStateTest.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/SingleStreamerStateTest.kt similarity index 85% rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/StreamerStateTest.kt rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/SingleStreamerStateTest.kt index cb6cd24ab..d37e04281 100644 --- a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/state/StreamerStateTest.kt +++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/single/state/SingleStreamerStateTest.kt @@ -13,26 +13,26 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.github.thibaultbee.streampack.core.streamer.state +package io.github.thibaultbee.streampack.core.streamer.single.state import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor -import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer -import io.github.thibaultbee.streampack.core.streamers.single.setConfig +import io.github.thibaultbee.streampack.core.pipelines.utils.ConfigurationUtils +import io.github.thibaultbee.streampack.core.streamers.interfaces.releaseBlocking +import io.github.thibaultbee.streampack.core.streamers.single.SingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.startStream -import io.github.thibaultbee.streampack.core.utils.ConfigurationUtils import kotlinx.coroutines.test.runTest import org.junit.After import org.junit.Assert.fail import org.junit.Test -abstract class StreamerStateTest( +abstract class SingleStreamerStateTest( protected val descriptor: MediaDescriptor ) { - protected abstract val streamer: ICoroutineSingleStreamer + protected abstract val streamer: SingleStreamer @After open fun tearDown() { - streamer.release() + streamer.releaseBlocking() } @Test @@ -48,21 +48,21 @@ abstract class StreamerStateTest( // Single method calls @Test - open fun configureAudioOnlyTest() { + open fun configureAudioOnlyTest() = runTest { streamer.setAudioConfig( ConfigurationUtils.dummyValidAudioConfig() ) } @Test - open fun configureVideoOnlyTest() { + open fun configureVideoOnlyTest() = runTest { streamer.setVideoConfig( ConfigurationUtils.dummyValidVideoConfig() ) } @Test - open fun configureTest() { + open fun configureTest() = runTest { streamer.setConfig( ConfigurationUtils.dummyValidAudioConfig(), ConfigurationUtils.dummyValidVideoConfig() @@ -70,7 +70,7 @@ abstract class StreamerStateTest( } @Test - open fun configureErrorTest() { + open fun configureErrorTest() = runTest { try { streamer.setConfig( ConfigurationUtils.dummyInvalidAudioConfig(), @@ -102,7 +102,7 @@ abstract class StreamerStateTest( } @Test - fun releaseTest() { + fun releaseTest() = runTest { streamer.release() } @@ -117,7 +117,7 @@ abstract class StreamerStateTest( } @Test - open fun configureReleaseTest() { + open fun configureReleaseTest() = runTest { streamer.setConfig( ConfigurationUtils.dummyValidAudioConfig(), ConfigurationUtils.dummyValidVideoConfig() @@ -156,7 +156,7 @@ abstract class StreamerStateTest( // Stress test @Test - open fun multipleConfigureTest() { + open fun multipleConfigureTest() = runTest { (0..10).forEach { _ -> streamer.setConfig( ConfigurationUtils.dummyValidAudioConfig(), diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/StreamerUtils.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/utils/StreamerUtils.kt similarity index 51% rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/StreamerUtils.kt rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/utils/StreamerUtils.kt index d94388777..c0e2e1ed2 100644 --- a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/StreamerUtils.kt +++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/utils/StreamerUtils.kt @@ -1,6 +1,9 @@ -package io.github.thibaultbee.streampack.core.streamer.file +package io.github.thibaultbee.streampack.core.streamer.utils import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor +import 
io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.startStream +import io.github.thibaultbee.streampack.core.streamers.dual.DualStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineStreamer import io.github.thibaultbee.streampack.core.streamers.single.SingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.startStream import kotlinx.coroutines.Dispatchers @@ -11,13 +14,35 @@ import kotlin.time.Duration import kotlin.time.Duration.Companion.seconds object StreamerUtils { - suspend fun runStream( + suspend fun runSingleStream( streamer: SingleStreamer, descriptor: MediaDescriptor, duration: Duration, pollDuration: Duration = 1.seconds ) { streamer.startStream(descriptor) + + runStream(streamer, duration, pollDuration) + } + + suspend fun runDualStream( + streamer: DualStreamer, + firstDescriptor: MediaDescriptor, + secondDescriptor: MediaDescriptor, + duration: Duration, + pollDuration: Duration = 1.seconds + ) { + streamer.first.startStream(firstDescriptor) + streamer.second.startStream(secondDescriptor) + + runStream(streamer, duration, pollDuration) + } + + private suspend fun runStream( + streamer: ICoroutineStreamer, + duration: Duration, + pollDuration: Duration = 1.seconds + ) { var i = 0 val numOfLoop = duration / pollDuration withContext(Dispatchers.Default) { @@ -27,6 +52,7 @@ object StreamerUtils { assertTrue(streamer.isStreaming.value) } } + streamer.stopStream() streamer.close() } diff --git a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/VideoUtils.kt b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/utils/VideoUtils.kt similarity index 86% rename from core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/VideoUtils.kt rename to core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/utils/VideoUtils.kt index a500b44b9..3e73f83cc 100644 --- a/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/file/VideoUtils.kt +++ b/core/src/androidTest/java/io/github/thibaultbee/streampack/core/streamer/utils/VideoUtils.kt @@ -1,4 +1,4 @@ -package io.github.thibaultbee.streampack.core.streamer.file +package io.github.thibaultbee.streampack.core.streamer.utils import android.content.Context import android.media.MediaMetadataRetriever @@ -9,6 +9,7 @@ import android.media.MediaMetadataRetriever.METADATA_KEY_SAMPLERATE import android.media.MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT import android.media.MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH import android.net.Uri +import androidx.core.net.toFile import io.github.thibaultbee.streampack.core.elements.utils.extensions.isDevicePortrait import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig @@ -17,11 +18,28 @@ import org.junit.Assert.assertTrue import java.io.File object VideoUtils { + fun verifyFile(file: File) { assertTrue(file.exists()) assertTrue(file.length() > 0) } + fun verifyFile( + context: Context, + uri: Uri, + verifyVideo: Boolean, + audioConfig: AudioConfig?, + videoConfig: VideoConfig?, + ) { + // Check files + verifyFile(uri.toFile()) + + // Check video metadata + if (verifyVideo) { + verify(context, uri, audioConfig, videoConfig) + } + } + fun verify( context: Context, uri: Uri, diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/encoders/mediacodec/MediaCodecEncoder.kt 
b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/encoders/mediacodec/MediaCodecEncoder.kt index a93b70221..9189dde11 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/encoders/mediacodec/MediaCodecEncoder.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/encoders/mediacodec/MediaCodecEncoder.kt @@ -418,6 +418,8 @@ internal constructor( val buffer = requireNotNull(mediaCodec.getInputBuffer(index)) val frame = input.listener.onFrameRequested(buffer) queueInputFrame(index, frame) + } catch (e: IllegalArgumentException) { + Logger.e(tag, "Failed to get input buffer: $e") } catch (t: Throwable) { handleError(t) } diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/endpoints/MediaContainerType.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/endpoints/MediaContainerType.kt index 16df05f24..b9821d2ab 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/endpoints/MediaContainerType.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/endpoints/MediaContainerType.kt @@ -20,9 +20,7 @@ enum class MediaContainerType(val values: Set) { } internal fun inferFromFileUri(uri: Uri): MediaContainerType { - val path = uri.path - val extension = path?.substringAfterLast('.') - ?: throw IllegalArgumentException("No extension found in uri: $uri") + val extension = uri.toString().substringAfterLast('.') return inferFromExtension(extension) } diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/SurfaceProcessor.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/SurfaceProcessor.kt index dc3331f7d..38be25bf7 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/SurfaceProcessor.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/SurfaceProcessor.kt @@ -14,7 +14,7 @@ import java.util.concurrent.atomic.AtomicBoolean class SurfaceProcessor( - val dynamicRangeProfile: DynamicRangeProfile + private val dynamicRangeProfile: DynamicRangeProfile ) : ISurfaceProcessorInternal, SurfaceTexture.OnFrameAvailableListener { private val renderer = OpenGlRenderer() @@ -178,8 +178,10 @@ class SurfaceProcessor( surfaceOutputs.forEach { try { - it.updateTransformMatrix(surfaceOutputMatrix, textureMatrix) - renderer.render(surfaceTexture.timestamp, surfaceOutputMatrix, it.surface) + if (it.isStreaming()) { + it.updateTransformMatrix(surfaceOutputMatrix, textureMatrix) + renderer.render(surfaceTexture.timestamp, surfaceOutputMatrix, it.surface) + } } catch (e: Exception) { Logger.e(TAG, "Error while rendering frame", e) } diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/outputs/AbstractSurfaceOutput.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/outputs/AbstractSurfaceOutput.kt index 930348a4f..a0d457cb3 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/outputs/AbstractSurfaceOutput.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/outputs/AbstractSurfaceOutput.kt @@ -24,7 +24,8 @@ import io.github.thibaultbee.streampack.core.elements.processing.video.utils.GLU open class AbstractSurfaceOutput( override val surface: Surface, - final override val resolution: Size + final override val resolution: Size, + override val isStreaming: () -> 
Boolean ) : ISurfaceOutput { protected val lock = Any() protected var isClosed = false @@ -51,6 +52,7 @@ interface ISurfaceOutput { val surface: Surface val cropRect: Rect val resolution: Size + val isStreaming: () -> Boolean fun updateTransformMatrix(output: FloatArray, input: FloatArray) diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/outputs/SurfaceOutput.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/outputs/SurfaceOutput.kt index f2ad4fa0c..2a405ad15 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/outputs/SurfaceOutput.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/outputs/SurfaceOutput.kt @@ -27,13 +27,15 @@ import io.github.thibaultbee.streampack.core.elements.processing.video.utils.ext import io.github.thibaultbee.streampack.core.elements.processing.video.utils.extensions.toRectF import io.github.thibaultbee.streampack.core.elements.utils.RotationValue import io.github.thibaultbee.streampack.core.elements.utils.extensions.rotate +import io.github.thibaultbee.streampack.core.logger.Logger class SurfaceOutput( surface: Surface, resolution: Size, + isStreaming: () -> Boolean, private val transformationInfo: TransformationInfo ) : - AbstractSurfaceOutput(surface, resolution) { + AbstractSurfaceOutput(surface, resolution, isStreaming) { private val infoProvider: ISourceInfoProvider get() = transformationInfo.infoProvider @@ -51,6 +53,10 @@ class SurfaceOutput( private val invertedTextureTransform = FloatArray(16) init { + Logger.e( + "SurfaceOutput", + ">>>> SurfaceOutput: rotationDegrees: $rotationDegrees, sourceRotationDegrees: $sourceRotationDegrees transformInfo: $transformationInfo" + ) calculateAdditionalTransform( additionalTransform, invertedTextureTransform, diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/AbstractSourceInfoProvider.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/DefaultSourceInfoProvider.kt similarity index 65% rename from core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/AbstractSourceInfoProvider.kt rename to core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/DefaultSourceInfoProvider.kt index 239983649..8a7a1c13c 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/AbstractSourceInfoProvider.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/DefaultSourceInfoProvider.kt @@ -18,14 +18,10 @@ package io.github.thibaultbee.streampack.core.elements.processing.video.source import androidx.annotation.IntRange import io.github.thibaultbee.streampack.core.elements.utils.RotationValue -abstract class AbstractSourceInfoProvider : ISourceInfoProvider { - protected val listeners = mutableSetOf() - - override val isMirror = false - - @IntRange(from = 0, to = 359) - override val rotationDegrees = 0 - +open class DefaultSourceInfoProvider( + override val isMirror: Boolean = false, + @IntRange(from = 0, to = 359) override val rotationDegrees: Int = 0 +) : ISourceInfoProvider { @IntRange(from = 0, to = 359) override fun getRelativeRotationDegrees( @RotationValue targetRotation: Int, requiredMirroring: Boolean @@ -33,15 +29,7 @@ abstract class AbstractSourceInfoProvider : ISourceInfoProvider { return 0 } - override fun 
addListener(listener: ISourceInfoListener) { - listeners.add(listener) - } - - override fun removeListener(listener: ISourceInfoListener) { - listeners.remove(listener) - } - - override fun removeAllListeners() { - listeners.clear() + override fun toString(): String { + return "DefaultSourceInfoProvider(isMirror=$isMirror, rotationDegrees=$rotationDegrees)" } } \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/ISourceInfoProvider.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/ISourceInfoProvider.kt index 816ce84a2..8501d3779 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/ISourceInfoProvider.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/processing/video/source/ISourceInfoProvider.kt @@ -55,34 +55,5 @@ interface ISourceInfoProvider { /** * Gets the size of the surface to allocate to display the source. */ - fun getSurfaceSize(size: Size, @RotationValue targetRotation: Int): Size = size - - /** - * Adds a listener to be notified when the orientation changes. - * - * @param listener to add. - */ - fun addListener(listener: ISourceInfoListener) - - /** - * Removes a listener. - * - * @param listener to remove. - */ - fun removeListener(listener: ISourceInfoListener) - - /** - * Removes all registered listeners. - */ - fun removeAllListeners() + fun getSurfaceSize(size: Size): Size = size } - -/** - * Interface to be notified when the orientation changes. - */ -interface ISourceInfoListener { - /** - * Called when the orientation changes. - */ - fun onInfoChanged() -} \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/IVideoSource.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/IVideoSource.kt index 7296906fd..7837d3496 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/IVideoSource.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/IVideoSource.kt @@ -19,6 +19,7 @@ import io.github.thibaultbee.streampack.core.elements.interfaces.Configurable import io.github.thibaultbee.streampack.core.elements.interfaces.Releasable import io.github.thibaultbee.streampack.core.elements.interfaces.SuspendStreamable import io.github.thibaultbee.streampack.core.elements.processing.video.source.ISourceInfoProvider +import kotlinx.coroutines.flow.StateFlow interface IVideoSourceInternal : IVideoSource, SuspendStreamable, Configurable, Releasable { @@ -26,7 +27,7 @@ interface IVideoSourceInternal : IVideoSource, * Orientation provider of the capture source. * It is used to orientate the frame according to the source orientation. 
*/ - val infoProvider: ISourceInfoProvider + val infoProviderFlow: StateFlow } interface IVideoSource \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/camera/CameraInfoProvider.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/camera/CameraInfoProvider.kt index fea38fd8e..4b96d71a1 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/camera/CameraInfoProvider.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/camera/CameraInfoProvider.kt @@ -19,32 +19,29 @@ import android.content.Context import android.hardware.camera2.CameraCharacteristics import android.util.Size import androidx.annotation.IntRange -import io.github.thibaultbee.streampack.core.elements.processing.video.source.AbstractSourceInfoProvider +import io.github.thibaultbee.streampack.core.elements.processing.video.source.ISourceInfoProvider import io.github.thibaultbee.streampack.core.elements.utils.RotationValue import io.github.thibaultbee.streampack.core.elements.utils.extensions.landscapize import io.github.thibaultbee.streampack.core.elements.utils.extensions.rotationToDegrees +fun CameraInfoProvider( + context: Context, + cameraId: String +): CameraInfoProvider { + val characteristics = context.getCameraCharacteristics(cameraId) + val rotationDegrees = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0 + val facingDirection = characteristics.get(CameraCharacteristics.LENS_FACING) + return CameraInfoProvider(rotationDegrees, facingDirection = facingDirection) +} + class CameraInfoProvider( - private val context: Context, - private val cameraController: CameraController, - var defaultCamera: String + @IntRange(from = 0, to = 359) override val rotationDegrees: Int, + private val facingDirection: Int? ) : - AbstractSourceInfoProvider() { - - val cameraId: String - get() = cameraController.cameraId ?: defaultCamera - - override val rotationDegrees: Int - @IntRange(from = 0, to = 359) - get() { - val characteristics = context.getCameraCharacteristics(cameraId) - return characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0 - } - - val isFrontFacing: Boolean - get() = context.getFacingDirection(cameraId) == CameraCharacteristics.LENS_FACING_FRONT + ISourceInfoProvider { - override val isMirror = false + val isFrontFacing: Boolean = facingDirection == CameraCharacteristics.LENS_FACING_FRONT + override val isMirror = isFrontFacing @IntRange(from = 0, to = 359) override fun getRelativeRotationDegrees( @@ -60,12 +57,15 @@ class CameraInfoProvider( // Currently this assumes that a back-facing camera is always opposite to the screen. // This may not be the case for all devices, so in the future we may need to handle that // scenario. 
- val lensFacing = context.getFacingDirection(cameraId) - val isOppositeFacingScreen = CameraCharacteristics.LENS_FACING_BACK == lensFacing + val isOppositeFacingScreen = CameraCharacteristics.LENS_FACING_BACK == facingDirection return CameraOrientationUtils.getRelativeRotation( targetRotationDegrees, sensorOrientation, isOppositeFacingScreen ) } - override fun getSurfaceSize(size: Size, targetRotation: Int) = size.landscapize + override fun getSurfaceSize(size: Size) = size.landscapize + + override fun toString(): String { + return "CameraInfoProvider(rotationDegrees=$rotationDegrees, isMirror=$isMirror, facingDirection=$facingDirection, isFrontFacing=$isFrontFacing)" + } } \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/camera/CameraSource.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/camera/CameraSource.kt index 60251c41b..d28a9da55 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/camera/CameraSource.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/camera/CameraSource.kt @@ -19,10 +19,14 @@ import android.Manifest import android.content.Context import android.view.Surface import androidx.annotation.RequiresPermission +import io.github.thibaultbee.streampack.core.elements.processing.video.source.ISourceInfoProvider import io.github.thibaultbee.streampack.core.elements.sources.video.ISurfaceSource import io.github.thibaultbee.streampack.core.elements.sources.video.VideoSourceConfig import io.github.thibaultbee.streampack.core.elements.utils.av.video.DynamicRangeProfile import io.github.thibaultbee.streampack.core.logger.Logger +import kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.flow.asStateFlow import kotlinx.coroutines.runBlocking class CameraSource( @@ -129,7 +133,7 @@ class CameraSource( Logger.e(TAG, "Camera $cameraId does not support $fps fps") } - infoProvider.defaultCamera = cameraId + _infoProviderFlow.emit(CameraInfoProvider(context, cameraId)) if (cameraController.isCameraRunning()) { // Restart camera with new cameraId restartCamera(cameraId = cameraId) @@ -154,7 +158,9 @@ class CameraSource( override val settings = CameraSettings(context, cameraController) override val timestampOffset = CameraHelper.getTimeOffsetToMonoClock(context, cameraId) - override val infoProvider = CameraInfoProvider(context, cameraController, cameraId) + private val _infoProviderFlow = + MutableStateFlow(CameraInfoProvider(context, cameraId)) + override val infoProviderFlow: StateFlow = _infoProviderFlow.asStateFlow() private var fps: Int = 30 private var dynamicRangeProfile: DynamicRangeProfile = DynamicRangeProfile.sdr diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/mediaprojection/MediaProjectionVideoSource.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/mediaprojection/MediaProjectionVideoSource.kt index 249e06ae5..92f1be2ad 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/mediaprojection/MediaProjectionVideoSource.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/sources/video/mediaprojection/MediaProjectionVideoSource.kt @@ -24,7 +24,7 @@ import android.os.Handler import android.os.HandlerThread import android.view.Surface import androidx.activity.result.ActivityResult -import 
io.github.thibaultbee.streampack.core.elements.processing.video.source.AbstractSourceInfoProvider +import io.github.thibaultbee.streampack.core.elements.processing.video.source.DefaultSourceInfoProvider import io.github.thibaultbee.streampack.core.elements.sources.IMediaProjectionSource import io.github.thibaultbee.streampack.core.elements.sources.video.ISurfaceSource import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSourceInternal @@ -32,13 +32,15 @@ import io.github.thibaultbee.streampack.core.elements.sources.video.VideoSourceC import io.github.thibaultbee.streampack.core.elements.utils.extensions.densityDpi import io.github.thibaultbee.streampack.core.elements.utils.extensions.screenRect import io.github.thibaultbee.streampack.core.logger.Logger +import kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.asStateFlow class MediaProjectionVideoSource( private val context: Context ) : IVideoSourceInternal, ISurfaceSource, IMediaProjectionSource { override var outputSurface: Surface? = null override val timestampOffset = 0L - override val infoProvider = ScreenSourceInfoProvider() + override val infoProviderFlow = MutableStateFlow(DefaultSourceInfoProvider()).asStateFlow() private var mediaProjection: MediaProjection? = null @@ -140,9 +142,6 @@ class MediaProjectionVideoSource( } } - class ScreenSourceInfoProvider : - AbstractSourceInfoProvider() - interface Listener { fun onStop() } diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/utils/extensions/CodecConfigExtensions.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/utils/extensions/CodecConfigExtensions.kt index ac35cfa7d..599484a62 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/utils/extensions/CodecConfigExtensions.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/utils/extensions/CodecConfigExtensions.kt @@ -17,20 +17,6 @@ fun AudioCodecConfig.isCompatibleWith(sourceConfig: AudioSourceConfig): Boolean && (byteFormat == sourceConfig.byteFormat) } -/** - * Merges [AudioCodecConfig] with [AudioSourceConfig]. - * - * @param sourceConfig [AudioSourceConfig] to merge with - * @return [AudioCodecConfig] merged with [AudioSourceConfig] - */ -fun AudioCodecConfig.mergeWith(sourceConfig: AudioSourceConfig): AudioCodecConfig { - return copy( - channelConfig = sourceConfig.channelConfig, - sampleRate = sourceConfig.sampleRate, - byteFormat = sourceConfig.byteFormat - ) -} - /** * Converts [AudioCodecConfig] to [AudioSourceConfig]. * @@ -50,26 +36,7 @@ val AudioCodecConfig.sourceConfig: AudioSourceConfig * @return `true` if [VideoCodecConfig] is compatible with [VideoCodecConfig], `false` otherwise */ fun VideoCodecConfig.isCompatibleWith(sourceConfig: VideoSourceConfig): Boolean { - require(dynamicRangeProfile == sourceConfig.dynamicRangeProfile) { - "Dynamic range profile must be the same: $dynamicRangeProfile != ${sourceConfig.dynamicRangeProfile}" - } - return (fps == sourceConfig.fps) -} - -/** - * Merges [VideoCodecConfig] with [VideoCodecConfig]. 
- * - * @param sourceConfig [VideoCodecConfig] to merge with - * @return [VideoCodecConfig] merged with [VideoCodecConfig] - */ -fun VideoCodecConfig.mergeWith(sourceConfig: VideoSourceConfig): VideoCodecConfig { - require(dynamicRangeProfile == sourceConfig.dynamicRangeProfile) { - "Dynamic range profile must be the same: $dynamicRangeProfile != ${sourceConfig.dynamicRangeProfile}" - } - - return copy( - fps = sourceConfig.fps - ) + return (fps == sourceConfig.fps) && (dynamicRangeProfile == sourceConfig.dynamicRangeProfile) } /** diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/elements/utils/extensions/StateFlowExtensions.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/utils/extensions/StateFlowExtensions.kt new file mode 100644 index 000000000..8320a7eea --- /dev/null +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/elements/utils/extensions/StateFlowExtensions.kt @@ -0,0 +1,14 @@ +package io.github.thibaultbee.streampack.core.elements.utils.extensions + +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.flow.filterNotNull +import kotlinx.coroutines.flow.runningFold + +data class History(val previous: T?, val current: T) + +fun StateFlow.runningHistory(): Flow> = + runningFold( + initial = null as (History?), + operation = { accumulator, new -> History(accumulator?.current, new) } + ).filterNotNull() diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/StreamerPipeline.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/StreamerPipeline.kt new file mode 100644 index 000000000..74efb623b --- /dev/null +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/StreamerPipeline.kt @@ -0,0 +1,737 @@ +/* + * Copyright (C) 2025 Thibault B. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
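The new `runningHistory()` extension introduced above pairs each `StateFlow` emission with the previous value, which the pipeline later uses to swap encoder surfaces. A small usage sketch; the type parameters are elided in the patch text, and `History<T>` is read as carrying `previous` and `current`:

```kotlin
import io.github.thibaultbee.streampack.core.elements.utils.extensions.runningHistory
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.launch

suspend fun runningHistoryDemo() = coroutineScope {
    val rotation = MutableStateFlow(0)

    val job = launch {
        rotation.runningHistory().collect { (previous, current) ->
            // First emission has previous == null; later ones carry the old value.
            // Note: StateFlow conflation may skip intermediate values if the collector lags.
            println("rotation changed from $previous to $current")
        }
    }

    rotation.value = 90   // typically prints "rotation changed from 0 to 90"
    rotation.value = 180  // typically prints "rotation changed from 90 to 180"
    job.cancel()
}
```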
+ */ +package io.github.thibaultbee.streampack.core.pipelines + +import android.content.Context +import android.util.Size +import android.view.Surface +import io.github.thibaultbee.streampack.core.elements.data.Frame +import io.github.thibaultbee.streampack.core.elements.encoders.AudioCodecConfig +import io.github.thibaultbee.streampack.core.elements.encoders.IEncoderInternal +import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig +import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpointInternal +import io.github.thibaultbee.streampack.core.elements.processing.video.ISurfaceProcessorInternal +import io.github.thibaultbee.streampack.core.elements.processing.video.SurfaceProcessor +import io.github.thibaultbee.streampack.core.elements.processing.video.outputs.AbstractSurfaceOutput +import io.github.thibaultbee.streampack.core.elements.processing.video.outputs.SurfaceOutput +import io.github.thibaultbee.streampack.core.elements.processing.video.source.ISourceInfoProvider +import io.github.thibaultbee.streampack.core.elements.sources.audio.AudioSourceConfig +import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSource +import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSourceInternal +import io.github.thibaultbee.streampack.core.elements.sources.video.ISurfaceSource +import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoFrameSource +import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSource +import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSourceInternal +import io.github.thibaultbee.streampack.core.elements.sources.video.VideoSourceConfig +import io.github.thibaultbee.streampack.core.elements.utils.RotationValue +import io.github.thibaultbee.streampack.core.elements.utils.extensions.displayRotation +import io.github.thibaultbee.streampack.core.elements.utils.extensions.isCompatibleWith +import io.github.thibaultbee.streampack.core.elements.utils.extensions.runningHistory +import io.github.thibaultbee.streampack.core.elements.utils.extensions.sourceConfig +import io.github.thibaultbee.streampack.core.logger.Logger +import io.github.thibaultbee.streampack.core.pipelines.outputs.IAudioPipelineOutputInternal +import io.github.thibaultbee.streampack.core.pipelines.outputs.IPipelineOutput +import io.github.thibaultbee.streampack.core.pipelines.outputs.IPipelineOutputInternal +import io.github.thibaultbee.streampack.core.pipelines.outputs.IVideoPipelineOutputInternal +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.EncodingPipelineOutput +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IConfigurableAudioPipelineOutputInternal +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IConfigurableVideoPipelineOutputInternal +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput +import io.github.thibaultbee.streampack.core.pipelines.utils.SourceConfigUtils +import kotlinx.coroutines.CoroutineDispatcher +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.cancel +import kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.launch +import kotlinx.coroutines.runBlocking +import kotlinx.coroutines.sync.Mutex +import kotlinx.coroutines.sync.withLock +import kotlinx.coroutines.withContext +import java.nio.ByteBuffer + +/** + * Base class of all 
streamers. + * + * @param context the application context + * @param videoSourceInternal the video source implementation + * @param audioSourceInternal the audio source implementation + */ +open class StreamerPipeline( + protected val context: Context, + protected val audioSourceInternal: IAudioSourceInternal?, + protected val videoSourceInternal: IVideoSourceInternal?, + protected val coroutineDispatcher: CoroutineDispatcher = Dispatchers.Default, + protected val coroutineScope: CoroutineScope = CoroutineScope(coroutineDispatcher), +) { + private var surfaceProcessor: ISurfaceProcessorInternal? = null + + private val _throwable = MutableStateFlow(null) + val throwable: StateFlow = _throwable + + private val sourceInfoProvider: ISourceInfoProvider? + get() = videoSourceInternal?.infoProviderFlow?.value + + // SOURCES + private val audioSourceMutex = Mutex() + private val videoSourceMutex = Mutex() + + /** + * The audio source. + * It allows advanced audio settings. + */ + val audioSource: IAudioSource? + get() = audioSourceInternal + + /** + * The video source. + * It allows advanced video settings. + */ + val videoSource: IVideoSource? + get() = videoSourceInternal + + private val _isStreaming = MutableStateFlow(false) + val isStreaming: StateFlow = _isStreaming + + /** + * Whether the streamer has audio. + */ + val hasAudio = audioSourceInternal != null + + /** + * Whether the streamer has video. + */ + val hasVideo = videoSourceInternal != null + + // OUTPUTS + private val outputs = mutableMapOf() + + /** + * Sets the target rotation of all outputs.s + */ + var targetRotation: Int = context.displayRotation + set(@RotationValue value) { + outputs.keys.forEach { + (it as? IVideoPipelineOutputInternal)?.targetRotation = value + } + field = value + } + + + init { + videoSourceInternal?.infoProviderFlow?.let { + coroutineScope.launch { + it.collect { + resetSurfaceProcessorOutputSurface() + } + } + } + } + + private var _audioSourceConfig: AudioSourceConfig? = null + private val audioSourceConfig: AudioSourceConfig + get() = requireNotNull(_audioSourceConfig) { "Audio source config is not set" } + + private suspend fun setAudioSourceConfig(audioConfig: AudioCodecConfig) = + setAudioSourceConfig(audioConfig.sourceConfig) + + private suspend fun setAudioSourceConfig(value: AudioSourceConfig) = + withContext(coroutineDispatcher) { + audioSourceMutex.withLock { + Logger.e(TAG, "setAudioSourceConfig") + require(hasAudio) { "Do not need to set audio as it is a video only streamer" } + require(!isStreaming.value) { "Can't change audio source configuration while streaming" } + + if (_audioSourceConfig == value) { + Logger.i(TAG, "Audio source configuration is the same, skipping configuration") + return@withContext + } + _audioSourceConfig = value + applyAudioSourceConfig(value) + } + } + + private fun applyAudioSourceConfig(audioConfig: AudioSourceConfig) { + try { + audioSourceInternal?.configure(audioConfig) + } catch (t: Throwable) { + throw t + } + } + + private var _videoSourceConfig: VideoSourceConfig? 
= null + private val videoSourceConfig: VideoSourceConfig + get() = requireNotNull(_videoSourceConfig) { "Video source config is not set" } + + private suspend fun setVideoSourceConfig(value: VideoSourceConfig) = + withContext(coroutineDispatcher) { + videoSourceMutex.withLock { + require(hasVideo) { "Do not need to set video as it is a audio only streamer" } + require(!isStreaming.value) { "Can't change video source configuration while streaming" } + + if (_videoSourceConfig == value) { + Logger.i(TAG, "Video source configuration is the same, skipping configuration") + return@withContext + } + + val previousVideoConfig = _videoSourceConfig + _videoSourceConfig = value + applyVideoSourceConfig(previousVideoConfig, value) + } + } + + private fun applyVideoSourceConfig( + previousVideoConfig: VideoSourceConfig?, videoConfig: VideoSourceConfig + ) { + try { + videoSourceInternal?.configure(videoConfig) + + // Update surface processor + if (videoSourceInternal is ISurfaceSource) { + val currentSurfaceProcessor = surfaceProcessor + if (currentSurfaceProcessor == null) { + surfaceProcessor = buildSurfaceProcessor(videoConfig) + } else if (previousVideoConfig?.dynamicRangeProfile != videoConfig.dynamicRangeProfile) { + releaseSurfaceProcessor() + surfaceProcessor = buildSurfaceProcessor(videoConfig) + } else if (previousVideoConfig.resolution != videoConfig.resolution) { + val outputSurface = requireNotNull(videoSourceInternal.outputSurface) { + "Video source must have an output surface" + } + currentSurfaceProcessor.updateInputSurface( + outputSurface, videoSourceInternal.infoProviderFlow.value.getSurfaceSize( + videoSourceConfig.resolution + ) + ) + } + } + } catch (t: Throwable) { + throw t + } + } + + private fun buildSurfaceOutput( + videoOutput: IVideoPipelineOutputInternal + ): AbstractSurfaceOutput { + val surfaceWithSize = requireNotNull(videoOutput.surface.value) { + "Output $videoOutput has no surface" + } + + Logger.e(TAG, ">>>>>> Build surface output ${sourceInfoProvider!!}") + return buildSurfaceOutput( + surfaceWithSize.surface, + surfaceWithSize.resolution, + videoOutput.isStreaming::value, + sourceInfoProvider!! + ) + } + + /** + * Creates a surface output for the given surface. + * + * Use it for additional processing. + * + * @param surface the encoder surface + * @param resolution the resolution of the surface + * @param infoProvider the source info provider for internal processing + */ + private fun buildSurfaceOutput( + surface: Surface, + resolution: Size, + isStreaming: () -> Boolean, + infoProvider: ISourceInfoProvider + ): AbstractSurfaceOutput { + return SurfaceOutput( + surface, resolution, isStreaming, SurfaceOutput.TransformationInfo( + targetRotation, isMirroringRequired(), infoProvider + ) + ) + } + + /** + * Whether the output surface needs to be mirrored. + */ + protected open fun isMirroringRequired(): Boolean { + return false + } + + /** + * Updates the transformation of the surface output. + * To be called when the source info provider or [isMirroringRequired] is updated. + */ + private fun resetSurfaceProcessorOutputSurface() { + outputs.keys.filterIsInstance() + .filter { it.surface.value != null }.forEach { + resetSurfaceProcessorOutputSurface(it) + } + } + + /** + * Updates the transformation of the surface output. 
+ */ + private fun resetSurfaceProcessorOutputSurface( + videoOutput: IVideoPipelineOutputInternal + ) { + Logger.i(TAG, "Updating transformation") + videoOutput.surface.value?.let { + surfaceProcessor?.removeOutputSurface(it.surface) + } + + surfaceProcessor?.addOutputSurface(buildSurfaceOutput(videoOutput)) + } + + private fun releaseSurfaceProcessor() { + val videoSource = videoSourceInternal + if (videoSource is ISurfaceSource) { + videoSource.outputSurface?.let { + surfaceProcessor?.removeInputSurface(it) + } + } + surfaceProcessor?.removeAllOutputSurfaces() + surfaceProcessor?.release() + } + + private fun buildSurfaceProcessor( + videoSourceConfig: VideoSourceConfig + ): ISurfaceProcessorInternal { + val videoSource = videoSourceInternal + if (videoSource !is ISurfaceSource) { + throw IllegalStateException("Video source must have an output surface") + } + + val newSurfaceProcessor = SurfaceProcessor(videoSourceConfig.dynamicRangeProfile) + + // Adds surface processor input + videoSource.outputSurface = newSurfaceProcessor.createInputSurface( + videoSource.infoProviderFlow.value.getSurfaceSize( + videoSourceConfig.resolution + ) + ) + + // Adds surface processor output + outputs.keys.filterIsInstance() + .filter { it.surface.value != null }.forEach { + newSurfaceProcessor.addOutputSurface(buildSurfaceOutput(it)) + } + + return newSurfaceProcessor + } + + /** + * Creates and adds an output to the pipeline to the given [endpoint]. + * + * @param endpoint the endpoint to add the output to + * @param targetRotation the target rotation of the output + * + * @return the [EncodingPipelineOutput] created + */ + suspend fun addOutput( + endpoint: IEndpointInternal, @RotationValue targetRotation: Int = context.displayRotation + ): IEncodingPipelineOutput { + val output = EncodingPipelineOutput(context, endpoint, targetRotation) + return addOutput(output) + } + + /** + * Adds an output. + * + * @param output the output to add + * @return the [output] added (same as input) + */ + private suspend fun addOutput(output: T): T { + require((output is IVideoPipelineOutputInternal) || (output is IAudioPipelineOutputInternal)) { + "Output must be an audio or video output" + } + if (outputs.contains(output)) { + Logger.w(TAG, "Output $output already added") + return output + } + + val scope = CoroutineScope(coroutineDispatcher) + outputs[output] = scope + + try { + scope.launch { + output.isStreaming.collect { isStreaming -> + if (!isStreaming) { + // Call [stopStream] if all outputs are stopped + if (outputs.keys.all { !it.isStreaming.value }) { + stopStreamInternal() + } + } + } + } + + if (output.isStreaming.value) { + // Start stream if it is not already started + if (!this@StreamerPipeline.isStreaming.value) { + startSourceStream() + } + } + if (output is IPipelineOutputInternal) { + require(output.streamListener == null) { "Output $output already have a listener" } + output.streamListener = object : IPipelineOutputInternal.Listener { + override suspend fun onStarted() { + /** + * Verify if the source configuration is still valid with the output configuration. + * Another output could have changed the source configuration in the meantime. 
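`addOutput` (defined just above) is how a single source pipeline fans out to several encoding outputs. A rough wiring sketch of a dual-output, video-only pipeline; the `CameraSource(context)` constructor, the `VideoCodecConfig()` default parameters, and the cast of the returned output to `IConfigurableVideoPipelineOutput` are assumptions for illustration, not taken from this patch:

```kotlin
import android.content.Context
import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig
import io.github.thibaultbee.streampack.core.elements.endpoints.DynamicEndpoint
import io.github.thibaultbee.streampack.core.elements.sources.video.camera.CameraSource
import io.github.thibaultbee.streampack.core.pipelines.StreamerPipeline
import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IConfigurableVideoPipelineOutput

suspend fun buildDualOutputPipeline(context: Context): StreamerPipeline {
    val pipeline = StreamerPipeline(
        context = context,
        audioSourceInternal = null,                  // video-only, for brevity
        videoSourceInternal = CameraSource(context)  // assumed camera source constructor
    )

    // One output for live streaming and one for local recording, both backed by DynamicEndpoint.
    val liveOutput = pipeline.addOutput(DynamicEndpoint(context))
    val recordOutput = pipeline.addOutput(DynamicEndpoint(context))

    // Assumption: the returned outputs are EncodingPipelineOutput instances, which also
    // implement IConfigurableVideoPipelineOutput.
    listOf(liveOutput, recordOutput).forEach { output ->
        (output as IConfigurableVideoPipelineOutput)
            .setVideoCodecConfig(VideoCodecConfig()) // assumed default parameters
    }
    return pipeline
}
```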
+ */ + if (output.hasAudio) { + requireNotNull(_audioSourceConfig) { "Audio source config is not set" } + if (output is IConfigurableAudioPipelineOutputInternal) { + require( + output.audioCodecConfigFlow.value!!.isCompatibleWith( + audioSourceConfig + ) + ) { "Audio codec config is not compatible with audio source config" } + } + } + if (output.hasVideo) { + requireNotNull(_videoSourceConfig) { "Video source config is not set" } + if (output is IConfigurableVideoPipelineOutputInternal) { + require( + output.videoCodecConfigFlow.value!!.isCompatibleWith( + videoSourceConfig + ) + ) { "Video codec config is not compatible with video source config" } + } + } + + // Start stream if it is not already started + if (!this@StreamerPipeline.isStreaming.value) { + startSourceStream() + } + } + } + } + + if (output is IVideoPipelineOutputInternal) { + addVideoOutputIfNeeded(output, scope) + if (output is IConfigurableVideoPipelineOutputInternal) { + addEncodingVideoOutput(output) + } + } + if (output is IAudioPipelineOutputInternal) { + addAudioOutputIfNeeded(output) + if (output is IConfigurableAudioPipelineOutputInternal) { + addEncodingAudioOutput(output) + } + } + } catch (t: Throwable) { + outputs.remove(output) + scope.cancel() + throw t + } + return output + } + + + private fun addAudioOutputIfNeeded(output: IAudioPipelineOutputInternal) { + if (hasAudio) { + output.audioFrameRequestedListener = + object : IEncoderInternal.IByteBufferInput.OnFrameRequestedListener { + override fun onFrameRequested(buffer: ByteBuffer): Frame { + return audioSourceInternal!!.getAudioFrame(buffer) + } + } + } else { + output.audioFrameRequestedListener = null + Logger.w(TAG, "Output $output has audio but streamer has no audio") + } + } + + private suspend fun addEncodingAudioOutput( + output: IConfigurableAudioPipelineOutputInternal + ) { + // Apply already set audio source config + output.audioCodecConfigFlow.value?.let { audioCodecConfig -> + if (hasAudio != output.hasAudio) { + Logger.w( + TAG, + "Output $output has audio: ${output.hasAudio} but streamer has audio: $hasAudio" + ) + } + //TODO: Check if audio codec config is compatible with audio source config of all outputs + setAudioSourceConfig(audioCodecConfig.sourceConfig) + } + // Apply future audio source config + require(output.audioConfigListener == null) { "Output $output already have an audio listener" } + output.audioConfigListener = object : IConfigurableAudioPipelineOutputInternal.Listener { + override suspend fun onSetAudioConfig(newAudioCodecConfig: AudioCodecConfig) { + setAudioSourceConfig(newAudioCodecConfig) + } + } + } + + private fun addVideoOutputIfNeeded( + output: IVideoPipelineOutputInternal, scope: CoroutineScope + ) { + if (hasVideo) { + when (videoSourceInternal) { + is ISurfaceSource -> { + output.videoSourceTimestampOffset = videoSourceInternal.timestampOffset + scope.launch { + output.surface.runningHistory().collect { (previousSurface, newSurface) -> + Logger.i(TAG, "Surface changed") + if (previousSurface?.surface == newSurface?.surface) { + return@collect + } + + previousSurface?.let { + Logger.i(TAG, "Removing previous surface") + surfaceProcessor?.removeOutputSurface(it.surface) + } + newSurface?.let { + Logger.i(TAG, "Adding new surface") + surfaceProcessor?.addOutputSurface( + buildSurfaceOutput( + it.surface, + it.resolution, + output.isStreaming::value, + sourceInfoProvider!! 
+ ) + ) + } + } + } + } + + is IVideoFrameSource -> { + require(outputs.keys.filterIsInstance().size == 1) { + "Only one output is allowed for frame source" + } + output.videoSourceTimestampOffset = 0L + output.videoFrameRequestedListener = + object : IEncoderInternal.IByteBufferInput.OnFrameRequestedListener { + override fun onFrameRequested(buffer: ByteBuffer): Frame { + return videoSourceInternal.getVideoFrame(buffer) + } + } + } + + else -> { + output.videoFrameRequestedListener = null + Logger.w(TAG, "Output $output has video but streamer has no video") + } + } + } + } + + private fun buildVideoSourceConfig(newVideoCodecConfig: VideoCodecConfig? = null): VideoSourceConfig { + val videoCodecConfigs = outputs.keys.filterIsInstance() + .filter { it.isStreaming.value }.mapNotNull { + (it as? IConfigurableVideoPipelineOutputInternal)?.videoCodecConfigFlow?.value + }.toMutableSet() + newVideoCodecConfig?.let { videoCodecConfigs.add(it) } + return SourceConfigUtils.buildVideoSourceConfig(videoCodecConfigs) + } + + private suspend fun addEncodingVideoOutput( + output: IConfigurableVideoPipelineOutputInternal + ) { + // Apply already set video source config + output.videoCodecConfigFlow.value?.let { _ -> + if (hasVideo != output.hasVideo) { + Logger.w( + TAG, + "Output $output has video: ${output.hasVideo} but streamer has video: $hasVideo" + ) + } + setVideoSourceConfig(buildVideoSourceConfig()) + } + // Apply future video source config + require(output.videoConfigListener == null) { "Output $output already have a video listener" } + output.videoConfigListener = object : IConfigurableVideoPipelineOutputInternal.Listener { + override suspend fun onSetVideoConfig(newVideoCodecConfig: VideoCodecConfig) { + setVideoSourceConfig(buildVideoSourceConfig(newVideoCodecConfig)) + } + } + } + + /** + * Removes an output. + * + * It will stop the stream. + */ + private suspend fun removeOutputImpl(output: IPipelineOutput) { + output.stopStream() + + // Clean streamer output + if (output is IConfigurableVideoPipelineOutputInternal) { + output.videoConfigListener = null + } + if (output is IConfigurableAudioPipelineOutputInternal) { + output.audioConfigListener = null + } + if (output is IPipelineOutputInternal) { + output.streamListener = null + } + if (output is IVideoPipelineOutputInternal) { + output.surface.value?.let { + surfaceProcessor?.removeOutputSurface(it.surface) + } + output.videoFrameRequestedListener = null + } + if (output is IAudioPipelineOutputInternal) { + output.audioFrameRequestedListener = null + } + + outputs[output]?.cancel() + } + + /** + * Removes an output. + * + * It will stop the stream. + */ + suspend fun removeOutput(output: IPipelineOutput) { + if (!outputs.contains(output)) { + Logger.w(TAG, "Output $output not found") + return + } + + removeOutputImpl(output) + + outputs.remove(output) + } + + /** + * Starts audio/video source. + * + * @see [stopStream] + */ + private suspend fun startSourceStream() = withContext(coroutineDispatcher) { + Logger.e(TAG, "Starting stream before lock") + audioSourceMutex.lock() + videoSourceMutex.lock() + + Logger.e(TAG, "Starting stream after lock") + try { + // Sources + audioSourceInternal?.startStream() + videoSourceInternal?.startStream() + + _isStreaming.emit(true) + } finally { + audioSourceMutex.unlock() + videoSourceMutex.unlock() + } + } + + /** + * Try to start all streams. + * + * If an [IEncodingPipelineOutput] is not opened, it won't start the stream. 
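The pipeline-level `startStream` documented above is best-effort: an output that has not been opened simply logs a warning and stays stopped. A short sketch of the intended call order, assuming the pipeline and its outputs were built as in the earlier sketch; the destination URIs are placeholders and actual protocol support depends on the endpoint in use:

```kotlin
import io.github.thibaultbee.streampack.core.pipelines.StreamerPipeline
import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput
import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.open

suspend fun goLive(
    pipeline: StreamerPipeline,
    liveOutput: IEncodingPipelineOutput,
    recordOutput: IEncodingPipelineOutput
) {
    // Outputs must be opened first; pipeline.startStream() only logs a warning for closed ones.
    liveOutput.open("rtmp://example.com/live/streamKey") // placeholder URL
    recordOutput.open("file:///sdcard/DCIM/record.mp4")  // placeholder URI

    pipeline.startStream() // starts the sources and every opened output

    // ... later ...
    pipeline.stopStream()  // stops the sources and calls stopStream() on every output
}
```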
+ */ + suspend fun startStream() { + outputs.keys.forEach { + try { + it.startStream() + } catch (t: Throwable) { + Logger.w(TAG, "startStream: Can't start output $it: ${t.message}") + } + } + } + + /** + * Stops all streams. + * + * It stops audio and video sources and calls [IPipelineOutput.stopStream] on all outputs. + */ + suspend fun stopStream() { + // Sources + stopStreamInternal() + + // Outputs + outputs.keys.forEach { + try { + it.stopStream() + } catch (t: Throwable) { + Logger.w(TAG, "stopStream: Can't stop output $it: ${t.message}") + } + } + + _isStreaming.emit(false) + } + + private suspend fun stopStreamInternal() = withContext(coroutineDispatcher) { + audioSourceMutex.lock() + videoSourceMutex.lock() + + try { + // Sources + try { + audioSourceInternal?.stopStream() + } catch (t: Throwable) { + Logger.w(TAG, "stopStream: Can't stop audio source: ${t.message}") + } + try { + videoSourceInternal?.stopStream() + } catch (t: Throwable) { + Logger.w(TAG, "stopStream: Can't stop video source: ${t.message}") + } + _isStreaming.emit(false) + } finally { + audioSourceMutex.unlock() + videoSourceMutex.unlock() + } + } + + + /** + * Releases all resources. + * + * It releases the audio and video sources and the processors. + * It also calls [IPipelineOutput.release] on all outputs. + */ + suspend fun release() = withContext(coroutineDispatcher) { + Logger.e(TAG, "Releasing streamer") + // Sources + audioSourceMutex.withLock { audioSourceInternal?.release() } + videoSourceMutex.withLock { + releaseSurfaceProcessor() + val videoSource = videoSourceInternal + if (videoSource is ISurfaceSource) { + videoSource.outputSurface = null + } + videoSourceInternal?.release() + } + + // Outputs + outputs.keys.forEach { + try { + removeOutputImpl(it) + it.release() + } catch (t: Throwable) { + Logger.w(TAG, "release: Can't release output $it: ${t.message}") + } + } + } + + companion object { + const val TAG = "StreamerPipeline" + } +} + +/** + * Clean and reset the pipeline synchronously. + * + * @see [StreamerPipeline.release] + */ +fun StreamerPipeline.releaseBlocking() = runBlocking { + release() +} \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/IPipelineOutput.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/IPipelineOutput.kt new file mode 100644 index 000000000..92529ac4b --- /dev/null +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/IPipelineOutput.kt @@ -0,0 +1,148 @@ +/* + * Copyright (C) 2025 Thibault B. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.github.thibaultbee.streampack.core.pipelines.outputs + +import android.util.Size +import android.view.Surface +import io.github.thibaultbee.streampack.core.elements.encoders.IEncoderInternal.IByteBufferInput.OnFrameRequestedListener +import io.github.thibaultbee.streampack.core.elements.utils.RotationValue +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput +import io.github.thibaultbee.streampack.core.streamers.single.startStream +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.runBlocking + + +/** + * An output component for a streamer. + */ +interface IPipelineOutput { + /** + * Whether the output has audio. + */ + val hasAudio: Boolean + + /** + * Whether the output has video. + */ + val hasVideo: Boolean + + /** + * Returns the last throwable that occurred. + */ + val throwable: StateFlow + + /** + * Returns true if output is running. + */ + val isStreaming: StateFlow + + /** + * Starts audio/video stream. + * + * @see [stopStream] + */ + suspend fun startStream() + + /** + * Stops audio/video stream. + * + * @see [startStream] + */ + suspend fun stopStream() + + /** + * Clean and reset the output. + */ + suspend fun release() +} + +/** + * Clean and reset the output synchronously. + * + * @see [IEncodingPipelineOutput.release] + */ +fun IEncodingPipelineOutput.releaseBlocking() = runBlocking { + release() +} + +/** + * A [Surface] with its resolution. + * + * @param surface The [Surface]. + * @param resolution The resolution of the [Surface]. + */ +data class SurfaceWithSize( + val surface: Surface, val resolution: Size +) + +interface IPipelineOutputInternal : IPipelineOutput { + /** + * A listener for audio/video stream. + */ + var streamListener: Listener? + + /** + * Audio/video stream listener interface. + */ + interface Listener { + /** + * Called synchronously when the stream has started. + * It is called on the same thread as [startStream]. + * The listener can throw an exception to prevent the stream from starting. + */ + suspend fun onStarted() + } +} + +/** + * An internal video output component for a pipeline. + */ +interface IVideoPipelineOutputInternal : IPipelineOutputInternal { + /** + * The rotation in one the [Surface] rotations from the device natural orientation. + */ + @RotationValue + var targetRotation: Int + + /** + * The [Surface] flow to render video. + * For surface mode video encoder. + */ + val surface: StateFlow + + /** + * The video source timestamp offset. + * Used to synchronize video and audio when video comes from the [surface].s + */ + var videoSourceTimestampOffset: Long + + /** + * The video [Frame] listener. + * For buffer mode video encoder. + */ + var videoFrameRequestedListener: OnFrameRequestedListener? +} + +/** + * An internal audio output component for a pipeline. + */ +interface IAudioPipelineOutputInternal : IPipelineOutputInternal { + /** + * The audio [Frame] listener. + */ + var audioFrameRequestedListener: OnFrameRequestedListener? +} + diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/EncodingPipelineOutput.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/EncodingPipelineOutput.kt new file mode 100644 index 000000000..6e71ca34c --- /dev/null +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/EncodingPipelineOutput.kt @@ -0,0 +1,670 @@ +/* + * Copyright (C) 2025 Thibault B. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.github.thibaultbee.streampack.core.pipelines.outputs.encoding + +import android.content.Context +import android.view.Surface +import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor +import io.github.thibaultbee.streampack.core.elements.data.Frame +import io.github.thibaultbee.streampack.core.elements.encoders.AudioCodecConfig +import io.github.thibaultbee.streampack.core.elements.encoders.CodecConfig +import io.github.thibaultbee.streampack.core.elements.encoders.IEncoder +import io.github.thibaultbee.streampack.core.elements.encoders.IEncoderInternal +import io.github.thibaultbee.streampack.core.elements.encoders.IEncoderInternal.IByteBufferInput.OnFrameRequestedListener +import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig +import io.github.thibaultbee.streampack.core.elements.encoders.mediacodec.AudioEncoderConfig +import io.github.thibaultbee.streampack.core.elements.encoders.mediacodec.MediaCodecEncoder +import io.github.thibaultbee.streampack.core.elements.encoders.mediacodec.VideoEncoderConfig +import io.github.thibaultbee.streampack.core.elements.encoders.rotateFromNaturalOrientation +import io.github.thibaultbee.streampack.core.elements.endpoints.DynamicEndpoint +import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpoint +import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpointInternal +import io.github.thibaultbee.streampack.core.elements.utils.RotationValue +import io.github.thibaultbee.streampack.core.elements.utils.extensions.displayRotation +import io.github.thibaultbee.streampack.core.logger.Logger +import io.github.thibaultbee.streampack.core.pipelines.StreamerPipeline.Companion.TAG +import io.github.thibaultbee.streampack.core.pipelines.outputs.IPipelineOutputInternal +import io.github.thibaultbee.streampack.core.pipelines.outputs.SurfaceWithSize +import io.github.thibaultbee.streampack.core.regulator.controllers.IBitrateRegulatorController +import kotlinx.coroutines.CoroutineDispatcher +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.runBlocking +import kotlinx.coroutines.sync.Mutex +import kotlinx.coroutines.sync.withLock +import kotlinx.coroutines.withContext + +/** + * An implementation of [IEncodingPipelineOutputInternal] that manages encoding and endpoint. + * + * @param context The application context + * @param endpointInternal The endpoint implementation + * @param defaultRotation The default rotation in [Surface] rotation ([Surface.ROTATION_0], ...). By default, it is the current device orientation. + * @param coroutineDispatcher The coroutine dispatcher to use. 
By default, it is [Dispatchers.Default] + */ +internal class EncodingPipelineOutput( + private val context: Context, + private val endpointInternal: IEndpointInternal = DynamicEndpoint(context), + @RotationValue defaultRotation: Int = context.displayRotation, + private val coroutineDispatcher: CoroutineDispatcher = Dispatchers.Default +) : IEncodingPipelineOutputInternal { + /** + * Mutex to avoid concurrent open/close operations. + */ + private val openCloseMutex = Mutex() + + /** + * Mutex to avoid concurrent audio configuration operations. + */ + private val audioConfigurationMutex = Mutex() + + /** + * Mutex to avoid concurrent video configuration operations. + */ + private val videoConfigurationMutex = Mutex() + + private var bitrateRegulatorController: IBitrateRegulatorController? = null + + private var audioStreamId: Int? = null + private var videoStreamId: Int? = null + + /** + * Whether the output has audio. + */ + override val hasAudio: Boolean + get() = audioEncoderInternal != null + + /** + * Whether the output has video. + */ + override val hasVideo: Boolean + get() = videoEncoderInternal != null + + // INPUTS + private val _surface = MutableStateFlow(null) + override val surface: StateFlow = _surface + + /** + * The video source. + * We need to know the timestamp offset to synchronize audio and video. + * This is set by the [StreamerPipeline]. + */ + override var videoSourceTimestampOffset = 0L + + // ENCODERS + private var audioEncoderInternal: IEncoderInternal? = null + override val audioEncoder: IEncoder? + get() = audioEncoderInternal + + private var videoEncoderInternal: IEncoderInternal? = null + override val videoEncoder: IEncoder? + get() = videoEncoderInternal + + // ENDPOINT + override val endpoint: IEndpoint + get() = endpointInternal + + /** + * Keep the target rotation if it can't be applied immediately. + * It will be applied when the stream is stopped. + */ + @RotationValue + private var pendingTargetRotation: Int? = null + + /** + * The target rotation in [Surface] rotation ([Surface.ROTATION_0], ...) + */ + @RotationValue + private var _targetRotation = defaultRotation + + override var targetRotation: Int + @RotationValue get() = _targetRotation + set(@RotationValue value) { + if (isStreaming.value) { + Logger.w(TAG, "Can't change rotation to $value while streaming") + pendingTargetRotation = value + return + } + + setTargetRotationInternal(value) + } + + private val _throwable = MutableStateFlow(null) + override val throwable: StateFlow = _throwable + + override val isOpen: StateFlow + get() = endpointInternal.isOpen + + private val _isStreaming = MutableStateFlow(false) + override val isStreaming: StateFlow = _isStreaming + + /** + * Audio frame requested listener. + */ + override var audioFrameRequestedListener: OnFrameRequestedListener? = null + + /** + * Video frame requested listener. + */ + override var videoFrameRequestedListener: OnFrameRequestedListener? = null + + /** + * Called when audio configuration is set. + * The purpose is to validate and apply audio configuration to the source. + */ + override var audioConfigListener: IConfigurableAudioPipelineOutputInternal.Listener? = null + + /** + * Called when video configuration is set. + * The purpose is to validate and apply video configuration to the source. + */ + override var videoConfigListener: IConfigurableVideoPipelineOutputInternal.Listener? = null + + /** + * Called when stream starts. + * It is called after the endpoint is started. + * The purpose is to start any other required components. 
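The rotation handling above is deliberately lazy: while an output is streaming, a new `targetRotation` is only recorded and applied once that stream stops. A small sketch of how a caller might drive it through the pipeline, which forwards the value to all video outputs; the surrounding activity/rotation plumbing is assumed:

```kotlin
import android.view.Surface
import io.github.thibaultbee.streampack.core.pipelines.StreamerPipeline

// Example: force landscape. Surface.ROTATION_90 is one of the standard Surface rotations.
fun rotateToLandscape(pipeline: StreamerPipeline) {
    // If an output is currently streaming, EncodingPipelineOutput keeps the value pending
    // and rebuilds its video encoder with the new rotation when that stream stops.
    pipeline.targetRotation = Surface.ROTATION_90
}
```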
+ */ + override var streamListener: IPipelineOutputInternal.Listener? = null + + /** + * Manages error on stream. + * Stops only stream. + * + * @param t triggered [Throwable] + */ + private fun onInternalError(t: Throwable) { + try { + runBlocking { + if (isStreaming.value) { + stopStream() + } + } + } catch (t: Throwable) { + Logger.e(TAG, "onStreamError: Can't stop stream", t) + } finally { + Logger.e(TAG, "onStreamError: ${t.message}", t) + _throwable.tryEmit(t) + } + } + + private val audioEncoderListener = object : IEncoderInternal.IListener { + override fun onError(t: Throwable) { + onInternalError(t) + } + + override fun onOutputFrame(frame: Frame) { + audioStreamId?.let { + runBlocking { + this@EncodingPipelineOutput.endpointInternal.write(frame, it) + } + } + } + } + + private val videoEncoderListener = object : IEncoderInternal.IListener { + override fun onError(t: Throwable) { + onInternalError(t) + } + + override fun onOutputFrame(frame: Frame) { + videoStreamId?.let { + frame.pts += videoSourceTimestampOffset + frame.dts = if (frame.dts != null) { + frame.dts!! + videoSourceTimestampOffset + } else { + null + } + runBlocking { + this@EncodingPipelineOutput.endpointInternal.write(frame, it) + } + } + } + } + + private val _audioCodecConfigFlow = MutableStateFlow(null) + override val audioCodecConfigFlow: StateFlow = _audioCodecConfigFlow + + private val audioCodecConfig: AudioCodecConfig? + get() = _audioCodecConfigFlow.value + + override suspend fun setAudioCodecConfig(audioCodecConfig: AudioCodecConfig) = + withContext(coroutineDispatcher) { + audioConfigurationMutex.withLock { + setAudioCodecConfigInternal(audioCodecConfig) + } + } + + private suspend fun setAudioCodecConfigInternal(audioCodecConfig: AudioCodecConfig) { + require(!isStreaming.value) { "Can't change audio configuration while streaming" } + + if (this.audioCodecConfig == audioCodecConfig) { + Logger.i(TAG, "Audio configuration is the same, skipping configuration") + return + } + + audioConfigListener?.onSetAudioConfig(audioCodecConfig) + + applyAudioCodecConfig(audioCodecConfig) + _audioCodecConfigFlow.emit(audioCodecConfig) + } + + private fun applyAudioCodecConfig(audioConfig: AudioCodecConfig) { + try { + audioEncoderInternal?.release() + audioEncoderInternal = MediaCodecEncoder( + AudioEncoderConfig( + audioConfig + ), listener = audioEncoderListener + ).apply { + if (input is MediaCodecEncoder.ByteBufferInput) { + input.listener = requireNotNull(audioFrameRequestedListener) { + "Audio frame requested listener is required" + } + } else { + throw UnsupportedOperationException("Audio encoder only support ByteBuffer mode") + } + configure() + } + } catch (t: Throwable) { + audioEncoderInternal?.release() + audioEncoderInternal = null + throw t + } + } + + private val _videoCodecConfigFlow = MutableStateFlow(null) + override val videoCodecConfigFlow: StateFlow = _videoCodecConfigFlow + + private val videoCodecConfig: VideoCodecConfig? 
+ get() = _videoCodecConfigFlow.value + + override suspend fun setVideoCodecConfig(videoCodecConfig: VideoCodecConfig) = + withContext(coroutineDispatcher) { + videoConfigurationMutex.withLock { + setVideoCodecConfigInternal(videoCodecConfig) + } + } + + private suspend fun setVideoCodecConfigInternal(videoCodecConfig: VideoCodecConfig) { + require(!isStreaming.value) { "Can't change video configuration while streaming" } + + if (this.videoCodecConfig == videoCodecConfig) { + Logger.i(TAG, "Video configuration is the same, skipping configuration") + return + } + + videoConfigListener?.onSetVideoConfig(videoCodecConfig) + + applyVideoCodecConfig(videoCodecConfig) + + _videoCodecConfigFlow.emit(videoCodecConfig) + } + + private fun applyVideoCodecConfig(videoConfig: VideoCodecConfig) { + try { + videoEncoderInternal = buildAndConfigureVideoEncoder( + videoConfig, targetRotation + ) + } catch (t: Throwable) { + videoEncoderInternal?.release() + videoEncoderInternal = null + throw t + } + } + + private fun buildAndConfigureVideoEncoder( + videoConfig: VideoCodecConfig, @RotationValue targetRotation: Int + ): IEncoderInternal { + val rotatedVideoConfig = videoConfig.rotateFromNaturalOrientation(context, targetRotation) + + // Release codec instance + videoEncoderInternal?.let { encoder -> + val input = encoder.input + if (input is MediaCodecEncoder.SurfaceInput) { + _surface.tryEmit(null) + } + encoder.release() + } + + // Prepare new codec instance + return buildVideoEncoder(rotatedVideoConfig, videoFrameRequestedListener == null).apply { + configure() + } + } + + private fun buildVideoEncoder( + videoConfig: VideoCodecConfig, useSurfaceMode: Boolean + ): IEncoderInternal { + val videoEncoder = MediaCodecEncoder( + VideoEncoderConfig( + videoConfig, useSurfaceMode + ), listener = videoEncoderListener + ) + + when (videoEncoder.input) { + is MediaCodecEncoder.SurfaceInput -> { + videoEncoder.input.listener = + object : IEncoderInternal.ISurfaceInput.OnSurfaceUpdateListener { + override fun onSurfaceUpdated(surface: Surface) { + _surface.tryEmit(SurfaceWithSize(surface, videoConfig.resolution)) + } + } + } + + is MediaCodecEncoder.ByteBufferInput -> { + videoEncoder.input.listener = requireNotNull(videoFrameRequestedListener) { + "Video frame requested listener is required" + } + } + + else -> { + throw UnsupportedOperationException("Unknown input type: ${videoEncoder.input}") + } + } + + return videoEncoder + } + + /** + * Opens the output endpoint. + * + * @param descriptor Media descriptor to open + */ + override suspend fun open(descriptor: MediaDescriptor) = withContext(coroutineDispatcher) { + openCloseMutex.withLock { + endpointInternal.open(descriptor) + } + } + + /** + * Closes the streamer endpoint. + */ + override suspend fun close() = withContext(coroutineDispatcher) { + openCloseMutex.withLock { + if (!isOpen.value) { + Logger.i(TAG, "Endpoint is already closed") + return@withContext + } + stopStreamInternal() + endpointInternal.close() + } + } + + /** + * Starts audio and/or video streams without a concurrent lock. 
+ * + * @see [stopStream] + */ + private suspend fun startStreamInternal() { + require(isOpen.value) { "Endpoint must be opened before starting stream" } + require(!isStreaming.value) { "Stream is already running" } + require(hasAudio || hasVideo) { "At least one of audio or video must be set" } + if (hasAudio) { + requireNotNull(audioCodecConfig) { "Audio configuration must be set" } + } + if (hasVideo) { + requireNotNull(videoCodecConfig) { "Video configuration must be set" } + } + + try { + val streams = mutableListOf() + val orientedVideoConfig = videoCodecConfig?.let { + /** + * We need to get oriented size for the muxer. + * For example, the [FlvMuxer] `onMetaData` event needs to know the oriented size. + */ + it.rotateFromNaturalOrientation(context, targetRotation).apply { + streams.add(this) + } + } + + audioCodecConfig?.let { + streams.add(it) + } + + val streamsIdMap = endpointInternal.addStreams(streams) + orientedVideoConfig?.let { + videoStreamId = streamsIdMap[it] + } + audioCodecConfig?.let { audioStreamId = streamsIdMap[it] } + + endpointInternal.startStream() + + streamListener?.onStarted() + + audioEncoderInternal?.startStream() + videoEncoderInternal?.startStream() + + bitrateRegulatorController?.start() + + _isStreaming.emit(true) + } catch (t: Throwable) { + stopStreamInternal() + throw t + } + } + + /** + * Starts audio and/or video streams. + * + * Before starting the stream, the endpoint must be opened with [open] and the audio and/or + * video configuration must be set. + * + * @see [stopStream] + */ + override suspend fun startStream() = withContext(coroutineDispatcher) { + openCloseMutex.lock() + audioConfigurationMutex.lock() + videoConfigurationMutex.lock() + + try { + startStreamInternal() + } finally { + audioConfigurationMutex.unlock() + videoConfigurationMutex.unlock() + openCloseMutex.unlock() + } + } + + /** + * Stops audio/video stream. + * + * Internally, it resets audio and video recorders and encoders to get them ready for another + * [startStream] session. It explains why preview could be restarted. + * + * @see [startStream] + */ + override suspend fun stopStream() = withContext(coroutineDispatcher) { + openCloseMutex.lock() + audioConfigurationMutex.lock() + videoConfigurationMutex.lock() + + try { + stopStreamInternal() + } finally { + audioConfigurationMutex.unlock() + videoConfigurationMutex.unlock() + openCloseMutex.unlock() + } + } + + /** + * Stops audio/video and reset stream implementation. + * + * @see [stopStream] + */ + private suspend fun stopStreamInternal() { + if (!isStreaming.value) { + Logger.i(TAG, "Stream is already stopped") + return + } + + stopStreamImpl() + + try { + audioEncoderInternal?.reset() + } catch (t: Throwable) { + Logger.w(TAG, "Can't reset audio encoder: ${t.message}") + } + try { + resetVideoEncoder() + } catch (t: Throwable) { + Logger.w(TAG, "Can't reset video encoder: ${t.message}") + } + + _isStreaming.emit(false) + } + + private suspend fun resetVideoEncoder() { + _surface.emit(null) + + val previousVideoEncoder = videoEncoderInternal + pendingTargetRotation?.let { + setTargetRotationInternal(it) + } + pendingTargetRotation = null + + // Only reset if the encoder is the same. Otherwise, it is already configured. + if (previousVideoEncoder == videoEncoderInternal) { + videoEncoderInternal?.reset() + } + } + + /** + * Stops audio/video stream implementation without a concurrent lock. 
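Errors inside an output (encoder or endpoint failures) stop that stream and surface through the `throwable` flow rather than being thrown to the caller. A minimal observer sketch, assuming a `CoroutineScope` is available and reading the elided generics as `StateFlow<Boolean>` and `StateFlow<Throwable?>`:

```kotlin
import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.filterNotNull
import kotlinx.coroutines.launch

fun observeOutput(scope: CoroutineScope, output: IEncodingPipelineOutput) {
    scope.launch {
        output.isStreaming.collect { streaming ->
            println("Output streaming: $streaming")
        }
    }
    scope.launch {
        // throwable starts as null and keeps the last error that stopped the stream.
        output.throwable.filterNotNull().collect { error ->
            println("Output failed: ${error.message}")
        }
    }
}
```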
+ * + * @see [stopStream] + */ + private suspend fun stopStreamImpl() { + try { + bitrateRegulatorController?.stop() + } catch (t: Throwable) { + Logger.w(TAG, "Can't stop bitrate regulator controller: ${t.message}") + } + + // Encoders + try { + audioEncoderInternal?.stopStream() + } catch (t: Throwable) { + Logger.w(TAG, "Can't stop audio encoder: ${t.message}") + } + try { + videoEncoderInternal?.stopStream() + } catch (t: Throwable) { + Logger.w(TAG, "Can't stop video encoder: ${t.message}") + } + + // Endpoint + try { + endpointInternal.stopStream() + } catch (t: Throwable) { + Logger.w(TAG, "Can't stop endpoint: ${t.message}") + } + } + + /** + * Releases endpoint and encoders. + */ + override suspend fun release() = withContext(coroutineDispatcher) { + _isStreaming.emit(false) + + // Encoders + audioConfigurationMutex.withLock { + try { + audioEncoderInternal?.release() + } catch (t: Throwable) { + Logger.w(TAG, "Can't release audio encoder: ${t.message}") + } finally { + audioEncoderInternal = null + } + } + + videoConfigurationMutex.withLock { + try { + videoEncoderInternal?.release() + } catch (t: Throwable) { + Logger.w(TAG, "Can't release video encoder: ${t.message}") + } finally { + _surface.tryEmit(null) + videoEncoderInternal = null + } + } + + // Endpoint + try { + endpointInternal.release() + } catch (t: Throwable) { + Logger.w(TAG, "Can't release endpoint: ${t.message}") + } + } + + /** + * Adds a bitrate regulator controller. + * + * Limitation: it is only available for SRT for now. + */ + override fun addBitrateRegulatorController(controllerFactory: IBitrateRegulatorController.Factory) { + bitrateRegulatorController?.stop() + bitrateRegulatorController = controllerFactory.newBitrateRegulatorController(this).apply { + if (isStreaming.value) { + this.start() + } + Logger.d( + TAG, "Bitrate regulator controller added: ${this.javaClass.simpleName}" + ) + } + } + + /** + * Removes the bitrate regulator controller. + */ + override fun removeBitrateRegulatorController() { + bitrateRegulatorController?.stop() + bitrateRegulatorController = null + Logger.d(TAG, "Bitrate regulator controller removed") + } + + private fun setTargetRotationInternal(@RotationValue newTargetRotation: Int) { + if (shouldUpdateRotation(newTargetRotation)) { + updateVideoEncoderForTransformation() + } + } + + private fun updateVideoEncoderForTransformation() { + if (hasVideo) { + val videoConfig = videoCodecConfig + if (videoConfig != null) { + videoEncoderInternal = buildAndConfigureVideoEncoder( + videoConfig, targetRotation + ) + } + } + } + + /** + * @return true if the target rotation has changed + */ + private fun shouldUpdateRotation(@RotationValue newTargetRotation: Int): Boolean { + return if (targetRotation != newTargetRotation) { + _targetRotation = newTargetRotation + true + } else { + false + } + } +} diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/IEncodingPipelineOutput.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/IEncodingPipelineOutput.kt new file mode 100644 index 000000000..5939e646c --- /dev/null +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/outputs/encoding/IEncodingPipelineOutput.kt @@ -0,0 +1,224 @@ +/* + * Copyright (C) 2025 Thibault B. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.github.thibaultbee.streampack.core.pipelines.outputs.encoding + +import android.net.Uri +import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor +import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor +import io.github.thibaultbee.streampack.core.elements.encoders.AudioCodecConfig +import io.github.thibaultbee.streampack.core.elements.encoders.IEncoder +import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig +import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpoint +import io.github.thibaultbee.streampack.core.pipelines.outputs.IAudioPipelineOutputInternal +import io.github.thibaultbee.streampack.core.pipelines.outputs.IPipelineOutput +import io.github.thibaultbee.streampack.core.pipelines.outputs.IVideoPipelineOutputInternal +import io.github.thibaultbee.streampack.core.regulator.controllers.IBitrateRegulatorController +import io.github.thibaultbee.streampack.core.streamers.single.open +import io.github.thibaultbee.streampack.core.streamers.single.startStream +import kotlinx.coroutines.flow.StateFlow + +/** + * An output component for a streamer. + */ +interface IEncodingPipelineOutput : IPipelineOutput { + /** + * Advanced settings for the endpoint. + */ + val endpoint: IEndpoint + + /** + * Returns true if output is opened. + * For example, if the streamer is connected to a server if the endpoint is SRT or RTMP. + */ + val isOpen: StateFlow + + /** + * Opens the streamer output. + * + * @param descriptor Media descriptor to open + */ + suspend fun open(descriptor: MediaDescriptor) + + /** + * Closes the streamer output. + */ + suspend fun close() + + /** + * Adds a bitrate regulator controller to the streamer. + */ + fun addBitrateRegulatorController(controllerFactory: IBitrateRegulatorController.Factory) + + /** + * Removes the bitrate regulator controller from the streamer. + */ + fun removeBitrateRegulatorController() +} + +/** + * Opens the streamer endpoint. + * + * @param uri The uri to open + */ +suspend fun IEncodingPipelineOutput.open(uri: Uri) = open(UriMediaDescriptor(uri)) + +/** + * Opens the streamer endpoint. + * + * @param uriString The uri to open + */ +suspend fun IEncodingPipelineOutput.open(uriString: String) = + open(UriMediaDescriptor(Uri.parse(uriString))) + + +/** + * Starts audio/video stream. + * + * Same as doing [open] and [startStream]. + * + * @param descriptor The media descriptor to open + * @see [IEncodingPipelineOutput.stopStream] + */ +suspend fun IEncodingPipelineOutput.startStream(descriptor: MediaDescriptor) { + open(descriptor) + startStream() +} + +/** + * Starts audio/video stream. + * + * Same as doing [open] and [startStream]. + * + * @param uri The uri to open + * @see [IEncodingPipelineOutput.stopStream] + */ +suspend fun IEncodingPipelineOutput.startStream(uri: Uri) { + open(uri) + startStream() +} + +/** + * Starts audio/video stream. + * + * Same as doing [open] and [startStream]. 
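The `startStream(descriptor)`, `startStream(uri)` and `startStream(uriString)` overloads in this file simply chain `open` and `startStream` on a single output, which is convenient when there is only one destination. A tiny sketch using the string overload defined just below; the SRT URL is a placeholder:

```kotlin
import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput
import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.startStream

suspend fun startSingleDestination(output: IEncodingPipelineOutput) {
    // Equivalent to output.open("srt://example.com:9998") followed by output.startStream().
    output.startStream("srt://example.com:9998") // placeholder URL
}
```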
+ * + * @param uriString The uri to open + * @see [IEncodingPipelineOutput.stopStream] + */ +suspend fun IEncodingPipelineOutput.startStream(uriString: String) { + open(uriString) + startStream() +} + +/** + * An audio encoding output component for a pipeline. + */ +interface IConfigurableAudioPipelineOutput { + /** + * The audio configuration flow. + */ + val audioCodecConfigFlow: StateFlow + + /** + * Advanced settings for the audio encoder. + */ + val audioEncoder: IEncoder? + + /** + * Configures only audio codec settings. + * + * @param audioCodecConfig The audio codec configuration + * + * @throws [Throwable] if configuration can not be applied. + */ + suspend fun setAudioCodecConfig(audioCodecConfig: AudioCodecConfig) +} + +/** + * An video encoding output component for a pipeline. + */ +interface IConfigurableVideoPipelineOutput { + /** + * The video configuration flow. + */ + val videoCodecConfigFlow: StateFlow + + /** + * Advanced settings for the video encoder. + */ + val videoEncoder: IEncoder? + + /** + * Configures only video codec settings. + * + * @param videoCodecConfig The video codec configuration + * + * @throws [Throwable] if configuration can not be applied. + */ + suspend fun setVideoCodecConfig(videoCodecConfig: VideoCodecConfig) +} + +/** + * An internal output component for a streamer. + */ +internal interface IConfigurableAudioPipelineOutputInternal : IEncodingPipelineOutput, + IConfigurableAudioPipelineOutput, IAudioPipelineOutputInternal { + /** + * Audio configuration listener. + */ + var audioConfigListener: Listener? + + /** + * Audio configuration listener interface. + */ + interface Listener { + /** + * It is called when audio configuration is set. + * The listener can reject the configuration by throwing an exception. + * It is used to validate and apply audio configuration to the source. + */ + suspend fun onSetAudioConfig(newAudioCodecConfig: AudioCodecConfig) + } +} + +/** + * An internal output component for a streamer. + */ +internal interface IConfigurableVideoPipelineOutputInternal : IEncodingPipelineOutput, + IConfigurableVideoPipelineOutput, IVideoPipelineOutputInternal { + /** + * Video configuration listener. + */ + var videoConfigListener: Listener? + + /** + * Video configuration listener interface. + */ + interface Listener { + /** + * It is called when video configuration is set. + * The listener can reject the configuration by throwing an exception. + * It is used to validate and apply video configuration to the source. 
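+         *
+         * A sketch of an implementation that rejects configurations the source cannot honor
+         * (the 60 fps limit is purely illustrative):
+         * ```
+         * override suspend fun onSetVideoConfig(newVideoCodecConfig: VideoCodecConfig) {
+         *     require(newVideoCodecConfig.fps <= 60) { "Source is limited to 60 fps" }
+         * }
+         * ```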
+         */
+        suspend fun onSetVideoConfig(newVideoCodecConfig: VideoCodecConfig)
+    }
+}
+
+internal interface IEncodingPipelineOutputInternal : IConfigurableAudioPipelineOutputInternal,
+    IConfigurableVideoPipelineOutputInternal
+
+
+
diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/utils/SourceConfigUtils.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/utils/SourceConfigUtils.kt
new file mode 100644
index 000000000..0f7bbd1bf
--- /dev/null
+++ b/core/src/main/java/io/github/thibaultbee/streampack/core/pipelines/utils/SourceConfigUtils.kt
@@ -0,0 +1,17 @@
+package io.github.thibaultbee.streampack.core.pipelines.utils
+
+import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig
+import io.github.thibaultbee.streampack.core.elements.sources.video.VideoSourceConfig
+
+object SourceConfigUtils {
+    fun buildVideoSourceConfig(videoCodecConfigs: Set<VideoCodecConfig>): VideoSourceConfig {
+        require(videoCodecConfigs.isNotEmpty()) { "No video codec config provided" }
+        val maxResolution =
+            videoCodecConfigs.map { it.resolution }.maxWith(compareBy({ it.width }, { it.height }))
+        val fps = videoCodecConfigs.first().fps
+        require(videoCodecConfigs.all { it.fps == fps }) { "All video codec configs must have the same fps" }
+        val dynamicRangeProfile = videoCodecConfigs.first().dynamicRangeProfile
+        require(videoCodecConfigs.all { it.dynamicRangeProfile == dynamicRangeProfile }) { "All video codec configs must have the same dynamic range profile" }
+        return VideoSourceConfig(maxResolution, fps, dynamicRangeProfile)
+    }
+}
\ No newline at end of file
diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/BitrateRegulatorController.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/BitrateRegulatorController.kt
index 23dc1d6a7..478da0956 100644
--- a/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/BitrateRegulatorController.kt
+++ b/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/BitrateRegulatorController.kt
@@ -19,6 +19,7 @@ import io.github.thibaultbee.streampack.core.configuration.BitrateRegulatorConfig
 import io.github.thibaultbee.streampack.core.elements.encoders.IEncoder
 import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpoint
 import io.github.thibaultbee.streampack.core.regulator.IBitrateRegulator
+import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput
 import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer
 
 /**
@@ -41,9 +42,9 @@ abstract class BitrateRegulatorController(
     /**
      * Creates a [IBitrateRegulatorController] object from given parameters
      *
-     * @param streamer the [ICoroutineSingleStreamer] implementation.
+     * @param pipelineOutput the [IEncodingPipelineOutput] implementation.
* @return a [IBitrateRegulatorController] object */ - abstract override fun newBitrateRegulatorController(streamer: ICoroutineSingleStreamer): IBitrateRegulatorController + abstract override fun newBitrateRegulatorController(pipelineOutput: IEncodingPipelineOutput): IBitrateRegulatorController } } diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/DefaultBitrateRegulatorController.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/DefaultBitrateRegulatorController.kt index a4a6b7846..525fe94ca 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/DefaultBitrateRegulatorController.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/DefaultBitrateRegulatorController.kt @@ -19,8 +19,10 @@ import io.github.thibaultbee.streampack.core.configuration.BitrateRegulatorConfi import io.github.thibaultbee.streampack.core.elements.encoders.IEncoder import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpoint import io.github.thibaultbee.streampack.core.elements.utils.Scheduler +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IConfigurableAudioPipelineOutput +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IConfigurableVideoPipelineOutput import io.github.thibaultbee.streampack.core.regulator.IBitrateRegulator -import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer /** * A [BitrateRegulatorController] implementation that triggers [IBitrateRegulator.update] every [delayTimeInMs]. @@ -34,7 +36,7 @@ import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleSt */ open class DefaultBitrateRegulatorController( audioEncoder: IEncoder?, - videoEncoder: IEncoder?, + videoEncoder: IEncoder, endpoint: IEndpoint, bitrateRegulatorFactory: IBitrateRegulator.Factory, bitrateRegulatorConfig: BitrateRegulatorConfig = BitrateRegulatorConfig(), @@ -46,17 +48,13 @@ open class DefaultBitrateRegulatorController( bitrateRegulatorFactory, bitrateRegulatorConfig ) { - init { - requireNotNull(videoEncoder) { "Video encoder is required" } - } - /** * Bitrate regulator. Calls regularly by [scheduler]. Don't call it otherwise or you might break regulation. 
*/ private val bitrateRegulator = bitrateRegulatorFactory.newBitrateRegulator( bitrateRegulatorConfig, { - videoEncoder!!.bitrate = it + videoEncoder.bitrate = it }, { /* Do nothing for audio */ } ) @@ -67,7 +65,7 @@ open class DefaultBitrateRegulatorController( private val scheduler = Scheduler(delayTimeInMs) { bitrateRegulator.update( endpoint.metrics, - videoEncoder?.bitrate ?: 0, + videoEncoder.bitrate, audioEncoder?.bitrate ?: 0 ) } @@ -85,11 +83,24 @@ open class DefaultBitrateRegulatorController( private val bitrateRegulatorConfig: BitrateRegulatorConfig = BitrateRegulatorConfig(), private val delayTimeInMs: Long = 500 ) : BitrateRegulatorController.Factory() { - override fun newBitrateRegulatorController(streamer: ICoroutineSingleStreamer): BitrateRegulatorController { + override fun newBitrateRegulatorController(pipelineOutput: IEncodingPipelineOutput): BitrateRegulatorController { + require(pipelineOutput is IConfigurableVideoPipelineOutput) { + "Pipeline output must be an video encoding output" + } + + val videoEncoder = requireNotNull(pipelineOutput.videoEncoder) { + "Video encoder must be set" + } + + val audioEncoder = if (pipelineOutput is IConfigurableAudioPipelineOutput) { + pipelineOutput.audioEncoder + } else { + null + } return DefaultBitrateRegulatorController( - streamer.audioEncoder, - streamer.videoEncoder, - streamer.endpoint, + audioEncoder, + videoEncoder, + pipelineOutput.endpoint, bitrateRegulatorFactory, bitrateRegulatorConfig, delayTimeInMs diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/IBitrateRegulatorController.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/IBitrateRegulatorController.kt index cd6255e0c..fcea8f294 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/IBitrateRegulatorController.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/regulator/controllers/IBitrateRegulatorController.kt @@ -15,7 +15,7 @@ */ package io.github.thibaultbee.streampack.core.regulator.controllers -import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput /** * Interface to implement a bitrate regulator controller. @@ -36,12 +36,12 @@ interface IBitrateRegulatorController { /** * Creates a [IBitrateRegulatorController] object from given parameters * - * @param streamer the [ICoroutineSingleStreamer] implementation. + * @param pipelineOutput the [IEncodingPipelineOutput] implementation. 
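+     *
+     * A factory is typically passed to [IEncodingPipelineOutput.addBitrateRegulatorController]
+     * rather than called directly. A sketch with the SRT controller from the srt extension,
+     * assuming its default constructor parameters:
+     * ```
+     * output.addBitrateRegulatorController(DefaultSrtBitrateRegulatorController.Factory())
+     * ```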
* * @return a [IBitrateRegulatorController] object */ fun newBitrateRegulatorController( - streamer: ICoroutineSingleStreamer + pipelineOutput: IEncodingPipelineOutput ): IBitrateRegulatorController } } diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/CameraDualStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/CameraDualStreamer.kt new file mode 100644 index 000000000..fff19fd25 --- /dev/null +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/CameraDualStreamer.kt @@ -0,0 +1,180 @@ +package io.github.thibaultbee.streampack.core.streamers.dual + +import android.Manifest +import android.content.Context +import android.view.Surface +import androidx.annotation.RequiresPermission +import io.github.thibaultbee.streampack.core.elements.endpoints.DynamicEndpoint +import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpointInternal +import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSourceInternal +import io.github.thibaultbee.streampack.core.elements.sources.audio.audiorecord.MicrophoneSource.Companion.buildDefaultMicrophoneSource +import io.github.thibaultbee.streampack.core.elements.sources.video.camera.CameraSource +import io.github.thibaultbee.streampack.core.elements.sources.video.camera.ICameraSource +import io.github.thibaultbee.streampack.core.elements.utils.RotationValue +import io.github.thibaultbee.streampack.core.elements.utils.extensions.displayRotation +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICameraCoroutineStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.setPreview +import io.github.thibaultbee.streampack.core.streamers.interfaces.startPreview +import io.github.thibaultbee.streampack.core.streamers.single.ISingleStreamer +import kotlinx.coroutines.runBlocking +import kotlinx.coroutines.sync.Mutex +import kotlinx.coroutines.sync.withLock + +/** + * A [DualStreamer] that sends microphone and camera frames. + * + * @param context application context + * @param enableMicrophone [Boolean.true] to capture audio + * @param firstEndpointInternal the [IEndpointInternal] implementation of the first output. By default, it is a [DynamicEndpoint]. + * @param secondEndpointInternal the [IEndpointInternal] implementation of the first output. By default, it is a [DynamicEndpoint]. + * @param defaultRotation the default rotation in [Surface] rotation ([Surface.ROTATION_0], ...). By default, it is the current device orientation. + */ +fun CameraDualStreamer( + context: Context, + enableMicrophone: Boolean = true, + firstEndpointInternal: IEndpointInternal = DynamicEndpoint(context), + secondEndpointInternal: IEndpointInternal = DynamicEndpoint(context), + @RotationValue defaultRotation: Int = context.displayRotation +) = CameraDualStreamer( + context, + if (enableMicrophone) buildDefaultMicrophoneSource() else null, + firstEndpointInternal, + secondEndpointInternal, + defaultRotation +) + +/** + * A [DualStreamer] that sends from camera frames and [audioSourceInternal] audio frames. + * + * @param context application context + * @param audioSourceInternal the audio source implementation + * @param firstEndpointInternal the [IEndpointInternal] implementation of the first output. By default, it is a [DynamicEndpoint]. + * @param secondEndpointInternal the [IEndpointInternal] implementation of the first output. By default, it is a [DynamicEndpoint]. 
+ * @param defaultRotation the default rotation in [Surface] rotation ([Surface.ROTATION_0], ...). By default, it is the current device orientation. + */ +open class CameraDualStreamer( + context: Context, + audioSourceInternal: IAudioSourceInternal?, + firstEndpointInternal: IEndpointInternal = DynamicEndpoint(context), + secondEndpointInternal: IEndpointInternal = DynamicEndpoint(context), + @RotationValue defaultRotation: Int = context.displayRotation +) : DualStreamer( + context = context, + audioSourceInternal = audioSourceInternal, + videoSourceInternal = CameraSource(context), + firstEndpointInternal = firstEndpointInternal, + secondEndpointInternal = secondEndpointInternal, + defaultRotation = defaultRotation +), ICameraCoroutineStreamer { + private val cameraSource = videoSourceInternal as CameraSource + + /** + * Mutex to avoid concurrent access to preview surface. + */ + private val previewMutex = Mutex() + + /** + * Gets the camera source. + * It allows to configure camera settings and to set the camera id. + */ + override val videoSource = cameraSource as ICameraSource + + /** + * Get/Set current camera id. + * It is a shortcut for [CameraSource.cameraId] + */ + override var cameraId: String + /** + * Get current camera id. + * + * @return a string that described current camera + */ + get() = videoSource.cameraId + /** + * Set current camera id. + * Retrieves list of cameras from [Context.cameras] + * + * It will block the current thread until the camera id is set. You can use [setCameraId] to + * set it in a coroutine. + * + * @param value string that described the camera. + */ + @RequiresPermission(Manifest.permission.CAMERA) + set(value) { + runBlocking { + setCameraId(value) + } + } + + /** + * Sets a camera id with a suspend function. + * + * @param cameraId The camera id to use + */ + override suspend fun setCameraId(cameraId: String) { + cameraSource.setCameraId(cameraId) + } + + /** + * Sets a preview surface. + */ + override suspend fun setPreview(surface: Surface) { + previewMutex.withLock { + cameraSource.setPreviewSurface(surface) + } + } + + /** + * Starts video preview. + * + * The preview will be rendered on the surface set by [setPreview]. + * It is recommend to call configure before call [startPreview] to avoid camera restart when + * encoder surface will be added. + * + * @see [stopPreview] + * @see [setPreview] + */ + @RequiresPermission(Manifest.permission.CAMERA) + override suspend fun startPreview() { + previewMutex.withLock { + cameraSource.startPreview() + } + } + + /** + * Starts audio and video capture. + * If you can prefer to call [ISingleStreamer.setAudioConfig] before starting preview. + * It is a shortcut for [setPreview] and [startPreview]. + * + * @param previewSurface The [Surface] used for camera preview + * + * @see [ICameraCoroutineStreamer.stopPreview] + */ + @RequiresPermission(Manifest.permission.CAMERA) + override suspend fun startPreview(previewSurface: Surface) { + previewMutex.withLock { + cameraSource.setPreviewSurface(previewSurface) + cameraSource.startPreview() + } + } + + /** + * Stops capture. + * It also stops stream if the stream is running. + * + * @see [startPreview] + */ + override suspend fun stopPreview() { + previewMutex.withLock { + cameraSource.stopPreview() + } + } + + /** + * Same as [DualStreamer.release] but it also calls [stopPreview]. 
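+     *
+     * A typical teardown sequence (sketch; `streamer` is a [CameraDualStreamer] with both
+     * outputs opened):
+     * ```
+     * streamer.stopStream()
+     * streamer.first.close()
+     * streamer.second.close()
+     * streamer.release()
+     * ```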
+     */
+    override suspend fun release() {
+        stopPreview()
+        super.release()
+    }
+}
\ No newline at end of file
diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/DualStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/DualStreamer.kt
new file mode 100644
index 000000000..46ac4ed7f
--- /dev/null
+++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/DualStreamer.kt
@@ -0,0 +1,213 @@
+package io.github.thibaultbee.streampack.core.streamers.dual
+
+import android.Manifest
+import android.content.Context
+import android.view.Surface
+import androidx.annotation.RequiresPermission
+import io.github.thibaultbee.streampack.core.elements.endpoints.DynamicEndpoint
+import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpointInternal
+import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSource
+import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSourceInternal
+import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSource
+import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSourceInternal
+import io.github.thibaultbee.streampack.core.elements.utils.RotationValue
+import io.github.thibaultbee.streampack.core.elements.utils.combineStates
+import io.github.thibaultbee.streampack.core.elements.utils.extensions.displayRotation
+import io.github.thibaultbee.streampack.core.pipelines.StreamerPipeline
+import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput
+import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutputInternal
+import io.github.thibaultbee.streampack.core.streamers.infos.IConfigurationInfo
+import io.github.thibaultbee.streampack.core.streamers.single.ISingleStreamer
+import kotlinx.coroutines.flow.StateFlow
+import kotlinx.coroutines.runBlocking
+
+/**
+ * Base class of all dual streamers.
+ *
+ * A dual streamer is a streamer that handles two audio and video streams independently.
+ * For example, you can use it to live stream and record simultaneously.
+ *
+ * @param context the application context
+ * @param videoSourceInternal the video source implementation
+ * @param audioSourceInternal the audio source implementation
+ * @param firstEndpointInternal the [IEndpointInternal] implementation of the first output. By default, it is a [DynamicEndpoint].
+ * @param secondEndpointInternal the [IEndpointInternal] implementation of the second output. By default, it is a [DynamicEndpoint].
+ * @param defaultRotation the default rotation in [Surface] rotation ([Surface.ROTATION_0], ...). By default, it is the current device orientation.
+ */
+open class DualStreamer(
+    protected val context: Context,
+    audioSourceInternal: IAudioSourceInternal?,
+    videoSourceInternal: IVideoSourceInternal?,
+    firstEndpointInternal: IEndpointInternal = DynamicEndpoint(context),
+    secondEndpointInternal: IEndpointInternal = DynamicEndpoint(context),
+    @RotationValue defaultRotation: Int = context.displayRotation
+) : ICoroutineDualStreamer, ICoroutineAudioDualStreamer, ICoroutineVideoDualStreamer {
+    private val pipeline = StreamerPipeline(
+        context,
+        audioSourceInternal,
+        videoSourceInternal
+    )
+
+    private val firstPipelineOutput: IEncodingPipelineOutputInternal = runBlocking {
+        pipeline.addOutput(
+            firstEndpointInternal,
+            defaultRotation
+        ) as IEncodingPipelineOutputInternal
+    }
+
+    /**
+     * First output of the streamer.
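+     *
+     * For example (illustrative URL and file path), the first output can record to a file
+     * while the second output live streams:
+     * ```
+     * streamer.first.startStream(Uri.fromFile(File(context.filesDir, "record.mp4")))
+     * streamer.second.startStream("rtmp://server.example.com/live/streamKey")
+     * ```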
+ */ + val first = firstPipelineOutput as IEncodingPipelineOutput + + private val secondPipelineOutput: IEncodingPipelineOutputInternal = runBlocking { + pipeline.addOutput( + secondEndpointInternal, + defaultRotation + ) as IEncodingPipelineOutputInternal + } + + /** + * Second output of the streamer. + */ + val second = secondPipelineOutput as IEncodingPipelineOutput + + override val throwable: StateFlow = + combineStates( + pipeline.throwable, + firstPipelineOutput.throwable, + secondPipelineOutput.throwable + ) { throwableArray -> + throwableArray[0] ?: throwableArray[1] ?: throwableArray[2] + } + + /** + * Whether any of the output is opening. + */ + override val isOpen: StateFlow = combineStates( + firstPipelineOutput.isOpen, + secondPipelineOutput.isOpen + ) { isOpeningArray -> + isOpeningArray[0] || isOpeningArray[1] + } + + /** + * Whether any of the output is streaming. + */ + override val isStreaming: StateFlow = combineStates( + pipeline.isStreaming, + firstPipelineOutput.isStreaming, + secondPipelineOutput.isStreaming + ) { isStreamingArray -> + isStreamingArray[0] && (isStreamingArray[1] || isStreamingArray[2]) + } + + override suspend fun close() { + TODO("Not yet implemented") + } + + // SOURCES + override val audioSource: IAudioSource? + get() = pipeline.audioSource + override val videoSource: IVideoSource? + get() = pipeline.videoSource + + // INTERNAL + protected val videoSourceInternal = pipeline.videoSource as IVideoSourceInternal? + protected val audioSourceInternal = pipeline.audioSource as IAudioSourceInternal? + + /** + * Whether the streamer has audio. + */ + val hasAudio: Boolean + get() = pipeline.hasAudio + + /** + * Whether the streamer has video. + */ + val hasVideo: Boolean + get() = pipeline.hasVideo + + /** + * The target rotation in [Surface] rotation ([Surface.ROTATION_0], ...) + */ + var targetRotation: Int + @RotationValue get() = pipeline.targetRotation + set(@RotationValue newTargetRotation) { + pipeline.targetRotation = newTargetRotation + } + + init { + require(audioSourceInternal != null || videoSourceInternal != null) { + "At least one audio or video source must be provided" + } + } + + @RequiresPermission(Manifest.permission.RECORD_AUDIO) + override suspend fun setAudioConfig(audioConfig: DualStreamerAudioConfig) { + var throwable: Throwable? = null + + try { + firstPipelineOutput.setAudioCodecConfig(audioConfig.firstAudioConfig) + } catch (e: Throwable) { + throwable = e + } + try { + audioConfig.secondAudioConfig.let { secondPipelineOutput.setAudioCodecConfig(it) } + } catch (e: Throwable) { + throwable = e + } + throwable?.let { throw it } + } + + override suspend fun setVideoConfig(videoConfig: DualStreamerVideoConfig) { + var throwable: Throwable? = null + try { + firstPipelineOutput.setVideoCodecConfig(videoConfig.firstVideoConfig) + } catch (e: Throwable) { + throwable = e + } + try { + secondPipelineOutput.setVideoCodecConfig(videoConfig.secondVideoConfig) + } catch (e: Throwable) { + throwable = e + } + throwable?.let { throw it } + } + + + /** + * Configures both video and audio settings. + * + * It must be call when both stream and audio and video capture are not running. + * + * Use [IConfigurationInfo] to get value limits. + * + * If video encoder does not support [VideoConfig.level] or [VideoConfig.profile], it fallbacks + * to video encoder default level and default profile. 
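+     *
+     * A minimal sketch, assuming the default [AudioConfig] and [VideoConfig] constructors
+     * (the 720p resolution is illustrative):
+     * ```
+     * streamer.setConfig(
+     *     DualStreamerAudioConfig(AudioConfig()),
+     *     DualStreamerVideoConfig(VideoConfig(resolution = Size(1280, 720)))
+     * )
+     * ```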
+ * + * @param audioConfig Audio configuration to set + * @param videoConfig Video configuration to set + * + * @throws [Throwable] if configuration can not be applied. + * @see [ISingleStreamer.release] + */ + @RequiresPermission(Manifest.permission.RECORD_AUDIO) + suspend fun setConfig( + audioConfig: DualStreamerAudioConfig, + videoConfig: DualStreamerVideoConfig + ) { + setAudioConfig(audioConfig) + setVideoConfig(videoConfig) + } + + + override suspend fun startStream() = + pipeline.startStream() + + override suspend fun stopStream() = + pipeline.stopStream() + + override suspend fun release() = + pipeline.release() +} \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/IDualStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/IDualStreamer.kt new file mode 100644 index 000000000..55534499d --- /dev/null +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/dual/IDualStreamer.kt @@ -0,0 +1,230 @@ +package io.github.thibaultbee.streampack.core.streamers.dual + +import android.media.AudioFormat +import android.media.MediaCodecInfo +import android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline +import android.media.MediaCodecInfo.CodecProfileLevel.HEVCProfileMain +import android.media.MediaCodecInfo.CodecProfileLevel.VP9Profile0 +import android.media.MediaFormat +import android.util.Size +import io.github.thibaultbee.streampack.core.elements.encoders.AudioCodecConfig +import io.github.thibaultbee.streampack.core.elements.encoders.AudioCodecConfig.Companion.getDefaultProfile +import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig +import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig.Companion.getBestLevel +import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig.Companion.getBestProfile +import io.github.thibaultbee.streampack.core.elements.utils.ByteFormatValue +import io.github.thibaultbee.streampack.core.elements.utils.ChannelConfigValue +import io.github.thibaultbee.streampack.core.streamers.interfaces.IAudioStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineAudioStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineVideoStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.IVideoStreamer +import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig +import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig + +/** + * Creates a [DualStreamerAudioConfig] with the same configuration for both audio streams. + * + * @param config the audio configuration + */ +fun DualStreamerAudioConfig(config: AudioConfig) = DualStreamerAudioConfig( + firstAudioConfig = config, + secondAudioConfig = config +) + +/** + * Creates a [DualStreamerAudioConfig] with different configuration for each audio stream. + */ +fun DualStreamerAudioConfig( + firstAudioCodecConfig: DualStreamerAudioCodecConfig = DualStreamerAudioCodecConfig(), + secondAudioCodecConfig: DualStreamerAudioCodecConfig = DualStreamerAudioCodecConfig(), + + /** + * Audio capture sample rate in Hz. 
+ * From [AudioRecord API](https://developer.android.com/reference/android/media/AudioRecord?hl=en#AudioRecord(int,%20int,%20int,%20int,%20int)): "44100Hz is currently the only rate that is guaranteed to work on all devices, but other rates such as 22050, 16000, and 11025 may work on some devices." + */ + sampleRate: Int = DualStreamerAudioCodecConfig.getDefaultSampleRate( + listOf( + firstAudioCodecConfig.mimeType, + secondAudioCodecConfig.mimeType + ) + ), + + /** + * Audio channel configuration. + * From [AudioRecord API](https://developer.android.com/reference/android/media/AudioRecord?hl=en#AudioRecord(int,%20int,%20int,%20int,%20int)): " AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices." + * + * @see [AudioFormat.CHANNEL_IN_MONO] + * @see [AudioFormat.CHANNEL_IN_STEREO] + */ + @ChannelConfigValue channelConfig: Int = AudioFormat.CHANNEL_IN_STEREO, + + /** + * Audio byte format. + * + * @see [AudioFormat.ENCODING_PCM_8BIT] + * @see [AudioFormat.ENCODING_PCM_16BIT] + * @see [AudioFormat.ENCODING_PCM_FLOAT] + */ + @ByteFormatValue byteFormat: Int = AudioFormat.ENCODING_PCM_16BIT, +) = DualStreamerAudioConfig( + firstAudioCodecConfig.toAudioCodecConfig(sampleRate, channelConfig, byteFormat), + secondAudioCodecConfig.toAudioCodecConfig(sampleRate, channelConfig, byteFormat) +) + +/** + * A data class that holds audio specific codec data. + */ +data class DualStreamerAudioCodecConfig( + /** + * Audio encoder mime type. + * + * **See Also:** [MediaFormat MIMETYPE_AUDIO_*](https://developer.android.com/reference/android/media/MediaFormat) + */ + val mimeType: String = MediaFormat.MIMETYPE_AUDIO_AAC, + + /** + * Audio encoder bitrate in bits/s. + */ + val startBitrate: Int = 128_000, + + /** + * Audio profile. + * For AAC only. + * Default value is [MediaCodecInfo.CodecProfileLevel.AACObjectLC]. + * + * @see [MediaCodecInfo.CodecProfileLevel.AACObjectLC] + * @see [MediaCodecInfo.CodecProfileLevel.AACObjectHE] + */ + val profile: Int = getDefaultProfile(mimeType), +) { + /** + * Creates an [AudioCodecConfig] from this [DualStreamerAudioCodecConfig]. + */ + internal fun toAudioCodecConfig( + sampleRate: Int, + @ChannelConfigValue channelConfig: Int, + @ByteFormatValue byteFormat: Int + ) = AudioCodecConfig( + mimeType = mimeType, + startBitrate = startBitrate, + sampleRate = sampleRate, + channelConfig = channelConfig, + byteFormat = byteFormat, + profile = profile + ) + + companion object { + /** + * Returns the default sample rate for the given mime types. + */ + internal fun getDefaultSampleRate(mimeTypes: List): Int { + return if (mimeTypes.contains(MediaFormat.MIMETYPE_AUDIO_OPUS)) { + 48_000 + } else { + 44_100 + } + } + } +} + +class DualStreamerAudioConfig +internal constructor( + val firstAudioConfig: AudioCodecConfig, + val secondAudioConfig: AudioCodecConfig +) + +/** + * Creates a [DualStreamerVideoConfig] with the same configuration for both video streams. + * + * @param config the video configuration + */ +fun DualStreamerVideoConfig( + config: VideoConfig +) = DualStreamerVideoConfig( + firstVideoConfig = config, + secondVideoConfig = config +) + +/** + * Creates a [DualStreamerVideoConfig] with different configuration for each video stream. + */ +fun DualStreamerVideoConfig( + /** + * Video framerate. + * This is a best effort as few camera can not generate a fixed framerate. 
+ */ + fps: Int = 30, + firstVideoCodecConfig: DualStreamerVideoCodecConfig = DualStreamerVideoCodecConfig(), + secondVideoCodecConfig: DualStreamerVideoCodecConfig = DualStreamerVideoCodecConfig() +) = DualStreamerVideoConfig( + firstVideoCodecConfig.toVideoCodecConfig(fps), + secondVideoCodecConfig.toVideoCodecConfig(fps) +) + +/** + * A data class that holds video specific codec data. + */ +data class DualStreamerVideoCodecConfig( + /** + * Video encoder mime type. + * Only [MediaFormat.MIMETYPE_VIDEO_AVC], [MediaFormat.MIMETYPE_VIDEO_HEVC], + * [MediaFormat.MIMETYPE_VIDEO_VP9] and [MediaFormat.MIMETYPE_VIDEO_AV1] are supported yet. + * + * **See Also:** [MediaFormat MIMETYPE_VIDEO_*](https://developer.android.com/reference/android/media/MediaFormat) + */ + val mimeType: String = MediaFormat.MIMETYPE_VIDEO_AVC, + /** + * Video encoder bitrate in bits/s. + */ + val startBitrate: Int = 2_000_000, + /** + * Video output resolution in pixel. + */ + val resolution: Size = Size(1280, 720), + /** + * Video encoder profile. Encoders may not support requested profile. In this case, StreamPack fallbacks to default profile. + * If not set, profile is always a 8 bit profile. StreamPack try to apply the highest profile available. + * If the decoder does not support the profile, you should explicitly set the profile to a lower + * value such as [AVCProfileBaseline] for AVC, [HEVCProfileMain] for HEVC, [VP9Profile0] for VP9. + * ** See ** [MediaCodecInfo.CodecProfileLevel](https://developer.android.com/reference/android/media/MediaCodecInfo.CodecProfileLevel) + */ + val profile: Int = getBestProfile(mimeType), + /** + * Video encoder level. Encoders may not support requested level. In this case, StreamPack fallbacks to default level. + * ** See ** [MediaCodecInfo.CodecProfileLevel](https://developer.android.com/reference/android/media/MediaCodecInfo.CodecProfileLevel) + */ + val level: Int = getBestLevel(mimeType, profile), + /** + * Video encoder I-frame interval in seconds. + * This is a best effort as few camera can not generate a fixed framerate. + * For live streaming, I-frame interval should be really low. For recording, I-frame interval should be higher. + * A value of 0 means that each frame is an I-frame. + * On device with API < 25, this value will be rounded to an integer. So don't expect a precise value and any value < 0.5 will be considered as 0. 
+ */ + val gopDuration: Float = 1f // 1s between I frames +) { + internal fun toVideoCodecConfig(fps: Int) = VideoCodecConfig( + mimeType = mimeType, + startBitrate = startBitrate, + resolution = resolution, + fps = fps, + profile = profile, + level = level, + gopDuration = gopDuration + ) +} + +class DualStreamerVideoConfig +internal constructor( + val firstVideoConfig: VideoCodecConfig, + val secondVideoConfig: VideoCodecConfig +) + +interface ICoroutineAudioDualStreamer : ICoroutineAudioStreamer, + IAudioStreamer + +interface ICoroutineVideoDualStreamer : ICoroutineVideoStreamer, + IVideoStreamer + +interface ICoroutineDualStreamer : ICoroutineStreamer \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/interfaces/IStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/interfaces/IStreamer.kt new file mode 100644 index 000000000..565496ef5 --- /dev/null +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/interfaces/IStreamer.kt @@ -0,0 +1,203 @@ +package io.github.thibaultbee.streampack.core.streamers.interfaces + +import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor +import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSource +import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSource +import io.github.thibaultbee.streampack.core.streamers.single.open +import io.github.thibaultbee.streampack.core.streamers.single.startStream +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.runBlocking + +interface IStreamer + +/** + * A single Streamer based on coroutines. + */ +interface ICoroutineStreamer : IStreamer { + /** + * Returns the last throwable that occurred. + */ + val throwable: StateFlow + + /** + * Returns true if the streamer is opened. + * For example, if the streamer is connected to a server if the endpoint is SRT or RTMP. + */ + val isOpen: StateFlow + + /** + * Closes the streamer. + */ + suspend fun close() + + /** + * Returns true if stream is running. + */ + val isStreaming: StateFlow + + /** + * Starts audio/video stream. + * + * @see [stopStream] + */ + suspend fun startStream() + + /** + * Stops audio/video stream. + * + * @see [startStream] + */ + suspend fun stopStream() + + /** + * Clean and reset the streamer. + */ + suspend fun release() +} + +/** + * Clean and reset the streamer synchronously. + * + * @see [ICoroutineStreamer.release] + */ +fun ICoroutineStreamer.releaseBlocking() = runBlocking { + release() +} + + +interface ICoroutineAudioStreamer { + /** + * Configures only audio settings. + * + * @param audioConfig Audio configuration to set + * + * @throws [Throwable] if configuration can not be applied. + */ + suspend fun setAudioConfig(audioConfig: T) +} + +interface ICoroutineVideoStreamer { + /** + * Configures only video settings. + * + * @param videoConfig Video configuration to set + * + * @throws [Throwable] if configuration can not be applied. + */ + suspend fun setVideoConfig(videoConfig: T) +} + +/** + * An audio single Streamer + */ +interface IAudioStreamer { + + /** + * Advanced settings for the audio source. + */ + val audioSource: IAudioSource? +} + +/** + * A video single streamer. + */ +interface IVideoStreamer { + /** + * Advanced settings for the video source. + */ + val videoSource: IVideoSource? +} + + +interface ICallbackAudioStreamer { + /** + * Configures only audio settings. 
+ * + * @param audioConfig Audio configuration to set + * + * @throws [Throwable] if configuration can not be applied. + */ + fun setAudioConfig(audioConfig: T) +} + +interface ICallbackVideoStreamer { + /** + * Configures only video settings. + * + * @param videoConfig Video configuration to set + * + * @throws [Throwable] if configuration can not be applied. + */ + fun setVideoConfig(videoConfig: T) +} + +interface ICallbackStreamer : IStreamer { + /** + * Returns true if streamer is opened. + * For example, if the streamer is connected to a server if the endpoint is SRT or RTMP. + */ + val isOpen: Boolean + + /** + * Closes the streamer. + */ + fun close() + + /** + * Returns true if stream is running. + */ + val isStreaming: Boolean + + /** + * Starts audio/video stream asynchronously. + * + * You must call [open] before calling this method. + * The streamer must be opened before starting the stream. You can use [Listener.onIsOpenChanged]. + * + * @see [stopStream] + */ + fun startStream() + + /** + * Starts audio/video stream asynchronously. + * + * Same as doing [open] and [startStream]. + * + * @see [stopStream] + */ + fun startStream(descriptor: MediaDescriptor) + + /** + * Stops audio/video stream asynchronously. + * + * @see [startStream] + */ + fun stopStream() + + /** + * Clean and reset the streamer. + */ + fun release() + + /** + * Listener for the callback streamer. + */ + interface Listener { + /** + * Called when the streamer is opened or closed. + */ + fun onIsOpenChanged(isOpen: Boolean) = Unit + + /** + * Called when the stream is started or stopped. + */ + fun onIsStreamingChanged(isStarted: Boolean) = Unit + + /** + * Called when an error occurs. + * + * @param throwable The throwable that occurred + */ + fun onError(throwable: Throwable) = Unit + } +} \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/observers/StreamerActivityLifeCycleObserver.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/observers/StreamerActivityLifeCycleObserver.kt index 50ea40f45..f6df9cd9b 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/observers/StreamerActivityLifeCycleObserver.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/observers/StreamerActivityLifeCycleObserver.kt @@ -17,7 +17,10 @@ package io.github.thibaultbee.streampack.core.streamers.observers import androidx.lifecycle.DefaultLifecycleObserver import androidx.lifecycle.LifecycleOwner -import io.github.thibaultbee.streampack.core.streamers.single.ISingleStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICallbackStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.IStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.releaseBlocking /** * A [DefaultLifecycleObserver] to control a streamer on [Activity] lifecycle. 
@@ -29,9 +32,14 @@ import io.github.thibaultbee.streampack.core.streamers.single.ISingleStreamer * * @param streamer The streamer to control */ -open class StreamerActivityLifeCycleObserver(streamer: ISingleStreamer) : +open class StreamerActivityLifeCycleObserver(streamer: IStreamer) : StreamerViewModelLifeCycleObserver(streamer) { + override fun onDestroy(owner: LifecycleOwner) { - streamer.release() + if (streamer is ICoroutineStreamer) { + streamer.releaseBlocking() + } else if (streamer is ICallbackStreamer) { + streamer.release() + } } } \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/observers/StreamerViewModelLifeCycleObserver.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/observers/StreamerViewModelLifeCycleObserver.kt index ba7e1e3d6..563e7dfc2 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/observers/StreamerViewModelLifeCycleObserver.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/observers/StreamerViewModelLifeCycleObserver.kt @@ -17,11 +17,11 @@ package io.github.thibaultbee.streampack.core.streamers.observers import androidx.lifecycle.DefaultLifecycleObserver import androidx.lifecycle.LifecycleOwner +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICallbackStreamer import io.github.thibaultbee.streampack.core.streamers.interfaces.ICameraCallbackStreamer import io.github.thibaultbee.streampack.core.streamers.interfaces.ICameraCoroutineStreamer -import io.github.thibaultbee.streampack.core.streamers.single.ICallbackSingleStreamer -import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer -import io.github.thibaultbee.streampack.core.streamers.single.ISingleStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.IStreamer import kotlinx.coroutines.runBlocking /** @@ -34,25 +34,26 @@ import kotlinx.coroutines.runBlocking * * @param streamer The streamer to control */ -open class StreamerViewModelLifeCycleObserver(protected val streamer: ISingleStreamer) : +open class StreamerViewModelLifeCycleObserver(protected val streamer: IStreamer) : DefaultLifecycleObserver { + override fun onPause(owner: LifecycleOwner) { - if (streamer is ICoroutineSingleStreamer) { + if (streamer is ICoroutineStreamer) { if (streamer is ICameraCoroutineStreamer) { runBlocking { streamer.stopPreview() } } runBlocking { streamer.stopStream() - if (streamer.endpoint.isOpen.value) { + if (streamer.isOpen.value) { streamer.close() } } - } else if (streamer is ICallbackSingleStreamer) { + } else if (streamer is ICallbackStreamer) { if (streamer is ICameraCallbackStreamer) { streamer.stopPreview() } streamer.stopStream() - if (streamer.endpoint.isOpen.value) { + if (streamer.isOpen) { streamer.close() } } else { diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/AudioOnlySingleStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/AudioOnlySingleStreamer.kt index 357f32166..270d33974 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/AudioOnlySingleStreamer.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/AudioOnlySingleStreamer.kt @@ -16,24 +16,75 @@ package io.github.thibaultbee.streampack.core.streamers.single import android.content.Context +import 
io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor +import io.github.thibaultbee.streampack.core.elements.encoders.IEncoder import io.github.thibaultbee.streampack.core.elements.endpoints.DynamicEndpoint +import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpoint import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpointInternal +import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSource import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSourceInternal import io.github.thibaultbee.streampack.core.elements.sources.audio.audiorecord.MicrophoneSource.Companion.buildDefaultMicrophoneSource +import io.github.thibaultbee.streampack.core.regulator.controllers.IBitrateRegulatorController +import io.github.thibaultbee.streampack.core.streamers.infos.IConfigurationInfo /** - * A [SingleStreamer] that sends only microphone frames. + * A [ICoroutineSingleStreamer] that sends only audio source frames. * - * @param context application context - * @param internalEndpoint the [IEndpointInternal] implementation + * @param context the application context + * @param audioSourceInternal the audio source implementation. By default, it is the default microphone source. + * @param internalEndpoint the [IEndpointInternal] implementation. By default, it is a [DynamicEndpoint]. */ open class AudioOnlySingleStreamer( context: Context, audioSourceInternal: IAudioSourceInternal? = buildDefaultMicrophoneSource(), internalEndpoint: IEndpointInternal = DynamicEndpoint(context) -) : SingleStreamer( - context = context, - videoSourceInternal = null, - audioSourceInternal = audioSourceInternal, - endpointInternal = internalEndpoint -) \ No newline at end of file +) : ICoroutineSingleStreamer, ICoroutineAudioSingleStreamer { + private val streamer = SingleStreamer( + context = context, + videoSourceInternal = null, + audioSourceInternal = audioSourceInternal, + endpointInternal = internalEndpoint + ) + override val throwable = streamer.throwable + override val isOpen = streamer.isOpen + override val isStreaming = streamer.isStreaming + override val endpoint: IEndpoint + get() = streamer.endpoint + override val info: IConfigurationInfo + get() = streamer.info + override var targetRotation: Int + get() = streamer.targetRotation + set(value) { + streamer.targetRotation = value + } + + override val audioConfig: AudioConfig? + get() = streamer.audioConfig + override val audioSource: IAudioSource? + get() = streamer.audioSource + override val audioEncoder: IEncoder? 
+ get() = streamer.audioEncoder + + override fun getInfo(descriptor: MediaDescriptor) = streamer.getInfo(descriptor) + + override suspend fun setAudioConfig(audioConfig: AudioConfig) = + streamer.setAudioConfig(audioConfig) + + override suspend fun open(descriptor: MediaDescriptor) = streamer.open(descriptor) + + override suspend fun close() = streamer.close() + + override suspend fun startStream() = streamer.startStream() + + override suspend fun stopStream() = streamer.stopStream() + + override suspend fun release() = streamer.release() + + override fun addBitrateRegulatorController(controllerFactory: IBitrateRegulatorController.Factory) { + throw UnsupportedOperationException("Audio single streamer does not support bitrate regulator controller") + } + + override fun removeBitrateRegulatorController() { + // Do nothing + } +} \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/CameraSingleStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/CameraSingleStreamer.kt index 60d89d0e6..ce07929e3 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/CameraSingleStreamer.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/CameraSingleStreamer.kt @@ -42,7 +42,7 @@ import kotlinx.coroutines.sync.withLock * * @param context application context * @param enableMicrophone [Boolean.true] to capture audio - * @param internalEndpoint the [IEndpointInternal] implementation + * @param internalEndpoint the [IEndpointInternal] implementation. By default, it is a [DynamicEndpoint]. * @param defaultRotation the default rotation in [Surface] rotation ([Surface.ROTATION_0], ...). By default, it is the current device orientation. */ fun CameraSingleStreamer( @@ -62,7 +62,7 @@ fun CameraSingleStreamer( * * @param context application context * @param audioSourceInternal the audio source implementation - * @param internalEndpoint the [IEndpointInternal] implementation + * @param internalEndpoint the [IEndpointInternal] implementation. By default, it is a [DynamicEndpoint]. * @param defaultRotation the default rotation in [Surface] rotation ([Surface.ROTATION_0], ...). By default, it is the current device orientation. */ open class CameraSingleStreamer( @@ -122,13 +122,7 @@ open class CameraSingleStreamer( * * @param cameraId The camera id to use */ - override suspend fun setCameraId(cameraId: String) { - cameraSource.setCameraId(cameraId) - // If config has not been set yet, [configure] will update transformation later. - if (videoConfig != null) { - updateTransformation() - } - } + override suspend fun setCameraId(cameraId: String) = cameraSource.setCameraId(cameraId) /** * Gets configuration information. @@ -153,10 +147,6 @@ open class CameraSingleStreamer( return CameraStreamerConfigurationInfo(endpointInfo) } - override fun isMirroringRequired(): Boolean { - return cameraSource.infoProvider.isFrontFacing - } - /** * Sets a preview surface. */ @@ -215,10 +205,8 @@ open class CameraSingleStreamer( /** * Same as [SingleStreamer.release] but it also calls [stopPreview]. 
*/ - override fun release() { - runBlocking { - stopPreview() - } + override suspend fun release() { + stopPreview() super.release() } } \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/ISingleStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/ISingleStreamer.kt index f7b004435..01cfd80b9 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/ISingleStreamer.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/ISingleStreamer.kt @@ -15,23 +15,26 @@ */ package io.github.thibaultbee.streampack.core.streamers.single -import android.Manifest import android.net.Uri import androidx.annotation.IntRange -import androidx.annotation.RequiresPermission import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor import io.github.thibaultbee.streampack.core.elements.encoders.AudioCodecConfig import io.github.thibaultbee.streampack.core.elements.encoders.IEncoder import io.github.thibaultbee.streampack.core.elements.encoders.VideoCodecConfig import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpoint -import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSource -import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSource import io.github.thibaultbee.streampack.core.elements.utils.RotationValue import io.github.thibaultbee.streampack.core.elements.utils.extensions.rotationToDegrees import io.github.thibaultbee.streampack.core.regulator.controllers.IBitrateRegulatorController import io.github.thibaultbee.streampack.core.streamers.infos.IConfigurationInfo -import kotlinx.coroutines.flow.StateFlow +import io.github.thibaultbee.streampack.core.streamers.interfaces.IAudioStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICallbackAudioStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICallbackStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICallbackVideoStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineAudioStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineVideoStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.IVideoStreamer /** * The single streamer audio configuration. @@ -47,36 +50,6 @@ typealias VideoConfig = VideoCodecConfig * A single Streamer that is agnostic to the underlying implementation (either with coroutines or callbacks). */ interface ISingleStreamer { - /** - * Gets the audio configuration. - */ - val audioConfig: AudioConfig? - - /** - * Gets the video configuration. - */ - val videoConfig: VideoConfig? - - /** - * Advanced settings for the audio source. - */ - val audioSource: IAudioSource? - - /** - * Advanced settings for the audio encoder. - */ - val audioEncoder: IEncoder? - - /** - * Advanced settings for the video source. - */ - val videoSource: IVideoSource? - - /** - * Advanced settings for the video encoder. - */ - val videoEncoder: IEncoder? - /** * Advanced settings for the endpoint. */ @@ -98,31 +71,6 @@ interface ISingleStreamer { */ fun getInfo(descriptor: MediaDescriptor): IConfigurationInfo - /** - * Configures only audio settings. 
- * - * @param audioConfig Audio configuration to set - * - * @throws [Throwable] if configuration can not be applied. - * @see [release] - */ - fun setAudioConfig(audioConfig: AudioConfig) - - /** - * Configures only video settings. - * - * @param videoConfig Video configuration to set - * - * @throws [Throwable] if configuration can not be applied. - * @see [release] - */ - fun setVideoConfig(videoConfig: VideoConfig) - - /** - * Clean and reset the streamer. - */ - fun release() - /** * Adds a bitrate regulator controller to the streamer. */ @@ -141,74 +89,50 @@ val ISingleStreamer.targetRotationDegrees: Int @IntRange(from = 0, to = 359) get() = targetRotation.rotationToDegrees - /** - * Configures both video and audio settings. - * It is the first method to call after a [SingleStreamer] instantiation. - * It must be call when both stream and audio and video capture are not running. - * - * Use [IConfigurationInfo] to get value limits. - * - * If video encoder does not support [VideoConfig.level] or [VideoConfig.profile], it fallbacks - * to video encoder default level and default profile. - * - * @param audioConfig Audio configuration to set - * @param videoConfig Video configuration to set - * - * @throws [Throwable] if configuration can not be applied. - * @see [ISingleStreamer.release] - */ -@RequiresPermission(Manifest.permission.RECORD_AUDIO) -fun ISingleStreamer.setConfig(audioConfig: AudioConfig, videoConfig: VideoConfig) { - setAudioConfig(audioConfig) - setVideoConfig(videoConfig) -} - -/** - * A single Streamer based on coroutines. + * An audio single Streamer */ -interface ICoroutineSingleStreamer : ISingleStreamer { +interface IAudioSingleStreamer : IAudioStreamer { /** - * Returns the last throwable that occurred. + * Gets the audio configuration. */ - val throwable: StateFlow + val audioConfig: AudioConfig? /** - * Returns true if endpoint is opened. - * For example, if the streamer is connected to a server if the endpoint is SRT or RTMP. + * Advanced settings for the audio encoder. */ - val isOpen: StateFlow + val audioEncoder: IEncoder? +} +/** + * A video single streamer. + */ +interface IVideoSingleStreamer : IVideoStreamer { /** - * Returns true if stream is running. + * Gets the video configuration. */ - val isStreaming: StateFlow + val videoConfig: VideoConfig? /** - * Opens the streamer endpoint. - * - * @param descriptor Media descriptor to open + * Advanced settings for the video encoder. */ - suspend fun open(descriptor: MediaDescriptor) + val videoEncoder: IEncoder? +} - /** - * Closes the streamer endpoint. - */ - suspend fun close() +interface ICoroutineAudioSingleStreamer : ICoroutineAudioStreamer, IAudioSingleStreamer - /** - * Starts audio/video stream. - * - * @see [stopStream] - */ - suspend fun startStream() +interface ICoroutineVideoSingleStreamer : ICoroutineVideoStreamer, IVideoSingleStreamer +/** + * A single Streamer based on coroutines. + */ +interface ICoroutineSingleStreamer : ICoroutineStreamer, ISingleStreamer { /** - * Stops audio/video stream. + * Opens the streamer endpoint. * - * @see [startStream] + * @param descriptor Media descriptor to open */ - suspend fun stopStream() + suspend fun open(descriptor: MediaDescriptor) } /** @@ -266,18 +190,12 @@ suspend fun ICoroutineSingleStreamer.startStream(uriString: String) { startStream() } -interface ICallbackSingleStreamer : ISingleStreamer { - /** - * Returns true if endpoint is opened. - * For example, if the streamer is connected to a server if the endpoint is SRT or RTMP. 
- */ - val isOpen: Boolean - /** - * Returns true if stream is running. - */ - val isStreaming: Boolean +interface ICallbackAudioSingleStreamer : ICallbackAudioStreamer, IAudioSingleStreamer + +interface ICallbackVideoSingleStreamer : ICallbackVideoStreamer, IVideoSingleStreamer +interface ICallbackSingleStreamer : ICallbackStreamer, ISingleStreamer { /** * Opens the streamer endpoint asynchronously. * @@ -288,37 +206,6 @@ interface ICallbackSingleStreamer : ISingleStreamer { */ fun open(descriptor: MediaDescriptor) - /** - * Closes the streamer endpoint. - */ - fun close() - - /** - * Starts audio/video stream asynchronously. - * - * You must call [open] before calling this method. - * The streamer must be opened before starting the stream. You can use [Listener.onIsOpenChanged]. - * - * @see [stopStream] - */ - fun startStream() - - /** - * Starts audio/video stream asynchronously. - * - * Same as doing [open] and [startStream]. - * - * @see [stopStream] - */ - fun startStream(descriptor: MediaDescriptor) - - /** - * Stops audio/video stream asynchronously. - * - * @see [startStream] - */ - fun stopStream() - /** * Adds a listener to the streamer. */ @@ -332,7 +219,7 @@ interface ICallbackSingleStreamer : ISingleStreamer { /** * Listener for the callback streamer. */ - interface Listener { + interface Listener : ICallbackStreamer.Listener { /** * Called when the streamer opening failed. * @@ -340,33 +227,15 @@ interface ICallbackSingleStreamer : ISingleStreamer { */ fun onOpenFailed(t: Throwable) = Unit - /** - * Called when the streamer is opened or closed. - */ - fun onIsOpenChanged(isOpen: Boolean) = Unit - /** * Called when the streamer was closed by an error. * * @param t The reason why the streamer was closed */ fun onClose(t: Throwable) = Unit - - /** - * Called when the stream is started or stopped. - */ - fun onIsStreamingChanged(isStarted: Boolean) = Unit - - /** - * Called when an error occurs. - * - * @param throwable The throwable that occurred - */ - fun onError(throwable: Throwable) = Unit } } - /** * Opens the streamer endpoint. * diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/ScreenRecorderSingleStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/ScreenRecorderSingleStreamer.kt index 61d3907f8..f9e51d6eb 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/ScreenRecorderSingleStreamer.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/ScreenRecorderSingleStreamer.kt @@ -75,25 +75,12 @@ open class ScreenRecorderSingleStreamer( (videoSourceInternal as MediaProjectionVideoSource).apply { listener = object : MediaProjectionVideoSource.Listener { override fun onStop() { - onStreamError(Exception("Screen source has been stopped")) + // TODO: handle error from source + // onStreamError(Exception("Screen source has been stopped")) } } } - companion object { - /** - * Create a screen record intent that must be pass to [ActivityCompat.startActivityForResult]. - * It will prompt the user whether to allow screen capture. - * - * @param context application/service context - * @return the intent to pass to [ActivityCompat.startActivityForResult] - */ - fun createScreenRecorderIntent(context: Context): Intent = - (context.getSystemService(Context.MEDIA_PROJECTION_SERVICE) as MediaProjectionManager).run { - createScreenCaptureIntent() - } - } - /** * Set/get activity result from [ComponentActivity.registerForActivityResult] callback. 
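+     *
+     * A sketch of wiring it from an Activity (variable names are illustrative):
+     * ```
+     * val launcher = registerForActivityResult(ActivityResultContracts.StartActivityForResult()) { result ->
+     *     streamer.activityResult = result
+     * }
+     * launcher.launch(ScreenRecorderSingleStreamer.createScreenRecorderIntent(this))
+     * ```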
* It is mandatory to set this before [startStream]. @@ -116,4 +103,19 @@ open class ScreenRecorderSingleStreamer( (audioSourceInternal as IMediaProjectionSource).activityResult = value } } + + + companion object { + /** + * Create a screen record intent that must be pass to [ActivityCompat.startActivityForResult]. + * It will prompt the user whether to allow screen capture. + * + * @param context application/service context + * @return the intent to pass to [ActivityCompat.startActivityForResult] + */ + fun createScreenRecorderIntent(context: Context): Intent = + (context.getSystemService(Context.MEDIA_PROJECTION_SERVICE) as MediaProjectionManager).run { + createScreenCaptureIntent() + } + } } \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/SingleStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/SingleStreamer.kt index 1efb8d959..89e943558 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/SingleStreamer.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/SingleStreamer.kt @@ -17,207 +17,122 @@ package io.github.thibaultbee.streampack.core.streamers.single import android.Manifest import android.content.Context -import android.util.Size import android.view.Surface import androidx.annotation.RequiresPermission import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.MediaDescriptor -import io.github.thibaultbee.streampack.core.elements.data.Frame -import io.github.thibaultbee.streampack.core.elements.encoders.CodecConfig import io.github.thibaultbee.streampack.core.elements.encoders.IEncoder -import io.github.thibaultbee.streampack.core.elements.encoders.IEncoderInternal -import io.github.thibaultbee.streampack.core.elements.encoders.mediacodec.AudioEncoderConfig -import io.github.thibaultbee.streampack.core.elements.encoders.mediacodec.MediaCodecEncoder -import io.github.thibaultbee.streampack.core.elements.encoders.mediacodec.VideoEncoderConfig -import io.github.thibaultbee.streampack.core.elements.encoders.rotateFromNaturalOrientation import io.github.thibaultbee.streampack.core.elements.endpoints.DynamicEndpoint import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpoint import io.github.thibaultbee.streampack.core.elements.endpoints.IEndpointInternal -import io.github.thibaultbee.streampack.core.elements.processing.video.SurfaceProcessor -import io.github.thibaultbee.streampack.core.elements.processing.video.outputs.AbstractSurfaceOutput -import io.github.thibaultbee.streampack.core.elements.processing.video.outputs.SurfaceOutput -import io.github.thibaultbee.streampack.core.elements.processing.video.source.ISourceInfoProvider import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSource import io.github.thibaultbee.streampack.core.elements.sources.audio.IAudioSourceInternal -import io.github.thibaultbee.streampack.core.elements.sources.video.ISurfaceSource -import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoFrameSource import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSource import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSourceInternal import io.github.thibaultbee.streampack.core.elements.utils.RotationValue +import io.github.thibaultbee.streampack.core.elements.utils.combineStates import io.github.thibaultbee.streampack.core.elements.utils.extensions.displayRotation -import 
io.github.thibaultbee.streampack.core.elements.utils.extensions.sourceConfig -import io.github.thibaultbee.streampack.core.logger.Logger +import io.github.thibaultbee.streampack.core.pipelines.StreamerPipeline +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutputInternal import io.github.thibaultbee.streampack.core.regulator.controllers.IBitrateRegulatorController import io.github.thibaultbee.streampack.core.streamers.infos.IConfigurationInfo import io.github.thibaultbee.streampack.core.streamers.infos.StreamerConfigurationInfo -import kotlinx.coroutines.CoroutineDispatcher -import kotlinx.coroutines.asCoroutineDispatcher -import kotlinx.coroutines.flow.MutableStateFlow +import io.github.thibaultbee.streampack.core.streamers.interfaces.ICoroutineStreamer import kotlinx.coroutines.flow.StateFlow import kotlinx.coroutines.runBlocking -import kotlinx.coroutines.withContext -import java.nio.ByteBuffer -import java.util.concurrent.Executors /** - * The single streamer implementation. + * Base class of all single streamer. * - * A single streamer is a streamer that can handle only one stream at a time. + * A single streamer is a streamer that can handle only one audio and video stream at a time. * * @param context the application context * @param videoSourceInternal the video source implementation * @param audioSourceInternal the audio source implementation - * @param endpointInternal the [IEndpointInternal] implementation + * @param endpointInternal the [IEndpointInternal] implementation. By default, it is a [DynamicEndpoint]. * @param defaultRotation the default rotation in [Surface] rotation ([Surface.ROTATION_0], ...). By default, it is the current device orientation. */ open class SingleStreamer( protected val context: Context, - protected val audioSourceInternal: IAudioSourceInternal?, - protected val videoSourceInternal: IVideoSourceInternal?, - protected val endpointInternal: IEndpointInternal = DynamicEndpoint(context), + audioSourceInternal: IAudioSourceInternal?, + videoSourceInternal: IVideoSourceInternal?, + endpointInternal: IEndpointInternal = DynamicEndpoint(context), @RotationValue defaultRotation: Int = context.displayRotation -) : ICoroutineSingleStreamer { - private val dispatcher: CoroutineDispatcher = - Executors.newSingleThreadExecutor().asCoroutineDispatcher() - - private val _throwable = MutableStateFlow(null) - override val throwable: StateFlow = _throwable - - private var audioStreamId: Int? = null - private var videoStreamId: Int? = null - - private var bitrateRegulatorController: IBitrateRegulatorController? = null - - // Keep configurations - private var _audioConfig: AudioConfig? = null - private var _videoConfig: VideoConfig? = null - - override val audioConfig: AudioConfig? - get() = _audioConfig - - override val videoConfig: VideoConfig? 
- get() = _videoConfig - - protected val sourceInfoProvider = videoSourceInternal?.infoProvider - - private val audioEncoderListener = object : IEncoderInternal.IListener { - override fun onError(t: Throwable) { - onStreamError(t) - } - - override fun onOutputFrame(frame: Frame) { - audioStreamId?.let { - runBlocking { - this@SingleStreamer.endpointInternal.write(frame, it) - } - } - } +) : ICoroutineSingleStreamer, ICoroutineAudioSingleStreamer, ICoroutineVideoSingleStreamer { + private val pipeline = StreamerPipeline( + context, + audioSourceInternal, + videoSourceInternal + ) + private val pipelineOutput: IEncodingPipelineOutputInternal = runBlocking { + pipeline.addOutput( + endpointInternal, + defaultRotation + ) as IEncodingPipelineOutputInternal } - private val videoEncoderListener = object : IEncoderInternal.IListener { - override fun onError(t: Throwable) { - onStreamError(t) + override val throwable: StateFlow = + combineStates(pipeline.throwable, pipelineOutput.throwable) { throwableArray -> + throwableArray[0] ?: throwableArray[1] } - override fun onOutputFrame(frame: Frame) { - videoStreamId?.let { - val videoEncoder = - requireNotNull(videoEncoderInternal) { "Video encoder must not be null" } - val timestampOffset = if (videoEncoder is ISurfaceSource) { - videoEncoder.timestampOffset - } else { - 0L - } - frame.pts += timestampOffset - frame.dts = if (frame.dts != null) { - frame.dts!! + timestampOffset - } else { - null - } - runBlocking { - this@SingleStreamer.endpointInternal.write(frame, it) - } - } - } - } + override val isOpen: StateFlow + get() = pipelineOutput.isOpen - /** - * Manages error on stream. - * Stops only stream. - * - * @param t triggered [Throwable] - */ - protected fun onStreamError(t: Throwable) { - try { - runBlocking { - stopStream() - } - } catch (t: Throwable) { - Logger.e(TAG, "onStreamError: Can't stop stream", t) - } finally { - Logger.e(TAG, "onStreamError: ${t.message}", t) - _throwable.tryEmit(t) - } + override val isStreaming: StateFlow = combineStates( + pipelineOutput.isStreaming, + pipeline.isStreaming + ) { isStreamingArray -> + isStreamingArray[0] && isStreamingArray[1] } - // SOURCES - + // AUDIO /** * The audio source. - * It allows advanced audio settings. + * It allows advanced audio source settings. */ override val audioSource: IAudioSource? - get() = audioSourceInternal + get() = pipeline.audioSource + override val audioEncoder: IEncoder? + get() = pipelineOutput.audioEncoder + // VIDEO /** * The video source. - * It allows advanced video settings. + * It allows advanced video source settings. */ override val videoSource: IVideoSource? - get() = videoSourceInternal - - // ENCODERS - - private var audioEncoderInternal: IEncoderInternal? = null - - /** - * The audio encoder. - * Only valid when audio has been [setAudioConfig]. It is null after [release]. - */ - override val audioEncoder: IEncoder? - get() = audioEncoderInternal - - private var videoEncoderInternal: IEncoderInternal? = null - - /** - * The video encoder. - * Only valid when audio has been [setAudioConfig]. It is null after [release]. - */ + get() = pipeline.videoSource override val videoEncoder: IEncoder? - get() = videoEncoderInternal + get() = pipelineOutput.videoEncoder - private var surfaceProcessor: SurfaceProcessor? = null + // INTERNAL + protected val videoSourceInternal = pipeline.videoSource as IVideoSourceInternal? + protected val audioSourceInternal = pipeline.audioSource as IAudioSourceInternal? 
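As an illustration of the delegation above, the whole single-streamer lifecycle is now driven from a coroutine: configuration, open/close and start/stop all suspend and are forwarded either to the pipeline or to its encoding output. A minimal sketch (not part of this patch), assuming the default AudioConfig()/VideoConfig() constructors, a placeholder SRT URL, and that CAMERA and RECORD_AUDIO permissions are already granted:

    import android.content.Context
    import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig
    import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer
    import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig
    import io.github.thibaultbee.streampack.core.streamers.single.startStream

    // Minimal caller of the refactored coroutine API (illustrative only).
    suspend fun streamOnce(context: Context) {
        val streamer = CameraSingleStreamer(context)
        // setConfig() is the new suspend member that forwards both configs to the
        // encoding pipeline output. Default configs are assumed here.
        streamer.setConfig(AudioConfig(), VideoConfig())
        // The startStream(uriString) extension opens the endpoint, then starts.
        streamer.startStream("srt://example.com:9998") // placeholder URL
        // ... streaming ...
        streamer.stopStream()
        streamer.close()
        streamer.release()
    }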
// ENDPOINT - override val endpoint: IEndpoint - get() = endpointInternal - - override val isOpen: StateFlow - get() = endpointInternal.isOpen - - - private val _isStreaming = MutableStateFlow(false) - override val isStreaming: StateFlow = _isStreaming + get() = pipelineOutput.endpoint /** * Whether the streamer has audio. */ - val hasAudio = audioSourceInternal != null + val hasAudio: Boolean + get() = pipeline.hasAudio /** * Whether the streamer has video. */ - val hasVideo = videoSourceInternal != null + val hasVideo: Boolean + get() = pipeline.hasVideo + + /** + * The target rotation in [Surface] rotation ([Surface.ROTATION_0], ...) + */ + override var targetRotation: Int + @RotationValue get() = pipeline.targetRotation + set(@RotationValue newTargetRotation) { + pipeline.targetRotation = newTargetRotation + } /** * Gets configuration information. @@ -229,34 +144,6 @@ open class SingleStreamer( override val info: IConfigurationInfo get() = StreamerConfigurationInfo(endpoint.info) - /** - * The target rotation in [Surface] rotation ([Surface.ROTATION_0], ...) - */ - @RotationValue - private var _targetRotation = defaultRotation - - /** - * Keep the target rotation if it can't be applied immediately. - * It will be applied when the stream is stopped. - */ - @RotationValue - private var pendingTargetRotation: Int? = null - - /** - * The target rotation in [Surface] rotation ([Surface.ROTATION_0], ...) - */ - override var targetRotation: Int - @RotationValue get() = _targetRotation - set(@RotationValue newTargetRotation) { - if (isStreaming.value) { - Logger.w(TAG, "Can't change rotation while streaming") - pendingTargetRotation = newTargetRotation - return - } - - setTargetRotationInternal(newTargetRotation) - } - /** * Gets configuration information from [MediaDescriptor]. * @@ -274,6 +161,22 @@ open class SingleStreamer( return StreamerConfigurationInfo(endpointInfo) } + init { + require(!(audioSourceInternal == null && videoSourceInternal == null)) { + "At least one source must be provided" + } + } + + // CONFIGURATION + /** + * Whether the streamer has audio configuration. + */ + val hasAudioConfig: Boolean + get() = pipelineOutput.audioCodecConfigFlow.value != null + + override val audioConfig: AudioConfig + get() = requireNotNull(pipelineOutput.audioCodecConfigFlow.value) + /** * Configures audio settings. * It is the first method to call after a [SingleStreamer] instantiation. @@ -286,246 +189,57 @@ open class SingleStreamer( * @throws [Throwable] if configuration can not be applied. 
*/ @RequiresPermission(Manifest.permission.RECORD_AUDIO) - override fun setAudioConfig(audioConfig: AudioConfig) { - require(hasAudio) { "Do not need to set audio as it is a video only streamer" } - requireNotNull(audioSourceInternal) { "Audio source must not be null" } - - if (this._audioConfig == audioConfig) { - Logger.i(TAG, "Audio configuration is the same, skipping configuration") - return - } - - this._audioConfig = audioConfig - - try { - audioSourceInternal.configure(audioConfig.sourceConfig) - - audioEncoderInternal?.release() - audioEncoderInternal = MediaCodecEncoder( - AudioEncoderConfig( - audioConfig - ), listener = audioEncoderListener - ).apply { - if (input is MediaCodecEncoder.ByteBufferInput) { - input.listener = - object : IEncoderInternal.IByteBufferInput.OnFrameRequestedListener { - override fun onFrameRequested(buffer: ByteBuffer): Frame { - return audioSourceInternal.getAudioFrame(buffer) - } - } - } else { - throw UnsupportedOperationException("Audio encoder only support ByteBuffer mode") - } - configure() - } - } catch (t: Throwable) { - release() - throw t - } + override suspend fun setAudioConfig(audioConfig: AudioConfig) { + pipelineOutput.setAudioCodecConfig(audioConfig) } /** - * Creates a surface output for the given surface. - * - * Use it for additional processing. - * - * @param surface the encoder surface - * @param resolution the resolution of the surface - * @param infoProvider the source info provider for internal processing + * Whether the streamer has video configuration. */ - protected open fun buildSurfaceOutput( - surface: Surface, resolution: Size, infoProvider: ISourceInfoProvider - ): AbstractSurfaceOutput { - return SurfaceOutput( - surface, resolution, SurfaceOutput.TransformationInfo( - targetRotation, isMirroringRequired(), infoProvider - ) - ) - } + val hasVideoConfig: Boolean + get() = pipelineOutput.videoCodecConfigFlow.value != null - /** - * Whether the output surface needs to be mirrored. - */ - protected open fun isMirroringRequired(): Boolean { - return false - } + override val videoConfig: VideoConfig + get() = requireNotNull(pipelineOutput.videoCodecConfigFlow.value) /** - * Updates the transformation of the surface output. - * To be called when the source info provider or [isMirroringRequired] is updated. - */ - protected fun updateTransformation() { - val sourceInfoProvider = requireNotNull(sourceInfoProvider) { - "Source info provider must not be null" - } - val videoConfig = requireNotNull(videoConfig) { "Video config must not be null" } - - val videoEncoder = requireNotNull(videoEncoderInternal) { "Video encoder must not be null" } - val input = videoEncoder.input as MediaCodecEncoder.SurfaceInput - - val surface = requireNotNull(input.surface) { "Surface must not be null" } - updateTransformation(surface, videoConfig.resolution, sourceInfoProvider) - } - - /** - * Updates the transformation of the surface output. + * Configures video settings. + * It is the first method to call after a [SingleStreamer] instantiation. + * It must be call when both stream and video capture are not running. + * + * Use [IConfigurationInfo] to get value limits. + * + * If video encoder does not support [VideoConfig.level] or [VideoConfig.profile], it fallbacks + * to video encoder default level and default profile. + * + * @param videoConfig Video configuration to set + * + * @throws [Throwable] if configuration can not be applied. 
*/ - protected open fun updateTransformation( - surface: Surface, resolution: Size, infoProvider: ISourceInfoProvider - ) { - Logger.i(TAG, "Updating transformation") - surfaceProcessor?.removeOutputSurface(surface) - surfaceProcessor?.addOutputSurface( - buildSurfaceOutput( - surface, resolution, infoProvider - ) - ) - } - - private fun buildOrUpdateSurfaceProcessor( - videoConfig: VideoConfig, videoSource: IVideoSourceInternal - ): SurfaceProcessor { - if (videoSource !is ISurfaceSource) { - throw IllegalStateException("Video source must have an output surface") - } - val previousSurfaceProcessor = surfaceProcessor - val newSurfaceProcessor = when { - previousSurfaceProcessor == null -> SurfaceProcessor(videoConfig.dynamicRangeProfile) - previousSurfaceProcessor.dynamicRangeProfile != videoConfig.dynamicRangeProfile -> { - videoSource.outputSurface?.let { - previousSurfaceProcessor.removeInputSurface(it) - } - previousSurfaceProcessor.removeAllOutputSurfaces() - previousSurfaceProcessor.release() - SurfaceProcessor(videoConfig.dynamicRangeProfile) - } - - else -> previousSurfaceProcessor - } - - if (newSurfaceProcessor != previousSurfaceProcessor) { - videoSource.outputSurface = newSurfaceProcessor.createInputSurface( - videoSource.infoProvider.getSurfaceSize( - videoConfig.resolution, targetRotation - ) - ) - } else { - newSurfaceProcessor.updateInputSurface( - videoSource.outputSurface!!, - videoSource.infoProvider.getSurfaceSize(videoConfig.resolution, targetRotation) - ) - } - - return newSurfaceProcessor - } - - private fun buildAndConfigureVideoEncoder( - videoConfig: VideoConfig, videoSource: IVideoSourceInternal - ): IEncoderInternal { - val videoEncoder = MediaCodecEncoder( - VideoEncoderConfig( - videoConfig, videoSource is ISurfaceSource - ), listener = videoEncoderListener - ) - - when (videoEncoder.input) { - is MediaCodecEncoder.SurfaceInput -> { - surfaceProcessor = buildOrUpdateSurfaceProcessor(videoConfig, videoSource) - - videoEncoder.input.listener = - object : IEncoderInternal.ISurfaceInput.OnSurfaceUpdateListener { - override fun onSurfaceUpdated(surface: Surface) { - val surfaceProcessor = requireNotNull(surfaceProcessor) { - "Surface processor must not be null" - } - // TODO: only remove previous encoder surface - surfaceProcessor.removeAllOutputSurfaces() - Logger.d(TAG, "Updating with new encoder surface input") - surfaceProcessor.addOutputSurface( - buildSurfaceOutput( - surface, videoConfig.resolution, videoSource.infoProvider - ) - ) - } - } - } - - is MediaCodecEncoder.ByteBufferInput -> { - videoEncoder.input.listener = - object : IEncoderInternal.IByteBufferInput.OnFrameRequestedListener { - override fun onFrameRequested(buffer: ByteBuffer): Frame { - return (videoSource as IVideoFrameSource).getVideoFrame(buffer) - } - } - } - - else -> { - throw UnsupportedOperationException("Unknown input type") - } - } - - videoEncoder.configure() - - return videoEncoder - } - - private fun buildAndConfigureVideoEncoderIfNeeded( - videoConfig: VideoConfig, - videoSource: IVideoSourceInternal, - @RotationValue targetRotation: Int - ): IEncoderInternal { - val rotatedVideoConfig = videoConfig.rotateFromNaturalOrientation(context, targetRotation) - - // Release codec instance - videoEncoderInternal?.let { encoder -> - val input = encoder.input - if (input is MediaCodecEncoder.SurfaceInput) { - input.surface?.let { surface -> - surfaceProcessor?.removeOutputSurface(surface) - } - } - encoder.release() - } - - // Prepare new codec instance - return 
buildAndConfigureVideoEncoder(rotatedVideoConfig, videoSource) + override suspend fun setVideoConfig(videoConfig: VideoConfig) { + pipelineOutput.setVideoCodecConfig(videoConfig) } /** - * Configures video settings. + * Configures both video and audio settings. * It is the first method to call after a [SingleStreamer] instantiation. - * It must be call when both stream and video capture are not running. + * It must be call when both stream and audio and video capture are not running. * * Use [IConfigurationInfo] to get value limits. * * If video encoder does not support [VideoConfig.level] or [VideoConfig.profile], it fallbacks * to video encoder default level and default profile. * + * @param audioConfig Audio configuration to set * @param videoConfig Video configuration to set * * @throws [Throwable] if configuration can not be applied. + * @see [ICoroutineStreamer.release] */ - override fun setVideoConfig(videoConfig: VideoConfig) { - require(hasVideo) { "Do not need to set video as it is a audio only streamer" } - requireNotNull(videoSourceInternal) { "Video source must not be null" } - - if (this._videoConfig == videoConfig) { - Logger.i(TAG, "Video configuration is the same, skipping configuration") - return - } - - this._videoConfig = videoConfig - - try { - videoSourceInternal.configure(videoConfig.sourceConfig) - - videoEncoderInternal = buildAndConfigureVideoEncoderIfNeeded( - videoConfig, videoSourceInternal, targetRotation - ) - } catch (t: Throwable) { - release() - throw t - } + @RequiresPermission(Manifest.permission.RECORD_AUDIO) + suspend fun setConfig(audioConfig: AudioConfig, videoConfig: VideoConfig) { + setAudioConfig(audioConfig) + setVideoConfig(videoConfig) } /** @@ -533,17 +247,12 @@ open class SingleStreamer( * * @param descriptor Media descriptor to open */ - override suspend fun open(descriptor: MediaDescriptor) = withContext(dispatcher) { - endpointInternal.open(descriptor) - } + override suspend fun open(descriptor: MediaDescriptor) = pipelineOutput.open(descriptor) /** * Closes the streamer endpoint. */ - override suspend fun close() = withContext(dispatcher) { - stopStreamInternal() - endpointInternal.close() - } + override suspend fun close() = pipelineOutput.close() /** * Starts audio/video stream. @@ -553,51 +262,7 @@ open class SingleStreamer( * * @see [stopStream] */ - override suspend fun startStream() = withContext(dispatcher) { - require(isOpen.value) { "Endpoint must be opened before starting stream" } - require(!isStreaming.value) { "Stream is already running" } - - try { - val streams = mutableListOf() - val orientedVideoConfig = if (hasVideo) { - val videoConfig = requireNotNull(_videoConfig) { "Requires video config" } - /** - * If sourceOrientationProvider is not null, we need to get oriented size. - * For example, the [FlvMuxer] `onMetaData` event needs to know the oriented size. 
- */ - videoConfig.rotateFromNaturalOrientation(context, targetRotation) - } else { - null - } - if (orientedVideoConfig != null) { - streams.add(orientedVideoConfig) - } - - if (hasAudio) { - val audioConfig = requireNotNull(_audioConfig) { "Requires audio config" } - streams.add(audioConfig) - } - - val streamsIdMap = endpointInternal.addStreams(streams) - orientedVideoConfig?.let { videoStreamId = streamsIdMap[orientedVideoConfig] } - _audioConfig?.let { audioStreamId = streamsIdMap[_audioConfig as CodecConfig] } - - endpointInternal.startStream() - - audioSourceInternal?.startStream() - audioEncoderInternal?.startStream() - - videoSourceInternal?.startStream() - videoEncoderInternal?.startStream() - - bitrateRegulatorController?.start() - - _isStreaming.emit(true) - } catch (t: Throwable) { - stopStreamInternal() - throw t - } - } + override suspend fun startStream() = pipelineOutput.startStream() /** * Stops audio/video stream. @@ -607,97 +272,13 @@ open class SingleStreamer( * * @see [startStream] */ - override suspend fun stopStream() = withContext(dispatcher) { - stopStreamInternal() - } - - private fun resetVideoEncoder() { - val previousVideoEncoder = videoEncoderInternal - pendingTargetRotation?.let { - setTargetRotationInternal(it) - } - pendingTargetRotation = null - - // Only reset if the encoder is the same. Otherwise, it is already configured. - if (previousVideoEncoder == videoEncoderInternal) { - videoEncoderInternal?.reset() - } - } - - /** - * Stops audio/video and reset stream implementation. - * - * @see [stopStream] - */ - private suspend fun stopStreamInternal() { - stopStreamImpl() - - audioEncoderInternal?.reset() - resetVideoEncoder() - - _isStreaming.emit(false) - } - - /** - * Stops audio/video stream implementation. - * - * @see [stopStream] - */ - private suspend fun stopStreamImpl() { - bitrateRegulatorController?.stop() - - // Sources - audioSourceInternal?.stopStream() - videoSourceInternal?.stopStream() - - // Encoders - try { - audioEncoderInternal?.stopStream() - } catch (e: IllegalStateException) { - Logger.w(TAG, "stopStreamImpl: Can't stop audio encoder: ${e.message}") - } - try { - videoEncoderInternal?.stopStream() - } catch (e: IllegalStateException) { - Logger.w(TAG, "stopStreamImpl: Can't stop video encoder: ${e.message}") - } - - // Endpoint - endpointInternal.stopStream() - } + override suspend fun stopStream() = pipeline.stopStream() /** - * Releases recorders and encoders object. - * It also stops preview if needed - * - * @see [setAudioConfig] + * Releases the streamer. */ - override fun release() { - // Sources - audioSourceInternal?.release() - val videoSource = videoSourceInternal - val outputSurface = if (videoSource is ISurfaceSource) { - val surface = videoSource.outputSurface - videoSource.outputSurface = null - surface - } else { - null - } - videoSourceInternal?.release() - outputSurface?.let { - surfaceProcessor?.removeInputSurface(it) - } - - surfaceProcessor?.release() - - // Encoders - audioEncoderInternal?.release() - audioEncoderInternal = null - videoEncoderInternal?.release() - videoEncoderInternal = null - - // Endpoint - endpointInternal.release() + override suspend fun release() { + pipeline.release() } /** @@ -705,57 +286,14 @@ open class SingleStreamer( * * Limitation: it is only available for SRT for now. 
*/ - override fun addBitrateRegulatorController(controllerFactory: IBitrateRegulatorController.Factory) { - bitrateRegulatorController?.stop() - bitrateRegulatorController = controllerFactory.newBitrateRegulatorController(this).apply { - if (isStreaming.value) { - this.start() - } - Logger.d( - TAG, "Bitrate regulator controller added: ${this.javaClass.simpleName}" - ) - } - - } + override fun addBitrateRegulatorController(controllerFactory: IBitrateRegulatorController.Factory) = + pipelineOutput.addBitrateRegulatorController(controllerFactory) /** * Removes the bitrate regulator controller. */ - override fun removeBitrateRegulatorController() { - bitrateRegulatorController?.stop() - bitrateRegulatorController = null - Logger.d(TAG, "Bitrate regulator controller removed") - } - - private fun setTargetRotationInternal(@RotationValue newTargetRotation: Int) { - if (shouldUpdateRotation(newTargetRotation)) { - sendTransformation() - } - } - - private fun sendTransformation() { - if (hasVideo) { - val videoConfig = videoConfig - if (videoConfig != null) { - videoSourceInternal?.configure(videoConfig.sourceConfig) - videoEncoderInternal = buildAndConfigureVideoEncoderIfNeeded( - videoConfig, requireNotNull(videoSourceInternal), targetRotation - ) - } - } - } - - /** - * @return true if the target rotation has changed - */ - private fun shouldUpdateRotation(@RotationValue newTargetRotation: Int): Boolean { - return if (targetRotation != newTargetRotation) { - _targetRotation = newTargetRotation - true - } else { - false - } - } + override fun removeBitrateRegulatorController() = + pipelineOutput.removeBitrateRegulatorController() companion object { const val TAG = "SingleStreamer" diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/callbacks/CallbackSingleStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/callbacks/CallbackSingleStreamer.kt index cb32da6fb..7e8f69a20 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/callbacks/CallbackSingleStreamer.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/callbacks/CallbackSingleStreamer.kt @@ -10,9 +10,14 @@ import io.github.thibaultbee.streampack.core.elements.sources.video.IVideoSource import io.github.thibaultbee.streampack.core.elements.utils.RotationValue import io.github.thibaultbee.streampack.core.regulator.controllers.IBitrateRegulatorController import io.github.thibaultbee.streampack.core.streamers.infos.IConfigurationInfo +import io.github.thibaultbee.streampack.core.streamers.interfaces.releaseBlocking import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig +import io.github.thibaultbee.streampack.core.streamers.single.ICallbackAudioSingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.ICallbackSingleStreamer +import io.github.thibaultbee.streampack.core.streamers.single.ICallbackVideoSingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer +import io.github.thibaultbee.streampack.core.streamers.single.ISingleStreamer +import io.github.thibaultbee.streampack.core.streamers.single.SingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig import io.github.thibaultbee.streampack.core.utils.extensions.isClosedException import kotlinx.coroutines.CoroutineScope @@ -30,8 +35,8 @@ import kotlinx.coroutines.launch * * @param streamer the [ICoroutineSingleStreamer] to use */ -open class 
CallbackSingleStreamer(val streamer: ICoroutineSingleStreamer) : - ICallbackSingleStreamer { +open class CallbackSingleStreamer(val streamer: SingleStreamer) : + ICallbackSingleStreamer, ICallbackAudioSingleStreamer, ICallbackVideoSingleStreamer { protected val coroutineScope: CoroutineScope = CoroutineScope( SupervisorJob() + Dispatchers.Default ) @@ -100,11 +105,37 @@ open class CallbackSingleStreamer(val streamer: ICoroutineSingleStreamer) : @RequiresPermission(Manifest.permission.RECORD_AUDIO) override fun setAudioConfig(audioConfig: AudioConfig) { - streamer.setAudioConfig(audioConfig) + coroutineScope.launch { + streamer.setAudioConfig(audioConfig) + } } override fun setVideoConfig(videoConfig: VideoConfig) { - streamer.setVideoConfig(videoConfig) + coroutineScope.launch { + streamer.setVideoConfig(videoConfig) + } + } + + /** + * Configures both video and audio settings. + * It is the first method to call after a [SingleStreamer] instantiation. + * It must be call when both stream and audio and video capture are not running. + * + * Use [IConfigurationInfo] to get value limits. + * + * If video encoder does not support [VideoConfig.level] or [VideoConfig.profile], it fallbacks + * to video encoder default level and default profile. + * + * @param audioConfig Audio configuration to set + * @param videoConfig Video configuration to set + * + * @throws [Throwable] if configuration can not be applied. + * @see [ISingleStreamer.release] + */ + @RequiresPermission(Manifest.permission.RECORD_AUDIO) + fun setConfig(audioConfig: AudioConfig, videoConfig: VideoConfig) { + setAudioConfig(audioConfig) + setVideoConfig(videoConfig) } override fun open(descriptor: MediaDescriptor) { @@ -171,9 +202,8 @@ open class CallbackSingleStreamer(val streamer: ICoroutineSingleStreamer) : listeners.remove(listener) } - override fun release() { - streamer.release() + streamer.releaseBlocking() listeners.clear() coroutineScope.coroutineContext.cancelChildren() } diff --git a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/callbacks/CameraCallbackSingleStreamer.kt b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/callbacks/CameraCallbackSingleStreamer.kt index 6dfad5a94..e4d76b9ce 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/callbacks/CameraCallbackSingleStreamer.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/core/streamers/single/callbacks/CameraCallbackSingleStreamer.kt @@ -1,3 +1,18 @@ +/* + * Copyright (C) 2024 Thibault B. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package io.github.thibaultbee.streampack.core.streamers.single.callbacks import android.Manifest @@ -19,6 +34,7 @@ import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStream import io.github.thibaultbee.streampack.core.streamers.single.ICallbackSingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.SingleStreamer +import io.github.thibaultbee.streampack.core.streamers.single.open import kotlinx.coroutines.launch import kotlinx.coroutines.runBlocking import kotlinx.coroutines.sync.Mutex diff --git a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewViewModel.kt b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewViewModel.kt index 17c9840b1..e035ba02c 100644 --- a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewViewModel.kt +++ b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewViewModel.kt @@ -43,11 +43,13 @@ import io.github.thibaultbee.streampack.app.utils.toggleCamera import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor import io.github.thibaultbee.streampack.core.elements.endpoints.MediaSinkType import io.github.thibaultbee.streampack.core.elements.sources.video.camera.CameraSettings +import io.github.thibaultbee.streampack.core.elements.sources.video.camera.isFrameRateSupported import io.github.thibaultbee.streampack.core.streamers.interfaces.ICameraStreamer +import io.github.thibaultbee.streampack.core.streamers.interfaces.releaseBlocking import io.github.thibaultbee.streampack.core.streamers.observers.StreamerViewModelLifeCycleObserver +import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.startStream import io.github.thibaultbee.streampack.core.utils.extensions.isClosedException -import io.github.thibaultbee.streampack.core.elements.sources.video.camera.isFrameRateSupported import io.github.thibaultbee.streampack.ext.srt.regulator.controllers.DefaultSrtBitrateRegulatorController import io.github.thibaultbee.streampack.ui.views.CameraPreviewView import kotlinx.coroutines.flow.combine @@ -63,7 +65,7 @@ class PreviewViewModel(private val application: Application) : ObservableViewMod private val buildStreamerUseCase = BuildStreamerUseCase(application, storageRepository) - private var streamer = buildStreamerUseCase() + private var streamer = CameraSingleStreamer(application)//buildStreamerUseCase() val streamerLifeCycleObserver: DefaultLifecycleObserver get() = StreamerViewModelLifeCycleObserver(streamer) private val cameraSettings: CameraSettings? 
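For callers that cannot suspend, CallbackSingleStreamer now wraps a concrete SingleStreamer and dispatches the suspend calls to its own coroutine scope. A hedged sketch (not part of this patch): the addListener name, the default AudioConfig()/VideoConfig() constructors and the RTMP URL are assumptions for illustration; only the two listener callbacks visible in this hunk are overridden.

    import android.content.Context
    import android.net.Uri
    import android.util.Log
    import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor
    import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig
    import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer
    import io.github.thibaultbee.streampack.core.streamers.single.ICallbackSingleStreamer
    import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig
    import io.github.thibaultbee.streampack.core.streamers.single.callbacks.CallbackSingleStreamer

    // Callback-style usage (illustrative only). CAMERA and RECORD_AUDIO are
    // assumed to be granted.
    fun startCallbackStreamer(context: Context) {
        val streamer = CallbackSingleStreamer(CameraSingleStreamer(context))
        streamer.addListener(object : ICallbackSingleStreamer.Listener {
            override fun onOpenFailed(t: Throwable) {
                Log.e("Demo", "Open failed", t)
            }

            override fun onClose(t: Throwable) {
                Log.w("Demo", "Closed by error", t)
            }
        })
        // Both setters are now launched on an internal coroutine.
        streamer.setAudioConfig(AudioConfig())
        streamer.setVideoConfig(VideoConfig())
        streamer.open(UriMediaDescriptor(Uri.parse("rtmp://example.com/live/key"))) // placeholder URL
    }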
@@ -75,7 +77,7 @@ class PreviewViewModel(private val application: Application) : ObservableViewMod if (streamer is ICameraStreamer) { permissions.add(Manifest.permission.CAMERA) } - if (streamer.audioSource != null) { + if (streamer.hasAudio) { permissions.add(Manifest.permission.RECORD_AUDIO) } storageRepository.endpointDescriptorFlow.asLiveData().value?.let { @@ -135,7 +137,7 @@ class PreviewViewModel(private val application: Application) : ObservableViewMod Pair(isAudioEnable, isVideoEnable) }.collect { (_, _) -> val previousStreamer = streamer - streamer = buildStreamerUseCase(previousStreamer) + //streamer = buildStreamerUseCase(previousStreamer) if (previousStreamer != streamer) { previousStreamer.release() } @@ -167,7 +169,7 @@ class PreviewViewModel(private val application: Application) : ObservableViewMod fun setStreamerView(view: CameraPreviewView) { if (streamer is ICameraStreamer) { - view.streamer = streamer as ICameraStreamer + view.streamer = streamer } } @@ -183,7 +185,9 @@ class PreviewViewModel(private val application: Application) : ObservableViewMod fun configureAudio() { viewModelScope.launch { try { - storageRepository.audioConfigFlow.first()?.let { streamer.setAudioConfig(it) } + storageRepository.audioConfigFlow.first()?.let { + streamer.setAudioConfig(it) + } ?: Log.i( TAG, "Audio is disabled" @@ -401,7 +405,7 @@ class PreviewViewModel(private val application: Application) : ObservableViewMod override fun onCleared() { super.onCleared() try { - streamer.release() + streamer.releaseBlocking() } catch (t: Throwable) { Log.e(TAG, "Streamer release failed", t) } diff --git a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/usecases/BuildStreamerUseCase.kt b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/usecases/BuildStreamerUseCase.kt index 68bedd529..b2c223f48 100644 --- a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/usecases/BuildStreamerUseCase.kt +++ b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/usecases/BuildStreamerUseCase.kt @@ -2,9 +2,9 @@ package io.github.thibaultbee.streampack.app.ui.main.usecases import android.content.Context import io.github.thibaultbee.streampack.app.data.storage.DataStoreRepository -import io.github.thibaultbee.streampack.core.streamers.single.AudioOnlySingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer +import io.github.thibaultbee.streampack.core.streamers.single.SingleStreamer import kotlinx.coroutines.flow.first import kotlinx.coroutines.runBlocking @@ -13,30 +13,22 @@ class BuildStreamerUseCase( private val dataStoreRepository: DataStoreRepository ) { /** - * Build a new [ICoroutineStreamer] based on audio and video preferences. + * Build a new [ICoroutineSingleStreamer] based on audio and video preferences. * * Only create a new streamer if the previous one is not the same type. * * @param previousStreamer Previous streamer to check if we need to create a new one. */ - operator fun invoke(previousStreamer: ICoroutineSingleStreamer? = null): ICoroutineSingleStreamer { + operator fun invoke(previousStreamer: ICoroutineSingleStreamer? 
= null): SingleStreamer { val isAudioEnable = runBlocking { dataStoreRepository.isAudioEnableFlow.first() } - val isVideoEnable = runBlocking { dataStoreRepository.isVideoEnableFlow.first() } - - if (isVideoEnable) { - if (previousStreamer !is CameraSingleStreamer) { - return CameraSingleStreamer(context, isAudioEnable) - } else { - if ((previousStreamer.audioSource == null) != !isAudioEnable) { - return CameraSingleStreamer(context, isAudioEnable) - } - } + if (previousStreamer !is CameraSingleStreamer) { + return CameraSingleStreamer(context, isAudioEnable) } else { - if (previousStreamer !is AudioOnlySingleStreamer) { - return AudioOnlySingleStreamer(context) + if ((previousStreamer.audioSource == null) != !isAudioEnable) { + return CameraSingleStreamer(context, isAudioEnable) } } diff --git a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/settings/SettingsFragment.kt b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/settings/SettingsFragment.kt index 918d0e1dd..20af40388 100644 --- a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/settings/SettingsFragment.kt +++ b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/settings/SettingsFragment.kt @@ -38,26 +38,18 @@ import io.github.thibaultbee.streampack.app.utils.ProfileLevelDisplay import io.github.thibaultbee.streampack.app.utils.StreamerInfoFactory import io.github.thibaultbee.streampack.app.utils.dataStore import io.github.thibaultbee.streampack.core.elements.encoders.mediacodec.MediaCodecHelper -import io.github.thibaultbee.streampack.core.streamers.infos.CameraStreamerConfigurationInfo -import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig -import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig import io.github.thibaultbee.streampack.core.elements.sources.video.camera.cameras import io.github.thibaultbee.streampack.core.elements.sources.video.camera.defaultCameraId import io.github.thibaultbee.streampack.core.elements.sources.video.camera.isFrameRateSupported +import io.github.thibaultbee.streampack.core.streamers.infos.CameraStreamerConfigurationInfo +import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig +import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig import java.io.IOException class SettingsFragment : PreferenceFragmentCompat() { private lateinit var streamerInfo: CameraStreamerConfigurationInfo private val profileLevelDisplay by lazy { ProfileLevelDisplay(requireContext()) } - private val videoEnablePreference: SwitchPreference by lazy { - this.findPreference(getString(R.string.video_enable_key))!! - } - - private val videoSettingsCategory: PreferenceCategory by lazy { - this.findPreference(getString(R.string.video_settings_key))!! - } - private val videoEncoderListPreference: ListPreference by lazy { this.findPreference(getString(R.string.video_encoder_key))!! 
} @@ -198,22 +190,10 @@ class SettingsFragment : PreferenceFragmentCompat() { true } - if (videoEncoderListPreference.value == null) { - // Audio only container - videoSettingsCategory.isVisible = false - videoEnablePreference.isChecked = false - } else { - loadVideoSettings(videoEncoderListPreference.value) - } + loadVideoSettings(videoEncoderListPreference.value) } private fun loadVideoSettings(encoder: String) { - videoSettingsCategory.isVisible = videoEnablePreference.isChecked - videoEnablePreference.setOnPreferenceChangeListener { _, newValue -> - videoSettingsCategory.isVisible = newValue as Boolean - true - } - // Inflates video resolutions streamerInfo.video.getSupportedResolutions( requireContext(), encoder diff --git a/demos/camera/src/main/res/xml/root_preferences.xml b/demos/camera/src/main/res/xml/root_preferences.xml index 9358281de..e47de9ad3 100644 --- a/demos/camera/src/main/res/xml/root_preferences.xml +++ b/demos/camera/src/main/res/xml/root_preferences.xml @@ -2,11 +2,6 @@ xmlns:app="http://schemas.android.com/apk/res-auto"> - - diff --git a/demos/screenrecorder/src/main/java/io/github/thibaultbee/streampack/screenrecorder/MainActivity.kt b/demos/screenrecorder/src/main/java/io/github/thibaultbee/streampack/screenrecorder/MainActivity.kt index ae75fa789..37fe85606 100644 --- a/demos/screenrecorder/src/main/java/io/github/thibaultbee/streampack/screenrecorder/MainActivity.kt +++ b/demos/screenrecorder/src/main/java/io/github/thibaultbee/streampack/screenrecorder/MainActivity.kt @@ -33,6 +33,7 @@ import androidx.activity.result.contract.ActivityResultContracts import androidx.appcompat.app.AlertDialog import androidx.appcompat.app.AppCompatActivity import androidx.core.app.ActivityCompat +import androidx.lifecycle.lifecycleScope import io.github.thibaultbee.streampack.core.configuration.mediadescriptor.UriMediaDescriptor import io.github.thibaultbee.streampack.core.elements.encoders.mediacodec.MediaCodecHelper import io.github.thibaultbee.streampack.core.elements.endpoints.composites.muxers.ts.data.TSServiceInfo @@ -46,6 +47,7 @@ import io.github.thibaultbee.streampack.screenrecorder.models.EndpointType import io.github.thibaultbee.streampack.screenrecorder.services.DemoScreenRecorderService import io.github.thibaultbee.streampack.screenrecorder.settings.SettingsActivity import io.github.thibaultbee.streampack.services.DefaultScreenRecorderService +import kotlinx.coroutines.launch import kotlinx.coroutines.runBlocking class MainActivity : AppCompatActivity() { @@ -97,43 +99,43 @@ class MainActivity : AppCompatActivity() { stopService() } - private val requestAudioPermissionsLauncher = - registerForActivityResult( - ActivityResultContracts.RequestPermission() - ) { isGranted -> - if (!isGranted) { - showPermissionAlertDialog(this) { this.finish() } - } else { - getContent.launch( - ScreenRecorderSingleStreamer.createScreenRecorderIntent( - this - ) + private val requestAudioPermissionsLauncher = registerForActivityResult( + ActivityResultContracts.RequestPermission() + ) { isGranted -> + if (!isGranted) { + showPermissionAlertDialog(this) { this.finish() } + } else { + getContent.launch( + ScreenRecorderSingleStreamer.createScreenRecorderIntent( + this ) - } + ) } + } private var getContent = registerForActivityResult(ActivityResultContracts.StartActivityForResult()) { result -> if (streamer != null) { - startStream(requireNotNull(streamer)) + lifecycleScope.launch { + startStream(requireNotNull(streamer)) + } } else { - connection = DefaultScreenRecorderService.launch( - 
this, + connection = DefaultScreenRecorderService.launch(this, DemoScreenRecorderService::class.java, { streamer -> streamer.activityResult = result - try { - configure(streamer) - } catch (t: Throwable) { - this@MainActivity.showAlertDialog( - this@MainActivity, - "Error", - t.message ?: "Unknown error" - ) - binding.liveButton.isChecked = false - Log.e(TAG, "Error while starting streamer", t) + lifecycleScope.launch { + try { + configure(streamer) + } catch (t: Throwable) { + this@MainActivity.showAlertDialog( + this@MainActivity, "Error", t.message ?: "Unknown error" + ) + binding.liveButton.isChecked = false + Log.e(TAG, "Error while starting streamer", t) + } + startStream(streamer) } - startStream(streamer) this.streamer = streamer }, { @@ -145,19 +147,18 @@ class MainActivity : AppCompatActivity() { } } - private fun configure(streamer: ScreenRecorderSingleStreamer) { + private suspend fun configure(streamer: ScreenRecorderSingleStreamer) { val deviceRefreshRate = (this.getSystemService(Context.DISPLAY_SERVICE) as DisplayManager).getDisplay( Display.DEFAULT_DISPLAY ).refreshRate.toInt() - val fps = - if (MediaCodecHelper.Video.getFramerateRange(configuration.video.encoder) - .contains(deviceRefreshRate) - ) { - deviceRefreshRate - } else { - 30 - } + val fps = if (MediaCodecHelper.Video.getFramerateRange(configuration.video.encoder) + .contains(deviceRefreshRate) + ) { + deviceRefreshRate + } else { + 30 + } val videoConfig = VideoConfig( mimeType = configuration.video.encoder, @@ -177,8 +178,7 @@ class MainActivity : AppCompatActivity() { ) if (ActivityCompat.checkSelfPermission( - this, - Manifest.permission.RECORD_AUDIO + this, Manifest.permission.RECORD_AUDIO ) == PackageManager.PERMISSION_GRANTED ) { streamer.setAudioConfig(audioConfig) @@ -188,29 +188,25 @@ class MainActivity : AppCompatActivity() { } } - private fun startStream(streamer: ScreenRecorderSingleStreamer) { + private suspend fun startStream(streamer: ScreenRecorderSingleStreamer) { try { - runBlocking { - val descriptor = when (configuration.endpoint.type) { - EndpointType.SRT -> SrtMediaDescriptor( - configuration.endpoint.srt.ip, - configuration.endpoint.srt.port, - configuration.endpoint.srt.streamID, - configuration.endpoint.srt.passPhrase, - serviceInfo = tsServiceInfo - ) - - EndpointType.RTMP -> UriMediaDescriptor(Uri.parse(configuration.endpoint.rtmp.url)) - } + val descriptor = when (configuration.endpoint.type) { + EndpointType.SRT -> SrtMediaDescriptor( + configuration.endpoint.srt.ip, + configuration.endpoint.srt.port, + configuration.endpoint.srt.streamID, + configuration.endpoint.srt.passPhrase, + serviceInfo = tsServiceInfo + ) - streamer.startStream(descriptor) + EndpointType.RTMP -> UriMediaDescriptor(Uri.parse(configuration.endpoint.rtmp.url)) } + + streamer.startStream(descriptor) moveTaskToBack(true) } catch (t: Throwable) { this.showAlertDialog( - this, - "Error", - t.message ?: "Unknown error" + this, "Error", t.message ?: "Unknown error" ) binding.liveButton.isChecked = false Log.e(TAG, "Error while starting streamer", t) @@ -248,19 +244,13 @@ class MainActivity : AppCompatActivity() { } private fun showAlertDialog( - context: Context, - title: String, - message: String, - afterPositiveButton: () -> Unit = {} + context: Context, title: String, message: String, afterPositiveButton: () -> Unit = {} ) { - AlertDialog.Builder(context) - .setTitle(title) - .setMessage(message) + AlertDialog.Builder(context).setTitle(title).setMessage(message) .setPositiveButton(android.R.string.ok) { 
dialogInterface: DialogInterface, _: Int -> dialogInterface.dismiss() afterPositiveButton() - } - .show() + }.show() } private fun showAlertDialog( @@ -269,26 +259,18 @@ class MainActivity : AppCompatActivity() { messageResourceId: Int, afterPositiveButton: () -> Unit = {} ) { - AlertDialog.Builder(context) - .setTitle(titleResourceId) - .setMessage(messageResourceId) + AlertDialog.Builder(context).setTitle(titleResourceId).setMessage(messageResourceId) .setPositiveButton(android.R.string.ok) { dialogInterface: DialogInterface, _: Int -> dialogInterface.dismiss() afterPositiveButton() - } - .show() + }.show() } private fun showPermissionAlertDialog( - context: Context, - afterPositiveButton: () -> Unit = {} - ) = - showAlertDialog( - context, - R.string.permission, - R.string.permission_not_granted, - afterPositiveButton - ) + context: Context, afterPositiveButton: () -> Unit = {} + ) = showAlertDialog( + context, R.string.permission, R.string.permission_not_granted, afterPositiveButton + ) companion object { private const val TAG = "MainActivity" diff --git a/extensions/rtmp/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/RtmpStreamerTest.kt b/extensions/rtmp/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/single/RtmpSingleStreamerTest.kt similarity index 91% rename from extensions/rtmp/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/RtmpStreamerTest.kt rename to extensions/rtmp/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/single/RtmpSingleStreamerTest.kt index e5996ad52..c227f5fca 100644 --- a/extensions/rtmp/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/RtmpStreamerTest.kt +++ b/extensions/rtmp/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/single/RtmpSingleStreamerTest.kt @@ -1,4 +1,4 @@ -package io.github.thibaultbee.streampack.core.streamers +package io.github.thibaultbee.streampack.core.streamers.single import android.Manifest import android.content.Context @@ -7,11 +7,6 @@ import android.util.Size import androidx.test.filters.LargeTest import androidx.test.platform.app.InstrumentationRegistry import androidx.test.rule.GrantPermissionRule -import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig -import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer -import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig -import io.github.thibaultbee.streampack.core.streamers.single.setConfig -import io.github.thibaultbee.streampack.core.streamers.single.startStream import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.delay import kotlinx.coroutines.test.runTest @@ -29,7 +24,7 @@ import video.api.client.api.models.VideoStatus import kotlin.time.Duration.Companion.seconds @LargeTest -class RtmpStreamerTest { +class RtmpSingleStreamerTest { private val context: Context = InstrumentationRegistry.getInstrumentation().context private val arguments = InstrumentationRegistry.getArguments() private var apiKey: String? 
= null diff --git a/extensions/srt/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/SrtStreamerTest.kt b/extensions/srt/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/single/SrtSingleStreamerTest.kt similarity index 91% rename from extensions/srt/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/SrtStreamerTest.kt rename to extensions/srt/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/single/SrtSingleStreamerTest.kt index 66353bacc..d77aace51 100644 --- a/extensions/srt/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/SrtStreamerTest.kt +++ b/extensions/srt/src/androidTest/java/io/github/thibaultbee/streampack/core/streamers/single/SrtSingleStreamerTest.kt @@ -1,4 +1,4 @@ -package io.github.thibaultbee.streampack.core.streamers +package io.github.thibaultbee.streampack.core.streamers.single import android.Manifest import android.content.Context @@ -7,11 +7,6 @@ import android.util.Size import androidx.test.filters.LargeTest import androidx.test.platform.app.InstrumentationRegistry import androidx.test.rule.GrantPermissionRule -import io.github.thibaultbee.streampack.core.streamers.single.AudioConfig -import io.github.thibaultbee.streampack.core.streamers.single.CameraSingleStreamer -import io.github.thibaultbee.streampack.core.streamers.single.VideoConfig -import io.github.thibaultbee.streampack.core.streamers.single.setConfig -import io.github.thibaultbee.streampack.core.streamers.single.startStream import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.delay import kotlinx.coroutines.test.runTest @@ -29,7 +24,7 @@ import video.api.client.api.models.VideoStatus import kotlin.time.Duration.Companion.seconds @LargeTest -class SrtStreamerTest { +class SrtSingleStreamerTest { private val context: Context = InstrumentationRegistry.getInstrumentation().context private val arguments = InstrumentationRegistry.getArguments() private var apiKey: String? 
= null diff --git a/extensions/srt/src/main/java/io/github/thibaultbee/streampack/ext/srt/regulator/controllers/DefaultSrtBitrateRegulatorController.kt b/extensions/srt/src/main/java/io/github/thibaultbee/streampack/ext/srt/regulator/controllers/DefaultSrtBitrateRegulatorController.kt index a28d18743..9339c76e3 100644 --- a/extensions/srt/src/main/java/io/github/thibaultbee/streampack/ext/srt/regulator/controllers/DefaultSrtBitrateRegulatorController.kt +++ b/extensions/srt/src/main/java/io/github/thibaultbee/streampack/ext/srt/regulator/controllers/DefaultSrtBitrateRegulatorController.kt @@ -16,9 +16,11 @@ package io.github.thibaultbee.streampack.ext.srt.regulator.controllers import io.github.thibaultbee.streampack.core.configuration.BitrateRegulatorConfig +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IConfigurableAudioPipelineOutput +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IEncodingPipelineOutput +import io.github.thibaultbee.streampack.core.pipelines.outputs.encoding.IConfigurableVideoPipelineOutput import io.github.thibaultbee.streampack.core.regulator.controllers.BitrateRegulatorController import io.github.thibaultbee.streampack.core.regulator.controllers.DefaultBitrateRegulatorController -import io.github.thibaultbee.streampack.core.streamers.single.ICoroutineSingleStreamer import io.github.thibaultbee.streampack.ext.srt.regulator.DefaultSrtBitrateRegulator import io.github.thibaultbee.streampack.ext.srt.regulator.SrtBitrateRegulator @@ -31,11 +33,24 @@ class DefaultSrtBitrateRegulatorController { private val bitrateRegulatorConfig: BitrateRegulatorConfig = BitrateRegulatorConfig(), private val delayTimeInMs: Long = 500 ) : BitrateRegulatorController.Factory() { - override fun newBitrateRegulatorController(streamer: ICoroutineSingleStreamer): DefaultBitrateRegulatorController { + override fun newBitrateRegulatorController(pipelineOutput: IEncodingPipelineOutput): DefaultBitrateRegulatorController { + require(pipelineOutput is IConfigurableVideoPipelineOutput) { + "Pipeline output must be an video encoding output" + } + + val videoEncoder = requireNotNull(pipelineOutput.videoEncoder) { + "Video encoder must be set" + } + + val audioEncoder = if (pipelineOutput is IConfigurableAudioPipelineOutput) { + pipelineOutput.audioEncoder + } else { + null + } return DefaultBitrateRegulatorController( - streamer.audioEncoder, - streamer.videoEncoder, - streamer.endpoint, + audioEncoder, + videoEncoder, + pipelineOutput.endpoint, bitrateRegulatorFactory, bitrateRegulatorConfig, delayTimeInMs diff --git a/services/src/main/java/io/github/thibaultbee/streampack/services/DefaultScreenRecorderService.kt b/services/src/main/java/io/github/thibaultbee/streampack/services/DefaultScreenRecorderService.kt index 32c9c3740..d6fbe795b 100644 --- a/services/src/main/java/io/github/thibaultbee/streampack/services/DefaultScreenRecorderService.kt +++ b/services/src/main/java/io/github/thibaultbee/streampack/services/DefaultScreenRecorderService.kt @@ -185,8 +185,8 @@ abstract class DefaultScreenRecorderService( runBlocking { streamer?.stopStream() streamer?.close() + streamer?.release() } - streamer?.release() streamer = null Log.i(TAG, "Service destroyed") }
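Following the controller change above, a bitrate regulator is still attached through the streamer, which now forwards the factory to its encoding pipeline output; teardown mirrors the DefaultScreenRecorderService change (stop, close, then release). A sketch (not part of this patch), assuming the nested factory shown in this hunk is named Factory and that its constructor defaults are usable as-is:

    import io.github.thibaultbee.streampack.core.streamers.single.SingleStreamer
    import io.github.thibaultbee.streampack.ext.srt.regulator.controllers.DefaultSrtBitrateRegulatorController
    import kotlinx.coroutines.runBlocking

    // Illustrative only: attach the SRT bitrate regulator, then tear down in the
    // order now used by DefaultScreenRecorderService.
    fun attachRegulatorAndStop(streamer: SingleStreamer) {
        // The factory is forwarded to the streamer's encoding pipeline output
        // (bitrate regulation remains SRT-only).
        streamer.addBitrateRegulatorController(DefaultSrtBitrateRegulatorController.Factory())

        runBlocking {
            streamer.stopStream()
            streamer.close()
            streamer.release()
        }
    }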