
Commit

feat(*): add support for HDR
ThibaultBee committed Nov 21, 2023
1 parent a13439e commit f268a3f
Showing 19 changed files with 411 additions and 42 deletions.
@@ -16,6 +16,7 @@
package io.github.thibaultbee.streampack.data

import android.content.Context
import android.media.MediaCodecInfo
import android.media.MediaCodecInfo.CodecProfileLevel
import android.media.MediaCodecInfo.CodecProfileLevel.AV1ProfileMain8
import android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline
@@ -31,6 +32,7 @@ import android.media.MediaFormat
import android.os.Build
import android.util.Size
import io.github.thibaultbee.streampack.internal.encoders.MediaCodecHelper
import io.github.thibaultbee.streampack.internal.utils.av.video.DynamicRangeProfile
import io.github.thibaultbee.streampack.internal.utils.extensions.isDevicePortrait
import io.github.thibaultbee.streampack.internal.utils.extensions.isVideo
import io.github.thibaultbee.streampack.internal.utils.extensions.landscapize
@@ -69,6 +71,9 @@ class VideoConfig(
val fps: Int = 30,
/**
* Video encoder profile. Encoders may not support the requested profile; in that case, StreamPack falls back to the default profile.
* If not set, the profile is always an 8-bit profile and StreamPack tries to apply the highest profile available.
* If the decoder does not support the profile, explicitly set the profile to a lower
* value such as [AVCProfileBaseline] for AVC, [HEVCProfileMain] for HEVC or [VP9Profile0] for VP9.
* **See Also:** [MediaCodecInfo.CodecProfileLevel](https://developer.android.com/reference/android/media/MediaCodecInfo.CodecProfileLevel)
*/
profile: Int = getBestProfile(mimeType),
@@ -132,6 +137,13 @@
gopDuration
)

/**
* The dynamic range profile.
* It is deduced from the [profile].
* **See Also:** [DynamicRangeProfiles](https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles)
*/
val dynamicRangeProfile = DynamicRangeProfile.fromProfile(mimeType, profile)

/**
* Get resolution according to device orientation
*
@@ -174,6 +186,23 @@
}
}

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
if (dynamicRangeProfile != DynamicRangeProfile.sdr) {
format.setInteger(
MediaFormat.KEY_COLOR_STANDARD,
MediaFormat.COLOR_STANDARD_BT2020
)
format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED)
format.setInteger(
MediaFormat.KEY_COLOR_TRANSFER,
dynamicRangeProfile.transferFunction
)
format.setFeatureEnabled(
MediaCodecInfo.CodecCapabilities.FEATURE_HdrEditing, true
)
}

}
return format
}
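For context, a minimal usage sketch (not part of this commit) of the HDR configuration added above: requesting a 10-bit HEVC profile so that dynamicRangeProfile resolves to a non-SDR value and getFormat() emits the BT.2020 color keys on Android 13+. Only the mimeType, fps and profile parameters appear in this diff; the remaining VideoConfig constructor parameters are assumed to keep their defaults, and actual HDR output still depends on DynamicRangeProfile.fromProfile() and on encoder/device support.

// Hedged sketch: explicitly request a 10-bit HEVC profile (assumed to map to a non-SDR
// dynamic range profile); other constructor parameters are assumed to keep their defaults.
val hdrConfig = VideoConfig(
    mimeType = MediaFormat.MIMETYPE_VIDEO_HEVC,
    fps = 30,
    profile = MediaCodecInfo.CodecProfileLevel.HEVCProfileMain10
)
// Deduced from the profile: stays DynamicRangeProfile.sdr for 8-bit profiles.
val isHdr = hdrConfig.dynamicRangeProfile != DynamicRangeProfile.sdr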

@@ -32,8 +32,10 @@ abstract class MediaCodecEncoder<T : Config>(
EventHandler(), IEncoder<Config> {
protected var mediaCodec: MediaCodec? = null
set(value) {
if (value != null) {
onNewMediaCodec(value)
}
field = value
onNewMediaCodec()
}
private var callbackThread: HandlerThread? = null
private var handler: Handler? = null
@@ -149,7 +151,7 @@
}
}

open fun onNewMediaCodec() {}
open fun onNewMediaCodec(mediaCodec: MediaCodec) {}

open fun createMediaFormat(config: Config, withProfileLevel: Boolean) =
config.getFormat(withProfileLevel)
@@ -241,7 +241,7 @@ object MediaCodecHelper
fun getProfiles(
mimeType: String,
): List<Int> =
getProfileLevel(mimeType).map { it.profile }
getProfileLevel(mimeType).map { it.profile }.toSet().toList()

/**
* Get encoder supported profiles list for the specified encoder.
@@ -254,7 +254,7 @@
mimeType: String,
name: String
): List<Int> =
getProfileLevel(mimeType, name).map { it.profile }
getProfileLevel(mimeType, name).map { it.profile }.toSet().toList()
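The .toSet().toList() added above deduplicates profiles: getProfileLevel() may report the same profile several times (for example once per supported level or once per matching codec), and only distinct profiles should be exposed. A self-contained sketch of the effect, with a hypothetical stand-in for MediaCodecInfo.CodecProfileLevel:

// Hypothetical stand-in for MediaCodecInfo.CodecProfileLevel (public fields: profile, level).
data class ProfileLevel(val profile: Int, val level: Int)

val reported = listOf(
    ProfileLevel(profile = 8, level = 2048), // the same profile reported at...
    ProfileLevel(profile = 8, level = 4096), // ...several levels
    ProfileLevel(profile = 2, level = 4096)
)
val profiles = reported.map { it.profile }.toSet().toList() // [8, 2] instead of [8, 8, 2]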

/**
* Get encoder maximum supported levels for the default encoder.
@@ -29,6 +29,7 @@ import io.github.thibaultbee.streampack.internal.gl.EglWindowSurface
import io.github.thibaultbee.streampack.internal.gl.FullFrameRect
import io.github.thibaultbee.streampack.internal.gl.Texture2DProgram
import io.github.thibaultbee.streampack.internal.interfaces.ISourceOrientationProvider
import io.github.thibaultbee.streampack.internal.utils.av.video.DynamicRangeProfile
import io.github.thibaultbee.streampack.listeners.OnErrorListener
import java.util.concurrent.Executors

@@ -62,10 +63,17 @@ class VideoMediaCodecEncoder(
_bitrate = value
}

override fun onNewMediaCodec() {
mediaCodec?.let {
codecSurface?.outputSurface = it.createInputSurface()
override fun onNewMediaCodec(mediaCodec: MediaCodec) {
try {
val mimeType = mediaCodec.outputFormat.getString(MediaFormat.KEY_MIME)!!
val profile = mediaCodec.outputFormat.getInteger(MediaFormat.KEY_PROFILE)
codecSurface?.useHighBitDepth =
DynamicRangeProfile.fromProfile(mimeType, profile).isHdr
} catch (_: Exception) {
codecSurface?.useHighBitDepth = false
}

codecSurface?.outputSurface = mediaCodec.createInputSurface()
}

override fun createMediaFormat(config: Config, withProfileLevel: Boolean): MediaFormat {
@@ -124,6 +132,11 @@ class VideoMediaCodecEncoder(
val inputSurface: Surface?
get() = surfaceTexture?.let { Surface(surfaceTexture) }

/**
* If true, the encoder input surface uses high bit depth (10 bits per channel) for encoding.
*/
var useHighBitDepth = false

var outputSurface: Surface? = null
set(value) {
/**
@@ -145,7 +158,7 @@
}

private fun initOrUpdateSurfaceTexture(surface: Surface) {
eglSurface = ensureGlContext(EglWindowSurface(surface)) {
eglSurface = ensureGlContext(EglWindowSurface(surface, useHighBitDepth)) {
val width = it.getWidth()
val height = it.getHeight()
val size =
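Related to the HDR detection above, a hedged sketch (not part of this commit) of how a caller could verify that an encoder for the MIME type advertises HDR editing support before requesting a 10-bit profile. FEATURE_HdrEditing only exists from API 33, hence the version guard:

import android.media.MediaCodecInfo
import android.media.MediaCodecList
import android.os.Build

// Returns true if at least one encoder for the MIME type supports FEATURE_HdrEditing.
fun hasHdrEditingEncoder(mimeType: String): Boolean {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU) return false
    return MediaCodecList(MediaCodecList.REGULAR_CODECS).codecInfos
        .filter { info ->
            info.isEncoder && info.supportedTypes.any { it.equals(mimeType, ignoreCase = true) }
        }
        .any { info ->
            info.getCapabilitiesForType(mimeType)
                .isFeatureSupported(MediaCodecInfo.CodecCapabilities.FEATURE_HdrEditing)
        }
}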
@@ -16,9 +16,14 @@
*/
package io.github.thibaultbee.streampack.internal.gl

import android.opengl.*
import android.opengl.EGL14
import android.opengl.EGLConfig
import android.opengl.EGLContext
import android.opengl.EGLDisplay
import android.opengl.EGLExt
import android.opengl.EGLSurface
import android.view.Surface
import java.util.*
import java.util.Objects

/**
* Holds state associated with a Surface used for MediaCodec encoder input.
@@ -30,7 +35,7 @@ import java.util.*
* (Contains mostly code borrowed from CameraX)
*/

class EglWindowSurface(private val surface: Surface) {
class EglWindowSurface(private val surface: Surface, useHighBitDepth: Boolean = false) {
private var eglDisplay: EGLDisplay = EGL14.EGL_NO_DISPLAY
private var eglContext: EGLContext = EGL14.EGL_NO_CONTEXT
private var eglSurface: EGLSurface = EGL14.EGL_NO_SURFACE
@@ -41,13 +46,13 @@ class EglWindowSurface(private val surface: Surface) {
}

init {
eglSetup()
eglSetup(useHighBitDepth)
}

/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording (or a 10-bit surface when high bit depth is requested).
*/
private fun eglSetup() {
private fun eglSetup(useHighBitDepth: Boolean) {
eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
if (Objects.equals(eglDisplay, EGL14.EGL_NO_DISPLAY)) {
throw RuntimeException("unable to get EGL14 display")
@@ -59,12 +64,16 @@

// Configure EGL for recordable and OpenGL ES 2.0. We want enough RGB bits
// to minimize artifacts from possible YUV conversion.
val eglColorSize = if (useHighBitDepth) 10 else 8
val eglAlphaSize = if (useHighBitDepth) 2 else 0
val recordable = if (useHighBitDepth) 0 else 1
var attribList = intArrayOf(
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_RED_SIZE, eglColorSize,
EGL14.EGL_GREEN_SIZE, eglColorSize,
EGL14.EGL_BLUE_SIZE, eglColorSize,
EGL14.EGL_ALPHA_SIZE, eglAlphaSize,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL_RECORDABLE_ANDROID, recordable,
EGL14.EGL_NONE
)
val numConfigs = IntArray(1)
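One note on the attribute list above: when useHighBitDepth is set, a 10-bit (RGBA 1010102) config is requested and EGL_RECORDABLE_ANDROID is no longer required, presumably because a recordable 10-bit config is not available on all devices. A hedged sketch (not in this commit) of a possible fallback when no 10-bit config exists; attribList10Bit and attribList8Bit are hypothetical names for attribute arrays built like the one above:

private fun chooseEglConfig(
    eglDisplay: EGLDisplay,
    attribList10Bit: IntArray,
    attribList8Bit: IntArray
): EGLConfig {
    val configs = arrayOfNulls<EGLConfig>(1)
    val numConfigs = IntArray(1)
    // Try the 10-bit config first; fall back to the 8-bit recordable config otherwise.
    val has10Bit = EGL14.eglChooseConfig(
        eglDisplay, attribList10Bit, 0, configs, 0, configs.size, numConfigs, 0
    ) && numConfigs[0] > 0
    if (!has10Bit) {
        EGL14.eglChooseConfig(
            eglDisplay, attribList8Bit, 0, configs, 0, configs.size, numConfigs, 0
        )
    }
    return configs[0] ?: throw RuntimeException("unable to find a suitable EGLConfig")
}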
@@ -20,6 +20,7 @@ import android.content.Context
import android.hardware.camera2.*
import android.hardware.camera2.CameraDevice.AUDIO_RESTRICTION_NONE
import android.hardware.camera2.CameraDevice.AUDIO_RESTRICTION_VIBRATION_SOUND
import android.hardware.camera2.params.OutputConfiguration
import android.os.Build
import android.util.Range
import android.view.Surface
@@ -133,12 +134,19 @@ class CameraController(

private suspend fun createCaptureSession(
camera: CameraDevice,
targets: List<Surface>
targets: List<Surface>,
dynamicRange: Long,
): CameraCaptureSession = suspendCancellableCoroutine { cont ->
threadManager.createCaptureSession(
camera,
targets,
CameraCaptureSessionCallback(cont)
val outputConfigurations = targets.map {
OutputConfiguration(it).apply {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
dynamicRangeProfile = dynamicRange
}
}
}

threadManager.createCaptureSessionByOutputConfiguration(
camera, outputConfigurations, CameraCaptureSessionCallback(cont)
)
}

@@ -162,7 +170,8 @@
@RequiresPermission(Manifest.permission.CAMERA)
suspend fun startCamera(
cameraId: String,
targets: List<Surface>
targets: List<Surface>,
dynamicRange: Long,
) {
require(targets.isNotEmpty()) { " At least one target is required" }

@@ -172,7 +181,8 @@
captureSession =
createCaptureSession(
cameraDevice,
targets
targets,
dynamicRange
)
}
}
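The capture session above attaches the requested dynamic range to every OutputConfiguration. A hedged sketch (not part of this commit) of checking beforehand that the camera actually advertises that profile, using the camera2 DynamicRangeProfiles API from API 33; below that, only the standard (SDR) profile can be assumed:

import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraManager
import android.hardware.camera2.params.DynamicRangeProfiles
import android.os.Build

// Returns true if the camera advertises the requested dynamic range profile.
fun isDynamicRangeSupported(manager: CameraManager, cameraId: String, dynamicRange: Long): Boolean {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU) {
        return dynamicRange == DynamicRangeProfiles.STANDARD
    }
    val characteristics = manager.getCameraCharacteristics(cameraId)
    val profiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
        ?: return dynamicRange == DynamicRangeProfiles.STANDARD
    return dynamicRange in profiles.supportedProfiles
}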
@@ -50,11 +50,19 @@ class CameraExecutorManager : ICameraThreadManager {
targets: List<Surface>,
callback: CameraCaptureSession.StateCallback
) {
val outputs = mutableListOf<OutputConfiguration>()
targets.forEach { outputs.add(OutputConfiguration(it)) }
val outputConfigurations = targets.map { OutputConfiguration(it) }
createCaptureSessionByOutputConfiguration(camera, outputConfigurations, callback)
}

@RequiresApi(Build.VERSION_CODES.P)
override fun createCaptureSessionByOutputConfiguration(
camera: CameraDevice,
outputConfigurations: List<OutputConfiguration>,
callback: CameraCaptureSession.StateCallback
) {
SessionConfiguration(
SessionConfiguration.SESSION_REGULAR,
outputs,
outputConfigurations,
cameraExecutor,
callback
).also { sessionConfig ->
@@ -20,6 +20,7 @@ import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CameraManager
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.params.OutputConfiguration
import android.os.Handler
import android.os.HandlerThread
import android.view.Surface
@@ -51,6 +52,19 @@ class CameraHandlerManager : ICameraThreadManager {
camera.createCaptureSession(targets, callback, cameraHandler)
}

override fun createCaptureSessionByOutputConfiguration(
camera: CameraDevice,
outputConfigurations: List<OutputConfiguration>,
callback: CameraCaptureSession.StateCallback
) {
@Suppress("deprecation")
camera.createCaptureSessionByOutputConfigurations(
outputConfigurations,
callback,
cameraHandler
)
}

override fun setRepeatingSingleRequest(
captureSession: CameraCaptureSession,
captureRequest: CaptureRequest,
@@ -24,6 +24,7 @@ import io.github.thibaultbee.streampack.data.VideoConfig
import io.github.thibaultbee.streampack.internal.data.Frame
import io.github.thibaultbee.streampack.internal.interfaces.ISourceOrientationProvider
import io.github.thibaultbee.streampack.internal.sources.IVideoSource
import io.github.thibaultbee.streampack.internal.utils.av.video.DynamicRangeProfile
import io.github.thibaultbee.streampack.internal.utils.extensions.deviceOrientation
import io.github.thibaultbee.streampack.internal.utils.extensions.isDevicePortrait
import io.github.thibaultbee.streampack.internal.utils.extensions.landscapize
@@ -72,19 +73,21 @@ class CameraSource(
}

private var fps: Int = 30
private var dynamicRangeProfile: DynamicRangeProfile = DynamicRangeProfile.sdr
private var isStreaming = false
private var isPreviewing = false

override fun configure(config: VideoConfig) {
this.fps = config.fps
this.dynamicRangeProfile = config.dynamicRangeProfile
}

@RequiresPermission(Manifest.permission.CAMERA)
suspend fun startPreview(cameraId: String = this.cameraId, restartStream: Boolean = false) {
var targets = mutableListOf<Surface>()
previewSurface?.let { targets.add(it) }
encoderSurface?.let { targets.add(it) }
cameraController.startCamera(cameraId, targets)
cameraController.startCamera(cameraId, targets, dynamicRangeProfile.dynamicRange)

targets = mutableListOf()
previewSurface?.let { targets.add(it) }
@@ -19,6 +19,7 @@ import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CameraManager
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.params.OutputConfiguration
import android.view.Surface

/**
Expand All @@ -40,10 +41,10 @@ interface ICameraThreadManager {
)

/**
* Create a camera capture session.
* Create a camera capture session for surfaces.
*
* @param camera the [CameraDevice]
* @param targets list of surfaces
* @param targets list of [Surface]
* @param callback an implementation of [CameraCaptureSession.StateCallback]
*/
fun createCaptureSession(
@@ -52,6 +53,19 @@
callback: CameraCaptureSession.StateCallback
)

/**
* Create a camera capture session for output configurations.
*
* @param camera the [CameraDevice]
* @param outputConfigurations list of [OutputConfiguration]
* @param callback an implementation of [CameraCaptureSession.StateCallback]
*/
fun createCaptureSessionByOutputConfiguration(
camera: CameraDevice,
outputConfigurations: List<OutputConfiguration>,
callback: CameraCaptureSession.StateCallback
)

/**
* Set a repeating request.
*