
Improve Android media player, also fix #2101
a. handle CbCr offset properly
b. handle output dim properly
halx99 committed Aug 27, 2024
1 parent 5851603 commit ad76a89
Showing 3 changed files with 60 additions and 12 deletions.
7 changes: 5 additions & 2 deletions core/media/AndroidMediaEngine.cpp
@@ -44,6 +44,7 @@ JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeHandleVideoSamp
int outputY,
int videoX,
int videoY,
int cbcrOffset,
int rotation,
int videoPF)
{
@@ -53,7 +54,7 @@ JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeHandleVideoSamp

auto sampleData = static_cast<uint8_t*>(env->GetDirectBufferAddress(sampleBuffer));

mediaEngine->handleVideoSample(sampleData, sampleLen, outputX, outputY, videoX, videoY, rotation, videoPF);
mediaEngine->handleVideoSample(sampleData, sampleLen, outputX, outputY, videoX, videoY, cbcrOffset, rotation, videoPF);
}

JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeSetDuration(JNIEnv* env,
@@ -162,7 +163,7 @@ bool AndroidMediaEngine::transferVideoFrame()

auto& buffer = _frameBuffer2;

ax::MEVideoFrame frame{buffer.data(), buffer.data() + _outputDim.x * _outputDim.y, buffer.size(),
ax::MEVideoFrame frame{buffer.data(), buffer.data() + _cbcrOffset, buffer.size(),
ax::MEVideoPixelDesc{static_cast<ax::MEVideoPixelFormat>(_videoPF), _outputDim}, _videoDim};
frame._vpd._rotation = _videoRotation;
assert(static_cast<int>(frame._dataLen) >= frame._vpd._dim.x * frame._vpd._dim.y * 3 / 2);
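
For context on the change above: the frame handed over from Java is an NV12 buffer whose interleaved CbCr plane starts at cbcrOffset (stride * sliceHeight), which can exceed _outputDim.x * _outputDim.y when the decoder pads rows or slices for alignment, so the CbCr pointer can no longer be derived from the output dimensions alone. A minimal Java sketch of that buffer layout (hypothetical helper class, not part of the engine):

    import java.nio.ByteBuffer;

    // Hypothetical helper: splits an NV12 decoder output buffer into its Y plane and
    // interleaved CbCr plane. The CbCr plane begins at cbcrOffset = stride * sliceHeight,
    // not at outputWidth * outputHeight, because the decoder may pad each row (stride)
    // and the plane height (sliceHeight) for alignment.
    final class Nv12Split {
        final ByteBuffer yPlane;    // stride * sliceHeight bytes
        final ByteBuffer cbcrPlane; // interleaved Cb/Cr samples

        Nv12Split(ByteBuffer frame, int cbcrOffset) {
            ByteBuffer y = frame.duplicate();
            y.position(0);
            y.limit(cbcrOffset);
            yPlane = y.slice();

            ByteBuffer cbcr = frame.duplicate();
            cbcr.position(cbcrOffset);
            cbcrPlane = cbcr.slice();
        }

        public static void main(String[] args) {
            // Toy 4x2 NV12 frame with stride 4 and sliceHeight 2: cbcrOffset = 8, total 12 bytes.
            Nv12Split planes = new Nv12Split(ByteBuffer.allocate(12), 8);
            System.out.println(planes.yPlane.remaining() + " / " + planes.cbcrPlane.remaining()); // 8 / 4
        }
    }
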
@@ -180,13 +181,15 @@ void AndroidMediaEngine::handleVideoSample(const uint8_t* buf,
int outputY,
int videoX,
int videoY,
int cbcrOffset,
int rotation,
int videoPF)
{
std::unique_lock<std::mutex> lck(_frameBuffer1Mtx);
_frameBuffer1.assign(buf, buf + len);
_outputDim.set(outputX, outputY);
_videoDim.set(videoX, videoY);
_cbcrOffset = cbcrOffset;
_videoRotation = rotation;
_videoPF = videoPF;
}
3 changes: 2 additions & 1 deletion core/media/AndroidMediaEngine.h
@@ -60,7 +60,7 @@ class AndroidMediaEngine : public MediaEngine
MEMediaState getState() const override;
bool transferVideoFrame() override;

void handleVideoSample(const uint8_t* buf, size_t len, int outputX, int outputY, int videoX, int videoY, int rotation, int videoPF);
void handleVideoSample(const uint8_t* buf, size_t len, int outputX, int outputY, int videoX, int videoY, int cbcrOffset, int rotation, int videoPF);
void updateCurrentTime(double currentTime) { _currentTime = currentTime; }
void updateDuration(double duration) { _duration = duration; }

@@ -71,6 +71,7 @@

MEIntPoint _outputDim;
MEIntPoint _videoDim;
int _cbcrOffset{0};
int _videoRotation{0};
int _videoPF{-1};

62 changes: 53 additions & 9 deletions core/platform/android/java/src/org/axmol/lib/AxmolMediaEngine.java
@@ -105,12 +105,14 @@ public class AxmolMediaEngine extends DefaultRenderersFactory implements Player.
private AtomicInteger mState = new AtomicInteger(STATE_CLOSED);
Point mOutputDim = new Point(); // The output dim, matching the decoder buffer
Point mVideoDim = new Point(); // The video dim (the valid image dim)
String mSampleMimeType = null;
private int mVideoPF = -1;
private int mVideoRotation = 0;
private int mCbcrOffset = 0;

/** ------ native methods ------- */
public static native void nativeHandleEvent(long nativeObj, int arg1);
public static native void nativeHandleVideoSample(long nativeObj, ByteBuffer sampleData, int sampleLen, int outputX, int outputY, int videoX, int videoY, int rotation, int videoPF);
public static native void nativeHandleVideoSample(long nativeObj, ByteBuffer sampleData, int sampleLen, int outputX, int outputY, int videoX, int videoY, int cbcrOffset, int rotation, int videoPF);
public static native void nativeSetDuration(long nativeObj, double duration);
public static native void nativeSetCurrentTime(long nativeObj, double currentTime);

@@ -304,16 +306,18 @@ public void onVideoFrameAboutToBeRendered(
Format format,
@Nullable MediaFormat mediaFormat) {
if (mOutputFormat != mediaFormat) {
mSampleMimeType = format.sampleMimeType; // video/hevc, video/avc
mOutputFormat = mediaFormat;
updateVideoMeta();
handleVideoMetaChanged();
}
}

/** update video informations */
private void updateVideoMeta() {
/** handle video information changes */
private void handleVideoMetaChanged() {
MediaFormat format = mOutputFormat;
if(format != null) {
// String mimeType = format.getString(MediaFormat.KEY_MIME); // "video/raw"
// String mimeType = format.getString(MediaFormat.KEY_MIME); // =="video/raw"

// Note: some Android 11 and older devices do not return the desired color format (NV12); the output will be YUV420P aka I420
// refer: https://github.com/axmolengine/axmol/issues/2049
Integer colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
@@ -329,30 +333,70 @@ private void updateVideoMeta() {
Log.w(TAG, String.format("Unsupported color format: %d, video rendering may be incorrect!", colorFormat));
}
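
(The note above is about decoders that ignore the requested NV12 output and emit planar I420 instead. A hedged sketch of that color-format check using the standard MediaCodecInfo constants; the returned tags are hypothetical descriptions, not the engine's real pixel-format IDs:)

    import android.media.MediaCodecInfo;
    import android.media.MediaFormat;

    final class ColorFormatCheck {
        // 21 == COLOR_FormatYUV420SemiPlanar (NV12-like), 19 == COLOR_FormatYUV420Planar (I420).
        static String describe(MediaFormat format) {
            int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
            switch (colorFormat) {
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                    return "NV12";
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                    return "YUV420P/I420";
                default:
                    return "unsupported (" + colorFormat + ")";
            }
        }
    }
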

// output dim
mOutputDim.x = format.getInteger(MediaFormat.KEY_WIDTH);
mOutputDim.y = format.getInteger(MediaFormat.KEY_HEIGHT);

int stride = 0, sliceHeight = 0;
if (format.containsKey(MediaFormat.KEY_STRIDE)) {
stride = format.getInteger(MediaFormat.KEY_STRIDE);
}
if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
}
Log.d(TAG, String.format("Frame stride and slice height: %dx%d", stride, sliceHeight));
stride = Math.max(mOutputDim.x, stride);
sliceHeight = Math.max(mOutputDim.y, sliceHeight);

/* Notes
 * 1. About the desired frame size in bytes
 *    a. stride > mOutputDim.x: all frame bytes should be passed to the GPU (shader), and the
 *       desired frame size in bytes is: stride * sliceHeight * 3 / 2
 *    b. stride == mOutputDim.x: the Y plane's slice-alignment padding has to be discarded, and the
 *       desired frame size in bytes is: stride * sliceHeight + (mOutputDim.x / 2) * (mOutputDim.y / 2) * 2
 * 2. About video frame size alignment
 *    a. many devices align to 2, so sliceHeight == mOutputDim.y and stride == mOutputDim.x
 *    b. H264: width and height are aligned to 16
 *       HEVC/H265: width and height are aligned to 32
 * 3. The cbcrOffset should always be stride * sliceHeight
 *    refer: https://github.com/axmolengine/axmol/issues/2101
 */
mCbcrOffset = stride * sliceHeight;
int frameSizeBytes = 0;
if (stride > mOutputDim.x) {
mOutputDim.x = stride;
mOutputDim.y = sliceHeight;
frameSizeBytes = mCbcrOffset * 3 / 2;
} else frameSizeBytes = mCbcrOffset + mOutputDim.x / 2 * mOutputDim.y;

// video dim
if (format.containsKey(MediaFormat.KEY_CROP_LEFT)
&& format.containsKey(MediaFormat.KEY_CROP_RIGHT)) {
mVideoDim.x = format.getInteger(MediaFormat.KEY_CROP_RIGHT) + 1
- format.getInteger(MediaFormat.KEY_CROP_LEFT);
} else
mVideoDim.x = mOutputDim.x;

mOutputDim.y = format.getInteger(MediaFormat.KEY_HEIGHT);
if (format.containsKey(MediaFormat.KEY_CROP_TOP)
&& format.containsKey(MediaFormat.KEY_CROP_BOTTOM)) {
mVideoDim.y = format.getInteger(MediaFormat.KEY_CROP_BOTTOM) + 1
- format.getInteger(MediaFormat.KEY_CROP_TOP);
} else
mVideoDim.y = mOutputDim.y;

// video rotation
if (format.containsKey(MediaFormat.KEY_ROTATION)) {
mVideoRotation = format.getInteger(MediaFormat.KEY_ROTATION);
}

Log.d(TAG, String.format("Input format:%s, outputDim:%dx%d, videoDim:%dx%d, cbcrOffset:%d, frameSizeBytes:%d", mSampleMimeType,
mOutputDim.x, mOutputDim.y,
mVideoDim.x, mVideoDim.y,
mCbcrOffset, frameSizeBytes));
}
}
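
The notes and the computation in handleVideoMetaChanged() above reduce to a small amount of integer arithmetic. A self-contained Java sketch of those rules (hypothetical helper, not part of AxmolMediaEngine), with a worked 1080p example where the decoder aligns the slice height to 32:

    // Hypothetical helper mirroring the rules above: cbcrOffset is always
    // stride * sliceHeight, and the expected NV12 frame size depends on whether
    // the reported stride is wider than the visible output width.
    public final class Nv12Layout {
        public static int cbcrOffset(int stride, int sliceHeight) {
            return stride * sliceHeight;
        }

        public static int frameSizeBytes(int outputX, int outputY, int stride, int sliceHeight) {
            stride = Math.max(outputX, stride);
            sliceHeight = Math.max(outputY, sliceHeight);
            int cbcrOffset = cbcrOffset(stride, sliceHeight);
            if (stride > outputX) {
                // Case 1a: the whole padded buffer is uploaded (Y plane plus half-size CbCr plane).
                return cbcrOffset * 3 / 2;
            }
            // Case 1b: only the Y plane carries slice padding; chroma is tightly packed.
            return cbcrOffset + (outputX / 2) * (outputY / 2) * 2;
        }

        public static void main(String[] args) {
            // Worked example: 1920x1080 HEVC, slice height aligned to 32 -> 1088.
            System.out.println(cbcrOffset(1920, 1088));                 // 2088960
            System.out.println(frameSizeBytes(1920, 1080, 1920, 1088)); // 2088960 + 1036800 = 3125760
        }
    }
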

/** handler or listener methods */

@Override
public void processVideoFrame(MediaCodecAdapter codec, int index, long presentationTimeUs) {
if (mState.get() != STATE_PLAYING) {
@@ -362,7 +406,7 @@ public void processVideoFrame(MediaCodecAdapter codec, int index, long presentat
}

ByteBuffer tmpBuffer = codec.getOutputBuffer(index);
nativeHandleVideoSample(mNativeObj, tmpBuffer, tmpBuffer.remaining(), mOutputDim.x, mOutputDim.y, mVideoDim.x, mVideoDim.y, mVideoRotation, mVideoPF);
nativeHandleVideoSample(mNativeObj, tmpBuffer, tmpBuffer.remaining(), mOutputDim.x, mOutputDim.y, mVideoDim.x, mVideoDim.y, mCbcrOffset, mVideoRotation, mVideoPF);

AxmolEngine.getActivity().runOnUiThread(() -> {
if (mPlayer != null) {
@@ -437,7 +481,7 @@ public void onVideoSizeChanged(VideoSize videoSize) {
Log.d(TAG, String.format("[Individual]onVideoSizeChanged: (%d,%d)", videoSize.width, videoSize.height));

if(mPlayer != null)
updateVideoMeta();
handleVideoMetaChanged();
}

@Override