diff --git a/lib/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java b/lib/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java
index e56e307b..fb2a3769 100644
--- a/lib/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java
+++ b/lib/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java
@@ -160,7 +160,12 @@ private void setupTrackTranscoders(MediaFormatStrategy formatStrategy) {
             @Override
             public void onDetermineOutputFormat() {
                 MediaFormatValidator.validateVideoOutputFormat(mVideoTrackTranscoder.getDeterminedFormat());
-                MediaFormatValidator.validateAudioOutputFormat(mAudioTrackTranscoder.getDeterminedFormat());
+
+                // If there is an audio track, validate the output is correct.
+                MediaFormat audioFormat = mAudioTrackTranscoder.getDeterminedFormat();
+                if (audioFormat != null) {
+                    MediaFormatValidator.validateAudioOutputFormat(audioFormat);
+                }
             }
         });
 
@@ -170,17 +175,21 @@ public void onDetermineOutputFormat() {
             mVideoTrackTranscoder = new VideoTrackTranscoder(mExtractor, trackResult.mVideoTrackIndex, videoOutputFormat, queuedMuxer);
         }
         mVideoTrackTranscoder.setup();
+        mExtractor.selectTrack(trackResult.mVideoTrackIndex);
+
         if (audioOutputFormat == null) {
             mAudioTrackTranscoder = new PassThroughTrackTranscoder(mExtractor, trackResult.mAudioTrackIndex, queuedMuxer, QueuedMuxer.SampleType.AUDIO);
         } else {
             mAudioTrackTranscoder = new AudioTrackTranscoder(mExtractor, trackResult.mAudioTrackIndex, audioOutputFormat, queuedMuxer);
         }
+
+        if (trackResult.mAudioTrackIndex >= 0) {
+            mExtractor.selectTrack(trackResult.mAudioTrackIndex);
+        }
         mAudioTrackTranscoder.setup();
-        mExtractor.selectTrack(trackResult.mVideoTrackIndex);
-        mExtractor.selectTrack(trackResult.mAudioTrackIndex);
     }
 
-    private void runPipelines() throws InterruptedException {
+    private void runPipelines() {
         long loopCount = 0;
         if (mDurationUs <= 0) {
             double progress = PROGRESS_UNKNOWN;
@@ -199,7 +208,11 @@ private void runPipelines() {
                 if (mProgressCallback != null) mProgressCallback.onProgress(progress);
             }
             if (!stepped) {
-                Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
+                try {
+                    Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
+                } catch (InterruptedException e) {
+                    // nothing to do
+                }
             }
         }
     }
@@ -212,4 +225,4 @@ public interface ProgressCallback {
          */
         void onProgress(double progress);
     }
-}
+}
\ No newline at end of file
diff --git a/lib/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java b/lib/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java
index 7608dac8..290ee1e5 100644
--- a/lib/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java
+++ b/lib/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java
@@ -36,16 +36,25 @@ public class PassThroughTrackTranscoder implements TrackTranscoder {
     private long mWrittenPresentationTimeUs;
 
     public PassThroughTrackTranscoder(MediaExtractor extractor, int trackIndex,
-                                      QueuedMuxer muxer, QueuedMuxer.SampleType sampleType) {
+            QueuedMuxer muxer, QueuedMuxer.SampleType sampleType) {
         mExtractor = extractor;
         mTrackIndex = trackIndex;
         mMuxer = muxer;
         mSampleType = sampleType;
 
-        mActualOutputFormat = mExtractor.getTrackFormat(mTrackIndex);
-        mMuxer.setOutputFormat(mSampleType, mActualOutputFormat);
-        mBufferSize = mActualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
-        mBuffer = ByteBuffer.allocateDirect(mBufferSize).order(ByteOrder.nativeOrder());
+        if (trackIndex >= 0) {
+            mActualOutputFormat = mExtractor.getTrackFormat(mTrackIndex);
+            mMuxer.setOutputFormat(mSampleType, mActualOutputFormat);
+            mBufferSize = mActualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
+            mBuffer = ByteBuffer.allocateDirect(mBufferSize).order(ByteOrder.nativeOrder());
+        } else {
+            // track has no audio. Passthrough should also exclude the track.
+            mMuxer.setOutputFormat(mSampleType, null);
+
+            // Nothing to do. EOS and report it took us 0 ms.
+            mIsEOS = true;
+            mWrittenPresentationTimeUs = 0;
+        }
     }
 
     @Override
@@ -97,4 +106,4 @@ public boolean isFinished() {
     @Override
     public void release() {
     }
-}
+}
\ No newline at end of file
diff --git a/lib/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java b/lib/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java
index df58e992..3b0ef280 100644
--- a/lib/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java
+++ b/lib/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java
@@ -30,6 +30,7 @@
  */
 public class QueuedMuxer {
     private static final String TAG = "QueuedMuxer";
+    private static final int EXCLUDE_TRACK_INDEX = -1;
     private static final int BUFFER_SIZE = 64 * 1024; // I have no idea whether this value is appropriate or not...
     private final MediaMuxer mMuxer;
     private final Listener mListener;
@@ -54,6 +55,11 @@ public void setOutputFormat(SampleType sampleType, MediaFormat format) {
                 break;
             case AUDIO:
                 mAudioFormat = format;
+
+                if (format == null) {
+                    // Tell the muxer we do not require audio.
+                    mAudioTrackIndex = EXCLUDE_TRACK_INDEX;
+                }
                 break;
             default:
                 throw new AssertionError();
@@ -62,13 +68,17 @@ public void setOutputFormat(SampleType sampleType, MediaFormat format) {
     }
 
     private void onSetOutputFormat() {
-        if (mVideoFormat == null || mAudioFormat == null) return;
+        if (mVideoFormat == null || (mAudioFormat == null && mAudioTrackIndex != EXCLUDE_TRACK_INDEX)) return;
         mListener.onDetermineOutputFormat();
 
         mVideoTrackIndex = mMuxer.addTrack(mVideoFormat);
         Log.v(TAG, "Added track #" + mVideoTrackIndex + " with " + mVideoFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
-        mAudioTrackIndex = mMuxer.addTrack(mAudioFormat);
-        Log.v(TAG, "Added track #" + mAudioTrackIndex + " with " + mAudioFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
+
+        if (mAudioFormat != null) {
+            mAudioTrackIndex = mMuxer.addTrack(mAudioFormat);
+            Log.v(TAG, "Added track #" + mAudioTrackIndex + " with " + mAudioFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
+        }
+
         mMuxer.start();
         mStarted = true;
@@ -137,4 +147,4 @@ private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
     public interface Listener {
         void onDetermineOutputFormat();
     }
-}
+}
\ No newline at end of file
diff --git a/lib/src/main/java/net/ypresto/androidtranscoder/format/Android16By9FormatStrategy.java b/lib/src/main/java/net/ypresto/androidtranscoder/format/Android16By9FormatStrategy.java
index faf30312..50105093 100644
--- a/lib/src/main/java/net/ypresto/androidtranscoder/format/Android16By9FormatStrategy.java
+++ b/lib/src/main/java/net/ypresto/androidtranscoder/format/Android16By9FormatStrategy.java
@@ -17,7 +17,6 @@
 
 import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
-import android.util.Log;
 
 class Android16By9FormatStrategy implements MediaFormatStrategy {
     public static final int AUDIO_BITRATE_AS_IS = -1;
@@ -61,10 +60,14 @@ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
         if (longer * 9 != shorter * 16) {
             throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
         }
+
+        /*
+        I've commented this out because it's unsafe to assume the user wants to bypass compression when the resolution already matches the target.
         if (shorter <= targetShorter) {
             Log.d(TAG, "This video's height is less or equal to " + targetShorter + ", pass-through. (" + width + "x" + height + ")");
             return null;
-        }
+        }*/
+
         MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
         // From Nexus 4 Camera in 720p
         format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
@@ -76,13 +79,13 @@ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
 
     @Override
     public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
-        if (mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS) return null;
+        if (inputFormat == null || mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS) return null;
 
         // Use original sample rate, as resampling is not supported yet.
         final MediaFormat format = MediaFormat.createAudioFormat(MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC,
-                inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mAudioChannels);
+            inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mAudioChannels);
         format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
         format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
         return format;
     }
-}
+}
\ No newline at end of file
diff --git a/lib/src/main/java/net/ypresto/androidtranscoder/format/Android720pFormatStrategy.java b/lib/src/main/java/net/ypresto/androidtranscoder/format/Android720pFormatStrategy.java
index dc59caa1..fa9612f3 100644
--- a/lib/src/main/java/net/ypresto/androidtranscoder/format/Android720pFormatStrategy.java
+++ b/lib/src/main/java/net/ypresto/androidtranscoder/format/Android720pFormatStrategy.java
@@ -17,7 +17,6 @@
 
 import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
-import android.util.Log;
 
 class Android720pFormatStrategy implements MediaFormatStrategy {
     public static final int AUDIO_BITRATE_AS_IS = -1;
@@ -63,10 +62,13 @@ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
         if (longer * 9 != shorter * 16) {
             throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
         }
+
+        /*
+        I've commented this out because it's unsafe to assume the user wants to bypass compression when the resolution already matches the target.
         if (shorter <= SHORTER_LENGTH) {
             Log.d(TAG, "This video is less or equal to 720p, pass-through. (" + width + "x" + height + ")");
             return null;
-        }
+        }*/
         MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
         // From Nexus 4 Camera in 720p
         format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
@@ -78,13 +80,13 @@ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
 
     @Override
     public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
-        if (mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS) return null;
+        if (inputFormat == null || mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS) return null;
 
         // Use original sample rate, as resampling is not supported yet.
         final MediaFormat format = MediaFormat.createAudioFormat(MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC,
-                inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mAudioChannels);
+            inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mAudioChannels);
         format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
         format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
         return format;
     }
-}
+}
\ No newline at end of file
diff --git a/lib/src/main/java/net/ypresto/androidtranscoder/utils/MediaExtractorUtils.java b/lib/src/main/java/net/ypresto/androidtranscoder/utils/MediaExtractorUtils.java
index b973d2f1..82f3161a 100644
--- a/lib/src/main/java/net/ypresto/androidtranscoder/utils/MediaExtractorUtils.java
+++ b/lib/src/main/java/net/ypresto/androidtranscoder/utils/MediaExtractorUtils.java
@@ -55,9 +55,9 @@ public static TrackResult getFirstVideoAndAudioTrack(MediaExtractor extractor) {
             }
             if (trackResult.mVideoTrackIndex >= 0 && trackResult.mAudioTrackIndex >= 0) break;
         }
-        if (trackResult.mVideoTrackIndex < 0 || trackResult.mAudioTrackIndex < 0) {
-            throw new IllegalArgumentException("extractor does not contain video and/or audio tracks.");
+        if (trackResult.mVideoTrackIndex < 0) {
+            throw new IllegalArgumentException("extractor does not contain video tracks.");
        }
         return trackResult;
     }
-}
+}
\ No newline at end of file
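
Note for reviewers (not part of the patch itself): the change hinges on detecting whether the source has an audio track at all and then propagating "no audio" as a negative track index and a null audio format. A minimal, self-contained sketch of that probing pattern is below, using only the stock android.media APIs; the class and method names (TrackProbe, hasAudioTrack) are hypothetical and do not exist in this library.

import android.media.MediaExtractor;
import android.media.MediaFormat;

import java.io.IOException;

final class TrackProbe {
    private TrackProbe() {}

    // Returns true if any track in the source reports an "audio/*" MIME type.
    // Mirrors the trackIndex >= 0 guards added in this patch: when no audio
    // track exists, the audio branch is skipped instead of throwing.
    static boolean hasAudioTrack(String path) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        try {
            extractor.setDataSource(path);
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                String mime = format.getString(MediaFormat.KEY_MIME);
                if (mime != null && mime.startsWith("audio/")) {
                    return true;
                }
            }
            return false;
        } finally {
            extractor.release();
        }
    }
}

A caller could run such a check before deciding whether to expect audio in the transcoded output; inside this patch the equivalent information is carried by TrackResult.mAudioTrackIndex being negative.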