package in.omerjerk.droidcv;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.net.Uri;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Static helpers for MediaCodec-based H.264 encode/decode pipelines.
 *
 * <p>Created by omerjerk on 12/7/14.
 */
public class CodecUtils {

    private static final boolean VERBOSE = true;
    private static final String TAG = "Codec";

    public static String MIME_TYPE = "video/avc";
    public static int FRAME_RATE = 15;

    /**
     * Returns the first installed codec capable of encoding the given MIME type.
     *
     * @param mimeType the MIME type to look up, e.g. {@code "video/avc"}
     * @return a matching encoder's {@link MediaCodecInfo}, or {@code null} if none exists
     */
    public static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            for (String type : codecInfo.getSupportedTypes()) {
                if (type.equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }

    /**
     * Drains encoded output from {@code encoder}, feeds it into {@code decoder}, and delivers
     * each decoded frame to {@code mListener} via {@code displayFrame}.
     *
     * <p>Encoder input is NOT submitted here; frames are presumably delivered through the
     * encoder's input Surface (method name suggests so) — TODO confirm against the caller.
     * The decoder is lazily configured from the encoder's codec-config (csd-0) packet.
     * Loops until the decoder emits a buffer flagged {@code BUFFER_FLAG_END_OF_STREAM}.
     *
     * @param encoder      a started encoder to drain
     * @param decoder      an unconfigured decoder; configured and started on the first
     *                     codec-config packet
     * @param mListener    receives format-change and per-frame callbacks; may be {@code null}
     * @param displayFrame frame holder updated with each decoded frame's bytes
     */
    public static void doEncodeDecodeVideoFromSurface(MediaCodec encoder, MediaCodec decoder,
            DisplayFrameListener mListener, DisplayFrame displayFrame) {
        final int TIMEOUT_USEC = 10000;

        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = null;
        ByteBuffer[] decoderOutputBuffers = null;
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        MediaFormat decoderOutputFormat = null;
        int checkIndex = 0;
        // NOTE(review): hard-coded frame size — should come from the encoder's output format.
        int mWidth = 960;
        int mHeight = 1280;
        boolean decoderConfigured = false;

        // Byte counters, just out of curiosity.
        long rawSize = 0;
        long encodedSize = 0;

        boolean encoderDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            // Check for output from the encoder. If there's no output yet we loop around and
            // drain the decoder instead; we can't tell whether the encoder needs more input or
            // is just slow. Once we get EOS from the encoder we stop asking it for output.
            if (!encoderDone) {
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // not expected for an encoder
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    throw new IllegalStateException(
                            "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        throw new IllegalStateException(
                                "encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // Adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);

                    encodedSize += info.size;
                    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        // Codec config info, only expected on the first packet. Stuff the csd-0
                        // data into a MediaFormat and use it to configure the decoder.
                        MediaFormat format =
                                MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
                        format.setByteBuffer("csd-0", encodedData);
                        decoder.configure(format, null, null, 0);
                        if (mListener != null) {
                            mListener.decoderOutputFormatChanged(format);
                        }
                        decoder.start();
                        decoderInputBuffers = decoder.getInputBuffers();
                        decoderOutputBuffers = decoder.getOutputBuffers();
                        decoderConfigured = true;
                        if (VERBOSE) Log.d(TAG, "decoder configured (" + info.size + " bytes)");
                    } else {
                        // Normal frame: get a decoder input buffer, blocking until available.
                        int inputBufIndex = decoder.dequeueInputBuffer(-1);
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputBuf.put(encodedData);
                        decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                info.presentationTimeUs, info.flags);

                        encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                        if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                + (encoderDone ? " (EOS)" : ""));
                    }

                    encoder.releaseOutputBuffer(encoderStatus, false);
                }
            }

            // Check for output from the decoder on every loop to avoid stalling the pipeline.
            // A short timeout avoids burning CPU while the decoder works on the next frame.
            if (decoderConfigured) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The old output buffer array may already be unmapped; keep using it and
                    // we risk a native crash — refresh the reference.
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                    decoderOutputBuffers = decoder.getOutputBuffers();
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // Happens once before the first frame is returned.
                    decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: "
                            + decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    throw new IllegalStateException(
                            "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
                    outputFrame.position(info.offset);
                    outputFrame.limit(info.offset + info.size);

                    rawSize += info.size;
                    if (info.size == 0) {
                        if (VERBOSE) Log.d(TAG, "got empty frame");
                    } else {
                        if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex);
                        if (mListener != null) {
                            byte[] b = new byte[info.size];
                            // FIX: the second arg of get(byte[], int, int) is an offset into
                            // the DESTINATION array; passing info.offset overran b whenever
                            // offset > 0. Position/limit already bound the read, so get(b)
                            // copies exactly info.size bytes.
                            outputFrame.get(b);
                            displayFrame.updateFrame(b);
                            mListener.onNewFrame(displayFrame);
                            mListener.rawFrame(b);
                        }
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }
                    decoder.releaseOutputBuffer(decoderStatus, false /*render*/);
                }
            }
        }

        if (VERBOSE) Log.d(TAG, "decoded " + checkIndex + " frames at " + mWidth + "x" + mHeight
                + ": raw=" + rawSize + ", enc=" + encodedSize);
    }

    /**
     * Decodes track 0 of the video at {@code filePath} and delivers every decoded frame to
     * {@code frameListener} via {@code displayFrame}. Blocks until end of stream; the decoder
     * and extractor are always released, even on error.
     *
     * @param filePath      URI of the local video file (its path is handed to MediaExtractor)
     * @param frameListener receives each decoded frame
     * @param displayFrame  frame holder updated with each decoded frame's bytes
     * @throws IOException if the data source cannot be read or no decoder exists for its MIME
     */
    public static void doDecodeFromVideo(Uri filePath, DisplayFrameListener frameListener,
            DisplayFrame displayFrame) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        MediaCodec decoder = null;
        try {
            Log.d("omerjerk", "File Path = " + filePath.getPath());
            extractor.setDataSource(filePath.getPath());
            MediaFormat format = extractor.getTrackFormat(0);
            String mime = format.getString(MediaFormat.KEY_MIME);
            decoder = MediaCodec.createDecoderByType(mime);
            Log.d("omerjerk", "Mime = " + mime);
            decoder.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
            decoder.start();

            ByteBuffer[] codecInputBuffers = decoder.getInputBuffers();
            ByteBuffer[] codecOutputBuffers = decoder.getOutputBuffers();
            extractor.selectTrack(0);

            final long kTimeOutUs = 10000;
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            boolean sawInputEOS = false;
            boolean sawOutputEOS = false;

            while (!sawOutputEOS) {
                // Feed one sample into the decoder (or an empty EOS buffer at end of stream).
                if (!sawInputEOS) {
                    int inputBufIndex = decoder.dequeueInputBuffer(kTimeOutUs);
                    if (inputBufIndex >= 0) {
                        ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                        int sampleSize = extractor.readSampleData(dstBuf, 0 /* offset */);
                        long presentationTimeUs = 0;
                        if (sampleSize < 0) {
                            // No more samples: queue a zero-length buffer carrying the EOS flag.
                            sawInputEOS = true;
                            sampleSize = 0;
                        } else {
                            presentationTimeUs = extractor.getSampleTime();
                        }
                        decoder.queueInputBuffer(inputBufIndex, 0 /* offset */, sampleSize,
                                presentationTimeUs,
                                sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                        if (!sawInputEOS) {
                            extractor.advance();
                        }
                    } else {
                        Log.e("omerjerk", "inputBufIndex " + inputBufIndex);
                    }
                }

                // Drain one output buffer from the decoder.
                int outputBufIndex = decoder.dequeueOutputBuffer(bufferInfo, kTimeOutUs);
                if (outputBufIndex >= 0) {
                    if (bufferInfo.size != 0) {
                        ByteBuffer buf = codecOutputBuffers[outputBufIndex];
                        buf.position(bufferInfo.offset);
                        buf.limit(bufferInfo.offset + bufferInfo.size);
                        final byte[] frame = new byte[bufferInfo.size];
                        // FIX: was get(frame, bufferInfo.offset, bufferInfo.size) — the second
                        // arg indexes the destination array, so a non-zero offset overran
                        // frame[]. Position/limit already bound the read.
                        buf.get(frame);
                        displayFrame.updateFrame(frame);
                        frameListener.onNewFrame(displayFrame);
                    } else {
                        if (VERBOSE) Log.d("omerjerk", "Got empty frame");
                    }
                    // FIX: release unconditionally — empty frames (including the EOS buffer)
                    // were previously never returned, leaking decoder output buffers.
                    decoder.releaseOutputBuffer(outputBufIndex, false /* render */);
                    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        sawOutputEOS = true;
                    }
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    codecOutputBuffers = decoder.getOutputBuffers();
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat outputFormat = decoder.getOutputFormat();
                    Log.i("omerjerk", "output format has changed to " + outputFormat);
                } else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    Log.i("omerjerk", "No output from the decoder");
                }
            }
        } finally {
            // FIX: the codec and extractor were never released (native resource leak).
            // release() is safe to call in any codec state.
            if (decoder != null) {
                decoder.release();
            }
            extractor.release();
        }
    }
}