I have been trying to decode an H.264-encoded video file with Android's MediaCodec and render the decoder's output to a Surface, but when I run the app the Surface stays black and DDMS logcat shows that the decoder timed out.
I first parsed the file into individual frames [reading 4 bytes that give the length of the upcoming frame, then reading that many bytes as the frame data, then another 4-byte length for the next frame, and so on], and then passed the frames to the decoder in a loop. When configuring the decoder I passed the SPS and PPS in the MediaFormat, hard-coding the values taken directly from the encoded file [I got them by opening the file in a hex editor]. I did not set any presentationTimeUs and just used 0 for it. Now the decoder's dequeueInputBuffer() returns a value >= 0, but dequeueOutputBuffer() only ever returns MediaCodec.INFO_TRY_AGAIN_LATER, which means the decoder keeps timing out.
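For context, this is the feed/drain pattern I was trying to follow, based on my reading of the MediaCodec docs. It is only a rough sketch, not the code I am actually running: the method name feedAndDrain is made up, frameDurationUs is an assumed per-frame timestamp step (e.g. 1000000 / 30 for 30 fps), and Frame is the small holder class from my code below.

// Sketch of the feed/drain loop as I understand it (assumptions: the decoder is
// already configured with the Surface and started, 'frames' is my parsed frame
// list, and frameDurationUs comes from the encoder's frame rate).
void feedAndDrain(MediaCodec decoder, ArrayList<Frame> frames, long frameDurationUs) {
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int nextFrame = 0;
    boolean sawOutputEos = false;
    while (!sawOutputEos) {
        // Feed input whenever a buffer is free; queue EOS after the last frame.
        if (nextFrame <= frames.size()) {
            int inIndex = decoder.dequeueInputBuffer(10000);
            if (inIndex >= 0) {
                if (nextFrame == frames.size()) {
                    decoder.queueInputBuffer(inIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                } else {
                    ByteBuffer inBuf = inputBuffers[inIndex];
                    inBuf.clear();
                    inBuf.put(frames.get(nextFrame).frameData);
                    decoder.queueInputBuffer(inIndex, 0,
                            frames.get(nextFrame).frameData.length,
                            nextFrame * frameDurationUs, 0);
                }
                nextFrame++;
            }
        }
        // Drain whatever output is ready; the first calls may legitimately return
        // INFO_TRY_AGAIN_LATER until the decoder has consumed enough input.
        int outIndex = decoder.dequeueOutputBuffer(info, 10000);
        if (outIndex >= 0) {
            decoder.releaseOutputBuffer(outIndex, true); // render to the Surface
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                sawOutputEos = true;
            }
        }
    }
}

The idea, as I understand it, is that feeding input and draining output happen in the same loop but are not coupled one-to-one, and that presentationTimeUs increases from frame to frame.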
Please have a look at my code and help me out.
Thanks in advance.
Here is the file URL - https://drive.google.com/file/d/0B39qOyEnXlR8Z3FSb2lzTWlORUU/edit?usp=sharing
Here is my code -
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import android.app.Activity;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.Toast;
public class MainActivity extends Activity implements SurfaceHolder.Callback
{
    private static final String filePath = Environment.getExternalStorageDirectory()+ "/H264Data1.264"; // + "/video_encoded.263";//"/video_encoded.264";
    private PlayerThread mPlayer = null;
    Handler handler = null;
    public static byte[] SPS = null;
    public static byte[] PPS = null;
    public static ArrayList<Frame> frames = null;
    public static int frameID = 0;
    public static boolean incompleteLastFrame = false;
    File encodedFile = new File(filePath);
    InputStream is;

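    // Holder for one parsed frame: id is the frame index and frameData holds the
    // frame's bytes (without the 4-byte length prefix).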
    private static class Frame
    {
        public int id;
        public byte[] frameData;

        public Frame(int id)
        {
            this.id = id;
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        try
        {
            is = new FileInputStream(encodedFile);
            byte[] data = new byte[(int)encodedFile.length()];
            System.out.println("Total file size : " + encodedFile.length());
            frameID = 0;
            frames = new ArrayList<Frame>();
            try {
                if ((is.read(data, 0, (int)encodedFile.length())) != -1)
                {
                    getFramesFromData(data);
                    Toast.makeText(getApplicationContext(), "frames processing finished. number of frames : " + frames.size(), Toast.LENGTH_SHORT).show();
                    SurfaceView sv = new SurfaceView(this);
                    handler = new Handler();
                    sv.getHolder().addCallback(this);
                    setContentView(sv);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }

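    // Splits the raw file contents into frames: each frame is preceded by a
    // 4-byte big-endian length, followed by that many bytes of frame data.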
    public static void getFramesFromData(byte[] data)
    {
        int dataLength = data.length;
        int frameLength = 0;
        frameID = 0;
        if(data.length <= 0) return;

        // each iteration in this loop indicates generation of a new frame
        for(int i = 0; ; )
        {
            if(i+3 >= dataLength) return;
            frameLength = ((data[i] & 0xff) << 24)
                    + ((data[i + 1] & 0xff) << 16)
                    + ((data[i + 2] & 0xff) << 8)
                    + (data[i + 3] & 0xff);
            i += 4;
            if(frameLength > 0)
            {
                if(i+frameLength-1 >= dataLength) return;
                Frame frame = new Frame(frameID);
                frame.frameData = new byte[frameLength];
                System.arraycopy(data, i, frame.frameData, 0, frameLength);
                frames.add(frame);
                frameID++;
                i += frameLength;
            }
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder)
    {
        Log.d("DecodeActivity", "in surfaceCreated");
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
    {
        Log.d("DecodeActivity", "in surfaceChanged");
        if (mPlayer == null)
        {
            Toast.makeText(getApplicationContext(), "in surfaceChanged. creating playerthread", Toast.LENGTH_SHORT).show();
            mPlayer = new PlayerThread(holder.getSurface());
            mPlayer.start();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder)
    {
        if (mPlayer != null)
        {
            mPlayer.interrupt();
        }
    }

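    // Configures the H.264 decoder and runs the decode loop; the actual work is
    // posted to the main thread's Handler rather than run on this thread.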
    private class PlayerThread extends Thread
    {
        //private MediaExtractor extractor;
        private MediaCodec decoder;
        private Surface surface;

        public PlayerThread(Surface surface)
        {
            this.surface = surface;
        }

        @Override
        public void run()
        {
            handler.post(new Runnable()
            {
                @Override
                public void run()
                {
                    decoder = MediaCodec.createDecoderByType("video/avc");
                    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
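                    // csd-0 / csd-1: the SPS and PPS (with Annex-B start codes),
                    // hard-coded from the values found in the encoded file.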
                    byte[] header_sps = { 0x00, 0x00, 0x00, 0x01, 0x67, 0x42, (byte)0x80, 0x0C, (byte)0xE4, 0x40, (byte)0xA0, (byte)0xFD, 0x00, (byte)0xDA, 0x14, 0x26, (byte)0xA0 };
                    byte[] header_pps = {0x00, 0x00, 0x00, 0x01, 0x68, (byte)0xCE, 0x38, (byte)0x80 };
                    mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
                    mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
                    decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */);
                    if (decoder == null)
                    {
                        Log.e("DecodeActivity", "Can't find video info!");
                        return;
                    }
                    decoder.start();
                    Log.d("DecodeActivity", "decoder.start() called");
                    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
                    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
                    long startMs = System.currentTimeMillis();
                    int i = 0;
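                    // Feed one parsed frame per loop iteration and immediately try to
                    // dequeue one decoded output buffer for it.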
                    while(!Thread.interrupted())
                    {
                        if(i >= frames.size())
                            break;
                        byte[] data = new byte[frames.get(i).frameData.length];
                        System.arraycopy(frames.get(i).frameData, 0, data, 0, frames.get(i).frameData.length);
                        Log.d("DecodeActivity", "i = " + i + " dataLength = " + frames.get(i).frameData.length);
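                        // Busy-wait until an input buffer becomes available (1 us timeout per attempt).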
                        int inIndex = 0;
                        while ((inIndex = decoder.dequeueInputBuffer(1)) < 0)
                            ;
                        if (inIndex >= 0)
                        {
                            ByteBuffer buffer = inputBuffers[inIndex];
                            buffer.clear();
                            int sampleSize = data.length;
                            if (sampleSize < 0)
                            {
                                Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                                decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                                break;
                            }
                            else
                            {
                                Log.d("DecodeActivity", "sample size: " + sampleSize);
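                                // Copy the frame into a newly allocated ByteBuffer
                                // (not into the codec-owned inputBuffers[inIndex] dequeued above).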
                                buffer = ByteBuffer.allocate(data.length);
                                buffer.put(data);
                                decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);
                            }
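                            // Ask the decoder for one decoded frame, waiting up to 100 ms.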
                            BufferInfo info = new BufferInfo();
                            int outIndex = decoder.dequeueOutputBuffer(info, 100000);
                            switch (outIndex)
                            {
                                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                                    Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                                    outputBuffers = decoder.getOutputBuffers();
                                    break;
                                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                                    Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                                    break;
                                case MediaCodec.INFO_TRY_AGAIN_LATER:
                                    Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                                    try {
                                        sleep(100);
                                    } catch (InterruptedException e) {
                                        e.printStackTrace();
                                    }
                                    break;
                                default:
                                    ByteBuffer outbuffer = outputBuffers[outIndex];
                                    Log.d("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outbuffer);
                                    /*while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs)
                                    {
                                        try
                                        {
                                            sleep(10);
                                        } catch (InterruptedException e) {
                                            e.printStackTrace();
                                            break;
                                        }
                                    }*/
                                    decoder.releaseOutputBuffer(outIndex, true);
                                    break;
                            }
                            i++;

                            // All decoded frames have been rendered, we can stop playing now
                            /*if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
                            {
                                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                                break;
                            }*/
                        }
                    }
                    decoder.stop();
                    decoder.release();
                }
            });
        }
    }
}