Using Android's built-in media classes MediaCodec and MediaExtractor, with a TextureView for rendering, I put together a simple MP4 video player.
public class SurfaceTextureActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener {

    TextureView mTextureView;
    Surface mSurface;
    private PlayerThread mPlayer = null;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mTextureView = new TextureView(this);
        mTextureView.setSurfaceTextureListener(this);
        setContentView(mTextureView);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (mPlayer != null) {
            mPlayer.interrupt();
        }
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
        Log.e("SurfaceTextureActivity", "onSurfaceTextureAvailable " + i + " " + i1);
        if (mPlayer == null) {
            // Wrap the TextureView's SurfaceTexture in a Surface and hand it to the decoder thread
            mSurface = new Surface(mTextureView.getSurfaceTexture());
            mPlayer = new PlayerThread(mSurface);
            mPlayer.start();
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
        Log.e("SurfaceTextureActivity", "onSurfaceTextureSizeChanged");
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        Log.e("SurfaceTextureActivity", "onSurfaceTextureDestroyed");
        if (mPlayer != null) {
            mPlayer.interrupt();
            return true;
        }
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
        Log.e("SurfaceTextureActivity", "onSurfaceTextureUpdated");
    }
}
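The PlayerThread below does the actual extraction and decoding. Note that the SAMPLE constant it passes to setDataSource() is not defined in this listing; it is presumably the path or URI of the MP4 file to play. A hypothetical way to build such a path from inside the Activity, assuming a clip has been copied into the app's external files directory (the file name is made up), which could then be handed to PlayerThread via an extra constructor argument:

// Hypothetical: resolve a sample clip from the app's external files directory.
// "sample.mp4" is a made-up name; any readable MP4 path works the same way.
String sample = new File(getExternalFilesDir(null), "sample.mp4").getAbsolutePath();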
public class PlayerThread extends Thread {
    private MediaExtractor extractor;
    private MediaCodec decoder;
    private Surface surface;

    public PlayerThread(Surface surface) {
        this.surface = surface;
    }

    @Override
    public void run() {
        // API 16
        extractor = new MediaExtractor();
        try {
            // SAMPLE: path or URI of the MP4 file to play (defined elsewhere)
            extractor.setDataSource(SAMPLE);
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                String mime = format.getString(MediaFormat.KEY_MIME);
                if (mime.startsWith("video/")) {
                    extractor.selectTrack(i);
                    decoder = MediaCodec.createDecoderByType(mime);
                    // Configure the decoder to render directly to the Surface
                    decoder.configure(format, surface, null, 0);
                    break;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        if (decoder == null) {
            Log.e("DecodeActivity", "Can't find video info!");
            return;
        }

        decoder.start();

        // Pre-API-21 style: cache the input/output buffer arrays
        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        Log.e("Codec", "inputBuffers.length=" + inputBuffers.length + " outputBuffers.length=" + outputBuffers.length);

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean isEOS = false;
        long startMs = System.currentTimeMillis();
        // Test: frame counter for debugging
        int frameNo = 0;

        while (!Thread.interrupted()) {
            Log.e("Codec", "presentationTimeUs=" + info.presentationTimeUs);
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(100000);
                Log.e("Codec", "inIndex=" + inIndex);
                if (inIndex >= 0) {
                    // An input buffer is available
                    ByteBuffer buffer = inputBuffers[inIndex];
                    // Read one sample from the extractor
                    int sampleSize = extractor.readSampleData(buffer, 0);
                    if (sampleSize < 0) {
                        // End of stream: don't stop playback here, just pass the EOS
                        // flag to the decoder; we will get it back from
                        // dequeueOutputBuffer
                        Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        long sampleTime = extractor.getSampleTime();
                        Log.e("Codec", "sampleTime=" + sampleTime);
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, sampleTime, 0);
                        long target = sampleTime + 16670;
                        //extractor.seekTo(target, MediaExtractor.SEEK_TO_NEXT_SYNC);
                        extractor.advance();
                        frameNo++;
                        Log.e("Codec", "advance=" + frameNo + " target=" + target);
                    }
                }
            }

            int outIndex = decoder.dequeueOutputBuffer(info, 100000);
            Log.e("Codec", "outIndex=" + outIndex);
            switch (outIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                    outputBuffers = decoder.getOutputBuffers();
                    break;
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                    break;
                default:
                    // Since the decoder renders to the Surface, we don't read this
                    // buffer's contents; we only release it with render=true below.
                    ByteBuffer buffer = outputBuffers[outIndex];
                    Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
                    // A very simple clock to keep the video at its original FPS,
                    // otherwise playback runs as fast as the decoder allows
                    while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                        try {
                            Log.e("Codec", "sleep 10");
                            sleep(10);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                            break;
                        }
                    }
                    decoder.releaseOutputBuffer(outIndex, true);
                    break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                Log.e("Codec", "frameNo=" + frameNo);
                break;
            }
        }

        decoder.stop();
        decoder.release();
        extractor.release();
    }
}
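The buffer-array accessors used above (getInputBuffers()/getOutputBuffers()) match the API 16 target but were deprecated in API 21. For reference, a minimal sketch of how the input side of the same loop looks with the per-index lookups, assuming the same extractor and decoder setup as in PlayerThread:

// API 21+ style: fetch the ByteBuffer per index instead of caching the arrays.
int inIndex = decoder.dequeueInputBuffer(100000);
if (inIndex >= 0) {
    ByteBuffer inputBuffer = decoder.getInputBuffer(inIndex);
    int sampleSize = extractor.readSampleData(inputBuffer, 0);
    if (sampleSize < 0) {
        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    } else {
        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
        extractor.advance();
    }
}

The output side stays the same as long as the decoder renders to the Surface: releaseOutputBuffer(outIndex, true) still displays the frame, and getOutputBuffer(outIndex) is only needed if you want to read the decoded data yourself.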