Android MediaCodec Video Decoding and Playback in Practice

This article shows how to receive video data over the network, decode it with MediaCodec, and render the decoded frames through a GLSurfaceView (a SurfaceView subclass). The layout declares a custom GLSurfaceView; note that a custom view must be referenced in XML by its fully qualified class name:
<MyGLSurfaceView
    android:id="@+id/videoView"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content" />
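The post does not show MyGLSurfaceView itself. A minimal sketch of what it plausibly looks like, a GLSurfaceView subclass that installs the VideoRenderer defined below; the hard-coded stream size and the getVideoRenderer() accessor are assumptions, not part of the original:
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

// Sketch only: wires the VideoRenderer into a GLSurfaceView. The 1280x720
// stream size is a placeholder; a real implementation would take it from
// the stream configuration.
public class MyGLSurfaceView extends GLSurfaceView {
    private final VideoRenderer mRenderer;

    public MyGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2);             // OpenGL ES 2.0
        mRenderer = new VideoRenderer(1280, 720);  // hypothetical stream size
        setRenderer(mRenderer);
        setRenderMode(RENDERMODE_CONTINUOUSLY);    // keep redrawing decoded frames
    }

    public VideoRenderer getVideoRenderer() {
        return mRenderer;
    }
}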
Define the Renderer:
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.Surface;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class VideoRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "VideoRenderer";
    private GLSurfaceViewHelper mHelper;
    private Surface mSurface;
    private final int mWidth;
    private final int mHeight;

    public VideoRenderer(int width, int height) {
        mWidth = width;
        mHeight = height;
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // A new decoded frame is ready; with RENDERMODE_CONTINUOUSLY there is
        // nothing to do here, onDrawFrame will pick it up.
        //Log.d(TAG, "onFrameAvailable");
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.d(TAG, "onSurfaceCreated");
        if (mHelper == null) {
            mHelper = new GLSurfaceViewHelper(this, mWidth, mHeight);
            mHelper.init();
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        Log.d(TAG, "onSurfaceChanged");
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        //Log.d(TAG, "onDrawFrame");
        if (mHelper != null) {
            mHelper.render();
        }
    }

    // Returns null until the GL surface has been created, so callers can poll
    // for a non-null Surface before configuring the decoder. The Surface is
    // cached so repeated calls return the same instance instead of wrapping the
    // SurfaceTexture again each time.
    public Surface getSurface() {
        if (mHelper == null || mHelper.getSurfaceTexture() == null) {
            return null;
        }
        if (mSurface == null) {
            mSurface = new Surface(mHelper.getSurfaceTexture());
        }
        return mSurface;
    }
}
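GLSurfaceViewHelper is used above but never shown. A minimal sketch, assuming it owns the SurfaceTexture that MediaCodec renders into: it creates a GL_TEXTURE_EXTERNAL_OES texture on the GL thread, wraps it in a SurfaceTexture, and latches new frames with updateTexImage() on each draw. The textured-quad shader pass is omitted here:
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;

// Sketch only: matches the calls made by VideoRenderer (init/render/
// getSurfaceTexture). A real helper would also draw a full-screen quad
// sampling the OES texture with a samplerExternalOES fragment shader.
public class GLSurfaceViewHelper {
    private final SurfaceTexture.OnFrameAvailableListener mListener;
    private final int mWidth;
    private final int mHeight;
    private SurfaceTexture mSurfaceTexture;
    private int mTextureId;

    public GLSurfaceViewHelper(SurfaceTexture.OnFrameAvailableListener listener,
                               int width, int height) {
        mListener = listener;
        mWidth = width;
        mHeight = height;
    }

    // Must run on the GL thread (onSurfaceCreated): creates the external OES
    // texture that decoded video frames are written into.
    public void init() {
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        mTextureId = tex[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        mSurfaceTexture = new SurfaceTexture(mTextureId);
        mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
        mSurfaceTexture.setOnFrameAvailableListener(mListener);
    }

    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }

    // Called from onDrawFrame: latch the newest decoded frame into the OES
    // texture, then draw it (the drawing pass is omitted in this sketch).
    public void render() {
        if (mSurfaceTexture != null) {
            mSurfaceTexture.updateTexImage();
        }
    }
}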
The main decoding logic is as follows:
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class VideoDecoder extends Thread {
    private static final int MAX_RECV_BUFFER_SIZE = 65535;
    private static final String TAG = "VideoDecoder";
    private MediaCodec mDecoder;
    private final int mOutputWidth;
    private final int mOutputHeight;
    private final VideoRenderer mVideoRenderer;
    private Tunnel mTunnel;
    private final TimeLine mTimeline = new TimeLine();
    private final BufferManager mBufferManager;
    private FrameDataMerger mCurrentMerger;
    private final Object mCurrentMergerLock = new Object();
    private final SocketPackageUtils.PackageHeader mHeader = new SocketPackageUtils.PackageHeader();

    public VideoDecoder(VideoRenderer renderer, int width, int height) {
        mOutputWidth = width;
        mOutputHeight = height;
        mVideoRenderer = renderer;
        // Buffer up to 60 parsed frames (about one second at 60 fps).
        mBufferManager = new BufferManager(60);
        setPriority(Thread.MAX_PRIORITY);
    }
    // Called by the network receive loop (not shown) for each incoming packet.
    // recv() is an external helper that reads one packet into the buffer and
    // returns its length; its implementation is outside the scope of this article.
    private void recvData() {
        byte[] recvBuffer = new byte[MAX_RECV_BUFFER_SIZE];
        int size = recv(recvBuffer, recvBuffer.length);
        ByteBuffer buffer = ByteBuffer.wrap(recvBuffer, 0, size);
        buffer.order(ByteOrder.BIG_ENDIAN);
        buffer.position(0);
        SocketPackageUtils.parsePackageHeader(buffer, mHeader);
        ByteBuffer decodeBuffer = null;
        if (mHeader.mPackageType == SocketPackageUtils.PACKAGE_TYPE_CODEC_FRAME) {
            SocketPackageUtils.CodecFrameData frameData = new SocketPackageUtils.CodecFrameData();
            SocketPackageUtils.parseCodecFrameData(buffer, frameData);
            if (frameData.frameDataSize != frameData.frameDataTotalSize) {
                // The frame was fragmented across packets; reassemble it.
                Log.i(TAG, "merge the frameData frameId=" + frameData.frameID);
                synchronized (mCurrentMergerLock) {
                    if (mCurrentMerger != null && mCurrentMerger.getFrameId() == frameData.frameID) {
                        boolean complete = mCurrentMerger.mergeFrameData(buffer, frameData);
                        if (complete) {
                            Log.i(TAG, "complete frame data merged.");
                            decodeBuffer = ByteBuffer.wrap(mCurrentMerger.getFrameData());
                            decodeBuffer.position(0);
                            decodeBuffer.limit(frameData.frameDataTotalSize);
                            mCurrentMerger = null;
                        } else {
                            Log.i(TAG, "not complete frame frameId=" + frameData.frameID);
                            return;
                        }
                    } else {
                        // First fragment of a new frame: start a fresh merger.
                        mCurrentMerger = new FrameDataMerger(frameData.frameDataTotalSize, frameData.frameID);
                        mCurrentMerger.mergeFrameData(buffer, frameData);
                        return;
                    }
                }
            } else {
                // The whole frame fits in one packet: the payload starts right
                // after the package header and the frame-info header.
                decodeBuffer = buffer;
                decodeBuffer.position(SocketPackageUtils.PACKAGE_HEADER_LENGTH + SocketPackageUtils.PACKAGE_CODEC_FRAME_INFO_LENGTH);
                decodeBuffer.limit(SocketPackageUtils.PACKAGE_HEADER_LENGTH + SocketPackageUtils.PACKAGE_CODEC_FRAME_INFO_LENGTH + frameData.frameDataSize);
            }
            frameData.buffer = decodeBuffer;
            mBufferManager.push(frameData);
        }
    }
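    // FrameDataMerger is not shown in the original post. The nested sketch
    // below matches the calls made in recvData() above and reassembles a frame
    // from its fragments; it assumes fragments arrive in order, which is an
    // assumption rather than something the original states.
    private static class FrameDataMerger {
        private final byte[] mFrameData;
        private final int mFrameId;
        private int mReceived;

        FrameDataMerger(int totalSize, int frameId) {
            mFrameData = new byte[totalSize];
            mFrameId = frameId;
        }

        int getFrameId() {
            return mFrameId;
        }

        // Copies this fragment's payload into the frame buffer; returns true
        // once all frameDataTotalSize bytes have arrived.
        boolean mergeFrameData(ByteBuffer buffer, SocketPackageUtils.CodecFrameData frameData) {
            int len = Math.min(frameData.frameDataSize, mFrameData.length - mReceived);
            buffer.get(mFrameData, mReceived, len);
            mReceived += len;
            return mReceived >= mFrameData.length;
        }

        byte[] getFrameData() {
            return mFrameData;
        }
    }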
    private void createDecoder() {
        try {
            mDecoder = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, mOutputWidth, mOutputHeight);
            // KEY_LOW_LATENCY requires API 30+ and decoder support; it asks the
            // codec to output each frame as soon as it is decoded.
            format.setInteger(MediaFormat.KEY_LOW_LATENCY, 1);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
            // The output Surface only exists once the GL surface has been
            // created, so wait for the renderer to become ready.
            while (mVideoRenderer.getSurface() == null) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
            Log.d(TAG, "surface available");
            mDecoder.configure(format, mVideoRenderer.getSurface(), null, 0);
            mDecoder.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    @Override
    public void run() {
        createDecoder();
        decode();     // drains decoded output on a separate thread
        putBuffer();  // feeds encoded input on this thread
    }
    // Drain loop: release each decoded buffer to the Surface, pacing the
    // release to the frame's presentation timestamp.
    private void decode() {
        new Thread("Decode") {
            @Override
            public void run() {
                while (true) {
                    try {
                        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                        int decoderStatus = mDecoder.dequeueOutputBuffer(info, 2000000 /* 2s timeout, in microseconds */);
                        if (decoderStatus >= 0) {
                            long presentationTimeMs = info.presentationTimeUs / 1000;
                            boolean render = (info.size != 0);
                            long waitTime = mTimeline.getWaitTimeForRenderBuffer(presentationTimeMs);
                            if (waitTime > 0) {
                                try {
                                    Thread.sleep(waitTime);
                                } catch (InterruptedException e) {
                                    e.printStackTrace();
                                }
                            }
                            // render == true sends the frame to the Surface.
                            mDecoder.releaseOutputBuffer(decoderStatus, render);
                        }
                    } catch (Throwable e) {
                        e.printStackTrace();
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException interruptedException) {
                            interruptedException.printStackTrace();
                        }
                    }
                }
            }
        }.start();
    }
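    // TimeLine is not shown in the original either. This nested sketch paces
    // rendering by anchoring the first frame's timestamp to the wall clock;
    // the pacing policy itself is an assumption.
    private static class TimeLine {
        private long mBasePtsMs = -1;
        private long mBaseClockMs;

        // presentationTimeMs is the frame's PTS in milliseconds; returns how
        // long the render thread should sleep before releasing the buffer.
        long getWaitTimeForRenderBuffer(long presentationTimeMs) {
            long nowMs = System.currentTimeMillis();
            if (mBasePtsMs < 0) {
                mBasePtsMs = presentationTimeMs;
                mBaseClockMs = nowMs;
                return 0;
            }
            long dueMs = mBaseClockMs + (presentationTimeMs - mBasePtsMs);
            return dueMs - nowMs;
        }
    }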
    // Feed loop: pop parsed frames off the queue and submit them to the
    // decoder's input buffers.
    private void putBuffer() {
        while (true) {
            try {
                int decodeInputIndex = mDecoder.dequeueInputBuffer(-1); // block until an input buffer is free
                ByteBuffer decodeInputBuf = mDecoder.getInputBuffer(decodeInputIndex);
                decodeInputBuf.clear();
                // Spin until a frame is available; this assumes
                // BufferManager.pop() is non-blocking and returns null when empty.
                SocketPackageUtils.CodecFrameData frameData = null;
                while (frameData == null) {
                    frameData = mBufferManager.pop();
                }
                decodeInputBuf.put(frameData.buffer);
                try {
                    mDecoder.queueInputBuffer(decodeInputIndex, 0, frameData.frameDataTotalSize, frameData.presentationTimeUs, frameData.frameFlags);
                } catch (Throwable e) {
                    e.printStackTrace();
                }
            } catch (Throwable e) {
                e.printStackTrace();
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException interruptedException) {
                    interruptedException.printStackTrace();
                }
            }
        }
    }
}
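BufferManager is the last helper referenced but not shown. A plausible minimal version is a thread-safe bounded FIFO of parsed frames; the drop-oldest-when-full policy is an assumption:
import java.util.ArrayDeque;

// Sketch of a bounded, thread-safe FIFO for parsed frames, matching the
// push/pop calls made by VideoDecoder above.
public class BufferManager {
    private final ArrayDeque<SocketPackageUtils.CodecFrameData> mQueue = new ArrayDeque<>();
    private final int mCapacity;

    public BufferManager(int capacity) {
        mCapacity = capacity;
    }

    public synchronized void push(SocketPackageUtils.CodecFrameData frame) {
        if (mQueue.size() >= mCapacity) {
            mQueue.poll(); // drop the oldest frame to bound latency
        }
        mQueue.offer(frame);
    }

    // Returns null when empty; the caller (putBuffer) polls until data arrives.
    public synchronized SocketPackageUtils.CodecFrameData pop() {
        return mQueue.poll();
    }
}
Wiring it together from an Activity could then look like this (hypothetical, using the getVideoRenderer() accessor sketched earlier):
MyGLSurfaceView view = findViewById(R.id.videoView);
VideoRenderer renderer = view.getVideoRenderer();
new VideoDecoder(renderer, 1280, 720).start(); // width/height of the stream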