Decoding and Playing Video with Android MediaCodec

This article shows how to decode video with MediaCodec on Android and play a network video stream on a GLSurfaceView, walking through the decoding flow and the key code.

The pipeline is: receive encoded data over the network, decode it with MediaCodec, and render the decoded frames to a Surface backed by the GLSurfaceView. First, declare the custom view in the layout:

    <MyGLSurfaceView
        android:id="@+id/videoView"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content" />
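
The layout refers to a custom MyGLSurfaceView that the post never shows (a real layout would also use its fully qualified class name). Below is a minimal sketch of what such a view could look like; the 1920x1080 resolution passed to the renderer is an assumption, not something stated in the post.

import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

// Hosts the VideoRenderer on a GLES 2.0 context so decoded frames can be drawn
// from an external (OES) texture.
public class MyGLSurfaceView extends GLSurfaceView {
    private final VideoRenderer mRenderer;

    public MyGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2);              // samplerExternalOES needs GLES 2.0+
        mRenderer = new VideoRenderer(1920, 1080);  // assumed stream resolution
        setRenderer(mRenderer);
        // Render continuously; RENDERMODE_WHEN_DIRTY plus requestRender() from
        // onFrameAvailable() would also work and uses less power.
        setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    }

    public VideoRenderer getVideoRenderer() {
        return mRenderer;
    }
}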

Define the Renderer that draws the decoded frames:

public class VideoRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener{
    private static final String TAG = "VideoRenderer";
    private GLSurfaceViewHelper mHelper;
    private int mWidth;
    private int mHeight;

    public VideoRenderer(int width, int height){
        mWidth = width;
        mHeight = height;
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        //Log.d(TAG, "onFrameAvailable");
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.d(TAG, "onSurfaceCreated");
        if (mHelper == null) {
            mHelper = new GLSurfaceViewHelper(this, mWidth, mHeight);
            mHelper.init();
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        Log.d(TAG, "onSurfaceChanged");
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        //Log.d(TAG, "onDrawFrame");
        if (mHelper != null) {
            mHelper.render();
        }
    }

    public Surface getSurface() {
        Log.d(TAG, "getSurface");
        // The helper (and its SurfaceTexture) is only created in onSurfaceCreated(),
        // so return null until it exists instead of throwing a NullPointerException;
        // the decoder below waits for a non-null Surface before configuring MediaCodec.
        if (mHelper == null || mHelper.getSurfaceTexture() == null) {
            return null;
        }
        return new Surface(mHelper.getSurfaceTexture());
    }
}
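
VideoRenderer delegates all GL work to a GLSurfaceViewHelper, which is also not shown in the post. The sketch below is one plausible implementation, inferred from how the renderer uses it: it creates a GL_TEXTURE_EXTERNAL_OES texture, wraps it in the SurfaceTexture that MediaCodec will decode into, and draws the latest frame as a full-screen quad. All names follow the calls made by VideoRenderer; the details are assumptions.

import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class GLSurfaceViewHelper {
    // Compact GLSL: a pass-through vertex shader and a fragment shader that samples
    // the external (OES) texture that MediaCodec decodes into.
    private static final String VERTEX_SHADER =
            "attribute vec4 aPosition; attribute vec2 aTexCoord; varying vec2 vTexCoord;" +
            "void main() { gl_Position = aPosition; vTexCoord = aTexCoord; }";
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float; uniform samplerExternalOES uTexture; varying vec2 vTexCoord;" +
            "void main() { gl_FragColor = texture2D(uTexture, vTexCoord); }";

    // Full-screen quad as a triangle strip: x, y, u, v. The v coordinate is flipped so the
    // video appears upright; a production renderer would use SurfaceTexture.getTransformMatrix().
    private static final float[] QUAD = {
            -1f, -1f, 0f, 1f,
             1f, -1f, 1f, 1f,
            -1f,  1f, 0f, 0f,
             1f,  1f, 1f, 0f,
    };

    private final SurfaceTexture.OnFrameAvailableListener mListener;
    private final int mWidth;
    private final int mHeight;
    private SurfaceTexture mSurfaceTexture;
    private int mTextureId;
    private int mProgram;
    private FloatBuffer mQuad;

    public GLSurfaceViewHelper(SurfaceTexture.OnFrameAvailableListener listener, int width, int height) {
        mListener = listener;
        mWidth = width;
        mHeight = height;
    }

    // Called from onSurfaceCreated(), i.e. on the GL thread.
    public void init() {
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        mTextureId = tex[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

        mSurfaceTexture = new SurfaceTexture(mTextureId);
        mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
        mSurfaceTexture.setOnFrameAvailableListener(mListener);

        mProgram = buildProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        mQuad = ByteBuffer.allocateDirect(QUAD.length * 4)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mQuad.put(QUAD).position(0);
    }

    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }

    // Called from onDrawFrame(), i.e. on the GL thread.
    public void render() {
        mSurfaceTexture.updateTexImage();   // latch the newest decoded frame into the OES texture

        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glUseProgram(mProgram);
        int aPosition = GLES20.glGetAttribLocation(mProgram, "aPosition");
        int aTexCoord = GLES20.glGetAttribLocation(mProgram, "aTexCoord");
        mQuad.position(0);
        GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_FLOAT, false, 16, mQuad);
        GLES20.glEnableVertexAttribArray(aPosition);
        mQuad.position(2);
        GLES20.glVertexAttribPointer(aTexCoord, 2, GLES20.GL_FLOAT, false, 16, mQuad);
        GLES20.glEnableVertexAttribArray(aTexCoord);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }

    private static int buildProgram(String vertexSource, String fragmentSource) {
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, compileShader(GLES20.GL_VERTEX_SHADER, vertexSource));
        GLES20.glAttachShader(program, compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource));
        GLES20.glLinkProgram(program);
        return program;
    }

    private static int compileShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        return shader;
    }
}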

The main decoding logic is implemented in VideoDecoder:

public class VideoDecoder extends Thread {
    private static final int MAX_RECV_BUFFER_SIZE = 65535;
    private static final String TAG = "VideoDecoder";
    private MediaCodec mDecoder;
    private final int mOutputWidth;
    private final int mOutputHeight;
    private final VideoRenderer mVideoRenderer;
    private Tunnel mTunnel;
    private TimeLine mTimeline = new TimeLine();
    private BufferManager mBufferManager;
    private FrameDataMerger mCurrentMerger;
    private Object mCurrentMergerLock = new Object();
    SocketPackageUtils.PackageHeader mHeader = new SocketPackageUtils.PackageHeader();

    public VideoDecoder(VideoRenderer renderer, int width, int height) {
        mOutputWidth = width;
        mOutputHeight = height;
        mVideoRenderer = renderer;
        mBufferManager = new BufferManager(60);
        setPriority(Thread.MAX_PRIORITY);
    }

    private void recvData() {
        byte[] recvBuffer = new byte[MAX_RECV_BUFFER_SIZE];
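        // recv() is not shown in the post; it reads one network packet into recvBuffer
        // and returns the number of bytes received.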
        int size = recv(recvBuffer, recvBuffer.length);
        ByteBuffer buffer = ByteBuffer.wrap(recvBuffer, 0, size);
        buffer.order(ByteOrder.BIG_ENDIAN);
        buffer.position(0);

        SocketPackageUtils.parsePackageHeader(buffer, mHeader);
        ByteBuffer decodeBuffer = null;
        if (mHeader.mPackageType == SocketPackageUtils.PACKAGE_TYPE_CODEC_FRAME) {
            SocketPackageUtils.CodecFrameData frameData = new SocketPackageUtils.CodecFrameData();
            SocketPackageUtils.parseCodecFrameData(buffer, frameData);
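            // A frame larger than one packet arrives as several fragments: frameDataSize is
            // the size of this fragment, frameDataTotalSize the size of the whole encoded
            // frame, and FrameDataMerger reassembles fragments keyed on frameID.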
            if (frameData.frameDataSize != frameData.frameDataTotalSize) {
                Log.i(TAG, "   merge the frameData frameId=" + frameData.frameID);
                synchronized (mCurrentMergerLock) {
                    if (mCurrentMerger != null && mCurrentMerger.getFrameId() == frameData.frameID) {
                        boolean complete = mCurrentMerger.mergeFrameData(buffer, frameData);
                        if (complete) {
                            Log.i(TAG, "  complete frame data merged.");
                            decodeBuffer = ByteBuffer.wrap(mCurrentMerger.getFrameData());
                            decodeBuffer.position(0);
                            decodeBuffer.limit(frameData.frameDataTotalSize);
                            mCurrentMerger = null;
                        } else {
                            Log.i(TAG, "not complete frame frameId=" + frameData.frameID);
                            return;
                        }
                    } else {
                        mCurrentMerger = new FrameDataMerger(frameData.frameDataTotalSize, frameData.frameID);
                        mCurrentMerger.mergeFrameData(buffer, frameData);
                        return;
                    }
                }
            } else {
                decodeBuffer = buffer;
                decodeBuffer.position(SocketPackageUtils.PACKAGE_HEADER_LENGTH + SocketPackageUtils.PACKAGE_CODEC_FRAME_INFO_LENGTH);
                decodeBuffer.limit(SocketPackageUtils.PACKAGE_HEADER_LENGTH + SocketPackageUtils.PACKAGE_CODEC_FRAME_INFO_LENGTH + frameData.frameDataSize);
            }
            frameData.buffer = decodeBuffer;
            mBufferManager.push(frameData);
        }
    }

    private void createDecoder() {
        try {
            mDecoder = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, mOutputWidth, mOutputHeight);
            format.setInteger(MediaFormat.KEY_LOW_LATENCY, 1);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
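            // KEY_LOW_LATENCY was added in API 30; decoders that do not support the
            // low-latency feature simply ignore it. No csd-0/csd-1 (SPS/PPS) is set here,
            // so the parameter sets are expected to arrive in-band before the first IDR frame.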
            // Wait until the GL thread has created the SurfaceTexture-backed Surface,
            // then hand it to the decoder so decoded frames are rendered directly.
            Surface surface = mVideoRenderer.getSurface();
            while (surface == null) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                surface = mVideoRenderer.getSurface();
            }
            DebugUtils.d(TAG, "surface available");
            mDecoder.configure(format, surface, null, 0);
            mDecoder.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // The decoder thread: create and configure MediaCodec, start a separate thread that
    // drains decoded output (decode()), then keep feeding encoded input (putBuffer()).
    // recvData() is expected to be driven by the network receive path and only pushes
    // frames into mBufferManager.
    @Override
    public void run() {
        createDecoder();
        decode();
        putBuffer();
    }

    private void decode() {
        // Output-drain loop on its own thread: dequeue decoded buffers, pace them
        // against the shared timeline, and release them to the Surface for rendering.
        new Thread("Decode") {
            @Override
            public void run() {
                while (true) {
                    try {
                        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                        // The timeout is in microseconds; wait up to 100 ms for a decoded frame.
                        int decoderStatus = mDecoder.dequeueOutputBuffer(info, 100000 /* 100 ms */);
                        if (decoderStatus >= 0) {
                            long presentationTime = info.presentationTimeUs / 1000;
                            boolean render = (info.size != 0);
                            long waitTime = mTimeline.getWaitTimeForRenderBuffer(presentationTime);
                            if (waitTime > 0) {
                                try {
                                    Thread.sleep(waitTime);
                                } catch (InterruptedException e) {
                                    e.printStackTrace();
                                }
                            }
                            // Passing render=true sends the frame to the decoder's output Surface.
                            mDecoder.releaseOutputBuffer(decoderStatus, render);
                        }
                    } catch (Throwable e) {
                        e.printStackTrace();
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException interruptedException) {
                            interruptedException.printStackTrace();
                        }
                    }
                }
            }
        }.start();
    }

    private void putBuffer() {
        while (true) {
            try {
                // Block until the decoder has a free input buffer.
                int decodeInputIndex = mDecoder.dequeueInputBuffer(-1);
                ByteBuffer decodeInputBuf = mDecoder.getInputBuffer(decodeInputIndex);
                decodeInputBuf.clear();

                // mBufferManager.pop() is assumed to be non-blocking; this spins until a
                // complete frame is available (a blocking queue or a short sleep would
                // avoid the busy-wait).
                SocketPackageUtils.CodecFrameData frameData = null;
                while (frameData == null) {
                    frameData = mBufferManager.pop();
                }

                int frameSize = frameData.buffer.remaining();
                decodeInputBuf.put(frameData.buffer);
                try {
                    mDecoder.queueInputBuffer(decodeInputIndex, 0, frameSize,
                            frameData.presentationTimeUs, frameData.frameFlags);
                } catch (Throwable e) {
                    e.printStackTrace();
                }
            } catch (Throwable e) {
                e.printStackTrace();
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException interruptedException) {
                    interruptedException.printStackTrace();
                }
            }
        }
    }
}
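
The post does not show how the pieces are wired together, nor where recvData() is called from (presumably a network receive thread). Below is a hypothetical Activity tying everything together, assuming the MyGLSurfaceView sketch above; R.layout.activity_player, getVideoRenderer(), and the 1920x1080 resolution are assumptions.

import android.app.Activity;
import android.os.Bundle;

public class PlayerActivity extends Activity {
    private VideoDecoder mDecoder;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_player);       // layout containing MyGLSurfaceView

        MyGLSurfaceView videoView = findViewById(R.id.videoView);
        VideoRenderer renderer = videoView.getVideoRenderer();

        // VideoDecoder waits internally until the renderer's Surface exists, configures
        // MediaCodec with it, and then decodes whatever the network side pushes into
        // its BufferManager via recvData().
        mDecoder = new VideoDecoder(renderer, 1920, 1080);
        mDecoder.start();
    }
}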