Recording MP4 with FFmpeg under Qt: supporting video (H.264, H.265) and audio (AAC, G711 aLaw, G711 muLaw)

Preface

    MP4 is the most widely used, internationally standard container format. It plays in virtually every common player, takes up little disk space, and gives reasonably clear picture quality. The container natively supports H.264 video and AAC audio; other codecs need extra handling. This article shares wrapper code for recording MP4 with FFmpeg. As tested, it supports H.264 and H.265 on the video side, and AAC plus G711 aLaw/muLaw on the audio side. Supporting some of these codecs requires modifying the FFmpeg source; a pre-built FFmpeg is provided, along with a description of the changes needed in the source.

1. Timestamp handling

    While recording MP4 I ran into a problem: after recording a live stream, playback did not start from 0 seconds. With the Windows built-in media player the picture stayed frozen at first and real playback only began after the n-th second, while VLC could jump straight to second n and play from there. The cause is poor timestamp handling: the first frame must be remembered and its timestamp forced to 0, and every later frame's timestamp becomes the current timestamp minus the first frame's timestamp. The code is as follows:
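The relevant excerpt from saveOneFrame() in the full listing (section 4), video path shown; the audio path does the same with m_nFirstAudioPts:

// Timestamp handling: rebase all timestamps onto the first frame
if (m_nFirstVideoPts == 0)
{
    m_nFirstVideoPts = m_pktFilter->pts; // remember the first frame's pts
    m_pktFilter->pts = 0;
    m_pktFilter->dts = 0;
}
else
{
    m_pktFilter->pts = m_pktFilter->pts - m_nFirstVideoPts;
    m_pktFilter->dts = m_pktFilter->dts - m_nFirstVideoPts;
}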

2. Adding the H.264/H.265/AAC decoding header information

    The decoding header information is stored in the extradata of the codec context (AVCodecContext): the SPS/PPS for H.264, the ADTS header information for AAC, and the VPS/SPS/PPS for H.265. A bitstream filter (AVBitStreamFilter) is used to add the appropriate header information for each format so that a decoder can decode the stream correctly. The relevant code for adding the decoding headers follows.
    Video, at initialization:
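Condensed from Init() in the full listing (the full code keeps separate H.264 and HEVC branches):

// 1. Look up the bitstream filter that matches the video codec
const AVBitStreamFilter *bsf = av_bsf_get_by_name(
        nCodecType == AV_CODEC_ID_HEVC ? "hevc_mp4toannexb" : "h264_mp4toannexb");
if (!bsf)
    return false;

// 2. Allocate the filter context
av_bsf_alloc(bsf, &m_pBsfc);

// 3. Copy the stream's codec parameters into the filter
avcodec_parameters_copy(m_pBsfc->par_in, pInCodecpar);

// 4. Initialize the filter context
av_bsf_init(m_pBsfc);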
    Video, in the frame-reading loop:
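Excerpt from saveOneFrame(): each video packet is pushed through the filter and its timestamps are rescaled to the output stream's time base:

av_bsf_send_packet(m_pBsfc, &pkt);
av_bsf_receive_packet(m_pBsfc, m_pktFilter);

m_pktFilter->pts = av_rescale_q_rnd(m_pktFilter->pts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
m_pktFilter->dts = av_rescale_q_rnd(m_pktFilter->dts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
m_pktFilter->duration = av_rescale_q_rnd(m_pktFilter->duration, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
m_pktFilter->stream_index = pOutStream->index;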
    Audio, at initialization:
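Excerpt from Init(): the "aac_adtstoasc" filter is set up when the audio is AAC (or when G711 is being transcoded to AAC):

// 1. Look up the AAC bitstream filter
const AVBitStreamFilter *bsf = av_bsf_get_by_name("aac_adtstoasc");
if (!bsf)
    return false;

// 2. Allocate the filter context
av_bsf_alloc(bsf, &m_pBsfcAAC);

// 3. Copy the stream's codec parameters into the filter
avcodec_parameters_copy(m_pBsfcAAC->par_in, pInCodecpar);

// 4. Initialize the filter context
av_bsf_init(m_pBsfcAAC);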
    Audio, in the frame-reading loop:
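Excerpt from saveOneFrame(), the AAC path (untranscoded G711 packets skip the filter and are rescaled and written directly):

av_bsf_send_packet(m_pBsfcAAC, &pkt);
av_bsf_receive_packet(m_pBsfcAAC, m_pktFilterAudio);

m_pktFilterAudio->pts = av_rescale_q_rnd(m_pktFilterAudio->pts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
m_pktFilterAudio->dts = av_rescale_q_rnd(m_pktFilterAudio->dts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
m_pktFilterAudio->duration = av_rescale_q_rnd(m_pktFilterAudio->duration, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
m_pktFilterAudio->stream_index = pOutStream->index;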

3. Making FFmpeg support G711 aLaw/muLaw

Out of the box, the MP4 muxer refuses G711 audio, so the FFmpeg source needs a small change. In the FFmpeg source file movenc.c, find the mov_write_audio_tag function and modify it to handle aLaw, and add the corresponding aLaw entry in the same file; the aLaw MKTAG is 'a','l','a','w'. The muLaw change is similar, with MKTAG 'u','l','a','w'.
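As a rough, version-dependent illustration only (not the author's exact patch; in newer FFmpeg releases the MP4 codec-tag table lives in isom_tags.c rather than movenc.c), the change boils down to registering the G711 tags with the MP4 muxer so that it stops rejecting these streams:

/* Hypothetical sketch, not the original patch: add the G711 entries to the
 * MP4 muxer's codec tag table (codec_mp4_tags). */
static const AVCodecTag codec_mp4_tags[] = {
    /* ... existing entries ... */
    { AV_CODEC_ID_PCM_ALAW,  MKTAG('a','l','a','w') }, /* added */
    { AV_CODEC_ID_PCM_MULAW, MKTAG('u','l','a','w') }, /* added */
    { AV_CODEC_ID_NONE, 0 },
};

/* In mov_write_audio_tag(), make sure AV_CODEC_ID_PCM_ALAW and
 * AV_CODEC_ID_PCM_MULAW are handled when the sound sample description is
 * written, so that the tags above are actually emitted for these codecs. */

After rebuilding FFmpeg with this kind of change, pcm_alaw/pcm_mulaw streams can be written into the .mp4 container.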

4. Full code

mp4recorder.h

#ifndef MP4RECORDER_H
#define MP4RECORDER_H

extern "C"
{
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libavfilter/avfilter.h"
    #include "libswscale/swscale.h"
    #include "libavutil/frame.h"
    #include "libavutil/imgutils.h"
    #include "libavcodec/bsf.h"
}

#include <QObject>
#include <QMutex>

class mp4Recorder : public QObject
{
    Q_OBJECT
public:
    explicit mp4Recorder(QObject *parent = nullptr);
    virtual ~mp4Recorder();

    bool Init(AVFormatContext *pIfmtCtx, int nCodecType, int nAudioCodecType, QString& sFile);
    bool DeInit();
    bool isInit() {return m_bInit;}
    bool saveOneFrame(AVPacket& pkt, int nCodecType, int nAudioCodecType);

private:
    uint64_t         m_nCounts;
    bool             m_bFirstGoP;
    bool             m_bInit;
    QString          m_sRecordFile;
    AVFormatContext *m_pIfmtCtx;
    AVFormatContext *m_pOfmtCtx; // output format context; streams are copied from the input
    const AVOutputFormat  *m_pOfmt; // output (file) container format.
    QMutex           m_lock;
    int64_t          m_nVideoTimeStamp;
    int              m_nVideoDuration;
    int              m_nVideoIndex = -1;
    int              m_nAudioIndex = -1;
    int              m_nSpsPpsSize = 0;
    AVBSFContext    *m_pBsfc = nullptr;
    AVBSFContext    *m_pBsfcAAC = nullptr;
    AVPacket        *m_pktFilter = nullptr;
    AVPacket        *m_pktFilterAudio = nullptr;
    int64_t         m_nFirstVideoPts = 0;
    int64_t         m_nFirstAudioPts = 0;
    bool            m_bTransCode = false;

    // stream map.
    int  *m_pStreamMapping;
    int   m_nMappingSize;

};

#endif // MP4RECORDER_H

mp4recorder.cpp

#include "mp4recorder.h"
#include "commondef.h"
#include "cteasyaacencoder.h"

#define TRANSCODE 0

mp4Recorder::mp4Recorder(QObject *parent) : QObject(parent)
{
    QMutexLocker guard(&m_lock);
    m_sRecordFile.clear();
    m_pIfmtCtx = nullptr;
    m_pOfmtCtx = nullptr;
    m_pOfmt = nullptr;
    m_pStreamMapping = nullptr;
    m_nMappingSize = 0;
    m_nCounts = 0;
    m_bFirstGoP = false;
    m_bInit = false;
}

mp4Recorder::~mp4Recorder()
{
    DeInit();
}

bool mp4Recorder::Init(AVFormatContext *pIfmtCtx, int nCodecType, int nAudioCodecType, QString &sFile)
{
    QMutexLocker guard(&m_lock);
    if(!pIfmtCtx || sFile.isEmpty())
    {
        MY_DEBUG << "sFile.isEmpty().";
        return false;
    }

    m_sRecordFile = sFile;
    m_pIfmtCtx = pIfmtCtx;

    QByteArray ba = m_sRecordFile.toLatin1();
    const char* pOutFile = ba.data();

    qDebug() << "pOutFile:" << pOutFile;

    unsigned i = 0;
    int ret = 0;
    int stream_index = 0;

    // 1. create output context
    avformat_alloc_output_context2(&m_pOfmtCtx, nullptr, nullptr, pOutFile);
    if (!m_pOfmtCtx)
    {
        MY_DEBUG << "Could not create output context.";
        ret = AVERROR_UNKNOWN;
        goto end;
    }

    // 2. get memory.
    m_nMappingSize = pIfmtCtx->nb_streams;
    m_pStreamMapping = (int*)av_mallocz_array(m_nMappingSize, sizeof(*m_pStreamMapping));
    if (!m_pStreamMapping)
    {
        MY_DEBUG << "av_mallocz_array fail.";
        ret = AVERROR(ENOMEM);
        goto end;
    }

    // 3. copy stream information.
    m_pOfmt = m_pOfmtCtx->oformat;
    for (i = 0; i < pIfmtCtx->nb_streams; i++)
    {
        AVStream *pOutStream;
        AVStream *pInStream = pIfmtCtx->streams[i];
        AVCodecParameters *pInCodecpar = pInStream->codecpar;
        if (pInCodecpar->codec_type != AVMEDIA_TYPE_AUDIO &&
            pInCodecpar->codec_type != AVMEDIA_TYPE_VIDEO &&
            pInCodecpar->codec_type != AVMEDIA_TYPE_SUBTITLE)
        {
            m_pStreamMapping[i] = -1;
            continue;
        }

        if(pInCodecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            m_nVideoIndex = i;

            // 1. Look up the bitstream filter for this codec
            if(nCodecType == AV_CODEC_ID_HEVC)
            {
                const AVBitStreamFilter *bsf = av_bsf_get_by_name("hevc_mp4toannexb");
                if (!bsf)
                {
                    MY_DEBUG << "av_bsf_get_by_name() video failed";
                    return false;
                }
                // 2. Allocate the filter context
                av_bsf_alloc(bsf, &m_pBsfc);
            }
            else
            {
                const AVBitStreamFilter *bsf = av_bsf_get_by_name("h264_mp4toannexb");
                if (!bsf)
                {
                    MY_DEBUG << "av_bsf_get_by_name() video failed";
                    return false;
                }
                // 2. Allocate the filter context
                av_bsf_alloc(bsf, &m_pBsfc);
            }

            // 3. Copy the stream's codec parameters into the filter
            avcodec_parameters_copy(m_pBsfc->par_in, pInCodecpar);

            // 4. Initialize the filter context
            av_bsf_init(m_pBsfc);
        }
        else if(pInCodecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            m_nAudioIndex = i;

#if TRANSCODE
            if(nAudioCodecType == AV_CODEC_ID_PCM_ALAW || nAudioCodecType == AV_CODEC_ID_PCM_MULAW)
            {
                MY_DEBUG << "ctEasyAACEncoder Init";
                if(nAudioCodecType == AV_CODEC_ID_PCM_ALAW)
                    ctEasyAACEncoder::getInstance().Init(Law_ALaw);
                else
                    ctEasyAACEncoder::getInstance().Init(Law_ULaw);
                m_bTransCode = true;
            }
            else
                m_bTransCode = false;

#endif
            if(m_bTransCode || nAudioCodecType == AV_CODEC_ID_AAC)
            {
                // 1. Look up the AAC bitstream filter
                const AVBitStreamFilter *bsf = av_bsf_get_by_name("aac_adtstoasc");
                if (!bsf)
                {
                    MY_DEBUG << "av_bsf_get_by_name() audio failed";
                    return false;
                }

                // 2. Allocate the filter context
                av_bsf_alloc(bsf, &m_pBsfcAAC);

                // 3. Copy the stream's codec parameters into the filter
                avcodec_parameters_copy(m_pBsfcAAC->par_in, pInCodecpar);

                // 4. Initialize the filter context
                av_bsf_init(m_pBsfcAAC);
            }

#if TRANSCODE
            if(m_bTransCode)
                m_pBsfcAAC->par_in->codec_id = AV_CODEC_ID_AAC;
#endif

        }

        // fill the stream index.
        m_pStreamMapping[i] = stream_index++;

        // copy the codec parameters to the new output stream.
        pOutStream = avformat_new_stream(m_pOfmtCtx, nullptr);
        if (!pOutStream)
        {
            MY_DEBUG << "Failed allocating output stream";
            ret = AVERROR_UNKNOWN;
            goto end;
        }

        ret = avcodec_parameters_copy(pOutStream->codecpar, pInCodecpar);
        if (ret < 0)
        {
            MY_DEBUG << "Failed to copy codec parameters";
            goto end;
        }
#if TRANSCODE
        if(m_bTransCode && pInCodecpar->codec_type == AVMEDIA_TYPE_AUDIO)
            pOutStream->codecpar->codec_id = AV_CODEC_ID_AAC;
#endif

        //pOutStream->codecpar->bit_rate = 2000000;
        //pOutStream->codecpar->codec_tag = 0;
    }

    // 4. create MP4 header.
    if (!(m_pOfmt->flags & AVFMT_NOFILE)) // the muxer writes to a real file, so open it
    {
        ret = avio_open(&m_pOfmtCtx->pb, pOutFile, AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            MY_DEBUG << "Could not open output file " << m_sRecordFile;
            goto end;
        }
    }
    // 5. write file header.
    ret = avformat_write_header(m_pOfmtCtx, nullptr);
    if (ret < 0)
    {
        MY_DEBUG << "Error occurred when opening output file ret:" << ret;
        goto end;
    }

    m_pktFilter = new AVPacket;
    av_init_packet(m_pktFilter);
    m_pktFilter->data = NULL;
    m_pktFilter->size = 0;

    m_pktFilterAudio = new AVPacket;
    av_init_packet(m_pktFilterAudio);
    m_pktFilterAudio->data = NULL;
    m_pktFilterAudio->size = 0;

    m_nFirstVideoPts = 0;
    m_nFirstAudioPts = 0;


    m_bFirstGoP = false;
    m_bInit = true;
    m_nCounts = 0;

    return true;

end:
    DeInit();
    if (ret < 0 && ret != AVERROR_EOF)
    {
        MY_DEBUG << "Error occurred.";
    }
    return false;
}

bool mp4Recorder::DeInit()
{
    // 1. write the file trailer.
    if(m_bInit && m_pOfmtCtx)
    {
        av_write_trailer(m_pOfmtCtx);
    }
    m_bInit = false;

    // 2. close output
    if (m_pOfmtCtx && m_pOfmtCtx->oformat && !(m_pOfmtCtx->oformat->flags & AVFMT_NOFILE))
    {
        avio_closep(&m_pOfmtCtx->pb);
    }

    // 3. free context.
    if(m_pOfmtCtx)
    {
        avformat_free_context(m_pOfmtCtx);
        m_pOfmtCtx = nullptr;
    }
    av_freep(&m_pStreamMapping);

    if(m_pBsfc)
    {
        av_bsf_free(&m_pBsfc);
        m_pBsfc = nullptr;
    }

    if(m_pBsfcAAC)
    {
        av_bsf_free(&m_pBsfcAAC);
        m_pBsfcAAC = nullptr;
    }

    // release the packets allocated in Init().
    if(m_pktFilter)
    {
        av_packet_unref(m_pktFilter);
        delete m_pktFilter;
        m_pktFilter = nullptr;
    }

    if(m_pktFilterAudio)
    {
        av_packet_unref(m_pktFilterAudio);
        delete m_pktFilterAudio;
        m_pktFilterAudio = nullptr;
    }

#if TRANSCODE
    if(m_bTransCode)
    {
        ctEasyAACEncoder::getInstance().DeInit();
        m_bTransCode = false;
    }
#endif

    return true;
}

bool mp4Recorder::saveOneFrame(AVPacket &pkt, int nCodecType, int nAudioCodecType)
{
    int ret = 0;
    if(!m_bInit)
    {
        return false;
    }
    AVStream *pInStream, *pOutStream;

    if(nCodecType == AV_CODEC_ID_H264)
    {
        if(m_bFirstGoP == false)
        {
            if(!(pkt.flags & AV_PKT_FLAG_KEY))
            {
                av_packet_unref(&pkt);
                return false; // first frame must be Iframe.
            }
            else
            {
                m_bFirstGoP = true;
            }
        }
    }

    pInStream  = m_pIfmtCtx->streams[pkt.stream_index];
    if (pkt.stream_index >= m_nMappingSize ||
        m_pStreamMapping[pkt.stream_index] < 0)
    {
        av_packet_unref(&pkt);
        return true;
    }

    pkt.stream_index = m_pStreamMapping[pkt.stream_index];
    pOutStream = m_pOfmtCtx->streams[pkt.stream_index];

    if(pInStream->codecpar->codec_type != AVMEDIA_TYPE_VIDEO &&
            pInStream->codecpar->codec_type != AVMEDIA_TYPE_AUDIO)
    {
        av_packet_unref(&pkt);
        return false;
    }

    if(pInStream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
    {
        av_bsf_send_packet(m_pBsfc, &pkt);
        av_bsf_receive_packet(m_pBsfc, m_pktFilter);

        m_pktFilter->pts = av_rescale_q_rnd(m_pktFilter->pts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
        m_pktFilter->dts = av_rescale_q_rnd(m_pktFilter->dts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
        m_pktFilter->duration = av_rescale_q_rnd(m_pktFilter->duration, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
        m_pktFilter->stream_index = pOutStream->index;


        // Timestamp handling: rebase all timestamps onto the first frame
        if(m_nFirstVideoPts == 0)
        {
            m_nFirstVideoPts = m_pktFilter->pts;
            m_pktFilter->pts = 0;
            m_pktFilter->dts = 0;
        }
        else
        {
            m_pktFilter->pts = m_pktFilter->pts - m_nFirstVideoPts;
            m_pktFilter->dts = m_pktFilter->dts - m_nFirstVideoPts;
        }

        //av_packet_rescale_ts(&pkt, pInStream->time_base, pOutStream->time_base);
        m_pktFilter->pos = -1;
        m_pktFilter->flags |= AV_PKT_FLAG_KEY;

        ret = av_interleaved_write_frame(m_pOfmtCtx, m_pktFilter);
        av_packet_unref(&pkt);
        if (ret < 0)
        {
            qDebug() << "Video Error muxing packet";
        }
    }
    else
    {
#if TRANSCODE

        if(m_bTransCode)
        {
            AVPacket* pAACPkt = av_packet_clone(&pkt);
            if(ctEasyAACEncoder::getInstance().G711ToAAC(pkt.data, pkt.size, pAACPkt->data, pAACPkt->size) == false)
            {
                av_packet_unref(&pkt);
                return false;
            }

            av_bsf_send_packet(m_pBsfcAAC, pAACPkt);
            av_bsf_receive_packet(m_pBsfcAAC, m_pktFilterAudio);
        }
        else
#endif
        if(m_bTransCode || nAudioCodecType == AV_CODEC_ID_AAC)
        {
            av_bsf_send_packet(m_pBsfcAAC, &pkt);
            av_bsf_receive_packet(m_pBsfcAAC, m_pktFilterAudio);

            m_pktFilterAudio->pts = av_rescale_q_rnd(m_pktFilterAudio->pts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
            m_pktFilterAudio->dts = av_rescale_q_rnd(m_pktFilterAudio->dts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
            m_pktFilterAudio->duration = av_rescale_q_rnd(m_pktFilterAudio->duration, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
            m_pktFilterAudio->stream_index = pOutStream->index;

            // Use the offset from the first audio pts as the timestamp
            if(m_nFirstAudioPts == 0)
            {
                m_nFirstAudioPts = m_pktFilterAudio->pts;
                m_pktFilterAudio->pts = 0;
                m_pktFilterAudio->dts = 0;
            }
            else
            {
                m_pktFilterAudio->pts = m_pktFilterAudio->pts - m_nFirstAudioPts;
                m_pktFilterAudio->dts = m_pktFilterAudio->dts - m_nFirstAudioPts;
            }

            m_pktFilterAudio->pos = -1;
            m_pktFilterAudio->flags |= AV_PKT_FLAG_KEY;

            ret = av_interleaved_write_frame(m_pOfmtCtx, m_pktFilterAudio);
        }
        else
        {
            pkt.pts = av_rescale_q_rnd(pkt.pts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
            pkt.dts = av_rescale_q_rnd(pkt.dts, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
            pkt.duration = av_rescale_q_rnd(pkt.duration, pInStream->time_base, pOutStream->time_base, AV_ROUND_NEAR_INF);
            pkt.stream_index = pOutStream->index;

            // Use the offset from the first audio pts as the timestamp
            if(m_nFirstAudioPts == 0)
            {
                m_nFirstAudioPts = pkt.pts;
                pkt.pts = 0;
                pkt.dts = 0;
            }
            else
            {
                pkt.pts = pkt.pts - m_nFirstAudioPts;
                pkt.dts = pkt.dts - m_nFirstAudioPts;
            }

            pkt.pos = -1;
            pkt.flags |= AV_PKT_FLAG_KEY;

            ret = av_interleaved_write_frame(m_pOfmtCtx, &pkt);
        }
        av_packet_unref(&pkt);
        if (ret < 0)
        {
            qDebug() << "Audio Error muxing packet";
        }
    }
    return (ret == 0);
}
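
How the recorder is driven is not shown above; the following is a minimal usage sketch, assuming the input AVFormatContext has already been opened elsewhere and that the codec IDs were read from the input streams:

// Hypothetical driver loop (not part of the original class).
AVFormatContext *pIfmtCtx = nullptr;  // assumed: opened with avformat_open_input() + avformat_find_stream_info()
int nVideoCodec = AV_CODEC_ID_H264;   // assumed: read from the input video stream's codecpar->codec_id
int nAudioCodec = AV_CODEC_ID_AAC;    // assumed: read from the input audio stream's codecpar->codec_id

mp4Recorder recorder;
QString sFile = "record.mp4";
if (!recorder.Init(pIfmtCtx, nVideoCodec, nAudioCodec, sFile))
    return;

AVPacket pkt;
while (av_read_frame(pIfmtCtx, &pkt) >= 0)
{
    // Rescales timestamps, runs the bitstream filters, writes the packet
    // and unrefs pkt internally.
    recorder.saveOneFrame(pkt, nVideoCodec, nAudioCodec);
    // ... break out when the recording should stop ...
}

recorder.DeInit(); // writes the MP4 trailer and closes the file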

5. FFmpeg library download

Download link: https://download.youkuaiyun.com/download/linyibin_123/87542123
