Displaying a Live Camera Feed with FFmpeg + QML

This article shows how to capture camera frames with FFmpeg and, where needed, convert the YUV data to RGB24 with swscale. The key functions are open(), capture(), and frameToRgbImage().


The FFmpeg capture code is as follows:

#ifndef CAMERACAPTURE_H
#define CAMERACAPTURE_H
#include <QObject>
#include <atomic>
#include <QMutex>
#include <QMutexLocker>
extern "C"
{
#include <libavdevice/avdevice.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#include <libavutil/parseutils.h>
}

class CameraCapture;
class CameraData
{
public:
    enum PixelFormat
    {
        PIXFORMAT_YUV420P,
        PIXFORMAT_YUV422P,
        PIXFORMAT_YUV444P,
        PIXFORMAT_RGB24
    };

public:
    CameraData(QMutex *mutex)
        :m_pMutex(mutex){
        //qRegisterMetaType<CameraData>("CameraData");
    }
    ~CameraData(){}

    int getWidth(void) const {return m_nWidth;}
    int getHeight(void) const {return m_nHeight;}
    QByteArray getCameraData(void) const {
        QMutexLocker locker(m_pMutex);
        return m_cameraData;
    }
    PixelFormat getPixelFormat(void) const {return m_pixelFormat;}

    friend class CameraCapture;

private:
    QByteArray m_cameraData;
    std::atomic<int> m_nWidth {0};
    std::atomic<int> m_nHeight {0};
    PixelFormat m_pixelFormat;

    QMutex* m_pMutex = nullptr;
};

class CameraCapture : public QObject
{
public:
    CameraCapture(QObject* parent = nullptr);
    virtual ~CameraCapture();

    // Open the camera device
    bool open(const QString& deviceName);
    // Close the camera device
    void close(void);

    // Capture and decode one frame
    bool capture(void);

    // Whether swscale is used to convert frames to RGB24
    void setUsedSwsScaleEnabled(bool isEnabled);

    // Access the most recently captured frame data
    const CameraData& getCameraData(void) {return m_pCameraData;}

    AVFrame* m_avFrame = nullptr;
    AVFrame* m_pRGBFrame = nullptr;

    int m_nVideoStreamIndex = -1;
    AVFormatContext* m_pFormatContent = nullptr;
    AVCodecContext* m_pCaptureContext = nullptr;

    SwsContext* m_pSwsContext = nullptr;

    CameraData m_pCameraData;
    AVPixelFormat m_pixelFormat;
    QMutex m_pMutex;

    bool m_isUsedSwsScale = true;

    void frameToRgbImage(AVFrame* pDest, AVFrame* frame);
    AVPixelFormat convertDeprecatedFormat(enum AVPixelFormat format);

    // Pack the planar YUV data of the current frame into a single buffer
    void disposeYUVData(void);
};

#endif // CAMERACAPTURE_H
#include "cameracapture.h"
#include <QDebug>

CameraCapture::CameraCapture(QObject* parent)
    :QObject(parent),
     m_pCameraData(&m_pMutex)
{
    avdevice_register_all();
}
CameraCapture::~CameraCapture()
{
    this->close();
}
bool CameraCapture::open(const QString& deviceName)
{
    m_avFrame = av_frame_alloc();

    AVInputFormat *inputFormat = av_find_input_format("dshow");

    AVDictionary *format_opts =  nullptr;
    //av_dict_set_int(&format_opts, "rtbufsize", 3041280 * 10, 0);
    av_dict_set(&format_opts, "avioflags", "direct", 0);
    av_dict_set(&format_opts, "video_size", "1280x720", 0);
    av_dict_set(&format_opts, "framerate", "30", 0);
    av_dict_set(&format_opts, "vcodec", "mjpeg", 0);

    m_pFormatContent = avformat_alloc_context();
    QString urlString = QString("video=") + deviceName;
    // Open the input device
    int result = avformat_open_input(&m_pFormatContent, urlString.toLocal8Bit().data(), inputFormat, &format_opts);
    av_dict_free(&format_opts);     // the options dictionary is no longer needed
    if (result < 0)
    {
        qDebug() << "AVFormat Open Input Error!";
        return false;
    }

    result = avformat_find_stream_info(m_pFormatContent, nullptr);
    if (result < 0)
    {
        qDebug() << "AVFormat Find Stream Info Error!";
        return false;
    }

    // find Video Stream Index
    int count = m_pFormatContent->nb_streams;
    for (int i=0; i<count; ++i)
    {
        if (m_pFormatContent->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            m_nVideoStreamIndex = i;
            break;
        }
    }

    if (m_nVideoStreamIndex < 0)
        return false;

    // Find a decoder for the video stream.
    // Note: stream->codec is the legacy (pre-FFmpeg 4) API; newer code would use
    // streams[i]->codecpar together with avcodec_alloc_context3().
    m_pCaptureContext = m_pFormatContent->streams[m_nVideoStreamIndex]->codec;
    AVCodec* codec = avcodec_find_decoder(m_pCaptureContext->codec_id);
    if (codec == nullptr)
        return false;

    // Open the decoder
    if (avcodec_open2(m_pCaptureContext, codec, nullptr) != 0)
        return false;

    // Store the frame size and pixel format reported by the decoder
    m_pCameraData.m_nWidth = m_pCaptureContext->width;
    m_pCameraData.m_nHeight = m_pCaptureContext->height;
    AVPixelFormat format = m_pCaptureContext->pix_fmt;
    format = convertDeprecatedFormat(format);

    // When swscale is enabled, capture() converts every frame to RGB24,
    // so the native pixel format does not need to be reported here.
    if (m_isUsedSwsScale)
        return true;

    // Otherwise expose the camera's native YUV layout; fall back to swscale
    // for any format the consumer cannot handle directly.
    if (format == AV_PIX_FMT_YUV420P)
        m_pCameraData.m_pixelFormat = CameraData::PIXFORMAT_YUV420P;
    else if (format == AV_PIX_FMT_YUV422P)
        m_pCameraData.m_pixelFormat = CameraData::PIXFORMAT_YUV422P;
    else if (format == AV_PIX_FMT_YUV444P)
        m_pCameraData.m_pixelFormat = CameraData::PIXFORMAT_YUV444P;
    else {
        m_pCameraData.m_pixelFormat = CameraData::PIXFORMAT_RGB24;
        m_isUsedSwsScale = true;
    }

    return true;
}
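
The device name passed to open() must match the Windows DirectShow device name exactly. One way to discover it (equivalent to running ffmpeg -list_devices true -f dshow -i dummy) is to ask the dshow demuxer to print the available devices to the FFmpeg log. The helper below is a sketch and not part of the original class:

// Sketch: list DirectShow capture devices via the dshow "list_devices" option.
// avformat_open_input is expected to fail here; the device names are written
// to the FFmpeg log as a side effect.
static void listDshowDevices()
{
    avdevice_register_all();
    AVInputFormat* inputFormat = av_find_input_format("dshow");
    AVDictionary* opts = nullptr;
    av_dict_set(&opts, "list_devices", "true", 0);
    AVFormatContext* ctx = nullptr;
    avformat_open_input(&ctx, "dummy", inputFormat, &opts);   // "dummy" is a placeholder URL
    av_dict_free(&opts);
    avformat_close_input(&ctx);                               // safe, ctx is null on failure
}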

// Close the camera device and release all FFmpeg resources
void CameraCapture::close(void)
{
    sws_freeContext(m_pSwsContext);
    m_pSwsContext = nullptr;

    av_frame_free(&m_avFrame);      // av_frame_free() also resets the pointer to nullptr
    av_frame_free(&m_pRGBFrame);

    if (m_pCaptureContext != nullptr)
        avcodec_close(m_pCaptureContext);
    m_pCaptureContext = nullptr;

    avformat_close_input(&m_pFormatContent);   // safe even if the context is already null
}

void CameraCapture::setUsedSwsScaleEnabled(bool isEnabled)
{
    m_isUsedSwsScale = isEnabled;
}

void CameraCapture::frameToRgbImage(AVFrame* pDest, AVFrame* frame)
{
    // Lazily create the swscale context on the first frame
    if (m_pSwsContext == nullptr)
    {
        m_pSwsContext = sws_getContext(frame->width, frame->height, convertDeprecatedFormat((AVPixelFormat)(frame->format)), \
            frame->width, frame->height, AV_PIX_FMT_RGB24, \
            SWS_BILINEAR, nullptr, nullptr, nullptr);
    }

    //avpicture_fill( )
    sws_scale(m_pSwsContext, frame->data, frame->linesize, 0, frame->height, \
        pDest->data, pDest->linesize);
}

// Map the deprecated full-range JPEG (YUVJ*) pixel formats onto their generic equivalents
AVPixelFormat CameraCapture::convertDeprecatedFormat(enum AVPixelFormat format)
{
    switch (format)
    {
    case AV_PIX_FMT_YUVJ420P:
        return AV_PIX_FMT_YUV420P;
    case AV_PIX_FMT_YUVJ422P:
        return AV_PIX_FMT_YUV422P;
    case AV_PIX_FMT_YUVJ444P:
        return AV_PIX_FMT_YUV444P;
    case AV_PIX_FMT_YUVJ440P:
        return AV_PIX_FMT_YUV440P;
    default:
        return format;
    }
}

void CameraCapture::disposeYUVData(void)
{
    QMutexLocker locker(&m_pMutex);
    m_pCameraData.m_cameraData.clear();

    AVPixelFormat pixFormat = convertDeprecatedFormat((AVPixelFormat)m_avFrame->format);
    // Append the Y plane (copy row by row when the line size is padded)
    if (m_avFrame->linesize[0] == m_avFrame->width)
    {
        m_pCameraData.m_cameraData.append((char*)m_avFrame->data[0], \
                m_avFrame->linesize[0] * m_avFrame->height);
    }
    else
    {
        for (int i=0; i<m_avFrame->height; ++i)
        {
            m_pCameraData.m_cameraData.append((char*)m_avFrame->data[0] + i * m_avFrame->linesize[0], m_avFrame->width);
        }
    }

    // Append the U plane; the chroma dimensions depend on the subsampling
    int uDataWidth = m_avFrame->width;
    int uDataHeight = m_avFrame->height;
    if (pixFormat == AV_PIX_FMT_YUV420P)
    {
        uDataWidth = uDataWidth / 2;
        uDataHeight = uDataHeight / 2;
    }
    else if (pixFormat == AV_PIX_FMT_YUV422P)
        uDataWidth = uDataWidth / 2;

    if (m_avFrame->linesize[1] == uDataWidth)
    {
        m_pCameraData.m_cameraData.append((char*)m_avFrame->data[1], \
                m_avFrame->linesize[1] * uDataHeight);
    }
    else
    {
        for (int i=0; i<uDataHeight; ++i)
        {
            m_pCameraData.m_cameraData.append((char*)m_avFrame->data[1] + i * m_avFrame->linesize[1], uDataWidth);
        }
    }

    // Append the V plane
    int vDataWidth = uDataWidth;
    int vDataHeight = uDataHeight;
    if (m_avFrame->linesize[2] == vDataWidth)
    {
        m_pCameraData.m_cameraData.append((char*)m_avFrame->data[2], \
                m_avFrame->linesize[2] * vDataHeight);
    }
    else
    {
        for (int i=0; i<vDataHeight; ++i)
        {
            m_pCameraData.m_cameraData.append((char*)m_avFrame->data[2] + i * m_avFrame->linesize[2], vDataWidth);
        }
    }
}
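
The packed buffer built above is simply the three planes concatenated, so a consumer (for example a YUV renderer on the QML side) only needs the width, height and pixel format to locate each plane. A minimal sketch; the struct and helper name are illustrative and not part of the original code:

// Sketch: compute where each plane starts inside the buffer built by disposeYUVData().
// Plane sizes follow the chroma subsampling:
//   YUV420P: U and V are (w/2) x (h/2)
//   YUV422P: U and V are (w/2) x h
//   YUV444P: U and V are w x h
struct YuvPlanes { const char* y; const char* u; const char* v; };

static YuvPlanes splitPackedYuv(const QByteArray& buf, int w, int h,
                                CameraData::PixelFormat fmt)
{
    int chromaW = w, chromaH = h;
    if (fmt == CameraData::PIXFORMAT_YUV420P) { chromaW = w / 2; chromaH = h / 2; }
    else if (fmt == CameraData::PIXFORMAT_YUV422P) { chromaW = w / 2; }

    YuvPlanes p;
    p.y = buf.constData();
    p.u = p.y + w * h;
    p.v = p.u + chromaW * chromaH;
    return p;
}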

// Capture and decode one frame
bool CameraCapture::capture(void)
{
    AVPacket pkt;
    // Read one packet from the device
    int result = av_read_frame(m_pFormatContent, &pkt);
    if (result)
        return false;

    if (pkt.stream_index != m_nVideoStreamIndex)
    {
        av_packet_unref(&pkt);
        return false;
    }

    // Send the packet to the decoder
    result = avcodec_send_packet(m_pCaptureContext, &pkt);
    if (result)
    {
        av_packet_unref(&pkt);
        return false;
    }

    result = avcodec_receive_frame(m_pCaptureContext, m_avFrame);
    if (result)
    {
        av_packet_unref(&pkt);
        return false;
    }
    // Convert to RGB24 when swscale is enabled; otherwise pack the native YUV planes
    if (m_isUsedSwsScale)
    {
        // Allocate the RGB frame once; av_image_alloc() fills in data[] and linesize[]
        if (m_pRGBFrame == nullptr)
        {
            m_pRGBFrame = av_frame_alloc();
            m_pRGBFrame->width = m_avFrame->width;
            m_pRGBFrame->height = m_avFrame->height;
            m_pRGBFrame->format = AV_PIX_FMT_RGB24;
            av_image_alloc(m_pRGBFrame->data, m_pRGBFrame->linesize,
                m_pRGBFrame->width, m_pRGBFrame->height, AV_PIX_FMT_RGB24, 1);
        }

        // Convert to RGB24
        frameToRgbImage(m_pRGBFrame, m_avFrame);

        // Copy the RGB data out under the mutex so getCameraData() sees a complete frame
        {
            QMutexLocker locker(&m_pMutex);
            m_pCameraData.m_cameraData.clear();
            m_pCameraData.m_cameraData.append((char*)m_pRGBFrame->data[0],
                    m_pRGBFrame->width * m_pRGBFrame->height * 3);
            m_pCameraData.m_pixelFormat = CameraData::PIXFORMAT_RGB24;
        }
    }
    else
    {
        disposeYUVData();
    }
    av_packet_unref(&pkt);
    return true;
}
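
Before wiring the class into a thread and QML, the capture path can be verified on its own. The sketch below grabs one converted frame and writes it out as a binary PPM file; the device name and the retry count are placeholders:

// Sketch: open the camera, grab one frame and dump it as a PPM image
// to verify the FFmpeg capture path end to end.
#include <QFile>

static bool dumpOneFrame()
{
    CameraCapture capture;
    capture.setUsedSwsScaleEnabled(true);           // force RGB24 output
    if (!capture.open("Your Camera Name"))          // exact DirectShow device name
        return false;

    bool ok = false;
    for (int i = 0; i < 10 && !ok; ++i)             // the first reads may fail while the stream starts
        ok = capture.capture();
    if (!ok)
        return false;

    AVFrame* rgb = capture.m_pRGBFrame;
    QFile file("frame.ppm");
    if (!file.open(QIODevice::WriteOnly))
        return false;
    file.write(QString("P6\n%1 %2\n255\n").arg(rgb->width).arg(rgb->height).toLatin1());
    file.write((const char*)rgb->data[0], rgb->width * rgb->height * 3);
    return true;                                    // the destructor calls close()
}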


Converting the RGB AVFrame to a QImage

QImage CameraCaptureThread::FFmpeg2QImage(AVFrame *rgbframe)
{
    // Copy row by row: the QImage scanline stride (4-byte aligned) and the
    // AVFrame linesize are generally not equal, so a single memcpy is unsafe.
    QImage img(rgbframe->width, rgbframe->height, QImage::Format_RGB888);
    const int bytesPerRow = rgbframe->width * 3;
    for (int y = 0; y < rgbframe->height; ++y)
    {
        memcpy(img.scanLine(y), rgbframe->data[0] + y * rgbframe->linesize[0], bytesPerRow);
    }
    return img;
}
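
If a per-row loop is undesirable, Qt can also wrap the RGB24 buffer directly through the stride-aware QImage constructor. The sketch below assumes the frame stays valid until copy() has detached the data:

// Sketch: wrap the RGB24 buffer using the bytesPerLine-aware constructor.
// copy() detaches the QImage from the AVFrame memory so the frame can be reused.
QImage wrapRgbFrame(AVFrame *rgbframe)
{
    QImage view(rgbframe->data[0], rgbframe->width, rgbframe->height,
                rgbframe->linesize[0], QImage::Format_RGB888);
    return view.copy();
}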

Emitting the image from a worker thread

void CameraCaptureThread::run(void)
{
    this->openCamera("Astra Pro FHD Camera");
    while (!this->isInterruptionRequested())
    {
        if (!m_pCameraCapture->capture())
            continue;                      // skip packets that failed to decode
        QImage img = this->FFmpeg2QImage(this->m_pCameraCapture->m_pRGBFrame);
        emit this->needUpdate(img);        // received on the GUI side through an image provider
    }
}
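
The article does not show the declaration of CameraCaptureThread itself. Below is a sketch of what it might look like, consistent with the snippets above; openCamera() simply forwards to CameraCapture::open(), and needUpdate() carries the converted QImage:

// Sketch of the worker-thread declaration implied by the snippets above.
#include <QThread>
#include <QImage>
#include "cameracapture.h"

class CameraCaptureThread : public QThread
{
    Q_OBJECT
public:
    explicit CameraCaptureThread(QObject* parent = nullptr)
        : QThread(parent), m_pCameraCapture(new CameraCapture(this)) {}

    bool openCamera(const QString& deviceName) { return m_pCameraCapture->open(deviceName); }

signals:
    void needUpdate(const QImage& img);    // consumed by the image provider / QML bridge

protected:
    void run(void) override;               // implemented above

private:
    QImage FFmpeg2QImage(AVFrame* rgbframe);
    CameraCapture* m_pCameraCapture = nullptr;
};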

QML:

import QtQuick 2.14
import QtQuick.Window 2.14
import QtQuick.Controls 2.12   // this Controls version provides the menu bar types

Rectangle {
    width:  700
    height: 480

    Image {
        id:    img
        cache: false   // disable caching so the image actually refreshes
        anchors.fill: parent
    }

    Connections {
        target: imgSender   // the C++ bridge object exposed to QML
        onCallQmlRefeshImg: {   // "on" + capitalized signal name is the handler
            img.source = ""
            // "imgProvider" is the id the image provider was registered under;
            // re-assigning the source triggers its requestImage() override
            img.source = "image://imgProvider"
        }
    }
}
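
The QML above depends on two C++ objects that the article only mentions: an image provider registered under the id "imgProvider" and a bridge object exposed to QML as imgSender, which emits callQmlRefeshImg after each new frame. A sketch of that glue code, with illustrative class names:

// Sketch: image provider + signal bridge connecting the capture thread to QML.
#include <QQuickImageProvider>
#include <QQmlApplicationEngine>
#include <QQmlContext>
#include <QImage>
#include <QMutex>
#include <QMutexLocker>

class ImgProvider : public QQuickImageProvider
{
public:
    ImgProvider() : QQuickImageProvider(QQuickImageProvider::Image) {}

    // Called by QML each time Image.source is set to "image://imgProvider"
    QImage requestImage(const QString& /*id*/, QSize* size, const QSize& /*requestedSize*/) override
    {
        QMutexLocker locker(&m_mutex);
        if (size) *size = m_image.size();
        return m_image;
    }

    void setImage(const QImage& img)
    {
        QMutexLocker locker(&m_mutex);
        m_image = img;
    }

private:
    QImage m_image;
    QMutex m_mutex;
};

class ImgSender : public QObject
{
    Q_OBJECT
signals:
    void callQmlRefeshImg();    // name must match the onCallQmlRefeshImg handler in QML
};

// Wiring at startup (engine and thread are created elsewhere):
static void setupImageBridge(QQmlApplicationEngine& engine, CameraCaptureThread* thread)
{
    ImgProvider* provider = new ImgProvider;          // engine takes ownership below
    ImgSender*   sender   = new ImgSender;
    engine.addImageProvider("imgProvider", provider);
    engine.rootContext()->setContextProperty("imgSender", sender);

    QObject::connect(thread, &CameraCaptureThread::needUpdate, sender,
                     [provider, sender](const QImage& img) {
                         provider->setImage(img);
                         emit sender->callQmlRefeshImg();   // tells QML to re-set Image.source
                     });
}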

References:

AVFrame to QImage

Reading a camera with FFmpeg
