qt使用opengl绘制yuv网上的例子有很多,都是使用opengl函数进行绘制。这里介绍一个更加简单的方式。
方法:
1.使用libyuv库将yuv数据转化为RGB数据
libyuv::I420ToARGB((uint8_t*)Y_data_dest, dest_stride_Y,
(uint8_t*)U_data_dest, dest_stride_U,
(uint8_t*)V_data_dest, dest_stride_V,
_rgbBuffer, _imageWidth * 4,
_imageWidth, _imageHeight);
2.使用libyuv将转化好的RGB数据裁剪或者缩放到目标窗口大小
libyuv::ARGBScale(_rgbBuffer, _imageWidth * 4, _imageWidth, _imageHeight, _rgbScaleBuffer,
scale_width*4, scale_width,
sacle_height, libyuv::kFilterBilinear);
3.将RGB数据转化为QImage
QImage image((uchar*)_mirror_rgbBuffer, scale_width, sacle_height, QImage::Format_ARGB32);
4.创建一个窗体继承QGraphicsView并设置显示窗口为QOpenGLWidget
QOpenGLWidget *glWidget = new QOpenGLWidget(this);
auto format = glWidget->format();
format.setDepthBufferSize(0);
format.setStencilBufferSize(0);
format.setSamples(4);
glWidget->setFormat(format);
setViewport(glWidget);
this->setScene(new QGraphicsScene);
5.将步骤3的QImage 作为QGraphicsView的背景设置进去
setBackgroundBrush(QBrush(image));
完毕
最后附上代码,代码未整理只是实现了功能
#ifndef VIDEORENDERGRAPHISVIEW_H
#define VIDEORENDERGRAPHISVIEW_H
#include <QGraphicsView>
#include <QPainter>
#include <QImage>
#include <QByteArray>
#include <QResizeEvent>
#include <QPaintEvent>
#include <QRect>
#include "VideoRender.h"
class QGraphicsScene;
// Renders I420 video frames in a QGraphicsView whose viewport is a
// QOpenGLWidget: each frame is converted to ARGB with libyuv, scaled to
// the widget size, optionally mirrored, and painted as the view's
// background brush (see setFrameData in the .cpp).
// NOTE(review): the class owns three raw heap buffers but declares no
// copy/move members — presumably QGraphicsView already makes it
// non-copyable; confirm.
class VideoRenderGraphicsView : public QGraphicsView, public VideoRender
{
public:
explicit VideoRenderGraphicsView (QWidget *parent = Q_NULLPTR);
~VideoRenderGraphicsView () override;
// Renderer kind reported to the VideoRender framework.
VideoRenderType type() override;
// The QWidget to embed in a layout (this view itself).
QWidget *widget() override;
// Records frame dimensions used to parse incoming I420 buffers.
// NOTE(review): strides/trueWidth are ignored by the implementation.
void setYUV420pParameters(int width, int height, int* strides = Q_NULLPTR, int trueWidth = -1) override;
// Converts, scales and displays one tightly-packed I420 frame.
void setFrameData(const QByteArray& data) override;
void setFrameData(const QSharedPointer<QByteArray>& data) override;
protected:
void resizeEvent(QResizeEvent* event) override;
private:
uint8_t* _rgbScaleBuffer{nullptr};   // ARGB frame scaled to widget size
uint8_t* _rgbBuffer{nullptr};        // ARGB frame at source resolution
uint8_t* _mirror_rgbBuffer{nullptr}; // horizontally mirrored scaled frame
int _imageWidth{0};                  // source frame width in pixels
int _imageHeight{0};                 // source frame height in pixels
int _scale_data_length{0};           // byte size of the scaled buffers
int _data_length{0};                 // byte size of _rgbBuffer
};
#endif // VIDEORENDERGRAPHISVIEW_H
源文件:
#include <QToolButton>
#include <QPainter>
#include "VideoRenderGraphicsView.h"
#include "HandlerClass.h"
#include "RemixLog.h"
#include "AppSession.h"
//#include "meiyan/beautyfilter.h"
#include "remixhelp.h"
#include "libyuv.h"
#include "AppEnv.h"
#include <QGraphicsView>
#include <QOpenGLWidget>
#include <QGraphicsScene>
#ifdef Q_OS_WIN
#define FONT_STR "Microsoft YaHei"
#elif defined(Q_OS_MAC)
#define FONT_STR "PingFang SC"
#endif
using namespace libyuv;
// Builds the view: an OpenGL-backed viewport (no depth/stencil buffers,
// 4x multisampling) plus an empty scene. Frames arrive later through
// setFrameData().
VideoRenderGraphicsView::VideoRenderGraphicsView(QWidget *parent)
: QGraphicsView(parent), VideoRender(),
_rgbBuffer(Q_NULLPTR), _mirror_rgbBuffer(Q_NULLPTR),
_imageWidth(0), _imageHeight(0), _data_length(-1)
{
setMouseTracking(true);
auto *glViewport = new QOpenGLWidget(this);
// Tune the surface before the widget is shown: plain 2D drawing needs
// neither a depth nor a stencil buffer, but antialiasing is welcome.
auto fmt = glViewport->format();
fmt.setSamples(4);
fmt.setDepthBufferSize(0);
fmt.setStencilBufferSize(0);
glViewport->setFormat(fmt);
setViewport(glViewport);
setScene(new QGraphicsScene);
}
// Releases every frame buffer owned by the view.
// delete[] on a null pointer is a no-op, so no null checks are needed.
VideoRenderGraphicsView::~VideoRenderGraphicsView()
{
delete[] _rgbBuffer;
_rgbBuffer = nullptr;
delete[] _mirror_rgbBuffer;
_mirror_rgbBuffer = nullptr;
// BUG FIX: _rgbScaleBuffer is allocated in setFrameData() but was
// never freed here — memory leak on every destroyed view.
delete[] _rgbScaleBuffer;
_rgbScaleBuffer = nullptr;
}
// Identifies this renderer as the OpenGL-backed implementation to the
// VideoRender framework.
VideoRenderType VideoRenderGraphicsView::type()
{
return VideoRenderType::OpenGl;
}
// Returns the QWidget callers should embed in a layout: the view itself.
QWidget* VideoRenderGraphicsView::widget()
{
return this;
}
void VideoRenderGraphicsView::setYUV420pParameters(int width, int height, int *strides, int trueWidth)
{
_imageWidth = width;
_imageHeight = height;
}
// Converts one tightly-packed I420 frame to ARGB, scales it to the
// current widget size, optionally mirrors it horizontally, and installs
// the result as the view's background brush.
// Precondition: setYUV420pParameters() was called; 'data' must hold at
// least _imageWidth * _imageHeight * 3 / 2 bytes (not validated here).
void VideoRenderGraphicsView::setFrameData(const QByteArray& data)
{
// Bytes needed for the full-size ARGB frame (4 bytes per pixel).
int data_length = _imageWidth * _imageHeight * 4;
if (data_length == 0)
{
return; // dimensions not set yet — nothing to render
}
// Source size changed: drop the old buffers so they are reallocated below.
if (_data_length != data_length)
{
_data_length = data_length;
if (_rgbBuffer)
{
delete [] _rgbBuffer;
_rgbBuffer = Q_NULLPTR;
}
if (_mirror_rgbBuffer)
{
delete [] _mirror_rgbBuffer;
_mirror_rgbBuffer = Q_NULLPTR;
}
}
// Plane strides for tightly-packed I420 (chroma planes are half-width).
int dest_stride_Y = _imageWidth;
int dest_stride_U = _imageWidth / 2;
int dest_stride_V = _imageWidth / 2;
// The three planes sit back-to-back in 'data': Y, then U, then V.
const char *Y_data_dest = data.constData();
const char *U_data_dest = Y_data_dest + _imageWidth * _imageHeight;
const char *V_data_dest = U_data_dest + _imageWidth * _imageHeight / 4;
if (_rgbBuffer == Q_NULLPTR)
{
_rgbBuffer = new uint8_t [_data_length];
}
// I420 -> ARGB at the source resolution.
libyuv::I420ToARGB((uint8_t*)Y_data_dest, dest_stride_Y,
(uint8_t*)U_data_dest, dest_stride_U,
(uint8_t*)V_data_dest, dest_stride_V,
_rgbBuffer, _imageWidth * 4,
_imageWidth, _imageHeight);
// Scale the ARGB frame to the widget's current size.
int scale_data_length = this->width()*this->height()*4;
int scale_width = this->width();
int sacle_height = this->height();
// (Re)allocate the scale buffer when the widget size changed;
// delete[] on a null pointer is a no-op, so no extra null check needed.
if (_rgbScaleBuffer == Q_NULLPTR || _scale_data_length != scale_data_length)
{
delete [] _rgbScaleBuffer;
_rgbScaleBuffer = new uint8_t [scale_data_length];
}
libyuv::ARGBScale(_rgbBuffer, _imageWidth * 4, _imageWidth, _imageHeight, _rgbScaleBuffer,
scale_width*4, scale_width,
sacle_height, libyuv::kFilterBilinear);
// NOTE(review): the mirror pass runs when m_bMirror is FALSE — the flag
// appears to mean "already mirrored / skip mirroring". Confirm against
// the VideoRender base class before relying on the name.
if (!m_bMirror)
{
if (_mirror_rgbBuffer == Q_NULLPTR || _scale_data_length != scale_data_length)
{
delete [] _mirror_rgbBuffer;
_mirror_rgbBuffer = new uint8_t [scale_data_length];
}
libyuv::ARGBMirror(_rgbScaleBuffer, scale_width*4, _mirror_rgbBuffer, scale_width*4, scale_width, sacle_height);
}
_scale_data_length = scale_data_length;
// Wrap the chosen buffer in a QImage (this constructor does not copy —
// the QImage references the buffer directly) and paint it as the
// view's background.
if (!m_bMirror)
{
QImage image((uchar*)_mirror_rgbBuffer, scale_width, sacle_height, QImage::Format_ARGB32);
setBackgroundBrush(QBrush(image));
}
else
{
QImage image((uchar*)_rgbScaleBuffer, scale_width, sacle_height, QImage::Format_ARGB32);
setBackgroundBrush(QBrush(image));
}
}
// Convenience overload: forwards a shared frame buffer to the
// QByteArray overload.
void VideoRenderGraphicsView::setFrameData(const QSharedPointer<QByteArray>& data)
{
// BUG FIX: the original dereferenced the pointer unconditionally and
// would crash on a null QSharedPointer.
if (data.isNull())
{
return;
}
setFrameData(*data);
}
// Keeps the scene rect in sync with the widget size (minus a small
// DPI-scaled margin) so the background brush fills the visible area.
void VideoRenderGraphicsView::resizeEvent(QResizeEvent *event)
{
// BUG FIX: forward to the base class so QGraphicsView can perform its
// own resize handling; the original override swallowed the event.
QGraphicsView::resizeEvent(event);
setSceneRect(dpiWidth(2), dpiHeight(2), width()-dpiWidth(2)*2, height()-dpiHeight(2)*2);
}

该博客介绍了如何利用libyuv库将YUV数据转换为RGB,然后使用OpenGL在Qt环境中进行渲染。首先,通过libyuv的I420ToARGB函数将YUV数据转换为RGB,接着使用ARGBScale进行缩放处理。随后,将处理后的RGB数据转换为QImage,并在继承自QGraphicsView的窗体中设置为背景。整个过程简洁高效,适用于视频帧的实时渲染。
2368

被折叠的 条评论
为什么被折叠?



