Below is a way to read a local video with FFmpeg and play it back on a QOpenGLWidget. Without further ado, on to the code.
First comes the class that wraps QOpenGLWidget for video playback.
Header file openGLWidget.h
#ifndef OPENGLWIDGETCLASS_H
#define OPENGLWIDGETCLASS_H
#include <QObject>
#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include <QOpenGLTexture>
#include <QImage>
#include <QWheelEvent>
class openGLWidgetClass : public QOpenGLWidget, protected QOpenGLFunctions
{
    Q_OBJECT
public:
    explicit openGLWidgetClass(QWidget *parent = 0);
public slots:
    void setCurrentImage(const QImage &image);
protected:
    void initializeGL() Q_DECL_OVERRIDE;
    void resizeGL(int w, int h) Q_DECL_OVERRIDE;
    void paintGL() Q_DECL_OVERRIDE;
private:
    void initTextures();
    void initShaders();
private:
    QVector<QVector3D> vertices;
    QVector<QVector2D> texCoords;
    QOpenGLShaderProgram program;
    QOpenGLTexture *texture = nullptr; // initialized so setCurrentImage() is safe before initializeGL() runs
    QMatrix4x4 projection;
    float ratio = 1.0;
    // Reserved for mouse interaction; unused in this example
    bool m_down = false;
    QPoint m_pt;
    float m_xoff;
    float m_yoff;
    float m_angle = 0.0f;
};
#endif // OPENGLWIDGETCLASS_H
Source file openGLWidget.cpp
#include "openGLWidget.h"
#include <QDebug>
#include <QGraphicsOpacityEffect>
openGLWidgetClass::openGLWidgetClass(QWidget *parent) : QOpenGLWidget(parent)
{
}
void openGLWidgetClass::initTextures()
{
    texture = new QOpenGLTexture(QOpenGLTexture::Target2D);
    texture->setMinificationFilter(QOpenGLTexture::LinearMipMapLinear);
    texture->setMagnificationFilter(QOpenGLTexture::Linear);
    // Clamp texture coordinates outside [0, 1] to the edge texels
    texture->setWrapMode(QOpenGLTexture::ClampToEdge);
    // Set the texture size
    texture->setSize(this->width(), this->height());
    // Allocate the storage
    texture->allocateStorage();
}
void openGLWidgetClass::initShaders()
{
    // Texture coordinates; note the vertical flip relative to the vertices below,
    // which compensates for QImage's top-left origin vs. OpenGL's bottom-left origin
    texCoords.append(QVector2D(0, 1)); // top-left
    texCoords.append(QVector2D(1, 1)); // top-right
    texCoords.append(QVector2D(0, 0)); // bottom-left
    texCoords.append(QVector2D(1, 0)); // bottom-right
    // Vertex coordinates of a full-screen quad
    vertices.append(QVector3D(-1, -1, 1)); // bottom-left
    vertices.append(QVector3D(1, -1, 1));  // bottom-right
    vertices.append(QVector3D(-1, 1, 1));  // top-left
    vertices.append(QVector3D(1, 1, 1));   // top-right
    QOpenGLShader *vshader = new QOpenGLShader(QOpenGLShader::Vertex, this);
    const char *vsrc =
        "attribute vec4 vertex;\n"
        "attribute vec2 texCoord;\n"
        "varying vec2 texc;\n"
        "void main(void)\n"
        "{\n"
        "    gl_Position = vertex;\n"
        "    texc = texCoord;\n"
        "}\n";
    vshader->compileSourceCode(vsrc); // compile the vertex shader
    QOpenGLShader *fshader = new QOpenGLShader(QOpenGLShader::Fragment, this);
    const char *fsrc =
        "uniform sampler2D texture;\n"
        "varying vec2 texc;\n"
        "void main(void)\n"
        "{\n"
        "    gl_FragColor = texture2D(texture, texc);\n"
        "}\n";
    fshader->compileSourceCode(fsrc); // compile the fragment shader
    program.addShader(vshader); // add the vertex shader
    program.addShader(fshader); // add the fragment shader
    program.bindAttributeLocation("vertex", 0);   // bind the vertex attribute location
    program.bindAttributeLocation("texCoord", 1); // bind the texture-coordinate attribute location
    // Link the shader pipeline
    if (!program.link())
        close();
    // Bind the shader pipeline
    if (!program.bind())
        close();
}
void openGLWidgetClass::initializeGL()
{
    initializeOpenGLFunctions(); // resolve the OpenGL function pointers
    glClearColor(0, 0, 0, 0);    // black background
    glEnable(GL_TEXTURE_2D);     // enable 2D texturing (fixed-function state; harmless with shaders)
    initTextures();              // set up the texture object
    initShaders();               // set up the shaders
}
void openGLWidgetClass::resizeGL(int w, int h)
{
    // Compute the window aspect ratio
    qreal aspect = qreal(w) / qreal(h ? h : 1);
    // Near plane 3.0, far plane 7.0, 45-degree field of view
    const qreal zNear = 3.0, zFar = 7.0, fov = 45.0;
    // Reset the projection
    projection.setToIdentity();
    // Set a perspective projection (kept for completeness; the pass-through
    // vertex shader above never actually uses it)
    projection.perspective(fov, static_cast<float>(aspect), zNear, zFar);
}
void openGLWidgetClass::paintGL()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear the color and depth buffers
    QMatrix4x4 matrix;
    matrix.translate(0.0, 0.0, -5.0); // model-view transform (unused by the pass-through shader)
    program.enableAttributeArray(0);
    program.enableAttributeArray(1);
    program.setAttributeArray(0, vertices.constData());
    program.setAttributeArray(1, texCoords.constData());
    program.setUniformValue("texture", 0); // the sampler reads from texture unit 0
    texture->bind();                       // bind the texture
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); // draw the textured quad
    texture->release(); // release the bound texture
    // Recreate the underlying texture object so the next setData() call
    // can upload a frame of any size
    texture->destroy();
    texture->create();
    texture->setMipLevels(1);
}
void openGLWidgetClass::setCurrentImage(const QImage &image)
{
    if (nullptr != texture)
    {
        texture->setData(image); // upload the image into the texture
        // Level-of-detail bias: lower values give a sharper image
        texture->setLevelofDetailBias(0);
        update();
    }
}
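Before hooking up FFmpeg, the widget can be smoke-tested on its own by pushing an arbitrary QImage into it. Below is a minimal standalone sketch (not part of the player class that follows); the short single-shot delay is an assumption on my part, since the texture is only created once initializeGL() has run after the widget is first shown:
#include <QApplication>
#include <QTimer>
#include "openGLWidget.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    openGLWidgetClass w;
    w.resize(640, 360);
    w.show();
    // Push a solid-color test image once the widget has initialized its GL state
    QTimer::singleShot(100, [&w]() {
        QImage test(640, 360, QImage::Format_RGBA8888);
        test.fill(Qt::darkCyan);
        w.setCurrentImage(test);
    });
    return app.exec();
}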
Next is the class that drives FFmpeg and the openGLWidget.
UI file videoPlayerClass.ui (not shown here; judging from the code below, it is assumed to contain a QWidget named widget promoted to openGLWidgetClass, a slider horizontalSlider_progressBar, a label label_videoTime, and the four push buttons wired to the slots below)
Header file videoPlayerClass.h
#pragma once
#include <QWidget>
#include <QTimer>
#include <QDebug>
#include <QTime>
#include <windows.h>
#include <QOpenGLTexture>
#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include "ui_videoPlayerClass.h"
#include "opencv2/opencv.hpp"
#include "openGLWidget.h"
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
class videoPlayerClass : public QWidget
{
    Q_OBJECT
public:
    videoPlayerClass(QWidget *parent = nullptr);
    ~videoPlayerClass();
    void openVideo(QString videoFilePath, int frequency);
    void initWidget();
private:
    QPixmap extractFirstFrame(const char *filename);
    void updateProgress();
    void closeVideo();
    void videoToPictureFrequency(QString videoPath, int frequency);
private slots:
    void on_pushButton_left_clicked();
    void on_pushButton_play_clicked();
    void on_pushButton_right_clicked();
    void on_pushButton_stop_clicked();
private:
    Ui::videoPlayerClassClass ui;
    AVFormatContext *formatContext = nullptr;
    AVCodecContext *codecContext = nullptr;
    AVFrame *frame = nullptr;
    AVPacket *packet = nullptr;
    SwsContext *swsContext = nullptr;
    int videoStreamIndex = -1;
    bool isPlaying;
    double speedFactor = 1.0; // playback speed factor
    QTimer *timer = nullptr;
    QImage currentFrameImage;
    int64_t duration;    // total video duration in microseconds (AV_TIME_BASE units)
    int64_t currentTime; // current playback time in seconds (matches the slider range)
    QString m_videoFilePath;
    bool m_isPlaying = false;
    int m_frequency = 0;
};
Source file videoPlayerClass.cpp
#include "videoPlayerClass.h"
videoPlayerClass::videoPlayerClass(QWidget *parent)
: QWidget(parent)
{
ui.setupUi(this);
// 设置定时器
timer = new QTimer(this);
connect(timer, &QTimer::timeout, this, &videoPlayerClass::updateProgress);
}
videoPlayerClass::~videoPlayerClass()
{
    closeVideo();
}
/* Open a local video */
void videoPlayerClass::openVideo(QString videoFilePath, int frequency)
{
    if (m_isPlaying && timer->isActive())
    {
        m_isPlaying = false;
        ui.horizontalSlider_progressBar->setValue(0);
        ui.pushButton_play->setStyleSheet("background-color:transparent;color:#FFFFFF;background-image: url(:/style/image/play.png);");
        timer->stop();
        closeVideo();
    }
    m_videoFilePath = videoFilePath;
    m_frequency = frequency;
    /* Optionally dump frames from the video at a fixed frequency */
    //videoToPictureFrequency(m_videoFilePath, frequency);
    /* Resize the video output area to a 16:9 aspect ratio */
    ui.widget->setMinimumSize(ui.widget->height() * ((float)16 / 9), ui.widget->height());
    ui.widget->setMaximumSize(ui.widget->height() * ((float)16 / 9), ui.widget->height());
    // Initialize FFmpeg (deprecated and unnecessary since FFmpeg 4.0; needed only on older versions)
    av_register_all();
    if (formatContext) avformat_close_input(&formatContext);
    avformat_open_input(&formatContext, m_videoFilePath.toStdString().c_str(), nullptr, nullptr);
    avformat_find_stream_info(formatContext, nullptr);
    // Find the video stream
    for (unsigned int i = 0; i < formatContext->nb_streams; i++) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            AVCodecParameters *codecParameters = formatContext->streams[i]->codecpar;
            AVCodec *codec = avcodec_find_decoder(codecParameters->codec_id);
            codecContext = avcodec_alloc_context3(codec);
            avcodec_parameters_to_context(codecContext, codecParameters);
            avcodec_open2(codecContext, codec, nullptr);
            frame = av_frame_alloc();
            packet = av_packet_alloc();
            duration = formatContext->duration; // total duration in AV_TIME_BASE (microsecond) units
            ui.horizontalSlider_progressBar->setMaximum(duration / AV_TIME_BASE); // slider range in seconds
            ui.label_videoTime->setText("00:00:00/" + QTime::fromMSecsSinceStartOfDay(duration / AV_TIME_BASE * 1000).toString("HH:mm:ss"));
            break;
        }
    }
    QPixmap pixmap_orgin = extractFirstFrame(m_videoFilePath.toStdString().c_str());
    ui.widget->setCurrentImage(pixmap_orgin.toImage());
}
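Note that openVideo() above skips all error checking for brevity. A hedged sketch of what the checked open sequence could look like follows, using the same FFmpeg calls with their return codes inspected (av_strerror() turns an error code into readable text, and av_find_best_stream() can replace the manual stream loop); this is an illustration, not part of the class above:
// Sketch only: the open sequence with its return codes checked
AVFormatContext *fmt = nullptr;
int err = avformat_open_input(&fmt, m_videoFilePath.toStdString().c_str(), nullptr, nullptr);
if (err < 0) {
    char msg[AV_ERROR_MAX_STRING_SIZE] = { 0 };
    av_strerror(err, msg, sizeof(msg));
    qDebug() << "avformat_open_input failed:" << msg;
    return;
}
if (avformat_find_stream_info(fmt, nullptr) < 0) {
    qDebug() << "avformat_find_stream_info failed";
    avformat_close_input(&fmt);
    return;
}
int streamIdx = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
if (streamIdx < 0) {
    qDebug() << "no video stream found";
    avformat_close_input(&fmt);
    return;
}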
void videoPlayerClass::initWidget()
{
    /* Resize the video output area to a 16:9 aspect ratio */
    ui.widget->setMinimumSize(ui.widget->height() * ((float)16 / 9), ui.widget->height());
    ui.widget->setMaximumSize(ui.widget->height() * ((float)16 / 9), ui.widget->height());
}
/* Grab the first frame of the video */
QPixmap videoPlayerClass::extractFirstFrame(const char *filename)
{
    AVFormatContext* fmt_ctx_ = nullptr;
    // Open the video file
    int errCode = avformat_open_input(&fmt_ctx_, filename, nullptr, nullptr);
    if (errCode != 0) {
        qDebug() << "avformat_open_input fail" << errCode;
        return QPixmap();
    }
    // Read the stream information
    errCode = avformat_find_stream_info(fmt_ctx_, nullptr);
    if (errCode != 0) {
        qDebug() << "avformat_find_stream_info fail" << errCode;
        avformat_close_input(&fmt_ctx_);
        return QPixmap();
    }
    // Dump the media information to the log
    av_dump_format(fmt_ctx_, 0, filename, 0);
    AVPacket* pkt = av_packet_alloc();
    AVFrame* temp_frame = av_frame_alloc();
    SwsContext* sws_ctx = nullptr;
    int ret = 0;
    QImage preview;
    bool preview_done = false;
    for (int i = 0; i < int(fmt_ctx_->nb_streams) && !preview_done; i++) {
        // Only handle the video stream
        if (fmt_ctx_->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            // Find the video decoder
            AVCodec* codec = avcodec_find_decoder(fmt_ctx_->streams[i]->codecpar->codec_id);
            AVCodecContext *codec_ctx = avcodec_alloc_context3(codec);
            // Fill the codec context from the stream's codec parameters
            avcodec_parameters_to_context(codec_ctx, fmt_ctx_->streams[i]->codecpar);
            // Open the decoder
            avcodec_open2(codec_ctx, codec, nullptr);
            // Read packets
            while (av_read_frame(fmt_ctx_, pkt) >= 0) {
                // Skip packets that belong to other streams (e.g. audio)
                if (pkt->stream_index != i) {
                    av_packet_unref(pkt);
                    continue;
                }
                av_frame_unref(temp_frame);
                // Decode: keep feeding packets until the decoder produces a frame
                while ((ret = avcodec_receive_frame(codec_ctx, temp_frame)) == AVERROR(EAGAIN)) {
                    ret = avcodec_send_packet(codec_ctx, pkt);
                    if (ret < 0) {
                        qCritical() << "Failed to send packet to decoder." << ret;
                        break;
                    }
                }
                av_packet_unref(pkt); // the packet's data is now owned by the decoder
                if (ret < 0 && ret != AVERROR_EOF) {
                    qDebug() << "Failed to receive frame from decoder." << ret;
                    continue;
                }
                // Convert the frame to RGBA
                sws_ctx = sws_getContext(
                    temp_frame->width,
                    temp_frame->height,
                    static_cast<AVPixelFormat>(temp_frame->format),
                    temp_frame->width,
                    temp_frame->height,
                    AV_PIX_FMT_RGBA,
                    SWS_FAST_BILINEAR,
                    nullptr,
                    nullptr,
                    nullptr
                );
                int linesize[AV_NUM_DATA_POINTERS];
                linesize[0] = temp_frame->width * 4;
                // Produce the image, writing directly into the QImage's buffer
                preview = QImage(temp_frame->width, temp_frame->height, QImage::Format_RGBA8888);
                uint8_t* data = preview.bits();
                sws_scale(sws_ctx,
                          temp_frame->data,
                          temp_frame->linesize,
                          0,
                          temp_frame->height,
                          &data,
                          linesize);
                sws_freeContext(sws_ctx);
                avcodec_close(codec_ctx);
                avcodec_free_context(&codec_ctx);
                preview_done = true;
                break;
            }
        }
    }
    // Release resources
    av_frame_free(&temp_frame);
    av_packet_free(&pkt);
    avformat_close_input(&fmt_ctx_);
    return QPixmap::fromImage(preview);
}
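The decode loop in extractFirstFrame() is written inside-out: it calls avcodec_receive_frame() first and only sends the packet when the decoder reports EAGAIN. That works for grabbing a single frame, but for reference, here is a minimal sketch of the canonical send/receive pattern; the names mirror the locals above, with videoStreamIndex standing in for the index of the video stream:
// Sketch of the standard FFmpeg decode loop: send one packet, drain every frame it yields
while (av_read_frame(fmt_ctx_, pkt) >= 0) {
    if (pkt->stream_index == videoStreamIndex) {
        if (avcodec_send_packet(codec_ctx, pkt) == 0) {
            while (avcodec_receive_frame(codec_ctx, temp_frame) == 0) {
                // temp_frame now holds a decoded frame; convert or display it here
            }
        }
    }
    av_packet_unref(pkt); // must be called after every successful av_read_frame()
}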
/* Update the displayed frame and the progress UI */
void videoPlayerClass::updateProgress()
{
    if (!m_isPlaying) return;
    // Read one frame and update the progress
    if (av_read_frame(formatContext, packet) >= 0)
    {
        if (packet->stream_index == videoStreamIndex) {
            avcodec_send_packet(codecContext, packet);
            if (avcodec_receive_frame(codecContext, frame) == 0) {
                // Convert the frame to a QImage
                int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, codecContext->width, codecContext->height, 1);
                uint8_t *buffer = (uint8_t *)av_malloc(numBytes);
                swsContext = sws_getContext(codecContext->width, codecContext->height,
                                            codecContext->pix_fmt,
                                            codecContext->width, codecContext->height,
                                            AV_PIX_FMT_RGB24,
                                            SWS_BILINEAR, nullptr, nullptr, nullptr);
                uint8_t *dest[1] = { buffer };
                int destLinesize[1] = { 3 * codecContext->width };
                sws_scale(swsContext, frame->data, frame->linesize, 0, codecContext->height, dest, destLinesize);
                // Deep-copy the pixels: this QImage constructor only wraps the buffer,
                // which is freed immediately below
                currentFrameImage = QImage(buffer, codecContext->width, codecContext->height, QImage::Format_RGB888).copy();
                ui.widget->setCurrentImage(currentFrameImage);
                av_free(buffer);
                sws_freeContext(swsContext);
                currentTime = frame->pts * av_q2d(formatContext->streams[videoStreamIndex]->time_base); // seconds
                ui.horizontalSlider_progressBar->setValue(currentTime); // update the progress slider
                ui.label_videoTime->setText(QTime::fromMSecsSinceStartOfDay(currentTime * 1000).toString("HH:mm:ss") + "/" + QTime::fromMSecsSinceStartOfDay(duration / AV_TIME_BASE * 1000).toString("HH:mm:ss"));
                update(); // refresh the display
                av_packet_unref(packet); // release the packet's payload before the next read
                return; // done after one decoded frame
            }
        }
        av_packet_unref(packet); // also release packets that produced no frame
    }
    else
    {
        // End of file: stop playback
        on_pushButton_stop_clicked();
    }
}
/* Release the FFmpeg handles */
void videoPlayerClass::closeVideo()
{
    timer->stop();
    if (packet) av_packet_free(&packet);
    //if (swsContext) sws_freeContext(swsContext); // freed per-frame in updateProgress()
    if (frame) av_frame_free(&frame);
    if (codecContext) avcodec_free_context(&codecContext);
    if (formatContext) avformat_close_input(&formatContext);
}
/* Extract images from the video at a fixed frequency */
void videoPlayerClass::videoToPictureFrequency(QString videoPath, int frequency)
{
    cv::VideoCapture cap(videoPath.toStdString().c_str()); // open the video file
    if (!cap.isOpened()) {
        std::cerr << "Error: Could not open video file." << std::endl;
        return;
    }
    int frameCount = 0;
    double fps = cap.get(cv::CAP_PROP_FPS); // frame rate of the video
    double totalcount = cap.get(cv::CAP_PROP_FRAME_COUNT); // total number of frames
    int interval = 0; // extract every interval-th frame
    /* Round up when the fractional part is 0.5 or more; e.g. fps = 25 and
       frequency = 10 gives 2.5, so every 3rd frame is saved */
    if (fps / frequency - std::floor(fps / frequency) >= 0.5)
    {
        interval = std::ceil(fps / frequency);
    }
    else
    {
        interval = std::floor(fps / frequency);
    }
    if (interval < 1) interval = 1; // guard against modulo-by-zero when frequency > fps
    cv::Mat frame;
    while (cap.read(frame))
    {
        if (static_cast<int>(frameCount % interval) == 0)
        {
            std::string filename = QApplication::applicationDirPath().toStdString() + "/data/image/output_" + std::to_string(frameCount / interval) + ".png";
            cv::imwrite(filename, frame); // save the frame as an image
        }
        frameCount++;
    }
    cap.release();
}
void videoPlayerClass::on_pushButton_left_clicked()
{
    speedFactor /= 2.0;
    if (m_isPlaying) {
        timer->start(30 / speedFactor); // adjust the timer interval for the new speed
    }
}
void videoPlayerClass::on_pushButton_play_clicked()
{
    m_isPlaying = !m_isPlaying;
    if (m_isPlaying)
    {
        timer->start(30 / speedFactor); // start the update timer (~33 fps at 1x speed)
        ui.pushButton_play->setStyleSheet("background-color:transparent;color:#FFFFFF;background-image: url(:/style/image/pause.png);");
    }
    else
    {
        ui.pushButton_play->setStyleSheet("background-color:transparent;color:#FFFFFF;background-image: url(:/style/image/play.png);");
    }
}
void videoPlayerClass::on_pushButton_right_clicked()
{
    speedFactor *= 2.0;
    if (m_isPlaying) {
        timer->start(30 / speedFactor); // adjust the timer interval for the new speed
    }
}
void videoPlayerClass::on_pushButton_stop_clicked()
{
    m_isPlaying = false;
    ui.pushButton_play->setStyleSheet("background-color:transparent;color:#FFFFFF;background-image: url(:/style/image/play.png);");
    timer->stop();
    closeVideo();
    // Reopen the video so it is ready to play again from the beginning
    openVideo(m_videoFilePath, m_frequency);
}
To use it from a main function, simply call videoPlayerClass::openVideo(QString videoFilePath, int frequency) with the path of the video file; the video loads, shows its first frame, and can then be played with the play button.
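A minimal sketch of such a main(); the path and the frequency value of 5 are placeholders, not values from this article:
#include <QApplication>
#include "videoPlayerClass.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    videoPlayerClass player;
    player.show();
    player.openVideo("D:/video/test.mp4", 5); // placeholder path; use your own file
    return app.exec();
}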
That is the whole approach: read a local video with FFmpeg and render it onto a QOpenGLWidget.