#include "mainwindow.h"
#include "ui_mainwindow.h"
MainWindow::MainWindow(QWidget *parent)
: QMainWindow(parent)
, ui(new Ui::MainWindow)
{
ui->setupUi(this);
timer = new QTimer(this);
connect(timer, &QTimer::timeout, this, &MainWindow::captureScreen);
isSelecting = false;
isRecording = false;
frameCount = 0;
// 默认录制全屏
QScreen *screen = QGuiApplication::primaryScreen();
selectionRect = screen->geometry();
// 初始化UI
ui->fpsSpinBox->setValue(30);
ui->qualitySpinBox->setValue(23); // 默认CRF值
ui->statusLabel->setText("就緒");
// 禁用停止按钮
ui->stopButton->setEnabled(false);
// RTSP地址写死
rtspUrl = "rtsp://127.0.0.1:8554/live/test222";
}
MainWindow::~MainWindow()
{
if (isRecording) {
finalizeFFmpeg();
}
delete ui;
}
void MainWindow::on_startButton_clicked()
{
if (isRecording)
return;
// 获取输出文件路径
outputFilePath = QFileDialog::getSaveFileName(
this, "保存录制文件",
QDir::homePath(),
"MP4文件 (*.mp4)"
);
if (outputFilePath.isEmpty())
return;
// 确保文件扩展名为.mp4
if (!outputFilePath.endsWith(".mp4", Qt::CaseInsensitive))
outputFilePath += ".mp4";
// 获取帧率和质量设置
int fps = ui->fpsSpinBox->value();
int quality = ui->qualitySpinBox->value();
// 初始化FFmpeg
if (!initializeFFmpeg(outputFilePath, selectionRect.width(), selectionRect.height(), fps)) {
QMessageBox::critical(this, "错误", "无法初始化FFmpeg编码器");
return;
}
// 初始化RTSP推流
if (!initializeRTSP(selectionRect.width(), selectionRect.height(), fps)) {
QMessageBox::warning(this, "警告", "无法初始化RTSP推流,将继续录制本地文件");
}
// 开始录制
isRecording = true;
frameCount = 0;
startTime = av_gettime();
timer->start(1000 / fps);
ui->startButton->setEnabled(false);
ui->stopButton->setEnabled(true);
ui->selectAreaButton->setEnabled(false);
ui->statusLabel->setText("正在录制和推流...");
}
void MainWindow::on_stopButton_clicked()
{
if (!isRecording)
return;
// 停止录制
timer->stop();
finalizeFFmpeg();
isRecording = false;
ui->startButton->setEnabled(true);
ui->stopButton->setEnabled(false);
ui->selectAreaButton->setEnabled(true);
ui->statusLabel->setText("录制已完成");
QMessageBox::information(this, "成功", QString("录制已完成,文件保存至:\n%1").arg(outputFilePath));
}
void MainWindow::on_selectAreaButton_clicked()
{
if (isRecording)
return;
// 进入选择区域模式
isSelecting = true;
setCursor(Qt::CrossCursor);
ui->statusLabel->setText("请在屏幕上拖动选择录制区域");
}
void MainWindow::captureScreen()
{
if (!isRecording)
return;
// 捕获屏幕
QScreen *screen = QGuiApplication::primaryScreen();
QPixmap pixmap = screen->grabWindow(0,
selectionRect.x(), selectionRect.y(),
selectionRect.width(), selectionRect.height());
QImage image = pixmap.toImage().convertToFormat(QImage::Format_RGB888);
// 添加到FFmpeg编码器
addFrame(image);
frameCount++;
// 更新状态
ui->statusLabel->setText(QString("正在录制和推流... 已录制 %1 帧").arg(frameCount));
}
void MainWindow::mousePressEvent(QMouseEvent *event)
{
if (isSelecting && event->button() == Qt::LeftButton) {
startPoint = event->globalPos();
endPoint = startPoint;
update();
}
}
void MainWindow::mouseMoveEvent(QMouseEvent *event)
{
if (isSelecting && (event->buttons() & Qt::LeftButton)) {
endPoint = event->globalPos();
update();
}
}
void MainWindow::mouseReleaseEvent(QMouseEvent *event)
{
if (isSelecting && event->button() == Qt::LeftButton) {
endPoint = event->globalPos();
// 计算选择区域
int x = qMin(startPoint.x(), endPoint.x());
int y = qMin(startPoint.y(), endPoint.y());
int width = qAbs(startPoint.x() - endPoint.x());
int height = qAbs(startPoint.y() - endPoint.y());
selectionRect = QRect(x, y, width, height);
// 退出选择模式
isSelecting = false;
setCursor(Qt::ArrowCursor);
ui->statusLabel->setText(QString("已选择录制区域: %1x%2")
.arg(selectionRect.width()).arg(selectionRect.height()));
update();
}
}
void MainWindow::paintEvent(QPaintEvent *event)
{
Q_UNUSED(event);
if (isSelecting) {
QPainter painter(this);
painter.setPen(QPen(Qt::red, 2));
painter.setBrush(QBrush(QColor(255, 0, 0, 50)));
// 转换为窗口坐标
QPoint topLeft = mapFromGlobal(QPoint(qMin(startPoint.x(), endPoint.x()),
qMin(startPoint.y(), endPoint.y())));
QPoint bottomRight = mapFromGlobal(QPoint(qMax(startPoint.x(), endPoint.x()),
qMax(startPoint.y(), endPoint.y())));
painter.drawRect(QRect(topLeft, bottomRight));
}
}
bool MainWindow::initializeFFmpeg(const QString &filename, int width, int height, int fps)
{
// 查找编码器
const AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec) {
qDebug() << "找不到H.264编码器";
return false;
}
// 分配编码器上下文
codecContext = avcodec_alloc_context3(codec);
if (!codecContext) {
qDebug() << "无法分配编码器上下文";
return false;
}
// 设置编码器参数
codecContext->bit_rate = 4000000; // 4 Mbps
codecContext->width = width;
codecContext->height = height;
codecContext->time_base = {1, fps};
codecContext->framerate = {fps, 1};
codecContext->gop_size = 10; // 每10帧一个I帧
codecContext->max_b_frames = 1;
codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
// H.264 CRF设置 (0-51, 越小质量越高)
av_opt_set(codecContext->priv_data, "crf", QString::number(ui->qualitySpinBox->value()).toUtf8(), 0);
// 打开编码器
int ret = avcodec_open2(codecContext, codec, nullptr);
if (ret < 0) {
qDebug() << "无法打开编码器:" << ret;
return false;
}
// 分配输出格式上下文
avformat_alloc_output_context2(&formatContext, nullptr, nullptr, filename.toUtf8().constData());
if (!formatContext) {
qDebug() << "无法分配输出格式上下文";
return false;
}
// 添加视频流
stream = avformat_new_stream(formatContext, nullptr);
if (!stream) {
qDebug() << "无法创建视频流";
return false;
}
// 兼容旧版FFmpeg API
#if LIBAVCODEC_VERSION_MAJOR < 58
stream->codec->codec_id = codecContext->codec_id;
stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
stream->codec->width = codecContext->width;
stream->codec->height = codecContext->height;
stream->codec->pix_fmt = codecContext->pix_fmt;
stream->codec->bit_rate = codecContext->bit_rate;
stream->codec->time_base = codecContext->time_base;
#else
stream->time_base = codecContext->time_base;
ret = avcodec_parameters_from_context(stream->codecpar, codecContext);
if (ret < 0) {
qDebug() << "无法复制编码器参数:" << ret;
return false;
}
#endif
// 打开输出文件
if (!(formatContext->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open(&formatContext->pb, filename.toUtf8().constData(), AVIO_FLAG_WRITE);
if (ret < 0) {
qDebug() << "无法打开输出文件:" << ret;
return false;
}
}
// 写入文件头
ret = avformat_write_header(formatContext, nullptr);
if (ret < 0) {
qDebug() << "无法写入文件头:" << ret;
return false;
}
// 分配AVFrame
frame = av_frame_alloc();
if (!frame) {
qDebug() << "无法分配AVFrame";
return false;
}
frame->format = codecContext->pix_fmt;
frame->width = codecContext->width;
frame->height = codecContext->height;
ret = av_frame_get_buffer(frame, 0);
if (ret < 0) {
qDebug() << "无法分配AVFrame缓冲区:" << ret;
return false;
}
// 分配AVPacket
packet = av_packet_alloc();
if (!packet) {
qDebug() << "无法分配AVPacket";
return false;
}
// 初始化SwsContext用于图像格式转换
swsContext = sws_getContext(
width, height,
AV_PIX_FMT_BGR24, // 输入格式 (Qt的QImage通常是BGR24)
width, height,
codecContext->pix_fmt,
SWS_BILINEAR, nullptr, nullptr, nullptr
);
if (!swsContext) {
qDebug() << "无法初始化SwsContext";
return false;
}
return true;
}
bool MainWindow::initializeRTSP(int width, int height, int fps)
{
// 初始化RTSP输出上下文
int ret = avformat_alloc_output_context2(&rtspContext, nullptr, "rtsp", rtspUrl.toUtf8().constData());
if (ret < 0 || !rtspContext) {
qDebug() << "无法分配RTSP输出格式上下文:" << ret;
return false;
}
// 添加视频流
rtspStream = avformat_new_stream(rtspContext, nullptr);
if (!rtspStream) {
qDebug() << "无法创建RTSP视频流";
avformat_free_context(rtspContext);
rtspContext = nullptr;
return false;
}
// 兼容旧版FFmpeg API
#if LIBAVCODEC_VERSION_MAJOR < 58
// 复制编码器参数
rtspStream->codec->codec_id = codecContext->codec_id;
rtspStream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
rtspStream->codec->width = codecContext->width;
rtspStream->codec->height = codecContext->height;
rtspStream->codec->pix_fmt = codecContext->pix_fmt;
rtspStream->codec->bit_rate = codecContext->bit_rate;
rtspStream->codec->time_base = codecContext->time_base;
// 设置RTSP流的时间基准
rtspStream->time_base = codecContext->time_base;
#else
// 复制编码器参数
ret = avcodec_parameters_copy(rtspStream->codecpar, stream->codecpar);
if (ret < 0) {
qDebug() << "无法复制编码器参数到RTSP流:" << ret;
avformat_free_context(rtspContext);
rtspContext = nullptr;
return false;
}
// 设置RTSP流的时间基准
rtspStream->time_base = stream->time_base;
#endif
// 设置RTSP传输选项
AVDictionary *options = nullptr;
av_dict_set(&options, "rtsp_transport", "tcp", 0); // 使用TCP传输
av_dict_set(&options, "stimeout", "2000000", 0); // 设置超时2秒
// 打开RTSP输出
if (!(rtspContext->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open2(&rtspContext->pb, rtspUrl.toUtf8().constData(), AVIO_FLAG_WRITE, nullptr, &options);
if (ret < 0) {
qDebug() << "无法打开RTSP输出:" << ret;
av_dict_free(&options);
avformat_free_context(rtspContext);
rtspContext = nullptr;
return false;
}
}
// 写入RTSP头
ret = avformat_write_header(rtspContext, &options);
if (ret < 0) {
qDebug() << "无法写入RTSP头:" << ret;
avio_closep(&rtspContext->pb);
avformat_free_context(rtspContext);
rtspContext = nullptr;
av_dict_free(&options);
return false;
}
av_dict_free(&options);
return true;
}
void MainWindow::finalizeFFmpeg()
{
// 刷新编码器缓冲区
int ret;
while (true) {
ret = avcodec_send_frame(codecContext, nullptr);
if (ret < 0)
break;
while (ret >= 0) {
ret = avcodec_receive_packet(codecContext, packet);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
break;
else if (ret < 0)
break;
// 设置时间戳
av_packet_rescale_ts(packet, codecContext->time_base, stream->time_base);
packet->stream_index = stream->index;
// 写入本地文件
ret = av_interleaved_write_frame(formatContext, packet);
if (ret < 0) {
qDebug() << "写入本地文件数据包失败:" << ret;
}
// 推送到RTSP
pushToRTSP(packet);
av_packet_unref(packet);
}
}
// 写入文件尾
av_write_trailer(formatContext);
// 关闭RTSP流
if (rtspContext) {
av_write_trailer(rtspContext);
if (!(rtspContext->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&rtspContext->pb);
}
avformat_free_context(rtspContext);
rtspContext = nullptr;
}
// 释放资源
if (swsContext) {
sws_freeContext(swsContext);
swsContext = nullptr;
}
if (packet) {
av_packet_free(&packet);
packet = nullptr;
}
if (frame) {
av_frame_free(&frame);
frame = nullptr;
}
if (codecContext) {
avcodec_free_context(&codecContext);
codecContext = nullptr;
}
if (formatContext) {
if (!(formatContext->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&formatContext->pb);
}
avformat_free_context(formatContext);
formatContext = nullptr;
}
}
void MainWindow::addFrame(const QImage &image)
{
// 确保图像格式正确
if (image.format() != QImage::Format_RGB888) {
qDebug() << "图像格式不是RGB24";
return;
}
// 准备输入数据
const uint8_t *src_data[4] = { image.bits(), nullptr, nullptr, nullptr };
int src_linesize[4] = { image.bytesPerLine(), 0, 0, 0 };
// 转换图像格式
sws_scale(swsContext, src_data, src_linesize, 0, image.height(),
frame->data, frame->linesize);
// 设置时间戳
frame->pts = frameCount;
// 发送帧到编码器
int ret = avcodec_send_frame(codecContext, frame);
if (ret < 0) {
qDebug() << "发送帧到编码器失败:" << ret;
return;
}
// 接收编码后的数据包
while (ret >= 0) {
ret = avcodec_receive_packet(codecContext, packet);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
break;
else if (ret < 0) {
qDebug() << "接收编码数据包失败:" << ret;
break;
}
// 调整时间戳
av_packet_rescale_ts(packet, codecContext->time_base, stream->time_base);
packet->stream_index = stream->index;
// 写入本地文件
ret = av_interleaved_write_frame(formatContext, packet);
if (ret < 0) {
qDebug() << "写入本地文件数据包失败:" << ret;
}
// 推送到RTSP
pushToRTSP(packet);
av_packet_unref(packet);
}
}
void MainWindow::pushToRTSP(AVPacket *packet)
{
if (!rtspContext)
return;
// 复制数据包(RTSP流需要独立的时间戳)
AVPacket *rtspPacket = av_packet_clone(packet);
if (!rtspPacket) {
qDebug() << "无法复制RTSP数据包";
return;
}
// 调整RTSP流的时间戳
av_packet_rescale_ts(rtspPacket, stream->time_base, rtspStream->time_base);
rtspPacket->stream_index = rtspStream->index;
// 发送到RTSP服务器
int ret = av_interleaved_write_frame(rtspContext, rtspPacket);
if (ret < 0) {
qDebug() << "RTSP推流失败:" << ret;
}
av_packet_free(&rtspPacket);
}
int ret = av_interleaved_write_frame(rtspContext, rtspPacket);这里返回-22
最新发布