Qt+VS实现使用ffmpeg拉流并保存到MP4文件(附源代码)

下面介绍如何通过ffmpeg拉流并保存到MP4文件。废话不多说,直接上代码。

头文件EncodeThreadClass.h

#ifndef ENCODETHREADCLASS_H
#define ENCODETHREADCLASS_H
#include <QThread>
#include <QDebug>

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}

class EncodeThreadClass : public QThread
{
	Q_OBJECT

public:
	EncodeThreadClass(QObject *parent);
	~EncodeThreadClass();

public:
	void setUrlFileName(const QString& url, const QString &outFile);
	void stop();
	void close();

private:
	bool openInputUrl();
	bool openOutputFile();
	void read();

protected:
	void run();
	double r2d(AVRational r);

private:
	//input
	AVFormatContext* m_inputFmtCtx = NULL;
	AVCodecID m_codecId;
	AVPixelFormat m_chromaFormat;
	AVBSFContext *m_bsfc = NULL;
	AVBSFContext *m_absfc = NULL;
	AVPacket *m_avpacket = NULL;
	AVPacket *m_pktFilter = NULL;
	//视频index
	int m_videoIndex = -1;
	//音频index
	int m_audioIndex = -1;
	//视频总时间,单位ms
	int64_t m_totalTime = 0;
	//视频宽度;
	int m_width = 0;
	//视频高度;
	int m_height = 0;
	//视频帧率;
	int m_fps = 0;
	//音频样本率
	int m_sampleRate = 0;
	//音频样本大小
	AVSampleFormat m_sampleSize;
	//音频通道数
	int m_channel = 0;
	uint64_t m_channel_layout = 0;
	//是否存在音频
	bool m_bExistAudio = false;
	//是否是打开成功
	bool m_bOpen = false;
	//输入url
	QString m_inputUrl;

	//output
	AVFormatContext* m_outputFmtCtx = NULL;
	AVCodec *m_pCodec = nullptr;
	AVCodec *m_aCodec = nullptr;
	AVStream *m_pOutStream = nullptr;
	AVStream *m_aOutStream = nullptr;
	QString m_outFileName;

	bool m_bExit = false;
	bool m_bFindKey = false;
	int64_t m_ptsInc = 0;
	int64_t m_aptsInc = 0;

	bool is_generate = false;
};

#endif //ENCODETHREADCLASS_H

源文件EncodeThreadClass.cpp

#include "EncodeThreadClass.h"

/**
 * Construct the worker thread.
 *
 * Fix: forward @p parent to the QThread base so Qt's object tree owns this
 * instance — the caller creates it with `new EncodeThreadClass(this)` and
 * relies on parent ownership for cleanup; the original dropped the pointer.
 */
EncodeThreadClass::EncodeThreadClass(QObject *parent)
	: QThread(parent)
{
}

/**
 * Destructor.
 *
 * Fix: make sure the worker loop has finished before the object is torn
 * down — destroying a QThread while it is still running is undefined
 * behaviour.  Both calls are no-ops if the thread never started.
 */
EncodeThreadClass::~EncodeThreadClass()
{
	stop();
	wait();
}

/**
 * Remember the stream source and the target MP4 path.
 * No I/O happens here; the values are consumed by run().
 * Call this before start().
 */
void EncodeThreadClass::setUrlFileName(const QString& url, const QString &outFile)
{
	m_outFileName = outFile;
	m_inputUrl = url;
}

// Request the worker loop in read() to exit; the flag is polled once per
// packet, so the thread stops after the current packet is processed.
// NOTE(review): m_bExit is a plain bool written here (GUI thread) and read
// in the worker thread — consider std::atomic<bool> to avoid a data race.
void EncodeThreadClass::stop()
{
	m_bExit = true;
}

void EncodeThreadClass::close()
{
	//close input
	if (m_inputFmtCtx)
	{
		avformat_close_input(&m_inputFmtCtx);
	}

	//close output
	if (m_outputFmtCtx)
	{
		//写文件尾
		qDebug() << "av_write_trailer(m_pAvformat)" << av_write_trailer(m_outputFmtCtx);
	}

	if (m_outputFmtCtx && !(m_outputFmtCtx->oformat->flags & AVFMT_NOFILE))
	{
		qDebug() << "avio_close(m_outputFmtCtx->pb)" << avio_close(m_outputFmtCtx->pb);
	}

	avformat_free_context(m_outputFmtCtx);
	m_outputFmtCtx = nullptr;

	qDebug() << "close";
}

bool EncodeThreadClass::openInputUrl()
{
	av_register_all();
	avformat_network_init();

	//格式上下文;
	m_inputFmtCtx = avformat_alloc_context();

	AVDictionary *options = NULL;

	av_dict_set(&options, "buffer_size", "1024000", 0);
	av_dict_set(&options, "max_delay", "500000", 0);
	av_dict_set(&options, "stimeout", "2000000", 0);
	av_dict_set(&options, "rtsp_transport", "tcp", 0);

	//根据RTSP地址或者文件名初始化格式化上下文
	int ret = avformat_open_input(&m_inputFmtCtx, m_inputUrl.toStdString().c_str(), NULL, &options);
	if (ret != 0)
	{
		printf("Couldn't open input stream.\n");
		return false;
	}

	av_dump_format(m_inputFmtCtx, 0, m_inputUrl.toStdString().c_str(), 0);

	m_inputFmtCtx->probesize = 1000 * 1024;
	m_inputFmtCtx->max_analyze_duration = 5 * AV_TIME_BASE;

	//查找文件格式;
	if (avformat_find_stream_info(m_inputFmtCtx, NULL) < 0)
	{
		printf("Couldn't find stream information.\n");
		return false;
	}

	for (int i = 0; i < m_inputFmtCtx->nb_streams; i++)
	{
		AVStream *stream = m_inputFmtCtx->streams[i];

		if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			m_videoIndex = i;

			//编码ID;
			m_codecId = stream->codecpar->codec_id;
			//像素格式;
			m_chromaFormat = (AVPixelFormat)stream->codecpar->format;
			//视频信息;
			m_totalTime = stream->duration / (AV_TIME_BASE / 1000);
			m_width = stream->codecpar->width;
			m_height = stream->codecpar->height;
			//获取帧率;
			m_fps = r2d(stream->avg_frame_rate);
			if (m_fps == 0)
			{
				m_fps = 25;
			}

			//1. 找到相应解码器的过滤器
			const AVBitStreamFilter *bsf = av_bsf_get_by_name("h264_mp4toannexb");
			if (!bsf)
			{
				printf("av_bsf_get_by_name() failed");
				return false;
			}

			//2.过滤器分配内存
			av_bsf_alloc(bsf, &m_bsfc);
			//3.添加解码器属性
			avcodec_parameters_copy(m_bsfc->par_in, stream->codecpar);
			//4. 初始化过滤器上下文
				av_bsf_init(m_bsfc);
		}
		else if (stream->codec->codec_type == AVMEDIA_TYPE_AUDIO)
		{
			m_audioIndex = i;
			m_bExistAudio = true;

			m_sampleRate = stream->codecpar->sample_rate;
			m_channel = stream->codecpar->channels;
			m_channel_layout = stream->codecpar->channel_layout;

			//1. 找到相应解码器的过滤器
			const AVBitStreamFilter *bsfcAAC = av_bsf_get_by_name("aac_adtstoasc");
			if (!bsfcAAC)
			{
				printf("av_bsf_get_by_name() failed");
				return false;
			}

			//2.过滤器分配内存
			av_bsf_alloc(bsfcAAC, &m_absfc);
			//3.添加解码器属性
			avcodec_parameters_copy(m_absfc->par_in, stream->codecpar);
			//4. 初始化过滤器上下文
				av_bsf_init(m_absfc);
		}
	}

	m_avpacket = new AVPacket;
	av_init_packet(m_avpacket);
	m_avpacket->data = NULL;
	m_avpacket->size = 0;

	m_pktFilter = new AVPacket;
	av_init_packet(m_pktFilter);
	m_pktFilter->data = NULL;
	m_pktFilter->size = 0;

	return true;
}

bool EncodeThreadClass::openOutputFile()
{
	std::string fileName = m_outFileName.toStdString();
	const char* pszFileName = fileName.c_str();

	//初始化输出码流的AVFormatContext。
	if (avformat_alloc_output_context2(&m_outputFmtCtx, NULL, NULL, pszFileName) < 0)
	{
		printf("Could not create output context\n");
		return false;
	}

	//判断输入流上下文是否存在音视频
	for (int i = 0; i < m_inputFmtCtx->nb_streams; i++)
	{
		AVStream *stream = m_inputFmtCtx->streams[i];

		//找到音视频的编码器
		AVCodec *codec;
		codec = avcodec_find_encoder(stream->codecpar->codec_id);
		if (!codec)
		{
			printf("could not find encoder for \n");
			return false;
		}

		//创建音视频新流
		AVStream *out_stream;
		out_stream = avformat_new_stream(m_outputFmtCtx, codec);
		if (!out_stream)
		{
			printf("avformat_new_stream error\n");
			return false;
		}

		if (m_outputFmtCtx->oformat->flags & AVFMT_GLOBALHEADER)
		{
			out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
		}

		if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			m_pOutStream = out_stream;

			AVCodecContext *c = out_stream->codec;
			c->codec_id = AV_CODEC_ID_H264;
			c->bit_rate = 0;
			c->width = m_width;
			c->height = m_height;
			c->time_base.den = 1;
			c->time_base.num = 30;
			c->gop_size = 1;
			c->pix_fmt = AV_PIX_FMT_YUV420P;
			//打开解码器
			int ret = avcodec_open2(c, codec, NULL);
			if (ret < 0)
			{
				printf("can't open avcodec_open2 \n");
				return false;
			}
		}
		else if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
		{
			m_aOutStream = out_stream;
			AVCodecContext *c = out_stream->codec;
			c->sample_fmt = codec->sample_fmts ? codec->sample_fmts[0] : AV_SAMPLE_FMT_S16;
			c->bit_rate = 0;
			c->sample_rate = m_sampleRate;
			c->channels = m_channel;
			c->channel_layout = m_channel_layout;
			//打开解码器
			int ret = avcodec_open2(c, codec, NULL);
			if (ret < 0)
			{
				printf("can't open avcodec_open2 \n");
				return false;
			}
		}
	}

	av_dump_format(m_outputFmtCtx, 0, pszFileName, 1);

	if (!(m_outputFmtCtx->flags & AVFMT_NOFILE))
	{
		//打开输出的文件,准备往里面写数据
		if (avio_open(&m_outputFmtCtx->pb, pszFileName, AVIO_FLAG_WRITE) < 0)
		{
			printf("could not open %s\n", pszFileName);
			return false;
		}
	}

	//写入文件头
	int ret = avformat_write_header(m_outputFmtCtx, NULL);
	if (ret < 0)
	{
		printf("avformat_write_header error\n");
		return false;
	}
	return true;
}

void EncodeThreadClass::read()
{
	while (!m_bExit)
	{
		if (av_read_frame(m_inputFmtCtx, m_avpacket) < 0)
		{
			close();
			break;
		}

		if (m_avpacket->flags &AV_PKT_FLAG_KEY)
		{
			m_bFindKey = true;
		}
		if (m_bFindKey)
		{
			AVStream *in_stream = m_inputFmtCtx->streams[m_avpacket->stream_index];
			AVStream *out_stream = m_outputFmtCtx->streams[m_avpacket->stream_index];

			if (m_avpacket->stream_index == m_videoIndex)
			{
				av_bsf_send_packet(m_bsfc, m_avpacket);
				av_bsf_receive_packet(m_bsfc, m_pktFilter);

				m_pktFilter->pts = av_rescale_q_rnd(m_pktFilter->pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
				m_pktFilter->dts = av_rescale_q_rnd(m_pktFilter->dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
				m_pktFilter->duration = av_rescale_q_rnd(m_pktFilter->duration, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
				m_pktFilter->stream_index = out_stream->index;

				int nError = av_interleaved_write_frame(m_outputFmtCtx, m_pktFilter);
				if (nError != 0)
				{
					char tmpErrString[AV_ERROR_MAX_STRING_SIZE] = { 0 };
					av_make_error_string(tmpErrString, AV_ERROR_MAX_STRING_SIZE, nError);

					qDebug() << "av_interleaved_write_frame  video" << nError << tmpErrString;
				}
			}
			else if (m_avpacket->stream_index == m_audioIndex)
			{
				av_bsf_send_packet(m_absfc, m_avpacket);
				av_bsf_receive_packet(m_absfc, m_pktFilter);

				m_pktFilter->pts = av_rescale_q_rnd(m_pktFilter->pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
				m_pktFilter->dts = av_rescale_q_rnd(m_pktFilter->dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
				m_pktFilter->duration = av_rescale_q_rnd(m_pktFilter->duration, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
				m_pktFilter->stream_index = out_stream->index;

				int nError = av_interleaved_write_frame(m_outputFmtCtx, m_pktFilter);
				if (nError != 0)
				{
					char tmpErrString[AV_ERROR_MAX_STRING_SIZE] = { 0 };
					av_make_error_string(tmpErrString, AV_ERROR_MAX_STRING_SIZE, nError);

					qDebug() << "av_interleaved_write_frame audio " << nError << tmpErrString;
				}
			}
		}
	}
}

void EncodeThreadClass::run()
{
	if (openInputUrl() && openOutputFile())
	{
		read();
	}
}

/**
 * Convert an AVRational to a double.
 *
 * Fix: guard against a zero denominator — an unset avg_frame_rate of
 * {0, 0} would make av_q2d() return NaN (0/0), which the `m_fps == 0`
 * fallback in openInputUrl() cannot detect.
 *
 * @param r rational to convert
 * @return r.num / r.den, or 0.0 when the denominator is zero
 */
double EncodeThreadClass::r2d(AVRational r)
{
	if (r.den == 0)
	{
		return 0.0;
	}
	return av_q2d(r);
}

实现方式

/* Record video: toggle recording of the RTSP stream to an MP4 file. */
// NOTE(review): a new EncodeThreadClass is allocated on every start click and
// the previous instance is never deleted here; cleanup relies on Qt parent
// ownership via `this` — confirm the constructor forwards parent to QThread,
// otherwise these objects leak. Consider deleteLater() on stop.
	connect(ui.pushButton_GenerateVideoData, &QPushButton::clicked, this, [=]() {
		isGenerated = !isGenerated;
		if (isGenerated)
		{
			// Output file named by timestamp under <appdir>/data/video/.
			QString outputFile = QApplication::applicationDirPath() + "/data/video/" + QDateTime::currentDateTime().toString("yyyy-MM-dd-hh-mm-ss") + ".mp4";
			m_encodeThread = new EncodeThreadClass(this);
			m_encodeThread->setUrlFileName(rtspPath, outputFile);
			m_encodeThread->start();
			ui.pushButton_GenerateVideoData->setText(QStringLiteral("停止生成"));
		}
		else
		{
			if (m_encodeThread)
			{
				// Request exit, wait for the worker to finish, then finalize
				// the MP4 (trailer + file close).
				m_encodeThread->stop();
				m_encodeThread->wait();
				m_encodeThread->close();
			}
			ui.pushButton_GenerateVideoData->setText(QStringLiteral("生成数据"));
		}
	});

以上就是实现拉流保存到MP4文件的方法。

注:如果本文章对您有所帮助,请点赞收藏支持一下,谢谢。^_^

版权声明:本文为博主原创文章,转载请附上博文链接。

在安卓平台上使用Qt环境结合ffmpeg进行流媒体拉流以及硬件解码是一个较为复杂的操作。通常,这涉及到Qt的多媒体模块,以及对ffmpeg的调用。在安卓平台上进行硬件解码,往往需要使用ffmpeg与安卓的MediaCodec API进行集成。 下面是一个简化的代码案例流程,描述如何使用Qt和ffmpeg尝试进行硬件解码: 1. 引入Qt的多媒体模块,确保你的Qt环境配置了对ffmpeg的支持。 2. 使用Qt的`QMediaCaptureSession`和`QMediaPlayer`类来处理媒体流。 3. 调用ffmpeg的API来创建一个解码器上下文,选择硬解码器。 4. 将拉取的数据送入ffmpeg的解码流程中。 5. 使用`MediaCodec`的API来处理硬解码,将解码后的数据渲染到屏幕上。 由于直接将Qt和ffmpeg集成到一起进行硬解的操作相对复杂,且代码量较大,不适合在此详尽展开。一般来说,你可能需要查看Qt和ffmpeg的官方文档,以及安卓的`MediaCodec` API文档,了解如何将这些组件结合起来使用。 下面是一个大致的代码框架示例: ```cpp // 伪代码示例,需要结合Qt和ffmpeg具体API进行调整 // 初始化Qt环境,创建媒体播放器和捕获会话 QMediaPlayer *player = new QMediaPlayer(); QMediaCaptureSession *captureSession = new QMediaCaptureSession(); // 设置媒体源为流媒体地址 player->setMedia(QUrl("http://your.stream.url")); // 将捕获会话与媒体播放器关联 captureSession->setVideoOutput(player); // 创建ffmpeg解码器上下文并选择硬解码器 AVCodecContext *codecContext = avcodec_alloc_context3(nullptr); // 查找硬解码器并打开解码器 AVCodec *codec = avcodec_find_decoder_by_name("h264_mediacodec"); avcodec_open2(codecContext, codec, nullptr); // 这里需要结合Qt和ffmpeg进行流数据的处理和解码 // ... // 释放资源和清理 avcodec_free_context(&codecContext); delete player; delete captureSession; ``` 请注意,上述代码仅为示例框架,实际操作时需要根据具体的ffmpeg版本和Qt版本进行API调用和参数配置的调整。你还需要处理错误情况、内存管理以及多线程问题等。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值