本文记录ffmpeg进行视频格式的转换。
ffmpeg视频格式转换主要流程

ffmpeg视频格式转换示例
#include <cstdlib>
#include <iostream>
extern "C" {
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libavcodec/avcodec.h"
}
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avutil.lib")
// Print a human-readable description of an FFmpeg error code, then pause.
//
// info: context string naming the call that failed (not modified; made
//       const so string literals can be passed legally in modern C++).
// ret:  the negative AVERROR code returned by the failing FFmpeg call.
void printErr(const char *info, int ret)
{
    char err[1024] = { 0 };
    // av_strerror fills err with a description of ret, or a generic
    // message if the code is unknown; err stays NUL-terminated.
    av_strerror(ret, err, sizeof(err));
    std::cout << info << ": " << err << std::endl;
    system("pause"); // Windows-only: keep the console window open
}
int main(int argv, char *argc[])
{
//打开输入封装格式
char *inPath = "D:/video/Bilby.mp4";
AVFormatContext *inFormatCtx = nullptr;
int ret = avformat_open_input(&inFormatCtx, inPath, NULL, NULL);
if (ret != 0) {
printErr("avformat_open_input failed", ret);
return -1;
}
//读取packet,获取stream信息
avformat_find_stream_info(inFormatCtx, NULL);
av_dump_format(inFormatCtx, 0, NULL, 0);
//创建输出的封装格式
char *outPath = "D:/video/Bilby.flv";
AVFormatContext *outFormatCtx = nullptr;
ret = avformat_alloc_output_context2(&outFormatCtx, NULL, NULL, outPath);
if (ret != 0) {
printErr("avformat_alloc_output_context2 failed", ret);
return -1;
}
//为输出封装格式创建stream
for (int i = 0; i < inFormatCtx->nb_streams; i++) {
AVStream *inStream = inFormatCtx->streams[i];
AVStream *outStream = avformat_new_stream(outFormatCtx, NULL);
ret = avcodec_parameters_copy(outStream->codecpar, inStream->codecpar);
if (ret != 0) {
printErr("avcodec_parameters_copy failed", ret);
return -1;
}
//如果不设置,某些格式转换会失败
outStream->codecpar->codec_tag = 0;
}
av_dump_format(outFormatCtx, 0, NULL, 1);
//打开io,创建输出文件
ret = avio_open(&outFormatCtx->pb, outPath, AVIO_FLAG_WRITE);
if (ret != 0) {
printErr("avio_open failed", ret);
return -1;
}
//写入stream头,会改变输出stream中的time_base
ret = avformat_write_header(outFormatCtx, NULL);
if (ret != 0) {
printErr("avformat_write_header failed", ret);
return -1;
}
//遍历输入文件中的packet
AVPacket pkt;
av_init_packet(&pkt);
while (1) {
//读取packet
ret = av_read_frame(inFormatCtx, &pkt);
if (ret < 0) {
break;
}
//pts,dts,duration以输出format的time_base转换
AVRational srcTb = inFormatCtx->streams[pkt.stream_index]->time_base;
AVRational dstTb = outFormatCtx->streams[pkt.stream_index]->time_base;
pkt.pts = av_rescale_q_rnd(pkt.pts, srcTb, dstTb, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, srcTb, dstTb, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, srcTb, dstTb);
//packet写入文件
av_interleaved_write_frame(outFormatCtx, &pkt);
}
//输出剩余的packet和stream尾部信息
av_write_trailer(outFormatCtx);
//关闭输入输出
avformat_close_input(&inFormatCtx);
avio_close(outFormatCtx->pb);
avformat_free_context(outFormatCtx);
system("pause");
return 0;
}