原帖链接如下:http://blog.youkuaiyun.com/ren65432/article/details/43449391
有时候我们需要获取h264裸码流进行分析。本文介绍如何通过FFmpeg 获取h264 码流。获取到的h264码流文件 可以直接通过vlc 等播放器直接播放。
一、 .h264文件数据流
如下图 是通过WinHex工具 分析的一个.h264文件数据:
ffmpeg 获取h264 思路如下:
1,写同步码,4字节(00,00,00,01)
2,写sps
3,写同步码,4字节(00,00,00,01)
4,写pps
5,将读到的AVPacket.data 的前4字节(NALU长度字段)替换成(00,00,00,01)后写文件。
二、sps pps的获取
H.264的SPS和PPS串,包含了初始化H.264解码器所需要的信息参数,包括编码所用的profile,level,图像的宽和高,deblock滤波器等。
(1)avcC的数据结构:
- aligned(8) class AVCDecoderConfigurationRecord {
- unsigned int(8) configurationVersion = 1;
- unsigned int(8) AVCProfileIndication;
- unsigned int(8) profile_compatibility;
- unsigned int(8) AVCLevelIndication;
- bit(6) reserved = '111111'b;
- unsigned int(2) lengthSizeMinusOne;
- bit(3) reserved = '111'b;
- unsigned int(5) numOfSequenceParameterSets;
- for (i=0; i < numOfSequenceParameterSets; i++) {
- unsigned int(16) sequenceParameterSetLength ;
- bit(8*sequenceParameterSetLength) sequenceParameterSetNALUnit;
- }
- unsigned int(8) numOfPictureParameterSets;
- for (i=0; i < numOfPictureParameterSets; i++) {
- unsigned int(16) pictureParameterSetLength;
- bit(8*pictureParameterSetLength) pictureParameterSetNALUnit;
- }
- }
(2) FFmpeg 如何获取sps和pps
ffmpeg获取sps和pps非常简单。avcC数据结构对应于AVFormatContext->streams[H264Index]->codec->extradata。
代码如下:
- if ((ret = avformat_open_input(&ic, InputFileName, NULL, NULL)) < 0)
- {
- xprintf->Trace(0,"******** Decode avformat_open_input() Function result=%d",ret);
- return ret;
- }
- if ((ret = avformat_find_stream_info(ic, NULL)) < 0)
- {
- xprintf->Trace(0,"******** Decode avformat_find_stream_info() Function result=%d ",ret);
- avformat_close_input(&ic);
- return ret;
- }
- for (int i=0;i<ic->streams[0]->codec->extradata_size;i++)
- {
- printf("%x ",ic->streams[0]->codec->extradata[i]);
- }
对应上面的avcC结构体我们知道:
第7、8字节为sps长度(0x00,0x18),即为24字节。
接下来的24字节为sps数据,(67,64,0,20,ac,b2,0,a0,b,76,2,20,0,0,3,0,20,0,0,c,81,e3,6,49)。
接下来的1个字节表示后面跟着的PPS个数:
- numOfPictureParameterSets, 为1。
再接下来的2字节为pps长度(0x00,0x06),之后的6字节为pps数据,(68,eb,c3,cb,22,c0)。
三、以上各部分的详细代码如下:
- int GetH264Stream()
- {
- int ret;
- AVFormatContext *ic=NULL;
- AVFormatContext *oc=NULL;
- uint8_t sps[100];
- uint8_t pps[100];
- int spsLength=0;
- int ppsLength=0;
- uint8_t startcode[4]={00,00,00,01};
- FILE *fp;
- fp=fopen("123.h264","wb+");
- char *InputFileName="11111.mp4";
- if ((ret = avformat_open_input(&ic, InputFileName, NULL, NULL)) < 0)
- {
- return ret;
- }
- if ((ret = avformat_find_stream_info(ic, NULL)) < 0)
- {
- avformat_close_input(&ic);
- return ret;
- }
- spsLength=ic->streams[0]->codec->extradata[6]*0xFF+ic->streams[0]->codec->extradata[7];
- ppsLength=ic->streams[0]->codec->extradata[8+spsLength+1]*0xFF+ic->streams[0]->codec->extradata[8+spsLength+2];
- for (int i=0;i<spsLength;i++)
- {
- sps[i]=ic->streams[0]->codec->extradata[i+8];
- }
- for (int i=0;i<ppsLength;i++)
- {
- pps[i]=ic->streams[0]->codec->extradata[i+8+2+1+spsLength];
- }
- for(int i=0;i<ic->nb_streams;i++)
- {
- if(ic->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
- {
- videoindex=i;
- }
- else if(ic->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO)
- {
- audioindex=i;
- }
- }
- AVOutputFormat *ofmt = NULL;
- AVPacket pkt;
- avformat_alloc_output_context2(&oc, NULL, NULL, OutPutPath);
- if (!oc)
- {
- printf( "Could not create output context\n");
- ret = AVERROR_UNKNOWN;
- }
- ofmt = oc->oformat;
- int i;
- for (i = 0; i < ic->nb_streams; i++)
- {
- AVStream *in_stream = ic->streams[i];
- AVStream *out_stream = avformat_new_stream(oc, in_stream->codec->codec);
- if (!out_stream)
- {
- printf( "Failed allocating output stream\n");
- ret = AVERROR_UNKNOWN;
- }
- ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
- if (ret < 0)
- {
- printf( "Failed to copy context from input to output stream codec context\n");
- }
- out_stream->codec->codec_tag = 0;
- if (oc->oformat->flags & AVFMT_GLOBALHEADER)
- out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
- }
- if (!(ofmt->flags & AVFMT_NOFILE))
- {
- ret = avio_open(&oc->pb, OutPutPath, AVIO_FLAG_WRITE);
- if (ret < 0)
- {
- printf( "Could not open output file '%s'", OutPutPath);
- }
- }
- ret = avformat_write_header(oc, NULL);
- int frame_index=0;
- int flag=1;
- av_init_packet(&pkt);
- pkt.data = NULL;
- pkt.size = 0;
- while (1)
- {
- AVStream *in_stream, *out_stream;
- ret = av_read_frame(ic, &pkt);
- if (ret < 0)
- break;
- in_stream = ic->streams[pkt.stream_index];
- out_stream = oc->streams[pkt.stream_index];
- AVPacket tmppkt;
- if (in_stream->codec->codec_type==AVMEDIA_TYPE_VIDEO )
- {
- if (flag)
- {
- fwrite(startcode,4,1,fp);
- fwrite(sps,spsLength,1,fp);
- fwrite(startcode,4,1,fp);
- fwrite(pps,ppsLength,1,fp);
- pkt.data[0]=0x00;
- pkt.data[1]=0x00;
- pkt.data[2]=0x00;
- pkt.data[3]=0x01;
- fwrite(pkt.data,pkt.size,1,fp);
- flag=0;
- }
- else
- {
- pkt.data[0]=0x00;
- pkt.data[1]=0x00;
- pkt.data[2]=0x00;
- pkt.data[3]=0x01;
- fwrite(pkt.data,pkt.size,1,fp);
- }
- pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
- pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
- pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
- pkt.pos = -1;
- pkt.stream_index=0;
- ret = av_interleaved_write_frame(oc, &pkt);
- }
- av_free_packet(&pkt);
- }
- fclose(fp);
- fp=NULL;
- av_write_trailer(oc);
- return 0;
- }