int MP4MuxWriteIndex(MP4MUXCONTEXT *pMP4MuxContext)
{
long currentOffset;
int ret = 0;
int retSize = 0;
off_t offset = lseek(pMP4MuxContext->iMP4Fd, 0, SEEK_CUR);
STM_INFO("MP4MuxWriteIndex() pMP4MuxContext->uiOutputOption=%d", pMP4MuxContext->uiOutputOption);
switch (pMP4MuxContext->uiOutputOption)
{
case MP4MUX_OUTPUT_OPTION_DEFAULT:
case MP4MUX_OUTPUT_OPTION_TWO_PASS:
MP4MuxResetByteInfoArray(pMP4MuxContext);
MP4MuxPutMovieAtom(pMP4MuxContext);
ret = lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->uiCurrentOffset, SEEK_SET);
if (ret < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
retSize = write(pMP4MuxContext->iMP4Fd, pMP4MuxContext->pInfoByteArray->pucBuffer, pMP4MuxContext->pInfoByteArray->iCurPos);
if (retSize != pMP4MuxContext->pInfoByteArray->iCurPos)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
ret = lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->uiMdatSizePos, SEEK_SET);
if (ret < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
retSize = write(pMP4MuxContext->iMP4Fd, pMP4MuxContext->pucMdatSizeField, MP4MUX_ATOM_SIZE_LENGTH);
if (retSize != MP4MUX_ATOM_SIZE_LENGTH)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
break;
case MP4MUX_OUTPUT_OPTION_IN_PLACE:
currentOffset = lseek(pMP4MuxContext->iMP4Fd, 0, SEEK_SET);
if (currentOffset < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
MP4MuxResetByteInfoArray(pMP4MuxContext);
MP4MuxPutMovieAtom(pMP4MuxContext);
/* Jump to the currently updated offset of the free box for the event */
ret = lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->uiIndexOffset, SEEK_SET);
if (ret < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
retSize = write(pMP4MuxContext->iMP4Fd, pMP4MuxContext->pInfoByteArray->pucBuffer, pMP4MuxContext->pInfoByteArray->iCurPos);
if (retSize != pMP4MuxContext->pInfoByteArray->iCurPos)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
ret = lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->uiMdatSizePos, SEEK_SET);
if (ret < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
retSize = write(pMP4MuxContext->iMP4Fd, pMP4MuxContext->pucMdatSizeField, MP4MUX_ATOM_SIZE_LENGTH);
if (retSize != MP4MUX_ATOM_SIZE_LENGTH)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
ret = lseek(pMP4MuxContext->iMP4Fd, currentOffset, SEEK_SET);
if (ret < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
break;
case MP4MUX_OUTPUT_OPTION_REALTIME:
// Nothing to update
if (pMP4MuxContext->piSampleInterval[MP4MUX_VIDEO_ES_INDEX] == 0
// #ifdef DUAL_CAM
#ifdef APP_STREAM
&& pMP4MuxContext->piSampleInterval[MP4MUX_VIDEO2_ES_INDEX] == 0
#endif
&& pMP4MuxContext->piSampleInterval[MP4MUX_AUDIO_ES_INDEX] == 0)
{
break;
}
MP4MuxResetByteInfoArray(pMP4MuxContext);
if (!(pMP4MuxContext->iIfNotFirstlyUpdateMovieAtom))
{
unsigned int uiSize = pMP4MuxContext->uiIndexOffset - pMP4MuxContext->uiMdatSizePos;
pMP4MuxContext->pucMdatSizeField[0] = (uiSize >> 24) & 0xff;
pMP4MuxContext->pucMdatSizeField[1] = (uiSize >> 16) & 0xff;
pMP4MuxContext->pucMdatSizeField[2] = (uiSize >> 8) & 0xff;
pMP4MuxContext->pucMdatSizeField[3] = uiSize & 0xff;
ret = lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->uiMdatSizePos, SEEK_SET);
if (ret < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
if (write(pMP4MuxContext->iMP4Fd, pMP4MuxContext->pucMdatSizeField,
MP4MUX_ATOM_SIZE_LENGTH) != MP4MUX_ATOM_SIZE_LENGTH)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
}
ret = lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->uiIndexOffset, SEEK_SET);
if (ret < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
MP4MuxUpdateMovieAtom(pMP4MuxContext);
fdatasync(pMP4MuxContext->iMP4Fd);
ret = lseek(pMP4MuxContext->iMP4Fd, offset, SEEK_SET);
if (ret < 0)
{
STM_ERROR("%s", strerror(errno));
return MP4MUX_EC_FAILURE;
}
fdatasync(pMP4MuxContext->iMP4Fd); // flush the data through to the SD card
break;
default:
break;
}
return(MP4MUX_EC_OK);
}
The frame-saving path that calls MP4MuxWriteIndex():
static int mp4_storage_do_saving_mp4(MP4_STORAGE_FRAME_TYPE_E type, PTR_FRAME_HEAD_S p_frame_head,
unsigned char *p_frame_data)
{
MP4MUXCONTEXT *p_mp4_mux_context = &(g_mp4_storage.mp4_mux_context);
MBUFFERESSideInfo side_info;
MBUFFERESBuffer *p_esbuffer = &(g_mp4_storage.v_input_buf);
int es_index = MP4MUX_VIDEO_ES_INDEX;
int ret = OK;
long long frame_timestamp = 0LL;
unsigned int frame_size = 0;
frame_timestamp = ntohll(p_frame_head->timestamp);
frame_size = ntohl(p_frame_head->data_len);
memset(&side_info, 0, sizeof(MBUFFERESSideInfo));
MBUFFERESSETFRAMETYPE(MBUFFER_ES_FRAME_TYPE_NORMALFRAME, side_info.iFlags);
side_info.PTS = frame_timestamp * 9 / 100;
side_info.DTS = -1;
side_info.iSize = (int) (frame_size);
switch (type)
{
#ifdef AUDIO_RECORD_SUPPORT
case MP4_STORAGE_FRAME_TYPE_AUDIO:
MBUFFERESSETIFKEYFRAME(1, side_info.iFlags);
p_esbuffer = &(g_mp4_storage.a_input_buf);
es_index = MP4MUX_AUDIO_ES_INDEX;
#ifdef AAC_SUPPORT
if (TP_AVCODEC_AAC_ADTS == p_mp4_mux_context->iAudioCodec && !get_record_audio_enable())
{
frame_size = AUDIO_MUTE_AAC_FRAME_LEN;
}
#endif
break;
#endif
case MP4_STORAGE_FRAME_TYPE_VIDEO_P:
MBUFFERESSETIFKEYFRAME(0, side_info.iFlags);
break;
case MP4_STORAGE_FRAME_TYPE_VIDEO_NON_EVENT_START_I:
MBUFFERESSETIFKEYFRAME(1, side_info.iFlags);
break;
case MP4_STORAGE_FRAME_TYPE_VIDEO_EVENT_START_I:
MBUFFERESSETIFKEYFRAME(1, side_info.iFlags);
MBUFFERESSETIFFIRSTFRAMEOFEVENT(1, side_info.iFlags);
break;
// #ifdef DUAL_CAM
#ifdef APP_STREAM
case MP4_STORAGE_FRAME_TYPE_VIDEO2_P:
MBUFFERESSETIFKEYFRAME(0, side_info.iFlags);
p_esbuffer = &(g_mp4_storage.v2_input_buf);
es_index = MP4MUX_VIDEO2_ES_INDEX;
STM_INFO("dosaving MP4_STORAGE_FRAME_TYPE_VIDEO2_P");
break;
case MP4_STORAGE_FRAME_TYPE_VIDEO2_NON_EVENT_START_I:
MBUFFERESSETIFKEYFRAME(1, side_info.iFlags);
p_esbuffer = &(g_mp4_storage.v2_input_buf);
es_index = MP4MUX_VIDEO2_ES_INDEX;
STM_INFO("dosaving MP4_STORAGE_FRAME_TYPE_VIDEO2_NON_EVENT_START_I");
break;
case MP4_STORAGE_FRAME_TYPE_VIDEO2_EVENT_START_I:
MBUFFERESSETIFKEYFRAME(1, side_info.iFlags);
MBUFFERESSETIFFIRSTFRAMEOFEVENT(1, side_info.iFlags);
p_esbuffer = &(g_mp4_storage.v2_input_buf);
es_index = MP4MUX_VIDEO2_ES_INDEX;
STM_INFO("dosaving MP4_STORAGE_FRAME_TYPE_VIDEO2_EVENT_START_I");
break;
#endif
#ifdef VIDEO_AVBR_ENABLE
case MP4_STORAGE_FRAME_TYPE_VIDEO_NON_EVENT_START_VIRTUAL_I:
MBUFFERESSETIFKEYFRAME(0, side_info.iFlags);
MBUFFERESSETIFVIRTUALIFRAME(1, side_info.iFlags);
break;
case MP4_STORAGE_FRAME_TYPE_VIDEO_ONLY_DECODING_I:
MBUFFERESSETIFKEYFRAME(1, side_info.iFlags);
MBUFFERESSETIFONLYDECODINGFRAME(1, side_info.iFlags);
break;
case MP4_STORAGE_FRAME_TYPE_VIDEO_EVENT_START_VIRTUAL_I:
MBUFFERESSETIFKEYFRAME(0, side_info.iFlags);
MBUFFERESSETIFFIRSTFRAMEOFEVENT(1, side_info.iFlags);
MBUFFERESSETIFVIRTUALIFRAME(1, side_info.iFlags);
break;
#endif
default:
return MP4MUX_EC_FAILURE;
}
if (MBUFFERESGETIFFIRSTFRAMEOFEVENT(side_info.iFlags))
{
side_info.llParameter = (long long) p_frame_head->reserve1;
}
ret = MBUFFERESAppendData(p_frame_data, (int) (frame_size), 1, p_esbuffer);
if (ret != MBUFFER_EC_OK)
{
STM_ERROR("Mbuffer append data failed");
ret = ERROR;
goto end;
}
MBUFFERESSetSideInfo(&side_info, p_esbuffer);
ret = MP4MuxPutData(es_index, p_mp4_mux_context);
if (ret == MP4MUX_EC_NEED_UPDATE_BOX)
{
STM_INFO("MP4MUX_EC_NEED_UPDATE_BOX");
if (MP4MuxWriteIndex(p_mp4_mux_context) != MP4MUX_EC_OK)
{
STM_ERROR("Write mp4 index failed");
ret = ERROR;
goto end;
}
/*
* Every time the MP4 index is updated, drop the page cache for the whole
* file; otherwise the cache grows by roughly 20 KB (the index boxes are
* scattered across the file, so for now the cache of the entire MP4 file
* is dropped; a sketch of such a helper follows this function).
*
* After this operation the per-GOP drop-cache step is skipped.
*/
drop_file_cache(p_mp4_mux_context->iMP4Fd, true);
mp4_storage_notify_sync();
}
else if (ret == MP4MUX_EC_OUT_OF_RANGE)
{
STM_INFO("MP4MUX_EC_OUT_OF_RANGE");
g_mp4_storage.if_file_full = 1;
if (MP4MuxWriteIndex(p_mp4_mux_context) != MP4MUX_EC_OK)
{
STM_ERROR("Write mp4 index failed");
ret = ERROR;
goto end;
}
drop_file_cache(p_mp4_mux_context->iMP4Fd, true);
mp4_storage_notify_sync();
}
else if (ret < 0)
{
STM_ERROR("Deal with frame failed, err_code: %d", ret);
ret = ERROR;
goto end;
}
ret = OK;
end:
return ret;
}
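The drop-cache comment above relies on a helper named drop_file_cache() whose implementation is not shown here. Below is a minimal sketch of what such a helper typically looks like on Linux, assuming it simply wraps fdatasync() plus posix_fadvise(); the function name drop_file_cache_sketch and the sync_first parameter are hypothetical.

#include <fcntl.h>
#include <stdbool.h>
#include <unistd.h>

/* Hypothetical stand-in for drop_file_cache(): flush dirty pages first,
 * then ask the kernel to discard the file's page cache. */
static void drop_file_cache_sketch(int fd, bool sync_first)
{
    if (sync_first)
    {
        fdatasync(fd); /* pages must be clean before the kernel will drop them */
    }
    posix_fadvise(fd, 0, 0, POSIX_FADV_DONTNEED); /* offset 0, len 0 => whole file */
}

Dropping the whole file at once matches the rationale in the comment: the index boxes touched by MP4MuxWriteIndex() are scattered across the file, so tracking and advising individual dirty ranges would be more work for little gain.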
int MP4MuxPutMovieAtom(MP4MUXCONTEXT *pMP4MuxContext)
{
int iCurTrack, iMaxTrack;
unsigned int uiSizemoov = 0;
unsigned int uiPos4moovSize;
MP4MuxParseSPS(pMP4MuxContext);
//set time scale. should be set as early as possible
MP4MuxSetTimeScale(pMP4MuxContext);
MP4MuxGenerateSTCO(pMP4MuxContext);
MP4MuxGenerateSTTS(pMP4MuxContext);
#ifdef VIDEO_AVBR_ENABLE
MP4MuxGenerateSTSSAndSTVS(pMP4MuxContext);
#else
MP4MuxGenerateSTSS(pMP4MuxContext);
#endif
MP4MuxGenerateSTSC(pMP4MuxContext);
MP4MuxGenerateCTTS(pMP4MuxContext);
MP4MuxCalculateAudioBitrate(pMP4MuxContext);
MP4MuxCalculateDuration(pMP4MuxContext);
MP4MuxSetStartTime(pMP4MuxContext);
uiPos4moovSize = MBUFFERByteArrayPutSize(-1, 0, pMP4MuxContext->pInfoByteArray); // reserve the size field of the moov atom
#ifndef KjbSDK_SUPPORT
MBUFFERByteArrayPutTag("moov", pMP4MuxContext->pInfoByteArray);
#else
MBUFFERByteArrayPutTag("tpmb", pMP4MuxContext->pInfoByteArray);
#endif
uiSizemoov = 8; // moov size and tag
// mvhd
uiSizemoov += MP4MuxPutMVHD(pMP4MuxContext);
iCurTrack = pMP4MuxContext->iTrackInfo == MP4MUX_TRACK_INFO_AUDIO_ONLY ? 1 : 0;
iMaxTrack = pMP4MuxContext->iNumTracks + iCurTrack;
for (; iCurTrack < iMaxTrack; iCurTrack++)
{
// skip the track if the codec is not defined
if ((iCurTrack == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iVideoCodec == TP_AVCODEC_UNDET) ||
(iCurTrack == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_UNDET) ||
(iCurTrack == MP4MUX_AUDIO_ES_INDEX && pMP4MuxContext->iAudioCodec == TP_AVCODEC_UNDET))
{
continue;
}
//trak
uiSizemoov += MP4MuxPutTRAK(iCurTrack, pMP4MuxContext);
}
MBUFFERByteArrayPutSize(uiSizemoov, uiPos4moovSize, pMP4MuxContext->pInfoByteArray);
MP4MuxCalculateMdatSize(pMP4MuxContext);
return(MP4MUX_EC_OK);
}
int MP4MuxPutSTSD(int iTrak, MP4MUXCONTEXT *pMP4MuxContext)
{
unsigned int uiSize = 0;
unsigned int uiSizePos = 0;
uiSizePos = MBUFFERByteArrayPutSize(-1, 0, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutTag("stsd", pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* version & flags */
MBUFFERByteArrayPutIntBE(1, pMP4MuxContext->pInfoByteArray); /* entry count */
uiSize = 16;
// if (iTrak == MP4MUX_VIDEO_ES_INDEX || iTrak == MP4MUX_VIDEO2_ES_INDEX)
// // avc1 or hev1
// uiSize += MP4MuxPutVideoSampleEntry(iTrak, pMP4MuxContext);
if (iTrak == MP4MUX_VIDEO_ES_INDEX)
{
// avc1 or hev1
uiSize += MP4MuxPutVideoSampleEntry(iTrak, pMP4MuxContext);
}
else if(iTrak == MP4MUX_VIDEO2_ES_INDEX)
{
// if (g_network_stream_enable)
// {
uiSize += MP4MuxPutVideoSampleEntry(iTrak, pMP4MuxContext);
// }
}
else
// mp4a or mulaw
uiSize += MP4MuxPutAudioSampleEntry(iTrak, pMP4MuxContext->iAudioCodec, pMP4MuxContext);
MBUFFERByteArrayPutSize(uiSize, uiSizePos, pMP4MuxContext->pInfoByteArray);
return uiSize;
}
int MP4MuxPutVideoSampleEntry(int iTrak, MP4MUXCONTEXT *pMP4MuxContext)
{
unsigned int uiSize = 0;
unsigned int uiSizePos = 0;
uiSizePos = MBUFFERByteArrayPutSize(-1, 0, pMP4MuxContext->pInfoByteArray); /* size */
// if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264)
// {
// MBUFFERByteArrayPutTag("avc1", pMP4MuxContext->pInfoByteArray);
// }
// else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265)
// {
// MBUFFERByteArrayPutTag("hev1", pMP4MuxContext->pInfoByteArray);
// }
// Write the sample-entry tag according to the current track's codec type (this is the correct place for it)
if (iTrak == MP4MUX_VIDEO_ES_INDEX)
{
if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264)
{
MBUFFERByteArrayPutTag("avc1", pMP4MuxContext->pInfoByteArray);
}
else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265)
{
MBUFFERByteArrayPutTag("hev1", pMP4MuxContext->pInfoByteArray);
}
}
#ifdef APP_STREAM
else if (iTrak == MP4MUX_VIDEO2_ES_INDEX)
{
if (g_network_stream_enable)
{
// the second stream may use a different codec
if (pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_H264)
{ // newly added field iVideoCodec2
MBUFFERByteArrayPutTag("avc1", pMP4MuxContext->pInfoByteArray);
}
else if (pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_H265)
{
MBUFFERByteArrayPutTag("hev1", pMP4MuxContext->pInfoByteArray);
}
}
}
#endif
MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Reserved */
MBUFFERByteArrayPutBE16(0, pMP4MuxContext->pInfoByteArray); /* Reserved */
MBUFFERByteArrayPutBE16(1, pMP4MuxContext->pInfoByteArray); /* Data-reference index */
MBUFFERByteArrayPutBE16(0, pMP4MuxContext->pInfoByteArray); /* Codec stream version */
MBUFFERByteArrayPutBE16(0, pMP4MuxContext->pInfoByteArray); /* Codec stream revision (=0) */
MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Reserved */
MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Reserved */
MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Reserved */
if (iTrak == MP4MUX_VIDEO_ES_INDEX)
{
MBUFFERByteArrayPutBE16(pMP4MuxContext->uiImageWidth, pMP4MuxContext->pInfoByteArray); /* Video width */
MBUFFERByteArrayPutBE16(pMP4MuxContext->uiImageHeight, pMP4MuxContext->pInfoByteArray); /* Video height */
}
// #ifdef DUAL_CAM
#ifdef APP_STREAM
else if (iTrak == MP4MUX_VIDEO2_ES_INDEX)
{
if (g_network_stream_enable)
{
// MBUFFERByteArrayPutBE16(pMP4MuxContext->uiVideo2ImageWidth, pMP4MuxContext->pInfoByteArray); /* Video width */
// MBUFFERByteArrayPutBE16(pMP4MuxContext->uiVideo2ImageHeight, pMP4MuxContext->pInfoByteArray); /* Video height */
MBUFFERByteArrayPutBE16(480, pMP4MuxContext->pInfoByteArray); /* Video width */
MBUFFERByteArrayPutBE16(800, pMP4MuxContext->pInfoByteArray); /* Video height */
}
// note: when g_network_stream_enable is false, no width/height is written here,
// so every field that follows in this fixed-layout sample entry shifts by 4 bytes
}
#endif
MBUFFERByteArrayPutIntBE(0x00480000, pMP4MuxContext->pInfoByteArray); /* Horizontal resolution 72dpi */
MBUFFERByteArrayPutIntBE(0x00480000, pMP4MuxContext->pInfoByteArray); /* Vertical resolution 72dpi */
MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Data size (= 0) */
MBUFFERByteArrayPutBE16(1, pMP4MuxContext->pInfoByteArray); /* Frame count (= 1) */
MBUFFERByteArrayPutPlaceHolder(0, 32, pMP4MuxContext->pInfoByteArray); // compressor name
MBUFFERByteArrayPutBE16(0x18, pMP4MuxContext->pInfoByteArray); /* bit depth */
MBUFFERByteArrayPutBE16(0xffff, pMP4MuxContext->pInfoByteArray); /* Reserved(=-1) */
uiSize = 86;
// #ifdef DUAL_CAM
#ifdef APP_STREAM
// Ensure that there are parameters to create avcC box or hvcC box
// if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264
// && ((iTrak == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0)
// || (iTrak == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2SPSLength > 0 && pMP4MuxContext->iVideo2PPSLength > 0)))
// {
// // avcC
// uiSize += MP4MuxPutAVCC(iTrak, pMP4MuxContext);
// }
// else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265
// && ((iTrak == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iVPSLength > 0 && pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0)
// || (iTrak == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2VPSLength > 0 && pMP4MuxContext->iVideo2SPSLength > 0 && pMP4MuxContext->iVideo2PPSLength > 0)))
// {
// // hvcC
// uiSize += MP4MuxPutHVCC(iTrak, pMP4MuxContext);
// }
if (iTrak == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0)
{
if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264)
{
// avcC
uiSize += MP4MuxPutAVCC(iTrak, pMP4MuxContext);
}
if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265 && pMP4MuxContext->iVPSLength > 0)
{
// hvcC
uiSize += MP4MuxPutHVCC(iTrak, pMP4MuxContext);
}
}
// if (iTrak == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2SPSLength > 0 && pMP4MuxContext->iVideo2PPSLength > 0)
if (iTrak == MP4MUX_VIDEO2_ES_INDEX)
{
if (g_network_stream_enable)
{
STM_INFO("MP4MuxPutVideoSampleEntry,pMP4MuxContext->iVideo2SPSLength=%d, pMP4MuxContext->iVideo2PPSLength=%d",pMP4MuxContext->iVideo2SPSLength, pMP4MuxContext->iVideo2PPSLength );
if (pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_H264)
{
// avcC
uiSize += MP4MuxPutAVCC(iTrak, pMP4MuxContext);
}
if (pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_H265 && pMP4MuxContext->iVideo2VPSLength > 0)
{
// hvcC
uiSize += MP4MuxPutHVCC(iTrak, pMP4MuxContext);
}
}
else
{
// Second stream disabled: no SPS/PPS are available, so a hardcoded dummy
// avcC (declared size 0x29 = 41 bytes, fourcc 'avcC') is emitted to keep
// the sample entry's byte layout. None of these bytes are added to uiSize,
// so the avc1/stsd size fields written below do not account for them.
STM_INFO("MP4MuxPutVideoSampleEntry, second stream disabled");
MBUFFERByteArrayPutIntBE(0x00000029, pMP4MuxContext->pInfoByteArray); // box size
MBUFFERByteArrayPutIntBE(0x61766343, pMP4MuxContext->pInfoByteArray); // 'avcC'
MBUFFERByteArrayPutIntBE(0x01010101, pMP4MuxContext->pInfoByteArray); // hardcoded AVCDecoderConfigurationRecord payload
MBUFFERByteArrayPutIntBE(0xFFE10011, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(0x6764001F, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(0xACB40F03, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(0x2D370506, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(0x0506D0A1, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(0x35010005, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(0x68EE06F2, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(0xC0, pMP4MuxContext->pInfoByteArray);
}
}
#else
// Ensure that there are parameters to create avcC box or hvcC box
if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264 && pMP4MuxContext->iSPSLength > 0
&& pMP4MuxContext->iPPSLength > 0)
{
// avcC
uiSize += MP4MuxPutAVCC(iTrak, pMP4MuxContext);
}
else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265 && pMP4MuxContext->iVPSLength > 0
&& pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0)
{
// hvcC
uiSize += MP4MuxPutHVCC(iTrak, pMP4MuxContext);
}
#endif
MBUFFERByteArrayPutSize(uiSize, uiSizePos, pMP4MuxContext->pInfoByteArray);
return uiSize;
}
// If we deal with B frames, a CTTS table is needed.
int MP4MuxPutCTTS(MP4MUXCONTEXT *pMP4MuxContext)
{
unsigned int uiSize = 0;
unsigned int i;
unsigned int uiSizePos = 0;
uiSizePos = MBUFFERByteArrayPutSize(-1, 0, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutTag("ctts", pMP4MuxContext->pInfoByteArray); // composition to decode time
MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* version & flags */
MBUFFERByteArrayPutIntBE(pMP4MuxContext->uiNumCTTSEntry, pMP4MuxContext->pInfoByteArray); // entry count
uiSize = 16;
for (i = 0; i < pMP4MuxContext->uiNumCTTSEntry; i++)
{
MBUFFERByteArrayPutIntBE(pMP4MuxContext->pCTTSEntry[i].uiSampleCount, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutIntBE(pMP4MuxContext->pCTTSEntry[i].uiSampleOffset, pMP4MuxContext->pInfoByteArray);
uiSize += 8;
}
MBUFFERByteArrayPutSize(uiSize, uiSizePos, pMP4MuxContext->pInfoByteArray);
return uiSize;
}
int MP4MuxPutAVCC(int iTrak, MP4MUXCONTEXT *pMP4MuxContext)
{
unsigned int uiSize = 0;
if (iTrak == MP4MUX_VIDEO_ES_INDEX)
{
uiSize = 19 + pMP4MuxContext->iSPSLength + pMP4MuxContext->iPPSLength;
}
// #ifdef DUAL_CAM
#ifdef APP_STREAM
else if (iTrak == MP4MUX_VIDEO2_ES_INDEX)
{
uiSize = 19 + pMP4MuxContext->iVideo2SPSLength + pMP4MuxContext->iVideo2PPSLength;
}
#endif
MBUFFERByteArrayPutIntBE(uiSize, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutTag("avcC", pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutUChar(0x01, pMP4MuxContext->pInfoByteArray); // ConfigurationVersion
MBUFFERByteArrayPutUChar(pMP4MuxContext->pucSPS[1], pMP4MuxContext->pInfoByteArray); // AVCProfileIndication (note: always read from the first stream's pucSPS, even when iTrak == MP4MUX_VIDEO2_ES_INDEX)
MBUFFERByteArrayPutUChar(pMP4MuxContext->pucSPS[2], pMP4MuxContext->pInfoByteArray); // Profile_Compatibility
MBUFFERByteArrayPutUChar(pMP4MuxContext->pucSPS[3], pMP4MuxContext->pInfoByteArray); // AVCLevelIndication
MBUFFERByteArrayPutUChar(0xFF, pMP4MuxContext->pInfoByteArray); // 6bits reserved(=111111), 2bits for lengthSizeMinusOne(11), NALU length=4
if (iTrak == MP4MUX_VIDEO_ES_INDEX)
{
MBUFFERByteArrayPutUChar(0xE1, pMP4MuxContext->pInfoByteArray); // 3bits reserved(=111), 5bits for numOfSPS(00001)
MBUFFERByteArrayPutBE16(pMP4MuxContext->iSPSLength, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutMultiUCharBE(pMP4MuxContext->pucSPS, pMP4MuxContext->iSPSLength, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutUChar(0x01, pMP4MuxContext->pInfoByteArray); // numOfPPS
MBUFFERByteArrayPutBE16(pMP4MuxContext->iPPSLength, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutMultiUCharBE(pMP4MuxContext->pucPPS, pMP4MuxContext->iPPSLength, pMP4MuxContext->pInfoByteArray);
}
// #ifdef DUAL_CAM
#ifdef APP_STREAM
else if (iTrak == MP4MUX_VIDEO2_ES_INDEX)
{
if (g_network_stream_enable)
{
STM_INFO("mp4muxput avcc 2");
MBUFFERByteArrayPutUChar(0xE1, pMP4MuxContext->pInfoByteArray); // 3bits reserved(=111), 5bits for numOfSPS(00001)
MBUFFERByteArrayPutBE16(pMP4MuxContext->iVideo2SPSLength, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutMultiUCharBE(pMP4MuxContext->pucVideo2SPS, pMP4MuxContext->iVideo2SPSLength, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutUChar(0x01, pMP4MuxContext->pInfoByteArray); // numOfPPS
MBUFFERByteArrayPutBE16(pMP4MuxContext->iVideo2PPSLength, pMP4MuxContext->pInfoByteArray);
MBUFFERByteArrayPutMultiUCharBE(pMP4MuxContext->pucVideo2PPS, pMP4MuxContext->iVideo2PPSLength, pMP4MuxContext->pInfoByteArray);
}
// else
// {
// STM_INFO("mp4muxput avcc reserve");
// for (int i = 0; i < 41; ++i)
// {
// MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); // sample count
// }
// }
}
#endif
return uiSize;
}

The functions above call one another in a hierarchy. When one of the tracks is still empty, how can stsd be kept from emitting that empty track's data at first? With the original design, the later write then cannot update avcC and the related information. How should this be designed?
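One possible direction, sketched below as standalone C rather than against the real MP4MUXCONTEXT API: when a track has no SPS/PPS yet, reserve a fixed-size, zero-filled placeholder for its avcC (instead of the hardcoded dummy) and remember where it starts; once the parameter sets arrive, patch exactly that region in place, so the moov layout and therefore uiIndexOffset never shift. All names here (MoovSketch, reserve_avcc_placeholder, patch_avcc_in_place, AVCC_PLACEHOLDER_SIZE) are hypothetical, and whether a given demuxer tolerates zero padding at the tail of an avcC box would need to be verified.

#include <stdio.h>
#include <string.h>

#define AVCC_PLACEHOLDER_SIZE 41 /* same size as the hardcoded dummy avcC above */

typedef struct {
    unsigned char buf[1024]; /* stands in for pInfoByteArray */
    int pos;                 /* current write position, like iCurPos */
    int avcc_offset;         /* where the reserved avcC region starts, -1 if none */
} MoovSketch;

/* Reserve a zero-filled region for a not-yet-known avcC and remember its offset. */
static void reserve_avcc_placeholder(MoovSketch *m)
{
    m->avcc_offset = m->pos;
    memset(m->buf + m->pos, 0, AVCC_PLACEHOLDER_SIZE);
    m->pos += AVCC_PLACEHOLDER_SIZE;
}

/* Patch the reserved region in place once SPS/PPS are known.
 * Returns 0 on success, -1 if nothing was reserved or the record does not fit. */
static int patch_avcc_in_place(MoovSketch *m,
                               const unsigned char *sps, int sps_len,
                               const unsigned char *pps, int pps_len)
{
    int need = 19 + sps_len + pps_len; /* same size formula as MP4MuxPutAVCC */
    unsigned char *p;
    if (m->avcc_offset < 0 || need > AVCC_PLACEHOLDER_SIZE)
        return -1;
    p = m->buf + m->avcc_offset;
    p[0] = 0; p[1] = 0; p[2] = 0;
    p[3] = AVCC_PLACEHOLDER_SIZE;          /* keep the declared box size fixed */
    memcpy(p + 4, "avcC", 4);
    p[8]  = 0x01;                          /* configurationVersion */
    p[9]  = sps[1];                        /* AVCProfileIndication */
    p[10] = sps[2];                        /* profile_compatibility */
    p[11] = sps[3];                        /* AVCLevelIndication */
    p[12] = 0xFF;                          /* lengthSizeMinusOne = 3 (4-byte NALU lengths) */
    p[13] = 0xE1;                          /* one SPS */
    p[14] = (unsigned char)(sps_len >> 8);
    p[15] = (unsigned char)(sps_len & 0xFF);
    memcpy(p + 16, sps, (size_t)sps_len);
    p[16 + sps_len] = 0x01;                /* one PPS */
    p[17 + sps_len] = (unsigned char)(pps_len >> 8);
    p[18 + sps_len] = (unsigned char)(pps_len & 0xFF);
    memcpy(p + 19 + sps_len, pps, (size_t)pps_len);
    /* any remaining bytes of the reserved region stay zero */
    return 0;
}

int main(void)
{
    /* illustrative parameter sets only, not taken from a real stream */
    unsigned char sps[] = { 0x67, 0x64, 0x00, 0x1F, 0xAC };
    unsigned char pps[] = { 0x68, 0xEE, 0x06, 0xF2 };
    MoovSketch m = { {0}, 0, -1 };

    reserve_avcc_placeholder(&m);          /* first pass: track still empty */
    /* ... later, once the track has received its first keyframe ... */
    if (patch_avcc_in_place(&m, sps, (int)sizeof(sps), pps, (int)sizeof(pps)) == 0)
        printf("patched avcC at offset %d\n", m.avcc_offset);
    return 0;
}

In the realtime path this keeps uiIndexOffset and uiMdatSizePos stable, because the stsd never changes size: the later write only rewrites bytes that were already reserved. For the DEFAULT, TWO_PASS and IN_PLACE paths the problem is smaller, since MP4MuxWriteIndex rebuilds the whole moov through MP4MuxPutMovieAtom anyway; a track that was skipped while its codec was still TP_AVCODEC_UNDET (as the existing check in MP4MuxPutMovieAtom does) can simply be emitted on a later rebuild, provided nothing after the moov depends on its old size.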