PMP video — notes on the MP4 muxer index-writing path (MP4MuxWriteIndex and the related moov/stsd/avcC box functions)

/**
 * Write the MP4 index (moov atom) into the output file and patch the mdat
 * box size field, according to the muxer's configured output option.
 *
 * @param pMP4MuxContext  Muxer context holding the output fd (iMP4Fd), the
 *                        in-memory index byte array (pInfoByteArray) and the
 *                        recorded file offsets (uiCurrentOffset,
 *                        uiIndexOffset, uiMdatSizePos).
 * @return MP4MUX_EC_OK on success, MP4MUX_EC_FAILURE on any seek/write error.
 *
 * Side effects: moves the file position of iMP4Fd; the REALTIME path also
 * calls fdatasync() to push the data out to storage (e.g. the SD card).
 */
int MP4MuxWriteIndex(MP4MUXCONTEXT *pMP4MuxContext)
{
    off_t currentOffset;   /* saved restore position for the IN_PLACE path */
    off_t ret = 0;         /* off_t, not int: lseek() returns off_t and an int
                            * would silently truncate offsets on large files */
    ssize_t retSize = 0;   /* write() returns ssize_t */
    /* Remember where we currently are so the REALTIME path can seek back. */
    off_t offset = lseek(pMP4MuxContext->iMP4Fd, 0, SEEK_CUR);

    /* Bug fix: this trace used to sit between "switch (...) {" and the first
     * "case" label, where it could never execute (unreachable statement);
     * it now runs before the switch as intended. */
    STM_INFO("MP4MuxWriteIndex() pMP4MuxContext->uiOutputOption=%d",
             pMP4MuxContext->uiOutputOption);

    switch (pMP4MuxContext->uiOutputOption) {
    case MP4MUX_OUTPUT_OPTION_DEFAULT:
    case MP4MUX_OUTPUT_OPTION_TWO_PASS:
        /* Rebuild the full moov atom in memory, write it at the current data
         * end (uiCurrentOffset), then patch the mdat size field in place. */
        MP4MuxResetByteInfoArray(pMP4MuxContext);
        MP4MuxPutMovieAtom(pMP4MuxContext);
        ret = lseek(pMP4MuxContext->iMP4Fd,
                    pMP4MuxContext->uiCurrentOffset, SEEK_SET);
        if (ret < 0) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        retSize = write(pMP4MuxContext->iMP4Fd,
                        pMP4MuxContext->pInfoByteArray->pucBuffer,
                        pMP4MuxContext->pInfoByteArray->iCurPos);
        if (retSize != pMP4MuxContext->pInfoByteArray->iCurPos) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        ret = lseek(pMP4MuxContext->iMP4Fd,
                    pMP4MuxContext->uiMdatSizePos, SEEK_SET);
        if (ret < 0) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        retSize = write(pMP4MuxContext->iMP4Fd,
                        pMP4MuxContext->pucMdatSizeField,
                        MP4MUX_ATOM_SIZE_LENGTH);
        if (retSize != MP4MUX_ATOM_SIZE_LENGTH) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        break;

    case MP4MUX_OUTPUT_OPTION_IN_PLACE:
        /* NOTE(review): this seeks to the file START and therefore saves 0 as
         * the position restored at the end of this case. Presumably SEEK_CUR
         * (save the current position) was intended, as in the REALTIME path —
         * confirm with the original author before changing. Behavior kept. */
        currentOffset = lseek(pMP4MuxContext->iMP4Fd, 0, SEEK_SET);
        if (currentOffset < 0) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        MP4MuxResetByteInfoArray(pMP4MuxContext);
        MP4MuxPutMovieAtom(pMP4MuxContext);
        /* Jump to the current updated offset of free box for event */
        ret = lseek(pMP4MuxContext->iMP4Fd,
                    pMP4MuxContext->uiIndexOffset, SEEK_SET);
        if (ret < 0) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        retSize = write(pMP4MuxContext->iMP4Fd,
                        pMP4MuxContext->pInfoByteArray->pucBuffer,
                        pMP4MuxContext->pInfoByteArray->iCurPos);
        if (retSize != pMP4MuxContext->pInfoByteArray->iCurPos) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        ret = lseek(pMP4MuxContext->iMP4Fd,
                    pMP4MuxContext->uiMdatSizePos, SEEK_SET);
        if (ret < 0) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        retSize = write(pMP4MuxContext->iMP4Fd,
                        pMP4MuxContext->pucMdatSizeField,
                        MP4MUX_ATOM_SIZE_LENGTH);
        if (retSize != MP4MUX_ATOM_SIZE_LENGTH) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        /* Restore the position saved above. */
        ret = lseek(pMP4MuxContext->iMP4Fd, currentOffset, SEEK_SET);
        if (ret < 0) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        break;

    case MP4MUX_OUTPUT_OPTION_REALTIME:
        /* Nothing to update when no track has received new samples. */
        if (pMP4MuxContext->piSampleInterval[MP4MUX_VIDEO_ES_INDEX] == 0
// #ifdef DUAL_CAM
#ifdef APP_STREAM
            && pMP4MuxContext->piSampleInterval[MP4MUX_VIDEO2_ES_INDEX] == 0
#endif
            && pMP4MuxContext->piSampleInterval[MP4MUX_AUDIO_ES_INDEX] == 0) {
            break;
        }
        MP4MuxResetByteInfoArray(pMP4MuxContext);
        if (!(pMP4MuxContext->iIfNotFirstlyUpdateMovieAtom)) {
            /* First update only: the mdat payload ends where the index area
             * begins, so its size is uiIndexOffset - uiMdatSizePos; encode it
             * big-endian and patch it into the file at uiMdatSizePos. */
            unsigned int uiSize =
                pMP4MuxContext->uiIndexOffset - pMP4MuxContext->uiMdatSizePos;
            pMP4MuxContext->pucMdatSizeField[0] = (uiSize >> 24) & 0xff;
            pMP4MuxContext->pucMdatSizeField[1] = (uiSize >> 16) & 0xff;
            pMP4MuxContext->pucMdatSizeField[2] = (uiSize >> 8) & 0xff;
            pMP4MuxContext->pucMdatSizeField[3] = uiSize & 0xff;
            ret = lseek(pMP4MuxContext->iMP4Fd,
                        pMP4MuxContext->uiMdatSizePos, SEEK_SET);
            if (ret < 0) {
                STM_ERROR("%s", strerror(errno));
                return MP4MUX_EC_FAILURE;
            }
            if (write(pMP4MuxContext->iMP4Fd,
                      pMP4MuxContext->pucMdatSizeField,
                      MP4MUX_ATOM_SIZE_LENGTH) != MP4MUX_ATOM_SIZE_LENGTH) {
                STM_ERROR("%s", strerror(errno));
                return MP4MUX_EC_FAILURE;
            }
        }
        /* Rewrite the movie atom at the index area and sync it out. */
        ret = lseek(pMP4MuxContext->iMP4Fd,
                    pMP4MuxContext->uiIndexOffset, SEEK_SET);
        if (ret < 0) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        MP4MuxUpdateMovieAtom(pMP4MuxContext);
        fdatasync(pMP4MuxContext->iMP4Fd);
        /* Return to where the caller left the write position. */
        ret = lseek(pMP4MuxContext->iMP4Fd, offset, SEEK_SET);
        if (ret < 0) {
            STM_ERROR("%s", strerror(errno));
            return MP4MUX_EC_FAILURE;
        }
        fdatasync(pMP4MuxContext->iMP4Fd); /* flush the data to the SD card */
        break;

    default:
        break;
    }
    return (MP4MUX_EC_OK);
}
/* ============================================================================
 * NOTE(review): the region below is a pasted chunk of six chained functions
 * whose original formatting was collapsed onto a few very long physical lines;
 * prose fragments ("其中", ",", and a trailing Chinese question) are fused to
 * the code and are preserved byte-for-byte. Because of the collapse, every
 * "//" comment now swallows the rest of its physical line — an artifact of the
 * paste, not of the original source. Code tokens are unchanged; only these
 * standalone English review comments have been added.
 * ==========================================================================*/
/* mp4_storage_do_saving_mp4(): classify one frame (audio / video P / I /
 * event-start I / second-stream / AVBR virtual-I), fill MBUFFERESSideInfo
 * (PTS = timestamp*9/100, i.e. 90 kHz ticks from a value in 10-us units —
 * TODO confirm unit), pick the per-track input buffer and ES index.
 * NOTE(review): side_info.iSize is set from frame_size BEFORE the switch, but
 * the AAC-mute branch below shrinks frame_size afterwards, so the appended
 * data length and the side-info size can disagree — confirm intended. */
其中static int mp4_storage_do_saving_mp4(MP4_STORAGE_FRAME_TYPE_E type, PTR_FRAME_HEAD_S p_frame_head, unsigned char *p_frame_data) { MP4MUXCONTEXT *p_mp4_mux_context = &(g_mp4_storage.mp4_mux_context); MBUFFERESSideInfo side_info; MBUFFERESBuffer *p_esbuffer = &(g_mp4_storage.v_input_buf); int es_index = MP4MUX_VIDEO_ES_INDEX; int ret = OK; long long frame_timestamp = 0LL; unsigned int frame_size = 0; frame_timestamp = ntohll(p_frame_head->timestamp); frame_size = ntohl(p_frame_head->data_len); memset(&side_info, 0, sizeof(MBUFFERESSideInfo)); MBUFFERESSETFRAMETYPE(MBUFFER_ES_FRAME_TYPE_NORMALFRAME, side_info.iFlags); side_info.PTS = frame_timestamp * 9 / 100; side_info.DTS = -1; side_info.iSize = (int) (frame_size); switch (type) { #ifdef AUDIO_RECORD_SUPPORT case MP4_STORAGE_FRAME_TYPE_AUDIO: MBUFFERESSETIFKEYFRAME(1, side_info.iFlags); p_esbuffer = &(g_mp4_storage.a_input_buf); es_index = MP4MUX_AUDIO_ES_INDEX; #ifdef AAC_SUPPORT if (TP_AVCODEC_AAC_ADTS == p_mp4_mux_context->iAudioCodec && !get_record_audio_enable()) { frame_size = AUDIO_MUTE_AAC_FRAME_LEN; } #endif break; #endif case MP4_STORAGE_FRAME_TYPE_VIDEO_P: MBUFFERESSETIFKEYFRAME(0, side_info.iFlags); break; case MP4_STORAGE_FRAME_TYPE_VIDEO_NON_EVENT_START_I: MBUFFERESSETIFKEYFRAME(1, side_info.iFlags); break; case MP4_STORAGE_FRAME_TYPE_VIDEO_EVENT_START_I: MBUFFERESSETIFKEYFRAME(1, side_info.iFlags); MBUFFERESSETIFFIRSTFRAMEOFEVENT(1, side_info.iFlags); break; // #ifdef DUAL_CAM #ifdef APP_STREAM case MP4_STORAGE_FRAME_TYPE_VIDEO2_P: MBUFFERESSETIFKEYFRAME(0, side_info.iFlags); p_esbuffer = &(g_mp4_storage.v2_input_buf); es_index = MP4MUX_VIDEO2_ES_INDEX; STM_INFO("dosaving MP4_STORAGE_FRAME_TYPE_VIDEO2_P"); break; case MP4_STORAGE_FRAME_TYPE_VIDEO2_NON_EVENT_START_I: MBUFFERESSETIFKEYFRAME(1, side_info.iFlags); p_esbuffer = &(g_mp4_storage.v2_input_buf); es_index = MP4MUX_VIDEO2_ES_INDEX; STM_INFO("dosaving MP4_STORAGE_FRAME_TYPE_VIDEO2_NON_EVENT_START_I"); break; case
/* Continuation: second-stream event-start I case, AVBR virtual-I cases, then
 * append the frame into the ES buffer and feed the muxer; on
 * MP4MUX_EC_NEED_UPDATE_BOX / MP4MUX_EC_OUT_OF_RANGE, rewrite the index.
 * Translation of the Chinese block comment embedded below: "Each time the
 * mp4 index is updated, do one drop-cache operation on the whole file;
 * otherwise the page cache grows by ~20 KB (the index boxes are scattered in
 * the file, so for now we drop the cache for the entire mp4 file). After this
 * operation, the per-GOP drop-cache step is skipped." */
MP4_STORAGE_FRAME_TYPE_VIDEO2_EVENT_START_I: MBUFFERESSETIFKEYFRAME(1, side_info.iFlags); MBUFFERESSETIFFIRSTFRAMEOFEVENT(1, side_info.iFlags); p_esbuffer = &(g_mp4_storage.v2_input_buf); es_index = MP4MUX_VIDEO2_ES_INDEX; STM_INFO("dosaving MP4_STORAGE_FRAME_TYPE_VIDEO2_EVENT_START_I"); break; #endif #ifdef VIDEO_AVBR_ENABLE case MP4_STORAGE_FRAME_TYPE_VIDEO_NON_EVENT_START_VIRTUAL_I: MBUFFERESSETIFKEYFRAME(0, side_info.iFlags); MBUFFERESSETIFVIRTUALIFRAME(1, side_info.iFlags); break; case MP4_STORAGE_FRAME_TYPE_VIDEO_ONLY_DECODING_I: MBUFFERESSETIFKEYFRAME(1, side_info.iFlags); MBUFFERESSETIFONLYDECODINGFRAME(1, side_info.iFlags); break; case MP4_STORAGE_FRAME_TYPE_VIDEO_EVENT_START_VIRTUAL_I: MBUFFERESSETIFKEYFRAME(0, side_info.iFlags); MBUFFERESSETIFFIRSTFRAMEOFEVENT(1, side_info.iFlags); MBUFFERESSETIFVIRTUALIFRAME(1, side_info.iFlags); break; #endif default: return MP4MUX_EC_FAILURE; } if (MBUFFERESGETIFFIRSTFRAMEOFEVENT(side_info.iFlags)) { side_info.llParameter = (long long) p_frame_head->reserve1; } ret = MBUFFERESAppendData(p_frame_data, (int) (frame_size), 1, p_esbuffer); if (ret != MBUFFER_EC_OK) { STM_ERROR("Mbuffer append data failed"); ret = ERROR; goto end; } MBUFFERESSetSideInfo(&side_info, p_esbuffer); ret = MP4MuxPutData(es_index, p_mp4_mux_context); if (ret == MP4MUX_EC_NEED_UPDATE_BOX) { STM_INFO("MP4MUX_EC_NEED_UPDATE_BOX"); if (MP4MuxWriteIndex(p_mp4_mux_context) != MP4MUX_EC_OK) { STM_ERROR("Write mp4 index failed"); ret = ERROR; goto end; } /* * 每次更新mp4索引时,对整个文件进行一次drop cache操作, * 否则会有20k左右的cache增长(索引各boxfile较为分散, * 暂时对整个mp4文件进行cache drop) * * 执行该操作后跳过GOP的drop cache操作 */ drop_file_cache(p_mp4_mux_context->iMP4Fd, true); mp4_storage_notify_sync(); } else if (ret == MP4MUX_EC_OUT_OF_RANGE) { STM_INFO("MP4MUX_EC_OUT_OF_RANGE"); g_mp4_storage.if_file_full = 1; if (MP4MuxWriteIndex(p_mp4_mux_context) != MP4MUX_EC_OK) { STM_ERROR("Write mp4 index failed"); ret = ERROR; goto end; } drop_file_cache(p_mp4_mux_context->iMP4Fd, true);
/* End of mp4_storage_do_saving_mp4 (returns OK/ERROR via the "end:" label),
 * then — after a stray "," of paste glue — MP4MuxPutMovieAtom(): builds the
 * complete moov atom into pInfoByteArray: parses SPS, sets the time scale,
 * generates stco/stts/stss(/stvs)/stsc/ctts tables, computes bitrate and
 * duration, reserves the moov size slot, writes the "moov" (or "tpmb") tag
 * and the mvhd box, then iterates the tracks. */
mp4_storage_notify_sync(); } else if (ret < 0) { STM_ERROR("Deal with frame failed, err_code: %d", ret); ret = ERROR; goto end; } ret = OK; end: return ret; } ,int MP4MuxPutMovieAtom(MP4MUXCONTEXT *pMP4MuxContext) { int iCurTrack, iMaxTrack; unsigned int uiSizemoov = 0; unsigned int uiPos4moovSize; MP4MuxParseSPS(pMP4MuxContext); //set time scale. should be set as early as possible MP4MuxSetTimeScale(pMP4MuxContext); MP4MuxGenerateSTCO(pMP4MuxContext); MP4MuxGenerateSTTS(pMP4MuxContext); #ifdef VIDEO_AVBR_ENABLE MP4MuxGenerateSTSSAndSTVS(pMP4MuxContext); #else MP4MuxGenerateSTSS(pMP4MuxContext); #endif MP4MuxGenerateSTSC(pMP4MuxContext); MP4MuxGenerateCTTS(pMP4MuxContext); MP4MuxCalculateAudioBitrate(pMP4MuxContext); MP4MuxCalculateDuration(pMP4MuxContext); MP4MuxSetStartTime(pMP4MuxContext); uiPos4moovSize = MBUFFERByteArrayPutSize(-1, 0, pMP4MuxContext->pInfoByteArray); // reserve the space for moov atom #ifndef KjbSDK_SUPPORT MBUFFERByteArrayPutTag("moov", pMP4MuxContext->pInfoByteArray); #else MBUFFERByteArrayPutTag("tpmb", pMP4MuxContext->pInfoByteArray); #endif uiSizemoov = 8; // moov size and tag // mvhd uiSizemoov += MP4MuxPutMVHD(pMP4MuxContext); iCurTrack = pMP4MuxContext->iTrackInfo == MP4MUX_TRACK_INFO_AUDIO_ONLY ?
/* PutMovieAtom track loop: skips any track whose codec is TP_AVCODEC_UNDET,
 * emits a trak box per remaining track, then back-patches the moov size.
 * Next (after another "," glue): MP4MuxPutSTSD() — writes the stsd box with
 * entry count fixed at 1 and dispatches to the video (track 0 / track 2 when
 * second stream) or audio sample-entry writer. NOTE(review): entry count is
 * hard-coded to 1 even when the video2 path may write no real entry data. */
1 : 0; iMaxTrack = pMP4MuxContext->iNumTracks + iCurTrack; for (; iCurTrack < iMaxTrack; iCurTrack++) { // skip the track if the codec is not defined if ((iCurTrack == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iVideoCodec == TP_AVCODEC_UNDET) || (iCurTrack == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_UNDET) || (iCurTrack == MP4MUX_AUDIO_ES_INDEX && pMP4MuxContext->iAudioCodec == TP_AVCODEC_UNDET)) { continue; } //trak uiSizemoov += MP4MuxPutTRAK(iCurTrack, pMP4MuxContext); } MBUFFERByteArrayPutSize(uiSizemoov, uiPos4moovSize, pMP4MuxContext->pInfoByteArray); MP4MuxCalculateMdatSize(pMP4MuxContext); return(MP4MUX_EC_OK); } ,int MP4MuxPutSTSD(int iTrak, MP4MUXCONTEXT *pMP4MuxContext) { unsigned int uiSize = 0; unsigned int uiSizePos = 0; uiSizePos = MBUFFERByteArrayPutSize(-1, 0, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutTag("stsd", pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* version & flags */ MBUFFERByteArrayPutIntBE(1, pMP4MuxContext->pInfoByteArray); /* entry count */ uiSize = 16; // if (iTrak == MP4MUX_VIDEO_ES_INDEX || iTrak == MP4MUX_VIDEO2_ES_INDEX) // // avc1 or hev1 // uiSize += MP4MuxPutVideoSampleEntry(iTrak, pMP4MuxContext); if (iTrak == MP4MUX_VIDEO_ES_INDEX) { // avc1 or hev1 uiSize += MP4MuxPutVideoSampleEntry(iTrak, pMP4MuxContext); } else if(iTrak == MP4MUX_VIDEO2_ES_INDEX) { // if (g_network_stream_enable) // { uiSize += MP4MuxPutVideoSampleEntry(iTrak, pMP4MuxContext); // } } else // mp4a or mulaw uiSize += MP4MuxPutAudioSampleEntry(iTrak, pMP4MuxContext->iAudioCodec, pMP4MuxContext); MBUFFERByteArrayPutSize(uiSize, uiSizePos, pMP4MuxContext->pInfoByteArray); return uiSize; },int MP4MuxPutVideoSampleEntry(int iTrak, MP4MUXCONTEXT *pMP4MuxContext) { unsigned int uiSize = 0; unsigned int uiSizePos = 0; uiSizePos = MBUFFERByteArrayPutSize(-1, 0, pMP4MuxContext->pInfoByteArray); /* size */ // if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264) // { //
/* MP4MuxPutVideoSampleEntry continuation: writes the "avc1"/"hev1" tag by
 * the codec of the CURRENT track (track 2 uses iVideoCodec2), then the fixed
 * visual-sample-entry fields. Translations of the Chinese comments embedded
 * below: "write the tag according to the current track's codec type — the
 * correct place!", "the second stream may use a different codec", "newly
 * added field iVideo2Codec". */
MBUFFERByteArrayPutTag("avc1", pMP4MuxContext->pInfoByteArray); // } // else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265) // { // MBUFFERByteArrayPutTag("hev1", pMP4MuxContext->pInfoByteArray); // } // 根据当前 track 的编码类型写入 tag —— 正确位置! if (iTrak == MP4MUX_VIDEO_ES_INDEX) { if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264) { MBUFFERByteArrayPutTag("avc1", pMP4MuxContext->pInfoByteArray); } else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265) { MBUFFERByteArrayPutTag("hev1", pMP4MuxContext->pInfoByteArray); } } #ifdef APP_STREAM else if (iTrak == MP4MUX_VIDEO2_ES_INDEX) { if (g_network_stream_enable) { // 第二路可能用不同的编码 if (pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_H264) { // 新增字段 iVideo2Codec MBUFFERByteArrayPutTag("avc1", pMP4MuxContext->pInfoByteArray); } else if (pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_H265) { MBUFFERByteArrayPutTag("hev1", pMP4MuxContext->pInfoByteArray); } } } #endif MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Reserved */ MBUFFERByteArrayPutBE16(0, pMP4MuxContext->pInfoByteArray); /* Reserved */ MBUFFERByteArrayPutBE16(1, pMP4MuxContext->pInfoByteArray); /* Data-reference index */ MBUFFERByteArrayPutBE16(0, pMP4MuxContext->pInfoByteArray); /* Codec stream version */ MBUFFERByteArrayPutBE16(0, pMP4MuxContext->pInfoByteArray); /* Codec stream revision (=0) */ MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Reserved */ MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Reserved */ MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Reserved */ if (iTrak == MP4MUX_VIDEO_ES_INDEX) { MBUFFERByteArrayPutBE16(pMP4MuxContext->uiImageWidth, pMP4MuxContext->pInfoByteArray); /* Video width */ MBUFFERByteArrayPutBE16(pMP4MuxContext->uiImageHeight, pMP4MuxContext->pInfoByteArray); /* Video height */ } // #ifdef DUAL_CAM #ifdef APP_STREAM else if (iTrak == MP4MUX_VIDEO2_ES_INDEX) { if (g_network_stream_enable) // { // MBUFFERByteArrayPutBE16(pMP4MuxContext->uiVideo2ImageWidth,
/* NOTE(review): the second-stream dimensions are hard-coded to 480x800 here
 * (the proper uiVideo2ImageWidth/Height fields are commented out above) —
 * TODO replace with the real per-stream fields once they are populated.
 * The fixed sample-entry body is 86 bytes (uiSize = 86). */
pMP4MuxContext->pInfoByteArray); /* Video width */ // MBUFFERByteArrayPutBE16(pMP4MuxContext->uiVideo2ImageHeight, pMP4MuxContext->pInfoByteArray); /* Video height */ MBUFFERByteArrayPutBE16(480, pMP4MuxContext->pInfoByteArray); /* Video width */ MBUFFERByteArrayPutBE16(800, pMP4MuxContext->pInfoByteArray); /* Video height */ } } #endif MBUFFERByteArrayPutIntBE(0x00480000, pMP4MuxContext->pInfoByteArray); /* Horizontal resolution 72dpi */ MBUFFERByteArrayPutIntBE(0x00480000, pMP4MuxContext->pInfoByteArray); /* Vertical resolution 72dpi */ MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* Data size (= 0) */ MBUFFERByteArrayPutBE16(1, pMP4MuxContext->pInfoByteArray); /* Frame count (= 1) */ MBUFFERByteArrayPutPlaceHolder(0, 32, pMP4MuxContext->pInfoByteArray); // compressor name MBUFFERByteArrayPutBE16(0x18, pMP4MuxContext->pInfoByteArray); /* bit depth */ MBUFFERByteArrayPutBE16(0xffff, pMP4MuxContext->pInfoByteArray); /* Reserved(=-1) */ uiSize = 86; // #ifdef DUAL_CAM #ifdef APP_STREAM // Ensure that there are parameters to create avcC box or hvcC box // if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264 // && ((iTrak == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0) // || (iTrak == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2SPSLength > 0 && pMP4MuxContext->iVideo2PPSLength > 0))) // { // // avcC // uiSize += MP4MuxPutAVCC(iTrak, pMP4MuxContext); // } // else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265 // && ((iTrak == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iVPSLength > 0 && pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0) // || (iTrak == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2VPSLength > 0 && pMP4MuxContext->iVideo2SPSLength > 0 && pMP4MuxContext->iVideo2PPSLength > 0))) // { // // hvcC // uiSize += MP4MuxPutHVCC(iTrak, pMP4MuxContext); // } if (iTrak == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0) { if
/* avcC/hvcC emission per track. NOTE(review): in the "else" branch below
 * (g_network_stream_enable false) a canned avcC blob is appended via eleven
 * PutIntBE calls (44 bytes, box size field claims 0x29 = 41) WITHOUT adding
 * anything to uiSize — the stsd/sample-entry size fields will then disagree
 * with the bytes actually written; this is the likely cause of the "avcC
 * cannot be updated later" problem mentioned in the trailing question.
 * Confirm and either count the placeholder in uiSize or reserve an exact-size
 * free slot to be patched when the track's SPS/PPS become available. */
(pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264) { // avcC uiSize += MP4MuxPutAVCC(iTrak, pMP4MuxContext); } if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265 && pMP4MuxContext->iVPSLength > 0) { // hvcC uiSize += MP4MuxPutHVCC(iTrak, pMP4MuxContext); } } // if (iTrak == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2SPSLength > 0 && pMP4MuxContext->iVideo2PPSLength > 0) if (iTrak == MP4MUX_VIDEO2_ES_INDEX ) { if (g_network_stream_enable) { STM_INFO("MP4MuxPutVideoSampleEntry,pMP4MuxContext->iVideo2SPSLength=%d, pMP4MuxContext->iVideo2PPSLength=%d",pMP4MuxContext->iVideo2SPSLength, pMP4MuxContext->iVideo2PPSLength ); if (pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_H264) { // avcC uiSize += MP4MuxPutAVCC(iTrak, pMP4MuxContext); } if (pMP4MuxContext->iVideoCodec2 == TP_AVCODEC_H265 && pMP4MuxContext->iVideo2VPSLength > 0) { // hvcC uiSize += MP4MuxPutHVCC(iTrak, pMP4MuxContext); } } else { STM_INFO("MP4MuxPutVideoSampleEntry, unable"); MBUFFERByteArrayPutIntBE(0x00000029, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0x61766343, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0x01010101, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0xFFE10011, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0x6764001F, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0xACB40F03, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0x2D370506, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0x0506D0A1, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0x35010005, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0x68EE06F2, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(0xC0, pMP4MuxContext->pInfoByteArray); } } #else // Ensure that there are parameters to create avcC box or hvcC box if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264 && pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0) { // avcC uiSize += MP4MuxPutAVCC(iTrak, pMP4MuxContext); } else if
/* End of MP4MuxPutVideoSampleEntry (non-APP_STREAM branch + size patch),
 * then MP4MuxPutCTTS(): writes the ctts (composition offset) table from
 * pCTTSEntry, then (after "," glue) MP4MuxPutAVCC(): writes an
 * AVCDecoderConfigurationRecord; uiSize = 19 + SPS + PPS lengths. */
(pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265 && pMP4MuxContext->iVPSLength > 0 && pMP4MuxContext->iSPSLength > 0 && pMP4MuxContext->iPPSLength > 0) { // hvcC uiSize += MP4MuxPutHVCC(iTrak, pMP4MuxContext); } #endif MBUFFERByteArrayPutSize(uiSize, uiSizePos, pMP4MuxContext->pInfoByteArray); return uiSize; } //If we deal with B frame ,CTTS table is needed. int MP4MuxPutCTTS(MP4MUXCONTEXT *pMP4MuxContext) { unsigned int uiSize = 0; unsigned int i; unsigned int uiSizePos = 0; uiSizePos = MBUFFERByteArrayPutSize(-1, 0, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutTag("ctts", pMP4MuxContext->pInfoByteArray); // composition to decode time MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); /* version & flags */ MBUFFERByteArrayPutIntBE(pMP4MuxContext->uiNumCTTSEntry, pMP4MuxContext->pInfoByteArray); // entry count uiSize = 16; for (i = 0; i < pMP4MuxContext->uiNumCTTSEntry; i++) { MBUFFERByteArrayPutIntBE(pMP4MuxContext->pCTTSEntry[i].uiSampleCount, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutIntBE(pMP4MuxContext->pCTTSEntry[i].uiSampleOffset, pMP4MuxContext->pInfoByteArray); uiSize += 8; } MBUFFERByteArrayPutSize(uiSize, uiSizePos, pMP4MuxContext->pInfoByteArray); return uiSize; },int MP4MuxPutAVCC(int iTrak, MP4MUXCONTEXT *pMP4MuxContext) { unsigned int uiSize = 0; if (iTrak == MP4MUX_VIDEO_ES_INDEX) { uiSize = 19 + pMP4MuxContext->iSPSLength + pMP4MuxContext->iPPSLength; } // #ifdef DUAL_CAM #ifdef APP_STREAM else if (iTrak == MP4MUX_VIDEO2_ES_INDEX) { uiSize = 19 + pMP4MuxContext->iVideo2SPSLength + pMP4MuxContext->iVideo2PPSLength; } #endif MBUFFERByteArrayPutIntBE(uiSize, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutTag("avcC", pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutUChar(0x01, pMP4MuxContext->pInfoByteArray); // ConfigurationVersion MBUFFERByteArrayPutUChar(pMP4MuxContext->pucSPS[1], pMP4MuxContext->pInfoByteArray); // AVCProfileIndication MBUFFERByteArrayPutUChar(pMP4MuxContext->pucSPS[2],
/* MP4MuxPutAVCC continuation: profile/compat/level bytes, then SPS/PPS sets
 * per track. NOTE(review): the profile/compat/level bytes above are always
 * read from pucSPS (track 1) — for iTrak == MP4MUX_VIDEO2_ES_INDEX they
 * should presumably come from pucVideo2SPS; confirm. The trailing Chinese
 * question is preserved verbatim; translation: "These functions call each
 * other in a hierarchy — when a track is empty, how do we keep stsd from
 * containing data for the empty track, given that writing later used to make
 * avcC etc. impossible to update? How should this be designed?" */
pMP4MuxContext->pInfoByteArray); // Profile_Compatibility MBUFFERByteArrayPutUChar(pMP4MuxContext->pucSPS[3], pMP4MuxContext->pInfoByteArray); // AVCLevelIndication MBUFFERByteArrayPutUChar(0xFF, pMP4MuxContext->pInfoByteArray); // 6bits reserved(=111111), 2bits for lengthSizeMinusOne(11), NALU length=4 if (iTrak == MP4MUX_VIDEO_ES_INDEX) { MBUFFERByteArrayPutUChar(0xE1, pMP4MuxContext->pInfoByteArray); // 3bits reserved(=111), 5bits for numOfSPS(00001) MBUFFERByteArrayPutBE16(pMP4MuxContext->iSPSLength, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutMultiUCharBE(pMP4MuxContext->pucSPS, pMP4MuxContext->iSPSLength, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutUChar(0x01, pMP4MuxContext->pInfoByteArray); // numOfPPS MBUFFERByteArrayPutBE16(pMP4MuxContext->iPPSLength, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutMultiUCharBE(pMP4MuxContext->pucPPS, pMP4MuxContext->iPPSLength, pMP4MuxContext->pInfoByteArray); } // #ifdef DUAL_CAM #ifdef APP_STREAM else if (iTrak == MP4MUX_VIDEO2_ES_INDEX) { if (g_network_stream_enable) { STM_INFO("mp4muxput avcc 2"); MBUFFERByteArrayPutUChar(0xE1, pMP4MuxContext->pInfoByteArray); // 3bits reserved(=111), 5bits for numOfSPS(00001) MBUFFERByteArrayPutBE16(pMP4MuxContext->iVideo2SPSLength, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutMultiUCharBE(pMP4MuxContext->pucVideo2SPS, pMP4MuxContext->iVideo2SPSLength, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutUChar(0x01, pMP4MuxContext->pInfoByteArray); // numOfPPS MBUFFERByteArrayPutBE16(pMP4MuxContext->iVideo2PPSLength, pMP4MuxContext->pInfoByteArray); MBUFFERByteArrayPutMultiUCharBE(pMP4MuxContext->pucVideo2PPS, pMP4MuxContext->iVideo2PPSLength, pMP4MuxContext->pInfoByteArray); } // else // { // STM_INFO("mp4muxput avcc reserve"); // for (int i = 0; i < 41; ++i) // { // MBUFFERByteArrayPutIntBE(0, pMP4MuxContext->pInfoByteArray); // sample count // } // } } #endif return uiSize; }他们几个函数之前层级调用,怎么在某一轨道为空的时候,控制stsd先不能为空轨道的数据,在后续写入时,原先导致avcc等无法更新信息,怎么设计
10-29 (date of the following note)
static int MP4MuxUpdateMovieAtom(MP4MUXCONTEXT *pMP4MuxContext) { MBUFFERByteArray *pInfoByteArray = pMP4MuxContext->pInfoByteArray; int iSize = 0; int iCount = 0; int iDiffSize = 0; if (!(pMP4MuxContext->iIfNotFirstlyUpdateMovieAtom)) { MP4MuxParseSPS(pMP4MuxContext); MP4MuxSetTimeScale(pMP4MuxContext); /* Do only once * If we have not get audio frame yet, we can calculate start time of audio frame later in dealing with STTS */ MP4MuxSetStartTime(pMP4MuxContext); } if (pMP4MuxContext->uiOutputOption == MP4MUX_OUTPUT_OPTION_REALTIME) { MP4MuxGenerateSTCOAndSTSC(pMP4MuxContext); iCount = pMP4MuxContext->iFrameInterval; } else { MP4MuxGenerateSTCO(pMP4MuxContext); MP4MuxGenerateSTSC(pMP4MuxContext); iCount = pMP4MuxContext->iMaxNumFrame; } if ((pMP4MuxContext->iTrackInfo == MP4MUX_TRACK_INFO_VIDEO_AUDIO || pMP4MuxContext->iTrackInfo == MP4MUX_TRACK_INFO_VIDEO_VIDEO2_AUDIO) && pMP4MuxContext->uiOutputOption == MP4MUX_OUTPUT_OPTION_REALTIME) { MP4MuxGenerateSTTSForVideoAndAudio(pMP4MuxContext, pMP4MuxContext->iTrackInfo); } else { MP4MuxGenerateSTTS(pMP4MuxContext); } MP4MuxGenerateSTSS(pMP4MuxContext); #endif MP4MuxGenerateCTTS(pMP4MuxContext); MP4MuxCalculateAudioBitrate(pMP4MuxContext); MP4MuxCalculateDuration(pMP4MuxContext); if (!(pMP4MuxContext->iIfNotFirstlyUpdateMovieAtom)) { MP4MuxSetOtherMovieAtomsInfo(pMP4MuxContext); /* Put free head */ MBUFFERByteArrayPutIntBE(pMP4MuxContext->MovieAtomOffsetsAndSizes.iFreeSize, pInfoByteArray); MBUFFERByteArrayPutTag("free", pInfoByteArray); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer, MP4MUX_BOX_HEAD_MIN_SIZE); MBUFFERByteArrayReset(pInfoByteArray); #ifdef VIDEO_AVBR_ENABLE /* Put stvs head only once if its entry count is 0 */ if (pMP4MuxContext->iVirtualSampleCount == 0) { MBUFFERByteArrayPutIntBE(pMP4MuxContext->MovieAtomOffsetsAndSizes.iStvsSize, pInfoByteArray); MBUFFERByteArrayPutTag(MP4MUX_BOX_NAME_SAMPLE_TABLE_OF_VIRTUAL_SAMPLE, pInfoByteArray); MBUFFERByteArrayPutIntBE(0, pInfoByteArray); // version & 
flags MBUFFERByteArrayPutIntBE(pMP4MuxContext->iVirtualSampleCount, pInfoByteArray); lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iStvsOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer, MP4MUX_BOX_HEAD_MAX_SIZE); MBUFFERByteArrayReset(pInfoByteArray); } #endif } /* Put stte */ iSize = MP4MuxPutStte(pMP4MuxContext); if (iSize > 0) { lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->uiIndexOffset + MP4MUX_BOX_HEAD_MIN_SIZE, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer, MP4MUX_BOX_HEAD_MAX_SIZE); // Write stte head lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iStteEntryOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer + MP4MUX_BOX_HEAD_MAX_SIZE, iSize); // Write stte entry pMP4MuxContext->MovieAtomOffsetsAndSizes.iStteEntryOffset += iSize; } MBUFFERByteArrayReset(pInfoByteArray); #ifdef APP_STREAM /* Put video2 trak, track2 在track1前面*/ if (pMP4MuxContext->iTrackInfo == MP4MUX_TRACK_INFO_VIDEO_VIDEO2_AUDIO) { /* Reset and seek, in case that we need to update video2 trak */ lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2TrakOffset, SEEK_SET); MBUFFERByteArrayReset(pInfoByteArray); iSize = 0; /* Put trak head */ MBUFFERByteArrayPutIntBE(pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2TrakSize, pInfoByteArray); MBUFFERByteArrayPutTag("trak", pInfoByteArray); iSize += MP4MUX_BOX_HEAD_MIN_SIZE; /* Put tkhd */ iSize += MP4MuxPutTKHD(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); /* Put edts */ iSize += MP4MuxPutEDTS(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); /* Put mdia head */ MBUFFERByteArrayPutIntBE(pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2MdiaSize, pInfoByteArray); MBUFFERByteArrayPutTag("mdia", pInfoByteArray); iSize += MP4MUX_BOX_HEAD_MIN_SIZE; /* Put mdhd */ iSize += MP4MuxPutMDHD(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); if (!(pMP4MuxContext->iIfNotFirstlyUpdateMovieAtom)) { /* Put hdlr */ iSize += 
MP4MuxPutHDLR(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); /* Put minf head */ MBUFFERByteArrayPutIntBE(pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2MinfSize, pInfoByteArray); MBUFFERByteArrayPutTag("minf", pInfoByteArray); iSize += MP4MUX_BOX_HEAD_MIN_SIZE; /* Put vmhd */ iSize += MP4MuxPutVMHD(pMP4MuxContext); /* Put dinf */ iSize += MP4MuxPutDINF(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); /* Put stbl head */ MBUFFERByteArrayPutIntBE(pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StblSize, pInfoByteArray); MBUFFERByteArrayPutTag("stbl", pInfoByteArray); iSize += MP4MUX_BOX_HEAD_MIN_SIZE; /* Put stsd, do only once in case that sps, pps and vps change later */ iSize += MP4MuxPutSTSD(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); } /* Write boxes */ // lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iMoovOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pMP4MuxContext->pInfoByteArray->pucBuffer, iSize); if (!(pMP4MuxContext->iIfNotFirstlyUpdateMovieAtom)) { // Set stxx offset pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2SttsOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2TrakOffset + iSize; pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2SttsEntryOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2SttsOffset + MP4MUX_BOX_HEAD_MAX_SIZE; pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StssOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2SttsEntryOffset + pMP4MuxContext->iMaxNumFrame * 8; pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StssEntryOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StssOffset + MP4MUX_BOX_HEAD_MAX_SIZE; #ifdef VIDEO_AVBR_ENABLE pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StscOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StssEntryOffset + pMP4MuxContext->iMaxNumFrame / 2 * 4; #else pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StscOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StssEntryOffset + pMP4MuxContext->iMaxNumFrame * 4; #endif 
pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StscEntryOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StscOffset + MP4MUX_BOX_HEAD_MAX_SIZE; pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StszOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StscEntryOffset + pMP4MuxContext->iMaxNumFrame * 12; pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StszEntryOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StszOffset + 20; pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StcoOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StszEntryOffset + pMP4MuxContext->iMaxNumFrame * 4; pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StcoEntryOffset = pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StcoOffset + MP4MUX_BOX_HEAD_MAX_SIZE; } /* Write stts */ MBUFFERByteArrayReset(pInfoByteArray); MP4MuxPutSTTS(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2SttsOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer, MP4MUX_BOX_HEAD_MAX_SIZE); // Write stts head lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2SttsEntryOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer + MP4MUX_BOX_HEAD_MAX_SIZE, pInfoByteArray->iCurPos - MP4MUX_BOX_HEAD_MAX_SIZE); // Write new stts entries /* Because the duration of last sample is a default value, it will be updated by next time, * back to one sample for updating conveniently */ iDiffSize = pInfoByteArray->iCurPos - MP4MUX_BOX_HEAD_MAX_SIZE - 8; pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2SttsEntryOffset += (iDiffSize >= 0 ? 
iDiffSize : 0); /* Write stss */ MBUFFERByteArrayReset(pInfoByteArray); MP4MuxPutSTSS(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StssOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer, MP4MUX_BOX_HEAD_MAX_SIZE); // Write stss head lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StssEntryOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer + MP4MUX_BOX_HEAD_MAX_SIZE, pInfoByteArray->iCurPos - MP4MUX_BOX_HEAD_MAX_SIZE); // Write new stss entries pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StssEntryOffset += (pInfoByteArray->iCurPos - MP4MUX_BOX_HEAD_MAX_SIZE); /* Write stsc */ MBUFFERByteArrayReset(pInfoByteArray); MP4MuxPutSTSC(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StscOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer, MP4MUX_BOX_HEAD_MAX_SIZE); // Write stsc head lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StscEntryOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer + MP4MUX_BOX_HEAD_MAX_SIZE, pInfoByteArray->iCurPos - MP4MUX_BOX_HEAD_MAX_SIZE); // Write new stsc entries pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StscEntryOffset += (pInfoByteArray->iCurPos - MP4MUX_BOX_HEAD_MAX_SIZE); /* Write stsz */ MBUFFERByteArrayReset(pInfoByteArray); MP4MuxPutSTSZ(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StszOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer, 20); // Write stsz head lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StszEntryOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer + 20, pInfoByteArray->iCurPos - 20); // Write new stsz entries 
pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StszEntryOffset += (pInfoByteArray->iCurPos - 20); /* Write stco */ MBUFFERByteArrayReset(pInfoByteArray); MP4MuxPutSTCO(MP4MUX_VIDEO2_ES_INDEX, pMP4MuxContext); lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StcoOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer, MP4MUX_BOX_HEAD_MAX_SIZE); // Write stco head lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StcoEntryOffset, SEEK_SET); write(pMP4MuxContext->iMP4Fd, pInfoByteArray->pucBuffer + MP4MUX_BOX_HEAD_MAX_SIZE, pInfoByteArray->iCurPos - MP4MUX_BOX_HEAD_MAX_SIZE); // Write new stco entries pMP4MuxContext->MovieAtomOffsetsAndSizes.iVideo2StcoEntryOffset += (pInfoByteArray->iCurPos - MP4MUX_BOX_HEAD_MAX_SIZE); /* Reset and seek, in case that we need to update audio trak */ { lseek(pMP4MuxContext->iMP4Fd, pMP4MuxContext->MovieAtomOffsetsAndSizes.iAudioTrakOffset, SEEK_SET); MBUFFERByteArrayReset(pInfoByteArray); iSize = 0; } }这里第二轨道app_stream的只能写一次,如何在获得数据之后进行更新
10-29
/* This method do the following work:
 *Parse audio and video sample. (start code in H264)
 *Skip sample until we get IDR.
 *Write sample into file.
 *Save index information during TWO_PASS mode.
 *
 * Parameters:
 *   iESIndex       - elementary-stream index to consume (MP4MUX_VIDEO_ES_INDEX /
 *                    MP4MUX_AUDIO_ES_INDEX / MP4MUX_VIDEO2_ES_INDEX under DUAL_CAM),
 *                    or -1 to let MP4MuxNextDataType() pick the next stream.
 *   pMP4MuxContext - muxer context holding the ES buffers, per-track sample
 *                    counters, SPS/PPS/VPS caches and output options.
 *
 * Returns:
 *   iESIndex (>= 0) on normal sample consumption,
 *   MP4MUX_EC_OK when a frame was skipped (empty frame / undetermined codec),
 *   MP4MUX_EC_GOT_EXTRAINFO when an extra-info frame was absorbed,
 *   MP4MUX_EC_INVALID_CONFIG on a bad audio extra-info payload,
 *   MP4MUX_EC_OUT_OF_RANGE when advancing the file offset fails,
 *   or a negative value propagated from MP4MuxNextDataType()/MP4MuxCheckFrameCount().
 */
int MP4MuxPutData(int iESIndex, MP4MUXCONTEXT *pMP4MuxContext)
{
    unsigned char *pucPos;
    int iLength = 0;
    int j;
    int iFrameType;
    MBUFFERESAUDIOEXTRAINFO AudioExtraInfo;
    MP4MUXFRAMEINFO *pFrameInfo;
    MBUFFERESSideInfo SideInfo;
    /* Per-NALU parse results for one video access unit. */
    int piSliceLength[MP4MUX_MAX_SLICE_IN_FRAME] = { 0 };
    int piSliceOffset[MP4MUX_MAX_SLICE_IN_FRAME] = { 0 };
    int piNALUType[MP4MUX_MAX_SLICE_IN_FRAME] = { 0 };
    int iNumSlicesInFrame = 0;
    int iCurOffset;
    unsigned char *mute_audio_buf = NULL;
    unsigned char *base = NULL;
    int len = 0;

    // drop frame if the track doesn't contain the related es buffer
    // NOTE(review): this branch counts VIDEO frames but advances the AUDIO
    // buffer; for an audio-only track it looks like it should advance the
    // VIDEO buffer instead — confirm against the intended drop semantics.
    if (pMP4MuxContext->iTrackInfo == MP4MUX_TRACK_INFO_AUDIO_ONLY &&
        pMP4MuxContext->pESBuffer[MP4MUX_VIDEO_ES_INDEX] &&
        MBUFFERESNumFrames(pMP4MuxContext->pESBuffer[MP4MUX_VIDEO_ES_INDEX]) > 1)
    {
        MBUFFERESAdvanceFrame(pMP4MuxContext->pESBuffer[MP4MUX_AUDIO_ES_INDEX]);
    }
    if (pMP4MuxContext->iTrackInfo == MP4MUX_TRACK_INFO_VIDEO_ONLY &&
        pMP4MuxContext->pESBuffer[MP4MUX_AUDIO_ES_INDEX] &&
        MBUFFERESNumFrames(pMP4MuxContext->pESBuffer[MP4MUX_AUDIO_ES_INDEX]) > 1)
    {
        MBUFFERESAdvanceFrame(pMP4MuxContext->pESBuffer[MP4MUX_AUDIO_ES_INDEX]);
    }

    /* Caller did not choose a stream: let the muxer pick whichever ES is due next. */
    if (iESIndex == -1)
    {
        iESIndex = MP4MuxNextDataType(pMP4MuxContext);
    }
    if (iESIndex < 0)
        return iESIndex;

    //SideInfo keep the sample info including pts,size,dts,etc in ESBuffer.
    MBUFFERESGetSideInfo(&SideInfo, pMP4MuxContext->pESBuffer[iESIndex]);
    if (MBUFFERESSideInfoGetIfDiscontinuityEnd(&SideInfo))
    {
        /* Remember that a discontinuity just ended so the PTS gap is measured below. */
        pMP4MuxContext->pbIfDiscontinuityEnd[iESIndex] = 1;
    }
    iFrameType = MBUFFERESGETFRAMETYPE(SideInfo.iFlags);
    if (iFrameType == MBUFFER_ES_FRAME_TYPE_EMPTYFRAME)
    {
        /* Nothing to mux; consume the placeholder frame and return. */
        MBUFFERESAdvanceFrame(pMP4MuxContext->pESBuffer[iESIndex]);
        return (MP4MUX_EC_OK);
    }
    // just advance the es buffer if the codec is not defined
    if (iESIndex == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iVideoCodec == TP_AVCODEC_UNDET)
    {
        MBUFFERESAdvanceFrame(pMP4MuxContext->pESBuffer[iESIndex]);
        return (MP4MUX_EC_OK);
    }
    if (iESIndex == MP4MUX_AUDIO_ES_INDEX && pMP4MuxContext->iAudioCodec == TP_AVCODEC_UNDET)
    {
        MBUFFERESAdvanceFrame(pMP4MuxContext->pESBuffer[iESIndex]);
        return (MP4MUX_EC_OK);
    }
#ifdef DUAL_CAM
    if (iESIndex == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideoCodec == TP_AVCODEC_UNDET) // DC_TODO: both video tracks share one codec field
    {
        MBUFFERESAdvanceFrame(pMP4MuxContext->pESBuffer[iESIndex]);
        return (MP4MUX_EC_OK);
    }
#endif

    //FrameInfo keep the sample info including pts,fileoffset ,etc in MP4 mux.
    pFrameInfo = MP4MuxFrameInfoContextGetFrameInfo(iESIndex,
                                                   pMP4MuxContext->piFrameInfoHead[iESIndex] + pMP4MuxContext->piSampleInterval[iESIndex],
                                                   pMP4MuxContext->pFrameInfoContext);

    // the old tsdemux version would generate DTS with value of -1
    // which means the same value as PTS.
    if (SideInfo.DTS == -1)
        SideInfo.DTS = SideInfo.PTS;
    if (pMP4MuxContext->pbIfDiscontinuityEnd[iESIndex])
    {
        // we save the gap between the end of last section and the beginning of current section
        pMP4MuxContext->pllPTSInterval[iESIndex] = SideInfo.PTS - pMP4MuxContext->pllPrePTS[iESIndex];
        pMP4MuxContext->pbIfDiscontinuityEnd[iESIndex] = 0;
    }
    // PTS adjustment: subtract the accumulated discontinuity gap so the
    // recorded timeline stays continuous across sections.
    pFrameInfo->llPTS = SideInfo.PTS - pMP4MuxContext->pllPTSInterval[iESIndex];
    pFrameInfo->llDTS = SideInfo.DTS - pMP4MuxContext->pllPTSInterval[iESIndex];
    // save the PTS for future usage, to calculate the gap between two PTS
    pMP4MuxContext->pllPrePTS[iESIndex] = SideInfo.PTS;

    if (iESIndex == MP4MUX_AUDIO_ES_INDEX)
    {
        //parse audio sample
        if (iFrameType == MBUFFER_ES_FRAME_TYPE_EXTRAINFOFRAME)
        {
            // if extra info, just extract the info and stores in the context
            memcpy(&AudioExtraInfo,
                   MBUFFERESFirstFramePos(pMP4MuxContext->pESBuffer[iESIndex]),
                   MBUFFERESFirstFrameLength(pMP4MuxContext->pESBuffer[iESIndex]));
            MBUFFERESAdvanceFrame(pMP4MuxContext->pESBuffer[iESIndex]);
            // find sample rate index
            pMP4MuxContext->uiSamplingFrequencyIndex = AudioExtraInfo.iSampleRate;
            if (pMP4MuxContext->uiSamplingFrequencyIndex >= 0)
            {
                pMP4MuxContext->uiChannelConfiguration = (unsigned int)AudioExtraInfo.iNumChannels;
                if (pMP4MuxContext->uiOutputOption == MP4MUX_OUTPUT_OPTION_TWO_PASS)
                    MP4MuxWriteRecoverData(pMP4MuxContext);
                return (MP4MUX_EC_GOT_EXTRAINFO);
            }
            // not found
            return (MP4MUX_EC_INVALID_CONFIG);
        }
        else
        {
            // unsigned char *p;
            GetFrameFromESBUFFER(&pucPos, &iLength, pMP4MuxContext->iAudioCodec, pMP4MuxContext->pESBuffer[iESIndex]);
            if (pMP4MuxContext->iAudioCodec == TP_AVCODEC_AAC_ADTS && pucPos[0] == 0xff && (pucPos[1] & 0xf0) == 0xf0)
            {
                // if the frame data starts with ADTS sync word
                // grap the sample rate and channel number info from the ADTS header
                MP4MuxParseADTSHeader(pucPos, pMP4MuxContext);
                iLength -= MP4MUX_ADTS_HEADER_LENGTH;
                // for AAC_ADTS audio format, need to remove the ADTS header before put it into mdat.
                MBUFFERESFirstFrameAdvance(MP4MUX_ADTS_HEADER_LENGTH, pMP4MuxContext->pESBuffer[iESIndex]);
                // p = MBUFFERESFirstFramePos(pMP4MuxContext->pESBuffer[iESIndex]);
            }
            pFrameInfo->uiSampleSize = iLength;
            base = MBUFFERESFirstFramePos(pMP4MuxContext->pESBuffer[iESIndex]);
            len = MBUFFERESFirstFrameLength(pMP4MuxContext->pESBuffer[iESIndex]);
            if(get_record_audio_enable())
            {
                /* Normal recording: queue the (header-stripped) frame as-is. */
                put_data_into_iovec(base, len, pMP4MuxContext);
            }
            else
            {
                /* Audio recording disabled: substitute a silent buffer of the
                 * same length so timing stays intact; fall back to the real
                 * frame when no mute buffer is available. */
                mute_audio_buf = audio_generate_mute_buffer(NORAMAL_MUTE, pMP4MuxContext->iAudioCodec, pMP4MuxContext->uiSamplingFrequencyIndex, len);
                if (NULL == mute_audio_buf)
                {
                    mute_audio_buf = base;
                }
                // NOTE(review): for AAC_ADTS this writes `len` bytes starting
                // past the ADTS header of the mute buffer — confirm the mute
                // buffer is allocated with at least len + header bytes.
                put_data_into_iovec(pMP4MuxContext->iAudioCodec == TP_AVCODEC_AAC_ADTS ? mute_audio_buf + MP4MUX_ADTS_HEADER_LENGTH : mute_audio_buf, len, pMP4MuxContext);
            }
            MBUFFERESBufferNoMallocAdvanceData(pMP4MuxContext->pESBuffer[iESIndex]);
        }
    }
    else if (iESIndex == MP4MUX_VIDEO_ES_INDEX
#ifdef DUAL_CAM
             || iESIndex == MP4MUX_VIDEO2_ES_INDEX
#endif
            )
    {
        //parse video sample
        //TODO h265
        iCurOffset = 0;
        pFrameInfo->uiSampleSize = 0;
        pucPos = MBUFFERESFirstFramePos(pMP4MuxContext->pESBuffer[iESIndex]);
        if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264)
        {
            //Get array from a Sample .Array element can be sps , pps ,slice .Each element begin with start code(0x000001 or 0x00000001)
            NALParseFrame(pucPos, MBUFFERESFirstFrameLength(pMP4MuxContext->pESBuffer[iESIndex]),
                          MP4MuxIfCheckMultiSliceInFrame(pMP4MuxContext),
                          &(iNumSlicesInFrame), piSliceOffset, piSliceLength, piNALUType);
        }
        else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265)
        {
            //Get array from a Sample .Array element can be sps , pps ,slice .Each element begin with start code(0x000001 or 0x00000001)
            H265NALParseFrame(pucPos, MBUFFERESFirstFrameLength(pMP4MuxContext->pESBuffer[iESIndex]),
                              MP4MuxIfCheckMultiSliceInFrame(pMP4MuxContext),
                              &(iNumSlicesInFrame), piSliceOffset, piSliceLength, piNALUType);
        }

        //Parse array we have got.
        /* For each NALU: cache the first SPS/PPS(/VPS) seen per track, then
         * rewrite the Annex-B start code into a 4-byte length prefix (AVCC
         * style) while streaming the payload into the iovec. */
        for (j = 0; j < iNumSlicesInFrame; j++)
        {
            if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H264 && piSliceLength[j] <= MP4MUX_MAX_STRING_LENGTH)
            {
                switch (piNALUType[j])
                {
                case H264_NALUTYPE_SPS:
                    /* Capture only the first SPS per track (length fields start at 0). */
                    if (iESIndex == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iSPSLength == 0)
                    {
                        pMP4MuxContext->iSPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucSPS, pucPos + piSliceOffset[j], pMP4MuxContext->iSPSLength);
                    }
#ifdef DUAL_CAM
                    else if (iESIndex == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2SPSLength == 0)
                    {
                        pMP4MuxContext->iVideo2SPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucVideo2SPS, pucPos + piSliceOffset[j], pMP4MuxContext->iVideo2SPSLength);
                    }
#endif
                    break;
                case H264_NALUTYPE_PPS:
                    if (iESIndex == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iPPSLength == 0)
                    {
                        pMP4MuxContext->iPPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucPPS, pucPos + piSliceOffset[j], pMP4MuxContext->iPPSLength);
                    }
#ifdef DUAL_CAM
                    else if (iESIndex == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2PPSLength == 0)
                    {
                        pMP4MuxContext->iVideo2PPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucVideo2PPS, pucPos + piSliceOffset[j], pMP4MuxContext->iVideo2PPSLength);
                    }
#endif
                    break;
                default:
                    break;
                }
            }
            else if (pMP4MuxContext->iVideoCodec == TP_AVCODEC_H265 && piSliceLength[j] <= MP4MUX_MAX_STRING_LENGTH)
            {
                switch (piNALUType[j])
                {
                case H265_NALUTYPE_SPS:
                    if (iESIndex == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iSPSLength == 0)
                    {
                        pMP4MuxContext->iSPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucSPS, pucPos + piSliceOffset[j], pMP4MuxContext->iSPSLength);
                    }
#ifdef DUAL_CAM
                    else if (iESIndex == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2SPSLength == 0)
                    {
                        pMP4MuxContext->iVideo2SPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucVideo2SPS, pucPos + piSliceOffset[j], pMP4MuxContext->iVideo2SPSLength);
                    }
#endif
                    break;
                case H265_NALUTYPE_PPS:
                    if (iESIndex == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iPPSLength == 0)
                    {
                        pMP4MuxContext->iPPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucPPS, pucPos + piSliceOffset[j], pMP4MuxContext->iPPSLength);
                    }
#ifdef DUAL_CAM
                    else if (iESIndex == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2PPSLength == 0)
                    {
                        pMP4MuxContext->iVideo2PPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucVideo2PPS, pucPos + piSliceOffset[j], pMP4MuxContext->iVideo2PPSLength);
                    }
#endif
                    break;
                case H265_NALUTYPE_VPS:
                    /* H.265 additionally needs the VPS for hvcC. */
                    if (iESIndex == MP4MUX_VIDEO_ES_INDEX && pMP4MuxContext->iVPSLength == 0)
                    {
                        pMP4MuxContext->iVPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucVPS, pucPos + piSliceOffset[j], pMP4MuxContext->iVPSLength);
                    }
#ifdef DUAL_CAM
                    else if (iESIndex == MP4MUX_VIDEO2_ES_INDEX && pMP4MuxContext->iVideo2VPSLength == 0)
                    {
                        pMP4MuxContext->iVideo2VPSLength = piSliceLength[j];
                        memcpy(pMP4MuxContext->pucVideo2VPS, pucPos + piSliceOffset[j], pMP4MuxContext->iVideo2VPSLength);
                    }
#endif
                    break;
                default:
                    break;
                }
            }
            //Advance start code.
            MBUFFERESFirstFrameAdvance(piSliceOffset[j] - iCurOffset, pMP4MuxContext->pESBuffer[iESIndex]);
            if (iFrameType != MBUFFER_ES_FRAME_TYPE_EXTRAINFOFRAME)
            {
                //Write length of element into file
                put_video_frame_len(pMP4MuxContext, piSliceLength[j]);
                //Write element data into file
                base = MBUFFERESFirstFramePos(pMP4MuxContext->pESBuffer[iESIndex]);
                len = piSliceLength[j];
                put_data_into_iovec(base, len, pMP4MuxContext);
                //Record the sample size,which compose of elements in array.
                // +4 accounts for the 4-byte length prefix written per NALU.
                pFrameInfo->uiSampleSize += (piSliceLength[j] + 4);
            }
            //No need to write if we got extra info sample.
            MBUFFERESFirstFrameAdvance(piSliceLength[j], pMP4MuxContext->pESBuffer[iESIndex]);
            iCurOffset = piSliceOffset[j] + piSliceLength[j];
        }

        if (MBUFFERESGETIFKEYFRAME(SideInfo.iFlags))
        {
            pFrameInfo->iKeyFrameType = MP4MUX_KEY_FRAME_TYPE_I;
        }
#ifdef VIDEO_AVBR_ENABLE
        else if (MBUFFERESGETIFVIRTUALIFRAME(SideInfo.iFlags))
        {
            pFrameInfo->iKeyFrameType = MP4MUX_KEY_FRAME_TYPE_VIRTUAL_I;
        }
#endif
        if (iFrameType == MBUFFER_ES_FRAME_TYPE_EXTRAINFOFRAME)
        {
            /* Parameter-set-only frame: nothing was written, stop before bookkeeping. */
            return (MP4MUX_EC_GOT_EXTRAINFO);
        }
        if (MBUFFERESGETIFFIRSTFRAMEOFEVENT(SideInfo.iFlags) && iESIndex == MP4MUX_VIDEO_ES_INDEX)
        {
            pFrameInfo->uiStartTimeOfEvent = 1; // Add 1, in case uiStartTimeOfEvent equals 0
            if (SideInfo.llParameter > 0)
            {
                pFrameInfo->iEventTypeCount = (int) SideInfo.llParameter;
            }
            else
            {
                pFrameInfo->iEventTypeCount = 1;
            }
            pMP4MuxContext->iEventCount += pFrameInfo->iEventTypeCount;
        }
    }

    /* Common bookkeeping: record the sample's file offset, advance the write
     * offset and the per-track sample/frame counters. */
    pFrameInfo->uiOffsetInFile = pMP4MuxContext->uiCurrentOffset;
    if (MP4MuxAdvanceOffset(pFrameInfo->uiSampleSize, pMP4MuxContext) != MP4MUX_EC_OK)
        return MP4MUX_EC_OUT_OF_RANGE;
    MP4MuxFrameInfoContextAdvance(iESIndex, pMP4MuxContext->pFrameInfoContext);
    pMP4MuxContext->piSampleInterval[iESIndex]++;
    pMP4MuxContext->puiNumSamples[iESIndex]++;
    pMP4MuxContext->iLastSampleESIndex = iESIndex;
    if (pMP4MuxContext->uiOutputOption == MP4MUX_OUTPUT_OPTION_REALTIME)
    {
        int iRet = MP4MuxCheckFrameCount(0, pMP4MuxContext);
        if (iRet != MP4MUX_EC_OK)
        {
            return iRet;
        }
    }
    else if (pMP4MuxContext->uiOutputOption == MP4MUX_OUTPUT_OPTION_TWO_PASS)
    {
        //Write index data for recorvering later.
        MP4MuxWriteRecoverData(pMP4MuxContext);
        MP4MuxWriteFrameInfo(iESIndex, pMP4MuxContext);
    }
    return (iESIndex);
}
我想要mp4两路视频轨道有不一样的编码格式,这里的代码怎么改?
09-27
评论
成就一亿技术人!
拼手气红包6.0元
还能输入1000个字符
 
红包 添加红包
表情包 插入表情
 条评论被折叠 查看
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值