Please note: this article is original; if you repost it, please credit the source.
=================================================================================
Source code: http://androidxref.com/
1369 void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
...
1410 ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
1411 mStartTimeUs, timeUs);
This article examines how the mStartTimeUs and timeUs printed here are obtained (a numeric sketch follows the list below):
1. mStartTimeUs
line1917: StagefrightRecorder samples the system time: int64_t startTimeUs = systemTime() / 1000;
line2132: MPEG4Writer::Track adds the start-time offset: startTimeUs += startTimeOffsetUs;
line1362 & line1363: the listener pulls each frame, with its timestamp, out of the camera's buffer queue:
while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
mCameraSource->processBufferQueueFrame(buffer);
line1179: the first camera frame's timestamp minus the CameraSource start time (line1362&line1363 - line2132): mStartTimeUs = timestampUs - mStartTimeUs
2. timeUs
line1362 & line1363: the listener pulls each frame, with its timestamp, out of the camera's buffer queue:
while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
mCameraSource->processBufferQueueFrame(buffer);
line1408: each frame's output timestamp is then timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs)
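Putting the pieces together, the following is a minimal standalone C++ sketch of the arithmetic (not AOSP code; the numeric values are taken from the pass log in the issue sample at the end of this article):

#include <cstdio>

int main() {
    long long startTimeUs = 215703345;            // line1917: systemTime() / 1000
    startTimeUs += 1000000;                       // line2132: + startTimeOffsetUs
    long long mStartTimeUs = startTimeUs;         // CameraSource::start -> 216703345

    long long timestampUs = 216720431;            // first accepted camera frame
    long long mFirstFrameTimeUs = timestampUs;    // line1171
    mStartTimeUs = timestampUs - mStartTimeUs;    // line1179 -> 17086 us initial delay

    long long timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);  // line1408
    printf("initial delay: %lld, current time stamp: %lld\n", mStartTimeUs, timeUs);
    return 0;
}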
Part I: start timestamp for CameraSource
media/libmediaplayerservice/StagefrightRecorder.cpp
971 status_t StagefrightRecorder::start() {
972 ALOGV("start");
...
992
993 switch (mOutputFormat) {
994 case OUTPUT_FORMAT_DEFAULT:
995 case OUTPUT_FORMAT_THREE_GPP:
996 case OUTPUT_FORMAT_MPEG_4:
997 case OUTPUT_FORMAT_WEBM:
998 {
999 bool isMPEG4 = true;
1000 if (mOutputFormat == OUTPUT_FORMAT_WEBM) {
1001 isMPEG4 = false;
1002 }
1003 sp<MetaData> meta = new MetaData;
1004 setupMPEG4orWEBMMetaData(&meta); >>>>a1
1005 status = mWriter->start(meta.get()); >>>>a4
1006 break;
1007 }
1916 void StagefrightRecorder::setupMPEG4orWEBMMetaData(sp<MetaData> *meta) {
1917 int64_t startTimeUs = systemTime() / 1000; >>>>a2
1918 (*meta)->setInt64(kKeyTime, startTimeUs); >>>>a3
+ALOGI("case03738707,Start time offset: %" PRId64 " us", startTimeUs); >>>>debug log added locally (despite its message text it prints startTimeUs, not an offset); together with the other case03738707 logs it produces the traces in the issue sample below
./media/libstagefright/MPEG4Writer.cpp
730 status_t MPEG4Writer::start(MetaData *param) {
...
783 if (mStarted) {
784 if (mPaused) {
785 mPaused = false;
786 return startTracks(param); >>>>a5
787 }
788 return OK;
789 }
>>>>on the first start mStarted is still false, so this early-return block is skipped; start() falls through and calls startTracks(param) near its end, which is how a5 is reached on a normal start
610 status_t MPEG4Writer::startTracks(MetaData *params) {
611 if (mTracks.empty()) {
612 ALOGE("No source added");
613 return INVALID_OPERATION;
614 }
615
616 for (List<Track *>::iterator it = mTracks.begin();
617 it != mTracks.end(); ++it) {
618 status_t err = (*it)->start(params); >>>>a6
619
2097 status_t MPEG4Writer::Track::start(MetaData *params) {
2098 if (!mDone && mPaused) {
2099 mPaused = false;
2100 mResumed = true;
2101 return OK;
2102 }
2103
2104 int64_t startTimeUs;
2105 if (params == NULL || !params->findInt64(kKeyTime, &startTimeUs)) {
2106 startTimeUs = 0;
2107 }
...
2117 sp<MetaData> meta = new MetaData;
2118 if (mOwner->isRealTimeRecording() && mOwner->numTracks() > 1) {
...
2128 int64_t startTimeOffsetUs = mOwner->getStartTimeOffsetMs() * 1000LL;
2129 if (startTimeOffsetUs < 0) { // Start time offset was not set
2130 startTimeOffsetUs = kInitialDelayTimeUs;
2131 }
2132 startTimeUs += startTimeOffsetUs; >>>>
2133 ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs);
2134 }
2135
2136 meta->setInt64(kKeyTime, startTimeUs);
2137
2138 status_t err = mSource->start(meta.get()); >>>>a7 >>>>MediaCodecSource->start
media/libstagefright/MediaCodecSource.cpp
379 status_t MediaCodecSource::start(MetaData* params) {
380 sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
381 msg->setObject("meta", params);
382 return postSynchronouslyAndReturnError(msg);
383 }
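Note that start() blocks even though it only posts a message: postSynchronouslyAndReturnError sends kWhatStart to the looper thread and waits for the reply carrying the status set at a8 below. A standalone model of that request/reply pattern, using std::promise in place of the AOSP ALooper/AMessage machinery (illustrative only):

#include <cstdio>
#include <future>
#include <thread>

int main() {
    std::promise<int> reply;                      // stands in for the response AMessage
    std::thread looper([&reply] {                 // stands in for the handler thread
        int err = 0;                              // result of onStart(params), as at a8
        reply.set_value(err);                     // post the reply back to the caller
    });
    int err = reply.get_future().get();           // caller blocks, like postSynchronously...
    looper.join();
    printf("start returned %d\n", err);
    return 0;
}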
857 void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
...
1004 case kWhatStart:
1005 {
...
1009 sp<RefBase> obj;
1010 CHECK(msg->findObject("meta", &obj));
1011 MetaData *params = static_cast<MetaData *>(obj.get());
1012
1013 sp<AMessage> response = new AMessage;
1014 response->setInt32("err", onStart(params)); >>>>a8
780 status_t MediaCodecSource::onStart(MetaData *params) {
...
822 CHECK(mPuller != NULL);
823 sp<MetaData> meta = params;
824 if (mSetEncoderFormat) {
825 if (meta == NULL) {
826 meta = new MetaData;
827 }
828 meta->setInt32(kKeyPixelFormat, mEncoderFormat);
829 meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
830 }
831
832 sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
833 err = mPuller->start(meta.get(), notify); >>>>a9
177 status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
178 ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
179 mLooper->start(
180 false /* runOnCallingThread */,
181 false /* canCallJava */,
182 PRIORITY_AUDIO);
183 mLooper->registerHandler(this);
184 mNotify = notify;
185
186 sp<AMessage> msg = new AMessage(kWhatStart, this); >>>>a10
187 msg->setObject("meta", meta);
188 return postSynchronouslyAndReturnError(msg);
189 }
236 void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
237 switch (msg->what()) {
238 case kWhatStart:
239 {
240 sp<RefBase> obj;
241 CHECK(msg->findObject("meta", &obj));
242
243 {
244 Mutexed<Queue>::Locked queue(mQueue);
245 queue->mPulling = true;
246 }
247
248 status_t err = mSource->start(static_cast<MetaData *>(obj.get())); >>>>a11 >>>>CameraSource->start
./media/libstagefright/CameraSource.cpp
801 status_t CameraSource::start(MetaData *meta) {
802 ALOGV("start");
...
818 if (meta) {
819 int64_t startTimeUs;
820 if (meta->findInt64(kKeyTime, &startTimeUs)) { >>>>a12
821 mStartTimeUs = startTimeUs; >>>>a13 >>>>mStartTimeUs means start timestamp for CameraSource
822 }
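To recap Part I: the start time sampled at a2 travels under kKeyTime from one MetaData to the next through the whole chain a3 -> a13, modified only once, at line2132. A standalone sketch of that handoff, modeling MetaData as a plain map (not AOSP code; values from the pass log):

#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

using Meta = std::map<std::string, int64_t>;      // stand-in for MetaData

int64_t cameraSourceStart(const Meta &meta) {     // a12/a13
    return meta.at("kKeyTime");                   // stored as mStartTimeUs
}

int main() {
    Meta meta;
    int64_t startTimeUs = 215703345;              // a2: systemTime() / 1000
    meta["kKeyTime"] = startTimeUs;               // a3

    // MPEG4Writer::Track::start (real-time recording, more than one track):
    startTimeUs += 1000000;                       // line2132: + startTimeOffsetUs (1 s in the logs)
    meta["kKeyTime"] = startTimeUs;               // line2136, handed down via a7/a9/a11

    printf("mStartTimeUs = %lld us\n", (long long)cameraSourceStart(meta));  // 216703345
    return 0;
}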
Part II: initial delay and current time stamp in CameraSource
/media/libstagefright/CameraSource.cpp
1335 void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
1336 ALOGV("%s: onFrameAvailable", __FUNCTION__); >>>>
1337
1338 Mutex::Autolock l(mLock);
1339
1340 if (!mFrameAvailable) {
1341 mFrameAvailable = true;
1342 mFrameAvailableSignal.signal();
1343 }
1344 }
1346 bool CameraSource::BufferQueueListener::threadLoop() {
...
1351 {
1352 Mutex::Autolock l(mLock);
1353 while (!mFrameAvailable) {
1354 if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1355 return true;
1356 }
1357 }
1358 mFrameAvailable = false;
1359 }
1360
1361 BufferItem buffer;
1362 while (mConsumer->acquireBuffer(&buffer, 0) == OK) { >>>>b1 >>>>buffer.mTimestamp means timestamp for each frame out of camera Consumer
1363 mCameraSource->processBufferQueueFrame(buffer); >>>>b2
1364 }
1365
1366 return true;
1367 }
1369 void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
...
1372 int64_t timestampUs = buffer.mTimestamp / 1000; >>>>b3
1373 if (shouldSkipFrameLocked(timestampUs)) { >>>>b4
1374 mVideoBufferConsumer->releaseBuffer(buffer);
1375 return;
1376 }
...
1408 int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs); >>>>b7 >>>>timeUs means timestamp for each frame out of CameraSource
1409 mFrameTimes.push_back(timeUs);
1410 ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
1411 mStartTimeUs, timeUs);
1140 bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
...
1169 mLastFrameTimestampUs = timestampUs;
1170 if (mNumFramesReceived == 0) {
1171 mFirstFrameTimeUs = timestampUs; >>>>b5
1172 // Initial delay
1173 if (mStartTimeUs > 0) {
1174 if (timestampUs < mStartTimeUs) {
1175 // Frame was captured before recording was started
1176 // Drop it without updating the statistical data.
1177 return true;
1178 }
1179 mStartTimeUs = timestampUs - mStartTimeUs; >>>>b6 >>>>mStartTimeUs means initial delay timestamp from camera Consumer to CameraSource
1180 }
1181 }
1182
1183 return false;
1184 }
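A standalone model of this first-frame branch, fed with the first two frames from the pass log below: the first frame (216352949) predates mStartTimeUs (216703345) and is dropped, and the second converts mStartTimeUs into the 17086 us initial delay at b6. Not AOSP code; in the real source mNumFramesReceived is advanced by processBufferQueueFrame only after this check passes:

#include <cstdint>
#include <cstdio>

struct Model {
    int64_t mStartTimeUs = 216703345;                      // a13, from the pass log
    int64_t mFirstFrameTimeUs = 0;
    int     mNumFramesReceived = 0;

    bool shouldSkipFrame(int64_t timestampUs) {
        if (mNumFramesReceived == 0) {
            mFirstFrameTimeUs = timestampUs;               // b5
            if (mStartTimeUs > 0) {
                if (timestampUs < mStartTimeUs) {
                    return true;                           // captured before start: drop
                }
                mStartTimeUs = timestampUs - mStartTimeUs; // b6: now the initial delay
            }
        }
        mNumFramesReceived++;
        return false;
    }
};

int main() {
    Model m;
    printf("skip=%d\n", m.shouldSkipFrame(216352949));     // skip=1 ("Drop frame at ...")
    bool skip = m.shouldSkipFrame(216720431);
    printf("skip=%d delay=%lld\n", skip, (long long)m.mStartTimeUs);  // skip=0 delay=17086
    return 0;
}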
Part III: who is the consumer?
frameworks/av/include/media/stagefright/CameraSource.h
263 // Consumer and producer of the buffer queue between this class and camera. >>>>
264 sp<BufferItemConsumer> mVideoBufferConsumer;
265 sp<IGraphicBufferProducer> mVideoBufferProducer;
569 status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
570 uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
571 ALOGV("initBufferQueue");
...
578 // Create a buffer queue.
579 sp<IGraphicBufferProducer> producer;
580 sp<IGraphicBufferConsumer> consumer;
581 BufferQueue::createBufferQueue(&producer, &consumer); >>>>
...
590 mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
591 mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
592 mVideoBufferProducer = producer;
...
615 res = mCamera->setVideoTarget(mVideoBufferProducer); >>>>
...
624 mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this); >>>>
625 res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
frameworks/native/libs/gui/BufferQueue.cpp
80 void BufferQueue::createBufferQueue(sp<IGraphicBufferProducer>* outProducer,
81 sp<IGraphicBufferConsumer>* outConsumer,
82 bool consumerIsSurfaceFlinger) {
83 LOG_ALWAYS_FATAL_IF(outProducer == NULL,
84 "BufferQueue: outProducer must not be NULL");
85 LOG_ALWAYS_FATAL_IF(outConsumer == NULL,
86 "BufferQueue: outConsumer must not be NULL");
87
88 sp<BufferQueueCore> core(new BufferQueueCore());
89 LOG_ALWAYS_FATAL_IF(core == NULL,
90 "BufferQueue: failed to create BufferQueueCore");
91
92 sp<IGraphicBufferProducer> producer(new BufferQueueProducer(core, consumerIsSurfaceFlinger));
93 LOG_ALWAYS_FATAL_IF(producer == NULL,
94 "BufferQueue: failed to create BufferQueueProducer");
95
96 sp<IGraphicBufferConsumer> consumer(new BufferQueueConsumer(core));
97 LOG_ALWAYS_FATAL_IF(consumer == NULL,
98 "BufferQueue: failed to create BufferQueueConsumer");
99
100 *outProducer = producer;
101 *outConsumer = consumer;
102 }
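In short, CameraSource keeps the consumer end (mVideoBufferConsumer) and hands the producer end to the camera via setVideoTarget, so every buffer the camera queues carries its capture timestamp into b1/b3 above. A toy single-threaded model of that wiring, with a plain std::queue standing in for the real BufferQueue (which is cross-thread and backed by graphics buffers):

#include <cstdint>
#include <cstdio>
#include <queue>

struct BufferItem { int64_t mTimestamp; };        // nanoseconds, like the real BufferItem

int main() {
    std::queue<BufferItem> bufferQueue;           // createBufferQueue(&producer, &consumer)
    bufferQueue.push({216720431000LL});           // producer side: camera, via setVideoTarget

    // consumer side: BufferQueueListener::threadLoop draining the queue (b1/b2)
    while (!bufferQueue.empty()) {
        BufferItem buffer = bufferQueue.front();
        bufferQueue.pop();
        printf("frame at %lld us\n", (long long)(buffer.mTimestamp / 1000));  // b3
    }
    return 0;
}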
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Issue sample:
#pass-logcat.log
>>>>the initial delay is normal (17 ms); the recorded duration is about 10s + 17ms = 10s 17ms
11-13 15:16:02.763 1003 2803 I StagefrightRecorder: case03738707,Start time offset: 215703345 us
11-13 15:16:02.764 1003 2803 I MPEG4Writer: case03738707,MPEG4Writer::Track::start 1: 215703345 us
11-13 15:16:02.764 1003 2803 I MPEG4Writer: Start time offset: 1000000 us
11-13 15:16:02.764 1003 4142 I MediaCodecSource: MediaCodecSource (video) starting
11-13 15:16:02.765 1003 4177 I CameraSource: case03738707,CameraSource::start: 216703345 us >>>>mStartTimeUs
11-13 15:16:03.280 1003 4142 I MediaCodecSource: MediaCodecSource (video) started <<<<
11-13 15:16:03.503 1003 4178 I CameraSource: case03738707,CameraSource::processBufferQueueFrame:11-13: 216352949 us
11-13 15:16:03.503 1003 4178 V CameraSource: Drop frame at 216352949/216703345 us
...
11-13 15:16:03.881 1003 4178 I CameraSource: case03738707,CameraSource::processBufferQueueFrame:11-13: 216720431 us >>>>timestampUs
11-13 15:16:03.881 1003 4178 I CameraSource: case03738707,CameraSource::shouldSkipFrameLocked 2: 216703345 us
11-13 15:16:03.881 1003 4178 I CameraSource: case03738707,CameraSource::shouldSkipFrameLocked 3: 17086 us >>>>mStartTimeUs = timestampUs - mStartTimeUs
11-13 15:16:03.882 1003 4178 V CameraSource: initial delay: 17086, current time stamp: 17086 >>>>216720431-216703345=17086us=17ms
...
11-13 15:16:13.295 1003 4142 I MediaCodecSource: encoder (video) stopping
11-13 15:16:13.294 1003 4142 V CameraSource: Set stoptime: 226232474 us >>>>226232474-216703345=9529129us=9.5s
11-13 15:16:13.333 1003 4142 I MediaCodecSource: encoder (video) stopped <<<<15:16:13-15:16:03=10s
#fail-logcat.log
>>>>the initial delay is abnormally large (13s 298ms); the recorded duration is about 11s + 13s298ms = 24s 298ms
11-13 15:16:10.515 996 996 I StagefrightRecorder: case03738707,Start time offset: 210533899 us
11-13 15:16:10.516 996 996 I MPEG4Writer: case03738707,MPEG4Writer::Track::start 1: 210533899 us
11-13 15:16:10.516 996 996 I MPEG4Writer: Start time offset: 1000000 us
11-13 15:16:10.516 996 3969 I MediaCodecSource: MediaCodecSource (video) starting
11-13 15:16:10.517 996 4002 I CameraSource: case03738707,CameraSource::start: 211533899 us >>>>mStartTimeUs
11-13 15:16:11.090 996 3969 I MediaCodecSource: MediaCodecSource (video) started <<<<
11-13 15:16:11.311 996 4003 I CameraSource: case03738707,CameraSource::processBufferQueueFrame:11-13: 224832511 us >>>>timestampUs
11-13 15:16:11.311 996 4003 I CameraSource: case03738707,CameraSource::shouldSkipFrameLocked 2: 211533899 us
11-13 15:16:11.311 996 4003 I CameraSource: case03738707,CameraSource::shouldSkipFrameLocked 3: 13298612 us >>>>mStartTimeUs = timestampUs - mStartTimeUs
11-13 15:16:11.311 996 4003 V CameraSource: initial delay: 13298612, current time stamp: 13298612 >>>>224832511-211533899=13298612us=13s298ms
11-13 15:16:21.121 996 3969 I MediaCodecSource: encoder (video) stopping
11-13 15:16:21.120 996 3969 V CameraSource: Set stoptime: 221138609 us >>>>221138609-224832511<0, 221138609-211533899=9604710us=9.6s
11-13 15:16:22.377 996 3969 I MediaCodecSource: encoder (video) stopped <<<<15:16:22-15:16:11=11s
#pass-Record.mp4
Complete name : pass-Record.mp4
...
Duration : 9s 421ms >>>>
Bit rate : 199 Kbps
#fail-Record.mp4
General
Complete name : fail-Record.mp4
...
Duration : 23s 110ms >>>>
Bit rate : 85.8 Kbps
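Because timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs) (b7), the initial delay is baked into every frame's timestamp, so the muxed duration grows by roughly that amount. A rough cross-check against the MediaInfo durations above, using the initial delays and the stop-time deltas computed from the two logs (approximate; muxer trimming is ignored):

#include <cstdio>

int main() {
    long long passDelayUs = 17086,    passCapturedUs = 9529129;  // pass-logcat.log
    long long failDelayUs = 13298612, failCapturedUs = 9604710;  // fail-logcat.log
    printf("pass ~= %.3f s\n", (passDelayUs + passCapturedUs) / 1e6);  // ~9.5 s vs 9s 421ms
    printf("fail ~= %.3f s\n", (failDelayUs + failCapturedUs) / 1e6);  // ~22.9 s vs 23s 110ms
    return 0;
}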
To sum up: this article traced how mStartTimeUs and timeUs are obtained in Android's CameraSource, covering the recorder's system start time, the per-frame timestamps from the camera's buffer queue, and the initial-delay calculation. The logcat analysis contrasts a normal run with an abnormal one and shows how the timestamp difference directly inflates the duration of the recorded video.