当 camera open 并顺利下发 capture request 之后,camera HAL 完成 capture 操作,会通过 HIDL 定义的接口 processCaptureResult 将结果数据回传给 framework 侧处理。
/hardware/interfaces/camera/device/3.2/ICameraDeviceCallback.hal
ICameraDeviceCallback 是 HIDL 定义的 camera 回调接口,processCaptureResult 方法就是从 HAL 层回调到 CameraServer 的接口。CameraServer 这一侧的回调实现类就是 Camera3Device:openCamera 时构造出来的 Camera3Device 会进行初始化,其 initialize 方法中与 HAL 建立连接;获取 session 时,Camera3Device 将自己作为 callback 回调对象传递到了 HAL,所以后续 HAL 就会回调到 Camera3Device 类的 processCaptureResult 方法当中。
/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
// Only one processCaptureResult should be called at a time, so
// the locks won't block. The locks are present here simply to enforce this.
hardware::Return<void> Camera3Device::processCaptureResult(
        const hardware::hidl_vec<
                hardware::camera::device::V3_2::CaptureResult>& results) {
    // A V3_2 result never carries physical-camera metadata; pass an empty
    // vector so the V3_4-capable per-result helper can be shared.
    hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;

    // Ideally we should grab mLock, but that can lead to deadlock, and
    // it's not super important to get up to date value of mStatus for this
    // warning print, hence skipping the lock here
    if (mStatus == STATUS_ERROR) {
        // Per API contract, HAL should act as closed after device error
        // But mStatus can be set to error by framework as well, so just log
        // a warning here.
        ALOGW("%s: received capture result in error state.", __FUNCTION__);
    }

    // Snapshot the listener under mOutputLock; promote() may return null if
    // the listener has already been destroyed.
    sp<NotificationListener> listener;
    {
        std::lock_guard<std::mutex> l(mOutputLock);
        listener = mListener.promote();
    }

    if (mProcessCaptureResultLock.tryLock() != OK) {
        // This should never happen; it indicates a wrong client implementation
        // that doesn't follow the contract. But, we can be tolerant here.
        ALOGE("%s: callback overlapped! waiting 1s...",
                __FUNCTION__);
        if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
            ALOGE("%s: cannot acquire lock in 1s, dropping results",
                    __FUNCTION__);
            // really don't know what to do, so bail out.
            return hardware::Void();
        }
    }

    // Bundle of all device state the result-processing helpers need, so they
    // can run as free functions without touching Camera3Device members
    // directly.
    CaptureOutputStates states {
        mId,
        mInFlightLock, mLastCompletedRegularFrameNumber,
        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
        mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
        mNextShutterFrameNumber,
        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
        mNextResultFrameNumber,
        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
        mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
    };

    // Process each HAL-delivered result in order.
    for (const auto& result : results) {
        processOneCaptureResultLocked(states, result, noPhysMetadata);
    }
    mProcessCaptureResultLock.unlock();
    return hardware::Void();
}
void processOneCaptureResultLocked(
652 CaptureOutputStates& states,
653 const hardware::camera::device::V3_2::CaptureResult& result,
654 const hardware::hidl_vec<
655 hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
656 using hardware::camera::device::V3_2::StreamBuffer;
657 using hardware::camera::device::V3_2::BufferStatus;
658 std::unique_ptr<ResultMetadataQueue>& fmq = states.fmq;
659 BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
660 camera3_capture_result r;
661 status_t res;
662 r.frame_number = result.frameNumber;
663
664 // Read and validate the result metadata.
665 hardware::camera::device::V3_2::CameraMetadata resultMetadata;
666 res = readOneCameraMetadataLocked(
667 fmq, result.fmqResultSize,
668 resultMetadata, result.result);
669 if (res != OK) {
670 ALOGE("%s: Frame %d: Failed to read capture result metadata",
671 __FUNCTION__, result.frameNumber);
672 return;
673 }
674 r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
675
676 // Read and validate physical camera metadata
677 size_t physResultCount = physicalCameraMetadata.size();
678 std::vector<const char*> physCamIds(physResultCount);
679 std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
680 std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
681 physResultMetadata.resize(physResultCount);
682 for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
683 res = readOneCameraMetadataLocked(fmq, physicalCameraMetadata[i].fmqMetadataSize,
684 physResultMetadata[i], physicalCameraMetadata[i].metadata);
685 if (res != OK) {
686 ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
687 __FUNCTION__, result.frameNumber,
688 physicalCameraMetadata[i].physicalCameraId.c_str());
689 return;
690 }
691 physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
692 phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
693 physResultMetadata[i].data());
694 }
695 r.num_physcam_metadata = physResultCount;
696 r.physcam_ids = physCamIds.data();
697 r.physcam_metadata = phyCamMetadatas.data();
698
699 std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
700 std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
701 for (size_t i = 0; i < result.outputBuffers.size(); i++) {
702 auto& bDst = outputBuffers[i];
703 const StreamBuffer &bSrc = result.outputBuffers[i];
704
705 sp<Camera3StreamInterface> stream = states.outputStreams.get(bSrc.streamId);
706 if (stream == nullptr) {
707 ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
708 __FUNCTION__, result.frameNumber, i, bSrc.streamId);
709 return;
710 }
711 bDst.stream = stream->asHalStream();
712
713 bool noBufferReturned = false;
714 buffer_handle_t *buffer = nullptr;
715 if (states.useHalBufManager) {
716 // This is suspicious most of the time but can be correct during flush where HAL
717 // has to return capture result before a buffer is requested
718 if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
719 if (bSrc.status == BufferStatus::OK) {
720 ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
721 __FUNCTION__, result.frameNumber, i, bSrc.streamId);
722 // Still proceeds so other buffers can be returned
723 }
724 noBufferReturned = true;
725 }
726 if (noBufferReturned) {
727 res = OK;
728 } else {
729 res = bufferRecords.popInflightRequestBuffer(bSrc.bufferId, &buffer);
730 }
731 } else {
732 res = bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
733 }
734
735 if (res != OK) {
736 ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
737 __FUNCTION__, result.frameNumber, i, bSrc.streamId);
738 return;
739 }
740
741 bDst.buffer = buffer;
742 bDst.status = mapHidlBufferStatus(bSrc.status);
743 bDst.acquire_fence = -1;
744 if (bSrc.releaseFence == nullptr) {
745 bDst.release_fence = -1;
746 } else if (bSrc.releaseFence->numFds == 1) {
747 if (noBufferReturned) {
748 ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
749 }
750 bDst.release_fence = dup(bSrc.releaseFence->data[0]);
751 } else {
752 ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
753 __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
754 return;
755 }
756 }
757 r.num_output_buffers = outputBuffers.size();
758 r.output_buffers = outputBuffers.data();
759
760 camera3_stream_buffer_t inputBuffer;
761 if (result.inputBuffer.streamId == -1) {
762 r.input_buffer = nullptr;
763 } else {
764 if (states.inputStream->getId() != result.inputBuffer.streamId) {
765 ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
766 result.frameNumber, result.inputBuffer.streamId);
767 return;
768 }
769 inputBuffer.stream = states.inputStream->asHalStream();
770 buffer_handle_t *buffer;
771 res = bufferRecords.popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
772 &buffer);
773 if (res != OK) {
774 ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
775 __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
776 return;
777 }
778 inputBuffer.buffer = buffer;
779 inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
780 inputBuffer.acquire_fence = -1;
781 if (result.inputBuffer.releaseFence == nullptr) {
782 inputBuffer.release_fence = -1;
783 } else if (result.inputBuffer.releaseFence->numFds == 1) {
784 inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
785 } else {
786 ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
787 __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
788 return;
789 }
790 r.input_buffer = &inputBuffer;
791 }
792
793 r.partial_result = result.partialResult;
794
795 processCaptureResult(states, &r);
}
第一行就是给成员变量 frame_number 赋值,该属性非常重要,它是 CameraServer、CameraHalServer 两个进程之间用来匹配同一个 Request 的标志;接着调用 readOneCameraMetadataLocked(其内部会根据 fmqResultSize 是否大于 0 决定从 FMQ 还是从 result.result 中读取)读取 metadata;再下来就是 outputBuffers 了,for 循环将 result.outputBuffers 中的 StreamBuffer 一个一个取出,然后调用 bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer) 取回 HAL 填充完成的 buffer 指针,这个 buffer 指针就是最终我们要的数据载体。再下来是对 inputBuffer 输入 buffer 的处理;处理完,封装参数 camera3_capture_result 就解析好了,接着调用 processCaptureResult 处理一帧结果。
void processCaptureResult(CaptureOutputStates& states, const camera3_capture_result *result) {
457 ATRACE_CALL();
458
459 status_t res;
460
461 uint32_t frameNumber = result->frame_number;
462 if (result->result == NULL && result->num_output_buffers == 0 &&
463 result->input_buffer == NULL) {
464 SET_ERR("No result data provided by HAL for frame %d",
465 frameNumber);
466 return;
467 }
468
469 if (!states.usePartialResult &&
470 result->result != NULL &&
471 result->partial_result != 1) {
472 SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
473 " if partial result is not supported",
474 frameNumber, result->partial_result);
475 return;
476 }
477
478 bool isPartialResult = false;
479 CameraMetadata collectedPartialResult;
480 bool hasInputBufferInRequest = false;
481
482 // Get shutter timestamp and resultExtras from list of in-flight requests,
483 // where it was added by the shutter notification for this frame. If the
484 // shutter timestamp isn't received yet, append the output buffers to the
485 // in-flight request and they will be returned when the shutter timestamp
486 // arrives. Update the in-flight status and remove the in-flight entry if
487 // all result data and shutter timestamp have been received.
488 nsecs_t shutterTimestamp = 0;
489 {
490 std::lock