// Depth-conversion setup: pull intrinsics and baselines out of the
// stereo-rectification results.
// 1. Left/right pair — focal length and principal point from the rectified
//    projection matrix P1.
const double fx_lr = P1.at<double>(0, 0); // focal length (pixels), left/right pair
const double fy_lr = P1.at<double>(1, 1);
const double cx_lr = P1.at<double>(0, 2); // principal point, left/right pair
const double cy_lr = P1.at<double>(1, 2);
// Baseline of the left/right pair taken directly from the extrinsic translation's
// x component (expected ~57.81 mm).
// FIX: use std::abs — an unqualified abs() can bind to ::abs(int) from <stdlib.h>,
// silently truncating the double translation component to an integer.
const double baseline_lr = std::abs(T.at<double>(0, 0));
// 2. Left/top pair — same quantities from the rectified projection matrix P3.
const double fx_lt = P3.at<double>(0, 0); // focal length (pixels), left/top pair
const double fy_lt = P3.at<double>(1, 1);
const double cx_lt = P3.at<double>(0, 2); // principal point, left/top pair
const double cy_lt = P3.at<double>(1, 2);
// Baseline of the left/top pair: the y component of T_l2t, because that rig is
// stacked vertically (expected ~116.94 mm). Same std::abs fix as above.
const double baseline_lt = std::abs(T_l2t.at<double>(1, 0));
// Debug dump of the values actually used for depth conversion, so a bad
// calibration is visible immediately at startup.
std::cout << "\n========== 基线计算调试 ==========" << std::endl;
std::cout << "T = [" << T.at<double>(0, 0) << ", " << T.at<double>(1, 0) << ", " << T.at<double>(2, 0) << "]" << std::endl;
std::cout << "T_l2t = [" << T_l2t.at<double>(0, 0) << ", " << T_l2t.at<double>(1, 0) << ", " << T_l2t.at<double>(2, 0) << "]" << std::endl;
std::cout << "baseline_lr (从T.x): " << baseline_lr << " mm" << std::endl;
std::cout << "baseline_lt (从T_l2t.y): " << baseline_lt << " mm" << std::endl;
// P2(0,3)/P4(0,3) encode -fx*baseline after rectification; printed here as a
// cross-check against the baselines taken from T / T_l2t.
std::cout << "P2(0,3): " << P2.at<double>(0, 3) << std::endl;
std::cout << "P4(0,3): " << P4.at<double>(0, 3) << std::endl;
std::cout << "fx_lr: " << fx_lr << std::endl;
std::cout << "fx_lt: " << fx_lt << std::endl;
std::cout << "=================================" << std::endl;
// Shared frame buffer between the capture thread (producer) and the rest of
// the pipeline (consumer).
SmartFrameBuffer frameBuffer;
// Cooperative stop flag for the capture thread; set to true to terminate it.
std::atomic<bool> stopCapture{ false };
// Pre-allocate the BGR conversion frame once so the capture thread does not
// re-allocate on every frame; it is re-created later only if the decoded
// frame size turns out to differ.
cv::Mat captureFrame;
if (codec_ctx) {
    captureFrame = cv::Mat(codec_ctx->height, codec_ctx->width, CV_8UC3);
}
// Dedicated asynchronous capture thread (mimics OpenCV's internal grab
// thread): reads packets with FFmpeg, extracts per-frame gyro metadata,
// decodes to BGR24, and pushes frames into frameBuffer until stopCapture
// is set. Read errors are classified and retried rather than fatal.
// NOTE(review): format_ctx, packet, av_frame, sws_ctx, codec_ctx and the
// parse_*/overlay helpers are captured by reference from the enclosing
// scope — assumes they outlive this thread; confirm at the join site.
std::thread captureThread([&]() {
    printf("异步捕获线程启动...\n");
    FRAME_GYRO_DATA gyro_data;     // gyro/IMU payload parsed from the current packet
    bool gyro_data_valid = false;  // whether gyro_data is valid for this frame
    AP4_NalParser nal_parser;      // NAL parser used for H.264 SEI extraction
    // Thread statistics.
    int frame_count = 0;           // successfully decoded frames
    int error_count = 0;           // total read/decode errors
    int io_error_count = 0;        // EIO errors specifically (V4L2 buffer issues)
    int consecutive_errors = 0;    // consecutive unclassified read errors
    auto last_stats_time = std::chrono::steady_clock::now();
    auto thread_start_time = std::chrono::steady_clock::now();
    while (!stopCapture) {
        bool got_frame = false;
        gyro_data_valid = false;
        // Inner loop: keep reading/decoding until one frame is produced or
        // a stop is requested.
        while (!got_frame && !stopCapture) {
            int ret = av_read_frame(format_ctx, &packet);
            if (ret < 0) {
                // Classify the read error and decide retry vs. shutdown.
                char errbuf[AV_ERROR_MAX_STRING_SIZE];
                av_strerror(ret, errbuf, sizeof(errbuf));
                if (ret == AVERROR(EAGAIN)) {
                    // No data available yet: back off briefly and retry.
                    std::this_thread::sleep_for(std::chrono::milliseconds(1));
                    continue;
                }
                else if (ret == AVERROR(EIO)) {
                    // I/O error, likely a V4L2 buffer problem: log, wait,
                    // and keep going instead of exiting.
                    error_count++;
                    io_error_count++;
                    fprintf(stderr, "V4L2 I/O错误 #%d (可能是缓冲区损坏): %s (%d)\n", io_error_count, errbuf, ret);
                    fprintf(stderr, "尝试短暂延迟后继续...\n");
                    std::this_thread::sleep_for(std::chrono::milliseconds(10));
                    continue; // try to continue rather than exit
                }
                else if (ret == AVERROR(ENOMEM)) {
                    // Out of memory: wait longer, then retry.
                    error_count++;
                    fprintf(stderr, "内存不足错误: %s (%d)\n", errbuf, ret);
                    std::this_thread::sleep_for(std::chrono::milliseconds(50));
                    continue;
                }
                else if (ret == AVERROR_EOF) {
                    // End of stream (unusual for a live device): stop the thread.
                    fprintf(stderr, "设备流结束: %s (%d)\n", errbuf, ret);
                    stopCapture = true;
                    break;
                }
                else {
                    // Any other serious error: count it, back off, keep trying.
                    error_count++;
                    consecutive_errors++;
                    fprintf(stderr, "严重读取错误: %s (%d), 连续错误: %d\n", errbuf, ret, consecutive_errors);
                    // Too many consecutive failures: lengthen the wait, but
                    // still do not exit the thread.
                    if (consecutive_errors > 50) {
                        fprintf(stderr, "连续错误过多,延长等待时间...\n");
                        std::this_thread::sleep_for(std::chrono::milliseconds(100));
                        consecutive_errors = 0; // reset the streak counter
                    } else {
                        std::this_thread::sleep_for(std::chrono::milliseconds(5));
                    }
                    continue; // keep trying; never exit on this path
                }
            }
            if (packet.stream_index == video_stream_index) {
                // Extract per-frame gyro metadata before decoding:
                // MJPEG carries it in user data, H.264 in SEI NAL units.
                if (format_ctx->streams[video_stream_index]->codecpar->codec_id == AV_CODEC_ID_MJPEG)
                    gyro_data_valid = parse_mjpeg_userdata(&packet, &gyro_data);
                else if (format_ctx->streams[video_stream_index]->codecpar->codec_id == AV_CODEC_ID_H264)
                    gyro_data_valid = parse_h264_sei(&nal_parser, &packet, &gyro_data);
                ret = avcodec_send_packet(codec_ctx, &packet);
                if (ret < 0) {
                    // Decoder rejected the packet: log, release it, and move
                    // on to the next packet instead of exiting.
                    char errbuf[AV_ERROR_MAX_STRING_SIZE];
                    av_strerror(ret, errbuf, sizeof(errbuf));
                    fprintf(stderr, "解码发送数据包错误: %s (%d)\n", errbuf, ret);
                    error_count++;
                    av_packet_unref(&packet);
                    continue; // continue with the next packet
                }
                if (ret >= 0) {
                    ret = avcodec_receive_frame(codec_ctx, av_frame);
                    if (ret == 0) {
                        if (codec_ctx->pix_fmt == AV_PIX_FMT_NONE) {
                            // Pixel format not probed yet: drop this frame.
                            av_frame_unref(av_frame);
                            av_packet_unref(&packet);
                            continue;
                        }
                        // Lazily create the colorspace converter once the
                        // actual frame format/size is known.
                        if (!sws_ctx) {
                            sws_ctx = sws_getContext(av_frame->width, av_frame->height,
                                (AVPixelFormat)av_frame->format,
                                av_frame->width, av_frame->height,
                                AV_PIX_FMT_BGR24,
                                SWS_BILINEAR, nullptr, nullptr, nullptr);
                            if (!sws_ctx) {
                                fprintf(stderr, "创建 sws_ctx 失败,采集线程退出\n");
                                stopCapture = true;
                                av_frame_unref(av_frame);
                                av_packet_unref(&packet);
                                break;
                            }
                        }
                        // Re-allocate the pre-sized BGR frame only if the
                        // decoded dimensions changed.
                        if (captureFrame.cols != av_frame->width || captureFrame.rows != av_frame->height)
                            captureFrame = cv::Mat(av_frame->height, av_frame->width, CV_8UC3);
                        uint8_t* dest[4] = { captureFrame.data, nullptr, nullptr, nullptr };
                        int dest_linesize[4] = { (int)captureFrame.step, 0, 0, 0 };
                        // Convert the decoded frame to packed BGR24 directly
                        // into captureFrame's pixel buffer.
                        sws_scale(sws_ctx, av_frame->data, av_frame->linesize, 0,
                            av_frame->height, dest, dest_linesize);
                        if (gyro_data_valid) {
                            overlayTimestampOnImage(captureFrame, &gyro_data, gyro_data_valid);
                            print_enhanced_gyro_data(&gyro_data);
                        }
                        frameBuffer.pushFrame(captureFrame);
                        av_frame_unref(av_frame); // release the decoder's reference
                        got_frame = true;
                        frame_count++;
                        consecutive_errors = 0; // a good frame resets the error streak
                        // Emit throughput statistics every 10 seconds.
                        auto now = std::chrono::steady_clock::now();
                        if (std::chrono::duration_cast<std::chrono::seconds>(now - last_stats_time).count() >= 10) {
                            auto elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - thread_start_time).count();
                            double fps = (double)frame_count / elapsed;
                            printf("捕获线程统计 - 运行时间: %lds, 总帧数: %d, 错误: %d (I/O错误: %d), 平均FPS: %.2f\n",
                                elapsed, frame_count, error_count, io_error_count, fps);
                            last_stats_time = now;
                        }
                    }
                    else if (ret != AVERROR(EAGAIN)) {
                        // Real receive error (EAGAIN only means the decoder
                        // needs more input): log it and keep going.
                        char errbuf[AV_ERROR_MAX_STRING_SIZE];
                        av_strerror(ret, errbuf, sizeof(errbuf));
                        fprintf(stderr, "接收帧失败: %s (%d)\n", errbuf, ret);
                        error_count++;
                        // Do not exit; keep trying.
                    }
                }
            }
            av_packet_unref(&packet);
        }
    }
    printf("异步捕获线程结束\n");
});
// NOTE(review): removed trailing non-code web-scrape residue that followed the
// lambda (the lines "详细注释上述代码" / "最新发布") — it was not valid C++ and
// would break compilation.