#include <Windows.h>
#include <conio.h>
#include <process.h>
#include <stdio.h>
#include <stdlib.h>
#include <new>

#include "MvCameraControl.h"

#ifndef MV_E_TIMEOUT
#define MV_E_TIMEOUT -7
#endif
// Block until the user presses any key, then consume that keystroke so it
// does not leak into the next console read.
void WaitForKeyPress(void)
{
    for (;;)
    {
        if (_kbhit())
        {
            break;
        }
        Sleep(10); // poll at ~100 Hz instead of spinning
    }
    _getch();
}
// Global state shared between the main grab loop, the display thread,
// the record thread, and the window procedure.
bool g_bExit = false; // main-loop exit flag
bool g_bStartRecord = false; // recording has been requested / is in progress
bool g_bRecording = false; // currently recording (guards against double start)
long g_nTargetFrameCount = 0; // target frame count = FPS x 10 seconds
long g_nRecordedFrames = 0; // frames recorded so far
HANDLE g_hRecordThread = NULL; // record-thread handle
void* g_handle = nullptr; // camera handle (shared with RecordThread)
float g_fFrameRate = 30.0f; // current frame rate
int g_nWidth = 640, g_nHeight = 480; // image dimensions
HWND g_hWnd = nullptr; // display-window handle
HDC g_hDC = nullptr; // device context
unsigned char* g_pDisplayBuffer = nullptr;// display buffer (RGB)
CRITICAL_SECTION g_csDisplay; // critical section guarding display data
// Window class name
const char* WINDOW_CLASS_NAME = "CameraLiveView";
const char* WINDOW_TITLE = "Camera Live View - Press 'R' to Record 10s";
// Forward declarations
LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam);
DWORD WINAPI DisplayThread(LPVOID lpParam);
unsigned int __stdcall RecordThread(void* pUser);
// Thread that owns the live-view window: registers the window class, creates
// the window sized to the current image, and pumps messages until WM_QUIT.
// lpParam: the HINSTANCE passed from _beginthreadex.
// Returns 0 on normal shutdown, 1 on setup/pump failure.
DWORD WINAPI DisplayThread(LPVOID lpParam)
{
    HINSTANCE hInstance = (HINSTANCE)lpParam;

    WNDCLASSEX wcex = { 0 };
    wcex.cbSize = sizeof(WNDCLASSEX);
    wcex.style = CS_HREDRAW | CS_VREDRAW;
    wcex.lpfnWndProc = WndProc;
    wcex.cbClsExtra = 0;
    wcex.cbWndExtra = 0;
    wcex.hInstance = hInstance;
    wcex.hIcon = LoadIcon(NULL, IDI_APPLICATION);
    wcex.hCursor = LoadCursor(NULL, IDC_ARROW);
    wcex.hbrBackground = (HBRUSH)(COLOR_WINDOW + 1);
    wcex.lpszMenuName = NULL;
    wcex.lpszClassName = WINDOW_CLASS_NAME;
    wcex.hIconSm = LoadIcon(wcex.hInstance, IDI_APPLICATION);

    // The original ignored registration failure and then failed later in
    // CreateWindow with a misleading error.
    if (!RegisterClassEx(&wcex))
    {
        printf("Failed to register window class! Error: %lu\n", GetLastError());
        return 1;
    }

    // Extra margins leave room for the frame and title bar around the image.
    g_hWnd = CreateWindow(
        WINDOW_CLASS_NAME,
        WINDOW_TITLE,
        WS_OVERLAPPEDWINDOW,
        CW_USEDEFAULT, CW_USEDEFAULT,
        g_nWidth + 20, g_nHeight + 60,
        NULL, NULL, hInstance, NULL);
    if (!g_hWnd)
    {
        printf("Failed to create window!\n");
        return 1;
    }
    ShowWindow(g_hWnd, SW_SHOW);
    UpdateWindow(g_hWnd);

    // Message pump. GetMessage returns 0 on WM_QUIT and -1 on error; the
    // original treated -1 as "keep pumping", which loops forever on a broken
    // message queue.
    MSG msg;
    BOOL bRet;
    while ((bRet = GetMessage(&msg, NULL, 0, 0)) != 0)
    {
        if (bRet == -1)
        {
            printf("GetMessage failed! Error: %lu\n", GetLastError());
            return 1;
        }
        TranslateMessage(&msg);
        DispatchMessage(&msg);
    }
    return 0;
}
// Returns true when the pixel type carries color information (packed BGR,
// YUV422, or any Bayer variant) and should therefore be converted to RGB8.
bool IsColor(MvGvspPixelType enType)
{
    static const MvGvspPixelType s_colorTypes[] =
    {
        PixelType_Gvsp_BGR8_Packed,
        PixelType_Gvsp_YUV422_Packed,
        PixelType_Gvsp_YUV422_YUYV_Packed,
        PixelType_Gvsp_BayerGR8,
        PixelType_Gvsp_BayerRG8,
        PixelType_Gvsp_BayerGB8,
        PixelType_Gvsp_BayerBG8,
        PixelType_Gvsp_BayerGB10,
        PixelType_Gvsp_BayerGB10_Packed,
        PixelType_Gvsp_BayerBG10,
        PixelType_Gvsp_BayerBG10_Packed,
        PixelType_Gvsp_BayerRG10,
        PixelType_Gvsp_BayerRG10_Packed,
        PixelType_Gvsp_BayerGR10,
        PixelType_Gvsp_BayerGR10_Packed,
        PixelType_Gvsp_BayerGB12,
        PixelType_Gvsp_BayerGB12_Packed,
        PixelType_Gvsp_BayerBG12,
        PixelType_Gvsp_BayerBG12_Packed,
        PixelType_Gvsp_BayerRG12,
        PixelType_Gvsp_BayerRG12_Packed,
        PixelType_Gvsp_BayerGR12,
        PixelType_Gvsp_BayerGR12_Packed,
        PixelType_Gvsp_BayerRBGG8,
        PixelType_Gvsp_BayerGR16,
        PixelType_Gvsp_BayerRG16,
        PixelType_Gvsp_BayerGB16,
        PixelType_Gvsp_BayerBG16,
    };
    const size_t nCount = sizeof(s_colorTypes) / sizeof(s_colorTypes[0]);
    for (size_t i = 0; i < nCount; ++i)
    {
        if (s_colorTypes[i] == enType)
        {
            return true;
        }
    }
    return false;
}
// Returns true for monochrome pixel types deeper than 8 bits, i.e. those
// that need conversion down to Mono8 before display/saving.
bool IsMono(MvGvspPixelType enType)
{
    static const MvGvspPixelType s_monoTypes[] =
    {
        PixelType_Gvsp_Mono10,
        PixelType_Gvsp_Mono10_Packed,
        PixelType_Gvsp_Mono12,
        PixelType_Gvsp_Mono12_Packed,
        PixelType_Gvsp_Mono14,
        PixelType_Gvsp_Mono16,
    };
    const size_t nCount = sizeof(s_monoTypes) / sizeof(s_monoTypes[0]);
    for (size_t i = 0; i < nCount; ++i)
    {
        if (s_monoTypes[i] == enType)
        {
            return true;
        }
    }
    return false;
}
// Window procedure: paints the shared display buffer on WM_PAINT and maps
// keyboard input to the global record/exit flags.
// NOTE(review): g_pDisplayBuffer is read here without entering g_csDisplay,
// while another thread may be writing it — confirm the intended locking.
LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
switch (message)
{
case WM_PAINT:
{
PAINTSTRUCT ps;
HDC hdc = BeginPaint(hWnd, &ps);
if (g_pDisplayBuffer && g_nWidth > 0 && g_nHeight > 0)
{
// 24-bit uncompressed DIB describing the shared BGR buffer.
BITMAPINFO bmi = { 0 };
bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
bmi.bmiHeader.biWidth = g_nWidth;
bmi.bmiHeader.biHeight = -g_nHeight; // negative height = top-down DIB
bmi.bmiHeader.biPlanes = 1;
bmi.bmiHeader.biBitCount = 24;
bmi.bmiHeader.biCompression = BI_RGB;
bmi.bmiHeader.biSizeImage = 0;
// Blit the whole buffer 1:1 at the window origin (no scaling).
SetDIBitsToDevice(
hdc,
0, 0,
g_nWidth, g_nHeight,
0, 0,
0, g_nHeight,
g_pDisplayBuffer,
&bmi,
DIB_RGB_COLORS);
}
else
{
TextOut(hdc, 10, 10, "No image data", 13);
}
EndPaint(hWnd, &ps);
break;
}
case WM_KEYDOWN:
{
// Letter keys arrive as uppercase virtual-key codes; the lowercase
// comparisons are kept for safety but are effectively redundant.
if (wParam == 'R' || wParam == 'r')
{
// 'R' requests a 10-second recording unless one is already running.
if (!g_bStartRecord && !g_bRecording)
{
g_bStartRecord = true;
g_nRecordedFrames = 0;
g_nTargetFrameCount = (long)(g_fFrameRate * 10.0f);
printf(">> Recording started: Target %ld frames (@ %.1ffps)\n", g_nTargetFrameCount, g_fFrameRate);
}
else
{
printf(">> Already recording...\n");
}
}
else if (wParam == VK_ESCAPE || wParam == 'Q' || wParam == 'q')
{
// ESC/Q: signal every loop to stop and end this thread's message pump.
g_bExit = true;
PostQuitMessage(0);
}
break;
}
case WM_DESTROY:
// Window closed: propagate shutdown to the rest of the program.
g_bExit = true;
PostQuitMessage(0);
break;
default:
return DefWindowProc(hWnd, message, wParam, lParam);
}
return 0;
}
/*
 * Nearest-neighbor demosaic of an 8-bit BayerRG (RG/GB) mosaic into packed
 * RGB24 (R at byte 0, as the function name promises).
 *
 * Pattern (x even/odd, y even/odd):
 *   (even, even) = R   (odd, even) = G
 *   (even, odd)  = G   (odd, odd)  = B
 *
 * bayer : width*height single-channel mosaic (row-major)
 * rgb   : width*height*3 output buffer, RGB byte order
 *
 * The original implementation mixed up its channel sources (e.g. at an R
 * site it emitted the R sample as "G" and a G neighbor as "B") and actually
 * wrote BGR order despite its name; this version samples the nearest pixel
 * of the required color in each case. Border pixels reflect inward.
 */
void BayerRG8ToRGB(const unsigned char* bayer, unsigned char* rgb, int width, int height)
{
    for (int y = 0; y < height; ++y)
    {
        for (int x = 0; x < width; ++x)
        {
            /* Nearest column/row of the opposite parity, reflected at the
             * image border (falls back to the pixel itself on 1-wide/-tall
             * images). */
            int x2 = (x + 1 < width) ? x + 1 : (x > 0 ? x - 1 : x);
            int y2 = (y + 1 < height) ? y + 1 : (y > 0 ? y - 1 : y);

            int idx = (y * width + x) * 3;
            unsigned char val = bayer[y * width + x];
            unsigned char r, g, b;

            if ((y % 2 == 0) && (x % 2 == 0))       /* R site */
            {
                r = val;
                g = bayer[y * width + x2];          /* G: same row, other col */
                b = bayer[y2 * width + x2];         /* B: diagonal */
            }
            else if ((y % 2 == 0) && (x % 2 == 1))  /* G site in R row */
            {
                r = bayer[y * width + x2];          /* R: same row, other col */
                g = val;
                b = bayer[y2 * width + x];          /* B: same col, other row */
            }
            else if ((y % 2 == 1) && (x % 2 == 0))  /* G site in B row */
            {
                r = bayer[y2 * width + x];          /* R: same col, other row */
                g = val;
                b = bayer[y * width + x2];          /* B: same row, other col */
            }
            else                                    /* B site */
            {
                r = bayer[y2 * width + x2];         /* R: diagonal */
                g = bayer[y * width + x2];          /* G: same row, other col */
                b = val;
            }

            rgb[idx + 0] = r;
            rgb[idx + 1] = g;
            rgb[idx + 2] = b;
        }
    }
}
/* Swap the R and B channels of a packed 24-bit image in one pass
 * (GDI expects BGR byte order). `size` is the pixel count, not bytes;
 * source and destination must not overlap. */
void RGB24ToBGR24(const unsigned char* rgb, unsigned char* bgr, int size)
{
    const unsigned char* src = rgb;
    unsigned char* dst = bgr;
    for (int px = 0; px < size; ++px, src += 3, dst += 3)
    {
        dst[0] = src[2]; /* B <- R */
        dst[1] = src[1]; /* G      */
        dst[2] = src[0]; /* R <- B */
    }
}
// Worker thread: when g_bStartRecord is raised it pulls frames from the
// camera (MV_CC_GetOneFrameTimeout) and feeds them to the SDK recorder
// (MV_CC_InputOneFrame) until g_nTargetFrameCount frames are captured,
// then finalizes the file with MV_CC_StopRecord.
// Requires g_handle to be set by main() BEFORE this thread starts.
// NOTE(review): the main grab loop also feeds frames while
// (g_bStartRecord && !g_bRecording); two producers grabbing from one camera
// is a design conflict inherited from the original — confirm intent.
unsigned int __stdcall RecordThread(void* pUser)
{
    (void)pUser; // unused

    int nRet = MV_OK;

    // Size the frame buffer from PayloadSize.
    MVCC_INTVALUE stPayLoadSize = { 0 };
    nRet = MV_CC_GetIntValue(g_handle, "PayloadSize", &stPayLoadSize);
    if (MV_OK != nRet)
    {
        printf("[RecordThread] Failed to get PayloadSize! nRet [0x%x]\n", nRet);
        return 1; // the return type is unsigned; the original returned -1
    }
    unsigned int nBufferSize = stPayLoadSize.nCurValue;

    // nothrow-new so the NULL check below is meaningful (plain new throws
    // std::bad_alloc and would never return NULL).
    unsigned char* pBuffer = new (std::nothrow) unsigned char[nBufferSize];
    if (!pBuffer)
    {
        printf("[RecordThread] Memory allocation failed!\n");
        return 1;
    }

    MV_FRAME_OUT_INFO_EX stFrameInfo = { 0 };
    MV_CC_INPUT_FRAME_INFO stInputInfo = { 0 };
    printf("[RecordThread] Ready. Waiting to record...\n");

    while (!g_bExit)
    {
        if (g_bStartRecord && !g_bRecording)
        {
            g_bRecording = true;
            long frameCount = 0;
            printf("[RecordThread] Recording started. Target: %ld frames.\n", g_nTargetFrameCount);

            while (frameCount < g_nTargetFrameCount && !g_bExit && g_bStartRecord)
            {
                // Clear the info struct so the SDK starts from a clean slate.
                memset(&stFrameInfo, 0, sizeof(stFrameInfo));

                // Grab one frame into our buffer (1000 ms timeout).
                nRet = MV_CC_GetOneFrameTimeout(g_handle, pBuffer, nBufferSize, &stFrameInfo, 1000);
                if (nRet == MV_OK)
                {
                    // Hand the raw frame to the recorder.
                    stInputInfo.pData = pBuffer;
                    stInputInfo.nDataLen = stFrameInfo.nFrameLenEx; // actual payload length
                    nRet = MV_CC_InputOneFrame(g_handle, &stInputInfo);
                    if (nRet != MV_OK)
                    {
                        printf("MV_CC_InputOneFrame failed! nRet [0x%x]\n", nRet);
                    }
                    ++frameCount;
                }
                else if (nRet == MV_E_TIMEOUT)
                {
                    printf("GetOneFrameTimeout timeout.\n");
                }
                else
                {
                    printf("GetOneFrameTimeout failed! nRet [0x%x]\n", nRet);
                }
                Sleep(1); // avoid a tight busy loop
            }

            // Finalize the recording file and clear the flags.
            MV_CC_StopRecord(g_handle);
            g_bStartRecord = false;
            g_bRecording = false;
            printf("[RecordThread] Recording finished. Recorded %ld frames.\n", frameCount);
        }
        Sleep(10); // idle poll while no recording is requested
    }

    delete[] pBuffer;
    return 0;
}
// Print identifying information for one enumerated device, formatted per
// transport layer. Returns false only when pstMVDevInfo is NULL.
bool PrintDeviceInfo(MV_CC_DEVICE_INFO* pstMVDevInfo)
{
    if (NULL == pstMVDevInfo)
    {
        printf("The Pointer of pstMVDevInfo is NULL!\n");
        return false;
    }

    switch (pstMVDevInfo->nTLayerType)
    {
    case MV_GIGE_DEVICE:
    {
        // Print current camera IP (packed into a 32-bit word, highest byte
        // first) and the user-defined name.
        unsigned int nIp = pstMVDevInfo->SpecialInfo.stGigEInfo.nCurrentIp;
        printf("CurrentIp: %d.%d.%d.%d\n",
               (int)((nIp >> 24) & 0xff),
               (int)((nIp >> 16) & 0xff),
               (int)((nIp >> 8) & 0xff),
               (int)(nIp & 0xff));
        printf("UserDefinedName: %s\n\n", pstMVDevInfo->SpecialInfo.stGigEInfo.chUserDefinedName);
        break;
    }
    case MV_USB_DEVICE:
        printf("UserDefinedName: %s\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.chUserDefinedName);
        printf("Serial Number: %s\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.chSerialNumber);
        printf("Device Number: %d\n\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.nDeviceNumber);
        break;
    case MV_GENTL_GIGE_DEVICE:
        printf("UserDefinedName: %s\n", pstMVDevInfo->SpecialInfo.stGigEInfo.chUserDefinedName);
        printf("Serial Number: %s\n", pstMVDevInfo->SpecialInfo.stGigEInfo.chSerialNumber);
        printf("Model Name: %s\n\n", pstMVDevInfo->SpecialInfo.stGigEInfo.chModelName);
        break;
    case MV_GENTL_CAMERALINK_DEVICE:
        printf("UserDefinedName: %s\n", pstMVDevInfo->SpecialInfo.stCMLInfo.chUserDefinedName);
        printf("Serial Number: %s\n", pstMVDevInfo->SpecialInfo.stCMLInfo.chSerialNumber);
        printf("Model Name: %s\n\n", pstMVDevInfo->SpecialInfo.stCMLInfo.chModelName);
        break;
    case MV_GENTL_CXP_DEVICE:
        printf("UserDefinedName: %s\n", pstMVDevInfo->SpecialInfo.stCXPInfo.chUserDefinedName);
        printf("Serial Number: %s\n", pstMVDevInfo->SpecialInfo.stCXPInfo.chSerialNumber);
        printf("Model Name: %s\n\n", pstMVDevInfo->SpecialInfo.stCXPInfo.chModelName);
        break;
    case MV_GENTL_XOF_DEVICE:
        printf("UserDefinedName: %s\n", pstMVDevInfo->SpecialInfo.stXoFInfo.chUserDefinedName);
        printf("Serial Number: %s\n", pstMVDevInfo->SpecialInfo.stXoFInfo.chSerialNumber);
        printf("Model Name: %s\n\n", pstMVDevInfo->SpecialInfo.stXoFInfo.chModelName);
        break;
    default:
        printf("Not support.\n");
        break;
    }
    return true;
}
// 主函数
int main()
{
int nRet = MV_OK;
void* handle = NULL;
unsigned char* pConvertData = NULL;
unsigned int nConvertDataSize = 0;
do
{
// ch:初始化SDK | en:Initialize SDK
nRet = MV_CC_Initialize();
if (MV_OK != nRet)
{
printf("Initialize SDK fail! nRet [0x%x]\n", nRet);
break;
}
// ch:枚举设备 | Enum device
MV_CC_DEVICE_INFO_LIST stDeviceList;
memset(&stDeviceList, 0, sizeof(MV_CC_DEVICE_INFO_LIST));
nRet = MV_CC_EnumDevices(MV_GIGE_DEVICE | MV_USB_DEVICE | MV_GENTL_CAMERALINK_DEVICE | MV_GENTL_CXP_DEVICE | MV_GENTL_XOF_DEVICE, &stDeviceList);
if (MV_OK != nRet)
{
printf("Enum Devices fail! nRet [0x%x]\n", nRet);
break;
}
if (stDeviceList.nDeviceNum > 0)
{
for (unsigned int i = 0; i < stDeviceList.nDeviceNum; i++)
{
printf("[device %d]:\n", i);
MV_CC_DEVICE_INFO* pDeviceInfo = stDeviceList.pDeviceInfo[i];
if (NULL == pDeviceInfo)
{
break;
}
PrintDeviceInfo(pDeviceInfo);
}
}
else
{
printf("Find No Devices!\n");
break;
}
printf("Please Input camera index(0-%d):", stDeviceList.nDeviceNum - 1);
unsigned int nIndex = 0;
scanf_s("%d", &nIndex);
if (nIndex >= stDeviceList.nDeviceNum)
{
printf("Input error!\n");
break;
}
// ch:选择设备并创建句柄 | Select device and create handle
nRet = MV_CC_CreateHandle(&handle, stDeviceList.pDeviceInfo[nIndex]);
if (MV_OK != nRet)
{
printf("Create Handle fail! nRet [0x%x]\n", nRet);
break;
}
// ch:打开设备 | Open device
nRet = MV_CC_OpenDevice(handle);
if (MV_OK != nRet)
{
printf("Open Device fail! nRet [0x%x]\n", nRet);
break;
}
// ch:探测网络最佳包大小(只对GigE相机有效) | en:Detection network optimal package size(It only works for the GigE camera)
if (stDeviceList.pDeviceInfo[nIndex]->nTLayerType == MV_GIGE_DEVICE)
{
int nPacketSize = MV_CC_GetOptimalPacketSize(handle);
if (nPacketSize > 0)
{
nRet = MV_CC_SetIntValueEx(handle, "GevSCPSPacketSize", nPacketSize);
if (nRet != MV_OK)
{
printf("Warning: Set Packet Size fail nRet [0x%x]!", nRet);
}
}
else
{
printf("Warning: Get Packet Size fail nRet [0x%x]!", nPacketSize);
}
}
// 关闭触发模式
MV_CC_SetEnumValue(handle, "TriggerMode", 0);
// 获取图像参数
MVCC_INTVALUE stParam = { 0 };
MV_CC_GetIntValue(handle, "Width", &stParam);
g_nWidth = stParam.nCurValue;
MV_CC_GetIntValue(handle, "Height", &stParam);
g_nHeight = stParam.nCurValue;
MVCC_ENUMVALUE stEnumValue = { 0 };
MV_CC_GetEnumValue(handle, "PixelFormat", &stEnumValue);
MVCC_FLOATVALUE stFloatValue = { 0 };
MV_CC_GetFloatValue(handle, "ResultingFrameRate", &stFloatValue);
g_fFrameRate = (stFloatValue.fCurValue > 0.0f) ? stFloatValue.fCurValue : 30.0f;
// 分配显示缓冲区(BGR24)
int imageSize = g_nWidth * g_nHeight * 3;
g_pDisplayBuffer = new unsigned char[imageSize];
memset(g_pDisplayBuffer, 0, imageSize);
// 准备录像参数
MV_CC_RECORD_PARAM stRecordPar;
memset(&stRecordPar, 0, sizeof(stRecordPar));
// 获取当前像素格式
MVCC_INTVALUE stPixFmt = { 0 };
int nRet = MV_CC_GetIntValue(handle, "PixelFormat", &stPixFmt);
if (MV_OK != nRet)
{
printf("Failed to get PixelFormat, use default MV_Gvsp_PixelType_BayerRG8\n");
stRecordPar.enPixelType = PixelType_Gvsp_RGB8_Packed;
}
else
{
stRecordPar.enPixelType = static_cast<MvGvspPixelType>(stPixFmt.nCurValue);
}
// 设置其他参数
stRecordPar.nWidth = g_nWidth;
stRecordPar.nHeight = g_nHeight;
stRecordPar.fFrameRate = static_cast<float>(g_fFrameRate);
stRecordPar.nBitRate = 5000;
stRecordPar.enRecordFmtType = MV_FormatType_AVI;
// 文件路径需长期有效
static char szFilePath[] = "./Recording.avi";
stRecordPar.strFilePath = szFilePath;
nRet = MV_CC_StartRecord(handle, &stRecordPar);
if (MV_OK != nRet)
{
printf("Start Record failed! nRet [0x%x]\n", nRet);
break;
}
// 启动取流
nRet = MV_CC_StartGrabbing(handle);
if (MV_OK != nRet)
{
printf("Start Grabbing fail! nRet [0x%x]\n", nRet);
break;
}
// 创建显示窗口线程
HANDLE hDisplayThread = (HANDLE)_beginthreadex(
NULL,
0,
(_beginthreadex_proc_type)DisplayThread, // ⚠️ 显式强制类型转换
GetModuleHandle(NULL),
0,
NULL
);
if (!hDisplayThread)
{
printf("Failed to create display thread!\n");
break;
}
// 创建录像线程
g_hRecordThread = (HANDLE)_beginthreadex(NULL, 0, RecordThread, NULL, 0, NULL);
if (!g_hRecordThread)
{
printf("Failed to create record thread!\n");
break;
}
printf("\n=== Live view started ===\n");
printf("Press 'R' in the window to start 10-second recording.\n");
printf("Press ESC or close window to exit.\n\n");
// 主取流循环
MV_FRAME_OUT stImageInfo = { 0 };
unsigned char* pRgbBuffer = new unsigned char[imageSize];
while (!g_bExit)
{
nRet = MV_CC_GetImageBuffer(handle, &stImageInfo, 1000);
if (nRet == MV_OK)
{
MV_FRAME_OUT_INFO_EX* pInfo = &stImageInfo.stFrameInfo;
const unsigned char* pData = stImageInfo.pBufAddr;
MvGvspPixelType enDstPixelType = PixelType_Gvsp_Undefined;
unsigned int nChannelNum = 0;
char chFileName[MAX_PATH] = { 0 };
// ch:如果是彩色则转成RGB8 | en:if pixel type is color, convert it to RGB8
if (IsColor(stImageInfo.stFrameInfo.enPixelType))
{
nChannelNum = 3;
enDstPixelType = PixelType_Gvsp_RGB8_Packed;
sprintf(chFileName, "AfterConvertRGB.raw");
}
// ch:如果是黑白则转换成Mono8 | en:if pixel type is mono, convert it to mono8
else if (IsMono(stImageInfo.stFrameInfo.enPixelType))
{
nChannelNum = 1;
enDstPixelType = PixelType_Gvsp_Mono8;
sprintf(chFileName, "AfterConvertMono8.raw");
}
else
{
printf("Don't need to convert!\n");
}
if (enDstPixelType != PixelType_Gvsp_Undefined)
{
// ch:设置插值算法为均衡 | en:set interpolation algorithm type, 0-Fast 1-Equilibrium 2-Optimal 3-Optimal plus
nRet = MV_CC_SetBayerCvtQuality(handle, 1);
if (MV_OK != nRet)
{
printf("set Bayer convert quality fail! nRet [0x%x]\n", nRet);
break;
}
pConvertData = (unsigned char*)malloc(stImageInfo.stFrameInfo.nExtendWidth * stImageInfo.stFrameInfo.nExtendHeight * nChannelNum);
if (NULL == pConvertData)
{
printf("malloc pConvertData fail!\n");
break;
}
nConvertDataSize = stImageInfo.stFrameInfo.nExtendWidth * stImageInfo.stFrameInfo.nExtendHeight * nChannelNum;
// ch:像素格式转换 | en:Convert pixel format
MV_CC_PIXEL_CONVERT_PARAM_EX stConvertParam = { 0 };
stConvertParam.nWidth = stImageInfo.stFrameInfo.nExtendWidth; //ch:图像宽 | en:image width
stConvertParam.nHeight = stImageInfo.stFrameInfo.nExtendHeight; //ch:图像高 | en:image height
stConvertParam.pSrcData = stImageInfo.pBufAddr; //ch:输入数据缓存 | en:input data buffer
stConvertParam.nSrcDataLen = stImageInfo.stFrameInfo.nFrameLenEx; //ch:输入数据大小 | en:input data size
stConvertParam.enSrcPixelType = stImageInfo.stFrameInfo.enPixelType; //ch:输入像素格式 | en:input pixel format
stConvertParam.enDstPixelType = enDstPixelType; //ch:输出像素格式 | en:output pixel format
stConvertParam.pDstBuffer = pConvertData; //ch:输出数据缓存 | en:output data buffer
stConvertParam.nDstBufferSize = nConvertDataSize; //ch:输出缓存大小 | en:output buffer size
nRet = MV_CC_ConvertPixelTypeEx(handle, &stConvertParam);
if (MV_OK != nRet)
{
printf("Convert Pixel Type fail! nRet [0x%x]\n", nRet);
break;
}
FILE* fp = NULL;
errno_t err = fopen_s(&fp, chFileName, "wb");
if (0 != err || NULL == fp)
{
printf("Open file failed\n");
break;
}
fwrite(stConvertParam.pDstBuffer, 1, stConvertParam.nDstLen, fp);
fclose(fp);
printf("Convert pixeltype succeed\n");
}
// 录像输入帧
if (g_bStartRecord && !g_bRecording)
{
MV_CC_INPUT_FRAME_INFO stInputInfo = { 0 };
stInputInfo.pData = (unsigned char*)pData;
stInputInfo.nDataLen = pInfo->nFrameLenEx;
// ⚠️ 旧版 SDK 不支持通过 stInputInfo 指定宽高/像素类型
// 必须确保开始录像前已调用 MV_CC_SetVideoParam() 设置参数!
int nRet = MV_CC_InputOneFrame(handle, &stInputInfo);
if (MV_OK != nRet)
{
printf("MV_CC_InputOneFrame failed: 0x%x\n", nRet);
}
else
{
InterlockedIncrement(&g_nRecordedFrames);
}
}
MV_CC_FreeImageBuffer(handle, &stImageInfo);
}
else if (nRet != MV_E_TIMEOUT)
{
printf("Get image buffer failed! nRet [0x%x]\n", nRet);
}
Sleep(1);
}
// 清理流程
g_bStartRecord = false;
if (g_hRecordThread)
{
WaitForSingleObject(g_hRecordThread, 3000);
CloseHandle(g_hRecordThread);
}
MV_CC_StopGrabbing(handle);
if (g_bRecording)
{
MV_CC_StopRecord(handle);
}
MV_CC_CloseDevice(handle);
MV_CC_DestroyHandle(handle);
handle = nullptr;
g_handle = nullptr;
delete[] g_pDisplayBuffer;
delete[] pRgbBuffer;
g_pDisplayBuffer = nullptr;
} while (0);
// ch:反初始化SDK | en:Finalize SDK
MV_CC_Finalize();
printf("Press a key to exit.\n");
WaitForKeyPress();
return 0;
}
// Code walkthrough notes end here (stray trailing text commented out so the file compiles).