SetDIBitsToDevice

The SetDIBitsToDevice function sets the pixels in a specified rectangle on the destination device context using color data from a DIB. On Windows 98 and Windows NT 5.0 and later the function also accepts a JPEG image as the source. The prototype takes parameters for the device-context handle, the destination coordinates, the DIB dimensions, and so on; the function returns the number of scan lines set, or 0 on failure.
  name="google_ads_frame" marginwidth="0" marginheight="0" src="http://pagead2.googlesyndication.com/pagead/ads?client=ca-pub-5572165936844014&dt=1193665761703&lmt=1193665780&format=336x280_as&output=html&correlator=1193665761687&url=http%3A%2F%2Fwww.codeguru.cn%2Fpublic%2Fiframe%2Fwinapiiframe.htm&color_bg=FFFFFF&color_text=000000&color_link=000000&color_url=FFFFFF&color_border=FFFFFF&ad_type=text&ga_vid=1285758818.1193665762&ga_sid=1193665762&ga_hid=111695597&flash=9&u_h=768&u_w=1024&u_ah=740&u_aw=1024&u_cd=32&u_tz=480&u_his=8&u_java=true" frameborder="0" width="336" scrolling="no" height="280" allowtransparency="allowtransparency">     函数功能:该函数使用DIB位图和颜色数据对与目标设备环境相关的设备上的指定矩形中的像素进行设置。对于Windows 98和Windows NT 5.0,函数SetDIBitsToDevice已经得到扩展,它允许JPEG图像作为源图像。

    Prototype: int SetDIBitsToDevice(HDC hdc, int XDest, int YDest, DWORD dwWidth, DWORD dwHeight, int XSrc, int YSrc, UINT uStartScan, UINT cScanLines, CONST VOID *lpvBits, CONST BITMAPINFO *lpbmi, UINT fuColorUse);

    Parameters:

    hdc: Handle to the device context.

    XDest: The x-coordinate, in logical units, of the upper-left corner of the destination rectangle.

    YDest: The y-coordinate, in logical units, of the upper-left corner of the destination rectangle.

    dwWidth: The width of the DIB, in logical units.

    dwHeight: The height of the DIB, in logical units.

    XSrc: The x-coordinate, in logical units, of the lower-left corner of the DIB.

    YSrc: The y-coordinate, in logical units, of the lower-left corner of the DIB.

    uStartScan: The starting scan line in the DIB.

    cScanLines: The number of DIB scan lines contained in the array pointed to by lpvBits.

    lpvBits: Pointer to an array of bytes containing the DIB color data. For more information, see the Remarks section below.

    lpbmi: Pointer to a BITMAPINFO structure that contains information about the DIB.

    fuColorUse: Specifies whether the bmiColors member of the BITMAPINFO structure contains explicit RGB values or indexes into a palette. For more information, see the Remarks section below.

    The fuColorUse parameter must be one of the following values:

    DIB_PAL_COLORS: The color table consists of an array of 16-bit indexes into the currently selected logical palette.

    DIB_RGB_COLORS: The color table contains literal RGB values.

    Return value: If the function succeeds, the return value is the number of scan lines set; if the function fails, the return value is 0.

    Windows NT: To get extended error information, call GetLastError.

    Windows 98, Windows NT 5.0 and later: If the driver cannot support the JPEG file image passed to SetDIBitsToDevice, the function fails and returns GDI_ERROR.
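    The following is a minimal sketch of a typical call. The function name, pixel buffer, and dimensions are illustrative assumptions; the buffer is assumed to hold a 24-bpp top-down image whose scan lines are padded to a multiple of 4 bytes.

#include <windows.h>

/* Draw a 24-bpp top-down DIB at the upper-left corner of a window's client
   area. pixels holds BGR triplets; each scan line is padded to a multiple of
   4 bytes. Returns the number of scan lines set, or 0 on failure. */
int DrawDib24(HWND hWnd, const void *pixels, int width, int height)
{
    BITMAPINFO bmi;
    ZeroMemory(&bmi, sizeof(bmi));
    bmi.bmiHeader.biSize        = sizeof(BITMAPINFOHEADER);
    bmi.bmiHeader.biWidth       = width;
    bmi.bmiHeader.biHeight      = -height;    /* negative height: top-down DIB */
    bmi.bmiHeader.biPlanes      = 1;
    bmi.bmiHeader.biBitCount    = 24;
    bmi.bmiHeader.biCompression = BI_RGB;

    HDC hdc = GetDC(hWnd);
    int copied = SetDIBitsToDevice(hdc,
                                   0, 0,             /* XDest, YDest */
                                   width, height,    /* dwWidth, dwHeight */
                                   0, 0,             /* XSrc, YSrc */
                                   0,                /* uStartScan */
                                   height,           /* cScanLines held in pixels */
                                   pixels,           /* lpvBits */
                                   &bmi,             /* lpbmi */
                                   DIB_RGB_COLORS);  /* color table is literal RGB */
    ReleaseDC(hWnd, hdc);
    return copied;    /* 0 means failure; on Windows NT call GetLastError */
}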

    Remarks: Optimal bitmap drawing speed is obtained when the bitmap bits are indexes into the system palette. An application can retrieve the system palette colors and indexes by calling the GetSystemPaletteEntries function. After the colors and indexes are retrieved, the application can create the DIB. For more information about the system palette, see the topics on colors. A sketch of this palette-indexed setup is given below.
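    The fragment below is a hedged sketch of that workflow: it builds an 8-bpp BITMAPINFO whose color table is an array of 16-bit palette indexes, suitable for passing with fuColorUse set to DIB_PAL_COLORS. The helper name and the identity mapping 0..255 are illustrative only.

#include <windows.h>
#include <stdlib.h>

/* Allocate a BITMAPINFO for an 8-bpp bottom-up DIB whose color table is an
   array of 16-bit indexes into the logical palette currently selected into
   the target DC (DIB_PAL_COLORS). The identity table 0..255 is a placeholder;
   an application would normally pick indexes after inspecting the palette,
   for example with GetSystemPaletteEntries(hdc, 0, 256, pe). */
BITMAPINFO *CreatePalIndexedInfo(int width, int height)
{
    size_t cb = sizeof(BITMAPINFOHEADER) + 256 * sizeof(WORD);
    BITMAPINFO *pbmi = (BITMAPINFO *)calloc(1, cb);
    if (pbmi == NULL)
        return NULL;

    pbmi->bmiHeader.biSize        = sizeof(BITMAPINFOHEADER);
    pbmi->bmiHeader.biWidth       = width;
    pbmi->bmiHeader.biHeight      = height;   /* positive height: bottom-up DIB */
    pbmi->bmiHeader.biPlanes      = 1;
    pbmi->bmiHeader.biBitCount    = 8;
    pbmi->bmiHeader.biCompression = BI_RGB;

    WORD *indexes = (WORD *)pbmi->bmiColors;  /* DIB_PAL_COLORS table is WORDs */
    for (int i = 0; i < 256; ++i)
        indexes[i] = (WORD)i;
    return pbmi;  /* caller passes this with fuColorUse = DIB_PAL_COLORS */
}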

    The origin of a bottom-up DIB is the lower-left corner of the bitmap; the origin of a top-down DIB is the upper-left corner.

    To reduce the amount of memory required to set the bits of a large DIB, an application can band the output by making repeated calls to SetDIBitsToDevice, placing a different portion of the bitmap into the lpvBits array each time. The values of uStartScan and cScanLines identify the portion of the bitmap contained in the lpvBits array. SetDIBitsToDevice returns an error if it is called by a process that is running in the background while a full-screen MS-DOS session runs in the foreground. A sketch of the banding technique is shown below.
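    In the following hedged sketch, the band size and the FillBand callback are hypothetical, and the DIB is assumed to use literal RGB colors.

#include <windows.h>

/* Draw a large DIB in bands of at most 64 scan lines so that only a small
   bit buffer is needed. FillBand is a hypothetical callback that writes scan
   lines [start, start + count) of the full image into buf. */
void DrawBanded(HDC hdc, const BITMAPINFO *pbmi, int width, int height,
                void *buf, void (*FillBand)(void *buf, int start, int count))
{
    const int band = 64;
    for (int start = 0; start < height; start += band)
    {
        int count = height - start;
        if (count > band)
            count = band;

        FillBand(buf, start, count);            /* load this band's bits */
        SetDIBitsToDevice(hdc,
                          0, 0,                 /* XDest, YDest */
                          width, height,        /* extents of the whole image */
                          0, 0,                 /* XSrc, YSrc */
                          start,                /* uStartScan: first line in buf */
                          count,                /* cScanLines contained in buf */
                          buf, pbmi, DIB_RGB_COLORS);
    }
}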

    Windows 98, Windows NT 5.0 and later: If the biCompression member of the BITMAPINFOHEADER is BI_JPEG, lpvBits points to a buffer containing a JPEG image, the biSizeImage member of the BITMAPINFOHEADER specifies the size of that buffer, and fuColorUse must be set to DIB_RGB_COLORS. If the bV4Compression member of the BITMAPV4HEADER is BI_JPEG, lpvBits points to a buffer containing a JPEG image, the bV4SizeImage member specifies the size of that buffer, and fuColorUse must be set to DIB_RGB_COLORS. If the bV5Compression member of the BITMAPV5HEADER is BI_JPEG, lpvBits points to a buffer containing a JPEG image, the bV5SizeImage member specifies the size of that buffer, and fuColorUse must be set to DIB_RGB_COLORS.
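    A hedged sketch of that JPEG pass-through follows. The buffer, its size, and the decoded dimensions are assumed to be known in advance; on a driver without JPEG support the call simply fails with GDI_ERROR (see the return-value section above), so the caller must be prepared to decode the image itself.

#include <windows.h>

/* Attempt JPEG pass-through (Windows 98 / Windows NT 5.0 and later only).
   jpegData/jpegSize hold a complete JPEG file image; imgWidth/imgHeight are
   its decoded dimensions. Returns the SetDIBitsToDevice result, which is
   GDI_ERROR when the driver does not accept JPEG data. */
int DrawJpegPassThrough(HDC hdc, const void *jpegData, DWORD jpegSize,
                        int imgWidth, int imgHeight)
{
    BITMAPINFO bmi;
    ZeroMemory(&bmi, sizeof(bmi));
    bmi.bmiHeader.biSize        = sizeof(BITMAPINFOHEADER);
    bmi.bmiHeader.biWidth       = imgWidth;
    bmi.bmiHeader.biHeight      = imgHeight;
    bmi.bmiHeader.biPlanes      = 1;
    bmi.bmiHeader.biBitCount    = 0;          /* 0 is required with BI_JPEG */
    bmi.bmiHeader.biCompression = BI_JPEG;    /* lpvBits holds a JPEG buffer */
    bmi.bmiHeader.biSizeImage   = jpegSize;   /* size of that buffer */

    return SetDIBitsToDevice(hdc, 0, 0, imgWidth, imgHeight,
                             0, 0, 0, imgHeight,
                             jpegData, &bmi, DIB_RGB_COLORS);
}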

    ICM: Color management is performed. If the specified BITMAPINFO structure is not a BITMAPV4HEADER or BITMAPV5HEADER, the color profile of the current device context is used as the source color profile and RGB color values are used. If the specified BITMAPINFO structure is a BITMAPV4HEADER or BITMAPV5HEADER, the color profile associated with the bitmap is used as the source color profile.

    QuickInfo: Windows NT: requires version 3.1 or later; Windows: requires Windows 95 or later; Windows CE: unsupported; Header file: wingdi.h; Import library: gdi32.lib.
