Converting YUV to IplImage (I420 and YV12)

This article reviews the basics of the YUV color space, explains the difference between YUV420 (I420) and YV12, and shows two ways to convert these formats to RGB/BGR: one based on OpenCV and one based on the conversion formulas directly.


1. Introduction to YUV
    Video captured directly from a device is usually in RGB24 format. One frame of RGB24 takes size = width × height × 3 bytes and RGB32 takes size = width × height × 4 bytes, whereas I420 (the standard YUV 4:2:0 format) only needs size = width × height × 1.5 bytes. After capturing RGB24 data, the first compression step is therefore to convert the color space from RGB24 to IYUV (I420), because x264 requires standard YUV 4:2:0 input. Note, however, that although YV12 is also a 4:2:0 format, YV12 and I420 are not identical; their memory layouts differ, as follows:
    YV12: Y (width × height) + V (width × height / 4) + U (width × height / 4)
    I420: Y (width × height) + U (width × height / 4) + V (width × height / 4)

As you can see, YV12 and I420 are essentially the same; only the order of the U and V planes differs. The short sketch at the end of this section shows the corresponding pointer arithmetic.
    The difference between YUV420p (planar) and packed YUV420 is the storage layout:
    YUV420p: yyyyyyyy uuuu vvvv
    YUV420 (packed): yuv yuv yuv

    For more details on YUV, see: http://zh.wikipedia.org/wiki/YUV.
    Also note that Hikvision devices return YV12 frames in their callbacks, while Dahua devices return YUV420 (I420).
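
To make the two layouts concrete, here is a minimal sketch (mine, not from the original article) of the pointer arithmetic for a contiguous 4:2:0 buffer; the struct and helper names are illustrative only:

#include <stddef.h>

/* Plane pointers into a contiguous 4:2:0 buffer (illustrative helper). */
typedef struct {
    unsigned char *y;   /* width x height     */
    unsigned char *u;   /* width/2 x height/2 */
    unsigned char *v;   /* width/2 x height/2 */
} Yuv420Planes;

/* is_yv12 = 0 -> I420 (Y, U, V); is_yv12 = 1 -> YV12 (Y, V, U). */
static Yuv420Planes yuv420_planes(unsigned char *buf, int width, int height, int is_yv12)
{
    Yuv420Planes p;
    size_t ySize = (size_t)width * height;   /* luma plane        */
    size_t cSize = ySize / 4;                /* each chroma plane */

    p.y = buf;
    if (is_yv12) {           /* YV12: V plane comes first */
        p.v = buf + ySize;
        p.u = buf + ySize + cSize;
    } else {                 /* I420: U plane comes first */
        p.u = buf + ySize;
        p.v = buf + ySize + cSize;
    }
    return p;                /* total frame size = ySize * 3 / 2 */
}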

2. Converting YUV420 to IplImage

Using OpenCV for the conversion, the code is as follows:
IplImage* YUV420_To_IplImage_Opencv(unsigned char* pYUV420, int width, int height)
{
    if (!pYUV420)
    {
        return NULL;
    }

    IplImage *yuvimage, *rgbimg, *yimg, *uimg, *vimg, *uuimg, *vvimg;

    int nWidth  = width;
    int nHeight = height;
    rgbimg   = cvCreateImage(cvSize(nWidth, nHeight), IPL_DEPTH_8U, 3);
    yuvimage = cvCreateImage(cvSize(nWidth, nHeight), IPL_DEPTH_8U, 3);

    // Plane headers: Y is full resolution, U and V are quarter resolution (4:2:0).
    yimg = cvCreateImageHeader(cvSize(nWidth, nHeight),     IPL_DEPTH_8U, 1);
    uimg = cvCreateImageHeader(cvSize(nWidth/2, nHeight/2), IPL_DEPTH_8U, 1);
    vimg = cvCreateImageHeader(cvSize(nWidth/2, nHeight/2), IPL_DEPTH_8U, 1);

    uuimg = cvCreateImage(cvSize(nWidth, nHeight), IPL_DEPTH_8U, 1);
    vvimg = cvCreateImage(cvSize(nWidth, nHeight), IPL_DEPTH_8U, 1);

    // I420 layout: Y plane, then U plane (w*h/4), then V plane (w*h/4).
    cvSetData(yimg, pYUV420, nWidth);
    cvSetData(uimg, pYUV420 + nWidth*nHeight, nWidth/2);
    cvSetData(vimg, pYUV420 + nWidth*nHeight + nWidth*nHeight/4, nWidth/2);

    // Upsample the chroma planes to full resolution.
    cvResize(uimg, uuimg, CV_INTER_LINEAR);
    cvResize(vimg, vvimg, CV_INTER_LINEAR);

    // CV_YCrCb2RGB expects channel order Y, Cr, Cb, so merge V (Cr) before U (Cb).
    cvMerge(yimg, vvimg, uuimg, NULL, yuvimage);
    cvCvtColor(yuvimage, rgbimg, CV_YCrCb2RGB);

    cvReleaseImage(&uuimg);
    cvReleaseImage(&vvimg);
    cvReleaseImageHeader(&yimg);
    cvReleaseImageHeader(&uimg);
    cvReleaseImageHeader(&vimg);

    cvReleaseImage(&yuvimage);

    if (!rgbimg)
    {
        return NULL;
    }

    return rgbimg;
}
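
For context, here is a minimal usage sketch (the helper name and window title are mine, not from the article). The function above returns an image in RGB channel order, while cvShowImage interprets the data as BGR, so the channels are swapped before display:

// Illustrative usage: convert one I420 frame and display it.
void ShowI420Frame(unsigned char* pFrame, int width, int height)
{
    IplImage* rgb = YUV420_To_IplImage_Opencv(pFrame, width, height);
    if (!rgb)
        return;

    // cvShowImage assumes BGR byte order, so swap R and B first.
    IplImage* bgr = cvCreateImage(cvGetSize(rgb), IPL_DEPTH_8U, 3);
    cvCvtColor(rgb, bgr, CV_RGB2BGR);

    cvShowImage("I420 preview", bgr);
    cvWaitKey(1);

    cvReleaseImage(&bgr);
    cvReleaseImage(&rgb);   // the caller owns the returned image
}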

Using the conversion formulas directly, the code is as follows:
bool YUV420_To_BGR24(unsigned char *puc_y, unsigned char *puc_u, unsigned char *puc_v, unsigned char *puc_rgb, int width_y, int height_y)
{
    if (!puc_y || !puc_u || !puc_v || !puc_rgb)
    {
        return false;
    }

    // Initialize sizes
    int baseSize = width_y * height_y;
    int rgbSize  = baseSize * 3;

    // Temporary planar R/G/B buffers
    BYTE* rgbData = new BYTE[rgbSize];
    memset(rgbData, 0, rgbSize);

    int temp = 0;

    BYTE* rData = rgbData;                  // R plane
    BYTE* gData = rgbData + baseSize;       // G plane
    BYTE* bData = gData   + baseSize;       // B plane

    int uvIndex = 0, yIndex = 0;

    // YUV -> RGB conversion matrix
    // double Yuv2Rgb[3][3] = {1,  0,       1.4022,
    //                         1, -0.3456, -0.7145,
    //                         1,  1.771,   0};

    for (int y = 0; y < height_y; y++)
    {
        for (int x = 0; x < width_y; x++)
        {
            // Each 2x2 block of luma samples shares one U and one V sample
            uvIndex = (y >> 1) * (width_y >> 1) + (x >> 1);
            yIndex  = y * width_y + x;

            // R component
            temp          = (int)(puc_y[yIndex] + (puc_v[uvIndex] - 128) * 1.4022);
            rData[yIndex] = temp < 0 ? 0 : (temp > 255 ? 255 : temp);

            // G component
            temp          = (int)(puc_y[yIndex] + (puc_u[uvIndex] - 128) * (-0.3456) +
                (puc_v[uvIndex] - 128) * (-0.7145));
            gData[yIndex] = temp < 0 ? 0 : (temp > 255 ? 255 : temp);

            // B component
            temp          = (int)(puc_y[yIndex] + (puc_u[uvIndex] - 128) * 1.771);
            bData[yIndex] = temp < 0 ? 0 : (temp > 255 ? 255 : temp);
        }
    }

    // Interleave the R, G, B planes into the output buffer in BGR byte order
    int widthStep = width_y * 3;
    for (int y = 0; y < height_y; y++)
    {
        for (int x = 0; x < width_y; x++)
        {
            puc_rgb[y * widthStep + x * 3 + 2] = rData[y * width_y + x];   // R
            puc_rgb[y * widthStep + x * 3 + 1] = gData[y * width_y + x];   // G
            puc_rgb[y * widthStep + x * 3 + 0] = bData[y * width_y + x];   // B
        }
    }

    delete [] rgbData;
    return true;
}
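
A quick way to convince yourself the coefficients are consistent is to feed the function single-pixel inputs; this check is my own illustration, not part of the original code:

// Illustrative sanity check of the coefficients above, using 1x1 "frames".
void CheckYuvCoefficients()
{
    unsigned char yGray = 128, uGray = 128, vGray = 128;
    unsigned char bgr[3] = {0};

    // Zero chroma offset: the pixel stays neutral gray, bgr = {128, 128, 128}.
    YUV420_To_BGR24(&yGray, &uGray, &vGray, bgr, 1, 1);

    // A large Cr value: R = 128 + (228 - 128) * 1.4022 ≈ 268, clamped to 255,
    // G = 128 - 100 * 0.7145 ≈ 56, B stays 128, so bgr = {128, 56, 255}.
    unsigned char vRed = 228;
    YUV420_To_BGR24(&yGray, &uGray, &vRed, bgr, 1, 1);
}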

IplImage* YUV420_To_IplImage(unsigned char* pYUV420, int width, int height)
{
    if (!pYUV420)
    {
        return NULL;
    }

    // Initialize sizes
    int baseSize = width * height;
    int imgSize  = baseSize * 3;

    BYTE* pRGB24 = new BYTE[imgSize];
    memset(pRGB24, 0, imgSize);

    // I420 plane pointers
    BYTE* yData = pYUV420;                  // Y plane
    BYTE* uData = pYUV420 + baseSize;       // U plane
    BYTE* vData = uData   + (baseSize>>2);  // V plane

    if (YUV420_To_BGR24(yData, uData, vData, pRGB24, width, height) == false)
    {
        delete [] pRGB24;
        return NULL;
    }

    IplImage *image = cvCreateImage(cvSize(width, height), 8, 3);
    if (!image)
    {
        delete [] pRGB24;
        return NULL;
    }

    memcpy(image->imageData, pRGB24, imgSize);

    delete [] pRGB24;
    return image;
}
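
The formula-based wrapper is used the same way. A minimal sketch (function and file names are my assumptions): since Dahua devices deliver I420 frames, as noted in section 1, a raw callback buffer can be passed straight to YUV420_To_IplImage and the result saved with cvSaveImage, which expects the BGR byte order this function produces:

// Assumes pFrame points to a valid I420 buffer of size width * height * 3 / 2.
void SaveI420Snapshot(unsigned char* pFrame, int width, int height)
{
    IplImage* img = YUV420_To_IplImage(pFrame, width, height);
    if (!img)
        return;

    cvSaveImage("snapshot.jpg", img);   // image data is already in BGR byte order
    cvReleaseImage(&img);
}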

3. Converting YV12 to IplImage

// Convert YV12 data to BGR24
bool YV12_To_BGR24(unsigned char* pYV12, unsigned char* pRGB24, int width, int height)
{
    if (!pYV12 || !pRGB24)
    {
        return false;
    }

    const long nYLen = long(height * width);
    const int halfWidth = (width >> 1);

    if (nYLen < 1 || halfWidth < 1)
    {
        return false;
    }

    // YV12 data layout (V plane before U plane):
    // |WIDTH |
    // y......y--------
    // y......y   HEIGHT
    // y......y
    // y......y--------
    // v..v
    // v..v
    // u..u
    // u..u
    unsigned char* yData = pYV12;
    unsigned char* vData = &yData[nYLen];
    unsigned char* uData = &vData[nYLen >> 2];

    // Convert YV12 to BGR24
    int rgb[3];
    int i, j, m, n, x, y;
    m = -width;
    n = -halfWidth;
    for (y = 0; y < height; y++)
    {
        m += width;
        if (!(y % 2))               // each chroma row serves two luma rows
            n += halfWidth;
        for (x = 0; x < width; x++)
        {
            i = m + x;
            j = n + (x >> 1);
            rgb[2] = int(yData[i] + 1.370705 * (vData[j] - 128));                                // R
            rgb[1] = int(yData[i] - 0.698001 * (vData[j] - 128) - 0.337633 * (uData[j] - 128));  // G
            rgb[0] = int(yData[i] + 1.732446 * (uData[j] - 128));                                // B

            //j = nYLen - width - m + x;
            //i = (j<<1) + j;       // use these two lines instead if the image comes out upside down

            j = m + x;
            i = (j << 1) + j;       // i = 3 * (pixel index)

            // Clamp each component to [0, 255] and write in B, G, R byte order
            for (j = 0; j < 3; j++)
            {
                if (rgb[j] >= 0 && rgb[j] <= 255)
                    pRGB24[i + j] = rgb[j];
                else
                    pRGB24[i + j] = (rgb[j] < 0) ? 0 : 255;
            }
        }
    }

    return true;
}


IplImage* YV12_To_IplImage(unsigned char* pYV12, int width, int height)
{
    if (!pYV12)
    {
        return NULL;
    }

    int sizeRGB = width * height * 3;
    unsigned char* pRGB24 = new unsigned char[sizeRGB];
    memset(pRGB24, 0, sizeRGB);

    if (YV12_To_BGR24(pYV12, pRGB24, width, height) == false)
    {
        delete [] pRGB24;
        return NULL;
    }

    IplImage* pImage = cvCreateImage(cvSize(width, height), 8, 3);
    if (!pImage)
    {
        delete [] pRGB24;
        return NULL;
    }

    memcpy(pImage->imageData, pRGB24, sizeRGB);

    delete [] pRGB24;
    return pImage;
}
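
A minimal sketch of where this fits in practice (the callback signature below is simplified and mine, not a real SDK prototype): since Hikvision devices deliver YV12 frames in their decode callback, as noted in section 1, the raw buffer can be handed straight to YV12_To_IplImage.

// Hypothetical decode callback for a device that delivers YV12 frames.
void OnYV12Frame(unsigned char* pBuf, int width, int height)
{
    IplImage* frame = YV12_To_IplImage(pBuf, width, height);
    if (!frame)
        return;

    cvShowImage("YV12 preview", frame);   // data is already in BGR byte order
    cvWaitKey(1);
    cvReleaseImage(&frame);
}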




