
This article walks through a Qt-based video capture program built on the V4L2 API, covering its key pieces: setting the video resolution, reading frame data from the device, and converting YUV data to RGB for display.
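
Before going through the sources, here is a minimal qmake project sketch that could build them. The target name and the main.cpp/driver.ui file names are assumptions inferred from the #include lines in the code (driver.h, ui_driver.h), not something given by the original article:

# videocapture.pro -- minimal qmake sketch (file names assumed, see above)
TEMPLATE = app
TARGET   = videocapture
QT      += core gui
HEADERS += driver.h
SOURCES += driver.cpp main.cpp
FORMS   += driver.ui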

#include <QtGui>
#include "driver.h"

DialogImpl::DialogImpl( QWidget * parent, Qt::WFlags f) : QDialog(parent, f)
{   
    setupUi(this);
    meg=QPixmap();
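    /* Timer that drives frame capture and display every 50 ms (roughly 20 fps) */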
    m_pFramTimer=new QTimer(this);
    m_pFramTimer->setInterval(50);
    connect(exit, SIGNAL(clicked()), this, SLOT(close()));
    connect(start, SIGNAL(clicked()), this,SLOT(print()));
    connect(setting, SIGNAL(clicked()), this,SLOT(video_set()));
    connect(m_pFramTimer,SIGNAL(timeout()),this,SLOT(paintEven()));
    m_pFramTimer->start(50);
}

/* Slot connected to the "start" button; capture is already driven by the timer
   started in the constructor, so this is currently left as a stub */
void DialogImpl::print()
{
}

/* Slot executed by the "setting" button: apply the selected resolution */
void DialogImpl::video_set()
{
    int combobox_index;

    combobox_index=resolution->currentIndex();
    /* Map the combo-box index to the requested resolution */
    switch(combobox_index)
    {
        case 0:
            video.SCREEN_WIDTH=640;
            video.SCREEN_HEIGHT=480;
            break;
        case 1:
            video.SCREEN_WIDTH=320;
            video.SCREEN_HEIGHT=240;
            break;
        case 2:
            video.SCREEN_WIDTH=160;
            video.SCREEN_HEIGHT=120;
            break;
    }

    /* Update the total size in bytes of one captured image */
    video.picture_length=video.SCREEN_WIDTH*video.SCREEN_HEIGHT*COLOR_BYTES;
    /* Stop the timer while the device is reconfigured */
    m_pFramTimer->stop();
    /* Reapply the camera parameters with the new resolution */
    video.restart();
    /* Resume frame capture */
    m_pFramTimer->start();
}

/* Video constructor */
Video::Video()
{
    SCREEN_WIDTH=640;
    SCREEN_HEIGHT=480;
    picture_length=SCREEN_WIDTH*SCREEN_HEIGHT*COLOR_BYTES;
    frame_index=0;
    /* Open video1 by default (device path specific to this setup) */
    init_video("/dev/Logitech/video1");
}

/* Reconfigure the camera: stop streaming, release the old buffers, then reopen the device */
void Video::restart()
{
    int i;
    frame_index=0;
    /* Stop the capture stream before tearing down the buffers */
    ioctl(fd, VIDIOC_STREAMOFF, &type);
    /* Unmap the previously mapped buffers and free their bookkeeping */
    for(i=0;i<req.count;i++)
    {
        munmap(buffers[i].start,buffers[i].length);
    }
    free(buffers);
    /* Close the old device file */
    close_file(&fd);
    /* Reopen and reinitialize the device with the new resolution */
    init_video("/dev/Logitech/video1");
}

/* Initialize the capture device: open it, set the capture format, request and mmap the
   kernel buffers, queue them, and start streaming */
int Video::init_video(char *device_name)
{
    unsigned int numBufs;
    int nbuffer;

    /* Open the device file */
    if(open_file(device_name,&fd)==-1)
    {
        return -1;
    }
       
    memset (&fmt, 0, sizeof(fmt) );
      
    // Set the capture format used by the driver (YUYV at the requested resolution)
    fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = SCREEN_WIDTH;
    fmt.fmt.pix.height      = SCREEN_HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1)
    {
        perror("VIDIOC_S_FMT");
          return -1;
    }

    // Request buffers for memory-mapped video capture
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    req.count  = 5;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1)
    {
        perror("VIDIOC_REQBUFS");
          return -1;
    }
   
    // Number of buffers actually granted by the driver (it may adjust req.count)
    nbuffer=req.count;

    // Allocate bookkeeping for the mapped buffers
    buffers =(VideoBuffer *) calloc(nbuffer, sizeof(*buffers));
    if(buffers==NULL)
    {
        printf("Failed to calloc!\n");
        return -1;
    }
    for (numBufs = 0; numBufs < req.count; numBufs++)
    {
        memset( &buf, 0, sizeof(buf) );
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = numBufs;
        // Query the buffer allocated by VIDIOC_REQBUFS to get its length and offset
        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1)
        {
            perror("VIDIOC_QUERYBUF");
            return -1;
        }
        // Map the kernel buffer into the application's address space
        buffers[numBufs].length = buf.length;   
        buffers[numBufs].start = mmap(NULL,           // let the kernel pick the mapping address
                                      buf.length,
                                      PROT_READ,
                                      MAP_SHARED,
                                      fd,             // the buffer lives in the device file
                                      buf.m.offset);
        if (buffers[numBufs].start == MAP_FAILED)
        {
            printf("mmap failed\n");
            return -1;
        }
   
        // Enqueue the buffer so the driver can fill it
        if (ioctl(fd, VIDIOC_QBUF, &buf) == -1)
        {
            perror("VIDIOC_QBUF");
            return -1;
        }
    }
   
    // Start the capture stream
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (-1==ioctl(fd,VIDIOC_STREAMON,&type))
    {
         perror ("While opening stream");
         return -1;
    }
    return 0;
}


/* Read one frame of data */
int Video::read_frame()
{
    // Exchange a buffer with the kernel
    memset (&buf,0,sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = frame_index;
 
     // Dequeue a filled buffer from the capture queue
    if (-1==ioctl(fd,VIDIOC_DQBUF,&buf))
    {
         perror ("While getting buffer's data");
         return -1;
    }
 
    // Convert the raw YUYV frame into a BMP image stored in picture_data
    process_image ((unsigned char *)buffers[buf.index].start);
 
    // Re-queue the buffer so the driver can reuse it
    if (-1==ioctl(fd,VIDIOC_QBUF,&buf))
    {
         perror ("While returning buffer's data");
         return -1;
    }
       
    frame_index = (frame_index+1) % req.count;
                  
    return 0;
}

/* Timer slot: grab one frame and display it */
void DialogImpl::paintEven()
{          
    video.read_frame();
    show_picture();
}

/* Display a single captured frame */
void DialogImpl::show_picture()
{
    /* Load the BMP byte stream (header plus pixel data) into the pixmap and show it */
    meg.loadFromData(picture_data, video.picture_length+sizeof(sg_BHeader));
    showwin->setPixmap(meg);
}
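
/* Note (not from the original article): BMP rows must be padded to a multiple of four
   bytes. With the widths offered here (640, 320 and 160 pixels at 3 bytes per pixel)
   each row is already a multiple of four bytes long, so this code gets away without
   explicit padding. */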

/* Open the device file */
int Video::open_file(char *name,int *file)
{   
   /* Open the device read/write; return -1 on failure */
   if ((*file = open(name, O_RDWR))< 0)
   {
        perror("Open drivers");
        return -1;
   }   
    return *file;
}

/* Close the device file */
void Video::close_file(int *file)
{
    close(*file);
}

/* Build the BMP byte stream (header + RGB pixel data) for one frame */
void Video::process_image (unsigned char * data)
{
    typedef unsigned int UINT;
    typedef unsigned char UCHAR;
    unsigned char rgb[MAXLEN];
    UINT m_Width = SCREEN_WIDTH, m_Height = SCREEN_HEIGHT;
 
    /* Fill in the BMP header fields */

    // Total file size; 0x36 (54) bytes is the size of the header itself
    sg_BHeader[0x02] = (UCHAR)(m_Width * m_Height * COLOR_BYTES + 0x36) & 0xff; 
    sg_BHeader[0x03] = (UCHAR)((m_Width * m_Height * COLOR_BYTES + 0x36) >> 8) & 0xff;
    sg_BHeader[0x04] = (UCHAR)((m_Width * m_Height * COLOR_BYTES + 0x36) >> 16) & 0xff;
    sg_BHeader[0x05] = (UCHAR)((m_Width * m_Height * COLOR_BYTES + 0x36) >> 24) & 0xff;
    // Image width
    sg_BHeader[0x12] = (UCHAR)m_Width & 0xff;
    sg_BHeader[0x13] = (UCHAR)(m_Width >> 8) & 0xff;
    sg_BHeader[0x14] = (UCHAR)(m_Width >> 16) & 0xff;
    sg_BHeader[0x15] = (UCHAR)(m_Width >> 24) & 0xff;
    // Image height
    sg_BHeader[0x16] = (UCHAR)m_Height & 0xff;
    sg_BHeader[0x17] = (UCHAR)(m_Height >> 8) & 0xff;
    sg_BHeader[0x18] = (UCHAR)(m_Height >> 16) & 0xff;
    sg_BHeader[0x19] = (UCHAR)(m_Height >> 24) & 0xff;   
    // Number of colour planes (always 1)
    sg_BHeader[0x1A] = 0x01;
    sg_BHeader[0x1B] = 0x00;
    // Bits per pixel (24 for RGB)
    sg_BHeader[0x1C] = COLOR_BYTES*8;
    sg_BHeader[0x1D] = 0x00;
    // Compression method (0 = uncompressed BI_RGB)
    sg_BHeader[0x1E] = 0x00;
    sg_BHeader[0x1F] = 0x00;
    sg_BHeader[0x20] = 0x00;
    sg_BHeader[0x21] = 0x00;
    // Size of the image data area (left at a fixed placeholder; most readers ignore it for uncompressed BMPs)
    sg_BHeader[0x22] = 0x00;
    sg_BHeader[0x23] = 0x00;
    sg_BHeader[0x24] = 0x10;
    sg_BHeader[0x25] = 0x00;
   
    /* Convert the YUYV stream into RGB24 data */
    convert_yuv_to_rgb_buffer(data,rgb,m_Width,m_Height);
    /* Concatenate the BMP header and the pixel data */
    memcpy(picture_data,sg_BHeader,sizeof(sg_BHeader));
    memcpy(picture_data+sizeof(sg_BHeader),rgb,picture_length);
}



/* Convert one pixel's Y, U, V values into a packed R, G, B value */
int Video:: convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    unsigned int pixel32 = 0;
     unsigned char *pixel = (unsigned char *)&pixel32;
     int r, g, b;
    
     r = y + (1.4075 * (v-128));
     g = y - (0.7169 * (v-128)) - (0.3455 * (u-128));
     b = y + (1.779 * (u-128));
    
     if(r > 255)
         r = 255;
     if(g > 255)
         g = 255;
     if(b > 255)
         b = 255;
     if(r < 0)
         r = 0;
     if(g < 0)
         g = 0;
     if(b < 0)
         b = 0;
       
     // Scale each channel by 220/256, slightly dimming the output
     pixel[0] = r * 220 / 256;
     pixel[1] = g * 220 / 256;
     pixel[2] = b * 220 / 256;

     return pixel32;
}
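
/* Worked example (not from the original article, just a sanity check of the formulas
   above): for a mid-grey input y = u = v = 128 the chroma terms are zero, so
   r = g = b = 128, and after the 220/256 scaling each channel becomes
   128*220/256 = 110, i.e. a slightly darker grey. */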

/* Convert a YUYV (YUV 4:2:2) buffer into an RGB24 buffer */
int Video:: convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    unsigned int in, out = 0;
    unsigned int real_index=0;
     unsigned int pixel_16;
     unsigned char pixel_24[3];
     unsigned int pixel32;
     int y0, u, y1, v;
    
     /* YUYV stores two pixels in every four bytes (Y0 U Y1 V), i.e. two bytes per pixel */
     for(in = 0; in < width * height * 2; in += 4)
     {
        pixel_16 =  (unsigned int)yuv[in + 3] << 24 |
                       (unsigned int)yuv[in + 2] << 16 |
                       (unsigned int)yuv[in + 1] <<  8 |
                       (unsigned int)yuv[in + 0];
                      
         y0 = (pixel_16 & 0x000000ff);
          u  = (pixel_16 & 0x0000ff00) >>  8;
        y1 = (pixel_16 & 0x00ff0000) >> 16;
          v  = (pixel_16 & 0xff000000) >> 24;
        
          pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
          pixel_24[2] = (pixel32 & 0x000000ff);
          pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
          pixel_24[0] = (pixel32 & 0x00ff0000) >> 16;
         
          real_index=get_real_index(out++,width,height);
          rgb[real_index] = pixel_24[0];
          real_index=get_real_index(out++,width,height);
          rgb[real_index] = pixel_24[1];
          real_index=get_real_index(out++,width,height);
          rgb[real_index] = pixel_24[2];
         
          pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
          pixel_24[2] = (pixel32 & 0x000000ff);
          pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
          pixel_24[0] = (pixel32 & 0x00ff0000) >> 16;
         
          real_index=get_real_index(out++,width,height);
          rgb[real_index] = pixel_24[0];
          real_index=get_real_index(out++,width,height);
          rgb[real_index] = pixel_24[1];
          real_index=get_real_index(out++,width,height);
          rgb[real_index] = pixel_24[2];
         
     }
    
     return 0;
}


/* Map a pixel-byte index to the index of the vertically mirrored row:
   BMP stores rows bottom-up, while the captured frame arrives top-down */
unsigned int Video:: get_real_index(unsigned int index,unsigned int width, unsigned int height)
{
    unsigned int x,y;
    unsigned int x1,y1;
    unsigned int real_index;
   
    x=index/(width*3);      /* source row */
    y=index%(width*3);      /* byte offset within the row */

    x1=height-1-x;          /* mirrored row; height-1-x keeps the index inside the buffer */
    y1=y;

    real_index=x1*(width*3)+y1;
   
    return real_index;
}

The header file included above as driver.h declares the Video capture class and the DialogImpl window class:
#ifndef DIALOGIMPL_H
#define DIALOGIMPL_H

#include <QDialog>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <stdlib.h>
#include <linux/types.h>
#include <linux/videodev2.h>
#include <setjmp.h>
#include <string.h>
#include <signal.h>
#include <errno.h>
#include <assert.h>
#include "ui_driver.h"

/* Maximum size of the buffered image byte stream (large enough for a 640x480 RGB24 BMP) */
#define MAXLEN 953600
/* Number of bytes per pixel for RGB colour */
#define COLOR_BYTES 3


//static int SCREEN_WIDTH=640;
//static int SCREEN_HEIGHT=480;


/* Buffer holding one complete BMP image (header + pixel data) */
//static int picture_length=SCREEN_WIDTH*SCREEN_HEIGHT*COLOR_BYTES;
static unsigned char picture_data[MAXLEN];

typedef struct VideoBuffer
{
    void   *start;
    size_t  length;
} VideoBuffer;

/* BMP header template (54 bytes); the size, width, height and bit-depth fields are overwritten in process_image() */
static unsigned char sg_BHeader[] = {
 0x42, 0x4D, 0x36, 0x58, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
 0x36, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x40, 0x01,
 0x00, 0x00, 0xF0, 0x00, 0x00, 0x00, 0x01, 0x00, 0x10, 0x00,
 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
 0x00, 0x00, 0x00, 0x00
};

/* V4L2 video capture class */
class Video
{
public:
    Video();
    struct v4l2_format fmt;
    struct v4l2_requestbuffers req;
    VideoBuffer * buffers;
    struct v4l2_buffer buf;
    enum v4l2_buf_type type;
    int frame_index;
    int fd;
    int SCREEN_WIDTH;
    int SCREEN_HEIGHT;
    int picture_length;
    int open_file(char *name,int *file);
    void close_file(int *file);
    int init_video(char *device_name);
    int read_frame();
    void restart();   
    void process_image (unsigned char* buffers);
    int convert_yuv_to_rgb_pixel(int y, int u, int v);
    int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);
    unsigned int get_real_index(unsigned int index,unsigned int width, unsigned int height);
};

/* Main window (dialog) class */
class DialogImpl : public QDialog, public Ui::Dialog
{
Q_OBJECT
public:
   DialogImpl( QWidget * parent = 0, Qt::WFlags f = 0 );
private slots:
   void print();
   void paintEven();
   void video_set();
private:
   QTimer* m_pFramTimer;
   QPixmap meg;
   Video video;
   void show_picture();
};





#endif
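
The article does not include a main() entry point. A minimal sketch of what it could look like is shown below; the file name main.cpp and the QApplication boilerplate are assumptions, not part of the original code:

// main.cpp -- hypothetical entry point, assuming the DialogImpl class declared above
#include <QApplication>
#include "driver.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    DialogImpl dialog;   // the constructor opens the camera and starts the 50 ms capture timer
    dialog.show();
    return app.exec();
}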
