ov2640驱动
驱动编译
作者使用的linux内核版本是4.1.15+,内含ov2640的驱动,只需要在NXP出厂默认配置的基础上开启对应的选项。
在<linux内核源码路径>/arch/arm/configs/**defconfig下添加对应的驱动配置项(例如 CONFIG_VIDEO_OV2640=y),使内核编译时包含 ov2640 驱动。
DTS修改
删除这一行,避免产生多余的video设备
对照原有的ov5640修改为ov2640
参考Linux ov2640移植 可以得到从设备地址及多余video设备删除相关内容
应用开发
整体结构上开发板作为服务端,使用监听端口。PC上的QT端作为客户端,主动发送消息,获取数据。
注意:
- 帧过大会导致单个数据包无法发送,需要分包,即添加帧头等信息
- 对于占用多个字节的数据需要使用htonl等接口,完成大小端顺序的转化,否则反序列化可能出问题
- 可变参数的宏展开和参数列表展开
开发板应用
#include <arpa/inet.h>
#include <cstdarg>
#include <ctime>
#include <errno.h>
#include <fcntl.h>
#include <ifaddrs.h>
#include <linux/v4l2-controls.h>
#include <linux/videodev2.h>
#include <netinet/in.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <string>
#include <sys/epoll.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/socket.h>
#include <unistd.h>
#include <vector>
#define FRAMEBUFFER_COUNT 4   // number of V4L2 mmap frame buffers to request
#define UDP_PORT 12345        // replace with the actual port number
#define MAX_EVENTS 1          // only one fd is ever registered with epoll
#define MAX_PAYLOAD_SIZE 1500 // per-datagram chunk size; chosen to stay below the MTU
#define HEAD_SIZE 5           // length of the "Video" frame-header magic
// One mmap'ed V4L2 frame buffer: the mapped address and its byte length.
struct cam_buf_info
{
    void* start;   // address returned by mmap()
    size_t length; // mapping length as reported by VIDIOC_QUERYBUF
};
// Macro-based implementation of timestamped logging (disabled).
#if 0
// NOTE: the do{...}while(0) idiom must NOT end with a semicolon here,
// otherwise `if (cond) LOG_PRINT(...); else ...` fails to compile.
#define LOG_PRINT(format, ...) \
    do \
    { \
        std::time_t raw_time; \
        std::time(&raw_time); \
        struct tm tm_buf; \
        struct tm* time_info = localtime_r(&raw_time, &tm_buf); \
        char buffer[80]; \
        std::strftime(buffer, sizeof(buffer), "%Y-%m-%d %H:%M:%S", time_info); \
        printf("[%s] " format "\n", buffer, ##__VA_ARGS__); \
    } while(0)
#else
/**
 * Variadic-function implementation of timestamped logging.
 * Prints "[YYYY-MM-DD HH:MM:SS] <formatted message>\n" and flushes stdout
 * so output is immediately visible (e.g. over a serial console).
 */
void LOG_PRINT(const char* format, ...)
{
    std::time_t raw_time;
    std::time(&raw_time);
    // localtime_r is the thread-safe variant; plain localtime() returns a
    // pointer to shared static storage and may return NULL.
    struct tm tm_buf;
    struct tm* time_info = localtime_r(&raw_time, &tm_buf);
    char buffer[80] = "";
    if(time_info != NULL)
    {
        std::strftime(buffer, sizeof(buffer), "%Y-%m-%d %H:%M:%S", time_info);
    }
    printf("[%s] ", buffer);
    va_list args;
    va_start(args, format);
    vprintf(format, args);
    va_end(args);
    printf("\n");
    fflush(stdout);
}
#endif
/**
 * Thin RAII wrapper around a bound UDP server socket.
 * Call init() after construction; the descriptor is closed in the destructor.
 */
class udpSocket
{
public:
    explicit udpSocket(int port)
        : m_port(port){};
    ~udpSocket();
    // The wrapper owns the descriptor: forbid copies so the same fd can
    // never be closed twice by two instances.
    udpSocket(const udpSocket&) = delete;
    udpSocket& operator=(const udpSocket&) = delete;
    // Creates the socket and binds it to m_port on all interfaces.
    // Returns the descriptor (>= 0) on success, -1 on failure.
    int init();
    int getFd() { return m_udp_socket; }
private:
    int m_udp_socket = -1; // -1 until init() succeeds, so the destructor is
                           // safe even when init() was never called
    int m_port;
};
/**
 * V4L2 capture device: opens a camera node, mmap's FRAMEBUFFER_COUNT frame
 * buffers and serves raw YUYV frames to UDP clients on request.
 */
class Video
{
public:
    explicit Video(std::string dev_name)
        : m_dev_name(std::move(dev_name)){};
    ~Video();
    // Owns the device fd and the mmap'ed buffers: forbid copies.
    Video(const Video&) = delete;
    Video& operator=(const Video&) = delete;
    // Opens the device, configures 640x480 YUYV, maps and queues the frame
    // buffers, and starts streaming. Returns 0 on success, -1 on failure.
    int init();
    // Prints every pixel format the driver advertises.
    void print_supported_formats();
    // Blocks forever: each received datagram triggers one frame being sent
    // back to that client. Returns -1 on unrecoverable error.
    int capture_and_send(udpSocket& udp_socket);
private:
    // Prefixes the frame with a header and sends it in MTU-sized chunks.
    void send_real(void* data, int32_t length, int udp_socket, sockaddr_in* client_addr);
    // Dumps one raw frame to disk (no-op unless SAVE_PICTURE is defined).
    void save_yuv_image(const char* filename, void* data, size_t length);
private:
    int m_v4l2_fd = -1; // camera fd; -1 until init() opens the device
    std::string m_dev_name;
    // Zero-initialized so the destructor can tell mapped from unmapped slots.
    cam_buf_info m_buf_infos[FRAMEBUFFER_COUNT] = {};
};
/**
 * Best-effort teardown: stop streaming, unmap the frame buffers, then close
 * the device. Errors are ignored — the process is shutting down anyway.
 * The original destructor leaked the mmap'ed buffers and never issued
 * VIDIOC_STREAMOFF.
 */
Video::~Video()
{
    if(m_v4l2_fd >= 0)
    {
        int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        ioctl(m_v4l2_fd, VIDIOC_STREAMOFF, &type);
        for(int i = 0; i < FRAMEBUFFER_COUNT; i++)
        {
            // NOTE(review): assumes unmapped slots are zero/MAP_FAILED —
            // true once init() ran; only unmap successfully mapped slots.
            if(m_buf_infos[i].start != NULL && m_buf_infos[i].start != MAP_FAILED)
            {
                munmap(m_buf_infos[i].start, m_buf_infos[i].length);
            }
        }
        close(m_v4l2_fd);
    }
}
/**
 * Open the camera, request 640x480 packed YUYV, mmap FRAMEBUFFER_COUNT
 * buffers, queue them all and start streaming.
 * Returns 0 on success, -1 on any failure (partial setup is released by
 * the destructor).
 */
int Video::init()
{
    /* Open the camera device */
    m_v4l2_fd = open(m_dev_name.c_str(), O_RDWR);
    if(m_v4l2_fd < 0)
    {
        perror("open error");
        return -1;
    }
    /* Configure the capture format */
    v4l2_format fmt = {};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;  // replace with the actual width
    fmt.fmt.pix.height = 480; // replace with the actual height
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    if(ioctl(m_v4l2_fd, VIDIOC_S_FMT, &fmt) < 0)
    {
        perror("ioctl error: VIDIOC_S_FMT");
        return -1;
    }
    /* VIDIOC_S_FMT may silently adjust the request — report what we got */
    if(fmt.fmt.pix.width != 640 || fmt.fmt.pix.height != 480 || fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV)
    {
        fprintf(stderr, "driver adjusted format to %ux%u (pixfmt 0x%08x)\n",
                fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.pixelformat);
    }
    /* Request the frame buffers (driver-owned, memory-mapped) */
    v4l2_requestbuffers reqbuf = {};
    reqbuf.count = FRAMEBUFFER_COUNT;
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    if(ioctl(m_v4l2_fd, VIDIOC_REQBUFS, &reqbuf) < 0)
    {
        perror("ioctl error: VIDIOC_REQBUFS");
        return -1;
    }
    /* The driver is allowed to grant fewer buffers than requested */
    if(reqbuf.count < FRAMEBUFFER_COUNT)
    {
        fprintf(stderr, "only %u of %d buffers granted\n", reqbuf.count, FRAMEBUFFER_COUNT);
        return -1;
    }
    /* Map each buffer into our address space */
    v4l2_buffer buf = {};
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    for(buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++)
    {
        if(ioctl(m_v4l2_fd, VIDIOC_QUERYBUF, &buf) < 0)
        {
            perror("ioctl error: VIDIOC_QUERYBUF");
            return -1;
        }
        m_buf_infos[buf.index].length = buf.length;
        m_buf_infos[buf.index].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, m_v4l2_fd, buf.m.offset);
        if(m_buf_infos[buf.index].start == MAP_FAILED)
        {
            perror("mmap error");
            return -1;
        }
    }
    /* Queue every buffer so the driver can start filling them */
    for(buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++)
    {
        if(ioctl(m_v4l2_fd, VIDIOC_QBUF, &buf) < 0)
        {
            perror("ioctl error: VIDIOC_QBUF");
            return -1;
        }
    }
    /* Start capturing */
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(ioctl(m_v4l2_fd, VIDIOC_STREAMON, &type) < 0)
    {
        perror("ioctl error: VIDIOC_STREAMON");
        return -1;
    }
    return 0;
}
/**
 * Serve frames over UDP: wait (via epoll) for any datagram from a client,
 * then dequeue one camera frame, send it to that client and re-queue the
 * buffer. Loops forever; returns -1 on unrecoverable error.
 */
int Video::capture_and_send(udpSocket& socketObj)
{
    int epoll_fd = epoll_create1(0);
    if(epoll_fd == -1)
    {
        perror("epoll_create1 error");
        return -1;
    }
    int udp_socket = socketObj.getFd();
    epoll_event ev, events[MAX_EVENTS];
    ev.events = EPOLLIN;
    ev.data.fd = udp_socket;
    if(epoll_ctl(epoll_fd, EPOLL_CTL_ADD, udp_socket, &ev) == -1)
    {
        perror("epoll_ctl error");
        close(epoll_fd); // do not leak the epoll descriptor
        return -1;
    }
    v4l2_buffer frame_buf = {};
    frame_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    frame_buf.memory = V4L2_MEMORY_MMAP;
    uint32_t frame_number = 0;
    char buffer[100];
    sockaddr_in client_addr;
    while(1)
    {
        int nfds = epoll_wait(epoll_fd, events, MAX_EVENTS, -1);
        if(nfds == -1)
        {
            if(errno == EINTR) continue; // interrupted by a signal — retry
            perror("epoll_wait error");
            close(epoll_fd);
            return -1;
        }
        for(int n = 0; n < nfds; ++n)
        {
            if(events[n].data.fd != udp_socket) continue;
            /* Any datagram from a client is treated as a frame request.
             * recvfrom may shrink addr_len, so reset it before every call. */
            socklen_t addr_len = sizeof(client_addr);
            int recv_len = recvfrom(udp_socket, buffer, sizeof(buffer) - 1, 0, (sockaddr*)&client_addr, &addr_len);
            if(recv_len < 0)
            {
                perror("recvfrom fail");
                continue;
            }
            /* Dequeue a filled frame from the driver */
            if(ioctl(m_v4l2_fd, VIDIOC_DQBUF, &frame_buf) < 0)
            {
                perror("ioctl error: VIDIOC_DQBUF");
                close(epoll_fd);
                return -1;
            }
            /* bytesused is the actual payload size of this frame; it can be
             * smaller than the mmap'ed buffer length. Fall back to the
             * mapping length if the driver left it zero. */
            size_t frame_len = frame_buf.bytesused ? frame_buf.bytesused : m_buf_infos[frame_buf.index].length;
            /* Save one raw picture every 10 frames (no-op unless SAVE_PICTURE) */
            if(frame_number % 10 == 0)
            {
                char filename[256];
                snprintf(filename, sizeof(filename), "frame_%u.raw", frame_number);
                save_yuv_image(filename, m_buf_infos[frame_buf.index].start, frame_len);
            }
            LOG_PRINT("%s", "send once");
            /* Send the frame back to the requesting client */
            send_real(m_buf_infos[frame_buf.index].start, (int32_t)frame_len, udp_socket, &client_addr);
            frame_number++;
            /* Give the buffer back to the driver */
            if(ioctl(m_v4l2_fd, VIDIOC_QBUF, &frame_buf) < 0)
            {
                perror("ioctl error: VIDIOC_QBUF");
                close(epoll_fd);
                return -1;
            }
        }
    }
    /* not reached — the loop above only exits via error returns */
    close(epoll_fd);
    return 0;
}
/* Enumerate and print every pixel format the camera driver advertises. */
void Video::print_supported_formats()
{
    printf("Supported formats:\n");
    v4l2_fmtdesc desc;
    memset(&desc, 0, sizeof(desc));
    desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* VIDIOC_ENUM_FMT fails once the index runs past the last format */
    for(desc.index = 0; ioctl(m_v4l2_fd, VIDIOC_ENUM_FMT, &desc) == 0; desc.index++)
    {
        printf(" %d: %s\n", desc.index + 1, desc.description);
    }
}
/**
 * Dump one raw YUYV frame to `filename`. Compiled to a no-op unless the
 * build defines SAVE_PICTURE to a non-zero value.
 */
void Video::save_yuv_image(const char* filename, void* data, size_t length)
{
#if SAVE_PICTURE
    /* O_TRUNC so a shorter frame does not leave stale bytes at the tail;
     * 0666 — a data file should not be executable. */
    int fd = open(filename, O_CREAT | O_TRUNC | O_WRONLY, 0666);
    if(fd < 0)
    {
        perror("open error"); // was "fopen error", but the call is open(2)
        return;
    }
    /* write(2) may write fewer bytes than requested — loop until done */
    const char* p = (const char*)data;
    size_t remaining = length;
    while(remaining > 0)
    {
        ssize_t n = write(fd, p, remaining);
        if(n < 0)
        {
            if(errno == EINTR) continue;
            perror("write error");
            break;
        }
        p += n;
        remaining -= (size_t)n;
    }
    close(fd);
#endif
}
/**
 * Prefix the frame with a 5-byte magic ("Video") and a 4-byte big-endian
 * payload length, then push it to the client in MTU-sized UDP chunks.
 * The receiver reassembles chunks until `length` payload bytes arrive.
 */
void Video::send_real(void* data, int32_t length, int udp_socket, sockaddr_in* client_addr)
{
    static const char frame_head[HEAD_SIZE + 1] = {"Video"};
    /* Multi-byte fields cross the wire in network byte order */
    int32_t frame_size = htonl(length);
    size_t total_size = (size_t)length + HEAD_SIZE + sizeof(frame_size);
    /* RAII buffer instead of malloc/free — no leak on early return */
    std::vector<char> buf(total_size);
    /* Layout: [5-byte magic][4-byte big-endian length][payload] */
    memcpy(buf.data(), frame_head, HEAD_SIZE);
    memcpy(buf.data() + HEAD_SIZE, &frame_size, sizeof(frame_size));
    memcpy(buf.data() + HEAD_SIZE + sizeof(frame_size), data, length);
    /* Send in chunks that fit within one MTU */
    size_t offset = 0;
    while(offset < total_size)
    {
        size_t chunk_size = (total_size - offset > MAX_PAYLOAD_SIZE) ? MAX_PAYLOAD_SIZE : total_size - offset;
        ssize_t ret = sendto(udp_socket, buf.data() + offset, chunk_size, 0, (sockaddr*)client_addr, sizeof(*client_addr));
        if(ret < 0)
        {
            perror("sendto error");
            break;
        }
        offset += chunk_size;
    }
}
// Release the socket descriptor if one was opened.
udpSocket::~udpSocket()
{
    if(m_udp_socket < 0)
        return;
    close(m_udp_socket);
}
/**
 * Create a UDP socket and bind it to m_port on every interface.
 * Returns the socket descriptor (>= 0) on success, -1 on failure.
 */
int udpSocket::init()
{
    m_udp_socket = socket(AF_INET, SOCK_DGRAM, 0);
    if(m_udp_socket < 0)
    {
        perror("socket error");
        return -1;
    }
    /* Allow immediate rebinding after a restart (best-effort; failure is
     * harmless, so the return value is deliberately ignored) */
    int opt = 1;
    setsockopt(m_udp_socket, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt));
    sockaddr_in server_addr;
    memset(&server_addr, 0, sizeof(server_addr));
    server_addr.sin_family = AF_INET;
    server_addr.sin_port = htons(m_port);
    server_addr.sin_addr.s_addr = INADDR_ANY;
    if(bind(m_udp_socket, (sockaddr*)&server_addr, sizeof(server_addr)) < 0)
    {
        perror("bind error");
        close(m_udp_socket);
        m_udp_socket = -1; // prevent the destructor from closing the fd a second time
        return -1;
    }
    return m_udp_socket;
}
/**
 * Entry point: open the camera (device node may be given as argv[1],
 * default /dev/video0), bind the UDP server socket and serve frames.
 */
int main(int argc, char* argv[])
{
    const char* dev_name = (argc > 1) ? argv[1] : "/dev/video0";
    Video myVideo(dev_name);
    if(myVideo.init() < 0)
    {
        return EXIT_FAILURE;
    }
    myVideo.print_supported_formats(); // show what the driver actually offers
    udpSocket myUdpSocket(UDP_PORT);
    if(myUdpSocket.init() < 0)
    {
        return EXIT_FAILURE;
    }
    myVideo.capture_and_send(myUdpSocket);
    return EXIT_SUCCESS;
}
PC端QT应用
// videoplayer.cpp,即一般工程中默认的mainwindow.cpp
#include "videoplayer.h"
#include <QBuffer>
#include <QPainter>
#include <QtEndian>
#include <WinSock2.h>
// Protocol constants shared with the board-side sender: every frame starts
// with the 5-byte magic "Video" followed by a 4-byte big-endian payload length.
const int VideoPlayer::head_size = 5;
static char Head_value[]={"Video"};
// Plain QWidget whose only job is to paint the most recently received frame.
VideoWidget::VideoWidget(QWidget *parent)
    : QWidget(parent)
{}
// Cache the newly decoded frame and schedule an asynchronous repaint.
void VideoWidget::setFrame(const QImage &frame)
{
    currentFrame = frame;
    update(); // queues a paint event; paintEvent() does the drawing
}
// Draw the cached frame scaled to the widget size, preserving aspect ratio.
void VideoWidget::paintEvent(QPaintEvent *)
{
    QPainter painter(this);
    if (currentFrame.isNull())
        return; // nothing received yet
    painter.drawImage(0, 0, currentFrame.scaled(size(), Qt::KeepAspectRatio));
}
/**
 * Build the UI, hook the UDP socket's readyRead signal up, then kick off the
 * stream by sending a request datagram to the board.
 */
VideoPlayer::VideoPlayer(QWidget *parent)
    : QMainWindow(parent)
    , udpSocket(new QUdpSocket(this))
    , videoWidget(new VideoWidget(this))
    , layout(new QVBoxLayout)
    , centralWidget(new QWidget(this))
    , frameSize(0)
{
    // Connect BEFORE sending the first request so a fast reply cannot arrive
    // in the gap between the write and the signal hookup and go unnoticed.
    connect(udpSocket, &QUdpSocket::readyRead, this, &VideoPlayer::processPendingDatagrams);
    // A real protocol would send an explicit "request frame" command that the
    // server validates; here an empty datagram acts as the request.
    udpSocket->writeDatagram("", QHostAddress("10.168.1.123"), 12345);
    layout->addWidget(videoWidget);
    centralWidget->setLayout(layout);
    setCentralWidget(centralWidget);
    resize(800, 600);
}
/**
 * Child widgets and the socket are parented to this window, so Qt deletes
 * them automatically; nothing to do here. (The previous body resized
 * `buffer` during destruction, which had no observable effect — removed.)
 */
VideoPlayer::~VideoPlayer()
{
}
/**
 * Drain all pending UDP datagrams, reassemble them into complete frames
 * (protocol: 5-byte "Video" magic + 4-byte big-endian length + YUYV
 * payload), convert YUYV -> RGB888 and hand the image to the widget.
 *
 * NOTE(review): UDP gives no delivery or ordering guarantee; a lost or
 * reordered chunk corrupts the frame currently being assembled — presumably
 * the cause of the occasional bad frames noted at the end of the article.
 */
void VideoPlayer::processPendingDatagrams()
{
    while (udpSocket->hasPendingDatagrams()) {
        // Immediately request more data (simple ack-driven flow control).
        udpSocket->writeDatagram("", QHostAddress("10.168.1.123"), 12345);
        std::vector<char> datagram(udpSocket->pendingDatagramSize());
        udpSocket->readDatagram(datagram.data(), datagram.size());
        // Accumulate chunks until a whole frame has been buffered.
        buffer.insert(buffer.end(),
                      std::make_move_iterator(datagram.begin()),
                      std::make_move_iterator(datagram.end()));
        // Not even the 9-byte header (magic + length) has arrived yet.
        if (buffer.size() < head_size + 4) {
            continue;
        }
        // Validate the magic; on mismatch drop everything and resynchronize.
        std::vector<char> head{buffer.begin(), buffer.begin() + head_size};
        if (!std::equal(head.begin(), head.begin() + head_size, Head_value)) {
            buffer.clear();
            frameSize = 0;
            qDebug() << "error head" << head << "!!!";
            continue;
        }
        // Parse the payload length once per frame (sender wrote it with htonl,
        // i.e. big-endian on the wire).
        if(frameSize == 0)
        {
            memcpy(&frameSize, &buffer[head_size], 4);
            frameSize = qFromBigEndian(frameSize);
            qDebug() << "frameSize:" << frameSize;
            // frameSize = 614400;
        }
        // Frame not complete yet — keep accumulating.
        if (buffer.size() < frameSize + head_size + 4) {
            continue;
        }
        std::vector<char>::iterator frameDataBegin = buffer.begin() + head_size + 4;
        std::vector<char> frameData(frameDataBegin, frameDataBegin + frameSize);
        // buffer.erase(buffer.begin(), frameDataBegin + frameSize);
        // NOTE(review): clear() also discards any bytes of the NEXT frame that
        // arrived in the same batch; the commented-out erase() above would keep them.
        buffer.clear();
        frameSize = 0;
        // The payload is assumed to be YUYV 4:2:2 and is converted to RGB888.
        int width = 640; // replace with the actual width
        int height = 480; // replace with the actual height
        QImage frame(width, height, QImage::Format_RGB888);
        // YUYV -> RGB (BT.601 integer approximation): each 4-byte group
        // [Y1 U Y2 V] produces two horizontally adjacent pixels.
        for (int y = 0; y < height; ++y) {
            for (int x = 0; x < width; x += 2) {
                int index = (y * width + x) * 2;
                if (index + 3 >= frameData.size()) {
                    qWarning("Index out of range");
                    continue;
                }
                int Y1 = static_cast<unsigned char>(frameData[index]);
                int U = static_cast<unsigned char>(frameData[index + 1]);
                int Y2 = static_cast<unsigned char>(frameData[index + 2]);
                int V = static_cast<unsigned char>(frameData[index + 3]);
                int C1 = Y1 - 16;
                int C2 = Y2 - 16;
                int D = U - 128;
                int E = V - 128;
                int R1 = qBound(0, (298 * C1 + 409 * E + 128) >> 8, 255);
                int G1 = qBound(0, (298 * C1 - 100 * D - 208 * E + 128) >> 8, 255);
                int B1 = qBound(0, (298 * C1 + 516 * D + 128) >> 8, 255);
                int R2 = qBound(0, (298 * C2 + 409 * E + 128) >> 8, 255);
                int G2 = qBound(0, (298 * C2 - 100 * D - 208 * E + 128) >> 8, 255);
                int B2 = qBound(0, (298 * C2 + 516 * D + 128) >> 8, 255);
                // setPixel is convenient but slow; scanLine() would be faster.
                frame.setPixel(x, y, qRgb(R1, G1, B1));
                frame.setPixel(x + 1, y, qRgb(R2, G2, B2));
            }
        }
        // Rotate the image 180° before display. (The original comment said
        // "rotate 90" but the code rotates 180 — the code is authoritative.)
        QTransform transform;
        transform.rotate(180);
        frame = frame.transformed(transform);
        videoWidget->setFrame(frame);
    }
}
效果
但是有时候会出现这种异常帧,暂时还不知道怎么回事。推测与UDP丢包或乱序有关:组帧过程中一旦丢失或错位一个分片,整帧数据就会错乱。可以考虑在分片头部加入序号,接收端发现缺片时直接丢弃整帧,以验证该猜测。