Reading raw camera data with V4L2 (mmap mode, V4L2_MEMORY_MMAP)
In memory-mapped mode, the physical memory that the driver allocates in kernel space for storing video frames is mapped into user space, so the application can access the frame data directly without copying it between kernel and user space. Capture is therefore fast, and this mode is commonly used for continuous video streaming.
Workflow (a condensed sketch of the whole sequence follows the list):
- Request that the driver allocate video frame buffers with the VIDIOC_REQBUFS ioctl.
- Query the information of each allocated buffer with the VIDIOC_QUERYBUF ioctl.
- Map each buffer into user space with mmap.
- Put the allocated buffers into the driver's incoming queue with the VIDIOC_QBUF ioctl.
- After starting the video stream, wait for a buffer to become ready with poll or select.
- Dequeue a filled buffer with the VIDIOC_DQBUF ioctl and process its data.
- When processing is finished, re-queue the buffer with VIDIOC_QBUF so it can be reused.
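The condensed sketch below runs through the same sequence end to end; error handling is kept to a minimum, and the device path /dev/video0, a buffer count of 4 and a 10-frame demo loop are assumptions chosen for illustration. The reusable class implementation follows after it.
//condensed sketch of the mmap capture sequence, error handling kept to a minimum
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <linux/videodev2.h>
int main()
{
int fd = open("/dev/video0", O_RDWR | O_NONBLOCK);
if (fd == -1) { perror("open"); return -1; }
struct v4l2_requestbuffers req = {};
req.count = 4; //VIDIOC_REQBUFS: ask the driver for 4 frame buffers
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) { perror("VIDIOC_REQBUFS"); return -1; }
void *addr[4] = {};
size_t len[4] = {};
unsigned int count = req.count < 4 ? req.count : 4; //the driver may grant fewer buffers
for (unsigned int i = 0; i < count; i++)
{
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
ioctl(fd, VIDIOC_QUERYBUF, &buf); //get offset/length of buffer i
len[i] = buf.length;
addr[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
ioctl(fd, VIDIOC_QBUF, &buf); //hand the empty buffer to the driver
}
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ioctl(fd, VIDIOC_STREAMON, &type); //start streaming
for (int frame = 0; frame < 10; frame++) //grab 10 frames as a demo
{
fd_set fds;
FD_ZERO(&fds);
FD_SET(fd, &fds);
struct timeval tv = {3, 0};
if (select(fd + 1, &fds, NULL, NULL, &tv) <= 0) break; //wait for a filled buffer
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
ioctl(fd, VIDIOC_DQBUF, &buf); //take a filled buffer out of the queue
printf("frame %d: %u bytes in buffer %u\n", frame, buf.bytesused, buf.index);
ioctl(fd, VIDIOC_QBUF, &buf); //re-queue the buffer for reuse
}
ioctl(fd, VIDIOC_STREAMOFF, &type); //stop streaming
for (unsigned int i = 0; i < count; i++) munmap(addr[i], len[i]);
close(fd);
return 0;
}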
v4l2mmapmode.hpp
#ifndef _V4L2MMAPMODE_H_
#define _V4L2MMAPMODE_H_
#include <iostream>
#include <list>
#include <vector>
#include <utility>
#include <functional>
#include <mutex>
#include <thread>
#include <linux/videodev2.h>
//image frame data structure
struct VideoBufferStruct
{
std::string dev_name; //video device name
unsigned int pixel_format; //pixel format currently in use
int width; //image width
int height; //image height
unsigned long timestamp; //timestamp taken when the frame is read from the kernel
size_t data_len; //length of the image data
void *data; //image data
};
class V4L2CaptureVideoData
{
public:
static bool m_keeprunning; //run flag, cleared by Ctrl+C to stop capture
public:
explicit V4L2CaptureVideoData();
~V4L2CaptureVideoData();
void RegisterVideoDataProcessCallback(std::function<void(const VideoBufferStruct&)> func);
bool OpenVideoDevice(std::string device_name);
bool closeVideoDevice();
bool QueryVideoDeviceCapability(const unsigned int capability);
void StartMmapData();
/* VIDIOC_QUERYCAP query the capabilities the device supports */
bool GetVideoDeviceCapability(struct v4l2_capability &cap);
/* VIDIOC_G_PRIORITY get the access priority of the device */
bool GetVideoDevicePriority(unsigned int &priority);
/* VIDIOC_S_PRIORITY set the access priority of the device */
bool SetVideoDevicePriority(const unsigned int priority);
/* VIDIOC_LOG_STATUS log information about the current state of the video device */
bool GetVideoDeviceLogStatus(void);
/* VIDIOC_ENUM_FMT enumerate the video formats the device supports */
bool EnumVideoDeviceFormat(std::list<struct v4l2_fmtdesc> &fmtdesc);
/* VIDIOC_G_FMT get the pixel format the device is currently using */
bool GetVideoDeviceFormat(struct v4l2_format &fmt);
/* VIDIOC_S_FMT set the pixel format the device should use */
bool SetVideoDeviceFormat(const struct v4l2_format &fmt);
/* VIDIOC_TRY_FMT try a pixel format to check whether the device supports it */
bool TrySetVideoDeviceFormat(const struct v4l2_format &fmt);
/* VIDIOC_ENUM_FRAMESIZES enumerate the capture resolutions the device supports */
bool EnumVideoDeviceFrameSize(const unsigned int pixel_format, std::list<struct v4l2_frmsizeenum> &frmsize);
/* VIDIOC_ENUM_FRAMEINTERVALS enumerate the capture frame rates (fps) the device supports */
bool EnumVideoDeviceFrameIntervals(const unsigned int pixel_format, const unsigned int width, const unsigned int height, std::list<struct v4l2_frmivalenum> &frmivals);
/* VIDIOC_STREAMON start video capture */
bool StartVideoCapture(void);
/* VIDIOC_STREAMOFF stop video capture */
bool StopVideoCapture(void);
/* VIDIOC_REQBUFS ask the driver to allocate video frame buffers */
bool RequestVideoBuffer(const struct v4l2_requestbuffers &requestbuf);
/* VIDIOC_QUERYBUF query the struct v4l2_buffer information of an allocated buffer */
bool GetVideoBuffer(const unsigned int memory_type, unsigned int index, v4l2_buffer &video_buffer);
/* VIDIOC_QBUF put an allocated buffer into the driver's incoming queue */
bool PushVideoBuffer(const struct v4l2_buffer &video_buffer);
/* VIDIOC_DQBUF take a captured buffer out of the outgoing queue */
bool PopVideoBuffer(const unsigned int memory_type, struct v4l2_buffer &video_buffer);
private:
unsigned long getEpochTimeShiftus();
private:
int m_video_fd;
std::function<void(const VideoBufferStruct&)> m_video_data_callback;
std::string m_video_device_name;
};
#endif // _V4L2MMAPMODE_H_
v4l2mmapmode.cpp
#include "v4l2mmapmode.hpp"
#include <iostream>
#include <thread>
#include <chrono>
#include <string>
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <assert.h>
#include <signal.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
bool V4L2CaptureVideoData::m_keeprunning=true;
V4L2CaptureVideoData::V4L2CaptureVideoData()
{
m_video_fd=-1;
m_video_data_callback=nullptr;
m_video_device_name.clear();
}
V4L2CaptureVideoData::~V4L2CaptureVideoData()
{
if(m_video_fd!=-1)
{
closeVideoDevice();
}
}
void V4L2CaptureVideoData::RegisterVideoDataProcessCallback(std::function<void(const VideoBufferStruct&)> func)
{
m_video_data_callback=func;
}
bool V4L2CaptureVideoData::OpenVideoDevice(std::string device_name)
{
m_video_device_name=device_name;
std::string device_head="/dev/video";
if(m_video_device_name.compare(0, device_head.size(), device_head)!=0)
{
std::cerr<<m_video_device_name<<" is not camera device"<<std::endl;
return false;
}
m_video_fd = open(m_video_device_name.c_str(), O_RDWR /* required */ | O_NONBLOCK, 0);
if (-1 == m_video_fd)
{
std::cerr<<"cannot open video device:"<<"device name="<<m_video_device_name<<",errno="<<errno<<",strerror="<<strerror(errno)<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::closeVideoDevice()
{
if(m_video_fd<0)
{
return true;
}
if(close(m_video_fd)==-1)
{
std::cerr<<"close video device failed:"<<"errno="<<errno<<",strerror="<<strerror(errno)<<std::endl;
m_video_fd=-1;
return false;
}
m_video_fd=-1;
return true;
}
bool V4L2CaptureVideoData::QueryVideoDeviceCapability(const unsigned int capability)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
v4l2_capability video_cap;
if(GetVideoDeviceCapability(video_cap)==false)
{
return false;
}
if(!(video_cap.capabilities & capability))
{
std::cerr<<m_video_device_name<<" not support this capability="<<capability<<std::endl;
return false;
}
return true;
}
void V4L2CaptureVideoData::StartMmapData()
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return;
}
if(QueryVideoDeviceCapability(V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING)==false)
{
return;
}
struct v4l2_format video_format;
memset(&video_format,'\0',sizeof(video_format));
if(GetVideoDeviceFormat(video_format)==false)
{
return;
}
struct VideoBufferStruct data_buffer;
data_buffer.dev_name=m_video_device_name;
data_buffer.pixel_format=video_format.fmt.pix.pixelformat;
data_buffer.width=video_format.fmt.pix.width;
data_buffer.height=video_format.fmt.pix.height;
const unsigned int video_req_buffer_cnts=4;
struct v4l2_requestbuffers video_req_buffer;
memset(&video_req_buffer,'\0',sizeof(video_req_buffer));
video_req_buffer.count = video_req_buffer_cnts;
video_req_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
video_req_buffer.memory = V4L2_MEMORY_MMAP;
if(RequestVideoBuffer(video_req_buffer)==false)
{
return;
}
void *mmap_addrs[video_req_buffer_cnts];
unsigned int mmap_length[video_req_buffer_cnts];
for(int index=0;index<video_req_buffer_cnts;index++)
{
struct v4l2_buffer video_v4l2_buffer;
memset(&video_v4l2_buffer,'\0',sizeof(video_v4l2_buffer));
if(GetVideoBuffer(V4L2_MEMORY_MMAP,index,video_v4l2_buffer)==false)
{
return;
}
mmap_addrs[index]=mmap(NULL,video_v4l2_buffer.length,PROT_READ | PROT_WRITE, MAP_SHARED,m_video_fd,video_v4l2_buffer.m.offset);
if(mmap_addrs[index]==MAP_FAILED)
{
std::cerr<<"mmap failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return;
}
mmap_length[index]=video_v4l2_buffer.length;
}
for(int index=0;index<video_req_buffer_cnts;index++)
{
struct v4l2_buffer tmp_video_buffer;
memset(&tmp_video_buffer,'\0',sizeof(tmp_video_buffer));
tmp_video_buffer.index=index;
tmp_video_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tmp_video_buffer.memory = V4L2_MEMORY_MMAP;
if(PushVideoBuffer(tmp_video_buffer)==false)
{
return;
}
}
if(StartVideoCapture()==false)
{
return;
}
fd_set fds;
struct timeval tv;
int ret;
while(m_keeprunning==true)
{
FD_ZERO (&fds);
FD_SET (m_video_fd, &fds);
tv.tv_sec = 3;
tv.tv_usec = 0;
ret = select (m_video_fd + 1, &fds, NULL, NULL, &tv);
if (-1 == ret)
{
if (EINTR == errno)
{
continue;
}
else
{
std::cerr<<"select failed:"<<"errno="<<errno<<",strerror="<<strerror(errno)<<std::endl;
break;
}
}
else if (0 == ret)
{
std::cerr<<"select timeout no data available."<<std::endl;
break;
}
else
{
struct v4l2_buffer video_buffer_data;
memset(&video_buffer_data,'\0',sizeof(video_buffer_data));
if(PopVideoBuffer(V4L2_MEMORY_MMAP,video_buffer_data)==false)
{
break;
}
data_buffer.data=mmap_addrs[video_buffer_data.index];
data_buffer.data_len=video_buffer_data.bytesused;
data_buffer.timestamp=video_buffer_data.timestamp.tv_sec*1000000+video_buffer_data.timestamp.tv_usec+getEpochTimeShiftus();
if(m_video_data_callback)
{
m_video_data_callback(data_buffer);
}
if(PushVideoBuffer(video_buffer_data)==false)
{
return;
}
std::this_thread::sleep_for(std::chrono::milliseconds(10));
}
}
if(StopVideoCapture()==false)
{
return;
}
for(int index=0;index<video_req_buffer_cnts;index++)
{
if(munmap(mmap_addrs[index], mmap_length[index])==-1)
{
std::cerr<<"munmap failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
}
}
closeVideoDevice();
}
bool V4L2CaptureVideoData::GetVideoDeviceCapability(v4l2_capability &cap)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_QUERYCAP, &cap);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_QUERYCAP failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
std::cout<<"camera v4l2_capability {"<<std::endl
<<" driver = "<<cap.driver<<std::endl
<<" card = "<<cap.card<<std::endl
<<" bus_info = "<<cap.bus_info<<std::endl
<<" version = "<<cap.version<<std::endl
<<" capabilities = "<<cap.capabilities<<std::endl
<<" device_caps = "<<cap.device_caps<<std::endl
<<"}"<<std::endl;
return true;
}
bool V4L2CaptureVideoData::GetVideoDevicePriority(unsigned int &priority)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_G_PRIORITY, &priority);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_G_PRIORITY failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
std::cout<<"video device get priority = "<<priority<<std::endl;
return true;
}
bool V4L2CaptureVideoData::SetVideoDevicePriority(const unsigned int priority)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
unsigned int bk_priority=priority;
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_S_PRIORITY, &bk_priority);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_S_PRIORITY failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
if(bk_priority!=priority)
{
std::cerr<<"actual video device priority set "<<bk_priority<<" not input value "<<priority<<std::endl;
return false;
}
std::cout<<"video device set priority = "<<priority<<std::endl;
return true;
}
bool V4L2CaptureVideoData::GetVideoDeviceLogStatus()
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_LOG_STATUS, NULL);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_LOG_STATUS failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::EnumVideoDeviceFormat(std::list<struct v4l2_fmtdesc> &fmtdesc)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
fmtdesc.clear();
struct v4l2_fmtdesc tmp_fmtdesc;
memset(&tmp_fmtdesc,'\0',sizeof(tmp_fmtdesc));
tmp_fmtdesc.index = 0;
tmp_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret=0;
while(!((ret == -1) && (errno != EINTR) && (errno != EAGAIN)))
{
ret=ioctl(m_video_fd,VIDIOC_ENUM_FMT,&tmp_fmtdesc);
if(ret==0)
{
fmtdesc.push_back(tmp_fmtdesc);
std::cout<<"camera v4l2_fmtdesc {"<<std::endl
<<" index = "<<tmp_fmtdesc.index<<std::endl
<<" type = "<<tmp_fmtdesc.type<<std::endl
<<" flags = "<<tmp_fmtdesc.flags<<std::endl
<<" description = "<<tmp_fmtdesc.description<<std::endl
<<" pixelformat = "<<std::hex<<tmp_fmtdesc.pixelformat<<std::dec<<std::endl
<<"}"<<std::endl;
tmp_fmtdesc.index++;
}
}
if(fmtdesc.size()==0)
{
std::cerr<<"get video format cout is 0"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::GetVideoDeviceFormat(v4l2_format &fmt)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
fmt.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_G_FMT, &fmt);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_G_FMT failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
std::cout<<"camera v4l2_format {"<<std::endl
<<" type = "<<fmt.type<<std::endl
<<" v4l2_pix_format.width = "<<fmt.fmt.pix.width<<std::endl
<<" v4l2_pix_format.height = "<<fmt.fmt.pix.height<<std::endl
<<" v4l2_pix_format.pixelformat = "<<std::hex<<fmt.fmt.pix.pixelformat<<std::dec<<std::endl
<<" v4l2_pix_format.field = "<<fmt.fmt.pix.field<<std::endl
<<" v4l2_pix_format.bytesperline = "<<fmt.fmt.pix.bytesperline<<std::endl
<<" v4l2_pix_format.sizeimage = "<<fmt.fmt.pix.sizeimage<<std::endl
<<" v4l2_pix_format.colorspace = "<<fmt.fmt.pix.colorspace<<std::endl
<<" v4l2_pix_format.priv = "<<fmt.fmt.pix.priv<<std::endl
<<" v4l2_pix_format.flags = "<<fmt.fmt.pix.flags<<std::endl
<<"}"<<std::endl;
return true;
}
bool V4L2CaptureVideoData::SetVideoDeviceFormat(const v4l2_format &fmt)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
v4l2_format tmp_fmt;
memcpy(&tmp_fmt,&fmt,sizeof(tmp_fmt));
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_S_FMT, &tmp_fmt);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_S_FMT failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
if((tmp_fmt.fmt.pix.width==fmt.fmt.pix.width) &&
(tmp_fmt.fmt.pix.height==fmt.fmt.pix.height) &&
(tmp_fmt.fmt.pix.pixelformat==fmt.fmt.pix.pixelformat))
{
std::cout<<"camera set v4l2_format {"<<std::endl
<<" type = "<<fmt.type<<std::endl
<<" v4l2_pix_format.width = "<<fmt.fmt.pix.width<<std::endl
<<" v4l2_pix_format.height = "<<fmt.fmt.pix.height<<std::endl
<<" v4l2_pix_format.pixelformat = "<<std::hex<<fmt.fmt.pix.pixelformat<<std::dec<<std::endl
<<"}"<<std::endl;
return true;
}
else
{
std::cerr<<"camera actual set v4l2_format {"<<std::endl
<<" type = "<<tmp_fmt.type<<std::endl
<<" v4l2_pix_format.width = "<<tmp_fmt.fmt.pix.width<<std::endl
<<" v4l2_pix_format.height = "<<tmp_fmt.fmt.pix.height<<std::endl
<<" v4l2_pix_format.pixelformat = "<<std::hex<<tmp_fmt.fmt.pix.pixelformat<<std::dec<<std::endl
<<"}"<<std::endl;
return false;
}
}
bool V4L2CaptureVideoData::TrySetVideoDeviceFormat(const v4l2_format &fmt)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
std::cout<<"camera try set v4l2_format {"<<std::endl
<<" type = "<<fmt.type<<std::endl
<<" v4l2_pix_format.width = "<<fmt.fmt.pix.width<<std::endl
<<" v4l2_pix_format.height = "<<fmt.fmt.pix.height<<std::endl
<<" v4l2_pix_format.pixelformat = "<<std::hex<<fmt.fmt.pix.pixelformat<<std::dec<<std::endl
<<" v4l2_pix_format.field = "<<fmt.fmt.pix.field<<std::endl
<<"}"<<std::endl;
//VIDIOC_TRY_FMT may adjust the format, so work on a copy to keep the caller's fmt untouched
v4l2_format tmp_fmt;
memcpy(&tmp_fmt,&fmt,sizeof(tmp_fmt));
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_TRY_FMT, &tmp_fmt);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_TRY_FMT failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::EnumVideoDeviceFrameSize(const unsigned int pixel_format,std::list<struct v4l2_frmsizeenum> &frmsize)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
frmsize.clear();
struct v4l2_frmsizeenum tmp_frmsize;
memset(&tmp_frmsize,'\0',sizeof(tmp_frmsize));
tmp_frmsize.index=0;
tmp_frmsize.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tmp_frmsize.pixel_format = pixel_format;
int ret=0;
while(!((ret == -1) && (errno != EINTR) && (errno != EAGAIN)))
{
ret=ioctl(m_video_fd,VIDIOC_ENUM_FRAMESIZES,&tmp_frmsize);
if(ret==0)
{
frmsize.push_back(tmp_frmsize);
std::cout<<"camera v4l2_frmsizeenum {"<<std::endl
<<" index = "<<tmp_frmsize.index<<std::endl
<<" type = "<<tmp_frmsize.type<<std::endl
<<" pixel_format = "<<std::hex<<tmp_frmsize.pixel_format<<std::dec<<std::endl
<<" v4l2_frmsize_discrete.width = "<<tmp_frmsize.discrete.width<<std::endl
<<" v4l2_frmsize_discrete.height = "<<tmp_frmsize.discrete.height<<std::endl
<<"}"<<std::endl;
tmp_frmsize.index++;
}
}
if(frmsize.size()==0)
{
std::cerr<<"get video frame size cout is 0"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::EnumVideoDeviceFrameIntervals(const unsigned int pixel_format,const unsigned int width,const unsigned int height,std::list<struct v4l2_frmivalenum> &frmivals)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
frmivals.clear();
struct v4l2_frmivalenum tmp_frmival;
memset(&tmp_frmival,'\0',sizeof(tmp_frmival));
tmp_frmival.index = 0;
tmp_frmival.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tmp_frmival.pixel_format = pixel_format;
tmp_frmival.width = width;
tmp_frmival.height = height;
int ret=0;
while(!((ret == -1) && (errno != EINTR) && (errno != EAGAIN)))
{
ret=ioctl(m_video_fd, VIDIOC_ENUM_FRAMEINTERVALS, &tmp_frmival);
if(ret==0)
{
frmivals.push_back(tmp_frmival);
std::cout<<"Frame interval<"<<tmp_frmival.discrete.denominator / tmp_frmival.discrete.numerator<<"fps>"<<std::endl;
std::cout<<"camera v4l2_frmivalenum {"<<std::endl
<<" index = "<<tmp_frmival.index<<std::endl
<<" type = "<<tmp_frmival.type<<std::endl
<<" pixel_format = "<<std::hex<<tmp_frmival.pixel_format<<std::dec<<std::endl
<<" width = "<<tmp_frmival.width<<std::endl
<<" height = "<<tmp_frmival.height<<std::endl
<<" v4l2_fract.numerator = "<<tmp_frmival.discrete.numerator<<std::endl
<<" v4l2_fract.denominator = "<<tmp_frmival.discrete.denominator<<std::endl
<<" numerator/denominator = "<<tmp_frmival.discrete.denominator / tmp_frmival.discrete.numerator<<" fps"<<std::endl
<<"}"<<std::endl;
tmp_frmival.index++;
}
}
if(frmivals.size()==0)
{
std::cerr<<"get video frame intervals cout is 0"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::StartVideoCapture()
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_STREAMON, &type);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_STREAMON failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::StopVideoCapture()
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_STREAMOFF, &type);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_STREAMOFF failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::RequestVideoBuffer(const v4l2_requestbuffers &requestbuf)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
if(ioctl(m_video_fd, VIDIOC_REQBUFS, &requestbuf)==-1)
{
std::cerr<<"ioctl VIDIOC_REQBUFS failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::GetVideoBuffer(const unsigned int memory_type,unsigned int index,struct v4l2_buffer &video_buffer)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
video_buffer.index = index;
video_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
video_buffer.memory = memory_type;
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_QUERYBUF, &video_buffer);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_QUERYBUF failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
std::cout<<"camera v4l2_buffer {"<<std::endl
<<" index = "<<video_buffer.index<<std::endl
<<" type = "<<video_buffer.type<<std::endl
<<" flags = "<<video_buffer.flags<<std::endl
<<" memory = "<<video_buffer.memory<<std::endl
<<" length = "<<video_buffer.length<<std::endl
<<" offset = "<<std::hex<<video_buffer.m.offset<<std::dec<<std::endl
<<" fd = "<<std::hex<<video_buffer.m.fd<<std::dec<<std::endl
<<" userptr = "<<std::hex<<video_buffer.m.userptr<<std::dec<<std::endl
<<"}"<<std::endl;
return true;
}
bool V4L2CaptureVideoData::PushVideoBuffer(const v4l2_buffer &video_buffer)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_QBUF, &video_buffer);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_QBUF failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
return true;
}
bool V4L2CaptureVideoData::PopVideoBuffer(const unsigned int memory_type,struct v4l2_buffer &video_buffer)
{
if(m_video_fd==-1)
{
std::cerr<<"not open video device,must open first"<<std::endl;
return false;
}
video_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
video_buffer.memory = memory_type;
int ret=0;
do
{
ret=ioctl(m_video_fd, VIDIOC_DQBUF, &video_buffer);
} while (ret == -1 && ((errno == EINTR) || (errno == EAGAIN)));
if(ret!=0)
{
std::cerr<<"ioctl VIDIOC_DQBUF failed:("<<"errno="<<errno<<",strerror="<<strerror(errno)<<")"<<std::endl;
return false;
}
return true;
}
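/* The timestamp in struct v4l2_buffer is usually taken from the monotonic clock
(V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC), so this helper returns the offset in microseconds
between the wall-clock epoch and the monotonic clock; adding it to the buffer timestamp
converts the frame time to epoch (wall-clock) time. */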
unsigned long V4L2CaptureVideoData::getEpochTimeShiftus()
{
struct timeval epochtime;
struct timespec vsTime;
gettimeofday(&epochtime, NULL);
clock_gettime(CLOCK_MONOTONIC, &vsTime);
unsigned long uptime_us = vsTime.tv_sec*1000000+(long)round(vsTime.tv_nsec/ 1000.0);
unsigned long epoch_us = epochtime.tv_sec*1000000+epochtime.tv_usec;
return epoch_us - uptime_us;
}
Test code: test.cpp
#include "v4l2mmapmode.hpp"
#include <iostream>
#include <string>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <fcntl.h>
#include <unistd.h>
#include <signal.h>
//signal handler, bound to Ctrl+C (SIGINT)
static void sig_handler(int sig)
{
if (sig == SIGINT)
{
V4L2CaptureVideoData::m_keeprunning = false;
}
}
// save picture to file
static int id_index = 0;
//callback that processes each video frame
void VideoDataFunc(const VideoBufferStruct &video_data)
{
std::cout << "device name:" << video_data.dev_name << ",timestamp:" << video_data.timestamp << ",width:" << video_data.width << ",height:" << video_data.height << ",data length:" << video_data.data_len << std::endl;
std::string filename = "pictures_yuv/test" + std::to_string(id_index) + ".yuv";
//open() does not create the pictures_yuv/ directory, it must already exist
int file_fd = open(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0777);
if (file_fd == -1)
{
std::cerr << "open " << filename << " failed" << std::endl;
return;
}
write(file_fd, video_data.data, video_data.data_len);
close(file_fd);
id_index++;
}
int main(int argc, char *argv[])
{
signal(SIGINT, sig_handler);
V4L2CaptureVideoData camera_read;
//register the frame-processing callback
camera_read.RegisterVideoDataProcessCallback(VideoDataFunc);
//open the video device
if(camera_read.OpenVideoDevice("/dev/video0")==false)
{
return -1;
}
struct v4l2_format video_fmt;
memset(&video_fmt, '\0', sizeof(video_fmt));
//get the video format currently used by the device
if(camera_read.GetVideoDeviceFormat(video_fmt)==false)
{
return -1;
}
//set the video format of the device
video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
video_fmt.fmt.pix.width = 1280;
video_fmt.fmt.pix.height = 720;
video_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
video_fmt.fmt.pix.field = V4L2_FIELD_ANY;
if(camera_read.SetVideoDeviceFormat(video_fmt)==false)
{
return -1;
}
//start capturing data
camera_read.StartMmapData();
return 0;
}
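The two source files can be built together with any C++11-capable compiler, for example g++ v4l2mmapmode.cpp test.cpp -o v4l2_mmap_test (add -std=c++11 on older toolchains); the resulting program opens /dev/video0, switches it to 1280x720 YUYV, and writes each captured frame as a raw .yuv file under pictures_yuv/ until Ctrl+C is pressed.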
Test results