
Implementing Video Capture on Linux

2014-08-26 17:15
  V4L (Video4Linux) is the foundation on which video and audio software on Linux is built. It is used wherever image capture is needed, such as video surveillance, webcams and video telephony, and it is especially common in embedded Linux development. V4L is the programming interface that the Linux kernel exposes to user space: once a driver has been written for a video or audio device, the device can be controlled through the system APIs that V4L provides. In other words, V4L has two layers: the lower layer is the in-kernel driver for the audio/video device, and the upper layer is the API exposed by the system; as application developers, this API is what we work with.
V4L2 is the upgraded version of V4L and defines an interface specification for video devices under Linux, consisting of a set of data structures and the underlying V4L2 driver interface. V4L2 follows a pipeline-style workflow that is simple and intuitive: open the video device, set the format, process the data, and close the device; most of the detailed operations are performed through the ioctl() function.

1. Opening the device
  int open(const char *device_name, int flags);
  int fd = open("/dev/video0", O_RDONLY); // O_NONBLOCK -- non-blocking mode (not recommended)
2. Closing the device: int close(int fd);
  int ret = close(fd);
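A minimal sketch of steps 1 and 2 with basic error checking. The device node /dev/video0 is an assumption; note that the streaming example later in this article opens the device with O_RDWR | O_NONBLOCK, which memory-mapped streaming I/O normally requires.

#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

int open_camera(const char *dev_name)    /* e.g. "/dev/video0" */
{
    int fd = open(dev_name, O_RDWR | O_NONBLOCK);
    if (fd < 0)
        perror("open video device");
    return fd;                           /* -1 on failure */
}

int main(void)
{
    int fd = open_camera("/dev/video0");
    if (fd < 0)
        return -1;
    /* ... configure and capture here ... */
    return close(fd);
}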
3. Querying device capabilities: v4l2_capability
  int ioctl(int fd, int request, struct v4l2_capability *argp);
  struct v4l2_capability
  {
    __u8  driver[16];     // driver name
    __u8  card[32];       // device name
    __u8  bus_info[32];   // location of the device in the system
    __u32 version;        // driver version number
    __u32 capabilities;   // operations the device supports
    __u32 reserved[4];    // reserved
  };
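A short sketch of querying and checking these capabilities with VIDIOC_QUERYCAP (fd is an already opened device; the headers listed in V4l2Camera.h below are assumed to be included):

struct v4l2_capability cap;
memset(&cap, 0, sizeof(cap));
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
    perror("VIDIOC_QUERYCAP");
} else {
    printf("driver: %s, card: %s, bus: %s\n", cap.driver, cap.card, cap.bus_info);
    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
        printf("supports video capture\n");
    if (cap.capabilities & V4L2_CAP_STREAMING)
        printf("supports streaming I/O\n");
}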

4. Setting the video standard and format
Related functions:
int ioctl(int fd, int request, struct v4l2_fmtdesc *argp);
int ioctl(int fd, int request, struct v4l2_format *argp);

Related structures:
The v4l2_cropcap structure describes the camera's capture capabilities. Before capturing, first set the type field of v4l2_cropcap, then issue the VIDIOC_CROPCAP command to read the device's capture parameters into the structure, including bounds (the top-left corner and width/height of the largest capture rectangle) and defrect (the top-left corner and width/height of the default capture rectangle); a cropping sketch follows below.
The v4l2_format structure sets the video standard, frame format and so on. Fill in its fields first, such as type (stream type), fmt.pix.width (width), fmt.pix.height (height), fmt.pix.field (field order, e.g. interlaced) and fmt.pix.pixelformat (pixel format, e.g. YUV 4:2:2), then issue the VIDIOC_S_FMT command to set the capture format.
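A hedged sketch of the cropping step just described, resetting the capture rectangle to the driver's default; it mirrors the corresponding code in InitDevice() further down (fd is an opened capture device):

struct v4l2_cropcap cropcap;
struct v4l2_crop crop;
memset(&cropcap, 0, sizeof(cropcap));
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_CROPCAP, &cropcap) == 0) {
    memset(&crop, 0, sizeof(crop));
    crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    crop.c = cropcap.defrect;          /* reset to the default rectangle */
    ioctl(fd, VIDIOC_S_CROP, &crop);   /* cropping may be unsupported; the error can be ignored */
}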
struct v4l2_fmtdesc
{
__u32 index;              // index of the format to query, set by the application
enum v4l2_buf_type type;  // buffer type, set by the application
__u32 flags;              // whether the format is compressed
__u8  description[32];    // format name
__u32 pixelformat;        // pixel format (fourcc)
__u32 reserved[4];        // reserved
};
Every pixel format code is built with the macro below, and the formats a device actually supports can be enumerated with VIDIOC_ENUM_FMT and v4l2_fmtdesc:
#define v4l2_fourcc(a, b, c, d) ((a) | ((b) << 8) | ((c) << 16) | ((d) << 24))
All formats: VIDIOC_ENUM_FMT, v4l2_fmtdesc
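A short sketch that enumerates every capture format the driver reports (fd is an opened device):

struct v4l2_fmtdesc fmtdesc;
memset(&fmtdesc, 0, sizeof(fmtdesc));
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
    printf("format %u: %s (0x%08x)\n",
           fmtdesc.index, fmtdesc.description, fmtdesc.pixelformat);
    fmtdesc.index++;
}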

struct v4l2_format
{
enum v4l2_buf_type type;               // buffer type, set by the application
union
{
struct v4l2_pix_format pix;            // used by video capture devices
struct v4l2_window win;                // video overlay
struct v4l2_vbi_format vbi;            // raw VBI capture
struct v4l2_sliced_vbi_format sliced;  // sliced VBI capture
__u8 raw_data[200];                    // placeholder for future extensions
} fmt;
};

struct v4l2_pix_format
{
__u32 width;                      // frame width in pixels
__u32 height;                     // frame height in pixels
__u32 pixelformat;                // pixel format (fourcc)
enum v4l2_field field;            // field order, e.g. interlaced
__u32 bytesperline;               // bytes per line, including padding
__u32 sizeimage;                  // size of one frame in bytes
enum v4l2_colorspace colorspace;  // colorspace
__u32 priv;                       // private data, format dependent
};
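Putting the two structures together, a minimal sketch of requesting a 640x480 YUYV capture format with VIDIOC_S_FMT; the driver may adjust the requested values, so the structure should be read back after the call (the resolution and pixel format here are only example values):

struct v4l2_format fmt;
memset(&fmt, 0, sizeof(fmt));
fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width       = 640;
fmt.fmt.pix.height      = 480;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) {
    perror("VIDIOC_S_FMT");
} else {
    printf("actual size: %ux%u, %u bytes per frame\n",
           fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.sizeimage);
}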

5. Reading and setting the frame rate
int ioctl(int fd, int request, struct v4l2_streamparm *argp);
struct v4l2_streamparm parm;
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
VIDIOC_G_PARM reads the current frame rate and VIDIOC_S_PARM sets it.
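A sketch of reading the current rate and then requesting 30 frames per second (an example value); the time per frame is expressed as numerator/denominator seconds:

struct v4l2_streamparm parm;
memset(&parm, 0, sizeof(parm));
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_G_PARM, &parm) == 0)
    printf("current: %u/%u s per frame\n",
           parm.parm.capture.timeperframe.numerator,
           parm.parm.capture.timeperframe.denominator);

parm.parm.capture.timeperframe.numerator   = 1;
parm.parm.capture.timeperframe.denominator = 30;   /* 30 fps */
if (ioctl(fd, VIDIOC_S_PARM, &parm) < 0)
    perror("VIDIOC_S_PARM");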

6. Defining a custom ioctl command
All of the existing I/O control commands are listed in the kernel header kernel/include/linux/videodev2.h; a custom command can be added there, for example:
/*
* Experimental, third param 0--video, 1--tracking
*/
#define VIDIOC_POCCESS_NOTIFY _IOW('V', 99, int) //add by Henry.Wen 20131126
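Assuming a driver actually implements the command above, user space would invoke it like any other ioctl; this is only an illustration of the calling convention implied by _IOW('V', 99, int):

int mode = 1;   /* 0 -- video, 1 -- tracking, per the comment above */
if (ioctl(fd, VIDIOC_POCCESS_NOTIFY, &mode) < 0)
    perror("VIDIOC_POCCESS_NOTIFY");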

Example:

  V4l2Camera.h

#ifndef CAMERA_V4L2CAMERA_H
#define CAMERA_V4L2CAMERA_H

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <getopt.h>             /* getopt_long() */
#include <fcntl.h>              /* low-level i/o */
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>          /* for videodev2.h */
#include <linux/videodev2.h>

#define CONFIG_CAMERA_UVC_INVAL_FRAMECNT    5

namespace v4l2
{
/**
* return error code
*/
enum _RET_ERROR_CODE
{
RET_ERROR_FAIL = -1,
RET_ERROR_OK = 0,
RET_ERROR_CAPTURE_NULL = -9999,
RET_ERROR_CAPTURE_NAME,
RET_ERROR_CAPTURE_CAPABILITY,
RET_ERROR_CAPTURE_FORMAT,
RET_ERROR_CAPTURE_BUFFER,
RET_ERROR_CAPTURE_OUTMEMORY,
RET_ERROR_CAPTURE_MMAP,
RET_ERROR_CAPTURE_FORM,
RET_ERROR_CAPTURE_MUMAP,
RET_ERROR_CAPTURE_VIDIOC,
};

/**
*  Name: video_format enum
*  Function: Describe formats V4L2 will support
*/
typedef enum _pFormat
{
UNKNOWN,
YUYV,
MJPEG,
YV12,
YU12,
NV12,
NV21,
H264,
}pFormat;

/**
* frame width, height and buffer information
*/
typedef struct _V4l2Info
{
unsigned int width;
unsigned int height;
unsigned int stepWidth;
unsigned int length;
void*        buffer;
}V4l2Info;

/**
* capture properties
*/
typedef struct _V4l2Capture
{
pFormat      format;
char         name[31];//dev_name
int          fd;
unsigned int rate;
unsigned int quality;
unsigned int brightness;
V4l2Info     v4l2Info;
}V4l2Capture;

/**
* simple V4L2 camera capture wrapper
*/
class V4l2Camera
{
public:
V4l2Camera();
virtual ~V4l2Camera();

public:
/**
* get the number of cameras
*
* @return the number of camera
*/
static int getNumberOfCameras();

/**
* initialize v4l2 device
* @param capture v4l2 capture handle
* @param format pixel format
* @param name device node name, e.g. /dev/video0
* @param rate frame rate
* @param width frame width
* @param height frame height
*
* @return 0/other successful or failure
*/
int InitDevice(V4l2Capture *capture, pFormat format, const char* name, unsigned int rate, unsigned int width, unsigned int height);

/**
* uninitialize v4l2 device and release its resources
* @param capture v4l2 capture handle
*
* @return 0/other successful or failure
*/
int UninitDevice(V4l2Capture *capture);

/**
* Set v4l2 device brightness
* @param capture v4l2 capture handle
* @param value brightness value
*
* @return 0/other successful or failure
*/
int SetBrightness(V4l2Capture *capture, unsigned int value);

/**
* start v4l2 device
* @param fd v4l2 device file descriptor
*
* @return 0/other successful or failure
*/
int StartDevice(int fd);

/**
* stop v4l2 device
* @param fd v4l2 device file descriptor
*
* @return 0/other successful or failure
*/
int StopDevice(int fd);

/**
* Get frame data
* @param capture v4l2 capture handle
*
* @return 0/other successful or failure
*/
int GetFrame(V4l2Capture *capture);

private:
int InitMmap(int fd);

int xioctl(int fd, int request, void *arg);

unsigned int GetCameraFormat(pFormat format);

int AdjustV4l2Info(unsigned int& width, unsigned int& height);

int MSleep(int fd, unsigned int msec);

int MatchCameraAuto(int cameraId);

private:
typedef struct _Buffers
{
void *start;
size_t length;
}V4l2Buffers;

int                    m_stime;
V4l2Buffers*           m_buffers;
unsigned int           m_nBuffers;
V4l2Capture*           m_capture;

static int mNumberOfCameras;
static int mCameraIndex[10];
int mUsbCameraIvalidFrameCnt;
bool m_InitDevice;
};
}//end namespace
#endif //CAMERA_V4L2CAMERA_H


  V4l2Camera.cpp

#include "V4l2Camera.h"

namespace v4l2
{
#define BUFFERS_COUNT  4
#define ARRAY_LEN(a) (sizeof(a) / sizeof(a[0]))
#define MEMST_VALUE(x) memset(&(x), 0, sizeof (x))

const V4l2Info g_v4l2Info[] = {{160, 120}, {320, 240}, {640, 480}, {1024, 768}, {1200, 900}, {1440, 1080}, {1600, 900}, {1600, 1200}};

V4l2Camera::V4l2Camera()
{
// TODO Auto-generated constructor stub
m_capture = NULL;
mUsbCameraIvalidFrameCnt = 0;
m_InitDevice = false;
}

V4l2Camera::~V4l2Camera()
{
// TODO Auto-generated destructor stub
if(m_capture)
UninitDevice(m_capture);
}

int V4l2Camera::xioctl(int fd, int request, void *arg)
{
int r;
int nCount= 0;

do
{
r = ioctl (fd, request, arg);
}while (RET_ERROR_FAIL == r && (EINTR == errno) && (++nCount < 100));

return r;
}

int V4l2Camera::mNumberOfCameras = 0;
int V4l2Camera::mCameraIndex[] = {0};

unsigned int V4l2Camera::GetCameraFormat(pFormat format)
{
unsigned int ret = UNKNOWN;
switch(format)
{
case YUYV:
ret = V4L2_PIX_FMT_YUYV;
break;
case MJPEG:
ret = V4L2_PIX_FMT_MJPEG;
break;
case YV12:
ret = V4L2_PIX_FMT_YVU420;
break;
case YU12:
ret = V4L2_PIX_FMT_YUV420;
break;
case NV12:
ret = V4L2_PIX_FMT_NV12;
break;
case NV21:
ret = V4L2_PIX_FMT_NV21;
break;
case H264:
ret = V4L2_PIX_FMT_MPEG;
break;
default:
break;
}
return ret;
}

int V4l2Camera::InitMmap(int fd)
{
int ret = RET_ERROR_OK;
struct v4l2_requestbuffers req;
MEMST_VALUE(req); // the reserved fields must be zeroed before VIDIOC_REQBUFS
req.count = BUFFERS_COUNT;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;

if (RET_ERROR_FAIL == xioctl (fd, VIDIOC_REQBUFS, &req) || req.count < 2)
{
return RET_ERROR_CAPTURE_BUFFER;
}

m_buffers = (V4l2Buffers*)calloc(req.count, sizeof(V4l2Buffers));

if (!m_buffers)
{
return RET_ERROR_CAPTURE_OUTMEMORY;
}

for (m_nBuffers = 0; m_nBuffers < req.count; ++m_nBuffers)
{
struct v4l2_buffer buf;

MEMST_VALUE (buf);

buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = m_nBuffers;

if (RET_ERROR_FAIL == xioctl (fd, VIDIOC_QUERYBUF, &buf))
{
ret = RET_ERROR_CAPTURE_BUFFER;
break;
}

m_buffers[m_nBuffers].length = buf.length;
m_buffers[m_nBuffers].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);

if (MAP_FAILED == m_buffers[m_nBuffers].start)
{
ret = RET_ERROR_CAPTURE_MMAP;
break;
}
}
return ret;
}

int V4l2Camera::AdjustV4l2Info(unsigned int& width, unsigned int& height)
{
int ret = RET_ERROR_FAIL;
int index = 0;
for(int nCount = ARRAY_LEN(g_v4l2Info) - 1, i = nCount; i >= 0; --i)
{
if(width <= (g_v4l2Info[i].width + 50))
{
index = i;
ret = RET_ERROR_OK;
}
else if(0 != nCount)
{
width  = g_v4l2Info[index].width;
height = g_v4l2Info[index].height;
break;
}
}
return ret;
}

int V4l2Camera::getNumberOfCameras()
{
char cam_path[20];
int fd = -1, i=0;
struct v4l2_capability capability;

mNumberOfCameras = 0;
memset(mCameraIndex,0x00,sizeof(mCameraIndex));

for (i = 0; i < 10; ++i)
{
memset(cam_path,0x00,20);
sprintf(cam_path, "/dev/video%d",i);
fd = open(cam_path, O_RDONLY);
if (fd < 0)
continue;

memset(&capability, 0, sizeof(struct v4l2_capability));
if (ioctl(fd, VIDIOC_QUERYCAP, &capability) < 0)
{
//LOGE("Video device(%s): query capability not supported.\n", cam_path);
goto loop_continue;
}

if ((capability.capabilities
& (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING))
== (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING))
{
mCameraIndex[mNumberOfCameras] = i;
mNumberOfCameras++;
}

loop_continue:
if (fd > 0)
{
close(fd);
fd = -1;
}
continue;
}

return mNumberOfCameras;
}

int V4l2Camera::MatchCameraAuto(int cameraId)
{
if (mNumberOfCameras <= 0 || mNumberOfCameras > 10 || cameraId < 0)
{
return -1;
}

// search for camera ID normally
for (int i = 0; i < mNumberOfCameras; ++i)
{
if (cameraId == mCameraIndex[i])
{
return cameraId;
}
}

if (mNumberOfCameras == 1)
{
return mCameraIndex[0];
}

return cameraId > mCameraIndex[mNumberOfCameras -1] ? mCameraIndex[mNumberOfCameras -1] : mCameraIndex[0];
}

int V4l2Camera::InitDevice(V4l2Capture *capture, pFormat format, const char* name, unsigned int rate, unsigned int width, unsigned int height)
{
// TODO Auto-generated function stub
struct stat st;
int fd = -1, nRealCameraNameLen = 0, nMinCameraNameLen = 0; // fd starts at -1 so a failure before open() does not close fd 0
int ret = RET_ERROR_OK;
char szcameraID[4], szCameraName[20];
int cameraId = 0;
struct v4l2_capability cap;
struct v4l2_cropcap cropcap;
struct v4l2_crop crop;

if(NULL == capture || NULL == name || 0 == rate || 0 == width || 0 == height) {
ret = RET_ERROR_CAPTURE_NULL;
goto InitDeviceFAILED;
}

if ((nRealCameraNameLen = strlen(name)) < (nMinCameraNameLen =strlen("/dev/video0"))) {
ret = RET_ERROR_CAPTURE_NULL;
goto InitDeviceFAILED;
}
// Get camera ID

memset(szcameraID,0x00,4);
for (int i=0;i<3;i++) {
if (nRealCameraNameLen >= (nMinCameraNameLen + i))
szcameraID[i] = name[nMinCameraNameLen - 1 + i];
}

cameraId = atoi(szcameraID);
mNumberOfCameras = 0;
memset(mCameraIndex, 0x00, sizeof(mCameraIndex));
if (0 == getNumberOfCameras()) {
//LOGE("There is NO camera!");
ret = RET_ERROR_CAPTURE_NAME;
goto InitDeviceFAILED;
}

if (-1 == (cameraId = MatchCameraAuto(cameraId))) {
//LOGE("There is NO camera!");
ret = RET_ERROR_CAPTURE_NAME;
goto InitDeviceFAILED;
}

memset(szCameraName, 0x00, 20);
sprintf(szCameraName, "/dev/video%d", cameraId);
//LOGI("camera name is %s.", name);
//
if ((RET_ERROR_FAIL == stat (szCameraName, &st)) || (!S_ISCHR (st.st_mode))
|| (RET_ERROR_FAIL == (fd = open(szCameraName, O_RDWR | O_NONBLOCK, 0))))
{
ret = RET_ERROR_CAPTURE_NAME;
goto InitDeviceFAILED;
}

if (RET_ERROR_FAIL == xioctl(fd, VIDIOC_QUERYCAP, &cap) || !(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
|| !(cap.capabilities & V4L2_CAP_STREAMING))
{
ret = RET_ERROR_CAPTURE_CAPABILITY;
goto InitDeviceFAILED;
}

MEMST_VALUE(cropcap);
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

if(RET_ERROR_OK == xioctl (fd, VIDIOC_CROPCAP, &cropcap))
{
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect; /* reset to default */

xioctl(fd, VIDIOC_S_CROP, &crop);
}

struct v4l2_format fmt;
MEMST_VALUE (fmt);

ret = GetCameraFormat(format);
if(UNKNOWN == ret) {
ret = RET_ERROR_CAPTURE_FORMAT; // set the error code and fall through to cleanup instead of leaking fd
goto InitDeviceFAILED;
}

if(RET_ERROR_OK != AdjustV4l2Info(width, height))
{
ret = RET_ERROR_CAPTURE_FORM;
goto InitDeviceFAILED;
}

//SetBrightness(capture, capture->brightness);
fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width       = width;
fmt.fmt.pix.height      = height;
fmt.fmt.pix.pixelformat = ret;
fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
printf("%s(%d) pixel format is %d\n", __FUNCTION__,__LINE__,ret);

if (RET_ERROR_FAIL == xioctl (fd, VIDIOC_S_FMT, &fmt)) {
ret = RET_ERROR_CAPTURE_FORMAT;
goto InitDeviceFAILED;
}

if(RET_ERROR_OK != (ret = InitMmap(fd))) { // optionally also: RET_ERROR_OK != (ret = SetBrightness(capture, capture->brightness))
goto InitDeviceFAILED;
}

// set video frame rate
struct v4l2_streamparm parm;
MEMST_VALUE(parm);
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (RET_ERROR_OK != ioctl(fd, VIDIOC_G_PARM, &parm)) {
//LOGI("VIDIOC_G_PARM fail....");
}

parm.parm.capture.timeperframe.numerator = 1;
parm.parm.capture.timeperframe.denominator = rate;
if (RET_ERROR_OK != ioctl(fd, VIDIOC_S_PARM, &parm)) {
//LOGI("VIDIOC_S_PARM  Fail....");
}

//check setting of frame rate
memset(&parm, 0x00, sizeof(v4l2_streamparm));
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

ret = ioctl(fd, VIDIOC_G_PARM, &parm);
if (ret != RET_ERROR_OK) {
//LOGI("VIDIOC_G_PARM fail....");
}

capture->fd = fd;
capture->v4l2Info.width = fmt.fmt.pix.width;
capture->v4l2Info.height = fmt.fmt.pix.height;
capture->format = format;
capture->rate = rate;
capture->v4l2Info.length = fmt.fmt.pix.sizeimage;
capture->v4l2Info.stepWidth = fmt.fmt.pix.bytesperline;
capture->v4l2Info.buffer = malloc(fmt.fmt.pix.sizeimage);
strncpy(capture->name, name, sizeof(capture->name) - 1);
capture->name[sizeof(capture->name) - 1] = '\0'; // strncpy does not always null-terminate

m_stime = 1000 / rate;
m_capture = capture;
m_InitDevice = true;

return ret;

InitDeviceFAILED:
if (fd >= 0)
{
close(fd);
fd = -1;
}

return ret;
}

int V4l2Camera::UninitDevice(V4l2Capture *capture)
{
// TODO Auto-generated function stub
printf("%s(%d)...[BEGIN]\n", __FUNCTION__, __LINE__);
int ret = RET_ERROR_OK;
if (!m_InitDevice) {
return ret;
} else {
m_InitDevice = false;
}
if (m_buffers) {
for (unsigned int i = 0; i < m_nBuffers; ++i) {
printf("%s(%d) munmap() i = %d\n", __FUNCTION__, __LINE__, i);
if (RET_ERROR_FAIL == munmap(m_buffers[i].start, m_buffers[i].length)) {
ret = RET_ERROR_CAPTURE_MUMAP;
break;
}
}

if (RET_ERROR_OK == ret) {
printf("%s(%d) free(m_buffers)\n", __FUNCTION__, __LINE__);
free(m_buffers);
}
m_buffers = NULL;
if (capture) {
if (capture->v4l2Info.buffer) {
printf("%s(%d) free(capture->v4l2Info.buffer)\n", __FUNCTION__,
__LINE__);
free(capture->v4l2Info.buffer);
capture->v4l2Info.buffer = NULL;
}

if (capture->fd >= 0) {
printf("%s(%d) close(capture->fd)\n", __FUNCTION__, __LINE__);
ret = close(capture->fd);
}
}

}
printf("%s(%d)...[END]\n", __FUNCTION__, __LINE__);
return ret;
}

int V4l2Camera::SetBrightness(V4l2Capture *capture, unsigned int value)
{
// TODO Auto-generated function stub
if(!capture || value > 10000)
return RET_ERROR_FAIL;

struct v4l2_control control;
control.id = V4L2_CID_BRIGHTNESS;
control.value = value;

if(RET_ERROR_FAIL == xioctl(capture->fd, VIDIOC_S_CTRL, &control))
{
return RET_ERROR_FAIL;
}
capture->brightness = control.value;

return RET_ERROR_OK;
}

int V4l2Camera::StartDevice(int fd)
{
// TODO Auto-generated function stub
if(fd < 0)
return RET_ERROR_FAIL;

int ret = RET_ERROR_OK;
for (unsigned int i = 0; i < m_nBuffers; ++i)
{
struct v4l2_buffer buf;
MEMST_VALUE(buf);

buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory      = V4L2_MEMORY_MMAP;
buf.index       = i;

if (RET_ERROR_FAIL == xioctl(fd, VIDIOC_QBUF, &buf))
{
ret = RET_ERROR_CAPTURE_VIDIOC;
break;
}
}

enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

if (RET_ERROR_FAIL == xioctl(fd, VIDIOC_STREAMON, &type))
ret = RET_ERROR_CAPTURE_VIDIOC;

return ret;
}

int V4l2Camera::StopDevice(int fd)
{
// TODO Auto-generated function stub
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (RET_ERROR_FAIL == xioctl(fd, VIDIOC_STREAMOFF, &type))
{
return RET_ERROR_FAIL;
}
return 0;
}

int V4l2Camera::MSleep(int fd, unsigned int msec)
{
fd_set fds;
FD_ZERO (&fds);
FD_SET (fd, &fds);

struct timeval tv;
tv.tv_sec = msec / 1000;           // msec is in milliseconds
tv.tv_usec = (msec % 1000) * 1000;

return select (fd + 1, &fds, NULL, NULL, &tv);
}

int V4l2Camera::GetFrame(V4l2Capture *capture)
{
// TODO Auto-generated function stub
if(!capture)
return RET_ERROR_FAIL;

int fd = capture->fd;

if(RET_ERROR_FAIL == MSleep(capture->fd, m_stime))
return RET_ERROR_FAIL;

struct v4l2_buffer buf;

MEMST_VALUE(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.reserved = 0;

// Skip the first CONFIG_CAMERA_UVC_INVAL_FRAMECNT video frames
// because they are possibly invalid
if (mUsbCameraIvalidFrameCnt< CONFIG_CAMERA_UVC_INVAL_FRAMECNT) {
mUsbCameraIvalidFrameCnt++;
if(xioctl(fd, VIDIOC_DQBUF, &buf) >= 0) {
xioctl(fd, VIDIOC_QBUF, &buf);
}
return RET_ERROR_OK;
}

if(RET_ERROR_FAIL == xioctl(fd, VIDIOC_DQBUF, &buf) || buf.index >=  m_nBuffers)
{
return RET_ERROR_FAIL;
}

memcpy(capture->v4l2Info.buffer, m_buffers[buf.index].start, buf.bytesused);

if(RET_ERROR_FAIL == xioctl(fd, VIDIOC_QBUF, &buf))
return RET_ERROR_FAIL;

return RET_ERROR_OK;
}
}//end namespace
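A hypothetical usage sketch of the class above; the device name, pixel format, frame rate and resolution are example values, and error handling is reduced to early returns:

#include "V4l2Camera.h"

int main()
{
    v4l2::V4l2Capture capture;
    memset(&capture, 0, sizeof(capture));

    v4l2::V4l2Camera camera;
    if (v4l2::RET_ERROR_OK != camera.InitDevice(&capture, v4l2::YUYV, "/dev/video0", 30, 640, 480))
        return -1;
    if (v4l2::RET_ERROR_OK != camera.StartDevice(capture.fd))
        return -1;

    for (int i = 0; i < 100; ++i)   // grab 100 frames
    {
        if (v4l2::RET_ERROR_OK == camera.GetFrame(&capture))
        {
            // capture.v4l2Info.buffer now holds one frame of at most
            // capture.v4l2Info.length bytes, ready for processing
        }
    }

    camera.StopDevice(capture.fd);
    camera.UninitDevice(&capture);
    return 0;
}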


  