
WebCam / network camera (7): CMOS sensor -- YUV and RGB, Format............: V4L2_PIX_FMT_YUYV

2011-11-27 08:16
For the basics of color systems, refer to http://bbs.chinavideo.org/viewthread.php?tid=4143
Common RGB formats include RGB1, RGB4, RGB8, RGB565, RGB555, RGB24, RGB32 and ARGB32.

Common YUV formats include YUY2, YUYV, YVYU, UYVY, AYUV, Y41P, Y411, Y211, IF09, IYUV, YV12, YVU9, YUV411, YUV420 and so on.
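The format discussed in this article, V4L2_PIX_FMT_YUYV (also known as YUY2), is a packed 4:2:2 layout: each pair of horizontally adjacent pixels is stored as the four bytes Y0 U Y1 V, so the two pixels share one U and one V sample and a full frame occupies width * height * 2 bytes.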

The formulas for converting between YUV and RGB are as follows (R, G and B all range from 0 to 255):

Y = 0.299R + 0.587G + 0.114B

U = -0.147R - 0.289G + 0.436B

V = 0.615R - 0.515G - 0.100B

R = Y + 1.14V

G = Y - 0.39U - 0.58V

B = Y + 2.03U
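As a concrete illustration of these formulas, here is a minimal sketch (not taken from the plugin source) that converts one 4-byte YUYV pixel pair into two RGB24 pixels using the approximate coefficients above; note that U and V are stored with an offset of 128 in V4L2_PIX_FMT_YUYV:

#include <stdint.h>

/* clamp an intermediate result into the 0..255 range */
static inline uint8_t clamp_u8(int v) { return v < 0 ? 0 : (v > 255 ? 255 : v); }

/* Convert one YUYV pixel pair (Y0 U Y1 V) into two packed RGB24 pixels,
 * a minimal sketch of the formulas above. */
static void yuyv_pair_to_rgb24(const uint8_t yuyv[4], uint8_t rgb[6])
{
  int y0 = yuyv[0], u = yuyv[1] - 128, y1 = yuyv[2], v = yuyv[3] - 128;

  rgb[0] = clamp_u8(y0 + 1.14 * v);               /* R = Y + 1.14V         */
  rgb[1] = clamp_u8(y0 - 0.39 * u - 0.58 * v);    /* G = Y - 0.39U - 0.58V */
  rgb[2] = clamp_u8(y0 + 2.03 * u);               /* B = Y + 2.03U         */

  rgb[3] = clamp_u8(y1 + 1.14 * v);               /* the second pixel reuses the same U/V */
  rgb[4] = clamp_u8(y1 - 0.39 * u - 0.58 * v);
  rgb[5] = clamp_u8(y1 + 2.03 * u);
}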

/*******************************************************************************************************/

[root@localhost input_cmoscamera]# pwd
/opt/FriendlyArm/mini2440/WebCam/mjpg-streamer/mjpg-streamer/plugins/input_cmoscamera
[root@localhost input_cmoscamera]# tree
.
|-- Makefile
|-- Makefile~
|-- dynctrl.c
|-- dynctrl.h
|-- dynctrl.lo
|-- huffman.h
|-- input_cmoscamera.so
|-- input_uvc.c
|-- jpeg_utils.c
|-- jpeg_utils.h
|-- jpeg_utils.lo
|-- uvc_compat.h
|-- uvcvideo.h
|-- v4l2uvc.c
|-- v4l2uvc.c~
|-- v4l2uvc.h
`-- v4l2uvc.lo

0 directories, 17 files

http://blog.csdn.net/songqqnew/article/details/7012927
Reading data from the CMOS camera works much the same way as in that article. Here,

input_uvc.c corresponds to input_s3c2410.c there -- the part that calls the functions in v4l2uvc.c;

v4l2uvc.c corresponds to s3c2410.c there -- the part that talks directly to the driver.

A few notes:

mjpg-streamer/plugins/input_cmoscamera/input_uvc.c

int input_init(input_parameter *param)
{
  char *argv[MAX_ARGUMENTS]={NULL}, *dev = "/dev/camera", *s;
  int argc=1, width=640, height=512, fps=5, format=V4L2_PIX_FMT_YUYV, i;
  in_cmd_type led = IN_CMD_LED_AUTO;

  /* initialize the mutes variable */
  if( pthread_mutex_init(&controls_mutex, NULL) != 0 ) {
    IPRINT("could not initialize mutex variable\n");
    exit(EXIT_FAILURE);
  }

  /* convert the single parameter-string to an array of strings */
  argv[0] = INPUT_PLUGIN_NAME;
  if ( param->parameter_string != NULL && strlen(param->parameter_string) != 0 ) {
    char *arg=NULL, *saveptr=NULL, *token=NULL;

    arg=(char *)strdup(param->parameter_string);

    if ( strchr(arg, ' ') != NULL ) {
      token=strtok_r(arg, " ", &saveptr);
      if ( token != NULL ) {
        argv[argc] = strdup(token);
        argc++;
        while ( (token=strtok_r(NULL, " ", &saveptr)) != NULL ) {
          argv[argc] = strdup(token);
          argc++;
          if (argc >= MAX_ARGUMENTS) {
            IPRINT("ERROR: too many arguments to input plugin\n");
            return 1;
          }
        }
      }
    }
  }

  /* show all parameters for DBG purposes */
  for (i=0; i<argc; i++) {
    DBG("argv[%d]=%s\n", i, argv[i]);
  }

  /* parse the parameters */
  reset_getopt();
  while(1) {
    int option_index = 0, c=0;
    static struct option long_options[] = \
    {
      {"h", no_argument, 0, 0},
      {"help", no_argument, 0, 0},
      {"d", required_argument, 0, 0},
      {"device", required_argument, 0, 0},
      {"r", required_argument, 0, 0},
      {"resolution", required_argument, 0, 0},
      {"f", required_argument, 0, 0},
      {"fps", required_argument, 0, 0},
      {"y", no_argument, 0, 0},
      {"yuv", no_argument, 0, 0},
      {"q", required_argument, 0, 0},
      {"quality", required_argument, 0, 0},
      {"m", required_argument, 0, 0},
      {"minimum_size", required_argument, 0, 0},
      {"n", no_argument, 0, 0},
      {"no_dynctrl", no_argument, 0, 0},
      {"l", required_argument, 0, 0},
      {"led", required_argument, 0, 0},
      {0, 0, 0, 0}
    };

    /* parsing all parameters according to the list above is sufficent */
    c = getopt_long_only(argc, argv, "", long_options, &option_index);

    /* no more options to parse */
    if (c == -1) break;

    /* unrecognized option */
    if (c == '?'){
      help();
      return 1;
    }

    /* dispatch the given options */
    switch (option_index) {
      /* h, help */
      case 0:
      case 1:
        DBG("case 0,1\n");
        help();
        return 1;
        break;

      /* d, device */
      case 2:
      case 3:
        DBG("case 2,3\n");
        dev = strdup(optarg);
        break;

      /* r, resolution */
      case 4:
      case 5:
        DBG("case 4,5\n");
        width = -1;
        height = -1;

        /* try to find the resolution in lookup table "resolutions" */
        for ( i=0; i < LENGTH_OF(resolutions); i++ ) {
          if ( strcmp(resolutions[i].string, optarg) == 0 ) {
            width  = resolutions[i].width;
            height = resolutions[i].height;
          }
        }
        /* done if width and height were set */
        if(width != -1 && height != -1)
          break;
        /* parse value as decimal value */
        width  = strtol(optarg, &s, 10);
        height = strtol(s+1, NULL, 10);
        break;

      /* f, fps */
      case 6:
      case 7:
        DBG("case 6,7\n");
        fps=atoi(optarg);
        break;

      /* y, yuv */
      case 8:
      case 9:
        DBG("case 8,9\n");
        format = V4L2_PIX_FMT_YUYV;
        break;

      /* q, quality */
      case 10:
      case 11:
        DBG("case 10,11\n");
        format = V4L2_PIX_FMT_YUYV;
        gquality = MIN(MAX(atoi(optarg), 0), 100);
        break;

      /* m, minimum_size */
      case 12:
      case 13:
        DBG("case 12,13\n");
        minimum_size = MAX(atoi(optarg), 0);
        break;

      /* n, no_dynctrl */
      case 14:
      case 15:
        DBG("case 14,15\n");
        dynctrls = 0;
        break;

      /* l, led */
      case 16:
      case 17:
        DBG("case 16,17\n");
        if ( strcmp("on", optarg) == 0 ) {
          led = IN_CMD_LED_ON;
        } else if ( strcmp("off", optarg) == 0 ) {
          led = IN_CMD_LED_OFF;
        } else if ( strcmp("auto", optarg) == 0 ) {
          led = IN_CMD_LED_AUTO;
        } else if ( strcmp("blink", optarg) == 0 ) {
          led = IN_CMD_LED_BLINK;
        }
        break;

      default:
        DBG("default case\n");
        help();
        return 1;
    }
  }

  /* keep a pointer to the global variables */
  pglobal = param->global;

  /* allocate webcam datastructure */
  videoIn = malloc(sizeof(struct vdIn));
  if ( videoIn == NULL ) {
    IPRINT("not enough memory for videoIn\n");
    exit(EXIT_FAILURE);
  }
  memset(videoIn, 0, sizeof(struct vdIn));

  /* display the parsed values */
  IPRINT("Using CMOS Camera device.: %s\n", dev);
  IPRINT("Desired Resolution: %i x %i\n", width, height);
  IPRINT("Frames Per Second.: %i\n", fps);
  IPRINT("Format............: %s\n", (format==V4L2_PIX_FMT_YUYV)?"YUV":"MJPEG");
  if ( format == V4L2_PIX_FMT_YUYV )
    IPRINT("JPEG Quality......: %d\n", gquality);

  /* open video device and prepare data structure */
  if (init_videoIn(videoIn, dev, width, height, fps, format, 1) < 0) {
    IPRINT("init_VideoIn failed\n");
    closelog();
    exit(EXIT_FAILURE);
  }

  /*
   * recent linux-uvc driver (revision > ~#125) requires to use dynctrls
   * for pan/tilt/focus/...
   * dynctrls must get initialized
   */
  if (dynctrls)
    initDynCtrls(videoIn->fd);

  /*
   * switch the LED according to the command line parameters (if any)
   */
  input_cmd(led, 0);

  return 0;
}


char *argv[MAX_ARGUMENTS]={NULL}, *dev = "/dev/camera", *s;
int argc=1, width=640, height=512, fps=5, format=V4L2_PIX_FMT_YUYV, i;

From these declarations we can see that the default device is /dev/camera and the default format is V4L2_PIX_FMT_YUYV.

Furthermore, from

/* y, yuv */
case 8:
case 9:
  DBG("case 8,9\n");
  format = V4L2_PIX_FMT_YUYV;
  break;

we can see that the format is still V4L2_PIX_FMT_YUYV when -y is specified.
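For reference, a typical launch with these defaults might look something like the following (the plugin paths, the output_http.so HTTP output plugin and port 8080 are the usual mjpg-streamer conventions and are shown here only as an assumed example):

./mjpg_streamer -i "./input_cmoscamera.so -d /dev/camera -r 640x512 -f 5 -y" -o "./output_http.so -p 8080"

The -r value is parsed by the strtol(optarg, &s, 10) / strtol(s+1, NULL, 10) pair in the resolution case above, so any "WIDTHxHEIGHT" string works even if it is not listed in the resolutions lookup table.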

init_videoIn() itself lives in mjpg-streamer/plugins/input_cmoscamera/v4l2uvc.c:

int init_videoIn(struct vdIn *vd, char *device, int width, int height, int fps, int format, int grabmethod)
{
  if (vd == NULL || device == NULL)
    return -1;
  if (width == 0 || height == 0)
    return -1;
  if (grabmethod < 0 || grabmethod > 1)
    grabmethod = 1;		//mmap by default;
  vd->videodevice = NULL;
  vd->status = NULL;
  vd->pictName = NULL;
  vd->videodevice = (char *) calloc (1, 16 * sizeof (char));
  vd->status = (char *) calloc (1, 100 * sizeof (char));
  vd->pictName = (char *) calloc (1, 80 * sizeof (char));
  snprintf (vd->videodevice, 12, "%s", device);
  vd->toggleAvi = 0;
  vd->getPict = 0;
  vd->signalquit = 1;
  vd->width = width;
  vd->height = height;
  vd->fps = fps;
  vd->formatIn = format;
  vd->grabmethod = grabmethod;

  if (init_v4l2 (vd) < 0) {
    fprintf (stderr, " Init Camera interface failed !! exit fatal \n");
    goto error;;
  }

  // alloc a temp buffer to reconstruct the pict
  vd->framesizeIn = (vd->width * vd->height << 1);
  vd->framebuffer =  (unsigned char *) calloc(1, (size_t) vd->framesizeIn);
  if (!vd->framebuffer)
    goto error;
  return 0;
error:
  free(vd->videodevice);
  free(vd->status);
  free(vd->pictName);
  close(vd->fd);
  return -1;
}
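Note the line vd->framesizeIn = (vd->width * vd->height << 1): YUYV stores 2 bytes per pixel, so at the default 640 x 512 one frame is 640 * 512 * 2 = 655,360 bytes. This is exactly the number of bytes uvcGrab() later expects to read from the driver for each frame.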
init_v4l2(), in the same file:

static int init_v4l2(struct vdIn *vd)
{
  int fd = open(vd->videodevice, O_RDONLY);
  if (fd > 0)
  {
    // Set preview width,height
    ioctl(fd, 0x0000, vd->width);
    ioctl(fd, 0x0001, vd->height);
    close(fd);
  }
  else
  {
    goto fatal;
  }
  // Open camera
  fdcamera = fopen(vd->videodevice,"rb");

  if (fdcamera == NULL)
  {
    printf("Could not open camera\n");
    goto fatal;
  }

  return 0;
fatal:
  return -1;
}
The two ioctl() calls

ioctl(fd, 0x0000, vd->width);
ioctl(fd, 0x0001, vd->height);

return a value < 0; it simply is never printed. The reason is the same as in the article linked above: the driver does not implement these ioctls. Commenting these two lines out is enough.
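Alternatively, rather than commenting the calls out, their failure could be made visible. Below is a hypothetical helper, not part of the plugin; the request codes 0x0000/0x0001 are specific to this ov9650 driver and are not standard V4L2 requests.

#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>

/* Hypothetical helper: issue the driver-specific resolution ioctls and report
 * failure instead of silently ignoring it. */
static void set_preview_size(int fd, int width, int height)
{
  if (ioctl(fd, 0x0000, width) < 0)   /* set preview width */
    fprintf(stderr, "width ioctl not handled by driver: %s\n", strerror(errno));
  if (ioctl(fd, 0x0001, height) < 0)  /* set preview height */
    fprintf(stderr, "height ioctl not handled by driver: %s\n", strerror(errno));
}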

Frame acquisition works much like in http://blog.csdn.net/songqqnew/article/details/7012927: both use the read interface, because that is what the ov9650 driver implements. See the line marked "see below" in the code:

mjpg-streamer/plugins/input_cmoscamera/input_uvc.c

/******************************************************************************
Description.: this thread worker grabs a frame and copies it to the global buffer
Input Value.: unused
Return Value: unused, always NULL
******************************************************************************/
void *cam_thread( void *arg ) {
  /* set cleanup handler to cleanup allocated resources */
  pthread_cleanup_push(cam_cleanup, NULL);

  while( !pglobal->stop ) {

    /* grab a frame */
    if( uvcGrab(videoIn) < 0 ) {  // see below: uvcGrab() in v4l2uvc.c
      IPRINT("Error grabbing frames\n");
      exit(EXIT_FAILURE);
    }

    DBG("received frame of size: %d\n", videoIn->buf.bytesused);

    /*
     * Workaround for broken, corrupted frames:
     * Under low light conditions corrupted frames may get captured.
     * The good thing is such frames are quite small compared to the regular pictures.
     * For example a VGA (640x480) webcam picture is normally >= 8kByte large,
     * corrupted frames are smaller.
     */
    //if ( videoIn->buf.bytesused < minimum_size ) {
    //  DBG("dropping too small frame, assuming it as broken\n");
    //  continue;
    //}

    /* copy JPG picture to global buffer */
    pthread_mutex_lock( &pglobal->db );

    /*
     * If capturing in YUV mode convert to JPEG now.
     * This compression requires many CPU cycles, so try to avoid YUV format.
     * Getting JPEGs straight from the webcam, is one of the major advantages of
     * Linux-UVC compatible devices.
     */
    if (videoIn->formatIn == V4L2_PIX_FMT_YUYV) {
      DBG("compressing frame\n");
      pglobal->size = compress_yuyv_to_jpeg(videoIn, pglobal->buf, videoIn->framesizeIn, gquality);
    }
    else {
      DBG("copying frame\n");
      pglobal->size = memcpy_picture(pglobal->buf, videoIn->tmpbuffer, videoIn->buf.bytesused);
    }

#if 0
    /* motion detection can be done just by comparing the picture size, but it is not very accurate!! */
    if ( (prev_size - global->size)*(prev_size - global->size) > 4*1024*1024 ) {
      DBG("motion detected (delta: %d kB)\n", (prev_size - global->size) / 1024);
    }
    prev_size = global->size;
#endif

    /* signal fresh_frame */
    pthread_cond_broadcast(&pglobal->db_update);
    pthread_mutex_unlock( &pglobal->db );

    DBG("waiting for next frame\n");

    /* only use usleep if the fps is below 5, otherwise the overhead is too long */
    if ( videoIn->fps < 5 ) {
      usleep(1000*1000/videoIn->fps);
    }
  }

  DBG("leaving input thread, calling cleanup function now\n");
  pthread_cleanup_pop(1);

  return NULL;
}


mjpg-streamer/plugins/input_cmoscamera/v4l2uvc.c

int uvcGrab(struct vdIn *vd)
{
#define HEADERFRAME1 0xaf
  int ret;
  int nSize = 0;

  //if (!vd->isstreaming)
  //  if (video_enable(vd))
  //    goto err;

  //memset(&vd->buf, 0, sizeof(struct v4l2_buffer));
  //vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  //vd->buf.memory = V4L2_MEMORY_MMAP;

  nSize = fread(vd->framebuffer,1,vd->framesizeIn,fdcamera);  // read the raw frame data into vd->framebuffer
  vd->buf.bytesused = nSize;                                  // record the raw data size in vd->buf.bytesused

  //ret = ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf);
  if (nSize !=  vd->framesizeIn) {
    perror("Read size small than requested");
    goto err;
  }

  //  ret = ioctl(vd->fd, VIDIOC_QBUF, &vd->buf);
  //  if (ret < 0) {
  //    perror("Unable to requeue buffer");
  //    goto err;
  //  }

  return 0;

err:
  vd->signalquit = 0;
  return -1;
}
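One detail worth noting: fread() may legitimately return fewer bytes than requested, and uvcGrab() treats any short read as fatal. Whether a short read can actually happen depends on how the driver's read() is implemented; if it can, a small retry loop would be more forgiving. A hypothetical sketch (fdcamera and framesizeIn as used in v4l2uvc.c):

#include <stdio.h>

/* Hypothetical sketch: keep reading until one full YUYV frame has been
 * assembled, instead of treating every short fread() as an error. */
static int read_full_frame(FILE *cam, unsigned char *buf, size_t framesize)
{
  size_t got = 0;
  while (got < framesize) {
    size_t n = fread(buf + got, 1, framesize - got, cam);
    if (n == 0) {              /* EOF or a read error reported by the driver */
      if (ferror(cam))
        perror("fread");
      return -1;
    }
    got += n;
  }
  return 0;
}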


The thread function cam_thread() contains the following image-format conversion call:

/*
 * If capturing in YUV mode convert to JPEG now.
 * This compression requires many CPU cycles, so try to avoid YUV format.
 * Getting JPEGs straight from the webcam, is one of the major advantages of
 * Linux-UVC compatible devices.
 */
if (videoIn->formatIn == V4L2_PIX_FMT_YUYV) {
  DBG("compressing frame\n");
  pglobal->size = compress_yuyv_to_jpeg(videoIn, pglobal->buf, videoIn->framesizeIn, gquality);  // compressed image data goes into pglobal->buf
}
else {
  DBG("copying frame\n");
  pglobal->size = memcpy_picture(pglobal->buf, videoIn->tmpbuffer, videoIn->buf.bytesused);  // image data is copied into pglobal->buf
}
Since the CMOS sensor is set to the YUV format, the data obtained from the driver is YUYV and has to be compressed to JPEG before it can be served as an MJPEG stream, so in practice compress_yuyv_to_jpeg() is the branch taken here.

In general the YUV format should be avoided, because this conversion burns a lot of CPU time. You can also see it in the browser: the CMOS camera's stream is noticeably sluggish.
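For completeness, the rough shape of such a YUYV-to-JPEG conversion is sketched below. This is not the plugin's compress_yuyv_to_jpeg() (which lives in jpeg_utils.c and writes into the caller-supplied buffer); it is a hypothetical minimal version that assumes a libjpeg providing jpeg_mem_dest() (libjpeg 8 or libjpeg-turbo) and reuses the approximate YUV-to-RGB coefficients from the beginning of this article.

#include <stdio.h>
#include <stdlib.h>
#include <jpeglib.h>

static unsigned char clamp_u8(int v) { return v < 0 ? 0 : (v > 255 ? 255 : v); }

/* Hypothetical sketch: compress one YUYV frame into a JPEG image in memory.
 * Returns the JPEG size; *out is allocated by libjpeg and must be free()d. */
static unsigned long yuyv_frame_to_jpeg(const unsigned char *yuyv, int width, int height,
                                        int quality, unsigned char **out)
{
  struct jpeg_compress_struct cinfo;
  struct jpeg_error_mgr jerr;
  unsigned long outsize = 0;
  unsigned char *row = malloc(width * 3);    /* one RGB24 scanline */
  JSAMPROW row_ptr[1] = { row };

  if (row == NULL)
    return 0;

  cinfo.err = jpeg_std_error(&jerr);
  jpeg_create_compress(&cinfo);
  *out = NULL;
  jpeg_mem_dest(&cinfo, out, &outsize);      /* needs libjpeg >= 8 or libjpeg-turbo */

  cinfo.image_width      = width;
  cinfo.image_height     = height;
  cinfo.input_components = 3;
  cinfo.in_color_space   = JCS_RGB;
  jpeg_set_defaults(&cinfo);
  jpeg_set_quality(&cinfo, quality, TRUE);
  jpeg_start_compress(&cinfo, TRUE);

  while (cinfo.next_scanline < cinfo.image_height) {
    const unsigned char *src = yuyv + cinfo.next_scanline * width * 2;
    int x;
    for (x = 0; x < width; x += 2) {         /* expand each YUYV pair to two RGB pixels */
      int y0 = src[0], u = src[1] - 128, y1 = src[2], v = src[3] - 128;
      row[x*3+0] = clamp_u8(y0 + 1.14 * v);
      row[x*3+1] = clamp_u8(y0 - 0.39 * u - 0.58 * v);
      row[x*3+2] = clamp_u8(y0 + 2.03 * u);
      row[x*3+3] = clamp_u8(y1 + 1.14 * v);
      row[x*3+4] = clamp_u8(y1 - 0.39 * u - 0.58 * v);
      row[x*3+5] = clamp_u8(y1 + 2.03 * u);
      src += 4;
    }
    jpeg_write_scanlines(&cinfo, row_ptr, 1);
  }

  jpeg_finish_compress(&cinfo);
  jpeg_destroy_compress(&cinfo);
  free(row);
  return outsize;
}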

It would be better to get MJPEG directly from the webcam, so that no conversion is needed at all -- but the ov9650 does not support that: http://blog.csdn.net/songqqnew/article/details/7013054

The zc0301pl does support it; see the next article.