---WebCam 9: USB driver
Device Drivers --->
    <*> Multimedia support --->
        [*] Video capture adapters --->
            [*] V4L USB devices --->
                <M> GSPCA based webcams --->
                <M>   ZC3XX USB Camera Driver
The help text of the "GSPCA based webcams" option gives CONFIG_USB_GSPCA,
and the help text of the "ZC3XX USB Camera Driver" option gives CONFIG_USB_GSPCA_ZC3XX.
Searching the Makefile for these two symbols turns up two lines:
obj-$(CONFIG_USB_GSPCA_ZC3XX) += gspca_zc3xx.o
obj-$(CONFIG_USB_GSPCA)       += gspca_main.o
and also:
gspca_zc3xx-objs := zc3xx.o
gspca_main-objs  := gspca.o
So the sources we want are zc3xx.c and gspca.c in the drivers/media/video/gspca/ directory.
Build them as modules, producing gspca_zc3xx.ko and gspca_main.ko, then copy them to the board and load them:
[root@FriendlyARM plg]# insmod gspca_main.ko
gspca: main v2.7.0 registered
[root@FriendlyARM plg]# insmod gspca_zc3xx.ko
gspca: probing 0ac8:301b
zc3xx: probe 2wr ov vga 0x0000
zc3xx: probe sensor -> 0011
zc3xx: Find Sensor HV7131R(c)
gspca: probe ok
usbcore: registered new interface driver zc3xx
zc3xx: registered
If gspca_zc3xx.ko is loaded first, an error occurs. The error is actually quite instructive: if you do not yet know how the two modules relate, triggering it on purpose makes the dependency obvious.
[root@FriendlyARM plg]# insmod gspca_zc3xx.ko
gspca_zc3xx: Unknown symbol gspca_frame_add
gspca_zc3xx: Unknown symbol gspca_debug
gspca_zc3xx: Unknown symbol gspca_disconnect
gspca_zc3xx: Unknown symbol gspca_dev_probe
insmod: cannot insert 'gspca_zc3xx.ko': unknown symbol in module or invalid parameter
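The unresolved symbols above are exported by gspca_main.ko (gspca.c uses EXPORT_SYMBOL on them), which is why gspca_main.ko must be loaded first. A minimal sketch of this kind of inter-module symbol dependency; the module and function names below are made up for illustration, not the gspca ones:

/* provider.c -> provider.ko : plays the role of gspca_main.ko */
#include <linux/module.h>

int provider_do_work(void)
{
        return 42;
}
EXPORT_SYMBOL(provider_do_work);        /* entered into the kernel symbol table when loaded */

MODULE_LICENSE("GPL");

/* consumer.c -> consumer.ko : plays the role of gspca_zc3xx.ko */
#include <linux/module.h>

extern int provider_do_work(void);      /* resolved at insmod time */

static int __init consumer_init(void)
{
        /* if provider.ko is not loaded yet, insmod fails here with
         * "Unknown symbol provider_do_work" - exactly the error shown above */
        return provider_do_work() == 42 ? 0 : -EINVAL;
}

static void __exit consumer_exit(void)
{
}

module_init(consumer_init);
module_exit(consumer_exit);
MODULE_LICENSE("GPL");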
/*********************************************************************************************************************************************************/
Next, let's follow the trail from the moment the USB camera is plugged in to the moment images are handed to the application, and read the source of this "universal" webcam driver to see how its gifted French author ties V4L2, the gspca framework and so many individual camera sub-drivers together.
When the USB camera (a zc0301pl) is plugged in, usbcore learns the device's details through enumeration, including its two IDs, and compares them with the usb_device_id tables of the USB drivers registered on the USB bus (such as zc3xx.c). When a match is found, that driver's probe function — sd_probe() — is invoked, with the device's resources (the usb_interface) passed in as an argument.
For example, the zc0301pl has idVendor=0x0ac8 and idProduct=0x301b, which appears in the table below:
zc3xx.c
static const struct sd_desc sd_desc = {.name = MODULE_NAME,.ctrls = sd_ctrls,.nctrls = ARRAY_SIZE(sd_ctrls),.config = sd_config,.init = sd_init,.start = sd_start,.stop0 = sd_stop0,.pkt_scan = sd_pkt_scan,.querymenu = sd_querymenu,.get_jcomp = sd_get_jcomp,.set_jcomp = sd_set_jcomp, };static const __devinitdata struct usb_device_id device_table[] = {{USB_DEVICE(0x041e, 0x041e)},{USB_DEVICE(0x041e, 0x4017)},{USB_DEVICE(0x041e, 0x401c), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x041e, 0x401e)},{USB_DEVICE(0x041e, 0x401f)},{USB_DEVICE(0x041e, 0x4022)},{USB_DEVICE(0x041e, 0x4029)},{USB_DEVICE(0x041e, 0x4034), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x041e, 0x4035), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x041e, 0x4036)},{USB_DEVICE(0x041e, 0x403a)},{USB_DEVICE(0x041e, 0x4051), .driver_info = SENSOR_TAS5130C_VF0250},{USB_DEVICE(0x041e, 0x4053), .driver_info = SENSOR_TAS5130C_VF0250},{USB_DEVICE(0x0458, 0x7007)},{USB_DEVICE(0x0458, 0x700c)},{USB_DEVICE(0x0458, 0x700f)},{USB_DEVICE(0x0461, 0x0a00)},{USB_DEVICE(0x046d, 0x089d), .driver_info = SENSOR_MC501CB},{USB_DEVICE(0x046d, 0x08a0)},{USB_DEVICE(0x046d, 0x08a1)},{USB_DEVICE(0x046d, 0x08a2)},{USB_DEVICE(0x046d, 0x08a3)},{USB_DEVICE(0x046d, 0x08a6)},{USB_DEVICE(0x046d, 0x08a7)},{USB_DEVICE(0x046d, 0x08a9)},{USB_DEVICE(0x046d, 0x08aa)},{USB_DEVICE(0x046d, 0x08ac)},{USB_DEVICE(0x046d, 0x08ad)}, #if !defined CONFIG_USB_ZC0301 && !defined CONFIG_USB_ZC0301_MODULE{USB_DEVICE(0x046d, 0x08ae)}, #endif{USB_DEVICE(0x046d, 0x08af)},{USB_DEVICE(0x046d, 0x08b9)},{USB_DEVICE(0x046d, 0x08d7)},{USB_DEVICE(0x046d, 0x08d9)},{USB_DEVICE(0x046d, 0x08d8)},{USB_DEVICE(0x046d, 0x08da)},{USB_DEVICE(0x046d, 0x08dd), .driver_info = SENSOR_MC501CB},{USB_DEVICE(0x0471, 0x0325), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x0471, 0x0326), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x0471, 0x032d), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x0471, 0x032e), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x055f, 0xc005)},{USB_DEVICE(0x055f, 0xd003)},{USB_DEVICE(0x055f, 0xd004)},{USB_DEVICE(0x0698, 0x2003)},{USB_DEVICE(0x0ac8, 0x0301), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x0ac8, 0x0302), .driver_info = SENSOR_PAS106},{USB_DEVICE(0x0ac8, 0x301b)},//for my usb camera{USB_DEVICE(0x0ac8, 0x303b)},{USB_DEVICE(0x0ac8, 0x305b), .driver_info = SENSOR_TAS5130C_VF0250},{USB_DEVICE(0x0ac8, 0x307b)},{USB_DEVICE(0x10fd, 0x0128)},{USB_DEVICE(0x10fd, 0x804d)},{USB_DEVICE(0x10fd, 0x8050)},{} /* end of entry */ }; #undef DVNAME MODULE_DEVICE_TABLE(usb, device_table);/* -- device connect -- */ static int sd_probe(struct usb_interface *intf,const struct usb_device_id *id) {return gspca_dev_probe(intf, id, &sd_desc, sizeof(struct sd),THIS_MODULE); }/* USB driver */ static struct usb_driver sd_driver = {.name = MODULE_NAME,.id_table = device_table,.probe = sd_probe,.disconnect = gspca_disconnect, #ifdef CONFIG_PM.suspend = gspca_suspend,.resume = gspca_resume, #endif };static int __init sd_mod_init(void) {int ret;ret = usb_register(&sd_driver);if (ret < 0)return ret;PDEBUG(D_PROBE, "registered");return 0; }static void __exit sd_mod_exit(void) {usb_deregister(&sd_driver);PDEBUG(D_PROBE, "deregistered"); }module_init(sd_mod_init); module_exit(sd_mod_exit); 在sd_probe函數中是調用的gspca_dev_probe(intf, id, &sd_desc, sizeof(struct sd),THIS_MODULE)
sd_probe() just wraps gspca_dev_probe(intf, id, &sd_desc, sizeof(struct sd), THIS_MODULE); the important argument is the struct sd_desc sd_desc shown above.
Step into gspca_dev_probe():
gspca.c
/** probe and create a new gspca device** This function must be called by the sub-driver when it is* called for probing a new device.*/ int gspca_dev_probe(struct usb_interface *intf,const struct usb_device_id *id,const struct sd_desc *sd_desc,int dev_size,struct module *module) {struct usb_interface_descriptor *interface;struct gspca_dev *gspca_dev;struct usb_device *dev = interface_to_usbdev(intf);int ret;PDEBUG(D_PROBE, "probing %04x:%04x", id->idVendor, id->idProduct);/* we don't handle multi-config cameras */if (dev->descriptor.bNumConfigurations != 1)return -ENODEV;interface = &intf->cur_altsetting->desc;if (interface->bInterfaceNumber > 0)return -ENODEV;/* create the device */if (dev_size < sizeof *gspca_dev)dev_size = sizeof *gspca_dev;gspca_dev = kzalloc(dev_size, GFP_KERNEL);if (!gspca_dev) {err("couldn't kzalloc gspca struct");return -ENOMEM;}gspca_dev->usb_buf = kmalloc(USB_BUF_SZ, GFP_KERNEL);if (!gspca_dev->usb_buf) {err("out of memory");ret = -ENOMEM;goto out;}gspca_dev->dev = dev;gspca_dev->iface = interface->bInterfaceNumber;gspca_dev->nbalt = intf->num_altsetting;gspca_dev->sd_desc = sd_desc;gspca_dev->nbufread = 2;gspca_dev->empty_packet = -1; /* don't check the empty packets *//* configure the subdriver and initialize the USB device */ret = sd_desc->config(gspca_dev, id);if (ret < 0)goto out;ret = sd_desc->init(gspca_dev);if (ret < 0)goto out;ret = gspca_set_alt0(gspca_dev);if (ret < 0)goto out;gspca_set_default_mode(gspca_dev);mutex_init(&gspca_dev->usb_lock);mutex_init(&gspca_dev->read_lock);mutex_init(&gspca_dev->queue_lock);init_waitqueue_head(&gspca_dev->wq);/* init video stuff */memcpy(&gspca_dev->vdev, &gspca_template, sizeof gspca_template);gspca_dev->vdev.parent = &intf->dev;gspca_dev->module = module;gspca_dev->present = 1;ret = video_register_device(&gspca_dev->vdev,VFL_TYPE_GRABBER,-1);if (ret < 0) {err("video_register_device err %d", ret);goto out;}usb_set_intfdata(intf, gspca_dev);PDEBUG(D_PROBE, "probe ok");return 0; out:kfree(gspca_dev->usb_buf);kfree(gspca_dev);return ret; } EXPORT_SYMBOL(gspca_dev_probe);上面定義了struct gspca_dev *gspca_dev;結構體如下
gspca_dev_probe() allocates a struct gspca_dev, which is defined as follows.
gspca.h
struct gspca_dev {struct video_device vdev; /* !! must be the first item */struct module *module; /* subdriver handling the device */struct usb_device *dev;struct file *capt_file; /* file doing video capture */struct cam cam; /* device information */const struct sd_desc *sd_desc; /* subdriver description */unsigned ctrl_dis; /* disabled controls (bit map) */#define USB_BUF_SZ 64__u8 *usb_buf; /* buffer for USB exchanges */struct urb *urb[MAX_NURBS];__u8 *frbuf; /* buffer for nframes */struct gspca_frame frame[GSPCA_MAX_FRAMES];__u32 frsz; /* frame size */char nframes; /* number of frames */char fr_i; /* frame being filled */char fr_q; /* next frame to queue */char fr_o; /* next frame to dequeue */signed char fr_queue[GSPCA_MAX_FRAMES]; /* frame queue */__u8 last_packet_type;__s8 empty_packet; /* if (-1) don't check empty packets */__u8 streaming;__u8 curr_mode; /* current camera mode */__u32 pixfmt; /* current mode parameters */__u16 width;__u16 height;__u32 sequence; /* frame sequence number */wait_queue_head_t wq; /* wait queue */struct mutex usb_lock; /* usb exchange protection */struct mutex read_lock; /* read protection */struct mutex queue_lock; /* ISOC queue protection */ #ifdef CONFIG_PMchar frozen; /* suspend - resume */ #endifchar users; /* number of opens */char present; /* device connected */char nbufread; /* number of buffers for read() */char nurbs; /* number of allocated URBs */char memory; /* memory type (V4L2_MEMORY_xxx) */__u8 iface; /* USB interface number */__u8 alt; /* USB alternate setting */__u8 nbalt; /* number of USB alternate settings */u16 pkt_size; /* ISOC packet size */ };
gspca_dev_probe() then fills in the members of gspca_dev from its arguments and allocates memory for some of them (usb_buf, for instance).
Next it calls sd_desc->config(gspca_dev, id) and sd_desc->init(gspca_dev); both callbacks are of course implemented in zc3xx.c.
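Those config/init callbacks mostly push tables of register values to the camera over USB control transfers. A minimal, hypothetical sketch of that pattern — the helper name, request code and register values below are made up for illustration and are not taken from zc3xx.c:

#include <linux/kernel.h>
#include <linux/usb.h>

/* hypothetical register write: one USB control transfer per register */
static int demo_reg_w(struct usb_device *udev, u16 value, u16 index)
{
        return usb_control_msg(udev,
                        usb_sndctrlpipe(udev, 0),
                        0xa0,                                   /* bRequest, vendor specific (made up) */
                        USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE,
                        value, index,                           /* wValue = data, wIndex = register */
                        NULL, 0,                                /* no data stage */
                        500);                                   /* timeout in ms */
}

/* a config/init style callback then just walks an init table */
static int demo_sensor_init(struct usb_device *udev)
{
        static const u16 init_tbl[][2] = {                      /* {value, register} pairs (made up) */
                {0x01, 0x0000},
                {0x10, 0x0002},
                {0x00, 0x0008},
        };
        int i, ret;

        for (i = 0; i < ARRAY_SIZE(init_tbl); i++) {
                ret = demo_reg_w(udev, init_tbl[i][0], init_tbl[i][1]);
                if (ret < 0)
                        return ret;
        }
        return 0;
}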
Then it executes memcpy(&gspca_dev->vdev, &gspca_template, sizeof gspca_template);
The first argument, gspca_dev->vdev, is a struct video_device:
v4l2-dev.h
struct video_device {/* device ops */const struct v4l2_file_operations *fops;/* sysfs */struct device dev; /* v4l device */struct cdev *cdev; /* character device *//* Set either parent or v4l2_dev if your driver uses v4l2_device */struct device *parent; /* device parent */struct v4l2_device *v4l2_dev; /* v4l2_device parent *//* device info */char name[32];int vfl_type;/* 'minor' is set to -1 if the registration failed */int minor;u16 num;/* use bitops to set/clear/test flags */unsigned long flags;/* attribute to differentiate multiple indices on one physical device */int index;int debug; /* Activates debug level*//* Video standard vars */v4l2_std_id tvnorms; /* Supported tv norms */v4l2_std_id current_norm; /* Current tvnorm *//* callbacks */void (*release)(struct video_device *vdev);/* ioctl callbacks */const struct v4l2_ioctl_ops *ioctl_ops; };
The second argument, gspca_template, is defined as:
gspca.c
static struct video_device gspca_template = {
        .name = "gspca main driver",
        .fops = &dev_fops,
        .ioctl_ops = &dev_ioctl_ops,
        .release = gspca_release,
        .minor = -1,
};
As the name suggests, gspca_template is a template: the memcpy() gives each member of gspca_dev->vdev its initial value. The two most important members are the function-pointer sets:
.fops = &dev_fops,
.ioctl_ops = &dev_ioctl_ops,
They are defined as follows:
gspca.c
static struct v4l2_file_operations dev_fops = {
        .owner = THIS_MODULE,
        .open = dev_open,
        .release = dev_close,
        .read = dev_read,
        .mmap = dev_mmap,
        .unlocked_ioctl = video_ioctl2,
        .poll = dev_poll,
};
static const struct v4l2_ioctl_ops dev_ioctl_ops = {
        .vidioc_querycap        = vidioc_querycap,
        .vidioc_dqbuf           = vidioc_dqbuf,
        .vidioc_qbuf            = vidioc_qbuf,
        .vidioc_enum_fmt_vid_cap = vidioc_enum_fmt_vid_cap,
        .vidioc_try_fmt_vid_cap = vidioc_try_fmt_vid_cap,
        .vidioc_g_fmt_vid_cap   = vidioc_g_fmt_vid_cap,
        .vidioc_s_fmt_vid_cap   = vidioc_s_fmt_vid_cap,
        .vidioc_streamon        = vidioc_streamon,
        .vidioc_queryctrl       = vidioc_queryctrl,
        .vidioc_g_ctrl          = vidioc_g_ctrl,
        .vidioc_s_ctrl          = vidioc_s_ctrl,
        .vidioc_g_audio         = vidioc_g_audio,
        .vidioc_s_audio         = vidioc_s_audio,
        .vidioc_enumaudio       = vidioc_enumaudio,
        .vidioc_querymenu       = vidioc_querymenu,
        .vidioc_enum_input      = vidioc_enum_input,
        .vidioc_g_input         = vidioc_g_input,
        .vidioc_s_input         = vidioc_s_input,
        .vidioc_reqbufs         = vidioc_reqbufs,
        .vidioc_querybuf        = vidioc_querybuf,
        .vidioc_streamoff       = vidioc_streamoff,
        .vidioc_g_jpegcomp      = vidioc_g_jpegcomp,
        .vidioc_s_jpegcomp      = vidioc_s_jpegcomp,
        .vidioc_g_parm          = vidioc_g_parm,
        .vidioc_s_parm          = vidioc_s_parm,
        .vidioc_enum_framesizes = vidioc_enum_framesizes,
#ifdef CONFIG_VIDEO_ADV_DEBUG
        .vidioc_g_register      = vidioc_g_register,
        .vidioc_s_register      = vidioc_s_register,
#endif
        .vidioc_g_chip_ident    = vidioc_g_chip_ident,
#ifdef CONFIG_VIDEO_V4L1_COMPAT
        .vidiocgmbuf            = vidiocgmbuf,
#endif
};
That is as far as the memcpy() goes. Back in gspca_dev_probe(), the next call is
video_register_device(&gspca_dev->vdev,VFL_TYPE_GRABBER, -1);
The first argument is gspca_dev->vdev — a struct video_device already populated with dev_fops and dev_ioctl_ops. Keep following:
v4l2-dev.c
int video_register_device(struct video_device *vdev, int type, int nr)
{
        return __video_register_device(vdev, type, nr, 1);
}
v4l2-dev.c
static int __video_register_device(struct video_device *vdev, int type, int nr,int warn_if_nr_in_use) {int i = 0;int ret;int minor_offset = 0;int minor_cnt = VIDEO_NUM_DEVICES;const char *name_base;void *priv = video_get_drvdata(vdev);/* A minor value of -1 marks this video device as neverhaving been registered */vdev->minor = -1;/* the release callback MUST be present */WARN_ON(!vdev->release);if (!vdev->release)return -EINVAL;/* Part 1: check device type */switch (type) {case VFL_TYPE_GRABBER:name_base = "video";break;case VFL_TYPE_VTX:name_base = "vtx";break;case VFL_TYPE_VBI:name_base = "vbi";break;case VFL_TYPE_RADIO:name_base = "radio";break;default:printk(KERN_ERR "%s called with unknown type: %d\n",__func__, type);return -EINVAL;}vdev->vfl_type = type;vdev->cdev = NULL;if (vdev->v4l2_dev && vdev->v4l2_dev->dev)vdev->parent = vdev->v4l2_dev->dev;/* Part 2: find a free minor, device node number and device index. */ #ifdef CONFIG_VIDEO_FIXED_MINOR_RANGES/* Keep the ranges for the first four types for historical* reasons.* Newer devices (not yet in place) should use the range* of 128-191 and just pick the first free minor there* (new style). */switch (type) {case VFL_TYPE_GRABBER:minor_offset = 0;minor_cnt = 64;break;case VFL_TYPE_RADIO:minor_offset = 64;minor_cnt = 64;break;case VFL_TYPE_VTX:minor_offset = 192;minor_cnt = 32;break;case VFL_TYPE_VBI:minor_offset = 224;minor_cnt = 32;break;default:minor_offset = 128;minor_cnt = 64;break;} #endif/* Pick a device node number */mutex_lock(&videodev_lock);nr = devnode_find(vdev, nr == -1 ? 0 : nr, minor_cnt);if (nr == minor_cnt)nr = devnode_find(vdev, 0, minor_cnt);if (nr == minor_cnt) {printk(KERN_ERR "could not get a free device node number\n");mutex_unlock(&videodev_lock);return -ENFILE;} #ifdef CONFIG_VIDEO_FIXED_MINOR_RANGES/* 1-on-1 mapping of device node number to minor number */i = nr; #else/* The device node number and minor numbers are independent, sowe just find the first free minor number. */for (i = 0; i < VIDEO_NUM_DEVICES; i++)if (video_device[i] == NULL)break;if (i == VIDEO_NUM_DEVICES) {mutex_unlock(&videodev_lock);printk(KERN_ERR "could not get a free minor\n");return -ENFILE;} #endifvdev->minor = i + minor_offset;vdev->num = nr;devnode_set(vdev);/* Should not happen since we thought this minor was free */WARN_ON(video_device[vdev->minor] != NULL);vdev->index = get_index(vdev);mutex_unlock(&videodev_lock);/* Part 3: Initialize the character device */vdev->cdev = cdev_alloc();if (vdev->cdev == NULL) {ret = -ENOMEM;goto cleanup;}if (vdev->fops->unlocked_ioctl)vdev->cdev->ops = &v4l2_unlocked_fops;elsevdev->cdev->ops = &v4l2_fops;vdev->cdev->owner = vdev->fops->owner;ret = cdev_add(vdev->cdev, MKDEV(VIDEO_MAJOR, vdev->minor), 1);if (ret < 0) {printk(KERN_ERR "%s: cdev_add failed\n", __func__);kfree(vdev->cdev);vdev->cdev = NULL;goto cleanup;}/* Part 4: register the device with sysfs */memset(&vdev->dev, 0, sizeof(vdev->dev));/* The memset above cleared the device's drvdata, soput back the copy we made earlier. */video_set_drvdata(vdev, priv);vdev->dev.class = &video_class;//notevdev->dev.devt = MKDEV(VIDEO_MAJOR, vdev->minor);//noteif (vdev->parent)vdev->dev.parent = vdev->parent;dev_set_name(&vdev->dev, "%s%d", name_base, vdev->num);ret = device_register(&vdev->dev);//noteif (ret < 0) {printk(KERN_ERR "%s: device_register failed\n", __func__);goto cleanup;}/* Register the release callback that will be called when the lastreference to the device goes away. 
*/vdev->dev.release = v4l2_device_release;if (nr != -1 && nr != vdev->num && warn_if_nr_in_use)printk(KERN_WARNING "%s: requested %s%d, got %s%d\n",__func__, name_base, nr, name_base, vdev->num);/* Part 5: Activate this minor. The char device can now be used. */mutex_lock(&videodev_lock);video_device[vdev->minor] = vdev;mutex_unlock(&videodev_lock);return 0;cleanup:mutex_lock(&videodev_lock);if (vdev->cdev)cdev_del(vdev->cdev);devnode_clear(vdev);mutex_unlock(&videodev_lock);/* Mark this video device as never having been registered. */vdev->minor = -1;return ret; }
__video_register_device() registers a character device and points its file_operations at v4l2_unlocked_fops (vdev->cdev->ops = &v4l2_unlocked_fops;), and a device node such as /dev/video0 appears. How exactly the node gets created is less obvious — it happens through device_register(&vdev->dev); is giving the device a class enough for the node to be created? To be confirmed... see
register_chrdev, register_chrdev_region, alloc_chrdev_region --- udev, class, bus, driver, device
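Judging from the code quoted above (vdev->dev.class = &video_class and vdev->dev.devt = MKDEV(...) before device_register()), the answer appears to be yes: a device registered with both a class and a devt triggers a uevent, and udev/mdev (or devtmpfs) then creates /dev/video0. A minimal, standalone sketch of the same mechanism with made-up names — device_create() here is just the convenience wrapper for setting class/devt and calling device_register():

#include <linux/module.h>
#include <linux/fs.h>
#include <linux/cdev.h>
#include <linux/device.h>

static dev_t demo_devt;
static struct cdev demo_cdev;
static struct class *demo_class;

static const struct file_operations demo_fops = {
        .owner = THIS_MODULE,
};

static int __init demo_init(void)
{
        int ret = alloc_chrdev_region(&demo_devt, 0, 1, "demo");
        if (ret)
                return ret;

        cdev_init(&demo_cdev, &demo_fops);
        ret = cdev_add(&demo_cdev, demo_devt, 1);
        if (ret)
                goto unregister;

        /* class + device_create() is what makes udev create /dev/demo0 */
        demo_class = class_create(THIS_MODULE, "demo_class");
        if (IS_ERR(demo_class)) {
                ret = PTR_ERR(demo_class);
                goto del_cdev;
        }
        device_create(demo_class, NULL, demo_devt, NULL, "demo0");
        return 0;

del_cdev:
        cdev_del(&demo_cdev);
unregister:
        unregister_chrdev_region(demo_devt, 1);
        return ret;
}

static void __exit demo_exit(void)
{
        device_destroy(demo_class, demo_devt);
        class_destroy(demo_class);
        cdev_del(&demo_cdev);
        unregister_chrdev_region(demo_devt, 1);
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");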
The file_operations of that character device are:
v4l2-dev.c
static const struct file_operations v4l2_unlocked_fops = {
        .owner = THIS_MODULE,
        .read = v4l2_read,
        .write = v4l2_write,
        .open = v4l2_open,
        .get_unmapped_area = v4l2_get_unmapped_area,
        .mmap = v4l2_mmap,
        .unlocked_ioctl = v4l2_unlocked_ioctl,
#ifdef CONFIG_COMPAT
        .compat_ioctl = v4l2_compat_ioctl32,
#endif
        .release = v4l2_release,
        .poll = v4l2_poll,
        .llseek = no_llseek,
};
When user space opens /dev/video0, the .open of the character device's v4l2_unlocked_fops — v4l2_open() — is called. Its source:
v4l2-dev.c
static int v4l2_open(struct inode *inode, struct file *filp)
{
        struct video_device *vdev;
        int ret = 0;

        /* Check if the video device is available */
        mutex_lock(&videodev_lock);
        vdev = video_devdata(filp);
        /* return ENODEV if the video device has been removed
           already or if it is not registered anymore. */
        if (vdev == NULL || video_is_unregistered(vdev)) {
                mutex_unlock(&videodev_lock);
                return -ENODEV;
        }
        /* and increase the device refcount */
        video_get(vdev);
        mutex_unlock(&videodev_lock);
        if (vdev->fops->open)
                ret = vdev->fops->open(filp);   /* <-- here it jumps into the sub-framework's fops */
        /* decrease the refcount in case of an error */
        if (ret)
                video_put(vdev);
        return ret;
}
The call vdev->fops->open(filp) shows that it jumps to the .open of the struct v4l2_file_operations dev_fops quoted earlier.
Following that open() into gspca.c: it basically just bumps a use count and does little else.
When user space issues an ioctl() on /dev/video0, the character device's v4l2_unlocked_fops .unlocked_ioctl (v4l2_unlocked_ioctl) runs, which ends up calling video_ioctl2(), the handler installed in dev_fops in gspca.c. Its source:
v4l2-ioctl.c
long video_ioctl2(struct file *file, unsigned int cmd, unsigned long arg)
{
        /* Handles IOCTL */
        err = __video_do_ioctl(file, cmd, parg);
}
v4l2-ioctl.c --- search the case labels below for the commands of interest
static long __video_do_ioctl(struct file *file,unsigned int cmd, void *arg) {struct video_device *vfd = video_devdata(file);const struct v4l2_ioctl_ops *ops = vfd->ioctl_ops;void *fh = file->private_data;long ret = -EINVAL;if ((vfd->debug & V4L2_DEBUG_IOCTL) &&!(vfd->debug & V4L2_DEBUG_IOCTL_ARG)) {v4l_print_ioctl(vfd->name, cmd);printk(KERN_CONT "\n");}if (ops == NULL) {printk(KERN_WARNING "videodev: \"%s\" has no ioctl_ops.\n",vfd->name);return -EINVAL;}#ifdef CONFIG_VIDEO_V4L1_COMPAT/***********************************************************Handles calls to the obsoleted V4L1 APIDue to the nature of VIDIOCGMBUF, each driver that supportsV4L1 should implement its own handler for this ioctl.***********************************************************//* --- streaming capture ------------------------------------- */if (cmd == VIDIOCGMBUF) {struct video_mbuf *p = arg;if (!ops->vidiocgmbuf)return ret;ret = ops->vidiocgmbuf(file, fh, p);if (!ret)dbgarg(cmd, "size=%d, frames=%d, offsets=0x%08lx\n",p->size, p->frames,(unsigned long)p->offsets);return ret;}/********************************************************All other V4L1 calls are handled by v4l1_compat module.Those calls will be translated into V4L2 calls, and__video_do_ioctl will be called again, with one or moreV4L2 ioctls.********************************************************/if (_IOC_TYPE(cmd) == 'v' && _IOC_NR(cmd) < BASE_VIDIOCPRIVATE)return v4l_compat_translate_ioctl(file, cmd, arg,__video_do_ioctl); #endifswitch (cmd) {/* --- capabilities ------------------------------------------ */case VIDIOC_QUERYCAP:{struct v4l2_capability *cap = (struct v4l2_capability *)arg;if (!ops->vidioc_querycap)break;ret = ops->vidioc_querycap(file, fh, cap);if (!ret)dbgarg(cmd, "driver=%s, card=%s, bus=%s, ""version=0x%08x, ""capabilities=0x%08x\n",cap->driver, cap->card, cap->bus_info,cap->version,cap->capabilities);break;}/* --- priority ------------------------------------------ */case VIDIOC_G_PRIORITY:{enum v4l2_priority *p = arg;if (!ops->vidioc_g_priority)break;ret = ops->vidioc_g_priority(file, fh, p);if (!ret)dbgarg(cmd, "priority is %d\n", *p);break;}case VIDIOC_S_PRIORITY:{enum v4l2_priority *p = arg;if (!ops->vidioc_s_priority)break;dbgarg(cmd, "setting priority to %d\n", *p);ret = ops->vidioc_s_priority(file, fh, *p);break;}/* --- capture ioctls ---------------------------------------- */case VIDIOC_ENUM_FMT:{struct v4l2_fmtdesc *f = arg;switch (f->type) {case V4L2_BUF_TYPE_VIDEO_CAPTURE:if (ops->vidioc_enum_fmt_vid_cap)ret = ops->vidioc_enum_fmt_vid_cap(file, fh, f);break;case V4L2_BUF_TYPE_VIDEO_OVERLAY:if (ops->vidioc_enum_fmt_vid_overlay)ret = ops->vidioc_enum_fmt_vid_overlay(file,fh, f);break;case V4L2_BUF_TYPE_VIDEO_OUTPUT:if (ops->vidioc_enum_fmt_vid_out)ret = ops->vidioc_enum_fmt_vid_out(file, fh, f);break;case V4L2_BUF_TYPE_PRIVATE:if (ops->vidioc_enum_fmt_type_private)ret = ops->vidioc_enum_fmt_type_private(file,fh, f);break;default:break;}if (!ret)dbgarg(cmd, "index=%d, type=%d, flags=%d, ""pixelformat=%c%c%c%c, description='%s'\n",f->index, f->type, f->flags,(f->pixelformat & 0xff),(f->pixelformat >> 8) & 0xff,(f->pixelformat >> 16) & 0xff,(f->pixelformat >> 24) & 0xff,f->description);break;}case VIDIOC_G_FMT:{struct v4l2_format *f = (struct v4l2_format *)arg;/* FIXME: Should be one dump per type */dbgarg(cmd, "type=%s\n", prt_names(f->type, v4l2_type_names));switch (f->type) {case V4L2_BUF_TYPE_VIDEO_CAPTURE:if (ops->vidioc_g_fmt_vid_cap)ret = ops->vidioc_g_fmt_vid_cap(file, fh, f);if (!ret)v4l_print_pix_fmt(vfd, 
&f->fmt.pix);break;case V4L2_BUF_TYPE_VIDEO_OVERLAY:if (ops->vidioc_g_fmt_vid_overlay)ret = ops->vidioc_g_fmt_vid_overlay(file,fh, f);break;case V4L2_BUF_TYPE_VIDEO_OUTPUT:if (ops->vidioc_g_fmt_vid_out)ret = ops->vidioc_g_fmt_vid_out(file, fh, f);if (!ret)v4l_print_pix_fmt(vfd, &f->fmt.pix);break;case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:if (ops->vidioc_g_fmt_vid_out_overlay)ret = ops->vidioc_g_fmt_vid_out_overlay(file,fh, f);break;case V4L2_BUF_TYPE_VBI_CAPTURE:if (ops->vidioc_g_fmt_vbi_cap)ret = ops->vidioc_g_fmt_vbi_cap(file, fh, f);break;case V4L2_BUF_TYPE_VBI_OUTPUT:if (ops->vidioc_g_fmt_vbi_out)ret = ops->vidioc_g_fmt_vbi_out(file, fh, f);break;case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE:if (ops->vidioc_g_fmt_sliced_vbi_cap)ret = ops->vidioc_g_fmt_sliced_vbi_cap(file,fh, f);break;case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT:if (ops->vidioc_g_fmt_sliced_vbi_out)ret = ops->vidioc_g_fmt_sliced_vbi_out(file,fh, f);break;case V4L2_BUF_TYPE_PRIVATE:if (ops->vidioc_g_fmt_type_private)ret = ops->vidioc_g_fmt_type_private(file,fh, f);break;}break;}case VIDIOC_S_FMT:{struct v4l2_format *f = (struct v4l2_format *)arg;/* FIXME: Should be one dump per type */dbgarg(cmd, "type=%s\n", prt_names(f->type, v4l2_type_names));switch (f->type) {case V4L2_BUF_TYPE_VIDEO_CAPTURE:CLEAR_AFTER_FIELD(f, fmt.pix);v4l_print_pix_fmt(vfd, &f->fmt.pix);if (ops->vidioc_s_fmt_vid_cap)ret = ops->vidioc_s_fmt_vid_cap(file, fh, f);break;case V4L2_BUF_TYPE_VIDEO_OVERLAY:CLEAR_AFTER_FIELD(f, fmt.win);if (ops->vidioc_s_fmt_vid_overlay)ret = ops->vidioc_s_fmt_vid_overlay(file,fh, f);break;case V4L2_BUF_TYPE_VIDEO_OUTPUT:CLEAR_AFTER_FIELD(f, fmt.pix);v4l_print_pix_fmt(vfd, &f->fmt.pix);if (ops->vidioc_s_fmt_vid_out)ret = ops->vidioc_s_fmt_vid_out(file, fh, f);break;case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:CLEAR_AFTER_FIELD(f, fmt.win);if (ops->vidioc_s_fmt_vid_out_overlay)ret = ops->vidioc_s_fmt_vid_out_overlay(file,fh, f);break;case V4L2_BUF_TYPE_VBI_CAPTURE:CLEAR_AFTER_FIELD(f, fmt.vbi);if (ops->vidioc_s_fmt_vbi_cap)ret = ops->vidioc_s_fmt_vbi_cap(file, fh, f);break;case V4L2_BUF_TYPE_VBI_OUTPUT:CLEAR_AFTER_FIELD(f, fmt.vbi);if (ops->vidioc_s_fmt_vbi_out)ret = ops->vidioc_s_fmt_vbi_out(file, fh, f);break;case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE:CLEAR_AFTER_FIELD(f, fmt.sliced);if (ops->vidioc_s_fmt_sliced_vbi_cap)ret = ops->vidioc_s_fmt_sliced_vbi_cap(file,fh, f);break;case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT:CLEAR_AFTER_FIELD(f, fmt.sliced);if (ops->vidioc_s_fmt_sliced_vbi_out)ret = ops->vidioc_s_fmt_sliced_vbi_out(file,fh, f);break;case V4L2_BUF_TYPE_PRIVATE:/* CLEAR_AFTER_FIELD(f, fmt.raw_data); <- does nothing */if (ops->vidioc_s_fmt_type_private)ret = ops->vidioc_s_fmt_type_private(file,fh, f);break;}break;}case VIDIOC_TRY_FMT:{struct v4l2_format *f = (struct v4l2_format *)arg;/* FIXME: Should be one dump per type */dbgarg(cmd, "type=%s\n", prt_names(f->type,v4l2_type_names));switch (f->type) {case V4L2_BUF_TYPE_VIDEO_CAPTURE:CLEAR_AFTER_FIELD(f, fmt.pix);if (ops->vidioc_try_fmt_vid_cap)ret = ops->vidioc_try_fmt_vid_cap(file, fh, f);if (!ret)v4l_print_pix_fmt(vfd, &f->fmt.pix);break;case V4L2_BUF_TYPE_VIDEO_OVERLAY:CLEAR_AFTER_FIELD(f, fmt.win);if (ops->vidioc_try_fmt_vid_overlay)ret = ops->vidioc_try_fmt_vid_overlay(file,fh, f);break;case V4L2_BUF_TYPE_VIDEO_OUTPUT:CLEAR_AFTER_FIELD(f, fmt.pix);if (ops->vidioc_try_fmt_vid_out)ret = ops->vidioc_try_fmt_vid_out(file, fh, f);if (!ret)v4l_print_pix_fmt(vfd, &f->fmt.pix);break;case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:CLEAR_AFTER_FIELD(f, fmt.win);if (ops->vidioc_try_fmt_vid_out_overlay)ret = 
ops->vidioc_try_fmt_vid_out_overlay(file,fh, f);break;case V4L2_BUF_TYPE_VBI_CAPTURE:CLEAR_AFTER_FIELD(f, fmt.vbi);if (ops->vidioc_try_fmt_vbi_cap)ret = ops->vidioc_try_fmt_vbi_cap(file, fh, f);break;case V4L2_BUF_TYPE_VBI_OUTPUT:CLEAR_AFTER_FIELD(f, fmt.vbi);if (ops->vidioc_try_fmt_vbi_out)ret = ops->vidioc_try_fmt_vbi_out(file, fh, f);break;case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE:CLEAR_AFTER_FIELD(f, fmt.sliced);if (ops->vidioc_try_fmt_sliced_vbi_cap)ret = ops->vidioc_try_fmt_sliced_vbi_cap(file,fh, f);break;case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT:CLEAR_AFTER_FIELD(f, fmt.sliced);if (ops->vidioc_try_fmt_sliced_vbi_out)ret = ops->vidioc_try_fmt_sliced_vbi_out(file,fh, f);break;case V4L2_BUF_TYPE_PRIVATE:/* CLEAR_AFTER_FIELD(f, fmt.raw_data); <- does nothing */if (ops->vidioc_try_fmt_type_private)ret = ops->vidioc_try_fmt_type_private(file,fh, f);break;}break;}/* FIXME: Those buf reqs could be handled here,with some changes on videobuf to allow its header to be included atvideodev2.h or being merged at videodev2.*/case VIDIOC_REQBUFS:{struct v4l2_requestbuffers *p = arg;if (!ops->vidioc_reqbufs)break;ret = check_fmt(ops, p->type);if (ret)break;if (p->type < V4L2_BUF_TYPE_PRIVATE)CLEAR_AFTER_FIELD(p, memory);ret = ops->vidioc_reqbufs(file, fh, p);dbgarg(cmd, "count=%d, type=%s, memory=%s\n",p->count,prt_names(p->type, v4l2_type_names),prt_names(p->memory, v4l2_memory_names));break;}case VIDIOC_QUERYBUF:{struct v4l2_buffer *p = arg;if (!ops->vidioc_querybuf)break;ret = check_fmt(ops, p->type);if (ret)break;ret = ops->vidioc_querybuf(file, fh, p);if (!ret)dbgbuf(cmd, vfd, p);break;}case VIDIOC_QBUF:{struct v4l2_buffer *p = arg;if (!ops->vidioc_qbuf)break;ret = check_fmt(ops, p->type);if (ret)break;ret = ops->vidioc_qbuf(file, fh, p);if (!ret)dbgbuf(cmd, vfd, p);break;}case VIDIOC_DQBUF:{struct v4l2_buffer *p = arg;if (!ops->vidioc_dqbuf)break;ret = check_fmt(ops, p->type);if (ret)break;ret = ops->vidioc_dqbuf(file, fh, p);if (!ret)dbgbuf(cmd, vfd, p);break;}case VIDIOC_OVERLAY:{int *i = arg;if (!ops->vidioc_overlay)break;dbgarg(cmd, "value=%d\n", *i);ret = ops->vidioc_overlay(file, fh, *i);break;}case VIDIOC_G_FBUF:{struct v4l2_framebuffer *p = arg;if (!ops->vidioc_g_fbuf)break;ret = ops->vidioc_g_fbuf(file, fh, arg);if (!ret) {dbgarg(cmd, "capability=0x%x, flags=%d, base=0x%08lx\n",p->capability, p->flags,(unsigned long)p->base);v4l_print_pix_fmt(vfd, &p->fmt);}break;}case VIDIOC_S_FBUF:{struct v4l2_framebuffer *p = arg;if (!ops->vidioc_s_fbuf)break;dbgarg(cmd, "capability=0x%x, flags=%d, base=0x%08lx\n",p->capability, p->flags, (unsigned long)p->base);v4l_print_pix_fmt(vfd, &p->fmt);ret = ops->vidioc_s_fbuf(file, fh, arg);break;}case VIDIOC_STREAMON:{enum v4l2_buf_type i = *(int *)arg;if (!ops->vidioc_streamon)break;dbgarg(cmd, "type=%s\n", prt_names(i, v4l2_type_names));ret = ops->vidioc_streamon(file, fh, i);break;}case VIDIOC_STREAMOFF:{enum v4l2_buf_type i = *(int *)arg;if (!ops->vidioc_streamoff)break;dbgarg(cmd, "type=%s\n", prt_names(i, v4l2_type_names));ret = ops->vidioc_streamoff(file, fh, i);break;}/* ---------- tv norms ---------- */case VIDIOC_ENUMSTD:{struct v4l2_standard *p = arg;v4l2_std_id id = vfd->tvnorms, curr_id = 0;unsigned int index = p->index, i, j = 0;const char *descr = "";/* Return norm array in a canonical way */for (i = 0; i <= index && id; i++) {/* last std value in the standards array is 0, so thiswhile always ends there since (id & 0) == 0. 
*/while ((id & standards[j].std) != standards[j].std)j++;curr_id = standards[j].std;descr = standards[j].descr;j++;if (curr_id == 0)break;if (curr_id != V4L2_STD_PAL &&curr_id != V4L2_STD_SECAM &&curr_id != V4L2_STD_NTSC)id &= ~curr_id;}if (i <= index)return -EINVAL;v4l2_video_std_construct(p, curr_id, descr);dbgarg(cmd, "index=%d, id=0x%Lx, name=%s, fps=%d/%d, ""framelines=%d\n", p->index,(unsigned long long)p->id, p->name,p->frameperiod.numerator,p->frameperiod.denominator,p->framelines);ret = 0;break;}case VIDIOC_G_STD:{v4l2_std_id *id = arg;ret = 0;/* Calls the specific handler */if (ops->vidioc_g_std)ret = ops->vidioc_g_std(file, fh, id);else if (vfd->current_norm)*id = vfd->current_norm;elseret = -EINVAL;if (!ret)dbgarg(cmd, "std=0x%08Lx\n", (long long unsigned)*id);break;}case VIDIOC_S_STD:{v4l2_std_id *id = arg, norm;dbgarg(cmd, "std=%08Lx\n", (long long unsigned)*id);norm = (*id) & vfd->tvnorms;if (vfd->tvnorms && !norm) /* Check if std is supported */break;/* Calls the specific handler */if (ops->vidioc_s_std)ret = ops->vidioc_s_std(file, fh, &norm);elseret = -EINVAL;/* Updates standard information */if (ret >= 0)vfd->current_norm = norm;break;}case VIDIOC_QUERYSTD:{v4l2_std_id *p = arg;if (!ops->vidioc_querystd)break;ret = ops->vidioc_querystd(file, fh, arg);if (!ret)dbgarg(cmd, "detected std=%08Lx\n",(unsigned long long)*p);break;}/* ------ input switching ---------- *//* FIXME: Inputs can be handled inside videodev2 */case VIDIOC_ENUMINPUT:{struct v4l2_input *p = arg;if (!ops->vidioc_enum_input)break;ret = ops->vidioc_enum_input(file, fh, p);if (!ret)dbgarg(cmd, "index=%d, name=%s, type=%d, ""audioset=%d, ""tuner=%d, std=%08Lx, status=%d\n",p->index, p->name, p->type, p->audioset,p->tuner,(unsigned long long)p->std,p->status);break;}case VIDIOC_G_INPUT:{unsigned int *i = arg;if (!ops->vidioc_g_input)break;ret = ops->vidioc_g_input(file, fh, i);if (!ret)dbgarg(cmd, "value=%d\n", *i);break;}case VIDIOC_S_INPUT:{unsigned int *i = arg;if (!ops->vidioc_s_input)break;dbgarg(cmd, "value=%d\n", *i);ret = ops->vidioc_s_input(file, fh, *i);break;}/* ------ output switching ---------- */case VIDIOC_ENUMOUTPUT:{struct v4l2_output *p = arg;if (!ops->vidioc_enum_output)break;ret = ops->vidioc_enum_output(file, fh, p);if (!ret)dbgarg(cmd, "index=%d, name=%s, type=%d, ""audioset=0x%x, ""modulator=%d, std=0x%08Lx\n",p->index, p->name, p->type, p->audioset,p->modulator, (unsigned long long)p->std);break;}case VIDIOC_G_OUTPUT:{unsigned int *i = arg;if (!ops->vidioc_g_output)break;ret = ops->vidioc_g_output(file, fh, i);if (!ret)dbgarg(cmd, "value=%d\n", *i);break;}case VIDIOC_S_OUTPUT:{unsigned int *i = arg;if (!ops->vidioc_s_output)break;dbgarg(cmd, "value=%d\n", *i);ret = ops->vidioc_s_output(file, fh, *i);break;}/* --- controls ---------------------------------------------- */case VIDIOC_QUERYCTRL:{struct v4l2_queryctrl *p = arg;if (!ops->vidioc_queryctrl)break;ret = ops->vidioc_queryctrl(file, fh, p);if (!ret)dbgarg(cmd, "id=0x%x, type=%d, name=%s, min/max=%d/%d, ""step=%d, default=%d, flags=0x%08x\n",p->id, p->type, p->name,p->minimum, p->maximum,p->step, p->default_value, p->flags);elsedbgarg(cmd, "id=0x%x\n", p->id);break;}case VIDIOC_G_CTRL:{struct v4l2_control *p = arg;if (ops->vidioc_g_ctrl)ret = ops->vidioc_g_ctrl(file, fh, p);else if (ops->vidioc_g_ext_ctrls) {struct v4l2_ext_controls ctrls;struct v4l2_ext_control ctrl;ctrls.ctrl_class = V4L2_CTRL_ID2CLASS(p->id);ctrls.count = 1;ctrls.controls = &ctrl;ctrl.id = p->id;ctrl.value = p->value;if (check_ext_ctrls(&ctrls, 1)) {ret = 
ops->vidioc_g_ext_ctrls(file, fh, &ctrls);if (ret == 0)p->value = ctrl.value;}} elsebreak;if (!ret)dbgarg(cmd, "id=0x%x, value=%d\n", p->id, p->value);elsedbgarg(cmd, "id=0x%x\n", p->id);break;}case VIDIOC_S_CTRL:{struct v4l2_control *p = arg;struct v4l2_ext_controls ctrls;struct v4l2_ext_control ctrl;if (!ops->vidioc_s_ctrl && !ops->vidioc_s_ext_ctrls)break;dbgarg(cmd, "id=0x%x, value=%d\n", p->id, p->value);if (ops->vidioc_s_ctrl) {ret = ops->vidioc_s_ctrl(file, fh, p);break;}if (!ops->vidioc_s_ext_ctrls)break;ctrls.ctrl_class = V4L2_CTRL_ID2CLASS(p->id);ctrls.count = 1;ctrls.controls = &ctrl;ctrl.id = p->id;ctrl.value = p->value;if (check_ext_ctrls(&ctrls, 1))ret = ops->vidioc_s_ext_ctrls(file, fh, &ctrls);break;}case VIDIOC_G_EXT_CTRLS:{struct v4l2_ext_controls *p = arg;p->error_idx = p->count;if (!ops->vidioc_g_ext_ctrls)break;if (check_ext_ctrls(p, 0))ret = ops->vidioc_g_ext_ctrls(file, fh, p);v4l_print_ext_ctrls(cmd, vfd, p, !ret);break;}case VIDIOC_S_EXT_CTRLS:{struct v4l2_ext_controls *p = arg;p->error_idx = p->count;if (!ops->vidioc_s_ext_ctrls)break;v4l_print_ext_ctrls(cmd, vfd, p, 1);if (check_ext_ctrls(p, 0))ret = ops->vidioc_s_ext_ctrls(file, fh, p);break;}case VIDIOC_TRY_EXT_CTRLS:{struct v4l2_ext_controls *p = arg;p->error_idx = p->count;if (!ops->vidioc_try_ext_ctrls)break;v4l_print_ext_ctrls(cmd, vfd, p, 1);if (check_ext_ctrls(p, 0))ret = ops->vidioc_try_ext_ctrls(file, fh, p);break;}case VIDIOC_QUERYMENU:{struct v4l2_querymenu *p = arg;if (!ops->vidioc_querymenu)break;ret = ops->vidioc_querymenu(file, fh, p);if (!ret)dbgarg(cmd, "id=0x%x, index=%d, name=%s\n",p->id, p->index, p->name);elsedbgarg(cmd, "id=0x%x, index=%d\n",p->id, p->index);break;}/* --- audio ---------------------------------------------- */case VIDIOC_ENUMAUDIO:{struct v4l2_audio *p = arg;if (!ops->vidioc_enumaudio)break;ret = ops->vidioc_enumaudio(file, fh, p);if (!ret)dbgarg(cmd, "index=%d, name=%s, capability=0x%x, ""mode=0x%x\n", p->index, p->name,p->capability, p->mode);elsedbgarg(cmd, "index=%d\n", p->index);break;}case VIDIOC_G_AUDIO:{struct v4l2_audio *p = arg;if (!ops->vidioc_g_audio)break;ret = ops->vidioc_g_audio(file, fh, p);if (!ret)dbgarg(cmd, "index=%d, name=%s, capability=0x%x, ""mode=0x%x\n", p->index,p->name, p->capability, p->mode);elsedbgarg(cmd, "index=%d\n", p->index);break;}case VIDIOC_S_AUDIO:{struct v4l2_audio *p = arg;if (!ops->vidioc_s_audio)break;dbgarg(cmd, "index=%d, name=%s, capability=0x%x, ""mode=0x%x\n", p->index, p->name,p->capability, p->mode);ret = ops->vidioc_s_audio(file, fh, p);break;}case VIDIOC_ENUMAUDOUT:{struct v4l2_audioout *p = arg;if (!ops->vidioc_enumaudout)break;dbgarg(cmd, "Enum for index=%d\n", p->index);ret = ops->vidioc_enumaudout(file, fh, p);if (!ret)dbgarg2("index=%d, name=%s, capability=%d, ""mode=%d\n", p->index, p->name,p->capability, p->mode);break;}case VIDIOC_G_AUDOUT:{struct v4l2_audioout *p = arg;if (!ops->vidioc_g_audout)break;ret = ops->vidioc_g_audout(file, fh, p);if (!ret)dbgarg2("index=%d, name=%s, capability=%d, ""mode=%d\n", p->index, p->name,p->capability, p->mode);break;}case VIDIOC_S_AUDOUT:{struct v4l2_audioout *p = arg;if (!ops->vidioc_s_audout)break;dbgarg(cmd, "index=%d, name=%s, capability=%d, ""mode=%d\n", p->index, p->name,p->capability, p->mode);ret = ops->vidioc_s_audout(file, fh, p);break;}case VIDIOC_G_MODULATOR:{struct v4l2_modulator *p = arg;if (!ops->vidioc_g_modulator)break;ret = ops->vidioc_g_modulator(file, fh, p);if (!ret)dbgarg(cmd, "index=%d, name=%s, ""capability=%d, rangelow=%d,"" rangehigh=%d, 
txsubchans=%d\n",p->index, p->name, p->capability,p->rangelow, p->rangehigh,p->txsubchans);break;}case VIDIOC_S_MODULATOR:{struct v4l2_modulator *p = arg;if (!ops->vidioc_s_modulator)break;dbgarg(cmd, "index=%d, name=%s, capability=%d, ""rangelow=%d, rangehigh=%d, txsubchans=%d\n",p->index, p->name, p->capability, p->rangelow,p->rangehigh, p->txsubchans);ret = ops->vidioc_s_modulator(file, fh, p);break;}case VIDIOC_G_CROP:{struct v4l2_crop *p = arg;if (!ops->vidioc_g_crop)break;dbgarg(cmd, "type=%s\n", prt_names(p->type, v4l2_type_names));ret = ops->vidioc_g_crop(file, fh, p);if (!ret)dbgrect(vfd, "", &p->c);break;}case VIDIOC_S_CROP:{struct v4l2_crop *p = arg;if (!ops->vidioc_s_crop)break;dbgarg(cmd, "type=%s\n", prt_names(p->type, v4l2_type_names));dbgrect(vfd, "", &p->c);ret = ops->vidioc_s_crop(file, fh, p);break;}case VIDIOC_CROPCAP:{struct v4l2_cropcap *p = arg;/*FIXME: Should also show v4l2_fract pixelaspect */if (!ops->vidioc_cropcap)break;dbgarg(cmd, "type=%s\n", prt_names(p->type, v4l2_type_names));ret = ops->vidioc_cropcap(file, fh, p);if (!ret) {dbgrect(vfd, "bounds ", &p->bounds);dbgrect(vfd, "defrect ", &p->defrect);}break;}case VIDIOC_G_JPEGCOMP:{struct v4l2_jpegcompression *p = arg;if (!ops->vidioc_g_jpegcomp)break;ret = ops->vidioc_g_jpegcomp(file, fh, p);if (!ret)dbgarg(cmd, "quality=%d, APPn=%d, ""APP_len=%d, COM_len=%d, ""jpeg_markers=%d\n",p->quality, p->APPn, p->APP_len,p->COM_len, p->jpeg_markers);break;}case VIDIOC_S_JPEGCOMP:{struct v4l2_jpegcompression *p = arg;if (!ops->vidioc_g_jpegcomp)break;dbgarg(cmd, "quality=%d, APPn=%d, APP_len=%d, ""COM_len=%d, jpeg_markers=%d\n",p->quality, p->APPn, p->APP_len,p->COM_len, p->jpeg_markers);ret = ops->vidioc_s_jpegcomp(file, fh, p);break;}case VIDIOC_G_ENC_INDEX:{struct v4l2_enc_idx *p = arg;if (!ops->vidioc_g_enc_index)break;ret = ops->vidioc_g_enc_index(file, fh, p);if (!ret)dbgarg(cmd, "entries=%d, entries_cap=%d\n",p->entries, p->entries_cap);break;}case VIDIOC_ENCODER_CMD:{struct v4l2_encoder_cmd *p = arg;if (!ops->vidioc_encoder_cmd)break;ret = ops->vidioc_encoder_cmd(file, fh, p);if (!ret)dbgarg(cmd, "cmd=%d, flags=%x\n", p->cmd, p->flags);break;}case VIDIOC_TRY_ENCODER_CMD:{struct v4l2_encoder_cmd *p = arg;if (!ops->vidioc_try_encoder_cmd)break;ret = ops->vidioc_try_encoder_cmd(file, fh, p);if (!ret)dbgarg(cmd, "cmd=%d, flags=%x\n", p->cmd, p->flags);break;}case VIDIOC_G_PARM:{struct v4l2_streamparm *p = arg;if (ops->vidioc_g_parm) {ret = check_fmt(ops, p->type);if (ret)break;ret = ops->vidioc_g_parm(file, fh, p);} else {v4l2_std_id std = vfd->current_norm;if (p->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)return -EINVAL;ret = 0;if (ops->vidioc_g_std)ret = ops->vidioc_g_std(file, fh, &std);else if (std == 0)ret = -EINVAL;if (ret == 0)v4l2_video_std_frame_period(std,&p->parm.capture.timeperframe);}dbgarg(cmd, "type=%d\n", p->type);break;}case VIDIOC_S_PARM:{struct v4l2_streamparm *p = arg;if (!ops->vidioc_s_parm)break;ret = check_fmt(ops, p->type);if (ret)break;dbgarg(cmd, "type=%d\n", p->type);ret = ops->vidioc_s_parm(file, fh, p);break;}case VIDIOC_G_TUNER:{struct v4l2_tuner *p = arg;if (!ops->vidioc_g_tuner)break;ret = ops->vidioc_g_tuner(file, fh, p);if (!ret)dbgarg(cmd, "index=%d, name=%s, type=%d, ""capability=0x%x, rangelow=%d, ""rangehigh=%d, signal=%d, afc=%d, ""rxsubchans=0x%x, audmode=%d\n",p->index, p->name, p->type,p->capability, p->rangelow,p->rangehigh, p->signal, p->afc,p->rxsubchans, p->audmode);break;}case VIDIOC_S_TUNER:{struct v4l2_tuner *p = arg;if (!ops->vidioc_s_tuner)break;dbgarg(cmd, "index=%d, 
name=%s, type=%d, ""capability=0x%x, rangelow=%d, ""rangehigh=%d, signal=%d, afc=%d, ""rxsubchans=0x%x, audmode=%d\n",p->index, p->name, p->type,p->capability, p->rangelow,p->rangehigh, p->signal, p->afc,p->rxsubchans, p->audmode);ret = ops->vidioc_s_tuner(file, fh, p);break;}case VIDIOC_G_FREQUENCY:{struct v4l2_frequency *p = arg;if (!ops->vidioc_g_frequency)break;ret = ops->vidioc_g_frequency(file, fh, p);if (!ret)dbgarg(cmd, "tuner=%d, type=%d, frequency=%d\n",p->tuner, p->type, p->frequency);break;}case VIDIOC_S_FREQUENCY:{struct v4l2_frequency *p = arg;if (!ops->vidioc_s_frequency)break;dbgarg(cmd, "tuner=%d, type=%d, frequency=%d\n",p->tuner, p->type, p->frequency);ret = ops->vidioc_s_frequency(file, fh, p);break;}case VIDIOC_G_SLICED_VBI_CAP:{struct v4l2_sliced_vbi_cap *p = arg;if (!ops->vidioc_g_sliced_vbi_cap)break;/* Clear up to type, everything after type is zerod already */memset(p, 0, offsetof(struct v4l2_sliced_vbi_cap, type));dbgarg(cmd, "type=%s\n", prt_names(p->type, v4l2_type_names));ret = ops->vidioc_g_sliced_vbi_cap(file, fh, p);if (!ret)dbgarg2("service_set=%d\n", p->service_set);break;}case VIDIOC_LOG_STATUS:{if (!ops->vidioc_log_status)break;ret = ops->vidioc_log_status(file, fh);break;} #ifdef CONFIG_VIDEO_ADV_DEBUGcase VIDIOC_DBG_G_REGISTER:{struct v4l2_dbg_register *p = arg;if (!capable(CAP_SYS_ADMIN))ret = -EPERM;else if (ops->vidioc_g_register)ret = ops->vidioc_g_register(file, fh, p);break;}case VIDIOC_DBG_S_REGISTER:{struct v4l2_dbg_register *p = arg;if (!capable(CAP_SYS_ADMIN))ret = -EPERM;else if (ops->vidioc_s_register)ret = ops->vidioc_s_register(file, fh, p);break;} #endifcase VIDIOC_DBG_G_CHIP_IDENT:{struct v4l2_dbg_chip_ident *p = arg;if (!ops->vidioc_g_chip_ident)break;p->ident = V4L2_IDENT_NONE;p->revision = 0;ret = ops->vidioc_g_chip_ident(file, fh, p);if (!ret)dbgarg(cmd, "chip_ident=%u, revision=0x%x\n", p->ident, p->revision);break;}case VIDIOC_S_HW_FREQ_SEEK:{struct v4l2_hw_freq_seek *p = arg;if (!ops->vidioc_s_hw_freq_seek)break;dbgarg(cmd,"tuner=%d, type=%d, seek_upward=%d, wrap_around=%d\n",p->tuner, p->type, p->seek_upward, p->wrap_around);ret = ops->vidioc_s_hw_freq_seek(file, fh, p);break;}case VIDIOC_ENUM_FRAMESIZES:{struct v4l2_frmsizeenum *p = arg;if (!ops->vidioc_enum_framesizes)break;ret = ops->vidioc_enum_framesizes(file, fh, p);dbgarg(cmd,"index=%d, pixelformat=%c%c%c%c, type=%d ",p->index,(p->pixel_format & 0xff),(p->pixel_format >> 8) & 0xff,(p->pixel_format >> 16) & 0xff,(p->pixel_format >> 24) & 0xff,p->type);switch (p->type) {case V4L2_FRMSIZE_TYPE_DISCRETE:dbgarg3("width = %d, height=%d\n",p->discrete.width, p->discrete.height);break;case V4L2_FRMSIZE_TYPE_STEPWISE:dbgarg3("min %dx%d, max %dx%d, step %dx%d\n",p->stepwise.min_width, p->stepwise.min_height,p->stepwise.step_width, p->stepwise.step_height,p->stepwise.max_width, p->stepwise.max_height);break;case V4L2_FRMSIZE_TYPE_CONTINUOUS:dbgarg3("continuous\n");break;default:dbgarg3("- Unknown type!\n");}break;}case VIDIOC_ENUM_FRAMEINTERVALS:{struct v4l2_frmivalenum *p = arg;if (!ops->vidioc_enum_frameintervals)break;ret = ops->vidioc_enum_frameintervals(file, fh, p);dbgarg(cmd,"index=%d, pixelformat=%d, width=%d, height=%d, type=%d ",p->index, p->pixel_format,p->width, p->height, p->type);switch (p->type) {case V4L2_FRMIVAL_TYPE_DISCRETE:dbgarg2("fps=%d/%d\n",p->discrete.numerator,p->discrete.denominator);break;case V4L2_FRMIVAL_TYPE_STEPWISE:dbgarg2("min=%d/%d, max=%d/%d, 
step=%d/%d\n",p->stepwise.min.numerator,p->stepwise.min.denominator,p->stepwise.max.numerator,p->stepwise.max.denominator,p->stepwise.step.numerator,p->stepwise.step.denominator);break;case V4L2_FRMIVAL_TYPE_CONTINUOUS:dbgarg2("continuous\n");break;default:dbgarg2("- Unknown type!\n");}break;}default:{if (!ops->vidioc_default)break;ret = ops->vidioc_default(file, fh, cmd, arg);break;}} /* switch */if (vfd->debug & V4L2_DEBUG_IOCTL_ARG) {if (ret < 0) {v4l_print_ioctl(vfd->name, cmd);printk(KERN_CONT " error %ld\n", ret);}}return ret; }
The ioctl switch is far too long to go through completely, so here are a few examples.
/**********************************************************************************************************************************************/
    case VIDIOC_STREAMON:   /* start video capture */
    {
        enum v4l2_buf_type i = *(int *)arg;
        if (!ops->vidioc_streamon)
            break;
        dbgarg(cmd, "type=%s\n", prt_names(i, v4l2_type_names));
        ret = ops->vidioc_streamon(file, fh, i);
        break;
    }
This ends up in the vidioc_streamon() registered in struct v4l2_ioctl_ops dev_ioctl_ops in gspca.c (quoted earlier), which calls gspca_init_transfer(gspca_dev).
This function is the crucial one. It:
1. calls create_urbs(gspca_dev, ep), which allocates and initializes the URBs, roughly as follows (a consolidated sketch appears after this list):
urb = usb_alloc_urb(npkt, GFP_KERNEL);
urb->transfer_buffer = usb_buffer_alloc(gspca_dev->dev, bsize, GFP_KERNEL, &urb->transfer_dma);   /* buffer the data is read back into */
urb->pipe = usb_rcvisocpipe(gspca_dev->dev, ep->desc.bEndpointAddress);   /* isochronous transfer; bulk would use usb_rcvbulkpipe */
urb->interval = ep->desc.bInterval;
urb->complete = isoc_irq;   /* completion callback invoked when the URB finishes */
2. calls functions in zc3xx.c to initialize the camera for streaming:
gspca_dev->sd_desc->isoc_init(gspca_dev);
and to start capturing data from the camera:
gspca_dev->sd_desc->start(gspca_dev);
3. submits the URBs:
usb_submit_urb(gspca_dev->urb[n], GFP_KERNEL);
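Putting the fragments above together, here is a condensed sketch of allocating, filling and submitting one isochronous URB. The packet count, names and error handling are illustrative; the real create_urbs() in gspca.c allocates several URBs (gspca_dev->urb[n]) and sizes them from the endpoint descriptor:

#include <linux/usb.h>

#define DEMO_NPKT 32    /* isochronous packets per URB (illustrative) */

/* completion handler; what it does with the packets is sketched further below */
static void demo_isoc_complete(struct urb *urb)
{
}

static int demo_create_and_submit_urb(struct usb_device *udev,
                                      struct usb_host_endpoint *ep,
                                      void *context)
{
        unsigned int psize = le16_to_cpu(ep->desc.wMaxPacketSize) & 0x7ff;
        unsigned int bsize = psize * DEMO_NPKT;
        struct urb *urb;
        int i, ret;

        urb = usb_alloc_urb(DEMO_NPKT, GFP_KERNEL);
        if (!urb)
                return -ENOMEM;

        urb->transfer_buffer = usb_buffer_alloc(udev, bsize, GFP_KERNEL,
                                                &urb->transfer_dma);
        if (!urb->transfer_buffer) {
                usb_free_urb(urb);
                return -ENOMEM;
        }

        urb->dev = udev;
        urb->context = context;
        urb->pipe = usb_rcvisocpipe(udev, ep->desc.bEndpointAddress);
        urb->transfer_flags = URB_ISO_ASAP | URB_NO_TRANSFER_DMA_MAP;
        urb->interval = ep->desc.bInterval;
        urb->complete = demo_isoc_complete;
        urb->number_of_packets = DEMO_NPKT;
        urb->transfer_buffer_length = bsize;
        for (i = 0; i < DEMO_NPKT; i++) {
                urb->iso_frame_desc[i].length = psize;
                urb->iso_frame_desc[i].offset = psize * i;
        }

        ret = usb_submit_urb(urb, GFP_KERNEL);
        if (ret < 0) {
                usb_buffer_free(udev, bsize, urb->transfer_buffer,
                                urb->transfer_dma);
                usb_free_urb(urb);
        }
        return ret;
}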
/**********************************************************************************************************************************************/
    case VIDIOC_QBUF:   /* queue an empty video buffer onto the driver's input queue */
    {
        struct v4l2_buffer *p = arg;
        if (!ops->vidioc_qbuf)
            break;
        ret = check_fmt(ops, p->type);
        if (ret)
            break;
        ret = ops->vidioc_qbuf(file, fh, p);
        if (!ret)
            dbgbuf(cmd, vfd, p);
        break;
    }
As before, ops->vidioc_qbuf resolves to gspca.c's vidioc_qbuf(), which does:
    /* put the buffer in the 'queued' queue */
    i = gspca_dev->fr_q;
    gspca_dev->fr_queue[i] = index;
    gspca_dev->fr_q = (i + 1) % gspca_dev->nframes;
The gspca_dev members involved are:
    __u8 *frbuf;                                /* buffer for nframes */
    struct gspca_frame frame[GSPCA_MAX_FRAMES];
    __u32 frsz;                                 /* frame size */
    char nframes;                               /* number of frames */
    char fr_i;                                  /* frame being filled */
    char fr_q;                                  /* next frame to queue */
    char fr_o;                                  /* next frame to dequeue */
    signed char fr_queue[GSPCA_MAX_FRAMES];     /* frame queue */
If the above still looks hazy, a user-space example makes it concrete (demo taken from http://blog.csdn.net/Sasoritattoo/article/details/6225486):
struct v4l2_buffer tV4L2buf;
memset(&tV4L2buf, 0, sizeof(struct v4l2_buffer));
tV4L2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4L2buf.memory = V4L2_MEMORY_MMAP;
tV4L2buf.index = i;   /* the number of the kernel video buffer to queue onto the input queue */
iret = ioctl(fd_usbcam, VIDIOC_QBUF, &tV4L2buf);
Clear now? The video buffers and their queue are implemented entirely in the kernel; user space only specifies which buffer of that queue to use. Once the device is capturing, each frame of video data is stored into the corresponding buffer of the input queue.
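For completeness, the matching dequeue step on the user-space side looks roughly like this (continuing the hypothetical names fd_usbcam/iret from the demo above):

struct v4l2_buffer tV4L2buf;

memset(&tV4L2buf, 0, sizeof(struct v4l2_buffer));
tV4L2buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4L2buf.memory = V4L2_MEMORY_MMAP;

/* blocks until one of the queued kernel buffers holds a complete frame */
iret = ioctl(fd_usbcam, VIDIOC_DQBUF, &tV4L2buf);

/* tV4L2buf.index now says which kernel buffer was filled; its contents are
 * read through the pointer mmap() returned for that index, and
 * tV4L2buf.bytesused says how many bytes are valid */

/* hand the buffer back so the driver can fill it again */
iret = ioctl(fd_usbcam, VIDIOC_QBUF, &tV4L2buf);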
/**********************************************************************************************************************************************/
    case VIDIOC_DQBUF:  /* take a buffer that already holds one frame of video data off the output queue */
    {
        struct v4l2_buffer *p = arg;
        if (!ops->vidioc_dqbuf)
            break;
        ret = check_fmt(ops, p->type);
        if (ret)
            break;
        ret = ops->vidioc_dqbuf(file, fh, p);
        if (!ret)
            dbgbuf(cmd, vfd, p);
        break;
    }
/**********************************************************************************************************************************************/
Now look at the implementation of mmap in gspca.c:
static int dev_mmap(struct file *file, struct vm_area_struct *vma) {struct gspca_dev *gspca_dev = file->private_data;struct gspca_frame *frame;struct page *page;unsigned long addr, start, size;int i, ret;start = vma->vm_start;size = vma->vm_end - vma->vm_start;PDEBUG(D_STREAM, "mmap start:%08x size:%d", (int) start, (int) size);if (mutex_lock_interruptible(&gspca_dev->queue_lock))return -ERESTARTSYS;if (!gspca_dev->present) {ret = -ENODEV;goto out;}if (gspca_dev->capt_file != file) {ret = -EINVAL;goto out;}frame = NULL;for (i = 0; i < gspca_dev->nframes; ++i) {if (gspca_dev->frame[i].v4l2_buf.memory != V4L2_MEMORY_MMAP) {PDEBUG(D_STREAM, "mmap bad memory type");break;}if ((gspca_dev->frame[i].v4l2_buf.m.offset >> PAGE_SHIFT)== vma->vm_pgoff) {frame = &gspca_dev->frame[i];break;}}if (frame == NULL) {PDEBUG(D_STREAM, "mmap no frame buffer found");ret = -EINVAL;goto out;} #ifdef CONFIG_VIDEO_V4L1_COMPAT/* v4l1 maps all the buffers */if (i != 0|| size != frame->v4l2_buf.length * gspca_dev->nframes) #endifif (size != frame->v4l2_buf.length) {PDEBUG(D_STREAM, "mmap bad size");ret = -EINVAL;goto out;}/** - VM_IO marks the area as being a mmaped region for I/O to a* device. It also prevents the region from being core dumped.*/vma->vm_flags |= VM_IO;addr = (unsigned long) frame->data;while (size > 0) {page = vmalloc_to_page((void *) addr);ret = vm_insert_page(vma, start, page);if (ret < 0)goto out;start += PAGE_SIZE;addr += PAGE_SIZE;size -= PAGE_SIZE;}vma->vm_ops = &gspca_vm_ops;vma->vm_private_data = frame;gspca_vm_open(vma);ret = 0; out:mutex_unlock(&gspca_dev->queue_lock);return ret; } 簡言之
start = vma->vm_start;
size = vma->vm_end - vma->vm_start;
page = vmalloc_to_page((void *) addr);
vm_insert_page(vma, start, page);
This mmap implementation matches neither of the approaches described in LDD3.
LDD3 either lets mmap() itself do almost nothing and has nopage() — called automatically on every page fault — call vmalloc_to_page() and return the page,
or has mmap() call remap_pfn_range() to build the VMA's page tables all at once.
Here mmap(), i.e. dev_mmap(), calls vmalloc_to_page() and then vm_insert_page(); judging by its name, vm_insert_page() fills in the corresponding page-table entries of the VMA.
So even though this mmap does not use remap_pfn_range(), it still builds all the page tables at the moment user space calls mmap(), instead of mapping one page per fault.
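For comparison, a minimal sketch of the remap_pfn_range() style that LDD3 describes, assuming a physically contiguous buffer (e.g. from kmalloc); all names are illustrative. gspca cannot use this, presumably because its frame buffer comes from vmalloc and is not physically contiguous, which is why dev_mmap() has to walk it page by page with vmalloc_to_page() + vm_insert_page():

#include <linux/fs.h>
#include <linux/io.h>
#include <linux/mm.h>

/* hypothetical driver state: a physically contiguous buffer allocated elsewhere */
extern void *demo_buf;
extern unsigned long demo_buf_size;

static int demo_mmap(struct file *file, struct vm_area_struct *vma)
{
        unsigned long size = vma->vm_end - vma->vm_start;
        unsigned long pfn  = virt_to_phys(demo_buf) >> PAGE_SHIFT;

        if (size > demo_buf_size)
                return -EINVAL;

        /* build every page-table entry of the VMA right now, in one call */
        if (remap_pfn_range(vma, vma->vm_start, pfn, size, vma->vm_page_prot))
                return -EAGAIN;

        return 0;
}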
/**********************************************************************************************************************************************/
In other words, V4L2 creates the device node (a character device) and provides the device interface to user space; whenever the user accesses the device, V4L2 ultimately dispatches into one of these two sets of functions in gspca.c:
struct v4l2_file_operations dev_fops
struct v4l2_ioctl_ops dev_ioctl_ops
For example, when the user calls open(), V4L2 ends up in dev_fops.open() in gspca.c to operate on the device.
And when the user calls ioctl(), V4L2 dispatches to the matching handler in dev_ioctl_ops in gspca.c.
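A minimal user-space round trip through that dispatching (assuming the node is /dev/video0):

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
        struct v4l2_capability cap;
        int fd = open("/dev/video0", O_RDWR);   /* -> v4l2_open() -> dev_fops.open() in gspca.c */

        if (fd < 0)
                return 1;

        memset(&cap, 0, sizeof(cap));
        /* -> v4l2_unlocked_ioctl() -> video_ioctl2() -> dev_ioctl_ops.vidioc_querycap() */
        if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
                printf("driver=%s card=%s caps=0x%08x\n",
                       cap.driver, cap.card, cap.capabilities);

        close(fd);
        return 0;
}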
From user space the sequence is: ioctl(VIDIOC_REQBUFS) tells the driver to allocate the video buffers (several of them, forming a queue: gspca_dev->frame[i].data); ioctl(VIDIOC_QUERYBUF) queries the information about the buffers just allocated; then mmap(); then ioctl(VIDIOC_STREAMON), which sets up the URBs and starts the camera capturing; then ioctl(VIDIOC_QBUF) tells the driver which buffer of the queue to use, and ioctl(VIDIOC_DQBUF) waits for the driver to fill that buffer with image data and hands it back, after which user space reads it.
The next part may contain mistakes:
The gspca_dev structure maintains a queue using the following members:
    atomic_t fr_q;                              /* next frame to queue */
    atomic_t fr_i;                              /* frame being filled */
    signed char fr_queue[GSPCA_MAX_FRAMES];     /* frame queue */
    char nframes;                               /* number of frames */
    u8 fr_o;                                    /* next frame to dequeue */
In vidioc_qbuf and vidioc_dqbuf these decide which buffer is queued or dequeued — more precisely, which buffer's index is queued or dequeued. Simplified code below.
In vidioc_qbuf
(user space first passes in an index):
fr_queue[fr_q] = index;                                 // put the index being queued into fr_queue[fr_q]
fr_q = (fr_q + 1) % GSPCA_MAX_FRAMES;                   // fr_q cycles 0..15; GSPCA_MAX_FRAMES defaults to 16, fr_q starts at 0
frame[index].v4l2_buf.flags |= V4L2_BUF_FLAG_QUEUED;    // mark frame 'index' as queued
frame[index].v4l2_buf.flags &= ~V4L2_BUF_FLAG_DONE;     // clear frame 'index''s done flag
And in vidioc_dqbuf:
index = fr_queue[fr_o];                                 // the index being dequeued comes from fr_queue[fr_o]
fr_o = (fr_o + 1) % GSPCA_MAX_FRAMES;                   // fr_o cycles through 0..GSPCA_MAX_FRAMES-1, starting at 0
frame[index].v4l2_buf.flags &= ~V4L2_BUF_FLAG_DONE;     // clear frame 'index''s done flag
memcpy(v4l2_buf, &frame[index].v4l2_buf, sizeof *v4l2_buf);
One more place touches the queue: gspca_frame_add() (in gspca.c), reached when a URB completes via fill_frame() (gspca.c), which calls pkt_scan() (zc3xx.c), which calls gspca_frame_add().
When the packet is the last one of an image, it does the following (a small stand-alone demo of this ring arithmetic follows the snippet):
index = fr_queue[fr_i];
gspca_dev->image = frame[index].data;
i = (i + 1) % GSPCA_MAX_FRAMES;
atomic_set(&fr_i, i);
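The indices above form a simple ring over the frame array. A tiny, self-contained user-space demonstration of the same index arithmetic (illustrative only; fr_i, which the URB completion path advances, is left out):

#include <stdio.h>

#define MAX_FRAMES 16

static signed char fr_queue[MAX_FRAMES];
static int fr_q;    /* next slot to queue into */
static int fr_o;    /* next slot to dequeue from */

static void qbuf(int index)     /* what vidioc_qbuf does with the index */
{
        fr_queue[fr_q] = index;
        fr_q = (fr_q + 1) % MAX_FRAMES;
}

static int dqbuf(void)          /* what vidioc_dqbuf does */
{
        int index = fr_queue[fr_o];

        fr_o = (fr_o + 1) % MAX_FRAMES;
        return index;
}

int main(void)
{
        int i;

        for (i = 0; i < 4; i++)         /* user space queues its 4 buffers, like mjpg-streamer */
                qbuf(i);

        for (i = 0; i < 4; i++)         /* and gets them back in the same order once filled */
                printf("dequeued buffer %d\n", dqbuf());

        return 0;
}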
Now let's tie the data path together by walking through the V4L2 sequence an application uses to drive the camera.
The ioctls used, in order, inside init_v4l2():
ioctl(vd->fd, VIDIOC_QUERYCAP, &vd->cap);   // query the video device's capabilities
ioctl(vd->fd, VIDIOC_S_FMT, &vd->fmt);      // set the video data format
ioctl(vd->fd, VIDIOC_S_PARM, setfps);       // number of frames per second
ioctl(vd->fd, VIDIOC_REQBUFS, &vd->rb);     // ask the V4L2 driver to allocate the video buffers (several, forming a queue) in kernel space
ioctl(vd->fd, VIDIOC_QUERYBUF, &vd->buf);   // query the buffers just allocated, in preparation for the mmap() below
mmap();
for (i = 0; i < n; i++) {
    vd->buf.index = i;                      // select buffer i
    ioctl(vd->fd, VIDIOC_QBUF, &vd->buf);   // queue buffer i; all n buffers get queued. The driver then maintains the queue, so later QBUFs need no index — they simply requeue whichever buffer was just dequeued, as below
}
while (!stop) {
    ioctl(vd->fd, VIDIOC_STREAMON, &type);  // start capturing image data
    ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf);  // dequeue the first buffer in the queue, which already holds data (filled at URB-completion time; usually several URB completions are needed before a buffer can be dequeued)
    memcpy(vd->tmpbuffer, vd->mem[vd->buf.index], vd->buf.bytesused);   // copy out the image data
    ioctl(vd->fd, VIDIOC_QBUF, &vd->buf);   // requeue the buffer just dequeued (otherwise the queue only ever holds the 4 initial frames and runs dry once they are consumed)
}
Analysis (the figure from the original post is not reproduced here):
When user space makes the driver run vidioc_reqbufs, n frames are created in the kernel (mjpg-streamer defaults to 4, NB_BUFFER=4), whose start addresses are gspca_dev->frame[i].data.
When user space calls mmap() (mjpg-streamer does so 4 times), the kernel's four frame[i].data buffers are mapped into user space as vd->mem[i].
When user space makes the driver run vidioc_qbuf, the chosen index is queued, i.e. frame[index] is queued, so frame[index].data becomes the destination for image data.
When user space makes the driver run vidioc_streamon, isochronous URBs are created and their completion handler is set to isoc_irq.
When user space makes the driver run vidioc_dqbuf, it blocks inside that function until the USB completion interrupt.
In the completion handler, data is read out of the URB into gspca_dev->image (which at that moment points to frame[index].data):
memcpy(gspca_dev->image + gspca_dev->image_len, data, len);
Then the waiting thread is woken up, and it can read the data from vd->mem[index].
Since a single URB transfer generally cannot carry a whole image, several URBs' worth of packets go into one frame.
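A condensed sketch of what the completion handler does with those packets: scan each isochronous packet, append its payload to the frame being filled, and resubmit the URB. The structure and names here are simplified and hypothetical; in the real driver the sub-driver's pkt_scan() decides where frames start and end:

#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/usb.h>

struct demo_dev {                       /* hypothetical per-device state */
        u8 *image;                      /* points at frame[index].data */
        unsigned int image_len;         /* bytes accumulated so far */
};

static void demo_isoc_complete(struct urb *urb)
{
        struct demo_dev *dev = urb->context;
        int i, ret;

        for (i = 0; i < urb->number_of_packets; i++) {
                unsigned int len = urb->iso_frame_desc[i].actual_length;
                const u8 *data = (u8 *)urb->transfer_buffer
                                 + urb->iso_frame_desc[i].offset;

                if (urb->iso_frame_desc[i].status != 0 || len == 0)
                        continue;       /* skip bad or empty packets */

                /* append this packet's payload to the frame being filled */
                memcpy(dev->image + dev->image_len, data, len);
                dev->image_len += len;
        }

        /* resubmit so the isochronous stream keeps flowing */
        ret = usb_submit_urb(urb, GFP_ATOMIC);
        if (ret < 0)
                pr_err("usb_submit_urb failed: %d\n", ret);
}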
Also note that struct v4l2_buffer in the driver does not hold image data itself; it is the interface the driver gives to user space, so that mmap(), ioctl(vd->fd, VIDIOC_QBUF, &vd->buf) and ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf) can pass parameters to the driver — and, through some of its fields, the driver can return information to user space (see the example after the structure below).
struct v4l2_buffer {
    __u32                   index;
    enum v4l2_buf_type      type;
    __u32                   bytesused;
    __u32                   flags;
    enum v4l2_field         field;
    struct timeval          timestamp;
    struct v4l2_timecode    timecode;
    __u32                   sequence;
    /* memory location */
    enum v4l2_memory        memory;
    union {
        __u32           offset;
        unsigned long   userptr;
    } m;
    __u32                   length;
    __u32                   input;
    __u32                   reserved;
};
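For instance, the m.offset and length fields that the driver fills in during VIDIOC_QUERYBUF are exactly what user space then passes to mmap() (buffer index 0 and the fd name fd_usbcam are illustrative):

struct v4l2_buffer buf;
void *mem0;

memset(&buf, 0, sizeof(buf));
buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index  = 0;

ioctl(fd_usbcam, VIDIOC_QUERYBUF, &buf);    /* driver fills in buf.length and buf.m.offset */

/* map kernel buffer 0 (frame[0].data) into this process */
mem0 = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,
            fd_usbcam, buf.m.offset);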
One more question: why ask for so many frames (4)? Wouldn't a single frame do, since only one frame buffer is being filled at any instant? Is it to spread wear across memory cells? More likely, multiple buffers simply let the driver keep filling the next frame while user space is still processing the previous one, so frames are not dropped.
/**********************************************************************************************************************************************/
gspca.c plays the central role: it implements the V4L2 function pointers that handle user interaction (open, mmap, ioctl, ...); it gets hold of the usb_device via zc3xx.c's sd_probe and submits URBs to fetch image data; and it calls into zc3xx.c to configure and start the camera's capture (also done over USB).
zc3xx.c implements the hardware-setup functions and calls usb_register() so that it gets matched during enumeration; its probe then hands the usb_device over to gspca.c's gspca_dev_probe() to do the rest.
Reposted from: https://www.cnblogs.com/-song/archive/2011/11/27/3331923.html
總結
以上是生活随笔為你收集整理的---WebCam网络摄像头9 usb dirver的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: 王菲 人间
- 下一篇: ---WebCam网络摄像头12 ---