v4l2.c
		return 0;
	}

	case VIDIOC_G_PARM:
	{
		struct v4l2_streamparm *sp = arg;

		if (sp->type != V4L2_BUF_TYPE_CAPTURE)
			return -EINVAL;
		sp->parm.capture = dev->capture;
		return 0;
	}

	case VIDIOC_S_PARM:
	{
		struct v4l2_streamparm *sp = arg;
		struct v4l2_captureparm *vp = &sp->parm.capture;

		if (vp->capturemode & ~dev->capture.capability) {
			dbg ("PARM unsupported capture capability %08X\n",
			     vp->capturemode);
			return -EINVAL;
		}
		if ((dev->capture.capability & V4L2_CAP_TIMEPERFRAME) &&
		    vp->timeperframe < 10000) {
			dbg ("PARM time per frame out of range %ld\n",
			     vp->timeperframe);
			return -EINVAL;
		}
		if (vp->capturemode != dev->capture.capturemode &&
		    !o->noncapturing && dev->streaming) {
			dbg ("S_PARM state error\n");
			return -EINVAL;
		}
		if (o->noncapturing)
			return 0;
		if (vp->capturemode != dev->capture.capturemode ||
		    vp->timeperframe != dev->capture.timeperframe) {
			int frame_period;

			dev->capture.capturemode = vp->capturemode;
			if (vp->capturemode & V4L2_MODE_HIGHQUALITY)
				frame_period = 666667;	// 15 fps
			else
				frame_period = vp->timeperframe;
			capture_abort (dev);
			dev->videc.set_frame_period (dev, frame_period);
			if (frame_period >= dev->videc.frame_period)
				dev->capture.timeperframe = frame_period;
			else
				dev->capture.timeperframe = dev->videc.frame_period;
			if (dev->streaming)
				capture_stream_start (dev);
			else if (dev->preview)
				capture_grab_frame (dev);
		}
		return 0;
	}

	case VIDIOC_G_STD:
	{
		struct v4l2_standard *std = arg;

		v4l2_video_std_construct (std, dev->videc.standard, 0);
		return 0;
	}

	case VIDIOC_S_STD:
	{
		struct v4l2_standard *std = arg;
		int id;

		if ((o->noncapturing && dev->capturing_opens))
			return -EPERM;
		id = v4l2_video_std_confirm (std);
		if (!((1 << id) & dev->videc.standards)) {
			//dbg("Bad standard: %u\n", (unsigned)id);
			return -EINVAL;
		}
		capture_abort (dev);
		dev->videc.set_standard (dev, id);
		if (capture_new_format (dev))
			return -EINVAL;
		if (dev->streaming)
			capture_stream_start (dev);
		else if (dev->preview)
			capture_grab_frame (dev);
		return 0;
	}

	case VIDIOC_ENUMSTD:
	{
		struct v4l2_enumstd *estd = arg;
		__u32 b, i;

		if (estd->index < 0 || estd->index > 30)
			return -EINVAL;
		for (b = 1, i = 0; b < 32; ++b) {
			if (((1 << b) & dev->videc.standards) == 0)
				continue;
			if (i == estd->index) {
				v4l2_video_std_construct (&estd->std, b, 0);
				estd->inputs = (__u32) - 1;	/* all inputs */
				estd->outputs = 0;
				return 0;
			}
			++i;
		}
		return -EINVAL;
	}

	case VIDIOC_ENUMINPUT:
	{
		struct v4l2_input *vi = arg;

		if (vi->index < 0 || vi->index >= dev->videc.num_inputs)
			return -EINVAL;
		*vi = dev->source[vi->index].input;
		return 0;
	}

	case VIDIOC_QUERYCTRL:
	{
		struct v4l2_queryctrl *qc = arg;

		if (dev->camera)
			return dev->camera->query_control (qc);
		return -ENODEV;
	}

	case VIDIOC_QUERYMENU:
	{
		struct v4l2_querymenu *qm = arg;

		if (dev->camera)
			return dev->camera->query_menu (qm);
		return -ENODEV;
	}

	case VIDIOC_G_CTRL:
	{
		struct v4l2_control *vc = arg;

		if (dev->camera)
			return dev->camera->get_control (vc);
		return -ENODEV;
	}

	case VIDIOC_S_CTRL:
	{
		struct v4l2_control *vc = arg;

		if (dev->camera)
			return dev->camera->set_control (vc);
		return -ENODEV;
	}

	case VIDIOC_G_TUNER:
		return -EINVAL;
	case VIDIOC_S_TUNER:
		return -EINVAL;
	case VIDIOC_G_FREQ:
		return -EINVAL;
	case VIDIOC_S_FREQ:
		return -EINVAL;
	case VIDIOC_G_AUDIO:
		return -EINVAL;
	case VIDIOC_S_AUDIO:
		return -EINVAL;

	default:
		return -ENOIOCTLCMD;
	}

	return 0;
}

static int
v4l2_mmap (void *id, struct vm_area_struct *vma)
{
	struct device_open *o = (struct device_open *) id;
	struct capture_device *dev = o->dev;
	struct stream_buffer *buf;
	int i, n = 1;

#ifdef CONFIG_CEE		/* MVL-CEE */
	wait_event(dev->suspend_wq, dev->suspended == 0);
#endif				/* MVL-CEE */

	if (o->noncapturing) {
		//dbg("mmap() called on non-capturing open\n");
		return -ENODEV;
	}
	buf = mmap_stream_buffer_from_offset (dev, vma->vm_pgoff);
	if (dev->stream_contig_map) {
		/* N buffers in one contiguous map */
		buf = &dev->stream_buf[0];
		n = dev->stream_buffers_requested;
	}
	if (buf == NULL) {
		//dbg("mmap() Invalid offset parameter\n");
		return -EINVAL;	/* no such buffer */
	}
	if (buf->vidbuf.length * n != vma->vm_end - vma->vm_start) {
		//dbg("mmap() Wrong length parameter\n");
		return -EINVAL;	/* wrong length */
	}
	for (i = 0; i < n; ++i) {
		if (!buf->requested) {
			//dbg("mmap() Buffer is not available for"
			//    " mapping\n");
			return -EINVAL;	/* not requested */
		}
		if (buf->vidbuf.flags & V4L2_BUF_FLAG_MAPPED) {
			//dbg("mmap() Buffer is already mapped\n");
			return -EINVAL;	/* already mapped */
		}
		if (buf->vaddress != NULL)
			vfree (buf->vaddress);
		if (i == 0)
			buf->vaddress = vmalloc (buf->vidbuf.length * n);
		else
			buf->vaddress = buf[-1].vaddress + buf->vidbuf.length;
		if (buf->vaddress == NULL) {
			err ("Could not allocate mmap() buffer\n");
			return -ENODEV;
		}
#if 0
		/* TODO: build scatter list for buffer if using DMA */
		if ((using DMA) &&
		    !bm_build_scatter_list (dev, buf->vaddress, &buf->dma_list))
			return -ENODEV;
#endif
		buf->vidbuf.flags |= V4L2_BUF_FLAG_MAPPED;
		++dev->stream_buffers_mapped;
		++buf;
	}

	vma->vm_ops = &capture_vma_operations;
	if (vma->vm_ops->open)
		vma->vm_ops->open (vma);
	/* Note: vma->vm_file will be set up by V4L2 */
	return 0;
}

static int
v4l2_poll (void *id, struct file *file, poll_table * table)
{
	struct device_open *o = (struct device_open *) id;
	struct capture_device *dev = o->dev;

#ifdef CONFIG_CEE		/* MVL-CEE */
	wait_event(dev->suspend_wq, dev->suspended == 0);
#endif				/* MVL-CEE */

	if (o->noncapturing) {
		//dbg("poll() illegal in non-capturing open\n");
		return POLLERR;
	}
	if (dev->streaming) {
		void *node;

		node = v4l2_q_peek_head (&dev->stream_q_done);
		if (node != NULL)
			return (POLLIN | POLLRDNORM);	/* data is ready now */
		node = v4l2_q_peek_head (&dev->stream_q_capture);
		if (node == NULL)
			return POLLERR;	/* no frames queued */
		poll_wait (file, &dev->new_video_frame, table);
		return 0;
	}

	/* Capture is through read() call */
	if (dev->capture_completed)	/* data is ready now */
		return (POLLIN | POLLRDNORM);

	capture_grab_frame (dev);	/* does nothing if capture is in progress */

	if (!dev->ready_to_capture) {	/* Can't grab frames! */
		dbg ("Can't grab frames!\n");
		return POLLERR;
	}
	poll_wait (file, &dev->new_video_frame, table);
	return 0;
}

static long
v4l2_read (void *id, char *buf, unsigned long count, int noblock)
{
	struct device_open *o = (struct device_open *) id;
	struct capture_device *dev = o->dev;
	long len = 0;
	long my_timeout;

#ifdef CONFIG_CEE		/* MVL-CEE */
	wait_event(dev->suspend_wq, dev->suspended == 0);
#endif				/* MVL-CEE */

	if (o->noncapturing) {
		//dbg("read() illegal in non-capturing open\n");
		return -EPERM;
	}
	if (dev->streaming) {
		//dbg("Can't read() when streaming is on\n");
		return -EPERM;
	}

	capture_grab_frame (dev);	/* does nothing if capture is in progress */

	if (!dev->ready_to_capture) {
		dbg ("Can't grab frames!\n");
		return 0;
	}

	my_timeout = HZ / 5;
	while (len == 0) {
		if (!dev->capture_completed) {
			if (noblock)
				return -EAGAIN;
			my_timeout =
			    interruptible_sleep_on_timeout (&dev->new_video_frame,
							    my_timeout);
		}
		if (my_timeout == 0) {
			//dbg("Timeout on read\n");
			break;
		}
		len = capture_read (dev, buf, count);
	}
	return len;
}

/*
 *	Remaining initialization of video decoder etc. This is only
 *	done when the device is successfully identified and registered.
 */
static int
v4l2_init_done (struct v4l2_device *v)
{
	struct capture_device *dev = (struct capture_device *) v;
	int i;

#ifdef CONFIG_CEE		/* MVL-CEE */
	wait_event(dev->suspend_wq, dev->suspended == 0);
#endif				/* MVL-CEE */

	/* Initialize video input array */
	for (i = 0; i < VSOURCE_COUNT; ++i) {
		dev->source[i].input.index = i;
		dev->source[i].input.type = V4L2_INPUT_TYPE_CAMERA;
		dev->source[i].input.capability = 0;
	}
	strcpy (dev->source[VSOURCE_CAMERA].input.name, "Camera");
	strcpy (dev->source[VSOURCE_TEST].input.name, "Color Bar Test");
	/*strcpy(dev->source[VSOURCE_TUNER].input.name, "Tuner"); */
	/*dev->source[VSOURCE_TUNER].input.type = V4L2_INPUT_TYPE_TUNER; */

	/* Initialize the video decoder hardware */
	dev->videc.initialize (dev);
	dev->camif->open ();

	/* BUG: get defaults from user somehow... */
	dev->videc.set_standard (dev, V4L2_STD_NTSC);
	dev->videc.set_vcrmode (dev, 0);
	set_video_input (dev, VSOURCE_CAMERA);

	/* Capture parameters */
	dev->capture.capability = V4L2_CAP_TIMEPERFRAME | V4L2_MODE_HIGHQUALITY;
	dev->capture.capturemode = 0;
	dev->capture.extendedmode = 0;
	dev->capture.timeperframe = dev->videc.frame_period;

	/* Default capture dimensions */
	dev->clientfmt.depth = 16;
	dev->clientfmt.pixelformat = V4L2_PIX_FMT_RGB565;
	dev->clientfmt.width = omap_image_size[QVGA].width;
	dev->clientfmt.height = omap_image_size[QVGA].height;
	dev->clientfmt.flags = 0;
	dev->clientfmt.bytesperline = 0;
	dev->clientfmt.sizeimage = 0;
	capture_new_format (dev);

	dev->camif->close ();
	return 0;
}

/* =====================================================================
 *	The functions below this point are only called during loading
 *	and unloading of the driver.
 */

/*
 *	D E V I C E   I N I T I A L I Z A T I O N   R O U T I N E S
 *
 *	These routines locate and enable the hardware, and initialize
 *	the device structure.
 */

/* Initialize v4l2_device fields */
static void
init_device_fields (struct capture_device *dev)
{
	sprintf (dev->v.name, "OMAP1510/1610/730 V4L2 Capture Driver");
	init_waitqueue_head (&dev->new_video_frame);
	init_waitqueue_head (&dev->fbinfo_wait);
	init_waitqueue_head (&dev->suspend_wq);
	init_waitqueue_head (&dev->resume_wq);
	init_completion(&dev->resume_thread_sync);
	dev->v.type = V4L2_TYPE_CAPTURE;
	dev->v.minor = unit_video;
	dev->v.open = v4l2_open;
	dev->v.close = v4l2_close;
	dev->v.read = v4l2_read;
	dev->v.write = v4l2_write;
	dev->v.ioctl = v4l2_ioctl;
	dev->v.mmap = v4l2_mmap;
	dev->v.poll = v4l2_poll;
	dev->v.initialize = v4l2_init_done;
	dev->v.priv = NULL;
	dev->preview = 0;
}

static int
config_a_device (struct capture_device *dev)
{
	sprintf (dev->shortname, "capture");
	init_device_fields (dev);
	if (!find_decoder (dev)) {
		err ("Bad or unrecognized video decoder\n");
		return -ENODEV;
	}
#ifdef CONFIG_CEE		/* MVL-CEE */
	dev->resume_thread_exit = 0;
	dev->resume_thread_pid = kernel_thread(&resume_thread, dev,
					       CLONE_FS | CLONE_FILES | CLONE_SIGHAND);
	if (dev->resume_thread_pid < 0) {
		err("could not start resume thread\n");
		return -ENODEV;
	}
	omap_camera_ldm_register(dev);
#endif
	return 0;
}

static void
unconfig_a_device (struct capture_device *dev)
{
	capture_close (dev);
	dev->camif->cleanup ();
	if (dev->is_registered) {
		v4l2_unregister_device ((struct v4l2_device *) dev);
		//dbg("Removed device %s\n", dev->shortname);
	}
#ifdef CONFIG_CEE		/* MVL-CEE */
	if (dev->resume_thread_pid >= 0) {
		dev->resume_thread_exit = 1;
		wake_up(&dev->resume_wq);
		wait_for_completion (&dev->resume_thread_sync);
	}
	omap_camera_ldm_unregister(dev);
#endif
	memset (dev, 0, sizeof (capture));
}

static void
update_fbinfo_task (void *dev)
{
	struct capture_device *device = (struct capture_device *) dev;

	if (device) {
		get_framebuffer_info (device);
		wake_up (&device->fbinfo_wait);
	}
}

/*
 *	M O D U L E   I N I T   A N D   C L E A N U P
 */

int
omap_v4l2_init (void)
{
	struct capture_device *dev;
	int retcode;

	memset (&capture, 0, sizeof (capture));
	dev = &capture;

#if !defined(CONFIG_OMAP_INNOVATOR) && defined(CONFIG_ARCH_OMAP1510)
	dev->camif = &camif_evm;
#else
	dev->camif = &camif_innovator;
#endif

	// initialize the camera interface
	if ((retcode = dev->camif->init (camif_capture_callback, dev))) {
		err ("Camera Interface init failed\n");
		return retcode;
	}

	dev->camera = dev->camif->camera_detect ();
	if (!dev->camera) {
		info ("No camera detected.\n");
	}

	if (config_a_device (dev)) {
		unconfig_a_device (dev);
		return -ENODEV;
	}
	if (v4l2_register_device ((struct v4l2_device *) dev) != 0) {
		err ("Couldn't register the driver.\n");
		unconfig_a_device (dev);
		return -ENODEV;
	}
	dev->is_registered = 1;
	return 0;
}

static void
omap_v4l2_cleanup (void)
{
	unconfig_a_device (&capture);
}

#ifndef MODULE
int
init_v4l2_omap (struct video_init *ignore)
{
	return omap_v4l2_init();
}
#else
module_init (omap_v4l2_init);
#endif

module_exit (omap_v4l2_cleanup);

MODULE_LICENSE ("GPL");
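
#if 0	/* Illustrative user-space sketch -- not part of the driver build */
/*
 * A minimal sketch of how an application might exercise the read()-based
 * capture path implemented above (VIDIOC_G_PARM/VIDIOC_S_PARM, poll(),
 * read()).  Assumptions, not confirmed by this file alone: the pre-2.6 V4L2
 * headers this driver targets (V4L2_BUF_TYPE_CAPTURE, scalar timeperframe),
 * a device node of /dev/video0 for unit_video 0, and the default QVGA
 * RGB565 format set in v4l2_init_done().  Error checking is omitted.
 */
#include <fcntl.h>
#include <poll.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static void
capture_one_frame_example (void)
{
	int fd = open ("/dev/video0", O_RDONLY);
	struct v4l2_streamparm sp;
	struct pollfd pfd;
	static char frame[320 * 240 * 2];	/* QVGA, 16 bpp RGB565 */

	/* Request high-quality mode; VIDIOC_S_PARM above then forces the
	   frame period to 666667 (about 15 fps). */
	memset (&sp, 0, sizeof (sp));
	sp.type = V4L2_BUF_TYPE_CAPTURE;
	ioctl (fd, VIDIOC_G_PARM, &sp);
	sp.parm.capture.capturemode |= V4L2_MODE_HIGHQUALITY;
	ioctl (fd, VIDIOC_S_PARM, &sp);

	/* v4l2_poll() reports POLLIN | POLLRDNORM once a frame is ready,
	   after which v4l2_read() copies it out. */
	pfd.fd = fd;
	pfd.events = POLLIN;
	poll (&pfd, 1, 1000);
	read (fd, frame, sizeof (frame));

	close (fd);
}
#endif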