⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 pwc-if.c

📁 是关于linux2.5.1的完全源码
💻 C
📖 第 1 页 / 共 4 页
字号:
		return i;
	}
	pdev->vopen++;
	file->private_data = vdev;
	/* Lock decompressor; this has a small race condition, since we
	   could in theory unload pwcx.o between pwc_find_decompressor()
	   above and this call. I doubt it's ever going to be a problem. */
	if (pdev->decompressor != NULL)
		pdev->decompressor->lock();
	up(&pdev->modlock);
	Trace(TRACE_OPEN, "video_open() returning 0.\n");
	return 0;
}

/* Release the video device.
   Note that all cleanup is done in the reverse order as in _open:
   stop the isoc transfers, (on a normal close) reset the interface,
   turn off the LEDs and optionally power down the camera, release the
   decompressor module and free the image buffers. Always returns 0. */
static int pwc_video_close(struct inode *inode, struct file *file)
{
	struct video_device *vdev = file->private_data;
	struct pwc_device *pdev;
	int i;

	Trace(TRACE_OPEN, "video_close called(0x%p).\n", vdev);

	pdev = (struct pwc_device *)vdev->priv;
	if (pdev->vopen == 0)
		Info("video_close() called on closed device?\n");

	/* Free isoc URBs */
	pwc_isoc_cleanup(pdev);

	/* Dump statistics, but only if a reasonable amount of frames were
	   processed (to prevent endless log-entries in case of snap-shot
	   programs) */
	if (pdev->vframe_count > 20)
		Info("Closing video device: %d frames received, dumped %d frames, %d frames with errors.\n", pdev->vframe_count, pdev->vframes_dumped, pdev->vframes_error);

	if (!pdev->unplugged) {
		/* Normal close: stop isochronuous and interrupt endpoint */
		Trace(TRACE_OPEN, "Normal close(): setting interface to 0.\n");
		usb_set_interface(pdev->udev, 0, 0);
		/* Turn LEDs off */
		if (pwc_set_leds(pdev, 0, 0) < 0)
			Info("Failed to set LED on/off time..\n");
		/* Power down camere to save energy */
		if (power_save) {
			i = pwc_camera_power(pdev, 0);
			if (i < 0) 
				Err("Failed to power down camera (%d)\n", i);
		}
	}
	pdev->vopen = 0;
	/* Release the (optional) external decompressor module */
	if (pdev->decompressor != NULL) {
		pdev->decompressor->exit();
		pdev->decompressor->unlock();
	}
	pwc_free_buffers(pdev);

	/* wake up _disconnect() routine */
	if (pdev->unplugged)
		wake_up(&pdev->remove_ok);
	file->private_data = NULL;
	return 0;
}

/*
 *	FIXME: what about two parallel reads ????
 *      ANSWER: Not supported. You can't open the device more than once,
 *              despite what the V4L1 interface says. First, I don't see
 *              the need, second there's no mechanism of alerting the
 *              2nd/3rd/... process of events like changing image size.
 *              And I don't see the point of blocking that for the
 *              2nd/3rd/... process.
 *              In multi-threaded environments reading parallel from any
 *              device is tricky anyhow.
 */

/* read() implementation: unless image_read_pos says we are in the
   middle of a partial read, block (or return -EWOULDBLOCK when
   O_NONBLOCK is set) until a full frame is queued, decompress it with
   pwc_handle_frame(), then copy image bytes to user space. Partial
   reads are allowed; image_read_pos keeps the position within the
   current image between calls. Returns the number of bytes copied or a
   negative error code. */
static int pwc_video_read(struct file *file, char *buf,
			  size_t count, loff_t *ppos)
{
	struct video_device *vdev = file->private_data;
	struct pwc_device *pdev;
	int noblock = file->f_flags & O_NONBLOCK;
	DECLARE_WAITQUEUE(wait, current);

	/* NOTE(review): "%d" with a size_t 'count' — works on 32-bit
	   targets of this era, but is not a portable format match. */
	Trace(TRACE_READ, "video_read(0x%p, %p, %d) called.\n", vdev, buf, count);
	if (vdev == NULL)
		return -EFAULT;
	pdev = vdev->priv;
	if (pdev == NULL)
		return -EFAULT;
	if (pdev->unplugged) {
		Info("pwc_video_read: Device got unplugged (1).\n");
		return -EPIPE; /* unplugged device! */
	}
	/* In case we're doing partial reads, we don't have to wait for a frame */
	if (pdev->image_read_pos == 0) {
		/* Do wait queueing according to the (doc)book */
		add_wait_queue(&pdev->frameq, &wait);
		while (pdev->full_frames == NULL) {
			if (noblock) {
				remove_wait_queue(&pdev->frameq, &wait);
				set_current_state(TASK_RUNNING);
				return -EWOULDBLOCK;
			}
			if (signal_pending(current)) {
				remove_wait_queue(&pdev->frameq, &wait);
				set_current_state(TASK_RUNNING);
				return -ERESTARTSYS;
			}
			schedule();
			set_current_state(TASK_INTERRUPTIBLE);
		}
		remove_wait_queue(&pdev->frameq, &wait);
		set_current_state(TASK_RUNNING);

		/* Decompress [, convert] and release frame */
		if (pwc_handle_frame(pdev))
			return -EFAULT;
	}

	Trace(TRACE_READ, "Copying data to user space.\n");
	/* copy bytes to user space; we allow for partial reads */
	if (count + pdev->image_read_pos > pdev->view.size)
		count = pdev->view.size - pdev->image_read_pos;
	if (copy_to_user(buf, pdev->image_ptr[pdev->fill_image] + pdev->image_read_pos, count))
		return -EFAULT;
	pdev->image_read_pos += count;
	if (pdev->image_read_pos >= pdev->view.size) { /* All data has been read */
		pdev->image_read_pos = 0;
		pwc_next_image(pdev);
	}
	return count;
}

/* poll() implementation: readable (POLLIN | POLLRDNORM) as soon as a
   full frame is queued; POLLERR once the device has been unplugged. */
static unsigned int pwc_video_poll(struct file *file, poll_table *wait)
{
	struct video_device *vdev = file->private_data;
	struct pwc_device *pdev;

	if (vdev == NULL)
		return -EFAULT;
	pdev = vdev->priv;
	if (pdev == NULL)
		return -EFAULT;

	poll_wait(file, &pdev->frameq, wait);
	if (pdev->unplugged) {
		Info("pwc_video_poll: Device got unplugged.\n");
		return POLLERR;
	}
	if (pdev->full_frames != NULL) /* we have frames waiting */
		return (POLLIN | POLLRDNORM);

	return 0;
}

/* ioctl worker; invoked through video_usercopy() (see pwc_video_ioctl
   below), so 'arg' points at a kernel-space copy of the user argument.
   Handles the standard V4L1 ioctls; anything unrecognised is handed to
   pwc_ioctl() (driver-private ioctls). Returns 0 or a negative errno. */
static int pwc_video_do_ioctl(struct inode *inode, struct file *file,
			      unsigned int cmd, void *arg)
{
	struct video_device *vdev = file->private_data;
	struct pwc_device *pdev;
	DECLARE_WAITQUEUE(wait, current);

	if (vdev == NULL)
		return -EFAULT;
	pdev = vdev->priv;
	if (pdev == NULL)
		return -EFAULT;

	switch (cmd) {
		/* Query capabilities */
		case VIDIOCGCAP: 
		{
			struct video_capability *caps = arg;

			strcpy(caps->name, vdev->name);
			caps->type = VID_TYPE_CAPTURE;
			caps->channels = 1;
			caps->audios = 1;
			caps->minwidth  = pdev->view_min.x;
			caps->minheight = pdev->view_min.y;
			caps->maxwidth  = pdev->view_max.x;
			caps->maxheight = pdev->view_max.y;
			break;
		}

		/* Channel functions (simulate 1 channel) */
		case VIDIOCGCHAN:
		{
			struct video_channel *v = arg;

			if (v->channel != 0)
				return -EINVAL;

			v->flags = 0;
			v->tuners = 0;
			v->type = VIDEO_TYPE_CAMERA;
			strcpy(v->name, "Webcam");
			return 0;
		}

		case VIDIOCSCHAN:
		{
			/* The spec says the argument is an integer, but
			   the bttv driver uses a video_channel arg, which
			   makes sense because it also has the norm flag.
			 */
			struct video_channel *v = arg;

			if (v->channel != 0)
				return -EINVAL;
			return 0;
		}

		/* Picture functions; contrast etc. */
		case VIDIOCGPICT:
		{
			struct video_picture *p = arg;
			int val;

			p->colour = 0x8000;
			p->hue = 0x8000;
			/* A negative return from the pwc_get_*() helpers means
			   the value could not be read; report 0xffff then. */
			val = pwc_get_brightness(pdev);
			if (val >= 0)
				p->brightness = val;
			else
				p->brightness = 0xffff;
			val = pwc_get_contrast(pdev);
			if (val >= 0)
				p->contrast = val;
			else
				p->contrast = 0xffff;
			/* Gamma, Whiteness, what's the difference? :) */
			val = pwc_get_gamma(pdev);
			if (val >= 0)
				p->whiteness = val;
			else
				p->whiteness = 0xffff;
			val = pwc_get_saturation(pdev);
			if (val >= 0)
				p->colour = val;
			else
				p->colour = 0xffff;
			p->depth = 24;
			p->palette = pdev->vpalette;
			p->hue = 0xFFFF; /* N/A */
			break;
		}

		case VIDIOCSPICT:
		{
			struct video_picture *p = arg;
			/*
			 *	FIXME:	Suppose we are mid read
			 *      ANSWER: No problem: the firmware of the camera
			 *              can handle brightness/contrast/etc
			 *              changes at _any_ time, and the palette
			 *              is used exactly once in the uncompress
			 *              routine.
			 */
			if (p->palette && p->palette != pdev->vpalette) {
				if (pwc_set_palette(pdev, p->palette) < 0)
					return -EINVAL;
			}
			pwc_set_brightness(pdev, p->brightness);
			pwc_set_contrast(pdev, p->contrast);
			pwc_set_gamma(pdev, p->whiteness);
			pwc_set_saturation(pdev, p->colour);
			break;
		}

		/* Window/size parameters */
		case VIDIOCGWIN:
		{
			struct video_window *vw = arg;

			vw->x = 0;
			vw->y = 0;
			vw->width = pdev->view.x;
			vw->height = pdev->view.y;
			vw->chromakey = 0;
			/* Framerate and snapshot mode are returned in the
			   flags field via the driver's PWC_FPS_* encoding. */
			vw->flags = (pdev->vframes << PWC_FPS_SHIFT) |
			           (pdev->vsnapshot ?
PWC_FPS_SNAPSHOT : 0);			break;		}				case VIDIOCSWIN:		{			struct video_window *vw = arg;			int fps, snapshot, ret;			fps = (vw->flags & PWC_FPS_FRMASK) >> PWC_FPS_SHIFT;			snapshot = vw->flags & PWC_FPS_SNAPSHOT;			if (fps == 0)				fps = pdev->vframes;			if (pdev->view.x == vw->width && pdev->view.y && fps == pdev->vframes && snapshot == pdev->vsnapshot)				return 0;			ret = pwc_try_video_mode(pdev, vw->width, vw->height, fps, pdev->vcompression, snapshot);			if (ret)				return ret;			break;				}				/* We don't have overlay support (yet) */		case VIDIOCGFBUF:		{			struct video_buffer *vb = arg;			memset(vb,0,sizeof(*vb));			break;		}		/* mmap() functions */		case VIDIOCGMBUF:		{			/* Tell the user program how much memory is needed for a mmap() */			struct video_mbuf *vm = arg;			int i;			memset(vm, 0, sizeof(*vm));			vm->size = default_mbufs * pdev->len_per_image;			vm->frames = default_mbufs; /* double buffering should be enough for most applications */			for (i = 0; i < default_mbufs; i++)				vm->offsets[i] = i * pdev->len_per_image;			break;		}		case VIDIOCMCAPTURE:		{			/* Start capture into a given image buffer (called 'frame' in video_mmap structure) */			struct video_mmap *vm = arg;			Trace(TRACE_READ, "VIDIOCMCAPTURE: %dx%d, frame %d, format %d\n", vm->width, vm->height, vm->frame, vm->format);			if (vm->frame < 0 || vm->frame >= default_mbufs)				return -EINVAL;			/* xawtv is nasty. It probes the available palettes			   by setting a very small image size and trying			   various palettes... The driver doesn't support			   such small images, so I'm working around it.			 
*/
			if (vm->format && vm->format != pdev->vpalette)
				if (pwc_set_palette(pdev, vm->format) < 0)
					return -EINVAL;

			/* Only switch modes when the requested size differs
			   from the current one AND is at least the driver's
			   minimum; smaller probe sizes are silently kept at
			   the current mode (the xawtv workaround above). */
			if ((vm->width != pdev->view.x || vm->height != pdev->view.y) &&
			    (vm->width >= pdev->view_min.x && vm->height >= pdev->view_min.y)) {
				int ret;

				Trace(TRACE_OPEN, "VIDIOCMCAPTURE: changing size to please xawtv :-(.\n");
				ret = pwc_try_video_mode(pdev, vm->width, vm->height, pdev->vframes, pdev->vcompression, pdev->vsnapshot);
				if (ret)
					return ret;
			} /* ... size mismatch */

			/* FIXME: should we lock here? */
			if (pdev->image_used[vm->frame])
				return -EBUSY;	/* buffer wasn't available. Bummer */
			pdev->image_used[vm->frame] = 1;

			/* Okay, we're done here. In the SYNC call we wait until a
			   frame comes available, then expand image into the given
			   buffer.
			   In contrast to the CPiA cam the Philips cams deliver a
			   constant stream, almost like a grabber card. Also,
			   we have separate buffers for the rawdata and the image,
			   meaning we can nearly always expand into the requested buffer.
			 */
			Trace(TRACE_READ, "VIDIOCMCAPTURE done.\n");
			break;
		}

		case VIDIOCSYNC:
		{
			/* The doc says: "Whenever a buffer is used it should
			   call VIDIOCSYNC to free this frame up and continue."

			   The only odd thing about this whole procedure is
			   that MCAPTURE flags the buffer as "in use", and
			   SYNC immediately unmarks it, while it isn't
			   after SYNC that you know that the buffer actually
			   got filled! So you better not start a CAPTURE in
			   the same frame immediately (use double buffering).
			   This is not a problem for this cam, since it has
			   extra intermediate buffers, but a hardware
			   grabber card will then overwrite the buffer
			   you're working on.
			 */
			int *mbuf = arg;
			int ret;

			Trace(TRACE_READ, "VIDIOCSYNC called (%d).\n", *mbuf);

			/* bounds check */
			if (*mbuf < 0 || *mbuf >= default_mbufs)
				return -EINVAL;
			/* check if this buffer was requested anyway */
			if (pdev->image_used[*mbuf] == 0)
				return -EINVAL;

			/* Add ourselves to the frame wait-queue.

			   FIXME: needs auditing for safety.
			   QUESTION: In what respect? I think that using the
			             frameq is safe now.
			 */
			add_wait_queue(&pdev->frameq, &wait);
			while (pdev->full_frames == NULL) {
				/* Unlike read(), also bail out with -ENODEV
				   when the camera disappears mid-wait. */
				if (pdev->unplugged) {
					remove_wait_queue(&pdev->frameq, &wait);
					set_current_state(TASK_RUNNING);
					return -ENODEV;
				}
				if (signal_pending(current)) {
					remove_wait_queue(&pdev->frameq, &wait);
					set_current_state(TASK_RUNNING);
					return -ERESTARTSYS;
				}
				set_current_state(TASK_INTERRUPTIBLE);
				schedule();
			}
			remove_wait_queue(&pdev->frameq, &wait);
			set_current_state(TASK_RUNNING);

			/* The frame is ready. Expand in the image buffer
			   requested by the user. I don't care if you
			   mmap() 5 buffers and request data in this order:
			   buffer 4 2 3 0 1 2 3 0 4 3 1 . . .
			   Grabber hardware may not be so forgiving.
			 */
			Trace(TRACE_READ, "VIDIOCSYNC: frame ready.\n");
			pdev->fill_image = *mbuf; /* tell in which buffer we want the image to be expanded */
			/* Decompress, etc */
			ret = pwc_handle_frame(pdev);
			pdev->image_used[*mbuf] = 0;
			if (ret)
				return -EFAULT;
			break;
		}

		case VIDIOCGAUDIO:
		{
			/* No audio capture support; report a dummy,
			   muted mono "Microphone". */
			struct video_audio *v = arg;

			strcpy(v->name, "Microphone");
			v->audio = -1; /* unknown audio minor */
			v->flags = 0;
			v->mode = VIDEO_SOUND_MONO;
			v->volume = 0;
			v->bass = 0;
			v->treble = 0;
			v->balance = 0x8000;
			v->step = 1;
			break;
		}

		case VIDIOCSAUDIO:
		{
			/* Dummy: nothing can be set */
			break;
		}

		case VIDIOCGUNIT:
		{
			struct video_unit *vu = arg;

			vu->video = pdev->vdev->minor & 0x3F;
			vu->audio = -1; /* not known yet */
			vu->vbi = -1;
			vu->radio = -1;
			vu->teletext = -1;
			break;
		}
		default:
			/* Not a standard V4L1 ioctl: try the driver-private ones */
			return pwc_ioctl(pdev, cmd, arg);
	} /* ..switch */
	return 0;
}

/* ioctl() entry point: video_usercopy() copies the argument between
   user and kernel space and calls pwc_video_do_ioctl() to do the work. */
static int pwc_video_ioctl(struct inode *inode, struct file *file,
			   unsigned int cmd, unsigned long arg)
{
	return video_usercopy(inode, file, cmd, arg, pwc_video_do_ioctl);
}

/* mmap() implementation: maps the driver's image buffers into the
   caller's address space (continues past this chunk). */
static int pwc_video_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct video_device *vdev = file->private_data;
	struct pwc_device *pdev;
	unsigned long start = vma->vm_start;
	unsigned long size  = vma->vm_end-vma->vm_start;
	unsigned long page, pos;

	Trace(TRACE_MEMORY, "mmap(0x%p, 0x%lx, %lu) called.\n", vdev, start, size);
	pdev = vdev->priv;

	/* FIXME - audit mmap during a read */

	pos = (unsigned long)pdev->image_data;

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -