
video_freebsd.c

Video motion detection (Linux-based)
C
Page 1 of 2
 - set_channelset
 - set_channel
 - set_capture_mode
*/
static unsigned char *v4l_start(struct context *cnt, struct video_dev *viddev, int width, int height,
                                unsigned short input, unsigned short norm, unsigned long freq)
{
    int dev_bktr = viddev->fd_bktr;
    //int dev_tuner = viddev->fd_tuner;
    /* To ensure that every device supports the capture mode.
       _TODO_: Autodetect the best capture mode. */
    int dummy = 1;
//  int pixelformat = BSD_VIDFMT_I420;
    int single = METEOR_CAP_SINGLE;
    void *map;

    /* If the tuner was chosen as input, we need to set up the frequency. */
    if ((viddev->tuner_device != NULL) && (input == IN_TV)) {
        if (!freq) {
            motion_log(LOG_ERR, 1, "Not valid Frequency [%lu] for Source input [%i]", freq, input);
            return (NULL);
        } else if (set_freq(viddev, freq) == -1) {
            motion_log(LOG_ERR, 1, "Frequency [%lu] Source input [%i]", freq, input);
            return (NULL);
        }
    }

    /* FIXME: if the tuner is the input, we need to set options for the tuner, not for bktr. */
    if (set_input_format(viddev, norm) == -1) {
        motion_log(LOG_ERR, 1, "set input format [%d]", norm);
        return (NULL);
    }

    if (set_geometry(viddev, width, height) == -1) {
        motion_log(LOG_ERR, 1, "set geometry [%d]x[%d]", width, height);
        return (NULL);
    }

/*
    if (ioctl(dev_bktr, METEORSACTPIXFMT, &pixelformat) < 0) {
        motion_log(LOG_ERR, 1, "set encoding method BSD_VIDFMT_I420");
        return (NULL);
    }

    NEEDED !? FIXME
    if (setup_pixelformat(viddev) == -1) {
        return (NULL);
    }
*/

    if (freq) {
        if (cnt->conf.setup_mode)
            motion_log(-1, 0, "Frequency set (no implemented yet");
    /*  TODO: missing implementation
        set_channelset(viddev);
        set_channel(viddev);
        if (set_freq(viddev, freq) == -1) {
            return (NULL);
        }
    */
    }

    /* Set capture mode and capture buffers.
     * This is the buffer size for captured images, so it depends on the
     * colour space of the input format. FIXME
     */
    viddev->v4l_bufsize = (((width * height * 3 / 2)) * sizeof(unsigned char *));
    viddev->v4l_fmt = VIDEO_PALETTE_YUV420P;

    map = mmap((caddr_t)0, viddev->v4l_bufsize, PROT_READ | PROT_WRITE, MAP_SHARED, dev_bktr, (off_t)0);

    if (map == MAP_FAILED) {
        motion_log(LOG_ERR, 1, "mmap failed");
        return (NULL);
    }

    /* FIXME: double buffer */
    if (0) {
        viddev->v4l_maxbuffer = 2;
        viddev->v4l_buffers[0] = map;
        viddev->v4l_buffers[1] = (unsigned char *)map + 0; /* 0 is not valid, just a test */
        //viddev->v4l_buffers[1] = map + vid_buf.offsets[1];
    } else {
        viddev->v4l_buffers[0] = map;
        viddev->v4l_maxbuffer = 1;
    }

    viddev->v4l_curbuffer = 0;

    /* Clear the buffer */
    if (ioctl(dev_bktr, BT848SCBUF, &dummy) < 0) {
        motion_log(LOG_ERR, 1, "BT848SCBUF");
        return NULL;
    }

    // settle; replaces sleep(1)
    SLEEP(1, 0)

    if (ioctl(dev_bktr, METEORCAPTUR, &single) < 0) {
        motion_log(LOG_ERR, 1, "METEORCAPTUR using single method Error capturing");
    } else
        viddev->capture_method = CAPTURE_SINGLE;

    /* FIXME */
    switch (viddev->v4l_fmt) {
        case VIDEO_PALETTE_YUV420P:
            viddev->v4l_bufsize = (width * height * 3) / 2;
            motion_log(-1, 0, "VIDEO_PALETTE_YUV420P palette setting bufsize");
            break;
        case VIDEO_PALETTE_YUV422:
            viddev->v4l_bufsize = (width * height * 2);
            break;
        case VIDEO_PALETTE_RGB24:
            viddev->v4l_bufsize = (width * height * 3);
            break;
        case VIDEO_PALETTE_GREY:
            viddev->v4l_bufsize = width * height;
            break;
    }

    {
        int val;
        motion_log(LOG_INFO, 0, "HUE [%d]", get_hue(dev_bktr, &val));
        motion_log(LOG_INFO, 0, "SATURATION [%d]", get_saturation(dev_bktr, &val));
        motion_log(LOG_INFO, 0, "BRIGHTNESS [%d]", get_brightness(dev_bktr, &val));
        motion_log(LOG_INFO, 0, "CONTRAST [%d]", get_contrast(dev_bktr, &val));
    }

    return map;
}

/**
 * v4l_next fetches a video frame from a v4l device
 * Parameters:
 *     viddev     Pointer to struct containing video device handle
 *     map        Pointer to the buffer in which the function puts the new image
 *     width      Width of image in pixels
 *     height     Height of image in pixels
 *
 * Returns
 *     0          Success
 *    -1          Fatal error
 *     1          Non fatal error (not implemented)
 */
static int v4l_next(struct video_dev *viddev, unsigned char *map, int width, int height)
{
    int dev_bktr = viddev->fd_bktr;
    unsigned char *cap_map = NULL;
    int single = METEOR_CAP_SINGLE;
    int continous = METEOR_CAP_CONTINOUS;
    sigset_t set, old;

    /* ONLY the MMAP method is used to capture. */

    /* Allocate a new mmap buffer */

    /* Block signals during IOCTL */
    sigemptyset(&set);
    sigaddset(&set, SIGCHLD);
    sigaddset(&set, SIGALRM);
    sigaddset(&set, SIGUSR1);
    sigaddset(&set, SIGTERM);
    sigaddset(&set, SIGHUP);
    pthread_sigmask(SIG_BLOCK, &set, &old);

    cap_map = viddev->v4l_buffers[viddev->v4l_curbuffer];
    viddev->v4l_curbuffer++;
    if (viddev->v4l_curbuffer >= viddev->v4l_maxbuffer)
        viddev->v4l_curbuffer = 0;

    /* capture */
    if (viddev->capture_method == CAPTURE_CONTINOUS) {
        if (ioctl(dev_bktr, METEORCAPTUR, &continous) < 0) {
            motion_log(LOG_ERR, 1, "Error capturing using continuous method");
            sigprocmask(SIG_UNBLOCK, &old, NULL);
            return (-1);
        }
    } else {
        if (ioctl(dev_bktr, METEORCAPTUR, &single) < 0) {
            motion_log(LOG_ERR, 1, "Error capturing using single method");
            sigprocmask(SIG_UNBLOCK, &old, NULL);
            return (-1);
        }
    }

    /* Undo the signal blocking */
    pthread_sigmask(SIG_UNBLOCK, &old, NULL);

    switch (viddev->v4l_fmt) {
        case VIDEO_PALETTE_RGB24:
            rgb24toyuv420p(map, cap_map, width, height);
            break;
        case VIDEO_PALETTE_YUV422:
            yuv422to420p(map, cap_map, width, height);
            break;
        default:
            memcpy(map, cap_map, viddev->v4l_bufsize);
    }

    return 0;
}

/* Set input & freq if needed. FIXME: use of the tuner is not allowed yet. */
static void v4l_set_input(struct context *cnt, struct video_dev *viddev, unsigned char *map, int width, int height,
                          unsigned short input, unsigned short norm, int skip, unsigned long freq)
{
    int i;
    unsigned long frequnits = freq;

    if (input != viddev->input || width != viddev->width || height != viddev->height || freq != viddev->freq) {
        if (set_input(viddev, input) == -1)
            return;

        if (set_input_format(viddev, norm) == -1)
            return;

        if ((viddev->tuner_device != NULL) && (input == IN_TV) && (frequnits > 0)) {
            if (set_freq(viddev, freq) == -1)
                return;
        }

        // FIXME
        /*
        if (setup_pixelformat(viddev) == -1) {
            motion_log(LOG_ERR, 1, "ioctl (VIDIOCSFREQ)");
            return
        }
        */

        if (set_geometry(viddev, width, height) == -1)
            return;

        v4l_picture_controls(cnt, viddev);

        viddev->input = input;
        viddev->width = width;
        viddev->height = height;
        viddev->freq = freq;
        viddev->norm = norm;

        /* skip a few frames if needed */
        for (i = 0; i < skip; i++)
            v4l_next(viddev, map, width, height);
    } else {
        /* No round robin - we only adjust picture controls */
        v4l_picture_controls(cnt, viddev);
    }
}


/*****************************************************************************
    Wrappers calling the current capture routines
 *****************************************************************************/

/*
   vid_init    - Allocate the viddev struct.
   vid_start   - Set up device parameters (device, channel, freq, contrast,
                 hue, saturation, brightness) and open it.
   vid_next    - Capture a frame and set input, contrast, hue, saturation
                 and brightness if necessary.
   vid_close   - Close devices.
   vid_cleanup - Free the viddev struct.
*/

/* big lock for vid_start */
pthread_mutex_t vid_mutex;

/* structure used for per device locking */
struct video_dev **viddevs = NULL;

void vid_init(void)
{
    if (!viddevs) {
        viddevs = mymalloc(sizeof(struct video_dev *));
        viddevs[0] = NULL;
    }

    pthread_mutex_init(&vid_mutex, NULL);
}

/* Called by child processes to get rid of open video devices */
void vid_close(void)
{
    int i = -1;

    if (viddevs) {
        while (viddevs[++i]) {
            close(viddevs[i]->fd_bktr);
            close(viddevs[i]->fd_tuner);
        }
    }
}

void vid_cleanup(void)
{
    int i = -1;

    if (viddevs) {
        while (viddevs[++i]) {
            munmap(viddevs[i]->v4l_buffers[0], viddevs[i]->v4l_bufsize);
            viddevs[i]->v4l_buffers[0] = MAP_FAILED;
            free(viddevs[i]);
        }
        free(viddevs);
        viddevs = NULL;
    }
}
#endif /*WITHOUT_V4L*/

int vid_start(struct context *cnt)
{
    struct config *conf = &cnt->conf;
    int fd_bktr = -1;

    if (conf->netcam_url)
        return netcam_start(cnt);

#ifndef WITHOUT_V4L
    {
        int fd_tuner = -1;
        int i = -1;
        int width, height;
        unsigned short input, norm;
        unsigned long frequency;

        /* We use width and height from conf in this function. They will be assigned
         * to width and height in imgs here, and cap_width and cap_height in
         * rotate_data won't be set until rotate_init.
         * Motion requires that width and height are multiples of 16, so we check for this.
         */
        if (conf->width % 16) {
            motion_log(LOG_ERR, 0,
                       "config image width (%d) is not modulo 16",
                       conf->width);
            return -1;
        }

        if (conf->height % 16) {
            motion_log(LOG_ERR, 0,
                       "config image height (%d) is not modulo 16",
                       conf->height);
            return -1;
        }

        width = conf->width;
        height = conf->height;
        input = conf->input;
        norm = conf->norm;
        frequency = conf->frequency;

        pthread_mutex_lock(&vid_mutex);

        /* Transfer width and height from conf to imgs. The imgs values are the ones
         * that are used internally in Motion. That way, setting width and height via
         * http remote control won't screw things up.
         */
        cnt->imgs.width = width;
        cnt->imgs.height = height;

        /* First we walk through the already discovered video devices to see
         * if we have already set up the same device before. If this is the case
         * the device is a round-robin device and we set the basic settings
         * and return the file descriptor.
         */
        while (viddevs[++i]) {
            if (!strcmp(conf->video_device, viddevs[i]->video_device)) {
                int fd;
                cnt->imgs.type = viddevs[i]->v4l_fmt;
                motion_log(-1, 0, "vid_start cnt->imgs.type [%i]", cnt->imgs.type);
                switch (cnt->imgs.type) {
                    case VIDEO_PALETTE_GREY:
                        cnt->imgs.motionsize = width * height;
                        cnt->imgs.size = width * height;
                        break;
                    case VIDEO_PALETTE_RGB24:
                    case VIDEO_PALETTE_YUV422:
                        cnt->imgs.type = VIDEO_PALETTE_YUV420P;
                    case VIDEO_PALETTE_YUV420P:
                        motion_log(-1, 0,
                                   " VIDEO_PALETTE_YUV420P setting imgs.size and imgs.motionsize");
                        cnt->imgs.motionsize = width * height;
                        cnt->imgs.size = (width * height * 3) / 2;
                        break;
                }
                fd = viddevs[i]->fd_bktr; // FIXME return fd_tuner ?!
                pthread_mutex_unlock(&vid_mutex);
                return fd;
            }
        }

        viddevs = myrealloc(viddevs, sizeof(struct video_dev *) * (i + 2), "vid_start");
        viddevs[i] = mymalloc(sizeof(struct video_dev));
        viddevs[i + 1] = NULL;

        pthread_mutexattr_init(&viddevs[i]->attr);
        pthread_mutex_init(&viddevs[i]->mutex, NULL);

        fd_bktr = open(conf->video_device, O_RDWR);
        if (fd_bktr < 0) {
            motion_log(LOG_ERR, 1, "open video device %s", conf->video_device);
            motion_log(LOG_ERR, 0, "Motion Exits.");
            return -1;
        }

        /* Only open the tuner if conf->tuner_device is set, freq > 0 and input is IN_TV */
        if ((conf->tuner_device != NULL) && (frequency > 0) && (input == IN_TV)) {
            fd_tuner = open(conf->tuner_device, O_RDWR);
            if (fd_tuner < 0) {
                motion_log(LOG_ERR, 1, "open tuner device %s", conf->tuner_device);
                motion_log(LOG_ERR, 0, "Motion Exits.");
                return -1;
            }
        }

        viddevs[i]->video_device = conf->video_device;
        viddevs[i]->tuner_device = conf->tuner_device;
        viddevs[i]->fd_bktr = fd_bktr;
        viddevs[i]->fd_tuner = fd_tuner;
        viddevs[i]->input = input;
        viddevs[i]->height = height;
        viddevs[i]->width = width;
        viddevs[i]->freq = frequency;
        viddevs[i]->owner = -1;

        /* We set brightness, contrast, saturation and hue = 0 so that they only get
         * set if the config is not zero.
         */
        viddevs[i]->brightness = 0;
        viddevs[i]->contrast = 0;
        viddevs[i]->saturation = 0;
        viddevs[i]->hue = 0;
        viddevs[i]->owner = -1;

        /* default palette */
        viddevs[i]->v4l_fmt = VIDEO_PALETTE_YUV420P;
        viddevs[i]->v4l_curbuffer = 0;
        viddevs[i]->v4l_maxbuffer = 1;

        if (!v4l_start(cnt, viddevs[i], width, height, input, norm, frequency)) {
            pthread_mutex_unlock(&vid_mutex);
            return -1;
        }

        cnt->imgs.type = viddevs[i]->v4l_fmt;

        switch (cnt->imgs.type) {
            case VIDEO_PALETTE_GREY:
                cnt->imgs.size = width * height;
                cnt->imgs.motionsize = width * height;
                break;
            case VIDEO_PALETTE_RGB24:
            case VIDEO_PALETTE_YUV422:
                cnt->imgs.type = VIDEO_PALETTE_YUV420P;
            case VIDEO_PALETTE_YUV420P:
                motion_log(-1, 0, "VIDEO_PALETTE_YUV420P imgs.type");
                cnt->imgs.size = (width * height * 3) / 2;
                cnt->imgs.motionsize = width * height;
                break;
        }

        pthread_mutex_unlock(&vid_mutex);
    }
#endif /*WITHOUT_V4L*/

    /* FIXME needed tuner device ?! */
    return fd_bktr;
}

/**
 * vid_next fetches a video frame from either a v4l device or a netcam
 * Parameters:
 *     cnt        Pointer to the context for this thread
 *     map        Pointer to the buffer in which the function puts the new image
 *
 * Returns
 *     0          Success
 *    -1          Fatal V4L error
 *    -2          Fatal Netcam error
 *     1          Non fatal V4L error (not implemented)
 *     2          Non fatal Netcam error
 */
int vid_next(struct context *cnt, unsigned char *map)
{
    struct config *conf = &cnt->conf;
    int ret = -1;

    if (conf->netcam_url) {
        if (cnt->video_dev == -1)
            return NETCAM_GENERAL_ERROR;

        ret = netcam_next(cnt, map);
        return ret;
    }

#ifndef WITHOUT_V4L
    int i = -1;
    int width, height;
    int dev_bktr = cnt->video_dev;

    /* NOTE: Since this is a capture, we need to use capture dimensions. */
    width = cnt->rotate_data.cap_width;
    height = cnt->rotate_data.cap_height;

    while (viddevs[++i])
        if (viddevs[i]->fd_bktr == dev_bktr)
            break;

    if (!viddevs[i])
        return -1;

    if (viddevs[i]->owner != cnt->threadnr) {
        pthread_mutex_lock(&viddevs[i]->mutex);
        viddevs[i]->owner = cnt->threadnr;
        viddevs[i]->frames = conf->roundrobin_frames;
        cnt->switched = 1;
    }

    v4l_set_input(cnt, viddevs[i], map, width, height, conf->input, conf->norm,
                  conf->roundrobin_skip, conf->frequency);

    ret = v4l_next(viddevs[i], map, width, height);

    if (--viddevs[i]->frames <= 0) {
        viddevs[i]->owner = -1;
        pthread_mutex_unlock(&viddevs[i]->mutex);
    }

    if (cnt->rotate_data.degrees > 0) {
        /* rotate the image as specified */
        rotate_map(cnt, map);
    }
#endif /*WITHOUT_V4L*/

    return ret;
}
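The wrapper comment above describes the intended call order of vid_init / vid_start / vid_next / vid_close / vid_cleanup. A minimal caller sketch under assumptions: the "motion.h" header name, the mymalloc'd frame buffer and the one-shot structure below are illustrative and not taken from this file.

/* Hypothetical caller sketch: header name, buffer handling and error paths
 * are assumptions, not part of video_freebsd.c. It assumes cnt->conf has
 * already been populated (video_device, width, height, norm, ...). */
#include "motion.h"   /* assumed to declare struct context, mymalloc() and the vid_* wrappers */

static int capture_one_frame(struct context *cnt)
{
    unsigned char *frame;
    int ret;

    vid_init();                        /* once per process: allocate viddevs[] and init vid_mutex */

    cnt->video_dev = vid_start(cnt);   /* open conf->video_device (and tuner), mmap the capture buffer */
    if (cnt->video_dev < 0)
        return -1;

    frame = mymalloc(cnt->imgs.size);  /* imgs.size was filled in by vid_start() above */
    ret = vid_next(cnt, frame);        /* 0 on success, -1/-2 on fatal v4l/netcam error */

    free(frame);
    vid_close();                       /* close the bktr and tuner file descriptors */
    vid_cleanup();                     /* munmap the capture buffers and free viddevs[] */
    return ret;
}

Note that vid_start() serializes device setup with vid_mutex, so multiple camera threads can call it concurrently, while round-robin sharing of a single bktr device is handled inside vid_next() via the per-device mutex, owner and frames fields.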
