
📄 video.c

📁 Video motion detection for Linux
💻 C
📖 Page 1 of 2
/*  video.c
 *
 *  Video stream functions for motion.
 *  Copyright 2000 by Jeroen Vreeken (pe1rxq@amsat.org)
 *  This software is distributed under the GNU public license version 2
 *  See also the file 'COPYING'.
 *
 */

/* Common stuff: */
#include "motion.h"
#include "video.h"
/* for rotation */
#include "rotate.h"

#ifndef WITHOUT_V4L

/* for the v4l stuff: */
#include "pwc-ioctl.h"
#include <sys/mman.h>
//#include "sys/ioctl.h"
#include <math.h>
#include <sys/utsname.h>
#include <dirent.h>

#define MAX2(x, y) ((x) > (y) ? (x) : (y))
#define MIN2(x, y) ((x) < (y) ? (x) : (y))

/* Constants used by the auto brightness feature.
 * Defined as constants to make it easier for people to tweak the code for a
 * difficult camera.
 * The experience gained from people could help improve the feature without
 * adding too many new options.
 * AUTOBRIGHT_HYSTERESIS sets the minimum the light intensity must change before
 * we adjust brightness.
 * AUTOBRIGHT_DAMPER damps the speed with which we adjust the brightness.
 * When the brightness changes a lot we step in large steps and as we approach the
 * target value we slow down to avoid overshoot and oscillations. If the camera
 * adjusts too slowly decrease the DAMPER value. If the camera oscillates try
 * increasing the DAMPER value. DAMPER must be minimum 1.
 * MAX and MIN are the max and min values of brightness setting we will send to
 * the camera device.
 */
#define AUTOBRIGHT_HYSTERESIS 10
#define AUTOBRIGHT_DAMPER 5
#define AUTOBRIGHT_MAX 255
#define AUTOBRIGHT_MIN 0

static void v4l_picture_controls(struct context *cnt, struct video_dev *viddev)
{
    int dev = viddev->fd;
    unsigned char *image = cnt->imgs.image_ring_buffer;
    struct video_picture vid_pic;
    int make_change = 0;

    if (cnt->conf.contrast && cnt->conf.contrast != viddev->contrast) {
        if (ioctl(dev, VIDIOCGPICT, &vid_pic) == -1)
            motion_log(LOG_ERR, 1, "ioctl (VIDIOCGPICT)");

        make_change = 1;
        vid_pic.contrast = cnt->conf.contrast * 256;
        viddev->contrast = cnt->conf.contrast;
    }

    if (cnt->conf.saturation && cnt->conf.saturation != viddev->saturation) {
        if (!make_change) {
            if (ioctl(dev, VIDIOCGPICT, &vid_pic) == -1)
                motion_log(LOG_ERR, 1, "ioctl (VIDIOCGPICT)");
        }

        make_change = 1;
        vid_pic.colour = cnt->conf.saturation * 256;
        viddev->saturation = cnt->conf.saturation;
    }

    if (cnt->conf.hue && cnt->conf.hue != viddev->hue) {
        if (!make_change) {
            if (ioctl(dev, VIDIOCGPICT, &vid_pic) == -1)
                motion_log(LOG_ERR, 1, "ioctl (VIDIOCGPICT)");
        }

        make_change = 1;
        vid_pic.hue = cnt->conf.hue * 256;
        viddev->hue = cnt->conf.hue;
    }

    if (cnt->conf.autobright) {

        int brightness_window_high;
        int brightness_window_low;
        int brightness_target;
        int i, j = 0, avg = 0, step = 0;

        if (cnt->conf.brightness)
            brightness_target = cnt->conf.brightness;
        else
            brightness_target = 128;

        brightness_window_high = MIN2(brightness_target + AUTOBRIGHT_HYSTERESIS, 255);
        brightness_window_low = MAX2(brightness_target - AUTOBRIGHT_HYSTERESIS, 1);

        for (i = 0; i < cnt->imgs.motionsize; i += 101) {
            avg += image[i];
            j++;
        }
        avg = avg / j;

        if (avg > brightness_window_high || avg < brightness_window_low) {
            /* If we already read the VIDIOCGPICT - we should not do it again */
            if (!make_change) {
                if (ioctl(dev, VIDIOCGPICT, &vid_pic) == -1)
                    motion_log(LOG_ERR, 1, "ioctl (VIDIOCGPICT)");
            }

            /* average is above window - turn down brightness - go for the target */
            if (avg > brightness_window_high) {
                step = MIN2((avg - brightness_target)/AUTOBRIGHT_DAMPER + 1,
                            viddev->brightness - AUTOBRIGHT_MIN);
                if (viddev->brightness > step + 1 - AUTOBRIGHT_MIN) {
                    viddev->brightness -= step;
                    vid_pic.brightness = viddev->brightness * 256;
                    make_change = 1;
                }
            }

            /* average is below window - turn up brightness - go for the target */
            if (avg < brightness_window_low) {
                step = MIN2((brightness_target - avg)/AUTOBRIGHT_DAMPER + 1,
                            AUTOBRIGHT_MAX - viddev->brightness);
                if (viddev->brightness < AUTOBRIGHT_MAX - step) {
                    viddev->brightness += step;
                    vid_pic.brightness = viddev->brightness * 256;
                    make_change = 1;
                }
            }
        }
    } else {
        if (cnt->conf.brightness && cnt->conf.brightness != viddev->brightness) {
            if (!make_change) {
                if (ioctl(dev, VIDIOCGPICT, &vid_pic) == -1)
                    motion_log(LOG_ERR, 1, "ioctl (VIDIOCGPICT)");
            }

            make_change = 1;
            vid_pic.brightness = cnt->conf.brightness * 256;
            viddev->brightness = cnt->conf.brightness;
        }
    }

    if (make_change) {
        if (ioctl(dev, VIDIOCSPICT, &vid_pic) == -1)
            motion_log(LOG_ERR, 1, "ioctl (VIDIOCSPICT)");
    }
}

static void yuv422to420p(unsigned char *map, unsigned char *cap_map, int width, int height)
{
    unsigned char *src, *dest, *src2, *dest2;
    int i, j;

    /* Create the Y plane */
    src = cap_map;
    dest = map;
    for (i = width*height; i > 0; i--) {
        *dest++ = *src;
        src += 2;
    }

    /* Create U and V planes */
    src = cap_map + 1;
    src2 = cap_map + width*2 + 1;
    dest = map + width*height;
    dest2 = dest + (width*height)/4;
    for (i = height/2; i > 0; i--) {
        for (j = width/2; j > 0; j--) {
            *dest = ((int)*src + (int)*src2)/2;
            src += 2;
            src2 += 2;
            dest++;
            *dest2 = ((int)*src + (int)*src2)/2;
            src += 2;
            src2 += 2;
            dest2++;
        }
        src += width*2;
        src2 += width*2;
    }
}

static void rgb24toyuv420p(unsigned char *map, unsigned char *cap_map, int width, int height)
{
    unsigned char *y, *u, *v;
    unsigned char *r, *g, *b;
    int i, loop;

    b = cap_map;
    g = b + 1;
    r = g + 1;
    y = map;
    u = y + width*height;
    v = u + (width*height)/4;
    memset(u, 0, width*height/4);
    memset(v, 0, width*height/4);

    for (loop = 0; loop < height; loop++) {
        for (i = 0; i < width; i += 2) {
            *y++ = (9796**r + 19235**g + 3736**b) >> 15;
            *u += ((-4784**r - 9437**g + 14221**b) >> 17) + 32;
            *v += ((20218**r - 16941**g - 3277**b) >> 17) + 32;
            r += 3;
            g += 3;
            b += 3;
            *y++ = (9796**r + 19235**g + 3736**b) >> 15;
            *u += ((-4784**r - 9437**g + 14221**b) >> 17) + 32;
            *v += ((20218**r - 16941**g - 3277**b) >> 17) + 32;
            r += 3;
            g += 3;
            b += 3;
            u++;
            v++;
        }
        if ((loop & 1) == 0) {
            u -= width/2;
            v -= width/2;
        }
    }
}

/*******************************************************************************************
    Video4linux capture routines
*/

static unsigned char *v4l_start(struct context *cnt, struct video_dev *viddev, int width, int height,
                                int input, int norm, unsigned long freq, int tuner_number)
{
    int dev = viddev->fd;
    struct video_capability vid_caps;
    struct video_channel vid_chnl;
    struct video_tuner vid_tuner;
    struct video_mbuf vid_buf;
    struct video_mmap vid_mmap;
    void *map;

    if (ioctl(dev, VIDIOCGCAP, &vid_caps) == -1) {
        motion_log(LOG_ERR, 1, "ioctl (VIDIOCGCAP)");
        return (NULL);
    }

    if (vid_caps.type & VID_TYPE_MONOCHROME)
        viddev->v4l_fmt = VIDEO_PALETTE_GREY;

    if (input != IN_DEFAULT) {
        memset(&vid_chnl, 0, sizeof(struct video_channel));
        vid_chnl.channel = input;

        if (ioctl(dev, VIDIOCGCHAN, &vid_chnl) == -1) {
            motion_log(LOG_ERR, 1, "ioctl (VIDIOCGCHAN)");
        } else {
            vid_chnl.channel = input;
            vid_chnl.norm    = norm;

            if (ioctl(dev, VIDIOCSCHAN, &vid_chnl) == -1) {
                motion_log(LOG_ERR, 1, "ioctl (VIDIOCSCHAN)");
                return (NULL);
            }
        }
    }

    if (freq) {
        vid_tuner.tuner = tuner_number;
        if (ioctl(dev, VIDIOCGTUNER, &vid_tuner) == -1) {
            motion_log(LOG_ERR, 1, "ioctl (VIDIOCGTUNER)");
        } else {
            if (vid_tuner.flags & VIDEO_TUNER_LOW) {
                freq = freq*16; /* steps of 1/16 KHz */
            } else {
                freq = (freq*10)/625;
            }

            if (ioctl(dev, VIDIOCSFREQ, &freq) == -1) {
                motion_log(LOG_ERR, 1, "ioctl (VIDIOCSFREQ)");
                return (NULL);
            }

            if (cnt->conf.setup_mode)
                motion_log(-1, 0, "Frequency set");
        }
    }

    if (ioctl(dev, VIDIOCGMBUF, &vid_buf) == -1) {
        motion_log(LOG_ERR, 0, "ioctl(VIDIOCGMBUF) - Error device does not support memory map");
        motion_log(LOG_ERR, 0, "V4L capturing using read is deprecated!");
        motion_log(LOG_ERR, 0, "Motion only supports mmap.");
        return NULL;
    } else {
        map = mmap(0, vid_buf.size, PROT_READ|PROT_WRITE, MAP_SHARED, dev, 0);
        viddev->size_map = vid_buf.size;

        if (vid_buf.frames > 1) {
            viddev->v4l_maxbuffer = 2;
            viddev->v4l_buffers[0] = map;
            viddev->v4l_buffers[1] = (unsigned char *)map + vid_buf.offsets[1];
        } else {
            viddev->v4l_buffers[0] = map;
            viddev->v4l_maxbuffer = 1;
        }

        if (MAP_FAILED == map) {
            return (NULL);
        }

        viddev->v4l_curbuffer = 0;
        vid_mmap.format = viddev->v4l_fmt;
        vid_mmap.frame = viddev->v4l_curbuffer;
        vid_mmap.width = width;
        vid_mmap.height = height;

        if (ioctl(dev, VIDIOCMCAPTURE, &vid_mmap) == -1) {
            motion_log(LOG_DEBUG, 1, "Failed with YUV420P, trying YUV422 palette");
            viddev->v4l_fmt = VIDEO_PALETTE_YUV422;
            vid_mmap.format = viddev->v4l_fmt;

            /* Try again... */
            if (ioctl(dev, VIDIOCMCAPTURE, &vid_mmap) == -1) {
                motion_log(LOG_DEBUG, 1, "Failed with YUV422, trying RGB24 palette");
                viddev->v4l_fmt = VIDEO_PALETTE_RGB24;
                vid_mmap.format = viddev->v4l_fmt;

                /* Try again... */
                if (ioctl(dev, VIDIOCMCAPTURE, &vid_mmap) == -1) {
                    motion_log(LOG_DEBUG, 1, "Failed with RGB24, trying GREYSCALE palette");
                    viddev->v4l_fmt = VIDEO_PALETTE_GREY;
                    vid_mmap.format = viddev->v4l_fmt;

                    /* Try one last time... */
                    if (ioctl(dev, VIDIOCMCAPTURE, &vid_mmap) == -1) {
                        motion_log(LOG_ERR, 1, "Failed with all supported palettes "
                                               "- giving up");
                        return (NULL);
                    }
                }
            }
        }
    }

    switch (viddev->v4l_fmt) {
        case VIDEO_PALETTE_YUV420P:
            viddev->v4l_bufsize = (width*height*3)/2;
            break;
        case VIDEO_PALETTE_YUV422:
            viddev->v4l_bufsize = (width*height*2);
            break;
        case VIDEO_PALETTE_RGB24:
            viddev->v4l_bufsize = (width*height*3);
            break;
        case VIDEO_PALETTE_GREY:
            viddev->v4l_bufsize = width*height;
            break;
    }

    return map;
}

/**
 * v4l_next
 *                v4l_next fetches a video frame from a v4l device
 *
 * Parameters:
 *     cnt        Pointer to the context for this thread
 *     viddev     Pointer to struct containing video device handle and device parameters
 *     map        Pointer to the buffer in which the function puts the new image
 *     width      Width of image in pixels
 *     height     Height of image in pixels
 *
 * Returns
 *     0                Success
 *     V4L_FATAL_ERROR  Fatal error
 *     Positive with bit 0 set and bit 1 unset
 *                      Non fatal error (not implemented)
 */
static int v4l_next(struct video_dev *viddev, unsigned char *map, int width, int height)
{
    int dev = viddev->fd;
    int frame = viddev->v4l_curbuffer;
    struct video_mmap vid_mmap;
    unsigned char *cap_map;
    sigset_t set, old;

    /* MMAP method is used */
    vid_mmap.format = viddev->v4l_fmt;
    vid_mmap.width = width;
    vid_mmap.height = height;

    /* Block signals during IOCTL */
    sigemptyset(&set);
    sigaddset(&set, SIGCHLD);
    sigaddset(&set, SIGALRM);
    sigaddset(&set, SIGUSR1);
    sigaddset(&set, SIGTERM);
    sigaddset(&set, SIGHUP);
    pthread_sigmask(SIG_BLOCK, &set, &old);

    cap_map = viddev->v4l_buffers[viddev->v4l_curbuffer];
    viddev->v4l_curbuffer++;
    if (viddev->v4l_curbuffer >= viddev->v4l_maxbuffer)
        viddev->v4l_curbuffer = 0;

    vid_mmap.frame = viddev->v4l_curbuffer;

    if (ioctl(dev, VIDIOCMCAPTURE, &vid_mmap) == -1) {
        motion_log(LOG_ERR, 1, "mcapture error in proc %d", getpid());
        sigprocmask(SIG_UNBLOCK, &old, NULL);
        return V4L_FATAL_ERROR;
    }

    vid_mmap.frame = frame;

    if (ioctl(dev, VIDIOCSYNC, &vid_mmap.frame) == -1) {
        motion_log(LOG_ERR, 1, "sync error in proc %d", getpid());
        sigprocmask(SIG_UNBLOCK, &old, NULL);
    }

    pthread_sigmask(SIG_UNBLOCK, &old, NULL);        /* undo the signal blocking */

    switch (viddev->v4l_fmt) {
        case VIDEO_PALETTE_RGB24:
            rgb24toyuv420p(map, cap_map, width, height);
            break;
        case VIDEO_PALETTE_YUV422:
            yuv422to420p(map, cap_map, width, height);
            break;
        default:
            memcpy(map, cap_map, viddev->v4l_bufsize);
    }

    return 0;
}

static void v4l_set_input(struct context *cnt, struct video_dev *viddev, unsigned char *map, int width, int height, int input,
                          int norm, int skip, unsigned long freq, int tuner_number)
{
    int dev = viddev->fd;
    int i;
    struct video_channel vid_chnl;
    struct video_tuner vid_tuner;
    unsigned long frequnits = freq;

    if (input != viddev->input || width != viddev->width || height != viddev->height ||
        freq != viddev->freq || tuner_number != viddev->tuner_number) {
        if (freq) {
            vid_tuner.tuner = tuner_number;
            if (ioctl(dev, VIDIOCGTUNER, &vid_tuner) == -1) {
                motion_log(LOG_ERR, 1, "ioctl (VIDIOCGTUNER)");
            } else {
                if (vid_tuner.flags & VIDEO_TUNER_LOW) {
                    frequnits = freq*16; /* steps of 1/16 KHz */
                } else {
                    frequnits = (freq*10)/625;
                }

                if (ioctl(dev, VIDIOCSFREQ, &frequnits) == -1) {
                    motion_log(LOG_ERR, 1, "ioctl (VIDIOCSFREQ)");
                    return;
                }
            }
        }

        vid_chnl.channel = input;

        if (ioctl(dev, VIDIOCGCHAN, &vid_chnl) == -1) {
            motion_log(LOG_ERR, 1, "ioctl (VIDIOCGCHAN)");
        } else {
            vid_chnl.channel = input;
            vid_chnl.norm = norm;

            if (ioctl(dev, VIDIOCSCHAN, &vid_chnl) == -1) {
                motion_log(LOG_ERR, 1, "ioctl (VIDIOCSCHAN)");
                return;
            }
        }

        v4l_picture_controls(cnt, viddev);

        viddev->input = input;
        viddev->width = width;
        viddev->height = height;
        viddev->freq = freq;
        viddev->tuner_number = tuner_number;

        /* skip a few frames if needed */
        for (i = 0; i < skip; i++)
            v4l_next(viddev, map, width, height);
    } else {
        /* No round robin - we only adjust picture controls */
        v4l_picture_controls(cnt, viddev);
    }
}

static int v4l_open_vidpipe(void)
{
    int pipe_fd = -1;
    char pipepath[255];
    char buffer[255];
    char *major;
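
The comment block above the AUTOBRIGHT constants describes a damped feedback loop: the further the measured frame average is from the target brightness, the larger the correction, divided by AUTOBRIGHT_DAMPER so the setting converges without overshooting. The following is a simplified, self-contained sketch of that idea, not code from video.c; the function name demo_next_brightness() and the small simulation in main() are invented for the illustration, and the clamping is reduced to simple range checks.

/*
 * Illustration only - a simplified sketch of the damped auto-brightness step
 * performed by v4l_picture_controls() above. demo_next_brightness() is a
 * made-up name; the original works on struct video_dev and ioctl calls.
 */
#include <stdio.h>

#define DEMO_HYSTERESIS 10
#define DEMO_DAMPER     5
#define DEMO_MAX        255
#define DEMO_MIN        0

static int demo_next_brightness(int current, int avg, int target)
{
    int step;

    if (avg > target + DEMO_HYSTERESIS) {           /* frame too bright: step down */
        step = (avg - target) / DEMO_DAMPER + 1;
        if (current - step >= DEMO_MIN)
            current -= step;
    } else if (avg < target - DEMO_HYSTERESIS) {    /* frame too dark: step up */
        step = (target - avg) / DEMO_DAMPER + 1;
        if (current + step <= DEMO_MAX)
            current += step;
    }
    return current;                                 /* inside the window: no change */
}

int main(void)
{
    int brightness = 200, avg = 40, target = 128, i;

    /* In motion the average comes from sampling the captured frame; here we
     * just pretend it follows the brightness setting with some lag. */
    for (i = 0; i < 10; i++) {
        printf("brightness=%d avg=%d\n", brightness, avg);
        brightness = demo_next_brightness(brightness, avg, target);
        avg += (brightness - avg) / 2;
    }
    return 0;
}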
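A note on the integer constants in rgb24toyuv420p(): they appear to be the ITU-R BT.601 colour-conversion weights in fixed point. For the luma row, 9796/32768 ≈ 0.299, 19235/32768 ≈ 0.587 and 3736/32768 ≈ 0.114, which is why the sum is shifted right by 15. The chroma rows use the same idea but shift by 17 instead of 15, i.e. each contribution is divided by four, and the +32 added for each of the four pixels in a 2x2 block sums to the usual 128 offset; since the U and V planes are zeroed with memset() first and the even-row rewind revisits the same positions, each U/V byte ends up holding the average chroma of its 2x2 block.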
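The doc comment for v4l_next() above spells out the per-frame contract: v4l_start() maps the driver buffers once, after which each v4l_next() call queues the next buffer with VIDIOCMCAPTURE and syncs on the previous one, leaving a YUV420P image in the caller's buffer. The sketch below shows how the two routines fit together; capture_frames() is a hypothetical helper that assumes it is compiled in the same translation unit as the static functions above, with the context and device already opened and configured elsewhere by motion.

/*
 * Illustration only - not part of video.c. A minimal driver loop for the
 * capture routines above; capture_frames() is a made-up name.
 */
static int capture_frames(struct context *cnt, struct video_dev *viddev,
                          unsigned char *map, int width, int height, int nframes)
{
    int i;

    /* Map the device buffers once; NULL means the device could not be started.
     * freq = 0 skips the tuner setup path in v4l_start(). */
    if (v4l_start(cnt, viddev, width, height, IN_DEFAULT, 0, 0, 0) == NULL)
        return V4L_FATAL_ERROR;

    for (i = 0; i < nframes; i++) {
        /* Each call queues the next driver buffer and syncs on the previous one. */
        if (v4l_next(viddev, map, width, height) != 0)
            return V4L_FATAL_ERROR;

        /* ... hand 'map' to the motion detection pipeline here ... */
    }

    return 0;
}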
