
📄 camera.c

📁 This is my USB video capture code, based on the OV511 controller chip
💻 C
/*the oldest file, written 2007-03-28;
frame capture still has problems*/


/*included header files*/

#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <ctype.h>
#include <errno.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/ioctl.h>
#include <linux/videodev.h>
#include "jpeglib.h"


#define COLS 320
#define ROWS 240
#define DEPTH 3
#define QUALITY 80
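
/* COLS x ROWS is the requested capture size, DEPTH the number of bytes per pixel
   for RGB24 data, and QUALITY the JPEG quality factor (0-100). */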


void put_image_jpeg (FILE *out, unsigned char *image, int width, int height, int quality, int palette)
{
	int y, x, line_width;
	JSAMPROW row_ptr[1];  /* pointer to a single scan line; libjpeg is fed one row at a time */
	struct jpeg_compress_struct cjpeg;
	struct jpeg_error_mgr jerr;
	unsigned char *line;       //scratch buffer holding one reordered scan line

	line = malloc (width * 3);  //one RGB row: three bytes (samples) per pixel
	if (!line)
		return;
	cjpeg.err = jpeg_std_error(&jerr);   //install the standard libjpeg error handler
	jpeg_create_compress (&cjpeg); //initialise the compression object
	cjpeg.image_width = width;
	cjpeg.image_height= height;
	if (palette == VIDEO_PALETTE_GREY) {
		cjpeg.input_components = 1;  //grayscale image: one byte per pixel
		cjpeg.in_color_space = JCS_GRAYSCALE;  //color space of the source image
	//	jpeg_set_colorspace (&cjpeg, JCS_GRAYSCALE);
	} else {
		cjpeg.input_components = 3;  //otherwise three bytes per pixel
		cjpeg.in_color_space = JCS_RGB; //RGB color space
	}
	jpeg_set_defaults (&cjpeg);      //fill in the default compression parameters
	jpeg_set_quality (&cjpeg, quality, TRUE);//The "TRUE" parameter ensures that a complete JPEG 
	                                         //interchange datastream will be written.
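	/* JDCT_FASTEST favours speed over accuracy; jpeg_stdio_dest() sends the compressed data to the stdio stream `out' */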
 	cjpeg.dct_method = JDCT_FASTEST;
	jpeg_stdio_dest (&cjpeg, out);


	jpeg_start_compress (&cjpeg, TRUE);
	row_ptr[0] = line;   //point row_ptr[0] at the scratch line buffer
	if (palette == VIDEO_PALETTE_GREY) {
		line_width = width;
		for ( y = 0; y < height; y++) {
			row_ptr[0] = image;
			jpeg_write_scanlines (&cjpeg, row_ptr, 1);
			image += line_width;
		}
	} else {
		line_width = width * 3;
		for ( y = 0; y < height; y++) {
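			/* V4L1 RGB24 frames arrive with blue first (BGR); reorder each pixel to RGB before handing the row to libjpeg */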
			for (x = 0; x < line_width; x+=3) {
				line[x]   = image[x+2];
				line[x+1] = image[x+1];
				line[x+2] = image[x];
			}
			jpeg_write_scanlines (&cjpeg, row_ptr, 1);
			image += line_width;
		}
	}
	jpeg_finish_compress (&cjpeg);
	jpeg_destroy_compress (&cjpeg);
	free (line);
}
void swap_rgb24(unsigned char *mem, int n)   //swap the red and blue bytes of n pixels in place
{
	unsigned char c;
	unsigned char *p = mem;
	int i = n;
	while (i-- > 0)   //captured data is in BGR order; swap to RGB so it displays correctly
	{
		c = p[0];
		p[0] = p[2];
		p[2] = c;
		p += 3;
	}
}
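
/* Note: put_image_jpeg() already reorders BGR to RGB while copying each row, so this
   helper is only needed when writing raw frames directly (e.g. with save_pnm). */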

int save_pnm (char *buf, int x, int y, int depth) 
{
	static int inc=0;
	FILE *fp;
	char bewf[128];
	sprintf (bewf, "image-%d.pnm", inc);
	if ((fp=fopen(bewf, "wb")) == NULL) 
	{
		perror("open");
		exit(1);
	}
	printf("The color depth is %d\n",depth );
	if (depth==3) 
		fprintf(fp, "P6\n%d %d\n255\n", x, y);  //这些是往文件里面输入pnm的头结构
	else if (depth==1) 
		fprintf(fp, "P5\n%d %d\n255\n", x, y);
	fwrite ((unsigned char*) buf, x * y * depth, 1, fp);
	inc ++; // next name
	fclose (fp);
	return 0;
}

int main()
{
	int camera;  //device file descriptor
	//int picFile;  //file for saving image data
	int frame;   //frame index
 	int i;       //loop counter
 	int done=1;
 	//------------------------------
 	FILE *fp;
 	static int inc=0;    //counter used to number the saved jpeg files
 	char bewf[128];
 	//------------------------------------
 	int pal[] ={/*VIDEO_PALETTE_JPEG,*/VIDEO_PALETTE_YUV420P,VIDEO_PALETTE_RGB24,
 		           VIDEO_PALETTE_RGB565,VIDEO_PALETTE_RGB32};
	struct video_capability capability;   //basic device information (name, supported max/min resolution, input channels, ...)
	struct video_window window;           //information about the capture area
	struct video_channel channel[8];      //properties of each input channel
	struct video_picture picture;         //properties of the captured image
	struct video_mmap mapbuf;               //per-frame capture request used with mmap
	struct video_mbuf vidbuf;               //layout of the frames mapped with mmap
	unsigned char *grab_data;               //base address of the memory mapping
	unsigned char *buf;                     //address of the current frame's image data
	
	//open the device
	camera=open("/dev/v4l/video0",O_RDWR);
	if(camera<0)
	{
		printf("No camera Connected\n!");
		exit(-1);
	}
	
	//query the device capabilities
	if(ioctl(camera,VIDIOCGCAP, &capability)<0)
	{
		perror("iotcl VIDIOCGCAP failed!");
		exit(-1);
	}
	printf("Device name-> %s\n",capability.name);
	printf("Device type-> %d\n",capability.type);
	printf("Device channels-> %d\n",capability.channels);
	printf("Device Maxwidth-> %d\n",capability.maxwidth);
	printf("Device Maxheight-> %d\n",capability.maxheight);
	printf("Device Minwidth-> %d\n",capability.minwidth);
	printf("Device Minheight-> %d\n",capability.minheight);
	
	//query the image properties
	//the VIDIOCGPICT ioctl fills the video_picture struct, including the current format (palette)
	if(ioctl(camera,VIDIOCGPICT,&picture)<0)
	{
		perror("ioctl VIDIOCGPICT failed!");
		exit(-1);
	}
	printf("Brightness -> %d\n",picture.brightness/256);  //亮度
	printf("Colour -> %d\n",picture.colour/256);          //色彩
	printf("Contrast -> %d\n",picture.contrast/256);      //对比度
	printf("Whiteness -> %d\n",picture.whiteness/256);    //白平衡
	printf("Depth -> %d\n",picture.depth);                //色深
	printf("Palette -> %d\n",picture.palette);            //调色板参数(eg.RGB24	)
	
	////loop over the palettes above to check which formats the device accepts
	//for(i = 0; i < 5 ; i++)
	//{
	//	picture.palette = pal[i];
	//  
	//	printf("try palette %d depth %d\n",picture.palette,picture.depth);
	//	if (ioctl (camera, VIDIOCSPICT, &picture) < 0)
 //   {
 //     printf("Couldnt set palette first try %d \n", pal[i]);
 //   
 //   }
	  //VIDIOCSPICT returns >= 0 on success; a negative return means the format is not usable
	  /*if (ioctl (camera, VIDIOCGPICT, &picture) < 0)
	  {
	  	printf("Couldnt get palette %d \n", pal[i]);
	      
	  }
	  else 
	  	printf("now set the palette %d depth %d \n",picture.palette,picture.depth);

	}*/
	
	//Map the device buffers into memory.
	//First query the number of capture buffers actually available with VIDIOCGMBUF, then map the
	//device file into memory with mmap(). Besides speeding up I/O for high-rate capture, the
	//mapped region can also be shared between processes.
	//vidbuf (struct video_mbuf) describes the driver's frame buffers: size (total size in bytes),
	//frames (number of buffers) and offsets (each frame's offset from the base address).
	if(ioctl(camera,VIDIOCGMBUF,&vidbuf)<0)  //get the frame buffer layout
	{
		perror("ioctl VIDIOCGMBUF failed!");
		exit(-1);
	}
	//fill in the capture request
	mapbuf.width=COLS;
	mapbuf.height=ROWS;
	mapbuf.format=VIDEO_PALETTE_RGB24;
	//vidbuf.size is the total size of all frame buffers
	grab_data=(unsigned char *)mmap(0,vidbuf.size,PROT_READ|PROT_WRITE,MAP_SHARED,
	                            camera,0);
	if(grab_data==MAP_FAILED)   //bail out if the mapping failed
	{
		perror("mmap");
		exit(-1);
	}
	
	
	//Video capture (continuous frame grabbing)
	
	//Prime the driver: queue a capture request for every frame buffer first
	for(frame=0;frame<vidbuf.frames;frame++)   //the 网眼 V2000 reports 2 frames
	{
			mapbuf.frame=frame;
			if(ioctl(camera,VIDIOCMCAPTURE,&mapbuf)<0)   //start capturing into this buffer (non-blocking)
			{
				perror("VIDIOCMCAPTURE");
				//exit(-1);
			}
	}
	
	frame=0;
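	//Main loop: wait on the oldest queued buffer, save it as JPEG, requeue it, then move on to the next buffer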
	//while(1)
	for(done=1;done<5;done++)
	{
			//wait for one frame to be captured
			int i=-1;
			while(i<0)
			{
				i=ioctl(camera,VIDIOCSYNC,&frame);  //block until frame `frame' has been fully captured
				if(i<0)
				{
					if(errno==EINTR)
						continue;
					perror("VIDIOCSYNC");
					break;
				}
				printf("Finished capturing frame %d\n",frame);
				break;
			}//leave the loop once the frame is complete
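			/* offsets[frame] is this buffer's byte offset from the start of the mapped region */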
			buf=grab_data+vidbuf.offsets[frame];

		  //swap_rgb24(buf, COLS*ROWS*DEPTH);
			//save the frame to disk
		  //save_pnm (buf, COLS,ROWS,DEPTH);
		  sprintf (bewf, "image-%d.jpg", inc);
			if ((fp=fopen(bewf, "wb+")) == NULL) 
			{
				perror("open");
				exit(1);
			}		
			put_image_jpeg(fp,buf,COLS,ROWS,QUALITY,VIDEO_PALETTE_RGB24);
			fclose(fp);
			inc++;      //next file name
			mapbuf.frame=frame;
			if(ioctl(camera,VIDIOCMCAPTURE,&mapbuf)<0)   //queue this buffer for capture again (non-blocking);
				                                         //the next frame goes into the slot we just consumed
			{
				perror("VIDIOCMCAPTURE");
				exit(-1);
			}
			frame++;
			if(frame>=vidbuf.frames) 
				frame=0;
	}
	munmap(grab_data,vidbuf.size);   //release the mapping and the device before exiting
	close(camera);
	return 0;
}
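
/* A possible build command (a sketch, assuming the libjpeg headers and library are installed):
 *   gcc camera.c -o camera -ljpeg
 */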





	
