📄 testcamera.c
{0x94,0x88},{0x95,0x88},{0x40,0xc1},{0x29,0x3f},{0x0f,0x42},{0x3d,0x92},{0x69,0x40},{0x5C,0xb9},{0x5D,0x96},{0x5E,0x10},
{0x59,0xc0},{0x5A,0xaf},{0x5B,0x55},{0x43,0xf0},{0x44,0x10},{0x45,0x68},{0x46,0x96},{0x47,0x60},{0x48,0x80},{0x5F,0xe0},
{0x60,0x8c},{0x61,0x20},{0xa5,0xd9},{0xa4,0x74},{0x8d,0x02},{0x13,0xe7},{0x4f,0x3a},{0x50,0x3d},{0x51,0x03},{0x52,0x12},
{0x53,0x26},{0x54,0x38},{0x55,0x40},{0x56,0x40},{0x57,0x40},{0x58,0x0d},{0x8C,0x23},{0x3E,0x02},{0xa9,0xb8},{0xaa,0x92},
{0xab,0x0a},{0x8f,0xdf},{0x90,0x00},{0x91,0x00},{0x9f,0x00},{0xa0,0x00},{0x3A,0x01},{0x24,0x70},{0x25,0x64},{0x26,0xc3},
{0x2a,0x00},{0x2b,0x00},{0x6c,0x40},{0x6d,0x30},{0x6e,0x4b},{0x6f,0x60},{0x70,0x70},{0x71,0x70},{0x72,0x70},{0x73,0x70},
{0x74,0x60},{0x75,0x60},{0x76,0x50},{0x77,0x48},{0x78,0x3a},{0x79,0x2e},{0x7a,0x28},{0x7b,0x22},{0x7c,0x04},{0x7d,0x07},
{0x7e,0x10},{0x7f,0x28},{0x80,0x36},{0x81,0x44},{0x82,0x52},{0x83,0x60},{0x84,0x6c},{0x85,0x78},{0x86,0x8c},{0x87,0x9e},
{0x88,0xbb},{0x89,0xd2},{0x8a,0xe6},
//{0x3a, 0x0d}, //
//{0x3a, 0x1d}, //for test
//{0x67, 'U'},  //fixed value for U
//{0x68, 'V'},  //fixed value for V
//{0x15, 0x12}, //PCLK reverse, VSYNC negative
//{0x12, 0x10}, //QVGA
//{0x04, 0x20}, //QQVGA
//{0x15, 0x20}, //no PCLK when HREF is low
};

#define CAMERA_REGS (sizeof(camera_regs)/sizeof(camera_regs[0]))

#include <sys/time.h>

static void delay_ms(long ms)
{
/*
    struct timeval tvs, tve;
    struct timezone tzs, tze;

    if(!ms)
        return;
    gettimeofday(&tvs, &tzs);
    while(1) {
        long ds, dus;

        gettimeofday(&tve, &tze);
        dus = tve.tv_usec - tvs.tv_usec;
        if(dus<0) {
            dus += 1000000;
            tve.tv_sec--;
        }
        ds = tve.tv_sec - tvs.tv_sec;
        if((ds*1000000+dus)/1000>=ms)
            break;
    }
*/
    fd_set rfds;
    struct timeval tv;

    FD_ZERO(&rfds);
    FD_SET(0, &rfds);
    tv.tv_sec = 0;
    tv.tv_usec = ms*1000;
    select(1, &rfds, NULL, NULL, &tv);
}
#endif

#ifdef USER_I2C_INIT
static int i2c_write(int fd, __u8 reg, __u8 val)
{
    int retries;
    __u8 data[2];

    data[0] = reg;
    data[1] = val;
    for(retries=5; retries; retries--) {
        if(write(fd, data, 2)==2)
            return 0;
        delay_ms(2);
    }
    //printf("write fail %x %x\n", data[0], data[1]);
    return -1;
}

static int i2c_read(int fd, __u8 reg, __u8 *val)
{
    int retries;

    for(retries=5; retries; retries--) {
        if(write(fd, &reg, 1)==1)
            if(read(fd, val, 1)==1)
                return 0;
        delay_ms(2);
    }
    //printf("read fail\n");
    return -1;
}

#define I2C_SLAVE 0x0703 /* Change slave address */

static int camera_i2c_init(void)
{
    int i, fd;
    __u8 id[4];

    //delay_ms(100);
    printf("open i2c device...\n");
    fd = open("/dev/i2c/0", O_RDWR);
    if(fd<0) {
        fd = open("/dev/misc/i2c", O_RDWR);
        if(fd<0) {
            printf("fail to open i2c adapter device!\n");
            return -1;
        }
    }
    if(ioctl(fd, I2C_SLAVE, CAMERA_I2C_ADDR)<0) {
        printf("fail to set i2c device slave address!\n");
        close(fd);
        return -1;
    }
    printf("set slave address to 0x%x success!\n", CAMERA_I2C_ADDR);

    if(i2c_read(fd, CAMERA_MIDH, id)||i2c_read(fd, CAMERA_MIDL, id+1)) {
        printf("fail to get camera MID!\n");
        close(fd);
        return -1;
    }
    printf("manufacturer ID is 0x%04x\n", (id[0]<<8)|id[1]);
    i2c_read(fd, CAMERA_PIDH, id+2);
    i2c_read(fd, CAMERA_PIDL, id+3);
    printf("product ID is 0x%04x\n", (id[2]<<8)|id[3]);

    for(i=0; i<CAMERA_REGS; i++) {
        if(camera_regs[i].subaddr==CHIP_DELAY)
            delay_ms(camera_regs[i].value);
        else if(i2c_write(fd, camera_regs[i].subaddr, camera_regs[i].value))
            printf("write subaddr 0x%x fail!\n", camera_regs[i].subaddr);
        //delay_ms(2);
        //printf("%d\n", i);
    }
    if(cpu_type==1) {                   //s3c2440a
        i2c_write(fd, 0x15, 0x00);      //always has PCLK
        i2c_write(fd, 0x3a, 0x01);      //YCbYCr order
    }
    close(fd);
    return 0;
}
#else
#define camera_i2c_init() 0
#endif
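/*
 * Note on the SCCB path below (inferred from how the ioctls are used here,
 * not from driver documentation): WCAM_VIDIOCGCAMREG is passed a 16-bit value
 * with the register sub-address in the high byte and returns the register
 * contents in the low byte, while WCAM_VIDIOCSCAMREG takes
 * (subaddr << 8) | value. For example, reading the product ID high byte would
 * look roughly like:
 *
 *     __u16 v = CAMERA_PIDH << 8;
 *     if (ioctl(fd, WCAM_VIDIOCGCAMREG, &v) == 0)
 *         printf("PIDH = 0x%02x\n", v & 0xff);
 */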
#ifdef SCCB_INIT
static int sccb_init(int fd)
{
    int i;
    __u16 val;
    __u8 id[4];

    val = CAMERA_MIDH<<8;
    if(ioctl(fd, WCAM_VIDIOCGCAMREG, &val)<0) {
        printf("get camera register fail!\n");
        return -1;
    }
    id[0] = val;
    val = CAMERA_MIDL<<8;
    if(ioctl(fd, WCAM_VIDIOCGCAMREG, &val)<0) {
        printf("get camera register fail!\n");
        return -1;
    }
    id[1] = val;
    printf("manufacturer ID is 0x%04x\n", (id[0]<<8)|id[1]);

    val = CAMERA_PIDH<<8;
    if(ioctl(fd, WCAM_VIDIOCGCAMREG, &val)<0) {
        printf("get camera register fail!\n");
        return -1;
    }
    id[2] = val;
    val = CAMERA_PIDL<<8;
    if(ioctl(fd, WCAM_VIDIOCGCAMREG, &val)<0) {
        printf("get camera register fail!\n");
        return -1;
    }
    id[3] = val;
    printf("product ID is 0x%04x\n", (id[2]<<8)|id[3]);

    for(i=0; i<CAMERA_REGS; i++) {
        if(camera_regs[i].subaddr==CHIP_DELAY)
            delay_ms(camera_regs[i].value);
        else {
            val = (camera_regs[i].subaddr<<8)|camera_regs[i].value;
            if(ioctl(fd, WCAM_VIDIOCSCAMREG, &val)<0)
                printf("write subaddr 0x%x fail!\n", camera_regs[i].subaddr);
        }
    }
    return 0;
}
#else
#define sccb_init(fd) 0
#endif

/*************************************************************/

static void usage(void)
{
    int i;

    printf("testcamera [-x/y/p/m/v/o/f/u/h options]\n");
    printf("-x width (set output image width, must be a multiple of 4)\n");
    printf("-y height (set output image height, must be a multiple of 4)\n");
    printf("-p palette (set output image color space)\n");
    printf("   available image formats:\n");
    for(i=0; i<MAX_IMAGE_FORMAT; i++)
        printf("   [%2d] : %s\n", i, optional_image_format[i].name);
    printf("-m (use mmap method for camera)\n");
    printf("-v (capture video instead of still image)\n");
    printf("-o number (set saved JPEG optimization/quality, must be 0~100)\n");
    printf("-f name (set saved JPEG file name prefix)\n");
    printf("-u arch (set CPU type)\n");
    printf("   supported CPU types:\n");
    printf("   pxa27x\n");
    printf("   s3c2440a\n");
    printf("   default is pxa27x\n");
    printf("-h show help\n");
}

static int getoptions(int argc, char *argv[])
{
    int ret = 0;
    int c;

    opterr = 0;     //don't print error messages

    /* Scan the command line for options */
    while ((c = getopt(argc, argv, "x:y:p:o:f:u:mvh")) != -1)
        switch (c) {
        case 'x':
            image_width = atoi(optarg);
            if((image_width%4)||(image_width>FIXED_SOURCE_WIDTH))
                ret = -1;
            break;
        case 'y':
            image_height = atoi(optarg);
            if((image_height%4)||(image_height>FIXED_SOURCE_HEIGHT))
                ret = -1;
            break;
        case 'p':
            image_format = atoi(optarg);
            if(image_format>=MAX_IMAGE_FORMAT)
                ret = -1;
            break;
        case 'o':
            optimization = atoi(optarg);
            if(optimization>100)
                ret = -1;
            break;
        case 'f':
            image_file = optarg;
            break;
        case 'm':
            mmap_camera = 1;
            break;
        case 'v':
            capture_video = 1;
            break;
        case 'u':
            if(strncmp(optarg, "pxa27x", 6)==0)
                cpu_type = 0;
            else if(strncmp(optarg, "s3c2440a", 8)==0)
                cpu_type = 1;
            else
                printf("unknown CPU type\n");
            break;
        case 'h':
            //ret = -1;
            //break;
        default:
            ret = -1;
            break;
        }
    return ret;
}

/*************************************************************/
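/*
 * Example invocation (illustrative only). The option letters follow
 * getoptions() above; note that main() as shown in this excerpt never calls
 * getoptions()/usage(), so the wiring sketched here is an assumption about
 * the full source, not part of it:
 *
 *     ./testcamera -x 320 -y 240 -p 0 -m -v -u s3c2440a
 *
 *     if (getoptions(argc, argv) < 0) {
 *         usage();
 *         return -1;
 *     }
 */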
int main(int argc, char *argv[])
{
    int camera, fbfd;                   //device file descriptors
    //int picFile;                      //file used to save the image data
    int frame;                          //frame index
    struct fb_var_screeninfo vinfo;
    struct fb_fix_screeninfo finfo;
    __u8 *fb_buf;
    __u32 screensize;
    int i;                              //loop counter
    int done=1;
    //------------------------------
    FILE *fp;
    static int inc=0;                   //used when saving JPEG files
    char bewf[128];
    //------------------------------------
    int pal[] = {/*VIDEO_PALETTE_JPEG,*/VIDEO_PALETTE_YUV420P, VIDEO_PALETTE_RGB24,
                 VIDEO_PALETTE_RGB565, VIDEO_PALETTE_RGB32};
    struct video_capability capability; //basic device info (name, max/min resolution, input channels, ...)
    struct video_window window;         //information about the capture area
    struct video_channel channel[8];    //properties of each input channel
    struct video_picture picture;       //properties of the captured image
    struct video_mmap mapbuf;           //per-frame capture request used with mmap
    struct video_mbuf vidbuf;           //layout of the frames mapped via mmap
    unsigned char *grab_data;           //start address of the memory mapping
    unsigned char *buf;                 //pointer to the current frame data
    __u16 rgb565[COLS*ROWS];

    //open the framebuffer
    fbfd = open("/dev/fb0", O_RDWR);
    if (fbfd < 0) {
        fbfd = open("/dev/fb/0", O_RDWR);
        if(fbfd<0) {
            printf("Error: cannot open framebuffer device.\n");
            return -1;
        }
    }

    // Get fixed screen information
    if (ioctl(fbfd, FBIOGET_FSCREENINFO, &finfo)) {
        printf("Error reading fixed information.\n");
        close(fbfd);
        return -1;
    }

    // Get variable screen information
    if (ioctl(fbfd, FBIOGET_VSCREENINFO, &vinfo)) {
        printf("Error reading variable information.\n");
        close(fbfd);
        return -1;
    }
    printf("%dx%d, %dbpp\n", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel);
    fb_xres = vinfo.xres;
    fb_yres = vinfo.yres;
    fb_bpp = vinfo.bits_per_pixel;

    screensize = vinfo.xres * vinfo.yres * vinfo.bits_per_pixel / 8;
    fb_buf = (__u8 *)mmap(0, screensize, PROT_READ | PROT_WRITE, MAP_SHARED, fbfd, 0);
    if (fb_buf == MAP_FAILED) {
        printf("Error: failed to map framebuffer device to memory.\n");
        close(fbfd);
        return -1;
    }

    //open the camera device
    camera = open("/dev/v4l/video1", O_RDWR);
    if(camera<0) {
        printf("No camera connected!\n");
        exit(-1);
    }

    //query the device capabilities
    if(ioctl(camera, VIDIOCGCAP, &capability)<0) {
        perror("ioctl VIDIOCGCAP failed!");
        exit(-1);
    }
    printf("Device name-> %s\n", capability.name);
    printf("Device type-> %d\n", capability.type);
    printf("Device channels-> %d\n", capability.channels);
    printf("Device Maxwidth-> %d\n", capability.maxwidth);
    printf("Device Maxheight-> %d\n", capability.maxheight);
    printf("Device Minwidth-> %d\n", capability.minwidth);
    printf("Device Minheight-> %d\n", capability.minheight);

    //query the image properties:
    //the VIDIOCGPICT ioctl fills `picture` with the current format
    if(ioctl(camera, VIDIOCGPICT, &picture)<0) {
        perror("ioctl VIDIOCGPICT failed!");
        exit(-1);
    }
    printf("Brightness -> %d\n", picture.brightness/256);  //brightness
    printf("Colour -> %d\n", picture.colour/256);          //colour
    printf("Contrast -> %d\n", picture.contrast/256);      //contrast
    printf("Whiteness -> %d\n", picture.whiteness/256);    //white balance
    printf("Depth -> %d\n", picture.depth);                //colour depth
    printf("Palette -> %d\n", picture.palette);            //palette (e.g. RGB24)

    picture.palette = VIDEO_PALETTE_YUV420P;
    if (ioctl(camera, VIDIOCSPICT, &picture) < 0) {
        printf("Couldn't set palette %d\n", VIDEO_PALETTE_YUV420P);
    }
/*
    //loop over the five palettes above and check which ones the driver accepts
    for(i = 0; i < 5; i++) {
        picture.palette = pal[i];
        printf("try palette %d depth %d\n", picture.palette, picture.depth);
        if (ioctl(camera, VIDIOCSPICT, &picture) < 0) {
            printf("Couldn't set palette first try %d\n", pal[i]);
        }
        //if VIDIOCSPICT/VIDIOCGPICT fails, this palette is not usable
        if (ioctl(camera, VIDIOCGPICT, &picture) < 0) {
            printf("Couldn't get palette %d\n", pal[i]);
        } else
            printf("now set the palette %d depth %d\n", picture.palette, picture.depth);
    }
*/

    //Map the device into memory: first query the number of available frame
    //buffers with VIDIOCGMBUF, then mmap() the device file. This avoids the
    //copy overhead of read()-based I/O for high-speed capture, and the mapped
    //buffers can also be shared between processes.
    //vidbuf (struct video_mbuf) describes the capture buffers: size (total
    //size in bytes), frames (number of frames) and offsets (offset of each
    //frame from the base address).
    if(ioctl(camera, VIDIOCGMBUF, &vidbuf)<0) {     //get the camera buffer layout
        perror("ioctl VIDIOCGMBUF failed!");
        exit(-1);
    }

    //set up the capture request used with mmap
    mapbuf.width = COLS;
    mapbuf.height = ROWS;
    mapbuf.format = VIDEO_PALETTE_YUV420P;

    //vidbuf.size is the total size of all frames
    grab_data = (unsigned char *)mmap(0, vidbuf.size, PROT_READ|PROT_WRITE, MAP_SHARED, camera, 0);

    //continuous capture: start by queueing a capture request for every frame buffer
    for(frame=0; frame<vidbuf.frames; frame++) {    //for the 网眼 V2000 camera, frames is 2
        mapbuf.frame = frame;
        //ioctl(camera, VIDIOCSYNC, &frame);
        if(ioctl(camera, VIDIOCMCAPTURE, &mapbuf)<0) {  //start grabbing this frame (non-blocking)
            perror("VIDIOCMCAPTURE");
            //exit(-1);
        }
    }
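    /*
     * Capture ring, summarizing the loop below: every frame buffer has now
     * been queued with VIDIOCMCAPTURE. From here on the code VIDIOCSYNCs on
     * the oldest outstanding frame, converts and displays it, re-queues that
     * same buffer with VIDIOCMCAPTURE, and advances the frame index modulo
     * vidbuf.frames.
     */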
perror("VIDIOCMCAPTURE"); //exit(-1); } } frame=0; //while(1) while(1) { //以下是采集一帧图象的代码 int i=-1; while(i<0) { i=ioctl(camera,VIDIOCSYNC,&frame); //检查此次采集是否采集完了frame帧的数据 if(i<0 && errno==EINTR) continue; printf("采集完成第 %d!帧数据\n",frame); break; }//如果采集完成了一帧数据就break出来 buf=grab_data+vidbuf.offsets[frame];/* //swap_rgb24(buf, COLS*ROWS*DEPTH); //以下过程是将图象进行保存 //save_pnm (buf, COLS,ROWS,DEPTH); sprintf (bewf, "image-%d.jpg", inc); if ((fp=fopen(bewf, "wb+")) == NULL) { perror("open"); exit(1); } put_image_jpeg(fp,buf,COLS,ROWS,QUALITY,VIDEO_PALETTE_RGB24); fclose(fp);*/ printf("show image on the screen \n"); ccvt_420p_rgb565(COLS, ROWS, buf, rgb565); show_rgb565_img(fb_buf,rgb565); mapbuf.frame=frame; if(ioctl(camera,VIDIOCMCAPTURE,&mapbuf)<0) //开始图象的截取,采用非阻塞方式 //采集的数据放刚才读走那帧数据的位置 { perror("VIDIOCMCAPTURE"); exit(-1); } frame++; if(frame>=vidbuf.frames) frame=0; } }