video.cpp
/********************************************************************************************
 * class       : video
 * description : this class runs on the DOOR board. It reads the net-eye scanner and writes
 *               the result onto the CAN bus. The cache here holds only one line of the whole
 *               image; the data is transported to the KERNEL MODULE, where the whole image
 *               is stored for each cache.
 * version     : 0.01
 * author      : YslayerY
 * begin_date  : 2008-7-21
 * finish_date : ??
 * modify_times:
 ********************************************************************************************/
#include "video.h"

video::video()
{
    color_diff_ts = DEF_CLR_TS;
    hotblk_ts = DEFAULT_BLK_X * DEFAULT_BLK_Y / 64 + 1;
    which = 0;
    /* every timer tick grabs one frame and writes it out as a JPEG */
    connect(&(this->timer), SIGNAL(timeout()), this, SLOT(read_video_write_jpeg()));
//  timer.start(1000);
    pthread_cond_init(&(this->write_OK), NULL);
}

video::~video()
{
}

int video::shut_video()
{
    timer.stop();
    return 0;
}

int video::restart_video(int time)
{
    timer.start(time);
    return 0;
}

int video::start_video()
{
    int ret = open_video(24, VIDEO_PALETTE_RGB24, VIDEO_W, VIDEO_H);
    if (ret < 0)
        return ret;
    timer.start(200);           /* poll the camera every 200 ms */
    return 0;
}

/* Timer slot: grab the next frame and write it to JpegFileName.
 * The block-difference (motion detection) logic is still commented out. */
void video::read_video_write_jpeg()
{
    unsigned char *imgptr;

    get_next_frame();
    imgptr = (unsigned char *) get_frame_address();

//  int off_set = sizeof(RGB24) * DEFAULT_BLK_X * DEFAULT_BLK_Y;
//  proc_img(color + which * off_set, imgptr, DEFAULT_BLK_X, DEFAULT_BLK_Y);
//  if (cmp_blks(color, color + off_set, DEFAULT_BLK_X * DEFAULT_BLK_Y,
//               VIDEO_DEP, color_diff_ts, hotblk_ts))
//  {
        putImageJpeg(JpegFileName, imgptr, QUAL_DEFAULT);
        emit signal_video_detect_mov();
//  }
    /* here is the picture-diff part; here we should write to the CAN bus */
//  which ^= 1;
    get_next_frame();           // ??????
}

/* Compare two block-averaged images; return 1 if at least b_limit blocks
 * differ by more than c_limit, otherwise 0. */
int video::cmp_blks(RGB24 *a, RGB24 *b, int num, int depth, int c_limit, int b_limit)
{
    int i, count = 0;

    for (i = 0; i < num; i++) {
        int diff;

        if (depth == 1)
            diff = abs(a[i].red - b[i].red) * 3;
        else
            diff = abs(a[i].red - b[i].red)
                 + abs(a[i].green - b[i].green)
                 + abs(a[i].blue - b[i].blue);

        if (diff > c_limit && ++count >= b_limit)
            return 1;
    }
    return 0;
}

/* Reduce a raw frame to an sx * sy grid of per-block average colours stored
 * in r.  Returns the number of pixels per block. */
int video::proc_img(RGB24 *r, unsigned char *buf, int sx, int sy)
{
    int w = VIDEO_W;
    int h = VIDEO_H;
    int depth = VIDEO_DEP;
    unsigned char *p;
    int x, y, m, n;
    int blk_wid = w / sx, blk_hgt = h / sy;
    int x0, y0, x1, y1;
    int s = blk_wid * blk_hgt;
    unsigned int R, G, B;
    int r_offset, g_offset;

    if (depth == 1) {           /* grayscale: all three offsets coincide */
        r_offset = 0;
        g_offset = 0;
    } else {                    /* 24-bit frames are stored B,G,R in memory */
        r_offset = 2;
        g_offset = 1;
    }

    for (n = 1; n <= sy; n++) {
        for (m = 1; m <= sx; m++) {
            x0 = (m - 1) * blk_wid;
            y0 = (n - 1) * blk_hgt;
            x1 = blk_wid * m - 1;
            y1 = blk_hgt * n - 1;
            R = G = B = 0;
            for (y = y0; y <= y1; y++) {
                for (x = x0; x <= x1; x++) {
                    p = buf + (y * w + x) * depth;
                    R += *(p + r_offset);
                    G += *(p + g_offset);
                    B += *p;
                }
            }
            r->red   = R / s;
            r->green = G / s;
            r->blue  = B / s;
            r++;
        }
    }
    return s;
}

/* Compress one captured frame (BGR in memory) to a JPEG file with libjpeg. */
int video::putImageJpeg(char *fileName, unsigned char *mem, int quality)
{
    unsigned char *buf = mem;
    int width = VIDEO_W;
    int height = VIDEO_H;
    int depth = VIDEO_DEP;
    int y, x, line_width;
    JSAMPROW row_ptr[1];
    struct jpeg_compress_struct cjpeg;
    struct jpeg_error_mgr jerr;
    char *line;
    FILE *fd;

    if ((fd = fopen(fileName, "wb+")) == NULL) {
        printf("putImageJpeg: error opening %s\n", fileName);
        return -1;
    }
//  lockf(fileno(fd), F_LOCK, 0);

    line = new char[width * 3];
    if (!line) {
        fclose(fd);
        return -1;
    }

    cjpeg.err = jpeg_std_error(&jerr);
    jpeg_create_compress(&cjpeg);
    cjpeg.image_width = width;
    cjpeg.image_height = height;
    cjpeg.input_components = 3;
    cjpeg.in_color_space = JCS_RGB;
    if (depth == 1) {
        cjpeg.input_components = 1;
        cjpeg.in_color_space = JCS_GRAYSCALE;
    }
    jpeg_set_defaults(&cjpeg);
    jpeg_set_quality(&cjpeg, quality, TRUE);
    cjpeg.dct_method = JDCT_FASTEST;
    jpeg_stdio_dest(&cjpeg, fd);
    jpeg_start_compress(&cjpeg, TRUE);

    row_ptr[0] = (JSAMPROW) line;
    line_width = width * 3;
    for (y = 0; y < height; y++) {
        /* swap B and R: the capture buffer is BGR, libjpeg expects RGB */
        for (x = 0; x < line_width; x += 3) {
            line[x]     = buf[x + 2];
            line[x + 1] = buf[x + 1];
            line[x + 2] = buf[x];
        }
        jpeg_write_scanlines(&cjpeg, row_ptr, 1);
        buf += line_width;
    }

    jpeg_finish_compress(&cjpeg);
    jpeg_destroy_compress(&cjpeg);
    delete[] line;
//  lockf(fileno(fd), F_ULOCK, 0);
    fclose(fd);
    return 0;
}

/* following are some camera control functions (Video4Linux v1 API) */
int video::open_video(int dep, int pal, int width, int height)
{
    if ((vd.fd = open(V4L_FILE, O_RDWR)) < 0) {
        printf("v4l_open: error\n");
        return ERR_VIDEO_OPEN;
    }
    printf("=============Open Video Success=======================\n");

    if (ioctl(vd.fd, VIDIOCGCAP, &(vd.capability)) < 0) {
        printf("v4l_get_capability error\n");
        return ERR_VIDEO_GCAP;
    }
    printf("=============Get Device Success=======================\n");

    if (ioctl(vd.fd, VIDIOCGPICT, &(vd.picture)) < 0) {
        printf("v4l_get_picture error\n");
        return ERR_VIDEO_GPIC;
    }
    printf("=============Get Picture Success======================\n");

    vd.picture.palette = pal;
    vd.picture.depth = dep;
    printf("=====Capture depth:Palette================\n");
    vd.mmap.format = pal;
    if (ioctl(vd.fd, VIDIOCSPICT, &(vd.picture)) < 0) {
        printf("v4l_set_palette error\n");
        return ERR_VIDEO_SPIC;
    }

    vd.mmap.width = width;
    vd.mmap.height = height;
    vd.mmap.format = vd.picture.palette;
    vd.frame_current = 0;
    vd.frame_using[0] = 0;
    vd.frame_using[1] = 0;

    if (ioctl(vd.fd, VIDIOCGMBUF, &(vd.mbuf)) < 0) {
        printf("v4l_get_mbuf error\n");
        return -1;
    }
    printf("%d\n", VIDEO_MAX_FRAME);

    vd.map = (unsigned char *) mmap(0, vd.mbuf.size, PROT_READ | PROT_WRITE,
                                    MAP_SHARED, vd.fd, 0);
    if (vd.map == MAP_FAILED) {
        printf("v4l_mmap_init: mmap error\n");
        return -1;
    }
    printf("content : %d\n", *(int *) (vd.map));

    printf("The video device was opened successfully.\n");
    return 0;
}

unsigned char *video::get_frame_address()
{
    return (vd.map) + (vd.mbuf.offsets)[vd.frame_current];
}

/* Ask the driver to start capturing into the given frame buffer. */
int video::get_grab_frame(int frame)
{
    if (vd.frame_using[frame]) {
//      fprintf(stderr, "get_grab_frame: frame %d is already used.\n", frame);
        return ERR_FRAME_USING;
    }

    vd.mmap.frame = frame;
    /* picture capture starts from this moment */
    if (ioctl(vd.fd, VIDIOCMCAPTURE, &(vd.mmap)) < 0) {
        perror("v4l_grab_frame");
        return ERR_GET_FRAME;
    }
    vd.frame_using[frame] = 1;
    vd.frame_current = frame;
    return 0;
}

int video::get_first_frame()
{
    int ret;

    vd.frame_current = 0;
    ret = get_grab_frame(0);
    if (ret < 0)
        return ret;
    if (ioctl(vd.fd, VIDIOCSYNC, &(vd.frame_current)) < 0) {
        perror("v4l_grab_sync");
        return ERR_SYNC;
    }
    vd.frame_using[vd.frame_current] = 0;
    return 0;
}

/* Double-buffered capture: flip between frame 0 and 1, grab, then sync. */
int video::get_next_frame()
{
    int ret;

    vd.frame_current ^= 1;
    ret = get_grab_frame(vd.frame_current);
    if (ret < 0)
        return ret;
    if (ioctl(vd.fd, VIDIOCSYNC, &(vd.frame_current)) < 0) {
        perror("v4l_grab_sync");
        return ERR_SYNC;
    }
    vd.frame_using[vd.frame_current] = 0;
    return 0;
}