spcaview.c
int testbpp = 16;
/*********************************/
pictstruct mypict;
pthread_t waitandshoot_id;
int wstatus;
/*********************************/
RingBuffer.ptread = 0;
RingBuffer.ptwrite = 0;
/* spcaview grabber */
printf ("Initializing SDL.\n");
/* Initialize defaults, Video and Audio */
if (SDL_Init (SDL_INIT_VIDEO | SDL_INIT_TIMER) == -1) {
    printf ("Could not initialize SDL: %s.\n", SDL_GetError ());
    exit (-1);
}
/* Clean up on exit */
atexit (SDL_Quit);
if (!owidth || !oheight) {
    owidth = image_width;
    oheight = image_height;
}
printf ("SDL initialized.\n");
/* validate parameters */
printf ("bpp %d format %d\n", bpp, format);
if (!videoOn) {
    /* acquire raw data: force the palette to raw JPEG */
    printf ("VideoOn = 0\n");
    snprintf (fourcc, 5, "RAWD");
    format = VIDEO_PALETTE_RAW_JPEG;
}
if (videocomp) {
    /* acquire YUV420P and compress pixel differences with a static Huffman table (DPSH) */
    snprintf (fourcc, 5, "DPSH");
    format = VIDEO_PALETTE_YUV420P;
}
if (videodevice == NULL || *videodevice == 0) {
    videodevice = "/dev/video0";
}
printf ("Using video device %s.\n", videodevice);
printf ("Initializing v4l.\n");
/* v4l init */
if ((fd = open (videodevice, O_RDWR)) == -1) {
    perror ("ERROR opening V4L interface \n");
    exit (1);
}
printf ("**************** PROBING CAMERA *********************\n");
if (ioctl (fd, VIDIOCGCAP, &videocap) == -1) {
    printf ("wrong device\n");
    exit (1);
}
printf ("Camera found: %s \n", videocap.name);
if (ioctl (fd, VIDIOCGCHAN, &videochan) == -1) {
    printf ("Device does not support video channels\n");
    isVideoChannel = 0;
}
if (isVideoChannel) {
    videochan.norm = norme;
    videochan.channel = channel;
    if (ioctl (fd, VIDIOCSCHAN, &videochan) == -1) {
        printf ("ERROR setting channel and norm \n");
        exit (1);
    }
    /************ just to be sure *************/
    if (ioctl (fd, VIDIOCGCHAN, &videochan) == -1) {
        printf ("wrong device\n");
        exit (1);
    }
    printf ("Bridge found: %s \n", videochan.name);
    streamid = getStreamId (videochan.name);
    if (streamid >= 0) {
        printf ("StreamId: %s Camera\n", Plist[streamid].name);
        /* looks like an spca5xx webcam: read the video parameter struct */
        spcaPrintParam (fd, &videoparam);
    } else {
        printf ("StreamId: %d Unknown Camera\n", streamid);
    }
    /* test JPEG capability: if JPEG was requested (and we are not grabbing raw
       data) but the camera cannot stream JPEG, fall back to YUV420P */
    if ((format == VIDEO_PALETTE_RAW_JPEG || format == VIDEO_PALETTE_JPEG)
        && streamid != JPEG && videoOn) {
        printf ("Camera unable to stream in JPEG mode, switching to YUV420P\n");
        format = VIDEO_PALETTE_YUV420P;
    }
    if (probeSize (videochan.name, &image_width, &image_height) < 0)
        printf ("unable to probe size !!\n");
}
printf ("*****************************************************\n");
/* Init grab method: mmap */
if (grabMethod) {
    printf (" grabbing method: default MMAP requested \n");
    /* MMAP video acquisition */
    memset (&videombuf, 0, sizeof (videombuf));
    if (ioctl (fd, VIDIOCGMBUF, &videombuf) < 0) {
        perror (" init VIDIOCGMBUF FAILED\n");
    }
    printf ("VIDIOCGMBUF size %d frames %d offsets[0]=%d offsets[1]=%d\n",
            videombuf.size, videombuf.frames, videombuf.offsets[0],
            videombuf.offsets[1]);
    pFramebuffer = (unsigned char *) mmap (0, videombuf.size,
                                           PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
    mmapsize = videombuf.size;
    vmmap.height = image_height;
    vmmap.width = image_width;
    vmmap.format = format;
    for (f = 0; f < videombuf.frames; f++) {
        vmmap.frame = f;
        if (ioctl (fd, VIDIOCMCAPTURE, &vmmap)) {
            perror ("cmcapture");
        }
    }
    vmmap.frame = 0;
    /* Compute the estimated frame size; we expect a JPEG compression factor of 10
    if ((format == VIDEO_PALETTE_RAW_JPEG) && videoOn)
        frame_size = videombuf.size / (10 * videombuf.frames);
    else
        frame_size = videombuf.size / videombuf.frames;
    */
} else {
    /* read method */
    printf (" grabbing method: READ requested \n");
    if (ioctl (fd, VIDIOCGWIN, &videowin) < 0)
        perror ("VIDIOCGWIN failed \n");
    videowin.height = image_height;
    videowin.width = image_width;
    if (ioctl (fd, VIDIOCSWIN, &videowin) < 0)
        perror ("VIDIOCSWIN failed \n");
    printf ("VIDIOCGWIN height %d width %d \n", videowin.height, videowin.width);
}
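/* Per-palette frame buffer size: a compressed JPEG stream is bounded by
   width*height bytes, raw JPEG data by width*height*3, YUV420P uses 1.5 bytes
   per pixel, and the RGB palettes width*height*bpp. */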
switch (format) {
case VIDEO_PALETTE_JPEG: {
    frame_size = image_width * image_height;
} break;
case VIDEO_PALETTE_RAW_JPEG: {
    frame_size = image_width * image_height * 3;
} break;
case VIDEO_PALETTE_YUV420P: {
    frame_size = (image_width * image_height * 3) >> 1;
} break;
case VIDEO_PALETTE_RGB565:
case VIDEO_PALETTE_RGB24:
case VIDEO_PALETTE_RGB32: {
    frame_size = image_width * image_height * bpp;
} break;
default:
    break;
}
/* struct video_picture: VIDIOCGPICT / VIDIOCSPICT */
if (ioctl (fd, VIDIOCGPICT, &videopict) < 0) {
    perror ("Couldn't get videopict params with VIDIOCGPICT\n");
}
printf ("VIDIOCGPICT\n");
printf ("brightness=%d hue=%d color=%d contrast=%d whiteness=%d \n",
        videopict.brightness, videopict.hue, videopict.colour,
        videopict.contrast, videopict.whiteness);
printf ("depth=%d palette=%d\n", videopict.depth, videopict.palette);
videopict.palette = format;
videopict.depth = bpp * 8;
//videopict.brightness = INIT_BRIGHT;
sleep (1);
setVideoPict (&videopict, fd);
/*
 * Initialize the display
 */
if (decodeOn && videoOn) {
    /* Display data */
    pscreen = SDL_SetVideoMode (owidth, oheight, testbpp, SDL_SWSURFACE);
    if (pscreen == NULL) {
        printf ("Couldn't set %d*%dx%d video mode: %s\n",
                owidth, oheight, 3 * 8, SDL_GetError ());
        exit (1);
    }
    // SDL_WM_SetCaption ("Spcaview Grabber ", NULL);
    SDL_WM_SetCaption (videocap.name, NULL);
}
printf ("\n");
time = SDL_GetTicks ();
prevtime = time;
/* If needed, open the AVI output file and allocate the JPEG buffers */
if (outputfile) {
    jpegData = malloc (frame_size);
    dpshDest = malloc (frame_size);
    if ((out_fd = AVI_open_output_file (outputfile)) == NULL) {
        printf ("cannot open output file\n");
        exit (1);
    }
    if (interval) {
        /* pictures go into the AVI */
        AVI_set_video (out_fd, image_width, image_height, 1, "MJPG");
        mypict.mode = AVIPICT;
        mypict.out_fd = out_fd;
    } else {
        AVI_set_video (out_fd, image_width, image_height, 20, fourcc);
    }
    if (audioout && !interval) {
        /* init the sound recorder if not in still-picture mode */
        spec.format = AUDIO_FORMAT;
        spec.freq = AUDIO_SAMPLERATE;
        spec.callback = callback_record;
        spec.samples = AUDIO_SAMPLES;
        spec.channels = 1;
        spec.userdata = (void *) &RingBuffer;
        if (SDL_InitAudioIn () < 0) {
            fprintf (stderr, "Couldn't initialize SDL_AudioIn: %s\n", SDL_GetError ());
            SDL_Quit ();
            return (2);
        }
        if (SDL_OpenAudioIn (&spec, &result) < 0) {
            fprintf (stderr, "Couldn't open audio input device: %s\n", SDL_GetError ());
            SDL_CloseAudioIn ();
            SDL_Quit ();
        }
        AVI_set_audio (out_fd, spec.channels, spec.freq, 16, WAVE_AUDIO_PCM);
        //printf ("audio record setting channel %d frequency %d format %d",
        //        spec.channels, spec.freq, WAVE_AUDIO_PCM);
        // SDL_PauseAudioIn(0); // start record
        initAudio = 1;
    }
} else
    mypict.mode = PICTURE;
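/* A single scratch buffer receives each grabbed frame; when a snapshot is
   requested, the frame is copied into mypict.data and the waitandshoot thread
   is woken through mypict.cond to write the picture out. */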
/* Allocate a temporary buffer for one frame. */
tmp = (unsigned char *) malloc (frame_size);
/* launch the picture thread */
mypict.data = malloc (frame_size);
mypict.sizeIn = frame_size;
mypict.width = image_width;
mypict.height = image_height;
mypict.formatIn = format;
pthread_mutex_init (&mypict.mutex, NULL);
pthread_cond_init (&mypict.cond, NULL);
wstatus = pthread_create (&waitandshoot_id, NULL, (void *) waitandshoot, &mypict);
if (wstatus != 0) {
    fprintf (stderr, "thread shoot Create error!\n");
    exit (-1);
}
if (interval && videoOn) {
    // set_timer(interval);
    SDL_SetTimer ((Uint32) interval, callback_timer);
}
i = 0;
while (run && Oneshoot) {
    memset (tmp, 0x00, frame_size);
    intime = SDL_GetTicks ();
    pictime = intime - delaytime;
    delaytime = intime;
    /* Try to synchronize sound with the frame rate */
    if (initAudio && i > 9) {
        initAudio = 0;
        SDL_PauseAudioIn (0);   // start recording
    }
    /* compute the number of sound bytes for this frame */
    if (pictime < 100) {
        bytes_per_read = ((AUDIO_SAMPLERATE / 1000) * 2 * pictime);
    }
    i++;
    if (grabMethod) {
        ff = vmmap.frame;
        if (ioctl (fd, VIDIOCSYNC, &ff) < 0) {
            perror ("cvsync err\n");
        }
        vmmap.frame = ff;
        memcpy (tmp, pFramebuffer + videombuf.offsets[vmmap.frame], frame_size);
        if ((status = ioctl (fd, VIDIOCMCAPTURE, &vmmap)) < 0) {
            perror ("cmcapture");
            printf (">>cmcapture err %d\n", status);
        }
        vmmap.frame = (vmmap.frame + 1) % videombuf.frames;
    } else {
        /* read method */
        len = read (fd, tmp, frame_size);
        // printf ("len %d asked %d \n", len, frame_size);
    }
    synctime = SDL_GetTicks ();
    /* here the frame is in tmp, ready to be used */
    if (pictFlag) {
        // printf("get Picture condition \n");
        wstatus = pthread_mutex_lock (&mypict.mutex);
        if (wstatus != 0) {
            fprintf (stderr, "Lock error!\n");
            exit (-1);
        }
        memcpy (mypict.data, tmp, frame_size);
        //printf("COND OK !!\n");
        wstatus = pthread_cond_signal (&mypict.cond);
        if (wstatus != 0) {
            fprintf (stderr, "Signal error!\n");
            exit (-1);
        }
        wstatus = pthread_mutex_unlock (&mypict.mutex);
        if (wstatus != 0) {
            fprintf (stderr, "Unlock error!\n");
            exit (-1);
        }
    }
    if ((outputfile) && (i > 10) && !interval) {
        /* Output the video frame in the proper format */
        switch (format) {
        case VIDEO_PALETTE_JPEG: {
            int fs = get_jpegsize (tmp, frame_size);
            if (AVI_write_frame (out_fd, (unsigned char *) tmp, fs) < 0)
                printf ("write error on avi out \n");
            method = 0;
        } break;
        case VIDEO_PALETTE_RAW_JPEG: {
            if (AVI_write_frame (out_fd, (unsigned char *) tmp, frame_size) < 0)
                printf ("write error on avi out \n");
        } break;
        case VIDEO_PALETTE_YUV420P: {
            if (videocomp) {
                memcpy (jpegData, tmp, frame_size);
                jpegSize = frame_size;
                /* jpegData is destroyed here */
                dpsh_yuv_encode (jpegData, dpshDest, &jpegSize);
                // printf ("write DPSH Size %d \n", jpegSize);
                if (AVI_write_frame (out_fd, (unsigned char *) dpshDest, jpegSize) < 0)
                    printf ("write error on avi out \n");
            } else {
                if (AVI_write_frame (out_fd, (unsigned char *) tmp,
                                     image_width * image_height * 1.5) < 0)
                    printf ("write error on avi out \n");
            }
        } break;
        case VIDEO_PALETTE_RGB565:
        case VIDEO_PALETTE_RGB24:
        case VIDEO_PALETTE_RGB32: {
            if (AVI_write_frame (out_fd, (unsigned char *) tmp,
                                 image_width * image_height * bpp) < 0)
                printf ("write error on avi out \n");
        } break;
        default:
            break;
        }
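        /* Audio captured by the callback_record callback accumulates in
           RingBuffer; everything between the read and write pointers is
           flushed to the AVI audio track, using two writes when the data
           wraps around the end of the buffer. */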
        /* write sound in avi */
        if (audioout) {
            //printf("bytes per read sound %d \n", bytes_per_read);
            SDL_LockAudio ();
            ptread = RingBuffer.ptread;
            ptwrite = RingBuffer.ptwrite;
            SDL_UnlockAudio ();
            if (ptwrite > ptread) {
                if (AVI_write_audio (out_fd, (char *) RingBuffer.buffer + ptread,
                                     ptwrite - ptread) < 0)
                    printf (" write AVI error \n");
                RingBuffer.ptread = (RingBuffer.ptread + (ptwrite - ptread)) % MAXBUFFERSIZE;
            } else if (ptwrite < ptread) {
                if (AVI_write_audio (out_fd, (char *) RingBuffer.buffer + ptread,
                                     MAXBUFFERSIZE - ptread) < 0)
                    printf (" write AVI error \n");
                if (AVI_write_audio (out_fd, (char *) RingBuffer.buffer, ptwrite) < 0)
                    printf (" write AVI error \n");
                RingBuffer.ptread = (RingBuffer.ptread + ptwrite + (MAXBUFFERSIZE - ptread)) % MAXBUFFERSIZE;
            }
        }
    }
    compresstime = SDL_GetTicks ();
    if (decodeOn && videoOn && (i > 10)) {
        /* Display data */
        p = pp = pscreen->pixels;
        refresh_screen (tmp, p, format, image_width, image_height, owidth, oheight,
                        image_width * image_height * bpp, autobright);
        if (autobright)
            adjust_bright (&videopict, fd);
        decodetime = SDL_GetTicks ();
        SDL_UpdateRect (pscreen, 0, 0, 0, 0);   // update the entire screen
    } else {
        decodetime = SDL_GetTicks ();
    }
    /* increment the real frame count and update statistics */
    framecount++;
    total_decode_time += decodetime - synctime;
    average_decode_time = total_decode_time / framecount;
    if (!autobright && statOn) {
        // \r
        printf ("frames:%04d pict:%03dms synch:%03dms write/comp:%03dms decode:%03dms display:%03dms \n",
                framecount, pictime, synctime - intime, compresstime - synctime,
                decodetime - compresstime, SDL_GetTicks () - decodetime);
        fflush (stdout);
    }
    if (SDL_PollEvent (&sdlevent) == 1) {
        switch (sdlevent.type) {
        case SDL_KEYDOWN:
            switch (sdlevent.key.keysym.sym) {
            case SDLK_b:
                videopict.brightness -= 0x100;
                setVideoPict (&videopict, fd);
                break;
            case SDLK_n:
                videopict.brightness += 0x100;
                setVideoPict (&videopict, fd);
                break;