/* cvvideo.cpp — cvcam Video4Linux camera support (this chunk: defaults,
   YUV420P->RGB conversion, frame grabbing, and AVI API stubs). */
/* Tail of icvSetDefaultValues (the function header precedes this chunk):
 * initializes the camera's synchronization primitives, clamps the
 * capability-reported frame sizes, and programs default picture settings
 * into the V4L driver.  Returns 1 on success, 0 on any ioctl failure.
 * NOTE(review): assumes `camera`, `device` and `VidCap` were set up by the
 * missing top of the function — confirm against the full file. */
/* Mutexes / condition variables shared by the capture and render threads. */
pthread_mutex_init(&(camera->pausemutex), NULL);
pthread_mutex_init(&(camera->capturestatemutex), NULL);
pthread_cond_init(&(camera->capturestatecond), NULL);
camera->capturestate = FINISHED;
camera->memorymap = NULL;
camera->renderstate = 0;
pthread_mutex_init(&(camera->updatedmutex), NULL);
pthread_cond_init(&(camera->updatedcond), NULL);
camera->updated = 0;
/* Prefer RGB24 both as driver palette and as the delivered image format. */
camera->videopp.picture.palette = VIDEO_PALETTE_RGB24;
camera->imagepalette = VIDEO_PALETTE_RGB24;
/* Copy driver-reported size limits, enforcing a 160x120 minimum. */
camera->description.maxwidth = VidCap.maxwidth;
camera->description.maxheight = VidCap.maxheight;
camera->description.minwidth = VidCap.minwidth;
if(camera->description.minwidth<160)
camera->description.minwidth = 160;
camera->description.minheight = VidCap.minheight;
if(camera->description.minheight<120)
camera->description.minheight = 120;
/* Read the driver's current picture settings as a starting point. */
if(ioctl(device,VIDIOCGPICT,
&(camera->videopp.picture) )==-1)
return 0;
/* Override with our defaults: RGB24, maximum frame size,
 * and mid-range values (~30000) for all picture controls. */
camera->videopp.picture.palette = VIDEO_PALETTE_RGB24;
camera->imagepalette = VIDEO_PALETTE_RGB24;
camera->videopp.width = camera->description.maxwidth;
camera->videopp.height = camera->description.maxheight;
camera->videopp.picture.depth = 24;
camera->videopp.picture.brightness = 30000;
camera->videopp.picture.hue = 30000;
camera->videopp.picture.colour = 30000;
camera->videopp.picture.contrast = 30000;
camera->videopp.picture.whiteness = 30000;
camera->callback = NULL;
camera->renderwidth = 0;
camera->renderheight = 0;
/* Push the settings; if the driver rejects RGB24, retry with YUV420P
 * (icvVideoGetImage converts YUV420P frames to RGB in software). */
if(ioctl(device,VIDIOCSPICT,
&(camera->videopp.picture) )==-1)
{
#ifdef DEBUG_CVCAM
fprintf(stdout, "icvSetDefaultValues: ioctl VIDIOCSPICT failed, trying YUV420P format\n");
#endif
camera->videopp.picture.palette = VIDEO_PALETTE_YUV420P;
if(ioctl(device,VIDIOCSPICT,
&(camera->videopp.picture) )==-1)
{
fprintf(stdout, "icvSetDefaultValues: ioctl VIDIOCSPICT failed even in YUV420P format\n");
#ifdef DEBUG_CVCAM
/* NOTE(review): `camera` is a pointer printed via %d — a format-string
 * mismatch left untouched here (documentation-only change). */
fprintf(stdout,"camera=%d,brightness=%d,hue=%d,colour=%d,contrast=%d,whiteness=%d,depth=%d,palette=%d\n",
camera,
camera->videopp.picture.brightness,
camera->videopp.picture.hue,
camera->videopp.picture.colour,
camera->videopp.picture.contrast,
camera->videopp.picture.whiteness,
camera->videopp.picture.depth,
camera->videopp.picture.palette);
#endif
return 0;
}
}
#ifdef DEBUG_CVCAM
fprintf(stdout,"OK! camera=%d,brightness=%d,hue=%d,colour=%d,contrast=%d,whiteness=%d,depth=%d,palette=%d\n",
camera,
camera->videopp.picture.brightness,
camera->videopp.picture.hue,
camera->videopp.picture.colour,
camera->videopp.picture.contrast,camera->videopp.picture.whiteness,camera->videopp.picture.depth,camera->videopp.picture.palette);
#endif
return 1;
}
/**********************************************************************
*
* Color correction functions
*
**********************************************************************/
/*
* Turn a YUV4:2:0 block into an RGB block
*
* Video4Linux seems to use the blue, green, red channel
* order convention-- rgb[0] is blue, rgb[1] is green, rgb[2] is red.
*
* Color space conversion coefficients taken from the excellent
* http://www.inforamp.net/~poynton/ColorFAQ.html
* In his terminology, this is a CCIR 601.1 YCbCr -> RGB.
* Y values are given for all 4 pixels, but the U (Pb)
* and V (Pr) are assumed constant over the 2x2 block.
*
* To avoid floating point arithmetic, the color conversion
* coefficients are scaled into 16.16 fixed-point integers.
* They were determined as follows:
*
* double brightness = 1.0; (0->black; 1->full scale)
* double saturation = 1.0; (0->greyscale; 1->full color)
* double fixScale = brightness * 256 * 256;
* int rvScale = (int)(1.402 * saturation * fixScale);
* int guScale = (int)(-0.344136 * saturation * fixScale);
* int gvScale = (int)(-0.714136 * saturation * fixScale);
* int buScale = (int)(1.772 * saturation * fixScale);
* int yScale = (int)(fixScale);
*/
/* LIMIT: convert a 16.16 fixed-point value to a byte, with clipping. */
#define LIMIT(x) ((x)>0xffffff?0xff: ((x)<=0xffff?0:((x)>>16)))

/* Convert one 2x2 YUV 4:2:0 block to RGB and store it into `rgb`.
 * yTL/yTR/yBL/yBR are the four luma samples; u and v (already biased by
 * -128) are shared by all four pixels.  `rowPixels` is the output image
 * width in pixels; `bits` selects 24-bit BGR or 16-bit 565 packing. */
static inline void
move_420_block(int yTL, int yTR, int yBL, int yBR, int u, int v,
int rowPixels, unsigned char * rgb, int bits)
{
    /* CCIR 601.1 YCbCr -> RGB coefficients, scaled to 16.16 fixed point. */
    enum {
        RV_SCALE = 91881,    /*  1.402    * 65536 */
        GU_SCALE = -22553,   /* -0.344136 * 65536 */
        GV_SCALE = -46801,   /* -0.714136 * 65536 */
        BU_SCALE = 116129,   /*  1.772    * 65536 */
        Y_SCALE  = 65536
    };
    /* Chroma contributions are constant over the 2x2 block. */
    const int rC = RV_SCALE * v;
    const int gC = GU_SCALE * u + GV_SCALE * v;
    const int bC = BU_SCALE * u;
    int y[4];
    int k;

    y[0] = yTL * Y_SCALE;   /* top-left  */
    y[1] = yTR * Y_SCALE;   /* top-right */
    y[2] = yBL * Y_SCALE;   /* bottom-left  */
    y[3] = yBR * Y_SCALE;   /* bottom-right */

    if (bits == 24) {
        /* BGR byte order: rgb[0]=blue, rgb[1]=green, rgb[2]=red. */
        for (k = 0; k < 4; k++) {
            unsigned char *p = rgb + (k >> 1) * 3 * rowPixels + (k & 1) * 3;
            p[0] = LIMIT(bC + y[k]);
            p[1] = LIMIT(gC + y[k]);
            p[2] = LIMIT(rC + y[k]);
        }
    } else if (bits == 16) {
        /* RGB565, little-endian: low byte = bbbbb ggg, high byte = ggg rrrrr. */
        for (k = 0; k < 4; k++) {
            unsigned char *p = rgb + (k >> 1) * 2 * rowPixels + (k & 1) * 2;
            int blue  = LIMIT(bC + y[k]);
            int green = LIMIT(gC + y[k]);
            int red   = LIMIT(rC + y[k]);
            p[0] = (unsigned char)(((blue >> 3) & 0x1F) | ((green << 3) & 0xE0));
            p[1] = (unsigned char)(((green >> 5) & 0x07) | (red & 0xF8));
        }
    }
}
/* Converts a planar YUV 4:2:0 frame at pIn0 into packed RGB at pOut0.
 * The input holds a full-resolution Y plane followed by quarter-size
 * U and V planes; `bits` (24 or 16) selects the output pixel packing. */
static void
yuv420p_to_rgb(int width, int height,
unsigned char *pIn0, unsigned char *pOut0, int bits)
{
    const int pixelCount = width * height;
    const int bytesPerPixel = bits >> 3;
    unsigned char *luma = pIn0;
    unsigned char *cb = pIn0 + pixelCount;       /* U plane */
    unsigned char *cr = cb + pixelCount / 4;     /* V plane */
    unsigned char *out = pOut0;
    int row, col;

    /* Walk the image in 2x2 blocks: one U/V pair covers four Y samples. */
    for (row = 0; row + 1 < height; row += 2) {
        for (col = 0; col + 1 < width; col += 2) {
            move_420_block(luma[0], luma[1],
                           luma[width], luma[width + 1],
                           (*cb++) - 128, (*cr++) - 128,
                           width, out, bits);
            luma += 2;
            out += 2 * bytesPerPixel;
        }
        luma += width;                 /* skip the second luma row of the pair */
        out += width * bytesPerPixel;  /* skip the output row the block wrote  */
    }
}
// Allocates a new IplImage containing the last grabbed frame.
// Can be called from the main thread or the render thread, via
// GetProperty("raw_image"), or from the capturing thread, via
// the callback.  Returns NULL when the camera is not capturing;
// ownership of the returned image passes to the caller.
static IplImage *
icvVideoGetImage(int cameraid)
{
    CvSize size;
    CvVideoCamera *const camera = &(cameras[cameraid]);
    IplImage *image;
    void *data;
    int lastframe;
    int depth = icvVideoFormat2Depth(camera->imagepalette);

    /* Only hand out frames while a capture is actually in progress. */
    pthread_mutex_lock(&(camera->capturestatemutex));
    if(!(camera->capturestate==CAPTURING))
    {
        pthread_mutex_unlock(&(camera->capturestatemutex));
        return NULL;
    }
    pthread_mutex_unlock(&(camera->capturestatemutex));

    size.width = camera->videopp.width;
    size.height = camera->videopp.height;
    assert(camera->videopp.picture.palette);
    assert(icvVideoFormat2Depth(camera->imagepalette));
    image = cvCreateImageHeader(size,IPL_DEPTH_8U,depth/8);

    /* Snapshot the index of the last completed frame under its mutex and
     * hold that frame's read lock for the whole copy/conversion.
     * BUGFIX: the buffer accesses below previously re-read
     * camera->lastframe without the mutex, so they could address a frame
     * other than the one whose rwlock is held; use the snapshot instead. */
    pthread_mutex_lock(&(camera->lastframemutex));
    lastframe = camera->lastframe;
    pthread_mutex_unlock(&(camera->lastframemutex));
    pthread_rwlock_rdlock(&(camera->framelock[lastframe]));

    if (camera->imagepalette == camera->videopp.picture.palette)
    {
        /* Same palette: copy straight out of the driver's mmap'ed buffer. */
        IplImage *buffer = cvCreateImageHeader(size,IPL_DEPTH_8U,depth/8);
        cvSetImageData(buffer,
                       (void*)(camera->memorymap+
                               camera->mbuf.offsets[lastframe]),
                       size.width*(depth/8));
        cvCreateImageData(image);
        cvCopyImage(buffer, image);
        pthread_rwlock_unlock(&(camera->framelock[lastframe]));
        cvReleaseImageHeader(&buffer);
        return image;
    }

    // only one conversion is implemented, for now
    assert(camera->videopp.picture.palette == VIDEO_PALETTE_YUV420P);
    assert((camera->imagepalette == VIDEO_PALETTE_RGB24) ||
           (camera->imagepalette == VIDEO_PALETTE_RGB565));
    data = (void*)cvAlloc(size.width*size.height*(depth/8));
    cvSetImageData(image, data, size.width*(depth/8));
    yuv420p_to_rgb(camera->videopp.width,
                   camera->videopp.height,
                   (unsigned char*)(camera->memorymap+
                                    camera->mbuf.offsets[lastframe]),
                   (unsigned char*)(data),
                   depth);
    pthread_rwlock_unlock(&(camera->framelock[lastframe]));
    return image;
}
//Stubs for avi files

/* Plays a specified AVI file into a specified window (stub).
   file == NULL would open a file browser; window == 0 would create one;
   width/height == 0 would use the AVI's own size; a non-NULL callback
   would be invoked on every frame.
   Not implemented on this platform: always returns -1. */
CVCAM_API int cvcamPlayAVI(const char* file,
                           void* window,
                           int width,
                           int height,
                           void* callback)
{
    (void)file; (void)window; (void)width; (void)height; (void)callback;
    return -1;
}
/*Advanced API for dealing with AVI files*/

/* Opens the given AVI file, or would pop up a dialog when file is NULL.
   Stub: always reports failure by returning (cvcamAVIFILE)-1. */
CVCAM_API cvcamAVIFILE cvcamAVIOpenFile(char* file)
{
    (void)file;
    return (cvcamAVIFILE)-1;
}
/*The next functions just do what they say and return 0
for success, anything other for failure*/

/* Closes an AVI file handle.  Stub: always fails with -1. */
CVCAM_API int cvcamAVICloseFile(cvcamAVIFILE file)
{
    (void)file;
    return -1;
}
/* Attaches an output window to an AVI file.  Stub: always fails with -1. */
CVCAM_API int cvcamAVISetWindow(cvcamAVIFILE file, void* window)
{
    (void)file; (void)window;
    return -1;
}
/* Registers a per-frame callback for an AVI file.  Stub: always fails with -1. */
CVCAM_API int cvcamAVISetCallback(cvcamAVIFILE file, void* callback)
{
    (void)file; (void)callback;
    return -1;
}
/* Sets the output size for an AVI file.  Stub: always fails with -1. */
CVCAM_API int cvcamAVISetSize(cvcamAVIFILE file, int width, int height)
{
    (void)file; (void)width; (void)height;
    return -1;
}
/* Starts AVI playback.  Stub: always fails with -1. */
CVCAM_API int cvcamAVIRun(cvcamAVIFILE file)
{
    (void)file;
    return -1;
}
/* Stops AVI playback.  Stub: always fails with -1. */
CVCAM_API int cvcamAVIStop(cvcamAVIFILE file)
{
    (void)file;
    return -1;
}
/* Pauses AVI playback.  Stub: always fails with -1. */
CVCAM_API int cvcamAVIPause(cvcamAVIFILE file)
{
    (void)file;
    return -1;
}
/* Resumes paused AVI playback.  Stub: always fails with -1. */
CVCAM_API int cvcamAVIResume(cvcamAVIFILE file)
{
    (void)file;
    return -1;
}
/* Blocks until AVI playback completes.  Stub: always fails with -1. */
CVCAM_API int cvcamAVIWaitCompletion(cvcamAVIFILE file)
{
    (void)file;
    return -1;
}
/* Queries whether AVI playback is running.  Stub: always returns -1. */
CVCAM_API int cvcamAVIIsRunning(cvcamAVIFILE file)
{
    (void)file;
    return -1;
}