// capprocclassify.cpp
/****************************************************************************************/
// Compile: At the MATLAB prompt type: 'cc'
//
// fw-01-08
//
// capture an image in the requested format (RGB, YUV411),
// perform 'processing' (CMVision) and return result data to MATLAB (as structure & RGB)
// classify image
//
/****************************************************************************************/
/* includes --------------------------------------------------------------------- */
#include <windows.h>
#include <string.h> // strcmpi, strcpy
#include "1394camera.h"
#include "cmvision.h"
#include "mex.h"
#include "matrix.h"
/* (static) global variables ---------------------------------------------------- */
C1394Camera theCamera;
C1394CameraControl *pControl;
/* camera display modes */
// mode table (supported by the CMU driver, format '0' only):
//
// 0: {160 ,120 ,COLOR_CODE_YUV444},
// 1: {320 ,240 ,COLOR_CODE_YUV422},
// 2: {640 ,480 ,COLOR_CODE_YUV411},
// 3: {640 ,480 ,COLOR_CODE_YUV422},
// 4: {640 ,480 ,COLOR_CODE_RGB8},
// 5: {640 ,480 ,COLOR_CODE_Y8},
// 6: {640 ,480 ,COLOR_CODE_Y16},
//
// NOTE: mode enumeration now in line with camera modes
// (formerly: 0 = RGB, 1 = YUV)
//
// fw-12-07
//
#define numMYCAMERAMODES 7
typedef enum {
CAMERA_STOP = -1,
CAMERA_YUV444_160x120,
CAMERA_YUV422_320x240,
CAMERA_YUV411_640x480,
CAMERA_YUV422_640x480,
CAMERA_RGB8_640x480,
CAMERA_Y8_640x480,
CAMERA_Y16_640x480
} myCameraModes;
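// hypothetical lookup table (not part of the original listing): frame sizes matching
// the format-0 mode table in the comment above, indexed by myCameraModes with
// CAMERA_STOP excluded; the names 'mode_description'/'myModes' are for illustration only
static const struct mode_description {
	unsigned int width;
	unsigned int height;
	LPCSTR colourCode;
} myModes[numMYCAMERAMODES] =
{
	{160, 120, "YUV444"},
	{320, 240, "YUV422"},
	{640, 480, "YUV411"},
	{640, 480, "YUV422"},
	{640, 480, "RGB8"},
	{640, 480, "Y8"},
	{640, 480, "Y16"},
};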
/* camera features to be controlled manually */
#define numMYFEATURES 7
static const struct feature_description {
LPCSTR name;
LPCSTR unit;
} myFeatures[numMYFEATURES] =
{
{"Brightness","%"},
{"Auto Exposure","eV"},
{"Sharpness",""},
{"White Balance", "K"},
{"Saturation","%"},
{"Shutter","sec"},
{"Gain","dB"},
};
static unsigned char bufRGB[640*480*3]; // local RGB image buffer (max required size)
#define NDIMS 3 // (maximum) dimension of the output data (RGB)
static unsigned int dims[NDIMS];
static unsigned long frameWidth, frameHeight;
static unsigned int origX, origY;
static unsigned long nElements;
/* these variables control the reduction from the maximum frame size to a smaller window */
static unsigned int pixPerPacket; // number of pixels per camera packet (e.g. 2 in YUYV, 1 in RGB, etc.)
static unsigned int packetSize; // camera packet size in bytes (e.g. 4 for YUYV, 6 for UYYVYY, etc.)
static unsigned int firstPacketStart; // integer multiple of the size of a pixel packet
static unsigned int lastPacketStart; // integer multiple of the size of a pixel packet
static unsigned int processFrameWidth; // integer multiple of the size of a pixel packet
static unsigned int processFrameNumPix; // adjusted width (dims[1])
static unsigned int packetOffsetX; // return image might not start on a packet boundary...
/* this variable is set to one when the grabber object has been initialized */
static unsigned int initCamera = 0;
/* verbosity control variable */
static unsigned int verboseFlag = 0;
/* this variable is set according to the 'mode' call-up parameter */
static myCameraModes currentMode = CAMERA_STOP; // start with CAMERA_STOP mode (required for correct function)
/* allow maximum width and height to be reduced... */
static unsigned int requestedWidth;
static unsigned int currWidth = 0;
static unsigned int requestedHeight;
static unsigned int currHeight = 0;
static unsigned int requestedOrigX;
static unsigned int currOrigX = 0;
static unsigned int requestedOrigY;
static unsigned int currOrigY = 0;
// ######### CMVision ##########
static CMVision vision; /* CMVision class */
CMVision::region *reg;
static rgb *img = NULL; /* dynamically allocated buffer... used by 'testClassify' */
static yuv *yuvBuf = NULL; /* pointer to converted input image (yuv), this is only an intermediate format */
static image_pixel *yuyvBuf = NULL; /* pointer to converted input image (image_pixel, yuv422) */
#define colourFilenameLength 100
static char colourFilename[colourFilenameLength];
static const char *defColourFilename = "testcolors.txt"; /* default colour file name */
// results of vision processing (struct myREG)
#define defCOL 1 // default number of colours to be scanned for
#define maxREG 10 // maximum number of regions to be recognised per colour
#define minAREA 16 // minimum area of 'valid' regions
struct myREG {
rgb colour;
unsigned int nREG;
unsigned int colourID;
unsigned int cen_x[maxREG];
unsigned int cen_y[maxREG];
unsigned int x1[maxREG];
unsigned int y1[maxREG];
unsigned int x2[maxREG];
unsigned int y2[maxREG];
} *retREG[CMV_MAX_COLORS];
const char *fnames[4] = {"ColourID", "Colour", "nRegions", "Regions"};
int foutdims[] = {0,0}; // dimensions of the output cell array
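//----------------------------------------------------------
// hypothetical sketch (not part of the original listing): how one filled retREG
// entry could be packed into a MATLAB structure using the field names in 'fnames';
// the function name 'packRegionStruct' and the [cen_x cen_y x1 y1 x2 y2] column
// layout of 'Regions' are assumptions for illustration only
//----------------------------------------------------------
static mxArray *packRegionStruct(const struct myREG *r) {
	mxArray *out  = mxCreateStructMatrix(1, 1, 4, fnames);
	mxArray *col  = mxCreateDoubleMatrix(1, 3, mxREAL);        // 1x3 RGB row vector
	mxArray *regs = mxCreateDoubleMatrix(r->nREG, 6, mxREAL);  // one row per region
	double *cp = mxGetPr(col);
	double *rp = mxGetPr(regs);
	unsigned int i;
	cp[0] = r->colour.red;  cp[1] = r->colour.green;  cp[2] = r->colour.blue;
	for(i=0; i<r->nREG; i++) {
		rp[i + 0*r->nREG] = r->cen_x[i];  rp[i + 1*r->nREG] = r->cen_y[i];
		rp[i + 2*r->nREG] = r->x1[i];     rp[i + 3*r->nREG] = r->y1[i];
		rp[i + 4*r->nREG] = r->x2[i];     rp[i + 5*r->nREG] = r->y2[i];
	}
	mxSetField(out, 0, "ColourID", mxCreateDoubleScalar((double)r->colourID));
	mxSetField(out, 0, "Colour",   col);
	mxSetField(out, 0, "nRegions", mxCreateDoubleScalar((double)r->nREG));
	mxSetField(out, 0, "Regions",  regs);
	return out;
}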
/* local functions -------------------------------------------------------- */
//----------------------------------------------------------
// additional CMVision functions
//----------------------------------------------------------
// taken from CMU driver: ...\1394CamRGB.cpp
#define CLAMP_TO_UCHAR(a) ((unsigned char)((a) < 0 ? 0 : ((a) > 255 ? 255 : (a))))
//----------------------------------------------------------
yuv rgb_to_yuv(int r, int g, int b) {
yuv c;
c.y = CLAMP_TO_UCHAR((int)(0.3*r + 0.5881*g + 0.1118*b));
c.u = CLAMP_TO_UCHAR((int)(-0.15*r - 0.2941*g + 0.3882*b + 128));
c.v = CLAMP_TO_UCHAR((int)(0.35*r - 0.2941*g - 0.0559*b + 128));
return c;
}
//----------------------------------------------------------
//----------------------------------------------------------
rgb yuv_to_rgb(unsigned char y, unsigned char u, unsigned char v) {
#ifdef ERASE________________________________________________
/*
* replaced by the equivalent calculation found in the CMU driver which
* uses integer maths only (-> higher performance)
* fw-01-08
*/
int U, V;
rgb c;
U = (int)(2*u - 256);
V = (int)(2*v - 256);
c.red = CLAMP_TO_UCHAR(y + V);
c.green = CLAMP_TO_UCHAR((int)(y - 0.51*V - 0.19*U));
c.blue = CLAMP_TO_UCHAR(y + U);
#endif /* ERASE_____________________________________________ */
/*
* in line with the CMU driver...
*/
long U, V, deltaG;
rgb c;
U = u - 128;
V = v - 128;
deltaG = (12727 * U + 33384 * V);
deltaG += (deltaG > 0 ? 32768 : -32768);
deltaG >>= 16;
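// i.e. deltaG = round(0.194*U + 0.509*V): 12727/65536 and 33384/65536 in 16.16 fixed point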
c.red = CLAMP_TO_UCHAR( y + V );
c.green = CLAMP_TO_UCHAR( y - deltaG );
c.blue = CLAMP_TO_UCHAR( y + U );
return c;
}
//----------------------------------------------------------
//----------------------------------------------------------
void convYUYV2RGB(rgb *dest, yuv422 *src) {
unsigned long i;
yuv422 p;
unsigned long size = dims[0]*processFrameNumPix/2; // 2 pixels per yuv422 packet (YUYV)
for(i=0; i<size; i++) {
p = src[i];
dest[2*i + 0] = yuv_to_rgb(p.y1, p.u, p.v);
dest[2*i + 1] = yuv_to_rgb(p.y2, p.u, p.v);
}
}
//----------------------------------------------------------
#ifdef ERASE
//----------------------------------------------------------
void convMatlabRGB2YUV(yuv *dst, unsigned char *src, int Wx, int Wy) {
int row, col;
// convert RGB (MATLAB) to YUV
for(row=0; row<Wy; row++) {
for(col=0; col<Wx; col++) {
dst[row*Wx + col] = rgb_to_yuv(src[row+col*Wy+0*Wx*Wy], src[row+col*Wy+1*Wx*Wy], src[row+col*Wy+2*Wx*Wy]);
}
}
}
//----------------------------------------------------------
//----------------------------------------------------------
void convCameraRGB2YUV(yuv *dst, unsigned char *src, int Wx, int Wy) {
int row, col;
// convert RGB (CAMERA) to YUV
for(row=0; row<Wy; row++) {
for(col=0; col<Wx; col++) {
dst[col + row*Wx] = rgb_to_yuv(src[col*3 + row*Wx*3], src[col*3 + row*Wx*3 + 1], src[col*3 + row*Wx*3 + 2]);
}
}
}
//----------------------------------------------------------
//----------------------------------------------------------
void convCameraRGB_2_YUV(yuv *dst, unsigned char *src, int Wx, int Wy) {
int row, col;
// convert RGB (CAMERA) to YUV
for(row=0; row<Wy; row++) {
for(col=0; col<Wx; col++) {
dst[col + row*Wx] = rgb_to_yuv(src[(origX+col)*3 + (origY+row)*(frameWidth*3) ], \
src[(origX+col)*3 + (origY+row)*(frameWidth*3) + 1], \
src[(origX+col)*3 + (origY+row)*(frameWidth*3) + 2] );
}
}
}
//----------------------------------------------------------
//----------------------------------------------------------
void convYUV2YUYV(image_pixel *dst, yuv *src, int Wx, int Wy) {
int row, col, k;
// convert YUV to YUYV (linear buffer)
k = 0;
for(row=0; row<Wy; row++) {
for(col=0; col<Wx; col+=2, k++) {
dst[k].y1 = src[row*Wx + col].y;
dst[k].u = (unsigned char)((src[row*Wx + col].u + src[row*Wx + col+1].u)/2);
dst[k].y2 = src[row*Wx + col+1].y;
dst[k].v = (unsigned char)((src[row*Wx + col].v + src[row*Wx + col+1].v)/2);
}
}
//mexPrintf("yuyv buffer contains %d elements\n", k);
}
//----------------------------------------------------------
#endif
//----------------------------------------------------------
void convY8_2_YUYV(image_pixel *dst, unsigned char *src) {
unsigned int row, col, k;
unsigned int BytesPerRow = (unsigned int)(frameWidth);
yuv myYUV[2]; // intermediate result
k = 0;
for(row=0; row<dims[0] /* height */; row++) {
for(col = 0; col<processFrameWidth; col+=2, k++) {
/* Y8 -> YUV */
myYUV[0] = rgb_to_yuv(src[(origY+row)*BytesPerRow + firstPacketStart+col ], \
src[(origY+row)*BytesPerRow + firstPacketStart+col ], \
src[(origY+row)*BytesPerRow + firstPacketStart+col ] );
myYUV[1] = rgb_to_yuv(src[(origY+row)*BytesPerRow + firstPacketStart+col + 1], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 1], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 1] );
/* YUV -> YUYV */
dst[k].u = (unsigned char)((myYUV[0].u + myYUV[1].u) >> 1); // u
dst[k].y1 = myYUV[0].y; // y1
dst[k].v = (unsigned char)((myYUV[0].v + myYUV[1].v) >> 1); // v
dst[k].y2 = myYUV[1].y; // y2
}
}
//mexPrintf("yuyv buffer contains %d elements\n", k);
}
//----------------------------------------------------------
//----------------------------------------------------------
void convY16_2_YUYV(image_pixel *dst, unsigned char *src) {
unsigned int row, col, k;
unsigned int u, v;
unsigned int BytesPerRow = (unsigned int)(frameWidth*2);
yuv myYUV[4]; // intermediate result
k = 0;
for(row=0; row<dims[0] /* height */; row++) {
for(col = 0; col<processFrameWidth; col+=4, k++) {
/* Y16 -> YUV */
myYUV[0] = rgb_to_yuv(src[(origY+row)*BytesPerRow + firstPacketStart+col ], \
src[(origY+row)*BytesPerRow + firstPacketStart+col ], \
src[(origY+row)*BytesPerRow + firstPacketStart+col ] );
myYUV[1] = rgb_to_yuv(src[(origY+row)*BytesPerRow + firstPacketStart+col + 1], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 1], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 1] );
myYUV[2] = rgb_to_yuv(src[(origY+row)*BytesPerRow + firstPacketStart+col + 2], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 2], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 2] );
myYUV[3] = rgb_to_yuv(src[(origY+row)*BytesPerRow + firstPacketStart+col + 3], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 3], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 3] );
/* YUV -> YUYV */
u = ((unsigned int)(myYUV[0].u) << 8) + (unsigned int)(myYUV[1].u);
dst[k].u = (unsigned char)((u + ((unsigned int)(myYUV[2].u) << 8) + (unsigned int)(myYUV[3].u)) >> 1); // u
dst[k].y1 = (unsigned char)(((unsigned int)(myYUV[0].y) << 8) + (unsigned int)(myYUV[1].y)); // y1
v = ((unsigned int)(myYUV[0].v) << 8) + (unsigned int)(myYUV[1].v);
dst[k].v = (unsigned char)((v + ((unsigned int)(myYUV[2].v) << 8) + (unsigned int)(myYUV[3].v)) >> 1); // v
dst[k].y2 = (unsigned char)(((unsigned int)(myYUV[2].y) << 8) + (unsigned int)(myYUV[3].y)); // y2
}
}
//mexPrintf("yuyv buffer contains %d elements\n", k);
}
//----------------------------------------------------------
//----------------------------------------------------------
void convCameraRGB_2_YUYV(image_pixel *dst, unsigned char *src) {
unsigned int row, col, k;
unsigned int BytesPerRow = (unsigned int)(frameWidth*3);
yuv myYUV[2]; // intermediate result
k = 0;
for(row=0; row<dims[0] /* height */; row++) {
for(col = 0; col<processFrameWidth; col+=6, k++) {
/* RGB8 -> YUV */
myYUV[0] = rgb_to_yuv(src[(origY+row)*BytesPerRow + firstPacketStart+col ], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 1], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 2] );
myYUV[1] = rgb_to_yuv(src[(origY+row)*BytesPerRow + firstPacketStart+col + 3], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 4], \
src[(origY+row)*BytesPerRow + firstPacketStart+col + 5] );
/* YUV -> YUYV */
dst[k].u = (unsigned char)((myYUV[0].u + myYUV[1].u) >> 1); // u
dst[k].y1 = myYUV[0].y; // y1
dst[k].v = (unsigned char)((myYUV[0].v + myYUV[1].v) >> 1); // v
dst[k].y2 = myYUV[1].y; // y2
}
}
//mexPrintf("yuyv buffer contains %d elements\n", k);
}
//----------------------------------------------------------
//----------------------------------------------------------
// CMU driver: The YUV4,4,4 format is UYV
// -> 'convert' YUV444 (u1 y1 v1 u2 y2 v2) image to YUYV (u y1 v y2) equivalent (copy)
void convYUV444_2_YUYV(image_pixel *dst, unsigned char *src) {
unsigned int row, col, k;
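	// NOTE: sketch by analogy with convCameraRGB_2_YUYV above (assumption, not the
	// original body): YUV444 stores 3 bytes per pixel in UYV order, so two adjacent
	// pixels (u1 y1 v1 u2 y2 v2) collapse into one YUYV packet, averaging the chroma
	unsigned int BytesPerRow = (unsigned int)(frameWidth*3);
	k = 0;
	for(row=0; row<dims[0] /* height */; row++) {
		for(col = 0; col<processFrameWidth; col+=6, k++) {
			dst[k].u  = (unsigned char)((src[(origY+row)*BytesPerRow + firstPacketStart+col    ] +
			                             src[(origY+row)*BytesPerRow + firstPacketStart+col + 3]) >> 1); // u = (u1+u2)/2
			dst[k].y1 =                  src[(origY+row)*BytesPerRow + firstPacketStart+col + 1];         // y1
			dst[k].v  = (unsigned char)((src[(origY+row)*BytesPerRow + firstPacketStart+col + 2] +
			                             src[(origY+row)*BytesPerRow + firstPacketStart+col + 5]) >> 1); // v = (v1+v2)/2
			dst[k].y2 =                  src[(origY+row)*BytesPerRow + firstPacketStart+col + 4];         // y2
		}
	}
}
//----------------------------------------------------------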