ProcessEffect.java
package visage_v3_2;
/* Author: Adel Restom <adel_restom@yahoo.com> */
import javax.media.*;
import javax.media.format.*;
import java.awt.*;
import svm.*;
import javax.media.util.*;
import com.graphbuilder.geom.*;
import java.awt.image.BufferedImage;
import java.awt.event.*;
/*
This class is the video-stream effect that performs the whole processing chain,
from face detection and face tracking to blink detection.
*/
public class ProcessEffect implements Effect
{
//Attributes for the codec
Format inputFormat;
Format outputFormat;
Format formats[];
byte outData[];
Frame frame;
FaceDetector faceDetector;
FaceTracker faceTracker;
BufferToImage bti;
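//bti converts incoming JMF Buffers to AWT Images; it is assumed to be created elsewhere
//once the input video format is known (it is not initialized in this constructor)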
Image image,imageT_1;
//Colors to draw rectangles
final byte red = (byte)Color.black.getRed();
final byte green = (byte)Color.black.getGreen();
final byte blue = (byte)Color.black.getBlue();
static final int left = 0;
static final int right = 1;
//Widths and heights of the 's'earch areas, 't'emplates, and 'p'upil search areas
final int sWid = 25,sHei = 21;
final int tWid = 15,tHei = 11;
final int pWid = 25,pHei = 21;
boolean fixNose;
int nX,nY,lX,lY,rX,rY,bteX,bteY;//'n'ose, 'l'eft eye, 'r'ight eye, bte: between the eyes
//Shift values for the ROIs
int nxShift,nyShift,tlbtex,tlbtey,trbtex,trbtey,bxShift,byShift;//tlbte: to the left of BTE, trbte: to the right of BTE
//Motion cue pixel arrays
int leftEyePixelsT_1[],leftEyePixels[];
int rightEyePixelsT_1[],rightEyePixels[];
int tempPixels2[];
//Templates
int noseTemplate[][],BteTemplate[][],lTemplate[][],rTemplate[][];
//ROIs (true color arrays)
int nosePixels[],BTEPixels[];
//ROIs (grayscale pixels)
int noseROI[],BteROI[];
//Integral image arrays
int lii[][],rii[][],ls[][],rs[][];
//Angle limit
final int limit = 30;
//Controls the mouse
Robot robot;
int centerX,centerY;
//Blinking
boolean leftBlink,rightBlink,detectLEye,detectREye;
long lt,rt;
//Eyebrows
int leftBrow[],rightBrow[];
public ProcessEffect(CaptureDeviceInfo cdi,svm_model model,Frame frame,int centerX,int centerY)
{
formats = cdi.getFormats();
outData = new byte[320*240*3];
this.frame = frame;
imageT_1 = new BufferedImage(320,240,BufferedImage.TYPE_INT_RGB);
faceDetector = new FaceDetector(model);
faceTracker = new FaceTracker(pWid,pHei);
leftEyePixelsT_1 = new int[pWid * pHei];
leftEyePixels = new int[pWid * pHei];
rightEyePixelsT_1 = new int[pWid * pHei];
rightEyePixels = new int[pWid * pHei];
tempPixels2 = new int[pWid * pHei];
noseTemplate = new int[tWid][tHei];
BteTemplate = new int[tWid][tHei];
lTemplate = new int[tWid][tHei];
rTemplate = new int[tWid][tHei];
nosePixels = new int[sWid*sHei];
BTEPixels = new int[sWid*sHei];
noseROI = new int[sWid*sHei];
BteROI = new int[sWid*sHei];
lii = new int[pWid][pHei];
rii = new int[pWid][pHei];
ls = new int[pWid][pHei];
rs = new int[pWid][pHei];
this.centerX = centerX;
this.centerY = centerY;
try
{
robot = new Robot();
}
catch (Exception ex){ }
}
///////////////////////////////
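/*
Called for every captured frame. Once the face has been detected and tracking is enabled
(frame.track), it: tracks the nose tip and the BTE (point between the eyes) by template
matching inside small search areas, locates the eyebrow lines to keep the eye search areas
below the brows, detects blinks from frame-to-frame motion in the eye regions, maps
deliberate left/right blinks to left/right mouse clicks, locates the pupils while the eyes
are open, and finally re-estimates the BTE from the midpoint of the pupils and computes the
in-plane head rotation angle.
*/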
public int process(Buffer inBuffer, Buffer outBuffer)
{
outData = (byte[])inBuffer.getData();
//If nose and eyes were detected
if( frame.track )
{
imageT_1 = image; //frame @ t-1
image = bti.createImage(inBuffer); //frame @ t
//Store old values
int onX = nX;
int onY = nY;
int obteX = bteX;
int obteY = bteY;
//Force search areas to stay in the frame boundaries
nxShift = faceTracker.checkHorizontalBoundaries(nX,sWid,nxShift,319);
nyShift = faceTracker.checkVerticalBoundaries(nY,sHei,nyShift,239);
bxShift = faceTracker.checkHorizontalBoundaries(bteX,sWid,bxShift,319);
byShift = faceTracker.checkVerticalBoundaries(bteY,sHei,byShift,239);
//Extract ROIs of the nose and the BTE
nosePixels = ImageProcessing.extractPixels(image,nX+nxShift-sWid/2,nY+nyShift-sHei/2,sWid,sHei,nosePixels);
noseROI = ImageProcessing.toGrayscale(nosePixels,noseROI);
if( frame.drawROI && frame.drawNose )
outData = drawRect(nX+nxShift-sWid/2,nY+nyShift-sHei/2,sWid,sHei,outData);
BTEPixels = ImageProcessing.extractPixels(image,bteX+bxShift-sWid/2,bteY+byShift-sHei/2,sWid,sHei,BTEPixels);
BteROI = ImageProcessing.toGrayscale(BTEPixels,BteROI);
if( frame.drawROI && frame.drawBte )
outData = drawRect(bteX+bxShift-sWid/2,bteY+byShift-sHei/2,sWid,sHei,outData);
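//Match the stored nose and BTE templates inside their search areas to get the new feature positions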
Point noseTip,BTE;
noseTip = faceTracker.trackTemplate(noseTemplate,
noseROI, sWid, tWid,sHei,tHei);
BTE = faceTracker.trackTemplate(BteTemplate, BteROI,
sWid, tWid,sHei,tHei);
//Set new nose tip and BTE values
int x0 = (int)noseTip.getX(); //will be used in template extraction
int y0 = (int)noseTip.getY();
nX = nxShift + nX - sWid / 2 + x0;
nY = nyShift + nY - sHei / 2 + y0;
int x1 = (int)BTE.getX(); //will be used in template extraction
int y1 = (int)BTE.getY();
bteX = bxShift + bteX - sWid / 2 + x1;
bteY = byShift + bteY - sHei / 2 + y1;
//Force search areas to stay in the frame boundaries
trbtex = faceTracker.checkHorizontalBoundaries(bteX,pWid,trbtex,319);
trbtey = faceTracker.checkVerticalBoundaries(bteY,pHei,trbtey,239);
tlbtex = faceTracker.checkHorizontalBoundaries(bteX,pWid,tlbtex,319);
tlbtey = faceTracker.checkVerticalBoundaries(bteY,pHei,tlbtey,239);
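//Centers of the left and right eye search areas, offset to either side of the BTE
//(tlbte*: to the left of the BTE, trbte*: to the right of the BTE)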
int shlx = bteX+tlbtex;
int shly = bteY+tlbtey;
int shrx = bteX+trbtex;
int shry = bteY+trbtey;
double eyesLen = Math.sqrt(Math.pow(shlx-shrx,2)+Math.pow(shly-shry,2));
double slope = (shrx-shlx != 0 ? (double)(shry-shly) / (double)(shrx-shlx) : Math.tan(Math.toRadians(-90)));
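//Find the eyebrow lines; their lower edges are used below to push the tops of the eye
//search areas down so the search areas do not extend above the brows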
leftBrow = faceTracker.findEyeBrowsLine(shlx, shly, shrx, shry, image, left,eyesLen,slope,frame.brThreshold);
if (leftBrow != null)
{
int lsx = leftBrow[0];
int lsy = leftBrow[1];
int lex = leftBrow[2];
int ley = leftBrow[3];
int sx = leftBrow[4];
int sy = leftBrow[5];
int y = (lsy > ley ? lsy : ley );
if( frame.drawBrows )
outData = drawLine(sx + lsx, sy + lsy, sx + lex, sy + ley, outData);
if( bteY+tlbtey-pHei/2 < sy+y-5 )
tlbtey = sy+y-bteY+pHei/2-5;
}
else
{
shly += 15;
eyesLen = Math.sqrt(Math.pow(shlx-shrx,2)+Math.pow(shly-shry,2));
slope = (shrx-shlx != 0 ? (double)(shry-shly) / (double)(shrx-shlx) : Math.tan(Math.toRadians(-90)));
leftBrow = faceTracker.findEyeBrowsLine(shlx, shly, shrx, shry, image, left, eyesLen, slope,frame.brThreshold);
if (leftBrow != null)
{
int lsx = leftBrow[0];
int lsy = leftBrow[1];
int lex = leftBrow[2];
int ley = leftBrow[3];
int sx = leftBrow[4];
int sy = leftBrow[5];
int y = (lsy > ley ? lsy : ley);
if( frame.drawBrows )
outData = drawLine(sx + lsx, sy + lsy, sx + lex, sy + ley, outData);
if( bteY+tlbtey-pHei/2 < sy+y-5 )
tlbtey = sy+y-bteY+pHei/2-5;
}
}
eyesLen = Math.sqrt(Math.pow(shlx-shrx,2)+Math.pow(shly-shry,2));
slope = (shrx-shlx != 0 ? (double)(shry-shly) / (double)(shrx-shlx) : Math.tan(Math.toRadians(-90)));
rightBrow = faceTracker.findEyeBrowsLine(shlx, shly, shrx, shry, image, right,eyesLen,slope,frame.brThreshold);
if (rightBrow != null)
{
int lsx = rightBrow[0];
int lsy = rightBrow[1];
int lex = rightBrow[2];
int ley = rightBrow[3];
int sx = rightBrow[4];
int sy = rightBrow[5];
int y = (lsy > ley ? lsy : ley );
if( frame.drawBrows )
outData = drawLine(sx + lsx, sy + lsy, sx + lex, sy + ley, outData);
if( bteY+trbtey-pHei/2 < sy+y-5 )
trbtey = sy+y-bteY+pHei/2-5;
}
else
{
shry += 15;
eyesLen = Math.sqrt(Math.pow(shlx-shrx,2)+Math.pow(shly-shry,2));
slope = (shrx-shlx != 0 ? (double)(shry-shly) / (double)(shrx-shlx) : Math.tan(Math.toRadians(-90)));
rightBrow = faceTracker.findEyeBrowsLine(shlx, shly, shrx, shry, image, right,eyesLen,slope,frame.brThreshold);
if (rightBrow != null)
{
int lsx = rightBrow[0];
int lsy = rightBrow[1];
int lex = rightBrow[2];
int ley = rightBrow[3];
int sx = rightBrow[4];
int sy = rightBrow[5];
int y = (lsy > ley ? lsy : ley);
if( frame.drawBrows )
outData = drawLine(sx + lsx, sy + lsy, sx + lex, sy + ley, outData);
if( bteY+trbtey-pHei/2 < sy+y-5 )
trbtey = sy+y-bteY+pHei/2-5;
}
}
//Force search areas to stay in the frame boundaries
trbtey = faceTracker.checkVerticalBoundaries(bteY,pHei,trbtey,239);
tlbtey = faceTracker.checkVerticalBoundaries(bteY,pHei,tlbtey,239);
//Extract new motion cues for the left eye
tempPixels2 = ImageProcessing.extractPixels(imageT_1,bteX+tlbtex-pWid/2,bteY+tlbtey-pHei/2,pWid,pHei,tempPixels2);
leftEyePixelsT_1 = ImageProcessing.toGrayscale(tempPixels2,leftEyePixelsT_1);
tempPixels2 = ImageProcessing.extractPixels(image,bteX+tlbtex-pWid/2,bteY+tlbtey-pHei/2,pWid,pHei,tempPixels2);
leftEyePixels = ImageProcessing.toGrayscale(tempPixels2,leftEyePixels);
//Extract new motion cues for the right eye
tempPixels2 = ImageProcessing.extractPixels(imageT_1,bteX+trbtex-pWid/2,bteY+trbtey-pHei/2,pWid,pHei,tempPixels2);
rightEyePixelsT_1 = ImageProcessing.toGrayscale(tempPixels2,rightEyePixelsT_1);
tempPixels2 = ImageProcessing.extractPixels(image,bteX+trbtex-pWid/2,bteY+trbtey-pHei/2,pWid,pHei,tempPixels2);
rightEyePixels = ImageProcessing.toGrayscale(tempPixels2,rightEyePixels);
//Shifts of the nose tip and the BTE since the previous frame (passed to detectMotion)
nxShift = nX-onX;
nyShift = nY-onY;
bxShift = bteX - obteX;
byShift = bteY - obteY;
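//detectMotion compares the t-1 and t eye patches; it presumably reports blinks back
//through the 'this' reference by setting leftBlink/rightBlink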
//Detect left eye motion
faceTracker.detectMotion(leftEyePixelsT_1,leftEyePixels,pWid,pHei,frame.cThreshold,25,
frame.drawMotion,frame.drawEyes,this,
bteX+tlbtex-pWid/2,bteY+tlbtey-pHei/2,left,frame.bThreshold,
nxShift,nyShift,bxShift,byShift);
//Detect right eye motion
faceTracker.detectMotion(rightEyePixelsT_1,rightEyePixels,pWid,pHei,frame.cThreshold,25,
frame.drawMotion,frame.drawEyes,this,
bteX+trbtex-pWid/2,bteY+trbtey-pHei/2,right,frame.bThreshold,
nxShift,nyShift,bxShift,byShift);
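//A blink in both eyes at (nearly) the same time is treated as a natural, involuntary blink and ignored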
if( (leftBlink && rightBlink) || (Math.abs(lt-rt) < frame.blinkLength) )
{
leftBlink = false;
rightBlink = false;
//Reset the blink timestamps to arbitrary sentinel values
lt = -1;
rt = -5;
detectLEye = detectREye = true;
}
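//A deliberate left-eye blink is turned into a left mouse click (BUTTON1), at most once per blinkLength interval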
if( leftBlink )
{
detectLEye = false;
if (frame.drawBlink)
outData = drawRect(bteX + tlbtex - pWid / 2,bteY + tlbtey - pHei / 2,pWid, pHei, outData);
if (System.currentTimeMillis() - lt > (int) (frame.blinkLength * 1000))
{
lt = System.currentTimeMillis();
if( frame.enable )
{
robot.mousePress(InputEvent.BUTTON1_MASK);
robot.mouseRelease(InputEvent.BUTTON1_MASK);
}
}
leftBlink = false;
}
else
detectLEye = true;
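//A deliberate right-eye blink is turned into a right mouse click (BUTTON3), with the same rate limit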
if( rightBlink )
{
detectREye = false;
if (frame.drawBlink)
outData = drawRect(bteX + trbtex - pWid / 2,bteY + trbtey - pHei / 2,pWid, pHei, outData);
if (System.currentTimeMillis() - rt > (int) (frame.blinkLength * 1000))
{
rt = System.currentTimeMillis();
if( frame.enable )
{
robot.mousePress(InputEvent.BUTTON3_MASK);
robot.mouseRelease(InputEvent.BUTTON3_MASK);
}
}
rightBlink = false;
}
else
detectREye = true;
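//While an eye is open (no blink detected), locate its pupil via the integral image
//and refresh the eye template around the pupil center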
if( detectLEye )
{
if (frame.drawROI && frame.drawEyes)
outData = drawRect(bteX + tlbtex - pWid / 2, bteY + tlbtey - pHei / 2,pWid, pHei, outData);
lii = ImageProcessing.calculateIntegralImage(pWid, pHei, leftEyePixels, ls, lii);
Point lPupil = faceTracker.findPupil(lii, pWid, pHei, lTemplate,
leftEyePixels, tWid, tHei);
//Extract left eye template
for (int y = 0; y < tHei; y++)
for (int x = 0; x < tWid; x++)
lTemplate[x][y] = leftEyePixels[ ( (int) lPupil.getY() - tHei / 2 + y) *pWid +( (int) lPupil.getX() - tWid / 2 + x)];
lX = bteX + tlbtex - pWid / 2 + (int) lPupil.getX();
lY = bteY + tlbtey - pHei / 2 + (int) lPupil.getY();
}
if( detectREye )
{
if (frame.drawROI && frame.drawEyes)
outData = drawRect(bteX + trbtex - pWid / 2, bteY + trbtey - pHei / 2,pWid, pHei, outData);
rii = ImageProcessing.calculateIntegralImage(pWid, pHei, rightEyePixels, rs,rii);
Point rPupil = faceTracker.findPupil(rii, pWid, pHei,rTemplate,rightEyePixels, tWid, tHei);
for (int y = 0; y < tHei; y++)
for (int x = 0; x < tWid; x++)
rTemplate[x][y] = rightEyePixels[ ( (int) rPupil.getY() - tHei / 2 + y) *pWid +( (int) rPupil.getX() - tWid / 2 + x)];
rX = bteX + trbtex - pWid / 2 + (int) rPupil.getX();
rY = bteY + trbtey - pHei / 2 + (int) rPupil.getY();
}
//The distance between the nose tip and the BTE should be close to
//half of the distance between the eyes
eyesLen = Math.sqrt(Math.pow(lX-rX,2)+Math.pow(lY-rY,2));
//Update BTE location
double xLen = rX - lX;
double yLen = rY - lY;
slope = (xLen != 0 ? yLen / xLen : Math.tan(Math.toRadians(-90)));
int sbteX = lX+(int)((eyesLen/2) * Math.cos(Math.atan(slope))); //sbte is in the middle of the eyes line
int sbteY = lY+(int)((eyesLen/2) * Math.sin(Math.atan(slope)));
//Extract new BTE ROI
BTEPixels = ImageProcessing.extractPixels(image, sbteX - sWid / 2,sbteY - sHei / 2, sWid, sHei,BTEPixels);
BteROI = ImageProcessing.toGrayscale(BTEPixels, BteROI);
BTE = faceTracker.trackTemplate(BteTemplate, BteROI,sWid, tWid, sHei, tHei);
x1 = (int) BTE.getX();
y1 = (int) BTE.getY();
bteX = sbteX - sWid / 2 + x1;
bteY = sbteY - sHei / 2 + y1;
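//Estimate the in-plane head rotation: the angle between the eye line and the BTE-nose segment,
//computed as asin(perpendicular distance / segment length) and converted to degrees.
//The nose track is flagged for correction (fixNose) when the nose tip drifts above the BTE
//or the nose-BTE distance exceeds the distance between the eyes.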
double noseLen = Math.sqrt(Math.pow(nX - bteX, 2) + Math.pow(nY - bteY, 2));
double angle;
double res[] = new double[3];
double point[] = new double[3];
double dist,Len;
dist = Math.sqrt(Geom.ptLineDistSq(lX, lY, rX, rY, nX, nY, res));
if (Geom.PARALLEL == Geom.getLineLineIntersection(lX, lY, rX, rY, bteX, bteY, nX, nY,point))
{
point[0] = bteX;
point[1] = bteY;
angle = 0;
Len = Math.sqrt(Math.pow(point[0] - nX, 2) +
Math.pow(point[1] - nY, 2));
}
else
{
Len = Math.sqrt(Math.pow(point[0] - nX, 2) +
Math.pow(point[1] - nY, 2));
angle = (Len != 0 ? Math.asin( (dist / Len)) * 57.2957795 : 0); //Convert to degrees
}
if( nY < bteY )
fixNose = true;
else
if (noseLen > eyesLen)
fixNose = true;