aigesture.cpp
CV_RGB(244,0,0));
GetFeature(dst,center,radius,angle,anglecha,count);//extract the angle/count features of the hand region around the palm centre
CvFileStorage *fs=cvOpenFileStorage("手势特征文件.yml",0,CV_STORAGE_READ);//open the stored gesture feature file (YAML)
CvFileNode* filenode;
for( i=0;i<GestureNum;i++)
{
filenode=cvGetFileNodeByName(fs,NULL,GestureName[i]+"anglecountName");
cvReadRawData(fs,filenode,Sbcount,"f");//read the stored data into the Sbcount array
for(int j=0;j<FeatureNum;j++)
{
mask_tmp+=fabs(Sbcount[j]-count[j])*fabs(Sbcount[j]-count[j]);//first match on the skin-colour angle counts (squared difference)
}
if(mask_tmp<5)
{
mask[maskcount]=i;
maskcount++;
}
mask_tmp=0;
}
for(int i=0;i<maskcount;i++)
{
filenode=cvGetFileNodeByName(fs,NULL,GestureName[mask[i]]+"angleName");
cvReadRawData(fs,filenode,Sbangle,"f");
for(int j=0;j<FeatureNum;j++)
{
for(int k=0;k<10;k++)
{
mask_tmp1+=fabs(Sbangle[j][k]-angle[j][k])*fabs(Sbangle[j][k]-angle[j][k]);//match the skin-colour angles
}
}
if(mask_tmp1<10)
{
mask1[maskcount1]=mask[i];
maskcount1++;
}
mask_tmp1=0;
}
for(int i=0;i<maskcount1;i++)
{
filenode=cvGetFileNodeByName(fs,NULL,GestureName[mask1[i]]+"anglechaName");
cvReadRawData(fs,filenode,Sbanglecha,"f");
for(int j=0;j<FeatureNum;j++)
{
for(int k=0;k<10;k++)
{
angleresult[i]+=(float)(fabs(Sbanglecha[j][k]-anglecha[j][k])*fabs(Sbanglecha[j][k]-anglecha[j][k]));//match the non-skin-colour angle differences
}
}
}
CvFont font;
double hScale=0.7;
double vScale=0.7;
int lineWidth=1;
cvInitFont(&font,CV_FONT_HERSHEY_SIMPLEX|CV_FONT_ITALIC, hScale,vScale,0,lineWidth);//initialise the font used to draw the result onto the image
if(maskcount==0||maskcount1==0)//no candidate gesture matched, so there is nothing to label
{
cvReleaseFileStorage(&fs);
cvReleaseImage(&dst);
delete []GestureName;
return;
}
float angletmp=angleresult[0];
int angletmp1=0;
for(int i=0;i<maskcount1;i++)
{
if(angleresult[i]<=angletmp)
{
angletmp=angleresult[i];
angletmp1=mask1[i];
}
}
result=GestureName[angletmp1];
cvPutText(src,GestureName[angletmp1],cvPoint(20,20),&font,CV_RGB(255,0,0));//draw the recognised gesture name onto the image
cvReleaseFileStorage(&fs);
cvReleaseImage(&dst);
delete []GestureName;
}
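/*Illustrative sketch, not part of the original file: the three matching passes above
(angle counts, skin-colour angles, non-skin angle differences) all use the same
squared-difference distance. The hypothetical helper below just restates that metric.*/
static float SqDist(const float* a,const float* b,int n)
{
	float d=0.0f;
	for(int i=0;i<n;i++)
	{
		float diff=a[i]-b[i];//difference between corresponding feature values
		d+=diff*diff;//accumulate the squared difference
	}
	return d;//a smaller value means the stored gesture is a closer match
}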
void CAIGesture::Follow(IplImage* src,CvRect& track_window,CvBox2D &track_box)//track the gesture; CvBox2D also carries an angle field that may be useful later
{
CvConnectedComp track_comp;
IplImage* hsv=0;//image in HSV colour space
IplImage* h=0;//H (hue) channel of the HSV image
IplImage* s=0;
IplImage* v=0;
IplImage* mask=0;
IplImage* mask_tmp1=0;
IplImage* mask_tmp2=0;
IplImage* mask_tmp3=0;
IplImage* mask_tmp4=0;
IplImage* backproject=0;
int hdims = 150; // number of histogram bins; more bins give a finer histogram
float hranges_arr[] = {0,180};
float* hranges = hranges_arr;
CvHistogram *hist = 0;
int bin_w=0;
hsv=cvCreateImage(cvGetSize(src),8,3);
h=cvCreateImage(cvGetSize(src),8,1);
s=cvCreateImage(cvGetSize(src),8,1);
v=cvCreateImage(cvGetSize(src),8,1);
mask=cvCreateImage(cvGetSize(src),8,1);//skin-colour mask image
mask_tmp1=cvCreateImage(cvGetSize(src),8,1);
mask_tmp2=cvCreateImage(cvGetSize(src),8,1);
mask_tmp3=cvCreateImage(cvGetSize(src),8,1);
mask_tmp4=cvCreateImage(cvGetSize(src),8,1);
backproject=cvCreateImage(cvGetSize(src),8,1);
hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 );
cvCvtColor(src,hsv,CV_BGR2HSV);
cvCvtPixToPlane(hsv,h,s,v,0);
//build the skin-colour mask: three H/S ranges are thresholded and the results are combined
cvInRangeS(h,cvScalar(0.0,0.0,0,0),cvScalar(12.0,0.0,0,0),mask_tmp1);
cvInRangeS(s,cvScalar(75.0,0.0,0,0),cvScalar(200.0,0.0,0,0),mask_tmp2);
cvAnd(mask_tmp1,mask_tmp2,mask_tmp1,0);
cvInRangeS(h,cvScalar(0.0,0.0,0,0),cvScalar(13.0,0.0,0,0),mask_tmp2);
cvInRangeS(s,cvScalar(20.0,0.0,0,0),cvScalar(90.0,0.0,0,0),mask_tmp3);
cvAnd(mask_tmp2,mask_tmp3,mask_tmp2,0);
cvInRangeS(h,cvScalar(170.0,0.0,0,0),cvScalar(180.0,0.0,0,0),mask_tmp3);
cvInRangeS(s,cvScalar(15.0,0.0,0,0),cvScalar(90.,0.0,0,0),mask_tmp4);
cvAnd(mask_tmp3,mask_tmp4,mask_tmp3,0);
cvOr(mask_tmp3,mask_tmp2,mask_tmp2,0);
cvOr(mask_tmp2,mask_tmp1,mask,0);
cvCalcHist(&h,hist,0,mask);
cvCalcBackProject(&h,backproject,hist);
cvAnd(backproject,mask,backproject,0);
cvCamShift(backproject,track_window,
cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
&track_comp, &track_box );
cvSetImageROI(mask,track_window);
if(cvCountNonZero(mask)<40)//too few skin pixels inside the tracked window: treat the track as lost
{
// AfxMessageBox("跟踪丢失");
cvReleaseImage(&hsv);
cvReleaseImage(&h);
cvReleaseImage(&s);
cvReleaseImage(&v);
cvReleaseImage(&mask);
cvReleaseImage(&mask_tmp1);
cvReleaseImage(&mask_tmp2);
cvReleaseImage(&mask_tmp3);
cvReleaseImage(&mask_tmp4);
cvReleaseImage(&backproject);
cvReleaseHist(&hist);
return;
}
cvResetImageROI(mask);
track_window=track_comp.rect;
cvRectangle(src,
cvPoint(track_window.x,track_window.y),
cvPoint(track_window.x+track_window.width,track_window.y+track_window.height),
CV_RGB(255,255,0));
cvReleaseImage(&hsv);
cvReleaseImage(&h);
cvReleaseImage(&s);
cvReleaseImage(&v);
cvReleaseImage(&mask);
cvReleaseImage(&mask_tmp1);
cvReleaseImage(&mask_tmp2);
cvReleaseImage(&mask_tmp3);
cvReleaseImage(&mask_tmp4);
cvReleaseImage(&backproject);
cvReleaseHist(&hist);
}
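/*Illustrative usage sketch, not part of the original file: how Follow() might be driven
from a webcam loop. The camera index and the initial hand window are assumptions.*/
void DemoFollow()
{
	CAIGesture gesture;
	CvCapture* capture=cvCaptureFromCAM(0);//open the default camera
	CvRect track_window=cvRect(150,150,100,100);//assumed starting region over the hand
	CvBox2D track_box;
	cvNamedWindow("follow",1);
	IplImage* frame;
	while((frame=cvQueryFrame(capture))!=0)//frame is owned by the capture, do not release it
	{
		gesture.Follow(frame,track_window,track_box);//CAMSHIFT on the skin back projection updates track_window in place
		cvShowImage("follow",frame);
		if(cvWaitKey(10)==27)//ESC quits
			break;
	}
	cvDestroyWindow("follow");
	cvReleaseCapture(&capture);
}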
void CAIGesture::RecogniseSeq(CvPoint pt[8],float width,float height,CString &result)//classify the direction of an 8-point motion trajectory
{
float x_min_tmp=(float)(pt[0].x);//minimum x of the motion trajectory
float x_max_tmp=(float)(pt[0].x);//maximum x of the motion trajectory
float y_min_tmp=(float)(pt[0].y);//minimum y of the motion trajectory
float y_max_tmp=(float)(pt[0].y);//maximum y of the motion trajectory
for(int i=0;i<8;i++)
{
if(pt[i].x<x_min_tmp)
x_min_tmp=(float)(pt[i].x);
if(pt[i].x>x_max_tmp)
x_max_tmp=(float)(pt[i].x);
if(pt[i].y<y_min_tmp)
y_min_tmp=(float)(pt[i].y);
if(pt[i].y>y_max_tmp)
y_max_tmp=(float)(pt[i].y);
}
if(x_max_tmp-x_min_tmp>2*width&&y_max_tmp-y_min_tmp<height)//wide horizontal sweep with little vertical motion
{
if(pt[7].x-pt[0].x>0)
{
result="向左";//"move left"
}
else
{
result="向右";//"move right"
}
}
else if(x_max_tmp-x_min_tmp<width&&y_max_tmp-y_min_tmp>height)//mostly vertical motion
{
if(pt[7].y-pt[0].y>0)
{
result="后退";//"move back"
}
else
{
result="过来";//"come here"
}
}
}
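/*Illustrative sketch, not part of the original file: RecogniseSeq() expects 8 trajectory
points plus width/height thresholds. One plausible way to build the points is to take
the centre of the tracked window on 8 consecutive frames; the 60-pixel thresholds below
are assumptions, not values taken from this project.*/
void DemoRecogniseSeq(CAIGesture& gesture,CvRect windows[8],CString& direction)
{
	CvPoint pt[8];
	for(int i=0;i<8;i++)
	{
		pt[i].x=windows[i].x+windows[i].width/2;//centre x of the tracked window
		pt[i].y=windows[i].y+windows[i].height/2;//centre y of the tracked window
	}
	gesture.RecogniseSeq(pt,60.0f,60.0f,direction);//wide horizontal sweep => left/right, vertical sweep => back/come here
}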
void CAIGesture::SaveImage(IplImage* src)//save the current frame under a time-stamped file name
{
CTime t = CTime::GetCurrentTime();
CString imgname = t.Format( "%d%H%M%S.jpg" );//file name built from the day of month and the current time
cvSaveImage(imgname,src);
}
void CAIGesture::RecogniseResult(CString str[8],CString &result)//majority vote: the name that appears more than 6 times in the 8 per-frame results wins
{
CString tmp;
int count=0;
for(int i=0;i<8;i++)
{
tmp=str[i];
for(int j=0;j<8;j++)
{
if(tmp==str[j])
count++;
if(count>6)
{
result=str[j];
break;
}
}
if(count>6)
break;
count=0;
}
}
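/*Illustrative usage sketch, not part of the original file: RecogniseResult() is a simple
majority vote over the 8 per-frame names; the winner must occur more than 6 times.
The sample names below are placeholders.*/
void DemoRecogniseResult(CAIGesture& gesture)
{
	CString names[8]={"A","A","A","B","A","A","A","A"};//7 of the 8 frames agree on "A"
	CString winner;
	gesture.RecogniseResult(names,winner);//winner becomes "A"
	AfxMessageBox(winner);
}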
/*The routines below are kept as spares and are not used yet*/
void CAIGesture::SaveForTrain(IplImage* src,CvPoint pt[8],CString &result)//save a trajectory image for training; not used yet but may be later
{
IplImage* tmp=cvCreateImage(cvGetSize(src),8,1);
cvZero(tmp);
for(int i=1;i<8;i++)
{
cvCircle(tmp,pt[i-1],10,CV_RGB(255,255,255),3,8,0);
cvLine(tmp,pt[i-1],pt[i],CV_RGB(255,255,255),3,8,0);
}
cvCircle(tmp,pt[7],10,CV_RGB(255,255,255),3,8,0);
CTime t = CTime::GetCurrentTime();
CString imgname = t.Format( result+"%H%M%S.jpg" );
cvSaveImage(imgname,tmp);
cvReleaseImage(&tmp);//tmp is only needed for saving, so release it here
}
void CAIGesture::Follow(IplImage* src,CvRect& track_window,CvHistogram* &hist,BOOL& Flag)//track the object using a supplied histogram
{
CvConnectedComp track_comp;
CvBox2D track_box;
IplImage* hsv=0;//image in HSV colour space
IplImage* h=0;//H (hue) channel of the HSV image
IplImage* s=0;
IplImage* v=0;
IplImage* mask=0;
IplImage* mask_tmp1=0;
IplImage* mask_tmp2=0;
IplImage* mask_tmp3=0;
IplImage* mask_tmp4=0;
IplImage* backproject=0;
hsv=cvCreateImage(cvGetSize(src),8,3);
h=cvCreateImage(cvGetSize(src),8,1);
s=cvCreateImage(cvGetSize(src),8,1);
v=cvCreateImage(cvGetSize(src),8,1);
mask=cvCreateImage(cvGetSize(src),8,1);//skin-colour mask image
mask_tmp1=cvCreateImage(cvGetSize(src),8,1);
mask_tmp2=cvCreateImage(cvGetSize(src),8,1);
mask_tmp3=cvCreateImage(cvGetSize(src),8,1);
mask_tmp4=cvCreateImage(cvGetSize(src),8,1);
backproject=cvCreateImage(cvGetSize(src),8,1);
cvCvtColor(src,hsv,CV_BGR2HSV);
cvCvtPixToPlane(hsv,h,s,v,0);
//build the skin-colour mask: three H/S ranges are thresholded and the results are combined
cvInRangeS(h,cvScalar(0.0,0.0,0,0),cvScalar(12.0,0.0,0,0),mask_tmp1);
cvInRangeS(s,cvScalar(75.0,0.0,0,0),cvScalar(200.0,0.0,0,0),mask_tmp2);
cvAnd(mask_tmp1,mask_tmp2,mask_tmp1,0);
cvInRangeS(h,cvScalar(0.0,0.0,0,0),cvScalar(13.0,0.0,0,0),mask_tmp2);
cvInRangeS(s,cvScalar(20.0,0.0,0,0),cvScalar(90.0,0.0,0,0),mask_tmp3);
cvAnd(mask_tmp2,mask_tmp3,mask_tmp2,0);
cvInRangeS(h,cvScalar(170.0,0.0,0,0),cvScalar(180.0,0.0,0,0),mask_tmp3);
cvInRangeS(s,cvScalar(15.0,0.0,0,0),cvScalar(90.,0.0,0,0),mask_tmp4);
cvAnd(mask_tmp3,mask_tmp4,mask_tmp3,0);
cvOr(mask_tmp3,mask_tmp2,mask_tmp2,0);
cvOr(mask_tmp2,mask_tmp1,mask,0);
if(Flag)//Flag is TRUE: back-project with the supplied histogram and go on to track this frame
{
cvCalcBackProject(&h,backproject,hist);//compute the back projection before masking it with the skin mask
cvAnd(backproject,mask,backproject,0);
}
else//Flag is FALSE: this frame is only for initialisation, so compute the back projection and return without tracking
{
cvCalcBackProject(&h,backproject,hist);
cvAnd(backproject,mask,backproject,0);
Flag=TRUE;
cvReleaseImage(&hsv);
cvReleaseImage(&h);
cvReleaseImage(&s);
cvReleaseImage(&v);
cvReleaseImage(&mask);
cvReleaseImage(&mask_tmp1);
cvReleaseImage(&mask_tmp2);
cvReleaseImage(&mask_tmp3);
cvReleaseImage(&mask_tmp4);
cvReleaseImage(&backproject);
return;
}
cvCamShift(backproject,track_window,
cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
&track_comp, &track_box );
track_window=track_comp.rect;
if(cvCountNonZero(mask)<40)//if the remaining skin region is too small the track is considered lost
{
AfxMessageBox("跟踪丢失");//"tracking lost"
cvReleaseImage(&hsv);
cvReleaseImage(&h);
cvReleaseImage(&s);
cvReleaseImage(&v);
cvReleaseImage(&mask);
cvReleaseImage(&mask_tmp1);
cvReleaseImage(&mask_tmp2);
cvReleaseImage(&mask_tmp3);
cvReleaseImage(&mask_tmp4);
cvReleaseImage(&backproject);
track_window=cvRect(150,150,20,20);
return;
}
cvReleaseImage(&hsv);
cvReleaseImage(&h);
cvReleaseImage(&s);
cvReleaseImage(&v);
cvReleaseImage(&mask);
cvReleaseImage(&mask_tmp1);
cvReleaseImage(&mask_tmp2);
cvReleaseImage(&mask_tmp3);
cvReleaseImage(&mask_tmp4);
cvReleaseImage(&backproject);
}
void CAIGesture::CalcHist(IplImage* src,CvRect& rect,CvHistogram* &hist)//compute the hue histogram of a given rect
{
IplImage* hsv=0;//image in HSV colour space
IplImage* h=0;//H (hue) channel of the HSV image
IplImage* s=0;
IplImage* v=0;
int hdims = 150; // number of histogram bins; more bins give a finer histogram
float hranges_arr[] = {0,180};
float* hranges = hranges_arr;
int bin_w=0;
hsv=cvCreateImage(cvGetSize(src),8,3);
h=cvCreateImage(cvGetSize(src),8,1);
s=cvCreateImage(cvGetSize(src),8,1);
v=cvCreateImage(cvGetSize(src),8,1);
hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 );
cvCvtColor(src,hsv,CV_BGR2HSV);
cvCvtPixToPlane(hsv,h,s,v,0);
cvSetImageROI( h, rect );
cvCalcHist(&h,hist,0,NULL);
cvResetImageROI(h);
cvReleaseImage(&hsv);
cvReleaseImage(&h);
cvReleaseImage(&s);
cvReleaseImage(&v);
}
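/*Illustrative sketch, not part of the original file: CalcHist() and the histogram-based
Follow() are meant to work together - build the hue histogram once over a known hand
region, then reuse it frame after frame. The camera index and initial window are assumptions.*/
void DemoHistFollow()
{
	CAIGesture gesture;
	CvCapture* capture=cvCaptureFromCAM(0);//open the default camera
	CvRect track_window=cvRect(150,150,100,100);//assumed starting region over the hand
	CvHistogram* hist=0;
	BOOL flag=FALSE;//FALSE: the next Follow() call only initialises and flips flag to TRUE
	IplImage* frame=cvQueryFrame(capture);//frame is owned by the capture, do not release it
	if(frame!=0)
	{
		gesture.CalcHist(frame,track_window,hist);//hue histogram of the initial hand region
		while((frame=cvQueryFrame(capture))!=0)
		{
			gesture.Follow(frame,track_window,hist,flag);//tracks from the second call onwards
			if(cvWaitKey(10)==27)//ESC quits
				break;
		}
	}
	cvReleaseHist(&hist);
	cvReleaseCapture(&capture);
}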
void CAIGesture::Location(IplImage* src[8],CRect& rect)//locate the gesture over an 8-frame sequence and return the agreed region
{
IplImage* dst=cvCreateImage(cvGetSize(src[0]),8,1);
CvSeq* contour_tmp;
CvMemStorage* storage=cvCreateMemStorage(0);
double maxcontArea=0;
CvRect bndRect = cvRect(0,0,0,0);
CRect tmpRect;
CString result;
CList<GestrueInfo,GestrueInfo&> tmpList;
POSITION pos;
GestrueInfo tmpinfo;
for(int i=0;i<8;i++)
{
SkinDetect(src[i],dst);
int contourcount=cvFindContours(dst, storage, &contour_tmp, sizeof(CvContour), CV_RETR_LIST, CV_CHAIN_APPROX_NONE );
if(contourcount==0)
{
cvReleaseImage(&dst);
cvReleaseMemStorage(&storage);
return;
}
if(tmpList.IsEmpty())
{
pos=tmpList.GetHeadPosition();
for( ; contour_tmp != 0; contour_tmp = contour_tmp->h_next )
{
bndRect = cvBoundingRect( contour_tmp, 0 );
if(bndRect.width>20&&bndRect.height>20)
{
cvSetImageROI(src[i],bndRect);
Recognise(src[i],result);
if(result.IsEmpty())
result="NULL";
cvResetImageROI(src[i]);
tmpRect=CRect(CPoint(bndRect.x,bndRect.y),CPoint(bndRect.x+bndRect.width,bndRect.y+bndRect.height));
tmpinfo.SetRect(tmpRect);
tmpinfo.AddGestureName(result);
tmpList.AddTail(tmpinfo);
}
}
}
else
{
for( ; contour_tmp != 0; contour_tmp = contour_tmp->h_next )
{
pos=tmpList.GetHeadPosition();
bndRect = cvBoundingRect( contour_tmp, 0 );
tmpRect=CRect(CPoint(bndRect.x,bndRect.y),CPoint(bndRect.x+bndRect.width,bndRect.y+bndRect.height));
while(!tmpList.IsEmpty()&&pos!=NULL)
{
if(bndRect.width>20&&bndRect.height>20)
{
if(tmpList.GetAt(pos).IsTheSamePosition(tmpList.GetAt(pos).GetRect(),tmpRect))
{
cvSetImageROI(src[i],bndRect);
Recognise(src[i],result);
if(result.IsEmpty())
result="NULL";
cvResetImageROI(src[i]);
tmpList.GetAt(pos).AddGestureName(result);
}
}
tmpList.GetNext(pos);
}
}
}
}
pos=tmpList.GetHeadPosition();
while(pos!=NULL)
{
result.Empty();//reset the vote result for this candidate region
RecogniseResult(tmpList.GetAt(pos).GetGestureName(),result);
if(!result.IsEmpty())
{
AfxMessageBox(result);
rect=tmpList.GetAt(pos).GetRect();
break;
}
tmpList.GetNext(pos);
}
if(result.IsEmpty())
{
rect.SetRectEmpty();
}
cvReleaseImage(&dst);
cvReleaseMemStorage(&storage);
}