📄 handvu.cpp
}

bool HandVu::VerifyColor()
{
  ConstMaskIt mask = m_pConductor->GetMask(m_last_match.name);
  CRect roi(m_last_match);
  double coverage = m_pSkincolor->GetCoverage(m_rgbImage, roi, mask, false);
  VERBOSE0(3, "HandVu verified color");
  bool sufficient = (coverage>=m_pConductor->m_dt_min_color_coverage);
  if (sufficient) {
    VERBOSE0(3, "HandVu: color match!");
  } else {
    VERBOSE0(3, "HandVu: insufficient color match");
  }
  return sufficient;
}

void HandVu::InitializeTracking()
{
  ConstMaskIt mask = m_pConductor->GetMask(m_last_match.name);

  // if we haven't done so for some time, and the image was taken after
  // m_time_to_learn_color:
  if (m_time_to_learn_color<=m_sample_time) {
    // learn the RGB lookup table and use it for subsequent segmentations
    m_pLearnedColor->LearnFromGroundTruth(m_rgbImage, m_last_match, mask);
    m_time_to_learn_color = m_sample_time + m_min_time_between_learning_color;
  }

  if (m_pConductor->m_tr_type==VisionConductor::VC_TT_CAMSHIFT_HSV) {
    m_pCamShift->PrepareTracking(m_rgbImage, NULL, CRect(m_last_match));
  } else if (m_pConductor->m_tr_type==VisionConductor::VC_TT_CAMSHIFT_LEARNED) {
    m_pCamShift->PrepareTracking(m_rgbImage, m_pLearnedColor, CRect(m_last_match));
  } else {
    // color segmentation provides a probability distribution to the
    // optical flow filter to place features
    m_pOpticalFlow->PrepareTracking(m_rgbImage,
                                    m_grayImages[m_curr_buf_indx],
                                    m_curr_buf_indx,
                                    m_pLearnedColor, m_last_match, mask,
                                    m_pConductor->m_tr_num_KLT_features,
                                    m_pConductor->m_tr_winsize_width,
                                    m_pConductor->m_tr_winsize_height,
                                    m_pConductor->m_tr_min_feature_distance,
                                    m_pConductor->m_tr_max_feature_error);
  }

  m_center_pos.x = (m_last_match.right+m_last_match.left)/2.0f;
  m_center_pos.y = (m_last_match.bottom+m_last_match.top)/2.0f;
}

bool HandVu::DoTracking()
{
  // the size of the last Cubicle match determines how far we let
  // KLT features spread (during the call to Track)
  int last_width = m_last_match.right-m_last_match.left;
  int last_height = m_last_match.bottom-m_last_match.top;

  if (m_pConductor->m_tr_type & VisionConductor::VC_TT_CAMSHIFT) {
    m_pCamShift->Track(m_rgbImage);
    m_pCamShift->GetArea(m_scan_area);
    m_center_pos.x = (m_scan_area.right+m_scan_area.left)/2.0f;
    m_center_pos.y = (m_scan_area.bottom+m_scan_area.top)/2.0f;
  } else {
    bool flock = m_pConductor->m_tr_type==VisionConductor::VC_TT_OPTICAL_FLOW_FLOCK;
    bool color = m_pConductor->m_tr_type==VisionConductor::VC_TT_OPTICAL_FLOW_COLOR;
    if (m_pConductor->m_tr_type==VisionConductor::VC_TT_OPTICAL_FLOW_COLORFLOCK) {
      flock = color = true;
    }
    int num_tracked =
      m_pOpticalFlow->Track(m_rgbImage,
                            m_grayImages[m_prev_buf_indx],
                            m_grayImages[m_curr_buf_indx],
                            m_prev_buf_indx, m_curr_buf_indx,
                            last_width, last_height, flock, color);
    if (num_tracked<m_pConductor->m_tr_min_KLT_features) {
      // lost tracking for sure
      // Beep(300, 150);
      // don't change scan_area
      return false;
    }
    m_pOpticalFlow->GetMeanFeaturePos(m_center_pos);
    // VERBOSE1(4, "OpticalFlow features: %d\n", event.m_num_features_tracked);
  }

  int rnx = cvRound(m_center_pos.x), rny = cvRound(m_center_pos.y);
  SetScanAreaVerified(CRect(rnx-last_width, rny-last_height,
                            rnx+last_width, rny+last_height));
  return true;
}
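/* ------------------------------------------------------------------
 * Editor's sketch (not part of HandVu): how the detect/verify/track
 * functions above fit together. HandTracker is a hypothetical stand-in
 * interface; only the names VerifyColor, InitializeTracking and
 * DoTracking mirror the HandVu members in this file, and RunDetection
 * stands in for the cascade-detection step that is not shown here.
 * ------------------------------------------------------------------ */
struct HandTracker {
  virtual bool RunDetection() = 0;        // stand-in: fills m_last_match
  virtual bool VerifyColor() = 0;         // color-coverage gate (above)
  virtual void InitializeTracking() = 0;  // learn color, seed KLT/CamShift
  virtual bool DoTracking() = 0;          // false => too few KLT features left
  virtual ~HandTracker() {}
};

// Per-frame driver: detect until a match also passes the color check,
// then track until DoTracking() reports loss and detection resumes.
void OnFrame(HandTracker& hv, bool& tracking)
{
  if (!tracking) {
    if (hv.RunDetection() && hv.VerifyColor()) {
      hv.InitializeTracking();
      tracking = true;
    }
  } else {
    tracking = hv.DoTracking();
  }
}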
void HandVu::CheckAndCorrectExposure()
{
  if (!m_adjust_exposure
      || m_scan_area.left>=m_scan_area.right
      || m_scan_area.top>=m_scan_area.bottom) {
    return;
  }
  ASSERT(m_pCameraController);
  ASSERT(m_pCameraController->CanAdjustExposure());

  // is it time to adjust camera?
  RefTime curr_time = m_pClock->GetCurrentTimeUsec();
  if (curr_time<m_adjust_exposure_at_time) {
    return;
  }

  // find out if we should change the exposure at all
  int bins = 5;
  CvHistogram* pHist = cvCreateHist(1, &bins, CV_HIST_ARRAY);
  CvRect rect = cvRect(m_scan_area.left, m_scan_area.top,
                       m_scan_area.right-m_scan_area.left,
                       m_scan_area.bottom-m_scan_area.top);
  cvSetImageROI(m_grayImages[m_curr_buf_indx], rect);
  cvCalcHist(&m_grayImages[m_curr_buf_indx], pHist, 0, NULL);
  cvResetImageROI(m_grayImages[m_curr_buf_indx]);
  double bright_pixels = cvQueryHistValue_1D(pHist, 4);
  bright_pixels /= (double) (rect.width*rect.height);
  cvReleaseHist(&pHist);
  VERBOSE1(4, "HandVu: bright_pixels %f", bright_pixels);

  double exposure_factor = 1.0;
  const double max_bright = 0.3;
  const double min_bright = 0.1;
  if (bright_pixels>max_bright) {
    if (bright_pixels!=0) {
      exposure_factor = max(0.75, max_bright/bright_pixels);
    } else {
      exposure_factor = 0.9;
    }
    m_adjust_exposure_at_time = curr_time+500000l; // .5 sec
  } else if (bright_pixels<min_bright) {
    if (bright_pixels!=0) {
      exposure_factor = min(1.25, min_bright/bright_pixels);
    } else {
      exposure_factor = 1.1;
    }
    m_adjust_exposure_at_time = curr_time+500000l; // .5 sec
  } else {
    // we're right on, don't check again for some time
    m_adjust_exposure_at_time = curr_time+1000000l; // 1 sec
  }

  if (exposure_factor!=1.0) {
    if (m_exposure_level<=0.01) {
      m_exposure_level = 0.01;
    } else {
      m_exposure_level *= exposure_factor;
    }
    m_exposure_level = max(0.0, min(m_exposure_level, 1.0));
    bool changed = m_pCameraController->SetExposure(m_exposure_level);
    while (!changed && 0<m_exposure_level && m_exposure_level<1.0) {
      m_exposure_level *= exposure_factor;
      changed = m_pCameraController->SetExposure(m_exposure_level);
    }
    VERBOSE2(4, "HandVu: set exposure level to %f (%d)\n",
             m_exposure_level, changed);
  }
}

void HandVu::FindSecondHand()
{
  if (m_id1_on) {
    CvPoint2D32f pos;
    m_pOpticalFlow->GetMeanFeaturePos(pos);
    double w = m_last_match.right - m_last_match.left;
    double h = m_last_match.bottom - m_last_match.top;
    CRect area(10, cvRound(pos.y+h/3),
               cvRound(pos.x-w/3), m_rgbImage->height-10);
    // pos is output parameter:
    m_pLearnedColor->GetMostRightUpBlob(m_rgbImage, area, pos);
    m_id1_x = pos.x;
    m_id1_y = pos.y;
  }
}

void HandVu::CalculateDepth(const IplImage* rightImage, const CvRect& area)
{
  cvSetImageROI((IplImage*)rightImage, area);
  cvSetImageROI(m_rightGrayImage, area);
  cvCvtColor(rightImage, m_rightGrayImage, CV_BGR2GRAY);
  cvResetImageROI((IplImage*)rightImage);

  cvSetImageROI(m_grayImages[m_curr_buf_indx], area);
  int maxDisparity = min(30, area.width-1);
  cvSetImageROI(m_depthImage, area);
  cvFindStereoCorrespondence(m_grayImages[m_curr_buf_indx], m_rightGrayImage,
                             CV_DISPARITY_BIRCHFIELD, m_depthImage,
                             maxDisparity, 15, 3, 6, 8, 15);
  cvResetImageROI(m_grayImages[m_curr_buf_indx]);
  cvResetImageROI(m_rightGrayImage);

  cvConvertScale(m_depthImage, m_depthImage, 255.0/(double)maxDisparity);
  cvSetImageROI(m_rgbImage, area);
  cvCvtColor(m_depthImage, m_rgbImage, CV_GRAY2BGR);
  cvResetImageROI(m_rgbImage);

  CvScalar s = cvAvg(m_depthImage);
  m_center_depth = s.val[0];
}
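/* ------------------------------------------------------------------
 * Editor's sketch (not part of HandVu): CalculateDepth above stores a
 * mean disparity rescaled to 0..255 in m_center_depth, not a metric
 * distance. With calibration, the pinhole-stereo relation Z = f*B/d
 * recovers one; focal_px and baseline_m below are assumed calibration
 * values that do not appear in this file.
 * ------------------------------------------------------------------ */
double DepthFromCenterDisparity(double center_depth,  // mean of m_depthImage, 0..255
                                int maxDisparity,     // same value as in CalculateDepth
                                double focal_px,      // assumed focal length [px]
                                double baseline_m)    // assumed stereo baseline [m]
{
  // undo the 255/maxDisparity rescaling applied by cvConvertScale
  double disparity_px = center_depth * maxDisparity / 255.0;
  if (disparity_px <= 0.0) return -1.0;           // no valid correspondence
  return focal_px * baseline_m / disparity_px;    // Z = f * B / d
}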
/* SetDetectionArea sets the detection scan area in the video.
 * The coordinates need not be in a specific orientation, i.e. left and right
 * can be switched and top and bottom can be switched.
 */
void HandVu::SetDetectionArea(int left, int top, int right, int bottom)
{
  // righten area, smaller numbers to left and up
  CRect scan_area(min(left, right), min(top, bottom),
                  max(left, right), max(top, bottom));
  for (int scc=0; scc<m_pConductor->m_dt_cascades_end; scc++) {
    cuSetScanArea((CuCascadeID)scc, scan_area.left, scan_area.top,
                  scan_area.right, scan_area.bottom);
    m_pConductor->m_orig_areas[scc].fromRect(scan_area,
                                             m_img_width, m_img_height);
  }
}

void HandVu::GetDetectionArea(CQuadruple& area) const
{
  // union of all cascades' scan areas, in normalized coordinates
  area.left = area.top = 1.0;
  area.right = area.bottom = 0.0;
  for (int scc=0; scc<m_pConductor->m_dt_cascades_end; scc++) {
    const CQuadruple& curr = m_pConductor->m_orig_areas[scc];
    area.left = min(area.left, curr.left);
    area.right = max(area.right, curr.right);
    area.top = min(area.top, curr.top);
    area.bottom = max(area.bottom, curr.bottom);
  }
  // if no detection cascades, or wrong data, set all to zero
  if (area.left>=area.right || area.top>=area.bottom) {
    area.left = area.right = area.top = area.bottom = 0.0;
  }
}

void HandVu::SetOverlayLevel(int level)
{
  if (level<0 || level>MAX_OVERLAY_LEVEL) {
    throw HVException("invalid overlay level");
  }
  m_overlay_level = level;
}

int HandVu::GetOverlayLevel()
{
  return m_overlay_level;
}

bool HandVu::CanCorrectDistortion() const
{
  return m_pUndistortion->CanUndistort();
}

bool HandVu::IsCorrectingDistortion() const
{
  if (!m_pUndistortion->CanUndistort()) {
    return false;
  }
  return m_undistort;
}

void HandVu::CorrectDistortion(bool enable)
{
  if (enable && !m_pUndistortion->CanUndistort()) {
    throw HVException("can not undistort");
  }
  m_undistort = enable;
}

void HandVu::SetAdjustExposure(bool enable/*=true*/)
{
  if (enable == m_adjust_exposure) {
    return;
  }
  if (!m_pCameraController) {
    throw HVException("no camera controller set");
  }
  if (!enable) {
    m_pCameraController->SetCameraAutoExposure(true);
    m_adjust_exposure = false;
    return;
  }
  if (!m_pCameraController->CanAdjustExposure()) {
    throw HVException("camera controller incapable of setting exposure");
  }
  m_exposure_level = m_pCameraController->GetCurrentExposure();
  m_adjust_exposure = enable;
}

bool HandVu::CanAdjustExposure() const
{
  return (m_pCameraController && m_pCameraController->CanAdjustExposure());
}

bool HandVu::IsAdjustingExposure() const
{
  return m_adjust_exposure;
}

void HandVu::GetState(int id, HVState& state) const
{
  ASSERT(m_pClock);
  state.m_tstamp = m_sample_time;
  state.m_obj_id = id;
  if (!m_active) {
    state.m_tracked = false;
    state.m_recognized = false;
    return;
  }

  if (id==0) {
    state.m_tracked = m_tracking;
    state.m_recognized = m_recognized;
    if (m_tracking || m_recognized) {
      state.m_center_xpos = m_center_pos.x/(float)m_img_width;
      state.m_center_ypos = m_center_pos.y/(float)m_img_height;
      state.m_scale = m_center_depth ? m_center_depth : m_last_match.scale;
    } else {
      state.m_center_xpos = -1;
      state.m_center_ypos = -1;
      state.m_scale = 0;
    }
    if (m_recognized) {
      state.m_posture = m_last_match.name;
    } else {
      state.m_posture = "";
    }

  } else if (id==1) {
    if (!m_id1_on) {
      throw HVException("object ID1 is not being searched for");
    }
    if (m_id1_x==-1 || m_id1_y==-1) {
      state.m_tracked = false;
      state.m_center_xpos = -1;
      state.m_center_ypos = -1;
      state.m_recognized = false;
      state.m_posture = "";
    } else {
      state.m_tracked = true;
      state.m_center_xpos = m_id1_x / m_img_width;
      state.m_center_ypos = m_id1_y / m_img_height;
      state.m_recognized = false;
      state.m_posture = "";
    }

  } else {
    throw HVException("nothing known about this ID");
  }
}
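/* ------------------------------------------------------------------
 * Editor's sketch (not part of HandVu): a client polling GetState for
 * object 0, using only the HVState fields assigned above. The printf
 * reporting and the surrounding call site are illustrative assumptions.
 * ------------------------------------------------------------------ */
#include <cstdio>  // assumed available to the client

void PollHandState(const HandVu& hv)
{
  HVState state;
  hv.GetState(0, state);  // object 0 is the primary (tracked) hand
  if (state.m_recognized) {
    // m_posture carries the matched posture name from m_last_match
    printf("posture %s at (%.2f, %.2f), scale %.2f\n",
           state.m_posture.c_str(),
           state.m_center_xpos, state.m_center_ypos, state.m_scale);
  } else if (state.m_tracked) {
    // tracked but not recognized: the normalized center is still valid
    printf("hand at (%.2f, %.2f)\n",
           state.m_center_xpos, state.m_center_ypos);
  }
}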
void HandVu::SetLogfile(const string& filename)
{
  FILE* fp = fopen(filename.c_str(), "r");
  if (fp) {
    fclose(fp);
    string str("HandVu will not destroy existing file, please delete:\n");
    str.append(filename);
    throw HVException(str);
  }
  m_logfile_name = filename;

  /* todo: log stuff!
     frame number
     exposure control levels
     latency results
     state, actions -> dropped, skipped, processed
     scan area
     num features tracked, lost
     coverage, TestLearned
  */
}

void HandVu::SetScanAreaVerified(const CRect& area)
{
  double maxwidth = m_img_width*m_pConductor->m_rc_max_scan_width;
  if (area.right-area.left > maxwidth) {
    double halfwidth = maxwidth/2.0;
    double center = (double)(area.right+area.left)/2.0;
    m_scan_area.left = (int)(center-halfwidth);
    m_scan_area.right = (int)(center+halfwidth);
  } else {
    m_scan_area.left = area.left;
    m_scan_area.right = area.right;
  }
  double maxheight = m_img_height*m_pConductor->m_rc_max_scan_height;
  if (area.bottom-area.top > maxheight) {
    double halfheight = maxheight/2.0;
    double center = (double)(area.top+area.bottom)/2.0;
    m_scan_area.top = (int)(center-halfheight);
    m_scan_area.bottom = (int)(center+halfheight);
  } else {
    m_scan_area.top = area.top;
    m_scan_area.bottom = area.bottom;
  }
  VERBOSE4(4, "Set scan area to %d, %d, %d, %d",
           m_scan_area.left, m_scan_area.top,
           m_scan_area.right, m_scan_area.bottom);
}
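/* ------------------------------------------------------------------
 * Editor's sketch (not part of HandVu): the centered clamp performed by
 * SetScanAreaVerified above, as a standalone function with a worked
 * example. The numbers (640-px-wide image, m_rc_max_scan_width = 0.5)
 * are assumptions for illustration only.
 * ------------------------------------------------------------------ */
static void ClampCentered(int lo, int hi, double maxspan,
                          int& out_lo, int& out_hi)
{
  if (hi - lo > maxspan) {
    double center = (lo + hi) / 2.0;      // keep the window centered
    out_lo = (int)(center - maxspan/2.0);
    out_hi = (int)(center + maxspan/2.0);
  } else {
    out_lo = lo;                          // small enough: pass through
    out_hi = hi;
  }
}
// Example: maxwidth = 640 * 0.5 = 320. A request of [100, 500] (width 400)
// is recentered around 300 and clamped to [140, 460]; a request of
// [200, 400] (width 200) passes through unchanged.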