📄 handvu.cpp
Font size:
#include "Common.h"#include "CubicleWrapper.h"#include "Skincolor.h"#include "LearnedColor.h"#include "OpticalFlow.h"#include "CamShift.h"#include "Undistortion.h"#include "VisionConductor.h"#include "GestureServer.h"#include "HandVu.hpp"#include <fstream>#if defined(WIN32) && defined(DEBUG)//#include <streams.h>#endif#ifdef USE_MFC#ifdef _DEBUG#define new DEBUG_NEW#undef THIS_FILEstatic char THIS_FILE[] = __FILE__;#endif // _DEBUG#endif // USE_MFC/////////////////////////////////////////////////////////////////////////////// HandVuHandVu::HandVu() : m_active(false), m_tracking(false), m_recognized(false), m_id1_on(false), m_do_track(true), m_video_width(0), m_video_height(0), m_alignment_crosses(1), m_buf_indx_cycler(-1), m_curr_buf_indx(-1), m_prev_buf_indx(-1), m_rgbImage(NULL), m_depthImage(NULL), m_rightGrayImage(NULL), m_center_depth(0), m_initialized(false), m_quit_thread(false), m_pAsyncThread(NULL), m_img_width(-1), m_img_height(-1), m_undistort(false), m_overlay_level(0), m_sample_time(0), m_num_succ_dropped_frames(0), m_max_normal_latency(-1), m_max_abnormal_latency(-1), m_determine_normal_latency(false), m_t_start_processing(0), m_time_to_learn_color(0), m_min_time_between_learning_color(0), m_adjust_exposure(false), m_adjust_exposure_at_time(0), m_pCameraController(NULL), m_pClock(NULL){ m_pCubicle = new CubicleWrapper(); m_pSkincolor = new Skincolor(); m_pLearnedColor = new LearnedColor(); m_pOpticalFlow = new OpticalFlow(); m_pCamShift = new CamShift(); m_pUndistortion = new Undistortion(); m_pConductor = new VisionConductor(); m_grayImages[0] = NULL; m_grayImages[1] = NULL;// g_ostream = fopen("c:\\tmp\\HVout.txt", "aw+"); g_ostream = NULL; g_verbose = 0;}HandVu::~HandVu(){ // quit Aync thread if (m_pAsyncThread) { m_quit_thread = true; // will be set to false by thread upon exit m_pAsyncThread->Resume(); } // our datastructures delete m_pCubicle; delete m_pSkincolor; delete m_pLearnedColor; delete m_pOpticalFlow; delete m_pCamShift; delete 
m_pUndistortion; delete m_pConductor; // images cvReleaseImage(&m_grayImages[0]); cvReleaseImage(&m_grayImages[1]); cvReleaseImage(&m_depthImage); cvReleaseImage(&m_rightGrayImage); // wait for Aync thread, then delete ring buffer if (m_pAsyncThread) { m_pAsyncThread->Join(); } for (int b=0; b<(int)m_ring_buffer.size(); b++) { cvReleaseImage(&m_ring_buffer[b]); }}/* pCamCon may be NULL*/void HandVu::Initialize(int width, int height, RefClock* pClock, CameraController* pCamCon){ VERBOSE3(5, "HandVu: initializing with size %dx%d, CameraController: %s", width, height, pCamCon?"yes":"no"); cvReleaseImage(&m_grayImages[0]); cvReleaseImage(&m_grayImages[1]); cvReleaseImage(&m_depthImage); cvReleaseImage(&m_rightGrayImage); m_grayImages[0] = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 1); m_grayImages[1] = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 1); m_rightGrayImage = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 1); m_depthImage = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 1); m_pCubicle->Initialize(width, height); m_pSkincolor->Initialize(width, height); m_pLearnedColor->Initialize(width, height); m_pOpticalFlow->Initialize(width, height); m_pUndistortion->Initialize(width, height); m_pCamShift->Initialize(width, height); m_img_width = width; m_img_height = height; if (!pClock) { throw HVException("no clock set!"); } m_pClock = pClock; if (!pCamCon) { VERBOSE0(3, "no camera controller will be available"); } m_pCameraController = pCamCon; m_scan_area = CRect(width/3, height/3, 2*width/3, 2*height/3); m_min_time_between_learning_color = 3*1000*1000; // 3 sec // normal latencies etc. 
m_determine_normal_latency = true; m_max_normal_latency = 3000; // 3ms m_max_abnormal_latency = 15000; // 15ms m_last_latencies.resize(1); m_initialized = true;}bool HandVu::ConductorLoaded() const{ return m_pConductor->IsLoaded();}void HandVu::LoadConductor(const string& filename){ VERBOSE1(5, "HandVu: loading conductor from file %s", filename.c_str()); m_pConductor->Load(filename); // load a calibration matrix, if available, and set active if (m_pConductor->m_camera_calib!="") { m_pUndistortion->Load(m_pConductor->m_camera_calib.c_str()); m_undistort = true; } if (m_pConductor->m_adjust_exposure && m_pCameraController && m_pCameraController->CanAdjustExposure()) { bool turned_off = m_pCameraController->SetCameraAutoExposure(false); if (!turned_off) { throw HVException("can not turn off camera auto exposure"); } m_adjust_exposure = true; } VERBOSE0(5, "HandVu: done loading conductor");}// put filters in the right state, supply scanners and cascades//void HandVu::StartRecognition(int id/*=0*/){ if(!m_initialized) { throw HVException("HandVu not initialized, cannot start"); } if(!m_pConductor->IsLoaded()) { throw HVException("no conductor loaded"); } VERBOSE1(5, "HandVu: starting recognition for obj_id %d", id); if (id==1) { m_id1_on = true; m_id1_x = -1; m_id1_y = -1; if (m_active) return; } else if (id!=0) { throw HVException("unknown ID"); } // other detection m_dt_first_match_time = 0; m_dt_first_match = CuScanMatch(); // activate detection scanners for (int cc=0; cc<m_pConductor->m_dt_cascades_end; cc++) { cuSetScannerActive((CuCascadeID)cc, true); CRect area(m_pConductor->m_orig_areas[cc].toRect(m_img_width, m_img_height)); cuSetScanArea((CuCascadeID)cc, area.left, area.top, area.right, area.bottom); } // de-activate recognition scanners for (int cc=m_pConductor->m_rc_cascades_start; cc<m_pConductor->m_rc_cascades_end; cc++) { cuSetScannerActive((CuCascadeID)cc, false); } // the scan area is mostly used during tracking, but also // to do the exposure control 
CQuadruple quad; GetDetectionArea(quad); m_scan_area = quad.toRect(m_img_width, m_img_height); m_frame_times.clear(); m_processed_frame_times.clear(); m_prcs_times.clear(); m_buf_indx_cycler = 0; m_curr_buf_indx = m_buf_indx_cycler; m_prev_buf_indx = 1-m_buf_indx_cycler; m_do_track = true; m_active = true; m_tracking = false;}void HandVu::StopRecognition(int id/*=0*/){ VERBOSE1(5, "HandVu: stopping recognition for obj_id %d", id); if (id==1) { m_id1_on = false; return; } else if (id!=0) { throw HVException("unknown ID"); } m_active = false;}/* --------------------------------------------------------* THE ALMIGHTY ProcessFrame routine !!!!!!!!!!!!!!!!!!!!!* ----------------------------------------------------------*/HandVu::HVActionHandVu::ProcessFrame(GrabbedImage& inOutImage, const IplImage* rightImage){ m_rgbImage = inOutImage.GetImage(); m_sample_time = inOutImage.GetSampleTime(); // sanity checks if(!m_initialized) { throw HVException("HandVu not initialized, cannot process"); } if (m_rgbImage->width!=m_img_width || m_rgbImage->height!=m_img_height) { throw HVException("image dimensions do not match initialization"); } VERBOSE2(5, "HandVu: processing frame (active: %s, tracking: %s)", m_active?"yes":"no", m_tracking?"yes":"no"); if (m_rgbImage->origin==1) { m_rgbImage->origin = 0; cvMirror(m_rgbImage, NULL, 0); } // check latency of the incoming frame - the result is used // after KLT tracking HVAction action = CheckLatency(); // return if we're not supposed to do anything if (!m_active || m_scan_area.left>=m_scan_area.right || m_scan_area.top>=m_scan_area.bottom) { if (action==HV_PROCESS_FRAME) { // adjust exposure CheckAndCorrectExposure(); // undistort image, adjust location of centroid if (m_undistort) { m_pUndistortion->Undistort(m_rgbImage); // m_pUndistortion->Transform(centroid); } } KeepStatistics(action); DrawOverlay(); SendEvent(); return action; } // set current buffer m_buf_indx_cycler = 1-m_buf_indx_cycler; m_curr_buf_indx = m_buf_indx_cycler; 
m_prev_buf_indx = 1-m_buf_indx_cycler; // create gray scale image { int scan_width = m_scan_area.right-m_scan_area.left; int cvt_left = max(0, m_scan_area.left-scan_width/2); int cvt_width = min(m_scan_area.right+scan_width/2, m_rgbImage->width)-cvt_left; int scan_height = m_scan_area.bottom-m_scan_area.top; int cvt_top = max(0, m_scan_area.top-scan_height/2); int cvt_height = min(m_scan_area.bottom+scan_height/2, m_rgbImage->height)-cvt_top; ASSERT(cvt_height>0 && cvt_width>0); if (!(cvt_height>0 && cvt_width>0)) { KeepStatistics(action); DrawOverlay(); SendEvent(); return action; } cvSetImageROI(m_rgbImage, cvRect(cvt_left, cvt_top, cvt_width, cvt_height)); cvSetImageROI(m_grayImages[m_curr_buf_indx], cvRect(cvt_left, cvt_top, cvt_width, cvt_height)); cvCvtColor(m_rgbImage, m_grayImages[m_curr_buf_indx], CV_BGR2GRAY); cvResetImageROI(m_rgbImage); cvResetImageROI(m_grayImages[m_curr_buf_indx]); } // do the all-important, fast KLT tracking m_recognized = false; if (m_tracking) { m_tracking = DoTracking(); if (!m_tracking) { // lost tracking StartRecognition(); } } // take the recommendation from CheckLatency to heart if (action!=HV_PROCESS_FRAME) { KeepStatistics(action); DrawOverlay(); SendEvent(); return action; } if (m_tracking) { m_recognized = DoRecognition(); FindSecondHand(); } else { m_recognized = DoDetection(); } if (m_recognized && m_do_track) { InitializeTracking(); m_tracking = true; } // restrict the general color segmentation ROI; this is // not really important for anything but high overlay_levels int scan_left = max(0, m_scan_area.left); int scan_right = min(m_scan_area.right, m_rgbImage->width); int scan_top = max(0, m_scan_area.top); int scan_bottom = min(m_scan_area.bottom, m_rgbImage->height); CRect scan_area(scan_left, scan_top, scan_right, scan_bottom); // use stereo correspondence to obtain distance of hand from camera if (m_tracking && rightImage) { int sz = (scan_right-scan_left)/5; CvRect area = cvRect((int)m_center_pos.x-sz, 
(int)m_center_pos.y-sz, 2*sz, 2*sz); CalculateDepth(rightImage, area); } // adjust exposure CheckAndCorrectExposure(); // drawing m_pSkincolor->DrawOverlay(m_rgbImage, m_overlay_level, CRect(m_last_match)); if (m_tracking) { m_pLearnedColor->DrawOverlay(m_rgbImage, m_overlay_level, scan_area); } m_pCubicle->DrawOverlay(m_rgbImage, m_overlay_level); if (m_tracking) { if (m_pConductor->m_tr_type & VisionConductor::VC_TT_CAMSHIFT) { m_pCamShift->DrawOverlay(m_rgbImage, m_overlay_level); } else { m_pOpticalFlow->DrawOverlay(m_rgbImage, m_overlay_level); } } // undistort image, adjust location of centroid if (m_undistort) { m_pUndistortion->Undistort(m_rgbImage);// m_pUndistortion->Transform(centroid); } KeepStatistics(action); DrawOverlay(); SendEvent(); return action;}/* --------------------------------------------------------
⌨️ Keyboard shortcuts
Copy code
Ctrl + C
Search code
Ctrl + F
Full-screen mode
F11
Toggle theme
Ctrl + Shift + D
Show shortcuts
?
Increase font size
Ctrl + =
Decrease font size
Ctrl + -