cvcam.cpp
            exit(-1);
        }
        else
            return 0;
        }
        break;
    }

    _cvcamProxyTrans->set_transform(
        (void(__cdecl*)(void*))cvcam_properties[camera_index].callback, 0);

    ASSERT(!_cvcamMediaControl.is_valid());
    _cvcamGraphBuilder->QueryInterface(IID_IMediaControl, (void**)&_cvcamMediaControl);
    _cvcamGraphBuilder->QueryInterface(IID_IMediaEventEx, (void**)&_cvcamMediaEventEx);
    _cvcamGraphBuilder->QueryInterface(IID_IVideoWindow, (void**)&_cvcamVideoWindow);

    IBaseFilter* pProxyTrans = 0;
    _cvcamProxyTrans->QueryInterface(IID_IBaseFilter, (void**)&pProxyTrans);
    _objects.push_back(SafeUnknown(pProxyTrans));

    /* link all filters together */
    if( _cvcamGraphBuilder.is_valid() )
    {
        HRESULT hr;
        hr = _cvcamGraphBuilder->AddFilter( pProxyTrans, L"Script processing");

        IPin* pSourceOut     = get_source_pin( _cvcamSource[camera_index].value(), PINDIR_OUTPUT );
        IPin* pProxyTransIn  = get_pin( pProxyTrans, PINDIR_INPUT );
        IPin* pProxyTransOut = get_pin( pProxyTrans, PINDIR_OUTPUT );

        _objects.push_back(SafeUnknown(pSourceOut));
        _objects.push_back(SafeUnknown(pProxyTransIn));
        _objects.push_back(SafeUnknown(pProxyTransOut));

        if( pSourceOut && pProxyTransIn && pProxyTransOut )
        {
            hr = _cvcamGraphBuilder->Connect(pSourceOut, pProxyTransIn);

            int render;
            cvcamGetProperty(camera_index, CVCAM_PROP_RENDER, &render);
            if(render)
                hr = _cvcamGraphBuilder->Render( pProxyTransOut );
        }
    }

    return 1;
}

/////////////////////////////////////////////////////////////////////////////////////////
// Makes the graph from several cameras and the extended cvSync filter
static int _cvcamInitSeveralCams()
{
    int i;

    /* Create a cvSync filter */
    if(FAILED(CoCreateInstance(CLSID_SyncFilter, NULL, CLSCTX_INPROC_SERVER,
                               IID_ISyncFilter, (void**)&_cvcamCvSync)))
    {
        return 0;
    }

    if (2==nb_cameras && stereo2_callback)
        _cvcamCvSync->SetCallBack2( (void(__cdecl*)(void*, void*))stereo2_callback );
    else if (3==nb_cameras && stereo3_callback)
        _cvcamCvSync->SetCallBack3( (void(__cdecl*)(void*, void*, void*))stereo3_callback );
    else if (4==nb_cameras && stereo4_callback)
        _cvcamCvSync->SetCallBack4( (void(__cdecl*)(void*, void*, void*, void*))stereo4_callback );

    ASSERT(!_cvcamMediaControl.is_valid());
    _cvcamGraphBuilder->QueryInterface(IID_IMediaControl, (void**)&_cvcamMediaControl);
    //_cvcamGraphBuilder->QueryInterface(IID_IMediaEventEx, (void**)&_cvcamMediaEventEx);

    IBaseFilter* pSyncFilter = 0;
    _cvcamCvSync->QueryInterface(IID_IBaseFilter, (void**)&pSyncFilter);

    vector<SafeUnknown> _objects;
    _objects.push_back(SafeUnknown(pSyncFilter));

    /* link all filters together */
    if( _cvcamGraphBuilder.is_valid() )
    {
        HRESULT hr;
        vector<int> indexes;
        IPin* pSourceOut[4];
        IPin* pSyncInput[4];
        IPin* pSyncOutput[4];

        for(i = 0; i < cvcam_properties.size(); i++)
        {
            if(cvcam_properties[i]._enabled)
            {
                indexes.push_back(i);
            }
        }

        hr = _cvcamGraphBuilder->AddFilter( pSyncFilter, L"Script processing");
        if(FAILED(hr))
        {
            return hr;
        }

        for (i=0;i<nb_cameras;i++)
        {
            pSourceOut[i] = get_source_pin( _cvcamSource[indexes[i]].value(), PINDIR_OUTPUT );
            if(!pSourceOut[i])
            {
                return 0;
            }
        }

        pSyncFilter->FindPin(L"Input1", &pSyncInput[0]);
        pSyncFilter->FindPin(L"Input2", &pSyncInput[1]);
        pSyncFilter->FindPin(L"Output1", &pSyncOutput[0]);
        pSyncFilter->FindPin(L"Output2", &pSyncOutput[1]);

        if (3==nb_cameras)
        {
            pSyncFilter->FindPin(L"Input3", &pSyncInput[2]);
            pSyncFilter->FindPin(L"Output3", &pSyncOutput[2]);
        }
        else if (4==nb_cameras)
        {
            pSyncFilter->FindPin(L"Input3", &pSyncInput[2]);
            pSyncFilter->FindPin(L"Input4", &pSyncInput[3]);
            pSyncFilter->FindPin(L"Output3", &pSyncOutput[2]);
            pSyncFilter->FindPin(L"Output4", &pSyncOutput[3]);
        }

        for (i=0;i<nb_cameras;i++)
        {
            _objects.push_back(SafeUnknown(pSourceOut[i]));
            _objects.push_back(SafeUnknown(pSyncInput[i]));
            _objects.push_back(SafeUnknown(pSyncOutput[i]));
        }

        for (i=0;i<nb_cameras;i++)
        {
            if( pSourceOut[i] && pSyncInput[i] && pSyncOutput[i] )
            {
                hr = _cvcamGraphBuilder->Connect(pSourceOut[i], pSyncInput[i]);
                if(FAILED(hr))
                {
                    AMGetErrorText(hr, errorText, 100);
                    MessageBox(0, errorText, "cvcam error", MB_OK);
                    return hr;
                }

                int render;
                cvcamGetProperty(indexes[i], CVCAM_PROP_RENDER, &render);
                if(render)
                    hr = _cvcamGraphBuilder->Render( pSyncOutput[i] );
                if(FAILED(hr))
                {
                    AMGetErrorText(hr, errorText, 100);
                    MessageBox(0, errorText, "cvcam error", MB_OK);
                    return hr;
                }
            }
        }
    }

    return 1;
}

/////////////////////////////////////////////////////////////////////////////////////////
int _cvcamStartSeveral()
{
    int i;
    vector<SafeUnknown> _objects;
    IBaseFilter* pcvSync = 0;
    vector<int> indexes;

    for(i = 0; i < cvcam_properties.size(); i++)
    {
        if(cvcam_properties[i]._enabled)
        {
            indexes.push_back(i);
        }
    }

    if(!(_cvcamCvSync.is_valid()))
        return -1;

    // We'll adjust the video window parameters. As we may have several
    // renderers (and so several windows), we can't get the IVideoWindow
    // interface from IGraphBuilder, so go directly to the renderers.
    bool Render = false;
    for (i=0;i<nb_cameras;i++)
        if (cvcam_properties[indexes[i]].render)
        {
            Render = true;
            break;
        }

    if (Render)
    {
        _cvcamCvSync->QueryInterface(IID_IBaseFilter, (void**)&pcvSync);
        _objects.push_back(SafeUnknown(pcvSync));

        IPin* output[4];
        ASSERT(pcvSync);
        pcvSync->FindPin(L"Output1", &output[0]);
        pcvSync->FindPin(L"Output2", &output[1]);
        if (3==nb_cameras)
            pcvSync->FindPin(L"Output3", &output[2]);
        else if (4==nb_cameras)
        {
            pcvSync->FindPin(L"Output3", &output[2]);
            pcvSync->FindPin(L"Output4", &output[3]);
        }

        for (i=0;i<nb_cameras;i++)
            _objects.push_back(SafeUnknown(output[i]));
        for (i=0;i<nb_cameras;i++)
            ASSERT(output[i]);

        if( !_cvcamMediaControl.is_valid() )
        {
            return -1;
        } //if( !_cvcamMediaControl.is_valid() )

        PIN_INFO PinInfo;
        IBaseFilter* nextFilter;
        IPin* pPin;

        for (i=0;i<nb_cameras;i++)
        {
            if (cvcam_properties[indexes[i]].render)
            {
                output[i]->ConnectedTo(&pPin);
                if(!pPin)
                    return VFW_E_NOT_CONNECTED;
                _objects.push_back(SafeUnknown(pPin));

                pPin->QueryPinInfo(&PinInfo);
                nextFilter = PinInfo.pFilter;
                _objects.push_back(SafeUnknown(nextFilter));

                //IVideoWindow* pVideoWindow;
                //nextFilter->QueryInterface(IID_IVideoWindow, (void**)&_cvcamVideoWindow);
                HRESULT hr = _cvcamCapGraphBuilder->FindInterface(NULL, &MEDIATYPE_Video,
                    nextFilter, IID_IVideoWindow, (void**)&_cvcamVideoWindow);
                if(hr != NOERROR)
                    return hr;

                HWND hWnd;
                cvcamGetProperty(indexes[i], CVCAM_PROP_WINDOW, &hWnd);
                if(!hWnd)
                {
                    hWnd = _cvcamCreateWindow();
                    cvcamSetProperty(indexes[i], CVCAM_PROP_WINDOW, &hWnd);
                }

                _cvcamVideoWindow->put_Owner((OAHWND)hWnd);
                long flags;
                _cvcamVideoWindow->get_WindowStyle(&flags);
                _cvcamVideoWindow->put_WindowStyle(flags & (~WS_CAPTION) | WS_CHILD);
                _cvcamVideoWindow->put_MessageDrain((OAHWND)hWnd);

                // Get the rectangle dimensions and resize the client window
                AM_MEDIA_TYPE amt;
                pPin->ConnectionMediaType(&amt);
                VIDEOINFOHEADER* vih = (VIDEOINFOHEADER*)amt.pbFormat;
                if(!vih)
                {
                    return -1;
                }

                int rndheight = cvcam_properties[indexes[i]].rndheight ?
                                cvcam_properties[indexes[i]].rndheight : vih->bmiHeader.biHeight;
                int rndwidth  = cvcam_properties[indexes[i]].rndwidth ?
                                cvcam_properties[indexes[i]].rndwidth : vih->bmiHeader.biWidth;

                _cvcamVideoWindow->SetWindowPosition( 0, 0, rndwidth, rndheight );
                const char* name = cvGetWindowName(hWnd);
                cvResizeWindow(name, rndwidth, rndheight);
            } //if(cvcam_properties[indexes[i]].render)
        }
    } //if (Render)

    //MessageBox(NULL,"going run", NULL,MB_OK);
    _cvcamMediaControl->Run();
    //MessageBox(NULL,"ran", NULL,MB_OK);

    return 1;
}

/////////////////////////////////////////////////////////////////////////////////////////
/* Start the video */
CVCAM_API int cvcamStart()
{
    if(_cvcamNumberOfEnabled() >= 2)
        return _cvcamStartSeveral();

    vector<SafeUnknown> _objects;

    IBaseFilter* pProxyTrans = 0;
    _cvcamProxyTrans->QueryInterface(IID_IBaseFilter, (void**)&pProxyTrans);
    _objects.push_back(SafeUnknown(pProxyTrans));

    IPin* pProxyTransOut = get_pin( pProxyTrans, PINDIR_OUTPUT );
    _objects.push_back(SafeUnknown(pProxyTransOut));

    if(!pProxyTrans || !pProxyTransOut)
    {
        return -1;
    }

    if( !_cvcamMediaControl.is_valid() )
    {
        return -1;
    }

    int render;
    cvcamGetProperty(camera_index, CVCAM_PROP_RENDER, &render);
    if(render)
    {
        /* Get the window */
        HWND hWnd;
        cvcamGetProperty(camera_index, CVCAM_PROP_WINDOW, &hWnd);
        if(!hWnd)
        {
            hWnd = _cvcamCreateWindow();
            cvcamSetProperty(camera_index, CVCAM_PROP_WINDOW, &hWnd);
        }

        HRESULT hres = _cvcamVideoWindow->put_Owner((OAHWND)hWnd);
        long flags;

        hres = _cvcamMediaEventEx->SetNotifyWindow((OAHWND)hWnd, WM_GRAPHNOTIFY, 0);
        hres = _cvcamMediaEventEx->SetNotifyFlags(0x00);
        hres = _cvcamMediaEventEx->CancelDefaultHandling(EC_COMPLETE);

        hres = _cvcamVideoWindow->get_WindowStyle(&flags);
        hres = _cvcamVideoWindow->put_WindowStyle(flags & (~WS_CAPTION) | WS_CHILD);
        hres = _cvcamVideoWindow->put_MessageDrain((OAHWND)hWnd);

        // Get the rectangle dimensions and resize the client window
        AM_MEDIA_TYPE amt;
        pProxyTransOut->ConnectionMediaType(&amt);
        VIDEOINFOHEADER* vih = (VIDEOINFOHEADER*)amt.pbFormat;
        if(!vih)
        {
            return -1;
        }

        int rndheight = cvcam_properties[camera_index].rndheight ?
                        cvcam_properties[camera_index].rndheight : vih->bmiHeader.biHeight;
        int rndwidth  = cvcam_properties[camera_index].rndwidth ?
                        cvcam_properties[camera_index].rndwidth : vih->bmiHeader.biWidth;

        _cvcamVideoWindow->SetWindowPosition( 0, 0, rndwidth, rndheight );
        const char* name = cvGetWindowName(hWnd);
        cvResizeWindow(name, rndwidth, rndheight);
    }

    _cvcamMediaControl->Run();
    return 0;
}

/////////////////////////////////////////////////////////////////////////////////////////
/* Stop the video */
CVCAM_API int cvcamStop()
{
    if( _cvcamMediaControl.is_valid() )
    {
        OAFilterState fs;
        _cvcamMediaControl->GetState(0, &fs);
        if(fs == State_Stopped)
            return S_OK;

        _cvcamMediaControl->StopWhenReady();

        if(_cvcamVideoWindow.is_valid() )
        {
            _cvcamVideoWindow->put_Visible(OAFALSE);
            _cvcamVideoWindow->put_Owner(NULL);
            _cvcamVideoWindow->put_MessageDrain(0);
        }
        if(_cvcamVideoWindow2.is_valid() )
        {
            _cvcamVideoWindow2->put_Visible(OAFALSE);
            _cvcamVideoWindow2->put_Owner(NULL);
            _cvcamVideoWindow2->put_MessageDrain(0);
        }
        if(_cvcamVideoWindow3.is_valid() )
        {
            _cvcamVideoWindow3->put_Visible(OAFALSE);
            _cvcamVideoWindow3->put_Owner(NULL);
            _cvcamVideoWindow3->put_MessageDrain(0);
        }
        return 0;
    }
    else
    {
        return -1;
    }
}

/////////////////////////////////////////////////////////////////////////////////////////
/* Pause the video; should be used to prevent data changes while a frame is being read
   through the "frame" and other properties */
CVCAM_API int cvcamPause()
{
    if( _cvcamMediaControl.is_valid() )
    {
        OAFilterState fs;
        _cvcamMediaControl->GetState(0, &fs);
        if(fs == State_Stopped)
            return S_OK;
        _cvcamMediaControl->Pause();
    }
    return 0;
}

/////////////////////////////////////////////////////////////////////////////////////////
/* Resume the video */
CVCAM_API int cvcamResume()
{
    if( _cvcamMediaControl.is_valid() )
    {
        OAFilterState fs;
        _cvcamMediaControl->GetState(0, &fs);
        if(fs == State_Stopped)
            return S_OK;
        _cvcamMediaControl->Run();
    }
    return 0;
}

/////////////////////////////////////////////////////////////////////////////////////////
/* Frees all resources */
CVCAM_API int cvcamExit()
{
    _cvcamSource.clear();
    _cvcamReset();
    _cvcamCreateDevEnum = 0;
    _cvcamEnumMon = 0;
    _cvcamMonikers.clear();
    cvcam_properties.clear();
    return 0;
}

/////////////////////////////////////////////////////////////////////////////////////////
CVCAM_API int cvcamBuildStereo()
{
    return -1;
}

/////////////////////////////////////////////////////////////////////////////////////////
/* Procedure for the camera selection dialog */
BOOL CALLBACK SelectionDlgProc(HWND hwndDlg, UINT message, WPARAM wParam, LPARAM lParam)
{
    switch (message)
    {
    case WM_INITDIALOG:
        {
            int n = cvcamGetCamerasCount();
            CameraDescription descr;
            HWND cbwnd = GetDlgItem(hwndDlg, IDC_COMBO1);
            for(int k = 0; k < n; k++)
            {
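For reference, the graph construction above follows the standard DirectShow recipe: create a filter graph manager, add the custom filter (the ProxyTrans transform or the SyncFilter), connect the capture source's output pins to it, Render() the outputs when preview is requested, and finally drive everything through IMediaControl. The sketch below shows that same pattern in its simplest form; it is an illustration only (not part of cvcam.cpp), "sample.avi" is a placeholder, and RenderFile stands in for cvcam's manual AddFilter/Connect calls.

// Minimal DirectShow graph: build, run, wait, release (illustration only).
#include <dshow.h>
#pragma comment(lib, "strmiids.lib")
#pragma comment(lib, "ole32.lib")

int main()
{
    if (FAILED(CoInitialize(NULL)))
        return -1;

    IGraphBuilder* pGraph = NULL;
    IMediaControl* pControl = NULL;
    IMediaEvent*   pEvent = NULL;

    // The filter graph manager plays the role of _cvcamGraphBuilder above.
    HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                                  IID_IGraphBuilder, (void**)&pGraph);
    if (FAILED(hr))
    {
        CoUninitialize();
        return -1;
    }

    // Control and event interfaces come from the same graph object,
    // as with _cvcamMediaControl and _cvcamMediaEventEx in cvcam.
    pGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
    pGraph->QueryInterface(IID_IMediaEvent, (void**)&pEvent);

    // RenderFile builds and connects the whole chain automatically; cvcam
    // instead inserts its own filters and calls Connect()/Render() per pin.
    hr = pGraph->RenderFile(L"sample.avi", NULL);   // placeholder file name
    if (SUCCEEDED(hr))
    {
        pControl->Run();                            // cf. _cvcamMediaControl->Run()
        long evCode = 0;
        pEvent->WaitForCompletion(INFINITE, &evCode);
        pControl->Stop();
    }

    pEvent->Release();
    pControl->Release();
    pGraph->Release();
    CoUninitialize();
    return 0;
}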
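Both _cvcamStartSeveral() and cvcamStart() re-parent the renderer's video window into a HighGUI window through IVideoWindow (put_Owner, put_WindowStyle, put_MessageDrain, SetWindowPosition). The helper below sketches that sequence in isolation, assuming an already-built graph and an existing host HWND; the name EmbedVideo and its parameters are illustrative, not part of cvcam.

// Hedged sketch: embed a DirectShow video renderer in an existing host window.
// With a single renderer the graph itself exposes IVideoWindow (as in cvcamStart);
// with several renderers cvcam locates each one via ICaptureGraphBuilder2::FindInterface.
#include <dshow.h>

HRESULT EmbedVideo(IGraphBuilder* pGraph, HWND hwndHost, long width, long height)
{
    IVideoWindow* pVidWin = NULL;
    HRESULT hr = pGraph->QueryInterface(IID_IVideoWindow, (void**)&pVidWin);
    if (FAILED(hr))
        return hr;

    pVidWin->put_Owner((OAHWND)hwndHost);                       // re-parent into the host window
    long style = 0;
    pVidWin->get_WindowStyle(&style);
    pVidWin->put_WindowStyle((style & ~WS_CAPTION) | WS_CHILD); // drop caption, make it a child
    pVidWin->put_MessageDrain((OAHWND)hwndHost);                // forward mouse/keyboard input
    pVidWin->SetWindowPosition(0, 0, width, height);            // fill the requested client area

    pVidWin->Release();
    return S_OK;
}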
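Finally, for context on how the exported functions in this file are driven: a client typically enables and configures cameras through cvcamSetProperty, then calls cvcamInit/cvcamStart and later cvcamStop/cvcamExit. The outline below follows the usage documented for the cvcam API; treat it as a hedged sketch, where the callback name onFrame and the ten-second Sleep are illustrative and CVCAMTRUE plus the property names are assumed to come from cvcam.h.

// Hedged client-side sketch of the cvcam API implemented above (not part of cvcam.cpp).
#include <cv.h>
#include <cvcam.h>
#include <windows.h>

// Per-frame callback: cvcam hands each captured frame to the client as an IplImage*.
static void onFrame(IplImage* image)
{
    // Process or draw into the frame here; if CVCAM_PROP_RENDER is enabled,
    // cvcam displays the (possibly modified) frame afterwards.
}

int main()
{
    if (cvcamGetCamerasCount() < 1)                           // enumerate capture devices
        return -1;

    cvcamSetProperty(0, CVCAM_PROP_ENABLE, CVCAMTRUE);        // use camera 0
    cvcamSetProperty(0, CVCAM_PROP_RENDER, CVCAMTRUE);        // show a preview window
    cvcamSetProperty(0, CVCAM_PROP_CALLBACK, (void*)onFrame); // per-frame hook

    cvcamInit();    // builds the DirectShow graph (single- or multi-camera path above)
    cvcamStart();   // runs the graph via IMediaControl::Run

    Sleep(10000);   // let the capture run for ~10 seconds in this sketch

    cvcamStop();    // StopWhenReady and hide the video windows
    cvcamExit();    // release all cvcam resources
    return 0;
}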