📄 cvcam.cpp
        return count;
    }
    else
    {
        // called from cvcamGetCamerasCount for the second and subsequent times
        if(!filter)
            return cvcam_properties.size();

        for(uint i = 0; i < cvcam_properties.size(); i++)
        {
            if(cvcam_properties[i]._enabled)
            {
                /* Initialize the camera */
                _cvcamMonikers[i]->BindToObject(0, 0, IID_IBaseFilter, (void **)filter);
                if(filter)
                {
                    _cvcamSource[i] = *filter;
                    camera_index = i;
                    return i;
                }
            }
        }

        /* No camera has been selected */
        *filter = 0;
        return -1;
    }
}

/* Creates a window for DS rendering */
HWND _cvcamCreateWindow()
{
    cvNamedWindow("cvcam window", 0);
    return (HWND)cvGetWindowHandle("cvcam window");
}

/* Returns the actual number of currently available cameras */
CVCAM_API int cvcamGetCamerasCount()
{
    int n = _cvcamInitVideoSource(0);
    return (n > 0) ? n : 0;
}
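/* Usage sketch (illustrative only): how a client might list the cameras this
   module finds, assuming the cvcam.h declarations of cvcamGetCamerasCount(),
   cvcamGetProperty() and the CameraDescription struct used further below.

    int ncams = cvcamGetCamerasCount();     // probes the DirectShow device enumerator
    for(int i = 0; i < ncams; i++)
    {
        CameraDescription desc;
        // CVCAM_DESCRIPTION copies the DirectShow "FriendlyName" into desc.DeviceDescription
        if(cvcamGetProperty(i, CVCAM_DESCRIPTION, &desc) == 0)
            printf("camera %d: %s\n", i, desc.DeviceDescription);
    }
*/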
/* Summons the video format property page */
static void _cvcamSummonPinPropPage(int camera)
{
    if(!cvcam_properties[camera]._enabled)
        return;

    // Find the output pin that is connected to the next filter...
    CAUUID uuID;
    ISpecifyPropertyPages* pspp = 0;
    int fcvcamInit = 0;
    OAFilterState state = State_Stopped;

    // If _cvcamMediaControl is valid, the graph has already been built,
    // so we have to disconnect all filters first.
    if(_cvcamMediaControl.is_valid())
    {
        fcvcamInit = 1;
        // keep the graph state before disconnecting it, so we can restart from the current state
        _cvcamMediaControl->GetState(0, &state);
        // disconnect graph
        _cvcamTearDownGraph();
    }

    IBaseFilter* filter = _cvcamSource[camera].value();
    IPin* pPin = get_source_pin(_cvcamSource[camera].value(), PINDIR_OUTPUT);
    if(!pPin)
        pPin = get_pin(filter, PINDIR_OUTPUT);
    if(!pPin)
        return;

    pPin->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pspp);
    if(!pspp)
        return;
    pspp->GetPages(&uuID);
    pspp->Release();

    OleCreatePropertyFrame(NULL, 0, 0, L"Video Source", 1, (IUnknown**)&pPin,
        uuID.cElems, uuID.pElems, 0, 0, NULL);
    CoTaskMemFree(uuID.pElems);
    pPin->Release();

    // store this video source resolution
    IAMStreamConfig* pVSC;
    AM_MEDIA_TYPE* pmt = NULL;
    _cvcamCapGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
        _cvcamSource[camera].value(), IID_IAMStreamConfig, (void **)&pVSC);
    pVSC->GetFormat(&pmt);
    VIDEOINFOHEADER* pVih = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
    cvcam_properties[(uint)camera].srcwidth = pVih->bmiHeader.biWidth;
    cvcam_properties[(uint)camera].srcheight = abs(pVih->bmiHeader.biHeight);
    DeleteMediaType(pmt);
    pVSC->Release();

    // take them back to where they were before this function was called
    if(fcvcamInit)
    {
        cvcamInit();
        if(state == State_Running)
            cvcamStart();
    }
}

static void _cvcamSetVideoFormat(int camera, void* value)
{
    if(!cvcam_properties[camera]._enabled)
        return;

    VidFormat* vidFmt = (VidFormat*)value;
    int fcvcamInit = 0;
    OAFilterState state = State_Stopped;
    if(_cvcamMediaControl.is_valid())
    {
        fcvcamInit = 1;
        _cvcamMediaControl->GetState(0, &state);
        _cvcamTearDownGraph();
    }

    IAMStreamConfig* pVSC;
    AM_MEDIA_TYPE* pmt = NULL;
    _cvcamCapGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
        _cvcamSource[camera].value(), IID_IAMStreamConfig, (void **)&pVSC);

    VIDEO_STREAM_CONFIG_CAPS scc;
    int piCount, piSize;
    HRESULT hr = pVSC->GetNumberOfCapabilities(&piCount, &piSize);
    if(hr == S_OK)
    {
        for(int i = 0; i < piCount; i++)
        {
            pVSC->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE*>(&scc));
            VIDEOINFOHEADER* pVih = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
            if(pVih->bmiHeader.biWidth == vidFmt->width &&
               pVih->bmiHeader.biHeight == vidFmt->height)
            {
                if( vidFmt->framerate > 0 )
                    pVih->AvgTimePerFrame = (LONGLONG)(10000000 / vidFmt->framerate);
                pVSC->SetFormat(pmt);
                cvcam_properties[(uint)camera].srcwidth = pVih->bmiHeader.biWidth;
                cvcam_properties[(uint)camera].srcheight = abs(pVih->bmiHeader.biHeight);
                DeleteMediaType(pmt);
                break;
            }
            DeleteMediaType(pmt);
        }
    }
    pVSC->Release();

    // take them back to where they were before this function was called
    if(fcvcamInit)
    {
        cvcamInit();
        if(state == State_Running)
            cvcamStart();
    }
}

/* Summons the camera (source filter) property page */
static void _cvcamSummonFilterPropPage(int camera)
{
    if(!cvcam_properties[camera]._enabled)
        return;

    // Find the output pin that is connected to the next filter...
    CAUUID uuID;
    ISpecifyPropertyPages* pspp = 0;

    //IBaseFilter* filter =_cvcamSource[camera].value();
    //IMoniker* mon = _cvcamMonikers[camera].value();
    //_cvcamProxyTrans->QueryInterface(IID_IBaseFilter, (void**)&filter);
    //IPin* pProxyPin = get_pin(filter, PINDIR_INPUT);
    //pProxyPin->Disconnect();
    //_cvcamMonikers[0]->BindToObject(0, 0, IID_IBaseFilter, (void **)&filter);
    //IMoniker* mon = _cvcamMonikers[camera].value();
    //IMoniker* pmon;
    //_cvcamMonikers[camera]->QueryInterface(IID_IMoniker,(void**)&pmon);
    //pmon->BindToStorage(0,0,IID_IBaseFilter, (void **)&filter);
    //BindMoniker(mon,0,IID_IBaseFilter,(void **)&filter);
    //filter= _cvcamSource.value;
    /*
    IPropertyBag *pBag;
    HRESULT hr = mon->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
    if(SUCCEEDED(hr))
    {
        VARIANT var;
        var.vt = VT_BSTR;
        hr = pBag->Read(L"FriendlyName", &var, NULL);
        pBag->Release();
    }
    */
    //BindMoniker(mon,0,IID_IBaseFilter,(void **)&filter);
    //mon->BindToObject(0,0,IID_IBaseFilter, (void **)&filter);
    //IPin* pPin = get_source_pin(_cvcamSource[camera].value(), PINDIR_OUTPUT);
    //IPin* pPin = get_source_pin(filter, PINDIR_OUTPUT);
    //if(!pPin)
    //    pPin=get_pin(filter, PINDIR_OUTPUT);
    //if(!pPin)
    //    return;

    IBaseFilter* bf = _cvcamSource[camera].value();
    // IUnknown** ppobject = (IUnknown**)&pPin;
    bf->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pspp);
    if(!pspp)
        return;
    pspp->GetPages(&uuID);
    pspp->Release();

    /* Disconnect the pin to enable possible changes in the output format... */
    OAFilterState state = State_Stopped;
    if(_cvcamMediaControl.is_valid())
        _cvcamMediaControl->GetState(0, &state);
    // if(state != State_Stopped)
    //     cvcamStop();
    /* IPin* pTempPin =
    if(pPin) */
    //     pPin->Disconnect();
    //IBaseFilter* pFilter;
    //_cvcamProxyTrans->QueryInterface(IID_IBaseFilter, (void**)&pFilter);
    //IPin* pProxyPin = get_pin(pFilter, PINDIR_INPUT);
    // pProxyPin->Disconnect();

    OleCreatePropertyFrame(NULL, 0, 0, L"Video Source", 1, (IUnknown**)&bf,
        uuID.cElems, uuID.pElems, 0, 0, NULL);

    //_cvcamGraphBuilder->Connect(pSPin, pProxyPin);
    // if(state == State_Running)
    //    _cvcamMediaControl->Run();
    //    cvcamStart();
}

#define CHECK_CAMERA(p) if((p) >= cvcam_properties.size()) return -1;
#define CHECK_ZERO if(!value) return -1;
#define CHECK_POSITIVE(p) if((p) < 0) return -1;
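/* Usage sketch (illustrative only): the two DirectShow dialogs above are
   reached through cvcamGetProperty() below, using the property names handled
   there. Assuming camera 0 has already been enabled:

    cvcamGetProperty(0, CVCAM_VIDEOFORMAT, 0);   // pin property page: resolution / pixel format
    cvcamGetProperty(0, CVCAM_CAMERAPROPS, 0);   // source filter property page (driver-dependent controls)
*/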
/* Gets/sets a property of the camera.
   Returns 0 on success and a negative value if the property is not supported. */
CVCAM_API int cvcamGetProperty(int camera, const char* property, void* value)
{
    if (camera >= AVIS_START)
    {
        return cvcamAVIGetProperty(camera, property, value);
    }

    CHECK_CAMERA((uint)camera);

    if(strcmp(property, CVCAM_PROP_ENABLE) == 0)
    {
        CHECK_ZERO(value);
        *(int*)value = cvcam_properties[(uint)camera]._enabled;
        return 0;
    }
    else if(strcmp(property, CVCAM_PROP_RENDER) == 0)
    {
        CHECK_ZERO(value);
        *(int*)value = cvcam_properties[(uint)camera].render;
        return 0;
    }
    else if(strcmp(property, CVCAM_PROP_WINDOW) == 0)
    {
        CHECK_ZERO(value);
        *(int*)value = cvcam_properties[(uint)camera].window;
        return 0;
    }
    else if (strcmp(property, CVCAM_RNDWIDTH) == 0)
    {
        CHECK_ZERO(value);
        *(int*)value = cvcam_properties[(uint)camera].rndwidth;
        return 0;
    }
    else if (strcmp(property, CVCAM_RNDHEIGHT) == 0)
    {
        CHECK_ZERO(value);
        *(int*)value = cvcam_properties[(uint)camera].rndheight;
        return 0;
    }
    else if (strcmp(property, CVCAM_SRCWIDTH) == 0)
    {
        CHECK_ZERO(value);
        *(int*)value = cvcam_properties[(uint)camera].srcwidth;
        return 0;
    }
    else if (strcmp(property, CVCAM_SRCHEIGHT) == 0)
    {
        CHECK_ZERO(value);
        *(int*)value = cvcam_properties[(uint)camera].srcheight;
        return 0;
    }
    else if(strcmp(property, CVCAM_VIDEOFORMAT) == 0)
    {
        _cvcamSummonPinPropPage(camera);
        return 0;
    }
    else if(strcmp(property, CVCAM_CAMERAPROPS) == 0)
    {
        _cvcamSummonFilterPropPage(camera);
        return 0;
    }
    else if(strcmp(property, CVCAM_DESCRIPTION) == 0)
    {
        IPropertyBag* pBag;
        HRESULT hr = _cvcamMonikers[camera]->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
        if(SUCCEEDED(hr))
        {
            VARIANT var;
            var.vt = VT_BSTR;
            hr = pBag->Read(L"FriendlyName", &var, NULL);
            pBag->Release();

            char pcOut[100];
            strcpy(pcOut, "");
            int res = wcstombs(pcOut, var.bstrVal, 100);
            strcpy(((CameraDescription*)value)->DeviceDescription, pcOut);
            VariantClear(&var);
            return (res >= 0) - 1;
        }
        else
        {
            strcpy(((CameraDescription*)value)->DeviceDescription, "");
            return -1;
        } //if(SUCCEEDED(hr))
    }
    return -2;
}

/////////////////////////////////////////////////////////////////////////////////////////

CVCAM_API int cvcamSetProperty(int camera, const char* property, void* value)
{
    if (camera >= AVIS_START)
    {
        return cvcamAVISetProperty(camera, property, value);
    }

    if(strcmp(property, CVCAM_PROP_ENABLE) == 0)
    {
        int val = (int)value;
        // validate the camera index before touching cvcam_properties
        CHECK_POSITIVE(camera);
        if((uint)camera >= cvcam_properties.size())
            return 0;
        if(cvcam_properties[(uint)camera]._enabled && val)
            return -3;
        cvcam_properties[(uint)camera]._enabled = val;
        if(val)
        {
            if(_cvcamInitCapFilters(camera) < 0)
                cvcam_properties[(uint)camera]._enabled = 0;
        }
        return 0;
    }
    else if (strcmp(property, CVCAM_PROP_RENDER) == 0)
    {
        int val = (int)value;
        cvcam_properties[(uint)camera].render = val;
        return 0;
    }
    else if (strcmp(property, CVCAM_PROP_WINDOW) == 0)
    {
        int val = *(int*)value;
        cvcam_properties[camera].window = val;
        return 0;
    }
    else if(strcmp(property, CVCAM_PROP_CALLBACK) == 0)
    {
        cvcam_properties[camera].callback = value;
        return 0;
    }
    else if(strcmp(property, CVCAM_STEREO_CALLBACK) == 0)
    {
        stereo2_callback = value;
        nb_cameras = 2;
        return 0;
    }
    else if(strcmp(property, CVCAM_STEREO3_CALLBACK) == 0)
    {
        stereo3_callback = value;
        nb_cameras = 3;
        return 0;
    }
    else if(strcmp(property, CVCAM_STEREO4_CALLBACK) == 0)
    {
        stereo4_callback = value;
        nb_cameras = 4;
        return 0;
    }
    else if (strcmp(property, CVCAM_RNDWIDTH) == 0)
    {
        int val = *(int*)value;
        OAFilterState state = State_Stopped;
        if(_cvcamMediaControl.is_valid())
            _cvcamMediaControl->GetState(0, &state);
        if(state != State_Stopped)
            cvcamStop();
        cvcam_properties[camera].rndwidth = val;
        if(state == State_Running)
            cvcamStart();
        return 0;
    }
    else if (strcmp(property, CVCAM_RNDHEIGHT) == 0)
    {
        int val = *(int*)value;
        OAFilterState state = State_Stopped;
        if(_cvcamMediaControl.is_valid())
            _cvcamMediaControl->GetState(0, &state);
        if(state != State_Stopped)
            cvcamStop();
        cvcam_properties[camera].rndheight = val;
        if(state == State_Running)
            cvcamStart();
        return 0;
    }
    else if (strcmp(property, CVCAM_PROP_SETFORMAT) == 0)
    {
        _cvcamSetVideoFormat(camera, value);
        return 0;
    }
    return -2;
}
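/* Usage sketch (illustrative only): a typical single-camera session built from
   the setters above. cvcamStart(), cvcamStop() and cvcamExit() are declared in
   cvcam.h; the frame callback type is also declared there and is assumed here
   to take an IplImage*. on_frame and hwnd are hypothetical client-side names.

    void on_frame(IplImage* image);              // hypothetical frame callback

    int hwnd = ...;                              // handle of the render window, stored as int above
    cvcamSetProperty(0, CVCAM_PROP_ENABLE,   (void*)1);        // read as (int)value above
    cvcamSetProperty(0, CVCAM_PROP_RENDER,   (void*)1);        // read as (int)value above
    cvcamSetProperty(0, CVCAM_PROP_WINDOW,   &hwnd);           // read as *(int*)value above
    cvcamSetProperty(0, CVCAM_PROP_CALLBACK, (void*)on_frame); // stored verbatim above

    VidFormat fmt;                               // fields as used by _cvcamSetVideoFormat above
    fmt.width = 640; fmt.height = 480; fmt.framerate = 30;
    cvcamSetProperty(0, CVCAM_PROP_SETFORMAT, &fmt);

    cvcamInit();
    cvcamStart();
    // ... frames are delivered to on_frame() ...
    cvcamStop();
    cvcamExit();
*/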
/////////////////////////////////////////////////////////////////////////////////////////

/* gets all property names. the actual number of properties is returned. */
CVCAM_API int cvcamGetPropertiesList(int camera, const char** properties, int count)
{
    CHECK_CAMERA((uint)camera);

    if(count-- > 0)
    {
        properties[0] = CVCAM_PROP_ENABLE;
    }
    if(count-- > 0)
    {
        properties[1] = CVCAM_PROP_RENDER;
    }
    if(count-- > 0)
    {
        properties[2] = CVCAM_PROP_WINDOW;
    }
    return 3;
}

/////////////////////////////////////////////////////////////////////////////////////////

/* Prepares the currently enabled cameras for work */
CVCAM_API int cvcamInit()
{
    vector<SafeUnknown> _objects;

    if(_cvcamNumberOfEnabled() >= 2)
        return _cvcamInitSeveralCams();

    for(;;)
    {
        /* Create a proxy transform filter */
        if(FAILED(CoCreateInstance(CLSID_ProxyTransform, NULL, CLSCTX_INPROC_SERVER,
            IID_IProxyTransform, (void**)&_cvcamProxyTrans)))
        {
            int code = MessageBox(0,
                "ProxyTrans.ax could not be loaded. Please, register it using regsvr32.exe",
                "Error", MB_ABORTRETRYIGNORE);
            if( code == IDRETRY )
                continue;
            else if( code == IDABORT )
            {
                cvcamExit();