vidinput_directx.cxx
return PFalse;
}
/*
* Even after a WaitForCompletion the webcam may not be ready, so poll
* until the sample grabber delivers the first frame
*/
count = 0;
while (count < 100)
{
long cbBuffer;
hr = pGrabber->GetCurrentBuffer(&cbBuffer, NULL);
if (hr == S_OK && cbBuffer > 0)
break;
else if (hr == VFW_E_WRONG_STATE)
{
/* Not available */
PThread::Current()->Sleep(100);
}
else
{
PTRACE(1, "PVidDirectShow\tWhile waiting the webcam to be ready, we have got this error: " << ErrorMessage(hr));
PThread::Current()->Sleep(10);
}
count++;
}
#if 0
SetHue((unsigned)-1);
SetWhiteness((unsigned)-1);
SetColour((unsigned)-1);
SetContrast((unsigned)-1);
SetBrightness((unsigned)-1);
#endif
isCapturingNow = PTrue;
return PTrue;
}
PBoolean PVideoInputDevice_DirectShow::Stop()
{
PTRACE(1,"PVidDirectShow\tStop()");
if (!IsCapturing())
return PFalse;
if (pMC)
pMC->StopWhenReady();
isCapturingNow = PFalse;
return PTrue;
}
PBoolean PVideoInputDevice_DirectShow::IsCapturing()
{
return isCapturingNow;
}
/*
*
* Grab the frame data
*
*/
PBoolean PVideoInputDevice_DirectShow::GetFrameData(BYTE * buffer, PINDEX * bytesReturned)
{
m_pacing.Delay(1000/GetFrameRate());
return GetFrameDataNoDelay(buffer, bytesReturned);
}
PBoolean PVideoInputDevice_DirectShow::GetFrameDataNoDelay(BYTE *destFrame, PINDEX * bytesReturned)
{
HRESULT hr;
long cbBuffer = frameBytes;
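/* Two paths: if a colour converter is in use, grab into tempFrame and convert
into destFrame; otherwise let the sample grabber copy straight into the
caller's buffer. */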
if (converter != NULL)
{
if (tempFrame == NULL)
return PFalse;
hr = pGrabber->GetCurrentBuffer(&cbBuffer, NULL);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to get the current buffer size: " << ErrorMessage(hr));
return PFalse;
}
if (frameBytes < cbBuffer)
{
PTRACE(1, "PVidDirectShow\tBuffer too short (needed "<< cbBuffer << " got " << frameBytes);
return PFalse;
}
hr = pGrabber->GetCurrentBuffer(&cbBuffer, (long*)tempFrame);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to get the current buffer: " << ErrorMessage(hr));
return PFalse;
}
converter->Convert((BYTE *)tempFrame, destFrame, cbBuffer, bytesReturned);
}
else
{
hr = pGrabber->GetCurrentBuffer(&cbBuffer, (long*)destFrame);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to get the current buffer: " << ErrorMessage(hr));
return PFalse;
}
*bytesReturned = cbBuffer;
}
return PTrue;
}
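/*
* Illustrative only, kept out of the build: a minimal capture loop as a
* caller might write it, using the methods defined by this class
* (Open/Start/GetFrameData/Stop/Close). The device name and the frame
* handling are placeholders, not values taken from this file.
*/
#if 0
static void ExampleCaptureLoop()
{
  PVideoInputDevice_DirectShow grabber;
  if (!grabber.Open("USB Video Device", PFalse))  /* hypothetical device name */
    return;

  grabber.SetColourFormat("YUV420P");
  grabber.SetFrameSize(352, 288);
  grabber.SetFrameRate(15);
  grabber.Start();

  PBYTEArray frame(grabber.GetMaxFrameBytes());
  for (int i = 0; i < 100; i++) {
    PINDEX bytesReturned = 0;
    if (grabber.GetFrameData(frame.GetPointer(), &bytesReturned)) {
      /* ... consume bytesReturned bytes of frame data here ... */
    }
  }

  grabber.Stop();
  grabber.Close();
}
#endif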
PINDEX PVideoInputDevice_DirectShow::GetMaxFrameBytes()
{
// PTRACE(1,"PVidDirectShow\tGetMaxFrameBytes() return " << GetMaxFrameBytesConverted(frameBytes));
return GetMaxFrameBytesConverted(frameBytes);
}
/*
*
* Set the FrameRate, FrameSize, ...
*
*
*/
/*
* Change Colourspace AND FrameSize by checking whether the requested resolution is supported by the hardware.
*
* For example a Logitech Pro 4000:
* Fmt[0] = (RGB24, 320x240, 30fps)
* Fmt[1] = (RGB24, 640x480, 15fps)
* Fmt[2] = (RGB24, 352x288, 30fps)
* Fmt[3] = (RGB24, 176x144, 30fps)
* Fmt[4] = (RGB24, 160x120, 30fps)
* Fmt[5] = (YUV420P, 320x240, 30fps)
* Fmt[6] = (YUV420P, 640x480, 15fps)
* Fmt[7] = (YUV420P, 352x288, 30fps)
* Fmt[8] = (YUV420P, 176x144, 30fps)
* Fmt[9] = (YUV420P, 160x120, 30fps)
* Fmt[10] = (IYUV, 320x240, 30fps)
* Fmt[11] = (IYUV, 640x480, 15fps)
* Fmt[12] = (IYUV, 352x288, 30fps)
* Fmt[13] = (IYUV, 176x144, 30fps)
* Fmt[14] = (IYUV, 160x120, 30fps)
*
* For example a Logitech Fusion, which supports MJPEG in hardware, doesn't return an MJPEG format :(
* Fmt[0] = (RGB24, 320x240, 15fps)
* Fmt[1] = (RGB24, 176x144, 30fps)
* Fmt[2] = (RGB24, 160x120, 30fps)
* Fmt[3] = (RGB24, 352x288, 30fps)
* Fmt[4] = (RGB24, 432x240, 30fps)
* Fmt[5] = (RGB24, 480x360, 30fps)
* Fmt[6] = (RGB24, 512x288, 30fps)
* Fmt[7] = (RGB24, 640x360, 30fps)
* Fmt[8] = (RGB24, 640x480, 15fps)
* Fmt[9] = (RGB24, 704x576, 15fps)
* Fmt[10] = (RGB24, 864x480, 15fps)
* Fmt[11] = (RGB24, 960x720, 15fps)
* Fmt[12] = (RGB24, 1024x576, 10fps)
* Fmt[13] = (RGB24, 1280x960, 7.5fps)
* Fmt[14] = (YUV420P, 320x240, 15fps)
* Fmt[15] = (YUV420P, 176x144, 30fps)
* Fmt[16] = (YUV420P, 160x120, 30fps)
* Fmt[17] = (YUV420P, 352x288, 30fps)
* Fmt[18] = (YUV420P, 432x240, 30fps)
* Fmt[19] = (YUV420P, 480x360, 30fps)
* Fmt[20] = (YUV420P, 512x288, 30fps)
* Fmt[21] = (YUV420P, 640x360, 30fps)
* Fmt[22] = (YUV420P, 640x480, 15fps)
* Fmt[23] = (YUV420P, 704x576, 15fps)
* Fmt[24] = (YUV420P, 864x480, 15fps)
* Fmt[25] = (YUV420P, 960x720, 15fps)
* Fmt[26] = (YUV420P, 1024x576, 10fps)
* Fmt[27] = (YUV420P, 1280x960, 7.5fps)
*/
PBoolean PVideoInputDevice_DirectShow::SetFormat(const PString &wanted_format, int width, int height, int fps)
{
HRESULT hr;
IAMStreamConfig *pStreamConfig;
AM_MEDIA_TYPE *pMediaFormat;
int iCount, iSize;
VIDEO_STREAM_CONFIG_CAPS scc;
int i;
PBoolean was_capturing = PFalse;
OAFilterState filterState = State_Stopped;
PTRACE(4, "PVidDirectShow\tSetFormat(\""
<< (wanted_format.IsEmpty()?"Not changed":wanted_format) <<"\", "
<< width<<"x"<<height <<", "
<< fps <<"fps)");
const GUID wanted_guid_format = pwlib_format_to_media_format(wanted_format);
if (!wanted_format.IsEmpty() && wanted_guid_format == MEDIATYPE_NULL)
{
PTRACE(4, "PVidDirectShow\tColorspace not supported ("<< wanted_format << ")");
return PFalse;
}
hr = pCapture->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
pSrcFilter, IID_IAMStreamConfig, (void **)&pStreamConfig);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to find StreamConfig Video interface: " << ErrorMessage(hr));
return PFalse;
}
hr = pStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to GetNumberOfCapabilities: " << ErrorMessage(hr));
pStreamConfig->Release();
return PFalse;
}
/* Sanity check: make sure the stream caps describe a VIDEO_STREAM_CONFIG_CAPS and not an AUDIO_STREAM_CONFIG_CAPS */
if (sizeof(scc) != iSize)
{
PTRACE(1, "PVidDirectShow\tBad Capapabilities (not a VIDEO_STREAM_CONFIG_CAPS)");
pStreamConfig->Release();
return PFalse;
}
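/* Walk every capability advertised by the capture pin and use the first one
matching the requested colour format, frame size and (if achievable) rate. */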
for (i=0; i<iCount; i++, MyDeleteMediaType(pMediaFormat))
{
pMediaFormat = NULL;
hr = pStreamConfig->GetStreamCaps(i, &pMediaFormat, (BYTE *)&scc);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to GetStreamCaps(" << i <<"): " << ErrorMessage(hr));
continue;
}
if (!((pMediaFormat->formattype == FORMAT_VideoInfo) &&
(pMediaFormat->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
(pMediaFormat->pbFormat != NULL)))
continue;
VIDEOINFOHEADER *VideoInfo = (VIDEOINFOHEADER *)pMediaFormat->pbFormat;
BITMAPINFOHEADER *BitmapInfo = &(VideoInfo->bmiHeader);
const int maxfps = (int)(10000000.0/VideoInfo->AvgTimePerFrame);
if (!wanted_format.IsEmpty() && (wanted_guid_format != pMediaFormat->subtype))
continue;
if (width && BitmapInfo->biWidth != width)
continue;
if (height && BitmapInfo->biHeight != height)
continue;
if (fps && fps <= maxfps)
VideoInfo->AvgTimePerFrame = (LONGLONG) (10000000.0 / (double)fps);
/* We have matched a good format; use it to change the device format */
PTRACE(1,"PVidDirectShow\tUsing setting ["<< i << "] = ("
<< media_format_to_pwlib_format(pMediaFormat->subtype) << ", "
<< BitmapInfo->biWidth << "x" << BitmapInfo->biHeight << ", "
<< fps << "fps, max:" << maxfps << "fps)");
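/* The graph must not be running while the format is changed: remember its
state, stop it, apply the new format, then restore the previous state. */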
#if 1
if (pMC)
{
hr = pMC->GetState(1000, &filterState);
PTRACE_IF(1, FAILED(hr), "PVidDirectShow\tGetState failed: " << ErrorMessage(hr));
pMC->StopWhenReady();
}
hr = pStreamConfig->SetFormat(pMediaFormat);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to setFormat: " << ErrorMessage(hr));
if (hr != VFW_E_INVALIDMEDIATYPE)
continue;
PTRACE(1, "PVidDirectShow\tRetrying ...");
was_capturing = isCapturingNow;
Close();
Open(deviceName, PFalse);
hr = pStreamConfig->SetFormat(pMediaFormat);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to setFormat (Try #2 graph deconnected): " << ErrorMessage(hr));
continue;
}
if (was_capturing)
Start();
}
if (pMC) {
if (filterState==State_Running)
{
pMC->Run();
}
else if (filterState==State_Paused)
{
pMC->Pause();
}
}
PTRACE(1, "PVidDirectShow\tOk");
if (pMediaFormat->subtype == MEDIASUBTYPE_RGB32 ||
pMediaFormat->subtype == MEDIASUBTYPE_RGB24 ||
pMediaFormat->subtype == MEDIASUBTYPE_RGB565 ||
pMediaFormat->subtype == MEDIASUBTYPE_RGB555)
{
nativeVerticalFlip = true;
}
else
{
nativeVerticalFlip = false;
}
#endif
#if 0
hr = pGrabber->SetMediaType(pMediaFormat);
if (FAILED(hr))
{
PTRACE(1, "PVidDirectShow\tFailed to setFormat on pGrabber: " << ErrorMessage(hr));
}
#endif
MyDeleteMediaType(pMediaFormat);
pStreamConfig->Release();
return PTrue;
}
pStreamConfig->Release();
return PFalse;
}
PBoolean PVideoInputDevice_DirectShow::SetFrameSize(unsigned width, unsigned height)
{
PTRACE(1,"PVidDirectShow\tSetFrameSize(" << width << ", " << height << ")");
if (!SetFormat(colourFormat, width, height, frameRate))
return PFalse;
PTRACE(1,"PVidDirectShow\tSetFrameSize " << width << "x" << height << " is suported in hardware");
if (!PVideoDevice::SetFrameSize(width, height))
return PFalse;
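/* For planar YUV420P this is width * height * 3 / 2 (e.g. 352x288 ->
152064 bytes); for RGB24 it is width * height * 3. */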
frameBytes = CalculateFrameBytes(frameWidth, frameHeight, colourFormat);
if (tempFrame)
free(tempFrame);
tempFrame = (char *)malloc(frameBytes);
if (tempFrame == NULL)
{
PTRACE(1,"PVidDirectShow\tNot enought memory to allocate tempFrame ("<<frameBytes<<")");
return PFalse;
}
PTRACE(4,"PVidDirectShow\tset frame size " << width << "x" << height << " frameBytes="<<frameBytes);
return PTrue;
}
PBoolean PVideoInputDevice_DirectShow::SetFrameRate(unsigned rate)
{
PTRACE(1,"PVidDirectShow\tSetFrameRate("<<rate<<"fps)");
if (rate < 1)
rate = 1;
else if (rate > 50)
rate = 50;
if (!SetFormat(colourFormat, frameWidth, frameHeight, rate))
return PFalse;
return PVideoDevice::SetFrameRate(rate);
}
PBoolean PVideoInputDevice_DirectShow::SetColourFormat(const PString & colourFmt)
{
PTRACE(1,"PVidDirectShow\tSetColourFormat("<<colourFmt<<")");
if (!SetFormat(colourFmt, frameWidth, frameHeight, frameRate))
return PFalse;
if (!PVideoDevice::SetColourFormat(colourFmt))
return PFalse;
return PTrue;
}
/*
*
* Get brightness, contrast, hue, saturation
*
*
*/
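/*
* GetControlCommon() queries the capture filter for IAMVideoProcAmp, reads
* the control's range and current value, and scales that value from the
* driver's [Min, Max] range onto a 16-bit scale, or writes -1 when the
* control is in automatic mode.
*/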
PBoolean PVideoInputDevice_DirectShow::GetControlCommon(long control, int *newValue)
{
IAMVideoProcAmp *pVideoProcAmp;
long Min, Max, Stepping, Def, CapsFlags, Val;
HRESULT hr;
hr = pSrcFilter->QueryInterface(IID_IAMVideoProcAmp, (void **)&pVideoProcAmp);
if (FAILED(hr))
{
PTRACE(4, "PVidDirectShow\tFailed to find VideoProcAmp interface: " << ErrorMessage(hr));
return PFalse;
}
hr = pVideoProcAmp->GetRange(control, &Min, &Max, &Stepping, &Def, &CapsFlags);
if (FAILED(hr))
{
PTRACE(4, "PVidDirectShow\tFailed to getRange interface on " << control << " : " << ErrorMessage(hr));
pVideoProcAmp->Release();
return PFalse;
}
hr = pVideoProcAmp->Get(control, &Val, &CapsFlags);
if (FAILED(hr))
{
PTRACE(4, "PVidDirectShow\tFailed to setRange interface on " << control << " : " << ErrorMessage(hr));
Val = Def;
}
if (CapsFlags == VideoProcAmp_Flags_Auto)
*newValue = -1;
else
*newValue = ((Val - Min) * 65536) / ((Max-Min));
pVideoProcAmp->Release();
return PTrue;
}
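/*
* The scaling above maps the driver's [Min, Max] range onto a 16-bit scale.
* A Set counterpart (not part of this excerpt) would need the inverse
* mapping before calling IAMVideoProcAmp::Set(); a minimal sketch, assuming
* the same Min/Max obtained from GetRange():
*/
#if 0
static long DenormaliseControlValue(int scaled, long Min, long Max)
{
  /* Inverse of ((Val - Min) * 65536) / (Max - Min) used above */
  return Min + (((long)scaled * (Max - Min)) / 65536);
}
#endif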
int PVideoInputDevice_DirectShow::GetBrightness()
{
return GetControlCommon(VideoProcAmp_Brightness, &frameBrightness) ? frameBrightness : -1;
}
int PVideoInputDevice_DirectShow::GetWhiteness()
{
return GetControlCommon(VideoProcAmp_Gamma, &frameWhiteness) ? frameWhiteness : -1;
}
int PVideoInputDevice_DirectShow::GetColour()
{
return GetControlCommon(VideoProcAmp_Saturation, &frameColour) ? frameColour : -1;
}
int PVideoInputDevice_DirectShow::GetContrast()
{
return GetControlCommon(VideoProcAmp_Contrast, &frameContrast) ? frameContrast : -1;
}
int PVideoInputDevice_DirectShow::GetHue()
{
return GetControlCommon(VideoProcAmp_Hue, &frameHue) ? frameHue : -1;
}
PBoolean PVideoInputDevice_DirectShow::GetParameters(int *whiteness, int *brightness, int *colour, int *contrast, int *hue)
{
if (!IsOpen())
return PFalse;
frameWhiteness = -1;
frameBrightness = -1;
frameColour = -1;
frameContrast = -1;
frameHue = -1;
GetWhiteness();
GetBrightness();
GetColour();
GetContrast();
GetHue();
PTRACE(4, "PVidDirectShow\tGetWhiteness() = " << frameWhiteness);
PTRACE(4, "PVidDirectShow\tGetBrighness() = " << frameBrightness);
PTRACE(4, "PVidDirectShow\tGetColour() = " << frameColour);
PTRACE(4, "PVidDirectShow\tGetContrast() = " << frameContrast);
PTRACE(4, "PVidDirectShow\tGetHue() = " << frameHue);
*whiteness = frameWhiteness;
*brightness = frameBrightness;
*colour = frameColour;
*contrast = frameContrast;
*hue = frameHue;
return PTrue;
}