// cameraimagerenderer.h
#pragma once
#include "color.h"
#define SAMPLEGRABBER_API
#pragma comment(lib,"Strmbase")
#pragma comment(lib,"Strmiids")
#pragma comment(lib,"Mmtimer")
EXTERN_GUID(CLSID_SimpleCameraRenderer, 0x00780718, 0x8864, 0x4a60, 0xb2, 0x6c, 0x55, 0x2f, 0x9a, 0xa4, 0x72, 0xe1);
EXTERN_GUID(CLSID_NullRenderer, 0xd11dfe19, 0x8864, 0x4a60, 0xb2, 0x6c, 0x55, 0x2f, 0x9a, 0xa4, 0x72, 0xe1);
// Globals
struct BitmapSample
{
REFERENCE_TIME tmSampleTime;
long lBufferSize;
BYTE *pBuffer;
BITMAPINFOHEADER bih;
DWORD biSize;
private:
BYTE * AllocBuffer( long BufferSize )
{
if( lBufferSize < BufferSize )
{
delete [] pBuffer;
pBuffer = NULL;
lBufferSize = 0;
}
// If we haven't yet allocated the data buffer, do it now.
// Just allocate what we need to store the new bitmap.
if (!pBuffer)
{
pBuffer = new BYTE[BufferSize];
lBufferSize = BufferSize;
}
if( !pBuffer )
{
lBufferSize = 0;
return NULL;
}
return pBuffer;
}
public:
BitmapSample()
{
memset( this, 0, sizeof( BitmapSample ) );
pBuffer = NULL;
}
~BitmapSample()
{
delete [] pBuffer;
}
HRESULT FromMediaSample(IPin *pInputPin, IMediaSample *pMediaSample)
{
//////////////////////////////////////////////////////////////////////////
HRESULT hr = S_OK;
ATLASSERT(pMediaSample!=NULL && pInputPin != NULL);
if(pMediaSample == NULL || pInputPin == NULL)
return E_INVALIDARG;
BYTE *pByte = NULL;
LONG lBufferSize = 0;
AM_MEDIA_TYPE mt;
REFERENCE_TIME tmStart,tmEnd;
hr = pMediaSample->GetPointer(&pByte);
ATLASSERT(pByte != NULL);
if (pByte == NULL)
return hr;
ATLASSERT(pInputPin != NULL);
hr = pInputPin->ConnectionMediaType(&mt);
ATLASSERT(SUCCEEDED(hr));
if(hr != S_OK)
return hr;
lBufferSize = pMediaSample->GetActualDataLength();
ATLASSERT(lBufferSize > 0);
hr = pMediaSample->GetTime(&tmStart, &tmEnd);
ATLASSERT(SUCCEEDED(hr));
hr = FromBuffer(mt, tmStart, pByte, lBufferSize);
FreeMediaType(mt);
return hr;
}
HRESULT FromBuffer(AM_MEDIA_TYPE &mt, REFERENCE_TIME SampleTime, BYTE * pbPixel, long BufferSize)
{
// Ask for the connection media type so we know its size
//
VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*) mt.pbFormat;
bih = vih->bmiHeader;
if (!pbPixel)
return E_POINTER;
// Since we can't access Windows API functions in this callback, just
// copy the bitmap data to a global structure for later reference.
tmSampleTime = SampleTime;
if( !AllocBuffer(BufferSize) )
return E_OUTOFMEMORY;
// Copy the bitmap data into our global buffer
memcpy(pBuffer, pbPixel, lBufferSize);
return 0;
}
ULONG CalcBitmapInfoSize(const BITMAPINFOHEADER &bmiHeader)
{
UINT bmiSize = (bmiHeader.biSize != 0) ? bmiHeader.biSize : sizeof(BITMAPINFOHEADER);
return bmiSize + bmiHeader.biClrUsed * sizeof(RGBQUAD);
}
int GetBitmapInfo(PBITMAPINFO *ppBitmap)
{
// callback got the sample
BYTE *bitmapData = NULL;
biSize = CalcBitmapInfoSize(bih);
ULONG Size = biSize + lBufferSize;
if(ppBitmap) // If we have a valid address from caller
{
*ppBitmap = (BITMAPINFO *) new BYTE[Size];
if(*ppBitmap)
{
(**ppBitmap).bmiHeader = bih; // copy bitmap info header
bitmapData = (BYTE *)(*ppBitmap) + biSize;
memcpy(bitmapData, pBuffer, lBufferSize); // copy bitmap pixels
}
}
return lBufferSize;
}
void Convert2RGB24(BitmapSample &bs)
{
long height = abs(bih.biHeight);
long width = bih.biWidth;
bs.tmSampleTime = tmSampleTime;
bs.lBufferSize = (width * height * 3);
bs.bih.biSize = sizeof( bs.bih );
bs.bih.biWidth = width;
bs.bih.biHeight = height;
bs.bih.biPlanes = 1;
bs.bih.biBitCount = 24;
bs.AllocBuffer(bs.lBufferSize);
struct color_cvt ccvt;
ycc_rgb_init(&ccvt, CHROM_420, width, height, 24);
ycc_rgb_convert(&ccvt, pBuffer, bs.pBuffer);
ycc_rgb_free(&ccvt);
// if top-down
if(bih.biHeight < 0)
{
for (int h=0; h<height; h++)
{
BYTE *pbLeft = bs.pBuffer + h * width * 3;
BYTE *pbRight = pbLeft + width * 3;
for (int w=0; w<width/2; w++)
{
Swap(pbLeft + 3*w, pbRight -3*w - 3);
Swap(pbLeft + 3*w + 1, pbRight - 3*w - 2);
Swap(pbLeft + 3*w + 2, pbRight - 3*w - 1);
}
}
}
}
inline void Swap(BYTE *pb1, BYTE *pb2)
{
BYTE b = *pb1;
*pb1 = *pb2;
*pb2 = b;
}
// This is the implementation function that writes the captured video
// data onto a bitmap on the user's disk.
//
int SaveBitmap(TCHAR *szFilePath = NULL)
{
BOOL bRet = FALSE;
//
// Convert the buffer into a bitmap
//
TCHAR szFilename[MAX_PATH * 2] = {0};
if(szFilePath == NULL)
(void)StringCchPrintf(szFilename, NUMELMS(szFilename), TEXT("\\My Documents\\Bitmap%5.5d.bmp\0"), long( tmSampleTime / 1000) );
else
wcscpy(szFilename, szFilePath);
// Create a file to hold the bitmap
HANDLE hf = CreateFile(szFilename, GENERIC_WRITE, FILE_SHARE_READ,
NULL, CREATE_ALWAYS, NULL, NULL );
if( hf == INVALID_HANDLE_VALUE )
{
return 0;
}
// Write out the file header
//
BITMAPFILEHEADER bfh;
memset( &bfh, 0, sizeof( bfh ) );
bfh.bfType = 'MB';
bfh.bfSize = sizeof( bfh ) + lBufferSize + sizeof( BITMAPINFOHEADER );
bfh.bfOffBits = sizeof( BITMAPINFOHEADER ) + sizeof( BITMAPFILEHEADER );
DWORD Written = 0;
bRet = WriteFile( hf, &bfh, sizeof( bfh ), &Written, NULL );
if( !bRet )
return bRet;
Written = 0;
bRet = WriteFile( hf, &(bih), sizeof( bih ), &Written, NULL );
if( !bRet )
return bRet;
// Write the bitmap bits
//
Written = 0;
bRet = WriteFile( hf, pBuffer, lBufferSize, &Written, NULL );
if( !bRet )
return bRet;
CloseHandle( hf );
return TRUE;
}
};
// Forwarding callback interface: the renderer reports each rendered sample
// and each "shot" still image here, and the notification is relayed to an
// optional chained callback installed via SetForwardCallback.
class SAMPLEGRABBER_API CMediaSampleCallback
{
    CMediaSampleCallback *m_cbForward;  // next callback in the chain (not owned)
public:
    CMediaSampleCallback() : m_cbForward(NULL) {};

    // Install the callback that all notifications are forwarded to.
    HRESULT SetForwardCallback(CMediaSampleCallback *pcb)
    {
        m_cbForward = pcb;
        return S_OK;
    }

    // Notification that a still image was captured (fired from OnRenderEnd).
    // Returns S_FALSE (with a debug assert) if no forward target is set.
    virtual HRESULT OnMediaSampleShoot( BitmapSample &sample )
    {
        if(m_cbForward == NULL)
        {
            ATLASSERT(FALSE);   // nobody registered to receive the shot
            return S_FALSE;
        }
        return m_cbForward->OnMediaSampleShoot( sample );
    }

    // Notification fired for every rendered media sample.
    // Returns S_FALSE (with a debug assert) if no forward target is set.
    virtual HRESULT OnDoRenderSample( BitmapSample &sample )
    {
        if(m_cbForward == NULL)
        {
            ATLASSERT(FALSE);
            return S_FALSE;
        }
        return m_cbForward->OnDoRenderSample( sample );
    }
};
#if 1
// DirectShow video renderer that snapshots every incoming sample into a
// BitmapSample and hands it to an optional CMediaSampleCallback — both per
// rendered frame (OnDoRenderSample) and at render end (OnMediaSampleShoot).
class SAMPLEGRABBER_API CCameraImageRenderer : public CBaseVideoRenderer
{
    CMediaSampleCallback *m_cbMediaSample;  // notification sink (not owned, may be NULL)
public:
    DECLARE_IUNKNOWN;

    CCameraImageRenderer(LPUNKNOWN pUnk, HRESULT* phr, CMediaSampleCallback *cbMediaSample = NULL) :
        CBaseVideoRenderer(CLSID_SimpleCameraRenderer, NAME("CCameraImageRenderer"), pUnk, phr)
    {
        m_cbMediaSample = cbMediaSample;
    }

    // Accept any media type; the per-sample path copies whatever arrives.
    HRESULT CheckMediaType(const CMediaType *pMT)
    {
        CheckPointer(pMT,E_POINTER);
        return S_OK;
    }

    // (Translated from the original Chinese comment:) DirectShow ships an
    // "MSYUV Color Space Converter Codec" filter that can convert YUV to
    // RGB, but the author could not find its corresponding interface.

    // This is called for each media sample received.
    HRESULT DoRenderSample(IMediaSample *pMediaSample)
    {
        ATLASSERT(pMediaSample!=NULL);
        BitmapSample sample;
        HRESULT hr = sample.FromMediaSample(m_pInputPin, pMediaSample);
        ATLASSERT(SUCCEEDED(hr));
        // BUGFIX: don't hand the callback a sample that failed to copy —
        // the old code invoked it regardless of hr.
        if(FAILED(hr))
            return hr;
        if(m_cbMediaSample != NULL)
        {
            hr = m_cbMediaSample->OnDoRenderSample( sample );
            ATLASSERT(SUCCEEDED(hr));
        }
        return hr;
    }

    virtual void OnRenderStart(IMediaSample *pMediaSample)
    {
        CBaseVideoRenderer::OnRenderStart(pMediaSample);
    }

    // Called after each sample is rendered; implements the "shoot" (still
    // image capture) notification.
    virtual void OnRenderEnd(IMediaSample *pMediaSample)
    {
        CBaseVideoRenderer::OnRenderEnd(pMediaSample);
        ATLASSERT(pMediaSample!=NULL);
        BitmapSample sample;
        HRESULT hr = sample.FromMediaSample(m_pInputPin, pMediaSample);
        ATLASSERT(SUCCEEDED(hr));
        // BUGFIX: skip the callback if the sample could not be copied.
        if(FAILED(hr))
            return;
        if(m_cbMediaSample != NULL)
        {
            hr = m_cbMediaSample->OnMediaSampleShoot( sample );
            ATLASSERT(SUCCEEDED(hr));
        }
    }
};
#else
//////////////////////////////////////////////////////////////////////////
// Fill in media type information below, this information is used when
// registering the filter with dshow
// Filter registration data for the disabled CBaseRenderer variant below:
// a single input pin accepting any media type, registered at normal merit.
const AMOVIESETUP_MEDIATYPE sudPinTypes = { &MEDIATYPE_NULL,&MEDIASUBTYPE_NULL };
const AMOVIESETUP_PIN sudPins = { L"Null", FALSE, FALSE, FALSE, FALSE,&CLSID_NULL, NULL, 1, &sudPinTypes };
const AMOVIESETUP_FILTER sudNULL = { &CLSID_NullRenderer, L"NullRend",MERIT_NORMAL, 1, &sudPins };
// Alternate implementation based on CBaseRenderer instead of
// CBaseVideoRenderer. Currently compiled out by the "#if 1" above.
class SAMPLEGRABBER_API CCameraImageRenderer : public CBaseRenderer
{
CMediaSampleCallback *m_cbMediaSample; // notification sink (not owned, may be NULL)
public:
DECLARE_IUNKNOWN;
CCameraImageRenderer(LPUNKNOWN pUnk, HRESULT* phr, CMediaSampleCallback *cbMediaSample = NULL) :
CBaseRenderer(CLSID_SimpleCameraRenderer, NAME("CCameraImageRenderer"), pUnk, phr)
{
m_cbMediaSample = cbMediaSample;
}
// Class-factory entry point used when the filter is registered with dshow.
static CUnknown* WINAPI CreateInstance(LPUNKNOWN punk, HRESULT* phr)
{
return new CCameraImageRenderer(punk, phr);
}
// Accept any media type.
HRESULT CheckMediaType(const CMediaType *pMT)
{
CheckPointer(pMT,E_POINTER);
return S_OK;
}
// (Translated from the original Chinese comment:) DirectShow ships an
// "MSYUV Color Space Converter Codec" filter that can do YUV-to-RGB
// conversion, but the author could not find its corresponding interface.
// This is called for each media sample received.
HRESULT DoRenderSample(IMediaSample *pMediaSample)
{
HRESULT hr = S_OK;
ATLASSERT(pMediaSample!=NULL);
BitmapSample sample;
hr = sample.FromMediaSample(m_pInputPin, pMediaSample);
ATLASSERT(hr == S_OK);
if(m_cbMediaSample != NULL)
{
hr = m_cbMediaSample->OnDoRenderSample( sample );
ATLASSERT(SUCCEEDED(hr));
}
// NOTE(review): any failure HRESULT from the copy or the callback is
// discarded here — the active #if branch propagates it instead.
return S_OK;
}
// This is called for each media sample received; forwards a snapshot of the
// sample to the OnMediaSampleShoot callback ("user shot the image").
virtual void OnRenderEnd(IMediaSample *pMediaSample)
{
CBaseRenderer::OnRenderEnd(pMediaSample);
// user shot the image
HRESULT hr = S_OK;
ATLASSERT(pMediaSample!=NULL);
BitmapSample sample;
hr = sample.FromMediaSample(m_pInputPin, pMediaSample);
ATLASSERT(hr == S_OK);
if(m_cbMediaSample != NULL)
{
hr = m_cbMediaSample->OnMediaSampleShoot( sample );
ATLASSERT(SUCCEEDED(hr));
}
return;
}
// Registration data used by the base-class self-registration machinery.
AMOVIESETUP_FILTER *GetSetupData()
{
return((AMOVIESETUP_FILTER*)&sudNULL);
}
// NOTE(review): returning a constant from NonDelegatingRelease breaks the
// IUnknown refcount contract (the object is never destroyed via Release).
// Presumably intentional for a non-heap-owned filter instance — confirm
// the intended lifetime before ever enabling this branch.
ULONG NonDelegatingRelease(){ return S_OK; }
};
#endif