📄 t264enc.cpp
字号:
pv1+=2;
pv2+=2;
pv3+=2;
pv4+=2;
}
pu1+=w;
pu2+=w;
pu3+=w;
pu4+=w;
pv1+=w;
pv2+=w;
pv3+=w;
pv4+=w;
}
delete uu;
delete vv;
return 1;
}
// Accept YV12 input carried in either a VIDEOINFOHEADER or a
// VIDEOINFOHEADER2 format block; capture width/height/framerate and the
// average frame duration into the encoder parameter block.
// Returns NOERROR on an acceptable type, E_INVALIDARG otherwise.
HRESULT CT264Enc::CheckInputType(const CMediaType *mtIn)
{
    CheckPointer(mtIn,E_POINTER);
    if(*mtIn->FormatType() == FORMAT_VideoInfo)
    {
        if(IsEqualGUID(*mtIn->Subtype(), MEDIASUBTYPE_YV12))
        {
            if(mtIn->FormatLength() < sizeof(VIDEOINFOHEADER))
                return E_INVALIDARG;
            VIDEOINFO *pInput = (VIDEOINFO *) mtIn->Format();
            // FIX: reject a non-positive frame duration instead of dividing
            // by zero below.
            if(pInput->AvgTimePerFrame <= 0)
                return E_INVALIDARG;
            m_param.width = pInput->bmiHeader.biWidth;
            m_param.height = pInput->bmiHeader.biHeight;
            // 100ns frame duration -> fps, rounded to the nearest integer.
            m_param.framerate = (float)(INT)((float)10000000 / pInput->AvgTimePerFrame + 0.5);
            m_avgFrameTime = pInput->AvgTimePerFrame;
            return NOERROR;
        }
    }
    else if (*mtIn->FormatType() == FORMAT_VideoInfo2)
    {
        if(IsEqualGUID(*mtIn->Subtype(), MEDIASUBTYPE_YV12))
        {
            if(mtIn->FormatLength() < sizeof(VIDEOINFOHEADER2))
                return E_INVALIDARG;
            VIDEOINFOHEADER2 *pInput = (VIDEOINFOHEADER2*) mtIn->Format();
            m_param.width = pInput->bmiHeader.biWidth;
            m_param.height = pInput->bmiHeader.biHeight;
            // NOTE(review): this branch deliberately derived the frame time
            // from the pre-set m_param.framerate (the author commented out
            // pInput->AvgTimePerFrame).
            // FIX: guard against a zero/stale framerate — fall back to the
            // format's own AvgTimePerFrame rather than dividing by zero.
            if (m_param.framerate > 0)
            {
                m_avgFrameTime = (LONGLONG)((float)10000000 / m_param.framerate);
            }
            else if (pInput->AvgTimePerFrame > 0)
            {
                m_avgFrameTime = pInput->AvgTimePerFrame;
                m_param.framerate = (float)10000000 / pInput->AvgTimePerFrame;
            }
            else
            {
                return E_INVALIDARG;
            }
            m_param.framerate = (float)(INT)(m_param.framerate + 0.5f);
            return NOERROR;
        }
    }
    return E_INVALIDARG;
} // CheckInputType
// Validate an input/output media type pairing for this transform.
// The input must pass CheckInputType; the output must carry a plain
// VIDEOINFOHEADER, and both format blocks must be large enough to read.
HRESULT CT264Enc::CheckTransform(const CMediaType *mtIn,const CMediaType *mtOut)
{
    CheckPointer(mtIn,E_POINTER);
    CheckPointer(mtOut,E_POINTER);

    // Input side must be something we already accept.
    const HRESULT hrInput = CheckInputType(mtIn);
    if(FAILED(hrInput))
        return hrInput;

    // Output side must be classic VIDEOINFO format.
    if(*mtOut->FormatType() != FORMAT_VideoInfo)
        return E_INVALIDARG;

    // Both format blocks must be big enough to dereference safely.
    const BOOL bLargeEnough =
        mtIn->FormatLength()  >= sizeof(VIDEOINFOHEADER) &&
        mtOut->FormatLength() >= sizeof(VIDEOINFOHEADER);
    return bLargeEnough ? NOERROR : E_INVALIDARG;
} // CheckTransform
// Build the compressed output media type: FOURCC 'T264' video wrapped in a
// VIDEOINFOHEADER sized from the negotiated encoder parameters.
// Returns S_OK, or E_OUTOFMEMORY if the format buffer cannot be allocated.
HRESULT CT264Enc::InitOutMediaType(CMediaType* pmt)
{
    pmt->InitMediaType();
    pmt->SetType(&MEDIATYPE_Video);
    pmt->SetSubtype(&CLSID_T264SUBTYPE);
    VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER*)pmt->AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
    // FIX: AllocFormatBuffer can fail; the original dereferenced the result
    // unconditionally.
    if (pvi == NULL)
        return E_OUTOFMEMORY;
    ZeroMemory(pvi, sizeof(VIDEOINFOHEADER));
    DWORD fcc = '462T';            // reads as "T264" in memory (little-endian FOURCC)
    pvi->dwBitRate = m_param.bitrate;
    pvi->AvgTimePerFrame = m_avgFrameTime;
    pvi->bmiHeader.biCompression = fcc;
    pvi->bmiHeader.biBitCount = 12;   // 4:2:0 layout — 12 bits per pixel
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth = m_param.width;
    pvi->bmiHeader.biHeight = m_param.height;
    pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
    SetRectEmpty(&(pvi->rcSource));
    SetRectEmpty(&(pvi->rcTarget));
    pmt->SetFormatType(&FORMAT_VideoInfo);
    pmt->SetVariableSize();            // compressed frames vary in size
    pmt->SetTemporalCompression(true); // inter frames depend on earlier ones
    return S_OK;
}
//
// DecideBufferSize
//
// Tell the output pin's allocator what size buffers we
// require. Can only do this when the input is connected
//
// Tell the output pin's allocator what buffer size we require.
// Only possible once the input pin is connected, since the request is
// derived from the input media type's sample size.
HRESULT CT264Enc::DecideBufferSize(IMemAllocator *pAlloc,ALLOCATOR_PROPERTIES *pProperties)
{
    CheckPointer(pAlloc,E_POINTER);
    CheckPointer(pProperties,E_POINTER);

    // Sizing needs a connected input pin.
    if(!m_pInput->IsConnected())
        return E_UNEXPECTED;

    pProperties->cBuffers = 1;
    pProperties->cbBuffer = m_pInput->CurrentMediaType().GetSampleSize();
    ASSERT(pProperties->cbBuffer);

    // With variable-size samples GetSampleSize() is only a hint, so
    // enforce a minimum — nothing more than a guess!!
    const BOOL bFixed = m_pInput->CurrentMediaType().bFixedSizeSamples;
    if(!bFixed && pProperties->cbBuffer < OUTPIN_BUFFER_SIZE)
        pProperties->cbBuffer = OUTPIN_BUFFER_SIZE;

    // SetProperties can return NOERROR yet grant less memory than asked for,
    // so always compare the actual allocation against the request.
    ALLOCATOR_PROPERTIES Actual;
    const HRESULT hr = pAlloc->SetProperties(pProperties,&Actual);
    if(FAILED(hr))
        return hr;

    ASSERT(Actual.cBuffers == 1);
    const BOOL bEnough = Actual.cBuffers >= pProperties->cBuffers &&
                         Actual.cbBuffer >= pProperties->cbBuffer;
    return bEnough ? NOERROR : E_FAIL;
} // DecideBufferSize
//
// GetMediaType
//
// I support one type, namely the type of the input pin
// We must be connected to support the single output type
//
// Offer exactly one output type (built by InitOutMediaType).
// We must be connected on the input side before the type can be described.
HRESULT CT264Enc::GetMediaType(int iPosition, CMediaType *pMediaType)
{
    // No input connection yet: nothing to offer.
    if(!m_pInput->IsConnected())
        return E_UNEXPECTED;

    if(iPosition < 0)
        return E_INVALIDARG;        // should never happen
    if(iPosition > 0)
        return VFW_S_NO_MORE_ITEMS; // only one type on offer

    CheckPointer(pMediaType,E_POINTER);
    InitOutMediaType(pMediaType);
    return NOERROR;
} // GetMediaType
// Open the T264 encoder and allocate one aligned YV12 frame buffer
// (luma plane + two quarter-size chroma planes) before streaming starts.
HRESULT CT264Enc::StartStreaming()
{
    // Some upstream filters leave the FPU in MMX state; clear it before
    // any floating-point work (see the note in CT264Dec::Receive).
    _asm emms
    if (m_t264 == NULL)
    {
        INT plane = m_param.width * m_param.height;
        m_t264 = T264_open(&m_param);
        ASSERT(m_t264);
        // FIX: the original only ASSERTed — a release build would stream
        // with a dead encoder. Fail the state transition instead.
        if (m_t264 == NULL)
            return E_FAIL;
        m_pBuffer = (BYTE*)_aligned_malloc(plane + (plane >> 1), 16);
        if (m_pBuffer == NULL)
        {
            // Roll back so a later retry starts from a clean state.
            T264_close(m_t264);
            m_t264 = 0;
            return E_OUTOFMEMORY;
        }
    }
    return CTransformFilter::StartStreaming();
}
// Tear down the encoder instance and its frame buffer. Both are created
// together in StartStreaming, so a single NULL check covers both.
HRESULT CT264Enc::StopStreaming()
{
    if (m_t264)
    {
        T264_close(m_t264);
        _aligned_free(m_pBuffer);
        // Reset so StartStreaming can safely re-create them.
        m_t264 = 0;
        m_pBuffer = 0;
    }
    return CTransformFilter::StopStreaming();
}
//
// GetPages
//
// This is the sole member of ISpecifyPropertyPages
// Returns the clsid's of the property pages we support
//
// ISpecifyPropertyPages: report the single property page we support.
// The caller owns pElems and frees it with CoTaskMemFree.
STDMETHODIMP CT264Enc::GetPages(CAUUID *pPages)
{
    CheckPointer(pPages,E_POINTER);
    pPages->cElems = 1;
    pPages->pElems = (GUID *) CoTaskMemAlloc(sizeof(GUID));
    if(!pPages->pElems)
        return E_OUTOFMEMORY;
    pPages->pElems[0] = CLSID_T264PropPage;
    return NOERROR;
} // GetPages
// Hand the caller (e.g. the property page) a pointer to the encoder's
// parameter block so it can read/write settings in place.
// NOTE(review): this exposes internal mutable state cast to INT*; the
// pointer is only valid for the lifetime of this filter instance.
HRESULT CT264Enc::get_Para(INT** pPara)
{
*pPara = (INT*)&m_param;
return S_OK;
}
// Load a default parameter set into m_param: enabled coding tools,
// GOP layout, quantizer range, bitrate/framerate and motion-search
// configuration. Rate control is switched on whenever bitrate is non-zero.
HRESULT CT264Enc::put_Default()
{
// Enabled coding tools; intra16x16 and exhaustive full search stay disabled.
m_param.flags = //USE_INTRA16x16|
USE_INTRA4x4|
USE_HALFPEL|
USE_QUARTPEL|
USE_SUBBLOCK|
// USE_FULLSEARCH|
USE_DIAMONDSEACH|
USE_FORCEBLOCKSIZE|
USE_FASTINTERPOLATE|
USE_SAD|
USE_EXTRASUBPELSEARCH|
USE_INTRAININTER|
USE_SCENEDETECT;
// GOP structure: I frame every 30 frames, IDR much more rarely.
m_param.iframe = 30;
m_param.idrframe = 3000 * 300;
// Quantizer: initial 28, clamped to [8, 34] by rate control.
m_param.qp = 28;
m_param.min_qp = 8;
m_param.max_qp = 34;
m_param.bitrate = 600 * 1024;
m_param.framerate= 30;
// Motion search window (pixels) in each direction.
m_param.search_x = 16;
m_param.search_y = 16;
// Partition sizes tried during motion estimation; sub-8x8 P partitions
// are left disabled below.
m_param.block_size = SEARCH_16x16P
|SEARCH_16x8P
|SEARCH_8x16P
// |SEARCH_8x8P
// |SEARCH_8x4P
// |SEARCH_4x8P
// |SEARCH_4x4P;
|SEARCH_16x16B
|SEARCH_16x8B
|SEARCH_8x16B;
m_param.disable_filter = 0;
m_param.aspect_ratio = 2;
m_param.video_format = 1;
// Number of reference frames for inter prediction.
m_param.ref_num = 3;
m_param.luma_coeff_cost = 4; // default 4; for a lower min qp, decrease this value
m_param.cpu = 0;
// Entropy coding: CABAC on; one B frame between references.
m_param.cabac = 1;
m_param.b_num = 1;
// Rate control only makes sense with a target bitrate.
if (m_param.bitrate != 0)
m_param.enable_rc = 1;
else
m_param.enable_rc = 0;
return S_OK;
}
// Store the window handle used for encoder info/status display.
// NOTE(review): the handle arrives as INT — on a 64-bit build this
// truncates a pointer-sized HWND. Confirm the interface is 32-bit only
// or widen the parameter type at the interface level.
HRESULT CT264Enc::put_InfoWnd(INT hWnd)
{
m_hWnd = (HWND)hWnd;
return S_OK;
}
//////////////////////////////////////////////////////////////////////////
// CT264Dec
// Decoder filter constructor.
// NOTE(review): this registers the *decoder* under CLSID_T264ENC, the
// encoder's class id — confirm whether a separate CLSID_T264DEC was intended.
CT264Dec::CT264Dec(TCHAR *tszName,LPUNKNOWN punk,HRESULT *phr) :
    CTransformFilter(tszName, punk, CLSID_T264ENC)
{
    m_t264 = 0;
    m_hWnd = 0;
    // FIX: members read later (SendSample / Receive / CompleteConnect) were
    // left uninitialized; zero them so a mis-ordered call fails predictably
    // instead of dereferencing garbage.
    m_pNextFilterInputpin = 0;
    m_nStride = 0;
    m_avgFrameTime = 0;
}
// Release the decoder core if it was ever opened.
CT264Dec::~CT264Dec()
{
    if (m_t264)
        T264dec_close(m_t264);
}
//
// CreateInstance
//
// Provide the way for COM to create a CT264Dec object
//
// COM class-factory entry point for CT264Dec.
// Construction failures are reported through *phr; a NULL return with
// *phr == E_OUTOFMEMORY signals allocation failure.
CUnknown * WINAPI CT264Dec::CreateInstance(LPUNKNOWN punk, HRESULT *phr)
{
    ASSERT(phr);
    CT264Dec* pFilter = new CT264Dec(NAME("T264Dec"), punk, phr);
    if (pFilter == NULL && phr != NULL)
        *phr = E_OUTOFMEMORY;
    return pFilter;
} // CreateInstance
//
// NonDelegatingQueryInterface
//
// Reveals IContrast and ISpecifyPropertyPages
//
// The decoder currently exposes no extra interfaces of its own (the
// IProp / ISpecifyPropertyPages branches were disabled in the original),
// so every query is deferred to the base transform filter.
STDMETHODIMP CT264Dec::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    CheckPointer(ppv, E_POINTER);
    return CTransformFilter::NonDelegatingQueryInterface(riid, ppv);
} // NonDelegatingQueryInterface
// Intentional no-op: this filter overrides Receive() and delivers output
// itself via SendSample, so the base class's Transform path is never
// exercised with real work.
HRESULT CT264Dec::Transform(IMediaSample *pIn, IMediaSample *pOut)
{
return S_OK;
} // Transform
// Copy one decoded frame out of the decoder's edge-padded planes into a
// packed planar output sample (full-resolution Y, then quarter-resolution
// V and U — YV12 plane order), timestamp it, and push it directly to the
// downstream input pin.
HRESULT CT264Dec::SendSample(T264_t* t, T264_frame_t *frame, IMediaSample* pSample)
{
    INT i;
    BYTE* p, *pDst;
    HRESULT hr;
    IMediaSample* pOutSample;

    // Set up the output sample (properties copied from the input sample).
    hr = InitializeOutputSample(pSample, &pOutSample);
    if (FAILED(hr))
    {
        return hr;
    }
    hr = pOutSample->GetPointer(&pDst);
    ASSERT(hr == S_OK);

    // Luma: one full-width row per line, skipping the decoder's edge padding.
    p = frame->Y[0];
    for(i = 0 ; i < t->height ; i ++)
    {
        memcpy(pDst, p, t->width);
        pDst += m_nStride;
        p += t->edged_stride;
    }
    // Chroma planes are width/2 x height/2.
    // FIX: the original copied t->width bytes per chroma row, overrunning
    // each half-width destination row by width/2 and writing width/2 bytes
    // past the end of the output buffer on the final row.
    p = frame->V;
    for(i = 0 ; i < t->height >> 1 ; i ++)
    {
        memcpy(pDst, p, t->width >> 1);
        pDst += m_nStride >> 1;
        p += t->edged_stride_uv;
    }
    p = frame->U;
    for(i = 0 ; i < t->height >> 1 ; i ++)
    {
        memcpy(pDst, p, t->width >> 1);
        pDst += m_nStride >> 1;
        p += t->edged_stride_uv;
    }

    // 12 bpp: Y plane plus half again for the two chroma planes.
    pOutSample->SetActualDataLength(t->width * t->height + (t->width * t->height >> 1));
    pOutSample->SetSyncPoint(TRUE);

    // Stamp consecutive presentation times from the running clock.
    CRefTime rtEnd = m_time + m_avgFrameTime;
    pOutSample->SetTime(&m_time.m_time, &rtEnd.m_time);
    m_time = rtEnd;

    // Deliver straight to the downstream pin (bypasses m_pOutput->Deliver).
    hr = m_pNextFilterInputpin->Receive(pOutSample);
    m_bSampleSkipped = FALSE; // last thing no longer dropped
    // release the output buffer. If the connected pin still needs it,
    // it will have addrefed it itself.
    pOutSample->Release();
    return hr;
}
// After the output pin connects, cache the downstream filter's input pin
// so Receive/SendSample can push samples to it directly.
HRESULT CT264Dec::CompleteConnect(PIN_DIRECTION direction, IPin *pReceivePin)
{
HRESULT hr = CTransformFilter::CompleteConnect(direction, pReceivePin);
if (direction == PINDIR_OUTPUT)
{
// __uuidof(m_pNextFilterInputpin) resolves to the interface type of the
// member pointer, so this QI fetches the downstream pin's input interface.
hr = pReceivePin->QueryInterface(__uuidof(m_pNextFilterInputpin), (VOID**)&m_pNextFilterInputpin);
ASSERT(hr == S_OK);
// we do not want to hold the reference of the input pin
// (deliberate weak-pointer pattern: the connection itself keeps the pin
// alive. NOTE(review): if QueryInterface failed, this Release would act
// on an unset pointer — only the ASSERT guards that in debug builds.)
m_pNextFilterInputpin->Release();
}
return hr;
}
// Custom Receive: feed the compressed payload to the T264 bitstream parser
// and deliver every decoded picture via SendSample. Non-media streams are
// forwarded untouched. Returns S_OK once the decoder wants more input.
HRESULT CT264Dec::Receive(IMediaSample* pSample)
{
    /* Check for other streams and pass them on */
    AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps();
    if (pProps->dwStreamId != AM_STREAM_MEDIA) {
        return m_pNextFilterInputpin->Receive(pSample);
    }
    HRESULT hr;
    ASSERT(pSample);
    ASSERT (m_pOutput != NULL) ;
    {
        // some decoder does not reset emms(such as mainconcept mpeg2 decoder), and we need compute floating ...
        BYTE* pSrc;
        LONG lSrcSize;
        INT run = 1;
        hr = pSample->GetPointer(&pSrc);
        ASSERT(hr == S_OK);
        // FIX: GetSize() reports the buffer *capacity*; the valid payload
        // length is GetActualDataLength(). Using the capacity fed trailing
        // garbage bytes into the bitstream parser.
        lSrcSize = pSample->GetActualDataLength();
        T264dec_buffer(m_t264, pSrc, lSrcSize);
        // Parse until the decoder reports it needs more data; the only loop
        // exit is the DEC_STATE_BUFFER return below.
        while (run)
        {
            decoder_state_t state = T264dec_parse(m_t264);
            switch(state)
            {
            case DEC_STATE_SLICE:
                {
                    // A displayable picture is ready; poc < 0 marks
                    // "nothing to show yet".
                    if (m_t264->output.poc >= 0)
                    {
                        SendSample(m_t264, &m_t264->output, pSample);
                    }
                }
                break;
            case DEC_STATE_BUFFER:
                /* read more data */
                return S_OK;
            case DEC_STATE_SEQ:
                // New sequence header: flush any picture still buffered
                // from the previous sequence.
                if (m_t264->frame_id > 0)
                {
                    SendSample(m_t264, T264dec_flush_frame(m_t264), pSample);
                }
                break;
            /*case DEC_STATE_PIC:*/
            default:
                /* do not care */
                break;
            }
        }
    }
    return hr;
}
HRESULT CT264Dec::Copy(IMediaSample *pSource, IMediaSample *pDest) const
{
CheckPointer(pSource,E_POINTER);
CheckPointer(pDest,E_POINTER);
// Copy the sample data
BYTE *pSourceBuffer, *pDestBuffer;
long lSourceSize = pSource->GetActualDataLength();
#ifdef DEBUG
long lDestSize = pDest->GetSize();
ASSERT(lDestSize >= lSourceSize);
#endif
pSource->GetPointer(&pSourceBuffer);
pDest->GetPointer(&pDestBuffer);
CopyMemory((PVOID) pDestBuffer,(PVOID) pSourceBuffer,lSourceSize);
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -