videofiledshow.cpp
#include "StdAfx.h"
#include <iostream>
//#include "windows.h"
#include "VideoFileDShow.h"
#include "DirectShowHelpers.h"
using namespace std;
VideoFileDShow::VideoFileDShow(void)
{
paused = false;
}
VideoFileDShow::~VideoFileDShow(void)
{
}
bool VideoFileDShow::loadVideoFile( std::string name, VideoManInputFormat *aFormat )
{
// Initialize DirectShow and query for needed interfaces
HRESULT hr = initDirectShow();
if ( FAILED( hr ) )
{
freeDirectShow();
cerr << "Failed to initialize DirectShow!" << endl;
return false;
}
identification.fileName = name;
hr = prepareMedia( name, aFormat );
if (FAILED(hr))
{
freeDirectShow();
cerr << "Failed preparing media! " << endl;
cerr << "fileName: " << name << endl;
return false;
}
//Set the time format
format.timeFormat = SECONDS;
hr = pMS->SetTimeFormat( &TIME_FORMAT_MEDIA_TIME );
if ( aFormat == NULL || aFormat->timeFormat == FRAMES )
{
hr = pMS->SetTimeFormat( &TIME_FORMAT_FRAME );
if ( hr == S_OK )
{
format.timeFormat = FRAMES;
}
}
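// IMediaSeeking::GetDuration reports the duration in the units of the current
// time format: 100 ns reference-time units for TIME_FORMAT_MEDIA_TIME or a
// frame count for TIME_FORMAT_FRAME, so convert to seconds only in the former case.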
LONGLONG duration;
pMS->GetDuration( &duration );
if ( format.timeFormat == SECONDS ) //use the time format actually in effect; aFormat may be NULL
videoLength = referenceTime2seconds( duration);
else
videoLength = static_cast<double>( duration );
/*hr = pMC->Run();
if ( FAILED( hr ) )
{
cerr << "Failed running file " << name << endl;
freeDirectShow();
return false;
}*/
frameCaptured = false;
if ( format.width<=0 || format.height<=0 )
{
freeDirectShow();
return false;
}
if (format.fps<=0)
format.fps=30;
if ( aFormat != NULL )
*aFormat = format;
return true;
}
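/* prepareMedia builds the DirectShow filter graph used for decoding:
   source filter -> (splitter/decoders inserted by Intelligent Connect) ->
   Sample Grabber -> Null Renderer, plus an optional audio render branch.
   The Sample Grabber is what later exposes each decoded frame to getFrame(). */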
HRESULT VideoFileDShow::prepareMedia( const std::string &name, VideoManInputFormat *aFormat )
{
HRESULT hr = S_OK;
/*** Add the source filter ***/
//std::string filterName = "CLSID_CAVIFileSynth";
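// HTTP URLs are opened with the Windows Media ASF Reader, loaded explicitly
// through IFileSourceFilter::Load; local files go through
// IGraphBuilder::AddSourceFilter, which chooses a suitable source filter itself.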
if ( name.find( "http://" ) != std::string::npos )
{
//CLSID_AsyncReader //CLSID_WMAsfReader //CLSID_CAVIFileSynth
if ( FAILED(hr = CoCreateInstance(CLSID_WMAsfReader, NULL, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (void**)&(videoSource))))
return hr;
CComPtr<IFileSourceFilter> pFileSource = NULL;
if ( FAILED( hr = videoSource->QueryInterface(IID_IFileSourceFilter,(void**)&pFileSource)))
return hr;
if ( FAILED( hr = pGB->AddFilter(videoSource, L"File Reader")))
return hr;
if ( FAILED( hr = pFileSource->Load( (LPCWSTR)bstr_t(name.c_str()), NULL ) ) )
return hr;
}
else
{
if ( FAILED( hr = pGB->AddSourceFilter( (LPCWSTR)bstr_t(name.c_str()), NULL, &videoSource ) ) )
return hr;
}
/*** Get the output pin of the video source filter ***/
CComPtr<IPin> sourceOutPin = NULL;
if ( FAILED( hr = getPin(videoSource, PINDIR_OUTPUT, MEDIATYPE_Video, sourceOutPin ) ) &&
FAILED( hr = getPin(videoSource, PINDIR_OUTPUT, MEDIATYPE_Stream, sourceOutPin ) ) )
{
//If the video output pin was not found, take the first pin
if ( FAILED(hr = getPin(videoSource,PINDIR_OUTPUT,1,sourceOutPin)))
return hr;
}
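/* The Null Renderer terminates the video branch without opening a video window:
   it simply discards each sample after the Sample Grabber upstream has had the
   chance to expose it to the application. */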
/*** Add the NULL renderer ***/
CComPtr <IBaseFilter> pVideoRenderer = NULL;
if ( FAILED( hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&(pVideoRenderer) ) ) )
return hr;
if ( FAILED ( hr = pGB->AddFilter(pVideoRenderer, L"NULL Video Renderer") ) )
return hr;
/*** Add the sample grabber ***/
if ( FAILED( hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (void**)&pSG) ) )
return hr;
if ( FAILED( hr = pSG->QueryInterface(IID_ISampleGrabber,(void**)&sampleGrabber)) )
return(hr);
/*** Set sampleGrabber format ***/
CMediaType mt;
mt.majortype = MEDIATYPE_Video;
mt.formattype = GUID_NULL;
if ( aFormat != NULL )
mt.subtype = translatePIXEL_FORMAT( aFormat->getPixelFormatOut() );
else
mt.subtype = translatePIXEL_FORMAT( RGB24 );
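// Only the major type and subtype are specified (formattype stays GUID_NULL),
// so the Sample Grabber accepts any video connection that delivers the requested
// pixel format and lets the graph negotiate the remaining details.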
if ( FAILED( hr = sampleGrabber->SetMediaType(&mt) ) )
return hr;
if ( FAILED( hr = pGB->AddFilter(pSG, L"Sample Grabber") ) )
return hr;
/*** Get the input and output pins of the samplegrabber ****/
CComPtr<IPin> sampleGrabberPinIn = NULL;
if ( FAILED( hr = getPin( pSG, PINDIR_INPUT, 1, sampleGrabberPinIn ) ) )
return(hr);
CComPtr<IPin> sampleGrabberPinOut = NULL;
if ( FAILED( hr = getPin( pSG, PINDIR_OUTPUT, 1, sampleGrabberPinOut ) ) )
return(hr);
/*** Get the input pin of the renderer ****/
CComPtr<IPin> rendererPin = NULL;
if ( FAILED ( hr = getPin( pVideoRenderer, PINDIR_INPUT, 1, rendererPin ) ) )
return(hr);
//Connect the splitter and the grabber
// if ( FAILED( hr = autoConnectFilters(videoSource,1,pSG,1,pGB) ) )
// return hr;
//Connect the grabber and the renderer
// if ( FAILED( hr = autoConnectFilters(pSG,1,pVideoRenderer,1,pGB) ) )
// return hr;
//Connect VideoSource and the splitter
/*** Connect the source filter and the grabber ***/
if( FAILED( hr = pGB->Connect( sourceOutPin, sampleGrabberPinIn ) ) )
return hr;
/*** Connect the grabber and the renderer ***/
if( FAILED( hr = pGB->Connect( sampleGrabberPinOut, rendererPin ) ) )
return hr;
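// IGraphBuilder::Connect uses Intelligent Connect, so DirectShow automatically
// inserts whatever splitter and decoder filters are needed between the two pins.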
/*** Render the audio ***/
if ( aFormat == NULL || aFormat->renderAudio )
{
/** To render the audio, first check whether videoSource has an audio output pin.
If it does not, a splitter of the appropriate type has to be added (it is inserted
automatically when connecting); we then have to find that splitter and render its audio output. **/
/** The splitter is inserted automatically when connecting, so it no longer needs to be added by hand **/
/*CComPtr <IBaseFilter> pStreamSplitter = NULL;
//CLSID_MPEG1Splitter
if ( FAILED(hr = CoCreateInstance(CLSID_AviSplitter, NULL, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (void**)&(pStreamSplitter))))
return hr;
if ( FAILED(hr = pGB->AddFilter(pStreamSplitter, L"Stream Splitter")))
return hr;
//Connect VideoSource and the splitter
if ( FAILED( hr = connectFilters(videoSource,1,pStreamSplitter,1)))
return hr; */
CComPtr<IPin> AudioOutPin = NULL;
if ( !FAILED( hr = getPin( videoSource, PINDIR_OUTPUT, MEDIATYPE_Audio, AudioOutPin ) ) )
{
//The video source has audio output pin
hr = pGB->Render(AudioOutPin);
}
else
{
/** The video source has no audio output pin.
Find the splitter and render its audio output pin **/
CComPtr <IBaseFilter> pStreamSplitter = NULL;
if ( !FAILED( findFilter( pGB, L"Splitter", &pStreamSplitter ) ) )
{
CComPtr<IPin> AudioOutPin = NULL;
if ( !FAILED( hr = getPin( pStreamSplitter, PINDIR_OUTPUT, MEDIATYPE_Audio,AudioOutPin ) ) )
{
IPin* pConnectedPin = NULL;
if( AudioOutPin->ConnectedTo( &pConnectedPin ) == VFW_E_NOT_CONNECTED )
hr = pGB->Render(AudioOutPin);
if ( pConnectedPin != NULL )
pConnectedPin->Release();
}
}
}
//Connect the splitter audio pin
/*IPin *pPin;
CComPtr<IEnumPins> EnumPins;
ULONG fetched;
PIN_INFO pinfo;
pStreamSplitter->EnumPins(&EnumPins);
EnumPins->Reset();
EnumPins->Next(1, &pPin, &fetched);
pPin->QueryPinInfo(&pinfo);
if(fetched > 0) do
{
if(pinfo.dir == PINDIR_OUTPUT)
{
IPin* pConnectedPin = NULL;
if(pPin->ConnectedTo(&pConnectedPin) == VFW_E_NOT_CONNECTED)
hr = pGB->Render(pPin);
if(pConnectedPin != NULL) pConnectedPin->Release();
}
pPin->Release();
EnumPins->Next(1, &pPin, &fetched);
pPin->QueryPinInfo(&pinfo);
} while(fetched > 0);*/
}
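// In debug builds, register the graph in the Running Object Table so that the
// running instance can be inspected from GraphEdit ("Connect to Remote Graph").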
#ifdef _DEBUG
hr = addToRot( pGB, &dwRegisterROT);
#endif
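/* Read the format that was actually negotiated on the renderer's input pin:
   VIDEOINFOHEADER carries the bitmap dimensions, and AvgTimePerFrame
   (in 100 ns units) yields the frame rate. */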
CMediaType mediaType;
rendererPin->ConnectionMediaType(&mediaType);
VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) mediaType.pbFormat;
if ( mediaType.formattype != FORMAT_VideoInfo || pvi == NULL )
{
FreeMediaType(mediaType);
return E_FAIL;
}
FILTER_INFO filter_info;
videoSource->QueryFilterInfo(&filter_info);
if( filter_info.pGraph != NULL )
filter_info.pGraph->Release();
GUID subtype = mediaType.subtype;
format.width = pvi->bmiHeader.biWidth;
format.height = pvi->bmiHeader.biHeight;
format.fps = referenceTime2fps( pvi->AvgTimePerFrame );
format.setPixelFormat( UNKNOWN, translateMEDIASUBTYPE( mediaType.subtype ) );
FreeMediaType(mediaType);
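// When aFormat->clock is false, the reference clock is removed from the graph
// and from the individual filters, so playback is no longer throttled to real
// time and frames are delivered as fast as they can be decoded.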
if ( aFormat != NULL && !aFormat->clock )
{
CComPtr<IMediaFilter> pMediaFilter = NULL;
hr = pGB->QueryInterface(IID_IMediaFilter, (void**)&pMediaFilter);
if ( !FAILED(hr) )
{
hr = pMediaFilter->SetSyncSource(NULL);
}
hr = videoSource->SetSyncSource(NULL);
hr = pVideoRenderer->SetSyncSource(NULL);
hr = pSG->SetSyncSource(NULL);
}
CComPtr<IMediaEvent> pMediaEvent = NULL;
hr = pGB->QueryInterface(IID_IMediaEvent, (void**)&pMediaEvent);
if ( !FAILED(hr) )
{
long event;
pMediaEvent->WaitForCompletion( INFINITE, &event ); //EC_COMPLETE would signal the end of the stream
}
hr = EnableMemoryBuffer();
if ( aFormat != NULL )
dropFrames = aFormat->dropFrames;
//pMS->SetRate(2);
return hr;
}
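/* getFrame/releaseFrame expose the most recently decoded frame. mediaSample is
   presumably filled in by the sample-grabber callback in the shared DirectShow
   base code: getFrame returns a pointer into that sample's buffer, and
   releaseFrame releases the sample and, when frames are not being dropped and
   playback is not paused, lets the graph run again. */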
inline char *VideoFileDShow::getFrame( bool wait)
{
/* m_CSec.Lock();
if ( mb.size() > 0 )
{
mb.front().blocked = true;
m_CSec.Unlock();
mb.front().media_sample->GetPointer((BYTE**)&pixelBuffer);
frameCaptured = true;
return pixelBuffer;
}
m_CSec.Unlock();
return NULL;*/
if ( mediaSample != NULL )
{
mediaSample->GetPointer((BYTE**)&pixelBuffer);
frameCaptured = true;
return pixelBuffer;
}
return NULL;
}
inline void VideoFileDShow::releaseFrame()
{
if ( mediaSample != NULL )
{
pixelBuffer = NULL;
mediaSample->Release();
mediaSample = NULL;
frameCaptured = false;
if ( !dropFrames && !paused )
pMC->Run();
}
}
void VideoFileDShow::play()
{
HRESULT hr;
if ( pMC != NULL )
{
hr = pMC->Run();
if ( FAILED( hr ) )
cerr << "Failed running file " << identification.fileName << endl;
}
paused = false;
}
void VideoFileDShow::pause()
{
if ( pMC != NULL )
pMC->Pause();
paused = true;
}
void VideoFileDShow::stop()
{
stopMedia();
}
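/* Seeking: IMediaSeeking interprets positions in the current time format, so the
   format is switched to frames (or media time) just for the SetPositions call and
   then restored. The previous run/pause state is saved and reinstated afterwards. */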
void VideoFileDShow::goToFrame( int frame )
{
FILTER_STATE state;
pMC->GetState( 1000, (OAFilterState*)&state );
stop();
//Change to time format frames
if ( format.timeFormat == SECONDS )
pMS->SetTimeFormat( &TIME_FORMAT_FRAME );
LONGLONG time = static_cast<LONGLONG>( frame );
if ( pMS != NULL)
pMS->SetPositions(&time,AM_SEEKING_AbsolutePositioning ,
NULL, AM_SEEKING_NoPositioning);
//Return to media-time (seconds) format
if ( format.timeFormat == SECONDS )
pMS->SetTimeFormat( &TIME_FORMAT_MEDIA_TIME );
play();
if ( state == State_Paused || state == State_Stopped )
pause();
}
void VideoFileDShow::goToMilisecond( double milisecond )
{
FILTER_STATE state;
pMC->GetState( 1000, (OAFilterState*)&state );
stop();
//Change to time format seconds
if ( format.timeFormat == FRAMES )
pMS->SetTimeFormat( &TIME_FORMAT_MEDIA_TIME );
LONGLONG time = seconds2referenceTime( milisecond *0.001 );
if ( pMS != NULL)
pMS->SetPositions( &time, AM_SEEKING_AbsolutePositioning ,
NULL, AM_SEEKING_NoPositioning );
//Return to time format frames
if ( format.timeFormat == FRAMES )
pMS->SetTimeFormat( &TIME_FORMAT_FRAME );
play();
if ( state == State_Paused || state == State_Stopped )
pause();
}
double VideoFileDShow::getLength()
{
return videoLength;
}
double VideoFileDShow::getPosition()
{
LONGLONG position, stopTime;
if ( pMS != NULL)
{
pMS->GetPositions(&position, &stopTime );
double time;
if ( format.timeFormat == SECONDS )
{
time = referenceTime2seconds( position );
}
else
{
time = static_cast<double>( position );
}
return time;
}
return 0.0;
}