tffdecoder.cpp.svn-base

来自「ffdshow源码」· SVN-BASE 代码 · 共 1,162 行 · 第 1/3 页

SVN-BASE
1,162
字号
 pOut->GetMediaType(&pmtOut); if (pmtOut!=NULL && pmtOut->pbFormat!=NULL)   {   // spew some debug output   ASSERT(!IsEqualGUID(pmtOut->majortype, GUID_NULL));   // now switch to using the new format.  I am assuming that the   // derived filter will do the right thing when its media type is   // switched and streaming is restarted.   StopStreaming();   m_pOutput->CurrentMediaType() = *pmtOut;   DeleteMediaType(pmtOut);   hr = StartStreaming();   if (SUCCEEDED(hr))     {     // a new format, means a new empty buffer, so wait for a keyframe     // before passing anything on to the renderer.     // !!! a keyframe may never come, so give up after 30 frames     DPRINTF(_l("Output format change /*means we must wait for a keyframe*/"));     //waitForKeyframe = 30;    }   else // if this fails, playback will stop, so signal an error    {     //  Must release the sample before calling AbortPlayback     //  because we might be holding the win16 lock or     //  ddraw lock     abortPlayback(hr);     return hr;    }  } AM_MEDIA_TYPE *mtOut=NULL; pOut->GetMediaType(&mtOut); if (mtOut!=NULL)  {   hr=setOutputMediaType(*mtOut);   DeleteMediaType(mtOut);   if (hr!=S_OK)     return hr;  } int sync=(pict.frametype&FRAME_TYPE::typemask)==FRAME_TYPE::I?TRUE:FALSE; pOut->SetSyncPoint(sync); if (outOverlayMixer)  pOut->SetDiscontinuity(TRUE);  REFERENCE_TIME rtStart=pict.rtStart-segmentStart; if (rtStart!=REFTIME_INVALID)  {   REFERENCE_TIME rtStop=pict.rtStop-segmentStart;   pOut->SetTime(&rtStart,&rtStop);  }  if (pict.mediatimeStart!=REFTIME_INVALID)  pOut->SetMediaTime(&pict.mediatimeStart,&pict.mediatimeStop); unsigned char *dst; if (pOut->GetPointer(&dst)!=S_OK)   return S_FALSE; LONG dstSize=pOut->GetSize();  HRESULT cr=imgFilters->convertOutputSample(pict,m_frame.dstColorspace,&dst,&m_frame.dstStride,dstSize,presetSettings->output); pOut->SetActualDataLength(cr==S_FALSE?dstSize:m_frame.dstSize); if(presetSettings->multiThread && CPUcount()>=2 && m_State == State_Running && 
m_pOutput->IsOldVideoRenderer()==false)  {   // create thread for video rendering   SECURITY_ATTRIBUTES sa={sizeof(SECURITY_ATTRIBUTES) ,  NULL,  TRUE};   dtp.pOut= pOut;   dtp.pOut->AddRef();   hVideoThread= CreateThread(&sa, 0, (LPTHREAD_START_ROUTINE) deliverProcessedSampleThreadEntry, this, 0, &ThreadID);   m_csReceive.Unlock();                           // hand m_csReceive to Calldeliver. No other threads can hold it because we are holding m_csReceiveProtector.   IsUnlockRequired= false;   WaitForSingleObject(hVideoSemaphore,INFINITE);  // wait untill Calldeliver grabs m_csReceive   return threadexitcode;  } else  {   return m_pOutput->Deliver(pOut);  }}HRESULT TffdshowDecVideo::onGraphRemove(void){ if (videoWindow) {videoWindow=NULL;wasVideoWindow=false;} if (basicVideo) {basicVideo=NULL;wasBasicVideo=false;} if (imgFilters) delete imgFilters;imgFilters=NULL; return TffdshowDec::onGraphRemove();}STDMETHODIMP TffdshowDecVideo::Run(REFERENCE_TIME tStart){ DPRINTF(_l("TffdshowDecVideo::Run")); if (!wasVideoWindow)  {   wasVideoWindow=true;   if (SUCCEEDED(m_pGraph->QueryInterface(IID_IVideoWindow,(void**)&videoWindow)))    videoWindow->Release();  } if (!wasBasicVideo)  {   wasBasicVideo=true;   if (SUCCEEDED(m_pGraph->QueryInterface(IID_IBasicVideo,(void**)&basicVideo)))    basicVideo->Release();  } return CTransformFilter::Run(tStart);}STDMETHODIMP TffdshowDecVideo::Stop(void){ if (videoWindow) {videoWindow=NULL;wasVideoWindow=false;} if (basicVideo) {basicVideo=NULL;wasBasicVideo=false;} return CTransformFilter::Stop();}void TffdshowDecVideo::lockReceive(void){ m_csReceiveProtector.Lock(); m_csReceive.Lock();} void TffdshowDecVideo::unlockReceive(void){ m_csReceive.Unlock(); m_csReceiveProtector.Unlock();} HRESULT TffdshowDecVideo::NewSegment(REFERENCE_TIME tStart,REFERENCE_TIME tStop,double dRate){ DPRINTF(_l("TffdshowDecVideo::NewSegment")); segmentStart=tStart; segmentFrameCnt=0; for (size_t i=0;i<textpins.size();i++)  if (textpins[i]->needSegment)    
textpins[i]->NewSegment(tStart,tStop,dRate); late=lastrtStart=0; frameCnt=0;bytesCnt=0; return TffdshowDec::NewSegment(tStart,tStop,dRate);}STDMETHODIMP TffdshowDecVideo::findOverlayControl(IMixerPinConfig2* *overlayPtr){ if (!overlayPtr) return E_POINTER; *overlayPtr=NULL;  if (!m_pGraph) return E_UNEXPECTED; return searchPinInterface(m_pGraph,IID_IMixerPinConfig2,(IUnknown**)overlayPtr)?S_OK:S_FALSE;}struct TvmrInterfaceCmp{private: const IID &iid;public: mutable comptr<IVMRMixerControl9> vmr9;mutable int id; TvmrInterfaceCmp(const IID &Iiid):iid(Iiid),vmr9(NULL),id(-1) {}  bool operator()(IBaseFilter *f,IPin *ipin) const  {   if (FAILED(f->QueryInterface(iid,(void**)&vmr9)) || vmr9==NULL)    return false;   comptr<IEnumPins> epi;   if (f->EnumPins(&epi)==S_OK)    {     epi->Reset();id=0;     for (comptr<IPin> bpi;epi->Next(1,&bpi,NULL)==S_OK;bpi=NULL,id++)      {       comptr<IPin> cpin;bpi->ConnectedTo(&cpin);       if (ipin==cpin)        return true;      }    }   return false;   }};STDMETHODIMP TffdshowDecVideo::findOverlayControl2(IhwOverlayControl* *overlayPtr){ if (!overlayPtr) return E_POINTER; *overlayPtr=NULL;  if (m_pGraph)  {   comptr<IMixerPinConfig2> overlay;   if (searchPinInterface(m_pGraph,IID_IMixerPinConfig2,(IUnknown**)&overlay) && overlay)    {     (*overlayPtr=new ThwOverlayControlOverlay(overlay))->AddRef();     return S_OK;    }   else     {     TvmrInterfaceCmp vmr9comp(IID_IVMRMixerControl9);     if (searchPrevNextFilter(PINDIR_OUTPUT,m_pOutput,m_pOutput,NULL,vmr9comp))      {       (*overlayPtr=new ThwOverlayControlVMR9(vmr9comp.vmr9,vmr9comp.id))->AddRef();       return S_OK;      }     }   }   (*overlayPtr=new ThwOverlayControlBase)->AddRef(); return S_FALSE;}int TffdshowDecVideo::GetPinCount(void){ return int(2+textpins.size()+(textpins.size()==textpins.getNumConnectedInpins()?1:0));}CBasePin* TffdshowDecVideo::GetPin(int n){ if (n==0)  return m_pInput; else if (n==1)  return m_pOutput; else  {    n-=2;   if (n<(int)textpins.size())  
  return textpins[n];   else    {      wchar_t name[50];     if (n==0)      swprintf(name,L"In Text");     else       swprintf(name,L"In Text %i",n+1);     HRESULT phr=0;     TtextInputPin *textpin=new TtextInputPin(this,&phr,name,n+1);     if (FAILED(phr)) return NULL;     textpins.push_back(textpin);     return textpin;    }   }}STDMETHODIMP TffdshowDecVideo::FindPin(LPCWSTR Id,IPin **ppPin){ CheckPointer(ppPin,E_POINTER); if (lstrcmpW(Id,m_pInput->Name())==0)   *ppPin=m_pInput; else if (lstrcmpW(Id,m_pOutput->Name())==0)   *ppPin=m_pOutput; else   *ppPin=textpins.find(Id);   if (*ppPin)  {    (*ppPin)->AddRef();   return S_OK;  }  else   return VFW_E_NOT_FOUND;}HRESULT TffdshowDecVideo::reconnectOutput(const TffPict &newpict){ if (newpict.rectFull!=oldRect || newpict.rectFull.sar!=oldRect.sar)  {   CMediaType &mt=m_pOutput->CurrentMediaType();   BITMAPINFOHEADER *bmi=NULL;   if (mt.formattype==FORMAT_VideoInfo)    {     VIDEOINFOHEADER *vih=(VIDEOINFOHEADER*)mt.Format();     SetRect(&vih->rcSource,0,0,newpict.rectFull.dx,newpict.rectFull.dy);     SetRect(&vih->rcTarget,0,0,newpict.rectFull.dx,newpict.rectFull.dy);     bmi=&vih->bmiHeader;     //bmi->biXPelsPerMeter = m_win * m_aryin;     //bmi->biYPelsPerMeter = m_hin * m_arxin;    }   else if (mt.formattype==FORMAT_VideoInfo2)    {     VIDEOINFOHEADER2* vih=(VIDEOINFOHEADER2*)mt.Format();     SetRect(&vih->rcSource,0,0,newpict.rectFull.dx,newpict.rectFull.dy);     SetRect(&vih->rcTarget,0,0,newpict.rectFull.dx,newpict.rectFull.dy);     bmi=&vih->bmiHeader;     setVIH2aspect(vih,newpict.rectFull,presetSettings->output->hwOverlayAspect);    }   bmi->biWidth=newpict.rectFull.dx;   bmi->biHeight=newpict.rectFull.dy;   bmi->biSizeImage=newpict.rectFull.dx*newpict.rectFull.dy*bmi->biBitCount>>3;   HRESULT hr=m_pOutput->GetConnected()->QueryAccept(&mt);   if (FAILED(hr=m_pOutput->GetConnected()->ReceiveConnection(m_pOutput, &mt)))    return hr;   comptr<IMediaSample> pOut;   if 
(SUCCEEDED(m_pOutput->GetDeliveryBuffer(&pOut,NULL,NULL,0)))    {     AM_MEDIA_TYPE *opmt;     if (SUCCEEDED(pOut->GetMediaType(&opmt)) && opmt)      {       CMediaType omt=*opmt;       m_pOutput->SetMediaType(&omt);       DeleteMediaType(opmt);      }     else // stupid overlay mixer won't let us know the new pitch...      {       long size=pOut->GetSize();       m_frame.dstStride=bmi->biWidth=size/bmi->biHeight*8/bmi->biBitCount;      }    }   // some renderers don't send this   NotifyEvent(EC_VIDEO_SIZE_CHANGED,MAKELPARAM(newpict.rectFull.dx,newpict.rectFull.dy),0);   oldRect=newpict.rectFull;   return S_OK;  } return S_FALSE;}HRESULT TffdshowDecVideo::AlterQuality(Quality q){ late=q.Late; return S_FALSE;}HRESULT TffdshowDecVideo::initializeOutputSample(IMediaSample **ppOutSample){ // default - times are the same AM_SAMPLE2_PROPERTIES * const pProps=m_pInput->SampleProps(); DWORD dwFlags=m_bSampleSkipped?AM_GBF_PREVFRAMESKIPPED:0; // This will prevent the image renderer from switching us to DirectDraw // when we can't do it without skipping frames because we're not on a // keyframe.  
If it really has to switch us, it still will, but then we // will have to wait for the next keyframe if (!(pProps->dwSampleFlags&AM_SAMPLE_SPLICEPOINT))  dwFlags|=AM_GBF_NOTASYNCPOINT; //ASSERT(m_pOutput->m_pAllocator != NULL); IMediaSample *pOutSample; HRESULT hr=m_pOutput->GetDeliveryBuffer(&pOutSample,                                         pProps->dwSampleFlags&AM_SAMPLE_TIMEVALID?&pProps->tStart:NULL,                                         pProps->dwSampleFlags&AM_SAMPLE_STOPVALID?&pProps->tStop :NULL,                                         dwFlags); if (FAILED(hr))  return hr; *ppOutSample=pOutSample; ASSERT(pOutSample); setPropsTime(pOutSample,pProps->tStart,pProps->tStop,pProps,&m_bSampleSkipped); return S_OK;}void TffdshowDecVideo::setSampleSkipped(bool sendSkip){ DPRINTF(_l("dropframe")); m_bSampleSkipped=TRUE; if (sendSkip && inpin) inpin->setSampleSkipped();}#ifdef OSDTIMETABALE/* Usage * * OSDTIMETABALE is defined in Tinfo.h. * Enable OSDTIMETABALE. * * This item is used to reserch time table on multithreading. * * Call OSDtimeStartSampling to start sampling. * Call OSDtimeEndSampling or OSDtimeEndSamplingMax at the end of sampling. * Open ffdshow dialog box and see Info & debug. * */void TffdshowDecVideo::OSDtimeStartSampling(void){ m_pClock->GetTime(&OSDtime1);}void TffdshowDecVideo::OSDtimeEndSampling(void){ m_pClock->GetTime(&OSDtime2); if(/*OSDtime2>OSDlastdisplayed+5000000 && OSDtime2!=OSDtime1*/true) {  OSDtime3=OSDtime2-OSDtime1;  OSDlastdisplayed=OSDtime2; }}void TffdshowDecVideo::OSDtimeEndSamplingMax(void){ m_pClock->GetTime(&OSDtime2); if(OSDtime2-OSDtime1>OSDtimeMax)  OSDtimeMax= OSDtime2-OSDtime1; if(OSDtime2>OSDlastdisplayed+5000000 && OSDtime2!=OSDtime1) {  OSDtime3=OSDtimeMax;  OSDtimeMax=0;  OSDlastdisplayed=OSDtime2; }}#endif

⌨️ 快捷键说明

复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?