tffdecoder.cpp.svn-base

来自「ffdshow源码」· SVN-BASE 代码 · 共 1,162 行 · 第 1/3 页

SVN-BASE
1,162
字号
// Called by the base class when a pin connection completes.
// On the output side, remembers whether we connected to the Overlay Mixer
// (outOverlayMixer) before delegating to CTransformFilter.
HRESULT TffdshowDecVideo::CompleteConnect(PIN_DIRECTION direction,IPin *pReceivePin)
{
 if (direction==PINDIR_INPUT)
  {
  }
 else if (direction==PINDIR_OUTPUT)
  {
   const CLSID &out=GetCLSID(m_pOutput->GetConnected());
   outOverlayMixer=!!(out==CLSID_OverlayMixer);
  }
 return CTransformFilter::CompleteConnect(direction,pReceivePin);
}

// alloc output buffer
// Negotiates the output allocator: one buffer, sized for the post-resize
// picture at 4 bytes per pixel (worst case, e.g. RGB32).
// Fails if the allocator cannot actually give us a buffer that large.
HRESULT TffdshowDecVideo::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
 DPRINTF(_l("TffdshowDecVideo::DecideBufferSize"));
 if (m_pInput->IsConnected()==FALSE)
  return E_UNEXPECTED;

 if (!presetSettings) initPreset();
 ppropInputRequest->cBuffers=1;
 // Size the buffer for the output dimensions after any configured resize.
 TffPictBase pictOut=inpin->pictIn;
 calcNewSize(pictOut);
 ppropInputRequest->cbBuffer=pictOut.rectFull.dx*pictOut.rectFull.dy*4;
 // cbAlign 16 causes problems with the resize filter */
 //ppropInputRequest->cbAlign =1;
 ppropInputRequest->cbPrefix=0;
 ALLOCATOR_PROPERTIES ppropActual;
 HRESULT result=pAlloc->SetProperties(ppropInputRequest,&ppropActual);
 if (result!=S_OK)
  return result;
 if (ppropActual.cbBuffer<ppropInputRequest->cbBuffer)
  return E_FAIL;
 return S_OK;
}

// Worker-thread body for the (currently disabled) threaded Receive path:
// processes the sample stashed in pSampleCopy, then drops the AddRef taken
// by Receive before the thread was created.
HRESULT TffdshowDecVideo::ReceiveOnThread(void)
{
 HRESULT hr= ReceiveLatterHalf(pSampleCopy);
 pSampleCopy->Release();
 return hr;
}

// Plain thread entry trampoline for CreateThread -> ReceiveOnThread.
DWORD WINAPI ReceiveThreadEntry(TffdshowDecVideo *c)
{
 return c->ReceiveOnThread();
}

HRESULT TffdshowDecVideo::Receive(IMediaSample *pSample)
{
 // If the next filter downstream is the video renderer, then it may
 // be able to operate in DirectDraw mode which saves copying the data
 // and gives higher performance.  In that case the buffer which we
 // get from GetDeliveryBuffer will be a DirectDraw buffer, and
 // drawing into this buffer draws directly onto the display surface.
 // This means that any waiting for the correct time to draw occurs
 // during GetDeliveryBuffer, and that once the buffer is given to us
 // the video renderer will count it in its statistics as a frame drawn.
 // This means that any decision to drop the frame must be taken before
 // calling GetDeliveryBuffer.
 ASSERT(pSample);
 // If no output pin to deliver to then no point sending us data
 ASSERT(m_pOutput!=NULL);
 HRESULT threadexitcode;
 DWORD ThreadID;
 pSampleCopy= pSample;
 // NOTE(review): the threaded branch is compiled out via if(false); every
 // sample currently goes through the synchronous ReceiveLatterHalf path.
 if(false/* CPUCount()>=3 */)
  // CPUCount()>=3 : I don't have such CPU. That is not tested enough. That would work, but I guess that would not be any faster.
  {
   // Join the previous worker first; its exit code (the PREVIOUS frame's
   // result) is what gets returned for this call.
   threadexitcode= waitForReceiveThread();
   pSample->AddRef();   // released in ReceiveOnThread
   SECURITY_ATTRIBUTES sa={sizeof(SECURITY_ATTRIBUTES) ,  NULL,  TRUE};
   hReceiveThread= CreateThread(&sa, 0, (LPTHREAD_START_ROUTINE) ReceiveThreadEntry, this, 0, &ThreadID);
   return threadexitcode;
  }
 else
  {
   return ReceiveLatterHalf(pSample);
  }
}

// Second half of Receive: handles dynamic media-type changes, lateness-based
// frame skipping, one-time first-frame initialization, and finally hands the
// compressed sample to the input pin's decoder (inpin->decompress).
HRESULT TffdshowDecVideo::ReceiveLatterHalf(IMediaSample *pSample)
{
 AM_MEDIA_TYPE *pmt;
 // The source filter may dynamically ask us to start transforming from a
 // different media type than the one we're using now.  If we don't, we'll
 // draw garbage. (typically, this is a palette change in the movie,
 // but could be something more sinister like the compression type changing,
 // or even the video size changing)
 pSample->GetMediaType(&pmt);
 if (pmt!=NULL && pmt->pbFormat!=NULL)
  {
   // spew some debug output
   ASSERT(!IsEqualGUID(pmt->majortype, GUID_NULL));
   // now switch to using the new format.  I am assuming that the
   // derived filter will do the right thing when its media type is
   // switched and streaming is restarted.
   StopStreaming();
   m_pInput->CurrentMediaType() = *pmt;
   DeleteMediaType(pmt);
   // if this fails, playback will stop, so signal an error
   HRESULT hr=StartStreaming();
   if (FAILED(hr)) return abortPlayback(hr);
  }
 // Now that we have noticed any format changes on the input sample, it's OK to discard it.
 REFERENCE_TIME rtStart,rtStop;
 if (pSample->GetTime(&rtStart,&rtStop)==S_OK)
  {
   // Reduce accumulated lateness by the time span since the last sample.
   late-=ff_abs(rtStart-lastrtStart);
   lastrtStart=rtStart;
  }
 if (late && pSample->IsSyncPoint()==S_OK) late=0;
 // If we are more than 1.5s late (non-MPEG1/2 only), skip this sample and
 // drop everything until the next keyframe.
 if (!mpeg12_codec(inpin->getInCodecId2()) && late>1500*(REF_SECOND_MULT/1000))
  {
   //MSR_NOTE(m_idSkip);
   setSampleSkipped(true);
   //late=0;
   waitForKeyframe=1000;
   return S_OK;
  }
 // After a discontinuity, we need to wait for the next key frame
 if (pSample->IsDiscontinuity()==S_OK && inpin->waitForKeyframes())
  {
   DPRINTF(_l("Non-key discontinuity - wait for keyframe"));
   waitForKeyframe=100;
  }
 if (firsttransform)
  {
   // One-time setup on the very first sample: hotkeys, mouse, remote,
   // tray icon, and the fps-measurement clock.
   firsttransform=false;
   initKeys();
   initMouse();
   initRemote();
   onTrayIconChange(0,0);
   options->notifyParam(IDFF_isKeys,0);
   options->notifyParam(IDFF_isMouse,0);
   remote->onChange(0,0);
   lastTime=clock();
  }
 long srcLength;
 HRESULT hr=inpin->decompress(pSample,&srcLength);
 if (srcLength<0)
  return S_FALSE;
 bytesCnt+=srcLength;
 if (waitForKeyframe)
  waitForKeyframe--;
 if (hr==S_FALSE)
  {
   // Decoder skipped the frame; report a quality change once.
   setSampleSkipped(false);
   if (!m_bQualityChanged)
    {
     m_bQualityChanged=TRUE;
     NotifyEvent(EC_QUALITY_CHANGE,0,0);
    }
   hr=S_OK;
  }
 return hr;
}

// Joins the Receive worker thread (if any) and returns its exit code;
// S_OK when no thread was running.
STDMETHODIMP TffdshowDecVideo::waitForReceiveThread(void)
{
 DWORD threadexitcode;
 if(hReceiveThread)
  {
   WaitForSingleObject(hReceiveThread,INFINITE);
   GetExitCodeThread(hReceiveThread, &threadexitcode);
   CloseHandle(hReceiveThread);
   hReceiveThread= NULL;
  }
 else
  threadexitcode= S_OK;
 return threadexitcode;
}

// Preroll handling: an I frame clears the wait-for-keyframe state, but
// preroll samples are never delivered downstream (always S_FALSE).
STDMETHODIMP TffdshowDecVideo::deliverPreroll(int frametype)
{
 // Maybe we're waiting for a keyframe still?
 if (waitForKeyframe && (frametype&FRAME_TYPE::typemask)==FRAME_TYPE::I)
  waitForKeyframe=FALSE;
 // if so, then we don't want to pass this on to the renderer
 if (waitForKeyframe)
  DPRINTF(_l("still waiting for a keyframe"));
 return S_FALSE;
}

// Flush path: pushes an empty TffPict through deliverDecodedSample.
STDMETHODIMP TffdshowDecVideo::flushDecodedSamples(void)
{
 TffPict pict;
 return deliverDecodedSample(pict);
}

// Takes one decoded picture, repairs/synthesizes its timestamps, applies the
// configured video delay, updates frame counters and fps statistics, then
// runs it through the image filter chain (imgFilters->process).
STDMETHODIMP TffdshowDecVideo::deliverDecodedSample(TffPict &pict)
{
 // Maybe we're waiting for a keyframe still?
 if (waitForKeyframe && (pict.frametype&FRAME_TYPE::typemask)==FRAME_TYPE::I)
  waitForKeyframe=FALSE;
 // if so, then we don't want to pass this on to the renderer
 if (waitForKeyframe)
  {
   DPRINTF(_l("still waiting for a keyframe"));
   return S_FALSE;
  }
 //if (m_frame.srcLength==0) return S_FALSE;
 // Decide whether the picture's own rtStart/rtStop are trustworthy.
 HRESULT frameTimeOk=S_FALSE;
 if (mpeg12_codec(inpin->getInCodecId2()) && inpin->biIn.bmiHeader.biCompression!=FOURCC_MPEG)
  {
   if (pict.rtStart<0)
    return S_FALSE;
   frameTimeOk=S_OK;
  }
 else
  if (inpin->sourceFlags&TvideoCodecDec::SOURCE_NEROAVC && pict.rtStart!=REFTIME_INVALID && pict.rtStop==REFTIME_INVALID)
   {
    // Nero AVC sources give only a start time; synthesize the stop time.
    pict.rtStop=pict.rtStart+inpin->avgTimePerFrame;
    frameTimeOk=S_OK;
   }
 if (frameTimeOk!=S_OK)
  frameTimeOk=(pict.rtStart!=REFTIME_INVALID)?S_OK:S_FALSE;
 // pIn->GetTime(&pict.rtStart,&pict.rtStop);
 if (frameTimeOk==S_OK && pict.rtStop-pict.rtStart!=0)
  {
   // Learn the average frame duration from the first valid sample.
   if (inpin->avgTimePerFrame==0)
    inpin->avgTimePerFrame=pict.rtStop-pict.rtStart;
  }
 else
  {
   // No usable timestamps: synthesize them from the segment frame counter.
   frameTimeOk=S_OK;
   pict.rtStart=REFERENCE_TIME((segmentFrameCnt  )*inpin->avgTimePerFrame);
   pict.rtStop =REFERENCE_TIME((segmentFrameCnt+1)*inpin->avgTimePerFrame-1);
  }
 //LONGLONG mediaTime1=-1,mediaTime2=-1;
 //HRESULT mediaTimeOk=pIn->GetMediaTime(&mediaTime1,&mediaTime2);
 // Track the current frame number: prefer media time, then reference time.
 if (pict.mediatimeStart!=REFTIME_INVALID)
  currentFrame=(unsigned long)pict.mediatimeStart;
 else if (frameTimeOk==S_OK && inpin->avgTimePerFrame)
  currentFrame=long((pict.rtStart+segmentStart)/inpin->avgTimePerFrame);
 else
  currentFrame++;

 // Video delay (ms): optionally interpolated linearly between the start and
 // end delay values over the movie's duration.
 int videoDelay;
 if (moviesecs>0 && presetSettings->isVideoDelayEnd && presetSettings->videoDelay!=presetSettings->videoDelayEnd)
  {
   unsigned int msecs;
   if (SUCCEEDED(getCurrentFrameTimeMS(&msecs)))
    videoDelay=msecs*(presetSettings->videoDelayEnd-presetSettings->videoDelay)/(moviesecs*1000)+presetSettings->videoDelay;
   else
    videoDelay=presetSettings->videoDelay;
  }
 else
  videoDelay=presetSettings->videoDelay;
 if (videoDelay)
  {
   REFERENCE_TIME delay100ns=videoDelay*10000LL;   // ms -> 100ns units
   pict.rtStart+=delay100ns;
   pict.rtStop +=delay100ns;
  }
 pict.rtStart+=segmentStart;
 pict.rtStop+=segmentStart;
 // Decoding fps statistic from wall-clock time between frames.
 clock_t t=clock();
 decodingFps=(t!=lastTime)?1000*CLOCKS_PER_SEC/(t-lastTime):0;
 lastTime=t;

 if (pict.csp!=FF_CSP_NULL) decodingCsp=pict.csp;
 // Lazily query the CPU count once (cpus==-1 means "not yet queried").
 if (!cpu && cpus==-1)
  {
   cpu=new TcpuUsage;
   cpus=cpu->GetCPUCount();
   if (cpus==0) {delete cpu;cpu=NULL;};
  }
 if (!imgFilters) imgFilters=createImgFilters();
 if (wasSubtitleResetTime) imgFilters->subtitleResetTime=pict.rtStart;
 return imgFilters->process(pict,presetSettings);
}

// Joins the video delivery thread (if any) and returns its exit code;
// S_OK when no thread was running.
STDMETHODIMP TffdshowDecVideo::waitForVideoThread(void)
{
 DWORD threadexitcode;
 if(hVideoThread)
  {
   WaitForSingleObject(hVideoThread,INFINITE);
   GetExitCodeThread(hVideoThread, &threadexitcode);
   CloseHandle(hVideoThread);
   hVideoThread= NULL;
  }
 else
  threadexitcode= S_OK;
 return threadexitcode;
}

// Body of the asynchronous delivery thread: signals the semaphore so the
// caller (deliverProcessedSampleSub) can continue, then delivers the sample
// stored in dtp.pOut downstream and releases it.
STDMETHODIMP TffdshowDecVideo::Calldeliver(void)
{
 CAutoLock lck1(&m_csReceive);
 ReleaseSemaphore(hVideoSemaphore, 1, NULL);  // Let the caller(deliverProcessedSampleSub) go
 ASSERT(m_pOutput);
 ASSERT(m_pOutput->IsConnected());
 libmplayer->decCPUCount();                   // Swscaler should give one of CPU for this thread.
 HRESULT hr= m_pOutput->Deliver(dtp.pOut);
 dtp.pOut->Release();
 libmplayer->incCPUCount();
 return hr;
}

// Thread entry trampoline for CreateThread -> Calldeliver.
DWORD WINAPI TffdshowDecVideo::deliverProcessedSampleThreadEntry(TffdshowDecVideo *c)
{
 return c->Calldeliver();
}

// Delivers a processed picture downstream via deliverProcessedSampleSub.
// deliverProcessedSampleSub leaves m_csReceive locked; unless it reports
// (via IsUnlockRequired) that the delivery thread will release it, the
// unlock happens here.
STDMETHODIMP TffdshowDecVideo::deliverProcessedSample(TffPict &pict)
{
 if (pict.csp==FF_CSP_NULL)
  return S_OK;
 BOOL IsUnlockRequired= true;
 HRESULT hr= deliverProcessedSampleSub(pict, IsUnlockRequired);
 if (IsUnlockRequired)
  m_csReceive.Unlock();
 return hr;
}

// NOTE(review): this function continues past the visible end of this chunk
// (page 1/3); the remainder is documented where it appears.
STDMETHODIMP TffdshowDecVideo::deliverProcessedSampleSub(TffPict &pict, BOOL &IsUnlockRequired)
{
 DWORD ThreadID;
 HRESULT threadexitcode;
 CAutoLock lck1(&m_csReceiveProtector);
 threadexitcode=waitForVideoThread();  // This means that the return value is one of the prior frame. This might cause problem.
 m_csReceive.Lock();                   // unlocked by caller or delivery thread (see deliverProcessedSample)
 ASSERT(m_pOutput);
 ASSERT(m_pOutput->IsConnected());
 sendOnFrameMsg();

 if (presetSettings->output->hwOverlayAspect)
  pict.setDar(Rational(presetSettings->output->hwOverlayAspect>>8,256));
 if (!outdv && allowOutChange)
  {
   HRESULT hr=reconnectOutput(pict);
   if (FAILED(hr))
    return S_FALSE;//hr;
  }

 segmentFrameCnt++;
 frameCnt++;
 comptr<IMediaSample> pOut=NULL;
 HRESULT hr=initializeOutputSample(&pOut);
 if (FAILED(hr))
  return hr;
 if (!outdv && hwDeinterlace)
  if (comptrQ<IMediaSample2> pOut2=pOut)
   {
    AM_SAMPLE2_PROPERTIES outProp2;
    if (SUCCEEDED(pOut2->GetProperties(FIELD_OFFSET(AM_SAMPLE2_PROPERTIES,tStart),(PBYTE)&outProp2)))
     {
      // Set interlace information (every sample)
      outProp2.dwTypeSpecificFlags=AM_VIDEO_FLAG_INTERLEAVED_FRAME;
      if (!(pict.fieldtype&FIELD_TYPE::MASK_INT))
       outProp2.dwTypeSpecificFlags|=AM_VIDEO_FLAG_WEAVE;
      else if (pict.fieldtype&FIELD_TYPE::INT_TFF)
       outProp2.dwTypeSpecificFlags|=AM_VIDEO_FLAG_FIELD1FIRST;
      pOut2->SetProperties(FIELD_OFFSET(AM_SAMPLE2_PROPERTIES,dwStreamId),(PBYTE)&outProp2);
     }
   }
 m_bSampleSkipped=FALSE;
 // The renderer may ask us to on-the-fly to start transforming to a
 // different format.  If we don't obey it, we'll draw garbage
 AM_MEDIA_TYPE *pmtOut;

⌨️ 快捷键说明

复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?