
📄 ff_wmv9.cpp

📁 A decoder program converted from FFMPEG; compiles under VC.
💻 CPP
📖 Page 1 of 2
   if( FAILED( hr ) ) return false;
   if( dwFlags & DMO_INPUT_STREAMF_HOLDS_BUFFERS ) return false;
   //
   //copy AvgTimePerFrame from mtIn to the context
   //
   if( FORMAT_VideoInfo == pContext.mtIn.formattype )
    {
     if( 0 == pContext.rtFrameDuration )
      {
       pContext.rtFrameDuration = ((VIDEOINFOHEADER*)pContext.mtIn.pbFormat)->AvgTimePerFrame;
      }
     if( pParams.dFrameRate > 0 )
      {
       pContext.rtDecimatorDuration = (REFERENCE_TIME)(10000000.0 / pParams.dFrameRate );
      }
    }
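   //
   //allowed timestamp jitter: MAX_JITTER_PERCENT of the frame duration plus the
   //frame duration itself; JITTER_OFFSET is a fixed 5 ms slack (REFERENCE_TIME
   //is in 100-ns units) used by the frame decimator in FeedFrame
   //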
#define   MAX_JITTER_PERCENT       20
#define   JITTER_OFFSET            50000

   pContext.rtMaxJitter = ( pContext.rtFrameDuration * MAX_JITTER_PERCENT ) / 100;
   pContext.rtMaxJitter += pContext.rtFrameDuration;
   pContext.rtDecimatorTimeStamp=0;

   nOutFrames=0;
   writing=true;
   return true;
  }
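 // getExtradata: return a pointer to the codec private data stored after the
 // VIDEOINFOHEADER in the output media type, or NULL/0 when there is none.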
 void __stdcall getExtradata(const void* *ptr,size_t *len)
  {
   unsigned int dif;
   if (pContext.mtOut.cbFormat==0 || (dif=pContext.mtOut.cbFormat-sizeof(VIDEOINFOHEADER))==0)
    {
      if (ptr) *ptr=NULL;
      *len=0;
    }
   else
    {
     if (ptr) *ptr=pContext.mtOut.pbFormat+sizeof(VIDEOINFOHEADER);
     *len=dif;
    }
  };
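 // FeedData: validate the arguments, check that the DMO will accept input,
 // wrap the caller's data in the reusable media buffer and submit it with
 // IMediaObject::ProcessInput together with its timestamp and duration.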
 HRESULT FeedData( IMediaObject         *pDMO,
                   BYTE                 *pbData,
                   LONG                 cbData,
                   REFERENCE_TIME       rtStart,
                   REFERENCE_TIME       rtLength,
                   CHandlingMediaBuffer *pMediaBuffer )
  {
   HRESULT hr  = S_OK;
   DWORD   dwFlags = DMO_INPUT_DATA_BUFFERF_SYNCPOINT  |
                     DMO_INPUT_DATA_BUFFERF_TIME       |
                     DMO_INPUT_DATA_BUFFERF_TIMELENGTH;
   DWORD   dwStatus;

   if( NULL == pDMO         ||
       NULL == pbData       ||
       NULL == pMediaBuffer ||
       0    >= cbData   )
    {
     return ( E_INVALIDARG );
    }

   hr = pDMO->GetInputStatus(0, &dwStatus);
   if (FAILED(hr))
    {
     return ( hr );
    }

   if (!(dwStatus & DMO_INPUT_STATUSF_ACCEPT_DATA))
    {
     return ( E_DMO_NOTACCEPTING );
    }
   //
   //set the params for the input buffer
   //
   pMediaBuffer->set_Buffer( pbData,
                             cbData,
                             cbData );

   return pDMO->ProcessInput(0, pMediaBuffer, dwFlags, rtStart, rtLength );
  }
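  // FeedFrame: derive the input timestamp and duration from the frame number
  // and the configured fps; when a decimator frame rate is active, drop frames
  // that arrive before the next decimator deadline (minus JITTER_OFFSET slack),
  // otherwise pass the frame on to FeedData.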
  HRESULT FeedFrame( LONG nFrame)
  {

   REFERENCE_TIME  rtStartIn;
   REFERENCE_TIME  rtLenIn;

   //DWORD   dwStatus    = 0;
   HRESULT hr          = S_OK;

   //
   //Compute the time stamps and decide if the frame should be decimated
   //
   rtLenIn = REFERENCE_TIME(10000000.0 * (1/cfg.fps));
   rtStartIn = REFERENCE_TIME(nFrame * 10000000.0 * (1/cfg.fps));
   if( pContext.rtDecimatorDuration > 0 )
    {
     if( rtStartIn >= pContext.rtDecimatorTimeStamp - ( JITTER_OFFSET ) )
      {
       pContext.rtDecimatorTimeStamp = rtStartIn + pContext.rtDecimatorDuration;
      }
     else
      {
       return ( S_FALSE );   // this frame is dropped
      }
    }

   hr = FeedData( pContext.pDMO,
                  pContext.input,
                  pContext.inputSize,
                  rtStartIn,
                  rtLenIn,
                  pContext.pInputBuffer );

   return ( hr );
  }
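 // PullData: point the output media buffer at the caller's memory and call
 // IMediaObject::ProcessOutput on stream 0; on success return the per-buffer
 // status flags and, when requested, the output timestamp and length.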
 HRESULT PullData( IMediaObject         *pDMO,
                   BYTE                 *pbData,
                   LONG                 cbData,
                   DWORD                *pdwStatus,
                   REFERENCE_TIME       *prtStart,
                   REFERENCE_TIME       *prtLength,
                   CHandlingMediaBuffer *pMediaBuffer )
  {
   DMO_OUTPUT_DATA_BUFFER OutputBufferStructs[1];
   HRESULT                hr             = S_OK;
   DWORD                  dwStatus       = 0;
   //DWORD                  cbProduced     = NULL;

   if( NULL == pDMO         ||
       NULL == pMediaBuffer ||
       NULL == pdwStatus    ||
       NULL == pbData       ||
       0    >= cbData       )
    {
     return ( E_INVALIDARG );
    }

   *pdwStatus = 0;

   pMediaBuffer->set_Buffer( pbData, 0, cbData );

   OutputBufferStructs[0].pBuffer = pMediaBuffer;
   OutputBufferStructs[0].dwStatus = 0;

   hr = pDMO->ProcessOutput(0, 1, OutputBufferStructs, &dwStatus);
   if( S_OK != hr )
    {
     return ( hr );
    }

   *pdwStatus = OutputBufferStructs[0].dwStatus;

   if( NULL != prtStart )
    {
     *prtStart = OutputBufferStructs[0].rtTimestamp;
    }
   if( NULL != prtLength )
    {
     *prtLength = OutputBufferStructs[0].rtTimelength;
    }

   return ( hr );
  }
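 // writeAVI: copy the source picture into the context input buffer, feed it to
 // the DMO and pull one compressed sample into dst. Returns the sample length,
 // negated when the sample is a syncpoint (keyframe), or 0 if nothing was produced.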
 int writeAVI(unsigned int framenum,int csp,const unsigned char * const src[4],const stride_t srcStride[4],void *dst)
  {
   if (csp&FF_CSP_FLAGS_YUV_ADJ)
    memcpy(pContext.input,src[0],(csp&FF_CSP_420P)?3*cfg.width*cfg.height/2:srcStride[0]*cfg.height);
   else
    {
     int Bpp=csp_getInfo(csp)->Bpp;
     for (int y=0;y<cfg.height;y++)
      memcpy(pContext.input+cfg.width*Bpp*y,src[0]+srcStride[0]*y,cfg.width*Bpp);
    }

   HRESULT hr=FeedFrame(framenum);
   if (S_FALSE==hr || FAILED( hr )) return 0;
   DWORD           dwVideoStatus;
   REFERENCE_TIME  rtStartOut;
   REFERENCE_TIME  rtLenOut;

   hr = PullData( pContext.pDMO,
                  (BYTE*)dst,
                  cbVideoOut,
                  &dwVideoStatus,
                  &rtStartOut,
                  &rtLenOut,
                  pContext.pOutputBuffer );
   if( S_OK != hr )
    return 0;
   DWORD ulLength;
   hr=pContext.pOutputBuffer->GetBufferAndLength( NULL, &ulLength );
   if( S_OK != hr || 0 == ulLength )
    return 0;
   /*
   while( nOutFrames > 0 && rtStartOut - pContext.rtTimeStamp >= pContext.rtMaxJitter )
    {
     hr = AVIStreamWrite( pVideoContext->pAviOutStream,
                          (*pnOutFrames)++,
                          1,
                          NULL,
                          0,
                          0,
                          NULL,
                          NULL );
     if( FAILED( hr ) )
      {
       break;
      }
     pVideoContext->rtTimeStamp += pVideoContext->rtFrameDuration;
    }
   if( FAILED( hr ) )
    {
     break;
    }*/

   return (dwVideoStatus&DMO_OUTPUT_DATA_BUFFERF_SYNCPOINT)?-ulLength:ulLength;
  }
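 // endAVI: release the DMO, free the format blocks of both media types, and
 // free the scratch input/output buffers and their media buffer wrappers.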
 void endAVI(void)
  {
   if( NULL != pContext.pDMO )
    {
     SAFERELEASE( pContext.pDMO );
    }
   if( NULL != pContext.mtOut.pbFormat )
    {
     CoTaskMemFree( pContext.mtOut.pbFormat );
     memset( &pContext.mtOut, 0, sizeof( AM_MEDIA_TYPE ) );
    }
   if( NULL != pContext.mtIn.pbFormat )
    {
     CoTaskMemFree( pContext.mtIn.pbFormat );
     memset( &pContext.mtIn, 0, sizeof( AM_MEDIA_TYPE ) );
    }
   if (pContext.input ) free(pContext.input ); //SAFEDELETE ( pContext.input );
   pContext.input =NULL;
   if (pContext.output) free(pContext.output); //SAFEDELETE ( pContext.output);
   pContext.output=NULL;
   SAFEDELETES( pContext.pInputBuffer );
   SAFEDELETES( pContext.pOutputBuffer );
  };
private:
 REFERENCE_TIME decRtLength;
 int decmult;
public:
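 // findCodec: linear search of the registered codec table for a matching FOURCC.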
 virtual const Tff_wmv9codecInfo* __stdcall findCodec(FOURCC fcc)
  {
   for (vector<Tff_wmv9codecInfo>::const_iterator c=codecs.begin();c!=codecs.end();c++)
    if (c->fcc==fcc)
     return &*c;
   return NULL;
  }
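 // decStart: prepare decoding. Build the compressed input media type (a
 // VIDEOINFOHEADER followed by the codec extradata), build the uncompressed
 // output type (RGB24 for MSS1/MSS2, YV12 otherwise), pick the decoder DMO
 // CLSID from the FOURCC, create the DMO, set both stream types and allocate
 // the output picture buffer.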
 virtual bool __stdcall decStart(FOURCC fcc,double fps,unsigned int dx,unsigned int dy,const void *extradata,size_t extradata_len,int *csp)
  {
   cfg.avioutput=true;
   cfg.width=dx;cfg.height=dy;cfg.fps=fps;
   decRtLength=REFERENCE_TIME(10000000.0/cfg.fps);
   HRESULT  hr=S_OK;
   vector<Tff_wmv9codecInfo>::const_iterator c;
   for (c=codecs.begin();c!=codecs.end();c++)
    if (c->fcc==fcc)
     break;
   pContext.pDMO=NULL;
   VIDEOINFOHEADER *vih=(VIDEOINFOHEADER*)CoTaskMemAlloc(sizeof(VIDEOINFOHEADER)+extradata_len);
   vih->rcSource.left=0;vih->rcSource.top=0;vih->rcSource.right=dx;vih->rcSource.bottom=dy;
   vih->rcTarget=vih->rcSource;
   vih->dwBitRate=DWORD(cfg.fps*dx*dy);
   vih->dwBitErrorRate=0;
   vih->AvgTimePerFrame=decRtLength;
   vih->bmiHeader.biSize=DWORD(sizeof(vih->bmiHeader)+extradata_len);
   vih->bmiHeader.biWidth=dx;
   vih->bmiHeader.biHeight=dy;
   vih->bmiHeader.biPlanes=1;
   vih->bmiHeader.biBitCount=24;
   vih->bmiHeader.biCompression=c->fcc;
   vih->bmiHeader.biSizeImage=3*dx*dy;
   vih->bmiHeader.biXPelsPerMeter=0;
   vih->bmiHeader.biYPelsPerMeter=0;
   vih->bmiHeader.biClrUsed=0;
   vih->bmiHeader.biClrImportant=0;
   memcpy((unsigned char*)(vih+1),extradata,extradata_len);
   pContext.pInputBuffer=new CHandlingMediaBuffer;
   pContext.mtIn.majortype=MEDIATYPE_Video;
   pContext.mtIn.subtype=c->mediatype;
   pContext.mtIn.bFixedSizeSamples=FALSE;
   pContext.mtIn.bTemporalCompression=TRUE;
   pContext.mtIn.lSampleSize=0;
   pContext.mtIn.formattype=FORMAT_VideoInfo;
   pContext.mtIn.pUnk=NULL;
   pContext.mtIn.cbFormat=DWORD(sizeof(VIDEOINFOHEADER)+extradata_len);
   pContext.mtIn.pbFormat=(unsigned char*)vih;

   vih=(VIDEOINFOHEADER*)CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
   vih->rcSource.left=0;vih->rcSource.top=0;vih->rcSource.right=dx;vih->rcSource.bottom=dy;
   vih->rcTarget=vih->rcSource;
   vih->dwBitRate=DWORD(cfg.fps*3*dx*dy/2);
   vih->dwBitErrorRate=0;
   vih->AvgTimePerFrame=decRtLength;
   vih->bmiHeader.biSize=sizeof(vih->bmiHeader);
   vih->bmiHeader.biWidth=dx;
   vih->bmiHeader.biHeight=dy;
   vih->bmiHeader.biPlanes=1;
   switch (fcc)
    {
     case WMCFOURCC_MSS1:
     case WMCFOURCC_MSS2:
      vih->bmiHeader.biBitCount=24;
      vih->bmiHeader.biCompression=0;
      pContext.mtOut.subtype=MEDIASUBTYPE_RGB24;
      vih->bmiHeader.biSizeImage=3*dx*dy;
      *csp=FF_CSP_RGB24|FF_CSP_FLAGS_VFLIP;
      decmult=3;
      break;
     default:
      vih->bmiHeader.biBitCount=12;
      vih->bmiHeader.biCompression=WMCFOURCC_YV12;
      pContext.mtOut.subtype=MEDIASUBTYPE_YV12;
      vih->bmiHeader.biSizeImage=3*dx*dy/2;
      *csp=FF_CSP_420P|FF_CSP_FLAGS_YUV_ADJ;
      decmult=1;
      break;
    }
   vih->bmiHeader.biXPelsPerMeter=0;
   vih->bmiHeader.biYPelsPerMeter=0;
   vih->bmiHeader.biClrUsed=0;
   vih->bmiHeader.biClrImportant=0;
   pContext.pOutputBuffer=new CHandlingMediaBuffer;
   pContext.mtOut.majortype=MEDIATYPE_Video;
   pContext.mtOut.bFixedSizeSamples=TRUE;
   pContext.mtOut.bTemporalCompression=FALSE;
   pContext.mtOut.lSampleSize=vih->bmiHeader.biSizeImage;
   pContext.mtOut.formattype=FORMAT_VideoInfo;
   pContext.mtOut.pUnk=NULL;
   pContext.mtOut.cbFormat=sizeof(VIDEOINFOHEADER);
   pContext.mtOut.pbFormat=(unsigned char*)vih;

   CLSID decid;
   switch (fcc)
    {
     case WMCFOURCC_MSS1:
     case WMCFOURCC_MSS2:
      decid=CLSID_CMSSCDecMediaObject;
      break;
     case WMCFOURCC_WMVA:
      decid=CLSID_CWMVDecMediaObject2;
      break;
     default:
     case WMCFOURCC_WMV1:
     case WMCFOURCC_WMV2:
     case WMCFOURCC_WMV3:
     case WMCFOURCC_WVC1:
     case WMCFOURCC_WMVP:
     case WMCFOURCC_WVP2:
      decid=CLSID_CWMVDecMediaObject;
      break;
    }
   hr = CoCreateInstance( decid,
                          NULL,
                          CLSCTX_INPROC_SERVER,
                          IID_IMediaObject,
                          (void**)&pContext.pDMO);
   if (FAILED(hr)) return false;
   hr = pContext.pDMO->SetInputType(0, &pContext.mtIn, 0);
   if (FAILED(hr)) return false;
   hr = pContext.pDMO->SetOutputType(0, &pContext.mtOut, 0);
   if (FAILED(hr)) return false;
   pContext.output=(unsigned char*)malloc(pContext.outputSize=vih->bmiHeader.biSizeImage);
   return true;
  }
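 // decompress: submit one compressed frame with ProcessInput and immediately
 // drain one decoded picture into pContext.output with ProcessOutput; the
 // picture is exposed through *dst/*dstStride and the return value is the
 // number of source bytes consumed.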
 virtual size_t __stdcall decompress(const unsigned char *src,size_t srcLen,unsigned char* *dst,stride_t *dstStride)
  {
   *dst=NULL;
   HRESULT hr;
   DWORD dwStatus;
   hr = pContext.pDMO->GetInputStatus(0, &dwStatus);
   if (FAILED(hr)) return 0;
   if (!(dwStatus & DMO_INPUT_STATUSF_ACCEPT_DATA)) return 0;
   pContext.pInputBuffer->set_Buffer((BYTE*)src, (DWORD)srcLen, (DWORD)srcLen );
   DWORD dwFlags = DMO_INPUT_DATA_BUFFERF_SYNCPOINT| DMO_INPUT_DATA_BUFFERF_TIME|DMO_INPUT_DATA_BUFFERF_TIMELENGTH;
   REFERENCE_TIME rtStart=0;
   hr=pContext.pDMO->ProcessInput(0, pContext.pInputBuffer, dwFlags, rtStart, decRtLength );
   if (S_FALSE==hr || FAILED( hr )) return srcLen;

   pContext.pOutputBuffer->set_Buffer( pContext.output, 0, pContext.outputSize );
   DMO_OUTPUT_DATA_BUFFER OutputBufferStructs[1];
   OutputBufferStructs[0].pBuffer = pContext.pOutputBuffer;
   OutputBufferStructs[0].dwStatus = 0;

   hr = pContext.pDMO->ProcessOutput(0, 1, OutputBufferStructs, &dwStatus);
   *dst=pContext.output;
   *dstStride=cfg.width*decmult;
   return srcLen;
  }
};

//===========================================================================================
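// Exported C entry points: create and destroy the decoder instance and report
// the module version string.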
extern "C" Iff_wmv9* __stdcall createWmv9(void)
{
 return new Tff_wmv9;
}
extern "C" void __stdcall destroyWmv9(Iff_wmv9 *self)
{
 if (self) delete (Tff_wmv9*)self;
}
extern "C" void __stdcall getVersion(char *ver,const char* *license)
{
 strcpy(ver,"2.7a, build date "__DATE__" "__TIME__" ("COMPILER COMPILER_X64")");
 *license="";
}
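
// A minimal usage sketch (assuming the Iff_wmv9 interface declares the virtual
// methods overridden above; extradata/frameData stand in for the caller's
// buffers and error handling is omitted):
//
//   Iff_wmv9 *dec=createWmv9();
//   int csp;
//   if (dec->decStart(WMCFOURCC_WMV3,25.0,640,480,extradata,extradata_len,&csp))
//    {
//     unsigned char *pic;stride_t stride;
//     dec->decompress(frameData,frameLen,&pic,&stride);
//    }
//   destroyWmv9(dec);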
