
sesenc.cpp
jpeg and mpeg codec source code (C++)
Page 1 of 5
				 / (m_iLastFrame - m_iFirstFrame + 1)),
				m_rgvolmd [BASE_LAYER][iVOrelative].dFrameHz
			);
		}

		// algorithm for generation of IPB sequences with arbitrary shape
		//   (transparency allows for possible skipped frames)
		//   P prediction across skips is not allowed

#define DUMP_CURR	0
#define DUMP_PREV	1
#define DUMP_NONE	2
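		// The DUMP_* codes are passed to encodeVideoObject() and apparently select
		// which reconstructed frame gets written to the output files after coding:
		// the VOP just coded (DUMP_CURR), the previously cached reference frame
		// (DUMP_PREV), or nothing (DUMP_NONE). Reference frames are held back so
		// that B-VOPs, coded after but displayed before the next reference, still
		// reach the output in display order.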

		Int iRefFrame;
		Int iDT = volmd.iTemporalRate;
		Int iDT_enhn = volmd_enhn.iTemporalRate; // added by Sharp (98/2/12)
		Int iRefInterval = volmd.iBbetweenP + 1;
		Int iPCount;
		Bool bObjectExists;
		const CVOPU8YUVBA* pvopcBaseQuant = NULL;
		Int iEcount = 0; // added by Sharp (98/2/12)
		Bool bCachedRefDump = FALSE;
		Bool bCachedRefCoded = TRUE;
		Bool bPrevObjectExists = FALSE; // added by Sharp (99/1/27)
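		// State used by the frame-ordering loops below (meanings inferred from use):
		//   iDT             - step in source frame numbers between coded base-layer frames
		//   iDT_enhn        - step for the temporal-scalability enhancement layer
		//   iRefInterval    - coded frames between references (iBbetweenP B-VOPs plus one)
		//   iPCount         - P-VOPs still to code before the next I-VOP
		//   bCachedRefDump  - a reference frame is being held back for re-ordered output
		//   bCachedRefCoded - whether that cached reference was actually coded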
		for(iRefFrame = m_iFirstFrame; iRefFrame <= m_iLastFrame; iRefFrame += iDT)
		{
			// encode initial I frame or non-coded sequence
			if(rgpvoenc [BASE_LAYER] -> skipTest(iRefFrame,IVOP)) // rate control
				continue;

			//encode GOV header added by SONY 980212
			// moved down slightly by swinder 980219
			//CAUTION:I don't know how GOV header is encoded in sprite mode
			// re-done by swinder 980511
			// gov header is output every n output frames, where n is iGOVperiod
			if (volmd.iGOVperiod != 0 
				&& ((iRefFrame-m_iFirstFrame) % (volmd.iGOVperiod * volmd.iTemporalRate)) == 0)
			{
				rgpvoenc [BASE_LAYER] -> codeGOVHead (iRefFrame - m_iFirstFrame);
				rgpostrm [BASE_LAYER]->write (rgpvoenc [BASE_LAYER]->pOutStream ()->str (),
					rgpvoenc [BASE_LAYER]->pOutStream ()->pcount ());
			}
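			// A GOV header is written once every volmd.iGOVperiod output frames,
			// i.e. every iGOVperiod * iTemporalRate source frames counted from
			// m_iFirstFrame (e.g. iGOVperiod = 10, iTemporalRate = 3 gives source
			// offsets 0, 30, 60, ...); the offset passed to codeGOVHead() is
			// presumably used to derive the GOV time code.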
			//980211

			// first dump any cached frames, otherwise a non coded frame will be out of order
			if(bCachedRefDump && m_rguiSpriteUsage [iVOrelative] == 0)
			{
				bCachedRefDump = FALSE;
				// last ref frame needs to be output
#ifndef __OUT_ONE_FRAME_

				if ( bCachedRefCoded )
					dumpData (rgpfReconYUV [BASE_LAYER], rgpfReconSeg [BASE_LAYER], rgpvoenc[BASE_LAYER] ->pvopcRefQLater(), m_rctOrg, volmd);
				else
					dumpNonCodedFrame(rgpfReconYUV [BASE_LAYER], rgpfReconSeg [BASE_LAYER], m_rctOrg, volmd.nBits);

				if (m_rgbSpatialScalability [iVOrelative] && !bTemporalScalability)
					dumpData (rgpfReconYUV [ENHN_LAYER], rgpfReconSeg [ENHN_LAYER], rgpvoenc[ENHN_LAYER] ->pvopcRefQLater(), m_rctOrgSpatialEnhn, volmd);
// begin: deleted by Sharp (98/11/11)
// #else
// 					dumpDataOneFrame (iFrame, iVO, rgpvoenc[BASE_LAYER] ->pvopcRefQLater(), volmd); // save one frame
// 					if (m_rgbSpatialScalability [iVOrelative] && !bTemporalScalability)
// 						dumpDataOneFrame (iFrame, iVO, rgpvoenc[BASE_LAYER] ->pvopcRefQLater(), volmd); // save one frame
// end: deleted by Sharp (98/11/11)
#endif
			}
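			// The block above flushes any reference frame still cached from the
			// previous run before the initial I-VOP / non-coded frame below is
			// dumped; otherwise the output YUV would fall out of display order.
			// A cached reference that was never coded is written as a placeholder
			// via dumpNonCodedFrame().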

			int iGOPperiod = (volmd.iPbetweenI + 1) * (volmd.iBbetweenP + 1);
			if (rgpvoenc [BASE_LAYER]->m_uiRateControl >= RC_TM5 && iGOPperiod != 0 
				&& ((iRefFrame-m_iFirstFrame) % (iGOPperiod * volmd.iTemporalRate)) == 0)
			{
				Int nppic, npic = (m_iLastFrame - iRefFrame + 1) / volmd.iTemporalRate;
				if (iRefFrame == m_iFirstFrame) {
					if (npic > (iGOPperiod - volmd.iBbetweenP))
						npic = iGOPperiod - volmd.iBbetweenP;
				} else {
					npic += volmd.iBbetweenP;
					if (npic > iGOPperiod)
						npic = iGOPperiod;
				}
				nppic = (npic + volmd.iBbetweenP) / (volmd.iBbetweenP + 1) - 1;
				rgpvoenc [BASE_LAYER] -> m_tm5rc.tm5rc_init_GOP(nppic, npic - nppic - 1); //  np, nb remain
			}
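			// TM5 rate control is re-initialised at each GOP boundary. npic is the
			// number of output pictures left in this GOP (clipped to the GOP length;
			// the first GOP is shorter by iBbetweenP, apparently because no B-VOPs
			// precede its I-VOP) and nppic is the number of P-VOPs among them. Example:
			// iPbetweenI = 4, iBbetweenP = 2 gives iGOPperiod = 15; for the first GOP
			// npic = 13, nppic = (13 + 2) / 3 - 1 = 4, so tm5rc_init_GOP() is told
			// that 4 P-VOPs and 13 - 4 - 1 = 8 B-VOPs remain after the I-VOP.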

			// encode non-coded frames or initial IVOPs
			// we always dump these out
			bObjectExists = loadDataSpriteCheck (iVOrelative,iRefFrame, pfYuvSrc, pfSegSrc, pxlcObjColor, rgpvoenc [BASE_LAYER]->m_pvopcOrig, volmd);
			encodeVideoObject(bObjectExists, bObjectExists, iRefFrame, IVOP, DUMP_CURR,
							  iVO, iVOrelative, BASE_LAYER, 
							  pfYuvSrc,pfSegSrc,rgpfReconYUV,rgpfReconSeg,
							  pxlcObjColor, rgpvoenc, volmd, rgpostrm);

			if (m_rgbSpatialScalability [iVOrelative] && !bTemporalScalability ) { // modified by Sharp (98/2/12)
				pvopcBaseQuant = rgpvoenc [BASE_LAYER]->pvopcReconCurr ();
				encodeVideoObject (bObjectExists, bObjectExists, iRefFrame, PVOP, DUMP_CURR,
								   iVO, iVOrelative, ENHN_LAYER,
								   pfYuvSrcSpatialEnhn, pfSegSrc, rgpfReconYUV, rgpfReconSeg,
								   pxlcObjColor, rgpvoenc, volmd, rgpostrm,
								   pvopcBaseQuant);
			}
// begin: added by Sharp (98/2/12)
			else if (m_rgbSpatialScalability [iVOrelative] && bTemporalScalability)
				pBufP2->getBuf( rgpvoenc[BASE_LAYER] );
// end: added by Sharp (98/2/12)

			// go to next frame if this was not coded or we are just coding sprites
			if(!bObjectExists)
				continue; 

			// we dumped first frame so rest must be delayed by one for re-order
			iPCount = volmd.iPbetweenI;			
			Int iWaitInterval = 0;
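			// Main re-ordering loop: find the next reference frame (I or P), code it,
			// then code the B-VOPs lying between the previous and the new reference.
			// The new reference is cached (bCachedRefDump) and only written out after
			// those B-VOPs, since they precede it in display order.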

			while (TRUE) {
				// search for next reference frame
				Int iSearchFrame;
				for(iSearchFrame = iRefFrame + iDT * iRefInterval + iWaitInterval;
						iSearchFrame > iRefFrame; iSearchFrame -= iDT)
					if(iSearchFrame <= m_iLastFrame)
					{
						bObjectExists = loadDataSpriteCheck(iVOrelative,iSearchFrame, pfYuvSrc, pfSegSrc, pxlcObjColor, rgpvoenc [BASE_LAYER]->m_pvopcOrig, volmd);
						break;  // found a suitable reference frame
						// may not be coded
					}
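				// The search walks backwards from the nominal next-reference position
				// (iRefFrame + iDT * iRefInterval, plus any rate-control wait), so near
				// the end of the sequence the last frame not beyond m_iLastFrame still
				// becomes a reference even if fewer than iBbetweenP B-VOPs fit before it.
				// If nothing later than iRefFrame is found, this run is finished.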

				if(iSearchFrame==iRefFrame)
					break;

				if (rgpvoenc [BASE_LAYER] -> skipTest(iSearchFrame,iPCount ? PVOP : IVOP)) // rate control
				{
					// too early! need to wait a frame
					iWaitInterval += iDT;
					continue;
				}
				iWaitInterval = 0;
				CVOPU8YUVBA* pvopcBasePVOPQuant = NULL;

// begin: added by Sharp (98/2/12)
				if ( bTemporalScalability )
					if ( pBufP2 -> m_bCodedFutureRef == 1 ) // added by Sharp (99/1/28)
						pBufP1->copyBuf ( *pBufP2 );
// end: added by Sharp (98/2/12)
				// encode the next reference frame
				//Bool bCachedRefDumpSaveForSpatialScalability = bCachedRefDump;
				if(iPCount==0)
				{
					//added to encode GOV header by SONY 980212
					// moved to here by swinder 980219
					//CAUTION:I don't know how GOV header is encoded in sprite mode - SONY
					// update by swinder 980511
					if (volmd.iGOVperiod != 0 
						&& ((iSearchFrame-m_iFirstFrame) % (volmd.iGOVperiod * volmd.iTemporalRate)) == 0)
					{
//modified by SONY (98/03/30)
						rgpvoenc [BASE_LAYER] -> codeGOVHead (iRefFrame - m_iFirstFrame + iDT);
//modified by SONY (98/03/30) End
			/*				rgpvoenc [BASE_LAYER] -> codeGOVHead (iSearchFrame - m_iFirstFrame);
					Original*/ // why was this changed? - swinder
						rgpostrm [BASE_LAYER]->write (rgpvoenc [BASE_LAYER]->pOutStream ()->str (),
							rgpvoenc [BASE_LAYER]->pOutStream ()->pcount ());
					}
					//980212

					int iGOPperiod = (volmd.iPbetweenI + 1) * (volmd.iBbetweenP + 1);
					if (rgpvoenc [BASE_LAYER]->m_uiRateControl >= RC_TM5 && iGOPperiod != 0 
						&& ((iSearchFrame-m_iFirstFrame) % (iGOPperiod * volmd.iTemporalRate)) == 0)
					{
						Int nppic, npic = (m_iLastFrame - iSearchFrame + 1) / volmd.iTemporalRate;
						if (iRefFrame == m_iFirstFrame) {
							if (npic > (iGOPperiod - volmd.iBbetweenP))
								npic = iGOPperiod - volmd.iBbetweenP;
						} else {
							npic += volmd.iBbetweenP;
							if (npic > iGOPperiod)
								npic = iGOPperiod;
						}
						nppic = (npic + volmd.iBbetweenP) / (volmd.iBbetweenP + 1) - 1;
						rgpvoenc [BASE_LAYER] -> m_tm5rc.tm5rc_init_GOP(nppic, npic - nppic - 1); //  np, nb remain
					}

					// encode IVOP

					encodeVideoObject(bObjectExists, bPrevObjectExists, iSearchFrame, IVOP, bCachedRefDump ? DUMP_PREV : DUMP_NONE, // modified by Sharp (99/1/27)
									  iVO, iVOrelative, BASE_LAYER, 
									  pfYuvSrc, pfSegSrc, rgpfReconYUV, rgpfReconSeg,
 									  pxlcObjColor, rgpvoenc, volmd, rgpostrm);

					bCachedRefDump = TRUE; // need to output this frame later
					bCachedRefCoded = bObjectExists;
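					// The I-VOP just coded becomes the cached reference: any previously
					// cached reference is flushed via DUMP_PREV inside encodeVideoObject(),
					// and this one is held back until the B-VOPs that precede it in
					// display order have been written.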

					iPCount = volmd.iPbetweenI;
					if (m_rgbSpatialScalability [iVOrelative] && !bTemporalScalability )  // modified by Sharp
						pvopcBasePVOPQuant = new CVOPU8YUVBA (*(rgpvoenc [BASE_LAYER]->pvopcReconCurr ()),
															  rgpvoenc [BASE_LAYER]->pvopcReconCurr ()->whereY());
// begin: added by Sharp (98/2/12)
					else if (m_rgbSpatialScalability [iVOrelative] && bTemporalScalability)
						pBufP2->getBuf( rgpvoenc[BASE_LAYER] );
// end: added by Sharp (98/2/12)
				}
				else
				{
					// encode PVOP
					encodeVideoObject(bObjectExists, bPrevObjectExists, iSearchFrame, PVOP, bCachedRefDump ? DUMP_PREV : DUMP_NONE, // modified by Sharp (99/1/27)
									  iVO, iVOrelative, BASE_LAYER, 
									  pfYuvSrcSpatialEnhn, pfSegSrc, rgpfReconYUV, rgpfReconSeg,
									  pxlcObjColor, rgpvoenc, volmd, rgpostrm);
					bCachedRefDump = TRUE; // need to output this frame later
					bCachedRefCoded = bObjectExists;

					if (m_rgbSpatialScalability [iVOrelative] && !bTemporalScalability )  // modified by Sharp (98/2/12)
						pvopcBasePVOPQuant = new CVOPU8YUVBA (*(rgpvoenc [BASE_LAYER]->pvopcReconCurr ()),
															rgpvoenc [BASE_LAYER]->pvopcReconCurr ()->whereY());
// begin: added by Sharp (98/2/12)
					else if (m_rgbSpatialScalability [iVOrelative] && bTemporalScalability)
						pBufP2->getBuf( rgpvoenc[BASE_LAYER] );
// end: added by Sharp (98/2/12)

					if (iPCount>0)  // needed to handle iPCount = -1
						iPCount--;
				}
				bPrevObjectExists = bObjectExists;

				// encode B frames if needed
				Int iBFrame = iRefFrame + iDT; // added by Sharp (98/2/12)
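				// B-VOPs between the previous reference (iRefFrame) and the new one
				// (iSearchFrame) are coded now, after both of their references exist.
				// They are dumped immediately (DUMP_CURR) in normal coding; under
				// temporal scalability dumping is deferred (DUMP_NONE) and done
				// explicitly after the enhancement-layer VOPs further below.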
				if(iRefInterval>1)
				{
					Bool bCachedBVOP = FALSE; // added by Sharp (98/11/11)
//						Int iBFrame;  // deleted by Sharp (98/2/12)
					for(iBFrame = iRefFrame + iDT; iBFrame < iSearchFrame; iBFrame += iDT)
					{
						if(rgpvoenc [BASE_LAYER] -> skipTest(iBFrame,BVOP))
							continue;
						bObjectExists = loadDataSpriteCheck(iVOrelative,iBFrame, pfYuvSrc, pfSegSrc, pxlcObjColor, rgpvoenc [BASE_LAYER]->m_pvopcOrig, volmd);
						encodeVideoObject (bObjectExists, bObjectExists, iBFrame, BVOP, bTemporalScalability ? DUMP_NONE: DUMP_CURR, // modified by Sharp (98/11/11)
										   iVO, iVOrelative, BASE_LAYER,
										   pfYuvSrc,pfSegSrc,rgpfReconYUV,rgpfReconSeg,
										   pxlcObjColor,rgpvoenc,volmd,rgpostrm);
						bCachedBVOP = bTemporalScalability ? TRUE : FALSE; // added by Sharp (98/11/11)
						if (m_rgbSpatialScalability [iVOrelative] && !bTemporalScalability) { // modified by Sharp (98/2/12)
							pvopcBaseQuant = rgpvoenc [BASE_LAYER]->pvopcReconCurr ();
							// Spatial Scalabe BVOP 
							encodeVideoObject (bObjectExists, bObjectExists, iBFrame, BVOP, DUMP_CURR, 
											   iVO, iVOrelative, ENHN_LAYER,
											   pfYuvSrcSpatialEnhn, pfSegSrc, rgpfReconYUV, rgpfReconSeg,
											   pxlcObjColor, rgpvoenc, volmd, rgpostrm,
											   pvopcBaseQuant);
						}
// begin: added by Sharp (98/2/12)
						else if (m_rgbSpatialScalability [iVOrelative] && bTemporalScalability)
							pBufB2->getBuf( rgpvoenc[BASE_LAYER] );
						
						if ( m_rgbSpatialScalability [iVOrelative] && bTemporalScalability ) {  // for TPS enhancement layer
							rgpvoenc [ENHN_LAYER] -> m_iBCount = 0;
							for (Int iEFrame = iBFrame - iDT + iDT_enhn; iEFrame < iBFrame; iEFrame += iDT_enhn ) {

								updateRefForTPS( rgpvoenc[ENHN_LAYER], pBufP1, pBufP2, pBufB1, pBufB2, pBufE,
									0, iVOrelative, iEcount, iBFrame-iDT+iDT_enhn, iEFrame, 0 );
								iEcount++;
								encodeEnhanceVideoObject(bObjectExists, iEFrame, rgpvoenc[ENHN_LAYER]->m_vopmd.vopPredType, DUMP_CURR,
									iVO,iVOrelative, pfYuvSrc,pfSegSrc,rgpfReconYUV,rgpfReconSeg,
									pxlcObjColor,rgpvoenc[ENHN_LAYER],volmd, volmd_enhn, iBFrame - iDT + iDT_enhn, rgpostrm,
									*pBufP1, *pBufP2, *pBufB1, *pBufB2, *pBufE
									);
							
								if ( !pBufB2->empty() ){
									if ( pBufB2 -> m_bCodedFutureRef == 1 ) // added by Sharp (99/1/28)
										pBufB1->copyBuf( *pBufB2 );
									pBufB2->dispose();
								}
							}
						}
// end: added by Sharp (98/2/12)
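						// Temporal-scalability enhancement layer: it runs at a finer frame
						// step (iDT_enhn) than the base layer (iDT), so the enhancement VOPs
						// falling in the interval (iBFrame - iDT, iBFrame) are coded here,
						// with pBufP1/pBufP2/pBufB1/pBufB2/pBufE apparently holding the
						// buffered base-layer frames used as references.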

// begin: added by Sharp (98/11/11)
						if(bCachedBVOP && m_rguiSpriteUsage [iVOrelative] == 0)
						{
							// only for temporal scalability
#ifndef __OUT_ONE_FRAME_
							// last ref frame needs to be output
							dumpData (rgpfReconYUV [BASE_LAYER], rgpfReconSeg [BASE_LAYER],
								rgpvoenc[BASE_LAYER] ->pvopcReconCurr(), m_rctOrg, volmd);
							if (m_rgbSpatialScalability [iVOrelative] && !bTemporalScalability)
								dumpData (rgpfReconYUV [BASE_LAYER], rgpfReconSeg [BASE_LAYER],
									rgpvoenc[BASE_LAYER] ->pvopcReconCurr(), m_rctOrgSpatialEnhn, volmd);
#endif
							bCachedBVOP = FALSE;
						}
// end: added by Sharp (98/11/11)
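						// Deferred dump for temporal scalability: the base-layer B-VOP
						// coded above with DUMP_NONE is written out only here, after the
						// enhancement-layer VOPs for the same interval have been coded.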

					}
				}

				if (m_rgbSpatialScalability [iVOrelative] && !bTemporalScalability) { // modified by Sharp (98/2/12)
/* (98/3/30) modified by SONY*/
					if (iPCount == volmd.iPbetweenI)        {
/* (98/3/20) modified by SONY(end)*/
/*            ORIGINAL
					if (iPCount == 0)       {
					*/
					encodeVideoObject(TRUE, TRUE, iSearchFrame, PVOP,
										  DUMP_CURR, // sony
										  iVO, iVOrelative, ENHN_LAYER, 
										  pfYuvSrcSpatialEnhn, pfSegSrc, rgpfReconYUV, rgpfReconSeg,
										  pxlcObjColor,rgpvoenc,volmd,rgpostrm,
										  pvopcBasePVOPQuant);
					}
					else {
						VOPpredType PrevType = (rgpvoenc[ENHN_LAYER]->m_volmd.iSpatialOption == 0 )? BVOP: PVOP;
						encodeVideoObject (bObjectExists, bObjectExists, iSearchFrame, PrevType,
										   DUMP_CURR, // sony
										   iVO, iVOrelative, ENHN_LAYER, 
										   pfYuvSrcSpatialEnhn, pfSegSrc, rgpfReconYUV, rgpfReconSeg,
										   pxlcObjColor, rgpvoenc, volmd, rgpostrm,
										   pvopcBasePVOPQuant);
					}
					delete pvopcBasePVOPQuant;
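					// Spatial-scalability enhancement layer: right after a base-layer
					// I-VOP (iPCount was just reset to iPbetweenI) the enhancement VOP is
					// coded as a P-VOP predicted from the copied base-layer reconstruction
					// (pvopcBasePVOPQuant); otherwise it is coded as a B-VOP when
					// iSpatialOption == 0, else as a P-VOP. The copy of the base
					// reconstruction is freed once the enhancement VOP has been coded.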
