// umc_frame_constructor.cpp
// NOTE(review): web-page scraping artifacts ("字号:" font-size widget text)
// removed from the top of this file.
m_lLastBytePos -= m_CurFrame.iBufOffset;
m_lCurPos -= m_CurFrame.iBufOffset;
m_lCurPicStart -= IPP_MIN(m_lCurPicStart, m_CurFrame.iBufOffset);
m_PrevSample.iBufOffset -= IPP_MIN(m_PrevSample.iBufOffset, m_CurFrame.iBufOffset);
m_LastSample.iBufOffset -= IPP_MIN(m_LastSample.iBufOffset, m_CurFrame.iBufOffset);
m_CurFrame.iBufOffset = 0;
}
// set free pointer
in->SetBufferPointer(m_pBuf + m_lLastBytePos, in->GetDataSize());
in->SetDataSize(0);
return UMC_OK;
}
// Finalizes an input-buffer unlock: delegates the real work to
// PreUnLockInputBuffer() and, on success, commits every parsed frame so the
// output side can see it.
// NOTE: PreUnLockInputBuffer() acquires m_synchro itself, so it must be
// called BEFORE this function takes the guard (the mutex is presumably
// non-recursive -- confirm).
Status FrameConstructor::UnLockInputBuffer(MediaData *in, Status streamStatus)
{
    Status result = PreUnLockInputBuffer(in, streamStatus);
    AutomaticMutex guard(m_synchro);
    if (UMC_OK == result)
    {
        // automatically commit all uncommited samples
        m_uiCommitedFrames = m_uiTotalFrames;
    }
    return result;
}
// Accounts a freshly delivered input portion and attempts to parse one frame
// out of the accumulated buffer.
//   in           - the input sample just written into the buffer (may be NULL)
//   streamStatus - UMC_OK while the stream continues; any other value is
//                  treated as end-of-stream
// Returns the status of the internal GetFrame() attempt.
Status FrameConstructor::PreUnLockInputBuffer(MediaData *in, Status streamStatus)
{
AutomaticMutex guard(m_synchro);
Status umcRes = UMC_OK;
SplMediaData frame;
if (NULL != in && UMC_OK == streamStatus)
{
// If the new data has the same absolute position as the last registered
// sample, extend that sample instead of starting a new one.
if (DynamicCast<SplMediaData>(in) && ((SplMediaData *)in)->GetAbsPos() == m_LastSample.uiAbsPos)
{ // add on at the end of last input
m_LastSample.uiSize += (Ipp32u)in->GetDataSize();
}
else
{ // scroll input samples
m_PrevSample = m_LastSample;
m_LastSample.CopyFrom(in[0], m_lLastBytePos);
}
// advance the write boundary by the amount of data just delivered
m_lLastBytePos += (Ipp32s)in->GetDataSize();
}
// Any non-OK stream status marks end of stream: commit everything queued.
if (UMC_OK != streamStatus)
{
m_uiCommitedFrames = m_uiTotalFrames;
m_bEndOfStream = true;
}
umcRes = GetFrame(&frame);
if (UMC_OK == umcRes)
{
// to avoid assigning of the same PTS to several frames
if (ArePTSEqual(frame.GetTime(), m_LastFrame.GetTime()))
frame.SetTime(-1.0, -1.0);
FCSample sample;
// store the frame's offset relative to the start of the internal buffer
sample.CopyFrom(frame, (Ipp8u *)frame.GetDataPointer() - m_pBuf);
m_iCurrentLevel += (Ipp32s)frame.GetDataSize();
// A frame with a smaller absolute position than the previous one indicates
// a backward jump in the stream -- commit all frames queued so far.
if (sample.uiAbsPos < m_LastFrame.GetAbsPos())
m_uiCommitedFrames = m_uiTotalFrames;
m_OutputQueue.Add(sample, m_uiCommitedFrames);
m_LastFrame = frame;
m_uiTotalFrames++;
}
else if (UMC_ERR_SYNC == umcRes && 0 == m_uiTotalFrames)
{ // syncword was not found, so skip parsed bytes
m_lFirstBytePos = m_CurFrame.iBufOffset = m_lCurPos;
}
return umcRes;
}
// Peeks the first sample of the output queue without removing it.
// Returns UMC_ERR_END_OF_STREAM when the queue is empty and the stream has
// ended, UMC_ERR_NOT_ENOUGH_DATA when more input is still expected.
Status FrameConstructor::GetSampleFromQueue(FCSample *pSample)
{
    if (UMC_OK == m_OutputQueue.First(*pSample))
        return UMC_OK;
    return m_bEndOfStream ? UMC_ERR_END_OF_STREAM : UMC_ERR_NOT_ENOUGH_DATA;
}
// Hands the next committed sample to the consumer and marks the output
// buffer as locked until UnLockOutputBuffer() is called.
//   out - destination MediaData (must not be NULL)
Status FrameConstructor::LockOutputBuffer(MediaData *out)
{
    AutomaticMutex guard(m_synchro);
    if (NULL == out)
        return UMC_ERR_NULL_PTR;
    // nothing has been committed yet
    if (0 == m_uiCommitedFrames)
        return m_bEndOfStream ? UMC_ERR_END_OF_STREAM : UMC_ERR_NOT_ENOUGH_DATA;
    FCSample nextSample;
    Status res = GetSampleFromQueue(&nextSample);
    if (UMC_OK != res)
        return res;
    // expose the sample's data region inside the internal buffer
    nextSample.CopyTo(*out, m_pBuf);
    m_bIsOutputBufferLocked = true;
    return UMC_OK;
}
// Releases the sample previously handed out by LockOutputBuffer(): removes it
// from the output queue and updates the buffer boundaries and fill level.
//   out - the MediaData obtained from LockOutputBuffer (must not be NULL)
Status FrameConstructor::UnLockOutputBuffer(MediaData *out)
{
AutomaticMutex guard(m_synchro);
// check error(s)
if (NULL == out)
return UMC_ERR_NULL_PTR;
FCSample sample;
Status umcRes = GetSampleFromQueue(&sample);
// If the queue is empty, or 'out' does not point at the queued sample's data,
// then 'out' was not locked on this sample -- nothing to release.
if (UMC_OK != umcRes || out->GetBufferPointer() != m_pBuf + sample.iBufOffset)
return UMC_OK;
m_OutputQueue.Remove(m_iFirstInDecOrderIdx);
m_uiCommitedFrames--;
m_uiTotalFrames--;
// In backward playback the removal shifts the cached decode-order index.
if (m_dRate < 0.0)
m_iFirstInDecOrderIdx--;
// update bottom boundary
if (m_OutputQueue.Size() > 0)
m_lFirstBytePos = m_OutputQueue.FirstBO()->iBufOffset;
else
m_lFirstBytePos = m_CurFrame.iBufOffset;
// recompute buffer occupancy from the new boundaries
m_iCurrentLevel = CalcCurrentLevel(m_lFirstBytePos, m_lLastBytePos, m_lBufferSize);
m_bIsOutputBufferLocked = false;
return UMC_OK;
}
// Copies the most recently produced frame descriptor into 'data'.
// When 'data' is actually an SplMediaData, splitter-specific fields are
// preserved; otherwise only the MediaData base part is copied.
Status FrameConstructor::GetLastFrame(MediaData *data)
{
    AutomaticMutex guard(m_synchro);
    // no frame has been produced yet
    if (NULL == m_LastFrame.GetBufferPointer())
        return UMC_ERR_NOT_ENOUGH_DATA;
    SplMediaData *pSplData = DynamicCast<SplMediaData>(data);
    if (pSplData)
        *pSplData = m_LastFrame;                 // full copy
    else
        *data = *((MediaData *)&m_LastFrame);    // base-part copy only
    return UMC_OK;
}
// Default frame extraction: treats the whole unparsed tail of the buffer as
// one frame (format-specific subclasses override this with real parsing).
Status FrameConstructor::GetFrame(SplMediaData *frame)
{
    Ipp32s iUnparsed = m_lLastBytePos - m_lCurPos;
    if (0 == iUnparsed)
        return m_bEndOfStream ? UMC_ERR_END_OF_STREAM : UMC_ERR_NOT_ENOUGH_DATA;
    // inherit time stamps / flags from the last input sample
    m_CurFrame = m_LastSample;
    m_CurFrame.uiSize = iUnparsed;
    m_CurFrame.iBufOffset = m_lCurPos;
    m_CurFrame.CopyTo(*frame, m_pBuf);
    // everything up to the write boundary is now consumed
    m_lCurPos = m_lLastBytePos;
    return UMC_OK;
}
// Refreshes and returns the track info. A sample currently locked by the
// consumer is excluded from the ready-frames count.
Mpeg2TrackInfo *FrameConstructor::GetInfo(void)
{
    AutomaticMutex guard(m_synchro);
    m_pInfo->m_uiFramesReady = m_bIsOutputBufferLocked
        ? m_uiCommitedFrames - 1
        : m_uiCommitedFrames;
    return m_pInfo;
}
// Assigns the current frame's absolute stream position from whichever input
// sample covers buffer offset iPos, preferring the earlier (previous) sample.
// Leaves m_CurFrame.uiAbsPos untouched when neither sample is hit.
void FrameConstructor::AssignAbsPos(Ipp32s iPos)
{
    if (m_PrevSample.IsHit(iPos))
    {
        m_CurFrame.uiAbsPos = m_PrevSample.uiAbsPos;
        return;
    }
    if (m_LastSample.IsHit(iPos))
        m_CurFrame.uiAbsPos = m_LastSample.uiAbsPos;
}
// Assigns PTS/DTS to the current frame from the input sample covering buffer
// offset iPos. Each sample's stamps may be consumed only once (guarded by the
// STAMPS_APPLIED flag); when no unconsumed stamps cover iPos, the frame gets
// the "unknown" stamps -1.0/-1.0.
void FrameConstructor::AssignTimeStamps(Ipp32s iPos)
{
    FCSample *pStampSrc = NULL;
    if (m_PrevSample.IsHit(iPos) && !m_PrevSample.GetFlag(FCSample::STAMPS_APPLIED))
        pStampSrc = &m_PrevSample;
    else if (m_LastSample.IsHit(iPos) && !m_LastSample.GetFlag(FCSample::STAMPS_APPLIED))
        pStampSrc = &m_LastSample;

    if (pStampSrc)
    {
        m_CurFrame.dPTS = pStampSrc->dPTS;
        m_CurFrame.dDTS = pStampSrc->dDTS;
        // mark the stamps as consumed so no later frame reuses them
        pStampSrc->SetFlag(FCSample::STAMPS_APPLIED, true);
    }
    else
    {
        m_CurFrame.dPTS = -1.0;
        m_CurFrame.dDTS = -1.0;
    }
}
// Constructs a video frame constructor with all start-code parsing state
// cleared and forward-playback queue indexing.
VideoFrameConstructor::VideoFrameConstructor()
{
    // start-code / frame-boundary parsing state
    m_bSeqSCFound    = false;
    m_bPicSCFound    = false;
    m_bFrameBegFound = false;
    m_bIsFinalizeSequenceSent = false;
    // forward playback by default: take samples from the queue head
    m_iFirstInDecOrderIdx = 0;
}
// Initializes the base constructor, then clears the video-specific parsing
// state (same reset performed by the default constructor).
Status VideoFrameConstructor::Init(MediaReceiverParams *pInit)
{
    Status baseStatus = FrameConstructor::Init(pInit);
    m_bSeqSCFound    = false;
    m_bPicSCFound    = false;
    m_bFrameBegFound = false;
    m_bIsFinalizeSequenceSent = false;
    m_iFirstInDecOrderIdx = 0;
    // propagate the base-class initialization status
    return baseStatus;
}
// Picks the next sample to output. Forward playback is a plain FIFO peek
// (base class). Backward playback must serve I-frames: the first committed
// I-frame is located by a linear scan and its index cached in
// m_iFirstInDecOrderIdx (-1 means "not found yet").
Status VideoFrameConstructor::GetSampleFromQueue(FCSample *pSample)
{
if (m_dRate >= 0.0)
{ // case of forward playback
return FrameConstructor::GetSampleFromQueue(pSample);
}
if (0 == (Ipp32s)m_uiCommitedFrames)
return UMC_ERR_NOT_ENOUGH_DATA;
// a non-negative cached index points at the I-frame found on a previous call
if (m_iFirstInDecOrderIdx >= 0)
return m_OutputQueue.Get(pSample[0], m_iFirstInDecOrderIdx);
Ipp32s iIdx;
// scan the committed samples for the first I-frame
for (iIdx = 0; iIdx < (Ipp32s)m_uiCommitedFrames; iIdx++)
{
m_OutputQueue.Get(pSample[0], iIdx);
if (I_PICTURE == pSample[0].GetFrameType())
break;
}
// unable to find I_PICTURE
if (iIdx >= (Ipp32s)m_uiCommitedFrames)
return m_bEndOfStream ? UMC_ERR_END_OF_STREAM : UMC_ERR_NOT_ENOUGH_DATA;
// cache the index so subsequent calls return the same sample
m_iFirstInDecOrderIdx = iIdx;
return UMC_OK;
}
// Fully resets the constructor: base-class state plus the video-specific
// start-code parsing flags and the decode-order index.
Status VideoFrameConstructor::Reset()
{
    AutomaticMutex guard(m_synchro);
    Status baseStatus = FrameConstructor::Reset();
    m_bSeqSCFound    = false;
    m_bPicSCFound    = false;
    m_bFrameBegFound = false;
    m_bIsFinalizeSequenceSent = false;
    // backward playback restarts the I-frame search (-1 = not found yet)
    m_iFirstInDecOrderIdx = (m_dRate < 0.0) ? -1 : 0;
    return baseStatus;
}
// Soft reset: re-arms the picture/frame start-code search without dropping
// the sequence-level state (m_bSeqSCFound stays as-is).
// FIX: propagate the base-class status instead of discarding it and always
// returning UMC_OK -- consistent with Reset(), which returns the base status.
Status VideoFrameConstructor::SoftReset()
{
    AutomaticMutex guard(m_synchro);
    Status umcRes = FrameConstructor::SoftReset();
    m_bPicSCFound = false;
    m_bFrameBegFound = false;
    return umcRes;
}
// Sets the playback rate and re-initializes the decode-order index:
// negative (backward) rates start with no known I-frame (-1), otherwise the
// queue head (0) is used.
// NOTE(review): unlike the other mutators this takes no m_synchro guard --
// presumably callers serialize SetRate externally; confirm.
void VideoFrameConstructor::SetRate(Ipp64f dRate)
{
    m_dRate = dRate;
    m_iFirstInDecOrderIdx = (dRate < 0.0) ? -1 : 0;
}
// Reports whether a usable frame start has been located. Both the frame
// begin code and the picture start code must have been seen; the frame type
// is then checked against the trick-mode rate limits:
//   I-frames: any rate; P-frames: |rate| < 4; B-frames: |rate| < 3.
bool VideoFrameConstructor::IsFrameStartFound(void)
{
    if (!(m_bFrameBegFound && m_bPicSCFound))
        return false;
    if (I_PICTURE == m_CurFrame.GetFrameType())
        return true;
    if (P_PICTURE == m_CurFrame.GetFrameType())
        return m_dRate < 4.0 && m_dRate > -4.0;
    if (B_PICTURE == m_CurFrame.GetFrameType())
        return m_dRate < 3.0 && m_dRate > -3.0;
    return false;
}
// Checks whether a queued sample may be output at the given trick-mode rate:
// I- and D-pictures always pass; P-pictures require |rate| < 4;
// B-pictures require |rate| < 3.
bool VideoFrameConstructor::IsSampleComplyWithTmPolicy(FCSample &sample, Ipp64f dRate)
{
    if (I_PICTURE == sample.GetFrameType() || D_PICTURE == sample.GetFrameType())
        return true;
    if (P_PICTURE == sample.GetFrameType())
        return dRate < 4.0 && dRate > -4.0;
    if (B_PICTURE == sample.GetFrameType())
        return dRate < 3.0 && dRate > -3.0;
    return false;
}
Status Mpeg2FrameConstructor::GetFrame(SplMediaData *frame)
{
Status umcRes;
Ipp8u *buf = m_pBuf;
bool bFound = false;
while (!bFound)
{
// Find and parse sequence header
if (!m_bSeqSCFound)
{
while (m_lCurPos < m_lLastBytePos - 3 && !IS_CODE(&m_pBuf[m_lCurPos], 0xb3))
m_lCurPos++;
if (m_lCurPos >= m_lLastBytePos - 3)
return m_bEndOfStream ? UMC_ERR_END_OF_STREAM : UMC_ERR_SYNC;
umcRes = ParseSequenceHeader(&m_pBuf[m_lCurPos], m_lLastBytePos - m_lCurPos, m_bInfoFilled ? NULL : m_pInfo);
if (UMC_OK != umcRes)
return m_bEndOfStream ? UMC_ERR_END_OF_STREAM : UMC_ERR_NOT_ENOUGH_DATA;
m_CurFrame.iBufOffset = m_lCurPos;
m_lCurPos += 4;
m_bSeqSCFound = true;
m_bFrameBegFound = true;
AssignAbsPos(m_CurFrame.iBufOffset);
}
// Find begin of frame
if (!m_bFrameBegFound)
{
Ipp32s savePos = m_lCurPos;
while (m_lCurPos < m_lLastBytePos - 3 && !IS_CODE_3(&m_pBuf[m_lCurPos], 0xb3, 0xb8, 0x00))
m_lCurPos++;
if (m_lCurPos >= m_lLastBytePos - 3)
{
if (m_bEndOfStream)
return UMC_ERR_END_OF_STREAM;
CutInterval(m_PrevSample, m_LastSample, m_pBuf, savePos, m_lCurPos, m_lLastBytePos);
m_lLastBytePos -= m_lCurPos - savePos;
m_lCurPos -= m_lCurPos - savePos;
return UMC_ERR_NOT_ENOUGH_DATA;
}
m_CurFrame.iBufOffset = m_lCurPos;
m_bFrameBegFound = true;
AssignAbsPos(m_CurFrame.iBufOffset);
}
// Find picture start code
if (!m_bPicSCFound)
{
while (m_lCurPos < m_lLastBytePos - 5 && !IS_CODE(&m_pBuf[m_lCurPos], 0x00))
m_lCurPos++;
if (m_lCurPos >= m_lLastBytePos - 5)
return m_bEndOfStream ? UMC_ERR_END_OF_STREAM : UMC_ERR_NOT_ENOUGH_DATA;
VideoStreamInfo *pVSI = (VideoStreamInfo *)m_pInfo->m_pStreamInfo;
if (!m_bInfoFilled && MPEG2_VIDEO == pVSI->stream_type && PROGRESSIVE != pVSI->interlace_type)
{
umcRes = ParsePictureHeader(&m_pBuf[m_lCurPos], m_lLastBytePos - m_lCurPos, m_bInfoFilled ? NULL : m_pInfo);
if (UMC_OK != umcRes)
return umcRes;
m_bInfoFilled = true;
}
// set time stamps
AssignTimeStamps(m_lCurPos);
// set frame type
m_CurFrame.SetFrameType((buf[m_lCurPos + 5] >> 3) & 0x07);
m_lCurPos += 6;
m_bPicSCFound = true;
}
// Find begin of the next frame
if (m_bFrameBegFound && m_bPicSCFound)
{
while (m_lCurPos < m_lLastBytePos - 3 && !IS_CODE_4(&m_pBuf[m_lCurPos], 0xb3, 0xb7, 0xb8, 0x00))
m_lCurPos++;
if (m_lCurPos >= m_lLastBytePos - 3)
{
// NOTE(review): source truncated here -- web-page UI residue (keyboard
// shortcut help text) removed. The remainder of
// Mpeg2FrameConstructor::GetFrame() is missing from this capture and must be
// restored from the original umc_frame_constructor.cpp.