// umc_mpeg2_dec_pic.cpp
// (NOTE: the lines above/below marked as page-widget text — "字号:" = "font size" —
// are residue from the code-hosting site this listing was captured from, not source.)
switch(frame_rate_code)
{
case 0:
sequenceHeader.delta_frame_time = 1000./30000.; break;
case 1:
sequenceHeader.delta_frame_time = 1001./24000.; break;
case 2:
sequenceHeader.delta_frame_time = 1000./24000.; break;
case 3:
sequenceHeader.delta_frame_time = 1000./25000.; break;
case 4:
sequenceHeader.delta_frame_time = 1001./30000.; break;
case 5:
sequenceHeader.delta_frame_time = 1000./30000.; break;
case 6:
sequenceHeader.delta_frame_time = 1000./50000.; break;
case 7:
sequenceHeader.delta_frame_time = 1001./60000.; break;
case 8:
sequenceHeader.delta_frame_time = 1000./60000.; break;
default:
sequenceHeader.delta_frame_time = 1000./30000.;
//VM_ASSERT(0);
break;
}
m_ClipInfo.framerate = 1.0 / sequenceHeader.delta_frame_time;
if(m_ClipInfo.stream_type == MPEG1_VIDEO) {
Ipp32s ar_m1_tab[][2] = { // 11172-2 2.4.3.2
{ 1, 1}, // 0 forbidden
{ 1, 1}, // 1 1.0000 VGA etc.
{33,49}, // 2 0.6735
{45,64}, // 3 0.7031 16:9, 625line
{16,21}, // 4 0.7615
{29,36}, // 5 0.8055
{27,32}, // 6 0.8437 16:9, 525line
{42,47}, // 7 0.8935
{15,16}, // 8 0.9375 CCIR601, 625line
{53,54}, // 9 0.9815
{40,39}, // 10 1.0255
{77,72}, // 11 1.0695
{ 9, 8}, // 12 1.1250 CCIR601, 525line
{22,19}, // 13 1.1575
{161,134}, // 14 1.2015
{ 1, 1} // 15 reserved
};
m_ClipInfo.aspect_ratio_width = ar_m1_tab[dar_code][0];
m_ClipInfo.aspect_ratio_height = ar_m1_tab[dar_code][1];
return (UMC_OK);
}
if(dar_code == 2)
{
ret = DARtoPAR(m_ClipInfo.clip_info.width, m_ClipInfo.clip_info.height, 4, 3,
&m_ClipInfo.aspect_ratio_width, &m_ClipInfo.aspect_ratio_height);
}
else if(dar_code == 3)
{
ret = DARtoPAR(m_ClipInfo.clip_info.width, m_ClipInfo.clip_info.height, 16, 9,
&m_ClipInfo.aspect_ratio_width, &m_ClipInfo.aspect_ratio_height);
}
else if(dar_code == 4)
{
ret = DARtoPAR(m_ClipInfo.clip_info.width, m_ClipInfo.clip_info.height, 221, 100,
&m_ClipInfo.aspect_ratio_width, &m_ClipInfo.aspect_ratio_height);
}
else // dar_code == 1 or unknown
{
ret = UMC_OK;
m_ClipInfo.aspect_ratio_width = 1;
m_ClipInfo.aspect_ratio_height = 1;
}
sequenceHeader.delta_frame_time *= (Ipp64f)(sequenceHeader.frame_rate_extension_d + 1) /
(sequenceHeader.frame_rate_extension_n + 1);
m_ClipInfo.framerate = 1.0 / sequenceHeader.delta_frame_time;
}
else
sequenceHeader.delta_frame_time = 1.0 / m_ClipInfo.framerate;
return (UMC_OK);
}
void MPEG2VideoDecoderBase::sequence_display_extension()
{
Ipp32u code;
IppVideoContext* video = Video[0];
GET_TO9BITS(video->bs, 3, code)//video_format
GET_1BIT(video->bs, code)//colour_description
if(code)
{
GET_BITS_LONG(video->bs, 24, code)
}
GET_BITS_LONG(video->bs, 29, code)
}
void MPEG2VideoDecoderBase::sequence_scalable_extension()
{
Ipp32u code;
IppVideoContext* video = Video[0];
sequenceHeader.extension_start_code_ID = SEQUENCE_SCALABLE_EXTENSION_ID;
GET_TO9BITS(video->bs, 2, sequenceHeader.scalable_mode)//scalable_mode
GET_TO9BITS(video->bs, 4, code);//layer_id
if(sequenceHeader.scalable_mode == SPARTIAL_SCALABILITY)
{
SKIP_BITS_32(video->bs)
SKIP_BITS_LONG(video->bs, 17)
}
else if(sequenceHeader.scalable_mode == TEMPORAL_SCALABILITY)
{
GET_1BIT(video->bs,code)//picture mux enable
if(code)
GET_1BIT(video->bs,code)//mux to progressive sequence
SKIP_BITS(video->bs, 6)
}
}
// Compute and store the presentation time (PT) for the incoming picture and
// return, via out_time, the PT of the frame that is ready to be output.
// Called after field_buffer_index switching.
// When FLAG_VDEC_REORDER isn't set, the reported time can be wrong
// if repeat_first_field happened != 0 earlier in the stream.
void MPEG2VideoDecoderBase::CalculateFrameTime(Ipp64f in_time, Ipp64f * out_time)
{
    Ipp32s index;
    Ipp64f duration = sequenceHeader.delta_frame_time;

    // Stretch the nominal frame duration when repeat_first_field is set:
    // a progressive sequence shows the frame for 2 or 3 frame periods,
    // an interlaced one displays 3 fields (1.5 frame periods).
    if(PictureHeader.repeat_first_field) {
        if(sequenceHeader.progressive_sequence != 0) { // 2 or 3 frames duration
            duration += sequenceHeader.delta_frame_time;
            if(PictureHeader.top_field_first)
                duration += sequenceHeader.delta_frame_time;
        } else { // 3 fields duration
            duration *= 1.5;
        }
    }
    frame_buffer.frame_p_c_n [frame_buffer.curr_index].duration = duration;

    if(PictureHeader.picture_structure == FRAME_PICTURE ||
        frame_buffer.field_buffer_index == 0)
    { // Save time provided for the frame, ignore for second field
        frame_buffer.frame_p_c_n [frame_buffer.curr_index].frame_time =
            in_time;
        // Compute current time, ignoring second field, don't recalc if absent
        if(in_time > 0) {
            if(PictureHeader.picture_coding_type == B_PICTURE)
                sequenceHeader.stream_time = in_time;
            else if ( !(m_lFlags & FLAG_VDEC_REORDER) ||
                m_decodedFrameNum == 0){ // can happen in the beginning
                // Rewind the reference picture's timestamp by its reorder
                // distance (temporal_reference delta); can become wrong
                sequenceHeader.stream_time = in_time - sequenceHeader.delta_frame_time *
                    (PictureHeader.temporal_reference - sequenceHeader.stream_time_temporal_reference);
            }
        }
        // If the reference frame about to be output carries a valid
        // timestamp, prefer it as the current stream time.
        if(frame_buffer.retrieve >= 0 && //stream time is of output other IP
            PictureHeader.picture_coding_type != B_PICTURE &&
            frame_buffer.frame_p_c_n [1-frame_buffer.curr_index].frame_time >= 0)
            sequenceHeader.stream_time =
                frame_buffer.frame_p_c_n [1-frame_buffer.curr_index].frame_time;
    }

    // Compute time to be out
    index = frame_buffer.retrieve;
    if(index>=0 && frame_buffer.frame_p_c_n [index].frame_time < 0) {
        // Frame to be out hasn't proper time
        if(PictureHeader.picture_coding_type == B_PICTURE ||
            m_lFlags & FLAG_VDEC_REORDER) { // use current time
            *out_time = sequenceHeader.stream_time;
        } else { // compute next ref_frame, really curr_time + IPdistance. But repeat field!
            *out_time = sequenceHeader.stream_time + sequenceHeader.delta_frame_time *
                (PictureHeader.temporal_reference - sequenceHeader.stream_time_temporal_reference);
        }
    } else if(index>=0) {
        *out_time = frame_buffer.frame_p_c_n [index].frame_time;
    } else {
        *out_time = -1; // nothing to output yet
    }

    // Update current time after the second field (or a full frame picture);
    // the sign of m_dPlaybackRate selects forward/backward stepping.
    if(PictureHeader.picture_structure == FRAME_PICTURE ||
        frame_buffer.field_buffer_index == 1)
        if(PictureHeader.picture_coding_type == B_PICTURE)
            sequenceHeader.stream_time +=
                frame_buffer.frame_p_c_n [frame_buffer.curr_index].duration
                * (m_dPlaybackRate > 0 ? 1 : -1);
        else if(index>=0) // previous reference frame is returned
            sequenceHeader.stream_time +=
                frame_buffer.frame_p_c_n [1-frame_buffer.curr_index].duration
                * (m_dPlaybackRate > 0 ? 1 : -1);
}
Status MPEG2VideoDecoderBase::DecodeSlices(Ipp32s threadID)
{
    // Decode slices on the given worker thread until the slice-header
    // parser reports anything other than UMC_OK (end of this thread's
    // bitstream segment). Per-slice decode errors are deliberately
    // ignored; the function always reports success, as before.
    IppVideoContext *ctx = Video[threadID];

    while (DecodeSliceHeader(ctx) == UMC_OK)
        DecodeSlice(ctx);

    return (UMC_OK);
}
Status MPEG2VideoDecoderBase::DecodePicture()
{
    // Decode all slices of the current picture, optionally splitting the
    // slice data between worker threads at slice start-code boundaries.
    // Always returns UMC_OK; slice-level errors are not propagated.
    IppVideoContext* video = Video[0];
    Status umcRes;
    Ipp32s i;
    LockBuffers(); // lock internal buffers
#ifdef KEEP_HISTORY
    Ipp8u *frame_history = frame_buffer.frame_p_c_n[frame_buffer.ind_his_curr].frame_history;
    if (frame_history) {
        memset(frame_history, 0, sequenceHeader.numMB);
    }
#endif
    Video[0]->slice_vertical_position = 1;
    Ipp32s saveNumberOfThreads = m_nNumberOfThreads;
    if (m_nNumberOfThreads > 1) {
#define MAX_START_CODES 1024
        Ipp8u *start_ptr = GET_BYTE_PTR(video->bs);
        Ipp8u *end_ptr = GET_END_PTR(video->bs)-3;
        Ipp8u *ptr = start_ptr, *ptrz = start_ptr;
        Ipp8u *prev_ptr;
        Ipp32s curr_thread;
        Ipp32s len; // length of slice data; computed after the scan below
        Ipp32s j, start_count = 0;
        Ipp32s start_pos[MAX_START_CODES];
        // Collect byte offsets of up to MAX_START_CODES slice start codes
        // (pattern 00 00 01 xx with 0x01 <= xx <= 0xAF).
        for(start_count = 0; start_count < MAX_START_CODES; start_count++) {
            Ipp32s code;
            do {
                while(ptr<end_ptr && (ptr[0] || ptr[1] || ptr[2] > 1)) ptr++;
                ptrz = ptr; // remember where the zero run began
                while(ptr<end_ptr && !ptr[2]) ptr++;
            } while(ptr<end_ptr && ptr[2] != 1);
            if(ptr>=end_ptr) {
                ptr = GET_END_PTR(video->bs);
                break;
            }
            code = ptr[3];
            if(code > 0 && code<0xb0) { // start of slice
                start_pos[start_count] = ptrz - start_ptr;
                ptr+=4;
            } else {
                break; // a non-slice start code terminates the picture data
            }
        }
        if (start_count == MAX_START_CODES) {
            // Table full: advance to the next non-slice start code so `len`
            // still covers all the slice data.
            while(ptr<end_ptr && (ptr[0] || ptr[1] || ptr[2] != 1 || (ptr[3] > 0 && ptr[3] < 0xb0))) ptr++;
            ptrz = ptr;
        }
        len = (Ipp32s)(ptrz - start_ptr);
        // Partition [start_ptr, start_ptr+len) between threads, cutting only
        // at recorded slice boundaries; the final portion goes to thread 0.
        prev_ptr = start_ptr;
        curr_thread = 1; // 0th will be last
        for(i=0, j=0; i<m_nNumberOfThreads; i++) {
            Ipp32s approx = len * (i+1) / m_nNumberOfThreads;
            // BUGFIX: check j < start_count before reading start_pos[j].
            // With no slice start codes found (start_count == 0) the original
            // code read an uninitialized array element here.
            if(j >= start_count) { // nothing (left) to split -> all to 0th
                SET_PTR(Video[0]->bs, prev_ptr)
                m_nNumberOfThreads = curr_thread;
                break;
            }
            if(start_pos[j] > approx) {
                m_nNumberOfThreads --; // no data for thread - covered by previous
                continue;
            }
            while(j<start_count && start_pos[j] < approx) j++;
            if(j==start_count) { // it will be last thread -> to 0th
                SET_PTR(Video[0]->bs, prev_ptr)
                m_nNumberOfThreads = curr_thread;
                break;
            }
            INIT_BITSTREAM(Video[curr_thread]->bs, prev_ptr, start_ptr + start_pos[j]);
            curr_thread ++;
            prev_ptr = start_ptr + start_pos[j];
        }
    }
    // Start the extra worker threads, decode thread 0's share here,
    // then wait for the workers to finish.
    for (i = 1; i < m_nNumberOfThreads; i += 1)
    {
        vm_event_signal(m_lpStartEvent + i);
    }
    umcRes = DecodeSlices(0); // return value intentionally ignored (see DecodeSlices)
    // wait additional thread(s)
    for (i = 1;i < m_nNumberOfThreads;i += 1)
        vm_event_wait(m_lpStopEvent + i);
    video = Video[0];
    m_nNumberOfThreads = saveNumberOfThreads; // restore, could have been decreased
    if(PictureHeader.picture_structure != IPPVC_FRAME_PICTURE)
    {
        frame_buffer.field_buffer_index ^= 1; // toggle first/second field
    }
    sequenceHeader.frame_count++;
    UnlockBuffers(); // unlock internal buffers
    return (UMC_OK);
}
Status MPEG2VideoDecoderBase::DecodePictureHeader()
{
Ipp32u code;
Ipp32s pic_type = 0;
IppVideoContext *video = Video[0];
sPictureHeader *pPic = &PictureHeader;
FrameType picture_coding_type_save = PictureHeader.picture_coding_type;
if (GET_REMAINED_BYTES(video->bs) < 4) {
// return header back
UNGET_BITS_32(video->bs)
return (UMC_ERR_NOT_ENOUGH_DATA);
}
memset(&PictureHeader, 0, sizeof(PictureHeader));
GET_BITS(video->bs, 10, PictureHeader.temporal_reference)
GET_TO9BITS(video->bs, 3, pic_type)
GET_BITS(video->bs, 16 ,code)
PictureHeader.picture_coding_type = (UMC::FrameType)pic_type;
VM_ASSERT(PictureHeader.picture_coding_type > 0 && PictureHeader.picture_coding_type < 5);
if(PictureHeader.picture_coding_type == D_PICTURE) {
sequenceHeader.first_i_occure = 1; // no refs in this case
//PictureHeader.temporal_reference = sequenceHeader.stream_time_temporal_reference + 1;
} else if(frame_buffer.field_buffer_index != 0) { // second field must be the same, except IP
if(picture_coding_type_save != PictureHeader.picture_coding_type &&
picture_coding_type_save != I_PICTURE && PictureHeader.picture_coding_type != P_PICTURE)
{
/* NOTE: the original page capture is truncated here inside
   MPEG2VideoDecoderBase::DecodePictureHeader(); the text that followed was
   the hosting site's keyboard-shortcut legend ("快捷键说明": copy Ctrl+C,
   search Ctrl+F, fullscreen F11, theme Ctrl+Shift+D, font size Ctrl+= / Ctrl+-),
   not part of the source file. */