// umc_mp4_video_parser.cpp
//f }
//f }
VOP->ref_select_code = mp4_GetBits9(pInfo, 2);
}
}
return MP4_STATUS_OK;
}
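// ParseESDSHeader scans the MPEG-4 decoder specific info from the track's esds
// box for VisualObject / VideoObjectLayer start codes and fills the track's
// VideoStreamInfo: frame size, pixel aspect ratio and interlacing information.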
Status MP4Splitter::ParseESDSHeader(T_trak_data *pTrak, Ipp32u nTrack)
{
Ipp32u j(0), k(0);
mp4_Info vInfo;
VideoStreamInfo *pVideoInfo = (VideoStreamInfo *)(m_pInfo->m_ppTrackInfo[nTrack]->m_pStreamInfo);
T_esds_data sEsds = pTrak->mdia.minf.stbl.stsd.table->esds;
vInfo.VisualObject.verid = 1;
// find start code VideoObjectLayer and parse width and height
for (j = 0; j + 3 < sEsds.decoderConfigLen; j++) { // need 4 bytes: start code prefix + type
if (0x00 == sEsds.decoderConfig[j + 0] &&
0x00 == sEsds.decoderConfig[j + 1] &&
0x01 == sEsds.decoderConfig[j + 2]) {
if (MP4_VISUAL_OBJECT_SC == sEsds.decoderConfig[j + 3]) {
vInfo.bufptr= sEsds.decoderConfig + j + 4;
vInfo.bitoff = 0;
if (MP4_STATUS_NOTSUPPORT == mp4_Parse_VisualObject(&vInfo))
return UMC_ERR_FAILED; // unsupported Video Object
j += 5;
continue;
}
if (MP4_VIDEO_OBJECT_LAYER_MIN_SC <= sEsds.decoderConfig[j + 3] &&
MP4_VIDEO_OBJECT_LAYER_MAX_SC >= sEsds.decoderConfig[j + 3]) {
vInfo.bufptr = sEsds.decoderConfig + j + 4;
vInfo.bitoff = 0;
if (MP4_STATUS_NOTSUPPORT == mp4_Parse_VideoObject(&vInfo))
return UMC_ERR_FAILED; // unsupported Video Object
////////////////////////////////////////////////////////////////////////////////////////
// set aspect ratio info
switch (vInfo.VisualObject.VideoObject.aspect_ratio_info) {
case MP4_ASPECT_RATIO_FORBIDDEN:
case MP4_ASPECT_RATIO_1_1:
pVideoInfo->aspect_ratio_width = 1;
pVideoInfo->aspect_ratio_height = 1;
break;
case MP4_ASPECT_RATIO_12_11:
pVideoInfo->aspect_ratio_width = 12;
pVideoInfo->aspect_ratio_height = 11;
break;
case MP4_ASPECT_RATIO_10_11:
pVideoInfo->aspect_ratio_width = 10;
pVideoInfo->aspect_ratio_height = 11;
break;
case MP4_ASPECT_RATIO_16_11:
pVideoInfo->aspect_ratio_width = 16;
pVideoInfo->aspect_ratio_height = 11;
break;
case MP4_ASPECT_RATIO_40_33:
pVideoInfo->aspect_ratio_width = 40;
pVideoInfo->aspect_ratio_height = 33;
break;
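// any other value is treated as the extended PAR case: take the pixel aspect
// ratio fields read from the VOL header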
default:
pVideoInfo->aspect_ratio_width = vInfo.VisualObject.VideoObject.aspect_ratio_info_par_width;
pVideoInfo->aspect_ratio_height = vInfo.VisualObject.VideoObject.aspect_ratio_info_par_height;
}
pVideoInfo->clip_info.width = vInfo.VisualObject.VideoObject.width;
pVideoInfo->clip_info.height = vInfo.VisualObject.VideoObject.height;
pVideoInfo->interlace_type = (InterlaceType)vInfo.VisualObject.VideoObject.interlaced;
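// The VOL header only says whether the stream is interlaced; the field order
// is signalled per VOP, so read the first coded frame and parse its VOP header
// to learn top_field_first.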
if (pVideoInfo->interlace_type != PROGRESSIVE) {
Status umcRes;
TrackIndex *pIndex = &m_pTrackIndex[nTrack];
IndexEntry m_Frame;
Ipp8u *p_frame;
// get first frame (index is already set to First frame)
umcRes = pIndex->Get(m_Frame);
UMC_CHECK_STATUS(umcRes)
p_frame = (Ipp8u *)ippsMalloc_8u(m_Frame.uiSize);
UMC_CHECK_PTR(p_frame)
m_ReaderMutex.Lock();
m_pReader->SetPosition((Ipp64u)m_Frame.stPosition);
m_pReader->GetData(p_frame, &m_Frame.uiSize);
m_ReaderMutex.Unlock();
vInfo.bitoff = 0;
vInfo.bufptr = p_frame;
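// scan the frame for the video_object_plane start code (00 00 01 B6)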
for (k = 0; k + 3 < m_Frame.uiSize; k++) {
if (0x00 == p_frame[k + 0] &&
0x00 == p_frame[k + 1] &&
0x01 == p_frame[k + 2] &&
0xb6 == p_frame[k + 3]) {
vInfo.bufptr = p_frame + k + 4;
mp4_Status mp4Status = mp4_Parse_VideoObjectPlane(&vInfo);
if (MP4_STATUS_OK == mp4Status) {
if (pVideoInfo->interlace_type) {
if (1 == vInfo.VisualObject.VideoObject.VideoObjectPlane.top_field_first) {
pVideoInfo->interlace_type = INTERLEAVED_TOP_FIELD_FIRST;
} else {
pVideoInfo->interlace_type = INTERLEAVED_BOTTOM_FIELD_FIRST;
}//if
break;
}
}
}//if
}
ippsFree(p_frame);
}//if
return UMC_OK;
}//if
}//if
}// for
// no VisualObject / VideoObjectLayer header was found
return UMC_ERR_FAILED;
}//Status MP4Splitter::ParseESDSHeader(T_trak_data *pTrak, Ipp32u nTrack)
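// ParseAVCCHeader parses the sequence parameter set stored in the track's avcC
// box just far enough to reach frame_mbs_only_flag, which determines whether
// the H.264 stream is reported as progressive or field coded.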
Status MP4Splitter::ParseAVCCHeader(T_trak_data *pTrak, Ipp32u nTrack)
{
BitStreamReader stream;
Ipp32u profile_idc;
Ipp32u pic_order_cnt_type;
bool bTopFirst = true;
VideoStreamInfo *pVideoInfo = (VideoStreamInfo *)(m_pInfo->m_ppTrackInfo[nTrack]->m_pStreamInfo);
T_avcC_data sAvcc = pTrak->mdia.minf.stbl.stsd.table->avcC;
// check error(s)
if (NULL == sAvcc.decoderConfig)
return UMC_OK;
// initialize stream reader
stream.Init(sAvcc.decoderConfig);
// profile idc
profile_idc = stream.GetBits(8);
// constraint set flags
stream.GetBits(4);
// reserved 4 bits
stream.GetBits(4);
// level idc
stream.GetBits(8);
// seq parameter set id
stream.GetUE();
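// the chroma format, bit depth and scaling matrix fields below are only
// present for the High/FRExt profiles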
if ((100 == profile_idc) ||
(110 == profile_idc) ||
(122 == profile_idc) ||
(144 == profile_idc)) {
Ipp32u chroma_format_idc;
Ipp32u seq_scaling_matrix_present_flag;
// chroma_format_idc
chroma_format_idc = stream.GetUE();
if (3 == chroma_format_idc) {
// residual_colour_transform_flag
stream.GetBit();
}
// bit_depth_luma_minus8
stream.GetUE();
// bit_depth_chroma_minus8
stream.GetUE();
// qpprime_y_zero_transform_bypass_flag
stream.GetBit();
// seq_scaling_matrix_present_flag
seq_scaling_matrix_present_flag = stream.GetBit();
if (seq_scaling_matrix_present_flag) {
Ipp32s i;
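// skip the scaling lists, mirroring the scaling_list() SPS syntax:
// 16 coefficients for the first six lists, 64 for the last two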
for (i = 0; i < 8; i += 1) {
Ipp32u seq_scaling_list_present_flag;
// seq_scaling_list_present_flag
seq_scaling_list_present_flag = stream.GetBit();
if (seq_scaling_list_present_flag) {
Ipp32s iScalingListSize;
Ipp32s lastScale, nextScale;
Ipp32s j;
if (6 > i)
iScalingListSize = 16;
else
iScalingListSize = 64;
lastScale = 8;
nextScale = 8;
for (j = 0; j < iScalingListSize; j += 1) {
if (nextScale) {
Ipp32s delta_scale;
delta_scale = stream.GetSE();
nextScale = (lastScale + delta_scale + 256) % 256;
lastScale = nextScale;
}
}
}
}
}
}
// log2 max frame num minus4
stream.GetUE();
// pic order cnt type
pic_order_cnt_type = stream.GetUE();
if (0 == pic_order_cnt_type) {
// log2 max pic order cnt lsb minus4
stream.GetUE();
} else {
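// pic_order_cnt_type != 0: skip the picture order count fields; the sign of
// offset_for_top_to_bottom_field is used as a hint for the field order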
Ipp32s num_ref_frames_in_pic_order_cnt_cycle;
Ipp32s i;
// delta_pic_order_always_zero_flag
stream.GetBit();
// offset_for_non_ref_pic
stream.GetSE();
// offset_for_top_to_bottom_field
bTopFirst = stream.GetSE() >= 0;
// num_ref_frames_in_pic_order_cnt_cycle
num_ref_frames_in_pic_order_cnt_cycle = stream.GetUE();
for (i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i += 1)
stream.GetSE();
}
// num_ref_frames
stream.GetUE();
// gaps_in_frame_num_value_allowed_flag
stream.GetBit();
// pic_width_in_mbs_minus1
stream.GetUE();
// pic_height_in_map_units_minus1
stream.GetUE();
// frame_mbs_only_flag
if (stream.GetBit()) {
pVideoInfo->interlace_type = PROGRESSIVE;
} else {
if (bTopFirst) {
pVideoInfo->interlace_type = INTERLEAVED_TOP_FIELD_FIRST;
} else {
pVideoInfo->interlace_type = INTERLEAVED_BOTTOM_FIELD_FIRST;
}
}
return UMC_OK;
}//Status MP4Splitter::ParseAVCCHeader(T_trak_data *pTrak, Ipp32u nTrack)
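// --------------------------------------------------------------------------
// For reference only: a minimal, self-contained sketch of the Exp-Golomb
// decoding that the BitStreamReader::GetUE()/GetSE() calls above are assumed
// to perform. It is not part of UMC; the class name and the byte-buffer
// interface are hypothetical.
#include <cstddef>
#include <cstdint>

class ExpGolombReader {
public:
    ExpGolombReader(const uint8_t *buf, size_t len) : m_buf(buf), m_len(len), m_bit(0) {}

    // read one bit, MSB first; reads past the end return 0
    uint32_t GetBit() {
        size_t byte = m_bit >> 3;
        uint32_t bit = (byte < m_len) ? ((m_buf[byte] >> (7 - (m_bit & 7))) & 1u) : 0u;
        ++m_bit;
        return bit;
    }

    // ue(v): count leading zero bits up to the '1' marker, then read that many
    // more bits as the suffix
    uint32_t GetUE() {
        uint32_t zeros = 0;
        while (GetBit() == 0) {
            if (++zeros >= 32)      // guard against a corrupt all-zero stream
                return 0;
        }
        uint32_t value = 0;
        for (uint32_t i = 0; i < zeros; ++i)
            value = (value << 1) | GetBit();
        return (1u << zeros) - 1u + value;
    }

    // se(v): map the unsigned code to a signed value (1 -> +1, 2 -> -1, 3 -> +2, ...)
    int32_t GetSE() {
        uint32_t ue = GetUE();
        return (ue & 1u) ? (int32_t)((ue + 1u) >> 1) : -(int32_t)(ue >> 1);
    }

private:
    const uint8_t *m_buf;
    size_t m_len;
    size_t m_bit;
};
// --------------------------------------------------------------------------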