📄 encoder_mpeg2.cpp
字号:
if (tmpFrame) { delete tmpFrame; tmpFrame = NULL; } if (frames_buff) { delete frames_buff; frames_buff = NULL; } return ippStsOk;}void ippMPEG2VideoEncoder::ThreadWorkingRoutine(threadInfo* th){ for (;;) { if (!th || th->m_lpOwner != this) warning("ThreadWorkingRoutine bad 'this'"); // wait for start if (VM_OK != vm_event_wait(&th->start_event)) { warning("ThreadWorkingRoutine result wait start_event"); } if (VM_TIMEOUT != vm_event_timed_wait(&th->quit_event, 0)) { break; } switch (th->m_lpOwner->picture_coding_type) { case I_TYPE: th->m_lpOwner->encodeI(th->numTh); break; case P_TYPE: th->m_lpOwner->encodeP(th->numTh); break; case B_TYPE: th->m_lpOwner->encodeB(th->numTh); break; } if (VM_OK != vm_event_signal(&th->stop_event)) { warning("ThreadWorkingRoutine result signal stop_event"); } }}static void ExpandSrcImage(Ipp8u *frame1, int w1, int h1, int s1, Ipp8u *frame2, int w2, int h2, int s2){ IppiSize img_size = {w1, h1}; IppiSize img_size2 = {w2, h2}; ippiCopyReplicateBorder_8u_C1R(frame1, s1, img_size, frame2, s2, img_size2, 0, 0);}// Get source frame size (summary size of Y, U and V frames)Ipp32s ippMPEG2VideoEncoder::GetYUVFrameSize(){ return srcYUVFrameSize;}// Get pointer to internal encoder memory, where// next YUV frame can be stored before passing// this frame to encode functionIpp8u* ippMPEG2VideoEncoder::GetNextYUVPointer(){ Ipp32s i; if (frames_buff == NULL) { frames_buff = new Ipp8u[buff_size*srcYUVFrameSize]; } i = buff_ind + num_new + 1; if (i >= buff_size) i -= buff_size; return (frames_buff + i*srcYUVFrameSize);}// Encode frames (in source order)// Three possibilities:// 1. YUV != NULL, YUV_pitch != NULL:// YUV must contain three pointers: Y, U and V frames// YUV_pitch must contain three pitches (horizontal steps) of Y, U, V frames// 2. YUV != NULL, YUV_pitch == NULL:// only YUV[0] is used as a pointer to contiguous Y, U and V frames,// pitches are equal to the corresponding widths.// 3. 
YUV == NULL// no source frame// On first frames the function can return zero encoded_size. This is because// of buffering of B frames until the reference P frame comes. After the end of// source stream function EncodeFrame() must be called (IPDistance - 1) times// with YUV == NULL in order to encode the buffered frames.IppStatus ippMPEG2VideoEncoder::EncodeFrame(Ipp8u **YUV, Ipp32s *YUV_pitch, Ipp8u *out_buffer, Ipp32s out_buffer_size, Ipp32s *encoded_size){ Ipp8u *p_src, *p_buff; int i, num; Ipp32s pict_type; mEncodedSize = 0; if (encoded_size) *encoded_size = 0; if (m_bFirstFrame || frames_buff == NULL) { if (frames_buff == NULL) { frames_buff = new Ipp8u[buff_size*srcYUVFrameSize]; } buff_ind = 0; num_btype = 0; num_new = 0; curr_gop = 0; return EncodeFrameReordered(YUV, YUV_pitch, I_TYPE, out_buffer, out_buffer_size, encoded_size); } p_src = (YUV != NULL) ? YUV[0] : NULL; if (p_src) { // store incoming frame num_new++; i = buff_ind + num_new; if (i >= buff_size) i -= buff_size; p_buff = frames_buff + i*srcYUVFrameSize; if (p_src != p_buff) { if (YUV_pitch) { IppiSize roi_l = {srcYFrameHSize, srcYFrameVSize}; IppiSize roi_c = {srcUVFrameHSize, srcUVFrameVSize}; ippiCopy_8u_C1R(YUV[0], YUV_pitch[0], p_buff, srcYFrameHSize, roi_l); p_buff += srcYFrameSize; ippiCopy_8u_C1R(YUV[1], YUV_pitch[1], p_buff, srcUVFrameHSize, roi_c); p_buff += srcUVFrameSize; ippiCopy_8u_C1R(YUV[2], YUV_pitch[2], p_buff, srcUVFrameHSize, roi_c); } else { ippsCopy_8u(p_src, p_buff, srcYUVFrameSize); } } } if (num_btype > 0) { // B-frames in query i = buff_ind - num_btype; if (i < 0) i += buff_size; p_buff = frames_buff + i*srcYUVFrameSize; num_btype--; curr_gop--; return EncodeFrameReordered(&p_buff, NULL, B_TYPE, out_buffer, out_buffer_size, encoded_size); } // no more B-frames in query if (curr_gop <= 0) { curr_gop = encodeInfo.gopSize; } num = curr_gop; if (num > encodeInfo.IPDistance) num = encodeInfo.IPDistance; if (num_new < num) { if (p_src != NULL) { return ippStsOk; // wait for 
necessary number of frames } if (num_new <= 0) { // no incoming & no in buffer return ippStsErr; } num = num_new; } if (num == curr_gop) { pict_type = I_TYPE; } else { pict_type = P_TYPE; } num_btype = num - 1; buff_ind += num; num_new -= num; if (buff_ind >= buff_size) buff_ind -= buff_size; p_buff = frames_buff + buff_ind*srcYUVFrameSize; curr_gop--; return EncodeFrameReordered(&p_buff, NULL, pict_type, out_buffer, out_buffer_size, encoded_size);}// Encode reordered frames (B frames following the corresponding I/P frames).// pict_type must be supplied (I_TYPE, P_TYPE, B_TYPE).// No buffering because the frames are already reordered.// Rules for YUV and YUV_pitch are the same as in EncodeFrame() function.IppStatus ippMPEG2VideoEncoder::EncodeFrameReordered(Ipp8u **YUV, Ipp32s *YUV_pitch, Ipp32s pict_type, Ipp8u *out_buffer, Ipp32s out_buffer_size, Ipp32s *encoded_size){ int DisplayFrameNumber; vm_tick t_start, t_end; this->out_pointer = out_buffer; this->out_buffer_size = out_buffer_size; if (YUV == NULL) { return ippStsNullPtrErr; } if (YUV_pitch == NULL) { Y_src = (Ipp8u*)YUV[0]; U_src = Y_src + srcYFrameSize; V_src = U_src + srcUVFrameSize; Y_pitch = srcYFrameHSize; U_pitch = srcUVFrameHSize; V_pitch = srcUVFrameHSize; } else { Y_src = (Ipp8u*)YUV[0]; if (YUV[1] == NULL && YUV[2] == NULL) { U_src = Y_src + srcYFrameSize; V_src = U_src + srcUVFrameSize; } else { U_src = (Ipp8u*)YUV[1]; V_src = (Ipp8u*)YUV[2]; } Y_pitch = YUV_pitch[0]; U_pitch = YUV_pitch[1]; V_pitch = YUV_pitch[2]; } if (Y_src == NULL || U_src == NULL || V_src == NULL) { return ippStsNullPtrErr; } if ((srcYFrameHSize != YFrameHSize) || (srcYFrameVSize != YFrameVSize)) { // expand borders Ipp8u *Y_tmp, *U_tmp, *V_tmp; if (tmpFrame == NULL) { tmpFrame = new Ipp8u[YFrameSize + 2*UVFrameSize]; } Y_tmp = tmpFrame; U_tmp = Y_tmp + YFrameSize; V_tmp = U_tmp + UVFrameSize; // Y ExpandSrcImage(Y_src, srcYFrameHSize, srcYFrameVSize, Y_pitch, Y_tmp, YFrameHSize, YFrameVSize, YFrameHSize); Y_src = Y_tmp; 
    Y_pitch = YFrameHSize;
    // U
    ExpandSrcImage(U_src, srcUVFrameHSize, srcUVFrameVSize, U_pitch, U_tmp, UVFrameHSize, UVFrameVSize, UVFrameHSize);
    U_src = U_tmp;
    U_pitch = UVFrameHSize;
    // V
    ExpandSrcImage(V_src, srcUVFrameHSize, srcUVFrameVSize, V_pitch, V_tmp, UVFrameHSize, UVFrameVSize, UVFrameHSize);
    V_src = V_tmp;
    V_pitch = UVFrameHSize;
  }

  // Effective pitches: doubled for field pictures (each field is every
  // second line of the frame).
  Ipp32s w_shift = (encodeInfo.FieldPicture) ? 1 : 0;
  Ipp32s nYPitch = Y_pitch << w_shift;
  Ipp32s nUPitch = U_pitch << w_shift;
  Ipp32s nVPitch = V_pitch << w_shift;
  nPitch[0] = nYPitch;
  nPitch[1] = nUPitch;
  nPitch[2] = nVPitch;
  // Byte offsets of the 8x8 blocks inside a macroblock that depend on the
  // pitch (lower-half luma blocks and second chroma rows); frame-DCT table:
  block_offset_frm[2] = 8*nYPitch;
  block_offset_frm[3] = 8*nYPitch + 8;
  block_offset_frm[6] = 8*nUPitch;
  block_offset_frm[7] = 8*nVPitch;
  block_offset_frm[10] = 8*nUPitch + 8;
  block_offset_frm[11] = 8*nVPitch + 8;
  // ...and the field-DCT table (interleaved lines, hence 1x pitch):
  block_offset_fld[2] = nYPitch;
  block_offset_fld[3] = nYPitch + 8;
  block_offset_fld[6] = nUPitch;
  block_offset_fld[7] = nVPitch;
  block_offset_fld[10] = nUPitch + 8;
  block_offset_fld[11] = nVPitch + 8;

  // prepare out buffers
  mEncodedSize = 0;
  PrepareBuffers();

  // Mpeg2 sequence header: emitted before the first frame and before every
  // I picture (i.e. at each GOP start).
  if (m_bFirstFrame || (pict_type == I_TYPE)) {
    PutSequenceHeader();
    PutSequenceExt();
    PutSequenceDisplayExt();
    // optionally output some text data
    if (encodeInfo.idStr[0]) {
      PutUserData();
    }
    m_bFirstFrame = false;
  }

  picture_coding_type = pict_type;
  // Per-type setup of the motion-vector f_codes (15 marks "unused" for the
  // directions a picture type does not predict from).
  switch (pict_type) {
  case I_TYPE:
    forw_hor_f_code = forw_vert_f_code = 15;
    back_hor_f_code = back_vert_f_code = 15;
    m_GOP_Start = numEncodedFrames;
    PutGOPHeader(numEncodedFrames);
    break;
  case P_TYPE:
    B_count = 0;
    forw_hor_f_code = encodeInfo.pMotionData[0].forw_hor_f_code;
    forw_vert_f_code = encodeInfo.pMotionData[0].forw_vert_f_code;
    back_hor_f_code = back_vert_f_code = 15;
    break;
  case B_TYPE:
    B_count++;
    if (B_count >= M) {
      B_count = 1;
    }
    forw_hor_f_code = encodeInfo.pMotionData[B_count].forw_hor_f_code;
    forw_vert_f_code = encodeInfo.pMotionData[B_count].forw_vert_f_code;
    back_hor_f_code = encodeInfo.pMotionData[B_count].back_hor_f_code;
    back_vert_f_code =
      encodeInfo.pMotionData[B_count].back_vert_f_code;
    break;
  default:
    return ippStsErr;
  }

  // Display order differs from coding order: B pictures are coded after the
  // reference that follows them, references (except the first) are coded
  // M-1 frames early.
  DisplayFrameNumber = numEncodedFrames;
  if (picture_coding_type == B_TYPE) DisplayFrameNumber--;
  else if (numEncodedFrames != 0) DisplayFrameNumber += M-1;
  temporal_reference = DisplayFrameNumber - m_GOP_Start;

  if (picture_coding_type != B_TYPE) {
    Ipp8u *aux;
    // scroll frames RefB <-> RefF
    aux = YRefFrameF;
    YRefFrameF = YRefFrameB;
    YRefFrameB = aux;
    aux = URefFrameF;
    URefFrameF = URefFrameB;
    URefFrameB = aux;
    aux = VRefFrameF;
    VRefFrameF = VRefFrameB;
    VRefFrameB = aux;
  }

  if (!encodeInfo.FieldPicture) {
    curr_frame_pred = curr_frame_dct = encodeInfo.frame_pred_frame_dct[picture_coding_type - 1];
  } else {
    // Field pictures: frame prediction/DCT flags are forced on.
    curr_frame_dct = 1;
    curr_frame_pred = 1;
  }
  curr_intra_vlc_format = encodeInfo.intraVLCFormat[picture_coding_type - 1];
  curr_scan = encodeInfo.altscan_tab[picture_coding_type - 1];

  t_start = GET_TICKS;

  if (!encodeInfo.FieldPicture) {
    // Frame picture: single rate-control / encode / flush pass.
    PictureRateControl();
    PutPicture();
    flushBuffer();
    PostPictureRateControl(8*mEncodedSize);
  } else {
    // Field picture: encode top and bottom fields as two pictures.
    // Save the frame-level pointers, then temporarily halve the vertical
    // sizes and double the pitches so the per-field code sees one field.
    // Everything is restored after the loop.
    Ipp8u *pSrc[3] = {Y_src, U_src, V_src};
    Ipp8u *pRefF[3] = {YRefFrameF, URefFrameF, VRefFrameF};
    Ipp8u *pRefB[3] = {YRefFrameB, URefFrameB, VRefFrameB};
    MBInfo *pMBInfo0 = pMBInfo;
    Ipp64s field_endpos = 0;
    YFrameVSize >>= 1;
    UVFrameVSize >>= 1;
    YFrameSize >>= 1;
    UVFrameSize >>= 1;
    Y_pitch *= 2;
    U_pitch *= 2;
    V_pitch *= 2;
    for (curr_field = 0; curr_field < 2; curr_field++) {
      if (curr_field) PrepareBuffers();
      // First iteration encodes the field that is displayed first.
      picture_structure = (curr_field != encodeInfo.top_field_first) ?
        TOP_FIELD : BOTTOM_FIELD;
      if (picture_structure == TOP_FIELD) {
        Y_src = pSrc[0];
        U_src = pSrc[1];
        V_src = pSrc[2];
        YRefFrameF = pRefF[0];
        URefFrameF = pRefF[1];
        VRefFrameF = pRefF[2];
        YRefFrameB = pRefB[0];
        URefFrameB = pRefB[1];
        VRefFrameB = pRefB[2];
        pMBInfo = pMBInfo0;
        YOffsetToOtherField = YFrameSize;
        UVOffsetToOtherField = UVFrameSize;
      } else {
        // Bottom field: start one source line down, and one (halved) field
        // further into the reference/MB-info storage.
        Y_src = pSrc[0] + (Y_pitch >> 1);
        U_src = pSrc[1] + (U_pitch >> 1);
        V_src = pSrc[2] + (V_pitch >> 1);
        YRefFrameF = pRefF[0] + YFrameSize;
        URefFrameF = pRefF[1] + UVFrameSize;
        VRefFrameF = pRefF[2] + UVFrameSize;
        YRefFrameB = pRefB[0] + YFrameSize;
        URefFrameB = pRefB[1] + UVFrameSize;
        VRefFrameB = pRefB[2] + UVFrameSize;
        pMBInfo = pMBInfo0 + (YFrameSize/(16*16));
        YOffsetToOtherField = -YFrameSize;
        UVOffsetToOtherField = -UVFrameSize;
      }
      if (picture_coding_type == P_TYPE && curr_field) {
        // second field & P-type
        // switch between frames (YRefFrameF <-> YRefFrameB)
        if (YRefFrameF - m_lpbReference < YUVFrameSize) {
          YOffsetToOtherField += YUVFrameSize;
          UVOffsetToOtherField += YUVFrameSize;
        } else {
          YOffsetToOtherField -= YUVFrameSize;
          UVOffsetToOtherField -= YUVFrameSize;
        }
      }
      if (picture_coding_type == B_TYPE) YOffsetToOtherField = 0;
      // NOTE(review): this unconditional assignment makes the B_TYPE
      // conditional above dead and discards the offsets computed for the
      // other picture types — it looks like debugging residue. Kept as-is
      // to preserve behavior; confirm against the reference encoder.
      YOffsetToOtherField = 0;
      PictureRateControl();
      PutPicture();
      flushBuffer();
      PostPictureRateControl(8*mEncodedSize - field_endpos);
      field_endpos = 8*mEncodedSize;
    }
    Y_src = pSrc[0];
    U_src = pSrc[1];
    V_src = pSrc[2];
    // restore params
    YRefFrameF = pRefF[0];
    URefFrameF = pRefF[1];
    VRefFrameF = pRefF[2];
    YRefFrameB = pRefB[0];
    URefFrameB = pRefB[1];
    VRefFrameB = pRefB[2];
    pMBInfo = pMBInfo0;
    YFrameVSize <<= 1;
    UVFrameVSize <<= 1;
    YFrameSize <<= 1;
    UVFrameSize <<= 1;
  }

  t_end = GET_TICKS;
  numEncodedFrames++;
  // Running performance statistics (wall-clock ticks -> seconds).
  encodeInfo.encode_time += (double)(vm_var64s)(t_end-t_start)/cpu_freq;
  encodeInfo.performance = (double)numEncodedFrames/encodeInfo.encode_time;
  encodeInfo.motion_estimation_perf = (double)numEncodedFrames*cpu_freq/motion_estimation_time;
#ifdef MPEG2_DEBUG_CODE
  if (encodeInfo.LogMask &
LOG_SAVE_BMP) { vm_char bmp_fname[256]; vm_char frame_ch = (picture_coding_type == I_TYPE) ? 'i' : (picture_coding_type == P_TYPE) ? 'p' : 'b'; if (numEncodedFrames < 25) { vm_string_sprintf(bmp_fname, VM_STRING("frame_%02d_s.bmp"), numEncodedFrames); save_bmp(bmp_fname, -1); if (encodeInfo.IPDistance > 1) { vm_string_sprintf(bmp_fname, VM_STRING("frame_%02d%c_f.bmp"), numEncodedFrames, frame_ch); save_bmp(bmp_fname, 1); } } }#endif /* MPEG2_DEBUG_CODE */ if (encoded_size) *encoded_size = mEncodedSize; return (ippStsOk);}IppStatus ippMPEG2VideoEncoder::PutPicture(){ int i; PutPictureHeader(); PutPictureCodingExt(); if (m_numThreads > 1) { // start additional thread(s) for (i = 0; i < m_numThreads - 1; i++) { vm_event_signal(&threads[i].start_event); } } switch (picture_coding_type) { case I_TYPE: encodeI(0); break; case P_TYPE: encodeP(0); break; case B_TYPE: encodeB(0); break; } if (m_numThreads > 1) { // wait additional thread(s) for (i = 0; i < m_numThreads - 1; i++) { vm_event_wait(&threads[i].stop_event); } } return ippStsOk;}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -