// umc_mpeg2_enc.cpp
#endif
        UMC_ALLOC_PERSISTENT, ALIGN_VALUE)) {
      vm_debug_trace(VM_DEBUG_ERROR, VM_STRING("External allocation failed\n"));
      return UMC_ERR_ALLOC;
    }
    m_lpbReference = (Ipp8u*)m_pMemoryAllocator->Lock(mid_Reference);
    if (!m_lpbReference) {
      vm_debug_trace(VM_DEBUG_ERROR, VM_STRING("External Lock failed\n"));
      return UMC_ERR_ALLOC;
    }
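    // Attach the four rotating reconstruction frames to the reference buffer.
    // With ME_REF_ORIGINAL every frame gets its own surface (presumably so an
    // unmodified original is kept for motion estimation); otherwise pairs of
    // frames share one surface ((i>>1) maps 0,1 -> slice 0 and 2,3 -> slice 1).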
    for (i = 0; i < 4; i++) {
      InitInternalFrame(RotatingFrames[i]);
#ifdef ME_REF_ORIGINAL
      SetInternalFramePointers(RotatingFrames[i],
        m_lpbReference + i*YUVFrameSize);
#else
      SetInternalFramePointers(RotatingFrames[i],
        m_lpbReference + (i>>1)*YUVFrameSize);
#endif
    }
    m_pMemoryAllocator->Unlock(mid_Reference);
  }
  // threads preparation
  if (encodeInfo.numThreads != 1) {
    if (encodeInfo.numThreads == 0)
      encodeInfo.numThreads = vm_sys_info_get_cpu_num();
    if (encodeInfo.numThreads < 1) {
      encodeInfo.numThreads = 1;
    }
    Ipp32s max_threads = MBcountV;
    if (encodeInfo.info.interlace_type != PROGRESSIVE) {
      max_threads = MBcountV/2;
    }
    if (encodeInfo.numThreads > max_threads) {
      encodeInfo.numThreads = max_threads;
    }
  }
  // allocate more per-thread contexts if fewer threads were prepared before
  if (threadsAllocated < encodeInfo.numThreads) {
    threadSpecificData* cur_threadSpec = threadSpec;
    threadSpec = MP2_ALLOC(threadSpecificData, encodeInfo.numThreads);
    // copy existing entries into the bigger array
    if (threadsAllocated) {
      ippsCopy_8u((Ipp8u*)cur_threadSpec, (Ipp8u*)threadSpec,
        threadsAllocated*sizeof(threadSpecificData));
      MP2_FREE(cur_threadSpec);
    }
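    // Only numThreads-1 threadInfo handles are kept: slot 0 has per-thread data
    // in threadSpec[0] but no separate worker thread (the calling thread
    // presumably processes that slice itself).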
    if (encodeInfo.numThreads > 1) {
      threadInfo** cur_threads = threads;
      threads = MP2_ALLOC(threadInfo*, encodeInfo.numThreads - 1);
      if (threadsAllocated > 1) {
        for (i = 0; i < threadsAllocated - 1; i++)
          threads[i] = cur_threads[i];
        MP2_FREE(cur_threads);
      }
    }
    // init newly allocated entries
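    // Each thread gets its own scratch buffers: 3*256 Ipp16s gives room for
    // 12 8x8 coefficient blocks, i.e. one macroblock even in the 4:4:4 worst case.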
    for (i = threadsAllocated; i < encodeInfo.numThreads; i++) {
      threadSpec[i].pDiff = MP2_ALLOC(Ipp16s, 3*256);
      threadSpec[i].pDiff1 = MP2_ALLOC(Ipp16s, 3*256);
      threadSpec[i].pMBlock = MP2_ALLOC(Ipp16s, 3*256);
      threadSpec[i].me_matrix_size = 0;
      threadSpec[i].me_matrix_buff = NULL;
      threadSpec[i].me_matrix_id = 0;
      if (i > 0) {
        threads[i-1] = MP2_ALLOC(threadInfo, 1);
        // fill the descriptor before the thread is created so the worker
        // never sees uninitialized fields
        threads[i-1]->numTh = i;
        threads[i-1]->m_lpOwner = this;
        vm_event_set_invalid(&threads[i-1]->start_event);
        if (VM_OK != vm_event_init(&threads[i-1]->start_event, 0, 0))
          return UMC_ERR_ALLOC;
        vm_event_set_invalid(&threads[i-1]->stop_event);
        if (VM_OK != vm_event_init(&threads[i-1]->stop_event, 0, 0))
          return UMC_ERR_ALLOC;
        vm_event_set_invalid(&threads[i-1]->quit_event);
        if (VM_OK != vm_event_init(&threads[i-1]->quit_event, 0, 0))
          return UMC_ERR_ALLOC;
        vm_thread_set_invalid(&threads[i-1]->thread);
        if (0 == vm_thread_create(&threads[i-1]->thread,
                                  ThreadWorkingRoutine,
                                  threads[i-1])) {
          return UMC_ERR_ALLOC;
        }
      }
    }
    threadsAllocated = encodeInfo.numThreads;
  }
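  // Distribute the picture between the threads as bands of whole macroblock
  // rows: 16 luma lines per unit for progressive content, 32 lines (one
  // macroblock row in each field) for interlaced content.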
  Ipp32s mb_shift = (encodeInfo.info.interlace_type == PROGRESSIVE) ? 4 : 5;
  for (i = 0; i < encodeInfo.numThreads; i++) {
    threadSpec[i].start_row = ((YFrameVSize >> mb_shift) * i / encodeInfo.numThreads) << mb_shift;
  }
  // quantizer matrices
  IntraQMatrix = encodeInfo.IntraQMatrix;
  NonIntraQMatrix = (encodeInfo.CustomNonIntraQMatrix) ? encodeInfo.NonIntraQMatrix : NULL;
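  // Custom tables get precomputed reciprocals so quantization can multiply
  // rather than divide; with the default matrices the inverse tables stay NULL.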
  if (encodeInfo.CustomIntraQMatrix) {
    InvIntraQMatrix = _InvIntraQMatrix;
    for (i = 0; i < 64; i++) {
      InvIntraQMatrix[i] = 1.f / (Ipp32f)IntraQMatrix[i];
    }
  } else {
    InvIntraQMatrix = NULL;
  }
  if (encodeInfo.CustomNonIntraQMatrix) {
    InvNonIntraQMatrix = _InvNonIntraQMatrix;
    for (i = 0; i < 64; i++) {
      InvNonIntraQMatrix[i] = 1.f / (Ipp32f)NonIntraQMatrix[i];
    }
  } else {
    InvNonIntraQMatrix = NULL;
  }
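  // onlyIFrames marks the degenerate GOP where every picture is an I frame;
  // per the note below, this presumably lets reference reconstruction be skipped.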
  onlyIFrames = 0;
  // field pictures are coded as I/P field pairs, so they still need reconstruction
  if (encodeInfo.gopSize == 1 && encodeInfo.IPDistance == 1 && !encodeInfo.FieldPicture) {
    onlyIFrames = 1;
  }
  // initialize rate control
  InitRateControl(encodeInfo.info.bitrate);
  // frames list
  m_GOP_Start = 0;
  B_count = 0;
  m_FirstFrame = 1;
  closed_gop = 1;
  // YUV frames buffer
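  // The input queue has to hold IPDistance + 1 frames: the B frames of a
  // sub-GOP stay buffered until their forward reference arrives.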
  if (buff_size < encodeInfo.IPDistance + 1) {
    if (VideoData_buff != NULL) {
      delete [] VideoData_buff;
      VideoData_buff = NULL;
    }
    if (frames_buff != NULL) {
      if (is_frames_buff_locked) {
        m_pMemoryAllocator->Unlock(mid_frames_buff);
        is_frames_buff_locked = 0;
      }
      m_pMemoryAllocator->Free(mid_frames_buff);
      frames_buff = NULL;
    }
    buff_size = encodeInfo.IPDistance + 1;
  }
  buff_ind = 0;
  num_btype = 0;
  num_new = 0;
  curr_gop = 0;
  m_Inited = 1;
  return UMC_OK;
}
Status MPEG2VideoEncoderBase::GetInfo(BaseCodecParams *Params)
{
  MPEG2EncoderParams *MPEG2Params = DynamicCast<MPEG2EncoderParams>(Params);
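  // qualityMeasure is a rough estimate: the mean quantiser scale of the three
  // picture types (112 is the largest possible value) mapped inversely onto 0..100.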
  encodeInfo.qualityMeasure = 100 - (qscale[0]+qscale[1]+qscale[2])*100/(3*112); // rough
  encodeInfo.info.stream_type = MPEG2_VIDEO;
  if (NULL != MPEG2Params) {
    *MPEG2Params = encodeInfo;
  } else {
    VideoEncoderParams *VideoParams = DynamicCast<VideoEncoderParams>(Params);
    if (NULL != VideoParams) {
      *VideoParams = *(VideoEncoderParams*)&encodeInfo;
    } else if (NULL != Params) {
      *Params = *(BaseCodecParams*)&encodeInfo;
    } else {
      return UMC_ERR_NULL_PTR;
    }
  }
  return UMC_OK;
}
Status MPEG2VideoEncoderBase::Close()
{
  Ipp32s i;
  if (!threadSpec) {
    return UMC_ERR_NULL_PTR;
  }
  if (threadsAllocated) {
    // tell all worker threads to quit and wake them up
    if (threadsAllocated > 1) {
      for (i = 0; i < threadsAllocated - 1; i++) {
        vm_event_signal(&threads[i]->quit_event);
        vm_event_signal(&threads[i]->start_event);
      }
      for (i = 0; i < threadsAllocated - 1; i++) {
        if (vm_thread_is_valid(&threads[i]->thread)) {
          vm_thread_wait(&threads[i]->thread);
          vm_thread_set_invalid(&threads[i]->thread);
          vm_thread_close(&threads[i]->thread);
        }
        if (vm_event_is_valid(&threads[i]->start_event)) {
          vm_event_destroy(&threads[i]->start_event);
          vm_event_set_invalid(&threads[i]->start_event);
        }
        if (vm_event_is_valid(&threads[i]->stop_event)) {
          vm_event_destroy(&threads[i]->stop_event);
          vm_event_set_invalid(&threads[i]->stop_event);
        }
        if (vm_event_is_valid(&threads[i]->quit_event)) {
          vm_event_destroy(&threads[i]->quit_event);
          vm_event_set_invalid(&threads[i]->quit_event);
        }
        MP2_FREE(threads[i]);
      }
      MP2_FREE(threads);
    }
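    // release the per-thread scratch buffers, including the entry for slot 0,
    // which has no separate worker thread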
    for (i = 0; i < threadsAllocated; i++) {
      if (threadSpec[i].pDiff) {
        MP2_FREE(threadSpec[i].pDiff);
        threadSpec[i].pDiff = NULL;
      }
      if (threadSpec[i].pDiff1) {
        MP2_FREE(threadSpec[i].pDiff1);
        threadSpec[i].pDiff1 = NULL;
      }
      if (threadSpec[i].pMBlock) {
        MP2_FREE(threadSpec[i].pMBlock);
        threadSpec[i].pMBlock = NULL;
      }
      if (threadSpec[i].me_matrix_buff) {
        MP2_FREE(threadSpec[i].me_matrix_buff);
        threadSpec[i].me_matrix_buff = NULL;
      }
    }
    MP2_FREE(threadSpec);
    threadsAllocated = 0;
  }
  if (pMBInfo) {
    MP2_FREE(pMBInfo);
    pMBInfo = NULL;
  }
  if (pMotionData) {
    MP2_FREE(pMotionData);
    pMotionData = NULL;
  }
  MotionDataCount = 0;
  if (vlcTableB15) {
    ippiHuffmanTableFree_32s(vlcTableB15);
    vlcTableB15 = NULL;
  }
  if (vlcTableB5c_e) {
    ippiHuffmanTableFree_32s(vlcTableB5c_e);
    vlcTableB5c_e = NULL;
  }
  if (m_lpbReference) {
    m_pMemoryAllocator->Free(mid_Reference);
    m_lpbReference = NULL;
  }
  for (i = 0; i < 4; i++) {
    delete RotatingFrames[i];
    RotatingFrames[i] = 0;
  }
  if (tmpFrame_buf) {
    m_pMemoryAllocator->Free(mid_tmpFrame_buf);
    tmpFrame_buf = NULL;
  }
  if (tmpFrame) {
    delete tmpFrame;
    tmpFrame = NULL;
  }
  if (VideoData_buff) {
    delete [] VideoData_buff;
    VideoData_buff = NULL;
  }
  if (frames_buff) {
    if (is_frames_buff_locked) {
      m_pMemoryAllocator->Unlock(mid_frames_buff);
      is_frames_buff_locked = 0;
    }
    m_pMemoryAllocator->Free(mid_frames_buff);
    frames_buff = NULL;
  }
  if (frame_loader != NULL) {
    delete frame_loader;
    frame_loader = NULL;
  }
  m_Inited = 0;
  BaseCodec::Close();
  return UMC_OK;
}
// Get the source frame size (total size of the Y, U and V planes)
Ipp32s MPEG2VideoEncoderBase::GetYUVFrameSize()
{
  Ipp32s srcUVFrameHSize = (encodeInfo.info.color_format == YUV444) ?
    encodeInfo.info.clip_info.width : (encodeInfo.info.clip_info.width >> 1);
  Ipp32s srcUVFrameVSize = (encodeInfo.info.color_format == YUV420) ?
    (encodeInfo.info.clip_info.height >> 1) : encodeInfo.info.clip_info.height;
  return encodeInfo.info.clip_info.width*encodeInfo.info.clip_info.height +
    2*srcUVFrameHSize*srcUVFrameVSize;
}
Status MPEG2VideoEncoderBase::CreateInternalBuffers(Ipp32s count)
{
  Ipp32s i;
  if (count <= 0)
    return UMC_ERR_INVALID_PARAMS;
  buff_size = count;
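  // all YUV frames live in one contiguous allocation; the VideoData objects
  // only point into it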
  if (frames_buff == NULL) {
    if (UMC_OK != m_pMemoryAllocator->Alloc(&mid_frames_buff,
        buff_size*YUVFrameSize, UMC_ALLOC_PERSISTENT, ALIGN_VALUE)) {
      vm_debug_trace(VM_DEBUG_ERROR, VM_STRING("External allocation failed\n"));
      return UMC_ERR_ALLOC;
    }
    frames_buff = (Ipp8u*)m_pMemoryAllocator->Lock(mid_frames_buff);
    if (!frames_buff) {
      vm_debug_trace(VM_DEBUG_ERROR, VM_STRING("External Lock failed\n"));
      return UMC_ERR_ALLOC;
    }
    m_pMemoryAllocator->Unlock(mid_frames_buff);
  }
  if (VideoData_buff == NULL) {
    VideoData_buff = new VideoData[buff_size];
    if (VideoData_buff == NULL)
      return UMC_ERR_ALLOC;
    for (i = 0; i < buff_size; i++) {
      InitInternalFrame(VideoData_buff + i);
    }
  }
  for (i = 0; i < buff_size; i++) {
    SetInternalFramePointers(VideoData_buff + i, frames_buff + i*YUVFrameSize);
  }
  buff_ind = 0;
  num_btype = 0;
  num_new = -1;
  curr_gop = 0;
  return UMC_OK;
}
// Get a pointer to internal encoder memory where the next YUV frame can be
// stored before it is passed to the encode function
VideoData* MPEG2VideoEncoderBase::GetNextYUVPointer()
{
  Ipp32s i;
  if (VideoData_buff == NULL) {
    if (UMC_OK != CreateInternalBuffers(buff_size))
      return 0;
  }
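  // Re-lock the frame buffer. A relocatable allocator may return a different
  // address than last time, in which case every VideoData has to be re-pointed.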
  if (is_frames_buff_locked == 0) { // no effect if the buffer hasn't been allocated
    Ipp8u* newptr = (Ipp8u*)m_pMemoryAllocator->Lock(mid_frames_buff);
    if (newptr != frames_buff) { // pointer changed, reset the frame pointers
      if (!newptr) {
        vm_debug_trace(VM_DEBUG_ERROR, VM_STRING("External Lock failed\n"));
        return 0;
      }
      frames_buff = newptr;
      for (i = 0; i < buff_size; i++) {
        SetInternalFramePointers(VideoData_buff + i, frames_buff + i*YUVFrameSize);
      }
    }
    is_frames_buff_locked = 1;
  }
  i = buff_ind + num_new + 1;
  if (i >= buff_size) i -= buff_size;
  return (VideoData_buff + i);
}
// Encode frames (in source order).
// For the first frames the function can return a zero encoded_size, because
// B frames are buffered until their reference P frame arrives. After the end
// of the source stream, EncodeFrame() must be called (IPDistance - 1) more
// times with frame == NULL to encode the buffered frames.
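// A minimal calling sketch (illustrative only; 'encoder', 'in' and the remaining
// EncodeFrame() arguments are hypothetical/omitted here):
//   while (source has frames)
//     encoder.EncodeFrame(&in, ...);   // may emit nothing while B frames buffer up
//   for (Ipp32s k = 0; k < encodeInfo.IPDistance - 1; k++)
//     encoder.EncodeFrame(NULL, ...);  // flush the buffered frames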
Status MPEG2VideoEncoderBase::EncodeFrame( VideoData *frame,