
📄 mpegvideo.c

📁 mpeg4 video codec
💻 C
📖 Page 1 of 5
/* generic function for encode/decode, called after the header has been
   parsed and before a frame is coded/decoded */
int MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx){
    int i;
    AVFrame *pic;
    s->mb_skipped = 0;

    assert(s->last_picture_ptr==NULL || s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3);

    /* mark & release old frames */
    if (s->pict_type != B_TYPE && s->last_picture_ptr && s->last_picture_ptr != s->next_picture_ptr && s->last_picture_ptr->data[0]) {
        avctx->release_buffer(avctx, (AVFrame*)s->last_picture_ptr);

        /* release forgotten pictures */
        /* if(mpeg124/h263) */
        if(!s->encoding){
            for(i=0; i<MAX_PICTURE_COUNT; i++){
                if(s->picture[i].data[0] && &s->picture[i] != s->next_picture_ptr && s->picture[i].reference){
                    av_log(avctx, AV_LOG_ERROR, "releasing zombie picture\n");
                    avctx->release_buffer(avctx, (AVFrame*)&s->picture[i]);
                }
            }
        }
    }
alloc:
    if(!s->encoding){
        /* release non-reference frames */
        for(i=0; i<MAX_PICTURE_COUNT; i++){
            if(s->picture[i].data[0] && !s->picture[i].reference /*&& s->picture[i].type!=FF_BUFFER_TYPE_SHARED*/){
                s->avctx->release_buffer(s->avctx, (AVFrame*)&s->picture[i]);
            }
        }

        if(s->current_picture_ptr && s->current_picture_ptr->data[0]==NULL)
            pic= (AVFrame*)s->current_picture_ptr; //we already have an unused image (maybe it was set before reading the header)
        else{
            i= ff_find_unused_picture(s, 0);
            pic= (AVFrame*)&s->picture[i];
        }

        pic->reference= (s->pict_type != B_TYPE || s->codec_id == CODEC_ID_H264)
                        && !s->dropable ? 3 : 0;

        pic->coded_picture_number= s->coded_picture_number++;

        if( alloc_picture(s, (Picture*)pic, 0) < 0)
            return -1;

        s->current_picture_ptr= (Picture*)pic;
        s->current_picture_ptr->top_field_first= s->top_field_first; //FIXME use only the vars from current_pic
        s->current_picture_ptr->interlaced_frame= !s->progressive_frame && !s->progressive_sequence;
    }

    s->current_picture_ptr->pict_type= s->pict_type;
//    if(s->flags && CODEC_FLAG_QSCALE)
//        s->current_picture_ptr->quality= s->new_picture_ptr->quality;
    s->current_picture_ptr->key_frame= s->pict_type == I_TYPE;

    copy_picture(&s->current_picture, s->current_picture_ptr);

    if(s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3){
        if (s->pict_type != B_TYPE) {
            s->last_picture_ptr= s->next_picture_ptr;
            if(!s->dropable)
                s->next_picture_ptr= s->current_picture_ptr;
        }
/*    av_log(s->avctx, AV_LOG_DEBUG, "L%p N%p C%p L%p N%p C%p type:%d drop:%d\n", s->last_picture_ptr, s->next_picture_ptr,s->current_picture_ptr,
        s->last_picture_ptr    ? s->last_picture_ptr->data[0] : NULL,
        s->next_picture_ptr    ? s->next_picture_ptr->data[0] : NULL,
        s->current_picture_ptr ? s->current_picture_ptr->data[0] : NULL,
        s->pict_type, s->dropable);*/

        if(s->last_picture_ptr) copy_picture(&s->last_picture, s->last_picture_ptr);
        if(s->next_picture_ptr) copy_picture(&s->next_picture, s->next_picture_ptr);

        if(s->pict_type != I_TYPE && (s->last_picture_ptr==NULL || s->last_picture_ptr->data[0]==NULL)){
            av_log(avctx, AV_LOG_ERROR, "warning: first frame is no keyframe\n");
            assert(s->pict_type != B_TYPE); //these should have been dropped if we don't have a reference
            goto alloc;
        }

        assert(s->pict_type == I_TYPE || (s->last_picture_ptr && s->last_picture_ptr->data[0]));

        if(s->picture_structure!=PICT_FRAME){
            int i;
            for(i=0; i<4; i++){
                if(s->picture_structure == PICT_BOTTOM_FIELD){
                    s->current_picture.data[i] += s->current_picture.linesize[i];
                }
                s->current_picture.linesize[i] *= 2;
                s->last_picture.linesize[i] *=2;
                s->next_picture.linesize[i] *=2;
            }
        }
    }

    s->hurry_up= s->avctx->hurry_up;
    s->error_resilience= avctx->error_resilience;

    /* set dequantizer, we can't do it during init as it might change for mpeg4
       and we can't do it in the header decode as init isn't called for mpeg4 there yet */
    if(s->mpeg_quant || s->codec_id == CODEC_ID_MPEG2VIDEO){
        s->dct_unquantize_intra = s->dct_unquantize_mpeg2_intra;
        s->dct_unquantize_inter = s->dct_unquantize_mpeg2_inter;
    }else if(s->out_format == FMT_H263 || s->out_format == FMT_H261){
        s->dct_unquantize_intra = s->dct_unquantize_h263_intra;
        s->dct_unquantize_inter = s->dct_unquantize_h263_inter;
    }else{
        s->dct_unquantize_intra = s->dct_unquantize_mpeg1_intra;
        s->dct_unquantize_inter = s->dct_unquantize_mpeg1_inter;
    }

    if(s->dct_error_sum){
        assert(s->avctx->noise_reduction && s->encoding);
        update_noise_reduction(s);
    }

#ifdef HAVE_XVMC
    if(s->avctx->xvmc_acceleration)
        return XVMC_field_start(s, avctx);
#endif

    return 0;
}

/* generic function for encode/decode called after a frame has been coded/decoded */
void MPV_frame_end(MpegEncContext *s){
    int i;
    /* draw edge for correct motion prediction if outside */
#ifdef HAVE_XVMC
//just to make sure that all data is rendered.
    if(s->avctx->xvmc_acceleration){
        XVMC_field_end(s);
    }else
#endif
    if(s->unrestricted_mv && s->current_picture.reference && !s->intra_only && !(s->flags&CODEC_FLAG_EMU_EDGE)) {
            draw_edges(s->current_picture.data[0], s->linesize  , s->h_edge_pos   , s->v_edge_pos   , EDGE_WIDTH  );
            draw_edges(s->current_picture.data[1], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2);
            draw_edges(s->current_picture.data[2], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2);
        }
    emms_c();

    s->last_pict_type    = s->pict_type;
    s->last_lambda_for[s->pict_type]= s->current_picture_ptr->quality;
    if(s->pict_type!=B_TYPE){
        s->last_non_b_pict_type= s->pict_type;
    }
#if 0
    /* copy back current_picture variables */
    for(i=0; i<MAX_PICTURE_COUNT; i++){
        if(s->picture[i].data[0] == s->current_picture.data[0]){
            s->picture[i]= s->current_picture;
            break;
        }
    }
    assert(i<MAX_PICTURE_COUNT);
#endif

    if(s->encoding){
        /* release non-reference frames */
        for(i=0; i<MAX_PICTURE_COUNT; i++){
            if(s->picture[i].data[0] && !s->picture[i].reference /*&& s->picture[i].type!=FF_BUFFER_TYPE_SHARED*/){
                s->avctx->release_buffer(s->avctx, (AVFrame*)&s->picture[i]);
            }
        }
    }
    // clear copies, to avoid confusion
#if 0
    memset(&s->last_picture, 0, sizeof(Picture));
    memset(&s->next_picture, 0, sizeof(Picture));
    memset(&s->current_picture, 0, sizeof(Picture));
#endif
    s->avctx->coded_frame= (AVFrame*)s->current_picture_ptr;
}

/**
 * prints debugging info for the given picture.
 */
void ff_print_debug_info(MpegEncContext *s, AVFrame *pict){
    if(!pict || !pict->mb_type) return;

    if (s->avctx->debug_mv && pict->motion_val) {
        pict->mb_height= s->mb_height;
        pict->mb_width = s->mb_width;
        pict->mb_stride= s->mb_stride;
        pict->b8_stride= s->b8_stride;
    }

    if (pict) {
        pict->play_flags= (s->quarter_sample ? CODEC_FLAG_QPEL : 0);
        pict->num_sprite_warping_points = s->num_sprite_warping_points;
        pict->real_sprite_warping_points= s->real_sprite_warping_points;
    }
}

#ifdef CONFIG_ENCODERS

/* sum of absolute differences of a 16x16 block against a constant reference value */
static int get_sae(uint8_t *src, int ref, int stride){
    int x,y;
    int acc=0;

    for(y=0; y<16; y++){
        for(x=0; x<16; x++){
            acc+= ABS(src[x+y*stride] - ref);
        }
    }

    return acc;
}

/* count 16x16 blocks for which intra coding (SAE against the block mean)
   looks cheaper than inter coding (SAD against the reference) */
static int get_intra_count(MpegEncContext *s, uint8_t *src, uint8_t *ref, int stride){
    int x, y, w, h;
    int acc=0;

    w= s->width &~15;
    h= s->height&~15;

    for(y=0; y<h; y+=16){
        for(x=0; x<w; x+=16){
            int offset= x + y*stride;
            int sad = s->dsp.sad[0](NULL, src + offset, ref + offset, stride, 16);
            int mean= (s->dsp.pix_sum(src + offset, stride) + 128)>>8;
            int sae = get_sae(src + offset, mean, stride);

            acc+= sae + 500 < sad;
        }
    }
    return acc;
}

/* encoder input path: wrap (direct) or copy the user-supplied frame into an
   internal Picture and queue it in input_picture[] */
static int load_input_picture(MpegEncContext *s, AVFrame *pic_arg){
    AVFrame *pic=NULL;
    int64_t pts;
    int i;
    const int encoding_delay= s->max_b_frames;
    int direct=1;

    if(pic_arg){
        pts= pic_arg->pts;
        pic_arg->display_picture_number= s->input_picture_number++;

        if(pts != AV_NOPTS_VALUE){
            if(s->user_specified_pts != AV_NOPTS_VALUE){
                int64_t time= pts;
                int64_t last= s->user_specified_pts;

                if(time <= last){
                    av_log(s->avctx, AV_LOG_ERROR, "Error, Invalid timestamp=%"PRId64", last=%"PRId64"\n", pts, s->user_specified_pts);
                    return -1;
                }
            }
            s->user_specified_pts= pts;
        }else{
            if(s->user_specified_pts != AV_NOPTS_VALUE){
                s->user_specified_pts=
                pts= s->user_specified_pts + 1;
                av_log(s->avctx, AV_LOG_INFO, "Warning: AVFrame.pts=? trying to guess (%"PRId64")\n", pts);
            }else{
                pts= pic_arg->display_picture_number;
            }
        }
    }

    if(pic_arg){
        if(encoding_delay && !(s->flags&CODEC_FLAG_INPUT_PRESERVED)) direct=0;
        if(pic_arg->linesize[0] != s->linesize)   direct=0;
        if(pic_arg->linesize[1] != s->uvlinesize) direct=0;
        if(pic_arg->linesize[2] != s->uvlinesize) direct=0;

//        av_log(AV_LOG_DEBUG, "%d %d %d %d\n",pic_arg->linesize[0], pic_arg->linesize[1], s->linesize, s->uvlinesize);

        if(direct){
            i= ff_find_unused_picture(s, 1);

            pic= (AVFrame*)&s->picture[i];
            pic->reference= 3;

            for(i=0; i<4; i++){
                pic->data[i]= pic_arg->data[i];
                pic->linesize[i]= pic_arg->linesize[i];
            }
            alloc_picture(s, (Picture*)pic, 1);
        }else{
            int offset= 16;
            i= ff_find_unused_picture(s, 0);

            pic= (AVFrame*)&s->picture[i];
            pic->reference= 3;

            alloc_picture(s, (Picture*)pic, 0);

            if(   pic->data[0] + offset == pic_arg->data[0]
               && pic->data[1] + offset == pic_arg->data[1]
               && pic->data[2] + offset == pic_arg->data[2]){
                // empty
            }else{
                int h_chroma_shift, v_chroma_shift;
                avcodec_get_chroma_sub_sample(s->avctx->pix_fmt, &h_chroma_shift, &v_chroma_shift);

                for(i=0; i<3; i++){
                    int src_stride= pic_arg->linesize[i];
                    int dst_stride= i ? s->uvlinesize : s->linesize;
                    int h_shift= i ? h_chroma_shift : 0;
                    int v_shift= i ? v_chroma_shift : 0;
                    int w= s->width >>h_shift;
                    int h= s->height>>v_shift;
                    uint8_t *src= pic_arg->data[i];
                    uint8_t *dst= pic->data[i] + offset;

                    if(src_stride==dst_stride)
                        memcpy(dst, src, src_stride*h);
                    else{
                        while(h--){
                            memcpy(dst, src, w);
                            dst += dst_stride;
                            src += src_stride;
                        }
                    }
                }
            }
        }
        copy_picture_attributes(s, pic, pic_arg);
        pic->pts= pts; //we set this here to avoid modifying pic_arg
    }

    /* shift buffer entries */
    for(i=1; i<MAX_PICTURE_COUNT /*s->encoding_delay+1*/; i++)
        s->input_picture[i-1]= s->input_picture[i];

    s->input_picture[encoding_delay]= (Picture*)pic;

    return 0;
}

/* encoder frame skipping: compare the new picture against the reference,
   one 8x8 block at a time, and accumulate a score according to frame_skip_exp */
static int skip_check(MpegEncContext *s, Picture *p, Picture *ref){
    int x, y, plane;
    int score=0;
    int64_t score64=0;

    for(plane=0; plane<3; plane++){
        const int stride= p->linesize[plane];
        const int bw= plane ? 1 : 2;
        for(y=0; y<s->mb_height*bw; y++){
            for(x=0; x<s->mb_width*bw; x++){
                int off= p->type == FF_BUFFER_TYPE_SHARED ? 0 : 16;
                int v= s->dsp.frame_skip_cmp[1](s, p->data[plane] + 8*(x + y*stride)+off, ref->data[plane] + 8*(x + y*stride), stride, 8);

                switch(s->avctx->frame_skip_exp){
                    case 0: score= FFMAX(score, v); break;
                    case 1: score+= ABS(v); break;
                    case 2: score+= v*v; break;
                    case 3: score64+= ABS(v*v*(int64_t)v); break;
                    case 4: score64+= v*v*(int64_t)(v*v); break;
                }
            }
        }
    }

    if(score) score64= score;

    if(score64 < s->avctx->f
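For orientation: MPV_frame_start() and MPV_frame_end() above are invoked once per coded frame by the decoders layered on this file. The fragment below is a hypothetical, simplified sketch of that calling pattern, not an excerpt of a real call site; decode_one_frame_sketch() and decode_slices() are placeholder names.

/* Sketch only (not part of mpegvideo.c): a condensed view of how a decoder
 * built on this file, e.g. h263dec.c, drives the functions above for one
 * coded frame. decode_slices() stands in for the codec-specific header and
 * slice parsing. */
static int decode_one_frame_sketch(MpegEncContext *s, AVCodecContext *avctx, AVFrame *pict)
{
    /* allocate the current picture, rotate last/next references,
       select the dequantizers */
    if (MPV_frame_start(s, avctx) < 0)
        return -1;

    decode_slices(s); /* placeholder: bitstream -> macroblocks */

    /* draw prediction edges and export avctx->coded_frame */
    MPV_frame_end(s);

    /* B-frames (and low_delay streams) are shown immediately; otherwise the
       previous reference picture is the one that becomes visible now */
    if (s->pict_type == B_TYPE || s->low_delay)
        *pict = *(AVFrame*)s->current_picture_ptr;
    else if (s->last_picture_ptr)
        *pict = *(AVFrame*)s->last_picture_ptr;

    ff_print_debug_info(s, pict);
    return 0;
}

Because MPV_frame_start() already rotates last_picture_ptr/next_picture_ptr before decoding begins, the reordered output for non-B frames can simply be taken from last_picture_ptr after MPV_frame_end().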
