
📄 video.c

📁 video linux conference
💻 C
📖 Page 1 of 2
    if( p_sys->p_context->width <= 0 || p_sys->p_context->height <= 0 )
    {
        p_sys->p_context->hurry_up = 5;
        b_null_size = VLC_TRUE;
    }

    /*
     * Do the actual decoding now
     */

    /* Check if post-processing was enabled */
    p_sys->b_pp = p_sys->b_pp_async;

    /* Don't forget that ffmpeg requires a few more bytes
     * than the real frame size */
    if( p_block->i_buffer > 0 )
    {
        p_sys->i_buffer = p_block->i_buffer;
        if( p_sys->i_buffer + FF_INPUT_BUFFER_PADDING_SIZE >
            p_sys->i_buffer_orig )
        {
            free( p_sys->p_buffer_orig );
            p_sys->i_buffer_orig =
                p_block->i_buffer + FF_INPUT_BUFFER_PADDING_SIZE;
            p_sys->p_buffer_orig = malloc( p_sys->i_buffer_orig );
        }
        p_sys->p_buffer = p_sys->p_buffer_orig;
        p_sys->i_buffer = p_block->i_buffer;
        p_dec->p_vlc->pf_memcpy( p_sys->p_buffer, p_block->p_buffer,
                                 p_block->i_buffer );
        memset( p_sys->p_buffer + p_block->i_buffer, 0,
                FF_INPUT_BUFFER_PADDING_SIZE );

        p_block->i_buffer = 0;
    }

    while( p_sys->i_buffer > 0 )
    {
        int i_used, b_gotpicture;
        picture_t *p_pic;

        i_used = avcodec_decode_video( p_sys->p_context, p_sys->p_ff_pic,
                                       &b_gotpicture,
                                       p_sys->p_buffer, p_sys->i_buffer );
        if( b_null_size && p_sys->p_context->width > 0 &&
            p_sys->p_context->height > 0 )
        {
            /* Reparse it to not drop the I frame */
            b_null_size = VLC_FALSE;
            p_sys->p_context->hurry_up = 0;
            i_used = avcodec_decode_video( p_sys->p_context, p_sys->p_ff_pic,
                                           &b_gotpicture,
                                           p_sys->p_buffer, p_sys->i_buffer );
        }

        if( i_used < 0 )
        {
            msg_Warn( p_dec, "cannot decode one frame (%d bytes)",
                      p_sys->i_buffer );
            block_Release( p_block );
            return NULL;
        }
        else if( i_used > p_sys->i_buffer )
        {
            i_used = p_sys->i_buffer;
        }

        /* Consumed bytes */
        p_sys->i_buffer -= i_used;
        p_sys->p_buffer += i_used;

        /* Nothing to display */
        if( !b_gotpicture )
        {
            if( i_used == 0 ) break;
            continue;
        }

        /* Update frame late count (except when doing preroll) */
        if( p_sys->i_pts && p_sys->i_pts <= mdate() &&
            !(p_block->i_flags & BLOCK_FLAG_PREROLL) )
        {
            p_sys->i_late_frames++;
            if( p_sys->i_late_frames == 1 )
                p_sys->i_late_frames_start = mdate();
        }
        else
        {
            p_sys->i_late_frames = 0;
        }

        if( !b_drawpicture || !p_sys->p_ff_pic->linesize[0] )
        {
            /* Do not display the picture */
            continue;
        }

        if( !p_sys->p_ff_pic->opaque )
        {
            /* Get a new picture */
            p_pic = ffmpeg_NewPictBuf( p_dec, p_sys->p_context );
            if( !p_pic )
            {
                block_Release( p_block );
                return NULL;
            }

            /* Fill p_picture_t from AVVideoFrame and do chroma conversion
             * if needed */
            ffmpeg_CopyPicture( p_dec, p_pic, p_sys->p_ff_pic );
        }
        else
        {
            p_pic = (picture_t *)p_sys->p_ff_pic->opaque;
        }

        /* Set the PTS */
        if( p_sys->p_ff_pic->pts ) p_sys->i_pts = p_sys->p_ff_pic->pts;

        /* Sanity check (seems to be needed for some streams) */
        if( p_sys->p_ff_pic->pict_type == FF_B_TYPE )
        {
            p_sys->b_has_b_frames = VLC_TRUE;
        }

        /* Send decoded frame to vout */
        if( p_sys->i_pts )
        {
            p_pic->date = p_sys->i_pts;

            /* interpolate the next PTS */
#if LIBAVCODEC_BUILD >= 4754
            if( p_dec->fmt_in.video.i_frame_rate > 0 &&
                p_dec->fmt_in.video.i_frame_rate_base > 0 )
            {
                p_sys->i_pts += I64C(1000000) *
                    (2 + p_sys->p_ff_pic->repeat_pict) *
                    p_dec->fmt_in.video.i_frame_rate_base *
                    p_block->i_rate / INPUT_RATE_DEFAULT /
                    (2 * p_dec->fmt_in.video.i_frame_rate);
            }
            else if( p_sys->p_context->time_base.den > 0 )
            {
                p_sys->i_pts += I64C(1000000) *
                    (2 + p_sys->p_ff_pic->repeat_pict) *
                    p_sys->p_context->time_base.num *
                    p_block->i_rate / INPUT_RATE_DEFAULT /
                    (2 * p_sys->p_context->time_base.den);
            }
#else
            if( p_sys->p_context->frame_rate > 0 )
            {
                p_sys->i_pts += I64C(1000000) *
                    (2 + p_sys->p_ff_pic->repeat_pict) *
                    p_sys->p_context->frame_rate_base *
                    p_block->i_rate / INPUT_RATE_DEFAULT /
                    (2 * p_sys->p_context->frame_rate);
            }
#endif

            if( p_sys->b_first_frame )
            {
                /* Hack to force display of still pictures */
                p_sys->b_first_frame = VLC_FALSE;
                p_pic->b_force = VLC_TRUE;
            }

            p_pic->i_nb_fields = 2 + p_sys->p_ff_pic->repeat_pict;
#if LIBAVCODEC_BUILD >= 4685
            p_pic->b_progressive = !p_sys->p_ff_pic->interlaced_frame;
            p_pic->b_top_field_first = p_sys->p_ff_pic->top_field_first;
#endif

            return p_pic;
        }
        else
        {
            p_dec->pf_vout_buffer_del( p_dec, p_pic );
        }
    }

    block_Release( p_block );
    return NULL;
}

/*****************************************************************************
 * EndVideo: decoder destruction
 *****************************************************************************
 * This function is called when the thread ends after a successful
 * initialization.
 *****************************************************************************/
void E_(EndVideoDec)( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    if( p_sys->p_ff_pic ) av_free( p_sys->p_ff_pic );

#ifdef LIBAVCODEC_PP
    E_(ClosePostproc)( p_dec, p_sys->p_pp );
#endif

    free( p_sys->p_buffer_orig );
}

/*****************************************************************************
 * ffmpeg_InitCodec: setup codec extra initialization data for ffmpeg
 *****************************************************************************/
static void ffmpeg_InitCodec( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    int i_size = p_dec->fmt_in.i_extra;

    if( !i_size ) return;

    if( p_sys->i_codec_id == CODEC_ID_SVQ3 )
    {
        uint8_t *p;

        p_sys->p_context->extradata_size = i_size + 12;
        p = p_sys->p_context->extradata  =
            malloc( p_sys->p_context->extradata_size );
        memcpy( &p[0],  "SVQ3", 4 );
        memset( &p[4], 0, 8 );
        memcpy( &p[12], p_dec->fmt_in.p_extra, i_size );

        /* Now remove all atoms before the SMI one */
        if( p_sys->p_context->extradata_size > 0x5a &&
            strncmp( &p[0x56], "SMI ", 4 ) )
        {
            uint8_t *psz = &p[0x52];

            while( psz < &p[p_sys->p_context->extradata_size - 8] )
            {
                int i_size = GetDWBE( psz );
                if( i_size <= 1 )
                {
                    /* FIXME handle 1 as long size */
                    break;
                }
                if( !strncmp( &psz[4], "SMI ", 4 ) )
                {
                    memmove( &p[0x52], psz,
                             &p[p_sys->p_context->extradata_size] - psz );
                    break;
                }

                psz += i_size;
            }
        }
    }
    else if( p_dec->fmt_in.i_codec == VLC_FOURCC( 'R', 'V', '1', '0' ) ||
             p_dec->fmt_in.i_codec == VLC_FOURCC( 'R', 'V', '1', '3' ) ||
             p_dec->fmt_in.i_codec == VLC_FOURCC( 'R', 'V', '2', '0' ) )
    {
        if( p_dec->fmt_in.i_extra == 8 )
        {
            p_sys->p_context->extradata_size = 8;
            p_sys->p_context->extradata = malloc( 8 );
            memcpy( p_sys->p_context->extradata,
                    p_dec->fmt_in.p_extra, p_dec->fmt_in.i_extra );
            p_sys->p_context->sub_id= ((uint32_t*)p_dec->fmt_in.p_extra)[1];

            msg_Warn( p_dec, "using extra data for RV codec sub_id=%08x",
                      p_sys->p_context->sub_id );
        }
    }
    else
    {
        p_sys->p_context->extradata_size = i_size;
        p_sys->p_context->extradata =
            malloc( i_size + FF_INPUT_BUFFER_PADDING_SIZE );
        memcpy( p_sys->p_context->extradata,
                p_dec->fmt_in.p_extra, i_size );
        memset( &((uint8_t*)p_sys->p_context->extradata)[i_size],
                0, FF_INPUT_BUFFER_PADDING_SIZE );
    }
}

/*****************************************************************************
 * ffmpeg_CopyPicture: copy a picture from ffmpeg internal buffers to a
 *                     picture_t structure (when not in direct rendering mode).
 *****************************************************************************/
static void ffmpeg_CopyPicture( decoder_t *p_dec,
                                picture_t *p_pic, AVFrame *p_ff_pic )
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    if( ffmpeg_PixFmtToChroma( p_sys->p_context->pix_fmt ) )
    {
        int i_plane, i_size, i_line;
        uint8_t *p_dst, *p_src;
        int i_src_stride, i_dst_stride;

#ifdef LIBAVCODEC_PP
        if( p_sys->p_pp && p_sys->b_pp )
            E_(PostprocPict)( p_dec, p_sys->p_pp, p_pic, p_ff_pic );
        else
#endif
        for( i_plane = 0; i_plane < p_pic->i_planes; i_plane++ )
        {
            p_src  = p_ff_pic->data[i_plane];
            p_dst = p_pic->p[i_plane].p_pixels;
            i_src_stride = p_ff_pic->linesize[i_plane];
            i_dst_stride = p_pic->p[i_plane].i_pitch;

            i_size = __MIN( i_src_stride, i_dst_stride );
            for( i_line = 0; i_line < p_pic->p[i_plane].i_visible_lines;
                 i_line++ )
            {
                p_dec->p_vlc->pf_memcpy( p_dst, p_src, i_size );
                p_src += i_src_stride;
                p_dst += i_dst_stride;
            }
        }
    }
    else
    {
        AVPicture dest_pic;
        int i;

        /* we need to convert to I420 */
        switch( p_sys->p_context->pix_fmt )
        {
        case PIX_FMT_YUV410P:
        case PIX_FMT_YUV411P:
        case PIX_FMT_BGR24:
        case PIX_FMT_PAL8:
            for( i = 0; i < p_pic->i_planes; i++ )
            {
                dest_pic.data[i] = p_pic->p[i].p_pixels;
                dest_pic.linesize[i] = p_pic->p[i].i_pitch;
            }
            img_convert( &dest_pic, PIX_FMT_YUV420P,
                         (AVPicture *)p_ff_pic,
                         p_sys->p_context->pix_fmt,
                         p_sys->p_context->width,
                         p_sys->p_context->height );
            break;
        default:
            msg_Err( p_dec, "don't know how to convert chroma %i",
                     p_sys->p_context->pix_fmt );
            p_dec->b_error = 1;
            break;
        }
    }
}

/*****************************************************************************
 * ffmpeg_GetFrameBuf: callback used by ffmpeg to get a frame buffer.
 *****************************************************************************
 * It is used for direct rendering as well as to get the right PTS for each
 * decoded picture (even in indirect rendering mode).
 *****************************************************************************/
static int ffmpeg_GetFrameBuf( struct AVCodecContext *p_context,
                               AVFrame *p_ff_pic )
{
    decoder_t *p_dec = (decoder_t *)p_context->opaque;
    decoder_sys_t *p_sys = p_dec->p_sys;
    picture_t *p_pic;

    /* Set picture PTS */
    if( p_sys->input_pts )
    {
        p_ff_pic->pts = p_sys->input_pts;
    }
    else if( p_sys->input_dts )
    {
        /* Some demuxers only set the dts so let's try to find a useful
         * timestamp from this */
        if( !p_context->has_b_frames || !p_sys->b_has_b_frames ||
            !p_ff_pic->reference || !p_sys->i_pts )
        {
            p_ff_pic->pts = p_sys->input_dts;
        }
        else p_ff_pic->pts = 0;
    }
    else p_ff_pic->pts = 0;

    if( p_sys->i_pts ) /* make sure 1st frame has a pts > 0 */
    {
        p_sys->input_pts = p_sys->input_dts = 0;
    }

    p_ff_pic->opaque = 0;

    /* Not much to do in indirect rendering mode */
    if( !p_sys->b_direct_rendering || p_sys->b_pp )
    {
        return avcodec_default_get_buffer( p_context, p_ff_pic );
    }

    /* Some codecs set pix_fmt only after the 1st frame has been decoded,
     * so this check is necessary. */
    if( !ffmpeg_PixFmtToChroma( p_context->pix_fmt ) ||
        p_sys->p_context->width % 16 || p_sys->p_context->height % 16 )
    {
        msg_Dbg( p_dec, "disabling direct rendering" );
        p_sys->b_direct_rendering = 0;
        return avcodec_default_get_buffer( p_context, p_ff_pic );
    }

    /* Get a new picture */
    //p_sys->p_vout->render.b_allow_modify_pics = 0;
    p_pic = ffmpeg_NewPictBuf( p_dec, p_sys->p_context );
    if( !p_pic )
    {
        p_sys->b_direct_rendering = 0;
        return avcodec_default_get_buffer( p_context, p_ff_pic );
    }
    p_sys->p_context->draw_horiz_band = NULL;

    p_ff_pic->opaque = (void*)p_pic;

    p_ff_pic->type = FF_BUFFER_TYPE_USER;

    p_ff_pic->data[0] = p_pic->p[0].p_pixels;
    p_ff_pic->data[1] = p_pic->p[1].p_pixels;
    p_ff_pic->data[2] = p_pic->p[2].p_pixels;
    p_ff_pic->data[3] = NULL; /* alpha channel but I'm not sure */

    p_ff_pic->linesize[0] = p_pic->p[0].i_pitch;
    p_ff_pic->linesize[1] = p_pic->p[1].i_pitch;
    p_ff_pic->linesize[2] = p_pic->p[2].i_pitch;
    p_ff_pic->linesize[3] = 0;

    if( p_ff_pic->reference != 0 )
    {
        p_dec->pf_picture_link( p_dec, p_pic );
    }

    /* FIXME what is that, should give good value */
    p_ff_pic->age = 256*256*256*64; // FIXME FIXME from ffmpeg

    return 0;
}

/*****************************************************************************
 * ffmpeg_ReleaseFrameBuf: callback used by ffmpeg to release a frame buffer.
 *****************************************************************************/
static void ffmpeg_ReleaseFrameBuf( struct AVCodecContext *p_context,
                                    AVFrame *p_ff_pic )
{
    decoder_t *p_dec = (decoder_t *)p_context->opaque;
    picture_t *p_pic;

    if( !p_ff_pic->opaque )
    {
        avcodec_default_release_buffer( p_context, p_ff_pic );
        return;
    }

    p_pic = (picture_t*)p_ff_pic->opaque;

    p_ff_pic->data[0] = NULL;
    p_ff_pic->data[1] = NULL;
    p_ff_pic->data[2] = NULL;
    p_ff_pic->data[3] = NULL;

    if( p_ff_pic->reference != 0 )
    {
        p_dec->pf_picture_unlink( p_dec, p_pic );
    }
}
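Note: the PTS interpolation in the decode loop above is easier to follow in isolation. The standalone sketch below is not part of video.c; the function name and sample values are made up for illustration. It reproduces the same arithmetic: (2 + repeat_pict) counts display fields, the frame-rate fraction converts fields into microseconds, and i_rate / INPUT_RATE_DEFAULT scales the step for faster- or slower-than-normal playback.

/* Standalone sketch, not part of video.c: the PTS-increment arithmetic
 * used by the decode loop above.  Names and sample values are illustrative. */
#include <stdio.h>
#include <stdint.h>

/* Microseconds to add to the current PTS for one decoded picture. */
static int64_t next_pts_increment( int i_frame_rate, int i_frame_rate_base,
                                   int i_repeat_pict,
                                   int i_rate, int i_rate_default )
{
    /* (2 + repeat_pict) fields per picture: 2 for a normal frame, 3 when the
     * frame is flagged for 3:2 pulldown.  Dividing by (2 * fps) turns fields
     * into seconds; i_rate / i_rate_default accounts for playback speed. */
    return INT64_C(1000000) * (2 + i_repeat_pict) * i_frame_rate_base *
           i_rate / i_rate_default / (2 * i_frame_rate);
}

int main( void )
{
    /* 30000/1001 fps (NTSC) at normal speed, no pulldown: the increment is
     * roughly one frame duration, about 33.4 ms. */
    printf( "%lld us\n",
            (long long)next_pts_increment( 30000, 1001, 0, 1000, 1000 ) );
    return 0;
}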
