mpegvideo.c
    /* convert fourcc to upper case */
    s->avctx->codec_tag=  toupper( s->avctx->codec_tag     &0xFF)
                       + (toupper((s->avctx->codec_tag>>8 )&0xFF)<<8 )
                       + (toupper((s->avctx->codec_tag>>16)&0xFF)<<16)
                       + (toupper((s->avctx->codec_tag>>24)&0xFF)<<24);
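    /* (illustrative note, not in the original file: e.g. a stream tagged
       MKTAG('x','v','i','d') is normalised to MKTAG('X','V','I','D'), so later
       fourcc checks only need to compare against the upper-case form) */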
    s->avctx->coded_frame= (AVFrame*)&s->current_picture;

    CHECKED_ALLOCZ(s->mb_index2xy, (s->mb_num+1)*sizeof(int)) //error resilience code looks cleaner with this
    for(y=0; y<s->mb_height; y++){
        for(x=0; x<s->mb_width; x++){
            s->mb_index2xy[ x + y*s->mb_width ] = x + y*s->mb_stride;
        }
    }
    s->mb_index2xy[ s->mb_height*s->mb_width ] = (s->mb_height-1)*s->mb_stride + s->mb_width; //FIXME really needed?
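    /* (illustrative note, not in the original file: mb_index2xy maps a plain
       raster macroblock index 0..mb_num-1 to its position in the padded,
       mb_stride-wide arrays; the extra final entry points one past the last
       macroblock of the bottom row, see the FIXME above) */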
    CHECKED_ALLOCZ(s->picture, MAX_PICTURE_COUNT * sizeof(Picture))

    /* which mb is an intra block */
    CHECKED_ALLOCZ(s->mbintra_table, mb_array_size);
    memset(s->mbintra_table, 1, mb_array_size);

    /* init macroblock skip table */
    CHECKED_ALLOCZ(s->mbskip_table, mb_array_size+2);
    //Note the +1 is for a quicker mpeg4 slice_end detection
    CHECKED_ALLOCZ(s->prev_pict_types, PREV_PICT_TYPES_BUFFER_SIZE);

    s->parse_context.state= -1;

    s->context_initialized = 1;

    s->thread_context[0]= s;
    for(i=1; i<s->avctx->thread_count; i++){
        s->thread_context[i]= av_malloc(sizeof(MpegEncContext));
        memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
    }

    for(i=0; i<s->avctx->thread_count; i++){
        if(init_duplicate_context(s->thread_context[i], s) < 0)
            goto fail;
        s->thread_context[i]->start_mb_y= (s->mb_height*(i  ) + s->avctx->thread_count/2) / s->avctx->thread_count;
        s->thread_context[i]->end_mb_y  = (s->mb_height*(i+1) + s->avctx->thread_count/2) / s->avctx->thread_count;
    }

    return 0;
 fail:
    MPV_common_end(s);
    return -1;
}
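/*
 * Illustrative sketch, not part of the original file: the start_mb_y/end_mb_y
 * assignment above hands thread i the macroblock rows
 *     [ (mb_height*i     + thread_count/2) / thread_count,
 *       (mb_height*(i+1) + thread_count/2) / thread_count )
 * i.e. a rounded, nearly equal split with no gaps or overlaps. The same
 * arithmetic as a standalone toy (printf assumed available):
 */
#if 0
static void example_mb_row_split(int mb_height, int thread_count)
{
    int i;
    for (i = 0; i < thread_count; i++) {
        int start = (mb_height *  i      + thread_count / 2) / thread_count;
        int end   = (mb_height * (i + 1) + thread_count / 2) / thread_count;
        printf("thread %d: mb rows [%d, %d)\n", i, start, end);
    }
    /* mb_height=35, thread_count=4 -> [0,9) [9,18) [18,26) [26,35) */
}
#endif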
/* init common structure for both encoder and decoder */
/* draw the edges of width 'w' of an image of size width, height */
//FIXME check that this is ok for mpeg4 interlaced
static void draw_edges_c(uint8_t *buf, int wrap, int width, int height, int w)
{
    uint8_t *ptr, *last_line;
    int i;

    last_line = buf + (height - 1) * wrap;
    for(i=0;i<w;i++) {
        /* top and bottom */
        memcpy(buf - (i + 1) * wrap, buf, width);
        memcpy(last_line + (i + 1) * wrap, last_line, width);
    }
    /* left and right */
    ptr = buf;
    for(i=0;i<height;i++) {
        memset(ptr - w, ptr[0], w);
        memset(ptr + width, ptr[width-1], w);
        ptr += wrap;
    }
    /* corners */
    for(i=0;i<w;i++) {
        memset(buf - (i + 1) * wrap - w, buf[0], w); /* top left */
        memset(buf - (i + 1) * wrap + width, buf[width-1], w); /* top right */
        memset(last_line + (i + 1) * wrap - w, last_line[0], w); /* bottom left */
        memset(last_line + (i + 1) * wrap + width, last_line[width-1], w); /* bottom right */
    }
}
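/*
 * Illustrative sketch, not part of the original file: draw_edges_c() expects
 * 'buf' to point at the top-left visible pixel of a plane that was allocated
 * with at least 'w' spare pixels on every side, since it writes at negative
 * offsets. A toy driver under that assumption:
 */
#if 0
static void example_pad_plane(void)
{
    enum { W = 16, H = 16, E = 16, STRIDE = W + 2*E };
    static uint8_t frame[(H + 2*E) * STRIDE];   /* plane plus borders */
    uint8_t *visible = frame + E*STRIDE + E;    /* top-left visible pixel */
    /* ... fill the W x H interior with decoded samples ... */
    draw_edges_c(visible, STRIDE, W, H, E);     /* replicate the borders outward */
}
#endif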
int ff_find_unused_picture(MpegEncContext *s, int shared){
    int i;

    if(shared){
        for(i=0; i<MAX_PICTURE_COUNT; i++){
            if(s->picture[i].data[0]==NULL && s->picture[i].type==0) return i;
        }
    }else{
        for(i=0; i<MAX_PICTURE_COUNT; i++){
            if(s->picture[i].data[0]==NULL && s->picture[i].type!=0) return i; //FIXME
        }
        for(i=0; i<MAX_PICTURE_COUNT; i++){
            if(s->picture[i].data[0]==NULL) return i;
        }
    }

#ifndef WINCE //assert
    assert(0);
#endif
    return -1;
}
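/* (illustrative note, not in the original file: for shared buffers only slots
   with no buffer type assigned are acceptable; otherwise slots that already
   carry a type are preferred before falling back to any free slot) */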
/**
 * generic function for encode/decode called after coding/decoding the header and before a frame is coded/decoded
 */
int MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
{
    int i;
    AVFrame *pic;
    s->mb_skiped = 0;

#ifndef WINCE //assert
    assert(s->last_picture_ptr==NULL || s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3);
#endif

    /* mark&release old frames */
    if (s->pict_type != B_TYPE && s->last_picture_ptr && s->last_picture_ptr->data[0]) {
        avctx->release_buffer(avctx, (AVFrame*)s->last_picture_ptr);

        /* release forgotten pictures */
        /* if(mpeg124/h263) */
        if(!s->encoding){
            for(i=0; i<MAX_PICTURE_COUNT; i++){
                if(s->picture[i].data[0] && &s->picture[i] != s->next_picture_ptr && s->picture[i].reference){
                    //av_log(avctx, AV_LOG_ERROR, "releasing zombie picture\n");
                    avctx->release_buffer(avctx, (AVFrame*)&s->picture[i]);
                }
            }
        }
    }
alloc:
    if(!s->encoding){
        /* release non reference frames */
        for(i=0; i<MAX_PICTURE_COUNT; i++){
            if(s->picture[i].data[0] && !s->picture[i].reference /*&& s->picture[i].type!=FF_BUFFER_TYPE_SHARED*/){
                s->avctx->release_buffer(s->avctx, (AVFrame*)&s->picture[i]);
            }
        }

        if(s->current_picture_ptr && s->current_picture_ptr->data[0]==NULL)
            pic= (AVFrame*)s->current_picture_ptr; //we already have an unused image (maybe it was set before reading the header)
        else{
            i= ff_find_unused_picture(s, 0);
            pic= (AVFrame*)&s->picture[i];
        }

        pic->reference= s->pict_type != B_TYPE ? 3 : 0;

        pic->coded_picture_number= s->coded_picture_number++;

        if( alloc_picture(s, (Picture*)pic, 0) < 0)
            return -1;

        s->current_picture_ptr= (Picture*)pic;
        s->current_picture_ptr->top_field_first= s->top_field_first; //FIXME use only the vars from current_pic
        s->current_picture_ptr->interlaced_frame= !s->progressive_frame && !s->progressive_sequence;
    }

    s->current_picture_ptr->pict_type= s->pict_type;
//    if(s->flags && CODEC_FLAG_QSCALE)
//        s->current_picture_ptr->quality= s->new_picture_ptr->quality;
    s->current_picture_ptr->key_frame= s->pict_type == I_TYPE;

    copy_picture(&s->current_picture, s->current_picture_ptr);

    if(s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3){
        if (s->pict_type != B_TYPE) {
            s->last_picture_ptr= s->next_picture_ptr;
            s->next_picture_ptr= s->current_picture_ptr;
        }

        if(s->last_picture_ptr) copy_picture(&s->last_picture, s->last_picture_ptr);
        if(s->next_picture_ptr) copy_picture(&s->next_picture, s->next_picture_ptr);

        if(s->pict_type != I_TYPE && (s->last_picture_ptr==NULL || s->last_picture_ptr->data[0]==NULL)){
            //av_log(avctx, AV_LOG_ERROR, "warning: first frame is no keyframe\n");
#ifndef WINCE //assert
            assert(s->pict_type != B_TYPE); //these should have been dropped if we don't have a reference
#endif
            goto alloc;
        }

#ifndef WINCE //assert
        assert(s->pict_type == I_TYPE || (s->last_picture_ptr && s->last_picture_ptr->data[0]));
#endif

        if(s->picture_structure!=PICT_FRAME){
            int i;
            for(i=0; i<4; i++){
                if(s->picture_structure == PICT_BOTTOM_FIELD){
                    s->current_picture.data[i] += s->current_picture.linesize[i];
                }
                s->current_picture.linesize[i] *= 2;
                s->last_picture.linesize[i]    *= 2;
                s->next_picture.linesize[i]    *= 2;
            }
        }
    }

    s->hurry_up= s->avctx->hurry_up;

    //if(s->dct_error_sum){
#ifndef WINCE //assert
        assert(s->avctx->noise_reduction && s->encoding);
#endif
        //update_noise_reduction(s);
    //}

#ifdef HAVE_XVMC
    if(s->avctx->xvmc_acceleration)
        return XVMC_field_start(s, avctx);
#endif

    return 0;
}
/* generic function for encode/decode called after a frame has been coded/decoded */
void MPV_frame_end(MpegEncContext *s)
{
    /* draw edge for correct motion prediction if outside */
#ifdef HAVE_XVMC
    //just to make sure that all data is rendered.
    if(s->avctx->xvmc_acceleration){
        XVMC_field_end(s);
    }else
#endif
#if 1
    if(s->unrestricted_mv && s->pict_type != B_TYPE && !s->intra_only && !(s->flags&CODEC_FLAG_EMU_EDGE)) {
        draw_edges(s->current_picture.data[0], s->linesize  , s->h_edge_pos   , s->v_edge_pos   , EDGE_WIDTH  );
        draw_edges(s->current_picture.data[1], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2);
        draw_edges(s->current_picture.data[2], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2);
    }
    //emms_c();
#endif

    s->last_pict_type = s->pict_type;
    if(s->pict_type!=B_TYPE){
        s->last_non_b_pict_type= s->pict_type;
    }
#if 0
    /* copy back current_picture variables */
    for(i=0; i<MAX_PICTURE_COUNT; i++){
        if(s->picture[i].data[0] == s->current_picture.data[0]){
            s->picture[i]= s->current_picture;
            break;
        }
    }
    assert(i<MAX_PICTURE_COUNT);
#endif

#if 0
    if(s->encoding){
        /* release non reference frames */
        for(i=0; i<MAX_PICTURE_COUNT; i++){
            if(s->picture[i].data[0] && !s->picture[i].reference /*&& s->picture[i].type!=FF_BUFFER_TYPE_SHARED*/){
                s->avctx->release_buffer(s->avctx, (AVFrame*)&s->picture[i]);
            }
        }
    }
#endif
    // clear copies, to avoid confusion
#if 0
    memset(&s->last_picture,    0, sizeof(Picture));
    memset(&s->next_picture,    0, sizeof(Picture));
    memset(&s->current_picture, 0, sizeof(Picture));
#endif
}
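/*
 * Illustrative sketch, not part of the original file: the usual call order in
 * a decoder built on this module, assuming the picture header has already been
 * parsed so s->pict_type is valid and 'pict' is the caller's output AVFrame.
 */
#if 0
if (MPV_frame_start(s, avctx) < 0)
    return -1;
/* ... decode every slice / macroblock row of the frame ... */
MPV_frame_end(s);
*pict = *(AVFrame*)s->current_picture_ptr;   /* hand the finished frame back */
#endif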
/**
 * Copies a rectangular area of samples to a temporary buffer and replicates the border samples.
 * @param buf destination buffer
 * @param src source buffer
 * @param linesize number of bytes between 2 vertically adjacent samples in both the source and destination buffers
 * @param block_w width of block
 * @param block_h height of block
 * @param src_x x coordinate of the top left sample of the block in the source buffer
 * @param src_y y coordinate of the top left sample of the block in the source buffer
 * @param w width of the source buffer
 * @param h height of the source buffer
 */
void ff_emulated_edge_mc(uint8_t *buf, uint8_t *src, int linesize, int block_w, int block_h,
                         int src_x, int src_y, int w, int h){
    int x, y;
    int start_y, start_x, end_y, end_x;

    if(src_y>= h){
        src+= (h-1-src_y)*linesize;
        src_y=h-1;
    }else if(src_y<=-block_h){
        src+= (1-block_h-src_y)*linesize;
        src_y=1-block_h;
    }
    if(src_x>= w){
        src+= (w-1-src_x);
        src_x=w-1;
    }else if(src_x<=-block_w){
        src+= (1-block_w-src_x);
        src_x=1-block_w;
    }

    start_y= FFMAX(0, -src_y);
    start_x= FFMAX(0, -src_x);
    end_y= FFMIN(block_h, h-src_y);
    end_x= FFMIN(block_w, w-src_x);

    // copy existing part
    for(y=start_y; y<end_y; y++){
        for(x=start_x; x<end_x; x++){
            buf[x + y*linesize]= src[x + y*linesize];
        }
    }

    //top
    for(y=0; y<start_y; y++){
        for(x=start_x; x<end_x; x++){
            buf[x + y*linesize]= buf[x + start_y*linesize];
        }
    }

    //bottom
    for(y=end_y; y<block_h; y++){
        for(x=start_x; x<end_x; x++){
            buf[x + y*linesize]= buf[x + (end_y-1)*linesize];
        }
    }

    for(y=0; y<block_h; y++){
        //left
        for(x=0; x<start_x; x++){
            buf[x + y*linesize]= buf[start_x + y*linesize];
        }

        //right
        for(x=end_x; x<block_w; x++){
            buf[x + y*linesize]= buf[end_x - 1 + y*linesize];
        }
    }
}
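/*
 * Illustrative sketch, not part of the original file: how motion compensation
 * typically falls back to ff_emulated_edge_mc() when a motion vector reaches
 * outside the decoded picture. 'ptr' points at the wanted 17x17 luma area, and
 * s->edge_emu_buffer is assumed to be at least 16*linesize + 17 bytes, since
 * the destination is indexed with the same linesize as the source.
 */
#if 0
if ((unsigned)src_x > s->h_edge_pos - 17 || (unsigned)src_y > s->v_edge_pos - 17) {
    ff_emulated_edge_mc(s->edge_emu_buffer, ptr, s->linesize, 17, 17,
                        src_x, src_y, s->h_edge_pos, s->v_edge_pos);
    ptr = s->edge_emu_buffer;   /* read the padded copy instead of the frame */
}
#endif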
/**
 * combines the (truncated) bitstream to a complete frame
 * @returns -1 if no complete frame could be created
 */
int ff_combine_frame( MpegEncContext *s, int next, uint8_t **buf, int *buf_size){
    ParseContext *pc= &s->parse_context;

#if 0
    if(pc->overread){
        printf("overread %d, state:%X next:%d index:%d o_index:%d\n", pc->overread, pc->state, next, pc->index, pc->overread_index);
        printf("%X %X %X %X\n", (*buf)[0], (*buf)[1],(*buf)[2],(*buf)[3]);
    }
#endif

    /* copy overread bytes from last frame into buffer */
    for(; pc->overread>0; pc->overread--){
        pc->buffer[pc->index++]= pc->buffer[pc->overread_index++];
    }

    pc->last_index= pc->index;

    /* copy into buffer and return */
    if(next == END_NOT_FOUND){
        pc->buffer= av_fast_realloc(pc->buffer, &pc->buffer_size, (*buf_size) + pc->index + FF_INPUT_BUFFER_PADDING_SIZE);
        memcpy(&pc->buffer[pc->index], *buf, *buf_size);
        pc->index += *buf_size;
        return -1;
    }

    *buf_size=
    pc->overread_index= pc->index + next;

    /* append to buffer */
    if(pc->index){
        pc->buffer= av_fast_realloc(pc->buffer, &pc->buffer_size, next + pc->index + FF_INPUT_BUFFER_PADDING_SIZE);

        memcpy(&pc->buffer[pc->index], *buf, next + FF_INPUT_BUFFER_PADDING_SIZE );
        pc->index = 0;
        *buf= pc->buffer;
    }

    /* store overread bytes */
    for(;next < 0; next++){
        pc->state = (pc->state<<8) | pc->buffer[pc->last_index + next];
        pc->overread++;
    }

#if 0
    if(pc->overread){
        printf("overread %d, state:%X next:%d index:%d o_index:%d\n", pc->overread, pc->state, next, pc->index, pc->overread_index);
        printf("%X %X %X %X\n", (*buf)[0], (*buf)[1],(*buf)[2],(*buf)[3]);
    }
#endif

    return 0;
}
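/*
 * Illustrative sketch, not part of the original file: how a decoder that may
 * receive truncated packets would drive ff_combine_frame(). find_frame_end()
 * is a placeholder for the codec-specific scan that returns the offset of the
 * next frame boundary, or END_NOT_FOUND if the packet ends mid-frame.
 */
#if 0
if (s->flags & CODEC_FLAG_TRUNCATED) {
    int next = find_frame_end(&s->parse_context, buf, buf_size);
    if (ff_combine_frame(s, next, &buf, &buf_size) < 0)
        return buf_size;   /* no complete frame yet, wait for more input */
}
/* 'buf'/'buf_size' now describe exactly one complete frame */
#endif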