📄 huffyuv.c
            s->predictor= PLANE;
            s->decorrelate= avctx->bits_per_sample >= 24;
            break;
        case 4:
            s->predictor= MEDIAN;
            s->decorrelate= 0;
            break;
        default:
            s->predictor= LEFT; //OLD
            s->decorrelate= 0;
            break;
        }
        s->bitstream_bpp= avctx->bits_per_sample & ~7;
        s->context= 0;

        if(read_old_huffman_tables(s) < 0)
            return -1;
    }

    switch(s->bitstream_bpp){
    case 12:
        avctx->pix_fmt = PIX_FMT_YUV420P;
        break;
    case 16:
        if(s->yuy2){
            avctx->pix_fmt = PIX_FMT_YUV422;
        }else{
            avctx->pix_fmt = PIX_FMT_YUV422P;
        }
        break;
    case 24:
    case 32:
        if(s->bgr32){
            avctx->pix_fmt = PIX_FMT_RGBA32;
        }else{
            avctx->pix_fmt = PIX_FMT_BGR24;
        }
        break;
    default:
        assert(0);
    }

    alloc_temp(s);

//    av_log(NULL, AV_LOG_DEBUG, "pred:%d bpp:%d hbpp:%d il:%d\n", s->predictor, s->bitstream_bpp, avctx->bits_per_sample, s->interlaced);

    return 0;
}

// Run-length encode one table of 256 Huffman code lengths into buf; returns the number of bytes written.
static int store_table(HYuvContext *s, uint8_t *len, uint8_t *buf){
    int i;
    int index= 0;

    for(i=0; i<256;){
        int val= len[i];
        int repeat=0;

        for(; i<256 && len[i]==val && repeat<255; i++)
            repeat++;

        assert(val < 32 && val >0 && repeat<256 && repeat>0);
        if(repeat>7){
            buf[index++]= val;
            buf[index++]= repeat;
        }else{
            buf[index++]= val | (repeat<<5);
        }
    }

    return index;
}

// Encoder setup: choose the bitstream format, write the 4-byte header plus the three
// stored length tables into extradata, and seed the symbol statistics.
static int encode_init(AVCodecContext *avctx){
    HYuvContext *s = avctx->priv_data;
    int i, j;

    common_init(avctx);

    avctx->extradata= av_mallocz(1024*30); // 256*3+4 == 772
    avctx->stats_out= av_mallocz(1024*30); // 21*256*3(%llu ) + 3(\n) + 1(0) = 16132
    s->version=2;

    avctx->coded_frame= &s->picture;

    switch(avctx->pix_fmt){
    case PIX_FMT_YUV420P:
        s->bitstream_bpp= 12;
        break;
    case PIX_FMT_YUV422P:
        s->bitstream_bpp= 16;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "format not supported\n");
        return -1;
    }
    avctx->bits_per_sample= s->bitstream_bpp;
    s->decorrelate= s->bitstream_bpp >= 24;
    s->predictor= avctx->prediction_method;
    s->interlaced= avctx->flags&CODEC_FLAG_INTERLACED_ME ? 1 : 0;
    if(avctx->context_model==1){
        s->context= avctx->context_model;
        if(s->flags & (CODEC_FLAG_PASS1|CODEC_FLAG_PASS2)){
            av_log(avctx, AV_LOG_ERROR, "context=1 is not compatible with 2 pass huffyuv encoding\n");
            return -1;
        }
    }else s->context= 0;

    if(avctx->codec->id==CODEC_ID_HUFFYUV){
        if(avctx->pix_fmt==PIX_FMT_YUV420P){
            av_log(avctx, AV_LOG_ERROR, "Error: YV12 is not supported by huffyuv; use vcodec=ffvhuff or format=422p\n");
            return -1;
        }
        if(avctx->context_model){
            av_log(avctx, AV_LOG_ERROR, "Error: per-frame huffman tables are not supported by huffyuv; use vcodec=ffvhuff\n");
            return -1;
        }
        if(s->interlaced != ( s->height > 288 ))
            av_log(avctx, AV_LOG_INFO, "using huffyuv 2.2.0 or newer interlacing flag\n");
    }else if(avctx->strict_std_compliance>FF_COMPLIANCE_EXPERIMENTAL){
        av_log(avctx, AV_LOG_ERROR, "This codec is under development; files encoded with it may not be decodable with future versions!!! Set vstrict=-2 / -strict -2 to use it anyway.\n");
        return -1;
    }

    ((uint8_t*)avctx->extradata)[0]= s->predictor;
    ((uint8_t*)avctx->extradata)[1]= s->bitstream_bpp;
    ((uint8_t*)avctx->extradata)[2]= s->interlaced ? 0x10 : 0x20;
    if(s->context)
        ((uint8_t*)avctx->extradata)[2]|= 0x40;
    ((uint8_t*)avctx->extradata)[3]= 0;
    s->avctx->extradata_size= 4;

    if(avctx->stats_in){
        char *p= avctx->stats_in;

        for(i=0; i<3; i++)
            for(j=0; j<256; j++)
                s->stats[i][j]= 1;

        for(;;){
            for(i=0; i<3; i++){
                char *next;

                for(j=0; j<256; j++){
                    s->stats[i][j]+= strtol(p, &next, 0);
                    if(next==p) return -1;
                    p=next;
                }
            }
            if(p[0]==0 || p[1]==0 || p[2]==0) break;
        }
    }else{
        for(i=0; i<3; i++)
            for(j=0; j<256; j++){
                int d= FFMIN(j, 256-j);

                s->stats[i][j]= 100000000/(d+1);
            }
    }

    for(i=0; i<3; i++){
        generate_len_table(s->len[i], s->stats[i], 256);

        if(generate_bits_table(s->bits[i], s->len[i])<0){
            return -1;
        }

        s->avctx->extradata_size+= store_table(s, s->len[i], &((uint8_t*)s->avctx->extradata)[s->avctx->extradata_size]);
    }

    if(s->context){
        for(i=0; i<3; i++){
            int pels = s->width*s->height / (i?40:10);
            for(j=0; j<256; j++){
                int d= FFMIN(j, 256-j);
                s->stats[i][j]= pels/(d+1);
            }
        }
    }else{
        for(i=0; i<3; i++)
            for(j=0; j<256; j++)
                s->stats[i][j]= 0;
    }

//    printf("pred:%d bpp:%d hbpp:%d il:%d\n", s->predictor, s->bitstream_bpp, avctx->bits_per_sample, s->interlaced);

    alloc_temp(s);

    s->picture_number=0;

    return 0;
}

// Decode count pixels worth of packed 4:2:2 data (Y U Y V per sample pair) into s->temp[].
static void decode_422_bitstream(HYuvContext *s, int count){
    int i;

    count/=2;

    for(i=0; i<count; i++){
        s->temp[0][2*i  ]= get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);
        s->temp[1][  i  ]= get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
        s->temp[0][2*i+1]= get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);
        s->temp[2][  i  ]= get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3);
    }
}

// Luma-only variant of the above.
static void decode_gray_bitstream(HYuvContext *s, int count){
    int i;

    count/=2;

    for(i=0; i<count; i++){
        s->temp[0][2*i  ]= get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);
        s->temp[0][2*i+1]= get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);
    }
}

// Write count pixels worth of 4:2:2 samples from s->temp[] to the bitstream,
// gathering statistics for pass 1 and for adaptive (context) coding.
static int encode_422_bitstream(HYuvContext *s, int count){
    int i;

    if(s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < 2*4*count){
        av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    count/=2;
    if(s->flags&CODEC_FLAG_PASS1){
        for(i=0; i<count; i++){
            s->stats[0][ s->temp[0][2*i  ] ]++;
            s->stats[1][ s->temp[1][  i  ] ]++;
            s->stats[0][ s->temp[0][2*i+1] ]++;
            s->stats[2][ s->temp[2][  i  ] ]++;
        }
    }
    if(s->avctx->flags2&CODEC_FLAG2_NO_OUTPUT)
        return 0;
    if(s->context){
        for(i=0; i<count; i++){
            s->stats[0][ s->temp[0][2*i  ] ]++;
            put_bits(&s->pb, s->len[0][ s->temp[0][2*i  ] ], s->bits[0][ s->temp[0][2*i  ] ]);
            s->stats[1][ s->temp[1][  i  ] ]++;
            put_bits(&s->pb, s->len[1][ s->temp[1][  i  ] ], s->bits[1][ s->temp[1][  i  ] ]);
            s->stats[0][ s->temp[0][2*i+1] ]++;
            put_bits(&s->pb, s->len[0][ s->temp[0][2*i+1] ], s->bits[0][ s->temp[0][2*i+1] ]);
            s->stats[2][ s->temp[2][  i  ] ]++;
            put_bits(&s->pb, s->len[2][ s->temp[2][  i  ] ], s->bits[2][ s->temp[2][  i  ] ]);
        }
    }else{
        for(i=0; i<count; i++){
            put_bits(&s->pb, s->len[0][ s->temp[0][2*i  ] ], s->bits[0][ s->temp[0][2*i  ] ]);
            put_bits(&s->pb, s->len[1][ s->temp[1][  i  ] ], s->bits[1][ s->temp[1][  i  ] ]);
            put_bits(&s->pb, s->len[0][ s->temp[0][2*i+1] ], s->bits[0][ s->temp[0][2*i+1] ]);
            put_bits(&s->pb, s->len[2][ s->temp[2][  i  ] ], s->bits[2][ s->temp[2][  i  ] ]);
        }
    }
    return 0;
}

// Luma-only counterpart of encode_422_bitstream().
static int encode_gray_bitstream(HYuvContext *s, int count){
    int i;

    if(s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < 4*count){
        av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    count/=2;
    if(s->flags&CODEC_FLAG_PASS1){
        for(i=0; i<count; i++){
            s->stats[0][ s->temp[0][2*i  ] ]++;
            s->stats[0][ s->temp[0][2*i+1] ]++;
        }
    }
    if(s->avctx->flags2&CODEC_FLAG2_NO_OUTPUT)
        return 0;

    if(s->context){
        for(i=0; i<count; i++){
            s->stats[0][ s->temp[0][2*i  ] ]++;
            put_bits(&s->pb, s->len[0][ s->temp[0][2*i  ] ], s->bits[0][ s->temp[0][2*i  ] ]);
            s->stats[0][ s->temp[0][2*i+1] ]++;
            put_bits(&s->pb, s->len[0][ s->temp[0][2*i+1] ], s->bits[0][ s->temp[0][2*i+1] ]);
        }
    }else{
        for(i=0; i<count; i++){
            put_bits(&s->pb, s->len[0][ s->temp[0][2*i  ] ], s->bits[0][ s->temp[0][2*i  ] ]);
            put_bits(&s->pb, s->len[0][ s->temp[0][2*i+1] ], s->bits[0][ s->temp[0][2*i+1] ]);
        }
    }
    return 0;
}

// Decode count BGR(A) pixels into s->temp[]; when decorrelate is set,
// blue and red are coded as differences against green.
static void decode_bgr_bitstream(HYuvContext *s, int count){
    int i;

    if(s->decorrelate){
        if(s->bitstream_bpp==24){
            for(i=0; i<count; i++){
                s->temp[0][4*i+G]= get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
                s->temp[0][4*i+B]= get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3) + s->temp[0][4*i+G];
                s->temp[0][4*i+R]= get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3) + s->temp[0][4*i+G];
            }
        }else{
            for(i=0; i<count; i++){
                s->temp[0][4*i+G]= get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
                s->temp[0][4*i+B]= get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3) + s->temp[0][4*i+G];
                s->temp[0][4*i+R]= get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3) + s->temp[0][4*i+G];
                                   get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3); //?!
            }
        }
    }else{
        if(s->bitstream_bpp==24){
            for(i=0; i<count; i++){
                s->temp[0][4*i+B]= get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);
                s->temp[0][4*i+G]= get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
                s->temp[0][4*i+R]= get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3);
            }
        }else{
            for(i=0; i<count; i++){
                s->temp[0][4*i+B]= get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);
                s->temp[0][4*i+G]= get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
                s->temp[0][4*i+R]= get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3);
                                   get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3); //?!
            }
        }
    }
}

// Hand the rows decoded since the previous slice to the application via draw_horiz_band().
static void draw_slice(HYuvContext *s, int y){
    int h, cy;
    int offset[4];

    if(s->avctx->draw_horiz_band==NULL)
        return;

    h= y - s->last_slice_end;
    y -= h;

    if(s->bitstream_bpp==12){
        cy= y>>1;
    }else{
        cy= y;
    }

    offset[0] = s->picture.linesize[0]*y;
    offset[1] = s->picture.linesize[1]*cy;
    offset[2] = s->picture.linesize[2]*cy;
    offset[3] = 0;
    emms_c();

    s->avctx->draw_horiz_band(s->avctx, &s->picture, offset, y, 3, h);

    s->last_slice_end= y + h;
}

// Decode one frame: byte-swap the input, read the per-frame Huffman tables if
// context modeling is enabled, then undo the prediction line by line.
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, uint8_t *buf, int buf_size){
    HYuvContext *s = avctx->priv_data;
    const int width= s->width;
    const int width2= s->width>>1;
    const int height= s->height;
    int fake_ystride, fake_ustride, fake_vstride;
    AVFrame * const p= &s->picture;
    int table_size= 0;

    AVFrame *picture = data;

    s->bitstream_buffer= av_fast_realloc(s->bitstream_buffer, &s->bitstream_buffer_size, buf_size + FF_INPUT_BUFFER_PADDING_SIZE);

    s->dsp.bswap_buf((uint32_t*)s->bitstream_buffer, (uint32_t*)buf, buf_size/4);

    if(p->data[0])
        avctx->release_buffer(avctx, p);

    p->reference= 0;
    if(avctx->get_buffer(avctx, p) < 0){
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }

    if(s->context){
        table_size = read_huffman_tables(s, s->bitstream_buffer, buf_size);
        if(table_size < 0)
            return -1;
    }

    init_get_bits(&s->gb, s->bitstream_buffer+table_size, (buf_size-table_size)*8);

    fake_ystride= s->interlaced ? p->linesize[0]*2 : p->linesize[0];
    fake_ustride= s->interlaced ? p->linesize[1]*2 : p->linesize[1];
    fake_vstride= s->interlaced ? p->linesize[2]*2 : p->linesize[2];

    s->last_slice_end= 0;

    if(s->bitstream_bpp<24){
        int y, cy;
        int lefty, leftu, leftv;
        int lefttopy, lefttopu, lefttopv;

        if(s->yuy2){
            p->data[0][3]= get_bits(&s->gb, 8);
            p->data[0][2]= get_bits(&s->gb, 8);
            p->data[0][1]= get_bits(&s->gb, 8);
            p->data[0][0]= get_bits(&s->gb, 8);

            av_log(avctx, AV_LOG_ERROR, "YUY2 output is not implemented yet\n");
            return -1;
        }else{

            leftv= p->data[2][0]= get_bits(&s->gb, 8);
            lefty= p->data[0][1]= get_bits(&s->gb, 8);
            leftu= p->data[1][0]= get_bits(&s->gb, 8);
                   p->data[0][0]= get_bits(&s->gb, 8);

            switch(s->predictor){
            case LEFT:
            case PLANE:
                decode_422_bitstream(s, width-2);
                lefty= add_left_prediction(p->data[0] + 2, s->temp[0], width-2, lefty);
                if(!(s->flags&CODEC_FLAG_GRAY)){
                    leftu= add_left_prediction(p->data[1] + 1, s->temp[1], width2-1, leftu);
                    leftv= add_left_prediction(p->data[2] + 1, s->temp[2], width2-1, leftv);
                }
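
The listing above is cut off inside decode_frame(). One detail worth spelling out is the table serialization used by store_table(): each run of identical code lengths is packed either into a single byte val|(repeat<<5) when the run is at most 7 long, or into a val byte (top three bits zero, since val < 32) followed by a repeat byte for longer runs. The reader for that format is read_huffman_tables(), which is not part of this excerpt; the helper below (parse_table_lengths, a hypothetical name) is only a minimal sketch of the inverse, derived from the writer shown above, not the file's actual implementation.

#include <stdint.h>

// Sketch only: expand the run-length stream produced by store_table() back into
// a 256-entry array of code lengths. Returns the number of bytes consumed, or -1
// on malformed input.
static int parse_table_lengths(const uint8_t *src, int src_size, uint8_t len[256])
{
    int i = 0, index = 0;

    while (i < 256 && index < src_size) {
        int val    = src[index] & 31;   // code length, 1..31 (see the assert in store_table)
        int repeat = src[index] >> 5;   // 1..7 for short runs, 0 marks a long run
        index++;

        if (repeat == 0) {              // long run: the count is in the following byte
            if (index >= src_size)
                return -1;
            repeat = src[index++];
        }
        if (val == 0 || i + repeat > 256)
            return -1;
        while (repeat--)
            len[i++] = val;
    }
    return i == 256 ? index : -1;
}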