huffyuv.c.svn-base
                        /* end of generate_joint_tables(): fill the joint RGB symbol map and build the joint VLC (s->vlc[3]) */
                        map[i][G] = g;
                        map[i][B] = g+b;
                        map[i][R] = g+r;
                    }else{
                        map[i][B] = g;
                        map[i][G] = b;
                        map[i][R] = r;
                    }
                    i++;
                }
            }
        }
        free_vlc(&s->vlc[3]);
        init_vlc(&s->vlc[3], VLC_BITS, i, len, 1, 1, bits, 2, 2, 0);
    }
}

/* Parse the three per-plane code-length tables and (re)build the VLC decode
 * tables; returns the number of bytes consumed from src. */
static int read_huffman_tables(HYuvContext *s, uint8_t *src, int length){
    GetBitContext gb;
    int i;

    init_get_bits(&gb, src, length*8);

    for(i=0; i<3; i++){
        read_len_table(s->len[i], &gb);
        if(generate_bits_table(s->bits[i], s->len[i])<0){
            return -1;
        }
#if 0
for(j=0; j<256; j++){
printf("%6X, %2d, %3d\n", s->bits[i][j], s->len[i][j], j);
}
#endif
        free_vlc(&s->vlc[i]);
        init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1, s->bits[i], 4, 4, 0);
    }

    generate_joint_tables(s);

    return (get_bits_count(&gb)+7)/8;
}

/* Load the fixed tables used by v1 ("classic") huffyuv files, which do not
 * carry Huffman tables of their own. */
static int read_old_huffman_tables(HYuvContext *s){
#if 1
    GetBitContext gb;
    int i;

    init_get_bits(&gb, classic_shift_luma, sizeof(classic_shift_luma)*8);
    read_len_table(s->len[0], &gb);
    init_get_bits(&gb, classic_shift_chroma, sizeof(classic_shift_chroma)*8);
    read_len_table(s->len[1], &gb);

    for(i=0; i<256; i++) s->bits[0][i] = classic_add_luma  [i];
    for(i=0; i<256; i++) s->bits[1][i] = classic_add_chroma[i];

    if(s->bitstream_bpp >= 24){
        memcpy(s->bits[1], s->bits[0], 256*sizeof(uint32_t));
        memcpy(s->len[1] , s->len [0], 256*sizeof(uint8_t));
    }
    memcpy(s->bits[2], s->bits[1], 256*sizeof(uint32_t));
    memcpy(s->len[2] , s->len [1], 256*sizeof(uint8_t));

    for(i=0; i<3; i++){
        free_vlc(&s->vlc[i]);
        init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1, s->bits[i], 4, 4, 0);
    }

    generate_joint_tables(s);

    return 0;
#else
    av_log(s->avctx, AV_LOG_DEBUG, "v1 huffyuv is not supported \n");
    return -1;
#endif
}

/* Allocate per-plane scratch line buffers. */
static void alloc_temp(HYuvContext *s){
    int i;

    if(s->bitstream_bpp<24){
        for(i=0; i<3; i++){
            s->temp[i]= av_malloc(s->width + 16);
        }
    }else{
        for(i=0; i<2; i++){
            s->temp[i]= av_malloc(4*s->width + 16);
        }
    }
}

/* Initialization shared by the decoder and the encoder. */
static int common_init(AVCodecContext *avctx){
    HYuvContext *s = avctx->priv_data;

    s->avctx= avctx;
    s->flags= avctx->flags;

    dsputil_init(&s->dsp, avctx);

    s->width= avctx->width;
    s->height= avctx->height;
    assert(s->width>0 && s->height>0);

    return 0;
}

#ifdef CONFIG_DECODERS
static int decode_init(AVCodecContext *avctx){
    HYuvContext *s = avctx->priv_data;

    common_init(avctx);
    memset(s->vlc, 0, 3*sizeof(VLC));

    avctx->coded_frame= &s->picture;
    s->interlaced= s->height > 288;

    s->bgr32=1;
//    if(avctx->extradata)
//        printf("extradata:%X, extradata_size:%d\n", *(uint32_t*)avctx->extradata, avctx->extradata_size);
    if(avctx->extradata_size){
        if((avctx->bits_per_sample&7) && avctx->bits_per_sample != 12)
            s->version=1; // do such files exist at all?
        else
            s->version=2;
    }else
        s->version=0;

    if(s->version==2){
        int method, interlace;

        method= ((uint8_t*)avctx->extradata)[0];
        s->decorrelate= method&64 ? 1 : 0;
        s->predictor= method&63;
        s->bitstream_bpp= ((uint8_t*)avctx->extradata)[1];
        if(s->bitstream_bpp==0)
            s->bitstream_bpp= avctx->bits_per_sample&~7;
        interlace= (((uint8_t*)avctx->extradata)[2] & 0x30) >> 4;
        s->interlaced= (interlace==1) ? 1 : (interlace==2) ? 0 : s->interlaced;
        s->context= ((uint8_t*)avctx->extradata)[2] & 0x40 ? 1 : 0;

        if(read_huffman_tables(s, ((uint8_t*)avctx->extradata)+4, avctx->extradata_size) < 0)
            return -1;
    }else{
        switch(avctx->bits_per_sample&7){
        case 1:
            s->predictor= LEFT;
            s->decorrelate= 0;
            break;
        case 2:
            s->predictor= LEFT;
            s->decorrelate= 1;
            break;
        case 3:
            s->predictor= PLANE;
            s->decorrelate= avctx->bits_per_sample >= 24;
            break;
        case 4:
            s->predictor= MEDIAN;
            s->decorrelate= 0;
            break;
        default:
            s->predictor= LEFT; //OLD
            s->decorrelate= 0;
            break;
        }
        s->bitstream_bpp= avctx->bits_per_sample & ~7;
        s->context= 0;

        if(read_old_huffman_tables(s) < 0)
            return -1;
    }

    switch(s->bitstream_bpp){
    case 12:
        avctx->pix_fmt = PIX_FMT_YUV420P;
        break;
    case 16:
        if(s->yuy2){
            avctx->pix_fmt = PIX_FMT_YUYV422;
        }else{
            avctx->pix_fmt = PIX_FMT_YUV422P;
        }
        break;
    case 24:
    case 32:
        if(s->bgr32){
            avctx->pix_fmt = PIX_FMT_RGB32;
        }else{
            avctx->pix_fmt = PIX_FMT_BGR24;
        }
        break;
    default:
        assert(0);
    }

    alloc_temp(s);

//    av_log(NULL, AV_LOG_DEBUG, "pred:%d bpp:%d hbpp:%d il:%d\n", s->predictor, s->bitstream_bpp, avctx->bits_per_sample, s->interlaced);

    return 0;
}
#endif

#ifdef CONFIG_ENCODERS
/* Run-length encode a 256-entry code-length table into buf; returns the
 * number of bytes written. */
static int store_table(HYuvContext *s, uint8_t *len, uint8_t *buf){
    int i;
    int index= 0;

    for(i=0; i<256;){
        int val= len[i];
        int repeat=0;

        for(; i<256 && len[i]==val && repeat<255; i++)
            repeat++;

        assert(val < 32 && val >0 && repeat<256 && repeat>0);
        if(repeat>7){
            buf[index++]= val;
            buf[index++]= repeat;
        }else{
            buf[index++]= val | (repeat<<5);
        }
    }

    return index;
}

static int encode_init(AVCodecContext *avctx){
    HYuvContext *s = avctx->priv_data;
    int i, j;

    common_init(avctx);

    avctx->extradata= av_mallocz(1024*30); // 256*3+4 == 772
    avctx->stats_out= av_mallocz(1024*30); // 21*256*3(%llu ) + 3(\n) + 1(0) = 16132
    s->version=2;

    avctx->coded_frame= &s->picture;

    switch(avctx->pix_fmt){
    case PIX_FMT_YUV420P:
        s->bitstream_bpp= 12;
        break;
    case PIX_FMT_YUV422P:
        s->bitstream_bpp= 16;
        break;
    case PIX_FMT_RGB32:
        s->bitstream_bpp= 24;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "format not supported\n");
        return -1;
    }
    avctx->bits_per_sample= s->bitstream_bpp;
    s->decorrelate= s->bitstream_bpp >= 24;
    s->predictor= avctx->prediction_method;
    s->interlaced= avctx->flags&CODEC_FLAG_INTERLACED_ME ? 1 : 0;
    if(avctx->context_model==1){
        s->context= avctx->context_model;
        if(s->flags & (CODEC_FLAG_PASS1|CODEC_FLAG_PASS2)){
            av_log(avctx, AV_LOG_ERROR, "context=1 is not compatible with 2 pass huffyuv encoding\n");
            return -1;
        }
    }else s->context= 0;

    if(avctx->codec->id==CODEC_ID_HUFFYUV){
        if(avctx->pix_fmt==PIX_FMT_YUV420P){
            av_log(avctx, AV_LOG_ERROR, "Error: YV12 is not supported by huffyuv; use vcodec=ffvhuff or format=422p\n");
            return -1;
        }
        if(avctx->context_model){
            av_log(avctx, AV_LOG_ERROR, "Error: per-frame huffman tables are not supported by huffyuv; use vcodec=ffvhuff\n");
            return -1;
        }
        if(s->interlaced != ( s->height > 288 ))
            av_log(avctx, AV_LOG_INFO, "using huffyuv 2.2.0 or newer interlacing flag\n");
    }

    if(s->bitstream_bpp>=24 && s->predictor==MEDIAN){
        av_log(avctx, AV_LOG_ERROR, "Error: RGB is incompatible with median predictor\n");
        return -1;
    }

    ((uint8_t*)avctx->extradata)[0]= s->predictor | (s->decorrelate << 6);
    ((uint8_t*)avctx->extradata)[1]= s->bitstream_bpp;
    ((uint8_t*)avctx->extradata)[2]= s->interlaced ? 0x10 : 0x20;
    if(s->context)
        ((uint8_t*)avctx->extradata)[2]|= 0x40;
    ((uint8_t*)avctx->extradata)[3]= 0;
    s->avctx->extradata_size= 4;

    if(avctx->stats_in){
        char *p= avctx->stats_in;

        for(i=0; i<3; i++)
            for(j=0; j<256; j++)
                s->stats[i][j]= 1;

        for(;;){
            for(i=0; i<3; i++){
                char *next;

                for(j=0; j<256; j++){
                    s->stats[i][j]+= strtol(p, &next, 0);
                    if(next==p) return -1;
                    p=next;
                }
            }
            if(p[0]==0 || p[1]==0 || p[2]==0) break;
        }
    }else{
        for(i=0; i<3; i++)
            for(j=0; j<256; j++){
                int d= FFMIN(j, 256-j);

                s->stats[i][j]= 100000000/(d+1);
            }
    }

    for(i=0; i<3; i++){
        generate_len_table(s->len[i], s->stats[i], 256);

        if(generate_bits_table(s->bits[i], s->len[i])<0){
            return -1;
        }

        s->avctx->extradata_size+=
            store_table(s, s->len[i], &((uint8_t*)s->avctx->extradata)[s->avctx->extradata_size]);
    }

    if(s->context){
        for(i=0; i<3; i++){
            int pels = s->width*s->height / (i?40:10);
            for(j=0; j<256; j++){
                int d= FFMIN(j, 256-j);
                s->stats[i][j]= pels/(d+1);
            }
        }
    }else{
        for(i=0; i<3; i++)
            for(j=0; j<256; j++)
                s->stats[i][j]= 0;
    }

//    printf("pred:%d bpp:%d hbpp:%d il:%d\n", s->predictor, s->bitstream_bpp, avctx->bits_per_sample, s->interlaced);

    alloc_temp(s);

    s->picture_number=0;

    return 0;
}
#endif /* CONFIG_ENCODERS */

/* TODO instead of restarting the read when the code isn't in the first level
 * of the joint table, jump into the 2nd level of the individual table. */
#define READ_2PIX(dst0, dst1, plane1){\
    uint16_t code = get_vlc2(&s->gb, s->vlc[3+plane1].table, VLC_BITS, 1);\
    if(code != 0xffff){\
        dst0 = code>>8;\
        dst1 = code;\
    }else{\
        dst0 = get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);\
        dst1 = get_vlc2(&s->gb, s->vlc[plane1].table, VLC_BITS, 3);\
    }\
}

static void decode_422_bitstream(HYuvContext *s, int count){
    int i;

    count/=2;