📄 macroblock.c
            for (n=0; n < 2; n++)
            {
#if TRACE
              snprintf(currSE.tracestring, TRACESTRING_SIZE, "FMVD (pred %d)", pmv[n]);
#endif
              img->subblock_x = i; // position used for context determination
              img->subblock_y = j; // position used for context determination
              currSE.value2 = (!bframe ? n : 2*n); // identifies the component; only used for context determination

              readSyntaxElement_UVLC(&currSE, inp);
              curr_mvd = currSE.value1;
              vec = curr_mvd + pmv[n];  /* find motion vector */

              // need B support
              if (!bframe)
              {
                for (ii=0; ii<step_h; ii++)
                  for (jj=0; jj<step_v; jj++)
                    img->mv[i4+ii+BLOCK_SIZE][j4+jj][n] = vec;
              }
              else // B frame
              {
                for (ii=0; ii<step_h; ii++)
                  for (jj=0; jj<step_v; jj++)
                    img->fw_mv[i4+ii+BLOCK_SIZE][j4+jj][n] = vec;
              }

              /* store (oversampled) mvd */
              for (l=0; l < step_v; l++)
                for (m=0; m < step_h; m++)
                  currMB->mvd[0][j+l][i+m][n] = curr_mvd;
            }
          }
      }
      else if (currMB->b8mode[k=2*j0+i0]==0)
      { // direct mode
        // by Junhao Zheng 2004-08-04 22:00:51
        //step_v0 = step_h0 = 1;
        for (j=j0; j<j0+step_v0; j++)
          for (i=i0; i<i0+step_h0; i++)
          {
            ref = refFrArr[img->block_y+j][img->block_x+i];
            img_block_y = (img->current_mb_nr%2) ? (img->block_y-4)/2 : img->block_y/2;

            if (ref == -1)
            {
              //sw
              img->fw_refFrArr[img->block_y+j][img->block_x+i] = 0;
              img->bw_refFrArr[img->block_y+j][img->block_x+i] = 0;
              j4 = img->block_y+j;
              i4 = img->block_x+i;
              for (ii=0; ii < 2; ii++)
              {
                img->fw_mv[i4+BLOCK_SIZE][j4][ii] = 0;
                img->bw_mv[i4+BLOCK_SIZE][j4][ii] = 0;
              }
              SetMotionVectorPredictor(img, &(img->fw_mv[i4+BLOCK_SIZE][j4][0]),
                                       &(img->fw_mv[i4+BLOCK_SIZE][j4][1]), 0, img->fw_refFrArr,
                                       img->fw_mv, 0, 0, 16, 16, 0, 1);
              SetMotionVectorPredictor(img, &(img->bw_mv[i4+BLOCK_SIZE][j4][0]),
                                       &(img->bw_mv[i4+BLOCK_SIZE][j4][1]), 0, img->bw_refFrArr,
                                       img->bw_mv, 0, 0, 16, 16, -1, 1);
            }
            else
            {
              frame_no_next_P = 2*img->imgtr_next_P;
              frame_no_B = 2*img->tr;
              delta_P = 2*(img->imgtr_next_P - img->imgtr_last_P);
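              // Picture-distance terms for temporal direct mode.  The *2 scaling
              // (presumably to count in field units) keeps frame_no_next_P, frame_no_B
              // and delta_P on a common time base; they feed iTRp/iTRd/iTRb below.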
              if (!img->picture_structure)
              {
                if (img->current_mb_nr_fld < img->PicSizeInMbs) //top field
                  scale_refframe = ref == 0 ? 0 : 1;
                else
                  scale_refframe = ref == 1 ? 1 : 2;
              }
              else
                scale_refframe = 0;

              if (img->picture_structure)
              {
                iTRp  = (ref+1)*delta_P;
                iTRp1 = (scale_refframe+1)*delta_P;
              }
              else
              {
                if (img->current_mb_nr_fld < img->PicSizeInMbs) //top field
                {
                  iTRp  = delta_P*(ref/2+1)-(ref+1)%2;                      // the latest backward reference
                  iTRp1 = delta_P*(scale_refframe/2+1)-(scale_refframe+1)%2; // the latest backward reference
                  bw_ref = 0;
                }
                else
                {
                  iTRp  = 1 + delta_P*((ref+1)/2)-ref%2;
                  iTRp1 = 1 + delta_P*((scale_refframe+1)/2)-scale_refframe%2;
                  bw_ref = 1;
                }
              }
              iTRd = frame_no_next_P - frame_no_B;
              iTRb = iTRp1 - iTRd;

              if (!img->picture_structure)
              {
                if (img->current_mb_nr_fld >= img->PicSizeInMbs)
                  scale_refframe--;
                img->fw_refFrArr[img->block_y+j][img->block_x+i] = scale_refframe; // PLUS2, Krit, 7/06 (used to be + 1)
                img->bw_refFrArr[img->block_y+j][img->block_x+i] = bw_ref;
              }
              else
              {
                img->fw_refFrArr[img->block_y+j][img->block_x+i] = 0; // PLUS2, Krit, 7/06 (used to be + 1)
                // by Junhao Zheng 2004-08-04 21:50:38
                //img->bw_refFrArr[img->block_y+j][img->block_x+i]=1;
                img->bw_refFrArr[img->block_y+j][img->block_x+i] = 0;
              }
              j4 = img->block_y+j;
              i4 = img->block_x+i;
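              // Derive this block's forward/backward MVs from the co-located MV.
              // 16384/iTRp acts as a fixed-point reciprocal, so the expressions below
              // approximate mv*iTRb/iTRp and mv*iTRd/iTRp without a per-component
              // division; the two branches mirror the sign of the co-located MV.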
              for (ii=0; ii < 2; ii++)
              {
                if (img->mv[img->block_x+i+4][img->block_y+j][ii] < 0)
                {
                  img->fw_mv[i4+BLOCK_SIZE][j4][ii] = -(((16384/iTRp)*(1-iTRb*img->mv[img->block_x+i+4][img->block_y+j][ii])-1)>>14);
                  img->bw_mv[i4+BLOCK_SIZE][j4][ii] = ((16384/iTRp)*(1-iTRd*img->mv[img->block_x+i+4][img->block_y+j][ii])-1)>>14;
                }
                else
                {
                  img->fw_mv[i4+BLOCK_SIZE][j4][ii] = ((16384/iTRp)*(1+iTRb*img->mv[img->block_x+i+4][img->block_y+j][ii])-1)>>14;
                  img->bw_mv[i4+BLOCK_SIZE][j4][ii] = -(((16384/iTRp)*(1+iTRd*img->mv[img->block_x+i+4][img->block_y+j][ii])-1)>>14);
                }
              }
            }
          }
      }
      img_block_y = img->block_y;
      i0 += max(1,step_h0);
    }
    j0 += max(1,step_v0);
  }
  //===== READ BACKWARD MOTION VECTORS =====
  currSE.type = SE_MVD;
  currSE.mapping = linfo_se;
  img_block_y = img->block_y;

  for (j0=0; j0<2; )
  {
    if (currMB->mb_type != I4MB)
    {
      step_h0 = (BLOCK_STEP[(currMB->mb_type==P8x8) ? 4 : currMB->mb_type][0]);
      step_v0 = (BLOCK_STEP[(currMB->mb_type==P8x8) ? 4 : currMB->mb_type][1]);
    }
    if ((currMB->mb_type==I4MB && j0==0))
    { j0 += 1; continue; }

    for (i0=0; i0<2; )
    {
      k = 2*j0 + i0;
      if ((currMB->b8pdir[k]==1 || currMB->b8pdir[k]==2) && (currMB->b8mode[k]!=0)) // has backward vector
      {
        mv_mode  = currMB->b8mode[k];
        step_h   = BLOCK_STEP[mv_mode][0];
        step_v   = BLOCK_STEP[mv_mode][1];
        refframe = img->bw_refFrArr[img->block_y+j0][img->block_x+i0]; // always 0

        use_scaled_mv = 0;
        if (currMB->b8pdir[k]==2)
        {
          fw_refframe = img->fw_refFrArr[img->block_y+j0][img->block_x+i0];
          current_tr  = 2*img->tr_frm;
          if ((current_tr >= img->imgtr_next_P) && (current_tr >= img->imgtr_last_P))
          {
            use_scaled_mv = 1;
            //mv_scale = ((refframe+1)*256)/(fw_refframe+1);
            mv_scale = ((refframe+1)*512)/(fw_refframe+1);
          }
        }
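        // Symmetric prediction (b8pdir==2): when current_tr is not smaller than either
        // reference tr, the backward MV predictor is scaled from the decoded forward MV
        // with mv_scale = 512*(refframe+1)/(fw_refframe+1), applied below as
        // (mv_scale*fw_mv+256)>>9.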
        for (j=j0; j<j0+step_v0; j+=step_v)
          for (i=i0; i<i0+step_h0; i+=step_h)
          {
            j4 = img->block_y+j;
            i4 = img->block_x+i;

            // first make mv-prediction
            if (use_scaled_mv)
            {
              //pmv[0] = (mv_scale*img->fw_mv[i4+BLOCK_SIZE][j4][0]+128)>>8;
              //pmv[1] = (mv_scale*img->fw_mv[i4+BLOCK_SIZE][j4][1]+128)>>8;
              pmv[0] = (mv_scale*img->fw_mv[i4+BLOCK_SIZE][j4][0]+256)>>9;
              pmv[1] = (mv_scale*img->fw_mv[i4+BLOCK_SIZE][j4][1]+256)>>9;
            }
            else
              SetMotionVectorPredictor (img, pmv, pmv+1, refframe, img->bw_refFrArr, img->bw_mv, i, j, 8*step_h, 8*step_v, -1, 0); //Lou 1016

            for (k=0; k < 2; k++)
            {
#if TRACE
              snprintf(currSE.tracestring, TRACESTRING_SIZE, "BMVD (pred %d)", pmv[k]);
#endif
              img->subblock_x = i; // position used for context determination
              img->subblock_y = j; // position used for context determination
              currSE.value2 = 2*k+1; // identifies the component; only used for context determination
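              // In symmetric mode no backward MVD is present in the bitstream: the
              // vector is derived from the forward MV and the forward/backward picture
              // distances (DistanceIndexFw/Bw).  Otherwise the MVD is read with UVLC
              // and added to the predictor pmv[k].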
              if (currMB->b8pdir[2*j0+i0] == 2)
              {
                int delta_P, iTRp, DistanceIndexFw, DistanceIndexBw, refframe, delta_PB;

                refframe = fw_refframe;
                delta_P = 2*(img->imgtr_next_P - img->imgtr_last_P);
                if (img->picture_structure)
                  iTRp = (refframe+1)*delta_P; // the latest backward reference
                else
                {
                  iTRp = delta_P; //refframe == 0 ? delta_P-1 : delta_P+1;
                }
                delta_PB = 2*(img->tr - img->imgtr_last_P);

                if (!img->picture_structure)
                {
                  if (img->current_mb_nr_fld < img->PicSizeInMbs) //top field
                    DistanceIndexFw = refframe == 0 ? delta_PB-1 : delta_PB;
                  else
                    DistanceIndexFw = refframe == 0 ? delta_PB : delta_PB+1;
                }
                else
                  DistanceIndexFw = delta_PB;
                DistanceIndexBw = iTRp - DistanceIndexFw;

                curr_mvd = - ((img->fw_mv[i4+BLOCK_SIZE][j4][k]*DistanceIndexBw*(512/DistanceIndexFw)+256)>>9);
                vec = curr_mvd;          /* find motion vector */
              }
              else
              {
                readSyntaxElement_UVLC(&currSE, inp);
                curr_mvd = currSE.value1;
                vec = curr_mvd + pmv[k]; /* find motion vector */
              }

              for (ii=0; ii<step_h; ii++)
                for (jj=0; jj<step_v; jj++)
                  img->bw_mv[i4+ii+BLOCK_SIZE][j4+jj][k] = vec;

              /* store (oversampled) mvd */
              for (l=0; l < step_v; l++)
                for (m=0; m < step_h; m++)
                  currMB->mvd[1][j+l][i+m][k] = curr_mvd;
            }
          }
      }
      i0 += max(1,step_h0);
    }
    j0 += max(1,step_v0);
  }
}
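
/*
 * A minimal illustrative sketch (hypothetical helper, not referenced by the decoder):
 * it restates the division-free MV scaling used in the direct-mode branch above,
 * where fw_mv = scale(mv, iTRb, iTRp) and bw_mv = -scale(mv, iTRd, iTRp).
 */
#if 0
static int direct_mv_scale_sketch(int mv, int dist, int iTRp)
{
  if (mv < 0)
    return -(((16384/iTRp) * (1 - dist*mv) - 1) >> 14);
  else
    return  ((16384/iTRp) * (1 + dist*mv) - 1) >> 14;
}
#endif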
/*
*************************************************************************
* Function:    Get coded block pattern and coefficients (run/level)
*              from the NAL
* Input:
* Output:
* Return:
* Attention:
*************************************************************************
*/
void readCBPandCoeffsFromNAL(struct img_par *img, struct inp_par *inp)
{
  int i,j;
  int mb_nr = img->current_mb_nr; //GBimg->current_mb_nr;
  int m2, jg2;
  Macroblock *currMB = &mb_data[mb_nr];
  int iii,jjj;
  int b8;
  int block_x, block_y;
  int qp_per    = (img->qp-MIN_QP)/6;
  int qp_rem    = (img->qp-MIN_QP)%6;
  int qp_per_uv = QP_SCALE_CR[img->qp-MIN_QP]/6;
  int qp_rem_uv = QP_SCALE_CR[img->qp-MIN_QP]%6;

  for (i=0; i<BLOCK_SIZE; i++)
    for (j=0; j<BLOCK_SIZE; j++)
      for (iii=0; iii<BLOCK_SIZE; iii++)
        for (jjj=0; jjj<BLOCK_SIZE; jjj++)
          img->cof[i][j][iii][jjj] = 0; // reset luma coeffs

  qp_per    = (img->qp-MIN_QP)/6;
  qp_rem    = (img->qp-MIN_QP)%6;
  qp_per_uv = QP_SCALE_CR[img->qp-MIN_QP]/6;
  qp_rem_uv = QP_SCALE_CR[img->qp-MIN_QP]%6;

  currMB->qp = img->qp;
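
  // CBP layout as used below: bits 0..3 select the four luma 8x8 blocks
  // (b8 = 0..3), bits 4 and 5 flag the two chroma 8x8 blocks (b8 = 4, 5).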
  // luma coefficients
  for (block_y=0; block_y < 4; block_y += 2) /* all modes */
  {
    for (block_x=0; block_x < 4; block_x += 2)
    {
      b8 = 2*(block_y/2) + block_x/2;
      if (currMB->cbp & (1<<b8))
      {
        readLumaCoeff_B8(b8, inp, img);
      }
    }
  }

  for (j=4; j<6; j++) // reset all chroma coeffs before read
    for (i=0; i<4; i++)
      for (iii=0; iii<4; iii++)
        for (jjj=0; jjj<4; jjj++)
          img->cof[i][j][iii][jjj] = 0;

  m2  = img->mb_x*2;
  jg2 = img->mb_y*2;

  if ((currMB->cbp>>4) & 1)
  {
    readChromaCoeff_B8(4, inp, img);
  }
  if ((currMB->cbp>>4) & 2)
  {
    readChromaCoeff_B8(5, inp, img);
  }
}
/*
*************************************************************************
* Function:    decode one macroblock
* Input:
* Output:
* Return:
* Attention:
*************************************************************************
*/
int decode_one_macroblock(struct img_par *img, struct inp_par *inp)
{
  unsigned char edgepixu[40];
#define EPU (edgepixu+20)
  unsigned char edgepixv[40];
#define EPV (edgepixv+20)
  int x, y, last_pix, new_pix;
  int bs_x = 8;
  int bs_y = 8;
  int tmp_block[8][8];
  int tmp_blockbw[8][8];
  int i=0, j=0, ii=0, jj=0, i1=0, j1=0, j4=0, i4=0;
  int js0=0, js1=0, js2=0, js3=0, jf=0;
  int uv, hv;
  int vec1_x=0, vec1_y=0, vec2_x=0, vec2_y=0;
  int ioff, joff;
  int curr_blk[B8_SIZE][B8_SIZE]; //SW for AVS
  int tmp;
  int block8x8; // needed for ABT
  int bw_pred, fw_pred, ifx;
  int ii0, jj0, ii1, jj1, if1, jf1, if0, jf0;
  int mv_mul, f1, f2, f3, f4;
  const byte decode_block_scan[16] = {0,1,4,5,2,3,6,7,8,9,12,13,10,11,14,15};
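  // decode_block_scan above appears to list the sixteen 4x4 blocks in decoding
  // order, grouped by 8x8 quadrant (0,1,4,5 = top-left, 2,3,6,7 = top-right, ...).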
  Macroblock *currMB = &mb_data[img->current_mb_nr]; //GB current_mb_nr];
  int refframe, fw_refframe, bw_refframe, mv_mode, pred_dir, intra_prediction; // = currMB->ref_frame;
  int fw_ref_idx, bw_ref_idx;
  int ***mv_array, ***fw_mv_array, ***bw_mv_array;
  int bframe = (img->type==B_IMG);
  int b8_s=0, b8_e=4, incr_y=1, off_y=0, even_y=4, add_y=0;
  int frame_no_next_P, frame_no_B, delta_P;
  int iTRb, iTRp, iTRd;
  int mb_nr = img->current_mb_nr; //GBimg->current_mb_nr;
  int mb_width = img->width/16;
  int mb_available_up;
  int mb_available_left;
  int fwd_refframe_offset, bwd_refframe_offset;
  int direct_pdir;
  int scale_refframe, iTRp1, bw_ref;

  // !! shenyanfei
  int fw_lum_scale, fw_lum_shift;
  int bw_lum_scale, bw_lum_shift;
  //cjw 20051219 Weighted Prediction
  int fw_chrom_scale, fw_chrom_shift;
  int bw_chrom_scale, bw_chrom_shift;

  int mb_available_up_right  = ((img->mb_y==0) || (img->mb_x==img->width/MB_BLOCK_SIZE-1))  ? 0 : (mb_data[mb_nr].slice_nr == mb_data[mb_nr-mb_width+1].slice_nr);
  int mb_available_left_down = ((img->mb_x==0) || (img->mb_y==img->height/MB_BLOCK_SIZE-1)) ? 0 : (mb_data[mb_nr].slice_nr == mb_data[mb_nr+mb_width-1].slice_nr);
  //by oliver according to 1658

  mb_available_up   = (img->mb_y == 0) ? 0 : (mb_data[mb_nr].slice_nr == mb_data[mb_nr-mb_width].slice_nr);
  mb_available_left = (img->mb_x == 0) ? 0 : (mb_data[mb_nr].slice_nr == mb_data[mb_nr-1].slice_nr);
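  // Neighbour availability: an adjacent macroblock can be used for prediction only
  // if it lies inside the picture and belongs to the same slice as the current one.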
  /*
  //cjw weighted prediction parameter map 20060112
  ///frame coding/////////
  P   img->lum_scale[0]   fw[0]
      img->lum_scale[1]   fw[1]
  B   img->lum_scale[0]   fw[0]
      img->lum_scale[1]   bw[0]
  ///field coding////////
  P   img->lum_s