
explode.c

Category: zip compression
Language: C
        DUMPBITS(8)
      }

      /* do the copy */
      s = (s > (ulg)n ? s - (ulg)n : 0);
      do {
#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
        if (G.redirect_slide) {
          /* &= w/ wszimpl not needed and wrong if redirect */
          if (d >= wszimpl)
            return 1;
          e = wszimpl - (d > w ? d : w);
        } else
#endif
          e = wszimpl - ((d &= wszimpl-1) > w ? d : w);
        if (e > n) e = n;
        n -= e;
        if (u && w <= d)
        {
          memzero(redirSlide + w, e);
          w += e;
          d += e;
        }
        else
#ifndef NOMEMCPY
          if (w - d >= e)       /* (this test assumes unsigned comparison) */
          {
            memcpy(redirSlide + w, redirSlide + d, e);
            w += e;
            d += e;
          }
          else                  /* do it slow to avoid memcpy() overlap */
#endif /* !NOMEMCPY */
            do {
              redirSlide[w++] = redirSlide[d++];
            } while (--e);
        if (w == wszimpl)
        {
          if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
            return retval;
          w = u = 0;
        }
      } while (n);
    }
  }

  /* flush out redirSlide */
  if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
    return retval;
  if (G.csize + G.incnt + (k >> 3))   /* should have read csize bytes, but */
  {                        /* sometimes read one too many:  k>>3 compensates */
    G.used_csize = G.lrec.csize - G.csize - G.incnt - (k >> 3);
    return 5;
  }
  return 0;
}
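The copy loop above has to cope with a match whose source and destination overlap inside the sliding window: the previous occurrence may lie fewer than n bytes behind the current write position. The snippet below is a minimal standalone sketch, not part of explode.c, showing why memcpy() is only used when the two ranges cannot overlap and why the byte-by-byte fallback deliberately re-reads bytes it has just written. The helper name copy_match(), the buffer, and the sizes are invented for the example.

#include <stdio.h>
#include <string.h>

/* Hypothetical helper mirroring the fallback loop in explode_lit()/
 * explode_nolit(): copy 'len' bytes from 'dist' bytes behind the current
 * write position 'w' in 'window'.  When dist < len the source and
 * destination overlap; copying byte by byte from front to back repeats
 * the just-written bytes, which is exactly what the imploded stream
 * expects, whereas memcpy() has undefined behaviour for such overlap. */
static void copy_match(unsigned char *window, unsigned w,
                       unsigned dist, unsigned len)
{
    unsigned d = w - dist;          /* source index inside the window */

    if (w - d >= len) {             /* no overlap: fast path */
        memcpy(window + w, window + d, len);
    } else {                        /* overlap: slow but well-defined path */
        do {
            window[w++] = window[d++];
        } while (--len);
    }
}

int main(void)
{
    unsigned char window[32] = "abc";   /* window already holds "abc" */

    copy_match(window, 3, 3, 6);        /* copy 6 bytes from 3 back */
    printf("%s\n", window);             /* prints "abcabcabc" */
    return 0;
}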
static int explode_nolit(__G__ tl, td, bl, bd, bdl)
     __GDEF
struct huft *tl, *td;   /* length and distance decoder tables */
int bl, bd;             /* number of bits decoded by tl[] and td[] */
unsigned bdl;           /* number of distance low bits */
/* Decompress the imploded data using uncoded literals and a sliding
   window (of size 2^(6+bdl) bytes). */
{
  ulg s;                /* bytes to decompress */
  register unsigned e;  /* table entry flag/number of extra bits */
  unsigned n, d;        /* length and index for copy */
  unsigned w;           /* current window position */
  struct huft *t;       /* pointer to table entry */
  unsigned ml, md;      /* masks for bl and bd bits */
  unsigned mdl;         /* mask for bdl (distance lower) bits */
  register ulg b;       /* bit buffer */
  register unsigned k;  /* number of bits in bit buffer */
  unsigned u;           /* true if unflushed */
  int retval = 0;       /* error code returned: initialized to "no error" */

  /* explode the coded data */
  b = k = w = 0;                /* initialize bit buffer, window */
  u = 1;                        /* buffer unflushed */
  ml = mask_bits[bl];           /* precompute masks for speed */
  md = mask_bits[bd];
  mdl = mask_bits[bdl];
  s = G.lrec.ucsize;
  while (s > 0)                 /* do until ucsize bytes uncompressed */
  {
    NEEDBITS(1)
    if (b & 1)                  /* then literal--get eight bits */
    {
      DUMPBITS(1)
      s--;
      NEEDBITS(8)
      redirSlide[w++] = (uch)b;
      if (w == wszimpl)
      {
        if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
          return retval;
        w = u = 0;
      }
      DUMPBITS(8)
    }
    else                        /* else distance/length */
    {
      DUMPBITS(1)
      NEEDBITS(bdl)             /* get distance low bits */
      d = (unsigned)b & mdl;
      DUMPBITS(bdl)
      DECODEHUFT(td, bd, md)    /* get coded distance high bits */
      d = w - d - t->v.n;       /* construct offset */
      DECODEHUFT(tl, bl, ml)    /* get coded length */
      n = t->v.n;
      if (e)                    /* get length extra bits */
      {
        NEEDBITS(8)
        n += (unsigned)b & 0xff;
        DUMPBITS(8)
      }

      /* do the copy */
      s = (s > (ulg)n ? s - (ulg)n : 0);
      do {
#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
        if (G.redirect_slide) {
          /* &= w/ wszimpl not needed and wrong if redirect */
          if (d >= wszimpl)
            return 1;
          e = wszimpl - (d > w ? d : w);
        } else
#endif
          e = wszimpl - ((d &= wszimpl-1) > w ? d : w);
        if (e > n) e = n;
        n -= e;
        if (u && w <= d)
        {
          memzero(redirSlide + w, e);
          w += e;
          d += e;
        }
        else
#ifndef NOMEMCPY
          if (w - d >= e)       /* (this test assumes unsigned comparison) */
          {
            memcpy(redirSlide + w, redirSlide + d, e);
            w += e;
            d += e;
          }
          else                  /* do it slow to avoid memcpy() overlap */
#endif /* !NOMEMCPY */
            do {
              redirSlide[w++] = redirSlide[d++];
            } while (--e);
        if (w == wszimpl)
        {
          if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
            return retval;
          w = u = 0;
        }
      } while (n);
    }
  }

  /* flush out redirSlide */
  if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
    return retval;
  if (G.csize + G.incnt + (k >> 3))   /* should have read csize bytes, but */
  {                        /* sometimes read one too many:  k>>3 compensates */
    G.used_csize = G.lrec.csize - G.csize - G.incnt - (k >> 3);
    return 5;
  }
  return 0;
}
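Both decoders above drive everything through the NEEDBITS()/DUMPBITS() bit-buffer macros that explode.c shares with inflate.c (which is also why they are #undef'd at the end of this file). Their real definitions are not shown in this listing and also handle end-of-input; the following is only a rough, simplified model of the idea, with next_byte() standing in for the codebase's actual input macro.

/* Simplified model of the bit buffer behind NEEDBITS/DUMPBITS.  The real
 * macros in the UnZip sources live in the shared inflate/explode code and
 * additionally deal with end of input; next_byte() is a hypothetical
 * stand-in for the real byte source. */
static unsigned long b = 0;   /* bit buffer; low-order bits are oldest */
static unsigned k = 0;        /* number of valid bits currently in b */

extern int next_byte(void);   /* hypothetical input routine */

#define NEEDBITS(n) do {                          \
        while (k < (n)) {                         \
            b |= (unsigned long)next_byte() << k; \
            k += 8;                               \
        }                                         \
    } while (0)

#define DUMPBITS(n) do {                          \
        b >>= (n);                                \
        k -= (n);                                 \
    } while (0)

/* Usage pattern, as in explode_nolit():
 *   NEEDBITS(1); if (b & 1) ...; DUMPBITS(1);            -- one flag bit
 *   NEEDBITS(8); c = (unsigned char)b; DUMPBITS(8);      -- a raw literal
 */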
int explode(__G)
     __GDEF
/* Explode an imploded compressed stream.  Based on the general purpose
   bit flag, decide on coded or uncoded literals, and an 8K or 4K sliding
   window.  Construct the literal (if any), length, and distance codes and
   the tables needed to decode them (using huft_build() from inflate.c),
   and call the appropriate routine for the type of data in the remainder
   of the stream.  The four routines are nearly identical, differing only
   in whether the literal is decoded or simply read in, and in how many
   bits are read in, uncoded, for the low distance bits. */
{
  unsigned r;           /* return codes */
  struct huft *tb;      /* literal code table */
  struct huft *tl;      /* length code table */
  struct huft *td;      /* distance code table */
  int bb;               /* bits for tb */
  int bl;               /* bits for tl */
  int bd;               /* bits for td */
  unsigned bdl;         /* number of uncoded lower distance bits */
  unsigned l[256];      /* bit lengths for codes */

#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
  if (G.redirect_slide)
    /* For 16-bit systems, it has already been checked at DLL entrance that
     * the buffer size in G.redirect_size does not exceed unsigned range.
     */
    G._wsize = G.redirect_size, redirSlide = G.redirect_buffer;
  else
#if defined(USE_DEFLATE64) && defined(INT_16BIT)
    /* For systems using 16-bit ints, reduce the used buffer size below
     * the limit of "unsigned int" numbers range.
     */
    G._wsize = WSIZE>>1, redirSlide = slide;
#else /* !(USE_DEFLATE64 && INT_16BIT) */
    G._wsize = WSIZE, redirSlide = slide;
#endif /* !(USE_DEFLATE64 && INT_16BIT) */
#endif /* DLL && !NO_SLIDE_REDIR */

  /* Tune base table sizes.  Note: I thought that to truly optimize speed,
     I would have to select different bl, bd, and bb values for different
     compressed file sizes.  I was surprised to find out that the values of
     7, 7, and 9 worked best over a very wide range of sizes, except that
     bd = 8 worked marginally better for large compressed sizes. */
  bl = 7;
  bd = (G.csize + G.incnt) > 200000L ? 8 : 7;
#ifdef DEBUG
  G.hufts = 0;                    /* initialize huft's malloc'ed */
#endif

  if (G.lrec.general_purpose_bit_flag & 4)
  /* With literal tree--minimum match length is 3 */
  {
    bb = 9;                     /* base table size for literals */
    if ((r = get_tree(__G__ l, 256)) != 0)
      return (int)r;
    if ((r = huft_build(__G__ l, 256, 256, NULL, NULL, &tb, &bb)) != 0)
    {
      if (r == 1)
        huft_free(tb);
      return (int)r;
    }
    if ((r = get_tree(__G__ l, 64)) != 0) {
      huft_free(tb);
      return (int)r;
    }
    if ((r = huft_build(__G__ l, 64, 0, cplen3, extra, &tl, &bl)) != 0)
    {
      if (r == 1)
        huft_free(tl);
      huft_free(tb);
      return (int)r;
    }
  }
  else
  /* No literal tree--minimum match length is 2 */
  {
    tb = (struct huft *)NULL;
    if ((r = get_tree(__G__ l, 64)) != 0)
      return (int)r;
    if ((r = huft_build(__G__ l, 64, 0, cplen2, extra, &tl, &bl)) != 0)
    {
      if (r == 1)
        huft_free(tl);
      return (int)r;
    }
  }

  if ((r = get_tree(__G__ l, 64)) != 0) {
    huft_free(tl);
    if (tb != (struct huft *)NULL) huft_free(tb);
    return (int)r;
  }
  if (G.lrec.general_purpose_bit_flag & 2)      /* true if 8K */
  {
    bdl = 7;
    r = huft_build(__G__ l, 64, 0, cpdist8, extra, &td, &bd);
  }
  else                                          /* else 4K */
  {
    bdl = 6;
    r = huft_build(__G__ l, 64, 0, cpdist4, extra, &td, &bd);
  }
  if (r != 0)
  {
    if (r == 1)
      huft_free(td);
    huft_free(tl);
    if (tb != (struct huft *)NULL) huft_free(tb);
    return (int)r;
  }

  if (tb != NULL) {
    r = explode_lit(__G__ tb, tl, td, bb, bl, bd, bdl);
    huft_free(tb);
  } else {
    r = explode_nolit(__G__ tl, td, bl, bd, bdl);
  }
  huft_free(td);
  huft_free(tl);
  Trace((stderr, "<%u > ", G.hufts));
  return (int)r;
}

/* so explode.c and inflate.c can be compiled together into one object: */
#undef DECODEHUFT
#undef NEEDBITS
#undef DUMPBITS
#undef wszimpl
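As the comment above explode() explains, the decoding variant is chosen purely from the local header's general-purpose bit flag: bit 1 (mask 0x02) selects an 8K sliding window with 7 uncoded low distance bits instead of a 4K window with 6, and bit 2 (mask 0x04) says whether literals are Huffman-coded (minimum match length 3) or stored as raw bytes (minimum match length 2). The following standalone illustration summarizes that dispatch; the function implode_params_from_flag() and its struct are invented for the example, since the real code instead picks between explode_lit()/explode_nolit() and the cpdist8/cpdist4 tables.

#include <stdio.h>

/* Hypothetical summary of how explode() interprets the flag bits. */
struct implode_params {
    unsigned window_size;   /* sliding window size in bytes, 2^(6+bdl) */
    unsigned bdl;           /* uncoded low distance bits */
    int      literal_tree;  /* 1 if literals are Huffman-coded */
    unsigned min_match;     /* minimum match length */
};

static struct implode_params implode_params_from_flag(unsigned flag)
{
    struct implode_params p;

    p.literal_tree = (flag & 4) != 0;    /* bit 2: coded literals */
    p.min_match    = p.literal_tree ? 3 : 2;
    if (flag & 2) {                      /* bit 1: 8K window */
        p.window_size = 8192;
        p.bdl = 7;
    } else {                             /* else 4K window */
        p.window_size = 4096;
        p.bdl = 6;
    }
    return p;
}

int main(void)
{
    struct implode_params p = implode_params_from_flag(6);  /* bits 1 and 2 set */

    printf("window=%u bdl=%u literal_tree=%d min_match=%u\n",
           p.window_size, p.bdl, p.literal_tree, p.min_match);
    return 0;   /* prints: window=8192 bdl=7 literal_tree=1 min_match=3 */
}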
