      1 /*	$NetBSD: zlib.c,v 1.1 1996/03/15 02:28:14 paulus Exp $	*/
      2 
      3 /*
      4  * This file is derived from various .h and .c files from the zlib-0.95
      5  * distribution by Jean-loup Gailly and Mark Adler, with some additions
      6  * by Paul Mackerras to aid in implementing Deflate compression and
      7  * decompression for PPP packets.  See zlib.h for conditions of
      8  * distribution and use.
      9  *
     10  * Changes that have been made include:
     11  * - changed functions not used outside this file to "local"
     12  * - added minCompression parameter to deflateInit2
     13  * - added Z_PACKET_FLUSH (see zlib.h for details)
     14  * - added inflateIncomp
     15  */
     16 
     17 
     18 /*+++++*/
     19 /* zutil.h -- internal interface and configuration of the compression library
     20  * Copyright (C) 1995 Jean-loup Gailly.
     21  * For conditions of distribution and use, see copyright notice in zlib.h
     22  */
     23 
     24 /* WARNING: this file should *not* be used by applications. It is
     25    part of the implementation of the compression library and is
     26    subject to change. Applications should only use zlib.h.
     27  */
     28 
     29 /* From: zutil.h,v 1.9 1995/05/03 17:27:12 jloup Exp */
     30 
     31 #define _Z_UTIL_H
     32 
     33 #include "zlib.h"
     34 
     35 #ifdef STDC
     36 #  include <string.h>
     37 #endif
     38 
     39 #ifndef local
     40 #  define local static
     41 #endif
     42 /* compile with -Dlocal if your debugger can't find static symbols */
     43 
     44 #define FAR
     45 
     46 typedef unsigned char  uch;
     47 typedef uch FAR uchf;
     48 typedef unsigned short ush;
     49 typedef ush FAR ushf;
     50 typedef unsigned long  ulg;
     51 
     52 extern char *z_errmsg[]; /* indexed by 1-zlib_error */
     53 
     54 #define ERR_RETURN(strm,err) return (strm->msg=z_errmsg[1-err], err)
     55 /* To be used only when the state is known to be valid */
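        /* Worked example (a sketch; assumes the usual zlib error codes from
         * zlib.h, where Z_MEM_ERROR is -4): ERR_RETURN(strm, Z_MEM_ERROR)
         * stores z_errmsg[1 - (-4)] == z_errmsg[5] in strm->msg and returns
         * -4, which is what "indexed by 1-zlib_error" above refers to.
         */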
     56 
     57 #ifndef NULL
     58 #define NULL	((void *) 0)
     59 #endif
     60 
     61         /* common constants */
     62 
     63 #define DEFLATED   8
     64 
     65 #ifndef DEF_WBITS
     66 #  define DEF_WBITS MAX_WBITS
     67 #endif
     68 /* default windowBits for decompression. MAX_WBITS is for compression only */
     69 
     70 #if MAX_MEM_LEVEL >= 8
     71 #  define DEF_MEM_LEVEL 8
     72 #else
     73 #  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
     74 #endif
     75 /* default memLevel */
     76 
     77 #define STORED_BLOCK 0
     78 #define STATIC_TREES 1
     79 #define DYN_TREES    2
     80 /* The three kinds of block type */
     81 
     82 #define MIN_MATCH  3
     83 #define MAX_MATCH  258
     84 /* The minimum and maximum match lengths */
     85 
     86          /* functions */
     87 
     88 #if defined(KERNEL) || defined(_KERNEL)
     89 #  define zmemcpy(d, s, n)	bcopy((s), (d), (n))
     90 #  define zmemzero		bzero
     91 #else
     92 #if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
     93 #  define HAVE_MEMCPY
     94 #endif
     95 #ifdef HAVE_MEMCPY
     96 #    define zmemcpy memcpy
     97 #    define zmemzero(dest, len) memset(dest, 0, len)
     98 #else
     99    extern void zmemcpy  OF((Bytef* dest, Bytef* source, uInt len));
    100    extern void zmemzero OF((Bytef* dest, uInt len));
    101 #endif
    102 #endif
    103 
    104 /* Diagnostic functions */
    105 #ifdef DEBUG_ZLIB
    106 #  include <stdio.h>
    107 #  ifndef verbose
    108 #    define verbose 0
    109 #  endif
    110 #  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
    111 #  define Trace(x) fprintf x
    112 #  define Tracev(x) {if (verbose) fprintf x ;}
    113 #  define Tracevv(x) {if (verbose>1) fprintf x ;}
    114 #  define Tracec(c,x) {if (verbose && (c)) fprintf x ;}
    115 #  define Tracecv(c,x) {if (verbose>1 && (c)) fprintf x ;}
    116 #else
    117 #  define Assert(cond,msg)
    118 #  define Trace(x)
    119 #  define Tracev(x)
    120 #  define Tracevv(x)
    121 #  define Tracec(c,x)
    122 #  define Tracecv(c,x)
    123 #endif
    124 
    125 
    126 typedef uLong (*check_func) OF((uLong check, Bytef *buf, uInt len));
    127 
    128 /* voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size)); */
    129 /* void   zcfree  OF((voidpf opaque, voidpf ptr)); */
    130 
    131 #define ZALLOC(strm, items, size) \
    132            (*((strm)->zalloc))((strm)->opaque, (items), (size))
    133 #define ZFREE(strm, addr, size)	\
    134 	   (*((strm)->zfree))((strm)->opaque, (voidpf)(addr), (size))
    135 #define TRY_FREE(s, p, n) {if (p) ZFREE(s, p, n);}
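        /* Illustrative sketch (not part of the original file): a minimal
         * user-supplied allocator pair matching the zcalloc/zcfree prototypes
         * commented out above, for a hosted (non-kernel) build; the opaque
         * pointer is simply ignored here.
         */
        #if 0
        #include <stdlib.h>

        voidpf zcalloc(opaque, items, size)
            voidpf opaque;
            unsigned items;
            unsigned size;
        {
            (void)opaque;
            return (voidpf)calloc(items, size);
        }

        void zcfree(opaque, ptr)
            voidpf opaque;
            voidpf ptr;
        {
            (void)opaque;
            free(ptr);
        }
        #endif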
    136 
    137 /* deflate.h -- internal compression state
    138  * Copyright (C) 1995 Jean-loup Gailly
    139  * For conditions of distribution and use, see copyright notice in zlib.h
    140  */
    141 
    142 /* WARNING: this file should *not* be used by applications. It is
    143    part of the implementation of the compression library and is
    144    subject to change. Applications should only use zlib.h.
    145  */
    146 
    147 
    148 /*+++++*/
    149 /* From: deflate.h,v 1.5 1995/05/03 17:27:09 jloup Exp */
    150 
    151 /* ===========================================================================
    152  * Internal compression state.
    153  */
    154 
    155 /* Data type */
    156 #define BINARY  0
    157 #define ASCII   1
    158 #define UNKNOWN 2
    159 
    160 #define LENGTH_CODES 29
    161 /* number of length codes, not counting the special END_BLOCK code */
    162 
    163 #define LITERALS  256
    164 /* number of literal bytes 0..255 */
    165 
    166 #define L_CODES (LITERALS+1+LENGTH_CODES)
    167 /* number of Literal or Length codes, including the END_BLOCK code */
    168 
    169 #define D_CODES   30
    170 /* number of distance codes */
    171 
    172 #define BL_CODES  19
    173 /* number of codes used to transfer the bit lengths */
    174 
    175 #define HEAP_SIZE (2*L_CODES+1)
    176 /* maximum heap size */
    177 
    178 #define MAX_BITS 15
    179 /* All codes must not exceed MAX_BITS bits */
    180 
    181 #define INIT_STATE    42
    182 #define BUSY_STATE   113
    183 #define FLUSH_STATE  124
    184 #define FINISH_STATE 666
    185 /* Stream status */
    186 
    187 
    188 /* Data structure describing a single value and its code string. */
    189 typedef struct ct_data_s {
    190     union {
    191         ush  freq;       /* frequency count */
    192         ush  code;       /* bit string */
    193     } fc;
    194     union {
    195         ush  dad;        /* father node in Huffman tree */
    196         ush  len;        /* length of bit string */
    197     } dl;
    198 } FAR ct_data;
    199 
    200 #define Freq fc.freq
    201 #define Code fc.code
    202 #define Dad  dl.dad
    203 #define Len  dl.len
    204 
    205 typedef struct static_tree_desc_s  static_tree_desc;
    206 
    207 typedef struct tree_desc_s {
    208     ct_data *dyn_tree;           /* the dynamic tree */
    209     int     max_code;            /* largest code with non zero frequency */
    210     static_tree_desc *stat_desc; /* the corresponding static tree */
    211 } FAR tree_desc;
    212 
    213 typedef ush Pos;
    214 typedef Pos FAR Posf;
    215 typedef unsigned IPos;
    216 
    217 /* A Pos is an index in the character window. We use short instead of int to
    218  * save space in the various tables. IPos is used only for parameter passing.
    219  */
    220 
    221 typedef struct deflate_state {
    222     z_stream *strm;      /* pointer back to this zlib stream */
    223     int   status;        /* as the name implies */
    224     Bytef *pending_buf;  /* output still pending */
    225     Bytef *pending_out;  /* next pending byte to output to the stream */
    226     int   pending;       /* nb of bytes in the pending buffer */
    227     uLong adler;         /* adler32 of uncompressed data */
    228     int   noheader;      /* suppress zlib header and adler32 */
    229     Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
    230     Byte  method;        /* STORED (for zip only) or DEFLATED */
    231     int	  minCompr;	 /* min size decrease for Z_FLUSH_NOSTORE */
    232 
    233                 /* used by deflate.c: */
    234 
    235     uInt  w_size;        /* LZ77 window size (32K by default) */
    236     uInt  w_bits;        /* log2(w_size)  (8..16) */
    237     uInt  w_mask;        /* w_size - 1 */
    238 
    239     Bytef *window;
    240     /* Sliding window. Input bytes are read into the second half of the window,
    241      * and move to the first half later to keep a dictionary of at least wSize
    242      * bytes. With this organization, matches are limited to a distance of
    243      * wSize-MAX_MATCH bytes, but this ensures that IO is always
    244      * performed with a length multiple of the block size. Also, it limits
    245      * the window size to 64K, which is quite useful on MSDOS.
    246      * To do: use the user input buffer as sliding window.
    247      */
    248 
    249     ulg window_size;
    250     /* Actual size of window: 2*wSize, except when the user input buffer
    251      * is directly used as sliding window.
    252      */
    253 
    254     Posf *prev;
    255     /* Link to older string with same hash index. To limit the size of this
    256      * array to 64K, this link is maintained only for the last 32K strings.
    257      * An index in this array is thus a window index modulo 32K.
    258      */
    259 
    260     Posf *head; /* Heads of the hash chains or NIL. */
    261 
    262     uInt  ins_h;          /* hash index of string to be inserted */
    263     uInt  hash_size;      /* number of elements in hash table */
    264     uInt  hash_bits;      /* log2(hash_size) */
    265     uInt  hash_mask;      /* hash_size-1 */
    266 
    267     uInt  hash_shift;
    268     /* Number of bits by which ins_h must be shifted at each input
    269      * step. It must be such that after MIN_MATCH steps, the oldest
    270      * byte no longer takes part in the hash key, that is:
    271      *   hash_shift * MIN_MATCH >= hash_bits
    272      */
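            /* Worked example (sketch): deflateInit2() below sets
             *   hash_bits  = memLevel + 7  (15 for the default memLevel of 8)
             *   hash_shift = (hash_bits+MIN_MATCH-1)/MIN_MATCH = 17/3 = 5
             * so after MIN_MATCH = 3 updates the first byte has been shifted
             * left by 3*5 = 15 bits and is masked away by hash_mask
             * (2^15 - 1), satisfying hash_shift * MIN_MATCH >= hash_bits.
             */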
    273 
    274     long block_start;
    275     /* Window position at the beginning of the current output block. Gets
    276      * negative when the window is moved backwards.
    277      */
    278 
    279     uInt match_length;           /* length of best match */
    280     IPos prev_match;             /* previous match */
    281     int match_available;         /* set if previous match exists */
    282     uInt strstart;               /* start of string to insert */
    283     uInt match_start;            /* start of matching string */
    284     uInt lookahead;              /* number of valid bytes ahead in window */
    285 
    286     uInt prev_length;
    287     /* Length of the best match at previous step. Matches not greater than this
    288      * are discarded. This is used in the lazy match evaluation.
    289      */
    290 
    291     uInt max_chain_length;
    292     /* To speed up deflation, hash chains are never searched beyond this
    293      * length.  A higher limit improves compression ratio but degrades the
    294      * speed.
    295      */
    296 
    297     uInt max_lazy_match;
    298     /* Attempt to find a better match only when the current match is strictly
    299      * smaller than this value. This mechanism is used only for compression
    300      * levels >= 4.
    301      */
    302 #   define max_insert_length  max_lazy_match
    303     /* Insert new strings in the hash table only if the match length is not
    304      * greater than this length. This saves time but degrades compression.
    305      * max_insert_length is used only for compression levels <= 3.
    306      */
    307 
    308     int level;    /* compression level (1..9) */
    309     int strategy; /* favor or force Huffman coding*/
    310 
    311     uInt good_match;
    312     /* Use a faster search when the previous match is longer than this */
    313 
    314      int nice_match; /* Stop searching when current match exceeds this */
    315 
    316                 /* used by trees.c: */
    317     /* Didn't use ct_data typedef below to suppress compiler warning */
    318     struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
    319     struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
    320     struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */
    321 
    322     struct tree_desc_s l_desc;               /* desc. for literal tree */
    323     struct tree_desc_s d_desc;               /* desc. for distance tree */
    324     struct tree_desc_s bl_desc;              /* desc. for bit length tree */
    325 
    326     ush bl_count[MAX_BITS+1];
    327     /* number of codes at each bit length for an optimal tree */
    328 
    329     int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
    330     int heap_len;               /* number of elements in the heap */
    331     int heap_max;               /* element of largest frequency */
    332     /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
    333      * The same heap array is used to build all trees.
    334      */
    335 
    336     uch depth[2*L_CODES+1];
    337     /* Depth of each subtree used as tie breaker for trees of equal frequency
    338      */
    339 
    340     uchf *l_buf;          /* buffer for literals or lengths */
    341 
    342     uInt  lit_bufsize;
    343     /* Size of match buffer for literals/lengths.  There are 4 reasons for
    344      * limiting lit_bufsize to 64K:
    345      *   - frequencies can be kept in 16 bit counters
    346      *   - if compression is not successful for the first block, all input
    347      *     data is still in the window so we can still emit a stored block even
    348      *     when input comes from standard input.  (This can also be done for
    349      *     all blocks if lit_bufsize is not greater than 32K.)
    350      *   - if compression is not successful for a file smaller than 64K, we can
    351      *     even emit a stored file instead of a stored block (saving 5 bytes).
    352      *     This is applicable only for zip (not gzip or zlib).
    353      *   - creating new Huffman trees less frequently may not provide fast
    354      *     adaptation to changes in the input data statistics. (Take for
    355      *     example a binary file with poorly compressible code followed by
    356      *     a highly compressible string table.) Smaller buffer sizes give
    357      *     fast adaptation but have of course the overhead of transmitting
    358      *     trees more frequently.
    359      *   - I can't count above 4
    360      */
    361 
    362     uInt last_lit;      /* running index in l_buf */
    363 
    364     ushf *d_buf;
    365     /* Buffer for distances. To simplify the code, d_buf and l_buf have
    366      * the same number of elements. To use different lengths, an extra flag
    367      * array would be necessary.
    368      */
    369 
    370     ulg opt_len;        /* bit length of current block with optimal trees */
    371     ulg static_len;     /* bit length of current block with static trees */
    372     ulg compressed_len; /* total bit length of compressed file */
    373     uInt matches;       /* number of string matches in current block */
    374     int last_eob_len;   /* bit length of EOB code for last block */
    375 
    376 #ifdef DEBUG_ZLIB
    377     ulg bits_sent;      /* bit length of the compressed data */
    378 #endif
    379 
    380     ush bi_buf;
    381     /* Output buffer. bits are inserted starting at the bottom (least
    382      * significant bits).
    383      */
    384     int bi_valid;
    385     /* Number of valid bits in bi_buf.  All bits above the last valid bit
    386      * are always zero.
    387      */
    388 
    389     uInt blocks_in_packet;
    390     /* Number of blocks produced since the last time Z_PACKET_FLUSH
    391      * was used.
    392      */
    393 
    394 } FAR deflate_state;
    395 
    396 /* Output a byte on the stream.
    397  * IN assertion: there is enough room in pending_buf.
    398  */
    399 #define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
    400 
    401 
    402 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
    403 /* Minimum amount of lookahead, except at the end of the input file.
    404  * See deflate.c for comments about the MIN_MATCH+1.
    405  */
    406 
    407 #define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
    408 /* In order to simplify the code, particularly on 16 bit machines, match
    409  * distances are limited to MAX_DIST instead of WSIZE.
    410  */
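        /* Worked example (sketch): MIN_LOOKAHEAD = 258 + 3 + 1 = 262, so with
         * the default 32K window (w_size = 32768) MAX_DIST(s) is
         * 32768 - 262 = 32506: no match may reach further back than that.
         */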
    411 
    412         /* in trees.c */
    413 local void ct_init       OF((deflate_state *s));
    414 local int  ct_tally      OF((deflate_state *s, int dist, int lc));
    415 local ulg ct_flush_block OF((deflate_state *s, charf *buf, ulg stored_len,
    416 			     int flush));
    417 local void ct_align      OF((deflate_state *s));
    418 local void ct_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
    419                           int eof));
    420 local void ct_stored_type_only OF((deflate_state *s));
    421 
    422 
    423 /*+++++*/
    424 /* deflate.c -- compress data using the deflation algorithm
    425  * Copyright (C) 1995 Jean-loup Gailly.
    426  * For conditions of distribution and use, see copyright notice in zlib.h
    427  */
    428 
    429 /*
    430  *  ALGORITHM
    431  *
    432  *      The "deflation" process depends on being able to identify portions
    433  *      of the input text which are identical to earlier input (within a
    434  *      sliding window trailing behind the input currently being processed).
    435  *
    436  *      The most straightforward technique turns out to be the fastest for
    437  *      most input files: try all possible matches and select the longest.
    438  *      The key feature of this algorithm is that insertions into the string
    439  *      dictionary are very simple and thus fast, and deletions are avoided
    440  *      completely. Insertions are performed at each input character, whereas
    441  *      string matches are performed only when the previous match ends. So it
    442  *      is preferable to spend more time in matches to allow very fast string
    443  *      insertions and avoid deletions. The matching algorithm for small
    444  *      strings is inspired by that of Rabin & Karp. A brute force approach
    445  *      is used to find longer strings when a small match has been found.
    446  *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
    447  *      (by Leonid Broukhis).
    448  *         A previous version of this file used a more sophisticated algorithm
    449  *      (by Fiala and Greene) which is guaranteed to run in linear amortized
    450  *      time, but has a larger average cost, uses more memory and is patented.
    451  *      However the F&G algorithm may be faster for some highly redundant
    452  *      files if the parameter max_chain_length (described below) is too large.
    453  *
    454  *  ACKNOWLEDGEMENTS
    455  *
    456  *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
    457  *      I found it in 'freeze' written by Leonid Broukhis.
    458  *      Thanks to many people for bug reports and testing.
    459  *
    460  *  REFERENCES
    461  *
    462  *      Deutsch, L.P., "'Deflate' Compressed Data Format Specification".
    463  *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
    464  *
    465  *      A description of the Rabin and Karp algorithm is given in the book
    466  *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
    467  *
    468  *      Fiala,E.R., and Greene,D.H.
    469  *         Data Compression with Finite Windows, Comm. ACM, 32,4 (1989) 490-505
    470  *
    471  */
    472 
    473 /* From: deflate.c,v 1.8 1995/05/03 17:27:08 jloup Exp */
    474 
    475 local char zlib_copyright[] = " deflate Copyright 1995 Jean-loup Gailly ";
    476 /*
    477   If you use the zlib library in a product, an acknowledgment is welcome
    478   in the documentation of your product. If for some reason you cannot
    479   include such an acknowledgment, I would appreciate that you keep this
    480   copyright string in the executable of your product.
    481  */
    482 
    483 #define NIL 0
    484 /* Tail of hash chains */
    485 
    486 #ifndef TOO_FAR
    487 #  define TOO_FAR 4096
    488 #endif
    489 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
    490 
    491 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
    492 /* Minimum amount of lookahead, except at the end of the input file.
    493  * See deflate.c for comments about the MIN_MATCH+1.
    494  */
    495 
    496 /* Values for max_lazy_match, good_match and max_chain_length, depending on
    497  * the desired pack level (0..9). The values given below have been tuned to
    498  * exclude worst case performance for pathological files. Better values may be
    499  * found for specific files.
    500  */
    501 
    502 typedef struct config_s {
    503    ush good_length; /* reduce lazy search above this match length */
    504    ush max_lazy;    /* do not perform lazy search above this match length */
    505    ush nice_length; /* quit search above this match length */
    506    ush max_chain;
    507 } config;
    508 
    509 local config configuration_table[10] = {
    510 /*      good lazy nice chain */
    511 /* 0 */ {0,    0,  0,    0},  /* store only */
    512 /* 1 */ {4,    4,  8,    4},  /* maximum speed, no lazy matches */
    513 /* 2 */ {4,    5, 16,    8},
    514 /* 3 */ {4,    6, 32,   32},
    515 
    516 /* 4 */ {4,    4, 16,   16},  /* lazy matches */
    517 /* 5 */ {8,   16, 32,   32},
    518 /* 6 */ {8,   16, 128, 128},
    519 /* 7 */ {8,   32, 128, 256},
    520 /* 8 */ {32, 128, 258, 1024},
    521 /* 9 */ {32, 258, 258, 4096}}; /* maximum compression */
    522 
    523 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
    524  * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
    525  * meaning.
    526  */
    527 
    528 #define EQUAL 0
    529 /* result of memcmp for equal strings */
    530 
    531 /* ===========================================================================
    532  *  Prototypes for local functions.
    533  */
    534 
    535 local void fill_window   OF((deflate_state *s));
    536 local int  deflate_fast  OF((deflate_state *s, int flush));
    537 local int  deflate_slow  OF((deflate_state *s, int flush));
    538 local void lm_init       OF((deflate_state *s));
    539 local int longest_match  OF((deflate_state *s, IPos cur_match));
    540 local void putShortMSB   OF((deflate_state *s, uInt b));
    541 local void flush_pending OF((z_stream *strm));
    542 local int read_buf       OF((z_stream *strm, charf *buf, unsigned size));
    543 #ifdef ASMV
    544       void match_init OF((void)); /* asm code initialization */
    545 #endif
    546 
    547 #ifdef DEBUG_ZLIB
    548 local  void check_match OF((deflate_state *s, IPos start, IPos match,
    549                             int length));
    550 #endif
    551 
    552 
    553 /* ===========================================================================
    554  * Update a hash value with the given input byte
    555  * IN  assertion: all calls to UPDATE_HASH are made with consecutive
    556  *    input characters, so that a running hash key can be computed from the
    557  *    previous key instead of complete recalculation each time.
    558  */
    559 #define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
    560 
    561 
    562 /* ===========================================================================
    563  * Insert string str in the dictionary and set match_head to the previous head
    564  * of the hash chain (the most recent string with same hash key). Return
    565  * the previous length of the hash chain.
    566  * IN  assertion: all calls to INSERT_STRING are made with consecutive
    567  *    input characters and the first MIN_MATCH bytes of str are valid
    568  *    (except for the last MIN_MATCH-1 bytes of the input file).
    569  */
    570 #define INSERT_STRING(s, str, match_head) \
    571    (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    572     s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
    573     s->head[s->ins_h] = (str))
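        /* Illustrative sketch of the bookkeeping above: suppose the 3-byte
         * strings at window positions 100 and 900 hash to the same ins_h.
         * INSERT_STRING(s, 100, h) sets head[ins_h] = 100 and h = NIL (the
         * old head); INSERT_STRING(s, 900, h) then sets prev[900 & w_mask]
         * = h = 100 and head[ins_h] = 900, so longest_match() can walk the
         * chain 900 -> 100 -> NIL through prev[], newest string first.
         */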
    574 
    575 /* ===========================================================================
    576  * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
    577  * prev[] will be initialized on the fly.
    578  */
    579 #define CLEAR_HASH(s) \
    580     s->head[s->hash_size-1] = NIL; \
    581     zmemzero((charf *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
    582 
    583 /* ========================================================================= */
    584 int deflateInit (strm, level)
    585     z_stream *strm;
    586     int level;
    587 {
    588     return deflateInit2 (strm, level, DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
    589 			 0, 0);
    590     /* To do: ignore strm->next_in if we use it as window */
    591 }
    592 
    593 /* ========================================================================= */
    594 int deflateInit2 (strm, level, method, windowBits, memLevel,
    595 		  strategy, minCompression)
    596     z_stream *strm;
    597     int  level;
    598     int  method;
    599     int  windowBits;
    600     int  memLevel;
    601     int  strategy;
    602     int  minCompression;
    603 {
    604     deflate_state *s;
    605     int noheader = 0;
    606 
    607     if (strm == Z_NULL) return Z_STREAM_ERROR;
    608 
    609     strm->msg = Z_NULL;
    610 /*    if (strm->zalloc == Z_NULL) strm->zalloc = zcalloc; */
    611 /*    if (strm->zfree == Z_NULL) strm->zfree = zcfree; */
    612 
    613     if (level == Z_DEFAULT_COMPRESSION) level = 6;
    614 
    615     if (windowBits < 0) { /* undocumented feature: suppress zlib header */
    616         noheader = 1;
    617         windowBits = -windowBits;
    618     }
    619     if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != DEFLATED ||
    620         windowBits < 8 || windowBits > 15 || level < 1 || level > 9) {
    621         return Z_STREAM_ERROR;
    622     }
    623     s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    624     if (s == Z_NULL) return Z_MEM_ERROR;
    625     strm->state = (struct internal_state FAR *)s;
    626     s->strm = strm;
    627 
    628     s->noheader = noheader;
    629     s->w_bits = windowBits;
    630     s->w_size = 1 << s->w_bits;
    631     s->w_mask = s->w_size - 1;
    632 
    633     s->hash_bits = memLevel + 7;
    634     s->hash_size = 1 << s->hash_bits;
    635     s->hash_mask = s->hash_size - 1;
    636     s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
    637 
    638     s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    639     s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    640     s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));
    641 
    642     s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */
    643 
    644     s->pending_buf = (uchf *) ZALLOC(strm, s->lit_bufsize, 2*sizeof(ush));
    645 
    646     if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
    647         s->pending_buf == Z_NULL) {
    648         strm->msg = z_errmsg[1-Z_MEM_ERROR];
    649         deflateEnd (strm);
    650         return Z_MEM_ERROR;
    651     }
    652     s->d_buf = (ushf *) &(s->pending_buf[s->lit_bufsize]);
    653     s->l_buf = (uchf *) &(s->pending_buf[3*s->lit_bufsize]);
    654     /* We overlay pending_buf and d_buf+l_buf. This works since the average
    655      * output size for (length,distance) codes is <= 32 bits (worst case
    656      * is 15+15+13=33).
    657      */
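            /* Layout sketch for the default memLevel of 8: lit_bufsize is
             * 1 << (8+6) = 16384, so pending_buf is 16384 * 2 * sizeof(ush) =
             * 64K bytes; d_buf starts at byte offset 16384 and holds 16384
             * ush distances (bytes 16K..48K), and l_buf starts at byte offset
             * 3*16384 and holds 16384 literal/length bytes (48K..64K).
             */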
    658 
    659     s->level = level;
    660     s->strategy = strategy;
    661     s->method = (Byte)method;
    662     s->minCompr = minCompression;
    663     s->blocks_in_packet = 0;
    664 
    665     return deflateReset(strm);
    666 }
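        /* Usage sketch (hypothetical caller, not part of this file): the
         * extra minCompression argument is the PPP addition noted at the top
         * of the file, and a negative windowBits requests a raw deflate
         * stream with no zlib header or adler32 trailer (the "undocumented
         * feature" handled above).  my_zalloc/my_zfree stand for whatever
         * allocator the application supplies.
         */
        #if 0
            z_stream z;

            z.zalloc = my_zalloc;   /* hypothetical application hooks */
            z.zfree  = my_zfree;
            z.opaque = NULL;
            if (deflateInit2(&z, Z_DEFAULT_COMPRESSION, DEFLATED, -11,
                             8, 0, 0) != Z_OK) {
                /* handle the error */
            }
            /* 2K raw-deflate window, default memLevel, strategy 0, minCompression 0 */
        #endif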
    667 
    668 /* ========================================================================= */
    669 int deflateReset (strm)
    670     z_stream *strm;
    671 {
    672     deflate_state *s;
    673 
    674     if (strm == Z_NULL || strm->state == Z_NULL ||
    675         strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;
    676 
    677     strm->total_in = strm->total_out = 0;
    678     strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
    679     strm->data_type = Z_UNKNOWN;
    680 
    681     s = (deflate_state *)strm->state;
    682     s->pending = 0;
    683     s->pending_out = s->pending_buf;
    684 
    685     if (s->noheader < 0) {
    686         s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
    687     }
    688     s->status = s->noheader ? BUSY_STATE : INIT_STATE;
    689     s->adler = 1;
    690 
    691     ct_init(s);
    692     lm_init(s);
    693 
    694     return Z_OK;
    695 }
    696 
    697 /* =========================================================================
    698  * Put a short in the pending buffer. The 16-bit value is put in MSB order.
    699  * IN assertion: the stream state is correct and there is enough room in
    700  * pending_buf.
    701  */
    702 local void putShortMSB (s, b)
    703     deflate_state *s;
    704     uInt b;
    705 {
    706     put_byte(s, (Byte)(b >> 8));
    707     put_byte(s, (Byte)(b & 0xff));
    708 }
    709 
    710 /* =========================================================================
    711  * Flush as much pending output as possible.
    712  */
    713 local void flush_pending(strm)
    714     z_stream *strm;
    715 {
    716     deflate_state *state = (deflate_state *) strm->state;
    717     unsigned len = state->pending;
    718 
    719     if (len > strm->avail_out) len = strm->avail_out;
    720     if (len == 0) return;
    721 
    722     if (strm->next_out != NULL) {
    723 	zmemcpy(strm->next_out, state->pending_out, len);
    724 	strm->next_out += len;
    725     }
    726     state->pending_out += len;
    727     strm->total_out += len;
    728     strm->avail_out -= len;
    729     state->pending -= len;
    730     if (state->pending == 0) {
    731         state->pending_out = state->pending_buf;
    732     }
    733 }
    734 
    735 /* ========================================================================= */
    736 int deflate (strm, flush)
    737     z_stream *strm;
    738     int flush;
    739 {
    740     deflate_state *state = (deflate_state *) strm->state;
    741 
    742     if (strm == Z_NULL || state == Z_NULL) return Z_STREAM_ERROR;
    743 
    744     if (strm->next_in == Z_NULL && strm->avail_in != 0) {
    745         ERR_RETURN(strm, Z_STREAM_ERROR);
    746     }
    747     if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);
    748 
    749     state->strm = strm; /* just in case */
    750 
    751     /* Write the zlib header */
    752     if (state->status == INIT_STATE) {
    753 
    754         uInt header = (DEFLATED + ((state->w_bits-8)<<4)) << 8;
    755         uInt level_flags = (state->level-1) >> 1;
    756 
    757         if (level_flags > 3) level_flags = 3;
    758         header |= (level_flags << 6);
    759         header += 31 - (header % 31);
    760 
    761         state->status = BUSY_STATE;
    762         putShortMSB(state, header);
    763     }
    764 
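            /* Worked example (sketch): with the default level 6 and w_bits 15
             * the code above gives header = (8 + ((15-8)<<4)) << 8 = 0x7800,
             * level_flags = (6-1)>>1 = 2 so header |= 2<<6 gives 0x7880, and
             * since 0x7880 % 31 == 3 the check bits add 28: the two bytes
             * written are 0x78 0x9c, the familiar zlib stream header.
             */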
    765     /* Flush as much pending output as possible */
    766     if (state->pending != 0) {
    767         flush_pending(strm);
    768         if (strm->avail_out == 0) return Z_OK;
    769     }
    770 
    771     /* If we came back in here to get the last output from
    772      * a previous flush, we're done for now.
    773      */
    774     if (state->status == FLUSH_STATE) {
    775 	state->status = BUSY_STATE;
    776 	if (flush != Z_NO_FLUSH && flush != Z_FINISH)
    777 	    return Z_OK;
    778     }
    779 
    780     /* User must not provide more input after the first FINISH: */
    781     if (state->status == FINISH_STATE && strm->avail_in != 0) {
    782         ERR_RETURN(strm, Z_BUF_ERROR);
    783     }
    784 
    785     /* Start a new block or continue the current one.
    786      */
    787     if (strm->avail_in != 0 || state->lookahead != 0 ||
    788         (flush == Z_FINISH && state->status != FINISH_STATE)) {
    789         int quit;
    790 
    791         if (flush == Z_FINISH) {
    792             state->status = FINISH_STATE;
    793         }
    794         if (state->level <= 3) {
    795             quit = deflate_fast(state, flush);
    796         } else {
    797             quit = deflate_slow(state, flush);
    798         }
    799         if (quit || strm->avail_out == 0)
    800 	    return Z_OK;
    801         /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
    802          * of deflate should use the same flush parameter to make sure
    803          * that the flush is complete. So we don't have to output an
    804          * empty block here, this will be done at next call. This also
    805          * ensures that for a very small output buffer, we emit at most
    806          * one empty block.
    807          */
    808     }
    809 
    810     /* If a flush was requested, we have a little more to output now. */
    811     if (flush != Z_NO_FLUSH && flush != Z_FINISH
    812 	&& state->status != FINISH_STATE) {
    813 	switch (flush) {
    814 	case Z_PARTIAL_FLUSH:
    815 	    ct_align(state);
    816 	    break;
    817 	case Z_PACKET_FLUSH:
    818 	    /* Output just the 3-bit `stored' block type value,
    819 	       but not a zero length. */
    820 	    ct_stored_type_only(state);
    821 	    break;
    822 	default:
    823 	    ct_stored_block(state, (char*)0, 0L, 0);
    824 	    /* For a full flush, this empty block will be recognized
    825 	     * as a special marker by inflate_sync().
    826 	     */
    827 	    if (flush == Z_FULL_FLUSH) {
    828 		CLEAR_HASH(state);             /* forget history */
    829 	    }
    830 	}
    831 	flush_pending(strm);
    832 	if (strm->avail_out == 0) {
    833 	    /* We'll have to come back to get the rest of the output;
    834 	     * this ensures we don't output a second zero-length stored
    835 	     * block (or whatever).
    836 	     */
    837 	    state->status = FLUSH_STATE;
    838 	    return Z_OK;
    839 	}
    840     }
    841 
    842     Assert(strm->avail_out > 0, "bug2");
    843 
    844     if (flush != Z_FINISH) return Z_OK;
    845     if (state->noheader) return Z_STREAM_END;
    846 
    847     /* Write the zlib trailer (adler32) */
    848     putShortMSB(state, (uInt)(state->adler >> 16));
    849     putShortMSB(state, (uInt)(state->adler & 0xffff));
    850     flush_pending(strm);
    851     /* If avail_out is zero, the application will call deflate again
    852      * to flush the rest.
    853      */
    854     state->noheader = -1; /* write the trailer only once! */
    855     return state->pending != 0 ? Z_OK : Z_STREAM_END;
    856 }
    857 
    858 /* ========================================================================= */
    859 int deflateEnd (strm)
    860     z_stream *strm;
    861 {
    862     deflate_state *state = (deflate_state *) strm->state;
    863 
    864     if (strm == Z_NULL || state == Z_NULL) return Z_STREAM_ERROR;
    865 
    866     TRY_FREE(strm, state->window, state->w_size * 2 * sizeof(Byte));
    867     TRY_FREE(strm, state->prev, state->w_size * sizeof(Pos));
    868     TRY_FREE(strm, state->head, state->hash_size * sizeof(Pos));
    869     TRY_FREE(strm, state->pending_buf, state->lit_bufsize * 2 * sizeof(ush));
    870 
    871     ZFREE(strm, state, sizeof(deflate_state));
    872     strm->state = Z_NULL;
    873 
    874     return Z_OK;
    875 }
    876 
    877 /* ===========================================================================
    878  * Read a new buffer from the current input stream, update the adler32
    879  * and total number of bytes read.
    880  */
    881 local int read_buf(strm, buf, size)
    882     z_stream *strm;
    883     charf *buf;
    884     unsigned size;
    885 {
    886     unsigned len = strm->avail_in;
    887     deflate_state *state = (deflate_state *) strm->state;
    888 
    889     if (len > size) len = size;
    890     if (len == 0) return 0;
    891 
    892     strm->avail_in  -= len;
    893 
    894     if (!state->noheader) {
    895         state->adler = adler32(state->adler, strm->next_in, len);
    896     }
    897     zmemcpy(buf, strm->next_in, len);
    898     strm->next_in  += len;
    899     strm->total_in += len;
    900 
    901     return (int)len;
    902 }
    903 
    904 /* ===========================================================================
    905  * Initialize the "longest match" routines for a new zlib stream
    906  */
    907 local void lm_init (s)
    908     deflate_state *s;
    909 {
    910     s->window_size = (ulg)2L*s->w_size;
    911 
    912     CLEAR_HASH(s);
    913 
    914     /* Set the default configuration parameters:
    915      */
    916     s->max_lazy_match   = configuration_table[s->level].max_lazy;
    917     s->good_match       = configuration_table[s->level].good_length;
    918     s->nice_match       = configuration_table[s->level].nice_length;
    919     s->max_chain_length = configuration_table[s->level].max_chain;
    920 
    921     s->strstart = 0;
    922     s->block_start = 0L;
    923     s->lookahead = 0;
    924     s->match_length = MIN_MATCH-1;
    925     s->match_available = 0;
    926     s->ins_h = 0;
    927 #ifdef ASMV
    928     match_init(); /* initialize the asm code */
    929 #endif
    930 }
    931 
    932 /* ===========================================================================
    933  * Set match_start to the longest match starting at the given string and
    934  * return its length. Matches shorter or equal to prev_length are discarded,
    935  * in which case the result is equal to prev_length and match_start is
    936  * garbage.
    937  * IN assertions: cur_match is the head of the hash chain for the current
    938  *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
    939  */
    940 #ifndef ASMV
    941 /* For 80x86 and 680x0, an optimized version will be provided in match.asm or
    942  * match.S. The code will be functionally equivalent.
    943  */
    944 local int longest_match(s, cur_match)
    945     deflate_state *s;
    946     IPos cur_match;                             /* current match */
    947 {
    948     unsigned chain_length = s->max_chain_length;/* max hash chain length */
    949     register Bytef *scan = s->window + s->strstart; /* current string */
    950     register Bytef *match;                       /* matched string */
    951     register int len;                           /* length of current match */
    952     int best_len = s->prev_length;              /* best match length so far */
    953     IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
    954         s->strstart - (IPos)MAX_DIST(s) : NIL;
    955     /* Stop when cur_match becomes <= limit. To simplify the code,
    956      * we prevent matches with the string of window index 0.
    957      */
    958     Posf *prev = s->prev;
    959     uInt wmask = s->w_mask;
    960 
    961 #ifdef UNALIGNED_OK
    962     /* Compare two bytes at a time. Note: this is not always beneficial.
    963      * Try with and without -DUNALIGNED_OK to check.
    964      */
    965     register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    966     register ush scan_start = *(ushf*)scan;
    967     register ush scan_end   = *(ushf*)(scan+best_len-1);
    968 #else
    969     register Bytef *strend = s->window + s->strstart + MAX_MATCH;
    970     register Byte scan_end1  = scan[best_len-1];
    971     register Byte scan_end   = scan[best_len];
    972 #endif
    973 
    974     /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
    975      * It is easy to get rid of this optimization if necessary.
    976      */
    977     Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
    978 
    979     /* Do not waste too much time if we already have a good match: */
    980     if (s->prev_length >= s->good_match) {
    981         chain_length >>= 2;
    982     }
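            /* Sketch with the level-6 row of configuration_table: good_length
             * is 8 and max_chain is 128, so once the previous match is already
             * 8 bytes or longer only 128 >> 2 = 32 chain entries are examined.
             */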
    983     Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
    984 
    985     do {
    986         Assert(cur_match < s->strstart, "no future");
    987         match = s->window + cur_match;
    988 
    989         /* Skip to next match if the match length cannot increase
    990          * or if the match length is less than 2:
    991          */
    992 #if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
    993         /* This code assumes sizeof(unsigned short) == 2. Do not use
    994          * UNALIGNED_OK if your compiler uses a different size.
    995          */
    996         if (*(ushf*)(match+best_len-1) != scan_end ||
    997             *(ushf*)match != scan_start) continue;
    998 
    999         /* It is not necessary to compare scan[2] and match[2] since they are
   1000          * always equal when the other bytes match, given that the hash keys
   1001          * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
   1002          * strstart+3, +5, ... up to strstart+257. We check for insufficient
   1003          * lookahead only every 4th comparison; the 128th check will be made
   1004          * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
   1005          * necessary to put more guard bytes at the end of the window, or
   1006          * to check more often for insufficient lookahead.
   1007          */
   1008         Assert(scan[2] == match[2], "scan[2]?");
   1009         scan++, match++;
   1010         do {
   1011         } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
   1012                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
   1013                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
   1014                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
   1015                  scan < strend);
   1016         /* The funny "do {}" generates better code on most compilers */
   1017 
   1018         /* Here, scan <= window+strstart+257 */
   1019         Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
   1020         if (*scan == *match) scan++;
   1021 
   1022         len = (MAX_MATCH - 1) - (int)(strend-scan);
   1023         scan = strend - (MAX_MATCH-1);
   1024 
   1025 #else /* UNALIGNED_OK */
   1026 
   1027         if (match[best_len]   != scan_end  ||
   1028             match[best_len-1] != scan_end1 ||
   1029             *match            != *scan     ||
   1030             *++match          != scan[1])      continue;
   1031 
   1032         /* The check at best_len-1 can be removed because it will be made
   1033          * again later. (This heuristic is not always a win.)
   1034          * It is not necessary to compare scan[2] and match[2] since they
   1035          * are always equal when the other bytes match, given that
   1036          * the hash keys are equal and that HASH_BITS >= 8.
   1037          */
   1038         scan += 2, match++;
   1039         Assert(*scan == *match, "match[2]?");
   1040 
   1041         /* We check for insufficient lookahead only every 8th comparison;
   1042          * the 256th check will be made at strstart+258.
   1043          */
   1044         do {
   1045         } while (*++scan == *++match && *++scan == *++match &&
   1046                  *++scan == *++match && *++scan == *++match &&
   1047                  *++scan == *++match && *++scan == *++match &&
   1048                  *++scan == *++match && *++scan == *++match &&
   1049                  scan < strend);
   1050 
   1051         Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
   1052 
   1053         len = MAX_MATCH - (int)(strend - scan);
   1054         scan = strend - MAX_MATCH;
   1055 
   1056 #endif /* UNALIGNED_OK */
   1057 
   1058         if (len > best_len) {
   1059             s->match_start = cur_match;
   1060             best_len = len;
   1061             if (len >= s->nice_match) break;
   1062 #ifdef UNALIGNED_OK
   1063             scan_end = *(ushf*)(scan+best_len-1);
   1064 #else
   1065             scan_end1  = scan[best_len-1];
   1066             scan_end   = scan[best_len];
   1067 #endif
   1068         }
   1069     } while ((cur_match = prev[cur_match & wmask]) > limit
   1070              && --chain_length != 0);
   1071 
   1072     return best_len;
   1073 }
   1074 #endif /* ASMV */
   1075 
   1076 #ifdef DEBUG_ZLIB
   1077 /* ===========================================================================
   1078  * Check that the match at match_start is indeed a match.
   1079  */
   1080 local void check_match(s, start, match, length)
   1081     deflate_state *s;
   1082     IPos start, match;
   1083     int length;
   1084 {
   1085     /* check that the match is indeed a match */
   1086     if (memcmp((charf *)s->window + match,
   1087                 (charf *)s->window + start, length) != EQUAL) {
   1088         fprintf(stderr,
   1089             " start %u, match %u, length %d\n",
   1090             start, match, length);
   1091         do { fprintf(stderr, "%c%c", s->window[match++],
   1092                      s->window[start++]); } while (--length != 0);
   1093         z_error("invalid match");
   1094     }
   1095     if (verbose > 1) {
   1096         fprintf(stderr,"\\[%d,%d]", start-match, length);
   1097         do { putc(s->window[start++], stderr); } while (--length != 0);
   1098     }
   1099 }
   1100 #else
   1101 #  define check_match(s, start, match, length)
   1102 #endif
   1103 
   1104 /* ===========================================================================
   1105  * Fill the window when the lookahead becomes insufficient.
   1106  * Updates strstart and lookahead.
   1107  *
   1108  * IN assertion: lookahead < MIN_LOOKAHEAD
   1109  * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
   1110  *    At least one byte has been read, or avail_in == 0; reads are
   1111  *    performed for at least two bytes (required for the zip translate_eol
   1112  *    option -- not supported here).
   1113  */
   1114 local void fill_window(s)
   1115     deflate_state *s;
   1116 {
   1117     register unsigned n, m;
   1118     register Posf *p;
   1119     unsigned more;    /* Amount of free space at the end of the window. */
   1120     uInt wsize = s->w_size;
   1121 
   1122     do {
   1123         more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
   1124 
   1125         /* Deal with !@#$% 64K limit: */
   1126         if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
   1127             more = wsize;
   1128         } else if (more == (unsigned)(-1)) {
   1129             /* Very unlikely, but possible on 16 bit machine if strstart == 0
   1130              * and lookahead == 1 (input done one byte at time)
   1131              */
   1132             more--;
   1133 
   1134         /* If the window is almost full and there is insufficient lookahead,
   1135          * move the upper half to the lower one to make room in the upper half.
   1136          */
   1137         } else if (s->strstart >= wsize+MAX_DIST(s)) {
   1138 
   1139             /* By the IN assertion, the window is not empty so we can't confuse
   1140              * more == 0 with more == 64K on a 16 bit machine.
   1141              */
   1142             zmemcpy((charf *)s->window, (charf *)s->window+wsize,
   1143                    (unsigned)wsize);
   1144             s->match_start -= wsize;
   1145             s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
   1146 
   1147             s->block_start -= (long) wsize;
   1148 
   1149             /* Slide the hash table (could be avoided with 32 bit values
   1150                at the expense of memory usage):
   1151              */
   1152             n = s->hash_size;
   1153             p = &s->head[n];
   1154             do {
   1155                 m = *--p;
   1156                 *p = (Pos)(m >= wsize ? m-wsize : NIL);
   1157             } while (--n);
   1158 
   1159             n = wsize;
   1160             p = &s->prev[n];
   1161             do {
   1162                 m = *--p;
   1163                 *p = (Pos)(m >= wsize ? m-wsize : NIL);
   1164                 /* If n is not on any hash chain, prev[n] is garbage but
   1165                  * its value will never be used.
   1166                  */
   1167             } while (--n);
   1168 
   1169             more += wsize;
   1170         }
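                /* Sketch of the slide above with the default wsize = 32768:
                 * a head[] entry that pointed at window position 40000 now
                 * points at 40000 - 32768 = 7232, while one that pointed at
                 * position 1000 (data that has just been slid out of the
                 * window) is reset to NIL.
                 */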
   1171         if (s->strm->avail_in == 0) return;
   1172 
   1173         /* If there was no sliding:
   1174          *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
   1175          *    more == window_size - lookahead - strstart
   1176          * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
   1177          * => more >= window_size - 2*WSIZE + 2
   1178          * In the BIG_MEM or MMAP case (not yet supported),
   1179          *   window_size == input_size + MIN_LOOKAHEAD  &&
   1180          *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
   1181          * Otherwise, window_size == 2*WSIZE so more >= 2.
   1182          * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
   1183          */
   1184         Assert(more >= 2, "more < 2");
   1185 
   1186         n = read_buf(s->strm, (charf *)s->window + s->strstart + s->lookahead,
   1187                      more);
   1188         s->lookahead += n;
   1189 
   1190         /* Initialize the hash value now that we have some input: */
   1191         if (s->lookahead >= MIN_MATCH) {
   1192             s->ins_h = s->window[s->strstart];
   1193             UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
   1194 #if MIN_MATCH != 3
   1195             Call UPDATE_HASH() MIN_MATCH-3 more times
   1196 #endif
   1197         }
   1198         /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
   1199          * but this is not important since only literal bytes will be emitted.
   1200          */
   1201 
   1202     } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
   1203 }
   1204 
   1205 /* ===========================================================================
   1206  * Flush the current block, with given end-of-file flag.
   1207  * IN assertion: strstart is set to the end of the current match.
   1208  */
   1209 #define FLUSH_BLOCK_ONLY(s, flush) { \
   1210    ct_flush_block(s, (s->block_start >= 0L ? \
   1211            (charf *)&s->window[(unsigned)s->block_start] : \
   1212            (charf *)Z_NULL), (long)s->strstart - s->block_start, (flush)); \
   1213    s->block_start = s->strstart; \
   1214    flush_pending(s->strm); \
   1215    Tracev((stderr,"[FLUSH]")); \
   1216 }
   1217 
   1218 /* Same but force premature exit if necessary. */
   1219 #define FLUSH_BLOCK(s, flush) { \
   1220    FLUSH_BLOCK_ONLY(s, flush); \
   1221    if (s->strm->avail_out == 0) return 1; \
   1222 }
   1223 
   1224 /* ===========================================================================
   1225  * Compress as much as possible from the input stream, return true if
   1226  * processing was terminated prematurely (no more input or output space).
   1227  * This function does not perform lazy evaluation of matches and inserts
   1228  * new strings in the dictionary only for unmatched strings or for short
   1229  * matches. It is used only for the fast compression options.
   1230  */
   1231 local int deflate_fast(s, flush)
   1232     deflate_state *s;
   1233     int flush;
   1234 {
   1235     IPos hash_head = NIL; /* head of the hash chain */
   1236     int bflush;     /* set if current block must be flushed */
   1237 
   1238     s->prev_length = MIN_MATCH-1;
   1239 
   1240     for (;;) {
   1241         /* Make sure that we always have enough lookahead, except
   1242          * at the end of the input file. We need MAX_MATCH bytes
   1243          * for the next match, plus MIN_MATCH bytes to insert the
   1244          * string following the next match.
   1245          */
   1246         if (s->lookahead < MIN_LOOKAHEAD) {
   1247             fill_window(s);
   1248             if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) return 1;
   1249 
   1250             if (s->lookahead == 0) break; /* flush the current block */
   1251         }
   1252 
   1253         /* Insert the string window[strstart .. strstart+2] in the
   1254          * dictionary, and set hash_head to the head of the hash chain:
   1255          */
   1256         if (s->lookahead >= MIN_MATCH) {
   1257             INSERT_STRING(s, s->strstart, hash_head);
   1258         }
   1259 
   1260         /* Find the longest match, discarding those <= prev_length.
   1261          * At this point we have always match_length < MIN_MATCH
   1262          */
   1263         if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
   1264             /* To simplify the code, we prevent matches with the string
   1265              * of window index 0 (in particular we have to avoid a match
   1266              * of the string with itself at the start of the input file).
   1267              */
   1268             if (s->strategy != Z_HUFFMAN_ONLY) {
   1269                 s->match_length = longest_match (s, hash_head);
   1270             }
   1271             /* longest_match() sets match_start */
   1272 
   1273             if (s->match_length > s->lookahead) s->match_length = s->lookahead;
   1274         }
   1275         if (s->match_length >= MIN_MATCH) {
   1276             check_match(s, s->strstart, s->match_start, s->match_length);
   1277 
   1278             bflush = ct_tally(s, s->strstart - s->match_start,
   1279                               s->match_length - MIN_MATCH);
   1280 
   1281             s->lookahead -= s->match_length;
   1282 
   1283             /* Insert new strings in the hash table only if the match length
   1284              * is not too large. This saves time but degrades compression.
   1285              */
   1286             if (s->match_length <= s->max_insert_length &&
   1287                 s->lookahead >= MIN_MATCH) {
   1288                 s->match_length--; /* string at strstart already in hash table */
   1289                 do {
   1290                     s->strstart++;
   1291                     INSERT_STRING(s, s->strstart, hash_head);
   1292                     /* strstart never exceeds WSIZE-MAX_MATCH, so there are
   1293                      * always MIN_MATCH bytes ahead.
   1294                      */
   1295                 } while (--s->match_length != 0);
   1296                 s->strstart++;
   1297             } else {
   1298                 s->strstart += s->match_length;
   1299                 s->match_length = 0;
   1300                 s->ins_h = s->window[s->strstart];
   1301                 UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
   1302 #if MIN_MATCH != 3
   1303                 Call UPDATE_HASH() MIN_MATCH-3 more times
   1304 #endif
   1305                 /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
   1306                  * matter since it will be recomputed at next deflate call.
   1307                  */
   1308             }
   1309         } else {
   1310             /* No match, output a literal byte */
   1311             Tracevv((stderr,"%c", s->window[s->strstart]));
   1312             bflush = ct_tally (s, 0, s->window[s->strstart]);
   1313             s->lookahead--;
   1314             s->strstart++;
   1315         }
   1316         if (bflush) FLUSH_BLOCK(s, Z_NO_FLUSH);
   1317     }
   1318     FLUSH_BLOCK(s, flush);
   1319     return 0; /* normal exit */
   1320 }
   1321 
   1322 /* ===========================================================================
   1323  * Same as above, but achieves better compression. We use a lazy
   1324  * evaluation for matches: a match is finally adopted only if there is
   1325  * no better match at the next window position.
   1326  */
   1327 local int deflate_slow(s, flush)
   1328     deflate_state *s;
   1329     int flush;
   1330 {
   1331     IPos hash_head = NIL;    /* head of hash chain */
   1332     int bflush;              /* set if current block must be flushed */
   1333 
   1334     /* Process the input block. */
   1335     for (;;) {
   1336         /* Make sure that we always have enough lookahead, except
   1337          * at the end of the input file. We need MAX_MATCH bytes
   1338          * for the next match, plus MIN_MATCH bytes to insert the
   1339          * string following the next match.
   1340          */
   1341         if (s->lookahead < MIN_LOOKAHEAD) {
   1342             fill_window(s);
   1343             if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) return 1;
   1344 
   1345             if (s->lookahead == 0) break; /* flush the current block */
   1346         }
   1347 
   1348         /* Insert the string window[strstart .. strstart+2] in the
   1349          * dictionary, and set hash_head to the head of the hash chain:
   1350          */
   1351         if (s->lookahead >= MIN_MATCH) {
   1352             INSERT_STRING(s, s->strstart, hash_head);
   1353         }
   1354 
   1355         /* Find the longest match, discarding those <= prev_length.
   1356          */
   1357         s->prev_length = s->match_length, s->prev_match = s->match_start;
   1358         s->match_length = MIN_MATCH-1;
   1359 
   1360         if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
   1361             s->strstart - hash_head <= MAX_DIST(s)) {
   1362             /* To simplify the code, we prevent matches with the string
   1363              * of window index 0 (in particular we have to avoid a match
   1364              * of the string with itself at the start of the input file).
   1365              */
   1366             if (s->strategy != Z_HUFFMAN_ONLY) {
   1367                 s->match_length = longest_match (s, hash_head);
   1368             }
   1369             /* longest_match() sets match_start */
   1370             if (s->match_length > s->lookahead) s->match_length = s->lookahead;
   1371 
   1372             if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
   1373                  (s->match_length == MIN_MATCH &&
   1374                   s->strstart - s->match_start > TOO_FAR))) {
   1375 
   1376                 /* If prev_match is also MIN_MATCH, match_start is garbage
   1377                  * but we will ignore the current match anyway.
   1378                  */
   1379                 s->match_length = MIN_MATCH-1;
   1380             }
   1381         }
   1382         /* If there was a match at the previous step and the current
   1383          * match is not better, output the previous match:
   1384          */
   1385         if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
   1386             uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
   1387             /* Do not insert strings in hash table beyond this. */
   1388 
   1389             check_match(s, s->strstart-1, s->prev_match, s->prev_length);
   1390 
   1391             bflush = ct_tally(s, s->strstart -1 - s->prev_match,
   1392                               s->prev_length - MIN_MATCH);
   1393 
   1394             /* Insert in hash table all strings up to the end of the match.
   1395              * strstart-1 and strstart are already inserted. If there is not
   1396              * enough lookahead, the last two strings are not inserted in
   1397              * the hash table.
   1398              */
   1399             s->lookahead -= s->prev_length-1;
   1400             s->prev_length -= 2;
   1401             do {
   1402                 if (++s->strstart <= max_insert) {
   1403                     INSERT_STRING(s, s->strstart, hash_head);
   1404                 }
   1405             } while (--s->prev_length != 0);
   1406             s->match_available = 0;
   1407             s->match_length = MIN_MATCH-1;
   1408             s->strstart++;
   1409 
   1410             if (bflush) FLUSH_BLOCK(s, Z_NO_FLUSH);
   1411 
   1412         } else if (s->match_available) {
   1413             /* If there was no match at the previous position, output a
   1414              * single literal. If there was a match but the current match
   1415              * is longer, truncate the previous match to a single literal.
   1416              */
   1417             Tracevv((stderr,"%c", s->window[s->strstart-1]));
   1418             if (ct_tally (s, 0, s->window[s->strstart-1])) {
   1419                 FLUSH_BLOCK_ONLY(s, Z_NO_FLUSH);
   1420             }
   1421             s->strstart++;
   1422             s->lookahead--;
   1423             if (s->strm->avail_out == 0) return 1;
   1424         } else {
   1425             /* There is no previous match to compare with, wait for
   1426              * the next step to decide.
   1427              */
   1428             s->match_available = 1;
   1429             s->strstart++;
   1430             s->lookahead--;
   1431         }
   1432     }
   1433     Assert (flush != Z_NO_FLUSH, "no flush?");
   1434     if (s->match_available) {
   1435         Tracevv((stderr,"%c", s->window[s->strstart-1]));
   1436         ct_tally (s, 0, s->window[s->strstart-1]);
   1437         s->match_available = 0;
   1438     }
   1439     FLUSH_BLOCK(s, flush);
   1440     return 0;
   1441 }
   1442 
   1443 
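/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): the lazy strategy above defers each match by one position: the
 * match found at position i is adopted only if the match found at i+1 is
 * not longer; otherwise a single literal is emitted and the same question
 * is asked again at i+1. The fragment below shows that decision on a
 * precomputed array of match lengths, ignoring max_lazy_match, TOO_FAR and
 * the other refinements; all names are invented for illustration.
 */
#if 0
local void ex_lazy_decide(best, emit, n)
    int  best[];                /* best[i] = best match length found at i */
    char emit[];                /* out: 'M' match starts here, 'L' literal,
                                   '.' covered by a previous match */
    int  n;                     /* number of positions */
{
    int i = 0, j;
    while (i < n) {
        if (best[i] >= MIN_MATCH && (i+1 >= n || best[i+1] <= best[i])) {
            emit[i] = 'M';                      /* adopt the match at i */
            for (j = 1; j < best[i] && i+j < n; j++) emit[i+j] = '.';
            i += best[i];                       /* skip the matched bytes */
        } else {
            emit[i] = 'L';                      /* literal, retry at i+1 */
            i++;
        }
    }
}
#endif
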
   1444 /*+++++*/
   1445 /* trees.c -- output deflated data using Huffman coding
   1446  * Copyright (C) 1995 Jean-loup Gailly
   1447  * For conditions of distribution and use, see copyright notice in zlib.h
   1448  */
   1449 
   1450 /*
   1451  *  ALGORITHM
   1452  *
   1453  *      The "deflation" process uses several Huffman trees. The more
   1454  *      common source values are represented by shorter bit sequences.
   1455  *
   1456  *      Each code tree is stored in a compressed form which is itself
   1457  * a Huffman encoding of the lengths of all the code strings (in
   1458  * ascending order by source values).  The actual code strings are
   1459  * reconstructed from the lengths in the inflate process, as described
   1460  * in the deflate specification.
   1461  *
   1462  *  REFERENCES
   1463  *
   1464  *      Deutsch, L.P., "'Deflate' Compressed Data Format Specification".
   1465  *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
   1466  *
   1467  *      Storer, James A.
   1468  *          Data Compression:  Methods and Theory, pp. 49-50.
   1469  *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
   1470  *
   1471  *      Sedgewick, R.
   1472  *          Algorithms, p290.
   1473  *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
   1474  */
   1475 
   1476 /* From: trees.c,v 1.5 1995/05/03 17:27:12 jloup Exp */
   1477 
   1478 #ifdef DEBUG_ZLIB
   1479 #  include <ctype.h>
   1480 #endif
   1481 
   1482 /* ===========================================================================
   1483  * Constants
   1484  */
   1485 
   1486 #define MAX_BL_BITS 7
   1487 /* Bit length codes must not exceed MAX_BL_BITS bits */
   1488 
   1489 #define END_BLOCK 256
   1490 /* end of block literal code */
   1491 
   1492 #define REP_3_6      16
   1493 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
   1494 
   1495 #define REPZ_3_10    17
   1496 /* repeat a zero length 3-10 times  (3 bits of repeat count) */
   1497 
   1498 #define REPZ_11_138  18
   1499 /* repeat a zero length 11-138 times  (7 bits of repeat count) */
   1500 
   1501 local int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   1502    = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
   1503 
   1504 local int extra_dbits[D_CODES] /* extra bits for each distance code */
   1505    = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};
   1506 
   1507 local int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   1508    = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};
   1509 
   1510 local uch bl_order[BL_CODES]
   1511    = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
   1512 /* The lengths of the bit length codes are sent in order of decreasing
   1513  * probability, to avoid transmitting the lengths for unused bit length codes.
   1514  */
   1515 
   1516 #define Buf_size (8 * 2*sizeof(char))
   1517 /* Number of bits used within bi_buf. (bi_buf might be implemented on
   1518  * more than 16 bits on some systems.)
   1519  */
   1520 
   1521 /* ===========================================================================
   1522  * Local data. These are initialized only once.
   1523  * To do: initialize at compile time to be completely reentrant. ???
   1524  */
   1525 
   1526 local ct_data static_ltree[L_CODES+2];
   1527 /* The static literal tree. Since the bit lengths are imposed, there is no
   1528  * need for the L_CODES extra codes used during heap construction. However,
   1529  * the codes 286 and 287 are needed to build a canonical tree (see ct_init
   1530  * below).
   1531  */
   1532 
   1533 local ct_data static_dtree[D_CODES];
   1534 /* The static distance tree. (Actually a trivial tree since all codes use
   1535  * 5 bits.)
   1536  */
   1537 
   1538 local uch dist_code[512];
   1539 /* distance codes. The first 256 values correspond to the distances
   1540  * 1 .. 256, the last 256 values correspond to the top 8 bits of
   1541  * the 15 bit distances.
   1542  */
   1543 
   1544 local uch length_code[MAX_MATCH-MIN_MATCH+1];
   1545 /* length code for each normalized match length (0 == MIN_MATCH) */
   1546 
   1547 local int base_length[LENGTH_CODES];
   1548 /* First normalized length for each code (0 = MIN_MATCH) */
   1549 
   1550 local int base_dist[D_CODES];
   1551 /* First normalized distance for each code (0 = distance of 1) */
   1552 
   1553 struct static_tree_desc_s {
   1554     ct_data *static_tree;        /* static tree or NULL */
   1555     intf    *extra_bits;         /* extra bits for each code or NULL */
   1556     int     extra_base;          /* base index for extra_bits */
   1557     int     elems;               /* max number of elements in the tree */
   1558     int     max_length;          /* max bit length for the codes */
   1559 };
   1560 
   1561 local static_tree_desc  static_l_desc =
   1562 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
   1563 
   1564 local static_tree_desc  static_d_desc =
   1565 {static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};
   1566 
   1567 local static_tree_desc  static_bl_desc =
   1568 {(ct_data *)0, extra_blbits, 0,      BL_CODES, MAX_BL_BITS};
   1569 
   1570 /* ===========================================================================
   1571  * Local (static) routines in this file.
   1572  */
   1573 
   1574 local void ct_static_init OF((void));
   1575 local void init_block     OF((deflate_state *s));
   1576 local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
   1577 local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
   1578 local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
   1579 local void build_tree     OF((deflate_state *s, tree_desc *desc));
   1580 local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
   1581 local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
   1582 local int  build_bl_tree  OF((deflate_state *s));
   1583 local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
   1584                               int blcodes));
   1585 local void compress_block OF((deflate_state *s, ct_data *ltree,
   1586                               ct_data *dtree));
   1587 local void set_data_type  OF((deflate_state *s));
   1588 local unsigned bi_reverse OF((unsigned value, int length));
   1589 local void bi_windup      OF((deflate_state *s));
   1590 local void bi_flush       OF((deflate_state *s));
   1591 local void copy_block     OF((deflate_state *s, charf *buf, unsigned len,
   1592                               int header));
   1593 
   1594 #ifndef DEBUG_ZLIB
   1595 #  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
   1596    /* Send a code of the given tree. c and tree must not have side effects */
   1597 
   1598 #else /* DEBUG_ZLIB */
   1599 #  define send_code(s, c, tree) \
   1600      { if (verbose>1) fprintf(stderr,"\ncd %3d ",(c)); \
   1601        send_bits(s, tree[c].Code, tree[c].Len); }
   1602 #endif
   1603 
   1604 #define d_code(dist) \
   1605    ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
   1606 /* Mapping from a distance to a distance code. dist is the distance - 1 and
   1607  * must not have side effects. dist_code[256] and dist_code[257] are never
   1608  * used.
   1609  */
   1610 
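/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): d_code() above is a two-level table lookup: values of dist
 * (distance - 1) below 256 index dist_code[] directly, larger values use
 * only their top bits (dist>>7). The standalone routine below computes the
 * same distance code by a linear scan of the base distances given in the
 * deflate specification; the name and table layout are illustrative only.
 */
#if 0
local const unsigned ex_base_distance[D_CODES] = {
       1,    2,    3,    4,    5,    7,    9,   13,    17,    25,
      33,   49,   65,   97,  129,  193,  257,  385,   513,   769,
    1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
};

local int ex_dist_code(distance)
    unsigned distance;          /* match distance, 1..32768 */
{
    int code = D_CODES-1;
    while (code > 0 && ex_base_distance[code] > distance) code--;
    return code;
    /* e.g. distances 1..4 map to codes 0..3 and distance 5 or 6 to code 4;
     * d_code(distance-1) above should yield the same value by table lookup.
     */
}
#endif
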
   1611 /* ===========================================================================
   1612  * Output a short LSB first on the stream.
   1613  * IN assertion: there is enough room in pendingBuf.
   1614  */
   1615 #define put_short(s, w) { \
   1616     put_byte(s, (uch)((w) & 0xff)); \
   1617     put_byte(s, (uch)((ush)(w) >> 8)); \
   1618 }
   1619 
   1620 /* ===========================================================================
   1621  * Send a value on a given number of bits.
   1622  * IN assertion: length <= 16 and value fits in length bits.
   1623  */
   1624 #ifdef DEBUG_ZLIB
   1625 local void send_bits      OF((deflate_state *s, int value, int length));
   1626 
   1627 local void send_bits(s, value, length)
   1628     deflate_state *s;
   1629     int value;  /* value to send */
   1630     int length; /* number of bits */
   1631 {
   1632     Tracev((stderr," l %2d v %4x ", length, value));
   1633     Assert(length > 0 && length <= 15, "invalid length");
   1634     s->bits_sent += (ulg)length;
   1635 
   1636     /* If not enough room in bi_buf, use (valid) bits from bi_buf and
   1637      * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
   1638      * unused bits in value.
   1639      */
   1640     if (s->bi_valid > (int)Buf_size - length) {
   1641         s->bi_buf |= (value << s->bi_valid);
   1642         put_short(s, s->bi_buf);
   1643         s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
   1644         s->bi_valid += length - Buf_size;
   1645     } else {
   1646         s->bi_buf |= value << s->bi_valid;
   1647         s->bi_valid += length;
   1648     }
   1649 }
   1650 #else /* !DEBUG_ZLIB */
   1651 
   1652 #define send_bits(s, value, length) \
   1653 { int len = length;\
   1654   if (s->bi_valid > (int)Buf_size - len) {\
   1655     int val = value;\
   1656     s->bi_buf |= (val << s->bi_valid);\
   1657     put_short(s, s->bi_buf);\
   1658     s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
   1659     s->bi_valid += len - Buf_size;\
   1660   } else {\
   1661     s->bi_buf |= (value) << s->bi_valid;\
   1662     s->bi_valid += len;\
   1663   }\
   1664 }
   1665 #endif /* DEBUG_ZLIB */
   1666 
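/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): send_bits() packs codes least-significant-bit first into the
 * 16-bit bi_buf and emits two bytes (low byte first, as put_short does)
 * whenever the buffer would overflow. The self-contained fragment below
 * shows the same packing with local state; all names are invented for
 * illustration.
 */
#if 0
local ush ex_bit_buf = 0;       /* analogue of bi_buf */
local int ex_bit_cnt = 0;       /* analogue of bi_valid */

local void ex_send_bits(value, length)
    int value;                  /* value to send, fits in length bits */
    int length;                 /* number of bits, 1..16 */
{
    if (ex_bit_cnt > 16 - length) {
        /* Not enough room: top up the 16-bit buffer, emit it, and keep
         * the bits of value that did not fit.
         */
        ex_bit_buf |= (ush)(value << ex_bit_cnt);
        /* ... a real implementation would write the two bytes of
         * ex_bit_buf here, low byte first (cf. put_short) ... */
        ex_bit_buf = (ush)((ush)value >> (16 - ex_bit_cnt));
        ex_bit_cnt += length - 16;
    } else {
        ex_bit_buf |= (ush)(value << ex_bit_cnt);
        ex_bit_cnt += length;
    }
}
#endif
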
   1667 
   1668 #define MAX(a,b) (a >= b ? a : b)
   1669 /* the arguments must not have side effects */
   1670 
   1671 /* ===========================================================================
   1672  * Initialize the various 'constant' tables.
   1673  * To do: do this at compile time.
   1674  */
   1675 local void ct_static_init()
   1676 {
   1677     int n;        /* iterates over tree elements */
   1678     int bits;     /* bit counter */
   1679     int length;   /* length value */
   1680     int code;     /* code value */
   1681     int dist;     /* distance index */
   1682     ush bl_count[MAX_BITS+1];
   1683     /* number of codes at each bit length for an optimal tree */
   1684 
   1685     /* Initialize the mapping length (0..255) -> length code (0..28) */
   1686     length = 0;
   1687     for (code = 0; code < LENGTH_CODES-1; code++) {
   1688         base_length[code] = length;
   1689         for (n = 0; n < (1<<extra_lbits[code]); n++) {
   1690             length_code[length++] = (uch)code;
   1691         }
   1692     }
   1693     Assert (length == 256, "ct_static_init: length != 256");
   1694     /* Note that the length 255 (match length 258) can be represented
   1695      * in two different ways: code 284 + 5 bits or code 285, so we
   1696      * overwrite length_code[255] to use the best encoding:
   1697      */
   1698     length_code[length-1] = (uch)code;
   1699 
   1700     /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
   1701     dist = 0;
   1702     for (code = 0 ; code < 16; code++) {
   1703         base_dist[code] = dist;
   1704         for (n = 0; n < (1<<extra_dbits[code]); n++) {
   1705             dist_code[dist++] = (uch)code;
   1706         }
   1707     }
   1708     Assert (dist == 256, "ct_static_init: dist != 256");
   1709     dist >>= 7; /* from now on, all distances are divided by 128 */
   1710     for ( ; code < D_CODES; code++) {
   1711         base_dist[code] = dist << 7;
   1712         for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
   1713             dist_code[256 + dist++] = (uch)code;
   1714         }
   1715     }
   1716     Assert (dist == 256, "ct_static_init: 256+dist != 512");
   1717 
   1718     /* Construct the codes of the static literal tree */
   1719     for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
   1720     n = 0;
   1721     while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
   1722     while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
   1723     while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
   1724     while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
   1725     /* Codes 286 and 287 do not exist, but we must include them in the
   1726      * tree construction to get a canonical Huffman tree (longest code
   1727      * all ones)
   1728      */
   1729     gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);
   1730 
   1731     /* The static distance tree is trivial: */
   1732     for (n = 0; n < D_CODES; n++) {
   1733         static_dtree[n].Len = 5;
   1734         static_dtree[n].Code = bi_reverse(n, 5);
   1735     }
   1736 }
   1737 
   1738 /* ===========================================================================
   1739  * Initialize the tree data structures for a new zlib stream.
   1740  */
   1741 local void ct_init(s)
   1742     deflate_state *s;
   1743 {
   1744     if (static_dtree[0].Len == 0) {
   1745         ct_static_init();              /* To do: at compile time */
   1746     }
   1747 
   1748     s->compressed_len = 0L;
   1749 
   1750     s->l_desc.dyn_tree = s->dyn_ltree;
   1751     s->l_desc.stat_desc = &static_l_desc;
   1752 
   1753     s->d_desc.dyn_tree = s->dyn_dtree;
   1754     s->d_desc.stat_desc = &static_d_desc;
   1755 
   1756     s->bl_desc.dyn_tree = s->bl_tree;
   1757     s->bl_desc.stat_desc = &static_bl_desc;
   1758 
   1759     s->bi_buf = 0;
   1760     s->bi_valid = 0;
   1761     s->last_eob_len = 8; /* enough lookahead for inflate */
   1762 #ifdef DEBUG_ZLIB
   1763     s->bits_sent = 0L;
   1764 #endif
   1765     s->blocks_in_packet = 0;
   1766 
   1767     /* Initialize the first block of the first file: */
   1768     init_block(s);
   1769 }
   1770 
   1771 /* ===========================================================================
   1772  * Initialize a new block.
   1773  */
   1774 local void init_block(s)
   1775     deflate_state *s;
   1776 {
   1777     int n; /* iterates over tree elements */
   1778 
   1779     /* Initialize the trees. */
   1780     for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
   1781     for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
   1782     for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
   1783 
   1784     s->dyn_ltree[END_BLOCK].Freq = 1;
   1785     s->opt_len = s->static_len = 0L;
   1786     s->last_lit = s->matches = 0;
   1787 }
   1788 
   1789 #define SMALLEST 1
   1790 /* Index within the heap array of least frequent node in the Huffman tree */
   1791 
   1792 
   1793 /* ===========================================================================
   1794  * Remove the smallest element from the heap and recreate the heap with
   1795  * one less element. Updates heap and heap_len.
   1796  */
   1797 #define pqremove(s, tree, top) \
   1798 {\
   1799     top = s->heap[SMALLEST]; \
   1800     s->heap[SMALLEST] = s->heap[s->heap_len--]; \
   1801     pqdownheap(s, tree, SMALLEST); \
   1802 }
   1803 
   1804 /* ===========================================================================
   1805  * Compares two subtrees, using the tree depth as tie breaker when
   1806  * the subtrees have equal frequency. This minimizes the worst case length.
   1807  */
   1808 #define smaller(tree, n, m, depth) \
   1809    (tree[n].Freq < tree[m].Freq || \
   1810    (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
   1811 
   1812 /* ===========================================================================
   1813  * Restore the heap property by moving down the tree starting at node k,
   1814  * exchanging a node with the smallest of its two sons if necessary, stopping
   1815  * when the heap property is re-established (each father smaller than its
   1816  * two sons).
   1817  */
   1818 local void pqdownheap(s, tree, k)
   1819     deflate_state *s;
   1820     ct_data *tree;  /* the tree to restore */
   1821     int k;               /* node to move down */
   1822 {
   1823     int v = s->heap[k];
   1824     int j = k << 1;  /* left son of k */
   1825     while (j <= s->heap_len) {
   1826         /* Set j to the smallest of the two sons: */
   1827         if (j < s->heap_len &&
   1828             smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
   1829             j++;
   1830         }
   1831         /* Exit if v is smaller than both sons */
   1832         if (smaller(tree, v, s->heap[j], s->depth)) break;
   1833 
   1834         /* Exchange v with the smallest son */
   1835         s->heap[k] = s->heap[j];  k = j;
   1836 
   1837         /* And continue down the tree, setting j to the left son of k */
   1838         j <<= 1;
   1839     }
   1840     s->heap[k] = v;
   1841 }
   1842 
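/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): heap[SMALLEST] always holds the least element, so pqremove()
 * pops it by moving the last element to the root and calling pqdownheap().
 * The fragment below does the same on a plain array of keys ordered by
 * "<" instead of smaller(); the names are invented for illustration.
 */
#if 0
local int ex_pop_smallest(heap, heap_len)
    int heap[];                 /* heap[1..*heap_len]; heap[0] is unused */
    int *heap_len;
{
    int smallest = heap[1];
    int k = 1, j, v;

    heap[1] = heap[(*heap_len)--];      /* move the last element to the root */
    v = heap[1];
    while ((j = k << 1) <= *heap_len) { /* sift it down */
        if (j < *heap_len && heap[j+1] < heap[j]) j++;  /* smaller son */
        if (v <= heap[j]) break;        /* heap property restored */
        heap[k] = heap[j];  k = j;
    }
    heap[k] = v;
    return smallest;
}
#endif
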
   1843 /* ===========================================================================
   1844  * Compute the optimal bit lengths for a tree and update the total bit length
   1845  * for the current block.
   1846  * IN assertion: the fields freq and dad are set, heap[heap_max] and
   1847  *    above are the tree nodes sorted by increasing frequency.
   1848  * OUT assertions: the field len is set to the optimal bit length, the
   1849  *     array bl_count contains the frequencies for each bit length.
   1850  *     The length opt_len is updated; static_len is also updated if stree is
   1851  *     not null.
   1852  */
   1853 local void gen_bitlen(s, desc)
   1854     deflate_state *s;
   1855     tree_desc *desc;    /* the tree descriptor */
   1856 {
   1857     ct_data *tree  = desc->dyn_tree;
   1858     int max_code   = desc->max_code;
   1859     ct_data *stree = desc->stat_desc->static_tree;
   1860     intf *extra    = desc->stat_desc->extra_bits;
   1861     int base       = desc->stat_desc->extra_base;
   1862     int max_length = desc->stat_desc->max_length;
   1863     int h;              /* heap index */
   1864     int n, m;           /* iterate over the tree elements */
   1865     int bits;           /* bit length */
   1866     int xbits;          /* extra bits */
   1867     ush f;              /* frequency */
   1868     int overflow = 0;   /* number of elements with bit length too large */
   1869 
   1870     for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;
   1871 
   1872     /* In a first pass, compute the optimal bit lengths (which may
   1873      * overflow in the case of the bit length tree).
   1874      */
   1875     tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
   1876 
   1877     for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
   1878         n = s->heap[h];
   1879         bits = tree[tree[n].Dad].Len + 1;
   1880         if (bits > max_length) bits = max_length, overflow++;
   1881         tree[n].Len = (ush)bits;
   1882         /* We overwrite tree[n].Dad which is no longer needed */
   1883 
   1884         if (n > max_code) continue; /* not a leaf node */
   1885 
   1886         s->bl_count[bits]++;
   1887         xbits = 0;
   1888         if (n >= base) xbits = extra[n-base];
   1889         f = tree[n].Freq;
   1890         s->opt_len += (ulg)f * (bits + xbits);
   1891         if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
   1892     }
   1893     if (overflow == 0) return;
   1894 
   1895     Trace((stderr,"\nbit length overflow\n"));
   1896     /* This happens for example on obj2 and pic of the Calgary corpus */
   1897 
   1898     /* Find the first bit length which could increase: */
   1899     do {
   1900         bits = max_length-1;
   1901         while (s->bl_count[bits] == 0) bits--;
   1902         s->bl_count[bits]--;      /* move one leaf down the tree */
   1903         s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
   1904         s->bl_count[max_length]--;
   1905         /* The brother of the overflow item also moves one step up,
   1906          * but this does not affect bl_count[max_length]
   1907          */
   1908         overflow -= 2;
   1909     } while (overflow > 0);
   1910 
   1911     /* Now recompute all bit lengths, scanning in increasing frequency.
   1912      * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
   1913      * lengths instead of fixing only the wrong ones. This idea is taken
   1914      * from 'ar' written by Haruhiko Okumura.)
   1915      */
   1916     for (bits = max_length; bits != 0; bits--) {
   1917         n = s->bl_count[bits];
   1918         while (n != 0) {
   1919             m = s->heap[--h];
   1920             if (m > max_code) continue;
   1921             if (tree[m].Len != (unsigned) bits) {
   1922                 Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
   1923                 s->opt_len += ((long)bits - (long)tree[m].Len)
   1924                               *(long)tree[m].Freq;
   1925                 tree[m].Len = (ush)bits;
   1926             }
   1927             n--;
   1928         }
   1929     }
   1930 }
   1931 
   1932 /* ===========================================================================
   1933  * Generate the codes for a given tree and bit counts (which need not be
   1934  * optimal).
   1935  * IN assertion: the array bl_count contains the bit length statistics for
   1936  * the given tree and the field len is set for all tree elements.
   1937  * OUT assertion: the field code is set for all tree elements of non
   1938  *     zero code length.
   1939  */
   1940 local void gen_codes (tree, max_code, bl_count)
   1941     ct_data *tree;             /* the tree to decorate */
   1942     int max_code;              /* largest code with non zero frequency */
   1943     ushf *bl_count;            /* number of codes at each bit length */
   1944 {
   1945     ush next_code[MAX_BITS+1]; /* next code value for each bit length */
   1946     ush code = 0;              /* running code value */
   1947     int bits;                  /* bit index */
   1948     int n;                     /* code index */
   1949 
   1950     /* The distribution counts are first used to generate the code values
   1951      * without bit reversal.
   1952      */
   1953     for (bits = 1; bits <= MAX_BITS; bits++) {
   1954         next_code[bits] = code = (code + bl_count[bits-1]) << 1;
   1955     }
   1956     /* Check that the bit counts in bl_count are consistent. The last code
   1957      * must be all ones.
   1958      */
   1959     Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
   1960             "inconsistent bit counts");
   1961     Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
   1962 
   1963     for (n = 0;  n <= max_code; n++) {
   1964         int len = tree[n].Len;
   1965         if (len == 0) continue;
   1966         /* Now reverse the bits */
   1967         tree[n].Code = bi_reverse(next_code[len]++, len);
   1968 
   1969         Tracec(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
   1970              n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
   1971     }
   1972 }
   1973 
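/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): gen_codes() above is the canonical-code construction from the
 * deflate specification: count how many codes use each bit length, derive
 * the first code of each length, then hand out consecutive codes. The
 * standalone version below works on a plain array of code lengths; the
 * names are invented for illustration.
 */
#if 0
local void ex_gen_codes(lens, nsyms, codes)
    int lens[];                 /* lens[n] = bit length of symbol n, 0 = unused */
    int nsyms;                  /* number of symbols */
    unsigned codes[];           /* out: canonical code, not yet bit-reversed */
{
    unsigned bl_count[MAX_BITS+1];
    unsigned next_code[MAX_BITS+1];
    unsigned code = 0;
    int bits, n;

    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    for (n = 0; n < nsyms; n++) bl_count[lens[n]]++;
    bl_count[0] = 0;
    for (bits = 1; bits <= MAX_BITS; bits++) {
        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
    }
    for (n = 0; n < nsyms; n++) {
        if (lens[n] != 0) codes[n] = next_code[lens[n]]++;
    }
    /* Example from the deflate specification: lengths {3,3,3,3,3,2,4,4}
     * give the codes 010 011 100 101 110 00 1110 1111. gen_codes() above
     * additionally bit-reverses each code with bi_reverse() because
     * deflate emits Huffman codes starting from the least significant bit.
     */
}
#endif
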
   1974 /* ===========================================================================
   1975  * Construct one Huffman tree and assign the code bit strings and lengths.
   1976  * Update the total bit length for the current block.
   1977  * IN assertion: the field freq is set for all tree elements.
   1978  * OUT assertions: the fields len and code are set to the optimal bit length
   1979  *     and corresponding code. The length opt_len is updated; static_len is
   1980  *     also updated if stree is not null. The field max_code is set.
   1981  */
   1982 local void build_tree(s, desc)
   1983     deflate_state *s;
   1984     tree_desc *desc; /* the tree descriptor */
   1985 {
   1986     ct_data *tree   = desc->dyn_tree;
   1987     ct_data *stree  = desc->stat_desc->static_tree;
   1988     int elems       = desc->stat_desc->elems;
   1989     int n, m;          /* iterate over heap elements */
   1990     int max_code = -1; /* largest code with non zero frequency */
   1991     int node;          /* new node being created */
   1992 
   1993     /* Construct the initial heap, with least frequent element in
   1994      * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
   1995      * heap[0] is not used.
   1996      */
   1997     s->heap_len = 0, s->heap_max = HEAP_SIZE;
   1998 
   1999     for (n = 0; n < elems; n++) {
   2000         if (tree[n].Freq != 0) {
   2001             s->heap[++(s->heap_len)] = max_code = n;
   2002             s->depth[n] = 0;
   2003         } else {
   2004             tree[n].Len = 0;
   2005         }
   2006     }
   2007 
   2008     /* The pkzip format requires that at least one distance code exists,
   2009      * and that at least one bit should be sent even if there is only one
   2010      * possible code. So to avoid special checks later on we force at least
   2011      * two codes of non zero frequency.
   2012      */
   2013     while (s->heap_len < 2) {
   2014         node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
   2015         tree[node].Freq = 1;
   2016         s->depth[node] = 0;
   2017         s->opt_len--; if (stree) s->static_len -= stree[node].Len;
   2018         /* node is 0 or 1 so it does not have extra bits */
   2019     }
   2020     desc->max_code = max_code;
   2021 
   2022     /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
   2023      * establish sub-heaps of increasing lengths:
   2024      */
   2025     for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
   2026 
   2027     /* Construct the Huffman tree by repeatedly combining the least two
   2028      * frequent nodes.
   2029      */
   2030     node = elems;              /* next internal node of the tree */
   2031     do {
   2032         pqremove(s, tree, n);  /* n = node of least frequency */
   2033         m = s->heap[SMALLEST]; /* m = node of next least frequency */
   2034 
   2035         s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
   2036         s->heap[--(s->heap_max)] = m;
   2037 
   2038         /* Create a new node father of n and m */
   2039         tree[node].Freq = tree[n].Freq + tree[m].Freq;
   2040         s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
   2041         tree[n].Dad = tree[m].Dad = (ush)node;
   2042 #ifdef DUMP_BL_TREE
   2043         if (tree == s->bl_tree) {
   2044             fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
   2045                     node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
   2046         }
   2047 #endif
   2048         /* and insert the new node in the heap */
   2049         s->heap[SMALLEST] = node++;
   2050         pqdownheap(s, tree, SMALLEST);
   2051 
   2052     } while (s->heap_len >= 2);
   2053 
   2054     s->heap[--(s->heap_max)] = s->heap[SMALLEST];
   2055 
   2056     /* At this point, the fields freq and dad are set. We can now
   2057      * generate the bit lengths.
   2058      */
   2059     gen_bitlen(s, (tree_desc *)desc);
   2060 
   2061     /* The field len is now set, we can generate the bit codes */
   2062     gen_codes ((ct_data *)tree, max_code, s->bl_count);
   2063 }
   2064 
   2065 /* ===========================================================================
   2066  * Scan a literal or distance tree to determine the frequencies of the codes
   2067  * in the bit length tree.
   2068  */
   2069 local void scan_tree (s, tree, max_code)
   2070     deflate_state *s;
   2071     ct_data *tree;   /* the tree to be scanned */
   2072     int max_code;    /* and its largest code of non zero frequency */
   2073 {
   2074     int n;                     /* iterates over all tree elements */
   2075     int prevlen = -1;          /* last emitted length */
   2076     int curlen;                /* length of current code */
   2077     int nextlen = tree[0].Len; /* length of next code */
   2078     int count = 0;             /* repeat count of the current code */
   2079     int max_count = 7;         /* max repeat count */
   2080     int min_count = 4;         /* min repeat count */
   2081 
   2082     if (nextlen == 0) max_count = 138, min_count = 3;
   2083     tree[max_code+1].Len = (ush)0xffff; /* guard */
   2084 
   2085     for (n = 0; n <= max_code; n++) {
   2086         curlen = nextlen; nextlen = tree[n+1].Len;
   2087         if (++count < max_count && curlen == nextlen) {
   2088             continue;
   2089         } else if (count < min_count) {
   2090             s->bl_tree[curlen].Freq += count;
   2091         } else if (curlen != 0) {
   2092             if (curlen != prevlen) s->bl_tree[curlen].Freq++;
   2093             s->bl_tree[REP_3_6].Freq++;
   2094         } else if (count <= 10) {
   2095             s->bl_tree[REPZ_3_10].Freq++;
   2096         } else {
   2097             s->bl_tree[REPZ_11_138].Freq++;
   2098         }
   2099         count = 0; prevlen = curlen;
   2100         if (nextlen == 0) {
   2101             max_count = 138, min_count = 3;
   2102         } else if (curlen == nextlen) {
   2103             max_count = 6, min_count = 3;
   2104         } else {
   2105             max_count = 7, min_count = 4;
   2106         }
   2107     }
   2108 }
   2109 
   2110 /* ===========================================================================
   2111  * Send a literal or distance tree in compressed form, using the codes in
   2112  * bl_tree.
   2113  */
   2114 local void send_tree (s, tree, max_code)
   2115     deflate_state *s;
   2116     ct_data *tree; /* the tree to be scanned */
   2117     int max_code;       /* and its largest code of non zero frequency */
   2118 {
   2119     int n;                     /* iterates over all tree elements */
   2120     int prevlen = -1;          /* last emitted length */
   2121     int curlen;                /* length of current code */
   2122     int nextlen = tree[0].Len; /* length of next code */
   2123     int count = 0;             /* repeat count of the current code */
   2124     int max_count = 7;         /* max repeat count */
   2125     int min_count = 4;         /* min repeat count */
   2126 
   2127     /* tree[max_code+1].Len = -1; */  /* guard already set */
   2128     if (nextlen == 0) max_count = 138, min_count = 3;
   2129 
   2130     for (n = 0; n <= max_code; n++) {
   2131         curlen = nextlen; nextlen = tree[n+1].Len;
   2132         if (++count < max_count && curlen == nextlen) {
   2133             continue;
   2134         } else if (count < min_count) {
   2135             do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
   2136 
   2137         } else if (curlen != 0) {
   2138             if (curlen != prevlen) {
   2139                 send_code(s, curlen, s->bl_tree); count--;
   2140             }
   2141             Assert(count >= 3 && count <= 6, " 3_6?");
   2142             send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
   2143 
   2144         } else if (count <= 10) {
   2145             send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
   2146 
   2147         } else {
   2148             send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
   2149         }
   2150         count = 0; prevlen = curlen;
   2151         if (nextlen == 0) {
   2152             max_count = 138, min_count = 3;
   2153         } else if (curlen == nextlen) {
   2154             max_count = 6, min_count = 3;
   2155         } else {
   2156             max_count = 7, min_count = 4;
   2157         }
   2158     }
   2159 }
   2160 
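/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): scan_tree() and send_tree() run-length encode the sequence of
 * code lengths using the three special bit length codes: REP_3_6 repeats
 * the previous length 3-6 times, REPZ_3_10 and REPZ_11_138 encode runs of
 * zero lengths. The fragment below produces (symbol, extra bits) pairs of
 * the same form for an array of lengths; its grouping of long runs may
 * differ from send_tree()'s, but any such split is a valid encoding.
 * ex_emit() and the other names are invented for illustration.
 */
#if 0
local void ex_emit OF((int symbol, int extra)); /* stand-in for send_code/send_bits */

local void ex_rle_lengths(len, n)
    int len[];                  /* the code lengths to encode */
    int n;                      /* how many of them */
{
    int i = 0;
    while (i < n) {
        int cur = len[i], run = 1, r;
        while (i + run < n && len[i + run] == cur) run++;
        i += run;
        if (cur == 0) {
            while (run >= 11) { r = run > 138 ? 138 : run;
                                ex_emit(REPZ_11_138, r - 11); run -= r; }
            if (run >= 3)     { ex_emit(REPZ_3_10, run - 3); run = 0; }
        } else {
            ex_emit(cur, 0); run--;             /* the length itself */
            while (run >= 3)  { r = run > 6 ? 6 : run;
                                ex_emit(REP_3_6, r - 3); run -= r; }
        }
        while (run-- > 0) ex_emit(cur, 0);      /* short leftovers sent plainly */
    }
}
#endif
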
   2161 /* ===========================================================================
   2162  * Construct the Huffman tree for the bit lengths and return the index in
   2163  * bl_order of the last bit length code to send.
   2164  */
   2165 local int build_bl_tree(s)
   2166     deflate_state *s;
   2167 {
   2168     int max_blindex;  /* index of last bit length code of non zero freq */
   2169 
   2170     /* Determine the bit length frequencies for literal and distance trees */
   2171     scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
   2172     scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
   2173 
   2174     /* Build the bit length tree: */
   2175     build_tree(s, (tree_desc *)(&(s->bl_desc)));
   2176     /* opt_len now includes the length of the tree representations, except
   2177      * the lengths of the bit length codes and the 5+5+4 bits for the counts.
   2178      */
   2179 
   2180     /* Determine the number of bit length codes to send. The pkzip format
   2181      * requires that at least 4 bit length codes be sent. (appnote.txt says
   2182      * 3 but the actual value used is 4.)
   2183      */
   2184     for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
   2185         if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
   2186     }
   2187     /* Update opt_len to include the bit length tree and counts */
   2188     s->opt_len += 3*(max_blindex+1) + 5+5+4;
   2189     Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
   2190             s->opt_len, s->static_len));
   2191 
   2192     return max_blindex;
   2193 }
   2194 
   2195 /* ===========================================================================
   2196  * Send the header for a block using dynamic Huffman trees: the counts, the
   2197  * lengths of the bit length codes, the literal tree and the distance tree.
   2198  * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
   2199  */
   2200 local void send_all_trees(s, lcodes, dcodes, blcodes)
   2201     deflate_state *s;
   2202     int lcodes, dcodes, blcodes; /* number of codes for each tree */
   2203 {
   2204     int rank;                    /* index in bl_order */
   2205 
   2206     Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
   2207     Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
   2208             "too many codes");
   2209     Tracev((stderr, "\nbl counts: "));
   2210     send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
   2211     send_bits(s, dcodes-1,   5);
   2212     send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
   2213     for (rank = 0; rank < blcodes; rank++) {
   2214         Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
   2215         send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
   2216     }
   2217     Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
   2218 
   2219     send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
   2220     Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
   2221 
   2222     send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
   2223     Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
   2224 }
   2225 
   2226 /* ===========================================================================
   2227  * Send a stored block
   2228  */
   2229 local void ct_stored_block(s, buf, stored_len, eof)
   2230     deflate_state *s;
   2231     charf *buf;       /* input block */
   2232     ulg stored_len;   /* length of input block */
   2233     int eof;          /* true if this is the last block for a file */
   2234 {
   2235     send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
   2236     s->compressed_len = (s->compressed_len + 3 + 7) & ~7L;
   2237     s->compressed_len += (stored_len + 4) << 3;
   2238 
   2239     copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
   2240 }
   2241 
   2242 /* Send just the `stored block' type code without any length bytes or data.
   2243  */
   2244 local void ct_stored_type_only(s)
   2245     deflate_state *s;
   2246 {
   2247     send_bits(s, (STORED_BLOCK << 1), 3);
   2248     bi_windup(s);
   2249     s->compressed_len = (s->compressed_len + 3) & ~7L;
   2250 }
   2251 
   2252 
   2253 /* ===========================================================================
   2254  * Send one empty static block to give enough lookahead for inflate.
   2255  * This takes 10 bits, of which 7 may remain in the bit buffer.
   2256  * The current inflate code requires 9 bits of lookahead. If the EOB
   2257  * code for the previous block was coded on 5 bits or less, inflate
   2258  * may have only 5+3 bits of lookahead to decode this EOB.
   2259  * (There are no problems if the previous block is stored or fixed.)
   2260  */
   2261 local void ct_align(s)
   2262     deflate_state *s;
   2263 {
   2264     send_bits(s, STATIC_TREES<<1, 3);
   2265     send_code(s, END_BLOCK, static_ltree);
   2266     s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
   2267     bi_flush(s);
   2268     /* Of the 10 bits for the empty block, we have already sent
   2269      * (10 - bi_valid) bits. The lookahead for the EOB of the previous
   2270      * block was thus its length plus what we have just sent.
   2271      */
   2272     if (s->last_eob_len + 10 - s->bi_valid < 9) {
   2273         send_bits(s, STATIC_TREES<<1, 3);
   2274         send_code(s, END_BLOCK, static_ltree);
   2275         s->compressed_len += 10L;
   2276         bi_flush(s);
   2277     }
   2278     s->last_eob_len = 7;
   2279 }
   2280 
   2281 /* ===========================================================================
   2282  * Determine the best encoding for the current block: dynamic trees, static
   2283  * trees or store, and output the encoded block to the zip file. This function
   2284  * returns the total compressed length for the file so far.
   2285  */
   2286 local ulg ct_flush_block(s, buf, stored_len, flush)
   2287     deflate_state *s;
   2288     charf *buf;       /* input block, or NULL if too old */
   2289     ulg stored_len;   /* length of input block */
   2290     int flush;        /* Z_FINISH if this is the last block for a file */
   2291 {
   2292     ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
   2293     int max_blindex;  /* index of last bit length code of non zero freq */
   2294     int eof = flush == Z_FINISH;
   2295 
   2296     ++s->blocks_in_packet;
   2297 
   2298     /* Check if the file is ascii or binary */
   2299     if (s->data_type == UNKNOWN) set_data_type(s);
   2300 
   2301     /* Construct the literal and distance trees */
   2302     build_tree(s, (tree_desc *)(&(s->l_desc)));
   2303     Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
   2304             s->static_len));
   2305 
   2306     build_tree(s, (tree_desc *)(&(s->d_desc)));
   2307     Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
   2308             s->static_len));
   2309     /* At this point, opt_len and static_len are the total bit lengths of
   2310      * the compressed block data, excluding the tree representations.
   2311      */
   2312 
   2313     /* Build the bit length tree for the above two trees, and get the index
   2314      * in bl_order of the last bit length code to send.
   2315      */
   2316     max_blindex = build_bl_tree(s);
   2317 
   2318     /* Determine the best encoding. Compute first the block length in bytes */
   2319     opt_lenb = (s->opt_len+3+7)>>3;
   2320     static_lenb = (s->static_len+3+7)>>3;
   2321 
   2322     Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
   2323             opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
   2324             s->last_lit));
   2325 
   2326     if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
   2327 
   2328     /* If compression failed and this is the first and last block,
   2329      * and if the .zip file can be seeked (to rewrite the local header),
   2330      * the whole file is transformed into a stored file:
   2331      */
   2332 #ifdef STORED_FILE_OK
   2333 #  ifdef FORCE_STORED_FILE
   2334     if (eof && compressed_len == 0L) /* force stored file */
   2335 #  else
   2336     if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable())
   2337 #  endif
   2338     {
   2339         /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
   2340         if (buf == (charf*)0) error ("block vanished");
   2341 
   2342         copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
   2343         s->compressed_len = stored_len << 3;
   2344         s->method = STORED;
   2345     } else
   2346 #endif /* STORED_FILE_OK */
   2347 
   2348     /* For Z_PACKET_FLUSH, if we don't achieve the required minimum
   2349      * compression, and this block contains all the data since the last
   2350      * time we used Z_PACKET_FLUSH, then just omit this block completely
   2351      * from the output.
   2352      */
   2353     if (flush == Z_PACKET_FLUSH && s->blocks_in_packet == 1
   2354 	&& opt_lenb > stored_len - s->minCompr) {
   2355 	s->blocks_in_packet = 0;
   2356 	/* output nothing */
   2357     } else
   2358 
   2359 #ifdef FORCE_STORED
   2360     if (buf != (char*)0) /* force stored block */
   2361 #else
   2362     if (stored_len+4 <= opt_lenb && buf != (char*)0)
   2363                        /* 4: two words for the lengths */
   2364 #endif
   2365     {
   2366         /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
   2367          * Otherwise we can't have processed more than WSIZE input bytes since
   2368          * the last block flush, because compression would have been
   2369          * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
   2370          * transform a block into a stored block.
   2371          */
   2372         ct_stored_block(s, buf, stored_len, eof);
   2373     } else
   2374 
   2375 #ifdef FORCE_STATIC
   2376     if (static_lenb >= 0) /* force static trees */
   2377 #else
   2378     if (static_lenb == opt_lenb)
   2379 #endif
   2380     {
   2381         send_bits(s, (STATIC_TREES<<1)+eof, 3);
   2382         compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
   2383         s->compressed_len += 3 + s->static_len;
   2384     } else {
   2385         send_bits(s, (DYN_TREES<<1)+eof, 3);
   2386         send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
   2387                        max_blindex+1);
   2388         compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
   2389         s->compressed_len += 3 + s->opt_len;
   2390     }
   2391     Assert (s->compressed_len == s->bits_sent, "bad compressed size");
   2392     init_block(s);
   2393 
   2394     if (eof) {
   2395         bi_windup(s);
   2396         s->compressed_len += 7;  /* align on byte boundary */
   2397     }
   2398     Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
   2399            s->compressed_len-7*eof));
   2400 
   2401     return s->compressed_len >> 3;
   2402 }
   2403 
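/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): stripped of the packet-flush and buf==NULL special cases,
 * ct_flush_block() picks the cheapest of the three deflate block types by
 * comparing their projected sizes in bytes. The helper below shows just
 * that comparison; the name is invented for illustration.
 */
#if 0
local int ex_pick_block_type(stored_len, static_lenb, opt_lenb)
    ulg stored_len;             /* size of a stored block (payload only) */
    ulg static_lenb;            /* size using the static trees */
    ulg opt_lenb;               /* size using the dynamic trees */
{
    if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
    if (stored_len+4 <= opt_lenb) return STORED_BLOCK; /* 4: two length words */
    if (static_lenb == opt_lenb)  return STATIC_TREES;
    return DYN_TREES;
}
#endif
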
   2404 /* ===========================================================================
   2405  * Save the match info and tally the frequency counts. Return true if
   2406  * the current block must be flushed.
   2407  */
   2408 local int ct_tally (s, dist, lc)
   2409     deflate_state *s;
   2410     int dist;  /* distance of matched string */
   2411     int lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
   2412 {
   2413     s->d_buf[s->last_lit] = (ush)dist;
   2414     s->l_buf[s->last_lit++] = (uch)lc;
   2415     if (dist == 0) {
   2416         /* lc is the unmatched char */
   2417         s->dyn_ltree[lc].Freq++;
   2418     } else {
   2419         s->matches++;
   2420         /* Here, lc is the match length - MIN_MATCH */
   2421         dist--;             /* dist = match distance - 1 */
   2422         Assert((ush)dist < (ush)MAX_DIST(s) &&
   2423                (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
   2424                (ush)d_code(dist) < (ush)D_CODES,  "ct_tally: bad match");
   2425 
   2426         s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
   2427         s->dyn_dtree[d_code(dist)].Freq++;
   2428     }
   2429 
   2430     /* Try to guess if it is profitable to stop the current block here */
   2431     if (s->level > 2 && (s->last_lit & 0xfff) == 0) {
   2432         /* Compute an upper bound for the compressed length */
   2433         ulg out_length = (ulg)s->last_lit*8L;
   2434         ulg in_length = (ulg)s->strstart - s->block_start;
   2435         int dcode;
   2436         for (dcode = 0; dcode < D_CODES; dcode++) {
   2437             out_length += (ulg)s->dyn_dtree[dcode].Freq *
   2438                 (5L+extra_dbits[dcode]);
   2439         }
   2440         out_length >>= 3;
   2441         Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
   2442                s->last_lit, in_length, out_length,
   2443                100L - out_length*100L/in_length));
   2444         if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
   2445     }
   2446     return (s->last_lit == s->lit_bufsize-1);
   2447     /* We avoid equality with lit_bufsize because of wraparound at 64K
   2448      * on 16 bit machines and because stored blocks are restricted to
   2449      * 64K-1 bytes.
   2450      */
   2451 }
   2452 
   2453 /* ===========================================================================
   2454  * Send the block data compressed using the given Huffman trees
   2455  */
   2456 local void compress_block(s, ltree, dtree)
   2457     deflate_state *s;
   2458     ct_data *ltree; /* literal tree */
   2459     ct_data *dtree; /* distance tree */
   2460 {
   2461     unsigned dist;      /* distance of matched string */
   2462     int lc;             /* match length or unmatched char (if dist == 0) */
   2463     unsigned lx = 0;    /* running index in l_buf */
   2464     unsigned code;      /* the code to send */
   2465     int extra;          /* number of extra bits to send */
   2466 
   2467     if (s->last_lit != 0) do {
   2468         dist = s->d_buf[lx];
   2469         lc = s->l_buf[lx++];
   2470         if (dist == 0) {
   2471             send_code(s, lc, ltree); /* send a literal byte */
   2472             Tracecv(isgraph(lc), (stderr," '%c' ", lc));
   2473         } else {
   2474             /* Here, lc is the match length - MIN_MATCH */
   2475             code = length_code[lc];
   2476             send_code(s, code+LITERALS+1, ltree); /* send the length code */
   2477             extra = extra_lbits[code];
   2478             if (extra != 0) {
   2479                 lc -= base_length[code];
   2480                 send_bits(s, lc, extra);       /* send the extra length bits */
   2481             }
   2482             dist--; /* dist is now the match distance - 1 */
   2483             code = d_code(dist);
   2484             Assert (code < D_CODES, "bad d_code");
   2485 
   2486             send_code(s, code, dtree);       /* send the distance code */
   2487             extra = extra_dbits[code];
   2488             if (extra != 0) {
   2489                 dist -= base_dist[code];
   2490                 send_bits(s, dist, extra);   /* send the extra distance bits */
   2491             }
   2492         } /* literal or match pair ? */
   2493 
   2494         /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
   2495         Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");
   2496 
   2497     } while (lx < s->last_lit);
   2498 
   2499     send_code(s, END_BLOCK, ltree);
   2500     s->last_eob_len = ltree[END_BLOCK].Len;
   2501 }
   2502 
   2503 /* ===========================================================================
   2504  * Set the data type to ASCII or BINARY, using a crude approximation:
   2505  * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
   2506  * IN assertion: the fields freq of dyn_ltree are set and the total of all
   2507  * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
   2508  */
   2509 local void set_data_type(s)
   2510     deflate_state *s;
   2511 {
   2512     int n = 0;
   2513     unsigned ascii_freq = 0;
   2514     unsigned bin_freq = 0;
   2515     while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
   2516     while (n < 128)    ascii_freq += s->dyn_ltree[n++].Freq;
   2517     while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
   2518     s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? BINARY : ASCII);
   2519 }
   2520 
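/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): set_data_type() applies its 20% rule to the literal frequencies
 * already tallied in dyn_ltree. Applied directly to a raw buffer the same
 * heuristic looks like this; the name is invented for illustration.
 */
#if 0
local int ex_is_binary(buf, len)
    Bytef *buf;                 /* the data to classify */
    unsigned len;               /* its length */
{
    unsigned bin = 0, ascii = 0;

    while (len-- != 0) {
        if (*buf <= 6 || *buf >= 128) bin++; else ascii++;
        buf++;
    }
    /* "binary" if the binary bytes exceed a quarter of the ascii bytes,
     * i.e. roughly 20% of the total. */
    return bin > (ascii >> 2);
}
#endif
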
   2521 /* ===========================================================================
   2522  * Reverse the first len bits of a code, using straightforward code (a faster
   2523  * method would use a table)
   2524  * IN assertion: 1 <= len <= 15
   2525  */
   2526 local unsigned bi_reverse(code, len)
   2527     unsigned code; /* the value to invert */
   2528     int len;       /* its bit length */
   2529 {
   2530     register unsigned res = 0;
   2531     do {
   2532         res |= code & 1;
   2533         code >>= 1, res <<= 1;
   2534     } while (--len > 0);
   2535     return res >> 1;
   2536 }
   2537 
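/* ===========================================================================
 * Editorial sketch (not part of the original sources and excluded from the
 * build): the comment above notes that a table would be faster. One common
 * form, shown here with invented names, reverses a 16-bit value one nibble
 * at a time through a 16-entry table and then shifts the result right so
 * that only the requested len bits remain.
 */
#if 0
local uch ex_rev4[16] = {
    0x0, 0x8, 0x4, 0xc, 0x2, 0xa, 0x6, 0xe,
    0x1, 0x9, 0x5, 0xd, 0x3, 0xb, 0x7, 0xf
};

local unsigned ex_bi_reverse(code, len)
    unsigned code;              /* the value to invert */
    int len;                    /* its bit length, 1..15 */
{
    unsigned r = ((unsigned)ex_rev4[ code        & 0xf] << 12) |
                 ((unsigned)ex_rev4[(code >>  4) & 0xf] <<  8) |
                 ((unsigned)ex_rev4[(code >>  8) & 0xf] <<  4) |
                  (unsigned)ex_rev4[(code >> 12) & 0xf];
    return r >> (16 - len);     /* keep only the reversed low len bits */
}
#endif
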
   2538 /* ===========================================================================
   2539  * Flush the bit buffer, keeping at most 7 bits in it.
   2540  */
   2541 local void bi_flush(s)
   2542     deflate_state *s;
   2543 {
   2544     if (s->bi_valid == 16) {
   2545         put_short(s, s->bi_buf);
   2546         s->bi_buf = 0;
   2547         s->bi_valid = 0;
   2548     } else if (s->bi_valid >= 8) {
   2549         put_byte(s, (Byte)s->bi_buf);
   2550         s->bi_buf >>= 8;
   2551         s->bi_valid -= 8;
   2552     }
   2553 }
   2554 
   2555 /* ===========================================================================
   2556  * Flush the bit buffer and align the output on a byte boundary
   2557  */
   2558 local void bi_windup(s)
   2559     deflate_state *s;
   2560 {
   2561     if (s->bi_valid > 8) {
   2562         put_short(s, s->bi_buf);
   2563     } else if (s->bi_valid > 0) {
   2564         put_byte(s, (Byte)s->bi_buf);
   2565     }
   2566     s->bi_buf = 0;
   2567     s->bi_valid = 0;
   2568 #ifdef DEBUG_ZLIB
   2569     s->bits_sent = (s->bits_sent+7) & ~7;
   2570 #endif
   2571 }
   2572 
   2573 /* ===========================================================================
   2574  * Copy a stored block, storing first the length and its
   2575  * one's complement if requested.
   2576  */
   2577 local void copy_block(s, buf, len, header)
   2578     deflate_state *s;
   2579     charf    *buf;    /* the input data */
   2580     unsigned len;     /* its length */
   2581     int      header;  /* true if block header must be written */
   2582 {
   2583     bi_windup(s);        /* align on byte boundary */
   2584     s->last_eob_len = 8; /* enough lookahead for inflate */
   2585 
   2586     if (header) {
   2587         put_short(s, (ush)len);
   2588         put_short(s, (ush)~len);
   2589 #ifdef DEBUG_ZLIB
   2590         s->bits_sent += 2*16;
   2591 #endif
   2592     }
   2593 #ifdef DEBUG_ZLIB
   2594     s->bits_sent += (ulg)len<<3;
   2595 #endif
   2596     while (len--) {
   2597         put_byte(s, *buf++);
   2598     }
   2599 }
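
/*
 * Example (for illustration): for len == 5 with header true, the two
 * put_short() calls above emit the bytes 05 00 fa ff (length 0x0005 and
 * its one's complement 0xfffa, low-order byte first, as put_short is
 * defined earlier in this file), followed by the five data bytes verbatim.
 */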
   2600 
   2601 
   2602 /*+++++*/
   2603 /* infblock.h -- header to use infblock.c
   2604  * Copyright (C) 1995 Mark Adler
   2605  * For conditions of distribution and use, see copyright notice in zlib.h
   2606  */
   2607 
   2608 /* WARNING: this file should *not* be used by applications. It is
   2609    part of the implementation of the compression library and is
   2610    subject to change. Applications should only use zlib.h.
   2611  */
   2612 
   2613 struct inflate_blocks_state;
   2614 typedef struct inflate_blocks_state FAR inflate_blocks_statef;
   2615 
   2616 local inflate_blocks_statef * inflate_blocks_new OF((
   2617     z_stream *z,
   2618     check_func c,               /* check function */
   2619     uInt w));                   /* window size */
   2620 
   2621 local int inflate_blocks OF((
   2622     inflate_blocks_statef *,
   2623     z_stream *,
   2624     int));                      /* initial return code */
   2625 
   2626 local void inflate_blocks_reset OF((
   2627     inflate_blocks_statef *,
   2628     z_stream *,
   2629     uLongf *));                  /* check value on output */
   2630 
   2631 local int inflate_blocks_free OF((
   2632     inflate_blocks_statef *,
   2633     z_stream *,
   2634     uLongf *));                  /* check value on output */
   2635 
   2636 local int inflate_addhistory OF((
   2637     inflate_blocks_statef *,
   2638     z_stream *));
   2639 
   2640 local int inflate_packet_flush OF((
   2641     inflate_blocks_statef *));
   2642 
   2643 /*+++++*/
   2644 /* inftrees.h -- header to use inftrees.c
   2645  * Copyright (C) 1995 Mark Adler
   2646  * For conditions of distribution and use, see copyright notice in zlib.h
   2647  */
   2648 
   2649 /* WARNING: this file should *not* be used by applications. It is
   2650    part of the implementation of the compression library and is
   2651    subject to change. Applications should only use zlib.h.
   2652  */
   2653 
   2654 /* Huffman code lookup table entry--this entry is four bytes for machines
   2655    that have 16-bit pointers (e.g. PC's in the small or medium model). */
   2656 
   2657 typedef struct inflate_huft_s FAR inflate_huft;
   2658 
   2659 struct inflate_huft_s {
   2660   union {
   2661     struct {
   2662       Byte Exop;        /* number of extra bits or operation */
   2663       Byte Bits;        /* number of bits in this code or subcode */
   2664     } what;
   2665     uInt Nalloc;	/* number of these allocated here */
   2666     Bytef *pad;         /* pad structure to a power of 2 (4 bytes for */
   2667   } word;               /*  16-bit, 8 bytes for 32-bit machines) */
   2668   union {
   2669     uInt Base;          /* literal, length base, or distance base */
   2670     inflate_huft *Next; /* pointer to next level of table */
   2671   } more;
   2672 };
   2673 
   2674 #ifdef DEBUG_ZLIB
   2675   local uInt inflate_hufts;
   2676 #endif
   2677 
   2678 local int inflate_trees_bits OF((
   2679     uIntf *,                    /* 19 code lengths */
   2680     uIntf *,                    /* bits tree desired/actual depth */
   2681     inflate_huft * FAR *,       /* bits tree result */
   2682     z_stream *));               /* for zalloc, zfree functions */
   2683 
   2684 local int inflate_trees_dynamic OF((
   2685     uInt,                       /* number of literal/length codes */
   2686     uInt,                       /* number of distance codes */
   2687     uIntf *,                    /* that many (total) code lengths */
   2688     uIntf *,                    /* literal desired/actual bit depth */
   2689     uIntf *,                    /* distance desired/actual bit depth */
   2690     inflate_huft * FAR *,       /* literal/length tree result */
   2691     inflate_huft * FAR *,       /* distance tree result */
   2692     z_stream *));               /* for zalloc, zfree functions */
   2693 
   2694 local int inflate_trees_fixed OF((
   2695     uIntf *,                    /* literal desired/actual bit depth */
   2696     uIntf *,                    /* distance desired/actual bit depth */
   2697     inflate_huft * FAR *,       /* literal/length tree result */
   2698     inflate_huft * FAR *));     /* distance tree result */
   2699 
   2700 local int inflate_trees_free OF((
   2701     inflate_huft *,             /* tables to free */
   2702     z_stream *));               /* for zfree function */
   2703 
   2704 
   2705 /*+++++*/
   2706 /* infcodes.h -- header to use infcodes.c
   2707  * Copyright (C) 1995 Mark Adler
   2708  * For conditions of distribution and use, see copyright notice in zlib.h
   2709  */
   2710 
   2711 /* WARNING: this file should *not* be used by applications. It is
   2712    part of the implementation of the compression library and is
   2713    subject to change. Applications should only use zlib.h.
   2714  */
   2715 
   2716 struct inflate_codes_state;
   2717 typedef struct inflate_codes_state FAR inflate_codes_statef;
   2718 
   2719 local inflate_codes_statef *inflate_codes_new OF((
   2720     uInt, uInt,
   2721     inflate_huft *, inflate_huft *,
   2722     z_stream *));
   2723 
   2724 local int inflate_codes OF((
   2725     inflate_blocks_statef *,
   2726     z_stream *,
   2727     int));
   2728 
   2729 local void inflate_codes_free OF((
   2730     inflate_codes_statef *,
   2731     z_stream *));
   2732 
   2733 
   2734 /*+++++*/
   2735 /* inflate.c -- zlib interface to inflate modules
   2736  * Copyright (C) 1995 Mark Adler
   2737  * For conditions of distribution and use, see copyright notice in zlib.h
   2738  */
   2739 
   2740 /* inflate private state */
   2741 struct internal_state {
   2742 
   2743   /* mode */
   2744   enum {
   2745       METHOD,   /* waiting for method byte */
   2746       FLAG,     /* waiting for flag byte */
   2747       BLOCKS,   /* decompressing blocks */
   2748       CHECK4,   /* four check bytes to go */
   2749       CHECK3,   /* three check bytes to go */
   2750       CHECK2,   /* two check bytes to go */
   2751       CHECK1,   /* one check byte to go */
   2752       DONE,     /* finished check, done */
   2753       BAD}      /* got an error--stay here */
   2754     mode;               /* current inflate mode */
   2755 
   2756   /* mode dependent information */
   2757   union {
   2758     uInt method;        /* if FLAGS, method byte */
   2759     struct {
   2760       uLong was;                /* computed check value */
   2761       uLong need;               /* stream check value */
   2762     } check;            /* if CHECK, check values to compare */
   2763     uInt marker;        /* if BAD, inflateSync's marker bytes count */
   2764   } sub;        /* submode */
   2765 
   2766   /* mode independent information */
   2767   int  nowrap;          /* flag for no wrapper */
   2768   uInt wbits;           /* log2(window size)  (8..15, defaults to 15) */
   2769   inflate_blocks_statef
   2770     *blocks;            /* current inflate_blocks state */
   2771 
   2772 };
   2773 
   2774 
   2775 int inflateReset(z)
   2776 z_stream *z;
   2777 {
   2778   uLong c;
   2779 
   2780   if (z == Z_NULL || z->state == Z_NULL)
   2781     return Z_STREAM_ERROR;
   2782   z->total_in = z->total_out = 0;
   2783   z->msg = Z_NULL;
   2784   z->state->mode = z->state->nowrap ? BLOCKS : METHOD;
   2785   inflate_blocks_reset(z->state->blocks, z, &c);
   2786   Trace((stderr, "inflate: reset\n"));
   2787   return Z_OK;
   2788 }
   2789 
   2790 
   2791 int inflateEnd(z)
   2792 z_stream *z;
   2793 {
   2794   uLong c;
   2795 
   2796   if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
   2797     return Z_STREAM_ERROR;
   2798   if (z->state->blocks != Z_NULL)
   2799     inflate_blocks_free(z->state->blocks, z, &c);
   2800   ZFREE(z, z->state, sizeof(struct internal_state));
   2801   z->state = Z_NULL;
   2802   Trace((stderr, "inflate: end\n"));
   2803   return Z_OK;
   2804 }
   2805 
   2806 
   2807 int inflateInit2(z, w)
   2808 z_stream *z;
   2809 int w;
   2810 {
   2811   /* initialize state */
   2812   if (z == Z_NULL)
   2813     return Z_STREAM_ERROR;
   2814 /*  if (z->zalloc == Z_NULL) z->zalloc = zcalloc; */
   2815 /*  if (z->zfree == Z_NULL) z->zfree = zcfree; */
   2816   if ((z->state = (struct internal_state FAR *)
   2817        ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL)
   2818     return Z_MEM_ERROR;
   2819   z->state->blocks = Z_NULL;
   2820 
   2821   /* handle undocumented nowrap option (no zlib header or check) */
   2822   z->state->nowrap = 0;
   2823   if (w < 0)
   2824   {
   2825     w = - w;
   2826     z->state->nowrap = 1;
   2827   }
   2828 
   2829   /* set window size */
   2830   if (w < 8 || w > 15)
   2831   {
   2832     inflateEnd(z);
   2833     return Z_STREAM_ERROR;
   2834   }
   2835   z->state->wbits = (uInt)w;
   2836 
   2837   /* create inflate_blocks state */
   2838   if ((z->state->blocks =
   2839        inflate_blocks_new(z, z->state->nowrap ? Z_NULL : adler32, 1 << w))
   2840       == Z_NULL)
   2841   {
   2842     inflateEnd(z);
   2843     return Z_MEM_ERROR;
   2844   }
   2845   Trace((stderr, "inflate: allocated\n"));
   2846 
   2847   /* reset state */
   2848   inflateReset(z);
   2849   return Z_OK;
   2850 }
   2851 
   2852 
   2853 int inflateInit(z)
   2854 z_stream *z;
   2855 {
   2856   return inflateInit2(z, DEF_WBITS);
   2857 }
   2858 
   2859 
   2860 #define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}
   2861 #define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)
   2862 
   2863 int inflate(z, f)
   2864 z_stream *z;
   2865 int f;
   2866 {
   2867   int r;
   2868   uInt b;
   2869 
   2870   if (z == Z_NULL || z->next_in == Z_NULL)
   2871     return Z_STREAM_ERROR;
   2872   r = Z_BUF_ERROR;
   2873   while (1) switch (z->state->mode)
   2874   {
   2875     case METHOD:
   2876       NEEDBYTE
   2877       if (((z->state->sub.method = NEXTBYTE) & 0xf) != DEFLATED)
   2878       {
   2879         z->state->mode = BAD;
   2880         z->msg = "unknown compression method";
   2881         z->state->sub.marker = 5;       /* can't try inflateSync */
   2882         break;
   2883       }
   2884       if ((z->state->sub.method >> 4) + 8 > z->state->wbits)
   2885       {
   2886         z->state->mode = BAD;
   2887         z->msg = "invalid window size";
   2888         z->state->sub.marker = 5;       /* can't try inflateSync */
   2889         break;
   2890       }
   2891       z->state->mode = FLAG;
   2892     case FLAG:
   2893       NEEDBYTE
   2894       if ((b = NEXTBYTE) & 0x20)
   2895       {
   2896         z->state->mode = BAD;
   2897         z->msg = "invalid reserved bit";
   2898         z->state->sub.marker = 5;       /* can't try inflateSync */
   2899         break;
   2900       }
   2901       if (((z->state->sub.method << 8) + b) % 31)
   2902       {
   2903         z->state->mode = BAD;
   2904         z->msg = "incorrect header check";
   2905         z->state->sub.marker = 5;       /* can't try inflateSync */
   2906         break;
   2907       }
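      /*
       * Example (for illustration): the common zlib header bytes 0x78 0x9c
       * pass the checks above: the low nibble of 0x78 is 8 (DEFLATED),
       * (0x78 >> 4) + 8 == 15 does not exceed the default wbits, and
       * (0x78 << 8) + 0x9c == 30876 == 31 * 996, so the modulus is zero.
       */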
   2908       Trace((stderr, "inflate: zlib header ok\n"));
   2909       z->state->mode = BLOCKS;
   2910     case BLOCKS:
   2911       r = inflate_blocks(z->state->blocks, z, r);
   2912       if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)
   2913 	  r = inflate_packet_flush(z->state->blocks);
   2914       if (r == Z_DATA_ERROR)
   2915       {
   2916         z->state->mode = BAD;
   2917         z->state->sub.marker = 0;       /* can try inflateSync */
   2918         break;
   2919       }
   2920       if (r != Z_STREAM_END)
   2921         return r;
   2922       r = Z_OK;
   2923       inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);
   2924       if (z->state->nowrap)
   2925       {
   2926         z->state->mode = DONE;
   2927         break;
   2928       }
   2929       z->state->mode = CHECK4;
   2930     case CHECK4:
   2931       NEEDBYTE
   2932       z->state->sub.check.need = (uLong)NEXTBYTE << 24;
   2933       z->state->mode = CHECK3;
   2934     case CHECK3:
   2935       NEEDBYTE
   2936       z->state->sub.check.need += (uLong)NEXTBYTE << 16;
   2937       z->state->mode = CHECK2;
   2938     case CHECK2:
   2939       NEEDBYTE
   2940       z->state->sub.check.need += (uLong)NEXTBYTE << 8;
   2941       z->state->mode = CHECK1;
   2942     case CHECK1:
   2943       NEEDBYTE
   2944       z->state->sub.check.need += (uLong)NEXTBYTE;
   2945 
   2946       if (z->state->sub.check.was != z->state->sub.check.need)
   2947       {
   2948         z->state->mode = BAD;
   2949         z->msg = "incorrect data check";
   2950         z->state->sub.marker = 5;       /* can't try inflateSync */
   2951         break;
   2952       }
   2953       Trace((stderr, "inflate: zlib check ok\n"));
   2954       z->state->mode = DONE;
   2955     case DONE:
   2956       return Z_STREAM_END;
   2957     case BAD:
   2958       return Z_DATA_ERROR;
   2959     default:
   2960       return Z_STREAM_ERROR;
   2961   }
   2962 
   2963  empty:
   2964   if (f != Z_PACKET_FLUSH)
   2965     return r;
   2966   z->state->mode = BAD;
   2967   z->state->sub.marker = 0;       /* can try inflateSync */
   2968   return Z_DATA_ERROR;
   2969 }
   2970 
   2971 /*
   2972  * This subroutine adds the data at next_in/avail_in to the output history
   2973  * without performing any output.  The output buffer must be "caught up";
   2974  * i.e. no pending output (hence s->read equals s->write), and the state must
   2975  * be BLOCKS (i.e. we should be willing to see the start of a series of
   2976  * BLOCKS).  On exit, the output will also be caught up, and the checksum
   2977  * will have been updated if need be.
   2978  */
   2979 
   2980 int inflateIncomp(z)
   2981 z_stream *z;
   2982 {
   2983     if (z->state->mode != BLOCKS)
   2984 	return Z_DATA_ERROR;
   2985     return inflate_addhistory(z->state->blocks, z);
   2986 }
   2987 
   2988 
   2989 int inflateSync(z)
   2990 z_stream *z;
   2991 {
   2992   uInt n;       /* number of bytes to look at */
   2993   Bytef *p;     /* pointer to bytes */
   2994   uInt m;       /* number of marker bytes found in a row */
   2995   uLong r, w;   /* temporaries to save total_in and total_out */
   2996 
   2997   /* set up */
   2998   if (z == Z_NULL || z->state == Z_NULL)
   2999     return Z_STREAM_ERROR;
   3000   if (z->state->mode != BAD)
   3001   {
   3002     z->state->mode = BAD;
   3003     z->state->sub.marker = 0;
   3004   }
   3005   if ((n = z->avail_in) == 0)
   3006     return Z_BUF_ERROR;
   3007   p = z->next_in;
   3008   m = z->state->sub.marker;
   3009 
   3010   /* search */
   3011   while (n && m < 4)
   3012   {
   3013     if (*p == (Byte)(m < 2 ? 0 : 0xff))
   3014       m++;
   3015     else if (*p)
   3016       m = 0;
   3017     else
   3018       m = 4 - m;
   3019     p++, n--;
   3020   }
   3021 
   3022   /* restore */
   3023   z->total_in += p - z->next_in;
   3024   z->next_in = p;
   3025   z->avail_in = n;
   3026   z->state->sub.marker = m;
   3027 
   3028   /* return no joy or set up to restart on a new block */
   3029   if (m != 4)
   3030     return Z_DATA_ERROR;
   3031   r = z->total_in;  w = z->total_out;
   3032   inflateReset(z);
   3033   z->total_in = r;  z->total_out = w;
   3034   z->state->mode = BLOCKS;
   3035   return Z_OK;
   3036 }
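
/*
 * Note on the search above (for illustration): the pattern matched byte by
 * byte is 00 00 ff ff, the LEN/NLEN header of an empty stored block such as
 * a flush point emits.  For example, on the input bytes 00 05 00 00 ff ff
 * the counter m takes the values 1, 0, 1, 2, 3, 4, at which point
 * resynchronization is possible.
 */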
   3037 
   3038 #undef NEEDBYTE
   3039 #undef NEXTBYTE
   3040 
   3041 /*+++++*/
   3042 /* infutil.h -- types and macros common to blocks and codes
   3043  * Copyright (C) 1995 Mark Adler
   3044  * For conditions of distribution and use, see copyright notice in zlib.h
   3045  */
   3046 
   3047 /* WARNING: this file should *not* be used by applications. It is
   3048    part of the implementation of the compression library and is
   3049    subject to change. Applications should only use zlib.h.
   3050  */
   3051 
   3052 /* inflate blocks semi-private state */
   3053 struct inflate_blocks_state {
   3054 
   3055   /* mode */
   3056   enum {
   3057       TYPE,     /* get type bits (3, including end bit) */
   3058       LENS,     /* get lengths for stored */
   3059       STORED,   /* processing stored block */
   3060       TABLE,    /* get table lengths */
   3061       BTREE,    /* get bit lengths tree for a dynamic block */
   3062       DTREE,    /* get length, distance trees for a dynamic block */
   3063       CODES,    /* processing fixed or dynamic block */
   3064       DRY,      /* output remaining window bytes */
   3065       DONEB,     /* finished last block, done */
   3066       BADB}      /* got a data error--stuck here */
   3067     mode;               /* current inflate_block mode */
   3068 
   3069   /* mode dependent information */
   3070   union {
   3071     uInt left;          /* if STORED, bytes left to copy */
   3072     struct {
   3073       uInt table;               /* table lengths (14 bits) */
   3074       uInt index;               /* index into blens (or border) */
   3075       uIntf *blens;             /* bit lengths of codes */
   3076       uInt bb;                  /* bit length tree depth */
   3077       inflate_huft *tb;         /* bit length decoding tree */
   3078       int nblens;		/* # elements allocated at blens */
   3079     } trees;            /* if DTREE, decoding info for trees */
   3080     struct {
   3081       inflate_huft *tl, *td;    /* trees to free */
   3082       inflate_codes_statef
   3083          *codes;
   3084     } decode;           /* if CODES, current state */
   3085   } sub;                /* submode */
   3086   uInt last;            /* true if this block is the last block */
   3087 
   3088   /* mode independent information */
   3089   uInt bitk;            /* bits in bit buffer */
   3090   uLong bitb;           /* bit buffer */
   3091   Bytef *window;        /* sliding window */
   3092   Bytef *end;           /* one byte after sliding window */
   3093   Bytef *read;          /* window read pointer */
   3094   Bytef *write;         /* window write pointer */
   3095   check_func checkfn;   /* check function */
   3096   uLong check;          /* check on output */
   3097 
   3098 };
   3099 
   3100 
   3101 /* defines for inflate input/output */
   3102 /*   update pointers and return */
   3103 #define UPDBITS {s->bitb=b;s->bitk=k;}
   3104 #define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
   3105 #define UPDOUT {s->write=q;}
   3106 #define UPDATE {UPDBITS UPDIN UPDOUT}
   3107 #define LEAVE {UPDATE return inflate_flush(s,z,r);}
   3108 /*   get bytes and bits */
   3109 #define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
   3110 #define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
   3111 #define NEXTBYTE (n--,*p++)
   3112 #define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
   3113 #define DUMPBITS(j) {b>>=(j);k-=(j);}
   3114 /*   output bytes */
   3115 #define WAVAIL (q<s->read?s->read-q-1:s->end-q)
   3116 #define LOADOUT {q=s->write;m=WAVAIL;}
   3117 #define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=WAVAIL;}}
   3118 #define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
   3119 #define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
   3120 #define OUTBYTE(a) {*q++=(Byte)(a);m--;}
   3121 /*   load local pointers */
   3122 #define LOAD {LOADIN LOADOUT}
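
/* Typical use of the macros above (for illustration), as in the TYPE state
   of inflate_blocks() below:

       NEEDBITS(3)          refill b until it holds at least 3 bits
       t = (uInt)b & 7;     peek at the three block-type bits
       DUMPBITS(3)          discard them from the bit buffer

   with LOAD before the state loop and UPDATE (via LEAVE) afterwards to move
   the stream state into and out of the locals p, n, b, k, q and m. */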
   3123 
   3124 /* And'ing with mask[n] masks the lower n bits */
   3125 local uInt inflate_mask[] = {
   3126     0x0000,
   3127     0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
   3128     0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
   3129 };
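
/* Example (for illustration): inflate_mask[3] == 0x0007, so
   (uInt)b & inflate_mask[3] keeps only the low three bits of the bit
   buffer, e.g. 5 when b == 0x2d. */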
   3130 
   3131 /* copy as much as possible from the sliding window to the output area */
   3132 local int inflate_flush OF((
   3133     inflate_blocks_statef *,
   3134     z_stream *,
   3135     int));
   3136 
   3137 /*+++++*/
   3138 /* inffast.h -- header to use inffast.c
   3139  * Copyright (C) 1995 Mark Adler
   3140  * For conditions of distribution and use, see copyright notice in zlib.h
   3141  */
   3142 
   3143 /* WARNING: this file should *not* be used by applications. It is
   3144    part of the implementation of the compression library and is
   3145    subject to change. Applications should only use zlib.h.
   3146  */
   3147 
   3148 local int inflate_fast OF((
   3149     uInt,
   3150     uInt,
   3151     inflate_huft *,
   3152     inflate_huft *,
   3153     inflate_blocks_statef *,
   3154     z_stream *));
   3155 
   3156 
   3157 /*+++++*/
   3158 /* infblock.c -- interpret and process block types to last block
   3159  * Copyright (C) 1995 Mark Adler
   3160  * For conditions of distribution and use, see copyright notice in zlib.h
   3161  */
   3162 
   3163 /* Table for deflate from PKZIP's appnote.txt. */
   3164 local uInt border[] = { /* Order of the bit length code lengths */
   3165         16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
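
/*
 * Note (for illustration): border[] gives the transmission order of the
 * code-length code lengths, so the first 3-bit value read in the BTREE
 * state below is the length of the code for symbol 16 (the "copy previous
 * length" repeat code); any lengths not transmitted are defaulted to zero.
 */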
   3166 
   3167 /*
   3168    Notes beyond the 1.93a appnote.txt:
   3169 
   3170    1. Distance pointers never point before the beginning of the output
   3171       stream.
   3172    2. Distance pointers can point back across blocks, up to 32k away.
   3173    3. There is an implied maximum of 7 bits for the bit length table and
   3174       15 bits for the actual data.
   3175    4. If only one code exists, then it is encoded using one bit.  (Zero
   3176       would be more efficient, but perhaps a little confusing.)  If two
   3177       codes exist, they are coded using one bit each (0 and 1).
   3178    5. There is no way of sending zero distance codes--a dummy must be
   3179       sent if there are none.  (History: a pre 2.0 version of PKZIP would
   3180       store blocks with no distance codes, but this was discovered to be
   3181       too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
   3182       zero distance codes, which is sent as one code of zero bits in
   3183       length.
   3184    6. There are up to 286 literal/length codes.  Code 256 represents the
   3185       end-of-block.  Note however that the static length tree defines
   3186       288 codes just to fill out the Huffman codes.  Codes 286 and 287
   3187       cannot be used though, since there is no length base or extra bits
    3188       defined for them.  Similarly, there are up to 30 distance codes.
   3189       However, static trees define 32 codes (all 5 bits) to fill out the
   3190       Huffman codes, but the last two had better not show up in the data.
   3191    7. Unzip can check dynamic Huffman blocks for complete code sets.
   3192       The exception is that a single code would not be complete (see #4).
    3193    8. The five bits following the block type are really the number of
   3194       literal codes sent minus 257.
   3195    9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
   3196       (1+6+6).  Therefore, to output three times the length, you output
   3197       three codes (1+1+1), whereas to output four times the same length,
   3198       you only need two codes (1+3).  Hmm.
   3199   10. In the tree reconstruction algorithm, Code = Code + Increment
   3200       only if BitLength(i) is not zero.  (Pretty obvious.)
   3201   11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
   3202   12. Note: length code 284 can represent 227-258, but length code 285
   3203       really is 258.  The last length deserves its own, short code
   3204       since it gets used a lot in very redundant files.  The length
   3205       258 is special since 258 - 3 (the min match length) is 255.
   3206   13. The literal/length and distance code bit lengths are read as a
   3207       single stream of lengths.  It is possible (and advantageous) for
   3208       a repeat code (16, 17, or 18) to go across the boundary between
   3209       the two sets of lengths.
   3210  */
   3211 
   3212 
   3213 local void inflate_blocks_reset(s, z, c)
   3214 inflate_blocks_statef *s;
   3215 z_stream *z;
   3216 uLongf *c;
   3217 {
   3218   if (s->checkfn != Z_NULL)
   3219     *c = s->check;
   3220   if (s->mode == BTREE || s->mode == DTREE)
   3221     ZFREE(z, s->sub.trees.blens, s->sub.trees.nblens * sizeof(uInt));
   3222   if (s->mode == CODES)
   3223   {
   3224     inflate_codes_free(s->sub.decode.codes, z);
   3225     inflate_trees_free(s->sub.decode.td, z);
   3226     inflate_trees_free(s->sub.decode.tl, z);
   3227   }
   3228   s->mode = TYPE;
   3229   s->bitk = 0;
   3230   s->bitb = 0;
   3231   s->read = s->write = s->window;
   3232   if (s->checkfn != Z_NULL)
   3233     s->check = (*s->checkfn)(0L, Z_NULL, 0);
   3234   Trace((stderr, "inflate:   blocks reset\n"));
   3235 }
   3236 
   3237 
   3238 local inflate_blocks_statef *inflate_blocks_new(z, c, w)
   3239 z_stream *z;
   3240 check_func c;
   3241 uInt w;
   3242 {
   3243   inflate_blocks_statef *s;
   3244 
   3245   if ((s = (inflate_blocks_statef *)ZALLOC
   3246        (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
   3247     return s;
   3248   if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
   3249   {
   3250     ZFREE(z, s, sizeof(struct inflate_blocks_state));
   3251     return Z_NULL;
   3252   }
   3253   s->end = s->window + w;
   3254   s->checkfn = c;
   3255   s->mode = TYPE;
   3256   Trace((stderr, "inflate:   blocks allocated\n"));
   3257   inflate_blocks_reset(s, z, &s->check);
   3258   return s;
   3259 }
   3260 
   3261 
   3262 local int inflate_blocks(s, z, r)
   3263 inflate_blocks_statef *s;
   3264 z_stream *z;
   3265 int r;
   3266 {
   3267   uInt t;               /* temporary storage */
   3268   uLong b;              /* bit buffer */
   3269   uInt k;               /* bits in bit buffer */
   3270   Bytef *p;             /* input data pointer */
   3271   uInt n;               /* bytes available there */
   3272   Bytef *q;             /* output window write pointer */
   3273   uInt m;               /* bytes to end of window or read pointer */
   3274 
   3275   /* copy input/output information to locals (UPDATE macro restores) */
   3276   LOAD
   3277 
   3278   /* process input based on current state */
   3279   while (1) switch (s->mode)
   3280   {
   3281     case TYPE:
   3282       NEEDBITS(3)
   3283       t = (uInt)b & 7;
   3284       s->last = t & 1;
   3285       switch (t >> 1)
   3286       {
   3287         case 0:                         /* stored */
   3288           Trace((stderr, "inflate:     stored block%s\n",
   3289                  s->last ? " (last)" : ""));
   3290           DUMPBITS(3)
   3291           t = k & 7;                    /* go to byte boundary */
   3292           DUMPBITS(t)
   3293           s->mode = LENS;               /* get length of stored block */
   3294           break;
   3295         case 1:                         /* fixed */
   3296           Trace((stderr, "inflate:     fixed codes block%s\n",
   3297                  s->last ? " (last)" : ""));
   3298           {
   3299             uInt bl, bd;
   3300             inflate_huft *tl, *td;
   3301 
   3302             inflate_trees_fixed(&bl, &bd, &tl, &td);
   3303             s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
   3304             if (s->sub.decode.codes == Z_NULL)
   3305             {
   3306               r = Z_MEM_ERROR;
   3307               LEAVE
   3308             }
   3309             s->sub.decode.tl = Z_NULL;  /* don't try to free these */
   3310             s->sub.decode.td = Z_NULL;
   3311           }
   3312           DUMPBITS(3)
   3313           s->mode = CODES;
   3314           break;
   3315         case 2:                         /* dynamic */
   3316           Trace((stderr, "inflate:     dynamic codes block%s\n",
   3317                  s->last ? " (last)" : ""));
   3318           DUMPBITS(3)
   3319           s->mode = TABLE;
   3320           break;
   3321         case 3:                         /* illegal */
   3322           DUMPBITS(3)
   3323           s->mode = BADB;
   3324           z->msg = "invalid block type";
   3325           r = Z_DATA_ERROR;
   3326           LEAVE
   3327       }
   3328       break;
   3329     case LENS:
   3330       NEEDBITS(32)
   3331       if (((~b) >> 16) != (b & 0xffff))
   3332       {
   3333         s->mode = BADB;
   3334         z->msg = "invalid stored block lengths";
   3335         r = Z_DATA_ERROR;
   3336         LEAVE
   3337       }
   3338       s->sub.left = (uInt)b & 0xffff;
   3339       b = k = 0;                      /* dump bits */
   3340       Tracev((stderr, "inflate:       stored length %u\n", s->sub.left));
   3341       s->mode = s->sub.left ? STORED : TYPE;
   3342       break;
   3343     case STORED:
   3344       if (n == 0)
   3345         LEAVE
   3346       NEEDOUT
   3347       t = s->sub.left;
   3348       if (t > n) t = n;
   3349       if (t > m) t = m;
   3350       zmemcpy(q, p, t);
   3351       p += t;  n -= t;
   3352       q += t;  m -= t;
   3353       if ((s->sub.left -= t) != 0)
   3354         break;
   3355       Tracev((stderr, "inflate:       stored end, %lu total out\n",
   3356               z->total_out + (q >= s->read ? q - s->read :
   3357               (s->end - s->read) + (q - s->window))));
   3358       s->mode = s->last ? DRY : TYPE;
   3359       break;
   3360     case TABLE:
   3361       NEEDBITS(14)
   3362       s->sub.trees.table = t = (uInt)b & 0x3fff;
   3363 #ifndef PKZIP_BUG_WORKAROUND
   3364       if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
   3365       {
   3366         s->mode = BADB;
   3367         z->msg = "too many length or distance symbols";
   3368         r = Z_DATA_ERROR;
   3369         LEAVE
   3370       }
   3371 #endif
   3372       t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
   3373       if (t < 19)
   3374         t = 19;
   3375       if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
   3376       {
   3377         r = Z_MEM_ERROR;
   3378         LEAVE
   3379       }
   3380       s->sub.trees.nblens = t;
   3381       DUMPBITS(14)
   3382       s->sub.trees.index = 0;
   3383       Tracev((stderr, "inflate:       table sizes ok\n"));
   3384       s->mode = BTREE;
   3385     case BTREE:
   3386       while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
   3387       {
   3388         NEEDBITS(3)
   3389         s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
   3390         DUMPBITS(3)
   3391       }
   3392       while (s->sub.trees.index < 19)
   3393         s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
   3394       s->sub.trees.bb = 7;
   3395       t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
   3396                              &s->sub.trees.tb, z);
   3397       if (t != Z_OK)
   3398       {
   3399         r = t;
   3400         if (r == Z_DATA_ERROR)
   3401           s->mode = BADB;
   3402         LEAVE
   3403       }
   3404       s->sub.trees.index = 0;
   3405       Tracev((stderr, "inflate:       bits tree ok\n"));
   3406       s->mode = DTREE;
   3407     case DTREE:
   3408       while (t = s->sub.trees.table,
   3409              s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
   3410       {
   3411         inflate_huft *h;
   3412         uInt i, j, c;
   3413 
   3414         t = s->sub.trees.bb;
   3415         NEEDBITS(t)
   3416         h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
   3417         t = h->word.what.Bits;
   3418         c = h->more.Base;
   3419         if (c < 16)
   3420         {
   3421           DUMPBITS(t)
   3422           s->sub.trees.blens[s->sub.trees.index++] = c;
   3423         }
   3424         else /* c == 16..18 */
   3425         {
   3426           i = c == 18 ? 7 : c - 14;
   3427           j = c == 18 ? 11 : 3;
   3428           NEEDBITS(t + i)
   3429           DUMPBITS(t)
   3430           j += (uInt)b & inflate_mask[i];
   3431           DUMPBITS(i)
   3432           i = s->sub.trees.index;
   3433           t = s->sub.trees.table;
   3434           if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
   3435               (c == 16 && i < 1))
   3436           {
   3437             s->mode = BADB;
   3438             z->msg = "invalid bit length repeat";
   3439             r = Z_DATA_ERROR;
   3440             LEAVE
   3441           }
   3442           c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
   3443           do {
   3444             s->sub.trees.blens[i++] = c;
   3445           } while (--j);
   3446           s->sub.trees.index = i;
   3447         }
   3448       }
   3449       inflate_trees_free(s->sub.trees.tb, z);
   3450       s->sub.trees.tb = Z_NULL;
   3451       {
   3452         uInt bl, bd;
   3453         inflate_huft *tl, *td;
   3454         inflate_codes_statef *c;
   3455 
   3456         bl = 9;         /* must be <= 9 for lookahead assumptions */
   3457         bd = 6;         /* must be <= 9 for lookahead assumptions */
   3458         t = s->sub.trees.table;
   3459         t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
   3460                                   s->sub.trees.blens, &bl, &bd, &tl, &td, z);
   3461         if (t != Z_OK)
   3462         {
   3463           if (t == (uInt)Z_DATA_ERROR)
   3464             s->mode = BADB;
   3465           r = t;
   3466           LEAVE
   3467         }
   3468         Tracev((stderr, "inflate:       trees ok\n"));
   3469         if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
   3470         {
   3471           inflate_trees_free(td, z);
   3472           inflate_trees_free(tl, z);
   3473           r = Z_MEM_ERROR;
   3474           LEAVE
   3475         }
   3476         ZFREE(z, s->sub.trees.blens, s->sub.trees.nblens * sizeof(uInt));
   3477         s->sub.decode.codes = c;
   3478         s->sub.decode.tl = tl;
   3479         s->sub.decode.td = td;
   3480       }
   3481       s->mode = CODES;
   3482     case CODES:
   3483       UPDATE
   3484       if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
   3485         return inflate_flush(s, z, r);
   3486       r = Z_OK;
   3487       inflate_codes_free(s->sub.decode.codes, z);
   3488       inflate_trees_free(s->sub.decode.td, z);
   3489       inflate_trees_free(s->sub.decode.tl, z);
   3490       LOAD
   3491       Tracev((stderr, "inflate:       codes end, %lu total out\n",
   3492               z->total_out + (q >= s->read ? q - s->read :
   3493               (s->end - s->read) + (q - s->window))));
   3494       if (!s->last)
   3495       {
   3496         s->mode = TYPE;
   3497         break;
   3498       }
   3499       if (k > 7)              /* return unused byte, if any */
   3500       {
   3501         Assert(k < 16, "inflate_codes grabbed too many bytes")
   3502         k -= 8;
   3503         n++;
   3504         p--;                    /* can always return one */
   3505       }
   3506       s->mode = DRY;
   3507     case DRY:
   3508       FLUSH
   3509       if (s->read != s->write)
   3510         LEAVE
   3511       s->mode = DONEB;
   3512     case DONEB:
   3513       r = Z_STREAM_END;
   3514       LEAVE
   3515     case BADB:
   3516       r = Z_DATA_ERROR;
   3517       LEAVE
   3518     default:
   3519       r = Z_STREAM_ERROR;
   3520       LEAVE
   3521   }
   3522 }
   3523 
   3524 
   3525 local int inflate_blocks_free(s, z, c)
   3526 inflate_blocks_statef *s;
   3527 z_stream *z;
   3528 uLongf *c;
   3529 {
   3530   inflate_blocks_reset(s, z, c);
   3531   ZFREE(z, s->window, s->end - s->window);
   3532   ZFREE(z, s, sizeof(struct inflate_blocks_state));
   3533   Trace((stderr, "inflate:   blocks freed\n"));
   3534   return Z_OK;
   3535 }
   3536 
   3537 /*
   3538  * This subroutine adds the data at next_in/avail_in to the output history
   3539  * without performing any output.  The output buffer must be "caught up";
   3540  * i.e. no pending output (hence s->read equals s->write), and the state must
   3541  * be BLOCKS (i.e. we should be willing to see the start of a series of
   3542  * BLOCKS).  On exit, the output will also be caught up, and the checksum
   3543  * will have been updated if need be.
   3544  */
   3545 local int inflate_addhistory(s, z)
   3546 inflate_blocks_statef *s;
   3547 z_stream *z;
   3548 {
   3549     uLong b;              /* bit buffer */  /* NOT USED HERE */
   3550     uInt k;               /* bits in bit buffer */ /* NOT USED HERE */
   3551     uInt t;               /* temporary storage */
   3552     Bytef *p;             /* input data pointer */
   3553     uInt n;               /* bytes available there */
   3554     Bytef *q;             /* output window write pointer */
   3555     uInt m;               /* bytes to end of window or read pointer */
   3556 
   3557     if (s->read != s->write)
   3558 	return Z_STREAM_ERROR;
   3559     if (s->mode != TYPE)
   3560 	return Z_DATA_ERROR;
   3561 
   3562     /* we're ready to rock */
   3563     LOAD
   3564     /* while there is input ready, copy to output buffer, moving
   3565      * pointers as needed.
   3566      */
   3567     while (n) {
   3568 	t = n;  /* how many to do */
   3569 	/* is there room until end of buffer? */
   3570 	if (t > m) t = m;
   3571 	/* update check information */
   3572 	if (s->checkfn != Z_NULL)
   3573 	    s->check = (*s->checkfn)(s->check, q, t);
   3574 	zmemcpy(q, p, t);
   3575 	q += t;
   3576 	p += t;
   3577 	n -= t;
   3578 	z->total_out += t;
   3579 	s->read = q;    /* drag read pointer forward */
   3580 /*      WRAP  */ 	/* expand WRAP macro by hand to handle s->read */
   3581 	if (q == s->end) {
   3582 	    s->read = q = s->window;
   3583 	    m = WAVAIL;
   3584 	}
   3585     }
   3586     UPDATE
   3587     return Z_OK;
   3588 }
   3589 
   3590 
   3591 /*
   3592  * At the end of a Deflate-compressed PPP packet, we expect to have seen
   3593  * a `stored' block type value but not the (zero) length bytes.
   3594  */
   3595 local int inflate_packet_flush(s)
   3596     inflate_blocks_statef *s;
   3597 {
   3598     if (s->mode != LENS)
   3599 	return Z_DATA_ERROR;
   3600     s->mode = TYPE;
   3601     return Z_OK;
   3602 }
   3603 
   3604 
   3605 /*+++++*/
   3606 /* inftrees.c -- generate Huffman trees for efficient decoding
   3607  * Copyright (C) 1995 Mark Adler
   3608  * For conditions of distribution and use, see copyright notice in zlib.h
   3609  */
   3610 
   3611 /* simplify the use of the inflate_huft type with some defines */
   3612 #define base more.Base
   3613 #define next more.Next
   3614 #define exop word.what.Exop
   3615 #define bits word.what.Bits
   3616 
   3617 
   3618 local int huft_build OF((
   3619     uIntf *,            /* code lengths in bits */
   3620     uInt,               /* number of codes */
   3621     uInt,               /* number of "simple" codes */
   3622     uIntf *,            /* list of base values for non-simple codes */
   3623     uIntf *,            /* list of extra bits for non-simple codes */
   3624     inflate_huft * FAR*,/* result: starting table */
   3625     uIntf *,            /* maximum lookup bits (returns actual) */
   3626     z_stream *));       /* for zalloc function */
   3627 
   3628 local voidpf falloc OF((
   3629     voidpf,             /* opaque pointer (not used) */
   3630     uInt,               /* number of items */
   3631     uInt));             /* size of item */
   3632 
   3633 local void ffree OF((
   3634     voidpf q,           /* opaque pointer (not used) */
   3635     voidpf p,           /* what to free (not used) */
   3636     uInt n));		/* number of bytes (not used) */
   3637 
   3638 /* Tables for deflate from PKZIP's appnote.txt. */
   3639 local uInt cplens[] = { /* Copy lengths for literal codes 257..285 */
   3640         3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
   3641         35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
   3642         /* actually lengths - 2; also see note #13 above about 258 */
   3643 local uInt cplext[] = { /* Extra bits for literal codes 257..285 */
   3644         0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
   3645         3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 192, 192}; /* 192==invalid */
   3646 local uInt cpdist[] = { /* Copy offsets for distance codes 0..29 */
   3647         1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
   3648         257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
   3649         8193, 12289, 16385, 24577};
   3650 local uInt cpdext[] = { /* Extra bits for distance codes */
   3651         0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
   3652         7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
   3653         12, 12, 13, 13};
   3654 
   3655 /*
   3656    Huffman code decoding is performed using a multi-level table lookup.
   3657    The fastest way to decode is to simply build a lookup table whose
   3658    size is determined by the longest code.  However, the time it takes
   3659    to build this table can also be a factor if the data being decoded
   3660    is not very long.  The most common codes are necessarily the
   3661    shortest codes, so those codes dominate the decoding time, and hence
   3662    the speed.  The idea is you can have a shorter table that decodes the
   3663    shorter, more probable codes, and then point to subsidiary tables for
   3664    the longer codes.  The time it costs to decode the longer codes is
   3665    then traded against the time it takes to make longer tables.
   3666 
    3667    The results of this trade are in the variables lbits and dbits
   3668    below.  lbits is the number of bits the first level table for literal/
   3669    length codes can decode in one step, and dbits is the same thing for
   3670    the distance codes.  Subsequent tables are also less than or equal to
   3671    those sizes.  These values may be adjusted either when all of the
   3672    codes are shorter than that, in which case the longest code length in
   3673    bits is used, or when the shortest code is *longer* than the requested
   3674    table size, in which case the length of the shortest code in bits is
   3675    used.
   3676 
   3677    There are two different values for the two tables, since they code a
   3678    different number of possibilities each.  The literal/length table
   3679    codes 286 possible values, or in a flat code, a little over eight
   3680    bits.  The distance table codes 30 possible values, or a little less
   3681    than five bits, flat.  The optimum values for speed end up being
   3682    about one bit more than those, so lbits is 8+1 and dbits is 5+1.
   3683    The optimum values may differ though from machine to machine, and
   3684    possibly even between compilers.  Your mileage may vary.
   3685  */
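
/*
 * Worked example (for illustration): with lbits == 9, a 7-bit
 * literal/length code is resolved by the first-level table alone;
 * huft_build() below replicates its entry 2^(9-7) == 4 times so that every
 * 9-bit index whose low seven bits match the code finds it.  A 12-bit code
 * consumes nine bits at the first level, where it hits a link entry, and
 * the remaining three bits index a second-level sub-table whose size
 * depends on the other long codes present.
 */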
   3686 
   3687 
   3688 /* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
   3689 #define BMAX 15         /* maximum bit length of any code */
   3690 #define N_MAX 288       /* maximum number of codes in any set */
   3691 
   3692 #ifdef DEBUG_ZLIB
   3693   uInt inflate_hufts;
   3694 #endif
   3695 
   3696 local int huft_build(b, n, s, d, e, t, m, zs)
   3697 uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
   3698 uInt n;                 /* number of codes (assumed <= N_MAX) */
   3699 uInt s;                 /* number of simple-valued codes (0..s-1) */
   3700 uIntf *d;               /* list of base values for non-simple codes */
   3701 uIntf *e;               /* list of extra bits for non-simple codes */
   3702 inflate_huft * FAR *t;  /* result: starting table */
   3703 uIntf *m;               /* maximum lookup bits, returns actual */
   3704 z_stream *zs;           /* for zalloc function */
   3705 /* Given a list of code lengths and a maximum table size, make a set of
   3706    tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
   3707    if the given code set is incomplete (the tables are still built in this
   3708    case), Z_DATA_ERROR if the input is invalid (all zero length codes or an
   3709    over-subscribed set of lengths), or Z_MEM_ERROR if not enough memory. */
   3710 {
   3711 
   3712   uInt a;                       /* counter for codes of length k */
   3713   uInt c[BMAX+1];               /* bit length count table */
   3714   uInt f;                       /* i repeats in table every f entries */
   3715   int g;                        /* maximum code length */
   3716   int h;                        /* table level */
   3717   register uInt i;              /* counter, current code */
   3718   register uInt j;              /* counter */
   3719   register int k;               /* number of bits in current code */
   3720   int l;                        /* bits per table (returned in m) */
   3721   register uIntf *p;            /* pointer into c[], b[], or v[] */
   3722   inflate_huft *q;              /* points to current table */
   3723   struct inflate_huft_s r;      /* table entry for structure assignment */
   3724   inflate_huft *u[BMAX];        /* table stack */
   3725   uInt v[N_MAX];                /* values in order of bit length */
   3726   register int w;               /* bits before this table == (l * h) */
   3727   uInt x[BMAX+1];               /* bit offsets, then code stack */
   3728   uIntf *xp;                    /* pointer into x */
   3729   int y;                        /* number of dummy codes added */
   3730   uInt z;                       /* number of entries in current table */
   3731 
   3732 
   3733   /* Generate counts for each bit length */
   3734   p = c;
   3735 #define C0 *p++ = 0;
   3736 #define C2 C0 C0 C0 C0
   3737 #define C4 C2 C2 C2 C2
   3738   C4                            /* clear c[]--assume BMAX+1 is 16 */
   3739   p = b;  i = n;
   3740   do {
   3741     c[*p++]++;                  /* assume all entries <= BMAX */
   3742   } while (--i);
   3743   if (c[0] == n)                /* null input--all zero length codes */
   3744   {
   3745     *t = (inflate_huft *)Z_NULL;
   3746     *m = 0;
   3747     return Z_OK;
   3748   }
   3749 
   3750 
   3751   /* Find minimum and maximum length, bound *m by those */
   3752   l = *m;
   3753   for (j = 1; j <= BMAX; j++)
   3754     if (c[j])
   3755       break;
   3756   k = j;                        /* minimum code length */
   3757   if ((uInt)l < j)
   3758     l = j;
   3759   for (i = BMAX; i; i--)
   3760     if (c[i])
   3761       break;
   3762   g = i;                        /* maximum code length */
   3763   if ((uInt)l > i)
   3764     l = i;
   3765   *m = l;
   3766 
   3767 
   3768   /* Adjust last length count to fill out codes, if needed */
   3769   for (y = 1 << j; j < i; j++, y <<= 1)
   3770     if ((y -= c[j]) < 0)
   3771       return Z_DATA_ERROR;
   3772   if ((y -= c[i]) < 0)
   3773     return Z_DATA_ERROR;
   3774   c[i] += y;
   3775 
   3776 
   3777   /* Generate starting offsets into the value table for each length */
   3778   x[1] = j = 0;
   3779   p = c + 1;  xp = x + 2;
   3780   while (--i) {                 /* note that i == g from above */
   3781     *xp++ = (j += *p++);
   3782   }
   3783 
   3784 
   3785   /* Make a table of values in order of bit lengths */
   3786   p = b;  i = 0;
   3787   do {
   3788     if ((j = *p++) != 0)
   3789       v[x[j]++] = i;
   3790   } while (++i < n);
   3791 
   3792 
   3793   /* Generate the Huffman codes and for each, make the table entries */
   3794   x[0] = i = 0;                 /* first Huffman code is zero */
   3795   p = v;                        /* grab values in bit order */
   3796   h = -1;                       /* no tables yet--level -1 */
   3797   w = -l;                       /* bits decoded == (l * h) */
   3798   u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
   3799   q = (inflate_huft *)Z_NULL;   /* ditto */
   3800   z = 0;                        /* ditto */
   3801 
   3802   /* go through the bit lengths (k already is bits in shortest code) */
   3803   for (; k <= g; k++)
   3804   {
   3805     a = c[k];
   3806     while (a--)
   3807     {
   3808       /* here i is the Huffman code of length k bits for value *p */
   3809       /* make tables up to required level */
   3810       while (k > w + l)
   3811       {
   3812         h++;
   3813         w += l;                 /* previous table always l bits */
   3814 
   3815         /* compute minimum size table less than or equal to l bits */
   3816         z = (z = g - w) > (uInt)l ? l : z;      /* table size upper limit */
   3817         if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
   3818         {                       /* too few codes for k-w bit table */
   3819           f -= a + 1;           /* deduct codes from patterns left */
   3820           xp = c + k;
   3821           if (j < z)
   3822             while (++j < z)     /* try smaller tables up to z bits */
   3823             {
   3824               if ((f <<= 1) <= *++xp)
   3825                 break;          /* enough codes to use up j bits */
   3826               f -= *xp;         /* else deduct codes from patterns */
   3827             }
   3828         }
   3829         z = 1 << j;             /* table entries for j-bit table */
   3830 
   3831         /* allocate and link in new table */
   3832         if ((q = (inflate_huft *)ZALLOC
   3833              (zs,z + 1,sizeof(inflate_huft))) == Z_NULL)
   3834         {
   3835           if (h)
   3836             inflate_trees_free(u[0], zs);
   3837           return Z_MEM_ERROR;   /* not enough memory */
   3838         }
   3839 	q->word.Nalloc = z + 1;
   3840 #ifdef DEBUG_ZLIB
   3841         inflate_hufts += z + 1;
   3842 #endif
   3843         *t = q + 1;             /* link to list for huft_free() */
   3844         *(t = &(q->next)) = Z_NULL;
   3845         u[h] = ++q;             /* table starts after link */
   3846 
   3847         /* connect to last table, if there is one */
   3848         if (h)
   3849         {
   3850           x[h] = i;             /* save pattern for backing up */
   3851           r.bits = (Byte)l;     /* bits to dump before this table */
   3852           r.exop = (Byte)j;     /* bits in this table */
   3853           r.next = q;           /* pointer to this table */
   3854           j = i >> (w - l);     /* (get around Turbo C bug) */
   3855           u[h-1][j] = r;        /* connect to last table */
   3856         }
   3857       }
   3858 
   3859       /* set up table entry in r */
   3860       r.bits = (Byte)(k - w);
   3861       if (p >= v + n)
   3862         r.exop = 128 + 64;      /* out of values--invalid code */
   3863       else if (*p < s)
   3864       {
   3865         r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);     /* 256 is end-of-block */
   3866         r.base = *p++;          /* simple code is just the value */
   3867       }
   3868       else
   3869       {
   3870         r.exop = (Byte)e[*p - s] + 16 + 64; /* non-simple--look up in lists */
   3871         r.base = d[*p++ - s];
   3872       }
   3873 
   3874       /* fill code-like entries with r */
   3875       f = 1 << (k - w);
   3876       for (j = i >> w; j < z; j += f)
   3877         q[j] = r;
   3878 
   3879       /* backwards increment the k-bit code i */
   3880       for (j = 1 << (k - 1); i & j; j >>= 1)
   3881         i ^= j;
   3882       i ^= j;
   3883 
   3884       /* backup over finished tables */
   3885       while ((i & ((1 << w) - 1)) != x[h])
   3886       {
   3887         h--;                    /* don't need to update q */
   3888         w -= l;
   3889       }
   3890     }
   3891   }
   3892 
   3893 
   3894   /* Return Z_BUF_ERROR if we were given an incomplete table */
   3895   return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
   3896 }
   3897 
   3898 
   3899 local int inflate_trees_bits(c, bb, tb, z)
   3900 uIntf *c;               /* 19 code lengths */
   3901 uIntf *bb;              /* bits tree desired/actual depth */
   3902 inflate_huft * FAR *tb; /* bits tree result */
   3903 z_stream *z;            /* for zfree function */
   3904 {
   3905   int r;
   3906 
   3907   r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, z);
   3908   if (r == Z_DATA_ERROR)
   3909     z->msg = "oversubscribed dynamic bit lengths tree";
   3910   else if (r == Z_BUF_ERROR)
   3911   {
   3912     inflate_trees_free(*tb, z);
   3913     z->msg = "incomplete dynamic bit lengths tree";
   3914     r = Z_DATA_ERROR;
   3915   }
   3916   return r;
   3917 }
   3918 
   3919 
   3920 local int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, z)
   3921 uInt nl;                /* number of literal/length codes */
   3922 uInt nd;                /* number of distance codes */
   3923 uIntf *c;               /* that many (total) code lengths */
   3924 uIntf *bl;              /* literal desired/actual bit depth */
   3925 uIntf *bd;              /* distance desired/actual bit depth */
   3926 inflate_huft * FAR *tl; /* literal/length tree result */
   3927 inflate_huft * FAR *td; /* distance tree result */
   3928 z_stream *z;            /* for zfree function */
   3929 {
   3930   int r;
   3931 
   3932   /* build literal/length tree */
   3933   if ((r = huft_build(c, nl, 257, cplens, cplext, tl, bl, z)) != Z_OK)
   3934   {
   3935     if (r == Z_DATA_ERROR)
   3936       z->msg = "oversubscribed literal/length tree";
   3937     else if (r == Z_BUF_ERROR)
   3938     {
   3939       inflate_trees_free(*tl, z);
   3940       z->msg = "incomplete literal/length tree";
   3941       r = Z_DATA_ERROR;
   3942     }
   3943     return r;
   3944   }
   3945 
   3946   /* build distance tree */
   3947   if ((r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, z)) != Z_OK)
   3948   {
   3949     if (r == Z_DATA_ERROR)
   3950       z->msg = "oversubscribed literal/length tree";
   3951     else if (r == Z_BUF_ERROR) {
   3952 #ifdef PKZIP_BUG_WORKAROUND
   3953       r = Z_OK;
   3954     }
   3955 #else
   3956       inflate_trees_free(*td, z);
   3957       z->msg = "incomplete literal/length tree";
   3958       r = Z_DATA_ERROR;
   3959     }
   3960     inflate_trees_free(*tl, z);
   3961     return r;
   3962 #endif
   3963   }
   3964 
   3965   /* done */
   3966   return Z_OK;
   3967 }
   3968 
   3969 
   3970 /* build fixed tables only once--keep them here */
   3971 local int fixed_lock = 0;
   3972 local int fixed_built = 0;
   3973 #define FIXEDH 530      /* number of hufts used by fixed tables */
   3974 local uInt fixed_left = FIXEDH;
   3975 local inflate_huft fixed_mem[FIXEDH];
   3976 local uInt fixed_bl;
   3977 local uInt fixed_bd;
   3978 local inflate_huft *fixed_tl;
   3979 local inflate_huft *fixed_td;
   3980 
   3981 
   3982 local voidpf falloc(q, n, s)
   3983 voidpf q;        /* opaque pointer (not used) */
   3984 uInt n;         /* number of items */
   3985 uInt s;         /* size of item */
   3986 {
   3987   Assert(s == sizeof(inflate_huft) && n <= fixed_left,
   3988          "inflate_trees falloc overflow");
   3989   if (q) s++; /* to make some compilers happy */
   3990   fixed_left -= n;
   3991   return (voidpf)(fixed_mem + fixed_left);
   3992 }
   3993 
   3994 
   3995 local void ffree(q, p, n)
   3996 voidpf q;
   3997 voidpf p;
   3998 uInt n;
   3999 {
   4000   Assert(0, "inflate_trees ffree called!");
   4001   if (q) q = p; /* to make some compilers happy */
   4002 }
   4003 
   4004 
   4005 local int inflate_trees_fixed(bl, bd, tl, td)
   4006 uIntf *bl;               /* literal desired/actual bit depth */
   4007 uIntf *bd;               /* distance desired/actual bit depth */
   4008 inflate_huft * FAR *tl;  /* literal/length tree result */
   4009 inflate_huft * FAR *td;  /* distance tree result */
   4010 {
   4011   /* build fixed tables if not built already--lock out other instances */
   4012   while (++fixed_lock > 1)
   4013     fixed_lock--;
   4014   if (!fixed_built)
   4015   {
   4016     int k;              /* temporary variable */
   4017     unsigned c[288];    /* length list for huft_build */
   4018     z_stream z;         /* for falloc function */
   4019 
   4020     /* set up fake z_stream for memory routines */
   4021     z.zalloc = falloc;
   4022     z.zfree = ffree;
   4023     z.opaque = Z_NULL;
   4024 
   4025     /* literal table */
   4026     for (k = 0; k < 144; k++)
   4027       c[k] = 8;
   4028     for (; k < 256; k++)
   4029       c[k] = 9;
   4030     for (; k < 280; k++)
   4031       c[k] = 7;
   4032     for (; k < 288; k++)
   4033       c[k] = 8;
   4034     fixed_bl = 7;
   4035     huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, &z);
   4036 
   4037     /* distance table */
   4038     for (k = 0; k < 30; k++)
   4039       c[k] = 5;
   4040     fixed_bd = 5;
   4041     huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, &z);
   4042 
   4043     /* done */
   4044     fixed_built = 1;
   4045   }
   4046   fixed_lock--;
   4047   *bl = fixed_bl;
   4048   *bd = fixed_bd;
   4049   *tl = fixed_tl;
   4050   *td = fixed_td;
   4051   return Z_OK;
   4052 }
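/* Note: the code length lists built above are the fixed Huffman codes of
   RFC 1951 section 3.2.6 -- literal/length symbols 0-143, 144-255, 256-279
   and 280-287 get 8, 9, 7 and 8 bits respectively, and all 30 usable
   distance symbols get 5-bit codes. */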
   4053 
   4054 
   4055 local int inflate_trees_free(t, z)
   4056 inflate_huft *t;        /* table to free */
   4057 z_stream *z;            /* for zfree function */
   4058 /* Free the malloc'ed tables built by huft_build(), which makes a linked
   4059    list of the tables it made, with the links in a dummy first entry of
   4060    each table. */
   4061 {
   4062   register inflate_huft *p, *q;
   4063 
   4064   /* Go through linked list, freeing from the malloced (t[-1]) address. */
   4065   p = t;
   4066   while (p != Z_NULL)
   4067   {
   4068     q = (--p)->next;
   4069     ZFREE(z, p, p->word.Nalloc * sizeof(inflate_huft));
   4070     p = q;
   4071   }
   4072   return Z_OK;
   4073 }
   4074 
   4075 /*+++++*/
   4076 /* infcodes.c -- process literals and length/distance pairs
   4077  * Copyright (C) 1995 Mark Adler
   4078  * For conditions of distribution and use, see copyright notice in zlib.h
   4079  */
   4080 
   4081 /* simplify the use of the inflate_huft type with some defines */
   4082 #define base more.Base
   4083 #define next more.Next
   4084 #define exop word.what.Exop
   4085 #define bits word.what.Bits
   4086 
   4087 /* inflate codes private state */
   4088 struct inflate_codes_state {
   4089 
   4090   /* mode */
   4091   enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
   4092       START,    /* x: set up for LEN */
   4093       LEN,      /* i: get length/literal/eob next */
   4094       LENEXT,   /* i: getting length extra (have base) */
   4095       DIST,     /* i: get distance next */
   4096       DISTEXT,  /* i: getting distance extra */
   4097       COPY,     /* o: copying bytes in window, waiting for space */
   4098       LIT,      /* o: got literal, waiting for output space */
   4099       WASH,     /* o: got eob, possibly still output waiting */
   4100       END,      /* x: got eob and all data flushed */
   4101       BADCODE}  /* x: got error */
   4102     mode;               /* current inflate_codes mode */
   4103 
   4104   /* mode dependent information */
   4105   uInt len;
   4106   union {
   4107     struct {
   4108       inflate_huft *tree;       /* pointer into tree */
   4109       uInt need;                /* bits needed */
   4110     } code;             /* if LEN or DIST, where in tree */
   4111     uInt lit;           /* if LIT, literal */
   4112     struct {
   4113       uInt get;                 /* bits to get for extra */
   4114       uInt dist;                /* distance back to copy from */
   4115     } copy;             /* if EXT or COPY, where and how much */
   4116   } sub;                /* submode */
   4117 
   4118   /* mode independent information */
   4119   Byte lbits;           /* ltree bits decoded per branch */
    4120   Byte dbits;           /* dtree bits decoded per branch */
   4121   inflate_huft *ltree;          /* literal/length/eob tree */
   4122   inflate_huft *dtree;          /* distance tree */
   4123 
   4124 };
   4125 
   4126 
   4127 local inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
   4128 uInt bl, bd;
   4129 inflate_huft *tl, *td;
   4130 z_stream *z;
   4131 {
   4132   inflate_codes_statef *c;
   4133 
   4134   if ((c = (inflate_codes_statef *)
   4135        ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
   4136   {
   4137     c->mode = START;
   4138     c->lbits = (Byte)bl;
   4139     c->dbits = (Byte)bd;
   4140     c->ltree = tl;
   4141     c->dtree = td;
   4142     Tracev((stderr, "inflate:       codes new\n"));
   4143   }
   4144   return c;
   4145 }
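/* Illustrative sketch only (hypothetical helper): pairing the fixed tables
   with a codes state.  inflate_trees_fixed always returns Z_OK, and the
   codes state is released with inflate_codes_free when the block is done: */
#if 0
local inflate_codes_statef *example_codes_for_fixed_block(z)
z_stream *z;
{
  uInt bl, bd;
  inflate_huft *tl, *td;

  inflate_trees_fixed(&bl, &bd, &tl, &td);
  return inflate_codes_new(bl, bd, tl, td, z);  /* Z_NULL if out of memory */
}
#endif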
   4146 
   4147 
   4148 local int inflate_codes(s, z, r)
   4149 inflate_blocks_statef *s;
   4150 z_stream *z;
   4151 int r;
   4152 {
   4153   uInt j;               /* temporary storage */
   4154   inflate_huft *t;      /* temporary pointer */
   4155   uInt e;               /* extra bits or operation */
   4156   uLong b;              /* bit buffer */
   4157   uInt k;               /* bits in bit buffer */
   4158   Bytef *p;             /* input data pointer */
   4159   uInt n;               /* bytes available there */
   4160   Bytef *q;             /* output window write pointer */
   4161   uInt m;               /* bytes to end of window or read pointer */
   4162   Bytef *f;             /* pointer to copy strings from */
   4163   inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */
   4164 
   4165   /* copy input/output information to locals (UPDATE macro restores) */
   4166   LOAD
   4167 
   4168   /* process input and output based on current state */
   4169   while (1) switch (c->mode)
   4170   {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
   4171     case START:         /* x: set up for LEN */
   4172 #ifndef SLOW
   4173       if (m >= 258 && n >= 10)
   4174       {
   4175         UPDATE
   4176         r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
   4177         LOAD
   4178         if (r != Z_OK)
   4179         {
   4180           c->mode = r == Z_STREAM_END ? WASH : BADCODE;
   4181           break;
   4182         }
   4183       }
   4184 #endif /* !SLOW */
   4185       c->sub.code.need = c->lbits;
   4186       c->sub.code.tree = c->ltree;
   4187       c->mode = LEN;
   4188     case LEN:           /* i: get length/literal/eob next */
   4189       j = c->sub.code.need;
   4190       NEEDBITS(j)
   4191       t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
   4192       DUMPBITS(t->bits)
   4193       e = (uInt)(t->exop);
   4194       if (e == 0)               /* literal */
   4195       {
   4196         c->sub.lit = t->base;
   4197         Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
   4198                  "inflate:         literal '%c'\n" :
   4199                  "inflate:         literal 0x%02x\n", t->base));
   4200         c->mode = LIT;
   4201         break;
   4202       }
   4203       if (e & 16)               /* length */
   4204       {
   4205         c->sub.copy.get = e & 15;
   4206         c->len = t->base;
   4207         c->mode = LENEXT;
   4208         break;
   4209       }
   4210       if ((e & 64) == 0)        /* next table */
   4211       {
   4212         c->sub.code.need = e;
   4213         c->sub.code.tree = t->next;
   4214         break;
   4215       }
   4216       if (e & 32)               /* end of block */
   4217       {
   4218         Tracevv((stderr, "inflate:         end of block\n"));
   4219         c->mode = WASH;
   4220         break;
   4221       }
   4222       c->mode = BADCODE;        /* invalid code */
   4223       z->msg = "invalid literal/length code";
   4224       r = Z_DATA_ERROR;
   4225       LEAVE
   4226     case LENEXT:        /* i: getting length extra (have base) */
   4227       j = c->sub.copy.get;
   4228       NEEDBITS(j)
   4229       c->len += (uInt)b & inflate_mask[j];
   4230       DUMPBITS(j)
   4231       c->sub.code.need = c->dbits;
   4232       c->sub.code.tree = c->dtree;
   4233       Tracevv((stderr, "inflate:         length %u\n", c->len));
   4234       c->mode = DIST;
   4235     case DIST:          /* i: get distance next */
   4236       j = c->sub.code.need;
   4237       NEEDBITS(j)
   4238       t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
   4239       DUMPBITS(t->bits)
   4240       e = (uInt)(t->exop);
   4241       if (e & 16)               /* distance */
   4242       {
   4243         c->sub.copy.get = e & 15;
   4244         c->sub.copy.dist = t->base;
   4245         c->mode = DISTEXT;
   4246         break;
   4247       }
   4248       if ((e & 64) == 0)        /* next table */
   4249       {
   4250         c->sub.code.need = e;
   4251         c->sub.code.tree = t->next;
   4252         break;
   4253       }
   4254       c->mode = BADCODE;        /* invalid code */
   4255       z->msg = "invalid distance code";
   4256       r = Z_DATA_ERROR;
   4257       LEAVE
   4258     case DISTEXT:       /* i: getting distance extra */
   4259       j = c->sub.copy.get;
   4260       NEEDBITS(j)
   4261       c->sub.copy.dist += (uInt)b & inflate_mask[j];
   4262       DUMPBITS(j)
   4263       Tracevv((stderr, "inflate:         distance %u\n", c->sub.copy.dist));
   4264       c->mode = COPY;
   4265     case COPY:          /* o: copying bytes in window, waiting for space */
   4266 #ifndef __TURBOC__ /* Turbo C bug for following expression */
   4267       f = (uInt)(q - s->window) < c->sub.copy.dist ?
   4268           s->end - (c->sub.copy.dist - (q - s->window)) :
   4269           q - c->sub.copy.dist;
   4270 #else
   4271       f = q - c->sub.copy.dist;
   4272       if ((uInt)(q - s->window) < c->sub.copy.dist)
   4273         f = s->end - (c->sub.copy.dist - (q - s->window));
   4274 #endif
   4275       while (c->len)
   4276       {
   4277         NEEDOUT
   4278         OUTBYTE(*f++)
   4279         if (f == s->end)
   4280           f = s->window;
   4281         c->len--;
   4282       }
   4283       c->mode = START;
   4284       break;
   4285     case LIT:           /* o: got literal, waiting for output space */
   4286       NEEDOUT
   4287       OUTBYTE(c->sub.lit)
   4288       c->mode = START;
   4289       break;
   4290     case WASH:          /* o: got eob, possibly more output */
   4291       FLUSH
   4292       if (s->read != s->write)
   4293         LEAVE
   4294       c->mode = END;
   4295     case END:
   4296       r = Z_STREAM_END;
   4297       LEAVE
   4298     case BADCODE:       /* x: got error */
   4299       r = Z_DATA_ERROR;
   4300       LEAVE
   4301     default:
   4302       r = Z_STREAM_ERROR;
   4303       LEAVE
   4304   }
   4305 }
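/* Worked example of the state machine above: to emit a match of length 10 at
   distance 300, LEN decodes length code 264 (base 10, no extra bits), LENEXT
   adds nothing and switches to the distance tree, DIST decodes distance code
   16 (base 257, 7 extra bits), DISTEXT reads the extra value 43 giving
   257 + 43 = 300, and COPY copies 10 bytes from 300 bytes back in the sliding
   window, wrapping from s->end to s->window if necessary. */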
   4306 
   4307 
   4308 local void inflate_codes_free(c, z)
   4309 inflate_codes_statef *c;
   4310 z_stream *z;
   4311 {
   4312   ZFREE(z, c, sizeof(struct inflate_codes_state));
   4313   Tracev((stderr, "inflate:       codes free\n"));
   4314 }
   4315 
   4316 /*+++++*/
   4317 /* inflate_util.c -- data and routines common to blocks and codes
   4318  * Copyright (C) 1995 Mark Adler
   4319  * For conditions of distribution and use, see copyright notice in zlib.h
   4320  */
   4321 
   4322 /* copy as much as possible from the sliding window to the output area */
   4323 local int inflate_flush(s, z, r)
   4324 inflate_blocks_statef *s;
   4325 z_stream *z;
   4326 int r;
   4327 {
   4328   uInt n;
   4329   Bytef *p, *q;
   4330 
   4331   /* local copies of source and destination pointers */
   4332   p = z->next_out;
   4333   q = s->read;
   4334 
   4335   /* compute number of bytes to copy as far as end of window */
   4336   n = (uInt)((q <= s->write ? s->write : s->end) - q);
   4337   if (n > z->avail_out) n = z->avail_out;
   4338   if (n && r == Z_BUF_ERROR) r = Z_OK;
   4339 
   4340   /* update counters */
   4341   z->avail_out -= n;
   4342   z->total_out += n;
   4343 
   4344   /* update check information */
   4345   if (s->checkfn != Z_NULL)
   4346     s->check = (*s->checkfn)(s->check, q, n);
   4347 
   4348   /* copy as far as end of window */
   4349   if (p != NULL) {
   4350     zmemcpy(p, q, n);
   4351     p += n;
   4352   }
   4353   q += n;
   4354 
   4355   /* see if more to copy at beginning of window */
   4356   if (q == s->end)
   4357   {
   4358     /* wrap pointers */
   4359     q = s->window;
   4360     if (s->write == s->end)
   4361       s->write = s->window;
   4362 
   4363     /* compute bytes to copy */
   4364     n = (uInt)(s->write - q);
   4365     if (n > z->avail_out) n = z->avail_out;
   4366     if (n && r == Z_BUF_ERROR) r = Z_OK;
   4367 
   4368     /* update counters */
   4369     z->avail_out -= n;
   4370     z->total_out += n;
   4371 
   4372     /* update check information */
   4373     if (s->checkfn != Z_NULL)
   4374       s->check = (*s->checkfn)(s->check, q, n);
   4375 
   4376     /* copy */
   4377     if (p != NULL) {
   4378       zmemcpy(p, q, n);
   4379       p += n;
   4380     }
   4381     q += n;
   4382   }
   4383 
   4384   /* update pointers */
   4385   z->next_out = p;
   4386   s->read = q;
   4387 
   4388   /* done */
   4389   return r;
   4390 }
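/* Illustrative sketch only: the wrap-around copy performed by inflate_flush,
   shown as a self-contained circular-buffer drain using index arithmetic
   instead of pointers (hypothetical helper, not used anywhere): */
#if 0
local unsigned example_drain_ring(ring, size, readp, write, out, avail)
unsigned char *ring;    /* circular buffer */
unsigned size;          /* its total size */
unsigned *readp;        /* read index, updated on return */
unsigned write;         /* write index (first byte not yet available) */
unsigned char *out;     /* destination */
unsigned avail;         /* room in destination */
{
  unsigned n, total = 0;

  /* first chunk: up to the write index or the physical end of the ring */
  n = (*readp <= write ? write : size) - *readp;
  if (n > avail) n = avail;
  total += n;
  avail -= n;
  while (n--) *out++ = ring[(*readp)++];

  /* if the read index hit the physical end, wrap and copy the rest */
  if (*readp == size)
  {
    *readp = 0;
    if (write == size) write = 0;
    n = write < avail ? write : avail;
    total += n;
    while (n--) *out++ = ring[(*readp)++];
  }
  return total;
}
#endif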
   4391 
   4392 
   4393 /*+++++*/
   4394 /* inffast.c -- process literals and length/distance pairs fast
   4395  * Copyright (C) 1995 Mark Adler
   4396  * For conditions of distribution and use, see copyright notice in zlib.h
   4397  */
   4398 
   4399 /* simplify the use of the inflate_huft type with some defines */
   4400 #define base more.Base
   4401 #define next more.Next
   4402 #define exop word.what.Exop
   4403 #define bits word.what.Bits
   4404 
   4405 /* macros for bit input with no checking and for returning unused bytes */
   4406 #define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
   4407 #define UNGRAB {n+=(c=k>>3);p-=c;k&=7;}
   4408 
   4409 /* Called with number of bytes left to write in window at least 258
   4410    (the maximum string length) and number of input bytes available
   4411    at least ten.  The ten bytes are six bytes for the longest length/
   4412    distance pair plus four bytes for overloading the bit buffer. */
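/* (A length/distance pair needs at most 15 + 5 bits for the length code plus
   its extra bits and 15 + 13 bits for the distance code plus its extra bits,
   i.e. 48 bits or six bytes; the remaining four bytes cover GRABBITS loading
   whole bytes into the bit buffer beyond what is strictly required.) */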
   4413 
   4414 local int inflate_fast(bl, bd, tl, td, s, z)
   4415 uInt bl, bd;
   4416 inflate_huft *tl, *td;
   4417 inflate_blocks_statef *s;
   4418 z_stream *z;
   4419 {
   4420   inflate_huft *t;      /* temporary pointer */
   4421   uInt e;               /* extra bits or operation */
   4422   uLong b;              /* bit buffer */
   4423   uInt k;               /* bits in bit buffer */
   4424   Bytef *p;             /* input data pointer */
   4425   uInt n;               /* bytes available there */
   4426   Bytef *q;             /* output window write pointer */
   4427   uInt m;               /* bytes to end of window or read pointer */
   4428   uInt ml;              /* mask for literal/length tree */
   4429   uInt md;              /* mask for distance tree */
   4430   uInt c;               /* bytes to copy */
   4431   uInt d;               /* distance back to copy from */
   4432   Bytef *r;             /* copy source pointer */
   4433 
   4434   /* load input, output, bit values */
   4435   LOAD
   4436 
   4437   /* initialize masks */
   4438   ml = inflate_mask[bl];
   4439   md = inflate_mask[bd];
   4440 
   4441   /* do until not enough input or output space for fast loop */
   4442   do {                          /* assume called with m >= 258 && n >= 10 */
   4443     /* get literal/length code */
   4444     GRABBITS(20)                /* max bits for literal/length code */
   4445     if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
   4446     {
   4447       DUMPBITS(t->bits)
   4448       Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
   4449                 "inflate:         * literal '%c'\n" :
   4450                 "inflate:         * literal 0x%02x\n", t->base));
   4451       *q++ = (Byte)t->base;
   4452       m--;
   4453       continue;
   4454     }
   4455     do {
   4456       DUMPBITS(t->bits)
   4457       if (e & 16)
   4458       {
   4459         /* get extra bits for length */
   4460         e &= 15;
   4461         c = t->base + ((uInt)b & inflate_mask[e]);
   4462         DUMPBITS(e)
   4463         Tracevv((stderr, "inflate:         * length %u\n", c));
   4464 
   4465         /* decode distance base of block to copy */
   4466         GRABBITS(15);           /* max bits for distance code */
   4467         e = (t = td + ((uInt)b & md))->exop;
   4468         do {
   4469           DUMPBITS(t->bits)
   4470           if (e & 16)
   4471           {
   4472             /* get extra bits to add to distance base */
   4473             e &= 15;
   4474             GRABBITS(e)         /* get extra bits (up to 13) */
   4475             d = t->base + ((uInt)b & inflate_mask[e]);
   4476             DUMPBITS(e)
   4477             Tracevv((stderr, "inflate:         * distance %u\n", d));
   4478 
   4479             /* do the copy */
   4480             m -= c;
   4481             if ((uInt)(q - s->window) >= d)     /* offset before dest */
   4482             {                                   /*  just copy */
   4483               r = q - d;
   4484               *q++ = *r++;  c--;        /* minimum count is three, */
   4485               *q++ = *r++;  c--;        /*  so unroll loop a little */
   4486             }
   4487             else                        /* else offset after destination */
   4488             {
   4489               e = d - (q - s->window);  /* bytes from offset to end */
   4490               r = s->end - e;           /* pointer to offset */
   4491               if (c > e)                /* if source crosses, */
   4492               {
   4493                 c -= e;                 /* copy to end of window */
   4494                 do {
   4495                   *q++ = *r++;
   4496                 } while (--e);
   4497                 r = s->window;          /* copy rest from start of window */
   4498               }
   4499             }
   4500             do {                        /* copy all or what's left */
   4501               *q++ = *r++;
   4502             } while (--c);
   4503             break;
   4504           }
   4505           else if ((e & 64) == 0)
   4506             e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop;
   4507           else
   4508           {
   4509             z->msg = "invalid distance code";
   4510             UNGRAB
   4511             UPDATE
   4512             return Z_DATA_ERROR;
   4513           }
   4514         } while (1);
   4515         break;
   4516       }
   4517       if ((e & 64) == 0)
   4518       {
   4519         if ((e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop) == 0)
   4520         {
   4521           DUMPBITS(t->bits)
   4522           Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
   4523                     "inflate:         * literal '%c'\n" :
   4524                     "inflate:         * literal 0x%02x\n", t->base));
   4525           *q++ = (Byte)t->base;
   4526           m--;
   4527           break;
   4528         }
   4529       }
   4530       else if (e & 32)
   4531       {
   4532         Tracevv((stderr, "inflate:         * end of block\n"));
   4533         UNGRAB
   4534         UPDATE
   4535         return Z_STREAM_END;
   4536       }
   4537       else
   4538       {
   4539         z->msg = "invalid literal/length code";
   4540         UNGRAB
   4541         UPDATE
   4542         return Z_DATA_ERROR;
   4543       }
   4544     } while (1);
   4545   } while (m >= 258 && n >= 10);
   4546 
   4547   /* not enough input or output--restore pointers and return */
   4548   UNGRAB
   4549   UPDATE
   4550   return Z_OK;
   4551 }
   4552 
   4553 
   4554 /*+++++*/
   4555 /* zutil.c -- target dependent utility functions for the compression library
   4556  * Copyright (C) 1995 Jean-loup Gailly.
   4557  * For conditions of distribution and use, see copyright notice in zlib.h
   4558  */
   4559 
   4560 /* From: zutil.c,v 1.8 1995/05/03 17:27:12 jloup Exp */
   4561 
   4562 char *zlib_version = ZLIB_VERSION;
   4563 
   4564 char *z_errmsg[] = {
   4565 "stream end",          /* Z_STREAM_END    1 */
   4566 "",                    /* Z_OK            0 */
   4567 "file error",          /* Z_ERRNO        (-1) */
   4568 "stream error",        /* Z_STREAM_ERROR (-2) */
   4569 "data error",          /* Z_DATA_ERROR   (-3) */
   4570 "insufficient memory", /* Z_MEM_ERROR    (-4) */
   4571 "buffer error",        /* Z_BUF_ERROR    (-5) */
   4572 ""};
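/* Illustrative sketch only (hypothetical helper): as the comments above show,
   the table runs from Z_STREAM_END (1) down to Z_BUF_ERROR (-5), so an error
   code err in that range indexes the table as z_errmsg[1 - err]: */
#if 0
local char *example_errmsg(err)
int err;                /* Z_BUF_ERROR (-5) .. Z_STREAM_END (1) */
{
  return z_errmsg[1 - err];     /* e.g. Z_DATA_ERROR (-3) -> "data error" */
}
#endif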
   4573 
   4574 
   4575 /*+++++*/
   4576 /* adler32.c -- compute the Adler-32 checksum of a data stream
   4577  * Copyright (C) 1995 Mark Adler
   4578  * For conditions of distribution and use, see copyright notice in zlib.h
   4579  */
   4580 
   4581 /* From: adler32.c,v 1.6 1995/05/03 17:27:08 jloup Exp */
   4582 
   4583 #define BASE 65521L /* largest prime smaller than 65536 */
   4584 #define NMAX 5552
   4585 /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
   4586 
   4587 #define DO1(buf)  {s1 += *buf++; s2 += s1;}
   4588 #define DO2(buf)  DO1(buf); DO1(buf);
   4589 #define DO4(buf)  DO2(buf); DO2(buf);
   4590 #define DO8(buf)  DO4(buf); DO4(buf);
   4591 #define DO16(buf) DO8(buf); DO8(buf);
   4592 
   4593 /* ========================================================================= */
   4594 uLong adler32(adler, buf, len)
   4595     uLong adler;
   4596     Bytef *buf;
   4597     uInt len;
   4598 {
   4599     unsigned long s1 = adler & 0xffff;
   4600     unsigned long s2 = (adler >> 16) & 0xffff;
   4601     int k;
   4602 
   4603     if (buf == Z_NULL) return 1L;
   4604 
   4605     while (len > 0) {
   4606         k = len < NMAX ? len : NMAX;
   4607         len -= k;
   4608         while (k >= 16) {
   4609             DO16(buf);
   4610             k -= 16;
   4611         }
   4612         if (k != 0) do {
   4613             DO1(buf);
   4614         } while (--k);
   4615         s1 %= BASE;
   4616         s2 %= BASE;
   4617     }
   4618     return (s2 << 16) | s1;
   4619 }
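/* Illustrative usage sketch only (hypothetical helper): the checksum is kept
   as a running value, seeded with the value returned for a Z_NULL buffer (1L)
   and folded in a chunk at a time: */
#if 0
local uLong example_checksum(data, len, chunk)
Bytef *data;
uInt len;
uInt chunk;             /* bytes to process per call (assumed > 0) */
{
  uLong a = adler32(0L, Z_NULL, 0);     /* initial value, 1L */
  uInt n;

  while (len > 0)
  {
    n = len < chunk ? len : chunk;
    a = adler32(a, data, n);            /* fold in the next chunk */
    data += n;
    len -= n;
  }
  return a;
}
#endif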
   4620