      1 /*	$NetBSD: zlib.c,v 1.2 1996/03/16 23:55:40 christos Exp $	*/
      2 
      3 /*
      4  * This file is derived from various .h and .c files from the zlib-0.95
      5  * distribution by Jean-loup Gailly and Mark Adler, with some additions
      6  * by Paul Mackerras to aid in implementing Deflate compression and
      7  * decompression for PPP packets.  See zlib.h for conditions of
      8  * distribution and use.
      9  *
     10  * Changes that have been made include:
     11  * - changed functions not used outside this file to "local"
     12  * - added minCompression parameter to deflateInit2
     13  * - added Z_PACKET_FLUSH (see zlib.h for details)
     14  * - added inflateIncomp
     15  */
     16 
     17 
     18 /*+++++*/
     19 /* zutil.h -- internal interface and configuration of the compression library
     20  * Copyright (C) 1995 Jean-loup Gailly.
     21  * For conditions of distribution and use, see copyright notice in zlib.h
     22  */
     23 
     24 /* WARNING: this file should *not* be used by applications. It is
     25    part of the implementation of the compression library and is
     26    subject to change. Applications should only use zlib.h.
     27  */
     28 
     29 /* From: zutil.h,v 1.9 1995/05/03 17:27:12 jloup Exp */
     30 
     31 #define _Z_UTIL_H
     32 
     33 #include "zlib.h"
     34 
     35 #ifdef STDC
     36 #  include <string.h>
     37 #endif
     38 
     39 #ifndef local
     40 #  define local static
     41 #endif
     42 /* compile with -Dlocal if your debugger can't find static symbols */
     43 
     44 #define FAR
     45 
     46 typedef unsigned char  uch;
     47 typedef uch FAR uchf;
     48 typedef unsigned short ush;
     49 typedef ush FAR ushf;
     50 typedef unsigned long  ulg;
     51 
     52 extern char *z_errmsg[]; /* indexed by 1-zlib_error */
     53 
     54 #define ERR_RETURN(strm,err) return (strm->msg=z_errmsg[1-err], err)
     55 /* To be used only when the state is known to be valid */
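         /* For example, with the usual zlib error codes (Z_MEM_ERROR is -4 in
          * zlib.h), ERR_RETURN(strm, Z_MEM_ERROR) stores z_errmsg[1-(-4)], i.e.
          * z_errmsg[5], in strm->msg and then returns -4.
          */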
     56 
     57 #ifndef NULL
     58 #define NULL	((void *) 0)
     59 #endif
     60 
     61         /* common constants */
     62 
     63 #define DEFLATED   8
     64 
     65 #ifndef DEF_WBITS
     66 #  define DEF_WBITS MAX_WBITS
     67 #endif
     68 /* default windowBits for decompression. MAX_WBITS is for compression only */
     69 
     70 #if MAX_MEM_LEVEL >= 8
     71 #  define DEF_MEM_LEVEL 8
     72 #else
     73 #  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
     74 #endif
     75 /* default memLevel */
     76 
     77 #define STORED_BLOCK 0
     78 #define STATIC_TREES 1
     79 #define DYN_TREES    2
     80 /* The three kinds of block type */
     81 
     82 #define MIN_MATCH  3
     83 #define MAX_MATCH  258
     84 /* The minimum and maximum match lengths */
     85 
     86          /* functions */
     87 
     88 #if defined(KERNEL) || defined(_KERNEL)
     89 #  define zmemcpy(d, s, n)	bcopy((s), (d), (n))
     90 #  define zmemzero		bzero
     91 #else
     92 #if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
     93 #  define HAVE_MEMCPY
     94 #endif
     95 #ifdef HAVE_MEMCPY
     96 #    define zmemcpy memcpy
     97 #    define zmemzero(dest, len) memset(dest, 0, len)
     98 #else
     99    extern void zmemcpy  OF((Bytef* dest, Bytef* source, uInt len));
    100    extern void zmemzero OF((Bytef* dest, uInt len));
    101 #endif
    102 #endif
    103 
    104 /* Diagnostic functions */
    105 #ifdef DEBUG_ZLIB
    106 #  include <stdio.h>
    107 #  ifndef verbose
    108 #    define verbose 0
    109 #  endif
    110 #  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
    111 #  define Trace(x) fprintf x
    112 #  define Tracev(x) {if (verbose) fprintf x ;}
    113 #  define Tracevv(x) {if (verbose>1) fprintf x ;}
    114 #  define Tracec(c,x) {if (verbose && (c)) fprintf x ;}
    115 #  define Tracecv(c,x) {if (verbose>1 && (c)) fprintf x ;}
    116 #else
    117 #  define Assert(cond,msg)
    118 #  define Trace(x)
    119 #  define Tracev(x)
    120 #  define Tracevv(x)
    121 #  define Tracec(c,x)
    122 #  define Tracecv(c,x)
    123 #endif
    124 
    125 
    126 typedef uLong (*check_func) OF((uLong check, Bytef *buf, uInt len));
    127 
    128 /* voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size)); */
    129 /* void   zcfree  OF((voidpf opaque, voidpf ptr)); */
    130 
    131 #define ZALLOC(strm, items, size) \
    132            (*((strm)->zalloc))((strm)->opaque, (items), (size))
    133 #define ZFREE(strm, addr, size)	\
    134 	   (*((strm)->zfree))((strm)->opaque, (voidpf)(addr), (size))
    135 #define TRY_FREE(s, p, n) {if (p) ZFREE(s, p, n);}
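
         #if 0
         /* Illustrative sketch: a minimal zalloc/zfree pair an application
          * could install in its z_stream before calling deflateInit().  The
          * names are invented for illustration; the signatures follow the
          * zcalloc/zcfree prototypes commented above.  ZALLOC/ZFREE simply
          * forward to whatever hooks the application supplies.
          */
         #include <stdlib.h>

         local voidpf example_zalloc(opaque, items, size)
             voidpf opaque;
             unsigned items, size;
         {
             (void)opaque;                        /* no per-stream context */
             return (voidpf)calloc(items, size);  /* zeroed allocation */
         }

         local void example_zfree(opaque, ptr)
             voidpf opaque;
             voidpf ptr;
         {
             (void)opaque;
             free(ptr);
         }
         #endif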
    136 
    137 /* deflate.h -- internal compression state
    138  * Copyright (C) 1995 Jean-loup Gailly
    139  * For conditions of distribution and use, see copyright notice in zlib.h
    140  */
    141 
    142 /* WARNING: this file should *not* be used by applications. It is
    143    part of the implementation of the compression library and is
    144    subject to change. Applications should only use zlib.h.
    145  */
    146 
    147 
    148 /*+++++*/
    149 /* From: deflate.h,v 1.5 1995/05/03 17:27:09 jloup Exp */
    150 
    151 /* ===========================================================================
    152  * Internal compression state.
    153  */
    154 
    155 /* Data type */
    156 #define BINARY  0
    157 #define ASCII   1
    158 #define UNKNOWN 2
    159 
    160 #define LENGTH_CODES 29
    161 /* number of length codes, not counting the special END_BLOCK code */
    162 
    163 #define LITERALS  256
    164 /* number of literal bytes 0..255 */
    165 
    166 #define L_CODES (LITERALS+1+LENGTH_CODES)
    167 /* number of Literal or Length codes, including the END_BLOCK code */
    168 
    169 #define D_CODES   30
    170 /* number of distance codes */
    171 
    172 #define BL_CODES  19
    173 /* number of codes used to transfer the bit lengths */
    174 
    175 #define HEAP_SIZE (2*L_CODES+1)
    176 /* maximum heap size */
    177 
    178 #define MAX_BITS 15
     179 /* No code may exceed MAX_BITS bits */
    180 
    181 #define INIT_STATE    42
    182 #define BUSY_STATE   113
    183 #define FLUSH_STATE  124
    184 #define FINISH_STATE 666
    185 /* Stream status */
    186 
    187 
    188 /* Data structure describing a single value and its code string. */
    189 typedef struct ct_data_s {
    190     union {
    191         ush  freq;       /* frequency count */
    192         ush  code;       /* bit string */
    193     } fc;
    194     union {
    195         ush  dad;        /* father node in Huffman tree */
    196         ush  len;        /* length of bit string */
    197     } dl;
    198 } FAR ct_data;
    199 
    200 #define Freq fc.freq
    201 #define Code fc.code
    202 #define Dad  dl.dad
    203 #define Len  dl.len
    204 
    205 typedef struct static_tree_desc_s  static_tree_desc;
    206 
    207 typedef struct tree_desc_s {
    208     ct_data *dyn_tree;           /* the dynamic tree */
    209     int     max_code;            /* largest code with non zero frequency */
    210     static_tree_desc *stat_desc; /* the corresponding static tree */
    211 } FAR tree_desc;
    212 
    213 typedef ush Pos;
    214 typedef Pos FAR Posf;
    215 typedef unsigned IPos;
    216 
    217 /* A Pos is an index in the character window. We use short instead of int to
    218  * save space in the various tables. IPos is used only for parameter passing.
    219  */
    220 
    221 typedef struct deflate_state {
    222     z_stream *strm;      /* pointer back to this zlib stream */
    223     int   status;        /* as the name implies */
    224     Bytef *pending_buf;  /* output still pending */
    225     Bytef *pending_out;  /* next pending byte to output to the stream */
    226     int   pending;       /* nb of bytes in the pending buffer */
    227     uLong adler;         /* adler32 of uncompressed data */
    228     int   noheader;      /* suppress zlib header and adler32 */
    229     Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
    230     Byte  method;        /* STORED (for zip only) or DEFLATED */
    231     int	  minCompr;	 /* min size decrease for Z_FLUSH_NOSTORE */
    232 
    233                 /* used by deflate.c: */
    234 
    235     uInt  w_size;        /* LZ77 window size (32K by default) */
    236     uInt  w_bits;        /* log2(w_size)  (8..16) */
    237     uInt  w_mask;        /* w_size - 1 */
    238 
    239     Bytef *window;
    240     /* Sliding window. Input bytes are read into the second half of the window,
    241      * and move to the first half later to keep a dictionary of at least wSize
    242      * bytes. With this organization, matches are limited to a distance of
    243      * wSize-MAX_MATCH bytes, but this ensures that IO is always
    244      * performed with a length multiple of the block size. Also, it limits
    245      * the window size to 64K, which is quite useful on MSDOS.
    246      * To do: use the user input buffer as sliding window.
    247      */
    248 
    249     ulg window_size;
    250     /* Actual size of window: 2*wSize, except when the user input buffer
    251      * is directly used as sliding window.
    252      */
    253 
    254     Posf *prev;
    255     /* Link to older string with same hash index. To limit the size of this
    256      * array to 64K, this link is maintained only for the last 32K strings.
    257      * An index in this array is thus a window index modulo 32K.
    258      */
    259 
    260     Posf *head; /* Heads of the hash chains or NIL. */
    261 
    262     uInt  ins_h;          /* hash index of string to be inserted */
    263     uInt  hash_size;      /* number of elements in hash table */
    264     uInt  hash_bits;      /* log2(hash_size) */
    265     uInt  hash_mask;      /* hash_size-1 */
    266 
    267     uInt  hash_shift;
    268     /* Number of bits by which ins_h must be shifted at each input
    269      * step. It must be such that after MIN_MATCH steps, the oldest
    270      * byte no longer takes part in the hash key, that is:
    271      *   hash_shift * MIN_MATCH >= hash_bits
    272      */
    273 
    274     long block_start;
    275     /* Window position at the beginning of the current output block. Gets
    276      * negative when the window is moved backwards.
    277      */
    278 
    279     uInt match_length;           /* length of best match */
    280     IPos prev_match;             /* previous match */
    281     int match_available;         /* set if previous match exists */
    282     uInt strstart;               /* start of string to insert */
    283     uInt match_start;            /* start of matching string */
    284     uInt lookahead;              /* number of valid bytes ahead in window */
    285 
    286     uInt prev_length;
    287     /* Length of the best match at previous step. Matches not greater than this
    288      * are discarded. This is used in the lazy match evaluation.
    289      */
    290 
    291     uInt max_chain_length;
    292     /* To speed up deflation, hash chains are never searched beyond this
    293      * length.  A higher limit improves compression ratio but degrades the
    294      * speed.
    295      */
    296 
    297     uInt max_lazy_match;
    298     /* Attempt to find a better match only when the current match is strictly
    299      * smaller than this value. This mechanism is used only for compression
    300      * levels >= 4.
    301      */
    302 #   define max_insert_length  max_lazy_match
    303     /* Insert new strings in the hash table only if the match length is not
    304      * greater than this length. This saves time but degrades compression.
    305      * max_insert_length is used only for compression levels <= 3.
    306      */
    307 
    308     int level;    /* compression level (1..9) */
    309     int strategy; /* favor or force Huffman coding*/
    310 
    311     uInt good_match;
    312     /* Use a faster search when the previous match is longer than this */
    313 
    314      int nice_match; /* Stop searching when current match exceeds this */
    315 
    316                 /* used by trees.c: */
     317     /* Didn't use the ct_data typedef below to suppress a compiler warning */
    318     struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
    319     struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
    320     struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */
    321 
    322     struct tree_desc_s l_desc;               /* desc. for literal tree */
    323     struct tree_desc_s d_desc;               /* desc. for distance tree */
    324     struct tree_desc_s bl_desc;              /* desc. for bit length tree */
    325 
    326     ush bl_count[MAX_BITS+1];
    327     /* number of codes at each bit length for an optimal tree */
    328 
    329     int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
    330     int heap_len;               /* number of elements in the heap */
    331     int heap_max;               /* element of largest frequency */
    332     /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
    333      * The same heap array is used to build all trees.
    334      */
    335 
    336     uch depth[2*L_CODES+1];
    337     /* Depth of each subtree used as tie breaker for trees of equal frequency
    338      */
    339 
    340     uchf *l_buf;          /* buffer for literals or lengths */
    341 
    342     uInt  lit_bufsize;
    343     /* Size of match buffer for literals/lengths.  There are 4 reasons for
    344      * limiting lit_bufsize to 64K:
    345      *   - frequencies can be kept in 16 bit counters
    346      *   - if compression is not successful for the first block, all input
    347      *     data is still in the window so we can still emit a stored block even
    348      *     when input comes from standard input.  (This can also be done for
    349      *     all blocks if lit_bufsize is not greater than 32K.)
    350      *   - if compression is not successful for a file smaller than 64K, we can
    351      *     even emit a stored file instead of a stored block (saving 5 bytes).
    352      *     This is applicable only for zip (not gzip or zlib).
    353      *   - creating new Huffman trees less frequently may not provide fast
    354      *     adaptation to changes in the input data statistics. (Take for
    355      *     example a binary file with poorly compressible code followed by
    356      *     a highly compressible string table.) Smaller buffer sizes give
    357      *     fast adaptation but have of course the overhead of transmitting
    358      *     trees more frequently.
    359      *   - I can't count above 4
    360      */
    361 
    362     uInt last_lit;      /* running index in l_buf */
    363 
    364     ushf *d_buf;
    365     /* Buffer for distances. To simplify the code, d_buf and l_buf have
    366      * the same number of elements. To use different lengths, an extra flag
    367      * array would be necessary.
    368      */
    369 
    370     ulg opt_len;        /* bit length of current block with optimal trees */
    371     ulg static_len;     /* bit length of current block with static trees */
    372     ulg compressed_len; /* total bit length of compressed file */
    373     uInt matches;       /* number of string matches in current block */
    374     int last_eob_len;   /* bit length of EOB code for last block */
    375 
    376 #ifdef DEBUG_ZLIB
    377     ulg bits_sent;      /* bit length of the compressed data */
    378 #endif
    379 
    380     ush bi_buf;
    381     /* Output buffer. bits are inserted starting at the bottom (least
    382      * significant bits).
    383      */
    384     int bi_valid;
    385     /* Number of valid bits in bi_buf.  All bits above the last valid bit
    386      * are always zero.
    387      */
    388 
    389     uInt blocks_in_packet;
    390     /* Number of blocks produced since the last time Z_PACKET_FLUSH
    391      * was used.
    392      */
    393 
    394 } FAR deflate_state;
    395 
    396 /* Output a byte on the stream.
    397  * IN assertion: there is enough room in pending_buf.
    398  */
    399 #define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
    400 
    401 
    402 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
    403 /* Minimum amount of lookahead, except at the end of the input file.
    404  * See deflate.c for comments about the MIN_MATCH+1.
    405  */
    406 
    407 #define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
    408 /* In order to simplify the code, particularly on 16 bit machines, match
    409  * distances are limited to MAX_DIST instead of WSIZE.
    410  */
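
         /* For instance, with MAX_MATCH 258 and MIN_MATCH 3, MIN_LOOKAHEAD is
          * 262, so for the default 32K window (w_size == 32768) MAX_DIST(s)
          * is 32768 - 262 == 32506.
          */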
    411 
    412         /* in trees.c */
    413 local void ct_init       OF((deflate_state *s));
    414 local int  ct_tally      OF((deflate_state *s, int dist, int lc));
    415 local ulg ct_flush_block OF((deflate_state *s, charf *buf, ulg stored_len,
    416 			     int flush));
    417 local void ct_align      OF((deflate_state *s));
    418 local void ct_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
    419                           int eof));
    420 local void ct_stored_type_only OF((deflate_state *s));
    421 
    422 
    423 /*+++++*/
    424 /* deflate.c -- compress data using the deflation algorithm
    425  * Copyright (C) 1995 Jean-loup Gailly.
    426  * For conditions of distribution and use, see copyright notice in zlib.h
    427  */
    428 
    429 /*
    430  *  ALGORITHM
    431  *
    432  *      The "deflation" process depends on being able to identify portions
    433  *      of the input text which are identical to earlier input (within a
    434  *      sliding window trailing behind the input currently being processed).
    435  *
    436  *      The most straightforward technique turns out to be the fastest for
    437  *      most input files: try all possible matches and select the longest.
    438  *      The key feature of this algorithm is that insertions into the string
    439  *      dictionary are very simple and thus fast, and deletions are avoided
    440  *      completely. Insertions are performed at each input character, whereas
    441  *      string matches are performed only when the previous match ends. So it
    442  *      is preferable to spend more time in matches to allow very fast string
    443  *      insertions and avoid deletions. The matching algorithm for small
    444  *      strings is inspired from that of Rabin & Karp. A brute force approach
    445  *      is used to find longer strings when a small match has been found.
    446  *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
    447  *      (by Leonid Broukhis).
    448  *         A previous version of this file used a more sophisticated algorithm
    449  *      (by Fiala and Greene) which is guaranteed to run in linear amortized
    450  *      time, but has a larger average cost, uses more memory and is patented.
    451  *      However the F&G algorithm may be faster for some highly redundant
    452  *      files if the parameter max_chain_length (described below) is too large.
    453  *
    454  *  ACKNOWLEDGEMENTS
    455  *
    456  *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
    457  *      I found it in 'freeze' written by Leonid Broukhis.
    458  *      Thanks to many people for bug reports and testing.
    459  *
    460  *  REFERENCES
    461  *
    462  *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
    463  *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
    464  *
    465  *      A description of the Rabin and Karp algorithm is given in the book
    466  *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
    467  *
     468  *      Fiala, E.R., and Greene, D.H.
     469  *         Data Compression with Finite Windows, Comm. ACM, 32, 4 (1989) 490-505
    470  *
    471  */
    472 
    473 /* From: deflate.c,v 1.8 1995/05/03 17:27:08 jloup Exp */
    474 
    475 #if 0
    476 local char zlib_copyright[] = " deflate Copyright 1995 Jean-loup Gailly ";
    477 #endif
    478 /*
    479   If you use the zlib library in a product, an acknowledgment is welcome
    480   in the documentation of your product. If for some reason you cannot
    481   include such an acknowledgment, I would appreciate that you keep this
    482   copyright string in the executable of your product.
    483  */
    484 
    485 #define NIL 0
    486 /* Tail of hash chains */
    487 
    488 #ifndef TOO_FAR
    489 #  define TOO_FAR 4096
    490 #endif
    491 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
    492 
    493 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
    494 /* Minimum amount of lookahead, except at the end of the input file.
    495  * See deflate.c for comments about the MIN_MATCH+1.
    496  */
    497 
    498 /* Values for max_lazy_match, good_match and max_chain_length, depending on
    499  * the desired pack level (0..9). The values given below have been tuned to
    500  * exclude worst case performance for pathological files. Better values may be
    501  * found for specific files.
    502  */
    503 
    504 typedef struct config_s {
    505    ush good_length; /* reduce lazy search above this match length */
    506    ush max_lazy;    /* do not perform lazy search above this match length */
    507    ush nice_length; /* quit search above this match length */
    508    ush max_chain;
    509 } config;
    510 
    511 local config configuration_table[10] = {
    512 /*      good lazy nice chain */
    513 /* 0 */ {0,    0,  0,    0},  /* store only */
    514 /* 1 */ {4,    4,  8,    4},  /* maximum speed, no lazy matches */
    515 /* 2 */ {4,    5, 16,    8},
    516 /* 3 */ {4,    6, 32,   32},
    517 
    518 /* 4 */ {4,    4, 16,   16},  /* lazy matches */
    519 /* 5 */ {8,   16, 32,   32},
    520 /* 6 */ {8,   16, 128, 128},
    521 /* 7 */ {8,   32, 128, 256},
    522 /* 8 */ {32, 128, 258, 1024},
    523 /* 9 */ {32, 258, 258, 4096}}; /* maximum compression */
    524 
    525 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
    526  * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
    527  * meaning.
    528  */
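
         /* As a concrete reading of the table: at the default level 6, lazy
          * matching is attempted only while the current match is shorter than
          * max_lazy (16), at most max_chain (128) hash chain links are examined
          * per search (a quarter of that once the previous match already
          * reaches good_length (8), see longest_match() below), and a search
          * stops as soon as a match of nice_length (128) or more is found.
          */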
    529 
    530 #define EQUAL 0
    531 /* result of memcmp for equal strings */
    532 
    533 /* ===========================================================================
    534  *  Prototypes for local functions.
    535  */
    536 
    537 local void fill_window   OF((deflate_state *s));
    538 local int  deflate_fast  OF((deflate_state *s, int flush));
    539 local int  deflate_slow  OF((deflate_state *s, int flush));
    540 local void lm_init       OF((deflate_state *s));
    541 local int longest_match  OF((deflate_state *s, IPos cur_match));
    542 local void putShortMSB   OF((deflate_state *s, uInt b));
    543 local void flush_pending OF((z_stream *strm));
    544 local int read_buf       OF((z_stream *strm, charf *buf, unsigned size));
    545 #ifdef ASMV
    546       void match_init OF((void)); /* asm code initialization */
    547 #endif
    548 
    549 #ifdef DEBUG_ZLIB
    550 local  void check_match OF((deflate_state *s, IPos start, IPos match,
    551                             int length));
    552 #endif
    553 
    554 
    555 /* ===========================================================================
    556  * Update a hash value with the given input byte
     557  * IN  assertion: all calls to UPDATE_HASH are made with consecutive
    558  *    input characters, so that a running hash key can be computed from the
    559  *    previous key instead of complete recalculation each time.
    560  */
    561 #define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
    562 
    563 
    564 /* ===========================================================================
    565  * Insert string str in the dictionary and set match_head to the previous head
    566  * of the hash chain (the most recent string with same hash key). Return
    567  * the previous length of the hash chain.
     568  * IN  assertion: all calls to INSERT_STRING are made with consecutive
    569  *    input characters and the first MIN_MATCH bytes of str are valid
    570  *    (except for the last MIN_MATCH-1 bytes of the input file).
    571  */
    572 #define INSERT_STRING(s, str, match_head) \
    573    (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    574     s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
    575     s->head[s->ins_h] = (str))
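
         #if 0
         /* Illustrative sketch: how the rolling hash is seeded and then
          * advanced with INSERT_STRING().  It mirrors what fill_window() and
          * the deflate_*() loops below actually do; the function name and
          * the loop bound are invented for illustration only.
          */
         local void example_insert_strings(s)
             deflate_state *s;
         {
             IPos head = NIL;   /* previous head of the current hash chain */
             uInt pos;

             if (s->lookahead < MIN_MATCH) return;

             /* Seed ins_h with the first MIN_MATCH-1 window bytes. */
             s->ins_h = s->window[0];
             UPDATE_HASH(s, s->ins_h, s->window[1]);

             /* Each INSERT_STRING() folds window[pos+MIN_MATCH-1] into the
              * hash, links position pos at the head of its chain, and leaves
              * the previous head (or NIL) in `head' for longest_match().
              */
             for (pos = 0; pos + MIN_MATCH <= s->lookahead; pos++) {
                 INSERT_STRING(s, pos, head);
             }
             (void)head;
         }
         #endif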
    576 
    577 /* ===========================================================================
    578  * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
    579  * prev[] will be initialized on the fly.
    580  */
    581 #define CLEAR_HASH(s) \
    582     s->head[s->hash_size-1] = NIL; \
    583     zmemzero((charf *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
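         /* (For example, with hash_size == 32768 and two-byte Pos entries,
          * hash_size*sizeof(*head) would be 65536, which overflows a 16-bit
          * unsigned, while (hash_size-1)*2 == 65534 still fits -- hence the
          * separate assignment of the last slot above.)
          */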
    584 
    585 /* ========================================================================= */
    586 int deflateInit (strm, level)
    587     z_stream *strm;
    588     int level;
    589 {
    590     return deflateInit2 (strm, level, DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
    591 			 0, 0);
    592     /* To do: ignore strm->next_in if we use it as window */
    593 }
    594 
    595 /* ========================================================================= */
    596 int deflateInit2 (strm, level, method, windowBits, memLevel,
    597 		  strategy, minCompression)
    598     z_stream *strm;
    599     int  level;
    600     int  method;
    601     int  windowBits;
    602     int  memLevel;
    603     int  strategy;
    604     int  minCompression;
    605 {
    606     deflate_state *s;
    607     int noheader = 0;
    608 
    609     if (strm == Z_NULL) return Z_STREAM_ERROR;
    610 
    611     strm->msg = Z_NULL;
    612 /*    if (strm->zalloc == Z_NULL) strm->zalloc = zcalloc; */
    613 /*    if (strm->zfree == Z_NULL) strm->zfree = zcfree; */
    614 
    615     if (level == Z_DEFAULT_COMPRESSION) level = 6;
    616 
    617     if (windowBits < 0) { /* undocumented feature: suppress zlib header */
    618         noheader = 1;
    619         windowBits = -windowBits;
    620     }
    621     if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != DEFLATED ||
    622         windowBits < 8 || windowBits > 15 || level < 1 || level > 9) {
    623         return Z_STREAM_ERROR;
    624     }
    625     s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    626     if (s == Z_NULL) return Z_MEM_ERROR;
    627     strm->state = (struct internal_state FAR *)s;
    628     s->strm = strm;
    629 
    630     s->noheader = noheader;
    631     s->w_bits = windowBits;
    632     s->w_size = 1 << s->w_bits;
    633     s->w_mask = s->w_size - 1;
    634 
    635     s->hash_bits = memLevel + 7;
    636     s->hash_size = 1 << s->hash_bits;
    637     s->hash_mask = s->hash_size - 1;
    638     s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
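             /* With the default memLevel 8, for example, hash_bits is 15,
              * hash_size is 32768 and hash_shift is (15+3-1)/3 == 5, so after
              * MIN_MATCH updates the oldest byte has been shifted out of the
              * hash key (3*5 == 15 >= hash_bits).
              */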
    639 
    640     s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    641     s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    642     s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));
    643 
    644     s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */
    645 
    646     s->pending_buf = (uchf *) ZALLOC(strm, s->lit_bufsize, 2*sizeof(ush));
    647 
    648     if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
    649         s->pending_buf == Z_NULL) {
    650         strm->msg = z_errmsg[1-Z_MEM_ERROR];
    651         deflateEnd (strm);
    652         return Z_MEM_ERROR;
    653     }
    654     s->d_buf = (ushf *) &(s->pending_buf[s->lit_bufsize]);
    655     s->l_buf = (uchf *) &(s->pending_buf[3*s->lit_bufsize]);
    656     /* We overlay pending_buf and d_buf+l_buf. This works since the average
    657      * output size for (length,distance) codes is <= 32 bits (worst case
    658      * is 15+15+13=33).
    659      */
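             /* With the default memLevel 8, for example, lit_bufsize is 16384
              * and pending_buf is 65536 bytes; d_buf then starts at byte
              * offset 16384 (16384 ush distance slots) and l_buf at byte
              * offset 49152 (16384 literal/length bytes), overlaying the
              * upper three quarters of pending_buf.
              */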
    660 
    661     s->level = level;
    662     s->strategy = strategy;
    663     s->method = (Byte)method;
    664     s->minCompr = minCompression;
    665     s->blocks_in_packet = 0;
    666 
    667     return deflateReset(strm);
    668 }
    669 
    670 /* ========================================================================= */
    671 int deflateReset (strm)
    672     z_stream *strm;
    673 {
    674     deflate_state *s;
    675 
    676     if (strm == Z_NULL || strm->state == Z_NULL ||
    677         strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;
    678 
    679     strm->total_in = strm->total_out = 0;
    680     strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
    681     strm->data_type = Z_UNKNOWN;
    682 
    683     s = (deflate_state *)strm->state;
    684     s->pending = 0;
    685     s->pending_out = s->pending_buf;
    686 
    687     if (s->noheader < 0) {
    688         s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
    689     }
    690     s->status = s->noheader ? BUSY_STATE : INIT_STATE;
    691     s->adler = 1;
    692 
    693     ct_init(s);
    694     lm_init(s);
    695 
    696     return Z_OK;
    697 }
    698 
    699 /* =========================================================================
    700  * Put a short in the pending buffer. The 16-bit value is put in MSB order.
    701  * IN assertion: the stream state is correct and there is enough room in
    702  * pending_buf.
    703  */
    704 local void putShortMSB (s, b)
    705     deflate_state *s;
    706     uInt b;
    707 {
    708     put_byte(s, (Byte)(b >> 8));
    709     put_byte(s, (Byte)(b & 0xff));
    710 }
    711 
    712 /* =========================================================================
    713  * Flush as much pending output as possible.
    714  */
    715 local void flush_pending(strm)
    716     z_stream *strm;
    717 {
    718     deflate_state *state = (deflate_state *) strm->state;
    719     unsigned len = state->pending;
    720 
    721     if (len > strm->avail_out) len = strm->avail_out;
    722     if (len == 0) return;
    723 
    724     if (strm->next_out != NULL) {
    725 	zmemcpy(strm->next_out, state->pending_out, len);
    726 	strm->next_out += len;
    727     }
    728     state->pending_out += len;
    729     strm->total_out += len;
    730     strm->avail_out -= len;
    731     state->pending -= len;
    732     if (state->pending == 0) {
    733         state->pending_out = state->pending_buf;
    734     }
    735 }
    736 
    737 /* ========================================================================= */
    738 int deflate (strm, flush)
    739     z_stream *strm;
    740     int flush;
    741 {
    742     deflate_state *state = (deflate_state *) strm->state;
    743 
    744     if (strm == Z_NULL || state == Z_NULL) return Z_STREAM_ERROR;
    745 
    746     if (strm->next_in == Z_NULL && strm->avail_in != 0) {
    747         ERR_RETURN(strm, Z_STREAM_ERROR);
    748     }
    749     if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);
    750 
    751     state->strm = strm; /* just in case */
    752 
    753     /* Write the zlib header */
    754     if (state->status == INIT_STATE) {
    755 
    756         uInt header = (DEFLATED + ((state->w_bits-8)<<4)) << 8;
    757         uInt level_flags = (state->level-1) >> 1;
    758 
    759         if (level_flags > 3) level_flags = 3;
    760         header |= (level_flags << 6);
    761         header += 31 - (header % 31);
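                 /* Worked example: for the default 32K window (w_bits 15) and
                  * level 6, header starts as (8 + (7<<4)) << 8 == 0x7800,
                  * level_flags is 2 so the OR gives 0x7880 == 30848;
                  * 30848 % 31 == 3, so 28 is added, giving 0x789C -- a value
                  * divisible by 31, as the zlib header check bits require.
                  */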
    762 
    763         state->status = BUSY_STATE;
    764         putShortMSB(state, header);
    765     }
    766 
    767     /* Flush as much pending output as possible */
    768     if (state->pending != 0) {
    769         flush_pending(strm);
    770         if (strm->avail_out == 0) return Z_OK;
    771     }
    772 
    773     /* If we came back in here to get the last output from
    774      * a previous flush, we're done for now.
    775      */
    776     if (state->status == FLUSH_STATE) {
    777 	state->status = BUSY_STATE;
    778 	if (flush != Z_NO_FLUSH && flush != Z_FINISH)
    779 	    return Z_OK;
    780     }
    781 
    782     /* User must not provide more input after the first FINISH: */
    783     if (state->status == FINISH_STATE && strm->avail_in != 0) {
    784         ERR_RETURN(strm, Z_BUF_ERROR);
    785     }
    786 
    787     /* Start a new block or continue the current one.
    788      */
    789     if (strm->avail_in != 0 || state->lookahead != 0 ||
    790         (flush == Z_FINISH && state->status != FINISH_STATE)) {
    791         int quit;
    792 
    793         if (flush == Z_FINISH) {
    794             state->status = FINISH_STATE;
    795         }
    796         if (state->level <= 3) {
    797             quit = deflate_fast(state, flush);
    798         } else {
    799             quit = deflate_slow(state, flush);
    800         }
    801         if (quit || strm->avail_out == 0)
    802 	    return Z_OK;
    803         /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
    804          * of deflate should use the same flush parameter to make sure
    805          * that the flush is complete. So we don't have to output an
    806          * empty block here, this will be done at next call. This also
    807          * ensures that for a very small output buffer, we emit at most
    808          * one empty block.
    809          */
    810     }
    811 
    812     /* If a flush was requested, we have a little more to output now. */
    813     if (flush != Z_NO_FLUSH && flush != Z_FINISH
    814 	&& state->status != FINISH_STATE) {
    815 	switch (flush) {
    816 	case Z_PARTIAL_FLUSH:
    817 	    ct_align(state);
    818 	    break;
    819 	case Z_PACKET_FLUSH:
     820 	    /* Output just the 3-bit `stored' block type value, but do
     821 	       not emit the stored block's length bytes. */
    822 	    ct_stored_type_only(state);
    823 	    break;
    824 	default:
    825 	    ct_stored_block(state, (char*)0, 0L, 0);
    826 	    /* For a full flush, this empty block will be recognized
    827 	     * as a special marker by inflate_sync().
    828 	     */
    829 	    if (flush == Z_FULL_FLUSH) {
    830 		CLEAR_HASH(state);             /* forget history */
    831 	    }
    832 	}
    833 	flush_pending(strm);
    834 	if (strm->avail_out == 0) {
    835 	    /* We'll have to come back to get the rest of the output;
    836 	     * this ensures we don't output a second zero-length stored
    837 	     * block (or whatever).
    838 	     */
    839 	    state->status = FLUSH_STATE;
    840 	    return Z_OK;
    841 	}
    842     }
    843 
    844     Assert(strm->avail_out > 0, "bug2");
    845 
    846     if (flush != Z_FINISH) return Z_OK;
    847     if (state->noheader) return Z_STREAM_END;
    848 
    849     /* Write the zlib trailer (adler32) */
    850     putShortMSB(state, (uInt)(state->adler >> 16));
    851     putShortMSB(state, (uInt)(state->adler & 0xffff));
    852     flush_pending(strm);
    853     /* If avail_out is zero, the application will call deflate again
    854      * to flush the rest.
    855      */
    856     state->noheader = -1; /* write the trailer only once! */
    857     return state->pending != 0 ? Z_OK : Z_STREAM_END;
    858 }
    859 
    860 /* ========================================================================= */
    861 int deflateEnd (strm)
    862     z_stream *strm;
    863 {
    864     deflate_state *state = (deflate_state *) strm->state;
    865 
    866     if (strm == Z_NULL || state == Z_NULL) return Z_STREAM_ERROR;
    867 
    868     TRY_FREE(strm, state->window, state->w_size * 2 * sizeof(Byte));
    869     TRY_FREE(strm, state->prev, state->w_size * sizeof(Pos));
    870     TRY_FREE(strm, state->head, state->hash_size * sizeof(Pos));
    871     TRY_FREE(strm, state->pending_buf, state->lit_bufsize * 2 * sizeof(ush));
    872 
    873     ZFREE(strm, state, sizeof(deflate_state));
    874     strm->state = Z_NULL;
    875 
    876     return Z_OK;
    877 }
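
         #if 0
         /* Illustrative sketch: the calling pattern these routines expect.
          * The function name, buffer parameters and the zalloc/zfree hooks
          * (this variant requires the caller to supply them, see
          * deflateReset()) are invented for illustration; error handling is
          * kept minimal.
          */
         local int example_compress(src, srclen, dst, dstlen)
             Bytef *src, *dst;
             uInt srclen, dstlen;
         {
             z_stream strm;
             int err;

             strm.zalloc = example_zalloc;   /* hypothetical hooks (above) */
             strm.zfree  = example_zfree;
             strm.opaque = (voidpf)0;
             err = deflateInit(&strm, Z_DEFAULT_COMPRESSION);
             if (err != Z_OK) return err;

             strm.next_in   = src;
             strm.avail_in  = srclen;
             strm.next_out  = dst;
             strm.avail_out = dstlen;

             /* A single Z_FINISH call suffices when dst is known to be large
              * enough; otherwise deflate() would be called repeatedly until
              * it returns Z_STREAM_END.
              */
             err = deflate(&strm, Z_FINISH);
             if (err != Z_STREAM_END) {
                 deflateEnd(&strm);
                 return err == Z_OK ? Z_BUF_ERROR : err;
             }
             return deflateEnd(&strm);
         }
         #endif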
    878 
    879 /* ===========================================================================
    880  * Read a new buffer from the current input stream, update the adler32
    881  * and total number of bytes read.
    882  */
    883 local int read_buf(strm, buf, size)
    884     z_stream *strm;
    885     charf *buf;
    886     unsigned size;
    887 {
    888     unsigned len = strm->avail_in;
    889     deflate_state *state = (deflate_state *) strm->state;
    890 
    891     if (len > size) len = size;
    892     if (len == 0) return 0;
    893 
    894     strm->avail_in  -= len;
    895 
    896     if (!state->noheader) {
    897         state->adler = adler32(state->adler, strm->next_in, len);
    898     }
    899     zmemcpy(buf, strm->next_in, len);
    900     strm->next_in  += len;
    901     strm->total_in += len;
    902 
    903     return (int)len;
    904 }
    905 
    906 /* ===========================================================================
    907  * Initialize the "longest match" routines for a new zlib stream
    908  */
    909 local void lm_init (s)
    910     deflate_state *s;
    911 {
    912     s->window_size = (ulg)2L*s->w_size;
    913 
    914     CLEAR_HASH(s);
    915 
    916     /* Set the default configuration parameters:
    917      */
    918     s->max_lazy_match   = configuration_table[s->level].max_lazy;
    919     s->good_match       = configuration_table[s->level].good_length;
    920     s->nice_match       = configuration_table[s->level].nice_length;
    921     s->max_chain_length = configuration_table[s->level].max_chain;
    922 
    923     s->strstart = 0;
    924     s->block_start = 0L;
    925     s->lookahead = 0;
    926     s->match_length = MIN_MATCH-1;
    927     s->match_available = 0;
    928     s->ins_h = 0;
    929 #ifdef ASMV
    930     match_init(); /* initialize the asm code */
    931 #endif
    932 }
    933 
    934 /* ===========================================================================
    935  * Set match_start to the longest match starting at the given string and
     936  * return its length. Matches shorter than or equal to prev_length are discarded,
    937  * in which case the result is equal to prev_length and match_start is
    938  * garbage.
    939  * IN assertions: cur_match is the head of the hash chain for the current
    940  *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
    941  */
    942 #ifndef ASMV
    943 /* For 80x86 and 680x0, an optimized version will be provided in match.asm or
    944  * match.S. The code will be functionally equivalent.
    945  */
    946 local int longest_match(s, cur_match)
    947     deflate_state *s;
    948     IPos cur_match;                             /* current match */
    949 {
    950     unsigned chain_length = s->max_chain_length;/* max hash chain length */
    951     register Bytef *scan = s->window + s->strstart; /* current string */
    952     register Bytef *match;                       /* matched string */
    953     register int len;                           /* length of current match */
    954     int best_len = s->prev_length;              /* best match length so far */
    955     IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
    956         s->strstart - (IPos)MAX_DIST(s) : NIL;
    957     /* Stop when cur_match becomes <= limit. To simplify the code,
    958      * we prevent matches with the string of window index 0.
    959      */
    960     Posf *prev = s->prev;
    961     uInt wmask = s->w_mask;
    962 
    963 #ifdef UNALIGNED_OK
    964     /* Compare two bytes at a time. Note: this is not always beneficial.
    965      * Try with and without -DUNALIGNED_OK to check.
    966      */
    967     register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    968     register ush scan_start = *(ushf*)scan;
    969     register ush scan_end   = *(ushf*)(scan+best_len-1);
    970 #else
    971     register Bytef *strend = s->window + s->strstart + MAX_MATCH;
    972     register Byte scan_end1  = scan[best_len-1];
    973     register Byte scan_end   = scan[best_len];
    974 #endif
    975 
    976     /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
    977      * It is easy to get rid of this optimization if necessary.
    978      */
    979     Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
    980 
    981     /* Do not waste too much time if we already have a good match: */
    982     if (s->prev_length >= s->good_match) {
    983         chain_length >>= 2;
    984     }
    985     Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
    986 
    987     do {
    988         Assert(cur_match < s->strstart, "no future");
    989         match = s->window + cur_match;
    990 
    991         /* Skip to next match if the match length cannot increase
    992          * or if the match length is less than 2:
    993          */
    994 #if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
    995         /* This code assumes sizeof(unsigned short) == 2. Do not use
    996          * UNALIGNED_OK if your compiler uses a different size.
    997          */
    998         if (*(ushf*)(match+best_len-1) != scan_end ||
    999             *(ushf*)match != scan_start) continue;
   1000 
   1001         /* It is not necessary to compare scan[2] and match[2] since they are
   1002          * always equal when the other bytes match, given that the hash keys
   1003          * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
   1004          * strstart+3, +5, ... up to strstart+257. We check for insufficient
   1005          * lookahead only every 4th comparison; the 128th check will be made
   1006          * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
   1007          * necessary to put more guard bytes at the end of the window, or
   1008          * to check more often for insufficient lookahead.
   1009          */
   1010         Assert(scan[2] == match[2], "scan[2]?");
   1011         scan++, match++;
   1012         do {
   1013         } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
   1014                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
   1015                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
   1016                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
   1017                  scan < strend);
   1018         /* The funny "do {}" generates better code on most compilers */
   1019 
   1020         /* Here, scan <= window+strstart+257 */
   1021         Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
   1022         if (*scan == *match) scan++;
   1023 
   1024         len = (MAX_MATCH - 1) - (int)(strend-scan);
   1025         scan = strend - (MAX_MATCH-1);
   1026 
   1027 #else /* UNALIGNED_OK */
   1028 
   1029         if (match[best_len]   != scan_end  ||
   1030             match[best_len-1] != scan_end1 ||
   1031             *match            != *scan     ||
   1032             *++match          != scan[1])      continue;
   1033 
   1034         /* The check at best_len-1 can be removed because it will be made
   1035          * again later. (This heuristic is not always a win.)
   1036          * It is not necessary to compare scan[2] and match[2] since they
   1037          * are always equal when the other bytes match, given that
   1038          * the hash keys are equal and that HASH_BITS >= 8.
   1039          */
   1040         scan += 2, match++;
   1041         Assert(*scan == *match, "match[2]?");
   1042 
   1043         /* We check for insufficient lookahead only every 8th comparison;
   1044          * the 256th check will be made at strstart+258.
   1045          */
   1046         do {
   1047         } while (*++scan == *++match && *++scan == *++match &&
   1048                  *++scan == *++match && *++scan == *++match &&
   1049                  *++scan == *++match && *++scan == *++match &&
   1050                  *++scan == *++match && *++scan == *++match &&
   1051                  scan < strend);
   1052 
   1053         Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
   1054 
   1055         len = MAX_MATCH - (int)(strend - scan);
   1056         scan = strend - MAX_MATCH;
   1057 
   1058 #endif /* UNALIGNED_OK */
   1059 
   1060         if (len > best_len) {
   1061             s->match_start = cur_match;
   1062             best_len = len;
   1063             if (len >= s->nice_match) break;
   1064 #ifdef UNALIGNED_OK
   1065             scan_end = *(ushf*)(scan+best_len-1);
   1066 #else
   1067             scan_end1  = scan[best_len-1];
   1068             scan_end   = scan[best_len];
   1069 #endif
   1070         }
   1071     } while ((cur_match = prev[cur_match & wmask]) > limit
   1072              && --chain_length != 0);
   1073 
   1074     return best_len;
   1075 }
   1076 #endif /* ASMV */
   1077 
   1078 #ifdef DEBUG_ZLIB
   1079 /* ===========================================================================
   1080  * Check that the match at match_start is indeed a match.
   1081  */
   1082 local void check_match(s, start, match, length)
   1083     deflate_state *s;
   1084     IPos start, match;
   1085     int length;
   1086 {
   1087     /* check that the match is indeed a match */
   1088     if (memcmp((charf *)s->window + match,
   1089                 (charf *)s->window + start, length) != EQUAL) {
   1090         fprintf(stderr,
   1091             " start %u, match %u, length %d\n",
   1092             start, match, length);
   1093         do { fprintf(stderr, "%c%c", s->window[match++],
   1094                      s->window[start++]); } while (--length != 0);
   1095         z_error("invalid match");
   1096     }
   1097     if (verbose > 1) {
   1098         fprintf(stderr,"\\[%d,%d]", start-match, length);
   1099         do { putc(s->window[start++], stderr); } while (--length != 0);
   1100     }
   1101 }
   1102 #else
   1103 #  define check_match(s, start, match, length)
   1104 #endif
   1105 
   1106 /* ===========================================================================
   1107  * Fill the window when the lookahead becomes insufficient.
   1108  * Updates strstart and lookahead.
   1109  *
   1110  * IN assertion: lookahead < MIN_LOOKAHEAD
   1111  * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
   1112  *    At least one byte has been read, or avail_in == 0; reads are
   1113  *    performed for at least two bytes (required for the zip translate_eol
   1114  *    option -- not supported here).
   1115  */
   1116 local void fill_window(s)
   1117     deflate_state *s;
   1118 {
   1119     register unsigned n, m;
   1120     register Posf *p;
   1121     unsigned more;    /* Amount of free space at the end of the window. */
   1122     uInt wsize = s->w_size;
   1123 
   1124     do {
   1125         more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
   1126 
   1127         /* Deal with !@#$% 64K limit: */
   1128         if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
   1129             more = wsize;
   1130         } else if (more == (unsigned)(-1)) {
   1131             /* Very unlikely, but possible on 16 bit machine if strstart == 0
    1132              * and lookahead == 1 (input done one byte at a time)
   1133              */
   1134             more--;
   1135 
   1136         /* If the window is almost full and there is insufficient lookahead,
   1137          * move the upper half to the lower one to make room in the upper half.
   1138          */
   1139         } else if (s->strstart >= wsize+MAX_DIST(s)) {
   1140 
   1141             /* By the IN assertion, the window is not empty so we can't confuse
   1142              * more == 0 with more == 64K on a 16 bit machine.
   1143              */
   1144             zmemcpy((charf *)s->window, (charf *)s->window+wsize,
   1145                    (unsigned)wsize);
   1146             s->match_start -= wsize;
   1147             s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
   1148 
   1149             s->block_start -= (long) wsize;
   1150 
   1151             /* Slide the hash table (could be avoided with 32 bit values
   1152                at the expense of memory usage):
   1153              */
   1154             n = s->hash_size;
   1155             p = &s->head[n];
   1156             do {
   1157                 m = *--p;
   1158                 *p = (Pos)(m >= wsize ? m-wsize : NIL);
   1159             } while (--n);
   1160 
   1161             n = wsize;
   1162             p = &s->prev[n];
   1163             do {
   1164                 m = *--p;
   1165                 *p = (Pos)(m >= wsize ? m-wsize : NIL);
   1166                 /* If n is not on any hash chain, prev[n] is garbage but
   1167                  * its value will never be used.
   1168                  */
   1169             } while (--n);
   1170 
   1171             more += wsize;
   1172         }
   1173         if (s->strm->avail_in == 0) return;
   1174 
   1175         /* If there was no sliding:
   1176          *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
   1177          *    more == window_size - lookahead - strstart
   1178          * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
   1179          * => more >= window_size - 2*WSIZE + 2
   1180          * In the BIG_MEM or MMAP case (not yet supported),
   1181          *   window_size == input_size + MIN_LOOKAHEAD  &&
   1182          *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
   1183          * Otherwise, window_size == 2*WSIZE so more >= 2.
   1184          * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
   1185          */
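                 /* Concretely, for the usual wsize == 32768 with no sliding:
                  * window_size == 65536, strstart <= 32768 + 32506 - 1 == 65273
                  * and lookahead <= 261, so more >= 65536 - 261 - 65273 == 2.
                  */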
   1186         Assert(more >= 2, "more < 2");
   1187 
   1188         n = read_buf(s->strm, (charf *)s->window + s->strstart + s->lookahead,
   1189                      more);
   1190         s->lookahead += n;
   1191 
   1192         /* Initialize the hash value now that we have some input: */
   1193         if (s->lookahead >= MIN_MATCH) {
   1194             s->ins_h = s->window[s->strstart];
   1195             UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
   1196 #if MIN_MATCH != 3
   1197             Call UPDATE_HASH() MIN_MATCH-3 more times
   1198 #endif
   1199         }
   1200         /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
   1201          * but this is not important since only literal bytes will be emitted.
   1202          */
   1203 
   1204     } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
   1205 }
   1206 
   1207 /* ===========================================================================
   1208  * Flush the current block, with given end-of-file flag.
   1209  * IN assertion: strstart is set to the end of the current match.
   1210  */
   1211 #define FLUSH_BLOCK_ONLY(s, flush) { \
   1212    ct_flush_block(s, (s->block_start >= 0L ? \
   1213            (charf *)&s->window[(unsigned)s->block_start] : \
   1214            (charf *)Z_NULL), (long)s->strstart - s->block_start, (flush)); \
   1215    s->block_start = s->strstart; \
   1216    flush_pending(s->strm); \
   1217    Tracev((stderr,"[FLUSH]")); \
   1218 }
   1219 
   1220 /* Same but force premature exit if necessary. */
   1221 #define FLUSH_BLOCK(s, flush) { \
   1222    FLUSH_BLOCK_ONLY(s, flush); \
   1223    if (s->strm->avail_out == 0) return 1; \
   1224 }
   1225 
   1226 /* ===========================================================================
   1227  * Compress as much as possible from the input stream, return true if
   1228  * processing was terminated prematurely (no more input or output space).
    1229  * This function does not perform lazy evaluation of matches and inserts
   1230  * new strings in the dictionary only for unmatched strings or for short
   1231  * matches. It is used only for the fast compression options.
   1232  */
   1233 local int deflate_fast(s, flush)
   1234     deflate_state *s;
   1235     int flush;
   1236 {
   1237     IPos hash_head = NIL; /* head of the hash chain */
   1238     int bflush;     /* set if current block must be flushed */
   1239 
   1240     s->prev_length = MIN_MATCH-1;
   1241 
   1242     for (;;) {
   1243         /* Make sure that we always have enough lookahead, except
   1244          * at the end of the input file. We need MAX_MATCH bytes
   1245          * for the next match, plus MIN_MATCH bytes to insert the
   1246          * string following the next match.
   1247          */
   1248         if (s->lookahead < MIN_LOOKAHEAD) {
   1249             fill_window(s);
   1250             if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) return 1;
   1251 
   1252             if (s->lookahead == 0) break; /* flush the current block */
   1253         }
   1254 
   1255         /* Insert the string window[strstart .. strstart+2] in the
   1256          * dictionary, and set hash_head to the head of the hash chain:
   1257          */
   1258         if (s->lookahead >= MIN_MATCH) {
   1259             INSERT_STRING(s, s->strstart, hash_head);
   1260         }
   1261 
   1262         /* Find the longest match, discarding those <= prev_length.
    1263          * At this point we always have match_length < MIN_MATCH
   1264          */
   1265         if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
   1266             /* To simplify the code, we prevent matches with the string
   1267              * of window index 0 (in particular we have to avoid a match
   1268              * of the string with itself at the start of the input file).
   1269              */
   1270             if (s->strategy != Z_HUFFMAN_ONLY) {
   1271                 s->match_length = longest_match (s, hash_head);
   1272             }
   1273             /* longest_match() sets match_start */
   1274 
   1275             if (s->match_length > s->lookahead) s->match_length = s->lookahead;
   1276         }
   1277         if (s->match_length >= MIN_MATCH) {
   1278             check_match(s, s->strstart, s->match_start, s->match_length);
   1279 
   1280             bflush = ct_tally(s, s->strstart - s->match_start,
   1281                               s->match_length - MIN_MATCH);
   1282 
   1283             s->lookahead -= s->match_length;
   1284 
   1285             /* Insert new strings in the hash table only if the match length
   1286              * is not too large. This saves time but degrades compression.
   1287              */
   1288             if (s->match_length <= s->max_insert_length &&
   1289                 s->lookahead >= MIN_MATCH) {
   1290                 s->match_length--; /* string at strstart already in hash table */
   1291                 do {
   1292                     s->strstart++;
   1293                     INSERT_STRING(s, s->strstart, hash_head);
   1294                     /* strstart never exceeds WSIZE-MAX_MATCH, so there are
   1295                      * always MIN_MATCH bytes ahead.
   1296                      */
   1297                 } while (--s->match_length != 0);
   1298                 s->strstart++;
   1299             } else {
   1300                 s->strstart += s->match_length;
   1301                 s->match_length = 0;
   1302                 s->ins_h = s->window[s->strstart];
   1303                 UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
   1304 #if MIN_MATCH != 3
   1305                 Call UPDATE_HASH() MIN_MATCH-3 more times
   1306 #endif
   1307                 /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
   1308                  * matter since it will be recomputed at next deflate call.
   1309                  */
   1310             }
   1311         } else {
   1312             /* No match, output a literal byte */
   1313             Tracevv((stderr,"%c", s->window[s->strstart]));
   1314             bflush = ct_tally (s, 0, s->window[s->strstart]);
   1315             s->lookahead--;
   1316             s->strstart++;
   1317         }
   1318         if (bflush) FLUSH_BLOCK(s, Z_NO_FLUSH);
   1319     }
   1320     FLUSH_BLOCK(s, flush);
   1321     return 0; /* normal exit */
   1322 }
   1323 
   1324 /* ===========================================================================
   1325  * Same as above, but achieves better compression. We use lazy
   1326  * evaluation for matches: a match is finally adopted only if there is
   1327  * no better match at the next window position.
   1328  */
   1329 local int deflate_slow(s, flush)
   1330     deflate_state *s;
   1331     int flush;
   1332 {
   1333     IPos hash_head = NIL;    /* head of hash chain */
   1334     int bflush;              /* set if current block must be flushed */
   1335 
   1336     /* Process the input block. */
   1337     for (;;) {
   1338         /* Make sure that we always have enough lookahead, except
   1339          * at the end of the input file. We need MAX_MATCH bytes
   1340          * for the next match, plus MIN_MATCH bytes to insert the
   1341          * string following the next match.
   1342          */
   1343         if (s->lookahead < MIN_LOOKAHEAD) {
   1344             fill_window(s);
   1345             if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) return 1;
   1346 
   1347             if (s->lookahead == 0) break; /* flush the current block */
   1348         }
   1349 
   1350         /* Insert the string window[strstart .. strstart+2] in the
   1351          * dictionary, and set hash_head to the head of the hash chain:
   1352          */
   1353         if (s->lookahead >= MIN_MATCH) {
   1354             INSERT_STRING(s, s->strstart, hash_head);
   1355         }
   1356 
   1357         /* Find the longest match, discarding those <= prev_length.
   1358          */
   1359         s->prev_length = s->match_length, s->prev_match = s->match_start;
   1360         s->match_length = MIN_MATCH-1;
   1361 
   1362         if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
   1363             s->strstart - hash_head <= MAX_DIST(s)) {
   1364             /* To simplify the code, we prevent matches with the string
   1365              * of window index 0 (in particular we have to avoid a match
   1366              * of the string with itself at the start of the input file).
   1367              */
   1368             if (s->strategy != Z_HUFFMAN_ONLY) {
   1369                 s->match_length = longest_match (s, hash_head);
   1370             }
   1371             /* longest_match() sets match_start */
   1372             if (s->match_length > s->lookahead) s->match_length = s->lookahead;
   1373 
   1374             if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
   1375                  (s->match_length == MIN_MATCH &&
   1376                   s->strstart - s->match_start > TOO_FAR))) {
   1377 
   1378                 /* If prev_match is also MIN_MATCH, match_start is garbage
   1379                  * but we will ignore the current match anyway.
   1380                  */
   1381                 s->match_length = MIN_MATCH-1;
   1382             }
   1383         }
   1384         /* If there was a match at the previous step and the current
   1385          * match is not better, output the previous match:
   1386          */
   1387         if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
   1388             uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
   1389             /* Do not insert strings in hash table beyond this. */
   1390 
   1391             check_match(s, s->strstart-1, s->prev_match, s->prev_length);
   1392 
   1393             bflush = ct_tally(s, s->strstart -1 - s->prev_match,
   1394                               s->prev_length - MIN_MATCH);
   1395 
   1396             /* Insert in hash table all strings up to the end of the match.
   1397              * strstart-1 and strstart are already inserted. If there is not
   1398              * enough lookahead, the last two strings are not inserted in
   1399              * the hash table.
   1400              */
   1401             s->lookahead -= s->prev_length-1;
   1402             s->prev_length -= 2;
   1403             do {
   1404                 if (++s->strstart <= max_insert) {
   1405                     INSERT_STRING(s, s->strstart, hash_head);
   1406                 }
   1407             } while (--s->prev_length != 0);
   1408             s->match_available = 0;
   1409             s->match_length = MIN_MATCH-1;
   1410             s->strstart++;
   1411 
   1412             if (bflush) FLUSH_BLOCK(s, Z_NO_FLUSH);
   1413 
   1414         } else if (s->match_available) {
   1415             /* If there was no match at the previous position, output a
   1416              * single literal. If there was a match but the current match
   1417              * is longer, truncate the previous match to a single literal.
   1418              */
   1419             Tracevv((stderr,"%c", s->window[s->strstart-1]));
   1420             if (ct_tally (s, 0, s->window[s->strstart-1])) {
   1421                 FLUSH_BLOCK_ONLY(s, Z_NO_FLUSH);
   1422             }
   1423             s->strstart++;
   1424             s->lookahead--;
   1425             if (s->strm->avail_out == 0) return 1;
   1426         } else {
   1427             /* There is no previous match to compare with, wait for
   1428              * the next step to decide.
   1429              */
   1430             s->match_available = 1;
   1431             s->strstart++;
   1432             s->lookahead--;
   1433         }
   1434     }
   1435     Assert (flush != Z_NO_FLUSH, "no flush?");
   1436     if (s->match_available) {
   1437         Tracevv((stderr,"%c", s->window[s->strstart-1]));
   1438         ct_tally (s, 0, s->window[s->strstart-1]);
   1439         s->match_available = 0;
   1440     }
   1441     FLUSH_BLOCK(s, flush);
   1442     return 0;
   1443 }
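
/* ===========================================================================
 * Editor's note -- illustrative sketch, not part of zlib.  The heart of the
 * lazy heuristic in deflate_slow() above is: delay every match by one
 * position, and adopt the delayed match only if the match found at the next
 * position is not strictly longer.  The guard macro and helper below are
 * hypothetical and never compiled into the library.
 */
#ifdef ZLIB_EDITOR_SKETCHES
local int emit_previous_match OF((uInt prev_length, uInt cur_length));

local int emit_previous_match(prev_length, cur_length)
    uInt prev_length;    /* length of the match found at position p-1 */
    uInt cur_length;     /* length of the match found at position p */
{
    /* Mirrors the test "prev_length >= MIN_MATCH && match_length <=
     * prev_length" above: keep the previous match unless the current
     * position found something strictly better.
     */
    return prev_length >= MIN_MATCH && cur_length <= prev_length;
}
#endif /* ZLIB_EDITOR_SKETCHES */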
   1444 
   1445 
   1446 /*+++++*/
   1447 /* trees.c -- output deflated data using Huffman coding
   1448  * Copyright (C) 1995 Jean-loup Gailly
   1449  * For conditions of distribution and use, see copyright notice in zlib.h
   1450  */
   1451 
   1452 /*
   1453  *  ALGORITHM
   1454  *
   1455  *      The "deflation" process uses several Huffman trees. The more
   1456  *      common source values are represented by shorter bit sequences.
   1457  *
   1458  *      Each code tree is stored in a compressed form which is itself
   1459  *      a Huffman encoding of the lengths of all the code strings (in
   1460  *      ascending order by source values).  The actual code strings are
   1461  *      reconstructed from the lengths in the inflate process, as described
   1462  *      in the deflate specification.
   1463  *
   1464  *  REFERENCES
   1465  *
   1466  *      Deutsch, L.P., "'Deflate' Compressed Data Format Specification".
   1467  *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
   1468  *
   1469  *      Storer, James A.
   1470  *          Data Compression:  Methods and Theory, pp. 49-50.
   1471  *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
   1472  *
   1473  *      Sedgewick, R.
   1474  *          Algorithms, p290.
   1475  *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
   1476  */
   1477 
   1478 /* From: trees.c,v 1.5 1995/05/03 17:27:12 jloup Exp */
   1479 
   1480 #ifdef DEBUG_ZLIB
   1481 #  include <ctype.h>
   1482 #endif
   1483 
   1484 /* ===========================================================================
   1485  * Constants
   1486  */
   1487 
   1488 #define MAX_BL_BITS 7
   1489 /* Bit length codes must not exceed MAX_BL_BITS bits */
   1490 
   1491 #define END_BLOCK 256
   1492 /* end of block literal code */
   1493 
   1494 #define REP_3_6      16
   1495 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
   1496 
   1497 #define REPZ_3_10    17
   1498 /* repeat a zero length 3-10 times  (3 bits of repeat count) */
   1499 
   1500 #define REPZ_11_138  18
   1501 /* repeat a zero length 11-138 times  (7 bits of repeat count) */
   1502 
   1503 local int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   1504    = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
   1505 
   1506 local int extra_dbits[D_CODES] /* extra bits for each distance code */
   1507    = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};
   1508 
   1509 local int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   1510    = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};
   1511 
   1512 local uch bl_order[BL_CODES]
   1513    = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
   1514 /* The lengths of the bit length codes are sent in order of decreasing
   1515  * probability, to avoid transmitting the lengths for unused bit length codes.
   1516  */
   1517 
   1518 #define Buf_size (8 * 2*sizeof(char))
   1519 /* Number of bits used within bi_buf. (bi_buf might be implemented on
   1520  * more than 16 bits on some systems.)
   1521  */
   1522 
   1523 /* ===========================================================================
   1524  * Local data. These are initialized only once.
   1525  * To do: initialize at compile time to be completely reentrant. ???
   1526  */
   1527 
   1528 local ct_data static_ltree[L_CODES+2];
   1529 /* The static literal tree. Since the bit lengths are imposed, there is no
   1530  * need for the L_CODES extra codes used during heap construction. However,
   1531  * the codes 286 and 287 are needed to build a canonical tree (see ct_init
   1532  * below).
   1533  */
   1534 
   1535 local ct_data static_dtree[D_CODES];
   1536 /* The static distance tree. (Actually a trivial tree since all codes use
   1537  * 5 bits.)
   1538  */
   1539 
   1540 local uch dist_code[512];
   1541 /* Distance codes. The first 256 values give the code for distances 1 .. 256
   1542  * (indexed by distance - 1); the last 256 values correspond to the top 8
   1543  * bits of the 15 bit distances.
   1544  */
   1545 
   1546 local uch length_code[MAX_MATCH-MIN_MATCH+1];
   1547 /* length code for each normalized match length (0 == MIN_MATCH) */
   1548 
   1549 local int base_length[LENGTH_CODES];
   1550 /* First normalized length for each code (0 = MIN_MATCH) */
   1551 
   1552 local int base_dist[D_CODES];
   1553 /* First normalized distance for each code (0 = distance of 1) */
   1554 
   1555 struct static_tree_desc_s {
   1556     ct_data *static_tree;        /* static tree or NULL */
   1557     intf    *extra_bits;         /* extra bits for each code or NULL */
   1558     int     extra_base;          /* base index for extra_bits */
   1559     int     elems;               /* max number of elements in the tree */
   1560     int     max_length;          /* max bit length for the codes */
   1561 };
   1562 
   1563 local static_tree_desc  static_l_desc =
   1564 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
   1565 
   1566 local static_tree_desc  static_d_desc =
   1567 {static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};
   1568 
   1569 local static_tree_desc  static_bl_desc =
   1570 {(ct_data *)0, extra_blbits, 0,      BL_CODES, MAX_BL_BITS};
   1571 
   1572 /* ===========================================================================
   1573  * Local (static) routines in this file.
   1574  */
   1575 
   1576 local void ct_static_init OF((void));
   1577 local void init_block     OF((deflate_state *s));
   1578 local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
   1579 local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
   1580 local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
   1581 local void build_tree     OF((deflate_state *s, tree_desc *desc));
   1582 local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
   1583 local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
   1584 local int  build_bl_tree  OF((deflate_state *s));
   1585 local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
   1586                               int blcodes));
   1587 local void compress_block OF((deflate_state *s, ct_data *ltree,
   1588                               ct_data *dtree));
   1589 local void set_data_type  OF((deflate_state *s));
   1590 local unsigned bi_reverse OF((unsigned value, int length));
   1591 local void bi_windup      OF((deflate_state *s));
   1592 local void bi_flush       OF((deflate_state *s));
   1593 local void copy_block     OF((deflate_state *s, charf *buf, unsigned len,
   1594                               int header));
   1595 
   1596 #ifndef DEBUG_ZLIB
   1597 #  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
   1598    /* Send a code of the given tree. c and tree must not have side effects */
   1599 
   1600 #else /* DEBUG_ZLIB */
   1601 #  define send_code(s, c, tree) \
   1602      { if (verbose>1) fprintf(stderr,"\ncd %3d ",(c)); \
   1603        send_bits(s, tree[c].Code, tree[c].Len); }
   1604 #endif
   1605 
   1606 #define d_code(dist) \
   1607    ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
   1608 /* Mapping from a distance to a distance code. dist is the distance - 1 and
   1609  * must not have side effects. dist_code[256] and dist_code[257] are never
   1610  * used.
   1611  */
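
/* ===========================================================================
 * Editor's note -- illustrative sketch, not part of zlib.  d_code() above is
 * indexed by (match distance - 1): distances 1..256 are looked up directly,
 * larger distances through the top 8 bits of the 15-bit value.  For example
 * distance 1 gives dist_code[0] == 0 and distance 300 gives
 * dist_code[256 + (299>>7)] == 16.  Guard macro and helper are hypothetical.
 */
#ifdef ZLIB_EDITOR_SKETCHES
local int code_for_distance OF((unsigned distance));

local int code_for_distance(distance)
    unsigned distance;   /* match distance, 1..32768 */
{
    unsigned dist = distance - 1;   /* d_code() expects distance - 1 */
    return d_code(dist);
}
#endif /* ZLIB_EDITOR_SKETCHES */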
   1612 
   1613 /* ===========================================================================
   1614  * Output a short LSB first on the stream.
   1615  * IN assertion: there is enough room in pendingBuf.
   1616  */
   1617 #define put_short(s, w) { \
   1618     put_byte(s, (uch)((w) & 0xff)); \
   1619     put_byte(s, (uch)((ush)(w) >> 8)); \
   1620 }
   1621 
   1622 /* ===========================================================================
   1623  * Send a value on a given number of bits.
   1624  * IN assertion: length <= 16 and value fits in length bits.
   1625  */
   1626 #ifdef DEBUG_ZLIB
   1627 local void send_bits      OF((deflate_state *s, int value, int length));
   1628 
   1629 local void send_bits(s, value, length)
   1630     deflate_state *s;
   1631     int value;  /* value to send */
   1632     int length; /* number of bits */
   1633 {
   1634     Tracev((stderr," l %2d v %4x ", length, value));
   1635     Assert(length > 0 && length <= 15, "invalid length");
   1636     s->bits_sent += (ulg)length;
   1637 
   1638     /* If there is not enough room in bi_buf, use (bi_valid) bits from bi_buf
   1639      * and (Buf_size - bi_valid) bits from value, leaving
   1640      * (length - (Buf_size - bi_valid)) unused bits in value.
   1641      */
   1642     if (s->bi_valid > (int)Buf_size - length) {
   1643         s->bi_buf |= (value << s->bi_valid);
   1644         put_short(s, s->bi_buf);
   1645         s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
   1646         s->bi_valid += length - Buf_size;
   1647     } else {
   1648         s->bi_buf |= value << s->bi_valid;
   1649         s->bi_valid += length;
   1650     }
   1651 }
   1652 #else /* !DEBUG_ZLIB */
   1653 
   1654 #define send_bits(s, value, length) \
   1655 { int len = length;\
   1656   if (s->bi_valid > (int)Buf_size - len) {\
   1657     int val = value;\
   1658     s->bi_buf |= (val << s->bi_valid);\
   1659     put_short(s, s->bi_buf);\
   1660     s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
   1661     s->bi_valid += len - Buf_size;\
   1662   } else {\
   1663     s->bi_buf |= (value) << s->bi_valid;\
   1664     s->bi_valid += len;\
   1665   }\
   1666 }
   1667 #endif /* DEBUG_ZLIB */
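
/* ===========================================================================
 * Editor's note -- illustrative sketch, not part of zlib.  send_bits() packs
 * each value LSB first into the 16-bit bi_buf; once fewer than `length' bits
 * of room remain, the full 16 bits are flushed (as put_short() does) and the
 * leftover high bits of the value are carried into the next buffer.  The
 * stand-alone writer below mirrors that discipline on a plain byte array.
 * Guard macro, struct and function names are hypothetical.
 */
#ifdef ZLIB_EDITOR_SKETCHES
typedef struct sketch_bit_writer_s {
    uch *out;          /* output byte array */
    unsigned pos;      /* next byte to write */
    ush buf;           /* bit buffer, filled from bit 0 upwards */
    int valid;         /* number of valid bits in buf */
} sketch_bit_writer;

local void sketch_send_bits OF((sketch_bit_writer *w, int value, int length));

local void sketch_send_bits(w, value, length)
    sketch_bit_writer *w;
    int value;    /* value to send; must fit in `length' bits */
    int length;   /* 1..15 */
{
    if (w->valid > 16 - length) {
        w->buf |= (ush)(value << w->valid);
        w->out[w->pos++] = (uch)(w->buf & 0xff);        /* low byte first,  */
        w->out[w->pos++] = (uch)(w->buf >> 8);          /* like put_short() */
        w->buf = (ush)((ush)value >> (16 - w->valid));  /* leftover high bits */
        w->valid += length - 16;
    } else {
        w->buf |= (ush)(value << w->valid);
        w->valid += length;
    }
}
#endif /* ZLIB_EDITOR_SKETCHES */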
   1668 
   1669 
   1670 #define MAX(a,b) (a >= b ? a : b)
   1671 /* the arguments must not have side effects */
   1672 
   1673 /* ===========================================================================
   1674  * Initialize the various 'constant' tables.
   1675  * To do: do this at compile time.
   1676  */
   1677 local void ct_static_init()
   1678 {
   1679     int n;        /* iterates over tree elements */
   1680     int bits;     /* bit counter */
   1681     int length;   /* length value */
   1682     int code;     /* code value */
   1683     int dist;     /* distance index */
   1684     ush bl_count[MAX_BITS+1];
   1685     /* number of codes at each bit length for an optimal tree */
   1686 
   1687     /* Initialize the mapping length (0..255) -> length code (0..28) */
   1688     length = 0;
   1689     for (code = 0; code < LENGTH_CODES-1; code++) {
   1690         base_length[code] = length;
   1691         for (n = 0; n < (1<<extra_lbits[code]); n++) {
   1692             length_code[length++] = (uch)code;
   1693         }
   1694     }
   1695     Assert (length == 256, "ct_static_init: length != 256");
   1696     /* Note that the length 255 (match length 258) can be represented
   1697      * in two different ways: code 284 + 5 bits or code 285, so we
   1698      * overwrite length_code[255] to use the best encoding:
   1699      */
   1700     length_code[length-1] = (uch)code;
   1701 
   1702     /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
   1703     dist = 0;
   1704     for (code = 0 ; code < 16; code++) {
   1705         base_dist[code] = dist;
   1706         for (n = 0; n < (1<<extra_dbits[code]); n++) {
   1707             dist_code[dist++] = (uch)code;
   1708         }
   1709     }
   1710     Assert (dist == 256, "ct_static_init: dist != 256");
   1711     dist >>= 7; /* from now on, all distances are divided by 128 */
   1712     for ( ; code < D_CODES; code++) {
   1713         base_dist[code] = dist << 7;
   1714         for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
   1715             dist_code[256 + dist++] = (uch)code;
   1716         }
   1717     }
   1718     Assert (dist == 256, "ct_static_init: 256+dist != 512");
   1719 
   1720     /* Construct the codes of the static literal tree */
   1721     for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
   1722     n = 0;
   1723     while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
   1724     while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
   1725     while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
   1726     while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
   1727     /* Codes 286 and 287 do not exist, but we must include them in the
   1728      * tree construction to get a canonical Huffman tree (longest code
   1729      * all ones)
   1730      */
   1731     gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);
   1732 
   1733     /* The static distance tree is trivial: */
   1734     for (n = 0; n < D_CODES; n++) {
   1735         static_dtree[n].Len = 5;
   1736         static_dtree[n].Code = bi_reverse(n, 5);
   1737     }
   1738 }
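
/* ===========================================================================
 * Editor's note -- illustrative sketch, not part of zlib.  Once
 * ct_static_init() has filled length_code[], the literal/length symbol for a
 * match is obtained by normalizing the length (subtracting MIN_MATCH) and
 * offsetting the resulting code by LITERALS+1, exactly as ct_tally() and
 * compress_block() do below.  For example a match of length 11 normalizes to
 * 8, length_code[8] is 8, and with LITERALS == 256 the symbol sent is 265.
 * Guard macro and helper name are hypothetical.
 */
#ifdef ZLIB_EDITOR_SKETCHES
local int length_symbol OF((int match_length));

local int length_symbol(match_length)
    int match_length;   /* MIN_MATCH..MAX_MATCH */
{
    return length_code[match_length - MIN_MATCH] + LITERALS + 1;
}
#endif /* ZLIB_EDITOR_SKETCHES */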
   1739 
   1740 /* ===========================================================================
   1741  * Initialize the tree data structures for a new zlib stream.
   1742  */
   1743 local void ct_init(s)
   1744     deflate_state *s;
   1745 {
   1746     if (static_dtree[0].Len == 0) {
   1747         ct_static_init();              /* To do: at compile time */
   1748     }
   1749 
   1750     s->compressed_len = 0L;
   1751 
   1752     s->l_desc.dyn_tree = s->dyn_ltree;
   1753     s->l_desc.stat_desc = &static_l_desc;
   1754 
   1755     s->d_desc.dyn_tree = s->dyn_dtree;
   1756     s->d_desc.stat_desc = &static_d_desc;
   1757 
   1758     s->bl_desc.dyn_tree = s->bl_tree;
   1759     s->bl_desc.stat_desc = &static_bl_desc;
   1760 
   1761     s->bi_buf = 0;
   1762     s->bi_valid = 0;
   1763     s->last_eob_len = 8; /* enough lookahead for inflate */
   1764 #ifdef DEBUG_ZLIB
   1765     s->bits_sent = 0L;
   1766 #endif
   1767     s->blocks_in_packet = 0;
   1768 
   1769     /* Initialize the first block of the first file: */
   1770     init_block(s);
   1771 }
   1772 
   1773 /* ===========================================================================
   1774  * Initialize a new block.
   1775  */
   1776 local void init_block(s)
   1777     deflate_state *s;
   1778 {
   1779     int n; /* iterates over tree elements */
   1780 
   1781     /* Initialize the trees. */
   1782     for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
   1783     for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
   1784     for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
   1785 
   1786     s->dyn_ltree[END_BLOCK].Freq = 1;
   1787     s->opt_len = s->static_len = 0L;
   1788     s->last_lit = s->matches = 0;
   1789 }
   1790 
   1791 #define SMALLEST 1
   1792 /* Index within the heap array of least frequent node in the Huffman tree */
   1793 
   1794 
   1795 /* ===========================================================================
   1796  * Remove the smallest element from the heap and recreate the heap with
   1797  * one less element. Updates heap and heap_len.
   1798  */
   1799 #define pqremove(s, tree, top) \
   1800 {\
   1801     top = s->heap[SMALLEST]; \
   1802     s->heap[SMALLEST] = s->heap[s->heap_len--]; \
   1803     pqdownheap(s, tree, SMALLEST); \
   1804 }
   1805 
   1806 /* ===========================================================================
   1807  * Compares two subtrees, using the tree depth as tie breaker when
   1808  * the subtrees have equal frequency. This minimizes the worst case length.
   1809  */
   1810 #define smaller(tree, n, m, depth) \
   1811    (tree[n].Freq < tree[m].Freq || \
   1812    (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
   1813 
   1814 /* ===========================================================================
   1815  * Restore the heap property by moving down the tree starting at node k,
   1816  * exchanging a node with the smallest of its two sons if necessary, stopping
   1817  * when the heap property is re-established (each father smaller than its
   1818  * two sons).
   1819  */
   1820 local void pqdownheap(s, tree, k)
   1821     deflate_state *s;
   1822     ct_data *tree;  /* the tree to restore */
   1823     int k;               /* node to move down */
   1824 {
   1825     int v = s->heap[k];
   1826     int j = k << 1;  /* left son of k */
   1827     while (j <= s->heap_len) {
   1828         /* Set j to the smallest of the two sons: */
   1829         if (j < s->heap_len &&
   1830             smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
   1831             j++;
   1832         }
   1833         /* Exit if v is smaller than both sons */
   1834         if (smaller(tree, v, s->heap[j], s->depth)) break;
   1835 
   1836         /* Exchange v with the smallest son */
   1837         s->heap[k] = s->heap[j];  k = j;
   1838 
   1839         /* And continue down the tree, setting j to the left son of k */
   1840         j <<= 1;
   1841     }
   1842     s->heap[k] = v;
   1843 }
   1844 
   1845 /* ===========================================================================
   1846  * Compute the optimal bit lengths for a tree and update the total bit length
   1847  * for the current block.
   1848  * IN assertion: the fields freq and dad are set, heap[heap_max] and
   1849  *    above are the tree nodes sorted by increasing frequency.
   1850  * OUT assertions: the field len is set to the optimal bit length, the
   1851  *     array bl_count contains the frequencies for each bit length.
   1852  *     The length opt_len is updated; static_len is also updated if stree is
   1853  *     not null.
   1854  */
   1855 local void gen_bitlen(s, desc)
   1856     deflate_state *s;
   1857     tree_desc *desc;    /* the tree descriptor */
   1858 {
   1859     ct_data *tree  = desc->dyn_tree;
   1860     int max_code   = desc->max_code;
   1861     ct_data *stree = desc->stat_desc->static_tree;
   1862     intf *extra    = desc->stat_desc->extra_bits;
   1863     int base       = desc->stat_desc->extra_base;
   1864     int max_length = desc->stat_desc->max_length;
   1865     int h;              /* heap index */
   1866     int n, m;           /* iterate over the tree elements */
   1867     int bits;           /* bit length */
   1868     int xbits;          /* extra bits */
   1869     ush f;              /* frequency */
   1870     int overflow = 0;   /* number of elements with bit length too large */
   1871 
   1872     for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;
   1873 
   1874     /* In a first pass, compute the optimal bit lengths (which may
   1875      * overflow in the case of the bit length tree).
   1876      */
   1877     tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
   1878 
   1879     for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
   1880         n = s->heap[h];
   1881         bits = tree[tree[n].Dad].Len + 1;
   1882         if (bits > max_length) bits = max_length, overflow++;
   1883         tree[n].Len = (ush)bits;
   1884         /* We overwrite tree[n].Dad which is no longer needed */
   1885 
   1886         if (n > max_code) continue; /* not a leaf node */
   1887 
   1888         s->bl_count[bits]++;
   1889         xbits = 0;
   1890         if (n >= base) xbits = extra[n-base];
   1891         f = tree[n].Freq;
   1892         s->opt_len += (ulg)f * (bits + xbits);
   1893         if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
   1894     }
   1895     if (overflow == 0) return;
   1896 
   1897     Trace((stderr,"\nbit length overflow\n"));
   1898     /* This happens for example on obj2 and pic of the Calgary corpus */
   1899 
   1900     /* Find the first bit length which could increase: */
   1901     do {
   1902         bits = max_length-1;
   1903         while (s->bl_count[bits] == 0) bits--;
   1904         s->bl_count[bits]--;      /* move one leaf down the tree */
   1905         s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
   1906         s->bl_count[max_length]--;
   1907         /* The brother of the overflow item also moves one step up,
   1908          * but this does not affect bl_count[max_length]
   1909          */
   1910         overflow -= 2;
   1911     } while (overflow > 0);
   1912 
   1913     /* Now recompute all bit lengths, scanning in increasing frequency.
   1914      * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
   1915      * lengths instead of fixing only the wrong ones. This idea is taken
   1916      * from 'ar' written by Haruhiko Okumura.)
   1917      */
   1918     for (bits = max_length; bits != 0; bits--) {
   1919         n = s->bl_count[bits];
   1920         while (n != 0) {
   1921             m = s->heap[--h];
   1922             if (m > max_code) continue;
   1923             if (tree[m].Len != (unsigned) bits) {
   1924                 Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
   1925                 s->opt_len += ((long)bits - (long)tree[m].Len)
   1926                               *(long)tree[m].Freq;
   1927                 tree[m].Len = (ush)bits;
   1928             }
   1929             n--;
   1930         }
   1931     }
   1932 }
   1933 
   1934 /* ===========================================================================
   1935  * Generate the codes for a given tree and bit counts (which need not be
   1936  * optimal).
   1937  * IN assertion: the array bl_count contains the bit length statistics for
   1938  * the given tree and the field len is set for all tree elements.
   1939  * OUT assertion: the field code is set for all tree elements of non
   1940  *     zero code length.
   1941  */
   1942 local void gen_codes (tree, max_code, bl_count)
   1943     ct_data *tree;             /* the tree to decorate */
   1944     int max_code;              /* largest code with non zero frequency */
   1945     ushf *bl_count;            /* number of codes at each bit length */
   1946 {
   1947     ush next_code[MAX_BITS+1]; /* next code value for each bit length */
   1948     ush code = 0;              /* running code value */
   1949     int bits;                  /* bit index */
   1950     int n;                     /* code index */
   1951 
   1952     /* The distribution counts are first used to generate the code values
   1953      * without bit reversal.
   1954      */
   1955     for (bits = 1; bits <= MAX_BITS; bits++) {
   1956         next_code[bits] = code = (code + bl_count[bits-1]) << 1;
   1957     }
   1958     /* Check that the bit counts in bl_count are consistent. The last code
   1959      * must be all ones.
   1960      */
   1961     Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
   1962             "inconsistent bit counts");
   1963     Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
   1964 
   1965     for (n = 0;  n <= max_code; n++) {
   1966         int len = tree[n].Len;
   1967         if (len == 0) continue;
   1968         /* Now reverse the bits */
   1969         tree[n].Code = bi_reverse(next_code[len]++, len);
   1970 
   1971         Tracec(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
   1972              n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
   1973     }
   1974 }
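
/* ===========================================================================
 * Editor's note -- worked example, not part of zlib.  For a complete tree
 * with code lengths {1,2,3,3} the counts are bl_count[1]=1, bl_count[2]=1,
 * bl_count[3]=2, so the loop above computes
 *     next_code[1] = 0,  next_code[2] = (0+1)<<1 = 2,  next_code[3] = (2+1)<<1 = 6,
 * giving the canonical codes 0, 10, 110, 111 (the last code is all ones, as
 * the Assert requires).  Each code is then bit-reversed for LSB-first output.
 */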
   1975 
   1976 /* ===========================================================================
   1977  * Construct one Huffman tree and assign the code bit strings and lengths.
   1978  * Update the total bit length for the current block.
   1979  * IN assertion: the field freq is set for all tree elements.
   1980  * OUT assertions: the fields len and code are set to the optimal bit length
   1981  *     and corresponding code. The length opt_len is updated; static_len is
   1982  *     also updated if stree is not null. The field max_code is set.
   1983  */
   1984 local void build_tree(s, desc)
   1985     deflate_state *s;
   1986     tree_desc *desc; /* the tree descriptor */
   1987 {
   1988     ct_data *tree   = desc->dyn_tree;
   1989     ct_data *stree  = desc->stat_desc->static_tree;
   1990     int elems       = desc->stat_desc->elems;
   1991     int n, m;          /* iterate over heap elements */
   1992     int max_code = -1; /* largest code with non zero frequency */
   1993     int node;          /* new node being created */
   1994 
   1995     /* Construct the initial heap, with least frequent element in
   1996      * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
   1997      * heap[0] is not used.
   1998      */
   1999     s->heap_len = 0, s->heap_max = HEAP_SIZE;
   2000 
   2001     for (n = 0; n < elems; n++) {
   2002         if (tree[n].Freq != 0) {
   2003             s->heap[++(s->heap_len)] = max_code = n;
   2004             s->depth[n] = 0;
   2005         } else {
   2006             tree[n].Len = 0;
   2007         }
   2008     }
   2009 
   2010     /* The pkzip format requires that at least one distance code exists,
   2011      * and that at least one bit should be sent even if there is only one
   2012      * possible code. So to avoid special checks later on we force at least
   2013      * two codes of non zero frequency.
   2014      */
   2015     while (s->heap_len < 2) {
   2016         node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
   2017         tree[node].Freq = 1;
   2018         s->depth[node] = 0;
   2019         s->opt_len--; if (stree) s->static_len -= stree[node].Len;
   2020         /* node is 0 or 1 so it does not have extra bits */
   2021     }
   2022     desc->max_code = max_code;
   2023 
   2024     /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
   2025      * establish sub-heaps of increasing lengths:
   2026      */
   2027     for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
   2028 
   2029     /* Construct the Huffman tree by repeatedly combining the least two
   2030      * frequent nodes.
   2031      */
   2032     node = elems;              /* next internal node of the tree */
   2033     do {
   2034         pqremove(s, tree, n);  /* n = node of least frequency */
   2035         m = s->heap[SMALLEST]; /* m = node of next least frequency */
   2036 
   2037         s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
   2038         s->heap[--(s->heap_max)] = m;
   2039 
   2040         /* Create a new node father of n and m */
   2041         tree[node].Freq = tree[n].Freq + tree[m].Freq;
   2042         s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
   2043         tree[n].Dad = tree[m].Dad = (ush)node;
   2044 #ifdef DUMP_BL_TREE
   2045         if (tree == s->bl_tree) {
   2046             fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
   2047                     node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
   2048         }
   2049 #endif
   2050         /* and insert the new node in the heap */
   2051         s->heap[SMALLEST] = node++;
   2052         pqdownheap(s, tree, SMALLEST);
   2053 
   2054     } while (s->heap_len >= 2);
   2055 
   2056     s->heap[--(s->heap_max)] = s->heap[SMALLEST];
   2057 
   2058     /* At this point, the fields freq and dad are set. We can now
   2059      * generate the bit lengths.
   2060      */
   2061     gen_bitlen(s, (tree_desc *)desc);
   2062 
   2063     /* The field len is now set, we can generate the bit codes */
   2064     gen_codes ((ct_data *)tree, max_code, s->bl_count);
   2065 }
   2066 
   2067 /* ===========================================================================
   2068  * Scan a literal or distance tree to determine the frequencies of the codes
   2069  * in the bit length tree.
   2070  */
   2071 local void scan_tree (s, tree, max_code)
   2072     deflate_state *s;
   2073     ct_data *tree;   /* the tree to be scanned */
   2074     int max_code;    /* and its largest code of non zero frequency */
   2075 {
   2076     int n;                     /* iterates over all tree elements */
   2077     int prevlen = -1;          /* last emitted length */
   2078     int curlen;                /* length of current code */
   2079     int nextlen = tree[0].Len; /* length of next code */
   2080     int count = 0;             /* repeat count of the current code */
   2081     int max_count = 7;         /* max repeat count */
   2082     int min_count = 4;         /* min repeat count */
   2083 
   2084     if (nextlen == 0) max_count = 138, min_count = 3;
   2085     tree[max_code+1].Len = (ush)0xffff; /* guard */
   2086 
   2087     for (n = 0; n <= max_code; n++) {
   2088         curlen = nextlen; nextlen = tree[n+1].Len;
   2089         if (++count < max_count && curlen == nextlen) {
   2090             continue;
   2091         } else if (count < min_count) {
   2092             s->bl_tree[curlen].Freq += count;
   2093         } else if (curlen != 0) {
   2094             if (curlen != prevlen) s->bl_tree[curlen].Freq++;
   2095             s->bl_tree[REP_3_6].Freq++;
   2096         } else if (count <= 10) {
   2097             s->bl_tree[REPZ_3_10].Freq++;
   2098         } else {
   2099             s->bl_tree[REPZ_11_138].Freq++;
   2100         }
   2101         count = 0; prevlen = curlen;
   2102         if (nextlen == 0) {
   2103             max_count = 138, min_count = 3;
   2104         } else if (curlen == nextlen) {
   2105             max_count = 6, min_count = 3;
   2106         } else {
   2107             max_count = 7, min_count = 4;
   2108         }
   2109     }
   2110 }
   2111 
   2112 /* ===========================================================================
   2113  * Send a literal or distance tree in compressed form, using the codes in
   2114  * bl_tree.
   2115  */
   2116 local void send_tree (s, tree, max_code)
   2117     deflate_state *s;
   2118     ct_data *tree; /* the tree to be scanned */
   2119     int max_code;       /* and its largest code of non zero frequency */
   2120 {
   2121     int n;                     /* iterates over all tree elements */
   2122     int prevlen = -1;          /* last emitted length */
   2123     int curlen;                /* length of current code */
   2124     int nextlen = tree[0].Len; /* length of next code */
   2125     int count = 0;             /* repeat count of the current code */
   2126     int max_count = 7;         /* max repeat count */
   2127     int min_count = 4;         /* min repeat count */
   2128 
   2129     /* tree[max_code+1].Len = -1; */  /* guard already set */
   2130     if (nextlen == 0) max_count = 138, min_count = 3;
   2131 
   2132     for (n = 0; n <= max_code; n++) {
   2133         curlen = nextlen; nextlen = tree[n+1].Len;
   2134         if (++count < max_count && curlen == nextlen) {
   2135             continue;
   2136         } else if (count < min_count) {
   2137             do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
   2138 
   2139         } else if (curlen != 0) {
   2140             if (curlen != prevlen) {
   2141                 send_code(s, curlen, s->bl_tree); count--;
   2142             }
   2143             Assert(count >= 3 && count <= 6, " 3_6?");
   2144             send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
   2145 
   2146         } else if (count <= 10) {
   2147             send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
   2148 
   2149         } else {
   2150             send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
   2151         }
   2152         count = 0; prevlen = curlen;
   2153         if (nextlen == 0) {
   2154             max_count = 138, min_count = 3;
   2155         } else if (curlen == nextlen) {
   2156             max_count = 6, min_count = 3;
   2157         } else {
   2158             max_count = 7, min_count = 4;
   2159         }
   2160     }
   2161 }
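
/* ===========================================================================
 * Editor's note -- worked example, not part of zlib.  For the code length
 * sequence 4,4,4,4,4,0,0,0,2 the scan_tree()/send_tree() pair above emits
 *     length 4, then REP_3_6 with 2 extra bits = 1   (4 repeated 4 more times)
 *     REPZ_3_10 with 3 extra bits = 0                (a run of 3 zero lengths)
 *     length 2                                       (run shorter than min_count)
 * which is the run-length scheme whose symbols are counted by scan_tree()
 * and coded by send_tree().
 */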
   2162 
   2163 /* ===========================================================================
   2164  * Construct the Huffman tree for the bit lengths and return the index in
   2165  * bl_order of the last bit length code to send.
   2166  */
   2167 local int build_bl_tree(s)
   2168     deflate_state *s;
   2169 {
   2170     int max_blindex;  /* index of last bit length code of non zero freq */
   2171 
   2172     /* Determine the bit length frequencies for literal and distance trees */
   2173     scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
   2174     scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
   2175 
   2176     /* Build the bit length tree: */
   2177     build_tree(s, (tree_desc *)(&(s->bl_desc)));
   2178     /* opt_len now includes the length of the tree representations, except
   2179      * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
   2180      */
   2181 
   2182     /* Determine the number of bit length codes to send. The pkzip format
   2183      * requires that at least 4 bit length codes be sent. (appnote.txt says
   2184      * 3 but the actual value used is 4.)
   2185      */
   2186     for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
   2187         if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
   2188     }
   2189     /* Update opt_len to include the bit length tree and counts */
   2190     s->opt_len += 3*(max_blindex+1) + 5+5+4;
   2191     Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
   2192             s->opt_len, s->static_len));
   2193 
   2194     return max_blindex;
   2195 }
   2196 
   2197 /* ===========================================================================
   2198  * Send the header for a block using dynamic Huffman trees: the counts, the
   2199  * lengths of the bit length codes, the literal tree and the distance tree.
   2200  * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
   2201  */
   2202 local void send_all_trees(s, lcodes, dcodes, blcodes)
   2203     deflate_state *s;
   2204     int lcodes, dcodes, blcodes; /* number of codes for each tree */
   2205 {
   2206     int rank;                    /* index in bl_order */
   2207 
   2208     Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
   2209     Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
   2210             "too many codes");
   2211     Tracev((stderr, "\nbl counts: "));
   2212     send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
   2213     send_bits(s, dcodes-1,   5);
   2214     send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
   2215     for (rank = 0; rank < blcodes; rank++) {
   2216         Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
   2217         send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
   2218     }
   2219     Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
   2220 
   2221     send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
   2222     Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
   2223 
   2224     send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
   2225     Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
   2226 }
   2227 
   2228 /* ===========================================================================
   2229  * Send a stored block
   2230  */
   2231 local void ct_stored_block(s, buf, stored_len, eof)
   2232     deflate_state *s;
   2233     charf *buf;       /* input block */
   2234     ulg stored_len;   /* length of input block */
   2235     int eof;          /* true if this is the last block for a file */
   2236 {
   2237     send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
   2238     s->compressed_len = (s->compressed_len + 3 + 7) & ~7L;
   2239     s->compressed_len += (stored_len + 4) << 3;
   2240 
   2241     copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
   2242 }
   2243 
   2244 /* Send just the `stored block' type code without any length bytes or data.
   2245  */
   2246 local void ct_stored_type_only(s)
   2247     deflate_state *s;
   2248 {
   2249     send_bits(s, (STORED_BLOCK << 1), 3);
   2250     bi_windup(s);
   2251     s->compressed_len = (s->compressed_len + 3) & ~7L;
   2252 }
   2253 
   2254 
   2255 /* ===========================================================================
   2256  * Send one empty static block to give enough lookahead for inflate.
   2257  * This takes 10 bits, of which 7 may remain in the bit buffer.
   2258  * The current inflate code requires 9 bits of lookahead. If the EOB
   2259  * code for the previous block was coded on 5 bits or less, inflate
   2260  * may have only 5+3 bits of lookahead to decode this EOB.
   2261  * (There are no problems if the previous block is stored or fixed.)
   2262  */
   2263 local void ct_align(s)
   2264     deflate_state *s;
   2265 {
   2266     send_bits(s, STATIC_TREES<<1, 3);
   2267     send_code(s, END_BLOCK, static_ltree);
   2268     s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
   2269     bi_flush(s);
   2270     /* Of the 10 bits for the empty block, we have already sent
   2271      * (10 - bi_valid) bits. The lookahead for the EOB of the previous
   2272      * block was thus its length plus what we have just sent.
   2273      */
   2274     if (s->last_eob_len + 10 - s->bi_valid < 9) {
   2275         send_bits(s, STATIC_TREES<<1, 3);
   2276         send_code(s, END_BLOCK, static_ltree);
   2277         s->compressed_len += 10L;
   2278         bi_flush(s);
   2279     }
   2280     s->last_eob_len = 7;
   2281 }
   2282 
   2283 /* ===========================================================================
   2284  * Determine the best encoding for the current block: dynamic trees, static
   2285  * trees or store, and output the encoded block to the zip file. This function
   2286  * returns the total compressed length for the file so far.
   2287  */
   2288 local ulg ct_flush_block(s, buf, stored_len, flush)
   2289     deflate_state *s;
   2290     charf *buf;       /* input block, or NULL if too old */
   2291     ulg stored_len;   /* length of input block */
   2292     int flush;        /* Z_FINISH if this is the last block for a file */
   2293 {
   2294     ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
   2295     int max_blindex;  /* index of last bit length code of non zero freq */
   2296     int eof = flush == Z_FINISH;
   2297 
   2298     ++s->blocks_in_packet;
   2299 
   2300     /* Check if the file is ascii or binary */
   2301     if (s->data_type == UNKNOWN) set_data_type(s);
   2302 
   2303     /* Construct the literal and distance trees */
   2304     build_tree(s, (tree_desc *)(&(s->l_desc)));
   2305     Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
   2306             s->static_len));
   2307 
   2308     build_tree(s, (tree_desc *)(&(s->d_desc)));
   2309     Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
   2310             s->static_len));
   2311     /* At this point, opt_len and static_len are the total bit lengths of
   2312      * the compressed block data, excluding the tree representations.
   2313      */
   2314 
   2315     /* Build the bit length tree for the above two trees, and get the index
   2316      * in bl_order of the last bit length code to send.
   2317      */
   2318     max_blindex = build_bl_tree(s);
   2319 
   2320     /* Determine the best encoding. Compute first the block length in bytes */
   2321     opt_lenb = (s->opt_len+3+7)>>3;
   2322     static_lenb = (s->static_len+3+7)>>3;
   2323 
   2324     Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
   2325             opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
   2326             s->last_lit));
   2327 
   2328     if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
   2329 
   2330     /* If compression failed and this is the first and last block,
   2331      * and if the .zip file is seekable (to rewrite the local header),
   2332      * the whole file is transformed into a stored file:
   2333      */
   2334 #ifdef STORED_FILE_OK
   2335 #  ifdef FORCE_STORED_FILE
   2336     if (eof && s->compressed_len == 0L) /* force stored file */
   2337 #  else
   2338     if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable())
   2339 #  endif
   2340     {
   2341         /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
   2342         if (buf == (charf*)0) error ("block vanished");
   2343 
   2344         copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
   2345         s->compressed_len = stored_len << 3;
   2346         s->method = STORED;
   2347     } else
   2348 #endif /* STORED_FILE_OK */
   2349 
   2350     /* For Z_PACKET_FLUSH, if we don't achieve the required minimum
   2351      * compression, and this block contains all the data since the last
   2352      * time we used Z_PACKET_FLUSH, then just omit this block completely
   2353      * from the output.
   2354      */
   2355     if (flush == Z_PACKET_FLUSH && s->blocks_in_packet == 1
   2356 	&& opt_lenb > stored_len - s->minCompr) {
   2357 	s->blocks_in_packet = 0;
   2358 	/* output nothing */
   2359     } else
   2360 
   2361 #ifdef FORCE_STORED
   2362     if (buf != (char*)0) /* force stored block */
   2363 #else
   2364     if (stored_len+4 <= opt_lenb && buf != (char*)0)
   2365                        /* 4: two words for the lengths */
   2366 #endif
   2367     {
   2368         /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
   2369          * Otherwise we can't have processed more than WSIZE input bytes since
   2370          * the last block flush, because compression would have been
   2371          * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
   2372          * transform a block into a stored block.
   2373          */
   2374         ct_stored_block(s, buf, stored_len, eof);
   2375     } else
   2376 
   2377 #ifdef FORCE_STATIC
   2378     if (static_lenb >= 0) /* force static trees */
   2379 #else
   2380     if (static_lenb == opt_lenb)
   2381 #endif
   2382     {
   2383         send_bits(s, (STATIC_TREES<<1)+eof, 3);
   2384         compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
   2385         s->compressed_len += 3 + s->static_len;
   2386     } else {
   2387         send_bits(s, (DYN_TREES<<1)+eof, 3);
   2388         send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
   2389                        max_blindex+1);
   2390         compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
   2391         s->compressed_len += 3 + s->opt_len;
   2392     }
   2393     Assert (s->compressed_len == s->bits_sent, "bad compressed size");
   2394     init_block(s);
   2395 
   2396     if (eof) {
   2397         bi_windup(s);
   2398         s->compressed_len += 7;  /* align on byte boundary */
   2399     }
   2400     Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
   2401            s->compressed_len-7*eof));
   2402 
   2403     return s->compressed_len >> 3;
   2404 }
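
/* ===========================================================================
 * Editor's note -- illustrative sketch, not part of zlib.  Stripped of the
 * Z_PACKET_FLUSH handling, the FORCE_* overrides and the "buf != NULL" test,
 * the choice made by ct_flush_block() above reduces to: store the block if
 * that is no longer than the best compressed form (counting the 4 header
 * bytes for LEN/NLEN), otherwise use static trees if they are as short as
 * the dynamic ones, otherwise use dynamic trees.  Guard macro and helper
 * name are hypothetical.
 */
#ifdef ZLIB_EDITOR_SKETCHES
local int choose_block_type OF((ulg stored_len, ulg static_lenb, ulg opt_lenb));

local int choose_block_type(stored_len, static_lenb, opt_lenb)
    ulg stored_len;    /* uncompressed block length, in bytes */
    ulg static_lenb;   /* compressed length with static trees, in bytes */
    ulg opt_lenb;      /* compressed length with dynamic trees, in bytes */
{
    if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
    if (stored_len + 4 <= opt_lenb) return STORED_BLOCK;
    if (static_lenb == opt_lenb)    return STATIC_TREES;
    return DYN_TREES;
}
#endif /* ZLIB_EDITOR_SKETCHES */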
   2405 
   2406 /* ===========================================================================
   2407  * Save the match info and tally the frequency counts. Return true if
   2408  * the current block must be flushed.
   2409  */
   2410 local int ct_tally (s, dist, lc)
   2411     deflate_state *s;
   2412     int dist;  /* distance of matched string */
   2413     int lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
   2414 {
   2415     s->d_buf[s->last_lit] = (ush)dist;
   2416     s->l_buf[s->last_lit++] = (uch)lc;
   2417     if (dist == 0) {
   2418         /* lc is the unmatched char */
   2419         s->dyn_ltree[lc].Freq++;
   2420     } else {
   2421         s->matches++;
   2422         /* Here, lc is the match length - MIN_MATCH */
   2423         dist--;             /* dist = match distance - 1 */
   2424         Assert((ush)dist < (ush)MAX_DIST(s) &&
   2425                (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
   2426                (ush)d_code(dist) < (ush)D_CODES,  "ct_tally: bad match");
   2427 
   2428         s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
   2429         s->dyn_dtree[d_code(dist)].Freq++;
   2430     }
   2431 
   2432     /* Try to guess if it is profitable to stop the current block here */
   2433     if (s->level > 2 && (s->last_lit & 0xfff) == 0) {
   2434         /* Compute an upper bound for the compressed length */
   2435         ulg out_length = (ulg)s->last_lit*8L;
   2436         ulg in_length = (ulg)s->strstart - s->block_start;
   2437         int dcode;
   2438         for (dcode = 0; dcode < D_CODES; dcode++) {
   2439             out_length += (ulg)s->dyn_dtree[dcode].Freq *
   2440                 (5L+extra_dbits[dcode]);
   2441         }
   2442         out_length >>= 3;
   2443         Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
   2444                s->last_lit, in_length, out_length,
   2445                100L - out_length*100L/in_length));
   2446         if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
   2447     }
   2448     return (s->last_lit == s->lit_bufsize-1);
   2449     /* We avoid equality with lit_bufsize because of wraparound at 64K
   2450      * on 16 bit machines and because stored blocks are restricted to
   2451      * 64K-1 bytes.
   2452      */
   2453 }
   2454 
   2455 /* ===========================================================================
   2456  * Send the block data compressed using the given Huffman trees
   2457  */
   2458 local void compress_block(s, ltree, dtree)
   2459     deflate_state *s;
   2460     ct_data *ltree; /* literal tree */
   2461     ct_data *dtree; /* distance tree */
   2462 {
   2463     unsigned dist;      /* distance of matched string */
   2464     int lc;             /* match length or unmatched char (if dist == 0) */
   2465     unsigned lx = 0;    /* running index in l_buf */
   2466     unsigned code;      /* the code to send */
   2467     int extra;          /* number of extra bits to send */
   2468 
   2469     if (s->last_lit != 0) do {
   2470         dist = s->d_buf[lx];
   2471         lc = s->l_buf[lx++];
   2472         if (dist == 0) {
   2473             send_code(s, lc, ltree); /* send a literal byte */
   2474             Tracecv(isgraph(lc), (stderr," '%c' ", lc));
   2475         } else {
   2476             /* Here, lc is the match length - MIN_MATCH */
   2477             code = length_code[lc];
   2478             send_code(s, code+LITERALS+1, ltree); /* send the length code */
   2479             extra = extra_lbits[code];
   2480             if (extra != 0) {
   2481                 lc -= base_length[code];
   2482                 send_bits(s, lc, extra);       /* send the extra length bits */
   2483             }
   2484             dist--; /* dist is now the match distance - 1 */
   2485             code = d_code(dist);
   2486             Assert (code < D_CODES, "bad d_code");
   2487 
   2488             send_code(s, code, dtree);       /* send the distance code */
   2489             extra = extra_dbits[code];
   2490             if (extra != 0) {
   2491                 dist -= base_dist[code];
   2492                 send_bits(s, dist, extra);   /* send the extra distance bits */
   2493             }
   2494         } /* literal or match pair ? */
   2495 
   2496         /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
   2497         Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");
   2498 
   2499     } while (lx < s->last_lit);
   2500 
   2501     send_code(s, END_BLOCK, ltree);
   2502     s->last_eob_len = ltree[END_BLOCK].Len;
   2503 }
   2504 
   2505 /* ===========================================================================
   2506  * Set the data type to ASCII or BINARY, using a crude approximation:
   2507  * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
   2508  * IN assertion: the fields freq of dyn_ltree are set and the total of all
   2509  * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
   2510  */
   2511 local void set_data_type(s)
   2512     deflate_state *s;
   2513 {
   2514     int n = 0;
   2515     unsigned ascii_freq = 0;
   2516     unsigned bin_freq = 0;
   2517     while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
   2518     while (n < 128)    ascii_freq += s->dyn_ltree[n++].Freq;
   2519     while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
   2520     s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? BINARY : ASCII);
   2521 }
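
/* ===========================================================================
 * Editor's note -- illustrative sketch, not part of zlib.  The same
 * heuristic as set_data_type(), applied directly to a raw byte buffer
 * instead of to the literal frequencies: the data is called binary when the
 * binary-looking bytes (<= 6 or >= 128) exceed a quarter of the
 * ASCII-looking ones (7..127), i.e. roughly 20% of the total.  Guard macro
 * and helper name are hypothetical.
 */
#ifdef ZLIB_EDITOR_SKETCHES
local int looks_binary OF((uch *buf, unsigned len));

local int looks_binary(buf, len)
    uch *buf;        /* data to classify */
    unsigned len;    /* number of bytes */
{
    unsigned ascii_freq = 0, bin_freq = 0;
    while (len-- != 0) {
        uch c = *buf++;
        if (c <= 6 || c >= 128) bin_freq++;
        else                    ascii_freq++;
    }
    return bin_freq > (ascii_freq >> 2);
}
#endif /* ZLIB_EDITOR_SKETCHES */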
   2522 
   2523 /* ===========================================================================
   2524  * Reverse the first len bits of a code, using straightforward code (a faster
   2525  * method would use a table)
   2526  * IN assertion: 1 <= len <= 15
   2527  */
   2528 local unsigned bi_reverse(code, len)
   2529     unsigned code; /* the value to invert */
   2530     int len;       /* its bit length */
   2531 {
   2532     register unsigned res = 0;
   2533     do {
   2534         res |= code & 1;
   2535         code >>= 1, res <<= 1;
   2536     } while (--len > 0);
   2537     return res >> 1;
   2538 }
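
/* ===========================================================================
 * Editor's note -- illustrative sketch, not part of zlib.  The comment above
 * notes that a table would be faster: with a 256-entry byte-reversal table a
 * code of up to 15 bits can be reversed by swapping its two bytes through
 * the table and dropping the (16 - len) surplus low bits.  Guard macro,
 * table and helper names are hypothetical; the table itself would still have
 * to be initialized, e.g. with bi_reverse(n, 8) for n = 0..255.
 */
#ifdef ZLIB_EDITOR_SKETCHES
local uch rev8[256];    /* rev8[n] == bi_reverse(n, 8), filled at init time */

local unsigned bi_reverse_table OF((unsigned code, int len));

local unsigned bi_reverse_table(code, len)
    unsigned code;   /* the value to invert; fits in len bits */
    int len;         /* 1 <= len <= 15 */
{
    unsigned r = ((unsigned)rev8[code & 0xff] << 8) | rev8[(code >> 8) & 0xff];
    return r >> (16 - len);
}
#endif /* ZLIB_EDITOR_SKETCHES */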
   2539 
   2540 /* ===========================================================================
   2541  * Flush the bit buffer, keeping at most 7 bits in it.
   2542  */
   2543 local void bi_flush(s)
   2544     deflate_state *s;
   2545 {
   2546     if (s->bi_valid == 16) {
   2547         put_short(s, s->bi_buf);
   2548         s->bi_buf = 0;
   2549         s->bi_valid = 0;
   2550     } else if (s->bi_valid >= 8) {
   2551         put_byte(s, (Byte)s->bi_buf);
   2552         s->bi_buf >>= 8;
   2553         s->bi_valid -= 8;
   2554     }
   2555 }
   2556 
   2557 /* ===========================================================================
   2558  * Flush the bit buffer and align the output on a byte boundary
   2559  */
   2560 local void bi_windup(s)
   2561     deflate_state *s;
   2562 {
   2563     if (s->bi_valid > 8) {
   2564         put_short(s, s->bi_buf);
   2565     } else if (s->bi_valid > 0) {
   2566         put_byte(s, (Byte)s->bi_buf);
   2567     }
   2568     s->bi_buf = 0;
   2569     s->bi_valid = 0;
   2570 #ifdef DEBUG_ZLIB
   2571     s->bits_sent = (s->bits_sent+7) & ~7;
   2572 #endif
   2573 }
   2574 
   2575 /* ===========================================================================
   2576  * Copy a stored block, storing first the length and its
   2577  * one's complement if requested.
   2578  */
   2579 local void copy_block(s, buf, len, header)
   2580     deflate_state *s;
   2581     charf    *buf;    /* the input data */
   2582     unsigned len;     /* its length */
   2583     int      header;  /* true if block header must be written */
   2584 {
   2585     bi_windup(s);        /* align on byte boundary */
   2586     s->last_eob_len = 8; /* enough lookahead for inflate */
   2587 
   2588     if (header) {
   2589         put_short(s, (ush)len);
   2590         put_short(s, (ush)~len);
   2591 #ifdef DEBUG_ZLIB
   2592         s->bits_sent += 2*16;
   2593 #endif
   2594     }
   2595 #ifdef DEBUG_ZLIB
   2596     s->bits_sent += (ulg)len<<3;
   2597 #endif
   2598     while (len--) {
   2599         put_byte(s, *buf++);
   2600     }
   2601 }
   2602 
   2603 
   2604 /*+++++*/
   2605 /* infblock.h -- header to use infblock.c
   2606  * Copyright (C) 1995 Mark Adler
   2607  * For conditions of distribution and use, see copyright notice in zlib.h
   2608  */
   2609 
   2610 /* WARNING: this file should *not* be used by applications. It is
   2611    part of the implementation of the compression library and is
   2612    subject to change. Applications should only use zlib.h.
   2613  */
   2614 
   2615 struct inflate_blocks_state;
   2616 typedef struct inflate_blocks_state FAR inflate_blocks_statef;
   2617 
   2618 local inflate_blocks_statef * inflate_blocks_new OF((
   2619     z_stream *z,
   2620     check_func c,               /* check function */
   2621     uInt w));                   /* window size */
   2622 
   2623 local int inflate_blocks OF((
   2624     inflate_blocks_statef *,
   2625     z_stream *,
   2626     int));                      /* initial return code */
   2627 
   2628 local void inflate_blocks_reset OF((
   2629     inflate_blocks_statef *,
   2630     z_stream *,
   2631     uLongf *));                  /* check value on output */
   2632 
   2633 local int inflate_blocks_free OF((
   2634     inflate_blocks_statef *,
   2635     z_stream *,
   2636     uLongf *));                  /* check value on output */
   2637 
   2638 local int inflate_addhistory OF((
   2639     inflate_blocks_statef *,
   2640     z_stream *));
   2641 
   2642 local int inflate_packet_flush OF((
   2643     inflate_blocks_statef *));
   2644 
   2645 /*+++++*/
   2646 /* inftrees.h -- header to use inftrees.c
   2647  * Copyright (C) 1995 Mark Adler
   2648  * For conditions of distribution and use, see copyright notice in zlib.h
   2649  */
   2650 
   2651 /* WARNING: this file should *not* be used by applications. It is
   2652    part of the implementation of the compression library and is
   2653    subject to change. Applications should only use zlib.h.
   2654  */
   2655 
   2656 /* Huffman code lookup table entry--this entry is four bytes for machines
   2657    that have 16-bit pointers (e.g. PC's in the small or medium model). */
   2658 
   2659 typedef struct inflate_huft_s FAR inflate_huft;
   2660 
   2661 struct inflate_huft_s {
   2662   union {
   2663     struct {
   2664       Byte Exop;        /* number of extra bits or operation */
   2665       Byte Bits;        /* number of bits in this code or subcode */
   2666     } what;
   2667     uInt Nalloc;	/* number of these allocated here */
   2668     Bytef *pad;         /* pad structure to a power of 2 (4 bytes for */
   2669   } word;               /*  16-bit, 8 bytes for 32-bit machines) */
   2670   union {
   2671     uInt Base;          /* literal, length base, or distance base */
   2672     inflate_huft *Next; /* pointer to next level of table */
   2673   } more;
   2674 };
   2675 
   2676 #ifdef DEBUG_ZLIB
   2677   local uInt inflate_hufts;
   2678 #endif
   2679 
   2680 local int inflate_trees_bits OF((
   2681     uIntf *,                    /* 19 code lengths */
   2682     uIntf *,                    /* bits tree desired/actual depth */
   2683     inflate_huft * FAR *,       /* bits tree result */
   2684     z_stream *));               /* for zalloc, zfree functions */
   2685 
   2686 local int inflate_trees_dynamic OF((
   2687     uInt,                       /* number of literal/length codes */
   2688     uInt,                       /* number of distance codes */
   2689     uIntf *,                    /* that many (total) code lengths */
   2690     uIntf *,                    /* literal desired/actual bit depth */
   2691     uIntf *,                    /* distance desired/actual bit depth */
   2692     inflate_huft * FAR *,       /* literal/length tree result */
   2693     inflate_huft * FAR *,       /* distance tree result */
   2694     z_stream *));               /* for zalloc, zfree functions */
   2695 
   2696 local int inflate_trees_fixed OF((
   2697     uIntf *,                    /* literal desired/actual bit depth */
   2698     uIntf *,                    /* distance desired/actual bit depth */
   2699     inflate_huft * FAR *,       /* literal/length tree result */
   2700     inflate_huft * FAR *));     /* distance tree result */
   2701 
   2702 local int inflate_trees_free OF((
   2703     inflate_huft *,             /* tables to free */
   2704     z_stream *));               /* for zfree function */
   2705 
   2706 
   2707 /*+++++*/
   2708 /* infcodes.h -- header to use infcodes.c
   2709  * Copyright (C) 1995 Mark Adler
   2710  * For conditions of distribution and use, see copyright notice in zlib.h
   2711  */
   2712 
   2713 /* WARNING: this file should *not* be used by applications. It is
   2714    part of the implementation of the compression library and is
   2715    subject to change. Applications should only use zlib.h.
   2716  */
   2717 
   2718 struct inflate_codes_state;
   2719 typedef struct inflate_codes_state FAR inflate_codes_statef;
   2720 
   2721 local inflate_codes_statef *inflate_codes_new OF((
   2722     uInt, uInt,
   2723     inflate_huft *, inflate_huft *,
   2724     z_stream *));
   2725 
   2726 local int inflate_codes OF((
   2727     inflate_blocks_statef *,
   2728     z_stream *,
   2729     int));
   2730 
   2731 local void inflate_codes_free OF((
   2732     inflate_codes_statef *,
   2733     z_stream *));
   2734 
   2735 
   2736 /*+++++*/
   2737 /* inflate.c -- zlib interface to inflate modules
   2738  * Copyright (C) 1995 Mark Adler
   2739  * For conditions of distribution and use, see copyright notice in zlib.h
   2740  */
   2741 
   2742 /* inflate private state */
   2743 struct internal_state {
   2744 
   2745   /* mode */
   2746   enum {
   2747       METHOD,   /* waiting for method byte */
   2748       FLAG,     /* waiting for flag byte */
   2749       BLOCKS,   /* decompressing blocks */
   2750       CHECK4,   /* four check bytes to go */
   2751       CHECK3,   /* three check bytes to go */
   2752       CHECK2,   /* two check bytes to go */
   2753       CHECK1,   /* one check byte to go */
   2754       DONE,     /* finished check, done */
   2755       BAD}      /* got an error--stay here */
   2756     mode;               /* current inflate mode */
   2757 
   2758   /* mode dependent information */
   2759   union {
   2760     uInt method;        /* if FLAG, method byte */
   2761     struct {
   2762       uLong was;                /* computed check value */
   2763       uLong need;               /* stream check value */
   2764     } check;            /* if CHECK, check values to compare */
   2765     uInt marker;        /* if BAD, inflateSync's marker bytes count */
   2766   } sub;        /* submode */
   2767 
   2768   /* mode independent information */
   2769   int  nowrap;          /* flag for no wrapper */
   2770   uInt wbits;           /* log2(window size)  (8..15, defaults to 15) */
   2771   inflate_blocks_statef
   2772     *blocks;            /* current inflate_blocks state */
   2773 
   2774 };
   2775 
   2776 
   2777 int inflateReset(z)
   2778 z_stream *z;
   2779 {
   2780   uLong c;
   2781 
   2782   if (z == Z_NULL || z->state == Z_NULL)
   2783     return Z_STREAM_ERROR;
   2784   z->total_in = z->total_out = 0;
   2785   z->msg = Z_NULL;
   2786   z->state->mode = z->state->nowrap ? BLOCKS : METHOD;
   2787   inflate_blocks_reset(z->state->blocks, z, &c);
   2788   Trace((stderr, "inflate: reset\n"));
   2789   return Z_OK;
   2790 }
   2791 
   2792 
   2793 int inflateEnd(z)
   2794 z_stream *z;
   2795 {
   2796   uLong c;
   2797 
   2798   if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
   2799     return Z_STREAM_ERROR;
   2800   if (z->state->blocks != Z_NULL)
   2801     inflate_blocks_free(z->state->blocks, z, &c);
   2802   ZFREE(z, z->state, sizeof(struct internal_state));
   2803   z->state = Z_NULL;
   2804   Trace((stderr, "inflate: end\n"));
   2805   return Z_OK;
   2806 }
   2807 
   2808 
   2809 int inflateInit2(z, w)
   2810 z_stream *z;
   2811 int w;
   2812 {
   2813   /* initialize state */
   2814   if (z == Z_NULL)
   2815     return Z_STREAM_ERROR;
   2816 /*  if (z->zalloc == Z_NULL) z->zalloc = zcalloc; */
   2817 /*  if (z->zfree == Z_NULL) z->zfree = zcfree; */
   2818   if ((z->state = (struct internal_state FAR *)
   2819        ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL)
   2820     return Z_MEM_ERROR;
   2821   z->state->blocks = Z_NULL;
   2822 
   2823   /* handle undocumented nowrap option (no zlib header or check) */
   2824   z->state->nowrap = 0;
   2825   if (w < 0)
   2826   {
   2827     w = - w;
   2828     z->state->nowrap = 1;
   2829   }
   2830 
   2831   /* set window size */
   2832   if (w < 8 || w > 15)
   2833   {
   2834     inflateEnd(z);
   2835     return Z_STREAM_ERROR;
   2836   }
   2837   z->state->wbits = (uInt)w;
   2838 
   2839   /* create inflate_blocks state */
   2840   if ((z->state->blocks =
   2841        inflate_blocks_new(z, z->state->nowrap ? Z_NULL : adler32, 1 << w))
   2842       == Z_NULL)
   2843   {
   2844     inflateEnd(z);
   2845     return Z_MEM_ERROR;
   2846   }
   2847   Trace((stderr, "inflate: allocated\n"));
   2848 
   2849   /* reset state */
   2850   inflateReset(z);
   2851   return Z_OK;
   2852 }
   2853 
   2854 
   2855 int inflateInit(z)
   2856 z_stream *z;
   2857 {
   2858   return inflateInit2(z, DEF_WBITS);
   2859 }
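
        /* Illustrative usage sketch, not part of the library.  Because the
         * default allocators are commented out in inflateInit2() above, a
         * caller must install zalloc/zfree before inflateInit(), and
         * inflateEnd() insists on a non-NULL zfree.  Passing 0 as the flush
         * argument simply means no Z_PACKET_FLUSH.  All "example_" names and
         * the buffer parameters are hypothetical, for a userland caller
         * rather than the kernel.
         */
        #if 0
        #include <stdlib.h>

        local voidpf example_zalloc(opaque, items, size)
            voidpf opaque;
            uInt items;
            uInt size;
        {
            return (voidpf)malloc((size_t)items * size);
        }

        local void example_zfree(opaque, ptr, nbytes)
            voidpf opaque;
            voidpf ptr;
            uInt nbytes;             /* not needed by malloc/free */
        {
            free(ptr);
        }

        local int example_inflate_buffer(src, srclen, dst, dstlen)
            Bytef *src;  uInt srclen;   /* whole compressed stream */
            Bytef *dst;  uInt dstlen;   /* large enough output buffer */
        {
            z_stream z;
            int r;

            z.zalloc = example_zalloc;
            z.zfree  = example_zfree;
            z.opaque = Z_NULL;
            if (inflateInit(&z) != Z_OK)
                return Z_MEM_ERROR;

            z.next_in  = src;  z.avail_in  = srclen;
            z.next_out = dst;  z.avail_out = dstlen;
            do {
                r = inflate(&z, 0);
            } while (r == Z_OK && z.avail_in != 0 && z.avail_out != 0);

            inflateEnd(&z);
            return r;            /* Z_STREAM_END for a complete, valid stream */
        }
        #endif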
   2860 
   2861 
   2862 #define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}
   2863 #define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)
   2864 
   2865 int inflate(z, f)
   2866 z_stream *z;
   2867 int f;
   2868 {
   2869   int r;
   2870   uInt b;
   2871 
   2872   if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL)
   2873     return Z_STREAM_ERROR;
   2874   r = Z_BUF_ERROR;
   2875   while (1) switch (z->state->mode)
   2876   {
   2877     case METHOD:
   2878       NEEDBYTE
   2879       if (((z->state->sub.method = NEXTBYTE) & 0xf) != DEFLATED)
   2880       {
   2881         z->state->mode = BAD;
   2882         z->msg = "unknown compression method";
   2883         z->state->sub.marker = 5;       /* can't try inflateSync */
   2884         break;
   2885       }
   2886       if ((z->state->sub.method >> 4) + 8 > z->state->wbits)
   2887       {
   2888         z->state->mode = BAD;
   2889         z->msg = "invalid window size";
   2890         z->state->sub.marker = 5;       /* can't try inflateSync */
   2891         break;
   2892       }
   2893       z->state->mode = FLAG;
   2894     case FLAG:
   2895       NEEDBYTE
   2896       if ((b = NEXTBYTE) & 0x20)
   2897       {
   2898         z->state->mode = BAD;
   2899         z->msg = "preset dictionary not supported";
   2900         z->state->sub.marker = 5;       /* can't try inflateSync */
   2901         break;
   2902       }
   2903       if (((z->state->sub.method << 8) + b) % 31)
   2904       {
   2905         z->state->mode = BAD;
   2906         z->msg = "incorrect header check";
   2907         z->state->sub.marker = 5;       /* can't try inflateSync */
   2908         break;
   2909       }
   2910       Trace((stderr, "inflate: zlib header ok\n"));
   2911       z->state->mode = BLOCKS;
   2912     case BLOCKS:
   2913       r = inflate_blocks(z->state->blocks, z, r);
   2914       if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)
   2915 	  r = inflate_packet_flush(z->state->blocks);
   2916       if (r == Z_DATA_ERROR)
   2917       {
   2918         z->state->mode = BAD;
   2919         z->state->sub.marker = 0;       /* can try inflateSync */
   2920         break;
   2921       }
   2922       if (r != Z_STREAM_END)
   2923         return r;
   2924       r = Z_OK;
   2925       inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);
   2926       if (z->state->nowrap)
   2927       {
   2928         z->state->mode = DONE;
   2929         break;
   2930       }
   2931       z->state->mode = CHECK4;
   2932     case CHECK4:
   2933       NEEDBYTE
   2934       z->state->sub.check.need = (uLong)NEXTBYTE << 24;
   2935       z->state->mode = CHECK3;
   2936     case CHECK3:
   2937       NEEDBYTE
   2938       z->state->sub.check.need += (uLong)NEXTBYTE << 16;
   2939       z->state->mode = CHECK2;
   2940     case CHECK2:
   2941       NEEDBYTE
   2942       z->state->sub.check.need += (uLong)NEXTBYTE << 8;
   2943       z->state->mode = CHECK1;
   2944     case CHECK1:
   2945       NEEDBYTE
   2946       z->state->sub.check.need += (uLong)NEXTBYTE;
   2947 
   2948       if (z->state->sub.check.was != z->state->sub.check.need)
   2949       {
   2950         z->state->mode = BAD;
   2951         z->msg = "incorrect data check";
   2952         z->state->sub.marker = 5;       /* can't try inflateSync */
   2953         break;
   2954       }
   2955       Trace((stderr, "inflate: zlib check ok\n"));
   2956       z->state->mode = DONE;
   2957     case DONE:
   2958       return Z_STREAM_END;
   2959     case BAD:
   2960       return Z_DATA_ERROR;
   2961     default:
   2962       return Z_STREAM_ERROR;
   2963   }
   2964 
   2965  empty:
   2966   if (f != Z_PACKET_FLUSH)
   2967     return r;
   2968   z->state->mode = BAD;
   2969   z->state->sub.marker = 0;       /* can try inflateSync */
   2970   return Z_DATA_ERROR;
   2971 }
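
        /* Illustrative note, not part of the library: the FLAG case above
         * implements the RFC 1950 rule that the two header bytes, read as
         * CMF*256 + FLG, must be a multiple of 31.  For example, the common
         * header 0x78 0x9C gives 0x78*256 + 0x9C = 30876 = 31*996, so it
         * passes.  The CHECK4..CHECK1 cases then read the stream's Adler-32
         * trailer, most-significant byte first, into sub.check.need.
         */
        #if 0
        local int zlib_header_ok(cmf, flg)
            uInt cmf;       /* first header byte (compression method/flags) */
            uInt flg;       /* second header byte (flags/check bits) */
        {
            return ((cmf << 8) + flg) % 31 == 0;  /* same test as case FLAG */
        }
        #endif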
   2972 
   2973 /*
   2974  * This subroutine adds the data at next_in/avail_in to the output history
   2975  * without performing any output.  The output buffer must be "caught up";
   2976  * i.e. no pending output (hence s->read equals s->write), and the state must
   2977  * be BLOCKS (i.e. we should be willing to see the start of a series of
   2978  * BLOCKS).  On exit, the output will also be caught up, and the checksum
   2979  * will have been updated if need be.
   2980  */
   2981 
   2982 int inflateIncomp(z)
   2983 z_stream *z;
   2984 {
   2985     if (z->state->mode != BLOCKS)
   2986 	return Z_DATA_ERROR;
   2987     return inflate_addhistory(z->state->blocks, z);
   2988 }
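
        /* Illustrative sketch, not part of the library: how a PPP receiver
         * might use inflateIncomp() when the peer sends a packet that was not
         * compressed.  The packet bytes must still be fed into the
         * decompressor history (with no pending output and the state in
         * BLOCKS, as described above) so that later back-references resolve.
         * The names z, pkt and pktlen are hypothetical driver state.
         */
        #if 0
        local int example_incompressible_packet(z, pkt, pktlen)
            z_stream *z;
            Bytef *pkt;     /* the uncompressed packet data */
            uInt pktlen;
        {
            z->next_in  = pkt;
            z->avail_in = pktlen;
            return inflateIncomp(z);  /* updates the window, emits no output */
        }
        #endif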
   2989 
   2990 
   2991 int inflateSync(z)
   2992 z_stream *z;
   2993 {
   2994   uInt n;       /* number of bytes to look at */
   2995   Bytef *p;     /* pointer to bytes */
   2996   uInt m;       /* number of marker bytes found in a row */
   2997   uLong r, w;   /* temporaries to save total_in and total_out */
   2998 
   2999   /* set up */
   3000   if (z == Z_NULL || z->state == Z_NULL)
   3001     return Z_STREAM_ERROR;
   3002   if (z->state->mode != BAD)
   3003   {
   3004     z->state->mode = BAD;
   3005     z->state->sub.marker = 0;
   3006   }
   3007   if ((n = z->avail_in) == 0)
   3008     return Z_BUF_ERROR;
   3009   p = z->next_in;
   3010   m = z->state->sub.marker;
   3011 
   3012   /* search */
   3013   while (n && m < 4)
   3014   {
   3015     if (*p == (Byte)(m < 2 ? 0 : 0xff))
   3016       m++;
   3017     else if (*p)
   3018       m = 0;
   3019     else
   3020       m = 4 - m;
   3021     p++, n--;
   3022   }
   3023 
   3024   /* restore */
   3025   z->total_in += p - z->next_in;
   3026   z->next_in = p;
   3027   z->avail_in = n;
   3028   z->state->sub.marker = m;
   3029 
   3030   /* return no joy or set up to restart on a new block */
   3031   if (m != 4)
   3032     return Z_DATA_ERROR;
   3033   r = z->total_in;  w = z->total_out;
   3034   inflateReset(z);
   3035   z->total_in = r;  z->total_out = w;
   3036   z->state->mode = BLOCKS;
   3037   return Z_OK;
   3038 }
   3039 
   3040 #undef NEEDBYTE
   3041 #undef NEXTBYTE
   3042 
   3043 /*+++++*/
   3044 /* infutil.h -- types and macros common to blocks and codes
   3045  * Copyright (C) 1995 Mark Adler
   3046  * For conditions of distribution and use, see copyright notice in zlib.h
   3047  */
   3048 
   3049 /* WARNING: this file should *not* be used by applications. It is
   3050    part of the implementation of the compression library and is
   3051    subject to change. Applications should only use zlib.h.
   3052  */
   3053 
   3054 /* inflate blocks semi-private state */
   3055 struct inflate_blocks_state {
   3056 
   3057   /* mode */
   3058   enum {
   3059       TYPE,     /* get type bits (3, including end bit) */
   3060       LENS,     /* get lengths for stored */
   3061       STORED,   /* processing stored block */
   3062       TABLE,    /* get table lengths */
   3063       BTREE,    /* get bit lengths tree for a dynamic block */
   3064       DTREE,    /* get length, distance trees for a dynamic block */
   3065       CODES,    /* processing fixed or dynamic block */
   3066       DRY,      /* output remaining window bytes */
   3067       DONEB,     /* finished last block, done */
   3068       BADB}      /* got a data error--stuck here */
   3069     mode;               /* current inflate_block mode */
   3070 
   3071   /* mode dependent information */
   3072   union {
   3073     uInt left;          /* if STORED, bytes left to copy */
   3074     struct {
   3075       uInt table;               /* table lengths (14 bits) */
   3076       uInt index;               /* index into blens (or border) */
   3077       uIntf *blens;             /* bit lengths of codes */
   3078       uInt bb;                  /* bit length tree depth */
   3079       inflate_huft *tb;         /* bit length decoding tree */
   3080       int nblens;		/* # elements allocated at blens */
   3081     } trees;            /* if DTREE, decoding info for trees */
   3082     struct {
   3083       inflate_huft *tl, *td;    /* trees to free */
   3084       inflate_codes_statef
   3085          *codes;
   3086     } decode;           /* if CODES, current state */
   3087   } sub;                /* submode */
   3088   uInt last;            /* true if this block is the last block */
   3089 
   3090   /* mode independent information */
   3091   uInt bitk;            /* bits in bit buffer */
   3092   uLong bitb;           /* bit buffer */
   3093   Bytef *window;        /* sliding window */
   3094   Bytef *end;           /* one byte after sliding window */
   3095   Bytef *read;          /* window read pointer */
   3096   Bytef *write;         /* window write pointer */
   3097   check_func checkfn;   /* check function */
   3098   uLong check;          /* check on output */
   3099 
   3100 };
   3101 
   3102 
   3103 /* defines for inflate input/output */
   3104 /*   update pointers and return */
   3105 #define UPDBITS {s->bitb=b;s->bitk=k;}
   3106 #define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
   3107 #define UPDOUT {s->write=q;}
   3108 #define UPDATE {UPDBITS UPDIN UPDOUT}
   3109 #define LEAVE {UPDATE return inflate_flush(s,z,r);}
   3110 /*   get bytes and bits */
   3111 #define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
   3112 #define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
   3113 #define NEXTBYTE (n--,*p++)
   3114 #define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
   3115 #define DUMPBITS(j) {b>>=(j);k-=(j);}
   3116 /*   output bytes */
   3117 #define WAVAIL (q<s->read?s->read-q-1:s->end-q)
   3118 #define LOADOUT {q=s->write;m=WAVAIL;}
   3119 #define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=WAVAIL;}}
   3120 #define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
   3121 #define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
   3122 #define OUTBYTE(a) {*q++=(Byte)(a);m--;}
   3123 /*   load local pointers */
   3124 #define LOAD {LOADIN LOADOUT}
   3125 
   3126 /* And'ing with mask[n] masks the lower n bits */
   3127 local uInt inflate_mask[] = {
   3128     0x0000,
   3129     0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
   3130     0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
   3131 };
   3132 
   3133 /* copy as much as possible from the sliding window to the output area */
   3134 local int inflate_flush OF((
   3135     inflate_blocks_statef *,
   3136     z_stream *,
   3137     int));
   3138 
   3139 /*+++++*/
   3140 /* inffast.h -- header to use inffast.c
   3141  * Copyright (C) 1995 Mark Adler
   3142  * For conditions of distribution and use, see copyright notice in zlib.h
   3143  */
   3144 
   3145 /* WARNING: this file should *not* be used by applications. It is
   3146    part of the implementation of the compression library and is
   3147    subject to change. Applications should only use zlib.h.
   3148  */
   3149 
   3150 local int inflate_fast OF((
   3151     uInt,
   3152     uInt,
   3153     inflate_huft *,
   3154     inflate_huft *,
   3155     inflate_blocks_statef *,
   3156     z_stream *));
   3157 
   3158 
   3159 /*+++++*/
   3160 /* infblock.c -- interpret and process block types to last block
   3161  * Copyright (C) 1995 Mark Adler
   3162  * For conditions of distribution and use, see copyright notice in zlib.h
   3163  */
   3164 
   3165 /* Table for deflate from PKZIP's appnote.txt. */
   3166 local uInt border[] = { /* Order of the bit length code lengths */
   3167         16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
   3168 
   3169 /*
   3170    Notes beyond the 1.93a appnote.txt:
   3171 
   3172    1. Distance pointers never point before the beginning of the output
   3173       stream.
   3174    2. Distance pointers can point back across blocks, up to 32k away.
   3175    3. There is an implied maximum of 7 bits for the bit length table and
   3176       15 bits for the actual data.
   3177    4. If only one code exists, then it is encoded using one bit.  (Zero
   3178       would be more efficient, but perhaps a little confusing.)  If two
   3179       codes exist, they are coded using one bit each (0 and 1).
   3180    5. There is no way of sending zero distance codes--a dummy must be
   3181       sent if there are none.  (History: a pre 2.0 version of PKZIP would
   3182       store blocks with no distance codes, but this was discovered to be
   3183       too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
   3184       zero distance codes, which is sent as one code of zero bits in
   3185       length.
   3186    6. There are up to 286 literal/length codes.  Code 256 represents the
   3187       end-of-block.  Note however that the static length tree defines
   3188       288 codes just to fill out the Huffman codes.  Codes 286 and 287
   3189       cannot be used though, since there is no length base or extra bits
   3190       defined for them.  Similarly, there are up to 30 distance codes.
   3191       However, static trees define 32 codes (all 5 bits) to fill out the
   3192       Huffman codes, but the last two had better not show up in the data.
   3193    7. Unzip can check dynamic Huffman blocks for complete code sets.
   3194       The exception is that a single code would not be complete (see #4).
   3195    8. The five bits following the block type are really the number of
   3196       literal codes sent minus 257.
   3197    9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
   3198       (1+6+6).  Therefore, to output three times the length, you output
   3199       three codes (1+1+1), whereas to output four times the same length,
   3200       you only need two codes (1+3).  Hmm.
   3201   10. In the tree reconstruction algorithm, Code = Code + Increment
   3202       only if BitLength(i) is not zero.  (Pretty obvious.)
   3203   11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
   3204   12. Note: length code 284 can represent 227-258, but length code 285
   3205       really is 258.  The last length deserves its own, short code
   3206       since it gets used a lot in very redundant files.  The length
   3207       258 is special since 258 - 3 (the min match length) is 255.
   3208   13. The literal/length and distance code bit lengths are read as a
   3209       single stream of lengths.  It is possible (and advantageous) for
   3210       a repeat code (16, 17, or 18) to go across the boundary between
   3211       the two sets of lengths.
   3212  */
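
        /* Illustrative sketch, not part of the library, expanding notes 8 and
         * 11 above: the 14 bits read in the TABLE state below pack three
         * counts, low bits first, exactly as the code later extracts them
         * with (t & 0x1f), ((t >> 5) & 0x1f) and (t >> 10).
         */
        #if 0
        local void split_table_header(t, nlit, ndist, nclen)
            uInt t;         /* the 14 header bits, first-received bit lowest */
            uInt *nlit;     /* number of literal/length codes */
            uInt *ndist;    /* number of distance codes */
            uInt *nclen;    /* number of bit length codes */
        {
            *nlit  = 257 + (t & 0x1f);        /* HLIT:  5 bits */
            *ndist = 1 + ((t >> 5) & 0x1f);   /* HDIST: 5 bits */
            *nclen = 4 + (t >> 10);           /* HCLEN: 4 bits */
        }
        #endif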
   3213 
   3214 
   3215 local void inflate_blocks_reset(s, z, c)
   3216 inflate_blocks_statef *s;
   3217 z_stream *z;
   3218 uLongf *c;
   3219 {
   3220   if (s->checkfn != Z_NULL)
   3221     *c = s->check;
   3222   if (s->mode == BTREE || s->mode == DTREE)
   3223     ZFREE(z, s->sub.trees.blens, s->sub.trees.nblens * sizeof(uInt));
   3224   if (s->mode == CODES)
   3225   {
   3226     inflate_codes_free(s->sub.decode.codes, z);
   3227     inflate_trees_free(s->sub.decode.td, z);
   3228     inflate_trees_free(s->sub.decode.tl, z);
   3229   }
   3230   s->mode = TYPE;
   3231   s->bitk = 0;
   3232   s->bitb = 0;
   3233   s->read = s->write = s->window;
   3234   if (s->checkfn != Z_NULL)
   3235     s->check = (*s->checkfn)(0L, Z_NULL, 0);
   3236   Trace((stderr, "inflate:   blocks reset\n"));
   3237 }
   3238 
   3239 
   3240 local inflate_blocks_statef *inflate_blocks_new(z, c, w)
   3241 z_stream *z;
   3242 check_func c;
   3243 uInt w;
   3244 {
   3245   inflate_blocks_statef *s;
   3246 
   3247   if ((s = (inflate_blocks_statef *)ZALLOC
   3248        (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
   3249     return s;
   3250   if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
   3251   {
   3252     ZFREE(z, s, sizeof(struct inflate_blocks_state));
   3253     return Z_NULL;
   3254   }
   3255   s->end = s->window + w;
   3256   s->checkfn = c;
   3257   s->mode = TYPE;
   3258   Trace((stderr, "inflate:   blocks allocated\n"));
   3259   inflate_blocks_reset(s, z, &s->check);
   3260   return s;
   3261 }
   3262 
   3263 
   3264 local int inflate_blocks(s, z, r)
   3265 inflate_blocks_statef *s;
   3266 z_stream *z;
   3267 int r;
   3268 {
   3269   uInt t;               /* temporary storage */
   3270   uLong b;              /* bit buffer */
   3271   uInt k;               /* bits in bit buffer */
   3272   Bytef *p;             /* input data pointer */
   3273   uInt n;               /* bytes available there */
   3274   Bytef *q;             /* output window write pointer */
   3275   uInt m;               /* bytes to end of window or read pointer */
   3276 
   3277   /* copy input/output information to locals (UPDATE macro restores) */
   3278   LOAD
   3279 
   3280   /* process input based on current state */
   3281   while (1) switch (s->mode)
   3282   {
   3283     case TYPE:
   3284       NEEDBITS(3)
   3285       t = (uInt)b & 7;
   3286       s->last = t & 1;
   3287       switch (t >> 1)
   3288       {
   3289         case 0:                         /* stored */
   3290           Trace((stderr, "inflate:     stored block%s\n",
   3291                  s->last ? " (last)" : ""));
   3292           DUMPBITS(3)
   3293           t = k & 7;                    /* go to byte boundary */
   3294           DUMPBITS(t)
   3295           s->mode = LENS;               /* get length of stored block */
   3296           break;
   3297         case 1:                         /* fixed */
   3298           Trace((stderr, "inflate:     fixed codes block%s\n",
   3299                  s->last ? " (last)" : ""));
   3300           {
   3301             uInt bl, bd;
   3302             inflate_huft *tl, *td;
   3303 
   3304             inflate_trees_fixed(&bl, &bd, &tl, &td);
   3305             s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
   3306             if (s->sub.decode.codes == Z_NULL)
   3307             {
   3308               r = Z_MEM_ERROR;
   3309               LEAVE
   3310             }
   3311             s->sub.decode.tl = Z_NULL;  /* don't try to free these */
   3312             s->sub.decode.td = Z_NULL;
   3313           }
   3314           DUMPBITS(3)
   3315           s->mode = CODES;
   3316           break;
   3317         case 2:                         /* dynamic */
   3318           Trace((stderr, "inflate:     dynamic codes block%s\n",
   3319                  s->last ? " (last)" : ""));
   3320           DUMPBITS(3)
   3321           s->mode = TABLE;
   3322           break;
   3323         case 3:                         /* illegal */
   3324           DUMPBITS(3)
   3325           s->mode = BADB;
   3326           z->msg = "invalid block type";
   3327           r = Z_DATA_ERROR;
   3328           LEAVE
   3329       }
   3330       break;
   3331     case LENS:
   3332       NEEDBITS(32)
   3333       if ((((~b) >> 16) & 0xffff) != (b & 0xffff))
   3334       {
   3335         s->mode = BADB;
   3336         z->msg = "invalid stored block lengths";
   3337         r = Z_DATA_ERROR;
   3338         LEAVE
   3339       }
   3340       s->sub.left = (uInt)b & 0xffff;
   3341       b = k = 0;                      /* dump bits */
   3342       Tracev((stderr, "inflate:       stored length %u\n", s->sub.left));
   3343       s->mode = s->sub.left ? STORED : TYPE;
   3344       break;
   3345     case STORED:
   3346       if (n == 0)
   3347         LEAVE
   3348       NEEDOUT
   3349       t = s->sub.left;
   3350       if (t > n) t = n;
   3351       if (t > m) t = m;
   3352       zmemcpy(q, p, t);
   3353       p += t;  n -= t;
   3354       q += t;  m -= t;
   3355       if ((s->sub.left -= t) != 0)
   3356         break;
   3357       Tracev((stderr, "inflate:       stored end, %lu total out\n",
   3358               z->total_out + (q >= s->read ? q - s->read :
   3359               (s->end - s->read) + (q - s->window))));
   3360       s->mode = s->last ? DRY : TYPE;
   3361       break;
   3362     case TABLE:
   3363       NEEDBITS(14)
   3364       s->sub.trees.table = t = (uInt)b & 0x3fff;
   3365 #ifndef PKZIP_BUG_WORKAROUND
   3366       if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
   3367       {
   3368         s->mode = BADB;
   3369         z->msg = "too many length or distance symbols";
   3370         r = Z_DATA_ERROR;
   3371         LEAVE
   3372       }
   3373 #endif
   3374       t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
   3375       if (t < 19)
   3376         t = 19;
   3377       if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
   3378       {
   3379         r = Z_MEM_ERROR;
   3380         LEAVE
   3381       }
   3382       s->sub.trees.nblens = t;
   3383       DUMPBITS(14)
   3384       s->sub.trees.index = 0;
   3385       Tracev((stderr, "inflate:       table sizes ok\n"));
   3386       s->mode = BTREE;
   3387     case BTREE:
   3388       while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
   3389       {
   3390         NEEDBITS(3)
   3391         s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
   3392         DUMPBITS(3)
   3393       }
   3394       while (s->sub.trees.index < 19)
   3395         s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
   3396       s->sub.trees.bb = 7;
   3397       t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
   3398                              &s->sub.trees.tb, z);
   3399       if (t != Z_OK)
   3400       {
   3401         r = t;
   3402         if (r == Z_DATA_ERROR)
   3403           s->mode = BADB;
   3404         LEAVE
   3405       }
   3406       s->sub.trees.index = 0;
   3407       Tracev((stderr, "inflate:       bits tree ok\n"));
   3408       s->mode = DTREE;
   3409     case DTREE:
   3410       while (t = s->sub.trees.table,
   3411              s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
   3412       {
   3413         inflate_huft *h;
   3414         uInt i, j, c;
   3415 
   3416         t = s->sub.trees.bb;
   3417         NEEDBITS(t)
   3418         h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
   3419         t = h->word.what.Bits;
   3420         c = h->more.Base;
   3421         if (c < 16)
   3422         {
   3423           DUMPBITS(t)
   3424           s->sub.trees.blens[s->sub.trees.index++] = c;
   3425         }
   3426         else /* c == 16..18 */
   3427         {
   3428           i = c == 18 ? 7 : c - 14;
   3429           j = c == 18 ? 11 : 3;
   3430           NEEDBITS(t + i)
   3431           DUMPBITS(t)
   3432           j += (uInt)b & inflate_mask[i];
   3433           DUMPBITS(i)
   3434           i = s->sub.trees.index;
   3435           t = s->sub.trees.table;
   3436           if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
   3437               (c == 16 && i < 1))
   3438           {
   3439             s->mode = BADB;
   3440             z->msg = "invalid bit length repeat";
   3441             r = Z_DATA_ERROR;
   3442             LEAVE
   3443           }
   3444           c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
   3445           do {
   3446             s->sub.trees.blens[i++] = c;
   3447           } while (--j);
   3448           s->sub.trees.index = i;
   3449         }
   3450       }
   3451       inflate_trees_free(s->sub.trees.tb, z);
   3452       s->sub.trees.tb = Z_NULL;
   3453       {
   3454         uInt bl, bd;
   3455         inflate_huft *tl, *td;
   3456         inflate_codes_statef *c;
   3457 
   3458         bl = 9;         /* must be <= 9 for lookahead assumptions */
   3459         bd = 6;         /* must be <= 9 for lookahead assumptions */
   3460         t = s->sub.trees.table;
   3461         t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
   3462                                   s->sub.trees.blens, &bl, &bd, &tl, &td, z);
   3463         if (t != Z_OK)
   3464         {
   3465           if (t == (uInt)Z_DATA_ERROR)
   3466             s->mode = BADB;
   3467           r = t;
   3468           LEAVE
   3469         }
   3470         Tracev((stderr, "inflate:       trees ok\n"));
   3471         if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
   3472         {
   3473           inflate_trees_free(td, z);
   3474           inflate_trees_free(tl, z);
   3475           r = Z_MEM_ERROR;
   3476           LEAVE
   3477         }
   3478         ZFREE(z, s->sub.trees.blens, s->sub.trees.nblens * sizeof(uInt));
   3479         s->sub.decode.codes = c;
   3480         s->sub.decode.tl = tl;
   3481         s->sub.decode.td = td;
   3482       }
   3483       s->mode = CODES;
   3484     case CODES:
   3485       UPDATE
   3486       if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
   3487         return inflate_flush(s, z, r);
   3488       r = Z_OK;
   3489       inflate_codes_free(s->sub.decode.codes, z);
   3490       inflate_trees_free(s->sub.decode.td, z);
   3491       inflate_trees_free(s->sub.decode.tl, z);
   3492       LOAD
   3493       Tracev((stderr, "inflate:       codes end, %lu total out\n",
   3494               z->total_out + (q >= s->read ? q - s->read :
   3495               (s->end - s->read) + (q - s->window))));
   3496       if (!s->last)
   3497       {
   3498         s->mode = TYPE;
   3499         break;
   3500       }
   3501       if (k > 7)              /* return unused byte, if any */
   3502       {
   3503         Assert(k < 16, "inflate_codes grabbed too many bytes")
   3504         k -= 8;
   3505         n++;
   3506         p--;                    /* can always return one */
   3507       }
   3508       s->mode = DRY;
   3509     case DRY:
   3510       FLUSH
   3511       if (s->read != s->write)
   3512         LEAVE
   3513       s->mode = DONEB;
   3514     case DONEB:
   3515       r = Z_STREAM_END;
   3516       LEAVE
   3517     case BADB:
   3518       r = Z_DATA_ERROR;
   3519       LEAVE
   3520     default:
   3521       r = Z_STREAM_ERROR;
   3522       LEAVE
   3523   }
   3524 }
   3525 
   3526 
   3527 local int inflate_blocks_free(s, z, c)
   3528 inflate_blocks_statef *s;
   3529 z_stream *z;
   3530 uLongf *c;
   3531 {
   3532   inflate_blocks_reset(s, z, c);
   3533   ZFREE(z, s->window, s->end - s->window);
   3534   ZFREE(z, s, sizeof(struct inflate_blocks_state));
   3535   Trace((stderr, "inflate:   blocks freed\n"));
   3536   return Z_OK;
   3537 }
   3538 
   3539 /*
   3540  * This subroutine adds the data at next_in/avail_in to the output history
   3541  * without performing any output.  The output buffer must be "caught up";
   3542  * i.e. no pending output (hence s->read equals s->write), and the state must
   3543  * be BLOCKS (i.e. we should be willing to see the start of a series of
   3544  * BLOCKS).  On exit, the output will also be caught up, and the checksum
   3545  * will have been updated if need be.
   3546  */
   3547 local int inflate_addhistory(s, z)
   3548 inflate_blocks_statef *s;
   3549 z_stream *z;
   3550 {
   3551     uLong b;              /* bit buffer */  /* NOT USED HERE */
   3552     uInt k;               /* bits in bit buffer */ /* NOT USED HERE */
   3553     uInt t;               /* temporary storage */
   3554     Bytef *p;             /* input data pointer */
   3555     uInt n;               /* bytes available there */
   3556     Bytef *q;             /* output window write pointer */
   3557     uInt m;               /* bytes to end of window or read pointer */
   3558 
   3559     if (s->read != s->write)
   3560 	return Z_STREAM_ERROR;
   3561     if (s->mode != TYPE)
   3562 	return Z_DATA_ERROR;
   3563 
   3564     /* we're ready to rock */
   3565     LOAD
   3566     /* while there is input ready, copy to output buffer, moving
   3567      * pointers as needed.
   3568      */
   3569     while (n) {
   3570 	t = n;  /* how many to do */
   3571 	/* is there room until end of buffer? */
   3572 	if (t > m) t = m;
   3573 	/* update check information */
   3574 	if (s->checkfn != Z_NULL)
   3575 	    s->check = (*s->checkfn)(s->check, q, t);
   3576 	zmemcpy(q, p, t);
   3577 	q += t;
   3578 	p += t;
   3579 	n -= t;
   3580 	z->total_out += t;
   3581 	s->read = q;    /* drag read pointer forward */
   3582 /*      WRAP  */ 	/* expand WRAP macro by hand to handle s->read */
   3583 	if (q == s->end) {
   3584 	    s->read = q = s->window;
   3585 	    m = WAVAIL;
   3586 	}
   3587     }
   3588     UPDATE
   3589     return Z_OK;
   3590 }
   3591 
   3592 
   3593 /*
   3594  * At the end of a Deflate-compressed PPP packet, we expect to have seen
   3595  * a `stored' block type value but not the (zero) length bytes.
   3596  */
   3597 local int inflate_packet_flush(s)
   3598     inflate_blocks_statef *s;
   3599 {
   3600     if (s->mode != LENS)
   3601 	return Z_DATA_ERROR;
   3602     s->mode = TYPE;
   3603     return Z_OK;
   3604 }
   3605 
   3606 
   3607 /*+++++*/
   3608 /* inftrees.c -- generate Huffman trees for efficient decoding
   3609  * Copyright (C) 1995 Mark Adler
   3610  * For conditions of distribution and use, see copyright notice in zlib.h
   3611  */
   3612 
   3613 /* simplify the use of the inflate_huft type with some defines */
   3614 #define base more.Base
   3615 #define next more.Next
   3616 #define exop word.what.Exop
   3617 #define bits word.what.Bits
   3618 
   3619 
   3620 local int huft_build OF((
   3621     uIntf *,            /* code lengths in bits */
   3622     uInt,               /* number of codes */
   3623     uInt,               /* number of "simple" codes */
   3624     uIntf *,            /* list of base values for non-simple codes */
   3625     uIntf *,            /* list of extra bits for non-simple codes */
   3626     inflate_huft * FAR*,/* result: starting table */
   3627     uIntf *,            /* maximum lookup bits (returns actual) */
   3628     z_stream *));       /* for zalloc function */
   3629 
   3630 local voidpf falloc OF((
   3631     voidpf,             /* opaque pointer (not used) */
   3632     uInt,               /* number of items */
   3633     uInt));             /* size of item */
   3634 
   3635 local void ffree OF((
   3636     voidpf q,           /* opaque pointer (not used) */
   3637     voidpf p,           /* what to free (not used) */
   3638     uInt n));		/* number of bytes (not used) */
   3639 
   3640 /* Tables for deflate from PKZIP's appnote.txt. */
   3641 local uInt cplens[] = { /* Copy lengths for literal codes 257..285 */
   3642         3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
   3643         35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
   3644         /* actually lengths - 2; also see note #12 above about 258 */
   3645 local uInt cplext[] = { /* Extra bits for literal codes 257..285 */
   3646         0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
   3647         3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 192, 192}; /* 192==invalid */
   3648 local uInt cpdist[] = { /* Copy offsets for distance codes 0..29 */
   3649         1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
   3650         257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
   3651         8193, 12289, 16385, 24577};
   3652 local uInt cpdext[] = { /* Extra bits for distance codes */
   3653         0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
   3654         7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
   3655         12, 12, 13, 13};
   3656 
   3657 /*
   3658    Huffman code decoding is performed using a multi-level table lookup.
   3659    The fastest way to decode is to simply build a lookup table whose
   3660    size is determined by the longest code.  However, the time it takes
   3661    to build this table can also be a factor if the data being decoded
   3662    is not very long.  The most common codes are necessarily the
   3663    shortest codes, so those codes dominate the decoding time, and hence
   3664    the speed.  The idea is you can have a shorter table that decodes the
   3665    shorter, more probable codes, and then point to subsidiary tables for
   3666    the longer codes.  The time it costs to decode the longer codes is
   3667    then traded against the time it takes to make longer tables.
   3668 
   3669    The results of this trade are in the variables lbits and dbits
   3670    below.  lbits is the number of bits the first level table for literal/
   3671    length codes can decode in one step, and dbits is the same thing for
   3672    the distance codes.  Subsequent tables are also less than or equal to
   3673    those sizes.  These values may be adjusted either when all of the
   3674    codes are shorter than that, in which case the longest code length in
   3675    bits is used, or when the shortest code is *longer* than the requested
   3676    table size, in which case the length of the shortest code in bits is
   3677    used.
   3678 
   3679    There are two different values for the two tables, since they code a
   3680    different number of possibilities each.  The literal/length table
   3681    codes 286 possible values, or in a flat code, a little over eight
   3682    bits.  The distance table codes 30 possible values, or a little less
   3683    than five bits, flat.  The optimum values for speed end up being
   3684    about one bit more than those, so lbits is 8+1 and dbits is 5+1.
   3685    The optimum values may differ though from machine to machine, and
   3686    possibly even between compilers.  Your mileage may vary.
   3687  */
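
        /* Illustrative sketch, not part of the library: the multi-level idea
         * described above, using a deliberately simplified entry layout (NOT
         * the inflate_huft layout built by huft_build below).  A root table
         * indexed by the low lbits bits of the stream either resolves a short
         * code directly or names a second-level table indexed by the
         * following bits; building the tables is omitted here.
         */
        #if 0
        typedef struct {
            unsigned char  op;    /* 0: direct symbol; else: sub-table index bits */
            unsigned char  bits;  /* total code length of the decoded symbol */
            unsigned short val;   /* symbol value, or offset of the sub-table */
        } example_entry;

        local int example_decode(root, sub, b, lbits, used)
            example_entry *root;  /* first level, 1 << lbits entries */
            example_entry *sub;   /* all second-level tables, concatenated */
            unsigned long b;      /* bit buffer, low bits arrived first */
            int lbits;            /* bits decoded by the first level */
            int *used;            /* out: bits consumed from the buffer */
        {
            example_entry e;

            e = root[b & ((1UL << lbits) - 1)];             /* level 1 */
            if (e.op != 0)                                  /* long code */
                e = sub[e.val + ((b >> lbits) & ((1UL << e.op) - 1))];
            *used = e.bits;
            return e.val;
        }
        #endif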
   3688 
   3689 
   3690 /* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
   3691 #define BMAX 15         /* maximum bit length of any code */
   3692 #define N_MAX 288       /* maximum number of codes in any set */
   3693 
   3694 #ifdef DEBUG_ZLIB
   3695   uInt inflate_hufts;
   3696 #endif
   3697 
   3698 local int huft_build(b, n, s, d, e, t, m, zs)
   3699 uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
   3700 uInt n;                 /* number of codes (assumed <= N_MAX) */
   3701 uInt s;                 /* number of simple-valued codes (0..s-1) */
   3702 uIntf *d;               /* list of base values for non-simple codes */
   3703 uIntf *e;               /* list of extra bits for non-simple codes */
   3704 inflate_huft * FAR *t;  /* result: starting table */
   3705 uIntf *m;               /* maximum lookup bits, returns actual */
   3706 z_stream *zs;           /* for zalloc function */
   3707 /* Given a list of code lengths and a maximum table size, make a set of
   3708    tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
   3709    if the given code set is incomplete (the tables are still built in this
   3710    case), Z_DATA_ERROR if the input is invalid (an over-subscribed set of
   3711    lengths), or Z_MEM_ERROR if not enough memory. */
   3712 {
   3713 
   3714   uInt a;                       /* counter for codes of length k */
   3715   uInt c[BMAX+1];               /* bit length count table */
   3716   uInt f;                       /* i repeats in table every f entries */
   3717   int g;                        /* maximum code length */
   3718   int h;                        /* table level */
   3719   register uInt i;              /* counter, current code */
   3720   register uInt j;              /* counter */
   3721   register int k;               /* number of bits in current code */
   3722   int l;                        /* bits per table (returned in m) */
   3723   register uIntf *p;            /* pointer into c[], b[], or v[] */
   3724   inflate_huft *q;              /* points to current table */
   3725   struct inflate_huft_s r;      /* table entry for structure assignment */
   3726   inflate_huft *u[BMAX];        /* table stack */
   3727   uInt v[N_MAX];                /* values in order of bit length */
   3728   register int w;               /* bits before this table == (l * h) */
   3729   uInt x[BMAX+1];               /* bit offsets, then code stack */
   3730   uIntf *xp;                    /* pointer into x */
   3731   int y;                        /* number of dummy codes added */
   3732   uInt z;                       /* number of entries in current table */
   3733 
   3734 
   3735   /* Generate counts for each bit length */
   3736   p = c;
   3737 #define C0 *p++ = 0;
   3738 #define C2 C0 C0 C0 C0
   3739 #define C4 C2 C2 C2 C2
   3740   C4                            /* clear c[]--assume BMAX+1 is 16 */
   3741   p = b;  i = n;
   3742   do {
   3743     c[*p++]++;                  /* assume all entries <= BMAX */
   3744   } while (--i);
   3745   if (c[0] == n)                /* null input--all zero length codes */
   3746   {
   3747     *t = (inflate_huft *)Z_NULL;
   3748     *m = 0;
   3749     return Z_OK;
   3750   }
   3751 
   3752 
   3753   /* Find minimum and maximum length, bound *m by those */
   3754   l = *m;
   3755   for (j = 1; j <= BMAX; j++)
   3756     if (c[j])
   3757       break;
   3758   k = j;                        /* minimum code length */
   3759   if ((uInt)l < j)
   3760     l = j;
   3761   for (i = BMAX; i; i--)
   3762     if (c[i])
   3763       break;
   3764   g = i;                        /* maximum code length */
   3765   if ((uInt)l > i)
   3766     l = i;
   3767   *m = l;
   3768 
   3769 
   3770   /* Adjust last length count to fill out codes, if needed */
   3771   for (y = 1 << j; j < i; j++, y <<= 1)
   3772     if ((y -= c[j]) < 0)
   3773       return Z_DATA_ERROR;
   3774   if ((y -= c[i]) < 0)
   3775     return Z_DATA_ERROR;
   3776   c[i] += y;
   3777 
   3778 
   3779   /* Generate starting offsets into the value table for each length */
   3780   x[1] = j = 0;
   3781   p = c + 1;  xp = x + 2;
   3782   while (--i) {                 /* note that i == g from above */
   3783     *xp++ = (j += *p++);
   3784   }
   3785 
   3786 
   3787   /* Make a table of values in order of bit lengths */
   3788   p = b;  i = 0;
   3789   do {
   3790     if ((j = *p++) != 0)
   3791       v[x[j]++] = i;
   3792   } while (++i < n);
   3793 
   3794 
   3795   /* Generate the Huffman codes and for each, make the table entries */
   3796   x[0] = i = 0;                 /* first Huffman code is zero */
   3797   p = v;                        /* grab values in bit order */
   3798   h = -1;                       /* no tables yet--level -1 */
   3799   w = -l;                       /* bits decoded == (l * h) */
   3800   u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
   3801   q = (inflate_huft *)Z_NULL;   /* ditto */
   3802   z = 0;                        /* ditto */
   3803 
   3804   /* go through the bit lengths (k already is bits in shortest code) */
   3805   for (; k <= g; k++)
   3806   {
   3807     a = c[k];
   3808     while (a--)
   3809     {
   3810       /* here i is the Huffman code of length k bits for value *p */
   3811       /* make tables up to required level */
   3812       while (k > w + l)
   3813       {
   3814         h++;
   3815         w += l;                 /* previous table always l bits */
   3816 
   3817         /* compute minimum size table less than or equal to l bits */
   3818         z = (z = g - w) > (uInt)l ? l : z;      /* table size upper limit */
   3819         if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
   3820         {                       /* too few codes for k-w bit table */
   3821           f -= a + 1;           /* deduct codes from patterns left */
   3822           xp = c + k;
   3823           if (j < z)
   3824             while (++j < z)     /* try smaller tables up to z bits */
   3825             {
   3826               if ((f <<= 1) <= *++xp)
   3827                 break;          /* enough codes to use up j bits */
   3828               f -= *xp;         /* else deduct codes from patterns */
   3829             }
   3830         }
   3831         z = 1 << j;             /* table entries for j-bit table */
   3832 
   3833         /* allocate and link in new table */
   3834         if ((q = (inflate_huft *)ZALLOC
   3835              (zs,z + 1,sizeof(inflate_huft))) == Z_NULL)
   3836         {
   3837           if (h)
   3838             inflate_trees_free(u[0], zs);
   3839           return Z_MEM_ERROR;   /* not enough memory */
   3840         }
   3841 	q->word.Nalloc = z + 1;
   3842 #ifdef DEBUG_ZLIB
   3843         inflate_hufts += z + 1;
   3844 #endif
   3845         *t = q + 1;             /* link to list for inflate_trees_free() */
   3846         *(t = &(q->next)) = Z_NULL;
   3847         u[h] = ++q;             /* table starts after link */
   3848 
   3849         /* connect to last table, if there is one */
   3850         if (h)
   3851         {
   3852           x[h] = i;             /* save pattern for backing up */
   3853           r.bits = (Byte)l;     /* bits to dump before this table */
   3854           r.exop = (Byte)j;     /* bits in this table */
   3855           r.next = q;           /* pointer to this table */
   3856           j = i >> (w - l);     /* (get around Turbo C bug) */
   3857           u[h-1][j] = r;        /* connect to last table */
   3858         }
   3859       }
   3860 
   3861       /* set up table entry in r */
   3862       r.bits = (Byte)(k - w);
   3863       if (p >= v + n)
   3864         r.exop = 128 + 64;      /* out of values--invalid code */
   3865       else if (*p < s)
   3866       {
   3867         r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);     /* 256 is end-of-block */
   3868         r.base = *p++;          /* simple code is just the value */
   3869       }
   3870       else
   3871       {
   3872         r.exop = (Byte)e[*p - s] + 16 + 64; /* non-simple--look up in lists */
   3873         r.base = d[*p++ - s];
   3874       }
   3875 
   3876       /* fill code-like entries with r */
   3877       f = 1 << (k - w);
   3878       for (j = i >> w; j < z; j += f)
   3879         q[j] = r;
   3880 
   3881       /* backwards increment the k-bit code i */
   3882       for (j = 1 << (k - 1); i & j; j >>= 1)
   3883         i ^= j;
   3884       i ^= j;
   3885 
   3886       /* backup over finished tables */
   3887       while ((i & ((1 << w) - 1)) != x[h])
   3888       {
   3889         h--;                    /* don't need to update q */
   3890         w -= l;
   3891       }
   3892     }
   3893   }
   3894 
   3895 
   3896   /* Return Z_BUF_ERROR if we were given an incomplete table */
   3897   return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
   3898 }
   3899 
   3900 
   3901 local int inflate_trees_bits(c, bb, tb, z)
   3902 uIntf *c;               /* 19 code lengths */
   3903 uIntf *bb;              /* bits tree desired/actual depth */
   3904 inflate_huft * FAR *tb; /* bits tree result */
   3905 z_stream *z;            /* for zfree function */
   3906 {
   3907   int r;
   3908 
   3909   r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, z);
   3910   if (r == Z_DATA_ERROR)
   3911     z->msg = "oversubscribed dynamic bit lengths tree";
   3912   else if (r == Z_BUF_ERROR)
   3913   {
   3914     inflate_trees_free(*tb, z);
   3915     z->msg = "incomplete dynamic bit lengths tree";
   3916     r = Z_DATA_ERROR;
   3917   }
   3918   return r;
   3919 }
   3920 
   3921 
   3922 local int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, z)
   3923 uInt nl;                /* number of literal/length codes */
   3924 uInt nd;                /* number of distance codes */
   3925 uIntf *c;               /* that many (total) code lengths */
   3926 uIntf *bl;              /* literal desired/actual bit depth */
   3927 uIntf *bd;              /* distance desired/actual bit depth */
   3928 inflate_huft * FAR *tl; /* literal/length tree result */
   3929 inflate_huft * FAR *td; /* distance tree result */
   3930 z_stream *z;            /* for zfree function */
   3931 {
   3932   int r;
   3933 
   3934   /* build literal/length tree */
   3935   if ((r = huft_build(c, nl, 257, cplens, cplext, tl, bl, z)) != Z_OK)
   3936   {
   3937     if (r == Z_DATA_ERROR)
   3938       z->msg = "oversubscribed literal/length tree";
   3939     else if (r == Z_BUF_ERROR)
   3940     {
   3941       inflate_trees_free(*tl, z);
   3942       z->msg = "incomplete literal/length tree";
   3943       r = Z_DATA_ERROR;
   3944     }
   3945     return r;
   3946   }
   3947 
   3948   /* build distance tree */
   3949   if ((r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, z)) != Z_OK)
   3950   {
   3951     if (r == Z_DATA_ERROR)
   3952       z->msg = "oversubscribed distance tree";
   3953     else if (r == Z_BUF_ERROR) {
   3954 #ifdef PKZIP_BUG_WORKAROUND
   3955       r = Z_OK;
   3956     }
   3957 #else
   3958       inflate_trees_free(*td, z);
   3959       z->msg = "incomplete distance tree";
   3960       r = Z_DATA_ERROR;
   3961     }
   3962     inflate_trees_free(*tl, z);
   3963     return r;
   3964 #endif
   3965   }
   3966 
   3967   /* done */
   3968   return Z_OK;
   3969 }
   3970 
   3971 
   3972 /* build fixed tables only once--keep them here */
   3973 local int fixed_lock = 0;
   3974 local int fixed_built = 0;
   3975 #define FIXEDH 530      /* number of hufts used by fixed tables */
   3976 local uInt fixed_left = FIXEDH;
   3977 local inflate_huft fixed_mem[FIXEDH];
   3978 local uInt fixed_bl;
   3979 local uInt fixed_bd;
   3980 local inflate_huft *fixed_tl;
   3981 local inflate_huft *fixed_td;
   3982 
   3983 
   3984 local voidpf falloc(q, n, s)
   3985 voidpf q;        /* opaque pointer (not used) */
   3986 uInt n;         /* number of items */
   3987 uInt s;         /* size of item */
   3988 {
   3989   Assert(s == sizeof(inflate_huft) && n <= fixed_left,
   3990          "inflate_trees falloc overflow");
   3991   if (q) s++; /* to make some compilers happy */
   3992   fixed_left -= n;
   3993   return (voidpf)(fixed_mem + fixed_left);
   3994 }
   3995 
   3996 
   3997 local void ffree(q, p, n)
   3998 voidpf q;
   3999 voidpf p;
   4000 uInt n;
   4001 {
   4002   Assert(0, "inflate_trees ffree called!");
   4003   if (q) q = p; /* to make some compilers happy */
   4004 }
   4005 
   4006 
   4007 local int inflate_trees_fixed(bl, bd, tl, td)
   4008 uIntf *bl;               /* literal desired/actual bit depth */
   4009 uIntf *bd;               /* distance desired/actual bit depth */
   4010 inflate_huft * FAR *tl;  /* literal/length tree result */
   4011 inflate_huft * FAR *td;  /* distance tree result */
   4012 {
   4013   /* build fixed tables if not built already--lock out other instances */
   4014   while (++fixed_lock > 1)
   4015     fixed_lock--;
   4016   if (!fixed_built)
   4017   {
   4018     int k;              /* temporary variable */
   4019     unsigned c[288];    /* length list for huft_build */
   4020     z_stream z;         /* for falloc function */
   4021 
   4022     /* set up fake z_stream for memory routines */
   4023     z.zalloc = falloc;
   4024     z.zfree = ffree;
   4025     z.opaque = Z_NULL;
   4026 
   4027     /* literal table */
   4028     for (k = 0; k < 144; k++)
   4029       c[k] = 8;
   4030     for (; k < 256; k++)
   4031       c[k] = 9;
   4032     for (; k < 280; k++)
   4033       c[k] = 7;
   4034     for (; k < 288; k++)
   4035       c[k] = 8;
   4036     fixed_bl = 7;
   4037     huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, &z);
   4038 
   4039     /* distance table */
   4040     for (k = 0; k < 30; k++)
   4041       c[k] = 5;
   4042     fixed_bd = 5;
   4043     huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, &z);
   4044 
   4045     /* done */
   4046     fixed_built = 1;
   4047   }
   4048   fixed_lock--;
   4049   *bl = fixed_bl;
   4050   *bd = fixed_bd;
   4051   *tl = fixed_tl;
   4052   *td = fixed_td;
   4053   return Z_OK;
   4054 }
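/* Usage sketch (illustrative): the fixed-block trees come from the static
   fixed_mem[] array above, so unlike the dynamic trees they are obtained
   without a z_stream and must not be passed to inflate_trees_free(): */
#if 0
{
  uInt bl, bd;
  inflate_huft *tl, *td;

  inflate_trees_fixed(&bl, &bd, &tl, &td);   /* always returns Z_OK */
  /* decode a fixed-Huffman block with tl/td; there is no matching free */
}
#endif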
   4055 
   4056 
   4057 local int inflate_trees_free(t, z)
   4058 inflate_huft *t;        /* table to free */
   4059 z_stream *z;            /* for zfree function */
   4060 /* Free the malloc'ed tables built by huft_build(), which makes a linked
   4061    list of the tables it made, with the links in a dummy first entry of
   4062    each table. */
   4063 {
   4064   register inflate_huft *p, *q;
   4065 
   4066   /* Go through linked list, freeing from the malloced (t[-1]) address. */
   4067   p = t;
   4068   while (p != Z_NULL)
   4069   {
   4070     q = (--p)->next;
   4071     ZFREE(z, p, p->word.Nalloc * sizeof(inflate_huft));
   4072     p = q;
   4073   }
   4074   return Z_OK;
   4075 }
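/* Memory layout assumed by the loop above (a sketch, inferred from the use
   of t[-1]): each table allocated by huft_build() carries one extra dummy
   huft in front of the entries, and the pointer handed to callers is the
   first real entry, i.e. one past the start of the allocation.  The dummy
   entry's `next' field links to another table of the same tree and its
   word.Nalloc field records how many hufts were allocated, which is the
   size passed to ZFREE.

       ZALLOC'd block:   [ dummy: next, Nalloc ][ entry 0 ][ entry 1 ] ...
                                                  ^
                                                  t
*/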
   4076 
   4077 /*+++++*/
   4078 /* infcodes.c -- process literals and length/distance pairs
   4079  * Copyright (C) 1995 Mark Adler
   4080  * For conditions of distribution and use, see copyright notice in zlib.h
   4081  */
   4082 
   4083 /* simplify the use of the inflate_huft type with some defines */
   4084 #define base more.Base
   4085 #define next more.Next
   4086 #define exop word.what.Exop
   4087 #define bits word.what.Bits
   4088 
   4089 /* inflate codes private state */
   4090 struct inflate_codes_state {
   4091 
   4092   /* mode */
   4093   enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
   4094       START,    /* x: set up for LEN */
   4095       LEN,      /* i: get length/literal/eob next */
   4096       LENEXT,   /* i: getting length extra (have base) */
   4097       DIST,     /* i: get distance next */
   4098       DISTEXT,  /* i: getting distance extra */
   4099       COPY,     /* o: copying bytes in window, waiting for space */
   4100       LIT,      /* o: got literal, waiting for output space */
   4101       WASH,     /* o: got eob, possibly still output waiting */
   4102       END,      /* x: got eob and all data flushed */
   4103       BADCODE}  /* x: got error */
   4104     mode;               /* current inflate_codes mode */
   4105 
   4106   /* mode dependent information */
   4107   uInt len;
   4108   union {
   4109     struct {
   4110       inflate_huft *tree;       /* pointer into tree */
   4111       uInt need;                /* bits needed */
   4112     } code;             /* if LEN or DIST, where in tree */
   4113     uInt lit;           /* if LIT, literal */
   4114     struct {
   4115       uInt get;                 /* bits to get for extra */
   4116       uInt dist;                /* distance back to copy from */
   4117     } copy;             /* if EXT or COPY, where and how much */
   4118   } sub;                /* submode */
   4119 
   4120   /* mode independent information */
   4121   Byte lbits;           /* ltree bits decoded per branch */
   4122   Byte dbits;           /* dtree bits decoded per branch */
   4123   inflate_huft *ltree;          /* literal/length/eob tree */
   4124   inflate_huft *dtree;          /* distance tree */
   4125 
   4126 };
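/* Typical mode sequences, as implemented by inflate_codes() below:

     literal:            LEN -> LIT -> START
     length/distance:    LEN -> LENEXT -> DIST -> DISTEXT -> COPY -> START
     end of block:       LEN -> WASH -> END
     invalid code:       LEN or DIST -> BADCODE

   Unless SLOW is defined, START diverts to inflate_fast() whenever at least
   258 bytes of output window space and 10 bytes of input are available. */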
   4127 
   4128 
   4129 local inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
   4130 uInt bl, bd;
   4131 inflate_huft *tl, *td;
   4132 z_stream *z;
   4133 {
   4134   inflate_codes_statef *c;
   4135 
   4136   if ((c = (inflate_codes_statef *)
   4137        ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
   4138   {
   4139     c->mode = START;
   4140     c->lbits = (Byte)bl;
   4141     c->dbits = (Byte)bd;
   4142     c->ltree = tl;
   4143     c->dtree = td;
   4144     Tracev((stderr, "inflate:       codes new\n"));
   4145   }
   4146   return c;
   4147 }
   4148 
   4149 
   4150 local int inflate_codes(s, z, r)
   4151 inflate_blocks_statef *s;
   4152 z_stream *z;
   4153 int r;
   4154 {
   4155   uInt j;               /* temporary storage */
   4156   inflate_huft *t;      /* temporary pointer */
   4157   uInt e;               /* extra bits or operation */
   4158   uLong b;              /* bit buffer */
   4159   uInt k;               /* bits in bit buffer */
   4160   Bytef *p;             /* input data pointer */
   4161   uInt n;               /* bytes available there */
   4162   Bytef *q;             /* output window write pointer */
   4163   uInt m;               /* bytes to end of window or read pointer */
   4164   Bytef *f;             /* pointer to copy strings from */
   4165   inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */
   4166 
   4167   /* copy input/output information to locals (UPDATE macro restores) */
   4168   LOAD
   4169 
   4170   /* process input and output based on current state */
   4171   while (1) switch (c->mode)
   4172   {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
   4173     case START:         /* x: set up for LEN */
   4174 #ifndef SLOW
   4175       if (m >= 258 && n >= 10)
   4176       {
   4177         UPDATE
   4178         r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
   4179         LOAD
   4180         if (r != Z_OK)
   4181         {
   4182           c->mode = r == Z_STREAM_END ? WASH : BADCODE;
   4183           break;
   4184         }
   4185       }
   4186 #endif /* !SLOW */
   4187       c->sub.code.need = c->lbits;
   4188       c->sub.code.tree = c->ltree;
   4189       c->mode = LEN;
   4190     case LEN:           /* i: get length/literal/eob next */
   4191       j = c->sub.code.need;
   4192       NEEDBITS(j)
   4193       t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
   4194       DUMPBITS(t->bits)
   4195       e = (uInt)(t->exop);
   4196       if (e == 0)               /* literal */
   4197       {
   4198         c->sub.lit = t->base;
   4199         Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
   4200                  "inflate:         literal '%c'\n" :
   4201                  "inflate:         literal 0x%02x\n", t->base));
   4202         c->mode = LIT;
   4203         break;
   4204       }
   4205       if (e & 16)               /* length */
   4206       {
   4207         c->sub.copy.get = e & 15;
   4208         c->len = t->base;
   4209         c->mode = LENEXT;
   4210         break;
   4211       }
   4212       if ((e & 64) == 0)        /* next table */
   4213       {
   4214         c->sub.code.need = e;
   4215         c->sub.code.tree = t->next;
   4216         break;
   4217       }
   4218       if (e & 32)               /* end of block */
   4219       {
   4220         Tracevv((stderr, "inflate:         end of block\n"));
   4221         c->mode = WASH;
   4222         break;
   4223       }
   4224       c->mode = BADCODE;        /* invalid code */
   4225       z->msg = "invalid literal/length code";
   4226       r = Z_DATA_ERROR;
   4227       LEAVE
   4228     case LENEXT:        /* i: getting length extra (have base) */
   4229       j = c->sub.copy.get;
   4230       NEEDBITS(j)
   4231       c->len += (uInt)b & inflate_mask[j];
   4232       DUMPBITS(j)
   4233       c->sub.code.need = c->dbits;
   4234       c->sub.code.tree = c->dtree;
   4235       Tracevv((stderr, "inflate:         length %u\n", c->len));
   4236       c->mode = DIST;
   4237     case DIST:          /* i: get distance next */
   4238       j = c->sub.code.need;
   4239       NEEDBITS(j)
   4240       t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
   4241       DUMPBITS(t->bits)
   4242       e = (uInt)(t->exop);
   4243       if (e & 16)               /* distance */
   4244       {
   4245         c->sub.copy.get = e & 15;
   4246         c->sub.copy.dist = t->base;
   4247         c->mode = DISTEXT;
   4248         break;
   4249       }
   4250       if ((e & 64) == 0)        /* next table */
   4251       {
   4252         c->sub.code.need = e;
   4253         c->sub.code.tree = t->next;
   4254         break;
   4255       }
   4256       c->mode = BADCODE;        /* invalid code */
   4257       z->msg = "invalid distance code";
   4258       r = Z_DATA_ERROR;
   4259       LEAVE
   4260     case DISTEXT:       /* i: getting distance extra */
   4261       j = c->sub.copy.get;
   4262       NEEDBITS(j)
   4263       c->sub.copy.dist += (uInt)b & inflate_mask[j];
   4264       DUMPBITS(j)
   4265       Tracevv((stderr, "inflate:         distance %u\n", c->sub.copy.dist));
   4266       c->mode = COPY;
   4267     case COPY:          /* o: copying bytes in window, waiting for space */
   4268 #ifndef __TURBOC__ /* Turbo C bug for following expression */
   4269       f = (uInt)(q - s->window) < c->sub.copy.dist ?
   4270           s->end - (c->sub.copy.dist - (q - s->window)) :
   4271           q - c->sub.copy.dist;
   4272 #else
   4273       f = q - c->sub.copy.dist;
   4274       if ((uInt)(q - s->window) < c->sub.copy.dist)
   4275         f = s->end - (c->sub.copy.dist - (q - s->window));
   4276 #endif
   4277       while (c->len)
   4278       {
   4279         NEEDOUT
   4280         OUTBYTE(*f++)
   4281         if (f == s->end)
   4282           f = s->window;
   4283         c->len--;
   4284       }
   4285       c->mode = START;
   4286       break;
   4287     case LIT:           /* o: got literal, waiting for output space */
   4288       NEEDOUT
   4289       OUTBYTE(c->sub.lit)
   4290       c->mode = START;
   4291       break;
   4292     case WASH:          /* o: got eob, possibly more output */
   4293       FLUSH
   4294       if (s->read != s->write)
   4295         LEAVE
   4296       c->mode = END;
   4297     case END:
   4298       r = Z_STREAM_END;
   4299       LEAVE
   4300     case BADCODE:       /* x: got error */
   4301       r = Z_DATA_ERROR;
   4302       LEAVE
   4303     default:
   4304       r = Z_STREAM_ERROR;
   4305       LEAVE
   4306   }
   4307 }
   4308 
   4309 
   4310 local void inflate_codes_free(c, z)
   4311 inflate_codes_statef *c;
   4312 z_stream *z;
   4313 {
   4314   ZFREE(z, c, sizeof(struct inflate_codes_state));
   4315   Tracev((stderr, "inflate:       codes free\n"));
   4316 }
   4317 
   4318 /*+++++*/
   4319 /* inflate_util.c -- data and routines common to blocks and codes
   4320  * Copyright (C) 1995 Mark Adler
   4321  * For conditions of distribution and use, see copyright notice in zlib.h
   4322  */
   4323 
   4324 /* copy as much as possible from the sliding window to the output area */
   4325 local int inflate_flush(s, z, r)
   4326 inflate_blocks_statef *s;
   4327 z_stream *z;
   4328 int r;
   4329 {
   4330   uInt n;
   4331   Bytef *p, *q;
   4332 
   4333   /* local copies of source and destination pointers */
   4334   p = z->next_out;
   4335   q = s->read;
   4336 
   4337   /* compute number of bytes to copy as far as end of window */
   4338   n = (uInt)((q <= s->write ? s->write : s->end) - q);
   4339   if (n > z->avail_out) n = z->avail_out;
   4340   if (n && r == Z_BUF_ERROR) r = Z_OK;
   4341 
   4342   /* update counters */
   4343   z->avail_out -= n;
   4344   z->total_out += n;
   4345 
   4346   /* update check information */
   4347   if (s->checkfn != Z_NULL)
   4348     s->check = (*s->checkfn)(s->check, q, n);
   4349 
   4350   /* copy as far as end of window */
   4351   if (p != NULL) {
   4352     zmemcpy(p, q, n);
   4353     p += n;
   4354   }
   4355   q += n;
   4356 
   4357   /* see if more to copy at beginning of window */
   4358   if (q == s->end)
   4359   {
   4360     /* wrap pointers */
   4361     q = s->window;
   4362     if (s->write == s->end)
   4363       s->write = s->window;
   4364 
   4365     /* compute bytes to copy */
   4366     n = (uInt)(s->write - q);
   4367     if (n > z->avail_out) n = z->avail_out;
   4368     if (n && r == Z_BUF_ERROR) r = Z_OK;
   4369 
   4370     /* update counters */
   4371     z->avail_out -= n;
   4372     z->total_out += n;
   4373 
   4374     /* update check information */
   4375     if (s->checkfn != Z_NULL)
   4376       s->check = (*s->checkfn)(s->check, q, n);
   4377 
   4378     /* copy */
   4379     if (p != NULL) {
   4380       zmemcpy(p, q, n);
   4381       p += n;
   4382     }
   4383     q += n;
   4384   }
   4385 
   4386   /* update pointers */
   4387   z->next_out = p;
   4388   s->read = q;
   4389 
   4390   /* done */
   4391   return r;
   4392 }
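/* The two zmemcpy() calls above are the usual two-segment drain of a
   circular buffer: the first copies from the read pointer up to either the
   write pointer or the end of the window, and the second, taken only when
   the end was reached, wraps to the start of the window and copies up to
   the write pointer.  Each segment is clipped to avail_out and folded into
   the running check value before the pointers are advanced. */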
   4393 
   4394 
   4395 /*+++++*/
   4396 /* inffast.c -- process literals and length/distance pairs fast
   4397  * Copyright (C) 1995 Mark Adler
   4398  * For conditions of distribution and use, see copyright notice in zlib.h
   4399  */
   4400 
   4401 /* simplify the use of the inflate_huft type with some defines */
   4402 #define base more.Base
   4403 #define next more.Next
   4404 #define exop word.what.Exop
   4405 #define bits word.what.Bits
   4406 
   4407 /* macros for bit input with no checking and for returning unused bytes */
   4408 #define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
   4409 #define UNGRAB {n+=(c=k>>3);p-=c;k&=7;}
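/* GRABBITS(j) pulls whole bytes from the input into the bit buffer b until
   at least j bits are present; UNGRAB hands the k>>3 wholly unused bytes
   back to the input (adjusting n and p) and keeps only the k&7 leftover
   bits, so the bit-level position is intact when control returns to
   inflate_codes(). */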
   4410 
   4411 /* Called with number of bytes left to write in window at least 258
   4412    (the maximum string length) and number of input bytes available
   4413    at least ten.  The ten bytes are six bytes for the longest length/
   4414    distance pair plus four bytes for overloading the bit buffer. */
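/* Worked figures behind the "ten bytes" above, using the deflate format
   limits: a length/distance pair needs at most 15 bits of length code,
   5 bits of length extra, 15 bits of distance code and 13 bits of distance
   extra, i.e. 48 bits = 6 bytes.  GRABBITS always loads whole bytes, so up
   to 4 further bytes may sit unused in the bit buffer; UNGRAB gives those
   back to the input when the loop exits. */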
   4415 
   4416 local int inflate_fast(bl, bd, tl, td, s, z)
   4417 uInt bl, bd;
   4418 inflate_huft *tl, *td;
   4419 inflate_blocks_statef *s;
   4420 z_stream *z;
   4421 {
   4422   inflate_huft *t;      /* temporary pointer */
   4423   uInt e;               /* extra bits or operation */
   4424   uLong b;              /* bit buffer */
   4425   uInt k;               /* bits in bit buffer */
   4426   Bytef *p;             /* input data pointer */
   4427   uInt n;               /* bytes available there */
   4428   Bytef *q;             /* output window write pointer */
   4429   uInt m;               /* bytes to end of window or read pointer */
   4430   uInt ml;              /* mask for literal/length tree */
   4431   uInt md;              /* mask for distance tree */
   4432   uInt c;               /* bytes to copy */
   4433   uInt d;               /* distance back to copy from */
   4434   Bytef *r;             /* copy source pointer */
   4435 
   4436   /* load input, output, bit values */
   4437   LOAD
   4438 
   4439   /* initialize masks */
   4440   ml = inflate_mask[bl];
   4441   md = inflate_mask[bd];
   4442 
   4443   /* do until not enough input or output space for fast loop */
   4444   do {                          /* assume called with m >= 258 && n >= 10 */
   4445     /* get literal/length code */
   4446     GRABBITS(20)                /* max bits for literal/length code */
   4447     if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
   4448     {
   4449       DUMPBITS(t->bits)
   4450       Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
   4451                 "inflate:         * literal '%c'\n" :
   4452                 "inflate:         * literal 0x%02x\n", t->base));
   4453       *q++ = (Byte)t->base;
   4454       m--;
   4455       continue;
   4456     }
   4457     do {
   4458       DUMPBITS(t->bits)
   4459       if (e & 16)
   4460       {
   4461         /* get extra bits for length */
   4462         e &= 15;
   4463         c = t->base + ((uInt)b & inflate_mask[e]);
   4464         DUMPBITS(e)
   4465         Tracevv((stderr, "inflate:         * length %u\n", c));
   4466 
   4467         /* decode distance base of block to copy */
   4468         GRABBITS(15);           /* max bits for distance code */
   4469         e = (t = td + ((uInt)b & md))->exop;
   4470         do {
   4471           DUMPBITS(t->bits)
   4472           if (e & 16)
   4473           {
   4474             /* get extra bits to add to distance base */
   4475             e &= 15;
   4476             GRABBITS(e)         /* get extra bits (up to 13) */
   4477             d = t->base + ((uInt)b & inflate_mask[e]);
   4478             DUMPBITS(e)
   4479             Tracevv((stderr, "inflate:         * distance %u\n", d));
   4480 
   4481             /* do the copy */
   4482             m -= c;
   4483             if ((uInt)(q - s->window) >= d)     /* offset before dest */
   4484             {                                   /*  just copy */
   4485               r = q - d;
   4486               *q++ = *r++;  c--;        /* minimum count is three, */
   4487               *q++ = *r++;  c--;        /*  so unroll loop a little */
   4488             }
   4489             else                        /* else offset after destination */
   4490             {
   4491               e = d - (q - s->window);  /* bytes from offset to end */
   4492               r = s->end - e;           /* pointer to offset */
   4493               if (c > e)                /* if source crosses, */
   4494               {
   4495                 c -= e;                 /* copy to end of window */
   4496                 do {
   4497                   *q++ = *r++;
   4498                 } while (--e);
   4499                 r = s->window;          /* copy rest from start of window */
   4500               }
   4501             }
   4502             do {                        /* copy all or what's left */
   4503               *q++ = *r++;
   4504             } while (--c);
   4505             break;
   4506           }
   4507           else if ((e & 64) == 0)
   4508             e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop;
   4509           else
   4510           {
   4511             z->msg = "invalid distance code";
   4512             UNGRAB
   4513             UPDATE
   4514             return Z_DATA_ERROR;
   4515           }
   4516         } while (1);
   4517         break;
   4518       }
   4519       if ((e & 64) == 0)
   4520       {
   4521         if ((e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop) == 0)
   4522         {
   4523           DUMPBITS(t->bits)
   4524           Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
   4525                     "inflate:         * literal '%c'\n" :
   4526                     "inflate:         * literal 0x%02x\n", t->base));
   4527           *q++ = (Byte)t->base;
   4528           m--;
   4529           break;
   4530         }
   4531       }
   4532       else if (e & 32)
   4533       {
   4534         Tracevv((stderr, "inflate:         * end of block\n"));
   4535         UNGRAB
   4536         UPDATE
   4537         return Z_STREAM_END;
   4538       }
   4539       else
   4540       {
   4541         z->msg = "invalid literal/length code";
   4542         UNGRAB
   4543         UPDATE
   4544         return Z_DATA_ERROR;
   4545       }
   4546     } while (1);
   4547   } while (m >= 258 && n >= 10);
   4548 
   4549   /* not enough input or output--restore pointers and return */
   4550   UNGRAB
   4551   UPDATE
   4552   return Z_OK;
   4553 }
   4554 
   4555 
   4556 /*+++++*/
   4557 /* zutil.c -- target dependent utility functions for the compression library
   4558  * Copyright (C) 1995 Jean-loup Gailly.
   4559  * For conditions of distribution and use, see copyright notice in zlib.h
   4560  */
   4561 
   4562 /* From: zutil.c,v 1.8 1995/05/03 17:27:12 jloup Exp */
   4563 
   4564 char *zlib_version = ZLIB_VERSION;
   4565 
   4566 char *z_errmsg[] = {
   4567 "stream end",          /* Z_STREAM_END    1 */
   4568 "",                    /* Z_OK            0 */
   4569 "file error",          /* Z_ERRNO        (-1) */
   4570 "stream error",        /* Z_STREAM_ERROR (-2) */
   4571 "data error",          /* Z_DATA_ERROR   (-3) */
   4572 "insufficient memory", /* Z_MEM_ERROR    (-4) */
   4573 "buffer error",        /* Z_BUF_ERROR    (-5) */
   4574 ""};
   4575 
   4576 
   4577 /*+++++*/
   4578 /* adler32.c -- compute the Adler-32 checksum of a data stream
   4579  * Copyright (C) 1995 Mark Adler
   4580  * For conditions of distribution and use, see copyright notice in zlib.h
   4581  */
   4582 
   4583 /* From: adler32.c,v 1.6 1995/05/03 17:27:08 jloup Exp */
   4584 
   4585 #define BASE 65521L /* largest prime smaller than 65536 */
   4586 #define NMAX 5552
   4587 /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
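/* Checking that bound for NMAX (worked out with BASE = 65521):
     n = 5552:  255*5552*5553/2 + 5553*65520 = 4294690200 <= 4294967295
     n = 5553:  255*5553*5554/2 + 5554*65520 = 4296171735  > 4294967295
   so 5552 is indeed the largest n for which s2 cannot overflow an unsigned
   32-bit accumulator before the modulo reductions below. */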
   4588 
   4589 #define DO1(buf)  {s1 += *buf++; s2 += s1;}
   4590 #define DO2(buf)  DO1(buf); DO1(buf);
   4591 #define DO4(buf)  DO2(buf); DO2(buf);
   4592 #define DO8(buf)  DO4(buf); DO4(buf);
   4593 #define DO16(buf) DO8(buf); DO8(buf);
   4594 
   4595 /* ========================================================================= */
   4596 uLong adler32(adler, buf, len)
   4597     uLong adler;
   4598     Bytef *buf;
   4599     uInt len;
   4600 {
   4601     unsigned long s1 = adler & 0xffff;
   4602     unsigned long s2 = (adler >> 16) & 0xffff;
   4603     int k;
   4604 
   4605     if (buf == Z_NULL) return 1L;
   4606 
   4607     while (len > 0) {
   4608         k = len < NMAX ? len : NMAX;
   4609         len -= k;
   4610         while (k >= 16) {
   4611             DO16(buf);
   4612             k -= 16;
   4613         }
   4614         if (k != 0) do {
   4615             DO1(buf);
   4616         } while (--k);
   4617         s1 %= BASE;
   4618         s2 %= BASE;
   4619     }
   4620     return (s2 << 16) | s1;
   4621 }
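/* Usage sketch (illustrative; next_chunk() is a hypothetical source of
   data, not part of zlib): the running checksum is threaded through
   successive calls, and a Z_NULL buffer returns the initial value 1. */
#if 0
{
    uLong check = adler32(0L, Z_NULL, 0);        /* check == 1 */
    Bytef *buf;
    uInt len;

    while ((len = next_chunk(&buf)) != 0)        /* hypothetical helper */
        check = adler32(check, buf, len);
}
#endif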
   4622