zlib.c revision 1.23 1 /* $NetBSD: zlib.c,v 1.23 2006/01/14 18:58:05 christos Exp $ */
2 /*
3 * This file is derived from various .h and .c files from the zlib-1.0.4
4 * distribution by Jean-loup Gailly and Mark Adler, with some additions
5 * by Paul Mackerras to aid in implementing Deflate compression and
6 * decompression for PPP packets. See zlib.h for conditions of
7 * distribution and use.
8 *
9 * Changes that have been made include:
10 * - added Z_PACKET_FLUSH (see zlib.h for details)
11 * - added inflateIncomp and deflateOutputPending
12 * - allow strm->next_out to be NULL, meaning discard the output
13 *
14 * $Id: zlib.c,v 1.23 2006/01/14 18:58:05 christos Exp $
15 */
16
17 /*
18 * ==FILEVERSION 020312==
19 *
20 * This marker is used by the Linux installation script to determine
21 * whether an up-to-date version of this file is already installed.
22 */
23
24 #include <sys/cdefs.h>
25 __KERNEL_RCSID(0, "$NetBSD: zlib.c,v 1.23 2006/01/14 18:58:05 christos Exp $");
26
27 #define NO_DUMMY_DECL
28 #define NO_ZCFUNCS
29 #define MY_ZCALLOC
30
31 #if defined(__FreeBSD__) && (defined(KERNEL) || defined(_KERNEL))
32 #define inflate inflate_ppp /* FreeBSD already has an inflate :-( */
33 #endif
34
35
36 /* +++ zutil.h */
37
38 /* zutil.h -- internal interface and configuration of the compression library
39 * Copyright (C) 1995-2002 Jean-loup Gailly.
40 * For conditions of distribution and use, see copyright notice in zlib.h
41 */
42
43 /* WARNING: this file should *not* be used by applications. It is
44 part of the implementation of the compression library and is
45 subject to change. Applications should only use zlib.h.
46 */
47
48 /* @(#) $Id: zlib.c,v 1.23 2006/01/14 18:58:05 christos Exp $ */
49
50 #ifndef _Z_UTIL_H
51 #define _Z_UTIL_H
52
53 #include "zlib.h"
54
55 #if defined(KERNEL) || defined(_KERNEL)
56 /* Assume this is a *BSD or SVR4 kernel */
57 #include <sys/param.h>
58 #include <sys/time.h>
59 #include <sys/systm.h>
60 # define HAVE_MEMCPY
61 #else
62 #if defined(__KERNEL__)
63 /* Assume this is a Linux kernel */
64 #include <linux/string.h>
65 #define HAVE_MEMCPY
66
67 #else /* not kernel */
68
69 #if defined(__NetBSD__) && (defined(_KERNEL) || defined(_STANDALONE))
70
71 /* XXX doesn't seem to need anything at all, but this is for consistency. */
72 # include <lib/libkern/libkern.h>
73
74 #else
75 # include <sys/types.h>
76 # include <sys/param.h>
77 #ifdef STDC
78 # include <stddef.h>
79 # include <string.h>
80 # include <stdlib.h>
81 #endif
82 #ifdef NO_ERRNO_H
83 extern int errno;
84 #else
85 # include <errno.h>
86 #endif
87 #endif /* __NetBSD__ && _STANDALONE */
88 #endif /* __KERNEL__ */
89 #endif /* _KERNEL || KERNEL */
90
91
92 #ifndef local
93 # define local static
94 #endif
95 /* compile with -Dlocal if your debugger can't find static symbols */
96
97 typedef unsigned char uch;
98 typedef uch FAR uchf;
99 typedef unsigned short ush;
100 typedef ush FAR ushf;
101 typedef unsigned long ulg;
102
103 extern const char *z_errmsg[10]; /* indexed by 2-zlib_error */
104 /* (size given to avoid silly warnings with Visual C++) */
105
106 #define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]
107
108 #define ERR_RETURN(strm,err) \
109 return (strm->msg = ERR_MSG(err), (err))
110 /* To be used only when the state is known to be valid */
111
112 /* common constants */
113
114 #ifndef DEF_WBITS
115 # define DEF_WBITS MAX_WBITS
116 #endif
117 /* default windowBits for decompression. MAX_WBITS is for compression only */
118
119 #if MAX_MEM_LEVEL >= 8
120 # define DEF_MEM_LEVEL 8
121 #else
122 # define DEF_MEM_LEVEL MAX_MEM_LEVEL
123 #endif
124 /* default memLevel */
125
126 #define STORED_BLOCK 0
127 #define STATIC_TREES 1
128 #define DYN_TREES 2
129 /* The three kinds of block type */
130
131 #define MIN_MATCH 3
132 #define MAX_MATCH 258
133 /* The minimum and maximum match lengths */
134
135 #define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */
136
137 /* target dependencies */
138
139 #ifdef MSDOS
140 # define OS_CODE 0x00
141 # if defined(__TURBOC__) || defined(__BORLANDC__)
142 # if(__STDC__ == 1) && (defined(__LARGE__) || defined(__COMPACT__))
143 /* Allow compilation with ANSI keywords only enabled */
144 void _Cdecl farfree( void *block );
145 void *_Cdecl farmalloc( unsigned long nbytes );
146 # else
147 # include <alloc.h>
148 # endif
149 # else /* MSC or DJGPP */
150 # include <malloc.h>
151 # endif
152 #endif
153
154 #ifdef OS2
155 # define OS_CODE 0x06
156 #endif
157
158 #ifdef WIN32 /* Window 95 & Windows NT */
159 # define OS_CODE 0x0b
160 #endif
161
162 #if defined(VAXC) || defined(VMS)
163 # define OS_CODE 0x02
164 # define F_OPEN(name, mode) \
165 fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
166 #endif
167
168 #ifdef AMIGA
169 # define OS_CODE 0x01
170 #endif
171
172 #if defined(ATARI) || defined(atarist)
173 # define OS_CODE 0x05
174 #endif
175
176 #if defined(MACOS) || defined(TARGET_OS_MAC)
177 # define OS_CODE 0x07
178 # if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os
179 # include <unix.h> /* for fdopen */
180 # else
181 # ifndef fdopen
182 # define fdopen(fd,mode) NULL /* No fdopen() */
183 # endif
184 # endif
185 #endif
186
187 #ifdef __50SERIES /* Prime/PRIMOS */
188 # define OS_CODE 0x0F
189 #endif
190
191 #ifdef TOPS20
192 # define OS_CODE 0x0a
193 #endif
194
195 #if defined(_BEOS_) || defined(RISCOS)
196 # define fdopen(fd,mode) NULL /* No fdopen() */
197 #endif
198
199 #if (defined(_MSC_VER) && (_MSC_VER > 600))
200 # define fdopen(fd,type) _fdopen(fd,type)
201 #endif
202
203
204 /* Common defaults */
205
206 #ifndef OS_CODE
207 # define OS_CODE 0x03 /* assume Unix */
208 #endif
209
210 #ifndef F_OPEN
211 # define F_OPEN(name, mode) fopen((name), (mode))
212 #endif
213
214 /* functions */
215
216 #ifdef HAVE_STRERROR
217 extern char *strerror __P((int));
218 # define zstrerror(errnum) strerror(errnum)
219 #else
220 # define zstrerror(errnum) ""
221 #endif
222
223 #if defined(pyr)
224 # define NO_MEMCPY
225 #endif
226 #if defined(SMALL_MEDIUM) && !defined(_MSC_VER) && !defined(__SC__)
227 /* Use our own functions for small and medium model with MSC <= 5.0.
228 * You may have to use the same strategy for Borland C (untested).
229 * The __SC__ check is for Symantec.
230 */
231 # define NO_MEMCPY
232 #endif
233 #if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
234 # define HAVE_MEMCPY
235 #endif
236 #ifdef HAVE_MEMCPY
237 # ifdef SMALL_MEDIUM /* MSDOS small or medium model */
238 # define zmemcpy _fmemcpy
239 # define zmemcmp _fmemcmp
240 # define zmemzero(dest, len) _fmemset(dest, 0, len)
241 # else
242 # define zmemcpy memcpy
243 # define zmemcmp memcmp
244 # define zmemzero(dest, len) memset(dest, 0, len)
245 # endif
246 #else
247 extern void zmemcpy __P((Bytef* dest, const Bytef* source, uInt len));
248 extern int zmemcmp __P((const Bytef* s1, const Bytef* s2, uInt len));
249 extern void zmemzero __P((Bytef* dest, uInt len));
250 #endif
251
252 /* Diagnostic functions */
253 #if defined(DEBUG_ZLIB) && !defined(_KERNEL) && !defined(_STANDALONE)
254 # include <stdio.h>
255 extern int z_verbose;
256 extern void z_error __P((char *m));
257 # define Assert(cond,msg) {if(!(cond)) z_error(msg);}
258 # define Trace(x) {if (z_verbose>=0) fprintf x ;}
259 # define Tracev(x) {if (z_verbose>0) fprintf x ;}
260 # define Tracevv(x) {if (z_verbose>1) fprintf x ;}
261 # define Tracec(c,x) {if (z_verbose>0 && (c)) fprintf x ;}
262 # define Tracecv(c,x) {if (z_verbose>1 && (c)) fprintf x ;}
263 #else
264 # define Assert(cond,msg)
265 # define Trace(x)
266 # define Tracev(x)
267 # define Tracevv(x)
268 # define Tracec(c,x)
269 # define Tracecv(c,x)
270 #endif
271
272
273 typedef uLong (ZEXPORT *check_func) __P((uLong check, const Bytef *buf,
274 uInt len));
275 voidpf zcalloc __P((voidpf opaque, unsigned items, unsigned size));
276 void zcfree __P((voidpf opaque, voidpf ptr));
277
278 #define ZALLOC(strm, items, size) \
279 (*((strm)->zalloc))((strm)->opaque, (items), (size))
280 #define ZFREE(strm, addr) (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
281 #define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
282
283 #endif /* _Z_UTIL_H */
284 /* --- zutil.h */
285
286 /* +++ deflate.h */
287
288 /* deflate.h -- internal compression state
289 * Copyright (C) 1995-2002 Jean-loup Gailly
290 * For conditions of distribution and use, see copyright notice in zlib.h
291 */
292
293 /* WARNING: this file should *not* be used by applications. It is
294 part of the implementation of the compression library and is
295 subject to change. Applications should only use zlib.h.
296 */
297
298 /* @(#) $Id: zlib.c,v 1.23 2006/01/14 18:58:05 christos Exp $ */
299
300 #ifndef _DEFLATE_H
301 #define _DEFLATE_H
302
303 /* #include "zutil.h" */
304
305 /* ===========================================================================
306 * Internal compression state.
307 */
308
309 #define LENGTH_CODES 29
310 /* number of length codes, not counting the special END_BLOCK code */
311
312 #define LITERALS 256
313 /* number of literal bytes 0..255 */
314
315 #define L_CODES (LITERALS+1+LENGTH_CODES)
316 /* number of Literal or Length codes, including the END_BLOCK code */
317
318 #define D_CODES 30
319 /* number of distance codes */
320
321 #define BL_CODES 19
322 /* number of codes used to transfer the bit lengths */
323
324 #define HEAP_SIZE (2*L_CODES+1)
325 /* maximum heap size */
326
327 #define MAX_BITS 15
328 /* All codes must not exceed MAX_BITS bits */
329
330 #define INIT_STATE 42
331 #define BUSY_STATE 113
332 #define FINISH_STATE 666
333 /* Stream status */
334
335
336 /* Data structure describing a single value and its code string. */
typedef struct ct_data_s {
    union {
        ush  freq;       /* frequency count */
        ush  code;       /* bit string */
    } fc;   /* NOTE(review): union suggests freq is the build-time view and
             * code the emit-time view of the same node — confirm in trees.c */
    union {
        ush  dad;        /* father node in Huffman tree */
        ush  len;        /* length of bit string */
    } dl;   /* same overlap: dad while building, len once codes are assigned */
} FAR ct_data;
347
348 #define Freq fc.freq
349 #define Code fc.code
350 #define Dad dl.dad
351 #define Len dl.len
352
353 typedef struct static_tree_desc_s static_tree_desc;
354
/* Pairs a dynamic (per-stream) tree with its corresponding static template. */
typedef struct tree_desc_s {
    ct_data *dyn_tree;           /* the dynamic tree */
    int     max_code;            /* largest code with non zero frequency */
    static_tree_desc *stat_desc; /* the corresponding static tree */
} FAR tree_desc;
360
361 typedef ush Pos;
362 typedef Pos FAR Posf;
363 typedef unsigned IPos;
364
365 /* A Pos is an index in the character window. We use short instead of int to
366 * save space in the various tables. IPos is used only for parameter passing.
367 */
368
/*
 * deflate_state -- complete internal state of one deflate stream.
 * Allocated in deflateInit2_() and reached through strm->state; the
 * window/prev/head/pending_buf arrays are separate ZALLOC'd blocks.
 */
typedef struct deflate_state {
    z_streamp strm;      /* pointer back to this zlib stream */
    int   status;        /* stream status: INIT/BUSY/FINISH_STATE */
    Bytef *pending_buf;  /* output still pending */
    ulg   pending_buf_size; /* size of pending_buf */
    Bytef *pending_out;  /* next pending byte to output to the stream */
    int   pending;       /* nb of bytes in the pending buffer */
    int   noheader;      /* suppress zlib header and adler32 */
    Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
    Byte  method;        /* STORED (for zip only) or DEFLATED */
    int   last_flush;    /* value of flush param for previous deflate call */

                /* used by deflate.c: */

    uInt  w_size;        /* LZ77 window size (32K by default) */
    uInt  w_bits;        /* log2(w_size)  (8..16) */
    uInt  w_mask;        /* w_size - 1 */

    Bytef *window;
    /* Sliding window. Input bytes are read into the second half of the window,
     * and move to the first half later to keep a dictionary of at least wSize
     * bytes. With this organization, matches are limited to a distance of
     * wSize-MAX_MATCH bytes, but this ensures that IO is always
     * performed with a length multiple of the block size. Also, it limits
     * the window size to 64K, which is quite useful on MSDOS.
     * To do: use the user input buffer as sliding window.
     */

    ulg window_size;
    /* Actual size of window: 2*wSize, except when the user input buffer
     * is directly used as sliding window.
     */

    Posf *prev;
    /* Link to older string with same hash index. To limit the size of this
     * array to 64K, this link is maintained only for the last 32K strings.
     * An index in this array is thus a window index modulo 32K.
     */

    Posf *head; /* Heads of the hash chains or NIL. */

    uInt  ins_h;          /* hash index of string to be inserted */
    uInt  hash_size;      /* number of elements in hash table */
    uInt  hash_bits;      /* log2(hash_size) */
    uInt  hash_mask;      /* hash_size-1 */

    uInt  hash_shift;
    /* Number of bits by which ins_h must be shifted at each input
     * step. It must be such that after MIN_MATCH steps, the oldest
     * byte no longer takes part in the hash key, that is:
     *   hash_shift * MIN_MATCH >= hash_bits
     */

    long block_start;
    /* Window position at the beginning of the current output block. Gets
     * negative when the window is moved backwards.
     */

    uInt match_length;           /* length of best match */
    IPos prev_match;             /* previous match */
    int match_available;         /* set if previous match exists */
    uInt strstart;               /* start of string to insert */
    uInt match_start;            /* start of matching string */
    uInt lookahead;              /* number of valid bytes ahead in window */

    uInt prev_length;
    /* Length of the best match at previous step. Matches not greater than this
     * are discarded. This is used in the lazy match evaluation.
     */

    uInt max_chain_length;
    /* To speed up deflation, hash chains are never searched beyond this
     * length.  A higher limit improves compression ratio but degrades the
     * speed.
     */

    uInt max_lazy_match;
    /* Attempt to find a better match only when the current match is strictly
     * smaller than this value. This mechanism is used only for compression
     * levels >= 4.
     */
# define max_insert_length  max_lazy_match
    /* Insert new strings in the hash table only if the match length is not
     * greater than this length. This saves time but degrades compression.
     * max_insert_length is used only for compression levels <= 3.
     */

    int level;    /* compression level (1..9) */
    int strategy; /* favor or force Huffman coding */

    uInt good_match;
    /* Use a faster search when the previous match is longer than this */

    int nice_match; /* Stop searching when current match exceeds this */

                /* used by trees.c: */
    /* Didn't use ct_data typedef below to suppress compiler warning */
    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */

    struct tree_desc_s l_desc;               /* desc. for literal tree */
    struct tree_desc_s d_desc;               /* desc. for distance tree */
    struct tree_desc_s bl_desc;              /* desc. for bit length tree */

    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
    int heap_len;               /* number of elements in the heap */
    int heap_max;               /* element of largest frequency */
    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
     * The same heap array is used to build all trees.
     */

    uch depth[2*L_CODES+1];
    /* Depth of each subtree used as tie breaker for trees of equal frequency
     */

    uchf *l_buf;          /* buffer for literals or lengths */

    uInt  lit_bufsize;
    /* Size of match buffer for literals/lengths.  There are 4 reasons for
     * limiting lit_bufsize to 64K:
     *   - frequencies can be kept in 16 bit counters
     *   - if compression is not successful for the first block, all input
     *     data is still in the window so we can still emit a stored block even
     *     when input comes from standard input.  (This can also be done for
     *     all blocks if lit_bufsize is not greater than 32K.)
     *   - if compression is not successful for a file smaller than 64K, we can
     *     even emit a stored file instead of a stored block (saving 5 bytes).
     *     This is applicable only for zip (not gzip or zlib).
     *   - creating new Huffman trees less frequently may not provide fast
     *     adaptation to changes in the input data statistics. (Take for
     *     example a binary file with poorly compressible code followed by
     *     a highly compressible string table.) Smaller buffer sizes give
     *     fast adaptation but have of course the overhead of transmitting
     *     trees more frequently.
     *   - I can't count above 4
     */

    uInt last_lit;      /* running index in l_buf */

    ushf *d_buf;
    /* Buffer for distances. To simplify the code, d_buf and l_buf have
     * the same number of elements. To use different lengths, an extra flag
     * array would be necessary.
     */

    ulg opt_len;        /* bit length of current block with optimal trees */
    ulg static_len;     /* bit length of current block with static trees */
    uInt matches;       /* number of string matches in current block */
    int last_eob_len;   /* bit length of EOB code for last block */

#ifdef DEBUG_ZLIB
    ulg compressed_len; /* total bit length of compressed file mod 2^32 */
    ulg bits_sent;      /* bit length of compressed data sent mod 2^32 */
#endif

    ush bi_buf;
    /* Output buffer. bits are inserted starting at the bottom (least
     * significant bits).
     */
    int bi_valid;
    /* Number of valid bits in bi_buf. All bits above the last valid bit
     * are always zero.
     */

} FAR deflate_state;
538
539 /* Output a byte on the stream.
540 * IN assertion: there is enough room in pending_buf.
541 */
542 #define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
543
544
545 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
546 /* Minimum amount of lookahead, except at the end of the input file.
547 * See deflate.c for comments about the MIN_MATCH+1.
548 */
549
550 #define MAX_DIST(s) ((s)->w_size-MIN_LOOKAHEAD)
551 /* In order to simplify the code, particularly on 16 bit machines, match
552 * distances are limited to MAX_DIST instead of WSIZE.
553 */
554
555 /* in trees.c */
556 void _tr_init __P((deflate_state *s));
557 int _tr_tally __P((deflate_state *s, unsigned dist, unsigned lc));
558 void _tr_flush_block __P((deflate_state *s, charf *buf, ulg stored_len,
559 int eof));
560 void _tr_align __P((deflate_state *s));
561 void _tr_stored_block __P((deflate_state *s, charf *buf, ulg stored_len,
562 int eof));
563 void _tr_stored_type_only __P((deflate_state *));
564
565 #define d_code(dist) \
566 ((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
567 /* Mapping from a distance to a distance code. dist is the distance - 1 and
568 * must not have side effects. _dist_code[256] and _dist_code[257] are never
569 * used.
570 */
571
572 #ifndef DEBUG_ZLIB
573 /* Inline versions of _tr_tally for speed: */
574
575 #if defined(GEN_TREES_H) || !defined(STDC)
576 extern uch _length_code[];
577 extern uch _dist_code[];
578 #else
579 extern const uch _length_code[];
580 extern const uch _dist_code[];
581 #endif
582
583 # define _tr_tally_lit(s, c, flush) \
584 { uch cc = (c); \
585 s->d_buf[s->last_lit] = 0; \
586 s->l_buf[s->last_lit++] = cc; \
587 s->dyn_ltree[cc].Freq++; \
588 flush = (s->last_lit == s->lit_bufsize-1); \
589 }
590 # define _tr_tally_dist(s, distance, length, flush) \
591 { uch len = (length); \
592 ush dist = (distance); \
593 s->d_buf[s->last_lit] = dist; \
594 s->l_buf[s->last_lit++] = len; \
595 dist--; \
596 s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
597 s->dyn_dtree[d_code(dist)].Freq++; \
598 flush = (s->last_lit == s->lit_bufsize-1); \
599 }
600 #else
601 # define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
602 # define _tr_tally_dist(s, distance, length, flush) \
603 flush = _tr_tally(s, distance, length)
604 #endif
605
606 #endif
607 /* --- deflate.h */
608
609 /* +++ deflate.c */
610
611 /* deflate.c -- compress data using the deflation algorithm
612 * Copyright (C) 1995-2002 Jean-loup Gailly.
613 * For conditions of distribution and use, see copyright notice in zlib.h
614 */
615
616 /*
617 * ALGORITHM
618 *
619 * The "deflation" process depends on being able to identify portions
620 * of the input text which are identical to earlier input (within a
621 * sliding window trailing behind the input currently being processed).
622 *
623 * The most straightforward technique turns out to be the fastest for
624 * most input files: try all possible matches and select the longest.
625 * The key feature of this algorithm is that insertions into the string
626 * dictionary are very simple and thus fast, and deletions are avoided
627 * completely. Insertions are performed at each input character, whereas
628 * string matches are performed only when the previous match ends. So it
629 * is preferable to spend more time in matches to allow very fast string
630 * insertions and avoid deletions. The matching algorithm for small
631 * strings is inspired from that of Rabin & Karp. A brute force approach
632 * is used to find longer strings when a small match has been found.
633 * A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
634 * (by Leonid Broukhis).
635 * A previous version of this file used a more sophisticated algorithm
636 * (by Fiala and Greene) which is guaranteed to run in linear amortized
637 * time, but has a larger average cost, uses more memory and is patented.
638 * However the F&G algorithm may be faster for some highly redundant
639 * files if the parameter max_chain_length (described below) is too large.
640 *
641 * ACKNOWLEDGEMENTS
642 *
643 * The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
644 * I found it in 'freeze' written by Leonid Broukhis.
645 * Thanks to many people for bug reports and testing.
646 *
647 * REFERENCES
648 *
649 * Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
650 * Available in ftp://ds.internic.net/rfc/rfc1951.txt
651 *
652 * A description of the Rabin and Karp algorithm is given in the book
653 * "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
654 *
655 * Fiala,E.R., and Greene,D.H.
656 * Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
657 *
658 */
659
660 /* @(#) $Id: zlib.c,v 1.23 2006/01/14 18:58:05 christos Exp $ */
661
662 /* #include "deflate.h" */
663
664 const char deflate_copyright[] =
665 " deflate 1.1.4 Copyright 1995-2002 Jean-loup Gailly ";
666 /*
667 If you use the zlib library in a product, an acknowledgment is welcome
668 in the documentation of your product. If for some reason you cannot
669 include such an acknowledgment, I would appreciate that you keep this
670 copyright string in the executable of your product.
671 */
672
673 /* ===========================================================================
674 * Function prototypes.
675 */
/* Result of one compress_func call: how far the current block progressed. */
typedef enum {
    need_more,      /* block not completed, need more input or more output */
    block_done,     /* block flush performed */
    finish_started, /* finish started, need only more output at next deflate */
    finish_done     /* finish done, accept no more input or output */
} block_state;
682
683 typedef block_state (*compress_func) __P((deflate_state *s, int flush));
684 /* Compression function. Returns the block state after the call. */
685
686 local void fill_window __P((deflate_state *s));
687 local block_state deflate_stored __P((deflate_state *s, int flush));
688 local block_state deflate_fast __P((deflate_state *s, int flush));
689 local block_state deflate_slow __P((deflate_state *s, int flush));
690 local void lm_init __P((deflate_state *s));
691 local void putShortMSB __P((deflate_state *s, uInt b));
692 local void flush_pending __P((z_streamp strm));
693 local int read_buf __P((z_streamp strm, Bytef *buf, unsigned size));
694 #ifdef ASMV
695 void match_init __P((void)); /* asm code initialization */
696 uInt longest_match __P((deflate_state *s, IPos cur_match));
697 #else
698 local uInt longest_match __P((deflate_state *s, IPos cur_match));
699 #endif
700
701 #ifdef DEBUG_ZLIB
702 local void check_match __P((deflate_state *s, IPos start, IPos match,
703 int length));
704 #endif
705
706 /* ===========================================================================
707 * Local data
708 */
709
710 #define NIL 0
711 /* Tail of hash chains */
712
713 #ifndef TOO_FAR
714 # define TOO_FAR 4096
715 #endif
716 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
717
718 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
719 /* Minimum amount of lookahead, except at the end of the input file.
720 * See deflate.c for comments about the MIN_MATCH+1.
721 */
722
723 /* Values for max_lazy_match, good_match and max_chain_length, depending on
724 * the desired pack level (0..9). The values given below have been tuned to
725 * exclude worst case performance for pathological files. Better values may be
726 * found for specific files.
727 */
typedef struct config_s {
   ush good_length; /* reduce lazy search above this match length */
   ush max_lazy;    /* do not perform lazy search above this match length */
   ush nice_length; /* quit search above this match length */
   ush max_chain;   /* never search a hash chain beyond this many entries */
   compress_func func; /* block-compression routine for this level */
} config;
735
/* Tuning parameters indexed by compression level 0..9; see the note that
 * follows the table for the constraints deflate() places on these values. */
local const config configuration_table[10] = {
/*      good lazy nice chain */
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
/* 1 */ {4,    4,  8,    4, deflate_fast}, /* maximum speed, no lazy matches */
/* 2 */ {4,    5, 16,    8, deflate_fast},
/* 3 */ {4,    6, 32,   32, deflate_fast},

/* 4 */ {4,    4, 16,   16, deflate_slow},  /* lazy matches */
/* 5 */ {8,   16, 32,   32, deflate_slow},
/* 6 */ {8,   16, 128, 128, deflate_slow},
/* 7 */ {8,   32, 128, 256, deflate_slow},
/* 8 */ {32, 128, 258, 1024, deflate_slow},
/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */
749
750 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
751 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
752 * meaning.
753 */
754
755 #define EQUAL 0
756 /* result of memcmp for equal strings */
757
758 #ifndef NO_DUMMY_DECL
759 struct static_tree_desc_s {int dummy;}; /* for buggy compilers */
760 #endif
761
762 /* ===========================================================================
763 * Update a hash value with the given input byte
764 * IN assertion: all calls to to UPDATE_HASH are made with consecutive
765 * input characters, so that a running hash key can be computed from the
766 * previous key instead of complete recalculation each time.
767 */
768 #define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
769
770
771 /* ===========================================================================
772 * Insert string str in the dictionary and set match_head to the previous head
773 * of the hash chain (the most recent string with same hash key). Return
774 * the previous length of the hash chain.
775 * If this file is compiled with -DFASTEST, the compression level is forced
776 * to 1, and no hash chains are maintained.
777 * IN assertion: all calls to to INSERT_STRING are made with consecutive
778 * input characters and the first MIN_MATCH bytes of str are valid
779 * (except for the last MIN_MATCH-1 bytes of the input file).
780 */
781 #ifdef FASTEST
782 #define INSERT_STRING(s, str, match_head) \
783 (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
784 match_head = s->head[s->ins_h], \
785 s->head[s->ins_h] = (Pos)(str))
786 #else
787 #define INSERT_STRING(s, str, match_head) \
788 (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
789 s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
790 s->head[s->ins_h] = (Pos)(str))
791 #endif
792
/* ===========================================================================
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
 * prev[] will be initialized on the fly.
 *
 * Wrapped in do { ... } while (0) so that both statements bind as a single
 * statement; the previous bare two-statement expansion would silently leave
 * the zmemzero() unconditional if the macro were ever used as the body of
 * an unbraced if/else or loop.
 */
#define CLEAR_HASH(s) \
    do { \
	s->head[s->hash_size-1] = NIL; \
	zmemzero((Bytef *)s->head, \
	    (unsigned)(s->hash_size-1)*sizeof(*s->head)); \
    } while (0)
800
801 /* ========================================================================= */
802 #if 0
/*
 * Convenience initializer: forwards to deflateInit2_() with the standard
 * defaults (Z_DEFLATED method, MAX_WBITS window, DEF_MEM_LEVEL memory,
 * Z_DEFAULT_STRATEGY).  version/stream_size guard against a zlib.h/library
 * ABI mismatch.  NOTE(review): compiled out with #if 0 in this build.
 */
int ZEXPORT deflateInit_(strm, level, version, stream_size)
    z_streamp strm;
    int level;
    const char *version;
    int stream_size;
{
    return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
			 Z_DEFAULT_STRATEGY, version, stream_size);
    /* To do: ignore strm->next_in if we use it as window */
}
813 #endif
814
815 /* ========================================================================= */
/*
 * deflateInit2_ -- full-control initializer for a deflate stream.
 *
 *   strm        stream to set up; zalloc/zfree/opaque may be preset by caller
 *   level       0..9, Z_DEFAULT_COMPRESSION is mapped to 6
 *   method      must be Z_DEFLATED
 *   windowBits  9..15; a negative value requests the same magnitude but
 *               suppresses the zlib header/adler32 (undocumented feature)
 *   memLevel    1..MAX_MEM_LEVEL; sizes the hash table and literal buffer
 *   strategy    0..Z_HUFFMAN_ONLY
 *   vers, stream_size  sanity check that the caller compiled against a
 *               compatible zlib.h
 *
 * Returns Z_OK, or Z_VERSION_ERROR / Z_STREAM_ERROR / Z_MEM_ERROR.
 * On success the state is allocated via ZALLOC and the stream is reset.
 */
int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
		  vers, stream_size)
    z_streamp strm;
    int  level;
    int  method;
    int  windowBits;
    int  memLevel;
    int  strategy;
    const char *vers;
    int stream_size;
{
    deflate_state *s;
    int noheader = 0;
    static const char* my_version = ZLIB_VERSION;

    ushf *overlay;
    /* We overlay pending_buf and d_buf+l_buf. This works since the average
     * output size for (length,distance) codes is <= 24 bits.
     */

    /* Major version of caller's zlib.h must match ours, as must the size
     * of z_stream (a cheap ABI check). */
    if (vers == Z_NULL || vers[0] != my_version[0] ||
        stream_size != sizeof(z_stream)) {
	return Z_VERSION_ERROR;
    }
    if (strm == Z_NULL) return Z_STREAM_ERROR;

    strm->msg = Z_NULL;
#ifndef NO_ZCFUNCS
    /* NOTE: compiled out here — NO_ZCFUNCS is defined at the top of this
     * file, so callers must supply zalloc/zfree themselves. */
    if (strm->zalloc == Z_NULL) {
	strm->zalloc = zcalloc;
	strm->opaque = (voidpf)0;
    }
    if (strm->zfree == Z_NULL) strm->zfree = zcfree;
#endif

    if (level == Z_DEFAULT_COMPRESSION) level = 6;
#ifdef FASTEST
    level = 1;
#endif

    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
        noheader = 1;
        windowBits = -windowBits;
    }
    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
        windowBits < 9 || windowBits > 15 || level < 0 || level > 9 ||
	strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    if (s == Z_NULL) return Z_MEM_ERROR;
    strm->state = (struct internal_state FAR *)s;
    s->strm = strm;

    s->noheader = noheader;
    s->w_bits = windowBits;
    s->w_size = 1 << s->w_bits;
    s->w_mask = s->w_size - 1;

    s->hash_bits = memLevel + 7;
    s->hash_size = 1 << s->hash_bits;
    s->hash_mask = s->hash_size - 1;
    /* see deflate_state.hash_shift: chosen so hash_shift*MIN_MATCH >= hash_bits */
    s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));

    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
    s->pending_buf = (uchf *) overlay;
    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);

    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
        s->pending_buf == Z_NULL) {
        strm->msg = ERR_MSG(Z_MEM_ERROR);
	/* give the state a well-defined status so deflateEnd() can free
	 * the partially-allocated arrays safely */
	s->status = INIT_STATE;
        deflateEnd (strm);
        return Z_MEM_ERROR;
    }
    /* d_buf (distances) sits in the upper part of the overlay; l_buf
     * (literals/lengths) follows it inside pending_buf. */
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;

    s->level = level;
    s->strategy = strategy;
    s->method = (Byte)method;

    return deflateReset(strm);
}
906
907 /* ========================================================================= */
908 #if 0
/*
 * Prime the compressor with a preset dictionary before the first deflate()
 * call.  At most MAX_DIST(s) bytes — the tail of the dictionary unless
 * USE_DICT_HEAD is defined — are copied into the sliding window and every
 * MIN_MATCH-byte string is inserted into the hash table so early matches
 * can refer back into the dictionary.  strm->adler is updated over the
 * whole dictionary so the decompressor can identify which one was used.
 * Returns Z_OK (also for dictionaries shorter than MIN_MATCH, which are
 * ignored) or Z_STREAM_ERROR on bad arguments or wrong stream state.
 * NOTE(review): compiled out with #if 0 in this build.
 */
int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
    z_streamp strm;
    const Bytef *dictionary;
    uInt  dictLength;
{
    deflate_state *s;
    uInt length = dictLength;
    uInt n;
    IPos hash_head = 0;

    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL)
	return Z_STREAM_ERROR;

    s = (deflate_state *)strm->state;
    /* only legal between init and the first deflate() call */
    if (s->status != INIT_STATE) return Z_STREAM_ERROR;

    strm->adler = adler32(strm->adler, dictionary, dictLength);

    if (length < MIN_MATCH) return Z_OK;
    if (length > MAX_DIST(s)) {
	length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
	dictionary += dictLength - length; /* use the tail of the dictionary */
#endif
    }
    zmemcpy(s->window, dictionary, length);
    s->strstart = length;
    s->block_start = (long)length;

    /* Insert all strings in the hash table (except for the last two bytes).
     * s->lookahead stays null, so s->ins_h will be recomputed at the next
     * call of fill_window.
     */
    s->ins_h = s->window[0];
    UPDATE_HASH(s, s->ins_h, s->window[1]);
    for (n = 0; n <= length - MIN_MATCH; n++) {
	INSERT_STRING(s, n, hash_head);
    }
    if (hash_head) hash_head = 0; /* to make compiler happy */
    return Z_OK;
}
950 #endif
951
952 /* ========================================================================= */
953 int ZEXPORT deflateReset (strm)
954 z_streamp strm;
955 {
956 deflate_state *s;
957
958 if (strm == Z_NULL || strm->state == Z_NULL ||
959 strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;
960
961 strm->total_in = strm->total_out = 0;
962 strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
963 strm->data_type = Z_UNKNOWN;
964
965 s = (deflate_state *)strm->state;
966 s->pending = 0;
967 s->pending_out = s->pending_buf;
968
969 if (s->noheader < 0) {
970 s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
971 }
972 s->status = s->noheader ? BUSY_STATE : INIT_STATE;
973 strm->adler = 1;
974 s->last_flush = Z_NO_FLUSH;
975
976 _tr_init(s);
977 lm_init(s);
978
979 return Z_OK;
980 }
981
982 /* ========================================================================= */
#if 0
/*
 * Change the compression level and strategy on the fly.  If the new level
 * selects a different compression function and some input has already been
 * consumed, the buffered data is flushed first so the two parameter sets do
 * not mix within one block.  Compiled out (#if 0) in this kernel build.
 */
int ZEXPORT deflateParams(strm, level, strategy)
    z_streamp strm;
    int level;      /* new level, 0..9 or Z_DEFAULT_COMPRESSION */
    int strategy;   /* 0..Z_HUFFMAN_ONLY */
{
    deflate_state *s;
    compress_func func;
    int err = Z_OK;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state *)strm->state;

    if (level == Z_DEFAULT_COMPRESSION) {
        level = 6;
    }
    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    /* func is the compression routine currently in effect (old level). */
    func = configuration_table[s->level].func;

    if (func != configuration_table[level].func && strm->total_in != 0) {
        /* Flush the last buffer: */
        err = deflate(strm, Z_PARTIAL_FLUSH);
    }
    if (s->level != level) {
        /* Load the match-search tuning parameters for the new level. */
        s->level = level;
        s->max_lazy_match = configuration_table[level].max_lazy;
        s->good_match = configuration_table[level].good_length;
        s->nice_match = configuration_table[level].nice_length;
        s->max_chain_length = configuration_table[level].max_chain;
    }
    s->strategy = strategy;
    return err;
}
#endif
1019
1020 /* =========================================================================
1021 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
1022 * IN assertion: the stream state is correct and there is enough room in
1023 * pending_buf.
1024 */
local void putShortMSB (s, b)
    deflate_state *s;
    uInt b;         /* 16-bit value to append to the pending buffer */
{
    put_byte(s, (Byte)(b >> 8));    /* most significant byte first */
    put_byte(s, (Byte)(b & 0xff));  /* then the least significant byte */
}
1032
1033 /* =========================================================================
1034 * Flush as much pending output as possible. All deflate() output goes
1035 * through this function so some applications may wish to modify it
1036 * to avoid allocating a large strm->next_out buffer and copying into it.
1037 * (See also read_buf()).
1038 */
1039 local void flush_pending(strm)
1040 z_streamp strm;
1041 {
1042 deflate_state *s = (deflate_state *) strm->state;
1043 unsigned len = s->pending;
1044
1045 if (len > strm->avail_out) len = strm->avail_out;
1046 if (len == 0) return;
1047
1048 if (strm->next_out != Z_NULL) {
1049 zmemcpy(strm->next_out, s->pending_out, len);
1050 strm->next_out += len;
1051 }
1052 s->pending_out += len;
1053 strm->total_out += len;
1054 strm->avail_out -= len;
1055 s->pending -= len;
1056 if (s->pending == 0) {
1057 s->pending_out = s->pending_buf;
1058 }
1059 }
1060
1061 /* ========================================================================= */
/*
 * Main compression entry point: consume input from strm->next_in and
 * produce compressed output in strm->next_out according to the flush
 * mode (Z_NO_FLUSH .. Z_FINISH, plus the PPP Z_PACKET_FLUSH).  Returns
 * Z_OK, Z_STREAM_END when finished, or an error code.
 */
int ZEXPORT deflate (strm, flush)
    z_streamp strm;
    int flush;
{
    int old_flush; /* value of flush param for previous deflate call */
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        flush > Z_FINISH || flush < 0) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *)strm->state;

    if ((strm->next_in == Z_NULL && strm->avail_in != 0) ||
        (s->status == FINISH_STATE && flush != Z_FINISH)) {
        ERR_RETURN(strm, Z_STREAM_ERROR);
    }
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

    s->strm = strm; /* just in case */
    old_flush = s->last_flush;
    s->last_flush = flush;

    /* Write the zlib header */
    if (s->status == INIT_STATE) {

        /* CMF byte: method and window size; FLG byte follows below. */
        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
        uInt level_flags = (s->level-1) >> 1;

        if (level_flags > 3) level_flags = 3;
        header |= (level_flags << 6);
        if (s->strstart != 0) header |= PRESET_DICT;
        /* FCHECK: make the 16-bit header a multiple of 31 (RFC 1950). */
        header += 31 - (header % 31);

        s->status = BUSY_STATE;
        putShortMSB(s, header);

        /* Save the adler32 of the preset dictionary: */
        if (s->strstart != 0) {
            putShortMSB(s, (uInt)(strm->adler >> 16));
            putShortMSB(s, (uInt)(strm->adler & 0xffff));
        }
        strm->adler = 1L;
    }

    /* Flush as much pending output as possible */
    if (s->pending != 0) {
        flush_pending(strm);
        if (strm->avail_out == 0) {
            /* Since avail_out is 0, deflate will be called again with
             * more output space, but possibly with both pending and
             * avail_in equal to zero. There won't be anything to do,
             * but this is not an error situation so make sure we
             * return OK instead of BUF_ERROR at next call of deflate:
             */
            s->last_flush = -1;
            return Z_OK;
        }

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUFF_ERROR.
     */
    } else if (strm->avail_in == 0 && flush <= old_flush &&
               flush != Z_FINISH) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* User must not provide more input after the first FINISH: */
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* Start a new block or continue the current one.
     */
    if (strm->avail_in != 0 || s->lookahead != 0 ||
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
        block_state bstate;

        /* Dispatch to deflate_stored/deflate_fast/deflate_slow per level. */
        bstate = (*(configuration_table[s->level].func))(s, flush);

        if (bstate == finish_started || bstate == finish_done) {
            s->status = FINISH_STATE;
        }
        if (bstate == need_more || bstate == finish_started) {
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
            }
            return Z_OK;
            /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
             * of deflate should use the same flush parameter to make sure
             * that the flush is complete. So we don't have to output an
             * empty block here, this will be done at next call. This also
             * ensures that for a very small output buffer, we emit at most
             * one empty block.
             */
        }
        if (bstate == block_done) {
            if (flush == Z_PARTIAL_FLUSH) {
                _tr_align(s);
            } else if (flush == Z_PACKET_FLUSH) {
                /* Output just the 3-bit `stored' block type value,
                   but not a zero length. */
                _tr_stored_type_only(s);
            } else { /* FULL_FLUSH or SYNC_FLUSH */
                _tr_stored_block(s, (char*)0, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if (flush == Z_FULL_FLUSH) {
                    CLEAR_HASH(s); /* forget history */
                }
            }
            flush_pending(strm);
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
                return Z_OK;
            }
        }
    }
    Assert(strm->avail_out > 0, "bug2");

    if (flush != Z_FINISH) return Z_OK;
    if (s->noheader) return Z_STREAM_END;

    /* Write the zlib trailer (adler32) */
    putShortMSB(s, (uInt)(strm->adler >> 16));
    putShortMSB(s, (uInt)(strm->adler & 0xffff));
    flush_pending(strm);
    /* If avail_out is zero, the application will call deflate again
     * to flush the rest.
     */
    s->noheader = -1; /* write the trailer only once! */
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
}
1197
1198 /* ========================================================================= */
1199 int ZEXPORT deflateEnd (strm)
1200 z_streamp strm;
1201 {
1202 int status;
1203 deflate_state *s;
1204
1205 if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
1206 s = (deflate_state *) strm->state;
1207
1208 status = s->status;
1209 if (status != INIT_STATE && status != BUSY_STATE &&
1210 status != FINISH_STATE) {
1211 return Z_STREAM_ERROR;
1212 }
1213
1214 /* Deallocate in reverse order of allocations: */
1215 TRY_FREE(strm, s->pending_buf);
1216 TRY_FREE(strm, s->head);
1217 TRY_FREE(strm, s->prev);
1218 TRY_FREE(strm, s->window);
1219
1220 ZFREE(strm, s);
1221 strm->state = Z_NULL;
1222
1223 return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
1224 }
1225
1226 /* =========================================================================
1227 * Copy the source state to the destination state.
1228 * To simplify the source, this is not supported for 16-bit MSDOS (which
1229 * doesn't have enough memory anyway to duplicate compression states).
1230 */
#if 0
int ZEXPORT deflateCopy (dest, source)
    z_streamp dest;
    z_streamp source;
{
#ifdef MAXSEG_64K
    return Z_STREAM_ERROR;
#else
    deflate_state *ds;      /* destination state */
    deflate_state *ss;      /* source state */
    ushf *overlay;          /* shared pending_buf/d_buf allocation */


    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL) {
        return Z_STREAM_ERROR;
    }

    ss = (deflate_state *)source->state;

    /* Shallow-copy the stream, then give dest its own state and buffers. */
    *dest = *source;

    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
    if (ds == Z_NULL) return Z_MEM_ERROR;
    dest->state = (void *) ds;
    *ds = *ss;
    ds->strm = dest;

    /* Same allocation layout as deflateInit2_. */
    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
    ds->prev = (Posf *) ZALLOC(dest, ds->w_size, sizeof(Pos));
    ds->head = (Posf *) ZALLOC(dest, ds->hash_size, sizeof(Pos));
    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
    ds->pending_buf = (uchf *) overlay;

    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
        ds->pending_buf == Z_NULL) {
        ds->status = INIT_STATE;
        deflateEnd (dest);
        return Z_MEM_ERROR;
    }
    /* following zmemcpy do not work for 16-bit MSDOS */
    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

    /* Re-derive the pointers that point into the copied buffers. */
    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;

    /* Tree descriptors must reference dest's own tree arrays. */
    ds->l_desc.dyn_tree = ds->dyn_ltree;
    ds->d_desc.dyn_tree = ds->dyn_dtree;
    ds->bl_desc.dyn_tree = ds->bl_tree;

    return Z_OK;
#endif
}
#endif
1288
1289 /* ===========================================================================
1290 * Return the number of bytes of output which are immediately available
1291 * for output from the decompressor.
1292 */
#if 0
/* Return the number of bytes already compressed and waiting in the
 * pending buffer (0 for an invalid stream).  Compiled out in this build.
 */
int deflateOutputPending (strm)
    z_streamp strm;
{
    if (strm == Z_NULL || strm->state == Z_NULL) return 0;

    return ((deflate_state *)(strm->state))->pending;
}
#endif
1302
1303 /* ===========================================================================
1304 * Read a new buffer from the current input stream, update the adler32
1305 * and total number of bytes read. All deflate() input goes through
1306 * this function so some applications may wish to modify it to avoid
1307 * allocating a large strm->next_in buffer and copying from it.
1308 * (See also flush_pending()).
1309 */
1310 local int read_buf(strm, buf, size)
1311 z_streamp strm;
1312 Bytef *buf;
1313 unsigned size;
1314 {
1315 unsigned len = strm->avail_in;
1316
1317 if (len > size) len = size;
1318 if (len == 0) return 0;
1319
1320 strm->avail_in -= len;
1321
1322 if (!((deflate_state *)(strm->state))->noheader) {
1323 strm->adler = adler32(strm->adler, strm->next_in, len);
1324 }
1325 zmemcpy(buf, strm->next_in, len);
1326 strm->next_in += len;
1327 strm->total_in += len;
1328
1329 return (int)len;
1330 }
1331
1332 /* ===========================================================================
1333 * Initialize the "longest match" routines for a new zlib stream
1334 */
1335 local void lm_init (s)
1336 deflate_state *s;
1337 {
1338 s->window_size = (ulg)2L*s->w_size;
1339
1340 CLEAR_HASH(s);
1341
1342 /* Set the default configuration parameters:
1343 */
1344 s->max_lazy_match = configuration_table[s->level].max_lazy;
1345 s->good_match = configuration_table[s->level].good_length;
1346 s->nice_match = configuration_table[s->level].nice_length;
1347 s->max_chain_length = configuration_table[s->level].max_chain;
1348
1349 s->strstart = 0;
1350 s->block_start = 0L;
1351 s->lookahead = 0;
1352 s->match_length = s->prev_length = MIN_MATCH-1;
1353 s->match_available = 0;
1354 s->ins_h = 0;
1355 #ifdef ASMV
1356 match_init(); /* initialize the asm code */
1357 #endif
1358 }
1359
1360 /* ===========================================================================
1361 * Set match_start to the longest match starting at the given string and
1362 * return its length. Matches shorter or equal to prev_length are discarded,
1363 * in which case the result is equal to prev_length and match_start is
1364 * garbage.
1365 * IN assertions: cur_match is the head of the hash chain for the current
1366 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
1367 * OUT assertion: the match length is not greater than s->lookahead.
1368 */
1369 #ifndef ASMV
1370 /* For 80x86 and 680x0, an optimized version will be provided in match.asm or
1371 * match.S. The code will be functionally equivalent.
1372 */
1373 #ifndef FASTEST
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    Bytef *scan = s->window + s->strstart; /* current string */
    Bytef *match;                       /* matched string */
    int len;                           /* length of current match */
    int best_len = s->prev_length;              /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;       /* hash chains: prev[p & wmask] links back */
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    ush scan_start = *(ushf*)scan;
    ush scan_end = *(ushf*)(scan+best_len-1);
#else
    Bytef *strend = s->window + s->strstart + MAX_MATCH;
    /* Cached bytes at best_len-1/best_len: a candidate that differs there
     * cannot beat best_len, so it is rejected without a full compare.
     */
    Byte scan_end1 = scan[best_len-1];
    Byte scan_end = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2:
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

        if (match[best_len] != scan_end ||
            match[best_len-1] != scan_end1 ||
            *match != *scan ||
            *++match != scan[1]) continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        Assert(*scan == *match, "match[2]?");

        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

        if (len > best_len) {
            s->match_start = cur_match;
            best_len = len;
            if (len >= nice_match) break;
#ifdef UNALIGNED_OK
            scan_end = *(ushf*)(scan+best_len-1);
#else
            scan_end1 = scan[best_len-1];
            scan_end = scan[best_len];
#endif
        }
        /* Walk back along the hash chain until it runs off the window
         * (<= limit) or the chain budget is exhausted.
         */
    } while ((cur_match = prev[cur_match & wmask]) > limit
             && --chain_length != 0);

    if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
    return s->lookahead;
}
1511
1512 #else /* FASTEST */
1513 /* ---------------------------------------------------------------------------
1514 * Optimized version for level == 1 only
1515 */
/* FASTEST variant: examine only the single candidate at the head of the
 * hash chain instead of walking the chain.
 */
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                       /* matched string */
    register int len;                           /* length of current match */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    Assert(cur_match < s->strstart, "no future");

    match = s->window + cur_match;

    /* Return failure if the match length is less than 2:
     */
    if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;

    /* The check at best_len-1 can be removed because it will be made
     * again later. (This heuristic is not always a win.)
     * It is not necessary to compare scan[2] and match[2] since they
     * are always equal when the other bytes match, given that
     * the hash keys are equal and that HASH_BITS >= 8.
     */
    scan += 2, match += 2;
    Assert(*scan == *match, "match[2]?");

    /* We check for insufficient lookahead only every 8th comparison;
     * the 256th check will be made at strstart+258.
     */
    do {
    } while (*++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             scan < strend);

    Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

    len = MAX_MATCH - (int)(strend - scan);

    if (len < MIN_MATCH) return MIN_MATCH - 1;

    s->match_start = cur_match;
    /* Never report more than the bytes actually available as lookahead. */
    return len <= s->lookahead ? len : s->lookahead;
}
1568 #endif /* FASTEST */
1569 #endif /* ASMV */
1570
1571 #ifdef DEBUG_ZLIB
1572 /* ===========================================================================
1573 * Check that the match at match_start is indeed a match.
1574 */
/* Debug-only (DEBUG_ZLIB) sanity check: verify that window[match..] really
 * equals window[start..] for `length` bytes, aborting via z_error otherwise.
 */
local void check_match(s, start, match, length)
    deflate_state *s;
    IPos start, match;      /* window offsets of the two strings */
    int length;             /* claimed match length */
{
    /* check that the match is indeed a match */
    if (zmemcmp(s->window + match,
                s->window + start, length) != EQUAL) {
        fprintf(stderr, " start %u, match %u, length %d\n",
                start, match, length);
        do {
            fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
        } while (--length != 0);
        z_error("invalid match");
    }
    if (z_verbose > 1) {
        fprintf(stderr,"\\[%d,%d]", start-match, length);
        do { putc(s->window[start++], stderr); } while (--length != 0);
    }
}
1595 #else
1596 # define check_match(s, start, match, length)
1597 #endif
1598
1599 /* ===========================================================================
1600 * Fill the window when the lookahead becomes insufficient.
1601 * Updates strstart and lookahead.
1602 *
1603 * IN assertion: lookahead < MIN_LOOKAHEAD
1604 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
1605 * At least one byte has been read, or avail_in == 0; reads are
1606 * performed for at least two bytes (required for the zip translate_eol
1607 * option -- not supported here).
1608 */
local void fill_window(s)
    deflate_state *s;
{
    unsigned n, m;
    Posf *p;
    unsigned more;    /* Amount of free space at the end of the window. */
    uInt wsize = s->w_size;

    do {
        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);

        /* Deal with !@#$% 64K limit: */
        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
            more = wsize;

        } else if (more == (unsigned)(-1)) {
            /* Very unlikely, but possible on 16 bit machine if strstart == 0
             * and lookahead == 1 (input done one byte at time)
             */
            more--;

        /* If the window is almost full and there is insufficient lookahead,
         * move the upper half to the lower one to make room in the upper half.
         */
        } else if (s->strstart >= wsize+MAX_DIST(s)) {

            zmemcpy(s->window, s->window+wsize, (unsigned)wsize);
            s->match_start -= wsize;
            s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
            s->block_start -= (long) wsize;

            /* Slide the hash table (could be avoided with 32 bit values
               at the expense of memory usage). We slide even when level == 0
               to keep the hash table consistent if we switch back to level > 0
               later. (Using level 0 permanently is not an optimal usage of
               zlib, so we don't care about this pathological case.)
             */
            /* Rebase every head[] entry by wsize; entries that pointed into
             * the discarded half become NIL.
             */
            n = s->hash_size;
            p = &s->head[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
            } while (--n);

            n = wsize;
#ifndef FASTEST
            /* Same rebasing for the prev[] chain links. */
            p = &s->prev[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
                /* If n is not on any hash chain, prev[n] is garbage but
                 * its value will never be used.
                 */
            } while (--n);
#endif
            more += wsize;
        }
        if (s->strm->avail_in == 0) return;

        /* If there was no sliding:
         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
         *    more == window_size - lookahead - strstart
         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
         * => more >= window_size - 2*WSIZE + 2
         * In the BIG_MEM or MMAP case (not yet supported),
         *   window_size == input_size + MIN_LOOKAHEAD &&
         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
         * Otherwise, window_size == 2*WSIZE so more >= 2.
         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
         */
        Assert(more >= 2, "more < 2");

        n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
        s->lookahead += n;

        /* Initialize the hash value now that we have some input: */
        if (s->lookahead >= MIN_MATCH) {
            s->ins_h = s->window[s->strstart];
            UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
            Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
        }
        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
         * but this is not important since only literal bytes will be emitted.
         */

    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
}
1698
1699 /* ===========================================================================
1700 * Flush the current block, with given end-of-file flag.
1701 * IN assertion: strstart is set to the end of the current match.
1702 */
/* FLUSH_BLOCK_ONLY(s, eof): emit the current block (window bytes from
 * block_start to strstart, or a NULL buffer if block_start went negative)
 * with the given end-of-stream flag, then push pending output to the
 * application via flush_pending().
 */
#define FLUSH_BLOCK_ONLY(s, eof) { \
   _tr_flush_block(s, (s->block_start >= 0L ? \
                   (charf *)&s->window[(unsigned)s->block_start] : \
                   (charf *)Z_NULL), \
                (ulg)((long)s->strstart - s->block_start), \
                (eof)); \
   s->block_start = s->strstart; \
   flush_pending(s->strm); \
   Tracev((stderr,"[FLUSH]")); \
}

/* Same but force premature exit if necessary. */
/* Returns from the ENCLOSING function when the output buffer fills up. */
#define FLUSH_BLOCK(s, eof) { \
   FLUSH_BLOCK_ONLY(s, eof); \
   if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \
}
1719
1720 /* ===========================================================================
1721 * Copy without compression as much as possible from the input stream, return
1722 * the current block state.
1723 * This function does not insert new strings in the dictionary since
1724 * uncompressible data is probably not useful. This function is used
1725 * only for the level=0 compression option.
1726 * NOTE: this function should be optimized to avoid extra copying from
1727 * window to pending_buf.
1728 */
local block_state deflate_stored(s, flush)
    deflate_state *s;
    int flush;
{
    /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
     * to pending_buf_size, and each stored block has a 5 byte header:
     */
    ulg max_block_size = 0xffff;
    ulg max_start;

    if (max_block_size > s->pending_buf_size - 5) {
        max_block_size = s->pending_buf_size - 5;
    }

    /* Copy as much as possible from input to output: */
    for (;;) {
        /* Fill the window as much as possible: */
        if (s->lookahead <= 1) {

            Assert(s->strstart < s->w_size+MAX_DIST(s) ||
                   s->block_start >= (long)s->w_size, "slide too late");

            fill_window(s);
            /* No data and no flush requested: wait for more input. */
            if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more;

            if (s->lookahead == 0) break; /* flush the current block */
        }
        Assert(s->block_start >= 0L, "block gone");

        /* Level 0: never search for matches, just pass bytes through. */
        s->strstart += s->lookahead;
        s->lookahead = 0;

        /* Emit a stored block if pending_buf will be full: */
        max_start = s->block_start + max_block_size;
        if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
            /* strstart == 0 is possible when wraparound on 16-bit machine */
            s->lookahead = (uInt)(s->strstart - max_start);
            s->strstart = (uInt)max_start;
            FLUSH_BLOCK(s, 0);
        }
        /* Flush if we may have to slide, otherwise block_start may become
         * negative and the data will be gone:
         */
        if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
            FLUSH_BLOCK(s, 0);
        }
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
1779
1780 /* ===========================================================================
1781 * Compress as much as possible from the input stream, return the current
1782 * block state.
1783 * This function does not perform lazy evaluation of matches and inserts
1784 * new strings in the dictionary only for unmatched strings or for short
1785 * matches. It is used only for the fast compression options.
1786 */
local block_state deflate_fast(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head = NIL; /* head of the hash chain */
    int bflush;           /* set if current block must be flushed */
                          /* (assigned by the _tr_tally_* macros below) */

    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         * At this point we have always match_length < MIN_MATCH
         */
        if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            if (s->strategy != Z_HUFFMAN_ONLY) {
                s->match_length = longest_match (s, hash_head);
            }
            /* longest_match() sets match_start */
        }
        if (s->match_length >= MIN_MATCH) {
            check_match(s, s->strstart, s->match_start, s->match_length);

            /* Record the (distance, length) pair; bflush tells us whether
             * the literal/length buffer is now full.
             */
            _tr_tally_dist(s, s->strstart - s->match_start,
                           s->match_length - MIN_MATCH, bflush);

            s->lookahead -= s->match_length;

            /* Insert new strings in the hash table only if the match length
             * is not too large. This saves time but degrades compression.
             */
#ifndef FASTEST
            if (s->match_length <= s->max_insert_length &&
                s->lookahead >= MIN_MATCH) {
                s->match_length--; /* string at strstart already in hash table */
                do {
                    s->strstart++;
                    INSERT_STRING(s, s->strstart, hash_head);
                    /* strstart never exceeds WSIZE-MAX_MATCH, so there are
                     * always MIN_MATCH bytes ahead.
                     */
                } while (--s->match_length != 0);
                s->strstart++;
            } else
#endif
            {
                /* Match too long to index: skip over it and reseed the hash. */
                s->strstart += s->match_length;
                s->match_length = 0;
                s->ins_h = s->window[s->strstart];
                UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
                Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
                /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
                 * matter since it will be recomputed at next deflate call.
                 */
            }
        } else {
            /* No match, output a literal byte */
            Tracevv((stderr,"%c", s->window[s->strstart]));
            _tr_tally_lit (s, s->window[s->strstart], bflush);
            s->lookahead--;
            s->strstart++;
        }
        if (bflush) FLUSH_BLOCK(s, 0);
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
1877
1878 /* ===========================================================================
1879 * Same as above, but achieves better compression. We use a lazy
1880 * evaluation for matches: a match is finally adopted only if there is
1881 * no better match at the next window position.
1882 */
local block_state deflate_slow(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head = NIL;    /* head of hash chain */
    int bflush;              /* set if current block must be flushed; always
                              * assigned by a _tr_tally_* macro before use */

    /* Process the input block. */
    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         * The current match becomes the "previous" match before we look
         * for a new one at the next position (lazy evaluation).
         */
        s->prev_length = s->match_length, s->prev_match = s->match_start;
        s->match_length = MIN_MATCH-1;

        if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
            s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            if (s->strategy != Z_HUFFMAN_ONLY) {
                s->match_length = longest_match (s, hash_head);
            }
            /* longest_match() sets match_start */

            /* Discard short or too-distant matches (Z_FILTERED, or a
             * minimal match further than TOO_FAR away). */
            if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
                 (s->match_length == MIN_MATCH &&
                  s->strstart - s->match_start > TOO_FAR))) {

                /* If prev_match is also MIN_MATCH, match_start is garbage
                 * but we will ignore the current match anyway.
                 */
                s->match_length = MIN_MATCH-1;
            }
        }
        /* If there was a match at the previous step and the current
         * match is not better, output the previous match:
         */
        if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
            /* Do not insert strings in hash table beyond this. */

            check_match(s, s->strstart-1, s->prev_match, s->prev_length);

            /* The previous match starts at strstart-1, hence the -1. */
            _tr_tally_dist(s, s->strstart -1 - s->prev_match,
                           s->prev_length - MIN_MATCH, bflush);

            /* Insert in hash table all strings up to the end of the match.
             * strstart-1 and strstart are already inserted. If there is not
             * enough lookahead, the last two strings are not inserted in
             * the hash table.
             */
            s->lookahead -= s->prev_length-1;
            s->prev_length -= 2;
            do {
                if (++s->strstart <= max_insert) {
                    INSERT_STRING(s, s->strstart, hash_head);
                }
            } while (--s->prev_length != 0);
            s->match_available = 0;
            s->match_length = MIN_MATCH-1;
            s->strstart++;

            if (bflush) FLUSH_BLOCK(s, 0);

        } else if (s->match_available) {
            /* If there was no match at the previous position, output a
             * single literal. If there was a match but the current match
             * is longer, truncate the previous match to a single literal.
             */
            Tracevv((stderr,"%c", s->window[s->strstart-1]));
            _tr_tally_lit(s, s->window[s->strstart-1], bflush);
            if (bflush) {
                FLUSH_BLOCK_ONLY(s, 0);
            }
            s->strstart++;
            s->lookahead--;
            /* FLUSH_BLOCK_ONLY does not return on its own, so check the
             * output space explicitly here. */
            if (s->strm->avail_out == 0) return need_more;
        } else {
            /* There is no previous match to compare with, wait for
             * the next step to decide.
             */
            s->match_available = 1;
            s->strstart++;
            s->lookahead--;
        }
    }
    Assert (flush != Z_NO_FLUSH, "no flush?");
    /* Flush any pending deferred literal before closing the block. */
    if (s->match_available) {
        Tracevv((stderr,"%c", s->window[s->strstart-1]));
        _tr_tally_lit(s, s->window[s->strstart-1], bflush);
        s->match_available = 0;
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
1999 /* --- deflate.c */
2000
2001 /* +++ trees.c */
2002
2003 /* trees.c -- output deflated data using Huffman coding
2004 * Copyright (C) 1995-2002 Jean-loup Gailly
2005 * For conditions of distribution and use, see copyright notice in zlib.h
2006 */
2007
2008 /*
2009 * ALGORITHM
2010 *
2011 * The "deflation" process uses several Huffman trees. The more
2012 * common source values are represented by shorter bit sequences.
2013 *
2014 * Each code tree is stored in a compressed form which is itself
2015 * a Huffman encoding of the lengths of all the code strings (in
2016 * ascending order by source values). The actual code strings are
2017 * reconstructed from the lengths in the inflate process, as described
2018 * in the deflate specification.
2019 *
2020 * REFERENCES
2021 *
2022 * Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
2023 * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
2024 *
2025 * Storer, James A.
2026 * Data Compression: Methods and Theory, pp. 49-50.
2027 * Computer Science Press, 1988. ISBN 0-7167-8156-5.
2028 *
2029 * Sedgewick, R.
2030 * Algorithms, p290.
2031 * Addison-Wesley, 1983. ISBN 0-201-06672-6.
2032 */
2033
2034 /* @(#) $Id: zlib.c,v 1.23 2006/01/14 18:58:05 christos Exp $ */
2035
2036 /* #define GEN_TREES_H */
2037
2038 /* #include "deflate.h" */
2039
2040 #ifdef DEBUG_ZLIB
2041 # include <ctype.h>
2042 #endif
2043
2044 /* ===========================================================================
2045 * Constants
2046 */
2047
2048 #define MAX_BL_BITS 7
2049 /* Bit length codes must not exceed MAX_BL_BITS bits */
2050
2051 #define END_BLOCK 256
2052 /* end of block literal code */
2053
2054 #define REP_3_6 16
2055 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
2056
2057 #define REPZ_3_10 17
2058 /* repeat a zero length 3-10 times (3 bits of repeat count) */
2059
2060 #define REPZ_11_138 18
2061 /* repeat a zero length 11-138 times (7 bits of repeat count) */
2062
2063 local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
2064 = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
2065
2066 local const int extra_dbits[D_CODES] /* extra bits for each distance code */
2067 = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};
2068
2069 local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
2070 = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};
2071
2072 local const uch bl_order[BL_CODES]
2073 = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
2074 /* The lengths of the bit length codes are sent in order of decreasing
2075 * probability, to avoid transmitting the lengths for unused bit length codes.
2076 */
2077
2078 #define Buf_size (8 * 2*sizeof(char))
2079 /* Number of bits used within bi_buf. (bi_buf might be implemented on
2080 * more than 16 bits on some systems.)
2081 */
2082
2083 /* ===========================================================================
2084 * Local data. These are initialized only once.
2085 */
2086
2087 #define DIST_CODE_LEN 512 /* see definition of array dist_code below */
2088
2089 #if defined(GEN_TREES_H) || !defined(STDC)
2090 /* non ANSI compilers may not accept trees.h */
2091
2092 local ct_data static_ltree[L_CODES+2];
2093 /* The static literal tree. Since the bit lengths are imposed, there is no
2094 * need for the L_CODES extra codes used during heap construction. However
2095 * The codes 286 and 287 are needed to build a canonical tree (see _tr_init
2096 * below).
2097 */
2098
2099 local ct_data static_dtree[D_CODES];
2100 /* The static distance tree. (Actually a trivial tree since all codes use
2101 * 5 bits.)
2102 */
2103
2104 uch _dist_code[DIST_CODE_LEN];
2105 /* Distance codes. The first 256 values correspond to the distances
2106 * 3 .. 258, the last 256 values correspond to the top 8 bits of
2107 * the 15 bit distances.
2108 */
2109
2110 uch _length_code[MAX_MATCH-MIN_MATCH+1];
2111 /* length code for each normalized match length (0 == MIN_MATCH) */
2112
2113 local int base_length[LENGTH_CODES];
2114 /* First normalized length for each code (0 = MIN_MATCH) */
2115
2116 local int base_dist[D_CODES];
2117 /* First normalized distance for each code (0 = distance of 1) */
2118
2119 #else
2120 /* +++ trees.h */
2121
2122 /* header created automatically with -DGEN_TREES_H */
2123
2124 local const ct_data static_ltree[L_CODES+2] = {
2125 {{ 12},{ 8}}, {{140},{ 8}}, {{ 76},{ 8}}, {{204},{ 8}}, {{ 44},{ 8}},
2126 {{172},{ 8}}, {{108},{ 8}}, {{236},{ 8}}, {{ 28},{ 8}}, {{156},{ 8}},
2127 {{ 92},{ 8}}, {{220},{ 8}}, {{ 60},{ 8}}, {{188},{ 8}}, {{124},{ 8}},
2128 {{252},{ 8}}, {{ 2},{ 8}}, {{130},{ 8}}, {{ 66},{ 8}}, {{194},{ 8}},
2129 {{ 34},{ 8}}, {{162},{ 8}}, {{ 98},{ 8}}, {{226},{ 8}}, {{ 18},{ 8}},
2130 {{146},{ 8}}, {{ 82},{ 8}}, {{210},{ 8}}, {{ 50},{ 8}}, {{178},{ 8}},
2131 {{114},{ 8}}, {{242},{ 8}}, {{ 10},{ 8}}, {{138},{ 8}}, {{ 74},{ 8}},
2132 {{202},{ 8}}, {{ 42},{ 8}}, {{170},{ 8}}, {{106},{ 8}}, {{234},{ 8}},
2133 {{ 26},{ 8}}, {{154},{ 8}}, {{ 90},{ 8}}, {{218},{ 8}}, {{ 58},{ 8}},
2134 {{186},{ 8}}, {{122},{ 8}}, {{250},{ 8}}, {{ 6},{ 8}}, {{134},{ 8}},
2135 {{ 70},{ 8}}, {{198},{ 8}}, {{ 38},{ 8}}, {{166},{ 8}}, {{102},{ 8}},
2136 {{230},{ 8}}, {{ 22},{ 8}}, {{150},{ 8}}, {{ 86},{ 8}}, {{214},{ 8}},
2137 {{ 54},{ 8}}, {{182},{ 8}}, {{118},{ 8}}, {{246},{ 8}}, {{ 14},{ 8}},
2138 {{142},{ 8}}, {{ 78},{ 8}}, {{206},{ 8}}, {{ 46},{ 8}}, {{174},{ 8}},
2139 {{110},{ 8}}, {{238},{ 8}}, {{ 30},{ 8}}, {{158},{ 8}}, {{ 94},{ 8}},
2140 {{222},{ 8}}, {{ 62},{ 8}}, {{190},{ 8}}, {{126},{ 8}}, {{254},{ 8}},
2141 {{ 1},{ 8}}, {{129},{ 8}}, {{ 65},{ 8}}, {{193},{ 8}}, {{ 33},{ 8}},
2142 {{161},{ 8}}, {{ 97},{ 8}}, {{225},{ 8}}, {{ 17},{ 8}}, {{145},{ 8}},
2143 {{ 81},{ 8}}, {{209},{ 8}}, {{ 49},{ 8}}, {{177},{ 8}}, {{113},{ 8}},
2144 {{241},{ 8}}, {{ 9},{ 8}}, {{137},{ 8}}, {{ 73},{ 8}}, {{201},{ 8}},
2145 {{ 41},{ 8}}, {{169},{ 8}}, {{105},{ 8}}, {{233},{ 8}}, {{ 25},{ 8}},
2146 {{153},{ 8}}, {{ 89},{ 8}}, {{217},{ 8}}, {{ 57},{ 8}}, {{185},{ 8}},
2147 {{121},{ 8}}, {{249},{ 8}}, {{ 5},{ 8}}, {{133},{ 8}}, {{ 69},{ 8}},
2148 {{197},{ 8}}, {{ 37},{ 8}}, {{165},{ 8}}, {{101},{ 8}}, {{229},{ 8}},
2149 {{ 21},{ 8}}, {{149},{ 8}}, {{ 85},{ 8}}, {{213},{ 8}}, {{ 53},{ 8}},
2150 {{181},{ 8}}, {{117},{ 8}}, {{245},{ 8}}, {{ 13},{ 8}}, {{141},{ 8}},
2151 {{ 77},{ 8}}, {{205},{ 8}}, {{ 45},{ 8}}, {{173},{ 8}}, {{109},{ 8}},
2152 {{237},{ 8}}, {{ 29},{ 8}}, {{157},{ 8}}, {{ 93},{ 8}}, {{221},{ 8}},
2153 {{ 61},{ 8}}, {{189},{ 8}}, {{125},{ 8}}, {{253},{ 8}}, {{ 19},{ 9}},
2154 {{275},{ 9}}, {{147},{ 9}}, {{403},{ 9}}, {{ 83},{ 9}}, {{339},{ 9}},
2155 {{211},{ 9}}, {{467},{ 9}}, {{ 51},{ 9}}, {{307},{ 9}}, {{179},{ 9}},
2156 {{435},{ 9}}, {{115},{ 9}}, {{371},{ 9}}, {{243},{ 9}}, {{499},{ 9}},
2157 {{ 11},{ 9}}, {{267},{ 9}}, {{139},{ 9}}, {{395},{ 9}}, {{ 75},{ 9}},
2158 {{331},{ 9}}, {{203},{ 9}}, {{459},{ 9}}, {{ 43},{ 9}}, {{299},{ 9}},
2159 {{171},{ 9}}, {{427},{ 9}}, {{107},{ 9}}, {{363},{ 9}}, {{235},{ 9}},
2160 {{491},{ 9}}, {{ 27},{ 9}}, {{283},{ 9}}, {{155},{ 9}}, {{411},{ 9}},
2161 {{ 91},{ 9}}, {{347},{ 9}}, {{219},{ 9}}, {{475},{ 9}}, {{ 59},{ 9}},
2162 {{315},{ 9}}, {{187},{ 9}}, {{443},{ 9}}, {{123},{ 9}}, {{379},{ 9}},
2163 {{251},{ 9}}, {{507},{ 9}}, {{ 7},{ 9}}, {{263},{ 9}}, {{135},{ 9}},
2164 {{391},{ 9}}, {{ 71},{ 9}}, {{327},{ 9}}, {{199},{ 9}}, {{455},{ 9}},
2165 {{ 39},{ 9}}, {{295},{ 9}}, {{167},{ 9}}, {{423},{ 9}}, {{103},{ 9}},
2166 {{359},{ 9}}, {{231},{ 9}}, {{487},{ 9}}, {{ 23},{ 9}}, {{279},{ 9}},
2167 {{151},{ 9}}, {{407},{ 9}}, {{ 87},{ 9}}, {{343},{ 9}}, {{215},{ 9}},
2168 {{471},{ 9}}, {{ 55},{ 9}}, {{311},{ 9}}, {{183},{ 9}}, {{439},{ 9}},
2169 {{119},{ 9}}, {{375},{ 9}}, {{247},{ 9}}, {{503},{ 9}}, {{ 15},{ 9}},
2170 {{271},{ 9}}, {{143},{ 9}}, {{399},{ 9}}, {{ 79},{ 9}}, {{335},{ 9}},
2171 {{207},{ 9}}, {{463},{ 9}}, {{ 47},{ 9}}, {{303},{ 9}}, {{175},{ 9}},
2172 {{431},{ 9}}, {{111},{ 9}}, {{367},{ 9}}, {{239},{ 9}}, {{495},{ 9}},
2173 {{ 31},{ 9}}, {{287},{ 9}}, {{159},{ 9}}, {{415},{ 9}}, {{ 95},{ 9}},
2174 {{351},{ 9}}, {{223},{ 9}}, {{479},{ 9}}, {{ 63},{ 9}}, {{319},{ 9}},
2175 {{191},{ 9}}, {{447},{ 9}}, {{127},{ 9}}, {{383},{ 9}}, {{255},{ 9}},
2176 {{511},{ 9}}, {{ 0},{ 7}}, {{ 64},{ 7}}, {{ 32},{ 7}}, {{ 96},{ 7}},
2177 {{ 16},{ 7}}, {{ 80},{ 7}}, {{ 48},{ 7}}, {{112},{ 7}}, {{ 8},{ 7}},
2178 {{ 72},{ 7}}, {{ 40},{ 7}}, {{104},{ 7}}, {{ 24},{ 7}}, {{ 88},{ 7}},
2179 {{ 56},{ 7}}, {{120},{ 7}}, {{ 4},{ 7}}, {{ 68},{ 7}}, {{ 36},{ 7}},
2180 {{100},{ 7}}, {{ 20},{ 7}}, {{ 84},{ 7}}, {{ 52},{ 7}}, {{116},{ 7}},
2181 {{ 3},{ 8}}, {{131},{ 8}}, {{ 67},{ 8}}, {{195},{ 8}}, {{ 35},{ 8}},
2182 {{163},{ 8}}, {{ 99},{ 8}}, {{227},{ 8}}
2183 };
2184
2185 local const ct_data static_dtree[D_CODES] = {
2186 {{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}},
2187 {{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}},
2188 {{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}},
2189 {{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}},
2190 {{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}},
2191 {{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}}
2192 };
2193
2194 const uch _dist_code[DIST_CODE_LEN] = {
2195 0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8,
2196 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10,
2197 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
2198 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
2199 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13,
2200 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
2201 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
2202 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
2203 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
2204 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
2205 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
2206 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
2207 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 0, 0, 16, 17,
2208 18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22,
2209 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
2210 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
2211 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
2212 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27,
2213 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
2214 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
2215 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
2216 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
2217 28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
2218 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
2219 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
2220 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
2221 };
2222
2223 const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {
2224 0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 12, 12,
2225 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
2226 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,
2227 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
2228 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
2229 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
2230 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
2231 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
2232 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
2233 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26,
2234 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
2235 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
2236 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
2237 };
2238
2239 local const int base_length[LENGTH_CODES] = {
2240 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
2241 64, 80, 96, 112, 128, 160, 192, 224, 0
2242 };
2243
2244 local const int base_dist[D_CODES] = {
2245 0, 1, 2, 3, 4, 6, 8, 12, 16, 24,
2246 32, 48, 64, 96, 128, 192, 256, 384, 512, 768,
2247 1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576
2248 };
2249 /* --- trees.h */
2250
2251 #endif /* GEN_TREES_H */
2252
2253 struct static_tree_desc_s {
2254 const ct_data *static_tree; /* static tree or NULL */
2255 const intf *extra_bits; /* extra bits for each code or NULL */
2256 int extra_base; /* base index for extra_bits */
2257 int elems; /* max number of elements in the tree */
2258 int max_length; /* max bit length for the codes */
2259 };
2260
2261 local static_tree_desc static_l_desc =
2262 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
2263
2264 local static_tree_desc static_d_desc =
2265 {static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};
2266
2267 local static_tree_desc static_bl_desc =
2268 {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};
2269
2270 /* ===========================================================================
2271 * Local (static) routines in this file.
2272 */
2273
2274 local void tr_static_init __P((void));
2275 local void init_block __P((deflate_state *s));
2276 local void pqdownheap __P((deflate_state *s, ct_data *tree, int k));
2277 local void gen_bitlen __P((deflate_state *s, tree_desc *desc));
2278 local void gen_codes __P((ct_data *tree, int max_code, ushf *bl_count));
2279 local void build_tree __P((deflate_state *s, tree_desc *desc));
2280 local void scan_tree __P((deflate_state *s, ct_data *tree, int max_code));
2281 local void send_tree __P((deflate_state *s, ct_data *tree, int max_code));
2282 local int build_bl_tree __P((deflate_state *s));
2283 local void send_all_trees __P((deflate_state *s, int lcodes, int dcodes,
2284 int blcodes));
2285 local void compress_block __P((deflate_state *s, const ct_data *ltree,
2286 const ct_data *dtree));
2287 local void set_data_type __P((deflate_state *s));
2288 local unsigned bi_reverse __P((unsigned value, int length));
2289 local void bi_windup __P((deflate_state *s));
2290 local void bi_flush __P((deflate_state *s));
2291 local void copy_block __P((deflate_state *s, charf *buf, unsigned len,
2292 int header));
2293
2294 #ifdef GEN_TREES_H
2295 local void gen_trees_header __P((void));
2296 #endif
2297
2298 #ifndef DEBUG_ZLIB
2299 # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
2300 /* Send a code of the given tree. c and tree must not have side effects */
2301
2302 #else /* DEBUG_ZLIB */
2303 # define send_code(s, c, tree) \
2304 { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
2305 send_bits(s, tree[c].Code, tree[c].Len); }
2306 #endif
2307
2308 /* ===========================================================================
2309 * Output a short LSB first on the stream.
2310 * IN assertion: there is enough room in pendingBuf.
2311 */
2312 #define put_short(s, w) { \
2313 put_byte(s, (uch)((w) & 0xff)); \
2314 put_byte(s, (uch)((ush)(w) >> 8)); \
2315 }
2316
2317 /* ===========================================================================
2318 * Send a value on a given number of bits.
2319 * IN assertion: length <= 16 and value fits in length bits.
2320 */
2321 #ifdef DEBUG_ZLIB
2322 local void send_bits __P((deflate_state *s, int value, int length));
2323
local void send_bits(s, value, length)
    deflate_state *s;
    int value;  /* value to send */
    int length; /* number of bits */
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    /* NOTE(review): the Assert bound is 15 although Buf_size is 16;
     * presumably no caller ever sends a full 16-bit value -- confirm. */
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        /* Fill bi_buf, emit it as a little-endian short, then keep the
         * leftover high-order bits of value in the (now empty) buffer. */
        s->bi_buf |= (value << s->bi_valid);
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        /* Enough room: just append the bits above the valid ones. */
        s->bi_buf |= value << s->bi_valid;
        s->bi_valid += length;
    }
}
2347 #else /* !DEBUG_ZLIB */
2348
2349 #define send_bits(s, value, length) \
2350 { int len = length;\
2351 if (s->bi_valid > (int)Buf_size - len) {\
2352 int val = value;\
2353 s->bi_buf |= (val << s->bi_valid);\
2354 put_short(s, s->bi_buf);\
2355 s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
2356 s->bi_valid += len - Buf_size;\
2357 } else {\
2358 s->bi_buf |= (value) << s->bi_valid;\
2359 s->bi_valid += len;\
2360 }\
2361 }
2362 #endif /* DEBUG_ZLIB */
2363
2364
2365 /* ===========================================================================
2366 * Initialize the various 'constant' tables.
2367 */
local void tr_static_init()
{
#if defined(GEN_TREES_H) || !defined(STDC)
    /* Runs at most once per process (guarded by static_init_done); builds
     * the length/distance code maps and the static Huffman trees that are
     * otherwise precomputed in trees.h. */
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* For some embedded targets, global variables are not initialized: */
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        /* Each code covers 2^extra_lbits[code] consecutive lengths. */
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    /* The second half of _dist_code (indices 256..511) maps the top
     * bits of the large distances to codes 16..29. */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

# ifdef GEN_TREES_H
    gen_trees_header();
# endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
}
2447
2448 /* ===========================================================================
 * Generate the file trees.h describing the static trees.
2450 */
2451 #ifdef GEN_TREES_H
2452 # ifndef DEBUG_ZLIB
2453 # include <stdio.h>
2454 # endif
2455
2456 # define SEPARATOR(i, last, width) \
2457 ((i) == (last)? "\n};\n\n" : \
2458 ((i) % (width) == (width)-1 ? ",\n" : ", "))
2459
2460 void gen_trees_header()
2461 {
2462 FILE *header = fopen("trees.h", "w");
2463 int i;
2464
2465 Assert (header != NULL, "Can't open trees.h");
2466 fprintf(header,
2467 "/* header created automatically with -DGEN_TREES_H */\n\n");
2468
2469 fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
2470 for (i = 0; i < L_CODES+2; i++) {
2471 fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
2472 static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
2473 }
2474
2475 fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
2476 for (i = 0; i < D_CODES; i++) {
2477 fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
2478 static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
2479 }
2480
2481 fprintf(header, "const uch _dist_code[DIST_CODE_LEN] = {\n");
2482 for (i = 0; i < DIST_CODE_LEN; i++) {
2483 fprintf(header, "%2u%s", _dist_code[i],
2484 SEPARATOR(i, DIST_CODE_LEN-1, 20));
2485 }
2486
2487 fprintf(header, "const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
2488 for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
2489 fprintf(header, "%2u%s", _length_code[i],
2490 SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
2491 }
2492
2493 fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
2494 for (i = 0; i < LENGTH_CODES; i++) {
2495 fprintf(header, "%1u%s", base_length[i],
2496 SEPARATOR(i, LENGTH_CODES-1, 20));
2497 }
2498
2499 fprintf(header, "local const int base_dist[D_CODES] = {\n");
2500 for (i = 0; i < D_CODES; i++) {
2501 fprintf(header, "%5u%s", base_dist[i],
2502 SEPARATOR(i, D_CODES-1, 10));
2503 }
2504
2505 fclose(header);
2506 }
2507 #endif /* GEN_TREES_H */
2508
2509 /* ===========================================================================
2510 * Initialize the tree data structures for a new zlib stream.
2511 */
2512 void _tr_init(s)
2513 deflate_state *s;
2514 {
2515 tr_static_init();
2516
2517 s->l_desc.dyn_tree = s->dyn_ltree;
2518 s->l_desc.stat_desc = &static_l_desc;
2519
2520 s->d_desc.dyn_tree = s->dyn_dtree;
2521 s->d_desc.stat_desc = &static_d_desc;
2522
2523 s->bl_desc.dyn_tree = s->bl_tree;
2524 s->bl_desc.stat_desc = &static_bl_desc;
2525
2526 s->bi_buf = 0;
2527 s->bi_valid = 0;
2528 s->last_eob_len = 8; /* enough lookahead for inflate */
2529 #ifdef DEBUG_ZLIB
2530 s->compressed_len = 0L;
2531 s->bits_sent = 0L;
2532 #endif
2533
2534 /* Initialize the first block of the first file: */
2535 init_block(s);
2536 }
2537
2538 /* ===========================================================================
2539 * Initialize a new block.
2540 */
2541 local void init_block(s)
2542 deflate_state *s;
2543 {
2544 int n; /* iterates over tree elements */
2545
2546 /* Initialize the trees. */
2547 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
2548 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
2549 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
2550
2551 s->dyn_ltree[END_BLOCK].Freq = 1;
2552 s->opt_len = s->static_len = 0L;
2553 s->last_lit = s->matches = 0;
2554 }
2555
2556 #define SMALLEST 1
2557 /* Index within the heap array of least frequent node in the Huffman tree */
2558
2559
2560 /* ===========================================================================
2561 * Remove the smallest element from the heap and recreate the heap with
2562 * one less element. Updates heap and heap_len.
2563 */
2564 #define pqremove(s, tree, top) \
2565 {\
2566 top = s->heap[SMALLEST]; \
2567 s->heap[SMALLEST] = s->heap[s->heap_len--]; \
2568 pqdownheap(s, tree, SMALLEST); \
2569 }
2570
2571 /* ===========================================================================
2572 * Compares to subtrees, using the tree depth as tie breaker when
2573 * the subtrees have equal frequency. This minimizes the worst case length.
2574 */
2575 #define smaller(tree, n, m, depth) \
2576 (tree[n].Freq < tree[m].Freq || \
2577 (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
2578
2579 /* ===========================================================================
2580 * Restore the heap property by moving down the tree starting at node k,
2581 * exchanging a node with the smallest of its two sons if necessary, stopping
2582 * when the heap property is re-established (each father smaller than its
2583 * two sons).
2584 */
2585 local void pqdownheap(s, tree, k)
2586 deflate_state *s;
2587 ct_data *tree; /* the tree to restore */
2588 int k; /* node to move down */
2589 {
2590 int v = s->heap[k];
2591 int j = k << 1; /* left son of k */
2592 while (j <= s->heap_len) {
2593 /* Set j to the smallest of the two sons: */
2594 if (j < s->heap_len &&
2595 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
2596 j++;
2597 }
2598 /* Exit if v is smaller than both sons */
2599 if (smaller(tree, v, s->heap[j], s->depth)) break;
2600
2601 /* Exchange v with the smallest son */
2602 s->heap[k] = s->heap[j]; k = j;
2603
2604 /* And continue down the tree, setting j to the left son of k */
2605 j <<= 1;
2606 }
2607 s->heap[k] = v;
2608 }
2609
/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *    array bl_count contains the frequencies for each bit length.
 *    The length opt_len is updated; static_len is also updated if stree is
 *    not null.
 */
local void gen_bitlen(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree = desc->dyn_tree;
    int max_code = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const intf *extra = desc->stat_desc->extra_bits;
    int base = desc->stat_desc->extra_base;
    int max_length = desc->stat_desc->max_length;
    int h; /* heap index */
    int n, m; /* iterate over the tree elements */
    int bits; /* bit length */
    int xbits; /* extra bits */
    ush f; /* frequency */
    int overflow = 0; /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1; /* one level deeper than the father */
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (bits + xbits);
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Trace((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--; /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2; /* each pass shortens two overflowed leaves */
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits]; /* number of codes that must get length bits */
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue; /* internal node: carries no code */
            if (tree[m].Len != (unsigned) bits) {
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((long)bits - (long)tree[m].Len)
                              *(long)tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}
2698
2699 /* ===========================================================================
2700 * Generate the codes for a given tree and bit counts (which need not be
2701 * optimal).
2702 * IN assertion: the array bl_count contains the bit length statistics for
2703 * the given tree and the field len is set for all tree elements.
2704 * OUT assertion: the field code is set for all tree elements of non
2705 * zero code length.
2706 */
2707 local void gen_codes (tree, max_code, bl_count)
2708 ct_data *tree; /* the tree to decorate */
2709 int max_code; /* largest code with non zero frequency */
2710 ushf *bl_count; /* number of codes at each bit length */
2711 {
2712 ush next_code[MAX_BITS+1]; /* next code value for each bit length */
2713 ush code = 0; /* running code value */
2714 int bits; /* bit index */
2715 int n; /* code index */
2716
2717 /* The distribution counts are first used to generate the code values
2718 * without bit reversal.
2719 */
2720 for (bits = 1; bits <= MAX_BITS; bits++) {
2721 next_code[bits] = code = (code + bl_count[bits-1]) << 1;
2722 }
2723 /* Check that the bit counts in bl_count are consistent. The last code
2724 * must be all ones.
2725 */
2726 Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
2727 "inconsistent bit counts");
2728 Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
2729
2730 for (n = 0; n <= max_code; n++) {
2731 int len = tree[n].Len;
2732 if (len == 0) continue;
2733 /* Now reverse the bits */
2734 tree[n].Code = bi_reverse(next_code[len]++, len);
2735
2736 Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
2737 n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
2738 }
2739 }
2740
/* ===========================================================================
 * Construct one Huffman tree and assigns the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
local void build_tree(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree = desc->dyn_tree;
    const ct_data *stree = desc->stat_desc->static_tree;
    int elems = desc->stat_desc->elems;
    int n, m; /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node; /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0; /* unused symbol gets a zero code length */
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems; /* next internal node of the tree */
    do {
        pqremove(s, tree, n); /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    /* Move the last remaining node into the sorted region as well */
    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}
2831
/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree. Code lengths are run-length encoded: runs of a
 * repeated non-zero length use REP_3_6, runs of zeros use REPZ_3_10 or
 * REPZ_11_138, so this pass only tallies bl_tree frequencies.
 */
local void scan_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree; /* the tree to be scanned */
    int max_code; /* and its largest code of non zero frequency */
{
    int n; /* iterates over all tree elements */
    int prevlen = -1; /* last emitted length */
    int curlen; /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0; /* repeat count of the current code */
    int max_count = 7; /* max repeat count */
    int min_count = 4; /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard: can never extend a run */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue; /* still inside a run */
        } else if (count < min_count) {
            /* run too short for a repeat code: count each length itself */
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;
        } else {
            s->bl_tree[REPZ_11_138].Freq++;
        }
        count = 0; prevlen = curlen;
        /* Choose the run limits for the next run (zero lengths may repeat
         * much longer than non-zero ones) */
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
2876
/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree. Mirrors the run detection in scan_tree: runs of a repeated
 * non-zero length are sent as REP_3_6, zero runs as REPZ_3_10 or
 * REPZ_11_138, each followed by the repeat count in extra bits.
 */
local void send_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree; /* the tree to be scanned */
    int max_code; /* and its largest code of non zero frequency */
{
    int n; /* iterates over all tree elements */
    int prevlen = -1; /* last emitted length */
    int curlen; /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0; /* repeat count of the current code */
    int max_count = 7; /* max repeat count */
    int min_count = 4; /* min repeat count */

    /* tree[max_code+1].Len = -1; */ /* guard already set by scan_tree */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue; /* still inside a run */
        } else if (count < min_count) {
            /* run too short for a repeat code: send each length */
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            /* send the length once, then a repeat code for the rest */
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        /* Choose the run limits for the next run */
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
2927
2928 /* ===========================================================================
2929 * Construct the Huffman tree for the bit lengths and return the index in
2930 * bl_order of the last bit length code to send.
2931 */
2932 local int build_bl_tree(s)
2933 deflate_state *s;
2934 {
2935 int max_blindex; /* index of last bit length code of non zero freq */
2936
2937 /* Determine the bit length frequencies for literal and distance trees */
2938 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
2939 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
2940
2941 /* Build the bit length tree: */
2942 build_tree(s, (tree_desc *)(&(s->bl_desc)));
2943 /* opt_len now includes the length of the tree representations, except
2944 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
2945 */
2946
2947 /* Determine the number of bit length codes to send. The pkzip format
2948 * requires that at least 4 bit length codes be sent. (appnote.txt says
2949 * 3 but the actual value used is 4.)
2950 */
2951 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
2952 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
2953 }
2954 /* Update opt_len to include the bit length tree and counts */
2955 s->opt_len += 3*(max_blindex+1) + 5+5+4;
2956 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
2957 s->opt_len, s->static_len));
2958
2959 return max_blindex;
2960 }
2961
2962 /* ===========================================================================
2963 * Send the header for a block using dynamic Huffman trees: the counts, the
2964 * lengths of the bit length codes, the literal tree and the distance tree.
2965 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
2966 */
2967 local void send_all_trees(s, lcodes, dcodes, blcodes)
2968 deflate_state *s;
2969 int lcodes, dcodes, blcodes; /* number of codes for each tree */
2970 {
2971 int rank; /* index in bl_order */
2972
2973 Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
2974 Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2975 "too many codes");
2976 Tracev((stderr, "\nbl counts: "));
2977 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
2978 send_bits(s, dcodes-1, 5);
2979 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
2980 for (rank = 0; rank < blcodes; rank++) {
2981 Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
2982 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
2983 }
2984 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
2985
2986 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
2987 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
2988
2989 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
2990 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
2991 }
2992
/* ===========================================================================
 * Send a stored block: the 3-bit block type (with the eof bit), then the
 * raw bytes preceded by the LEN/NLEN header (written by copy_block).
 */
void _tr_stored_block(s, buf, stored_len, eof)
    deflate_state *s;
    charf *buf; /* input block */
    ulg stored_len; /* length of input block */
    int eof; /* true if this is the last block for a file */
{
    send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */
#ifdef DEBUG_ZLIB
    /* Debug byte count: align to a byte boundary, then data plus the
     * 4 header bytes (LEN and NLEN) */
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;
#endif
    copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
}
3009
/* Send just the `stored block' type code without any length bytes or data,
 * then byte-align the output. The eof bit is left clear.
 */
void _tr_stored_type_only(s)
    deflate_state *s;
{
    send_bits(s, (STORED_BLOCK << 1), 3);
    bi_windup(s); /* flush the bit buffer and align on a byte boundary */
#ifdef DEBUG_ZLIB
    s->compressed_len = (s->compressed_len + 3) & ~7L;
#endif
}
3021
/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 * The current inflate code requires 9 bits of lookahead. If the
 * last two codes for the previous block (real code plus EOB) were coded
 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
 * the last real code. In this case we send two empty static blocks instead
 * of one. (There are no problems if the previous block is stored or fixed.)
 * To simplify the code, we assume the worst case of last real code encoded
 * on one bit only.
 */
void _tr_align(s)
    deflate_state *s;
{
    send_bits(s, STATIC_TREES<<1, 3); /* static block type, eof bit clear */
    send_code(s, END_BLOCK, static_ltree); /* immediately close the block */
#ifdef DEBUG_ZLIB
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
#endif
    bi_flush(s);
    /* Of the 10 bits for the empty block, we have already sent
     * (10 - bi_valid) bits. The lookahead for the last real code (before
     * the EOB of the previous block) was thus at least one plus the length
     * of the EOB plus what we have just sent of the empty static block.
     */
    if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
        /* Not enough lookahead yet: send a second empty static block */
        send_bits(s, STATIC_TREES<<1, 3);
        send_code(s, END_BLOCK, static_ltree);
#ifdef DEBUG_ZLIB
        s->compressed_len += 10L;
#endif
        bi_flush(s);
    }
    s->last_eob_len = 7;
}
3057
/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file.
 */
void _tr_flush_block(s, buf, stored_len, eof)
    deflate_state *s;
    charf *buf; /* input block, or NULL if too old */
    ulg stored_len; /* length of input block */
    int eof; /* true if this is the last block for a file */
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0; /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is ascii or binary */
        if (s->data_type == Z_UNKNOWN) set_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute first the block length in bytes;
         * the +3 accounts for the block type bits. */
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
                       /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        _tr_stored_block(s, buf, stored_len, eof);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (static_lenb == opt_lenb) {
#endif
        /* Static trees: no tree representation needs to be sent */
        send_bits(s, (STATIC_TREES<<1)+eof, 3);
        compress_block(s, (const ct_data *)static_ltree, (const ct_data *)static_dtree);
#ifdef DEBUG_ZLIB
        s->compressed_len += 3 + s->static_len;
#endif
    } else {
        /* Dynamic trees: send the tree representations first */
        send_bits(s, (DYN_TREES<<1)+eof, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (const ct_data *)s->dyn_ltree, (const ct_data *)s->dyn_dtree);
#ifdef DEBUG_ZLIB
        s->compressed_len += 3 + s->opt_len;
#endif
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    /* The above check is made mod 2^32, for files larger than 512 MB
     * and uLong implemented on 32 bits.
     */
    init_block(s);

    if (eof) {
        bi_windup(s); /* byte-align the very end of the stream */
#ifdef DEBUG_ZLIB
        s->compressed_len += 7; /* align on byte boundary */
#endif
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
            s->compressed_len-7*eof));
}
3157
/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 * (Wrapped in #if 0: not compiled in this build.)
 */
#if 0
int _tr_tally (s, dist, lc)
    deflate_state *s;
    unsigned dist; /* distance of matched string */
    unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */
{
    /* Record the pair for later replay by compress_block */
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--; /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match");

        s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

#ifdef TRUNCATE_BLOCK
    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                          (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
                s->last_lit, in_length, out_length,
                100L - out_length*100L/in_length));
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
#endif
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}
#endif
3210
/* ===========================================================================
 * Send the block data compressed using the given Huffman trees.
 * Replays the saved pairs in d_buf/l_buf: a zero distance means a literal
 * byte; otherwise a match is emitted as length code + extra length bits
 * followed by distance code + extra distance bits. Ends with END_BLOCK.
 */
local void compress_block(s, ltree, dtree)
    deflate_state *s;
    const ct_data *ltree; /* literal tree */
    const ct_data *dtree; /* distance tree */
{
    unsigned dist; /* distance of matched string */
    int lc; /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0; /* running index in l_buf */
    unsigned code; /* the code to send */
    int extra; /* number of extra bits to send */

    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = _length_code[lc];
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra); /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree); /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= base_dist[code];
                send_bits(s, dist, extra); /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
    s->last_eob_len = ltree[END_BLOCK].Len; /* remembered for _tr_align */
}
3260
3261 /* ===========================================================================
3262 * Set the data type to ASCII or BINARY, using a crude approximation:
3263 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
3264 * IN assertion: the fields freq of dyn_ltree are set and the total of all
3265 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
3266 */
3267 local void set_data_type(s)
3268 deflate_state *s;
3269 {
3270 int n = 0;
3271 unsigned ascii_freq = 0;
3272 unsigned bin_freq = 0;
3273 while (n < 7) bin_freq += s->dyn_ltree[n++].Freq;
3274 while (n < 128) ascii_freq += s->dyn_ltree[n++].Freq;
3275 while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
3276 s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII);
3277 }
3278
/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
#ifndef local
#  define local static /* no-op in this file; allows standalone compilation */
#endif
local unsigned bi_reverse(code, len)
    unsigned code; /* the value to invert */
    int len; /* its bit length */
{
    unsigned res = 0; /* the bits of code, reversed so far */

    while (len-- > 0) {
        /* shift the next low bit of code onto the low end of res */
        res = (res << 1) | (code & 1);
        code >>= 1;
    }
    return res;
}
3295
/* ===========================================================================
 * Flush the bit buffer, keeping at most 7 bits in it.
 */
local void bi_flush(s)
    deflate_state *s;
{
    if (s->bi_valid == 16) {
        /* Buffer completely full: emit both bytes and clear it */
        put_short(s, s->bi_buf);
        s->bi_buf = 0;
        s->bi_valid = 0;
    } else if (s->bi_valid >= 8) {
        /* At least one whole byte buffered: emit the low byte only */
        put_byte(s, (Byte)s->bi_buf);
        s->bi_buf >>= 8;
        s->bi_valid -= 8;
    }
}
3312
/* ===========================================================================
 * Flush the bit buffer and align the output on a byte boundary
 */
local void bi_windup(s)
    deflate_state *s;
{
    if (s->bi_valid > 8) {
        put_short(s, s->bi_buf); /* more than one byte pending: emit both */
    } else if (s->bi_valid > 0) {
        put_byte(s, (Byte)s->bi_buf);
    }
    /* The buffer is now empty and the output byte-aligned */
    s->bi_buf = 0;
    s->bi_valid = 0;
#ifdef DEBUG_ZLIB
    s->bits_sent = (s->bits_sent+7) & ~7; /* round debug count up to a byte */
#endif
}
3330
/* ===========================================================================
 * Copy a stored block, storing first the length and its
 * one's complement if requested.
 */
local void copy_block(s, buf, len, header)
    deflate_state *s;
    charf *buf; /* the input data */
    unsigned len; /* its length */
    int header; /* true if block header must be written */
{
    bi_windup(s); /* align on byte boundary */
    s->last_eob_len = 8; /* enough lookahead for inflate */

    if (header) {
        /* LEN then NLEN (its one's complement), per the stored format */
        put_short(s, (ush)len);
        put_short(s, (ush)~len);
#ifdef DEBUG_ZLIB
        s->bits_sent += 2*16;
#endif
    }
#ifdef DEBUG_ZLIB
    s->bits_sent += (ulg)len<<3;
#endif
    /* bundle up the put_byte(s, *buf++) calls */
    zmemcpy(&s->pending_buf[s->pending], buf, len);
    s->pending += len;
}
3358 /* --- trees.c */
3359
3360 /* +++ inflate.c */
3361
3362 /* inflate.c -- zlib interface to inflate modules
3363 * Copyright (C) 1995-2002 Mark Adler
3364 * For conditions of distribution and use, see copyright notice in zlib.h
3365 */
3366
3367 /* #include "zutil.h" */
3368
3369 /* +++ infblock.h */
3370
3371 /* infblock.h -- header to use infblock.c
3372 * Copyright (C) 1995-2002 Mark Adler
3373 * For conditions of distribution and use, see copyright notice in zlib.h
3374 */
3375
3376 /* WARNING: this file should *not* be used by applications. It is
3377 part of the implementation of the compression library and is
3378 subject to change. Applications should only use zlib.h.
3379 */
3380
3381 struct inflate_blocks_state;
3382 typedef struct inflate_blocks_state FAR inflate_blocks_statef;
3383
3384 extern inflate_blocks_statef * inflate_blocks_new __P((
3385 z_streamp z,
3386 check_func c, /* check function */
3387 uInt w)); /* window size */
3388
3389 extern int inflate_blocks __P((
3390 inflate_blocks_statef *,
3391 z_streamp ,
3392 int)); /* initial return code */
3393
3394 extern void inflate_blocks_reset __P((
3395 inflate_blocks_statef *,
3396 z_streamp ,
3397 uLongf *)); /* check value on output */
3398
3399 extern int inflate_blocks_free __P((
3400 inflate_blocks_statef *,
3401 z_streamp));
3402
3403 extern void inflate_set_dictionary __P((
3404 inflate_blocks_statef *s,
3405 const Bytef *d, /* dictionary */
3406 uInt n)); /* dictionary length */
3407
3408 extern int inflate_blocks_sync_point __P((
3409 inflate_blocks_statef *s));
3410 extern int inflate_addhistory __P((
3411 inflate_blocks_statef *,
3412 z_streamp));
3413
3414 extern int inflate_packet_flush __P((
3415 inflate_blocks_statef *));
3416
3417 /* --- infblock.h */
3418
3419 #ifndef NO_DUMMY_DECL
3420 struct inflate_blocks_state {int dummy;}; /* for buggy compilers */
3421 #endif
3422
/* Top-level decoding states for inflate, roughly in stream order:
 * zlib header (METHOD/FLAG), optional preset dictionary (DICT4..DICT0),
 * compressed blocks (BLOCKS), then the trailing check value (CHECK4..CHECK1).
 */
typedef enum {
      METHOD, /* waiting for method byte */
      FLAG, /* waiting for flag byte */
      DICT4, /* four dictionary check bytes to go */
      DICT3, /* three dictionary check bytes to go */
      DICT2, /* two dictionary check bytes to go */
      DICT1, /* one dictionary check byte to go */
      DICT0, /* waiting for inflateSetDictionary */
      BLOCKS, /* decompressing blocks */
      CHECK4, /* four check bytes to go */
      CHECK3, /* three check bytes to go */
      CHECK2, /* two check bytes to go */
      CHECK1, /* one check byte to go */
      DONE, /* finished check, done */
      BAD} /* got an error--stay here */
inflate_mode;
3439
/* inflate private state */
struct internal_state {

  /* mode */
  inflate_mode  mode; /* current inflate mode */

  /* mode dependent information; only the member matching the current
   * mode is meaningful */
  union {
    uInt method; /* if FLAGS, method byte */
    struct {
      uLong was; /* computed check value */
      uLong need; /* stream check value */
    } check; /* if CHECK, check values to compare */
    uInt marker; /* if BAD, inflateSync's marker bytes count */
  } sub; /* submode */

  /* mode independent information */
  int  nowrap; /* flag for no wrapper (raw deflate: no header or check) */
  uInt wbits; /* log2(window size)  (8..15, defaults to 15) */
  inflate_blocks_statef
    *blocks; /* current inflate_blocks state */

};
3463
3464
3465 int ZEXPORT inflateReset(z)
3466 z_streamp z;
3467 {
3468 if (z == Z_NULL || z->state == Z_NULL)
3469 return Z_STREAM_ERROR;
3470 z->total_in = z->total_out = 0;
3471 z->msg = Z_NULL;
3472 z->state->mode = z->state->nowrap ? BLOCKS : METHOD;
3473 inflate_blocks_reset(z->state->blocks, z, Z_NULL);
3474 Tracev((stderr, "inflate: reset\n"));
3475 return Z_OK;
3476 }
3477
3478
3479 int ZEXPORT inflateEnd(z)
3480 z_streamp z;
3481 {
3482 if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
3483 return Z_STREAM_ERROR;
3484 if (z->state->blocks != Z_NULL)
3485 inflate_blocks_free(z->state->blocks, z);
3486 ZFREE(z, z->state);
3487 z->state = Z_NULL;
3488 Tracev((stderr, "inflate: end\n"));
3489 return Z_OK;
3490 }
3491
3492
/* Initialize the inflate state. w is log2 of the window size (8..15); a
 * negative w selects the undocumented "nowrap" mode (no zlib header or
 * check) with window log2 = -w. vers and stream_size guard against a
 * mismatched zlib.h at the caller's compile time.
 */
int ZEXPORT inflateInit2_(z, w, vers, stream_size)
    z_streamp z;
    int w;
    const char *vers;
    int stream_size;
{
    if (vers == Z_NULL || vers[0] != ZLIB_VERSION[0] ||
        stream_size != sizeof(z_stream))
        return Z_VERSION_ERROR;

    /* initialize state */
    if (z == Z_NULL)
        return Z_STREAM_ERROR;
    z->msg = Z_NULL;
#ifndef NO_ZCFUNCS
    /* supply default allocators if the caller did not provide any */
    if (z->zalloc == Z_NULL)
    {
        z->zalloc = zcalloc;
        z->opaque = (voidpf)0;
    }
    if (z->zfree == Z_NULL) z->zfree = zcfree;
#endif
    if ((z->state = (struct internal_state FAR *)
         ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL)
        return Z_MEM_ERROR;
    z->state->blocks = Z_NULL;

    /* handle undocumented nowrap option (no zlib header or check) */
    z->state->nowrap = 0;
    if (w < 0)
    {
        w = - w;
        z->state->nowrap = 1;
    }

    /* set window size */
    if (w < 8 || w > 15)
    {
        inflateEnd(z); /* frees the state allocated above */
        return Z_STREAM_ERROR;
    }
    z->state->wbits = (uInt)w;

    /* create inflate_blocks state; no checksum function in nowrap mode */
    if ((z->state->blocks =
         inflate_blocks_new(z, z->state->nowrap ? Z_NULL : adler32, (uInt)1 << w))
        == Z_NULL)
    {
        inflateEnd(z);
        return Z_MEM_ERROR;
    }
    Tracev((stderr, "inflate: allocated\n"));

    /* reset state */
    inflateReset(z);
    return Z_OK;
}
3550
3551
3552 #if 0
/*
 * Convenience initializer: inflateInit2_ with the default 15-bit
 * window.  Compiled out in this kernel build; callers use
 * inflateInit2_ directly.
 */
3553 int ZEXPORT inflateInit_(z, vers, stream_size)
3554 z_streamp z;
3555 const char *vers;
3556 int stream_size;
3557 {
3558   return inflateInit2_(z, DEF_WBITS, vers, stream_size);
3559 }
3560 #endif
3561
3562
/* NEEDBYTE: if no input byte is available, jump to the `empty' handler
   (fatal only under Z_PACKET_FLUSH); otherwise guarantee at least Z_OK
   from this step.  NEXTBYTE consumes one input byte, keeping the
   stream's byte accounting up to date. */
3563 #define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}
3564 #define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)
3565
/*
 * Decompress as much as possible from z->next_in into z->next_out.
 * Drives the zlib wrapper state machine: header (METHOD, FLAG),
 * optional preset-dictionary id (DICT4..DICT0), compressed data
 * (BLOCKS), trailing adler32 (CHECK4..CHECK1), then DONE.  The switch
 * cases deliberately fall through as successive bytes are consumed;
 * NEEDBYTE suspends mid-header when input runs dry.  f is the flush
 * mode; the PPP extension Z_PACKET_FLUSH additionally terminates a
 * packet via inflate_packet_flush.  Returns Z_OK / Z_STREAM_END, an
 * error code, or Z_NEED_DICT when a preset dictionary is required.
 */
3566 int ZEXPORT inflate(z, f)
3567 z_streamp z;
3568 int f;
3569 {
3570   int r, r2;
3571   uInt b;
3572
3573   if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL)
3574     return Z_STREAM_ERROR;
3575   r2 = f == Z_FINISH ? Z_BUF_ERROR : Z_OK;
3576   r = Z_BUF_ERROR;
3577   while (1) switch (z->state->mode)
3578   {
3579     case METHOD:
3580       NEEDBYTE
3581       if (((z->state->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED)
3582       {
3583         z->state->mode = BAD;
3584         z->msg = "unknown compression method";
3585         z->state->sub.marker = 5;       /* can't try inflateSync */
3586         break;
3587       }
3588       if ((z->state->sub.method >> 4) + 8 > z->state->wbits)
3589       {
3590         z->state->mode = BAD;
3591         z->msg = "invalid window size";
3592         z->state->sub.marker = 5;       /* can't try inflateSync */
3593         break;
3594       }
3595       z->state->mode = FLAG;
3596     case FLAG:
3597       NEEDBYTE
3598       b = NEXTBYTE;
     /* (CMF<<8 | FLG) must be a multiple of 31 per RFC 1950 */
3599       if (((z->state->sub.method << 8) + b) % 31)
3600       {
3601         z->state->mode = BAD;
3602         z->msg = "incorrect header check";
3603         z->state->sub.marker = 5;       /* can't try inflateSync */
3604         break;
3605       }
3606       Tracev((stderr, "inflate: zlib header ok\n"));
3607       if (!(b & PRESET_DICT))
3608       {
3609         z->state->mode = BLOCKS;
3610         break;
3611       }
3612       z->state->mode = DICT4;
3613     case DICT4:
3614       NEEDBYTE
3615       z->state->sub.check.need = (uLong)NEXTBYTE << 24;
3616       z->state->mode = DICT3;
3617     case DICT3:
3618       NEEDBYTE
3619       z->state->sub.check.need += (uLong)NEXTBYTE << 16;
3620       z->state->mode = DICT2;
3621     case DICT2:
3622       NEEDBYTE
3623       z->state->sub.check.need += (uLong)NEXTBYTE << 8;
3624       z->state->mode = DICT1;
3625     case DICT1:
3626       NEEDBYTE
3627       z->state->sub.check.need += (uLong)NEXTBYTE;
3628       z->adler = z->state->sub.check.need;
3629       z->state->mode = DICT0;
3630       return Z_NEED_DICT;
3631     case DICT0:
     /* caller re-entered without supplying the dictionary */
3632       z->state->mode = BAD;
3633       z->msg = "need dictionary";
3634       z->state->sub.marker = 0;       /* can try inflateSync */
3635       return Z_STREAM_ERROR;
3636     case BLOCKS:
     /* hand the compressed data to the block decoder; r carries the
        provisional return code in and out */
3637       r = inflate_blocks(z->state->blocks, z, r);
3638       if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)
3639           r = inflate_packet_flush(z->state->blocks);
3640       if (r == Z_DATA_ERROR)
3641       {
3642         z->state->mode = BAD;
3643         z->state->sub.marker = 0;       /* can try inflateSync */
3644         break;
3645       }
3646       if (r == Z_OK)
3647         r = r2;
3648       if (r != Z_STREAM_END)
3649         return r;
3650       r = r2;
     /* last block done: collect the computed check value for comparison */
3651       inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);
3652       if (z->state->nowrap)
3653       {
3654         z->state->mode = DONE;
3655         break;
3656       }
3657       z->state->mode = CHECK4;
3658     case CHECK4:
3659       NEEDBYTE
3660       z->state->sub.check.need = (uLong)NEXTBYTE << 24;
3661       z->state->mode = CHECK3;
3662     case CHECK3:
3663       NEEDBYTE
3664       z->state->sub.check.need += (uLong)NEXTBYTE << 16;
3665       z->state->mode = CHECK2;
3666     case CHECK2:
3667       NEEDBYTE
3668       z->state->sub.check.need += (uLong)NEXTBYTE << 8;
3669       z->state->mode = CHECK1;
3670     case CHECK1:
3671       NEEDBYTE
3672       z->state->sub.check.need += (uLong)NEXTBYTE;
3673
3674       if (z->state->sub.check.was != z->state->sub.check.need)
3675       {
3676         z->state->mode = BAD;
3677         z->msg = "incorrect data check";
3678         z->state->sub.marker = 5;       /* can't try inflateSync */
3679         break;
3680       }
3681       Tracev((stderr, "inflate: zlib check ok\n"));
3682       z->state->mode = DONE;
3683     case DONE:
3684       return Z_STREAM_END;
3685     case BAD:
3686       return Z_DATA_ERROR;
3687     default:
3688       return Z_STREAM_ERROR;
3689   }
 /* input exhausted mid-header (from NEEDBYTE): only fatal when the
    caller demanded a complete packet */
3690  empty:
3691   if (f != Z_PACKET_FLUSH)
3692     return r;
3693   z->state->mode = BAD;
3694   z->msg = "need more for packet flush";
3695   z->state->sub.marker = 0;
3696   return Z_DATA_ERROR;
3697 }
3698
3699
3700 #if 0
/*
 * Install the preset dictionary requested via Z_NEED_DICT.  The
 * dictionary's adler32 must match the id previously read from the
 * stream (held in z->adler); when the dictionary is larger than the
 * window, only its last (1<<wbits)-1 bytes are retained.  Compiled
 * out in this build.
 */
3701 int ZEXPORT inflateSetDictionary(z, dictionary, dictLength)
3702 z_streamp z;
3703 const Bytef *dictionary;
3704 uInt dictLength;
3705 {
3706   uInt length = dictLength;
3707
3708   if (z == Z_NULL || z->state == Z_NULL || z->state->mode != DICT0)
3709     return Z_STREAM_ERROR;
3710
3711   if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR;
3712   z->adler = 1L;
3713
3714   if (length >= ((uInt)1<<z->state->wbits))
3715   {
3716     length = (1<<z->state->wbits)-1;
3717     dictionary += dictLength - length;
3718   }
3719   inflate_set_dictionary(z->state->blocks, dictionary, length);
3720   z->state->mode = BLOCKS;
3721   return Z_OK;
3722 }
3723 #endif
3724
3725 /*
3726 * This subroutine adds the data at next_in/avail_in to the output history
3727 * without performing any output. The output buffer must be "caught up";
3728 * i.e. no pending output (hence s->read equals s->write), and the state must
3729 * be BLOCKS (i.e. we should be willing to see the start of a series of
3730 * BLOCKS). On exit, the output will also be caught up, and the checksum
3731 * will have been updated if need be.
3732 */
3733
3734 int inflateIncomp(z)
3735 z_stream *z;
3736 {
3737 if (z->state->mode != BLOCKS)
3738 return Z_DATA_ERROR;
3739 return inflate_addhistory(z->state->blocks, z);
3740 }
3741
3742 #if 0
/*
 * Scan input for a Z_FULL_FLUSH / Z_SYNC_FLUSH marker (the 00 00 FF FF
 * tail of an empty stored block) and restart decoding at the next
 * block.  sub.marker remembers how many marker bytes matched so the
 * search resumes correctly across calls; the byte totals survive the
 * internal inflateReset.  Compiled out in this build.
 */
3743 int ZEXPORT inflateSync(z)
3744 z_streamp z;
3745 {
3746   uInt n;       /* number of bytes to look at */
3747   Bytef *p;     /* pointer to bytes */
3748   uInt m;       /* number of marker bytes found in a row */
3749   uLong r, w;   /* temporaries to save total_in and total_out */
3750
3751   /* set up */
3752   if (z == Z_NULL || z->state == Z_NULL)
3753     return Z_STREAM_ERROR;
3754   if (z->state->mode != BAD)
3755   {
3756     z->state->mode = BAD;
3757     z->state->sub.marker = 0;
3758   }
3759   if ((n = z->avail_in) == 0)
3760     return Z_BUF_ERROR;
3761   p = z->next_in;
3762   m = z->state->sub.marker;
3763
3764   /* search */
3765   while (n && m < 4)
3766   {
3767     static const Byte mark[4] = {0, 0, 0xff, 0xff};
3768     if (*p == mark[m])
3769       m++;
3770     else if (*p)
3771       m = 0;
     /* a zero byte after a partial match may restart the marker */
3772     else
3773       m = 4 - m;
3774     p++, n--;
3775   }
3776
3777   /* restore */
3778   z->total_in += p - z->next_in;
3779   z->next_in = p;
3780   z->avail_in = n;
3781   z->state->sub.marker = m;
3782
3783   /* return no joy or set up to restart on a new block */
3784   if (m != 4)
3785     return Z_DATA_ERROR;
3786   r = z->total_in;  w = z->total_out;
3787   inflateReset(z);
3788   z->total_in = r;  z->total_out = w;
3789   z->state->mode = BLOCKS;
3790   return Z_OK;
3791 }
3792 #endif
3793
3794
3795 /* Returns true if inflate is currently at the end of a block generated
3796 * by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP
3797 * implementation to provide an additional safety check. PPP uses Z_SYNC_FLUSH
3798 * but removes the length bytes of the resulting empty stored block. When
3799 * decompressing, PPP checks that at the end of input packet, inflate is
3800 * waiting for these length bytes.
3801 */
3802 #if 0
/* True when inflate stopped exactly at a sync-flush point; see the
   PPP rationale in the comment above.  Compiled out in this build. */
3803 int ZEXPORT inflateSyncPoint(z)
3804 z_streamp z;
3805 {
3806   if (z == Z_NULL || z->state == Z_NULL || z->state->blocks == Z_NULL)
3807     return Z_STREAM_ERROR;
3808   return inflate_blocks_sync_point(z->state->blocks);
3809 }
3810 #endif
3811 #undef NEEDBYTE
3812 #undef NEXTBYTE
3813 /* --- inflate.c */
3814
3815 /* +++ infblock.c */
3816
3817 /* infblock.c -- interpret and process block types to last block
3818 * Copyright (C) 1995-2002 Mark Adler
3819 * For conditions of distribution and use, see copyright notice in zlib.h
3820 */
3821
3822 /* #include "zutil.h" */
3823 /* #include "infblock.h" */
3824
3825 /* +++ inftrees.h */
3826
3827 /* inftrees.h -- header to use inftrees.c
3828 * Copyright (C) 1995-2002 Mark Adler
3829 * For conditions of distribution and use, see copyright notice in zlib.h
3830 */
3831
3832 /* WARNING: this file should *not* be used by applications. It is
3833 part of the implementation of the compression library and is
3834 subject to change. Applications should only use zlib.h.
3835 */
3836
3837 /* Huffman code lookup table entry--this entry is four bytes for machines
3838 that have 16-bit pointers (e.g. PC's in the small or medium model). */
3839
/* One decode step reads `Bits' bits, then -- depending on `Exop' --
   either yields the literal/base value in `base' or continues into a
   sub-table whose offset is `base'. */
3840 typedef struct inflate_huft_s FAR inflate_huft;
3841
3842 struct inflate_huft_s {
3843   union {
3844     struct {
3845       Byte Exop;        /* number of extra bits or operation */
3846       Byte Bits;        /* number of bits in this code or subcode */
3847     } what;
3848     uInt pad;           /* pad structure to a power of 2 (4 bytes for */
3849   } word;               /*  16-bit, 8 bytes for 32-bit int's) */
3850   uInt base;            /* literal, length base, distance base,
3851                            or table offset */
3852 };
3853
3854 /* Maximum size of dynamic tree. The maximum found in a long but non-
3855 exhaustive search was 1004 huft structures (850 for length/literals
3856 and 154 for distances, the latter actually the result of an
3857 exhaustive search). The actual maximum is not known, but the
3858 value below is more than safe. */
3859 #define MANY 1440
3860
3861 extern int inflate_trees_bits __P((
3862 uIntf *, /* 19 code lengths */
3863 uIntf *, /* bits tree desired/actual depth */
3864 inflate_huft * FAR *, /* bits tree result */
3865 inflate_huft *, /* space for trees */
3866 z_streamp)); /* for messages */
3867
3868 extern int inflate_trees_dynamic __P((
3869 uInt, /* number of literal/length codes */
3870 uInt, /* number of distance codes */
3871 uIntf *, /* that many (total) code lengths */
3872 uIntf *, /* literal desired/actual bit depth */
3873 uIntf *, /* distance desired/actual bit depth */
3874 inflate_huft * FAR *, /* literal/length tree result */
3875 inflate_huft * FAR *, /* distance tree result */
3876 inflate_huft *, /* space for trees */
3877 z_streamp)); /* for messages */
3878
3879 extern int inflate_trees_fixed __P((
3880 uIntf *, /* literal desired/actual bit depth */
3881 uIntf *, /* distance desired/actual bit depth */
3882 inflate_huft * FAR *, /* literal/length tree result */
3883 inflate_huft * FAR *, /* distance tree result */
3884 z_streamp)); /* for memory allocation */
3885 /* --- inftrees.h */
3886
3887 /* +++ infcodes.h */
3888
3889 /* infcodes.h -- header to use infcodes.c
3890 * Copyright (C) 1995-2002 Mark Adler
3891 * For conditions of distribution and use, see copyright notice in zlib.h
3892 */
3893
3894 /* WARNING: this file should *not* be used by applications. It is
3895 part of the implementation of the compression library and is
3896 subject to change. Applications should only use zlib.h.
3897 */
3898
3899 struct inflate_codes_state;
3900 typedef struct inflate_codes_state FAR inflate_codes_statef;
3901
3902 extern inflate_codes_statef *inflate_codes_new __P((
3903 uInt, uInt,
3904 inflate_huft *, inflate_huft *,
3905 z_streamp ));
3906
3907 extern int inflate_codes __P((
3908 inflate_blocks_statef *,
3909 z_streamp ,
3910 int));
3911
3912 extern void inflate_codes_free __P((
3913 inflate_codes_statef *,
3914 z_streamp ));
3915
3916 /* --- infcodes.h */
3917
3918 /* +++ infutil.h */
3919
3920 /* infutil.h -- types and macros common to blocks and codes
3921 * Copyright (C) 1995-2002 Mark Adler
3922 * For conditions of distribution and use, see copyright notice in zlib.h
3923 */
3924
3925 /* WARNING: this file should *not* be used by applications. It is
3926 part of the implementation of the compression library and is
3927 subject to change. Applications should only use zlib.h.
3928 */
3929
3930 #ifndef _INFUTIL_H
3931 #define _INFUTIL_H
3932
/* Block-level decode states, listed in order of progression through a
   deflate block; DRY drains the window after the final block and BADB
   is terminal. */
3933 typedef enum {
3934       TYPE,     /* get type bits (3, including end bit) */
3935       LENS,     /* get lengths for stored */
3936       STORED,   /* processing stored block */
3937       TABLE,    /* get table lengths */
3938       BTREE,    /* get bit lengths tree for a dynamic block */
3939       DTREE,    /* get length, distance trees for a dynamic block */
3940       CODES,    /* processing fixed or dynamic block */
3941       DRY,      /* output remaining window bytes */
3942       DONEB,    /* finished last block, done */
3943       BADB}     /* got a data error--stuck here */
3944 inflate_block_mode;
3945
/* The output window below is a ring buffer: `read' chases `write'
   between `window' and `end', and checkfn (adler32, or NULL for raw
   streams) is applied to bytes as they are flushed out. */
3946 /* inflate blocks semi-private state */
3947 struct inflate_blocks_state {
3948
3949   /* mode */
3950   inflate_block_mode  mode;     /* current inflate_block mode */
3951
3952   /* mode dependent information */
3953   union {
3954     uInt left;          /* if STORED, bytes left to copy */
3955     struct {
3956       uInt table;               /* table lengths (14 bits) */
3957       uInt index;               /* index into blens (or border) */
3958       uIntf *blens;             /* bit lengths of codes */
3959       uInt bb;                  /* bit length tree depth */
3960       inflate_huft *tb;         /* bit length decoding tree */
3961     } trees;            /* if DTREE, decoding info for trees */
3962     struct {
3963       inflate_codes_statef
3964          *codes;
3965     } decode;           /* if CODES, current state */
3966   } sub;                /* submode */
3967   uInt last;            /* true if this block is the last block */
3968
3969   /* mode independent information */
3970   uInt bitk;            /* bits in bit buffer */
3971   uLong bitb;           /* bit buffer */
3972   inflate_huft *hufts;  /* single malloc for tree space */
3973   Bytef *window;        /* sliding window */
3974   Bytef *end;           /* one byte after sliding window */
3975   Bytef *read;          /* window read pointer */
3976   Bytef *write;         /* window write pointer */
3977   check_func checkfn;   /* check function */
3978   uLong check;          /* check on output */
3979
3980 };
3981
3982
3983 /* defines for inflate input/output */
3984 /* update pointers and return */
3985 #define UPDBITS {s->bitb=b;s->bitk=k;}
3986 #define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
3987 #define UPDOUT {s->write=q;}
3988 #define UPDATE {UPDBITS UPDIN UPDOUT}
/* LEAVE persists the locals back into s/z and exits the current
   function through inflate_flush with result r */
3989 #define LEAVE {UPDATE return inflate_flush(s,z,r);}
3990 /* get bytes and bits */
3991 #define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
3992 #define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
3993 #define NEXTBYTE (n--,*p++)
3994 #define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
3995 #define DUMPBITS(j) {b>>=(j);k-=(j);}
3996 /* output bytes */
/* WAVAIL: contiguous space before the read pointer or the window end;
   WRAP wraps q to the window start once the end is reached */
3997 #define WAVAIL (uInt)(q<s->read?s->read-q-1:s->end-q)
3998 #define LOADOUT {q=s->write;m=(uInt)WAVAIL;}
3999 #define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}}
4000 #define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
4001 #define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
4002 #define OUTBYTE(a) {*q++=(Byte)(a);m--;}
4003 /* load local pointers */
4004 #define LOAD {LOADIN LOADOUT}
4005
4006 /* masks for lower bits (size given to avoid silly warnings with Visual C++) */
4007 extern uInt inflate_mask[17];
4008
4009 /* copy as much as possible from the sliding window to the output area */
4010 extern int inflate_flush __P((
4011 inflate_blocks_statef *,
4012 z_streamp ,
4013 int));
4014
4015 #ifndef NO_DUMMY_DECL
4016 struct internal_state {int dummy;}; /* for buggy compilers */
4017 #endif
4018
4019 #endif
4020 /* --- infutil.h */
4021
4022 #ifndef NO_DUMMY_DECL
4023 struct inflate_codes_state {int dummy;}; /* for buggy compilers */
4024 #endif
4025
4026 /* simplify the use of the inflate_huft type with some defines */
4027 #define exop word.what.Exop
4028 #define bits word.what.Bits
4029
4030 /* Table for deflate from PKZIP's appnote.txt. */
4031 local const uInt border[] = { /* Order of the bit length code lengths */
4032 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
4033
4034 /*
4035 Notes beyond the 1.93a appnote.txt:
4036
4037 1. Distance pointers never point before the beginning of the output
4038 stream.
4039 2. Distance pointers can point back across blocks, up to 32k away.
4040 3. There is an implied maximum of 7 bits for the bit length table and
4041 15 bits for the actual data.
4042 4. If only one code exists, then it is encoded using one bit. (Zero
4043 would be more efficient, but perhaps a little confusing.) If two
4044 codes exist, they are coded using one bit each (0 and 1).
4045 5. There is no way of sending zero distance codes--a dummy must be
4046 sent if there are none. (History: a pre 2.0 version of PKZIP would
4047 store blocks with no distance codes, but this was discovered to be
4048 too harsh a criterion.) Valid only for 1.93a. 2.04c does allow
4049 zero distance codes, which is sent as one code of zero bits in
4050 length.
4051 6. There are up to 286 literal/length codes. Code 256 represents the
4052 end-of-block. Note however that the static length tree defines
4053 288 codes just to fill out the Huffman codes. Codes 286 and 287
4054 cannot be used though, since there is no length base or extra bits
4055 defined for them. Similarly, there are up to 30 distance codes.
4056 However, static trees define 32 codes (all 5 bits) to fill out the
4057 Huffman codes, but the last two had better not show up in the data.
4058 7. Unzip can check dynamic Huffman blocks for complete code sets.
4059 The exception is that a single code would not be complete (see #4).
4060 8. The five bits following the block type is really the number of
4061 literal codes sent minus 257.
4062 9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
4063 (1+6+6). Therefore, to output three times the length, you output
4064 three codes (1+1+1), whereas to output four times the same length,
4065 you only need two codes (1+3). Hmm.
4066 10. In the tree reconstruction algorithm, Code = Code + Increment
4067 only if BitLength(i) is not zero. (Pretty obvious.)
4068 11. Correction: 4 Bits: # of Bit Length codes - 4 (4 - 19)
4069 12. Note: length code 284 can represent 227-258, but length code 285
4070 really is 258. The last length deserves its own, short code
4071 since it gets used a lot in very redundant files. The length
4072 258 is special since 258 - 3 (the min match length) is 255.
4073 13. The literal/length and distance code bit lengths are read as a
4074 single stream of lengths. It is possible (and advantageous) for
4075 a repeat code (16, 17, or 18) to go across the boundary between
4076 the two sets of lengths.
4077 */
4078
4079
/*
 * Return the blocks state machine to TYPE for a new sequence of
 * deflate blocks.  Mode-dependent allocations (the dynamic-tree
 * length array, the codes sub-state) are freed, the bit buffer and
 * window are emptied, and the running check is restarted.  When c is
 * non-null the previous check value is handed back through it first.
 */
4080 void inflate_blocks_reset(s, z, c)
4081 inflate_blocks_statef *s;
4082 z_streamp z;
4083 uLongf *c;
4084 {
4085   if (c != Z_NULL)
4086     *c = s->check;
4087   if (s->mode == BTREE || s->mode == DTREE)
4088     ZFREE(z, s->sub.trees.blens);
4089   if (s->mode == CODES)
4090     inflate_codes_free(s->sub.decode.codes, z);
4091   s->mode = TYPE;
4092   s->bitk = 0;
4093   s->bitb = 0;
4094   s->read = s->write = s->window;
4095   if (s->checkfn != Z_NULL)
4096     z->adler = s->check = (*s->checkfn)(0L, (const Bytef *)Z_NULL, 0);
4097   Tracev((stderr, "inflate: blocks reset\n"));
4098 }
4099
4100
/*
 * Allocate a blocks state: the state struct itself, one MANY-entry
 * arena used for all huffman tables, and a w-byte sliding window.
 * c is the running check function (adler32) or Z_NULL for raw
 * streams.  On any allocation failure everything already obtained is
 * released and Z_NULL is returned.
 */
4101 inflate_blocks_statef *inflate_blocks_new(z, c, w)
4102 z_streamp z;
4103 check_func c;
4104 uInt w;
4105 {
4106   inflate_blocks_statef *s;
4107
4108   if ((s = (inflate_blocks_statef *)ZALLOC
4109        (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
4110     return s;
4111   if ((s->hufts =
4112        (inflate_huft *)ZALLOC(z, sizeof(inflate_huft), MANY)) == Z_NULL)
4113   {
4114     ZFREE(z, s);
4115     return Z_NULL;
4116   }
4117   if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
4118   {
4119     ZFREE(z, s->hufts);
4120     ZFREE(z, s);
4121     return Z_NULL;
4122   }
4123   s->end = s->window + w;
4124   s->checkfn = c;
4125   s->mode = TYPE;
4126   Tracev((stderr, "inflate: blocks allocated\n"));
4127   inflate_blocks_reset(s, z, Z_NULL);
4128   return s;
4129 }
4130
4131
/*
 * Decode deflate blocks from z->next_in into the sliding window,
 * flushing window contents to z->next_out via inflate_flush.  r is
 * the provisional return code carried in by the caller.  The switch
 * runs a fall-through state machine over inflate_block_mode; the
 * LOAD/UPDATE/LEAVE and NEEDBITS/DUMPBITS macros shuttle the stream
 * and bit-buffer state between the struct and the locals, and LEAVE
 * both suspends (out of input/output) and returns errors.
 */
4132 int inflate_blocks(s, z, r)
4133 inflate_blocks_statef *s;
4134 z_streamp z;
4135 int r;
4136 {
4137   uInt t;               /* temporary storage */
4138   uLong b;              /* bit buffer */
4139   uInt k;               /* bits in bit buffer */
4140   Bytef *p;             /* input data pointer */
4141   uInt n;               /* bytes available there */
4142   Bytef *q;             /* output window write pointer */
4143   uInt m;               /* bytes to end of window or read pointer */
4144
4145   /* copy input/output information to locals (UPDATE macro restores) */
4146   LOAD
4147
4148   /* process input based on current state */
4149   while (1) switch (s->mode)
4150   {
4151     case TYPE:
4152       NEEDBITS(3)
4153       t = (uInt)b & 7;
4154       s->last = t & 1;
4155       switch (t >> 1)
4156       {
4157         case 0:                         /* stored */
4158           Tracev((stderr, "inflate: stored block%s\n",
4159                  s->last ? " (last)" : ""));
4160           DUMPBITS(3)
4161           t = k & 7;                    /* go to byte boundary */
4162           DUMPBITS(t)
4163           s->mode = LENS;               /* get length of stored block */
4164           break;
4165         case 1:                         /* fixed */
4166           Tracev((stderr, "inflate: fixed codes block%s\n",
4167                  s->last ? " (last)" : ""));
4168           {
4169             uInt bl, bd;
4170             inflate_huft *tl, *td;
4171
4172             inflate_trees_fixed(&bl, &bd, &tl, &td, z);
4173             s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
4174             if (s->sub.decode.codes == Z_NULL)
4175             {
4176               r = Z_MEM_ERROR;
4177               LEAVE
4178             }
4179           }
4180           DUMPBITS(3)
4181           s->mode = CODES;
4182           break;
4183         case 2:                         /* dynamic */
4184           Tracev((stderr, "inflate: dynamic codes block%s\n",
4185                  s->last ? " (last)" : ""));
4186           DUMPBITS(3)
4187           s->mode = TABLE;
4188           break;
4189         case 3:                         /* illegal */
4190           DUMPBITS(3)
4191           s->mode = BADB;
4192           z->msg = "invalid block type";
4193           r = Z_DATA_ERROR;
4194           LEAVE
4195       }
4196       break;
     /* stored block: a 16-bit length followed by its one's complement */
4197     case LENS:
4198       NEEDBITS(32)
4199       if ((((~b) >> 16) & 0xffff) != (b & 0xffff))
4200       {
4201         s->mode = BADB;
4202         z->msg = "invalid stored block lengths";
4203         r = Z_DATA_ERROR;
4204         LEAVE
4205       }
4206       s->sub.left = (uInt)b & 0xffff;
4207       b = k = 0;                      /* dump bits */
4208       Tracev((stderr, "inflate: stored length %u\n", s->sub.left));
4209       s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE);
4210       break;
4211     case STORED:
4212       if (n == 0)
4213         LEAVE
4214       NEEDOUT
4215       t = s->sub.left;
4216       if (t > n) t = n;
4217       if (t > m) t = m;
4218       zmemcpy(q, p, t);
4219       p += t;  n -= t;
4220       q += t;  m -= t;
4221       if ((s->sub.left -= t) != 0)
4222         break;
4223       Tracev((stderr, "inflate: stored end, %lu total out\n",
4224               z->total_out + (q >= s->read ? q - s->read :
4225               (s->end - s->read) + (q - s->window))));
4226       s->mode = s->last ? DRY : TYPE;
4227       break;
4228     case TABLE:
4229       NEEDBITS(14)
4230       s->sub.trees.table = t = (uInt)b & 0x3fff;
4231 #ifndef PKZIP_BUG_WORKAROUND
4232       if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
4233       {
4234         s->mode = BADB;
4235         z->msg = "too many length or distance symbols";
4236         r = Z_DATA_ERROR;
4237         LEAVE
4238       }
4239 #endif
4240       t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
4241       if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
4242       {
4243         r = Z_MEM_ERROR;
4244         LEAVE
4245       }
4246       DUMPBITS(14)
4247       s->sub.trees.index = 0;
4248       Tracev((stderr, "inflate: table sizes ok\n"));
4249       s->mode = BTREE;
     /* read the 3-bit code-length code lengths, in `border' order */
4250     case BTREE:
4251       while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
4252       {
4253         NEEDBITS(3)
4254         s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
4255         DUMPBITS(3)
4256       }
4257       while (s->sub.trees.index < 19)
4258         s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
4259       s->sub.trees.bb = 7;
4260       t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
4261                              &s->sub.trees.tb, s->hufts, z);
4262       if (t != Z_OK)
4263       {
4264         r = t;
4265         if (r == Z_DATA_ERROR)
4266         {
4267           ZFREE(z, s->sub.trees.blens);
4268           s->mode = BADB;
4269         }
4270         LEAVE
4271       }
4272       s->sub.trees.index = 0;
4273       Tracev((stderr, "inflate: bits tree ok\n"));
4274       s->mode = DTREE;
4275     case DTREE:
4276       while (t = s->sub.trees.table,
4277              s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
4278       {
4279         inflate_huft *h;
4280         uInt i, j, c;
4281
4282         t = s->sub.trees.bb;
4283         NEEDBITS(t)
4284         h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
4285         t = h->bits;
4286         c = h->base;
4287         if (c < 16)
4288         {
4289           DUMPBITS(t)
4290           s->sub.trees.blens[s->sub.trees.index++] = c;
4291         }
4292         else /* c == 16..18 */
4293         {
4294           i = c == 18 ? 7 : c - 14;
4295           j = c == 18 ? 11 : 3;
4296           NEEDBITS(t + i)
4297           DUMPBITS(t)
4298           j += (uInt)b & inflate_mask[i];
4299           DUMPBITS(i)
4300           i = s->sub.trees.index;
4301           t = s->sub.trees.table;
4302           if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
4303               (c == 16 && i < 1))
4304           {
4305             ZFREE(z, s->sub.trees.blens);
4306             s->mode = BADB;
4307             z->msg = "invalid bit length repeat";
4308             r = Z_DATA_ERROR;
4309             LEAVE
4310           }
4311           c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
4312           do {
4313             s->sub.trees.blens[i++] = c;
4314           } while (--j);
4315           s->sub.trees.index = i;
4316         }
4317       }
4318       s->sub.trees.tb = Z_NULL;
4319       {
4320         uInt bl, bd;
4321         inflate_huft *tl, *td;
4322         inflate_codes_statef *c;
4323
4324         bl = 9;         /* must be <= 9 for lookahead assumptions */
4325         bd = 6;         /* must be <= 9 for lookahead assumptions */
4326         t = s->sub.trees.table;
4327         t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
4328                                   s->sub.trees.blens, &bl, &bd, &tl, &td,
4329                                   s->hufts, z);
4330         if (t != Z_OK)
4331         {
4332           if (t == (uInt)Z_DATA_ERROR)
4333           {
4334             ZFREE(z, s->sub.trees.blens);
4335             s->mode = BADB;
4336           }
4337           r = t;
4338           LEAVE
4339         }
4340         Tracev((stderr, "inflate: trees ok\n"));
4341         if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
4342         {
4343           r = Z_MEM_ERROR;
4344           LEAVE
4345         }
4346         s->sub.decode.codes = c;
4347       }
4348       ZFREE(z, s->sub.trees.blens);
4349       s->mode = CODES;
     /* decode literal/length/distance codes via the codes sub-state */
4350     case CODES:
4351       UPDATE
4352       if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
4353         return inflate_flush(s, z, r);
4354       r = Z_OK;
4355       inflate_codes_free(s->sub.decode.codes, z);
4356       LOAD
4357       Tracev((stderr, "inflate: codes end, %lu total out\n",
4358               z->total_out + (q >= s->read ? q - s->read :
4359               (s->end - s->read) + (q - s->window))));
4360       if (!s->last)
4361       {
4362         s->mode = TYPE;
4363         break;
4364       }
4365       s->mode = DRY;
4366     case DRY:
4367       FLUSH
4368       if (s->read != s->write)
4369         LEAVE
4370       s->mode = DONEB;
4371     case DONEB:
4372       r = Z_STREAM_END;
4373       LEAVE
4374     case BADB:
4375       r = Z_DATA_ERROR;
4376       LEAVE
4377     default:
4378       r = Z_STREAM_ERROR;
4379       LEAVE
4380   }
4381 }
4382
4383
/*
 * Release everything owned by a blocks state: mode-dependent memory
 * first (via reset), then the window, the huffman arena, and finally
 * the state struct itself.  Always returns Z_OK.
 */
4384 int inflate_blocks_free(s, z)
4385 inflate_blocks_statef *s;
4386 z_streamp z;
4387 {
4388   inflate_blocks_reset(s, z, Z_NULL);
4389   ZFREE(z, s->window);
4390   ZFREE(z, s->hufts);
4391   ZFREE(z, s);
4392   Tracev((stderr, "inflate: blocks freed\n"));
4393   return Z_OK;
4394 }
4395
4396
4397 #if 0
/* Preload the window with the n dictionary bytes at d; setting
   read == write leaves no pending output.  Compiled out in this
   build along with inflateSetDictionary. */
4398 void inflate_set_dictionary(s, d, n)
4399 inflate_blocks_statef *s;
4400 const Bytef *d;
4401 uInt n;
4402 {
4403   zmemcpy(s->window, d, n);
4404   s->read = s->write = s->window + n;
4405 }
4406 #endif
4407
4408 /*
4409 * This subroutine adds the data at next_in/avail_in to the output history
4410 * without performing any output. The output buffer must be "caught up";
4411 * i.e. no pending output (hence s->read equals s->write), and the state must
4412 * be BLOCKS (i.e. we should be willing to see the start of a series of
4413 * BLOCKS). On exit, the output will also be caught up, and the checksum
4414 * will have been updated if need be.
4415 */
/* Returns Z_OK on success, Z_STREAM_ERROR if output is pending
   (read != write), or Z_DATA_ERROR when not between blocks (mode is
   not TYPE).  b and k exist only to satisfy the LOAD/UPDATE macros. */
4416 int inflate_addhistory(s, z)
4417 inflate_blocks_statef *s;
4418 z_stream *z;
4419 {
4420     uLong b;              /* bit buffer */  /* NOT USED HERE */
4421     uInt k;               /* bits in bit buffer */ /* NOT USED HERE */
4422     uInt t;               /* temporary storage */
4423     Bytef *p;             /* input data pointer */
4424     uInt n;               /* bytes available there */
4425     Bytef *q;             /* output window write pointer */
4426     uInt m;               /* bytes to end of window or read pointer */
4427
4428     if (s->read != s->write)
4429         return Z_STREAM_ERROR;
4430     if (s->mode != TYPE)
4431         return Z_DATA_ERROR;
4432
4433     /* we're ready to rock */
4434     LOAD
4435     /* while there is input ready, copy to output buffer, moving
4436      * pointers as needed.
4437      */
4438     while (n) {
4439         t = n;  /* how many to do */
4440         /* is there room until end of buffer? */
4441         if (t > m) t = m;
4442         /* update check information */
4443         if (s->checkfn != Z_NULL)
4444             s->check = (*s->checkfn)(s->check, q, t);
4445         zmemcpy(q, p, t);
4446         q += t;
4447         p += t;
4448         n -= t;
4449         z->total_out += t;
4450         s->read = q;    /* drag read pointer forward */
4451 /*      WWRAP  */      /* expand WWRAP macro by hand to handle s->read */
4452         if (q == s->end) {
4453             s->read = q = s->window;
4454             m = WAVAIL;
4455         }
4456     }
4457     UPDATE
4458     return Z_OK;
4459 }
4460
4461
4462 /*
4463 * At the end of a Deflate-compressed PPP packet, we expect to have seen
4464 * a `stored' block type value but not the (zero) length bytes.
4465 */
4466 int inflate_packet_flush(s)
4467 inflate_blocks_statef *s;
4468 {
4469 if (s->mode != LENS)
4470 return Z_DATA_ERROR;
4471 s->mode = TYPE;
4472 return Z_OK;
4473 }
4474
4475 /* Returns true if inflate is currently at the end of a block generated
4476 * by Z_SYNC_FLUSH or Z_FULL_FLUSH.
4477 * IN assertion: s != Z_NULL
4478 */
4479 #if 0
4480 int inflate_blocks_sync_point(s)
4481 inflate_blocks_statef *s;
4482 {
/* In LENS we have read a stored-block header but not its length bytes
   -- exactly the state left by a sync/full flush. */
4483   return s->mode == LENS;
4484 }
4485 #endif
4486 /* --- infblock.c */
4487
4488
4489 /* +++ inftrees.c */
4490
4491 /* inftrees.c -- generate Huffman trees for efficient decoding
4492 * Copyright (C) 1995-2002 Mark Adler
4493 * For conditions of distribution and use, see copyright notice in zlib.h
4494 */
4495
4496 /* #include "zutil.h" */
4497 /* #include "inftrees.h" */
4498
4499 #if !defined(BUILDFIXED) && !defined(STDC)
4500 # define BUILDFIXED /* non ANSI compilers may not accept inffixed.h */
4501 #endif
4502
4503 const char inflate_copyright[] =
4504 " inflate 1.1.4 Copyright 1995-2002 Mark Adler ";
4505 /*
4506 If you use the zlib library in a product, an acknowledgment is welcome
4507 in the documentation of your product. If for some reason you cannot
4508 include such an acknowledgment, I would appreciate that you keep this
4509 copyright string in the executable of your product.
4510 */
4511
4512 #ifndef NO_DUMMY_DECL
4513 struct internal_state {int dummy;}; /* for buggy compilers */
4514 #endif
4515
4516 /* simplify the use of the inflate_huft type with some defines */
4517 #define exop word.what.Exop
4518 #define bits word.what.Bits
4519
4520
4521 local int huft_build __P((
4522 uIntf *, /* code lengths in bits */
4523 uInt, /* number of codes */
4524 uInt, /* number of "simple" codes */
4525 const uIntf *, /* list of base values for non-simple codes */
4526 const uIntf *, /* list of extra bits for non-simple codes */
4527 inflate_huft * FAR*,/* result: starting table */
4528 uIntf *, /* maximum lookup bits (returns actual) */
4529 inflate_huft *, /* space for trees */
4530 uInt *, /* hufts used in space */
4531 uIntf * )); /* space for values */
4532
4533 /* Tables for deflate from PKZIP's appnote.txt. */
4534 local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */
4535 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
4536 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
4537 /* see note #13 above about 258 */
4538 local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */
4539 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
4540 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */
4541 local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */
4542 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
4543 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
4544 8193, 12289, 16385, 24577};
4545 local const uInt cpdext[30] = { /* Extra bits for distance codes */
4546 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
4547 7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
4548 12, 12, 13, 13};
4549
4550 /*
4551 Huffman code decoding is performed using a multi-level table lookup.
4552 The fastest way to decode is to simply build a lookup table whose
4553 size is determined by the longest code. However, the time it takes
4554 to build this table can also be a factor if the data being decoded
4555 is not very long. The most common codes are necessarily the
4556 shortest codes, so those codes dominate the decoding time, and hence
4557 the speed. The idea is you can have a shorter table that decodes the
4558 shorter, more probable codes, and then point to subsidiary tables for
4559 the longer codes. The time it costs to decode the longer codes is
4560 then traded against the time it takes to make longer tables.
4561
   The results of this trade-off are kept in the variables lbits and dbits
4563 below. lbits is the number of bits the first level table for literal/
4564 length codes can decode in one step, and dbits is the same thing for
4565 the distance codes. Subsequent tables are also less than or equal to
4566 those sizes. These values may be adjusted either when all of the
4567 codes are shorter than that, in which case the longest code length in
4568 bits is used, or when the shortest code is *longer* than the requested
4569 table size, in which case the length of the shortest code in bits is
4570 used.
4571
4572 There are two different values for the two tables, since they code a
4573 different number of possibilities each. The literal/length table
4574 codes 286 possible values, or in a flat code, a little over eight
4575 bits. The distance table codes 30 possible values, or a little less
4576 than five bits, flat. The optimum values for speed end up being
4577 about one bit more than those, so lbits is 8+1 and dbits is 5+1.
4578 The optimum values may differ though from machine to machine, and
4579 possibly even between compilers. Your mileage may vary.
4580 */
4581
4582
/* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
#define BMAX 15         /* maximum bit length of any code */

local int huft_build(b, n, s, d, e, t, m, hp, hn, v)
uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
uInt n;                 /* number of codes (assumed <= 288) */
uInt s;                 /* number of simple-valued codes (0..s-1) */
const uIntf *d;         /* list of base values for non-simple codes */
const uIntf *e;         /* list of extra bits for non-simple codes */
inflate_huft * FAR *t;  /* result: starting table */
uIntf *m;               /* maximum lookup bits, returns actual */
inflate_huft *hp;       /* space for trees */
uInt *hn;               /* hufts used in space */
uIntf *v;               /* working area: values in order of bit length */
/* Given a list of code lengths and a maximum table size, make a set of
   tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
   if the given code set is incomplete (the tables are still built in this
   case), or Z_DATA_ERROR if the input is invalid.  Tables are built into
   hp[], starting at index *hn; *hn is advanced past the hufts consumed.
   On return *m holds the actual number of bits used by the first-level
   table (bounded by the shortest and longest code lengths). */
{

  uInt a;                       /* counter for codes of length k */
  uInt c[BMAX+1];               /* bit length count table */
  uInt f;                       /* i repeats in table every f entries */
  int g;                        /* maximum code length */
  int h;                        /* table level */
  uInt i;                       /* counter, current code */
  uInt j;                       /* counter */
  int k;                        /* number of bits in current code */
  int l;                        /* bits per table (returned in m) */
  uInt mask;                    /* (1 << w) - 1, to avoid cc -O bug on HP */
  uIntf *p;                     /* pointer into c[], b[], or v[] */
  inflate_huft *q;              /* points to current table */
  struct inflate_huft_s r;      /* table entry for structure assignment */
  inflate_huft *u[BMAX];        /* table stack */
  int w;                        /* bits before this table == (l * h) */
  uInt x[BMAX+1];               /* bit offsets, then code stack */
  uIntf *xp;                    /* pointer into x */
  int y;                        /* number of dummy codes added */
  uInt z;                       /* number of entries in current table */


  /* Generate counts for each bit length */
  p = c;
#define C0 *p++ = 0;
#define C2 C0 C0 C0 C0
#define C4 C2 C2 C2 C2
  C4                            /* clear c[]--assume BMAX+1 is 16 */
  p = b;  i = n;
  do {
    c[*p++]++;                  /* assume all entries <= BMAX */
  } while (--i);
  if (c[0] == n)                /* null input--all zero length codes */
  {
    *t = (inflate_huft *)Z_NULL;
    *m = 0;
    return Z_OK;
  }


  /* Find minimum and maximum length, bound *m by those */
  l = *m;
  for (j = 1; j <= BMAX; j++)
    if (c[j])
      break;
  k = j;                        /* minimum code length */
  if ((uInt)l < j)
    l = j;
  for (i = BMAX; i; i--)
    if (c[i])
      break;
  g = i;                        /* maximum code length */
  if ((uInt)l > i)
    l = i;
  *m = l;


  /* Adjust last length count to fill out codes, if needed */
  /* Kraft inequality check: y tracks the number of code patterns still
     available at each length; going negative means the code lengths
     oversubscribe the code space (invalid input). */
  for (y = 1 << j; j < i; j++, y <<= 1)
    if ((y -= c[j]) < 0)
      return Z_DATA_ERROR;
  if ((y -= c[i]) < 0)
    return Z_DATA_ERROR;
  c[i] += y;                    /* y > 0 here means the code is incomplete */


  /* Generate starting offsets into the value table for each length */
  x[1] = j = 0;
  p = c + 1;  xp = x + 2;
  while (--i) {                 /* note that i == g from above */
    *xp++ = (j += *p++);
  }


  /* Make a table of values in order of bit lengths */
  p = b;  i = 0;
  do {
    if ((j = *p++) != 0)
      v[x[j]++] = i;
  } while (++i < n);
  n = x[g];                     /* set n to length of v */


  /* Generate the Huffman codes and for each, make the table entries */
  x[0] = i = 0;                 /* first Huffman code is zero */
  p = v;                        /* grab values in bit order */
  h = -1;                       /* no tables yet--level -1 */
  w = -l;                       /* bits decoded == (l * h) */
  u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
  q = (inflate_huft *)Z_NULL;   /* ditto */
  z = 0;                        /* ditto */

  /* go through the bit lengths (k already is bits in shortest code) */
  for (; k <= g; k++)
  {
    a = c[k];
    while (a--)
    {
      /* here i is the Huffman code of length k bits for value *p */
      /* make tables up to required level */
      while (k > w + l)
      {
        h++;
        w += l;                 /* previous table always l bits */

        /* compute minimum size table less than or equal to l bits */
        z = g - w;
        z = z > (uInt)l ? l : z;        /* table size upper limit */
        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
        {                       /* too few codes for k-w bit table */
          f -= a + 1;           /* deduct codes from patterns left */
          xp = c + k;
          if (j < z)
            while (++j < z)     /* try smaller tables up to z bits */
            {
              if ((f <<= 1) <= *++xp)
                break;          /* enough codes to use up j bits */
              f -= *xp;         /* else deduct codes from patterns */
            }
        }
        z = 1 << j;             /* table entries for j-bit table */

        /* allocate new table */
        if (*hn + z > MANY)     /* (note: doesn't matter for fixed) */
          return Z_DATA_ERROR;  /* overflow of MANY */
        u[h] = q = hp + *hn;
        *hn += z;

        /* connect to last table, if there is one */
        if (h)
        {
          x[h] = i;             /* save pattern for backing up */
          r.bits = (Byte)l;     /* bits to dump before this table */
          r.exop = (Byte)j;     /* bits in this table */
          j = i >> (w - l);
          r.base = (uInt)(q - u[h-1] - j);      /* offset to this table */
          u[h-1][j] = r;        /* connect to last table */
        }
        else
          *t = q;               /* first table is returned result */
      }

      /* set up table entry in r */
      r.bits = (Byte)(k - w);
      if (p >= v + n)
        r.exop = 128 + 64;      /* out of values--invalid code */
      else if (*p < s)
      {
        r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);    /* 256 is end-of-block */
        r.base = *p++;          /* simple code is just the value */
      }
      else
      {
        r.exop = (Byte)(e[*p - s] + 16 + 64); /* non-simple--look up in lists */
        r.base = d[*p++ - s];
      }

      /* fill code-like entries with r */
      /* every table slot whose low k-w bits match this code gets a copy */
      f = 1 << (k - w);
      for (j = i >> w; j < z; j += f)
        q[j] = r;

      /* backwards increment the k-bit code i */
      /* (codes are kept bit-reversed, so increment from the top bit down) */
      for (j = 1 << (k - 1); i & j; j >>= 1)
        i ^= j;
      i ^= j;

      /* backup over finished tables */
      mask = (1 << w) - 1;      /* needed on HP, cc -O bug */
      while ((i & mask) != x[h])
      {
        h--;                    /* don't need to update q */
        w -= l;
        mask = (1 << w) - 1;
      }
    }
  }


  /* Return Z_BUF_ERROR if we were given an incomplete table */
  /* (a single-code alphabet, g == 1, is legitimately "incomplete") */
  return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
}
4784
4785
4786 int inflate_trees_bits(c, bb, tb, hp, z)
4787 uIntf *c; /* 19 code lengths */
4788 uIntf *bb; /* bits tree desired/actual depth */
4789 inflate_huft * FAR *tb; /* bits tree result */
4790 inflate_huft *hp; /* space for trees */
4791 z_streamp z; /* for messages */
4792 {
4793 int r;
4794 uInt hn = 0; /* hufts used in space */
4795 uIntf *v; /* work area for huft_build */
4796
4797 if ((v = (uIntf*)ZALLOC(z, 19, sizeof(uInt))) == Z_NULL)
4798 return Z_MEM_ERROR;
4799 r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL,
4800 tb, bb, hp, &hn, v);
4801 if (r == Z_DATA_ERROR)
4802 z->msg = "oversubscribed dynamic bit lengths tree";
4803 else if (r == Z_BUF_ERROR || *bb == 0)
4804 {
4805 z->msg = "incomplete dynamic bit lengths tree";
4806 r = Z_DATA_ERROR;
4807 }
4808 ZFREE(z, v);
4809 return r;
4810 }
4811
4812
/* Build the literal/length and distance decoding tables for a dynamic
   deflate block.  c[] holds nl literal/length code lengths followed by
   nd distance code lengths.  Returns Z_OK, Z_MEM_ERROR, or Z_DATA_ERROR
   (with z->msg set). */
int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, hp, z)
uInt nl;                /* number of literal/length codes */
uInt nd;                /* number of distance codes */
uIntf *c;               /* that many (total) code lengths */
uIntf *bl;              /* literal desired/actual bit depth */
uIntf *bd;              /* distance desired/actual bit depth */
inflate_huft * FAR *tl; /* literal/length tree result */
inflate_huft * FAR *td; /* distance tree result */
inflate_huft *hp;       /* space for trees */
z_streamp z;            /* for messages */
{
  int r;
  uInt hn = 0;          /* hufts used in space */
  uIntf *v;             /* work area for huft_build */

  /* allocate work area */
  if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
    return Z_MEM_ERROR;

  /* build literal/length tree */
  r = huft_build(c, nl, 257, cplens, cplext, tl, bl, hp, &hn, v);
  if (r != Z_OK || *bl == 0)
  {
    if (r == Z_DATA_ERROR)
      z->msg = "oversubscribed literal/length tree";
    else if (r != Z_MEM_ERROR)
    {
      z->msg = "incomplete literal/length tree";
      r = Z_DATA_ERROR;
    }
    ZFREE(z, v);
    return r;
  }

  /* build distance tree */
  r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, hp, &hn, v);
  /* CAUTION: the #ifdef below deliberately splits the brace structure of
     the if/else chain.  With PKZIP_BUG_WORKAROUND defined, an incomplete
     distance tree (Z_BUF_ERROR) is forgiven (r = Z_OK) and control falls
     through to the normal "done" path; otherwise the error cases inside
     the #else branch free v and return early.
     NOTE(review): with PKZIP_BUG_WORKAROUND defined, the oversubscribed
     (Z_DATA_ERROR) case also falls through to the Z_OK return below --
     this matches upstream zlib 1.1.x, but confirm it is intentional. */
  if (r != Z_OK || (*bd == 0 && nl > 257))
  {
    if (r == Z_DATA_ERROR)
      z->msg = "oversubscribed distance tree";
    else if (r == Z_BUF_ERROR) {
#ifdef PKZIP_BUG_WORKAROUND
      r = Z_OK;
    }
#else
      z->msg = "incomplete distance tree";
      r = Z_DATA_ERROR;
    }
    else if (r != Z_MEM_ERROR)
    {
      z->msg = "empty distance tree with lengths";
      r = Z_DATA_ERROR;
    }
    ZFREE(z, v);
    return r;
#endif
  }

  /* done */
  ZFREE(z, v);
  return Z_OK;
}
4875
4876
/* build fixed tables only once--keep them here */
#ifdef BUILDFIXED
local int fixed_built = 0;      /* set once inflate_trees_fixed() has filled
                                   in the tables below */
#define FIXEDH 544      /* number of hufts used by fixed tables */
local inflate_huft fixed_mem[FIXEDH];   /* backing storage for both trees */
local uInt fixed_bl;            /* lookup bits for the literal/length tree */
local uInt fixed_bd;            /* lookup bits for the distance tree */
local inflate_huft *fixed_tl;   /* literal/length tree (into fixed_mem) */
local inflate_huft *fixed_td;   /* distance tree (into fixed_mem) */
4886 #else
4887
4888 /* +++ inffixed.h */
4889 /* inffixed.h -- table for decoding fixed codes
4890 * Generated automatically by the maketree.c program
4891 */
4892
4893 /* WARNING: this file should *not* be used by applications. It is
4894 part of the implementation of the compression library and is
4895 subject to change. Applications should only use zlib.h.
4896 */
4897
4898 local uInt fixed_bl = 9;
4899 local uInt fixed_bd = 5;
4900 local inflate_huft fixed_tl[] = {
4901 {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
4902 {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},192},
4903 {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},160},
4904 {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},224},
4905 {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},144},
4906 {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},208},
4907 {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},176},
4908 {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},240},
4909 {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
4910 {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},200},
4911 {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},168},
4912 {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},232},
4913 {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},152},
4914 {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},216},
4915 {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},184},
4916 {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},248},
4917 {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
4918 {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},196},
4919 {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},164},
4920 {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},228},
4921 {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},148},
4922 {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},212},
4923 {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},180},
4924 {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},244},
4925 {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
4926 {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},204},
4927 {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},172},
4928 {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},236},
4929 {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},156},
4930 {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},220},
4931 {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},188},
4932 {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},252},
4933 {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
4934 {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},194},
4935 {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},162},
4936 {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},226},
4937 {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},146},
4938 {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},210},
4939 {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},178},
4940 {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},242},
4941 {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
4942 {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},202},
4943 {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},170},
4944 {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},234},
4945 {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},154},
4946 {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},218},
4947 {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},186},
4948 {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},250},
4949 {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
4950 {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},198},
4951 {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},166},
4952 {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},230},
4953 {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},150},
4954 {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},214},
4955 {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},182},
4956 {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},246},
4957 {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
4958 {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},206},
4959 {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},174},
4960 {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},238},
4961 {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},158},
4962 {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},222},
4963 {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},190},
4964 {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},254},
4965 {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
4966 {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},193},
4967 {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},161},
4968 {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},225},
4969 {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},145},
4970 {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},209},
4971 {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},177},
4972 {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},241},
4973 {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
4974 {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},201},
4975 {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},169},
4976 {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},233},
4977 {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},153},
4978 {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},217},
4979 {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},185},
4980 {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},249},
4981 {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
4982 {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},197},
4983 {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},165},
4984 {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},229},
4985 {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},149},
4986 {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},213},
4987 {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},181},
4988 {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},245},
4989 {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
4990 {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},205},
4991 {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},173},
4992 {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},237},
4993 {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},157},
4994 {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},221},
4995 {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},189},
4996 {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},253},
4997 {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
4998 {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},195},
4999 {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},163},
5000 {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},227},
5001 {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},147},
5002 {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},211},
5003 {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},179},
5004 {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},243},
5005 {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
5006 {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},203},
5007 {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},171},
5008 {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},235},
5009 {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},155},
5010 {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},219},
5011 {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},187},
5012 {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},251},
5013 {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
5014 {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},199},
5015 {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},167},
5016 {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},231},
5017 {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},151},
5018 {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},215},
5019 {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},183},
5020 {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},247},
5021 {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
5022 {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},207},
5023 {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},175},
5024 {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},239},
5025 {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},159},
5026 {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},223},
5027 {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},191},
5028 {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},255}
5029 };
5030 local inflate_huft fixed_td[] = {
5031 {{{80,5}},1}, {{{87,5}},257}, {{{83,5}},17}, {{{91,5}},4097},
5032 {{{81,5}},5}, {{{89,5}},1025}, {{{85,5}},65}, {{{93,5}},16385},
5033 {{{80,5}},3}, {{{88,5}},513}, {{{84,5}},33}, {{{92,5}},8193},
5034 {{{82,5}},9}, {{{90,5}},2049}, {{{86,5}},129}, {{{192,5}},24577},
5035 {{{80,5}},2}, {{{87,5}},385}, {{{83,5}},25}, {{{91,5}},6145},
5036 {{{81,5}},7}, {{{89,5}},1537}, {{{85,5}},97}, {{{93,5}},24577},
5037 {{{80,5}},4}, {{{88,5}},769}, {{{84,5}},49}, {{{92,5}},12289},
5038 {{{82,5}},13}, {{{90,5}},3073}, {{{86,5}},193}, {{{192,5}},24577}
5039 };
5040 /* --- inffixed.h */
5041
5042 #endif
5043
5044
/* Return the decoding tables for the fixed (static) deflate block codes.
   With BUILDFIXED the tables are built lazily on first call into
   fixed_mem; otherwise the precomputed inffixed.h tables above are used.
   Always returns Z_OK (allocation failure aside). */
int inflate_trees_fixed(bl, bd, tl, td, z)
uIntf *bl;               /* literal desired/actual bit depth */
uIntf *bd;               /* distance desired/actual bit depth */
inflate_huft * FAR *tl;  /* literal/length tree result */
inflate_huft * FAR *td;  /* distance tree result */
z_streamp z;             /* for memory allocation */
{
#ifdef BUILDFIXED
  /* build fixed tables if not already */
  /* NOTE(review): this lazy init of shared static tables is not locked;
     confirm callers serialize the first call if used multithreaded. */
  if (!fixed_built)
  {
    int k;              /* temporary variable */
    uInt f = 0;         /* number of hufts used in fixed_mem */
    uIntf *c;           /* length list for huft_build */
    uIntf *v;           /* work area for huft_build */

    /* allocate memory */
    if ((c = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
      return Z_MEM_ERROR;
    if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
    {
      ZFREE(z, c);
      return Z_MEM_ERROR;
    }

    /* literal table: the fixed code lengths from the deflate spec */
    for (k = 0; k < 144; k++)
      c[k] = 8;
    for (; k < 256; k++)
      c[k] = 9;
    for (; k < 280; k++)
      c[k] = 7;
    for (; k < 288; k++)
      c[k] = 8;
    fixed_bl = 9;
    /* return value deliberately ignored: the fixed lengths are known
       valid, and huft_build still builds tables for an incomplete code */
    huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl,
               fixed_mem, &f, v);

    /* distance table: all 30 codes are 5 bits */
    for (k = 0; k < 30; k++)
      c[k] = 5;
    fixed_bd = 5;
    huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd,
               fixed_mem, &f, v);

    /* done */
    ZFREE(z, v);
    ZFREE(z, c);
    fixed_built = 1;
  }
#endif
  *bl = fixed_bl;
  *bd = fixed_bd;
  *tl = fixed_tl;
  *td = fixed_td;
  return Z_OK;
}
5102 /* --- inftrees.c */
5103
5104 /* +++ infcodes.c */
5105
5106 /* infcodes.c -- process literals and length/distance pairs
5107 * Copyright (C) 1995-2002 Mark Adler
5108 * For conditions of distribution and use, see copyright notice in zlib.h
5109 */
5110
5111 /* #include "zutil.h" */
5112 /* #include "inftrees.h" */
5113 /* #include "infblock.h" */
5114 /* #include "infcodes.h" */
5115 /* #include "infutil.h" */
5116
5117 /* +++ inffast.h */
5118
5119 /* inffast.h -- header to use inffast.c
5120 * Copyright (C) 1995-2002 Mark Adler
5121 * For conditions of distribution and use, see copyright notice in zlib.h
5122 */
5123
5124 /* WARNING: this file should *not* be used by applications. It is
5125 part of the implementation of the compression library and is
5126 subject to change. Applications should only use zlib.h.
5127 */
5128
5129 extern int inflate_fast __P((
5130 uInt,
5131 uInt,
5132 inflate_huft *,
5133 inflate_huft *,
5134 inflate_blocks_statef *,
5135 z_streamp ));
5136 /* --- inffast.h */
5137
5138 /* simplify the use of the inflate_huft type with some defines */
5139 #define exop word.what.Exop
5140 #define bits word.what.Bits
5141
/* States of the literal/length-distance decoding machine in
   inflate_codes() below.  The tag comments say what each state waits
   for: "i:"=input, "o:"=output space, "x:"=nothing (pure transition). */
typedef enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
      START,    /* x: set up for LEN */
      LEN,      /* i: get length/literal/eob next */
      LENEXT,   /* i: getting length extra (have base) */
      DIST,     /* i: get distance next */
      DISTEXT,  /* i: getting distance extra */
      COPY,     /* o: copying bytes in window, waiting for space */
      LIT,      /* o: got literal, waiting for output space */
      WASH,     /* o: got eob, possibly still output waiting */
      END,      /* x: got eob and all data flushed */
      BADCODE}  /* x: got error */
inflate_codes_mode;

/* inflate codes private state */
struct inflate_codes_state {

  /* mode */
  inflate_codes_mode mode;      /* current inflate_codes mode */

  /* mode dependent information */
  uInt len;                     /* copy length (set in LEN/LENEXT, counted
                                   down in COPY) */
  union {
    struct {
      inflate_huft *tree;       /* pointer into tree */
      uInt need;                /* bits needed */
    } code;             /* if LEN or DIST, where in tree */
    uInt lit;           /* if LIT, literal */
    struct {
      uInt get;                 /* bits to get for extra */
      uInt dist;                /* distance back to copy from */
    } copy;             /* if EXT or COPY, where and how much */
  } sub;                /* submode */

  /* mode independent information */
  Byte lbits;           /* ltree bits decoded per branch */
  Byte dbits;           /* dtree bits decoded per branch */
  inflate_huft *ltree;          /* literal/length/eob tree */
  inflate_huft *dtree;          /* distance tree */

};
5182
5183
5184 inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
5185 uInt bl, bd;
5186 inflate_huft *tl;
5187 inflate_huft *td; /* need separate declaration for Borland C++ */
5188 z_streamp z;
5189 {
5190 inflate_codes_statef *c;
5191
5192 if ((c = (inflate_codes_statef *)
5193 ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
5194 {
5195 c->mode = START;
5196 c->lbits = (Byte)bl;
5197 c->dbits = (Byte)bd;
5198 c->ltree = tl;
5199 c->dtree = td;
5200 Tracev((stderr, "inflate: codes new\n"));
5201 }
5202 return c;
5203 }
5204
5205
/* Decode literals and length/distance pairs for one deflate block.
   s carries the blocks state (window, bit buffer); r is the result code
   to return when input or output runs out.  Several cases deliberately
   fall through (START->LEN, LENEXT->DIST, DISTEXT->COPY, WASH->END).
   LOAD/UPDATE/NEEDBITS/DUMPBITS/NEEDOUT/OUTBYTE/FLUSH/LEAVE are infutil
   macros (defined elsewhere) that manage the locals below and can leave
   this function early. */
int inflate_codes(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt j;               /* temporary storage */
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  Bytef *f;             /* pointer to copy strings from */
  inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */

  /* copy input/output information to locals (UPDATE macro restores) */
  LOAD

  /* process input and output based on current state */
  while (1) switch (c->mode)
  {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
    case START:         /* x: set up for LEN */
#ifndef SLOW
      /* enough window and input for the unrolled fast decoder? */
      if (m >= 258 && n >= 10)
      {
        UPDATE
        r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
        LOAD
        if (r != Z_OK)
        {
          c->mode = r == Z_STREAM_END ? WASH : BADCODE;
          break;
        }
      }
#endif /* !SLOW */
      c->sub.code.need = c->lbits;
      c->sub.code.tree = c->ltree;
      c->mode = LEN;
      /* fall through */
    case LEN:           /* i: get length/literal/eob next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e == 0)               /* literal */
      {
        c->sub.lit = t->base;
        Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                 "inflate: literal '%c'\n" :
                 "inflate: literal 0x%02x\n", t->base));
        c->mode = LIT;
        break;
      }
      if (e & 16)               /* length */
      {
        c->sub.copy.get = e & 15;
        c->len = t->base;
        c->mode = LENEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      if (e & 32)               /* end of block */
      {
        Tracevv((stderr, "inflate: end of block\n"));
        c->mode = WASH;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = "invalid literal/length code";
      r = Z_DATA_ERROR;
      LEAVE
    case LENEXT:        /* i: getting length extra (have base) */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->len += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      c->sub.code.need = c->dbits;
      c->sub.code.tree = c->dtree;
      Tracevv((stderr, "inflate: length %u\n", c->len));
      c->mode = DIST;
      /* fall through */
    case DIST:          /* i: get distance next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e & 16)               /* distance */
      {
        c->sub.copy.get = e & 15;
        c->sub.copy.dist = t->base;
        c->mode = DISTEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = "invalid distance code";
      r = Z_DATA_ERROR;
      LEAVE
    case DISTEXT:       /* i: getting distance extra */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->sub.copy.dist += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      Tracevv((stderr, "inflate: distance %u\n", c->sub.copy.dist));
      c->mode = COPY;
      /* fall through */
    case COPY:          /* o: copying bytes in window, waiting for space */
      f = q - c->sub.copy.dist;
      while (f < s->window)             /* modulo window size-"while" instead */
        f += s->end - s->window;        /* of "if" handles invalid distances */
      while (c->len)
      {
        NEEDOUT
        OUTBYTE(*f++)
        if (f == s->end)
          f = s->window;
        c->len--;
      }
      c->mode = START;
      break;
    case LIT:           /* o: got literal, waiting for output space */
      NEEDOUT
      OUTBYTE(c->sub.lit)
      c->mode = START;
      break;
    case WASH:          /* o: got eob, possibly more output */
      if (k > 7)        /* return unused byte, if any */
      {
        Assert(k < 16, "inflate_codes grabbed too many bytes")
        k -= 8;
        n++;
        p--;            /* can always return one */
      }
      FLUSH
      if (s->read != s->write)
        LEAVE
      c->mode = END;
      /* fall through */
    case END:
      r = Z_STREAM_END;
      LEAVE
    case BADCODE:       /* x: got error */
      r = Z_DATA_ERROR;
      LEAVE
    default:
      r = Z_STREAM_ERROR;
      LEAVE
  }
#ifdef NEED_DUMMY_RETURN
  return Z_STREAM_ERROR;  /* Some dumb compilers complain without this */
#endif
}
5368
5369
/* Release a codes state allocated by inflate_codes_new(). */
void inflate_codes_free(c, z)
inflate_codes_statef *c;
z_streamp z;
{
  ZFREE(z, c);
  Tracev((stderr, "inflate: codes free\n"));
}
5377 /* --- infcodes.c */
5378
5379 /* +++ infutil.c */
5380
5381 /* inflate_util.c -- data and routines common to blocks and codes
5382 * Copyright (C) 1995-2002 Mark Adler
5383 * For conditions of distribution and use, see copyright notice in zlib.h
5384 */
5385
5386 /* #include "zutil.h" */
5387 /* #include "infblock.h" */
5388 /* #include "inftrees.h" */
5389 /* #include "infcodes.h" */
5390 /* #include "infutil.h" */
5391
5392 #ifndef NO_DUMMY_DECL
5393 struct inflate_codes_state {int dummy;}; /* for buggy compilers */
5394 #endif
5395
/* And'ing with mask[n] masks the lower n bits */
/* inflate_mask[n] == (1 << n) - 1 for n in 0..16; shared by
   inflate_codes() above and inflate_fast() below. */
uInt inflate_mask[17] = {
    0x0000,
    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};
5402
5403
5404 /* copy as much as possible from the sliding window to the output area */
5405 int inflate_flush(s, z, r)
5406 inflate_blocks_statef *s;
5407 z_streamp z;
5408 int r;
5409 {
5410 uInt n;
5411 Bytef *p;
5412 Bytef *q;
5413
5414 /* local copies of source and destination pointers */
5415 p = z->next_out;
5416 q = s->read;
5417
5418 /* compute number of bytes to copy as far as end of window */
5419 n = (uInt)((q <= s->write ? s->write : s->end) - q);
5420 if (n > z->avail_out) n = z->avail_out;
5421 if (n && r == Z_BUF_ERROR) r = Z_OK;
5422
5423 /* update counters */
5424 z->avail_out -= n;
5425 z->total_out += n;
5426
5427 /* update check information */
5428 if (s->checkfn != Z_NULL)
5429 z->adler = s->check = (*s->checkfn)(s->check, q, n);
5430
5431 /* copy as far as end of window */
5432 if (p != Z_NULL) {
5433 zmemcpy(p, q, n);
5434 p += n;
5435 }
5436 q += n;
5437
5438 /* see if more to copy at beginning of window */
5439 if (q == s->end)
5440 {
5441 /* wrap pointers */
5442 q = s->window;
5443 if (s->write == s->end)
5444 s->write = s->window;
5445
5446 /* compute bytes to copy */
5447 n = (uInt)(s->write - q);
5448 if (n > z->avail_out) n = z->avail_out;
5449 if (n && r == Z_BUF_ERROR) r = Z_OK;
5450
5451 /* update counters */
5452 z->avail_out -= n;
5453 z->total_out += n;
5454
5455 /* update check information */
5456 if (s->checkfn != Z_NULL)
5457 z->adler = s->check = (*s->checkfn)(s->check, q, n);
5458
5459 /* copy */
5460 if (p != NULL) {
5461 zmemcpy(p, q, n);
5462 p += n;
5463 }
5464 q += n;
5465 }
5466
5467 /* update pointers */
5468 z->next_out = p;
5469 s->read = q;
5470
5471 /* done */
5472 return r;
5473 }
5474 /* --- infutil.c */
5475
5476 /* +++ inffast.c */
5477
5478 /* inffast.c -- process literals and length/distance pairs fast
5479 * Copyright (C) 1995-2002 Mark Adler
5480 * For conditions of distribution and use, see copyright notice in zlib.h
5481 */
5482
5483 /* #include "zutil.h" */
5484 /* #include "inftrees.h" */
5485 /* #include "infblock.h" */
5486 /* #include "infcodes.h" */
5487 /* #include "infutil.h" */
5488 /* #include "inffast.h" */
5489
#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif

/* simplify the use of the inflate_huft type with some defines */
/* shorthand accessors for the packed exop/bits fields of an inflate_huft */
#define exop word.what.Exop
#define bits word.what.Bits

/* macros for bit input with no checking and for returning unused bytes */
/* GRABBITS(j): top up bit buffer b so it holds at least j bits; performs
   no end-of-input check, so the caller must guarantee enough input.
   UNGRAB: give whole unused bytes still sitting in the bit buffer back
   to the input stream before leaving the fast loop. */
#define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
#define UNGRAB {c=z->avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;}
5501
5502 /* Called with number of bytes left to write in window at least 258
5503 (the maximum string length) and number of input bytes available
5504 at least ten. The ten bytes are six bytes for the longest length/
5505 distance pair plus four bytes for overloading the bit buffer. */
5506
/*
 * Fast decode loop for literal/length and distance codes.  Per the
 * comment above, it is entered only when at least 258 bytes of window
 * space (the maximum match length) and at least 10 input bytes are
 * available, so no per-byte bounds checks are needed inside the loop.
 *
 * bl, bd - bits in the root literal/length and distance lookup tables
 * tl, td - literal/length and distance Huffman trees (inflate_huft[])
 * s      - inflate blocks state (sliding window and pointers)
 * z      - zlib stream (input source, error message sink)
 *
 * Returns Z_OK when input or window space runs low, Z_STREAM_END on the
 * end-of-block code, or Z_DATA_ERROR on an invalid code.  LOAD, UPDATE,
 * DUMPBITS and NEXTBYTE are macros defined earlier in this file
 * (infutil.h section); GRABBITS/UNGRAB are defined just above.
 */
int inflate_fast(bl, bd, tl, td, s, z)
uInt bl, bd;
inflate_huft *tl;
inflate_huft *td; /* need separate declaration for Borland C++ */
inflate_blocks_statef *s;
z_streamp z;
{
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  uInt ml;              /* mask for literal/length tree */
  uInt md;              /* mask for distance tree */
  uInt c;               /* bytes to copy */
  uInt d;               /* distance back to copy from */
  Bytef *r;             /* copy source pointer */

  /* load input, output, bit values */
  LOAD

  /* initialize masks */
  ml = inflate_mask[bl];
  md = inflate_mask[bd];

  /* do until not enough input or output space for fast loop */
  do {                          /* assume called with m >= 258 && n >= 10 */
    /* get literal/length code */
    GRABBITS(20)                /* max bits for literal/length code */
    if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
    {
      /* exop == 0: direct literal hit in the root table */
      DUMPBITS(t->bits)
      Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                "inflate: * literal '%c'\n" :
                "inflate: * literal 0x%02x\n", t->base));
      *q++ = (Byte)t->base;
      m--;
      continue;
    }
    do {
      DUMPBITS(t->bits)
      if (e & 16)
      {
        /* get extra bits for length */
        e &= 15;
        c = t->base + ((uInt)b & inflate_mask[e]);
        DUMPBITS(e)
        Tracevv((stderr, "inflate: * length %u\n", c));

        /* decode distance base of block to copy */
        GRABBITS(15);           /* max bits for distance code */
        e = (t = td + ((uInt)b & md))->exop;
        do {
          DUMPBITS(t->bits)
          if (e & 16)
          {
            /* get extra bits to add to distance base */
            e &= 15;
            GRABBITS(e)         /* get extra bits (up to 13) */
            d = t->base + ((uInt)b & inflate_mask[e]);
            DUMPBITS(e)
            Tracevv((stderr, "inflate: * distance %u\n", d));

            /* do the copy */
            m -= c;
            r = q - d;
            if (r < s->window)                  /* wrap if needed */
            {
              do {
                r += s->end - s->window;        /* force pointer in window */
              } while (r < s->window);          /* covers invalid distances */
              e = s->end - r;
              if (c > e)
              {
                c -= e;                         /* wrapped copy */
                do {
                  *q++ = *r++;
                } while (--e);
                r = s->window;
                do {
                  *q++ = *r++;
                } while (--c);
              }
              else                              /* normal copy */
              {
                /* first two bytes copied unrolled; c >= 3 is implied
                   by the minimum match length of the format */
                *q++ = *r++; c--;
                *q++ = *r++; c--;
                do {
                  *q++ = *r++;
                } while (--c);
              }
            }
            else                                /* normal copy */
            {
              *q++ = *r++; c--;
              *q++ = *r++; c--;
              do {
                *q++ = *r++;
              } while (--c);
            }
            break;
          }
          else if ((e & 64) == 0)
          {
            /* not a complete code yet: walk into the next-level table */
            t += t->base;
            e = (t += ((uInt)b & inflate_mask[e]))->exop;
          }
          else
          {
            z->msg = "invalid distance code";
            UNGRAB
            UPDATE
            return Z_DATA_ERROR;
          }
        } while (1);
        break;
      }
      if ((e & 64) == 0)
      {
        /* not a complete code yet: walk into the next-level table */
        t += t->base;
        if ((e = (t += ((uInt)b & inflate_mask[e]))->exop) == 0)
        {
          DUMPBITS(t->bits)
          Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                    "inflate: * literal '%c'\n" :
                    "inflate: * literal 0x%02x\n", t->base));
          *q++ = (Byte)t->base;
          m--;
          break;
        }
      }
      else if (e & 32)
      {
        /* end-of-block code: hand remaining bytes back and stop */
        Tracevv((stderr, "inflate: * end of block\n"));
        UNGRAB
        UPDATE
        return Z_STREAM_END;
      }
      else
      {
        z->msg = "invalid literal/length code";
        UNGRAB
        UPDATE
        return Z_DATA_ERROR;
      }
    } while (1);
  } while (m >= 258 && n >= 10);

  /* not enough input or output--restore pointers and return */
  UNGRAB
  UPDATE
  return Z_OK;
}
5663 /* --- inffast.c */
5664
5665 /* +++ zutil.c */
5666
5667 /* zutil.c -- target dependent utility functions for the compression library
5668 * Copyright (C) 1995-2002 Jean-loup Gailly.
5669 * For conditions of distribution and use, see copyright notice in zlib.h
5670 */
5671
5672 /* @(#) Id */
5673
5674 #ifdef DEBUG_ZLIB
5675 #include <stdio.h>
5676 #endif
5677
5678 /* #include "zutil.h" */
5679
5680 #ifndef NO_DUMMY_DECL
5681 struct internal_state {int dummy;}; /* for buggy compilers */
5682 #endif
5683
5684 #ifndef STDC
5685 extern void exit __P((int));
5686 #endif
5687
/* Error strings for the zlib return codes.  Presumably indexed through
   the ERR_MSG macro as z_errmsg[Z_NEED_DICT - err] (see zutil.h -- TODO
   confirm), so the order here must match the Z_* code values shown in
   the trailing comments, from 2 down to -7. */
const char *z_errmsg[10] = {
"need dictionary", /* Z_NEED_DICT 2 */
"stream end", /* Z_STREAM_END 1 */
"", /* Z_OK 0 */
"file error", /* Z_ERRNO (-1) */
"stream error", /* Z_STREAM_ERROR (-2) */
"data error", /* Z_DATA_ERROR (-3) */
"insufficient memory", /* Z_MEM_ERROR (-4) */
"buffer error", /* Z_BUF_ERROR (-5) */
"incompatible version",/* Z_VERSION_ERROR (-6) */
""};
5699
5700
#if 0
/* Disabled in this kernel build: returns the compile-time zlib version
   string (normally used by applications for a runtime sanity check). */
const char * ZEXPORT zlibVersion()
{
    return ZLIB_VERSION;
}
#endif
5707
#ifdef DEBUG_ZLIB

#  ifndef verbose
#    define verbose 0
#  endif
/* Debug trace verbosity level; only compiled in with DEBUG_ZLIB. */
int z_verbose = verbose;

/* Print a fatal debug message and terminate; used by the Assert/Trace
   machinery when the library is built with DEBUG_ZLIB. */
void z_error (m)
    char *m;
{
    fprintf(stderr, "%s\n", m);
    exit(1);
}
#endif
5722
/* exported to allow conversion of error code to string for compress() and
 * uncompress()
 */
#if 0
/* Disabled in this kernel build: maps a zlib return code to its message
   string via ERR_MSG / the z_errmsg table above. */
const char * ZEXPORT zError(err)
    int err;
{
    return ERR_MSG(err);
}
#endif
5733
5734
5735 #ifndef HAVE_MEMCPY
5736
5737 void zmemcpy(dest, source, len)
5738 Bytef* dest;
5739 const Bytef* source;
5740 uInt len;
5741 {
5742 if (len == 0) return;
5743 do {
5744 *dest++ = *source++; /* ??? to be unrolled */
5745 } while (--len != 0);
5746 }
5747
5748 int zmemcmp(s1, s2, len)
5749 const Bytef* s1;
5750 const Bytef* s2;
5751 uInt len;
5752 {
5753 uInt j;
5754
5755 for (j = 0; j < len; j++) {
5756 if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
5757 }
5758 return 0;
5759 }
5760
5761 void zmemzero(dest, len)
5762 Bytef* dest;
5763 uInt len;
5764 {
5765 if (len == 0) return;
5766 do {
5767 *dest++ = 0; /* ??? to be unrolled */
5768 } while (--len != 0);
5769 }
5770 #endif
5771
#ifdef __TURBOC__
#if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)
/* Small and medium model in Turbo C are for now limited to near allocation
 * with reduced MAX_WBITS and MAX_MEM_LEVEL
 */
# define MY_ZCALLOC

/* Turbo C malloc() does not allow dynamic allocation of 64K bytes
 * and farmalloc(64K) returns a pointer with an offset of 8, so we
 * must fix the pointer. Warning: the pointer must be put back to its
 * original form in order to free it, use zcfree().
 */

#define MAX_PTR 10
/* 10*64K = 640K */

local int next_ptr = 0;

/* Maps a normalized (seg:0) pointer back to the pointer farmalloc
 * actually returned, so zcfree can release it. */
typedef struct ptr_table_s {
    voidpf org_ptr;
    voidpf new_ptr;
} ptr_table;

local ptr_table table[MAX_PTR];
/* This table is used to remember the original form of pointers
 * to large buffers (64K). Such pointers are normalized with a zero offset.
 * Since MSDOS is not a preemptive multitasking OS, this table is not
 * protected from concurrent access. This hack doesn't work anyway on
 * a protected system like OS/2. Use Microsoft C instead.
 */

/* Turbo C allocator: far allocation with seg:0 normalization for
 * buffers of 64K or more (see block comment above). */
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    voidpf buf = opaque; /* just to make some compilers happy */
    ulg bsize = (ulg)items*size;

    /* If we allocate less than 65520 bytes, we assume that farmalloc
     * will return a usable pointer which doesn't have to be normalized.
     */
    if (bsize < 65520L) {
        buf = farmalloc(bsize);
        /* nonzero offset word means no normalization needed */
        if (*(ush*)&buf != 0) return buf;
    } else {
        buf = farmalloc(bsize + 16L);
    }
    if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
    table[next_ptr].org_ptr = buf;

    /* Normalize the pointer to seg:0 */
    *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
    *(ush*)&buf = 0;
    table[next_ptr++].new_ptr = buf;
    return buf;
}

/* Turbo C deallocator: looks up normalized pointers in the table to
 * recover the original farmalloc pointer before freeing. */
void zcfree (voidpf opaque, voidpf ptr)
{
    int n;
    if (*(ush*)&ptr != 0) { /* object < 64K */
        farfree(ptr);
        return;
    }
    /* Find the original pointer */
    for (n = 0; n < next_ptr; n++) {
        if (ptr != table[n].new_ptr) continue;

        farfree(table[n].org_ptr);
        /* compact the table over the freed entry */
        while (++n < next_ptr) {
            table[n-1] = table[n];
        }
        next_ptr--;
        return;
    }
    ptr = opaque; /* just to make some compilers happy */
    Assert(0, "zcfree: ptr not found");
}
#endif
#endif /* __TURBOC__ */
5850
5851
#if defined(M_I86) && !defined(__32BIT__)
/* Microsoft C in 16-bit mode */

# define MY_ZCALLOC

#if (!defined(_MSC_VER) || (_MSC_VER <= 600))
# define _halloc halloc
# define _hfree hfree
#endif

/* 16-bit MSC allocator: uses huge allocation (_halloc) so requests may
 * exceed 64K.  opaque is the z_stream user hook and is unused here. */
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    return _halloc((long)items, size);
}

/* 16-bit MSC deallocator, matching zcalloc above. */
void zcfree (voidpf opaque, voidpf ptr)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    _hfree(ptr);
}

#endif /* MSC */
5875
5876
5877 #ifndef MY_ZCALLOC /* Any system without a special alloc function */
5878
5879 #ifndef STDC
5880 extern voidp calloc __P((uInt items, uInt size));
5881 extern void free __P((voidpf ptr));
5882 #endif
5883
5884 voidpf zcalloc (opaque, items, size)
5885 voidpf opaque;
5886 unsigned items;
5887 unsigned size;
5888 {
5889 if (opaque) items += size - size; /* make compiler happy */
5890 return (voidpf)calloc(items, size);
5891 }
5892
5893 void zcfree (opaque, ptr)
5894 voidpf opaque;
5895 voidpf ptr;
5896 {
5897 free(ptr);
5898 if (opaque) return; /* make compiler happy */
5899 }
5900
5901 #endif /* MY_ZCALLOC */
5902 /* --- zutil.c */
5903
5904 /* +++ adler32.c */
5905 /* adler32.c -- compute the Adler-32 checksum of a data stream
5906 * Copyright (C) 1995-2002 Mark Adler
5907 * For conditions of distribution and use, see copyright notice in zlib.h
5908 */
5909
5910 /* @(#) $Id: zlib.c,v 1.23 2006/01/14 18:58:05 christos Exp $ */
5911
5912 /* #include "zlib.h" */
5913
#define BASE 65521L /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */

/* Loop-unrolling helpers for adler32: DO1 folds one input byte into the
   running sums (the names s1 and s2 are hard-coded -- the macros refer
   to them directly); DO2/DO4/DO8/DO16 process 2/4/8/16 bytes. */
#define DO1(buf,i) {s1 += buf[i]; s2 += s1;}
#define DO2(buf,i) DO1(buf,i); DO1(buf,i+1);
#define DO4(buf,i) DO2(buf,i); DO2(buf,i+2);
#define DO8(buf,i) DO4(buf,i); DO4(buf,i+4);
#define DO16(buf) DO8(buf,0); DO8(buf,8);
5923
5924 /* ========================================================================= */
5925 uLong ZEXPORT adler32(adler, buf, len)
5926 uLong adler;
5927 const Bytef *buf;
5928 uInt len;
5929 {
5930 unsigned long s1 = adler & 0xffff;
5931 unsigned long s2 = (adler >> 16) & 0xffff;
5932 int k;
5933
5934 if (buf == Z_NULL) return 1L;
5935
5936 while (len > 0) {
5937 k = len < NMAX ? len : NMAX;
5938 len -= k;
5939 while (k >= 16) {
5940 DO16(buf);
5941 buf += 16;
5942 k -= 16;
5943 }
5944 if (k != 0) do {
5945 s1 += *buf++;
5946 s2 += s1;
5947 } while (--k);
5948 s1 %= BASE;
5949 s2 %= BASE;
5950 }
5951 return (s2 << 16) | s1;
5952 }
5953 /* --- adler32.c */
5954
5955