The Design and Implementation of the FreeBSD Operating System, Second Edition
Now available: The Design and Implementation of the FreeBSD Operating System (Second Edition)


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]

FreeBSD/Linux Kernel Cross Reference
sys/libkern/zlib.c

Version: -  FREEBSD  -  FREEBSD-13-STABLE  -  FREEBSD-13-0  -  FREEBSD-12-STABLE  -  FREEBSD-12-0  -  FREEBSD-11-STABLE  -  FREEBSD-11-0  -  FREEBSD-10-STABLE  -  FREEBSD-10-0  -  FREEBSD-9-STABLE  -  FREEBSD-9-0  -  FREEBSD-8-STABLE  -  FREEBSD-8-0  -  FREEBSD-7-STABLE  -  FREEBSD-7-0  -  FREEBSD-6-STABLE  -  FREEBSD-6-0  -  FREEBSD-5-STABLE  -  FREEBSD-5-0  -  FREEBSD-4-STABLE  -  FREEBSD-3-STABLE  -  FREEBSD22  -  l41  -  OPENBSD  -  linux-2.6  -  MK84  -  PLAN9  -  xnu-8792 
SearchContext: -  none  -  3  -  10 

    1 /*
    2  * Copyright (c) 2000-2006 Apple Computer, Inc. All rights reserved.
    3  *
    4  * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
    5  * 
    6  * This file contains Original Code and/or Modifications of Original Code
    7  * as defined in and that are subject to the Apple Public Source License
    8  * Version 2.0 (the 'License'). You may not use this file except in
    9  * compliance with the License. The rights granted to you under the License
   10  * may not be used to create, or enable the creation or redistribution of,
   11  * unlawful or unlicensed copies of an Apple operating system, or to
   12  * circumvent, violate, or enable the circumvention or violation of, any
   13  * terms of an Apple operating system software license agreement.
   14  * 
   15  * Please obtain a copy of the License at
   16  * http://www.opensource.apple.com/apsl/ and read it before using this file.
   17  * 
   18  * The Original Code and all software distributed under the License are
   19  * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
   20  * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
   21  * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
   22  * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
   23  * Please see the License for the specific language governing rights and
   24  * limitations under the License.
   25  * 
   26  * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
   27  */
   28 /*
   29  * This file is derived from various .h and .c files from the zlib-1.0.4
   30  * distribution by Jean-loup Gailly and Mark Adler, with some additions
   31  * by Paul Mackerras to aid in implementing Deflate compression and
   32  * decompression for PPP packets.  See zlib.h for conditions of
   33  * distribution and use.
   34  *
   35  * Changes that have been made include:
   36  * - added Z_PACKET_FLUSH (see zlib.h for details)
   37  * - added inflateIncomp and deflateOutputPending
   38  * - allow strm->next_out to be NULL, meaning discard the output
   39  *
   40  * $FreeBSD: src/sys/net/zlib.c,v 1.10 1999/12/29 04:38:38 peter Exp $
   41  */
   42 
/* Build-time configuration for this amalgamated, in-kernel zlib build: */
#define STDC          /* assume an ANSI C compiler (prototypes, <stddef.h>) */
#define NO_DUMMY_DECL /* suppress dummy struct declarations for buggy compilers
                       * (see the #ifndef NO_DUMMY_DECL block further down) */
#define NO_ZCFUNCS    /* NOTE(review): presumably omits the z_* compatibility
                       * functions -- referent not visible in this chunk */
#define MY_ZCALLOC    /* zcalloc/zcfree supplied elsewhere in this file
                       * (definitions not visible in this chunk -- verify) */
   47 
   48 /* +++ zutil.h */
   49 /* zutil.h -- internal interface and configuration of the compression library
   50  * Copyright (C) 1995-2002 Jean-loup Gailly.
   51  * For conditions of distribution and use, see copyright notice in zlib.h
   52  */
   53 
   54 /* WARNING: this file should *not* be used by applications. It is
   55    part of the implementation of the compression library and is
   56    subject to change. Applications should only use zlib.h.
   57  */
   58 
   59 #ifndef _Z_UTIL_H
   60 #define _Z_UTIL_H
   61 
   62 #ifdef KERNEL
   63 #include <libkern/zlib.h>
   64 #else
   65 #include "zlib.h"
   66 #endif
   67 
   68 #ifdef KERNEL
   69 /* Assume this is a *BSD or SVR4 kernel */
   70 #include <sys/types.h>
   71 #include <sys/time.h>
   72 #include <sys/systm.h>
   73 #  define HAVE_MEMCPY
   74 #  define memcpy(d, s, n)       bcopy((s), (d), (n))
   75 #  define memset(d, v, n)       bzero((d), (n))
   76 #  define memcmp                bcmp
   77 
   78 #else
   79 #if defined(__KERNEL__)
   80 /* Assume this is a Linux kernel */
   81 #include <linux/string.h>
   82 #define HAVE_MEMCPY
   83 
   84 #else /* not kernel */
   85 #ifdef STDC
   86 #  include <stddef.h>
   87 #  include <string.h>
   88 #  include <stdlib.h>
   89 #endif
   90 #ifdef NO_ERRNO_H
   91     extern int errno;
   92 #else
   93 #   include <errno.h>
   94 #endif
   95 #endif /* __KERNEL__ */
   96 #endif /* KERNEL */
   97 
/* Shorthand integer types used throughout the compression code.
 * The 'f' variants are FAR-qualified for 16-bit segmented memory models;
 * FAR is assumed to come from zlib.h and expand to nothing on flat targets.
 */
typedef unsigned char  uch;
typedef uch FAR uchf;
typedef unsigned short ush;
typedef ush FAR ushf;
typedef unsigned long  ulg;
  103 
  104 /* (size given to avoid silly warnings with Visual C++) */
  105 static const char *z_errmsg[10] = {  /* indexed by 2-zlib_error */
  106 "need dictionary",     /* Z_NEED_DICT       2  */
  107 "stream end",          /* Z_STREAM_END      1  */
  108 "",                    /* Z_OK              0  */
  109 "file error",          /* Z_ERRNO         (-1) */
  110 "stream error",        /* Z_STREAM_ERROR  (-2) */
  111 "data error",          /* Z_DATA_ERROR    (-3) */
  112 "insufficient memory", /* Z_MEM_ERROR     (-4) */
  113 "buffer error",        /* Z_BUF_ERROR     (-5) */
  114 "incompatible version",/* Z_VERSION_ERROR (-6) */
  115 ""};
  116 
/* Map a zlib status code (Z_NEED_DICT=2 down to Z_VERSION_ERROR=-6) to its
 * human-readable message via the z_errmsg table above. */
#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]

/* Set strm->msg to the message for 'err' and return 'err' in one expression. */
#define ERR_RETURN(strm,err) \
  return (strm->msg = (char*)ERR_MSG(err), (err))
/* To be used only when the state is known to be valid */
  122 
        /* common constants */

#ifndef DEF_WBITS
#  define DEF_WBITS MAX_WBITS
#endif
/* default windowBits for decompression. MAX_WBITS is for compression only */

#if MAX_MEM_LEVEL >= 8
#  define DEF_MEM_LEVEL 8
#else
#  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
#endif
/* default memLevel */

#define STORED_BLOCK 0
#define STATIC_TREES 1
#define DYN_TREES    2
/* The three kinds of DEFLATE block type (the BTYPE field values) */

#define MIN_MATCH  3
#define MAX_MATCH  258
/* The minimum and maximum match lengths */

#define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */
  147 
        /* target dependencies */
/* Each platform defines OS_CODE (the originating-OS identifier byte) and,
 * where needed, fdopen/F_OPEN replacements.  Unmatched platforms fall
 * through to the "Common defaults" section at the end. */

#ifdef MSDOS
#  define OS_CODE  0x00
#  if defined(__TURBOC__) || defined(__BORLANDC__)
#    if(__STDC__ == 1) && (defined(__LARGE__) || defined(__COMPACT__))
       /* Allow compilation with ANSI keywords only enabled */
       void _Cdecl farfree( void *block );
       void *_Cdecl farmalloc( unsigned long nbytes );
#    else
#     include <alloc.h>
#    endif
#  else /* MSC or DJGPP */
#    include <malloc.h>
#  endif
#endif

#ifdef OS2
#  define OS_CODE  0x06
#endif

#ifdef WIN32 /* Windows 95 & Windows NT */
#  define OS_CODE  0x0b
#endif

#if defined(VAXC) || defined(VMS)
#  define OS_CODE  0x02
#  define F_OPEN(name, mode) \
     fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
#endif

#ifdef AMIGA
#  define OS_CODE  0x01
#endif

#if defined(ATARI) || defined(atarist)
#  define OS_CODE  0x05
#endif

#if defined(MACOS) || defined(TARGET_OS_MAC)
#  define OS_CODE  0x07
#  if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os
#    include <unix.h> /* for fdopen */
#  else
#    ifndef fdopen
#      define fdopen(fd,mode) NULL /* No fdopen() */
#    endif
#  endif
#endif

#ifdef __50SERIES /* Prime/PRIMOS */
#  define OS_CODE  0x0F
#endif

#ifdef TOPS20
#  define OS_CODE  0x0a
#endif

#if defined(_BEOS_) || defined(RISCOS)
#  define fdopen(fd,mode) NULL /* No fdopen() */
#endif

#if (defined(_MSC_VER) && (_MSC_VER > 600))
#  define fdopen(fd,type)  _fdopen(fd,type)
#endif


        /* Common defaults */

#ifndef OS_CODE
#  define OS_CODE  0x03  /* assume Unix */
#endif

#ifndef F_OPEN
#  define F_OPEN(name, mode) fopen((name), (mode))
#endif
  224 
         /* functions */

#ifdef HAVE_STRERROR
   extern char *strerror OF((int));
#  define zstrerror(errnum) strerror(errnum)
#else
#  define zstrerror(errnum) ""
#endif

/* zmemcpy/zmemcmp/zmemzero map onto the C library (or far-pointer variants
 * for MSDOS small/medium models) when memcpy is available; otherwise they
 * are declared as out-of-line fallbacks provided elsewhere. */
#if defined(pyr)
#  define NO_MEMCPY
#endif
#if defined(SMALL_MEDIUM) && !defined(_MSC_VER) && !defined(__SC__)
 /* Use our own functions for small and medium model with MSC <= 5.0.
  * You may have to use the same strategy for Borland C (untested).
  * The __SC__ check is for Symantec.
  */
#  define NO_MEMCPY
#endif
#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
#  define HAVE_MEMCPY
#endif
#ifdef HAVE_MEMCPY
#  ifdef SMALL_MEDIUM /* MSDOS small or medium model */
#    define zmemcpy _fmemcpy
#    define zmemcmp _fmemcmp
#    define zmemzero(dest, len) _fmemset(dest, 0, len)
#  else
#    define zmemcpy memcpy
#    define zmemcmp memcmp
#    define zmemzero(dest, len) memset(dest, 0, len)
#  endif
#else
   extern void zmemcpy  OF((Bytef* dest, const Bytef* source, uInt len));
   extern int  zmemcmp  OF((const Bytef* s1, const Bytef* s2, uInt len));
   extern void zmemzero OF((Bytef* dest, uInt len));
#endif
  262 
/* Diagnostic functions: Assert and the Trace* macros compile to no-ops
 * unless DEBUG_ZLIB is defined, so they cost nothing in production builds. */
#ifdef DEBUG_ZLIB
#  include <stdio.h>
   extern int z_verbose;
   extern void z_error    OF((char *m));
#  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
#  define Trace(x) {if (z_verbose>=0) fprintf x ;}
#  define Tracev(x) {if (z_verbose>0) fprintf x ;}
#  define Tracevv(x) {if (z_verbose>1) fprintf x ;}
#  define Tracec(c,x) {if (z_verbose>0 && (c)) fprintf x ;}
#  define Tracecv(c,x) {if (z_verbose>1 && (c)) fprintf x ;}
#else
#  define Assert(cond,msg) do {} while(0)
#  define Trace(x) do {} while(0)
#  define Tracev(x) do {} while(0)
#  define Tracevv(x) do {} while(0)
#  define Tracec(c,x) do {} while(0)
#  define Tracecv(c,x) do {} while(0)
#endif
  282 
  283 
/* Signature of a running-checksum function (adler32/crc32 style -- assumed;
 * callers not visible in this chunk). */
typedef uLong (ZEXPORT *check_func) OF((uLong check, const Bytef *buf,
                                       uInt len));
voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size));
void   zcfree  OF((voidpf opaque, voidpf ptr));

/* Allocate/free through the user-supplied callbacks stored in the stream. */
#define ZALLOC(strm, items, size) \
           (*((strm)->zalloc))((strm)->opaque, (items), (size))
#define ZFREE(strm, addr)  (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
#define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
  293 
  294 #endif /* _Z_UTIL_H */
  295 /* --- zutil.h */
  296 
  297 /* +++ deflate.h */
  298 /* deflate.h -- internal compression state
  299  * Copyright (C) 1995-2002 Jean-loup Gailly
  300  * For conditions of distribution and use, see copyright notice in zlib.h 
  301  */
  302 
  303 /* WARNING: this file should *not* be used by applications. It is
  304    part of the implementation of the compression library and is
  305    subject to change. Applications should only use zlib.h.
  306  */
  307 
  308 #ifndef _DEFLATE_H
  309 #define _DEFLATE_H
  310 
  311 /* #include "zutil.h" */
  312 
  313 /* ===========================================================================
  314  * Internal compression state.
  315  */
  316 
#define LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */

#define LITERALS  256
/* number of literal bytes 0..255 */

#define L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */

#define D_CODES   30
/* number of distance codes */

#define BL_CODES  19
/* number of codes used to transfer the bit lengths */

#define HEAP_SIZE (2*L_CODES+1)
/* maximum heap size */

#define MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */

#define INIT_STATE    42
#define BUSY_STATE   113
#define FINISH_STATE 666
/* Stream status values stored in deflate_state.status */
  342 
  343 
/* Data structure describing a single value and its code string.
 * The unions let the same node double as a frequency counter / tree link
 * while building the tree, and as a code / bit-length pair afterwards. */
typedef struct ct_data_s {
    union {
        ush  freq;       /* frequency count */
        ush  code;       /* bit string */
    } fc;
    union {
        ush  dad;        /* father node in Huffman tree */
        ush  len;        /* length of bit string */
    } dl;
} FAR ct_data;

/* Shorthand accessors for the union members above. */
#define Freq fc.freq
#define Code fc.code
#define Dad  dl.dad
#define Len  dl.len
  360 
/* Forward declaration; the full definition lives with the trees code
 * (only a dummy declaration is visible in this chunk). */
typedef struct static_tree_desc_s  static_tree_desc;

/* Pairs a dynamic (per-stream) tree with its static counterpart. */
typedef struct tree_desc_s {
    ct_data *dyn_tree;           /* the dynamic tree */
    int     max_code;            /* largest code with non zero frequency */
    static_tree_desc *stat_desc; /* the corresponding static tree */
} FAR tree_desc;

typedef ush Pos;
typedef Pos FAR Posf;
typedef unsigned IPos;

/* A Pos is an index in the character window. We use short instead of int to
 * save space in the various tables. IPos is used only for parameter passing.
 */
  376 
/* Complete internal state of one deflate (compression) stream. */
typedef struct deflate_state {
    z_streamp strm;      /* pointer back to this zlib stream */
    int   status;        /* INIT_STATE, BUSY_STATE or FINISH_STATE */
    Bytef *pending_buf;  /* output still pending */
    ulg   pending_buf_size; /* size of pending_buf */
    Bytef *pending_out;  /* next pending byte to output to the stream */
    int   pending;       /* nb of bytes in the pending buffer */
    int   noheader;      /* suppress zlib header and adler32 */
    Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
    Byte  method;        /* STORED (for zip only) or DEFLATED */
    int   last_flush;    /* value of flush param for previous deflate call */

                /* used by deflate.c: */

    uInt  w_size;        /* LZ77 window size (32K by default) */
    uInt  w_bits;        /* log2(w_size)  (8..16) */
    uInt  w_mask;        /* w_size - 1 */

    Bytef *window;
    /* Sliding window. Input bytes are read into the second half of the window,
     * and move to the first half later to keep a dictionary of at least wSize
     * bytes. With this organization, matches are limited to a distance of
     * wSize-MAX_MATCH bytes, but this ensures that IO is always
     * performed with a length multiple of the block size. Also, it limits
     * the window size to 64K, which is quite useful on MSDOS.
     * To do: use the user input buffer as sliding window.
     */

    ulg window_size;
    /* Actual size of window: 2*wSize, except when the user input buffer
     * is directly used as sliding window.
     */

    Posf *prev;
    /* Link to older string with same hash index. To limit the size of this
     * array to 64K, this link is maintained only for the last 32K strings.
     * An index in this array is thus a window index modulo 32K.
     */

    Posf *head; /* Heads of the hash chains or NIL. */

    uInt  ins_h;          /* hash index of string to be inserted */
    uInt  hash_size;      /* number of elements in hash table */
    uInt  hash_bits;      /* log2(hash_size) */
    uInt  hash_mask;      /* hash_size-1 */

    uInt  hash_shift;
    /* Number of bits by which ins_h must be shifted at each input
     * step. It must be such that after MIN_MATCH steps, the oldest
     * byte no longer takes part in the hash key, that is:
     *   hash_shift * MIN_MATCH >= hash_bits
     */

    long block_start;
    /* Window position at the beginning of the current output block. Gets
     * negative when the window is moved backwards.
     */

    uInt match_length;           /* length of best match */
    IPos prev_match;             /* previous match */
    int match_available;         /* set if previous match exists */
    uInt strstart;               /* start of string to insert */
    uInt match_start;            /* start of matching string */
    uInt lookahead;              /* number of valid bytes ahead in window */

    uInt prev_length;
    /* Length of the best match at previous step. Matches not greater than this
     * are discarded. This is used in the lazy match evaluation.
     */

    uInt max_chain_length;
    /* To speed up deflation, hash chains are never searched beyond this
     * length.  A higher limit improves compression ratio but degrades the
     * speed.
     */

    uInt max_lazy_match;
    /* Attempt to find a better match only when the current match is strictly
     * smaller than this value. This mechanism is used only for compression
     * levels >= 4.
     */
#   define max_insert_length  max_lazy_match
    /* Insert new strings in the hash table only if the match length is not
     * greater than this length. This saves time but degrades compression.
     * max_insert_length is used only for compression levels <= 3.
     */

    int level;    /* compression level (1..9) */
    int strategy; /* favor or force Huffman coding*/

    uInt good_match;
    /* Use a faster search when the previous match is longer than this */

    int nice_match; /* Stop searching when current match exceeds this */

                /* used by trees.c: */
    /* Didn't use ct_data typedef below to suppress compiler warning */
    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */

    struct tree_desc_s l_desc;               /* desc. for literal tree */
    struct tree_desc_s d_desc;               /* desc. for distance tree */
    struct tree_desc_s bl_desc;              /* desc. for bit length tree */

    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
    int heap_len;               /* number of elements in the heap */
    int heap_max;               /* element of largest frequency */
    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
     * The same heap array is used to build all trees.
     */

    uch depth[2*L_CODES+1];
    /* Depth of each subtree used as tie breaker for trees of equal frequency
     */

    uchf *l_buf;          /* buffer for literals or lengths */

    uInt  lit_bufsize;
    /* Size of match buffer for literals/lengths.  There are 4 reasons for
     * limiting lit_bufsize to 64K:
     *   - frequencies can be kept in 16 bit counters
     *   - if compression is not successful for the first block, all input
     *     data is still in the window so we can still emit a stored block even
     *     when input comes from standard input.  (This can also be done for
     *     all blocks if lit_bufsize is not greater than 32K.)
     *   - if compression is not successful for a file smaller than 64K, we can
     *     even emit a stored file instead of a stored block (saving 5 bytes).
     *     This is applicable only for zip (not gzip or zlib).
     *   - creating new Huffman trees less frequently may not provide fast
     *     adaptation to changes in the input data statistics. (Take for
     *     example a binary file with poorly compressible code followed by
     *     a highly compressible string table.) Smaller buffer sizes give
     *     fast adaptation but have of course the overhead of transmitting
     *     trees more frequently.
     *   - I can't count above 4
     */

    uInt last_lit;      /* running index in l_buf */

    ushf *d_buf;
    /* Buffer for distances. To simplify the code, d_buf and l_buf have
     * the same number of elements. To use different lengths, an extra flag
     * array would be necessary.
     */

    ulg opt_len;        /* bit length of current block with optimal trees */
    ulg static_len;     /* bit length of current block with static trees */
    uInt matches;       /* number of string matches in current block */
    int last_eob_len;   /* bit length of EOB code for last block */

#ifdef DEBUG_ZLIB
    ulg compressed_len; /* total bit length of compressed file mod 2^32 */
    ulg bits_sent;      /* bit length of compressed data sent mod 2^32 */
#endif

    ush bi_buf;
    /* Output buffer. bits are inserted starting at the bottom (least
     * significant bits).
     */
    int bi_valid;
    /* Number of valid bits in bi_buf.  All bits above the last valid bit
     * are always zero.
     */

} FAR deflate_state;
  546 
/* Output a byte on the stream.
 * IN assertion: there is enough room in pending_buf -- the macro itself
 * performs no bounds check.
 */
#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}


#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
/* In order to simplify the code, particularly on 16 bit machines, match
 * distances are limited to MAX_DIST instead of WSIZE.
 */
  562 
        /* in trees.c */
/* Prototypes for the tree/bit-output routines defined later in this file. */
static void _tr_init         OF((deflate_state *s));
static int  _tr_tally        OF((deflate_state *s, unsigned dist, unsigned lc));
static void _tr_flush_block  OF((deflate_state *s, charf *buf, ulg stored_len,
                          int eof));
static void _tr_align        OF((deflate_state *s));
static void _tr_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
                          int eof));

#define d_code(dist) \
   ((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. _dist_code[256] and _dist_code[257] are never
 * used.
 */

#ifndef DEBUG_ZLIB
/* Inline versions of _tr_tally for speed: record a literal (or a
 * distance/length pair) in d_buf/l_buf, bump the matching tree frequency,
 * and set 'flush' when the literal buffer is full. */

#if defined(GEN_TREES_H) || !defined(STDC)
  extern uch _length_code[];
  extern uch _dist_code[];
#else
  extern const uch _length_code[];
  extern const uch _dist_code[];
#endif

# define _tr_tally_lit(s, c, flush) \
  { uch cc = (c); \
    s->d_buf[s->last_lit] = 0; \
    s->l_buf[s->last_lit++] = cc; \
    s->dyn_ltree[cc].Freq++; \
    flush = (s->last_lit == s->lit_bufsize-1); \
   }
# define _tr_tally_dist(s, distance, length, flush) \
  { uch len = (length); \
    ush dist = (distance); \
    s->d_buf[s->last_lit] = dist; \
    s->l_buf[s->last_lit++] = len; \
    dist--; \
    s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
    s->dyn_dtree[d_code(dist)].Freq++; \
    flush = (s->last_lit == s->lit_bufsize-1); \
  }
#else
# define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
# define _tr_tally_dist(s, distance, length, flush) \
              flush = _tr_tally(s, distance, length) 
#endif
  612 
  613 #endif
  614 /* --- deflate.h */
  615 
  616 /* +++ deflate.c */
  617 /* deflate.c -- compress data using the deflation algorithm
  618  * Copyright (C) 1995-2002 Jean-loup Gailly.
  619  * For conditions of distribution and use, see copyright notice in zlib.h 
  620  */
  621 
  622 /*
  623  *  ALGORITHM
  624  *
  625  *      The "deflation" process depends on being able to identify portions
  626  *      of the input text which are identical to earlier input (within a
  627  *      sliding window trailing behind the input currently being processed).
  628  *
  629  *      The most straightforward technique turns out to be the fastest for
  630  *      most input files: try all possible matches and select the longest.
  631  *      The key feature of this algorithm is that insertions into the string
  632  *      dictionary are very simple and thus fast, and deletions are avoided
  633  *      completely. Insertions are performed at each input character, whereas
  634  *      string matches are performed only when the previous match ends. So it
  635  *      is preferable to spend more time in matches to allow very fast string
  636  *      insertions and avoid deletions. The matching algorithm for small
  637  *      strings is inspired from that of Rabin & Karp. A brute force approach
  638  *      is used to find longer strings when a small match has been found.
  639  *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
  640  *      (by Leonid Broukhis).
  641  *         A previous version of this file used a more sophisticated algorithm
  642  *      (by Fiala and Greene) which is guaranteed to run in linear amortized
  643  *      time, but has a larger average cost, uses more memory and is patented.
  644  *      However the F&G algorithm may be faster for some highly redundant
  645  *      files if the parameter max_chain_length (described below) is too large.
  646  *
  647  *  ACKNOWLEDGEMENTS
  648  *
  649  *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
  650  *      I found it in 'freeze' written by Leonid Broukhis.
  651  *      Thanks to many people for bug reports and testing.
  652  *
  653  *  REFERENCES
  654  *
  655  *      Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
  656  *      Available in ftp://ds.internic.net/rfc/rfc1951.txt
  657  *
  658  *      A description of the Rabin and Karp algorithm is given in the book
  659  *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
  660  *
  661  *      Fiala,E.R., and Greene,D.H.
  662  *         Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
  663  *
  664  */
  665 
  666 /* #include "deflate.h" */
  667 
/* Attribution string deliberately kept in the binary; see note below. */
const char deflate_copyright[] =
   " deflate 1.1.4 Copyright 1995-2002 Jean-loup Gailly ";
/*
  If you use the zlib library in a product, an acknowledgment is welcome
  in the documentation of your product. If for some reason you cannot
  include such an acknowledgment, I would appreciate that you keep this
  copyright string in the executable of your product.
 */
  676 
  677 /* ===========================================================================
  678  *  Function prototypes.
  679  */
/* Result returned by each per-block compression routine. */
typedef enum {
    need_more,      /* block not completed, need more input or more output */
    block_done,     /* block flush performed */
    finish_started, /* finish started, need only more output at next deflate */
    finish_done     /* finish done, accept no more input or output */
} block_state;

typedef block_state (*compress_func) OF((deflate_state *s, int flush));
/* Compression function. Returns the block state after the call. */
  689 
/* Forward declarations of the static helpers defined later in this file. */
static void fill_window    OF((deflate_state *s));
static block_state deflate_stored OF((deflate_state *s, int flush));
static block_state deflate_fast   OF((deflate_state *s, int flush));
static block_state deflate_slow   OF((deflate_state *s, int flush));
static void lm_init        OF((deflate_state *s));
static void putShortMSB    OF((deflate_state *s, uInt b));
static void flush_pending  OF((z_streamp strm));
static int read_buf        OF((z_streamp strm, Bytef *buf, unsigned size));
#ifdef ASMV
      void match_init OF((void)); /* asm code initialization */
      uInt longest_match  OF((deflate_state *s, IPos cur_match));
#else
static uInt longest_match  OF((deflate_state *s, IPos cur_match));
#endif

#ifdef DEBUG_ZLIB
static  void check_match OF((deflate_state *s, IPos start, IPos match,
                            int length));
#endif
  709 
  710 /* ===========================================================================
  711  * Local data
  712  */
  713 
#define NIL 0
/* Tail of hash chains */

#ifndef TOO_FAR
#  define TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 * (Identical redefinition of the deflate.h macro above -- harmless, an
 * artifact of inlining both headers into this single file.)
 */
  726 
/* Values for max_lazy_match, good_match and max_chain_length, depending on
 * the desired pack level (0..9). The values given below have been tuned to
 * exclude worst case performance for pathological files. Better values may be
 * found for specific files.
 */
typedef struct config_s {
   ush good_length; /* reduce lazy search above this match length */
   ush max_lazy;    /* do not perform lazy search above this match length */
   ush nice_length; /* quit search above this match length */
   ush max_chain;   /* never search hash chains beyond this length */
   compress_func func; /* block compression routine for this level */
} config;

static const config configuration_table[10] = {
/*      good lazy nice chain */
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
/* 1 */ {4,    4,  8,    4, deflate_fast}, /* maximum speed, no lazy matches */
/* 2 */ {4,    5, 16,    8, deflate_fast},
/* 3 */ {4,    6, 32,   32, deflate_fast},

/* 4 */ {4,    4, 16,   16, deflate_slow},  /* lazy matches */
/* 5 */ {8,   16, 32,   32, deflate_slow},
/* 6 */ {8,   16, 128, 128, deflate_slow},
/* 7 */ {8,   32, 128, 256, deflate_slow},
/* 8 */ {32, 128, 258, 1024, deflate_slow},
/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */

/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * meaning.
 */
  758 
#define EQUAL 0
/* result of memcmp for equal strings */

#ifndef NO_DUMMY_DECL
struct static_tree_desc_s {int dummy;}; /* dummy body for buggy compilers
                                         * (suppressed here via NO_DUMMY_DECL) */
#endif
  765 
/* ===========================================================================
 * Update a hash value with the given input byte
 * IN  assertion: all calls to UPDATE_HASH are made with consecutive
 *    input characters, so that a running hash key can be computed from the
 *    previous key instead of complete recalculation each time.
 */
#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
  773 
  774 
/* ===========================================================================
 * Insert string str in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * If this file is compiled with -DFASTEST, the compression level is forced
 * to 1, and no hash chains are maintained.
 * IN  assertion: all calls to INSERT_STRING are made with consecutive
 *    input characters and the first MIN_MATCH bytes of str are valid
 *    (except for the last MIN_MATCH-1 bytes of the input file).
 */
#ifdef FASTEST
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#else
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#endif
  796 
  797 /* ===========================================================================
  798  * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
  799  * prev[] will be initialized on the fly.
  800  */
  801 #define CLEAR_HASH(s) \
  802     s->head[s->hash_size-1] = NIL; \
  803     zmemzero((Bytef *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
  804 
  805 /* ========================================================================= */
/*
 * Convenience initializer: set up a deflate stream with the default
 * method (Z_DEFLATED), window size (MAX_WBITS), memory level
 * (DEF_MEM_LEVEL) and strategy (Z_DEFAULT_STRATEGY).
 */
int ZEXPORT
deflateInit_(z_streamp strm, int level, const char *ver, int stream_size)
{
    return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
                         Z_DEFAULT_STRATEGY, ver, stream_size);
    /* To do: ignore strm->next_in if we use it as window */
}
  813 
  814 /* ========================================================================= */
/*
 * Full deflate initializer: validates the caller's zlib version and the
 * parameter ranges, allocates the sliding window, hash tables and the
 * overlaid pending/d_buf/l_buf buffer, then resets the stream.
 * Returns Z_OK, or Z_VERSION_ERROR / Z_STREAM_ERROR / Z_MEM_ERROR.
 */
int ZEXPORT
deflateInit2_(z_streamp strm, int level, int method, int windowBits,
              int memLevel, int strategy, const char *ver, int stream_size)
{
    deflate_state *s;
    int noheader = 0;
    static const char* my_version = ZLIB_VERSION;

    ushf *overlay;
    /* We overlay pending_buf and d_buf+l_buf. This works since the average
     * output size for (length,distance) codes is <= 24 bits.
     */

    if (ver == Z_NULL || ver[0] != my_version[0] ||
        stream_size != sizeof(z_stream)) {
        return Z_VERSION_ERROR;
    }
    if (strm == Z_NULL) return Z_STREAM_ERROR;

    strm->msg = Z_NULL;
#ifndef NO_ZCFUNCS
    /* Supply the default allocator pair if the caller provided none. */
    if (strm->zalloc == Z_NULL) {
        strm->zalloc = zcalloc;
        strm->opaque = (voidpf)0;
    }
    if (strm->zfree == Z_NULL) strm->zfree = zcfree;
#endif

    if (level == Z_DEFAULT_COMPRESSION) level = 6;
#ifdef FASTEST
    level = 1;
#endif

    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
        noheader = 1;
        windowBits = -windowBits;
    }
    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
        windowBits < 9 || windowBits > 15 || level < 0 || level > 9 ||
        strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    if (s == Z_NULL) return Z_MEM_ERROR;
    strm->state = (struct internal_state FAR *)s;
    s->strm = strm;

    s->noheader = noheader;
    s->w_bits = windowBits;
    s->w_size = 1 << s->w_bits;
    s->w_mask = s->w_size - 1;

    s->hash_bits = memLevel + 7;
    s->hash_size = 1 << s->hash_bits;
    s->hash_mask = s->hash_size - 1;
    s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

    /* Window is 2*w_size so fill_window() can slide the upper half down. */
    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));

    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
    s->pending_buf = (uchf *) overlay;
    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);

    /* All failures checked together; deflateEnd() frees whatever succeeded. */
    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
        s->pending_buf == Z_NULL) {
        strm->msg = (char*)ERR_MSG(Z_MEM_ERROR);
        deflateEnd (strm);
        return Z_MEM_ERROR;
    }
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;

    s->level = level;
    s->strategy = strategy;
    s->method = (Byte)method;

    return deflateReset(strm);
}
  897 
  898 /* ========================================================================= */
/*
 * Prime the compressor with a preset dictionary. Only legal immediately
 * after initialization (status must still be INIT_STATE). At most
 * MAX_DIST(s) trailing bytes of the dictionary are kept; the window and
 * hash chains are seeded so following input can match against them.
 */
int ZEXPORT
deflateSetDictionary(z_streamp strm, const Bytef *dictionary, uInt dictLength)
{
    deflate_state *s;
    uInt length = dictLength;
    uInt n;
    IPos hash_head = 0;

    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL ||
        ((deflate_state*)strm->state)->status != INIT_STATE) return Z_STREAM_ERROR;

    s = (deflate_state*)strm->state;
    /* The full dictionary's adler32 is recorded, even if it gets truncated
     * below; inflate uses it to identify the dictionary.
     */
    strm->adler = adler32(strm->adler, dictionary, dictLength);

    if (length < MIN_MATCH) return Z_OK;
    if (length > MAX_DIST(s)) {
        length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
        dictionary += dictLength - length; /* use the tail of the dictionary */
#endif
    }
    zmemcpy(s->window, dictionary, length);
    s->strstart = length;
    s->block_start = (long)length;

    /* Insert all strings in the hash table (except for the last two bytes).
     * s->lookahead stays null, so s->ins_h will be recomputed at the next
     * call of fill_window.
     */
    s->ins_h = s->window[0];
    UPDATE_HASH(s, s->ins_h, s->window[1]);
    for (n = 0; n <= length - MIN_MATCH; n++) {
        INSERT_STRING(s, n, hash_head);
    }
    if (hash_head) hash_head = 0;  /* to make compiler happy */
    return Z_OK;
}
  936 
  937 /* ========================================================================= */
  938 int ZEXPORT
  939 deflateReset(z_streamp strm)
  940 {
  941     deflate_state *s;
  942     
  943     if (strm == Z_NULL || strm->state == Z_NULL ||
  944         strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;
  945 
  946     strm->total_in = strm->total_out = 0;
  947     strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
  948     strm->data_type = Z_UNKNOWN;
  949 
  950     s = (deflate_state *)strm->state;
  951     s->pending = 0;
  952     s->pending_out = s->pending_buf;
  953 
  954     if (s->noheader < 0) {
  955         s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
  956     }
  957     s->status = s->noheader ? BUSY_STATE : INIT_STATE;
  958     strm->adler = 1;
  959     s->last_flush = Z_NO_FLUSH;
  960 
  961     _tr_init(s);
  962     lm_init(s);
  963 
  964     return Z_OK;
  965 }
  966 
  967 /* ========================================================================= */
/*
 * Change compression level and strategy mid-stream. If the change would
 * switch the block-compression routine and some input has already been
 * consumed, the current data is first flushed (Z_PARTIAL_FLUSH) so it is
 * compressed with the old parameters. Returns Z_OK, Z_STREAM_ERROR, or
 * whatever the internal deflate() flush returned.
 */
int ZEXPORT
deflateParams(z_streamp strm, int level, int strategy)
{
    deflate_state *s;
    compress_func func;
    int err = Z_OK;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state*)strm->state;

    if (level == Z_DEFAULT_COMPRESSION) {
        level = 6;
    }
    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    func = configuration_table[s->level].func;

    if (func != configuration_table[level].func && strm->total_in != 0) {
        /* Flush the last buffer: */
        err = deflate(strm, Z_PARTIAL_FLUSH);
    }
    if (s->level != level) {
        /* Load the new level's tuning parameters (see configuration_table). */
        s->level = level;
        s->max_lazy_match   = configuration_table[level].max_lazy;
        s->good_match       = configuration_table[level].good_length;
        s->nice_match       = configuration_table[level].nice_length;
        s->max_chain_length = configuration_table[level].max_chain;
    }
    s->strategy = strategy;
    return err;
}
 1000 
 1001 /* =========================================================================
 1002  * Put a short in the pending buffer. The 16-bit value is put in MSB order.
 1003  * IN assertion: the stream state is correct and there is enough room in
 1004  * pending_buf.
 1005  */
 1006 static void
 1007 putShortMSB(deflate_state *s, uInt b)
 1008 {
 1009     put_byte(s, (Byte)(b >> 8));
 1010     put_byte(s, (Byte)(b & 0xff));
 1011 }   
 1012 
 1013 /* =========================================================================
 1014  * Flush as much pending output as possible. All deflate() output goes
 1015  * through this function so some applications may wish to modify it
 1016  * to avoid allocating a large strm->next_out buffer and copying into it.
 1017  * (See also read_buf()).
 1018  */
 1019 static void
 1020 flush_pending(z_streamp strm)
 1021 {
 1022         deflate_state* s = (deflate_state*)strm->state;
 1023     unsigned len = s->pending;
 1024 
 1025     if (len > strm->avail_out) len = strm->avail_out;
 1026     if (len == 0) return;
 1027 
 1028     zmemcpy(strm->next_out, s->pending_out, len);
 1029     strm->next_out  += len;
 1030     s->pending_out  += len;
 1031     strm->total_out += len;
 1032     strm->avail_out  -= len;
 1033     s->pending -= len;
 1034     if (s->pending == 0) {
 1035         s->pending_out = s->pending_buf;
 1036     }
 1037 }
 1038 
 1039 /* ========================================================================= */
/*
 * Main compression entry point: consume input from strm->next_in and
 * produce compressed output in strm->next_out, honoring the flush mode.
 * Emits the zlib header on the first call and the adler32 trailer after
 * Z_FINISH. Returns Z_OK, Z_STREAM_END, Z_STREAM_ERROR or Z_BUF_ERROR.
 */
int ZEXPORT
deflate (z_streamp strm, int flush)
{
    int old_flush; /* value of flush param for previous deflate call */
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        flush > Z_FINISH || flush < 0) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state*)strm->state;

    if (strm->next_out == Z_NULL ||
        (strm->next_in == Z_NULL && strm->avail_in != 0) ||
        (s->status == FINISH_STATE && flush != Z_FINISH)) {
        ERR_RETURN(strm, Z_STREAM_ERROR);
    }
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

    s->strm = strm; /* just in case */
    old_flush = s->last_flush;
    s->last_flush = flush;

    /* Write the zlib header */
    if (s->status == INIT_STATE) {

        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
        uInt level_flags = (s->level-1) >> 1;

        if (level_flags > 3) level_flags = 3;
        header |= (level_flags << 6);
        if (s->strstart != 0) header |= PRESET_DICT;
        /* Make the two header bytes a multiple of 31 (FCHECK field). */
        header += 31 - (header % 31);

        s->status = BUSY_STATE;
        putShortMSB(s, header);

        /* Save the adler32 of the preset dictionary: */
        if (s->strstart != 0) {
            putShortMSB(s, (uInt)(strm->adler >> 16));
            putShortMSB(s, (uInt)(strm->adler & 0xffff));
        }
        strm->adler = 1L;
    }

    /* Flush as much pending output as possible */
    if (s->pending != 0) {
        flush_pending(strm);
        if (strm->avail_out == 0) {
            /* Since avail_out is 0, deflate will be called again with
             * more output space, but possibly with both pending and
             * avail_in equal to zero. There won't be anything to do,
             * but this is not an error situation so make sure we
             * return OK instead of BUF_ERROR at next call of deflate:
             */
            s->last_flush = -1;
            return Z_OK;
        }

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUF_ERROR.
     */
    } else if (strm->avail_in == 0 && flush <= old_flush &&
               flush != Z_FINISH) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* User must not provide more input after the first FINISH: */
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* Start a new block or continue the current one.
     */
    if (strm->avail_in != 0 || s->lookahead != 0 ||
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
        block_state bstate;

        /* Dispatch to the level's routine: stored, fast or slow. */
        bstate = (*(configuration_table[s->level].func))(s, flush);

        if (bstate == finish_started || bstate == finish_done) {
            s->status = FINISH_STATE;
        }
        if (bstate == need_more || bstate == finish_started) {
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
            }
            return Z_OK;
            /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
             * of deflate should use the same flush parameter to make sure
             * that the flush is complete. So we don't have to output an
             * empty block here, this will be done at next call. This also
             * ensures that for a very small output buffer, we emit at most
             * one empty block.
             */
        }
        if (bstate == block_done) {
            if (flush == Z_PARTIAL_FLUSH) {
                _tr_align(s);
            } else { /* FULL_FLUSH or SYNC_FLUSH */
                _tr_stored_block(s, (char*)0, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if (flush == Z_FULL_FLUSH) {
                    CLEAR_HASH(s);             /* forget history */
                }
            }
            flush_pending(strm);
            if (strm->avail_out == 0) {
              s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
              return Z_OK;
            }
        }
    }
    Assert(strm->avail_out > 0, "bug2");

    if (flush != Z_FINISH) return Z_OK;
    if (s->noheader) return Z_STREAM_END;

    /* Write the zlib trailer (adler32) */
    putShortMSB(s, (uInt)(strm->adler >> 16));
    putShortMSB(s, (uInt)(strm->adler & 0xffff));
    flush_pending(strm);
    /* If avail_out is zero, the application will call deflate again
     * to flush the rest.
     */
    s->noheader = -1; /* write the trailer only once! */
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
}
 1171 
 1172 /* ========================================================================= */
 1173 int ZEXPORT
 1174 deflateEnd(z_streamp strm)
 1175 {
 1176         deflate_state*  s;
 1177     int status;
 1178 
 1179     if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
 1180 
 1181         s = (deflate_state*)strm->state;
 1182     status = s->status;
 1183     if (status != INIT_STATE && status != BUSY_STATE &&
 1184         status != FINISH_STATE) {
 1185       return Z_STREAM_ERROR;
 1186     }
 1187 
 1188     /* Deallocate in reverse order of allocations: */
 1189     TRY_FREE(strm, s->pending_buf);
 1190     TRY_FREE(strm, s->head);
 1191     TRY_FREE(strm, s->prev);
 1192     TRY_FREE(strm, s->window);
 1193 
 1194     ZFREE(strm, s);
 1195     strm->state = Z_NULL;
 1196 
 1197     return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
 1198 }
 1199 
 1200 /* =========================================================================
 1201  * Copy the source state to the destination state.
 1202  * To simplify the source, this is not supported for 16-bit MSDOS (which
 1203  * doesn't have enough memory anyway to duplicate compression states).
 1204  */
int ZEXPORT
deflateCopy(z_streamp dest, z_streamp source)
{
#ifdef MAXSEG_64K
    return Z_STREAM_ERROR;
#else
    deflate_state *ds;
    deflate_state *ss;
    ushf *overlay;


    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL) {
        return Z_STREAM_ERROR;
    }

    ss = (deflate_state*)source->state;

    /* Shallow-copy the public stream, then deep-copy the internal state. */
    *dest = *source;

    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
    if (ds == Z_NULL) return Z_MEM_ERROR;
    dest->state = (struct internal_state FAR *) ds;
    *ds = *ss;
    ds->strm = dest;

    /* Fresh buffers for the copy; sizes mirror deflateInit2_(). */
    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
    ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
    ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
    ds->pending_buf = (uchf *) overlay;

    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
        ds->pending_buf == Z_NULL) {
        deflateEnd (dest);
        return Z_MEM_ERROR;
    }
    /* following zmemcpy do not work for 16-bit MSDOS */
    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

    /* Re-point the overlaid buffers into the copy's own allocations. */
    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;

    ds->l_desc.dyn_tree = ds->dyn_ltree;
    ds->d_desc.dyn_tree = ds->dyn_dtree;
    ds->bl_desc.dyn_tree = ds->bl_tree;

    return Z_OK;
#endif
}
 1258 
 1259 /* ===========================================================================
 1260  * Read a new buffer from the current input stream, update the adler32
 1261  * and total number of bytes read.  All deflate() input goes through
 1262  * this function so some applications may wish to modify it to avoid
 1263  * allocating a large strm->next_in buffer and copying from it.
 1264  * (See also flush_pending()).
 1265  */
 1266 static int
 1267 read_buf(z_streamp strm, Bytef *buf, unsigned int size)
 1268 {
 1269     unsigned len = strm->avail_in;
 1270 
 1271     if (len > size) len = size;
 1272     if (len == 0) return 0;
 1273 
 1274     strm->avail_in  -= len;
 1275 
 1276     if (!((deflate_state*)strm->state)->noheader) {
 1277         strm->adler = adler32(strm->adler, strm->next_in, len);
 1278     }
 1279     zmemcpy(buf, strm->next_in, len);
 1280     strm->next_in  += len;
 1281     strm->total_in += len;
 1282 
 1283     return (int)len;
 1284 }
 1285 
 1286 /* ===========================================================================
 1287  * Initialize the "longest match" routines for a new zlib stream
 1288  */
static void
lm_init(deflate_state *s)
{
    s->window_size = (ulg)2L*s->w_size;

    CLEAR_HASH(s);

    /* Set the default configuration parameters:
     */
    s->max_lazy_match   = configuration_table[s->level].max_lazy;
    s->good_match       = configuration_table[s->level].good_length;
    s->nice_match       = configuration_table[s->level].nice_length;
    s->max_chain_length = configuration_table[s->level].max_chain;

    /* Reset the match-finder: no data in the window, no pending match. */
    s->strstart = 0;
    s->block_start = 0L;
    s->lookahead = 0;
    s->match_length = s->prev_length = MIN_MATCH-1;
    s->match_available = 0;
    s->ins_h = 0;
#ifdef ASMV
    match_init(); /* initialize the asm code */
#endif
}
 1313 
 1314 /* ===========================================================================
 1315  * Set match_start to the longest match starting at the given string and
 1316  * return its length. Matches shorter or equal to prev_length are discarded,
 1317  * in which case the result is equal to prev_length and match_start is
 1318  * garbage.
 1319  * IN assertions: cur_match is the head of the hash chain for the current
 1320  *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 1321  * OUT assertion: the match length is not greater than s->lookahead.
 1322  */
 1323 #ifndef ASMV
 1324 /* For 80x86 and 680x0, an optimized version will be provided in match.asm or
 1325  * match.S. The code will be functionally equivalent.
 1326  */
 1327 #ifndef FASTEST
static uInt
longest_match(deflate_state *s, IPos cur_match)
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    Bytef *scan = s->window + s->strstart; /* current string */
    Bytef *match;                       /* matched string */
    int len;                           /* length of current match */
    int best_len = s->prev_length;              /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    ush scan_start = *(ushf*)scan;
    ush scan_end   = *(ushf*)(scan+best_len-1);
#else
    Bytef *strend = s->window + s->strstart + MAX_MATCH;
    Byte scan_end1  = scan[best_len-1];
    Byte scan_end   = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2:
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

        if (match[best_len]   != scan_end  ||
            match[best_len-1] != scan_end1 ||
            *match            != *scan     ||
            *++match          != scan[1])      continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        Assert(*scan == *match, "match[2]?");

        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

        if (len > best_len) {
            s->match_start = cur_match;
            best_len = len;
            if (len >= nice_match) break;
#ifdef UNALIGNED_OK
            scan_end = *(ushf*)(scan+best_len-1);
#else
            scan_end1  = scan[best_len-1];
            scan_end   = scan[best_len];
#endif
        }
    } while ((cur_match = prev[cur_match & wmask]) > limit
             && --chain_length != 0);

    /* Never report a match longer than the available lookahead. */
    if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
    return s->lookahead;
}
 1464 
 1465 #else /* FASTEST */
 1466 /* ---------------------------------------------------------------------------
 1467  * Optimized version for level == 1 only
 1468  */
static uInt
longest_match(deflate_state *s, IPos cur_match)
{
    Bytef *scan = s->window + s->strstart; /* current string */
    Bytef *match;                       /* matched string */
    int len;                           /* length of current match */
    Bytef *strend = s->window + s->strstart + MAX_MATCH;

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    Assert(cur_match < s->strstart, "no future");

    match = s->window + cur_match;

    /* Return failure if the match length is less than 2:
     */
    if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;

    /* The check at best_len-1 can be removed because it will be made
     * again later. (This heuristic is not always a win.)
     * It is not necessary to compare scan[2] and match[2] since they
     * are always equal when the other bytes match, given that
     * the hash keys are equal and that HASH_BITS >= 8.
     */
    scan += 2, match += 2;
    Assert(*scan == *match, "match[2]?");

    /* We check for insufficient lookahead only every 8th comparison;
     * the 256th check will be made at strstart+258.
     */
    do {
    } while (*++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             scan < strend);

    Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

    len = MAX_MATCH - (int)(strend - scan);

    if (len < MIN_MATCH) return MIN_MATCH - 1;

    /* Single candidate only (no hash chains in FASTEST mode). */
    s->match_start = cur_match;
    return len <= s->lookahead ? len : s->lookahead;
}
 1520 #endif /* FASTEST */
 1521 #endif /* ASMV */
 1522 
 1523 #ifdef DEBUG_ZLIB
 1524 /* ===========================================================================
 1525  * Check that the match at match_start is indeed a match.
 1526  */
static void
check_match(deflate_state *s, IPos start, IPos match, int length)
{
    /* check that the match is indeed a match */
    if (zmemcmp(s->window + match,
                s->window + start, length) != EQUAL) {
        fprintf(stderr, " start %u, match %u, length %d\n",
                start, match, length);
        do {
            fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
        } while (--length != 0);
        z_error("invalid match");
    }
    /* At verbosity > 1, dump the match as [distance,length] plus its bytes. */
    if (z_verbose > 1) {
        fprintf(stderr,"\\[%d,%d]", start-match, length);
        do { putc(s->window[start++], stderr); } while (--length != 0);
    }
}
 1545 #else
 1546 #  define check_match(s, start, match, length)
 1547 #endif
 1548 
 1549 /* ===========================================================================
 1550  * Fill the window when the lookahead becomes insufficient.
 1551  * Updates strstart and lookahead.
 1552  *
 1553  * IN assertion: lookahead < MIN_LOOKAHEAD
 1554  * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 1555  *    At least one byte has been read, or avail_in == 0; reads are
 1556  *    performed for at least two bytes (required for the zip translate_eol
 1557  *    option -- not supported here).
 1558  */
static void
fill_window(deflate_state *s)
{
    unsigned n, m;
    Posf *p;
    unsigned more;    /* Amount of free space at the end of the window. */
    uInt wsize = s->w_size;

    /* Loop until we have MIN_LOOKAHEAD bytes of lookahead or the input
     * stream is exhausted.
     */
    do {
        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);

        /* Deal with !@#$% 64K limit: */
        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
            /* more == 0 here means the subtraction wrapped to 0 on a 16-bit
             * ulg; the whole window is actually free. */
            more = wsize;

        } else if (more == (unsigned)(-1)) {
            /* Very unlikely, but possible on 16 bit machine if strstart == 0
             * and lookahead == 1 (input done one byte at time)
             */
            more--;

        /* If the window is almost full and there is insufficient lookahead,
         * move the upper half to the lower one to make room in the upper half.
         */
        } else if (s->strstart >= wsize+MAX_DIST(s)) {

            zmemcpy(s->window, s->window+wsize, (unsigned)wsize);
            s->match_start -= wsize;
            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
            s->block_start -= (long) wsize;

            /* Slide the hash table (could be avoided with 32 bit values
               at the expense of memory usage). We slide even when level == 0
               to keep the hash table consistent if we switch back to level > 0
               later. (Using level 0 permanently is not an optimal usage of
               zlib, so we don't care about this pathological case.)
             */
            /* Rebase head[] entries by wsize; entries now out of the window
             * become NIL (empty chain). */
            n = s->hash_size;
            p = &s->head[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
            } while (--n);

            /* Same rebase for the prev[] chain links. */
            n = wsize;
#ifndef FASTEST
            p = &s->prev[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
                /* If n is not on any hash chain, prev[n] is garbage but
                 * its value will never be used.
                 */
            } while (--n);
#endif
            more += wsize;
        }
        if (s->strm->avail_in == 0) return;

        /* If there was no sliding:
         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
         *    more == window_size - lookahead - strstart
         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
         * => more >= window_size - 2*WSIZE + 2
         * In the BIG_MEM or MMAP case (not yet supported),
         *   window_size == input_size + MIN_LOOKAHEAD  &&
         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
         * Otherwise, window_size == 2*WSIZE so more >= 2.
         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
         */
        Assert(more >= 2, "more < 2");

        /* NOTE(review): read_buf presumably copies at most `more` bytes from
         * strm->next_in and returns the count — confirm against its body. */
        n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
        s->lookahead += n;

        /* Initialize the hash value now that we have some input: */
        if (s->lookahead >= MIN_MATCH) {
            s->ins_h = s->window[s->strstart];
            UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
            Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
        }
        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
         * but this is not important since only literal bytes will be emitted.
         */

    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
}
 1648 
 1649 /* ===========================================================================
 1650  * Flush the current block, with given end-of-file flag.
 1651  * IN assertion: strstart is set to the end of the current match.
 1652  */
/* Flush the block starting at block_start (or an empty/overflowed block when
 * block_start < 0), then move block_start up to strstart and push the pending
 * output to the stream. Does not touch control flow of the caller. */
#define FLUSH_BLOCK_ONLY(s, eof) { \
   _tr_flush_block(s, (s->block_start >= 0L ? \
                   (charf *)&s->window[(unsigned)s->block_start] : \
                   (charf *)Z_NULL), \
                (ulg)((long)s->strstart - s->block_start), \
                (eof)); \
   s->block_start = s->strstart; \
   flush_pending(s->strm); \
   Tracev((stderr,"[FLUSH]")); \
}

/* Same but force premature exit if necessary. */
/* CAUTION: expands to a `return` out of the *calling* function when the
 * output buffer fills up; only valid inside the deflate_* functions that
 * return a block_state. */
#define FLUSH_BLOCK(s, eof) { \
   FLUSH_BLOCK_ONLY(s, eof); \
   if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \
}
 1669 
 1670 /* ===========================================================================
 1671  * Copy without compression as much as possible from the input stream, return
 1672  * the current block state.
 1673  * This function does not insert new strings in the dictionary since
 1674  * uncompressible data is probably not useful. This function is used
 1675  * only for the level=0 compression option.
 1676  * NOTE: this function should be optimized to avoid extra copying from
 1677  * window to pending_buf.
 1678  */
/*
 * s:     deflate state.
 * flush: caller's flush mode; Z_FINISH marks the last block.
 * Returns need_more when input is exhausted under Z_NO_FLUSH, finish_started/
 * need_more from FLUSH_BLOCK when the output fills up, finish_done when the
 * final block was emitted, block_done otherwise.
 */
static block_state
deflate_stored(deflate_state *s, int flush)
{
    /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
     * to pending_buf_size, and each stored block has a 5 byte header:
     */
    ulg max_block_size = 0xffff;
    ulg max_start;

    if (max_block_size > s->pending_buf_size - 5) {
        max_block_size = s->pending_buf_size - 5;
    }

    /* Copy as much as possible from input to output: */
    for (;;) {
        /* Fill the window as much as possible: */
        if (s->lookahead <= 1) {

            Assert(s->strstart < s->w_size+MAX_DIST(s) ||
                   s->block_start >= (long)s->w_size, "slide too late");

            fill_window(s);
            if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more;

            if (s->lookahead == 0) break; /* flush the current block */
        }
        Assert(s->block_start >= 0L, "block gone");

        /* Consume the whole lookahead; data stays in the window until a
         * stored block is emitted. */
        s->strstart += s->lookahead;
        s->lookahead = 0;

        /* Emit a stored block if pending_buf will be full: */
        max_start = s->block_start + max_block_size;
        if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
            /* strstart == 0 is possible when wraparound on 16-bit machine */
            s->lookahead = (uInt)(s->strstart - max_start);
            s->strstart = (uInt)max_start;
            FLUSH_BLOCK(s, 0); /* may return early if avail_out hits 0 */
        }
        /* Flush if we may have to slide, otherwise block_start may become
         * negative and the data will be gone:
         */
        if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
            FLUSH_BLOCK(s, 0); /* may return early if avail_out hits 0 */
        }
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
 1728 
 1729 /* ===========================================================================
 1730  * Compress as much as possible from the input stream, return the current
 1731  * block state.
 1732  * This function does not perform lazy evaluation of matches and inserts
 1733  * new strings in the dictionary only for unmatched strings or for short
 1734  * matches. It is used only for the fast compression options.
 1735  */
 1736 static block_state
 1737 deflate_fast(deflate_state *s, int flush)
 1738 {
 1739     IPos hash_head = NIL; /* head of the hash chain */
 1740     int bflush;           /* set if current block must be flushed */
 1741 
 1742     for (;;) {
 1743         /* Make sure that we always have enough lookahead, except
 1744          * at the end of the input file. We need MAX_MATCH bytes
 1745          * for the next match, plus MIN_MATCH bytes to insert the
 1746          * string following the next match.
 1747          */
 1748         if (s->lookahead < MIN_LOOKAHEAD) {
 1749             fill_window(s);
 1750             if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
 1751                 return need_more;
 1752             }
 1753             if (s->lookahead == 0) break; /* flush the current block */
 1754         }
 1755 
 1756         /* Insert the string window[strstart .. strstart+2] in the
 1757          * dictionary, and set hash_head to the head of the hash chain:
 1758          */
 1759         if (s->lookahead >= MIN_MATCH) {
 1760             INSERT_STRING(s, s->strstart, hash_head);
 1761         }
 1762 
 1763         /* Find the longest match, discarding those <= prev_length.
 1764          * At this point we have always match_length < MIN_MATCH
 1765          */
 1766         if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
 1767             /* To simplify the code, we prevent matches with the string
 1768              * of window index 0 (in particular we have to avoid a match
 1769              * of the string with itself at the start of the input file).
 1770              */
 1771             if (s->strategy != Z_HUFFMAN_ONLY) {
 1772                 s->match_length = longest_match (s, hash_head);
 1773             }
 1774             /* longest_match() sets match_start */
 1775         }
 1776         if (s->match_length >= MIN_MATCH) {
 1777             check_match(s, s->strstart, s->match_start, s->match_length);
 1778 
 1779             _tr_tally_dist(s, s->strstart - s->match_start,
 1780                            s->match_length - MIN_MATCH, bflush);
 1781 
 1782             s->lookahead -= s->match_length;
 1783 
 1784             /* Insert new strings in the hash table only if the match length
 1785              * is not too large. This saves time but degrades compression.
 1786              */
 1787 #ifndef FASTEST
 1788             if (s->match_length <= s->max_insert_length &&
 1789                 s->lookahead >= MIN_MATCH) {
 1790                 s->match_length--; /* string at strstart already in hash table */
 1791                 do {
 1792                     s->strstart++;
 1793                     INSERT_STRING(s, s->strstart, hash_head);
 1794                     /* strstart never exceeds WSIZE-MAX_MATCH, so there are
 1795                      * always MIN_MATCH bytes ahead.
 1796                      */
 1797                 } while (--s->match_length != 0);
 1798                 s->strstart++; 
 1799             } else
 1800 #endif
 1801             {
 1802                 s->strstart += s->match_length;
 1803                 s->match_length = 0;
 1804                 s->ins_h = s->window[s->strstart];
 1805                 UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
 1806 #if MIN_MATCH != 3
 1807                 Call UPDATE_HASH() MIN_MATCH-3 more times
 1808 #endif
 1809                 /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
 1810                  * matter since it will be recomputed at next deflate call.
 1811                  */
 1812             }
 1813         } else {
 1814             /* No match, output a literal byte */
 1815             Tracevv((stderr,"%c", s->window[s->strstart]));
 1816             _tr_tally_lit (s, s->window[s->strstart], bflush);
 1817             s->lookahead--;
 1818             s->strstart++; 
 1819         }
 1820         if (bflush) FLUSH_BLOCK(s, 0);
 1821     }
 1822     FLUSH_BLOCK(s, flush == Z_FINISH);
 1823     return flush == Z_FINISH ? finish_done : block_done;
 1824 }
 1825 
 1826 /* ===========================================================================
 1827  * Same as above, but achieves better compression. We use a lazy
 1828  * evaluation for matches: a match is finally adopted only if there is
 1829  * no better match at the next window position.
 1830  */
 1831 static block_state
 1832 deflate_slow(deflate_state *s, int flush)
 1833 {
 1834     IPos hash_head = NIL;    /* head of hash chain */
 1835     int bflush;              /* set if current block must be flushed */
 1836 
 1837     /* Process the input block. */
 1838     for (;;) {
 1839         /* Make sure that we always have enough lookahead, except
 1840          * at the end of the input file. We need MAX_MATCH bytes
 1841          * for the next match, plus MIN_MATCH bytes to insert the
 1842          * string following the next match.
 1843          */
 1844         if (s->lookahead < MIN_LOOKAHEAD) {
 1845             fill_window(s);
 1846             if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
 1847                 return need_more;
 1848             }
 1849             if (s->lookahead == 0) break; /* flush the current block */
 1850         }
 1851 
 1852         /* Insert the string window[strstart .. strstart+2] in the
 1853          * dictionary, and set hash_head to the head of the hash chain:
 1854          */
 1855         if (s->lookahead >= MIN_MATCH) {
 1856             INSERT_STRING(s, s->strstart, hash_head);
 1857         }
 1858 
 1859         /* Find the longest match, discarding those <= prev_length.
 1860          */
 1861         s->prev_length = s->match_length, s->prev_match = s->match_start;
 1862         s->match_length = MIN_MATCH-1;
 1863 
 1864         if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
 1865             s->strstart - hash_head <= MAX_DIST(s)) {
 1866             /* To simplify the code, we prevent matches with the string
 1867              * of window index 0 (in particular we have to avoid a match
 1868              * of the string with itself at the start of the input file).
 1869              */
 1870             if (s->strategy != Z_HUFFMAN_ONLY) {
 1871                 s->match_length = longest_match (s, hash_head);
 1872             }
 1873             /* longest_match() sets match_start */
 1874 
 1875             if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
 1876                  (s->match_length == MIN_MATCH &&
 1877                   s->strstart - s->match_start > TOO_FAR))) {
 1878 
 1879                 /* If prev_match is also MIN_MATCH, match_start is garbage
 1880                  * but we will ignore the current match anyway.
 1881                  */
 1882                 s->match_length = MIN_MATCH-1;
 1883             }
 1884         }
 1885         /* If there was a match at the previous step and the current
 1886          * match is not better, output the previous match:
 1887          */
 1888         if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
 1889             uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
 1890             /* Do not insert strings in hash table beyond this. */
 1891 
 1892             check_match(s, s->strstart-1, s->prev_match, s->prev_length);
 1893 
 1894             _tr_tally_dist(s, s->strstart -1 - s->prev_match,
 1895                            s->prev_length - MIN_MATCH, bflush);
 1896 
 1897             /* Insert in hash table all strings up to the end of the match.
 1898              * strstart-1 and strstart are already inserted. If there is not
 1899              * enough lookahead, the last two strings are not inserted in
 1900              * the hash table.
 1901              */
 1902             s->lookahead -= s->prev_length-1;
 1903             s->prev_length -= 2;
 1904             do {
 1905                 if (++s->strstart <= max_insert) {
 1906                     INSERT_STRING(s, s->strstart, hash_head);
 1907                 }
 1908             } while (--s->prev_length != 0);
 1909             s->match_available = 0;
 1910             s->match_length = MIN_MATCH-1;
 1911             s->strstart++;
 1912 
 1913             if (bflush) FLUSH_BLOCK(s, 0);
 1914 
 1915         } else if (s->match_available) {
 1916             /* If there was no match at the previous position, output a
 1917              * single literal. If there was a match but the current match
 1918              * is longer, truncate the previous match to a single literal.
 1919              */
 1920             Tracevv((stderr,"%c", s->window[s->strstart-1]));
 1921             _tr_tally_lit(s, s->window[s->strstart-1], bflush);
 1922             if (bflush) {
 1923                 FLUSH_BLOCK_ONLY(s, 0);
 1924             }
 1925             s->strstart++;
 1926             s->lookahead--;
 1927             if (s->strm->avail_out == 0) return need_more;
 1928         } else {
 1929             /* There is no previous match to compare with, wait for
 1930              * the next step to decide.
 1931              */
 1932             s->match_available = 1;
 1933             s->strstart++;
 1934             s->lookahead--;
 1935         }
 1936     }
 1937     Assert (flush != Z_NO_FLUSH, "no flush?");
 1938     if (s->match_available) {
 1939         Tracevv((stderr,"%c", s->window[s->strstart-1]));
 1940         _tr_tally_lit(s, s->window[s->strstart-1], bflush);
 1941         s->match_available = 0;
 1942     }
 1943     FLUSH_BLOCK(s, flush == Z_FINISH);
 1944     return flush == Z_FINISH ? finish_done : block_done;
 1945 }
 1946 /* --- deflate.c */
 1947 
 1948 /* +++ trees.c */
 1949 /* trees.c -- output deflated data using Huffman coding
 1950  * Copyright (C) 1995-2002 Jean-loup Gailly
 1951  * For conditions of distribution and use, see copyright notice in zlib.h 
 1952  */
 1953 
 1954 /*
 1955  *  ALGORITHM
 1956  *
 1957  *      The "deflation" process uses several Huffman trees. The more
 1958  *      common source values are represented by shorter bit sequences.
 1959  *
 1960  *      Each code tree is stored in a compressed form which is itself
 1961  * a Huffman encoding of the lengths of all the code strings (in
 1962  * ascending order by source values).  The actual code strings are
 1963  * reconstructed from the lengths in the inflate process, as described
 1964  * in the deflate specification.
 1965  *
 1966  *  REFERENCES
 1967  *
 1968  *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 1969  *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 1970  *
 1971  *      Storer, James A.
 1972  *          Data Compression:  Methods and Theory, pp. 49-50.
 1973  *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
 1974  *
 1975  *      Sedgewick, R.
 1976  *          Algorithms, p290.
 1977  *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
 1978  */
 1979 
 1980 /* #define GEN_TREES_H */
 1981 
 1982 /* #include "deflate.h" */
 1983 
 1984 #ifdef DEBUG_ZLIB
 1985 #  include <ctype.h>
 1986 #endif
 1987 
 1988 /* ===========================================================================
 1989  * Constants
 1990  */
 1991 
#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10    17
/* repeat a zero length 3-10 times  (3 bits of repeat count) */

#define REPZ_11_138  18
/* repeat a zero length 11-138 times  (7 bits of repeat count) */

/* The three extra-bit tables below mirror the DEFLATE format (RFC 1951). */

static const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

static const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

static const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

static const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

#define Buf_size (8 * 2*sizeof(char))
/* Number of bits used within bi_buf. (bi_buf might be implemented on
 * more than 16 bits on some systems.)
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

#define DIST_CODE_LEN  512 /* see definition of array dist_code below */

#if defined(GEN_TREES_H) || !defined(STDC)
/* non ANSI compilers may not accept trees.h */

/* NOTE(review): in this branch the tables are plain pointers, presumably
 * allocated and filled at runtime by tr_static_init() — confirm there. */

static ct_data *static_ltree = Z_NULL;
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However
 * The codes 286 and 287 are needed to build a canonical tree (see _tr_init
 * below).
 */

static ct_data *static_dtree = Z_NULL;
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

uch *_dist_code = Z_NULL;
/* Distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

uch *_length_code = Z_NULL;
/* length code for each normalized match length (0 == MIN_MATCH) */

static int *base_length = Z_NULL;
/* First normalized length for each code (0 = MIN_MATCH) */

static int *base_dist = Z_NULL;
/* First normalized distance for each code (0 = distance of 1) */
 2062 
 2063 #else
/* +++ trees.h */
/* header created automatically with -DGEN_TREES_H */
/* Machine-generated tables: do not edit by hand; regenerate with
 * gen_trees_header() (see GEN_TREES_H below). Each ct_data entry is
 * {{code},{bit length}}. */

static const ct_data static_ltree[L_CODES+2] = {
{{ 12},{  8}}, {{140},{  8}}, {{ 76},{  8}}, {{204},{  8}}, {{ 44},{  8}},
{{172},{  8}}, {{108},{  8}}, {{236},{  8}}, {{ 28},{  8}}, {{156},{  8}},
{{ 92},{  8}}, {{220},{  8}}, {{ 60},{  8}}, {{188},{  8}}, {{124},{  8}},
{{252},{  8}}, {{  2},{  8}}, {{130},{  8}}, {{ 66},{  8}}, {{194},{  8}},
{{ 34},{  8}}, {{162},{  8}}, {{ 98},{  8}}, {{226},{  8}}, {{ 18},{  8}},
{{146},{  8}}, {{ 82},{  8}}, {{210},{  8}}, {{ 50},{  8}}, {{178},{  8}},
{{114},{  8}}, {{242},{  8}}, {{ 10},{  8}}, {{138},{  8}}, {{ 74},{  8}},
{{202},{  8}}, {{ 42},{  8}}, {{170},{  8}}, {{106},{  8}}, {{234},{  8}},
{{ 26},{  8}}, {{154},{  8}}, {{ 90},{  8}}, {{218},{  8}}, {{ 58},{  8}},
{{186},{  8}}, {{122},{  8}}, {{250},{  8}}, {{  6},{  8}}, {{134},{  8}},
{{ 70},{  8}}, {{198},{  8}}, {{ 38},{  8}}, {{166},{  8}}, {{102},{  8}},
{{230},{  8}}, {{ 22},{  8}}, {{150},{  8}}, {{ 86},{  8}}, {{214},{  8}},
{{ 54},{  8}}, {{182},{  8}}, {{118},{  8}}, {{246},{  8}}, {{ 14},{  8}},
{{142},{  8}}, {{ 78},{  8}}, {{206},{  8}}, {{ 46},{  8}}, {{174},{  8}},
{{110},{  8}}, {{238},{  8}}, {{ 30},{  8}}, {{158},{  8}}, {{ 94},{  8}},
{{222},{  8}}, {{ 62},{  8}}, {{190},{  8}}, {{126},{  8}}, {{254},{  8}},
{{  1},{  8}}, {{129},{  8}}, {{ 65},{  8}}, {{193},{  8}}, {{ 33},{  8}},
{{161},{  8}}, {{ 97},{  8}}, {{225},{  8}}, {{ 17},{  8}}, {{145},{  8}},
{{ 81},{  8}}, {{209},{  8}}, {{ 49},{  8}}, {{177},{  8}}, {{113},{  8}},
{{241},{  8}}, {{  9},{  8}}, {{137},{  8}}, {{ 73},{  8}}, {{201},{  8}},
{{ 41},{  8}}, {{169},{  8}}, {{105},{  8}}, {{233},{  8}}, {{ 25},{  8}},
{{153},{  8}}, {{ 89},{  8}}, {{217},{  8}}, {{ 57},{  8}}, {{185},{  8}},
{{121},{  8}}, {{249},{  8}}, {{  5},{  8}}, {{133},{  8}}, {{ 69},{  8}},
{{197},{  8}}, {{ 37},{  8}}, {{165},{  8}}, {{101},{  8}}, {{229},{  8}},
{{ 21},{  8}}, {{149},{  8}}, {{ 85},{  8}}, {{213},{  8}}, {{ 53},{  8}},
{{181},{  8}}, {{117},{  8}}, {{245},{  8}}, {{ 13},{  8}}, {{141},{  8}},
{{ 77},{  8}}, {{205},{  8}}, {{ 45},{  8}}, {{173},{  8}}, {{109},{  8}},
{{237},{  8}}, {{ 29},{  8}}, {{157},{  8}}, {{ 93},{  8}}, {{221},{  8}},
{{ 61},{  8}}, {{189},{  8}}, {{125},{  8}}, {{253},{  8}}, {{ 19},{  9}},
{{275},{  9}}, {{147},{  9}}, {{403},{  9}}, {{ 83},{  9}}, {{339},{  9}},
{{211},{  9}}, {{467},{  9}}, {{ 51},{  9}}, {{307},{  9}}, {{179},{  9}},
{{435},{  9}}, {{115},{  9}}, {{371},{  9}}, {{243},{  9}}, {{499},{  9}},
{{ 11},{  9}}, {{267},{  9}}, {{139},{  9}}, {{395},{  9}}, {{ 75},{  9}},
{{331},{  9}}, {{203},{  9}}, {{459},{  9}}, {{ 43},{  9}}, {{299},{  9}},
{{171},{  9}}, {{427},{  9}}, {{107},{  9}}, {{363},{  9}}, {{235},{  9}},
{{491},{  9}}, {{ 27},{  9}}, {{283},{  9}}, {{155},{  9}}, {{411},{  9}},
{{ 91},{  9}}, {{347},{  9}}, {{219},{  9}}, {{475},{  9}}, {{ 59},{  9}},
{{315},{  9}}, {{187},{  9}}, {{443},{  9}}, {{123},{  9}}, {{379},{  9}},
{{251},{  9}}, {{507},{  9}}, {{  7},{  9}}, {{263},{  9}}, {{135},{  9}},
{{391},{  9}}, {{ 71},{  9}}, {{327},{  9}}, {{199},{  9}}, {{455},{  9}},
{{ 39},{  9}}, {{295},{  9}}, {{167},{  9}}, {{423},{  9}}, {{103},{  9}},
{{359},{  9}}, {{231},{  9}}, {{487},{  9}}, {{ 23},{  9}}, {{279},{  9}},
{{151},{  9}}, {{407},{  9}}, {{ 87},{  9}}, {{343},{  9}}, {{215},{  9}},
{{471},{  9}}, {{ 55},{  9}}, {{311},{  9}}, {{183},{  9}}, {{439},{  9}},
{{119},{  9}}, {{375},{  9}}, {{247},{  9}}, {{503},{  9}}, {{ 15},{  9}},
{{271},{  9}}, {{143},{  9}}, {{399},{  9}}, {{ 79},{  9}}, {{335},{  9}},
{{207},{  9}}, {{463},{  9}}, {{ 47},{  9}}, {{303},{  9}}, {{175},{  9}},
{{431},{  9}}, {{111},{  9}}, {{367},{  9}}, {{239},{  9}}, {{495},{  9}},
{{ 31},{  9}}, {{287},{  9}}, {{159},{  9}}, {{415},{  9}}, {{ 95},{  9}},
{{351},{  9}}, {{223},{  9}}, {{479},{  9}}, {{ 63},{  9}}, {{319},{  9}},
{{191},{  9}}, {{447},{  9}}, {{127},{  9}}, {{383},{  9}}, {{255},{  9}},
{{511},{  9}}, {{  0},{  7}}, {{ 64},{  7}}, {{ 32},{  7}}, {{ 96},{  7}},
{{ 16},{  7}}, {{ 80},{  7}}, {{ 48},{  7}}, {{112},{  7}}, {{  8},{  7}},
{{ 72},{  7}}, {{ 40},{  7}}, {{104},{  7}}, {{ 24},{  7}}, {{ 88},{  7}},
{{ 56},{  7}}, {{120},{  7}}, {{  4},{  7}}, {{ 68},{  7}}, {{ 36},{  7}},
{{100},{  7}}, {{ 20},{  7}}, {{ 84},{  7}}, {{ 52},{  7}}, {{116},{  7}},
{{  3},{  8}}, {{131},{  8}}, {{ 67},{  8}}, {{195},{  8}}, {{ 35},{  8}},
{{163},{  8}}, {{ 99},{  8}}, {{227},{  8}}
};

/* The static distance tree: all D_CODES codes use 5 bits. */
static const ct_data static_dtree[D_CODES] = {
{{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}},
{{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}},
{{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}},
{{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}},
{{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}},
{{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}}
};

/* Distance code for each distance: first 256 entries index distances 1..256
 * directly, last 256 entries index the top 8 bits of 15-bit distances. */
const uch _dist_code[DIST_CODE_LEN] = {
 0,  1,  2,  3,  4,  4,  5,  5,  6,  6,  6,  6,  7,  7,  7,  7,  8,  8,  8,  8,
 8,  8,  8,  8,  9,  9,  9,  9,  9,  9,  9,  9, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,  0,  0, 16, 17,
18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
};

/* Length code for each normalized match length (0 == MIN_MATCH). */
const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {
 0,  1,  2,  3,  4,  5,  6,  7,  8,  8,  9,  9, 10, 10, 11, 11, 12, 12, 12, 12,
13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,
19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
};

/* First normalized length for each length code (0 == MIN_MATCH). */
static const int base_length[LENGTH_CODES] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
64, 80, 96, 112, 128, 160, 192, 224, 0
};

/* First normalized distance for each distance code (0 == distance of 1). */
static const int base_dist[D_CODES] = {
    0,     1,     2,     3,     4,     6,     8,    12,    16,    24,
   32,    48,    64,    96,   128,   192,   256,   384,   512,   768,
 1024,  1536,  2048,  3072,  4096,  6144,  8192, 12288, 16384, 24576
};

/* --- trees.h */
#endif /* GEN_TREES_H || !STDC */
 2195 
/* Immutable description of one of the three fixed Huffman trees. */
struct static_tree_desc_s {
    const ct_data *static_tree;  /* static tree or NULL */
    const intf *extra_bits;      /* extra bits for each code or NULL */
    int     extra_base;          /* base index for extra_bits */
    int     elems;               /* max number of elements in the tree */
    int     max_length;          /* max bit length for the codes */
};

/* NOTE(review): static_tree is left NULL here (unlike stock zlib, which
 * points it at static_ltree/static_dtree); presumably tr_static_init()
 * fills it in at runtime — confirm against tr_static_init. */
static static_tree_desc  static_l_desc =
{NULL, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

static static_tree_desc  static_d_desc =
{NULL, extra_dbits, 0,          D_CODES, MAX_BITS};

/* Bit-length tree has no static code table at all. */
static static_tree_desc  static_bl_desc =
{(const ct_data *)0, extra_blbits, 0,   BL_CODES, MAX_BL_BITS};
 2212 
/* ===========================================================================
 * Local (static) routines in this file.
 * OF() is the zlib prototype macro (expands the parameter list on ANSI
 * compilers, drops it on pre-ANSI ones).
 */

static int tr_static_init  OF((z_streamp z));
static void init_block     OF((deflate_state *s));
static void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
static void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
static void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
static void build_tree     OF((deflate_state *s, tree_desc *desc));
static void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
static void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
static int  build_bl_tree  OF((deflate_state *s));
static void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
                              int blcodes));
static void compress_block OF((deflate_state *s, ct_data *ltree,
                              ct_data *dtree));
static void set_data_type  OF((deflate_state *s));
static unsigned bi_reverse OF((unsigned value, int length));
static void bi_windup      OF((deflate_state *s));
static void bi_flush       OF((deflate_state *s));
static void copy_block     OF((deflate_state *s, charf *buf, unsigned len,
                              int header));

#ifdef GEN_TREES_H
static void gen_trees_header OF((void));
#endif
 2240 
#ifndef DEBUG_ZLIB
#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
   /* Send a code of the given tree. c and tree must not have side effects */

#else /* DEBUG_ZLIB */
   /* Debug variant: additionally trace every code emitted when z_verbose>2. */
#  define send_code(s, c, tree) \
     { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
       send_bits(s, tree[c].Code, tree[c].Len); }
#endif
 2250 
 2251 /* ===========================================================================
 2252  * Output a short LSB first on the stream.
 2253  * IN assertion: there is enough room in pendingBuf.
 2254  */
 2255 #define put_short(s, w) { \
 2256     put_byte(s, (uch)((w) & 0xff)); \
 2257     put_byte(s, (uch)((ush)(w) >> 8)); \
 2258 }
 2259 
/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 * Bits are accumulated LSB-first in the 16-bit buffer s->bi_buf; s->bi_valid
 * counts how many of its bits are in use. When the new bits do not fit, the
 * full buffer is flushed with put_short and the leftover high bits of value
 * are carried into the refreshed buffer.
 */
#ifdef DEBUG_ZLIB
static void send_bits      OF((deflate_state *s, int value, int length));

static void send_bits(deflate_state *s, int value, int length)
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (value << s->bi_valid);
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !DEBUG_ZLIB */

/* Release build: same logic as the debug function above, as a macro. */
#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = value;\
    s->bi_buf |= (val << s->bi_valid);\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* DEBUG_ZLIB */
 2303 
 2304 
 2305 #ifndef MAX
 2306 #define MAX(a,b) (a >= b ? a : b)
 2307 #endif
 2308 /* the arguments must not have side effects */
 2309 
/* Aggregate holding the formerly file-static tables so they can be
 * heap-allocated once via ZALLOC in tr_static_init() instead of living
 * in writable static storage. */
typedef struct {
        ct_data static_ltree[L_CODES+2];
        ct_data static_dtree[D_CODES];
        uch             _dist_code[DIST_CODE_LEN];
        uch             _length_code[MAX_MATCH-MIN_MATCH+1];
        int             base_length[LENGTH_CODES];
        int             base_dist[D_CODES];
} __used_to_be_static;

#if defined(GEN_TREES_H) || !defined(STDC)
/* Lazily allocated by tr_static_init(); Z_NULL until first use. */
static __used_to_be_static      *static_storage = Z_NULL;
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
 2322 
/* ===========================================================================
 * Initialize the various 'constant' tables.
 * Returns Z_OK on success, or Z_MEM_ERROR if the one-time table storage
 * cannot be allocated. Idempotent: after the first successful call the
 * function returns immediately.
 */
static int tr_static_init(
        z_streamp       z)
{
#if defined(GEN_TREES_H) || !defined(STDC)
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return Z_OK;
    
    /* allocate storage for static structures */
    if (static_storage == Z_NULL) {
        static_storage = (__used_to_be_static*)ZALLOC(z, 1, sizeof(__used_to_be_static));
        if (static_storage == Z_NULL)
                return Z_MEM_ERROR;
    }
    
    /* Point the table names at the freshly allocated storage. */
    static_ltree = static_storage->static_ltree;
    static_dtree = static_storage->static_dtree;
    _dist_code = static_storage->_dist_code;
    _length_code = static_storage->_length_code;
    base_length = static_storage->base_length;
    base_dist = static_storage->base_dist;
    
    /* For some embedded targets, global variables are not initialized: */
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    /* dist counts the 256 entries written at index 256.., hence the message
     * refers to _dist_code index 256+dist reaching 512. */
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

#  ifdef GEN_TREES_H
    gen_trees_header();
#  endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
    return Z_OK;
}
 2421 
 2422 /* ===========================================================================
 2423  * Genererate the file trees.h describing the static trees.
 2424  */
 2425 #ifdef GEN_TREES_H
 2426 #  ifndef DEBUG_ZLIB
 2427 #    include <stdio.h>
 2428 #  endif
 2429 
 2430 #  define SEPARATOR(i, last, width) \
 2431       ((i) == (last)? "\n};\n\n" :    \
 2432        ((i) % (width) == (width)-1 ? ",\n" : ", "))
 2433 
 2434 void gen_trees_header()
 2435 {
 2436     FILE *header = fopen("trees.h", "w");
 2437     int i;
 2438 
 2439     Assert (header != NULL, "Can't open trees.h");
 2440     fprintf(header,
 2441             "/* header created automatically with -DGEN_TREES_H */\n\n");
 2442 
 2443     fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
 2444     for (i = 0; i < L_CODES+2; i++) {
 2445         fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
 2446                 static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
 2447     }
 2448 
 2449     fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
 2450     for (i = 0; i < D_CODES; i++) {
 2451         fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
 2452                 static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
 2453     }
 2454 
 2455     fprintf(header, "const uch _dist_code[DIST_CODE_LEN] = {\n");
 2456     for (i = 0; i < DIST_CODE_LEN; i++) {
 2457         fprintf(header, "%2u%s", _dist_code[i],
 2458                 SEPARATOR(i, DIST_CODE_LEN-1, 20));
 2459     }
 2460 
 2461     fprintf(header, "const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
 2462     for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
 2463         fprintf(header, "%2u%s", _length_code[i],
 2464                 SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
 2465     }
 2466 
 2467     fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
 2468     for (i = 0; i < LENGTH_CODES; i++) {
 2469         fprintf(header, "%1u%s", base_length[i],
 2470                 SEPARATOR(i, LENGTH_CODES-1, 20));
 2471     }
 2472 
 2473     fprintf(header, "local const int base_dist[D_CODES] = {\n");
 2474     for (i = 0; i < D_CODES; i++) {
 2475         fprintf(header, "%5u%s", base_dist[i],
 2476                 SEPARATOR(i, D_CODES-1, 10));
 2477     }
 2478 
 2479     fclose(header);
 2480 }
 2481 #endif /* GEN_TREES_H */
 2482 
/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 * Wires the three dynamic tree descriptors to their static counterparts,
 * clears the bit buffer, and resets per-block state via init_block().
 */
static void
_tr_init(deflate_state *s)
{
    /* NOTE(review): tr_static_init() can return Z_MEM_ERROR, which is
     * silently dropped here because _tr_init() is void — confirm callers
     * cannot hit the allocation-failure path unchecked. */
    tr_static_init(s->strm);

    s->l_desc.dyn_tree = s->dyn_ltree;
    s->l_desc.stat_desc = &static_l_desc;

    s->d_desc.dyn_tree = s->dyn_dtree;
    s->d_desc.stat_desc = &static_d_desc;

    s->bl_desc.dyn_tree = s->bl_tree;
    s->bl_desc.stat_desc = &static_bl_desc;

    s->bi_buf = 0;
    s->bi_valid = 0;
    s->last_eob_len = 8; /* enough lookahead for inflate */
#ifdef DEBUG_ZLIB
    s->compressed_len = 0L;
    s->bits_sent = 0L;
#endif

    /* Initialize the first block of the first file: */
    init_block(s);
}
 2511 
 2512 /* ===========================================================================
 2513  * Initialize a new block.
 2514  */
 2515 static void
 2516 init_block(deflate_state *s)
 2517 {
 2518     int n; /* iterates over tree elements */
 2519 
 2520     /* Initialize the trees. */
 2521     for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
 2522     for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
 2523     for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
 2524 
 2525     s->dyn_ltree[END_BLOCK].Freq = 1;
 2526     s->opt_len = s->static_len = 0L;
 2527     s->last_lit = s->matches = 0;
 2528 }
 2529 
#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */


/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}

/* ===========================================================================
 * Compare two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
   (tree[n].Freq < tree[m].Freq || \
   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
 2552 
/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 * ct_data *tree;  the tree to restore
 * int k;               node to move down
 */
static void
pqdownheap(deflate_state *s, ct_data *tree, int k)
{
    int v = s->heap[k];      /* node being sifted down */
    int j = k << 1;  /* left son of k */
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
        if (smaller(tree, v, s->heap[j], s->depth)) break;

        /* Exchange v with the smallest son */
        s->heap[k] = s->heap[j];  k = j;

        /* And continue down the tree, setting j to the left son of k */
        j <<= 1;
    }
    s->heap[k] = v;
}
 2583 
/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
static void
gen_bitlen(deflate_state *s, tree_desc *desc)
{
    ct_data *tree        = desc->dyn_tree;
    int max_code         = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const intf *extra    = desc->stat_desc->extra_bits;
    int base             = desc->stat_desc->extra_base;
    int max_length       = desc->stat_desc->max_length;
    int h;              /* heap index */
    int n, m;           /* iterate over the tree elements */
    int bits;           /* bit length */
    int xbits;          /* extra bits */
    ush f;              /* frequency */
    int overflow = 0;   /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    /* Walk nodes from the root down; a node's length is its father's + 1,
     * clamped to max_length (counting how many codes overflowed). */
    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (bits + xbits);
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Trace((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue; /* internal node: no code assigned */
            if (tree[m].Len != (unsigned) bits) {
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((long)bits - (long)tree[m].Len)
                              *(long)tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}
 2671 
/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 *
 * ct_data *tree;             the tree to decorate
 * int max_code;              largest code with non zero frequency
 * ushf *bl_count;            number of codes at each bit length
 */
static void
gen_codes(ct_data *tree, int max_code, ushf *bl_count)
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    ush code = 0;              /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0;  n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits (DEFLATE transmits codes MSB-first) */
        tree[n].Code = bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}
 2715 
/* ===========================================================================
 * Construct one Huffman tree and assigns the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
static void
build_tree(deflate_state *s, tree_desc *desc)
{
    ct_data *tree         = desc->dyn_tree;
    const ct_data *stree  = desc->stat_desc->static_tree;
    int elems             = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}
 2805 
/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree. Runs of equal lengths are accounted as the DEFLATE
 * repeat codes REP_3_6, REPZ_3_10 and REPZ_11_138; send_tree() must emit
 * the exact same run decisions.
 *
 * ct_data *tree;   the tree to be scanned
 * int max_code;    and its largest code of non zero frequency
 */
static void
scan_tree(deflate_state *s, ct_data *tree, int max_code)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            /* run too short for a repeat code: count each length directly */
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;
        } else {
            s->bl_tree[REPZ_11_138].Freq++;
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
 2851 
/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree. Mirrors the run-length decisions made by scan_tree(), so the
 * two functions must be kept in sync.
 *
 * ct_data *tree; the tree to be scanned
 * int max_code;       and its largest code of non zero frequency
 */
static void
send_tree(deflate_state *s, ct_data *tree, int max_code)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */  /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            /* run too short for a repeat code: emit each length directly */
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
 2903 
 2904 /* ===========================================================================
 2905  * Construct the Huffman tree for the bit lengths and return the index in
 2906  * bl_order of the last bit length code to send.
 2907  */
 2908 static int
 2909 build_bl_tree(deflate_state *s)
 2910 {
 2911     int max_blindex;  /* index of last bit length code of non zero freq */
 2912 
 2913     /* Determine the bit length frequencies for literal and distance trees */
 2914     scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
 2915     scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
 2916 
 2917     /* Build the bit length tree: */
 2918     build_tree(s, (tree_desc *)(&(s->bl_desc)));
 2919     /* opt_len now includes the length of the tree representations, except
 2920      * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
 2921      */
 2922 
 2923     /* Determine the number of bit length codes to send. The pkzip format
 2924      * requires that at least 4 bit length codes be sent. (appnote.txt says
 2925      * 3 but the actual value used is 4.)
 2926      */
 2927     for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
 2928         if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
 2929     }
 2930     /* Update opt_len to include the bit length tree and counts */
 2931     s->opt_len += 3*(max_blindex+1) + 5+5+4;
 2932     Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
 2933             s->opt_len, s->static_len));
 2934 
 2935     return max_blindex;
 2936 }
 2937 
/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
static void
send_all_trees(deflate_state *s, int lcodes, int dcodes, int blcodes)
{
    int rank;                    /* index in bl_order */

    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
    send_bits(s, dcodes-1,   5);
    send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
    /* Bit length code lengths are sent in the fixed bl_order permutation. */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}
 2967 
 2968 /* ===========================================================================
 2969  * Send a stored block
 2970  *
 2971  * charf *buf;       input block
 2972  * ulg stored_len;   length of input block
 2973  * int eof;          true if this is the last block for a file
 2974  */
 2975 static void
 2976 _tr_stored_block(deflate_state *s, charf *buf, ulg stored_len, int eof)
 2977 {
 2978     send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
 2979 #ifdef DEBUG_ZLIB
 2980     s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
 2981     s->compressed_len += (stored_len + 4) << 3;
 2982 #endif
 2983     copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
 2984 }
 2985 
 2986 /* ===========================================================================
 2987  * Send one empty static block to give enough lookahead for inflate.
 2988  * This takes 10 bits, of which 7 may remain in the bit buffer.
 2989  * The current inflate code requires 9 bits of lookahead. If the
 2990  * last two codes for the previous block (real code plus EOB) were coded
 2991  * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
 2992  * the last real code. In this case we send two empty static blocks instead
 2993  * of one. (There are no problems if the previous block is stored or fixed.)
 2994  * To simplify the code, we assume the worst case of last real code encoded
 2995  * on one bit only.
 2996  */
 2997 static void
 2998 _tr_align(deflate_state *s)
 2999 {
 3000     send_bits(s, STATIC_TREES<<1, 3);
 3001     send_code(s, END_BLOCK, static_ltree);
 3002 #ifdef DEBUG_ZLIB
 3003     s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
 3004 #endif
 3005     bi_flush(s);
 3006     /* Of the 10 bits for the empty block, we have already sent
 3007      * (10 - bi_valid) bits. The lookahead for the last real code (before
 3008      * the EOB of the previous block) was thus at least one plus the length
 3009      * of the EOB plus what we have just sent of the empty static block.
 3010      */
 3011     if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
 3012         send_bits(s, STATIC_TREES<<1, 3);
 3013         send_code(s, END_BLOCK, static_ltree);
 3014 #ifdef DEBUG_ZLIB
 3015         s->compressed_len += 10L;
 3016 #endif
 3017         bi_flush(s);
 3018     }
 3019     s->last_eob_len = 7;
 3020 }
 3021 
 3022 /* ===========================================================================
 3023  * Determine the best encoding for the current block: dynamic trees, static
 3024  * trees or store, and output the encoded block to the zip file.
 3025  *
 3026  * charf *buf;       input block, or NULL if too old
 3027  * ulg stored_len;   length of input block
 3028  * int eof;          true if this is the last block for a file
 3029  */
 3030 static void
 3031 _tr_flush_block(deflate_state *s, charf *buf, ulg stored_len, int eof)
 3032 {
 3033     ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
 3034     int max_blindex = 0;  /* index of last bit length code of non zero freq */
 3035 
 3036     /* Build the Huffman trees unless a stored block is forced */
 3037     if (s->level > 0) {
 3038 
 3039          /* Check if the file is ascii or binary */
 3040         if (s->data_type == Z_UNKNOWN) set_data_type(s);
 3041 
 3042         /* Construct the literal and distance trees */
 3043         build_tree(s, (tree_desc *)(&(s->l_desc)));
 3044         Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
 3045                 s->static_len));
 3046 
 3047         build_tree(s, (tree_desc *)(&(s->d_desc)));
 3048         Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
 3049                 s->static_len));
 3050         /* At this point, opt_len and static_len are the total bit lengths of
 3051          * the compressed block data, excluding the tree representations.
 3052          */
 3053 
 3054         /* Build the bit length tree for the above two trees, and get the index
 3055          * in bl_order of the last bit length code to send.
 3056          */
 3057         max_blindex = build_bl_tree(s);
 3058 
 3059         /* Determine the best encoding. Compute first the block length in bytes*/
 3060         opt_lenb = (s->opt_len+3+7)>>3;
 3061         static_lenb = (s->static_len+3+7)>>3;
 3062 
 3063         Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
 3064                 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
 3065                 s->last_lit));
 3066 
 3067         if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
 3068 
 3069     } else {
 3070         Assert(buf != (char*)0, "lost buf");
 3071         opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
 3072     }
 3073 
 3074 #ifdef FORCE_STORED
 3075     if (buf != (char*)0) { /* force stored block */
 3076 #else
 3077     if (stored_len+4 <= opt_lenb && buf != (char*)0) {
 3078                        /* 4: two words for the lengths */
 3079 #endif
 3080         /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
 3081          * Otherwise we can't have processed more than WSIZE input bytes since
 3082          * the last block flush, because compression would have been
 3083          * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
 3084          * transform a block into a stored block.
 3085          */
 3086         _tr_stored_block(s, buf, stored_len, eof);
 3087 
 3088 #ifdef FORCE_STATIC
 3089     } else if (static_lenb >= 0) { /* force static trees */
 3090 #else
 3091     } else if (static_lenb == opt_lenb) {
 3092 #endif
 3093         send_bits(s, (STATIC_TREES<<1)+eof, 3);
 3094         compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
 3095 #ifdef DEBUG_ZLIB
 3096         s->compressed_len += 3 + s->static_len;
 3097 #endif
 3098     } else {
 3099         send_bits(s, (DYN_TREES<<1)+eof, 3);
 3100         send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
 3101                        max_blindex+1);
 3102         compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
 3103 #ifdef DEBUG_ZLIB
 3104         s->compressed_len += 3 + s->opt_len;
 3105 #endif
 3106     }
 3107     Assert (s->compressed_len == s->bits_sent, "bad compressed size");
 3108     /* The above check is made mod 2^32, for files larger than 512 MB
 3109      * and uLong implemented on 32 bits.
 3110      */
 3111     init_block(s);
 3112 
 3113     if (eof) {
 3114         bi_windup(s);
 3115 #ifdef DEBUG_ZLIB
 3116         s->compressed_len += 7;  /* align on byte boundary */
 3117 #endif
 3118     }
 3119     Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
 3120            s->compressed_len-7*eof));
 3121 }
 3122 
 3123 /* ===========================================================================
 3124  * Save the match info and tally the frequency counts. Return true if
 3125  * the current block must be flushed.
 3126  *
 3127  * unsigned dist;  distance of matched string
 3128  * unsigned lc;    match length-MIN_MATCH or unmatched char (if dist==0)
 3129  */
 3130 static int
 3131 _tr_tally(deflate_state *s, unsigned int dist, unsigned int lc)
 3132 {
 3133     s->d_buf[s->last_lit] = (ush)dist;
 3134     s->l_buf[s->last_lit++] = (uch)lc;
 3135     if (dist == 0) {
 3136         /* lc is the unmatched char */
 3137         s->dyn_ltree[lc].Freq++;
 3138     } else {
 3139         s->matches++;
 3140         /* Here, lc is the match length - MIN_MATCH */
 3141         dist--;             /* dist = match distance - 1 */
 3142         Assert((ush)dist < (ush)MAX_DIST(s) &&
 3143                (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
 3144                (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");
 3145 
 3146         s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
 3147         s->dyn_dtree[d_code(dist)].Freq++;
 3148     }
 3149 
 3150 #ifdef TRUNCATE_BLOCK
 3151     /* Try to guess if it is profitable to stop the current block here */
 3152     if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
 3153         /* Compute an upper bound for the compressed length */
 3154         ulg out_length = (ulg)s->last_lit*8L;
 3155         ulg in_length = (ulg)((long)s->strstart - s->block_start);
 3156         int dcode;
 3157         for (dcode = 0; dcode < D_CODES; dcode++) {
 3158             out_length += (ulg)s->dyn_dtree[dcode].Freq *
 3159                 (5L+extra_dbits[dcode]);
 3160         }
 3161         out_length >>= 3;
 3162         Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
 3163                s->last_lit, in_length, out_length,
 3164                100L - out_length*100L/in_length));
 3165         if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
 3166     }
 3167 #endif
 3168     return (s->last_lit == s->lit_bufsize-1);
 3169     /* We avoid equality with lit_bufsize because of wraparound at 64K
 3170      * on 16 bit machines and because stored blocks are restricted to
 3171      * 64K-1 bytes.
 3172      */
 3173 }
 3174 
 3175 /* ===========================================================================
 3176  * Send the block data compressed using the given Huffman trees
 3177  *
 3178  * ct_data *ltree; literal tree
 3179  * ct_data *dtree; distance tree
 3180  */
 3181 static void
 3182 compress_block(deflate_state *s, ct_data *ltree, ct_data *dtree)
 3183 {
 3184     unsigned dist;      /* distance of matched string */
 3185     int lc;             /* match length or unmatched char (if dist == 0) */
 3186     unsigned lx = 0;    /* running index in l_buf */
 3187     unsigned code;      /* the code to send */
 3188     int extra;          /* number of extra bits to send */
 3189 
 3190     if (s->last_lit != 0) do {
 3191         dist = s->d_buf[lx];
 3192         lc = s->l_buf[lx++];
 3193         if (dist == 0) {
 3194             send_code(s, lc, ltree); /* send a literal byte */
 3195             Tracecv(isgraph(lc), (stderr," '%c' ", lc));
 3196         } else {
 3197             /* Here, lc is the match length - MIN_MATCH */
 3198             code = _length_code[lc];
 3199             send_code(s, code+LITERALS+1, ltree); /* send the length code */
 3200             extra = extra_lbits[code];
 3201             if (extra != 0) {
 3202                 lc -= base_length[code];
 3203                 send_bits(s, lc, extra);       /* send the extra length bits */
 3204             }
 3205             dist--; /* dist is now the match distance - 1 */
 3206             code = d_code(dist);
 3207             Assert (code < D_CODES, "bad d_code");
 3208 
 3209             send_code(s, code, dtree);       /* send the distance code */
 3210             extra = extra_dbits[code];
 3211             if (extra != 0) {
 3212                 dist -= base_dist[code];
 3213                 send_bits(s, dist, extra);   /* send the extra distance bits */
 3214             }
 3215         } /* literal or match pair ? */
 3216 
 3217         /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
 3218         Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");
 3219 
 3220     } while (lx < s->last_lit);
 3221 
 3222     send_code(s, END_BLOCK, ltree);
 3223     s->last_eob_len = ltree[END_BLOCK].Len;
 3224 }
 3225 
 3226 /* ===========================================================================
 3227  * Set the data type to ASCII or BINARY, using a crude approximation:
 3228  * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
 3229  * IN assertion: the fields freq of dyn_ltree are set and the total of all
 3230  * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
 3231  */
 3232 static void
 3233 set_data_type(deflate_state *s)
 3234 {
 3235     int n = 0;
 3236     unsigned ascii_freq = 0;
 3237     unsigned bin_freq = 0;
 3238     while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
 3239     while (n < 128)    ascii_freq += s->dyn_ltree[n++].Freq;
 3240     while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
 3241     s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII);
 3242 }
 3243 
 3244 /* ===========================================================================
 3245  * Reverse the first len bits of a code, using straightforward code (a faster
 3246  * method would use a table)
 3247  * IN assertion: 1 <= len <= 15
 3248  */
 3249 static unsigned
 3250 bi_reverse(unsigned code, int len)
 3251 {
 3252     unsigned res = 0;
 3253     do {
 3254         res |= code & 1;
 3255         code >>= 1, res <<= 1;
 3256     } while (--len > 0);
 3257     return res >> 1;
 3258 }
 3259 
 3260 /* ===========================================================================
 3261  * Flush the bit buffer, keeping at most 7 bits in it.
 3262  */
 3263 static void
 3264 bi_flush(deflate_state *s)
 3265 {
 3266     if (s->bi_valid == 16) {
 3267         put_short(s, s->bi_buf);
 3268         s->bi_buf = 0;
 3269         s->bi_valid = 0;
 3270     } else if (s->bi_valid >= 8) {
 3271         put_byte(s, (Byte)s->bi_buf);
 3272         s->bi_buf >>= 8;
 3273         s->bi_valid -= 8;
 3274     }
 3275 }
 3276 
 3277 /* ===========================================================================
 3278  * Flush the bit buffer and align the output on a byte boundary
 3279  */
 3280 static void
 3281 bi_windup(deflate_state *s)
 3282 {
 3283     if (s->bi_valid > 8) {
 3284         put_short(s, s->bi_buf);
 3285     } else if (s->bi_valid > 0) {
 3286         put_byte(s, (Byte)s->bi_buf);
 3287     }
 3288     s->bi_buf = 0;
 3289     s->bi_valid = 0;
 3290 #ifdef DEBUG_ZLIB
 3291     s->bits_sent = (s->bits_sent+7) & ~7;
 3292 #endif
 3293 }
 3294 
 3295 /* ===========================================================================
 3296  * Copy a stored block, storing first the length and its
 3297  * one's complement if requested.
 3298  *
 3299  * charf    *buf;    the input data
 3300  * unsigned len;     its length
 3301  * int      header;  true if block header must be written
 3302  */
 3303 static void
 3304 copy_block(deflate_state *s, charf *buf, unsigned int len, int header)
 3305 {
 3306     bi_windup(s);        /* align on byte boundary */
 3307     s->last_eob_len = 8; /* enough lookahead for inflate */
 3308 
 3309     if (header) {
 3310         put_short(s, (ush)len);   
 3311         put_short(s, (ush)~len);
 3312 #ifdef DEBUG_ZLIB
 3313         s->bits_sent += 2*16;
 3314 #endif
 3315     }
 3316 #ifdef DEBUG_ZLIB
 3317     s->bits_sent += (ulg)len<<3;
 3318 #endif
 3319     while (len--) {
 3320         put_byte(s, *buf++);
 3321     }
 3322 }
 3323 /* --- trees.c */
 3324 
 3325 /* +++ inflate.c */
 3326 /* inflate.c -- zlib interface to inflate modules
 3327  * Copyright (C) 1995-2002 Mark Adler
 3328  * For conditions of distribution and use, see copyright notice in zlib.h 
 3329  */
 3330 
 3331 /* #include "zutil.h" */
 3332 
 3333 /* +++ infblock.h */
 3334 /* infblock.h -- header to use infblock.c
 3335  * Copyright (C) 1995-2002 Mark Adler
 3336  * For conditions of distribution and use, see copyright notice in zlib.h 
 3337  */
 3338 
 3339 /* WARNING: this file should *not* be used by applications. It is
 3340    part of the implementation of the compression library and is
 3341    subject to change. Applications should only use zlib.h.
 3342  */
 3343 
 3344 struct inflate_blocks_state;
 3345 typedef struct inflate_blocks_state FAR inflate_blocks_statef;
 3346 
 3347 static inflate_blocks_statef * inflate_blocks_new OF((
 3348     z_streamp z,
 3349     check_func c,               /* check function */
 3350     uInt w));                   /* window size */
 3351 
 3352 static int inflate_blocks OF((
 3353     inflate_blocks_statef *,
 3354     z_streamp ,
 3355     int));                      /* initial return code */
 3356 
 3357 static void inflate_blocks_reset OF((
 3358     inflate_blocks_statef *,
 3359     z_streamp ,
 3360     uLongf *));                  /* check value on output */
 3361 
 3362 static int inflate_blocks_free OF((
 3363     inflate_blocks_statef *,
 3364     z_streamp));
 3365 
 3366 static void inflate_set_dictionary OF((
 3367     inflate_blocks_statef *s,
 3368     const Bytef *d,  /* dictionary */
 3369     uInt  n));       /* dictionary length */
 3370 
 3371 static int inflate_blocks_sync_point OF((
 3372     inflate_blocks_statef *s));
 3373 /* --- infblock.h */
 3374 
#ifndef NO_DUMMY_DECL
/* Some compilers reportedly dislike structs that are only ever
 * forward-declared; give inflate_blocks_state a complete (dummy) body. */
struct inflate_blocks_state {int dummy;}; /* for buggy compilers */
#endif
 3378 
/* inflate private state */
typedef struct inflate_state {

  /* mode: which step of the zlib wrapper the decoder is in; inflate()
   * advances through these states in order (see its switch statement). */
  enum {
      METHOD,   /* waiting for method byte */
      FLAG,     /* waiting for flag byte */
      DICT4,    /* four dictionary check bytes to go */
      DICT3,    /* three dictionary check bytes to go */
      DICT2,    /* two dictionary check bytes to go */
      DICT1,    /* one dictionary check byte to go */
      DICT0,    /* waiting for inflateSetDictionary */
      BLOCKS,   /* decompressing blocks */
      CHECK4,   /* four check bytes to go */
      CHECK3,   /* three check bytes to go */
      CHECK2,   /* two check bytes to go */
      CHECK1,   /* one check byte to go */
      DONE,     /* finished check, done */
      BAD}      /* got an error--stay here */
    mode;               /* current inflate mode */

  /* mode dependent information (only the member matching mode is valid) */
  union {
    uInt method;        /* if FLAGS, method byte */
    struct {
      uLong was;                /* computed check value */
      uLong need;               /* stream check value */
    } check;            /* if CHECK, check values to compare */
    uInt marker;        /* if BAD, inflateSync's marker bytes count */
  } sub;        /* submode */

  /* mode independent information */
  int  nowrap;          /* flag for no wrapper */
  uInt wbits;           /* log2(window size)  (8..15, defaults to 15) */
  inflate_blocks_statef 
    *blocks;            /* current inflate_blocks state */

}inflate_state;
 3417 
 3418 
 3419 int ZEXPORT
 3420 inflateReset(z_streamp z)
 3421 {
 3422         inflate_state* s;
 3423         if (z == Z_NULL || z->state == Z_NULL)
 3424                 return Z_STREAM_ERROR;
 3425 
 3426         s = (inflate_state*)z->state;
 3427         z->total_in = z->total_out = 0;
 3428         z->msg = Z_NULL;
 3429         s->mode = s->nowrap ? BLOCKS : METHOD;
 3430         inflate_blocks_reset(s->blocks, z, Z_NULL);
 3431         Tracev((stderr, "inflate: reset\n"));
 3432         return Z_OK;
 3433 }
 3434 
 3435 
 3436 int ZEXPORT
 3437 inflateEnd(z_streamp z)
 3438 {
 3439   if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
 3440     return Z_STREAM_ERROR;
 3441   if (((inflate_state*)z->state)->blocks != Z_NULL)
 3442     inflate_blocks_free(((inflate_state*)z->state)->blocks, z);
 3443   ZFREE(z, z->state);
 3444   z->state = Z_NULL;
 3445   Tracev((stderr, "inflate: end\n"));
 3446   return Z_OK;
 3447 }
 3448 
 3449 
/* ===========================================================================
 * Initialize the inflate state.  w is the base-2 log of the window size
 * (8..15); a negative w selects the undocumented "nowrap" mode in which
 * no zlib header or checksum is expected.  ver and stream_size verify
 * that the caller was compiled against a compatible zlib.h.
 */
int ZEXPORT
inflateInit2_(z_streamp z, int w, const char *ver, int stream_size)
{
        inflate_state*  s;
  if (ver == Z_NULL || ver[0] != ZLIB_VERSION[0] ||
      stream_size != sizeof(z_stream))
      return Z_VERSION_ERROR;

  /* initialize state */
  if (z == Z_NULL)
    return Z_STREAM_ERROR;
  z->msg = Z_NULL;
#ifndef NO_ZCFUNCS
  /* supply the default allocators if the caller did not provide any */
  if (z->zalloc == Z_NULL)
  {
    z->zalloc = zcalloc;
    z->opaque = (voidpf)0;
  }
  if (z->zfree == Z_NULL) z->zfree = zcfree;
#endif
  if ((z->state = (struct internal_state FAR *)
       ZALLOC(z,1,sizeof(struct inflate_state))) == Z_NULL)
    return Z_MEM_ERROR;
  s = (inflate_state*)z->state;
  /* blocks must be NULL before any error path calls inflateEnd() below */
  s->blocks = Z_NULL;

  /* handle undocumented nowrap option (no zlib header or check) */
  s->nowrap = 0;
  if (w < 0)
  {
    w = - w;
    s->nowrap = 1;
  }

  /* set window size */
  if (w < 8 || w > 15)
  {
    inflateEnd(z);
    return Z_STREAM_ERROR;
  }
  s->wbits = (uInt)w;

  /* create inflate_blocks state; the Adler-32 check function is only
   * installed when a zlib wrapper is expected */
  if ((s->blocks =
      inflate_blocks_new(z, s->nowrap ? Z_NULL : adler32, (uInt)1 << w))
      == Z_NULL)
  {
    inflateEnd(z);
    return Z_MEM_ERROR;
  }
  Tracev((stderr, "inflate: allocated\n"));

  /* reset state */
  inflateReset(z);
  return Z_OK;
}
 3506 
 3507 
/* Convenience wrapper: initialize an inflate stream with the default
 * window size (DEF_WBITS). */
int ZEXPORT
inflateInit_(z_streamp z, const char *ver, int stream_size)
{
  return inflateInit2_(z, DEF_WBITS, ver, stream_size);
}
 3513 
 3514 
/* NEEDBYTE: suspend (return the current status r) if no input is left;
 * otherwise reset r to the default return value f before the byte is
 * consumed. */
#define NEEDBYTE {if(z->avail_in==0)return r;r=f;}
/* NEXTBYTE: consume one byte of input, updating avail_in/total_in. */
#define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)

/* ===========================================================================
 * Decompress as much input as possible.  The switch on s->mode walks the
 * zlib wrapper: header bytes, optional dictionary id, compressed blocks,
 * then the Adler-32 trailer.  Each case falls through to the next once
 * its bytes have been consumed.  Returns Z_OK, Z_STREAM_END, Z_NEED_DICT
 * or an error code.
 */
int ZEXPORT
inflate(z_streamp z, int f)
{
  int r;               /* current return status */
  uInt b;              /* header flag byte */
  inflate_state* s;

  if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL)
    return Z_STREAM_ERROR;
  /* Z_FINISH demands progress on every call, so its default status is
   * Z_BUF_ERROR; any other flush value defaults to Z_OK. */
  f = f == Z_FINISH ? Z_BUF_ERROR : Z_OK;
  r = Z_BUF_ERROR;
  s = (inflate_state*)z->state;
  while (1) switch (s->mode)
  {
    case METHOD:
      NEEDBYTE
      /* low nibble of the CMF byte must be the deflate method */
      if (((s->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED)
      {
        s->mode = BAD;
        z->msg = (char*)"unknown compression method";
        s->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      /* high nibble is log2(window size) - 8; reject if larger than ours */
      if ((s->sub.method >> 4) + 8 > s->wbits)
      {
        s->mode = BAD;
        z->msg = (char*)"invalid window size";
        s->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      s->mode = FLAG;
      /* fall through */
    case FLAG:
      NEEDBYTE
      b = NEXTBYTE;
      /* the two header bytes taken as a big-endian value must be a
       * multiple of 31 (the FCHECK field guarantees this) */
      if (((s->sub.method << 8) + b) % 31)
      {
        s->mode = BAD;
        z->msg = (char*)"incorrect header check";
        s->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      Tracev((stderr, "inflate: zlib header ok\n"));
      if (!(b & PRESET_DICT))
      {
        s->mode = BLOCKS;
        break;
      }
      s->mode = DICT4;
      /* fall through: read the 4-byte preset dictionary id */
    case DICT4:
      NEEDBYTE
      s->sub.check.need = (uLong)NEXTBYTE << 24;
      s->mode = DICT3;
      /* fall through */
    case DICT3:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE << 16;
      s->mode = DICT2;
      /* fall through */
    case DICT2:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE << 8;
      s->mode = DICT1;
      /* fall through */
    case DICT1:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE;
      /* expose the dictionary id so inflateSetDictionary can verify it */
      z->adler = s->sub.check.need;
      s->mode = DICT0;
      return Z_NEED_DICT;
    case DICT0:
      /* re-entered without inflateSetDictionary having been called */
      s->mode = BAD;
      z->msg = (char*)"need dictionary";
      s->sub.marker = 0;       /* can try inflateSync */
      return Z_STREAM_ERROR;
    case BLOCKS:
      r = inflate_blocks(s->blocks, z, r);
      if (r == Z_DATA_ERROR)
      {
        s->mode = BAD;
        s->sub.marker = 0;       /* can try inflateSync */
        break;
      }
      if (r == Z_OK)
        r = f;
      if (r != Z_STREAM_END)
        return r;
      /* the final block has been decoded: collect the computed check and
       * move on to the stored Adler-32 trailer (unless nowrap) */
      r = f;
      inflate_blocks_reset(s->blocks, z, &s->sub.check.was);
      if (s->nowrap)
      {
        s->mode = DONE;
        break;
      }
      s->mode = CHECK4;
      /* fall through: read the 4-byte Adler-32 trailer */
    case CHECK4:
      NEEDBYTE
      s->sub.check.need = (uLong)NEXTBYTE << 24;
      s->mode = CHECK3;
      /* fall through */
    case CHECK3:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE << 16;
      s->mode = CHECK2;
      /* fall through */
    case CHECK2:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE << 8;
      s->mode = CHECK1;
      /* fall through */
    case CHECK1:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE;

      if (s->sub.check.was != s->sub.check.need)
      {
        s->mode = BAD;
        z->msg = (char*)"incorrect data check";
        s->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      Tracev((stderr, "inflate: zlib check ok\n"));
      s->mode = DONE;
      /* fall through */
    case DONE:
      return Z_STREAM_END;
    case BAD:
      return Z_DATA_ERROR;
    default:
      return Z_STREAM_ERROR;
  }
#ifdef NEED_DUMMY_RETURN
  return Z_STREAM_ERROR;  /* Some dumb compilers complain without this */
#endif
}
 3645 
 3646 
/* ===========================================================================
 * Supply the preset dictionary announced in the stream header.  Only
 * legal while the stream is in mode DICT0, i.e. right after inflate()
 * returned Z_NEED_DICT.  The dictionary's Adler-32 must match the id
 * read from the stream; of an over-long dictionary only the most recent
 * window-full is kept.
 */
int ZEXPORT inflateSetDictionary(z, dictionary, dictLength)
z_streamp z;
const Bytef *dictionary;
uInt  dictLength;
{
  uInt length = dictLength;
  inflate_state* s;

  if (z == Z_NULL || z->state == Z_NULL || ((inflate_state*)z->state)->mode != DICT0)
    return Z_STREAM_ERROR;
  s = (inflate_state*)z->state;

  if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR;
  z->adler = 1L;    /* back to the Adler-32 initial value */

  /* keep only the last (1<<wbits)-1 bytes: older data could never be
   * reached by a back-reference */
  if (length >= ((uInt)1<<s->wbits))
  {
    length = (1<<s->wbits)-1;
    dictionary += dictLength - length;
  }
  inflate_set_dictionary(s->blocks, dictionary, length);
  s->mode = BLOCKS;
  return Z_OK;
}
 3671 
 3672 
/* ===========================================================================
 * Skip input until a full flush point -- the 00 00 FF FF byte pattern of
 * an empty stored block -- is found, then set up to resume inflation at
 * the following deflate block.  total_in/total_out survive the internal
 * inflateReset.  Returns Z_DATA_ERROR if no flush point was found in the
 * available input (the partial-match count is saved for the next call).
 */
int ZEXPORT inflateSync(z)
z_streamp z;
{
  uInt n;       /* number of bytes to look at */
  Bytef *p;     /* pointer to bytes */
  uInt m;       /* number of marker bytes found in a row */
  uLong r, w;   /* temporaries to save total_in and total_out */
  inflate_state* s;

  /* set up */
  if (z == Z_NULL || z->state == Z_NULL)
    return Z_STREAM_ERROR;
  s = (inflate_state*)z->state;
  if (s->mode != BAD)
  {
    s->mode = BAD;
    s->sub.marker = 0;
  }
  if ((n = z->avail_in) == 0)
    return Z_BUF_ERROR;
  p = z->next_in;
  m = s->sub.marker;   /* resume a partial match from a previous call */

  /* search for the 00 00 FF FF marker */
  while (n && m < 4)
  {
    static const Byte mark[4] = {0, 0, 0xff, 0xff};
    if (*p == mark[m])
      m++;
    else if (*p)
      m = 0;
    else
      m = 4 - m;   /* a stray zero can still start a new 00 00 prefix */
    p++, n--;
  }

  /* restore */
  z->total_in += p - z->next_in;
  z->next_in = p;
  z->avail_in = n;
  s->sub.marker = m;

  /* return no joy or set up to restart on a new block */
  if (m != 4)
    return Z_DATA_ERROR;
  r = z->total_in;  w = z->total_out;
  inflateReset(z);
  z->total_in = r;  z->total_out = w;
  s->mode = BLOCKS;
  return Z_OK;
}
 3724 
 3725 
 3726 /* Returns true if inflate is currently at the end of a block generated
 3727  * by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP
 3728  * implementation to provide an additional safety check. PPP uses Z_SYNC_FLUSH
 3729  * but removes the length bytes of the resulting empty stored block. When
 3730  * decompressing, PPP checks that at the end of input packet, inflate is
 3731  * waiting for these length bytes.
 3732  */
 3733 int ZEXPORT inflateSyncPoint(z)
 3734 z_streamp z;
 3735 {
 3736   if (z == Z_NULL || z->state == Z_NULL || ((inflate_state*)z->state)->blocks == Z_NULL)
 3737     return Z_STREAM_ERROR;
 3738   return inflate_blocks_sync_point(((inflate_state*)z->state)->blocks);
 3739 }
 3740 #undef NEEDBYTE
 3741 #undef NEXTBYTE
 3742 /* --- inflate.c */
 3743 
 3744 /* +++ infblock.c */
 3745 /* infblock.c -- interpret and process block types to last block
 3746  * Copyright (C) 1995-2002 Mark Adler
 3747  * For conditions of distribution and use, see copyright notice in zlib.h 
 3748  */
 3749 
 3750 /* #include "zutil.h" */
 3751 /* #include "infblock.h" */
 3752 
 3753 /* +++ inftrees.h */
 3754 /* inftrees.h -- header to use inftrees.c
 3755  * Copyright (C) 1995-2002 Mark Adler
 3756  * For conditions of distribution and use, see copyright notice in zlib.h 
 3757  */
 3758 
 3759 /* WARNING: this file should *not* be used by applications. It is
 3760    part of the implementation of the compression library and is
 3761    subject to change. Applications should only use zlib.h.
 3762  */
 3763 
/* Huffman code lookup table entry--this entry is four bytes for machines
   that have 16-bit pointers (e.g. PC's in the small or medium model).
   These tables are built by the inflate_trees_* routines declared below. */

typedef struct inflate_huft_s FAR inflate_huft;

struct inflate_huft_s {
  union {
    struct {
      Byte Exop;        /* number of extra bits or operation */
      Byte Bits;        /* number of bits in this code or subcode */
    } what;
    uInt pad;           /* pad structure to a power of 2 (4 bytes for */
  } word;               /*  16-bit, 8 bytes for 32-bit int's) */
  uInt base;            /* literal, length base, distance base,
                           or table offset */
};
 3780 
 3781 /* Maximum size of dynamic tree.  The maximum found in a long but non-
 3782    exhaustive search was 1004 huft structures (850 for length/literals
 3783    and 154 for distances, the latter actually the result of an
 3784    exhaustive search).  The actual maximum is not known, but the
 3785    value below is more than safe. */
 3786 #define MANY 1440
 3787 
/* Build the decoding tree for the 19 bit-length codes of a dynamic block. */
static int inflate_trees_bits OF((
    uIntf *,                    /* 19 code lengths */
    uIntf *,                    /* bits tree desired/actual depth */
    inflate_huft * FAR *,       /* bits tree result */
    inflate_huft *,             /* space for trees */
    z_streamp));                /* for messages */

/* Build the literal/length and distance trees for a dynamic block. */
static int inflate_trees_dynamic OF((
    uInt,                       /* number of literal/length codes */
    uInt,                       /* number of distance codes */
    uIntf *,                    /* that many (total) code lengths */
    uIntf *,                    /* literal desired/actual bit depth */
    uIntf *,                    /* distance desired/actual bit depth */
    inflate_huft * FAR *,       /* literal/length tree result */
    inflate_huft * FAR *,       /* distance tree result */
    inflate_huft *,             /* space for trees */
    z_streamp));                /* for messages */

/* Return the prebuilt trees and depths for a fixed-codes block. */
static int inflate_trees_fixed OF((
    uIntf *,                    /* literal desired/actual bit depth */
    uIntf *,                    /* distance desired/actual bit depth */
    inflate_huft * FAR *,       /* literal/length tree result */
    inflate_huft * FAR *,       /* distance tree result */
    z_streamp));                /* for memory allocation */
 3812 /* --- inftrees.h */
 3813 
 3814 /* +++ infcodes.h */
 3815 /* infcodes.h -- header to use infcodes.c
 3816  * Copyright (C) 1995-2002 Mark Adler
 3817  * For conditions of distribution and use, see copyright notice in zlib.h 
 3818  */
 3819 
 3820 /* WARNING: this file should *not* be used by applications. It is
 3821    part of the implementation of the compression library and is
 3822    subject to change. Applications should only use zlib.h.
 3823  */
 3824 
/* Opaque per-block codes-decoder state (defined in the infcodes.c section). */
struct inflate_codes_state;
typedef struct inflate_codes_state FAR inflate_codes_statef;

/* Allocate a codes decoder for the given trees and root-table depths. */
static inflate_codes_statef *inflate_codes_new OF((
    uInt, uInt,
    inflate_huft *, inflate_huft *,
    z_streamp ));

/* Decode literal/length/distance codes until input or window space runs out. */
static int inflate_codes OF((
    inflate_blocks_statef *,
    z_streamp ,
    int));

/* Release a codes-decoder state. */
static void inflate_codes_free OF((
    inflate_codes_statef *,
    z_streamp ));
 3841 
 3842 /* --- infcodes.h */
 3843 
 3844 /* +++ infutil.h */
 3845 /* infutil.h -- types and macros common to blocks and codes
 3846  * Copyright (C) 1995-2002 Mark Adler
 3847  * For conditions of distribution and use, see copyright notice in zlib.h 
 3848  */
 3849 
 3850 /* WARNING: this file should *not* be used by applications. It is
 3851    part of the implementation of the compression library and is
 3852    subject to change. Applications should only use zlib.h.
 3853  */
 3854 
 3855 #ifndef _INFUTIL_H
 3856 #define _INFUTIL_H
 3857 
/* States of the inflate block state machine.  Progression is roughly
   TYPE -> (LENS/STORED | TABLE/BTREE/DTREE | CODES) -> DRY -> DONEB;
   BADB is a terminal error state. */
typedef enum {
      TYPE,     /* get type bits (3, including end bit) */
      LENS,     /* get lengths for stored */
      STORED,   /* processing stored block */
      TABLE,    /* get table lengths */
      BTREE,    /* get bit lengths tree for a dynamic block */
      DTREE,    /* get length, distance trees for a dynamic block */
      CODES,    /* processing fixed or dynamic block */
      DRY,      /* output remaining window bytes */
      DONEB,     /* finished last block, done */
      BADB}      /* got a data error--stuck here */
inflate_block_mode;
 3870 
 3871 /* inflate blocks semi-private state */
struct inflate_blocks_state {

  /* mode */
  inflate_block_mode  mode;     /* current inflate_block mode */

  /* mode dependent information -- union discriminated by `mode` above */
  union {
    uInt left;          /* if STORED, bytes left to copy */
    struct {
      uInt table;               /* table lengths (14 bits) */
      uInt index;               /* index into blens (or border) */
      uIntf *blens;             /* bit lengths of codes */
      uInt bb;                  /* bit length tree depth */
      inflate_huft *tb;         /* bit length decoding tree */
    } trees;            /* if DTREE, decoding info for trees */
    struct {
      inflate_codes_statef 
         *codes;
    } decode;           /* if CODES, current state */
  } sub;                /* submode */
  uInt last;            /* true if this block is the last block */

  /* mode independent information */
  uInt bitk;            /* bits in bit buffer */
  uLong bitb;           /* bit buffer */
  inflate_huft *hufts;  /* single malloc for tree space */
  /* the window is used circularly: write chases read, wrapping at end */
  Bytef *window;        /* sliding window */
  Bytef *end;           /* one byte after sliding window */
  Bytef *read;          /* window read pointer */
  Bytef *write;         /* window write pointer */
  check_func checkfn;   /* check function */
  uLong check;          /* check on output */

};
 3906 
 3907 
/* defines for inflate input/output */
/*   update pointers and return */
/* NOTE: these macros operate on the conventional locals of
   inflate_blocks()/inflate_codes(): b/k = bit buffer and bit count,
   p/n = input pointer and bytes available, q/m = window write pointer
   and space left.  LEAVE (and NEEDBYTE/NEEDBITS/NEEDOUT via it) can
   `return` from the *enclosing function*, which is why they are bare
   brace blocks rather than do{}while(0) wrappers. */
#define UPDBITS {s->bitb=b;s->bitk=k;}
#define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
#define UPDOUT {s->write=q;}
#define UPDATE {UPDBITS UPDIN UPDOUT}
#define LEAVE {UPDATE return inflate_flush(s,z,r);}
/*   get bytes and bits */
#define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
#define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
#define NEXTBYTE (n--,*p++)
#define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
#define DUMPBITS(j) {b>>=(j);k-=(j);}
/*   output bytes */
#define WAVAIL (uInt)(q<s->read?s->read-q-1:s->end-q)
#define LOADOUT {q=s->write;m=(uInt)WAVAIL;}
#define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}}
#define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
#define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
#define OUTBYTE(a) {*q++=(Byte)(a);m--;}
/*   load local pointers */
#define LOAD {LOADIN LOADOUT}
 3930 
 3931 /* masks for lower bits (size given to avoid silly warnings with Visual C++) */
 3932 /* And'ing with mask[n] masks the lower n bits */
 3933 static uInt inflate_mask[17] = {
 3934     0x0000,
 3935     0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
 3936     0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
 3937 };
 3938 
/* copy as much as possible from the sliding window to the output area */
/* Returns the (possibly updated) status code r; used by the LEAVE/FLUSH
   macros above. */
static int inflate_flush OF((
    inflate_blocks_statef *,
    z_streamp ,
    int));

#ifndef NO_DUMMY_DECL
struct internal_state      {int dummy;}; /* for buggy compilers */
#endif
 3948 
 3949 #endif
 3950 /* --- infutil.h */
 3951 
 3952 #ifndef NO_DUMMY_DECL
 3953 struct inflate_codes_state {int dummy;}; /* for buggy compilers */
 3954 #endif
 3955 
/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits

/* Table for deflate from PKZIP's appnote.txt. */
/* Order in which the bit-length-code lengths of a dynamic block are
   transmitted (codes least likely to be used come last, so trailing
   zero lengths can be omitted from the stream). */
static const uInt border[] = { /* Order of the bit length code lengths */
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
 3963 
 3964 /*
 3965    Notes beyond the 1.93a appnote.txt:
 3966 
 3967    1. Distance pointers never point before the beginning of the output
 3968       stream.
 3969    2. Distance pointers can point back across blocks, up to 32k away.
 3970    3. There is an implied maximum of 7 bits for the bit length table and
 3971       15 bits for the actual data.
 3972    4. If only one code exists, then it is encoded using one bit.  (Zero
 3973       would be more efficient, but perhaps a little confusing.)  If two
 3974       codes exist, they are coded using one bit each (0 and 1).
 3975    5. There is no way of sending zero distance codes--a dummy must be
 3976       sent if there are none.  (History: a pre 2.0 version of PKZIP would
 3977       store blocks with no distance codes, but this was discovered to be
 3978       too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
 3979       zero distance codes, which is sent as one code of zero bits in
 3980       length.
 3981    6. There are up to 286 literal/length codes.  Code 256 represents the
 3982       end-of-block.  Note however that the static length tree defines
 3983       288 codes just to fill out the Huffman codes.  Codes 286 and 287
 3984       cannot be used though, since there is no length base or extra bits
      defined for them.  Similarly, there are up to 30 distance codes.
 3986       However, static trees define 32 codes (all 5 bits) to fill out the
 3987       Huffman codes, but the last two had better not show up in the data.
 3988    7. Unzip can check dynamic Huffman blocks for complete code sets.
 3989       The exception is that a single code would not be complete (see #4).
 3990    8. The five bits following the block type is really the number of
 3991       literal codes sent minus 257.
 3992    9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
 3993       (1+6+6).  Therefore, to output three times the length, you output
 3994       three codes (1+1+1), whereas to output four times the same length,
 3995       you only need two codes (1+3).  Hmm.
 3996   10. In the tree reconstruction algorithm, Code = Code + Increment
 3997       only if BitLength(i) is not zero.  (Pretty obvious.)
 3998   11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
 3999   12. Note: length code 284 can represent 227-258, but length code 285
 4000       really is 258.  The last length deserves its own, short code
 4001       since it gets used a lot in very redundant files.  The length
 4002       258 is special since 258 - 3 (the min match length) is 255.
 4003   13. The literal/length and distance code bit lengths are read as a
 4004       single stream of lengths.  It is possible (and advantageous) for
 4005       a repeat code (16, 17, or 18) to go across the boundary between
 4006       the two sets of lengths.
 4007  */
 4008 
 4009 
 4010 static void inflate_blocks_reset(s, z, c)
 4011 inflate_blocks_statef *s;
 4012 z_streamp z;
 4013 uLongf *c;
 4014 {
 4015   if (c != Z_NULL)
 4016     *c = s->check;
 4017   if (s->mode == BTREE || s->mode == DTREE)
 4018     ZFREE(z, s->sub.trees.blens);
 4019   if (s->mode == CODES)
 4020     inflate_codes_free(s->sub.decode.codes, z);
 4021   s->mode = TYPE;
 4022   s->bitk = 0;
 4023   s->bitb = 0;
 4024   s->read = s->write = s->window;
 4025   if (s->checkfn != Z_NULL)
 4026     z->adler = s->check = (*s->checkfn)(0L, (const Bytef *)Z_NULL, 0);
 4027   Tracev((stderr, "inflate:   blocks reset\n"));
 4028 }
 4029 
 4030 
 4031 static inflate_blocks_statef *inflate_blocks_new(z, c, w)
 4032 z_streamp z;
 4033 check_func c;
 4034 uInt w;
 4035 {
 4036   inflate_blocks_statef *s;
 4037 
 4038   if ((s = (inflate_blocks_statef *)ZALLOC
 4039        (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
 4040     return s;
 4041   if ((s->hufts =
 4042        (inflate_huft *)ZALLOC(z, sizeof(inflate_huft), MANY)) == Z_NULL)
 4043   {
 4044     ZFREE(z, s);
 4045     return Z_NULL;
 4046   }
 4047   if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
 4048   {
 4049     ZFREE(z, s->hufts);
 4050     ZFREE(z, s);
 4051     return Z_NULL;
 4052   }
 4053   s->end = s->window + w;
 4054   s->checkfn = c;
 4055   s->mode = TYPE;
 4056   Tracev((stderr, "inflate:   blocks allocated\n"));
 4057   inflate_blocks_reset(s, z, Z_NULL);
 4058   return s;
 4059 }
 4060 
 4061 
/*
 * Decompress deflate-format blocks: the central state machine of
 * inflate.  r carries the caller's current status code in, and the
 * returned value comes from inflate_flush() (via the LEAVE macro) or
 * directly from inflate_codes().  The function is fully resumable:
 * all progress lives in *s, so a later call continues exactly where
 * this one stopped when more input or output space becomes available.
 */
static int inflate_blocks(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt t;               /* temporary storage */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */

  /* copy input/output information to locals (UPDATE macro restores) */
  LOAD

  /* process input based on current state */
  while (1) switch (s->mode)
  {
    case TYPE:
      NEEDBITS(3)               /* 1 last-block bit + 2 block-type bits */
      t = (uInt)b & 7;
      s->last = t & 1;
      switch (t >> 1)
      {
        case 0:                         /* stored */
          Tracev((stderr, "inflate:     stored block%s\n",
                 s->last ? " (last)" : ""));
          DUMPBITS(3)
          t = k & 7;                    /* go to byte boundary */
          DUMPBITS(t)
          s->mode = LENS;               /* get length of stored block */
          break;
        case 1:                         /* fixed */
          Tracev((stderr, "inflate:     fixed codes block%s\n",
                 s->last ? " (last)" : ""));
          {
            uInt bl, bd;
            inflate_huft *tl, *td;

            inflate_trees_fixed(&bl, &bd, &tl, &td, z);
            s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
            if (s->sub.decode.codes == Z_NULL)
            {
              r = Z_MEM_ERROR;
              LEAVE
            }
          }
          DUMPBITS(3)
          s->mode = CODES;
          break;
        case 2:                         /* dynamic */
          Tracev((stderr, "inflate:     dynamic codes block%s\n",
                 s->last ? " (last)" : ""));
          DUMPBITS(3)
          s->mode = TABLE;
          break;
        case 3:                         /* illegal */
          DUMPBITS(3)
          s->mode = BADB;
          z->msg = (char*)"invalid block type";
          r = Z_DATA_ERROR;
          LEAVE
      }
      break;
    case LENS:
      NEEDBITS(32)              /* LEN and its one's complement NLEN */
      if ((((~b) >> 16) & 0xffff) != (b & 0xffff))
      {
        s->mode = BADB;
        z->msg = (char*)"invalid stored block lengths";
        r = Z_DATA_ERROR;
        LEAVE
      }
      s->sub.left = (uInt)b & 0xffff;
      b = k = 0;                      /* dump bits */
      Tracev((stderr, "inflate:       stored length %u\n", s->sub.left));
      s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE);
      break;
    case STORED:
      if (n == 0)
        LEAVE
      NEEDOUT
      t = s->sub.left;
      if (t > n) t = n;                 /* limit copy to available input */
      if (t > m) t = m;                 /* ... and to window space */
      zmemcpy(q, p, t);
      p += t;  n -= t;
      q += t;  m -= t;
      if ((s->sub.left -= t) != 0)
        break;
      Tracev((stderr, "inflate:       stored end, %lu total out\n",
              z->total_out + (q >= s->read ? q - s->read :
              (s->end - s->read) + (q - s->window))));
      s->mode = s->last ? DRY : TYPE;
      break;
    case TABLE:
      NEEDBITS(14)              /* 5 bits HLIT, 5 bits HDIST, 4 bits HCLEN */
      s->sub.trees.table = t = (uInt)b & 0x3fff;
#ifndef PKZIP_BUG_WORKAROUND
      if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
      {
        s->mode = BADB;
        z->msg = (char*)"too many length or distance symbols";
        r = Z_DATA_ERROR;
        LEAVE
      }
#endif
      t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f); /* total code lengths */
      if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
      {
        r = Z_MEM_ERROR;
        LEAVE
      }
      DUMPBITS(14)
      s->sub.trees.index = 0;
      Tracev((stderr, "inflate:       table sizes ok\n"));
      s->mode = BTREE;
      /* fall through */
    case BTREE:
      while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
      {
        NEEDBITS(3)
        s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
        DUMPBITS(3)
      }
      while (s->sub.trees.index < 19)
        s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
      s->sub.trees.bb = 7;
      t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
                             &s->sub.trees.tb, s->hufts, z);
      if (t != Z_OK)
      {
        r = t;
        if (r == Z_DATA_ERROR)
        {
          ZFREE(z, s->sub.trees.blens);
          s->mode = BADB;
        }
        LEAVE
      }
      s->sub.trees.index = 0;
      Tracev((stderr, "inflate:       bits tree ok\n"));
      s->mode = DTREE;
      /* fall through */
    case DTREE:
      while (t = s->sub.trees.table,
             s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
      {
        inflate_huft *h;
        uInt i, j, c;

        t = s->sub.trees.bb;
        NEEDBITS(t)
        h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
        t = h->bits;
        c = h->base;
        if (c < 16)                     /* a literal code length */
        {
          DUMPBITS(t)
          s->sub.trees.blens[s->sub.trees.index++] = c;
        }
        else /* c == 16..18 */          /* a repeat code */
        {
          i = c == 18 ? 7 : c - 14;     /* extra bits to read */
          j = c == 18 ? 11 : 3;         /* minimum repeat count */
          NEEDBITS(t + i)
          DUMPBITS(t)
          j += (uInt)b & inflate_mask[i];
          DUMPBITS(i)
          i = s->sub.trees.index;
          t = s->sub.trees.table;
          if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
              (c == 16 && i < 1))
          {
            ZFREE(z, s->sub.trees.blens);
            s->mode = BADB;
            z->msg = (char*)"invalid bit length repeat";
            r = Z_DATA_ERROR;
            LEAVE
          }
          c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
          do {
            s->sub.trees.blens[i++] = c;
          } while (--j);
          s->sub.trees.index = i;
        }
      }
      s->sub.trees.tb = Z_NULL;
      {
        uInt bl, bd;
        inflate_huft *tl, *td;
        inflate_codes_statef *c;

        bl = 9;         /* must be <= 9 for lookahead assumptions */
        bd = 6;         /* must be <= 9 for lookahead assumptions */
        t = s->sub.trees.table;
        t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
                                  s->sub.trees.blens, &bl, &bd, &tl, &td,
                                  s->hufts, z);
        if (t != Z_OK)
        {
          if (t == (uInt)Z_DATA_ERROR)
          {
            ZFREE(z, s->sub.trees.blens);
            s->mode = BADB;
          }
          r = t;
          LEAVE
        }
        Tracev((stderr, "inflate:       trees ok\n"));
        if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
        {
          r = Z_MEM_ERROR;
          LEAVE
        }
        s->sub.decode.codes = c;
      }
      ZFREE(z, s->sub.trees.blens);
      s->mode = CODES;
      /* fall through */
    case CODES:
      UPDATE
      if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
        return inflate_flush(s, z, r);
      r = Z_OK;
      inflate_codes_free(s->sub.decode.codes, z);
      LOAD
      Tracev((stderr, "inflate:       codes end, %lu total out\n",
              z->total_out + (q >= s->read ? q - s->read :
              (s->end - s->read) + (q - s->window))));
      if (!s->last)
      {
        s->mode = TYPE;
        break;
      }
      s->mode = DRY;
      /* fall through */
    case DRY:
      FLUSH
      if (s->read != s->write)
        LEAVE
      s->mode = DONEB;
      /* fall through */
    case DONEB:
      r = Z_STREAM_END;
      LEAVE
    case BADB:
      r = Z_DATA_ERROR;
      LEAVE
    default:
      r = Z_STREAM_ERROR;
      LEAVE
  }
}
 4312 
 4313 
 4314 static int inflate_blocks_free(s, z)
 4315 inflate_blocks_statef *s;
 4316 z_streamp z;
 4317 {
 4318   inflate_blocks_reset(s, z, Z_NULL);
 4319   ZFREE(z, s->window);
 4320   ZFREE(z, s->hufts);
 4321   ZFREE(z, s);
 4322   Tracev((stderr, "inflate:   blocks freed\n"));
 4323   return Z_OK;
 4324 }
 4325 
 4326 
 4327 static void inflate_set_dictionary(s, d, n)
 4328 inflate_blocks_statef *s;
 4329 const Bytef *d;
 4330 uInt  n;
 4331 {
 4332   zmemcpy(s->window, d, n);
 4333   s->read = s->write = s->window + n;
 4334 }
 4335 
 4336 
 4337 /* Returns true if inflate is currently at the end of a block generated
 4338  * by Z_SYNC_FLUSH or Z_FULL_FLUSH. 
 4339  * IN assertion: s != Z_NULL
 4340  */
 4341 static int inflate_blocks_sync_point(s)
 4342 inflate_blocks_statef *s;
 4343 {
 4344   return s->mode == LENS;
 4345 }
 4346 /* --- infblock.c */
 4347 
 4348 /* +++ inftrees.c */
 4349 /* inftrees.c -- generate Huffman trees for efficient decoding
 4350  * Copyright (C) 1995-2002 Mark Adler
 4351  * For conditions of distribution and use, see copyright notice in zlib.h 
 4352  */
 4353 
 4354 /* #include "zutil.h" */
 4355 /* #include "inftrees.h" */
 4356 
 4357 #if !defined(BUILDFIXED) && !defined(STDC)
 4358 #  define BUILDFIXED   /* non ANSI compilers may not accept inffixed.h */
 4359 #endif
 4360 
/* Version/copyright string, deliberately kept in the linked binary. */
const char inflate_copyright[] =
   " inflate 1.1.4 Copyright 1995-2002 Mark Adler ";
/*
  If you use the zlib library in a product, an acknowledgment is welcome
  in the documentation of your product. If for some reason you cannot
  include such an acknowledgment, I would appreciate that you keep this
  copyright string in the executable of your product.
 */
 4369  
 4370 #ifndef NO_DUMMY_DECL
 4371 struct internal_state  {int dummy;}; /* for buggy compilers */
 4372 #endif
 4373 
 4374 /* simplify the use of the inflate_huft type with some defines */
 4375 #define exop word.what.Exop
 4376 #define bits word.what.Bits
 4377 
 4378 
/* Build a multi-level Huffman decoding table from a list of code
   lengths; see the definition below for the full contract. */
static int
huft_build OF((
    uIntf *,            /* code lengths in bits */
    uInt,               /* number of codes */
    uInt,               /* number of "simple" codes */
    const uIntf *,      /* list of base values for non-simple codes */
    const uIntf *,      /* list of extra bits for non-simple codes */
    inflate_huft * FAR*,/* result: starting table */
    uIntf *,            /* maximum lookup bits (returns actual) */
    inflate_huft *,     /* space for trees */
    uInt *,             /* hufts used in space */
    uIntf * ));         /* space for values */
 4391 
/* Tables for deflate from PKZIP's appnote.txt. */
/* Base values and extra-bit counts used by huft_build() to map
   length codes 257..285 and distance codes 0..29 to actual lengths
   and distances. */
static const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
        /* see note #13 above about 258 */
static const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */
static const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */
        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
        8193, 12289, 16385, 24577};
static const uInt cpdext[30] = { /* Extra bits for distance codes */
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
        12, 12, 13, 13};
 4408 
 4409 /*
 4410    Huffman code decoding is performed using a multi-level table lookup.
 4411    The fastest way to decode is to simply build a lookup table whose
 4412    size is determined by the longest code.  However, the time it takes
 4413    to build this table can also be a factor if the data being decoded
 4414    is not very long.  The most common codes are necessarily the
 4415    shortest codes, so those codes dominate the decoding time, and hence
 4416    the speed.  The idea is you can have a shorter table that decodes the
 4417    shorter, more probable codes, and then point to subsidiary tables for
 4418    the longer codes.  The time it costs to decode the longer codes is
 4419    then traded against the time it takes to make longer tables.
 4420 
   The results of this trade are in the variables lbits and dbits
 4422    below.  lbits is the number of bits the first level table for literal/
 4423    length codes can decode in one step, and dbits is the same thing for
 4424    the distance codes.  Subsequent tables are also less than or equal to
 4425    those sizes.  These values may be adjusted either when all of the
 4426    codes are shorter than that, in which case the longest code length in
 4427    bits is used, or when the shortest code is *longer* than the requested
 4428    table size, in which case the length of the shortest code in bits is
 4429    used.
 4430 
 4431    There are two different values for the two tables, since they code a
 4432    different number of possibilities each.  The literal/length table
 4433    codes 286 possible values, or in a flat code, a little over eight
 4434    bits.  The distance table codes 30 possible values, or a little less
 4435    than five bits, flat.  The optimum values for speed end up being
 4436    about one bit more than those, so lbits is 8+1 and dbits is 5+1.
 4437    The optimum values may differ though from machine to machine, and
 4438    possibly even between compilers.  Your mileage may vary.
 4439  */
 4440 
 4441 
 4442 /* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
 4443 #define BMAX 15         /* maximum bit length of any code */
 4444 
 4445 /* Given a list of code lengths and a maximum table size, make a set of
 4446    tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
 4447    if the given code set is incomplete (the tables are still built in this
 4448    case), or Z_DATA_ERROR if the input is invalid. */
 4449 #if 0
 4450 uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
 4451 uInt n;                 /* number of codes (assumed <= 288) */
 4452 uInt s;                 /* number of simple-valued codes (0..s-1) */
 4453 const uIntf *d;         /* list of base values for non-simple codes */
 4454 const uIntf *e;         /* list of extra bits for non-simple codes */
 4455 inflate_huft * FAR *t;  /* result: starting table */
 4456 uIntf *m;               /* maximum lookup bits, returns actual */
 4457 inflate_huft *hp;       /* space for trees */
 4458 uInt *hn;               /* hufts used in space */
 4459 uIntf *v;               /* working area: values in order of bit length */
 4460 #endif
 4461 
/*
 * Given the code lengths b[0..n-1], build a multi-level Huffman decode
 * table in the caller-supplied space hp (*hn entries already in use,
 * MANY total) and return its root through *t.  s is the count of
 * "simple" codes; d and e map the remaining codes to base values and
 * extra-bit counts.  *m is the desired root-table bit depth on entry
 * and the actual depth on return.  Returns Z_OK, Z_BUF_ERROR for an
 * incomplete code set (tables are still built), or Z_DATA_ERROR for an
 * over-subscribed set or table-space overflow.
 */
static int
huft_build(uIntf *b, uInt n, uInt s, const uIntf *d, const uIntf *e,
           inflate_huft * FAR *t, uIntf *m, inflate_huft *hp, uInt *hn,
           uIntf *v)
{
  uInt a;                       /* counter for codes of length k */
  uInt c[BMAX+1];               /* bit length count table */
  uInt f;                       /* i repeats in table every f entries */
  int g;                        /* maximum code length */
  int h;                        /* table level */
  uInt i;              /* counter, current code */
  uInt j;              /* counter */
  int k;               /* number of bits in current code */
  int l;                        /* bits per table (returned in m) */
  uInt mask;                    /* (1 << w) - 1, to avoid cc -O bug on HP */
  uIntf *p;            /* pointer into c[], b[], or v[] */
  inflate_huft *q;              /* points to current table */
  struct inflate_huft_s r;      /* table entry for structure assignment */
  inflate_huft *u[BMAX];        /* table stack */
  int w;               /* bits before this table == (l * h) */
  uInt x[BMAX+1];               /* bit offsets, then code stack */
  uIntf *xp;                    /* pointer into x */
  int y;                        /* number of dummy codes added */
  uInt z;                       /* number of entries in current table */


  /* Generate counts for each bit length */
  p = c;
#define C0 *p++ = 0;
#define C2 C0 C0 C0 C0
#define C4 C2 C2 C2 C2
  C4                            /* clear c[]--assume BMAX+1 is 16 */
  p = b;  i = n;
  do {
    c[*p++]++;                  /* assume all entries <= BMAX */
  } while (--i);
  if (c[0] == n)                /* null input--all zero length codes */
  {
    *t = (inflate_huft *)Z_NULL;
    *m = 0;
    return Z_OK;
  }


  /* Find minimum and maximum length, bound *m by those */
  l = *m;
  for (j = 1; j <= BMAX; j++)
    if (c[j])
      break;
  k = j;                        /* minimum code length */
  if ((uInt)l < j)
    l = j;
  for (i = BMAX; i; i--)
    if (c[i])
      break;
  g = i;                        /* maximum code length */
  if ((uInt)l > i)
    l = i;
  *m = l;


  /* Adjust last length count to fill out codes, if needed */
  for (y = 1 << j; j < i; j++, y <<= 1)
    if ((y -= c[j]) < 0)
      return Z_DATA_ERROR;      /* over-subscribed set of lengths */
  if ((y -= c[i]) < 0)
    return Z_DATA_ERROR;
  c[i] += y;                    /* y is now the number of dummy codes */


  /* Generate starting offsets into the value table for each length */
  x[1] = j = 0;
  p = c + 1;  xp = x + 2;
  while (--i) {                 /* note that i == g from above */
    *xp++ = (j += *p++);
  }


  /* Make a table of values in order of bit lengths */
  p = b;  i = 0;
  do {
    if ((j = *p++) != 0)
      v[x[j]++] = i;
  } while (++i < n);
  n = x[g];                     /* set n to length of v */


  /* Generate the Huffman codes and for each, make the table entries */
  x[0] = i = 0;                 /* first Huffman code is zero */
  p = v;                        /* grab values in bit order */
  h = -1;                       /* no tables yet--level -1 */
  w = -l;                       /* bits decoded == (l * h) */
  u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
  q = (inflate_huft *)Z_NULL;   /* ditto */
  z = 0;                        /* ditto */

  /* go through the bit lengths (k already is bits in shortest code) */
  for (; k <= g; k++)
  {
    a = c[k];
    while (a--)
    {
      /* here i is the Huffman code of length k bits for value *p */
      /* make tables up to required level */
      while (k > w + l)
      {
        h++;
        w += l;                 /* previous table always l bits */

        /* compute minimum size table less than or equal to l bits */
        z = g - w;
        z = z > (uInt)l ? l : z;        /* table size upper limit */
        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
        {                       /* too few codes for k-w bit table */
          f -= a + 1;           /* deduct codes from patterns left */
          xp = c + k;
          if (j < z)
            while (++j < z)     /* try smaller tables up to z bits */
            {
              if ((f <<= 1) <= *++xp)
                break;          /* enough codes to use up j bits */
              f -= *xp;         /* else deduct codes from patterns */
            }
        }
        z = 1 << j;             /* table entries for j-bit table */

        /* allocate new table */
        if (*hn + z > MANY)     /* (note: doesn't matter for fixed) */
          return Z_DATA_ERROR;  /* overflow of MANY */
        u[h] = q = hp + *hn;
        *hn += z;

        /* connect to last table, if there is one */
        if (h)
        {
          x[h] = i;             /* save pattern for backing up */
          r.bits = (Byte)l;     /* bits to dump before this table */
          r.exop = (Byte)j;     /* bits in this table */
          j = i >> (w - l);
          r.base = (uInt)(q - u[h-1] - j);   /* offset to this table */
          u[h-1][j] = r;        /* connect to last table */
        }
        else
          *t = q;               /* first table is returned result */
      }

      /* set up table entry in r */
      r.bits = (Byte)(k - w);
      if (p >= v + n)
        r.exop = 128 + 64;      /* out of values--invalid code */
      else if (*p < s)
      {
        r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);     /* 256 is end-of-block */
        r.base = *p++;          /* simple code is just the value */
      }
      else
      {
        r.exop = (Byte)(e[*p - s] + 16 + 64);/* non-simple--look up in lists */
        r.base = d[*p++ - s];
      }

      /* fill code-like entries with r */
      f = 1 << (k - w);
      for (j = i >> w; j < z; j += f)
        q[j] = r;

      /* backwards increment the k-bit code i */
      for (j = 1 << (k - 1); i & j; j >>= 1)
        i ^= j;
      i ^= j;

      /* backup over finished tables */
      mask = (1 << w) - 1;      /* needed on HP, cc -O bug */
      while ((i & mask) != x[h])
      {
        h--;                    /* don't need to update q */
        w -= l;
        mask = (1 << w) - 1;
      }
    }
  }


  /* Return Z_BUF_ERROR if we were given an incomplete table */
  return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
}
 4648 
 4649 
 4650 static int inflate_trees_bits(c, bb, tb, hp, z)
 4651 uIntf *c;               /* 19 code lengths */
 4652 uIntf *bb;              /* bits tree desired/actual depth */
 4653 inflate_huft * FAR *tb; /* bits tree result */
 4654 inflate_huft *hp;       /* space for trees */
 4655 z_streamp z;            /* for messages */
 4656 {
 4657   int r;
 4658   uInt hn = 0;          /* hufts used in space */
 4659   uIntf *v;             /* work area for huft_build */
 4660 
 4661   if ((v = (uIntf*)ZALLOC(z, 19, sizeof(uInt))) == Z_NULL)
 4662     return Z_MEM_ERROR;
 4663   r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL,
 4664                  tb, bb, hp, &hn, v);
 4665   if (r == Z_DATA_ERROR)
 4666     z->msg = (char*)"oversubscribed dynamic bit lengths tree";
 4667   else if (r == Z_BUF_ERROR || *bb == 0)
 4668   {
 4669     z->msg = (char*)"incomplete dynamic bit lengths tree";
 4670     r = Z_DATA_ERROR;
 4671   }
 4672   ZFREE(z, v);
 4673   return r;
 4674 }
 4675 
 4676 
/*
 * inflate_trees_dynamic -- build the literal/length and distance decoding
 * tables for one dynamic block from the code lengths in c[] (nl
 * literal/length lengths followed by nd distance lengths).  Both tables
 * are carved out of the caller-supplied huft space hp; a 288-entry
 * scratch area is allocated and freed locally.  Returns Z_OK,
 * Z_MEM_ERROR, or Z_DATA_ERROR (with z->msg set).
 */
static int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, hp, z)
uInt nl;                /* number of literal/length codes */
uInt nd;                /* number of distance codes */
uIntf *c;               /* that many (total) code lengths */
uIntf *bl;              /* literal desired/actual bit depth */
uIntf *bd;              /* distance desired/actual bit depth */
inflate_huft * FAR *tl; /* literal/length tree result */
inflate_huft * FAR *td; /* distance tree result */
inflate_huft *hp;       /* space for trees */
z_streamp z;            /* for messages */
{
  int r;
  uInt hn = 0;          /* hufts used in space */
  uIntf *v;             /* work area for huft_build */

  /* allocate work area */
  if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
    return Z_MEM_ERROR;

  /* build literal/length tree */
  r = huft_build(c, nl, 257, cplens, cplext, tl, bl, hp, &hn, v);
  if (r != Z_OK || *bl == 0)
  {
    if (r == Z_DATA_ERROR)
      z->msg = (char*)"oversubscribed literal/length tree";
    else if (r != Z_MEM_ERROR)
    {
      z->msg = (char*)"incomplete literal/length tree";
      r = Z_DATA_ERROR;
    }
    ZFREE(z, v);
    return r;
  }

  /* build distance tree (distance lengths start at c + nl) */
  r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, hp, &hn, v);
  if (r != Z_OK || (*bd == 0 && nl > 257))
  {
    if (r == Z_DATA_ERROR)
      z->msg = (char*)"oversubscribed distance tree";
    else if (r == Z_BUF_ERROR) {
      /* NOTE: the braces below interleave with the preprocessor
         conditionals -- read each #ifdef branch as its own if/else
         chain.  With PKZIP_BUG_WORKAROUND, an incomplete distance tree
         is tolerated (Z_BUF_ERROR is downgraded to Z_OK); without it,
         both the incomplete tree and the "empty tree with lengths"
         cases are hard data errors. */
#ifdef PKZIP_BUG_WORKAROUND
      r = Z_OK;
    }
#else
      z->msg = (char*)"incomplete distance tree";
      r = Z_DATA_ERROR;
    }
    else if (r != Z_MEM_ERROR)
    {
      z->msg = (char*)"empty distance tree with lengths";
      r = Z_DATA_ERROR;
    }
    ZFREE(z, v);
    return r;
#endif
  }

  /* done */
  ZFREE(z, v);
  return Z_OK;
}
 4739 
 4740 
/* build fixed tables only once--keep them here */
#ifdef BUILDFIXED
/* With BUILDFIXED, the fixed-code tables are built at run time, on first
   use, by inflate_trees_fixed() below into the fixed_mem allocation. */
static int fixed_built = 0;
#define FIXEDH 544      /* number of hufts used by fixed tables */
static inflate_huft *fixed_mem = NULL;
static uInt fixed_bl;
static uInt fixed_bd;
static inflate_huft *fixed_tl;
static inflate_huft *fixed_td;
#else
/* +++ inffixed.h */
/* inffixed.h -- table for decoding fixed codes
 * Generated automatically by the maketree.c program
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

/* Each initializer below is one inflate_huft: {{{Exop,Bits}},base}.
   Exop/Bits appear to be the entry's operation flags and bit count (read
   through the `exop`/`bits` defines used by the decoders), and base the
   literal value, base length/distance, or next-table offset -- do not
   edit by hand; regenerate with maketree.c. */
static uInt fixed_bl = 9;
static uInt fixed_bd = 5;
static inflate_huft fixed_tl[] = {
    {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
    {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},192},
    {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},160},
    {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},224},
    {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},144},
    {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},208},
    {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},176},
    {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},240},
    {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
    {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},200},
    {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},168},
    {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},232},
    {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},152},
    {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},216},
    {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},184},
    {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},248},
    {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
    {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},196},
    {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},164},
    {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},228},
    {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},148},
    {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},212},
    {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},180},
    {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},244},
    {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},204},
    {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},172},
    {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},236},
    {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},156},
    {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},220},
    {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},188},
    {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},252},
    {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
    {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},194},
    {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},162},
    {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},226},
    {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},146},
    {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},210},
    {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},178},
    {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},242},
    {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
    {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},202},
    {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},170},
    {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},234},
    {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},154},
    {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},218},
    {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},186},
    {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},250},
    {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
    {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},198},
    {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},166},
    {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},230},
    {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},150},
    {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},214},
    {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},182},
    {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},246},
    {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},206},
    {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},174},
    {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},238},
    {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},158},
    {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},222},
    {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},190},
    {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},254},
    {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
    {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},193},
    {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},161},
    {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},225},
    {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},145},
    {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},209},
    {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},177},
    {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},241},
    {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
    {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},201},
    {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},169},
    {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},233},
    {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},153},
    {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},217},
    {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},185},
    {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},249},
    {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
    {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},197},
    {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},165},
    {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},229},
    {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},149},
    {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},213},
    {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},181},
    {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},245},
    {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},205},
    {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},173},
    {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},237},
    {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},157},
    {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},221},
    {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},189},
    {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},253},
    {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
    {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},195},
    {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},163},
    {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},227},
    {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},147},
    {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},211},
    {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},179},
    {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},243},
    {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
    {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},203},
    {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},171},
    {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},235},
    {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},155},
    {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},219},
    {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},187},
    {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},251},
    {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
    {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},199},
    {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},167},
    {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},231},
    {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},151},
    {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},215},
    {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},183},
    {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},247},
    {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},207},
    {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},175},
    {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},239},
    {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},159},
    {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},223},
    {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},191},
    {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},255}
  };
static inflate_huft fixed_td[] = {
    {{{80,5}},1}, {{{87,5}},257}, {{{83,5}},17}, {{{91,5}},4097},
    {{{81,5}},5}, {{{89,5}},1025}, {{{85,5}},65}, {{{93,5}},16385},
    {{{80,5}},3}, {{{88,5}},513}, {{{84,5}},33}, {{{92,5}},8193},
    {{{82,5}},9}, {{{90,5}},2049}, {{{86,5}},129}, {{{192,5}},24577},
    {{{80,5}},2}, {{{87,5}},385}, {{{83,5}},25}, {{{91,5}},6145},
    {{{81,5}},7}, {{{89,5}},1537}, {{{85,5}},97}, {{{93,5}},24577},
    {{{80,5}},4}, {{{88,5}},769}, {{{84,5}},49}, {{{92,5}},12289},
    {{{82,5}},13}, {{{90,5}},3073}, {{{86,5}},193}, {{{192,5}},24577}
  };
/* --- inffixed.h */
#endif
 4905 
 4906 
 4907 static int inflate_trees_fixed(bl, bd, tl, td, z)
 4908 uIntf *bl;               /* literal desired/actual bit depth */
 4909 uIntf *bd;               /* distance desired/actual bit depth */
 4910 inflate_huft * FAR *tl;  /* literal/length tree result */
 4911 inflate_huft * FAR *td;  /* distance tree result */
 4912 z_streamp z;             /* for memory allocation */
 4913 {
 4914 #ifdef BUILDFIXED
 4915   /* build fixed tables if not already */
 4916   if (!fixed_built)
 4917   {
 4918     int k;              /* temporary variable */
 4919     uInt f = 0;         /* number of hufts used in fixed_mem */
 4920     uIntf *c;           /* length list for huft_build */
 4921     uIntf *v;           /* work area for huft_build */
 4922 
 4923     /* allocate memory */
 4924     if ((c = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
 4925       return Z_MEM_ERROR;
 4926     if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
 4927     {
 4928       ZFREE(z, c);
 4929       return Z_MEM_ERROR;
 4930     }
 4931     
 4932     if ((fixed_mem = (inflate_huft*)ZALLOC(z, FIXEDH, sizeof(inflate_huft))) == Z_NULL)
 4933     {
 4934         ZFREE(z, c);
 4935         ZFREE(z, v);
 4936         return Z_MEM_ERROR;
 4937     }
 4938 
 4939     /* literal table */
 4940     for (k = 0; k < 144; k++)
 4941       c[k] = 8;
 4942     for (; k < 256; k++)
 4943       c[k] = 9;
 4944     for (; k < 280; k++)
 4945       c[k] = 7;
 4946     for (; k < 288; k++)
 4947       c[k] = 8;
 4948     fixed_bl = 9;
 4949     huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl,
 4950                fixed_mem, &f, v);
 4951 
 4952     /* distance table */
 4953     for (k = 0; k < 30; k++)
 4954       c[k] = 5;
 4955     fixed_bd = 5;
 4956     huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd,
 4957                fixed_mem, &f, v);
 4958 
 4959     /* done */
 4960     ZFREE(z, v);
 4961     ZFREE(z, c);
 4962     fixed_built = 1;
 4963   }
 4964 #endif
 4965   *bl = fixed_bl;
 4966   *bd = fixed_bd;
 4967   *tl = fixed_tl;
 4968   *td = fixed_td;
 4969   return Z_OK;
 4970 }
 4971 /* --- inftrees.c */
 4972 
 4973 /* +++ infcodes.c */
 4974 /* infcodes.c -- process literals and length/distance pairs
 4975  * Copyright (C) 1995-2002 Mark Adler
 4976  * For conditions of distribution and use, see copyright notice in zlib.h 
 4977  */
 4978 
 4979 /* #include "zutil.h" */
 4980 /* #include "inftrees.h" */
 4981 /* #include "infblock.h" */
 4982 /* #include "infcodes.h" */
 4983 /* #include "infutil.h" */
 4984 
 4985 /* +++ inffast.h */
 4986 /* inffast.h -- header to use inffast.c
 4987  * Copyright (C) 1995-2002 Mark Adler
 4988  * For conditions of distribution and use, see copyright notice in zlib.h 
 4989  */
 4990 
 4991 /* WARNING: this file should *not* be used by applications. It is
 4992    part of the implementation of the compression library and is
 4993    subject to change. Applications should only use zlib.h.
 4994  */
 4995 
/* prototype for the unrolled fast decoder (defined in the inffast.c
   section later in this file); invoked from inflate_codes when enough
   input and window space are available */
static int inflate_fast OF((
    uInt,
    uInt,
    inflate_huft *,
    inflate_huft *,
    inflate_blocks_statef *,
    z_streamp ));
/* --- inffast.h */

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits
 5008 
/* States of the literal/length-distance decoding machine in
   inflate_codes().  The per-value prefix tells what the state waits on:
   "i:" more input bits, "o:" output window space, "x:" nothing. */
typedef enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
      START,    /* x: set up for LEN */
      LEN,      /* i: get length/literal/eob next */
      LENEXT,   /* i: getting length extra (have base) */
      DIST,     /* i: get distance next */
      DISTEXT,  /* i: getting distance extra */
      COPY,     /* o: copying bytes in window, waiting for space */
      LIT,      /* o: got literal, waiting for output space */
      WASH,     /* o: got eob, possibly still output waiting */
      END,      /* x: got eob and all data flushed */
      BADCODE}  /* x: got error */
inflate_codes_mode;
 5021 
/* inflate codes private state -- everything inflate_codes() needs to
   suspend and resume decoding at any point where it runs out of input
   or output space */
struct inflate_codes_state {

  /* mode */
  inflate_codes_mode mode;      /* current inflate_codes mode */

  /* mode dependent information */
  uInt len;             /* remaining length of match string to copy */
  union {
    struct {
      inflate_huft *tree;       /* pointer into tree */
      uInt need;                /* bits needed */
    } code;             /* if LEN or DIST, where in tree */
    uInt lit;           /* if LIT, literal */
    struct {
      uInt get;                 /* bits to get for extra */
      uInt dist;                /* distance back to copy from */
    } copy;             /* if EXT or COPY, where and how much */
  } sub;                /* submode */

  /* mode independent information */
  Byte lbits;           /* ltree bits decoded per branch */
  Byte dbits;           /* dtree bits decoder per branch */
  inflate_huft *ltree;          /* literal/length/eob tree */
  inflate_huft *dtree;          /* distance tree */

};
 5049 
 5050 
 5051 static inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
 5052 uInt bl, bd;
 5053 inflate_huft *tl;
 5054 inflate_huft *td; /* need separate declaration for Borland C++ */
 5055 z_streamp z;
 5056 {
 5057   inflate_codes_statef *c;
 5058 
 5059   if ((c = (inflate_codes_statef *)
 5060        ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
 5061   {
 5062     c->mode = START;
 5063     c->lbits = (Byte)bl;
 5064     c->dbits = (Byte)bd;
 5065     c->ltree = tl;
 5066     c->dtree = td;
 5067     Tracev((stderr, "inflate:       codes new\n"));
 5068   }
 5069   return c;
 5070 }
 5071 
 5072 
/*
 * inflate_codes -- run the literal/length-distance decoding state
 * machine, resuming from c->mode.  Decoded bytes go into the sliding
 * window in s; the LOAD/UPDATE/NEEDBITS/NEEDOUT/LEAVE macros hide the
 * stream bookkeeping, and NEEDBITS/NEEDOUT/LEAVE can return from this
 * function directly when input or output runs out.  Several cases fall
 * through intentionally after setting c->mode to the next state.
 */
static int inflate_codes(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;                  /* status code carried in and returned via LEAVE */
{
  uInt j;               /* temporary storage */
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  Bytef *f;             /* pointer to copy strings from */
  inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */

  /* copy input/output information to locals (UPDATE macro restores) */
  LOAD

  /* process input and output based on current state */
  while (1) switch (c->mode)
  {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
    case START:         /* x: set up for LEN */
#ifndef SLOW
      /* with >= 258 window bytes free and >= 10 input bytes, hand off
         to the unrolled fast decoder until it runs low on either */
      if (m >= 258 && n >= 10)
      {
        UPDATE
        r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
        LOAD
        if (r != Z_OK)
        {
          c->mode = r == Z_STREAM_END ? WASH : BADCODE;
          break;
        }
      }
#endif /* !SLOW */
      c->sub.code.need = c->lbits;
      c->sub.code.tree = c->ltree;
      c->mode = LEN;
      /* fall through */
    case LEN:           /* i: get length/literal/eob next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e == 0)               /* literal */
      {
        c->sub.lit = t->base;
        Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                 "inflate:         literal '%c'\n" :
                 "inflate:         literal 0x%02x\n", t->base));
        c->mode = LIT;
        break;
      }
      if (e & 16)               /* length */
      {
        c->sub.copy.get = e & 15;
        c->len = t->base;
        c->mode = LENEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      if (e & 32)               /* end of block */
      {
        Tracevv((stderr, "inflate:         end of block\n"));
        c->mode = WASH;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = (char*)"invalid literal/length code";
      r = Z_DATA_ERROR;
      LEAVE
    case LENEXT:        /* i: getting length extra (have base) */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->len += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      c->sub.code.need = c->dbits;
      c->sub.code.tree = c->dtree;
      Tracevv((stderr, "inflate:         length %u\n", c->len));
      c->mode = DIST;
      /* fall through */
    case DIST:          /* i: get distance next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e & 16)               /* distance */
      {
        c->sub.copy.get = e & 15;
        c->sub.copy.dist = t->base;
        c->mode = DISTEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = (char*)"invalid distance code";
      r = Z_DATA_ERROR;
      LEAVE
    case DISTEXT:       /* i: getting distance extra */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->sub.copy.dist += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      Tracevv((stderr, "inflate:         distance %u\n", c->sub.copy.dist));
      c->mode = COPY;
      /* fall through */
    case COPY:          /* o: copying bytes in window, waiting for space */
      f = q - c->sub.copy.dist;
      while (f < s->window)             /* modulo window size-"while" instead */
        f += s->end - s->window;        /* of "if" handles invalid distances */
      while (c->len)
      {
        NEEDOUT
        OUTBYTE(*f++)
        if (f == s->end)
          f = s->window;
        c->len--;
      }
      c->mode = START;
      break;
    case LIT:           /* o: got literal, waiting for output space */
      NEEDOUT
      OUTBYTE(c->sub.lit)
      c->mode = START;
      break;
    case WASH:          /* o: got eob, possibly more output */
      if (k > 7)        /* return unused byte, if any */
      {
        Assert(k < 16, "inflate_codes grabbed too many bytes");
        k -= 8;
        n++;
        p--;            /* can always return one */
      }
      FLUSH
      if (s->read != s->write)
        LEAVE
      c->mode = END;
      /* fall through */
    case END:
      r = Z_STREAM_END;
      LEAVE
    case BADCODE:       /* x: got error */
      r = Z_DATA_ERROR;
      LEAVE
    default:
      r = Z_STREAM_ERROR;
      LEAVE
  }
#ifdef NEED_DUMMY_RETURN
  return Z_STREAM_ERROR;  /* Some dumb compilers complain without this */
#endif
}
 5235 
 5236 
/* Free a codes state previously obtained from inflate_codes_new(). */
static void inflate_codes_free(c, z)
inflate_codes_statef *c;
z_streamp z;
{
  ZFREE(z, c);
  Tracev((stderr, "inflate:       codes free\n"));
}
 5244 /* --- infcodes.c */
 5245 
 5246 /* +++ infutil.c */
 5247 /* inflate_util.c -- data and routines common to blocks and codes
 5248  * Copyright (C) 1995-2002 Mark Adler
 5249  * For conditions of distribution and use, see copyright notice in zlib.h 
 5250  */
 5251 
 5252 /* #include "zutil.h" */
 5253 /* #include "infblock.h" */
 5254 /* #include "inftrees.h" */
 5255 /* #include "infcodes.h" */
 5256 /* #include "infutil.h" */
 5257 
#ifndef NO_DUMMY_DECL
/* dummy declaration for buggy compilers only; excluded (NO_DUMMY_DECL
   defined) when the real struct definition above is in effect */
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif
 5261 
 5262 /* copy as much as possible from the sliding window to the output area */
 5263 static int inflate_flush(s, z, r)
 5264 inflate_blocks_statef *s;
 5265 z_streamp z;
 5266 int r;
 5267 {
 5268   uInt n;
 5269   Bytef *p;
 5270   Bytef *q;
 5271 
 5272   /* local copies of source and destination pointers */
 5273   p = z->next_out;
 5274   q = s->read;
 5275 
 5276   /* compute number of bytes to copy as far as end of window */
 5277   n = (uInt)((q <= s->write ? s->write : s->end) - q);
 5278   if (n > z->avail_out) n = z->avail_out;
 5279   if (n && r == Z_BUF_ERROR) r = Z_OK;
 5280 
 5281   /* update counters */
 5282   z->avail_out -= n;
 5283   z->total_out += n;
 5284 
 5285   /* update check information */
 5286   if (s->checkfn != Z_NULL)
 5287     z->adler = s->check = (*s->checkfn)(s->check, q, n);
 5288 
 5289   /* copy as far as end of window */
 5290   zmemcpy(p, q, n);
 5291   p += n;
 5292   q += n;
 5293 
 5294   /* see if more to copy at beginning of window */
 5295   if (q == s->end)
 5296   {
 5297     /* wrap pointers */
 5298     q = s->window;
 5299     if (s->write == s->end)
 5300       s->write = s->window;
 5301 
 5302     /* compute bytes to copy */
 5303     n = (uInt)(s->write - q);
 5304     if (n > z->avail_out) n = z->avail_out;
 5305     if (n && r == Z_BUF_ERROR) r = Z_OK;
 5306 
 5307     /* update counters */
 5308     z->avail_out -= n;
 5309     z->total_out += n;
 5310 
 5311     /* update check information */
 5312     if (s->checkfn != Z_NULL)
 5313       z->adler = s->check = (*s->checkfn)(s->check, q, n);
 5314 
 5315     /* copy */
 5316     zmemcpy(p, q, n);
 5317     p += n;
 5318     q += n;
 5319   }
 5320 
 5321   /* update pointers */
 5322   z->next_out = p;
 5323   s->read = q;
 5324 
 5325   /* done */
 5326   return r;
 5327 }
 5328 /* --- infutil.c */
 5329 
 5330 /* +++ inffast.c */
 5331 /* inffast.c -- process literals and length/distance pairs fast
 5332  * Copyright (C) 1995-2002 Mark Adler
 5333  * For conditions of distribution and use, see copyright notice in zlib.h 
 5334  */
 5335 
 5336 /* #include "zutil.h" */
 5337 /* #include "inftrees.h" */
 5338 /* #include "infblock.h" */
 5339 /* #include "infcodes.h" */
 5340 /* #include "infutil.h" */
 5341 /* #include "inffast.h" */
 5342 
#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits

/* macros for bit input with no checking and for returning unused bytes */
/* GRABBITS(j): top up bit buffer b until it holds at least j bits,
   pulling input bytes via NEXTBYTE -- no end-of-input check, so the
   caller must guarantee enough input is available.  UNGRAB: give the
   whole unused bytes still sitting in the bit buffer back to the input
   stream (adjusts n, p, and k; clobbers local c). */
#define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
#define UNGRAB {c=z->avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;}
 5354 
 5355 /* Called with number of bytes left to write in window at least 258
 5356    (the maximum string length) and number of input bytes available
 5357    at least ten.  The ten bytes are six bytes for the longest length/
 5358    distance pair plus four bytes for overloading the bit buffer. */
 5359 
/*
 * Fast-path decoder for literal/length and distance codes.
 *
 * bl, bd: number of bits consumed by the root literal/length (tl) and
 *         distance (td) Huffman lookup tables.
 * s:      inflate blocks state; supplies the sliding output window
 *         (s->window .. s->end).
 * z:      zlib stream; supplies input bytes and receives error text.
 *
 * Returns Z_OK when input or window space runs low, Z_STREAM_END on the
 * end-of-block code, or Z_DATA_ERROR on an invalid code.  LOAD/UPDATE,
 * GRABBITS/DUMPBITS, UNGRAB and NEXTBYTE are macros shared with the rest
 * of the inflate code; they shuttle stream state into the locals below
 * and back out again on every exit path.
 */
static int inflate_fast(bl, bd, tl, td, s, z)
uInt bl, bd;
inflate_huft *tl;
inflate_huft *td; /* need separate declaration for Borland C++ */
inflate_blocks_statef *s;
z_streamp z;
{
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  uInt ml;              /* mask for literal/length tree */
  uInt md;              /* mask for distance tree */
  uInt c;               /* bytes to copy */
  uInt d;               /* distance back to copy from */
  Bytef *r;             /* copy source pointer */

  /* load input, output, bit values */
  LOAD

  /* initialize masks */
  ml = inflate_mask[bl];
  md = inflate_mask[bd];

  /* do until not enough input or output space for fast loop */
  do {                          /* assume called with m >= 258 && n >= 10 */
    /* get literal/length code */
    GRABBITS(20)                /* max bits for literal/length code */
    if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
    {
      /* exop == 0: the root-table entry is itself a literal byte */
      DUMPBITS(t->bits)
      Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                "inflate:         * literal '%c'\n" :
                "inflate:         * literal 0x%02x\n", t->base));
      *q++ = (Byte)t->base;
      m--;
      continue;
    }
    do {
      DUMPBITS(t->bits)
      if (e & 16)
      {
        /* get extra bits for length */
        e &= 15;
        c = t->base + ((uInt)b & inflate_mask[e]);
        DUMPBITS(e)
        Tracevv((stderr, "inflate:         * length %u\n", c));

        /* decode distance base of block to copy */
        GRABBITS(15);           /* max bits for distance code */
        e = (t = td + ((uInt)b & md))->exop;
        do {
          DUMPBITS(t->bits)
          if (e & 16)
          {
            /* get extra bits to add to distance base */
            e &= 15;
            GRABBITS(e)         /* get extra bits (up to 13) */
            d = t->base + ((uInt)b & inflate_mask[e]);
            DUMPBITS(e)
            Tracevv((stderr, "inflate:         * distance %u\n", d));

            /* do the copy */
            m -= c;
            r = q - d;
            if (r < s->window)                  /* wrap if needed */
            {
              do {
                r += s->end - s->window;        /* force pointer in window */
              } while (r < s->window);          /* covers invalid distances */
              e = s->end - r;                   /* e reused: bytes until end of window */
              if (c > e)
              {
                c -= e;                         /* wrapped copy */
                do {
                    *q++ = *r++;
                } while (--e);
                r = s->window;                  /* restart source at window base */
                do {
                    *q++ = *r++;
                } while (--c);
              }
              else                              /* normal copy */
              {
                *q++ = *r++;  c--;
                *q++ = *r++;  c--;
                do {
                    *q++ = *r++;
                } while (--c);
              }
            }
            else                                /* normal copy */
            {
              /* the two eager copies before the loop assume c >= 3
                 (deflate's minimum match length) */
              *q++ = *r++;  c--;
              *q++ = *r++;  c--;
              do {
                *q++ = *r++;
              } while (--c);
            }
            break;
          }
          else if ((e & 64) == 0)
          {
            /* second-level distance table lookup */
            t += t->base;
            e = (t += ((uInt)b & inflate_mask[e]))->exop;
          }
          else
          {
            z->msg = (char*)"invalid distance code";
            UNGRAB
            UPDATE
            return Z_DATA_ERROR;
          }
        } while (1);
        break;
      }
      if ((e & 64) == 0)
      {
        /* second-level literal/length table lookup */
        t += t->base;
        if ((e = (t += ((uInt)b & inflate_mask[e]))->exop) == 0)
        {
          DUMPBITS(t->bits)
          Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                    "inflate:         * literal '%c'\n" :
                    "inflate:         * literal 0x%02x\n", t->base));
          *q++ = (Byte)t->base;
          m--;
          break;
        }
      }
      else if (e & 32)
      {
        /* end-of-block code: hand unused bytes/bits back and stop */
        Tracevv((stderr, "inflate:         * end of block\n"));
        UNGRAB
        UPDATE
        return Z_STREAM_END;
      }
      else
      {
        z->msg = (char*)"invalid literal/length code";
        UNGRAB
        UPDATE
        return Z_DATA_ERROR;
      }
    } while (1);
  } while (m >= 258 && n >= 10);

  /* not enough input or output--restore pointers and return */
  UNGRAB
  UPDATE
  return Z_OK;
}
 5516 /* --- inffast.c */
 5517 
 5518 /* +++ zutil.c */
 5519 /* zutil.c -- target dependent utility functions for the compression library
 5520  * Copyright (C) 1995-2002 Jean-loup Gailly.
 5521  * For conditions of distribution and use, see copyright notice in zlib.h 
 5522  */
 5523 
 5524 /* #include "zutil.h" */
 5525 
 5526 #ifndef NO_DUMMY_DECL
 5527 struct internal_state      {int dummy;}; /* for buggy compilers */
 5528 #endif
 5529 
 5530 #ifndef STDC
 5531 extern void exit OF((int));
 5532 #endif
 5533 
 5534 const char * ZEXPORT zlibVersion()
 5535 {
 5536     return ZLIB_VERSION;
 5537 }
 5538 
 5539 #ifdef DEBUG_ZLIB
 5540 
 5541 #  ifndef verbose
 5542 #    define verbose 0
 5543 #  endif
 5544 int z_verbose = verbose;
 5545 
 5546 void z_error (m)
 5547     char *m;
 5548 {
 5549     fprintf(stderr, "%s\n", m);
 5550     exit(1);
 5551 }
 5552 #endif
 5553 
 5554 /* exported to allow conversion of error code to string for compress() and
 5555  * uncompress()
 5556  */
/*
 * Map a zlib status code (one of the Z_* values) to its human-readable
 * message string, for callers of compress()/uncompress().  Delegates to
 * the ERR_MSG macro defined with the zutil declarations.
 */
const char * ZEXPORT zError(err)
    int err;
{
    return ERR_MSG(err);
}
 5562 
 5563 
 5564 #ifndef HAVE_MEMCPY
 5565 
 5566 void zmemcpy(dest, source, len)
 5567     Bytef* dest;
 5568     const Bytef* source;
 5569     uInt  len;
 5570 {
 5571     if (len == 0) return;
 5572     do {
 5573         *dest++ = *source++; /* ??? to be unrolled */
 5574     } while (--len != 0);
 5575 }
 5576 
 5577 int zmemcmp(s1, s2, len)
 5578     const Bytef* s1;
 5579     const Bytef* s2;
 5580     uInt  len;
 5581 {
 5582     uInt j;
 5583 
 5584     for (j = 0; j < len; j++) {
 5585         if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
 5586     }
 5587     return 0;
 5588 }
 5589 
 5590 void zmemzero(dest, len)
 5591     Bytef* dest;
 5592     uInt  len;
 5593 {
 5594     if (len == 0) return;
 5595     do {
 5596         *dest++ = 0;  /* ??? to be unrolled */
 5597     } while (--len != 0);
 5598 }
 5599 #endif
 5600 
 5601 #ifdef __TURBOC__
 5602 #if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)
 5603 /* Small and medium model in Turbo C are for now limited to near allocation
 5604  * with reduced MAX_WBITS and MAX_MEM_LEVEL
 5605  */
 5606 #  define MY_ZCALLOC
 5607 
 5608 /* Turbo C malloc() does not allow dynamic allocation of 64K bytes
 5609  * and farmalloc(64K) returns a pointer with an offset of 8, so we
 5610  * must fix the pointer. Warning: the pointer must be put back to its
 5611  * original form in order to free it, use zcfree().
 5612  */
 5613 
 5614 #define MAX_PTR 10
 5615 /* 10*64K = 640K */
 5616 
 5617 static int next_ptr = 0;
 5618 
 5619 typedef struct ptr_table_s {
 5620     voidpf org_ptr;
 5621     voidpf new_ptr;
 5622 } ptr_table;
 5623 
 5624 static ptr_table table[MAX_PTR];
 5625 /* This table is used to remember the original form of pointers
 5626  * to large buffers (64K). Such pointers are normalized with a zero offset.
 5627  * Since MSDOS is not a preemptive multitasking OS, this table is not
 5628  * protected from concurrent access. This hack doesn't work anyway on
 5629  * a protected system like OS/2. Use Microsoft C instead.
 5630  */
 5631 
/*
 * Far-heap allocator for 16-bit Turbo C: allocate items*size bytes.
 * Per the note above, farmalloc() of a block >= 64K returns a pointer
 * with a nonzero offset part, which 16-bit offset arithmetic on the
 * buffer cannot tolerate; such pointers are normalized to seg:0 and
 * the original value is remembered in table[] so zcfree() can release
 * it.  Returns NULL on allocation failure or when table[] is full.
 */
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    voidpf buf = opaque; /* just to make some compilers happy */
    ulg bsize = (ulg)items*size;

    /* If we allocate less than 65520 bytes, we assume that farmalloc
     * will return a usable pointer which doesn't have to be normalized.
     */
    if (bsize < 65520L) {
        buf = farmalloc(bsize);
        /* nonzero offset: usable as-is (also skips the NULL case below
         * only when the offset is nonzero, so NULL still falls through) */
        if (*(ush*)&buf != 0) return buf;
    } else {
        buf = farmalloc(bsize + 16L); /* +16 leaves room to round up to seg:0 */
    }
    if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
    table[next_ptr].org_ptr = buf;

    /* Normalize the pointer to seg:0 */
    /* bump the segment word by ceil(offset/16) paragraphs... */
    *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
    /* ...then clear the offset word */
    *(ush*)&buf = 0;
    table[next_ptr++].new_ptr = buf;
    return buf;
}
 5655 
/*
 * Release memory obtained from the Turbo C zcalloc() above.  Pointers
 * with a nonzero offset part came straight from farmalloc(); normalized
 * seg:0 pointers must first be translated back to the original
 * farmalloc() result via table[].
 */
void  zcfree (voidpf opaque, voidpf ptr)
{
    int n;
    if (*(ush*)&ptr != 0) { /* object < 64K */
        farfree(ptr);
        return;
    }
    /* Find the original pointer */
    for (n = 0; n < next_ptr; n++) {
        if (ptr != table[n].new_ptr) continue;

        farfree(table[n].org_ptr);
        /* compact the table over the entry just freed */
        while (++n < next_ptr) {
            table[n-1] = table[n];
        }
        next_ptr--;
        return;
    }
    ptr = opaque; /* just to make some compilers happy */
    Assert(0, "zcfree: ptr not found"); /* freeing an unknown pointer is a bug */
}
 5677 #endif
 5678 #endif /* __TURBOC__ */
 5679 
 5680 
 5681 #if defined(M_I86) && !defined(__32BIT__)
 5682 /* Microsoft C in 16-bit mode */
 5683 
 5684 #  define MY_ZCALLOC
 5685 
 5686 #if (!defined(_MSC_VER) || (_MSC_VER <= 600))
 5687 #  define _halloc  halloc
 5688 #  define _hfree   hfree
 5689 #endif
 5690 
/*
 * Allocator for 16-bit Microsoft C: _halloc() (huge alloc) is used so
 * that blocks of 64K or more can be allocated.  opaque is unused.
 */
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    return _halloc((long)items, size);
}
 5696 
/*
 * Release memory obtained from the 16-bit Microsoft C zcalloc() above
 * via the matching huge-free routine.  opaque is unused.
 */
void  zcfree (voidpf opaque, voidpf ptr)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    _hfree(ptr);
}
 5702 
 5703 #endif /* MSC */
 5704 
 5705 
 5706 #ifndef MY_ZCALLOC /* Any system without a special alloc function */
 5707 
 5708 #ifndef STDC
 5709 extern voidp  calloc OF((uInt items, uInt size));
 5710 extern void   free   OF((voidpf ptr));
 5711 #endif
 5712 
 5713 voidpf zcalloc (opaque, items, size)
 5714     voidpf opaque;
 5715     unsigned items;
 5716     unsigned size;
 5717 {
 5718     if (opaque) items += size - size; /* make compiler happy */
 5719     return (voidpf)calloc(items, size);
 5720 }
 5721 
/*
 * Default deallocation hook, the counterpart of zcalloc() above.
 * NOTE(review): _FREE appears to be this kernel's free() wrapper
 * rather than libc free() -- confirm against the surrounding glue.
 * opaque is unused.
 */
void  zcfree (opaque, ptr)
    voidpf opaque;
    voidpf ptr;
{
    _FREE(ptr);
    if (opaque) return; /* make compiler happy */
}
 5729 
 5730 #endif /* MY_ZCALLOC */
 5731 /* --- zutil.c */
 5732 
 5733 /* +++ adler32.c */
 5734 /* adler32.c -- compute the Adler-32 checksum of a data stream
 5735  * Copyright (C) 1995-2002 Mark Adler
 5736  * For conditions of distribution and use, see copyright notice in zlib.h 
 5737  */
 5738 
 5739 /* #include "zlib.h" */
 5740 
 5741 #define BASE 65521L /* largest prime smaller than 65536 */
 5742 #define NMAX 5552
 5743 /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
 5744 
 5745 #define DO1(buf,i)  {s1 += buf[i]; s2 += s1;}
 5746 #define DO2(buf,i)  DO1(buf,i); DO1(buf,i+1);
 5747 #define DO4(buf,i)  DO2(buf,i); DO2(buf,i+2);
 5748 #define DO8(buf,i)  DO4(buf,i); DO4(buf,i+4);
 5749 #define DO16(buf)   DO8(buf,0); DO8(buf,8);
 5750 
 5751 /* ========================================================================= */
 5752 uLong ZEXPORT
 5753 adler32(uLong adler, const Bytef *buf, uInt len)
 5754 {
 5755     unsigned long s1 = adler & 0xffff;
 5756     unsigned long s2 = (adler >> 16) & 0xffff;
 5757     int k;
 5758 
 5759     if (buf == Z_NULL) return 1L;
 5760 
 5761     while (len > 0) {
 5762         k = len < NMAX ? len : NMAX;
 5763         len -= k;
 5764         while (k >= 16) {
 5765             DO16(buf);
 5766             buf += 16;
 5767             k -= 16;
 5768         }
 5769         if (k != 0) do {
 5770             s1 += *buf++;
 5771             s2 += s1;
 5772         } while (--k);
 5773         s1 %= BASE;
 5774         s2 %= BASE;
 5775     }
 5776     return (s2 << 16) | s1;
 5777 }
 5778 /* --- adler32.c */

Cache object: 0ae5349a19e774fece33c68d9fc32403


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]


This page is part of the FreeBSD/Linux Linux Kernel Cross-Reference, and was automatically generated using a modified version of the LXR engine.