OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [rtos/] [ecos-3.0/] [packages/] [net/] [ppp/] [current/] [src/] [zlib.c] - Blame information for rev 786

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 786 skrzyp
//==========================================================================
2
//
3
//      src/zlib.c
4
//
5
//==========================================================================
6
// ####ECOSGPLCOPYRIGHTBEGIN####                                            
7
// -------------------------------------------                              
8
// This file is part of eCos, the Embedded Configurable Operating System.   
9
// Copyright (C) 2003 Free Software Foundation, Inc.                        
10
//
11
// eCos is free software; you can redistribute it and/or modify it under    
12
// the terms of the GNU General Public License as published by the Free     
13
// Software Foundation; either version 2 or (at your option) any later      
14
// version.                                                                 
15
//
16
// eCos is distributed in the hope that it will be useful, but WITHOUT      
17
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or    
18
// FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License    
19
// for more details.                                                        
20
//
21
// You should have received a copy of the GNU General Public License        
22
// along with eCos; if not, write to the Free Software Foundation, Inc.,    
23
// 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.            
24
//
25
// As a special exception, if other files instantiate templates or use      
26
// macros or inline functions from this file, or you compile this file      
27
// and link it with other works to produce a work based on this file,       
28
// this file does not by itself cause the resulting work to be covered by   
29
// the GNU General Public License. However the source code for this file    
30
// must still be made available in accordance with section (3) of the GNU   
31
// General Public License v2.                                               
32
//
33
// This exception does not invalidate any other reasons why a work based    
34
// on this file might be covered by the GNU General Public License.         
35
// -------------------------------------------                              
36
// ####ECOSGPLCOPYRIGHTEND####                                              
37
// ####BSDALTCOPYRIGHTBEGIN####                                             
38
// -------------------------------------------                              
39
// Portions of this software may have been derived from FreeBSD, OpenBSD,   
40
// or other sources, and if so are covered by the appropriate copyright     
41
// and license included herein.                                             
42
// -------------------------------------------                              
43
// ####BSDALTCOPYRIGHTEND####                                               
44
//==========================================================================
45
 
46
/*
47
 * This file is derived from various .h and .c files from the zlib-1.0.4
48
 * distribution by Jean-loup Gailly and Mark Adler, with some additions
49
 * by Paul Mackerras to aid in implementing Deflate compression and
50
 * decompression for PPP packets.  See zlib.h for conditions of
51
 * distribution and use.
52
 *
53
 * Changes that have been made include:
54
 * - added Z_PACKET_FLUSH (see zlib.h for details)
55
 * - added inflateIncomp and deflateOutputPending
56
 * - allow strm->next_out to be NULL, meaning discard the output
57
 *
58
 * $FreeBSD: src/sys/net/zlib.c,v 1.10.6.2 2002/03/24 23:23:58 jedgar Exp $
59
 */
60
 
61
/*
62
 *  ==FILEVERSION 971210==
63
 *
64
 * This marker is used by the Linux installation script to determine
65
 * whether an up-to-date version of this file is already installed.
66
 */
67
 
68
#define _KERNEL
69
 
70
#define NO_DUMMY_DECL
71
#define NO_ZCFUNCS
72
#define MY_ZCALLOC
73
 
74
#if 1 //defined(__FreeBSD__) && defined(_KERNEL)
75
#define inflate inflate_ppp     /* FreeBSD already has an inflate :-( */
76
#endif
77
 
78
 
79
/* +++ zutil.h */
80
/* zutil.h -- internal interface and configuration of the compression library
81
 * Copyright (C) 1995-1996 Jean-loup Gailly.
82
 * For conditions of distribution and use, see copyright notice in zlib.h
83
 */
84
 
85
/* WARNING: this file should *not* be used by applications. It is
86
   part of the implementation of the compression library and is
87
   subject to change. Applications should only use zlib.h.
88
 */
89
 
90
/* From: zutil.h,v 1.16 1996/07/24 13:41:13 me Exp $ */
91
 
92
#ifndef _Z_UTIL_H
93
#define _Z_UTIL_H
94
 
95
#ifdef _KERNEL
96
#include <cyg/ppp/net/zlib.h>
97
#else
98
#include "zlib.h"
99
#endif
100
 
101
#ifdef _KERNEL
102
/* Assume this is a *BSD or SVR4 kernel */
103
#include <sys/param.h>
104
#include <sys/types.h>
105
#include <sys/time.h>
106
//#include <sys/systm.h>
107
#ifndef __ECOS
108
#  define HAVE_MEMCPY
109
#  define memcpy(d, s, n)       bcopy((s), (d), (n))
110
#  define memset(d, v, n)       bzero((d), (n))
111
#  define memcmp                bcmp
112
#else
113
#  include <string.h>
114
//#  include <stdlib.h>
115
#endif
116
 
117
#else
118
#if defined(__KERNEL__)
119
/* Assume this is a Linux kernel */
120
#include <linux/string.h>
121
#define HAVE_MEMCPY
122
 
123
#else /* not kernel */
124
 
125
#if defined(MSDOS)||defined(VMS)||defined(CRAY)||defined(WIN32)||defined(RISCOS)
126
#   include <stddef.h>
127
#   include <errno.h>
128
#else
129
    extern int errno;
130
#endif
131
#ifdef STDC
132
#  include <string.h>
133
#  include <stdlib.h>
134
#endif
135
#endif /* __KERNEL__ */
136
#endif /* _KERNEL */
137
 
138
#ifndef local
139
#  define local static
140
#endif
141
/* compile with -Dlocal if your debugger can't find static symbols */
142
 
143
typedef unsigned char  uch;
144
typedef uch FAR uchf;
145
typedef unsigned short ush;
146
typedef ush FAR ushf;
147
typedef unsigned long  ulg;
148
 
149
extern const char *z_errmsg[10]; /* indexed by 2-zlib_error */
150
/* (size given to avoid silly warnings with Visual C++) */
151
 
152
#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]
153
 
154
#define ERR_RETURN(strm,err) \
155
  return (strm->msg = (const char*)ERR_MSG(err), (err))
156
/* To be used only when the state is known to be valid */
157
 
158
        /* common constants */
159
 
160
#ifndef DEF_WBITS
161
#  define DEF_WBITS MAX_WBITS
162
#endif
163
/* default windowBits for decompression. MAX_WBITS is for compression only */
164
 
165
#if MAX_MEM_LEVEL >= 8
166
#  define DEF_MEM_LEVEL 8
167
#else
168
#  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
169
#endif
170
/* default memLevel */
171
 
172
#define STORED_BLOCK 0
173
#define STATIC_TREES 1
174
#define DYN_TREES    2
175
/* The three kinds of block type */
176
 
177
#define MIN_MATCH  3
178
#define MAX_MATCH  258
179
/* The minimum and maximum match lengths */
180
 
181
#define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */
182
 
183
        /* target dependencies */
184
 
185
#ifdef MSDOS
186
#  define OS_CODE  0x00
187
#  ifdef __TURBOC__
188
#    include <alloc.h>
189
#  else /* MSC or DJGPP */
190
#    include <malloc.h>
191
#  endif
192
#endif
193
 
194
#ifdef OS2
195
#  define OS_CODE  0x06
196
#endif
197
 
198
#ifdef WIN32 /* Window 95 & Windows NT */
199
#  define OS_CODE  0x0b
200
#endif
201
 
202
#if defined(VAXC) || defined(VMS)
203
#  define OS_CODE  0x02
204
#  define FOPEN(name, mode) \
205
     fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
206
#endif
207
 
208
#ifdef AMIGA
209
#  define OS_CODE  0x01
210
#endif
211
 
212
#if defined(ATARI) || defined(atarist)
213
#  define OS_CODE  0x05
214
#endif
215
 
216
#ifdef MACOS
217
#  define OS_CODE  0x07
218
#endif
219
 
220
#ifdef __50SERIES /* Prime/PRIMOS */
221
#  define OS_CODE  0x0F
222
#endif
223
 
224
#ifdef TOPS20
225
#  define OS_CODE  0x0a
226
#endif
227
 
228
#if defined(_BEOS_) || defined(RISCOS)
229
#  define fdopen(fd,mode) NULL /* No fdopen() */
230
#endif
231
 
232
        /* Common defaults */
233
 
234
#ifndef OS_CODE
235
#  define OS_CODE  0x03  /* assume Unix */
236
#endif
237
 
238
#ifndef FOPEN
239
#  define FOPEN(name, mode) fopen((name), (mode))
240
#endif
241
 
242
         /* functions */
243
 
244
#ifdef HAVE_STRERROR
245
   extern char *strerror OF((int));
246
#  define zstrerror(errnum) strerror(errnum)
247
#else
248
#  define zstrerror(errnum) ""
249
#endif
250
 
251
#if defined(pyr)
252
#  define NO_MEMCPY
253
#endif
254
#if (defined(M_I86SM) || defined(M_I86MM)) && !defined(_MSC_VER)
255
 /* Use our own functions for small and medium model with MSC <= 5.0.
256
  * You may have to use the same strategy for Borland C (untested).
257
  */
258
#  define NO_MEMCPY
259
#endif
260
#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
261
#  define HAVE_MEMCPY
262
#endif
263
#ifdef HAVE_MEMCPY
264
#  ifdef SMALL_MEDIUM /* MSDOS small or medium model */
265
#    define zmemcpy _fmemcpy
266
#    define zmemcmp _fmemcmp
267
#    define zmemzero(dest, len) _fmemset(dest, 0, len)
268
#  else
269
#    define zmemcpy memcpy
270
#    define zmemcmp memcmp
271
#    define zmemzero(dest, len) memset(dest, 0, len)
272
#  endif
273
#else
274
   extern void zmemcpy  OF((Bytef* dest, Bytef* source, uInt len));
275
   extern int  zmemcmp  OF((Bytef* s1,   Bytef* s2, uInt len));
276
   extern void zmemzero OF((Bytef* dest, uInt len));
277
#endif
278
 
279
/* Diagnostic functions */
280
#ifdef DEBUG_ZLIB
281
#  include <stdio.h>
282
#  ifndef verbose
283
#    define verbose 0
284
#  endif
285
   extern void z_error    OF((char *m));
286
#  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
287
#  define Trace(x) fprintf x
288
#  define Tracev(x) {if (verbose) fprintf x ;}
289
#  define Tracevv(x) {if (verbose>1) fprintf x ;}
290
#  define Tracec(c,x) {if (verbose && (c)) fprintf x ;}
291
#  define Tracecv(c,x) {if (verbose>1 && (c)) fprintf x ;}
292
#else
293
#  define Assert(cond,msg)
294
#  define Trace(x)
295
#  define Tracev(x)
296
#  define Tracevv(x)
297
#  define Tracec(c,x)
298
#  define Tracecv(c,x)
299
#endif
300
 
301
 
302
typedef uLong (*check_func) OF((uLong check, const Bytef *buf, uInt len));
303
 
304
voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size));
305
void   zcfree  OF((voidpf opaque, voidpf ptr));
306
 
307
#define ZALLOC(strm, items, size) \
308
           (*((strm)->zalloc))((strm)->opaque, (items), (size))
309
#define ZFREE(strm, addr)  (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
310
#define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
311
 
312
#endif /* _Z_UTIL_H */
313
/* --- zutil.h */
314
 
315
/* +++ deflate.h */
316
/* deflate.h -- internal compression state
317
 * Copyright (C) 1995-1996 Jean-loup Gailly
318
 * For conditions of distribution and use, see copyright notice in zlib.h
319
 */
320
 
321
/* WARNING: this file should *not* be used by applications. It is
322
   part of the implementation of the compression library and is
323
   subject to change. Applications should only use zlib.h.
324
 */
325
 
326
/* From: deflate.h,v 1.10 1996/07/02 12:41:00 me Exp $ */
327
 
328
#ifndef _DEFLATE_H
329
#define _DEFLATE_H
330
 
331
/* #include "zutil.h" */
332
 
333
/* ===========================================================================
334
 * Internal compression state.
335
 */
336
 
337
#define LENGTH_CODES 29
338
/* number of length codes, not counting the special END_BLOCK code */
339
 
340
#define LITERALS  256
341
/* number of literal bytes 0..255 */
342
 
343
#define L_CODES (LITERALS+1+LENGTH_CODES)
344
/* number of Literal or Length codes, including the END_BLOCK code */
345
 
346
#define D_CODES   30
347
/* number of distance codes */
348
 
349
#define BL_CODES  19
350
/* number of codes used to transfer the bit lengths */
351
 
352
#define HEAP_SIZE (2*L_CODES+1)
353
/* maximum heap size */
354
 
355
#define MAX_BITS 15
356
/* All codes must not exceed MAX_BITS bits */
357
 
358
#define INIT_STATE    42
359
#define BUSY_STATE   113
360
#define FINISH_STATE 666
361
/* Stream status */
362
 
363
 
364
/* Data structure describing a single value and its code string. */
365
typedef struct ct_data_s {
366
    union {
367
        ush  freq;       /* frequency count */
368
        ush  code;       /* bit string */
369
    } fc;
370
    union {
371
        ush  dad;        /* father node in Huffman tree */
372
        ush  len;        /* length of bit string */
373
    } dl;
374
} FAR ct_data;
375
 
376
#define Freq fc.freq
377
#define Code fc.code
378
#define Dad  dl.dad
379
#define Len  dl.len
380
 
381
typedef struct static_tree_desc_s  static_tree_desc;
382
 
383
typedef struct tree_desc_s {
384
    ct_data *dyn_tree;           /* the dynamic tree */
385
    int     max_code;            /* largest code with non zero frequency */
386
    static_tree_desc *stat_desc; /* the corresponding static tree */
387
} FAR tree_desc;
388
 
389
typedef ush Pos;
390
typedef Pos FAR Posf;
391
typedef unsigned IPos;
392
 
393
/* A Pos is an index in the character window. We use short instead of int to
394
 * save space in the various tables. IPos is used only for parameter passing.
395
 */
396
 
397
typedef struct deflate_state {
398
    z_streamp strm;      /* pointer back to this zlib stream */
399
    int   status;        /* as the name implies */
400
    Bytef *pending_buf;  /* output still pending */
401
    ulg   pending_buf_size; /* size of pending_buf */
402
    Bytef *pending_out;  /* next pending byte to output to the stream */
403
    int   pending;       /* nb of bytes in the pending buffer */
404
    int   noheader;      /* suppress zlib header and adler32 */
405
    Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
406
    Byte  method;        /* STORED (for zip only) or DEFLATED */
407
    int   last_flush;    /* value of flush param for previous deflate call */
408
 
409
                /* used by deflate.c: */
410
 
411
    uInt  w_size;        /* LZ77 window size (32K by default) */
412
    uInt  w_bits;        /* log2(w_size)  (8..16) */
413
    uInt  w_mask;        /* w_size - 1 */
414
 
415
    Bytef *window;
416
    /* Sliding window. Input bytes are read into the second half of the window,
417
     * and move to the first half later to keep a dictionary of at least wSize
418
     * bytes. With this organization, matches are limited to a distance of
419
     * wSize-MAX_MATCH bytes, but this ensures that IO is always
420
     * performed with a length multiple of the block size. Also, it limits
421
     * the window size to 64K, which is quite useful on MSDOS.
422
     * To do: use the user input buffer as sliding window.
423
     */
424
 
425
    ulg window_size;
426
    /* Actual size of window: 2*wSize, except when the user input buffer
427
     * is directly used as sliding window.
428
     */
429
 
430
    Posf *prev;
431
    /* Link to older string with same hash index. To limit the size of this
432
     * array to 64K, this link is maintained only for the last 32K strings.
433
     * An index in this array is thus a window index modulo 32K.
434
     */
435
 
436
    Posf *head; /* Heads of the hash chains or NIL. */
437
 
438
    uInt  ins_h;          /* hash index of string to be inserted */
439
    uInt  hash_size;      /* number of elements in hash table */
440
    uInt  hash_bits;      /* log2(hash_size) */
441
    uInt  hash_mask;      /* hash_size-1 */
442
 
443
    uInt  hash_shift;
444
    /* Number of bits by which ins_h must be shifted at each input
445
     * step. It must be such that after MIN_MATCH steps, the oldest
446
     * byte no longer takes part in the hash key, that is:
447
     *   hash_shift * MIN_MATCH >= hash_bits
448
     */
449
 
450
    long block_start;
451
    /* Window position at the beginning of the current output block. Gets
452
     * negative when the window is moved backwards.
453
     */
454
 
455
    uInt match_length;           /* length of best match */
456
    IPos prev_match;             /* previous match */
457
    int match_available;         /* set if previous match exists */
458
    uInt strstart;               /* start of string to insert */
459
    uInt match_start;            /* start of matching string */
460
    uInt lookahead;              /* number of valid bytes ahead in window */
461
 
462
    uInt prev_length;
463
    /* Length of the best match at previous step. Matches not greater than this
464
     * are discarded. This is used in the lazy match evaluation.
465
     */
466
 
467
    uInt max_chain_length;
468
    /* To speed up deflation, hash chains are never searched beyond this
469
     * length.  A higher limit improves compression ratio but degrades the
470
     * speed.
471
     */
472
 
473
    uInt max_lazy_match;
474
    /* Attempt to find a better match only when the current match is strictly
475
     * smaller than this value. This mechanism is used only for compression
476
     * levels >= 4.
477
     */
478
#   define max_insert_length  max_lazy_match
479
    /* Insert new strings in the hash table only if the match length is not
480
     * greater than this length. This saves time but degrades compression.
481
     * max_insert_length is used only for compression levels <= 3.
482
     */
483
 
484
    int level;    /* compression level (1..9) */
485
    int strategy; /* favor or force Huffman coding*/
486
 
487
    uInt good_match;
488
    /* Use a faster search when the previous match is longer than this */
489
 
490
    int nice_match; /* Stop searching when current match exceeds this */
491
 
492
                /* used by trees.c: */
493
    /* Didn't use ct_data typedef below to supress compiler warning */
494
    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
495
    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
496
    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */
497
 
498
    struct tree_desc_s l_desc;               /* desc. for literal tree */
499
    struct tree_desc_s d_desc;               /* desc. for distance tree */
500
    struct tree_desc_s bl_desc;              /* desc. for bit length tree */
501
 
502
    ush bl_count[MAX_BITS+1];
503
    /* number of codes at each bit length for an optimal tree */
504
 
505
    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
506
    int heap_len;               /* number of elements in the heap */
507
    int heap_max;               /* element of largest frequency */
508
    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
509
     * The same heap array is used to build all trees.
510
     */
511
 
512
    uch depth[2*L_CODES+1];
513
    /* Depth of each subtree used as tie breaker for trees of equal frequency
514
     */
515
 
516
    uchf *l_buf;          /* buffer for literals or lengths */
517
 
518
    uInt  lit_bufsize;
519
    /* Size of match buffer for literals/lengths.  There are 4 reasons for
520
     * limiting lit_bufsize to 64K:
521
     *   - frequencies can be kept in 16 bit counters
522
     *   - if compression is not successful for the first block, all input
523
     *     data is still in the window so we can still emit a stored block even
524
     *     when input comes from standard input.  (This can also be done for
525
     *     all blocks if lit_bufsize is not greater than 32K.)
526
     *   - if compression is not successful for a file smaller than 64K, we can
527
     *     even emit a stored file instead of a stored block (saving 5 bytes).
528
     *     This is applicable only for zip (not gzip or zlib).
529
     *   - creating new Huffman trees less frequently may not provide fast
530
     *     adaptation to changes in the input data statistics. (Take for
531
     *     example a binary file with poorly compressible code followed by
532
     *     a highly compressible string table.) Smaller buffer sizes give
533
     *     fast adaptation but have of course the overhead of transmitting
534
     *     trees more frequently.
535
     *   - I can't count above 4
536
     */
537
 
538
    uInt last_lit;      /* running index in l_buf */
539
 
540
    ushf *d_buf;
541
    /* Buffer for distances. To simplify the code, d_buf and l_buf have
542
     * the same number of elements. To use different lengths, an extra flag
543
     * array would be necessary.
544
     */
545
 
546
    ulg opt_len;        /* bit length of current block with optimal trees */
547
    ulg static_len;     /* bit length of current block with static trees */
548
    ulg compressed_len; /* total bit length of compressed file */
549
    uInt matches;       /* number of string matches in current block */
550
    int last_eob_len;   /* bit length of EOB code for last block */
551
 
552
#ifdef DEBUG_ZLIB
553
    ulg bits_sent;      /* bit length of the compressed data */
554
#endif
555
 
556
    ush bi_buf;
557
    /* Output buffer. bits are inserted starting at the bottom (least
558
     * significant bits).
559
     */
560
    int bi_valid;
561
    /* Number of valid bits in bi_buf.  All bits above the last valid bit
562
     * are always zero.
563
     */
564
 
565
} FAR deflate_state;
566
 
567
/* Output a byte on the stream.
568
 * IN assertion: there is enough room in pending_buf.
569
 */
570
#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
571
 
572
 
573
#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
574
/* Minimum amount of lookahead, except at the end of the input file.
575
 * See deflate.c for comments about the MIN_MATCH+1.
576
 */
577
 
578
#define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
579
/* In order to simplify the code, particularly on 16 bit machines, match
580
 * distances are limited to MAX_DIST instead of WSIZE.
581
 */
582
 
583
        /* in trees.c */
584
void _tr_init         OF((deflate_state *s));
585
int  _tr_tally        OF((deflate_state *s, unsigned dist, unsigned lc));
586
ulg  _tr_flush_block  OF((deflate_state *s, charf *buf, ulg stored_len,
587
                          int eof));
588
void _tr_align        OF((deflate_state *s));
589
void _tr_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
590
                          int eof));
591
void _tr_stored_type_only OF((deflate_state *));
592
 
593
#endif
594
/* --- deflate.h */
595
 
596
/* +++ deflate.c */
597
/* deflate.c -- compress data using the deflation algorithm
598
 * Copyright (C) 1995-1996 Jean-loup Gailly.
599
 * For conditions of distribution and use, see copyright notice in zlib.h
600
 */
601
 
602
/*
603
 *  ALGORITHM
604
 *
605
 *      The "deflation" process depends on being able to identify portions
606
 *      of the input text which are identical to earlier input (within a
607
 *      sliding window trailing behind the input currently being processed).
608
 *
609
 *      The most straightforward technique turns out to be the fastest for
610
 *      most input files: try all possible matches and select the longest.
611
 *      The key feature of this algorithm is that insertions into the string
612
 *      dictionary are very simple and thus fast, and deletions are avoided
613
 *      completely. Insertions are performed at each input character, whereas
614
 *      string matches are performed only when the previous match ends. So it
615
 *      is preferable to spend more time in matches to allow very fast string
616
 *      insertions and avoid deletions. The matching algorithm for small
617
 *      strings is inspired from that of Rabin & Karp. A brute force approach
618
 *      is used to find longer strings when a small match has been found.
619
 *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
620
 *      (by Leonid Broukhis).
621
 *         A previous version of this file used a more sophisticated algorithm
622
 *      (by Fiala and Greene) which is guaranteed to run in linear amortized
623
 *      time, but has a larger average cost, uses more memory and is patented.
624
 *      However the F&G algorithm may be faster for some highly redundant
625
 *      files if the parameter max_chain_length (described below) is too large.
626
 *
627
 *  ACKNOWLEDGEMENTS
628
 *
629
 *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
630
 *      I found it in 'freeze' written by Leonid Broukhis.
631
 *      Thanks to many people for bug reports and testing.
632
 *
633
 *  REFERENCES
634
 *
635
 *      Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
636
 *      Available in ftp://ds.internic.net/rfc/rfc1951.txt
637
 *
638
 *      A description of the Rabin and Karp algorithm is given in the book
639
 *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
640
 *
641
 *      Fiala,E.R., and Greene,D.H.
642
 *         Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
643
 *
644
 */
645
 
646
/* From: deflate.c,v 1.15 1996/07/24 13:40:58 me Exp $ */
647
 
648
/* #include "deflate.h" */
649
 
650
char deflate_copyright[] = " deflate 1.0.4 Copyright 1995-1996 Jean-loup Gailly ";
651
/*
652
  If you use the zlib library in a product, an acknowledgment is welcome
653
  in the documentation of your product. If for some reason you cannot
654
  include such an acknowledgment, I would appreciate that you keep this
655
  copyright string in the executable of your product.
656
 */
657
 
658
/* ===========================================================================
659
 *  Function prototypes.
660
 */
661
typedef enum {
662
    need_more,      /* block not completed, need more input or more output */
663
    block_done,     /* block flush performed */
664
    finish_started, /* finish started, need only more output at next deflate */
665
    finish_done     /* finish done, accept no more input or output */
666
} block_state;
667
 
668
typedef block_state (*compress_func) OF((deflate_state *s, int flush));
669
/* Compression function. Returns the block state after the call. */
670
 
671
local void fill_window    OF((deflate_state *s));
672
local block_state deflate_stored OF((deflate_state *s, int flush));
673
local block_state deflate_fast   OF((deflate_state *s, int flush));
674
local block_state deflate_slow   OF((deflate_state *s, int flush));
675
local void lm_init        OF((deflate_state *s));
676
local void putShortMSB    OF((deflate_state *s, uInt b));
677
local void flush_pending  OF((z_streamp strm));
678
local int read_buf        OF((z_streamp strm, charf *buf, unsigned size));
679
#ifdef ASMV
680
      void match_init OF((void)); /* asm code initialization */
681
      uInt longest_match  OF((deflate_state *s, IPos cur_match));
682
#else
683
local uInt longest_match  OF((deflate_state *s, IPos cur_match));
684
#endif
685
 
686
#ifdef DEBUG_ZLIB
687
local  void check_match OF((deflate_state *s, IPos start, IPos match,
688
                            int length));
689
#endif
690
 
691
/* ===========================================================================
692
 * Local data
693
 */
694
 
695
#define NIL 0
696
/* Tail of hash chains */
697
 
698
#ifndef TOO_FAR
699
#  define TOO_FAR 4096
700
#endif
701
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
702
 
703
#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
704
/* Minimum amount of lookahead, except at the end of the input file.
705
 * See deflate.c for comments about the MIN_MATCH+1.
706
 */
707
 
708
/* Values for max_lazy_match, good_match and max_chain_length, depending on
709
 * the desired pack level (0..9). The values given below have been tuned to
710
 * exclude worst case performance for pathological files. Better values may be
711
 * found for specific files.
712
 */
713
typedef struct config_s {
714
   ush good_length; /* reduce lazy search above this match length */
715
   ush max_lazy;    /* do not perform lazy search above this match length */
716
   ush nice_length; /* quit search above this match length */
717
   ush max_chain;
718
   compress_func func;
719
} config;
720
 
721
local config configuration_table[10] = {
722
/*      good lazy nice chain */
723
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
724
/* 1 */ {4,    4,  8,    4, deflate_fast}, /* maximum speed, no lazy matches */
725
/* 2 */ {4,    5, 16,    8, deflate_fast},
726
/* 3 */ {4,    6, 32,   32, deflate_fast},
727
 
728
/* 4 */ {4,    4, 16,   16, deflate_slow},  /* lazy matches */
729
/* 5 */ {8,   16, 32,   32, deflate_slow},
730
/* 6 */ {8,   16, 128, 128, deflate_slow},
731
/* 7 */ {8,   32, 128, 256, deflate_slow},
732
/* 8 */ {32, 128, 258, 1024, deflate_slow},
733
/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */
734
 
735
/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
736
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
737
 * meaning.
738
 */
739
 
740
#define EQUAL 0
741
/* result of memcmp for equal strings */
742
 
743
#ifndef NO_DUMMY_DECL
744
struct static_tree_desc_s {int dummy;}; /* for buggy compilers */
745
#endif
746
 
747
/* ===========================================================================
748
 * Update a hash value with the given input byte
749
 * IN  assertion: all calls to to UPDATE_HASH are made with consecutive
750
 *    input characters, so that a running hash key can be computed from the
751
 *    previous key instead of complete recalculation each time.
752
 */
753
#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
754
 
755
 
756
/* ===========================================================================
757
 * Insert string str in the dictionary and set match_head to the previous head
758
 * of the hash chain (the most recent string with same hash key). Return
759
 * the previous length of the hash chain.
760
 * IN  assertion: all calls to to INSERT_STRING are made with consecutive
761
 *    input characters and the first MIN_MATCH bytes of str are valid
762
 *    (except for the last MIN_MATCH-1 bytes of the input file).
763
 */
764
#define INSERT_STRING(s, str, match_head) \
765
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
766
    s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
767
    s->head[s->ins_h] = (Pos)(str))
768
 
769
/* ===========================================================================
770
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
771
 * prev[] will be initialized on the fly.
772
 */
773
#define CLEAR_HASH(s) \
774
    s->head[s->hash_size-1] = NIL; \
775
    zmemzero((charf *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
776
 
777
/* ========================================================================= */
778
int deflateInit_(strm, level, version, stream_size)
779
    z_streamp strm;
780
    int level;
781
    const char *version;
782
    int stream_size;
783
{
784
    return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
785
                         Z_DEFAULT_STRATEGY, version, stream_size);
786
    /* To do: ignore strm->next_in if we use it as window */
787
}
788
 
789
/* ========================================================================= */
790
int deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
791
                  version, stream_size)
792
    z_streamp strm;
793
    int  level;
794
    int  method;
795
    int  windowBits;
796
    int  memLevel;
797
    int  strategy;
798
    const char *version;
799
    int stream_size;
800
{
801
    deflate_state *s;
802
    int noheader = 0;
803
    static char* my_version = ZLIB_VERSION;
804
 
805
    ushf *overlay;
806
    /* We overlay pending_buf and d_buf+l_buf. This works since the average
807
     * output size for (length,distance) codes is <= 24 bits.
808
     */
809
 
810
    if (version == Z_NULL || version[0] != my_version[0] ||
811
        stream_size != sizeof(z_stream)) {
812
        return Z_VERSION_ERROR;
813
    }
814
    if (strm == Z_NULL) return Z_STREAM_ERROR;
815
 
816
    strm->msg = Z_NULL;
817
#ifndef NO_ZCFUNCS
818
    if (strm->zalloc == Z_NULL) {
819
        strm->zalloc = zcalloc;
820
        strm->opaque = (voidpf)0;
821
    }
822
    if (strm->zfree == Z_NULL) strm->zfree = zcfree;
823
#endif
824
 
825
    if (level == Z_DEFAULT_COMPRESSION) level = 6;
826
 
827
    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
828
        noheader = 1;
829
        windowBits = -windowBits;
830
    }
831
    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
832
        windowBits < 9 || windowBits > 15 || level < 0 || level > 9 ||
833
        strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
834
        return Z_STREAM_ERROR;
835
    }
836
    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
837
    if (s == Z_NULL) return Z_MEM_ERROR;
838
    strm->state = (struct internal_state FAR *)s;
839
    s->strm = strm;
840
 
841
    s->noheader = noheader;
842
    s->w_bits = windowBits;
843
    s->w_size = 1 << s->w_bits;
844
    s->w_mask = s->w_size - 1;
845
 
846
    s->hash_bits = memLevel + 7;
847
    s->hash_size = 1 << s->hash_bits;
848
    s->hash_mask = s->hash_size - 1;
849
    s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
850
 
851
    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
852
    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
853
    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));
854
 
855
    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */
856
 
857
    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
858
    s->pending_buf = (uchf *) overlay;
859
    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);
860
 
861
    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
862
        s->pending_buf == Z_NULL) {
863
        strm->msg = (const char*)ERR_MSG(Z_MEM_ERROR);
864
        deflateEnd (strm);
865
        return Z_MEM_ERROR;
866
    }
867
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
868
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;
869
 
870
    s->level = level;
871
    s->strategy = strategy;
872
    s->method = (Byte)method;
873
 
874
    return deflateReset(strm);
875
}
876
 
877
/* ========================================================================= */
878
int deflateSetDictionary (strm, dictionary, dictLength)
879
    z_streamp strm;
880
    const Bytef *dictionary;
881
    uInt  dictLength;
882
{
883
    deflate_state *s;
884
    uInt length = dictLength;
885
    uInt n;
886
    IPos hash_head = 0;
887
 
888
    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL)
889
        return Z_STREAM_ERROR;
890
 
891
    s = (deflate_state *) strm->state;
892
    if (s->status != INIT_STATE) return Z_STREAM_ERROR;
893
 
894
    strm->adler = adler32(strm->adler, dictionary, dictLength);
895
 
896
    if (length < MIN_MATCH) return Z_OK;
897
    if (length > MAX_DIST(s)) {
898
        length = MAX_DIST(s);
899
#ifndef USE_DICT_HEAD
900
        dictionary += dictLength - length; /* use the tail of the dictionary */
901
#endif
902
    }
903
    zmemcpy((charf *)s->window, dictionary, length);
904
    s->strstart = length;
905
    s->block_start = (long)length;
906
 
907
    /* Insert all strings in the hash table (except for the last two bytes).
908
     * s->lookahead stays null, so s->ins_h will be recomputed at the next
909
     * call of fill_window.
910
     */
911
    s->ins_h = s->window[0];
912
    UPDATE_HASH(s, s->ins_h, s->window[1]);
913
    for (n = 0; n <= length - MIN_MATCH; n++) {
914
        INSERT_STRING(s, n, hash_head);
915
    }
916
    if (hash_head) hash_head = 0;  /* to make compiler happy */
917
    return Z_OK;
918
}
919
 
920
/* ========================================================================= */
921
int deflateReset (strm)
922
    z_streamp strm;
923
{
924
    deflate_state *s;
925
 
926
    if (strm == Z_NULL || strm->state == Z_NULL ||
927
        strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;
928
 
929
    strm->total_in = strm->total_out = 0;
930
    strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
931
    strm->data_type = Z_UNKNOWN;
932
 
933
    s = (deflate_state *)strm->state;
934
    s->pending = 0;
935
    s->pending_out = s->pending_buf;
936
 
937
    if (s->noheader < 0) {
938
        s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
939
    }
940
    s->status = s->noheader ? BUSY_STATE : INIT_STATE;
941
    strm->adler = 1;
942
    s->last_flush = Z_NO_FLUSH;
943
 
944
    _tr_init(s);
945
    lm_init(s);
946
 
947
    return Z_OK;
948
}
949
 
950
/* ========================================================================= */
951
int deflateParams(strm, level, strategy)
952
    z_streamp strm;
953
    int level;
954
    int strategy;
955
{
956
    deflate_state *s;
957
    compress_func func;
958
    int err = Z_OK;
959
 
960
    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
961
    s = (deflate_state *) strm->state;
962
 
963
    if (level == Z_DEFAULT_COMPRESSION) {
964
        level = 6;
965
    }
966
    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
967
        return Z_STREAM_ERROR;
968
    }
969
    func = configuration_table[s->level].func;
970
 
971
    if (func != configuration_table[level].func && strm->total_in != 0) {
972
        /* Flush the last buffer: */
973
        err = deflate(strm, Z_PARTIAL_FLUSH);
974
    }
975
    if (s->level != level) {
976
        s->level = level;
977
        s->max_lazy_match   = configuration_table[level].max_lazy;
978
        s->good_match       = configuration_table[level].good_length;
979
        s->nice_match       = configuration_table[level].nice_length;
980
        s->max_chain_length = configuration_table[level].max_chain;
981
    }
982
    s->strategy = strategy;
983
    return err;
984
}
985
 
986
/* =========================================================================
987
 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
988
 * IN assertion: the stream state is correct and there is enough room in
989
 * pending_buf.
990
 */
991
local void putShortMSB (s, b)
992
    deflate_state *s;
993
    uInt b;
994
{
995
    put_byte(s, (Byte)(b >> 8));
996
    put_byte(s, (Byte)(b & 0xff));
997
}
998
 
999
/* =========================================================================
1000
 * Flush as much pending output as possible. All deflate() output goes
1001
 * through this function so some applications may wish to modify it
1002
 * to avoid allocating a large strm->next_out buffer and copying into it.
1003
 * (See also read_buf()).
1004
 */
1005
local void flush_pending(strm)
1006
    z_streamp strm;
1007
{
1008
    deflate_state *s = (deflate_state *) strm->state;
1009
    unsigned len = s->pending;
1010
 
1011
    if (len > strm->avail_out) len = strm->avail_out;
1012
    if (len == 0) return;
1013
 
1014
    if (strm->next_out != Z_NULL) {
1015
        zmemcpy(strm->next_out, s->pending_out, len);
1016
        strm->next_out += len;
1017
    }
1018
    s->pending_out += len;
1019
    strm->total_out += len;
1020
    strm->avail_out  -= len;
1021
    s->pending -= len;
1022
    if (s->pending == 0) {
1023
        s->pending_out = s->pending_buf;
1024
    }
1025
}
1026
 
1027
/* ========================================================================= */
1028
int deflate (strm, flush)
1029
    z_streamp strm;
1030
    int flush;
1031
{
1032
    int old_flush; /* value of flush param for previous deflate call */
1033
    deflate_state *s;
1034
 
1035
    if (strm == Z_NULL || strm->state == Z_NULL ||
1036
        flush > Z_FINISH || flush < 0) {
1037
        return Z_STREAM_ERROR;
1038
    }
1039
    s = (deflate_state *) strm->state;
1040
 
1041
    if ((strm->next_in == Z_NULL && strm->avail_in != 0) ||
1042
        (s->status == FINISH_STATE && flush != Z_FINISH)) {
1043
        ERR_RETURN(strm, Z_STREAM_ERROR);
1044
    }
1045
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);
1046
 
1047
    s->strm = strm; /* just in case */
1048
    old_flush = s->last_flush;
1049
    s->last_flush = flush;
1050
 
1051
    /* Write the zlib header */
1052
    if (s->status == INIT_STATE) {
1053
 
1054
        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
1055
        uInt level_flags = (s->level-1) >> 1;
1056
 
1057
        if (level_flags > 3) level_flags = 3;
1058
        header |= (level_flags << 6);
1059
        if (s->strstart != 0) header |= PRESET_DICT;
1060
        header += 31 - (header % 31);
1061
 
1062
        s->status = BUSY_STATE;
1063
        putShortMSB(s, header);
1064
 
1065
        /* Save the adler32 of the preset dictionary: */
1066
        if (s->strstart != 0) {
1067
            putShortMSB(s, (uInt)(strm->adler >> 16));
1068
            putShortMSB(s, (uInt)(strm->adler & 0xffff));
1069
        }
1070
        strm->adler = 1L;
1071
    }
1072
 
1073
    /* Flush as much pending output as possible */
1074
    if (s->pending != 0) {
1075
        flush_pending(strm);
1076
        if (strm->avail_out == 0) {
1077
            /* Since avail_out is 0, deflate will be called again with
1078
             * more output space, but possibly with both pending and
1079
             * avail_in equal to zero. There won't be anything to do,
1080
             * but this is not an error situation so make sure we
1081
             * return OK instead of BUF_ERROR at next call of deflate:
1082
             */
1083
            s->last_flush = -1;
1084
            return Z_OK;
1085
        }
1086
 
1087
    /* Make sure there is something to do and avoid duplicate consecutive
1088
     * flushes. For repeated and useless calls with Z_FINISH, we keep
1089
     * returning Z_STREAM_END instead of Z_BUFF_ERROR.
1090
     */
1091
    } else if (strm->avail_in == 0 && flush <= old_flush &&
1092
               flush != Z_FINISH) {
1093
        ERR_RETURN(strm, Z_BUF_ERROR);
1094
    }
1095
 
1096
    /* User must not provide more input after the first FINISH: */
1097
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
1098
        ERR_RETURN(strm, Z_BUF_ERROR);
1099
    }
1100
 
1101
    /* Start a new block or continue the current one.
1102
     */
1103
    if (strm->avail_in != 0 || s->lookahead != 0 ||
1104
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
1105
        block_state bstate;
1106
 
1107
        bstate = (*(configuration_table[s->level].func))(s, flush);
1108
 
1109
        if (bstate == finish_started || bstate == finish_done) {
1110
            s->status = FINISH_STATE;
1111
        }
1112
        if (bstate == need_more || bstate == finish_started) {
1113
            if (strm->avail_out == 0) {
1114
                s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
1115
            }
1116
            return Z_OK;
1117
            /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
1118
             * of deflate should use the same flush parameter to make sure
1119
             * that the flush is complete. So we don't have to output an
1120
             * empty block here, this will be done at next call. This also
1121
             * ensures that for a very small output buffer, we emit at most
1122
             * one empty block.
1123
             */
1124
        }
1125
        if (bstate == block_done) {
1126
            if (flush == Z_PARTIAL_FLUSH) {
1127
                _tr_align(s);
1128
            } else if (flush == Z_PACKET_FLUSH) {
1129
                /* Output just the 3-bit `stored' block type value,
1130
                   but not a zero length. */
1131
                _tr_stored_type_only(s);
1132
            } else { /* FULL_FLUSH or SYNC_FLUSH */
1133
                _tr_stored_block(s, (char*)0, 0L, 0);
1134
                /* For a full flush, this empty block will be recognized
1135
                 * as a special marker by inflate_sync().
1136
                 */
1137
                if (flush == Z_FULL_FLUSH) {
1138
                    CLEAR_HASH(s);             /* forget history */
1139
                }
1140
            }
1141
            flush_pending(strm);
1142
            if (strm->avail_out == 0) {
1143
              s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
1144
              return Z_OK;
1145
            }
1146
        }
1147
    }
1148
    Assert(strm->avail_out > 0, "bug2");
1149
 
1150
    if (flush != Z_FINISH) return Z_OK;
1151
    if (s->noheader) return Z_STREAM_END;
1152
 
1153
    /* Write the zlib trailer (adler32) */
1154
    putShortMSB(s, (uInt)(strm->adler >> 16));
1155
    putShortMSB(s, (uInt)(strm->adler & 0xffff));
1156
    flush_pending(strm);
1157
    /* If avail_out is zero, the application will call deflate again
1158
     * to flush the rest.
1159
     */
1160
    s->noheader = -1; /* write the trailer only once! */
1161
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
1162
}
1163
 
1164
/* ========================================================================= */
1165
int deflateEnd (strm)
1166
    z_streamp strm;
1167
{
1168
    int status;
1169
    deflate_state *s;
1170
 
1171
    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
1172
    s = (deflate_state *) strm->state;
1173
 
1174
    status = s->status;
1175
    if (status != INIT_STATE && status != BUSY_STATE &&
1176
        status != FINISH_STATE) {
1177
      return Z_STREAM_ERROR;
1178
    }
1179
 
1180
    /* Deallocate in reverse order of allocations: */
1181
    TRY_FREE(strm, s->pending_buf);
1182
    TRY_FREE(strm, s->head);
1183
    TRY_FREE(strm, s->prev);
1184
    TRY_FREE(strm, s->window);
1185
 
1186
    ZFREE(strm, s);
1187
    strm->state = Z_NULL;
1188
 
1189
    return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
1190
}
1191
 
1192
/* =========================================================================
1193
 * Copy the source state to the destination state.
1194
 */
1195
int deflateCopy (dest, source)
1196
    z_streamp dest;
1197
    z_streamp source;
1198
{
1199
    deflate_state *ds;
1200
    deflate_state *ss;
1201
    ushf *overlay;
1202
 
1203
    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL)
1204
        return Z_STREAM_ERROR;
1205
    ss = (deflate_state *) source->state;
1206
 
1207
    zmemcpy(dest, source, sizeof(*dest));
1208
 
1209
    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
1210
    if (ds == Z_NULL) return Z_MEM_ERROR;
1211
    dest->state = (struct internal_state FAR *) ds;
1212
    zmemcpy(ds, ss, sizeof(*ds));
1213
    ds->strm = dest;
1214
 
1215
    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
1216
    ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
1217
    ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
1218
    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
1219
    ds->pending_buf = (uchf *) overlay;
1220
 
1221
    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
1222
        ds->pending_buf == Z_NULL) {
1223
        deflateEnd (dest);
1224
        return Z_MEM_ERROR;
1225
    }
1226
    /* ??? following zmemcpy doesn't work for 16-bit MSDOS */
1227
    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
1228
    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
1229
    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
1230
    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);
1231
 
1232
    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
1233
    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
1234
    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;
1235
 
1236
    ds->l_desc.dyn_tree = ds->dyn_ltree;
1237
    ds->d_desc.dyn_tree = ds->dyn_dtree;
1238
    ds->bl_desc.dyn_tree = ds->bl_tree;
1239
 
1240
    return Z_OK;
1241
}
1242
 
1243
/* ===========================================================================
1244
 * Return the number of bytes of output which are immediately available
1245
 * for output from the decompressor.
1246
 */
1247
int deflateOutputPending (strm)
1248
    z_streamp strm;
1249
{
1250
    if (strm == Z_NULL || strm->state == Z_NULL) return 0;
1251
 
1252
    return ((deflate_state *)(strm->state))->pending;
1253
}
1254
 
1255
/* ===========================================================================
1256
 * Read a new buffer from the current input stream, update the adler32
1257
 * and total number of bytes read.  All deflate() input goes through
1258
 * this function so some applications may wish to modify it to avoid
1259
 * allocating a large strm->next_in buffer and copying from it.
1260
 * (See also flush_pending()).
1261
 */
1262
local int read_buf(strm, buf, size)
1263
    z_streamp strm;
1264
    charf *buf;
1265
    unsigned size;
1266
{
1267
    unsigned len = strm->avail_in;
1268
 
1269
    if (len > size) len = size;
1270
    if (len == 0) return 0;
1271
 
1272
    strm->avail_in  -= len;
1273
 
1274
    if (!((deflate_state *)(strm->state))->noheader) {
1275
        strm->adler = adler32(strm->adler, strm->next_in, len);
1276
    }
1277
    zmemcpy(buf, strm->next_in, len);
1278
    strm->next_in  += len;
1279
    strm->total_in += len;
1280
 
1281
    return (int)len;
1282
}
1283
 
1284
/* ===========================================================================
1285
 * Initialize the "longest match" routines for a new zlib stream
1286
 */
1287
local void lm_init (s)
1288
    deflate_state *s;
1289
{
1290
    s->window_size = (ulg)2L*s->w_size;
1291
 
1292
    CLEAR_HASH(s);
1293
 
1294
    /* Set the default configuration parameters:
1295
     */
1296
    s->max_lazy_match   = configuration_table[s->level].max_lazy;
1297
    s->good_match       = configuration_table[s->level].good_length;
1298
    s->nice_match       = configuration_table[s->level].nice_length;
1299
    s->max_chain_length = configuration_table[s->level].max_chain;
1300
 
1301
    s->strstart = 0;
1302
    s->block_start = 0L;
1303
    s->lookahead = 0;
1304
    s->match_length = s->prev_length = MIN_MATCH-1;
1305
    s->match_available = 0;
1306
    s->ins_h = 0;
1307
#ifdef ASMV
1308
    match_init(); /* initialize the asm code */
1309
#endif
1310
}
1311
 
1312
/* ===========================================================================
1313
 * Set match_start to the longest match starting at the given string and
1314
 * return its length. Matches shorter or equal to prev_length are discarded,
1315
 * in which case the result is equal to prev_length and match_start is
1316
 * garbage.
1317
 * IN assertions: cur_match is the head of the hash chain for the current
1318
 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
1319
 * OUT assertion: the match length is not greater than s->lookahead.
1320
 */
1321
#ifndef ASMV
1322
/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
1323
 * match.S. The code will be functionally equivalent.
1324
 */
1325
local uInt longest_match(s, cur_match)
1326
    deflate_state *s;
1327
    IPos cur_match;                             /* current match */
1328
{
1329
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
1330
    register Bytef *scan = s->window + s->strstart; /* current string */
1331
    register Bytef *match;                       /* matched string */
1332
    register int len;                           /* length of current match */
1333
    int best_len = s->prev_length;              /* best match length so far */
1334
    int nice_match = s->nice_match;             /* stop if match long enough */
1335
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
1336
        s->strstart - (IPos)MAX_DIST(s) : NIL;
1337
    /* Stop when cur_match becomes <= limit. To simplify the code,
1338
     * we prevent matches with the string of window index 0.
1339
     */
1340
    Posf *prev = s->prev;
1341
    uInt wmask = s->w_mask;
1342
 
1343
#ifdef UNALIGNED_OK
1344
    /* Compare two bytes at a time. Note: this is not always beneficial.
1345
     * Try with and without -DUNALIGNED_OK to check.
1346
     */
1347
    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
1348
    register ush scan_start = *(ushf*)scan;
1349
    register ush scan_end   = *(ushf*)(scan+best_len-1);
1350
#else
1351
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
1352
    register Byte scan_end1  = scan[best_len-1];
1353
    register Byte scan_end   = scan[best_len];
1354
#endif
1355
 
1356
    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
1357
     * It is easy to get rid of this optimization if necessary.
1358
     */
1359
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
1360
 
1361
    /* Do not waste too much time if we already have a good match: */
1362
    if (s->prev_length >= s->good_match) {
1363
        chain_length >>= 2;
1364
    }
1365
    /* Do not look for matches beyond the end of the input. This is necessary
1366
     * to make deflate deterministic.
1367
     */
1368
    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;
1369
 
1370
    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
1371
 
1372
    do {
1373
        Assert(cur_match < s->strstart, "no future");
1374
        match = s->window + cur_match;
1375
 
1376
        /* Skip to next match if the match length cannot increase
1377
         * or if the match length is less than 2:
1378
         */
1379
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
1380
        /* This code assumes sizeof(unsigned short) == 2. Do not use
1381
         * UNALIGNED_OK if your compiler uses a different size.
1382
         */
1383
        if (*(ushf*)(match+best_len-1) != scan_end ||
1384
            *(ushf*)match != scan_start) continue;
1385
 
1386
        /* It is not necessary to compare scan[2] and match[2] since they are
1387
         * always equal when the other bytes match, given that the hash keys
1388
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
1389
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
1390
         * lookahead only every 4th comparison; the 128th check will be made
1391
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
1392
         * necessary to put more guard bytes at the end of the window, or
1393
         * to check more often for insufficient lookahead.
1394
         */
1395
        Assert(scan[2] == match[2], "scan[2]?");
1396
        scan++, match++;
1397
        do {
1398
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1399
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1400
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1401
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1402
                 scan < strend);
1403
        /* The funny "do {}" generates better code on most compilers */
1404
 
1405
        /* Here, scan <= window+strstart+257 */
1406
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
1407
        if (*scan == *match) scan++;
1408
 
1409
        len = (MAX_MATCH - 1) - (int)(strend-scan);
1410
        scan = strend - (MAX_MATCH-1);
1411
 
1412
#else /* UNALIGNED_OK */
1413
 
1414
        if (match[best_len]   != scan_end  ||
1415
            match[best_len-1] != scan_end1 ||
1416
            *match            != *scan     ||
1417
            *++match          != scan[1])      continue;
1418
 
1419
        /* The check at best_len-1 can be removed because it will be made
1420
         * again later. (This heuristic is not always a win.)
1421
         * It is not necessary to compare scan[2] and match[2] since they
1422
         * are always equal when the other bytes match, given that
1423
         * the hash keys are equal and that HASH_BITS >= 8.
1424
         */
1425
        scan += 2, match++;
1426
        Assert(*scan == *match, "match[2]?");
1427
 
1428
        /* We check for insufficient lookahead only every 8th comparison;
1429
         * the 256th check will be made at strstart+258.
1430
         */
1431
        do {
1432
        } while (*++scan == *++match && *++scan == *++match &&
1433
                 *++scan == *++match && *++scan == *++match &&
1434
                 *++scan == *++match && *++scan == *++match &&
1435
                 *++scan == *++match && *++scan == *++match &&
1436
                 scan < strend);
1437
 
1438
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
1439
 
1440
        len = MAX_MATCH - (int)(strend - scan);
1441
        scan = strend - MAX_MATCH;
1442
 
1443
#endif /* UNALIGNED_OK */
1444
 
1445
        if (len > best_len) {
1446
            s->match_start = cur_match;
1447
            best_len = len;
1448
            if (len >= nice_match) break;
1449
#ifdef UNALIGNED_OK
1450
            scan_end = *(ushf*)(scan+best_len-1);
1451
#else
1452
            scan_end1  = scan[best_len-1];
1453
            scan_end   = scan[best_len];
1454
#endif
1455
        }
1456
    } while ((cur_match = prev[cur_match & wmask]) > limit
1457
             && --chain_length != 0);
1458
 
1459
    if ((uInt)best_len <= s->lookahead) return best_len;
1460
    return s->lookahead;
1461
}
1462
#endif /* ASMV */
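/* Illustrative sketch, not part of the original zlib sources (kept out of
 * the build by the #if 0 guard): a naive reference version of the scan
 * performed by longest_match() above.  It compares the string at window
 * position `cur` against the string at `strstart` and returns the match
 * length, capped at `max_match` (MAX_MATCH in deflate) and at the valid
 * lookahead.  The real routine adds hash-chain walking, the
 * scan_end/scan_start prefilter and the unrolled comparison loops; the
 * names below are hypothetical.
 */
#if 0
static unsigned naive_match_len(const unsigned char *win,
                                unsigned strstart, unsigned cur,
                                unsigned lookahead, unsigned max_match)
{
    unsigned len = 0;
    unsigned limit = lookahead < max_match ? lookahead : max_match;

    /* caller guarantees `limit` valid bytes at both positions */
    while (len < limit && win[cur + len] == win[strstart + len])
        len++;
    return len;  /* candidate for best_len on this chain entry */
}
#endif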
1463
 
1464
#ifdef DEBUG_ZLIB
1465
/* ===========================================================================
1466
 * Check that the match at match_start is indeed a match.
1467
 */
1468
local void check_match(s, start, match, length)
1469
    deflate_state *s;
1470
    IPos start, match;
1471
    int length;
1472
{
1473
    /* check that the match is indeed a match */
1474
    if (zmemcmp((charf *)s->window + match,
1475
                (charf *)s->window + start, length) != EQUAL) {
1476
        fprintf(stderr, " start %u, match %u, length %d\n",
1477
                start, match, length);
1478
        do {
1479
            fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
1480
        } while (--length != 0);
1481
        z_error("invalid match");
1482
    }
1483
    if (z_verbose > 1) {
1484
        fprintf(stderr,"\\[%d,%d]", start-match, length);
1485
        do { putc(s->window[start++], stderr); } while (--length != 0);
1486
    }
1487
}
1488
#else
1489
#  define check_match(s, start, match, length)
1490
#endif
1491
 
1492
/* ===========================================================================
1493
 * Fill the window when the lookahead becomes insufficient.
1494
 * Updates strstart and lookahead.
1495
 *
1496
 * IN assertion: lookahead < MIN_LOOKAHEAD
1497
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
1498
 *    At least one byte has been read, or avail_in == 0; reads are
1499
 *    performed for at least two bytes (required for the zip translate_eol
1500
 *    option -- not supported here).
1501
 */
1502
local void fill_window(s)
1503
    deflate_state *s;
1504
{
1505
    register unsigned n, m;
1506
    register Posf *p;
1507
    unsigned more;    /* Amount of free space at the end of the window. */
1508
    uInt wsize = s->w_size;
1509
 
1510
    do {
1511
        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
1512
 
1513
        /* Deal with !@#$% 64K limit: */
1514
        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
1515
            more = wsize;
1516
 
1517
        } else if (more == (unsigned)(-1)) {
1518
            /* Very unlikely, but possible on 16 bit machine if strstart == 0
1519
             * and lookahead == 1 (input done one byte at a time)
1520
             */
1521
            more--;
1522
 
1523
        /* If the window is almost full and there is insufficient lookahead,
1524
         * move the upper half to the lower one to make room in the upper half.
1525
         */
1526
        } else if (s->strstart >= wsize+MAX_DIST(s)) {
1527
 
1528
            zmemcpy((charf *)s->window, (charf *)s->window+wsize,
1529
                   (unsigned)wsize);
1530
            s->match_start -= wsize;
1531
            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
1532
            s->block_start -= (long) wsize;
1533
 
1534
            /* Slide the hash table (could be avoided with 32 bit values
1535
               at the expense of memory usage). We slide even when level == 0
1536
               to keep the hash table consistent if we switch back to level > 0
1537
               later. (Using level 0 permanently is not an optimal usage of
1538
               zlib, so we don't care about this pathological case.)
1539
             */
1540
            n = s->hash_size;
1541
            p = &s->head[n];
1542
            do {
1543
                m = *--p;
1544
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
1545
            } while (--n);
1546
 
1547
            n = wsize;
1548
            p = &s->prev[n];
1549
            do {
1550
                m = *--p;
1551
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
1552
                /* If n is not on any hash chain, prev[n] is garbage but
1553
                 * its value will never be used.
1554
                 */
1555
            } while (--n);
1556
            more += wsize;
1557
        }
1558
        if (s->strm->avail_in == 0) return;
1559
 
1560
        /* If there was no sliding:
1561
         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
1562
         *    more == window_size - lookahead - strstart
1563
         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
1564
         * => more >= window_size - 2*WSIZE + 2
1565
         * In the BIG_MEM or MMAP case (not yet supported),
1566
         *   window_size == input_size + MIN_LOOKAHEAD  &&
1567
         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
1568
         * Otherwise, window_size == 2*WSIZE so more >= 2.
1569
         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
1570
         */
1571
        Assert(more >= 2, "more < 2");
1572
 
1573
        n = read_buf(s->strm, (charf *)s->window + s->strstart + s->lookahead,
1574
                     more);
1575
        s->lookahead += n;
1576
 
1577
        /* Initialize the hash value now that we have some input: */
1578
        if (s->lookahead >= MIN_MATCH) {
1579
            s->ins_h = s->window[s->strstart];
1580
            UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
1581
#if MIN_MATCH != 3
1582
            Call UPDATE_HASH() MIN_MATCH-3 more times
1583
#endif
1584
        }
1585
        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
1586
         * but this is not important since only literal bytes will be emitted.
1587
         */
1588
 
1589
    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
1590
}
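/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): when fill_window() slides the window by wsize above, every
 * hash-chain entry is rebased by wsize and positions that fall out of the
 * window become NIL.  A standalone version of that rebase loop, with
 * hypothetical names, looks like this; `count` must be non-zero.
 */
#if 0
static void rebase_chain(unsigned short *table, unsigned count,
                         unsigned wsize, unsigned short nil)
{
    unsigned short *p = table + count;
    unsigned m;

    do {
        m = *--p;
        /* keep positions still inside the window, drop the rest */
        *p = (unsigned short)(m >= wsize ? m - wsize : nil);
    } while (--count);
}
#endif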
1591
 
1592
/* ===========================================================================
1593
 * Flush the current block, with given end-of-file flag.
1594
 * IN assertion: strstart is set to the end of the current match.
1595
 */
1596
#define FLUSH_BLOCK_ONLY(s, eof) { \
1597
   _tr_flush_block(s, (s->block_start >= 0L ? \
1598
                   (charf *)&s->window[(unsigned)s->block_start] : \
1599
                   (charf *)Z_NULL), \
1600
                (ulg)((long)s->strstart - s->block_start), \
1601
                (eof)); \
1602
   s->block_start = s->strstart; \
1603
   flush_pending(s->strm); \
1604
   Tracev((stderr,"[FLUSH]")); \
1605
}
1606
 
1607
/* Same but force premature exit if necessary. */
1608
#define FLUSH_BLOCK(s, eof) { \
1609
   FLUSH_BLOCK_ONLY(s, eof); \
1610
   if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \
1611
}
1612
 
1613
/* ===========================================================================
1614
 * Copy without compression as much as possible from the input stream, return
1615
 * the current block state.
1616
 * This function does not insert new strings in the dictionary since
1617
 * incompressible data is probably not useful. This function is used
1618
 * only for the level=0 compression option.
1619
 * NOTE: this function should be optimized to avoid extra copying from
1620
 * window to pending_buf.
1621
 */
1622
local block_state deflate_stored(s, flush)
1623
    deflate_state *s;
1624
    int flush;
1625
{
1626
    /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
1627
     * to pending_buf_size, and each stored block has a 5 byte header:
1628
     */
1629
    ulg max_block_size = 0xffff;
1630
    ulg max_start;
1631
 
1632
    if (max_block_size > s->pending_buf_size - 5) {
1633
        max_block_size = s->pending_buf_size - 5;
1634
    }
1635
 
1636
    /* Copy as much as possible from input to output: */
1637
    for (;;) {
1638
        /* Fill the window as much as possible: */
1639
        if (s->lookahead <= 1) {
1640
 
1641
            Assert(s->strstart < s->w_size+MAX_DIST(s) ||
1642
                   s->block_start >= (long)s->w_size, "slide too late");
1643
 
1644
            fill_window(s);
1645
            if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more;
1646
 
1647
            if (s->lookahead == 0) break; /* flush the current block */
1648
        }
1649
        Assert(s->block_start >= 0L, "block gone");
1650
 
1651
        s->strstart += s->lookahead;
1652
        s->lookahead = 0;
1653
 
1654
        /* Emit a stored block if pending_buf will be full: */
1655
        max_start = s->block_start + max_block_size;
1656
        if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
1657
            /* strstart == 0 is possible after wraparound on a 16-bit machine */
1658
            s->lookahead = (uInt)(s->strstart - max_start);
1659
            s->strstart = (uInt)max_start;
1660
            FLUSH_BLOCK(s, 0);
1661
        }
1662
        /* Flush if we may have to slide, otherwise block_start may become
1663
         * negative and the data will be gone:
1664
         */
1665
        if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
1666
            FLUSH_BLOCK(s, 0);
1667
        }
1668
    }
1669
    FLUSH_BLOCK(s, flush == Z_FINISH);
1670
    return flush == Z_FINISH ? finish_done : block_done;
1671
}
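/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): deflate_stored() above caps each stored block at the smaller of
 * 0xffff (the 16-bit LEN field of the stored-block format) and
 * pending_buf_size - 5 (room for the 5-byte stored-block header).  A
 * hypothetical helper expressing that bound:
 */
#if 0
static unsigned long stored_block_limit(unsigned long pending_buf_size)
{
    unsigned long max_block_size = 0xffffUL;

    if (max_block_size > pending_buf_size - 5)
        max_block_size = pending_buf_size - 5;
    return max_block_size;
}
#endif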
1672
 
1673
/* ===========================================================================
1674
 * Compress as much as possible from the input stream, return the current
1675
 * block state.
1676
 * This function does not perform lazy evaluation of matches and inserts
1677
 * new strings in the dictionary only for unmatched strings or for short
1678
 * matches. It is used only for the fast compression options.
1679
 */
1680
local block_state deflate_fast(s, flush)
1681
    deflate_state *s;
1682
    int flush;
1683
{
1684
    IPos hash_head = NIL; /* head of the hash chain */
1685
    int bflush;           /* set if current block must be flushed */
1686
 
1687
    for (;;) {
1688
        /* Make sure that we always have enough lookahead, except
1689
         * at the end of the input file. We need MAX_MATCH bytes
1690
         * for the next match, plus MIN_MATCH bytes to insert the
1691
         * string following the next match.
1692
         */
1693
        if (s->lookahead < MIN_LOOKAHEAD) {
1694
            fill_window(s);
1695
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
1696
                return need_more;
1697
            }
1698
            if (s->lookahead == 0) break; /* flush the current block */
1699
        }
1700
 
1701
        /* Insert the string window[strstart .. strstart+2] in the
1702
         * dictionary, and set hash_head to the head of the hash chain:
1703
         */
1704
        if (s->lookahead >= MIN_MATCH) {
1705
            INSERT_STRING(s, s->strstart, hash_head);
1706
        }
1707
 
1708
        /* Find the longest match, discarding those <= prev_length.
1709
         * At this point we have always match_length < MIN_MATCH
1710
         */
1711
        if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
1712
            /* To simplify the code, we prevent matches with the string
1713
             * of window index 0 (in particular we have to avoid a match
1714
             * of the string with itself at the start of the input file).
1715
             */
1716
            if (s->strategy != Z_HUFFMAN_ONLY) {
1717
                s->match_length = longest_match (s, hash_head);
1718
            }
1719
            /* longest_match() sets match_start */
1720
        }
1721
        if (s->match_length >= MIN_MATCH) {
1722
            check_match(s, s->strstart, s->match_start, s->match_length);
1723
 
1724
            bflush = _tr_tally(s, s->strstart - s->match_start,
1725
                               s->match_length - MIN_MATCH);
1726
 
1727
            s->lookahead -= s->match_length;
1728
 
1729
            /* Insert new strings in the hash table only if the match length
1730
             * is not too large. This saves time but degrades compression.
1731
             */
1732
            if (s->match_length <= s->max_insert_length &&
1733
                s->lookahead >= MIN_MATCH) {
1734
                s->match_length--; /* string at strstart already in hash table */
1735
                do {
1736
                    s->strstart++;
1737
                    INSERT_STRING(s, s->strstart, hash_head);
1738
                    /* strstart never exceeds WSIZE-MAX_MATCH, so there are
1739
                     * always MIN_MATCH bytes ahead.
1740
                     */
1741
                } while (--s->match_length != 0);
1742
                s->strstart++;
1743
            } else {
1744
                s->strstart += s->match_length;
1745
                s->match_length = 0;
1746
                s->ins_h = s->window[s->strstart];
1747
                UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
1748
#if MIN_MATCH != 3
1749
                Call UPDATE_HASH() MIN_MATCH-3 more times
1750
#endif
1751
                /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
1752
                 * matter since it will be recomputed at next deflate call.
1753
                 */
1754
            }
1755
        } else {
1756
            /* No match, output a literal byte */
1757
            Tracevv((stderr,"%c", s->window[s->strstart]));
1758
            bflush = _tr_tally (s, 0, s->window[s->strstart]);
1759
            s->lookahead--;
1760
            s->strstart++;
1761
        }
1762
        if (bflush) FLUSH_BLOCK(s, 0);
1763
    }
1764
    FLUSH_BLOCK(s, flush == Z_FINISH);
1765
    return flush == Z_FINISH ? finish_done : block_done;
1766
}
1767
 
1768
/* ===========================================================================
1769
 * Same as above, but achieves better compression. We use a lazy
1770
 * evaluation for matches: a match is finally adopted only if there is
1771
 * no better match at the next window position.
1772
 */
1773
local block_state deflate_slow(s, flush)
1774
    deflate_state *s;
1775
    int flush;
1776
{
1777
    IPos hash_head = NIL;    /* head of hash chain */
1778
    int bflush;              /* set if current block must be flushed */
1779
 
1780
    /* Process the input block. */
1781
    for (;;) {
1782
        /* Make sure that we always have enough lookahead, except
1783
         * at the end of the input file. We need MAX_MATCH bytes
1784
         * for the next match, plus MIN_MATCH bytes to insert the
1785
         * string following the next match.
1786
         */
1787
        if (s->lookahead < MIN_LOOKAHEAD) {
1788
            fill_window(s);
1789
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
1790
                return need_more;
1791
            }
1792
            if (s->lookahead == 0) break; /* flush the current block */
1793
        }
1794
 
1795
        /* Insert the string window[strstart .. strstart+2] in the
1796
         * dictionary, and set hash_head to the head of the hash chain:
1797
         */
1798
        if (s->lookahead >= MIN_MATCH) {
1799
            INSERT_STRING(s, s->strstart, hash_head);
1800
        }
1801
 
1802
        /* Find the longest match, discarding those <= prev_length.
1803
         */
1804
        s->prev_length = s->match_length, s->prev_match = s->match_start;
1805
        s->match_length = MIN_MATCH-1;
1806
 
1807
        if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
1808
            s->strstart - hash_head <= MAX_DIST(s)) {
1809
            /* To simplify the code, we prevent matches with the string
1810
             * of window index 0 (in particular we have to avoid a match
1811
             * of the string with itself at the start of the input file).
1812
             */
1813
            if (s->strategy != Z_HUFFMAN_ONLY) {
1814
                s->match_length = longest_match (s, hash_head);
1815
            }
1816
            /* longest_match() sets match_start */
1817
 
1818
            if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
1819
                 (s->match_length == MIN_MATCH &&
1820
                  s->strstart - s->match_start > TOO_FAR))) {
1821
 
1822
                /* If prev_match is also MIN_MATCH, match_start is garbage
1823
                 * but we will ignore the current match anyway.
1824
                 */
1825
                s->match_length = MIN_MATCH-1;
1826
            }
1827
        }
1828
        /* If there was a match at the previous step and the current
1829
         * match is not better, output the previous match:
1830
         */
1831
        if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
1832
            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
1833
            /* Do not insert strings in hash table beyond this. */
1834
 
1835
            check_match(s, s->strstart-1, s->prev_match, s->prev_length);
1836
 
1837
            bflush = _tr_tally(s, s->strstart -1 - s->prev_match,
1838
                               s->prev_length - MIN_MATCH);
1839
 
1840
            /* Insert in hash table all strings up to the end of the match.
1841
             * strstart-1 and strstart are already inserted. If there is not
1842
             * enough lookahead, the last two strings are not inserted in
1843
             * the hash table.
1844
             */
1845
            s->lookahead -= s->prev_length-1;
1846
            s->prev_length -= 2;
1847
            do {
1848
                if (++s->strstart <= max_insert) {
1849
                    INSERT_STRING(s, s->strstart, hash_head);
1850
                }
1851
            } while (--s->prev_length != 0);
1852
            s->match_available = 0;
1853
            s->match_length = MIN_MATCH-1;
1854
            s->strstart++;
1855
 
1856
            if (bflush) FLUSH_BLOCK(s, 0);
1857
 
1858
        } else if (s->match_available) {
1859
            /* If there was no match at the previous position, output a
1860
             * single literal. If there was a match but the current match
1861
             * is longer, truncate the previous match to a single literal.
1862
             */
1863
            Tracevv((stderr,"%c", s->window[s->strstart-1]));
1864
            if (_tr_tally (s, 0, s->window[s->strstart-1])) {
1865
                FLUSH_BLOCK_ONLY(s, 0);
1866
            }
1867
            s->strstart++;
1868
            s->lookahead--;
1869
            if (s->strm->avail_out == 0) return need_more;
1870
        } else {
1871
            /* There is no previous match to compare with, wait for
1872
             * the next step to decide.
1873
             */
1874
            s->match_available = 1;
1875
            s->strstart++;
1876
            s->lookahead--;
1877
        }
1878
    }
1879
    Assert (flush != Z_NO_FLUSH, "no flush?");
1880
    if (s->match_available) {
1881
        Tracevv((stderr,"%c", s->window[s->strstart-1]));
1882
        _tr_tally (s, 0, s->window[s->strstart-1]);
1883
        s->match_available = 0;
1884
    }
1885
    FLUSH_BLOCK(s, flush == Z_FINISH);
1886
    return flush == Z_FINISH ? finish_done : block_done;
1887
}
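/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): the core of the lazy evaluation in deflate_slow() above is the
 * rule "emit the match found at the previous position only if the match at
 * the current position is not longer; otherwise demote the previous
 * position to a literal".  A hypothetical helper restating that test (the
 * real code also handles TOO_FAR, Z_FILTERED and the hash bookkeeping):
 */
#if 0
static int emit_previous_match(unsigned prev_length, unsigned match_length,
                               unsigned min_match)  /* MIN_MATCH, i.e. 3 */
{
    return prev_length >= min_match && match_length <= prev_length;
}
#endif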
1888
/* --- deflate.c */
1889
 
1890
/* +++ trees.c */
1891
/* trees.c -- output deflated data using Huffman coding
1892
 * Copyright (C) 1995-1996 Jean-loup Gailly
1893
 * For conditions of distribution and use, see copyright notice in zlib.h
1894
 */
1895
 
1896
/*
1897
 *  ALGORITHM
1898
 *
1899
 *      The "deflation" process uses several Huffman trees. The more
1900
 *      common source values are represented by shorter bit sequences.
1901
 *
1902
 *      Each code tree is stored in a compressed form which is itself
1903
 * a Huffman encoding of the lengths of all the code strings (in
1904
 * ascending order by source values).  The actual code strings are
1905
 * reconstructed from the lengths in the inflate process, as described
1906
 * in the deflate specification.
1907
 *
1908
 *  REFERENCES
1909
 *
1910
 *      Deutsch, L.P., "'Deflate' Compressed Data Format Specification".
1911
 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
1912
 *
1913
 *      Storer, James A.
1914
 *          Data Compression:  Methods and Theory, pp. 49-50.
1915
 *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
1916
 *
1917
 *      Sedgewick, R.
1918
 *          Algorithms, p290.
1919
 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
1920
 */
1921
 
1922
/* From: trees.c,v 1.11 1996/07/24 13:41:06 me Exp $ */
1923
 
1924
/* #include "deflate.h" */
1925
 
1926
#ifdef DEBUG_ZLIB
1927
#  include <ctype.h>
1928
#endif
1929
 
1930
/* ===========================================================================
1931
 * Constants
1932
 */
1933
 
1934
#define MAX_BL_BITS 7
1935
/* Bit length codes must not exceed MAX_BL_BITS bits */
1936
 
1937
#define END_BLOCK 256
1938
/* end of block literal code */
1939
 
1940
#define REP_3_6      16
1941
/* repeat previous bit length 3-6 times (2 bits of repeat count) */
1942
 
1943
#define REPZ_3_10    17
1944
/* repeat a zero length 3-10 times  (3 bits of repeat count) */
1945
 
1946
#define REPZ_11_138  18
1947
/* repeat a zero length 11-138 times  (7 bits of repeat count) */
1948
 
1949
local int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
1950
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
1951
 
1952
local int extra_dbits[D_CODES] /* extra bits for each distance code */
1953
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};
1954
 
1955
local int extra_blbits[BL_CODES]/* extra bits for each bit length code */
1956
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};
1957
 
1958
local uch bl_order[BL_CODES]
1959
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
1960
/* The lengths of the bit length codes are sent in order of decreasing
1961
 * probability, to avoid transmitting the lengths for unused bit length codes.
1962
 */
1963
 
1964
#define Buf_size (8 * 2*sizeof(char))
1965
/* Number of bits used within bi_buf. (bi_buf might be implemented on
1966
 * more than 16 bits on some systems.)
1967
 */
1968
 
1969
/* ===========================================================================
1970
 * Local data. These are initialized only once.
1971
 */
1972
 
1973
local ct_data static_ltree[L_CODES+2];
1974
/* The static literal tree. Since the bit lengths are imposed, there is no
1975
 * need for the L_CODES extra codes used during heap construction. However
1976
 * the codes 286 and 287 are needed to build a canonical tree (see _tr_init
1977
 * below).
1978
 */
1979
 
1980
local ct_data static_dtree[D_CODES];
1981
/* The static distance tree. (Actually a trivial tree since all codes use
1982
 * 5 bits.)
1983
 */
1984
 
1985
local uch dist_code[512];
1986
/* distance codes. The first 256 values correspond to the distances
1987
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
1988
 * the 15 bit distances.
1989
 */
1990
 
1991
local uch length_code[MAX_MATCH-MIN_MATCH+1];
1992
/* length code for each normalized match length (0 == MIN_MATCH) */
1993
 
1994
local int base_length[LENGTH_CODES];
1995
/* First normalized length for each code (0 = MIN_MATCH) */
1996
 
1997
local int base_dist[D_CODES];
1998
/* First normalized distance for each code (0 = distance of 1) */
1999
 
2000
struct static_tree_desc_s {
2001
    ct_data *static_tree;        /* static tree or NULL */
2002
    intf    *extra_bits;         /* extra bits for each code or NULL */
2003
    int     extra_base;          /* base index for extra_bits */
2004
    int     elems;               /* max number of elements in the tree */
2005
    int     max_length;          /* max bit length for the codes */
2006
};
2007
 
2008
local static_tree_desc  static_l_desc =
2009
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
2010
 
2011
local static_tree_desc  static_d_desc =
2012
{static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};
2013
 
2014
local static_tree_desc  static_bl_desc =
2015
{(ct_data *)0, extra_blbits, 0,      BL_CODES, MAX_BL_BITS};
2016
 
2017
/* ===========================================================================
2018
 * Local (static) routines in this file.
2019
 */
2020
 
2021
local void tr_static_init OF((void));
2022
local void init_block     OF((deflate_state *s));
2023
local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
2024
local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
2025
local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
2026
local void build_tree     OF((deflate_state *s, tree_desc *desc));
2027
local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
2028
local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
2029
local int  build_bl_tree  OF((deflate_state *s));
2030
local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
2031
                              int blcodes));
2032
local void compress_block OF((deflate_state *s, ct_data *ltree,
2033
                              ct_data *dtree));
2034
local void set_data_type  OF((deflate_state *s));
2035
local unsigned bi_reverse OF((unsigned value, int length));
2036
local void bi_windup      OF((deflate_state *s));
2037
local void bi_flush       OF((deflate_state *s));
2038
local void copy_block     OF((deflate_state *s, charf *buf, unsigned len,
2039
                              int header));
2040
 
2041
#ifndef DEBUG_ZLIB
2042
#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
2043
   /* Send a code of the given tree. c and tree must not have side effects */
2044
 
2045
#else /* DEBUG_ZLIB */
2046
#  define send_code(s, c, tree) \
2047
     { if (verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
2048
       send_bits(s, tree[c].Code, tree[c].Len); }
2049
#endif
2050
 
2051
#define d_code(dist) \
2052
   ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
2053
/* Mapping from a distance to a distance code. dist is the distance - 1 and
2054
 * must not have side effects. dist_code[256] and dist_code[257] are never
2055
 * used.
2056
 */
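/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): the d_code() macro above spelled out as a function.  dist is the
 * match distance minus one; the first 256 entries of dist_code[] cover the
 * short distances directly, the next 256 are indexed by the top bits
 * (dist >> 7) of the longer ones.
 */
#if 0
static unsigned char distance_to_code(const unsigned char dist_code[512],
                                      unsigned dist /* distance - 1 */)
{
    return dist < 256 ? dist_code[dist] : dist_code[256 + (dist >> 7)];
}
#endif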
2057
 
2058
/* ===========================================================================
2059
 * Output a short LSB first on the stream.
2060
 * IN assertion: there is enough room in pendingBuf.
2061
 */
2062
#define put_short(s, w) { \
2063
    put_byte(s, (uch)((w) & 0xff)); \
2064
    put_byte(s, (uch)((ush)(w) >> 8)); \
2065
}
2066
 
2067
/* ===========================================================================
2068
 * Send a value on a given number of bits.
2069
 * IN assertion: length <= 16 and value fits in length bits.
2070
 */
2071
#ifdef DEBUG_ZLIB
2072
local void send_bits      OF((deflate_state *s, int value, int length));
2073
 
2074
local void send_bits(s, value, length)
2075
    deflate_state *s;
2076
    int value;  /* value to send */
2077
    int length; /* number of bits */
2078
{
2079
    Tracevv((stderr," l %2d v %4x ", length, value));
2080
    Assert(length > 0 && length <= 15, "invalid length");
2081
    s->bits_sent += (ulg)length;
2082
 
2083
    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
2084
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
2085
     * unused bits in value.
2086
     */
2087
    if (s->bi_valid > (int)Buf_size - length) {
2088
        s->bi_buf |= (value << s->bi_valid);
2089
        put_short(s, s->bi_buf);
2090
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
2091
        s->bi_valid += length - Buf_size;
2092
    } else {
2093
        s->bi_buf |= value << s->bi_valid;
2094
        s->bi_valid += length;
2095
    }
2096
}
2097
#else /* !DEBUG_ZLIB */
2098
 
2099
#define send_bits(s, value, length) \
2100
{ int len = length;\
2101
  if (s->bi_valid > (int)Buf_size - len) {\
2102
    int val = value;\
2103
    s->bi_buf |= (val << s->bi_valid);\
2104
    put_short(s, s->bi_buf);\
2105
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
2106
    s->bi_valid += len - Buf_size;\
2107
  } else {\
2108
    s->bi_buf |= (value) << s->bi_valid;\
2109
    s->bi_valid += len;\
2110
  }\
2111
}
2112
#endif /* DEBUG_ZLIB */
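/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): send_bits() above packs codes LSB-first into a 16-bit holding
 * buffer and flushes two bytes, low byte first, whenever the buffer would
 * overflow.  A standalone version of the same idea writing into a plain
 * byte array, with hypothetical names:
 */
#if 0
typedef struct {
    unsigned short bit_buf;    /* bits not yet written out */
    int            bit_valid;  /* number of valid bits in bit_buf */
    unsigned char *out;        /* output byte stream */
} bitwriter;

static void put_bits(bitwriter *w, unsigned value, int length)
{
    if (w->bit_valid > 16 - length) {
        w->bit_buf |= (unsigned short)(value << w->bit_valid);
        *w->out++ = (unsigned char)(w->bit_buf & 0xff);
        *w->out++ = (unsigned char)(w->bit_buf >> 8);
        w->bit_buf = (unsigned short)(value >> (16 - w->bit_valid));
        w->bit_valid += length - 16;
    } else {
        w->bit_buf |= (unsigned short)(value << w->bit_valid);
        w->bit_valid += length;
    }
}
#endif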
2113
 
2114
 
2115
#define MAX(a,b) (a >= b ? a : b)
2116
/* the arguments must not have side effects */
2117
 
2118
/* ===========================================================================
2119
 * Initialize the various 'constant' tables. In a multi-threaded environment,
2120
 * this function may be called by two threads concurrently, but this is
2121
 * harmless since both invocations do exactly the same thing.
2122
 */
2123
local void tr_static_init()
2124
{
2125
    static int static_init_done = 0;
2126
    int n;        /* iterates over tree elements */
2127
    int bits;     /* bit counter */
2128
    int length;   /* length value */
2129
    int code;     /* code value */
2130
    int dist;     /* distance index */
2131
    ush bl_count[MAX_BITS+1];
2132
    /* number of codes at each bit length for an optimal tree */
2133
 
2134
    if (static_init_done) return;
2135
 
2136
    /* Initialize the mapping length (0..255) -> length code (0..28) */
2137
    length = 0;
2138
    for (code = 0; code < LENGTH_CODES-1; code++) {
2139
        base_length[code] = length;
2140
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
2141
            length_code[length++] = (uch)code;
2142
        }
2143
    }
2144
    Assert (length == 256, "tr_static_init: length != 256");
2145
    /* Note that the length 255 (match length 258) can be represented
2146
     * in two different ways: code 284 + 5 bits or code 285, so we
2147
     * overwrite length_code[255] to use the best encoding:
2148
     */
2149
    length_code[length-1] = (uch)code;
2150
 
2151
    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
2152
    dist = 0;
2153
    for (code = 0 ; code < 16; code++) {
2154
        base_dist[code] = dist;
2155
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
2156
            dist_code[dist++] = (uch)code;
2157
        }
2158
    }
2159
    Assert (dist == 256, "tr_static_init: dist != 256");
2160
    dist >>= 7; /* from now on, all distances are divided by 128 */
2161
    for ( ; code < D_CODES; code++) {
2162
        base_dist[code] = dist << 7;
2163
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
2164
            dist_code[256 + dist++] = (uch)code;
2165
        }
2166
    }
2167
    Assert (dist == 256, "tr_static_init: 256+dist != 512");
2168
 
2169
    /* Construct the codes of the static literal tree */
2170
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
2171
    n = 0;
2172
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
2173
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
2174
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
2175
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
2176
    /* Codes 286 and 287 do not exist, but we must include them in the
2177
     * tree construction to get a canonical Huffman tree (longest code
2178
     * all ones)
2179
     */
2180
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);
2181
 
2182
    /* The static distance tree is trivial: */
2183
    for (n = 0; n < D_CODES; n++) {
2184
        static_dtree[n].Len = 5;
2185
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
2186
    }
2187
    static_init_done = 1;
2188
}
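/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): once the tables built above exist, a match length len
 * (MIN_MATCH..MAX_MATCH) is coded as length_code[len-MIN_MATCH] plus
 * (len-MIN_MATCH) - base_length[code] extra bits (sent in extra_lbits[code]
 * bits); distances work the same way through d_code() and base_dist[].
 * A hypothetical helper for the length side:
 */
#if 0
static void length_to_symbol(const unsigned char length_code[256],
                             const int base_length[],
                             unsigned len,        /* MIN_MATCH..MAX_MATCH */
                             unsigned min_match,  /* 3 in deflate */
                             int *code, int *extra_value)
{
    unsigned lc = len - min_match;   /* normalized length, 0..255 */

    *code = length_code[lc];
    *extra_value = (int)lc - base_length[*code];
}
#endif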
2189
 
2190
/* ===========================================================================
2191
 * Initialize the tree data structures for a new zlib stream.
2192
 */
2193
void _tr_init(s)
2194
    deflate_state *s;
2195
{
2196
    tr_static_init();
2197
 
2198
    s->compressed_len = 0L;
2199
 
2200
    s->l_desc.dyn_tree = s->dyn_ltree;
2201
    s->l_desc.stat_desc = &static_l_desc;
2202
 
2203
    s->d_desc.dyn_tree = s->dyn_dtree;
2204
    s->d_desc.stat_desc = &static_d_desc;
2205
 
2206
    s->bl_desc.dyn_tree = s->bl_tree;
2207
    s->bl_desc.stat_desc = &static_bl_desc;
2208
 
2209
    s->bi_buf = 0;
2210
    s->bi_valid = 0;
2211
    s->last_eob_len = 8; /* enough lookahead for inflate */
2212
#ifdef DEBUG_ZLIB
2213
    s->bits_sent = 0L;
2214
#endif
2215
 
2216
    /* Initialize the first block of the first file: */
2217
    init_block(s);
2218
}
2219
 
2220
/* ===========================================================================
2221
 * Initialize a new block.
2222
 */
2223
local void init_block(s)
2224
    deflate_state *s;
2225
{
2226
    int n; /* iterates over tree elements */
2227
 
2228
    /* Initialize the trees. */
2229
    for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
2230
    for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
2231
    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
2232
 
2233
    s->dyn_ltree[END_BLOCK].Freq = 1;
2234
    s->opt_len = s->static_len = 0L;
2235
    s->last_lit = s->matches = 0;
2236
}
2237
 
2238
#define SMALLEST 1
2239
/* Index within the heap array of least frequent node in the Huffman tree */
2240
 
2241
 
2242
/* ===========================================================================
2243
 * Remove the smallest element from the heap and recreate the heap with
2244
 * one less element. Updates heap and heap_len.
2245
 */
2246
#define pqremove(s, tree, top) \
2247
{\
2248
    top = s->heap[SMALLEST]; \
2249
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
2250
    pqdownheap(s, tree, SMALLEST); \
2251
}
2252
 
2253
/* ===========================================================================
2254
 * Compares two subtrees, using the tree depth as a tie breaker when
2255
 * the subtrees have equal frequency. This minimizes the worst case length.
2256
 */
2257
#define smaller(tree, n, m, depth) \
2258
   (tree[n].Freq < tree[m].Freq || \
2259
   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
2260
 
2261
/* ===========================================================================
2262
 * Restore the heap property by moving down the tree starting at node k,
2263
 * exchanging a node with the smallest of its two sons if necessary, stopping
2264
 * when the heap property is re-established (each father smaller than its
2265
 * two sons).
2266
 */
2267
local void pqdownheap(s, tree, k)
2268
    deflate_state *s;
2269
    ct_data *tree;  /* the tree to restore */
2270
    int k;               /* node to move down */
2271
{
2272
    int v = s->heap[k];
2273
    int j = k << 1;  /* left son of k */
2274
    while (j <= s->heap_len) {
2275
        /* Set j to the smallest of the two sons: */
2276
        if (j < s->heap_len &&
2277
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
2278
            j++;
2279
        }
2280
        /* Exit if v is smaller than both sons */
2281
        if (smaller(tree, v, s->heap[j], s->depth)) break;
2282
 
2283
        /* Exchange v with the smallest son */
2284
        s->heap[k] = s->heap[j];  k = j;
2285
 
2286
        /* And continue down the tree, setting j to the left son of k */
2287
        j <<= 1;
2288
    }
2289
    s->heap[k] = v;
2290
}
2291
 
2292
/* ===========================================================================
2293
 * Compute the optimal bit lengths for a tree and update the total bit length
2294
 * for the current block.
2295
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
2296
 *    above are the tree nodes sorted by increasing frequency.
2297
 * OUT assertions: the field len is set to the optimal bit length, the
2298
 *     array bl_count contains the frequencies for each bit length.
2299
 *     The length opt_len is updated; static_len is also updated if stree is
2300
 *     not null.
2301
 */
2302
local void gen_bitlen(s, desc)
2303
    deflate_state *s;
2304
    tree_desc *desc;    /* the tree descriptor */
2305
{
2306
    ct_data *tree  = desc->dyn_tree;
2307
    int max_code   = desc->max_code;
2308
    ct_data *stree = desc->stat_desc->static_tree;
2309
    intf *extra    = desc->stat_desc->extra_bits;
2310
    int base       = desc->stat_desc->extra_base;
2311
    int max_length = desc->stat_desc->max_length;
2312
    int h;              /* heap index */
2313
    int n, m;           /* iterate over the tree elements */
2314
    int bits;           /* bit length */
2315
    int xbits;          /* extra bits */
2316
    ush f;              /* frequency */
2317
    int overflow = 0;   /* number of elements with bit length too large */
2318
 
2319
    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;
2320
 
2321
    /* In a first pass, compute the optimal bit lengths (which may
2322
     * overflow in the case of the bit length tree).
2323
     */
2324
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
2325
 
2326
    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
2327
        n = s->heap[h];
2328
        bits = tree[tree[n].Dad].Len + 1;
2329
        if (bits > max_length) bits = max_length, overflow++;
2330
        tree[n].Len = (ush)bits;
2331
        /* We overwrite tree[n].Dad which is no longer needed */
2332
 
2333
        if (n > max_code) continue; /* not a leaf node */
2334
 
2335
        s->bl_count[bits]++;
2336
        xbits = 0;
2337
        if (n >= base) xbits = extra[n-base];
2338
        f = tree[n].Freq;
2339
        s->opt_len += (ulg)f * (bits + xbits);
2340
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
2341
    }
2342
    if (overflow == 0) return;
2343
 
2344
    Trace((stderr,"\nbit length overflow\n"));
2345
    /* This happens for example on obj2 and pic of the Calgary corpus */
2346
 
2347
    /* Find the first bit length which could increase: */
2348
    do {
2349
        bits = max_length-1;
2350
        while (s->bl_count[bits] == 0) bits--;
2351
        s->bl_count[bits]--;      /* move one leaf down the tree */
2352
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
2353
        s->bl_count[max_length]--;
2354
        /* The brother of the overflow item also moves one step up,
2355
         * but this does not affect bl_count[max_length]
2356
         */
2357
        overflow -= 2;
2358
    } while (overflow > 0);
2359
 
2360
    /* Now recompute all bit lengths, scanning in increasing frequency.
2361
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
2362
     * lengths instead of fixing only the wrong ones. This idea is taken
2363
     * from 'ar' written by Haruhiko Okumura.)
2364
     */
2365
    for (bits = max_length; bits != 0; bits--) {
2366
        n = s->bl_count[bits];
2367
        while (n != 0) {
2368
            m = s->heap[--h];
2369
            if (m > max_code) continue;
2370
            if (tree[m].Len != (unsigned) bits) {
2371
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
2372
                s->opt_len += ((long)bits - (long)tree[m].Len)
2373
                              *(long)tree[m].Freq;
2374
                tree[m].Len = (ush)bits;
2375
            }
2376
            n--;
2377
        }
2378
    }
2379
}
2380
 
2381
/* ===========================================================================
2382
 * Generate the codes for a given tree and bit counts (which need not be
2383
 * optimal).
2384
 * IN assertion: the array bl_count contains the bit length statistics for
2385
 * the given tree and the field len is set for all tree elements.
2386
 * OUT assertion: the field code is set for all tree elements of non
2387
 *     zero code length.
2388
 */
2389
local void gen_codes (tree, max_code, bl_count)
2390
    ct_data *tree;             /* the tree to decorate */
2391
    int max_code;              /* largest code with non zero frequency */
2392
    ushf *bl_count;            /* number of codes at each bit length */
2393
{
2394
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
2395
    ush code = 0;              /* running code value */
2396
    int bits;                  /* bit index */
2397
    int n;                     /* code index */
2398
 
2399
    /* The distribution counts are first used to generate the code values
2400
     * without bit reversal.
2401
     */
2402
    for (bits = 1; bits <= MAX_BITS; bits++) {
2403
        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
2404
    }
2405
    /* Check that the bit counts in bl_count are consistent. The last code
2406
     * must be all ones.
2407
     */
2408
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
2409
            "inconsistent bit counts");
2410
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
2411
 
2412
    for (n = 0;  n <= max_code; n++) {
2413
        int len = tree[n].Len;
2414
        if (len == 0) continue;
2415
        /* Now reverse the bits */
2416
        tree[n].Code = bi_reverse(next_code[len]++, len);
2417
 
2418
        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
2419
             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
2420
    }
2421
}
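/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): a miniature version of gen_codes() for a 4-symbol alphabet with
 * code lengths {2,1,3,3}.  bl_count is then {0,1,1,2} and the canonical
 * codes before bit reversal come out as 10, 0, 110, 111, as in the deflate
 * specification; the reversal matches what bi_reverse() (defined later in
 * this file) does.  All names here are hypothetical.
 */
#if 0
static unsigned reverse_bits(unsigned code, int len)
{
    unsigned res = 0;

    do { res = (res << 1) | (code & 1); code >>= 1; } while (--len > 0);
    return res;
}

static void tiny_gen_codes(void)
{
    int len[4] = {2, 1, 3, 3};            /* code length per symbol */
    unsigned bl_count[4] = {0, 1, 1, 2};  /* number of codes per length */
    unsigned next_code[4];
    unsigned code = 0;
    unsigned canonical, lsb_first;
    int bits, n;

    for (bits = 1; bits <= 3; bits++)
        next_code[bits] = code = (code + bl_count[bits - 1]) << 1;

    for (n = 0; n < 4; n++) {
        canonical = next_code[len[n]]++;              /* e.g. 10 for symbol 0 */
        lsb_first = reverse_bits(canonical, len[n]);  /* as stored in .Code */
        (void)lsb_first;
    }
}
#endif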
2422
 
2423
/* ===========================================================================
2424
 * Construct one Huffman tree and assign the code bit strings and lengths.
2425
 * Update the total bit length for the current block.
2426
 * IN assertion: the field freq is set for all tree elements.
2427
 * OUT assertions: the fields len and code are set to the optimal bit length
2428
 *     and corresponding code. The length opt_len is updated; static_len is
2429
 *     also updated if stree is not null. The field max_code is set.
2430
 */
2431
local void build_tree(s, desc)
2432
    deflate_state *s;
2433
    tree_desc *desc; /* the tree descriptor */
2434
{
2435
    ct_data *tree   = desc->dyn_tree;
2436
    ct_data *stree  = desc->stat_desc->static_tree;
2437
    int elems       = desc->stat_desc->elems;
2438
    int n, m;          /* iterate over heap elements */
2439
    int max_code = -1; /* largest code with non zero frequency */
2440
    int node;          /* new node being created */
2441
 
2442
    /* Construct the initial heap, with least frequent element in
2443
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
2444
     * heap[0] is not used.
2445
     */
2446
    s->heap_len = 0, s->heap_max = HEAP_SIZE;
2447
 
2448
    for (n = 0; n < elems; n++) {
2449
        if (tree[n].Freq != 0) {
2450
            s->heap[++(s->heap_len)] = max_code = n;
2451
            s->depth[n] = 0;
2452
        } else {
2453
            tree[n].Len = 0;
2454
        }
2455
    }
2456
 
2457
    /* The pkzip format requires that at least one distance code exists,
2458
     * and that at least one bit should be sent even if there is only one
2459
     * possible code. So to avoid special checks later on we force at least
2460
     * two codes of non zero frequency.
2461
     */
2462
    while (s->heap_len < 2) {
2463
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
2464
        tree[node].Freq = 1;
2465
        s->depth[node] = 0;
2466
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
2467
        /* node is 0 or 1 so it does not have extra bits */
2468
    }
2469
    desc->max_code = max_code;
2470
 
2471
    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
2472
     * establish sub-heaps of increasing lengths:
2473
     */
2474
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
2475
 
2476
    /* Construct the Huffman tree by repeatedly combining the least two
2477
     * frequent nodes.
2478
     */
2479
    node = elems;              /* next internal node of the tree */
2480
    do {
2481
        pqremove(s, tree, n);  /* n = node of least frequency */
2482
        m = s->heap[SMALLEST]; /* m = node of next least frequency */
2483
 
2484
        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
2485
        s->heap[--(s->heap_max)] = m;
2486
 
2487
        /* Create a new node father of n and m */
2488
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
2489
        s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
2490
        tree[n].Dad = tree[m].Dad = (ush)node;
2491
#ifdef DUMP_BL_TREE
2492
        if (tree == s->bl_tree) {
2493
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
2494
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
2495
        }
2496
#endif
2497
        /* and insert the new node in the heap */
2498
        s->heap[SMALLEST] = node++;
2499
        pqdownheap(s, tree, SMALLEST);
2500
 
2501
    } while (s->heap_len >= 2);
2502
 
2503
    s->heap[--(s->heap_max)] = s->heap[SMALLEST];
2504
 
2505
    /* At this point, the fields freq and dad are set. We can now
2506
     * generate the bit lengths.
2507
     */
2508
    gen_bitlen(s, (tree_desc *)desc);
2509
 
2510
    /* The field len is now set, we can generate the bit codes */
2511
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
2512
}
2513
 
2514
/* ===========================================================================
2515
 * Scan a literal or distance tree to determine the frequencies of the codes
2516
 * in the bit length tree.
2517
 */
2518
local void scan_tree (s, tree, max_code)
2519
    deflate_state *s;
2520
    ct_data *tree;   /* the tree to be scanned */
2521
    int max_code;    /* and its largest code of non zero frequency */
2522
{
2523
    int n;                     /* iterates over all tree elements */
2524
    int prevlen = -1;          /* last emitted length */
2525
    int curlen;                /* length of current code */
2526
    int nextlen = tree[0].Len; /* length of next code */
2527
    int count = 0;             /* repeat count of the current code */
2528
    int max_count = 7;         /* max repeat count */
2529
    int min_count = 4;         /* min repeat count */
2530
 
2531
    if (nextlen == 0) max_count = 138, min_count = 3;
2532
    tree[max_code+1].Len = (ush)0xffff; /* guard */
2533
 
2534
    for (n = 0; n <= max_code; n++) {
2535
        curlen = nextlen; nextlen = tree[n+1].Len;
2536
        if (++count < max_count && curlen == nextlen) {
2537
            continue;
2538
        } else if (count < min_count) {
2539
            s->bl_tree[curlen].Freq += count;
2540
        } else if (curlen != 0) {
2541
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
2542
            s->bl_tree[REP_3_6].Freq++;
2543
        } else if (count <= 10) {
2544
            s->bl_tree[REPZ_3_10].Freq++;
2545
        } else {
2546
            s->bl_tree[REPZ_11_138].Freq++;
2547
        }
2548
        count = 0; prevlen = curlen;
2549
        if (nextlen == 0) {
2550
            max_count = 138, min_count = 3;
2551
        } else if (curlen == nextlen) {
2552
            max_count = 6, min_count = 3;
2553
        } else {
2554
            max_count = 7, min_count = 4;
2555
        }
2556
    }
2557
}
2558
 
2559
/* ===========================================================================
2560
 * Send a literal or distance tree in compressed form, using the codes in
2561
 * bl_tree.
2562
 */
2563
local void send_tree (s, tree, max_code)
2564
    deflate_state *s;
2565
    ct_data *tree; /* the tree to be scanned */
2566
    int max_code;       /* and its largest code of non zero frequency */
2567
{
2568
    int n;                     /* iterates over all tree elements */
2569
    int prevlen = -1;          /* last emitted length */
2570
    int curlen;                /* length of current code */
2571
    int nextlen = tree[0].Len; /* length of next code */
2572
    int count = 0;             /* repeat count of the current code */
2573
    int max_count = 7;         /* max repeat count */
2574
    int min_count = 4;         /* min repeat count */
2575
 
2576
    /* tree[max_code+1].Len = -1; */  /* guard already set */
2577
    if (nextlen == 0) max_count = 138, min_count = 3;
2578
 
2579
    for (n = 0; n <= max_code; n++) {
2580
        curlen = nextlen; nextlen = tree[n+1].Len;
2581
        if (++count < max_count && curlen == nextlen) {
2582
            continue;
2583
        } else if (count < min_count) {
2584
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
2585
 
2586
        } else if (curlen != 0) {
2587
            if (curlen != prevlen) {
2588
                send_code(s, curlen, s->bl_tree); count--;
2589
            }
2590
            Assert(count >= 3 && count <= 6, " 3_6?");
2591
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
2592
 
2593
        } else if (count <= 10) {
2594
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
2595
 
2596
        } else {
2597
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
2598
        }
2599
        count = 0; prevlen = curlen;
2600
        if (nextlen == 0) {
2601
            max_count = 138, min_count = 3;
2602
        } else if (curlen == nextlen) {
2603
            max_count = 6, min_count = 3;
2604
        } else {
2605
            max_count = 7, min_count = 4;
2606
        }
2607
    }
2608
}
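/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): scan_tree()/send_tree() above run-length encode the sequence of
 * code lengths.  The lengths 5 5 5 5 5 0 0 0 0 2, for instance, go out as
 * "5, REP_3_6 x4, REPZ_3_10 x4, 2".  A hypothetical standalone encoder
 * following the same max_count/min_count rules, printing one symbol per
 * line instead of calling send_code()/send_bits():
 */
#if 0
#include <stdio.h>

static void rle_code_lengths(const int *len, int n_codes)
{
    int prevlen = -1;
    int nextlen = len[0];
    int curlen, count = 0, n;
    int max_count = (nextlen == 0) ? 138 : 7;
    int min_count = (nextlen == 0) ? 3 : 4;

    for (n = 0; n < n_codes; n++) {
        curlen = nextlen;
        nextlen = (n + 1 < n_codes) ? len[n + 1] : -1;  /* -1: end guard */
        if (++count < max_count && curlen == nextlen) continue;

        if (count < min_count) {
            do { printf("len %d\n", curlen); } while (--count != 0);
        } else if (curlen != 0) {
            if (curlen != prevlen) { printf("len %d\n", curlen); count--; }
            printf("REP_3_6 x%d\n", count);
        } else if (count <= 10) {
            printf("REPZ_3_10 x%d\n", count);
        } else {
            printf("REPZ_11_138 x%d\n", count);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0)           { max_count = 138; min_count = 3; }
        else if (curlen == nextlen) { max_count = 6;   min_count = 3; }
        else                        { max_count = 7;   min_count = 4; }
    }
}
#endif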
2609
 
2610
/* ===========================================================================
2611
 * Construct the Huffman tree for the bit lengths and return the index in
2612
 * bl_order of the last bit length code to send.
2613
 */
2614
local int build_bl_tree(s)
2615
    deflate_state *s;
2616
{
2617
    int max_blindex;  /* index of last bit length code of non zero freq */
2618
 
2619
    /* Determine the bit length frequencies for literal and distance trees */
2620
    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
2621
    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
2622
 
2623
    /* Build the bit length tree: */
2624
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
2625
    /* opt_len now includes the length of the tree representations, except
2626
     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
2627
     */
2628
 
2629
    /* Determine the number of bit length codes to send. The pkzip format
2630
     * requires that at least 4 bit length codes be sent. (appnote.txt says
2631
     * 3 but the actual value used is 4.)
2632
     */
2633
    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
2634
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
2635
    }
2636
    /* Update opt_len to include the bit length tree and counts */
2637
    s->opt_len += 3*(max_blindex+1) + 5+5+4;
2638
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
2639
            s->opt_len, s->static_len));
2640
 
2641
    return max_blindex;
2642
}
2643
 
2644
/* ===========================================================================
2645
 * Send the header for a block using dynamic Huffman trees: the counts, the
2646
 * lengths of the bit length codes, the literal tree and the distance tree.
2647
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
2648
 */
2649
local void send_all_trees(s, lcodes, dcodes, blcodes)
2650
    deflate_state *s;
2651
    int lcodes, dcodes, blcodes; /* number of codes for each tree */
2652
{
2653
    int rank;                    /* index in bl_order */
2654
 
2655
    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
2656
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2657
            "too many codes");
2658
    Tracev((stderr, "\nbl counts: "));
2659
    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
2660
    send_bits(s, dcodes-1,   5);
2661
    send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
2662
    for (rank = 0; rank < blcodes; rank++) {
2663
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
2664
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
2665
    }
2666
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
2667
 
2668
    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
2669
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
2670
 
2671
    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
2672
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
2673
}
2674
 
2675
/* ===========================================================================
2676
 * Send a stored block
2677
 */
2678
void _tr_stored_block(s, buf, stored_len, eof)
2679
    deflate_state *s;
2680
    charf *buf;       /* input block */
2681
    ulg stored_len;   /* length of input block */
2682
    int eof;          /* true if this is the last block for a file */
2683
{
2684
    send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
2685
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
2686
    s->compressed_len += (stored_len + 4) << 3;
2687
 
2688
    copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
2689
}
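/* Illustrative sketch, not part of the original sources (guarded out of the
 * build): after the 3-bit block type sent above, a stored block is byte
 * aligned and carries a 16-bit LEN, its one's complement NLEN, then the
 * raw bytes (copy_block() with header != 0 does this in the real code).
 * A hypothetical helper writing that LEN/NLEN/data layout, little-endian
 * as the format requires:
 */
#if 0
#include <string.h>

static unsigned char *put_stored_payload(unsigned char *out,
                                         const unsigned char *buf,
                                         unsigned len)   /* len <= 0xffff */
{
    *out++ = (unsigned char)(len & 0xff);        /* LEN, low byte first */
    *out++ = (unsigned char)(len >> 8);
    *out++ = (unsigned char)(~len & 0xff);       /* NLEN = one's complement */
    *out++ = (unsigned char)((~len >> 8) & 0xff);
    memcpy(out, buf, len);
    return out + len;
}
#endif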
2690
 
2691
/* Send just the `stored block' type code without any length bytes or data.
2692
 */
2693
void _tr_stored_type_only(s)
2694
    deflate_state *s;
2695
{
2696
    send_bits(s, (STORED_BLOCK << 1), 3);
2697
    bi_windup(s);
2698
    s->compressed_len = (s->compressed_len + 3) & ~7L;
2699
}
2700
 
2701
 
2702
/* ===========================================================================
2703
 * Send one empty static block to give enough lookahead for inflate.
2704
 * This takes 10 bits, of which 7 may remain in the bit buffer.
2705
 * The current inflate code requires 9 bits of lookahead. If the
2706
 * last two codes for the previous block (real code plus EOB) were coded
2707
 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
2708
 * the last real code. In this case we send two empty static blocks instead
2709
 * of one. (There are no problems if the previous block is stored or fixed.)
2710
 * To simplify the code, we assume the worst case of last real code encoded
2711
 * on one bit only.
2712
 */
2713
void _tr_align(s)
2714
    deflate_state *s;
2715
{
2716
    send_bits(s, STATIC_TREES<<1, 3);
2717
    send_code(s, END_BLOCK, static_ltree);
2718
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
2719
    bi_flush(s);
2720
    /* Of the 10 bits for the empty block, we have already sent
2721
     * (10 - bi_valid) bits. The lookahead for the last real code (before
2722
     * the EOB of the previous block) was thus at least one plus the length
2723
     * of the EOB plus what we have just sent of the empty static block.
2724
     */
2725
    if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
2726
        send_bits(s, STATIC_TREES<<1, 3);
2727
        send_code(s, END_BLOCK, static_ltree);
2728
        s->compressed_len += 10L;
2729
        bi_flush(s);
2730
    }
2731
    s->last_eob_len = 7;
2732
}
2733
 
2734
/* ===========================================================================
2735
 * Determine the best encoding for the current block: dynamic trees, static
2736
 * trees or store, and output the encoded block to the zip file. This function
2737
 * returns the total compressed length for the file so far.
2738
 */
2739
ulg _tr_flush_block(s, buf, stored_len, eof)
2740
    deflate_state *s;
2741
    charf *buf;       /* input block, or NULL if too old */
2742
    ulg stored_len;   /* length of input block */
2743
    int eof;          /* true if this is the last block for a file */
2744
{
2745
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
2746
    int max_blindex = 0;  /* index of last bit length code of non zero freq */
2747
 
2748
    /* Build the Huffman trees unless a stored block is forced */
2749
    if (s->level > 0) {
2750
 
2751
         /* Check if the file is ascii or binary */
2752
        if (s->data_type == Z_UNKNOWN) set_data_type(s);
2753
 
2754
        /* Construct the literal and distance trees */
2755
        build_tree(s, (tree_desc *)(&(s->l_desc)));
2756
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
2757
                s->static_len));
2758
 
2759
        build_tree(s, (tree_desc *)(&(s->d_desc)));
2760
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
2761
                s->static_len));
2762
        /* At this point, opt_len and static_len are the total bit lengths of
2763
         * the compressed block data, excluding the tree representations.
2764
         */
2765
 
2766
        /* Build the bit length tree for the above two trees, and get the index
2767
         * in bl_order of the last bit length code to send.
2768
         */
2769
        max_blindex = build_bl_tree(s);
2770
 
2771
        /* Determine the best encoding. Compute first the block length in bytes */
2772
        opt_lenb = (s->opt_len+3+7)>>3;
2773
        static_lenb = (s->static_len+3+7)>>3;
2774
 
2775
        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
2776
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
2777
                s->last_lit));
2778
 
2779
        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
2780
 
2781
    } else {
2782
        Assert(buf != (char*)0, "lost buf");
2783
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
2784
    }
2785
 
2786
    /* If compression failed and this is the first and last block,
2787
     * and if the .zip file can be seeked (to rewrite the local header),
2788
     * the whole file is transformed into a stored file:
2789
     */
2790
#ifdef STORED_FILE_OK
2791
#  ifdef FORCE_STORED_FILE
2792
    if (eof && s->compressed_len == 0L) { /* force stored file */
2793
#  else
2794
    if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable()) {
2795
#  endif
2796
        /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
2797
        if (buf == (charf*)0) error ("block vanished");
2798
 
2799
        copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
2800
        s->compressed_len = stored_len << 3;
2801
        s->method = STORED;
2802
    } else
2803
#endif /* STORED_FILE_OK */
2804
 
2805
#ifdef FORCE_STORED
2806
    if (buf != (char*)0) { /* force stored block */
2807
#else
2808
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
2809
                       /* 4: two words for the lengths */
2810
#endif
2811
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
2812
         * Otherwise we can't have processed more than WSIZE input bytes since
2813
         * the last block flush, because compression would have been
2814
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
2815
         * transform a block into a stored block.
2816
         */
2817
        _tr_stored_block(s, buf, stored_len, eof);
2818
 
2819
#ifdef FORCE_STATIC
2820
    } else if (static_lenb >= 0) { /* force static trees */
2821
#else
2822
    } else if (static_lenb == opt_lenb) {
2823
#endif
2824
        send_bits(s, (STATIC_TREES<<1)+eof, 3);
2825
        compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
2826
        s->compressed_len += 3 + s->static_len;
2827
    } else {
2828
        send_bits(s, (DYN_TREES<<1)+eof, 3);
2829
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
2830
                       max_blindex+1);
2831
        compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
2832
        s->compressed_len += 3 + s->opt_len;
2833
    }
2834
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
2835
    init_block(s);
2836
 
2837
    if (eof) {
2838
        bi_windup(s);
2839
        s->compressed_len += 7;  /* align on byte boundary */
2840
    }
2841
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
2842
           s->compressed_len-7*eof));
2843
 
2844
    return s->compressed_len >> 3;
2845
}
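/* Editor's note (illustrative numbers, not from the original source):
 * suppose a block tallies opt_len = 8000 bits and static_len = 9500 bits
 * for stored_len = 900 input bytes.  Then opt_lenb = (8000+3+7)>>3 = 1001
 * bytes and static_lenb = (9500+3+7)>>3 = 1188 bytes, so the dynamic
 * estimate wins the opt_lenb comparison; but stored_len+4 = 904 <= 1001,
 * so the block is actually emitted as a stored block.
 */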
2846
 
2847
/* ===========================================================================
2848
 * Save the match info and tally the frequency counts. Return true if
2849
 * the current block must be flushed.
2850
 */
2851
int _tr_tally (s, dist, lc)
2852
    deflate_state *s;
2853
    unsigned dist;  /* distance of matched string */
2854
    unsigned lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
2855
{
2856
    s->d_buf[s->last_lit] = (ush)dist;
2857
    s->l_buf[s->last_lit++] = (uch)lc;
2858
    if (dist == 0) {
2859
        /* lc is the unmatched char */
2860
        s->dyn_ltree[lc].Freq++;
2861
    } else {
2862
        s->matches++;
2863
        /* Here, lc is the match length - MIN_MATCH */
2864
        dist--;             /* dist = match distance - 1 */
2865
        Assert((ush)dist < (ush)MAX_DIST(s) &&
2866
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
2867
               (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");
2868
 
2869
        s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
2870
        s->dyn_dtree[d_code(dist)].Freq++;
2871
    }
2872
 
2873
    /* Try to guess if it is profitable to stop the current block here */
2874
    if (s->level > 2 && (s->last_lit & 0xfff) == 0) {
2875
        /* Compute an upper bound for the compressed length */
2876
        ulg out_length = (ulg)s->last_lit*8L;
2877
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
2878
        int dcode;
2879
        for (dcode = 0; dcode < D_CODES; dcode++) {
2880
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
2881
                (5L+extra_dbits[dcode]);
2882
        }
2883
        out_length >>= 3;
2884
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
2885
               s->last_lit, in_length, out_length,
2886
               100L - out_length*100L/in_length));
2887
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
2888
    }
2889
    return (s->last_lit == s->lit_bufsize-1);
2890
    /* We avoid equality with lit_bufsize because of wraparound at 64K
2891
     * on 16 bit machines and because stored blocks are restricted to
2892
     * 64K-1 bytes.
2893
     */
2894
}
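/* Editor's note (illustrative numbers, not from the original source):
 * with level > 2 and last_lit = 4096 the early-flush check above fires.
 * The upper bound starts at 4096*8 = 32768 bits plus roughly 5+extra bits
 * per distance code used; say that adds 8192 bits, giving
 * out_length = (32768+8192)>>3 = 5120 bytes.  If the raw input span is
 * in_length = 16384 bytes and only matches = 1500 < 4096/2, then
 * 5120 < 16384/2 as well, so the block is flushed early on the assumption
 * that the character of the data has changed.
 */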
2895
 
2896
/* ===========================================================================
2897
 * Send the block data compressed using the given Huffman trees
2898
 */
2899
local void compress_block(s, ltree, dtree)
2900
    deflate_state *s;
2901
    ct_data *ltree; /* literal tree */
2902
    ct_data *dtree; /* distance tree */
2903
{
2904
    unsigned dist;      /* distance of matched string */
2905
    int lc;             /* match length or unmatched char (if dist == 0) */
2906
    unsigned lx = 0;    /* running index in l_buf */
2907
    unsigned code;      /* the code to send */
2908
    int extra;          /* number of extra bits to send */
2909
 
2910
    if (s->last_lit != 0) do {
2911
        dist = s->d_buf[lx];
2912
        lc = s->l_buf[lx++];
2913
        if (dist == 0) {
2914
            send_code(s, lc, ltree); /* send a literal byte */
2915
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
2916
        } else {
2917
            /* Here, lc is the match length - MIN_MATCH */
2918
            code = length_code[lc];
2919
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
2920
            extra = extra_lbits[code];
2921
            if (extra != 0) {
2922
                lc -= base_length[code];
2923
                send_bits(s, lc, extra);       /* send the extra length bits */
2924
            }
2925
            dist--; /* dist is now the match distance - 1 */
2926
            code = d_code(dist);
2927
            Assert (code < D_CODES, "bad d_code");
2928
 
2929
            send_code(s, code, dtree);       /* send the distance code */
2930
            extra = extra_dbits[code];
2931
            if (extra != 0) {
2932
                dist -= base_dist[code];
2933
                send_bits(s, dist, extra);   /* send the extra distance bits */
2934
            }
2935
        } /* literal or match pair ? */
2936
 
2937
        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
2938
        Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");
2939
 
2940
    } while (lx < s->last_lit);
2941
 
2942
    send_code(s, END_BLOCK, ltree);
2943
    s->last_eob_len = ltree[END_BLOCK].Len;
2944
}
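/* Editor's note (illustrative, not from the original source): for a match
 * of length 18 at distance 300, lc = 18-MIN_MATCH = 15 maps to length
 * symbol 268 (1 extra bit, value 1), and dist-1 = 299 maps to distance
 * code 16 (7 extra bits, value 299-256 = 43), so the pair costs the two
 * Huffman codes plus 8 extra bits.
 */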
2945
 
2946
/* ===========================================================================
2947
 * Set the data type to ASCII or BINARY, using a crude approximation:
2948
 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
2949
 * IN assertion: the fields freq of dyn_ltree are set and the total of all
2950
 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
2951
 */
2952
local void set_data_type(s)
2953
    deflate_state *s;
2954
{
2955
    int n = 0;
2956
    unsigned ascii_freq = 0;
2957
    unsigned bin_freq = 0;
2958
    while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
2959
    while (n < 128)    ascii_freq += s->dyn_ltree[n++].Freq;
2960
    while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
2961
    s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII);
2962
}
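/* Editor's note (illustrative numbers, not from the original source): for a
 * block of 1000 literals with 250 bytes in the ranges 0..6 or 128..255 and
 * 750 in 7..127, the test is 250 > (750 >> 2) = 187, so the block is tagged
 * Z_BINARY; with only 150 such bytes it would be 150 > 212 == false,
 * i.e. Z_ASCII.
 */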
2963
 
2964
/* ===========================================================================
2965
 * Reverse the first len bits of a code, using straightforward code (a faster
2966
 * method would use a table)
2967
 * IN assertion: 1 <= len <= 15
2968
 */
2969
local unsigned bi_reverse(code, len)
2970
    unsigned code; /* the value to invert */
2971
    int len;       /* its bit length */
2972
{
2973
    register unsigned res = 0;
2974
    do {
2975
        res |= code & 1;
2976
        code >>= 1, res <<= 1;
2977
    } while (--len > 0);
2978
    return res >> 1;
2979
}
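/* Editor's sketch of the table-driven alternative mentioned above; it is
 * not part of the original source and is left un-compiled here.  A 16-entry
 * nibble-reversal table reverses a full 16-bit value in four lookups, and
 * the unused low bits are then shifted away.
 */
#if 0
local unsigned bi_reverse_tbl(code, len)
    unsigned code; /* the value to invert (only its low len bits matter) */
    int len;       /* its bit length, 1 <= len <= 15 */
{
    static const unsigned rev4[16] = {     /* bit-reversal of each nibble */
        0x0, 0x8, 0x4, 0xC, 0x2, 0xA, 0x6, 0xE,
        0x1, 0x9, 0x5, 0xD, 0x3, 0xB, 0x7, 0xF
    };
    unsigned r = (rev4[code & 0xF] << 12) | (rev4[(code >> 4) & 0xF] << 8) |
                 (rev4[(code >> 8) & 0xF] << 4) |  rev4[(code >> 12) & 0xF];
    return r >> (16 - len);                /* keep only the reversed code */
}
#endif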
2980
 
2981
/* ===========================================================================
2982
 * Flush the bit buffer, keeping at most 7 bits in it.
2983
 */
2984
local void bi_flush(s)
2985
    deflate_state *s;
2986
{
2987
    if (s->bi_valid == 16) {
2988
        put_short(s, s->bi_buf);
2989
        s->bi_buf = 0;
2990
        s->bi_valid = 0;
2991
    } else if (s->bi_valid >= 8) {
2992
        put_byte(s, (Byte)s->bi_buf);
2993
        s->bi_buf >>= 8;
2994
        s->bi_valid -= 8;
2995
    }
2996
}
2997
 
2998
/* ===========================================================================
2999
 * Flush the bit buffer and align the output on a byte boundary
3000
 */
3001
local void bi_windup(s)
3002
    deflate_state *s;
3003
{
3004
    if (s->bi_valid > 8) {
3005
        put_short(s, s->bi_buf);
3006
    } else if (s->bi_valid > 0) {
3007
        put_byte(s, (Byte)s->bi_buf);
3008
    }
3009
    s->bi_buf = 0;
3010
    s->bi_valid = 0;
3011
#ifdef DEBUG_ZLIB
3012
    s->bits_sent = (s->bits_sent+7) & ~7;
3013
#endif
3014
}
3015
 
3016
/* ===========================================================================
3017
 * Copy a stored block, storing first the length and its
3018
 * one's complement if requested.
3019
 */
3020
local void copy_block(s, buf, len, header)
3021
    deflate_state *s;
3022
    charf    *buf;    /* the input data */
3023
    unsigned len;     /* its length */
3024
    int      header;  /* true if block header must be written */
3025
{
3026
    bi_windup(s);        /* align on byte boundary */
3027
    s->last_eob_len = 8; /* enough lookahead for inflate */
3028
 
3029
    if (header) {
3030
        put_short(s, (ush)len);
3031
        put_short(s, (ush)~len);
3032
#ifdef DEBUG_ZLIB
3033
        s->bits_sent += 2*16;
3034
#endif
3035
    }
3036
#ifdef DEBUG_ZLIB
3037
    s->bits_sent += (ulg)len<<3;
3038
#endif
3039
    /* bundle up the put_byte(s, *buf++) calls */
3040
    zmemcpy(&s->pending_buf[s->pending], buf, len);
3041
    s->pending += len;
3042
}
3043
/* --- trees.c */
3044
 
3045
/* +++ inflate.c */
3046
/* inflate.c -- zlib interface to inflate modules
3047
 * Copyright (C) 1995-1996 Mark Adler
3048
 * For conditions of distribution and use, see copyright notice in zlib.h
3049
 */
3050
 
3051
/* #include "zutil.h" */
3052
 
3053
/* +++ infblock.h */
3054
/* infblock.h -- header to use infblock.c
3055
 * Copyright (C) 1995-1996 Mark Adler
3056
 * For conditions of distribution and use, see copyright notice in zlib.h
3057
 */
3058
 
3059
/* WARNING: this file should *not* be used by applications. It is
3060
   part of the implementation of the compression library and is
3061
   subject to change. Applications should only use zlib.h.
3062
 */
3063
 
3064
struct inflate_blocks_state;
3065
typedef struct inflate_blocks_state FAR inflate_blocks_statef;
3066
 
3067
extern inflate_blocks_statef * inflate_blocks_new OF((
3068
    z_streamp z,
3069
    check_func c,               /* check function */
3070
    uInt w));                   /* window size */
3071
 
3072
extern int inflate_blocks OF((
3073
    inflate_blocks_statef *,
3074
    z_streamp ,
3075
    int));                      /* initial return code */
3076
 
3077
extern void inflate_blocks_reset OF((
3078
    inflate_blocks_statef *,
3079
    z_streamp ,
3080
    uLongf *));                  /* check value on output */
3081
 
3082
extern int inflate_blocks_free OF((
3083
    inflate_blocks_statef *,
3084
    z_streamp ,
3085
    uLongf *));                  /* check value on output */
3086
 
3087
extern void inflate_set_dictionary OF((
3088
    inflate_blocks_statef *s,
3089
    const Bytef *d,  /* dictionary */
3090
    uInt  n));       /* dictionary length */
3091
 
3092
extern int inflate_addhistory OF((
3093
    inflate_blocks_statef *,
3094
    z_streamp));
3095
 
3096
extern int inflate_packet_flush OF((
3097
    inflate_blocks_statef *));
3098
/* --- infblock.h */
3099
 
3100
#ifndef NO_DUMMY_DECL
3101
struct inflate_blocks_state {int dummy;}; /* for buggy compilers */
3102
#endif
3103
 
3104
/* inflate private state */
3105
struct internal_state {
3106
 
3107
  /* mode */
3108
  enum {
3109
      METHOD,   /* waiting for method byte */
3110
      FLAG,     /* waiting for flag byte */
3111
      DICT4,    /* four dictionary check bytes to go */
3112
      DICT3,    /* three dictionary check bytes to go */
3113
      DICT2,    /* two dictionary check bytes to go */
3114
      DICT1,    /* one dictionary check byte to go */
3115
      DICT0,    /* waiting for inflateSetDictionary */
3116
      BLOCKS,   /* decompressing blocks */
3117
      CHECK4,   /* four check bytes to go */
3118
      CHECK3,   /* three check bytes to go */
3119
      CHECK2,   /* two check bytes to go */
3120
      CHECK1,   /* one check byte to go */
3121
      DONE,     /* finished check, done */
3122
      BAD}      /* got an error--stay here */
3123
    mode;               /* current inflate mode */
3124
 
3125
  /* mode dependent information */
3126
  union {
3127
    uInt method;        /* if FLAGS, method byte */
3128
    struct {
3129
      uLong was;                /* computed check value */
3130
      uLong need;               /* stream check value */
3131
    } check;            /* if CHECK, check values to compare */
3132
    uInt marker;        /* if BAD, inflateSync's marker bytes count */
3133
  } sub;        /* submode */
3134
 
3135
  /* mode independent information */
3136
  int  nowrap;          /* flag for no wrapper */
3137
  uInt wbits;           /* log2(window size)  (8..15, defaults to 15) */
3138
  inflate_blocks_statef
3139
    *blocks;            /* current inflate_blocks state */
3140
 
3141
};
3142
 
3143
 
3144
int inflateReset(z)
3145
z_streamp z;
3146
{
3147
  uLong c;
3148
 
3149
  if (z == Z_NULL || z->state == Z_NULL)
3150
    return Z_STREAM_ERROR;
3151
  z->total_in = z->total_out = 0;
3152
  z->msg = Z_NULL;
3153
  z->state->mode = z->state->nowrap ? BLOCKS : METHOD;
3154
  inflate_blocks_reset(z->state->blocks, z, &c);
3155
  Trace((stderr, "inflate: reset\n"));
3156
  return Z_OK;
3157
}
3158
 
3159
 
3160
int inflateEnd(z)
3161
z_streamp z;
3162
{
3163
  uLong c;
3164
 
3165
  if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
3166
    return Z_STREAM_ERROR;
3167
  if (z->state->blocks != Z_NULL)
3168
    inflate_blocks_free(z->state->blocks, z, &c);
3169
  ZFREE(z, z->state);
3170
  z->state = Z_NULL;
3171
  Trace((stderr, "inflate: end\n"));
3172
  return Z_OK;
3173
}
3174
 
3175
 
3176
int inflateInit2_(z, w, version, stream_size)
3177
z_streamp z;
3178
int w;
3179
const char *version;
3180
int stream_size;
3181
{
3182
  if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||
3183
      stream_size != sizeof(z_stream))
3184
      return Z_VERSION_ERROR;
3185
 
3186
  /* initialize state */
3187
  if (z == Z_NULL)
3188
    return Z_STREAM_ERROR;
3189
  z->msg = Z_NULL;
3190
#ifndef NO_ZCFUNCS
3191
  if (z->zalloc == Z_NULL)
3192
  {
3193
    z->zalloc = zcalloc;
3194
    z->opaque = (voidpf)0;
3195
  }
3196
  if (z->zfree == Z_NULL) z->zfree = zcfree;
3197
#endif
3198
  if ((z->state = (struct internal_state FAR *)
3199
       ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL)
3200
    return Z_MEM_ERROR;
3201
  z->state->blocks = Z_NULL;
3202
 
3203
  /* handle undocumented nowrap option (no zlib header or check) */
3204
  z->state->nowrap = 0;
3205
  if (w < 0)
3206
  {
3207
    w = - w;
3208
    z->state->nowrap = 1;
3209
  }
3210
 
3211
  /* set window size */
3212
  if (w < 8 || w > 15)
3213
  {
3214
    inflateEnd(z);
3215
    return Z_STREAM_ERROR;
3216
  }
3217
  z->state->wbits = (uInt)w;
3218
 
3219
  /* create inflate_blocks state */
3220
  if ((z->state->blocks =
3221
      inflate_blocks_new(z, z->state->nowrap ? Z_NULL : adler32, (uInt)1 << w))
3222
      == Z_NULL)
3223
  {
3224
    inflateEnd(z);
3225
    return Z_MEM_ERROR;
3226
  }
3227
  Trace((stderr, "inflate: allocated\n"));
3228
 
3229
  /* reset state */
3230
  inflateReset(z);
3231
  return Z_OK;
3232
}
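/* Editor's sketch (hypothetical usage, not part of the original source):
 * the negative-windowBits convention handled above.  Passing -15 requests
 * raw deflate data with no zlib header or adler32 trailer and a 32K
 * window; this assumes the library's default allocator hooks are in use.
 */
#if 0
local int open_raw_inflate(z)
    z_streamp z;
{
    z->zalloc = Z_NULL;            /* take the default allocator */
    z->zfree  = Z_NULL;
    z->opaque = (voidpf)0;
    return inflateInit2(z, -15);   /* nowrap: |w| still picks the window */
}
#endif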
3233
 
3234
 
3235
int inflateInit_(z, version, stream_size)
3236
z_streamp z;
3237
const char *version;
3238
int stream_size;
3239
{
3240
  return inflateInit2_(z, DEF_WBITS, version, stream_size);
3241
}
3242
 
3243
 
3244
#define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}
3245
#define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)
3246
 
3247
int inflate(z, f)
3248
z_streamp z;
3249
int f;
3250
{
3251
  int r;
3252
  uInt b;
3253
 
3254
  if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL || f < 0)
3255
    return Z_STREAM_ERROR;
3256
  r = Z_BUF_ERROR;
3257
  while (1) switch (z->state->mode)
3258
  {
3259
    case METHOD:
3260
      NEEDBYTE
3261
      if (((z->state->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED)
3262
      {
3263
        z->state->mode = BAD;
3264
        z->msg = (char*)"unknown compression method";
3265
        z->state->sub.marker = 5;       /* can't try inflateSync */
3266
        break;
3267
      }
3268
      if ((z->state->sub.method >> 4) + 8 > z->state->wbits)
3269
      {
3270
        z->state->mode = BAD;
3271
        z->msg = (char*)"invalid window size";
3272
        z->state->sub.marker = 5;       /* can't try inflateSync */
3273
        break;
3274
      }
3275
      z->state->mode = FLAG;
3276
    case FLAG:
3277
      NEEDBYTE
3278
      b = NEXTBYTE;
3279
      if (((z->state->sub.method << 8) + b) % 31)
3280
      {
3281
        z->state->mode = BAD;
3282
        z->msg = (char*)"incorrect header check";
3283
        z->state->sub.marker = 5;       /* can't try inflateSync */
3284
        break;
3285
      }
3286
      Trace((stderr, "inflate: zlib header ok\n"));
3287
      if (!(b & PRESET_DICT))
3288
      {
3289
        z->state->mode = BLOCKS;
3290
        break;
3291
      }
3292
      z->state->mode = DICT4;
3293
    case DICT4:
3294
      NEEDBYTE
3295
      z->state->sub.check.need = (uLong)NEXTBYTE << 24;
3296
      z->state->mode = DICT3;
3297
    case DICT3:
3298
      NEEDBYTE
3299
      z->state->sub.check.need += (uLong)NEXTBYTE << 16;
3300
      z->state->mode = DICT2;
3301
    case DICT2:
3302
      NEEDBYTE
3303
      z->state->sub.check.need += (uLong)NEXTBYTE << 8;
3304
      z->state->mode = DICT1;
3305
    case DICT1:
3306
      NEEDBYTE
3307
      z->state->sub.check.need += (uLong)NEXTBYTE;
3308
      z->adler = z->state->sub.check.need;
3309
      z->state->mode = DICT0;
3310
      return Z_NEED_DICT;
3311
    case DICT0:
3312
      z->state->mode = BAD;
3313
      z->msg = (char*)"need dictionary";
3314
      z->state->sub.marker = 0;       /* can try inflateSync */
3315
      return Z_STREAM_ERROR;
3316
    case BLOCKS:
3317
      r = inflate_blocks(z->state->blocks, z, r);
3318
      if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)
3319
          r = inflate_packet_flush(z->state->blocks);
3320
      if (r == Z_DATA_ERROR)
3321
      {
3322
        z->state->mode = BAD;
3323
        z->state->sub.marker = 0;       /* can try inflateSync */
3324
        break;
3325
      }
3326
      if (r != Z_STREAM_END)
3327
        return r;
3328
      r = Z_OK;
3329
      inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);
3330
      if (z->state->nowrap)
3331
      {
3332
        z->state->mode = DONE;
3333
        break;
3334
      }
3335
      z->state->mode = CHECK4;
3336
    case CHECK4:
3337
      NEEDBYTE
3338
      z->state->sub.check.need = (uLong)NEXTBYTE << 24;
3339
      z->state->mode = CHECK3;
3340
    case CHECK3:
3341
      NEEDBYTE
3342
      z->state->sub.check.need += (uLong)NEXTBYTE << 16;
3343
      z->state->mode = CHECK2;
3344
    case CHECK2:
3345
      NEEDBYTE
3346
      z->state->sub.check.need += (uLong)NEXTBYTE << 8;
3347
      z->state->mode = CHECK1;
3348
    case CHECK1:
3349
      NEEDBYTE
3350
      z->state->sub.check.need += (uLong)NEXTBYTE;
3351
 
3352
      if (z->state->sub.check.was != z->state->sub.check.need)
3353
      {
3354
        z->state->mode = BAD;
3355
        z->msg = (char*)"incorrect data check";
3356
        z->state->sub.marker = 5;       /* can't try inflateSync */
3357
        break;
3358
      }
3359
      Trace((stderr, "inflate: zlib check ok\n"));
3360
      z->state->mode = DONE;
3361
    case DONE:
3362
      return Z_STREAM_END;
3363
    case BAD:
3364
      return Z_DATA_ERROR;
3365
    default:
3366
      return Z_STREAM_ERROR;
3367
  }
3368
 
3369
 empty:
3370
  if (f != Z_PACKET_FLUSH)
3371
    return r;
3372
  z->state->mode = BAD;
3373
  z->msg = (char *)"need more for packet flush";
3374
  z->state->sub.marker = 0;       /* can try inflateSync */
3375
  return Z_DATA_ERROR;
3376
}
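/* Editor's note (worked example, not from the original source): the common
 * zlib header 0x78 0x9C passes the checks above: the method nibble is
 * 0x78 & 0xf = 8 (Z_DEFLATED), the window size is (0x78 >> 4) + 8 = 15
 * bits, the value (0x78 << 8) + 0x9C = 30876 is divisible by 31, and the
 * PRESET_DICT flag bit is clear, so decoding proceeds straight to BLOCKS.
 */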
3377
 
3378
 
3379
int inflateSetDictionary(z, dictionary, dictLength)
3380
z_streamp z;
3381
const Bytef *dictionary;
3382
uInt  dictLength;
3383
{
3384
  uInt length = dictLength;
3385
 
3386
  if (z == Z_NULL || z->state == Z_NULL || z->state->mode != DICT0)
3387
    return Z_STREAM_ERROR;
3388
 
3389
  if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR;
3390
  z->adler = 1L;
3391
 
3392
  if (length >= ((uInt)1<<z->state->wbits))
3393
  {
3394
    length = (1<<z->state->wbits)-1;
3395
    dictionary += dictLength - length;
3396
  }
3397
  inflate_set_dictionary(z->state->blocks, dictionary, length);
3398
  z->state->mode = BLOCKS;
3399
  return Z_OK;
3400
}
3401
 
3402
/*
3403
 * This subroutine adds the data at next_in/avail_in to the output history
3404
 * without performing any output.  The output buffer must be "caught up";
3405
 * i.e. no pending output (hence s->read equals s->write), and the state must
3406
 * be BLOCKS (i.e. we should be willing to see the start of a series of
3407
 * BLOCKS).  On exit, the output will also be caught up, and the checksum
3408
 * will have been updated if need be.
3409
 */
3410
 
3411
int inflateIncomp(z)
3412
z_stream *z;
3413
{
3414
    if (z->state->mode != BLOCKS)
3415
        return Z_DATA_ERROR;
3416
    return inflate_addhistory(z->state->blocks, z);
3417
}
3418
 
3419
 
3420
int inflateSync(z)
3421
z_streamp z;
3422
{
3423
  uInt n;       /* number of bytes to look at */
3424
  Bytef *p;     /* pointer to bytes */
3425
  uInt m;       /* number of marker bytes found in a row */
3426
  uLong r, w;   /* temporaries to save total_in and total_out */
3427
 
3428
  /* set up */
3429
  if (z == Z_NULL || z->state == Z_NULL)
3430
    return Z_STREAM_ERROR;
3431
  if (z->state->mode != BAD)
3432
  {
3433
    z->state->mode = BAD;
3434
    z->state->sub.marker = 0;
3435
  }
3436
  if ((n = z->avail_in) == 0)
3437
    return Z_BUF_ERROR;
3438
  p = z->next_in;
3439
  m = z->state->sub.marker;
3440
 
3441
  /* search */
3442
  while (n && m < 4)
3443
  {
3444
    if (*p == (Byte)(m < 2 ? 0 : 0xff))
3445
      m++;
3446
    else if (*p)
3447
      m = 0;
3448
    else
3449
      m = 4 - m;
3450
    p++, n--;
3451
  }
3452
 
3453
  /* restore */
3454
  z->total_in += p - z->next_in;
3455
  z->next_in = p;
3456
  z->avail_in = n;
3457
  z->state->sub.marker = m;
3458
 
3459
  /* return no joy or set up to restart on a new block */
3460
  if (m != 4)
3461
    return Z_DATA_ERROR;
3462
  r = z->total_in;  w = z->total_out;
3463
  inflateReset(z);
3464
  z->total_in = r;  z->total_out = w;
3465
  z->state->mode = BLOCKS;
3466
  return Z_OK;
3467
}
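/* Editor's sketch (hypothetical usage, not part of the original source):
 * a minimal recovery step built on inflateSync().  After a Z_DATA_ERROR
 * the caller keeps next_in/avail_in topped up and retries until the
 * 00 00 FF FF pattern left by an empty stored block is found.
 */
#if 0
local int resync_and_continue(z)
    z_streamp z;
{
    int r = inflateSync(z);        /* scan the input for the sync marker */
    if (r != Z_OK)
        return r;                  /* Z_BUF_ERROR or Z_DATA_ERROR: need more input */
    return inflate(z, Z_NO_FLUSH); /* resume decoding at the new block */
}
#endif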
3468
 
3469
#undef NEEDBYTE
3470
#undef NEXTBYTE
3471
/* --- inflate.c */
3472
 
3473
/* +++ infblock.c */
3474
/* infblock.c -- interpret and process block types to last block
3475
 * Copyright (C) 1995-1996 Mark Adler
3476
 * For conditions of distribution and use, see copyright notice in zlib.h
3477
 */
3478
 
3479
/* #include "zutil.h" */
3480
/* #include "infblock.h" */
3481
 
3482
/* +++ inftrees.h */
3483
/* inftrees.h -- header to use inftrees.c
3484
 * Copyright (C) 1995-1996 Mark Adler
3485
 * For conditions of distribution and use, see copyright notice in zlib.h
3486
 */
3487
 
3488
/* WARNING: this file should *not* be used by applications. It is
3489
   part of the implementation of the compression library and is
3490
   subject to change. Applications should only use zlib.h.
3491
 */
3492
 
3493
/* Huffman code lookup table entry--this entry is four bytes for machines
3494
   that have 16-bit pointers (e.g. PC's in the small or medium model). */
3495
 
3496
typedef struct inflate_huft_s FAR inflate_huft;
3497
 
3498
struct inflate_huft_s {
3499
  union {
3500
    struct {
3501
      Byte Exop;        /* number of extra bits or operation */
3502
      Byte Bits;        /* number of bits in this code or subcode */
3503
    } what;
3504
    Bytef *pad;         /* pad structure to a power of 2 (4 bytes for */
3505
  } word;               /*  16-bit, 8 bytes for 32-bit machines) */
3506
  union {
3507
    uInt Base;          /* literal, length base, or distance base */
3508
    inflate_huft *Next; /* pointer to next level of table */
3509
  } more;
3510
};
3511
 
3512
#ifdef DEBUG_ZLIB
3513
  extern uInt inflate_hufts;
3514
#endif
3515
 
3516
extern int inflate_trees_bits OF((
3517
    uIntf *,                    /* 19 code lengths */
3518
    uIntf *,                    /* bits tree desired/actual depth */
3519
    inflate_huft * FAR *,       /* bits tree result */
3520
    z_streamp ));               /* for zalloc, zfree functions */
3521
 
3522
extern int inflate_trees_dynamic OF((
3523
    uInt,                       /* number of literal/length codes */
3524
    uInt,                       /* number of distance codes */
3525
    uIntf *,                    /* that many (total) code lengths */
3526
    uIntf *,                    /* literal desired/actual bit depth */
3527
    uIntf *,                    /* distance desired/actual bit depth */
3528
    inflate_huft * FAR *,       /* literal/length tree result */
3529
    inflate_huft * FAR *,       /* distance tree result */
3530
    z_streamp ));               /* for zalloc, zfree functions */
3531
 
3532
extern int inflate_trees_fixed OF((
3533
    uIntf *,                    /* literal desired/actual bit depth */
3534
    uIntf *,                    /* distance desired/actual bit depth */
3535
    inflate_huft * FAR *,       /* literal/length tree result */
3536
    inflate_huft * FAR *));     /* distance tree result */
3537
 
3538
extern int inflate_trees_free OF((
3539
    inflate_huft *,             /* tables to free */
3540
    z_streamp ));               /* for zfree function */
3541
 
3542
/* --- inftrees.h */
3543
 
3544
/* +++ infcodes.h */
3545
/* infcodes.h -- header to use infcodes.c
3546
 * Copyright (C) 1995-1996 Mark Adler
3547
 * For conditions of distribution and use, see copyright notice in zlib.h
3548
 */
3549
 
3550
/* WARNING: this file should *not* be used by applications. It is
3551
   part of the implementation of the compression library and is
3552
   subject to change. Applications should only use zlib.h.
3553
 */
3554
 
3555
struct inflate_codes_state;
3556
typedef struct inflate_codes_state FAR inflate_codes_statef;
3557
 
3558
extern inflate_codes_statef *inflate_codes_new OF((
3559
    uInt, uInt,
3560
    inflate_huft *, inflate_huft *,
3561
    z_streamp ));
3562
 
3563
extern int inflate_codes OF((
3564
    inflate_blocks_statef *,
3565
    z_streamp ,
3566
    int));
3567
 
3568
extern void inflate_codes_free OF((
3569
    inflate_codes_statef *,
3570
    z_streamp ));
3571
 
3572
/* --- infcodes.h */
3573
 
3574
/* +++ infutil.h */
3575
/* infutil.h -- types and macros common to blocks and codes
3576
 * Copyright (C) 1995-1996 Mark Adler
3577
 * For conditions of distribution and use, see copyright notice in zlib.h
3578
 */
3579
 
3580
/* WARNING: this file should *not* be used by applications. It is
3581
   part of the implementation of the compression library and is
3582
   subject to change. Applications should only use zlib.h.
3583
 */
3584
 
3585
#ifndef _INFUTIL_H
3586
#define _INFUTIL_H
3587
 
3588
typedef enum {
3589
      TYPE,     /* get type bits (3, including end bit) */
3590
      LENS,     /* get lengths for stored */
3591
      STORED,   /* processing stored block */
3592
      TABLE,    /* get table lengths */
3593
      BTREE,    /* get bit lengths tree for a dynamic block */
3594
      DTREE,    /* get length, distance trees for a dynamic block */
3595
      CODES,    /* processing fixed or dynamic block */
3596
      DRY,      /* output remaining window bytes */
3597
      DONEB,    /* finished last block, done */
3598
      BADB}     /* got a data error--stuck here */
3599
inflate_block_mode;
3600
 
3601
/* inflate blocks semi-private state */
3602
struct inflate_blocks_state {
3603
 
3604
  /* mode */
3605
  inflate_block_mode  mode;     /* current inflate_block mode */
3606
 
3607
  /* mode dependent information */
3608
  union {
3609
    uInt left;          /* if STORED, bytes left to copy */
3610
    struct {
3611
      uInt table;               /* table lengths (14 bits) */
3612
      uInt index;               /* index into blens (or border) */
3613
      uIntf *blens;             /* bit lengths of codes */
3614
      uInt bb;                  /* bit length tree depth */
3615
      inflate_huft *tb;         /* bit length decoding tree */
3616
    } trees;            /* if DTREE, decoding info for trees */
3617
    struct {
3618
      inflate_huft *tl;
3619
      inflate_huft *td;         /* trees to free */
3620
      inflate_codes_statef
3621
         *codes;
3622
    } decode;           /* if CODES, current state */
3623
  } sub;                /* submode */
3624
  uInt last;            /* true if this block is the last block */
3625
 
3626
  /* mode independent information */
3627
  uInt bitk;            /* bits in bit buffer */
3628
  uLong bitb;           /* bit buffer */
3629
  Bytef *window;        /* sliding window */
3630
  Bytef *end;           /* one byte after sliding window */
3631
  Bytef *read;          /* window read pointer */
3632
  Bytef *write;         /* window write pointer */
3633
  check_func checkfn;   /* check function */
3634
  uLong check;          /* check on output */
3635
 
3636
};
3637
 
3638
 
3639
/* defines for inflate input/output */
3640
/*   update pointers and return */
3641
#define UPDBITS {s->bitb=b;s->bitk=k;}
3642
#define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
3643
#define UPDOUT {s->write=q;}
3644
#define UPDATE {UPDBITS UPDIN UPDOUT}
3645
#define LEAVE {UPDATE return inflate_flush(s,z,r);}
3646
/*   get bytes and bits */
3647
#define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
3648
#define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
3649
#define NEXTBYTE (n--,*p++)
3650
#define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
3651
#define DUMPBITS(j) {b>>=(j);k-=(j);}
3652
/*   output bytes */
3653
#define WAVAIL (uInt)(q<s->read?s->read-q-1:s->end-q)
3654
#define LOADOUT {q=s->write;m=(uInt)WAVAIL;}
3655
#define WWRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}}
3656
#define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
3657
#define NEEDOUT {if(m==0){WWRAP if(m==0){FLUSH WWRAP if(m==0) LEAVE}}r=Z_OK;}
3658
#define OUTBYTE(a) {*q++=(Byte)(a);m--;}
3659
/*   load local pointers */
3660
#define LOAD {LOADIN LOADOUT}
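/* Editor's note (not part of the original source): the macros above keep
 * the bit buffer b holding bitk valid bits, least significant bit first.
 * A typical sequence inside inflate_blocks() looks like the uncompiled
 * fragment below (the usual locals s, z, r, t, b, k, p, n, q, m are
 * assumed to be in scope): top the buffer up, peek at the low bits, then
 * discard them.
 */
#if 0
      LOAD                            /* pull stream state into locals */
      NEEDBITS(3)                     /* ensure at least 3 bits in b */
      t = (uInt)b & inflate_mask[3];  /* low bit: last-block flag, next two: type */
      DUMPBITS(3)                     /* consume the three header bits */
      UPDATE                          /* write the locals back to s and z */
#endif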
3661
 
3662
/* masks for lower bits (size given to avoid silly warnings with Visual C++) */
3663
extern uInt inflate_mask[17];
3664
 
3665
/* copy as much as possible from the sliding window to the output area */
3666
extern int inflate_flush OF((
3667
    inflate_blocks_statef *,
3668
    z_streamp ,
3669
    int));
3670
 
3671
#ifndef NO_DUMMY_DECL
3672
struct internal_state      {int dummy;}; /* for buggy compilers */
3673
#endif
3674
 
3675
#endif
3676
/* --- infutil.h */
3677
 
3678
#ifndef NO_DUMMY_DECL
3679
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
3680
#endif
3681
 
3682
/* Table for deflate from PKZIP's appnote.txt. */
3683
local const uInt border[] = { /* Order of the bit length code lengths */
3684
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
3685
 
3686
/*
3687
   Notes beyond the 1.93a appnote.txt:
3688
 
3689
   1. Distance pointers never point before the beginning of the output
3690
      stream.
3691
   2. Distance pointers can point back across blocks, up to 32k away.
3692
   3. There is an implied maximum of 7 bits for the bit length table and
3693
      15 bits for the actual data.
3694
   4. If only one code exists, then it is encoded using one bit.  (Zero
3695
      would be more efficient, but perhaps a little confusing.)  If two
3696
      codes exist, they are coded using one bit each (0 and 1).
3697
   5. There is no way of sending zero distance codes--a dummy must be
3698
      sent if there are none.  (History: a pre 2.0 version of PKZIP would
3699
      store blocks with no distance codes, but this was discovered to be
3700
      too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
3701
      zero distance codes, which is sent as one code of zero bits in
3702
      length.
3703
   6. There are up to 286 literal/length codes.  Code 256 represents the
3704
      end-of-block.  Note however that the static length tree defines
3705
      288 codes just to fill out the Huffman codes.  Codes 286 and 287
3706
      cannot be used though, since there is no length base or extra bits
3707
      defined for them.  Similarly, there are up to 30 distance codes.
3708
      However, static trees define 32 codes (all 5 bits) to fill out the
3709
      Huffman codes, but the last two had better not show up in the data.
3710
   7. Unzip can check dynamic Huffman blocks for complete code sets.
3711
      The exception is that a single code would not be complete (see #4).
3712
   8. The five bits following the block type are really the number of
3713
      literal codes sent minus 257.
3714
   9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
3715
      (1+6+6).  Therefore, to output three times the length, you output
3716
      three codes (1+1+1), whereas to output four times the same length,
3717
      you only need two codes (1+3).  Hmm.
3718
  10. In the tree reconstruction algorithm, Code = Code + Increment
3719
      only if BitLength(i) is not zero.  (Pretty obvious.)
3720
  11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
3721
  12. Note: length code 284 can represent 227-258, but length code 285
3722
      really is 258.  The last length deserves its own, short code
3723
      since it gets used a lot in very redundant files.  The length
3724
      258 is special since 258 - 3 (the min match length) is 255.
3725
  13. The literal/length and distance code bit lengths are read as a
3726
      single stream of lengths.  It is possible (and advantageous) for
3727
      a repeat code (16, 17, or 18) to go across the boundary between
3728
      the two sets of lengths.
3729
 */
3730
 
3731
 
3732
void inflate_blocks_reset(s, z, c)
3733
inflate_blocks_statef *s;
3734
z_streamp z;
3735
uLongf *c;
3736
{
3737
  if (s->checkfn != Z_NULL)
3738
    *c = s->check;
3739
  if (s->mode == BTREE || s->mode == DTREE)
3740
    ZFREE(z, s->sub.trees.blens);
3741
  if (s->mode == CODES)
3742
  {
3743
    inflate_codes_free(s->sub.decode.codes, z);
3744
    inflate_trees_free(s->sub.decode.td, z);
3745
    inflate_trees_free(s->sub.decode.tl, z);
3746
  }
3747
  s->mode = TYPE;
3748
  s->bitk = 0;
3749
  s->bitb = 0;
3750
  s->read = s->write = s->window;
3751
  if (s->checkfn != Z_NULL)
3752
    z->adler = s->check = (*s->checkfn)(0L, Z_NULL, 0);
3753
  Trace((stderr, "inflate:   blocks reset\n"));
3754
}
3755
 
3756
 
3757
inflate_blocks_statef *inflate_blocks_new(z, c, w)
3758
z_streamp z;
3759
check_func c;
3760
uInt w;
3761
{
3762
  inflate_blocks_statef *s;
3763
 
3764
  if ((s = (inflate_blocks_statef *)ZALLOC
3765
       (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
3766
    return s;
3767
  if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
3768
  {
3769
    ZFREE(z, s);
3770
    return Z_NULL;
3771
  }
3772
  s->end = s->window + w;
3773
  s->checkfn = c;
3774
  s->mode = TYPE;
3775
  Trace((stderr, "inflate:   blocks allocated\n"));
3776
  inflate_blocks_reset(s, z, &s->check);
3777
  return s;
3778
}
3779
 
3780
 
3781
#ifdef DEBUG_ZLIB
3782
  extern uInt inflate_hufts;
3783
#endif
3784
int inflate_blocks(s, z, r)
3785
inflate_blocks_statef *s;
3786
z_streamp z;
3787
int r;
3788
{
3789
  uInt t;               /* temporary storage */
3790
  uLong b;              /* bit buffer */
3791
  uInt k;               /* bits in bit buffer */
3792
  Bytef *p;             /* input data pointer */
3793
  uInt n;               /* bytes available there */
3794
  Bytef *q;             /* output window write pointer */
3795
  uInt m;               /* bytes to end of window or read pointer */
3796
 
3797
  /* copy input/output information to locals (UPDATE macro restores) */
3798
  LOAD
3799
 
3800
  /* process input based on current state */
3801
  while (1) switch (s->mode)
3802
  {
3803
    case TYPE:
3804
      NEEDBITS(3)
3805
      t = (uInt)b & 7;
3806
      s->last = t & 1;
3807
      switch (t >> 1)
3808
      {
3809
        case 0:                         /* stored */
3810
          Trace((stderr, "inflate:     stored block%s\n",
3811
                 s->last ? " (last)" : ""));
3812
          DUMPBITS(3)
3813
          t = k & 7;                    /* go to byte boundary */
3814
          DUMPBITS(t)
3815
          s->mode = LENS;               /* get length of stored block */
3816
          break;
3817
        case 1:                         /* fixed */
3818
          Trace((stderr, "inflate:     fixed codes block%s\n",
3819
                 s->last ? " (last)" : ""));
3820
          {
3821
            uInt bl, bd;
3822
            inflate_huft *tl, *td;
3823
 
3824
            inflate_trees_fixed(&bl, &bd, &tl, &td);
3825
            s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
3826
            if (s->sub.decode.codes == Z_NULL)
3827
            {
3828
              r = Z_MEM_ERROR;
3829
              LEAVE
3830
            }
3831
            s->sub.decode.tl = Z_NULL;  /* don't try to free these */
3832
            s->sub.decode.td = Z_NULL;
3833
          }
3834
          DUMPBITS(3)
3835
          s->mode = CODES;
3836
          break;
3837
        case 2:                         /* dynamic */
3838
          Trace((stderr, "inflate:     dynamic codes block%s\n",
3839
                 s->last ? " (last)" : ""));
3840
          DUMPBITS(3)
3841
          s->mode = TABLE;
3842
          break;
3843
        case 3:                         /* illegal */
3844
          DUMPBITS(3)
3845
          s->mode = BADB;
3846
          z->msg = (char*)"invalid block type";
3847
          r = Z_DATA_ERROR;
3848
          LEAVE
3849
      }
3850
      break;
3851
    case LENS:
3852
      NEEDBITS(32)
3853
      if ((((~b) >> 16) & 0xffff) != (b & 0xffff))
3854
      {
3855
        s->mode = BADB;
3856
        z->msg = (char*)"invalid stored block lengths";
3857
        r = Z_DATA_ERROR;
3858
        LEAVE
3859
      }
3860
      s->sub.left = (uInt)b & 0xffff;
3861
      b = k = 0;                      /* dump bits */
3862
      Tracev((stderr, "inflate:       stored length %u\n", s->sub.left));
3863
      s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE);
3864
      break;
3865
    case STORED:
3866
      if (n == 0)
3867
        LEAVE
3868
      NEEDOUT
3869
      t = s->sub.left;
3870
      if (t > n) t = n;
3871
      if (t > m) t = m;
3872
      zmemcpy(q, p, t);
3873
      p += t;  n -= t;
3874
      q += t;  m -= t;
3875
      if ((s->sub.left -= t) != 0)
3876
        break;
3877
      Tracev((stderr, "inflate:       stored end, %lu total out\n",
3878
              z->total_out + (q >= s->read ? q - s->read :
3879
              (s->end - s->read) + (q - s->window))));
3880
      s->mode = s->last ? DRY : TYPE;
3881
      break;
3882
    case TABLE:
3883
      NEEDBITS(14)
3884
      s->sub.trees.table = t = (uInt)b & 0x3fff;
3885
#ifndef PKZIP_BUG_WORKAROUND
3886
      if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
3887
      {
3888
        s->mode = BADB;
3889
        z->msg = (char*)"too many length or distance symbols";
3890
        r = Z_DATA_ERROR;
3891
        LEAVE
3892
      }
3893
#endif
3894
      t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
3895
      if (t < 19)
3896
        t = 19;
3897
      if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
3898
      {
3899
        r = Z_MEM_ERROR;
3900
        LEAVE
3901
      }
3902
      DUMPBITS(14)
3903
      s->sub.trees.index = 0;
3904
      Tracev((stderr, "inflate:       table sizes ok\n"));
3905
      s->mode = BTREE;
3906
    case BTREE:
3907
      while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
3908
      {
3909
        NEEDBITS(3)
3910
        s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
3911
        DUMPBITS(3)
3912
      }
3913
      while (s->sub.trees.index < 19)
3914
        s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
3915
      s->sub.trees.bb = 7;
3916
      t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
3917
                             &s->sub.trees.tb, z);
3918
      if (t != Z_OK)
3919
      {
3920
        r = t;
3921
        if (r == Z_DATA_ERROR) {
3922
          ZFREE(z, s->sub.trees.blens);
3923
          s->mode = BADB;
3924
        }
3925
        LEAVE
3926
      }
3927
      s->sub.trees.index = 0;
3928
      Tracev((stderr, "inflate:       bits tree ok\n"));
3929
      s->mode = DTREE;
3930
    case DTREE:
3931
      while (t = s->sub.trees.table,
3932
             s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
3933
      {
3934
        inflate_huft *h;
3935
        uInt i, j, c;
3936
 
3937
        t = s->sub.trees.bb;
3938
        NEEDBITS(t)
3939
        h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
3940
        t = h->word.what.Bits;
3941
        c = h->more.Base;
3942
        if (c < 16)
3943
        {
3944
          DUMPBITS(t)
3945
          s->sub.trees.blens[s->sub.trees.index++] = c;
3946
        }
3947
        else /* c == 16..18 */
3948
        {
3949
          i = c == 18 ? 7 : c - 14;
3950
          j = c == 18 ? 11 : 3;
3951
          NEEDBITS(t + i)
3952
          DUMPBITS(t)
3953
          j += (uInt)b & inflate_mask[i];
3954
          DUMPBITS(i)
3955
          i = s->sub.trees.index;
3956
          t = s->sub.trees.table;
3957
          if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
3958
              (c == 16 && i < 1))
3959
          {
3960
            inflate_trees_free(s->sub.trees.tb, z);
3961
            ZFREE(z, s->sub.trees.blens);
3962
            s->mode = BADB;
3963
            z->msg = (char*)"invalid bit length repeat";
3964
            r = Z_DATA_ERROR;
3965
            LEAVE
3966
          }
3967
          c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
3968
          do {
3969
            s->sub.trees.blens[i++] = c;
3970
          } while (--j);
3971
          s->sub.trees.index = i;
3972
        }
3973
      }
3974
      inflate_trees_free(s->sub.trees.tb, z);
3975
      s->sub.trees.tb = Z_NULL;
3976
      {
3977
        uInt bl, bd;
3978
        inflate_huft *tl, *td;
3979
        inflate_codes_statef *c;
3980
 
3981
        bl = 9;         /* must be <= 9 for lookahead assumptions */
3982
        bd = 6;         /* must be <= 9 for lookahead assumptions */
3983
        t = s->sub.trees.table;
3984
#ifdef DEBUG_ZLIB
3985
      inflate_hufts = 0;
3986
#endif
3987
        t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
3988
                                  s->sub.trees.blens, &bl, &bd, &tl, &td, z);
3989
        if (t != Z_OK)
3990
        {
3991
          if (t == (uInt)Z_DATA_ERROR) {
3992
            ZFREE(z, s->sub.trees.blens);
3993
            s->mode = BADB;
3994
          }
3995
          r = t;
3996
          LEAVE
3997
        }
3998
        Tracev((stderr, "inflate:       trees ok, %d * %d bytes used\n",
3999
              inflate_hufts, sizeof(inflate_huft)));
4000
        if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
4001
        {
4002
          inflate_trees_free(td, z);
4003
          inflate_trees_free(tl, z);
4004
          r = Z_MEM_ERROR;
4005
          LEAVE
4006
        }
4007
        /*
4008
         * this ZFREE must occur *BEFORE* we mess with sub.decode, because
4009
         * sub.trees is union'd with sub.decode.
4010
         */
4011
        ZFREE(z, s->sub.trees.blens);
4012
        s->sub.decode.codes = c;
4013
        s->sub.decode.tl = tl;
4014
        s->sub.decode.td = td;
4015
      }
4016
      s->mode = CODES;
4017
    case CODES:
4018
      UPDATE
4019
      if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
4020
        return inflate_flush(s, z, r);
4021
      r = Z_OK;
4022
      inflate_codes_free(s->sub.decode.codes, z);
4023
      inflate_trees_free(s->sub.decode.td, z);
4024
      inflate_trees_free(s->sub.decode.tl, z);
4025
      LOAD
4026
      Tracev((stderr, "inflate:       codes end, %lu total out\n",
4027
              z->total_out + (q >= s->read ? q - s->read :
4028
              (s->end - s->read) + (q - s->window))));
4029
      if (!s->last)
4030
      {
4031
        s->mode = TYPE;
4032
        break;
4033
      }
4034
      if (k > 7)              /* return unused byte, if any */
4035
      {
4036
        Assert(k < 16, "inflate_codes grabbed too many bytes")
4037
        k -= 8;
4038
        n++;
4039
        p--;                    /* can always return one */
4040
      }
4041
      s->mode = DRY;
4042
    case DRY:
4043
      FLUSH
4044
      if (s->read != s->write)
4045
        LEAVE
4046
      s->mode = DONEB;
4047
    case DONEB:
4048
      r = Z_STREAM_END;
4049
      LEAVE
4050
    case BADB:
4051
      r = Z_DATA_ERROR;
4052
      LEAVE
4053
    default:
4054
      r = Z_STREAM_ERROR;
4055
      LEAVE
4056
  }
4057
}
4058
 
4059
 
4060
int inflate_blocks_free(s, z, c)
4061
inflate_blocks_statef *s;
4062
z_streamp z;
4063
uLongf *c;
4064
{
4065
  inflate_blocks_reset(s, z, c);
4066
  ZFREE(z, s->window);
4067
  ZFREE(z, s);
4068
  Trace((stderr, "inflate:   blocks freed\n"));
4069
  return Z_OK;
4070
}
4071
 
4072
 
4073
void inflate_set_dictionary(s, d, n)
4074
inflate_blocks_statef *s;
4075
const Bytef *d;
4076
uInt  n;
4077
{
4078
  zmemcpy((charf *)s->window, d, n);
4079
  s->read = s->write = s->window + n;
4080
}
4081
 
4082
/*
4083
 * This subroutine adds the data at next_in/avail_in to the output history
4084
 * without performing any output.  The output buffer must be "caught up";
4085
 * i.e. no pending output (hence s->read equals s->write), and the state must
4086
 * be BLOCKS (i.e. we should be willing to see the start of a series of
4087
 * BLOCKS).  On exit, the output will also be caught up, and the checksum
4088
 * will have been updated if need be.
4089
 */
4090
int inflate_addhistory(s, z)
4091
inflate_blocks_statef *s;
4092
z_stream *z;
4093
{
4094
    uLong b;              /* bit buffer */  /* NOT USED HERE */
4095
    uInt k;               /* bits in bit buffer */ /* NOT USED HERE */
4096
    uInt t;               /* temporary storage */
4097
    Bytef *p;             /* input data pointer */
4098
    uInt n;               /* bytes available there */
4099
    Bytef *q;             /* output window write pointer */
4100
    uInt m;               /* bytes to end of window or read pointer */
4101
 
4102
    if (s->read != s->write)
4103
        return Z_STREAM_ERROR;
4104
    if (s->mode != TYPE)
4105
        return Z_DATA_ERROR;
4106
 
4107
    /* we're ready to rock */
4108
    LOAD
4109
    /* while there is input ready, copy to output buffer, moving
4110
     * pointers as needed.
4111
     */
4112
    while (n) {
4113
        t = n;  /* how many to do */
4114
        /* is there room until end of buffer? */
4115
        if (t > m) t = m;
4116
        /* update check information */
4117
        if (s->checkfn != Z_NULL)
4118
            s->check = (*s->checkfn)(s->check, q, t);
4119
        zmemcpy(q, p, t);
4120
        q += t;
4121
        p += t;
4122
        n -= t;
4123
        z->total_out += t;
4124
        s->read = q;    /* drag read pointer forward */
4125
/*      WWRAP  */       /* expand WWRAP macro by hand to handle s->read */
4126
        if (q == s->end) {
4127
            s->read = q = s->window;
4128
            m = WAVAIL;
4129
        }
4130
    }
4131
    UPDATE
4132
    return Z_OK;
4133
}
4134
 
4135
 
4136
/*
4137
 * At the end of a Deflate-compressed PPP packet, we expect to have seen
4138
 * a `stored' block type value but not the (zero) length bytes.
4139
 */
4140
int inflate_packet_flush(s)
4141
    inflate_blocks_statef *s;
4142
{
4143
    if (s->mode != LENS)
4144
        return Z_DATA_ERROR;
4145
    s->mode = TYPE;
4146
    return Z_OK;
4147
}
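/* Editor's note (not part of the original source): this is the receive-side
 * half of the PPP packet flush.  The compressor's _tr_stored_type_only()
 * above emits just the 3-bit stored-block type and pads to a byte boundary,
 * so the decoder ends each packet in the LENS state, waiting for length
 * bytes that never arrive; resetting the mode to TYPE here lets the next
 * packet start cleanly.
 */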
4148
/* --- infblock.c */
4149
 
4150
/* +++ inftrees.c */
4151
/* inftrees.c -- generate Huffman trees for efficient decoding
4152
 * Copyright (C) 1995-1996 Mark Adler
4153
 * For conditions of distribution and use, see copyright notice in zlib.h
4154
 */
4155
 
4156
/* #include "zutil.h" */
4157
/* #include "inftrees.h" */
4158
 
4159
char inflate_copyright[] = " inflate 1.0.4 Copyright 1995-1996 Mark Adler ";
4160
/*
4161
  If you use the zlib library in a product, an acknowledgment is welcome
4162
  in the documentation of your product. If for some reason you cannot
4163
  include such an acknowledgment, I would appreciate that you keep this
4164
  copyright string in the executable of your product.
4165
 */
4166
 
4167
#ifndef NO_DUMMY_DECL
4168
struct internal_state  {int dummy;}; /* for buggy compilers */
4169
#endif
4170
 
4171
/* simplify the use of the inflate_huft type with some defines */
4172
#define base more.Base
4173
#define next more.Next
4174
#define exop word.what.Exop
4175
#define bits word.what.Bits
4176
 
4177
 
4178
local int huft_build OF((
4179
    uIntf *,            /* code lengths in bits */
4180
    uInt,               /* number of codes */
4181
    uInt,               /* number of "simple" codes */
4182
    const uIntf *,      /* list of base values for non-simple codes */
4183
    const uIntf *,      /* list of extra bits for non-simple codes */
4184
    inflate_huft * FAR*,/* result: starting table */
4185
    uIntf *,            /* maximum lookup bits (returns actual) */
4186
    z_streamp ));       /* for zalloc function */
4187
 
4188
local voidpf falloc OF((
4189
    voidpf,             /* opaque pointer (not used) */
4190
    uInt,               /* number of items */
4191
    uInt));             /* size of item */
4192
 
4193
/* Tables for deflate from PKZIP's appnote.txt. */
4194
local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */
4195
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
4196
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
4197
        /* see note #13 above about 258 */
4198
local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */
4199
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
4200
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */
4201
local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */
4202
        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
4203
        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
4204
        8193, 12289, 16385, 24577};
4205
local const uInt cpdext[30] = { /* Extra bits for distance codes */
4206
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
4207
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
4208
        12, 12, 13, 13};
4209
 
4210
/*
4211
   Huffman code decoding is performed using a multi-level table lookup.
4212
   The fastest way to decode is to simply build a lookup table whose
4213
   size is determined by the longest code.  However, the time it takes
4214
   to build this table can also be a factor if the data being decoded
4215
   is not very long.  The most common codes are necessarily the
4216
   shortest codes, so those codes dominate the decoding time, and hence
4217
   the speed.  The idea is you can have a shorter table that decodes the
4218
   shorter, more probable codes, and then point to subsidiary tables for
4219
   the longer codes.  The time it costs to decode the longer codes is
4220
   then traded against the time it takes to make longer tables.
4221
 
4222
   The results of this trade are in the variables lbits and dbits
4223
   below.  lbits is the number of bits the first level table for literal/
4224
   length codes can decode in one step, and dbits is the same thing for
4225
   the distance codes.  Subsequent tables are also less than or equal to
4226
   those sizes.  These values may be adjusted either when all of the
4227
   codes are shorter than that, in which case the longest code length in
4228
   bits is used, or when the shortest code is *longer* than the requested
4229
   table size, in which case the length of the shortest code in bits is
4230
   used.
4231
 
4232
   There are two different values for the two tables, since they code a
4233
   different number of possibilities each.  The literal/length table
4234
   codes 286 possible values, or in a flat code, a little over eight
4235
   bits.  The distance table codes 30 possible values, or a little less
4236
   than five bits, flat.  The optimum values for speed end up being
4237
   about one bit more than those, so lbits is 8+1 and dbits is 5+1.
4238
   The optimum values may differ though from machine to machine, and
4239
   possibly even between compilers.  Your mileage may vary.
4240
 */
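/* Editor's note (illustrative numbers, not from the original source): with
 * lbits = 9 the root literal/length table has 1 << 9 = 512 entries and
 * resolves every code of 9 bits or less in a single lookup; a 13-bit code
 * then costs one more lookup in a 1 << (13-9) = 16 entry second-level
 * table.  The distance side with dbits = 6 uses a 64-entry root table in
 * the same way.
 */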
4241
 
4242
 
4243
/* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
4244
#define BMAX 15         /* maximum bit length of any code */
4245
#define N_MAX 288       /* maximum number of codes in any set */
4246
 
4247
#ifdef DEBUG_ZLIB
4248
  uInt inflate_hufts;
4249
#endif
4250
 
4251
local int huft_build(b, n, s, d, e, t, m, zs)
4252
uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
4253
uInt n;                 /* number of codes (assumed <= N_MAX) */
4254
uInt s;                 /* number of simple-valued codes (0..s-1) */
4255
const uIntf *d;         /* list of base values for non-simple codes */
4256
const uIntf *e;         /* list of extra bits for non-simple codes */
4257
inflate_huft * FAR *t;  /* result: starting table */
4258
uIntf *m;               /* maximum lookup bits, returns actual */
4259
z_streamp zs;           /* for zalloc function */
4260
/* Given a list of code lengths and a maximum table size, make a set of
4261
   tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
4262
   if the given code set is incomplete (the tables are still built in this
4263
   case), Z_DATA_ERROR if the input is invalid (an over-subscribed set of
4264
   lengths), or Z_MEM_ERROR if not enough memory. */
4265
{
4266
 
4267
  uInt a;                       /* counter for codes of length k */
4268
  uInt c[BMAX+1];               /* bit length count table */
4269
  uInt f;                       /* i repeats in table every f entries */
4270
  int g;                        /* maximum code length */
4271
  int h;                        /* table level */
4272
  register uInt i;              /* counter, current code */
4273
  register uInt j;              /* counter */
4274
  register int k;               /* number of bits in current code */
4275
  int l;                        /* bits per table (returned in m) */
4276
  register uIntf *p;            /* pointer into c[], b[], or v[] */
4277
  inflate_huft *q;              /* points to current table */
4278
  struct inflate_huft_s r;      /* table entry for structure assignment */
4279
  inflate_huft *u[BMAX];        /* table stack */
4280
  uInt v[N_MAX];                /* values in order of bit length */
4281
  register int w;               /* bits before this table == (l * h) */
4282
  uInt x[BMAX+1];               /* bit offsets, then code stack */
4283
  uIntf *xp;                    /* pointer into x */
4284
  int y;                        /* number of dummy codes added */
4285
  uInt z;                       /* number of entries in current table */
4286
 
4287
 
4288
  /* Generate counts for each bit length */
4289
  p = c;
4290
#define C0 *p++ = 0;
4291
#define C2 C0 C0 C0 C0
4292
#define C4 C2 C2 C2 C2
4293
  C4                            /* clear c[]--assume BMAX+1 is 16 */
4294
  p = b;  i = n;
4295
  do {
4296
    c[*p++]++;                  /* assume all entries <= BMAX */
4297
  } while (--i);
4298
  if (c[0] == n)                /* null input--all zero length codes */
4299
  {
4300
    *t = (inflate_huft *)Z_NULL;
4301
    *m = 0;
4302
    return Z_OK;
4303
  }
4304
 
4305
 
4306
  /* Find minimum and maximum length, bound *m by those */
4307
  l = *m;
4308
  for (j = 1; j <= BMAX; j++)
4309
    if (c[j])
4310
      break;
4311
  k = j;                        /* minimum code length */
4312
  if ((uInt)l < j)
4313
    l = j;
4314
  for (i = BMAX; i; i--)
4315
    if (c[i])
4316
      break;
4317
  g = i;                        /* maximum code length */
4318
  if ((uInt)l > i)
4319
    l = i;
4320
  *m = l;
4321
 
4322
 
4323
  /* Adjust last length count to fill out codes, if needed */
4324
  for (y = 1 << j; j < i; j++, y <<= 1)
4325
    if ((y -= c[j]) < 0)
4326
      return Z_DATA_ERROR;
4327
  if ((y -= c[i]) < 0)
4328
    return Z_DATA_ERROR;
4329
  c[i] += y;
4330
 
4331
 
4332
  /* Generate starting offsets into the value table for each length */
4333
  x[1] = j = 0;
4334
  p = c + 1;  xp = x + 2;
4335
  while (--i) {                 /* note that i == g from above */
4336
    *xp++ = (j += *p++);
4337
  }
4338
 
4339
 
4340
  /* Make a table of values in order of bit lengths */
4341
  p = b;  i = 0;
4342
  do {
4343
    if ((j = *p++) != 0)
4344
      v[x[j]++] = i;
4345
  } while (++i < n);
4346
  n = x[g];                   /* set n to length of v */
4347
 
4348
 
4349
  /* Generate the Huffman codes and for each, make the table entries */
4350
  x[0] = i = 0;                 /* first Huffman code is zero */
4351
  p = v;                        /* grab values in bit order */
4352
  h = -1;                       /* no tables yet--level -1 */
4353
  w = -l;                       /* bits decoded == (l * h) */
4354
  u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
4355
  q = (inflate_huft *)Z_NULL;   /* ditto */
4356
  z = 0;                        /* ditto */
4357
 
4358
  /* go through the bit lengths (k already is bits in shortest code) */
4359
  for (; k <= g; k++)
4360
  {
4361
    a = c[k];
4362
    while (a--)
4363
    {
4364
      /* here i is the Huffman code of length k bits for value *p */
4365
      /* make tables up to required level */
4366
      while (k > w + l)
4367
      {
4368
        h++;
4369
        w += l;                 /* previous table always l bits */
4370
 
4371
        /* compute minimum size table less than or equal to l bits */
4372
        z = g - w;
4373
        z = z > (uInt)l ? l : z;        /* table size upper limit */
4374
        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
4375
        {                       /* too few codes for k-w bit table */
4376
          f -= a + 1;           /* deduct codes from patterns left */
4377
          xp = c + k;
4378
          if (j < z)
4379
            while (++j < z)     /* try smaller tables up to z bits */
4380
            {
4381
              if ((f <<= 1) <= *++xp)
4382
                break;          /* enough codes to use up j bits */
4383
              f -= *xp;         /* else deduct codes from patterns */
4384
            }
4385
        }
4386
        z = 1 << j;             /* table entries for j-bit table */
4387
 
4388
        /* allocate and link in new table */
4389
        if ((q = (inflate_huft *)ZALLOC
4390
             (zs,z + 1,sizeof(inflate_huft))) == Z_NULL)
4391
        {
4392
          if (h)
4393
            inflate_trees_free(u[0], zs);
4394
          return Z_MEM_ERROR;   /* not enough memory */
4395
        }
4396
#ifdef DEBUG_ZLIB
4397
        inflate_hufts += z + 1;
4398
#endif
4399
        *t = q + 1;             /* link to list for huft_free() */
4400
        *(t = &(q->next)) = Z_NULL;
4401
        u[h] = ++q;             /* table starts after link */
4402
 
4403
        /* connect to last table, if there is one */
4404
        if (h)
4405
        {
4406
          x[h] = i;             /* save pattern for backing up */
4407
          r.bits = (Byte)l;     /* bits to dump before this table */
4408
          r.exop = (Byte)j;     /* bits in this table */
4409
          r.next = q;           /* pointer to this table */
4410
          j = i >> (w - l);     /* (get around Turbo C bug) */
4411
          u[h-1][j] = r;        /* connect to last table */
4412
        }
4413
      }
4414
 
4415
      /* set up table entry in r */
4416
      r.bits = (Byte)(k - w);
4417
      if (p >= v + n)
4418
        r.exop = 128 + 64;      /* out of values--invalid code */
4419
      else if (*p < s)
4420
      {
4421
        r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);     /* 256 is end-of-block */
4422
        r.base = *p++;          /* simple code is just the value */
4423
      }
4424
      else
4425
      {
4426
        r.exop = (Byte)(e[*p - s] + 16 + 64);/* non-simple--look up in lists */
4427
        r.base = d[*p++ - s];
4428
      }
4429
 
4430
      /* fill code-like entries with r */
4431
      f = 1 << (k - w);
4432
      for (j = i >> w; j < z; j += f)
4433
        q[j] = r;
4434
 
4435
      /* backwards increment the k-bit code i */
4436
      for (j = 1 << (k - 1); i & j; j >>= 1)
4437
        i ^= j;
4438
      i ^= j;
4439
 
4440
      /* backup over finished tables */
4441
      while ((i & ((1 << w) - 1)) != x[h])
4442
      {
4443
        h--;                    /* don't need to update q */
4444
        w -= l;
4445
      }
4446
    }
4447
  }
4448
 
4449
 
4450
  /* Return Z_BUF_ERROR if we were given an incomplete table */
4451
  return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
4452
}
4453
 
4454
 
4455
int inflate_trees_bits(c, bb, tb, z)
4456
uIntf *c;               /* 19 code lengths */
4457
uIntf *bb;              /* bits tree desired/actual depth */
4458
inflate_huft * FAR *tb; /* bits tree result */
4459
z_streamp z;            /* for zfree function */
4460
{
4461
  int r;
4462
 
4463
  r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, z);
4464
  if (r == Z_DATA_ERROR)
4465
    z->msg = (char*)"oversubscribed dynamic bit lengths tree";
4466
  else if (r == Z_BUF_ERROR || *bb == 0)
4467
  {
4468
    inflate_trees_free(*tb, z);
4469
    z->msg = (char*)"incomplete dynamic bit lengths tree";
4470
    r = Z_DATA_ERROR;
4471
  }
4472
  return r;
4473
}
4474
 
4475
 
4476
int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, z)
4477
uInt nl;                /* number of literal/length codes */
4478
uInt nd;                /* number of distance codes */
4479
uIntf *c;               /* that many (total) code lengths */
4480
uIntf *bl;              /* literal desired/actual bit depth */
4481
uIntf *bd;              /* distance desired/actual bit depth */
4482
inflate_huft * FAR *tl; /* literal/length tree result */
4483
inflate_huft * FAR *td; /* distance tree result */
4484
z_streamp z;            /* for zfree function */
4485
{
4486
  int r;
4487
 
4488
  /* build literal/length tree */
4489
  r = huft_build(c, nl, 257, cplens, cplext, tl, bl, z);
4490
  if (r != Z_OK || *bl == 0)
4491
  {
4492
    if (r == Z_DATA_ERROR)
4493
      z->msg = (char*)"oversubscribed literal/length tree";
4494
    else if (r != Z_MEM_ERROR)
4495
    {
4496
      inflate_trees_free(*tl, z);
4497
      z->msg = (char*)"incomplete literal/length tree";
4498
      r = Z_DATA_ERROR;
4499
    }
4500
    return r;
4501
  }
4502
 
4503
  /* build distance tree */
4504
  r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, z);
4505
  if (r != Z_OK || (*bd == 0 && nl > 257))
4506
  {
4507
    if (r == Z_DATA_ERROR)
4508
      z->msg = (char*)"oversubscribed distance tree";
4509
    else if (r == Z_BUF_ERROR) {
4510
#ifdef PKZIP_BUG_WORKAROUND
4511
      r = Z_OK;
4512
    }
4513
#else
4514
      inflate_trees_free(*td, z);
4515
      z->msg = (char*)"incomplete distance tree";
4516
      r = Z_DATA_ERROR;
4517
    }
4518
    else if (r != Z_MEM_ERROR)
4519
    {
4520
      z->msg = (char*)"empty distance tree with lengths";
4521
      r = Z_DATA_ERROR;
4522
    }
4523
    inflate_trees_free(*tl, z);
4524
    return r;
4525
#endif
4526
  }
4527
 
4528
  /* done */
4529
  return Z_OK;
4530
}
4531
 
4532
 
4533
/* build fixed tables only once--keep them here */
4534
local int fixed_built = 0;
4535
#define FIXEDH 530      /* number of hufts used by fixed tables */
4536
local inflate_huft fixed_mem[FIXEDH];
4537
local uInt fixed_bl;
4538
local uInt fixed_bd;
4539
local inflate_huft *fixed_tl;
4540
local inflate_huft *fixed_td;
4541
 
4542
 
4543
local voidpf falloc(q, n, s)
4544
voidpf q;       /* opaque pointer */
4545
uInt n;         /* number of items */
4546
uInt s;         /* size of item */
4547
{
4548
  Assert(s == sizeof(inflate_huft) && n <= *(intf *)q,
4549
         "inflate_trees falloc overflow");
4550
  *(intf *)q -= n+s-s; /* s-s to avoid warning */
4551
  return (voidpf)(fixed_mem + *(intf *)q);
4552
}
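
/* Descriptive note on falloc() above: the opaque pointer q is the address
   of the "hufts left" counter (the local f in inflate_trees_fixed() below),
   so each call just lowers that counter by n and returns a slot that far
   into fixed_mem[]; in effect the fixed tables are carved out of the static
   array from the top down.  The "n+s-s" is simply n, written that way so
   that s is referenced and no unused-parameter warning is produced. */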
4553
 
4554
 
4555
int inflate_trees_fixed(bl, bd, tl, td)
4556
uIntf *bl;               /* literal desired/actual bit depth */
4557
uIntf *bd;               /* distance desired/actual bit depth */
4558
inflate_huft * FAR *tl;  /* literal/length tree result */
4559
inflate_huft * FAR *td;  /* distance tree result */
4560
{
4561
  /* build fixed tables if not already (multiple overlapped executions ok) */
4562
  if (!fixed_built)
4563
  {
4564
    int k;              /* temporary variable */
4565
    unsigned c[288];    /* length list for huft_build */
4566
    z_stream z;         /* for falloc function */
4567
    int f = FIXEDH;     /* number of hufts left in fixed_mem */
4568
 
4569
    /* set up fake z_stream for memory routines */
4570
    z.zalloc = falloc;
4571
    z.zfree = Z_NULL;
4572
    z.opaque = (voidpf)&f;
4573
 
4574
    /* literal table */
4575
    for (k = 0; k < 144; k++)
4576
      c[k] = 8;
4577
    for (; k < 256; k++)
4578
      c[k] = 9;
4579
    for (; k < 280; k++)
4580
      c[k] = 7;
4581
    for (; k < 288; k++)
4582
      c[k] = 8;
4583
    fixed_bl = 7;
4584
    huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, &z);
4585
 
4586
    /* distance table */
4587
    for (k = 0; k < 30; k++)
4588
      c[k] = 5;
4589
    fixed_bd = 5;
4590
    huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, &z);
4591
 
4592
    /* done */
4593
    Assert(f == 0, "invalid build of fixed tables");
4594
    fixed_built = 1;
4595
  }
4596
  *bl = fixed_bl;
4597
  *bd = fixed_bd;
4598
  *tl = fixed_tl;
4599
  *td = fixed_td;
4600
  return Z_OK;
4601
}
4602
 
4603
 
4604
int inflate_trees_free(t, z)
4605
inflate_huft *t;        /* table to free */
4606
z_streamp z;            /* for zfree function */
4607
/* Free the malloc'ed tables built by huft_build(), which makes a linked
4608
   list of the tables it made, with the links in a dummy first entry of
4609
   each table. */
4610
{
4611
  register inflate_huft *p, *q, *r;
4612
 
4613
  /* Reverse linked list */
4614
  p = Z_NULL;
4615
  q = t;
4616
  while (q != Z_NULL)
4617
  {
4618
    r = (q - 1)->next;
4619
    (q - 1)->next = p;
4620
    p = q;
4621
    q = r;
4622
  }
4623
  /* Go through linked list, freeing from the malloced (t[-1]) address. */
4624
  while (p != Z_NULL)
4625
  {
4626
    q = (--p)->next;
4627
    ZFREE(z,p);
4628
    p = q;
4629
  }
4630
  return Z_OK;
4631
}
4632
/* --- inftrees.c */
4633
 
4634
/* +++ infcodes.c */
4635
/* infcodes.c -- process literals and length/distance pairs
4636
 * Copyright (C) 1995-1996 Mark Adler
4637
 * For conditions of distribution and use, see copyright notice in zlib.h
4638
 */
4639
 
4640
/* #include "zutil.h" */
4641
/* #include "inftrees.h" */
4642
/* #include "infblock.h" */
4643
/* #include "infcodes.h" */
4644
/* #include "infutil.h" */
4645
 
4646
/* +++ inffast.h */
4647
/* inffast.h -- header to use inffast.c
4648
 * Copyright (C) 1995-1996 Mark Adler
4649
 * For conditions of distribution and use, see copyright notice in zlib.h
4650
 */
4651
 
4652
/* WARNING: this file should *not* be used by applications. It is
4653
   part of the implementation of the compression library and is
4654
   subject to change. Applications should only use zlib.h.
4655
 */
4656
 
4657
extern int inflate_fast OF((
4658
    uInt,
4659
    uInt,
4660
    inflate_huft *,
4661
    inflate_huft *,
4662
    inflate_blocks_statef *,
4663
    z_streamp ));
4664
/* --- inffast.h */
4665
 
4666
/* simplify the use of the inflate_huft type with some defines */
4667
#define base more.Base
4668
#define next more.Next
4669
#define exop word.what.Exop
4670
#define bits word.what.Bits
4671
 
4672
/* inflate codes private state */
4673
struct inflate_codes_state {
4674
 
4675
  /* mode */
4676
  enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
4677
      START,    /* x: set up for LEN */
4678
      LEN,      /* i: get length/literal/eob next */
4679
      LENEXT,   /* i: getting length extra (have base) */
4680
      DIST,     /* i: get distance next */
4681
      DISTEXT,  /* i: getting distance extra */
4682
      COPY,     /* o: copying bytes in window, waiting for space */
4683
      LIT,      /* o: got literal, waiting for output space */
4684
      WASH,     /* o: got eob, possibly still output waiting */
4685
      END,      /* x: got eob and all data flushed */
4686
      BADCODE}  /* x: got error */
4687
    mode;               /* current inflate_codes mode */
4688
 
4689
  /* mode dependent information */
4690
  uInt len;
4691
  union {
4692
    struct {
4693
      inflate_huft *tree;       /* pointer into tree */
4694
      uInt need;                /* bits needed */
4695
    } code;             /* if LEN or DIST, where in tree */
4696
    uInt lit;           /* if LIT, literal */
4697
    struct {
4698
      uInt get;                 /* bits to get for extra */
4699
      uInt dist;                /* distance back to copy from */
4700
    } copy;             /* if EXT or COPY, where and how much */
4701
  } sub;                /* submode */
4702
 
4703
  /* mode independent information */
4704
  Byte lbits;           /* ltree bits decoded per branch */
4705
  Byte dbits;           /* dtree bits decoded per branch */
4706
  inflate_huft *ltree;          /* literal/length/eob tree */
4707
  inflate_huft *dtree;          /* distance tree */
4708
 
4709
};
4710
 
4711
 
4712
inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
4713
uInt bl, bd;
4714
inflate_huft *tl;
4715
inflate_huft *td; /* need separate declaration for Borland C++ */
4716
z_streamp z;
4717
{
4718
  inflate_codes_statef *c;
4719
 
4720
  if ((c = (inflate_codes_statef *)
4721
       ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
4722
  {
4723
    c->mode = START;
4724
    c->lbits = (Byte)bl;
4725
    c->dbits = (Byte)bd;
4726
    c->ltree = tl;
4727
    c->dtree = td;
4728
    Tracev((stderr, "inflate:       codes new\n"));
4729
  }
4730
  return c;
4731
}
4732
 
4733
 
4734
int inflate_codes(s, z, r)
4735
inflate_blocks_statef *s;
4736
z_streamp z;
4737
int r;
4738
{
4739
  uInt j;               /* temporary storage */
4740
  inflate_huft *t;      /* temporary pointer */
4741
  uInt e;               /* extra bits or operation */
4742
  uLong b;              /* bit buffer */
4743
  uInt k;               /* bits in bit buffer */
4744
  Bytef *p;             /* input data pointer */
4745
  uInt n;               /* bytes available there */
4746
  Bytef *q;             /* output window write pointer */
4747
  uInt m;               /* bytes to end of window or read pointer */
4748
  Bytef *f;             /* pointer to copy strings from */
4749
  inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */
4750
 
4751
  /* copy input/output information to locals (UPDATE macro restores) */
4752
  LOAD
4753
 
4754
  /* process input and output based on current state */
4755
  while (1) switch (c->mode)
4756
  {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
4757
    case START:         /* x: set up for LEN */
4758
#ifndef SLOW
4759
      if (m >= 258 && n >= 10)
4760
      {
4761
        UPDATE
4762
        r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
4763
        LOAD
4764
        if (r != Z_OK)
4765
        {
4766
          c->mode = r == Z_STREAM_END ? WASH : BADCODE;
4767
          break;
4768
        }
4769
      }
4770
#endif /* !SLOW */
4771
      c->sub.code.need = c->lbits;
4772
      c->sub.code.tree = c->ltree;
4773
      c->mode = LEN;
4774
    case LEN:           /* i: get length/literal/eob next */
4775
      j = c->sub.code.need;
4776
      NEEDBITS(j)
4777
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
4778
      DUMPBITS(t->bits)
4779
      e = (uInt)(t->exop);
4780
      if (e == 0)               /* literal */
4781
      {
4782
        c->sub.lit = t->base;
4783
        Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
4784
                 "inflate:         literal '%c'\n" :
4785
                 "inflate:         literal 0x%02x\n", t->base));
4786
        c->mode = LIT;
4787
        break;
4788
      }
4789
      if (e & 16)               /* length */
4790
      {
4791
        c->sub.copy.get = e & 15;
4792
        c->len = t->base;
4793
        c->mode = LENEXT;
4794
        break;
4795
      }
4796
      if ((e & 64) == 0)        /* next table */
4797
      {
4798
        c->sub.code.need = e;
4799
        c->sub.code.tree = t->next;
4800
        break;
4801
      }
4802
      if (e & 32)               /* end of block */
4803
      {
4804
        Tracevv((stderr, "inflate:         end of block\n"));
4805
        c->mode = WASH;
4806
        break;
4807
      }
4808
      c->mode = BADCODE;        /* invalid code */
4809
      z->msg = (char*)"invalid literal/length code";
4810
      r = Z_DATA_ERROR;
4811
      LEAVE
4812
    case LENEXT:        /* i: getting length extra (have base) */
4813
      j = c->sub.copy.get;
4814
      NEEDBITS(j)
4815
      c->len += (uInt)b & inflate_mask[j];
4816
      DUMPBITS(j)
4817
      c->sub.code.need = c->dbits;
4818
      c->sub.code.tree = c->dtree;
4819
      Tracevv((stderr, "inflate:         length %u\n", c->len));
4820
      c->mode = DIST;
4821
    case DIST:          /* i: get distance next */
4822
      j = c->sub.code.need;
4823
      NEEDBITS(j)
4824
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
4825
      DUMPBITS(t->bits)
4826
      e = (uInt)(t->exop);
4827
      if (e & 16)               /* distance */
4828
      {
4829
        c->sub.copy.get = e & 15;
4830
        c->sub.copy.dist = t->base;
4831
        c->mode = DISTEXT;
4832
        break;
4833
      }
4834
      if ((e & 64) == 0)        /* next table */
4835
      {
4836
        c->sub.code.need = e;
4837
        c->sub.code.tree = t->next;
4838
        break;
4839
      }
4840
      c->mode = BADCODE;        /* invalid code */
4841
      z->msg = (char*)"invalid distance code";
4842
      r = Z_DATA_ERROR;
4843
      LEAVE
4844
    case DISTEXT:       /* i: getting distance extra */
4845
      j = c->sub.copy.get;
4846
      NEEDBITS(j)
4847
      c->sub.copy.dist += (uInt)b & inflate_mask[j];
4848
      DUMPBITS(j)
4849
      Tracevv((stderr, "inflate:         distance %u\n", c->sub.copy.dist));
4850
      c->mode = COPY;
4851
    case COPY:          /* o: copying bytes in window, waiting for space */
4852
#ifndef __TURBOC__ /* Turbo C bug for following expression */
4853
      f = (uInt)(q - s->window) < c->sub.copy.dist ?
4854
          s->end - (c->sub.copy.dist - (q - s->window)) :
4855
          q - c->sub.copy.dist;
4856
#else
4857
      f = q - c->sub.copy.dist;
4858
      if ((uInt)(q - s->window) < c->sub.copy.dist)
4859
        f = s->end - (c->sub.copy.dist - (uInt)(q - s->window));
4860
#endif
4861
      while (c->len)
4862
      {
4863
        NEEDOUT
4864
        OUTBYTE(*f++)
4865
        if (f == s->end)
4866
          f = s->window;
4867
        c->len--;
4868
      }
4869
      c->mode = START;
4870
      break;
4871
    case LIT:           /* o: got literal, waiting for output space */
4872
      NEEDOUT
4873
      OUTBYTE(c->sub.lit)
4874
      c->mode = START;
4875
      break;
4876
    case WASH:          /* o: got eob, possibly more output */
4877
      FLUSH
4878
      if (s->read != s->write)
4879
        LEAVE
4880
      c->mode = END;
4881
    case END:
4882
      r = Z_STREAM_END;
4883
      LEAVE
4884
    case BADCODE:       /* x: got error */
4885
      r = Z_DATA_ERROR;
4886
      LEAVE
4887
    default:
4888
      r = Z_STREAM_ERROR;
4889
      LEAVE
4890
  }
4891
}
4892
 
4893
 
4894
void inflate_codes_free(c, z)
4895
inflate_codes_statef *c;
4896
z_streamp z;
4897
{
4898
  ZFREE(z, c);
4899
  Tracev((stderr, "inflate:       codes free\n"));
4900
}
4901
/* --- infcodes.c */
4902
 
4903
/* +++ infutil.c */
4904
/* inflate_util.c -- data and routines common to blocks and codes
4905
 * Copyright (C) 1995-1996 Mark Adler
4906
 * For conditions of distribution and use, see copyright notice in zlib.h
4907
 */
4908
 
4909
/* #include "zutil.h" */
4910
/* #include "infblock.h" */
4911
/* #include "inftrees.h" */
4912
/* #include "infcodes.h" */
4913
/* #include "infutil.h" */
4914
 
4915
#ifndef NO_DUMMY_DECL
4916
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
4917
#endif
4918
 
4919
/* And'ing with mask[n] masks the lower n bits */
4920
uInt inflate_mask[17] = {
4921
    0x0000,
4922
    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
4923
    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
4924
};
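
/* Descriptive note: inflate_mask[n] is just (1 << n) - 1 tabulated for
   n = 0..16, so (uInt)b & inflate_mask[j] elsewhere in this file extracts
   the low j bits of the bit buffer.  A table is presumably used instead of
   computing the mask each time for speed, and because 1 << 16 would not be
   representable in a 16-bit int on the small targets this code caters to. */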
4925
 
4926
 
4927
/* copy as much as possible from the sliding window to the output area */
4928
int inflate_flush(s, z, r)
4929
inflate_blocks_statef *s;
4930
z_streamp z;
4931
int r;
4932
{
4933
  uInt n;
4934
  Bytef *p;
4935
  Bytef *q;
4936
 
4937
  /* local copies of source and destination pointers */
4938
  p = z->next_out;
4939
  q = s->read;
4940
 
4941
  /* compute number of bytes to copy as far as end of window */
4942
  n = (uInt)((q <= s->write ? s->write : s->end) - q);
4943
  if (n > z->avail_out) n = z->avail_out;
4944
  if (n && r == Z_BUF_ERROR) r = Z_OK;
4945
 
4946
  /* update counters */
4947
  z->avail_out -= n;
4948
  z->total_out += n;
4949
 
4950
  /* update check information */
4951
  if (s->checkfn != Z_NULL)
4952
    z->adler = s->check = (*s->checkfn)(s->check, q, n);
4953
 
4954
  /* copy as far as end of window */
4955
  if (p != Z_NULL) {
4956
    zmemcpy(p, q, n);
4957
    p += n;
4958
  }
4959
  q += n;
4960
 
4961
  /* see if more to copy at beginning of window */
4962
  if (q == s->end)
4963
  {
4964
    /* wrap pointers */
4965
    q = s->window;
4966
    if (s->write == s->end)
4967
      s->write = s->window;
4968
 
4969
    /* compute bytes to copy */
4970
    n = (uInt)(s->write - q);
4971
    if (n > z->avail_out) n = z->avail_out;
4972
    if (n && r == Z_BUF_ERROR) r = Z_OK;
4973
 
4974
    /* update counters */
4975
    z->avail_out -= n;
4976
    z->total_out += n;
4977
 
4978
    /* update check information */
4979
    if (s->checkfn != Z_NULL)
4980
      z->adler = s->check = (*s->checkfn)(s->check, q, n);
4981
 
4982
    /* copy */
4983
    if (p != Z_NULL) {
4984
      zmemcpy(p, q, n);
4985
      p += n;
4986
    }
4987
    q += n;
4988
  }
4989
 
4990
  /* update pointers */
4991
  z->next_out = p;
4992
  s->read = q;
4993
 
4994
  /* done */
4995
  return r;
4996
}
4997
/* --- infutil.c */
4998
 
4999
/* +++ inffast.c */
5000
/* inffast.c -- process literals and length/distance pairs fast
5001
 * Copyright (C) 1995-1996 Mark Adler
5002
 * For conditions of distribution and use, see copyright notice in zlib.h
5003
 */
5004
 
5005
/* #include "zutil.h" */
5006
/* #include "inftrees.h" */
5007
/* #include "infblock.h" */
5008
/* #include "infcodes.h" */
5009
/* #include "infutil.h" */
5010
/* #include "inffast.h" */
5011
 
5012
#ifndef NO_DUMMY_DECL
5013
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
5014
#endif
5015
 
5016
/* simplify the use of the inflate_huft type with some defines */
5017
#define base more.Base
5018
#define next more.Next
5019
#define exop word.what.Exop
5020
#define bits word.what.Bits
5021
 
5022
/* macros for bit input with no checking and for returning unused bytes */
5023
#define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
5024
#define UNGRAB {n+=(c=k>>3);p-=c;k&=7;}
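
/* Descriptive note: GRABBITS(j) tops up the bit buffer b one whole byte at
   a time until it holds at least j bits, with no end-of-input check (the
   caller guarantees enough input is available).  UNGRAB undoes the
   over-read when the fast loop exits: c = k >> 3 complete unused bytes are
   given back to the input count n and pointer p, leaving k &= 7 spare bits
   in the buffer. */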
5025
 
5026
/* Called with number of bytes left to write in window at least 258
5027
   (the maximum string length) and number of input bytes available
5028
   at least ten.  The ten bytes are six bytes for the longest length/
5029
   distance pair plus four bytes for overloading the bit buffer. */
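
/* Rough accounting for the ten bytes (illustrative): a literal/length code
   is at most 15 bits plus up to 5 extra bits (hence GRABBITS(20) below),
   and a distance code is at most 15 bits plus up to 13 extra bits, i.e.
   20 + 28 = 48 bits or six bytes for one length/distance pair; the other
   four bytes give GRABBITS room to over-fill the (at least 32-bit) bit
   buffer without any end-of-input checks inside the loop. */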
5030
 
5031
int inflate_fast(bl, bd, tl, td, s, z)
5032
uInt bl, bd;
5033
inflate_huft *tl;
5034
inflate_huft *td; /* need separate declaration for Borland C++ */
5035
inflate_blocks_statef *s;
5036
z_streamp z;
5037
{
5038
  inflate_huft *t;      /* temporary pointer */
5039
  uInt e;               /* extra bits or operation */
5040
  uLong b;              /* bit buffer */
5041
  uInt k;               /* bits in bit buffer */
5042
  Bytef *p;             /* input data pointer */
5043
  uInt n;               /* bytes available there */
5044
  Bytef *q;             /* output window write pointer */
5045
  uInt m;               /* bytes to end of window or read pointer */
5046
  uInt ml;              /* mask for literal/length tree */
5047
  uInt md;              /* mask for distance tree */
5048
  uInt c;               /* bytes to copy */
5049
  uInt d;               /* distance back to copy from */
5050
  Bytef *r;             /* copy source pointer */
5051
 
5052
  /* load input, output, bit values */
5053
  LOAD
5054
 
5055
  /* initialize masks */
5056
  ml = inflate_mask[bl];
5057
  md = inflate_mask[bd];
5058
 
5059
  /* do until not enough input or output space for fast loop */
5060
  do {                          /* assume called with m >= 258 && n >= 10 */
5061
    /* get literal/length code */
5062
    GRABBITS(20)                /* max bits for literal/length code */
5063
    if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
5064
    {
5065
      DUMPBITS(t->bits)
5066
      Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
5067
                "inflate:         * literal '%c'\n" :
5068
                "inflate:         * literal 0x%02x\n", t->base));
5069
      *q++ = (Byte)t->base;
5070
      m--;
5071
      continue;
5072
    }
5073
    do {
5074
      DUMPBITS(t->bits)
5075
      if (e & 16)
5076
      {
5077
        /* get extra bits for length */
5078
        e &= 15;
5079
        c = t->base + ((uInt)b & inflate_mask[e]);
5080
        DUMPBITS(e)
5081
        Tracevv((stderr, "inflate:         * length %u\n", c));
5082
 
5083
        /* decode distance base of block to copy */
5084
        GRABBITS(15);           /* max bits for distance code */
5085
        e = (t = td + ((uInt)b & md))->exop;
5086
        do {
5087
          DUMPBITS(t->bits)
5088
          if (e & 16)
5089
          {
5090
            /* get extra bits to add to distance base */
5091
            e &= 15;
5092
            GRABBITS(e)         /* get extra bits (up to 13) */
5093
            d = t->base + ((uInt)b & inflate_mask[e]);
5094
            DUMPBITS(e)
5095
            Tracevv((stderr, "inflate:         * distance %u\n", d));
5096
 
5097
            /* do the copy */
5098
            m -= c;
5099
            if ((uInt)(q - s->window) >= d)     /* offset before dest */
5100
            {                                   /*  just copy */
5101
              r = q - d;
5102
              *q++ = *r++;  c--;        /* minimum count is three, */
5103
              *q++ = *r++;  c--;        /*  so unroll loop a little */
5104
            }
5105
            else                        /* else offset after destination */
5106
            {
5107
              e = d - (uInt)(q - s->window); /* bytes from offset to end */
5108
              r = s->end - e;           /* pointer to offset */
5109
              if (c > e)                /* if source crosses, */
5110
              {
5111
                c -= e;                 /* copy to end of window */
5112
                do {
5113
                  *q++ = *r++;
5114
                } while (--e);
5115
                r = s->window;          /* copy rest from start of window */
5116
              }
5117
            }
5118
            do {                        /* copy all or what's left */
5119
              *q++ = *r++;
5120
            } while (--c);
5121
            break;
5122
          }
5123
          else if ((e & 64) == 0)
5124
            e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop;
5125
          else
5126
          {
5127
            z->msg = (char*)"invalid distance code";
5128
            UNGRAB
5129
            UPDATE
5130
            return Z_DATA_ERROR;
5131
          }
5132
        } while (1);
5133
        break;
5134
      }
5135
      if ((e & 64) == 0)
5136
      {
5137
        if ((e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop) == 0)
5138
        {
5139
          DUMPBITS(t->bits)
5140
          Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
5141
                    "inflate:         * literal '%c'\n" :
5142
                    "inflate:         * literal 0x%02x\n", t->base));
5143
          *q++ = (Byte)t->base;
5144
          m--;
5145
          break;
5146
        }
5147
      }
5148
      else if (e & 32)
5149
      {
5150
        Tracevv((stderr, "inflate:         * end of block\n"));
5151
        UNGRAB
5152
        UPDATE
5153
        return Z_STREAM_END;
5154
      }
5155
      else
5156
      {
5157
        z->msg = (char*)"invalid literal/length code";
5158
        UNGRAB
5159
        UPDATE
5160
        return Z_DATA_ERROR;
5161
      }
5162
    } while (1);
5163
  } while (m >= 258 && n >= 10);
5164
 
5165
  /* not enough input or output--restore pointers and return */
5166
  UNGRAB
5167
  UPDATE
5168
  return Z_OK;
5169
}
5170
/* --- inffast.c */
5171
 
5172
/* +++ zutil.c */
5173
/* zutil.c -- target dependent utility functions for the compression library
5174
 * Copyright (C) 1995-1996 Jean-loup Gailly.
5175
 * For conditions of distribution and use, see copyright notice in zlib.h
5176
 */
5177
 
5178
/* From: zutil.c,v 1.17 1996/07/24 13:41:12 me Exp $ */
5179
 
5180
#ifdef DEBUG_ZLIB
5181
#include <stdio.h>
5182
#endif
5183
 
5184
/* #include "zutil.h" */
5185
 
5186
#ifndef NO_DUMMY_DECL
5187
struct internal_state      {int dummy;}; /* for buggy compilers */
5188
#endif
5189
 
5190
#ifndef STDC
5191
extern void exit OF((int));
5192
#endif
5193
 
5194
static const char *z_errmsg[10] = {
5195
"need dictionary",     /* Z_NEED_DICT       2  */
5196
"stream end",          /* Z_STREAM_END      1  */
5197
"",                    /* Z_OK              0  */
5198
"file error",          /* Z_ERRNO         (-1) */
5199
"stream error",        /* Z_STREAM_ERROR  (-2) */
5200
"data error",          /* Z_DATA_ERROR    (-3) */
5201
"insufficient memory", /* Z_MEM_ERROR     (-4) */
5202
"buffer error",        /* Z_BUF_ERROR     (-5) */
5203
"incompatible version",/* Z_VERSION_ERROR (-6) */
5204
""};
5205
 
5206
 
5207
const char *zlibVersion()
5208
{
5209
    return ZLIB_VERSION;
5210
}
5211
 
5212
#ifdef DEBUG_ZLIB
5213
void z_error (m)
5214
    char *m;
5215
{
5216
    fprintf(stderr, "%s\n", m);
5217
    exit(1);
5218
}
5219
#endif
5220
 
5221
#ifndef HAVE_MEMCPY
5222
 
5223
void zmemcpy(dest, source, len)
5224
    Bytef* dest;
5225
    Bytef* source;
5226
    uInt  len;
5227
{
5228
    if (len == 0) return;
5229
    do {
5230
        *dest++ = *source++; /* ??? to be unrolled */
5231
    } while (--len != 0);
5232
}
5233
 
5234
int zmemcmp(s1, s2, len)
5235
    Bytef* s1;
5236
    Bytef* s2;
5237
    uInt  len;
5238
{
5239
    uInt j;
5240
 
5241
    for (j = 0; j < len; j++) {
5242
        if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
5243
    }
5244
    return 0;
5245
}
5246
 
5247
void zmemzero(dest, len)
5248
    Bytef* dest;
5249
    uInt  len;
5250
{
5251
    if (len == 0) return;
5252
    do {
5253
        *dest++ = 0;  /* ??? to be unrolled */
5254
    } while (--len != 0);
5255
}
5256
#endif
5257
 
5258
#ifdef __TURBOC__
5259
#if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)
5260
/* Small and medium model in Turbo C are for now limited to near allocation
5261
 * with reduced MAX_WBITS and MAX_MEM_LEVEL
5262
 */
5263
#  define MY_ZCALLOC
5264
 
5265
/* Turbo C malloc() does not allow dynamic allocation of 64K bytes
5266
 * and farmalloc(64K) returns a pointer with an offset of 8, so we
5267
 * must fix the pointer. Warning: the pointer must be put back to its
5268
 * original form in order to free it, use zcfree().
5269
 */
5270
 
5271
#define MAX_PTR 10
5272
/* 10*64K = 640K */
5273
 
5274
local int next_ptr = 0;
5275
 
5276
typedef struct ptr_table_s {
5277
    voidpf org_ptr;
5278
    voidpf new_ptr;
5279
} ptr_table;
5280
 
5281
local ptr_table table[MAX_PTR];
5282
/* This table is used to remember the original form of pointers
5283
 * to large buffers (64K). Such pointers are normalized with a zero offset.
5284
 * Since MSDOS is not a preemptive multitasking OS, this table is not
5285
 * protected from concurrent access. This hack doesn't work anyway on
5286
 * a protected system like OS/2. Use Microsoft C instead.
5287
 */
5288
 
5289
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
5290
{
5291
    voidpf buf = opaque; /* just to make some compilers happy */
5292
    ulg bsize = (ulg)items*size;
5293
 
5294
    /* If we allocate less than 65520 bytes, we assume that farmalloc
5295
     * will return a usable pointer which doesn't have to be normalized.
5296
     */
5297
    if (bsize < 65520L) {
5298
        buf = farmalloc(bsize);
5299
        if (*(ush*)&buf != 0) return buf;
5300
    } else {
5301
        buf = farmalloc(bsize + 16L);
5302
    }
5303
    if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
5304
    table[next_ptr].org_ptr = buf;
5305
 
5306
    /* Normalize the pointer to seg:0 */
5307
    *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
5308
    *(ush*)&buf = 0;
5309
    table[next_ptr++].new_ptr = buf;
5310
    return buf;
5311
}
5312
 
5313
void  zcfree (voidpf opaque, voidpf ptr)
5314
{
5315
    int n;
5316
    if (*(ush*)&ptr != 0) { /* object < 64K */
5317
        farfree(ptr);
5318
        return;
5319
    }
5320
    /* Find the original pointer */
5321
    for (n = 0; n < next_ptr; n++) {
5322
        if (ptr != table[n].new_ptr) continue;
5323
 
5324
        farfree(table[n].org_ptr);
5325
        while (++n < next_ptr) {
5326
            table[n-1] = table[n];
5327
        }
5328
        next_ptr--;
5329
        return;
5330
    }
5331
    ptr = opaque; /* just to make some compilers happy */
5332
    Assert(0, "zcfree: ptr not found");
5333
}
5334
#endif
5335
#endif /* __TURBOC__ */
5336
 
5337
 
5338
#if defined(M_I86) && !defined(__32BIT__)
5339
/* Microsoft C in 16-bit mode */
5340
 
5341
#  define MY_ZCALLOC
5342
 
5343
#if (!defined(_MSC_VER) || (_MSC_VER < 600))
5344
#  define _halloc  halloc
5345
#  define _hfree   hfree
5346
#endif
5347
 
5348
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
5349
{
5350
    if (opaque) opaque = 0; /* to make compiler happy */
5351
    return _halloc((long)items, size);
5352
}
5353
 
5354
void  zcfree (voidpf opaque, voidpf ptr)
5355
{
5356
    if (opaque) opaque = 0; /* to make compiler happy */
5357
    _hfree(ptr);
5358
}
5359
 
5360
#endif /* MSC */
5361
 
5362
 
5363
#ifndef MY_ZCALLOC /* Any system without a special alloc function */
5364
 
5365
#ifndef STDC
5366
extern voidp  calloc OF((uInt items, uInt size));
5367
extern void   free   OF((voidpf ptr));
5368
#endif
5369
 
5370
voidpf zcalloc (opaque, items, size)
5371
    voidpf opaque;
5372
    unsigned items;
5373
    unsigned size;
5374
{
5375
    if (opaque) items += size - size; /* make compiler happy */
5376
    return (voidpf)calloc(items, size);
5377
}
5378
 
5379
void  zcfree (opaque, ptr)
5380
    voidpf opaque;
5381
    voidpf ptr;
5382
{
5383
    free(ptr);
5384
    if (opaque) return; /* make compiler happy */
5385
}
5386
 
5387
#endif /* MY_ZCALLOC */
5388
/* --- zutil.c */
5389
 
5390
/* +++ adler32.c */
5391
/* adler32.c -- compute the Adler-32 checksum of a data stream
5392
 * Copyright (C) 1995-1996 Mark Adler
5393
 * For conditions of distribution and use, see copyright notice in zlib.h
5394
 */
5395
 
5396
/* From: adler32.c,v 1.10 1996/05/22 11:52:18 me Exp $ */
5397
 
5398
/* #include "zlib.h" */
5399
 
5400
#define BASE 65521L /* largest prime smaller than 65536 */
5401
#define NMAX 5552
5402
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
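
/* Illustrative check of that bound: with n = NMAX = 5552,
   255*5552*5553/2 + 5553*(BASE-1) = 3,930,857,640 + 363,832,560
   = 4,294,690,200, which still fits in 2^32-1 = 4,294,967,295, while
   n = 5553 would not; this is why the loop below can defer the two
   "%= BASE" reductions until NMAX bytes have been summed. */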
5403
 
5404
#define DO1(buf,i)  {s1 += buf[i]; s2 += s1;}
5405
#define DO2(buf,i)  DO1(buf,i); DO1(buf,i+1);
5406
#define DO4(buf,i)  DO2(buf,i); DO2(buf,i+2);
5407
#define DO8(buf,i)  DO4(buf,i); DO4(buf,i+4);
5408
#define DO16(buf)   DO8(buf,0); DO8(buf,8);
5409
 
5410
/* ========================================================================= */
5411
uLong adler32(adler, buf, len)
5412
    uLong adler;
5413
    const Bytef *buf;
5414
    uInt len;
5415
{
5416
    unsigned long s1 = adler & 0xffff;
5417
    unsigned long s2 = (adler >> 16) & 0xffff;
5418
    int k;
5419
 
5420
    if (buf == Z_NULL) return 1L;
5421
 
5422
    while (len > 0) {
5423
        k = len < NMAX ? len : NMAX;
5424
        len -= k;
5425
        while (k >= 16) {
5426
            DO16(buf);
5427
            buf += 16;
5428
            k -= 16;
5429
        }
5430
        if (k != 0) do {
5431
            s1 += *buf++;
5432
            s2 += s1;
5433
        } while (--k);
5434
        s1 %= BASE;
5435
        s2 %= BASE;
5436
    }
5437
    return (s2 << 16) | s1;
5438
}
5439
/* --- adler32.c */
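
/* Illustrative usage sketch for adler32() above, kept under "#if 0" so it
   is never compiled; the helper name below is invented for this example.
   The checksum can be computed incrementally: seed with
   adler32(0L, Z_NULL, 0), which returns 1, then feed each chunk the
   previous result. */
#if 0
static uLong adler32_in_two_chunks(const Bytef *buf, uInt len1, uInt len2)
{
    uLong ad = adler32(0L, Z_NULL, 0);   /* initial value -- returns 1L */
    ad = adler32(ad, buf, len1);         /* first part of the buffer */
    ad = adler32(ad, buf + len1, len2);  /* remainder of the buffer */
    return ad;                           /* equals adler32(1L, buf, len1 + len2) */
}
#endif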
