1 /* vi: set sw=4 ts=4: */
3 * Gzip implementation for busybox
5 * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly.
7 * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com>
8 * "this is a stripped down version of gzip I put into busybox, it does
9 * only standard in to standard out with -9 compression. It also requires
10 * the zcat module for some important functions."
12 * Adjusted further by Erik Andersen <andersen@codepoet.org> to support
13 * files as well as stdin/stdout, and to generally behave itself wrt
14 * command line handling.
16 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
18 /* big objects in bss:
20 * 00000074 b base_length
21 * 00000078 b base_dist
22 * 00000078 b static_dtree
24 * 000000f4 b dyn_dtree
25 * 00000100 b length_code
26 * 00000200 b dist_code
30 * 00000480 b static_ltree
31 * 000008f4 b dyn_ltree
33 /* TODO: full support for -v for DESKTOP
34 * "/usr/bin/gzip -v a bogus aa" should say:
35 a: 85.1% -- replaced with a.gz
36 gzip: bogus: No such file or directory
37 aa: 85.1% -- replaced with aa.gz
44 //config: gzip is used to compress files.
45 //config: It's probably the most widely used UNIX compression program.
47 //config:config FEATURE_GZIP_LONG_OPTIONS
48 //config: bool "Enable long options"
50 //config: depends on GZIP && LONG_OPTS
52 //config: Enable use of long options, increases size by about 106 Bytes
54 //config:config GZIP_FAST
55 //config: int "Trade memory for gzip speed (0:small,slow - 2:fast,big)"
58 //config: depends on GZIP
60 //config: Enable big memory options for gzip.
61 //config: 0: small buffers, small hash-tables
62 //config: 1: larger buffers, larger hash-tables
63 //config: 2: larger buffers, largest hash-tables
64 //config: Larger models may give slightly better compression
66 //applet:IF_GZIP(APPLET(gzip, BB_DIR_BIN, BB_SUID_DROP))
67 //kbuild:lib-$(CONFIG_GZIP) += gzip.o
69 //usage:#define gzip_trivial_usage
70 //usage: "[-cfd] [FILE]..."
71 //usage:#define gzip_full_usage "\n\n"
72 //usage: "Compress FILEs (or stdin)\n"
73 //usage: "\n -d Decompress"
74 //usage: "\n -c Write to stdout"
75 //usage: "\n -f Force"
77 //usage:#define gzip_example_usage
78 //usage: "$ ls -la /tmp/busybox*\n"
79 //usage: "-rw-rw-r-- 1 andersen andersen 1761280 Apr 14 17:47 /tmp/busybox.tar\n"
80 //usage: "$ gzip /tmp/busybox.tar\n"
81 //usage: "$ ls -la /tmp/busybox*\n"
82 //usage: "-rw-rw-r-- 1 andersen andersen 554058 Apr 14 17:49 /tmp/busybox.tar.gz\n"
85 #include "bb_archive.h"
88 /* ===========================================================================
91 /* Diagnostic functions */
93 # define Assert(cond,msg) { if (!(cond)) bb_error_msg(msg); }
94 # define Trace(x) fprintf x
95 # define Tracev(x) {if (verbose) fprintf x; }
96 # define Tracevv(x) {if (verbose > 1) fprintf x; }
97 # define Tracec(c,x) {if (verbose && (c)) fprintf x; }
98 # define Tracecv(c,x) {if (verbose > 1 && (c)) fprintf x; }
100 # define Assert(cond,msg)
105 # define Tracecv(c,x)
109 /* ===========================================================================
111 #if CONFIG_GZIP_FAST == 0
113 #elif CONFIG_GZIP_FAST == 1
115 #elif CONFIG_GZIP_FAST == 2
118 # error "Invalid CONFIG_GZIP_FAST value"
123 # define INBUFSIZ 0x2000 /* input buffer size */
125 # define INBUFSIZ 0x8000 /* input buffer size */
131 # define OUTBUFSIZ 8192 /* output buffer size */
133 # define OUTBUFSIZ 16384 /* output buffer size */
139 # define DIST_BUFSIZE 0x2000 /* buffer for distances, see trees.c */
141 # define DIST_BUFSIZE 0x8000 /* buffer for distances, see trees.c */
146 #define ASCII_FLAG 0x01 /* bit 0 set: file probably ascii text */
147 #define CONTINUATION 0x02 /* bit 1 set: continuation of multi-part gzip file */
148 #define EXTRA_FIELD 0x04 /* bit 2 set: extra field present */
149 #define ORIG_NAME 0x08 /* bit 3 set: original file name present */
150 #define COMMENT 0x10 /* bit 4 set: file comment present */
151 #define RESERVED 0xC0 /* bit 6,7: reserved */
153 /* internal file attribute */
154 #define UNKNOWN 0xffff
159 # define WSIZE 0x8000 /* window size--must be a power of two, and */
160 #endif /* at least 32K for zip's deflate method */
163 #define MAX_MATCH 258
164 /* The minimum and maximum match lengths */
166 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
167 /* Minimum amount of lookahead, except at the end of the input file.
168 * See deflate.c for comments about the MIN_MATCH+1.
171 #define MAX_DIST (WSIZE-MIN_LOOKAHEAD)
172 /* In order to simplify the code, particularly on 16 bit machines, match
173 * distances are limited to MAX_DIST instead of WSIZE.
177 # define MAX_PATH_LEN 1024 /* max pathname length */
180 #define seekable() 0 /* force sequential output */
181 #define translate_eol 0 /* no option -a yet */
186 #define INIT_BITS 9 /* Initial number of bits per code */
188 #define BIT_MASK 0x1f /* Mask for 'number of compression bits' */
189 /* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free.
190 * It's a pity that old uncompress does not check bit 0x20. That makes
191 * extension of the format actually undesirable because old compress
192 * would just crash on the new format instead of giving a meaningful
193 * error message. It does check the number of bits, but it's more
194 * helpful to say "unsupported format, get a new version" than
195 * "can only handle 16 bits".
199 # define MAX_SUFFIX MAX_EXT_CHARS
201 # define MAX_SUFFIX 30
205 /* ===========================================================================
206 * Compile with MEDIUM_MEM to reduce the memory requirements or
207 * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the
208 * entire input file can be held in memory (not possible on 16 bit systems).
209 * Warning: defining these symbols affects HASH_BITS (see below) and thus
210 * affects the compression ratio. The compressed output
211 * is still correct, and might even be smaller in some cases.
215 # define HASH_BITS 13 /* Number of bits used to hash strings */
218 # define HASH_BITS 14
221 # define HASH_BITS 15
222 /* For portability to 16 bit machines, do not use values above 15. */
225 #define HASH_SIZE (unsigned)(1<<HASH_BITS)
226 #define HASH_MASK (HASH_SIZE-1)
227 #define WMASK (WSIZE-1)
228 /* HASH_SIZE and WSIZE must be powers of two */
230 # define TOO_FAR 4096
232 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
235 /* ===========================================================================
236 * These types are not really 'char', 'short' and 'long'
239 typedef uint16_t ush;
240 typedef uint32_t ulg;
244 typedef unsigned IPos;
245 /* A Pos is an index in the character window. We use short instead of int to
246 * save space in the various tables. IPos is used only for parameter passing.
250 WINDOW_SIZE = 2 * WSIZE,
251 /* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the
252 * input file length plus MIN_LOOKAHEAD.
255 max_chain_length = 4096,
256 /* To speed up deflation, hash chains are never searched beyond this length.
257 * A higher limit improves compression ratio but degrades the speed.
260 max_lazy_match = 258,
261 /* Attempt to find a better match only when the current match is strictly
262 * smaller than this value. This mechanism is used only for compression
266 max_insert_length = max_lazy_match,
267 /* Insert new strings in the hash table only if the match length
268 * is not greater than this length. This saves time but degrades compression.
269 * max_insert_length is used only for compression levels <= 3.
273 /* Use a faster search when the previous match is longer than this */
275 /* Values for max_lazy_match, good_match and max_chain_length, depending on
276 * the desired pack level (0..9). The values given below have been tuned to
277 * exclude worst case performance for pathological files. Better values may be
278 * found for specific files.
281 nice_match = 258, /* Stop searching when current match exceeds this */
282 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
283 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
293 /* window position at the beginning of the current output block. Gets
294 * negative when the window is moved backwards.
296 unsigned ins_h; /* hash index of string to be inserted */
298 #define H_SHIFT ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH)
299 /* Number of bits by which ins_h and del_h must be shifted at each
300 * input step. It must be such that after MIN_MATCH steps, the oldest
301 * byte no longer takes part in the hash key, that is:
302 * H_SHIFT * MIN_MATCH >= HASH_BITS
305 unsigned prev_length;
307 /* Length of the best match at previous step. Matches not greater than this
308 * are discarded. This is used in the lazy match evaluation.
311 unsigned strstart; /* start of string to insert */
312 unsigned match_start; /* start of matching string */
313 unsigned lookahead; /* number of valid bytes ahead in window */
315 /* ===========================================================================
317 #define DECLARE(type, array, size) \
319 #define ALLOC(type, array, size) \
320 array = xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type))
321 #define FREE(array) \
322 do { free(array); array = NULL; } while (0)
326 /* buffer for literals or lengths */
327 /* DECLARE(uch, l_buf, LIT_BUFSIZE); */
328 DECLARE(uch, l_buf, INBUFSIZ);
330 DECLARE(ush, d_buf, DIST_BUFSIZE);
331 DECLARE(uch, outbuf, OUTBUFSIZ);
333 /* Sliding window. Input bytes are read into the second half of the window,
334 * and move to the first half later to keep a dictionary of at least WSIZE
335 * bytes. With this organization, matches are limited to a distance of
336 * WSIZE-MAX_MATCH bytes, but this ensures that IO is always
337 * performed with a length multiple of the block size. Also, it limits
338 * the window size to 64K, which is quite useful on MSDOS.
339 * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would
340 * be less efficient).
342 DECLARE(uch, window, 2L * WSIZE);
344 /* Link to older string with same hash index. To limit the size of this
345 * array to 64K, this link is maintained only for the last 32K strings.
346 * An index in this array is thus a window index modulo 32K.
348 /* DECLARE(Pos, prev, WSIZE); */
349 DECLARE(ush, prev, 1L << BITS);
351 /* Heads of the hash chains or 0. */
352 /* DECLARE(Pos, head, 1<<HASH_BITS); */
353 #define head (G1.prev + WSIZE) /* hash head (see deflate.c) */
355 /* number of input bytes */
356 ulg isize; /* only 32 bits stored in .gz file */
358 /* bbox always use stdin/stdout */
359 #define ifd STDIN_FILENO /* input file descriptor */
360 #define ofd STDOUT_FILENO /* output file descriptor */
363 unsigned insize; /* valid bytes in l_buf */
365 unsigned outcnt; /* bytes in output buffer */
367 smallint eofile; /* flag set at end of input file */
369 /* ===========================================================================
370 * Local data used by the "bit string" routines.
373 unsigned short bi_buf;
375 /* Output buffer. bits are inserted starting at the bottom (least significant
380 #define BUF_SIZE (8 * sizeof(G1.bi_buf))
381 /* Number of bits used within bi_buf. (bi_buf might be implemented on
382 * more than 16 bits on some systems.)
387 /* Current input function. Set to mem_read for in-memory compression */
390 ulg bits_sent; /* bit length of the compressed data */
393 /*uint32_t *crc_32_tab;*/
394 uint32_t crc; /* shift register contents */
397 #define G1 (*(ptr_to_globals - 1))
400 /* ===========================================================================
401 * Write the output buffer outbuf[0..outcnt-1] and update bytes_out.
402 * (used for the compressed data only)
404 static void flush_outbuf(void)
409 xwrite(ofd, (char *) G1.outbuf, G1.outcnt);
414 /* ===========================================================================
416 /* put_8bit is used for the compressed output */
417 #define put_8bit(c) \
419 G1.outbuf[G1.outcnt++] = (c); \
420 if (G1.outcnt == OUTBUFSIZ) \
424 /* Output a 16 bit value, lsb first */
425 static void put_16bit(ush w)
427 /* GCC 4.2.1 won't optimize out redundant loads of G1.outcnt
428 * (probably because of fear of aliasing with G1.outbuf[]
429 * stores), do it explicitly:
431 unsigned outcnt = G1.outcnt;
432 uch *dst = &G1.outbuf[outcnt];
434 #if BB_UNALIGNED_MEMACCESS_OK && BB_LITTLE_ENDIAN
435 if (outcnt < OUTBUFSIZ-2) {
437 ush *dst16 = (void*) dst;
438 *dst16 = w; /* unaligned LSB 16-bit store */
439 G1.outcnt = outcnt + 2;
447 if (outcnt < OUTBUFSIZ-2) {
450 G1.outcnt = outcnt + 2;
455 /* Slowpath: we will need to do flush_outbuf() */
456 G1.outcnt = ++outcnt;
457 if (outcnt == OUTBUFSIZ)
462 static void put_32bit(ulg n)
468 /* ===========================================================================
469 * Run a set of bytes through the crc shift register. If s is a NULL
470 * pointer, then initialize the crc shift register contents instead.
471 * Return the current crc in either case.
473 static void updcrc(uch * s, unsigned n)
475 G1.crc = crc32_block_endian0(G1.crc, s, n, global_crc32_table /*G1.crc_32_tab*/);
479 /* ===========================================================================
480 * Read a new buffer from the current input file, perform end-of-line
481 * translation, and update the crc and input file size.
482 * IN assertion: size >= 2 (for end-of-line translation)
484 static unsigned file_read(void *buf, unsigned size)
488 Assert(G1.insize == 0, "l_buf not empty");
490 len = safe_read(ifd, buf, size);
491 if (len == (unsigned)(-1) || len == 0)
500 /* ===========================================================================
501 * Send a value on a given number of bits.
502 * IN assertion: length <= 16 and value fits in length bits.
504 static void send_bits(int value, int length)
507 Tracev((stderr, " l %2d v %4x ", length, value));
508 Assert(length > 0 && length <= 15, "invalid length");
509 G1.bits_sent += length;
511 /* If not enough room in bi_buf, use (valid) bits from bi_buf and
512 * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
513 * unused bits in value.
515 if (G1.bi_valid > (int) BUF_SIZE - length) {
516 G1.bi_buf |= (value << G1.bi_valid);
517 put_16bit(G1.bi_buf);
518 G1.bi_buf = (ush) value >> (BUF_SIZE - G1.bi_valid);
519 G1.bi_valid += length - BUF_SIZE;
521 G1.bi_buf |= value << G1.bi_valid;
522 G1.bi_valid += length;
527 /* ===========================================================================
528 * Reverse the first len bits of a code, using straightforward code (a faster
529 * method would use a table)
530 * IN assertion: 1 <= len <= 15
532 static unsigned bi_reverse(unsigned code, int len)
538 if (--len <= 0) return res;
545 /* ===========================================================================
546 * Write out any remaining bits in an incomplete byte.
548 static void bi_windup(void)
550 if (G1.bi_valid > 8) {
551 put_16bit(G1.bi_buf);
552 } else if (G1.bi_valid > 0) {
558 G1.bits_sent = (G1.bits_sent + 7) & ~7;
563 /* ===========================================================================
564 * Copy a stored block to the zip file, storing first the length and its
565 * one's complement if requested.
567 static void copy_block(char *buf, unsigned len, int header)
569 bi_windup(); /* align on byte boundary */
575 G1.bits_sent += 2 * 16;
579 G1.bits_sent += (ulg) len << 3;
587 /* ===========================================================================
588 * Fill the window when the lookahead becomes insufficient.
589 * Updates strstart and lookahead, and sets eofile if end of input file.
590 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
591 * OUT assertions: at least one byte has been read, or eofile is set;
592 * file reads are performed for at least two bytes (required for the
593 * translate_eol option).
595 static void fill_window(void)
598 unsigned more = WINDOW_SIZE - G1.lookahead - G1.strstart;
599 /* Amount of free space at the end of the window. */
601 /* If the window is almost full and there is insufficient lookahead,
602 * move the upper half to the lower one to make room in the upper half.
604 if (more == (unsigned) -1) {
605 /* Very unlikely, but possible on 16 bit machine if strstart == 0
606 * and lookahead == 1 (input done one byte at time)
609 } else if (G1.strstart >= WSIZE + MAX_DIST) {
610 /* By the IN assertion, the window is not empty so we can't confuse
611 * more == 0 with more == 64K on a 16 bit machine.
613 Assert(WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");
615 memcpy(G1.window, G1.window + WSIZE, WSIZE);
616 G1.match_start -= WSIZE;
617 G1.strstart -= WSIZE; /* we now have strstart >= MAX_DIST: */
619 G1.block_start -= WSIZE;
621 for (n = 0; n < HASH_SIZE; n++) {
623 head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
625 for (n = 0; n < WSIZE; n++) {
627 G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
628 /* If n is not on any hash chain, prev[n] is garbage but
629 * its value will never be used.
634 /* At this point, more >= 2 */
636 n = file_read(G1.window + G1.strstart + G1.lookahead, more);
637 if (n == 0 || n == (unsigned) -1) {
646 /* ===========================================================================
647 * Set match_start to the longest match starting at the given string and
648 * return its length. Matches shorter or equal to prev_length are discarded,
649 * in which case the result is equal to prev_length and match_start is
651 * IN assertions: cur_match is the head of the hash chain for the current
652 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
655 /* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or
656 * match.s. The code is functionally equivalent, so you can use the C version
659 static int longest_match(IPos cur_match)
661 unsigned chain_length = max_chain_length; /* max hash chain length */
662 uch *scan = G1.window + G1.strstart; /* current string */
663 uch *match; /* matched string */
664 int len; /* length of current match */
665 int best_len = G1.prev_length; /* best match length so far */
666 IPos limit = G1.strstart > (IPos) MAX_DIST ? G1.strstart - (IPos) MAX_DIST : 0;
667 /* Stop when cur_match becomes <= limit. To simplify the code,
668 * we prevent matches with the string of window index 0.
671 /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
672 * It is easy to get rid of this optimization if necessary.
674 #if HASH_BITS < 8 || MAX_MATCH != 258
675 # error Code too clever
677 uch *strend = G1.window + G1.strstart + MAX_MATCH;
678 uch scan_end1 = scan[best_len - 1];
679 uch scan_end = scan[best_len];
681 /* Do not waste too much time if we already have a good match: */
682 if (G1.prev_length >= good_match) {
685 Assert(G1.strstart <= WINDOW_SIZE - MIN_LOOKAHEAD, "insufficient lookahead");
688 Assert(cur_match < G1.strstart, "no future");
689 match = G1.window + cur_match;
691 /* Skip to next match if the match length cannot increase
692 * or if the match length is less than 2:
694 if (match[best_len] != scan_end
695 || match[best_len - 1] != scan_end1
696 || *match != *scan || *++match != scan[1]
701 /* The check at best_len-1 can be removed because it will be made
702 * again later. (This heuristic is not always a win.)
703 * It is not necessary to compare scan[2] and match[2] since they
704 * are always equal when the other bytes match, given that
705 * the hash keys are equal and that HASH_BITS >= 8.
709 /* We check for insufficient lookahead only every 8th comparison;
710 * the 256th check will be made at strstart+258.
713 } while (*++scan == *++match && *++scan == *++match &&
714 *++scan == *++match && *++scan == *++match &&
715 *++scan == *++match && *++scan == *++match &&
716 *++scan == *++match && *++scan == *++match && scan < strend);
718 len = MAX_MATCH - (int) (strend - scan);
719 scan = strend - MAX_MATCH;
721 if (len > best_len) {
722 G1.match_start = cur_match;
724 if (len >= nice_match)
726 scan_end1 = scan[best_len - 1];
727 scan_end = scan[best_len];
729 } while ((cur_match = G1.prev[cur_match & WMASK]) > limit
730 && --chain_length != 0);
737 /* ===========================================================================
738 * Check that the match at match_start is indeed a match.
740 static void check_match(IPos start, IPos match, int length)
742 /* check that the match is indeed a match */
743 if (memcmp(G1.window + match, G1.window + start, length) != 0) {
744 bb_error_msg(" start %d, match %d, length %d", start, match, length);
745 bb_error_msg("invalid match");
748 bb_error_msg("\\[%d,%d]", start - match, length);
750 bb_putchar_stderr(G1.window[start++]);
751 } while (--length != 0);
755 # define check_match(start, match, length) ((void)0)
759 /* trees.c -- output deflated data using Huffman coding
760 * Copyright (C) 1992-1993 Jean-loup Gailly
761 * This is free software; you can redistribute it and/or modify it under the
762 * terms of the GNU General Public License, see the file COPYING.
766 * Encode various sets of source values using variable-length
770 * The PKZIP "deflation" process uses several Huffman trees. The more
771 * common source values are represented by shorter bit sequences.
773 * Each code tree is stored in the ZIP file in a compressed form
774 * which is itself a Huffman encoding of the lengths of
775 * all the code strings (in ascending order by source values).
776 * The actual code strings are reconstructed from the lengths in
777 * the UNZIP process, as described in the "application note"
778 * (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program.
782 * Data Compression: Techniques and Applications, pp. 53-55.
783 * Lifetime Learning Publications, 1985. ISBN 0-534-03418-7.
786 * Data Compression: Methods and Theory, pp. 49-50.
787 * Computer Science Press, 1988. ISBN 0-7167-8156-5.
791 * Addison-Wesley, 1983. ISBN 0-201-06672-6.
795 * Allocate the match buffer, initialize the various tables [and save
796 * the location of the internal file attribute (ascii/binary) and
797 * method (DEFLATE/STORE) -- deleted in bbox]
799 * void ct_tally(int dist, int lc);
800 * Save the match info and tally the frequency counts.
802 * ulg flush_block(char *buf, ulg stored_len, int eof)
803 * Determine the best encoding for the current block: dynamic trees,
804 * static trees or store, and output the encoded block to the zip
805 * file. Returns the total compressed length for the file so far.
809 /* All codes must not exceed MAX_BITS bits */
811 #define MAX_BL_BITS 7
812 /* Bit length codes must not exceed MAX_BL_BITS bits */
814 #define LENGTH_CODES 29
815 /* number of length codes, not counting the special END_BLOCK code */
818 /* number of literal bytes 0..255 */
820 #define END_BLOCK 256
821 /* end of block literal code */
823 #define L_CODES (LITERALS+1+LENGTH_CODES)
824 /* number of Literal or Length codes, including the END_BLOCK code */
827 /* number of distance codes */
830 /* number of codes used to transfer the bit lengths */
832 /* extra bits for each length code */
833 static const uint8_t extra_lbits[LENGTH_CODES] ALIGN1 = {
834 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
838 /* extra bits for each distance code */
839 static const uint8_t extra_dbits[D_CODES] ALIGN1 = {
840 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,
841 10, 10, 11, 11, 12, 12, 13, 13
844 /* extra bits for each bit length code */
845 static const uint8_t extra_blbits[BL_CODES] ALIGN1 = {
846 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };
848 /* number of codes at each bit length for an optimal tree */
849 static const uint8_t bl_order[BL_CODES] ALIGN1 = {
850 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
852 #define STORED_BLOCK 0
853 #define STATIC_TREES 1
855 /* The three kinds of block type */
859 # define LIT_BUFSIZE 0x2000
862 # define LIT_BUFSIZE 0x4000
864 # define LIT_BUFSIZE 0x8000
869 # define DIST_BUFSIZE LIT_BUFSIZE
871 /* Sizes of match buffers for literals/lengths and distances. There are
872 * 4 reasons for limiting LIT_BUFSIZE to 64K:
873 * - frequencies can be kept in 16 bit counters
874 * - if compression is not successful for the first block, all input data is
875 * still in the window so we can still emit a stored block even when input
876 * comes from standard input. (This can also be done for all blocks if
877 * LIT_BUFSIZE is not greater than 32K.)
878 * - if compression is not successful for a file smaller than 64K, we can
879 * even emit a stored file instead of a stored block (saving 5 bytes).
880 * - creating new Huffman trees less frequently may not provide fast
881 * adaptation to changes in the input data statistics. (Take for
882 * example a binary file with poorly compressible code followed by
883 * a highly compressible string table.) Smaller buffer sizes give
884 * fast adaptation but have of course the overhead of transmitting trees
886 * - I can't count above 4
887 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
888 * memory at the expense of compression). Some optimizations would be possible
889 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
892 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
894 /* repeat a zero length 3-10 times (3 bits of repeat count) */
895 #define REPZ_11_138 18
896 /* repeat a zero length 11-138 times (7 bits of repeat count) */
898 /* ===========================================================================
900 /* Data structure describing a single value and its code string. */
901 typedef struct ct_data {
903 ush freq; /* frequency count */
904 ush code; /* bit string */
907 ush dad; /* father node in Huffman tree */
908 ush len; /* length of bit string */
917 #define HEAP_SIZE (2*L_CODES + 1)
918 /* maximum heap size */
920 typedef struct tree_desc {
921 ct_data *dyn_tree; /* the dynamic tree */
922 ct_data *static_tree; /* corresponding static tree or NULL */
923 const uint8_t *extra_bits; /* extra bits for each code or NULL */
924 int extra_base; /* base index for extra_bits */
925 int elems; /* max number of elements in the tree */
926 int max_length; /* max bit length for the codes */
927 int max_code; /* largest code with non zero frequency */
932 ush heap[HEAP_SIZE]; /* heap used to build the Huffman trees */
933 int heap_len; /* number of elements in the heap */
934 int heap_max; /* element of largest frequency */
936 /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
937 * The same heap array is used to build all trees.
940 ct_data dyn_ltree[HEAP_SIZE]; /* literal and length tree */
941 ct_data dyn_dtree[2 * D_CODES + 1]; /* distance tree */
943 ct_data static_ltree[L_CODES + 2];
945 /* The static literal tree. Since the bit lengths are imposed, there is no
946 * need for the L_CODES extra codes used during heap construction. However
947 * The codes 286 and 287 are needed to build a canonical tree (see ct_init
951 ct_data static_dtree[D_CODES];
953 /* The static distance tree. (Actually a trivial tree since all codes use
957 ct_data bl_tree[2 * BL_CODES + 1];
959 /* Huffman tree for the bit lengths */
965 ush bl_count[MAX_BITS + 1];
967 /* The lengths of the bit length codes are sent in order of decreasing
968 * probability, to avoid transmitting the lengths for unused bit length codes.
971 uch depth[2 * L_CODES + 1];
973 /* Depth of each subtree used as tie breaker for trees of equal frequency */
975 uch length_code[MAX_MATCH - MIN_MATCH + 1];
977 /* length code for each normalized match length (0 == MIN_MATCH) */
981 /* distance codes. The first 256 values correspond to the distances
982 * 3 .. 258, the last 256 values correspond to the top 8 bits of
983 * the 15 bit distances.
986 int base_length[LENGTH_CODES];
988 /* First normalized length for each code (0 = MIN_MATCH) */
990 int base_dist[D_CODES];
992 /* First normalized distance for each code (0 = distance of 1) */
994 uch flag_buf[LIT_BUFSIZE / 8];
996 /* flag_buf is a bit array distinguishing literals from lengths in
997 * l_buf, thus indicating the presence or absence of a distance.
1000 unsigned last_lit; /* running index in l_buf */
1001 unsigned last_dist; /* running index in d_buf */
1002 unsigned last_flags; /* running index in flag_buf */
1003 uch flags; /* current flags not yet saved in flag_buf */
1004 uch flag_bit; /* current bit used in flags */
1006 /* bits are filled in flags starting at bit 0 (least significant).
1007 * Note: these flags are overkill in the current code since we don't
1008 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
1011 ulg opt_len; /* bit length of current block with optimal trees */
1012 ulg static_len; /* bit length of current block with static trees */
1014 ulg compressed_len; /* total bit length of compressed file */
1017 #define G2ptr ((struct globals2*)(ptr_to_globals))
1021 /* ===========================================================================
1023 static void gen_codes(ct_data * tree, int max_code);
1024 static void build_tree(tree_desc * desc);
1025 static void scan_tree(ct_data * tree, int max_code);
1026 static void send_tree(ct_data * tree, int max_code);
1027 static int build_bl_tree(void);
1028 static void send_all_trees(int lcodes, int dcodes, int blcodes);
1029 static void compress_block(ct_data * ltree, ct_data * dtree);
1033 /* Send a code of the given tree. c and tree must not have side effects */
1034 # define SEND_CODE(c, tree) send_bits(tree[c].Code, tree[c].Len)
1036 # define SEND_CODE(c, tree) \
1038 if (verbose > 1) bb_error_msg("\ncd %3d ", (c)); \
1039 send_bits(tree[c].Code, tree[c].Len); \
1043 #define D_CODE(dist) \
1044 ((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)])
1045 /* Mapping from a distance to a distance code. dist is the distance - 1 and
1046 * must not have side effects. dist_code[256] and dist_code[257] are never
1048 * The arguments must not have side effects.
1052 /* ===========================================================================
1053 * Initialize a new block.
1055 static void init_block(void)
1057 int n; /* iterates over tree elements */
1059 /* Initialize the trees. */
1060 for (n = 0; n < L_CODES; n++)
1061 G2.dyn_ltree[n].Freq = 0;
1062 for (n = 0; n < D_CODES; n++)
1063 G2.dyn_dtree[n].Freq = 0;
1064 for (n = 0; n < BL_CODES; n++)
1065 G2.bl_tree[n].Freq = 0;
1067 G2.dyn_ltree[END_BLOCK].Freq = 1;
1068 G2.opt_len = G2.static_len = 0;
1069 G2.last_lit = G2.last_dist = G2.last_flags = 0;
1075 /* ===========================================================================
1076 * Restore the heap property by moving down the tree starting at node k,
1077 * exchanging a node with the smallest of its two sons if necessary, stopping
1078 * when the heap property is re-established (each father smaller than its
1082 /* Compares two subtrees, using the tree depth as tie breaker when
1083 * the subtrees have equal frequency. This minimizes the worst case length. */
1084 #define SMALLER(tree, n, m) \
1085 (tree[n].Freq < tree[m].Freq \
1086 || (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))
1088 static void pqdownheap(ct_data * tree, int k)
1091 int j = k << 1; /* left son of k */
1093 while (j <= G2.heap_len) {
1094 /* Set j to the smallest of the two sons: */
1095 if (j < G2.heap_len && SMALLER(tree, G2.heap[j + 1], G2.heap[j]))
1098 /* Exit if v is smaller than both sons */
1099 if (SMALLER(tree, v, G2.heap[j]))
1102 /* Exchange v with the smallest son */
1103 G2.heap[k] = G2.heap[j];
1106 /* And continue down the tree, setting j to the left son of k */
1113 /* ===========================================================================
1114 * Compute the optimal bit lengths for a tree and update the total bit length
1115 * for the current block.
1116 * IN assertion: the fields freq and dad are set, heap[heap_max] and
1117 * above are the tree nodes sorted by increasing frequency.
1118 * OUT assertions: the field len is set to the optimal bit length, the
1119 * array bl_count contains the frequencies for each bit length.
1120 * The length opt_len is updated; static_len is also updated if stree is
1123 static void gen_bitlen(tree_desc * desc)
1125 ct_data *tree = desc->dyn_tree;
1126 const uint8_t *extra = desc->extra_bits;
1127 int base = desc->extra_base;
1128 int max_code = desc->max_code;
1129 int max_length = desc->max_length;
1130 ct_data *stree = desc->static_tree;
1131 int h; /* heap index */
1132 int n, m; /* iterate over the tree elements */
1133 int bits; /* bit length */
1134 int xbits; /* extra bits */
1135 ush f; /* frequency */
1136 int overflow = 0; /* number of elements with bit length too large */
/* Clear the bit-length histogram before the first pass. */
1138 for (bits = 0; bits <= MAX_BITS; bits++)
1139 G2.bl_count[bits] = 0;
1141 /* In a first pass, compute the optimal bit lengths (which may
1142 * overflow in the case of the bit length tree).
1144 tree[G2.heap[G2.heap_max]].Len = 0; /* root of the heap */
1146 for (h = G2.heap_max + 1; h < HEAP_SIZE; h++) {
/* Each node's length is one more than its father's. */
1148 bits = tree[tree[n].Dad].Len + 1;
1149 if (bits > max_length) {
1153 tree[n].Len = (ush) bits;
1154 /* We overwrite tree[n].Dad which is no longer needed */
1157 continue; /* not a leaf node */
1159 G2.bl_count[bits]++;
/* Leaves at or beyond extra_base also carry extra bits in the output. */
1162 xbits = extra[n - base];
1164 G2.opt_len += (ulg) f *(bits + xbits);
1167 G2.static_len += (ulg) f * (stree[n].Len + xbits);
1172 Trace((stderr, "\nbit length overflow\n"));
1173 /* This happens for example on obj2 and pic of the Calgary corpus */
/* Fix-up pass: repeatedly move an overflowed leaf down to max_length by
 * adjusting the bl_count histogram until no overflow remains. */
1175 /* Find the first bit length which could increase: */
1177 bits = max_length - 1;
1178 while (G2.bl_count[bits] == 0)
1180 G2.bl_count[bits]--; /* move one leaf down the tree */
1181 G2.bl_count[bits + 1] += 2; /* move one overflow item as its brother */
1182 G2.bl_count[max_length]--;
1183 /* The brother of the overflow item also moves one step up,
1184 * but this does not affect bl_count[max_length]
1187 } while (overflow > 0);
1189 /* Now recompute all bit lengths, scanning in increasing frequency.
1190 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
1191 * lengths instead of fixing only the wrong ones. This idea is taken
1192 * from 'ar' written by Haruhiko Okumura.)
1194 for (bits = max_length; bits != 0; bits--) {
1195 n = G2.bl_count[bits];
1200 if (tree[m].Len != (unsigned) bits) {
1201 Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits));
/* Adjust opt_len by the (possibly negative) length delta for this leaf. */
1202 G2.opt_len += ((int32_t) bits - tree[m].Len) * tree[m].Freq;
1211 /* ===========================================================================
1212 * Generate the codes for a given tree and bit counts (which need not be
1214 * IN assertion: the array bl_count contains the bit length statistics for
1215 * the given tree and the field len is set for all tree elements.
1216 * OUT assertion: the field code is set for all tree elements of non
1219 static void gen_codes(ct_data * tree, int max_code)
1221 ush next_code[MAX_BITS + 1]; /* next code value for each bit length */
1222 ush code = 0; /* running code value */
1223 int bits; /* bit index */
1224 int n; /* code index */
1226 /* The distribution counts are first used to generate the code values
1227 * without bit reversal.
/* Canonical Huffman: the first code of each length is derived from the
 * count of codes of the previous length, shifted left by one. */
1229 for (bits = 1; bits <= MAX_BITS; bits++) {
1230 next_code[bits] = code = (code + G2.bl_count[bits - 1]) << 1;
1232 /* Check that the bit counts in bl_count are consistent. The last code
1235 Assert(code + G2.bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
1236 "inconsistent bit counts");
1237 Tracev((stderr, "\ngen_codes: max_code %d ", max_code));
1239 for (n = 0; n <= max_code; n++) {
1240 int len = tree[n].Len;
1244 /* Now reverse the bits */
/* DEFLATE emits Huffman codes LSB-first, hence the bit reversal. */
1245 tree[n].Code = bi_reverse(next_code[len]++, len);
1247 Tracec(tree != G2.static_ltree,
1248 (stderr, "\nn %3d %c l %2d c %4x (%x) ", n,
1249 (n > ' ' ? n : ' '), len, tree[n].Code,
1250 next_code[len] - 1));
1255 /* ===========================================================================
1256 * Construct one Huffman tree and assign the code bit strings and lengths.
1257 * Update the total bit length for the current block.
1258 * IN assertion: the field freq is set for all tree elements.
1259 * OUT assertions: the fields len and code are set to the optimal bit length
1260 * and corresponding code. The length opt_len is updated; static_len is
1261 * also updated if stree is not null. The field max_code is set.
1264 /* Remove the smallest element from the heap and recreate the heap with
1265 * one less element. Updates heap and heap_len. */
1268 /* Index within the heap array of least frequent node in the Huffman tree */
/* Pop the root (least frequent node) into 'top', move the last element to
 * the root slot, then restore the heap property. */
1270 #define PQREMOVE(tree, top) \
1272 top = G2.heap[SMALLEST]; \
1273 G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \
1274 pqdownheap(tree, SMALLEST); \
/* Build the Huffman tree described by 'desc' (see header comment above):
 * heap construction, repeated pairing of least-frequent nodes, then
 * bit-length and code generation via gen_bitlen()/gen_codes(). */
1277 static void build_tree(tree_desc * desc)
1279 ct_data *tree = desc->dyn_tree;
1280 ct_data *stree = desc->static_tree;
1281 int elems = desc->elems;
1282 int n, m; /* iterate over heap elements */
1283 int max_code = -1; /* largest code with non zero frequency */
1284 int node = elems; /* next internal node of the tree */
1286 /* Construct the initial heap, with least frequent element in
1287 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
1288 * heap[0] is not used.
1291 G2.heap_max = HEAP_SIZE;
1293 for (n = 0; n < elems; n++) {
1294 if (tree[n].Freq != 0) {
1295 G2.heap[++G2.heap_len] = max_code = n;
1302 /* The pkzip format requires that at least one distance code exists,
1303 * and that at least one bit should be sent even if there is only one
1304 * possible code. So to avoid special checks later on we force at least
1305 * two codes of non zero frequency.
1307 while (G2.heap_len < 2) {
1308 int new = G2.heap[++G2.heap_len] = (max_code < 2 ? ++max_code : 0);
/* Forced codes don't really cost static bits, so compensate. */
1314 G2.static_len -= stree[new].Len;
1315 /* new is 0 or 1 so it does not have extra bits */
1317 desc->max_code = max_code;
1319 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
1320 * establish sub-heaps of increasing lengths:
1322 for (n = G2.heap_len / 2; n >= 1; n--)
1323 pqdownheap(tree, n);
1325 /* Construct the Huffman tree by repeatedly combining the least two
/* frequent nodes until only the root remains on the heap. */
1329 PQREMOVE(tree, n); /* n = node of least frequency */
1330 m = G2.heap[SMALLEST]; /* m = node of next least frequency */
1332 G2.heap[--G2.heap_max] = n; /* keep the nodes sorted by frequency */
1333 G2.heap[--G2.heap_max] = m;
1335 /* Create a new node father of n and m */
1336 tree[node].Freq = tree[n].Freq + tree[m].Freq;
1337 G2.depth[node] = MAX(G2.depth[n], G2.depth[m]) + 1;
1338 tree[n].Dad = tree[m].Dad = (ush) node;
1340 if (tree == G2.bl_tree) {
1341 bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
1342 node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
1345 /* and insert the new node in the heap */
1346 G2.heap[SMALLEST] = node++;
1347 pqdownheap(tree, SMALLEST);
1349 } while (G2.heap_len >= 2);
1351 G2.heap[--G2.heap_max] = G2.heap[SMALLEST];
1353 /* At this point, the fields freq and dad are set. We can now
1354 * generate the bit lengths.
1356 gen_bitlen((tree_desc *) desc);
1358 /* The field len is now set, we can generate the bit codes */
1359 gen_codes((ct_data *) tree, max_code);
1363 /* ===========================================================================
1364 * Scan a literal or distance tree to determine the frequencies of the codes
1365 * in the bit length tree. Updates opt_len to take into account the repeat
1366 * counts. (The contribution of the bit length codes will be added later
1367 * during the construction of bl_tree.)
1369 static void scan_tree(ct_data * tree, int max_code)
1371 int n; /* iterates over all tree elements */
1372 int prevlen = -1; /* last emitted length */
1373 int curlen; /* length of current code */
1374 int nextlen = tree[0].Len; /* length of next code */
1375 int count = 0; /* repeat count of the current code */
1376 int max_count = 7; /* max repeat count */
1377 int min_count = 4; /* min repeat count */
/* Guard entry so the run-length loop below terminates cleanly. */
1383 tree[max_code + 1].Len = 0xffff; /* guard */
1385 for (n = 0; n <= max_code; n++) {
1387 nextlen = tree[n + 1].Len;
1388 if (++count < max_count && curlen == nextlen)
/* A run has ended: tally it as plain lengths, REP_3_6, REPZ_3_10 or
 * REPZ_11_138 depending on the run's value and length. */
1391 if (count < min_count) {
1392 G2.bl_tree[curlen].Freq += count;
1393 } else if (curlen != 0) {
1394 if (curlen != prevlen)
1395 G2.bl_tree[curlen].Freq++;
1396 G2.bl_tree[REP_3_6].Freq++;
1397 } else if (count <= 10) {
1398 G2.bl_tree[REPZ_3_10].Freq++;
1400 G2.bl_tree[REPZ_11_138].Freq++;
1410 } else if (curlen == nextlen) {
1418 /* ===========================================================================
1419 * Send a literal or distance tree in compressed form, using the codes in
/* the bit length tree.  Mirrors the run-length logic of scan_tree(), but
 * emits codes and extra bits instead of counting frequencies. */
1422 static void send_tree(ct_data * tree, int max_code)
1424 int n; /* iterates over all tree elements */
1425 int prevlen = -1; /* last emitted length */
1426 int curlen; /* length of current code */
1427 int nextlen = tree[0].Len; /* length of next code */
1428 int count = 0; /* repeat count of the current code */
1429 int max_count = 7; /* max repeat count */
1430 int min_count = 4; /* min repeat count */
1432 /* tree[max_code+1].Len = -1; *//* guard already set */
1434 max_count = 138, min_count = 3;
1436 for (n = 0; n <= max_code; n++) {
1438 nextlen = tree[n + 1].Len;
1439 if (++count < max_count && curlen == nextlen) {
1441 } else if (count < min_count) {
/* Short run: send each length individually. */
1443 SEND_CODE(curlen, G2.bl_tree);
1445 } else if (curlen != 0) {
1446 if (curlen != prevlen) {
1447 SEND_CODE(curlen, G2.bl_tree);
1450 Assert(count >= 3 && count <= 6, " 3_6?");
1451 SEND_CODE(REP_3_6, G2.bl_tree);
1452 send_bits(count - 3, 2);
1453 } else if (count <= 10) {
1454 SEND_CODE(REPZ_3_10, G2.bl_tree);
1455 send_bits(count - 3, 3);
1457 SEND_CODE(REPZ_11_138, G2.bl_tree);
1458 send_bits(count - 11, 7);
1465 } else if (curlen == nextlen) {
1476 /* ===========================================================================
1477 * Construct the Huffman tree for the bit lengths and return the index in
1478 * bl_order of the last bit length code to send.
1480 static int build_bl_tree(void)
1482 int max_blindex; /* index of last bit length code of non zero freq */
1484 /* Determine the bit length frequencies for literal and distance trees */
1485 scan_tree(G2.dyn_ltree, G2.l_desc.max_code);
1486 scan_tree(G2.dyn_dtree, G2.d_desc.max_code);
1488 /* Build the bit length tree: */
1489 build_tree(&G2.bl_desc);
1490 /* opt_len now includes the length of the tree representations, except
1491 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
1494 /* Determine the number of bit length codes to send. The pkzip format
1495 * requires that at least 4 bit length codes be sent. (appnote.txt says
1496 * 3 but the actual value used is 4.)
/* Scan bl_order from the end: trailing zero-length codes need not be sent. */
1498 for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) {
1499 if (G2.bl_tree[bl_order[max_blindex]].Len != 0)
1502 /* Update opt_len to include the bit length tree and counts */
1503 G2.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
1504 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
1510 /* ===========================================================================
1511 * Send the header for a block using dynamic Huffman trees: the counts, the
1512 * lengths of the bit length codes, the literal tree and the distance tree.
1513 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
1515 static void send_all_trees(int lcodes, int dcodes, int blcodes)
1517 int rank; /* index in bl_order */
1519 Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
1520 Assert(lcodes <= L_CODES && dcodes <= D_CODES
1521 && blcodes <= BL_CODES, "too many codes");
1522 Tracev((stderr, "\nbl counts: "));
/* HLIT, HDIST and HCLEN fields of the dynamic block header. */
1523 send_bits(lcodes - 257, 5); /* not +255 as stated in appnote.txt */
1524 send_bits(dcodes - 1, 5);
1525 send_bits(blcodes - 4, 4); /* not -3 as stated in appnote.txt */
/* Bit-length code lengths, 3 bits each, in bl_order permutation. */
1526 for (rank = 0; rank < blcodes; rank++) {
1527 Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
1528 send_bits(G2.bl_tree[bl_order[rank]].Len, 3);
1530 Tracev((stderr, "\nbl tree: sent %ld", G1.bits_sent));
1532 send_tree((ct_data *) G2.dyn_ltree, lcodes - 1); /* send the literal tree */
1533 Tracev((stderr, "\nlit tree: sent %ld", G1.bits_sent));
1535 send_tree((ct_data *) G2.dyn_dtree, dcodes - 1); /* send the distance tree */
1536 Tracev((stderr, "\ndist tree: sent %ld", G1.bits_sent));
1540 /* ===========================================================================
1541 * Save the match info and tally the frequency counts. Return true if
1542 * the current block must be flushed.
/* dist: match distance (0 means 'lc' is a literal byte);
 * lc:   literal byte, or match length - MIN_MATCH when dist != 0. */
1544 static int ct_tally(int dist, int lc)
1546 G1.l_buf[G2.last_lit++] = lc;
1548 /* lc is the unmatched char */
1549 G2.dyn_ltree[lc].Freq++;
1551 /* Here, lc is the match length - MIN_MATCH */
1552 dist--; /* dist = match distance - 1 */
1553 Assert((ush) dist < (ush) MAX_DIST
1554 && (ush) lc <= (ush) (MAX_MATCH - MIN_MATCH)
1555 && (ush) D_CODE(dist) < (ush) D_CODES, "ct_tally: bad match"
1558 G2.dyn_ltree[G2.length_code[lc] + LITERALS + 1].Freq++;
1559 G2.dyn_dtree[D_CODE(dist)].Freq++;
1561 G1.d_buf[G2.last_dist++] = dist;
/* Mark this entry as a match (vs literal) in the per-8-entry flag byte. */
1562 G2.flags |= G2.flag_bit;
1566 /* Output the flags if they fill a byte: */
1567 if ((G2.last_lit & 7) == 0) {
1568 G2.flag_buf[G2.last_flags++] = G2.flags;
1572 /* Try to guess if it is profitable to stop the current block here */
1573 if ((G2.last_lit & 0xfff) == 0) {
1574 /* Compute an upper bound for the compressed length */
1575 ulg out_length = G2.last_lit * 8L;
1576 ulg in_length = (ulg) G1.strstart - G1.block_start;
1579 for (dcode = 0; dcode < D_CODES; dcode++) {
1580 out_length += G2.dyn_dtree[dcode].Freq * (5L + extra_dbits[dcode]);
1584 "\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
1585 G2.last_lit, G2.last_dist, in_length, out_length,
1586 100L - out_length * 100L / in_length));
/* Heuristic early flush: mostly literals and poor estimated ratio. */
1587 if (G2.last_dist < G2.last_lit / 2 && out_length < in_length / 2)
1590 return (G2.last_lit == LIT_BUFSIZE - 1 || G2.last_dist == DIST_BUFSIZE);
1591 /* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
1592 * on 16 bit machines and because stored blocks are restricted to
1597 /* ===========================================================================
1598 * Send the block data compressed using the given Huffman trees
/* Replays the l_buf/d_buf/flag_buf records accumulated by ct_tally(),
 * emitting literal codes or length+distance codes, then END_BLOCK. */
1600 static void compress_block(ct_data * ltree, ct_data * dtree)
1602 unsigned dist; /* distance of matched string */
1603 int lc; /* match length or unmatched char (if dist == 0) */
1604 unsigned lx = 0; /* running index in l_buf */
1605 unsigned dx = 0; /* running index in d_buf */
1606 unsigned fx = 0; /* running index in flag_buf */
1607 uch flag = 0; /* current flags */
1608 unsigned code; /* the code to send */
1609 int extra; /* number of extra bits to send */
1611 if (G2.last_lit != 0) do {
/* One flag byte covers 8 entries; bit 0 set means "match", clear "literal". */
1613 flag = G2.flag_buf[fx++];
1614 lc = G1.l_buf[lx++];
1615 if ((flag & 1) == 0) {
1616 SEND_CODE(lc, ltree); /* send a literal byte */
1617 Tracecv(lc > ' ', (stderr, " '%c' ", lc));
1619 /* Here, lc is the match length - MIN_MATCH */
1620 code = G2.length_code[lc];
1621 SEND_CODE(code + LITERALS + 1, ltree); /* send the length code */
1622 extra = extra_lbits[code];
1624 lc -= G2.base_length[code];
1625 send_bits(lc, extra); /* send the extra length bits */
1627 dist = G1.d_buf[dx++];
1628 /* Here, dist is the match distance - 1 */
1629 code = D_CODE(dist);
1630 Assert(code < D_CODES, "bad d_code");
1632 SEND_CODE(code, dtree); /* send the distance code */
1633 extra = extra_dbits[code];
1635 dist -= G2.base_dist[code];
1636 send_bits(dist, extra); /* send the extra distance bits */
1638 } /* literal or match pair ? */
1640 } while (lx < G2.last_lit);
1642 SEND_CODE(END_BLOCK, ltree);
1646 /* ===========================================================================
1647 * Determine the best encoding for the current block: dynamic trees, static
1648 * trees or store, and output the encoded block to the zip file. This function
1649 * returns the total compressed length for the file so far.
/* buf: window data of the current block (may be NULL, see below);
 * stored_len: its length in bytes; eof: non-zero for the last block. */
1651 static ulg flush_block(char *buf, ulg stored_len, int eof)
1653 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
1654 int max_blindex; /* index of last bit length code of non zero freq */
1656 G2.flag_buf[G2.last_flags] = G2.flags; /* Save the flags for the last 8 items */
1658 /* Construct the literal and distance trees */
1659 build_tree(&G2.l_desc);
1660 Tracev((stderr, "\nlit data: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
1662 build_tree(&G2.d_desc);
1663 Tracev((stderr, "\ndist data: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
1664 /* At this point, opt_len and static_len are the total bit lengths of
1665 * the compressed block data, excluding the tree representations.
1668 /* Build the bit length tree for the above two trees, and get the index
1669 * in bl_order of the last bit length code to send.
1671 max_blindex = build_bl_tree();
1673 /* Determine the best encoding. Compute first the block length in bytes */
/* +3 for the block-type bits, +7 to round up to whole bytes. */
1674 opt_lenb = (G2.opt_len + 3 + 7) >> 3;
1675 static_lenb = (G2.static_len + 3 + 7) >> 3;
1678 "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
1679 opt_lenb, G2.opt_len, static_lenb, G2.static_len, stored_len,
1680 G2.last_lit, G2.last_dist));
1682 if (static_lenb <= opt_lenb)
1683 opt_lenb = static_lenb;
1685 /* If compression failed and this is the first and last block,
1686 * and if the zip file can be seeked (to rewrite the local header),
1687 * the whole file is transformed into a stored file:
1689 if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) {
1690 /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
1692 bb_error_msg("block vanished");
1694 copy_block(buf, (unsigned) stored_len, 0); /* without header */
1695 G2.compressed_len = stored_len << 3;
1697 } else if (stored_len + 4 <= opt_lenb && buf != NULL) {
1698 /* 4: two words for the lengths */
1699 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
1700 * Otherwise we can't have processed more than WSIZE input bytes since
1701 * the last block flush, because compression would have been
1702 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
1703 * transform a block into a stored block.
1705 send_bits((STORED_BLOCK << 1) + eof, 3); /* send block type */
/* Stored blocks start on a byte boundary; pad, then count LEN/NLEN + data. */
1706 G2.compressed_len = (G2.compressed_len + 3 + 7) & ~7L;
1707 G2.compressed_len += (stored_len + 4) << 3;
1709 copy_block(buf, (unsigned) stored_len, 1); /* with header */
1711 } else if (static_lenb == opt_lenb) {
1712 send_bits((STATIC_TREES << 1) + eof, 3);
1713 compress_block((ct_data *) G2.static_ltree, (ct_data *) G2.static_dtree);
1714 G2.compressed_len += 3 + G2.static_len;
1716 send_bits((DYN_TREES << 1) + eof, 3);
1717 send_all_trees(G2.l_desc.max_code + 1, G2.d_desc.max_code + 1,
1719 compress_block((ct_data *) G2.dyn_ltree, (ct_data *) G2.dyn_dtree);
1720 G2.compressed_len += 3 + G2.opt_len;
1722 Assert(G2.compressed_len == G1.bits_sent, "bad compressed size");
1727 G2.compressed_len += 7; /* align on byte boundary */
1729 Tracev((stderr, "\ncomprlen %lu(%lu) ", G2.compressed_len >> 3,
1730 G2.compressed_len - 7 * eof));
1732 return G2.compressed_len >> 3;
1736 /* ===========================================================================
1737 * Update a hash value with the given input byte
1738 * IN assertion: all calls to UPDATE_HASH are made with consecutive
1739 * input characters, so that a running hash key can be computed from the
1740 * previous key instead of complete recalculation each time.
/* Rolling hash: shift in the new byte and mask to the hash-table range. */
1742 #define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK)
1745 /* ===========================================================================
1746 * Same as above, but achieves better compression. We use a lazy
1747 * evaluation for matches: a match is finally adopted only if there is
1748 * no better match at the next window position.
1750 * Processes a new input file and returns its compressed length. Sets
1751 * the compressed length, crc, deflate flags and internal file
1755 /* Flush the current block, with given end-of-file flag.
1756 * IN assertion: strstart is set to the end of the current match. */
/* Passes NULL-or-window-pointer plus the block's byte length to flush_block;
 * block_start < 0 means the data is no longer in the window. */
1757 #define FLUSH_BLOCK(eof) \
1759 G1.block_start >= 0L \
1760 ? (char*)&G1.window[(unsigned)G1.block_start] \
1762 (ulg)G1.strstart - G1.block_start, \
1766 /* Insert string s in the dictionary and set match_head to the previous head
1767 * of the hash chain (the most recent string with same hash key). Return
1768 * the previous length of the hash chain.
1769 * IN assertion: all calls to INSERT_STRING are made with consecutive
1770 * input characters and the first MIN_MATCH bytes of s are valid
1771 * (except for the last MIN_MATCH-1 bytes of the input file). */
1772 #define INSERT_STRING(s, match_head) \
1774 UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \
1775 G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \
1776 head[G1.ins_h] = (s); \
/* Main LZ77 loop with lazy matching (see header comment above):
 * for each window position, find the longest match and defer emitting it
 * until the next position has been tried; emit the better of the two.
 * Returns the compressed length via FLUSH_BLOCK(1) at end of input. */
1779 static ulg deflate(void)
1781 IPos hash_head; /* head of hash chain */
1782 IPos prev_match; /* previous match */
1783 int flush; /* set if current block must be flushed */
1784 int match_available = 0; /* set if previous match exists */
1785 unsigned match_length = MIN_MATCH - 1; /* length of best match */
1787 /* Process the input block. */
1788 while (G1.lookahead != 0) {
1789 /* Insert the string window[strstart .. strstart+2] in the
1790 * dictionary, and set hash_head to the head of the hash chain:
1792 INSERT_STRING(G1.strstart, hash_head);
1794 /* Find the longest match, discarding those <= prev_length.
1796 G1.prev_length = match_length;
1797 prev_match = G1.match_start;
1798 match_length = MIN_MATCH - 1;
1800 if (hash_head != 0 && G1.prev_length < max_lazy_match
1801 && G1.strstart - hash_head <= MAX_DIST
1803 /* To simplify the code, we prevent matches with the string
1804 * of window index 0 (in particular we have to avoid a match
1805 * of the string with itself at the start of the input file).
1807 match_length = longest_match(hash_head);
1808 /* longest_match() sets match_start */
1809 if (match_length > G1.lookahead)
1810 match_length = G1.lookahead;
1812 /* Ignore a length 3 match if it is too distant: */
1813 if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) {
1814 /* If prev_match is also MIN_MATCH, G1.match_start is garbage
1815 * but we will ignore the current match anyway.
1820 /* If there was a match at the previous step and the current
1821 * match is not better, output the previous match:
1823 if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) {
1824 check_match(G1.strstart - 1, prev_match, G1.prev_length);
1825 flush = ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH);
1827 /* Insert in hash table all strings up to the end of the match.
1828 * strstart-1 and strstart are already inserted.
1830 G1.lookahead -= G1.prev_length - 1;
1831 G1.prev_length -= 2;
1834 INSERT_STRING(G1.strstart, hash_head);
1835 /* strstart never exceeds WSIZE-MAX_MATCH, so there are
1836 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
1837 * these bytes are garbage, but it does not matter since the
1838 * next lookahead bytes will always be emitted as literals.
1840 } while (--G1.prev_length != 0);
1841 match_available = 0;
1842 match_length = MIN_MATCH - 1;
1846 G1.block_start = G1.strstart;
1848 } else if (match_available) {
1849 /* If there was no match at the previous position, output a
1850 * single literal. If there was a match but the current match
1851 * is longer, truncate the previous match to a single literal.
1853 Tracevv((stderr, "%c", G1.window[G1.strstart - 1]));
1854 if (ct_tally(0, G1.window[G1.strstart - 1])) {
1856 G1.block_start = G1.strstart;
1861 /* There is no previous match to compare with, wait for
1862 * the next step to decide.
1864 match_available = 1;
1868 Assert(G1.strstart <= G1.isize && lookahead <= G1.isize, "a bit too far");
1870 /* Make sure that we always have enough lookahead, except
1871 * at the end of the input file. We need MAX_MATCH bytes
1872 * for the next match, plus MIN_MATCH bytes to insert the
1873 * string following the next match.
1875 while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
/* Flush any pending deferred literal before the final block. */
1878 if (match_available)
1879 ct_tally(0, G1.window[G1.strstart - 1]);
1881 return FLUSH_BLOCK(1); /* eof */
1885 /* ===========================================================================
1886 * Initialize the bit string routines.
/* NOTE(review): the function body is not visible in this dump (numbering
 * gap after the signature); nothing documented beyond its stated purpose. */
1888 static void bi_init(void)
1898 /* ===========================================================================
1899 * Initialize the "longest match" routines for a new file
/* flagsp: receives the pkzip speed-option bits for the gzip header XFL. */
1901 static void lm_init(ush * flagsp)
1905 /* Initialize the hash table. */
1906 memset(head, 0, HASH_SIZE * sizeof(*head));
1907 /* prev will be initialized on the fly */
1909 /* speed options for the general purpose bit flag */
1910 *flagsp |= 2; /* FAST 4, SLOW 2 */
1911 /* ??? reduce max_chain_length for binary files */
1914 G1.block_start = 0L;
/* Prefill the window; on 16-bit targets only WSIZE bytes fit at once. */
1916 G1.lookahead = file_read(G1.window,
1917 sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE);
1919 if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) {
1925 /* Make sure that we always have enough lookahead. This is important
1926 * if input comes from a device such as a tty.
1928 while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
/* Seed the rolling hash with the first MIN_MATCH-1 bytes. */
1932 for (j = 0; j < MIN_MATCH - 1; j++)
1933 UPDATE_HASH(G1.ins_h, G1.window[j]);
1934 /* If lookahead < MIN_MATCH, ins_h is garbage, but this is
1935 * not important since only literal bytes will be emitted.
1940 /* ===========================================================================
1941 * Allocate the match buffer, initialize the various tables and save the
1942 * location of the internal file attribute (ascii/binary) and method
1944 * One callsite in zip()
1946 static void ct_init(void)
1948 int n; /* iterates over tree elements */
1949 int length; /* length value */
1950 int code; /* code value */
1951 int dist; /* distance index */
1953 G2.compressed_len = 0L;
/* static_dtree[0].Len is only non-zero after a prior ct_init run. */
1956 if (G2.static_dtree[0].Len != 0)
1957 return; /* ct_init already called */
1960 /* Initialize the mapping length (0..255) -> length code (0..28) */
1962 for (code = 0; code < LENGTH_CODES - 1; code++) {
1963 G2.base_length[code] = length;
1964 for (n = 0; n < (1 << extra_lbits[code]); n++) {
1965 G2.length_code[length++] = code;
1968 Assert(length == 256, "ct_init: length != 256");
1969 /* Note that the length 255 (match length 258) can be represented
1970 * in two different ways: code 284 + 5 bits or code 285, so we
1971 * overwrite length_code[255] to use the best encoding:
1973 G2.length_code[length - 1] = code;
1975 /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
1977 for (code = 0; code < 16; code++) {
1978 G2.base_dist[code] = dist;
1979 for (n = 0; n < (1 << extra_dbits[code]); n++) {
1980 G2.dist_code[dist++] = code;
1983 Assert(dist == 256, "ct_init: dist != 256");
1984 dist >>= 7; /* from now on, all distances are divided by 128 */
1985 for (; code < D_CODES; code++) {
1986 G2.base_dist[code] = dist << 7;
1987 for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
1988 G2.dist_code[256 + dist++] = code;
1991 Assert(dist == 256, "ct_init: 256+dist != 512");
1993 /* Construct the codes of the static literal tree */
1994 /* already zeroed - it's in bss
1995 for (n = 0; n <= MAX_BITS; n++)
1996 G2.bl_count[n] = 0; */
/* Static literal tree lengths per RFC 1951: 8, 9, 7, 8 bit ranges. */
2000 G2.static_ltree[n++].Len = 8;
2004 G2.static_ltree[n++].Len = 9;
2008 G2.static_ltree[n++].Len = 7;
2012 G2.static_ltree[n++].Len = 8;
2015 /* Codes 286 and 287 do not exist, but we must include them in the
2016 * tree construction to get a canonical Huffman tree (longest code
2019 gen_codes((ct_data *) G2.static_ltree, L_CODES + 1);
2021 /* The static distance tree is trivial: */
2022 for (n = 0; n < D_CODES; n++) {
2023 G2.static_dtree[n].Len = 5;
2024 G2.static_dtree[n].Code = bi_reverse(n, 5);
2027 /* Initialize the first block of the first file: */
2032 /* ===========================================================================
2033 * Deflate in to out.
2034 * IN assertions: the input and output buffers are cleared.
2037 static void zip(void)
2039 ush deflate_flags = 0; /* pkzip -es, -en or -ex equivalent */
2043 /* Write the header to the gzip file. See algorithm.doc for the format */
2044 /* magic header for gzip files: 1F 8B */
2045 /* compression method: 8 (DEFLATED) */
2046 /* general flags: 0 */
/* One 32-bit little-endian write covers magic, method and flags. */
2047 put_32bit(0x00088b1f);
2048 put_32bit(0); /* Unix timestamp */
2050 /* Write deflated file to zip file */
2055 lm_init(&deflate_flags);
2057 put_8bit(deflate_flags); /* extra flags */
2058 put_8bit(3); /* OS identifier = 3 (Unix) */
2062 /* Write the crc and uncompressed size */
/* Gzip trailer per RFC 1952: CRC32 (elided above) then ISIZE. */
2064 put_32bit(G1.isize);
2070 /* ======================================================================== */
/* Entry point used by bbunpack(): clears buffers, resets all tree
 * descriptors in G2, then compresses stdin-like input via zip(). */
2072 IF_DESKTOP(long long) int FAST_FUNC pack_gzip(transformer_state_t *xstate UNUSED_PARAM)
2074 /* Clear input and output buffers */
/* Re-initialize G2 for every file; the tree descriptors bind the dynamic
 * trees to their static counterparts and extra-bit tables. */
2082 memset(&G2, 0, sizeof(G2));
2083 G2.l_desc.dyn_tree = G2.dyn_ltree;
2084 G2.l_desc.static_tree = G2.static_ltree;
2085 G2.l_desc.extra_bits = extra_lbits;
2086 G2.l_desc.extra_base = LITERALS + 1;
2087 G2.l_desc.elems = L_CODES;
2088 G2.l_desc.max_length = MAX_BITS;
2089 //G2.l_desc.max_code = 0;
2090 G2.d_desc.dyn_tree = G2.dyn_dtree;
2091 G2.d_desc.static_tree = G2.static_dtree;
2092 G2.d_desc.extra_bits = extra_dbits;
2093 //G2.d_desc.extra_base = 0;
2094 G2.d_desc.elems = D_CODES;
2095 G2.d_desc.max_length = MAX_BITS;
2096 //G2.d_desc.max_code = 0;
2097 G2.bl_desc.dyn_tree = G2.bl_tree;
2098 //G2.bl_desc.static_tree = NULL;
2099 G2.bl_desc.extra_bits = extra_blbits,
2100 //G2.bl_desc.extra_base = 0;
2101 G2.bl_desc.elems = BL_CODES;
2102 G2.bl_desc.max_length = MAX_BL_BITS;
2103 //G2.bl_desc.max_code = 0;
2106 /* Saving of timestamp is disabled. Why?
2107 * - it is not Y2038-safe.
2108 * - some people want deterministic results
2109 * (normally they'd use -n, but our -n is a nop).
2111 * Per RFC 1952, gzfile.time=0 is "no timestamp".
2112 * If users will demand this to be reinstated,
2113 * implement -n "don't save timestamp".
2117 fstat(STDIN_FILENO, &s);
2125 #if ENABLE_FEATURE_GZIP_LONG_OPTIONS
/* Long-option table: each "name\0" maps to the single-char option that
 * follows it; passed to the option parser in gzip_main(). */
2126 static const char gzip_longopts[] ALIGN1 =
2127 "stdout\0" No_argument "c"
2128 "to-stdout\0" No_argument "c"
2129 "force\0" No_argument "f"
2130 "verbose\0" No_argument "v"
2132 "decompress\0" No_argument "d"
2133 "uncompress\0" No_argument "d"
2134 "test\0" No_argument "t"
2136 "quiet\0" No_argument "q"
2137 "fast\0" No_argument "1"
2138 "best\0" No_argument "9"
2143 * Linux kernel build uses gzip -d -n. We accept and ignore -n.
2146 * gzip: do not save the original file name and time stamp.
2147 * (The original name is always saved if the name had to be truncated.)
2148 * gunzip: do not restore the original file name/time even if present
2149 * (remove only the gzip suffix from the compressed file name).
2150 * This option is the default when decompressing.
2152 * gzip: always save the original file name and time stamp (this is the default)
2153 * gunzip: restore the original file name and time stamp if present.
/* Applet entry point: parses options, dispatches -d/-t to gunzip_main(),
 * allocates the global buffers, then runs bbunpack() with pack_gzip. */
2156 int gzip_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
2158 int gzip_main(int argc, char **argv)
2160 int gzip_main(int argc UNUSED_PARAM, char **argv)
2165 #if ENABLE_FEATURE_GZIP_LONG_OPTIONS
2166 applet_long_options = gzip_longopts;
2168 /* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */
2169 opt = getopt32(argv, "cfv" IF_GUNZIP("dt") "q123456789n");
2170 #if ENABLE_GUNZIP /* gunzip_main may not be visible... */
2171 if (opt & 0x18) // -d and/or -t
2172 return gunzip_main(argc, argv);
2174 option_mask32 &= 0x7; /* ignore -q, -0..9 */
2175 //if (opt & 0x1) // -c
2176 //if (opt & 0x2) // -f
2177 //if (opt & 0x4) // -v
/* G1 lives at the start of the allocation, G2 right after it; PTR_TO_GLOBALS
 * points at G2 (hence the "+ sizeof(struct globals)" offset). */
2180 SET_PTR_TO_GLOBALS((char *)xzalloc(sizeof(struct globals)+sizeof(struct globals2))
2181 + sizeof(struct globals));
2183 /* Allocate all global buffers (for DYN_ALLOC option) */
2184 ALLOC(uch, G1.l_buf, INBUFSIZ);
2185 ALLOC(uch, G1.outbuf, OUTBUFSIZ);
2186 ALLOC(ush, G1.d_buf, DIST_BUFSIZE);
2187 ALLOC(uch, G1.window, 2L * WSIZE);
2188 ALLOC(ush, G1.prev, 1L << BITS);
2190 /* Initialize the CRC32 table */
2191 global_crc32_table = crc32_filltable(NULL, 0);
2193 return bbunpack(argv, pack_gzip, append_ext, "gz");