1 /* vi: set sw=4 ts=4: */
3 * Gzip implementation for busybox
5 * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly.
7 * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com>
8 * "this is a stripped down version of gzip I put into busybox, it does
9 * only standard in to standard out with -9 compression. It also requires
10 * the zcat module for some important functions."
12 * Adjusted further by Erik Andersen <andersen@codepoet.org> to support
13 * files as well as stdin/stdout, and to generally behave itself wrt
14 * command line handling.
16 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
18 /* TODO: full support for -v for DESKTOP
19 * "/usr/bin/gzip -v a bogus aa" should say:
20 a: 85.1% -- replaced with a.gz
21 gzip: bogus: No such file or directory
22 aa: 85.1% -- replaced with aa.gz
25 //config: bool "gzip (19 kb)"
28 //config: gzip is used to compress files.
29 //config: It's probably the most widely used UNIX compression program.
31 //config:config FEATURE_GZIP_LONG_OPTIONS
32 //config: bool "Enable long options"
34 //config: depends on GZIP && LONG_OPTS
36 //config:config GZIP_FAST
37 //config: int "Trade memory for speed (0:small,slow - 2:fast,big)"
40 //config: depends on GZIP
42 //config: Enable big memory options for gzip.
43 //config: 0: small buffers, small hash-tables
44 //config: 1: larger buffers, larger hash-tables
45 //config: 2: larger buffers, largest hash-tables
46 //config: Larger models may give slightly better compression
48 //config:config FEATURE_GZIP_LEVELS
49 //config: bool "Enable compression levels"
51 //config: depends on GZIP
53 //config: Enable support for compression levels 4-9. The default level
54 //config: is 6. If levels 1-3 are specified, 4 is used.
55 //config: If this option is not selected, -N options are ignored and -9
58 //config:config FEATURE_GZIP_DECOMPRESS
59 //config: bool "Enable decompression"
61 //config: depends on GZIP || GUNZIP || ZCAT
63 //config: Enable -d (--decompress) and -t (--test) options for gzip.
64 //config: This will be automatically selected if gunzip or zcat is
67 //applet:IF_GZIP(APPLET(gzip, BB_DIR_BIN, BB_SUID_DROP))
69 //kbuild:lib-$(CONFIG_GZIP) += gzip.o
71 //usage:#define gzip_trivial_usage
72 //usage: "[-cfk" IF_FEATURE_GZIP_DECOMPRESS("dt") IF_FEATURE_GZIP_LEVELS("123456789") "] [FILE]..."
73 //usage:#define gzip_full_usage "\n\n"
74 //usage: "Compress FILEs (or stdin)\n"
75 //usage: IF_FEATURE_GZIP_LEVELS(
76 //usage: "\n -1..9 Compression level"
78 //usage: IF_FEATURE_GZIP_DECOMPRESS(
79 //usage: "\n -d Decompress"
80 //usage: "\n -t Test file integrity"
82 //usage: "\n -c Write to stdout"
83 //usage: "\n -f Force"
84 //usage: "\n -k Keep input files"
86 //usage:#define gzip_example_usage
87 //usage: "$ ls -la /tmp/busybox*\n"
88 //usage: "-rw-rw-r-- 1 andersen andersen 1761280 Apr 14 17:47 /tmp/busybox.tar\n"
89 //usage: "$ gzip /tmp/busybox.tar\n"
90 //usage: "$ ls -la /tmp/busybox*\n"
91 //usage: "-rw-rw-r-- 1 andersen andersen 554058 Apr 14 17:49 /tmp/busybox.tar.gz\n"
94 #include "bb_archive.h"
97 /* ===========================================================================
100 /* Diagnostic functions */
103 # define Assert(cond,msg) { if (!(cond)) bb_error_msg(msg); }
104 # define Trace(x) fprintf x
105 # define Tracev(x) {if (verbose) fprintf x; }
106 # define Tracevv(x) {if (verbose > 1) fprintf x; }
107 # define Tracec(c,x) {if (verbose && (c)) fprintf x; }
108 # define Tracecv(c,x) {if (verbose > 1 && (c)) fprintf x; }
110 # define Assert(cond,msg)
115 # define Tracecv(c,x)
119 /* ===========================================================================
121 #if CONFIG_GZIP_FAST == 0
123 #elif CONFIG_GZIP_FAST == 1
125 #elif CONFIG_GZIP_FAST == 2
128 # error "Invalid CONFIG_GZIP_FAST value"
133 # define INBUFSIZ 0x2000 /* input buffer size */
135 # define INBUFSIZ 0x8000 /* input buffer size */
141 # define OUTBUFSIZ 8192 /* output buffer size */
143 # define OUTBUFSIZ 16384 /* output buffer size */
149 # define DIST_BUFSIZE 0x2000 /* buffer for distances, see trees.c */
151 # define DIST_BUFSIZE 0x8000 /* buffer for distances, see trees.c */
156 #define ASCII_FLAG 0x01 /* bit 0 set: file probably ascii text */
157 #define CONTINUATION 0x02 /* bit 1 set: continuation of multi-part gzip file */
158 #define EXTRA_FIELD 0x04 /* bit 2 set: extra field present */
159 #define ORIG_NAME 0x08 /* bit 3 set: original file name present */
160 #define COMMENT 0x10 /* bit 4 set: file comment present */
161 #define RESERVED 0xC0 /* bit 6,7: reserved */
163 /* internal file attribute */
164 #define UNKNOWN 0xffff
169 # define WSIZE 0x8000 /* window size--must be a power of two, and */
170 #endif /* at least 32K for zip's deflate method */
173 #define MAX_MATCH 258
174 /* The minimum and maximum match lengths */
176 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
177 /* Minimum amount of lookahead, except at the end of the input file.
178 * See deflate.c for comments about the MIN_MATCH+1.
181 #define MAX_DIST (WSIZE-MIN_LOOKAHEAD)
182 /* In order to simplify the code, particularly on 16 bit machines, match
183 * distances are limited to MAX_DIST instead of WSIZE.
187 # define MAX_PATH_LEN 1024 /* max pathname length */
190 #define seekable() 0 /* force sequential output */
191 #define translate_eol 0 /* no option -a yet */
196 #define INIT_BITS 9 /* Initial number of bits per code */
198 #define BIT_MASK 0x1f /* Mask for 'number of compression bits' */
199 /* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free.
200 * It's a pity that old uncompress does not check bit 0x20. That makes
201 * extension of the format actually undesirable because old compress
202 * would just crash on the new format instead of giving a meaningful
203 * error message. It does check the number of bits, but it's more
204 * helpful to say "unsupported format, get a new version" than
205 * "can only handle 16 bits".
209 # define MAX_SUFFIX MAX_EXT_CHARS
211 # define MAX_SUFFIX 30
215 /* ===========================================================================
216 * Compile with MEDIUM_MEM to reduce the memory requirements or
217 * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the
218 * entire input file can be held in memory (not possible on 16 bit systems).
219 * Warning: defining these symbols affects HASH_BITS (see below) and thus
220 * affects the compression ratio. The compressed output
221 * is still correct, and might even be smaller in some cases.
225 # define HASH_BITS 13 /* Number of bits used to hash strings */
228 # define HASH_BITS 14
231 # define HASH_BITS 15
232 /* For portability to 16 bit machines, do not use values above 15. */
235 #define HASH_SIZE (unsigned)(1<<HASH_BITS)
236 #define HASH_MASK (HASH_SIZE-1)
237 #define WMASK (WSIZE-1)
238 /* HASH_SIZE and WSIZE must be powers of two */
240 # define TOO_FAR 4096
242 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
245 /* ===========================================================================
246 * These types are not really 'char', 'short' and 'long'
249 typedef uint16_t ush;
250 typedef uint32_t ulg;
254 typedef unsigned IPos;
255 /* A Pos is an index in the character window. We use short instead of int to
256 * save space in the various tables. IPos is used only for parameter passing.
260 WINDOW_SIZE = 2 * WSIZE,
261 /* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the
262 * input file length plus MIN_LOOKAHEAD.
265 #if !ENABLE_FEATURE_GZIP_LEVELS
267 max_chain_length = 4096,
268 /* To speed up deflation, hash chains are never searched beyond this length.
269 * A higher limit improves compression ratio but degrades the speed.
272 max_lazy_match = 258,
273 /* Attempt to find a better match only when the current match is strictly
274 * smaller than this value. This mechanism is used only for compression
278 max_insert_length = max_lazy_match,
279 /* Insert new strings in the hash table only if the match length
280 * is not greater than this length. This saves time but degrades compression.
281 * max_insert_length is used only for compression levels <= 3.
285 /* Use a faster search when the previous match is longer than this */
287 /* Values for max_lazy_match, good_match and max_chain_length, depending on
288 * the desired pack level (0..9). The values given below have been tuned to
289 * exclude worst case performance for pathological files. Better values may be
290 * found for specific files.
293 nice_match = 258, /* Stop searching when current match exceeds this */
294 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
295 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
298 #endif /* ENABLE_FEATURE_GZIP_LEVELS */
304 #if ENABLE_FEATURE_GZIP_LEVELS
305 unsigned max_chain_length;
306 unsigned max_lazy_match;
309 #define max_chain_length (G1.max_chain_length)
310 #define max_lazy_match (G1.max_lazy_match)
311 #define good_match (G1.good_match)
312 #define nice_match (G1.nice_match)
315 /* window position at the beginning of the current output block. Gets
316 * negative when the window is moved backwards.
320 unsigned ins_h; /* hash index of string to be inserted */
322 /* Number of bits by which ins_h and del_h must be shifted at each
323 * input step. It must be such that after MIN_MATCH steps, the oldest
324 * byte no longer takes part in the hash key, that is:
325 * H_SHIFT * MIN_MATCH >= HASH_BITS
327 #define H_SHIFT ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH)
329 /* Length of the best match at previous step. Matches not greater than this
330 * are discarded. This is used in the lazy match evaluation.
332 unsigned prev_length;
334 unsigned strstart; /* start of string to insert */
335 unsigned match_start; /* start of matching string */
336 unsigned lookahead; /* number of valid bytes ahead in window */
338 /* number of input bytes */
339 ulg isize; /* only 32 bits stored in .gz file */
341 /* bbox always use stdin/stdout */
342 #define ifd STDIN_FILENO /* input file descriptor */
343 #define ofd STDOUT_FILENO /* output file descriptor */
346 unsigned insize; /* valid bytes in l_buf */
348 unsigned outcnt; /* bytes in output buffer */
349 smallint eofile; /* flag set at end of input file */
351 /* ===========================================================================
352 * Local data used by the "bit string" routines.
355 /* Output buffer. bits are inserted starting at the bottom (least significant
358 unsigned bi_buf; /* was unsigned short */
361 #define BUF_SIZE (int)(8 * sizeof(G1.bi_buf))
363 /* Number of bits used within bi_buf. (bi_buf might be implemented on
364 * more than 16 bits on some systems.)
369 ulg bits_sent; /* bit length of the compressed data */
372 /*uint32_t *crc_32_tab;*/
373 uint32_t crc; /* shift register contents */
375 /* ===========================================================================
377 #define DECLARE(type, array, size) \
379 #define ALLOC(type, array, size) \
380 array = xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type))
381 #define FREE(array) \
382 do { free(array); array = NULL; } while (0)
386 /* buffer for literals or lengths */
387 /* DECLARE(uch, l_buf, LIT_BUFSIZE); */
388 DECLARE(uch, l_buf, INBUFSIZ);
390 DECLARE(ush, d_buf, DIST_BUFSIZE);
391 DECLARE(uch, outbuf, OUTBUFSIZ);
393 /* Sliding window. Input bytes are read into the second half of the window,
394 * and move to the first half later to keep a dictionary of at least WSIZE
395 * bytes. With this organization, matches are limited to a distance of
396 * WSIZE-MAX_MATCH bytes, but this ensures that IO is always
397 * performed with a length multiple of the block size. Also, it limits
398 * the window size to 64K, which is quite useful on MSDOS.
399 * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would
400 * be less efficient).
402 DECLARE(uch, window, 2L * WSIZE);
404 /* Link to older string with same hash index. To limit the size of this
405 * array to 64K, this link is maintained only for the last 32K strings.
406 * An index in this array is thus a window index modulo 32K.
408 /* DECLARE(Pos, prev, WSIZE); */
409 DECLARE(ush, prev, 1L << BITS);
411 /* Heads of the hash chains or 0. */
412 /* DECLARE(Pos, head, 1<<HASH_BITS); */
413 #define head (G1.prev + WSIZE) /* hash head (see deflate.c) */
416 #define G1 (*(ptr_to_globals - 1))
419 /* ===========================================================================
420 * Write the output buffer outbuf[0..outcnt-1] and update bytes_out.
421 * (used for the compressed data only)
423 static void flush_outbuf(void)
428 xwrite(ofd, (char *) G1.outbuf, G1.outcnt);
433 /* ===========================================================================
435 /* put_8bit is used for the compressed output */
436 #define put_8bit(c) \
438 G1.outbuf[G1.outcnt++] = (c); \
439 if (G1.outcnt == OUTBUFSIZ) \
443 /* Output a 16 bit value, lsb first */
444 static void put_16bit(ush w)
446 /* GCC 4.2.1 won't optimize out redundant loads of G1.outcnt
447 * (probably because of fear of aliasing with G1.outbuf[]
448 * stores), do it explicitly:
450 unsigned outcnt = G1.outcnt;
451 uch *dst = &G1.outbuf[outcnt];
453 #if BB_UNALIGNED_MEMACCESS_OK && BB_LITTLE_ENDIAN
454 if (outcnt < OUTBUFSIZ-2) {
456 ush *dst16 = (void*) dst;
457 *dst16 = w; /* unaligned LSB 16-bit store */
458 G1.outcnt = outcnt + 2;
466 if (outcnt < OUTBUFSIZ-2) {
469 G1.outcnt = outcnt + 2;
474 /* Slowpath: we will need to do flush_outbuf() */
475 G1.outcnt = ++outcnt;
476 if (outcnt == OUTBUFSIZ)
481 static void put_32bit(ulg n)
487 /* ===========================================================================
488 * Run a set of bytes through the crc shift register. If s is a NULL
489 * pointer, then initialize the crc shift register contents instead.
490 * Return the current crc in either case.
492 static void updcrc(uch * s, unsigned n)
494 G1.crc = crc32_block_endian0(G1.crc, s, n, global_crc32_table /*G1.crc_32_tab*/);
498 /* ===========================================================================
499 * Read a new buffer from the current input file, perform end-of-line
500 * translation, and update the crc and input file size.
501 * IN assertion: size >= 2 (for end-of-line translation)
503 static unsigned file_read(void *buf, unsigned size)
507 Assert(G1.insize == 0, "l_buf not empty");
509 len = safe_read(ifd, buf, size);
510 if (len == (unsigned)(-1) || len == 0)
519 /* ===========================================================================
520 * Send a value on a given number of bits.
521 * IN assertion: length <= 16 and value fits in length bits.
523 static void send_bits(unsigned value, unsigned length)
528 Tracev((stderr, " l %2d v %4x ", length, value));
529 Assert(length > 0 && length <= 15, "invalid length");
530 G1.bits_sent += length;
532 BUILD_BUG_ON(BUF_SIZE != 32 && BUF_SIZE != 16);
534 new_buf = G1.bi_buf | (value << G1.bi_valid);
535 /* NB: the above may sometimes do "<< 32" shift (undefined)
536 * if check below is changed to "length > BUF_SIZE" instead of >= */
537 length += G1.bi_valid;
539 /* If bi_buf is full */
540 if (length >= BUF_SIZE) {
541 /* ...use (valid) bits from bi_buf and
542 * (BUF_SIZE - bi_valid) bits from value,
543 * leaving (width - (BUF_SIZE-bi_valid)) unused bits in value.
545 value >>= (BUF_SIZE - G1.bi_valid);
546 if (BUF_SIZE == 32) {
547 put_32bit(new_buf); /* maybe unroll to 2*put_16bit()? */
555 G1.bi_valid = length;
559 /* ===========================================================================
560 * Reverse the first len bits of a code, using straightforward code (a faster
561 * method would use a table)
562 * IN assertion: 1 <= len <= 15
564 static unsigned bi_reverse(unsigned code, int len)
570 if (--len <= 0) return res;
577 /* ===========================================================================
578 * Write out any remaining bits in an incomplete byte.
580 static void bi_windup(void)
582 unsigned bits = G1.bi_buf;
583 int cnt = G1.bi_valid;
593 G1.bits_sent = (G1.bits_sent + 7) & ~7;
598 /* ===========================================================================
599 * Copy a stored block to the zip file, storing first the length and its
600 * one's complement if requested.
602 static void copy_block(char *buf, unsigned len, int header)
604 bi_windup(); /* align on byte boundary */
610 G1.bits_sent += 2 * 16;
614 G1.bits_sent += (ulg) len << 3;
622 /* ===========================================================================
623 * Fill the window when the lookahead becomes insufficient.
624 * Updates strstart and lookahead, and sets eofile if end of input file.
625 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
626 * OUT assertions: at least one byte has been read, or eofile is set;
627 * file reads are performed for at least two bytes (required for the
628 * translate_eol option).
630 static void fill_window(void)
633 unsigned more = WINDOW_SIZE - G1.lookahead - G1.strstart;
634 /* Amount of free space at the end of the window. */
636 /* If the window is almost full and there is insufficient lookahead,
637 * move the upper half to the lower one to make room in the upper half.
639 if (more == (unsigned) -1) {
640 /* Very unlikely, but possible on 16 bit machine if strstart == 0
641 * and lookahead == 1 (input done one byte at time)
644 } else if (G1.strstart >= WSIZE + MAX_DIST) {
645 /* By the IN assertion, the window is not empty so we can't confuse
646 * more == 0 with more == 64K on a 16 bit machine.
648 Assert(WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");
650 memcpy(G1.window, G1.window + WSIZE, WSIZE);
651 G1.match_start -= WSIZE;
652 G1.strstart -= WSIZE; /* we now have strstart >= MAX_DIST: */
654 G1.block_start -= WSIZE;
656 for (n = 0; n < HASH_SIZE; n++) {
658 head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
660 for (n = 0; n < WSIZE; n++) {
662 G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
663 /* If n is not on any hash chain, prev[n] is garbage but
664 * its value will never be used.
669 /* At this point, more >= 2 */
671 n = file_read(G1.window + G1.strstart + G1.lookahead, more);
672 if (n == 0 || n == (unsigned) -1) {
679 /* Both users fill window with the same loop: */
680 static void fill_window_if_needed(void)
682 while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
687 /* ===========================================================================
688 * Set match_start to the longest match starting at the given string and
689 * return its length. Matches shorter or equal to prev_length are discarded,
690 * in which case the result is equal to prev_length and match_start is
692 * IN assertions: cur_match is the head of the hash chain for the current
693 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
696 /* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or
697 * match.s. The code is functionally equivalent, so you can use the C version
700 static int longest_match(IPos cur_match)
702 unsigned chain_length = max_chain_length; /* max hash chain length */
703 uch *scan = G1.window + G1.strstart; /* current string */
704 uch *match; /* matched string */
705 int len; /* length of current match */
706 int best_len = G1.prev_length; /* best match length so far */
707 IPos limit = G1.strstart > (IPos) MAX_DIST ? G1.strstart - (IPos) MAX_DIST : 0;
708 /* Stop when cur_match becomes <= limit. To simplify the code,
709 * we prevent matches with the string of window index 0.
712 /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
713 * It is easy to get rid of this optimization if necessary.
715 #if HASH_BITS < 8 || MAX_MATCH != 258
716 # error Code too clever
718 uch *strend = G1.window + G1.strstart + MAX_MATCH;
719 uch scan_end1 = scan[best_len - 1];
720 uch scan_end = scan[best_len];
722 /* Do not waste too much time if we already have a good match: */
723 if (G1.prev_length >= good_match) {
726 Assert(G1.strstart <= WINDOW_SIZE - MIN_LOOKAHEAD, "insufficient lookahead");
729 Assert(cur_match < G1.strstart, "no future");
730 match = G1.window + cur_match;
732 /* Skip to next match if the match length cannot increase
733 * or if the match length is less than 2:
735 if (match[best_len] != scan_end
736 || match[best_len - 1] != scan_end1
737 || *match != *scan || *++match != scan[1]
742 /* The check at best_len-1 can be removed because it will be made
743 * again later. (This heuristic is not always a win.)
744 * It is not necessary to compare scan[2] and match[2] since they
745 * are always equal when the other bytes match, given that
746 * the hash keys are equal and that HASH_BITS >= 8.
750 /* We check for insufficient lookahead only every 8th comparison;
751 * the 256th check will be made at strstart+258.
754 } while (*++scan == *++match && *++scan == *++match &&
755 *++scan == *++match && *++scan == *++match &&
756 *++scan == *++match && *++scan == *++match &&
757 *++scan == *++match && *++scan == *++match && scan < strend);
759 len = MAX_MATCH - (int) (strend - scan);
760 scan = strend - MAX_MATCH;
762 if (len > best_len) {
763 G1.match_start = cur_match;
765 if (len >= nice_match)
767 scan_end1 = scan[best_len - 1];
768 scan_end = scan[best_len];
770 } while ((cur_match = G1.prev[cur_match & WMASK]) > limit
771 && --chain_length != 0);
778 /* ===========================================================================
779 * Check that the match at match_start is indeed a match.
781 static void check_match(IPos start, IPos match, int length)
783 /* check that the match is indeed a match */
784 if (memcmp(G1.window + match, G1.window + start, length) != 0) {
785 bb_error_msg(" start %d, match %d, length %d", start, match, length);
786 bb_error_msg("invalid match");
789 bb_error_msg("\\[%d,%d]", start - match, length);
791 bb_putchar_stderr(G1.window[start++]);
792 } while (--length != 0);
796 # define check_match(start, match, length) ((void)0)
800 /* trees.c -- output deflated data using Huffman coding
801 * Copyright (C) 1992-1993 Jean-loup Gailly
802 * This is free software; you can redistribute it and/or modify it under the
803 * terms of the GNU General Public License, see the file COPYING.
807 * Encode various sets of source values using variable-length
811 * The PKZIP "deflation" process uses several Huffman trees. The more
812 * common source values are represented by shorter bit sequences.
814 * Each code tree is stored in the ZIP file in a compressed form
815 * which is itself a Huffman encoding of the lengths of
816 * all the code strings (in ascending order by source values).
817 * The actual code strings are reconstructed from the lengths in
818 * the UNZIP process, as described in the "application note"
819 * (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program.
823 * Data Compression: Techniques and Applications, pp. 53-55.
824 * Lifetime Learning Publications, 1985. ISBN 0-534-03418-7.
827 * Data Compression: Methods and Theory, pp. 49-50.
828 * Computer Science Press, 1988. ISBN 0-7167-8156-5.
832 * Addison-Wesley, 1983. ISBN 0-201-06672-6.
836 * Allocate the match buffer, initialize the various tables [and save
837 * the location of the internal file attribute (ascii/binary) and
838 * method (DEFLATE/STORE) -- deleted in bbox]
840 * void ct_tally(int dist, int lc);
841 * Save the match info and tally the frequency counts.
843 * ulg flush_block(char *buf, ulg stored_len, int eof)
844 * Determine the best encoding for the current block: dynamic trees,
845 * static trees or store, and output the encoded block to the zip
846 * file. Returns the total compressed length for the file so far.
850 /* All codes must not exceed MAX_BITS bits */
852 #define MAX_BL_BITS 7
853 /* Bit length codes must not exceed MAX_BL_BITS bits */
855 #define LENGTH_CODES 29
856 /* number of length codes, not counting the special END_BLOCK code */
859 /* number of literal bytes 0..255 */
861 #define END_BLOCK 256
862 /* end of block literal code */
864 #define L_CODES (LITERALS+1+LENGTH_CODES)
865 /* number of Literal or Length codes, including the END_BLOCK code */
868 /* number of distance codes */
871 /* number of codes used to transfer the bit lengths */
873 /* extra bits for each length code */
874 static const uint8_t extra_lbits[LENGTH_CODES] ALIGN1 = {
875 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
879 /* extra bits for each distance code */
880 static const uint8_t extra_dbits[D_CODES] ALIGN1 = {
881 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,
882 10, 10, 11, 11, 12, 12, 13, 13
885 /* extra bits for each bit length code */
886 static const uint8_t extra_blbits[BL_CODES] ALIGN1 = {
887 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };
889 /* number of codes at each bit length for an optimal tree */
890 static const uint8_t bl_order[BL_CODES] ALIGN1 = {
891 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
893 #define STORED_BLOCK 0
894 #define STATIC_TREES 1
896 /* The three kinds of block type */
900 # define LIT_BUFSIZE 0x2000
903 # define LIT_BUFSIZE 0x4000
905 # define LIT_BUFSIZE 0x8000
910 # define DIST_BUFSIZE LIT_BUFSIZE
912 /* Sizes of match buffers for literals/lengths and distances. There are
913 * 4 reasons for limiting LIT_BUFSIZE to 64K:
914 * - frequencies can be kept in 16 bit counters
915 * - if compression is not successful for the first block, all input data is
916 * still in the window so we can still emit a stored block even when input
917 * comes from standard input. (This can also be done for all blocks if
918 * LIT_BUFSIZE is not greater than 32K.)
919 * - if compression is not successful for a file smaller than 64K, we can
920 * even emit a stored file instead of a stored block (saving 5 bytes).
921 * - creating new Huffman trees less frequently may not provide fast
922 * adaptation to changes in the input data statistics. (Take for
923 * example a binary file with poorly compressible code followed by
924 * a highly compressible string table.) Smaller buffer sizes give
925 * fast adaptation but have of course the overhead of transmitting trees
927 * - I can't count above 4
928 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
929 * memory at the expense of compression). Some optimizations would be possible
930 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
933 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
935 /* repeat a zero length 3-10 times (3 bits of repeat count) */
936 #define REPZ_11_138 18
937 /* repeat a zero length 11-138 times (7 bits of repeat count) */
939 /* ===========================================================================
941 /* Data structure describing a single value and its code string. */
942 typedef struct ct_data {
944 ush freq; /* frequency count */
945 ush code; /* bit string */
948 ush dad; /* father node in Huffman tree */
949 ush len; /* length of bit string */
958 #define HEAP_SIZE (2*L_CODES + 1)
959 /* maximum heap size */
961 typedef struct tree_desc {
962 ct_data *dyn_tree; /* the dynamic tree */
963 ct_data *static_tree; /* corresponding static tree or NULL */
964 const uint8_t *extra_bits; /* extra bits for each code or NULL */
965 int extra_base; /* base index for extra_bits */
966 int elems; /* max number of elements in the tree */
967 int max_length; /* max bit length for the codes */
968 int max_code; /* largest code with non zero frequency */
973 ush heap[HEAP_SIZE]; /* heap used to build the Huffman trees */
974 int heap_len; /* number of elements in the heap */
975 int heap_max; /* element of largest frequency */
977 /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
978 * The same heap array is used to build all trees.
981 ct_data dyn_ltree[HEAP_SIZE]; /* literal and length tree */
982 ct_data dyn_dtree[2 * D_CODES + 1]; /* distance tree */
984 ct_data static_ltree[L_CODES + 2];
986 /* The static literal tree. Since the bit lengths are imposed, there is no
987 * need for the L_CODES extra codes used during heap construction. However
988 * The codes 286 and 287 are needed to build a canonical tree (see ct_init
992 ct_data static_dtree[D_CODES];
994 /* The static distance tree. (Actually a trivial tree since all codes use
998 ct_data bl_tree[2 * BL_CODES + 1];
1000 /* Huffman tree for the bit lengths */
1006 ush bl_count[MAX_BITS + 1];
1008 /* The lengths of the bit length codes are sent in order of decreasing
1009 * probability, to avoid transmitting the lengths for unused bit length codes.
1012 uch depth[2 * L_CODES + 1];
1014 /* Depth of each subtree used as tie breaker for trees of equal frequency */
1016 uch length_code[MAX_MATCH - MIN_MATCH + 1];
1018 /* length code for each normalized match length (0 == MIN_MATCH) */
1022 /* distance codes. The first 256 values correspond to the distances
1023 * 3 .. 258, the last 256 values correspond to the top 8 bits of
1024 * the 15 bit distances.
1027 int base_length[LENGTH_CODES];
1029 /* First normalized length for each code (0 = MIN_MATCH) */
1031 int base_dist[D_CODES];
1033 /* First normalized distance for each code (0 = distance of 1) */
1035 uch flag_buf[LIT_BUFSIZE / 8];
1037 /* flag_buf is a bit array distinguishing literals from lengths in
1038 * l_buf, thus indicating the presence or absence of a distance.
1041 unsigned last_lit; /* running index in l_buf */
1042 unsigned last_dist; /* running index in d_buf */
1043 unsigned last_flags; /* running index in flag_buf */
1044 uch flags; /* current flags not yet saved in flag_buf */
1045 uch flag_bit; /* current bit used in flags */
1047 /* bits are filled in flags starting at bit 0 (least significant).
1048 * Note: these flags are overkill in the current code since we don't
1049 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
1052 ulg opt_len; /* bit length of current block with optimal trees */
1053 ulg static_len; /* bit length of current block with static trees */
1055 ulg compressed_len; /* total bit length of compressed file */
1058 #define G2ptr ((struct globals2*)(ptr_to_globals))
1062 /* ===========================================================================
1064 static void gen_codes(ct_data * tree, int max_code);
1065 static void build_tree(tree_desc * desc);
1066 static void scan_tree(ct_data * tree, int max_code);
1067 static void send_tree(ct_data * tree, int max_code);
1068 static int build_bl_tree(void);
1069 static void send_all_trees(int lcodes, int dcodes, int blcodes);
1070 static void compress_block(ct_data * ltree, ct_data * dtree);
1074 /* Send a code of the given tree. c and tree must not have side effects */
1075 # define SEND_CODE(c, tree) send_bits(tree[c].Code, tree[c].Len)
1077 # define SEND_CODE(c, tree) \
1079 if (verbose > 1) bb_error_msg("\ncd %3d ", (c)); \
1080 send_bits(tree[c].Code, tree[c].Len); \
1084 #define D_CODE(dist) \
1085 ((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)])
1086 /* Mapping from a distance to a distance code. dist is the distance - 1 and
1087 * must not have side effects. dist_code[256] and dist_code[257] are never
1089 * The arguments must not have side effects.
1093 /* ===========================================================================
1094 * Initialize a new block.
1096 static void init_block(void)
1098 int n; /* iterates over tree elements */
1100 /* Initialize the trees. */
1101 for (n = 0; n < L_CODES; n++)
1102 G2.dyn_ltree[n].Freq = 0;
1103 for (n = 0; n < D_CODES; n++)
1104 G2.dyn_dtree[n].Freq = 0;
1105 for (n = 0; n < BL_CODES; n++)
1106 G2.bl_tree[n].Freq = 0;
1108 G2.dyn_ltree[END_BLOCK].Freq = 1;
1109 G2.opt_len = G2.static_len = 0;
1110 G2.last_lit = G2.last_dist = G2.last_flags = 0;
1116 /* ===========================================================================
1117  * Restore the heap property by moving down the tree starting at node k,
1118  * exchanging a node with the smallest of its two sons if necessary, stopping
1119  * when the heap property is re-established (each father smaller than its
1123 /* Compares two subtrees, using the tree depth as tie breaker when
1124  * the subtrees have equal frequency. This minimizes the worst case length. */
1125 #define SMALLER(tree, n, m) \
1126 	(tree[n].Freq < tree[m].Freq \
1127 	|| (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))
/* Sift the element at heap index k down until both children are >= it.
 * NOTE(review): the declaration of v (the value being sifted) and the
 * j/k advance and break statements are on lines elided from this dump. */
1129 static void pqdownheap(ct_data * tree, int k)
1132 	int j = k << 1; /* left son of k */
1134 	while (j <= G2.heap_len) {
1135 		/* Set j to the smallest of the two sons: */
1136 		if (j < G2.heap_len && SMALLER(tree, G2.heap[j + 1], G2.heap[j]))
1139 		/* Exit if v is smaller than both sons */
1140 		if (SMALLER(tree, v, G2.heap[j]))
1143 		/* Exchange v with the smallest son */
1144 		G2.heap[k] = G2.heap[j];
1147 		/* And continue down the tree, setting j to the left son of k */
1154 /* ===========================================================================
1155  * Compute the optimal bit lengths for a tree and update the total bit length
1156  * for the current block.
1157  * IN assertion: the fields freq and dad are set, heap[heap_max] and
1158  * above are the tree nodes sorted by increasing frequency.
1159  * OUT assertions: the field len is set to the optimal bit length, the
1160  * array bl_count contains the frequencies for each bit length.
1161  * The length opt_len is updated; static_len is also updated if stree is
1164 static void gen_bitlen(tree_desc * desc)
1166 	ct_data *tree = desc->dyn_tree;
1167 	const uint8_t *extra = desc->extra_bits;
1168 	int base = desc->extra_base;
1169 	int max_code = desc->max_code;
1170 	int max_length = desc->max_length;
1171 	ct_data *stree = desc->static_tree;
1172 	int h; /* heap index */
1173 	int n, m; /* iterate over the tree elements */
1174 	int bits; /* bit length */
1175 	int xbits; /* extra bits */
1176 	ush f; /* frequency */
1177 	int overflow = 0; /* number of elements with bit length too large */
1179 	for (bits = 0; bits <= MAX_BITS; bits++)
1180 		G2.bl_count[bits] = 0;
1182 	/* In a first pass, compute the optimal bit lengths (which may
1183 	 * overflow in the case of the bit length tree).
1185 	tree[G2.heap[G2.heap_max]].Len = 0; /* root of the heap */
/* Walk the heap from root downwards: each node's length is its
 * father's length + 1, clamped to max_length (counting overflows).
 * NOTE(review): several lines (n = G2.heap[h]; overflow++; bits clamp;
 * leaf test; f = tree[n].Freq; stree != NULL guard) are elided here. */
1187 	for (h = G2.heap_max + 1; h < HEAP_SIZE; h++) {
1189 		bits = tree[tree[n].Dad].Len + 1;
1190 		if (bits > max_length) {
1194 		tree[n].Len = (ush) bits;
1195 		/* We overwrite tree[n].Dad which is no longer needed */
1198 			continue; /* not a leaf node */
1200 		G2.bl_count[bits]++;
1203 			xbits = extra[n - base];
1205 		G2.opt_len += (ulg) f *(bits + xbits);
1208 			G2.static_len += (ulg) f * (stree[n].Len + xbits);
/* Second phase: repair codes that exceeded max_length by moving
 * leaves down the tree until no overflow remains. */
1213 	Trace((stderr, "\nbit length overflow\n"));
1214 	/* This happens for example on obj2 and pic of the Calgary corpus */
1216 		/* Find the first bit length which could increase: */
1218 		bits = max_length - 1;
1219 		while (G2.bl_count[bits] == 0)
1221 		G2.bl_count[bits]--; /* move one leaf down the tree */
1222 		G2.bl_count[bits + 1] += 2; /* move one overflow item as its brother */
1223 		G2.bl_count[max_length]--;
1224 		/* The brother of the overflow item also moves one step up,
1225 		 * but this does not affect bl_count[max_length]
1228 	} while (overflow > 0);
1230 	/* Now recompute all bit lengths, scanning in increasing frequency.
1231 	 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
1232 	 * lengths instead of fixing only the wrong ones. This idea is taken
1233 	 * from 'ar' written by Haruhiko Okumura.)
1235 	for (bits = max_length; bits != 0; bits--) {
1236 		n = G2.bl_count[bits];
/* Assign the recomputed length to each leaf and fix opt_len by the
 * delta between the new and old length (elided lines pick m from the
 * sorted heap and decrement n). */
1241 			if (tree[m].Len != (unsigned) bits) {
1242 				Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits));
1243 				G2.opt_len += ((int32_t) bits - tree[m].Len) * tree[m].Freq;
1252 /* ===========================================================================
1253  * Generate the codes for a given tree and bit counts (which need not be
1255  * IN assertion: the array bl_count contains the bit length statistics for
1256  * the given tree and the field len is set for all tree elements.
1257  * OUT assertion: the field code is set for all tree elements of non
1260 static void gen_codes(ct_data * tree, int max_code)
1262 	ush next_code[MAX_BITS + 1]; /* next code value for each bit length */
1263 	ush code = 0; /* running code value */
1264 	int bits; /* bit index */
1265 	int n; /* code index */
1267 	/* The distribution counts are first used to generate the code values
1268 	 * without bit reversal.
/* Canonical Huffman: the first code of length 'bits' is (previous
 * first code + count of shorter codes) shifted left by one. */
1270 	for (bits = 1; bits <= MAX_BITS; bits++) {
1271 		next_code[bits] = code = (code + G2.bl_count[bits - 1]) << 1;
1273 	/* Check that the bit counts in bl_count are consistent. The last code
1276 	Assert(code + G2.bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
1277 			"inconsistent bit counts");
1278 	Tracev((stderr, "\ngen_codes: max_code %d ", max_code));
1280 	for (n = 0; n <= max_code; n++) {
1281 		int len = tree[n].Len;
/* NOTE(review): the "if (len == 0) continue;" guard for unused symbols
 * is on a line elided from this dump. */
1285 		/* Now reverse the bits */
1286 		tree[n].Code = bi_reverse(next_code[len]++, len);
1288 		Tracec(tree != G2.static_ltree,
1289 			(stderr, "\nn %3d %c l %2d c %4x (%x) ", n,
1290 			(n > ' ' ? n : ' '), len, tree[n].Code,
1291 			next_code[len] - 1));
1296 /* ===========================================================================
1297  * Construct one Huffman tree and assigns the code bit strings and lengths.
1298  * Update the total bit length for the current block.
1299  * IN assertion: the field freq is set for all tree elements.
1300  * OUT assertions: the fields len and code are set to the optimal bit length
1301  * and corresponding code. The length opt_len is updated; static_len is
1302  * also updated if stree is not null. The field max_code is set.
1305 /* Remove the smallest element from the heap and recreate the heap with
1306  * one less element. Updates heap and heap_len. */
1309 /* Index within the heap array of least frequent node in the Huffman tree */
1311 #define PQREMOVE(tree, top) \
1313 	top = G2.heap[SMALLEST]; \
1314 	G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \
1315 	pqdownheap(tree, SMALLEST); \
1318 static void build_tree(tree_desc * desc)
1320 	ct_data *tree = desc->dyn_tree;
1321 	ct_data *stree = desc->static_tree;
1322 	int elems = desc->elems;
1323 	int n, m; /* iterate over heap elements */
1324 	int max_code = -1; /* largest code with non zero frequency */
1325 	int node = elems; /* next internal node of the tree */
1327 	/* Construct the initial heap, with least frequent element in
1328 	 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
1329 	 * heap[0] is not used.
1332 	G2.heap_max = HEAP_SIZE;
1334 	for (n = 0; n < elems; n++) {
1335 		if (tree[n].Freq != 0) {
1336 			G2.heap[++G2.heap_len] = max_code = n;
/* NOTE(review): the else branch zeroing Len for unused symbols and the
 * depth[] reset are on lines elided from this dump. */
1343 	/* The pkzip format requires that at least one distance code exists,
1344 	 * and that at least one bit should be sent even if there is only one
1345 	 * possible code. So to avoid special checks later on we force at least
1346 	 * two codes of non zero frequency.
1348 	while (G2.heap_len < 2) {
1349 		int new = G2.heap[++G2.heap_len] = (max_code < 2 ? ++max_code : 0);
/* Forced symbols get Freq = 1 and opt_len-- on elided lines; the
 * static tree's contribution for them is removed here: */
1355 			G2.static_len -= stree[new].Len;
1356 		/* new is 0 or 1 so it does not have extra bits */
1358 	desc->max_code = max_code;
1360 	/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
1361 	 * establish sub-heaps of increasing lengths:
1363 	for (n = G2.heap_len / 2; n >= 1; n--)
1364 		pqdownheap(tree, n);
1366 	/* Construct the Huffman tree by repeatedly combining the least two
/* Main combine loop: pop the two least-frequent nodes, make a new
 * internal node whose frequency is their sum, push it back. */
1370 		PQREMOVE(tree, n); /* n = node of least frequency */
1371 		m = G2.heap[SMALLEST]; /* m = node of next least frequency */
1373 		G2.heap[--G2.heap_max] = n; /* keep the nodes sorted by frequency */
1374 		G2.heap[--G2.heap_max] = m;
1376 		/* Create a new node father of n and m */
1377 		tree[node].Freq = tree[n].Freq + tree[m].Freq;
1378 		G2.depth[node] = MAX(G2.depth[n], G2.depth[m]) + 1;
1379 		tree[n].Dad = tree[m].Dad = (ush) node;
/* Debug-only trace of the bit-length tree construction (the enclosing
 * #ifdef DEBUG lines are elided in this dump): */
1381 		if (tree == G2.bl_tree) {
1382 			bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
1383 				node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
1386 		/* and insert the new node in the heap */
1387 		G2.heap[SMALLEST] = node++;
1388 		pqdownheap(tree, SMALLEST);
1389 	} while (G2.heap_len >= 2);
1391 	G2.heap[--G2.heap_max] = G2.heap[SMALLEST];
1393 	/* At this point, the fields freq and dad are set. We can now
1394 	 * generate the bit lengths.
1396 	gen_bitlen((tree_desc *) desc);
1398 	/* The field len is now set, we can generate the bit codes */
1399 	gen_codes((ct_data *) tree, max_code);
1403 /* ===========================================================================
1404  * Scan a literal or distance tree to determine the frequencies of the codes
1405  * in the bit length tree. Updates opt_len to take into account the repeat
1406  * counts. (The contribution of the bit length codes will be added later
1407  * during the construction of bl_tree.)
/* Run-length encodes the sequence of code lengths, counting into
 * bl_tree.Freq: literal lengths, REP_3_6 for 3-6 repeats of a nonzero
 * length, REPZ_3_10 / REPZ_11_138 for runs of zeros. */
1409 static void scan_tree(ct_data * tree, int max_code)
1411 	int n; /* iterates over all tree elements */
1412 	int prevlen = -1; /* last emitted length */
1413 	int curlen; /* length of current code */
1414 	int nextlen = tree[0].Len; /* length of next code */
1415 	int count = 0; /* repeat count of the current code */
1416 	int max_count = 7; /* max repeat count */
1417 	int min_count = 4; /* min repeat count */
1423 	tree[max_code + 1].Len = 0xffff; /* guard */
1425 	for (n = 0; n <= max_code; n++) {
1427 		nextlen = tree[n + 1].Len;
1428 		if (++count < max_count && curlen == nextlen)
/* (curlen = nextlen and the 'continue' of the run accumulation are on
 * lines elided from this dump.) */
1431 		if (count < min_count) {
1432 			G2.bl_tree[curlen].Freq += count;
1433 		} else if (curlen != 0) {
1434 			if (curlen != prevlen)
1435 				G2.bl_tree[curlen].Freq++;
1436 			G2.bl_tree[REP_3_6].Freq++;
1437 		} else if (count <= 10) {
1438 			G2.bl_tree[REPZ_3_10].Freq++;
1440 			G2.bl_tree[REPZ_11_138].Freq++;
/* Elided tail of the loop resets count and picks the next max_count /
 * min_count pair depending on whether a zero run follows: */
1450 		} else if (curlen == nextlen) {
1458 /* ===========================================================================
1459  * Send a literal or distance tree in compressed form, using the codes in
/* Mirror of scan_tree: walks the same run-length structure, but emits
 * the bl_tree codes and repeat-count extra bits instead of counting. */
1462 static void send_tree(ct_data * tree, int max_code)
1464 	int n; /* iterates over all tree elements */
1465 	int prevlen = -1; /* last emitted length */
1466 	int curlen; /* length of current code */
1467 	int nextlen = tree[0].Len; /* length of next code */
1468 	int count = 0; /* repeat count of the current code */
1469 	int max_count = 7; /* max repeat count */
1470 	int min_count = 4; /* min repeat count */
1472 	/* tree[max_code+1].Len = -1; *//* guard already set */
1474 		max_count = 138, min_count = 3;
1476 	for (n = 0; n <= max_code; n++) {
1478 		nextlen = tree[n + 1].Len;
1479 		if (++count < max_count && curlen == nextlen) {
1481 		} else if (count < min_count) {
/* A short run: emit the length code 'count' times (the do/while around
 * this SEND_CODE is on lines elided from this dump). */
1483 				SEND_CODE(curlen, G2.bl_tree);
1485 		} else if (curlen != 0) {
1486 			if (curlen != prevlen) {
1487 				SEND_CODE(curlen, G2.bl_tree);
1490 			Assert(count >= 3 && count <= 6, " 3_6?");
1491 			SEND_CODE(REP_3_6, G2.bl_tree);
1492 			send_bits(count - 3, 2);
1493 		} else if (count <= 10) {
1494 			SEND_CODE(REPZ_3_10, G2.bl_tree);
1495 			send_bits(count - 3, 3);
1497 			SEND_CODE(REPZ_11_138, G2.bl_tree);
1498 			send_bits(count - 11, 7);
/* Elided tail resets count and the max/min repeat bounds for the next
 * run, as in scan_tree: */
1505 		} else if (curlen == nextlen) {
1516 /* ===========================================================================
1517  * Construct the Huffman tree for the bit lengths and return the index in
1518  * bl_order of the last bit length code to send.
1520 static int build_bl_tree(void)
1522 	int max_blindex; /* index of last bit length code of non zero freq */
1524 	/* Determine the bit length frequencies for literal and distance trees */
1525 	scan_tree(G2.dyn_ltree, G2.l_desc.max_code);
1526 	scan_tree(G2.dyn_dtree, G2.d_desc.max_code);
1528 	/* Build the bit length tree: */
1529 	build_tree(&G2.bl_desc);
1530 	/* opt_len now includes the length of the tree representations, except
1531 	 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
1534 	/* Determine the number of bit length codes to send. The pkzip format
1535 	 * requires that at least 4 bit length codes be sent. (appnote.txt says
1536 	 * 3 but the actual value used is 4.)
1538 	for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) {
1539 		if (G2.bl_tree[bl_order[max_blindex]].Len != 0)
/* (The loop's 'break' and the final 'return max_blindex;' are on lines
 * elided from this dump.) */
1542 	/* Update opt_len to include the bit length tree and counts */
1543 	G2.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
1544 	Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len));
1550 /* ===========================================================================
1551  * Send the header for a block using dynamic Huffman trees: the counts, the
1552  * lengths of the bit length codes, the literal tree and the distance tree.
1553  * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
1555 static void send_all_trees(int lcodes, int dcodes, int blcodes)
1557 	int rank; /* index in bl_order */
1559 	Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
1560 	Assert(lcodes <= L_CODES && dcodes <= D_CODES
1561 		&& blcodes <= BL_CODES, "too many codes");
1562 	Tracev((stderr, "\nbl counts: "));
/* DEFLATE dynamic-block header: HLIT (5 bits), HDIST (5 bits),
 * HCLEN (4 bits), then 3 bits per bit-length-code length in bl_order. */
1563 	send_bits(lcodes - 257, 5); /* not +255 as stated in appnote.txt */
1564 	send_bits(dcodes - 1, 5);
1565 	send_bits(blcodes - 4, 4); /* not -3 as stated in appnote.txt */
1566 	for (rank = 0; rank < blcodes; rank++) {
1567 		Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
1568 		send_bits(G2.bl_tree[bl_order[rank]].Len, 3);
1570 	Tracev((stderr, "\nbl tree: sent %ld", (long)G1.bits_sent));
1572 	send_tree((ct_data *) G2.dyn_ltree, lcodes - 1); /* send the literal tree */
1573 	Tracev((stderr, "\nlit tree: sent %ld", (long)G1.bits_sent));
1575 	send_tree((ct_data *) G2.dyn_dtree, dcodes - 1); /* send the distance tree */
1576 	Tracev((stderr, "\ndist tree: sent %ld", (long)G1.bits_sent));
1580 /* ===========================================================================
1581  * Save the match info and tally the frequency counts. Return true if
1582  * the current block must be flushed.
/* dist == 0 means 'lc' is a literal byte; otherwise dist is the match
 * distance and lc the match length - MIN_MATCH. */
1584 static int ct_tally(int dist, int lc)
1586 	G1.l_buf[G2.last_lit++] = lc;
1588 		/* lc is the unmatched char */
1589 		G2.dyn_ltree[lc].Freq++;
1591 		/* Here, lc is the match length - MIN_MATCH */
1592 		dist--; /* dist = match distance - 1 */
1593 		Assert((ush) dist < (ush) MAX_DIST
1594 		 && (ush) lc <= (ush) (MAX_MATCH - MIN_MATCH)
1595 		 && (ush) D_CODE(dist) < (ush) D_CODES, "ct_tally: bad match"
1598 		G2.dyn_ltree[G2.length_code[lc] + LITERALS + 1].Freq++;
1599 		G2.dyn_dtree[D_CODE(dist)].Freq++;
1601 		G1.d_buf[G2.last_dist++] = dist;
/* Each entry gets one bit in 'flags': 0 = literal, 1 = match. */
1602 		G2.flags |= G2.flag_bit;
/* (The flag_bit <<= 1 advance is on a line elided from this dump.) */
1606 	/* Output the flags if they fill a byte: */
1607 	if ((G2.last_lit & 7) == 0) {
1608 		G2.flag_buf[G2.last_flags++] = G2.flags;
1612 	/* Try to guess if it is profitable to stop the current block here */
1613 	if ((G2.last_lit & 0xfff) == 0) {
1614 		/* Compute an upper bound for the compressed length */
1615 		ulg out_length = G2.last_lit * 8L;
1616 		ulg in_length = (ulg) G1.strstart - G1.block_start;
1619 		for (dcode = 0; dcode < D_CODES; dcode++) {
1620 			out_length += G2.dyn_dtree[dcode].Freq * (5L + extra_dbits[dcode]);
1624 			"\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
1625 			G2.last_lit, G2.last_dist,
1626 			(long)in_length, (long)out_length,
1627 			100L - out_length * 100L / in_length));
/* Flush early if matches are scarce and the estimate looks poor: */
1628 		if (G2.last_dist < G2.last_lit / 2 && out_length < in_length / 2)
1631 	return (G2.last_lit == LIT_BUFSIZE - 1 || G2.last_dist == DIST_BUFSIZE);
1632 	/* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
1633 	 * on 16 bit machines and because stored blocks are restricted to
1638 /* ===========================================================================
1639  * Send the block data compressed using the given Huffman trees
/* Replays the l_buf / d_buf / flag_buf records accumulated by ct_tally,
 * emitting each literal or (length, distance) pair with the supplied
 * trees, then the END_BLOCK symbol. */
1641 static void compress_block(ct_data * ltree, ct_data * dtree)
1643 	unsigned dist; /* distance of matched string */
1644 	int lc; /* match length or unmatched char (if dist == 0) */
1645 	unsigned lx = 0; /* running index in l_buf */
1646 	unsigned dx = 0; /* running index in d_buf */
1647 	unsigned fx = 0; /* running index in flag_buf */
1648 	uch flag = 0; /* current flags */
1649 	unsigned code; /* the code to send */
1650 	int extra; /* number of extra bits to send */
1652 	if (G2.last_lit != 0) do {
/* A fresh flag byte is loaded every 8 entries (the (lx & 7) test is on
 * a line elided from this dump; flag >>= 1 likewise). */
1654 			flag = G2.flag_buf[fx++];
1655 		lc = G1.l_buf[lx++];
1656 		if ((flag & 1) == 0) {
1657 			SEND_CODE(lc, ltree); /* send a literal byte */
1658 			Tracecv(lc > ' ', (stderr, " '%c' ", lc));
1660 			/* Here, lc is the match length - MIN_MATCH */
1661 			code = G2.length_code[lc];
1662 			SEND_CODE(code + LITERALS + 1, ltree); /* send the length code */
1663 			extra = extra_lbits[code];
1665 				lc -= G2.base_length[code];
1666 				send_bits(lc, extra); /* send the extra length bits */
1668 			dist = G1.d_buf[dx++];
1669 			/* Here, dist is the match distance - 1 */
1670 			code = D_CODE(dist);
1671 			Assert(code < D_CODES, "bad d_code");
1673 			SEND_CODE(code, dtree); /* send the distance code */
1674 			extra = extra_dbits[code];
1676 				dist -= G2.base_dist[code];
1677 				send_bits(dist, extra); /* send the extra distance bits */
1679 		} /* literal or match pair ? */
1681 	} while (lx < G2.last_lit);
1683 	SEND_CODE(END_BLOCK, ltree);
1687 /* ===========================================================================
1688  * Determine the best encoding for the current block: dynamic trees, static
1689  * trees or store, and output the encoded block to the zip file. This function
1690  * returns the total compressed length for the file so far.
/* buf: window data of the block (may be NULL, see below); stored_len:
 * its byte length; eof: nonzero on the final block. Returns the running
 * compressed length in bytes. */
1692 static ulg flush_block(char *buf, ulg stored_len, int eof)
1694 	ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
1695 	int max_blindex; /* index of last bit length code of non zero freq */
1697 	G2.flag_buf[G2.last_flags] = G2.flags; /* Save the flags for the last 8 items */
1699 	/* Construct the literal and distance trees */
1700 	build_tree(&G2.l_desc);
1701 	Tracev((stderr, "\nlit data: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len));
1703 	build_tree(&G2.d_desc);
1704 	Tracev((stderr, "\ndist data: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len));
1705 	/* At this point, opt_len and static_len are the total bit lengths of
1706 	 * the compressed block data, excluding the tree representations.
1709 	/* Build the bit length tree for the above two trees, and get the index
1710 	 * in bl_order of the last bit length code to send.
1712 	max_blindex = build_bl_tree();
1714 	/* Determine the best encoding. Compute first the block length in bytes */
/* +3 for the 3-bit block header, +7 to round up to a whole byte. */
1715 	opt_lenb = (G2.opt_len + 3 + 7) >> 3;
1716 	static_lenb = (G2.static_len + 3 + 7) >> 3;
1719 		"\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
1720 		(unsigned long)opt_lenb, (unsigned long)G2.opt_len,
1721 		(unsigned long)static_lenb, (unsigned long)G2.static_len,
1722 		(unsigned long)stored_len,
1723 		G2.last_lit, G2.last_dist));
1725 	if (static_lenb <= opt_lenb)
1726 		opt_lenb = static_lenb;
1728 	/* If compression failed and this is the first and last block,
1729 	 * and if the zip file can be seeked (to rewrite the local header),
1730 	 * the whole file is transformed into a stored file:
1732 	if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) {
1733 		/* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
/* (The 'if (buf == NULL)' guard around this error is on an elided line.) */
1735 			bb_error_msg("block vanished");
1737 		copy_block(buf, (unsigned) stored_len, 0); /* without header */
1738 		G2.compressed_len = stored_len << 3;
1739 	} else if (stored_len + 4 <= opt_lenb && buf != NULL) {
1740 		/* 4: two words for the lengths */
1741 		/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
1742 		 * Otherwise we can't have processed more than WSIZE input bytes since
1743 		 * the last block flush, because compression would have been
1744 		 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
1745 		 * transform a block into a stored block.
1747 		send_bits((STORED_BLOCK << 1) + eof, 3); /* send block type */
1748 		G2.compressed_len = (G2.compressed_len + 3 + 7) & ~7L;
1749 		G2.compressed_len += (stored_len + 4) << 3;
1751 		copy_block(buf, (unsigned) stored_len, 1); /* with header */
1752 	} else if (static_lenb == opt_lenb) {
1753 		send_bits((STATIC_TREES << 1) + eof, 3);
1754 		compress_block((ct_data *) G2.static_ltree, (ct_data *) G2.static_dtree);
1755 		G2.compressed_len += 3 + G2.static_len;
1757 		send_bits((DYN_TREES << 1) + eof, 3);
1758 		send_all_trees(G2.l_desc.max_code + 1, G2.d_desc.max_code + 1,
1760 		compress_block((ct_data *) G2.dyn_ltree, (ct_data *) G2.dyn_dtree);
1761 		G2.compressed_len += 3 + G2.opt_len;
1763 	Assert(G2.compressed_len == G1.bits_sent, "bad compressed size");
/* On eof the bit buffer is flushed and the length padded to a byte
 * boundary (the windup call is on an elided line): */
1768 		G2.compressed_len += 7; /* align on byte boundary */
1770 	Tracev((stderr, "\ncomprlen %lu(%lu) ",
1771 		(unsigned long)G2.compressed_len >> 3,
1772 		(unsigned long)G2.compressed_len - 7 * eof));
1774 	return G2.compressed_len >> 3;
1778 /* ===========================================================================
1779  * Update a hash value with the given input byte
1780  * IN assertion: all calls to UPDATE_HASH are made with consecutive
1781  * input characters, so that a running hash key can be computed from the
1782  * previous key instead of complete recalculation each time.
1784 #define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK)
1787 /* ===========================================================================
1788  * Same as above, but achieves better compression. We use a lazy
1789  * evaluation for matches: a match is finally adopted only if there is
1790  * no better match at the next window position.
1792  * Processes a new input file and return its compressed length. Sets
1793  * the compressed length, crc, deflate flags and internal file
1797 /* Flush the current block, with given end-of-file flag.
1798  * IN assertion: strstart is set to the end of the current match. */
/* Passes NULL for buf when block_start is negative (nothing buffered);
 * the NULL arm and the closing paren are on elided lines. */
1799 #define FLUSH_BLOCK(eof) \
1801 		G1.block_start >= 0L \
1802 			? (char*)&G1.window[(unsigned)G1.block_start] \
1804 		(ulg)G1.strstart - G1.block_start, \
1808 /* Insert string s in the dictionary and set match_head to the previous head
1809  * of the hash chain (the most recent string with same hash key). Return
1810  * the previous length of the hash chain.
1811  * IN assertion: all calls to INSERT_STRING are made with consecutive
1812  * input characters and the first MIN_MATCH bytes of s are valid
1813  * (except for the last MIN_MATCH-1 bytes of the input file). */
1814 #define INSERT_STRING(s, match_head) \
1816 	UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \
1817 	G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \
1818 	head[G1.ins_h] = (s); \
/* Main compression loop with lazy matching: defer emitting a match by
 * one position in case the next position yields a longer one.
 * NOTE(review): this dump elides the enclosing while-loop braces and
 * several statements (strstart advances, TOO_FAR demotion body). */
1821 static NOINLINE ulg deflate(void)
1823 	IPos hash_head; /* head of hash chain */
1824 	IPos prev_match; /* previous match */
1825 	int flush; /* set if current block must be flushed */
1826 	int match_available = 0; /* set if previous match exists */
1827 	unsigned match_length = MIN_MATCH - 1; /* length of best match */
1829 	/* Process the input block. */
1830 	while (G1.lookahead != 0) {
1831 		/* Insert the string window[strstart .. strstart+2] in the
1832 		 * dictionary, and set hash_head to the head of the hash chain:
1834 		INSERT_STRING(G1.strstart, hash_head);
1836 		/* Find the longest match, discarding those <= prev_length.
1838 		G1.prev_length = match_length;
1839 		prev_match = G1.match_start;
1840 		match_length = MIN_MATCH - 1;
1842 		if (hash_head != 0 && G1.prev_length < max_lazy_match
1843 		 && G1.strstart - hash_head <= MAX_DIST
1845 			/* To simplify the code, we prevent matches with the string
1846 			 * of window index 0 (in particular we have to avoid a match
1847 			 * of the string with itself at the start of the input file).
1849 			match_length = longest_match(hash_head);
1850 			/* longest_match() sets match_start */
1851 			if (match_length > G1.lookahead)
1852 				match_length = G1.lookahead;
1854 			/* Ignore a length 3 match if it is too distant: */
1855 			if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) {
1856 				/* If prev_match is also MIN_MATCH, G1.match_start is garbage
1857 				 * but we will ignore the current match anyway.
1862 		/* If there was a match at the previous step and the current
1863 		 * match is not better, output the previous match:
1865 		if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) {
1866 			check_match(G1.strstart - 1, prev_match, G1.prev_length);
1867 			flush = ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH);
1869 			/* Insert in hash table all strings up to the end of the match.
1870 			 * strstart-1 and strstart are already inserted.
1872 			G1.lookahead -= G1.prev_length - 1;
1873 			G1.prev_length -= 2;
1876 				INSERT_STRING(G1.strstart, hash_head);
1877 				/* strstart never exceeds WSIZE-MAX_MATCH, so there are
1878 				 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
1879 				 * these bytes are garbage, but it does not matter since the
1880 				 * next lookahead bytes will always be emitted as literals.
1882 			} while (--G1.prev_length != 0);
1883 			match_available = 0;
1884 			match_length = MIN_MATCH - 1;
/* When ct_tally asked for a flush, the current block is emitted and a
 * new one started (FLUSH_BLOCK call is on an elided line): */
1888 				G1.block_start = G1.strstart;
1890 		} else if (match_available) {
1891 			/* If there was no match at the previous position, output a
1892 			 * single literal. If there was a match but the current match
1893 			 * is longer, truncate the previous match to a single literal.
1895 			Tracevv((stderr, "%c", G1.window[G1.strstart - 1]));
1896 			if (ct_tally(0, G1.window[G1.strstart - 1])) {
1898 				G1.block_start = G1.strstart;
1903 			/* There is no previous match to compare with, wait for
1904 			 * the next step to decide.
1906 			match_available = 1;
1910 		Assert(G1.strstart <= G1.isize && G1.lookahead <= G1.isize, "a bit too far");
1912 		/* Make sure that we always have enough lookahead, except
1913 		 * at the end of the input file. We need MAX_MATCH bytes
1914 		 * for the next match, plus MIN_MATCH bytes to insert the
1915 		 * string following the next match.
1917 		fill_window_if_needed();
/* Emit a pending deferred literal before closing the last block: */
1919 	if (match_available)
1920 		ct_tally(0, G1.window[G1.strstart - 1]);
1922 	return FLUSH_BLOCK(1); /* eof */
1926 /* ===========================================================================
1927  * Initialize the bit string routines.
/* Intentionally (almost) empty: the bit-buffer state lives in G1 and
 * is zeroed wholesale by pack_gzip(), as the retained comments note. */
1929 static void bi_init(void)
1931 	//G1.bi_buf = 0; // globals are zeroed in pack_gzip()
1932 	//G1.bi_valid = 0; // globals are zeroed in pack_gzip()
1934 	//G1.bits_sent = 0L; // globals are zeroed in pack_gzip()
1939 /* ===========================================================================
1940  * Initialize the "longest match" routines for a new file
/* Clears the hash head table, records a speed flag in *flagsp, primes
 * the window with the first read, and seeds the rolling hash.
 * NOTE(review): the declaration of j and the error handling after the
 * failed read are on lines elided from this dump. */
1942 static void lm_init(ush * flagsp)
1946 	/* Initialize the hash table. */
1947 	memset(head, 0, HASH_SIZE * sizeof(*head));
1948 	/* prev will be initialized on the fly */
1950 	/* speed options for the general purpose bit flag */
1951 	*flagsp |= 2; /* FAST 4, SLOW 2 */
1952 	/* ??? reduce max_chain_length for binary files */
1954 	//G1.strstart = 0; // globals are zeroed in pack_gzip()
1955 	//G1.block_start = 0L; // globals are zeroed in pack_gzip()
/* On 16-bit machines a single read of 2*WSIZE could overflow unsigned,
 * hence the sizeof(int) guard: */
1957 	G1.lookahead = file_read(G1.window,
1958 			sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE);
1960 	if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) {
1965 	//G1.eofile = 0; // globals are zeroed in pack_gzip()
1967 	/* Make sure that we always have enough lookahead. This is important
1968 	 * if input comes from a device such as a tty.
1970 	fill_window_if_needed();
1972 	//G1.ins_h = 0; // globals are zeroed in pack_gzip()
1973 	for (j = 0; j < MIN_MATCH - 1; j++)
1974 		UPDATE_HASH(G1.ins_h, G1.window[j]);
1975 	/* If lookahead < MIN_MATCH, ins_h is garbage, but this is
1976 	 * not important since only literal bytes will be emitted.
1981 /* ===========================================================================
1982  * Allocate the match buffer, initialize the various tables and save the
1983  * location of the internal file attribute (ascii/binary) and method
1985  * One callsite in zip()
/* Builds the length->code and distance->code lookup tables and the two
 * static (fixed) Huffman trees. Idempotent: bails out early if the
 * static distance tree was already built. */
1987 static void ct_init(void)
1989 	int n; /* iterates over tree elements */
1990 	int length; /* length value */
1991 	int code; /* code value */
1992 	int dist; /* distance index */
1994 	//G2.compressed_len = 0L; // globals are zeroed in pack_gzip()
1997 	if (G2.static_dtree[0].Len != 0)
1998 		return; /* ct_init already called */
2001 	/* Initialize the mapping length (0..255) -> length code (0..28) */
/* (length = 0 initialization is on a line elided from this dump.) */
2003 	for (code = 0; code < LENGTH_CODES - 1; code++) {
2004 		G2.base_length[code] = length;
2005 		for (n = 0; n < (1 << extra_lbits[code]); n++) {
2006 			G2.length_code[length++] = code;
2009 	Assert(length == 256, "ct_init: length != 256");
2010 	/* Note that the length 255 (match length 258) can be represented
2011 	 * in two different ways: code 284 + 5 bits or code 285, so we
2012 	 * overwrite length_code[255] to use the best encoding:
2014 	G2.length_code[length - 1] = code;
2016 	/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
2018 	for (code = 0; code < 16; code++) {
2019 		G2.base_dist[code] = dist;
2020 		for (n = 0; n < (1 << extra_dbits[code]); n++) {
2021 			G2.dist_code[dist++] = code;
2024 	Assert(dist == 256, "ct_init: dist != 256");
2025 	dist >>= 7; /* from now on, all distances are divided by 128 */
2026 	for (; code < D_CODES; code++) {
2027 		G2.base_dist[code] = dist << 7;
2028 		for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
2029 			G2.dist_code[256 + dist++] = code;
2032 	Assert(dist == 256, "ct_init: 256+dist != 512");
2034 	/* Construct the codes of the static literal tree */
2035 	/* already zeroed - it's in bss
2036 	for (n = 0; n <= MAX_BITS; n++)
2037 		G2.bl_count[n] = 0; */
/* Fixed literal/length tree per RFC 1951: lengths 8,9,7,8 over the
 * four symbol ranges (the n loop bounds and bl_count updates are on
 * lines elided from this dump). */
2041 		G2.static_ltree[n++].Len = 8;
2045 		G2.static_ltree[n++].Len = 9;
2049 		G2.static_ltree[n++].Len = 7;
2053 		G2.static_ltree[n++].Len = 8;
2056 	/* Codes 286 and 287 do not exist, but we must include them in the
2057 	 * tree construction to get a canonical Huffman tree (longest code
2060 	gen_codes((ct_data *) G2.static_ltree, L_CODES + 1);
2062 	/* The static distance tree is trivial: */
2063 	for (n = 0; n < D_CODES; n++) {
2064 		G2.static_dtree[n].Len = 5;
2065 		G2.static_dtree[n].Code = bi_reverse(n, 5);
2068 	/* Initialize the first block of the first file: */
2073 /* ===========================================================================
2074  * Deflate in to out.
2075  * IN assertions: the input and output buffers are cleared.
/* Writes the 10-byte gzip member header, runs deflate(), then appends
 * the CRC32 and uncompressed size trailer (RFC 1952).
 * NOTE(review): the bi_init/ct_init/deflate calls and the crc put_32bit
 * are on lines elided from this dump. */
2077 static void zip(void)
2079 	ush deflate_flags = 0; /* pkzip -es, -en or -ex equivalent */
2081 	//G1.outcnt = 0; // globals are zeroed in pack_gzip()
2083 	/* Write the header to the gzip file. See algorithm.doc for the format */
2084 	/* magic header for gzip files: 1F 8B */
2085 	/* compression method: 8 (DEFLATED) */
2086 	/* general flags: 0 */
/* Little-endian: 1F 8B 08 00 in one 32-bit store. */
2087 	put_32bit(0x00088b1f);
2088 	put_32bit(0); /* Unix timestamp */
2090 	/* Write deflated file to zip file */
2095 	lm_init(&deflate_flags);
2097 	put_8bit(deflate_flags); /* extra flags */
2098 	put_8bit(3); /* OS identifier = 3 (Unix) */
2102 	/* Write the crc and uncompressed size */
2104 	put_32bit(G1.isize);
2110 /* ======================================================================== */
/* busybox transformer entry point: reinitializes both global state
 * structs for a fresh stream and fills in the three tree descriptors
 * (literal/length, distance, bit-length) before compressing.
 * NOTE(review): the buffer-clearing code, the zip() call and the return
 * are on lines elided from this dump. */
2112 IF_DESKTOP(long long) int FAST_FUNC pack_gzip(transformer_state_t *xstate UNUSED_PARAM)
2114 	/* Reinit G1.xxx except pointers to allocated buffers */
2115 	memset(&G1, 0, offsetof(struct globals, l_buf));
2117 	/* Clear input and output buffers */
2125 	memset(&G2, 0, sizeof(G2));
2126 	G2.l_desc.dyn_tree = G2.dyn_ltree;
2127 	G2.l_desc.static_tree = G2.static_ltree;
2128 	G2.l_desc.extra_bits = extra_lbits;
2129 	G2.l_desc.extra_base = LITERALS + 1;
2130 	G2.l_desc.elems = L_CODES;
2131 	G2.l_desc.max_length = MAX_BITS;
2132 	//G2.l_desc.max_code = 0;
2133 	G2.d_desc.dyn_tree = G2.dyn_dtree;
2134 	G2.d_desc.static_tree = G2.static_dtree;
2135 	G2.d_desc.extra_bits = extra_dbits;
2136 	//G2.d_desc.extra_base = 0;
2137 	G2.d_desc.elems = D_CODES;
2138 	G2.d_desc.max_length = MAX_BITS;
2139 	//G2.d_desc.max_code = 0;
2140 	G2.bl_desc.dyn_tree = G2.bl_tree;
2141 	//G2.bl_desc.static_tree = NULL;
2142 	G2.bl_desc.extra_bits = extra_blbits,
2143 	//G2.bl_desc.extra_base = 0;
2144 	G2.bl_desc.elems = BL_CODES;
2145 	G2.bl_desc.max_length = MAX_BL_BITS;
2146 	//G2.bl_desc.max_code = 0;
2149 	/* Saving of timestamp is disabled. Why?
2150 	 * - it is not Y2038-safe.
2151 	 * - some people want deterministic results
2152 	 *   (normally they'd use -n, but our -n is a nop).
2154 	 * Per RFC 1952, gzfile.time=0 is "no timestamp".
2155 	 * If users will demand this to be reinstated,
2156 	 * implement -n "don't save timestamp".
/* Dead/disabled timestamp code (inside the comment block above this
 * line in the full file — TODO confirm against upstream): */
2160 	fstat(STDIN_FILENO, &s);
2168 #if ENABLE_FEATURE_GZIP_LONG_OPTIONS
/* GNU-compatible long options, each mapped to its short equivalent;
 * consumed by getopt32long() in gzip_main below.
 * Format: "name\0" has_arg short-option-char */
2169 static const char gzip_longopts[] ALIGN1 =
2170 "stdout\0" No_argument "c"
2171 "to-stdout\0" No_argument "c"
2172 "force\0" No_argument "f"
2173 "verbose\0" No_argument "v"
2174 #if ENABLE_FEATURE_GZIP_DECOMPRESS
/* -d/-t only exist when decompression support is compiled in */
2175 "decompress\0" No_argument "d"
2176 "uncompress\0" No_argument "d"
2177 "test\0" No_argument "t"
2179 "quiet\0" No_argument "q"
2180 "fast\0" No_argument "1"
2181 "best\0" No_argument "9"
2182 "no-name\0" No_argument "n"
2187 * Linux kernel build uses gzip -d -n. We accept and ignore -n.
2190 * gzip: do not save the original file name and time stamp.
2191 * (The original name is always saved if the name had to be truncated.)
2192 * gunzip: do not restore the original file name/time even if present
2193 * (remove only the gzip suffix from the compressed file name).
2194 * This option is the default when decompressing.
2196 * gzip: always save the original file name and time stamp (this is the default)
2197 * gunzip: restore the original file name and time stamp if present.
2200 int gzip_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
/* Two alternative signatures: argc is only needed when we may hand
 * control to gunzip_main() for -d/-t */
2201 #if ENABLE_FEATURE_GZIP_DECOMPRESS
2202 int gzip_main(int argc, char **argv)
2204 int gzip_main(int argc UNUSED_PARAM, char **argv)
2208 #if ENABLE_FEATURE_GZIP_LEVELS
/* Per-level tuning for the match finder. lazy2/nice2 are stored
 * halved (note the /2 initializers) and doubled at use below. */
2209 static const struct {
2211 uint8_t chain_shift;
2214 } gzip_level_config[6] = {
2215 {4, 4, 4/2, 16/2}, /* Level 4 */
2216 {8, 5, 16/2, 32/2}, /* Level 5 */
2217 {8, 7, 16/2, 128/2}, /* Level 6 */
2218 {8, 8, 32/2, 128/2}, /* Level 7 */
2219 {32, 10, 128/2, 258/2}, /* Level 8 */
2220 {32, 12, 258/2, 258/2}, /* Level 9 */
/* G1 and G2 live in one allocation; the globals pointer is advanced
 * past G1 so that G2 fields are reachable at negative-free offsets */
2224 SET_PTR_TO_GLOBALS((char *)xzalloc(sizeof(struct globals)+sizeof(struct globals2))
2225 + sizeof(struct globals));
2227 /* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */
2228 #if ENABLE_FEATURE_GZIP_LONG_OPTIONS
2229 opt = getopt32long(argv, "cfkv" IF_FEATURE_GZIP_DECOMPRESS("dt") "qn123456789", gzip_longopts);
2231 opt = getopt32(argv, "cfkv" IF_FEATURE_GZIP_DECOMPRESS("dt") "qn123456789");
2233 #if ENABLE_FEATURE_GZIP_DECOMPRESS /* gunzip_main may not be visible... */
/* bits: c=0x1 f=0x2 k=0x4 v=0x8 d=0x10 t=0x20 */
2234 if (opt & 0x30) // -d and/or -t
2235 return gunzip_main(argc, argv);
2237 #if ENABLE_FEATURE_GZIP_LEVELS
/* Keep only the -1..-9 digit bits */
2238 opt >>= ENABLE_FEATURE_GZIP_DECOMPRESS ? 8 : 6; /* drop cfkv[dt]qn bits */
2240 opt = 1 << 6; /* default: 6 */
2241 opt = ffs(opt >> 4); /* Maps -1..-4 to [0], -5 to [1] ... -9 to [5] */
2242 max_chain_length = 1 << gzip_level_config[opt].chain_shift;
2243 good_match = gzip_level_config[opt].good;
2244 max_lazy_match = gzip_level_config[opt].lazy2 * 2;
2245 nice_match = gzip_level_config[opt].nice2 * 2;
2247 option_mask32 &= 0xf; /* retain only -cfkv */
2249 /* Allocate all global buffers (for DYN_ALLOC option) */
2250 ALLOC(uch, G1.l_buf, INBUFSIZ);
2251 ALLOC(uch, G1.outbuf, OUTBUFSIZ);
2252 ALLOC(ush, G1.d_buf, DIST_BUFSIZE);
2253 ALLOC(uch, G1.window, 2L * WSIZE);
2254 ALLOC(ush, G1.prev, 1L << BITS);
2256 /* Initialize the CRC32 table */
2257 global_crc32_table = crc32_filltable(NULL, 0);
/* bbunpack drives the per-file loop, calling pack_gzip() for each input
 * and appending the ".gz" extension via append_ext */
2260 return bbunpack(argv, pack_gzip, append_ext, "gz");