/* vi: set sw=4 ts=4: */
/*
 * Small lzma deflate implementation.
 * Copyright (C) 2006 Aurelien Jacobs <aurel@gnuage.org>
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (C) 1999-2005 Igor Pavlov
 *
 * Licensed under GPLv2 or later, see file LICENSE in this tarball for details.
 */
#include "libbb.h"
#include "unarchive.h"

#if ENABLE_FEATURE_LZMA_FAST
#  define speed_inline ALWAYS_INLINE
#  define size_inline
#else
#  define speed_inline
#  define size_inline ALWAYS_INLINE
#endif
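
/* Note on the knobs above (my reading, not from the original comments):
 * the FAST build force-inlines the hot range-coder helpers via
 * speed_inline, while the small build instead inlines only helpers such
 * as rc_read() that end up with a single callsite, where inlining costs
 * no size and drops the call overhead. */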

typedef struct {
	int fd;
	uint8_t *ptr;

/* Was keeping rc on stack in unlzma and separately allocating buffer,
 * but with "buffer 'attached to' allocated rc" code is smaller: */
	/* uint8_t *buffer; */
#define RC_BUFFER ((uint8_t*)(rc+1))

	uint8_t *buffer_end;

/* Had provisions for variable buffer, but we don't need it here */
	/* int buffer_size; */
#define RC_BUFFER_SIZE 0x10000

	uint32_t code;
	uint32_t range;
	uint32_t bound;
} rc_t;

#define RC_TOP_BITS 24
#define RC_MOVE_BITS 5
#define RC_MODEL_TOTAL_BITS 11
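
/* Range-coder parameters (the standard LZMA values): probabilities are
 * 11-bit fixed-point counters (out of 1 << RC_MODEL_TOTAL_BITS == 2048),
 * the range is topped up byte-by-byte once it falls below
 * 1 << RC_TOP_BITS, and every decoded bit moves its probability counter
 * by 1/32 of the remaining gap (RC_MOVE_BITS == 5) toward the value
 * just seen. */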

/* Called twice: once at startup (LZMA_FAST only) and once in rc_normalize() */
static size_inline void rc_read(rc_t *rc)
{
	int buffer_size = safe_read(rc->fd, RC_BUFFER, RC_BUFFER_SIZE);
	if (buffer_size <= 0)
		bb_error_msg_and_die("unexpected EOF");
	rc->ptr = RC_BUFFER;
	rc->buffer_end = RC_BUFFER + buffer_size;
}

/* Called twice, but one callsite is in speed_inline'd rc_is_bit_1() */
static void rc_do_normalize(rc_t *rc)
{
	if (rc->ptr >= rc->buffer_end)
		rc_read(rc);
	rc->range <<= 8;
	rc->code = (rc->code << 8) | *rc->ptr++;
}

/* Called once */
static ALWAYS_INLINE rc_t* rc_init(int fd) /*, int buffer_size) */
{
	int i;
	rc_t *rc;

	rc = xmalloc(sizeof(*rc) + RC_BUFFER_SIZE);

	rc->fd = fd;
	/* rc->buffer_size = buffer_size; */
	rc->ptr = rc->buffer_end; /* force rc_read() on first use */

	/* Load the first 5 bytes; whatever garbage starts out in rc->code
	 * is shifted out of the 32-bit value by these five 8-bit shifts */
	for (i = 0; i < 5; i++) {
#if ENABLE_FEATURE_LZMA_FAST
		if (rc->ptr >= rc->buffer_end)
			rc_read(rc);
		rc->code = (rc->code << 8) | *rc->ptr++;
#else
		rc_do_normalize(rc);
#endif
	}
	rc->range = 0xFFFFFFFF;
	return rc;
}

/* Called once */
static ALWAYS_INLINE void rc_free(rc_t *rc)
{
	free(rc);
}

static ALWAYS_INLINE void rc_normalize(rc_t *rc)
{
	if (rc->range < (1 << RC_TOP_BITS)) {
		rc_do_normalize(rc);
	}
}

/* rc_is_bit_1 is called 9 times */
static speed_inline int rc_is_bit_1(rc_t *rc, uint16_t *p)
{
	rc_normalize(rc);
	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
	if (rc->code < rc->bound) {
		rc->range = rc->bound;
		*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
		return 0;
	}
	rc->range -= rc->bound;
	rc->code -= rc->bound;
	*p -= *p >> RC_MOVE_BITS;
	return 1;
}
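
/* A worked example of the split above: *p is the scaled probability that
 * the next bit is 0. With *p == 1024 (the initial 0.5), bound is half of
 * range and the interval is cut in the middle; with *p == 1984, bit 0
 * gets ~97% of the interval. Whichever part the code value falls into
 * becomes the new interval, and *p is nudged toward the bit actually
 * seen. */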

/* Called 4 times in unlzma loop */
static speed_inline int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
{
	int ret = rc_is_bit_1(rc, p);
	*symbol = *symbol * 2 + ret;
	return ret;
}

/* Called once */
static ALWAYS_INLINE int rc_direct_bit(rc_t *rc)
{
	rc_normalize(rc);
	rc->range >>= 1;
	if (rc->code >= rc->range) {
		rc->code -= rc->range;
		return 1;
	}
	return 0;
}

/* Called twice */
static speed_inline void
rc_bit_tree_decode(rc_t *rc, uint16_t *p, int num_levels, int *symbol)
{
	int i = num_levels;

	*symbol = 1;
	while (i--)
		rc_get_bit(rc, p + *symbol, symbol);
	*symbol -= 1 << num_levels;
}
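
/* Bit-tree decoding: *symbol starts at 1 and each decoded bit is shifted
 * in, so the running value doubles as the index of the next tree node.
 * After num_levels steps *symbol lies in [1 << num_levels, 2 << num_levels);
 * subtracting 1 << num_levels strips the implicit leading 1, leaving the
 * plain num_levels-bit result. */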

typedef struct {
	uint8_t pos;
	uint32_t dict_size;
	uint64_t dst_size;
} __attribute__ ((packed)) lzma_header_t;
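
/* This mirrors the 13-byte .lzma file header: one properties byte
 * (encoding lc/lp/pb), a 32-bit little-endian dictionary size, and a
 * 64-bit little-endian uncompressed size. The packed attribute matters:
 * without it the compiler would pad after the lone uint8_t and xread()
 * below would no longer map the on-disk bytes onto the fields. */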

/* #defines will force compiler to compute/optimize each one with each usage.
 * Have heart and use enum instead. */
enum {
	LZMA_BASE_SIZE = 1846,
	LZMA_LIT_SIZE = 768,

	LZMA_NUM_POS_BITS_MAX = 4,

	LZMA_LEN_NUM_LOW_BITS = 3,
	LZMA_LEN_NUM_MID_BITS = 3,
	LZMA_LEN_NUM_HIGH_BITS = 8,

	LZMA_LEN_CHOICE = 0,
	LZMA_LEN_CHOICE_2 = (LZMA_LEN_CHOICE + 1),
	LZMA_LEN_LOW = (LZMA_LEN_CHOICE_2 + 1),
	LZMA_LEN_MID = (LZMA_LEN_LOW \
	                + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS))),
	LZMA_LEN_HIGH = (LZMA_LEN_MID \
	                + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS))),
	LZMA_NUM_LEN_PROBS = (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS)),

	LZMA_NUM_STATES = 12,
	LZMA_NUM_LIT_STATES = 7,

	LZMA_START_POS_MODEL_INDEX = 4,
	LZMA_END_POS_MODEL_INDEX = 14,
	LZMA_NUM_FULL_DISTANCES = (1 << (LZMA_END_POS_MODEL_INDEX >> 1)),

	LZMA_NUM_POS_SLOT_BITS = 6,
	LZMA_NUM_LEN_TO_POS_STATES = 4,

	LZMA_NUM_ALIGN_BITS = 4,

	LZMA_MATCH_MIN_LEN = 2,

	LZMA_IS_MATCH = 0,
	LZMA_IS_REP = (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_IS_REP_G0 = (LZMA_IS_REP + LZMA_NUM_STATES),
	LZMA_IS_REP_G1 = (LZMA_IS_REP_G0 + LZMA_NUM_STATES),
	LZMA_IS_REP_G2 = (LZMA_IS_REP_G1 + LZMA_NUM_STATES),
	LZMA_IS_REP_0_LONG = (LZMA_IS_REP_G2 + LZMA_NUM_STATES),
	LZMA_POS_SLOT = (LZMA_IS_REP_0_LONG \
	                + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_SPEC_POS = (LZMA_POS_SLOT \
	                + (LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS)),
	LZMA_ALIGN = (LZMA_SPEC_POS \
	             + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX),
	LZMA_LEN_CODER = (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS)),
	LZMA_REP_LEN_CODER = (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS),
	LZMA_LITERAL = (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS),
};
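
/* All probabilities live in a single uint16_t array p[]; the enum values
 * above are indices into it. The fixed part of the model adds up to
 * exactly LZMA_BASE_SIZE (1846) slots, and the literal tables starting
 * at LZMA_LITERAL add LZMA_LIT_SIZE (768 == 3 * 256) entries for each of
 * the 1 << (lc + lp) literal contexts. */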

IF_DESKTOP(long long) int FAST_FUNC
unpack_lzma_stream(int src_fd, int dst_fd)
{
	IF_DESKTOP(long long total_written = 0;)
	lzma_header_t header;
	int lc, pb, lp;
	uint32_t pos_state_mask;
	uint32_t literal_pos_mask;
	uint16_t *p;
	int num_bits, num_probs;
	rc_t *rc;
	int i, mi;
	uint32_t pos;
	uint8_t *buffer;
	uint8_t previous_byte = 0;
	size_t buffer_pos = 0, global_pos = 0;
	int len = 0;
	int state = 0;
	uint32_t rep0 = 1, rep1 = 1, rep2 = 1, rep3 = 1;

	xread(src_fd, &header, sizeof(header));

	if (header.pos >= (9 * 5 * 5))
		bb_error_msg_and_die("bad header");

	/* Unpack the properties byte: pos == (pb * 5 + lp) * 9 + lc */
	mi = header.pos / 9;
	lc = header.pos % 9;
	pb = mi / 5;
	lp = mi % 5;
	pos_state_mask = (1 << pb) - 1;
	literal_pos_mask = (1 << lp) - 1;
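
	/* lc = number of high bits of the previous byte used as literal
	 * context, lp = number of low position bits used for it, and pb =
	 * number of position bits conditioning the literal/match decision;
	 * the two masks select those low bits of the running output offset. */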

	header.dict_size = SWAP_LE32(header.dict_size);
	header.dst_size = SWAP_LE64(header.dst_size);

	if (header.dict_size == 0)
		header.dict_size = 1;

	buffer = xmalloc(MIN(header.dst_size, header.dict_size));

	num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
	p = xmalloc(num_probs * sizeof(*p));
	num_probs += LZMA_LITERAL - LZMA_BASE_SIZE;
	for (i = 0; i < num_probs; i++)
		p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;
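
	/* (1 << RC_MODEL_TOTAL_BITS) >> 1 == 1024, i.e. every context starts
	 * out rating 0 and 1 as equally likely and adapts from there. */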

	rc = rc_init(src_fd); /*, RC_BUFFER_SIZE); */
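
	/* Main decode loop: each iteration decodes one packet, which is a
	 * literal byte, a fresh match (length + new distance), a repeat
	 * match reusing one of the last four distances rep0..rep3, or a
	 * one-byte "short rep" at distance rep0. */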
	while (global_pos + buffer_pos < header.dst_size) {
		int pos_state = (buffer_pos + global_pos) & pos_state_mask;
		uint16_t *prob;

		prob = p + LZMA_IS_MATCH + (state << LZMA_NUM_POS_BITS_MAX) + pos_state;
		if (!rc_is_bit_1(rc, prob)) {
			mi = 1;
			prob = (p + LZMA_LITERAL
			        + (LZMA_LIT_SIZE * ((((buffer_pos + global_pos) & literal_pos_mask) << lc)
			                            + (previous_byte >> (8 - lc))
			           )
			        )
			);

			if (state >= LZMA_NUM_LIT_STATES) {
				int match_byte;

				/* Right after a match, decode the literal against the
				 * byte at distance rep0 (the "matched literal" model) */
				pos = buffer_pos - rep0;
				while (pos >= header.dict_size)
					pos += header.dict_size;
				match_byte = buffer[pos];
				do {
					int bit;
					uint16_t *prob_lit;

					match_byte <<= 1;
					bit = match_byte & 0x100;
					prob_lit = prob + 0x100 + bit + mi;
					bit ^= (rc_get_bit(rc, prob_lit, &mi) << 8); /* 0x100 or 0 */
					if (bit)
						break;
				} while (mi < 0x100);
			}
			while (mi < 0x100) {
				uint16_t *prob_lit = prob + mi;
				rc_get_bit(rc, prob_lit, &mi);
			}

			if (state < 4)
				state = 0;
			else if (state < 10)
				state -= 3;
			else
				state -= 6;

			previous_byte = (uint8_t) mi;
#if ENABLE_FEATURE_LZMA_FAST
 one_byte1:
			buffer[buffer_pos++] = previous_byte;
			if (buffer_pos == header.dict_size) {
				buffer_pos = 0;
				global_pos += header.dict_size;
				if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
					goto bad;
				IF_DESKTOP(total_written += header.dict_size;)
			}
			continue;
#else
			len = 1;
			goto one_byte2;
#endif
		}
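
		/* Not a literal, so decode a match. The next bit picks between
		 * a fresh match (new distance) and a repeat match reusing a
		 * recent distance. buffer[] doubles as the LZ dictionary: a
		 * ring of dict_size bytes flushed to dst_fd each time it fills. */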
		prob = p + LZMA_IS_REP + state;
		if (!rc_is_bit_1(rc, prob)) {
			rep3 = rep2;
			rep2 = rep1;
			rep1 = rep0;
			state = state < LZMA_NUM_LIT_STATES ? 0 : 3;
			prob = p + LZMA_LEN_CODER;
		} else {
			prob += LZMA_IS_REP_G0 - LZMA_IS_REP;
			if (!rc_is_bit_1(rc, prob)) {
				prob = (p + LZMA_IS_REP_0_LONG
				        + (state << LZMA_NUM_POS_BITS_MAX)
				        + pos_state
				);
				if (!rc_is_bit_1(rc, prob)) {
					/* "Short rep": a single byte from distance rep0 */
					state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
#if ENABLE_FEATURE_LZMA_FAST
					pos = buffer_pos - rep0;
					while (pos >= header.dict_size)
						pos += header.dict_size;
					previous_byte = buffer[pos];
					goto one_byte1;
#else
					len = 1;
					goto string;
#endif
				}
			} else {
				uint32_t distance;

				prob += LZMA_IS_REP_G1 - LZMA_IS_REP_G0;
				distance = rep1;
				if (rc_is_bit_1(rc, prob)) {
					prob += LZMA_IS_REP_G2 - LZMA_IS_REP_G1;
					distance = rep2;
					if (rc_is_bit_1(rc, prob)) {
						distance = rep3;
						rep3 = rep2;
					}
					rep2 = rep1;
				}
				rep1 = rep0;
				rep0 = distance;
			}
			state = state < LZMA_NUM_LIT_STATES ? 8 : 11;
			prob = p + LZMA_REP_LEN_CODER;
		}
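
		/* Decode the match length. A choice-bit prefix selects one of
		 * three bins: low (3 bits, lengths 2-9), mid (3 bits, 10-17),
		 * or high (8 bits, 18-273). offset records the bin base so the
		 * tree-decoded bits below are simply added onto it. */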
		{
			int offset;
			uint16_t *prob_len;

			prob_len = prob + LZMA_LEN_CHOICE;
			if (!rc_is_bit_1(rc, prob_len)) {
				prob_len += LZMA_LEN_LOW - LZMA_LEN_CHOICE
				            + (pos_state << LZMA_LEN_NUM_LOW_BITS);
				offset = 0;
				num_bits = LZMA_LEN_NUM_LOW_BITS;
			} else {
				prob_len += LZMA_LEN_CHOICE_2 - LZMA_LEN_CHOICE;
				if (!rc_is_bit_1(rc, prob_len)) {
					prob_len += LZMA_LEN_MID - LZMA_LEN_CHOICE_2
					            + (pos_state << LZMA_LEN_NUM_MID_BITS);
					offset = 1 << LZMA_LEN_NUM_LOW_BITS;
					num_bits = LZMA_LEN_NUM_MID_BITS;
				} else {
					prob_len += LZMA_LEN_HIGH - LZMA_LEN_CHOICE_2;
					offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
					          + (1 << LZMA_LEN_NUM_MID_BITS));
					num_bits = LZMA_LEN_NUM_HIGH_BITS;
				}
			}
			rc_bit_tree_decode(rc, prob_len, num_bits, &len);
			len += offset;
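
			/* For a fresh match the distance comes next: a 6-bit
			 * pos_slot, tree-coded with the (capped) length as context.
			 * Slots 0-3 are the distance itself; larger slots carry
			 * (pos_slot >> 1) - 1 extra bits, reverse-tree-coded for
			 * mid-range distances, and raw "direct" bits plus a 4-bit
			 * reverse-coded align field for the largest ones. */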
			if (state < 4) {
				int pos_slot;

				state += LZMA_NUM_LIT_STATES;
				prob = p + LZMA_POS_SLOT +
				       ((len < LZMA_NUM_LEN_TO_POS_STATES ? len :
				         LZMA_NUM_LEN_TO_POS_STATES - 1)
				         << LZMA_NUM_POS_SLOT_BITS);
				rc_bit_tree_decode(rc, prob,
						LZMA_NUM_POS_SLOT_BITS, &pos_slot);
				rep0 = pos_slot;
				if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
					num_bits = (pos_slot >> 1) - 1;
					rep0 = 2 | (pos_slot & 1);
					prob = p + LZMA_ALIGN;
					if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
						rep0 <<= num_bits;
						prob += LZMA_SPEC_POS - LZMA_ALIGN - 1 + rep0 - pos_slot;
					} else {
						num_bits -= LZMA_NUM_ALIGN_BITS;
						while (num_bits--)
							rep0 = (rep0 << 1) | rc_direct_bit(rc);
						rep0 <<= LZMA_NUM_ALIGN_BITS;
						num_bits = LZMA_NUM_ALIGN_BITS;
					}
					/* Reverse bit-tree: extra bits arrive LSB first */
					mi = 1;
					i = 1;
					while (num_bits--) {
						if (rc_get_bit(rc, prob + mi, &mi))
							rep0 |= i;
						i <<= 1;
					}
				}
				/* An all-ones distance is the end-of-stream marker */
				if (++rep0 == 0)
					break;
			}

			len += LZMA_MATCH_MIN_LEN;
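			/* Copy len bytes from distance rep0, one byte at a time
			 * (source and destination may overlap), flushing the ring
			 * buffer to dst_fd whenever it fills. */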
 IF_NOT_FEATURE_LZMA_FAST(string:)
			do {
				pos = buffer_pos - rep0;
				while (pos >= header.dict_size)
					pos += header.dict_size;
				previous_byte = buffer[pos];
 IF_NOT_FEATURE_LZMA_FAST(one_byte2:)
				buffer[buffer_pos++] = previous_byte;
				if (buffer_pos == header.dict_size) {
					buffer_pos = 0;
					global_pos += header.dict_size;
					if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
						goto bad;
					IF_DESKTOP(total_written += header.dict_size;)
				}
				len--;
			} while (len != 0 && buffer_pos < header.dst_size);
		}
	}

	{
		IF_NOT_DESKTOP(int total_written = 0; /* success */)
		IF_DESKTOP(total_written += buffer_pos;)
		if (full_write(dst_fd, buffer, buffer_pos) != (ssize_t)buffer_pos) {
 bad:
			total_written = -1; /* failure */
		}
		rc_free(rc);
		free(p);
		free(buffer);
		return total_written;
	}
}