/* vi: set sw=4 ts=4: */
/*
 * Small lzma deflate implementation.
 * Copyright (C) 2006  Aurelien Jacobs <aurel@gnuage.org>
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (C) 1999-2005  Igor Pavlov
 *
 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
 */
#include "libbb.h"
#include "bb_archive.h"
#if ENABLE_FEATURE_LZMA_FAST
# define speed_inline ALWAYS_INLINE
# define size_inline
#else
# define speed_inline
# define size_inline ALWAYS_INLINE
#endif
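/* speed_inline marks helpers worth force-inlining only in the speed-optimized
 * build; size_inline marks the ones force-inlined only in the size-optimized
 * build. */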
typedef struct {
	int fd;
	uint8_t *ptr;
/* Was keeping rc on stack in unlzma and separately allocating buffer,
 * but with "buffer 'attached to' allocated rc" code is smaller: */
	/* uint8_t *buffer; */
#define RC_BUFFER ((uint8_t*)(rc+1))
	uint8_t *buffer_end;
/* Had provisions for variable buffer, but we don't need it here */
	/* int buffer_size; */
#define RC_BUFFER_SIZE 0x10000
	uint32_t code;
	uint32_t range;
	uint32_t bound;
} rc_t;
#define RC_TOP_BITS 24
#define RC_MOVE_BITS 5
#define RC_MODEL_TOTAL_BITS 11
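/* Probabilities are 11-bit values (out of 1 << RC_MODEL_TOTAL_BITS), adapted
 * after every bit by 1/(1 << RC_MOVE_BITS) of the remaining error; the decoder
 * renormalizes whenever the range falls below 1 << RC_TOP_BITS. */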
/* Called once in rc_do_normalize() */
static void rc_read(rc_t *rc)
{
	int buffer_size = safe_read(rc->fd, RC_BUFFER, RC_BUFFER_SIZE);
//TODO: return -1 instead
//This will make unlzma delete broken unpacked file on unpack errors
	if (buffer_size <= 0)
		bb_error_msg_and_die("unexpected EOF");
	rc->buffer_end = RC_BUFFER + buffer_size;
	rc->ptr = RC_BUFFER;
}
/* Called twice, but one callsite is in speed_inline'd rc_is_bit_1() */
static void rc_do_normalize(rc_t *rc)
{
	if (rc->ptr >= rc->buffer_end)
		rc_read(rc);
	rc->range <<= 8;
	rc->code = (rc->code << 8) | *rc->ptr++;
}
static ALWAYS_INLINE rc_t* rc_init(int fd) /*, int buffer_size) */
{
	int i;
	rc_t *rc = xzalloc(sizeof(*rc) + RC_BUFFER_SIZE);

	rc->fd = fd;
	/* rc->ptr = rc->buffer_end; */
	for (i = 0; i < 5; i++) {
		rc_do_normalize(rc);
	}
	rc->range = 0xffffffff;
	return rc;
}
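/* rc_init pulls in 5 bytes: per the LZMA format the first byte of the
 * range-coded stream is always zero, and since rc->code is only 32 bits wide
 * it is shifted out again, leaving the next 4 bytes as the initial code. */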
static ALWAYS_INLINE void rc_free(rc_t *rc)
{
	free(rc);
}
static ALWAYS_INLINE void rc_normalize(rc_t *rc)
{
	if (rc->range < (1 << RC_TOP_BITS)) {
		rc_do_normalize(rc);
	}
}
/* rc_is_bit_1 is called 9 times */
static speed_inline int rc_is_bit_1(rc_t *rc, uint16_t *p)
{
	rc_normalize(rc);
	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
	if (rc->code < rc->bound) {
		rc->range = rc->bound;
		*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
		return 0;
	}
	rc->range -= rc->bound;
	rc->code -= rc->bound;
	*p -= *p >> RC_MOVE_BITS;
	return 1;
}
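/* Core binary decode step: *p is the 11-bit probability that the next bit
 * is 0.  The range is split at bound = *p * (range >> 11); a code below the
 * bound means bit 0 (and *p moves towards 2048), otherwise bit 1 (and *p
 * moves towards 0).  This per-bit update is what makes the coder adaptive. */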
/* Called 4 times in unlzma loop */
static ALWAYS_INLINE int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
{
	int ret = rc_is_bit_1(rc, p);
	*symbol = *symbol * 2 + ret;
	return ret;
}
static ALWAYS_INLINE int rc_direct_bit(rc_t *rc)
{
	rc_normalize(rc);
	rc->range >>= 1;
	if (rc->code >= rc->range) {
		rc->code -= rc->range;
		return 1;
	}
	return 0;
}
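/* Direct bits use a fixed 1/2 probability (no adaptive context); they carry
 * the middle bits of large match distances, see the pos_slot decoding below. */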
static speed_inline void
rc_bit_tree_decode(rc_t *rc, uint16_t *p, int num_levels, int *symbol)
{
	int i = num_levels;

	*symbol = 1;
	while (i--)
		rc_get_bit(rc, p + *symbol, symbol);
	*symbol -= 1 << num_levels;
}
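/* Bit-tree decode: *symbol walks an implicit binary tree rooted at p[1],
 * one probability per node.  After num_levels bits the leaf index equals
 * (1 << num_levels) + value, so the final subtraction recovers the value. */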
typedef struct {
	uint8_t pos;
	uint32_t dict_size;
	uint64_t dst_size;
} PACKED lzma_header_t;
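/* This matches the 13-byte header of the "lzma alone" (.lzma) format:
 * one properties byte (lc + lp*9 + pb*45), a 32-bit little-endian
 * dictionary size and a 64-bit little-endian uncompressed size. */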
/* #defines will force compiler to compute/optimize each one with each usage.
 * Have heart and use enum instead. */
enum {
	LZMA_BASE_SIZE = 1846,
	LZMA_LIT_SIZE  = 768,

	LZMA_NUM_POS_BITS_MAX = 4,

	LZMA_LEN_NUM_LOW_BITS  = 3,
	LZMA_LEN_NUM_MID_BITS  = 3,
	LZMA_LEN_NUM_HIGH_BITS = 8,

	LZMA_LEN_CHOICE     = 0,
	LZMA_LEN_CHOICE_2   = (LZMA_LEN_CHOICE + 1),
	LZMA_LEN_LOW        = (LZMA_LEN_CHOICE_2 + 1),
	LZMA_LEN_MID        = (LZMA_LEN_LOW \
	                      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS))),
	LZMA_LEN_HIGH       = (LZMA_LEN_MID \
	                      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS))),
	LZMA_NUM_LEN_PROBS  = (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS)),

	LZMA_NUM_STATES     = 12,
	LZMA_NUM_LIT_STATES = 7,

	LZMA_START_POS_MODEL_INDEX = 4,
	LZMA_END_POS_MODEL_INDEX   = 14,
	LZMA_NUM_FULL_DISTANCES    = (1 << (LZMA_END_POS_MODEL_INDEX >> 1)),

	LZMA_NUM_POS_SLOT_BITS = 6,
	LZMA_NUM_LEN_TO_POS_STATES = 4,

	LZMA_NUM_ALIGN_BITS = 4,

	LZMA_MATCH_MIN_LEN = 2,

	LZMA_IS_MATCH = 0,
	LZMA_IS_REP   = (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_IS_REP_G0 = (LZMA_IS_REP + LZMA_NUM_STATES),
	LZMA_IS_REP_G1 = (LZMA_IS_REP_G0 + LZMA_NUM_STATES),
	LZMA_IS_REP_G2 = (LZMA_IS_REP_G1 + LZMA_NUM_STATES),
	LZMA_IS_REP_0_LONG = (LZMA_IS_REP_G2 + LZMA_NUM_STATES),
	LZMA_POS_SLOT      = (LZMA_IS_REP_0_LONG \
	                     + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_SPEC_POS      = (LZMA_POS_SLOT \
	                     + (LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS)),
	LZMA_ALIGN         = (LZMA_SPEC_POS \
	                     + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX),
	LZMA_LEN_CODER     = (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS)),
	LZMA_REP_LEN_CODER = (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS),
	LZMA_LITERAL       = (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS),
};
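/* All values above are offsets (in uint16_t units) into one big probability
 * array p[].  The literal coder lives at the end because its size is not
 * fixed: LZMA_LIT_SIZE (0x300) probabilities per context, 1 << (lc + lp)
 * contexts, as allocated in unpack_lzma_stream() below. */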
IF_DESKTOP(long long) int FAST_FUNC
unpack_lzma_stream(transformer_aux_data_t *aux UNUSED_PARAM, int src_fd, int dst_fd)
{
	IF_DESKTOP(long long total_written = 0;)
	lzma_header_t header;
	int lc, pb, lp;
	uint32_t pos_state_mask;
	uint32_t literal_pos_mask;
	uint16_t *p;
	int i, num_bits, num_probs;
	rc_t *rc;
	uint8_t *buffer;
	uint8_t previous_byte = 0;
	size_t buffer_pos = 0, global_pos = 0;
	int len = 0, state = 0;
	uint32_t rep0 = 1, rep1 = 1, rep2 = 1, rep3 = 1;
	if (full_read(src_fd, &header, sizeof(header)) != sizeof(header)
	 || header.pos >= (9 * 5 * 5)
	) {
		bb_error_msg("bad lzma header");
		return -1;
	}

	/* The properties byte encodes lc, lp and pb as lc + lp*9 + pb*45 */
	i = header.pos / 9;
	lc = header.pos % 9;
	pb = i / 5;
	lp = i % 5;
	pos_state_mask = (1 << pb) - 1;
	literal_pos_mask = (1 << lp) - 1;
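	/* For example, the common properties byte 0x5D (93) gives
	 * lc = 93 % 9 = 3, lp = (93/9) % 5 = 0, pb = (93/9) / 5 = 2,
	 * the defaults used by most lzma encoders. */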
	header.dict_size = SWAP_LE32(header.dict_size);
	header.dst_size = SWAP_LE64(header.dst_size);

	if (header.dict_size == 0)
		header.dict_size++;

	buffer = xmalloc(MIN(header.dst_size, header.dict_size));
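	/* The output buffer doubles as the LZMA dictionary: it is used as a
	 * ring buffer and flushed to dst_fd each time dict_size bytes have been
	 * produced (see the full_write() calls in the main loop below). */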
	num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
	p = xmalloc(num_probs * sizeof(*p));
	num_probs += LZMA_LITERAL - LZMA_BASE_SIZE;
	for (i = 0; i < num_probs; i++)
		p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;
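	/* Every context starts out unbiased: (1 << 11) / 2 = 1024, i.e. p = 0.5 */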
	rc = rc_init(src_fd); /*, RC_BUFFER_SIZE); */
	while (global_pos + buffer_pos < header.dst_size) {
		int pos_state = (buffer_pos + global_pos) & pos_state_mask;
		uint16_t *prob = p + LZMA_IS_MATCH + (state << LZMA_NUM_POS_BITS_MAX) + pos_state;

		if (!rc_is_bit_1(rc, prob)) {
			static const char next_state[LZMA_NUM_STATES] =
				{ 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5 };
			int mi = 1;

			prob = (p + LZMA_LITERAL
				+ (LZMA_LIT_SIZE * ((((buffer_pos + global_pos) & literal_pos_mask) << lc)
					+ (previous_byte >> (8 - lc))
					)
				)
			);
			if (state >= LZMA_NUM_LIT_STATES) {
				int match_byte;
				uint32_t pos = buffer_pos - rep0;

				while (pos >= header.dict_size)
					pos += header.dict_size;
				match_byte = buffer[pos];
				do {
					int bit;

					match_byte <<= 1;
					bit = match_byte & 0x100;
					bit ^= (rc_get_bit(rc, prob + 0x100 + bit + mi, &mi) << 8); /* 0x100 or 0 */
					if (bit)
						break;
				} while (mi < 0x100);
			}
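			/* "Matched" literal: right after a match, the byte at distance
			 * rep0 is a good predictor of the new byte, so its bits steer
			 * the decoder between two halves of the literal table until the
			 * first mismatch; plain bit-tree decoding resumes below. */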
			while (mi < 0x100)
				rc_get_bit(rc, prob + mi, &mi);

			state = next_state[state];
			previous_byte = (uint8_t) mi;
#if ENABLE_FEATURE_LZMA_FAST
 one_byte1:
			buffer[buffer_pos++] = previous_byte;
			if (buffer_pos == header.dict_size) {
				buffer_pos = 0;
				global_pos += header.dict_size;
				if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
					goto bad;
				IF_DESKTOP(total_written += header.dict_size;)
			}
#else
			len = 1;
			goto one_byte2;
#endif
		} else {
			int offset;
			uint16_t *prob2;
#define prob_len prob2

			prob2 = p + LZMA_IS_REP + state;
			if (!rc_is_bit_1(rc, prob2)) {
				rep3 = rep2;
				rep2 = rep1;
				rep1 = rep0;
				state = state < LZMA_NUM_LIT_STATES ? 0 : 3;
				prob2 = p + LZMA_LEN_CODER;
			} else {
				prob2 += LZMA_IS_REP_G0 - LZMA_IS_REP;
				if (!rc_is_bit_1(rc, prob2)) {
					prob2 = (p + LZMA_IS_REP_0_LONG
						+ (state << LZMA_NUM_POS_BITS_MAX)
						+ pos_state
					);
					if (!rc_is_bit_1(rc, prob2)) {
#if ENABLE_FEATURE_LZMA_FAST
						uint32_t pos = buffer_pos - rep0;
						state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
						while (pos >= header.dict_size)
							pos += header.dict_size;
						previous_byte = buffer[pos];
						goto one_byte1;
#else
						state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
						len = 1;
						goto string;
#endif
					}
				} else {
					uint32_t distance;

					prob2 += LZMA_IS_REP_G1 - LZMA_IS_REP_G0;
					distance = rep1;
					if (rc_is_bit_1(rc, prob2)) {
						prob2 += LZMA_IS_REP_G2 - LZMA_IS_REP_G1;
						distance = rep2;
						if (rc_is_bit_1(rc, prob2)) {
							distance = rep3;
							rep3 = rep2;
						}
						rep2 = rep1;
					}
					rep1 = rep0;
					rep0 = distance;
				}
				state = state < LZMA_NUM_LIT_STATES ? 8 : 11;
				prob2 = p + LZMA_REP_LEN_CODER;
			}
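			/* rep0..rep3 hold the four most recently used match distances
			 * (a move-to-front history); the IS_REP_G0/G1/G2 bits above
			 * select which of them to reuse for this match. */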
			prob_len = prob2 + LZMA_LEN_CHOICE;
			num_bits = LZMA_LEN_NUM_LOW_BITS;
			if (!rc_is_bit_1(rc, prob_len)) {
				prob_len += LZMA_LEN_LOW - LZMA_LEN_CHOICE
					+ (pos_state << LZMA_LEN_NUM_LOW_BITS);
				offset = 0;
			} else {
				prob_len += LZMA_LEN_CHOICE_2 - LZMA_LEN_CHOICE;
				if (!rc_is_bit_1(rc, prob_len)) {
					prob_len += LZMA_LEN_MID - LZMA_LEN_CHOICE_2
						+ (pos_state << LZMA_LEN_NUM_MID_BITS);
					offset = 1 << LZMA_LEN_NUM_LOW_BITS;
					num_bits += LZMA_LEN_NUM_MID_BITS - LZMA_LEN_NUM_LOW_BITS;
				} else {
					prob_len += LZMA_LEN_HIGH - LZMA_LEN_CHOICE_2;
					offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
						+ (1 << LZMA_LEN_NUM_MID_BITS));
					num_bits += LZMA_LEN_NUM_HIGH_BITS - LZMA_LEN_NUM_LOW_BITS;
				}
			}
			rc_bit_tree_decode(rc, prob_len, num_bits, &len);
			len += offset;
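			/* The length just decoded comes in three bands: "low" (0..7)
			 * and "mid" (8..15) use per-pos_state 3-bit trees, "high"
			 * (16..271) uses one 8-bit tree; offset restores the base of
			 * the chosen band and LZMA_MATCH_MIN_LEN (2) is added below. */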
			if (state < 4) {
				int pos_slot;
				uint16_t *prob3;

				state += LZMA_NUM_LIT_STATES;
				prob3 = p + LZMA_POS_SLOT +
				       ((len < LZMA_NUM_LEN_TO_POS_STATES ? len :
				         LZMA_NUM_LEN_TO_POS_STATES - 1)
				         << LZMA_NUM_POS_SLOT_BITS);
				rc_bit_tree_decode(rc, prob3,
						LZMA_NUM_POS_SLOT_BITS, &pos_slot);
				rep0 = pos_slot;
				if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
					int i2, mi2, num_bits2 = (pos_slot >> 1) - 1;
					rep0 = 2 | (pos_slot & 1);
					if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
						rep0 <<= num_bits2;
						prob3 = p + LZMA_SPEC_POS + rep0 - pos_slot - 1;
					} else {
						for (; num_bits2 != LZMA_NUM_ALIGN_BITS; num_bits2--)
							rep0 = (rep0 << 1) | rc_direct_bit(rc);
						rep0 <<= LZMA_NUM_ALIGN_BITS;
						prob3 = p + LZMA_ALIGN;
					}
					i2 = 1;
					mi2 = 1;
					while (num_bits2--) {
						if (rc_get_bit(rc, prob3 + mi2, &mi2))
							rep0 |= i2;
						i2 <<= 1;
					}
				}
				if (++rep0 == 0)
					break;
			}

			len += LZMA_MATCH_MIN_LEN;
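			/* In the pos_slot block above, slots 0..3 encode the distance
			 * directly; larger slots add (pos_slot/2 - 1) extra bits, read
			 * from a reverse bit-tree for short distances or from direct
			 * bits plus a 4-bit "align" tree for long ones.  A distance of
			 * 0xFFFFFFFF (so ++rep0 == 0) is the end-of-stream marker. */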
 IF_NOT_FEATURE_LZMA_FAST(string:)
			do {
				uint32_t pos = buffer_pos - rep0;
				while (pos >= header.dict_size)
					pos += header.dict_size;
				previous_byte = buffer[pos];
 IF_NOT_FEATURE_LZMA_FAST(one_byte2:)
				buffer[buffer_pos++] = previous_byte;
				if (buffer_pos == header.dict_size) {
					buffer_pos = 0;
					global_pos += header.dict_size;
					if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
						goto bad;
					IF_DESKTOP(total_written += header.dict_size;)
				}
				len--;
			} while (len != 0 && buffer_pos < header.dst_size);
		}
	}
	{
		IF_NOT_DESKTOP(int total_written = 0; /* success */)
		IF_DESKTOP(total_written += buffer_pos;)
		if (full_write(dst_fd, buffer, buffer_pos) != (ssize_t)buffer_pos) {
 bad:
			total_written = -1; /* failure */
		}
		rc_free(rc);
		free(p);
		free(buffer);
		return total_written;
	}
}