File indexing completed on 2025-05-11 08:24:10
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011 #include "xz_private.h"
0012 #include "xz_lzma2.h"
0013
0014
0015
0016
0017 #define RC_INIT_BYTES 5
0018
0019
0020
0021
0022
0023
0024
0025
0026 #define LZMA_IN_REQUIRED 21
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
/*
 * Dictionary (history buffer).
 *
 * These always hold:   start <= pos <= full <= end
 *                      pos <= limit <= end
 */
struct dictionary {
	/* Beginning of the history buffer */
	uint8_t *buf;

	/* Old position in buf (before decoding more data) */
	size_t start;

	/* Position in buf */
	size_t pos;

	/*
	 * How full the dictionary is. Used together with "size" to detect
	 * corrupt input that would reference data before the beginning of
	 * the uncompressed stream.
	 */
	size_t full;

	/* Write limit; we don't write to buf[limit] or later bytes. */
	size_t limit;

	/*
	 * End of the dictionary buffer. In multi-call mode, this is
	 * the same as the dictionary size. In single-call mode, this
	 * indicates the size of the output buffer.
	 */
	size_t end;

	/*
	 * Size of the dictionary as specified in the Block Header. This is
	 * used together with "full" to detect corrupt input that would make
	 * us read beyond the beginning of the uncompressed stream.
	 */
	uint32_t size;

	/* Maximum allowed dictionary size in multi-call mode. */
	uint32_t size_max;

	/*
	 * Amount of memory currently allocated for the dictionary.
	 * Relevant only in DEC_IS_DYNALLOC mode (see xz_dec_lzma2_reset()).
	 */
	uint32_t allocated;

	/* Operation mode (single-call vs. multi-call variants) */
	enum xz_mode mode;
};
0093
0094
/* Range decoder state */
struct rc_dec {
	uint32_t range;
	uint32_t code;

	/*
	 * Number of initial bytes remaining to be read by rc_read_init().
	 * Set to RC_INIT_BYTES by rc_reset().
	 */
	uint32_t init_bytes_left;

	/*
	 * Buffer from which we read our input. Once in_pos goes past
	 * in_limit, decoding stops (see rc_limit_exceeded()).
	 */
	const uint8_t *in;
	size_t in_pos;
	size_t in_limit;
};
0113
0114
/* Probabilities for a length decoder */
struct lzma_len_dec {
	/* Probability of match length being at least 10 */
	uint16_t choice;

	/* Probability of match length being at least 18 */
	uint16_t choice2;

	/* Probabilities for match lengths 2-9 */
	uint16_t low[POS_STATES_MAX][LEN_LOW_SYMBOLS];

	/* Probabilities for match lengths 10-17 */
	uint16_t mid[POS_STATES_MAX][LEN_MID_SYMBOLS];

	/* Probabilities for match lengths 18-273 */
	uint16_t high[LEN_HIGH_SYMBOLS];
};
0131
struct lzma_dec {
	/* Distances of the four most recent matches */
	uint32_t rep0;
	uint32_t rep1;
	uint32_t rep2;
	uint32_t rep3;

	/* Types of the most recently seen LZMA symbols */
	enum lzma_state state;

	/*
	 * Length of a match. This is updated so that dict_repeat can
	 * be called again to finish repeating the whole match.
	 */
	uint32_t len;

	/*
	 * LZMA properties or related bit masks: number of literal context
	 * bits (lc), a mask derived from the number of literal position
	 * bits (lp), and a mask derived from the number of position bits
	 * (pb). See lzma_props().
	 */
	uint32_t lc;
	uint32_t literal_pos_mask; /* (1 << lp) - 1 */
	uint32_t pos_mask;         /* (1 << pb) - 1 */

	/* If 1, it's a match. Otherwise it's a single 8-bit literal. */
	uint16_t is_match[STATES][POS_STATES_MAX];

	/* If 1, it's a repeated match; the distance is one of rep0-rep3. */
	uint16_t is_rep[STATES];

	/*
	 * If 0, the distance of a repeated match is rep0.
	 * Otherwise check is_rep1.
	 */
	uint16_t is_rep0[STATES];

	/*
	 * If 0, the distance of a repeated match is rep1.
	 * Otherwise check is_rep2.
	 */
	uint16_t is_rep1[STATES];

	/* If 0, the distance of a repeated match is rep2; otherwise rep3. */
	uint16_t is_rep2[STATES];

	/*
	 * If 1, the repeated match has a length of one byte. Otherwise
	 * the length is decoded from rep_len_dec.
	 */
	uint16_t is_rep0_long[STATES][POS_STATES_MAX];

	/*
	 * Probability tree for the highest two bits of the match distance.
	 * There is a separate probability tree for match lengths of
	 * 2 (i.e. MATCH_LEN_MIN), 3, 4, and [5, 273].
	 */
	uint16_t dist_slot[DIST_STATES][DIST_SLOTS];

	/*
	 * Probabilities for additional distance bits when the distance is
	 * in the range [DIST_MODEL_START, DIST_MODEL_END).
	 */
	uint16_t dist_special[FULL_DISTANCES - DIST_MODEL_END];

	/*
	 * Probability tree for the lowest four bits of a match distance
	 * that is equal to or greater than DIST_MODEL_END.
	 */
	uint16_t dist_align[ALIGN_SIZE];

	/* Length of a normal match */
	struct lzma_len_dec match_len_dec;

	/* Length of a repeated match */
	struct lzma_len_dec rep_len_dec;

	/* Probabilities of literals */
	uint16_t literal[LITERAL_CODERS_MAX][LITERAL_CODER_SIZE];
};
0213
struct lzma2_dec {
	/* Position in the LZMA2 state machine of xz_dec_lzma2_run() */
	enum lzma2_seq {
		SEQ_CONTROL,
		SEQ_UNCOMPRESSED_1,
		SEQ_UNCOMPRESSED_2,
		SEQ_COMPRESSED_0,
		SEQ_COMPRESSED_1,
		SEQ_PROPERTIES,
		SEQ_LZMA_PREPARE,
		SEQ_LZMA_RUN,
		SEQ_COPY
	} sequence;

	/* Sequence to jump to after decoding the compressed-size field */
	enum lzma2_seq next_sequence;

	/* Uncompressed size of an LZMA chunk (2 MiB at maximum) */
	uint32_t uncompressed;

	/*
	 * Compressed size of an LZMA chunk, or compressed/uncompressed
	 * size of an uncompressed chunk (64 KiB at maximum)
	 */
	uint32_t compressed;

	/*
	 * True if dictionary reset is needed. This is false before the
	 * first chunk (LZMA or uncompressed) has reset the dictionary.
	 */
	bool need_dict_reset;

	/*
	 * True if new LZMA properties are needed. This is false
	 * once the first LZMA chunk has delivered properties.
	 */
	bool need_props;
};
0252
struct xz_dec_lzma2 {
	/*
	 * NOTE(review): the member order here likely mirrors the upstream
	 * layout that keeps the hottest fields (rc, dict, and the first
	 * part of lzma) close together for smaller addressing modes on
	 * x86 — confirm before reordering.
	 */
	struct rc_dec rc;
	struct dictionary dict;
	struct lzma2_dec lzma2;
	struct lzma_dec lzma;

	/*
	 * Temporary buffer which holds a small number of input bytes
	 * between decoder calls. See lzma2_lzma() for details.
	 */
	struct {
		uint32_t size;
		uint8_t buf[3 * LZMA_IN_REQUIRED];
	} temp;
};
0277
0278
0279
0280
0281
0282
0283
0284
0285
/*
 * Reset the dictionary state. In single-call mode, also set up the
 * dictionary to decode directly into the caller's output buffer.
 */
static void dict_reset(struct dictionary *dict, struct xz_buf *b)
{
	if (DEC_IS_SINGLE(dict->mode)) {
		/* Single-call: the output buffer IS the dictionary. */
		dict->buf = b->out + b->out_pos;
		dict->end = b->out_size - b->out_pos;
	}

	dict->start = 0;
	dict->pos = 0;
	dict->limit = 0;
	dict->full = 0;
}
0298
0299
0300 static void dict_limit(struct dictionary *dict, size_t out_max)
0301 {
0302 if (dict->end - dict->pos <= out_max)
0303 dict->limit = dict->end;
0304 else
0305 dict->limit = dict->pos + out_max;
0306 }
0307
0308
0309 static inline bool dict_has_space(const struct dictionary *dict)
0310 {
0311 return dict->pos < dict->limit;
0312 }
0313
0314
0315
0316
0317
0318
0319
0320 static inline uint32_t dict_get(const struct dictionary *dict, uint32_t dist)
0321 {
0322 size_t offset = dict->pos - dist - 1;
0323
0324 if (dist >= dict->pos)
0325 offset += dict->end;
0326
0327 return dict->full > 0 ? dict->buf[offset] : 0;
0328 }
0329
0330
0331
0332
0333 static inline void dict_put(struct dictionary *dict, uint8_t byte)
0334 {
0335 dict->buf[dict->pos++] = byte;
0336
0337 if (dict->full < dict->pos)
0338 dict->full = dict->pos;
0339 }
0340
0341
0342
0343
0344
0345
/*
 * Repeat *len bytes at distance dist. Returns false if the distance is
 * invalid (reaches before the beginning of the decoded stream or past the
 * declared dictionary size). Copies only up to dict->limit; the remaining
 * count is left in *len so the repeat can be resumed later.
 */
static bool dict_repeat(struct dictionary *dict, uint32_t *len, uint32_t dist)
{
	size_t back;
	uint32_t left;

	/* Corrupt input check: distance must stay within known history. */
	if (dist >= dict->full || dist >= dict->size)
		return false;

	left = min_t(size_t, dict->limit - dict->pos, *len);
	*len -= left;

	back = dict->pos - dist - 1;
	if (dist >= dict->pos)
		back += dict->end; /* wrap around the circular buffer */

	do {
		dict->buf[dict->pos++] = dict->buf[back++];
		if (back == dict->end)
			back = 0;
	} while (--left > 0);

	if (dict->full < dict->pos)
		dict->full = dict->pos;

	return true;
}
0372
0373
/* Copy uncompressed data as is from input to dictionary and output buffers. */
static void dict_uncompressed(struct dictionary *dict, struct xz_buf *b,
			      uint32_t *left)
{
	size_t copy_size;

	while (*left > 0 && b->in_pos < b->in_size
			&& b->out_pos < b->out_size) {
		/* Bound by input avail, output avail, dict room, and *left. */
		copy_size = min(b->in_size - b->in_pos,
				b->out_size - b->out_pos);
		if (copy_size > dict->end - dict->pos)
			copy_size = dict->end - dict->pos;
		if (copy_size > *left)
			copy_size = *left;

		*left -= copy_size;

		memcpy(dict->buf + dict->pos, b->in + b->in_pos, copy_size);
		dict->pos += copy_size;

		if (dict->full < dict->pos)
			dict->full = dict->pos;

		if (DEC_IS_MULTI(dict->mode)) {
			/* Wrap the circular dictionary buffer. */
			if (dict->pos == dict->end)
				dict->pos = 0;

			/*
			 * In multi-call mode the dictionary is separate from
			 * the output buffer, so the data must be copied to
			 * the output buffer too.
			 */
			memcpy(b->out + b->out_pos, b->in + b->in_pos,
					copy_size);
		}

		dict->start = dict->pos;

		b->out_pos += copy_size;
		b->in_pos += copy_size;
	}
}
0410
0411
0412
0413
0414
0415
/*
 * Flush pending data from the dictionary to b->out. It is assumed that
 * there is enough space in b->out; this is guaranteed because the caller
 * limited decoding with dict_limit() beforehand. Returns the number of
 * bytes flushed.
 */
static uint32_t dict_flush(struct dictionary *dict, struct xz_buf *b)
{
	size_t copy_size = dict->pos - dict->start;

	if (DEC_IS_MULTI(dict->mode)) {
		/* Wrap the circular dictionary buffer. */
		if (dict->pos == dict->end)
			dict->pos = 0;

		memcpy(b->out + b->out_pos, dict->buf + dict->start,
				copy_size);
	}

	dict->start = dict->pos;
	b->out_pos += copy_size;
	return copy_size;
}
0432
0433
0434
0435
0436
0437
0438 static void rc_reset(struct rc_dec *rc)
0439 {
0440 rc->range = (uint32_t)-1;
0441 rc->code = 0;
0442 rc->init_bytes_left = RC_INIT_BYTES;
0443 }
0444
0445
0446
0447
0448
0449 static bool rc_read_init(struct rc_dec *rc, struct xz_buf *b)
0450 {
0451 while (rc->init_bytes_left > 0) {
0452 if (b->in_pos == b->in_size)
0453 return false;
0454
0455 rc->code = (rc->code << 8) + b->in[b->in_pos++];
0456 --rc->init_bytes_left;
0457 }
0458
0459 return true;
0460 }
0461
0462
0463 static inline bool rc_limit_exceeded(const struct rc_dec *rc)
0464 {
0465 return rc->in_pos > rc->in_limit;
0466 }
0467
0468
0469
0470
0471
0472 static inline bool rc_is_finished(const struct rc_dec *rc)
0473 {
0474 return rc->code == 0;
0475 }
0476
0477 #ifdef __rtems__
0478 #pragma GCC diagnostic push
0479 #pragma GCC diagnostic ignored "-Wattributes"
0480 #endif
0481
/*
 * Read the next input byte if needed. If reading past the real input,
 * zero-filled bytes from temp.buf are consumed instead (see lzma2_lzma());
 * rc_limit_exceeded() catches that case afterwards.
 */
static __always_inline void rc_normalize(struct rc_dec *rc)
{
	if (rc->range < RC_TOP_VALUE) {
		rc->range <<= RC_SHIFT_BITS;
		rc->code = (rc->code << RC_SHIFT_BITS) + rc->in[rc->in_pos++];
	}
}
0489
0490
0491
0492
0493
0494
0495
0496
0497
0498
0499
0500
/*
 * Decode one bit using the adaptive probability *prob. The probability is
 * updated toward the decoded value (by RC_MOVE_BITS worth of adaptation).
 * Returns the decoded bit (0 or 1).
 */
static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
{
	uint32_t bound;
	int bit;

	rc_normalize(rc);
	bound = (rc->range >> RC_BIT_MODEL_TOTAL_BITS) * *prob;
	if (rc->code < bound) {
		/* Bit is 0: shrink range to the "0" interval, raise prob. */
		rc->range = bound;
		*prob += (RC_BIT_MODEL_TOTAL - *prob) >> RC_MOVE_BITS;
		bit = 0;
	} else {
		/* Bit is 1: take the "1" interval, lower prob. */
		rc->range -= bound;
		rc->code -= bound;
		*prob -= *prob >> RC_MOVE_BITS;
		bit = 1;
	}

	return bit;
}
0521
0522
/* Decode a bittree starting from the most significant bit. */
static __always_inline uint32_t rc_bittree(struct rc_dec *rc,
					   uint16_t *probs, uint32_t limit)
{
	uint32_t node = 1;

	do {
		node = (node << 1) + (uint32_t)rc_bit(rc, &probs[node]);
	} while (node < limit);

	return node;
}
0537
0538
/* Decode a bittree starting from the least significant bit. */
static __always_inline void rc_bittree_reverse(struct rc_dec *rc,
					       uint16_t *probs,
					       uint32_t *dest, uint32_t limit)
{
	uint32_t node = 1;
	uint32_t bit_pos = 0;

	do {
		uint32_t bit = (uint32_t)rc_bit(rc, &probs[node]);

		node = (node << 1) + bit;
		*dest += bit << bit_pos;
	} while (++bit_pos < limit);
}
0555 #ifdef __rtems__
0556 #pragma GCC diagnostic pop
0557 #endif
0558
0559
/* Decode direct bits (fixed fifty-fifty probability). */
static inline void rc_direct(struct rc_dec *rc, uint32_t *dest, uint32_t limit)
{
	uint32_t mask;

	do {
		rc_normalize(rc);
		rc->range >>= 1;
		rc->code -= rc->range;
		/*
		 * Branchless bit extraction: if the subtraction wrapped
		 * (top bit of code set), mask is all ones, the subtraction
		 * is undone via "code += range & mask", and the decoded bit
		 * (mask + 1) is 0. Otherwise mask is 0 and the bit is 1.
		 */
		mask = (uint32_t)0 - (rc->code >> 31);
		rc->code += rc->range & mask;
		*dest = (*dest << 1) + (mask + 1);
	} while (--limit > 0);
}
0573
0574
0575
0576
0577
0578
0579 static uint16_t *lzma_literal_probs(struct xz_dec_lzma2 *s)
0580 {
0581 uint32_t prev_byte = dict_get(&s->dict, 0);
0582 uint32_t low = prev_byte >> (8 - s->lzma.lc);
0583 uint32_t high = (s->dict.pos & s->lzma.literal_pos_mask) << s->lzma.lc;
0584 return s->lzma.literal[low + high];
0585 }
0586
0587
/* Decode a literal (one 8-bit byte) */
static void lzma_literal(struct xz_dec_lzma2 *s)
{
	uint16_t *probs;
	uint32_t symbol;
	uint32_t match_byte;
	uint32_t match_bit;
	uint32_t offset;
	uint32_t i;

	probs = lzma_literal_probs(s);

	if (lzma_state_is_literal(s->lzma.state)) {
		symbol = rc_bittree(&s->rc, probs, 0x100);
	} else {
		/*
		 * "Matched literal": right after a match, the byte at
		 * distance rep0 selects which half of the probability
		 * table is used until the decoded bits diverge from it.
		 */
		symbol = 1;
		match_byte = dict_get(&s->dict, s->lzma.rep0) << 1;
		offset = 0x100;

		do {
			match_bit = match_byte & offset;
			match_byte <<= 1;
			i = offset + match_bit + symbol;

			if (rc_bit(&s->rc, &probs[i])) {
				symbol = (symbol << 1) + 1;
				offset &= match_bit;
			} else {
				symbol <<= 1;
				offset &= ~match_bit;
			}
		} while (symbol < 0x100);
	}

	dict_put(&s->dict, (uint8_t)symbol);
	lzma_state_literal(&s->lzma.state);
}
0624
0625
/* Decode the length of the match into s->lzma.len. */
static void lzma_len(struct xz_dec_lzma2 *s, struct lzma_len_dec *l,
		     uint32_t pos_state)
{
	uint16_t *probs;
	uint32_t limit;

	if (!rc_bit(&s->rc, &l->choice)) {
		/* Lengths MATCH_LEN_MIN .. MATCH_LEN_MIN + 7 */
		probs = l->low[pos_state];
		limit = LEN_LOW_SYMBOLS;
		s->lzma.len = MATCH_LEN_MIN;
	} else {
		if (!rc_bit(&s->rc, &l->choice2)) {
			/* Mid-range lengths */
			probs = l->mid[pos_state];
			limit = LEN_MID_SYMBOLS;
			s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS;
		} else {
			/* Long lengths, up to 273 */
			probs = l->high;
			limit = LEN_HIGH_SYMBOLS;
			s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS
					+ LEN_MID_SYMBOLS;
		}
	}

	/* rc_bittree() returns "limit + value"; strip the offset. */
	s->lzma.len += rc_bittree(&s->rc, probs, limit) - limit;
}
0651
0652
/* Decode a match. The distance will be stored in s->lzma.rep0. */
static void lzma_match(struct xz_dec_lzma2 *s, uint32_t pos_state)
{
	uint16_t *probs;
	uint32_t dist_slot;
	uint32_t limit;

	lzma_state_match(&s->lzma.state);

	/* Push the previous distances down the history. */
	s->lzma.rep3 = s->lzma.rep2;
	s->lzma.rep2 = s->lzma.rep1;
	s->lzma.rep1 = s->lzma.rep0;

	lzma_len(s, &s->lzma.match_len_dec, pos_state);

	probs = s->lzma.dist_slot[lzma_get_dist_state(s->lzma.len)];
	dist_slot = rc_bittree(&s->rc, probs, DIST_SLOTS) - DIST_SLOTS;

	if (dist_slot < DIST_MODEL_START) {
		/* Small distances are encoded directly by the slot. */
		s->lzma.rep0 = dist_slot;
	} else {
		/* Slot encodes the top two bits plus a bit count. */
		limit = (dist_slot >> 1) - 1;
		s->lzma.rep0 = 2 + (dist_slot & 1);

		if (dist_slot < DIST_MODEL_END) {
			/*
			 * Mid-range distance: remaining bits come from a
			 * reverse bittree with adaptive probabilities.
			 */
			s->lzma.rep0 <<= limit;
			probs = s->lzma.dist_special + s->lzma.rep0
					- dist_slot - 1;
			rc_bittree_reverse(&s->rc, probs,
					&s->lzma.rep0, limit);
		} else {
			/*
			 * Large distance: high bits are fifty-fifty direct
			 * bits; the lowest ALIGN_BITS use the align tree.
			 */
			rc_direct(&s->rc, &s->lzma.rep0, limit - ALIGN_BITS);
			s->lzma.rep0 <<= ALIGN_BITS;
			rc_bittree_reverse(&s->rc, s->lzma.dist_align,
					&s->lzma.rep0, ALIGN_BITS);
		}
	}
}
0690
0691
0692
0693
0694
/*
 * Decode a repeated match. The distance is one of the four most recently
 * seen matches and ends up in s->lzma.rep0.
 */
static void lzma_rep_match(struct xz_dec_lzma2 *s, uint32_t pos_state)
{
	uint32_t tmp;

	if (!rc_bit(&s->rc, &s->lzma.is_rep0[s->lzma.state])) {
		if (!rc_bit(&s->rc, &s->lzma.is_rep0_long[
				s->lzma.state][pos_state])) {
			/* "Short rep": a single byte at distance rep0. */
			lzma_state_short_rep(&s->lzma.state);
			s->lzma.len = 1;
			return;
		}
	} else {
		/* Select rep1/rep2/rep3 and rotate it to the front. */
		if (!rc_bit(&s->rc, &s->lzma.is_rep1[s->lzma.state])) {
			tmp = s->lzma.rep1;
		} else {
			if (!rc_bit(&s->rc, &s->lzma.is_rep2[s->lzma.state])) {
				tmp = s->lzma.rep2;
			} else {
				tmp = s->lzma.rep3;
				s->lzma.rep3 = s->lzma.rep2;
			}

			s->lzma.rep2 = s->lzma.rep1;
		}

		s->lzma.rep1 = s->lzma.rep0;
		s->lzma.rep0 = tmp;
	}

	lzma_state_long_rep(&s->lzma.state);
	lzma_len(s, &s->lzma.rep_len_dec, pos_state);
}
0727
0728
/* LZMA decoder core. Returns false on corrupt input. */
static bool lzma_main(struct xz_dec_lzma2 *s)
{
	uint32_t pos_state;

	/*
	 * If the dictionary limit was reached during the previous call,
	 * finish the possibly still pending repeat first.
	 */
	if (dict_has_space(&s->dict) && s->lzma.len > 0)
		dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0);

	/*
	 * Decode more LZMA symbols. One iteration may consume up to
	 * LZMA_IN_REQUIRED - 1 input bytes, which is why the caller
	 * guarantees that much readable input past in_limit.
	 */
	while (dict_has_space(&s->dict) && !rc_limit_exceeded(&s->rc)) {
		pos_state = s->dict.pos & s->lzma.pos_mask;

		if (!rc_bit(&s->rc, &s->lzma.is_match[
				s->lzma.state][pos_state])) {
			lzma_literal(s);
		} else {
			if (rc_bit(&s->rc, &s->lzma.is_rep[s->lzma.state]))
				lzma_rep_match(s, pos_state);
			else
				lzma_match(s, pos_state);

			/* An invalid distance means corrupt input. */
			if (!dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0))
				return false;
		}
	}

	/*
	 * Keep the range decoder normalized whenever we are outside this
	 * function; it simplifies end-of-chunk handling in the caller.
	 */
	rc_normalize(&s->rc);

	return true;
}
0769
0770
0771
0772
0773
/*
 * Reset the LZMA decoder and range decoder state. The dictionary is not
 * reset here, because LZMA state may be reset without resetting it.
 */
static void lzma_reset(struct xz_dec_lzma2 *s)
{
	uint16_t *probs;
	size_t i;

	s->lzma.state = STATE_LIT_LIT;
	s->lzma.rep0 = 0;
	s->lzma.rep1 = 0;
	s->lzma.rep2 = 0;
	s->lzma.rep3 = 0;

	/*
	 * Initialize all probabilities to the same neutral value with one
	 * flat loop instead of one loop per array. This relies on is_match
	 * being the first probability array in struct lzma_dec and on all
	 * PROBS_TOTAL uint16_t probabilities being laid out contiguously
	 * after it. NOTE(review): assumes no padding between the uint16_t
	 * members of struct lzma_dec — holds on common ABIs, but worth
	 * confirming if the struct is ever reordered.
	 */
	probs = s->lzma.is_match[0];
	for (i = 0; i < PROBS_TOTAL; ++i)
		probs[i] = RC_BIT_MODEL_TOTAL / 2;

	rc_reset(&s->rc);
}
0800
0801
0802
0803
0804
0805
/*
 * Decode and validate LZMA properties (lc/lp/pb), calculate the bit masks
 * from lp and pb, and reset the LZMA decoder state on success. The encoded
 * byte is props = (pb * 5 + lp) * 9 + lc.
 */
static bool lzma_props(struct xz_dec_lzma2 *s, uint8_t props)
{
	/* Maximum valid value: pb = 4, lp = 4, lc = 8. */
	if (props > (4 * 5 + 4) * 9 + 8)
		return false;

	/* Extract pb, leaving (lp * 9 + lc) in props. */
	s->lzma.pos_mask = 0;
	while (props >= 9 * 5) {
		props -= 9 * 5;
		++s->lzma.pos_mask;
	}

	s->lzma.pos_mask = (1 << s->lzma.pos_mask) - 1;

	/* Extract lp, leaving lc in props. */
	s->lzma.literal_pos_mask = 0;
	while (props >= 9) {
		props -= 9;
		++s->lzma.literal_pos_mask;
	}

	s->lzma.lc = props;

	/* LZMA2 additionally requires lc + lp <= 4. */
	if (s->lzma.lc + s->lzma.literal_pos_mask > 4)
		return false;

	s->lzma.literal_pos_mask = (1 << s->lzma.literal_pos_mask) - 1;

	lzma_reset(s);

	return true;
}
0836
0837
0838
0839
0840
0841
0842
0843
0844
0845
0846
0847
0848
0849
0850
0851
0852
/*
 * The LZMA decoder assumes that if its input limit hasn't been exceeded,
 * it is safe to read up to LZMA_IN_REQUIRED bytes. This wrapper makes that
 * assumption safe: while plenty of the current chunk remains, we decode
 * straight from the caller's buffer until only LZMA_IN_REQUIRED bytes are
 * left; those are stashed in s->temp.buf and (hopefully) completed on the
 * next call, after which decoding continues from the caller's buffer.
 */
static bool lzma2_lzma(struct xz_dec_lzma2 *s, struct xz_buf *b)
{
	size_t in_avail;
	uint32_t tmp;

	in_avail = b->in_size - b->in_pos;
	if (s->temp.size > 0 || s->lzma2.compressed == 0) {
		/* Top up temp.buf, bounded by chunk size and input avail. */
		tmp = 2 * LZMA_IN_REQUIRED - s->temp.size;
		if (tmp > s->lzma2.compressed - s->temp.size)
			tmp = s->lzma2.compressed - s->temp.size;
		if (tmp > in_avail)
			tmp = in_avail;

		memcpy(s->temp.buf + s->temp.size, b->in + b->in_pos, tmp);

		if (s->temp.size + tmp == s->lzma2.compressed) {
			/*
			 * The whole chunk ends inside temp.buf. Zero the
			 * tail so rc_normalize() reads zeros rather than
			 * stale data past the end of the chunk.
			 */
			memzero(s->temp.buf + s->temp.size + tmp,
					sizeof(s->temp.buf)
						- s->temp.size - tmp);
			s->rc.in_limit = s->temp.size + tmp;
		} else if (s->temp.size + tmp < LZMA_IN_REQUIRED) {
			/* Still not enough buffered input; ask for more. */
			s->temp.size += tmp;
			b->in_pos += tmp;
			return true;
		} else {
			s->rc.in_limit = s->temp.size + tmp - LZMA_IN_REQUIRED;
		}

		/* Decode from the temporary buffer. */
		s->rc.in = s->temp.buf;
		s->rc.in_pos = 0;

		if (!lzma_main(s) || s->rc.in_pos > s->temp.size + tmp)
			return false;

		s->lzma2.compressed -= s->rc.in_pos;

		if (s->rc.in_pos < s->temp.size) {
			/* Part of the old temp data is still unconsumed. */
			s->temp.size -= s->rc.in_pos;
			memmove(s->temp.buf, s->temp.buf + s->rc.in_pos,
					s->temp.size);
			return true;
		}

		b->in_pos += s->rc.in_pos - s->temp.size;
		s->temp.size = 0;
	}

	in_avail = b->in_size - b->in_pos;
	if (in_avail >= LZMA_IN_REQUIRED) {
		/* Decode directly from the caller's input buffer. */
		s->rc.in = b->in;
		s->rc.in_pos = b->in_pos;

		if (in_avail >= s->lzma2.compressed + LZMA_IN_REQUIRED)
			s->rc.in_limit = b->in_pos + s->lzma2.compressed;
		else
			s->rc.in_limit = b->in_size - LZMA_IN_REQUIRED;

		if (!lzma_main(s))
			return false;

		in_avail = s->rc.in_pos - b->in_pos;
		if (in_avail > s->lzma2.compressed)
			return false;

		s->lzma2.compressed -= in_avail;
		b->in_pos = s->rc.in_pos;
	}

	in_avail = b->in_size - b->in_pos;
	if (in_avail < LZMA_IN_REQUIRED) {
		/* Stash the remaining tail into temp.buf for the next call. */
		if (in_avail > s->lzma2.compressed)
			in_avail = s->lzma2.compressed;

		memcpy(s->temp.buf, b->in + b->in_pos, in_avail);
		s->temp.size = in_avail;
		b->in_pos += in_avail;
	}

	return true;
}
0933
0934
0935
0936
0937
/*
 * Take care of the LZMA2 control layer, and forward the job of actual LZMA
 * decoding or copying of uncompressed chunks to other functions.
 */
XZ_EXTERN enum xz_ret xz_dec_lzma2_run(struct xz_dec_lzma2 *s,
				       struct xz_buf *b)
{
	uint32_t tmp;

	while (b->in_pos < b->in_size || s->lzma2.sequence == SEQ_LZMA_RUN) {
		switch (s->lzma2.sequence) {
		case SEQ_CONTROL:
			/*
			 * LZMA2 control byte
			 *
			 * Exact values:
			 *   0x00  End marker
			 *   0x01  Dictionary reset followed by an
			 *         uncompressed chunk
			 *   0x02  Uncompressed chunk (no dictionary reset)
			 *
			 * Highest three bits (tmp & 0xE0):
			 *   0x80  LZMA chunk (no dictionary or state reset)
			 *   0xA0  LZMA chunk (state reset using old
			 *         properties)
			 *   0xC0  LZMA chunk (state reset with new
			 *         properties)
			 *   0xE0  LZMA chunk (state reset with new
			 *         properties, followed by dictionary reset)
			 *
			 * For LZMA chunks, the lowest five bits
			 * (tmp & 0x1F) are bits 16-20 of the uncompressed
			 * size.
			 *
			 * A new LZMA2 stream must begin with a dictionary
			 * reset, and the first LZMA chunk must set new
			 * properties and reset the LZMA state. Anything
			 * not matching the above is XZ_DATA_ERROR.
			 */
			tmp = b->in[b->in_pos++];

			if (tmp == 0x00)
				return XZ_STREAM_END;

			if (tmp >= 0xE0 || tmp == 0x01) {
				s->lzma2.need_props = true;
				s->lzma2.need_dict_reset = false;
				dict_reset(&s->dict, b);
			} else if (s->lzma2.need_dict_reset) {
				return XZ_DATA_ERROR;
			}

			if (tmp >= 0x80) {
				s->lzma2.uncompressed = (tmp & 0x1F) << 16;
				s->lzma2.sequence = SEQ_UNCOMPRESSED_1;

				if (tmp >= 0xC0) {
					/*
					 * When there are new properties,
					 * the state reset is done in
					 * SEQ_PROPERTIES.
					 */
					s->lzma2.need_props = false;
					s->lzma2.next_sequence
							= SEQ_PROPERTIES;

				} else if (s->lzma2.need_props) {
					return XZ_DATA_ERROR;

				} else {
					s->lzma2.next_sequence
							= SEQ_LZMA_PREPARE;
					if (tmp >= 0xA0)
						lzma_reset(s);
				}
			} else {
				if (tmp > 0x02)
					return XZ_DATA_ERROR;

				s->lzma2.sequence = SEQ_COMPRESSED_0;
				s->lzma2.next_sequence = SEQ_COPY;
			}

			break;

		case SEQ_UNCOMPRESSED_1:
			s->lzma2.uncompressed
					+= (uint32_t)b->in[b->in_pos++] << 8;
			s->lzma2.sequence = SEQ_UNCOMPRESSED_2;
			break;

		case SEQ_UNCOMPRESSED_2:
			/* Stored size is "actual size - 1". */
			s->lzma2.uncompressed
					+= (uint32_t)b->in[b->in_pos++] + 1;
			s->lzma2.sequence = SEQ_COMPRESSED_0;
			break;

		case SEQ_COMPRESSED_0:
			s->lzma2.compressed
					= (uint32_t)b->in[b->in_pos++] << 8;
			s->lzma2.sequence = SEQ_COMPRESSED_1;
			break;

		case SEQ_COMPRESSED_1:
			s->lzma2.compressed
					+= (uint32_t)b->in[b->in_pos++] + 1;
			s->lzma2.sequence = s->lzma2.next_sequence;
			break;

		case SEQ_PROPERTIES:
			if (!lzma_props(s, b->in[b->in_pos++]))
				return XZ_DATA_ERROR;

			s->lzma2.sequence = SEQ_LZMA_PREPARE;

			/* Fall through */

		case SEQ_LZMA_PREPARE:
			if (s->lzma2.compressed < RC_INIT_BYTES)
				return XZ_DATA_ERROR;

			if (!rc_read_init(&s->rc, b))
				return XZ_OK;

			s->lzma2.compressed -= RC_INIT_BYTES;
			s->lzma2.sequence = SEQ_LZMA_RUN;

			/* Fall through */

		case SEQ_LZMA_RUN:
			/*
			 * Set the dictionary limit to indicate how much we
			 * want decoded at most, decode new data into the
			 * dictionary, then flush it to b->out. If the
			 * dictionary became full before the output buffer
			 * did, this case may run multiple times without
			 * changing s->lzma2.sequence.
			 */
			dict_limit(&s->dict, min_t(size_t,
					b->out_size - b->out_pos,
					s->lzma2.uncompressed));
			if (!lzma2_lzma(s, b))
				return XZ_DATA_ERROR;

			s->lzma2.uncompressed -= dict_flush(&s->dict, b);

			if (s->lzma2.uncompressed == 0) {
				/* End of chunk: validate it fully decoded. */
				if (s->lzma2.compressed > 0 || s->lzma.len > 0
						|| !rc_is_finished(&s->rc))
					return XZ_DATA_ERROR;

				rc_reset(&s->rc);
				s->lzma2.sequence = SEQ_CONTROL;

			} else if (b->out_pos == b->out_size
					|| (b->in_pos == b->in_size
						&& s->temp.size
						< s->lzma2.compressed)) {
				/* Need more output space or input data. */
				return XZ_OK;
			}

			break;

		case SEQ_COPY:
			dict_uncompressed(&s->dict, b, &s->lzma2.compressed);
			if (s->lzma2.compressed > 0)
				return XZ_OK;

			s->lzma2.sequence = SEQ_CONTROL;
			break;
		}
	}

	return XZ_OK;
}
1114
/*
 * Allocate the decoder state. In DEC_IS_PREALLOC mode the whole dictionary
 * (dict_max bytes) is allocated here as well; in DEC_IS_DYNALLOC mode the
 * dictionary allocation is deferred to xz_dec_lzma2_reset() when the real
 * size is known. Returns NULL on allocation failure.
 */
XZ_EXTERN struct xz_dec_lzma2 *xz_dec_lzma2_create(enum xz_mode mode,
						   uint32_t dict_max)
{
	struct xz_dec_lzma2 *s = kmalloc(sizeof(*s), GFP_KERNEL);
	if (s == NULL)
		return NULL;

	s->dict.mode = mode;
	s->dict.size_max = dict_max;

	if (DEC_IS_PREALLOC(mode)) {
		s->dict.buf = vmalloc(dict_max);
		if (s->dict.buf == NULL) {
			kfree(s);
			return NULL;
		}
	} else if (DEC_IS_DYNALLOC(mode)) {
		s->dict.buf = NULL;
		s->dict.allocated = 0;
	}

	return s;
}
1138
1139 XZ_EXTERN enum xz_ret xz_dec_lzma2_reset(struct xz_dec_lzma2 *s, uint8_t props)
1140 {
1141
1142 if (props > 39)
1143 return XZ_OPTIONS_ERROR;
1144
1145 s->dict.size = 2 + (props & 1);
1146 s->dict.size <<= (props >> 1) + 11;
1147
1148 if (DEC_IS_MULTI(s->dict.mode)) {
1149 if (s->dict.size > s->dict.size_max)
1150 return XZ_MEMLIMIT_ERROR;
1151
1152 s->dict.end = s->dict.size;
1153
1154 if (DEC_IS_DYNALLOC(s->dict.mode)) {
1155 if (s->dict.allocated < s->dict.size) {
1156 vfree(s->dict.buf);
1157 s->dict.buf = vmalloc(s->dict.size);
1158 if (s->dict.buf == NULL) {
1159 s->dict.allocated = 0;
1160 return XZ_MEM_ERROR;
1161 }
1162 }
1163 }
1164 }
1165
1166 s->lzma.len = 0;
1167
1168 s->lzma2.sequence = SEQ_CONTROL;
1169 s->lzma2.need_dict_reset = true;
1170
1171 s->temp.size = 0;
1172
1173 return XZ_OK;
1174 }
1175
/* Free the memory allocated for the LZMA2 decoder. */
XZ_EXTERN void xz_dec_lzma2_end(struct xz_dec_lzma2 *s)
{
	/* The dictionary buffer exists only in the multi-call modes. */
	if (DEC_IS_MULTI(s->dict.mode))
		vfree(s->dict.buf);

	kfree(s);
}