comparison zlib/trees.c @ 2690:9ef72ee5c0b0
Update vendored zlib to 1.3.1
author | Michael Pavone <pavone@retrodev.com> |
---|---|
date | Sun, 15 Jun 2025 15:39:33 -0700 |
parents | 00d788dac91a |
children |
2689:bd6e33de0972 | 2690:9ef72ee5c0b0 |
---|---|
1 /* trees.c -- output deflated data using Huffman coding | 1 /* trees.c -- output deflated data using Huffman coding |
2 * Copyright (C) 1995-2017 Jean-loup Gailly | 2 * Copyright (C) 1995-2024 Jean-loup Gailly |
3 * detect_data_type() function provided freely by Cosmin Truta, 2006 | 3 * detect_data_type() function provided freely by Cosmin Truta, 2006 |
4 * For conditions of distribution and use, see copyright notice in zlib.h | 4 * For conditions of distribution and use, see copyright notice in zlib.h |
5 */ | 5 */ |
6 | 6 |
7 /* | 7 /* |
120 int extra_base; /* base index for extra_bits */ | 120 int extra_base; /* base index for extra_bits */ |
121 int elems; /* max number of elements in the tree */ | 121 int elems; /* max number of elements in the tree */ |
122 int max_length; /* max bit length for the codes */ | 122 int max_length; /* max bit length for the codes */ |
123 }; | 123 }; |
124 | 124 |
125 local const static_tree_desc static_l_desc = | 125 #ifdef NO_INIT_GLOBAL_POINTERS |
126 # define TCONST | |
127 #else | |
128 # define TCONST const | |
129 #endif | |
130 | |
131 local TCONST static_tree_desc static_l_desc = | |
126 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; | 132 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; |
127 | 133 |
128 local const static_tree_desc static_d_desc = | 134 local TCONST static_tree_desc static_d_desc = |
129 {static_dtree, extra_dbits, 0, D_CODES, MAX_BITS}; | 135 {static_dtree, extra_dbits, 0, D_CODES, MAX_BITS}; |
130 | 136 |
131 local const static_tree_desc static_bl_desc = | 137 local TCONST static_tree_desc static_bl_desc = |
132 {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; | 138 {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; |
133 | 139 |
134 /* =========================================================================== | 140 /* =========================================================================== |
135 * Local (static) routines in this file. | 141 * Output a short LSB first on the stream. |
136 */ | 142 * IN assertion: there is enough room in pendingBuf. |
137 | 143 */ |
138 local void tr_static_init OF((void)); | 144 #define put_short(s, w) { \ |
139 local void init_block OF((deflate_state *s)); | 145 put_byte(s, (uch)((w) & 0xff)); \ |
140 local void pqdownheap OF((deflate_state *s, ct_data *tree, int k)); | 146 put_byte(s, (uch)((ush)(w) >> 8)); \ |
141 local void gen_bitlen OF((deflate_state *s, tree_desc *desc)); | 147 } |
142 local void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count)); | 148 |
143 local void build_tree OF((deflate_state *s, tree_desc *desc)); | 149 /* =========================================================================== |
144 local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code)); | 150 * Reverse the first len bits of a code, using straightforward code (a faster |
145 local void send_tree OF((deflate_state *s, ct_data *tree, int max_code)); | 151 * method would use a table) |
146 local int build_bl_tree OF((deflate_state *s)); | 152 * IN assertion: 1 <= len <= 15 |
147 local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes, | 153 */ |
148 int blcodes)); | 154 local unsigned bi_reverse(unsigned code, int len) { |
149 local void compress_block OF((deflate_state *s, const ct_data *ltree, | 155 register unsigned res = 0; |
150 const ct_data *dtree)); | 156 do { |
151 local int detect_data_type OF((deflate_state *s)); | 157 res |= code & 1; |
152 local unsigned bi_reverse OF((unsigned value, int length)); | 158 code >>= 1, res <<= 1; |
153 local void bi_windup OF((deflate_state *s)); | 159 } while (--len > 0); |
154 local void bi_flush OF((deflate_state *s)); | 160 return res >> 1; |
161 } | |
162 | |
163 /* =========================================================================== | |
164 * Flush the bit buffer, keeping at most 7 bits in it. | |
165 */ | |
166 local void bi_flush(deflate_state *s) { | |
167 if (s->bi_valid == 16) { | |
168 put_short(s, s->bi_buf); | |
169 s->bi_buf = 0; | |
170 s->bi_valid = 0; | |
171 } else if (s->bi_valid >= 8) { | |
172 put_byte(s, (Byte)s->bi_buf); | |
173 s->bi_buf >>= 8; | |
174 s->bi_valid -= 8; | |
175 } | |
176 } | |
177 | |
178 /* =========================================================================== | |
179 * Flush the bit buffer and align the output on a byte boundary | |
180 */ | |
181 local void bi_windup(deflate_state *s) { | |
182 if (s->bi_valid > 8) { | |
183 put_short(s, s->bi_buf); | |
184 } else if (s->bi_valid > 0) { | |
185 put_byte(s, (Byte)s->bi_buf); | |
186 } | |
187 s->bi_buf = 0; | |
188 s->bi_valid = 0; | |
189 #ifdef ZLIB_DEBUG | |
190 s->bits_sent = (s->bits_sent + 7) & ~7; | |
191 #endif | |
192 } | |
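The helpers above (put_short, bi_flush, bi_windup, and send_bits further down) all share one 16-bit bit buffer, bi_buf/bi_valid: codes are packed into it LSB first and spilled a byte or a short at a time. A minimal standalone sketch of that accumulate-and-spill pattern, using illustrative names (BitWriter, bw_send_bits) rather than zlib's API:

    /* Minimal sketch of the bi_buf/bi_valid pattern used by send_bits(),
     * bi_flush() and bi_windup(): codes are packed LSB first into a 16-bit
     * buffer and spilled to the output a byte at a time.  Illustrative
     * names; not part of zlib. */
    #include <stdio.h>

    typedef struct {
        unsigned short buf;      /* pending bits, like s->bi_buf */
        int valid;               /* number of valid bits, like s->bi_valid */
        unsigned char out[64];
        int len;
    } BitWriter;

    static void bw_put_byte(BitWriter *w, unsigned char b) { w->out[w->len++] = b; }

    static void bw_send_bits(BitWriter *w, unsigned value, int length) {
        if (w->valid > 16 - length) {                 /* buffer would overflow */
            w->buf |= (unsigned short)(value << w->valid);
            bw_put_byte(w, (unsigned char)(w->buf & 0xff));   /* LSB first */
            bw_put_byte(w, (unsigned char)(w->buf >> 8));
            w->buf = (unsigned short)(value >> (16 - w->valid));
            w->valid += length - 16;
        } else {
            w->buf |= (unsigned short)(value << w->valid);
            w->valid += length;
        }
    }

    static void bw_windup(BitWriter *w) {             /* like bi_windup(): byte-align */
        if (w->valid > 8) {
            bw_put_byte(w, (unsigned char)(w->buf & 0xff));
            bw_put_byte(w, (unsigned char)(w->buf >> 8));
        } else if (w->valid > 0) {
            bw_put_byte(w, (unsigned char)w->buf);
        }
        w->buf = 0;
        w->valid = 0;
    }

    int main(void) {
        BitWriter w = {0, 0, {0}, 0};
        bw_send_bits(&w, 0x3, 3);    /* bits 1,1,0          */
        bw_send_bits(&w, 0x15, 7);   /* bits 1,0,1,0,1,0,0  */
        bw_windup(&w);
        for (int i = 0; i < w.len; i++) printf("%02x ", w.out[i]);
        printf("\n");                /* prints: ab 00 */
        return 0;
    }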
193 | |
194 /* =========================================================================== | |
195 * Generate the codes for a given tree and bit counts (which need not be | |
196 * optimal). | |
197 * IN assertion: the array bl_count contains the bit length statistics for | |
198 * the given tree and the field len is set for all tree elements. | |
199 * OUT assertion: the field code is set for all tree elements of non | |
200 * zero code length. | |
201 */ | |
202 local void gen_codes(ct_data *tree, int max_code, ushf *bl_count) { | |
203 ush next_code[MAX_BITS+1]; /* next code value for each bit length */ | |
204 unsigned code = 0; /* running code value */ | |
205 int bits; /* bit index */ | |
206 int n; /* code index */ | |
207 | |
208 /* The distribution counts are first used to generate the code values | |
209 * without bit reversal. | |
210 */ | |
211 for (bits = 1; bits <= MAX_BITS; bits++) { | |
212 code = (code + bl_count[bits - 1]) << 1; | |
213 next_code[bits] = (ush)code; | |
214 } | |
215 /* Check that the bit counts in bl_count are consistent. The last code | |
216 * must be all ones. | |
217 */ | |
218 Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1, | |
219 "inconsistent bit counts"); | |
220 Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); | |
221 | |
222 for (n = 0; n <= max_code; n++) { | |
223 int len = tree[n].Len; | |
224 if (len == 0) continue; | |
225 /* Now reverse the bits */ | |
226 tree[n].Code = (ush)bi_reverse(next_code[len]++, len); | |
227 | |
228 Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ", | |
229 n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1)); | |
230 } | |
231 } | |
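gen_codes above is the canonical-code construction of RFC 1951, section 3.2.2: count the codes of each bit length, derive the smallest code value for each length, then hand out consecutive codes in symbol order. A standalone worked example (illustrative, not zlib code) with the RFC's sample alphabet A..H and lengths {3,3,3,3,3,2,4,4}:

    /* Worked example of the canonical-code recurrence used by gen_codes(),
     * with the sample alphabet from RFC 1951 section 3.2.2. */
    #include <stdio.h>

    #define MAXBITS 4

    int main(void) {
        const int len[8] = {3, 3, 3, 3, 3, 2, 4, 4};
        int bl_count[MAXBITS + 1] = {0};
        unsigned next_code[MAXBITS + 1] = {0};
        unsigned code = 0;
        int n, bits;

        for (n = 0; n < 8; n++) bl_count[len[n]]++;       /* codes per bit length */

        /* Same two lines as in gen_codes(): smallest code for each length. */
        for (bits = 1; bits <= MAXBITS; bits++) {
            code = (code + bl_count[bits - 1]) << 1;
            next_code[bits] = code;
        }

        for (n = 0; n < 8; n++) {
            unsigned c = next_code[len[n]]++;
            printf("%c: len %d code ", 'A' + n, len[n]);
            for (bits = len[n] - 1; bits >= 0; bits--)
                putchar('0' + ((c >> bits) & 1));
            putchar('\n');
        }
        /* Prints A..E = 010..110, F = 00, G = 1110, H = 1111.  zlib then
         * bit-reverses each code (bi_reverse) because the deflate stream
         * is written LSB first. */
        return 0;
    }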
155 | 232 |
156 #ifdef GEN_TREES_H | 233 #ifdef GEN_TREES_H |
157 local void gen_trees_header OF((void)); | 234 local void gen_trees_header(void); |
158 #endif | 235 #endif |
159 | 236 |
160 #ifndef ZLIB_DEBUG | 237 #ifndef ZLIB_DEBUG |
161 # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len) | 238 # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len) |
162 /* Send a code of the given tree. c and tree must not have side effects */ | 239 /* Send a code of the given tree. c and tree must not have side effects */ |
166 { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \ | 243 { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \ |
167 send_bits(s, tree[c].Code, tree[c].Len); } | 244 send_bits(s, tree[c].Code, tree[c].Len); } |
168 #endif | 245 #endif |
169 | 246 |
170 /* =========================================================================== | 247 /* =========================================================================== |
171 * Output a short LSB first on the stream. | |
172 * IN assertion: there is enough room in pendingBuf. | |
173 */ | |
174 #define put_short(s, w) { \ | |
175 put_byte(s, (uch)((w) & 0xff)); \ | |
176 put_byte(s, (uch)((ush)(w) >> 8)); \ | |
177 } | |
178 | |
179 /* =========================================================================== | |
180 * Send a value on a given number of bits. | 248 * Send a value on a given number of bits. |
181 * IN assertion: length <= 16 and value fits in length bits. | 249 * IN assertion: length <= 16 and value fits in length bits. |
182 */ | 250 */ |
183 #ifdef ZLIB_DEBUG | 251 #ifdef ZLIB_DEBUG |
184 local void send_bits OF((deflate_state *s, int value, int length)); | 252 local void send_bits(deflate_state *s, int value, int length) { |
185 | |
186 local void send_bits(s, value, length) | |
187 deflate_state *s; | |
188 int value; /* value to send */ | |
189 int length; /* number of bits */ | |
190 { | |
191 Tracevv((stderr," l %2d v %4x ", length, value)); | 253 Tracevv((stderr," l %2d v %4x ", length, value)); |
192 Assert(length > 0 && length <= 15, "invalid length"); | 254 Assert(length > 0 && length <= 15, "invalid length"); |
193 s->bits_sent += (ulg)length; | 255 s->bits_sent += (ulg)length; |
194 | 256 |
195 /* If not enough room in bi_buf, use (valid) bits from bi_buf and | 257 /* If not enough room in bi_buf, use (valid) bits from bi_buf and |
196 * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid)) | 258 * (16 - bi_valid) bits from value, leaving (width - (16 - bi_valid)) |
197 * unused bits in value. | 259 * unused bits in value. |
198 */ | 260 */ |
199 if (s->bi_valid > (int)Buf_size - length) { | 261 if (s->bi_valid > (int)Buf_size - length) { |
200 s->bi_buf |= (ush)value << s->bi_valid; | 262 s->bi_buf |= (ush)value << s->bi_valid; |
201 put_short(s, s->bi_buf); | 263 put_short(s, s->bi_buf); |
227 /* the arguments must not have side effects */ | 289 /* the arguments must not have side effects */ |
228 | 290 |
229 /* =========================================================================== | 291 /* =========================================================================== |
230 * Initialize the various 'constant' tables. | 292 * Initialize the various 'constant' tables. |
231 */ | 293 */ |
232 local void tr_static_init() | 294 local void tr_static_init(void) { |
233 { | |
234 #if defined(GEN_TREES_H) || !defined(STDC) | 295 #if defined(GEN_TREES_H) || !defined(STDC) |
235 static int static_init_done = 0; | 296 static int static_init_done = 0; |
236 int n; /* iterates over tree elements */ | 297 int n; /* iterates over tree elements */ |
237 int bits; /* bit counter */ | 298 int bits; /* bit counter */ |
238 int length; /* length value */ | 299 int length; /* length value */ |
254 | 315 |
255 /* Initialize the mapping length (0..255) -> length code (0..28) */ | 316 /* Initialize the mapping length (0..255) -> length code (0..28) */ |
256 length = 0; | 317 length = 0; |
257 for (code = 0; code < LENGTH_CODES-1; code++) { | 318 for (code = 0; code < LENGTH_CODES-1; code++) { |
258 base_length[code] = length; | 319 base_length[code] = length; |
259 for (n = 0; n < (1<<extra_lbits[code]); n++) { | 320 for (n = 0; n < (1 << extra_lbits[code]); n++) { |
260 _length_code[length++] = (uch)code; | 321 _length_code[length++] = (uch)code; |
261 } | 322 } |
262 } | 323 } |
263 Assert (length == 256, "tr_static_init: length != 256"); | 324 Assert (length == 256, "tr_static_init: length != 256"); |
264 /* Note that the length 255 (match length 258) can be represented | 325 /* Note that the length 255 (match length 258) can be represented |
265 * in two different ways: code 284 + 5 bits or code 285, so we | 326 * in two different ways: code 284 + 5 bits or code 285, so we |
266 * overwrite length_code[255] to use the best encoding: | 327 * overwrite length_code[255] to use the best encoding: |
267 */ | 328 */ |
268 _length_code[length-1] = (uch)code; | 329 _length_code[length - 1] = (uch)code; |
269 | 330 |
270 /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ | 331 /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ |
271 dist = 0; | 332 dist = 0; |
272 for (code = 0 ; code < 16; code++) { | 333 for (code = 0 ; code < 16; code++) { |
273 base_dist[code] = dist; | 334 base_dist[code] = dist; |
274 for (n = 0; n < (1<<extra_dbits[code]); n++) { | 335 for (n = 0; n < (1 << extra_dbits[code]); n++) { |
275 _dist_code[dist++] = (uch)code; | 336 _dist_code[dist++] = (uch)code; |
276 } | 337 } |
277 } | 338 } |
278 Assert (dist == 256, "tr_static_init: dist != 256"); | 339 Assert (dist == 256, "tr_static_init: dist != 256"); |
279 dist >>= 7; /* from now on, all distances are divided by 128 */ | 340 dist >>= 7; /* from now on, all distances are divided by 128 */ |
280 for ( ; code < D_CODES; code++) { | 341 for ( ; code < D_CODES; code++) { |
281 base_dist[code] = dist << 7; | 342 base_dist[code] = dist << 7; |
282 for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) { | 343 for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { |
283 _dist_code[256 + dist++] = (uch)code; | 344 _dist_code[256 + dist++] = (uch)code; |
284 } | 345 } |
285 } | 346 } |
286 Assert (dist == 256, "tr_static_init: 256+dist != 512"); | 347 Assert (dist == 256, "tr_static_init: 256 + dist != 512"); |
287 | 348 |
288 /* Construct the codes of the static literal tree */ | 349 /* Construct the codes of the static literal tree */ |
289 for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; | 350 for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; |
290 n = 0; | 351 n = 0; |
291 while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++; | 352 while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++; |
310 # endif | 371 # endif |
311 #endif /* defined(GEN_TREES_H) || !defined(STDC) */ | 372 #endif /* defined(GEN_TREES_H) || !defined(STDC) */ |
312 } | 373 } |
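The length -> length-code table built in tr_static_init above can be reproduced on its own. In the sketch below (illustrative, not zlib code) the extra-bit counts are taken from the deflate specification, RFC 1951, and are assumed to match zlib's extra_lbits[] table, which is defined earlier in trees.c and not shown in this hunk:

    /* Standalone sketch of the length -> length-code mapping built in
     * tr_static_init(), including the special case for match length 258. */
    #include <stdio.h>

    #define LENGTH_CODES 29
    #define MIN_MATCH 3

    /* Extra bits per length code, per RFC 1951 (assumed equal to zlib's
     * extra_lbits[]). */
    static const int extra_lbits[LENGTH_CODES] =
        {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

    int main(void) {
        unsigned char length_code[256];
        int base_length[LENGTH_CODES] = {0};
        int samples[3] = {10, 100, 258};
        int code, n, length = 0;

        for (code = 0; code < LENGTH_CODES - 1; code++) {
            base_length[code] = length;
            for (n = 0; n < (1 << extra_lbits[code]); n++)
                length_code[length++] = (unsigned char)code;
        }
        /* Match length 258 gets its own zero-extra-bit code, 257 + 28 = 285. */
        length_code[255] = (unsigned char)code;

        for (n = 0; n < 3; n++) {
            int lc = samples[n] - MIN_MATCH;   /* what deflate stores */
            int c = length_code[lc];
            if (extra_lbits[c])
                printf("match length %3d -> code %d, %d extra bit(s), value %d\n",
                       samples[n], 257 + c, extra_lbits[c], lc - base_length[c]);
            else
                printf("match length %3d -> code %d, no extra bits\n",
                       samples[n], 257 + c);
        }
        /* Prints codes 264, 279 and 285 respectively. */
        return 0;
    }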
313 | 374 |
314 /* =========================================================================== | 375 /* =========================================================================== |
315 * Genererate the file trees.h describing the static trees. | 376 * Generate the file trees.h describing the static trees. |
316 */ | 377 */ |
317 #ifdef GEN_TREES_H | 378 #ifdef GEN_TREES_H |
318 # ifndef ZLIB_DEBUG | 379 # ifndef ZLIB_DEBUG |
319 # include <stdio.h> | 380 # include <stdio.h> |
320 # endif | 381 # endif |
321 | 382 |
322 # define SEPARATOR(i, last, width) \ | 383 # define SEPARATOR(i, last, width) \ |
323 ((i) == (last)? "\n};\n\n" : \ | 384 ((i) == (last)? "\n};\n\n" : \ |
324 ((i) % (width) == (width)-1 ? ",\n" : ", ")) | 385 ((i) % (width) == (width) - 1 ? ",\n" : ", ")) |
325 | 386 |
326 void gen_trees_header() | 387 void gen_trees_header(void) { |
327 { | |
328 FILE *header = fopen("trees.h", "w"); | 388 FILE *header = fopen("trees.h", "w"); |
329 int i; | 389 int i; |
330 | 390 |
331 Assert (header != NULL, "Can't open trees.h"); | 391 Assert (header != NULL, "Can't open trees.h"); |
332 fprintf(header, | 392 fprintf(header, |
372 fclose(header); | 432 fclose(header); |
373 } | 433 } |
374 #endif /* GEN_TREES_H */ | 434 #endif /* GEN_TREES_H */ |
375 | 435 |
376 /* =========================================================================== | 436 /* =========================================================================== |
437 * Initialize a new block. | |
438 */ | |
439 local void init_block(deflate_state *s) { | |
440 int n; /* iterates over tree elements */ | |
441 | |
442 /* Initialize the trees. */ | |
443 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0; | |
444 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0; | |
445 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0; | |
446 | |
447 s->dyn_ltree[END_BLOCK].Freq = 1; | |
448 s->opt_len = s->static_len = 0L; | |
449 s->sym_next = s->matches = 0; | |
450 } | |
451 | |
452 /* =========================================================================== | |
377 * Initialize the tree data structures for a new zlib stream. | 453 * Initialize the tree data structures for a new zlib stream. |
378 */ | 454 */ |
379 void ZLIB_INTERNAL _tr_init(s) | 455 void ZLIB_INTERNAL _tr_init(deflate_state *s) { |
380 deflate_state *s; | |
381 { | |
382 tr_static_init(); | 456 tr_static_init(); |
383 | 457 |
384 s->l_desc.dyn_tree = s->dyn_ltree; | 458 s->l_desc.dyn_tree = s->dyn_ltree; |
385 s->l_desc.stat_desc = &static_l_desc; | 459 s->l_desc.stat_desc = &static_l_desc; |
386 | 460 |
399 | 473 |
400 /* Initialize the first block of the first file: */ | 474 /* Initialize the first block of the first file: */ |
401 init_block(s); | 475 init_block(s); |
402 } | 476 } |
403 | 477 |
404 /* =========================================================================== | |
405 * Initialize a new block. | |
406 */ | |
407 local void init_block(s) | |
408 deflate_state *s; | |
409 { | |
410 int n; /* iterates over tree elements */ | |
411 | |
412 /* Initialize the trees. */ | |
413 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0; | |
414 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0; | |
415 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0; | |
416 | |
417 s->dyn_ltree[END_BLOCK].Freq = 1; | |
418 s->opt_len = s->static_len = 0L; | |
419 s->last_lit = s->matches = 0; | |
420 } | |
421 | |
422 #define SMALLEST 1 | 478 #define SMALLEST 1 |
423 /* Index within the heap array of least frequent node in the Huffman tree */ | 479 /* Index within the heap array of least frequent node in the Huffman tree */ |
424 | 480 |
425 | 481 |
426 /* =========================================================================== | 482 /* =========================================================================== |
446 * Restore the heap property by moving down the tree starting at node k, | 502 * Restore the heap property by moving down the tree starting at node k, |
447 * exchanging a node with the smallest of its two sons if necessary, stopping | 503 * exchanging a node with the smallest of its two sons if necessary, stopping |
448 * when the heap property is re-established (each father smaller than its | 504 * when the heap property is re-established (each father smaller than its |
449 * two sons). | 505 * two sons). |
450 */ | 506 */ |
451 local void pqdownheap(s, tree, k) | 507 local void pqdownheap(deflate_state *s, ct_data *tree, int k) { |
452 deflate_state *s; | |
453 ct_data *tree; /* the tree to restore */ | |
454 int k; /* node to move down */ | |
455 { | |
456 int v = s->heap[k]; | 508 int v = s->heap[k]; |
457 int j = k << 1; /* left son of k */ | 509 int j = k << 1; /* left son of k */ |
458 while (j <= s->heap_len) { | 510 while (j <= s->heap_len) { |
459 /* Set j to the smallest of the two sons: */ | 511 /* Set j to the smallest of the two sons: */ |
460 if (j < s->heap_len && | 512 if (j < s->heap_len && |
461 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { | 513 smaller(tree, s->heap[j + 1], s->heap[j], s->depth)) { |
462 j++; | 514 j++; |
463 } | 515 } |
464 /* Exit if v is smaller than both sons */ | 516 /* Exit if v is smaller than both sons */ |
465 if (smaller(tree, v, s->heap[j], s->depth)) break; | 517 if (smaller(tree, v, s->heap[j], s->depth)) break; |
466 | 518 |
481 * OUT assertions: the field len is set to the optimal bit length, the | 533 * OUT assertions: the field len is set to the optimal bit length, the |
482 * array bl_count contains the frequencies for each bit length. | 534 * array bl_count contains the frequencies for each bit length. |
483 * The length opt_len is updated; static_len is also updated if stree is | 535 * The length opt_len is updated; static_len is also updated if stree is |
484 * not null. | 536 * not null. |
485 */ | 537 */ |
486 local void gen_bitlen(s, desc) | 538 local void gen_bitlen(deflate_state *s, tree_desc *desc) { |
487 deflate_state *s; | |
488 tree_desc *desc; /* the tree descriptor */ | |
489 { | |
490 ct_data *tree = desc->dyn_tree; | 539 ct_data *tree = desc->dyn_tree; |
491 int max_code = desc->max_code; | 540 int max_code = desc->max_code; |
492 const ct_data *stree = desc->stat_desc->static_tree; | 541 const ct_data *stree = desc->stat_desc->static_tree; |
493 const intf *extra = desc->stat_desc->extra_bits; | 542 const intf *extra = desc->stat_desc->extra_bits; |
494 int base = desc->stat_desc->extra_base; | 543 int base = desc->stat_desc->extra_base; |
505 /* In a first pass, compute the optimal bit lengths (which may | 554 /* In a first pass, compute the optimal bit lengths (which may |
506 * overflow in the case of the bit length tree). | 555 * overflow in the case of the bit length tree). |
507 */ | 556 */ |
508 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ | 557 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ |
509 | 558 |
510 for (h = s->heap_max+1; h < HEAP_SIZE; h++) { | 559 for (h = s->heap_max + 1; h < HEAP_SIZE; h++) { |
511 n = s->heap[h]; | 560 n = s->heap[h]; |
512 bits = tree[tree[n].Dad].Len + 1; | 561 bits = tree[tree[n].Dad].Len + 1; |
513 if (bits > max_length) bits = max_length, overflow++; | 562 if (bits > max_length) bits = max_length, overflow++; |
514 tree[n].Len = (ush)bits; | 563 tree[n].Len = (ush)bits; |
515 /* We overwrite tree[n].Dad which is no longer needed */ | 564 /* We overwrite tree[n].Dad which is no longer needed */ |
516 | 565 |
517 if (n > max_code) continue; /* not a leaf node */ | 566 if (n > max_code) continue; /* not a leaf node */ |
518 | 567 |
519 s->bl_count[bits]++; | 568 s->bl_count[bits]++; |
520 xbits = 0; | 569 xbits = 0; |
521 if (n >= base) xbits = extra[n-base]; | 570 if (n >= base) xbits = extra[n - base]; |
522 f = tree[n].Freq; | 571 f = tree[n].Freq; |
523 s->opt_len += (ulg)f * (unsigned)(bits + xbits); | 572 s->opt_len += (ulg)f * (unsigned)(bits + xbits); |
524 if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits); | 573 if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits); |
525 } | 574 } |
526 if (overflow == 0) return; | 575 if (overflow == 0) return; |
528 Tracev((stderr,"\nbit length overflow\n")); | 577 Tracev((stderr,"\nbit length overflow\n")); |
529 /* This happens for example on obj2 and pic of the Calgary corpus */ | 578 /* This happens for example on obj2 and pic of the Calgary corpus */ |
530 | 579 |
531 /* Find the first bit length which could increase: */ | 580 /* Find the first bit length which could increase: */ |
532 do { | 581 do { |
533 bits = max_length-1; | 582 bits = max_length - 1; |
534 while (s->bl_count[bits] == 0) bits--; | 583 while (s->bl_count[bits] == 0) bits--; |
535 s->bl_count[bits]--; /* move one leaf down the tree */ | 584 s->bl_count[bits]--; /* move one leaf down the tree */ |
536 s->bl_count[bits+1] += 2; /* move one overflow item as its brother */ | 585 s->bl_count[bits + 1] += 2; /* move one overflow item as its brother */ |
537 s->bl_count[max_length]--; | 586 s->bl_count[max_length]--; |
538 /* The brother of the overflow item also moves one step up, | 587 /* The brother of the overflow item also moves one step up, |
539 * but this does not affect bl_count[max_length] | 588 * but this does not affect bl_count[max_length] |
540 */ | 589 */ |
541 overflow -= 2; | 590 overflow -= 2; |
559 n--; | 608 n--; |
560 } | 609 } |
561 } | 610 } |
562 } | 611 } |
563 | 612 |
564 /* =========================================================================== | 613 #ifdef DUMP_BL_TREE |
565 * Generate the codes for a given tree and bit counts (which need not be | 614 # include <stdio.h> |
566 * optimal). | 615 #endif |
567 * IN assertion: the array bl_count contains the bit length statistics for | |
568 * the given tree and the field len is set for all tree elements. | |
569 * OUT assertion: the field code is set for all tree elements of non | |
570 * zero code length. | |
571 */ | |
572 local void gen_codes (tree, max_code, bl_count) | |
573 ct_data *tree; /* the tree to decorate */ | |
574 int max_code; /* largest code with non zero frequency */ | |
575 ushf *bl_count; /* number of codes at each bit length */ | |
576 { | |
577 ush next_code[MAX_BITS+1]; /* next code value for each bit length */ | |
578 unsigned code = 0; /* running code value */ | |
579 int bits; /* bit index */ | |
580 int n; /* code index */ | |
581 | |
582 /* The distribution counts are first used to generate the code values | |
583 * without bit reversal. | |
584 */ | |
585 for (bits = 1; bits <= MAX_BITS; bits++) { | |
586 code = (code + bl_count[bits-1]) << 1; | |
587 next_code[bits] = (ush)code; | |
588 } | |
589 /* Check that the bit counts in bl_count are consistent. The last code | |
590 * must be all ones. | |
591 */ | |
592 Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, | |
593 "inconsistent bit counts"); | |
594 Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); | |
595 | |
596 for (n = 0; n <= max_code; n++) { | |
597 int len = tree[n].Len; | |
598 if (len == 0) continue; | |
599 /* Now reverse the bits */ | |
600 tree[n].Code = (ush)bi_reverse(next_code[len]++, len); | |
601 | |
602 Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ", | |
603 n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1)); | |
604 } | |
605 } | |
606 | 616 |
607 /* =========================================================================== | 617 /* =========================================================================== |
608 * Construct one Huffman tree and assigns the code bit strings and lengths. | 618 * Construct one Huffman tree and assigns the code bit strings and lengths. |
609 * Update the total bit length for the current block. | 619 * Update the total bit length for the current block. |
610 * IN assertion: the field freq is set for all tree elements. | 620 * IN assertion: the field freq is set for all tree elements. |
611 * OUT assertions: the fields len and code are set to the optimal bit length | 621 * OUT assertions: the fields len and code are set to the optimal bit length |
612 * and corresponding code. The length opt_len is updated; static_len is | 622 * and corresponding code. The length opt_len is updated; static_len is |
613 * also updated if stree is not null. The field max_code is set. | 623 * also updated if stree is not null. The field max_code is set. |
614 */ | 624 */ |
615 local void build_tree(s, desc) | 625 local void build_tree(deflate_state *s, tree_desc *desc) { |
616 deflate_state *s; | |
617 tree_desc *desc; /* the tree descriptor */ | |
618 { | |
619 ct_data *tree = desc->dyn_tree; | 626 ct_data *tree = desc->dyn_tree; |
620 const ct_data *stree = desc->stat_desc->static_tree; | 627 const ct_data *stree = desc->stat_desc->static_tree; |
621 int elems = desc->stat_desc->elems; | 628 int elems = desc->stat_desc->elems; |
622 int n, m; /* iterate over heap elements */ | 629 int n, m; /* iterate over heap elements */ |
623 int max_code = -1; /* largest code with non zero frequency */ | 630 int max_code = -1; /* largest code with non zero frequency */ |
624 int node; /* new node being created */ | 631 int node; /* new node being created */ |
625 | 632 |
626 /* Construct the initial heap, with least frequent element in | 633 /* Construct the initial heap, with least frequent element in |
627 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. | 634 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n + 1]. |
628 * heap[0] is not used. | 635 * heap[0] is not used. |
629 */ | 636 */ |
630 s->heap_len = 0, s->heap_max = HEAP_SIZE; | 637 s->heap_len = 0, s->heap_max = HEAP_SIZE; |
631 | 638 |
632 for (n = 0; n < elems; n++) { | 639 for (n = 0; n < elems; n++) { |
650 s->opt_len--; if (stree) s->static_len -= stree[node].Len; | 657 s->opt_len--; if (stree) s->static_len -= stree[node].Len; |
651 /* node is 0 or 1 so it does not have extra bits */ | 658 /* node is 0 or 1 so it does not have extra bits */ |
652 } | 659 } |
653 desc->max_code = max_code; | 660 desc->max_code = max_code; |
654 | 661 |
655 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, | 662 /* The elements heap[heap_len/2 + 1 .. heap_len] are leaves of the tree, |
656 * establish sub-heaps of increasing lengths: | 663 * establish sub-heaps of increasing lengths: |
657 */ | 664 */ |
658 for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); | 665 for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); |
659 | 666 |
660 /* Construct the Huffman tree by repeatedly combining the least two | 667 /* Construct the Huffman tree by repeatedly combining the least two |
698 | 705 |
699 /* =========================================================================== | 706 /* =========================================================================== |
700 * Scan a literal or distance tree to determine the frequencies of the codes | 707 * Scan a literal or distance tree to determine the frequencies of the codes |
701 * in the bit length tree. | 708 * in the bit length tree. |
702 */ | 709 */ |
703 local void scan_tree (s, tree, max_code) | 710 local void scan_tree(deflate_state *s, ct_data *tree, int max_code) { |
704 deflate_state *s; | |
705 ct_data *tree; /* the tree to be scanned */ | |
706 int max_code; /* and its largest code of non zero frequency */ | |
707 { | |
708 int n; /* iterates over all tree elements */ | 711 int n; /* iterates over all tree elements */ |
709 int prevlen = -1; /* last emitted length */ | 712 int prevlen = -1; /* last emitted length */ |
710 int curlen; /* length of current code */ | 713 int curlen; /* length of current code */ |
711 int nextlen = tree[0].Len; /* length of next code */ | 714 int nextlen = tree[0].Len; /* length of next code */ |
712 int count = 0; /* repeat count of the current code */ | 715 int count = 0; /* repeat count of the current code */ |
713 int max_count = 7; /* max repeat count */ | 716 int max_count = 7; /* max repeat count */ |
714 int min_count = 4; /* min repeat count */ | 717 int min_count = 4; /* min repeat count */ |
715 | 718 |
716 if (nextlen == 0) max_count = 138, min_count = 3; | 719 if (nextlen == 0) max_count = 138, min_count = 3; |
717 tree[max_code+1].Len = (ush)0xffff; /* guard */ | 720 tree[max_code + 1].Len = (ush)0xffff; /* guard */ |
718 | 721 |
719 for (n = 0; n <= max_code; n++) { | 722 for (n = 0; n <= max_code; n++) { |
720 curlen = nextlen; nextlen = tree[n+1].Len; | 723 curlen = nextlen; nextlen = tree[n + 1].Len; |
721 if (++count < max_count && curlen == nextlen) { | 724 if (++count < max_count && curlen == nextlen) { |
722 continue; | 725 continue; |
723 } else if (count < min_count) { | 726 } else if (count < min_count) { |
724 s->bl_tree[curlen].Freq += count; | 727 s->bl_tree[curlen].Freq += count; |
725 } else if (curlen != 0) { | 728 } else if (curlen != 0) { |
743 | 746 |
744 /* =========================================================================== | 747 /* =========================================================================== |
745 * Send a literal or distance tree in compressed form, using the codes in | 748 * Send a literal or distance tree in compressed form, using the codes in |
746 * bl_tree. | 749 * bl_tree. |
747 */ | 750 */ |
748 local void send_tree (s, tree, max_code) | 751 local void send_tree(deflate_state *s, ct_data *tree, int max_code) { |
749 deflate_state *s; | |
750 ct_data *tree; /* the tree to be scanned */ | |
751 int max_code; /* and its largest code of non zero frequency */ | |
752 { | |
753 int n; /* iterates over all tree elements */ | 752 int n; /* iterates over all tree elements */ |
754 int prevlen = -1; /* last emitted length */ | 753 int prevlen = -1; /* last emitted length */ |
755 int curlen; /* length of current code */ | 754 int curlen; /* length of current code */ |
756 int nextlen = tree[0].Len; /* length of next code */ | 755 int nextlen = tree[0].Len; /* length of next code */ |
757 int count = 0; /* repeat count of the current code */ | 756 int count = 0; /* repeat count of the current code */ |
758 int max_count = 7; /* max repeat count */ | 757 int max_count = 7; /* max repeat count */ |
759 int min_count = 4; /* min repeat count */ | 758 int min_count = 4; /* min repeat count */ |
760 | 759 |
761 /* tree[max_code+1].Len = -1; */ /* guard already set */ | 760 /* tree[max_code + 1].Len = -1; */ /* guard already set */ |
762 if (nextlen == 0) max_count = 138, min_count = 3; | 761 if (nextlen == 0) max_count = 138, min_count = 3; |
763 | 762 |
764 for (n = 0; n <= max_code; n++) { | 763 for (n = 0; n <= max_code; n++) { |
765 curlen = nextlen; nextlen = tree[n+1].Len; | 764 curlen = nextlen; nextlen = tree[n + 1].Len; |
766 if (++count < max_count && curlen == nextlen) { | 765 if (++count < max_count && curlen == nextlen) { |
767 continue; | 766 continue; |
768 } else if (count < min_count) { | 767 } else if (count < min_count) { |
769 do { send_code(s, curlen, s->bl_tree); } while (--count != 0); | 768 do { send_code(s, curlen, s->bl_tree); } while (--count != 0); |
770 | 769 |
771 } else if (curlen != 0) { | 770 } else if (curlen != 0) { |
772 if (curlen != prevlen) { | 771 if (curlen != prevlen) { |
773 send_code(s, curlen, s->bl_tree); count--; | 772 send_code(s, curlen, s->bl_tree); count--; |
774 } | 773 } |
775 Assert(count >= 3 && count <= 6, " 3_6?"); | 774 Assert(count >= 3 && count <= 6, " 3_6?"); |
776 send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2); | 775 send_code(s, REP_3_6, s->bl_tree); send_bits(s, count - 3, 2); |
777 | 776 |
778 } else if (count <= 10) { | 777 } else if (count <= 10) { |
779 send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3); | 778 send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count - 3, 3); |
780 | 779 |
781 } else { | 780 } else { |
782 send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7); | 781 send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count - 11, 7); |
783 } | 782 } |
784 count = 0; prevlen = curlen; | 783 count = 0; prevlen = curlen; |
785 if (nextlen == 0) { | 784 if (nextlen == 0) { |
786 max_count = 138, min_count = 3; | 785 max_count = 138, min_count = 3; |
787 } else if (curlen == nextlen) { | 786 } else if (curlen == nextlen) { |
794 | 793 |
795 /* =========================================================================== | 794 /* =========================================================================== |
796 * Construct the Huffman tree for the bit lengths and return the index in | 795 * Construct the Huffman tree for the bit lengths and return the index in |
797 * bl_order of the last bit length code to send. | 796 * bl_order of the last bit length code to send. |
798 */ | 797 */ |
799 local int build_bl_tree(s) | 798 local int build_bl_tree(deflate_state *s) { |
800 deflate_state *s; | |
801 { | |
802 int max_blindex; /* index of last bit length code of non zero freq */ | 799 int max_blindex; /* index of last bit length code of non zero freq */ |
803 | 800 |
804 /* Determine the bit length frequencies for literal and distance trees */ | 801 /* Determine the bit length frequencies for literal and distance trees */ |
805 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code); | 802 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code); |
806 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code); | 803 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code); |
807 | 804 |
808 /* Build the bit length tree: */ | 805 /* Build the bit length tree: */ |
809 build_tree(s, (tree_desc *)(&(s->bl_desc))); | 806 build_tree(s, (tree_desc *)(&(s->bl_desc))); |
810 /* opt_len now includes the length of the tree representations, except | 807 /* opt_len now includes the length of the tree representations, except the |
811 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. | 808 * lengths of the bit lengths codes and the 5 + 5 + 4 bits for the counts. |
812 */ | 809 */ |
813 | 810 |
814 /* Determine the number of bit length codes to send. The pkzip format | 811 /* Determine the number of bit length codes to send. The pkzip format |
815 * requires that at least 4 bit length codes be sent. (appnote.txt says | 812 * requires that at least 4 bit length codes be sent. (appnote.txt says |
816 * 3 but the actual value used is 4.) | 813 * 3 but the actual value used is 4.) |
817 */ | 814 */ |
818 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) { | 815 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) { |
819 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; | 816 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; |
820 } | 817 } |
821 /* Update opt_len to include the bit length tree and counts */ | 818 /* Update opt_len to include the bit length tree and counts */ |
822 s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4; | 819 s->opt_len += 3*((ulg)max_blindex + 1) + 5 + 5 + 4; |
823 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", | 820 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", |
824 s->opt_len, s->static_len)); | 821 s->opt_len, s->static_len)); |
825 | 822 |
826 return max_blindex; | 823 return max_blindex; |
827 } | 824 } |
829 /* =========================================================================== | 826 /* =========================================================================== |
830 * Send the header for a block using dynamic Huffman trees: the counts, the | 827 * Send the header for a block using dynamic Huffman trees: the counts, the |
831 * lengths of the bit length codes, the literal tree and the distance tree. | 828 * lengths of the bit length codes, the literal tree and the distance tree. |
832 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. | 829 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. |
833 */ | 830 */ |
834 local void send_all_trees(s, lcodes, dcodes, blcodes) | 831 local void send_all_trees(deflate_state *s, int lcodes, int dcodes, |
835 deflate_state *s; | 832 int blcodes) { |
836 int lcodes, dcodes, blcodes; /* number of codes for each tree */ | |
837 { | |
838 int rank; /* index in bl_order */ | 833 int rank; /* index in bl_order */ |
839 | 834 |
840 Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); | 835 Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); |
841 Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, | 836 Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, |
842 "too many codes"); | 837 "too many codes"); |
843 Tracev((stderr, "\nbl counts: ")); | 838 Tracev((stderr, "\nbl counts: ")); |
844 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */ | 839 send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ |
845 send_bits(s, dcodes-1, 5); | 840 send_bits(s, dcodes - 1, 5); |
846 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */ | 841 send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ |
847 for (rank = 0; rank < blcodes; rank++) { | 842 for (rank = 0; rank < blcodes; rank++) { |
848 Tracev((stderr, "\nbl code %2d ", bl_order[rank])); | 843 Tracev((stderr, "\nbl code %2d ", bl_order[rank])); |
849 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); | 844 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); |
850 } | 845 } |
851 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); | 846 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); |
852 | 847 |
853 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */ | 848 send_tree(s, (ct_data *)s->dyn_ltree, lcodes - 1); /* literal tree */ |
854 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); | 849 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); |
855 | 850 |
856 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */ | 851 send_tree(s, (ct_data *)s->dyn_dtree, dcodes - 1); /* distance tree */ |
857 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); | 852 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); |
858 } | 853 } |
859 | 854 |
860 /* =========================================================================== | 855 /* =========================================================================== |
861 * Send a stored block | 856 * Send a stored block |
862 */ | 857 */ |
863 void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last) | 858 void ZLIB_INTERNAL _tr_stored_block(deflate_state *s, charf *buf, |
864 deflate_state *s; | 859 ulg stored_len, int last) { |
865 charf *buf; /* input block */ | 860 send_bits(s, (STORED_BLOCK<<1) + last, 3); /* send block type */ |
866 ulg stored_len; /* length of input block */ | |
867 int last; /* one if this is the last block for a file */ | |
868 { | |
869 send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */ | |
870 bi_windup(s); /* align on byte boundary */ | 861 bi_windup(s); /* align on byte boundary */ |
871 put_short(s, (ush)stored_len); | 862 put_short(s, (ush)stored_len); |
872 put_short(s, (ush)~stored_len); | 863 put_short(s, (ush)~stored_len); |
873 zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len); | 864 if (stored_len) |
865 zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len); | |
874 s->pending += stored_len; | 866 s->pending += stored_len; |
875 #ifdef ZLIB_DEBUG | 867 #ifdef ZLIB_DEBUG |
876 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; | 868 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; |
877 s->compressed_len += (stored_len + 4) << 3; | 869 s->compressed_len += (stored_len + 4) << 3; |
878 s->bits_sent += 2*16; | 870 s->bits_sent += 2*16; |
879 s->bits_sent += stored_len<<3; | 871 s->bits_sent += stored_len << 3; |
880 #endif | 872 #endif |
881 } | 873 } |
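_tr_stored_block above frames a stored block as the 3-bit block header, byte alignment, then LEN and NLEN = ~LEN as little-endian 16-bit values ahead of the raw bytes, so a decoder can verify LEN ^ NLEN == 0xffff. A tiny illustration (not zlib code) of that length framing:

    /* Illustration of the LEN/NLEN framing written by _tr_stored_block(). */
    #include <stdio.h>

    int main(void) {
        unsigned stored_len = 1000;                   /* bytes in the stored block */
        unsigned short len  = (unsigned short)stored_len;
        unsigned short nlen = (unsigned short)~stored_len;

        printf("LEN  bytes: %02x %02x\n", len & 0xff, len >> 8);    /* e8 03 */
        printf("NLEN bytes: %02x %02x\n", nlen & 0xff, nlen >> 8);  /* 17 fc */
        printf("LEN ^ NLEN = %04x\n", len ^ nlen);                  /* ffff */
        return 0;
    }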
882 | 874 |
883 /* =========================================================================== | 875 /* =========================================================================== |
884 * Flush the bits in the bit buffer to pending output (leaves at most 7 bits) | 876 * Flush the bits in the bit buffer to pending output (leaves at most 7 bits) |
885 */ | 877 */ |
886 void ZLIB_INTERNAL _tr_flush_bits(s) | 878 void ZLIB_INTERNAL _tr_flush_bits(deflate_state *s) { |
887 deflate_state *s; | |
888 { | |
889 bi_flush(s); | 879 bi_flush(s); |
890 } | 880 } |
891 | 881 |
892 /* =========================================================================== | 882 /* =========================================================================== |
893 * Send one empty static block to give enough lookahead for inflate. | 883 * Send one empty static block to give enough lookahead for inflate. |
894 * This takes 10 bits, of which 7 may remain in the bit buffer. | 884 * This takes 10 bits, of which 7 may remain in the bit buffer. |
895 */ | 885 */ |
896 void ZLIB_INTERNAL _tr_align(s) | 886 void ZLIB_INTERNAL _tr_align(deflate_state *s) { |
897 deflate_state *s; | |
898 { | |
899 send_bits(s, STATIC_TREES<<1, 3); | 887 send_bits(s, STATIC_TREES<<1, 3); |
900 send_code(s, END_BLOCK, static_ltree); | 888 send_code(s, END_BLOCK, static_ltree); |
901 #ifdef ZLIB_DEBUG | 889 #ifdef ZLIB_DEBUG |
902 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */ | 890 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */ |
903 #endif | 891 #endif |
904 bi_flush(s); | 892 bi_flush(s); |
905 } | 893 } |
906 | 894 |
907 /* =========================================================================== | 895 /* =========================================================================== |
896 * Send the block data compressed using the given Huffman trees | |
897 */ | |
898 local void compress_block(deflate_state *s, const ct_data *ltree, | |
899 const ct_data *dtree) { | |
900 unsigned dist; /* distance of matched string */ | |
901 int lc; /* match length or unmatched char (if dist == 0) */ | |
902 unsigned sx = 0; /* running index in symbol buffers */ | |
903 unsigned code; /* the code to send */ | |
904 int extra; /* number of extra bits to send */ | |
905 | |
906 if (s->sym_next != 0) do { | |
907 #ifdef LIT_MEM | |
908 dist = s->d_buf[sx]; | |
909 lc = s->l_buf[sx++]; | |
910 #else | |
911 dist = s->sym_buf[sx++] & 0xff; | |
912 dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8; | |
913 lc = s->sym_buf[sx++]; | |
914 #endif | |
915 if (dist == 0) { | |
916 send_code(s, lc, ltree); /* send a literal byte */ | |
917 Tracecv(isgraph(lc), (stderr," '%c' ", lc)); | |
918 } else { | |
919 /* Here, lc is the match length - MIN_MATCH */ | |
920 code = _length_code[lc]; | |
921 send_code(s, code + LITERALS + 1, ltree); /* send length code */ | |
922 extra = extra_lbits[code]; | |
923 if (extra != 0) { | |
924 lc -= base_length[code]; | |
925 send_bits(s, lc, extra); /* send the extra length bits */ | |
926 } | |
927 dist--; /* dist is now the match distance - 1 */ | |
928 code = d_code(dist); | |
929 Assert (code < D_CODES, "bad d_code"); | |
930 | |
931 send_code(s, code, dtree); /* send the distance code */ | |
932 extra = extra_dbits[code]; | |
933 if (extra != 0) { | |
934 dist -= (unsigned)base_dist[code]; | |
935 send_bits(s, dist, extra); /* send the extra distance bits */ | |
936 } | |
937 } /* literal or match pair ? */ | |
938 | |
939 /* Check for no overlay of pending_buf on needed symbols */ | |
940 #ifdef LIT_MEM | |
941 Assert(s->pending < 2 * (s->lit_bufsize + sx), "pendingBuf overflow"); | |
942 #else | |
943 Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow"); | |
944 #endif | |
945 | |
946 } while (sx < s->sym_next); | |
947 | |
948 send_code(s, END_BLOCK, ltree); | |
949 } | |
950 | |
951 /* =========================================================================== | |
952 * Check if the data type is TEXT or BINARY, using the following algorithm: | |
953 * - TEXT if the two conditions below are satisfied: | |
954 * a) There are no non-portable control characters belonging to the | |
955 * "block list" (0..6, 14..25, 28..31). | |
956 * b) There is at least one printable character belonging to the | |
957 * "allow list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). | |
958 * - BINARY otherwise. | |
959 * - The following partially-portable control characters form a | |
960 * "gray list" that is ignored in this detection algorithm: | |
961 * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). | |
962 * IN assertion: the fields Freq of dyn_ltree are set. | |
963 */ | |
964 local int detect_data_type(deflate_state *s) { | |
965 /* block_mask is the bit mask of block-listed bytes | |
966 * set bits 0..6, 14..25, and 28..31 | |
967 * 0xf3ffc07f = binary 11110011111111111100000001111111 | |
968 */ | |
969 unsigned long block_mask = 0xf3ffc07fUL; | |
970 int n; | |
971 | |
972 /* Check for non-textual ("block-listed") bytes. */ | |
973 for (n = 0; n <= 31; n++, block_mask >>= 1) | |
974 if ((block_mask & 1) && (s->dyn_ltree[n].Freq != 0)) | |
975 return Z_BINARY; | |
976 | |
977 /* Check for textual ("allow-listed") bytes. */ | |
978 if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0 | |
979 || s->dyn_ltree[13].Freq != 0) | |
980 return Z_TEXT; | |
981 for (n = 32; n < LITERALS; n++) | |
982 if (s->dyn_ltree[n].Freq != 0) | |
983 return Z_TEXT; | |
984 | |
985 /* There are no "block-listed" or "allow-listed" bytes: | |
986 * this stream either is empty or has tolerated ("gray-listed") bytes only. | |
987 */ | |
988 return Z_BINARY; | |
989 } | |
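In detect_data_type above, bit n of the constant 0xf3ffc07fUL marks byte value n as block-listed. A quick standalone check (illustrative, not zlib code) that the mask matches the ranges 0..6, 14..25 and 28..31 given in the comment:

    /* Verify that block_mask covers exactly the byte values listed in the
     * detect_data_type() comment. */
    #include <stdio.h>

    int main(void) {
        unsigned long block_mask = 0xf3ffc07fUL;
        int n;

        printf("block-listed byte values:");
        for (n = 0; n <= 31; n++)
            if ((block_mask >> n) & 1)
                printf(" %d", n);
        printf("\n");    /* prints 0..6, 14..25 and 28..31 */
        return 0;
    }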
990 | |
991 /* =========================================================================== | |
908 * Determine the best encoding for the current block: dynamic trees, static | 992 * Determine the best encoding for the current block: dynamic trees, static |
909 * trees or store, and write out the encoded block. | 993 * trees or store, and write out the encoded block. |
910 */ | 994 */ |
911 void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last) | 995 void ZLIB_INTERNAL _tr_flush_block(deflate_state *s, charf *buf, |
912 deflate_state *s; | 996 ulg stored_len, int last) { |
913 charf *buf; /* input block, or NULL if too old */ | |
914 ulg stored_len; /* length of input block */ | |
915 int last; /* one if this is the last block for a file */ | |
916 { | |
917 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ | 997 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ |
918 int max_blindex = 0; /* index of last bit length code of non zero freq */ | 998 int max_blindex = 0; /* index of last bit length code of non zero freq */ |
919 | 999 |
920 /* Build the Huffman trees unless a stored block is forced */ | 1000 /* Build the Huffman trees unless a stored block is forced */ |
921 if (s->level > 0) { | 1001 if (s->level > 0) { |
940 * in bl_order of the last bit length code to send. | 1020 * in bl_order of the last bit length code to send. |
941 */ | 1021 */ |
942 max_blindex = build_bl_tree(s); | 1022 max_blindex = build_bl_tree(s); |
943 | 1023 |
944 /* Determine the best encoding. Compute the block lengths in bytes. */ | 1024 /* Determine the best encoding. Compute the block lengths in bytes. */ |
945 opt_lenb = (s->opt_len+3+7)>>3; | 1025 opt_lenb = (s->opt_len + 3 + 7) >> 3; |
946 static_lenb = (s->static_len+3+7)>>3; | 1026 static_lenb = (s->static_len + 3 + 7) >> 3; |
947 | 1027 |
948 Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", | 1028 Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", |
949 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, | 1029 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, |
950 s->last_lit)); | 1030 s->sym_next / 3)); |
951 | 1031 |
952 if (static_lenb <= opt_lenb) opt_lenb = static_lenb; | 1032 #ifndef FORCE_STATIC |
1033 if (static_lenb <= opt_lenb || s->strategy == Z_FIXED) | |
1034 #endif | |
1035 opt_lenb = static_lenb; | |
953 | 1036 |
954 } else { | 1037 } else { |
955 Assert(buf != (char*)0, "lost buf"); | 1038 Assert(buf != (char*)0, "lost buf"); |
956 opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ | 1039 opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ |
957 } | 1040 } |
958 | 1041 |
959 #ifdef FORCE_STORED | 1042 #ifdef FORCE_STORED |
960 if (buf != (char*)0) { /* force stored block */ | 1043 if (buf != (char*)0) { /* force stored block */ |
961 #else | 1044 #else |
962 if (stored_len+4 <= opt_lenb && buf != (char*)0) { | 1045 if (stored_len + 4 <= opt_lenb && buf != (char*)0) { |
963 /* 4: two words for the lengths */ | 1046 /* 4: two words for the lengths */ |
964 #endif | 1047 #endif |
965 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. | 1048 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. |
966 * Otherwise we can't have processed more than WSIZE input bytes since | 1049 * Otherwise we can't have processed more than WSIZE input bytes since |
967 * the last block flush, because compression would have been | 1050 * the last block flush, because compression would have been |
968 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to | 1051 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to |
969 * transform a block into a stored block. | 1052 * transform a block into a stored block. |
970 */ | 1053 */ |
971 _tr_stored_block(s, buf, stored_len, last); | 1054 _tr_stored_block(s, buf, stored_len, last); |
972 | 1055 |
973 #ifdef FORCE_STATIC | 1056 } else if (static_lenb == opt_lenb) { |
974 } else if (static_lenb >= 0) { /* force static trees */ | 1057 send_bits(s, (STATIC_TREES<<1) + last, 3); |
975 #else | |
976 } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) { | |
977 #endif | |
978 send_bits(s, (STATIC_TREES<<1)+last, 3); | |
979 compress_block(s, (const ct_data *)static_ltree, | 1058 compress_block(s, (const ct_data *)static_ltree, |
980 (const ct_data *)static_dtree); | 1059 (const ct_data *)static_dtree); |
981 #ifdef ZLIB_DEBUG | 1060 #ifdef ZLIB_DEBUG |
982 s->compressed_len += 3 + s->static_len; | 1061 s->compressed_len += 3 + s->static_len; |
983 #endif | 1062 #endif |
984 } else { | 1063 } else { |
985 send_bits(s, (DYN_TREES<<1)+last, 3); | 1064 send_bits(s, (DYN_TREES<<1) + last, 3); |
986 send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1, | 1065 send_all_trees(s, s->l_desc.max_code + 1, s->d_desc.max_code + 1, |
987 max_blindex+1); | 1066 max_blindex + 1); |
988 compress_block(s, (const ct_data *)s->dyn_ltree, | 1067 compress_block(s, (const ct_data *)s->dyn_ltree, |
989 (const ct_data *)s->dyn_dtree); | 1068 (const ct_data *)s->dyn_dtree); |
990 #ifdef ZLIB_DEBUG | 1069 #ifdef ZLIB_DEBUG |
991 s->compressed_len += 3 + s->opt_len; | 1070 s->compressed_len += 3 + s->opt_len; |
992 #endif | 1071 #endif |
1001 bi_windup(s); | 1080 bi_windup(s); |
1002 #ifdef ZLIB_DEBUG | 1081 #ifdef ZLIB_DEBUG |
1003 s->compressed_len += 7; /* align on byte boundary */ | 1082 s->compressed_len += 7; /* align on byte boundary */ |
1004 #endif | 1083 #endif |
1005 } | 1084 } |
1006 Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, | 1085 Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len >> 3, |
1007 s->compressed_len-7*last)); | 1086 s->compressed_len - 7*last)); |
1008 } | 1087 } |
1009 | 1088 |
1010 /* =========================================================================== | 1089 /* =========================================================================== |
1011 * Save the match info and tally the frequency counts. Return true if | 1090 * Save the match info and tally the frequency counts. Return true if |
1012 * the current block must be flushed. | 1091 * the current block must be flushed. |
1013 */ | 1092 */ |
1014 int ZLIB_INTERNAL _tr_tally (s, dist, lc) | 1093 int ZLIB_INTERNAL _tr_tally(deflate_state *s, unsigned dist, unsigned lc) { |
1015 deflate_state *s; | 1094 #ifdef LIT_MEM |
1016 unsigned dist; /* distance of matched string */ | 1095 s->d_buf[s->sym_next] = (ush)dist; |
1017 unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ | 1096 s->l_buf[s->sym_next++] = (uch)lc; |
1018 { | 1097 #else |
1019 s->d_buf[s->last_lit] = (ush)dist; | 1098 s->sym_buf[s->sym_next++] = (uch)dist; |
1020 s->l_buf[s->last_lit++] = (uch)lc; | 1099 s->sym_buf[s->sym_next++] = (uch)(dist >> 8); |
1100 s->sym_buf[s->sym_next++] = (uch)lc; | |
1101 #endif | |
1021 if (dist == 0) { | 1102 if (dist == 0) { |
1022 /* lc is the unmatched char */ | 1103 /* lc is the unmatched char */ |
1023 s->dyn_ltree[lc].Freq++; | 1104 s->dyn_ltree[lc].Freq++; |
1024 } else { | 1105 } else { |
1025 s->matches++; | 1106 s->matches++; |
1027 dist--; /* dist = match distance - 1 */ | 1108 dist--; /* dist = match distance - 1 */ |
1028 Assert((ush)dist < (ush)MAX_DIST(s) && | 1109 Assert((ush)dist < (ush)MAX_DIST(s) && |
1029 (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && | 1110 (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && |
1030 (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); | 1111 (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); |
1031 | 1112 |
1032 s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++; | 1113 s->dyn_ltree[_length_code[lc] + LITERALS + 1].Freq++; |
1033 s->dyn_dtree[d_code(dist)].Freq++; | 1114 s->dyn_dtree[d_code(dist)].Freq++; |
1034 } | 1115 } |
1035 | 1116 return (s->sym_next == s->sym_end); |
1036 #ifdef TRUNCATE_BLOCK | 1117 } |
1037 /* Try to guess if it is profitable to stop the current block here */ | |
1038 if ((s->last_lit & 0x1fff) == 0 && s->level > 2) { | |
1039 /* Compute an upper bound for the compressed length */ | |
1040 ulg out_length = (ulg)s->last_lit*8L; | |
1041 ulg in_length = (ulg)((long)s->strstart - s->block_start); | |
1042 int dcode; | |
1043 for (dcode = 0; dcode < D_CODES; dcode++) { | |
1044 out_length += (ulg)s->dyn_dtree[dcode].Freq * | |
1045 (5L+extra_dbits[dcode]); | |
1046 } | |
1047 out_length >>= 3; | |
1048 Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", | |
1049 s->last_lit, in_length, out_length, | |
1050 100L - out_length*100L/in_length)); | |
1051 if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1; | |
1052 } | |
1053 #endif | |
1054 return (s->last_lit == s->lit_bufsize-1); | |
1055 /* We avoid equality with lit_bufsize because of wraparound at 64K | |
1056 * on 16 bit machines and because stored blocks are restricted to | |
1057 * 64K-1 bytes. | |
1058 */ | |
1059 } | |
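A central change in this hunk is the replacement of last_lit, d_buf and l_buf with sym_next and sym_buf: unless LIT_MEM is defined, each literal or match is stored as three consecutive bytes (distance low byte, distance high byte, then the literal or the match length minus MIN_MATCH), and _tr_tally returns true when sym_next reaches sym_end. A minimal sketch (not zlib code) of that packing and the round trip done in compress_block:

    /* Sketch of the 3-bytes-per-symbol sym_buf layout used by the new
     * _tr_tally()/compress_block() when LIT_MEM is not defined.
     * dist == 0 marks a plain literal. */
    #include <stdio.h>

    static unsigned char sym_buf[3 * 4];   /* room for 4 symbols */
    static unsigned sym_next = 0;

    static void tally(unsigned dist, unsigned lc) {    /* like _tr_tally() */
        sym_buf[sym_next++] = (unsigned char)dist;
        sym_buf[sym_next++] = (unsigned char)(dist >> 8);
        sym_buf[sym_next++] = (unsigned char)lc;
    }

    int main(void) {
        unsigned sx = 0;

        tally(0, 'z');      /* literal 'z'                            */
        tally(4100, 5);     /* match: distance 4100, length 5 + 3 = 8 */

        while (sx < sym_next) {                        /* like compress_block() */
            unsigned dist = sym_buf[sx++];
            unsigned lc;
            dist += (unsigned)sym_buf[sx++] << 8;
            lc = sym_buf[sx++];
            if (dist == 0)
                printf("literal '%c'\n", lc);
            else
                printf("match: distance %u, length %u\n", dist, lc + 3);
        }
        return 0;
    }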
1060 | |
1061 /* =========================================================================== | |
1062 * Send the block data compressed using the given Huffman trees | |
1063 */ | |
1064 local void compress_block(s, ltree, dtree) | |
1065 deflate_state *s; | |
1066 const ct_data *ltree; /* literal tree */ | |
1067 const ct_data *dtree; /* distance tree */ | |
1068 { | |
1069 unsigned dist; /* distance of matched string */ | |
1070 int lc; /* match length or unmatched char (if dist == 0) */ | |
1071 unsigned lx = 0; /* running index in l_buf */ | |
1072 unsigned code; /* the code to send */ | |
1073 int extra; /* number of extra bits to send */ | |
1074 | |
1075 if (s->last_lit != 0) do { | |
1076 dist = s->d_buf[lx]; | |
1077 lc = s->l_buf[lx++]; | |
1078 if (dist == 0) { | |
1079 send_code(s, lc, ltree); /* send a literal byte */ | |
1080 Tracecv(isgraph(lc), (stderr," '%c' ", lc)); | |
1081 } else { | |
1082 /* Here, lc is the match length - MIN_MATCH */ | |
1083 code = _length_code[lc]; | |
1084 send_code(s, code+LITERALS+1, ltree); /* send the length code */ | |
1085 extra = extra_lbits[code]; | |
1086 if (extra != 0) { | |
1087 lc -= base_length[code]; | |
1088 send_bits(s, lc, extra); /* send the extra length bits */ | |
1089 } | |
1090 dist--; /* dist is now the match distance - 1 */ | |
1091 code = d_code(dist); | |
1092 Assert (code < D_CODES, "bad d_code"); | |
1093 | |
1094 send_code(s, code, dtree); /* send the distance code */ | |
1095 extra = extra_dbits[code]; | |
1096 if (extra != 0) { | |
1097 dist -= (unsigned)base_dist[code]; | |
1098 send_bits(s, dist, extra); /* send the extra distance bits */ | |
1099 } | |
1100 } /* literal or match pair ? */ | |
1101 | |
1102 /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ | |
1103 Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, | |
1104 "pendingBuf overflow"); | |
1105 | |
1106 } while (lx < s->last_lit); | |
1107 | |
1108 send_code(s, END_BLOCK, ltree); | |
1109 } | |
1110 | |
1111 /* =========================================================================== | |
1112 * Check if the data type is TEXT or BINARY, using the following algorithm: | |
1113 * - TEXT if the two conditions below are satisfied: | |
1114 * a) There are no non-portable control characters belonging to the | |
1115 * "black list" (0..6, 14..25, 28..31). | |
1116 * b) There is at least one printable character belonging to the | |
1117 * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). | |
1118 * - BINARY otherwise. | |
1119 * - The following partially-portable control characters form a | |
1120 * "gray list" that is ignored in this detection algorithm: | |
1121 * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). | |
1122 * IN assertion: the fields Freq of dyn_ltree are set. | |
1123 */ | |
1124 local int detect_data_type(s) | |
1125 deflate_state *s; | |
1126 { | |
1127 /* black_mask is the bit mask of black-listed bytes | |
1128 * set bits 0..6, 14..25, and 28..31 | |
1129 * 0xf3ffc07f = binary 11110011111111111100000001111111 | |
1130 */ | |
1131 unsigned long black_mask = 0xf3ffc07fUL; | |
1132 int n; | |
1133 | |
1134 /* Check for non-textual ("black-listed") bytes. */ | |
1135 for (n = 0; n <= 31; n++, black_mask >>= 1) | |
1136 if ((black_mask & 1) && (s->dyn_ltree[n].Freq != 0)) | |
1137 return Z_BINARY; | |
1138 | |
1139 /* Check for textual ("white-listed") bytes. */ | |
1140 if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0 | |
1141 || s->dyn_ltree[13].Freq != 0) | |
1142 return Z_TEXT; | |
1143 for (n = 32; n < LITERALS; n++) | |
1144 if (s->dyn_ltree[n].Freq != 0) | |
1145 return Z_TEXT; | |
1146 | |
1147 /* There are no "black-listed" or "white-listed" bytes: | |
1148 * this stream either is empty or has tolerated ("gray-listed") bytes only. | |
1149 */ | |
1150 return Z_BINARY; | |
1151 } | |
1152 | |
1153 /* =========================================================================== | |
1154 * Reverse the first len bits of a code, using straightforward code (a faster | |
1155 * method would use a table) | |
1156 * IN assertion: 1 <= len <= 15 | |
1157 */ | |
1158 local unsigned bi_reverse(code, len) | |
1159 unsigned code; /* the value to invert */ | |
1160 int len; /* its bit length */ | |
1161 { | |
1162 register unsigned res = 0; | |
1163 do { | |
1164 res |= code & 1; | |
1165 code >>= 1, res <<= 1; | |
1166 } while (--len > 0); | |
1167 return res >> 1; | |
1168 } | |
1169 | |
1170 /* =========================================================================== | |
1171 * Flush the bit buffer, keeping at most 7 bits in it. | |
1172 */ | |
1173 local void bi_flush(s) | |
1174 deflate_state *s; | |
1175 { | |
1176 if (s->bi_valid == 16) { | |
1177 put_short(s, s->bi_buf); | |
1178 s->bi_buf = 0; | |
1179 s->bi_valid = 0; | |
1180 } else if (s->bi_valid >= 8) { | |
1181 put_byte(s, (Byte)s->bi_buf); | |
1182 s->bi_buf >>= 8; | |
1183 s->bi_valid -= 8; | |
1184 } | |
1185 } | |
1186 | |
1187 /* =========================================================================== | |
1188 * Flush the bit buffer and align the output on a byte boundary | |
1189 */ | |
1190 local void bi_windup(s) | |
1191 deflate_state *s; | |
1192 { | |
1193 if (s->bi_valid > 8) { | |
1194 put_short(s, s->bi_buf); | |
1195 } else if (s->bi_valid > 0) { | |
1196 put_byte(s, (Byte)s->bi_buf); | |
1197 } | |
1198 s->bi_buf = 0; | |
1199 s->bi_valid = 0; | |
1200 #ifdef ZLIB_DEBUG | |
1201 s->bits_sent = (s->bits_sent+7) & ~7; | |
1202 #endif | |
1203 } |