varnish-cache/lib/libvgz/trees.c

/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-2024 Jean-loup Gailly
 * detect_data_type() function provided freely by Cosmin Truta, 2006
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process uses several Huffman trees. The more
 *      common source values are represented by shorter bit sequences.
 *
 *      Each code tree is stored in a compressed form which is itself
 * a Huffman encoding of the lengths of all the code strings (in
 * ascending order by source values).  The actual code strings are
 * reconstructed from the lengths in the inflate process, as described
 * in the deflate specification.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 *      Storer, James A.
 *          Data Compression:  Methods and Theory, pp. 49-50.
 *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
 *
 *      Sedgewick, R.
 *          Algorithms, p290.
 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
 */

/* @(#) $Id$ */

/* #define GEN_TREES_H */

#include "deflate.h"

#ifdef ZLIB_DEBUG
#  include <ctype.h>
#endif

/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10    17
/* repeat a zero length 3-10 times  (3 bits of repeat count) */

#define REPZ_11_138  18
/* repeat a zero length 11-138 times  (7 bits of repeat count) */

local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

local const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

local const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

#define DIST_CODE_LEN  512 /* see definition of array dist_code below */

#if defined(GEN_TREES_H) || !defined(STDC)
/* non ANSI compilers may not accept trees.h */

local ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However,
 * the codes 286 and 287 are needed to build a canonical tree (see _tr_init
 * below).
 */

local ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

uch _dist_code[DIST_CODE_LEN];
/* Distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

uch _length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

local int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

local int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */

#else
#  include "trees.h"
#endif /* GEN_TREES_H */

struct static_tree_desc_s {
    const ct_data *static_tree;  /* static tree or NULL */
    const intf *extra_bits;      /* extra bits for each code or NULL */
    int     extra_base;          /* base index for extra_bits */
    int     elems;               /* max number of elements in the tree */
    int     max_length;          /* max bit length for the codes */
};

#ifdef NO_INIT_GLOBAL_POINTERS
#  define TCONST
#else
#  define TCONST const
#endif

local TCONST static_tree_desc  static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

local TCONST static_tree_desc  static_d_desc =
{static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};

local TCONST static_tree_desc  static_bl_desc =
{(const ct_data *)0, extra_blbits, 0,   BL_CODES, MAX_BL_BITS};

/* ===========================================================================
 * Output a short LSB first on the stream.
 * IN assertion: there is enough room in pendingBuf.
 */
#define put_short(s, w) { \
    put_byte(s, (uch)((w) & 0xff)); \
    put_byte(s, (uch)((ush)(w) >> 8)); \
}
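
/* Worked example: for w == 0x0301 the macro appends the byte 0x01 and
 * then 0x03, i.e. the 16-bit value is written least-significant byte
 * first, which is the byte order used for the LEN/NLEN words of stored
 * blocks below.
 */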

/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
local unsigned bi_reverse(unsigned code, int len) {
    register unsigned res = 0;
    do {
        res |= code & 1;
        code >>= 1, res <<= 1;
    } while (--len > 0);
    return res >> 1;
}
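
/* Worked example: bi_reverse(0x0b, 5) mirrors the low five bits, turning
 * 01011 into 11010, so the call returns 0x1a.  gen_codes() relies on this
 * because send_bits() emits the low-order bit of a value first, while
 * RFC 1951 requires Huffman codes to be packed starting from their most
 * significant bit; storing the reversed code satisfies both at once.
 */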

/* ===========================================================================
 * Flush the bit buffer, keeping at most 7 bits in it.
 */
local void bi_flush(deflate_state *s) {
    if (s->bi_valid == 16) {
        put_short(s, s->bi_buf);
        s->bi_buf = 0;
        s->bi_valid = 0;
    } else if (s->bi_valid >= 8) {
        put_byte(s, (Byte)s->bi_buf);
        s->bi_buf >>= 8;
        s->bi_valid -= 8;
    }
}

/* ===========================================================================
 * Flush the bit buffer and align the output on a byte boundary
 */
local void bi_windup(deflate_state *s) {
    if (s->bi_valid > 8) {
        put_short(s, s->bi_buf);
    } else if (s->bi_valid > 0) {
        put_byte(s, (Byte)s->bi_buf);
    }
    s->bi_used = ((s->bi_valid - 1) & 7) + 1;
    s->bi_buf = 0;
    s->bi_valid = 0;
#ifdef ZLIB_DEBUG
    s->bits_sent = (s->bits_sent + 7) & ~7;
#endif
}

/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
local void gen_codes(ct_data *tree, int max_code, ushf *bl_count) {
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    unsigned code = 0;         /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        code = (code + bl_count[bits-1]) << 1;
        next_code[bits] = (ush)code;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0; n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits */
        tree[n].Code = (ush)bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
            n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}
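
/* Small worked example of the construction above: for a tree whose four
 * symbols A..D have code lengths 2, 1, 3 and 3, the counts are
 * bl_count[1..3] = {1, 1, 2}, next_code[1..3] starts as {0, 10, 110}
 * (binary), and the loop assigns B = 0, A = 10, C = 110, D = 111.
 * These are the canonical codes shown MSB first; tree[n].Code holds the
 * bit-reversed form ready for send_bits().
 */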

#ifdef GEN_TREES_H
local void gen_trees_header(void);
#endif

#ifndef ZLIB_DEBUG
#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
   /* Send a code of the given tree. c and tree must not have side effects */

#else /* !ZLIB_DEBUG */
#  define send_code(s, c, tree) \
     { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
       send_bits(s, tree[c].Code, tree[c].Len); }
#endif

/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
#ifdef ZLIB_DEBUG
local void send_bits(deflate_state *s, int value, int length) {
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (ush)value << s->bi_valid;
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= (ush)value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !ZLIB_DEBUG */

#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = (int)value;\
    s->bi_buf |= (ush)val << s->bi_valid;\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (ush)(value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* ZLIB_DEBUG */


/* the arguments must not have side effects */

/* ===========================================================================
 * Initialize the various 'constant' tables.
 */
local void tr_static_init(void) {
#if defined(GEN_TREES_H) || !defined(STDC)
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* For some embedded targets, global variables are not initialized: */
#ifdef NO_INIT_GLOBAL_POINTERS
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;
#endif

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

#  ifdef GEN_TREES_H
    gen_trees_header();
#  endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
}
|
|
| 375 |
|
/* =========================================================================== |
| 376 |
|
* Generate the file trees.h describing the static trees. |
| 377 |
|
*/ |
| 378 |
|
#ifdef GEN_TREES_H |
| 379 |
|
# ifndef ZLIB_DEBUG |
| 380 |
|
# include <stdio.h> |
| 381 |
|
# endif |
| 382 |
|
|
| 383 |
|
# define SEPARATOR(i, last, width) \ |
| 384 |
|
((i) == (last)? "\n};\n\n" : \ |
| 385 |
|
((i) % (width) == (width)-1 ? ",\n" : ", ")) |
| 386 |
|
|
| 387 |
|
void gen_trees_header(void) { |
| 388 |
|
FILE *header = fopen("trees.h", "w"); |
| 389 |
|
int i; |
| 390 |
|
|
| 391 |
|
Assert (header != NULL, "Can't open trees.h"); |
| 392 |
|
fprintf(header, |
| 393 |
|
"/* header created automatically with -DGEN_TREES_H */\n\n"); |
| 394 |
|
|
| 395 |
|
fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n"); |
| 396 |
|
for (i = 0; i < L_CODES+2; i++) { |
| 397 |
|
fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code, |
| 398 |
|
static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5)); |
| 399 |
|
} |
| 400 |
|
|
| 401 |
|
fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n"); |
| 402 |
|
for (i = 0; i < D_CODES; i++) { |
| 403 |
|
fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code, |
| 404 |
|
static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5)); |
| 405 |
|
} |
| 406 |
|
|
| 407 |
|
fprintf(header, "const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {\n"); |
| 408 |
|
for (i = 0; i < DIST_CODE_LEN; i++) { |
| 409 |
|
fprintf(header, "%2u%s", _dist_code[i], |
| 410 |
|
SEPARATOR(i, DIST_CODE_LEN-1, 20)); |
| 411 |
|
} |
| 412 |
|
|
| 413 |
|
fprintf(header, |
| 414 |
|
"const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {\n"); |
| 415 |
|
for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) { |
| 416 |
|
fprintf(header, "%2u%s", _length_code[i], |
| 417 |
|
SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20)); |
| 418 |
|
} |
| 419 |
|
|
| 420 |
|
fprintf(header, "local const int base_length[LENGTH_CODES] = {\n"); |
| 421 |
|
for (i = 0; i < LENGTH_CODES; i++) { |
| 422 |
|
fprintf(header, "%1u%s", base_length[i], |
| 423 |
|
SEPARATOR(i, LENGTH_CODES-1, 20)); |
| 424 |
|
} |
| 425 |
|
|
| 426 |
|
fprintf(header, "local const int base_dist[D_CODES] = {\n"); |
| 427 |
|
for (i = 0; i < D_CODES; i++) { |
| 428 |
|
fprintf(header, "%5u%s", base_dist[i], |
| 429 |
|
SEPARATOR(i, D_CODES-1, 10)); |
| 430 |
|
} |
| 431 |
|
|
| 432 |
|
fclose(header); |
| 433 |
|
} |
| 434 |
|
#endif /* GEN_TREES_H */ |
| 435 |
|
|
| 436 |
|
/* =========================================================================== |
| 437 |
|
* Initialize a new block. |
| 438 |
|
*/ |
| 439 |
20920 |
local void init_block(deflate_state *s) { |
| 440 |
|
int n; /* iterates over tree elements */ |
| 441 |
|
|
| 442 |
|
/* Initialize the trees. */ |
| 443 |
6004040 |
for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0; |
| 444 |
648520 |
for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0; |
| 445 |
418400 |
for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0; |
| 446 |
|
|
| 447 |
20920 |
s->dyn_ltree[END_BLOCK].Freq = 1; |
| 448 |
20920 |
s->opt_len = s->static_len = 0L; |
| 449 |
20920 |
s->sym_next = s->matches = 0; |
| 450 |
20920 |
} |
| 451 |
|
|
| 452 |
|
/* =========================================================================== |
| 453 |
|
* Initialize the tree data structures for a new zlib stream. |
| 454 |
|
*/ |
| 455 |
10320 |
void ZLIB_INTERNAL _tr_init(deflate_state *s) { |
| 456 |
10320 |
tr_static_init(); |
| 457 |
|
|
| 458 |
10320 |
s->l_desc.dyn_tree = s->dyn_ltree; |
| 459 |
10320 |
s->l_desc.stat_desc = &static_l_desc; |
| 460 |
|
|
| 461 |
10320 |
s->d_desc.dyn_tree = s->dyn_dtree; |
| 462 |
10320 |
s->d_desc.stat_desc = &static_d_desc; |
| 463 |
|
|
| 464 |
10320 |
s->bl_desc.dyn_tree = s->bl_tree; |
| 465 |
10320 |
s->bl_desc.stat_desc = &static_bl_desc; |
| 466 |
|
|
| 467 |
10320 |
s->bi_buf = 0; |
| 468 |
10320 |
s->bi_valid = 0; |
| 469 |
10320 |
s->bi_used = 0; |
| 470 |
|
#ifdef ZLIB_DEBUG |
| 471 |
|
s->compressed_len = 0L; |
| 472 |
|
s->bits_sent = 0L; |
| 473 |
|
#endif |
| 474 |
|
|
| 475 |
|
/* Initialize the first block of the first file: */ |
| 476 |
10320 |
init_block(s); |
| 477 |
10320 |
} |
| 478 |
|
|
| 479 |
|
#define SMALLEST 1 |
| 480 |
|
/* Index within the heap array of least frequent node in the Huffman tree */ |
| 481 |
|
|
| 482 |
|
|
| 483 |
|
/* =========================================================================== |
| 484 |
|
* Remove the smallest element from the heap and recreate the heap with |
| 485 |
|
* one less element. Updates heap and heap_len. |
| 486 |
|
*/ |
| 487 |
|
#define pqremove(s, tree, top) \ |
| 488 |
|
{\ |
| 489 |
|
top = s->heap[SMALLEST]; \ |
| 490 |
|
s->heap[SMALLEST] = s->heap[s->heap_len--]; \ |
| 491 |
|
pqdownheap(s, tree, SMALLEST); \ |
| 492 |
|
} |
| 493 |
|
|
| 494 |
|
/* =========================================================================== |
| 495 |
|
* Compares to subtrees, using the tree depth as tie breaker when |
| 496 |
|
* the subtrees have equal frequency. This minimizes the worst case length. |
| 497 |
|
*/ |
| 498 |
|
#define smaller(tree, n, m, depth) \ |
| 499 |
|
(tree[n].Freq < tree[m].Freq || \ |
| 500 |
|
(tree[n].Freq == tree[m].Freq && depth[n] <= depth[m])) |
| 501 |
|
|
| 502 |
|
/* =========================================================================== |
| 503 |
|
* Restore the heap property by moving down the tree starting at node k, |
| 504 |
|
* exchanging a node with the smallest of its two sons if necessary, stopping |
| 505 |
|
* when the heap property is re-established (each father smaller than its |
| 506 |
|
* two sons). |
| 507 |
|
*/ |
| 508 |
611245 |
local void pqdownheap(deflate_state *s, ct_data *tree, int k) { |
| 509 |
611245 |
int v = s->heap[k]; |
| 510 |
611245 |
int j = k << 1; /* left son of k */ |
| 511 |
1737900 |
while (j <= s->heap_len) { |
| 512 |
|
/* Set j to the smallest of the two sons: */ |
| 513 |
2002321 |
if (j < s->heap_len && |
| 514 |
1226055 |
smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { |
| 515 |
660270 |
j++; |
| 516 |
660270 |
} |
| 517 |
|
/* Exit if v is smaller than both sons */ |
| 518 |
1332607 |
if (smaller(tree, v, s->heap[j], s->depth)) break; |
| 519 |
|
|
| 520 |
|
/* Exchange v with the smallest son */ |
| 521 |
1126655 |
s->heap[k] = s->heap[j]; k = j; |
| 522 |
|
|
| 523 |
|
/* And continue down the tree, setting j to the left son of k */ |
| 524 |
1126655 |
j <<= 1; |
| 525 |
|
} |
| 526 |
611245 |
s->heap[k] = v; |
| 527 |
611245 |
} |
| 528 |
|
|
| 529 |
|
/* =========================================================================== |
| 530 |
|
* Compute the optimal bit lengths for a tree and update the total bit length |
| 531 |
|
* for the current block. |
| 532 |
|
* IN assertion: the fields freq and dad are set, heap[heap_max] and |
| 533 |
|
* above are the tree nodes sorted by increasing frequency. |
| 534 |
|
* OUT assertions: the field len is set to the optimal bit length, the |
| 535 |
|
* array bl_count contains the frequencies for each bit length. |
| 536 |
|
* The length opt_len is updated; static_len is also updated if stree is |
| 537 |
|
* not null. |
| 538 |
|
*/ |
| 539 |
31800 |
local void gen_bitlen(deflate_state *s, tree_desc *desc) { |
| 540 |
31800 |
ct_data *tree = desc->dyn_tree; |
| 541 |
31800 |
int max_code = desc->max_code; |
| 542 |
31800 |
const ct_data *stree = desc->stat_desc->static_tree; |
| 543 |
31800 |
const intf *extra = desc->stat_desc->extra_bits; |
| 544 |
31800 |
int base = desc->stat_desc->extra_base; |
| 545 |
31800 |
int max_length = desc->stat_desc->max_length; |
| 546 |
|
int h; /* heap index */ |
| 547 |
|
int n, m; /* iterate over the tree elements */ |
| 548 |
|
int bits; /* bit length */ |
| 549 |
|
int xbits; /* extra bits */ |
| 550 |
|
ush f; /* frequency */ |
| 551 |
31800 |
int overflow = 0; /* number of elements with bit length too large */ |
| 552 |
|
|
| 553 |
540600 |
for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0; |
| 554 |
|
|
| 555 |
|
/* In a first pass, compute the optimal bit lengths (which may |
| 556 |
|
* overflow in the case of the bit length tree). |
| 557 |
|
*/ |
| 558 |
31800 |
tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ |
| 559 |
|
|
| 560 |
511184 |
for (h = s->heap_max+1; h < HEAP_SIZE; h++) { |
| 561 |
479384 |
n = s->heap[h]; |
| 562 |
479384 |
bits = tree[tree[n].Dad].Len + 1; |
| 563 |
479384 |
if (bits > max_length) bits = max_length, overflow++; |
| 564 |
479384 |
tree[n].Len = (ush)bits; |
| 565 |
|
/* We overwrite tree[n].Dad which is no longer needed */ |
| 566 |
|
|
| 567 |
479384 |
if (n > max_code) continue; /* not a leaf node */ |
| 568 |
|
|
| 569 |
271492 |
s->bl_count[bits]++; |
| 570 |
271492 |
xbits = 0; |
| 571 |
271492 |
if (n >= base) xbits = extra[n-base]; |
| 572 |
271492 |
f = tree[n].Freq; |
| 573 |
271492 |
s->opt_len += (ulg)f * (unsigned)(bits + xbits); |
| 574 |
271492 |
if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits); |
| 575 |
271492 |
} |
| 576 |
31800 |
if (overflow == 0) return; |
| 577 |
|
|
| 578 |
|
Tracev((stderr,"\nbit length overflow\n")); |
| 579 |
|
/* This happens for example on obj2 and pic of the Calgary corpus */ |
| 580 |
|
|
| 581 |
|
/* Find the first bit length which could increase: */ |
| 582 |
0 |
do { |
| 583 |
0 |
bits = max_length-1; |
| 584 |
0 |
while (s->bl_count[bits] == 0) bits--; |
| 585 |
0 |
s->bl_count[bits]--; /* move one leaf down the tree */ |
| 586 |
0 |
s->bl_count[bits+1] += 2; /* move one overflow item as its brother */ |
| 587 |
0 |
s->bl_count[max_length]--; |
| 588 |
|
/* The brother of the overflow item also moves one step up, |
| 589 |
|
* but this does not affect bl_count[max_length] |
| 590 |
|
*/ |
| 591 |
0 |
overflow -= 2; |
| 592 |
0 |
} while (overflow > 0); |
| 593 |
|
|
| 594 |
|
/* Now recompute all bit lengths, scanning in increasing frequency. |
| 595 |
|
* h is still equal to HEAP_SIZE. (It is simpler to reconstruct all |
| 596 |
|
* lengths instead of fixing only the wrong ones. This idea is taken |
| 597 |
|
* from 'ar' written by Haruhiko Okumura.) |
| 598 |
|
*/ |
| 599 |
0 |
for (bits = max_length; bits != 0; bits--) { |
| 600 |
0 |
n = s->bl_count[bits]; |
| 601 |
0 |
while (n != 0) { |
| 602 |
0 |
m = s->heap[--h]; |
| 603 |
0 |
if (m > max_code) continue; |
| 604 |
0 |
if ((unsigned) tree[m].Len != (unsigned) bits) { |
| 605 |
|
Tracev((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); |
| 606 |
0 |
s->opt_len += ((ulg)bits - tree[m].Len) * tree[m].Freq; |
| 607 |
0 |
tree[m].Len = (ush)bits; |
| 608 |
0 |
} |
| 609 |
0 |
n--; |
| 610 |
|
} |
| 611 |
0 |
} |
| 612 |
31800 |
} |
| 613 |
|
|
| 614 |
|
#ifdef DUMP_BL_TREE |
| 615 |
|
# include <stdio.h> |
| 616 |
|
#endif |
| 617 |
|
|
| 618 |
|
/* =========================================================================== |
| 619 |
|
* Construct one Huffman tree and assigns the code bit strings and lengths. |
| 620 |
|
* Update the total bit length for the current block. |
| 621 |
|
* IN assertion: the field freq is set for all tree elements. |
| 622 |
|
* OUT assertions: the fields len and code are set to the optimal bit length |
| 623 |
|
* and corresponding code. The length opt_len is updated; static_len is |
| 624 |
|
* also updated if stree is not null. The field max_code is set. |
| 625 |
|
*/ |
| 626 |
31800 |
local void build_tree(deflate_state *s, tree_desc *desc) { |
| 627 |
31800 |
ct_data *tree = desc->dyn_tree; |
| 628 |
31800 |
const ct_data *stree = desc->stat_desc->static_tree; |
| 629 |
31800 |
int elems = desc->stat_desc->elems; |
| 630 |
|
int n, m; /* iterate over heap elements */ |
| 631 |
31800 |
int max_code = -1; /* largest code with non zero frequency */ |
| 632 |
|
int node; /* new node being created */ |
| 633 |
|
|
| 634 |
|
/* Construct the initial heap, with least frequent element in |
| 635 |
|
* heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. |
| 636 |
|
* heap[0] is not used. |
| 637 |
|
*/ |
| 638 |
31800 |
s->heap_len = 0, s->heap_max = HEAP_SIZE; |
| 639 |
|
|
| 640 |
3582800 |
for (n = 0; n < elems; n++) { |
| 641 |
3551000 |
if (tree[n].Freq != 0) { |
| 642 |
250900 |
s->heap[++(s->heap_len)] = max_code = n; |
| 643 |
250900 |
s->depth[n] = 0; |
| 644 |
250900 |
} else { |
| 645 |
3300100 |
tree[n].Len = 0; |
| 646 |
|
} |
| 647 |
3551000 |
} |
| 648 |
|
|
| 649 |
|
/* The pkzip format requires that at least one distance code exists, |
| 650 |
|
* and that at least one bit should be sent even if there is only one |
| 651 |
|
* possible code. So to avoid special checks later on we force at least |
| 652 |
|
* two codes of non zero frequency. |
| 653 |
|
*/ |
| 654 |
52392 |
while (s->heap_len < 2) { |
| 655 |
20592 |
node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0); |
| 656 |
20592 |
tree[node].Freq = 1; |
| 657 |
20592 |
s->depth[node] = 0; |
| 658 |
20592 |
s->opt_len--; if (stree) s->static_len -= stree[node].Len; |
| 659 |
|
/* node is 0 or 1 so it does not have extra bits */ |
| 660 |
|
} |
| 661 |
31800 |
desc->max_code = max_code; |
| 662 |
|
|
| 663 |
|
/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, |
| 664 |
|
* establish sub-heaps of increasing lengths: |
| 665 |
|
*/ |
| 666 |
163661 |
for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); |
| 667 |
|
|
| 668 |
|
/* Construct the Huffman tree by repeatedly combining the least two |
| 669 |
|
* frequent nodes. |
| 670 |
|
*/ |
| 671 |
31800 |
node = elems; /* next internal node of the tree */ |
| 672 |
31800 |
do { |
| 673 |
239692 |
pqremove(s, tree, n); /* n = node of least frequency */ |
| 674 |
239692 |
m = s->heap[SMALLEST]; /* m = node of next least frequency */ |
| 675 |
|
|
| 676 |
239692 |
s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */ |
| 677 |
239692 |
s->heap[--(s->heap_max)] = m; |
| 678 |
|
|
| 679 |
|
/* Create a new node father of n and m */ |
| 680 |
239692 |
tree[node].Freq = tree[n].Freq + tree[m].Freq; |
| 681 |
239692 |
s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ? |
| 682 |
239692 |
s->depth[n] : s->depth[m]) + 1); |
| 683 |
239692 |
tree[n].Dad = tree[m].Dad = (ush)node; |
| 684 |
|
#ifdef DUMP_BL_TREE |
| 685 |
|
if (tree == s->bl_tree) { |
| 686 |
|
fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)", |
| 687 |
|
node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); |
| 688 |
|
} |
| 689 |
|
#endif |
| 690 |
|
/* and insert the new node in the heap */ |
| 691 |
239692 |
s->heap[SMALLEST] = node++; |
| 692 |
239692 |
pqdownheap(s, tree, SMALLEST); |
| 693 |
|
|
| 694 |
239692 |
} while (s->heap_len >= 2); |
| 695 |
|
|
| 696 |
31800 |
s->heap[--(s->heap_max)] = s->heap[SMALLEST]; |
| 697 |
|
|
| 698 |
|
/* At this point, the fields freq and dad are set. We can now |
| 699 |
|
* generate the bit lengths. |
| 700 |
|
*/ |
| 701 |
31800 |
gen_bitlen(s, (tree_desc *)desc); |
| 702 |
|
|
| 703 |
|
/* The field len is now set, we can generate the bit codes */ |
| 704 |
31800 |
gen_codes ((ct_data *)tree, max_code, s->bl_count); |
| 705 |
31800 |
} |
| 706 |
|
|
| 707 |
|
/* =========================================================================== |
| 708 |
|
* Scan a literal or distance tree to determine the frequencies of the codes |
| 709 |
|
* in the bit length tree. |
| 710 |
|
*/ |
| 711 |
21200 |
local void scan_tree(deflate_state *s, ct_data *tree, int max_code) { |
| 712 |
|
int n; /* iterates over all tree elements */ |
| 713 |
21200 |
int prevlen = -1; /* last emitted length */ |
| 714 |
|
int curlen; /* length of current code */ |
| 715 |
21200 |
int nextlen = tree[0].Len; /* length of next code */ |
| 716 |
21200 |
int count = 0; /* repeat count of the current code */ |
| 717 |
21200 |
int max_count = 7; /* max repeat count */ |
| 718 |
21200 |
int min_count = 4; /* min repeat count */ |
| 719 |
|
|
| 720 |
21200 |
if (nextlen == 0) max_count = 138, min_count = 3; |
| 721 |
21200 |
tree[max_code+1].Len = (ush)0xffff; /* guard */ |
| 722 |
|
|
| 723 |
2803530 |
for (n = 0; n <= max_code; n++) { |
| 724 |
2782330 |
curlen = nextlen; nextlen = tree[n+1].Len; |
| 725 |
2782330 |
if (++count < max_count && curlen == nextlen) { |
| 726 |
2530022 |
continue; |
| 727 |
252308 |
} else if (count < min_count) { |
| 728 |
197198 |
s->bl_tree[curlen].Freq += (ush)count; |
| 729 |
252308 |
} else if (curlen != 0) { |
| 730 |
4188 |
if (curlen != prevlen) s->bl_tree[curlen].Freq++; |
| 731 |
4188 |
s->bl_tree[REP_3_6].Freq++; |
| 732 |
55110 |
} else if (count <= 10) { |
| 733 |
18969 |
s->bl_tree[REPZ_3_10].Freq++; |
| 734 |
18969 |
} else { |
| 735 |
31953 |
s->bl_tree[REPZ_11_138].Freq++; |
| 736 |
|
} |
| 737 |
252308 |
count = 0; prevlen = curlen; |
| 738 |
252308 |
if (nextlen == 0) { |
| 739 |
84793 |
max_count = 138, min_count = 3; |
| 740 |
252308 |
} else if (curlen == nextlen) { |
| 741 |
934 |
max_count = 6, min_count = 3; |
| 742 |
934 |
} else { |
| 743 |
166581 |
max_count = 7, min_count = 4; |
| 744 |
|
} |
| 745 |
252308 |
} |
| 746 |
21200 |
} |
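
/* Example of the run-length coding prepared here and emitted by
 * send_tree() below: a run of six code lengths of 8 followed by five
 * zero lengths is sent as the length 8 itself, REP_3_6 with a 2-bit
 * repeat count of 2 ("repeat the previous length 5 more times"), and
 * REPZ_3_10 with a 3-bit count of 2 ("five zero lengths"), instead of
 * eleven individual code lengths.
 */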

/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
 */
local void send_tree(deflate_state *s, ct_data *tree, int max_code) {
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */  /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
local int build_bl_tree(deflate_state *s) {
    int max_blindex;  /* index of last bit length code of non zero freq */

    /* Determine the bit length frequencies for literal and distance trees */
    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);

    /* Build the bit length tree: */
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
    /* opt_len now includes the length of the tree representations, except the
     * lengths of the bit length codes and the 5 + 5 + 4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
    s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
            s->opt_len, s->static_len));

    return max_blindex;
}

/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
local void send_all_trees(deflate_state *s, int lcodes, int dcodes,
                          int blcodes) {
    int rank;                    /* index in bl_order */

    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
    send_bits(s, lcodes-257, 5);  /* not +255 as stated in appnote.txt */
    send_bits(s, dcodes-1,   5);
    send_bits(s, blcodes-4,  4);  /* not -3 as stated in appnote.txt */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1);  /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1);  /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}

/* ===========================================================================
 * Send a stored block
 */
void ZLIB_INTERNAL _tr_stored_block(deflate_state *s, charf *buf,
                                    ulg stored_len, int last) {
    if (last)
        s->strm->last_bit =
            (s->strm->total_out + s->pending) * 8 + s->bi_valid;

    send_bits(s, (STORED_BLOCK<<1)+last, 3);    /* send block type */
    bi_windup(s);        /* align on byte boundary */
    put_short(s, (ush)stored_len);
    put_short(s, (ush)~stored_len);
    if (stored_len)
        zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len);
    s->pending += stored_len;
#ifdef ZLIB_DEBUG
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;
    s->bits_sent += 2*16;
    s->bits_sent += stored_len<<3;
#endif
    if (last)
        s->strm->stop_bit =
            (s->strm->total_out + s->pending) * 8 + s->bi_valid;
}

/* ===========================================================================
 * Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
 */
void ZLIB_INTERNAL _tr_flush_bits(deflate_state *s) {
    bi_flush(s);
}

/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 */
void ZLIB_INTERNAL _tr_align(deflate_state *s) {
    send_bits(s, STATIC_TREES<<1, 3);
    send_code(s, END_BLOCK, static_ltree);
#ifdef ZLIB_DEBUG
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
#endif
    bi_flush(s);
}

/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
local void compress_block(deflate_state *s, const ct_data *ltree,
                          const ct_data *dtree) {
    unsigned dist;      /* distance of matched string */
    int lc;             /* match length or unmatched char (if dist == 0) */
    unsigned sx = 0;    /* running index in symbol buffers */
    unsigned code;      /* the code to send */
    int extra;          /* number of extra bits to send */

    if (s->sym_next != 0) do {
#ifdef LIT_MEM
        dist = s->d_buf[sx];
        lc = s->l_buf[sx++];
#else
        dist = s->sym_buf[sx++] & 0xff;
        dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
        lc = s->sym_buf[sx++];
#endif
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = _length_code[lc];
            send_code(s, code + LITERALS + 1, ltree);   /* send length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra);       /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree);       /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= (unsigned)base_dist[code];
                send_bits(s, dist, extra);   /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check for no overlay of pending_buf on needed symbols */
#ifdef LIT_MEM
        Assert(s->pending < 2 * (s->lit_bufsize + sx), "pendingBuf overflow");
#else
        Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");
#endif

    } while (sx < s->sym_next);

    send_code(s, END_BLOCK, ltree);
}
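
/* Layout of the symbol buffer consumed above (without LIT_MEM):
 * _tr_tally() appends one three-byte record per symbol,
 *
 *     sym_buf[3*k + 0]   distance, low byte  (0 for a literal)
 *     sym_buf[3*k + 1]   distance, high byte
 *     sym_buf[3*k + 2]   literal byte, or match length - MIN_MATCH
 *
 * so the literal 'A' is stored as 00 00 41 and a match of length 6 at
 * distance 300 as 2c 01 03.
 */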

/* ===========================================================================
 * Check if the data type is TEXT or BINARY, using the following algorithm:
 * - TEXT if the two conditions below are satisfied:
 *    a) There are no non-portable control characters belonging to the
 *       "block list" (0..6, 14..25, 28..31).
 *    b) There is at least one printable character belonging to the
 *       "allow list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
 * - BINARY otherwise.
 * - The following partially-portable control characters form a
 *   "gray list" that is ignored in this detection algorithm:
 *   (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
 * IN assertion: the fields Freq of dyn_ltree are set.
 */
local int detect_data_type(deflate_state *s) {
    /* block_mask is the bit mask of block-listed bytes
     * set bits 0..6, 14..25, and 28..31
     * 0xf3ffc07f = binary 11110011111111111100000001111111
     */
    unsigned long block_mask = 0xf3ffc07fUL;
    int n;

    /* Check for non-textual ("block-listed") bytes. */
    for (n = 0; n <= 31; n++, block_mask >>= 1)
        if ((block_mask & 1) && (s->dyn_ltree[n].Freq != 0))
            return Z_BINARY;

    /* Check for textual ("allow-listed") bytes. */
    if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
            || s->dyn_ltree[13].Freq != 0)
        return Z_TEXT;
    for (n = 32; n < LITERALS; n++)
        if (s->dyn_ltree[n].Freq != 0)
            return Z_TEXT;

    /* There are no "block-listed" or "allow-listed" bytes:
     * this stream either is empty or has tolerated ("gray-listed") bytes only.
     */
    return Z_BINARY;
}
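
/* For instance, a block that tallied only the bytes of "abc\n" has a
 * nonzero Freq for LF (10), so the allow-list check returns Z_TEXT,
 * while a single NUL byte (code 0) is block-listed and yields Z_BINARY.
 */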

/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and write out the encoded block.
 */
void ZLIB_INTERNAL _tr_flush_block(deflate_state *s, charf *buf,
                                   ulg stored_len, int last) {
    ulg opt_lenb, static_lenb;   /* opt_len and static_len in bytes */
    int max_blindex = 0;  /* index of last bit length code of non zero freq */

    if (last)
        s->strm->last_bit =
            (s->strm->total_out + s->pending) * 8 + s->bi_valid;

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is binary or text */
        if (s->strm->data_type == Z_UNKNOWN)
            s->strm->data_type = detect_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute the block lengths in bytes. */
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->sym_next / 3));

#ifndef FORCE_STATIC
        if (static_lenb <= opt_lenb || s->strategy == Z_FIXED)
#endif
            opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
        /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        _tr_stored_block(s, buf, stored_len, last);

    } else if (static_lenb == opt_lenb) {
        send_bits(s, (STATIC_TREES<<1)+last, 3);
        compress_block(s, (const ct_data *)static_ltree,
                       (const ct_data *)static_dtree);
#ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->static_len;
#endif
    } else {
        send_bits(s, (DYN_TREES<<1)+last, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (const ct_data *)s->dyn_ltree,
                       (const ct_data *)s->dyn_dtree);
#ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->opt_len;
#endif
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    /* The above check is made mod 2^32, for files larger than 512 MB
     * and uLong implemented on 32 bits.
     */
    init_block(s);

    if (last) {
        s->strm->stop_bit =
            (s->strm->total_out + s->pending) * 8 + s->bi_valid;
        bi_windup(s);
#ifdef ZLIB_DEBUG
        s->compressed_len += 7;  /* align on byte boundary */
#endif
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
            s->compressed_len-7*last));
}

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int ZLIB_INTERNAL _tr_tally(deflate_state *s, unsigned dist, unsigned lc) {
#ifdef LIT_MEM
    s->d_buf[s->sym_next] = (ush)dist;
    s->l_buf[s->sym_next++] = (uch)lc;
#else
    s->sym_buf[s->sym_next++] = (uch)dist;
    s->sym_buf[s->sym_next++] = (uch)(dist >> 8);
    s->sym_buf[s->sym_next++] = (uch)lc;
#endif
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--;             /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");

        s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }
    return (s->sym_next == s->sym_end);
}