1
|
|
|
/* |
2
|
|
|
* $Id: rawdeflate.js,v 0.5 2013/04/09 14:25:38 dankogai Exp dankogai $ |
3
|
|
|
* |
4
|
|
|
* GNU General Public License, version 2 (GPL-2.0) |
5
|
|
|
* http://opensource.org/licenses/GPL-2.0 |
6
|
|
|
* Original: |
7
|
|
|
* http://www.onicos.com/staff/iz/amuse/javascript/expert/deflate.txt |
8
|
|
|
*/ |
9
|
|
|
function JSDeflater(/*inbuff*/inbuf) { |
10
|
|
|
|
11
|
|
|
/* Copyright (C) 1999 Masanao Izumo <[email protected]> |
12
|
|
|
* Version: 1.0.1 |
13
|
|
|
* LastModified: Dec 25 1999 |
14
|
|
|
*/ |
15
|
|
|
|
16
|
|
|
var WSIZE = 32768, // Sliding Window size |
17
|
|
|
zip_STORED_BLOCK = 0, |
18
|
|
|
zip_STATIC_TREES = 1, |
19
|
|
|
zip_DYN_TREES = 2, |
20
|
|
|
zip_DEFAULT_LEVEL = 6, |
21
|
|
|
zip_FULL_SEARCH = true, |
22
|
|
|
zip_INBUFSIZ = 32768, // Input buffer size |
23
|
|
|
zip_INBUF_EXTRA = 64, // Extra buffer |
24
|
|
|
zip_OUTBUFSIZ = 1024 * 8, |
25
|
|
|
zip_window_size = 2 * WSIZE, |
26
|
|
|
MIN_MATCH = 3, |
27
|
|
|
MAX_MATCH = 258, |
28
|
|
|
zip_BITS = 16, |
29
|
|
|
LIT_BUFSIZE = 0x2000, |
30
|
|
|
zip_HASH_BITS = 13, |
31
|
|
|
zip_DIST_BUFSIZE = LIT_BUFSIZE, |
32
|
|
|
zip_HASH_SIZE = 1 << zip_HASH_BITS, |
33
|
|
|
zip_HASH_MASK = zip_HASH_SIZE - 1, |
34
|
|
|
zip_WMASK = WSIZE - 1, |
35
|
|
|
zip_NIL = 0, // Tail of hash chains |
36
|
|
|
zip_TOO_FAR = 4096, |
37
|
|
|
zip_MIN_LOOKAHEAD = MAX_MATCH + MIN_MATCH + 1, |
38
|
|
|
zip_MAX_DIST = WSIZE - zip_MIN_LOOKAHEAD, |
39
|
|
|
zip_SMALLEST = 1, |
40
|
|
|
zip_MAX_BITS = 15, |
41
|
|
|
zip_MAX_BL_BITS = 7, |
42
|
|
|
zip_LENGTH_CODES = 29, |
43
|
|
|
zip_LITERALS = 256, |
44
|
|
|
zip_END_BLOCK = 256, |
45
|
|
|
zip_L_CODES = zip_LITERALS + 1 + zip_LENGTH_CODES, |
46
|
|
|
zip_D_CODES = 30, |
47
|
|
|
zip_BL_CODES = 19, |
48
|
|
|
zip_REP_3_6 = 16, |
49
|
|
|
zip_REPZ_3_10 = 17, |
50
|
|
|
zip_REPZ_11_138 = 18, |
51
|
|
|
zip_HEAP_SIZE = 2 * zip_L_CODES + 1, |
52
|
|
|
zip_H_SHIFT = parseInt((zip_HASH_BITS + MIN_MATCH - 1) / MIN_MATCH); |
53
|
|
|
|
54
|
|
|
var zip_free_queue, zip_qhead, zip_qtail, zip_initflag, zip_outbuf = null, zip_outcnt, zip_outoff, zip_complete, |
55
|
|
|
zip_window, zip_d_buf, zip_l_buf, zip_prev, zip_bi_buf, zip_bi_valid, zip_block_start, zip_ins_h, zip_hash_head, |
56
|
|
|
zip_prev_match, zip_match_available, zip_match_length, zip_prev_length, zip_strstart, zip_match_start, zip_eofile, |
57
|
|
|
zip_lookahead, zip_max_chain_length, zip_max_lazy_match, zip_compr_level, zip_good_match, zip_nice_match, |
58
|
|
|
zip_dyn_ltree, zip_dyn_dtree, zip_static_ltree, zip_static_dtree, zip_bl_tree, zip_l_desc, zip_d_desc, zip_bl_desc, |
59
|
|
|
zip_bl_count, zip_heap, zip_heap_len, zip_heap_max, zip_depth, zip_length_code, zip_dist_code, zip_base_length, |
60
|
|
|
zip_base_dist, zip_flag_buf, zip_last_lit, zip_last_dist, zip_last_flags, zip_flags, zip_flag_bit, zip_opt_len, |
61
|
|
|
zip_static_len, zip_deflate_data, zip_deflate_pos; |
62
|
|
|
|
63
|
|
|
var zip_DeflateCT = function () { |
64
|
|
|
this.fc = 0; // frequency count or bit string |
65
|
|
|
this.dl = 0; // father node in Huffman tree or length of bit string |
66
|
|
|
}; |
67
|
|
|
|
68
|
|
|
var zip_DeflateTreeDesc = function () { |
69
|
|
|
this.dyn_tree = null; // the dynamic tree |
70
|
|
|
this.static_tree = null; // corresponding static tree or NULL |
71
|
|
|
this.extra_bits = null; // extra bits for each code or NULL |
72
|
|
|
this.extra_base = 0; // base index for extra_bits |
73
|
|
|
this.elems = 0; // max number of elements in the tree |
74
|
|
|
this.max_length = 0; // max bit length for the codes |
75
|
|
|
this.max_code = 0; // largest code with non zero frequency |
76
|
|
|
}; |
77
|
|
|
|
78
|
|
|
/* Values for max_lazy_match, good_match and max_chain_length, depending on |
79
|
|
|
* the desired pack level (0..9). The values given below have been tuned to |
80
|
|
|
* exclude worst case performance for pathological files. Better values may be |
81
|
|
|
* found for specific files. |
82
|
|
|
*/ |
83
|
|
|
var zip_DeflateConfiguration = function (a, b, c, d) { |
84
|
|
|
this.good_length = a; // reduce lazy search above this match length |
85
|
|
|
this.max_lazy = b; // do not perform lazy search above this match length |
86
|
|
|
this.nice_length = c; // quit search above this match length |
87
|
|
|
this.max_chain = d; |
88
|
|
|
}; |
89
|
|
|
|
90
|
|
|
var zip_DeflateBuffer = function () { |
91
|
|
|
this.next = null; |
92
|
|
|
this.len = 0; |
93
|
|
|
this.ptr = new Array(zip_OUTBUFSIZ); |
94
|
|
|
this.off = 0; |
95
|
|
|
}; |
96
|
|
|
|
97
|
|
|
/* constant tables */ |
98
|
|
|
var zip_extra_lbits = new Array( |
|
|
|
|
99
|
|
|
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0); |
100
|
|
|
var zip_extra_dbits = new Array( |
|
|
|
|
101
|
|
|
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13); |
102
|
|
|
var zip_extra_blbits = new Array( |
|
|
|
|
103
|
|
|
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7); |
104
|
|
|
var zip_bl_order = new Array( |
|
|
|
|
105
|
|
|
16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15); |
106
|
|
|
var zip_configuration_table = new Array( |
|
|
|
|
107
|
|
|
new zip_DeflateConfiguration(0, 0, 0, 0), |
|
|
|
|
108
|
|
|
new zip_DeflateConfiguration(4, 4, 8, 4), |
|
|
|
|
109
|
|
|
new zip_DeflateConfiguration(4, 5, 16, 8), |
|
|
|
|
110
|
|
|
new zip_DeflateConfiguration(4, 6, 32, 32), |
|
|
|
|
111
|
|
|
new zip_DeflateConfiguration(4, 4, 16, 16), |
|
|
|
|
112
|
|
|
new zip_DeflateConfiguration(8, 16, 32, 32), |
|
|
|
|
113
|
|
|
new zip_DeflateConfiguration(8, 16, 128, 128), |
|
|
|
|
114
|
|
|
new zip_DeflateConfiguration(8, 32, 128, 256), |
|
|
|
|
115
|
|
|
new zip_DeflateConfiguration(32, 128, 258, 1024), |
|
|
|
|
116
|
|
|
new zip_DeflateConfiguration(32, 258, 258, 4096)); |
|
|
|
|
117
|
|
|
|
118
|
|
|
|
119
|
|
|
/* routines (deflate) */ |
120
|
|
|
|
121
|
|
|
var zip_deflate_start = function (level) { |
122
|
|
|
var i; |
123
|
|
|
|
124
|
|
|
if (!level) |
125
|
|
|
level = zip_DEFAULT_LEVEL; |
126
|
|
|
else if (level < 1) |
127
|
|
|
level = 1; |
128
|
|
|
else if (level > 9) |
129
|
|
|
level = 9; |
130
|
|
|
|
131
|
|
|
zip_compr_level = level; |
132
|
|
|
zip_initflag = false; |
133
|
|
|
zip_eofile = false; |
134
|
|
|
if (zip_outbuf != null) |
135
|
|
|
return; |
136
|
|
|
|
137
|
|
|
zip_free_queue = zip_qhead = zip_qtail = null; |
138
|
|
|
zip_outbuf = new Array(zip_OUTBUFSIZ); |
139
|
|
|
zip_window = new Array(zip_window_size); |
140
|
|
|
zip_d_buf = new Array(zip_DIST_BUFSIZE); |
|
|
|
|
141
|
|
|
zip_l_buf = new Array(zip_INBUFSIZ + zip_INBUF_EXTRA); |
142
|
|
|
zip_prev = new Array(1 << zip_BITS); |
143
|
|
|
zip_dyn_ltree = new Array(zip_HEAP_SIZE); |
144
|
|
|
for (i = 0; i < zip_HEAP_SIZE; i++) zip_dyn_ltree[i] = new zip_DeflateCT(); |
|
|
|
|
145
|
|
|
zip_dyn_dtree = new Array(2 * zip_D_CODES + 1); |
146
|
|
|
for (i = 0; i < 2 * zip_D_CODES + 1; i++) zip_dyn_dtree[i] = new zip_DeflateCT(); |
|
|
|
|
147
|
|
|
zip_static_ltree = new Array(zip_L_CODES + 2); |
148
|
|
|
for (i = 0; i < zip_L_CODES + 2; i++) zip_static_ltree[i] = new zip_DeflateCT(); |
|
|
|
|
149
|
|
|
zip_static_dtree = new Array(zip_D_CODES); |
150
|
|
|
for (i = 0; i < zip_D_CODES; i++) zip_static_dtree[i] = new zip_DeflateCT(); |
|
|
|
|
151
|
|
|
zip_bl_tree = new Array(2 * zip_BL_CODES + 1); |
152
|
|
|
for (i = 0; i < 2 * zip_BL_CODES + 1; i++) zip_bl_tree[i] = new zip_DeflateCT(); |
|
|
|
|
153
|
|
|
zip_l_desc = new zip_DeflateTreeDesc(); |
|
|
|
|
154
|
|
|
zip_d_desc = new zip_DeflateTreeDesc(); |
|
|
|
|
155
|
|
|
zip_bl_desc = new zip_DeflateTreeDesc(); |
|
|
|
|
156
|
|
|
zip_bl_count = new Array(zip_MAX_BITS + 1); |
157
|
|
|
zip_heap = new Array(2 * zip_L_CODES + 1); |
158
|
|
|
zip_depth = new Array(2 * zip_L_CODES + 1); |
159
|
|
|
zip_length_code = new Array(MAX_MATCH - MIN_MATCH + 1); |
160
|
|
|
zip_dist_code = new Array(512); |
161
|
|
|
zip_base_length = new Array(zip_LENGTH_CODES); |
162
|
|
|
zip_base_dist = new Array(zip_D_CODES); |
163
|
|
|
zip_flag_buf = new Array(parseInt(LIT_BUFSIZE / 8)); |
|
|
|
|
164
|
|
|
}; |
165
|
|
|
|
166
|
|
|
var zip_deflate_end = function () { |
167
|
|
|
zip_free_queue = zip_qhead = zip_qtail = null; |
168
|
|
|
zip_outbuf = null; |
169
|
|
|
zip_window = null; |
170
|
|
|
zip_d_buf = null; |
171
|
|
|
zip_l_buf = null; |
172
|
|
|
zip_prev = null; |
173
|
|
|
zip_dyn_ltree = null; |
174
|
|
|
zip_dyn_dtree = null; |
175
|
|
|
zip_static_ltree = null; |
176
|
|
|
zip_static_dtree = null; |
177
|
|
|
zip_bl_tree = null; |
178
|
|
|
zip_l_desc = null; |
179
|
|
|
zip_d_desc = null; |
180
|
|
|
zip_bl_desc = null; |
181
|
|
|
zip_bl_count = null; |
182
|
|
|
zip_heap = null; |
183
|
|
|
zip_depth = null; |
184
|
|
|
zip_length_code = null; |
185
|
|
|
zip_dist_code = null; |
186
|
|
|
zip_base_length = null; |
187
|
|
|
zip_base_dist = null; |
188
|
|
|
zip_flag_buf = null; |
189
|
|
|
}; |
190
|
|
|
|
191
|
|
|
var zip_reuse_queue = function (p) { |
192
|
|
|
p.next = zip_free_queue; |
193
|
|
|
zip_free_queue = p; |
194
|
|
|
}; |
195
|
|
|
|
196
|
|
|
var zip_new_queue = function () { |
197
|
|
|
var p; |
198
|
|
|
|
199
|
|
|
if (zip_free_queue != null) { |
200
|
|
|
p = zip_free_queue; |
201
|
|
|
zip_free_queue = zip_free_queue.next; |
202
|
|
|
} |
203
|
|
|
else |
204
|
|
|
p = new zip_DeflateBuffer(); |
|
|
|
|
205
|
|
|
p.next = null; |
206
|
|
|
p.len = p.off = 0; |
207
|
|
|
|
208
|
|
|
return p; |
209
|
|
|
}; |
210
|
|
|
|
211
|
|
|
var zip_head1 = function (i) { |
212
|
|
|
return zip_prev[WSIZE + i]; |
213
|
|
|
}; |
214
|
|
|
|
215
|
|
|
var zip_head2 = function (i, val) { |
216
|
|
|
return zip_prev[WSIZE + i] = val; |
217
|
|
|
}; |
218
|
|
|
|
219
|
|
|
/* put_byte is used for the compressed output, put_ubyte for the |
220
|
|
|
* uncompressed output. However unlzw() uses window for its |
221
|
|
|
* suffix table instead of its output buffer, so it does not use put_ubyte |
222
|
|
|
* (to be cleaned up). |
223
|
|
|
*/ |
224
|
|
|
var zip_put_byte = function (c) { |
225
|
|
|
zip_outbuf[zip_outoff + zip_outcnt++] = c; |
226
|
|
|
if (zip_outoff + zip_outcnt == zip_OUTBUFSIZ) |
227
|
|
|
zip_qoutbuf(); |
228
|
|
|
}; |
229
|
|
|
|
230
|
|
|
/* Output a 16 bit value, lsb first */ |
231
|
|
|
var zip_put_short = function (w) { |
232
|
|
|
w &= 0xffff; |
233
|
|
|
if (zip_outoff + zip_outcnt < zip_OUTBUFSIZ - 2) { |
234
|
|
|
zip_outbuf[zip_outoff + zip_outcnt++] = (w & 0xff); |
235
|
|
|
zip_outbuf[zip_outoff + zip_outcnt++] = (w >>> 8); |
236
|
|
|
} else { |
237
|
|
|
zip_put_byte(w & 0xff); |
238
|
|
|
zip_put_byte(w >>> 8); |
239
|
|
|
} |
240
|
|
|
}; |
241
|
|
|
|
242
|
|
|
/* ========================================================================== |
243
|
|
|
* Insert string s in the dictionary and set match_head to the previous head |
244
|
|
|
* of the hash chain (the most recent string with same hash key). Return |
245
|
|
|
* the previous length of the hash chain. |
246
|
|
|
* IN assertion: all calls to to INSERT_STRING are made with consecutive |
247
|
|
|
* input characters and the first MIN_MATCH bytes of s are valid |
248
|
|
|
* (except for the last MIN_MATCH-1 bytes of the input file). |
249
|
|
|
*/ |
250
|
|
|
var zip_INSERT_STRING = function () { |
251
|
|
|
zip_ins_h = ((zip_ins_h << zip_H_SHIFT) |
252
|
|
|
^ (zip_window[zip_strstart + MIN_MATCH - 1] & 0xff)) |
253
|
|
|
& zip_HASH_MASK; |
254
|
|
|
zip_hash_head = zip_head1(zip_ins_h); |
255
|
|
|
zip_prev[zip_strstart & zip_WMASK] = zip_hash_head; |
256
|
|
|
zip_head2(zip_ins_h, zip_strstart); |
257
|
|
|
}; |
258
|
|
|
|
259
|
|
|
/* Send a code of the given tree. c and tree must not have side effects */ |
260
|
|
|
var zip_SEND_CODE = function (c, tree) { |
261
|
|
|
zip_send_bits(tree[c].fc, tree[c].dl); |
262
|
|
|
}; |
263
|
|
|
|
264
|
|
|
/* Mapping from a distance to a distance code. dist is the distance - 1 and |
265
|
|
|
* must not have side effects. dist_code[256] and dist_code[257] are never |
266
|
|
|
* used. |
267
|
|
|
*/ |
268
|
|
|
var zip_D_CODE = function (dist) { |
269
|
|
|
return (dist < 256 ? zip_dist_code[dist] |
270
|
|
|
: zip_dist_code[256 + (dist >> 7)]) & 0xff; |
271
|
|
|
}; |
272
|
|
|
|
273
|
|
|
/* ========================================================================== |
274
|
|
|
* Compares to subtrees, using the tree depth as tie breaker when |
275
|
|
|
* the subtrees have equal frequency. This minimizes the worst case length. |
276
|
|
|
*/ |
277
|
|
|
var zip_SMALLER = function (tree, n, m) { |
278
|
|
|
return tree[n].fc < tree[m].fc || |
279
|
|
|
(tree[n].fc == tree[m].fc && zip_depth[n] <= zip_depth[m]); |
280
|
|
|
}; |
281
|
|
|
|
282
|
|
|
/* ========================================================================== |
283
|
|
|
* read string data |
284
|
|
|
*/ |
285
|
|
|
var zip_read_buff = function (buff, offset, n) { |
286
|
|
|
var i; |
287
|
|
|
for (i = 0; i < n && zip_deflate_pos < zip_deflate_data.length; i++) |
|
|
|
|
288
|
|
|
buff[offset + i] = |
289
|
|
|
zip_deflate_data[zip_deflate_pos++] & 0xff; |
290
|
|
|
return i; |
291
|
|
|
}; |
292
|
|
|
|
293
|
|
|
/* ========================================================================== |
294
|
|
|
* Initialize the "longest match" routines for a new file |
295
|
|
|
*/ |
296
|
|
|
var zip_lm_init = function () { |
297
|
|
|
var j; |
298
|
|
|
|
299
|
|
|
/* Initialize the hash table. */ |
300
|
|
|
for (j = 0; j < zip_HASH_SIZE; j++) |
301
|
|
|
zip_prev[WSIZE + j] = 0; |
302
|
|
|
zip_max_lazy_match = zip_configuration_table[zip_compr_level].max_lazy; |
303
|
|
|
zip_good_match = zip_configuration_table[zip_compr_level].good_length; |
304
|
|
|
if (!zip_FULL_SEARCH) |
305
|
|
|
zip_nice_match = zip_configuration_table[zip_compr_level].nice_length; |
306
|
|
|
zip_max_chain_length = zip_configuration_table[zip_compr_level].max_chain; |
307
|
|
|
|
308
|
|
|
zip_strstart = 0; |
309
|
|
|
zip_block_start = 0; |
310
|
|
|
|
311
|
|
|
zip_lookahead = zip_read_buff(zip_window, 0, 2 * WSIZE); |
312
|
|
|
if (zip_lookahead <= 0) { |
313
|
|
|
zip_eofile = true; |
314
|
|
|
zip_lookahead = 0; |
315
|
|
|
return; |
316
|
|
|
} |
317
|
|
|
zip_eofile = false; |
318
|
|
|
/* Make sure that we always have enough lookahead. This is important |
319
|
|
|
* if input comes from a device such as a tty. |
320
|
|
|
*/ |
321
|
|
|
while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile) |
322
|
|
|
zip_fill_window(); |
323
|
|
|
|
324
|
|
|
/* If lookahead < MIN_MATCH, ins_h is garbage, but this is |
325
|
|
|
* not important since only literal bytes will be emitted. |
326
|
|
|
*/ |
327
|
|
|
zip_ins_h = 0; |
328
|
|
|
for (j = 0; j < MIN_MATCH - 1; j++) { |
329
|
|
|
zip_ins_h = ((zip_ins_h << zip_H_SHIFT) ^ (zip_window[j] & 0xff)) & zip_HASH_MASK; |
|
|
|
|
330
|
|
|
} |
331
|
|
|
}; |
332
|
|
|
|
333
|
|
|
/* ========================================================================== |
334
|
|
|
* Set match_start to the longest match starting at the given string and |
335
|
|
|
* return its length. Matches shorter or equal to prev_length are discarded, |
336
|
|
|
* in which case the result is equal to prev_length and match_start is |
337
|
|
|
* garbage. |
338
|
|
|
* IN assertions: cur_match is the head of the hash chain for the current |
339
|
|
|
* string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 |
340
|
|
|
*/ |
341
|
|
|
var zip_longest_match = function (cur_match) { |
342
|
|
|
var chain_length = zip_max_chain_length; // max hash chain length |
343
|
|
|
var scanp = zip_strstart; // current string |
344
|
|
|
var matchp; // matched string |
345
|
|
|
var len; // length of current match |
346
|
|
|
var best_len = zip_prev_length; // best match length so far |
347
|
|
|
|
348
|
|
|
/* Stop when cur_match becomes <= limit. To simplify the code, |
349
|
|
|
* we prevent matches with the string of window index 0. |
350
|
|
|
*/ |
351
|
|
|
var limit = (zip_strstart > zip_MAX_DIST ? zip_strstart - zip_MAX_DIST : zip_NIL); |
352
|
|
|
|
353
|
|
|
var strendp = zip_strstart + MAX_MATCH; |
354
|
|
|
var scan_end1 = zip_window[scanp + best_len - 1]; |
355
|
|
|
var scan_end = zip_window[scanp + best_len]; |
356
|
|
|
|
357
|
|
|
/* Do not waste too much time if we already have a good match: */ |
358
|
|
|
if (zip_prev_length >= zip_good_match) |
359
|
|
|
chain_length >>= 2; |
360
|
|
|
|
361
|
|
|
do { |
362
|
|
|
matchp = cur_match; |
363
|
|
|
|
364
|
|
|
/* Skip to next match if the match length cannot increase |
365
|
|
|
* or if the match length is less than 2: |
366
|
|
|
*/ |
367
|
|
|
if (zip_window[matchp + best_len] != scan_end || |
368
|
|
|
zip_window[matchp + best_len - 1] != scan_end1 || |
369
|
|
|
zip_window[matchp] != zip_window[scanp] || |
370
|
|
|
zip_window[++matchp] != zip_window[scanp + 1]) { |
371
|
|
|
continue; |
372
|
|
|
} |
373
|
|
|
|
374
|
|
|
/* The check at best_len-1 can be removed because it will be made |
375
|
|
|
* again later. (This heuristic is not always a win.) |
376
|
|
|
* It is not necessary to compare scan[2] and match[2] since they |
377
|
|
|
* are always equal when the other bytes match, given that |
378
|
|
|
* the hash keys are equal and that HASH_BITS >= 8. |
379
|
|
|
*/ |
380
|
|
|
scanp += 2; |
381
|
|
|
matchp++; |
382
|
|
|
|
383
|
|
|
/* We check for insufficient lookahead only every 8th comparison; |
384
|
|
|
* the 256th check will be made at strstart+258. |
385
|
|
|
*/ |
386
|
|
|
do { |
|
|
|
|
387
|
|
|
} while (zip_window[++scanp] == zip_window[++matchp] && |
388
|
|
|
zip_window[++scanp] == zip_window[++matchp] && |
389
|
|
|
zip_window[++scanp] == zip_window[++matchp] && |
390
|
|
|
zip_window[++scanp] == zip_window[++matchp] && |
391
|
|
|
zip_window[++scanp] == zip_window[++matchp] && |
392
|
|
|
zip_window[++scanp] == zip_window[++matchp] && |
393
|
|
|
zip_window[++scanp] == zip_window[++matchp] && |
394
|
|
|
zip_window[++scanp] == zip_window[++matchp] && |
395
|
|
|
scanp < strendp); |
396
|
|
|
|
397
|
|
|
len = MAX_MATCH - (strendp - scanp); |
398
|
|
|
scanp = strendp - MAX_MATCH; |
399
|
|
|
|
400
|
|
|
if (len > best_len) { |
401
|
|
|
zip_match_start = cur_match; |
402
|
|
|
best_len = len; |
403
|
|
|
if (zip_FULL_SEARCH) { |
404
|
|
|
if (len >= MAX_MATCH) break; |
405
|
|
|
} else { |
406
|
|
|
if (len >= zip_nice_match) break; |
407
|
|
|
} |
408
|
|
|
|
409
|
|
|
scan_end1 = zip_window[scanp + best_len - 1]; |
410
|
|
|
scan_end = zip_window[scanp + best_len]; |
411
|
|
|
} |
412
|
|
|
} while ((cur_match = zip_prev[cur_match & zip_WMASK]) > limit |
413
|
|
|
&& --chain_length != 0); |
414
|
|
|
|
415
|
|
|
return best_len; |
416
|
|
|
}; |
417
|
|
|
|
418
|
|
|
/* ========================================================================== |
419
|
|
|
* Fill the window when the lookahead becomes insufficient. |
420
|
|
|
* Updates strstart and lookahead, and sets eofile if end of input file. |
421
|
|
|
* IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0 |
422
|
|
|
* OUT assertions: at least one byte has been read, or eofile is set; |
423
|
|
|
* file reads are performed for at least two bytes (required for the |
424
|
|
|
* translate_eol option). |
425
|
|
|
*/ |
426
|
|
|
var zip_fill_window = function () { |
427
|
|
|
var n, m; |
428
|
|
|
|
429
|
|
|
// Amount of free space at the end of the window. |
430
|
|
|
var more = zip_window_size - zip_lookahead - zip_strstart; |
431
|
|
|
|
432
|
|
|
/* If the window is almost full and there is insufficient lookahead, |
433
|
|
|
* move the upper half to the lower one to make room in the upper half. |
434
|
|
|
*/ |
435
|
|
|
if (more == -1) { |
436
|
|
|
/* Very unlikely, but possible on 16 bit machine if strstart == 0 |
437
|
|
|
* and lookahead == 1 (input done one byte at time) |
438
|
|
|
*/ |
439
|
|
|
more--; |
440
|
|
|
} else if (zip_strstart >= WSIZE + zip_MAX_DIST) { |
441
|
|
|
/* By the IN assertion, the window is not empty so we can't confuse |
442
|
|
|
* more == 0 with more == 64K on a 16 bit machine. |
443
|
|
|
*/ |
444
|
|
|
for (n = 0; n < WSIZE; n++) |
445
|
|
|
zip_window[n] = zip_window[n + WSIZE]; |
446
|
|
|
|
447
|
|
|
zip_match_start -= WSIZE; |
448
|
|
|
zip_strstart -= WSIZE; |
449
|
|
|
/* we now have strstart >= MAX_DIST: */ |
450
|
|
|
zip_block_start -= WSIZE; |
451
|
|
|
|
452
|
|
|
for (n = 0; n < zip_HASH_SIZE; n++) { |
453
|
|
|
m = zip_head1(n); |
454
|
|
|
zip_head2(n, m >= WSIZE ? m - WSIZE : zip_NIL); |
455
|
|
|
} |
456
|
|
|
for (n = 0; n < WSIZE; n++) { |
457
|
|
|
/* If n is not on any hash chain, prev[n] is garbage but |
458
|
|
|
* its value will never be used. |
459
|
|
|
*/ |
460
|
|
|
m = zip_prev[n]; |
461
|
|
|
zip_prev[n] = (m >= WSIZE ? m - WSIZE : zip_NIL); |
462
|
|
|
} |
463
|
|
|
more += WSIZE; |
464
|
|
|
} |
465
|
|
|
// At this point, more >= 2 |
466
|
|
|
if (!zip_eofile) { |
467
|
|
|
n = zip_read_buff(zip_window, zip_strstart + zip_lookahead, more); |
|
|
|
|
468
|
|
|
if (n <= 0) |
469
|
|
|
zip_eofile = true; |
470
|
|
|
else |
471
|
|
|
zip_lookahead += n; |
472
|
|
|
} |
473
|
|
|
}; |
474
|
|
|
|
475
|
|
|
/* ========================================================================== |
476
|
|
|
* Processes a new input file and return its compressed length. This |
477
|
|
|
* function does not perform lazy evaluationof matches and inserts |
478
|
|
|
* new strings in the dictionary only for unmatched strings or for short |
479
|
|
|
* matches. It is used only for the fast compression options. |
480
|
|
|
*/ |
481
|
|
|
var zip_deflate_fast = function () { |
482
|
|
|
while (zip_lookahead != 0 && zip_qhead == null) { |
|
|
|
|
483
|
|
|
var flush; // set if current block must be flushed |
484
|
|
|
|
485
|
|
|
/* Insert the string window[strstart .. strstart+2] in the |
486
|
|
|
* dictionary, and set hash_head to the head of the hash chain: |
487
|
|
|
*/ |
488
|
|
|
zip_INSERT_STRING(); |
489
|
|
|
|
490
|
|
|
/* Find the longest match, discarding those <= prev_length. |
491
|
|
|
* At this point we have always match_length < MIN_MATCH |
492
|
|
|
*/ |
493
|
|
|
if (zip_hash_head != zip_NIL && |
494
|
|
|
zip_strstart - zip_hash_head <= zip_MAX_DIST) { |
|
|
|
|
495
|
|
|
/* To simplify the code, we prevent matches with the string |
496
|
|
|
* of window index 0 (in particular we have to avoid a match |
497
|
|
|
* of the string with itself at the start of the input file). |
498
|
|
|
*/ |
499
|
|
|
zip_match_length = zip_longest_match(zip_hash_head); |
500
|
|
|
/* longest_match() sets match_start */ |
501
|
|
|
if (zip_match_length > zip_lookahead) |
|
|
|
|
502
|
|
|
zip_match_length = zip_lookahead; |
503
|
|
|
} |
504
|
|
|
if (zip_match_length >= MIN_MATCH) { |
|
|
|
|
505
|
|
|
flush = zip_ct_tally(zip_strstart - zip_match_start, |
506
|
|
|
zip_match_length - MIN_MATCH); |
507
|
|
|
zip_lookahead -= zip_match_length; |
508
|
|
|
|
509
|
|
|
/* Insert new strings in the hash table only if the match length |
510
|
|
|
* is not too large. This saves time but degrades compression. |
511
|
|
|
*/ |
512
|
|
|
if (zip_match_length <= zip_max_lazy_match) { |
513
|
|
|
zip_match_length--; // string at strstart already in hash table |
514
|
|
|
do { |
515
|
|
|
zip_strstart++; |
516
|
|
|
zip_INSERT_STRING(); |
517
|
|
|
/* strstart never exceeds WSIZE-MAX_MATCH, so there are |
518
|
|
|
* always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH |
519
|
|
|
* these bytes are garbage, but it does not matter since |
520
|
|
|
* the next lookahead bytes will be emitted as literals. |
521
|
|
|
*/ |
522
|
|
|
} while (--zip_match_length != 0); |
523
|
|
|
zip_strstart++; |
|
|
|
|
524
|
|
|
} else { |
525
|
|
|
zip_strstart += zip_match_length; |
526
|
|
|
zip_match_length = 0; |
527
|
|
|
zip_ins_h = zip_window[zip_strstart] & 0xff; |
528
|
|
|
zip_ins_h = ((zip_ins_h << zip_H_SHIFT) ^ (zip_window[zip_strstart + 1] & 0xff)) & zip_HASH_MASK; |
|
|
|
|
529
|
|
|
} |
530
|
|
|
} else { |
531
|
|
|
/* No match, output a literal byte */ |
532
|
|
|
flush = zip_ct_tally(0, zip_window[zip_strstart] & 0xff); |
533
|
|
|
zip_lookahead--; |
534
|
|
|
zip_strstart++; |
535
|
|
|
} |
536
|
|
|
if (flush) { |
537
|
|
|
zip_flush_block(0); |
538
|
|
|
zip_block_start = zip_strstart; |
539
|
|
|
} |
540
|
|
|
|
541
|
|
|
/* Make sure that we always have enough lookahead, except |
542
|
|
|
* at the end of the input file. We need MAX_MATCH bytes |
543
|
|
|
* for the next match, plus MIN_MATCH bytes to insert the |
544
|
|
|
* string following the next match. |
545
|
|
|
*/ |
546
|
|
|
while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile) |
547
|
|
|
zip_fill_window(); |
548
|
|
|
} |
549
|
|
|
}; |
550
|
|
|
|
551
|
|
|
var zip_deflate_better = function () { |
552
|
|
|
/* Process the input block. */ |
553
|
|
|
while (zip_lookahead != 0 && zip_qhead == null) { |
|
|
|
|
554
|
|
|
/* Insert the string window[strstart .. strstart+2] in the |
555
|
|
|
* dictionary, and set hash_head to the head of the hash chain: |
556
|
|
|
*/ |
557
|
|
|
zip_INSERT_STRING(); |
558
|
|
|
|
559
|
|
|
/* Find the longest match, discarding those <= prev_length. |
560
|
|
|
*/ |
561
|
|
|
zip_prev_length = zip_match_length; |
|
|
|
|
562
|
|
|
zip_prev_match = zip_match_start; |
563
|
|
|
zip_match_length = MIN_MATCH - 1; |
564
|
|
|
|
565
|
|
|
if (zip_hash_head != zip_NIL && |
566
|
|
|
zip_prev_length < zip_max_lazy_match && |
|
|
|
|
567
|
|
|
zip_strstart - zip_hash_head <= zip_MAX_DIST) { |
|
|
|
|
568
|
|
|
/* To simplify the code, we prevent matches with the string |
569
|
|
|
* of window index 0 (in particular we have to avoid a match |
570
|
|
|
* of the string with itself at the start of the input file). |
571
|
|
|
*/ |
572
|
|
|
zip_match_length = zip_longest_match(zip_hash_head); |
573
|
|
|
/* longest_match() sets match_start */ |
574
|
|
|
if (zip_match_length > zip_lookahead) |
|
|
|
|
575
|
|
|
zip_match_length = zip_lookahead; |
576
|
|
|
|
577
|
|
|
/* Ignore a length 3 match if it is too distant: */ |
578
|
|
|
if (zip_match_length == MIN_MATCH && |
579
|
|
|
zip_strstart - zip_match_start > zip_TOO_FAR) { |
580
|
|
|
/* If prev_match is also MIN_MATCH, match_start is garbage |
581
|
|
|
* but we will ignore the current match anyway. |
582
|
|
|
*/ |
583
|
|
|
zip_match_length--; |
584
|
|
|
} |
585
|
|
|
} |
586
|
|
|
/* If there was a match at the previous step and the current |
587
|
|
|
* match is not better, output the previous match: |
588
|
|
|
*/ |
589
|
|
|
if (zip_prev_length >= MIN_MATCH && |
590
|
|
|
zip_match_length <= zip_prev_length) { |
591
|
|
|
var flush; // set if current block must be flushed |
592
|
|
|
flush = zip_ct_tally(zip_strstart - 1 - zip_prev_match, |
|
|
|
|
593
|
|
|
zip_prev_length - MIN_MATCH); |
594
|
|
|
|
595
|
|
|
/* Insert in hash table all strings up to the end of the match. |
596
|
|
|
* strstart-1 and strstart are already inserted. |
597
|
|
|
*/ |
598
|
|
|
zip_lookahead -= zip_prev_length - 1; |
599
|
|
|
zip_prev_length -= 2; |
600
|
|
|
do { |
601
|
|
|
zip_strstart++; |
602
|
|
|
zip_INSERT_STRING(); |
603
|
|
|
/* strstart never exceeds WSIZE-MAX_MATCH, so there are |
604
|
|
|
* always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH |
605
|
|
|
* these bytes are garbage, but it does not matter since the |
606
|
|
|
* next lookahead bytes will always be emitted as literals. |
607
|
|
|
*/ |
608
|
|
|
} while (--zip_prev_length != 0); |
609
|
|
|
zip_match_available = 0; |
610
|
|
|
zip_match_length = MIN_MATCH - 1; |
611
|
|
|
zip_strstart++; |
|
|
|
|
612
|
|
|
if (flush) { |
613
|
|
|
zip_flush_block(0); |
614
|
|
|
zip_block_start = zip_strstart; |
615
|
|
|
} |
616
|
|
|
} else if (zip_match_available != 0) { |
|
|
|
|
617
|
|
|
/* If there was no match at the previous position, output a |
618
|
|
|
* single literal. If there was a match but the current match |
619
|
|
|
* is longer, truncate the previous match to a single literal. |
620
|
|
|
*/ |
621
|
|
|
if (zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff)) { |
622
|
|
|
zip_flush_block(0); |
623
|
|
|
zip_block_start = zip_strstart; |
624
|
|
|
} |
625
|
|
|
zip_strstart++; |
626
|
|
|
zip_lookahead--; |
627
|
|
|
} else { |
628
|
|
|
/* There is no previous match to compare with, wait for |
629
|
|
|
* the next step to decide. |
630
|
|
|
*/ |
631
|
|
|
zip_match_available = 1; |
632
|
|
|
zip_strstart++; |
633
|
|
|
zip_lookahead--; |
634
|
|
|
} |
635
|
|
|
|
636
|
|
|
/* Make sure that we always have enough lookahead, except |
637
|
|
|
* at the end of the input file. We need MAX_MATCH bytes |
638
|
|
|
* for the next match, plus MIN_MATCH bytes to insert the |
639
|
|
|
* string following the next match. |
640
|
|
|
*/ |
641
|
|
|
while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile) |
642
|
|
|
zip_fill_window(); |
643
|
|
|
} |
644
|
|
|
}; |
645
|
|
|
|
646
|
|
|
var zip_init_deflate = function () { |
647
|
|
|
if (zip_eofile) |
648
|
|
|
return; |
649
|
|
|
zip_bi_buf = 0; |
650
|
|
|
zip_bi_valid = 0; |
651
|
|
|
zip_ct_init(); |
652
|
|
|
zip_lm_init(); |
653
|
|
|
|
654
|
|
|
zip_qhead = null; |
655
|
|
|
zip_outcnt = 0; |
656
|
|
|
zip_outoff = 0; |
657
|
|
|
zip_match_available = 0; |
658
|
|
|
|
659
|
|
|
if (zip_compr_level <= 3) { |
660
|
|
|
zip_prev_length = MIN_MATCH - 1; |
661
|
|
|
zip_match_length = 0; |
662
|
|
|
} |
663
|
|
|
else { |
664
|
|
|
zip_match_length = MIN_MATCH - 1; |
665
|
|
|
zip_match_available = 0; |
666
|
|
|
zip_match_available = 0; |
667
|
|
|
} |
668
|
|
|
|
669
|
|
|
zip_complete = false; |
670
|
|
|
}; |
671
|
|
|
|
672
|
|
|
/* ========================================================================== |
673
|
|
|
* Same as above, but achieves better compression. We use a lazy |
674
|
|
|
* evaluation for matches: a match is finally adopted only if there is |
675
|
|
|
* no better match at the next window position. |
676
|
|
|
*/ |
677
|
|
|
var zip_deflate_internal = function (buff, off, buff_size) { |
678
|
|
|
var n; |
679
|
|
|
|
680
|
|
|
if (!zip_initflag) { |
681
|
|
|
zip_init_deflate(); |
682
|
|
|
zip_initflag = true; |
683
|
|
|
if (zip_lookahead == 0) { // empty |
684
|
|
|
zip_complete = true; |
685
|
|
|
return 0; |
686
|
|
|
} |
687
|
|
|
} |
688
|
|
|
|
689
|
|
|
if ((n = zip_qcopy(buff, off, buff_size)) == buff_size) |
690
|
|
|
return buff_size; |
691
|
|
|
|
692
|
|
|
if (zip_complete) |
693
|
|
|
return n; |
694
|
|
|
|
695
|
|
|
if (zip_compr_level <= 3) // optimized for speed |
696
|
|
|
zip_deflate_fast(); |
697
|
|
|
else |
698
|
|
|
zip_deflate_better(); |
699
|
|
|
if (zip_lookahead == 0) { |
700
|
|
|
if (zip_match_available != 0) |
701
|
|
|
zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff); |
702
|
|
|
zip_flush_block(1); |
703
|
|
|
zip_complete = true; |
704
|
|
|
} |
705
|
|
|
return n + zip_qcopy(buff, n + off, buff_size - n); |
706
|
|
|
}; |
707
|
|
|
|
708
|
|
|
var zip_qcopy = function (buff, off, buff_size) { |
709
|
|
|
var n, i, j; |
710
|
|
|
|
711
|
|
|
n = 0; |
712
|
|
|
while (zip_qhead != null && n < buff_size) { |
|
|
|
|
713
|
|
|
i = buff_size - n; |
714
|
|
|
if (i > zip_qhead.len) |
715
|
|
|
i = zip_qhead.len; |
716
|
|
|
for (j = 0; j < i; j++) |
717
|
|
|
buff[off + n + j] = zip_qhead.ptr[zip_qhead.off + j]; |
718
|
|
|
|
719
|
|
|
zip_qhead.off += i; |
720
|
|
|
zip_qhead.len -= i; |
721
|
|
|
n += i; |
722
|
|
|
if (zip_qhead.len == 0) { |
723
|
|
|
var p; |
724
|
|
|
p = zip_qhead; |
725
|
|
|
zip_qhead = zip_qhead.next; |
726
|
|
|
zip_reuse_queue(p); |
727
|
|
|
} |
728
|
|
|
} |
729
|
|
|
|
730
|
|
|
if (n == buff_size) |
731
|
|
|
return n; |
732
|
|
|
|
733
|
|
|
if (zip_outoff < zip_outcnt) { |
734
|
|
|
i = buff_size - n; |
735
|
|
|
if (i > zip_outcnt - zip_outoff) |
736
|
|
|
i = zip_outcnt - zip_outoff; |
737
|
|
|
// System.arraycopy(outbuf, outoff, buff, off + n, i); |
738
|
|
|
for (j = 0; j < i; j++) |
739
|
|
|
buff[off + n + j] = zip_outbuf[zip_outoff + j]; |
740
|
|
|
zip_outoff += i; |
741
|
|
|
n += i; |
742
|
|
|
if (zip_outcnt == zip_outoff) |
743
|
|
|
zip_outcnt = zip_outoff = 0; |
744
|
|
|
} |
745
|
|
|
return n; |
746
|
|
|
}; |
747
|
|
|
|
748
|
|
|
/* ========================================================================== |
749
|
|
|
* Allocate the match buffer, initialize the various tables and save the |
750
|
|
|
* location of the internal file attribute (ascii/binary) and method |
751
|
|
|
* (DEFLATE/STORE). |
752
|
|
|
*/ |
753
|
|
|
var zip_ct_init = function () { |
754
|
|
|
var n; // iterates over tree elements |
755
|
|
|
var bits; // bit counter |
756
|
|
|
var length; // length value |
757
|
|
|
var code; // code value |
758
|
|
|
var dist; // distance index |
759
|
|
|
|
760
|
|
|
if (zip_static_dtree[0].dl != 0) return; // ct_init already called |
761
|
|
|
|
762
|
|
|
zip_l_desc.dyn_tree = zip_dyn_ltree; |
763
|
|
|
zip_l_desc.static_tree = zip_static_ltree; |
764
|
|
|
zip_l_desc.extra_bits = zip_extra_lbits; |
765
|
|
|
zip_l_desc.extra_base = zip_LITERALS + 1; |
766
|
|
|
zip_l_desc.elems = zip_L_CODES; |
767
|
|
|
zip_l_desc.max_length = zip_MAX_BITS; |
768
|
|
|
zip_l_desc.max_code = 0; |
769
|
|
|
|
770
|
|
|
zip_d_desc.dyn_tree = zip_dyn_dtree; |
771
|
|
|
zip_d_desc.static_tree = zip_static_dtree; |
772
|
|
|
zip_d_desc.extra_bits = zip_extra_dbits; |
773
|
|
|
zip_d_desc.extra_base = 0; |
774
|
|
|
zip_d_desc.elems = zip_D_CODES; |
775
|
|
|
zip_d_desc.max_length = zip_MAX_BITS; |
776
|
|
|
zip_d_desc.max_code = 0; |
777
|
|
|
|
778
|
|
|
zip_bl_desc.dyn_tree = zip_bl_tree; |
779
|
|
|
zip_bl_desc.static_tree = null; |
780
|
|
|
zip_bl_desc.extra_bits = zip_extra_blbits; |
781
|
|
|
zip_bl_desc.extra_base = 0; |
782
|
|
|
zip_bl_desc.elems = zip_BL_CODES; |
783
|
|
|
zip_bl_desc.max_length = zip_MAX_BL_BITS; |
784
|
|
|
zip_bl_desc.max_code = 0; |
785
|
|
|
|
786
|
|
|
// Initialize the mapping length (0..255) -> length code (0..28) |
787
|
|
|
length = 0; |
788
|
|
|
for (code = 0; code < zip_LENGTH_CODES - 1; code++) { |
789
|
|
|
zip_base_length[code] = length; |
790
|
|
|
for (n = 0; n < (1 << zip_extra_lbits[code]); n++) |
791
|
|
|
zip_length_code[length++] = code; |
792
|
|
|
} |
793
|
|
|
/* Note that the length 255 (match length 258) can be represented |
794
|
|
|
* in two different ways: code 284 + 5 bits or code 285, so we |
795
|
|
|
* overwrite length_code[255] to use the best encoding: |
796
|
|
|
*/ |
797
|
|
|
zip_length_code[length - 1] = code; |
798
|
|
|
|
799
|
|
|
/* Initialize the mapping dist (0..32K) -> dist code (0..29) */ |
800
|
|
|
dist = 0; |
801
|
|
|
for (code = 0; code < 16; code++) { |
802
|
|
|
zip_base_dist[code] = dist; |
803
|
|
|
for (n = 0; n < (1 << zip_extra_dbits[code]); n++) { |
804
|
|
|
zip_dist_code[dist++] = code; |
805
|
|
|
} |
806
|
|
|
} |
807
|
|
|
dist >>= 7; // from now on, all distances are divided by 128 |
808
|
|
|
for (; code < zip_D_CODES; code++) { |
809
|
|
|
zip_base_dist[code] = dist << 7; |
810
|
|
|
for (n = 0; n < (1 << (zip_extra_dbits[code] - 7)); n++) |
811
|
|
|
zip_dist_code[256 + dist++] = code; |
812
|
|
|
} |
813
|
|
|
// Construct the codes of the static literal tree |
814
|
|
|
for (bits = 0; bits <= zip_MAX_BITS; bits++) |
815
|
|
|
zip_bl_count[bits] = 0; |
816
|
|
|
n = 0; |
817
|
|
|
while (n <= 143) { |
818
|
|
|
zip_static_ltree[n++].dl = 8; |
819
|
|
|
zip_bl_count[8]++; |
820
|
|
|
} |
821
|
|
|
while (n <= 255) { |
822
|
|
|
zip_static_ltree[n++].dl = 9; |
823
|
|
|
zip_bl_count[9]++; |
824
|
|
|
} |
825
|
|
|
while (n <= 279) { |
826
|
|
|
zip_static_ltree[n++].dl = 7; |
827
|
|
|
zip_bl_count[7]++; |
828
|
|
|
} |
829
|
|
|
while (n <= 287) { |
830
|
|
|
zip_static_ltree[n++].dl = 8; |
831
|
|
|
zip_bl_count[8]++; |
832
|
|
|
} |
833
|
|
|
/* Codes 286 and 287 do not exist, but we must include them in the |
834
|
|
|
* tree construction to get a canonical Huffman tree (longest code |
835
|
|
|
* all ones) |
836
|
|
|
*/ |
837
|
|
|
zip_gen_codes(zip_static_ltree, zip_L_CODES + 1); |
838
|
|
|
|
839
|
|
|
/* The static distance tree is trivial: */ |
840
|
|
|
for (n = 0; n < zip_D_CODES; n++) { |
841
|
|
|
zip_static_dtree[n].dl = 5; |
842
|
|
|
zip_static_dtree[n].fc = zip_bi_reverse(n, 5); |
843
|
|
|
} |
844
|
|
|
|
845
|
|
|
// Initialize the first block of the first file: |
846
|
|
|
zip_init_block(); |
847
|
|
|
}; |
848
|
|
|
|
849
|
|
|
/* ========================================================================== |
850
|
|
|
* Initialize a new block. |
851
|
|
|
*/ |
852
|
|
|
var zip_init_block = function () { |
853
|
|
|
var n; // iterates over tree elements |
854
|
|
|
|
855
|
|
|
// Initialize the trees. |
856
|
|
|
for (n = 0; n < zip_L_CODES; n++) zip_dyn_ltree[n].fc = 0; |
857
|
|
|
for (n = 0; n < zip_D_CODES; n++) zip_dyn_dtree[n].fc = 0; |
858
|
|
|
for (n = 0; n < zip_BL_CODES; n++) zip_bl_tree[n].fc = 0; |
859
|
|
|
|
860
|
|
|
zip_dyn_ltree[zip_END_BLOCK].fc = 1; |
861
|
|
|
zip_opt_len = zip_static_len = 0; |
862
|
|
|
zip_last_lit = zip_last_dist = zip_last_flags = 0; |
863
|
|
|
zip_flags = 0; |
864
|
|
|
zip_flag_bit = 1; |
865
|
|
|
}; |
866
|
|
|
|
867
|
|
|
/* ========================================================================== |
868
|
|
|
* Restore the heap property by moving down the tree starting at node k, |
869
|
|
|
* exchanging a node with the smallest of its two sons if necessary, stopping |
870
|
|
|
* when the heap property is re-established (each father smaller than its |
871
|
|
|
* two sons). |
872
|
|
|
*/ |
873
|
|
|
var zip_pqdownheap = function (tree, // the tree to restore |
874
|
|
|
k) { // node to move down |
875
|
|
|
var v = zip_heap[k]; |
876
|
|
|
var j = k << 1; // left son of k |
877
|
|
|
|
878
|
|
|
while (j <= zip_heap_len) { |
879
|
|
|
// Set j to the smallest of the two sons: |
880
|
|
|
if (j < zip_heap_len && |
881
|
|
|
zip_SMALLER(tree, zip_heap[j + 1], zip_heap[j])) |
882
|
|
|
j++; |
883
|
|
|
|
884
|
|
|
// Exit if v is smaller than both sons |
885
|
|
|
if (zip_SMALLER(tree, v, zip_heap[j])) |
886
|
|
|
break; |
887
|
|
|
|
888
|
|
|
// Exchange v with the smallest son |
889
|
|
|
zip_heap[k] = zip_heap[j]; |
890
|
|
|
k = j; |
891
|
|
|
|
892
|
|
|
// And continue down the tree, setting j to the left son of k |
893
|
|
|
j <<= 1; |
894
|
|
|
} |
895
|
|
|
zip_heap[k] = v; |
896
|
|
|
}; |
897
|
|
|
|
898
|
|
|
/* ========================================================================== |
899
|
|
|
* Compute the optimal bit lengths for a tree and update the total bit length |
900
|
|
|
* for the current block. |
901
|
|
|
* IN assertion: the fields freq and dad are set, heap[heap_max] and |
902
|
|
|
* above are the tree nodes sorted by increasing frequency. |
903
|
|
|
* OUT assertions: the field len is set to the optimal bit length, the |
904
|
|
|
* array bl_count contains the frequencies for each bit length. |
905
|
|
|
* The length opt_len is updated; static_len is also updated if stree is |
906
|
|
|
* not null. |
907
|
|
|
*/ |
908
|
|
|
var zip_gen_bitlen = function (desc) { // the tree descriptor |
909
|
|
|
var tree = desc.dyn_tree; |
910
|
|
|
var extra = desc.extra_bits; |
911
|
|
|
var base = desc.extra_base; |
912
|
|
|
var max_code = desc.max_code; |
913
|
|
|
var max_length = desc.max_length; |
914
|
|
|
var stree = desc.static_tree; |
915
|
|
|
var h; // heap index |
916
|
|
|
var n, m; // iterate over the tree elements |
917
|
|
|
var bits; // bit length |
918
|
|
|
var xbits; // extra bits |
919
|
|
|
var f; // frequency |
920
|
|
|
var overflow = 0; // number of elements with bit length too large |
921
|
|
|
|
922
|
|
|
for (bits = 0; bits <= zip_MAX_BITS; bits++) |
923
|
|
|
zip_bl_count[bits] = 0; |
924
|
|
|
|
925
|
|
|
/* In a first pass, compute the optimal bit lengths (which may |
926
|
|
|
* overflow in the case of the bit length tree). |
927
|
|
|
*/ |
928
|
|
|
tree[zip_heap[zip_heap_max]].dl = 0; // root of the heap |
929
|
|
|
|
930
|
|
|
for (h = zip_heap_max + 1; h < zip_HEAP_SIZE; h++) { |
931
|
|
|
n = zip_heap[h]; |
932
|
|
|
bits = tree[tree[n].dl].dl + 1; |
933
|
|
|
if (bits > max_length) { |
934
|
|
|
bits = max_length; |
935
|
|
|
overflow++; |
936
|
|
|
} |
937
|
|
|
tree[n].dl = bits; |
938
|
|
|
// We overwrite tree[n].dl which is no longer needed |
939
|
|
|
|
940
|
|
|
if (n > max_code) |
941
|
|
|
continue; // not a leaf node |
942
|
|
|
|
943
|
|
|
zip_bl_count[bits]++; |
944
|
|
|
xbits = 0; |
945
|
|
|
if (n >= base) |
946
|
|
|
xbits = extra[n - base]; |
947
|
|
|
f = tree[n].fc; |
948
|
|
|
zip_opt_len += f * (bits + xbits); |
|
|
|
|
949
|
|
|
if (stree != null) |
950
|
|
|
zip_static_len += f * (stree[n].dl + xbits); |
|
|
|
|
951
|
|
|
} |
952
|
|
|
if (overflow == 0) |
953
|
|
|
return; |
954
|
|
|
|
955
|
|
|
// This happens for example on obj2 and pic of the Calgary corpus |
956
|
|
|
|
957
|
|
|
// Find the first bit length which could increase: |
958
|
|
|
do { |
959
|
|
|
bits = max_length - 1; |
960
|
|
|
while (zip_bl_count[bits] == 0) |
961
|
|
|
bits--; |
962
|
|
|
zip_bl_count[bits]--; // move one leaf down the tree |
963
|
|
|
zip_bl_count[bits + 1] += 2; // move one overflow item as its brother |
964
|
|
|
zip_bl_count[max_length]--; |
965
|
|
|
/* The brother of the overflow item also moves one step up, |
966
|
|
|
* but this does not affect bl_count[max_length] |
967
|
|
|
*/ |
968
|
|
|
overflow -= 2; |
969
|
|
|
} while (overflow > 0); |
970
|
|
|
|
971
|
|
|
/* Now recompute all bit lengths, scanning in increasing frequency. |
972
|
|
|
* h is still equal to HEAP_SIZE. (It is simpler to reconstruct all |
973
|
|
|
* lengths instead of fixing only the wrong ones. This idea is taken |
974
|
|
|
* from 'ar' written by Haruhiko Okumura.) |
975
|
|
|
*/ |
976
|
|
|
for (bits = max_length; bits != 0; bits--) { |
977
|
|
|
n = zip_bl_count[bits]; |
978
|
|
|
while (n != 0) { |
979
|
|
|
m = zip_heap[--h]; |
980
|
|
|
if (m > max_code) |
981
|
|
|
continue; |
982
|
|
|
if (tree[m].dl != bits) { |
983
|
|
|
zip_opt_len += (bits - tree[m].dl) * tree[m].fc; |
984
|
|
|
tree[m].fc = bits; |
985
|
|
|
} |
986
|
|
|
n--; |
987
|
|
|
} |
988
|
|
|
} |
989
|
|
|
}; |
990
|
|
|
|
991
|
|
|
/* ========================================================================== |
992
|
|
|
* Generate the codes for a given tree and bit counts (which need not be |
993
|
|
|
* optimal). |
994
|
|
|
* IN assertion: the array bl_count contains the bit length statistics for |
995
|
|
|
* the given tree and the field len is set for all tree elements. |
996
|
|
|
* OUT assertion: the field code is set for all tree elements of non |
997
|
|
|
* zero code length. |
998
|
|
|
*/ |
999
|
|
|
var zip_gen_codes = function (tree, // the tree to decorate |
1000
|
|
|
max_code) { // largest code with non zero frequency |
1001
|
|
|
var next_code = new Array(zip_MAX_BITS + 1); // next code value for each bit length |
1002
|
|
|
var code = 0; // running code value |
1003
|
|
|
var bits; // bit index |
1004
|
|
|
var n; // code index |
1005
|
|
|
|
1006
|
|
|
/* The distribution counts are first used to generate the code values |
1007
|
|
|
* without bit reversal. |
1008
|
|
|
*/ |
1009
|
|
|
for (bits = 1; bits <= zip_MAX_BITS; bits++) { |
1010
|
|
|
code = ((code + zip_bl_count[bits - 1]) << 1); |
1011
|
|
|
next_code[bits] = code; |
1012
|
|
|
} |
1013
|
|
|
|
1014
|
|
|
/* Check that the bit counts in bl_count are consistent. The last code |
1015
|
|
|
* must be all ones. |
1016
|
|
|
*/ |
1017
|
|
|
for (n = 0; n <= max_code; n++) { |
1018
|
|
|
var len = tree[n].dl; |
1019
|
|
|
if (len == 0) |
1020
|
|
|
continue; |
1021
|
|
|
// Now reverse the bits |
1022
|
|
|
tree[n].fc = zip_bi_reverse(next_code[len]++, len); |
1023
|
|
|
} |
1024
|
|
|
}; |
1025
|
|
|
|
1026
|
|
|
/* ========================================================================== |
1027
|
|
|
* Construct one Huffman tree and assigns the code bit strings and lengths. |
1028
|
|
|
* Update the total bit length for the current block. |
1029
|
|
|
* IN assertion: the field freq is set for all tree elements. |
1030
|
|
|
* OUT assertions: the fields len and code are set to the optimal bit length |
1031
|
|
|
* and corresponding code. The length opt_len is updated; static_len is |
1032
|
|
|
* also updated if stree is not null. The field max_code is set. |
1033
|
|
|
*/ |
1034
|
|
|
var zip_build_tree = function (desc) { // the tree descriptor |
1035
|
|
|
var tree = desc.dyn_tree; |
1036
|
|
|
var stree = desc.static_tree; |
1037
|
|
|
var elems = desc.elems; |
1038
|
|
|
var n, m; // iterate over heap elements |
1039
|
|
|
var max_code = -1; // largest code with non zero frequency |
1040
|
|
|
var node = elems; // next internal node of the tree |
1041
|
|
|
|
1042
|
|
|
/* Construct the initial heap, with least frequent element in |
1043
|
|
|
* heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. |
1044
|
|
|
* heap[0] is not used. |
1045
|
|
|
*/ |
1046
|
|
|
zip_heap_len = 0; |
1047
|
|
|
zip_heap_max = zip_HEAP_SIZE; |
1048
|
|
|
|
1049
|
|
|
for (n = 0; n < elems; n++) { |
1050
|
|
|
if (tree[n].fc != 0) { |
1051
|
|
|
zip_heap[++zip_heap_len] = max_code = n; |
|
|
|
|
1052
|
|
|
zip_depth[n] = 0; |
1053
|
|
|
} else |
1054
|
|
|
tree[n].dl = 0; |
1055
|
|
|
} |
1056
|
|
|
|
1057
|
|
|
/* The pkzip format requires that at least one distance code exists, |
1058
|
|
|
* and that at least one bit should be sent even if there is only one |
1059
|
|
|
* possible code. So to avoid special checks later on we force at least |
1060
|
|
|
* two codes of non zero frequency. |
1061
|
|
|
*/ |
1062
|
|
|
while (zip_heap_len < 2) { |
1063
|
|
|
var xnew = zip_heap[++zip_heap_len] = (max_code < 2 ? ++max_code : 0); |
1064
|
|
|
tree[xnew].fc = 1; |
1065
|
|
|
zip_depth[xnew] = 0; |
1066
|
|
|
zip_opt_len--; |
|
|
|
|
1067
|
|
|
if (stree != null) |
1068
|
|
|
zip_static_len -= stree[xnew].dl; |
|
|
|
|
1069
|
|
|
// new is 0 or 1 so it does not have extra bits |
1070
|
|
|
} |
1071
|
|
|
desc.max_code = max_code; |
1072
|
|
|
|
1073
|
|
|
/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, |
1074
|
|
|
* establish sub-heaps of increasing lengths: |
1075
|
|
|
*/ |
1076
|
|
|
for (n = zip_heap_len >> 1; n >= 1; n--) |
1077
|
|
|
zip_pqdownheap(tree, n); |
1078
|
|
|
|
1079
|
|
|
/* Construct the Huffman tree by repeatedly combining the least two |
1080
|
|
|
* frequent nodes. |
1081
|
|
|
*/ |
1082
|
|
|
do { |
1083
|
|
|
n = zip_heap[zip_SMALLEST]; |
1084
|
|
|
zip_heap[zip_SMALLEST] = zip_heap[zip_heap_len--]; |
1085
|
|
|
zip_pqdownheap(tree, zip_SMALLEST); |
1086
|
|
|
|
1087
|
|
|
m = zip_heap[zip_SMALLEST]; // m = node of next least frequency |
1088
|
|
|
|
1089
|
|
|
// keep the nodes sorted by frequency |
1090
|
|
|
zip_heap[--zip_heap_max] = n; |
|
|
|
|
1091
|
|
|
zip_heap[--zip_heap_max] = m; |
1092
|
|
|
|
1093
|
|
|
// Create a new node father of n and m |
1094
|
|
|
tree[node].fc = tree[n].fc + tree[m].fc; |
1095
|
|
|
if (zip_depth[n] > zip_depth[m] + 1) |
1096
|
|
|
zip_depth[node] = zip_depth[n]; |
1097
|
|
|
else |
1098
|
|
|
zip_depth[node] = zip_depth[m] + 1; |
1099
|
|
|
tree[n].dl = tree[m].dl = node; |
1100
|
|
|
|
1101
|
|
|
// and insert the new node in the heap |
1102
|
|
|
zip_heap[zip_SMALLEST] = node++; |
1103
|
|
|
zip_pqdownheap(tree, zip_SMALLEST); |
1104
|
|
|
|
1105
|
|
|
} while (zip_heap_len >= 2); |
1106
|
|
|
|
1107
|
|
|
zip_heap[--zip_heap_max] = zip_heap[zip_SMALLEST]; |
1108
|
|
|
|
1109
|
|
|
/* At this point, the fields freq and dad are set. We can now |
1110
|
|
|
* generate the bit lengths. |
1111
|
|
|
*/ |
1112
|
|
|
zip_gen_bitlen(desc); |
1113
|
|
|
|
1114
|
|
|
// The field len is now set, we can generate the bit codes |
1115
|
|
|
zip_gen_codes(tree, max_code); |
1116
|
|
|
}; |
1117
|
|
|
|
1118
|
|
|
/* ========================================================================== |
1119
|
|
|
* Scan a literal or distance tree to determine the frequencies of the codes |
1120
|
|
|
* in the bit length tree. Updates opt_len to take into account the repeat |
1121
|
|
|
* counts. (The contribution of the bit length codes will be added later |
1122
|
|
|
* during the construction of bl_tree.) |
1123
|
|
|
*/ |
1124
|
|
|
var zip_scan_tree = function (tree,// the tree to be scanned |
1125
|
|
|
max_code) { // and its largest code of non zero frequency |
1126
|
|
|
var n; // iterates over all tree elements |
1127
|
|
|
var prevlen = -1; // last emitted length |
1128
|
|
|
var curlen; // length of current code |
1129
|
|
|
var nextlen = tree[0].dl; // length of next code |
1130
|
|
|
var count = 0; // repeat count of the current code |
1131
|
|
|
var max_count = 7; // max repeat count |
1132
|
|
|
var min_count = 4; // min repeat count |
1133
|
|
|
|
1134
|
|
|
if (nextlen == 0) { |
1135
|
|
|
max_count = 138; |
1136
|
|
|
min_count = 3; |
1137
|
|
|
} |
1138
|
|
|
tree[max_code + 1].dl = 0xffff; // guard |
1139
|
|
|
|
1140
|
|
|
for (n = 0; n <= max_code; n++) { |
1141
|
|
|
curlen = nextlen; |
1142
|
|
|
nextlen = tree[n + 1].dl; |
1143
|
|
|
if (++count < max_count && curlen == nextlen) |
1144
|
|
|
continue; |
1145
|
|
|
else if (count < min_count) |
1146
|
|
|
zip_bl_tree[curlen].fc += count; |
1147
|
|
|
else if (curlen != 0) { |
1148
|
|
|
if (curlen != prevlen) |
1149
|
|
|
zip_bl_tree[curlen].fc++; |
1150
|
|
|
zip_bl_tree[zip_REP_3_6].fc++; |
1151
|
|
|
} else if (count <= 10) |
1152
|
|
|
zip_bl_tree[zip_REPZ_3_10].fc++; |
1153
|
|
|
else |
1154
|
|
|
zip_bl_tree[zip_REPZ_11_138].fc++; |
1155
|
|
|
count = 0; |
1156
|
|
|
prevlen = curlen; |
1157
|
|
|
if (nextlen == 0) { |
1158
|
|
|
max_count = 138; |
1159
|
|
|
min_count = 3; |
1160
|
|
|
} else if (curlen == nextlen) { |
1161
|
|
|
max_count = 6; |
1162
|
|
|
min_count = 3; |
1163
|
|
|
} else { |
1164
|
|
|
max_count = 7; |
1165
|
|
|
min_count = 4; |
1166
|
|
|
} |
1167
|
|
|
} |
1168
|
|
|
}; |
1169
|
|
|
|
1170
|
|
|
/* ========================================================================== |
1171
|
|
|
* Send a literal or distance tree in compressed form, using the codes in |
1172
|
|
|
* bl_tree. |
1173
|
|
|
*/ |
1174
|
|
|
var zip_send_tree = function (tree, // the tree to be scanned |
1175
|
|
|
max_code) { // and its largest code of non zero frequency |
1176
|
|
|
var n; // iterates over all tree elements |
1177
|
|
|
var prevlen = -1; // last emitted length |
1178
|
|
|
var curlen; // length of current code |
1179
|
|
|
var nextlen = tree[0].dl; // length of next code |
1180
|
|
|
var count = 0; // repeat count of the current code |
1181
|
|
|
var max_count = 7; // max repeat count |
1182
|
|
|
var min_count = 4; // min repeat count |
1183
|
|
|
|
1184
|
|
|
/* tree[max_code+1].dl = -1; */ |
1185
|
|
|
/* guard already set */ |
1186
|
|
|
if (nextlen == 0) { |
1187
|
|
|
max_count = 138; |
1188
|
|
|
min_count = 3; |
1189
|
|
|
} |
1190
|
|
|
|
1191
|
|
|
for (n = 0; n <= max_code; n++) { |
1192
|
|
|
curlen = nextlen; |
1193
|
|
|
nextlen = tree[n + 1].dl; |
1194
|
|
|
if (++count < max_count && curlen == nextlen) { |
1195
|
|
|
continue; |
1196
|
|
|
} else if (count < min_count) { |
1197
|
|
|
do { |
1198
|
|
|
zip_SEND_CODE(curlen, zip_bl_tree); |
1199
|
|
|
} while (--count != 0); |
1200
|
|
|
} else if (curlen != 0) { |
1201
|
|
|
if (curlen != prevlen) { |
1202
|
|
|
zip_SEND_CODE(curlen, zip_bl_tree); |
1203
|
|
|
count--; |
1204
|
|
|
} |
1205
|
|
|
// Assert(count >= 3 && count <= 6, " 3_6?"); |
1206
|
|
|
zip_SEND_CODE(zip_REP_3_6, zip_bl_tree); |
1207
|
|
|
zip_send_bits(count - 3, 2); |
1208
|
|
|
} else if (count <= 10) { |
1209
|
|
|
zip_SEND_CODE(zip_REPZ_3_10, zip_bl_tree); |
1210
|
|
|
zip_send_bits(count - 3, 3); |
1211
|
|
|
} else { |
1212
|
|
|
zip_SEND_CODE(zip_REPZ_11_138, zip_bl_tree); |
1213
|
|
|
zip_send_bits(count - 11, 7); |
1214
|
|
|
} |
1215
|
|
|
count = 0; |
1216
|
|
|
prevlen = curlen; |
1217
|
|
|
if (nextlen == 0) { |
1218
|
|
|
max_count = 138; |
1219
|
|
|
min_count = 3; |
1220
|
|
|
} else if (curlen == nextlen) { |
1221
|
|
|
max_count = 6; |
1222
|
|
|
min_count = 3; |
1223
|
|
|
} else { |
1224
|
|
|
max_count = 7; |
1225
|
|
|
min_count = 4; |
1226
|
|
|
} |
1227
|
|
|
} |
1228
|
|
|
}; |
1229
|
|
|
|
1230
|
|
|
/* ========================================================================== |
1231
|
|
|
* Construct the Huffman tree for the bit lengths and return the index in |
1232
|
|
|
* bl_order of the last bit length code to send. |
1233
|
|
|
*/ |
1234
|
|
|
var zip_build_bl_tree = function () { |
1235
|
|
|
var max_blindex; // index of last bit length code of non zero freq |
1236
|
|
|
|
1237
|
|
|
// Determine the bit length frequencies for literal and distance trees |
1238
|
|
|
zip_scan_tree(zip_dyn_ltree, zip_l_desc.max_code); |
1239
|
|
|
zip_scan_tree(zip_dyn_dtree, zip_d_desc.max_code); |
1240
|
|
|
|
1241
|
|
|
// Build the bit length tree: |
1242
|
|
|
zip_build_tree(zip_bl_desc); |
1243
|
|
|
/* opt_len now includes the length of the tree representations, except |
1244
|
|
|
* the lengths of the bit lengths codes and the 5+5+4 bits for the counts. |
1245
|
|
|
*/ |
1246
|
|
|
|
1247
|
|
|
/* Determine the number of bit length codes to send. The pkzip format |
1248
|
|
|
* requires that at least 4 bit length codes be sent. (appnote.txt says |
1249
|
|
|
* 3 but the actual value used is 4.) |
1250
|
|
|
*/ |
1251
|
|
|
for (max_blindex = zip_BL_CODES - 1; max_blindex >= 3; max_blindex--) { |
1252
|
|
|
if (zip_bl_tree[zip_bl_order[max_blindex]].dl != 0) break; |
1253
|
|
|
} |
1254
|
|
|
/* Update opt_len to include the bit length tree and counts */ |
1255
|
|
|
zip_opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; |
1256
|
|
|
return max_blindex; |
1257
|
|
|
}; |
1258
|
|
|
|
1259
|
|
|
/* ========================================================================== |
1260
|
|
|
* Send the header for a block using dynamic Huffman trees: the counts, the |
1261
|
|
|
* lengths of the bit length codes, the literal tree and the distance tree. |
1262
|
|
|
* IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. |
1263
|
|
|
*/ |
1264
|
|
|
var zip_send_all_trees = function (lcodes, dcodes, blcodes) { // number of codes for each tree |
1265
|
|
|
var rank; // index in bl_order |
1266
|
|
|
zip_send_bits(lcodes - 257, 5); // not +255 as stated in appnote.txt |
1267
|
|
|
zip_send_bits(dcodes - 1, 5); |
1268
|
|
|
zip_send_bits(blcodes - 4, 4); // not -3 as stated in appnote.txt |
1269
|
|
|
for (rank = 0; rank < blcodes; rank++) { |
1270
|
|
|
zip_send_bits(zip_bl_tree[zip_bl_order[rank]].dl, 3); |
1271
|
|
|
} |
1272
|
|
|
|
1273
|
|
|
// send the literal tree |
1274
|
|
|
zip_send_tree(zip_dyn_ltree, lcodes - 1); |
1275
|
|
|
|
1276
|
|
|
// send the distance tree |
1277
|
|
|
zip_send_tree(zip_dyn_dtree, dcodes - 1); |
1278
|
|
|
}; |
1279
|
|
|
|
1280
|
|
|
/* ========================================================================== |
1281
|
|
|
* Determine the best encoding for the current block: dynamic trees, static |
1282
|
|
|
* trees or store, and output the encoded block to the zip file. |
1283
|
|
|
*/ |
1284
|
|
|
var zip_flush_block = function (eof) { // true if this is the last block for a file |
1285
|
|
|
var opt_lenb, static_lenb; // opt_len and static_len in bytes |
1286
|
|
|
var max_blindex; // index of last bit length code of non zero freq |
1287
|
|
|
var stored_len; // length of input block |
1288
|
|
|
|
1289
|
|
|
stored_len = zip_strstart - zip_block_start; |
1290
|
|
|
zip_flag_buf[zip_last_flags] = zip_flags; // Save the flags for the last 8 items |
1291
|
|
|
|
1292
|
|
|
// Construct the literal and distance trees |
1293
|
|
|
zip_build_tree(zip_l_desc); |
1294
|
|
|
zip_build_tree(zip_d_desc); |
1295
|
|
|
/* At this point, opt_len and static_len are the total bit lengths of |
1296
|
|
|
* the compressed block data, excluding the tree representations. |
1297
|
|
|
*/ |
1298
|
|
|
|
1299
|
|
|
/* Build the bit length tree for the above two trees, and get the index |
1300
|
|
|
* in bl_order of the last bit length code to send. |
1301
|
|
|
*/ |
1302
|
|
|
max_blindex = zip_build_bl_tree(); |
1303
|
|
|
|
1304
|
|
|
// Determine the best encoding. Compute first the block length in bytes |
1305
|
|
|
opt_lenb = (zip_opt_len + 3 + 7) >> 3; |
1306
|
|
|
static_lenb = (zip_static_len + 3 + 7) >> 3; |
1307
|
|
|
if (static_lenb <= opt_lenb) |
1308
|
|
|
opt_lenb = static_lenb; |
1309
|
|
|
if (stored_len + 4 <= opt_lenb // 4: two words for the lengths |
1310
|
|
|
&& zip_block_start >= 0) { |
1311
|
|
|
var i; |
1312
|
|
|
|
1313
|
|
|
/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. |
1314
|
|
|
* Otherwise we can't have processed more than WSIZE input bytes since |
1315
|
|
|
* the last block flush, because compression would have been |
1316
|
|
|
* successful. If LIT_BUFSIZE <= WSIZE, it is never too late to |
1317
|
|
|
* transform a block into a stored block. |
1318
|
|
|
*/ |
1319
|
|
|
zip_send_bits((zip_STORED_BLOCK << 1) + eof, 3); |
1320
|
|
|
/* send block type */ |
1321
|
|
|
zip_bi_windup(); |
1322
|
|
|
/* align on byte boundary */ |
1323
|
|
|
zip_put_short(stored_len); |
1324
|
|
|
zip_put_short(~stored_len); |
1325
|
|
|
|
1326
|
|
|
// copy block |
1327
|
|
|
for (i = 0; i < stored_len; i++) |
1328
|
|
|
zip_put_byte(zip_window[zip_block_start + i]); |
1329
|
|
|
|
1330
|
|
|
} else if (static_lenb == opt_lenb) { |
1331
|
|
|
zip_send_bits((zip_STATIC_TREES << 1) + eof, 3); |
1332
|
|
|
zip_compress_block(zip_static_ltree, zip_static_dtree); |
1333
|
|
|
} else { |
1334
|
|
|
zip_send_bits((zip_DYN_TREES << 1) + eof, 3); |
1335
|
|
|
zip_send_all_trees(zip_l_desc.max_code + 1, |
1336
|
|
|
zip_d_desc.max_code + 1, |
1337
|
|
|
max_blindex + 1); |
1338
|
|
|
zip_compress_block(zip_dyn_ltree, zip_dyn_dtree); |
1339
|
|
|
} |
1340
|
|
|
|
1341
|
|
|
zip_init_block(); |
1342
|
|
|
|
1343
|
|
|
if (eof != 0) |
1344
|
|
|
zip_bi_windup(); |
1345
|
|
|
}; |
1346
|
|
|
|
1347
|
|
|
/* ========================================================================== |
1348
|
|
|
* Save the match info and tally the frequency counts. Return true if |
1349
|
|
|
* the current block must be flushed. |
1350
|
|
|
*/ |
1351
|
|
|
var zip_ct_tally = function (dist, // distance of matched string |
1352
|
|
|
lc) { // match length-MIN_MATCH or unmatched char (if dist==0) |
1353
|
|
|
zip_l_buf[zip_last_lit++] = lc; |
1354
|
|
|
if (dist == 0) { |
1355
|
|
|
// lc is the unmatched char |
1356
|
|
|
zip_dyn_ltree[lc].fc++; |
1357
|
|
|
} else { |
1358
|
|
|
// Here, lc is the match length - MIN_MATCH |
1359
|
|
|
dist--; // dist = match distance - 1 |
1360
|
|
|
zip_dyn_ltree[zip_length_code[lc] + zip_LITERALS + 1].fc++; |
1361
|
|
|
zip_dyn_dtree[zip_D_CODE(dist)].fc++; |
1362
|
|
|
|
1363
|
|
|
zip_d_buf[zip_last_dist++] = dist; |
1364
|
|
|
zip_flags |= zip_flag_bit; |
1365
|
|
|
} |
1366
|
|
|
zip_flag_bit <<= 1; |
1367
|
|
|
|
1368
|
|
|
// Output the flags if they fill a byte |
1369
|
|
|
if ((zip_last_lit & 7) == 0) { |
1370
|
|
|
zip_flag_buf[zip_last_flags++] = zip_flags; |
|
|
|
|
1371
|
|
|
zip_flags = 0; |
1372
|
|
|
zip_flag_bit = 1; |
1373
|
|
|
} |
1374
|
|
|
// Try to guess if it is profitable to stop the current block here |
1375
|
|
|
if (zip_compr_level > 2 && (zip_last_lit & 0xfff) == 0) { |
1376
|
|
|
// Compute an upper bound for the compressed length |
1377
|
|
|
var out_length = zip_last_lit * 8; |
1378
|
|
|
var in_length = zip_strstart - zip_block_start; |
1379
|
|
|
var dcode; |
1380
|
|
|
|
1381
|
|
|
for (dcode = 0; dcode < zip_D_CODES; dcode++) { |
1382
|
|
|
out_length += zip_dyn_dtree[dcode].fc * (5 + zip_extra_dbits[dcode]); |
1383
|
|
|
} |
1384
|
|
|
out_length >>= 3; |
1385
|
|
|
if (zip_last_dist < parseInt(zip_last_lit / 2) && |
|
|
|
|
1386
|
|
|
out_length < parseInt(in_length / 2)) |
1387
|
|
|
return true; |
1388
|
|
|
} |
1389
|
|
|
return (zip_last_lit == LIT_BUFSIZE - 1 || |
1390
|
|
|
zip_last_dist == zip_DIST_BUFSIZE); |
1391
|
|
|
/* We avoid equality with LIT_BUFSIZE because of wraparound at 64K |
1392
|
|
|
* on 16 bit machines and because stored blocks are restricted to |
1393
|
|
|
* 64K-1 bytes. |
1394
|
|
|
*/ |
1395
|
|
|
}; |
1396
|
|
|
|
1397
|
|
|
/* ========================================================================== |
1398
|
|
|
* Send the block data compressed using the given Huffman trees |
1399
|
|
|
*/ |
1400
|
|
|
var zip_compress_block = function (ltree, // literal tree |
1401
|
|
|
dtree) { // distance tree |
1402
|
|
|
var dist; // distance of matched string |
1403
|
|
|
var lc; // match length or unmatched char (if dist == 0) |
1404
|
|
|
var lx = 0; // running index in l_buf |
1405
|
|
|
var dx = 0; // running index in d_buf |
1406
|
|
|
var fx = 0; // running index in flag_buf |
1407
|
|
|
var flag = 0; // current flags |
1408
|
|
|
var code; // the code to send |
1409
|
|
|
var extra; // number of extra bits to send |
1410
|
|
|
|
1411
|
|
|
if (zip_last_lit != 0) do { |
1412
|
|
|
if ((lx & 7) == 0) |
1413
|
|
|
flag = zip_flag_buf[fx++]; |
1414
|
|
|
lc = zip_l_buf[lx++] & 0xff; |
1415
|
|
|
if ((flag & 1) == 0) { |
1416
|
|
|
zip_SEND_CODE(lc, ltree); |
1417
|
|
|
/* send a literal byte */ |
1418
|
|
|
} else { |
1419
|
|
|
// Here, lc is the match length - MIN_MATCH |
1420
|
|
|
code = zip_length_code[lc]; |
1421
|
|
|
zip_SEND_CODE(code + zip_LITERALS + 1, ltree); // send the length code |
1422
|
|
|
extra = zip_extra_lbits[code]; |
1423
|
|
|
if (extra != 0) { |
1424
|
|
|
lc -= zip_base_length[code]; |
1425
|
|
|
zip_send_bits(lc, extra); // send the extra length bits |
1426
|
|
|
} |
1427
|
|
|
dist = zip_d_buf[dx++]; |
1428
|
|
|
// Here, dist is the match distance - 1 |
1429
|
|
|
code = zip_D_CODE(dist); |
1430
|
|
|
zip_SEND_CODE(code, dtree); // send the distance code |
1431
|
|
|
extra = zip_extra_dbits[code]; |
1432
|
|
|
if (extra != 0) { |
1433
|
|
|
dist -= zip_base_dist[code]; |
1434
|
|
|
zip_send_bits(dist, extra); // send the extra distance bits |
1435
|
|
|
} |
1436
|
|
|
} // literal or match pair ? |
1437
|
|
|
flag >>= 1; |
1438
|
|
|
} while (lx < zip_last_lit); |
1439
|
|
|
|
1440
|
|
|
zip_SEND_CODE(zip_END_BLOCK, ltree); |
1441
|
|
|
}; |
1442
|
|
|
|
1443
|
|
|
/* ========================================================================== |
1444
|
|
|
* Send a value on a given number of bits. |
1445
|
|
|
* IN assertion: length <= 16 and value fits in length bits. |
1446
|
|
|
*/ |
1447
|
|
|
var zip_Buf_size = 16; // bit size of bi_buf |
1448
|
|
|
var zip_send_bits = function (value, // value to send |
1449
|
|
|
length) { // number of bits |
1450
|
|
|
/* If not enough room in bi_buf, use (valid) bits from bi_buf and |
1451
|
|
|
* (16 - bi_valid) bits from value, leaving (width - (16-bi_valid)) |
1452
|
|
|
* unused bits in value. |
1453
|
|
|
*/ |
1454
|
|
|
if (zip_bi_valid > zip_Buf_size - length) { |
1455
|
|
|
zip_bi_buf |= (value << zip_bi_valid); |
1456
|
|
|
zip_put_short(zip_bi_buf); |
1457
|
|
|
zip_bi_buf = (value >> (zip_Buf_size - zip_bi_valid)); |
1458
|
|
|
zip_bi_valid += length - zip_Buf_size; |
1459
|
|
|
} else { |
1460
|
|
|
zip_bi_buf |= value << zip_bi_valid; |
1461
|
|
|
zip_bi_valid += length; |
1462
|
|
|
} |
1463
|
|
|
}; |
1464
|
|
|
|
1465
|
|
|
/* ========================================================================== |
1466
|
|
|
* Reverse the first len bits of a code, using straightforward code (a faster |
1467
|
|
|
* method would use a table) |
1468
|
|
|
* IN assertion: 1 <= len <= 15 |
1469
|
|
|
*/ |
1470
|
|
|
var zip_bi_reverse = function (code, // the value to invert |
1471
|
|
|
len) { // its bit length |
1472
|
|
|
var res = 0; |
1473
|
|
|
do { |
1474
|
|
|
res |= code & 1; |
1475
|
|
|
code >>= 1; |
1476
|
|
|
res <<= 1; |
1477
|
|
|
} while (--len > 0); |
1478
|
|
|
return res >> 1; |
1479
|
|
|
}; |
1480
|
|
|
|
1481
|
|
|
/* ========================================================================== |
1482
|
|
|
* Write out any remaining bits in an incomplete byte. |
1483
|
|
|
*/ |
1484
|
|
|
var zip_bi_windup = function () { |
1485
|
|
|
if (zip_bi_valid > 8) { |
1486
|
|
|
zip_put_short(zip_bi_buf); |
1487
|
|
|
} else if (zip_bi_valid > 0) { |
1488
|
|
|
zip_put_byte(zip_bi_buf); |
1489
|
|
|
} |
1490
|
|
|
zip_bi_buf = 0; |
1491
|
|
|
zip_bi_valid = 0; |
1492
|
|
|
}; |
1493
|
|
|
|
1494
|
|
|
var zip_qoutbuf = function () { |
1495
|
|
|
if (zip_outcnt != 0) { |
1496
|
|
|
var q, i; |
1497
|
|
|
q = zip_new_queue(); |
1498
|
|
|
if (zip_qhead == null) |
1499
|
|
|
zip_qhead = zip_qtail = q; |
1500
|
|
|
else |
1501
|
|
|
zip_qtail = zip_qtail.next = q; |
1502
|
|
|
q.len = zip_outcnt - zip_outoff; |
1503
|
|
|
for (i = 0; i < q.len; i++) |
1504
|
|
|
q.ptr[i] = zip_outbuf[zip_outoff + i]; |
1505
|
|
|
zip_outcnt = zip_outoff = 0; |
1506
|
|
|
} |
1507
|
|
|
}; |
1508
|
|
|
|
1509
|
|
|
function deflate(buffData, level) { |
1510
|
|
|
zip_deflate_data = buffData; |
1511
|
|
|
zip_deflate_pos = 0; |
1512
|
|
|
zip_deflate_start(level); |
1513
|
|
|
|
1514
|
|
|
var buff = new Array(1024), |
1515
|
|
|
pages = [], |
1516
|
|
|
totalSize = 0, |
1517
|
|
|
i; |
1518
|
|
|
|
1519
|
|
|
for (i = 0; i < 1024; i++) buff[i] = 0; |
1520
|
|
|
while ((i = zip_deflate_internal(buff, 0, buff.length)) > 0) { |
1521
|
|
|
var buf = new Buffer(buff.slice(0, i)); |
1522
|
|
|
pages.push(buf); |
1523
|
|
|
totalSize += buf.length; |
1524
|
|
|
} |
1525
|
|
|
|
1526
|
|
|
if (pages.length == 1) { |
1527
|
|
|
return pages[0]; |
1528
|
|
|
} |
1529
|
|
|
|
1530
|
|
|
var result = new Buffer(totalSize), |
1531
|
|
|
index = 0; |
1532
|
|
|
|
1533
|
|
|
for (i = 0; i < pages.length; i++) { |
1534
|
|
|
pages[i].copy(result, index); |
1535
|
|
|
index = index + pages[i].length |
1536
|
|
|
} |
1537
|
|
|
|
1538
|
|
|
return result; |
1539
|
|
|
} |
1540
|
|
|
|
1541
|
|
|
return { |
1542
|
|
|
deflate: function () { |
1543
|
|
|
return deflate(inbuf, 8); |
1544
|
|
|
} |
1545
|
|
|
} |
1546
|
|
|
} |
1547
|
|
|
|
1548
|
|
|
module.exports = function (/*Buffer*/inbuf) { |
1549
|
|
|
|
1550
|
|
|
var zlib = require("zlib"); |
1551
|
|
|
|
1552
|
|
|
return { |
1553
|
|
|
deflate: function () { |
1554
|
|
|
return new JSDeflater(inbuf).deflate(); |
1555
|
|
|
}, |
1556
|
|
|
|
1557
|
|
|
deflateAsync: function (/*Function*/callback) { |
1558
|
|
|
var tmp = zlib.createDeflateRaw({chunkSize:(parseInt(inbuf.length / 1024) + 1)*1024}), |
1559
|
|
|
parts = [], total = 0; |
1560
|
|
|
tmp.on('data', function(data) { |
1561
|
|
|
parts.push(data); |
1562
|
|
|
total += data.length; |
1563
|
|
|
}); |
1564
|
|
|
tmp.on('end', function() { |
1565
|
|
|
var buf = new Buffer(total), written = 0; |
1566
|
|
|
buf.fill(0); |
1567
|
|
|
|
1568
|
|
|
for (var i = 0; i < parts.length; i++) { |
1569
|
|
|
var part = parts[i]; |
1570
|
|
|
part.copy(buf, written); |
1571
|
|
|
written += part.length; |
1572
|
|
|
} |
1573
|
|
|
callback && callback(buf); |
1574
|
|
|
}); |
1575
|
|
|
tmp.end(inbuf); |
1576
|
|
|
} |
1577
|
|
|
} |
1578
|
|
|
}; |
1579
|
|
|
|