1#![allow(dead_code)]
2use super::backward_references::kHashMul32;
3use super::brotli_bit_stream::{BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree};
6use super::compress_fragment_two_pass::{memcpy, BrotliStoreMetaBlockHeader, BrotliWriteBits};
13use super::entropy_encode::{
14 BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree,
15};
16use super::static_dict::{
17 FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
18};
19use super::util::{FastLog2, Log2FloorNonZero};
20use core::cmp::min;
21
// Seed counts for the 128-entry command histogram. This array is cloned into
// `cmd_histo` at the start of each EMIT_COMMANDS pass in
// `BrotliCompressFragmentFastImpl`, so the histogram never starts from all
// zeros for the entries seeded with 1.
static kCmdHistoSeed: [u32; 128] = [
    0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0,
];
30
31fn Hash(p: &[u8], shift: usize) -> u32 {
32 let h: u64 = (BROTLI_UNALIGNED_LOAD64(p) << 24).wrapping_mul(kHashMul32 as (u64));
33 (h >> shift) as u32
34}
35
36fn IsMatch(p1: &[u8], p2: &[u8]) -> bool {
37 BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2) && (p1[4] as i32 == p2[4] as i32)
38}
39
40fn BuildAndStoreLiteralPrefixCode<AllocHT: alloc::Allocator<HuffmanTree>>(
41 mht: &mut AllocHT,
42 input: &[u8],
43 input_size: usize,
44 depths: &mut [u8],
45 bits: &mut [u16],
46 storage_ix: &mut usize,
47 storage: &mut [u8],
48) -> usize {
49 let mut histogram: [u32; 256] = [0; 256];
50 let mut histogram_total: usize;
51 let mut i: usize;
52 if input_size < (1i32 << 15) as usize {
53 for i in 0usize..input_size {
54 let _rhs = 1;
55 let _lhs = &mut histogram[input[i] as usize];
56 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
57 }
58 histogram_total = input_size;
59 i = 0usize;
60 while i < 256usize {
61 {
62 let adjust: u32 = (2u32).wrapping_mul(min(histogram[i], 11u32));
63 {
64 let _rhs = adjust;
65 let _lhs = &mut histogram[i];
66 *_lhs = (*_lhs).wrapping_add(_rhs);
67 }
68 histogram_total = histogram_total.wrapping_add(adjust as usize);
69 }
70 i = i.wrapping_add(1);
71 }
72 } else {
73 static kSampleRate: usize = 29usize;
74 i = 0usize;
75 while i < input_size {
76 {
77 let _rhs = 1;
78 let _lhs = &mut histogram[input[i] as usize];
79 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
80 }
81 i = i.wrapping_add(kSampleRate);
82 }
83 histogram_total = input_size
84 .wrapping_add(kSampleRate)
85 .wrapping_sub(1)
86 .wrapping_div(kSampleRate);
87 i = 0usize;
88 while i < 256usize {
89 {
90 let adjust: u32 =
91 (1u32).wrapping_add((2u32).wrapping_mul(min(histogram[i], 11u32)));
92 {
93 let _rhs = adjust;
94 let _lhs = &mut histogram[i];
95 *_lhs = (*_lhs).wrapping_add(_rhs);
96 }
97 histogram_total = histogram_total.wrapping_add(adjust as usize);
98 }
99 i = i.wrapping_add(1);
100 }
101 }
102 BrotliBuildAndStoreHuffmanTreeFast(
103 mht,
104 &mut histogram[..],
105 histogram_total,
106 8usize,
107 depths,
108 bits,
109 storage_ix,
110 storage,
111 );
112 {
113 let mut literal_ratio: usize = 0usize;
114 for i in 0usize..256usize {
115 if histogram[i] != 0 {
116 literal_ratio = literal_ratio
117 .wrapping_add(histogram[i].wrapping_mul(depths[i] as u32) as usize);
118 }
119 }
120 literal_ratio
121 .wrapping_mul(125)
122 .wrapping_div(histogram_total)
123 }
124}
/// Control state for the block-emission loop in
/// `BrotliCompressFragmentFastImpl`.
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum CodeBlockState {
    // Flush the literals remaining after the last match of the block.
    EMIT_REMAINDER,
    // Search for matches and emit insert/copy commands.
    EMIT_COMMANDS,
    // Start the next meta-block (or finish if no input remains).
    NEXT_BLOCK,
}
131
132fn EmitInsertLen(
133 insertlen: usize,
134 depth: &[u8],
135 bits: &[u16],
136 histo: &mut [u32],
137 storage_ix: &mut usize,
138 storage: &mut [u8],
139) {
140 if insertlen < 6usize {
141 let code: usize = insertlen.wrapping_add(40);
142 BrotliWriteBits(
143 depth[code] as usize,
144 bits[code] as (u64),
145 storage_ix,
146 storage,
147 );
148 {
149 let _rhs = 1;
150 let _lhs = &mut histo[code];
151 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
152 }
153 } else if insertlen < 130usize {
154 let tail: usize = insertlen.wrapping_sub(2);
155 let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
156 let prefix: usize = tail >> nbits;
157 let inscode: usize = ((nbits << 1) as usize)
158 .wrapping_add(prefix)
159 .wrapping_add(42);
160 BrotliWriteBits(
161 depth[(inscode as usize)] as usize,
162 bits[(inscode as usize)] as (u64),
163 storage_ix,
164 storage,
165 );
166 BrotliWriteBits(
167 nbits as usize,
168 (tail as u64).wrapping_sub((prefix as u64) << nbits),
169 storage_ix,
170 storage,
171 );
172 {
173 let _rhs = 1;
174 let _lhs = &mut histo[(inscode as usize)];
175 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
176 }
177 } else if insertlen < 2114usize {
178 let tail: usize = insertlen.wrapping_sub(66);
179 let nbits: u32 = Log2FloorNonZero(tail as u64);
180 let code: usize = nbits.wrapping_add(50) as usize;
181 BrotliWriteBits(
182 depth[(code as usize)] as usize,
183 bits[(code as usize)] as (u64),
184 storage_ix,
185 storage,
186 );
187 BrotliWriteBits(
188 nbits as usize,
189 (tail as u64).wrapping_sub(1 << nbits),
190 storage_ix,
191 storage,
192 );
193 {
194 let _rhs = 1;
195 let _lhs = &mut histo[(code as usize)];
196 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
197 }
198 } else {
199 BrotliWriteBits(depth[61] as usize, bits[61] as (u64), storage_ix, storage);
200 BrotliWriteBits(
201 12usize,
202 (insertlen as u64).wrapping_sub(2114),
203 storage_ix,
204 storage,
205 );
206 {
207 let _rhs = 1;
208 let _lhs = &mut histo[61];
209 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
210 }
211 }
212}
213
/// Decides whether the pending literal run should be emitted as an
/// uncompressed meta-block rather than entropy-coded literals.
///
/// * `delta` — bytes already emitted in this meta-block (distance from the
///   meta-block start to the first unemitted literal).
/// * `insertlen` — number of pending literals.
/// * `literal_ratio` — scaled cost estimate from
///   `BuildAndStoreLiteralPrefixCode` (125 * bits-per-symbol sum / total).
///
/// Uncompressed mode is used only when almost nothing has been emitted yet
/// (`compressed * 50 <= insertlen`) and the literals look nearly
/// incompressible (`literal_ratio > 980`).
fn ShouldUseUncompressedMode(delta: isize, insertlen: usize, literal_ratio: usize) -> bool {
    let compressed = delta as usize;
    // Same decision as the original if/else-if chain, collapsed into one
    // boolean expression.
    compressed.wrapping_mul(50) <= insertlen && literal_ratio > 980
}
fn RewindBitPosition(new_storage_ix: usize, storage_ix: &mut usize, storage: &mut [u8]) {
    // Moves the bit-stream write position back to `new_storage_ix`, clearing
    // the bits of the target byte at and above the new bit offset so that
    // later writes can OR into a clean byte.
    let bitpos = new_storage_ix & 7usize;
    let keep_mask = (1u32 << bitpos).wrapping_sub(1) as u8;
    storage[new_storage_ix >> 3] &= keep_mask;
    *storage_ix = new_storage_ix;
}
234
// Discards everything written since `storage_ix_start` and replaces it with
// an uncompressed meta-block (is_uncompressed = 1) containing the `len` raw
// bytes starting at `begin`.
fn EmitUncompressedMetaBlock(
    begin: &[u8],
    len: usize,
    storage_ix_start: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    RewindBitPosition(storage_ix_start, storage_ix, storage);
    BrotliStoreMetaBlockHeader(len, 1i32, storage_ix, storage);
    // Uncompressed contents must start at a byte boundary: round up.
    *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
    memcpy(storage, (*storage_ix >> 3), begin, 0, len);
    *storage_ix = storage_ix.wrapping_add(len << 3);
    // Zero the byte just past the copied data so subsequent bit writes,
    // which OR into the buffer, start from a clean byte.
    storage[(*storage_ix >> 3)] = 0u8;
}
249
250fn EmitLongInsertLen(
251 insertlen: usize,
252 depth: &[u8],
253 bits: &[u16],
254 histo: &mut [u32],
255 storage_ix: &mut usize,
256 storage: &mut [u8],
257) {
258 if insertlen < 22594usize {
259 BrotliWriteBits(depth[62] as usize, bits[62] as (u64), storage_ix, storage);
260 BrotliWriteBits(
261 14usize,
262 (insertlen as u64).wrapping_sub(6210),
263 storage_ix,
264 storage,
265 );
266 {
267 let _rhs = 1;
268 let _lhs = &mut histo[62];
269 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
270 }
271 } else {
272 BrotliWriteBits(depth[63] as usize, bits[63] as (u64), storage_ix, storage);
273 BrotliWriteBits(
274 24usize,
275 (insertlen as u64).wrapping_sub(22594),
276 storage_ix,
277 storage,
278 );
279 {
280 let _rhs = 1;
281 let _lhs = &mut histo[63];
282 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
283 }
284 }
285}
286
287fn EmitLiterals(
288 input: &[u8],
289 len: usize,
290 depth: &[u8],
291 bits: &[u16],
292 storage_ix: &mut usize,
293 storage: &mut [u8],
294) {
295 for j in 0usize..len {
296 let lit: u8 = input[j];
297 BrotliWriteBits(
298 depth[(lit as usize)] as usize,
299 bits[(lit as usize)] as (u64),
300 storage_ix,
301 storage,
302 );
303 }
304}
305
306fn EmitDistance(
307 distance: usize,
308 depth: &[u8],
309 bits: &[u16],
310 histo: &mut [u32],
311 storage_ix: &mut usize,
312 storage: &mut [u8],
313) {
314 let d: u64 = distance.wrapping_add(3) as u64;
315 let nbits: u32 = Log2FloorNonZero(d).wrapping_sub(1);
316 let prefix: u64 = d >> nbits & 1;
317 let offset: u64 = (2u64).wrapping_add(prefix) << nbits;
318 let distcode: u64 = ((2u32).wrapping_mul(nbits.wrapping_sub(1)) as (u64))
319 .wrapping_add(prefix)
320 .wrapping_add(80);
321 BrotliWriteBits(
322 depth[(distcode as usize)] as usize,
323 bits[(distcode as usize)] as (u64),
324 storage_ix,
325 storage,
326 );
327 BrotliWriteBits(nbits as usize, d.wrapping_sub(offset), storage_ix, storage);
328 {
329 let _rhs = 1;
330 let _lhs = &mut histo[(distcode as usize)];
331 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
332 }
333}
334
335fn EmitCopyLenLastDistance(
336 copylen: usize,
337 depth: &[u8],
338 bits: &[u16],
339 histo: &mut [u32],
340 storage_ix: &mut usize,
341 storage: &mut [u8],
342) {
343 if copylen < 12usize {
344 BrotliWriteBits(
345 depth[copylen.wrapping_sub(4)] as usize,
346 bits[copylen.wrapping_sub(4)] as (u64),
347 storage_ix,
348 storage,
349 );
350 {
351 let _rhs = 1;
352 let _lhs = &mut histo[copylen.wrapping_sub(4)];
353 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
354 }
355 } else if copylen < 72usize {
356 let tail: usize = copylen.wrapping_sub(8);
357 let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
358 let prefix: usize = tail >> nbits;
359 let code: usize = ((nbits << 1) as usize).wrapping_add(prefix).wrapping_add(4);
360 BrotliWriteBits(
361 depth[(code as usize)] as usize,
362 bits[(code as usize)] as (u64),
363 storage_ix,
364 storage,
365 );
366 BrotliWriteBits(
367 nbits as usize,
368 tail.wrapping_sub(prefix << nbits) as u64,
369 storage_ix,
370 storage,
371 );
372 {
373 let _rhs = 1;
374 let _lhs = &mut histo[(code as usize)];
375 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
376 }
377 } else if copylen < 136usize {
378 let tail: usize = copylen.wrapping_sub(8);
379 let code: usize = (tail >> 5).wrapping_add(30);
380 BrotliWriteBits(
381 depth[code] as usize,
382 bits[code] as (u64),
383 storage_ix,
384 storage,
385 );
386 BrotliWriteBits(5usize, tail as u64 & 31, storage_ix, storage);
387 BrotliWriteBits(depth[64] as usize, bits[64] as (u64), storage_ix, storage);
388 {
389 let _rhs = 1;
390 let _lhs = &mut histo[code];
391 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
392 }
393 {
394 let _rhs = 1;
395 let _lhs = &mut histo[64];
396 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
397 }
398 } else if copylen < 2120usize {
399 let tail: usize = copylen.wrapping_sub(72);
400 let nbits: u32 = Log2FloorNonZero(tail as u64);
401 let code: usize = nbits.wrapping_add(28) as usize;
402 BrotliWriteBits(
403 depth[(code as usize)] as usize,
404 bits[(code as usize)] as (u64),
405 storage_ix,
406 storage,
407 );
408 BrotliWriteBits(
409 nbits as usize,
410 (tail as u64).wrapping_sub(1u64 << nbits),
411 storage_ix,
412 storage,
413 );
414 BrotliWriteBits(depth[64] as usize, bits[64] as (u64), storage_ix, storage);
415 {
416 let _rhs = 1;
417 let _lhs = &mut histo[(code as usize)];
418 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
419 }
420 {
421 let _rhs = 1;
422 let _lhs = &mut histo[64];
423 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
424 }
425 } else {
426 BrotliWriteBits(depth[39] as usize, bits[39] as (u64), storage_ix, storage);
427 BrotliWriteBits(
428 24usize,
429 copylen.wrapping_sub(2120) as u64,
430 storage_ix,
431 storage,
432 );
433 BrotliWriteBits(depth[64] as usize, bits[64] as (u64), storage_ix, storage);
434 {
435 let _rhs = 1;
436 let _lhs = &mut histo[39];
437 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
438 }
439 {
440 let _rhs = 1;
441 let _lhs = &mut histo[64];
442 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
443 }
444 }
445}
446
447fn HashBytesAtOffset(v: u64, offset: i32, shift: usize) -> u32 {
448 let h: u64 = (v >> (8i32 * offset) << 24).wrapping_mul(kHashMul32 as (u64));
449 (h >> shift) as u32
450}
451
452fn EmitCopyLen(
453 copylen: usize,
454 depth: &[u8],
455 bits: &[u16],
456 histo: &mut [u32],
457 storage_ix: &mut usize,
458 storage: &mut [u8],
459) {
460 if copylen < 10usize {
461 BrotliWriteBits(
462 depth[copylen.wrapping_add(14)] as usize,
463 bits[copylen.wrapping_add(14)] as (u64),
464 storage_ix,
465 storage,
466 );
467 {
468 let _rhs = 1;
469 let _lhs = &mut histo[copylen.wrapping_add(14)];
470 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
471 }
472 } else if copylen < 134usize {
473 let tail: usize = copylen.wrapping_sub(6);
474 let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
475 let prefix: usize = tail >> nbits;
476 let code: usize = ((nbits << 1) as usize)
477 .wrapping_add(prefix)
478 .wrapping_add(20);
479 BrotliWriteBits(
480 depth[(code as usize)] as usize,
481 bits[(code as usize)] as (u64),
482 storage_ix,
483 storage,
484 );
485 BrotliWriteBits(
486 nbits as usize,
487 (tail as u64).wrapping_sub((prefix as u64) << nbits),
488 storage_ix,
489 storage,
490 );
491 {
492 let _rhs = 1;
493 let _lhs = &mut histo[(code as usize)];
494 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
495 }
496 } else if copylen < 2118usize {
497 let tail: usize = copylen.wrapping_sub(70);
498 let nbits: u32 = Log2FloorNonZero(tail as u64);
499 let code: usize = nbits.wrapping_add(28) as usize;
500 BrotliWriteBits(
501 depth[(code as usize)] as usize,
502 bits[(code as usize)] as (u64),
503 storage_ix,
504 storage,
505 );
506 BrotliWriteBits(
507 nbits as usize,
508 (tail as u64).wrapping_sub(1 << nbits),
509 storage_ix,
510 storage,
511 );
512 {
513 let _rhs = 1;
514 let _lhs = &mut histo[(code as usize)];
515 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
516 }
517 } else {
518 BrotliWriteBits(depth[39] as usize, bits[39] as (u64), storage_ix, storage);
519 BrotliWriteBits(
520 24usize,
521 (copylen as u64).wrapping_sub(2118),
522 storage_ix,
523 storage,
524 );
525 {
526 let _rhs = 1;
527 let _lhs = &mut histo[39];
528 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
529 }
530 }
531}
532
533fn ShouldMergeBlock(data: &[u8], len: usize, depths: &[u8]) -> bool {
534 let mut histo: [usize; 256] = [0; 256];
535 static kSampleRate: usize = 43usize;
536 let mut i: usize;
537 i = 0usize;
538 while i < len {
539 {
540 let _rhs = 1;
541 let _lhs = &mut histo[data[i] as usize];
542 *_lhs = (*_lhs).wrapping_add(_rhs as usize);
543 }
544 i = i.wrapping_add(kSampleRate);
545 }
546 {
547 let total: usize = len
548 .wrapping_add(kSampleRate)
549 .wrapping_sub(1)
550 .wrapping_div(kSampleRate);
551 let mut r: super::util::floatX = (FastLog2(total as u64) + 0.5 as super::util::floatX)
552 * total as (super::util::floatX)
553 + 200i32 as (super::util::floatX);
554 for i in 0usize..256usize {
555 r -= histo[i] as (super::util::floatX)
556 * (depths[i] as (super::util::floatX) + FastLog2(histo[i] as u64));
557 }
558 r >= 0.0
559 }
560}
561
fn UpdateBits(mut n_bits: usize, mut bits: u32, mut pos: usize, array: &mut [u8]) {
    // Overwrites `n_bits` bits of `array` starting at bit position `pos`
    // (LSB-first within each byte) with the low bits of `bits`, preserving
    // all surrounding bits. Processes at most one byte per iteration.
    while n_bits > 0usize {
        let byte_pos = pos >> 3;
        let low_keep = pos & 7usize; // bits below the write window
        let n_changed = min(n_bits, (8usize).wrapping_sub(low_keep));
        let high_start = low_keep.wrapping_add(n_changed);
        // Mask of the byte's bits that must survive: everything at or above
        // `high_start` plus everything below `low_keep`.
        let keep_mask: u32 =
            !(1u32 << high_start).wrapping_sub(1) | (1u32 << low_keep).wrapping_sub(1);
        let kept = array[byte_pos] as u32 & keep_mask;
        let fresh = bits & (1u32 << n_changed).wrapping_sub(1);
        array[byte_pos] = (fresh << low_keep | kept) as u8;
        n_bits = n_bits.wrapping_sub(n_changed);
        bits >>= n_changed;
        pos = pos.wrapping_add(n_changed);
    }
}
578
// Builds Huffman codes for the 64 command codes (histogram[0..64]) and the
// 64 distance codes (histogram[64..128]), then serializes both trees to the
// bit stream. The command depths are shuffled through `cmd_depth` before
// canonical code assignment and when expanding to the 704-symbol layout
// that BrotliStoreHuffmanTree expects.
fn BuildAndStoreCommandPrefixCode(
    histogram: &[u32],
    depth: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut tree = [HuffmanTree::new(0, 0, 0); 129];
    let mut cmd_depth: [u8; 704] = [0u8; 704];

    let mut cmd_bits: [u16; 64] = [0; 64];
    // Command codes: depth limit 15; distance codes: depth limit 14.
    BrotliCreateHuffmanTree(histogram, 64usize, 15i32, &mut tree[..], depth);
    BrotliCreateHuffmanTree(
        &histogram[64..],
        64usize,
        14i32,
        &mut tree[..],
        &mut depth[64..],
    );
    // Copy the command depths into `cmd_depth` with ranges 24..32, 32..40,
    // 40..48 and 48..56 interleaved, assign canonical code words on that
    // permuted order, then map the resulting bits back into the original
    // symbol order (inverse permutation below).
    memcpy(&mut cmd_depth[..], 0, depth, 0, 24usize);
    memcpy(&mut cmd_depth[..], 24usize, depth, (40usize), 8usize);
    memcpy(&mut cmd_depth[..], 32usize, depth, (24usize), 8usize);
    memcpy(&mut cmd_depth[..], 40usize, depth, (48usize), 8usize);
    memcpy(&mut cmd_depth[..], 48usize, depth, (32usize), 8usize);
    memcpy(&mut cmd_depth[..], 56usize, depth, (56usize), 8usize);
    BrotliConvertBitDepthsToSymbols(&mut cmd_depth[..], 64usize, &mut cmd_bits[..]);
    memcpy(bits, 0, &cmd_bits[..], 0, 24usize);
    memcpy(bits, (24usize), &cmd_bits[..], 32usize, 8usize);
    memcpy(bits, (32usize), &cmd_bits[..], 48usize, 8usize);
    memcpy(bits, (40usize), &cmd_bits[..], 24usize, 8usize);
    memcpy(bits, (48usize), &cmd_bits[..], 40usize, 8usize);
    memcpy(bits, (56usize), &cmd_bits[..], 56usize, 8usize);
    BrotliConvertBitDepthsToSymbols(&mut depth[64..], 64usize, &mut bits[64..]);
    {
        // Spread the 64 command depths over the 704-symbol alphabet layout
        // before storing the tree.
        for item in cmd_depth[..64].iter_mut() {
            *item = 0;
        }
        memcpy(&mut cmd_depth[..], 0, depth, 0, 8usize);
        memcpy(&mut cmd_depth[..], 64usize, depth, (8usize), 8usize);
        memcpy(&mut cmd_depth[..], 128usize, depth, (16usize), 8usize);
        memcpy(&mut cmd_depth[..], 192usize, depth, (24usize), 8usize);
        memcpy(&mut cmd_depth[..], 384usize, depth, (32usize), 8usize);
        for i in 0usize..8usize {
            // Depths for symbols 40..63 land at strided positions.
            cmd_depth[(128usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(40)];
            cmd_depth[(256usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(48)];
            cmd_depth[(448usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(56)];
        }
        BrotliStoreHuffmanTree(
            &mut cmd_depth[..],
            704usize,
            &mut tree[..],
            storage_ix,
            storage,
        );
    }
    // Store the distance tree separately.
    BrotliStoreHuffmanTree(
        &mut depth[64..],
        64usize,
        &mut tree[..],
        storage_ix,
        storage,
    );
}
650
#[allow(unused_assignments)]
// Core one-pass compression loop: a state machine over three states
// (EMIT_COMMANDS, EMIT_REMAINDER, NEXT_BLOCK). Matches are found with a
// single hash table over 5-byte sequences and commands are written directly
// to the bit stream as they are discovered.
fn BrotliCompressFragmentFastImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input_ptr: &[u8],
    mut input_size: usize,
    is_last: i32,
    table: &mut [i32],
    table_bits: usize,
    cmd_depth: &mut [u8],
    cmd_bits: &mut [u16],
    cmd_code_numbits: &mut usize,
    cmd_code: &mut [u8],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    // Histogram of emitted command/distance codes, seeded from kCmdHistoSeed
    // at the start of each EMIT_COMMANDS pass.
    let mut cmd_histo = [0u32; 128];
    let mut ip_end = 0usize;
    // Start of the run of literals not yet written out.
    let mut next_emit = 0usize;
    let base_ip = 0usize;
    static kFirstBlockSize: usize = (3i32 << 15) as usize;
    static kMergeBlockSize: usize = (1i32 << 16) as usize;
    let kInputMarginBytes = 16usize;
    let kMinMatchLen = 5usize;
    let mut metablock_start = 0usize;
    let mut block_size = min(input_size, kFirstBlockSize);
    let mut total_block_size = block_size;
    // Bit position of the MLEN field in the current meta-block header, kept
    // so it can be patched by UpdateBits when blocks are merged.
    let mut mlen_storage_ix = storage_ix.wrapping_add(3);
    let mut lit_depth = [0u8; 256];
    let mut lit_bits = [0u16; 256];
    let mut literal_ratio: usize;
    let mut input_index = 0usize;
    let mut last_distance: i32;
    let shift: usize = (64u32 as usize).wrapping_sub(table_bits);
    BrotliStoreMetaBlockHeader(block_size, 0i32, storage_ix, storage);
    // 13 zero bits: meta-block header flags following MLEN.
    BrotliWriteBits(13usize, 0, storage_ix, storage);
    literal_ratio = BuildAndStoreLiteralPrefixCode(
        m,
        &input_ptr[input_index..],
        block_size,
        &mut lit_depth[..],
        &mut lit_bits[..],
        storage_ix,
        storage,
    );
    // Replay the command prefix code serialized by the previous fragment:
    // whole bytes first, then the trailing partial byte.
    {
        let mut i = 0usize;
        while i.wrapping_add(7) < *cmd_code_numbits {
            BrotliWriteBits(8usize, cmd_code[i >> 3] as u64, storage_ix, storage);
            i = i.wrapping_add(8);
        }
    }
    BrotliWriteBits(
        *cmd_code_numbits & 7usize,
        cmd_code[*cmd_code_numbits >> 3] as u64,
        storage_ix,
        storage,
    );
    let mut code_block_selection: CodeBlockState = CodeBlockState::EMIT_COMMANDS;
    'continue_to_next_block: loop {
        let mut ip_index: usize;
        if code_block_selection == CodeBlockState::EMIT_COMMANDS {
            // Reset per-block state: seed the command histogram and forget
            // the previous distance.
            cmd_histo[..128].clone_from_slice(&kCmdHistoSeed[..]);
            ip_index = input_index;
            last_distance = -1i32;
            ip_end = input_index.wrapping_add(block_size);
            if block_size >= kInputMarginBytes {
                // Keep a margin so 5-byte match checks and 8-byte loads
                // never read past the end of the input.
                let len_limit: usize = min(
                    block_size.wrapping_sub(kMinMatchLen),
                    input_size.wrapping_sub(kInputMarginBytes),
                );
                let ip_limit: usize = input_index.wrapping_add(len_limit);
                let mut next_hash = Hash(
                    &input_ptr[{
                        ip_index = ip_index.wrapping_add(1);
                        ip_index
                    }..],
                    shift,
                );
                loop {
                    // `skip >> 5` bytes are skipped per probe, so the scan
                    // accelerates while no match is found.
                    let mut skip = 32u32;
                    let mut next_ip = ip_index;
                    let mut candidate = 0usize;
                    loop {
                        {
                            'break15: loop {
                                {
                                    let hash = next_hash;
                                    let bytes_between_hash_lookups: u32 = skip >> 5;
                                    skip = skip.wrapping_add(1);
                                    ip_index = next_ip;
                                    next_ip =
                                        ip_index.wrapping_add(bytes_between_hash_lookups as usize);
                                    if next_ip > ip_limit {
                                        code_block_selection = CodeBlockState::EMIT_REMAINDER;
                                        break 'break15;
                                    }
                                    next_hash = Hash(&input_ptr[next_ip..], shift);
                                    // First try reusing the last distance.
                                    candidate = ip_index.wrapping_sub(last_distance as usize);
                                    if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..])
                                        && candidate < ip_index
                                    {
                                        table[hash as usize] =
                                            ip_index.wrapping_sub(base_ip) as i32;
                                        break 'break15;
                                    }
                                    // Otherwise consult the hash table and
                                    // record the current position.
                                    candidate = base_ip.wrapping_add(table[hash as usize] as usize);
                                    table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                                }
                                if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
                                    break;
                                }
                            }
                        }
                        // Retry if the candidate is farther back than the
                        // maximum representable distance ((1 << 18) - 16).
                        if !(ip_index.wrapping_sub(candidate)
                            > (1usize << 18).wrapping_sub(16) as isize as usize
                            && (code_block_selection as i32
                                == CodeBlockState::EMIT_COMMANDS as i32))
                        {
                            break;
                        }
                    }
                    if code_block_selection as i32 != CodeBlockState::EMIT_COMMANDS as i32 {
                        break;
                    }
                    {
                        // Emit the pending literals, then the copy command.
                        let base: usize = ip_index;
                        let matched = (5usize).wrapping_add(FindMatchLengthWithLimit(
                            &input_ptr[candidate + 5..],
                            &input_ptr[ip_index + 5..],
                            ip_end.wrapping_sub(ip_index).wrapping_sub(5),
                        ));
                        let distance = base.wrapping_sub(candidate) as i32;
                        let insert = base.wrapping_sub(next_emit);
                        ip_index = ip_index.wrapping_add(matched);
                        if insert < 6210 {
                            EmitInsertLen(
                                insert,
                                cmd_depth,
                                cmd_bits,
                                &mut cmd_histo[..],
                                storage_ix,
                                storage,
                            );
                        } else if ShouldUseUncompressedMode(
                            (next_emit as isize) - (metablock_start as isize),
                            insert,
                            literal_ratio,
                        ) {
                            // Long, incompressible literal run: rewrite the
                            // current meta-block as uncompressed and restart.
                            EmitUncompressedMetaBlock(
                                &input_ptr[metablock_start..],
                                base.wrapping_sub(metablock_start),
                                mlen_storage_ix.wrapping_sub(3),
                                storage_ix,
                                storage,
                            );
                            input_size = input_size.wrapping_sub(base.wrapping_sub(input_index));
                            input_index = base;
                            next_emit = input_index;
                            code_block_selection = CodeBlockState::NEXT_BLOCK;
                            continue 'continue_to_next_block;
                        } else {
                            EmitLongInsertLen(
                                insert,
                                cmd_depth,
                                cmd_bits,
                                &mut cmd_histo[..],
                                storage_ix,
                                storage,
                            );
                        }
                        EmitLiterals(
                            &input_ptr[next_emit..],
                            insert,
                            &mut lit_depth[..],
                            &mut lit_bits[..],
                            storage_ix,
                            storage,
                        );
                        if distance == last_distance {
                            // Same distance as the previous copy: emit the
                            // dedicated "last distance" code 64.
                            BrotliWriteBits(
                                cmd_depth[64] as usize,
                                cmd_bits[64] as u64,
                                storage_ix,
                                storage,
                            );
                            {
                                let _rhs = 1u32;
                                let _lhs = &mut cmd_histo[64];
                                *_lhs = (*_lhs).wrapping_add(_rhs);
                            }
                        } else {
                            EmitDistance(
                                distance as usize,
                                cmd_depth,
                                cmd_bits,
                                &mut cmd_histo[..],
                                storage_ix,
                                storage,
                            );
                            last_distance = distance;
                        }
                        EmitCopyLenLastDistance(
                            matched,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        next_emit = ip_index;
                        if ip_index >= ip_limit {
                            code_block_selection = CodeBlockState::EMIT_REMAINDER;
                            continue 'continue_to_next_block;
                        }
                        {
                            // Re-prime the hash table with the three
                            // positions just before the current one, and
                            // fetch a candidate for the current position.
                            assert!(ip_index >= 3);
                            let input_bytes: u64 =
                                BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
                            let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0i32, shift);
                            let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
                            prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
                            prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
                            candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
                            table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                        }
                    }
                    // Back-to-back matches: no literals in between, so emit
                    // copy length + distance only.
                    while IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
                        let base: usize = ip_index;
                        let matched: usize = (5usize).wrapping_add(FindMatchLengthWithLimit(
                            &input_ptr[candidate + 5..],
                            &input_ptr[ip_index + 5..],
                            ip_end.wrapping_sub(ip_index).wrapping_sub(5),
                        ));
                        if ip_index.wrapping_sub(candidate) > (1usize << 18).wrapping_sub(16) {
                            break;
                        }
                        ip_index = ip_index.wrapping_add(matched);
                        last_distance = base.wrapping_sub(candidate) as i32;
                        EmitCopyLen(
                            matched,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        EmitDistance(
                            last_distance as usize,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        next_emit = ip_index;
                        if ip_index >= ip_limit {
                            code_block_selection = CodeBlockState::EMIT_REMAINDER;
                            continue 'continue_to_next_block;
                        }
                        {
                            // Same table re-priming as after the first match.
                            assert!(ip_index >= 3);
                            let input_bytes: u64 =
                                BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
                            let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0i32, shift);
                            let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
                            prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
                            prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
                            candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
                            table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                        }
                    }
                    if code_block_selection as i32 == CodeBlockState::EMIT_REMAINDER as i32 {
                        break;
                    }
                    if code_block_selection as i32 == CodeBlockState::EMIT_COMMANDS as i32 {
                        next_hash = Hash(
                            &input_ptr[{
                                ip_index = ip_index.wrapping_add(1);
                                ip_index
                            }..],
                            shift,
                        );
                    }
                }
            }
            code_block_selection = CodeBlockState::EMIT_REMAINDER;
            continue 'continue_to_next_block;
        } else if code_block_selection as i32 == CodeBlockState::EMIT_REMAINDER as i32 {
            // Advance past the block just coded, then decide whether to
            // merge the next chunk into the current meta-block.
            input_index = input_index.wrapping_add(block_size);
            input_size = input_size.wrapping_sub(block_size);
            block_size = min(input_size, kMergeBlockSize);
            if input_size > 0
                && (total_block_size.wrapping_add(block_size) <= (1i32 << 20) as usize)
                && ShouldMergeBlock(&input_ptr[input_index..], block_size, &mut lit_depth[..])
            {
                total_block_size = total_block_size.wrapping_add(block_size);
                // Patch the MLEN field of the current meta-block header to
                // cover the merged data.
                UpdateBits(
                    20usize,
                    total_block_size.wrapping_sub(1) as u32,
                    mlen_storage_ix,
                    storage,
                );
                code_block_selection = CodeBlockState::EMIT_COMMANDS;
                continue 'continue_to_next_block;
            }
            // Flush the literals left over at the end of the block.
            if next_emit < ip_end {
                let insert: usize = ip_end.wrapping_sub(next_emit);
                if insert < 6210 {
                    EmitInsertLen(
                        insert,
                        cmd_depth,
                        cmd_bits,
                        &mut cmd_histo[..],
                        storage_ix,
                        storage,
                    );
                    EmitLiterals(
                        &input_ptr[next_emit..],
                        insert,
                        &mut lit_depth[..],
                        &mut lit_bits[..],
                        storage_ix,
                        storage,
                    );
                } else if ShouldUseUncompressedMode(
                    next_emit as isize - metablock_start as isize,
                    insert,
                    literal_ratio,
                ) {
                    EmitUncompressedMetaBlock(
                        &input_ptr[metablock_start..],
                        ip_end.wrapping_sub(metablock_start),
                        mlen_storage_ix.wrapping_sub(3),
                        storage_ix,
                        storage,
                    );
                } else {
                    EmitLongInsertLen(
                        insert,
                        cmd_depth,
                        cmd_bits,
                        &mut cmd_histo[..],
                        storage_ix,
                        storage,
                    );
                    EmitLiterals(
                        &input_ptr[next_emit..],
                        insert,
                        &mut lit_depth[..],
                        &mut lit_bits[..],
                        storage_ix,
                        storage,
                    );
                }
            }
            next_emit = ip_end;
            code_block_selection = CodeBlockState::NEXT_BLOCK;
            continue 'continue_to_next_block;
        } else if code_block_selection as i32 == CodeBlockState::NEXT_BLOCK as i32 {
            if input_size > 0 {
                // Open a fresh meta-block: new header, new literal code,
                // and a command code built from the accumulated histogram.
                metablock_start = input_index;
                block_size = min(input_size, kFirstBlockSize);
                total_block_size = block_size;
                mlen_storage_ix = storage_ix.wrapping_add(3);
                BrotliStoreMetaBlockHeader(block_size, 0i32, storage_ix, storage);
                BrotliWriteBits(13usize, 0, storage_ix, storage);
                literal_ratio = BuildAndStoreLiteralPrefixCode(
                    m,
                    &input_ptr[input_index..],
                    block_size,
                    &mut lit_depth[..],
                    &mut lit_bits[..],
                    storage_ix,
                    storage,
                );
                BuildAndStoreCommandPrefixCode(
                    &mut cmd_histo[..],
                    cmd_depth,
                    cmd_bits,
                    storage_ix,
                    storage,
                );
                code_block_selection = CodeBlockState::EMIT_COMMANDS;
                continue 'continue_to_next_block;
            }
            break;
        }
    }
    // For a non-final fragment, serialize the command prefix code into
    // cmd_code/cmd_code_numbits so the next fragment can replay it.
    if is_last == 0 {
        cmd_code[0] = 0;
        *cmd_code_numbits = 0;
        BuildAndStoreCommandPrefixCode(
            &mut cmd_histo[..],
            cmd_depth,
            cmd_bits,
            cmd_code_numbits,
            cmd_code,
        );
    }
}
1063
// Expands to a thin wrapper around `BrotliCompressFragmentFastImpl` with the
// hash-table bit width baked in as a constant, producing one wrapper per
// supported table size.
macro_rules! compress_specialization {
    ($table_bits : expr, $fname: ident) => {
        fn $fname<AllocHT: alloc::Allocator<HuffmanTree>>(
            mht: &mut AllocHT,
            input: &[u8],
            input_size: usize,
            is_last: i32,
            table: &mut [i32],
            cmd_depth: &mut [u8],
            cmd_bits: &mut [u16],
            cmd_code_numbits: &mut usize,
            cmd_code: &mut [u8],
            storage_ix: &mut usize,
            storage: &mut [u8],
        ) {
            BrotliCompressFragmentFastImpl(
                mht,
                input,
                input_size,
                is_last,
                table,
                $table_bits,
                cmd_depth,
                cmd_bits,
                cmd_code_numbits,
                cmd_code,
                storage_ix,
                storage,
            );
        }
    };
}

// One specialization per table size dispatched by BrotliCompressFragmentFast.
compress_specialization!(9, BrotliCompressFragmentFastImpl9);
compress_specialization!(11, BrotliCompressFragmentFastImpl11);
compress_specialization!(13, BrotliCompressFragmentFastImpl13);
compress_specialization!(15, BrotliCompressFragmentFastImpl15);
1101
1102pub fn BrotliCompressFragmentFast<AllocHT: alloc::Allocator<HuffmanTree>>(
1103 m: &mut AllocHT,
1104 input: &[u8],
1105 input_size: usize,
1106 is_last: i32,
1107 table: &mut [i32],
1108 table_size: usize,
1109 cmd_depth: &mut [u8],
1110 cmd_bits: &mut [u16],
1111 cmd_code_numbits: &mut usize,
1112 cmd_code: &mut [u8],
1113 storage_ix: &mut usize,
1114 storage: &mut [u8],
1115) {
1116 let initial_storage_ix: usize = *storage_ix;
1117 let table_bits: usize = Log2FloorNonZero(table_size as u64) as usize;
1118 if input_size == 0usize {
1119 BrotliWriteBits(1usize, 1, storage_ix, storage);
1120 BrotliWriteBits(1usize, 1, storage_ix, storage);
1121 *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
1122 return;
1123 }
1124 if table_bits == 9usize {
1125 BrotliCompressFragmentFastImpl9(
1126 m,
1127 input,
1128 input_size,
1129 is_last,
1130 table,
1131 cmd_depth,
1132 cmd_bits,
1133 cmd_code_numbits,
1134 cmd_code,
1135 storage_ix,
1136 storage,
1137 );
1138 }
1139 if table_bits == 11usize {
1140 BrotliCompressFragmentFastImpl11(
1141 m,
1142 input,
1143 input_size,
1144 is_last,
1145 table,
1146 cmd_depth,
1147 cmd_bits,
1148 cmd_code_numbits,
1149 cmd_code,
1150 storage_ix,
1151 storage,
1152 );
1153 }
1154 if table_bits == 13usize {
1155 BrotliCompressFragmentFastImpl13(
1156 m,
1157 input,
1158 input_size,
1159 is_last,
1160 table,
1161 cmd_depth,
1162 cmd_bits,
1163 cmd_code_numbits,
1164 cmd_code,
1165 storage_ix,
1166 storage,
1167 );
1168 }
1169 if table_bits == 15usize {
1170 BrotliCompressFragmentFastImpl15(
1171 m,
1172 input,
1173 input_size,
1174 is_last,
1175 table,
1176 cmd_depth,
1177 cmd_bits,
1178 cmd_code_numbits,
1179 cmd_code,
1180 storage_ix,
1181 storage,
1182 );
1183 }
1184 if storage_ix.wrapping_sub(initial_storage_ix) > (31usize).wrapping_add(input_size << 3) {
1185 EmitUncompressedMetaBlock(input, input_size, initial_storage_ix, storage_ix, storage);
1186 }
1187 if is_last != 0 {
1188 BrotliWriteBits(1usize, 1, storage_ix, storage);
1189 BrotliWriteBits(1usize, 1, storage_ix, storage);
1190 *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
1191 }
1192}