brotli/enc/
compress_fragment_two_pass.rs

1use core;
2use core::cmp::min;
3
4use super::super::alloc;
5use super::backward_references::kHashMul32;
6use super::bit_cost::BitsEntropy;
7use super::brotli_bit_stream::{BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree};
8use super::entropy_encode::{
9    BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree,
10};
11use super::static_dict::{
12    FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
13    BROTLI_UNALIGNED_STORE64,
14};
15use super::util::{floatX, Log2FloorNonZero};
// Input is split into blocks of at most 128 KiB (1 << 17 bytes); each block gets
// its own two-pass create-commands / store-commands cycle.
static kCompressFragmentTwoPassBlockSize: usize = (1i32 << 17) as usize;
17
18// returns number of commands inserted
19fn EmitInsertLen(insertlen: u32, commands: &mut &mut [u32]) -> usize {
20    if insertlen < 6u32 {
21        (*commands)[0] = insertlen;
22    } else if insertlen < 130u32 {
23        let tail: u32 = insertlen.wrapping_sub(2);
24        let nbits: u32 = Log2FloorNonZero(tail as (u64)).wrapping_sub(1);
25        let prefix: u32 = tail >> nbits;
26        let inscode: u32 = (nbits << 1).wrapping_add(prefix).wrapping_add(2);
27        let extra: u32 = tail.wrapping_sub(prefix << nbits);
28        (*commands)[0] = inscode | extra << 8;
29    } else if insertlen < 2114u32 {
30        let tail: u32 = insertlen.wrapping_sub(66);
31        let nbits: u32 = Log2FloorNonZero(tail as (u64));
32        let code: u32 = nbits.wrapping_add(10);
33        let extra: u32 = tail.wrapping_sub(1u32 << nbits);
34        (*commands)[0] = code | extra << 8;
35    } else if insertlen < 6210u32 {
36        let extra: u32 = insertlen.wrapping_sub(2114);
37        (*commands)[0] = 21u32 | extra << 8;
38    } else if insertlen < 22594u32 {
39        let extra: u32 = insertlen.wrapping_sub(6210);
40        (*commands)[0] = 22u32 | extra << 8;
41    } else {
42        let extra: u32 = insertlen.wrapping_sub(22594);
43        (*commands)[0] = 23u32 | extra << 8;
44    }
45    let remainder = core::mem::take(commands);
46    let _ = core::mem::replace(commands, &mut remainder[1..]);
47    1
48}
49
50fn EmitDistance(distance: u32, commands: &mut &mut [u32]) -> usize {
51    let d: u32 = distance.wrapping_add(3);
52    let nbits: u32 = Log2FloorNonZero(d as (u64)).wrapping_sub(1);
53    let prefix: u32 = d >> nbits & 1u32;
54    let offset: u32 = (2u32).wrapping_add(prefix) << nbits;
55    let distcode: u32 = (2u32)
56        .wrapping_mul(nbits.wrapping_sub(1))
57        .wrapping_add(prefix)
58        .wrapping_add(80);
59    let extra: u32 = d.wrapping_sub(offset);
60    (*commands)[0] = distcode | extra << 8;
61    let remainder = core::mem::take(commands);
62    let _ = core::mem::replace(commands, &mut remainder[1..]);
63    1
64}
65
66fn EmitCopyLenLastDistance(copylen: usize, commands: &mut &mut [u32]) -> usize {
67    if copylen < 12usize {
68        (*commands)[0] = copylen.wrapping_add(20) as u32;
69        let remainder = core::mem::take(commands);
70        let _ = core::mem::replace(commands, &mut remainder[1..]);
71        1
72    } else if copylen < 72usize {
73        let tail: usize = copylen.wrapping_sub(8);
74        let nbits: usize = Log2FloorNonZero(tail as u64).wrapping_sub(1) as usize;
75        let prefix: usize = tail >> nbits;
76        let code: usize = (nbits << 1).wrapping_add(prefix).wrapping_add(28);
77        let extra: usize = tail.wrapping_sub(prefix << nbits);
78        (*commands)[0] = (code | extra << 8) as u32;
79        let remainder = core::mem::take(commands);
80        let _ = core::mem::replace(commands, &mut remainder[1..]);
81        1
82    } else if copylen < 136usize {
83        let tail: usize = copylen.wrapping_sub(8);
84        let code: usize = (tail >> 5).wrapping_add(54);
85        let extra: usize = tail & 31usize;
86        (*commands)[0] = (code | extra << 8) as u32;
87        let remainder = core::mem::take(commands);
88        let _ = core::mem::replace(commands, &mut remainder[1..]);
89        (*commands)[0] = 64u32;
90        let remainder2 = core::mem::take(commands);
91        let _ = core::mem::replace(commands, &mut remainder2[1..]);
92        2
93    } else if copylen < 2120usize {
94        let tail: usize = copylen.wrapping_sub(72);
95        let nbits: usize = Log2FloorNonZero(tail as u64) as usize;
96        let code: usize = nbits.wrapping_add(52);
97        let extra: usize = tail.wrapping_sub(1usize << nbits);
98        (*commands)[0] = (code | extra << 8) as u32;
99        let remainder = core::mem::take(commands);
100        let _ = core::mem::replace(commands, &mut remainder[1..]);
101        (*commands)[0] = 64u32;
102        let remainder2 = core::mem::take(commands);
103        let _ = core::mem::replace(commands, &mut remainder2[1..]);
104        2
105    } else {
106        let extra: usize = copylen.wrapping_sub(2120);
107        (*commands)[0] = (63usize | extra << 8) as u32;
108        let remainder = core::mem::take(commands);
109        let _ = core::mem::replace(commands, &mut remainder[1..]);
110        (*commands)[0] = 64u32;
111        let remainder2 = core::mem::take(commands);
112        let _ = core::mem::replace(commands, &mut remainder2[1..]);
113        2
114    }
115}
116fn HashBytesAtOffset(v: u64, offset: i32, shift: usize, length: usize) -> u32 {
117    let h: u64 = (v >> (8i32 * offset) << ((8 - length) * 8)).wrapping_mul(kHashMul32 as (u64));
118    (h >> shift) as u32
119}
120
121fn EmitCopyLen(copylen: usize, commands: &mut &mut [u32]) -> usize {
122    if copylen < 10usize {
123        (*commands)[0] = copylen.wrapping_add(38) as u32;
124    } else if copylen < 134usize {
125        let tail: usize = copylen.wrapping_sub(6);
126        let nbits: usize = Log2FloorNonZero(tail as u64).wrapping_sub(1) as usize;
127        let prefix: usize = tail >> nbits;
128        let code: usize = (nbits << 1).wrapping_add(prefix).wrapping_add(44);
129        let extra: usize = tail.wrapping_sub(prefix << nbits);
130        (*commands)[0] = (code | extra << 8) as u32;
131    } else if copylen < 2118usize {
132        let tail: usize = copylen.wrapping_sub(70);
133        let nbits: usize = Log2FloorNonZero(tail as u64) as usize;
134        let code: usize = nbits.wrapping_add(52);
135        let extra: usize = tail.wrapping_sub(1usize << nbits);
136        (*commands)[0] = (code | extra << 8) as u32;
137    } else {
138        let extra: usize = copylen.wrapping_sub(2118);
139        (*commands)[0] = (63usize | extra << 8) as u32;
140    }
141    let remainder = core::mem::take(commands);
142    let _ = core::mem::replace(commands, &mut remainder[1..]);
143    1
144}
145fn Hash(p: &[u8], shift: usize, length: usize) -> u32 {
146    let h: u64 =
147        (BROTLI_UNALIGNED_LOAD64(p) << ((8 - length) * 8)).wrapping_mul(kHashMul32 as (u64));
148    (h >> shift) as u32
149}
150
151fn IsMatch(p1: &[u8], p2: &[u8], length: usize) -> bool {
152    BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2)
153        && (length == 4 || (p1[4] == p2[4] && p1[5] == p2[5]))
154}
155
156#[allow(unused_assignments)]
157fn CreateCommands(
158    input_index: usize,
159    block_size: usize,
160    input_size: usize,
161    base_ip: &[u8],
162    table: &mut [i32],
163    table_bits: usize,
164    min_match: usize,
165    literals: &mut &mut [u8],
166    num_literals: &mut usize,
167    commands: &mut &mut [u32],
168    num_commands: &mut usize,
169) {
170    let mut ip_index: usize = input_index;
171    let shift: usize = (64u32 as usize).wrapping_sub(table_bits);
172    let ip_end: usize = input_index.wrapping_add(block_size);
173    let mut next_emit: usize = input_index;
174    let mut last_distance: i32 = -1i32;
175    let kInputMarginBytes: usize = 16usize;
176
177    if block_size >= kInputMarginBytes {
178        let len_limit: usize = min(
179            block_size.wrapping_sub(min_match),
180            input_size.wrapping_sub(kInputMarginBytes),
181        );
182        let ip_limit: usize = input_index.wrapping_add(len_limit);
183        let mut next_hash: u32;
184        let mut goto_emit_remainder = false;
185        next_hash = Hash(
186            &base_ip[{
187                ip_index = ip_index.wrapping_add(1);
188                ip_index
189            }..],
190            shift,
191            min_match,
192        );
193        while !goto_emit_remainder {
194            let mut skip: u32 = 32u32;
195            let mut next_ip: usize = ip_index;
196            let mut candidate: usize = 0;
197            loop {
198                {
199                    'break3: loop {
200                        {
201                            let hash: u32 = next_hash;
202                            let bytes_between_hash_lookups: u32 = skip >> 5;
203                            skip = skip.wrapping_add(1);
204                            ip_index = next_ip;
205                            next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as usize);
206                            if next_ip > ip_limit {
207                                goto_emit_remainder = true;
208                                {
209                                    break 'break3;
210                                }
211                            }
212                            next_hash = Hash(&base_ip[next_ip..], shift, min_match);
213                            candidate = ip_index.wrapping_sub(last_distance as usize);
214                            if IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match)
215                                && candidate < ip_index
216                            {
217                                table[(hash as usize)] = ip_index.wrapping_sub(0) as i32;
218                                {
219                                    break 'break3;
220                                }
221                            }
222                            candidate = table[(hash as usize)] as usize;
223                            table[(hash as usize)] = ip_index.wrapping_sub(0) as i32;
224                        }
225                        if IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match) {
226                            break;
227                        }
228                    }
229                }
230                if !(ip_index.wrapping_sub(candidate)
231                    > (1usize << 18).wrapping_sub(16) as isize as usize
232                    && !goto_emit_remainder)
233                {
234                    break;
235                }
236            }
237            if goto_emit_remainder {
238                break;
239            }
240            {
241                let base: usize = ip_index;
242                let matched: usize = min_match.wrapping_add(FindMatchLengthWithLimit(
243                    &base_ip[(candidate + min_match)..],
244                    &base_ip[(ip_index + min_match)..],
245                    ip_end.wrapping_sub(ip_index).wrapping_sub(min_match),
246                ));
247                let distance: i32 = base.wrapping_sub(candidate) as i32;
248                let insert: i32 = base.wrapping_sub(next_emit) as i32;
249                ip_index = ip_index.wrapping_add(matched);
250                *num_commands += EmitInsertLen(insert as u32, commands);
251                (*literals)[..(insert as usize)]
252                    .clone_from_slice(&base_ip[next_emit..(next_emit + insert as usize)]);
253                *num_literals += insert as usize;
254                let new_literals = core::mem::take(literals);
255                let _ = core::mem::replace(literals, &mut new_literals[(insert as usize)..]);
256                if distance == last_distance {
257                    (*commands)[0] = 64u32;
258                    let remainder = core::mem::take(commands);
259                    let _ = core::mem::replace(commands, &mut remainder[1..]);
260                    *num_commands += 1;
261                } else {
262                    *num_commands += EmitDistance(distance as u32, commands);
263                    last_distance = distance;
264                }
265                *num_commands += EmitCopyLenLastDistance(matched, commands);
266                next_emit = ip_index;
267                if ip_index >= ip_limit {
268                    goto_emit_remainder = true;
269                    {
270                        break;
271                    }
272                }
273                {
274                    let mut input_bytes: u64;
275                    let mut prev_hash: u32;
276                    let cur_hash: u32;
277                    if min_match == 4 {
278                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 3)..]);
279                        cur_hash = HashBytesAtOffset(input_bytes, 3i32, shift, min_match);
280                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
281                        table[(prev_hash as usize)] = ip_index.wrapping_sub(3) as i32;
282                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
283                        table[(prev_hash as usize)] = ip_index.wrapping_sub(2) as i32;
284                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
285                        table[(prev_hash as usize)] = ip_index.wrapping_sub(1) as i32;
286                    } else {
287                        assert!(ip_index >= 5);
288                        // could this be off the end FIXME
289                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 5)..]);
290                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
291                        table[(prev_hash as usize)] = ip_index.wrapping_sub(5) as i32;
292                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
293                        table[(prev_hash as usize)] = ip_index.wrapping_sub(4) as i32;
294                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
295                        table[(prev_hash as usize)] = ip_index.wrapping_sub(3) as i32;
296                        assert!(ip_index >= 2);
297                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 2)..]);
298                        cur_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
299                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
300                        table[(prev_hash as usize)] = ip_index.wrapping_sub(2) as i32;
301                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
302                        table[(prev_hash as usize)] = ip_index.wrapping_sub(1) as i32;
303                    }
304                    candidate = table[(cur_hash as usize)] as usize;
305                    table[(cur_hash as usize)] = ip_index as i32;
306                }
307            }
308            while ip_index.wrapping_sub(candidate)
309                <= (1usize << 18).wrapping_sub(16) as isize as usize
310                && IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match)
311            {
312                let base_index: usize = ip_index;
313                let matched: usize = min_match.wrapping_add(FindMatchLengthWithLimit(
314                    &base_ip[(candidate + min_match)..],
315                    &base_ip[(ip_index + min_match)..],
316                    ip_end.wrapping_sub(ip_index).wrapping_sub(min_match),
317                ));
318                ip_index = ip_index.wrapping_add(matched);
319                last_distance = base_index.wrapping_sub(candidate) as i32;
320                *num_commands += EmitCopyLen(matched, commands);
321                *num_commands += EmitDistance(last_distance as u32, commands);
322                next_emit = ip_index;
323                if ip_index >= ip_limit {
324                    goto_emit_remainder = true;
325                    {
326                        break;
327                    }
328                }
329                {
330                    assert!(ip_index >= 5);
331                    let mut input_bytes: u64;
332
333                    let cur_hash: u32;
334                    let mut prev_hash: u32;
335                    if min_match == 4 {
336                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 3)..]);
337                        cur_hash = HashBytesAtOffset(input_bytes, 3i32, shift, min_match);
338                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
339                        table[(prev_hash as usize)] = ip_index.wrapping_sub(3) as i32;
340                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
341                        table[(prev_hash as usize)] = ip_index.wrapping_sub(2) as i32;
342                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
343                        table[(prev_hash as usize)] = ip_index.wrapping_sub(1) as i32;
344                    } else {
345                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 5)..]);
346                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
347                        table[(prev_hash as usize)] = ip_index.wrapping_sub(5) as i32;
348                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
349                        table[(prev_hash as usize)] = ip_index.wrapping_sub(4) as i32;
350                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
351                        table[(prev_hash as usize)] = ip_index.wrapping_sub(3) as i32;
352                        assert!(ip_index >= 2);
353                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 2)..]);
354                        cur_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
355                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
356                        table[(prev_hash as usize)] = ip_index.wrapping_sub(2) as i32;
357                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
358                        table[(prev_hash as usize)] = ip_index.wrapping_sub(1) as i32;
359                    }
360                    candidate = table[(cur_hash as usize)] as usize;
361                    table[(cur_hash as usize)] = ip_index as i32;
362                }
363            }
364            if !goto_emit_remainder {
365                next_hash = Hash(
366                    &base_ip[{
367                        ip_index = ip_index.wrapping_add(1);
368                        ip_index
369                    }..],
370                    shift,
371                    min_match,
372                );
373            }
374        }
375    }
376    if next_emit < ip_end {
377        let insert: u32 = ip_end.wrapping_sub(next_emit) as u32;
378        *num_commands += EmitInsertLen(insert, commands);
379        literals[..insert as usize]
380            .clone_from_slice(&base_ip[next_emit..(next_emit + insert as usize)]);
381        let mut xliterals = core::mem::take(literals);
382        *literals = &mut core::mem::take(&mut xliterals)[(insert as usize)..];
383        *num_literals += insert as usize;
384    }
385}
386
387fn ShouldCompress(input: &[u8], input_size: usize, num_literals: usize) -> bool {
388    let corpus_size = input_size as floatX;
389    if (num_literals as floatX) < 0.98 * corpus_size {
390        true
391    } else {
392        let mut literal_histo: [u32; 256] = [0; 256];
393        let max_total_bit_cost: floatX = corpus_size * 8.0 * 0.98 / 43.0;
394        let mut i: usize;
395        i = 0usize;
396        while i < input_size {
397            {
398                let _rhs = 1;
399                let _lhs = &mut literal_histo[input[i] as usize];
400                *_lhs = (*_lhs).wrapping_add(_rhs as u32);
401            }
402            i = i.wrapping_add(43);
403        }
404        BitsEntropy(&mut literal_histo[..], 256) < max_total_bit_cost
405    }
406}
407
/// Writes the low `n_bits` of `bits` into `array` at bit position `*pos` and
/// advances `*pos`. Relies on all bits at and beyond `*pos` being zero: it ORs
/// the new bits into the current byte and rewrites the following seven bytes
/// via one unaligned 64-bit store.
/// NOTE(review): this requires at least 8 bytes of slack in `array` after
/// `*pos >> 3` — confirm callers over-allocate storage accordingly.
pub fn BrotliWriteBits(n_bits: usize, bits: u64, pos: &mut usize, array: &mut [u8]) {
    let p = &mut array[(*pos >> 3)..];
    // Preserve the bits already present in the partially filled byte.
    let mut v: u64 = p[0] as (u64);
    v |= bits << (*pos & 7);
    BROTLI_UNALIGNED_STORE64(p, v);
    *pos = pos.wrapping_add(n_bits);
}
415
416pub(crate) fn store_meta_block_header(
417    len: usize,
418    is_uncompressed: bool,
419    storage_ix: &mut usize,
420    storage: &mut [u8],
421) {
422    let mut nibbles: u64 = 6;
423    BrotliWriteBits(1, 0, storage_ix, storage);
424    if len <= (1u32 << 16) as usize {
425        nibbles = 4;
426    } else if len <= (1u32 << 20) as usize {
427        nibbles = 5;
428    }
429    BrotliWriteBits(2, nibbles.wrapping_sub(4), storage_ix, storage);
430    BrotliWriteBits(
431        nibbles.wrapping_mul(4) as usize,
432        len.wrapping_sub(1) as u64,
433        storage_ix,
434        storage,
435    );
436    BrotliWriteBits(1, u64::from(is_uncompressed), storage_ix, storage);
437}
438
/// Clones `size_to_copy` elements from `src[src_offset..]` into
/// `dst[dst_offset..]`. Panics (like slice indexing) if either range is out
/// of bounds.
pub fn memcpy<T: Sized + Clone>(
    dst: &mut [T],
    dst_offset: usize,
    src: &[T],
    src_offset: usize,
    size_to_copy: usize,
) {
    let dst_end = dst_offset + size_to_copy;
    let src_end = src_offset + size_to_copy;
    dst[dst_offset..dst_end].clone_from_slice(&src[src_offset..src_end]);
}
/// Builds Huffman codes for the 64 command symbols and the 64 distance symbols
/// of `histogram`, writes the codes into `depth`/`bits`, and serializes both
/// trees into `storage`.
/// NOTE(review): the memcpy shuffles below permute between the storage order
/// used by the Emit* functions and the RFC alphabet order; several copies
/// deliberately overlap and are later partially overwritten — the statement
/// order is load-bearing. Offsets match the brotli C reference — do not
/// "simplify" without diffing against it.
fn BuildAndStoreCommandPrefixCode(
    histogram: &[u32],
    depth: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut tree = [HuffmanTree::new(0, 0, 0); 129];
    let mut cmd_depth: [u8; 704] = [0; 704];
    let mut cmd_bits: [u16; 64] = [0; 64];
    // Command symbols: depth limit 15; distance symbols: depth limit 14.
    BrotliCreateHuffmanTree(histogram, 64usize, 15i32, &mut tree[..], depth);
    BrotliCreateHuffmanTree(
        &histogram[64..],
        64usize,
        14i32,
        &mut tree[..],
        &mut depth[64..],
    );
    /* We have to jump through a few hoops here in order to compute
    the command bits because the symbols are in a different order than in
    the full alphabet. This looks complicated, but having the symbols
    in this order in the command bits saves a few branches in the Emit*
    functions. */
    memcpy(&mut cmd_depth[..], 0, depth, 24, 24);
    memcpy(&mut cmd_depth[..], 24, depth, 0, 8);
    memcpy(&mut cmd_depth[..], 32usize, depth, (48usize), 8usize);
    memcpy(&mut cmd_depth[..], 40usize, depth, (8usize), 8usize);
    memcpy(&mut cmd_depth[..], 48usize, depth, (56usize), 8usize);
    memcpy(&mut cmd_depth[..], 56usize, depth, (16usize), 8usize);
    BrotliConvertBitDepthsToSymbols(&mut cmd_depth[..], 64usize, &mut cmd_bits[..]);
    // Permute the computed bits back into Emit* storage order. Some of these
    // copies overwrite tails of earlier ones; the net effect matches the C code.
    memcpy(bits, 0, &cmd_bits[..], 24usize, 16usize);
    memcpy(bits, (8usize), &cmd_bits[..], 40usize, 8usize);
    memcpy(bits, (16usize), &cmd_bits[..], 56usize, 8usize);
    memcpy(bits, (24usize), &cmd_bits[..], 0, 48usize);
    memcpy(bits, (48usize), &cmd_bits[..], 32usize, 8usize);
    memcpy(bits, (56usize), &cmd_bits[..], 48usize, 8usize);
    BrotliConvertBitDepthsToSymbols(&mut depth[64..], 64usize, &mut bits[64..]);
    {
        // Expand the 64 command depths into the full 704-symbol alphabet
        // layout expected by BrotliStoreHuffmanTree.
        for item in cmd_depth[..64].iter_mut() {
            *item = 0;
        }
        //memset(&mut cmd_depth[..], 0i32, 64usize);
        memcpy(&mut cmd_depth[..], 0, depth, (24usize), 8usize);
        memcpy(&mut cmd_depth[..], 64usize, depth, (32usize), 8usize);
        memcpy(&mut cmd_depth[..], 128usize, depth, (40usize), 8usize);
        memcpy(&mut cmd_depth[..], 192usize, depth, (48usize), 8usize);
        memcpy(&mut cmd_depth[..], 384usize, depth, (56usize), 8usize);
        for i in 0usize..8usize {
            cmd_depth[(128usize).wrapping_add((8usize).wrapping_mul(i))] = depth[i];
            cmd_depth[(256usize).wrapping_add((8usize).wrapping_mul(i))] = depth[i.wrapping_add(8)];
            cmd_depth[(448usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(16)];
        }
        BrotliStoreHuffmanTree(
            &mut cmd_depth[..],
            704usize,
            &mut tree[..],
            storage_ix,
            storage,
        );
    }
    // Distance tree is stored in plain alphabet order.
    BrotliStoreHuffmanTree(
        &mut depth[64..],
        64usize,
        &mut tree[..],
        storage_ix,
        storage,
    );
}
518
/// Second pass: builds Huffman codes over the collected literals and command
/// codes, then serializes every command into `storage`, interleaving the
/// literal bytes of each insert command directly after its code.
fn StoreCommands<AllocHT: alloc::Allocator<HuffmanTree>>(
    mht: &mut AllocHT,
    mut literals: &[u8],
    num_literals: usize,
    commands: &[u32],
    num_commands: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    // Number of extra bits carried by each of the 128 command codes
    // (insert codes 0..24, copy codes 24..64, distance codes 64..128).
    static kNumExtraBits: [u32; 128] = [
        0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 12, 14, 24, 0, 0, 0, 0, 0,
        0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6,
        7, 8, 9, 10, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5,
        5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17,
        18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24,
    ];
    // Base insert length for each insert code; actual length = base + extra bits.
    static kInsertOffset: [u32; 24] = [
        0, 1, 2, 3, 4, 5, 6, 8, 10, 14, 18, 26, 34, 50, 66, 98, 130, 194, 322, 578, 1090, 2114,
        6210, 22594,
    ];
    let mut lit_depths: [u8; 256] = [0; 256];
    let mut lit_bits: [u16; 256] = [0; 256]; // maybe return this instead
    let mut lit_histo: [u32; 256] = [0; 256]; // maybe return this instead of init
    let mut cmd_depths: [u8; 128] = [0; 128];
    let mut cmd_bits: [u16; 128] = [0; 128];
    let mut cmd_histo: [u32; 128] = [0; 128];
    let mut i: usize;
    // Literal histogram over the bytes collected in the first pass.
    for i in 0usize..num_literals {
        let _rhs = 1;
        let _lhs = &mut lit_histo[literals[i] as usize];
        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
    }
    // Build and emit the literal Huffman tree (max depth 8).
    BrotliBuildAndStoreHuffmanTreeFast(
        mht,
        &lit_histo[..],
        num_literals,
        8usize,
        &mut lit_depths[..],
        &mut lit_bits[..],
        storage_ix,
        storage,
    );
    // Command-code histogram: the code lives in the low byte of each word.
    i = 0usize;
    while i < num_commands {
        {
            let code: u32 = commands[i] & 0xffu32;
            {
                let _rhs = 1;
                let _lhs = &mut cmd_histo[code as usize];
                *_lhs = (*_lhs).wrapping_add(_rhs as u32);
            }
        }
        i = i.wrapping_add(1);
    }
    // Bump codes 1, 2, 64 and 84 so they are always present in the tree;
    // presumably this keeps rarely-used codes encodable — matches the C
    // reference, TODO(review): confirm rationale there.
    {
        let _rhs = 1i32;
        let _lhs = &mut cmd_histo[1];
        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
    }
    {
        let _rhs = 1i32;
        let _lhs = &mut cmd_histo[2];
        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
    }
    {
        let _rhs = 1i32;
        let _lhs = &mut cmd_histo[64];
        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
    }
    {
        let _rhs = 1i32;
        let _lhs = &mut cmd_histo[84];
        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
    }
    BuildAndStoreCommandPrefixCode(
        &mut cmd_histo[..],
        &mut cmd_depths[..],
        &mut cmd_bits[..],
        storage_ix,
        storage,
    );
    // Serialize: code, its extra bits, then (for insert codes < 24) the
    // literal bytes covered by that insert, consuming `literals` as we go.
    for i in 0usize..num_commands {
        let cmd: u32 = commands[i];
        let code: u32 = cmd & 0xffu32;
        let extra: u32 = cmd >> 8;
        BrotliWriteBits(
            cmd_depths[code as usize] as usize,
            cmd_bits[code as usize] as (u64),
            storage_ix,
            storage,
        );
        BrotliWriteBits(
            kNumExtraBits[code as usize] as usize,
            extra as (u64),
            storage_ix,
            storage,
        );
        if code < 24u32 {
            let insert: u32 = kInsertOffset[code as usize].wrapping_add(extra);
            for literal in literals[..(insert as usize)].iter() {
                let lit: u8 = *literal;
                BrotliWriteBits(
                    lit_depths[lit as usize] as usize,
                    lit_bits[lit as usize] as (u64),
                    storage_ix,
                    storage,
                );
            }
            literals = &literals[insert as usize..];
        }
    }
}
631fn EmitUncompressedMetaBlock(
632    input: &[u8],
633    input_size: usize,
634    storage_ix: &mut usize,
635    storage: &mut [u8],
636) {
637    store_meta_block_header(input_size, true, storage_ix, storage);
638    *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
639    memcpy(storage, (*storage_ix >> 3), input, 0, input_size);
640    *storage_ix = storage_ix.wrapping_add(input_size << 3);
641    storage[(*storage_ix >> 3)] = 0u8;
642}
643
#[allow(unused_variables)]
#[inline(always)]
/// Drives the two-pass compression of `base_ip[..input_size]` in blocks of at
/// most `kCompressFragmentTwoPassBlockSize` bytes: pass one collects commands
/// and literals into the scratch buffers, pass two either entropy-codes them
/// (when `ShouldCompress` approves) or falls back to an uncompressed
/// meta-block.
/// NOTE(review): `is_last` is currently unused here (see the #[allow]);
/// presumably the final-block bit is written by the caller.
fn compress_fragment_two_pass_impl<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    base_ip: &[u8],
    mut input_size: usize,
    is_last: bool,
    command_buf: &mut [u32],
    literal_buf: &mut [u8],
    table: &mut [i32],
    table_bits: usize,
    min_match: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut input_index: usize = 0usize;
    while input_size > 0usize {
        let block_size: usize = min(input_size, kCompressFragmentTwoPassBlockSize);
        let mut num_literals: usize = 0;
        let mut num_commands: usize = 0;
        {
            // Pass 1: fill the scratch buffers; the cursors advance as they
            // are written, the counters record how much was produced.
            let mut literals = &mut literal_buf[..];
            let mut commands = &mut command_buf[..];
            CreateCommands(
                input_index,
                block_size,
                input_size,
                base_ip,
                table,
                table_bits,
                min_match,
                &mut literals,
                &mut num_literals,
                &mut commands,
                &mut num_commands,
            );
        }
        // Pass 2: entropy-code the commands, or store the block raw if the
        // literal ratio suggests compression would not pay off.
        if ShouldCompress(&base_ip[input_index..], block_size, num_literals) {
            store_meta_block_header(block_size, false, storage_ix, storage);
            // 13 zero bits: single block type/count metadata for this meta-block.
            BrotliWriteBits(13usize, 0, storage_ix, storage);
            StoreCommands(
                m,
                literal_buf,
                num_literals,
                command_buf,
                num_commands,
                storage_ix,
                storage,
            );
        } else {
            EmitUncompressedMetaBlock(&base_ip[input_index..], block_size, storage_ix, storage);
        }
        input_index = input_index.wrapping_add(block_size);
        input_size = input_size.wrapping_sub(block_size);
    }
}
// Generates a thin, monomorphized wrapper around
// `compress_fragment_two_pass_impl` for a fixed hash-table size of
// `1 << $table_bits` buckets. Small tables (fewer than 1 << 15 buckets)
// search with a 4-byte minimum match; the largest tables use 6 bytes.
macro_rules! compress_specialization {
    ($table_bits : expr, $fname: ident) => {
        fn $fname<AllocHT: alloc::Allocator<HuffmanTree>>(
            alloc_ht: &mut AllocHT,
            input: &[u8],
            input_size: usize,
            is_last: bool,
            command_buf: &mut [u32],
            literal_buf: &mut [u8],
            table: &mut [i32],
            storage_ix: &mut usize,
            storage: &mut [u8],
        ) {
            // Minimum match length depends only on the (compile-time) table size.
            let min_match = if $table_bits >= 15 { 6 } else { 4 };
            compress_fragment_two_pass_impl(
                alloc_ht,
                input,
                input_size,
                is_last,
                command_buf,
                literal_buf,
                table,
                $table_bits,
                min_match,
                storage_ix,
                storage,
            );
        }
    };
}
// One specialization per supported hash-table size (2^8 .. 2^17 buckets);
// `compress_fragment_two_pass` below dispatches to these by `table_bits`.
compress_specialization!(8, BrotliCompressFragmentTwoPassImpl8);
compress_specialization!(9, BrotliCompressFragmentTwoPassImpl9);
compress_specialization!(10, BrotliCompressFragmentTwoPassImpl10);
compress_specialization!(11, BrotliCompressFragmentTwoPassImpl11);
compress_specialization!(12, BrotliCompressFragmentTwoPassImpl12);
compress_specialization!(13, BrotliCompressFragmentTwoPassImpl13);
compress_specialization!(14, BrotliCompressFragmentTwoPassImpl14);
compress_specialization!(15, BrotliCompressFragmentTwoPassImpl15);
compress_specialization!(16, BrotliCompressFragmentTwoPassImpl16);
compress_specialization!(17, BrotliCompressFragmentTwoPassImpl17);
740
// Moves the output bit cursor back to `new_storage_ix`, discarding everything
// written after it. The partially-written byte at the new position keeps only
// its low `new_storage_ix & 7` bits; later writes OR fresh bits into the rest.
fn RewindBitPosition(new_storage_ix: usize, storage_ix: &mut usize, storage: &mut [u8]) {
    let bits_to_keep = new_storage_ix & 7;
    let keep_mask = ((1u32 << bits_to_keep).wrapping_sub(1)) as u8;
    // Clear the stale high bits of the byte the cursor now points into.
    storage[new_storage_ix >> 3] &= keep_mask;
    *storage_ix = new_storage_ix;
}
751
752
753pub(crate) fn compress_fragment_two_pass<AllocHT: alloc::Allocator<HuffmanTree>>(
754    m: &mut AllocHT,
755    input: &[u8],
756    input_size: usize,
757    is_last: bool,
758    command_buf: &mut [u32],
759    literal_buf: &mut [u8],
760    table: &mut [i32],
761    table_size: usize,
762    storage_ix: &mut usize,
763    storage: &mut [u8],
764) {
765    let initial_storage_ix: usize = *storage_ix;
766    let table_bits: usize = Log2FloorNonZero(table_size as u64) as usize;
767    if table_bits == 8usize {
768        BrotliCompressFragmentTwoPassImpl8(
769            m,
770            input,
771            input_size,
772            is_last,
773            command_buf,
774            literal_buf,
775            table,
776            storage_ix,
777            storage,
778        );
779    }
780    if table_bits == 9usize {
781        BrotliCompressFragmentTwoPassImpl9(
782            m,
783            input,
784            input_size,
785            is_last,
786            command_buf,
787            literal_buf,
788            table,
789            storage_ix,
790            storage,
791        );
792    }
793    if table_bits == 10usize {
794        BrotliCompressFragmentTwoPassImpl10(
795            m,
796            input,
797            input_size,
798            is_last,
799            command_buf,
800            literal_buf,
801            table,
802            storage_ix,
803            storage,
804        );
805    }
806    if table_bits == 11usize {
807        BrotliCompressFragmentTwoPassImpl11(
808            m,
809            input,
810            input_size,
811            is_last,
812            command_buf,
813            literal_buf,
814            table,
815            storage_ix,
816            storage,
817        );
818    }
819    if table_bits == 12usize {
820        BrotliCompressFragmentTwoPassImpl12(
821            m,
822            input,
823            input_size,
824            is_last,
825            command_buf,
826            literal_buf,
827            table,
828            storage_ix,
829            storage,
830        );
831    }
832    if table_bits == 13usize {
833        BrotliCompressFragmentTwoPassImpl13(
834            m,
835            input,
836            input_size,
837            is_last,
838            command_buf,
839            literal_buf,
840            table,
841            storage_ix,
842            storage,
843        );
844    }
845    if table_bits == 14usize {
846        BrotliCompressFragmentTwoPassImpl14(
847            m,
848            input,
849            input_size,
850            is_last,
851            command_buf,
852            literal_buf,
853            table,
854            storage_ix,
855            storage,
856        );
857    }
858    if table_bits == 15usize {
859        BrotliCompressFragmentTwoPassImpl15(
860            m,
861            input,
862            input_size,
863            is_last,
864            command_buf,
865            literal_buf,
866            table,
867            storage_ix,
868            storage,
869        );
870    }
871    if table_bits == 16usize {
872        BrotliCompressFragmentTwoPassImpl16(
873            m,
874            input,
875            input_size,
876            is_last,
877            command_buf,
878            literal_buf,
879            table,
880            storage_ix,
881            storage,
882        );
883    }
884    if table_bits == 17usize {
885        BrotliCompressFragmentTwoPassImpl17(
886            m,
887            input,
888            input_size,
889            is_last,
890            command_buf,
891            literal_buf,
892            table,
893            storage_ix,
894            storage,
895        );
896    }
897    if storage_ix.wrapping_sub(initial_storage_ix) > (31usize).wrapping_add(input_size << 3) {
898        RewindBitPosition(initial_storage_ix, storage_ix, storage);
899        EmitUncompressedMetaBlock(input, input_size, storage_ix, storage);
900    }
901    if is_last {
902        BrotliWriteBits(1, 1, storage_ix, storage);
903        BrotliWriteBits(1, 1, storage_ix, storage);
904        *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
905    }
906}