brotli/enc/brotli_bit_stream.rs

1#![allow(unknown_lints)]
2#![allow(unused_macros)]
3
4use core::cmp::{max, min};
5#[cfg(feature = "std")]
6use std::io::Write;
7
8use super::super::alloc::{Allocator, SliceWrapper, SliceWrapperMut};
9use super::super::dictionary::{
10    kBrotliDictionary, kBrotliDictionaryOffsetsByLength, kBrotliDictionarySizeBitsByLength,
11};
12use super::super::transform::TransformDictionaryWord;
13use super::super::{alloc, core};
14use super::block_split::BlockSplit;
15use super::combined_alloc::BrotliAlloc;
16use super::command::{Command, GetCopyLengthCode, GetInsertLengthCode};
17use super::constants::{
18    kCodeLengthBits, kCodeLengthDepth, kCopyBase, kCopyExtra, kInsBase, kInsExtra,
19    kNonZeroRepsBits, kNonZeroRepsDepth, kSigned3BitContextLookup, kStaticCommandCodeBits,
20    kStaticCommandCodeDepth, kStaticDistanceCodeBits, kStaticDistanceCodeDepth, kUTF8ContextLookup,
21    kZeroRepsBits, kZeroRepsDepth, BROTLI_CONTEXT_LUT, BROTLI_NUM_BLOCK_LEN_SYMBOLS,
22    BROTLI_NUM_COMMAND_SYMBOLS, BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS, BROTLI_NUM_LITERAL_SYMBOLS,
23};
24use super::context_map_entropy::{speed_to_tuple, ContextMapEntropy, SpeedAndMax};
25use super::entropy_encode::{
26    BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, BrotliSetDepth,
27    BrotliWriteHuffmanTree, HuffmanComparator, HuffmanTree, SortHuffmanTreeItems,
28};
29use super::histogram::{
30    ContextType, HistogramAddItem, HistogramCommand, HistogramDistance, HistogramLiteral,
31};
32use super::input_pair::{InputPair, InputReference, InputReferenceMut};
33use super::interface::StaticCommand;
34use super::static_dict::kNumDistanceCacheEntries;
35use super::util::floatX;
36use super::{find_stride, interface, prior_eval, stride_eval};
37use crate::enc::backward_references::BrotliEncoderParams;
38use crate::enc::combined_alloc::{alloc_default, alloc_or_default, allocate};
39use crate::VERSION;
40
41pub struct PrefixCodeRange {
42    pub offset: u32,
43    pub nbits: u32,
44}
45pub const MAX_SIMPLE_DISTANCE_ALPHABET_SIZE: usize = 140;
46
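// Size of the brotli sliding window for a given lgwin: (1 << lgwin) - 16 bytes,
// matching the window size defined by the format (RFC 7932).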
47fn window_size_from_lgwin(lgwin: i32) -> usize {
48    (1 << lgwin) - 16usize
49}
50
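// Buffers the IR (StaticCommand) stream produced while recoding a metablock.
// Commands are appended through the CommandProcessor impl below and handed to
// the caller's callback in free(), together with the prediction mode and input.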
51struct CommandQueue<'a, Alloc: BrotliAlloc + 'a> {
52    mb: InputPair<'a>,
53    // TODO: delete unused fields
54    #[allow(dead_code)]
55    mb_byte_offset: usize,
56    mc: &'a mut Alloc,
57    queue: <Alloc as Allocator<StaticCommand>>::AllocatedMemory,
58    pred_mode: interface::PredictionModeContextMap<InputReferenceMut<'a>>,
59    loc: usize,
60    entropy_tally_scratch: find_stride::EntropyTally<Alloc>,
61    best_strides_per_block_type: <Alloc as Allocator<u8>>::AllocatedMemory,
62    entropy_pyramid: find_stride::EntropyPyramid<Alloc>,
63    context_map_entropy: ContextMapEntropy<'a, Alloc>,
64    #[allow(dead_code)]
65    stride_detection_quality: u8,
66    #[allow(dead_code)]
67    high_entropy_detection_quality: u8,
68    block_type_literal: u8,
69    #[allow(dead_code)]
70    best_stride_index: usize,
71    overfull: bool,
72}
73
74impl<'a, Alloc: BrotliAlloc> CommandQueue<'a, Alloc> {
75    fn new(
76        alloc: &'a mut Alloc,
77        num_commands: usize,
78        pred_mode: interface::PredictionModeContextMap<InputReferenceMut<'a>>,
79        mb: InputPair<'a>,
80        stride_detection_quality: u8,
81        high_entropy_detection_quality: u8,
82        context_map_entropy: ContextMapEntropy<'a, Alloc>,
83        best_strides: <Alloc as Allocator<u8>>::AllocatedMemory,
84        entropy_tally_scratch: find_stride::EntropyTally<Alloc>,
85        entropy_pyramid: find_stride::EntropyPyramid<Alloc>,
86    ) -> CommandQueue<'a, Alloc> {
87        // assume block-type switches (which may chop a literal run in two) add no more than 1/16 extra commands;
88        // also reserve room for the first btypel switch and a potential wrap around the ring buffer
89        let queue = allocate::<StaticCommand, _>(alloc, num_commands * 17 / 16 + 4);
90        CommandQueue {
91            mc: alloc,
92            queue, // always need a spare command in case the ring buffer splits a literal into two
93            pred_mode,
94            mb,
95            mb_byte_offset: 0,
96            loc: 0,
97            best_strides_per_block_type: best_strides,
98            entropy_tally_scratch,
99            entropy_pyramid,
100            stride_detection_quality,
101            high_entropy_detection_quality,
102            context_map_entropy,
103            block_type_literal: 0,
104            best_stride_index: 0,
105            overfull: false,
106        }
107    }
108    fn full(&self) -> bool {
109        self.loc == self.queue.len()
110    }
111    fn error_if_full(&mut self) {
112        if self.full() {
113            self.overfull = true;
114        }
115    }
116    fn clear(&mut self) {
117        self.loc = 0;
118        self.block_type_literal = 0;
119    }
120    fn free<Cb>(&mut self, callback: &mut Cb) -> Result<(), ()>
121    where
122        Cb: FnMut(
123            &mut interface::PredictionModeContextMap<InputReferenceMut>,
124            &mut [interface::StaticCommand],
125            InputPair,
126            &mut Alloc,
127        ),
128    {
129        callback(
130            &mut self.pred_mode,
131            self.queue.slice_mut().split_at_mut(self.loc).0,
132            self.mb,
133            self.mc,
134        );
135        self.clear();
136        self.entropy_tally_scratch.free(self.mc);
137        self.entropy_pyramid.free(self.mc);
138        self.context_map_entropy.free(self.mc);
139        <Alloc as Allocator<StaticCommand>>::free_cell(self.mc, core::mem::take(&mut self.queue));
140        <Alloc as Allocator<u8>>::free_cell(
141            self.mc,
142            core::mem::take(&mut self.best_strides_per_block_type),
143        );
144        if self.overfull {
145            return Err(());
146        }
147        Ok(())
148    }
149}
150
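// push() doubles the backing allocation when the queue is full; if it is still
// full after the attempted growth, the overfull flag is set and later reported
// as Err from free().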
151impl<'a, Alloc: BrotliAlloc> interface::CommandProcessor<'a> for CommandQueue<'a, Alloc> {
152    fn push(&mut self, val: interface::Command<InputReference<'a>>) {
153        if self.full() {
154            let mut tmp = allocate::<StaticCommand, _>(self.mc, self.queue.slice().len() * 2);
155            tmp.slice_mut()
156                .split_at_mut(self.queue.slice().len())
157                .0
158                .clone_from_slice(self.queue.slice());
159            <Alloc as Allocator<StaticCommand>>::free_cell(
160                self.mc,
161                core::mem::replace(&mut self.queue, tmp),
162            );
163        }
164        if !self.full() {
165            self.queue.slice_mut()[self.loc] = val.freeze();
166            self.loc += 1;
167        } else {
168            self.error_if_full();
169        }
170    }
171    fn push_block_switch_literal(&mut self, block_type: u8) {
172        self.push(interface::Command::BlockSwitchLiteral(
173            interface::LiteralBlockSwitch::new(block_type, 0),
174        ))
175    }
176}
177
178#[cfg(feature = "std")]
179fn warn_on_missing_free() {
180    let _err = ::std::io::stderr()
181        .write(b"Need to free entropy_tally_scratch before dropping CommandQueue\n");
182}
183#[cfg(not(feature = "std"))]
184fn warn_on_missing_free() {
185    // no way to warn in this case
186}
187impl<'a, Alloc: BrotliAlloc> Drop for CommandQueue<'a, Alloc> {
188    fn drop(&mut self) {
189        if !self.entropy_tally_scratch.is_free() {
190            warn_on_missing_free();
191        }
192    }
193}
194#[cfg(not(feature = "billing"))]
195fn best_singleton_speed_log(_name: &str, _data: &[SpeedAndMax; 2], _cost: &[floatX; 2]) {}
196#[cfg(feature = "billing")]
197fn best_singleton_speed_log(name: &str, data: &[SpeedAndMax; 2], cost: &[floatX; 2]) {
198    println!(
199        "{} hi cost: {} lo cost: {}   speeds {:?} {:?}",
200        name, cost[1], cost[0], data[1], data[0]
201    );
202}
203
204#[cfg(not(feature = "billing"))]
205fn best_speed_log(_name: &str, _data: &[SpeedAndMax; 2], _cost: &[floatX; 2]) {}
206#[cfg(feature = "billing")]
207fn best_speed_log(name: &str, data: &[SpeedAndMax; 2], cost: &[floatX; 2]) {
208    for high in 0..2 {
209        println!(
210            "{} Speed [ inc: {}, max: {}, algo: {} ] cost: {}",
211            name,
212            if high != 0 { "hi" } else { "lo" },
213            data[high].0,
214            data[high].1,
215            cost[high]
216        );
217    }
218}
219
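// Replays a metablock's commands through any CommandProcessor: literal runs are
// split at literal block-type boundaries, distances are resolved through the
// distance cache, copies that reach beyond the window are decoded as static
// dictionary words, and command/distance block switches are emitted as they
// occur. Returns the updated RecoderState (bytes encoded so far).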
220fn process_command_queue<'a, CmdProcessor: interface::CommandProcessor<'a>>(
221    command_queue: &mut CmdProcessor,
222    input: InputPair<'a>,
223    commands: &[Command],
224    dist_cache: &[i32; kNumDistanceCacheEntries],
225    mut recoder_state: RecoderState,
226    block_type: &MetaBlockSplitRefs,
227    params: &BrotliEncoderParams,
228    context_type: Option<ContextType>,
229) -> RecoderState {
230    let mut input_iter = input;
231    let mut local_dist_cache = [0i32; kNumDistanceCacheEntries];
232    local_dist_cache.clone_from_slice(&dist_cache[..]);
233    let mut btypel_counter = 0usize;
234    let mut btypec_counter = 0usize;
235    let mut btyped_counter = 0usize;
236    let mut btypel_sub = if block_type.btypel.num_types == 1 {
237        1u32 << 31
238    } else {
239        block_type.btypel.lengths[0]
240    };
241    let mut btypec_sub = if block_type.btypec.num_types == 1 {
242        1u32 << 31
243    } else {
244        block_type.btypec.lengths[0]
245    };
246    let mut btyped_sub = if block_type.btyped.num_types == 1 {
247        1u32 << 31
248    } else {
249        block_type.btyped.lengths[0]
250    };
251    {
252        command_queue.push_block_switch_literal(0);
253    }
254    let mut mb_len = input.len();
255    for cmd in commands.iter() {
256        let (inserts, interim) = input_iter.split_at(min(cmd.insert_len_ as usize, mb_len));
257        recoder_state.num_bytes_encoded += inserts.len();
258        let _copy_cursor = input.len() - interim.len();
259        // let distance_context = cmd.distance_context();
260        let copylen_code = cmd.copy_len_code();
261
262        let (prev_dist_index, dist_offset) = cmd.distance_index_and_offset(&params.dist);
263        let final_distance: usize;
264        if prev_dist_index == 0 {
265            final_distance = dist_offset as usize;
266        } else {
267            final_distance =
268                (local_dist_cache[prev_dist_index - 1] as isize + dist_offset) as usize;
269        }
270        let copy_len = copylen_code as usize;
271        let actual_copy_len: usize;
272        let max_distance = min(
273            recoder_state.num_bytes_encoded,
274            window_size_from_lgwin(params.lgwin),
275        );
276        assert!(inserts.len() <= mb_len);
277        if inserts.len() != 0 {
278            let mut tmp_inserts = inserts;
279            while tmp_inserts.len() > btypel_sub as usize {
280                // the insert run crosses a literal block-type boundary, so split it:
281                let (in_a, in_b) = tmp_inserts.split_at(btypel_sub as usize);
282                if in_a.len() != 0 {
283                    if context_type.is_some() {
284                        command_queue.push_literals(&in_a);
285                    } else if params.high_entropy_detection_quality == 0 {
286                        command_queue.push_literals(&in_a);
287                    } else {
288                        command_queue.push_rand_literals(&in_a);
289                    }
290                }
291                mb_len -= in_a.len();
292                tmp_inserts = in_b;
293                btypel_counter += 1;
294                if block_type.btypel.types.len() > btypel_counter {
295                    btypel_sub = block_type.btypel.lengths[btypel_counter];
296                    command_queue
297                        .push_block_switch_literal(block_type.btypel.types[btypel_counter]);
298                } else {
299                    btypel_sub = 1u32 << 31;
300                }
301            }
302            if context_type.is_some() {
303                command_queue.push_literals(&tmp_inserts);
304            } else if params.high_entropy_detection_quality == 0 {
305                command_queue.push_literals(&tmp_inserts);
306            } else {
307                command_queue.push_rand_literals(&tmp_inserts);
308            }
309            if tmp_inserts.len() != 0 {
310                mb_len -= tmp_inserts.len();
311                btypel_sub -= tmp_inserts.len() as u32;
312            }
313        }
314        if final_distance > max_distance {
315            // distance lies beyond the sliding window: this is a static dictionary reference
316            assert!(copy_len >= 4);
317            assert!(copy_len < 25);
318            let dictionary_offset = final_distance - max_distance - 1;
319            let ndbits = kBrotliDictionarySizeBitsByLength[copy_len] as usize;
320            let action = dictionary_offset >> ndbits;
321            let word_sub_index = dictionary_offset & ((1 << ndbits) - 1);
322            let word_index =
323                word_sub_index * copy_len + kBrotliDictionaryOffsetsByLength[copy_len] as usize;
324            let raw_word = &kBrotliDictionary[word_index..word_index + copy_len];
325            let mut transformed_word = [0u8; 38];
326            actual_copy_len = TransformDictionaryWord(
327                &mut transformed_word[..],
328                raw_word,
329                copy_len as i32,
330                action as i32,
331            ) as usize;
332            if actual_copy_len <= mb_len {
333                command_queue.push(interface::Command::Dict(interface::DictCommand {
334                    word_size: copy_len as u8,
335                    transform: action as u8,
336                    final_size: actual_copy_len as u8,
337                    empty: 0,
338                    word_id: word_sub_index as u32,
339                }));
340                mb_len -= actual_copy_len;
341                assert_eq!(
342                    InputPair(
343                        InputReference {
344                            data: transformed_word.split_at(actual_copy_len).0,
345                            orig_offset: 0
346                        },
347                        InputReference::default()
348                    ),
349                    interim.split_at(actual_copy_len).0
350                );
351            } else if mb_len != 0 {
352                // truncated dictionary word: represent it as literals instead;
353                // it matched the dictionary, so it is not random noise and the rand-literal check is unnecessary
354                command_queue.push_literals(&interim.split_at(mb_len).0);
355                mb_len = 0;
356                assert_eq!(
357                    InputPair(
358                        InputReference {
359                            data: transformed_word.split_at(mb_len).0,
360                            orig_offset: 0
361                        },
362                        InputReference::default()
363                    ),
364                    interim.split_at(mb_len).0
365                );
366            }
367        } else {
368            actual_copy_len = min(mb_len, copy_len);
369            if actual_copy_len != 0 {
370                command_queue.push(interface::Command::Copy(interface::CopyCommand {
371                    distance: final_distance as u32,
372                    num_bytes: actual_copy_len as u32,
373                }));
374            }
375            mb_len -= actual_copy_len;
376            if prev_dist_index != 1 || dist_offset != 0 {
377                // update distance cache unless it's the "0 distance symbol"
378                let mut tmp_dist_cache = [0i32; kNumDistanceCacheEntries - 1];
379                tmp_dist_cache.clone_from_slice(&local_dist_cache[..kNumDistanceCacheEntries - 1]);
380                local_dist_cache[1..].clone_from_slice(&tmp_dist_cache[..]);
381                local_dist_cache[0] = final_distance as i32;
382            }
383        }
384        {
385            btypec_sub -= 1;
386            if btypec_sub == 0 {
387                btypec_counter += 1;
388                if block_type.btypec.types.len() > btypec_counter {
389                    btypec_sub = block_type.btypec.lengths[btypec_counter];
390                    command_queue.push(interface::Command::BlockSwitchCommand(
391                        interface::BlockSwitch(block_type.btypec.types[btypec_counter]),
392                    ));
393                } else {
394                    btypec_sub = 1u32 << 31;
395                }
396            }
397        }
398        if copy_len != 0 && cmd.cmd_prefix_ >= 128 {
399            btyped_sub -= 1;
400            if btyped_sub == 0 {
401                btyped_counter += 1;
402                if block_type.btyped.types.len() > btyped_counter {
403                    btyped_sub = block_type.btyped.lengths[btyped_counter];
404                    command_queue.push(interface::Command::BlockSwitchDistance(
405                        interface::BlockSwitch(block_type.btyped.types[btyped_counter]),
406                    ));
407                } else {
408                    btyped_sub = 1u32 << 31;
409                }
410            }
411        }
412
413        let (copied, remainder) = interim.split_at(actual_copy_len);
414        recoder_state.num_bytes_encoded += copied.len();
415        input_iter = remainder;
416    }
417    recoder_state
418}
419
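// Converts one metablock into the command IR: sets up the literal/distance
// context maps and prediction mode, optionally runs the stride, context-map
// adaptation and prior-bitmask detection passes (each by replaying the command
// stream), then replays the commands into a CommandQueue and hands the result
// to `callback`.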
420fn LogMetaBlock<'a, Alloc: BrotliAlloc, Cb>(
421    alloc: &mut Alloc,
422    commands: &[Command],
423    input0: &'a [u8],
424    input1: &'a [u8],
425    dist_cache: &[i32; kNumDistanceCacheEntries],
426    recoder_state: &mut RecoderState,
427    block_type: MetaBlockSplitRefs,
428    params: &BrotliEncoderParams,
429    context_type: Option<ContextType>,
430    callback: &mut Cb,
431) where
432    Cb: FnMut(
433        &mut interface::PredictionModeContextMap<InputReferenceMut>,
434        &mut [interface::StaticCommand],
435        InputPair,
436        &mut Alloc,
437    ),
438{
439    let mut local_literal_context_map = [0u8; 256 * 64];
440    let mut local_distance_context_map = [0u8; 256 * 64 + interface::DISTANCE_CONTEXT_MAP_OFFSET];
441    assert_eq!(
442        *block_type.btypel.types.iter().max().unwrap_or(&0) as u32 + 1,
443        block_type.btypel.num_types
444    );
445    assert_eq!(
446        *block_type.btypec.types.iter().max().unwrap_or(&0) as u32 + 1,
447        block_type.btypec.num_types
448    );
449    assert_eq!(
450        *block_type.btyped.types.iter().max().unwrap_or(&0) as u32 + 1,
451        block_type.btyped.num_types
452    );
453    if block_type.literal_context_map.len() <= 256 * 64 {
454        for (index, item) in block_type.literal_context_map.iter().enumerate() {
455            local_literal_context_map[index] = *item as u8;
456        }
457    }
458    if block_type.distance_context_map.len() <= 256 * 64 {
459        for (index, item) in block_type.distance_context_map.iter().enumerate() {
460            local_distance_context_map[interface::DISTANCE_CONTEXT_MAP_OFFSET + index] =
461                *item as u8;
462        }
463    }
464
465    let mut prediction_mode = interface::PredictionModeContextMap::<InputReferenceMut> {
466        literal_context_map: InputReferenceMut {
467            data: local_literal_context_map
468                .split_at_mut(block_type.literal_context_map.len())
469                .0,
470            orig_offset: 0,
471        },
472        predmode_speed_and_distance_context_map: InputReferenceMut {
473            data: local_distance_context_map
474                .split_at_mut(
475                    interface::PredictionModeContextMap::<InputReference>::size_of_combined_array(
476                        block_type.distance_context_map.len(),
477                    ),
478                )
479                .0,
480            orig_offset: 0,
481        },
482    };
483    for item in prediction_mode.get_mixing_values_mut().iter_mut() {
484        *item = prior_eval::WhichPrior::STRIDE1 as u8;
485    }
486    prediction_mode
487        .set_stride_context_speed([params.literal_adaptation[2], params.literal_adaptation[3]]);
488    prediction_mode
489        .set_context_map_speed([params.literal_adaptation[0], params.literal_adaptation[1]]);
490    prediction_mode.set_combined_stride_context_speed([
491        params.literal_adaptation[0],
492        params.literal_adaptation[1],
493    ]);
494
495    prediction_mode.set_literal_prediction_mode(interface::LiteralPredictionModeNibble(
496        context_type.unwrap_or(ContextType::CONTEXT_LSB6) as u8,
497    ));
498    let mut entropy_tally_scratch;
499    let mut entropy_pyramid;
500    if params.stride_detection_quality == 1 || params.stride_detection_quality == 2 {
501        entropy_tally_scratch = find_stride::EntropyTally::<Alloc>::new(alloc, None);
502        entropy_pyramid = find_stride::EntropyPyramid::<Alloc>::new(alloc);
503        entropy_pyramid.populate(input0, input1, &mut entropy_tally_scratch);
504    } else {
505        entropy_tally_scratch = find_stride::EntropyTally::<Alloc>::disabled_placeholder(alloc);
506        entropy_pyramid = find_stride::EntropyPyramid::<Alloc>::disabled_placeholder(alloc);
507    }
508    let input = InputPair(
509        InputReference {
510            data: input0,
511            orig_offset: 0,
512        },
513        InputReference {
514            data: input1,
515            orig_offset: input0.len(),
516        },
517    );
518    let mut best_strides = alloc_default::<u8, Alloc>();
519    if params.stride_detection_quality > 2 {
520        let mut stride_selector =
521            stride_eval::StrideEval::<Alloc>::new(alloc, input, &prediction_mode, params);
522        process_command_queue(
523            &mut stride_selector,
524            input,
525            commands,
526            dist_cache,
527            *recoder_state,
528            &block_type,
529            params,
530            context_type,
531        );
532        let ntypes = stride_selector.num_types();
533        best_strides = allocate::<u8, _>(stride_selector.alloc(), ntypes);
534        stride_selector.choose_stride(best_strides.slice_mut());
535    }
536    let mut context_map_entropy = ContextMapEntropy::<Alloc>::new(
537        alloc,
538        input,
539        entropy_pyramid.stride_last_level_range(),
540        prediction_mode,
541        params.cdf_adaptation_detection,
542    );
543    if params.cdf_adaptation_detection != 0 {
544        process_command_queue(
545            &mut context_map_entropy,
546            input,
547            commands,
548            dist_cache,
549            *recoder_state,
550            &block_type,
551            params,
552            context_type,
553        );
554        {
555            let (cm_speed, cm_cost) = context_map_entropy.best_singleton_speeds(true, false);
556            let (stride_speed, stride_cost) =
557                context_map_entropy.best_singleton_speeds(false, false);
558            let (combined_speed, combined_cost) =
559                context_map_entropy.best_singleton_speeds(false, true);
560            best_singleton_speed_log("CM", &cm_speed, &cm_cost);
561            best_singleton_speed_log("stride", &stride_speed, &stride_cost);
562            best_singleton_speed_log("combined", &combined_speed, &combined_cost);
563        }
564
565        let cm_speed = context_map_entropy.best_speeds(true, false);
566        let stride_speed = context_map_entropy.best_speeds(false, false);
567        let combined_speed = context_map_entropy.best_speeds(false, true);
568        let acost = context_map_entropy.best_speeds_costs(true, false);
569        let bcost = context_map_entropy.best_speeds_costs(false, false);
570        let ccost = context_map_entropy.best_speeds_costs(false, true);
571        context_map_entropy
572            .prediction_mode_mut()
573            .set_stride_context_speed(speed_to_tuple(stride_speed));
574        context_map_entropy
575            .prediction_mode_mut()
576            .set_context_map_speed(speed_to_tuple(cm_speed));
577        context_map_entropy
578            .prediction_mode_mut()
579            .set_combined_stride_context_speed(speed_to_tuple(combined_speed));
580
581        best_speed_log("CM", &cm_speed, &acost);
582        best_speed_log("Stride", &stride_speed, &bcost);
583        best_speed_log("StrideCombined", &combined_speed, &ccost);
584    }
585    let mut prior_selector = prior_eval::PriorEval::<Alloc>::new(
586        alloc,
587        input,
588        entropy_pyramid.stride_last_level_range(),
589        context_map_entropy.take_prediction_mode(),
590        params,
591    );
592    if params.prior_bitmask_detection != 0 {
593        process_command_queue(
594            &mut prior_selector,
595            input,
596            commands,
597            dist_cache,
598            *recoder_state,
599            &block_type,
600            params,
601            context_type,
602        );
603        prior_selector.choose_bitmask();
604    }
605    let prediction_mode = prior_selector.take_prediction_mode();
606    prior_selector.free(alloc);
607    let mut command_queue = CommandQueue::new(
608        alloc,
609        commands.len(),
610        prediction_mode,
611        input,
612        params.stride_detection_quality,
613        params.high_entropy_detection_quality,
614        context_map_entropy,
615        best_strides,
616        entropy_tally_scratch,
617        entropy_pyramid,
618    );
619
620    *recoder_state = process_command_queue(
621        &mut command_queue,
622        input,
623        commands,
624        dist_cache,
625        *recoder_state,
626        &block_type,
627        params,
628        context_type,
629    );
630    command_queue.free(callback).unwrap();
631    //   ::std::io::stderr().write(input0).unwrap();
632    //   ::std::io::stderr().write(input1).unwrap();
633}
634
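// Block-count prefix code table (RFC 7932): for each of the 26 length symbols,
// the base block length (`offset`) and the number of extra bits (`nbits`).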
635static kBlockLengthPrefixCode: [PrefixCodeRange; BROTLI_NUM_BLOCK_LEN_SYMBOLS] = [
636    PrefixCodeRange {
637        offset: 1u32,
638        nbits: 2u32,
639    },
640    PrefixCodeRange {
641        offset: 5u32,
642        nbits: 2u32,
643    },
644    PrefixCodeRange {
645        offset: 9u32,
646        nbits: 2u32,
647    },
648    PrefixCodeRange {
649        offset: 13u32,
650        nbits: 2u32,
651    },
652    PrefixCodeRange {
653        offset: 17u32,
654        nbits: 3u32,
655    },
656    PrefixCodeRange {
657        offset: 25u32,
658        nbits: 3u32,
659    },
660    PrefixCodeRange {
661        offset: 33u32,
662        nbits: 3u32,
663    },
664    PrefixCodeRange {
665        offset: 41u32,
666        nbits: 3u32,
667    },
668    PrefixCodeRange {
669        offset: 49u32,
670        nbits: 4u32,
671    },
672    PrefixCodeRange {
673        offset: 65u32,
674        nbits: 4u32,
675    },
676    PrefixCodeRange {
677        offset: 81u32,
678        nbits: 4u32,
679    },
680    PrefixCodeRange {
681        offset: 97u32,
682        nbits: 4u32,
683    },
684    PrefixCodeRange {
685        offset: 113u32,
686        nbits: 5u32,
687    },
688    PrefixCodeRange {
689        offset: 145u32,
690        nbits: 5u32,
691    },
692    PrefixCodeRange {
693        offset: 177u32,
694        nbits: 5u32,
695    },
696    PrefixCodeRange {
697        offset: 209u32,
698        nbits: 5u32,
699    },
700    PrefixCodeRange {
701        offset: 241u32,
702        nbits: 6u32,
703    },
704    PrefixCodeRange {
705        offset: 305u32,
706        nbits: 6u32,
707    },
708    PrefixCodeRange {
709        offset: 369u32,
710        nbits: 7u32,
711    },
712    PrefixCodeRange {
713        offset: 497u32,
714        nbits: 8u32,
715    },
716    PrefixCodeRange {
717        offset: 753u32,
718        nbits: 9u32,
719    },
720    PrefixCodeRange {
721        offset: 1265u32,
722        nbits: 10u32,
723    },
724    PrefixCodeRange {
725        offset: 2289u32,
726        nbits: 11u32,
727    },
728    PrefixCodeRange {
729        offset: 4337u32,
730        nbits: 12u32,
731    },
732    PrefixCodeRange {
733        offset: 8433u32,
734        nbits: 13u32,
735    },
736    PrefixCodeRange {
737        offset: 16625u32,
738        nbits: 24u32,
739    },
740];
741
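// Appends the low `n_bits` bits of `bits` to the output at bit position `*pos`
// (little-endian within bytes). Requires n_bits <= 56, the unused high bits of
// `bits` to be zero, and at least 8 writable bytes starting at `*pos / 8`; the
// current partial byte is read back and OR-ed so previously written bits are
// preserved.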
742fn BrotliWriteBits(n_bits: u8, bits: u64, pos: &mut usize, array: &mut [u8]) {
743    assert_eq!(bits >> n_bits, 0);
744    assert!(n_bits <= 56);
745    let ptr_offset: usize = ((*pos >> 3) as u32) as usize;
746    let mut v = array[ptr_offset] as u64;
747    v |= bits << ((*pos) as u64 & 7);
748    array[ptr_offset + 7] = (v >> 56) as u8;
749    array[ptr_offset + 6] = ((v >> 48) & 0xff) as u8;
750    array[ptr_offset + 5] = ((v >> 40) & 0xff) as u8;
751    array[ptr_offset + 4] = ((v >> 32) & 0xff) as u8;
752    array[ptr_offset + 3] = ((v >> 24) & 0xff) as u8;
753    array[ptr_offset + 2] = ((v >> 16) & 0xff) as u8;
754    array[ptr_offset + 1] = ((v >> 8) & 0xff) as u8;
755    array[ptr_offset] = (v & 0xff) as u8;
756    *pos += n_bits as usize
757}
758
759fn BrotliWriteBitsPrepareStorage(pos: usize, array: &mut [u8]) {
760    assert_eq!(pos & 7, 0);
761    array[pos >> 3] = 0;
762}
763
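// Serializes the code-length code: 2 bits for how many leading kStorageOrder
// entries to skip, then the depth of each remaining code-length symbol (in
// kStorageOrder order), each encoded with a fixed prefix code
// (kHuffmanBitLengthHuffmanCode*).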
764fn BrotliStoreHuffmanTreeOfHuffmanTreeToBitMask(
765    num_codes: i32,
766    code_length_bitdepth: &[u8],
767    storage_ix: &mut usize,
768    storage: &mut [u8],
769) {
770    static kStorageOrder: [u8; 18] = [1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15];
771    static kHuffmanBitLengthHuffmanCodeSymbols: [u8; 6] = [0, 7, 3, 2, 1, 15];
772    static kHuffmanBitLengthHuffmanCodeBitLengths: [u8; 6] = [2, 4, 3, 2, 2, 4];
773    let mut skip_some: u64 = 0u64;
774    let mut codes_to_store: u64 = 18;
775    if num_codes > 1i32 {
776        'break5: while codes_to_store > 0 {
777            {
778                if code_length_bitdepth
779                    [(kStorageOrder[codes_to_store.wrapping_sub(1) as usize] as usize)]
780                    as i32
781                    != 0i32
782                {
783                    break 'break5;
784                }
785            }
786            codes_to_store = codes_to_store.wrapping_sub(1);
787        }
788    }
789    if code_length_bitdepth[(kStorageOrder[0] as usize)] as i32 == 0i32
790        && (code_length_bitdepth[(kStorageOrder[1] as usize)] as i32 == 0i32)
791    {
792        skip_some = 2;
793        if code_length_bitdepth[(kStorageOrder[2] as usize)] as i32 == 0i32 {
794            skip_some = 3;
795        }
796    }
797    BrotliWriteBits(2, skip_some, storage_ix, storage);
798
799    for i in skip_some..codes_to_store {
800        let l = code_length_bitdepth[kStorageOrder[i as usize] as usize] as usize;
801        BrotliWriteBits(
802            kHuffmanBitLengthHuffmanCodeBitLengths[l],
803            kHuffmanBitLengthHuffmanCodeSymbols[l] as u64,
804            storage_ix,
805            storage,
806        );
807    }
808}
809
810fn BrotliStoreHuffmanTreeToBitMask(
811    huffman_tree_size: usize,
812    huffman_tree: &[u8],
813    huffman_tree_extra_bits: &[u8],
814    code_length_bitdepth: &[u8],
815    code_length_bitdepth_symbols: &[u16],
816    storage_ix: &mut usize,
817    storage: &mut [u8],
818) {
819    for i in 0usize..huffman_tree_size {
820        let ix: usize = huffman_tree[i] as usize;
821        BrotliWriteBits(
822            code_length_bitdepth[ix],
823            code_length_bitdepth_symbols[ix] as (u64),
824            storage_ix,
825            storage,
826        );
827        if ix == 16usize {
828            BrotliWriteBits(2, huffman_tree_extra_bits[i] as (u64), storage_ix, storage);
829        } else if ix == 17usize {
830            BrotliWriteBits(3, huffman_tree_extra_bits[i] as (u64), storage_ix, storage);
831        }
832    }
833}
834
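// Serializes a complete prefix code given its depths array: run-length encodes
// the depths via BrotliWriteHuffmanTree, builds a code-length code over the
// resulting symbols, and writes both to the bit stream.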
835pub fn BrotliStoreHuffmanTree(
836    depths: &[u8],
837    num: usize,
838    tree: &mut [HuffmanTree],
839    storage_ix: &mut usize,
840    storage: &mut [u8],
841) {
842    let mut huffman_tree = [0u8; 704];
843    let mut huffman_tree_extra_bits = [0u8; 704];
844    let mut huffman_tree_size = 0usize;
845    let mut code_length_bitdepth = [0u8; 18];
846    let mut code_length_bitdepth_symbols = [0u16; 18];
847    let mut huffman_tree_histogram = [0u32; 18];
848    let mut i: usize;
849    let mut num_codes: i32 = 0i32;
850    let mut code: usize = 0usize;
851
852    BrotliWriteHuffmanTree(
853        depths,
854        num,
855        &mut huffman_tree_size,
856        &mut huffman_tree[..],
857        &mut huffman_tree_extra_bits[..],
858    );
859    for i in 0usize..huffman_tree_size {
860        let _rhs = 1;
861        let _lhs = &mut huffman_tree_histogram[huffman_tree[i] as usize];
862        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
863    }
864    i = 0usize;
865    'break3: while i < 18usize {
866        {
867            if huffman_tree_histogram[i] != 0 {
868                if num_codes == 0i32 {
869                    code = i;
870                    num_codes = 1i32;
871                } else if num_codes == 1i32 {
872                    num_codes = 2i32;
873                    {
874                        break 'break3;
875                    }
876                }
877            }
878        }
879        i = i.wrapping_add(1);
880    }
881    BrotliCreateHuffmanTree(
882        &mut huffman_tree_histogram,
883        18usize,
884        5i32,
885        tree,
886        &mut code_length_bitdepth,
887    );
888    BrotliConvertBitDepthsToSymbols(
889        &mut code_length_bitdepth,
890        18usize,
891        &mut code_length_bitdepth_symbols,
892    );
893    BrotliStoreHuffmanTreeOfHuffmanTreeToBitMask(
894        num_codes,
895        &code_length_bitdepth,
896        storage_ix,
897        storage,
898    );
899    if num_codes == 1i32 {
900        code_length_bitdepth[code] = 0u8;
901    }
902    BrotliStoreHuffmanTreeToBitMask(
903        huffman_tree_size,
904        &huffman_tree,
905        &huffman_tree_extra_bits,
906        &code_length_bitdepth,
907        &code_length_bitdepth_symbols,
908        storage_ix,
909        storage,
910    );
911}
912
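// Emits a fixed 40-bit pattern: the precomputed code-length code used by
// BrotliBuildAndStoreHuffmanTreeFast before it writes the depth stream.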
913fn StoreStaticCodeLengthCode(storage_ix: &mut usize, storage: &mut [u8]) {
914    BrotliWriteBits(40, 0xff_5555_5554, storage_ix, storage);
915}
916
917pub struct SimpleSortHuffmanTree {}
918
919impl HuffmanComparator for SimpleSortHuffmanTree {
920    fn Cmp(&self, v0: &HuffmanTree, v1: &HuffmanTree) -> bool {
921        v0.total_count_ < v1.total_count_
922    }
923}
924
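// Builds a depth-limited (14 bit) prefix code directly from `histogram` and
// writes it to the bit stream: 1..=4 distinct symbols use the simple tree
// encoding, otherwise the depths are emitted with the static code-length code
// and run-length encoding. The count limit is doubled and the tree rebuilt
// whenever the depth limit cannot be met.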
925pub fn BrotliBuildAndStoreHuffmanTreeFast<AllocHT: alloc::Allocator<HuffmanTree>>(
926    m: &mut AllocHT,
927    histogram: &[u32],
928    histogram_total: usize,
929    max_bits: usize,
930    depth: &mut [u8],
931    bits: &mut [u16],
932    storage_ix: &mut usize,
933    storage: &mut [u8],
934) {
935    let mut count: u64 = 0;
936    let mut symbols: [u64; 4] = [0; 4];
937    let mut length: u64 = 0;
938    let mut total: usize = histogram_total;
939    while total != 0usize {
940        if histogram[(length as usize)] != 0 {
941            if count < 4 {
942                symbols[count as usize] = length;
943            }
944            count = count.wrapping_add(1);
945            total = total.wrapping_sub(histogram[(length as usize)] as usize);
946        }
947        length = length.wrapping_add(1);
948    }
949    if count <= 1 {
950        BrotliWriteBits(4, 1, storage_ix, storage);
951        BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
952        depth[symbols[0] as usize] = 0u8;
953        bits[symbols[0] as usize] = 0u16;
954        return;
955    }
956    for depth_elem in depth[..(length as usize)].iter_mut() {
957        *depth_elem = 0; // memset
958    }
959    {
960        // FIXME: computation in u64, followed by allocation which might be less than u64
961        let max_tree_size: u64 = (2u64).wrapping_mul(length).wrapping_add(1);
962        // FIXME: makes little sense to test if N+1 > 0 -- always true unless wrapping. Perhaps use allocate() instead?
963        let mut tree = alloc_or_default::<HuffmanTree, _>(m, max_tree_size as usize);
964        let mut count_limit: u32;
965        count_limit = 1u32;
966        'break11: loop {
967            {
968                let mut node_index: u32 = 0u32;
969                let mut l: u64;
970                l = length;
971                while l != 0 {
972                    l = l.wrapping_sub(1);
973                    if histogram[l as usize] != 0 {
974                        if histogram[l as usize] >= count_limit {
975                            tree.slice_mut()[node_index as usize] =
976                                HuffmanTree::new(histogram[l as usize], -1, l as i16);
977                        } else {
978                            tree.slice_mut()[node_index as usize] =
979                                HuffmanTree::new(count_limit, -1, l as i16);
980                        }
981                        node_index = node_index.wrapping_add(1);
982                    }
983                }
984                {
985                    let n: i32 = node_index as i32;
986
987                    let mut i: i32 = 0i32;
988                    let mut j: i32 = n + 1i32;
989                    let mut k: i32;
990                    SortHuffmanTreeItems(tree.slice_mut(), n as usize, SimpleSortHuffmanTree {});
991                    let sentinel = HuffmanTree::new(u32::MAX, -1, -1);
992                    tree.slice_mut()[(node_index.wrapping_add(1) as usize)] = sentinel;
993                    tree.slice_mut()[(node_index as usize)] = sentinel;
994                    node_index = node_index.wrapping_add(2);
995                    k = n - 1i32;
996                    while k > 0i32 {
997                        {
998                            let left: i32;
999                            let right: i32;
1000                            if (tree.slice()[(i as usize)]).total_count_
1001                                <= (tree.slice()[(j as usize)]).total_count_
1002                            {
1003                                left = i;
1004                                i += 1;
1005                            } else {
1006                                left = j;
1007                                j += 1;
1008                            }
1009                            if (tree.slice()[(i as usize)]).total_count_
1010                                <= (tree.slice()[(j as usize)]).total_count_
1011                            {
1012                                right = i;
1013                                i += 1;
1014                            } else {
1015                                right = j;
1016                                j += 1;
1017                            }
1018                            let sum_total = (tree.slice()[(left as usize)])
1019                                .total_count_
1020                                .wrapping_add((tree.slice()[(right as usize)]).total_count_);
1021                            let tree_ind = (node_index.wrapping_sub(1) as usize);
1022                            (tree.slice_mut()[tree_ind]).total_count_ = sum_total;
1023                            (tree.slice_mut()[tree_ind]).index_left_ = left as i16;
1024                            (tree.slice_mut()[tree_ind]).index_right_or_value_ = right as i16;
1025                            tree.slice_mut()[(node_index as usize)] = sentinel;
1026                            node_index = node_index.wrapping_add(1);
1027                        }
1028                        k -= 1;
1029                    }
1030                    if BrotliSetDepth(2i32 * n - 1i32, tree.slice_mut(), depth, 14i32) {
1031                        break 'break11;
1032                    }
1033                }
1034            }
1035            count_limit = count_limit.wrapping_mul(2);
1036        }
1037        {
1038            m.free_cell(core::mem::take(&mut tree));
1039        }
1040    }
1041    BrotliConvertBitDepthsToSymbols(depth, length as usize, bits);
1042    if count <= 4 {
1043        BrotliWriteBits(2, 1, storage_ix, storage);
1044        BrotliWriteBits(2, count.wrapping_sub(1), storage_ix, storage);
1045        for i in 0..count as usize {
1046            for j in i + 1..count as usize {
1047                if depth[symbols[j] as usize] < depth[symbols[i] as usize] {
1048                    symbols.swap(j, i);
1049                }
1050            }
1051        }
1052        if count == 2 {
1053            BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
1054            BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
1055        } else if count == 3 {
1056            BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
1057            BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
1058            BrotliWriteBits(max_bits as u8, symbols[2], storage_ix, storage);
1059        } else {
1060            BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
1061            BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
1062            BrotliWriteBits(max_bits as u8, symbols[2], storage_ix, storage);
1063            BrotliWriteBits(max_bits as u8, symbols[3], storage_ix, storage);
1064            BrotliWriteBits(
1065                1,
1066                if depth[(symbols[0] as usize)] as i32 == 1i32 {
1067                    1i32
1068                } else {
1069                    0i32
1070                } as (u64),
1071                storage_ix,
1072                storage,
1073            );
1074        }
1075    } else {
1076        let mut previous_value: u8 = 8u8;
1077        let mut i: u64;
1078        StoreStaticCodeLengthCode(storage_ix, storage);
1079        i = 0;
1080        while i < length {
1081            let value: u8 = depth[(i as usize)];
1082            let mut reps: u64 = 1;
1083            let mut k: u64;
1084            k = i.wrapping_add(1);
1085            while k < length && (depth[(k as usize)] as i32 == value as i32) {
1086                {
1087                    reps = reps.wrapping_add(1);
1088                }
1089                k = k.wrapping_add(1);
1090            }
1091            i = i.wrapping_add(reps);
1092            if value as i32 == 0i32 {
1093                BrotliWriteBits(
1094                    kZeroRepsDepth[reps as usize] as u8,
1095                    kZeroRepsBits[reps as usize] as u64,
1096                    storage_ix,
1097                    storage,
1098                );
1099            } else {
1100                if previous_value as i32 != value as i32 {
1101                    BrotliWriteBits(
1102                        kCodeLengthDepth[value as usize],
1103                        kCodeLengthBits[value as usize] as (u64),
1104                        storage_ix,
1105                        storage,
1106                    );
1107                    reps = reps.wrapping_sub(1);
1108                }
1109                if reps < 3 {
1110                    while reps != 0 {
1111                        reps = reps.wrapping_sub(1);
1112                        BrotliWriteBits(
1113                            kCodeLengthDepth[value as usize],
1114                            kCodeLengthBits[value as usize] as (u64),
1115                            storage_ix,
1116                            storage,
1117                        );
1118                    }
1119                } else {
1120                    reps = reps.wrapping_sub(3);
1121                    BrotliWriteBits(
1122                        kNonZeroRepsDepth[reps as usize] as u8,
1123                        kNonZeroRepsBits[reps as usize] as u64,
1124                        storage_ix,
1125                        storage,
1126                    );
1127                }
1128                previous_value = value;
1129            }
1130        }
1131    }
1132}
1133
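// Result of metablock splitting: block splits for literals, commands and
// distances, the literal/distance context maps, and one histogram per block
// type and category.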
1134pub struct MetaBlockSplit<
1135    Alloc: alloc::Allocator<u8>
1136        + alloc::Allocator<u32>
1137        + alloc::Allocator<HistogramLiteral>
1138        + alloc::Allocator<HistogramCommand>
1139        + alloc::Allocator<HistogramDistance>,
1140> {
1141    pub literal_split: BlockSplit<Alloc>,
1142    pub command_split: BlockSplit<Alloc>,
1143    pub distance_split: BlockSplit<Alloc>,
1144    pub literal_context_map: <Alloc as Allocator<u32>>::AllocatedMemory,
1145    pub literal_context_map_size: usize,
1146    pub distance_context_map: <Alloc as Allocator<u32>>::AllocatedMemory,
1147    pub distance_context_map_size: usize,
1148    pub literal_histograms: <Alloc as Allocator<HistogramLiteral>>::AllocatedMemory,
1149    pub literal_histograms_size: usize,
1150    pub command_histograms: <Alloc as Allocator<HistogramCommand>>::AllocatedMemory,
1151    pub command_histograms_size: usize,
1152    pub distance_histograms: <Alloc as Allocator<HistogramDistance>>::AllocatedMemory,
1153    pub distance_histograms_size: usize,
1154}
1155impl<
1156        Alloc: alloc::Allocator<u8>
1157            + alloc::Allocator<u32>
1158            + alloc::Allocator<HistogramLiteral>
1159            + alloc::Allocator<HistogramCommand>
1160            + alloc::Allocator<HistogramDistance>,
1161    > Default for MetaBlockSplit<Alloc>
1162{
1163    fn default() -> Self {
1164        Self {
1165            literal_split: BlockSplit::default(),
1166            command_split: BlockSplit::default(),
1167            distance_split: BlockSplit::default(),
1168            literal_context_map: alloc_default::<u32, Alloc>(),
1169            literal_context_map_size: 0,
1170            distance_context_map: alloc_default::<u32, Alloc>(),
1171            distance_context_map_size: 0,
1172            literal_histograms: alloc_default::<HistogramLiteral, Alloc>(),
1173            literal_histograms_size: 0,
1174            command_histograms: alloc_default::<HistogramCommand, Alloc>(),
1175            command_histograms_size: 0,
1176            distance_histograms: alloc_default::<HistogramDistance, Alloc>(),
1177            distance_histograms_size: 0,
1178        }
1179    }
1180}
1181
1182impl<
1183        Alloc: alloc::Allocator<u8>
1184            + alloc::Allocator<u32>
1185            + alloc::Allocator<HistogramLiteral>
1186            + alloc::Allocator<HistogramCommand>
1187            + alloc::Allocator<HistogramDistance>,
1188    > MetaBlockSplit<Alloc>
1189{
1190    pub fn new() -> Self {
1191        Self::default()
1192    }
1193
1194    pub fn destroy(&mut self, alloc: &mut Alloc) {
1195        self.literal_split.destroy(alloc);
1196        self.command_split.destroy(alloc);
1197        self.distance_split.destroy(alloc);
1198        <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut self.literal_context_map));
1199        self.literal_context_map_size = 0;
1200        <Alloc as Allocator<u32>>::free_cell(
1201            alloc,
1202            core::mem::take(&mut self.distance_context_map),
1203        );
1204        self.distance_context_map_size = 0;
1205        <Alloc as Allocator<HistogramLiteral>>::free_cell(
1206            alloc,
1207            core::mem::take(&mut self.literal_histograms),
1208        );
1209
1210        self.literal_histograms_size = 0;
1211        <Alloc as Allocator<HistogramCommand>>::free_cell(
1212            alloc,
1213            core::mem::take(&mut self.command_histograms),
1214        );
1215        self.command_histograms_size = 0;
1216        <Alloc as Allocator<HistogramDistance>>::free_cell(
1217            alloc,
1218            core::mem::take(&mut self.distance_histograms),
1219        );
1220        self.distance_histograms_size = 0;
1221    }
1222}
1223#[derive(Clone, Copy)]
1224pub struct BlockTypeCodeCalculator {
1225    pub last_type: usize,
1226    pub second_last_type: usize,
1227}
1228
1229pub struct BlockSplitCode {
1230    pub type_code_calculator: BlockTypeCodeCalculator,
1231    pub type_depths: [u8; 258],
1232    pub type_bits: [u16; 258],
1233    pub length_depths: [u8; 26],
1234    pub length_bits: [u16; 26],
1235}
1236
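// Per-category encoding state (literals, commands or distances): the block
// split being traversed, the current block index and remaining length, and the
// entropy codes (depths_, bits_) for every block type.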
1237pub struct BlockEncoder<'a, Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>> {
1238    /*    pub alloc_u8 : AllocU8,
1239    pub alloc_u16 : AllocU16,
1240    pub alloc_u32 : AllocU32,
1241    pub alloc_ht : AllocHT,*/
1242    pub histogram_length_: usize,
1243    pub num_block_types_: usize,
1244    pub block_types_: &'a [u8],
1245    pub block_lengths_: &'a [u32],
1246    pub num_blocks_: usize,
1247    pub block_split_code_: BlockSplitCode,
1248    pub block_ix_: usize,
1249    pub block_len_: usize,
1250    pub entropy_ix_: usize,
1251    pub depths_: <Alloc as Allocator<u8>>::AllocatedMemory,
1252    pub bits_: <Alloc as Allocator<u16>>::AllocatedMemory,
1253}
1254
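// floor(log2(n)) for n > 0 (returns 0 for both n = 0 and n = 1).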
1255fn Log2FloorNonZero(mut n: u64) -> u32 {
1256    let mut result: u32 = 0u32;
1257    'loop1: loop {
1258        if {
1259            n >>= 1i32;
1260            n
1261        } != 0
1262        {
1263            result = result.wrapping_add(1);
1264            continue 'loop1;
1265        } else {
1266            break 'loop1;
1267        }
1268    }
1269    result
1270}
1271
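// Encodes MLEN for the meta-block header: `mnibbles` (4, 5 or 6) nibbles hold
// the value length - 1; `nibblesbits` is the 2-bit field mnibbles - 4 and
// `numbits` the total number of MLEN bits to write.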
1272fn BrotliEncodeMlen(length: u32, bits: &mut u64, numbits: &mut u32, nibblesbits: &mut u32) {
1273    let lg: u32 = (if length == 1u32 {
1274        1u32
1275    } else {
1276        Log2FloorNonZero(length.wrapping_sub(1) as (u64)).wrapping_add(1)
1277    });
1278    let mnibbles: u32 = (if lg < 16u32 {
1279        16u32
1280    } else {
1281        lg.wrapping_add(3)
1282    })
1283    .wrapping_div(4);
1284    assert!(length > 0);
1285    assert!(length <= (1 << 24));
1286    assert!(lg <= 24);
1287    *nibblesbits = mnibbles.wrapping_sub(4);
1288    *numbits = mnibbles.wrapping_mul(4);
1289    *bits = length.wrapping_sub(1) as u64;
1290}
1291
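// Writes the meta-block header: ISLAST, ISLASTEMPTY (0) when final, MNIBBLES,
// MLEN - 1, and ISUNCOMPRESSED (0) when not final.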
1292fn StoreCompressedMetaBlockHeader(
1293    is_final_block: bool,
1294    length: usize,
1295    storage_ix: &mut usize,
1296    storage: &mut [u8],
1297) {
1298    let mut lenbits: u64 = 0;
1299    let mut nlenbits: u32 = 0;
1300    let mut nibblesbits: u32 = 0;
1301    BrotliWriteBits(1, is_final_block.into(), storage_ix, storage);
1302    if is_final_block {
1303        BrotliWriteBits(1, 0, storage_ix, storage);
1304    }
1305    BrotliEncodeMlen(length as u32, &mut lenbits, &mut nlenbits, &mut nibblesbits);
1306    BrotliWriteBits(2, nibblesbits as u64, storage_ix, storage);
1307    BrotliWriteBits(nlenbits as u8, lenbits, storage_ix, storage);
1308    if !is_final_block {
1309        BrotliWriteBits(1, 0, storage_ix, storage);
1310    }
1311}
1312
1313impl BlockTypeCodeCalculator {
1314    fn new() -> Self {
1315        Self {
1316            last_type: 1,
1317            second_last_type: 0,
1318        }
1319    }
1320}
1321
1322impl<'a, Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'a, Alloc> {
1323    fn new(
1324        histogram_length: usize,
1325        num_block_types: usize,
1326        block_types: &'a [u8],
1327        block_lengths: &'a [u32],
1328        num_blocks: usize,
1329    ) -> Self {
1330        let block_len = if num_blocks != 0 && !block_lengths.is_empty() {
1331            block_lengths[0] as usize
1332        } else {
1333            0
1334        };
1335        Self {
1336            histogram_length_: histogram_length,
1337            num_block_types_: num_block_types,
1338            block_types_: block_types,
1339            block_lengths_: block_lengths,
1340            num_blocks_: num_blocks,
1341            block_split_code_: BlockSplitCode {
1342                type_code_calculator: BlockTypeCodeCalculator::new(),
1343                type_depths: [0; 258],
1344                type_bits: [0; 258],
1345                length_depths: [0; 26],
1346                length_bits: [0; 26],
1347            },
1348            block_ix_: 0,
1349            block_len_: block_len,
1350            entropy_ix_: 0,
1351            depths_: alloc_default::<u8, Alloc>(),
1352            bits_: alloc_default::<u16, Alloc>(),
1353        }
1354    }
1355}
1356
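// Maps a block type to its symbol in the block-type alphabet: 0 means "the
// second-to-last type", 1 means "last type + 1", otherwise type + 2; updates
// the calculator's last/second-last state.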
1357fn NextBlockTypeCode(calculator: &mut BlockTypeCodeCalculator, type_: u8) -> usize {
1358    let type_code: usize = (if type_ as usize == calculator.last_type.wrapping_add(1) {
1359        1u32
1360    } else if type_ as usize == calculator.second_last_type {
1361        0u32
1362    } else {
1363        (type_ as u32).wrapping_add(2)
1364    }) as usize;
1365    calculator.second_last_type = calculator.last_type;
1366    calculator.last_type = type_ as usize;
1367    type_code
1368}
1369
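// Maps a block length to its prefix symbol by scanning kBlockLengthPrefixCode
// from a coarse starting guess.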
1370fn BlockLengthPrefixCode(len: u32) -> u32 {
1371    let mut code: u32 = (if len >= 177u32 {
1372        if len >= 753u32 {
1373            20i32
1374        } else {
1375            14i32
1376        }
1377    } else if len >= 41u32 {
1378        7i32
1379    } else {
1380        0i32
1381    }) as u32;
1382    while code < (26i32 - 1i32) as u32
1383        && (len >= kBlockLengthPrefixCode[code.wrapping_add(1) as usize].offset)
1384    {
1385        code = code.wrapping_add(1);
1386    }
1387    code
1388}
1389
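// Variable-length code used here for the block-type counts: a zero bit for
// n == 0, otherwise a one bit, 3 bits for floor(log2(n)), and that many extra
// bits holding n - 2^nbits.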
1390fn StoreVarLenUint8(n: u64, storage_ix: &mut usize, storage: &mut [u8]) {
1391    if n == 0 {
1392        BrotliWriteBits(1, 0, storage_ix, storage);
1393    } else {
1394        let nbits: u8 = Log2FloorNonZero(n) as u8;
1395        BrotliWriteBits(1, 1, storage_ix, storage);
1396        BrotliWriteBits(3, nbits as u64, storage_ix, storage);
1397        BrotliWriteBits(nbits, n.wrapping_sub(1u64 << nbits), storage_ix, storage);
1398    }
1399}
1400
1401fn StoreSimpleHuffmanTree(
1402    depths: &[u8],
1403    symbols: &mut [usize],
1404    num_symbols: usize,
1405    max_bits: usize,
1406    storage_ix: &mut usize,
1407    storage: &mut [u8],
1408) {
1409    BrotliWriteBits(2, 1, storage_ix, storage);
1410    BrotliWriteBits(2, num_symbols.wrapping_sub(1) as u64, storage_ix, storage);
1411    {
1412        for i in 0..num_symbols {
1413            for j in i + 1..num_symbols {
1414                if depths[symbols[j]] < depths[symbols[i]] {
1415                    symbols.swap(j, i);
1416                }
1417            }
1418        }
1419    }
1420    if num_symbols == 2usize {
1421        BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1422        BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1423    } else if num_symbols == 3usize {
1424        BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1425        BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1426        BrotliWriteBits(max_bits as u8, symbols[2] as u64, storage_ix, storage);
1427    } else {
1428        BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1429        BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1430        BrotliWriteBits(max_bits as u8, symbols[2] as u64, storage_ix, storage);
1431        BrotliWriteBits(max_bits as u8, symbols[3] as u64, storage_ix, storage);
1432        BrotliWriteBits(
1433            1,
1434            if depths[symbols[0]] as i32 == 1i32 {
1435                1i32
1436            } else {
1437                0i32
1438            } as (u64),
1439            storage_ix,
1440            storage,
1441        );
1442    }
1443}
1444
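// Builds a depth-limited (15 bit) prefix code over `histogram` and serializes
// it: up to four used symbols take the simple encoding, otherwise the full
// BrotliStoreHuffmanTree path is used. `depth` and `bits` receive the code.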
1445fn BuildAndStoreHuffmanTree(
1446    histogram: &[u32],
1447    histogram_length: usize,
1448    alphabet_size: usize,
1449    tree: &mut [HuffmanTree],
1450    depth: &mut [u8],
1451    bits: &mut [u16],
1452    storage_ix: &mut usize,
1453    storage: &mut [u8],
1454) {
1455    let mut count: usize = 0usize;
1456    let mut s4 = [0usize; 4];
1457    let mut i: usize;
1458    let mut max_bits: usize = 0usize;
1459    i = 0usize;
1460    'break31: while i < histogram_length {
1461        {
1462            if histogram[i] != 0 {
1463                if count < 4usize {
1464                    s4[count] = i;
1465                } else if count > 4usize {
1466                    break 'break31;
1467                }
1468                count = count.wrapping_add(1);
1469            }
1470        }
1471        i = i.wrapping_add(1);
1472    }
1473    {
1474        let mut max_bits_counter: usize = alphabet_size.wrapping_sub(1);
1475        while max_bits_counter != 0 {
1476            max_bits_counter >>= 1i32;
1477            max_bits = max_bits.wrapping_add(1);
1478        }
1479    }
1480    if count <= 1 {
1481        BrotliWriteBits(4, 1, storage_ix, storage);
1482        BrotliWriteBits(max_bits as u8, s4[0] as u64, storage_ix, storage);
1483        depth[s4[0]] = 0u8;
1484        bits[s4[0]] = 0u16;
1485        return;
1486    }
1487
1488    for depth_elem in depth[..histogram_length].iter_mut() {
1489        *depth_elem = 0; // memset
1490    }
1491    BrotliCreateHuffmanTree(histogram, histogram_length, 15i32, tree, depth);
1492    BrotliConvertBitDepthsToSymbols(depth, histogram_length, bits);
1493    if count <= 4usize {
1494        StoreSimpleHuffmanTree(depth, &mut s4[..], count, max_bits, storage_ix, storage);
1495    } else {
1496        BrotliStoreHuffmanTree(depth, histogram_length, tree, storage_ix, storage);
1497    }
1498}
1499
1500fn GetBlockLengthPrefixCode(len: u32, code: &mut usize, n_extra: &mut u32, extra: &mut u32) {
1501    *code = BlockLengthPrefixCode(len) as usize;
1502    *n_extra = kBlockLengthPrefixCode[*code].nbits;
1503    *extra = len.wrapping_sub(kBlockLengthPrefixCode[*code].offset);
1504}
1505
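// Emits one block-switch command: the block-type symbol (omitted for the first
// block, whose type is implicit), then the block-length prefix symbol and its
// extra bits.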
1506fn StoreBlockSwitch(
1507    code: &mut BlockSplitCode,
1508    block_len: u32,
1509    block_type: u8,
1510    is_first_block: bool,
1511    storage_ix: &mut usize,
1512    storage: &mut [u8],
1513) {
1514    let typecode: usize = NextBlockTypeCode(&mut code.type_code_calculator, block_type);
1515    let mut lencode: usize = 0;
1516    let mut len_nextra: u32 = 0;
1517    let mut len_extra: u32 = 0;
1518    if !is_first_block {
1519        BrotliWriteBits(
1520            code.type_depths[typecode] as u8,
1521            code.type_bits[typecode] as (u64),
1522            storage_ix,
1523            storage,
1524        );
1525    }
1526    GetBlockLengthPrefixCode(block_len, &mut lencode, &mut len_nextra, &mut len_extra);
1527    BrotliWriteBits(
1528        code.length_depths[lencode],
1529        code.length_bits[lencode] as (u64),
1530        storage_ix,
1531        storage,
1532    );
1533    BrotliWriteBits(len_nextra as u8, len_extra as (u64), storage_ix, storage);
1534}
1535
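// Stores the block-split metadata for one category: the number of block types
// via the variable-length u8 code and, when there is more than one type, the
// Huffman codes over block-type and block-length symbols followed by the first
// block-switch command. The type alphabet has num_types + 2 symbols because of
// the format's two shortcut codes ("previous type" and "type + 1").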
1536fn BuildAndStoreBlockSplitCode(
1537    types: &[u8],
1538    lengths: &[u32],
1539    num_blocks: usize,
1540    num_types: usize,
1541    tree: &mut [HuffmanTree],
1542    code: &mut BlockSplitCode,
1543    storage_ix: &mut usize,
1544    storage: &mut [u8],
1545) {
1546    let mut type_histo: [u32; 258] = [0; 258];
1547    let mut length_histo: [u32; 26] = [0; 26];
1548    let mut i: usize;
1549    let mut type_code_calculator = BlockTypeCodeCalculator::new();
1550    i = 0usize;
1551    while i < num_blocks {
1552        {
1553            let type_code: usize = NextBlockTypeCode(&mut type_code_calculator, types[i]);
1554            if i != 0usize {
1555                let _rhs = 1;
1556                let _lhs = &mut type_histo[type_code];
1557                *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1558            }
1559            {
1560                let _rhs = 1;
1561                let _lhs = &mut length_histo[BlockLengthPrefixCode(lengths[i]) as usize];
1562                *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1563            }
1564        }
1565        i = i.wrapping_add(1);
1566    }
1567    StoreVarLenUint8(num_types.wrapping_sub(1) as u64, storage_ix, storage);
1568    if num_types > 1 {
1569        BuildAndStoreHuffmanTree(
1570            &mut type_histo[0..],
1571            num_types.wrapping_add(2),
1572            num_types.wrapping_add(2),
1573            tree,
1574            &mut code.type_depths[0..],
1575            &mut code.type_bits[0..],
1576            storage_ix,
1577            storage,
1578        );
1579        BuildAndStoreHuffmanTree(
1580            &mut length_histo[0..],
1581            super::constants::BROTLI_NUM_BLOCK_LEN_SYMBOLS, // 26
1582            super::constants::BROTLI_NUM_BLOCK_LEN_SYMBOLS,
1583            tree,
1584            &mut code.length_depths[0..],
1585            &mut code.length_bits[0..],
1586            storage_ix,
1587            storage,
1588        );
1589        StoreBlockSwitch(code, lengths[0], types[0], true, storage_ix, storage);
1590    }
1591}
1592
1593impl<Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'_, Alloc> {
1594    fn build_and_store_block_switch_entropy_codes(
1595        &mut self,
1596        tree: &mut [HuffmanTree],
1597        storage_ix: &mut usize,
1598        storage: &mut [u8],
1599    ) {
1600        BuildAndStoreBlockSplitCode(
1601            self.block_types_,
1602            self.block_lengths_,
1603            self.num_blocks_,
1604            self.num_block_types_,
1605            tree,
1606            &mut self.block_split_code_,
1607            storage_ix,
1608            storage,
1609        );
1610    }
1611}
1612
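// Writes a context map in which block type i maps all of its
// (1 << context_bits) contexts to histogram i: each type is coded as one
// map-value symbol plus one maximal zero-run symbol (repeat code
// context_bits - 1 with all extra bits set), and the trailing 1 bit tells the
// decoder to apply the inverse move-to-front transform.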
1613fn StoreTrivialContextMap(
1614    num_types: usize,
1615    context_bits: usize,
1616    tree: &mut [HuffmanTree],
1617    storage_ix: &mut usize,
1618    storage: &mut [u8],
1619) {
1620    StoreVarLenUint8(num_types.wrapping_sub(1) as u64, storage_ix, storage);
1621    if num_types > 1 {
1622        let repeat_code: usize = context_bits.wrapping_sub(1u32 as usize);
1623        let repeat_bits: usize = (1u32 << repeat_code).wrapping_sub(1) as usize;
1624        let alphabet_size: usize = num_types.wrapping_add(repeat_code);
1625        let mut histogram: [u32; 272] = [0; 272];
1626        let mut depths: [u8; 272] = [0; 272];
1627        let mut bits: [u16; 272] = [0; 272];
1628        BrotliWriteBits(1u8, 1u64, storage_ix, storage);
1629        BrotliWriteBits(4u8, repeat_code.wrapping_sub(1) as u64, storage_ix, storage);
1630        histogram[repeat_code] = num_types as u32;
1631        histogram[0] = 1;
1632        for i in context_bits..alphabet_size {
1633            histogram[i] = 1;
1634        }
1635        BuildAndStoreHuffmanTree(
1636            &mut histogram[..],
1637            alphabet_size,
1638            alphabet_size,
1639            tree,
1640            &mut depths[..],
1641            &mut bits[..],
1642            storage_ix,
1643            storage,
1644        );
1645        for i in 0usize..num_types {
1646            let code: usize = if i == 0usize {
1647                0usize
1648            } else {
1649                i.wrapping_add(context_bits).wrapping_sub(1)
1650            };
1651            BrotliWriteBits(depths[code], bits[code] as (u64), storage_ix, storage);
1652            BrotliWriteBits(
1653                depths[repeat_code],
1654                bits[repeat_code] as (u64),
1655                storage_ix,
1656                storage,
1657            );
1658            BrotliWriteBits(repeat_code as u8, repeat_bits as u64, storage_ix, storage);
1659        }
1660        BrotliWriteBits(1, 1, storage_ix, storage);
1661    }
1662}
1663
1664fn IndexOf(v: &[u8], v_size: usize, value: u8) -> usize {
1665    let mut i: usize = 0usize;
1666    while i < v_size {
1667        {
1668            if v[i] as i32 == value as i32 {
1669                return i;
1670            }
1671        }
1672        i = i.wrapping_add(1);
1673    }
1674    i
1675}
1676
1677fn MoveToFront(v: &mut [u8], index: usize) {
1678    let value: u8 = v[index];
1679    let mut i: usize;
1680    i = index;
1681    while i != 0usize {
1682        {
1683            v[i] = v[i.wrapping_sub(1)];
1684        }
1685        i = i.wrapping_sub(1);
1686    }
1687    v[0] = value;
1688}
1689
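// Move-to-front transform over the context-map values: each output is the
// current position of the input value in an MTF list that starts out as the
// identity. For example, the input [1, 1, 0, 2] becomes [1, 0, 1, 2].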
1690fn MoveToFrontTransform(v_in: &[u32], v_size: usize, v_out: &mut [u32]) {
1691    let mut mtf: [u8; 256] = [0; 256];
1692    let mut max_value: u32;
1693    if v_size == 0usize {
1694        return;
1695    }
1696    max_value = v_in[0];
1697    for i in 1..v_size {
1698        if v_in[i] > max_value {
1699            max_value = v_in[i];
1700        }
1701    }
1702    for i in 0..=max_value as usize {
1703        mtf[i] = i as u8;
1704    }
1705    {
1706        let mtf_size: usize = max_value.wrapping_add(1) as usize;
1707        for i in 0usize..v_size {
1708            let index: usize = IndexOf(&mtf[..], mtf_size, v_in[i] as u8);
1709            v_out[i] = index as u32;
1710            MoveToFront(&mut mtf[..], index);
1711        }
1712    }
1713}
1714
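// Applies the context-map zero run-length extension in place: nonzero values
// are shifted up by the chosen number of run-length prefixes, and each run of
// zeros becomes prefix symbols whose extra-bit payload is stashed in bits 9
// and up of the output word. E.g. with a prefix budget of at least 2, a run
// of five zeros is coded as symbol 2 with extra value 1 (5 = 4 + 1), i.e.
// 2 | (1 << 9).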
1715fn RunLengthCodeZeros(
1716    in_size: usize,
1717    v: &mut [u32],
1718    out_size: &mut usize,
1719    max_run_length_prefix: &mut u32,
1720) {
1721    let mut max_reps: u32 = 0u32;
1722    let mut i: usize;
1723    let mut max_prefix: u32;
1724    i = 0usize;
1725    while i < in_size {
1726        let mut reps: u32 = 0u32;
1727        while i < in_size && (v[i] != 0u32) {
1728            i = i.wrapping_add(1);
1729        }
1730        while i < in_size && (v[i] == 0u32) {
1731            {
1732                reps = reps.wrapping_add(1);
1733            }
1734            i = i.wrapping_add(1);
1735        }
1736        max_reps = max(reps, max_reps);
1737    }
1738    max_prefix = if max_reps > 0u32 {
1739        Log2FloorNonZero(max_reps as (u64))
1740    } else {
1741        0u32
1742    };
1743    max_prefix = min(max_prefix, *max_run_length_prefix);
1744    *max_run_length_prefix = max_prefix;
1745    *out_size = 0usize;
1746    i = 0usize;
1747    while i < in_size {
1748        if v[i] != 0u32 {
1749            v[*out_size] = (v[i]).wrapping_add(*max_run_length_prefix);
1750            i = i.wrapping_add(1);
1751            *out_size = out_size.wrapping_add(1);
1752        } else {
1753            let mut reps: u32 = 1u32;
1754            let mut k: usize;
1755            k = i.wrapping_add(1);
1756            while k < in_size && (v[k] == 0u32) {
1757                {
1758                    reps = reps.wrapping_add(1);
1759                }
1760                k = k.wrapping_add(1);
1761            }
1762            i = i.wrapping_add(reps as usize);
1763            while reps != 0u32 {
1764                if reps < 2u32 << max_prefix {
1765                    let run_length_prefix: u32 = Log2FloorNonZero(reps as (u64));
1766                    let extra_bits: u32 = reps.wrapping_sub(1u32 << run_length_prefix);
1767                    v[*out_size] = run_length_prefix.wrapping_add(extra_bits << 9);
1768                    *out_size = out_size.wrapping_add(1);
1769                    {
1770                        break;
1771                    }
1772                } else {
1773                    let extra_bits: u32 = (1u32 << max_prefix).wrapping_sub(1);
1774                    v[*out_size] = max_prefix.wrapping_add(extra_bits << 9);
1775                    reps = reps.wrapping_sub((2u32 << max_prefix).wrapping_sub(1));
1776                    *out_size = out_size.wrapping_add(1);
1777                }
1778            }
1779        }
1780    }
1781}
1782
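// Full context-map encoding: the number of histograms via the variable-length
// u8 code, then (for more than one histogram) a move-to-front transform, zero
// run-length coding, a Huffman code over the resulting symbols, the symbol
// stream with its run extra bits, and a final 1 bit signalling that the
// inverse MTF must be applied when decoding.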
1783fn EncodeContextMap<AllocU32: alloc::Allocator<u32>>(
1784    m: &mut AllocU32,
1785    context_map: &[u32],
1786    context_map_size: usize,
1787    num_clusters: usize,
1788    tree: &mut [HuffmanTree],
1789    storage_ix: &mut usize,
1790    storage: &mut [u8],
1791) {
1792    let mut rle_symbols: AllocU32::AllocatedMemory;
1793    let mut max_run_length_prefix: u32 = 6u32;
1794    let mut num_rle_symbols: usize = 0usize;
1795    static kSymbolMask: u32 = (1u32 << 9) - 1;
1796    let mut depths: [u8; 272] = [0; 272];
1797    let mut bits: [u16; 272] = [0; 272];
1798    StoreVarLenUint8(num_clusters.wrapping_sub(1) as u64, storage_ix, storage);
1799    if num_clusters == 1 {
1800        return;
1801    }
1802    rle_symbols = alloc_or_default::<u32, _>(m, context_map_size);
1803    MoveToFrontTransform(context_map, context_map_size, rle_symbols.slice_mut());
1804    RunLengthCodeZeros(
1805        context_map_size,
1806        rle_symbols.slice_mut(),
1807        &mut num_rle_symbols,
1808        &mut max_run_length_prefix,
1809    );
1810    let mut histogram: [u32; 272] = [0; 272];
1811    for i in 0usize..num_rle_symbols {
1812        let _rhs = 1;
1813        let _lhs = &mut histogram[(rle_symbols.slice()[i] & kSymbolMask) as usize];
1814        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1815    }
1816    {
1817        let use_rle = max_run_length_prefix > 0;
1818        BrotliWriteBits(1, u64::from(use_rle), storage_ix, storage);
1819        if use_rle {
1820            BrotliWriteBits(
1821                4,
1822                max_run_length_prefix.wrapping_sub(1) as (u64),
1823                storage_ix,
1824                storage,
1825            );
1826        }
1827    }
1828    BuildAndStoreHuffmanTree(
1829        &mut histogram[..],
1830        num_clusters.wrapping_add(max_run_length_prefix as usize),
1831        num_clusters.wrapping_add(max_run_length_prefix as usize),
1832        tree,
1833        &mut depths[..],
1834        &mut bits[..],
1835        storage_ix,
1836        storage,
1837    );
1838    for i in 0usize..num_rle_symbols {
1839        let rle_symbol: u32 = rle_symbols.slice()[i] & kSymbolMask;
1840        let extra_bits_val: u32 = rle_symbols.slice()[i] >> 9;
1841        BrotliWriteBits(
1842            depths[rle_symbol as usize],
1843            bits[rle_symbol as usize] as (u64),
1844            storage_ix,
1845            storage,
1846        );
1847        if rle_symbol > 0u32 && (rle_symbol <= max_run_length_prefix) {
1848            BrotliWriteBits(
1849                rle_symbol as u8,
1850                extra_bits_val as (u64),
1851                storage_ix,
1852                storage,
1853            );
1854        }
1855    }
1856    BrotliWriteBits(1, 1, storage_ix, storage);
1857    m.free_cell(rle_symbols);
1858}
1859
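// Entropy-code storage for a BlockEncoder: depths_ and bits_ hold one
// canonical code per histogram, laid out contiguously in chunks of
// histogram_length_ entries, so the store_symbol* methods can index them as
// histogram_index * histogram_length_ + symbol. Block switches are emitted
// lazily whenever the remaining length of the current block reaches zero.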
1860impl<Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'_, Alloc> {
1861    fn build_and_store_entropy_codes<HistogramType: SliceWrapper<u32>>(
1862        &mut self,
1863        m: &mut Alloc,
1864        histograms: &[HistogramType],
1865        histograms_size: usize,
1866        alphabet_size: usize,
1867        tree: &mut [HuffmanTree],
1868        storage_ix: &mut usize,
1869        storage: &mut [u8],
1870    ) {
1871        let table_size: usize = histograms_size.wrapping_mul(self.histogram_length_);
1872        self.depths_ = alloc_or_default::<u8, _>(m, table_size);
1873        self.bits_ = alloc_or_default::<u16, _>(m, table_size);
1874        {
1875            for i in 0usize..histograms_size {
1876                let ix: usize = i.wrapping_mul(self.histogram_length_);
1877                BuildAndStoreHuffmanTree(
1878                    &(histograms[i]).slice()[0..],
1879                    self.histogram_length_,
1880                    alphabet_size,
1881                    tree,
1882                    &mut self.depths_.slice_mut()[ix..],
1883                    &mut self.bits_.slice_mut()[ix..],
1884                    storage_ix,
1885                    storage,
1886                );
1887            }
1888        }
1889    }
1890
1891    fn store_symbol(&mut self, symbol: usize, storage_ix: &mut usize, storage: &mut [u8]) {
1892        if self.block_len_ == 0usize {
1893            let block_ix: usize = {
1894                self.block_ix_ = self.block_ix_.wrapping_add(1);
1895                self.block_ix_
1896            };
1897            let block_len: u32 = self.block_lengths_[block_ix];
1898            let block_type: u8 = self.block_types_[block_ix];
1899            self.block_len_ = block_len as usize;
1900            self.entropy_ix_ = (block_type as usize).wrapping_mul(self.histogram_length_);
1901            StoreBlockSwitch(
1902                &mut self.block_split_code_,
1903                block_len,
1904                block_type,
1905                false,
1906                storage_ix,
1907                storage,
1908            );
1909        }
1910        self.block_len_ = self.block_len_.wrapping_sub(1);
1911        {
1912            let ix: usize = self.entropy_ix_.wrapping_add(symbol);
1913            BrotliWriteBits(
1914                self.depths_.slice()[ix],
1915                self.bits_.slice()[ix] as (u64),
1916                storage_ix,
1917                storage,
1918            );
1919        }
1920    }
1921}
1922
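// copy_len_ packs the copy length into its low 25 bits and a 7-bit signed
// "code delta" into the high bits; copy_len_code() sign-extends that delta by
// replicating bit 6 into bit 7 and adds it to the length to recover the value
// the copy-length code was chosen from.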
1923impl Command {
1924    fn copy_len_code(&self) -> u32 {
1925        let modifier = self.copy_len_ >> 25;
1926        let delta: i32 = ((modifier | ((modifier & 0x40) << 1)) as u8) as i8 as i32;
1927        ((self.copy_len_ & 0x01ff_ffff) as i32 + delta) as u32
1928    }
1929}
1930
1931fn GetInsertExtra(inscode: u16) -> u32 {
1932    kInsExtra[inscode as usize]
1933}
1934
1935fn GetInsertBase(inscode: u16) -> u32 {
1936    kInsBase[inscode as usize]
1937}
1938
1939fn GetCopyBase(copycode: u16) -> u32 {
1940    kCopyBase[copycode as usize]
1941}
1942
1943fn GetCopyExtra(copycode: u16) -> u32 {
1944    kCopyExtra[copycode as usize]
1945}
1946
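// Writes a command's extra bits: the insert-length extra bits first (in the
// low positions) with the copy-length extra bits packed above them, emitted
// as a single field.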
1947fn StoreCommandExtra(cmd: &Command, storage_ix: &mut usize, storage: &mut [u8]) {
1948    let copylen_code = cmd.copy_len_code();
1949    let inscode: u16 = GetInsertLengthCode(cmd.insert_len_ as usize);
1950    let copycode: u16 = GetCopyLengthCode(copylen_code as usize);
1951    let insnumextra: u32 = GetInsertExtra(inscode);
1952    let insextraval: u64 = cmd.insert_len_.wrapping_sub(GetInsertBase(inscode)) as (u64);
1953    let copyextraval: u64 = copylen_code.wrapping_sub(GetCopyBase(copycode)) as (u64);
1954    let bits: u64 = copyextraval << insnumextra | insextraval;
1955    BrotliWriteBits(
1956        insnumextra.wrapping_add(GetCopyExtra(copycode)) as u8,
1957        bits,
1958        storage_ix,
1959        storage,
1960    );
1961}
1962
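// Maps the previous two bytes to a literal context id for the given context
// mode (LSB6, MSB6, UTF8 or SIGNED); the UTF8 and SIGNED modes combine table
// lookups on both bytes.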
1963fn Context(p1: u8, p2: u8, mode: ContextType) -> u8 {
1964    match mode {
1965        ContextType::CONTEXT_LSB6 => (p1 as i32 & 0x3fi32) as u8,
1966        ContextType::CONTEXT_MSB6 => (p1 as i32 >> 2) as u8,
1967        ContextType::CONTEXT_UTF8 => {
1968            (kUTF8ContextLookup[p1 as usize] as i32
1969                | kUTF8ContextLookup[(p2 as i32 + 256i32) as usize] as i32) as u8
1970        }
1971        ContextType::CONTEXT_SIGNED => {
1972            (((kSigned3BitContextLookup[p1 as usize] as i32) << 3)
1973                + kSigned3BitContextLookup[p2 as usize] as i32) as u8
1974        }
1975    }
1977}
1978
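// Like store_symbol, but the canonical code is chosen through the context map:
// entropy_ix_ is (block type << context_bits), and the map entry at
// entropy_ix_ + context selects the histogram whose code encodes the symbol.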
1979impl<Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'_, Alloc> {
1980    fn store_symbol_with_context(
1981        &mut self,
1982        symbol: usize,
1983        context: usize,
1984        context_map: &[u32],
1985        storage_ix: &mut usize,
1986        storage: &mut [u8],
1987        context_bits: usize,
1988    ) {
1989        if self.block_len_ == 0 {
1990            let block_ix: usize = {
1991                self.block_ix_ = self.block_ix_.wrapping_add(1);
1992                self.block_ix_
1993            };
1994            let block_len: u32 = self.block_lengths_[block_ix];
1995            let block_type: u8 = self.block_types_[block_ix];
1996            self.block_len_ = block_len as usize;
1997            self.entropy_ix_ = (block_type as usize) << context_bits;
1998            StoreBlockSwitch(
1999                &mut self.block_split_code_,
2000                block_len,
2001                block_type,
2002                false,
2003                storage_ix,
2004                storage,
2005            );
2006        }
2007        self.block_len_ = self.block_len_.wrapping_sub(1);
2008        {
2009            let histo_ix: usize = context_map[self.entropy_ix_.wrapping_add(context)] as usize;
2010            let ix: usize = histo_ix
2011                .wrapping_mul(self.histogram_length_)
2012                .wrapping_add(symbol);
2013            BrotliWriteBits(
2014                self.depths_.slice()[ix],
2015                self.bits_.slice()[ix] as (u64),
2016                storage_ix,
2017                storage,
2018            );
2019        }
2020    }
2021}
2022
2023impl<Alloc: Allocator<u8> + Allocator<u16>> BlockEncoder<'_, Alloc> {
2024    fn cleanup(&mut self, m: &mut Alloc) {
2025        <Alloc as Allocator<u8>>::free_cell(m, core::mem::take(&mut self.depths_));
2026        <Alloc as Allocator<u16>>::free_cell(m, core::mem::take(&mut self.bits_));
2027    }
2028}
2029
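// Rounds the bit position up to the next byte boundary and clears the byte it
// now points at, so later writes OR into zeroed storage.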
2030pub fn JumpToByteBoundary(storage_ix: &mut usize, storage: &mut [u8]) {
2031    *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
2032    storage[(*storage_ix >> 3)] = 0u8;
2033}
2034
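// Writes a complete compressed meta-block: the header, the three block-split
// codes (literals, insert&copy commands, distances), the distance parameters,
// the literal context modes, the literal and distance context maps, the
// per-histogram entropy codes, and finally the command/literal/distance
// symbol stream itself.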
2035pub(crate) fn store_meta_block<Alloc: BrotliAlloc, Cb>(
2036    alloc: &mut Alloc,
2037    input: &[u8],
2038    start_pos: usize,
2039    length: usize,
2040    mask: usize,
2041    mut prev_byte: u8,
2042    mut prev_byte2: u8,
2043    is_last: bool,
2044    params: &BrotliEncoderParams,
2045    literal_context_mode: ContextType,
2046    distance_cache: &[i32; kNumDistanceCacheEntries],
2047    commands: &[Command],
2048    n_commands: usize,
2049    mb: &mut MetaBlockSplit<Alloc>,
2050    recoder_state: &mut RecoderState,
2051    storage_ix: &mut usize,
2052    storage: &mut [u8],
2053    callback: &mut Cb,
2054) where
2055    Cb: FnMut(
2056        &mut interface::PredictionModeContextMap<InputReferenceMut>,
2057        &mut [interface::StaticCommand],
2058        InputPair,
2059        &mut Alloc,
2060    ),
2061{
2062    let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
2063    if params.log_meta_block {
2064        LogMetaBlock(
2065            alloc,
2066            commands.split_at(n_commands).0,
2067            input0,
2068            input1,
2069            distance_cache,
2070            recoder_state,
2071            block_split_reference(mb),
2072            params,
2073            Some(literal_context_mode),
2074            callback,
2075        );
2076    }
2077    let mut pos: usize = start_pos;
2078    let num_distance_symbols = params.dist.alphabet_size;
2079    let mut num_effective_distance_symbols = num_distance_symbols as usize;
2080    let _literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode);
2081    let mut literal_enc: BlockEncoder<Alloc>;
2082    let mut command_enc: BlockEncoder<Alloc>;
2083    let mut distance_enc: BlockEncoder<Alloc>;
2084    let dist = &params.dist;
2085    if params.large_window && num_effective_distance_symbols > BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS
2086    {
2087        num_effective_distance_symbols = BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS;
2088    }
2089    StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
2090    let mut tree = allocate::<HuffmanTree, _>(alloc, 2 * 704 + 1);
2091    literal_enc = BlockEncoder::new(
2092        BROTLI_NUM_LITERAL_SYMBOLS,
2093        mb.literal_split.num_types,
2094        mb.literal_split.types.slice(),
2095        mb.literal_split.lengths.slice(),
2096        mb.literal_split.num_blocks,
2097    );
2098    command_enc = BlockEncoder::new(
2099        BROTLI_NUM_COMMAND_SYMBOLS,
2100        mb.command_split.num_types,
2101        mb.command_split.types.slice(),
2102        mb.command_split.lengths.slice(),
2103        mb.command_split.num_blocks,
2104    );
2105    distance_enc = BlockEncoder::new(
2106        num_effective_distance_symbols,
2107        mb.distance_split.num_types,
2108        mb.distance_split.types.slice(),
2109        mb.distance_split.lengths.slice(),
2110        mb.distance_split.num_blocks,
2111    );
2112    literal_enc.build_and_store_block_switch_entropy_codes(tree.slice_mut(), storage_ix, storage);
2113    command_enc.build_and_store_block_switch_entropy_codes(tree.slice_mut(), storage_ix, storage);
2114    distance_enc.build_and_store_block_switch_entropy_codes(tree.slice_mut(), storage_ix, storage);
2115    BrotliWriteBits(2, dist.distance_postfix_bits as (u64), storage_ix, storage);
2116    BrotliWriteBits(
2117        4,
2118        (dist.num_direct_distance_codes >> dist.distance_postfix_bits) as (u64),
2119        storage_ix,
2120        storage,
2121    );
2122    for _i in 0usize..mb.literal_split.num_types {
2123        BrotliWriteBits(2, literal_context_mode as (u64), storage_ix, storage);
2124    }
2125    if mb.literal_context_map_size == 0usize {
2126        StoreTrivialContextMap(
2127            mb.literal_histograms_size,
2128            6,
2129            tree.slice_mut(),
2130            storage_ix,
2131            storage,
2132        );
2133    } else {
2134        EncodeContextMap(
2135            alloc,
2136            mb.literal_context_map.slice(),
2137            mb.literal_context_map_size,
2138            mb.literal_histograms_size,
2139            tree.slice_mut(),
2140            storage_ix,
2141            storage,
2142        );
2143    }
2144    if mb.distance_context_map_size == 0usize {
2145        StoreTrivialContextMap(
2146            mb.distance_histograms_size,
2147            2usize,
2148            tree.slice_mut(),
2149            storage_ix,
2150            storage,
2151        );
2152    } else {
2153        EncodeContextMap(
2154            alloc,
2155            mb.distance_context_map.slice(),
2156            mb.distance_context_map_size,
2157            mb.distance_histograms_size,
2158            tree.slice_mut(),
2159            storage_ix,
2160            storage,
2161        );
2162    }
2163    literal_enc.build_and_store_entropy_codes(
2164        alloc,
2165        mb.literal_histograms.slice(),
2166        mb.literal_histograms_size,
2167        BROTLI_NUM_LITERAL_SYMBOLS,
2168        tree.slice_mut(),
2169        storage_ix,
2170        storage,
2171    );
2172    command_enc.build_and_store_entropy_codes(
2173        alloc,
2174        mb.command_histograms.slice(),
2175        mb.command_histograms_size,
2176        BROTLI_NUM_COMMAND_SYMBOLS,
2177        tree.slice_mut(),
2178        storage_ix,
2179        storage,
2180    );
2181    distance_enc.build_and_store_entropy_codes(
2182        alloc,
2183        mb.distance_histograms.slice(),
2184        mb.distance_histograms_size,
2185        num_distance_symbols as usize,
2186        tree.slice_mut(),
2187        storage_ix,
2188        storage,
2189    );
2190    {
2191        <Alloc as Allocator<HuffmanTree>>::free_cell(alloc, core::mem::take(&mut tree));
2192    }
2193    for i in 0usize..n_commands {
2194        let cmd: Command = commands[i];
2195        let cmd_code: usize = cmd.cmd_prefix_ as usize;
2196        command_enc.store_symbol(cmd_code, storage_ix, storage);
2197        StoreCommandExtra(&cmd, storage_ix, storage);
2198        if mb.literal_context_map_size == 0usize {
2199            let mut j: usize;
2200            j = cmd.insert_len_ as usize;
2201            while j != 0usize {
2202                {
2203                    literal_enc.store_symbol(input[(pos & mask)] as usize, storage_ix, storage);
2204                    pos = pos.wrapping_add(1);
2205                }
2206                j = j.wrapping_sub(1);
2207            }
2208        } else {
2209            let mut j: usize;
2210            j = cmd.insert_len_ as usize;
2211            while j != 0usize {
2212                {
2213                    let context: usize =
2214                        Context(prev_byte, prev_byte2, literal_context_mode) as usize;
2215                    let literal: u8 = input[(pos & mask)];
2216                    literal_enc.store_symbol_with_context(
2217                        literal as usize,
2218                        context,
2219                        mb.literal_context_map.slice(),
2220                        storage_ix,
2221                        storage,
2222                        6usize,
2223                    );
2224                    prev_byte2 = prev_byte;
2225                    prev_byte = literal;
2226                    pos = pos.wrapping_add(1);
2227                }
2228                j = j.wrapping_sub(1);
2229            }
2230        }
2231        pos = pos.wrapping_add(cmd.copy_len() as usize);
2232        if cmd.copy_len() != 0 {
2233            prev_byte2 = input[(pos.wrapping_sub(2) & mask)];
2234            prev_byte = input[(pos.wrapping_sub(1) & mask)];
2235            if cmd.cmd_prefix_ as i32 >= 128i32 {
2236                let dist_code: usize = cmd.dist_prefix_ as usize & 0x03ff;
2237                let distnumextra: u32 = u32::from(cmd.dist_prefix_) >> 10; //FIXME: from command
2238                let distextra: u64 = cmd.dist_extra_ as (u64);
2239                if mb.distance_context_map_size == 0usize {
2240                    distance_enc.store_symbol(dist_code, storage_ix, storage);
2241                } else {
2242                    distance_enc.store_symbol_with_context(
2243                        dist_code,
2244                        cmd.distance_context() as usize,
2245                        mb.distance_context_map.slice(),
2246                        storage_ix,
2247                        storage,
2248                        2usize,
2249                    );
2250                }
2251                BrotliWriteBits(distnumextra as u8, distextra, storage_ix, storage);
2252            }
2253        }
2254    }
2255    distance_enc.cleanup(alloc);
2256    command_enc.cleanup(alloc);
2257    literal_enc.cleanup(alloc);
2258    if is_last {
2259        JumpToByteBoundary(storage_ix, storage);
2260    }
2261}
2262
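// Single pass over the commands to collect the literal, command and distance
// histograms used by the trivial and fast meta-block writers below.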
2263fn BuildHistograms(
2264    input: &[u8],
2265    start_pos: usize,
2266    mask: usize,
2267    commands: &[Command],
2268    n_commands: usize,
2269    lit_histo: &mut HistogramLiteral,
2270    cmd_histo: &mut HistogramCommand,
2271    dist_histo: &mut HistogramDistance,
2272) {
2273    let mut pos: usize = start_pos;
2274    for i in 0usize..n_commands {
2275        let cmd: Command = commands[i];
2276        let mut j: usize;
2277        HistogramAddItem(cmd_histo, cmd.cmd_prefix_ as usize);
2278        j = cmd.insert_len_ as usize;
2279        while j != 0usize {
2280            {
2281                HistogramAddItem(lit_histo, input[(pos & mask)] as usize);
2282                pos = pos.wrapping_add(1);
2283            }
2284            j = j.wrapping_sub(1);
2285        }
2286        pos = pos.wrapping_add(cmd.copy_len() as usize);
2287        if cmd.copy_len() != 0 && cmd.cmd_prefix_ >= 128 {
2288            HistogramAddItem(dist_histo, cmd.dist_prefix_ as usize & 0x03ff);
2289        }
2290    }
2291}
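
// Emits the command stream using one prefix code per category: for each
// command its code and extra bits, the inserted literals, and, when the
// command does not imply the "last distance" (cmd_prefix_ >= 128), the
// distance code and its extra bits.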
2292fn StoreDataWithHuffmanCodes(
2293    input: &[u8],
2294    start_pos: usize,
2295    mask: usize,
2296    commands: &[Command],
2297    n_commands: usize,
2298    lit_depth: &[u8],
2299    lit_bits: &[u16],
2300    cmd_depth: &[u8],
2301    cmd_bits: &[u16],
2302    dist_depth: &[u8],
2303    dist_bits: &[u16],
2304    storage_ix: &mut usize,
2305    storage: &mut [u8],
2306) {
2307    let mut pos: usize = start_pos;
2308    for i in 0usize..n_commands {
2309        let cmd: Command = commands[i];
2310        let cmd_code: usize = cmd.cmd_prefix_ as usize;
2311        let mut j: usize;
2312        BrotliWriteBits(
2313            cmd_depth[cmd_code],
2314            cmd_bits[cmd_code] as (u64),
2315            storage_ix,
2316            storage,
2317        );
2318        StoreCommandExtra(&cmd, storage_ix, storage);
2319        j = cmd.insert_len_ as usize;
2320        while j != 0usize {
2321            {
2322                let literal: u8 = input[(pos & mask)];
2323                BrotliWriteBits(
2324                    lit_depth[(literal as usize)],
2325                    lit_bits[(literal as usize)] as (u64),
2326                    storage_ix,
2327                    storage,
2328                );
2329                pos = pos.wrapping_add(1);
2330            }
2331            j = j.wrapping_sub(1);
2332        }
2333        pos = pos.wrapping_add(cmd.copy_len() as usize);
2334        if cmd.copy_len() != 0 && cmd.cmd_prefix_ >= 128 {
2335            let dist_code: usize = cmd.dist_prefix_ as usize & 0x03ff;
2336            let distnumextra: u32 = u32::from(cmd.dist_prefix_) >> 10;
2337            let distextra: u32 = cmd.dist_extra_;
2338            BrotliWriteBits(
2339                dist_depth[dist_code],
2340                dist_bits[dist_code] as (u64),
2341                storage_ix,
2342                storage,
2343            );
2344            BrotliWriteBits(distnumextra as u8, distextra as (u64), storage_ix, storage);
2345        }
2346    }
2347}
2348
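// Meta-block writer with no block splits and no context maps: the 13 zero
// bits written below encode exactly that (three single-type block splits,
// NPOSTFIX and NDIRECT of zero, one LSB6 literal context mode, and a single
// literal and distance tree), after which one Huffman code per category is
// stored.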
2349pub(crate) fn store_meta_block_trivial<Alloc: BrotliAlloc, Cb>(
2350    alloc: &mut Alloc,
2351    input: &[u8],
2352    start_pos: usize,
2353    length: usize,
2354    mask: usize,
2355    is_last: bool,
2356    params: &BrotliEncoderParams,
2357    distance_cache: &[i32; kNumDistanceCacheEntries],
2358    commands: &[Command],
2359    n_commands: usize,
2360    recoder_state: &mut RecoderState,
2361    storage_ix: &mut usize,
2362    storage: &mut [u8],
2363    f: &mut Cb,
2364) where
2365    Cb: FnMut(
2366        &mut interface::PredictionModeContextMap<InputReferenceMut>,
2367        &mut [interface::StaticCommand],
2368        InputPair,
2369        &mut Alloc,
2370    ),
2371{
2372    let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
2373    if params.log_meta_block {
2374        LogMetaBlock(
2375            alloc,
2376            commands.split_at(n_commands).0,
2377            input0,
2378            input1,
2379            distance_cache,
2380            recoder_state,
2381            block_split_nop(),
2382            params,
2383            Some(ContextType::CONTEXT_LSB6),
2384            f,
2385        );
2386    }
2387    let mut lit_histo: HistogramLiteral = HistogramLiteral::default();
2388    let mut cmd_histo: HistogramCommand = HistogramCommand::default();
2389    let mut dist_histo: HistogramDistance = HistogramDistance::default();
2390    let mut lit_depth: [u8; 256] = [0; 256];
2391    let mut lit_bits: [u16; 256] = [0; 256];
2392    let mut cmd_depth: [u8; 704] = [0; 704];
2393    let mut cmd_bits: [u16; 704] = [0; 704];
2394    let mut dist_depth: [u8; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
2395        [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
2396    let mut dist_bits: [u16; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
2397        [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
2398    const MAX_HUFFMAN_TREE_SIZE: usize = (2i32 * 704i32 + 1i32) as usize;
2399    let mut tree: [HuffmanTree; MAX_HUFFMAN_TREE_SIZE] = [HuffmanTree {
2400        total_count_: 0,
2401        index_left_: 0,
2402        index_right_or_value_: 0,
2403    }; MAX_HUFFMAN_TREE_SIZE];
2404    let num_distance_symbols = params.dist.alphabet_size;
2405    StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
2406    BuildHistograms(
2407        input,
2408        start_pos,
2409        mask,
2410        commands,
2411        n_commands,
2412        &mut lit_histo,
2413        &mut cmd_histo,
2414        &mut dist_histo,
2415    );
2416    BrotliWriteBits(13, 0, storage_ix, storage);
2417    BuildAndStoreHuffmanTree(
2418        lit_histo.slice_mut(),
2419        BROTLI_NUM_LITERAL_SYMBOLS,
2420        BROTLI_NUM_LITERAL_SYMBOLS,
2421        &mut tree[..],
2422        &mut lit_depth[..],
2423        &mut lit_bits[..],
2424        storage_ix,
2425        storage,
2426    );
2427    BuildAndStoreHuffmanTree(
2428        cmd_histo.slice_mut(),
2429        BROTLI_NUM_COMMAND_SYMBOLS,
2430        BROTLI_NUM_COMMAND_SYMBOLS,
2431        &mut tree[..],
2432        &mut cmd_depth[..],
2433        &mut cmd_bits[..],
2434        storage_ix,
2435        storage,
2436    );
2437    BuildAndStoreHuffmanTree(
2438        dist_histo.slice_mut(),
2439        MAX_SIMPLE_DISTANCE_ALPHABET_SIZE,
2440        num_distance_symbols as usize,
2441        &mut tree[..],
2442        &mut dist_depth[..],
2443        &mut dist_bits[..],
2444        storage_ix,
2445        storage,
2446    );
2447    StoreDataWithHuffmanCodes(
2448        input,
2449        start_pos,
2450        mask,
2451        commands,
2452        n_commands,
2453        &mut lit_depth[..],
2454        &mut lit_bits[..],
2455        &mut cmd_depth[..],
2456        &mut cmd_bits[..],
2457        &mut dist_depth[..],
2458        &mut dist_bits[..],
2459        storage_ix,
2460        storage,
2461    );
2462    if is_last {
2463        JumpToByteBoundary(storage_ix, storage);
2464    }
2465}
2466
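// Hard-coded descriptions of the static command and distance prefix codes
// used by the fast path below; the bit patterns are presumably generated
// offline from the same tables as kStaticCommandCodeDepth/Bits and
// kStaticDistanceCodeDepth/Bits.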
2467fn StoreStaticCommandHuffmanTree(storage_ix: &mut usize, storage: &mut [u8]) {
2468    BrotliWriteBits(56, 0x0092_6244_1630_7003, storage_ix, storage);
2469    BrotliWriteBits(3, 0, storage_ix, storage);
2470}
2471
2472fn StoreStaticDistanceHuffmanTree(storage_ix: &mut usize, storage: &mut [u8]) {
2473    BrotliWriteBits(28, 0x0369_dc03, storage_ix, storage);
2474}
2475
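// Borrowed views over the block splits and context maps of a MetaBlockSplit,
// consumed only by the LogMetaBlock callback path; block_split_nop() supplies
// empty views for writers that use no block splits.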
2476struct BlockSplitRef<'a> {
2477    types: &'a [u8],
2478    lengths: &'a [u32],
2479    num_types: u32,
2480}
2481
2482impl<'a> Default for BlockSplitRef<'a> {
2483    fn default() -> Self {
2484        BlockSplitRef {
2485            types: &[],
2486            lengths: &[],
2487            num_types: 1,
2488        }
2489    }
2490}
2491
2492#[derive(Default)]
2493struct MetaBlockSplitRefs<'a> {
2494    btypel: BlockSplitRef<'a>,
2495    literal_context_map: &'a [u32],
2496    btypec: BlockSplitRef<'a>,
2497    btyped: BlockSplitRef<'a>,
2498    distance_context_map: &'a [u32],
2499}
2500
2501fn block_split_nop() -> MetaBlockSplitRefs<'static> {
2502    MetaBlockSplitRefs::default()
2503}
2504
2505fn block_split_reference<'a, Alloc: BrotliAlloc>(
2506    mb: &'a MetaBlockSplit<Alloc>,
2507) -> MetaBlockSplitRefs<'a> {
2508    return MetaBlockSplitRefs::<'a> {
2509        btypel: BlockSplitRef {
2510            types: mb
2511                .literal_split
2512                .types
2513                .slice()
2514                .split_at(mb.literal_split.num_blocks)
2515                .0,
2516            lengths: mb
2517                .literal_split
2518                .lengths
2519                .slice()
2520                .split_at(mb.literal_split.num_blocks)
2521                .0,
2522            num_types: mb.literal_split.num_types as u32,
2523        },
2524        literal_context_map: mb
2525            .literal_context_map
2526            .slice()
2527            .split_at(mb.literal_context_map_size)
2528            .0,
2529        btypec: BlockSplitRef {
2530            types: mb
2531                .command_split
2532                .types
2533                .slice()
2534                .split_at(mb.command_split.num_blocks)
2535                .0,
2536            lengths: mb
2537                .command_split
2538                .lengths
2539                .slice()
2540                .split_at(mb.command_split.num_blocks)
2541                .0,
2542            num_types: mb.command_split.num_types as u32,
2543        },
2544        btyped: BlockSplitRef {
2545            types: mb
2546                .distance_split
2547                .types
2548                .slice()
2549                .split_at(mb.distance_split.num_blocks)
2550                .0,
2551            lengths: mb
2552                .distance_split
2553                .lengths
2554                .slice()
2555                .split_at(mb.distance_split.num_blocks)
2556                .0,
2557            num_types: mb.distance_split.num_types as u32,
2558        },
2559        distance_context_map: mb
2560            .distance_context_map
2561            .slice()
2562            .split_at(mb.distance_context_map_size)
2563            .0,
2564    };
2565}
2566
2567#[derive(Clone, Copy, Default)]
2568pub struct RecoderState {
2569    pub num_bytes_encoded: usize,
2570}
2571
2572impl RecoderState {
2573    pub fn new() -> Self {
2574        Self::default()
2575    }
2576}
2577
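// Fast meta-block writer: with 128 or fewer commands it builds only a literal
// code and falls back to the static command and distance codes above;
// otherwise it builds fast (approximate) Huffman codes for all three
// categories from full histograms.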
2578pub(crate) fn store_meta_block_fast<Cb, Alloc: BrotliAlloc>(
2579    m: &mut Alloc,
2580    input: &[u8],
2581    start_pos: usize,
2582    length: usize,
2583    mask: usize,
2584    is_last: bool,
2585    params: &BrotliEncoderParams,
2586    dist_cache: &[i32; kNumDistanceCacheEntries],
2587    commands: &[Command],
2588    n_commands: usize,
2589    recoder_state: &mut RecoderState,
2590    storage_ix: &mut usize,
2591    storage: &mut [u8],
2592    cb: &mut Cb,
2593) where
2594    Cb: FnMut(
2595        &mut interface::PredictionModeContextMap<InputReferenceMut>,
2596        &mut [StaticCommand],
2597        InputPair,
2598        &mut Alloc,
2599    ),
2600{
2601    let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
2602    if params.log_meta_block {
2603        LogMetaBlock(
2604            m,
2605            commands.split_at(n_commands).0,
2606            input0,
2607            input1,
2608            dist_cache,
2609            recoder_state,
2610            block_split_nop(),
2611            params,
2612            Some(ContextType::CONTEXT_LSB6),
2613            cb,
2614        );
2615    }
2616    let num_distance_symbols = params.dist.alphabet_size;
2617    let distance_alphabet_bits = Log2FloorNonZero(u64::from(num_distance_symbols) - 1) + 1;
2618    StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
2619    BrotliWriteBits(13, 0, storage_ix, storage);
2620    if n_commands <= 128usize {
2621        let mut histogram: [u32; 256] = [0; 256];
2622        let mut pos: usize = start_pos;
2623        let mut num_literals: usize = 0usize;
2624        let mut lit_depth: [u8; 256] = [0; 256];
2625        let mut lit_bits: [u16; 256] = [0; 256];
2626        for i in 0usize..n_commands {
2627            let cmd: Command = commands[i];
2628            let mut j: usize;
2629            j = cmd.insert_len_ as usize;
2630            while j != 0usize {
2631                {
2632                    {
2633                        let _rhs = 1;
2634                        let _lhs = &mut histogram[input[(pos & mask)] as usize];
2635                        *_lhs = (*_lhs).wrapping_add(_rhs as u32);
2636                    }
2637                    pos = pos.wrapping_add(1);
2638                }
2639                j = j.wrapping_sub(1);
2640            }
2641            num_literals = num_literals.wrapping_add(cmd.insert_len_ as usize);
2642            pos = pos.wrapping_add(cmd.copy_len() as usize);
2643        }
2644        BrotliBuildAndStoreHuffmanTreeFast(
2645            m,
2646            &mut histogram[..],
2647            num_literals,
2648            8usize,
2649            &mut lit_depth[..],
2650            &mut lit_bits[..],
2651            storage_ix,
2652            storage,
2653        );
2654        StoreStaticCommandHuffmanTree(storage_ix, storage);
2655        StoreStaticDistanceHuffmanTree(storage_ix, storage);
2656        StoreDataWithHuffmanCodes(
2657            input,
2658            start_pos,
2659            mask,
2660            commands,
2661            n_commands,
2662            &mut lit_depth[..],
2663            &mut lit_bits[..],
2664            &kStaticCommandCodeDepth[..],
2665            &kStaticCommandCodeBits[..],
2666            &kStaticDistanceCodeDepth[..],
2667            &kStaticDistanceCodeBits[..],
2668            storage_ix,
2669            storage,
2670        );
2671    } else {
2672        let mut lit_histo: HistogramLiteral = HistogramLiteral::default();
2673        let mut cmd_histo: HistogramCommand = HistogramCommand::default();
2674        let mut dist_histo: HistogramDistance = HistogramDistance::default();
2675        let mut lit_depth: [u8; 256] = [0; 256];
2676        let mut lit_bits: [u16; 256] = [0; 256];
2677        let mut cmd_depth: [u8; 704] = [0; 704];
2678        let mut cmd_bits: [u16; 704] = [0; 704];
2679        let mut dist_depth: [u8; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
2680            [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
2681        let mut dist_bits: [u16; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
2682            [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
2683        BuildHistograms(
2684            input,
2685            start_pos,
2686            mask,
2687            commands,
2688            n_commands,
2689            &mut lit_histo,
2690            &mut cmd_histo,
2691            &mut dist_histo,
2692        );
2693        BrotliBuildAndStoreHuffmanTreeFast(
2694            m,
2695            lit_histo.slice(),
2696            lit_histo.total_count_,
2697            8usize,
2698            &mut lit_depth[..],
2699            &mut lit_bits[..],
2700            storage_ix,
2701            storage,
2702        );
2703        BrotliBuildAndStoreHuffmanTreeFast(
2704            m,
2705            cmd_histo.slice(),
2706            cmd_histo.total_count_,
2707            10usize,
2708            &mut cmd_depth[..],
2709            &mut cmd_bits[..],
2710            storage_ix,
2711            storage,
2712        );
2713        BrotliBuildAndStoreHuffmanTreeFast(
2714            m,
2715            dist_histo.slice(),
2716            dist_histo.total_count_,
2717            distance_alphabet_bits as usize,
2718            &mut dist_depth[..],
2719            &mut dist_bits[..],
2720            storage_ix,
2721            storage,
2722        );
2723        StoreDataWithHuffmanCodes(
2724            input,
2725            start_pos,
2726            mask,
2727            commands,
2728            n_commands,
2729            &mut lit_depth[..],
2730            &mut lit_bits[..],
2731            &mut cmd_depth[..],
2732            &mut cmd_bits[..],
2733            &mut dist_depth[..],
2734            &mut dist_bits[..],
2735            storage_ix,
2736            storage,
2737        );
2738    }
2739    if is_last {
2740        JumpToByteBoundary(storage_ix, storage);
2741    }
2742}
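
// Header of an uncompressed meta-block: ISLAST = 0, the MNIBBLES/MLEN - 1
// fields produced by BrotliEncodeMlen, then ISUNCOMPRESSED = 1; the payload
// bytes are copied in, byte-aligned, by the caller.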
2743fn BrotliStoreUncompressedMetaBlockHeader(
2744    length: usize,
2745    storage_ix: &mut usize,
2746    storage: &mut [u8],
2747) {
2748    let mut lenbits: u64 = 0;
2749    let mut nlenbits: u32 = 0;
2750    let mut nibblesbits: u32 = 0;
2751    BrotliWriteBits(1, 0, storage_ix, storage);
2752    BrotliEncodeMlen(length as u32, &mut lenbits, &mut nlenbits, &mut nibblesbits);
2753    BrotliWriteBits(2, nibblesbits as u64, storage_ix, storage);
2754    BrotliWriteBits(nlenbits as u8, lenbits, storage_ix, storage);
2755    BrotliWriteBits(1, 1, storage_ix, storage);
2756}
2757
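// Splits `len` bytes starting at the masked `position` of the ring buffer into
// at most two contiguous slices (the second is non-empty only when the range
// wraps around).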
2758fn InputPairFromMaskedInput(
2759    input: &[u8],
2760    position: usize,
2761    len: usize,
2762    mask: usize,
2763) -> (&[u8], &[u8]) {
2764    let masked_pos: usize = position & mask;
2765    if masked_pos.wrapping_add(len) > mask.wrapping_add(1) {
2766        let len1: usize = mask.wrapping_add(1).wrapping_sub(masked_pos);
2767        return (
2768            &input[masked_pos..(masked_pos + len1)],
2769            &input[0..len.wrapping_sub(len1)],
2770        );
2771    }
2772    (&input[masked_pos..masked_pos + len], &[])
2773}
2774
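// Copies `len` input bytes verbatim after an uncompressed meta-block header.
// Because an uncompressed meta-block cannot itself be marked last, the final
// block is followed by an empty last meta-block.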
2775pub(crate) fn store_uncompressed_meta_block<Cb, Alloc: BrotliAlloc>(
2776    alloc: &mut Alloc,
2777    is_final_block: bool,
2778    input: &[u8],
2779    position: usize,
2780    mask: usize,
2781    params: &BrotliEncoderParams,
2782    len: usize,
2783    recoder_state: &mut RecoderState,
2784    storage_ix: &mut usize,
2785    storage: &mut [u8],
2786    suppress_meta_block_logging: bool,
2787    cb: &mut Cb,
2788) where
2789    Cb: FnMut(
2790        &mut interface::PredictionModeContextMap<InputReferenceMut>,
2791        &mut [StaticCommand],
2792        InputPair,
2793        &mut Alloc,
2794    ),
2795{
2796    let (input0, input1) = InputPairFromMaskedInput(input, position, len, mask);
2797    BrotliStoreUncompressedMetaBlockHeader(len, storage_ix, storage);
2798    JumpToByteBoundary(storage_ix, storage);
2799    let dst_start0 = (*storage_ix >> 3);
2800    storage[dst_start0..(dst_start0 + input0.len())].clone_from_slice(input0);
2801    *storage_ix = storage_ix.wrapping_add(input0.len() << 3);
2802    let dst_start1 = (*storage_ix >> 3);
2803    storage[dst_start1..(dst_start1 + input1.len())].clone_from_slice(input1);
2804    *storage_ix = storage_ix.wrapping_add(input1.len() << 3);
2805    BrotliWriteBitsPrepareStorage(*storage_ix, storage);
2806    if params.log_meta_block && !suppress_meta_block_logging {
2807        let cmds = [Command {
2808            insert_len_: len as u32,
2809            copy_len_: 0,
2810            dist_extra_: 0,
2811            cmd_prefix_: 0,
2812            dist_prefix_: 0,
2813        }];
2814
2815        LogMetaBlock(
2816            alloc,
2817            &cmds,
2818            input0,
2819            input1,
2820            &[0, 0, 0, 0],
2821            recoder_state,
2822            block_split_nop(),
2823            params,
2824            None,
2825            cb,
2826        );
2827    }
2828    if is_final_block {
2829        BrotliWriteBits(1u8, 1u64, storage_ix, storage);
2830        BrotliWriteBits(1u8, 1u64, storage_ix, storage);
2831        JumpToByteBoundary(storage_ix, storage);
2832    }
2833}
2834
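// Writes the 6-bit value 6 (ISLAST = 0, the MNIBBLES = 0 marker, a reserved 0
// bit and MSKIPBYTES = 0), i.e. an empty metadata meta-block, then pads to a
// byte boundary; used to flush the stream.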
2835pub fn BrotliStoreSyncMetaBlock(storage_ix: &mut usize, storage: &mut [u8]) {
2836    BrotliWriteBits(6, 6, storage_ix, storage);
2837    JumpToByteBoundary(storage_ix, storage);
2838}
2839
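// ISLAST = 1 and ISLASTEMPTY = 1: terminates the stream with an empty
// meta-block and pads to a byte boundary.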
2840pub fn BrotliWriteEmptyLastMetaBlock(storage_ix: &mut usize, storage: &mut [u8]) {
2841    BrotliWriteBits(1u8, 1u64, storage_ix, storage);
2842    BrotliWriteBits(1u8, 1u64, storage_ix, storage);
2843    JumpToByteBoundary(storage_ix, storage);
2844}
2845
2846const MAX_SIZE_ENCODING: usize = 10;
2847
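// Little-endian base-128 varint (LEB128-style): seven payload bits per byte,
// with the high bit set on every byte except the last. For example, 300
// (0x12C) encodes to the two bytes [0xAC, 0x02].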
2848fn encode_base_128(mut value: u64) -> (usize, [u8; MAX_SIZE_ENCODING]) {
2849    let mut ret = [0u8; MAX_SIZE_ENCODING];
2850    for index in 0..ret.len() {
2851        ret[index] = (value & 0x7f) as u8;
2852        value >>= 7;
2853        if value != 0 {
2854            ret[index] |= 0x80;
2855        } else {
2856            return (index + 1, ret);
2857        }
2858    }
2859    (ret.len(), ret)
2860}
2861
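// Writes a metadata meta-block (skipped by decoders) carrying a three-byte
// magic number, the encoder VERSION byte and the base-128 encoded size hint;
// the skip length written below therefore covers 4 + size_hint_count bytes.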
2862pub fn BrotliWriteMetadataMetaBlock(
2863    params: &BrotliEncoderParams,
2864    storage_ix: &mut usize,
2865    storage: &mut [u8],
2866) {
2867    BrotliWriteBits(1u8, 0u64, storage_ix, storage); // not last
2868    BrotliWriteBits(2u8, 3u64, storage_ix, storage); // MNIBBLES = 0 (pattern 1,1)
2869    BrotliWriteBits(1u8, 0u64, storage_ix, storage); // reserved
2870    BrotliWriteBits(2u8, 1u64, storage_ix, storage); // MSKIPBYTES = 1: one byte follows for the metadata length
2871    let (size_hint_count, size_hint_b128) = encode_base_128(params.size_hint as u64);
2872
2873    BrotliWriteBits(8u8, 3 + size_hint_count as u64, storage_ix, storage); // MSKIPLEN - 1: the skipped payload is 3 magic bytes + 1 version byte + size_hint_count bytes
2874    JumpToByteBoundary(storage_ix, storage);
2875    let magic_number: [u8; 3] = if params.catable && !params.use_dictionary {
2876        [0xe1, 0x97, 0x81]
2877    } else if params.appendable {
2878        [0xe1, 0x97, 0x82]
2879    } else {
2880        [0xe1, 0x97, 0x80]
2881    };
2882    for magic in magic_number.iter() {
2883        BrotliWriteBits(8u8, u64::from(*magic), storage_ix, storage);
2884    }
2885    BrotliWriteBits(8u8, u64::from(VERSION), storage_ix, storage);
2886    for sh in size_hint_b128[..size_hint_count].iter() {
2887        BrotliWriteBits(8u8, u64::from(*sh), storage_ix, storage);
2888    }
2889}
2890
2891#[cfg(test)]
2892mod test {
2893    use crate::enc::brotli_bit_stream::{encode_base_128, MAX_SIZE_ENCODING};
2894
2895    #[test]
2896    fn test_encode_base_128() {
2897        assert_eq!(encode_base_128(0), (1, [0u8; MAX_SIZE_ENCODING]));
2898        assert_eq!(encode_base_128(1), (1, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0]));
2899        assert_eq!(encode_base_128(127), (1, [0x7f, 0, 0, 0, 0, 0, 0, 0, 0, 0]));
2900        assert_eq!(
2901            encode_base_128(128),
2902            (2, [0x80, 0x1, 0, 0, 0, 0, 0, 0, 0, 0])
2903        );
2904        assert_eq!(
2905            encode_base_128(16383),
2906            (2, [0xff, 0x7f, 0, 0, 0, 0, 0, 0, 0, 0])
2907        );
2908        assert_eq!(
2909            encode_base_128(16384),
2910            (3, [0x80, 0x80, 0x1, 0, 0, 0, 0, 0, 0, 0])
2911        );
2912        assert_eq!(
2913            encode_base_128(2097151),
2914            (3, [0xff, 0xff, 0x7f, 0, 0, 0, 0, 0, 0, 0])
2915        );
2916        assert_eq!(
2917            encode_base_128(2097152),
2918            (4, [0x80, 0x80, 0x80, 0x1, 0, 0, 0, 0, 0, 0])
2919        );
2920        assert_eq!(
2921            encode_base_128(4194303),
2922            (4, [0xff, 0xff, 0xff, 0x1, 0, 0, 0, 0, 0, 0])
2923        );
2924        assert_eq!(
2925            encode_base_128(4294967295),
2926            (5, [0xff, 0xff, 0xff, 0xff, 0xf, 0, 0, 0, 0, 0])
2927        );
2928        assert_eq!(
2929            encode_base_128(4294967296),
2930            (5, [0x80, 0x80, 0x80, 0x80, 0x10, 0, 0, 0, 0, 0])
2931        );
2932        assert_eq!(
2933            encode_base_128(9223372036854775808),
2934            (
2935                10,
2936                [0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x1]
2937            )
2938        );
2939        assert_eq!(
2940            encode_base_128(18446744073709551615),
2941            (
2942                10,
2943                [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1]
2944            )
2945        );
2946    }
2947}