brotli/enc/encode.rs

1#![allow(dead_code)]
2use super::backward_references::{
3    AdvHashSpecialization, AdvHasher, AnyHasher, BasicHasher, BrotliCreateBackwardReferences,
4    BrotliEncoderMode, BrotliEncoderParams, BrotliHasherParams, H2Sub, H3Sub, H4Sub, H54Sub, H5Sub,
5    H6Sub, HQ5Sub, HQ7Sub, HowPrepared, StoreLookaheadThenStore, Struct1, UnionHasher, H9,
6    H9_BLOCK_BITS, H9_BLOCK_SIZE, H9_BUCKET_BITS, H9_NUM_LAST_DISTANCES_TO_CHECK,
7};
8use super::bit_cost::{BitsEntropy, ShannonEntropy};
9#[allow(unused_imports)]
10use super::block_split::BlockSplit;
11#[allow(unused_imports)]
12use super::brotli_bit_stream::{
13    BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree, BrotliStoreMetaBlock,
14    BrotliStoreMetaBlockFast, BrotliStoreMetaBlockTrivial, BrotliStoreUncompressedMetaBlock,
15    BrotliWriteEmptyLastMetaBlock, BrotliWriteMetadataMetaBlock, JumpToByteBoundary,
16    MetaBlockSplit, RecoderState,
17};
18use super::combined_alloc::BrotliAlloc;
19use super::constants::{
20    BROTLI_CONTEXT, BROTLI_CONTEXT_LUT, BROTLI_MAX_NDIRECT, BROTLI_MAX_NPOSTFIX,
21    BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS, BROTLI_WINDOW_GAP,
22};
23use super::hash_to_binary_tree::InitializeH10;
24use super::interface;
25pub use super::parameters::BrotliEncoderParameter;
26use alloc::Allocator;
27
28use super::super::alloc::{SliceWrapper, SliceWrapperMut};
29use super::command::{BrotliDistanceParams, Command, GetLengthCode};
30use super::compress_fragment::BrotliCompressFragmentFast;
31use super::compress_fragment_two_pass::{BrotliCompressFragmentTwoPass, BrotliWriteBits};
32#[allow(unused_imports)]
33use super::entropy_encode::{
34    BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree,
35};
36use super::histogram::{
37    ContextType, CostAccessors, HistogramCommand, HistogramDistance, HistogramLiteral,
38};
39use super::metablock::{
40    BrotliBuildMetaBlock, BrotliBuildMetaBlockGreedy, BrotliInitDistanceParams,
41    BrotliOptimizeHistograms,
42};
43use super::static_dict::{kNumDistanceCacheEntries, BrotliGetDictionary};
44use super::utf8_util::BrotliIsMostlyUTF8;
45use super::util::Log2FloorNonZero;
46use core::cmp::{max, min};
47use enc::input_pair::InputReferenceMut;
48//fn BrotliCreateHqZopfliBackwardReferences(m: &mut [MemoryManager],
49//                                          dictionary: &[BrotliDictionary],
50//                                          num_bytes: usize,
51//                                          position: usize,
52//                                          ringbuffer: &[u8],
53//                                          ringbuffer_mask: usize,
54//                                          params: &[BrotliEncoderParams],
55//                                          hasher: &mut [u8],
56//                                          dist_cache: &mut [i32],
57//                                          last_insert_len: &mut [usize],
58//                                          commands: &mut [Command],
59//                                          num_commands: &mut [usize],
60//                                          num_literals: &mut [usize]);
61//fn BrotliCreateZopfliBackwardReferences(m: &mut [MemoryManager],
62//                                       dictionary: &[BrotliDictionary],
63//                                      num_bytes: usize,
64//                                        position: usize,
65//                                        ringbuffer: &[u8],
66//                                        ringbuffer_mask: usize,
67//                                        params: &[BrotliEncoderParams],
68//                                        hasher: &mut [u8],
69//                                        dist_cache: &mut [i32],
70//                                        last_insert_len: &mut [usize],
71//                                        commands: &mut [Command],
72//                                        num_commands: &mut [usize],
73//                                        num_literals: &mut [usize]);
74//fn BrotliInitBlockSplit(xself: &mut BlockSplit);
75//fn BrotliInitMemoryManager(m: &mut [MemoryManager],
76//                           alloc_func: fn(&mut [::std::os::raw::c_void], usize)
77//                                          -> *mut ::std::os::raw::c_void,
78//                           free_func: fn(*mut ::std::os::raw::c_void,
79//                                         *mut ::std::os::raw::c_void),
80//                           opaque: *mut ::std::os::raw::c_void);
81//fn BrotliInitZopfliNodes(array: &mut [ZopfliNode], length: usize);
82//fn BrotliWipeOutMemoryManager(m: &mut [MemoryManager]);
83
84static kCompressFragmentTwoPassBlockSize: usize = (1i32 << 17) as usize;
85
86static kMinUTF8Ratio: super::util::floatX = 0.75 as super::util::floatX;
87
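/// Ring buffer holding the most recent input. `data_mo` is laid out as two
/// prefix bytes (mirrors of the last two bytes of the window, maintained by
/// `RingBufferWrite`), then `cur_size_` data bytes starting at `buffer_index`,
/// followed by a few slack bytes so eight-byte hashing may read past the end.
/// `size_` is the window size, `tail_size_` the extra tail used for
/// wrap-around copies, and `mask_` maps a position into the window.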
88pub struct RingBuffer<AllocU8: alloc::Allocator<u8>> {
89    pub size_: u32,
90    pub mask_: u32,
91    pub tail_size_: u32,
92    pub total_size_: u32,
93    pub cur_size_: u32,
94    pub pos_: u32,
95    pub data_mo: AllocU8::AllocatedMemory,
96    pub buffer_index: usize,
97}
98
99#[derive(PartialEq, Eq, Copy, Clone)]
100#[repr(i32)]
101pub enum BrotliEncoderStreamState {
102    BROTLI_STREAM_PROCESSING = 0,
103    BROTLI_STREAM_FLUSH_REQUESTED = 1,
104    BROTLI_STREAM_FINISHED = 2,
105    BROTLI_STREAM_METADATA_HEAD = 3,
106    BROTLI_STREAM_METADATA_BODY = 4,
107}
108
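// NextOut records where the next output byte goes as an offset into one of the
// encoder's own buffers (the dynamic `storage_` or the 16-byte `tiny_buf_`)
// instead of the raw pointer used by the C encoder; `GetNextOutInternal`
// resolves it to a mutable slice on demand.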
109#[derive(Clone, Copy, Debug)]
110enum NextOut {
111    DynamicStorage(u32),
112    TinyBuf(u32),
113    None,
114}
115fn GetNextOutInternal<'a>(
116    next_out: &NextOut,
117    storage: &'a mut [u8],
118    tiny_buf: &'a mut [u8; 16],
119) -> &'a mut [u8] {
120    match next_out {
121        &NextOut::DynamicStorage(offset) => &mut storage[offset as usize..],
122        &NextOut::TinyBuf(offset) => &mut tiny_buf[offset as usize..],
123        &NextOut::None => &mut [],
124    }
125}
126macro_rules! GetNextOut {
127    ($s : expr) => {
128        GetNextOutInternal(&$s.next_out_, $s.storage_.slice_mut(), &mut $s.tiny_buf_)
129    };
130}
131fn NextOutIncrement(next_out: &NextOut, inc: i32) -> NextOut {
132    match next_out {
133        &NextOut::DynamicStorage(offset) => NextOut::DynamicStorage((offset as i32 + inc) as u32),
134        &NextOut::TinyBuf(offset) => NextOut::TinyBuf((offset as i32 + inc) as u32),
135        &NextOut::None => NextOut::None,
136    }
137}
138fn IsNextOutNull(next_out: &NextOut) -> bool {
139    match next_out {
140        &NextOut::DynamicStorage(_) => false,
141        &NextOut::TinyBuf(_) => false,
142        &NextOut::None => true,
143    }
144}
145
146#[derive(Clone, Copy, Debug)]
147pub enum IsFirst {
148    NothingWritten,
149    HeaderWritten,
150    FirstCatableByteWritten,
151    BothCatableBytesWritten,
152}
153
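/// Streaming encoder state: the active parameters, the hasher used for
/// backward-reference search, the input ring buffer, the commands accumulated
/// for the current metablock, the distance cache, and the output staging area
/// (`storage_`/`tiny_buf_`, addressed through `next_out_`).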
154pub struct BrotliEncoderStateStruct<Alloc: BrotliAlloc> {
155    pub params: BrotliEncoderParams,
156    pub m8: Alloc,
157    pub hasher_: UnionHasher<Alloc>,
158    pub input_pos_: u64,
159    pub ringbuffer_: RingBuffer<Alloc>,
160    pub cmd_alloc_size_: usize,
161    pub commands_: <Alloc as Allocator<Command>>::AllocatedMemory, // not sure about this one
162    pub num_commands_: usize,
163    pub num_literals_: usize,
164    pub last_insert_len_: usize,
165    pub last_flush_pos_: u64,
166    pub last_processed_pos_: u64,
167    pub dist_cache_: [i32; 16],
168    pub saved_dist_cache_: [i32; kNumDistanceCacheEntries],
169    pub last_bytes_: u16,
170    pub last_bytes_bits_: u8,
171    pub prev_byte_: u8,
172    pub prev_byte2_: u8,
173    pub storage_size_: usize,
174    pub storage_: <Alloc as Allocator<u8>>::AllocatedMemory,
175    pub small_table_: [i32; 1024],
176    pub large_table_: <Alloc as Allocator<i32>>::AllocatedMemory,
177    //  pub large_table_size_: usize, // <-- get this by doing large_table_.len()
178    pub cmd_depths_: [u8; 128],
179    pub cmd_bits_: [u16; 128],
180    pub cmd_code_: [u8; 512],
181    pub cmd_code_numbits_: usize,
182    pub command_buf_: <Alloc as Allocator<u32>>::AllocatedMemory,
183    pub literal_buf_: <Alloc as Allocator<u8>>::AllocatedMemory,
184    next_out_: NextOut,
185    pub available_out_: usize,
186    pub total_out_: u64,
187    pub tiny_buf_: [u8; 16],
188    pub remaining_metadata_bytes_: u32,
189    pub stream_state_: BrotliEncoderStreamState,
190    pub is_last_block_emitted_: bool,
191    pub is_initialized_: bool,
192    pub is_first_mb: IsFirst,
193    pub literal_scratch_space: <HistogramLiteral as CostAccessors>::i32vec,
194    pub command_scratch_space: <HistogramCommand as CostAccessors>::i32vec,
195    pub distance_scratch_space: <HistogramDistance as CostAccessors>::i32vec,
196    pub recoder_state: RecoderState,
197    custom_dictionary: bool,
198}
199
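/// Applies a single `BrotliEncoderParameter` to `params`, returning `false`
/// for unknown parameters or rejected values.
///
/// Minimal usage sketch (illustrative only, not compiled as a doctest):
/// ```ignore
/// let mut params = BrotliEncoderInitParams();
/// assert!(set_parameter(&mut params, BrotliEncoderParameter::BROTLI_PARAM_QUALITY, 9));
/// assert!(set_parameter(&mut params, BrotliEncoderParameter::BROTLI_PARAM_LGWIN, 22));
/// ```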
200pub fn set_parameter(
201    params: &mut BrotliEncoderParams,
202    p: BrotliEncoderParameter,
203    value: u32,
204) -> bool {
205    use crate::enc::parameters::BrotliEncoderParameter::*;
206    match p {
207        BROTLI_PARAM_MODE => {
208            params.mode = match value {
209                0 => BrotliEncoderMode::BROTLI_MODE_GENERIC,
210                1 => BrotliEncoderMode::BROTLI_MODE_TEXT,
211                2 => BrotliEncoderMode::BROTLI_MODE_FONT,
212                3 => BrotliEncoderMode::BROTLI_FORCE_LSB_PRIOR,
213                4 => BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR,
214                5 => BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR,
215                6 => BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR,
216                _ => BrotliEncoderMode::BROTLI_MODE_GENERIC,
217            };
218        }
219        BROTLI_PARAM_QUALITY => params.quality = value as i32,
220        BROTLI_PARAM_STRIDE_DETECTION_QUALITY => params.stride_detection_quality = value as u8,
221        BROTLI_PARAM_HIGH_ENTROPY_DETECTION_QUALITY => {
222            params.high_entropy_detection_quality = value as u8
223        }
224        BROTLI_PARAM_CDF_ADAPTATION_DETECTION => params.cdf_adaptation_detection = value as u8,
225        BROTLI_PARAM_Q9_5 => params.q9_5 = (value != 0),
226        BROTLI_PARAM_PRIOR_BITMASK_DETECTION => params.prior_bitmask_detection = value as u8,
227        BROTLI_PARAM_SPEED => {
228            params.literal_adaptation[1].0 = value as u16;
229            if params.literal_adaptation[0] == (0, 0) {
230                params.literal_adaptation[0].0 = value as u16;
231            }
232        }
233        BROTLI_PARAM_SPEED_MAX => {
234            params.literal_adaptation[1].1 = value as u16;
235            if params.literal_adaptation[0].1 == 0 {
236                params.literal_adaptation[0].1 = value as u16;
237            }
238        }
239        BROTLI_PARAM_CM_SPEED => {
240            params.literal_adaptation[3].0 = value as u16;
241            if params.literal_adaptation[2] == (0, 0) {
242                params.literal_adaptation[2].0 = value as u16;
243            }
244        }
245        BROTLI_PARAM_CM_SPEED_MAX => {
246            params.literal_adaptation[3].1 = value as u16;
247            if params.literal_adaptation[2].1 == 0 {
248                params.literal_adaptation[2].1 = value as u16;
249            }
250        }
251        BROTLI_PARAM_SPEED_LOW => params.literal_adaptation[0].0 = value as u16,
252        BROTLI_PARAM_SPEED_LOW_MAX => params.literal_adaptation[0].1 = value as u16,
253        BROTLI_PARAM_CM_SPEED_LOW => params.literal_adaptation[2].0 = value as u16,
254        BROTLI_PARAM_CM_SPEED_LOW_MAX => params.literal_adaptation[2].1 = value as u16,
255        BROTLI_PARAM_LITERAL_BYTE_SCORE => params.hasher.literal_byte_score = value as i32,
256        BROTLI_METABLOCK_CALLBACK => params.log_meta_block = value != 0,
257        BROTLI_PARAM_LGWIN => params.lgwin = value as i32,
258        BROTLI_PARAM_LGBLOCK => params.lgblock = value as i32,
259        BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING => {
260            if value != 0 && value != 1 {
261                return false;
262            }
263            params.disable_literal_context_modeling = if value != 0 { 1 } else { 0 };
264        }
265        BROTLI_PARAM_SIZE_HINT => params.size_hint = value as usize,
266        BROTLI_PARAM_LARGE_WINDOW => params.large_window = value != 0,
267        BROTLI_PARAM_AVOID_DISTANCE_PREFIX_SEARCH => {
268            params.avoid_distance_prefix_search = value != 0
269        }
270        BROTLI_PARAM_CATABLE => {
271            params.catable = value != 0;
272            if !params.appendable {
273                params.appendable = value != 0;
274            }
            // enabling catable output disables use of the builtin dictionary
            params.use_dictionary = value == 0;
276        }
277        BROTLI_PARAM_APPENDABLE => params.appendable = value != 0,
278        BROTLI_PARAM_MAGIC_NUMBER => params.magic_number = value != 0,
279        BROTLI_PARAM_FAVOR_EFFICIENCY => params.favor_cpu_efficiency = value != 0,
280        _ => return false,
281    }
282    true
283}
284
285impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
286    pub fn set_parameter(&mut self, p: BrotliEncoderParameter, value: u32) -> bool {
287        if self.is_initialized_ {
288            false
289        } else {
290            set_parameter(&mut self.params, p, value)
291        }
292    }
293}
294
295/* "Large Window Brotli" */
296pub const BROTLI_LARGE_MAX_DISTANCE_BITS: u32 = 62;
297pub const BROTLI_LARGE_MIN_WBITS: u32 = 10;
298pub const BROTLI_LARGE_MAX_WBITS: u32 = 30;
299
300pub const BROTLI_MAX_DISTANCE_BITS: u32 = 24;
301pub const BROTLI_MAX_WINDOW_BITS: usize = BROTLI_MAX_DISTANCE_BITS as usize;
302pub const BROTLI_MAX_DISTANCE: usize = 0x03ff_fffc;
303pub const BROTLI_MAX_ALLOWED_DISTANCE: usize = 0x07ff_fffc;
304pub const BROTLI_NUM_DISTANCE_SHORT_CODES: u32 = 16;
305pub fn BROTLI_DISTANCE_ALPHABET_SIZE(NPOSTFIX: u32, NDIRECT: u32, MAXNBITS: u32) -> u32 {
306    BROTLI_NUM_DISTANCE_SHORT_CODES + (NDIRECT) + ((MAXNBITS) << ((NPOSTFIX) + 1))
307}
308
309//#define BROTLI_NUM_DISTANCE_SYMBOLS \
310//    BROTLI_DISTANCE_ALPHABET_SIZE(  \
311//        BROTLI_MAX_NDIRECT, BROTLI_MAX_NPOSTFIX, BROTLI_LARGE_MAX_DISTANCE_BITS)
312
313pub const BROTLI_NUM_DISTANCE_SYMBOLS: usize = 1128;
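// Sanity check of the formula above: with the defaults used in
// BrotliEncoderInitParams (NPOSTFIX = 0, NDIRECT = 0, MAXNBITS = 24) it yields
// 16 + 0 + (24 << 1) = 64, and with the maxima (NPOSTFIX = 3, NDIRECT = 120,
// MAXNBITS = BROTLI_LARGE_MAX_DISTANCE_BITS = 62) it yields
// 16 + 120 + (62 << 4) = 1128, matching BROTLI_NUM_DISTANCE_SYMBOLS.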
314
315pub fn BrotliEncoderInitParams() -> BrotliEncoderParams {
316    BrotliEncoderParams {
317        dist: BrotliDistanceParams {
318            distance_postfix_bits: 0,
319            num_direct_distance_codes: 0,
320            alphabet_size: BROTLI_DISTANCE_ALPHABET_SIZE(0, 0, BROTLI_MAX_DISTANCE_BITS),
321            max_distance: BROTLI_MAX_DISTANCE,
322        },
323        mode: BrotliEncoderMode::BROTLI_MODE_GENERIC,
324        log_meta_block: false,
325        large_window: false,
326        avoid_distance_prefix_search: false,
327        quality: 11,
328        q9_5: false,
329        lgwin: 22i32,
330        lgblock: 0i32,
331        size_hint: 0usize,
332        disable_literal_context_modeling: 0i32,
333        stride_detection_quality: 0,
334        high_entropy_detection_quality: 0,
335        cdf_adaptation_detection: 0,
336        prior_bitmask_detection: 0,
337        literal_adaptation: [(0, 0); 4],
338        catable: false,
339        use_dictionary: true,
340        appendable: false,
341        magic_number: false,
342        favor_cpu_efficiency: false,
343        hasher: BrotliHasherParams {
344            type_: 6,
345            block_bits: 9 - 1,
346            bucket_bits: 15,
347            hash_len: 5,
348            num_last_distances_to_check: 16,
349            literal_byte_score: 0,
350        },
351    }
352}
353
354impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
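    /// Tries to grow the most recent copy command: while the next input bytes
    /// still match at the same backward distance (and that distance is
    /// reachable), the copy length is extended and the bytes consumed, after
    /// which the command's length code is recomputed.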
355    fn extend_last_command(&mut self, bytes: &mut u32, wrapped_last_processed_pos: &mut u32) {
356        let last_command = &mut self.commands_.slice_mut()[self.num_commands_ - 1];
357
358        let mask = self.ringbuffer_.mask_;
359        let max_backward_distance: u64 = (1u64 << self.params.lgwin) - BROTLI_WINDOW_GAP as u64;
360        let last_copy_len = u64::from(last_command.copy_len_) & 0x01ff_ffff;
361        let last_processed_pos: u64 = self.last_processed_pos_ - last_copy_len;
362        let max_distance: u64 = if last_processed_pos < max_backward_distance {
363            last_processed_pos
364        } else {
365            max_backward_distance
366        };
367        let cmd_dist: u64 = self.dist_cache_[0] as u64;
368        let distance_code: u32 = last_command.restore_distance_code(&self.params.dist);
        if distance_code < BROTLI_NUM_DISTANCE_SHORT_CODES
            || distance_code as u64 - (BROTLI_NUM_DISTANCE_SHORT_CODES - 1) as u64 == cmd_dist
        {
            if cmd_dist <= max_distance {
373                while (*bytes != 0
374                    && self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index
375                        + (*wrapped_last_processed_pos as usize & mask as usize)]
376                        == self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index
377                            + (((*wrapped_last_processed_pos as usize)
378                                .wrapping_sub(cmd_dist as usize))
379                                & mask as usize)])
380                {
381                    last_command.copy_len_ += 1;
382                    (*bytes) -= 1;
383                    (*wrapped_last_processed_pos) += 1;
384                }
385            }
386            /* The copy length is at most the metablock size, and thus expressible. */
387            GetLengthCode(
388                last_command.insert_len_ as usize,
389                ((last_command.copy_len_ & 0x01ff_ffff) as i32
390                    + (last_command.copy_len_ >> 25) as i32) as usize,
391                ((last_command.dist_prefix_ & 0x03ff) == 0) as i32,
392                &mut last_command.cmd_prefix_,
393            );
394        }
395    }
396}
397
398fn RingBufferInit<AllocU8: alloc::Allocator<u8>>() -> RingBuffer<AllocU8> {
399    RingBuffer {
400        size_: 0,
401        mask_: 0, // 0xff??
402        tail_size_: 0,
403        total_size_: 0,
404
405        cur_size_: 0,
406        pos_: 0,
407        data_mo: AllocU8::AllocatedMemory::default(),
408        buffer_index: 0usize,
409    }
410}
411
412impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
413    pub fn new(m8: Alloc) -> Self {
414        let cache: [i32; 16] = [4, 11, 15, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
415        Self {
416            params: BrotliEncoderInitParams(),
417            input_pos_: 0,
418            num_commands_: 0,
419            num_literals_: 0,
420            last_insert_len_: 0,
421            last_flush_pos_: 0,
422            last_processed_pos_: 0,
423            prev_byte_: 0,
424            prev_byte2_: 0,
425            storage_size_: 0,
426            storage_: <Alloc as Allocator<u8>>::AllocatedMemory::default(),
427            hasher_: UnionHasher::<Alloc>::default(),
428            large_table_: <Alloc as Allocator<i32>>::AllocatedMemory::default(),
429            //    large_table_size_: 0,
430            cmd_code_numbits_: 0,
431            command_buf_: <Alloc as Allocator<u32>>::AllocatedMemory::default(),
432            literal_buf_: <Alloc as Allocator<u8>>::AllocatedMemory::default(),
433            next_out_: NextOut::None,
434            available_out_: 0,
435            total_out_: 0,
436            is_first_mb: IsFirst::NothingWritten,
437            stream_state_: BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING,
438            is_last_block_emitted_: false,
439            is_initialized_: false,
440            ringbuffer_: RingBufferInit(),
441            commands_: <Alloc as Allocator<Command>>::AllocatedMemory::default(),
442            cmd_alloc_size_: 0,
443            dist_cache_: cache,
444            saved_dist_cache_: [cache[0], cache[1], cache[2], cache[3]],
445            cmd_bits_: [0; 128],
446            cmd_depths_: [0; 128],
447            last_bytes_: 0,
448            last_bytes_bits_: 0,
449            cmd_code_: [0; 512],
450            m8,
451            remaining_metadata_bytes_: 0,
452            small_table_: [0; 1024],
453            tiny_buf_: [0; 16],
454            literal_scratch_space: HistogramLiteral::make_nnz_storage(),
455            command_scratch_space: HistogramCommand::make_nnz_storage(),
456            distance_scratch_space: HistogramDistance::make_nnz_storage(),
457            recoder_state: RecoderState::new(),
458            custom_dictionary: false,
459        }
460    }
461}
462
463fn RingBufferFree<AllocU8: alloc::Allocator<u8>>(m: &mut AllocU8, rb: &mut RingBuffer<AllocU8>) {
464    m.free_cell(core::mem::take(&mut rb.data_mo));
465}
466fn DestroyHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
467    m16: &mut Alloc,
468    handle: &mut UnionHasher<Alloc>,
469) {
470    handle.free(m16);
471}
472/*
473fn DestroyHasher<AllocU16:alloc::Allocator<u16>, AllocU32:alloc::Allocator<u32>>(
474m16: &mut AllocU16, m32:&mut AllocU32, handle: &mut UnionHasher<AllocU16, AllocU32>){
475  match handle {
476    &mut UnionHasher::H2(ref mut hasher) => {
477        m32.free_cell(core::mem::replace(&mut hasher.buckets_.buckets_, <Alloc as Allocator<u32>>::AllocatedMemory::default()));
478    }
479    &mut UnionHasher::H3(ref mut hasher) => {
480        m32.free_cell(core::mem::replace(&mut hasher.buckets_.buckets_, <Alloc as Allocator<u32>>::AllocatedMemory::default()));
481    }
482    &mut UnionHasher::H4(ref mut hasher) => {
483        m32.free_cell(core::mem::replace(&mut hasher.buckets_.buckets_, <Alloc as Allocator<u32>>::AllocatedMemory::default()));
484    }
485    &mut UnionHasher::H54(ref mut hasher) => {
486        m32.free_cell(core::mem::replace(&mut hasher.buckets_.buckets_, <Alloc as Allocator<u32>>::AllocatedMemory::default()));
487    }
488    &mut UnionHasher::H5(ref mut hasher) => {
489      m16.free_cell(core::mem::replace(&mut hasher.num, AllocU16::AllocatedMemory::default()));
490      m32.free_cell(core::mem::replace(&mut hasher.buckets, <Alloc as Allocator<u32>>::AllocatedMemory::default()));
491    }
492    &mut UnionHasher::H6(ref mut hasher) => {
493      m16.free_cell(core::mem::replace(&mut hasher.num, AllocU16::AllocatedMemory::default()));
494      m32.free_cell(core::mem::replace(&mut hasher.buckets, <Alloc as Allocator<u32>>::AllocatedMemory::default()));
495    }
496    &mut UnionHasher::H9(ref mut hasher) => {
497      m16.free_cell(core::mem::replace(&mut hasher.num_, AllocU16::AllocatedMemory::default()));
498      m32.free_cell(core::mem::replace(&mut hasher.buckets_, <Alloc as Allocator<u32>>::AllocatedMemory::default()));
499    }
500    _ => {}
501  }
502  *handle = UnionHasher::<AllocU16, AllocU32>::default();
503}
504*/
505
506impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
507    fn cleanup(&mut self) {
508        <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.storage_));
509        <Alloc as Allocator<Command>>::free_cell(
510            &mut self.m8,
511            core::mem::take(&mut self.commands_),
512        );
513        RingBufferFree(&mut self.m8, &mut self.ringbuffer_);
514        DestroyHasher(&mut self.m8, &mut self.hasher_);
515        <Alloc as Allocator<i32>>::free_cell(&mut self.m8, core::mem::take(&mut self.large_table_));
516        <Alloc as Allocator<u32>>::free_cell(&mut self.m8, core::mem::take(&mut self.command_buf_));
517        <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.literal_buf_));
518    }
519}
520
521// TODO: use drop trait instead
522// impl<Alloc: BrotliAlloc> Drop for BrotliEncoderStateStruct<Alloc> {
523//     fn drop(&mut self) {
524//         self.cleanup()
525//     }
526// }
527pub fn BrotliEncoderDestroyInstance<Alloc: BrotliAlloc>(s: &mut BrotliEncoderStateStruct<Alloc>) {
528    s.cleanup()
529}
530
531#[cfg(not(feature = "disallow_large_window_size"))]
532fn check_large_window_ok() -> bool {
533    true
534}
535#[cfg(feature = "disallow_large_window_size")]
536fn check_large_window_ok() -> bool {
537    false
538}
539
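/// Clamps user-supplied parameters to supported ranges: quality to 0..=11 and
/// lgwin to 10..=24 (or up to 30 when `large_window` is enabled and the build
/// allows it); `catable` output additionally forces `appendable`.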
540pub fn SanitizeParams(params: &mut BrotliEncoderParams) {
541    params.quality = min(11i32, max(0i32, params.quality));
542    if params.lgwin < 10i32 {
543        params.lgwin = 10i32;
544    } else if params.lgwin > 24i32 {
545        if params.large_window && check_large_window_ok() {
546            if params.lgwin > 30i32 {
547                params.lgwin = 30i32;
548            }
549        } else {
550            params.lgwin = 24i32;
551        }
552    }
553    if params.catable {
554        params.appendable = true;
555    }
556}
557
558fn ComputeLgBlock(params: &BrotliEncoderParams) -> i32 {
559    let mut lgblock: i32 = params.lgblock;
560    if params.quality == 0i32 || params.quality == 1i32 {
561        lgblock = params.lgwin;
562    } else if params.quality < 4i32 {
563        lgblock = 14i32;
564    } else if lgblock == 0i32 {
565        lgblock = 16i32;
566        if params.quality >= 9i32 && (params.lgwin > lgblock) {
567            lgblock = min(18i32, params.lgwin);
568        }
569    } else {
570        lgblock = min(24i32, max(16i32, lgblock));
571    }
572    lgblock
573}
574
575fn ComputeRbBits(params: &BrotliEncoderParams) -> i32 {
576    1i32 + max(params.lgwin, params.lgblock)
577}
578
579fn RingBufferSetup<AllocU8: alloc::Allocator<u8>>(
580    params: &BrotliEncoderParams,
581    rb: &mut RingBuffer<AllocU8>,
582) {
583    let window_bits: i32 = ComputeRbBits(params);
584    let tail_bits: i32 = params.lgblock;
585    rb.size_ = 1u32 << window_bits;
586    rb.mask_ = (1u32 << window_bits).wrapping_sub(1);
587    rb.tail_size_ = 1u32 << tail_bits;
588    rb.total_size_ = rb.size_.wrapping_add(rb.tail_size_);
589}
590
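/// Encodes the window-size field that opens the stream into `last_bytes` and
/// records how many of those bits are valid in `last_bytes_bits`: 1 bit for
/// lgwin 16, 7 bits for lgwin 17 or for 10..=15, 4 bits for 18..=24, and a
/// 14-bit form for the large-window extension.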
591fn EncodeWindowBits(
592    lgwin: i32,
593    large_window: bool,
594    last_bytes: &mut u16,
595    last_bytes_bits: &mut u8,
596) {
597    if large_window {
598        *last_bytes = (((lgwin & 0x3F) << 8) | 0x11) as u16;
599        *last_bytes_bits = 14;
600    } else if lgwin == 16i32 {
601        *last_bytes = 0u16;
602        *last_bytes_bits = 1u8;
603    } else if lgwin == 17i32 {
604        *last_bytes = 1u16;
605        *last_bytes_bits = 7u8;
606    } else if lgwin > 17i32 {
607        *last_bytes = ((lgwin - 17i32) << 1 | 1i32) as u16;
608        *last_bytes_bits = 4u8;
609    } else {
610        *last_bytes = ((lgwin - 8i32) << 4 | 1i32) as u16;
611        *last_bytes_bits = 7u8;
612    }
613}
614
615fn InitCommandPrefixCodes(
616    cmd_depths: &mut [u8],
617    cmd_bits: &mut [u16],
618    cmd_code: &mut [u8],
619    cmd_code_numbits: &mut usize,
620) {
621    static kDefaultCommandDepths: [u8; 128] = [
622        0, 4, 4, 5, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 0, 0, 0, 4, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6,
623        7, 7, 7, 7, 10, 10, 10, 10, 10, 10, 0, 4, 4, 5, 5, 5, 6, 6, 7, 8, 8, 9, 10, 10, 10, 10, 10,
624        10, 10, 10, 10, 10, 10, 10, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 6, 6, 6,
625        6, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 7, 7, 7, 8, 10, 12, 12,
626        12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 0, 0, 0, 0,
627    ];
628    static kDefaultCommandBits: [u16; 128] = [
629        0, 0, 8, 9, 3, 35, 7, 71, 39, 103, 23, 47, 175, 111, 239, 31, 0, 0, 0, 4, 12, 2, 10, 6, 13,
630        29, 11, 43, 27, 59, 87, 55, 15, 79, 319, 831, 191, 703, 447, 959, 0, 14, 1, 25, 5, 21, 19,
631        51, 119, 159, 95, 223, 479, 991, 63, 575, 127, 639, 383, 895, 255, 767, 511, 1023, 14, 0,
632        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 59, 7, 39, 23, 55, 30, 1, 17, 9, 25, 5, 0, 8,
633        4, 12, 2, 10, 6, 21, 13, 29, 3, 19, 11, 15, 47, 31, 95, 63, 127, 255, 767, 2815, 1791,
634        3839, 511, 2559, 1535, 3583, 1023, 3071, 2047, 4095, 0, 0, 0, 0,
635    ];
636    static kDefaultCommandCode: [u8; 57] = [
637        0xff, 0x77, 0xd5, 0xbf, 0xe7, 0xde, 0xea, 0x9e, 0x51, 0x5d, 0xde, 0xc6, 0x70, 0x57, 0xbc,
638        0x58, 0x58, 0x58, 0xd8, 0xd8, 0x58, 0xd5, 0xcb, 0x8c, 0xea, 0xe0, 0xc3, 0x87, 0x1f, 0x83,
639        0xc1, 0x60, 0x1c, 0x67, 0xb2, 0xaa, 0x6, 0x83, 0xc1, 0x60, 0x30, 0x18, 0xcc, 0xa1, 0xce,
640        0x88, 0x54, 0x94, 0x46, 0xe1, 0xb0, 0xd0, 0x4e, 0xb2, 0xf7, 0x4, 0x0,
641    ];
642    static kDefaultCommandCodeNumBits: usize = 448usize;
643    cmd_depths[..].clone_from_slice(&kDefaultCommandDepths[..]);
644    cmd_bits[..].clone_from_slice(&kDefaultCommandBits[..]);
645    cmd_code[..kDefaultCommandCode.len()].clone_from_slice(&kDefaultCommandCode[..]);
646    *cmd_code_numbits = kDefaultCommandCodeNumBits;
647}
648
649impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
650    fn ensure_initialized(&mut self) -> bool {
651        if self.is_initialized_ {
652            return true;
653        }
654        SanitizeParams(&mut self.params);
655        self.params.lgblock = ComputeLgBlock(&mut self.params);
656        ChooseDistanceParams(&mut self.params);
657        self.remaining_metadata_bytes_ = !(0u32);
658        RingBufferSetup(&mut self.params, &mut self.ringbuffer_);
659        {
660            let mut lgwin: i32 = self.params.lgwin;
661            if self.params.quality == 0i32 || self.params.quality == 1i32 {
662                lgwin = max(lgwin, 18i32);
663            }
664            EncodeWindowBits(
665                lgwin,
666                self.params.large_window,
667                &mut self.last_bytes_,
668                &mut self.last_bytes_bits_,
669            );
670        }
671        if self.params.quality == 0i32 {
672            InitCommandPrefixCodes(
673                &mut self.cmd_depths_[..],
674                &mut self.cmd_bits_[..],
675                &mut self.cmd_code_[..],
676                &mut self.cmd_code_numbits_,
677            );
678        }
679        if self.params.catable {
            // To concatenate streams properly we must ignore any cached distances.
            // The value 0x7ffffff0 was chosen to be larger than max_distance + gap,
            // yet small enough that +/-3 distance modifications cannot overflow.
683            for item in self.dist_cache_.iter_mut() {
684                *item = 0x7ffffff0;
685            }
686            for item in self.saved_dist_cache_.iter_mut() {
687                *item = 0x7ffffff0;
688            }
689        }
690        self.is_initialized_ = true;
691        true
692    }
693}
694
695fn RingBufferInitBuffer<AllocU8: alloc::Allocator<u8>>(
696    m: &mut AllocU8,
697    buflen: u32,
698    rb: &mut RingBuffer<AllocU8>,
699) {
700    static kSlackForEightByteHashingEverywhere: usize = 7usize;
701    let mut new_data = m.alloc_cell(
702        ((2u32).wrapping_add(buflen) as usize).wrapping_add(kSlackForEightByteHashingEverywhere),
703    );
704    if !rb.data_mo.slice().is_empty() {
705        let lim: usize = ((2u32).wrapping_add(rb.cur_size_) as usize)
706            .wrapping_add(kSlackForEightByteHashingEverywhere);
707        new_data.slice_mut()[..lim].clone_from_slice(&rb.data_mo.slice()[..lim]);
708        m.free_cell(core::mem::take(&mut rb.data_mo));
709    }
710    let _ = core::mem::replace(&mut rb.data_mo, new_data);
711    rb.cur_size_ = buflen;
712    rb.buffer_index = 2usize;
713    rb.data_mo.slice_mut()[(rb.buffer_index.wrapping_sub(2))] = 0;
714    rb.data_mo.slice_mut()[(rb.buffer_index.wrapping_sub(1))] = 0;
715    for i in 0usize..kSlackForEightByteHashingEverywhere {
716        rb.data_mo.slice_mut()[rb
717            .buffer_index
718            .wrapping_add(rb.cur_size_ as usize)
719            .wrapping_add(i)] = 0;
720    }
721}
722
723fn RingBufferWriteTail<AllocU8: alloc::Allocator<u8>>(
724    bytes: &[u8],
725    n: usize,
726    rb: &mut RingBuffer<AllocU8>,
727) {
728    let masked_pos: usize = (rb.pos_ & rb.mask_) as usize;
729    if masked_pos < rb.tail_size_ as usize {
730        let p: usize = (rb.size_ as usize).wrapping_add(masked_pos);
731        let begin = rb.buffer_index.wrapping_add(p);
732        let lim = min(n, (rb.tail_size_ as usize).wrapping_sub(masked_pos));
733        rb.data_mo.slice_mut()[begin..(begin + lim)].clone_from_slice(&bytes[..lim]);
734    }
735}
736
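/// Appends `n` bytes to the ring buffer, (re)allocating lazily. Data near the
/// wrap point is also written into the tail region (via `RingBufferWriteTail`)
/// so readers close to the boundary see a contiguous slice, and the last two
/// bytes of the window are mirrored into the two prefix slots just before
/// `buffer_index`.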
737fn RingBufferWrite<AllocU8: alloc::Allocator<u8>>(
738    m: &mut AllocU8,
739    bytes: &[u8],
740    n: usize,
741    rb: &mut RingBuffer<AllocU8>,
742) {
743    if rb.pos_ == 0u32 && (n < rb.tail_size_ as usize) {
744        rb.pos_ = n as u32;
745        RingBufferInitBuffer(m, rb.pos_, rb);
746        rb.data_mo.slice_mut()[rb.buffer_index..(rb.buffer_index + n)]
747            .clone_from_slice(&bytes[..n]);
748        return;
749    }
750    if rb.cur_size_ < rb.total_size_ {
751        RingBufferInitBuffer(m, rb.total_size_, rb);
        // Always-false OOM check from the C port removed: allocation failure
        // panics in this port, so no early return is needed here.
755        rb.data_mo.slice_mut()[rb
756            .buffer_index
757            .wrapping_add(rb.size_ as usize)
758            .wrapping_sub(2)] = 0u8;
759        rb.data_mo.slice_mut()[rb
760            .buffer_index
761            .wrapping_add(rb.size_ as usize)
762            .wrapping_sub(1)] = 0u8;
763    }
764    {
765        let masked_pos: usize = (rb.pos_ & rb.mask_) as usize;
766        RingBufferWriteTail(bytes, n, rb);
767        if masked_pos.wrapping_add(n) <= rb.size_ as usize {
768            // a single write fits
769            let start = rb.buffer_index.wrapping_add(masked_pos);
770            rb.data_mo.slice_mut()[start..(start + n)].clone_from_slice(&bytes[..n]);
771        } else {
772            {
773                let start = rb.buffer_index.wrapping_add(masked_pos);
774                let mid = min(n, (rb.total_size_ as usize).wrapping_sub(masked_pos));
775                rb.data_mo.slice_mut()[start..(start + mid)].clone_from_slice(&bytes[..mid]);
776            }
777            let xstart = rb.buffer_index.wrapping_add(0);
778            let size = n.wrapping_sub((rb.size_ as usize).wrapping_sub(masked_pos));
779            let bytes_start = (rb.size_ as usize).wrapping_sub(masked_pos);
780            rb.data_mo.slice_mut()[xstart..(xstart + size)]
781                .clone_from_slice(&bytes[bytes_start..(bytes_start + size)]);
782        }
783    }
784    let data_2 = rb.data_mo.slice()[rb
785        .buffer_index
786        .wrapping_add(rb.size_ as usize)
787        .wrapping_sub(2)];
788    rb.data_mo.slice_mut()[rb.buffer_index.wrapping_sub(2)] = data_2;
789    let data_1 = rb.data_mo.slice()[rb
790        .buffer_index
791        .wrapping_add(rb.size_ as usize)
792        .wrapping_sub(1)];
793    rb.data_mo.slice_mut()[rb.buffer_index.wrapping_sub(1)] = data_1;
794    rb.pos_ = rb.pos_.wrapping_add(n as u32);
795    if rb.pos_ > 1u32 << 30 {
796        rb.pos_ = rb.pos_ & (1u32 << 30).wrapping_sub(1) | 1u32 << 30;
797    }
798}
799
800impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
801    pub fn copy_input_to_ring_buffer(&mut self, input_size: usize, input_buffer: &[u8]) {
802        if !self.ensure_initialized() {
803            return;
804        }
805        RingBufferWrite(
806            &mut self.m8,
807            input_buffer,
808            input_size,
809            &mut self.ringbuffer_,
810        );
811        self.input_pos_ = self.input_pos_.wrapping_add(input_size as u64);
        if self.ringbuffer_.pos_ <= self.ringbuffer_.mask_ {
            let start = self
                .ringbuffer_
                .buffer_index
                .wrapping_add(self.ringbuffer_.pos_ as usize);
            for item in self.ringbuffer_.data_mo.slice_mut()[start..(start + 7)].iter_mut() {
817                *item = 0;
818            }
819        }
820    }
821}
822
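// Picks a hasher implementation based on quality, window size and size hint:
// H10 (binary tree) for the top qualities, H9 for quality 9 and the "9.5"
// mode, the simple H2-H4/H54 table hashers for the fastest qualities, and the
// bucketed H5/H6 hashers otherwise, with block/bucket sizes scaled to the
// quality and expected input size. (Types 40-42 chosen for small windows
// currently fall back to H6 in BrotliMakeHasher.)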
823fn ChooseHasher(params: &mut BrotliEncoderParams) {
824    let hparams = &mut params.hasher;
825    if params.quality >= 10 && !params.q9_5 {
826        hparams.type_ = 10;
827    } else if params.quality == 10 {
        // quality 10 with q9_5 set is treated as "quality 9.5" and uses the H9 hasher
829        hparams.type_ = 9;
830        hparams.num_last_distances_to_check = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
831        hparams.block_bits = H9_BLOCK_BITS as i32;
832        hparams.bucket_bits = H9_BUCKET_BITS as i32;
833        hparams.hash_len = 4;
834    } else if params.quality == 9 {
835        hparams.type_ = 9;
836        hparams.num_last_distances_to_check = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
837        hparams.block_bits = H9_BLOCK_BITS as i32;
838        hparams.bucket_bits = H9_BUCKET_BITS as i32;
839        hparams.hash_len = 4;
840    } else if params.quality == 4 && (params.size_hint >= (1i32 << 20) as usize) {
841        hparams.type_ = 54i32;
842    } else if params.quality < 5 {
843        hparams.type_ = params.quality;
844    } else if params.lgwin <= 16 {
845        hparams.type_ = if params.quality < 7 {
846            40i32
847        } else if params.quality < 9 {
848            41i32
849        } else {
850            42i32
851        };
852    } else if ((params.q9_5 && params.size_hint > (1 << 20)) || params.size_hint > (1 << 22))
853        && (params.lgwin >= 19i32)
854    {
855        hparams.type_ = 6i32;
856        hparams.block_bits = min(params.quality - 1, 9);
857        hparams.bucket_bits = 15i32;
858        hparams.hash_len = 5i32;
859        hparams.num_last_distances_to_check = if params.quality < 7 {
860            4i32
861        } else if params.quality < 9 {
862            10i32
863        } else {
864            16i32
865        };
866    } else {
867        hparams.type_ = 5i32;
868        hparams.block_bits = min(params.quality - 1, 9);
869        hparams.bucket_bits = if params.quality < 7 && params.size_hint <= (1 << 20) {
870            14i32
871        } else {
872            15i32
873        };
874        hparams.num_last_distances_to_check = if params.quality < 7 {
875            4i32
876        } else if params.quality < 9 {
877            10i32
878        } else {
879            16i32
880        };
881    }
882}
883
884fn InitializeH2<AllocU32: alloc::Allocator<u32>>(
885    m32: &mut AllocU32,
886    params: &BrotliEncoderParams,
887) -> BasicHasher<H2Sub<AllocU32>> {
888    BasicHasher {
889        GetHasherCommon: Struct1 {
890            params: params.hasher,
891            is_prepared_: 1,
892            dict_num_lookups: 0,
893            dict_num_matches: 0,
894        },
895        buckets_: H2Sub {
896            buckets_: m32.alloc_cell(65537 + 8),
897        },
898        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
899    }
900}
901fn InitializeH3<AllocU32: alloc::Allocator<u32>>(
902    m32: &mut AllocU32,
903    params: &BrotliEncoderParams,
904) -> BasicHasher<H3Sub<AllocU32>> {
905    BasicHasher {
906        GetHasherCommon: Struct1 {
907            params: params.hasher,
908            is_prepared_: 1,
909            dict_num_lookups: 0,
910            dict_num_matches: 0,
911        },
912        buckets_: H3Sub {
913            buckets_: m32.alloc_cell(65538 + 8),
914        },
915        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
916    }
917}
918fn InitializeH4<AllocU32: alloc::Allocator<u32>>(
919    m32: &mut AllocU32,
920    params: &BrotliEncoderParams,
921) -> BasicHasher<H4Sub<AllocU32>> {
922    BasicHasher {
923        GetHasherCommon: Struct1 {
924            params: params.hasher,
925            is_prepared_: 1,
926            dict_num_lookups: 0,
927            dict_num_matches: 0,
928        },
929        buckets_: H4Sub {
930            buckets_: m32.alloc_cell(131072 + 8),
931        },
932        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
933    }
934}
935fn InitializeH54<AllocU32: alloc::Allocator<u32>>(
936    m32: &mut AllocU32,
937    params: &BrotliEncoderParams,
938) -> BasicHasher<H54Sub<AllocU32>> {
939    BasicHasher {
940        GetHasherCommon: Struct1 {
941            params: params.hasher,
942            is_prepared_: 1,
943            dict_num_lookups: 0,
944            dict_num_matches: 0,
945        },
946        buckets_: H54Sub {
947            buckets_: m32.alloc_cell(1048580 + 8),
948        },
949        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
950    }
951}
952
953fn InitializeH9<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
954    m16: &mut Alloc,
955    params: &BrotliEncoderParams,
956) -> H9<Alloc> {
957    H9 {
958        dict_search_stats_: Struct1 {
959            params: params.hasher,
960            is_prepared_: 1,
961            dict_num_lookups: 0,
962            dict_num_matches: 0,
963        },
964        num_: <Alloc as Allocator<u16>>::alloc_cell(m16, 1 << H9_BUCKET_BITS),
965        buckets_: <Alloc as Allocator<u32>>::alloc_cell(m16, H9_BLOCK_SIZE << H9_BUCKET_BITS),
966        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
967    }
968}
969
970fn InitializeH5<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
971    m16: &mut Alloc,
972    params: &BrotliEncoderParams,
973) -> UnionHasher<Alloc> {
974    let block_size = 1u64 << params.hasher.block_bits;
975    let bucket_size = 1u64 << params.hasher.bucket_bits;
976    let buckets: <Alloc as Allocator<u32>>::AllocatedMemory =
977        <Alloc as Allocator<u32>>::alloc_cell(m16, (bucket_size * block_size) as usize);
978    let num: <Alloc as Allocator<u16>>::AllocatedMemory =
979        <Alloc as Allocator<u16>>::alloc_cell(m16, bucket_size as usize);
980
981    if params.hasher.block_bits == (HQ5Sub {}).block_bits()
982        && (1 << params.hasher.bucket_bits) == (HQ5Sub {}).bucket_size()
983    {
984        return UnionHasher::H5q5(AdvHasher {
985            buckets,
986            h9_opts: super::backward_references::H9Opts::new(&params.hasher),
987            num,
988            GetHasherCommon: Struct1 {
989                params: params.hasher,
990                is_prepared_: 1,
991                dict_num_lookups: 0,
992                dict_num_matches: 0,
993            },
994            specialization: HQ5Sub {},
995        });
996    }
997    if params.hasher.block_bits == (HQ7Sub {}).block_bits()
998        && (1 << params.hasher.bucket_bits) == (HQ7Sub {}).bucket_size()
999    {
1000        return UnionHasher::H5q7(AdvHasher {
1001            buckets,
1002            h9_opts: super::backward_references::H9Opts::new(&params.hasher),
1003            num,
1004            GetHasherCommon: Struct1 {
1005                params: params.hasher,
1006                is_prepared_: 1,
1007                dict_num_lookups: 0,
1008                dict_num_matches: 0,
1009            },
1010            specialization: HQ7Sub {},
1011        });
1012    }
1013    UnionHasher::H5(AdvHasher {
1014        buckets,
1015        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
1016        num,
1017        GetHasherCommon: Struct1 {
1018            params: params.hasher,
1019            is_prepared_: 1,
1020            dict_num_lookups: 0,
1021            dict_num_matches: 0,
1022        },
1023        specialization: H5Sub {
1024            hash_shift_: 32i32 - params.hasher.bucket_bits,
1025            bucket_size_: bucket_size as u32,
1026            block_bits_: params.hasher.block_bits,
1027            block_mask_: block_size.wrapping_sub(1) as u32,
1028        },
1029    })
1030}
1031fn InitializeH6<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1032    m16: &mut Alloc,
1033    params: &BrotliEncoderParams,
1034) -> UnionHasher<Alloc> {
1035    let block_size = 1u64 << params.hasher.block_bits;
1036    let bucket_size = 1u64 << params.hasher.bucket_bits;
1037    let buckets: <Alloc as Allocator<u32>>::AllocatedMemory =
1038        <Alloc as Allocator<u32>>::alloc_cell(m16, (bucket_size * block_size) as usize);
1039    let num: <Alloc as Allocator<u16>>::AllocatedMemory =
1040        <Alloc as Allocator<u16>>::alloc_cell(m16, bucket_size as usize);
1041    UnionHasher::H6(AdvHasher {
1042        buckets,
1043        num,
1044        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
1045        GetHasherCommon: Struct1 {
1046            params: params.hasher,
1047            is_prepared_: 1,
1048            dict_num_lookups: 0,
1049            dict_num_matches: 0,
1050        },
1051        specialization: H6Sub {
1052            bucket_size_: 1u32 << params.hasher.bucket_bits,
1053            block_bits_: params.hasher.block_bits,
1054            block_mask_: block_size.wrapping_sub(1) as u32,
1055            hash_mask: 0xffffffffffffffffu64 >> (64i32 - 8i32 * params.hasher.hash_len),
1056            hash_shift_: 64i32 - params.hasher.bucket_bits,
1057        },
1058    })
1059}
1060
1061fn BrotliMakeHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1062    m: &mut Alloc,
1063    params: &BrotliEncoderParams,
1064) -> UnionHasher<Alloc> {
1065    let hasher_type: i32 = params.hasher.type_;
1066    if hasher_type == 2i32 {
1067        return UnionHasher::H2(InitializeH2(m, params));
1068    }
1069    if hasher_type == 3i32 {
1070        return UnionHasher::H3(InitializeH3(m, params));
1071    }
1072    if hasher_type == 4i32 {
1073        return UnionHasher::H4(InitializeH4(m, params));
1074    }
1075    if hasher_type == 5i32 {
1076        return InitializeH5(m, params);
1077    }
1078    if hasher_type == 6i32 {
1079        return InitializeH6(m, params);
1080    }
1081    if hasher_type == 9i32 {
1082        return UnionHasher::H9(InitializeH9(m, params));
1083    }
1084    /*
1085        if hasher_type == 40i32 {
1086          return InitializeH40(params);
1087        }
1088        if hasher_type == 41i32 {
1089          return InitializeH41(params);
1090        }
1091        if hasher_type == 42i32 {
1092          return InitializeH42(params);
1093        }
1094    */
1095    if hasher_type == 54i32 {
1096        return UnionHasher::H54(InitializeH54(m, params));
1097    }
1098    if hasher_type == 10i32 {
1099        return UnionHasher::H10(InitializeH10(m, false, params, 0));
1100    }
1101    // since we don't support all of these, fall back to something sane
1102    InitializeH6(m, params)
1103
1104    //  return UnionHasher::Uninit;
1105}
1106fn HasherReset<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(t: &mut UnionHasher<Alloc>) {
1107    match t {
1108        &mut UnionHasher::Uninit => {}
1109        _ => (t.GetHasherCommon()).is_prepared_ = 0i32,
1110    };
1111}
1112fn GetHasherCommon<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1113    t: &mut UnionHasher<Alloc>,
1114) -> &mut Struct1 {
1115    t.GetHasherCommon()
1116}
1117
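// Ensures `handle` holds a usable hasher: on first use it chooses and builds
// one from `params` (see ChooseHasher/BrotliMakeHasher); otherwise it prepares
// the existing hasher for the new input, resetting the dictionary-search
// counters when starting from position 0.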
1118pub fn HasherSetup<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1119    m16: &mut Alloc,
1120    handle: &mut UnionHasher<Alloc>,
1121    params: &mut BrotliEncoderParams,
1122    data: &[u8],
1123    position: usize,
1124    input_size: usize,
1125    is_last: i32,
1126) {
1127    let one_shot: i32 = (position == 0usize && (is_last != 0)) as i32;
    let is_uninit = match handle {
1129        &mut UnionHasher::Uninit => true,
1130        _ => false,
1131    };
1132    if is_uninit {
1133        //let alloc_size: usize;
1134        ChooseHasher(&mut (*params));
1135        //alloc_size = HasherSize(params, one_shot, input_size);
1136        //xself = BrotliAllocate(m, alloc_size.wrapping_mul(::core::mem::size_of::<u8>()))
1137        *handle = BrotliMakeHasher(m16, params);
1138        handle.GetHasherCommon().params = params.hasher;
1139        HasherReset(handle); // this sets everything to zero, unlike in C
1140        handle.GetHasherCommon().is_prepared_ = 1;
1141    } else {
1142        match handle.Prepare(one_shot != 0, input_size, data) {
1143            HowPrepared::ALREADY_PREPARED => {}
1144            HowPrepared::NEWLY_PREPARED => {
1145                if position == 0usize {
1146                    let common = handle.GetHasherCommon();
1147                    common.dict_num_lookups = 0usize;
1148                    common.dict_num_matches = 0usize;
1149                }
1150            }
1151        }
1152    }
1153}
1154
1155fn HasherPrependCustomDictionary<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1156    m: &mut Alloc,
1157    handle: &mut UnionHasher<Alloc>,
1158    params: &mut BrotliEncoderParams,
1159    size: usize,
1160    dict: &[u8],
1161) {
1162    HasherSetup(m, handle, params, dict, 0usize, size, 0i32);
1163    match handle {
1164        &mut UnionHasher::H2(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1165        &mut UnionHasher::H3(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1166        &mut UnionHasher::H4(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1167        &mut UnionHasher::H5(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1168        &mut UnionHasher::H5q7(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1169        &mut UnionHasher::H5q5(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1170        &mut UnionHasher::H6(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1171        &mut UnionHasher::H9(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1172        &mut UnionHasher::H54(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1173        &mut UnionHasher::H10(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
1174        &mut UnionHasher::Uninit => panic!("Uninitialized"),
1175    }
1176}
1177
1178impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
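    /// Installs a custom dictionary by writing it into the ring buffer and
    /// seeding the hasher with it. Only the last `(1 << lgwin) - 16` bytes are
    /// used; empty or one-byte dictionaries (or quality 0/1) are ignored and
    /// the stream is simply marked catable/appendable instead.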
1179    pub fn set_custom_dictionary(&mut self, size: usize, dict: &[u8]) {
1180        self.set_custom_dictionary_with_optional_precomputed_hasher(size, dict, UnionHasher::Uninit)
1181    }
1182
1183    pub fn set_custom_dictionary_with_optional_precomputed_hasher(
1184        &mut self,
1185        size: usize,
1186        mut dict: &[u8],
1187        opt_hasher: UnionHasher<Alloc>,
1188    ) {
        let has_optional_hasher = !matches!(opt_hasher, UnionHasher::Uninit);
1194        let max_dict_size: usize = (1usize << self.params.lgwin).wrapping_sub(16);
1195        self.hasher_ = opt_hasher;
1196        let mut dict_size: usize = size;
1197        if !self.ensure_initialized() {
1198            return;
1199        }
1200        if dict_size == 0 || self.params.quality == 0 || self.params.quality == 1 || size <= 1 {
1201            self.params.catable = true; // don't risk a too-short dictionary
1202            self.params.appendable = true; // don't risk a too-short dictionary
1203            return;
1204        }
1205        self.custom_dictionary = true;
1206        if size > max_dict_size {
1207            dict = &dict[size.wrapping_sub(max_dict_size)..];
1208            dict_size = max_dict_size;
1209        }
1210        self.copy_input_to_ring_buffer(dict_size, dict);
1211        self.last_flush_pos_ = dict_size as u64;
1212        self.last_processed_pos_ = dict_size as u64;
1213        if dict_size > 0 {
1214            self.prev_byte_ = dict[dict_size.wrapping_sub(1)];
1215        }
1216        if dict_size > 1 {
1217            self.prev_byte2_ = dict[dict_size.wrapping_sub(2)];
1218        }
1219        let m16 = &mut self.m8;
1220        if cfg!(debug_assertions) || !has_optional_hasher {
1221            let mut orig_hasher = UnionHasher::Uninit;
1222            if has_optional_hasher {
1223                orig_hasher = core::mem::replace(&mut self.hasher_, UnionHasher::Uninit);
1224            }
1225            HasherPrependCustomDictionary(
1226                m16,
1227                &mut self.hasher_,
1228                &mut self.params,
1229                dict_size,
1230                dict,
1231            );
1232            if has_optional_hasher {
1233                debug_assert!(orig_hasher == self.hasher_);
1234                DestroyHasher(m16, &mut orig_hasher);
1235            }
1236        }
1237    }
1238}
1239
1240pub fn BrotliEncoderMaxCompressedSizeMulti(input_size: usize, num_threads: usize) -> usize {
1241    BrotliEncoderMaxCompressedSize(input_size) + num_threads * 8
1242}
1243
1244pub fn BrotliEncoderMaxCompressedSize(input_size: usize) -> usize {
1245    let magic_size = 16usize;
1246    let num_large_blocks: usize = input_size >> 14;
1247    let tail: usize = input_size.wrapping_sub(num_large_blocks << 24);
1248    let tail_overhead: usize = (if tail > (1i32 << 20) as usize {
1249        4i32
1250    } else {
1251        3i32
1252    }) as usize;
1253    let overhead: usize = (2usize)
1254        .wrapping_add((4usize).wrapping_mul(num_large_blocks))
1255        .wrapping_add(tail_overhead)
1256        .wrapping_add(1);
1257    let result: usize = input_size.wrapping_add(overhead);
1258    if input_size == 0usize {
1259        return 1 + magic_size;
1260    }
1261    if result < input_size {
1262        0usize
1263    } else {
1264        result + magic_size
1265    }
1266}
1267
1268fn InitOrStitchToPreviousBlock<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1269    m: &mut Alloc,
1270    handle: &mut UnionHasher<Alloc>,
1271    data: &[u8],
1272    mask: usize,
1273    params: &mut BrotliEncoderParams,
1274    position: usize,
1275    input_size: usize,
1276    is_last: bool,
1277) {
1278    HasherSetup(
1279        m,
1280        handle,
1281        params,
1282        data,
1283        position,
1284        input_size,
1285        is_last as i32,
1286    );
1287    handle.StitchToPreviousBlock(input_size, position, data, mask);
1288}
1289
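// Heuristic used before emitting a compressed metablock: if the block is
// almost all literals (very few commands), sample every 13th byte and give up
// on compression (return 0) when the sampled literal entropy exceeds roughly
// 7.92 bits per byte, i.e. the data looks incompressible.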
1290fn ShouldCompress(
1291    data: &[u8],
1292    mask: usize,
1293    last_flush_pos: u64,
1294    bytes: usize,
1295    num_literals: usize,
1296    num_commands: usize,
1297) -> i32 {
1298    if num_commands < (bytes >> 8).wrapping_add(2)
1299        && num_literals as (super::util::floatX)
1300            > 0.99 as super::util::floatX * bytes as (super::util::floatX)
1301    {
1302        let mut literal_histo = [0u32; 256];
1303        static kSampleRate: u32 = 13u32;
1304        static kMinEntropy: super::util::floatX = 7.92 as super::util::floatX;
1305        let bit_cost_threshold: super::util::floatX =
1306            bytes as (super::util::floatX) * kMinEntropy / kSampleRate as (super::util::floatX);
1307        let t: usize = bytes
1308            .wrapping_add(kSampleRate as usize)
1309            .wrapping_sub(1)
1310            .wrapping_div(kSampleRate as usize);
1311        let mut pos: u32 = last_flush_pos as u32;
1312        let mut i: usize;
1313        i = 0usize;
1314        while i < t {
1315            {
1316                {
1317                    let _rhs = 1;
1318                    let _lhs = &mut literal_histo[data[(pos as usize & mask)] as usize];
1319                    *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1320                }
1321                pos = pos.wrapping_add(kSampleRate);
1322            }
1323            i = i.wrapping_add(1);
1324        }
1325        if BitsEntropy(&literal_histo[..], 256usize) > bit_cost_threshold {
1326            return 0i32;
1327        }
1328    }
1329    1i32
1330}
1331
1332/* Chooses the literal context mode for a metablock */
1333fn ChooseContextMode(
1334    params: &BrotliEncoderParams,
1335    data: &[u8],
1336    pos: usize,
1337    mask: usize,
1338    length: usize,
1339) -> ContextType {
    /* Only at the highest qualities do we consider a context mode other than
    CONTEXT_UTF8. */
1342    match params.mode {
1343        BrotliEncoderMode::BROTLI_FORCE_LSB_PRIOR => return ContextType::CONTEXT_LSB6,
1344        BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR => return ContextType::CONTEXT_MSB6,
1345        BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR => return ContextType::CONTEXT_UTF8,
1346        BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR => return ContextType::CONTEXT_SIGNED,
1347        _ => {}
1348    }
    if params.quality >= 10 && BrotliIsMostlyUTF8(data, pos, mask, length, kMinUTF8Ratio) == 0 {
1350        return ContextType::CONTEXT_SIGNED;
1351    }
1352    ContextType::CONTEXT_UTF8
1353}
1354
1355#[derive(PartialEq, Eq, Copy, Clone)]
1356pub enum BrotliEncoderOperation {
1357    BROTLI_OPERATION_PROCESS = 0,
1358    BROTLI_OPERATION_FLUSH = 1,
1359    BROTLI_OPERATION_FINISH = 2,
1360    BROTLI_OPERATION_EMIT_METADATA = 3,
1361}
1362
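// Fallback path: wraps `input` in a valid brotli stream of uncompressed
// metablocks, each at most 1 << 24 bytes, preceded by a fixed header
// (0x21, 0x03) and terminated by a final 0x03 byte marking the last (empty)
// metablock; returns the number of bytes written.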
1363fn MakeUncompressedStream(input: &[u8], input_size: usize, output: &mut [u8]) -> usize {
1364    let mut size: usize = input_size;
1365    let mut result: usize = 0usize;
1366    let mut offset: usize = 0usize;
1367    if input_size == 0usize {
1368        output[0] = 6u8;
1369        return 1;
1370    }
1371    output[result] = 0x21u8;
1372    result = result.wrapping_add(1);
1373    output[result] = 0x3u8;
1374    result = result.wrapping_add(1);
1375    while size > 0usize {
1376        let mut nibbles: u32 = 0u32;
1377
1378        let chunk_size: u32 = if size > (1u32 << 24) as usize {
1379            1u32 << 24
1380        } else {
1381            size as u32
1382        };
1383        if chunk_size > 1u32 << 16 {
1384            nibbles = (if chunk_size > 1u32 << 20 { 2i32 } else { 1i32 }) as u32;
1385        }
1386        let bits: u32 = nibbles << 1
1387            | chunk_size.wrapping_sub(1) << 3
1388            | 1u32 << (19u32).wrapping_add((4u32).wrapping_mul(nibbles));
1389        output[result] = bits as u8;
1390        result = result.wrapping_add(1);
1391        output[result] = (bits >> 8) as u8;
1392        result = result.wrapping_add(1);
1393        output[result] = (bits >> 16) as u8;
1394        result = result.wrapping_add(1);
1395        if nibbles == 2u32 {
1396            output[result] = (bits >> 24) as u8;
1397            result = result.wrapping_add(1);
1398        }
1399        output[result..(result + chunk_size as usize)]
1400            .clone_from_slice(&input[offset..(offset + chunk_size as usize)]);
1401        result = result.wrapping_add(chunk_size as usize);
1402        offset = offset.wrapping_add(chunk_size as usize);
1403        size = size.wrapping_sub(chunk_size as usize);
1404    }
1405    output[result] = 3u8;
1406    result = result.wrapping_add(1);
1407    result
1408}
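/* One-shot compression entry point. On entry `*encoded_size` must hold the
capacity of `encoded_buffer`; on success (return value 1) it receives the
number of bytes written. A temporary encoder state is configured with the
given quality, lgwin, mode and a size hint, then driven to completion with
BROTLI_OPERATION_FINISH; if that fails, or the output would exceed
BrotliEncoderMaxCompressedSize(input_size), the input is stored uncompressed
via MakeUncompressedStream, provided the output buffer is large enough.
Quality 10 is not supported by this entry point. */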
1409pub fn BrotliEncoderCompress<
1410    Alloc: BrotliAlloc,
1411    MetablockCallback: FnMut(
1412        &mut interface::PredictionModeContextMap<InputReferenceMut>,
1413        &mut [interface::StaticCommand],
1414        interface::InputPair,
1415        &mut Alloc,
1416    ),
1417>(
1418    empty_m8: Alloc,
1419    m8: &mut Alloc,
1420    quality: i32,
1421    lgwin: i32,
1422    mode: BrotliEncoderMode,
1423    input_size: usize,
1424    input_buffer: &[u8],
1425    encoded_size: &mut usize,
1426    encoded_buffer: &mut [u8],
1427    metablock_callback: &mut MetablockCallback,
1428) -> i32 {
1429    let out_size: usize = *encoded_size;
1430    let input_start = input_buffer;
1431    let output_start = encoded_buffer;
1432    let max_out_size: usize = BrotliEncoderMaxCompressedSize(input_size);
1433    if out_size == 0usize {
1434        return 0i32;
1435    }
1436    if input_size == 0usize {
1437        *encoded_size = 1;
1438        output_start[0] = 6;
1439        return 1i32;
1440    }
1441    let mut is_fallback: i32 = 0i32;
1442    if quality == 10i32 {
1443        panic!("Unimplemented: need to set 9.5 here");
1444    }
1445    if is_fallback == 0 {
1446        let mut s_orig = BrotliEncoderStateStruct::new(core::mem::replace(m8, empty_m8));
1447        let mut result: bool;
1448        {
1449            let s = &mut s_orig;
1450            let mut available_in: usize = input_size;
1451            let next_in_array: &[u8] = input_buffer;
1452            let mut next_in_offset: usize = 0;
1453            let mut available_out: usize = *encoded_size;
1454            let next_out_array: &mut [u8] = output_start;
1455            let mut next_out_offset: usize = 0;
1456            let mut total_out = Some(0);
1457            s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_QUALITY, quality as u32);
1458            s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_LGWIN, lgwin as u32);
1459            s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_MODE, mode as u32);
1460            s.set_parameter(
1461                BrotliEncoderParameter::BROTLI_PARAM_SIZE_HINT,
1462                input_size as u32,
1463            );
1464            if lgwin > BROTLI_MAX_WINDOW_BITS as i32 {
1465                s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_LARGE_WINDOW, 1);
1466            }
1467            result = s.compress_stream(
1468                BrotliEncoderOperation::BROTLI_OPERATION_FINISH,
1469                &mut available_in,
1470                next_in_array,
1471                &mut next_in_offset,
1472                &mut available_out,
1473                next_out_array,
1474                &mut next_out_offset,
1475                &mut total_out,
1476                metablock_callback,
1477            );
1478            if !s.is_finished() {
1479                result = false;
1480            }
1481
1482            *encoded_size = total_out.unwrap();
1483            BrotliEncoderDestroyInstance(s);
1484        }
1485        let _ = core::mem::replace(m8, s_orig.m8);
1486        if !result || max_out_size != 0 && (*encoded_size > max_out_size) {
1487            is_fallback = 1i32;
1488        } else {
1489            return 1i32;
1490        }
1491    }
1492    assert_ne!(is_fallback, 0);
1493    *encoded_size = 0usize;
1494    if max_out_size == 0 {
1495        return 0i32;
1496    }
1497    if out_size >= max_out_size {
1498        *encoded_size = MakeUncompressedStream(input_start, input_size, output_start);
1499        return 1i32;
1500    }
1501    0i32
1502}
1503
1504impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
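    /* Seals the pending `last_bytes_` bits plus a 6-bit padding pattern so the
    stream reaches a byte boundary; the resulting bytes go to the staged
    `next_out_` buffer, or to `tiny_buf_` when no output is staged, and
    `available_out_` grows by the number of bytes written. */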
1505    fn inject_byte_padding_block(&mut self) {
1506        let mut seal: u32 = self.last_bytes_ as u32;
1507        let mut seal_bits: usize = self.last_bytes_bits_ as usize;
1508        let destination: &mut [u8];
1509        self.last_bytes_ = 0;
1510        self.last_bytes_bits_ = 0;
1511        seal |= 0x6u32 << seal_bits;
1512
1513        seal_bits = seal_bits.wrapping_add(6);
1514        if !IsNextOutNull(&self.next_out_) {
1515            destination = &mut GetNextOut!(*self)[self.available_out_..];
1516        } else {
1517            destination = &mut self.tiny_buf_[..];
1518            self.next_out_ = NextOut::TinyBuf(0);
1519        }
1520        destination[0] = seal as u8;
1521        if seal_bits > 8usize {
1522            destination[1] = (seal >> 8) as u8;
1523        }
1524        if seal_bits > 16usize {
1525            destination[2] = (seal >> 16) as u8;
1526        }
1527        self.available_out_ = self
1528            .available_out_
1529            .wrapping_add(seal_bits.wrapping_add(7) >> 3);
1530    }
1531
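    /* Makes forward progress on output: if a flush was requested while bits are
    still pending, injects the byte-padding block; otherwise copies as much of
    the staged output as fits into the caller's buffer, updating the offsets,
    `total_out` and the internal counters. Returns 1 if anything was done. */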
1532    fn inject_flush_or_push_output(
1533        &mut self,
1534        available_out: &mut usize,
1535        next_out_array: &mut [u8],
1536        next_out_offset: &mut usize,
1537        total_out: &mut Option<usize>,
1538    ) -> i32 {
1539        if self.stream_state_ as i32
1540            == BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED as i32
1541            && (self.last_bytes_bits_ as i32 != 0i32)
1542        {
1543            self.inject_byte_padding_block();
1544            return 1i32;
1545        }
1546        if self.available_out_ != 0usize && (*available_out != 0usize) {
1547            let copy_output_size: usize = min(self.available_out_, *available_out);
1548            (*next_out_array)[(*next_out_offset)..(*next_out_offset + copy_output_size)]
1549                .clone_from_slice(&GetNextOut!(self)[..copy_output_size]);
1550            //memcpy(*next_out, s.next_out_, copy_output_size);
1551            *next_out_offset = next_out_offset.wrapping_add(copy_output_size);
1552            *available_out = available_out.wrapping_sub(copy_output_size);
1553            self.next_out_ = NextOutIncrement(&self.next_out_, (copy_output_size as i32));
1554            self.available_out_ = self.available_out_.wrapping_sub(copy_output_size);
1555            self.total_out_ = self.total_out_.wrapping_add(copy_output_size as u64);
1556            if let &mut Some(ref mut total_out_inner) = total_out {
1557                *total_out_inner = self.total_out_ as usize;
1558            }
1559            return 1i32;
1560        }
1561        0i32
1562    }
1563
1564    fn unprocessed_input_size(&self) -> u64 {
1565        self.input_pos_.wrapping_sub(self.last_processed_pos_)
1566    }
1567
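    /* If the caller did not provide a size hint, estimate one from the
    unprocessed input plus the bytes still waiting in `available_in`,
    capped at 1 << 30. */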
1568    fn update_size_hint(&mut self, available_in: usize) {
1569        if self.params.size_hint == 0usize {
1570            let delta: u64 = self.unprocessed_input_size();
1571            let tail: u64 = available_in as u64;
1572            let limit: u32 = 1u32 << 30;
1573            let total: u32;
1574            if delta >= u64::from(limit)
1575                || tail >= u64::from(limit)
1576                || delta.wrapping_add(tail) >= u64::from(limit)
1577            {
1578                total = limit;
1579            } else {
1580                total = delta.wrapping_add(tail) as u32;
1581            }
1582            self.params.size_hint = total as usize;
1583        }
1584    }
1585}
1586
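/* Maps a 64-bit stream position into the 32-bit space used by the ring buffer:
positions below 3 GiB are returned unchanged, larger positions keep their low
30 bits and are folded back so that they alternate between the second and
third gigabyte. */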
1587fn WrapPosition(position: u64) -> u32 {
1588    let mut result: u32 = position as u32;
1589    let gb: u64 = position >> 30;
1590    if gb > 2 {
1591        result = result & (1u32 << 30).wrapping_sub(1)
1592            | ((gb.wrapping_sub(1) & 1) as u32).wrapping_add(1) << 30;
1593    }
1594    result
1595}
1596
1597impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
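    /* Ensures the scratch `storage_` buffer holds at least `size` bytes,
    reallocating (and discarding the old contents) when it is too small;
    it is never shrunk. */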
1598    fn get_brotli_storage(&mut self, size: usize) {
1599        if self.storage_size_ < size {
1600            <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.storage_));
1601            self.storage_ = <Alloc as Allocator<u8>>::alloc_cell(&mut self.m8, size);
1602            self.storage_size_ = size;
1603        }
1604    }
1605}
1606
1607fn MaxHashTableSize(quality: i32) -> usize {
1608    (if quality == 0i32 {
1609        1i32 << 15
1610    } else {
1611        1i32 << 17
1612    }) as usize
1613}
1614
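/* Smallest power of two that is at least 256, covers `input_size`, and does
not exceed `max_table_size`. */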
1615fn HashTableSize(max_table_size: usize, input_size: usize) -> usize {
1616    let mut htsize: usize = 256usize;
1617    while htsize < max_table_size && (htsize < input_size) {
1618        htsize <<= 1i32;
1619    }
1620    htsize
1621}
1622
1623macro_rules! GetHashTable {
1624    ($s : expr, $quality: expr, $input_size : expr, $table_size : expr) => {
1625        GetHashTableInternal(
1626            &mut $s.m8,
1627            &mut $s.small_table_,
1628            &mut $s.large_table_,
1629            $quality,
1630            $input_size,
1631            $table_size,
1632        )
1633    };
1634}
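/* Backs the GetHashTable! macro for the quality 0/1 fragment paths: picks a
table size via HashTableSize (doubled at quality 0 when htsize & 0xaaaaa == 0,
i.e. when log2 of the size is even), serves the request from the fixed
1024-entry small table when it fits and from the reallocatable large table
otherwise, zeroes the chosen region and reports its size in `*table_size`. */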
1635fn GetHashTableInternal<'a, AllocI32: alloc::Allocator<i32>>(
1636    mi32: &mut AllocI32,
1637    small_table_: &'a mut [i32; 1024],
1638    large_table_: &'a mut AllocI32::AllocatedMemory,
1639    quality: i32,
1640    input_size: usize,
1641    table_size: &mut usize,
1642) -> &'a mut [i32] {
1643    let max_table_size: usize = MaxHashTableSize(quality);
1644    let mut htsize: usize = HashTableSize(max_table_size, input_size);
1645    let table: &mut [i32];
1646    if quality == 0i32 && htsize & 0xaaaaausize == 0usize {
1647        htsize <<= 1i32;
1648    }
1649    if htsize <= small_table_.len() {
1650        table = &mut small_table_[..];
1651    } else {
1652        if htsize > large_table_.slice().len() {
1653            //s.large_table_size_ = htsize;
1654            {
1655                mi32.free_cell(core::mem::take(large_table_));
1656            }
1657            *large_table_ = mi32.alloc_cell(htsize);
1658        }
1659        table = large_table_.slice_mut();
1660    }
1661    *table_size = htsize;
1662    for item in table[..htsize].iter_mut() {
1663        *item = 0;
1664    }
1665    table // FIXME: probably need a macro to do this without borrowing the whole EncoderStateStruct
1666}
1667
1668impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
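    /* Advances `last_processed_pos_` to `input_pos_` and reports whether the
    wrapped position moved backwards (the ring buffer wrapped), in which case
    the caller resets the hasher. */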
1669    fn update_last_processed_pos(&mut self) -> bool {
1670        let wrapped_last_processed_pos: u32 = WrapPosition(self.last_processed_pos_);
1671        let wrapped_input_pos: u32 = WrapPosition(self.input_pos_);
1672        self.last_processed_pos_ = self.input_pos_;
1673        wrapped_input_pos < wrapped_last_processed_pos
1674    }
1675}
1676
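/* Upper bound for a single meta-block: 1 << ComputeRbBits(params) (the
ring-buffer size), capped at 1 << 24 bytes. */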
1677fn MaxMetablockSize(params: &BrotliEncoderParams) -> usize {
1678    1 << min(ComputeRbBits(params), 24)
1679}
1680
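/* Given a 3x3 histogram of adjacent literal pairs (each literal reduced to one
of three classes by its top two bits), compares the entropy of using one, two
or three literal contexts and selects the smallest number that is close enough
to optimal, together with the matching static context map. */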
1681fn ChooseContextMap(
1682    quality: i32,
1683    bigram_histo: &mut [u32],
1684    num_literal_contexts: &mut usize,
1685    literal_context_map: &mut &[u32],
1686) {
1687    static kStaticContextMapContinuation: [u32; 64] = [
1688        1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1689        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1690        0, 0, 0, 0,
1691    ];
1692    static kStaticContextMapSimpleUTF8: [u32; 64] = [
1693        0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1694        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1695        0, 0, 0, 0,
1696    ];
1697    let mut monogram_histo = [0u32; 3];
1698    let mut two_prefix_histo = [0u32; 6];
1699
1700    let mut i: usize;
1701    let mut dummy: usize = 0;
1702    let mut entropy: [super::util::floatX; 4] = [0.0 as super::util::floatX; 4];
1703    i = 0usize;
1704    while i < 9usize {
1705        {
1706            {
1707                let _rhs = bigram_histo[i];
1708                let _lhs = &mut monogram_histo[i.wrapping_rem(3)];
1709                *_lhs = (*_lhs).wrapping_add(_rhs);
1710            }
1711            {
1712                let _rhs = bigram_histo[i];
1713                let _lhs = &mut two_prefix_histo[i.wrapping_rem(6)];
1714                *_lhs = (*_lhs).wrapping_add(_rhs);
1715            }
1716        }
1717        i = i.wrapping_add(1);
1718    }
1719    entropy[1] = ShannonEntropy(&monogram_histo[..], 3usize, &mut dummy);
1720    entropy[2] = ShannonEntropy(&two_prefix_histo[..], 3usize, &mut dummy)
1721        + ShannonEntropy(&two_prefix_histo[3..], 3usize, &mut dummy);
1722    entropy[3] = 0i32 as (super::util::floatX);
1723    for i in 0usize..3usize {
1724        entropy[3] += ShannonEntropy(
1725            &bigram_histo[(3usize).wrapping_mul(i)..],
1726            3usize,
1727            &mut dummy,
1728        );
1729    }
1730    let total: usize = monogram_histo[0]
1731        .wrapping_add(monogram_histo[1])
1732        .wrapping_add(monogram_histo[2]) as usize;
1733    entropy[0] = 1.0 as super::util::floatX / total as (super::util::floatX);
1734    entropy[1] *= entropy[0];
1735    entropy[2] *= entropy[0];
1736    entropy[3] *= entropy[0];
1737    if quality < 7i32 {
1738        entropy[3] = entropy[1] * 10i32 as (super::util::floatX);
1739    }
1740    if entropy[1] - entropy[2] < 0.2 as super::util::floatX
1741        && (entropy[1] - entropy[3] < 0.2 as super::util::floatX)
1742    {
1743        *num_literal_contexts = 1;
1744    } else if entropy[2] - entropy[3] < 0.02 as super::util::floatX {
1745        *num_literal_contexts = 2usize;
1746        *literal_context_map = &kStaticContextMapSimpleUTF8[..];
1747    } else {
1748        *num_literal_contexts = 3usize;
1749        *literal_context_map = &kStaticContextMapContinuation[..];
1750    }
1751}
1752
1753static kStaticContextMapComplexUTF8: [u32; 64] = [
1754    11, 11, 12, 12, /* 0 special */
1755    0, 0, 0, 0, /* 4 lf */
1756    1, 1, 9, 9, /* 8 space */
1757    2, 2, 2, 2, /* !, first after space/lf and after something else. */
1758    1, 1, 1, 1, /* " */
1759    8, 3, 3, 3, /* % */
1760    1, 1, 1, 1, /* ({[ */
1761    2, 2, 2, 2, /* }]) */
1762    8, 4, 4, 4, /* :; */
1763    8, 7, 4, 4, /* . */
1764    8, 0, 0, 0, /* > */
1765    3, 3, 3, 3, /* [0..9] */
1766    5, 5, 10, 5, /* [A-Z] */
1767    5, 5, 10, 5, 6, 6, 6, 6, /* [a-z] */
1768    6, 6, 6, 6,
1769];
1770/* Decide if we want to use a more complex static context map containing 13
1771context values, based on the entropy reduction of histograms over the
1772top 5 bits of literals. */
1773fn ShouldUseComplexStaticContextMap(
1774    input: &[u8],
1775    mut start_pos: usize,
1776    length: usize,
1777    mask: usize,
1778    quality: i32,
1779    size_hint: usize,
1780    num_literal_contexts: &mut usize,
1781    literal_context_map: &mut &[u32],
1782) -> bool {
1783    let _ = quality;
1784    //BROTLI_UNUSED(quality);
1785    /* Try the more complex static context map only for long data. */
1786    if size_hint < (1 << 20) {
1787        false
1788    } else {
1789        let end_pos = start_pos + length;
1790        /* To make entropy calculations faster and to fit on the stack, we collect
1791        histograms over the 5 most significant bits of literals: one histogram
1792        without context and one for each of the 13 context values. */
1793        let mut combined_histo: [u32; 32] = [0; 32];
1794        let mut context_histo: [[u32; 32]; 13] = [[0; 32]; 13];
1795        let mut total = 0u32;
1796        let mut entropy = [0.0 as super::util::floatX; 3];
1797        let mut dummy = 0usize;
1798        let utf8_lut = BROTLI_CONTEXT_LUT(ContextType::CONTEXT_UTF8);
1799        while start_pos + 64 <= end_pos {
1800            let stride_end_pos = start_pos + 64;
1801            let mut prev2 = input[start_pos & mask];
1802            let mut prev1 = input[(start_pos + 1) & mask];
1803
1804            /* To keep the analysis fast, we only examine 64-byte strides at
1805            4 kB intervals. */
1806            for pos in start_pos + 2..stride_end_pos {
1807                let literal = input[pos & mask];
1808                let context = kStaticContextMapComplexUTF8
1809                    [BROTLI_CONTEXT(prev1, prev2, utf8_lut) as usize]
1810                    as u8;
1811                total += 1;
1812                combined_histo[(literal >> 3) as usize] += 1;
1813                context_histo[context as usize][(literal >> 3) as usize] += 1;
1814                prev2 = prev1;
1815                prev1 = literal;
1816            }
1817            start_pos += 4096;
1818        }
1819        entropy[1] = ShannonEntropy(&combined_histo[..], 32, &mut dummy);
1820        entropy[2] = 0.0 as super::util::floatX;
1821        for i in 0..13 {
1822            assert!(i < 13);
1823            entropy[2] += ShannonEntropy(&context_histo[i][..], 32, &mut dummy);
1824        }
1825        entropy[0] = (1.0 as super::util::floatX) / (total as super::util::floatX);
1826        entropy[1] *= entropy[0];
1827        entropy[2] *= entropy[0];
1828        /* The trigger below was tuned by compressing the individual files of the
1829        Silesia corpus: if we skip this kind of context modeling for poorly
1830        compressible input (entropy with context modeling above 60% of the
1831        maximum) or when the expected savings are below 0.2 bits per symbol,
1832        then every case in which it does trigger improves the final compression
1833        ratio. Note however that this heuristic might be too strict for some
1834        inputs and could be tuned further. */
1835        if entropy[2] > 3.0 || entropy[1] - entropy[2] < 0.2 {
1836            false
1837        } else {
1838            *num_literal_contexts = 13;
1839            *literal_context_map = &kStaticContextMapComplexUTF8;
1840            true
1841        }
1842    }
1843}
1844
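/* For quality >= 5 and at least 64 bytes of input, samples 64-byte strides at
4 kB intervals and either enables the 13-context complex UTF-8 map (see above)
or lets ChooseContextMap pick between one, two and three contexts. */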
1845fn DecideOverLiteralContextModeling(
1846    input: &[u8],
1847    mut start_pos: usize,
1848    length: usize,
1849    mask: usize,
1850    quality: i32,
1851    size_hint: usize,
1852    num_literal_contexts: &mut usize,
1853    literal_context_map: &mut &[u32],
1854) {
1855    if quality < 5i32 || length < 64usize {
1856    } else if ShouldUseComplexStaticContextMap(
1857        input,
1858        start_pos,
1859        length,
1860        mask,
1861        quality,
1862        size_hint,
1863        num_literal_contexts,
1864        literal_context_map,
1865    ) {
1866    } else {
1867        let end_pos: usize = start_pos.wrapping_add(length);
1868        let mut bigram_prefix_histo = [0u32; 9];
1869        while start_pos.wrapping_add(64) <= end_pos {
1870            {
1871                static lut: [i32; 4] = [0, 0, 1, 2];
1872                let stride_end_pos: usize = start_pos.wrapping_add(64);
1873                let mut prev: i32 = lut[(input[(start_pos & mask)] as i32 >> 6) as usize] * 3i32;
1874                let mut pos: usize;
1875                pos = start_pos.wrapping_add(1);
1876                while pos < stride_end_pos {
1877                    {
1878                        let literal: u8 = input[(pos & mask)];
1879                        {
1880                            let _rhs = 1;
1881                            let cur_ind = (prev + lut[(literal as i32 >> 6) as usize]);
1882                            let _lhs = &mut bigram_prefix_histo[cur_ind as usize];
1883                            *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1884                        }
1885                        prev = lut[(literal as i32 >> 6) as usize] * 3i32;
1886                    }
1887                    pos = pos.wrapping_add(1);
1888                }
1889            }
1890            start_pos = start_pos.wrapping_add(4096);
1891        }
1892        ChooseContextMap(
1893            quality,
1894            &mut bigram_prefix_histo[..],
1895            num_literal_contexts,
1896            literal_context_map,
1897        );
1898    }
1899}
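/* Emits one meta-block into `storage`. Blocks that fail the ShouldCompress
heuristic are stored uncompressed; quality <= 2 uses the fast path, quality 3
the trivial path, and higher qualities build a full MetaBlockSplit (greedy
below quality 10, the full search otherwise). If the compressed form ends up
more than four bytes larger than the raw data, the block is rewritten
uncompressed, and when `is_last` was suppressed for appendable output an empty
last meta-block is appended afterwards. */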
1900fn WriteMetaBlockInternal<Alloc: BrotliAlloc, Cb>(
1901    alloc: &mut Alloc,
1902    data: &[u8],
1903    mask: usize,
1904    last_flush_pos: u64,
1905    bytes: usize,
1906    mut is_last: bool,
1907    literal_context_mode: ContextType,
1908    params: &BrotliEncoderParams,
1909    lit_scratch_space: &mut <HistogramLiteral as CostAccessors>::i32vec,
1910    cmd_scratch_space: &mut <HistogramCommand as CostAccessors>::i32vec,
1911    dst_scratch_space: &mut <HistogramDistance as CostAccessors>::i32vec,
1912    prev_byte: u8,
1913    prev_byte2: u8,
1914    num_literals: usize,
1915    num_commands: usize,
1916    commands: &mut [Command],
1917    saved_dist_cache: &[i32; kNumDistanceCacheEntries],
1918    dist_cache: &mut [i32; 16],
1919    recoder_state: &mut RecoderState,
1920    storage_ix: &mut usize,
1921    storage: &mut [u8],
1922    cb: &mut Cb,
1923) where
1924    Cb: FnMut(
1925        &mut interface::PredictionModeContextMap<InputReferenceMut>,
1926        &mut [interface::StaticCommand],
1927        interface::InputPair,
1928        &mut Alloc,
1929    ),
1930{
1931    let actual_is_last = is_last;
1932    if params.appendable {
1933        is_last = false;
1934    } else {
1935        assert!(!params.catable); // SanitizeParams enforces this constraint
1936    }
1937    let wrapped_last_flush_pos: u32 = WrapPosition(last_flush_pos);
1938
1939    let literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode);
1940    let mut block_params = params.clone();
1941    if bytes == 0usize {
1942        BrotliWriteBits(2usize, 3, storage_ix, storage);
1943        *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
1944        return;
1945    }
1946    if ShouldCompress(
1947        data,
1948        mask,
1949        last_flush_pos,
1950        bytes,
1951        num_literals,
1952        num_commands,
1953    ) == 0
1954    {
1955        dist_cache[..4].clone_from_slice(&saved_dist_cache[..4]);
1956        BrotliStoreUncompressedMetaBlock(
1957            alloc,
1958            is_last as i32,
1959            data,
1960            wrapped_last_flush_pos as usize,
1961            mask,
1962            params,
1963            bytes,
1964            recoder_state,
1965            storage_ix,
1966            storage,
1967            false,
1968            cb,
1969        );
1970        if actual_is_last != is_last {
1971            BrotliWriteEmptyLastMetaBlock(storage_ix, storage)
1972        }
1973        return;
1974    }
1975    let saved_byte_location = (*storage_ix) >> 3;
1976    let last_bytes: u16 =
1977        ((storage[saved_byte_location + 1] as u16) << 8) | storage[saved_byte_location] as u16;
1978    let last_bytes_bits: u8 = *storage_ix as u8;
1979    /*if params.dist.num_direct_distance_codes != 0 ||
1980                      params.dist.distance_postfix_bits != 0 {
1981      RecomputeDistancePrefixes(commands,
1982                                num_commands,
1983                                params.dist.num_direct_distance_codes,
1984                                params.dist.distance_postfix_bits);
1985    }*/
1986    // TODO(port): confirm the RecomputeDistancePrefixes step above can stay disabled.
1987    if params.quality <= 2i32 {
1988        BrotliStoreMetaBlockFast(
1989            alloc,
1990            data,
1991            wrapped_last_flush_pos as usize,
1992            bytes,
1993            mask,
1994            is_last as i32,
1995            params,
1996            saved_dist_cache,
1997            commands,
1998            num_commands,
1999            recoder_state,
2000            storage_ix,
2001            storage,
2002            cb,
2003        );
2004    } else if params.quality < 4i32 {
2005        BrotliStoreMetaBlockTrivial(
2006            alloc,
2007            data,
2008            wrapped_last_flush_pos as usize,
2009            bytes,
2010            mask,
2011            is_last as i32,
2012            params,
2013            saved_dist_cache,
2014            commands,
2015            num_commands,
2016            recoder_state,
2017            storage_ix,
2018            storage,
2019            cb,
2020        );
2021    } else {
2022        //let mut literal_context_mode: ContextType = ContextType::CONTEXT_UTF8;
2023
2024        let mut mb = MetaBlockSplit::<Alloc>::new();
2025        if params.quality < 10i32 {
2026            let mut num_literal_contexts: usize = 1;
2027            let mut literal_context_map: &[u32] = &[];
2028            if params.disable_literal_context_modeling == 0 {
2029                DecideOverLiteralContextModeling(
2030                    data,
2031                    wrapped_last_flush_pos as usize,
2032                    bytes,
2033                    mask,
2034                    params.quality,
2035                    params.size_hint,
2036                    &mut num_literal_contexts,
2037                    &mut literal_context_map,
2038                );
2039            }
2040            BrotliBuildMetaBlockGreedy(
2041                alloc,
2042                data,
2043                wrapped_last_flush_pos as usize,
2044                mask,
2045                prev_byte,
2046                prev_byte2,
2047                literal_context_mode,
2048                literal_context_lut,
2049                num_literal_contexts,
2050                literal_context_map,
2051                commands,
2052                num_commands,
2053                &mut mb,
2054            );
2055        } else {
2056            BrotliBuildMetaBlock(
2057                alloc,
2058                data,
2059                wrapped_last_flush_pos as usize,
2060                mask,
2061                &mut block_params,
2062                prev_byte,
2063                prev_byte2,
2064                commands,
2065                num_commands,
2066                literal_context_mode,
2067                lit_scratch_space,
2068                cmd_scratch_space,
2069                dst_scratch_space,
2070                &mut mb,
2071            );
2072        }
2073        if params.quality >= 4i32 {
2074            let mut num_effective_dist_codes = block_params.dist.alphabet_size;
2075            if num_effective_dist_codes > BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS as u32 {
2076                num_effective_dist_codes = BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS as u32;
2077            }
2078            BrotliOptimizeHistograms(num_effective_dist_codes as usize, &mut mb);
2079        }
2080        BrotliStoreMetaBlock(
2081            alloc,
2082            data,
2083            wrapped_last_flush_pos as usize,
2084            bytes,
2085            mask,
2086            prev_byte,
2087            prev_byte2,
2088            is_last as i32,
2089            &block_params,
2090            literal_context_mode,
2091            saved_dist_cache,
2092            commands,
2093            num_commands,
2094            &mut mb,
2095            recoder_state,
2096            storage_ix,
2097            storage,
2098            cb,
2099        );
2100        mb.destroy(alloc);
2101    }
2102    if bytes + 4 + saved_byte_location < (*storage_ix >> 3) {
2103        dist_cache[..4].clone_from_slice(&saved_dist_cache[..4]);
2104        //memcpy(dist_cache,
2105        //     saved_dist_cache,
2106        //     (4usize).wrapping_mul(::core::mem::size_of::<i32>()));
2107        storage[saved_byte_location] = last_bytes as u8;
2108        storage[saved_byte_location + 1] = (last_bytes >> 8) as u8;
2109        *storage_ix = last_bytes_bits as usize;
2110        BrotliStoreUncompressedMetaBlock(
2111            alloc,
2112            is_last as i32,
2113            data,
2114            wrapped_last_flush_pos as usize,
2115            mask,
2116            params,
2117            bytes,
2118            recoder_state,
2119            storage_ix,
2120            storage,
2121            true,
2122            cb,
2123        );
2124    }
2125    if actual_is_last != is_last {
2126        BrotliWriteEmptyLastMetaBlock(storage_ix, storage)
2127    }
2128}
2129
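/* Chooses the distance-code parameters: below quality 4 both NPOSTFIX and
NDIRECT stay 0; font mode uses 1 and 12; otherwise the values already in
`params.dist` are kept if they form a valid combination and reset to 0
otherwise. BrotliInitDistanceParams then derives the remaining distance
parameters from this choice. */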
2130fn ChooseDistanceParams(params: &mut BrotliEncoderParams) {
2131    let mut num_direct_distance_codes = 0u32;
2132    let mut distance_postfix_bits = 0u32;
2133
2134    if params.quality >= 4 {
2135        if params.mode == BrotliEncoderMode::BROTLI_MODE_FONT {
2136            distance_postfix_bits = 1;
2137            num_direct_distance_codes = 12;
2138        } else {
2139            distance_postfix_bits = params.dist.distance_postfix_bits;
2140            num_direct_distance_codes = params.dist.num_direct_distance_codes;
2141        }
2142        let ndirect_msb = (num_direct_distance_codes >> distance_postfix_bits) & 0x0f;
2143        if distance_postfix_bits > BROTLI_MAX_NPOSTFIX as u32
2144            || num_direct_distance_codes > BROTLI_MAX_NDIRECT as u32
2145            || (ndirect_msb << distance_postfix_bits) != num_direct_distance_codes
2146        {
2147            distance_postfix_bits = 0;
2148            num_direct_distance_codes = 0;
2149        }
2150    }
2151    BrotliInitDistanceParams(params, distance_postfix_bits, num_direct_distance_codes);
2152    /*(
2153    if (params.large_window) {
2154        max_distance = BROTLI_MAX_ALLOWED_DISTANCE;
2155        if (num_direct_distance_codes != 0 || distance_postfix_bits != 0) {
2156            max_distance = (3 << 29) - 4;
2157        }
2158        alphabet_size = BROTLI_DISTANCE_ALPHABET_SIZE(
2159            num_direct_distance_codes, distance_postfix_bits,
2160            BROTLI_LARGE_MAX_DISTANCE_BITS);
2161    } else {
2162        alphabet_size = BROTLI_DISTANCE_ALPHABET_SIZE(
2163            num_direct_distance_codes, distance_postfix_bits,
2164            BROTLI_MAX_DISTANCE_BITS);
2165
2166    }
2167
2168    params.dist.num_direct_distance_codes = num_direct_distance_codes;
2169    params.dist.distance_postfix_bits = distance_postfix_bits;
2170    params.dist.alphabet_size = alphabet_size;
2171    params.dist.max_distance = max_distance;*/
2172}
2173
2174impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
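    /* Compresses everything between `last_processed_pos_` and `input_pos_`.
    Writes the optional magic-number and catable prologue bytes first, takes
    the quality 0/1 fragment-compression shortcut when applicable, and
    otherwise grows the command array, stitches the hasher to the previous
    block, runs the backward-reference search and either defers flushing
    (while the next input block still fits in the current meta-block) or emits
    a meta-block through WriteMetaBlockInternal. On success `*out_size` is the
    number of bytes now available in `storage_`. */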
2175    fn encode_data<MetablockCallback>(
2176        &mut self,
2177        is_last: bool,
2178        force_flush: bool,
2179        out_size: &mut usize,
2180        callback: &mut MetablockCallback,
2181        // mut output: &'a mut &'a mut [u8]
2182    ) -> bool
2183    where
2184        MetablockCallback: FnMut(
2185            &mut interface::PredictionModeContextMap<InputReferenceMut>,
2186            &mut [interface::StaticCommand],
2187            interface::InputPair,
2188            &mut Alloc,
2189        ),
2190    {
2191        let mut delta: u64 = self.unprocessed_input_size();
2192        let mut bytes: u32 = delta as u32;
2193        let mask = self.ringbuffer_.mask_;
2194        if !self.ensure_initialized() {
2195            return false;
2196        }
2197        let dictionary = BrotliGetDictionary();
2198        if self.is_last_block_emitted_ {
2199            return false;
2200        }
2201        if is_last {
2202            self.is_last_block_emitted_ = true;
2203        }
2204        if delta > self.input_block_size() as u64 {
2205            return false;
2206        }
2207        let mut storage_ix: usize = usize::from(self.last_bytes_bits_);
2208        {
2209            let meta_size = max(
2210                bytes as usize,
2211                self.input_pos_.wrapping_sub(self.last_flush_pos_) as usize,
2212            );
2213            self.get_brotli_storage((2usize).wrapping_mul(meta_size).wrapping_add(503 + 24));
2214        }
2215        {
2216            self.storage_.slice_mut()[0] = self.last_bytes_ as u8;
2217            self.storage_.slice_mut()[1] = (self.last_bytes_ >> 8) as u8;
2218        }
2219        let mut catable_header_size = 0;
2220        if let IsFirst::NothingWritten = self.is_first_mb {
2221            if self.params.magic_number {
2222                BrotliWriteMetadataMetaBlock(
2223                    &self.params,
2224                    &mut storage_ix,
2225                    self.storage_.slice_mut(),
2226                );
2227                self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
2228                    | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
2229                self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2230                self.next_out_ = NextOut::DynamicStorage(0);
2231                catable_header_size = storage_ix >> 3;
2232                *out_size = catable_header_size;
2233                self.is_first_mb = IsFirst::HeaderWritten;
2234            }
2235        }
2236        if let IsFirst::BothCatableBytesWritten = self.is_first_mb {
2237            // nothing to do here, move along
2238        } else if !self.params.catable {
2239            self.is_first_mb = IsFirst::BothCatableBytesWritten;
2240        } else if bytes != 0 {
2241            assert!(self.last_processed_pos_ < 2 || self.custom_dictionary);
2242            let num_bytes_to_write_uncompressed: usize = min(2, bytes as usize);
2243            {
2244                let data =
2245                    &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..];
2246                BrotliStoreUncompressedMetaBlock(
2247                    &mut self.m8,
2248                    0,
2249                    data,
2250                    self.last_flush_pos_ as usize,
2251                    mask as usize,
2252                    &self.params,
2253                    num_bytes_to_write_uncompressed,
2254                    &mut self.recoder_state,
2255                    &mut storage_ix,
2256                    self.storage_.slice_mut(),
2257                    false, /* suppress meta-block logging */
2258                    callback,
2259                );
2260                self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
2261                    | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
2262                self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2263                self.prev_byte2_ = self.prev_byte_;
2264                self.prev_byte_ = data[self.last_flush_pos_ as usize & mask as usize];
2265                if num_bytes_to_write_uncompressed == 2 {
2266                    self.prev_byte2_ = self.prev_byte_;
2267                    self.prev_byte_ = data[(self.last_flush_pos_ + 1) as usize & mask as usize];
2268                }
2269            }
2270            self.last_flush_pos_ += num_bytes_to_write_uncompressed as u64;
2271            bytes -= num_bytes_to_write_uncompressed as u32;
2272            self.last_processed_pos_ += num_bytes_to_write_uncompressed as u64;
2273            if num_bytes_to_write_uncompressed >= 2 {
2274                self.is_first_mb = IsFirst::BothCatableBytesWritten;
2275            } else if num_bytes_to_write_uncompressed == 1 {
2276                if let IsFirst::FirstCatableByteWritten = self.is_first_mb {
2277                    self.is_first_mb = IsFirst::BothCatableBytesWritten;
2278                } else {
2279                    self.is_first_mb = IsFirst::FirstCatableByteWritten;
2280                }
2281            }
2282            catable_header_size = storage_ix >> 3;
2283            self.next_out_ = NextOut::DynamicStorage(0);
2284            *out_size = catable_header_size;
2285            delta = self.unprocessed_input_size();
2286        }
2287        let mut wrapped_last_processed_pos: u32 = WrapPosition(self.last_processed_pos_);
2288        if self.params.quality == 1i32 && self.command_buf_.slice().is_empty() {
2289            let new_buf = <Alloc as Allocator<u32>>::alloc_cell(
2290                &mut self.m8,
2291                kCompressFragmentTwoPassBlockSize,
2292            );
2293            self.command_buf_ = new_buf;
2294            let new_buf8 = <Alloc as Allocator<u8>>::alloc_cell(
2295                &mut self.m8,
2296                kCompressFragmentTwoPassBlockSize,
2297            );
2298            self.literal_buf_ = new_buf8;
2299        }
2300        if self.params.quality == 0i32 || self.params.quality == 1i32 {
2301            let mut table_size: usize = 0;
2302            {
2303                if delta == 0 && !is_last {
2304                    *out_size = catable_header_size;
2305                    return true;
2306                }
2307                let data =
2308                    &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..];
2309
2310                //s.storage_.slice_mut()[0] = (*s).last_bytes_ as u8;
2311                //        s.storage_.slice_mut()[1] = ((*s).last_bytes_ >> 8) as u8;
2312
2313                let table: &mut [i32] =
2314                    GetHashTable!(self, self.params.quality, bytes as usize, &mut table_size);
2315
2316                if self.params.quality == 0i32 {
2317                    BrotliCompressFragmentFast(
2318                        &mut self.m8,
2319                        &mut data[((wrapped_last_processed_pos & mask) as usize)..],
2320                        bytes as usize,
2321                        is_last as i32,
2322                        table,
2323                        table_size,
2324                        &mut self.cmd_depths_[..],
2325                        &mut self.cmd_bits_[..],
2326                        &mut self.cmd_code_numbits_,
2327                        &mut self.cmd_code_[..],
2328                        &mut storage_ix,
2329                        self.storage_.slice_mut(),
2330                    );
2331                } else {
2332                    BrotliCompressFragmentTwoPass(
2333                        &mut self.m8,
2334                        &mut data[((wrapped_last_processed_pos & mask) as usize)..],
2335                        bytes as usize,
2336                        is_last as i32,
2337                        self.command_buf_.slice_mut(),
2338                        self.literal_buf_.slice_mut(),
2339                        table,
2340                        table_size,
2341                        &mut storage_ix,
2342                        self.storage_.slice_mut(),
2343                    );
2344                }
2345                self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
2346                    | ((self.storage_.slice()[(storage_ix >> 3) + 1] as u16) << 8);
2347                self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2348            }
2349            self.update_last_processed_pos();
2350            // *output = &mut s.storage_.slice_mut();
2351            self.next_out_ = NextOut::DynamicStorage(0); // this always returns that
2352            *out_size = storage_ix >> 3;
2353            return true;
2354        }
2355        {
2356            let mut newsize: usize = self
2357                .num_commands_
2358                .wrapping_add(bytes.wrapping_div(2) as usize)
2359                .wrapping_add(1);
2360            if newsize > self.cmd_alloc_size_ {
2361                newsize = newsize.wrapping_add(bytes.wrapping_div(4).wrapping_add(16) as usize);
2362                self.cmd_alloc_size_ = newsize;
2363                let mut new_commands =
2364                    <Alloc as Allocator<Command>>::alloc_cell(&mut self.m8, newsize);
2365                if !self.commands_.slice().is_empty() {
2366                    new_commands.slice_mut()[..self.num_commands_]
2367                        .clone_from_slice(&self.commands_.slice()[..self.num_commands_]);
2368                    <Alloc as Allocator<Command>>::free_cell(
2369                        &mut self.m8,
2370                        core::mem::take(&mut self.commands_),
2371                    );
2372                }
2373                self.commands_ = new_commands;
2374            }
2375        }
2376        InitOrStitchToPreviousBlock(
2377            &mut self.m8,
2378            &mut self.hasher_,
2379            &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
2380            mask as usize,
2381            &mut self.params,
2382            wrapped_last_processed_pos as usize,
2383            bytes as usize,
2384            is_last,
2385        );
2386        let literal_context_mode = ChooseContextMode(
2387            &self.params,
2388            self.ringbuffer_.data_mo.slice(),
2389            WrapPosition(self.last_flush_pos_) as usize,
2390            mask as usize,
2391            (self.input_pos_.wrapping_sub(self.last_flush_pos_)) as usize,
2392        );
2393        if self.num_commands_ != 0 && self.last_insert_len_ == 0 {
2394            self.extend_last_command(&mut bytes, &mut wrapped_last_processed_pos);
2395        }
2396        BrotliCreateBackwardReferences(
2397            &mut self.m8,
2398            dictionary,
2399            bytes as usize,
2400            wrapped_last_processed_pos as usize,
2401            &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
2402            mask as usize,
2403            &mut self.params,
2404            &mut self.hasher_,
2405            &mut self.dist_cache_,
2406            &mut self.last_insert_len_,
2407            &mut self.commands_.slice_mut()[self.num_commands_..],
2408            &mut self.num_commands_,
2409            &mut self.num_literals_,
2410        );
2411        {
2412            let max_length: usize = MaxMetablockSize(&mut self.params);
2413            let max_literals: usize = max_length.wrapping_div(8);
2414            let max_commands: usize = max_length.wrapping_div(8);
2415            let processed_bytes: usize =
2416                self.input_pos_.wrapping_sub(self.last_flush_pos_) as usize;
2417            let next_input_fits_metablock =
2418                processed_bytes.wrapping_add(self.input_block_size()) <= max_length;
2419            let should_flush = self.params.quality < 4
2420                && self.num_literals_.wrapping_add(self.num_commands_) >= 0x2fff;
2421            if !is_last
2422                && !force_flush
2423                && !should_flush
2424                && next_input_fits_metablock
2425                && self.num_literals_ < max_literals
2426                && self.num_commands_ < max_commands
2427            {
2428                if self.update_last_processed_pos() {
2429                    HasherReset(&mut self.hasher_);
2430                }
2431                *out_size = catable_header_size;
2432                return true;
2433            }
2434        }
2435        if self.last_insert_len_ > 0usize {
2436            self.commands_.slice_mut()[self.num_commands_].init_insert(self.last_insert_len_);
2437            self.num_commands_ = self.num_commands_.wrapping_add(1);
2438            self.num_literals_ = self.num_literals_.wrapping_add(self.last_insert_len_);
2439            self.last_insert_len_ = 0usize;
2440        }
2441        if !is_last && self.input_pos_ == self.last_flush_pos_ {
2442            *out_size = catable_header_size;
2443            return true;
2444        }
2445        {
2446            let metablock_size: u32 = self.input_pos_.wrapping_sub(self.last_flush_pos_) as u32;
2447            //let mut storage_ix: usize = s.last_bytes_bits_ as usize;
2448            //s.storage_.slice_mut()[0] = (*s).last_bytes_ as u8;
2449            //s.storage_.slice_mut()[1] = ((*s).last_bytes_ >> 8) as u8;
2450
2451            WriteMetaBlockInternal(
2452                &mut self.m8,
2453                &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
2454                mask as usize,
2455                self.last_flush_pos_,
2456                metablock_size as usize,
2457                is_last,
2458                literal_context_mode,
2459                &mut self.params,
2460                &mut self.literal_scratch_space,
2461                &mut self.command_scratch_space,
2462                &mut self.distance_scratch_space,
2463                self.prev_byte_,
2464                self.prev_byte2_,
2465                self.num_literals_,
2466                self.num_commands_,
2467                self.commands_.slice_mut(),
2468                &mut self.saved_dist_cache_,
2469                &mut self.dist_cache_,
2470                &mut self.recoder_state,
2471                &mut storage_ix,
2472                self.storage_.slice_mut(),
2473                callback,
2474            );
2475
2476            self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
2477                | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
2478            self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
2479            self.last_flush_pos_ = self.input_pos_;
2480            if self.update_last_processed_pos() {
2481                HasherReset(&mut self.hasher_);
2482            }
2483            let data = &self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index..];
2484            if self.last_flush_pos_ > 0 {
2485                self.prev_byte_ =
2486                    data[(((self.last_flush_pos_ as u32).wrapping_sub(1) & mask) as usize)];
2487            }
2488            if self.last_flush_pos_ > 1 {
2489                self.prev_byte2_ =
2490                    data[((self.last_flush_pos_.wrapping_sub(2) as u32 & mask) as usize)];
2491            }
2492            self.num_commands_ = 0usize;
2493            self.num_literals_ = 0usize;
2494            self.saved_dist_cache_
2495                .clone_from_slice(self.dist_cache_.split_at(4).0);
2496            self.next_out_ = NextOut::DynamicStorage(0); // this always returns that
2497            *out_size = storage_ix >> 3;
2498            true
2499        }
2500    }
2501
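    /* Writes the header of a metadata meta-block announcing
    `remaining_metadata_bytes_` bytes to skip (ISLAST = 0, the MNIBBLES = 0
    escape, a reserved bit, MSKIPBYTES, then the skip length minus one) into
    the staged output buffer and returns the header length in bytes. */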
2502    fn write_metadata_header(&mut self) -> usize {
2503        let block_size = self.remaining_metadata_bytes_ as usize;
2504        let header = GetNextOut!(*self);
2505        let mut storage_ix: usize;
2506        storage_ix = self.last_bytes_bits_ as usize;
2507        header[0] = self.last_bytes_ as u8;
2508        header[1] = (self.last_bytes_ >> 8) as u8;
2509        self.last_bytes_ = 0;
2510        self.last_bytes_bits_ = 0;
2511        BrotliWriteBits(1, 0, &mut storage_ix, header);
2512        BrotliWriteBits(2usize, 3, &mut storage_ix, header);
2513        BrotliWriteBits(1, 0, &mut storage_ix, header);
2514        if block_size == 0usize {
2515            BrotliWriteBits(2usize, 0, &mut storage_ix, header);
2516        } else {
2517            let nbits: u32 = if block_size == 1 {
2518                0u32
2519            } else {
2520                Log2FloorNonZero((block_size as u32).wrapping_sub(1) as (u64)).wrapping_add(1)
2521            };
2522            let nbytes: u32 = nbits.wrapping_add(7).wrapping_div(8);
2523            BrotliWriteBits(2usize, nbytes as (u64), &mut storage_ix, header);
2524            BrotliWriteBits(
2525                (8u32).wrapping_mul(nbytes) as usize,
2526                block_size.wrapping_sub(1) as u64,
2527                &mut storage_ix,
2528                header,
2529            );
2530        }
2531        storage_ix.wrapping_add(7u32 as usize) >> 3
2532    }
2533}
2534
2535impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
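    /* Handles BROTLI_OPERATION_EMIT_METADATA: flushes any pending output and
    unprocessed input, writes the metadata meta-block header, then copies the
    metadata bytes either directly into the caller's output buffer or through
    the 16-byte tiny buffer. Fails for more than 1 << 24 bytes of metadata or
    when called in the wrong stream state. */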
2536    fn process_metadata<
2537        MetaBlockCallback: FnMut(
2538            &mut interface::PredictionModeContextMap<InputReferenceMut>,
2539            &mut [interface::StaticCommand],
2540            interface::InputPair,
2541            &mut Alloc,
2542        ),
2543    >(
2544        &mut self,
2545        available_in: &mut usize,
2546        next_in_array: &[u8],
2547        next_in_offset: &mut usize,
2548        available_out: &mut usize,
2549        next_out_array: &mut [u8],
2550        next_out_offset: &mut usize,
2551        total_out: &mut Option<usize>,
2552        metablock_callback: &mut MetaBlockCallback,
2553    ) -> bool {
2554        if *available_in > (1u32 << 24) as usize {
2555            return false;
2556        }
2557        if self.stream_state_ as i32 == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32 {
2558            self.remaining_metadata_bytes_ = *available_in as u32;
2559            self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD;
2560        }
2561        if self.stream_state_ as i32 != BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
2562            && (self.stream_state_ as i32
2563                != BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY as i32)
2564        {
2565            return false;
2566        }
2567        loop {
2568            if self.inject_flush_or_push_output(
2569                available_out,
2570                next_out_array,
2571                next_out_offset,
2572                total_out,
2573            ) != 0
2574            {
2575                continue;
2576            }
2577            if self.available_out_ != 0usize {
2578                break;
2579            }
2580            if self.input_pos_ != self.last_flush_pos_ {
2581                let mut avail_out: usize = self.available_out_;
2582                let result = self.encode_data(false, true, &mut avail_out, metablock_callback);
2583                self.available_out_ = avail_out;
2584                if !result {
2585                    return false;
2586                }
2587                continue;
2588            }
2589            if self.stream_state_ as i32
2590                == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
2591            {
2592                self.next_out_ = NextOut::TinyBuf(0);
2593                self.available_out_ = self.write_metadata_header();
2594                self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY;
2595                {
2596                    continue;
2597                }
2598            } else {
2599                if self.remaining_metadata_bytes_ == 0u32 {
2600                    self.remaining_metadata_bytes_ = !(0u32);
2601                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING;
2602                    {
2603                        break;
2604                    }
2605                }
2606                if *available_out != 0 {
2607                    let copy: u32 =
2608                        min(self.remaining_metadata_bytes_ as usize, *available_out) as u32;
2609                    next_out_array[*next_out_offset..(*next_out_offset + copy as usize)]
2610                        .clone_from_slice(
2611                            &next_in_array[*next_in_offset..(*next_in_offset + copy as usize)],
2612                        );
2613                    //memcpy(*next_out, *next_in, copy as usize);
2614                    // *next_in = next_in.offset(copy as isize);
2615                    *next_in_offset += copy as usize;
2616                    *available_in = available_in.wrapping_sub(copy as usize);
2617                    self.remaining_metadata_bytes_ =
2618                        self.remaining_metadata_bytes_.wrapping_sub(copy);
2619                    *next_out_offset += copy as usize;
2620                    // *next_out = next_out.offset(copy as isize);
2621                    *available_out = available_out.wrapping_sub(copy as usize);
2622                } else {
2623                    let copy: u32 = min(self.remaining_metadata_bytes_, 16u32);
2624                    self.next_out_ = NextOut::TinyBuf(0);
2625                    GetNextOut!(self)[..(copy as usize)].clone_from_slice(
2626                        &next_in_array[*next_in_offset..(*next_in_offset + copy as usize)],
2627                    );
2628                    //memcpy(s.next_out_, *next_in, copy as usize);
2629                    // *next_in = next_in.offset(copy as isize);
2630                    *next_in_offset += copy as usize;
2631                    *available_in = available_in.wrapping_sub(copy as usize);
2632                    self.remaining_metadata_bytes_ =
2633                        self.remaining_metadata_bytes_.wrapping_sub(copy);
2634                    self.available_out_ = copy as usize;
2635                }
2636                {
2637                    continue;
2638                }
2639            }
2640        }
2641        true
2642    }
2643}
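/* Once a requested flush has been fully drained, returns the stream to the
PROCESSING state and clears the staged output pointer. */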
2644fn CheckFlushCompleteInner(
2645    stream_state: &mut BrotliEncoderStreamState,
2646    available_out: usize,
2647    next_out: &mut NextOut,
2648) {
2649    if *stream_state == BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED
2650        && (available_out == 0)
2651    {
2652        *stream_state = BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING;
2653        *next_out = NextOut::None;
2654    }
2655}
2656
2657impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
2658    fn check_flush_complete(&mut self) {
2659        CheckFlushCompleteInner(
2660            &mut self.stream_state_,
2661            self.available_out_,
2662            &mut self.next_out_,
2663        );
2664    }
2665
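    /* Streaming path for qualities 0 and 1 only: compresses each input block
    with the one-pass or two-pass fragment compressor, writing directly into
    the caller's buffer when it can hold the worst-case output and into the
    internal `storage_` otherwise. The quality 1 command/literal buffers are
    borrowed from the state when present or allocated locally. */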
2666    fn compress_stream_fast(
2667        &mut self,
2668        op: BrotliEncoderOperation,
2669        available_in: &mut usize,
2670        next_in_array: &[u8],
2671        next_in_offset: &mut usize,
2672        available_out: &mut usize,
2673        next_out_array: &mut [u8],
2674        next_out_offset: &mut usize,
2675        total_out: &mut Option<usize>,
2676    ) -> bool {
2677        let block_size_limit: usize = 1 << self.params.lgwin;
2678        let buf_size: usize = min(
2679            kCompressFragmentTwoPassBlockSize,
2680            min(*available_in, block_size_limit),
2681        );
2682        let mut command_buf = <Alloc as Allocator<u32>>::AllocatedMemory::default();
2683        let mut literal_buf = <Alloc as Allocator<u8>>::AllocatedMemory::default();
2684        if self.params.quality != 0i32 && (self.params.quality != 1i32) {
2685            return false;
2686        }
2687        if self.params.quality == 1i32 {
2688            if self.command_buf_.slice().is_empty()
2689                && (buf_size == kCompressFragmentTwoPassBlockSize)
2690            {
2691                self.command_buf_ = <Alloc as Allocator<u32>>::alloc_cell(
2692                    &mut self.m8,
2693                    kCompressFragmentTwoPassBlockSize,
2694                );
2695                self.literal_buf_ = <Alloc as Allocator<u8>>::alloc_cell(
2696                    &mut self.m8,
2697                    kCompressFragmentTwoPassBlockSize,
2698                );
2699            }
2700            if !self.command_buf_.slice().is_empty() {
2701                command_buf = core::mem::take(&mut self.command_buf_);
2702                literal_buf = core::mem::take(&mut self.literal_buf_);
2703            } else {
2704                command_buf = <Alloc as Allocator<u32>>::alloc_cell(&mut self.m8, buf_size);
2705                literal_buf = <Alloc as Allocator<u8>>::alloc_cell(&mut self.m8, buf_size);
2706            }
2707        }
        loop {
            if self.inject_flush_or_push_output(
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
            ) != 0
            {
                continue;
            }
            if self.available_out_ == 0usize
                && (self.stream_state_ as i32
                    == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32)
                && (*available_in != 0usize
                    || op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_PROCESS as i32)
            {
                let block_size: usize = min(block_size_limit, *available_in);
                let is_last = *available_in == block_size
                    && op == BrotliEncoderOperation::BROTLI_OPERATION_FINISH;
                let force_flush = *available_in == block_size
                    && op == BrotliEncoderOperation::BROTLI_OPERATION_FLUSH;
                let max_out_size: usize = (2usize).wrapping_mul(block_size).wrapping_add(503);
                let mut inplace: i32 = 1i32;
                let storage: &mut [u8];
                let mut storage_ix: usize = self.last_bytes_bits_ as usize;
                let mut table_size: usize = 0;

                if force_flush && block_size == 0 {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
                    continue;
                }
                if max_out_size <= *available_out {
                    storage = &mut next_out_array[*next_out_offset..]; //GetNextOut!(s);
                } else {
                    inplace = 0i32;
                    self.get_brotli_storage(max_out_size);
                    storage = self.storage_.slice_mut();
                }
                // Seed the output with the partial byte (and bit count) carried
                // over from the previous block.
                storage[0] = self.last_bytes_ as u8;
                storage[1] = (self.last_bytes_ >> 8) as u8;
                let table: &mut [i32] =
                    GetHashTable!(self, self.params.quality, block_size, &mut table_size);
                if self.params.quality == 0i32 {
                    BrotliCompressFragmentFast(
                        &mut self.m8,
                        &(next_in_array)[*next_in_offset..],
                        block_size,
                        is_last as i32,
                        table,
                        table_size,
                        &mut self.cmd_depths_[..],
                        &mut self.cmd_bits_[..],
                        &mut self.cmd_code_numbits_,
                        &mut self.cmd_code_[..],
                        &mut storage_ix,
                        storage,
                    );
                } else {
                    BrotliCompressFragmentTwoPass(
                        &mut self.m8,
                        &(next_in_array)[*next_in_offset..],
                        block_size,
                        is_last as i32,
                        command_buf.slice_mut(),
                        literal_buf.slice_mut(),
                        table,
                        table_size,
                        &mut storage_ix,
                        storage,
                    );
                }
                *next_in_offset += block_size;
                *available_in = available_in.wrapping_sub(block_size);
                if inplace != 0 {
                    let out_bytes: usize = storage_ix >> 3;
                    *next_out_offset += out_bytes;
                    *available_out = available_out.wrapping_sub(out_bytes);
                    self.total_out_ = self.total_out_.wrapping_add(out_bytes as u64);
                    if let &mut Some(ref mut total_out_inner) = total_out {
                        *total_out_inner = self.total_out_ as usize;
                    }
                } else {
                    let out_bytes: usize = storage_ix >> 3;
                    self.next_out_ = NextOut::DynamicStorage(0);
                    self.available_out_ = out_bytes;
                }
                // Save the trailing partial byte so the next block can continue
                // the bit stream seamlessly.
                self.last_bytes_ = storage[storage_ix >> 3] as u16
                    | ((storage[1 + (storage_ix >> 3)] as u16) << 8);
                self.last_bytes_bits_ = (storage_ix & 7) as u8;
                if force_flush {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
                }
                if is_last {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FINISHED;
                }
                continue;
            }
            break;
        }
        if command_buf.slice().len() == kCompressFragmentTwoPassBlockSize
            && self.command_buf_.slice().is_empty()
        {
            // undo temporary aliasing of command_buf and literal_buf
            self.command_buf_ = core::mem::take(&mut command_buf);
            self.literal_buf_ = core::mem::take(&mut literal_buf);
        } else {
            <Alloc as Allocator<u32>>::free_cell(&mut self.m8, command_buf);
            <Alloc as Allocator<u8>>::free_cell(&mut self.m8, literal_buf);
        }
        self.check_flush_complete();
        true
    }

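    // Remaining capacity, in bytes, of the current input block; 0 once a full
    // block's worth of input has already been buffered.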
    fn remaining_input_block_size(&mut self) -> usize {
        let delta: u64 = self.unprocessed_input_size();
        let block_size = self.input_block_size();
        if delta >= block_size as u64 {
            return 0usize;
        }
        (block_size as u64).wrapping_sub(delta) as usize
    }

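    // Main streaming entry point: input is copied into the ring buffer in
    // block-sized chunks, compressed output is pushed out as it becomes
    // available, and FLUSH/FINISH operations drive the stream-state
    // transitions. Qualities 0 and 1 (without concatenation support) are
    // routed to compress_stream_fast. Returns false on misuse, e.g. when new
    // input is supplied while output from a previous call is still pending.
    //
    // A minimal driving-loop sketch (hypothetical: the construction of
    // `state`, `input: &[u8]` and `out_buf: &mut [u8]` is assumed, `out_buf`
    // is assumed large enough for the whole stream, and the no-op metablock
    // callback may need explicit parameter types for inference):
    //
    //     let mut avail_in = input.len();
    //     let mut in_off = 0usize;
    //     let mut avail_out = out_buf.len();
    //     let mut out_off = 0usize;
    //     let mut total_out = Some(0);
    //     while !state.is_finished() {
    //         if !state.compress_stream(
    //             BrotliEncoderOperation::BROTLI_OPERATION_FINISH,
    //             &mut avail_in, input, &mut in_off,
    //             &mut avail_out, out_buf, &mut out_off,
    //             &mut total_out,
    //             &mut |_, _, _, _| (),
    //         ) {
    //             break; // encoder reported misuse or allocation failure
    //         }
    //     }
    //     let compressed = &out_buf[..out_off];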
    pub fn compress_stream<
        MetablockCallback: FnMut(
            &mut interface::PredictionModeContextMap<InputReferenceMut>,
            &mut [interface::StaticCommand],
            interface::InputPair,
            &mut Alloc,
        ),
    >(
        &mut self,
        op: BrotliEncoderOperation,
        available_in: &mut usize,
        next_in_array: &[u8],
        next_in_offset: &mut usize,
        available_out: &mut usize,
        next_out_array: &mut [u8],
        next_out_offset: &mut usize,
        total_out: &mut Option<usize>,
        metablock_callback: &mut MetablockCallback,
    ) -> bool {
        if !self.ensure_initialized() {
            return false;
        }
        // remaining_metadata_bytes_ == !0 means no metadata block is pending.
        if self.remaining_metadata_bytes_ != !(0u32) {
            if *available_in != self.remaining_metadata_bytes_ as usize {
                return false;
            }
            if op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_EMIT_METADATA as i32 {
                return false;
            }
        }
        if op as i32 == BrotliEncoderOperation::BROTLI_OPERATION_EMIT_METADATA as i32 {
            self.update_size_hint(0);
            return self.process_metadata(
                available_in,
                next_in_array,
                next_in_offset,
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
                metablock_callback,
            );
        }
        if self.stream_state_ as i32 == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
            || self.stream_state_ as i32
                == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY as i32
        {
            return false;
        }
        if self.stream_state_ as i32 != BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32
            && (*available_in != 0usize)
        {
            return false;
        }
        if (self.params.quality == 0i32 || self.params.quality == 1i32) && !self.params.catable {
            // the fast path does not support concatenation (catable)
            return self.compress_stream_fast(
                op,
                available_in,
                next_in_array,
                next_in_offset,
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
            );
        }
        loop {
            let remaining_block_size: usize = self.remaining_input_block_size();
            if remaining_block_size != 0usize && (*available_in != 0usize) {
                let copy_input_size: usize = min(remaining_block_size, *available_in);
                self.copy_input_to_ring_buffer(copy_input_size, &next_in_array[*next_in_offset..]);
                *next_in_offset += copy_input_size;
                *available_in = available_in.wrapping_sub(copy_input_size);
                continue;
            }
            if self.inject_flush_or_push_output(
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
            ) != 0
            {
                continue;
            }
            if self.available_out_ == 0usize
                && (self.stream_state_ as i32
                    == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32)
                && (remaining_block_size == 0usize
                    || op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_PROCESS as i32)
            {
                let is_last =
                    *available_in == 0 && op == BrotliEncoderOperation::BROTLI_OPERATION_FINISH;
                let force_flush =
                    *available_in == 0 && op == BrotliEncoderOperation::BROTLI_OPERATION_FLUSH;

                self.update_size_hint(*available_in);
                let mut avail_out = self.available_out_;
                let result =
                    self.encode_data(is_last, force_flush, &mut avail_out, metablock_callback);
                self.available_out_ = avail_out;
                // encode_data sets next_out_ to the start of its internal storage
                if !result {
                    return false;
                }
                if force_flush {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
                }
                if is_last {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FINISHED;
                }
                continue;
            }
            break;
        }
        self.check_flush_complete();
        true
    }

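    // True once a FINISH operation has completed and all output has been drained.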
    pub fn is_finished(&self) -> bool {
        self.stream_state_ == BrotliEncoderStreamState::BROTLI_STREAM_FINISHED
            && !self.has_more_output()
    }

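    // True while compressed bytes are still buffered inside the encoder.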
    pub fn has_more_output(&self) -> bool {
        self.available_out_ != 0
    }

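    // Pull-style output: returns a slice of pending compressed bytes, consuming
    // up to `*size` of them (or everything buffered when `*size` is 0) and
    // writing the number actually consumed back into `*size`.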
    pub fn take_output(&mut self, size: &mut usize) -> &[u8] {
        let mut consumed_size: usize = self.available_out_;
        let mut result: &[u8] = GetNextOut!(*self);
        if *size != 0 {
            consumed_size = min(*size, self.available_out_);
        }
        if consumed_size != 0 {
            self.next_out_ = NextOutIncrement(&self.next_out_, consumed_size as i32);
            self.available_out_ = self.available_out_.wrapping_sub(consumed_size);
            self.total_out_ = self.total_out_.wrapping_add(consumed_size as u64);
            CheckFlushCompleteInner(
                &mut self.stream_state_,
                self.available_out_,
                &mut self.next_out_,
            );
            *size = consumed_size;
        } else {
            *size = 0usize;
            result = &[];
        }
        result
    }
}

pub fn BrotliEncoderVersion() -> u32 {
    0x0100_0f01
}

impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
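    // Size in bytes of one input block (1 << lgblock); 0 if the encoder could
    // not be initialized.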
    pub fn input_block_size(&mut self) -> usize {
        if !self.ensure_initialized() {
            return 0;
        }
        1 << self.params.lgblock
    }

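    // Runs one encode step over data already copied into the ring buffer and
    // exposes the encoder's internal storage through `output`; `out_size`
    // receives the number of output bytes produced.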
    pub fn write_data<
        'a,
        MetablockCallback: FnMut(
            &mut interface::PredictionModeContextMap<InputReferenceMut>,
            &mut [interface::StaticCommand],
            interface::InputPair,
            &mut Alloc,
        ),
    >(
        &'a mut self,
        is_last: i32,
        force_flush: i32,
        out_size: &mut usize,
        output: &'a mut &'a mut [u8],
        metablock_callback: &mut MetablockCallback,
    ) -> bool {
        let ret = self.encode_data(is_last != 0, force_flush != 0, out_size, metablock_callback);
        *output = self.storage_.slice_mut();
        ret
    }
}