1use alloc::Allocator;
2use core;
3use core::cmp::{max, min};
4
5use super::super::alloc;
6use super::super::alloc::{SliceWrapper, SliceWrapperMut};
7use super::backward_references::{
8 AdvHashSpecialization, AdvHasher, AnyHasher, BasicHasher, BrotliCreateBackwardReferences,
9 BrotliEncoderMode, BrotliEncoderParams, BrotliHasherParams, H2Sub, H3Sub, H4Sub, H54Sub, H5Sub,
10 H6Sub, HQ5Sub, HQ7Sub, HowPrepared, StoreLookaheadThenStore, Struct1, UnionHasher, H9,
11 H9_BLOCK_BITS, H9_BLOCK_SIZE, H9_BUCKET_BITS, H9_NUM_LAST_DISTANCES_TO_CHECK,
12};
13use super::bit_cost::{shannon_entropy, BitsEntropy};
14use super::brotli_bit_stream::{
15 store_meta_block, store_meta_block_fast, store_meta_block_trivial,
16 store_uncompressed_meta_block, BrotliWriteEmptyLastMetaBlock, BrotliWriteMetadataMetaBlock,
17 MetaBlockSplit, RecoderState,
18};
19use super::combined_alloc::BrotliAlloc;
20use super::command::{get_length_code, BrotliDistanceParams, Command};
21use super::compress_fragment::compress_fragment_fast;
22use super::compress_fragment_two_pass::{compress_fragment_two_pass, BrotliWriteBits};
23use super::constants::{
24 BROTLI_CONTEXT, BROTLI_CONTEXT_LUT, BROTLI_MAX_NDIRECT, BROTLI_MAX_NPOSTFIX,
25 BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS, BROTLI_WINDOW_GAP,
26};
27use super::hash_to_binary_tree::InitializeH10;
28use super::histogram::{
29 ContextType, CostAccessors, HistogramCommand, HistogramDistance, HistogramLiteral,
30};
31use super::interface;
32use super::metablock::{
33 BrotliBuildMetaBlock, BrotliBuildMetaBlockGreedy, BrotliInitDistanceParams,
34 BrotliOptimizeHistograms,
35};
36pub use super::parameters::BrotliEncoderParameter;
37use super::static_dict::{kNumDistanceCacheEntries, BrotliGetDictionary};
38use super::util::{floatX, Log2FloorNonZero};
39use crate::enc::combined_alloc::{alloc_default, allocate};
40use crate::enc::input_pair::InputReferenceMut;
41use crate::enc::utf8_util::is_mostly_utf8;
42
// Input block size for the two-pass fragment compressor: 1 << 17 = 128 KiB.
static kCompressFragmentTwoPassBlockSize: usize = (1i32 << 17) as usize;

// Minimum fraction of bytes that must look like UTF-8 for the input to be
// treated as text (presumably checked via `is_mostly_utf8` — see import above).
static kMinUTF8Ratio: floatX = 0.75;
82
/// Sliding-window ring buffer holding the encoder's input history.
///
/// The backing allocation (`data_mo`) stores two prefix bytes before
/// `buffer_index` (mirrors of the last two bytes of the ring, kept in sync by
/// `RingBufferWrite`) and seven slack bytes after the data so eight bytes can
/// always be read at any position for hashing.
pub struct RingBuffer<AllocU8: alloc::Allocator<u8>> {
    // Ring size in bytes; set to a power of two by `RingBufferSetup`.
    pub size_: u32,
    // Position wrap mask: size_ - 1.
    pub mask_: u32,
    // Extra tail region after the ring so one block can be read contiguously.
    pub tail_size_: u32,
    // size_ + tail_size_.
    pub total_size_: u32,
    // Bytes currently allocated for the data area (grows lazily).
    pub cur_size_: u32,
    // Total bytes written so far; kept below 2^31 by wrapping in RingBufferWrite.
    pub pos_: u32,
    // Backing allocation; real data starts at `buffer_index`.
    pub data_mo: AllocU8::AllocatedMemory,
    // Offset of the first data byte inside `data_mo` (the two prefix bytes precede it).
    pub buffer_index: usize,
}
93
/// State of the encoder's streaming interface.
#[derive(PartialEq, Eq, Copy, Clone)]
#[repr(i32)]
pub enum BrotliEncoderStreamState {
    /// Default state: consuming input and producing compressed meta-blocks.
    BROTLI_STREAM_PROCESSING = 0,
    /// A flush was requested; pending output must be drained first.
    BROTLI_STREAM_FLUSH_REQUESTED = 1,
    /// The last meta-block has been emitted; no further input is accepted.
    BROTLI_STREAM_FINISHED = 2,
    /// Emitting the header of a metadata meta-block.
    BROTLI_STREAM_METADATA_HEAD = 3,
    /// Emitting the body of a metadata meta-block.
    BROTLI_STREAM_METADATA_BODY = 4,
}
103
/// Cursor describing where the next output byte lives: an offset into the
/// dynamically allocated `storage_`, an offset into the fixed 16-byte
/// `tiny_buf_`, or nowhere (no pending output). Resolved to a real slice by
/// `GetNextOutInternal`.
#[derive(Clone, Copy, Debug)]
enum NextOut {
    DynamicStorage(u32), // offset into storage_
    TinyBuf(u32),        // offset into tiny_buf_
    None,
}
110fn GetNextOutInternal<'a>(
111 next_out: &NextOut,
112 storage: &'a mut [u8],
113 tiny_buf: &'a mut [u8; 16],
114) -> &'a mut [u8] {
115 match next_out {
116 &NextOut::DynamicStorage(offset) => &mut storage[offset as usize..],
117 &NextOut::TinyBuf(offset) => &mut tiny_buf[offset as usize..],
118 &NextOut::None => &mut [],
119 }
120}
/// Expands to the encoder state `$s`'s current mutable output slice, resolving
/// `$s.next_out_` against either its dynamic `storage_` or its `tiny_buf_`.
macro_rules! GetNextOut {
    ($s : expr) => {
        GetNextOutInternal(&$s.next_out_, $s.storage_.slice_mut(), &mut $s.tiny_buf_)
    };
}
126fn NextOutIncrement(next_out: &NextOut, inc: i32) -> NextOut {
127 match next_out {
128 &NextOut::DynamicStorage(offset) => NextOut::DynamicStorage((offset as i32 + inc) as u32),
129 &NextOut::TinyBuf(offset) => NextOut::TinyBuf((offset as i32 + inc) as u32),
130 &NextOut::None => NextOut::None,
131 }
132}
133fn IsNextOutNull(next_out: &NextOut) -> bool {
134 match next_out {
135 &NextOut::DynamicStorage(_) => false,
136 &NextOut::TinyBuf(_) => false,
137 &NextOut::None => true,
138 }
139}
140
/// Progress of the stream prologue emission — tracks whether the header and,
/// for concatenable ("catable") output, the two leading bytes have been
/// written yet.
#[derive(Clone, Copy, Debug)]
pub enum IsFirst {
    NothingWritten,
    HeaderWritten,
    FirstCatableByteWritten,
    BothCatableBytesWritten,
}
148
/// Complete state of a streaming Brotli encoder instance.
pub struct BrotliEncoderStateStruct<Alloc: BrotliAlloc> {
    // Encoding parameters; frozen once `is_initialized_` is set.
    pub params: BrotliEncoderParams,
    // Allocator used for every internal buffer.
    pub m8: Alloc,
    // Backward-reference hasher; concrete variant chosen from params.
    pub hasher_: UnionHasher<Alloc>,
    // Total input bytes received so far.
    pub input_pos_: u64,
    // Sliding window over recent input.
    pub ringbuffer_: RingBuffer<Alloc>,
    // Capacity (in commands) of `commands_`.
    pub cmd_alloc_size_: usize,
    // Buffered insert/copy commands and how many of them are valid.
    pub commands_: <Alloc as Allocator<Command>>::AllocatedMemory,
    pub num_commands_: usize,
    pub num_literals_: usize,
    // Length of the insert run not yet turned into a command.
    pub last_insert_len_: usize,
    // Input positions of the last flush / last processed byte.
    pub last_flush_pos_: u64,
    pub last_processed_pos_: u64,
    // Recent backward distances (ring of short codes) and the copy saved
    // for restoring after a flush.
    pub dist_cache_: [i32; 16],
    pub saved_dist_cache_: [i32; kNumDistanceCacheEntries],
    // Partial byte(s) carried between meta-blocks: value and bit count.
    pub last_bytes_: u16,
    pub last_bytes_bits_: u8,
    // Last two input bytes, used as context for the next block.
    pub prev_byte_: u8,
    pub prev_byte2_: u8,
    // Scratch output storage and its size.
    pub storage_size_: usize,
    pub storage_: <Alloc as Allocator<u8>>::AllocatedMemory,
    // Hash tables for the low-quality fragment compressors.
    pub small_table_: [i32; 1024],
    pub large_table_: <Alloc as Allocator<i32>>::AllocatedMemory,
    // Command prefix code (quality 0 path): depths, bits, serialized code.
    pub cmd_depths_: [u8; 128],
    pub cmd_bits_: [u16; 128],
    pub cmd_code_: [u8; 512],
    pub cmd_code_numbits_: usize,
    // Scratch buffers for the two-pass fragment compressor.
    pub command_buf_: <Alloc as Allocator<u32>>::AllocatedMemory,
    pub literal_buf_: <Alloc as Allocator<u8>>::AllocatedMemory,
    // Where the next output byte goes (dynamic storage / tiny buffer / none).
    next_out_: NextOut,
    // Bytes available at `next_out_` and total bytes emitted so far.
    pub available_out_: usize,
    pub total_out_: u64,
    // Small fixed buffer for stream epilogue/metadata bytes.
    pub tiny_buf_: [u8; 16],
    pub remaining_metadata_bytes_: u32,
    pub stream_state_: BrotliEncoderStreamState,
    pub is_last_block_emitted_: bool,
    pub is_initialized_: bool,
    // Prologue progress for catable/appendable streams.
    pub is_first_mb: IsFirst,
    // Reusable scratch space for histogram cost computations.
    pub literal_scratch_space: <HistogramLiteral as CostAccessors>::i32vec,
    pub command_scratch_space: <HistogramCommand as CostAccessors>::i32vec,
    pub distance_scratch_space: <HistogramDistance as CostAccessors>::i32vec,
    pub recoder_state: RecoderState,
    // Custom dictionary bookkeeping (size recorded only when non-zero).
    custom_dictionary_size: Option<core::num::NonZeroUsize>,
    custom_dictionary: bool,
}
195
/// Applies a single encoder parameter `p` with raw `value` to `params`.
///
/// Returns `true` when the parameter is recognized (and, where validated,
/// the value is legal); `false` otherwise. Most numeric parameters are stored
/// without range checking here — `SanitizeParams` clamps quality/window later.
pub fn set_parameter(
    params: &mut BrotliEncoderParams,
    p: BrotliEncoderParameter,
    value: u32,
) -> bool {
    use crate::enc::parameters::BrotliEncoderParameter::*;
    match p {
        BROTLI_PARAM_MODE => {
            // Values 0..=6 map onto the encoder modes; anything else falls
            // back to GENERIC rather than failing.
            params.mode = match value {
                0 => BrotliEncoderMode::BROTLI_MODE_GENERIC,
                1 => BrotliEncoderMode::BROTLI_MODE_TEXT,
                2 => BrotliEncoderMode::BROTLI_MODE_FONT,
                3 => BrotliEncoderMode::BROTLI_FORCE_LSB_PRIOR,
                4 => BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR,
                5 => BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR,
                6 => BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR,
                _ => BrotliEncoderMode::BROTLI_MODE_GENERIC,
            };
        }
        BROTLI_PARAM_QUALITY => params.quality = value as i32,
        BROTLI_PARAM_STRIDE_DETECTION_QUALITY => params.stride_detection_quality = value as u8,
        BROTLI_PARAM_HIGH_ENTROPY_DETECTION_QUALITY => {
            params.high_entropy_detection_quality = value as u8
        }
        BROTLI_PARAM_CDF_ADAPTATION_DETECTION => params.cdf_adaptation_detection = value as u8,
        BROTLI_PARAM_Q9_5 => params.q9_5 = (value != 0),
        BROTLI_PARAM_PRIOR_BITMASK_DETECTION => params.prior_bitmask_detection = value as u8,
        // The SPEED/CM_SPEED setters write the "high" entry [1]/[3] and also
        // seed the corresponding "low" entry [0]/[2] if it is still unset.
        BROTLI_PARAM_SPEED => {
            params.literal_adaptation[1].0 = value as u16;
            if params.literal_adaptation[0] == (0, 0) {
                params.literal_adaptation[0].0 = value as u16;
            }
        }
        BROTLI_PARAM_SPEED_MAX => {
            params.literal_adaptation[1].1 = value as u16;
            if params.literal_adaptation[0].1 == 0 {
                params.literal_adaptation[0].1 = value as u16;
            }
        }
        BROTLI_PARAM_CM_SPEED => {
            params.literal_adaptation[3].0 = value as u16;
            if params.literal_adaptation[2] == (0, 0) {
                params.literal_adaptation[2].0 = value as u16;
            }
        }
        BROTLI_PARAM_CM_SPEED_MAX => {
            params.literal_adaptation[3].1 = value as u16;
            if params.literal_adaptation[2].1 == 0 {
                params.literal_adaptation[2].1 = value as u16;
            }
        }
        BROTLI_PARAM_SPEED_LOW => params.literal_adaptation[0].0 = value as u16,
        BROTLI_PARAM_SPEED_LOW_MAX => params.literal_adaptation[0].1 = value as u16,
        BROTLI_PARAM_CM_SPEED_LOW => params.literal_adaptation[2].0 = value as u16,
        BROTLI_PARAM_CM_SPEED_LOW_MAX => params.literal_adaptation[2].1 = value as u16,
        BROTLI_PARAM_LITERAL_BYTE_SCORE => params.hasher.literal_byte_score = value as i32,
        BROTLI_METABLOCK_CALLBACK => params.log_meta_block = value != 0,
        BROTLI_PARAM_LGWIN => params.lgwin = value as i32,
        BROTLI_PARAM_LGBLOCK => params.lgblock = value as i32,
        BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING => {
            // Only a strict boolean is accepted here.
            if value != 0 && value != 1 {
                return false;
            }
            params.disable_literal_context_modeling = if value != 0 { 1 } else { 0 };
        }
        BROTLI_PARAM_SIZE_HINT => params.size_hint = value as usize,
        BROTLI_PARAM_LARGE_WINDOW => params.large_window = value != 0,
        BROTLI_PARAM_AVOID_DISTANCE_PREFIX_SEARCH => {
            params.avoid_distance_prefix_search = value != 0
        }
        BROTLI_PARAM_CATABLE => {
            // Catable output implies appendable; it also disables the static
            // dictionary (concatenated streams must not reference it).
            params.catable = value != 0;
            if !params.appendable {
                params.appendable = value != 0;
            }
            params.use_dictionary = (value == 0);
        }
        BROTLI_PARAM_APPENDABLE => params.appendable = value != 0,
        BROTLI_PARAM_MAGIC_NUMBER => params.magic_number = value != 0,
        BROTLI_PARAM_FAVOR_EFFICIENCY => params.favor_cpu_efficiency = value != 0,
        _ => return false,
    }
    true
}
280
281impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
282 pub fn set_parameter(&mut self, p: BrotliEncoderParameter, value: u32) -> bool {
283 if self.is_initialized_ {
284 false
285 } else {
286 set_parameter(&mut self.params, p, value)
287 }
288 }
289}
290
// Limits for the large-window (62-bit distance) Brotli format.
pub const BROTLI_LARGE_MAX_DISTANCE_BITS: u32 = 62;
pub const BROTLI_LARGE_MIN_WBITS: u32 = 10;
pub const BROTLI_LARGE_MAX_WBITS: u32 = 30;

// Limits for the standard format.
pub const BROTLI_MAX_DISTANCE_BITS: u32 = 24;
pub const BROTLI_MAX_WINDOW_BITS: usize = BROTLI_MAX_DISTANCE_BITS as usize;
pub const BROTLI_MAX_DISTANCE: usize = 0x03ff_fffc;
pub const BROTLI_MAX_ALLOWED_DISTANCE: usize = 0x07ff_fffc;
pub const BROTLI_NUM_DISTANCE_SHORT_CODES: u32 = 16;

/// Size of the distance symbol alphabet for a given postfix-bit count,
/// number of direct distance codes, and maximum distance bit length:
/// the 16 short codes, then the direct codes, then the prefix-coded
/// symbols (two per bit level, shifted by the postfix width).
pub fn BROTLI_DISTANCE_ALPHABET_SIZE(NPOSTFIX: u32, NDIRECT: u32, MAXNBITS: u32) -> u32 {
    BROTLI_NUM_DISTANCE_SHORT_CODES + NDIRECT + (MAXNBITS << (NPOSTFIX + 1))
}

pub const BROTLI_NUM_DISTANCE_SYMBOLS: usize = 1128;
310
/// Returns the default encoder parameters: quality 11, 22-bit window,
/// generic mode, static dictionary enabled, and an H6 hasher configuration
/// (bucket_bits 15, block_bits 8, hash_len 5, 16 last distances checked).
pub fn BrotliEncoderInitParams() -> BrotliEncoderParams {
    BrotliEncoderParams {
        dist: BrotliDistanceParams {
            // No postfix/direct distance codes by default.
            distance_postfix_bits: 0,
            num_direct_distance_codes: 0,
            alphabet_size: BROTLI_DISTANCE_ALPHABET_SIZE(0, 0, BROTLI_MAX_DISTANCE_BITS),
            max_distance: BROTLI_MAX_DISTANCE,
        },
        mode: BrotliEncoderMode::BROTLI_MODE_GENERIC,
        log_meta_block: false,
        large_window: false,
        avoid_distance_prefix_search: false,
        quality: 11,
        q9_5: false,
        lgwin: 22i32,
        // 0 = derive the block size from quality/window in ComputeLgBlock.
        lgblock: 0i32,
        size_hint: 0usize,
        disable_literal_context_modeling: 0i32,
        stride_detection_quality: 0,
        high_entropy_detection_quality: 0,
        cdf_adaptation_detection: 0,
        prior_bitmask_detection: 0,
        // (speed, max) pairs; zeros mean "unset" to the SPEED parameter setters.
        literal_adaptation: [(0, 0); 4],
        catable: false,
        use_dictionary: true,
        appendable: false,
        magic_number: false,
        favor_cpu_efficiency: false,
        hasher: BrotliHasherParams {
            type_: 6,
            block_bits: 9 - 1,
            bucket_bits: 15,
            hash_len: 5,
            num_last_distances_to_check: 16,
            literal_byte_score: 0,
        },
    }
}
349
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Tries to extend the most recent copy command over the upcoming input
    /// bytes while they keep matching at the same backward distance.
    ///
    /// `bytes` is the number of unprocessed bytes remaining and
    /// `wrapped_last_processed_pos` is the ring-buffer position of the next
    /// byte; both are advanced for every byte absorbed into the command.
    /// The command's prefix code is recomputed afterwards.
    fn extend_last_command(&mut self, bytes: &mut u32, wrapped_last_processed_pos: &mut u32) {
        let last_command = &mut self.commands_.slice_mut()[self.num_commands_ - 1];

        let mask = self.ringbuffer_.mask_;
        let max_backward_distance: u64 = (1u64 << self.params.lgwin) - BROTLI_WINDOW_GAP as u64;
        // copy_len_ keeps extra data in its top bits; low 25 bits are the length.
        let last_copy_len = u64::from(last_command.copy_len_) & 0x01ff_ffff;
        let last_processed_pos: u64 = self.last_processed_pos_ - last_copy_len;
        // Max distance valid at the position where the last copy started.
        let max_distance: u64 = if last_processed_pos < max_backward_distance {
            last_processed_pos
        } else {
            max_backward_distance
        };
        let cmd_dist: u64 = self.dist_cache_[0] as u64;
        let distance_code: u32 = last_command.restore_distance_code(&self.params.dist);
        // Only extend when the command already refers to the most recent
        // distance (a short code, or the explicit code equal to dist_cache[0]).
        if (distance_code < BROTLI_NUM_DISTANCE_SHORT_CODES
            || distance_code as u64 - (BROTLI_NUM_DISTANCE_SHORT_CODES - 1) as u64 == cmd_dist)
        {
            if (cmd_dist <= max_distance) {
                // Absorb bytes while the next byte equals the byte `cmd_dist`
                // positions back in the ring buffer.
                while (*bytes != 0
                    && self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index
                        + (*wrapped_last_processed_pos as usize & mask as usize)]
                        == self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index
                            + (((*wrapped_last_processed_pos as usize)
                                .wrapping_sub(cmd_dist as usize))
                                & mask as usize)])
                {
                    last_command.copy_len_ += 1;
                    (*bytes) -= 1;
                    (*wrapped_last_processed_pos) += 1;
                }
            }
            // Re-derive the insert/copy prefix code for the grown copy length.
            get_length_code(
                last_command.insert_len_ as usize,
                ((last_command.copy_len_ & 0x01ff_ffff) as i32
                    + (last_command.copy_len_ >> 25) as i32) as usize,
                (last_command.dist_prefix_ & 0x03ff) == 0,
                &mut last_command.cmd_prefix_,
            );
        }
    }
}
393
394fn RingBufferInit<AllocU8: alloc::Allocator<u8>>() -> RingBuffer<AllocU8> {
395 RingBuffer {
396 size_: 0,
397 mask_: 0, tail_size_: 0,
399 total_size_: 0,
400
401 cur_size_: 0,
402 pos_: 0,
403 data_mo: AllocU8::AllocatedMemory::default(),
404 buffer_index: 0usize,
405 }
406}
407
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Creates a fresh encoder state owning allocator `m8`, with default
    /// parameters and the standard initial distance cache (4, 11, 15, 16).
    /// All growable buffers start empty and are allocated lazily once
    /// encoding begins.
    pub fn new(m8: Alloc) -> Self {
        // Initial short-distance cache mandated by the Brotli format.
        let cache: [i32; 16] = [4, 11, 15, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
        Self {
            params: BrotliEncoderInitParams(),
            input_pos_: 0,
            num_commands_: 0,
            num_literals_: 0,
            last_insert_len_: 0,
            last_flush_pos_: 0,
            last_processed_pos_: 0,
            prev_byte_: 0,
            prev_byte2_: 0,
            storage_size_: 0,
            storage_: alloc_default::<u8, Alloc>(),
            hasher_: UnionHasher::<Alloc>::default(),
            large_table_: alloc_default::<i32, Alloc>(),
            cmd_code_numbits_: 0,
            command_buf_: alloc_default::<u32, Alloc>(),
            literal_buf_: alloc_default::<u8, Alloc>(),
            next_out_: NextOut::None,
            available_out_: 0,
            total_out_: 0,
            is_first_mb: IsFirst::NothingWritten,
            stream_state_: BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING,
            is_last_block_emitted_: false,
            is_initialized_: false,
            ringbuffer_: RingBufferInit(),
            commands_: alloc_default::<Command, Alloc>(),
            cmd_alloc_size_: 0,
            dist_cache_: cache,
            // Only the first kNumDistanceCacheEntries (4) entries are saved.
            saved_dist_cache_: [cache[0], cache[1], cache[2], cache[3]],
            cmd_bits_: [0; 128],
            cmd_depths_: [0; 128],
            last_bytes_: 0,
            last_bytes_bits_: 0,
            cmd_code_: [0; 512],
            m8,
            remaining_metadata_bytes_: 0,
            small_table_: [0; 1024],
            tiny_buf_: [0; 16],
            literal_scratch_space: HistogramLiteral::make_nnz_storage(),
            command_scratch_space: HistogramCommand::make_nnz_storage(),
            distance_scratch_space: HistogramDistance::make_nnz_storage(),
            recoder_state: RecoderState::new(),
            custom_dictionary: false,
            custom_dictionary_size: None,
        }
    }
}
459
/// Releases the ring buffer's backing allocation, leaving `data_mo` empty.
fn RingBufferFree<AllocU8: alloc::Allocator<u8>>(m: &mut AllocU8, rb: &mut RingBuffer<AllocU8>) {
    m.free_cell(core::mem::take(&mut rb.data_mo));
}
/// Frees all allocations owned by the hasher via `UnionHasher::free`.
fn DestroyHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
    m16: &mut Alloc,
    handle: &mut UnionHasher<Alloc>,
) {
    handle.free(m16);
}
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Frees every heap buffer owned by the encoder state: output storage,
    /// command buffer, ring buffer, hasher tables, and the fragment-compressor
    /// scratch buffers. Fixed-size arrays are left in place.
    fn cleanup(&mut self) {
        <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.storage_));
        <Alloc as Allocator<Command>>::free_cell(
            &mut self.m8,
            core::mem::take(&mut self.commands_),
        );
        RingBufferFree(&mut self.m8, &mut self.ringbuffer_);
        DestroyHasher(&mut self.m8, &mut self.hasher_);
        <Alloc as Allocator<i32>>::free_cell(&mut self.m8, core::mem::take(&mut self.large_table_));
        <Alloc as Allocator<u32>>::free_cell(&mut self.m8, core::mem::take(&mut self.command_buf_));
        <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.literal_buf_));
    }
}
517
/// Releases all internal allocations of the encoder state (C-API-style
/// destructor); the state value itself is dropped by the caller.
pub fn BrotliEncoderDestroyInstance<Alloc: BrotliAlloc>(s: &mut BrotliEncoderStateStruct<Alloc>) {
    s.cleanup()
}
527
/// Whether windows larger than 24 bits may be used; compiled to `false` when
/// the `disallow_large_window_size` feature is enabled.
#[cfg(not(feature = "disallow_large_window_size"))]
fn check_large_window_ok() -> bool {
    true
}
#[cfg(feature = "disallow_large_window_size")]
fn check_large_window_ok() -> bool {
    false
}
536
537pub fn SanitizeParams(params: &mut BrotliEncoderParams) {
538 params.quality = min(11i32, max(0i32, params.quality));
539 if params.lgwin < 10i32 {
540 params.lgwin = 10i32;
541 } else if params.lgwin > 24i32 {
542 if params.large_window && check_large_window_ok() {
543 if params.lgwin > 30i32 {
544 params.lgwin = 30i32;
545 }
546 } else {
547 params.lgwin = 24i32;
548 }
549 }
550 if params.catable {
551 params.appendable = true;
552 }
553}
554
555fn ComputeLgBlock(params: &BrotliEncoderParams) -> i32 {
556 let mut lgblock: i32 = params.lgblock;
557 if params.quality == 0i32 || params.quality == 1i32 {
558 lgblock = params.lgwin;
559 } else if params.quality < 4i32 {
560 lgblock = 14i32;
561 } else if lgblock == 0i32 {
562 lgblock = 16i32;
563 if params.quality >= 9i32 && (params.lgwin > lgblock) {
564 lgblock = min(18i32, params.lgwin);
565 }
566 } else {
567 lgblock = min(24i32, max(16i32, lgblock));
568 }
569 lgblock
570}
571
572fn ComputeRbBits(params: &BrotliEncoderParams) -> i32 {
573 1i32 + max(params.lgwin, params.lgblock)
574}
575
576fn RingBufferSetup<AllocU8: alloc::Allocator<u8>>(
577 params: &BrotliEncoderParams,
578 rb: &mut RingBuffer<AllocU8>,
579) {
580 let window_bits: i32 = ComputeRbBits(params);
581 let tail_bits: i32 = params.lgblock;
582 rb.size_ = 1u32 << window_bits;
583 rb.mask_ = (1u32 << window_bits).wrapping_sub(1);
584 rb.tail_size_ = 1u32 << tail_bits;
585 rb.total_size_ = rb.size_.wrapping_add(rb.tail_size_);
586}
587
/// Encodes the window-size field of the Brotli stream header into
/// `last_bytes`/`last_bytes_bits` (value and bit count), using either the
/// 14-bit large-window form or the standard 1/4/7-bit forms.
fn EncodeWindowBits(
    lgwin: i32,
    large_window: bool,
    last_bytes: &mut u16,
    last_bytes_bits: &mut u8,
) {
    let (bytes, bits): (u16, u8) = if large_window {
        // Large-window header: 6-bit lgwin framed by the 0x11 marker.
        ((((lgwin & 0x3F) << 8) | 0x11) as u16, 14)
    } else {
        match lgwin {
            16 => (0, 1),
            17 => (1, 7),
            w if w > 17 => ((((w - 17) << 1) | 1) as u16, 4),
            w => ((((w - 8) << 4) | 1) as u16, 7),
        }
    };
    *last_bytes = bytes;
    *last_bytes_bits = bits;
}
611
/// Loads the precomputed default command prefix code used by the quality-0
/// fast path: Huffman depths and bit patterns for the 128 command symbols,
/// plus the serialized form of the code itself (448 bits).
fn InitCommandPrefixCodes(
    cmd_depths: &mut [u8],
    cmd_bits: &mut [u16],
    cmd_code: &mut [u8],
    cmd_code_numbits: &mut usize,
) {
    static kDefaultCommandDepths: [u8; 128] = [
        0, 4, 4, 5, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 0, 0, 0, 4, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6,
        7, 7, 7, 7, 10, 10, 10, 10, 10, 10, 0, 4, 4, 5, 5, 5, 6, 6, 7, 8, 8, 9, 10, 10, 10, 10, 10,
        10, 10, 10, 10, 10, 10, 10, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 6, 6, 6,
        6, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 7, 7, 7, 8, 10, 12, 12,
        12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 0, 0, 0, 0,
    ];
    static kDefaultCommandBits: [u16; 128] = [
        0, 0, 8, 9, 3, 35, 7, 71, 39, 103, 23, 47, 175, 111, 239, 31, 0, 0, 0, 4, 12, 2, 10, 6, 13,
        29, 11, 43, 27, 59, 87, 55, 15, 79, 319, 831, 191, 703, 447, 959, 0, 14, 1, 25, 5, 21, 19,
        51, 119, 159, 95, 223, 479, 991, 63, 575, 127, 639, 383, 895, 255, 767, 511, 1023, 14, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 59, 7, 39, 23, 55, 30, 1, 17, 9, 25, 5, 0, 8,
        4, 12, 2, 10, 6, 21, 13, 29, 3, 19, 11, 15, 47, 31, 95, 63, 127, 255, 767, 2815, 1791,
        3839, 511, 2559, 1535, 3583, 1023, 3071, 2047, 4095, 0, 0, 0, 0,
    ];
    static kDefaultCommandCode: [u8; 57] = [
        0xff, 0x77, 0xd5, 0xbf, 0xe7, 0xde, 0xea, 0x9e, 0x51, 0x5d, 0xde, 0xc6, 0x70, 0x57, 0xbc,
        0x58, 0x58, 0x58, 0xd8, 0xd8, 0x58, 0xd5, 0xcb, 0x8c, 0xea, 0xe0, 0xc3, 0x87, 0x1f, 0x83,
        0xc1, 0x60, 0x1c, 0x67, 0xb2, 0xaa, 0x6, 0x83, 0xc1, 0x60, 0x30, 0x18, 0xcc, 0xa1, 0xce,
        0x88, 0x54, 0x94, 0x46, 0xe1, 0xb0, 0xd0, 0x4e, 0xb2, 0xf7, 0x4, 0x0,
    ];
    static kDefaultCommandCodeNumBits: usize = 448usize;
    // All element types are Copy, so copy_from_slice (a memcpy) suffices;
    // length-mismatch panics are identical to clone_from_slice.
    cmd_depths.copy_from_slice(&kDefaultCommandDepths);
    cmd_bits.copy_from_slice(&kDefaultCommandBits);
    cmd_code[..kDefaultCommandCode.len()].copy_from_slice(&kDefaultCommandCode);
    *cmd_code_numbits = kDefaultCommandCodeNumBits;
}
645
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// One-time lazy initialization performed on the first encode call:
    /// sanitizes the parameters, derives block/distance/ring-buffer settings,
    /// writes the window-bits stream header, and (for quality 0) loads the
    /// default command prefix code. Idempotent; always returns `true`.
    fn ensure_initialized(&mut self) -> bool {
        if self.is_initialized_ {
            return true;
        }
        SanitizeParams(&mut self.params);
        self.params.lgblock = ComputeLgBlock(&mut self.params);
        ChooseDistanceParams(&mut self.params);
        self.remaining_metadata_bytes_ = u32::MAX;
        RingBufferSetup(&mut self.params, &mut self.ringbuffer_);
        {
            let mut lgwin: i32 = self.params.lgwin;
            // The fast qualities assume at least an 18-bit window header.
            if self.params.quality == 0i32 || self.params.quality == 1i32 {
                lgwin = max(lgwin, 18i32);
            }
            EncodeWindowBits(
                lgwin,
                self.params.large_window,
                &mut self.last_bytes_,
                &mut self.last_bytes_bits_,
            );
        }
        if self.params.quality == 0i32 {
            InitCommandPrefixCodes(
                &mut self.cmd_depths_[..],
                &mut self.cmd_bits_[..],
                &mut self.cmd_code_[..],
                &mut self.cmd_code_numbits_,
            );
        }
        if self.params.catable {
            // Poison the distance cache so a concatenable stream never
            // references distances from before its own start.
            for item in self.dist_cache_.iter_mut() {
                *item = 0x7ffffff0;
            }
            for item in self.saved_dist_cache_.iter_mut() {
                *item = 0x7ffffff0;
            }
        }
        self.is_initialized_ = true;
        true
    }
}
691
/// (Re)allocates the ring buffer's backing store for `buflen` data bytes,
/// preserving any existing contents.
///
/// Layout: 2 prefix bytes, then `buflen` data bytes, then 7 slack bytes so
/// hashing may read 8 bytes starting at any data position. Prefix and slack
/// bytes are zeroed.
fn RingBufferInitBuffer<AllocU8: alloc::Allocator<u8>>(
    m: &mut AllocU8,
    buflen: u32,
    rb: &mut RingBuffer<AllocU8>,
) {
    static kSlackForEightByteHashingEverywhere: usize = 7usize;
    let mut new_data = m.alloc_cell(
        ((2u32).wrapping_add(buflen) as usize).wrapping_add(kSlackForEightByteHashingEverywhere),
    );
    if !rb.data_mo.slice().is_empty() {
        // Carry over the old contents (prefix + data + slack) before freeing.
        let lim: usize = ((2u32).wrapping_add(rb.cur_size_) as usize)
            .wrapping_add(kSlackForEightByteHashingEverywhere);
        new_data.slice_mut()[..lim].clone_from_slice(&rb.data_mo.slice()[..lim]);
        m.free_cell(core::mem::take(&mut rb.data_mo));
    }
    let _ = core::mem::replace(&mut rb.data_mo, new_data);
    rb.cur_size_ = buflen;
    rb.buffer_index = 2usize;
    // Zero the two prefix bytes (mirrors of the ring's last two bytes).
    rb.data_mo.slice_mut()[(rb.buffer_index.wrapping_sub(2))] = 0;
    rb.data_mo.slice_mut()[(rb.buffer_index.wrapping_sub(1))] = 0;
    // Zero the hashing slack after the data area.
    for i in 0usize..kSlackForEightByteHashingEverywhere {
        rb.data_mo.slice_mut()[rb
            .buffer_index
            .wrapping_add(rb.cur_size_ as usize)
            .wrapping_add(i)] = 0;
    }
}
719
720fn RingBufferWriteTail<AllocU8: alloc::Allocator<u8>>(
721 bytes: &[u8],
722 n: usize,
723 rb: &mut RingBuffer<AllocU8>,
724) {
725 let masked_pos: usize = (rb.pos_ & rb.mask_) as usize;
726 if masked_pos < rb.tail_size_ as usize {
727 let p: usize = (rb.size_ as usize).wrapping_add(masked_pos);
728 let begin = rb.buffer_index.wrapping_add(p);
729 let lim = min(n, (rb.tail_size_ as usize).wrapping_sub(masked_pos));
730 rb.data_mo.slice_mut()[begin..(begin + lim)].clone_from_slice(&bytes[..lim]);
731 }
732}
733
/// Appends `n` bytes to the ring buffer, allocating or growing the backing
/// store as needed, wrapping writes around the ring, duplicating the head
/// into the tail region, and keeping the two prefix bytes in sync with the
/// ring's last two bytes.
fn RingBufferWrite<AllocU8: alloc::Allocator<u8>>(
    m: &mut AllocU8,
    bytes: &[u8],
    n: usize,
    rb: &mut RingBuffer<AllocU8>,
) {
    // First, small write: allocate exactly n bytes and copy straight in.
    if rb.pos_ == 0u32 && (n < rb.tail_size_ as usize) {
        rb.pos_ = n as u32;
        RingBufferInitBuffer(m, rb.pos_, rb);
        rb.data_mo.slice_mut()[rb.buffer_index..(rb.buffer_index + n)]
            .clone_from_slice(&bytes[..n]);
        return;
    }
    // Grow to the full ring + tail size once writes no longer fit.
    if rb.cur_size_ < rb.total_size_ {
        RingBufferInitBuffer(m, rb.total_size_, rb);
        // Clear the two bytes just before the wrap point; they are the
        // source for the prefix mirror below.
        rb.data_mo.slice_mut()[rb
            .buffer_index
            .wrapping_add(rb.size_ as usize)
            .wrapping_sub(2)] = 0u8;
        rb.data_mo.slice_mut()[rb
            .buffer_index
            .wrapping_add(rb.size_ as usize)
            .wrapping_sub(1)] = 0u8;
    }
    {
        let masked_pos: usize = (rb.pos_ & rb.mask_) as usize;
        // Mirror the head of this write into the tail region if applicable.
        RingBufferWriteTail(bytes, n, rb);
        if masked_pos.wrapping_add(n) <= rb.size_ as usize {
            // Fits without wrapping: single copy.
            let start = rb.buffer_index.wrapping_add(masked_pos);
            rb.data_mo.slice_mut()[start..(start + n)].clone_from_slice(&bytes[..n]);
        } else {
            // Wraps: copy up to the end of the buffer, then the remainder
            // from the start of the ring.
            {
                let start = rb.buffer_index.wrapping_add(masked_pos);
                let mid = min(n, (rb.total_size_ as usize).wrapping_sub(masked_pos));
                rb.data_mo.slice_mut()[start..(start + mid)].clone_from_slice(&bytes[..mid]);
            }
            let xstart = rb.buffer_index.wrapping_add(0);
            let size = n.wrapping_sub((rb.size_ as usize).wrapping_sub(masked_pos));
            let bytes_start = (rb.size_ as usize).wrapping_sub(masked_pos);
            rb.data_mo.slice_mut()[xstart..(xstart + size)]
                .clone_from_slice(&bytes[bytes_start..(bytes_start + size)]);
        }
    }
    // Keep the two prefix bytes equal to the last two bytes of the ring so
    // reads just before position 0 see the right history.
    let data_2 = rb.data_mo.slice()[rb
        .buffer_index
        .wrapping_add(rb.size_ as usize)
        .wrapping_sub(2)];
    rb.data_mo.slice_mut()[rb.buffer_index.wrapping_sub(2)] = data_2;
    let data_1 = rb.data_mo.slice()[rb
        .buffer_index
        .wrapping_add(rb.size_ as usize)
        .wrapping_sub(1)];
    rb.data_mo.slice_mut()[rb.buffer_index.wrapping_sub(1)] = data_1;
    rb.pos_ = rb.pos_.wrapping_add(n as u32);
    // Keep pos_ bounded: once past 2^30, wrap it while preserving bit 30 so
    // "buffer has wrapped" remains detectable.
    if rb.pos_ > 1u32 << 30 {
        rb.pos_ = rb.pos_ & (1u32 << 30).wrapping_sub(1) | 1u32 << 30;
    }
}
793
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Copies `input_size` bytes of `input_buffer` into the ring buffer and
    /// advances `input_pos_`, initializing the encoder first if necessary.
    pub fn copy_input_to_ring_buffer(&mut self, input_size: usize, input_buffer: &[u8]) {
        if !self.ensure_initialized() {
            return;
        }
        RingBufferWrite(
            &mut self.m8,
            input_buffer,
            input_size,
            &mut self.ringbuffer_,
        );
        self.input_pos_ = self.input_pos_.wrapping_add(input_size as u64);
        // Until the buffer first wraps, zero the 7 slack bytes after the
        // current position so 8-byte hash reads see deterministic data.
        if (self.ringbuffer_).pos_ <= (self.ringbuffer_).mask_ {
            let start = (self.ringbuffer_)
                .buffer_index
                .wrapping_add((self.ringbuffer_).pos_ as usize);
            for item in (self.ringbuffer_).data_mo.slice_mut()[start..(start + 7)].iter_mut() {
                *item = 0;
            }
        }
    }
}
816
/// Selects the hasher implementation (`hparams.type_`) and its tuning knobs
/// (bucket/block bits, hash length, distance-cache depth) from quality,
/// window size and the caller's size hint.
fn ChooseHasher(params: &mut BrotliEncoderParams) {
    let hparams = &mut params.hasher;
    if params.quality >= 10 && !params.q9_5 {
        // Qualities 10/11 use the binary-tree hasher H10.
        hparams.type_ = 10;
    } else if params.quality == 10 {
        // Only reachable when q9_5 is set: fall back to H9 at quality 10.
        hparams.type_ = 9;
        hparams.num_last_distances_to_check = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
        hparams.block_bits = H9_BLOCK_BITS as i32;
        hparams.bucket_bits = H9_BUCKET_BITS as i32;
        hparams.hash_len = 4;
    } else if params.quality == 9 {
        hparams.type_ = 9;
        hparams.num_last_distances_to_check = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
        hparams.block_bits = H9_BLOCK_BITS as i32;
        hparams.bucket_bits = H9_BUCKET_BITS as i32;
        hparams.hash_len = 4;
    } else if params.quality == 4 && (params.size_hint >= (1i32 << 20) as usize) {
        // Large inputs at quality 4 get the specialized H54.
        hparams.type_ = 54i32;
    } else if params.quality < 5 {
        // Qualities 1..=4 map directly onto the basic hashers H2..H4.
        hparams.type_ = params.quality;
    } else if params.lgwin <= 16 {
        // Small windows: the 4x-family hashers, stronger with quality.
        hparams.type_ = if params.quality < 7 {
            40i32
        } else if params.quality < 9 {
            41i32
        } else {
            42i32
        };
    } else if ((params.q9_5 && params.size_hint > (1 << 20)) || params.size_hint > (1 << 22))
        && (params.lgwin >= 19i32)
    {
        // Big inputs with big windows: H6 with a 64-bit rolling hash.
        hparams.type_ = 6i32;
        hparams.block_bits = min(params.quality - 1, 9);
        hparams.bucket_bits = 15i32;
        hparams.hash_len = 5i32;
        hparams.num_last_distances_to_check = if params.quality < 7 {
            4i32
        } else if params.quality < 9 {
            10i32
        } else {
            16i32
        };
    } else {
        // Default mid-quality choice: H5.
        hparams.type_ = 5i32;
        hparams.block_bits = min(params.quality - 1, 9);
        hparams.bucket_bits = if params.quality < 7 && params.size_hint <= (1 << 20) {
            14i32
        } else {
            15i32
        };
        hparams.num_last_distances_to_check = if params.quality < 7 {
            4i32
        } else if params.quality < 9 {
            10i32
        } else {
            16i32
        };
    }
}
877
/// Allocates an H2 basic hasher (65537-entry bucket table plus 8 slack
/// entries) with freshly reset dictionary-search statistics.
fn InitializeH2<AllocU32: alloc::Allocator<u32>>(
    m32: &mut AllocU32,
    params: &BrotliEncoderParams,
) -> BasicHasher<H2Sub<AllocU32>> {
    BasicHasher {
        GetHasherCommon: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        buckets_: H2Sub {
            buckets_: m32.alloc_cell(65537 + 8),
        },
        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
    }
}
/// Allocates an H3 basic hasher (65538-entry bucket table plus 8 slack
/// entries) with freshly reset dictionary-search statistics.
fn InitializeH3<AllocU32: alloc::Allocator<u32>>(
    m32: &mut AllocU32,
    params: &BrotliEncoderParams,
) -> BasicHasher<H3Sub<AllocU32>> {
    BasicHasher {
        GetHasherCommon: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        buckets_: H3Sub {
            buckets_: m32.alloc_cell(65538 + 8),
        },
        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
    }
}
/// Allocates an H4 basic hasher (131072-entry bucket table plus 8 slack
/// entries) with freshly reset dictionary-search statistics.
fn InitializeH4<AllocU32: alloc::Allocator<u32>>(
    m32: &mut AllocU32,
    params: &BrotliEncoderParams,
) -> BasicHasher<H4Sub<AllocU32>> {
    BasicHasher {
        GetHasherCommon: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        buckets_: H4Sub {
            buckets_: m32.alloc_cell(131072 + 8),
        },
        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
    }
}
/// Allocates an H54 basic hasher (1048580-entry bucket table plus 8 slack
/// entries), used for large inputs at quality 4.
fn InitializeH54<AllocU32: alloc::Allocator<u32>>(
    m32: &mut AllocU32,
    params: &BrotliEncoderParams,
) -> BasicHasher<H54Sub<AllocU32>> {
    BasicHasher {
        GetHasherCommon: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        buckets_: H54Sub {
            buckets_: m32.alloc_cell(1048580 + 8),
        },
        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
    }
}
946
/// Allocates an H9 hasher: one occupancy counter per bucket and a
/// `H9_BLOCK_SIZE`-deep chain for each of the `1 << H9_BUCKET_BITS` buckets.
fn InitializeH9<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
    m16: &mut Alloc,
    params: &BrotliEncoderParams,
) -> H9<Alloc> {
    H9 {
        dict_search_stats_: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        num_: allocate::<u16, _>(m16, 1 << H9_BUCKET_BITS),
        buckets_: allocate::<u32, _>(m16, H9_BLOCK_SIZE << H9_BUCKET_BITS),
        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
    }
}
963
/// Allocates an H5 advanced hasher sized from `params.hasher`
/// (bucket_bits/block_bits), choosing a compile-time specialized variant
/// (`H5q5`/`H5q7`) when the parameters match `HQ5Sub`/`HQ7Sub` exactly, and
/// falling back to the generic runtime-parameterized `H5` otherwise.
fn InitializeH5<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
    m16: &mut Alloc,
    params: &BrotliEncoderParams,
) -> UnionHasher<Alloc> {
    let block_size = 1u64 << params.hasher.block_bits;
    let bucket_size = 1u64 << params.hasher.bucket_bits;
    // Chain storage (bucket_size * block_size slots) and per-bucket counters.
    let buckets: <Alloc as Allocator<u32>>::AllocatedMemory =
        allocate::<u32, _>(m16, (bucket_size * block_size) as usize);
    let num: <Alloc as Allocator<u16>>::AllocatedMemory =
        allocate::<u16, _>(m16, bucket_size as usize);

    if params.hasher.block_bits == (HQ5Sub {}).block_bits()
        && (1 << params.hasher.bucket_bits) == (HQ5Sub {}).bucket_size()
    {
        return UnionHasher::H5q5(AdvHasher {
            buckets,
            h9_opts: super::backward_references::H9Opts::new(&params.hasher),
            num,
            GetHasherCommon: Struct1 {
                params: params.hasher,
                is_prepared_: 1,
                dict_num_lookups: 0,
                dict_num_matches: 0,
            },
            specialization: HQ5Sub {},
        });
    }
    if params.hasher.block_bits == (HQ7Sub {}).block_bits()
        && (1 << params.hasher.bucket_bits) == (HQ7Sub {}).bucket_size()
    {
        return UnionHasher::H5q7(AdvHasher {
            buckets,
            h9_opts: super::backward_references::H9Opts::new(&params.hasher),
            num,
            GetHasherCommon: Struct1 {
                params: params.hasher,
                is_prepared_: 1,
                dict_num_lookups: 0,
                dict_num_matches: 0,
            },
            specialization: HQ7Sub {},
        });
    }
    UnionHasher::H5(AdvHasher {
        buckets,
        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
        num,
        GetHasherCommon: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        // H5 hashes into the high bits of a 32-bit value.
        specialization: H5Sub {
            hash_shift_: 32i32 - params.hasher.bucket_bits,
            bucket_size_: bucket_size as u32,
            block_bits_: params.hasher.block_bits,
            block_mask_: block_size.wrapping_sub(1) as u32,
        },
    })
}
/// Allocates an H6 advanced hasher sized from `params.hasher`. H6 differs
/// from H5 in hashing `hash_len` bytes through a 64-bit value (see the
/// `hash_mask`/`hash_shift_` setup below).
fn InitializeH6<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
    m16: &mut Alloc,
    params: &BrotliEncoderParams,
) -> UnionHasher<Alloc> {
    let block_size = 1u64 << params.hasher.block_bits;
    let bucket_size = 1u64 << params.hasher.bucket_bits;
    // Chain storage (bucket_size * block_size slots) and per-bucket counters.
    let buckets: <Alloc as Allocator<u32>>::AllocatedMemory =
        allocate::<u32, _>(m16, (bucket_size * block_size) as usize);
    let num: <Alloc as Allocator<u16>>::AllocatedMemory =
        allocate::<u16, _>(m16, bucket_size as usize);
    UnionHasher::H6(AdvHasher {
        buckets,
        num,
        h9_opts: super::backward_references::H9Opts::new(&params.hasher),
        GetHasherCommon: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        specialization: H6Sub {
            bucket_size_: 1u32 << params.hasher.bucket_bits,
            block_bits_: params.hasher.block_bits,
            block_mask_: block_size.wrapping_sub(1) as u32,
            // Keep only the low 8*hash_len bytes of the 64-bit window.
            hash_mask: 0xffffffffffffffffu64 >> (64i32 - 8i32 * params.hasher.hash_len),
            hash_shift_: 64i32 - params.hasher.bucket_bits,
        },
    })
}
1054
1055fn BrotliMakeHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
1056 m: &mut Alloc,
1057 params: &BrotliEncoderParams,
1058 ringbuffer_break: Option<core::num::NonZeroUsize>,
1059) -> UnionHasher<Alloc> {
1060 let hasher_type: i32 = params.hasher.type_;
1061 if hasher_type == 2i32 {
1062 return UnionHasher::H2(InitializeH2(m, params));
1063 }
1064 if hasher_type == 3i32 {
1065 return UnionHasher::H3(InitializeH3(m, params));
1066 }
1067 if hasher_type == 4i32 {
1068 return UnionHasher::H4(InitializeH4(m, params));
1069 }
1070 if hasher_type == 5i32 {
1071 return InitializeH5(m, params);
1072 }
1073 if hasher_type == 6i32 {
1074 return InitializeH6(m, params);
1075 }
1076 if hasher_type == 9i32 {
1077 return UnionHasher::H9(InitializeH9(m, params));
1078 }
1079 if hasher_type == 54i32 {
1091 return UnionHasher::H54(InitializeH54(m, params));
1092 }
1093 if hasher_type == 10i32 {
1094 return UnionHasher::H10(InitializeH10(m, false, params, ringbuffer_break, 0));
1095 }
1096 InitializeH6(m, params)
1098
1099 }
1101fn HasherReset<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(t: &mut UnionHasher<Alloc>) {
1102 match t {
1103 &mut UnionHasher::Uninit => {}
1104 _ => (t.GetHasherCommon()).is_prepared_ = 0i32,
1105 };
1106}
1107
/// Ensures `handle` holds a usable hasher for the upcoming chunk of input.
///
/// If `handle` is still `Uninit`, a hasher type is chosen for `params`,
/// built, and marked prepared; otherwise the existing hasher is asked to
/// (re)prepare itself for `input_size` bytes of `data`.
pub(crate) fn hasher_setup<Alloc: Allocator<u16> + Allocator<u32>>(
    m16: &mut Alloc,
    handle: &mut UnionHasher<Alloc>,
    params: &mut BrotliEncoderParams,
    ringbuffer_break: Option<core::num::NonZeroUsize>,
    data: &[u8],
    position: usize,
    input_size: usize,
    is_last: bool,
) {
    // "One shot": the entire stream arrives in this single call.
    let one_shot = position == 0 && is_last;
    let is_uninit = match (handle) {
        &mut UnionHasher::Uninit => true,
        _ => false,
    };
    if is_uninit {
        // First use: pick a hasher type for these params and build it.
        ChooseHasher(&mut (*params));
        *handle = BrotliMakeHasher(m16, params, ringbuffer_break);
        handle.GetHasherCommon().params = params.hasher;
        // Reset clears is_prepared_, then it is immediately set back to 1.
        // NOTE(review): this assumes a freshly built (zero-initialized)
        // hasher needs no Prepare() pass -- confirm against the hashers.
        HasherReset(handle); handle.GetHasherCommon().is_prepared_ = 1;
    } else {
        match handle.Prepare(one_shot, input_size, data) {
            HowPrepared::ALREADY_PREPARED => {}
            HowPrepared::NEWLY_PREPARED => {
                // At the very start of a stream, also forget any
                // static-dictionary lookup statistics.
                if position == 0usize {
                    let common = handle.GetHasherCommon();
                    common.dict_num_lookups = 0usize;
                    common.dict_num_matches = 0usize;
                }
            }
        }
    }
}
1145
/// Feeds a custom dictionary of `size` bytes into the hasher so later
/// backward references can match against it. Sets the hasher up first
/// (non-last, position 0), then stores lookahead entries for every variant.
fn HasherPrependCustomDictionary<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
    m: &mut Alloc,
    handle: &mut UnionHasher<Alloc>,
    params: &mut BrotliEncoderParams,
    ringbuffer_break: Option<core::num::NonZeroUsize>,
    size: usize,
    dict: &[u8],
) {
    hasher_setup(
        m,
        handle,
        params,
        ringbuffer_break,
        dict,
        0usize,
        size,
        false,
    );
    // Each arm binds a different concrete hasher type, so the identical-looking
    // calls cannot be collapsed into one generic arm.
    match handle {
        &mut UnionHasher::H2(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H3(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H4(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H5(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H5q7(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H5q5(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H6(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H9(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H54(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::H10(ref mut hasher) => StoreLookaheadThenStore(hasher, size, dict),
        &mut UnionHasher::Uninit => panic!("Uninitialized"),
    }
}
1178
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Installs `dict` as the custom (prefix) dictionary for this stream,
    /// hashing the dictionary bytes from scratch.
    pub fn set_custom_dictionary(&mut self, size: usize, dict: &[u8]) {
        self.set_custom_dictionary_with_optional_precomputed_hasher(
            size,
            dict,
            UnionHasher::Uninit,
            false,
        )
    }

    /// Installs `dict` as the custom dictionary. When `opt_hasher` is not
    /// `Uninit`, it is taken as a hasher precomputed over the dictionary
    /// (e.g. by another thread of a multipart file) so the bytes need not be
    /// re-hashed in release builds.
    ///
    /// Side effects: disables the built-in static dictionary, copies the
    /// (possibly truncated) dictionary into the ring buffer, and advances the
    /// flush/processed positions past it.
    pub fn set_custom_dictionary_with_optional_precomputed_hasher(
        &mut self,
        size: usize,
        mut dict: &[u8],
        opt_hasher: UnionHasher<Alloc>,
        is_multithreading_file_continue: bool,
    ) {
        self.params.use_dictionary = false;

        // When continuing a file split across threads, seed the two context
        // bytes from the dictionary tail.
        self.prev_byte_ = 0;
        self.prev_byte2_ = 0;
        if is_multithreading_file_continue {
            if size > 0 {
                self.prev_byte_ = dict[size.wrapping_sub(1)];
            }
            if size > 1 {
                self.prev_byte2_ = dict[size.wrapping_sub(2)];
            }
        }

        let has_optional_hasher = if let UnionHasher::Uninit = opt_hasher {
            false
        } else {
            true
        };
        // Window size minus 16 -- presumably the window gap; TODO confirm
        // this matches BROTLI_WINDOW_GAP.
        let max_dict_size: usize = (1usize << self.params.lgwin).wrapping_sub(16);
        self.hasher_ = opt_hasher;
        let mut dict_size: usize = size;
        if !self.ensure_initialized() {
            return;
        }
        // Tiny dictionaries and the fast qualities (0/1) skip the custom
        // dictionary entirely; the stream then stays catable/appendable.
        if dict_size == 0 || self.params.quality == 0 || self.params.quality == 1 || size <= 1 {
            self.params.catable = true; self.params.appendable = true; return;
        }
        self.custom_dictionary = true;
        if size > max_dict_size {
            // Keep only the most recent window-sized suffix.
            dict = &dict[size.wrapping_sub(max_dict_size)..];
            dict_size = max_dict_size;
        }
        self.custom_dictionary_size = core::num::NonZeroUsize::new(dict_size);
        self.copy_input_to_ring_buffer(dict_size, dict);
        self.last_flush_pos_ = dict_size as u64;
        self.last_processed_pos_ = dict_size as u64;
        let m16 = &mut self.m8;
        // Release builds trust a supplied hasher; debug builds (or when no
        // hasher was supplied) recompute it from the dictionary.
        if cfg!(debug_assertions) || !has_optional_hasher {
            let mut orig_hasher = UnionHasher::Uninit;
            if has_optional_hasher {
                orig_hasher = core::mem::replace(&mut self.hasher_, UnionHasher::Uninit);
            }
            HasherPrependCustomDictionary(
                m16,
                &mut self.hasher_,
                &mut self.params,
                self.custom_dictionary_size,
                dict_size,
                dict,
            );
            if has_optional_hasher {
                // Verify the supplied hasher matches the recomputed one,
                // then free the supplied copy (the recomputed one is kept).
                debug_assert!(orig_hasher == self.hasher_);
                DestroyHasher(m16, &mut orig_hasher);
            }
        }
    }
}
1255
/// Worst-case output bound when the input is compressed as `num_threads`
/// independent sections: the single-stream bound plus 8 framing bytes per
/// thread.
pub fn BrotliEncoderMaxCompressedSizeMulti(input_size: usize, num_threads: usize) -> usize {
    BrotliEncoderMaxCompressedSize(input_size) + num_threads * 8
}
1259
/// Upper bound on the compressed size for `input_size` bytes of input,
/// including 16 bytes of headroom for the stream magic/header.
///
/// Returns `1 + 16` for empty input and `0` when the bound itself would
/// overflow `usize`.
pub fn BrotliEncoderMaxCompressedSize(input_size: usize) -> usize {
    // Room reserved for the magic number / stream header bytes.
    let magic_size = 16usize;
    // NOTE(review): the shift pair below is asymmetric (`>> 14` vs `<< 24`),
    // so `tail` wraps for inputs >= 1 << 14 and the bound comes out larger
    // (more conservative) than the C reference -- confirm before "fixing".
    let num_large_blocks = input_size >> 14;
    let tail = input_size.wrapping_sub(num_large_blocks << 24);
    // Per-meta-block header cost for the final partial block.
    let tail_overhead: usize = if tail > (1usize << 20) { 4 } else { 3 };
    // 2 header bytes + 4 per large block + tail header + final empty block.
    let overhead = 2usize
        .wrapping_add(num_large_blocks.wrapping_mul(4))
        .wrapping_add(tail_overhead)
        .wrapping_add(1);
    if input_size == 0 {
        return 1 + magic_size;
    }
    let result = input_size.wrapping_add(overhead);
    if result < input_size {
        // The addition wrapped: no representable bound exists.
        0
    } else {
        result + magic_size
    }
}
1283
/// Prepares (or lazily creates) the hasher, then stitches it to the bytes
/// immediately preceding `position` so matches can reach back across block
/// boundaries.
fn InitOrStitchToPreviousBlock<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>>(
    m: &mut Alloc,
    handle: &mut UnionHasher<Alloc>,
    data: &[u8],
    mask: usize,
    ringbuffer_break: Option<core::num::NonZeroUsize>,
    params: &mut BrotliEncoderParams,
    position: usize,
    input_size: usize,
    is_last: bool,
) {
    hasher_setup(
        m,
        handle,
        params,
        ringbuffer_break,
        data,
        position,
        input_size,
        is_last,
    );
    handle.StitchToPreviousBlock(input_size, position, data, mask);
}
1307
1308fn should_compress(
1309 data: &[u8],
1310 mask: usize,
1311 last_flush_pos: u64,
1312 bytes: usize,
1313 num_literals: usize,
1314 num_commands: usize,
1315) -> bool {
1316 const K_SAMPLE_RATE: u32 = 13;
1317 const K_MIN_ENTROPY: floatX = 7.92;
1318
1319 if num_commands < (bytes >> 8) + 2 && num_literals as floatX > 0.99 * bytes as floatX {
1320 let mut literal_histo = [0u32; 256];
1321 let bit_cost_threshold = (bytes as floatX) * K_MIN_ENTROPY / (K_SAMPLE_RATE as floatX);
1322 let t = bytes
1323 .wrapping_add(K_SAMPLE_RATE as usize)
1324 .wrapping_sub(1)
1325 .wrapping_div(K_SAMPLE_RATE as usize);
1326 let mut pos = last_flush_pos as u32;
1327 for _ in 0..t {
1328 let value = &mut literal_histo[data[pos as usize & mask] as usize];
1329 *value = value.wrapping_add(1);
1330 pos = pos.wrapping_add(K_SAMPLE_RATE);
1331 }
1332 if BitsEntropy(&literal_histo[..], 256) > bit_cost_threshold {
1333 return false;
1334 }
1335 }
1336 true
1337}
1338
1339fn ChooseContextMode(
1341 params: &BrotliEncoderParams,
1342 data: &[u8],
1343 pos: usize,
1344 mask: usize,
1345 length: usize,
1346) -> ContextType {
1347 match params.mode {
1350 BrotliEncoderMode::BROTLI_FORCE_LSB_PRIOR => return ContextType::CONTEXT_LSB6,
1351 BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR => return ContextType::CONTEXT_MSB6,
1352 BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR => return ContextType::CONTEXT_UTF8,
1353 BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR => return ContextType::CONTEXT_SIGNED,
1354 _ => {}
1355 }
1356 if (params.quality >= 10 && !is_mostly_utf8(data, pos, mask, length, kMinUTF8Ratio)) {
1357 return ContextType::CONTEXT_SIGNED;
1358 }
1359 ContextType::CONTEXT_UTF8
1360}
1361
/// Operation requested of the streaming encoder for the current call.
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum BrotliEncoderOperation {
    /// Consume input and produce output opportunistically.
    BROTLI_OPERATION_PROCESS = 0,
    /// Emit all output pending for the input consumed so far.
    BROTLI_OPERATION_FLUSH = 1,
    /// Finalize the stream (flush everything, write the last meta-block).
    BROTLI_OPERATION_FINISH = 2,
    /// Emit the staged metadata meta-block.
    BROTLI_OPERATION_EMIT_METADATA = 3,
}
1369
/// Wraps `input` in a valid brotli stream consisting solely of uncompressed
/// meta-blocks (each at most 1 << 24 bytes). Returns the number of bytes
/// written to `output`.
#[allow(unused)]
fn MakeUncompressedStream(input: &[u8], input_size: usize, output: &mut [u8]) -> usize {
    // Empty payload: a single-byte last-empty meta-block terminates the stream.
    if input_size == 0 {
        output[0] = 6u8;
        return 1;
    }
    let mut written = 0usize;
    // Two-byte stream header (window bits).
    output[written] = 0x21u8;
    written += 1;
    output[written] = 0x3u8;
    written += 1;
    let mut remaining = input_size;
    let mut read_off = 0usize;
    while remaining > 0 {
        let chunk = core::cmp::min(remaining, 1usize << 24) as u32;
        // Extra MLEN nibbles beyond the minimum four (0, 1 or 2).
        let extra_nibbles: u32 = if chunk > 1u32 << 20 {
            2
        } else if chunk > 1u32 << 16 {
            1
        } else {
            0
        };
        // Meta-block header bits: ISLAST=0 | MNIBBLES | (MLEN-1) | ISUNCOMPRESSED=1.
        let header: u32 = extra_nibbles << 1
            | chunk.wrapping_sub(1) << 3
            | 1u32 << (19u32).wrapping_add(4u32.wrapping_mul(extra_nibbles));
        output[written] = header as u8;
        written += 1;
        output[written] = (header >> 8) as u8;
        written += 1;
        output[written] = (header >> 16) as u8;
        written += 1;
        // Only the 6-nibble header needs a fourth byte.
        if extra_nibbles == 2 {
            output[written] = (header >> 24) as u8;
            written += 1;
        }
        // The raw chunk bytes follow the (byte-aligned) header directly.
        output[written..written + chunk as usize]
            .clone_from_slice(&input[read_off..read_off + chunk as usize]);
        written += chunk as usize;
        read_off += chunk as usize;
        remaining -= chunk as usize;
    }
    // Trailing last-empty meta-block.
    output[written] = 3u8;
    written + 1
}
1417
/// One-shot compression of `input_buffer` into `encoded_buffer`.
///
/// Runs the streaming encoder to completion; if that fails or the output
/// exceeds the worst-case bound, falls back to an uncompressed brotli stream
/// when the caller's buffer is large enough. On success `*encoded_size`
/// holds the number of bytes written and `true` is returned.
///
/// Fix: the original text contained the mojibake `¶ms` (a garbled
/// `&params`) in the quality-9.5 hasher construction, which does not compile.
#[cfg_attr(not(feature = "ffi-api"), cfg(test))]
pub(crate) fn encoder_compress<
    Alloc: BrotliAlloc,
    MetablockCallback: FnMut(
        &mut interface::PredictionModeContextMap<InputReferenceMut>,
        &mut [interface::StaticCommand],
        interface::InputPair,
        &mut Alloc,
    ),
>(
    empty_m8: Alloc,
    m8: &mut Alloc,
    mut quality: i32,
    lgwin: i32,
    mode: BrotliEncoderMode,
    input_size: usize,
    input_buffer: &[u8],
    encoded_size: &mut usize,
    encoded_buffer: &mut [u8],
    metablock_callback: &mut MetablockCallback,
) -> bool {
    let out_size: usize = *encoded_size;
    let input_start = input_buffer;
    let output_start = encoded_buffer;
    let max_out_size: usize = BrotliEncoderMaxCompressedSize(input_size);
    if out_size == 0 {
        // No room for output at all.
        return false;
    }
    if input_size == 0 {
        // Empty input: a single last-empty meta-block byte.
        *encoded_size = 1;
        output_start[0] = 6;
        return true;
    }
    let mut is_fallback = false;
    // Quality 10 is encoded as "9.5": run quality 9 with the q9_5 hasher.
    let mut is_9_5 = false;
    if quality == 10 {
        quality = 9;
        is_9_5 = true;
    }
    if !is_fallback {
        // Temporarily move the caller's allocator into the encoder state.
        let mut s_orig = BrotliEncoderStateStruct::new(core::mem::replace(m8, empty_m8));
        if is_9_5 {
            let mut params = BrotliEncoderParams::default();
            params.q9_5 = true;
            params.quality = 10;
            ChooseHasher(&mut params);
            s_orig.hasher_ = BrotliMakeHasher(m8, &params, None);
        }
        let mut result: bool;
        {
            let s = &mut s_orig;
            let mut available_in: usize = input_size;
            let next_in_array: &[u8] = input_buffer;
            let mut next_in_offset: usize = 0;
            let mut available_out: usize = *encoded_size;
            let next_out_array: &mut [u8] = output_start;
            let mut next_out_offset: usize = 0;
            let mut total_out = Some(0);
            s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_QUALITY, quality as u32);
            s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_LGWIN, lgwin as u32);
            s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_MODE, mode as u32);
            s.set_parameter(
                BrotliEncoderParameter::BROTLI_PARAM_SIZE_HINT,
                input_size as u32,
            );
            if lgwin > BROTLI_MAX_WINDOW_BITS as i32 {
                s.set_parameter(BrotliEncoderParameter::BROTLI_PARAM_LARGE_WINDOW, 1);
            }
            // Drive the streaming encoder straight to FINISH in one call.
            result = s.compress_stream(
                BrotliEncoderOperation::BROTLI_OPERATION_FINISH,
                &mut available_in,
                next_in_array,
                &mut next_in_offset,
                &mut available_out,
                next_out_array,
                &mut next_out_offset,
                &mut total_out,
                metablock_callback,
            );
            if !s.is_finished() {
                result = false;
            }

            *encoded_size = total_out.unwrap();
            BrotliEncoderDestroyInstance(s);
        }
        // Hand the allocator back to the caller.
        let _ = core::mem::replace(m8, s_orig.m8);
        if !result || max_out_size != 0 && (*encoded_size > max_out_size) {
            // Output too big (or encoding failed): try the uncompressed path.
            is_fallback = true;
        } else {
            return true;
        }
    }
    assert_ne!(is_fallback, false);
    *encoded_size = 0;
    if max_out_size == 0 {
        return false;
    }
    if out_size >= max_out_size {
        *encoded_size = MakeUncompressedStream(input_start, input_size, output_start);
        return true;
    }
    false
}
1522
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Seals the bit stream at a byte boundary by appending a 6-bit empty
    /// metadata block after the pending `last_bytes_` bits. Output goes to
    /// the current output buffer if one is attached, otherwise to the
    /// internal tiny buffer.
    fn inject_byte_padding_block(&mut self) {
        let mut seal: u32 = self.last_bytes_ as u32;
        let mut seal_bits: usize = self.last_bytes_bits_ as usize;
        let destination: &mut [u8];
        self.last_bytes_ = 0;
        self.last_bytes_bits_ = 0;
        // Append the padding block bits (0x6) after the pending bits.
        seal |= 0x6u32 << seal_bits;

        seal_bits = seal_bits.wrapping_add(6);
        if !IsNextOutNull(&self.next_out_) {
            destination = &mut GetNextOut!(*self)[self.available_out_..];
        } else {
            destination = &mut self.tiny_buf_[..];
            self.next_out_ = NextOut::TinyBuf(0);
        }
        // seal_bits is at most 7 + 6 = 13, so at most two bytes are needed;
        // the third write below is therefore never reached in practice.
        destination[0] = seal as u8;
        if seal_bits > 8usize {
            destination[1] = (seal >> 8) as u8;
        }
        if seal_bits > 16usize {
            destination[2] = (seal >> 16) as u8;
        }
        // Account for the whole (rounded-up) bytes just produced.
        self.available_out_ = self
            .available_out_
            .wrapping_add(seal_bits.wrapping_add(7) >> 3);
    }

    /// Pushes buffered output to the caller. First satisfies a pending
    /// flush (byte-aligning the stream), then copies as many buffered bytes
    /// as fit into the caller's buffer. Returns `true` if progress was made.
    fn inject_flush_or_push_output(
        &mut self,
        available_out: &mut usize,
        next_out_array: &mut [u8],
        next_out_offset: &mut usize,
        total_out: &mut Option<usize>,
    ) -> bool {
        if self.stream_state_ as i32
            == BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED as i32
            && (self.last_bytes_bits_ as i32 != 0i32)
        {
            self.inject_byte_padding_block();
            return true;
        }
        if self.available_out_ != 0usize && (*available_out != 0usize) {
            // Copy min(buffered, caller capacity) bytes and advance both sides.
            let copy_output_size: usize = min(self.available_out_, *available_out);
            (*next_out_array)[(*next_out_offset)..(*next_out_offset + copy_output_size)]
                .clone_from_slice(&GetNextOut!(self)[..copy_output_size]);
            *next_out_offset = next_out_offset.wrapping_add(copy_output_size);
            *available_out = available_out.wrapping_sub(copy_output_size);
            self.next_out_ = NextOutIncrement(&self.next_out_, (copy_output_size as i32));
            self.available_out_ = self.available_out_.wrapping_sub(copy_output_size);
            self.total_out_ = self.total_out_.wrapping_add(copy_output_size as u64);
            // Report the running total if the caller asked for it.
            if let &mut Some(ref mut total_out_inner) = total_out {
                *total_out_inner = self.total_out_ as usize;
            }
            return true;
        }
        false
    }

    /// Bytes that have entered the ring buffer but are not yet encoded.
    fn unprocessed_input_size(&self) -> u64 {
        self.input_pos_.wrapping_sub(self.last_processed_pos_)
    }

    /// Derives a size hint (unprocessed bytes + `available_in`, saturated
    /// at 1 GiB) when the caller did not supply one.
    fn update_size_hint(&mut self, available_in: usize) {
        if self.params.size_hint == 0usize {
            let delta: u64 = self.unprocessed_input_size();
            let tail: u64 = available_in as u64;
            let limit: u32 = 1u32 << 30;
            let total: u32;
            if delta >= u64::from(limit)
                || tail >= u64::from(limit)
                || delta.wrapping_add(tail) >= u64::from(limit)
            {
                total = limit;
            } else {
                total = delta.wrapping_add(tail) as u32;
            }
            self.params.size_hint = total as usize;
        }
    }
}
1605
/// Folds a 64-bit stream position into the encoder's 32-bit wrapped space.
///
/// Positions below 3 GiB map to themselves; beyond that, the low 30 bits are
/// kept and the top two bits alternate between the "2nd" and "3rd" GiB
/// windows so wrapped distances stay consistent.
fn WrapPosition(position: u64) -> u32 {
    let gb = position >> 30;
    if gb <= 2 {
        return position as u32;
    }
    // Alternate between window tags 1 and 2 based on the GiB index parity.
    let window = ((gb.wrapping_sub(1) & 1) as u32).wrapping_add(1);
    (position as u32 & (1u32 << 30).wrapping_sub(1)) | (window << 30)
}
1615
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Grows the scratch output buffer `storage_` to at least `size` bytes.
    /// The previous contents are NOT preserved (free then fresh alloc).
    fn get_brotli_storage(&mut self, size: usize) {
        if self.storage_size_ < size {
            <Alloc as Allocator<u8>>::free_cell(&mut self.m8, core::mem::take(&mut self.storage_));
            self.storage_ = allocate::<u8, _>(&mut self.m8, size);
            self.storage_size_ = size;
        }
    }
}
1625
/// Hash-table size cap for the fast paths: 32 KiB entries at quality 0,
/// 128 KiB entries otherwise.
fn MaxHashTableSize(quality: i32) -> usize {
    if quality == 0 {
        1 << 15
    } else {
        1 << 17
    }
}
1633
/// Smallest power-of-two table size (starting at 256) that covers
/// `input_size`, capped by `max_table_size`.
fn HashTableSize(max_table_size: usize, input_size: usize) -> usize {
    let mut size = 256usize;
    loop {
        if size >= max_table_size || size >= input_size {
            return size;
        }
        size <<= 1;
    }
}
1641
// Convenience wrapper around GetHashTableInternal: pulls the allocator and
// the small/large table storage out of an encoder state `$s` so call sites
// do not have to spell out three separate `&mut` borrows.
macro_rules! GetHashTable {
    ($s : expr, $quality: expr, $input_size : expr, $table_size : expr) => {
        GetHashTableInternal(
            &mut $s.m8,
            &mut $s.small_table_,
            &mut $s.large_table_,
            $quality,
            $input_size,
            $table_size,
        )
    };
}
// Returns a zeroed hash table sized for `input_size` at the given quality.
// Tables of up to 1024 entries reuse the fixed `small_table_`; larger ones
// (re)use the heap-allocated `large_table_`, growing it when too small.
// `*table_size` receives the chosen size.
fn GetHashTableInternal<'a, AllocI32: alloc::Allocator<i32>>(
    mi32: &mut AllocI32,
    small_table_: &'a mut [i32; 1024],
    large_table_: &'a mut AllocI32::AllocatedMemory,
    quality: i32,
    input_size: usize,
    table_size: &mut usize,
) -> &'a mut [i32] {
    let max_table_size: usize = MaxHashTableSize(quality);
    let mut htsize: usize = HashTableSize(max_table_size, input_size);
    let table: &mut [i32];
    // NOTE(review): at quality 0 the size is doubled when it shares no bits
    // with 0xaaaaa -- presumably to improve the hash spread for those sizes;
    // confirm against the C reference before changing.
    if quality == 0i32 && htsize & 0xaaaaausize == 0usize {
        htsize <<= 1i32;
    }
    if htsize <= small_table_.len() {
        table = &mut small_table_[..];
    } else {
        if htsize > large_table_.slice().len() {
            {
                mi32.free_cell(core::mem::take(large_table_));
            }
            *large_table_ = mi32.alloc_cell(htsize);
        }
        table = large_table_.slice_mut();
    }
    *table_size = htsize;
    // Callers expect an all-zero table (zero meaning "empty slot").
    for item in table[..htsize].iter_mut() {
        *item = 0;
    }
    table }
1686
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Advances `last_processed_pos_` to the current input position.
    /// Returns `true` when the 32-bit wrapped position moved backwards,
    /// i.e. the position wrapped since the last update.
    fn update_last_processed_pos(&mut self) -> bool {
        let wrapped_last_processed_pos: u32 = WrapPosition(self.last_processed_pos_);
        let wrapped_input_pos: u32 = WrapPosition(self.input_pos_);
        self.last_processed_pos_ = self.input_pos_;
        wrapped_input_pos < wrapped_last_processed_pos
    }
}
1695
// Largest meta-block the encoder will emit: the ring-buffer size implied by
// the params, capped at 1 << 24 bytes.
fn MaxMetablockSize(params: &BrotliEncoderParams) -> usize {
    1 << min(ComputeRbBits(params), 24)
}
1699
1700fn ChooseContextMap(
1701 quality: i32,
1702 bigram_histo: &mut [u32],
1703 num_literal_contexts: &mut usize,
1704 literal_context_map: &mut &[u32],
1705) {
1706 static kStaticContextMapContinuation: [u32; 64] = [
1707 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1708 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1709 0, 0, 0, 0,
1710 ];
1711 static kStaticContextMapSimpleUTF8: [u32; 64] = [
1712 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1713 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1714 0, 0, 0, 0,
1715 ];
1716 let mut monogram_histo = [0u32; 3];
1717 let mut two_prefix_histo = [0u32; 6];
1718
1719 let mut i: usize;
1720 let mut entropy = [0.0 as floatX; 4];
1721 i = 0usize;
1722 while i < 9usize {
1723 {
1724 {
1725 let _rhs = bigram_histo[i];
1726 let _lhs = &mut monogram_histo[i.wrapping_rem(3)];
1727 *_lhs = (*_lhs).wrapping_add(_rhs);
1728 }
1729 {
1730 let _rhs = bigram_histo[i];
1731 let _lhs = &mut two_prefix_histo[i.wrapping_rem(6)];
1732 *_lhs = (*_lhs).wrapping_add(_rhs);
1733 }
1734 }
1735 i = i.wrapping_add(1);
1736 }
1737 entropy[1] = shannon_entropy(&monogram_histo[..], 3).0;
1738 entropy[2] =
1739 shannon_entropy(&two_prefix_histo[..], 3).0 + shannon_entropy(&two_prefix_histo[3..], 3).0;
1740 entropy[3] = 0.0;
1741 for i in 0usize..3usize {
1742 entropy[3] += shannon_entropy(&bigram_histo[(3usize).wrapping_mul(i)..], 3).0;
1743 }
1744 let total: usize = monogram_histo[0]
1745 .wrapping_add(monogram_histo[1])
1746 .wrapping_add(monogram_histo[2]) as usize;
1747 entropy[0] = 1.0 / (total as floatX);
1748 entropy[1] *= entropy[0];
1749 entropy[2] *= entropy[0];
1750 entropy[3] *= entropy[0];
1751 if quality < 7i32 {
1752 entropy[3] = entropy[1] * 10.0;
1753 }
1754 if entropy[1] - entropy[2] < 0.2 && entropy[1] - entropy[3] < 0.2 {
1755 *num_literal_contexts = 1;
1756 } else if entropy[2] - entropy[3] < 0.02 {
1757 *num_literal_contexts = 2usize;
1758 *literal_context_map = &kStaticContextMapSimpleUTF8[..];
1759 } else {
1760 *num_literal_contexts = 3usize;
1761 *literal_context_map = &kStaticContextMapContinuation[..];
1762 }
1763}
1764
// 13-context literal map for UTF-8-heavy input, indexed by the 6-bit
// BROTLI_CONTEXT value of the two preceding bytes. Selected by
// ShouldUseComplexStaticContextMap when it pays off entropy-wise.
static kStaticContextMapComplexUTF8: [u32; 64] = [
    11, 11, 12, 12, 0, 0, 0, 0, 1, 1, 9, 9, 2, 2, 2, 2, 1, 1, 1, 1, 8, 3, 3, 3, 1, 1, 1, 1, 2, 2, 2, 2, 8, 4, 4, 4, 8, 7, 4, 4, 8, 0, 0, 0, 3, 3, 3, 3, 5, 5, 10, 5, 5, 5, 10, 5, 6, 6, 6, 6, 6, 6, 6, 6,
];
// Decides whether the 13-context `kStaticContextMapComplexUTF8` map should
// be used for this stretch of input. Samples the first 64 bytes of every
// 4096-byte window and compares the literal entropy of one shared histogram
// against the sum over the 13 per-context histograms; the complex map is
// chosen only when it saves at least 0.2 bits/byte and the split entropy
// stays at or below 3 bits/byte. Only attempted when the caller expects at
// least 1 MiB of data (`size_hint`).
fn ShouldUseComplexStaticContextMap(
    input: &[u8],
    mut start_pos: usize,
    length: usize,
    mask: usize,
    quality: i32,
    size_hint: usize,
    num_literal_contexts: &mut usize,
    literal_context_map: &mut &[u32],
) -> bool {
    // Quality is currently unused here; kept for signature parity.
    let _ = quality;
    if (size_hint < (1 << 20)) {
        false
    } else {
        let end_pos = start_pos + length;
        // Histograms over 32 coarse literal classes (literal >> 3):
        // one combined, plus one per static context.
        let mut combined_histo: [u32; 32] = [0; 32];
        let mut context_histo: [[u32; 32]; 13] = [[0; 32]; 13];
        let mut total = 0u32;
        let mut entropy = [0.0 as floatX; 3];
        let utf8_lut = BROTLI_CONTEXT_LUT(ContextType::CONTEXT_UTF8);
        while start_pos + 64 <= end_pos {
            let stride_end_pos = start_pos + 64;
            let mut prev2 = input[start_pos & mask];
            let mut prev1 = input[(start_pos + 1) & mask];

            for pos in start_pos + 2..stride_end_pos {
                let literal = input[pos & mask];
                // Context of the two preceding bytes under the UTF-8 model.
                let context = kStaticContextMapComplexUTF8
                    [BROTLI_CONTEXT(prev1, prev2, utf8_lut) as usize]
                    as u8;
                total += 1;
                combined_histo[(literal >> 3) as usize] += 1;
                context_histo[context as usize][(literal >> 3) as usize] += 1;
                prev2 = prev1;
                prev1 = literal;
            }
            // Sample only 64 bytes out of each 4 KiB.
            start_pos += 4096;
        }
        entropy[1] = shannon_entropy(&combined_histo[..], 32).0;
        entropy[2] = 0.0;
        for i in 0..13 {
            assert!(i < 13);
            entropy[2] += shannon_entropy(&context_histo[i][..], 32).0;
        }
        // Normalize to bits per sampled literal.
        entropy[0] = 1.0 / (total as floatX);
        entropy[1] *= entropy[0];
        entropy[2] *= entropy[0];
        if (entropy[2] > 3.0 || entropy[1] - entropy[2] < 0.2) {
            false
        } else {
            *num_literal_contexts = 13;
            *literal_context_map = &kStaticContextMapComplexUTF8;
            true
        }
    }
}
1855
1856fn DecideOverLiteralContextModeling(
1857 input: &[u8],
1858 mut start_pos: usize,
1859 length: usize,
1860 mask: usize,
1861 quality: i32,
1862 size_hint: usize,
1863 num_literal_contexts: &mut usize,
1864 literal_context_map: &mut &[u32],
1865) {
1866 if quality < 5i32 || length < 64usize {
1867 } else if ShouldUseComplexStaticContextMap(
1868 input,
1869 start_pos,
1870 length,
1871 mask,
1872 quality,
1873 size_hint,
1874 num_literal_contexts,
1875 literal_context_map,
1876 ) {
1877 } else {
1878 let end_pos: usize = start_pos.wrapping_add(length);
1879 let mut bigram_prefix_histo = [0u32; 9];
1880 while start_pos.wrapping_add(64) <= end_pos {
1881 {
1882 static lut: [i32; 4] = [0, 0, 1, 2];
1883 let stride_end_pos: usize = start_pos.wrapping_add(64);
1884 let mut prev: i32 = lut[(input[(start_pos & mask)] as i32 >> 6) as usize] * 3i32;
1885 let mut pos: usize;
1886 pos = start_pos.wrapping_add(1);
1887 while pos < stride_end_pos {
1888 {
1889 let literal: u8 = input[(pos & mask)];
1890 {
1891 let _rhs = 1;
1892 let cur_ind = (prev + lut[(literal as i32 >> 6) as usize]);
1893 let _lhs = &mut bigram_prefix_histo[cur_ind as usize];
1894 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1895 }
1896 prev = lut[(literal as i32 >> 6) as usize] * 3i32;
1897 }
1898 pos = pos.wrapping_add(1);
1899 }
1900 }
1901 start_pos = start_pos.wrapping_add(4096);
1902 }
1903 ChooseContextMap(
1904 quality,
1905 &mut bigram_prefix_histo[..],
1906 num_literal_contexts,
1907 literal_context_map,
1908 );
1909 }
1910}
// Encodes one meta-block covering `bytes` bytes of `data` starting at
// `last_flush_pos`, choosing among the fast / trivial / full paths by
// quality. If the compressed form ends up larger than storing the bytes
// raw, the output is rewound and the block is stored uncompressed.
fn WriteMetaBlockInternal<Alloc: BrotliAlloc, Cb>(
    alloc: &mut Alloc,
    data: &[u8],
    mask: usize,
    last_flush_pos: u64,
    bytes: usize,
    mut is_last: bool,
    literal_context_mode: ContextType,
    params: &BrotliEncoderParams,
    lit_scratch_space: &mut <HistogramLiteral as CostAccessors>::i32vec,
    cmd_scratch_space: &mut <HistogramCommand as CostAccessors>::i32vec,
    dst_scratch_space: &mut <HistogramDistance as CostAccessors>::i32vec,
    prev_byte: u8,
    prev_byte2: u8,
    num_literals: usize,
    num_commands: usize,
    commands: &mut [Command],
    saved_dist_cache: &[i32; kNumDistanceCacheEntries],
    dist_cache: &mut [i32; 16],
    recoder_state: &mut RecoderState,
    storage_ix: &mut usize,
    storage: &mut [u8],
    cb: &mut Cb,
) where
    Cb: FnMut(
        &mut interface::PredictionModeContextMap<InputReferenceMut>,
        &mut [interface::StaticCommand],
        interface::InputPair,
        &mut Alloc,
    ),
{
    // Appendable streams never mark a meta-block as last here; remember the
    // caller's intent so an empty last block can be added at the end.
    let actual_is_last = is_last;
    if params.appendable {
        is_last = false;
    } else {
        assert!(!params.catable); }
    let wrapped_last_flush_pos: u32 = WrapPosition(last_flush_pos);

    let literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode);
    let mut block_params = params.clone();
    if bytes == 0usize {
        // Empty block: write a 2-bit empty meta-block marker and byte-align.
        BrotliWriteBits(2usize, 3, storage_ix, storage);
        *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
        return;
    }
    if !should_compress(
        data,
        mask,
        last_flush_pos,
        bytes,
        num_literals,
        num_commands,
    ) {
        // Not worth compressing: restore the distance cache the decoder will
        // see and store the block uncompressed.
        dist_cache[..4].clone_from_slice(&saved_dist_cache[..4]);
        store_uncompressed_meta_block(
            alloc,
            is_last,
            data,
            wrapped_last_flush_pos as usize,
            mask,
            params,
            bytes,
            recoder_state,
            storage_ix,
            storage,
            false,
            cb,
        );
        if actual_is_last != is_last {
            BrotliWriteEmptyLastMetaBlock(storage_ix, storage)
        }
        return;
    }
    // Snapshot the output position so the compressed attempt can be rolled
    // back below. NOTE(review): truncating *storage_ix to u8 assumes it is
    // the 0..=7 bit remainder at meta-block entry -- confirm at call sites.
    let saved_byte_location = (*storage_ix) >> 3;
    let last_bytes: u16 =
        ((storage[saved_byte_location + 1] as u16) << 8) | storage[saved_byte_location] as u16;
    let last_bytes_bits: u8 = *storage_ix as u8;
    if params.quality <= 2 {
        // Fast path: static entropy codes.
        store_meta_block_fast(
            alloc,
            data,
            wrapped_last_flush_pos as usize,
            bytes,
            mask,
            is_last,
            params,
            saved_dist_cache,
            commands,
            num_commands,
            recoder_state,
            storage_ix,
            storage,
            cb,
        );
    } else if params.quality < 4 {
        // Single-block path without context modeling.
        store_meta_block_trivial(
            alloc,
            data,
            wrapped_last_flush_pos as usize,
            bytes,
            mask,
            is_last,
            params,
            saved_dist_cache,
            commands,
            num_commands,
            recoder_state,
            storage_ix,
            storage,
            cb,
        );
    } else {
        // Full path: build a MetaBlockSplit (greedy below quality 10,
        // optimizing at 10+), then store it.
        let mut mb = MetaBlockSplit::<Alloc>::new();
        if params.quality < 10i32 {
            let mut num_literal_contexts: usize = 1;
            let mut literal_context_map: &[u32] = &[];
            if params.disable_literal_context_modeling == 0 {
                DecideOverLiteralContextModeling(
                    data,
                    wrapped_last_flush_pos as usize,
                    bytes,
                    mask,
                    params.quality,
                    params.size_hint,
                    &mut num_literal_contexts,
                    &mut literal_context_map,
                );
            }
            BrotliBuildMetaBlockGreedy(
                alloc,
                data,
                wrapped_last_flush_pos as usize,
                mask,
                prev_byte,
                prev_byte2,
                literal_context_mode,
                literal_context_lut,
                num_literal_contexts,
                literal_context_map,
                commands,
                num_commands,
                &mut mb,
            );
        } else {
            BrotliBuildMetaBlock(
                alloc,
                data,
                wrapped_last_flush_pos as usize,
                mask,
                &mut block_params,
                prev_byte,
                prev_byte2,
                commands,
                num_commands,
                literal_context_mode,
                lit_scratch_space,
                cmd_scratch_space,
                dst_scratch_space,
                &mut mb,
            );
        }
        if params.quality >= 4i32 {
            // Cap the distance alphabet to what histograms can represent.
            let mut num_effective_dist_codes = block_params.dist.alphabet_size;
            if num_effective_dist_codes > BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS as u32 {
                num_effective_dist_codes = BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS as u32;
            }
            BrotliOptimizeHistograms(num_effective_dist_codes as usize, &mut mb);
        }
        store_meta_block(
            alloc,
            data,
            wrapped_last_flush_pos as usize,
            bytes,
            mask,
            prev_byte,
            prev_byte2,
            is_last,
            &block_params,
            literal_context_mode,
            saved_dist_cache,
            commands,
            num_commands,
            &mut mb,
            recoder_state,
            storage_ix,
            storage,
            cb,
        );
        mb.destroy(alloc);
    }
    // If the compressed form beat the raw bytes by less than 4 bytes of
    // slack, rewind the bit writer and store the block uncompressed instead.
    if bytes + 4 + saved_byte_location < (*storage_ix >> 3) {
        dist_cache[..4].clone_from_slice(&saved_dist_cache[..4]);
        storage[saved_byte_location] = last_bytes as u8;
        storage[saved_byte_location + 1] = (last_bytes >> 8) as u8;
        *storage_ix = last_bytes_bits as usize;
        store_uncompressed_meta_block(
            alloc,
            is_last,
            data,
            wrapped_last_flush_pos as usize,
            mask,
            params,
            bytes,
            recoder_state,
            storage_ix,
            storage,
            true,
            cb,
        );
    }
    if actual_is_last != is_last {
        BrotliWriteEmptyLastMetaBlock(storage_ix, storage)
    }
}
2139
2140fn ChooseDistanceParams(params: &mut BrotliEncoderParams) {
2141 let mut num_direct_distance_codes = 0u32;
2142 let mut distance_postfix_bits = 0u32;
2143
2144 if params.quality >= 4 {
2145 if params.mode == BrotliEncoderMode::BROTLI_MODE_FONT {
2146 distance_postfix_bits = 1;
2147 num_direct_distance_codes = 12;
2148 } else {
2149 distance_postfix_bits = params.dist.distance_postfix_bits;
2150 num_direct_distance_codes = params.dist.num_direct_distance_codes;
2151 }
2152 let ndirect_msb = (num_direct_distance_codes >> distance_postfix_bits) & 0x0f;
2153 if distance_postfix_bits > BROTLI_MAX_NPOSTFIX as u32
2154 || num_direct_distance_codes > BROTLI_MAX_NDIRECT as u32
2155 || (ndirect_msb << distance_postfix_bits) != num_direct_distance_codes
2156 {
2157 distance_postfix_bits = 0;
2158 num_direct_distance_codes = 0;
2159 }
2160 }
2161 BrotliInitDistanceParams(params, distance_postfix_bits, num_direct_distance_codes);
2162 }
2183
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Encodes all input buffered since the last flush into `self.storage_`.
    ///
    /// Returns `false` on invariant violations (uninitialized state, input
    /// larger than one block, data after the last block). On success, sets
    /// `*out_size` to the number of whole bytes now available in storage and
    /// points `next_out_` at it. `callback` is invoked per metablock with the
    /// prediction-mode map and commands (used for IR-style introspection).
    fn encode_data<MetablockCallback>(
        &mut self,
        is_last: bool,
        force_flush: bool,
        out_size: &mut usize,
        callback: &mut MetablockCallback,
    ) -> bool
    where
        MetablockCallback: FnMut(
            &mut interface::PredictionModeContextMap<InputReferenceMut>,
            &mut [interface::StaticCommand],
            interface::InputPair,
            &mut Alloc,
        ),
    {
        // Bytes received but not yet processed by the compressor.
        let mut delta: u64 = self.unprocessed_input_size();
        let mut bytes: u32 = delta as u32;
        let mask = self.ringbuffer_.mask_;
        if !self.ensure_initialized() {
            return false;
        }
        let dictionary = BrotliGetDictionary();
        if self.is_last_block_emitted_ {
            // Nothing may follow the last block.
            return false;
        }
        if is_last {
            self.is_last_block_emitted_ = true;
        }
        if delta > self.input_block_size() as u64 {
            return false;
        }
        // Bit position within storage; low 3 bits of the previous block's
        // trailing partial byte are carried in last_bytes_/last_bytes_bits_.
        let mut storage_ix: usize = usize::from(self.last_bytes_bits_);
        {
            // Worst-case output bound: 2x input plus fixed overhead.
            let meta_size = max(
                bytes as usize,
                self.input_pos_.wrapping_sub(self.last_flush_pos_) as usize,
            );
            self.get_brotli_storage((2usize).wrapping_mul(meta_size).wrapping_add(503 + 24));
        }
        {
            // Seed storage with the carried-over partial byte(s).
            self.storage_.slice_mut()[0] = self.last_bytes_ as u8;
            self.storage_.slice_mut()[1] = (self.last_bytes_ >> 8) as u8;
        }
        let mut catable_header_size = 0;
        // Optionally emit a magic-number metadata block first (once).
        if let IsFirst::NothingWritten = self.is_first_mb {
            if self.params.magic_number {
                BrotliWriteMetadataMetaBlock(
                    &self.params,
                    &mut storage_ix,
                    self.storage_.slice_mut(),
                );
                self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
                    | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
                self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
                self.next_out_ = NextOut::DynamicStorage(0);
                catable_header_size = storage_ix >> 3;
                *out_size = catable_header_size;
                self.is_first_mb = IsFirst::HeaderWritten;
            }
        }
        // For concatenable ("catable") streams, the first two bytes are
        // emitted as uncompressed metablocks so streams can be spliced.
        if let IsFirst::BothCatableBytesWritten = self.is_first_mb {
            // Already done; nothing to emit.
        } else if !self.params.catable {
            self.is_first_mb = IsFirst::BothCatableBytesWritten;
        } else if bytes != 0 {
            assert!(self.last_processed_pos_ < 2 || self.custom_dictionary);
            let num_bytes_to_write_uncompressed: usize = min(2, bytes as usize);
            {
                let data =
                    &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..];
                store_uncompressed_meta_block(
                    &mut self.m8,
                    false,
                    data,
                    self.last_flush_pos_ as usize,
                    mask as usize,
                    &self.params,
                    num_bytes_to_write_uncompressed,
                    &mut self.recoder_state,
                    &mut storage_ix,
                    self.storage_.slice_mut(),
                    false, callback,
                );
                self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
                    | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
                self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
                // Track context bytes for the next metablock's literal coding.
                self.prev_byte2_ = self.prev_byte_;
                self.prev_byte_ = data[self.last_flush_pos_ as usize & mask as usize];
                if num_bytes_to_write_uncompressed == 2 {
                    self.prev_byte2_ = self.prev_byte_;
                    self.prev_byte_ = data[(self.last_flush_pos_ + 1) as usize & mask as usize];
                }
            }
            self.last_flush_pos_ += num_bytes_to_write_uncompressed as u64;
            bytes -= num_bytes_to_write_uncompressed as u32;
            self.last_processed_pos_ += num_bytes_to_write_uncompressed as u64;
            // Advance the 0/1/2-bytes-written state machine.
            if num_bytes_to_write_uncompressed >= 2 {
                self.is_first_mb = IsFirst::BothCatableBytesWritten;
            } else if num_bytes_to_write_uncompressed == 1 {
                if let IsFirst::FirstCatableByteWritten = self.is_first_mb {
                    self.is_first_mb = IsFirst::BothCatableBytesWritten;
                } else {
                    self.is_first_mb = IsFirst::FirstCatableByteWritten;
                }
            }
            catable_header_size = storage_ix >> 3;
            self.next_out_ = NextOut::DynamicStorage(0);
            *out_size = catable_header_size;
            delta = self.unprocessed_input_size();
        }
        let mut wrapped_last_processed_pos: u32 = WrapPosition(self.last_processed_pos_);
        // Quality 1 (two-pass) needs scratch command/literal buffers.
        if self.params.quality == 1i32 && self.command_buf_.slice().is_empty() {
            let new_buf = allocate::<u32, _>(&mut self.m8, kCompressFragmentTwoPassBlockSize);
            self.command_buf_ = new_buf;
            let new_buf8 = allocate::<u8, _>(&mut self.m8, kCompressFragmentTwoPassBlockSize);
            self.literal_buf_ = new_buf8;
        }

        // Fast paths: qualities 0 and 1 compress fragments directly, without
        // building commands/metablocks.
        if self.params.quality == 0i32 || self.params.quality == 1i32 {
            let mut table_size: usize = 0;
            {
                if delta == 0 && !is_last {
                    // Nothing new and not finishing: report only any header.
                    *out_size = catable_header_size;
                    return true;
                }
                let data =
                    &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..];

                let table: &mut [i32] =
                    GetHashTable!(self, self.params.quality, bytes as usize, &mut table_size);

                if self.params.quality == 0i32 {
                    compress_fragment_fast(
                        &mut self.m8,
                        &mut data[((wrapped_last_processed_pos & mask) as usize)..],
                        bytes as usize,
                        is_last,
                        table,
                        table_size,
                        &mut self.cmd_depths_[..],
                        &mut self.cmd_bits_[..],
                        &mut self.cmd_code_numbits_,
                        &mut self.cmd_code_[..],
                        &mut storage_ix,
                        self.storage_.slice_mut(),
                    );
                } else {
                    compress_fragment_two_pass(
                        &mut self.m8,
                        &mut data[((wrapped_last_processed_pos & mask) as usize)..],
                        bytes as usize,
                        is_last,
                        self.command_buf_.slice_mut(),
                        self.literal_buf_.slice_mut(),
                        table,
                        table_size,
                        &mut storage_ix,
                        self.storage_.slice_mut(),
                    );
                }
                // Carry the trailing partial byte into the next call.
                self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
                    | ((self.storage_.slice()[(storage_ix >> 3) + 1] as u16) << 8);
                self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
            }
            self.update_last_processed_pos();
            self.next_out_ = NextOut::DynamicStorage(0); *out_size = storage_ix >> 3;
            return true;
        }
        // Qualities >= 2: grow the command array if the new input could
        // overflow it (roughly one command per 2 input bytes).
        {
            let mut newsize: usize = self
                .num_commands_
                .wrapping_add(bytes.wrapping_div(2) as usize)
                .wrapping_add(1);
            if newsize > self.cmd_alloc_size_ {
                // Over-allocate to amortize reallocation cost.
                newsize = newsize.wrapping_add(bytes.wrapping_div(4).wrapping_add(16) as usize);
                self.cmd_alloc_size_ = newsize;
                let mut new_commands = allocate::<Command, _>(&mut self.m8, newsize);
                if !self.commands_.slice().is_empty() {
                    new_commands.slice_mut()[..self.num_commands_]
                        .clone_from_slice(&self.commands_.slice()[..self.num_commands_]);
                    <Alloc as Allocator<Command>>::free_cell(
                        &mut self.m8,
                        core::mem::take(&mut self.commands_),
                    );
                }
                self.commands_ = new_commands;
            }
        }
        InitOrStitchToPreviousBlock(
            &mut self.m8,
            &mut self.hasher_,
            &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
            mask as usize,
            self.custom_dictionary_size,
            &mut self.params,
            wrapped_last_processed_pos as usize,
            bytes as usize,
            is_last,
        );
        let literal_context_mode = ChooseContextMode(
            &self.params,
            self.ringbuffer_.data_mo.slice(),
            WrapPosition(self.last_flush_pos_) as usize,
            mask as usize,
            (self.input_pos_.wrapping_sub(self.last_flush_pos_)) as usize,
        );
        if self.num_commands_ != 0 && self.last_insert_len_ == 0 {
            // Try to extend the previous copy command over the new bytes.
            self.extend_last_command(&mut bytes, &mut wrapped_last_processed_pos);
        }
        BrotliCreateBackwardReferences(
            &mut self.m8,
            dictionary,
            bytes as usize,
            wrapped_last_processed_pos as usize,
            &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
            mask as usize,
            self.custom_dictionary_size,
            &mut self.params,
            &mut self.hasher_,
            &mut self.dist_cache_,
            &mut self.last_insert_len_,
            &mut self.commands_.slice_mut()[self.num_commands_..],
            &mut self.num_commands_,
            &mut self.num_literals_,
        );
        {
            // Decide whether to keep accumulating or emit a metablock now.
            let max_length: usize = MaxMetablockSize(&mut self.params);
            let max_literals: usize = max_length.wrapping_div(8);
            let max_commands: usize = max_length.wrapping_div(8);
            let processed_bytes: usize =
                self.input_pos_.wrapping_sub(self.last_flush_pos_) as usize;
            let next_input_fits_metablock =
                processed_bytes.wrapping_add(self.input_block_size()) <= max_length;
            // Low qualities flush early to bound histogram sizes.
            let should_flush = self.params.quality < 4
                && self.num_literals_.wrapping_add(self.num_commands_) >= 0x2fff;
            if !is_last
                && !force_flush
                && !should_flush
                && next_input_fits_metablock
                && self.num_literals_ < max_literals
                && self.num_commands_ < max_commands
            {
                if self.update_last_processed_pos() {
                    HasherReset(&mut self.hasher_);
                }
                *out_size = catable_header_size;
                return true;
            }
        }
        // Flush any pending insert run as a literal-only command.
        if self.last_insert_len_ > 0usize {
            self.commands_.slice_mut()[self.num_commands_].init_insert(self.last_insert_len_);
            self.num_commands_ = self.num_commands_.wrapping_add(1);
            self.num_literals_ = self.num_literals_.wrapping_add(self.last_insert_len_);
            self.last_insert_len_ = 0usize;
        }
        if !is_last && self.input_pos_ == self.last_flush_pos_ {
            // Nothing to emit for an intermediate empty flush.
            *out_size = catable_header_size;
            return true;
        }
        {
            let metablock_size: u32 = self.input_pos_.wrapping_sub(self.last_flush_pos_) as u32;
            WriteMetaBlockInternal(
                &mut self.m8,
                &mut self.ringbuffer_.data_mo.slice_mut()[self.ringbuffer_.buffer_index..],
                mask as usize,
                self.last_flush_pos_,
                metablock_size as usize,
                is_last,
                literal_context_mode,
                &mut self.params,
                &mut self.literal_scratch_space,
                &mut self.command_scratch_space,
                &mut self.distance_scratch_space,
                self.prev_byte_,
                self.prev_byte2_,
                self.num_literals_,
                self.num_commands_,
                self.commands_.slice_mut(),
                &mut self.saved_dist_cache_,
                &mut self.dist_cache_,
                &mut self.recoder_state,
                &mut storage_ix,
                self.storage_.slice_mut(),
                callback,
            );

            // Carry the trailing partial byte and reset per-metablock state.
            self.last_bytes_ = self.storage_.slice()[(storage_ix >> 3)] as u16
                | ((self.storage_.slice()[1 + (storage_ix >> 3)] as u16) << 8);
            self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
            self.last_flush_pos_ = self.input_pos_;
            if self.update_last_processed_pos() {
                HasherReset(&mut self.hasher_);
            }
            let data = &self.ringbuffer_.data_mo.slice()[self.ringbuffer_.buffer_index..];
            if self.last_flush_pos_ > 0 {
                self.prev_byte_ =
                    data[(((self.last_flush_pos_ as u32).wrapping_sub(1) & mask) as usize)];
            }
            if self.last_flush_pos_ > 1 {
                self.prev_byte2_ =
                    data[((self.last_flush_pos_.wrapping_sub(2) as u32 & mask) as usize)];
            }
            self.num_commands_ = 0usize;
            self.num_literals_ = 0usize;
            // Snapshot the distance cache so a later uncompressed fallback
            // can restore it (see WriteMetaBlockInternal).
            self.saved_dist_cache_
                .clone_from_slice(self.dist_cache_.split_at(4).0);
            self.next_out_ = NextOut::DynamicStorage(0); *out_size = storage_ix >> 3;
            true
        }
    }

    /// Writes the header of a metadata metablock (RFC 7932 "ISLAST=0,
    /// MNIBBLES=0" form) into the tiny output buffer and returns its length
    /// in bytes. Uses `remaining_metadata_bytes_` as the body size.
    fn write_metadata_header(&mut self) -> usize {
        let block_size = self.remaining_metadata_bytes_ as usize;
        let header = GetNextOut!(*self);
        let mut storage_ix: usize;
        storage_ix = self.last_bytes_bits_ as usize;
        // Start from the carried-over partial byte.
        header[0] = self.last_bytes_ as u8;
        header[1] = (self.last_bytes_ >> 8) as u8;
        self.last_bytes_ = 0;
        self.last_bytes_bits_ = 0;
        // ISLAST = 0, MNIBBLES = 0 (metadata), reserved bit = 0.
        BrotliWriteBits(1, 0, &mut storage_ix, header);
        BrotliWriteBits(2usize, 3, &mut storage_ix, header);
        BrotliWriteBits(1, 0, &mut storage_ix, header);
        if block_size == 0usize {
            // MSKIPBYTES = 0: empty metadata block.
            BrotliWriteBits(2usize, 0, &mut storage_ix, header);
        } else {
            // MSKIPBYTES = number of bytes needed to encode (block_size - 1).
            let nbits: u32 = if block_size == 1 {
                0u32
            } else {
                Log2FloorNonZero((block_size as u32).wrapping_sub(1) as (u64)).wrapping_add(1)
            };
            let nbytes: u32 = nbits.wrapping_add(7).wrapping_div(8);
            BrotliWriteBits(2usize, nbytes as (u64), &mut storage_ix, header);
            BrotliWriteBits(
                (8u32).wrapping_mul(nbytes) as usize,
                block_size.wrapping_sub(1) as u64,
                &mut storage_ix,
                header,
            );
        }
        // Round up to a whole number of bytes (the body is byte-aligned).
        storage_ix.wrapping_add(7u32 as usize) >> 3
    }
}
2540
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Handles the EMIT_METADATA operation: flushes pending compressed data,
    /// writes a metadata metablock header, then copies the metadata body
    /// from `next_in_array` to the output (directly when the caller supplied
    /// output space, otherwise through the 16-byte tiny buffer).
    ///
    /// Returns `false` if the metadata is too large (> 2^24 - 1 bytes) or the
    /// stream is in an incompatible state.
    fn process_metadata<
        MetaBlockCallback: FnMut(
            &mut interface::PredictionModeContextMap<InputReferenceMut>,
            &mut [interface::StaticCommand],
            interface::InputPair,
            &mut Alloc,
        ),
    >(
        &mut self,
        available_in: &mut usize,
        next_in_array: &[u8],
        next_in_offset: &mut usize,
        available_out: &mut usize,
        next_out_array: &mut [u8],
        next_out_offset: &mut usize,
        total_out: &mut Option<usize>,
        metablock_callback: &mut MetaBlockCallback,
    ) -> bool {
        // Metadata length is capped at 24 bits by the format.
        if *available_in > (1u32 << 24) as usize {
            return false;
        }
        if self.stream_state_ as i32 == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32 {
            self.remaining_metadata_bytes_ = *available_in as u32;
            self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD;
        }
        if self.stream_state_ as i32 != BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
            && (self.stream_state_ as i32
                != BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY as i32)
        {
            return false;
        }
        loop {
            // First drain any output already queued up.
            if self.inject_flush_or_push_output(
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
            ) {
                continue;
            }
            if self.available_out_ != 0usize {
                // Caller must consume pending output before we can proceed.
                break;
            }
            if self.input_pos_ != self.last_flush_pos_ {
                // Flush buffered compressed data before emitting metadata.
                let mut avail_out: usize = self.available_out_;
                let result = self.encode_data(false, true, &mut avail_out, metablock_callback);
                self.available_out_ = avail_out;
                if !result {
                    return false;
                }
                continue;
            }
            if self.stream_state_ as i32
                == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
            {
                // Emit the metadata metablock header into the tiny buffer.
                self.next_out_ = NextOut::TinyBuf(0);
                self.available_out_ = self.write_metadata_header();
                self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY;
                {
                    continue;
                }
            } else {
                if self.remaining_metadata_bytes_ == 0u32 {
                    // Body fully copied: back to normal processing.
                    self.remaining_metadata_bytes_ = u32::MAX;
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING;
                    {
                        break;
                    }
                }
                if *available_out != 0 {
                    // Copy metadata straight into the caller's output buffer.
                    let copy: u32 =
                        min(self.remaining_metadata_bytes_ as usize, *available_out) as u32;
                    next_out_array[*next_out_offset..(*next_out_offset + copy as usize)]
                        .clone_from_slice(
                            &next_in_array[*next_in_offset..(*next_in_offset + copy as usize)],
                        );
                    *next_in_offset += copy as usize;
                    *available_in = available_in.wrapping_sub(copy as usize);
                    self.remaining_metadata_bytes_ =
                        self.remaining_metadata_bytes_.wrapping_sub(copy);
                    *next_out_offset += copy as usize;
                    *available_out = available_out.wrapping_sub(copy as usize);
                } else {
                    // No caller space: stage up to 16 bytes in the tiny buffer
                    // and let inject_flush_or_push_output deliver them.
                    let copy: u32 = min(self.remaining_metadata_bytes_, 16u32);
                    self.next_out_ = NextOut::TinyBuf(0);
                    GetNextOut!(self)[..(copy as usize)].clone_from_slice(
                        &next_in_array[*next_in_offset..(*next_in_offset + copy as usize)],
                    );
                    *next_in_offset += copy as usize;
                    *available_in = available_in.wrapping_sub(copy as usize);
                    self.remaining_metadata_bytes_ =
                        self.remaining_metadata_bytes_.wrapping_sub(copy);
                    self.available_out_ = copy as usize;
                }
                {
                    continue;
                }
            }
        }
        true
    }
}
2649fn CheckFlushCompleteInner(
2650 stream_state: &mut BrotliEncoderStreamState,
2651 available_out: usize,
2652 next_out: &mut NextOut,
2653) {
2654 if *stream_state == BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED
2655 && (available_out == 0)
2656 {
2657 *stream_state = BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING;
2658 *next_out = NextOut::None;
2659 }
2660}
2661
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// If a flush was requested and all output is drained, return the stream
    /// to the processing state.
    fn check_flush_complete(&mut self) {
        CheckFlushCompleteInner(
            &mut self.stream_state_,
            self.available_out_,
            &mut self.next_out_,
        );
    }

    /// Streaming path for qualities 0/1: compresses input fragments directly
    /// from `next_in_array` (no ringbuffer), writing into the caller's output
    /// buffer when it is large enough for the worst case, or into internal
    /// storage otherwise. Returns `false` for any other quality.
    fn compress_stream_fast(
        &mut self,
        op: BrotliEncoderOperation,
        available_in: &mut usize,
        next_in_array: &[u8],
        next_in_offset: &mut usize,
        available_out: &mut usize,
        next_out_array: &mut [u8],
        next_out_offset: &mut usize,
        total_out: &mut Option<usize>,
    ) -> bool {
        let block_size_limit: usize = 1 << self.params.lgwin;
        let buf_size: usize = min(
            kCompressFragmentTwoPassBlockSize,
            min(*available_in, block_size_limit),
        );
        // Scratch buffers for the quality-1 two-pass compressor; empty
        // placeholders until (and unless) quality 1 allocates them below.
        let mut command_buf = alloc_default::<u32, Alloc>();
        let mut literal_buf = alloc_default::<u8, Alloc>();
        if self.params.quality != 0i32 && (self.params.quality != 1i32) {
            return false;
        }
        if self.params.quality == 1i32 {
            if self.command_buf_.slice().is_empty()
                && (buf_size == kCompressFragmentTwoPassBlockSize)
            {
                // Full-size buffers are cached on self for reuse.
                self.command_buf_ =
                    allocate::<u32, _>(&mut self.m8, kCompressFragmentTwoPassBlockSize);
                self.literal_buf_ =
                    allocate::<u8, _>(&mut self.m8, kCompressFragmentTwoPassBlockSize);
            }
            if !self.command_buf_.slice().is_empty() {
                // Borrow the cached buffers for the duration of this call.
                command_buf = core::mem::take(&mut self.command_buf_);
                literal_buf = core::mem::take(&mut self.literal_buf_);
            } else {
                // Small input: allocate right-sized temporary buffers.
                command_buf = allocate::<u32, _>(&mut self.m8, buf_size);
                literal_buf = allocate::<u8, _>(&mut self.m8, buf_size);
            }
        }
        loop {
            if self.inject_flush_or_push_output(
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
            ) {
                continue;
            }
            // Compress the next block only when no output is pending, the
            // stream is in the processing state, and there is either input
            // or a non-PROCESS operation to honor.
            if self.available_out_ == 0usize
                && (self.stream_state_ as i32
                    == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32)
                && (*available_in != 0usize
                    || op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_PROCESS as i32)
            {
                let block_size: usize = min(block_size_limit, *available_in);
                // FINISH/FLUSH only take effect on the final block of input.
                let is_last = *available_in == block_size
                    && op == BrotliEncoderOperation::BROTLI_OPERATION_FINISH;
                let force_flush = *available_in == block_size
                    && op == BrotliEncoderOperation::BROTLI_OPERATION_FLUSH;
                // Worst-case output for one block: 2x input + 503 bytes.
                let max_out_size: usize = (2usize).wrapping_mul(block_size).wrapping_add(503);
                let mut inplace: i32 = 1i32;
                let storage: &mut [u8];
                let mut storage_ix: usize = self.last_bytes_bits_ as usize;
                let mut table_size: usize = 0;

                if force_flush && block_size == 0 {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
                    {
                        continue;
                    }
                }
                // Write directly into the caller's buffer if it can hold the
                // worst case; otherwise fall back to internal storage.
                if max_out_size <= *available_out {
                    storage = &mut next_out_array[*next_out_offset..]; } else {
                    inplace = 0i32;
                    self.get_brotli_storage(max_out_size);
                    storage = self.storage_.slice_mut();
                }
                // Seed with the carried-over partial byte(s).
                storage[0] = self.last_bytes_ as u8;
                storage[1] = (self.last_bytes_ >> 8) as u8;
                let table: &mut [i32] =
                    GetHashTable!(self, self.params.quality, block_size, &mut table_size);
                if self.params.quality == 0i32 {
                    compress_fragment_fast(
                        &mut self.m8,
                        &(next_in_array)[*next_in_offset..],
                        block_size,
                        is_last,
                        table,
                        table_size,
                        &mut self.cmd_depths_[..],
                        &mut self.cmd_bits_[..],
                        &mut self.cmd_code_numbits_,
                        &mut self.cmd_code_[..],
                        &mut storage_ix,
                        storage,
                    );
                } else {
                    compress_fragment_two_pass(
                        &mut self.m8,
                        &(next_in_array)[*next_in_offset..],
                        block_size,
                        is_last,
                        command_buf.slice_mut(),
                        literal_buf.slice_mut(),
                        table,
                        table_size,
                        &mut storage_ix,
                        storage,
                    );
                }
                *next_in_offset += block_size;
                *available_in = available_in.wrapping_sub(block_size);
                if inplace != 0 {
                    // Output went straight to the caller; account for it now.
                    let out_bytes: usize = storage_ix >> 3;
                    *next_out_offset += out_bytes;
                    *available_out = available_out.wrapping_sub(out_bytes);
                    self.total_out_ = self.total_out_.wrapping_add(out_bytes as u64);
                    if let &mut Some(ref mut total_out_inner) = total_out {
                        *total_out_inner = self.total_out_ as usize;
                    }
                } else {
                    // Output is in internal storage; deliver it on the next
                    // inject_flush_or_push_output pass.
                    let out_bytes: usize = storage_ix >> 3;
                    self.next_out_ = NextOut::DynamicStorage(0);
                    self.available_out_ = out_bytes;
                }
                // Carry the trailing partial byte into the next block.
                self.last_bytes_ = storage[(storage_ix >> 3)] as u16
                    | ((storage[1 + (storage_ix >> 3)] as u16) << 8);
                self.last_bytes_bits_ = (storage_ix & 7u32 as usize) as u8;
                if force_flush {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
                }
                if is_last {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FINISHED;
                }
                {
                    continue;
                }
            }
            {
                break;
            }
        }
        // Return borrowed full-size buffers to the cache, free temporaries.
        if command_buf.slice().len() == kCompressFragmentTwoPassBlockSize
            && self.command_buf_.slice().is_empty()
        {
            self.command_buf_ = core::mem::take(&mut command_buf);
            self.literal_buf_ = core::mem::take(&mut literal_buf);
        } else {
            <Alloc as Allocator<u32>>::free_cell(&mut self.m8, command_buf);
            <Alloc as Allocator<u8>>::free_cell(&mut self.m8, literal_buf);
        }
        self.check_flush_complete();
        true
    }

    /// Number of bytes the current input block can still accept before
    /// encode_data must be called.
    fn remaining_input_block_size(&mut self) -> usize {
        let delta: u64 = self.unprocessed_input_size();
        let block_size = self.input_block_size();
        if delta >= block_size as u64 {
            return 0usize;
        }
        (block_size as u64).wrapping_sub(delta) as usize
    }

    /// Main streaming entry point (mirrors the C `BrotliEncoderCompressStream`).
    ///
    /// Consumes input from `next_in_array[*next_in_offset..]` and produces
    /// output into `next_out_array[*next_out_offset..]`, updating the offsets
    /// and the `available_*` counters. `op` selects PROCESS / FLUSH / FINISH /
    /// EMIT_METADATA semantics. Returns `false` on invalid state or usage.
    pub fn compress_stream<
        MetablockCallback: FnMut(
            &mut interface::PredictionModeContextMap<InputReferenceMut>,
            &mut [interface::StaticCommand],
            interface::InputPair,
            &mut Alloc,
        ),
    >(
        &mut self,
        op: BrotliEncoderOperation,
        available_in: &mut usize,
        next_in_array: &[u8],
        next_in_offset: &mut usize,
        available_out: &mut usize,
        next_out_array: &mut [u8],
        next_out_offset: &mut usize,
        total_out: &mut Option<usize>,
        metablock_callback: &mut MetablockCallback,
    ) -> bool {
        if !self.ensure_initialized() {
            return false;
        }
        // While metadata emission is in progress, only EMIT_METADATA with the
        // exact remaining byte count is legal.
        if self.remaining_metadata_bytes_ != u32::MAX {
            if *available_in != self.remaining_metadata_bytes_ as usize {
                return false;
            }
            if op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_EMIT_METADATA as i32 {
                return false;
            }
        }
        if op as i32 == BrotliEncoderOperation::BROTLI_OPERATION_EMIT_METADATA as i32 {
            self.update_size_hint(0); // metadata block does not hint the size
            return self.process_metadata(
                available_in,
                next_in_array,
                next_in_offset,
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
                metablock_callback,
            );
        }
        if self.stream_state_ as i32 == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_HEAD as i32
            || self.stream_state_ as i32
                == BrotliEncoderStreamState::BROTLI_STREAM_METADATA_BODY as i32
        {
            return false;
        }
        if self.stream_state_ as i32 != BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32
            && (*available_in != 0usize)
        {
            return false;
        }
        // Qualities 0/1 (non-catable) bypass the ringbuffer entirely.
        if (self.params.quality == 0i32 || self.params.quality == 1i32) && !self.params.catable {
            return self.compress_stream_fast(
                op,
                available_in,
                next_in_array,
                next_in_offset,
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
            );
        }
        loop {
            let remaining_block_size: usize = self.remaining_input_block_size();
            // Fill the current input block from the caller's buffer.
            if remaining_block_size != 0usize && (*available_in != 0usize) {
                let copy_input_size: usize = min(remaining_block_size, *available_in);
                self.copy_input_to_ring_buffer(copy_input_size, &next_in_array[*next_in_offset..]);
                *next_in_offset += copy_input_size;
                *available_in = available_in.wrapping_sub(copy_input_size);
                {
                    continue;
                }
            }
            if self.inject_flush_or_push_output(
                available_out,
                next_out_array,
                next_out_offset,
                total_out,
            ) {
                continue;
            }
            // Encode when a block is full, or when FLUSH/FINISH forces it.
            if self.available_out_ == 0usize
                && (self.stream_state_ as i32
                    == BrotliEncoderStreamState::BROTLI_STREAM_PROCESSING as i32)
                && (remaining_block_size == 0usize
                    || op as i32 != BrotliEncoderOperation::BROTLI_OPERATION_PROCESS as i32)
            {
                let is_last =
                    *available_in == 0 && op == BrotliEncoderOperation::BROTLI_OPERATION_FINISH;
                let force_flush =
                    *available_in == 0 && op == BrotliEncoderOperation::BROTLI_OPERATION_FLUSH;

                self.update_size_hint(*available_in);
                let mut avail_out = self.available_out_;
                let result =
                    self.encode_data(is_last, force_flush, &mut avail_out, metablock_callback);
                self.available_out_ = avail_out;
                if !result {
                    return false;
                }
                if force_flush {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FLUSH_REQUESTED;
                }
                if is_last {
                    self.stream_state_ = BrotliEncoderStreamState::BROTLI_STREAM_FINISHED;
                }
                {
                    continue;
                }
            }
            {
                break;
            }
        }
        self.check_flush_complete();
        true
    }

    /// True once the last block has been emitted and all output consumed.
    pub fn is_finished(&self) -> bool {
        self.stream_state_ == BrotliEncoderStreamState::BROTLI_STREAM_FINISHED
            && !self.has_more_output()
    }

    /// True while encoded bytes are waiting to be taken or pushed out.
    pub fn has_more_output(&self) -> bool {
        self.available_out_ != 0
    }

    /// Returns a slice of pending output and marks it consumed.
    ///
    /// If `*size` is nonzero it caps how much is taken; on return `*size` is
    /// the number of bytes actually consumed (0 and an empty slice when no
    /// output is pending).
    pub fn take_output(&mut self, size: &mut usize) -> &[u8] {
        let mut consumed_size: usize = self.available_out_;
        let mut result: &[u8] = GetNextOut!(*self);
        if *size != 0 {
            consumed_size = min(*size, self.available_out_);
        }
        if consumed_size != 0 {
            self.next_out_ = NextOutIncrement(&self.next_out_, consumed_size as i32);
            self.available_out_ = self.available_out_.wrapping_sub(consumed_size);
            self.total_out_ = self.total_out_.wrapping_add(consumed_size as u64);
            CheckFlushCompleteInner(
                &mut self.stream_state_,
                self.available_out_,
                &mut self.next_out_,
            );
            *size = consumed_size;
        } else {
            *size = 0usize;
            result = &[];
        }
        result
    }
}
2992
/// Returns the encoder's version identifier as an opaque `u32`, mirroring the
/// C brotli `BrotliEncoderVersion` API. The hex grouping (0x0100_0f01) is
/// presumably major/minor/patch packed into bytes — callers should treat it
/// as opaque.
pub fn BrotliEncoderVersion() -> u32 {
    const VERSION: u32 = 0x0100_0f01;
    VERSION
}
2996
impl<Alloc: BrotliAlloc> BrotliEncoderStateStruct<Alloc> {
    /// Size in bytes of one input block (1 << lgblock), or 0 if the encoder
    /// failed to initialize.
    pub fn input_block_size(&mut self) -> usize {
        if !self.ensure_initialized() {
            return 0;
        }
        1 << self.params.lgblock
    }

    /// Legacy wrapper around `encode_data` using `i32` flags.
    ///
    /// On return, `*output` aliases the encoder's internal storage (which
    /// holds the encoded bytes) and `*out_size` is the byte count produced.
    /// The `'a` lifetime ties that borrow to `self`, so the encoder cannot be
    /// used again until the caller releases `output`.
    pub fn write_data<
        'a,
        MetablockCallback: FnMut(
            &mut interface::PredictionModeContextMap<InputReferenceMut>,
            &mut [interface::StaticCommand],
            interface::InputPair,
            &mut Alloc,
        ),
    >(
        &'a mut self,
        is_last: i32,
        force_flush: i32,
        out_size: &mut usize,
        output: &'a mut &'a mut [u8],
        metablock_callback: &mut MetablockCallback,
    ) -> bool {
        let ret = self.encode_data(is_last != 0, force_flush != 0, out_size, metablock_callback);
        *output = self.storage_.slice_mut();
        ret
    }
}
3030
// Round-trip smoke test; requires `std` for the allocator and test data.
#[cfg(feature = "std")]
mod test {
    #[cfg(test)]
    use alloc_stdlib::StandardAlloc;

    /// Compresses a known corpus file at quality 9 / lgwin 16, checks the
    /// exact compressed size (pins determinism of the encoder), then
    /// decompresses and verifies a byte-exact round trip.
    #[test]
    fn test_encoder_compress() {
        let input = include_bytes!("../../testdata/alice29.txt");
        let mut output_buffer = [0; 100000];
        let mut output_len = output_buffer.len();
        let ret = super::encoder_compress(
            StandardAlloc::default(),
            &mut StandardAlloc::default(),
            9,
            16,
            super::BrotliEncoderMode::BROTLI_MODE_GENERIC,
            input.len(),
            input,
            &mut output_len,
            &mut output_buffer,
            &mut |_, _, _, _| (),
        );
        assert!(ret);
        // NOTE: this exact size depends on the encoder's current output for
        // quality 9 / lgwin 16; it must be updated if the encoder changes.
        assert_eq!(output_len, 51737);
        let mut roundtrip = [0u8; 200000];
        let (_, s, t) = super::super::test::oneshot_decompress(
            &output_buffer[..output_len],
            &mut roundtrip[..],
        );
        assert_eq!(roundtrip[..t], input[..]);
        assert_eq!(s, output_len);
    }
}
3063}