#![allow(dead_code, unused_imports)]
use super::{
    kDistanceCacheIndex, kDistanceCacheOffset, kHashMul32, kHashMul64, kHashMul64Long,
    kInvalidMatch, AnyHasher, BrotliEncoderParams, BrotliHasherParams, CloneWithAlloc, H9Opts,
    HasherSearchResult, HowPrepared, Struct1,
};
use alloc;
use alloc::{Allocator, SliceWrapper, SliceWrapperMut};
use core;
use core::cmp::{max, min};
use enc::command::{
    CombineLengthCodes, Command, ComputeDistanceCode, GetCopyLengthCode, GetInsertLengthCode,
    PrefixEncodeCopyDistance,
};
use enc::constants::{kCopyExtra, kInsExtra};
use enc::dictionary_hash::kStaticDictionaryHash;
use enc::literal_cost::BrotliEstimateBitCostsForLiterals;
use enc::static_dict::{
    kBrotliEncDictionary, BrotliDictionary, BrotliFindAllStaticDictionaryMatches,
};
use enc::static_dict::{
    FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
};
use enc::util::{floatX, FastLog2, Log2FloorNonZero};

pub const kInfinity: floatX = 1.7e38 as floatX;
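// Stand-in for the C union inside ZopfliNode: only one variant is live at a
// time, depending on which phase of the Zopfli pass is reading the node
// (`cost` while costs are being computed, `next`/`shortcut` afterwards).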
#[derive(Clone, Copy, Debug)]
pub enum Union1 {
    cost(floatX),
    next(u32),
    shortcut(u32),
}

#[derive(Clone, Copy, Debug)]
pub struct ZopfliNode {
    // Best length to get up to this byte (not including this byte itself);
    // the highest 7 bits are used to reconstruct the length code.
    pub length: u32,
    // Distance associated with the length.
    pub distance: u32,
    // Number of literal inserts before this copy; the highest 5 bits hold the
    // distance short code + 1 (or zero if no short code was used).
    pub dcode_insert_length: u32,
    pub u: Union1,
}
impl Default for ZopfliNode {
    fn default() -> Self {
        ZopfliNode {
            length: 1,
            distance: 0,
            dcode_insert_length: 0,
            u: Union1::cost(kInfinity),
        }
    }
}

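// Small allocation interface for the bucket table: create filled with an
// initial value, create uninitialized, or free through an `Allocator<T>`.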
pub trait Allocable<T: Copy, AllocT: Allocator<T>> {
    fn new(m: &mut AllocT, init: T) -> Self;
    fn new_uninit(m: &mut AllocT) -> Self;
    fn free(&mut self, m: &mut AllocT);
}
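// Tunables for the binary-tree match search; the defaults below correspond to
// the MAX_TREE_SEARCH_DEPTH (64) and MAX_TREE_COMP_LENGTH (128) constants of
// the reference C implementation.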
pub trait H10Params {
    fn max_tree_search_depth() -> u32;
    fn max_tree_comp_length() -> u32;
}

pub struct H10DefaultParams {}
impl H10Params for H10DefaultParams {
    #[inline(always)]
    fn max_tree_search_depth() -> u32 {
        64
    }
    #[inline(always)]
    fn max_tree_comp_length() -> u32 {
        128
    }
}

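// 17-bit hash keys: the bucket table holds 1 << 17 entries, each recording the
// most recent position whose 4-byte hash mapped to that key.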
const BUCKET_BITS: usize = 17;

pub struct H10Buckets<AllocU32: Allocator<u32>>(AllocU32::AllocatedMemory);

impl<AllocU32: Allocator<u32>> Allocable<u32, AllocU32> for H10Buckets<AllocU32> {
    fn new(m: &mut AllocU32, initializer: u32) -> H10Buckets<AllocU32> {
        let mut ret = m.alloc_cell(1 << BUCKET_BITS);
        for item in ret.slice_mut().iter_mut() {
            *item = initializer;
        }
        H10Buckets::<AllocU32>(ret)
    }
    fn new_uninit(m: &mut AllocU32) -> H10Buckets<AllocU32> {
        H10Buckets::<AllocU32>(m.alloc_cell(1 << BUCKET_BITS))
    }
    fn free(&mut self, m: &mut AllocU32) {
        m.free_cell(core::mem::take(&mut self.0));
    }
}

impl<AllocU32: Allocator<u32>> PartialEq<H10Buckets<AllocU32>> for H10Buckets<AllocU32> {
    fn eq(&self, other: &H10Buckets<AllocU32>) -> bool {
        self.0.slice() == other.0.slice()
    }
}

impl<AllocU32: Allocator<u32>> SliceWrapper<u32> for H10Buckets<AllocU32> {
    #[inline(always)]
    fn slice(&self) -> &[u32] {
        self.0.slice()
    }
}
impl<AllocU32: Allocator<u32>> SliceWrapperMut<u32> for H10Buckets<AllocU32> {
    #[inline(always)]
    fn slice_mut(&mut self) -> &mut [u32] {
        self.0.slice_mut()
    }
}

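// Hash-to-binary-tree hasher (H10 in the reference encoder). `buckets_` maps a
// hash of the upcoming bytes to the most recent position stored for that hash,
// and `forest` holds two child links per window position, so every bucket
// heads a binary tree of earlier occurrences ordered by their suffixes.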
pub struct H10<
    AllocU32: Allocator<u32>,
    Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
    Params: H10Params,
> where
    Buckets: PartialEq<Buckets>,
{
    pub window_mask_: usize,
    pub common: Struct1,
    pub buckets_: Buckets,
    pub invalid_pos_: u32,
    pub forest: AllocU32::AllocatedMemory,
    pub _params: core::marker::PhantomData<Params>,
}

impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > PartialEq<H10<AllocU32, Buckets, Params>> for H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    fn eq(&self, other: &H10<AllocU32, Buckets, Params>) -> bool {
        self.window_mask_ == other.window_mask_
            && self.common == other.common
            && self.buckets_ == other.buckets_
            && self.invalid_pos_ == other.invalid_pos_
            && self.forest.slice() == other.forest.slice()
            && self._params == other._params
    }
}

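// The forest needs two u32 child links per node, hence the `num_nodes * 2`
// allocation below. Buckets start out at `invalid_pos`, a position far enough
// in the past that any lookup immediately falls outside the window.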
pub fn InitializeH10<AllocU32: Allocator<u32>>(
    m32: &mut AllocU32,
    one_shot: bool,
    params: &BrotliEncoderParams,
    input_size: usize,
) -> H10<AllocU32, H10Buckets<AllocU32>, H10DefaultParams> {
    initialize_h10::<AllocU32, H10Buckets<AllocU32>>(m32, one_shot, params, input_size)
}
fn initialize_h10<
    AllocU32: Allocator<u32>,
    Buckets: SliceWrapperMut<u32> + SliceWrapper<u32> + Allocable<u32, AllocU32>,
>(
    m32: &mut AllocU32,
    one_shot: bool,
    params: &BrotliEncoderParams,
    input_size: usize,
) -> H10<AllocU32, Buckets, H10DefaultParams>
where
    Buckets: PartialEq<Buckets>,
{
    let mut num_nodes = 1 << params.lgwin;
    if one_shot && input_size < num_nodes {
        num_nodes = input_size;
    }
    let window_mask = (1 << params.lgwin) - 1;
    let invalid_pos = 0u32.wrapping_sub(window_mask);
    let buckets = <Buckets as Allocable<u32, AllocU32>>::new(m32, invalid_pos);
    H10::<AllocU32, Buckets, H10DefaultParams> {
        common: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        _params: core::marker::PhantomData::<H10DefaultParams>,
        window_mask_: window_mask as usize,
        invalid_pos_: invalid_pos,
        buckets_: buckets,
        forest: m32.alloc_cell(num_nodes * 2),
    }
}

impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    pub fn free(&mut self, m32: &mut AllocU32) {
        m32.free_cell(core::mem::take(&mut self.forest));
        self.buckets_.free(m32);
    }
}
impl<
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
        Buckets: Allocable<u32, Alloc> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > CloneWithAlloc<Alloc> for H10<Alloc, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = H10::<Alloc, Buckets, Params> {
            window_mask_: self.window_mask_,
            common: self.common.clone(),
            buckets_: Buckets::new_uninit(m),
            invalid_pos_: self.invalid_pos_,
            forest: <Alloc as Allocator<u32>>::alloc_cell(m, self.forest.len()),
            _params: core::marker::PhantomData::<Params>,
        };
        ret.buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.slice());
        ret.forest.slice_mut().clone_from_slice(self.forest.slice());
        ret
    }
}

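// AnyHasher plumbing for H10: hashes cover 4 bytes, but storing a position
// requires `max_tree_comp_length()` bytes of lookahead because the tree
// compares that much context. `FindLongestMatch` is left unimplemented; this
// hasher appears to be driven only through the Zopfli path in `hq`, which
// calls `StoreAndFindMatchesH10` directly.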
impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > AnyHasher for H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        H9Opts {
            literal_byte_score: 340,
        }
    }
    #[inline(always)]
    fn PrepareDistanceCache(&self, _distance_cache: &mut [i32]) {}
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        Params::max_tree_comp_length() as usize
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        super::hq::StitchToPreviousBlockH10(self, num_bytes, position, ringbuffer, ringbuffer_mask)
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.common
    }
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        let h = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
        (h >> (32i32 - BUCKET_BITS as i32)) as usize
    }
    #[inline(always)]
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let max_backward: usize = self.window_mask_.wrapping_sub(16).wrapping_add(1);
        StoreAndFindMatchesH10(
            self,
            data,
            ix,
            mask,
            Params::max_tree_comp_length() as usize,
            max_backward,
            &mut 0,
            &mut [],
        );
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        let mut i: usize = ix_start;
        let mut j: usize = ix_start;
        if ix_start.wrapping_add(63) <= ix_end {
            i = ix_end.wrapping_sub(63);
        }
        if ix_start.wrapping_add(512) <= i {
            while j < i {
                self.Store(data, mask, j);
                j = j.wrapping_add(8);
            }
        }
        while i < ix_end {
            self.Store(data, mask, i);
            i = i.wrapping_add(1);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn Prepare(&mut self, _one_shot: bool, _input_size: usize, _data: &[u8]) -> HowPrepared {
        if self.common.is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        let invalid_pos = self.invalid_pos_;
        for bucket in self.buckets_.slice_mut().iter_mut() {
            *bucket = invalid_pos;
        }
        self.common.is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }

    fn FindLongestMatch(
        &mut self,
        _dictionary: Option<&BrotliDictionary>,
        _dictionary_hash: &[u16],
        _data: &[u8],
        _ring_buffer_mask: usize,
        _distance_cache: &[i32],
        _cur_ix: usize,
        _max_length: usize,
        _max_backward: usize,
        _gap: usize,
        _max_distance: usize,
        _out: &mut HasherSearchResult,
    ) -> bool {
        unimplemented!();
    }
}

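// A match packed into a single u64: the backward distance sits in the low 32
// bits and `length << 5` in the high 32 bits; for dictionary matches the low
// 5 bits of the high word carry the length code when it differs from the
// copy length.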
pub struct BackwardMatch(pub u64);

impl BackwardMatch {
    #[inline(always)]
    pub fn distance(&self) -> u32 {
        self.0 as u32
    }
    #[inline(always)]
    pub fn length_and_code(&self) -> u32 {
        (self.0 >> 32) as u32
    }
}
pub struct BackwardMatchMut<'a>(pub &'a mut u64);

impl<'a> BackwardMatchMut<'a> {
    #[inline(always)]
    pub fn distance(&self) -> u32 {
        *self.0 as u32
    }
    #[inline(always)]
    pub fn length_and_code(&self) -> u32 {
        (*self.0 >> 32) as u32
    }
    #[inline(always)]
    pub fn set_distance(&mut self, data: u32) {
        *self.0 &= 0xffffffff00000000;
        *self.0 |= u64::from(data);
    }
    #[inline(always)]
    pub fn set_length_and_code(&mut self, data: u32) {
        *self.0 = u64::from((*self.0) as u32) | (u64::from(data) << 32);
    }
    #[inline(always)]
    pub fn init(&mut self, dist: usize, len: usize) {
        self.set_distance(dist as u32);
        self.set_length_and_code((len << 5) as u32);
    }
    #[inline(always)]
    pub(crate) fn init_dictionary(&mut self, dist: usize, len: usize, len_code: usize) {
        self.set_distance(dist as u32);
        self.set_length_and_code((len << 5 | if len == len_code { 0 } else { len_code }) as u32);
    }
}

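// Every window position owns a pair of adjacent `forest` slots: the left child
// link at index `2 * (pos & window_mask_)` and the right child link just after it.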
macro_rules! LeftChildIndexH10 {
    ($xself: expr, $pos: expr) => {
        (2usize).wrapping_mul($pos & (*$xself).window_mask_)
    };
}
macro_rules! RightChildIndexH10 {
    ($xself: expr, $pos: expr) => {
        (2usize)
            .wrapping_mul($pos & (*$xself).window_mask_)
            .wrapping_add(1)
    };
}
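// Inserts the string starting at `cur_ix` into the binary tree of its hash
// bucket and, while descending, records every match that improves on
// `*best_len` into `matches` as packed BackwardMatch words, returning the
// number of matches written. The tree is only re-rooted at `cur_ix` when at
// least 128 bytes of lookahead (the default max_tree_comp_length) are
// available; the descent is capped at 64 steps (the default search depth).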
pub fn StoreAndFindMatchesH10<
    AllocU32: Allocator<u32>,
    Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
    Params: H10Params,
>(
    xself: &mut H10<AllocU32, Buckets, Params>,
    data: &[u8],
    cur_ix: usize,
    ring_buffer_mask: usize,
    max_length: usize,
    max_backward: usize,
    best_len: &mut usize,
    matches: &mut [u64],
) -> usize
where
    Buckets: PartialEq<Buckets>,
{
    let mut matches_offset = 0usize;
    let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
    let max_comp_len: usize = min(max_length, 128usize);
    let should_reroot_tree = max_length >= 128;
    let key = xself.HashBytes(&data[cur_ix_masked..]);
    let forest: &mut [u32] = xself.forest.slice_mut();
    let mut prev_ix: usize = xself.buckets_.slice()[key] as usize;
    let mut node_left: usize = LeftChildIndexH10!(xself, cur_ix);
    let mut node_right: usize = RightChildIndexH10!(xself, cur_ix);
    let mut best_len_left: usize = 0usize;
    let mut best_len_right: usize = 0usize;
    if should_reroot_tree {
        xself.buckets_.slice_mut()[key] = cur_ix as u32;
    }
    let mut depth_remaining: usize = 64usize;
    loop {
        let backward: usize = cur_ix.wrapping_sub(prev_ix);
        let prev_ix_masked: usize = prev_ix & ring_buffer_mask;
        if backward == 0usize || backward > max_backward || depth_remaining == 0usize {
            if should_reroot_tree {
                forest[node_left] = xself.invalid_pos_;
                forest[node_right] = xself.invalid_pos_;
            }
            break;
        }
        let cur_len: usize = min(best_len_left, best_len_right);
        let len: usize = cur_len.wrapping_add(FindMatchLengthWithLimit(
            &data[cur_ix_masked.wrapping_add(cur_len)..],
            &data[prev_ix_masked.wrapping_add(cur_len)..],
            max_length.wrapping_sub(cur_len),
        ));
        if matches_offset != matches.len() && (len > *best_len) {
            *best_len = len;
            BackwardMatchMut(&mut matches[matches_offset]).init(backward, len);
            matches_offset += 1;
        }
        if len >= max_comp_len {
            if should_reroot_tree {
                forest[node_left] = forest[LeftChildIndexH10!(xself, prev_ix)];
                forest[node_right] = forest[RightChildIndexH10!(xself, prev_ix)];
            }
            break;
        }
        if data[cur_ix_masked.wrapping_add(len)] as i32
            > data[prev_ix_masked.wrapping_add(len)] as i32
        {
            best_len_left = len;
            if should_reroot_tree {
                forest[node_left] = prev_ix as u32;
            }
            node_left = RightChildIndexH10!(xself, prev_ix);
            prev_ix = forest[node_left] as usize;
        } else {
            best_len_right = len;
            if should_reroot_tree {
                forest[node_right] = prev_ix as u32;
            }
            node_right = LeftChildIndexH10!(xself, prev_ix);
            prev_ix = forest[node_right] as usize;
        }
        depth_remaining = depth_remaining.wrapping_sub(1);
    }
    matches_offset
}
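
// Illustrative sanity check (not part of the original module) for the
// BackwardMatch packing described above; it only exercises the setters and
// getters defined in this file.
#[cfg(test)]
mod backward_match_packing_test {
    use super::{BackwardMatch, BackwardMatchMut};

    #[test]
    fn packing_roundtrip() {
        let mut word = 0u64;
        // Distance 1234, copy length 56: the distance goes to the low 32 bits,
        // length << 5 to the high 32 bits.
        BackwardMatchMut(&mut word).init(1234, 56);
        let m = BackwardMatch(word);
        assert_eq!(m.distance(), 1234);
        assert_eq!(m.length_and_code() >> 5, 56);
        assert_eq!(m.length_and_code() & 31, 0);
    }
}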