use alloc::{Allocator, SliceWrapper, SliceWrapperMut};
use core;
use core::cmp::min;

use super::{
    kHashMul32, AnyHasher, BrotliEncoderParams, CloneWithAlloc, H9Opts, HasherSearchResult,
    HowPrepared, Struct1, fix_unbroken_len,
};
use crate::enc::combined_alloc::allocate;
use crate::enc::static_dict::{
    BrotliDictionary, FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32,
};
use crate::enc::util::floatX;

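/// Cost assigned to not-yet-reached Zopfli nodes; effectively "infinite" while
/// still representable as a 32-bit float.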
pub const kInfinity: floatX = 1.7e38;

#[derive(Clone, Copy, Debug)]
pub enum Union1 {
    cost(floatX),
    next(u32),
    shortcut(u32),
}

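/// One node of the Zopfli-style shortest-path graph, mirroring `ZopfliNode` from
/// the reference C implementation: `length` is the copy length used to reach this
/// byte, `distance` the backward distance of that copy, `dcode_insert_length` the
/// number of literal inserts before the copy (with the distance short code packed
/// into the upper bits), and `u` the cost/next/shortcut scratch value. The default
/// node is a length-1 literal with infinite cost.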
#[derive(Clone, Copy, Debug)]
pub struct ZopfliNode {
    pub length: u32,
    pub distance: u32,
    pub dcode_insert_length: u32,
    pub u: Union1,
}
impl Default for ZopfliNode {
    fn default() -> Self {
        ZopfliNode {
            length: 1,
            distance: 0,
            dcode_insert_length: 0,
            u: Union1::cost(kInfinity),
        }
    }
}

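/// Minimal allocation interface used for the hasher's bucket table: construct
/// filled with `init`, construct without initialization, and release.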
pub trait Allocable<T: Copy, AllocT: Allocator<T>> {
    fn new(m: &mut AllocT, init: T) -> Self;
    fn new_uninit(m: &mut AllocT) -> Self;
    fn free(&mut self, m: &mut AllocT);
}
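/// Compile-time limits for the binary-tree search: how many tree nodes may be
/// visited per lookup and the maximum match length the tree comparison uses.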
pub trait H10Params {
    fn max_tree_search_depth() -> u32;
    fn max_tree_comp_length() -> u32;
}

pub struct H10DefaultParams {}
impl H10Params for H10DefaultParams {
    #[inline(always)]
    fn max_tree_search_depth() -> u32 {
        64
    }
    #[inline(always)]
    fn max_tree_comp_length() -> u32 {
        128
    }
}

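/// Number of bits of the 4-byte hash used to index the bucket table
/// (`1 << BUCKET_BITS` entries); each entry holds the ring-buffer position that
/// currently roots the binary tree for that hash.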
const BUCKET_BITS: usize = 17;

pub struct H10Buckets<AllocU32: Allocator<u32>>(AllocU32::AllocatedMemory);

impl<AllocU32: Allocator<u32>> Allocable<u32, AllocU32> for H10Buckets<AllocU32> {
    fn new(m: &mut AllocU32, initializer: u32) -> H10Buckets<AllocU32> {
        let mut ret = m.alloc_cell(1 << BUCKET_BITS);
        for item in ret.slice_mut().iter_mut() {
            *item = initializer;
        }
        H10Buckets::<AllocU32>(ret)
    }
    fn new_uninit(m: &mut AllocU32) -> H10Buckets<AllocU32> {
        H10Buckets::<AllocU32>(m.alloc_cell(1 << BUCKET_BITS))
    }
    fn free(&mut self, m: &mut AllocU32) {
        m.free_cell(core::mem::take(&mut self.0));
    }
}

impl<AllocU32: Allocator<u32>> PartialEq<H10Buckets<AllocU32>> for H10Buckets<AllocU32> {
    fn eq(&self, other: &H10Buckets<AllocU32>) -> bool {
        self.0.slice() == other.0.slice()
    }
}

impl<AllocU32: Allocator<u32>> SliceWrapper<u32> for H10Buckets<AllocU32> {
    #[inline(always)]
    fn slice(&self) -> &[u32] {
        self.0.slice()
    }
}
impl<AllocU32: Allocator<u32>> SliceWrapperMut<u32> for H10Buckets<AllocU32> {
    #[inline(always)]
    fn slice_mut(&mut self) -> &mut [u32] {
        self.0.slice_mut()
    }
}

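/// Hash-to-binary-tree hasher used by the highest-quality backward-reference
/// search. `buckets_` holds one tree root per hash value and `forest` holds two
/// `u32` child slots (left, right) per window position; `invalid_pos_` is the
/// sentinel stored in empty buckets, and `ringbuffer_break` marks a boundary
/// that reported matches are not allowed to straddle (see `fix_unbroken_len`).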
pub struct H10<
    AllocU32: Allocator<u32>,
    Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
    Params: H10Params,
> where
    Buckets: PartialEq<Buckets>,
{
    pub window_mask_: usize,
    pub ringbuffer_break: Option<core::num::NonZeroUsize>,
    pub common: Struct1,
    pub buckets_: Buckets,
    pub invalid_pos_: u32,
    pub forest: AllocU32::AllocatedMemory,
    pub _params: core::marker::PhantomData<Params>,
}

impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > PartialEq<H10<AllocU32, Buckets, Params>> for H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    fn eq(&self, other: &H10<AllocU32, Buckets, Params>) -> bool {
        self.window_mask_ == other.window_mask_
            && self.common == other.common
            && self.buckets_ == other.buckets_
            && self.invalid_pos_ == other.invalid_pos_
            && self.forest.slice() == other.forest.slice()
            && self._params == other._params
            && self.ringbuffer_break == other.ringbuffer_break
    }
}

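/// Allocates an `H10` with the default parameters: a bucket table filled with the
/// invalid-position sentinel and a forest of two `u32` slots per window position
/// (capped at the input size for one-shot compression).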
pub fn InitializeH10<AllocU32: Allocator<u32>>(
    m32: &mut AllocU32,
    one_shot: bool,
    params: &BrotliEncoderParams,
    ringbuffer_break: Option<core::num::NonZeroUsize>,
    input_size: usize,
) -> H10<AllocU32, H10Buckets<AllocU32>, H10DefaultParams> {
    initialize_h10::<AllocU32, H10Buckets<AllocU32>>(
        m32,
        one_shot,
        params,
        input_size,
        ringbuffer_break,
    )
}
fn initialize_h10<
    AllocU32: Allocator<u32>,
    Buckets: SliceWrapperMut<u32> + SliceWrapper<u32> + Allocable<u32, AllocU32>,
>(
    m32: &mut AllocU32,
    one_shot: bool,
    params: &BrotliEncoderParams,
    input_size: usize,
    ringbuffer_break: Option<core::num::NonZeroUsize>,
) -> H10<AllocU32, Buckets, H10DefaultParams>
where
    Buckets: PartialEq<Buckets>,
{
    let mut num_nodes = 1 << params.lgwin;
    if one_shot && input_size < num_nodes {
        num_nodes = input_size;
    }
    let window_mask = (1 << params.lgwin) - 1;
    let invalid_pos = 0u32.wrapping_sub(window_mask);
    let buckets = <Buckets as Allocable<u32, AllocU32>>::new(m32, invalid_pos);
    H10::<AllocU32, Buckets, H10DefaultParams> {
        common: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        _params: core::marker::PhantomData::<H10DefaultParams>,
        window_mask_: window_mask as usize,
        invalid_pos_: invalid_pos,
        buckets_: buckets,
        forest: m32.alloc_cell(num_nodes * 2),
        ringbuffer_break,
    }
}

impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    pub fn free(&mut self, m32: &mut AllocU32) {
        m32.free_cell(core::mem::take(&mut self.forest));
        self.buckets_.free(m32);
    }
}
impl<
        Alloc: Allocator<u16> + Allocator<u32>,
        Buckets: Allocable<u32, Alloc> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > CloneWithAlloc<Alloc> for H10<Alloc, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = H10::<Alloc, Buckets, Params> {
            window_mask_: self.window_mask_,
            common: self.common.clone(),
            buckets_: Buckets::new_uninit(m),
            invalid_pos_: self.invalid_pos_,
            forest: allocate::<u32, _>(m, self.forest.len()),
            _params: core::marker::PhantomData::<Params>,
            ringbuffer_break: self.ringbuffer_break,
        };
        ret.buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.slice());
        ret.forest.slice_mut().clone_from_slice(self.forest.slice());
        ret
    }
}

impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > AnyHasher for H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        H9Opts {
            literal_byte_score: 340,
        }
    }
    #[inline(always)]
    fn PrepareDistanceCache(&self, _distance_cache: &mut [i32]) {}
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        Params::max_tree_comp_length() as usize
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        // Read the break point before `self` is reborrowed mutably for the call.
        let ringbuffer_break = self.ringbuffer_break;
        super::hq::StitchToPreviousBlockH10(
            self, num_bytes, position, ringbuffer, ringbuffer_mask, ringbuffer_break,
        )
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.common
    }
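    /// Multiplicative 32-bit hash of the first four bytes, keeping the top
    /// `BUCKET_BITS` bits.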
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        let h = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
        (h >> (32i32 - BUCKET_BITS as i32)) as usize
    }
    #[inline(always)]
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let max_backward: usize = self.window_mask_.wrapping_sub(16).wrapping_add(1);
        // Read the break point before `self` is reborrowed mutably for the call.
        let ringbuffer_break = self.ringbuffer_break;
        StoreAndFindMatchesH10(
            self,
            data,
            ix,
            mask,
            ringbuffer_break,
            Params::max_tree_comp_length() as usize,
            max_backward,
            &mut 0,
            &mut [],
        );
    }
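    /// Stores the final (up to) 63 positions of the range one by one; the earlier
    /// prefix is stored only at every 8th position, and only when it spans at
    /// least 512 positions.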
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        let mut i: usize = ix_start;
        let mut j: usize = ix_start;
        if ix_start.wrapping_add(63) <= ix_end {
            i = ix_end.wrapping_sub(63);
        }
        if ix_start.wrapping_add(512) <= i {
            while j < i {
                self.Store(data, mask, j);
                j = j.wrapping_add(8);
            }
        }
        while i < ix_end {
            self.Store(data, mask, i);
            i = i.wrapping_add(1);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn Prepare(&mut self, _one_shot: bool, _input_size: usize, _data: &[u8]) -> HowPrepared {
        if self.common.is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        let invalid_pos = self.invalid_pos_;
        for bucket in self.buckets_.slice_mut().iter_mut() {
            *bucket = invalid_pos;
        }
        self.common.is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }

    fn FindLongestMatch(
        &mut self,
        _dictionary: Option<&BrotliDictionary>,
        _dictionary_hash: &[u16],
        _data: &[u8],
        _ring_buffer_mask: usize,
        _ring_buffer_break: Option<core::num::NonZeroUsize>,
        _distance_cache: &[i32],
        _cur_ix: usize,
        _max_length: usize,
        _max_backward: usize,
        _gap: usize,
        _max_distance: usize,
        _out: &mut HasherSearchResult,
    ) -> bool {
        unimplemented!();
    }
}

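/// A match packed into 64 bits: the low 32 bits hold the backward distance and
/// the high 32 bits hold `length << 5 | length_code_delta` (the delta is zero for
/// ordinary matches; see `BackwardMatchMut::init_dictionary`).
///
/// Illustrative round trip (a sketch, not a doctest):
///
/// ```ignore
/// let mut raw = 0u64;
/// BackwardMatchMut(&mut raw).init(1024, 11);
/// let m = BackwardMatch(raw);
/// assert_eq!(m.distance(), 1024);
/// assert_eq!(m.length_and_code() >> 5, 11);
/// ```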
pub struct BackwardMatch(pub u64);

impl BackwardMatch {
    #[inline(always)]
    pub fn distance(&self) -> u32 {
        self.0 as u32
    }
    #[inline(always)]
    pub fn length_and_code(&self) -> u32 {
        (self.0 >> 32) as u32
    }
}
pub struct BackwardMatchMut<'a>(pub &'a mut u64);

impl<'a> BackwardMatchMut<'a> {
    #[inline(always)]
    pub fn distance(&self) -> u32 {
        *self.0 as u32
    }
    #[inline(always)]
    pub fn length_and_code(&self) -> u32 {
        (*self.0 >> 32) as u32
    }
    #[inline(always)]
    pub fn set_distance(&mut self, data: u32) {
        *self.0 &= 0xffffffff00000000;
        *self.0 |= u64::from(data)
    }
    #[inline(always)]
    pub fn set_length_and_code(&mut self, data: u32) {
        *self.0 = u64::from((*self.0) as u32) | (u64::from(data) << 32);
    }
    #[inline(always)]
    pub fn init(&mut self, dist: usize, len: usize) {
        self.set_distance(dist as u32);
        self.set_length_and_code((len << 5) as u32);
    }
    #[inline(always)]
    pub(crate) fn init_dictionary(&mut self, dist: usize, len: usize, len_code: usize) {
        self.set_distance(dist as u32);
        self.set_length_and_code((len << 5 | if len == len_code { 0 } else { len_code }) as u32);
    }
}

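// Each position owns two consecutive `forest` slots: index
// `2 * (pos & window_mask)` is the left child and the next slot is the right
// child of that position's tree node.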
macro_rules! LeftChildIndexH10 {
    ($xself: expr, $pos: expr) => {
        (2usize).wrapping_mul($pos & (*$xself).window_mask_)
    };
}
macro_rules! RightChildIndexH10 {
    ($xself: expr, $pos: expr) => {
        (2usize)
            .wrapping_mul($pos & (*$xself).window_mask_)
            .wrapping_add(1)
    };
}
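/// Inserts the string starting at `cur_ix` into the hash-to-binary-tree structure
/// and, while descending the tree, appends every match longer than `*best_len` to
/// `matches` (packed as `BackwardMatch`), updating `*best_len` as it goes. Returns
/// the number of matches written. The tree for the hash bucket is only re-rooted
/// at `cur_ix` when `max_length >= 128`; with a shorter lookahead the tree is left
/// unchanged and the call only reports matches.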
pub fn StoreAndFindMatchesH10<
    AllocU32: Allocator<u32>,
    Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
    Params: H10Params,
>(
    xself: &mut H10<AllocU32, Buckets, Params>,
    data: &[u8],
    cur_ix: usize,
    ring_buffer_mask: usize,
    ringbuffer_break: Option<core::num::NonZeroUsize>,
    max_length: usize,
    max_backward: usize,
    best_len: &mut usize,
    matches: &mut [u64],
) -> usize
where
    Buckets: PartialEq<Buckets>,
{
    let mut matches_offset = 0_usize;
    let cur_ix_masked = cur_ix & ring_buffer_mask;
    let max_comp_len = min(max_length, 128);
    let should_reroot_tree = max_length >= 128;
    let key = xself.HashBytes(&data[cur_ix_masked..]);
    let forest = xself.forest.slice_mut();
    let mut prev_ix = xself.buckets_.slice()[key] as usize;
    let mut node_left = LeftChildIndexH10!(xself, cur_ix);
    let mut node_right = RightChildIndexH10!(xself, cur_ix);
    let mut best_len_left = 0_usize;
    let mut best_len_right = 0_usize;
    let mut depth_remaining = 64_usize;

    if should_reroot_tree {
        xself.buckets_.slice_mut()[key] = cur_ix as u32;
    }

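    // Walk the binary tree for this hash bucket: every visited node is an earlier
    // position with the same hash, and the walk is bounded by `depth_remaining`
    // and `max_backward`.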
    loop {
        let backward = cur_ix.wrapping_sub(prev_ix);
        let prev_ix_masked = prev_ix & ring_buffer_mask;
        if backward == 0 || backward > max_backward || depth_remaining == 0 {
            if should_reroot_tree {
                forest[node_left] = xself.invalid_pos_;
                forest[node_right] = xself.invalid_pos_;
            }
            break;
        }

        let cur_len = min(best_len_left, best_len_right);

        let len = fix_unbroken_len(
            cur_len.wrapping_add(FindMatchLengthWithLimit(
                &data[cur_ix_masked.wrapping_add(cur_len)..],
                &data[prev_ix_masked.wrapping_add(cur_len)..],
                max_length.wrapping_sub(cur_len),
            )),
            prev_ix_masked,
            cur_ix_masked,
            ringbuffer_break,
        );

        if matches_offset != matches.len() && len > *best_len {
            *best_len = len;
            BackwardMatchMut(&mut matches[matches_offset]).init(backward, len);
            matches_offset += 1;
        }

        if len >= max_comp_len {
            if should_reroot_tree {
                forest[node_left] = forest[LeftChildIndexH10!(xself, prev_ix)];
                forest[node_right] = forest[RightChildIndexH10!(xself, prev_ix)];
            }
            break;
        }

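        // The first `len` bytes agree; the byte at offset `len` decides whether the
        // current string sorts after or before the one at `prev_ix`, so continue
        // into the right or left subtree of `prev_ix` respectively, re-linking
        // child pointers along the way when the tree is being re-rooted.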
        if data[cur_ix_masked.wrapping_add(len)] > data[prev_ix_masked.wrapping_add(len)] {
            best_len_left = len;
            if should_reroot_tree {
                forest[node_left] = prev_ix as u32;
            }
            node_left = RightChildIndexH10!(xself, prev_ix);
            prev_ix = forest[node_left] as usize;
        } else {
            best_len_right = len;
            if should_reroot_tree {
                forest[node_right] = prev_ix as u32;
            }
            node_right = LeftChildIndexH10!(xself, prev_ix);
            prev_ix = forest[node_right] as usize;
        }

        depth_remaining = depth_remaining.wrapping_sub(1);
    }

    matches_offset
}