brotli/enc/backward_references/mod.rs

#![allow(dead_code)]
mod benchmark;
pub mod hash_to_binary_tree;
pub mod hq;
mod test;

use super::super::alloc::{Allocator, SliceWrapper, SliceWrapperMut};
use super::command::{BrotliDistanceParams, Command, ComputeDistanceCode};
use super::dictionary_hash::kStaticDictionaryHash;
use super::hash_to_binary_tree::{H10Buckets, H10DefaultParams, ZopfliNode, H10};
use super::static_dict::BrotliDictionary;
use super::static_dict::{
    FindMatchLengthWithLimit, FindMatchLengthWithLimitMin4, BROTLI_UNALIGNED_LOAD32,
    BROTLI_UNALIGNED_LOAD64,
};
use super::util::{floatX, Log2FloorNonZero};
use core::cmp::{max, min};

static kBrotliMinWindowBits: i32 = 10;
static kBrotliMaxWindowBits: i32 = 24;
pub static kInvalidMatch: u32 = 0x0fff_ffff;
static kCutoffTransformsCount: u32 = 10;
static kCutoffTransforms: u64 = 0x071b_520a_da2d_3200;
pub static kHashMul32: u32 = 0x1e35_a7bd;
pub static kHashMul64: u64 = 0x1e35_a7bd_1e35_a7bd;
pub static kHashMul64Long: u64 = 0x1fe3_5a7b_d357_9bd3;

#[derive(PartialEq, Eq, Copy, Clone, Debug)]
#[repr(C)]
pub enum BrotliEncoderMode {
    BROTLI_MODE_GENERIC = 0,
    BROTLI_MODE_TEXT = 1,
    BROTLI_MODE_FONT = 2,
    BROTLI_FORCE_LSB_PRIOR = 3,
    BROTLI_FORCE_MSB_PRIOR = 4,
    BROTLI_FORCE_UTF8_PRIOR = 5,
    BROTLI_FORCE_SIGNED_PRIOR = 6,
}

#[derive(Clone, Copy, Debug, PartialEq)]
pub struct BrotliHasherParams {
    /// type of hasher to use (default: type 6, but others trade off speed vs memory)
    pub type_: i32,
    /// log2 of the number of buckets in the hash table (defaults to quality - 1)
    pub bucket_bits: i32,
    /// log2 of the number of potential matches held per bucket (hash collisions)
    pub block_bits: i32,
    /// number of bytes of a potential match to hash
    pub hash_len: i32,
    /// number of previous distance matches to check for future matches (defaults to 16)
    pub num_last_distances_to_check: i32,
    /// how much to weigh distance vs an extra byte of copy match when comparing possible copy sources
    pub literal_byte_score: i32,
}

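// Illustrative sketch (not part of the original source): one plausible way to
// fill in the fields above by hand. The values are hypothetical examples, not
// the library's defaults; real defaults are derived from the quality setting
// in enc::encode.
#[cfg(test)]
fn example_hasher_params() -> BrotliHasherParams {
    BrotliHasherParams {
        type_: 6,                        // hasher family (see `type_` docs above)
        bucket_bits: 15,                 // 2^15 buckets
        block_bits: 6,                   // 2^6 slots per bucket
        hash_len: 5,                     // hash the first 5 bytes of a candidate
        num_last_distances_to_check: 16, // probe 16 cached distances
        literal_byte_score: 0,           // 0 keeps the built-in weighting (540)
    }
}
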
#[derive(Clone, Debug)]
pub struct BrotliEncoderParams {
    pub dist: BrotliDistanceParams,
    /// whether this brotli file is generic data, text, or font data
    pub mode: BrotliEncoderMode,
    /// quality param between 0 and 11 (11 is smallest but takes longest to encode)
    pub quality: i32,
    pub q9_5: bool,
    /// log of how big the ring buffer should be for copying prior data
    pub lgwin: i32,
    /// log of how often metablocks should be serialized
    pub lgblock: i32,
    /// how big the source file is (or 0 if no hint is provided)
    pub size_hint: usize,
    /// avoid serializing out priors for literal sections in favor of decode speed
    pub disable_literal_context_modeling: i32,
    pub hasher: BrotliHasherParams,
    /// produce an IR of the compressed file
    pub log_meta_block: bool,
    /// attempt to detect how many bytes before the current byte generate the best prediction of it
    /// * 0 = off (stride 1 always)
    /// * 1 = on per 16th of a file
    /// * 2 = on per block type switch
    pub stride_detection_quality: u8,
    /// if nonzero, will search for high entropy strings and log them differently to the IR
    pub high_entropy_detection_quality: u8,
    /// if nonzero, will search for the temporal locality and effectiveness of the priors
    /// for literals; the best adaptation and forgetfulness will be logged per metablock to the IR
    pub cdf_adaptation_detection: u8,
    /// whether to check, on a per-context-map basis, if the previous byte or the context_map is the better predictor
    pub prior_bitmask_detection: u8,
    /// for prior bitmask detection: stride_low, stride_speed, cm_low, cm_speed
    pub literal_adaptation: [(u16, u16); 4],
    pub large_window: bool,
    /// avoid searching for the best ndirect vs npostfix parameters for distance
    pub avoid_distance_prefix_search: bool,
    /// construct brotli in such a way that it may be concatenated with another brotli file using appropriate bit ops
    pub catable: bool,
    /// can use the dictionary (default yes unless catable is set)
    pub use_dictionary: bool,
    /// construct brotli in such a way that another concatenable brotli file may be appended
    pub appendable: bool,
    /// include a magic number, version number, and size_hint at the beginning
    pub magic_number: bool,
    /// prefer to compute the map of previously seen strings
    /// just once for all the threads at the beginning, since they overlap significantly
    pub favor_cpu_efficiency: bool,
}

impl Default for BrotliEncoderParams {
    fn default() -> BrotliEncoderParams {
        super::encode::BrotliEncoderInitParams()
    }
}
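
// Sketch: since BrotliEncoderParams implements Default (above), callers can
// override a few fields with struct-update syntax. The particular values here
// are illustrative only, not recommendations.
#[cfg(test)]
fn example_encoder_params() -> BrotliEncoderParams {
    BrotliEncoderParams {
        quality: 9, // 0..=11; higher is smaller but slower
        lgwin: 22,  // log2 of the ring buffer size
        ..BrotliEncoderParams::default()
    }
}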

#[derive(Clone, Copy, Default, PartialEq)]
pub struct H9Opts {
    pub literal_byte_score: u32,
}
pub enum HowPrepared {
    ALREADY_PREPARED,
    NEWLY_PREPARED,
}
#[derive(Clone, PartialEq)]
pub struct Struct1 {
    pub params: BrotliHasherParams,
    pub is_prepared_: i32,
    pub dict_num_lookups: usize,
    pub dict_num_matches: usize,
}

fn LiteralSpreeLengthForSparseSearch(params: &BrotliEncoderParams) -> usize {
    (if params.quality < 9 { 64i32 } else { 512i32 }) as usize
}

pub struct HasherSearchResult {
    pub len: usize,
    pub len_x_code: usize,
    pub distance: usize,
    pub score: u64,
}

pub trait CloneWithAlloc<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self;
}

pub trait AnyHasher {
    fn Opts(&self) -> H9Opts;
    fn GetHasherCommon(&mut self) -> &mut Struct1;
    fn HashBytes(&self, data: &[u8]) -> usize;
    fn HashTypeLength(&self) -> usize;
    fn StoreLookahead(&self) -> usize;
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]);
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool;
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize);
    fn Store4Vec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        for i in 0..4 {
            self.Store(data, mask, ix + i * 4);
        }
    }
    fn StoreEvenVec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        for i in 0..4 {
            self.Store(data, mask, ix + i * 2);
        }
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize);
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize);
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared;
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    );
}

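// Sketch (not in the original source) of how an AnyHasher implementation is
// typically driven: prepare once, then bulk-insert every hashable position.
// `warm_up_hasher` is a hypothetical helper, not a library API; it mirrors
// the logic of StoreLookaheadThenStore below.
#[cfg(test)]
fn warm_up_hasher<T: AnyHasher>(hasher: &mut T, data: &[u8]) {
    hasher.Prepare(true, data.len(), data);
    let overlap = hasher.StoreLookahead().wrapping_sub(1);
    if data.len() > overlap {
        // positions within `overlap` of the end lack enough bytes to hash
        hasher.BulkStoreRange(data, !0, 0, data.len() - overlap);
    }
}
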
pub fn StitchToPreviousBlockInternal<T: AnyHasher>(
    handle: &mut T,
    num_bytes: usize,
    position: usize,
    ringbuffer: &[u8],
    ringbuffer_mask: usize,
) {
    if num_bytes >= handle.HashTypeLength().wrapping_sub(1) && position >= 3 {
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(3));
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(2));
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(1));
    }
}

pub fn StoreLookaheadThenStore<T: AnyHasher>(hasher: &mut T, size: usize, dict: &[u8]) {
    let overlap = hasher.StoreLookahead().wrapping_sub(1);
    if size > overlap {
        hasher.BulkStoreRange(dict, !0, 0, size - overlap);
    }
}

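// Worked example of the overlap arithmetic in StoreLookaheadThenStore: with
// StoreLookahead() == 8 and a 100-byte dictionary, overlap == 7, so positions
// 0..93 are inserted; the final 7 positions lack the 8 bytes a hash consumes.
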
pub trait BasicHashComputer {
    fn HashBytes(&self, data: &[u8]) -> u32;
    fn BUCKET_BITS(&self) -> i32;
    fn USE_DICTIONARY(&self) -> i32;
    fn BUCKET_SWEEP(&self) -> i32;
}
pub struct BasicHasher<Buckets: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> {
    pub GetHasherCommon: Struct1,
    pub buckets_: Buckets,
    pub h9_opts: H9Opts,
}

impl<A: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> PartialEq<BasicHasher<A>>
    for BasicHasher<A>
{
    fn eq(&self, other: &BasicHasher<A>) -> bool {
        self.GetHasherCommon == other.GetHasherCommon
            && self.h9_opts == other.h9_opts
            && self.buckets_.slice() == other.buckets_.slice()
    }
}

impl<T: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> BasicHasher<T> {
    fn StoreRangeOptBasic(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        let lookahead = 8;
        if ix_end >= ix_start + lookahead * 2 {
            let chunk_count = (ix_end - ix_start) / 4;
            for chunk_id in 0..chunk_count {
                let i = (ix_start + chunk_id * 4) & mask;
                let word11 = data.split_at(i).1.split_at(11).0;
                let mixed0 = self.HashBytes(word11);
                let mixed1 = self.HashBytes(word11.split_at(1).1);
                let mixed2 = self.HashBytes(word11.split_at(2).1);
                let mixed3 = self.HashBytes(word11.split_at(3).1);
                let off: u32 = (i >> 3).wrapping_rem(self.buckets_.BUCKET_SWEEP() as usize) as u32;
                let offset0: usize = mixed0 + off as usize;
                let offset1: usize = mixed1 + off as usize;
                let offset2: usize = mixed2 + off as usize;
                let offset3: usize = mixed3 + off as usize;
                self.buckets_.slice_mut()[offset0] = i as u32;
                self.buckets_.slice_mut()[offset1] = i as u32 + 1;
                self.buckets_.slice_mut()[offset2] = i as u32 + 2;
                self.buckets_.slice_mut()[offset3] = i as u32 + 3;
            }
            return ix_start + chunk_count * 4;
        }
        ix_start
    }
}
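// Note on StoreRangeOptBasic above: a single 11-byte window (`word11`)
// supplies the four overlapping 8-byte hash inputs for positions i..i+3,
// so each chunk of four stores performs one slice fetch instead of four.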
pub struct H2Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 65537
}
impl<T: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> AnyHasher for BasicHasher<T> {
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        self.h9_opts
    }
    #[allow(unused_variables)]
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {}
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        8
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        8
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask);
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.GetHasherCommon
    }
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        self.buckets_.HashBytes(data) as usize
    }
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let (_, data_window) = data.split_at(ix & mask);
        let key: u32 = self.HashBytes(data_window) as u32;
        let off: u32 = (ix >> 3).wrapping_rem(self.buckets_.BUCKET_SWEEP() as usize) as u32;
        self.buckets_.slice_mut()[key.wrapping_add(off) as usize] = ix as u32;
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in self.StoreRangeOptBasic(data, mask, ix_start, ix_end)..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        self.StoreRange(data, mask, ix_start, ix_end);
    }
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
        if self.GetHasherCommon.is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        let partial_prepare_threshold = (4 << self.buckets_.BUCKET_BITS()) >> 7;
        if one_shot && input_size <= partial_prepare_threshold {
            for i in 0..input_size {
                let key = self.HashBytes(&data[i..]);
                let bs = self.buckets_.BUCKET_SWEEP() as usize;
                for item in self.buckets_.slice_mut()[key..(key + bs)].iter_mut() {
                    *item = 0;
                }
            }
        } else {
            for item in self.buckets_.slice_mut().iter_mut() {
                *item = 0;
            }
        }
        self.GetHasherCommon.is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }
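    // Note on Prepare above: for small one-shot inputs it is cheaper to clear
    // only the buckets the input will actually touch than to zero the whole
    // table; the threshold works out to bucket_size / 32, e.g. 2048 bytes at
    // the 16 bucket bits used by H2Sub.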

    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        let opts = self.Opts();
        let best_len_in: usize = out.len;
        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
        let key: u32 = self.HashBytes(&data[cur_ix_masked..]) as u32;
        let mut compare_char: i32 = data[cur_ix_masked.wrapping_add(best_len_in)] as i32;
        let mut best_score: u64 = out.score;
        let mut best_len: usize = best_len_in;
        let cached_backward: usize = distance_cache[0] as usize;
        let mut prev_ix: usize = cur_ix.wrapping_sub(cached_backward);
        let mut is_match_found = false;
        out.len_x_code = 0usize;
        if prev_ix < cur_ix {
            prev_ix &= ring_buffer_mask as u32 as usize;
            if compare_char == data[prev_ix.wrapping_add(best_len)] as i32 {
                let len: usize = FindMatchLengthWithLimitMin4(
                    &data[prev_ix..],
                    &data[cur_ix_masked..],
                    max_length,
                );
                if len != 0 {
                    best_score = BackwardReferenceScoreUsingLastDistance(len, opts);
                    best_len = len;
                    out.len = len;
                    out.distance = cached_backward;
                    out.score = best_score;
                    compare_char = data[cur_ix_masked.wrapping_add(best_len)] as i32;
                    if self.buckets_.BUCKET_SWEEP() == 1i32 {
                        self.buckets_.slice_mut()[key as usize] = cur_ix as u32;
                        return true;
                    } else {
                        is_match_found = true;
                    }
                }
            }
        }
        let bucket_sweep = self.buckets_.BUCKET_SWEEP();
        if bucket_sweep == 1i32 {
            prev_ix = self.buckets_.slice()[key as usize] as usize;
            self.buckets_.slice_mut()[key as usize] = cur_ix as u32;
            let backward: usize = cur_ix.wrapping_sub(prev_ix);
            prev_ix &= ring_buffer_mask as u32 as usize;
            if compare_char != data[prev_ix.wrapping_add(best_len_in)] as i32 {
                return false;
            }
            if backward == 0usize || backward > max_backward {
                return false;
            }
            let len: usize =
                FindMatchLengthWithLimitMin4(&data[prev_ix..], &data[cur_ix_masked..], max_length);
            if len != 0 {
                out.len = len;
                out.distance = backward;
                out.score = BackwardReferenceScore(len, backward, opts);
                return true;
            }
        } else {
            for prev_ix_ref in
                self.buckets_.slice().split_at(key as usize).1[..bucket_sweep as usize].iter()
            {
                let mut prev_ix = *prev_ix_ref as usize;
                let backward: usize = cur_ix.wrapping_sub(prev_ix);
                prev_ix &= ring_buffer_mask as u32 as usize;
                if compare_char != data[prev_ix.wrapping_add(best_len)] as i32 {
                    continue;
                }
                if backward == 0usize || backward > max_backward {
                    continue;
                }
                let len = FindMatchLengthWithLimitMin4(
                    &data[prev_ix..],
                    &data[cur_ix_masked..],
                    max_length,
                );
                if len != 0 {
                    let score: u64 = BackwardReferenceScore(len, backward, opts);
                    if best_score < score {
                        best_score = score;
                        best_len = len;
                        out.len = best_len;
                        out.distance = backward;
                        out.score = score;
                        compare_char = data[cur_ix_masked.wrapping_add(best_len)] as i32;
                        is_match_found = true;
                    }
                }
            }
        }
        if dictionary.is_some() && self.buckets_.USE_DICTIONARY() != 0 && !is_match_found {
            is_match_found = SearchInStaticDictionary(
                dictionary.unwrap(),
                dictionary_hash,
                self,
                &data[cur_ix_masked..],
                max_length,
                max_backward.wrapping_add(gap),
                max_distance,
                out,
                true,
            );
        }
        self.buckets_.slice_mut()
            [(key as usize).wrapping_add((cur_ix >> 3).wrapping_rem(bucket_sweep as usize))] =
            cur_ix as u32;
        is_match_found
    }
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H2Sub<AllocU32> {
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 16i32)) as u32
    }
    fn BUCKET_BITS(&self) -> i32 {
        16
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        1
    }
    fn USE_DICTIONARY(&self) -> i32 {
        1
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H2Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        self.buckets_.slice_mut()
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H2Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        self.buckets_.slice()
    }
}
pub struct H3Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 65538
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H3Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        self.buckets_.slice_mut()
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H3Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        self.buckets_.slice()
    }
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H3Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        16
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        2
    }
    fn USE_DICTIONARY(&self) -> i32 {
        0
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 16i32)) as u32
    }
}
pub struct H4Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 131076
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H4Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        17
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        4
    }
    fn USE_DICTIONARY(&self) -> i32 {
        1
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 17i32)) as u32
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H4Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        self.buckets_.slice_mut()
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H4Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        self.buckets_.slice()
    }
}
pub struct H54Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory,
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H54Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        20
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        4
    }
    fn USE_DICTIONARY(&self) -> i32 {
        0
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 7i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 20i32)) as u32
    }
}

impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H54Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        self.buckets_.slice_mut()
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H54Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        self.buckets_.slice()
    }
}
pub const H9_BUCKET_BITS: usize = 15;
pub const H9_BLOCK_BITS: usize = 8;
pub const H9_NUM_LAST_DISTANCES_TO_CHECK: usize = 16;
pub const H9_BLOCK_SIZE: usize = 1 << H9_BLOCK_BITS;
const H9_BLOCK_MASK: usize = (1 << H9_BLOCK_BITS) - 1;

impl H9Opts {
    pub fn new(params: &BrotliHasherParams) -> H9Opts {
        H9Opts {
            literal_byte_score: if params.literal_byte_score != 0 {
                params.literal_byte_score as u32
            } else {
                540
            },
        }
    }
}

pub struct H9<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
    pub num_: <Alloc as Allocator<u16>>::AllocatedMemory, // [u16; 1 << H9_BUCKET_BITS]
    pub buckets_: <Alloc as Allocator<u32>>::AllocatedMemory, // [u32; H9_BLOCK_SIZE << H9_BUCKET_BITS]
    pub dict_search_stats_: Struct1,
    pub h9_opts: H9Opts,
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> PartialEq<H9<Alloc>> for H9<Alloc> {
    fn eq(&self, other: &H9<Alloc>) -> bool {
        self.dict_search_stats_ == other.dict_search_stats_
            && self.num_.slice() == other.num_.slice()
            && self.buckets_.slice() == other.buckets_.slice()
            && self.h9_opts == other.h9_opts
    }
}

fn adv_prepare_distance_cache(distance_cache: &mut [i32], num_distances: i32) {
    if num_distances > 4i32 {
        let last_distance: i32 = distance_cache[0];
        distance_cache[4] = last_distance - 1i32;
        distance_cache[5] = last_distance + 1i32;
        distance_cache[6] = last_distance - 2i32;
        distance_cache[7] = last_distance + 2i32;
        distance_cache[8] = last_distance - 3i32;
        distance_cache[9] = last_distance + 3i32;
        if num_distances > 10i32 {
            let next_last_distance: i32 = distance_cache[1];
            distance_cache[10] = next_last_distance - 1i32;
            distance_cache[11] = next_last_distance + 1i32;
            distance_cache[12] = next_last_distance - 2i32;
            distance_cache[13] = next_last_distance + 2i32;
            distance_cache[14] = next_last_distance - 3i32;
            distance_cache[15] = next_last_distance + 3i32;
        }
    }
}

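// Worked example of adv_prepare_distance_cache: with distance_cache[0] == 100
// and num_distances == 16, slots 4..=9 become 99, 101, 98, 102, 97, 103, and
// slots 10..=15 apply the same +/-1..3 pattern to distance_cache[1].
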
pub const kDistanceCacheIndex: [u8; 16] = [0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1];

pub const kDistanceCacheOffset: [i8; 16] = [0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3];

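// Reading the two tables above: candidate i probes
// distance_cache[kDistanceCacheIndex[i]] + kDistanceCacheOffset[i]. Entries
// 0..=3 are the four cached distances verbatim, 4..=9 perturb the last
// distance by +/-1..3, and 10..=15 perturb the second-to-last the same way.
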
//const BROTLI_LITERAL_BYTE_SCORE: u64 = 540;
const BROTLI_DISTANCE_BIT_PENALTY: u32 = 120;

// Score must be positive after applying maximal penalty.
const BROTLI_SCORE_BASE: u32 = BROTLI_DISTANCE_BIT_PENALTY * 8 * 8 /* sizeof usize */;
const kDistanceShortCodeCost: [u32; 16] = [
    /* Repeat last */
    BROTLI_SCORE_BASE + 60,
    /* 2nd, 3rd, 4th last */
    BROTLI_SCORE_BASE - 95,
    BROTLI_SCORE_BASE - 117,
    BROTLI_SCORE_BASE - 127,
    /* Last with offset */
    BROTLI_SCORE_BASE - 93,
    BROTLI_SCORE_BASE - 93,
    BROTLI_SCORE_BASE - 96,
    BROTLI_SCORE_BASE - 96,
    BROTLI_SCORE_BASE - 99,
    BROTLI_SCORE_BASE - 99,
    /* 2nd last with offset */
    BROTLI_SCORE_BASE - 105,
    BROTLI_SCORE_BASE - 105,
    BROTLI_SCORE_BASE - 115,
    BROTLI_SCORE_BASE - 115,
    BROTLI_SCORE_BASE - 125,
    BROTLI_SCORE_BASE - 125,
];

fn BackwardReferenceScoreH9(
    copy_length: usize,
    backward_reference_offset: usize,
    h9_opts: H9Opts,
) -> u64 {
    (u64::from(BROTLI_SCORE_BASE)
        .wrapping_add((h9_opts.literal_byte_score as u64).wrapping_mul(copy_length as u64))
        .wrapping_sub(
            (BROTLI_DISTANCE_BIT_PENALTY as u64)
                .wrapping_mul(Log2FloorNonZero(backward_reference_offset as u64) as u64),
        ))
        >> 2
}

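// Worked example of BackwardReferenceScoreH9: with the default
// literal_byte_score of 540, copy_length == 4 and offset == 64 give
// (7680 + 540*4 - 120*6) >> 2 == 2280, since BROTLI_SCORE_BASE == 7680 and
// Log2FloorNonZero(64) == 6.
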
fn BackwardReferenceScoreUsingLastDistanceH9(
    copy_length: usize,
    distance_short_code: usize,
    h9_opts: H9Opts,
) -> u64 {
    ((h9_opts.literal_byte_score as u64)
        .wrapping_mul(copy_length as u64)
        .wrapping_add(u64::from(kDistanceShortCodeCost[distance_short_code])))
        >> 2
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> AnyHasher for H9<Alloc> {
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        self.h9_opts
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.dict_search_stats_
    }
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        let h: u32 = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
        let thirty_two: usize = 32;
        (h >> (thirty_two.wrapping_sub(H9_BUCKET_BITS))) as usize
    }
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
        let num_distances = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
        adv_prepare_distance_cache(distance_cache, num_distances);
    }
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        let best_len_in: usize = out.len;
        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
        let mut best_score: u64 = out.score;
        let mut best_len: usize = best_len_in;
        let mut is_match_found = false;
        out.len_x_code = 0usize;
        for i in 0..H9_NUM_LAST_DISTANCES_TO_CHECK {
            let idx = kDistanceCacheIndex[i] as usize;
            let backward =
                (distance_cache[idx] as usize).wrapping_add(kDistanceCacheOffset[i] as usize);
            let mut prev_ix = cur_ix.wrapping_sub(backward);
            if prev_ix >= cur_ix {
                continue;
            }
            if backward > max_backward {
                continue;
            }
            prev_ix &= ring_buffer_mask;
            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
                || prev_ix.wrapping_add(best_len) > ring_buffer_mask
                || data[cur_ix_masked.wrapping_add(best_len)]
                    != data[prev_ix.wrapping_add(best_len)]
            {
                continue;
            }
            {
                let len: usize =
                    FindMatchLengthWithLimit(&data[prev_ix..], &data[cur_ix_masked..], max_length);
                if len >= 3 || (len == 2 && i < 2) {
                    let score = BackwardReferenceScoreUsingLastDistanceH9(len, i, self.h9_opts);
                    if best_score < score {
                        best_score = score;
                        best_len = len;
                        out.len = best_len;
                        out.distance = backward;
                        out.score = best_score;
                        is_match_found = true;
                    }
                }
            }
        }
        if max_length >= 4 && cur_ix_masked.wrapping_add(best_len) <= ring_buffer_mask {
            let key = self.HashBytes(data.split_at(cur_ix_masked).1);
            let bucket = &mut self
                .buckets_
                .slice_mut()
                .split_at_mut(key << H9_BLOCK_BITS)
                .1
                .split_at_mut(H9_BLOCK_SIZE)
                .0;
            assert!(bucket.len() > H9_BLOCK_MASK);
            assert_eq!(bucket.len(), H9_BLOCK_MASK + 1);
            let self_num_key = &mut self.num_.slice_mut()[key];
            let down = if *self_num_key > H9_BLOCK_SIZE as u16 {
                (*self_num_key as usize) - H9_BLOCK_SIZE
            } else {
                0usize
            };
            let mut i: usize = *self_num_key as usize;
            let mut prev_best_val = data[cur_ix_masked.wrapping_add(best_len)];
            while i > down {
                i -= 1;
                let mut prev_ix = bucket[i & H9_BLOCK_MASK] as usize;
                let backward = cur_ix.wrapping_sub(prev_ix);
                if backward > max_backward {
                    break;
                }
                prev_ix &= ring_buffer_mask;
                if prev_ix.wrapping_add(best_len) > ring_buffer_mask
                    || prev_best_val != data[prev_ix.wrapping_add(best_len)]
                {
                    continue;
                }
                {
                    let len = FindMatchLengthWithLimit(
                        data.split_at(prev_ix).1,
                        data.split_at(cur_ix_masked).1,
                        max_length,
                    );
                    if len >= 4 {
                        /* Comparing for >= 3 does not change the semantics, but just saves
                        a few unnecessary binary logarithms in the backward reference
                        score, since we are not interested in such short matches. */
                        let score = BackwardReferenceScoreH9(len, backward, self.h9_opts);
                        if best_score < score {
                            best_score = score;
                            best_len = len;
                            out.len = best_len;
                            out.distance = backward;
                            out.score = best_score;
                            is_match_found = true;
                            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask {
                                break;
                            }
                            prev_best_val = data[cur_ix_masked.wrapping_add(best_len)];
                        }
                    }
                }
            }
            bucket[*self_num_key as usize & H9_BLOCK_MASK] = cur_ix as u32;
            *self_num_key = self_num_key.wrapping_add(1);
        }
        if !is_match_found && dictionary.is_some() {
            let (_, cur_data) = data.split_at(cur_ix_masked);
            is_match_found = SearchInStaticDictionary(
                dictionary.unwrap(),
                dictionary_hash,
                self,
                cur_data,
                max_length,
                max_backward.wrapping_add(gap),
                max_distance,
                out,
                false,
            );
        }
        is_match_found
    }

    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let (_, data_window) = data.split_at(ix & mask);
        let key: u32 = self.HashBytes(data_window) as u32;
        let self_num_key = &mut self.num_.slice_mut()[key as usize];
        let minor_ix: usize = *self_num_key as usize & H9_BLOCK_MASK;
        self.buckets_.slice_mut()[minor_ix.wrapping_add((key as usize) << H9_BLOCK_BITS)] =
            ix as u32;
        *self_num_key = self_num_key.wrapping_add(1);
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn Prepare(&mut self, _one_shot: bool, _input_size: usize, _data: &[u8]) -> HowPrepared {
        if self.GetHasherCommon().is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        for item in self.num_.slice_mut().iter_mut() {
            *item = 0;
        }
        self.GetHasherCommon().is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask)
    }
}

pub trait AdvHashSpecialization: PartialEq<Self> {
    fn get_hash_mask(&self) -> u64;
    fn set_hash_mask(&mut self, params_hash_len: i32);
    fn get_k_hash_mul(&self) -> u64;
    fn HashTypeLength(&self) -> usize;
    fn StoreLookahead(&self) -> usize;
    fn load_and_mix_word(&self, data: &[u8]) -> u64;
    fn hash_shift(&self) -> i32;
    fn bucket_size(&self) -> u32;
    fn block_mask(&self) -> u32;
    fn block_size(&self) -> u32;
    fn block_bits(&self) -> i32;
}
pub struct AdvHasher<
    Specialization: AdvHashSpecialization + Sized + Clone,
    Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
> {
    pub GetHasherCommon: Struct1,
    pub specialization: Specialization, // contains hash_mask_
    pub num: <Alloc as Allocator<u16>>::AllocatedMemory,
    pub buckets: <Alloc as Allocator<u32>>::AllocatedMemory,
    pub h9_opts: H9Opts,
}

impl<
        Specialization: AdvHashSpecialization + Sized + Clone,
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
    > PartialEq<AdvHasher<Specialization, Alloc>> for AdvHasher<Specialization, Alloc>
{
    fn eq(&self, other: &Self) -> bool {
        self.GetHasherCommon == other.GetHasherCommon
            && self.specialization == other.specialization
            && self.num.slice() == other.num.slice()
            && self.buckets.slice() == other.buckets.slice()
            && self.h9_opts == other.h9_opts
    }
}

#[derive(Clone, PartialEq)]
pub struct HQ5Sub {}
impl AdvHashSpecialization for HQ5Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        32i32 - 14 // 32 - bucket_bits
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        1 << 14
    }
    #[inline(always)]
    fn block_bits(&self) -> i32 {
        4
    }
    #[inline(always)]
    fn block_size(&self) -> u32 {
        1 << 4
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        (1 << 4) - 1
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct HQ7Sub {}
impl AdvHashSpecialization for HQ7Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        32i32 - 15 // 32 - bucket_bits
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        1 << 15
    }
    #[inline(always)]
    fn block_bits(&self) -> i32 {
        6
    }
    #[inline(always)]
    fn block_size(&self) -> u32 {
        1 << 6
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        (1 << 6) - 1
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct H5Sub {
    pub hash_shift_: i32,
    pub bucket_size_: u32,
    pub block_mask_: u32,
    pub block_bits_: i32,
}

impl AdvHashSpecialization for H5Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        self.hash_shift_
    }
    fn bucket_size(&self) -> u32 {
        self.bucket_size_
    }
    fn block_bits(&self) -> i32 {
        self.block_bits_
    }
    fn block_size(&self) -> u32 {
        1 << self.block_bits_
    }
    fn block_mask(&self) -> u32 {
        self.block_mask_
    }
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    #[allow(unused_variables)]
    fn set_hash_mask(&mut self, params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct H6Sub {
    pub hash_mask: u64,
    pub hash_shift_: i32,
    pub bucket_size_: u32,
    pub block_mask_: u32,
    pub block_bits_: i32,
}

impl AdvHashSpecialization for H6Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        self.hash_shift_
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        self.bucket_size_
    }
    fn block_bits(&self) -> i32 {
        self.block_bits_
    }
    fn block_size(&self) -> u32 {
        1 << self.block_bits_
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        self.block_mask_
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        self.hash_mask
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, params_hash_len: i32) {
        self.hash_mask = !(0u32 as u64) >> (64i32 - 8i32 * params_hash_len);
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul64Long
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD64(data) & self.get_hash_mask()).wrapping_mul(self.get_k_hash_mul())
    }
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        8
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        8
    }
}

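// Example of H6Sub::set_hash_mask above: params_hash_len == 5 yields
// hash_mask == !0u64 >> 24 == 0xff_ffff_ffff, so load_and_mix_word hashes
// only the low five bytes of the 8-byte unaligned load.
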
fn BackwardReferencePenaltyUsingLastDistance(distance_short_code: usize) -> u64 {
    // FIXME?: double bitwise AND with the same value?
    (39u64).wrapping_add((0x0001_ca10_u64 >> (distance_short_code & 0x0e)) & 0x0e)
}

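// Worked example of the penalty above: the constant 0x0001_ca10 packs even
// nibbles selected by (distance_short_code & 0x0e); codes 0 and 1 select
// nibble 0 for a penalty of 39, while codes 6 and 7 select nibble 8 for a
// penalty of 47.
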
impl<
        Specialization: AdvHashSpecialization + Clone,
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
    > AdvHasher<Specialization, Alloc>
{
    // batched 4-position store using one 7-byte load; returns a new ix_start
    fn StoreRangeOptBatch(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        let lookahead = self.specialization.StoreLookahead();
        if ix_end >= ix_start + lookahead * 2 && lookahead == 4 {
            let num = self.num.slice_mut();
            let buckets = self.buckets.slice_mut();
            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
            assert_eq!(
                buckets.len(),
                self.specialization.bucket_size() as usize
                    * self.specialization.block_size() as usize
            );
            let shift = self.specialization.hash_shift();
            let chunk_count = (ix_end - ix_start) / 4;
            for chunk_id in 0..chunk_count {
                let i = (ix_start + chunk_id * 4) & mask;
                let ffffffff = 0xffff_ffff;
                let word = u64::from(data[i])
                    | (u64::from(data[i + 1]) << 8)
                    | (u64::from(data[i + 2]) << 16)
                    | (u64::from(data[i + 3]) << 24)
                    | (u64::from(data[i + 4]) << 32)
                    | (u64::from(data[i + 5]) << 40)
                    | (u64::from(data[i + 6]) << 48);
                let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
                    & self.specialization.get_hash_mask())
                    >> shift) as usize;
                let mixed1 = (((((word >> 8) & ffffffff) * self.specialization.get_k_hash_mul())
                    & self.specialization.get_hash_mask())
                    >> shift) as usize;
                let mixed2 = (((((word >> 16) & ffffffff) * self.specialization.get_k_hash_mul())
                    & self.specialization.get_hash_mask())
                    >> shift) as usize;
                let mixed3 = (((((word >> 24) & ffffffff) * self.specialization.get_k_hash_mul())
                    & self.specialization.get_hash_mask())
                    >> shift) as usize;
                let mut num_ref0 = u32::from(num[mixed0]);
                num[mixed0] = num_ref0.wrapping_add(1) as u16;
                num_ref0 &= self.specialization.block_mask();
                let mut num_ref1 = u32::from(num[mixed1]);
                num[mixed1] = num_ref1.wrapping_add(1) as u16;
                num_ref1 &= self.specialization.block_mask();
                let mut num_ref2 = u32::from(num[mixed2]);
                num[mixed2] = num_ref2.wrapping_add(1) as u16;
                num_ref2 &= self.specialization.block_mask();
                let mut num_ref3 = u32::from(num[mixed3]);
                num[mixed3] = num_ref3.wrapping_add(1) as u16;
                num_ref3 &= self.specialization.block_mask();
                let offset0: usize =
                    (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
                let offset1: usize =
                    (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
                let offset2: usize =
                    (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
                let offset3: usize =
                    (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
                buckets[offset0] = i as u32;
                buckets[offset1] = (i + 1) as u32;
                buckets[offset2] = (i + 2) as u32;
                buckets[offset3] = (i + 3) as u32;
            }
            return ix_start + chunk_count * 4;
        }
        ix_start
    }

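    // Note on StoreRangeOptBatch above: the seven bytes at i..i+7 are packed
    // into one little-endian word, and the four overlapping 4-byte hash keys
    // for positions i..i+3 come from shifting that word by 0/8/16/24 bits,
    // replacing four unaligned loads per chunk with one.
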
    fn BulkStoreRangeOptMemFetch(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        const REG_SIZE: usize = 32usize;
        let lookahead = self.specialization.StoreLookahead();
        if mask == !0 && ix_end > ix_start + REG_SIZE && lookahead == 4 {
            const lookahead4: usize = 4;
            assert_eq!(lookahead4, lookahead);
            let mut data64 = [0u8; REG_SIZE + lookahead4 - 1];
            let del = (ix_end - ix_start) / REG_SIZE;
            let num = self.num.slice_mut();
            let buckets = self.buckets.slice_mut();
            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
            assert_eq!(
                buckets.len(),
                self.specialization.bucket_size() as usize
                    * self.specialization.block_size() as usize
            );
            let shift = self.specialization.hash_shift();
            for chunk_id in 0..del {
                let ix_offset = ix_start + chunk_id * REG_SIZE;
                data64[..REG_SIZE + lookahead4 - 1].clone_from_slice(
                    data.split_at(ix_offset)
                        .1
                        .split_at(REG_SIZE + lookahead4 - 1)
                        .0,
                );
                for quad_index in 0..(REG_SIZE >> 2) {
                    let i = quad_index << 2;
                    let ffffffff = 0xffff_ffff;
                    let word = u64::from(data64[i])
                        | (u64::from(data64[i + 1]) << 8)
                        | (u64::from(data64[i + 2]) << 16)
                        | (u64::from(data64[i + 3]) << 24)
                        | (u64::from(data64[i + 4]) << 32)
                        | (u64::from(data64[i + 5]) << 40)
                        | (u64::from(data64[i + 6]) << 48);
                    let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed1 = (((((word >> 8) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed2 = (((((word >> 16) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed3 = (((((word >> 24) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mut num_ref0 = u32::from(num[mixed0]);
                    num[mixed0] = num_ref0.wrapping_add(1) as u16;
                    num_ref0 &= self.specialization.block_mask();
                    let mut num_ref1 = u32::from(num[mixed1]);
                    num[mixed1] = num_ref1.wrapping_add(1) as u16;
                    num_ref1 &= self.specialization.block_mask();
                    let mut num_ref2 = u32::from(num[mixed2]);
                    num[mixed2] = num_ref2.wrapping_add(1) as u16;
                    num_ref2 &= self.specialization.block_mask();
                    let mut num_ref3 = u32::from(num[mixed3]);
                    num[mixed3] = num_ref3.wrapping_add(1) as u16;
                    num_ref3 &= self.specialization.block_mask();
                    let offset0: usize =
                        (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
                    let offset1: usize =
                        (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
                    let offset2: usize =
                        (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
                    let offset3: usize =
                        (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
                    buckets[offset0] = (ix_offset + i) as u32;
                    buckets[offset1] = (ix_offset + i + 1) as u32;
                    buckets[offset2] = (ix_offset + i + 2) as u32;
                    buckets[offset3] = (ix_offset + i + 3) as u32;
                }
            }
            return ix_start + del * REG_SIZE;
        }
        ix_start
    }
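    // Note on BulkStoreRangeOptMemFetch above: each 32-byte chunk (plus 3
    // bytes of lookahead) is first copied into the fixed-size stack buffer
    // `data64`, so the inner hashing loop indexes a known-length array, which
    // can help the compiler elide bounds checks.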
    fn BulkStoreRangeOptMemFetchLazyDupeUpdate(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        const REG_SIZE: usize = 32usize;
        let lookahead = self.specialization.StoreLookahead();
        if mask == !0 && ix_end > ix_start + REG_SIZE && lookahead == 4 {
            const lookahead4: usize = 4;
            assert_eq!(lookahead4, lookahead);
            let mut data64 = [0u8; REG_SIZE + lookahead4];
            let del = (ix_end - ix_start) / REG_SIZE;
            let num = self.num.slice_mut();
            let buckets = self.buckets.slice_mut();
            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
            assert_eq!(
                buckets.len(),
                self.specialization.bucket_size() as usize
                    * self.specialization.block_size() as usize
            );
            let shift = self.specialization.hash_shift();
            for chunk_id in 0..del {
                let ix_offset = ix_start + chunk_id * REG_SIZE;
                data64[..REG_SIZE + lookahead4]
                    .clone_from_slice(data.split_at(ix_offset).1.split_at(REG_SIZE + lookahead4).0);
                for quad_index in 0..(REG_SIZE >> 2) {
                    let i = quad_index << 2;
                    let ffffffff = 0xffff_ffff;
                    let word = u64::from(data64[i])
                        | (u64::from(data64[i + 1]) << 8)
                        | (u64::from(data64[i + 2]) << 16)
                        | (u64::from(data64[i + 3]) << 24)
                        | (u64::from(data64[i + 4]) << 32)
                        | (u64::from(data64[i + 5]) << 40)
                        | (u64::from(data64[i + 6]) << 48);
                    let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed1 = (((((word >> 8) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed2 = (((((word >> 16) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mixed3 = (((((word >> 24) & ffffffff)
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask())
                        >> shift) as usize;
                    let mut num_ref0 = u32::from(num[mixed0]);
                    let mut num_ref1 = u32::from(num[mixed1]);
                    let mut num_ref2 = u32::from(num[mixed2]);
                    let mut num_ref3 = u32::from(num[mixed3]);
                    num[mixed0] = num_ref0.wrapping_add(1) as u16;
                    num[mixed1] = num_ref1.wrapping_add(1) as u16;
                    num[mixed2] = num_ref2.wrapping_add(1) as u16;
                    num[mixed3] = num_ref3.wrapping_add(1) as u16;
                    num_ref0 &= self.specialization.block_mask();
                    num_ref1 &= self.specialization.block_mask();
                    num_ref2 &= self.specialization.block_mask();
                    num_ref3 &= self.specialization.block_mask();
                    let offset0: usize =
                        (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
                    let offset1: usize =
                        (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
                    let offset2: usize =
                        (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
                    let offset3: usize =
                        (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
                    buckets[offset0] = (ix_offset + i) as u32;
                    buckets[offset1] = (ix_offset + i + 1) as u32;
                    buckets[offset2] = (ix_offset + i + 2) as u32;
                    buckets[offset3] = (ix_offset + i + 3) as u32;
                }
            }
            return ix_start + del * REG_SIZE;
        }
        ix_start
    }
    fn BulkStoreRangeOptRandomDupeUpdate(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        const REG_SIZE: usize = 32usize;
        let lookahead = self.specialization.StoreLookahead();
        if mask == !0 && ix_end > ix_start + REG_SIZE && lookahead == 4 {
            const lookahead4: usize = 4;
            assert_eq!(lookahead4, lookahead);
            let mut data64 = [0u8; REG_SIZE + lookahead4];
            let del = (ix_end - ix_start) / REG_SIZE;
            let num = self.num.slice_mut();
            let buckets = self.buckets.slice_mut();
            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
            assert_eq!(
                buckets.len(),
                self.specialization.bucket_size() as usize
                    * self.specialization.block_size() as usize
            );
            let shift = self.specialization.hash_shift();
            for chunk_id in 0..del {
                let ix_offset = ix_start + chunk_id * REG_SIZE;
                data64[..REG_SIZE + lookahead4]
                    .clone_from_slice(data.split_at(ix_offset).1.split_at(REG_SIZE + lookahead4).0);
                for i in 0..REG_SIZE {
                    let mixed_word = ((u32::from(data64[i])
                        | (u32::from(data64[i + 1]) << 8)
                        | (u32::from(data64[i + 2]) << 16)
                        | (u32::from(data64[i + 3]) << 24))
                        as u64
                        * self.specialization.get_k_hash_mul())
                        & self.specialization.get_hash_mask();
                    let key = mixed_word >> shift;
                    let minor_ix: usize = chunk_id & self.specialization.block_mask() as usize; //   *num_ref as usize & self.specialization.block_mask() as usize; //GIGANTIC HAX: overwrite first option
                    let offset: usize =
                        minor_ix + (key << self.specialization.block_bits()) as usize;
                    buckets[offset] = (ix_offset + i) as u32;
                }
            }
            for (bucket_index, num_ref) in num.iter_mut().enumerate() {
                let region = buckets
                    .split_at_mut(bucket_index << self.specialization.block_bits())
                    .1
                    .split_at_mut(self.specialization.block_size() as usize)
                    .0;
                let mut lnum = 0usize;
                for block_index in 0..self.specialization.block_size() as usize {
                    if region[block_index] != 0 {
                        let byte_addr = region[block_index];
                        region[lnum] = byte_addr;
                        lnum += 1;
                    }
                }
                *num_ref = lnum as u16;
            }
            return ix_start + del * REG_SIZE;
        }
        ix_start
    }
}

impl<
        Specialization: AdvHashSpecialization + Clone,
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
    > AnyHasher for AdvHasher<Specialization, Alloc>
{
    fn Opts(&self) -> H9Opts {
        self.h9_opts
    }
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
        let num_distances = self.GetHasherCommon.params.num_last_distances_to_check;
        adv_prepare_distance_cache(distance_cache, num_distances);
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask);
    }
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
        if self.GetHasherCommon.is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        let partial_prepare_threshold = self.specialization.bucket_size() as usize >> 6;
        if one_shot && input_size <= partial_prepare_threshold {
            for i in 0..input_size {
                let key = self.HashBytes(&data[i..]);
                self.num.slice_mut()[key] = 0;
            }
        } else {
            for item in
                self.num.slice_mut()[..(self.specialization.bucket_size() as usize)].iter_mut()
            {
                *item = 0;
            }
        }
        self.GetHasherCommon.is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }

    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.GetHasherCommon
    }
    fn HashTypeLength(&self) -> usize {
        self.specialization.HashTypeLength()
    }
    fn StoreLookahead(&self) -> usize {
        self.specialization.StoreLookahead()
    }
    fn HashBytes(&self, data: &[u8]) -> usize {
        let shift = self.specialization.hash_shift();
        let h: u64 = self.specialization.load_and_mix_word(data);
        (h >> shift) as u32 as usize
    }
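    // Vectorized store with stride 2: hashes the four 4-byte windows at ix,
    // ix + 2, ix + 4, and ix + 6 out of two wide loads instead of four
    // separate fetches, falling back to scalar Store calls whenever the
    // specialization's lookahead is not 4.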
    fn StoreEvenVec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        if self.specialization.StoreLookahead() != 4 {
            for i in 0..4 {
                self.Store(data, mask, ix + i * 2);
            }
            return;
        }
        let shift = self.specialization.hash_shift();
        let num = self.num.slice_mut();
        let buckets = self.buckets.slice_mut();
        let li = ix & mask;
        let lword = u64::from(data[li])
            | (u64::from(data[li + 1]) << 8)
            | (u64::from(data[li + 2]) << 16)
            | (u64::from(data[li + 3]) << 24)
            | (u64::from(data[li + 4]) << 32)
            | (u64::from(data[li + 5]) << 40)
            | (u64::from(data[li + 6]) << 48)
            | (u64::from(data[li + 7]) << 56);
        let hi = (ix + 8) & mask;
        let hword = u64::from(data[hi]) | (u64::from(data[hi + 1]) << 8);
        let mixed0 = ((((lword & 0xffff_ffff) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed1 = (((((lword >> 16) & 0xffff_ffff) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed2 = (((((lword >> 32) & 0xffff_ffff) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed3 = ((((((hword & 0xffff) << 16) | ((lword >> 48) & 0xffff))
            * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mut num_ref0 = u32::from(num[mixed0]);
        num[mixed0] = num_ref0.wrapping_add(1) as u16;
        num_ref0 &= self.specialization.block_mask();
        let mut num_ref1 = u32::from(num[mixed1]);
        num[mixed1] = num_ref1.wrapping_add(1) as u16;
        num_ref1 &= self.specialization.block_mask();
        let mut num_ref2 = u32::from(num[mixed2]);
        num[mixed2] = num_ref2.wrapping_add(1) as u16;
        num_ref2 &= self.specialization.block_mask();
        let mut num_ref3 = u32::from(num[mixed3]);
        num[mixed3] = num_ref3.wrapping_add(1) as u16;
        num_ref3 &= self.specialization.block_mask();
        let offset0: usize = (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
        let offset1: usize = (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
        let offset2: usize = (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
        let offset3: usize = (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
        buckets[offset0] = ix as u32;
        buckets[offset1] = (ix + 2) as u32;
        buckets[offset2] = (ix + 4) as u32;
        buckets[offset3] = (ix + 6) as u32;
    }
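    // Same idea with stride 4: hashes the 4-byte windows at ix, ix + 4,
    // ix + 8, and ix + 12, again falling back to scalar Store calls unless
    // the lookahead is 4.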
    fn Store4Vec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        if self.specialization.StoreLookahead() != 4 {
            for i in 0..4 {
                self.Store(data, mask, ix + i * 4);
            }
            return;
        }
        let shift = self.specialization.hash_shift();
        let num = self.num.slice_mut();
        let buckets = self.buckets.slice_mut();
        let li = ix & mask;
        let llword = u32::from(data[li])
            | (u32::from(data[li + 1]) << 8)
            | (u32::from(data[li + 2]) << 16)
            | (u32::from(data[li + 3]) << 24);
        let luword = u32::from(data[li + 4])
            | (u32::from(data[li + 5]) << 8)
            | (u32::from(data[li + 6]) << 16)
            | (u32::from(data[li + 7]) << 24);
        let ui = (ix + 8) & mask;
        let ulword = u32::from(data[ui])
            | (u32::from(data[ui + 1]) << 8)
            | (u32::from(data[ui + 2]) << 16)
            | (u32::from(data[ui + 3]) << 24);

        let uuword = u32::from(data[ui + 4])
            | (u32::from(data[ui + 5]) << 8)
            | (u32::from(data[ui + 6]) << 16)
            | (u32::from(data[ui + 7]) << 24);

        let mixed0 = (((u64::from(llword) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed1 = (((u64::from(luword) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed2 = (((u64::from(ulword) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mixed3 = (((u64::from(uuword) * self.specialization.get_k_hash_mul())
            & self.specialization.get_hash_mask())
            >> shift) as usize;
        let mut num_ref0 = u32::from(num[mixed0]);
        num[mixed0] = num_ref0.wrapping_add(1) as u16;
        num_ref0 &= self.specialization.block_mask();
        let mut num_ref1 = u32::from(num[mixed1]);
        num[mixed1] = num_ref1.wrapping_add(1) as u16;
        num_ref1 &= self.specialization.block_mask();
        let mut num_ref2 = u32::from(num[mixed2]);
        num[mixed2] = num_ref2.wrapping_add(1) as u16;
        num_ref2 &= self.specialization.block_mask();
        let mut num_ref3 = u32::from(num[mixed3]);
        num[mixed3] = num_ref3.wrapping_add(1) as u16;
        num_ref3 &= self.specialization.block_mask();
        let offset0: usize = (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
        let offset1: usize = (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
        let offset2: usize = (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
        let offset3: usize = (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
        buckets[offset0] = ix as u32;
        buckets[offset1] = (ix + 4) as u32;
        buckets[offset2] = (ix + 8) as u32;
        buckets[offset3] = (ix + 12) as u32;
    }
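    // Scalar store: append `ix` to the bucket for the window at
    // data[ix & mask..], treating each bucket as a ring of block_size entries
    // indexed by the wrapping counter in `num`.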
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let (_, data_window) = data.split_at(ix & mask);
        let key: u32 = self.HashBytes(data_window) as u32;
        let minor_ix: usize =
            (self.num.slice()[key as usize] as u32 & self.specialization.block_mask()) as usize;
        let offset: usize =
            minor_ix.wrapping_add((key << self.specialization.block_bits()) as usize);
        self.buckets.slice_mut()[offset] = ix as u32;
        {
            let _lhs = &mut self.num.slice_mut()[key as usize];
            *_lhs = (*_lhs as i32 + 1) as u16;
        }
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in self.StoreRangeOptBatch(data, mask, ix_start, ix_end)..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, mut ix_start: usize, ix_end: usize) {
        /*
        if ix_start + 4096 < ix_end {
          for vec_offset in 0..(ix_end - ix_start - 4096) / 16 {
            self.Store4Vec4(data, mask, ix_start + vec_offset * 16);
          }
          ix_start += 16 * ((ix_end - ix_start - 4096) / 16);
        }
        if ix_start + 512 < ix_end {
          for vec_offset in 0..(ix_end - ix_start - 512) / 8 {
            self.StoreEvenVec4(data, mask, ix_start + vec_offset * 8);
            //self.StoreRange(data, mask, ix_start + vec_offset * 8, ix_start + (1+ vec_offset) * 8);
          }
          ix_start += 8 * ((ix_end - ix_start - 512) / 8);
        }
         */
        ix_start = self.BulkStoreRangeOptMemFetch(data, mask, ix_start, ix_end);
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }

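    // Find the longest match for the bytes at `cur_ix`: first retry the recent
    // distances in `distance_cache`, then walk this key's bucket from newest
    // to oldest entry, and finally fall back to the static dictionary if
    // nothing scored. Returns true and fills `out` whenever a candidate beats
    // the incoming `out.score`.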
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        let opts = self.Opts();
        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
        let mut is_match_found = false;
        let mut best_score: u64 = out.score;
        let mut best_len: usize = out.len;
        let mut i: usize;
        out.len = 0usize;
        out.len_x_code = 0usize;
        i = 0usize;
        let cur_data = data.split_at(cur_ix_masked).1;
        while i < self.GetHasherCommon.params.num_last_distances_to_check as usize {
            'continue45: loop {
                {
                    let backward: usize = distance_cache[i] as usize;
                    let mut prev_ix: usize = cur_ix.wrapping_sub(backward);
                    if prev_ix >= cur_ix {
                        break 'continue45;
                    }
                    if backward > max_backward {
                        break 'continue45;
                    }
                    prev_ix &= ring_buffer_mask;
                    if (cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
                        || prev_ix.wrapping_add(best_len) > ring_buffer_mask
                        || cur_data[best_len] != data[prev_ix.wrapping_add(best_len)])
                    {
                        break 'continue45;
                    }
                    let prev_data = data.split_at(prev_ix).1;

                    let len: usize = FindMatchLengthWithLimit(prev_data, cur_data, max_length);
                    if len >= 3usize || (len == 2usize && i < 2usize) {
                        let mut score: u64 = BackwardReferenceScoreUsingLastDistance(len, opts);
                        if best_score < score {
                            if i != 0usize {
                                score = score
                                    .wrapping_sub(BackwardReferencePenaltyUsingLastDistance(i));
                            }
                            if best_score < score {
                                best_score = score;
                                best_len = len;
                                out.len = best_len;
                                out.distance = backward;
                                out.score = best_score;
                                is_match_found = true;
                            }
                        }
                    }
                }
                break;
            }
            i = i.wrapping_add(1);
        }
        {
            let key: u32 = self.HashBytes(cur_data) as u32;
            let common_block_bits = self.specialization.block_bits();
            let num_ref_mut = &mut self.num.slice_mut()[key as usize];
            let num_copy = *num_ref_mut;
            let bucket: &mut [u32] = self
                .buckets
                .slice_mut()
                .split_at_mut((key << common_block_bits) as usize)
                .1
                .split_at_mut(self.specialization.block_size() as usize)
                .0;
            assert!(bucket.len() > self.specialization.block_mask() as usize);
            if num_copy != 0 {
                let down: usize = max(
                    i32::from(num_copy) - self.specialization.block_size() as i32,
                    0,
                ) as usize;
                i = num_copy as usize;
                while i > down {
                    i -= 1;
                    let mut prev_ix =
                        bucket[i & self.specialization.block_mask() as usize] as usize;
                    let backward = cur_ix.wrapping_sub(prev_ix);
                    prev_ix &= ring_buffer_mask;
                    if (cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
                        || prev_ix.wrapping_add(best_len) > ring_buffer_mask
                        || cur_data[best_len] != data[prev_ix.wrapping_add(best_len)])
                    {
                        if backward > max_backward {
                            break;
                        }
                        continue;
                    }
                    if backward > max_backward {
                        break;
                    }
                    let prev_data = data.split_at(prev_ix).1;
                    let len = FindMatchLengthWithLimitMin4(prev_data, cur_data, max_length);
                    if len != 0 {
                        let score: u64 = BackwardReferenceScore(len, backward, opts);
                        if best_score < score {
                            best_score = score;
                            best_len = len;
                            out.len = best_len;
                            out.distance = backward;
                            out.score = best_score;
                            is_match_found = true;
                        }
                    }
                }
            }
            bucket[(num_copy as u32 & self.specialization.block_mask()) as usize] =
                cur_ix as u32;
            *num_ref_mut = num_ref_mut.wrapping_add(1);
        }
        if !is_match_found && dictionary.is_some() {
            let (_, cur_data) = data.split_at(cur_ix_masked);
            is_match_found = SearchInStaticDictionary(
                dictionary.unwrap(),
                dictionary_hash,
                self,
                cur_data,
                max_length,
                max_backward.wrapping_add(gap),
                max_distance,
                out,
                false,
            );
        }
        is_match_found
    }
}

pub struct BankH40 {
    pub slots: [SlotH40; 65536],
}

pub struct BankH41 {
    pub slots: [SlotH41; 65536],
}

pub struct BankH42 {
    pub slots: [SlotH42; 512],
}

pub struct SlotH40 {
    pub delta: u16,
    pub next: u16,
}
pub struct SlotH41 {
    pub delta: u16,
    pub next: u16,
}

pub struct SlotH42 {
    pub delta: u16,
    pub next: u16,
}

// UNSUPPORTED, for now.
pub struct H40 {
    pub common: Struct1,
    pub addr: [u32; 32768],
    pub head: [u16; 32768],
    pub tiny_hash: [u8; 65536],
    pub banks: [BankH40; 1],
    pub free_slot_idx: [u16; 1],
    pub max_hops: usize,
}

pub struct H41 {
    pub common: Struct1,
    pub addr: [u32; 32768],
    pub head: [u16; 32768],
    pub tiny_hash: [u8; 65536],
    pub banks: [BankH41; 1],
    pub free_slot_idx: [u16; 1],
    pub max_hops: usize,
}

pub struct H42 {
    pub common: Struct1,
    pub addr: [u32; 32768],
    pub head: [u16; 32768],
    pub tiny_hash: [u8; 65536],
    pub banks: [BankH42; 512],
    free_slot_idx: [u16; 512],
    pub max_hops: usize,
}

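// Portable count-trailing-zeros. Note that a zero input would loop forever,
// so callers must pass a nonzero value.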
fn unopt_ctzll(mut val: usize) -> u8 {
    let mut cnt: u8 = 0u8;
    while val & 1 == 0usize {
        val >>= 1i32;
        cnt = (cnt as i32 + 1) as u8;
    }
    cnt
}

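// Score for a match found via the distance cache: (literal_byte_score / 4)
// per copied byte plus a flat bonus of 30 * 8 * size_of::<u64>() + 15 = 1935
// for reusing a recent distance.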
fn BackwardReferenceScoreUsingLastDistance(copy_length: usize, h9_opts: H9Opts) -> u64 {
    ((h9_opts.literal_byte_score as u64) >> 2)
        .wrapping_mul(copy_length as u64)
        .wrapping_add((30u64 * 8u64).wrapping_mul(::core::mem::size_of::<u64>() as u64))
        .wrapping_add(15)
}

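// General match score: the same per-byte term over a flat base of 1920, minus
// a distance penalty of 30 * floor(log2(backward_offset)), so nearer matches
// win when lengths tie.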
fn BackwardReferenceScore(
    copy_length: usize,
    backward_reference_offset: usize,
    h9_opts: H9Opts,
) -> u64 {
    (30u64 * 8u64)
        .wrapping_mul(::core::mem::size_of::<u64>() as u64)
        .wrapping_add(((h9_opts.literal_byte_score as usize) >> 2).wrapping_mul(copy_length) as u64)
        .wrapping_sub(
            (30u64).wrapping_mul(Log2FloorNonZero(backward_reference_offset as u64) as u64),
        )
}

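// 14-bit multiplicative hash of the first four bytes, used to index the
// static dictionary's hash table.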
fn Hash14(data: &[u8]) -> u32 {
    let h: u32 = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
    h >> (32i32 - 14i32)
}

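// Probe a single static-dictionary entry. `item` packs the word length into
// its low five bits and the word index into the rest; shortened matches are
// mapped onto cutoff transforms, and the resulting backward distance is
// encoded beyond `max_backward`.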
fn TestStaticDictionaryItem(
    dictionary: &BrotliDictionary,
    item: usize,
    data: &[u8],
    max_length: usize,
    max_backward: usize,
    max_distance: usize,
    h9_opts: H9Opts,
    out: &mut HasherSearchResult,
) -> i32 {
    let backward: usize;

    let len: usize = item & 0x1fusize;
    let dist: usize = item >> 5;
    let offset: usize =
        (dictionary.offsets_by_length[len] as usize).wrapping_add(len.wrapping_mul(dist));
    if len > max_length {
        return 0i32;
    }
    let matchlen: usize = FindMatchLengthWithLimit(data, &dictionary.data[offset..], len);
    if matchlen.wrapping_add(kCutoffTransformsCount as usize) <= len || matchlen == 0usize {
        return 0i32;
    }
    {
        let cut: u64 = len.wrapping_sub(matchlen) as u64;
        let transform_id: usize =
            (cut << 2).wrapping_add(kCutoffTransforms >> cut.wrapping_mul(6) & 0x3f) as usize;
        backward = max_backward
            .wrapping_add(dist)
            .wrapping_add(1)
            .wrapping_add(transform_id << dictionary.size_bits_by_length[len] as i32);
    }
    if backward > max_distance {
        return 0i32;
    }
    let score: u64 = BackwardReferenceScore(matchlen, backward, h9_opts);
    if score < out.score {
        return 0i32;
    }
    out.len = matchlen;
    out.len_x_code = len ^ matchlen;
    out.distance = backward;
    out.score = score;
    1i32
}

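// Probe up to two static-dictionary buckets for the current bytes (one when
// `shallow`), bailing out early once lookups outnumber matches by more than
// 128 to 1 so that incompressible inputs stop paying for dictionary probes.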
fn SearchInStaticDictionary<HasherType: AnyHasher>(
    dictionary: &BrotliDictionary,
    dictionary_hash: &[u16],
    handle: &mut HasherType,
    data: &[u8],
    max_length: usize,
    max_backward: usize,
    max_distance: usize,
    out: &mut HasherSearchResult,
    shallow: bool,
) -> bool {
    let mut key: usize;
    let mut i: usize;
    let mut is_match_found = false;
    let opts = handle.Opts();
    let xself: &mut Struct1 = handle.GetHasherCommon();
    if xself.dict_num_matches < xself.dict_num_lookups >> 7 {
        return false;
    }
    key = (Hash14(data) << 1) as usize; //FIXME: works for any kind of hasher??
    i = 0usize;
    while i < if shallow { 1 } else { 2 } {
        {
            let item: usize = dictionary_hash[key] as usize;
            xself.dict_num_lookups = xself.dict_num_lookups.wrapping_add(1);
            if item != 0usize {
                let item_matches: i32 = TestStaticDictionaryItem(
                    dictionary,
                    item,
                    data,
                    max_length,
                    max_backward,
                    max_distance,
                    opts,
                    out,
                );
                if item_matches != 0 {
                    xself.dict_num_matches = xself.dict_num_matches.wrapping_add(1);
                    is_match_found = true;
                }
            }
        }
        i = i.wrapping_add(1);
        key = key.wrapping_add(1);
    }
    is_match_found
}

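// Deep copies for each hasher flavor: allocate fresh backing storage from the
// supplied allocator and copy the table contents across.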
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for BasicHasher<H2Sub<Alloc>>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = BasicHasher::<H2Sub<Alloc>> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            buckets_: H2Sub::<Alloc> {
                buckets_: <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.buckets_.len()),
            },
            h9_opts: self.h9_opts,
        };
        ret.buckets_
            .buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.buckets_.slice());
        ret
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for BasicHasher<H3Sub<Alloc>>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = BasicHasher::<H3Sub<Alloc>> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            buckets_: H3Sub::<Alloc> {
                buckets_: <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.buckets_.len()),
            },
            h9_opts: self.h9_opts,
        };
        ret.buckets_
            .buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.buckets_.slice());
        ret
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for BasicHasher<H4Sub<Alloc>>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = BasicHasher::<H4Sub<Alloc>> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            buckets_: H4Sub::<Alloc> {
                buckets_: <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.buckets_.len()),
            },
            h9_opts: self.h9_opts,
        };
        ret.buckets_
            .buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.buckets_.slice());
        ret
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for BasicHasher<H54Sub<Alloc>>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = BasicHasher::<H54Sub<Alloc>> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            buckets_: H54Sub::<Alloc> {
                buckets_: <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.len()),
            },
            h9_opts: self.h9_opts,
        };
        ret.buckets_
            .buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.buckets_.slice());
        ret
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc> for H9<Alloc> {
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut num = <Alloc as Allocator<u16>>::alloc_cell(m, self.num_.len());
        num.slice_mut().clone_from_slice(self.num_.slice());
        let mut buckets = <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.len());
        buckets.slice_mut().clone_from_slice(self.buckets_.slice());
        H9::<Alloc> {
            num_: num,
            buckets_: buckets,
            dict_search_stats_: self.dict_search_stats_.clone(),
            h9_opts: self.h9_opts,
        }
    }
}
impl<
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
        Special: AdvHashSpecialization + Sized + Clone,
    > CloneWithAlloc<Alloc> for AdvHasher<Special, Alloc>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut num = <Alloc as Allocator<u16>>::alloc_cell(m, self.num.len());
        num.slice_mut().clone_from_slice(self.num.slice());
        let mut buckets = <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets.len());
        buckets.slice_mut().clone_from_slice(self.buckets.slice());
        AdvHasher::<Special, Alloc> {
            GetHasherCommon: self.GetHasherCommon.clone(),
            specialization: self.specialization.clone(),
            num,
            buckets,
            h9_opts: self.h9_opts,
        }
    }
}

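// The closed set of hasher implementations. The encoder selects a variant
// from its hasher parameters and dispatches statically through the macros
// below, avoiding trait objects.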
pub enum UnionHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
    Uninit,
    H2(BasicHasher<H2Sub<Alloc>>),
    H3(BasicHasher<H3Sub<Alloc>>),
    H4(BasicHasher<H4Sub<Alloc>>),
    H54(BasicHasher<H54Sub<Alloc>>),
    H5(AdvHasher<H5Sub, Alloc>),
    H5q7(AdvHasher<HQ7Sub, Alloc>),
    H5q5(AdvHasher<HQ5Sub, Alloc>),
    H6(AdvHasher<H6Sub, Alloc>),
    H9(H9<Alloc>),
    H10(H10<Alloc, H10Buckets<Alloc>, H10DefaultParams>),
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> PartialEq<UnionHasher<Alloc>>
    for UnionHasher<Alloc>
{
    fn eq(&self, other: &UnionHasher<Alloc>) -> bool {
        match (self, other) {
            (UnionHasher::H2(hasher), UnionHasher::H2(otherh)) => *hasher == *otherh,
            (UnionHasher::H3(hasher), UnionHasher::H3(otherh)) => *hasher == *otherh,
            (UnionHasher::H4(hasher), UnionHasher::H4(otherh)) => *hasher == *otherh,
            (UnionHasher::H54(hasher), UnionHasher::H54(otherh)) => *hasher == *otherh,
            (UnionHasher::H5(hasher), UnionHasher::H5(otherh)) => *hasher == *otherh,
            (UnionHasher::H5q7(hasher), UnionHasher::H5q7(otherh)) => *hasher == *otherh,
            (UnionHasher::H5q5(hasher), UnionHasher::H5q5(otherh)) => *hasher == *otherh,
            (UnionHasher::H6(hasher), UnionHasher::H6(otherh)) => *hasher == *otherh,
            (UnionHasher::H9(hasher), UnionHasher::H9(otherh)) => *hasher == *otherh,
            (UnionHasher::H10(hasher), UnionHasher::H10(otherh)) => *hasher == *otherh,
            (UnionHasher::Uninit, UnionHasher::Uninit) => true,
            _ => false,
        }
    }
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
    for UnionHasher<Alloc>
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        match *self {
            UnionHasher::H2(ref hasher) => UnionHasher::H2(hasher.clone_with_alloc(m)),
            UnionHasher::H3(ref hasher) => UnionHasher::H3(hasher.clone_with_alloc(m)),
            UnionHasher::H4(ref hasher) => UnionHasher::H4(hasher.clone_with_alloc(m)),
            UnionHasher::H5(ref hasher) => UnionHasher::H5(hasher.clone_with_alloc(m)),
            UnionHasher::H5q7(ref hasher) => UnionHasher::H5q7(hasher.clone_with_alloc(m)),
            UnionHasher::H5q5(ref hasher) => UnionHasher::H5q5(hasher.clone_with_alloc(m)),
            UnionHasher::H6(ref hasher) => UnionHasher::H6(hasher.clone_with_alloc(m)),
            UnionHasher::H54(ref hasher) => UnionHasher::H54(hasher.clone_with_alloc(m)),
            UnionHasher::H9(ref hasher) => UnionHasher::H9(hasher.clone_with_alloc(m)),
            UnionHasher::H10(ref hasher) => UnionHasher::H10(hasher.clone_with_alloc(m)),
            UnionHasher::Uninit => UnionHasher::Uninit,
        }
    }
}
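// Forward a method call to whichever concrete hasher the union currently
// holds; calling through an Uninit hasher is a programmer error and panics.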
macro_rules! match_all_hashers_mut {
    ($xself : expr, $func_call : ident, $( $args:expr),*) => {
        match $xself {
            &mut UnionHasher::H2(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H3(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H4(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H5(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H5q7(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H5q5(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H6(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H54(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H9(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::H10(ref mut hasher) => hasher.$func_call($($args),*),
            &mut UnionHasher::Uninit => panic!("UNINITIALIZED"),
        }
    };
}
macro_rules! match_all_hashers {
    ($xself : expr, $func_call : ident, $( $args:expr),*) => {
        match $xself {
            &UnionHasher::H2(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H3(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H4(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H5(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H5q7(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H5q5(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H6(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H54(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H9(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::H10(ref hasher) => hasher.$func_call($($args),*),
            &UnionHasher::Uninit => panic!("UNINITIALIZED"),
        }
    };
}
impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> AnyHasher for UnionHasher<Alloc> {
    fn Opts(&self) -> H9Opts {
        match_all_hashers!(self, Opts,)
    }
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        match_all_hashers_mut!(self, GetHasherCommon,)
    } /*
      fn GetH10Tree(&mut self) -> Option<&mut H10<AllocU32, H10Buckets, H10DefaultParams>> {
        return match_all_hashers_mut!(self, GetH10Tree,);
      }*/
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
        match_all_hashers_mut!(self, Prepare, one_shot, input_size, data)
    }
    fn HashBytes(&self, data: &[u8]) -> usize {
        match_all_hashers!(self, HashBytes, data)
    }
    fn HashTypeLength(&self) -> usize {
        match_all_hashers!(self, HashTypeLength,)
    }
    fn StoreLookahead(&self) -> usize {
        match_all_hashers!(self, StoreLookahead,)
    }
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
        match_all_hashers!(self, PrepareDistanceCache, distance_cache)
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        match_all_hashers_mut!(
            self,
            StitchToPreviousBlock,
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask
        )
    }
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        match_all_hashers_mut!(
            self,
            FindLongestMatch,
            dictionary,
            dictionary_hash,
            data,
            ring_buffer_mask,
            distance_cache,
            cur_ix,
            max_length,
            max_backward,
            gap,
            max_distance,
            out
        )
    }
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        match_all_hashers_mut!(self, Store, data, mask, ix)
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        match_all_hashers_mut!(self, StoreRange, data, mask, ix_start, ix_end)
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        match_all_hashers_mut!(self, BulkStoreRange, data, mask, ix_start, ix_end)
    }
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> UnionHasher<Alloc> {
    pub fn free(&mut self, alloc: &mut Alloc) {
        match self {
            &mut UnionHasher::H2(ref mut hasher) => {
                <Alloc as Allocator<u32>>::free_cell(
                    alloc,
                    core::mem::take(&mut hasher.buckets_.buckets_),
                );
            }
            &mut UnionHasher::H3(ref mut hasher) => {
                <Alloc as Allocator<u32>>::free_cell(
                    alloc,
                    core::mem::take(&mut hasher.buckets_.buckets_),
                );
            }
            &mut UnionHasher::H4(ref mut hasher) => {
                <Alloc as Allocator<u32>>::free_cell(
                    alloc,
                    core::mem::take(&mut hasher.buckets_.buckets_),
                );
            }
            &mut UnionHasher::H54(ref mut hasher) => {
                <Alloc as Allocator<u32>>::free_cell(
                    alloc,
                    core::mem::take(&mut hasher.buckets_.buckets_),
                );
            }
            &mut UnionHasher::H5q7(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
            }
            &mut UnionHasher::H5q5(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
            }
            &mut UnionHasher::H5(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
            }
            &mut UnionHasher::H6(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
            }
            &mut UnionHasher::H9(ref mut hasher) => {
                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num_));
                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets_));
            }
            &mut UnionHasher::H10(ref mut hasher) => {
                hasher.free(alloc);
            }
            &mut UnionHasher::Uninit => {}
        }
        *self = UnionHasher::<Alloc>::default();
    }
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> Default for UnionHasher<Alloc> {
    fn default() -> Self {
        UnionHasher::Uninit
    }
}

/*UnionHasher::H2(BasicHasher {
GetHasherCommon:Struct1{params:BrotliHasherParams{
 type_:2,
 block_bits: 8,
 bucket_bits:16,
 hash_len: 4,
 num_last_distances_to_check:0},
is_prepared_:0,
dict_num_lookups:0,
dict_num_matches:0,
},
buckets_:H2Sub{
buckets_:[0;65537],
},
})
*/
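// The core greedy match loop shared by all non-H10 hashers: find the longest
// match at the current position, optionally defer to a better match one byte
// later (lazy matching), emit an insert+copy command, and skip ahead with
// progressively sparser hashing when no matches turn up.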
fn CreateBackwardReferences<AH: AnyHasher>(
    dictionary: Option<&BrotliDictionary>,
    dictionary_hash: &[u16],
    num_bytes: usize,
    mut position: usize,
    ringbuffer: &[u8],
    ringbuffer_mask: usize,
    params: &BrotliEncoderParams,
    hasher: &mut AH,
    dist_cache: &mut [i32],
    last_insert_len: &mut usize,
    mut commands: &mut [Command],
    num_commands: &mut usize,
    num_literals: &mut usize,
) {
    let gap = 0usize;
    let max_backward_limit: usize = (1usize << params.lgwin).wrapping_sub(16);
    let mut new_commands_count: usize = 0;
    let mut insert_length: usize = *last_insert_len;
    let pos_end: usize = position.wrapping_add(num_bytes);
    let store_end: usize = if num_bytes >= hasher.StoreLookahead() {
        position
            .wrapping_add(num_bytes)
            .wrapping_sub(hasher.StoreLookahead())
            .wrapping_add(1)
    } else {
        position
    };
    let random_heuristics_window_size: usize = LiteralSpreeLengthForSparseSearch(params);
    let mut apply_random_heuristics: usize = position.wrapping_add(random_heuristics_window_size);
    let kMinScore: u64 = (30u64 * 8)
        .wrapping_mul(::core::mem::size_of::<u64>() as u64)
        .wrapping_add(100);
    hasher.PrepareDistanceCache(dist_cache);
    while position.wrapping_add(hasher.HashTypeLength()) < pos_end {
        let mut max_length: usize = pos_end.wrapping_sub(position);
        let mut max_distance: usize = min(position, max_backward_limit);
        let mut sr = HasherSearchResult {
            len: 0,
            len_x_code: 0,
            distance: 0,
            score: kMinScore,
        };
        if hasher.FindLongestMatch(
            dictionary,
            dictionary_hash,
            ringbuffer,
            ringbuffer_mask,
            dist_cache,
            position,
            max_length,
            max_distance,
            gap,
            params.dist.max_distance,
            &mut sr,
        ) {
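            // Lazy matching: check whether the match starting one byte later
            // scores at least `cost_diff_lazy` better, and if so defer to it,
            // up to four times in a row.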
            let mut delayed_backward_references_in_row: i32 = 0i32;
            max_length = max_length.wrapping_sub(1);
            'break6: loop {
                'continue7: loop {
                    let cost_diff_lazy: u64 = 175;

                    let mut sr2 = HasherSearchResult {
                        len: if params.quality < 5 {
                            min(sr.len.wrapping_sub(1), max_length)
                        } else {
                            0
                        },
                        len_x_code: 0,
                        distance: 0,
                        score: kMinScore,
                    };
                    max_distance = min(position.wrapping_add(1), max_backward_limit);
                    let is_match_found: bool = hasher.FindLongestMatch(
                        dictionary,
                        dictionary_hash,
                        ringbuffer,
                        ringbuffer_mask,
                        dist_cache,
                        position.wrapping_add(1),
                        max_length,
                        max_distance,
                        gap,
                        params.dist.max_distance,
                        &mut sr2,
                    );
                    if is_match_found && (sr2.score >= sr.score.wrapping_add(cost_diff_lazy)) {
                        position = position.wrapping_add(1);
                        insert_length = insert_length.wrapping_add(1);
                        sr = sr2;
                        if {
                            delayed_backward_references_in_row += 1;
                            delayed_backward_references_in_row
                        } < 4i32
                            && (position.wrapping_add(hasher.HashTypeLength()) < pos_end)
                        {
                            break 'continue7;
                        }
                    }
                    break 'break6;
                }
                max_length = max_length.wrapping_sub(1);
            }
            apply_random_heuristics = position
                .wrapping_add((2usize).wrapping_mul(sr.len))
                .wrapping_add(random_heuristics_window_size);
            max_distance = min(position, max_backward_limit);
            {
                let distance_code: usize =
                    ComputeDistanceCode(sr.distance, max_distance, dist_cache);
                if sr.distance <= max_distance && (distance_code > 0usize) {
                    dist_cache[3] = dist_cache[2];
                    dist_cache[2] = dist_cache[1];
                    dist_cache[1] = dist_cache[0];
                    dist_cache[0] = sr.distance as i32;
                    hasher.PrepareDistanceCache(dist_cache);
                }
                new_commands_count += 1;

                let (old, new_commands) = core::mem::take(&mut commands).split_at_mut(1);
                commands = new_commands;
                old[0].init(
                    &params.dist,
                    insert_length,
                    sr.len,
                    sr.len ^ sr.len_x_code,
                    distance_code,
                );
            }
            *num_literals = num_literals.wrapping_add(insert_length);
            insert_length = 0usize;
            hasher.StoreRange(
                ringbuffer,
                ringbuffer_mask,
                position.wrapping_add(2),
                min(position.wrapping_add(sr.len), store_end),
            );
            position = position.wrapping_add(sr.len);
        } else {
            insert_length = insert_length.wrapping_add(1);
            position = position.wrapping_add(1);

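            // No match for a while: past the heuristic threshold, hash only a
            // sparse subset of positions (stride 4 when far past it, stride 2
            // otherwise) to cut per-byte cost in incompressible regions.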
            if position > apply_random_heuristics {
                let kMargin: usize = max(hasher.StoreLookahead().wrapping_sub(1), 4);
                if position.wrapping_add(16) >= pos_end.wrapping_sub(kMargin) {
                    insert_length = insert_length.wrapping_add(pos_end - position);
                    position = pos_end;
                } else if position
                    > apply_random_heuristics
                        .wrapping_add((4usize).wrapping_mul(random_heuristics_window_size))
                {
                    hasher.Store4Vec4(ringbuffer, ringbuffer_mask, position);
                    insert_length = insert_length.wrapping_add(16);
                    position = position.wrapping_add(16);
                } else {
                    hasher.StoreEvenVec4(ringbuffer, ringbuffer_mask, position);
                    insert_length = insert_length.wrapping_add(8);
                    position = position.wrapping_add(8);
                }
            }
        }
    }
    insert_length = insert_length.wrapping_add(pos_end.wrapping_sub(position));
    *last_insert_len = insert_length;
    *num_commands = num_commands.wrapping_add(new_commands_count);
}
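// Public entry point: dispatches on the active hasher. H10 routes to the
// Zopfli searches (the HQ variant at quality 11 and up); every other hasher
// runs the shared greedy loop above, with the static dictionary enabled only
// when params.use_dictionary is set.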
pub fn BrotliCreateBackwardReferences<
    Alloc: alloc::Allocator<u16>
        + alloc::Allocator<u32>
        + alloc::Allocator<u64>
        + alloc::Allocator<floatX>
        + alloc::Allocator<ZopfliNode>,
>(
    alloc: &mut Alloc,
    dictionary: &BrotliDictionary,
    num_bytes: usize,
    position: usize,
    ringbuffer: &[u8],
    ringbuffer_mask: usize,
    params: &BrotliEncoderParams,
    hasher_union: &mut UnionHasher<Alloc>,
    dist_cache: &mut [i32],
    last_insert_len: &mut usize,
    commands: &mut [Command],
    num_commands: &mut usize,
    num_literals: &mut usize,
) {
    match hasher_union {
        &mut UnionHasher::Uninit => panic!("working with uninitialized hash map"),
        &mut UnionHasher::H10(ref mut hasher) => {
            if params.quality >= 11 {
                super::backward_references_hq::BrotliCreateHqZopfliBackwardReferences(
                    alloc,
                    if params.use_dictionary {
                        Some(dictionary)
                    } else {
                        None
                    },
                    num_bytes,
                    position,
                    ringbuffer,
                    ringbuffer_mask,
                    params,
                    hasher,
                    dist_cache,
                    last_insert_len,
                    commands,
                    num_commands,
                    num_literals,
                )
            } else {
                super::backward_references_hq::BrotliCreateZopfliBackwardReferences(
                    alloc,
                    if params.use_dictionary {
                        Some(dictionary)
                    } else {
                        None
                    },
                    num_bytes,
                    position,
                    ringbuffer,
                    ringbuffer_mask,
                    params,
                    hasher,
                    dist_cache,
                    last_insert_len,
                    commands,
                    num_commands,
                    num_literals,
                )
            }
        }
        &mut UnionHasher::H2(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H3(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H4(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H5(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H5q7(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H5q5(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H6(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H9(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
        &mut UnionHasher::H54(ref mut hasher) => CreateBackwardReferences(
            if params.use_dictionary {
                Some(dictionary)
            } else {
                None
            },
            &kStaticDictionaryHash[..],
            num_bytes,
            position,
            ringbuffer,
            ringbuffer_mask,
            params,
            hasher,
            dist_cache,
            last_insert_len,
            commands,
            num_commands,
            num_literals,
        ),
    }
}