brotli/enc/backward_references/mod.rs

mod benchmark;
pub mod hash_to_binary_tree;
pub mod hq;
mod test;

use core::cmp::{max, min};

use super::super::alloc::{Allocator, SliceWrapper, SliceWrapperMut};
use super::command::{BrotliDistanceParams, Command, ComputeDistanceCode};
use super::dictionary_hash::kStaticDictionaryHash;
use super::hash_to_binary_tree::{H10Buckets, H10DefaultParams, ZopfliNode, H10};
use super::static_dict::{
    BrotliDictionary, FindMatchLengthWithLimit, FindMatchLengthWithLimitMin4,
    BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
};
use super::util::{floatX, Log2FloorNonZero};
use crate::enc::combined_alloc::allocate;

pub static kInvalidMatch: u32 = 0x0fff_ffff;
static kCutoffTransformsCount: u32 = 10;
static kCutoffTransforms: u64 = 0x071b_520a_da2d_3200;
pub static kHashMul32: u32 = 0x1e35_a7bd;
pub static kHashMul64: u64 = 0x1e35_a7bd_1e35_a7bd;
pub static kHashMul64Long: u64 = 0x1fe3_5a7b_d357_9bd3;

#[derive(PartialEq, Eq, Copy, Clone, Debug)]
#[repr(C)]
pub enum BrotliEncoderMode {
    BROTLI_MODE_GENERIC = 0,
    BROTLI_MODE_TEXT = 1,
    BROTLI_MODE_FONT = 2,
    BROTLI_FORCE_LSB_PRIOR = 3,
    BROTLI_FORCE_MSB_PRIOR = 4,
    BROTLI_FORCE_UTF8_PRIOR = 5,
    BROTLI_FORCE_SIGNED_PRIOR = 6,
}

/// Takes a match length and clamps it if the match would cross the "end of dictionary"
/// marker at the `ring_buffer_break` point, past which a backward reference cannot pull data.
/// A match must stop at the end of the dictionary: it cannot span the end of the dictionary
/// and the beginning of the file. `ring_buffer_break` is only set for a custom LZ77 dictionary.
fn fix_unbroken_len(
    unbroken_len: usize,
    prev_ix: usize,
    _cur_ix_masked: usize,
    ring_buffer_break: Option<core::num::NonZeroUsize>,
) -> usize {
    if let Some(br) = ring_buffer_break {
        if prev_ix < usize::from(br) && prev_ix + unbroken_len > usize::from(br) {
            return usize::from(br) - prev_ix;
        }
    }
    unbroken_len
}
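
// A minimal sanity check (not from the original source) of the clamping above: with a
// hypothetical break at offset 100, a match starting at prev_ix = 90 can be at most 10
// bytes long, while matches that do not cross the break are left untouched.
#[cfg(test)]
mod fix_unbroken_len_example {
    use super::fix_unbroken_len;

    #[test]
    fn clamps_at_ring_buffer_break() {
        let brk = core::num::NonZeroUsize::new(100);
        // Match would cross the break: clamp it to the break point.
        assert_eq!(fix_unbroken_len(20, 90, 0, brk), 10);
        // Match ends before the break: length is unchanged.
        assert_eq!(fix_unbroken_len(5, 90, 0, brk), 5);
        // No break configured: length is always unchanged.
        assert_eq!(fix_unbroken_len(20, 90, 0, None), 20);
    }
}
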
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct BrotliHasherParams {
    /// type of hasher to use (default: type 6, but others have speed/memory trade-offs)
    pub type_: i32,
    /// log2 of the number of buckets in the hash table (defaults to quality - 1)
    pub bucket_bits: i32,
    /// log2 of the number of potential matches (hash collisions) to hold per bucket
    pub block_bits: i32,
    /// number of bytes of a potential match to hash
    pub hash_len: i32,
    /// number of previous distance matches to check for future matches (defaults to 16)
    pub num_last_distances_to_check: i32,
    /// how much to weight an extra byte of copy length vs distance when comparing possible copy sources
    pub literal_byte_score: i32,
}

#[derive(Clone, Debug)]
pub struct BrotliEncoderParams {
    pub dist: BrotliDistanceParams,
    /// whether this brotli stream is generic data, text, or font data
    pub mode: BrotliEncoderMode,
    /// quality parameter between 0 and 11 (11 is smallest but takes longest to encode)
    pub quality: i32,
    pub q9_5: bool,
    /// log2 of how big the ring buffer should be for copying prior data
    pub lgwin: i32,
    /// log2 of how often metablocks should be serialized
    pub lgblock: i32,
    /// how big the source file is (or 0 if no hint is provided)
    pub size_hint: usize,
    // FIXME: this should be bool
    /// avoid serializing out priors for literal sections in favor of decode speed
    pub disable_literal_context_modeling: i32,
    pub hasher: BrotliHasherParams,
    /// produce an IR of the compression file
    pub log_meta_block: bool,
    /// attempt to detect how many bytes before the current byte generate the best prediction of it
    /// * 0 = off (stride 1 always)
    /// * 1 = on per 16th of a file
    /// * 2 = on per block type switch
    pub stride_detection_quality: u8,
    /// if nonzero, will search for high entropy strings and log them differently to the IR
    pub high_entropy_detection_quality: u8,
    /// if nonzero, will search for the temporal locality and effectiveness of the priors
    /// for literals; the best adaptation and forgetfulness is logged per metablock to the IR
    pub cdf_adaptation_detection: u8,
    /// whether to check, on a per-context-map basis, if the previous byte or the context_map is the better predictor
    pub prior_bitmask_detection: u8,
    /// for prior bitmask detection: stride_low, stride_speed, cm_low, cm_speed
    pub literal_adaptation: [(u16, u16); 4],
    pub large_window: bool,
    /// avoid searching for the best ndirect vs npostfix parameters for distance
    pub avoid_distance_prefix_search: bool,
    /// construct brotli in such a way that it may be concatenated with another brotli file using appropriate bit ops
    pub catable: bool,
    /// can use the dictionary (default yes, unless catable is set)
    pub use_dictionary: bool,
    /// construct brotli in such a way that another concatenatable brotli file may be appended
    pub appendable: bool,
    /// include a magic number, version number and size_hint at the beginning
    pub magic_number: bool,
    /// prefer to compute the map of previously seen strings
    /// just once for all the threads at the beginning, since they overlap significantly
    pub favor_cpu_efficiency: bool,
}

impl Default for BrotliEncoderParams {
    fn default() -> BrotliEncoderParams {
        super::encode::BrotliEncoderInitParams()
    }
}
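
// Illustrative usage sketch (not part of the original file): obtain the defaults from
// BrotliEncoderInitParams() via Default and tweak a few fields. The particular values
// chosen here (quality 9, lgwin 22) are arbitrary examples, not recommendations.
#[cfg(test)]
mod params_default_example {
    use super::BrotliEncoderParams;

    #[test]
    fn defaults_can_be_tweaked() {
        let mut params = BrotliEncoderParams::default();
        params.quality = 9;
        params.lgwin = 22; // log2 of the ring buffer size
        assert_eq!(params.quality, 9);
        assert_eq!(params.lgwin, 22);
    }
}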

#[derive(Clone, Copy, Default, PartialEq)]
pub struct H9Opts {
    pub literal_byte_score: u32,
}
pub enum HowPrepared {
    ALREADY_PREPARED,
    NEWLY_PREPARED,
}
#[derive(Clone, PartialEq)]
pub struct Struct1 {
    pub params: BrotliHasherParams,
    /// FIXME: this should be bool
    pub is_prepared_: i32,
    pub dict_num_lookups: usize,
    pub dict_num_matches: usize,
}

fn LiteralSpreeLengthForSparseSearch(params: &BrotliEncoderParams) -> usize {
    (if params.quality < 9 { 64i32 } else { 512i32 }) as usize
}

pub struct HasherSearchResult {
    pub len: usize,
    pub len_x_code: usize,
    pub distance: usize,
    pub score: u64,
}

pub trait CloneWithAlloc<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self;
}

pub trait AnyHasher {
    fn Opts(&self) -> H9Opts;
    fn GetHasherCommon(&mut self) -> &mut Struct1;
    fn HashBytes(&self, data: &[u8]) -> usize;
    fn HashTypeLength(&self) -> usize;
    fn StoreLookahead(&self) -> usize;
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]);
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        ring_buffer_break: Option<core::num::NonZeroUsize>,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool;
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize);
    fn Store4Vec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        for i in 0..4 {
            self.Store(data, mask, ix + i * 4);
        }
    }
    fn StoreEvenVec4(&mut self, data: &[u8], mask: usize, ix: usize) {
        for i in 0..4 {
            self.Store(data, mask, ix + i * 2);
        }
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize);
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize);
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared;
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    );
}

pub fn StitchToPreviousBlockInternal<T: AnyHasher>(
    handle: &mut T,
    num_bytes: usize,
    position: usize,
    ringbuffer: &[u8],
    ringbuffer_mask: usize,
) {
    if num_bytes >= handle.HashTypeLength().wrapping_sub(1) && (position >= 3) {
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(3));
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(2));
        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(1));
    }
}

pub fn StoreLookaheadThenStore<T: AnyHasher>(hasher: &mut T, size: usize, dict: &[u8]) {
    let overlap = hasher.StoreLookahead().wrapping_sub(1);
    if size > overlap {
        hasher.BulkStoreRange(dict, usize::MAX, 0, size - overlap);
    }
}

pub trait BasicHashComputer {
    fn HashBytes(&self, data: &[u8]) -> u32;
    fn BUCKET_BITS(&self) -> i32;
    fn USE_DICTIONARY(&self) -> i32;
    fn BUCKET_SWEEP(&self) -> i32;
}
pub struct BasicHasher<Buckets: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> {
    pub GetHasherCommon: Struct1,
    pub buckets_: Buckets,
    pub h9_opts: H9Opts,
}

impl<A: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> PartialEq<BasicHasher<A>>
    for BasicHasher<A>
{
    fn eq(&self, other: &BasicHasher<A>) -> bool {
        self.GetHasherCommon == other.GetHasherCommon
            && self.h9_opts == other.h9_opts
            && self.buckets_.slice() == other.buckets_.slice()
    }
}

impl<T: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> BasicHasher<T> {
    fn StoreRangeOptBasic(
        &mut self,
        data: &[u8],
        mask: usize,
        ix_start: usize,
        ix_end: usize,
    ) -> usize {
        let lookahead = 8;
        if ix_end >= ix_start + lookahead * 2 {
            let chunk_count = (ix_end - ix_start) / 4;
            for chunk_id in 0..chunk_count {
                let i = (ix_start + chunk_id * 4) & mask;
                let word11 = data.split_at(i).1.split_at(11).0;
                let mixed0 = self.HashBytes(word11);
                let mixed1 = self.HashBytes(word11.split_at(1).1);
                let mixed2 = self.HashBytes(word11.split_at(2).1);
                let mixed3 = self.HashBytes(word11.split_at(3).1);
                let off: u32 = (i >> 3).wrapping_rem(self.buckets_.BUCKET_SWEEP() as usize) as u32;
                let offset0: usize = mixed0 + off as usize;
                let offset1: usize = mixed1 + off as usize;
                let offset2: usize = mixed2 + off as usize;
                let offset3: usize = mixed3 + off as usize;
                self.buckets_.slice_mut()[offset0] = i as u32;
                self.buckets_.slice_mut()[offset1] = i as u32 + 1;
                self.buckets_.slice_mut()[offset2] = i as u32 + 2;
                self.buckets_.slice_mut()[offset3] = i as u32 + 3;
            }
            return ix_start + chunk_count * 4;
        }
        ix_start
    }
}
pub struct H2Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 65537
}
impl<T: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> AnyHasher for BasicHasher<T> {
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        self.h9_opts
    }
    #[allow(unused_variables)]
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {}
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        8
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        8
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask);
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.GetHasherCommon
    }
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        self.buckets_.HashBytes(data) as usize
    }
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let (_, data_window) = data.split_at(ix & mask);
        let key: u32 = self.HashBytes(data_window) as u32;
        let off: u32 = (ix >> 3).wrapping_rem(self.buckets_.BUCKET_SWEEP() as usize) as u32;
        self.buckets_.slice_mut()[key.wrapping_add(off) as usize] = ix as u32;
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in self.StoreRangeOptBasic(data, mask, ix_start, ix_end)..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        self.StoreRange(data, mask, ix_start, ix_end);
    }
    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
        if self.GetHasherCommon.is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        let partial_prepare_threshold = (4 << self.buckets_.BUCKET_BITS()) >> 7;
        if one_shot && input_size <= partial_prepare_threshold {
            for i in 0..input_size {
                let key = self.HashBytes(&data[i..]);
                let bs = self.buckets_.BUCKET_SWEEP() as usize;
                for item in self.buckets_.slice_mut()[key..(key + bs)].iter_mut() {
                    *item = 0;
                }
            }
        } else {
            for item in self.buckets_.slice_mut().iter_mut() {
                *item = 0;
            }
        }
        self.GetHasherCommon.is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }

    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        ring_buffer_break: Option<core::num::NonZeroUsize>,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        let opts = self.Opts();
        let best_len_in: usize = out.len;
        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
        let key: u32 = self.HashBytes(&data[cur_ix_masked..]) as u32;
        let mut compare_char: i32 = data[cur_ix_masked.wrapping_add(best_len_in)] as i32;
        let mut best_score: u64 = out.score;
        let mut best_len: usize = best_len_in;
        let cached_backward: usize = distance_cache[0] as usize;
        let mut prev_ix: usize = cur_ix.wrapping_sub(cached_backward);
        let mut is_match_found = false;
        out.len_x_code = 0usize;
        if prev_ix < cur_ix {
            prev_ix &= ring_buffer_mask as u32 as usize;
            if compare_char == data[prev_ix.wrapping_add(best_len)] as i32 {
                let unbroken_len: usize = FindMatchLengthWithLimitMin4(
                    &data[prev_ix..],
                    &data[cur_ix_masked..],
                    max_length,
                );
                if unbroken_len != 0 {
                    let len =
                        fix_unbroken_len(unbroken_len, prev_ix, cur_ix_masked, ring_buffer_break);
                    best_score = BackwardReferenceScoreUsingLastDistance(len, opts);
                    best_len = len;
                    out.len = len;
                    out.distance = cached_backward;
                    out.score = best_score;
                    compare_char = data[cur_ix_masked.wrapping_add(best_len)] as i32;
                    if self.buckets_.BUCKET_SWEEP() == 1i32 {
                        self.buckets_.slice_mut()[key as usize] = cur_ix as u32;
                        return true;
                    } else {
                        is_match_found = true;
                    }
                }
            }
        }
        let bucket_sweep = self.buckets_.BUCKET_SWEEP();
        if bucket_sweep == 1i32 {
            prev_ix = self.buckets_.slice()[key as usize] as usize;
            self.buckets_.slice_mut()[key as usize] = cur_ix as u32;
            let backward: usize = cur_ix.wrapping_sub(prev_ix);
            prev_ix &= ring_buffer_mask as u32 as usize;
            if compare_char != data[prev_ix.wrapping_add(best_len_in)] as i32 {
                return false;
            }
            if backward == 0usize || backward > max_backward {
                return false;
            }
            let unbroken_len: usize =
                FindMatchLengthWithLimitMin4(&data[prev_ix..], &data[cur_ix_masked..], max_length);
            if unbroken_len != 0 {
                let len = fix_unbroken_len(unbroken_len, prev_ix, cur_ix_masked, ring_buffer_break);
                out.len = len;
                out.distance = backward;
                out.score = BackwardReferenceScore(len, backward, opts);
                return true;
            }
        } else {
            for prev_ix_ref in
                self.buckets_.slice().split_at(key as usize).1[..bucket_sweep as usize].iter()
            {
                let mut prev_ix = *prev_ix_ref as usize;
                let backward: usize = cur_ix.wrapping_sub(prev_ix);
                prev_ix &= ring_buffer_mask as u32 as usize;
                if compare_char != data[prev_ix.wrapping_add(best_len)] as i32 {
                    continue;
                }
                if backward == 0usize || backward > max_backward {
                    continue;
                }
                let unbroken_len = FindMatchLengthWithLimitMin4(
                    &data[prev_ix..],
                    &data[cur_ix_masked..],
                    max_length,
                );

                if unbroken_len != 0 {
                    let len =
                        fix_unbroken_len(unbroken_len, prev_ix, cur_ix_masked, ring_buffer_break);
                    let score: u64 = BackwardReferenceScore(len, backward, opts);
                    if best_score < score {
                        best_score = score;
                        best_len = len;
                        out.len = best_len;
                        out.distance = backward;
                        out.score = score;
                        compare_char = data[cur_ix_masked.wrapping_add(best_len)] as i32;
                        is_match_found = true;
                    }
                }
            }
        }
        if dictionary.is_some() && self.buckets_.USE_DICTIONARY() != 0 && !is_match_found {
            is_match_found = SearchInStaticDictionary(
                dictionary.unwrap(),
                dictionary_hash,
                self,
                &data[cur_ix_masked..],
                max_length,
                max_backward.wrapping_add(gap),
                max_distance,
                out,
                true,
            );
        }
        self.buckets_.slice_mut()
            [(key as usize).wrapping_add((cur_ix >> 3).wrapping_rem(bucket_sweep as usize))] =
            cur_ix as u32;
        is_match_found
    }
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H2Sub<AllocU32> {
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 16i32)) as u32
    }
    fn BUCKET_BITS(&self) -> i32 {
        16
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        1
    }
    fn USE_DICTIONARY(&self) -> i32 {
        1
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H2Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        return self.buckets_.slice_mut();
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H2Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        return self.buckets_.slice();
    }
}
pub struct H3Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 65538
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H3Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        return self.buckets_.slice_mut();
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H3Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        return self.buckets_.slice();
    }
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H3Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        16
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        2
    }
    fn USE_DICTIONARY(&self) -> i32 {
        0
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 16i32)) as u32
    }
}
pub struct H4Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory, // 131076
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H4Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        17
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        4
    }
    fn USE_DICTIONARY(&self) -> i32 {
        1
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 17i32)) as u32
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H4Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        return self.buckets_.slice_mut();
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H4Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        return self.buckets_.slice();
    }
}
pub struct H54Sub<AllocU32: alloc::Allocator<u32>> {
    pub buckets_: AllocU32::AllocatedMemory,
}
impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H54Sub<AllocU32> {
    fn BUCKET_BITS(&self) -> i32 {
        20
    }
    fn BUCKET_SWEEP(&self) -> i32 {
        4
    }
    fn USE_DICTIONARY(&self) -> i32 {
        0
    }
    fn HashBytes(&self, data: &[u8]) -> u32 {
        let h: u64 =
            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 7i32)).wrapping_mul(kHashMul64);
        (h >> (64i32 - 20i32)) as u32
    }
}

impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H54Sub<AllocU32> {
    fn slice_mut(&mut self) -> &mut [u32] {
        return self.buckets_.slice_mut();
    }
}
impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H54Sub<AllocU32> {
    fn slice(&self) -> &[u32] {
        return self.buckets_.slice();
    }
}
pub const H9_BUCKET_BITS: usize = 15;
pub const H9_BLOCK_BITS: usize = 8;
pub const H9_NUM_LAST_DISTANCES_TO_CHECK: usize = 16;
pub const H9_BLOCK_SIZE: usize = 1 << H9_BLOCK_BITS;
const H9_BLOCK_MASK: usize = (1 << H9_BLOCK_BITS) - 1;

impl H9Opts {
    pub fn new(params: &BrotliHasherParams) -> H9Opts {
        H9Opts {
            literal_byte_score: if params.literal_byte_score != 0 {
                params.literal_byte_score as u32
            } else {
                540
            },
        }
    }
}

pub struct H9<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
    pub num_: <Alloc as Allocator<u16>>::AllocatedMemory, //[u16;1 << H9_BUCKET_BITS],
    pub buckets_: <Alloc as Allocator<u32>>::AllocatedMemory, //[u32; H9_BLOCK_SIZE << H9_BUCKET_BITS],
    pub dict_search_stats_: Struct1,
    pub h9_opts: H9Opts,
}

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> PartialEq<H9<Alloc>> for H9<Alloc> {
    fn eq(&self, other: &H9<Alloc>) -> bool {
        self.dict_search_stats_ == other.dict_search_stats_
            && self.num_.slice() == other.num_.slice()
            && self.buckets_.slice() == other.buckets_.slice()
            && self.h9_opts == other.h9_opts
    }
}

fn adv_prepare_distance_cache(distance_cache: &mut [i32], num_distances: i32) {
    if num_distances > 4i32 {
        let last_distance: i32 = distance_cache[0];
        distance_cache[4] = last_distance - 1i32;
        distance_cache[5] = last_distance + 1i32;
        distance_cache[6] = last_distance - 2i32;
        distance_cache[7] = last_distance + 2i32;
        distance_cache[8] = last_distance - 3i32;
        distance_cache[9] = last_distance + 3i32;
        if num_distances > 10i32 {
            let next_last_distance: i32 = distance_cache[1];
            distance_cache[10] = next_last_distance - 1i32;
            distance_cache[11] = next_last_distance + 1i32;
            distance_cache[12] = next_last_distance - 2i32;
            distance_cache[13] = next_last_distance + 2i32;
            distance_cache[14] = next_last_distance - 3i32;
            distance_cache[15] = next_last_distance + 3i32;
        }
    }
}
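
// A small check (not in the original source) of what the expansion above produces: slots
// 4..10 become last_distance +/- 1..3 and, when more than 10 distances are checked,
// slots 10..16 become next_last_distance +/- 1..3.
#[cfg(test)]
mod adv_prepare_distance_cache_example {
    use super::adv_prepare_distance_cache;

    #[test]
    fn expands_last_two_distances() {
        let mut cache = [0i32; 16];
        cache[0] = 10; // last distance
        cache[1] = 20; // next-to-last distance
        adv_prepare_distance_cache(&mut cache, 16);
        assert_eq!(cache[4..10], [9, 11, 8, 12, 7, 13]);
        assert_eq!(cache[10..16], [19, 21, 18, 22, 17, 23]);
    }
}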

pub const kDistanceCacheIndex: [u8; 16] = [0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1];

pub const kDistanceCacheOffset: [i8; 16] = [0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3];

//const BROTLI_LITERAL_BYTE_SCORE: u64 = 540;
const BROTLI_DISTANCE_BIT_PENALTY: u32 = 120;

// Score must be positive after applying maximal penalty.
const BROTLI_SCORE_BASE: u32 = (BROTLI_DISTANCE_BIT_PENALTY * 8 * 8/* sizeof usize*/);
const kDistanceShortCodeCost: [u32; 16] = [
    /* Repeat last */
    BROTLI_SCORE_BASE + 60,
    /* 2nd, 3rd, 4th last */
    BROTLI_SCORE_BASE - 95,
    BROTLI_SCORE_BASE - 117,
    BROTLI_SCORE_BASE - 127,
    /* Last with offset */
    BROTLI_SCORE_BASE - 93,
    BROTLI_SCORE_BASE - 93,
    BROTLI_SCORE_BASE - 96,
    BROTLI_SCORE_BASE - 96,
    BROTLI_SCORE_BASE - 99,
    BROTLI_SCORE_BASE - 99,
    /* 2nd last with offset */
    BROTLI_SCORE_BASE - 105,
    BROTLI_SCORE_BASE - 105,
    BROTLI_SCORE_BASE - 115,
    BROTLI_SCORE_BASE - 115,
    BROTLI_SCORE_BASE - 125,
    BROTLI_SCORE_BASE - 125,
];

fn BackwardReferenceScoreH9(
    copy_length: usize,
    backward_reference_offset: usize,
    h9_opts: H9Opts,
) -> u64 {
    (u64::from(BROTLI_SCORE_BASE)
        .wrapping_add((h9_opts.literal_byte_score as u64).wrapping_mul(copy_length as u64))
        .wrapping_sub(
            (BROTLI_DISTANCE_BIT_PENALTY as u64)
                .wrapping_mul(Log2FloorNonZero(backward_reference_offset as u64) as u64),
        ))
        >> 2
}

fn BackwardReferenceScoreUsingLastDistanceH9(
    copy_length: usize,
    distance_short_code: usize,
    h9_opts: H9Opts,
) -> u64 {
    ((h9_opts.literal_byte_score as u64)
        .wrapping_mul(copy_length as u64)
        .wrapping_add(u64::from(kDistanceShortCodeCost[distance_short_code])))
        >> 2
}
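
// Worked example (an illustration, not from the original file), using the default
// literal_byte_score of 540: BROTLI_SCORE_BASE is 120 * 8 * 8 = 7680, so a 4-byte copy at
// distance 64 scores (7680 + 540*4 - 120*log2(64)) >> 2 = 2280, while the same copy through
// the "repeat last distance" short code 0 scores (540*4 + 7680 + 60) >> 2 = 2475.
#[cfg(test)]
mod h9_score_example {
    use super::{BackwardReferenceScoreH9, BackwardReferenceScoreUsingLastDistanceH9, H9Opts};

    #[test]
    fn repeating_the_last_distance_scores_higher() {
        let opts = H9Opts {
            literal_byte_score: 540,
        };
        assert_eq!(BackwardReferenceScoreH9(4, 64, opts), 2280);
        assert_eq!(BackwardReferenceScoreUsingLastDistanceH9(4, 0, opts), 2475);
    }
}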

impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> AnyHasher for H9<Alloc> {
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        self.h9_opts
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.dict_search_stats_
    }
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        let h: u32 = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
        let thirty_two: usize = 32;
        (h >> (thirty_two.wrapping_sub(H9_BUCKET_BITS))) as usize
    }
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
        let num_distances = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
        adv_prepare_distance_cache(distance_cache, num_distances);
    }
    fn FindLongestMatch(
        &mut self,
        dictionary: Option<&BrotliDictionary>,
        dictionary_hash: &[u16],
        data: &[u8],
        ring_buffer_mask: usize,
        ring_buffer_break: Option<core::num::NonZeroUsize>,
        distance_cache: &[i32],
        cur_ix: usize,
        max_length: usize,
        max_backward: usize,
        gap: usize,
        max_distance: usize,
        out: &mut HasherSearchResult,
    ) -> bool {
        let best_len_in: usize = out.len;
        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
        let mut best_score: u64 = out.score;
        let mut best_len: usize = best_len_in;
        let mut is_match_found = false;
        out.len_x_code = 0usize;
        for i in 0..H9_NUM_LAST_DISTANCES_TO_CHECK {
            let idx = kDistanceCacheIndex[i] as usize;
            let backward =
                (distance_cache[idx] as usize).wrapping_add(kDistanceCacheOffset[i] as usize);
            let mut prev_ix = cur_ix.wrapping_sub(backward);
            if prev_ix >= cur_ix {
                continue;
            }
            if backward > max_backward {
                continue;
            }
            prev_ix &= ring_buffer_mask;
            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
                || prev_ix.wrapping_add(best_len) > ring_buffer_mask
                || data[cur_ix_masked.wrapping_add(best_len)]
                    != data[prev_ix.wrapping_add(best_len)]
            {
                continue;
            }
            {
                let unbroken_len: usize =
                    FindMatchLengthWithLimit(&data[prev_ix..], &data[cur_ix_masked..], max_length);
                if unbroken_len >= 3 || (unbroken_len == 2 && i < 2) {
                    let len =
                        fix_unbroken_len(unbroken_len, prev_ix, cur_ix_masked, ring_buffer_break);
                    let score = BackwardReferenceScoreUsingLastDistanceH9(len, i, self.h9_opts);
                    if best_score < score {
                        best_score = score;
                        best_len = len;
                        out.len = best_len;
                        out.distance = backward;
                        out.score = best_score;
                        is_match_found = true;
                    }
                }
            }
        }
        if max_length >= 4 && cur_ix_masked.wrapping_add(best_len) <= ring_buffer_mask {
            let key = self.HashBytes(data.split_at(cur_ix_masked).1);
            let bucket = &mut self
                .buckets_
                .slice_mut()
                .split_at_mut(key << H9_BLOCK_BITS)
                .1
                .split_at_mut(H9_BLOCK_SIZE)
                .0;
            assert!(bucket.len() > H9_BLOCK_MASK);
            assert_eq!(bucket.len(), H9_BLOCK_MASK + 1);
            let self_num_key = &mut self.num_.slice_mut()[key];
            let down = if *self_num_key > H9_BLOCK_SIZE as u16 {
                (*self_num_key as usize) - H9_BLOCK_SIZE
            } else {
                0usize
            };
            let mut i: usize = *self_num_key as usize;
            let mut prev_best_val = data[cur_ix_masked.wrapping_add(best_len)];
            while i > down {
                i -= 1;
                let mut prev_ix = bucket[i & H9_BLOCK_MASK] as usize;
                let backward = cur_ix.wrapping_sub(prev_ix);
                if backward > max_backward {
                    break;
                }
                prev_ix &= ring_buffer_mask;
                if prev_ix.wrapping_add(best_len) > ring_buffer_mask
                    || prev_best_val != data[prev_ix.wrapping_add(best_len)]
                {
                    continue;
                }
                {
                    let unbroken_len = FindMatchLengthWithLimit(
                        data.split_at(prev_ix).1,
                        data.split_at(cur_ix_masked).1,
                        max_length,
                    );
                    if unbroken_len >= 4 {
                        let len = fix_unbroken_len(
                            unbroken_len,
                            prev_ix,
                            cur_ix_masked,
                            ring_buffer_break,
                        );
                        /* Comparing for >= 3 does not change the semantics, but just saves
                        for a few unnecessary binary logarithms in backward reference
                        score, since we are not interested in such short matches. */
                        let score = BackwardReferenceScoreH9(len, backward, self.h9_opts);
                        if best_score < score {
                            best_score = score;
                            best_len = len;
                            out.len = best_len;
                            out.distance = backward;
                            out.score = best_score;
                            is_match_found = true;
                            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask {
                                break;
                            }
                            prev_best_val = data[cur_ix_masked.wrapping_add(best_len)];
                        }
                    }
                }
            }
            bucket[*self_num_key as usize & H9_BLOCK_MASK] = cur_ix as u32;
            *self_num_key = self_num_key.wrapping_add(1);
        }
        if !is_match_found && dictionary.is_some() {
            let (_, cur_data) = data.split_at(cur_ix_masked);
            is_match_found = SearchInStaticDictionary(
                dictionary.unwrap(),
                dictionary_hash,
                self,
                cur_data,
                max_length,
                max_backward.wrapping_add(gap),
                max_distance,
                out,
                false,
            );
        }
        is_match_found
    }

    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let (_, data_window) = data.split_at(ix & mask);
        let key: u32 = self.HashBytes(data_window) as u32;
        let self_num_key = &mut self.num_.slice_mut()[key as usize];
        let minor_ix: usize = *self_num_key as usize & H9_BLOCK_MASK;
        self.buckets_.slice_mut()[minor_ix.wrapping_add((key as usize) << H9_BLOCK_BITS)] =
            ix as u32;
        *self_num_key = self_num_key.wrapping_add(1);
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn Prepare(&mut self, _one_shot: bool, _input_size: usize, _data: &[u8]) -> HowPrepared {
        if self.GetHasherCommon().is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        for item in self.num_.slice_mut().iter_mut() {
            *item = 0;
        }
        self.GetHasherCommon().is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask)
    }
}

pub trait AdvHashSpecialization: PartialEq<Self> {
    fn get_hash_mask(&self) -> u64;
    fn set_hash_mask(&mut self, params_hash_len: i32);
    fn get_k_hash_mul(&self) -> u64;
    fn HashTypeLength(&self) -> usize;
    fn StoreLookahead(&self) -> usize;
    fn load_and_mix_word(&self, data: &[u8]) -> u64;
    fn hash_shift(&self) -> i32;
    fn bucket_size(&self) -> u32;
    fn block_mask(&self) -> u32;
    fn block_size(&self) -> u32;
    fn block_bits(&self) -> i32;
}
pub struct AdvHasher<
    Specialization: AdvHashSpecialization + Sized + Clone,
    Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
> {
    pub GetHasherCommon: Struct1,
    pub specialization: Specialization, // contains hash_mask_
    pub num: <Alloc as Allocator<u16>>::AllocatedMemory,
    pub buckets: <Alloc as Allocator<u32>>::AllocatedMemory,
    pub h9_opts: H9Opts,
}

impl<
        Specialization: AdvHashSpecialization + Sized + Clone,
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
    > PartialEq<AdvHasher<Specialization, Alloc>> for AdvHasher<Specialization, Alloc>
{
    fn eq(&self, other: &Self) -> bool {
        self.GetHasherCommon == other.GetHasherCommon
            && self.specialization == other.specialization
            && self.num.slice() == other.num.slice()
            && self.buckets.slice() == other.buckets.slice()
            && self.h9_opts == other.h9_opts
    }
}

#[derive(Clone, PartialEq)]
pub struct HQ5Sub {}
impl AdvHashSpecialization for HQ5Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        32i32 - 14 // 32 - bucket_bits
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        1 << 14
    }
    #[inline(always)]
    fn block_bits(&self) -> i32 {
        4
    }
    #[inline(always)]
    fn block_size(&self) -> u32 {
        1 << 4
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        (1 << 4) - 1
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct HQ7Sub {}
impl AdvHashSpecialization for HQ7Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        32i32 - 15 // 32 - bucket_bits
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        1 << 15
    }
    #[inline(always)]
    fn block_bits(&self) -> i32 {
        6
    }
    #[inline(always)]
    fn block_size(&self) -> u32 {
        1 << 6
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        (1 << 6) - 1
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct H5Sub {
    pub hash_shift_: i32,
    pub bucket_size_: u32,
    pub block_mask_: u32,
    pub block_bits_: i32,
}

impl AdvHashSpecialization for H5Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        self.hash_shift_
    }
    fn bucket_size(&self) -> u32 {
        self.bucket_size_
    }
    fn block_bits(&self) -> i32 {
        self.block_bits_
    }
    fn block_size(&self) -> u32 {
        1 << self.block_bits_
    }
    fn block_mask(&self) -> u32 {
        self.block_mask_
    }
    fn get_hash_mask(&self) -> u64 {
        //return 0xffff_ffff_ffff_ffff;
        0xffff_ffff // make it 32 bit
    }
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul32 as u64
    }
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
    }
    #[allow(unused_variables)]
    fn set_hash_mask(&mut self, params_hash_len: i32) {}
    fn HashTypeLength(&self) -> usize {
        4
    }
    fn StoreLookahead(&self) -> usize {
        4
    }
}

#[derive(Clone, PartialEq)]
pub struct H6Sub {
    pub hash_mask: u64,
    pub hash_shift_: i32,
    pub bucket_size_: u32,
    pub block_mask_: u32,
    pub block_bits_: i32,
}

impl AdvHashSpecialization for H6Sub {
    #[inline(always)]
    fn hash_shift(&self) -> i32 {
        self.hash_shift_
    }
    #[inline(always)]
    fn bucket_size(&self) -> u32 {
        self.bucket_size_
    }
    fn block_bits(&self) -> i32 {
        self.block_bits_
    }
    fn block_size(&self) -> u32 {
        1 << self.block_bits_
    }
    #[inline(always)]
    fn block_mask(&self) -> u32 {
        self.block_mask_
    }
    #[inline(always)]
    fn get_hash_mask(&self) -> u64 {
        self.hash_mask
    }
    #[inline(always)]
    fn set_hash_mask(&mut self, params_hash_len: i32) {
        // FIXME: this assumes params_hash_len is fairly small, or else it may result in a negative shift value
        self.hash_mask = u64::MAX >> (64i32 - 8i32 * params_hash_len);
    }
    #[inline(always)]
    fn get_k_hash_mul(&self) -> u64 {
        kHashMul64Long
    }
    #[inline(always)]
    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
        (BROTLI_UNALIGNED_LOAD64(data) & self.get_hash_mask()).wrapping_mul(self.get_k_hash_mul())
    }
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        8
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        8
    }
}
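
// A brief sketch (not in the original source) of how set_hash_mask above behaves: for a
// hash_len of 5 bytes it keeps the low 40 bits of the loaded word, so only those five
// bytes contribute to the hash. The zeroed fields below are placeholders for the example.
#[cfg(test)]
mod h6_hash_mask_example {
    use super::{AdvHashSpecialization, H6Sub};

    #[test]
    fn hash_mask_keeps_low_bytes() {
        let mut sub = H6Sub {
            hash_mask: 0,
            hash_shift_: 0,
            bucket_size_: 0,
            block_mask_: 0,
            block_bits_: 0,
        };
        sub.set_hash_mask(5); // 5-byte hash: keep the low 40 bits
        assert_eq!(sub.get_hash_mask(), (1u64 << 40) - 1);
    }
}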

fn BackwardReferencePenaltyUsingLastDistance(distance_short_code: usize) -> u64 {
    // FIXME?: double bitwise AND with the same value?
    (39u64).wrapping_add((0x0001_ca10_u64 >> (distance_short_code & 0x0e) & 0x0e))
}
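
// Illustrative values for the penalty above (derived by hand, not from the original file):
// with the 0x0001_ca10 lookup, short codes 0 and 1 map to the minimum penalty of 39 and
// codes 2 and 3 map to 43.
#[cfg(test)]
mod last_distance_penalty_example {
    use super::BackwardReferencePenaltyUsingLastDistance;

    #[test]
    fn recent_exact_distances_get_small_penalties() {
        assert_eq!(BackwardReferencePenaltyUsingLastDistance(0), 39);
        assert_eq!(BackwardReferencePenaltyUsingLastDistance(1), 39);
        assert_eq!(BackwardReferencePenaltyUsingLastDistance(2), 43);
        assert_eq!(BackwardReferencePenaltyUsingLastDistance(3), 43);
    }
}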
1138
1139impl<
1140        Specialization: AdvHashSpecialization + Clone,
1141        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
1142    > AdvHasher<Specialization, Alloc>
1143{
1144    // 7 opt
1145    // returns a new ix_start
1146    fn StoreRangeOptBatch(
1147        &mut self,
1148        data: &[u8],
1149        mask: usize,
1150        ix_start: usize,
1151        ix_end: usize,
1152    ) -> usize {
1153        let lookahead = self.specialization.StoreLookahead();
1154        if ix_end >= ix_start + lookahead * 2 && lookahead == 4 {
1155            let num = self.num.slice_mut();
1156            let buckets = self.buckets.slice_mut();
1157            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1158            assert_eq!(
1159                buckets.len(),
1160                self.specialization.bucket_size() as usize
1161                    * self.specialization.block_size() as usize
1162            );
1163            let shift = self.specialization.hash_shift();
1164            let chunk_count = (ix_end - ix_start) / 4;
1165            for chunk_id in 0..chunk_count {
1166                let i = (ix_start + chunk_id * 4) & mask;
1167                let ffffffff = 0xffff_ffff;
1168                let word = u64::from(data[i])
1169                    | (u64::from(data[i + 1]) << 8)
1170                    | (u64::from(data[i + 2]) << 16)
1171                    | (u64::from(data[i + 3]) << 24)
1172                    | (u64::from(data[i + 4]) << 32)
1173                    | (u64::from(data[i + 5]) << 40)
1174                    | (u64::from(data[i + 6]) << 48);
1175                let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
1176                    & self.specialization.get_hash_mask())
1177                    >> shift) as usize;
1178                let mixed1 = (((((word >> 8) & ffffffff) * self.specialization.get_k_hash_mul())
1179                    & self.specialization.get_hash_mask())
1180                    >> shift) as usize;
1181                let mixed2 = (((((word >> 16) & ffffffff) * self.specialization.get_k_hash_mul())
1182                    & self.specialization.get_hash_mask())
1183                    >> shift) as usize;
1184                let mixed3 = (((((word >> 24) & ffffffff) * self.specialization.get_k_hash_mul())
1185                    & self.specialization.get_hash_mask())
1186                    >> shift) as usize;
1187                let mut num_ref0 = u32::from(num[mixed0]);
1188                num[mixed0] = num_ref0.wrapping_add(1) as u16;
1189                num_ref0 &= self.specialization.block_mask();
1190                let mut num_ref1 = u32::from(num[mixed1]);
1191                num[mixed1] = num_ref1.wrapping_add(1) as u16;
1192                num_ref1 &= self.specialization.block_mask();
1193                let mut num_ref2 = u32::from(num[mixed2]);
1194                num[mixed2] = num_ref2.wrapping_add(1) as u16;
1195                num_ref2 &= self.specialization.block_mask();
1196                let mut num_ref3 = u32::from(num[mixed3]);
1197                num[mixed3] = num_ref3.wrapping_add(1) as u16;
1198                num_ref3 &= self.specialization.block_mask();
1199                let offset0: usize =
1200                    (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1201                let offset1: usize =
1202                    (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1203                let offset2: usize =
1204                    (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1205                let offset3: usize =
1206                    (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1207                buckets[offset0] = (i) as u32;
1208                buckets[offset1] = (i + 1) as u32;
1209                buckets[offset2] = (i + 2) as u32;
1210                buckets[offset3] = (i + 3) as u32;
1211            }
1212            return ix_start + chunk_count * 4;
1213        }
1214        ix_start
1215    }
1216
1217    fn BulkStoreRangeOptMemFetch(
1218        &mut self,
1219        data: &[u8],
1220        mask: usize,
1221        ix_start: usize,
1222        ix_end: usize,
1223    ) -> usize {
1224        const REG_SIZE: usize = 32usize;
1225        let lookahead = self.specialization.StoreLookahead();
1226        if mask == usize::MAX && ix_end > ix_start + REG_SIZE && lookahead == 4 {
1227            const lookahead4: usize = 4;
1228            assert_eq!(lookahead4, lookahead);
1229            let mut data64 = [0u8; REG_SIZE + lookahead4 - 1];
1230            let del = (ix_end - ix_start) / REG_SIZE;
1231            let num = self.num.slice_mut();
1232            let buckets = self.buckets.slice_mut();
1233            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1234            assert_eq!(
1235                buckets.len(),
1236                self.specialization.bucket_size() as usize
1237                    * self.specialization.block_size() as usize
1238            );
1239            let shift = self.specialization.hash_shift();
1240            for chunk_id in 0..del {
1241                let ix_offset = ix_start + chunk_id * REG_SIZE;
1242                data64[..REG_SIZE + lookahead4 - 1].clone_from_slice(
1243                    data.split_at(ix_offset)
1244                        .1
1245                        .split_at(REG_SIZE + lookahead4 - 1)
1246                        .0,
1247                );
1248                for quad_index in 0..(REG_SIZE >> 2) {
1249                    let i = quad_index << 2;
1250                    let ffffffff = 0xffff_ffff;
1251                    let word = u64::from(data64[i])
1252                        | (u64::from(data64[i + 1]) << 8)
1253                        | (u64::from(data64[i + 2]) << 16)
1254                        | (u64::from(data64[i + 3]) << 24)
1255                        | (u64::from(data64[i + 4]) << 32)
1256                        | (u64::from(data64[i + 5]) << 40)
1257                        | (u64::from(data64[i + 6]) << 48);
1258                    let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
1259                        & self.specialization.get_hash_mask())
1260                        >> shift) as usize;
1261                    let mixed1 = (((((word >> 8) & ffffffff)
1262                        * self.specialization.get_k_hash_mul())
1263                        & self.specialization.get_hash_mask())
1264                        >> shift) as usize;
1265                    let mixed2 = (((((word >> 16) & ffffffff)
1266                        * self.specialization.get_k_hash_mul())
1267                        & self.specialization.get_hash_mask())
1268                        >> shift) as usize;
1269                    let mixed3 = (((((word >> 24) & ffffffff)
1270                        * self.specialization.get_k_hash_mul())
1271                        & self.specialization.get_hash_mask())
1272                        >> shift) as usize;
1273                    let mut num_ref0 = u32::from(num[mixed0]);
1274                    num[mixed0] = num_ref0.wrapping_add(1) as u16;
1275                    num_ref0 &= self.specialization.block_mask();
1276                    let mut num_ref1 = u32::from(num[mixed1]);
1277                    num[mixed1] = num_ref1.wrapping_add(1) as u16;
1278                    num_ref1 &= self.specialization.block_mask();
1279                    let mut num_ref2 = u32::from(num[mixed2]);
1280                    num[mixed2] = num_ref2.wrapping_add(1) as u16;
1281                    num_ref2 &= self.specialization.block_mask();
1282                    let mut num_ref3 = u32::from(num[mixed3]);
1283                    num[mixed3] = num_ref3.wrapping_add(1) as u16;
1284                    num_ref3 &= self.specialization.block_mask();
1285                    let offset0: usize =
1286                        (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1287                    let offset1: usize =
1288                        (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1289                    let offset2: usize =
1290                        (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1291                    let offset3: usize =
1292                        (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1293                    buckets[offset0] = (ix_offset + i) as u32;
1294                    buckets[offset1] = (ix_offset + i + 1) as u32;
1295                    buckets[offset2] = (ix_offset + i + 2) as u32;
1296                    buckets[offset3] = (ix_offset + i + 3) as u32;
1297                }
1298            }
1299            return ix_start + del * REG_SIZE;
1300        }
1301        ix_start
1302    }
1303
1304    #[cfg(feature = "benchmark")]
1305    fn BulkStoreRangeOptMemFetchLazyDupeUpdate(
1306        &mut self,
1307        data: &[u8],
1308        mask: usize,
1309        ix_start: usize,
1310        ix_end: usize,
1311    ) -> usize {
1312        const REG_SIZE: usize = 32usize;
1313        let lookahead = self.specialization.StoreLookahead();
1314        if mask == usize::MAX && ix_end > ix_start + REG_SIZE && lookahead == 4 {
1315            const lookahead4: usize = 4;
1316            assert_eq!(lookahead4, lookahead);
1317            let mut data64 = [0u8; REG_SIZE + lookahead4];
1318            let del = (ix_end - ix_start) / REG_SIZE;
1319            let num = self.num.slice_mut();
1320            let buckets = self.buckets.slice_mut();
1321            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1322            assert_eq!(
1323                buckets.len(),
1324                self.specialization.bucket_size() as usize
1325                    * self.specialization.block_size() as usize
1326            );
1327            let shift = self.specialization.hash_shift();
1328            for chunk_id in 0..del {
1329                let ix_offset = ix_start + chunk_id * REG_SIZE;
1330                data64[..REG_SIZE + lookahead4]
1331                    .clone_from_slice(data.split_at(ix_offset).1.split_at(REG_SIZE + lookahead4).0);
1332                for quad_index in 0..(REG_SIZE >> 2) {
1333                    let i = quad_index << 2;
1334                    let ffffffff = 0xffff_ffff;
1335                    let word = u64::from(data64[i])
1336                        | (u64::from(data64[i + 1]) << 8)
1337                        | (u64::from(data64[i + 2]) << 16)
1338                        | (u64::from(data64[i + 3]) << 24)
1339                        | (u64::from(data64[i + 4]) << 32)
1340                        | (u64::from(data64[i + 5]) << 40)
1341                        | (u64::from(data64[i + 6]) << 48);
1342                    let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
1343                        & self.specialization.get_hash_mask())
1344                        >> shift) as usize;
1345                    let mixed1 = (((((word >> 8) & ffffffff)
1346                        * self.specialization.get_k_hash_mul())
1347                        & self.specialization.get_hash_mask())
1348                        >> shift) as usize;
1349                    let mixed2 = (((((word >> 16) & ffffffff)
1350                        * self.specialization.get_k_hash_mul())
1351                        & self.specialization.get_hash_mask())
1352                        >> shift) as usize;
1353                    let mixed3 = (((((word >> 24) & ffffffff)
1354                        * self.specialization.get_k_hash_mul())
1355                        & self.specialization.get_hash_mask())
1356                        >> shift) as usize;
1357                    let mut num_ref0 = u32::from(num[mixed0]);
1358                    let mut num_ref1 = u32::from(num[mixed1]);
1359                    let mut num_ref2 = u32::from(num[mixed2]);
1360                    let mut num_ref3 = u32::from(num[mixed3]);
1361                    num[mixed0] = num_ref0.wrapping_add(1) as u16;
1362                    num[mixed1] = num_ref1.wrapping_add(1) as u16;
1363                    num[mixed2] = num_ref2.wrapping_add(1) as u16;
1364                    num[mixed3] = num_ref3.wrapping_add(1) as u16;
1365                    num_ref0 &= self.specialization.block_mask();
1366                    num_ref1 &= self.specialization.block_mask();
1367                    num_ref2 &= self.specialization.block_mask();
1368                    num_ref3 &= self.specialization.block_mask();
1369                    let offset0: usize =
1370                        (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1371                    let offset1: usize =
1372                        (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1373                    let offset2: usize =
1374                        (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1375                    let offset3: usize =
1376                        (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1377                    buckets[offset0] = (ix_offset + i) as u32;
1378                    buckets[offset1] = (ix_offset + i + 1) as u32;
1379                    buckets[offset2] = (ix_offset + i + 2) as u32;
1380                    buckets[offset3] = (ix_offset + i + 3) as u32;
1381                }
1382            }
1383            return ix_start + del * REG_SIZE;
1384        }
1385        ix_start
1386    }
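    // Note on the fast path above (explanatory, not normative): when the ring buffer is
    // unmasked (mask == usize::MAX) and the hasher keys on 4 bytes, each 32-byte chunk is
    // copied into `data64` once and every aligned group of four positions shares a single
    // 7-byte little-endian load. Shifting that word by 0/8/16/24 bits exposes the four
    // overlapping 4-byte windows, each multiplied by get_k_hash_mul(), masked, and shifted
    // down to a bucket key, so position p hashes bytes p..p+4 and position p+3 hashes
    // bytes p+3..p+7.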
1387
1388    #[cfg(feature = "benchmark")]
1389    fn BulkStoreRangeOptRandomDupeUpdate(
1390        &mut self,
1391        data: &[u8],
1392        mask: usize,
1393        ix_start: usize,
1394        ix_end: usize,
1395    ) -> usize {
1396        const REG_SIZE: usize = 32usize;
1397        let lookahead = self.specialization.StoreLookahead();
1398        if mask == usize::MAX && ix_end > ix_start + REG_SIZE && lookahead == 4 {
1399            const lookahead4: usize = 4;
1400            assert_eq!(lookahead4, lookahead);
1401            let mut data64 = [0u8; REG_SIZE + lookahead4];
1402            let del = (ix_end - ix_start) / REG_SIZE;
1403            let num = self.num.slice_mut();
1404            let buckets = self.buckets.slice_mut();
1405            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1406            assert_eq!(
1407                buckets.len(),
1408                self.specialization.bucket_size() as usize
1409                    * self.specialization.block_size() as usize
1410            );
1411            let shift = self.specialization.hash_shift();
1412            for chunk_id in 0..del {
1413                let ix_offset = ix_start + chunk_id * REG_SIZE;
1414                data64[..REG_SIZE + lookahead4]
1415                    .clone_from_slice(data.split_at(ix_offset).1.split_at(REG_SIZE + lookahead4).0);
1416                for i in 0..REG_SIZE {
1417                    let mixed_word = ((u32::from(data64[i])
1418                        | (u32::from(data64[i + 1]) << 8)
1419                        | (u32::from(data64[i + 2]) << 16)
1420                        | (u32::from(data64[i + 3]) << 24))
1421                        as u64
1422                        * self.specialization.get_k_hash_mul())
1423                        & self.specialization.get_hash_mask();
1424                    let key = mixed_word >> shift;
1425                    let minor_ix: usize = chunk_id & self.specialization.block_mask() as usize; // benchmark hack: index by chunk_id instead of *num_ref & block_mask, deliberately overwriting the first slots
1426                    let offset: usize =
1427                        minor_ix + (key << self.specialization.block_bits()) as usize;
1428                    buckets[offset] = (ix_offset + i) as u32;
1429                }
1430            }
1431            for (bucket_index, num_ref) in num.iter_mut().enumerate() {
1432                let region = buckets
1433                    .split_at_mut(bucket_index << self.specialization.block_bits())
1434                    .1
1435                    .split_at_mut(self.specialization.block_size() as usize)
1436                    .0;
1437                let mut lnum = 0usize;
1438                for block_index in 0..self.specialization.block_size() as usize {
1439                    if region[block_index] != 0 {
1440                        let byte_addr = region[block_index];
1441                        region[lnum] = byte_addr;
1442                        lnum += 1;
1443                    }
1444                }
1445                *num_ref = lnum as u16;
1446            }
1447            return ix_start + del * REG_SIZE;
1448        }
1449        ix_start
1450    }
1451}
1452
1453impl<
1454        Specialization: AdvHashSpecialization + Clone,
1455        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
1456    > AnyHasher for AdvHasher<Specialization, Alloc>
1457{
1458    fn Opts(&self) -> H9Opts {
1459        self.h9_opts
1460    }
1461    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
1462        let num_distances = self.GetHasherCommon.params.num_last_distances_to_check;
1463        adv_prepare_distance_cache(distance_cache, num_distances);
1464    }
1465    fn StitchToPreviousBlock(
1466        &mut self,
1467        num_bytes: usize,
1468        position: usize,
1469        ringbuffer: &[u8],
1470        ringbuffer_mask: usize,
1471    ) {
1472        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask);
1473    }
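    // Prepare clears only as much of `num` as the input can plausibly touch: for a
    // one-shot input smaller than bucket_size() >> 6 it zeroes just the buckets hit by
    // hashing the input itself, otherwise it wipes the whole `num` table before marking
    // the hasher prepared.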
1474    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
1475        if self.GetHasherCommon.is_prepared_ != 0 {
1476            return HowPrepared::ALREADY_PREPARED;
1477        }
1478        let partial_prepare_threshold = self.specialization.bucket_size() as usize >> 6;
1479        if one_shot && input_size <= partial_prepare_threshold {
1480            for i in 0..input_size {
1481                let key = self.HashBytes(&data[i..]);
1482                self.num.slice_mut()[key] = 0;
1483            }
1484        } else {
1485            for item in
1486                self.num.slice_mut()[..(self.specialization.bucket_size() as usize)].iter_mut()
1487            {
1488                *item = 0;
1489            }
1490        }
1491        self.GetHasherCommon.is_prepared_ = 1;
1492        HowPrepared::NEWLY_PREPARED
1493    }
1494
1495    fn GetHasherCommon(&mut self) -> &mut Struct1 {
1496        &mut self.GetHasherCommon
1497    }
1498    fn HashTypeLength(&self) -> usize {
1499        self.specialization.HashTypeLength()
1500    }
1501    fn StoreLookahead(&self) -> usize {
1502        self.specialization.StoreLookahead()
1503    }
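    // HashBytes is the scalar multiplicative hash: the specialization loads and mixes
    // HashTypeLength() bytes (a multiply-and-mask), and the high bits of that product are
    // kept by shifting right by hash_shift(), giving an index into the bucket_size()-entry
    // `num` table.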
1504    fn HashBytes(&self, data: &[u8]) -> usize {
1505        let shift = self.specialization.hash_shift();
1506        let h: u64 = self.specialization.load_and_mix_word(data);
1507        (h >> shift) as u32 as usize
1508    }
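    // StoreEvenVec4 inserts hash entries for the four even offsets ix, ix+2, ix+4 and
    // ix+6 using one 8-byte load plus two extra bytes; when the specialization's
    // lookahead is not 4 it falls back to four scalar Store calls.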
1509    fn StoreEvenVec4(&mut self, data: &[u8], mask: usize, ix: usize) {
1510        if self.specialization.StoreLookahead() != 4 {
1511            for i in 0..4 {
1512                self.Store(data, mask, ix + i * 2);
1513            }
1514            return;
1515        }
1516        let shift = self.specialization.hash_shift();
1517        let num = self.num.slice_mut();
1518        let buckets = self.buckets.slice_mut();
1519        let li = ix & mask;
1520        let lword = u64::from(data[li])
1521            | (u64::from(data[li + 1]) << 8)
1522            | (u64::from(data[li + 2]) << 16)
1523            | (u64::from(data[li + 3]) << 24)
1524            | (u64::from(data[li + 4]) << 32)
1525            | (u64::from(data[li + 5]) << 40)
1526            | (u64::from(data[li + 6]) << 48)
1527            | (u64::from(data[li + 7]) << 56);
1528        let hi = (ix + 8) & mask;
1529        let hword = u64::from(data[hi]) | (u64::from(data[hi + 1]) << 8);
1530        let mixed0 = ((((lword & 0xffff_ffff) * self.specialization.get_k_hash_mul())
1531            & self.specialization.get_hash_mask())
1532            >> shift) as usize;
1533        let mixed1 = (((((lword >> 16) & 0xffff_ffff) * self.specialization.get_k_hash_mul())
1534            & self.specialization.get_hash_mask())
1535            >> shift) as usize;
1536        let mixed2 = (((((lword >> 32) & 0xffff_ffff) * self.specialization.get_k_hash_mul())
1537            & self.specialization.get_hash_mask())
1538            >> shift) as usize;
1539        let mixed3 = ((((((hword & 0xffff) << 16) | ((lword >> 48) & 0xffff))
1540            * self.specialization.get_k_hash_mul())
1541            & self.specialization.get_hash_mask())
1542            >> shift) as usize;
1543        let mut num_ref0 = u32::from(num[mixed0]);
1544        num[mixed0] = num_ref0.wrapping_add(1) as u16;
1545        num_ref0 &= self.specialization.block_mask();
1546        let mut num_ref1 = u32::from(num[mixed1]);
1547        num[mixed1] = num_ref1.wrapping_add(1) as u16;
1548        num_ref1 &= self.specialization.block_mask();
1549        let mut num_ref2 = u32::from(num[mixed2]);
1550        num[mixed2] = num_ref2.wrapping_add(1) as u16;
1551        num_ref2 &= self.specialization.block_mask();
1552        let mut num_ref3 = u32::from(num[mixed3]);
1553        num[mixed3] = num_ref3.wrapping_add(1) as u16;
1554        num_ref3 &= self.specialization.block_mask();
1555        let offset0: usize = (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1556        let offset1: usize = (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1557        let offset2: usize = (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1558        let offset3: usize = (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1559        buckets[offset0] = ix as u32;
1560        buckets[offset1] = (ix + 2) as u32;
1561        buckets[offset2] = (ix + 4) as u32;
1562        buckets[offset3] = (ix + 6) as u32;
1563    }
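    // Store4Vec4 is the stride-4 variant: it records positions ix, ix+4, ix+8 and ix+12
    // from two 8-byte regions, hashing one non-overlapping 4-byte window per position;
    // with a lookahead other than 4 it likewise falls back to scalar Store calls.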
1564    fn Store4Vec4(&mut self, data: &[u8], mask: usize, ix: usize) {
1565        if self.specialization.StoreLookahead() != 4 {
1566            for i in 0..4 {
1567                self.Store(data, mask, ix + i * 4);
1568            }
1569            return;
1570        }
1571        let shift = self.specialization.hash_shift();
1572        let num = self.num.slice_mut();
1573        let buckets = self.buckets.slice_mut();
1574        let li = ix & mask;
1575        let llword = u32::from(data[li])
1576            | (u32::from(data[li + 1]) << 8)
1577            | (u32::from(data[li + 2]) << 16)
1578            | (u32::from(data[li + 3]) << 24);
1579        let luword = u32::from(data[li + 4])
1580            | (u32::from(data[li + 5]) << 8)
1581            | (u32::from(data[li + 6]) << 16)
1582            | (u32::from(data[li + 7]) << 24);
1583        let ui = (ix + 8) & mask;
1584        let ulword = u32::from(data[ui])
1585            | (u32::from(data[ui + 1]) << 8)
1586            | (u32::from(data[ui + 2]) << 16)
1587            | (u32::from(data[ui + 3]) << 24);
1588
1589        let uuword = u32::from(data[ui + 4])
1590            | (u32::from(data[ui + 5]) << 8)
1591            | (u32::from(data[ui + 6]) << 16)
1592            | (u32::from(data[ui + 7]) << 24);
1593
1594        let mixed0 = (((u64::from(llword) * self.specialization.get_k_hash_mul())
1595            & self.specialization.get_hash_mask())
1596            >> shift) as usize;
1597        let mixed1 = (((u64::from(luword) * self.specialization.get_k_hash_mul())
1598            & self.specialization.get_hash_mask())
1599            >> shift) as usize;
1600        let mixed2 = (((u64::from(ulword) * self.specialization.get_k_hash_mul())
1601            & self.specialization.get_hash_mask())
1602            >> shift) as usize;
1603        let mixed3 = (((u64::from(uuword) * self.specialization.get_k_hash_mul())
1604            & self.specialization.get_hash_mask())
1605            >> shift) as usize;
1606        let mut num_ref0 = u32::from(num[mixed0]);
1607        num[mixed0] = num_ref0.wrapping_add(1) as u16;
1608        num_ref0 &= self.specialization.block_mask();
1609        let mut num_ref1 = u32::from(num[mixed1]);
1610        num[mixed1] = num_ref1.wrapping_add(1) as u16;
1611        num_ref1 &= self.specialization.block_mask();
1612        let mut num_ref2 = u32::from(num[mixed2]);
1613        num[mixed2] = num_ref2.wrapping_add(1) as u16;
1614        num_ref2 &= self.specialization.block_mask();
1615        let mut num_ref3 = u32::from(num[mixed3]);
1616        num[mixed3] = num_ref3.wrapping_add(1) as u16;
1617        num_ref3 &= self.specialization.block_mask();
1618        let offset0: usize = (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1619        let offset1: usize = (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1620        let offset2: usize = (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1621        let offset3: usize = (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1622        buckets[offset0] = ix as u32;
1623        buckets[offset1] = (ix + 4) as u32;
1624        buckets[offset2] = (ix + 8) as u32;
1625        buckets[offset3] = (ix + 12) as u32;
1626    }
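    // Store inserts a single position: the hash of the next StoreLookahead() bytes picks a
    // bucket, and within that bucket the slot is num[key] & block_mask(), so each bucket is
    // a small ring holding the block_size() most recent positions. Illustrative example:
    // with block_bits() == 2 (block_mask() == 3) the fifth Store for a given key overwrites
    // the slot written by the first.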
1627    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
1628        let (_, data_window) = data.split_at(ix & mask);
1629        let key: u32 = self.HashBytes(data_window) as u32;
1630        let minor_ix: usize =
1631            (self.num.slice()[(key as usize)] as u32 & self.specialization.block_mask()) as usize;
1632        let offset: usize =
1633            minor_ix.wrapping_add((key << self.specialization.block_bits()) as usize);
1634        self.buckets.slice_mut()[offset] = ix as u32;
1635        {
1636            let _lhs = &mut self.num.slice_mut()[(key as usize)];
1637            *_lhs = (*_lhs as i32 + 1) as u16;
1638        }
1639    }
1640    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
1641        for i in self.StoreRangeOptBatch(data, mask, ix_start, ix_end)..ix_end {
1642            self.Store(data, mask, i);
1643        }
1644    }
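    // BulkStoreRange runs the 32-byte batched path first and then finishes the tail with
    // scalar Store; the commented-out vectorized variants inside the function are kept only
    // for reference.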
1645    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, mut ix_start: usize, ix_end: usize) {
1646        /*
1647        if ix_start + 4096 < ix_end {
1648          for vec_offset in 0..(ix_end - ix_start - 4096) / 16 {
1649            self.Store4Vec4(data, mask, ix_start + vec_offset * 16);
1650          }
1651          ix_start += 16 * ((ix_end - ix_start - 4096) / 16);
1652        }
1653        if ix_start + 512 < ix_end {
1654          for vec_offset in 0..(ix_end - ix_start - 512) / 8 {
1655            self.StoreEvenVec4(data, mask, ix_start + vec_offset * 8);
1656            //self.StoreRange(data, mask, ix_start + vec_offset * 8, ix_start + (1+ vec_offset) * 8);
1657          }
1658          ix_start += 8 * ((ix_end - ix_start - 512) / 8);
1659        }
1660         */
1661        ix_start = self.BulkStoreRangeOptMemFetch(data, mask, ix_start, ix_end);
1662        for i in ix_start..ix_end {
1663            self.Store(data, mask, i);
1664        }
1665    }
1666
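    // FindLongestMatch searches in three stages, keeping the best-scoring candidate in
    // `out`: (1) the positions implied by the recent distances in `distance_cache`, scored
    // with the last-distance bonus/penalty; (2) the hash bucket for the current key, walked
    // from newest to oldest and abandoned once a candidate lies beyond max_backward; (3) the
    // static dictionary, but only if nothing else matched. The current position is always
    // appended to its bucket before returning.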
1667    fn FindLongestMatch(
1668        &mut self,
1669        dictionary: Option<&BrotliDictionary>,
1670        dictionary_hash: &[u16],
1671        data: &[u8],
1672        ring_buffer_mask: usize,
1673        ring_buffer_break: Option<core::num::NonZeroUsize>,
1674        distance_cache: &[i32],
1675        cur_ix: usize,
1676        max_length: usize,
1677        max_backward: usize,
1678        gap: usize,
1679        max_distance: usize,
1680        out: &mut HasherSearchResult,
1681    ) -> bool {
1682        let opts = self.Opts();
1683        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
1684        let mut is_match_found = false;
1685        let mut best_score: u64 = out.score;
1686        let mut best_len: usize = out.len;
1687        out.len = 0usize;
1688        out.len_x_code = 0usize;
1689        let cur_data = data.split_at(cur_ix_masked).1;
1690        for i in 0..self.GetHasherCommon.params.num_last_distances_to_check as usize {
1691            let backward: usize = distance_cache[i] as usize;
1692            let mut prev_ix: usize = cur_ix.wrapping_sub(backward);
1693            if prev_ix >= cur_ix || backward > max_backward {
1694                continue;
1695            }
1696            prev_ix &= ring_buffer_mask;
1697            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
1698                || prev_ix.wrapping_add(best_len) > ring_buffer_mask
1699                || cur_data[best_len] != data[prev_ix.wrapping_add(best_len)]
1700            {
1701                continue;
1702            }
1703            let prev_data = data.split_at(prev_ix).1;
1704
1705            let unbroken_len = FindMatchLengthWithLimit(prev_data, cur_data, max_length);
1706            if unbroken_len >= 3 || (unbroken_len == 2 && i < 2) {
1707                let len = fix_unbroken_len(unbroken_len, prev_ix, cur_ix_masked, ring_buffer_break);
1708                let mut score: u64 = BackwardReferenceScoreUsingLastDistance(len, opts);
1709                if best_score < score {
1710                    if i != 0 {
1711                        score = score.wrapping_sub(BackwardReferencePenaltyUsingLastDistance(i));
1712                    }
1713                    if best_score < score {
1714                        best_score = score;
1715                        best_len = len;
1716                        out.len = best_len;
1717                        out.distance = backward;
1718                        out.score = best_score;
1719                        is_match_found = true;
1720                    }
1721                }
1722            }
1723        }
1724
1725        let key: u32 = self.HashBytes(cur_data) as u32;
1726        let common_block_bits = self.specialization.block_bits();
1727        let num_ref_mut = &mut self.num.slice_mut()[key as usize];
1728        let num_copy = *num_ref_mut;
1729        let bucket: &mut [u32] = self
1730            .buckets
1731            .slice_mut()
1732            .split_at_mut((key << common_block_bits) as usize)
1733            .1
1734            .split_at_mut(self.specialization.block_size() as usize)
1735            .0;
1736        assert!(bucket.len() > self.specialization.block_mask() as usize);
1737        if num_copy != 0 {
1738            let down: usize = max(
1739                i32::from(num_copy) - self.specialization.block_size() as i32,
1740                0,
1741            ) as usize;
1742            let mut i = num_copy as usize;
1743            while i > down {
1744                i -= 1;
1745                let mut prev_ix = bucket[i & self.specialization.block_mask() as usize] as usize;
1746                let backward = cur_ix.wrapping_sub(prev_ix);
1747                prev_ix &= ring_buffer_mask;
1748                if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
1749                    || prev_ix.wrapping_add(best_len) > ring_buffer_mask
1750                    || cur_data[best_len] != data[prev_ix.wrapping_add(best_len)]
1751                {
1752                    if backward > max_backward {
1753                        break;
1754                    }
1755                    continue;
1756                }
1757                if backward > max_backward {
1758                    break;
1759                }
1760                let prev_data = data.split_at(prev_ix).1;
1761                let unbroken_len = FindMatchLengthWithLimitMin4(prev_data, cur_data, max_length);
1762                if unbroken_len != 0 {
1763                    let len = fix_unbroken_len(unbroken_len, prev_ix, cur_ix_masked, ring_buffer_break);
1764                    let score: u64 = BackwardReferenceScore(len, backward, opts);
1765                    if best_score < score {
1766                        best_score = score;
1767                        best_len = len;
1768                        out.len = best_len;
1769                        out.distance = backward;
1770                        out.score = best_score;
1771                        is_match_found = true;
1772                    }
1773                }
1774            }
1775        }
1776        bucket[(num_copy as u32 & self.specialization.block_mask()) as usize] = cur_ix as u32;
1777        *num_ref_mut = num_ref_mut.wrapping_add(1);
1778
1779        if !is_match_found && dictionary.is_some() {
1780            let (_, cur_data) = data.split_at(cur_ix_masked);
1781            is_match_found = SearchInStaticDictionary(
1782                dictionary.unwrap(),
1783                dictionary_hash,
1784                self,
1785                cur_data,
1786                max_length,
1787                max_backward.wrapping_add(gap),
1788                max_distance,
1789                out,
1790                false,
1791            );
1792        }
1793        is_match_found
1794    }
1795}
1796
1797pub struct BankH40 {
1798    pub slots: [SlotH40; 65536],
1799}
1800
1801pub struct BankH41 {
1802    pub slots: [SlotH41; 65536],
1803}
1804
1805pub struct BankH42 {
1806    pub slots: [SlotH42; 512],
1807}
1808
1809pub struct SlotH40 {
1810    pub delta: u16,
1811    pub next: u16,
1812}
1813pub struct SlotH41 {
1814    pub delta: u16,
1815    pub next: u16,
1816}
1817
1818pub struct SlotH42 {
1819    pub delta: u16,
1820    pub next: u16,
1821}
1822
1823// UNSUPPORTED, for now.
1824pub struct H40 {
1825    pub common: Struct1,
1826    pub addr: [u32; 32768],
1827    pub head: [u16; 32768],
1828    pub tiny_hash: [u8; 65536],
1829    pub banks: [BankH40; 1],
1830    pub free_slot_idx: [u16; 1],
1831    pub max_hops: usize,
1832}
1833
1834pub struct H41 {
1835    pub common: Struct1,
1836    pub addr: [u32; 32768],
1837    pub head: [u16; 32768],
1838    pub tiny_hash: [u8; 65536],
1839    pub banks: [BankH41; 1],
1840    pub free_slot_idx: [u16; 1],
1841    pub max_hops: usize,
1842}
1843
1844pub struct H42 {
1845    pub common: Struct1,
1846    pub addr: [u32; 32768],
1847    pub head: [u16; 32768],
1848    pub tiny_hash: [u8; 65536],
1849    pub banks: [BankH42; 512],
1850    pub max_hops: usize,
1851}
1852
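// Both scores below are fixed-point "value of taking this copy" estimates: the base term
// 30 * 8 * size_of::<u64>() = 1920 is shared, each copied byte adds literal_byte_score / 4,
// and BackwardReferenceScore additionally subtracts 30 * floor(log2(distance)) so nearer
// matches win ties. Illustrative arithmetic only (540 is an assumed literal_byte_score, not
// necessarily the default): a 4-byte copy at distance 64 would score
// 1920 + 135 * 4 - 30 * 6 = 2280, while the same length found via a cached distance gets a
// flat +15 bonus instead of the distance penalty.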
1853fn BackwardReferenceScoreUsingLastDistance(copy_length: usize, h9_opts: H9Opts) -> u64 {
1854    ((h9_opts.literal_byte_score as u64) >> 2)
1855        .wrapping_mul(copy_length as u64)
1856        .wrapping_add((30u64 * 8u64).wrapping_mul(::core::mem::size_of::<u64>() as u64))
1857        .wrapping_add(15)
1858}
1859
1860fn BackwardReferenceScore(
1861    copy_length: usize,
1862    backward_reference_offset: usize,
1863    h9_opts: H9Opts,
1864) -> u64 {
1865    (30u64 * 8u64)
1866        .wrapping_mul(::core::mem::size_of::<u64>() as u64)
1867        .wrapping_add(((h9_opts.literal_byte_score as usize) >> 2).wrapping_mul(copy_length) as u64)
1868        .wrapping_sub(
1869            (30u64).wrapping_mul(Log2FloorNonZero(backward_reference_offset as u64) as u64),
1870        )
1871}
1872
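// Hash14 is the probe used for the static dictionary: multiply the first four bytes by
// kHashMul32 and keep the top 14 bits. SearchInStaticDictionary doubles the result, so each
// 14-bit hash addresses a pair of adjacent candidate slots in dictionary_hash.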
1873fn Hash14(data: &[u8]) -> u32 {
1874    let h: u32 = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
1875    h >> (32i32 - 14i32)
1876}
1877
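// A dictionary_hash entry packs the candidate word: the low 5 bits are its length and the
// remaining bits its index within that length class, so the word's bytes start at
// offsets_by_length[len] + len * dist. If the input matches only a prefix of the word, the
// shortfall selects a cutoff transform whose id is folded into a distance beyond
// max_backward, which lets the decoder reconstruct the truncated form.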
1878fn TestStaticDictionaryItem(
1879    dictionary: &BrotliDictionary,
1880    item: usize,
1881    data: &[u8],
1882    max_length: usize,
1883    max_backward: usize,
1884    max_distance: usize,
1885    h9_opts: H9Opts,
1886    out: &mut HasherSearchResult,
1887) -> i32 {
1888    let backward: usize;
1889
1890    let len: usize = item & 0x1fusize;
1891    let dist: usize = item >> 5;
1892    let offset: usize =
1893        (dictionary.offsets_by_length[len] as usize).wrapping_add(len.wrapping_mul(dist));
1894    if len > max_length {
1895        return 0i32;
1896    }
1897    let matchlen: usize = FindMatchLengthWithLimit(data, &dictionary.data[offset..], len);
1898    if matchlen.wrapping_add(kCutoffTransformsCount as usize) <= len || matchlen == 0usize {
1899        return 0i32;
1900    }
1901    {
1902        let cut: u64 = len.wrapping_sub(matchlen) as u64;
1903        let transform_id: usize =
1904            (cut << 2).wrapping_add(kCutoffTransforms >> cut.wrapping_mul(6) & 0x3f) as usize;
1905        backward = max_backward
1906            .wrapping_add(dist)
1907            .wrapping_add(1)
1908            .wrapping_add(transform_id << dictionary.size_bits_by_length[len] as i32);
1909    }
1910    if backward > max_distance {
1911        return 0i32;
1912    }
1913    let score: u64 = BackwardReferenceScore(matchlen, backward, h9_opts);
1914    if score < out.score {
1915        return 0i32;
1916    }
1917    out.len = matchlen;
1918    out.len_x_code = len ^ matchlen;
1919    out.distance = backward;
1920    out.score = score;
1921    1i32
1922}
1923
1924fn SearchInStaticDictionary<HasherType: AnyHasher>(
1925    dictionary: &BrotliDictionary,
1926    dictionary_hash: &[u16],
1927    handle: &mut HasherType,
1928    data: &[u8],
1929    max_length: usize,
1930    max_backward: usize,
1931    max_distance: usize,
1932    out: &mut HasherSearchResult,
1933    shallow: bool,
1934) -> bool {
1935    let mut key: usize;
1936    let mut i: usize;
1937    let mut is_match_found = false;
1938    let opts = handle.Opts();
1939    let xself: &mut Struct1 = handle.GetHasherCommon();
1940    if xself.dict_num_matches < xself.dict_num_lookups >> 7 {
1941        return false;
1942    }
1943    key = (Hash14(data) << 1) as usize; // FIXME: is this probe valid for every hasher type?
1944    i = 0usize;
1945    while i < if shallow { 1 } else { 2 } {
1946        {
1947            let item: usize = dictionary_hash[key] as usize;
1948            xself.dict_num_lookups = xself.dict_num_lookups.wrapping_add(1);
1949            if item != 0usize {
1950                let item_matches: i32 = TestStaticDictionaryItem(
1951                    dictionary,
1952                    item,
1953                    data,
1954                    max_length,
1955                    max_backward,
1956                    max_distance,
1957                    opts,
1958                    out,
1959                );
1960                if item_matches != 0 {
1961                    xself.dict_num_matches = xself.dict_num_matches.wrapping_add(1);
1962                    is_match_found = true;
1963                }
1964            }
1965        }
1966        i = i.wrapping_add(1);
1967        key = key.wrapping_add(1);
1968    }
1969    is_match_found
1970}
1971
1972impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
1973    for BasicHasher<H2Sub<Alloc>>
1974{
1975    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
1976        let mut ret = BasicHasher::<H2Sub<Alloc>> {
1977            GetHasherCommon: self.GetHasherCommon.clone(),
1978            buckets_: H2Sub {
1979                buckets_: allocate::<u32, _>(m, self.buckets_.buckets_.len()),
1980            },
1981            h9_opts: self.h9_opts,
1982        };
1983        ret.buckets_
1984            .buckets_
1985            .slice_mut()
1986            .clone_from_slice(self.buckets_.buckets_.slice());
1987        ret
1988    }
1989}
1990impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
1991    for BasicHasher<H3Sub<Alloc>>
1992{
1993    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
1994        let mut ret = BasicHasher::<H3Sub<Alloc>> {
1995            GetHasherCommon: self.GetHasherCommon.clone(),
1996            buckets_: H3Sub::<Alloc> {
1997                buckets_: allocate::<u32, _>(m, self.buckets_.buckets_.len()),
1998            },
1999            h9_opts: self.h9_opts,
2000        };
2001        ret.buckets_
2002            .buckets_
2003            .slice_mut()
2004            .clone_from_slice(self.buckets_.buckets_.slice());
2005        ret
2006    }
2007}
2008impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
2009    for BasicHasher<H4Sub<Alloc>>
2010{
2011    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2012        let mut ret = BasicHasher::<H4Sub<Alloc>> {
2013            GetHasherCommon: self.GetHasherCommon.clone(),
2014            buckets_: H4Sub::<Alloc> {
2015                buckets_: allocate::<u32, _>(m, self.buckets_.buckets_.len()),
2016            },
2017            h9_opts: self.h9_opts,
2018        };
2019        ret.buckets_
2020            .buckets_
2021            .slice_mut()
2022            .clone_from_slice(self.buckets_.buckets_.slice());
2023        ret
2024    }
2025}
2026impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
2027    for BasicHasher<H54Sub<Alloc>>
2028{
2029    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2030        let mut ret = BasicHasher::<H54Sub<Alloc>> {
2031            GetHasherCommon: self.GetHasherCommon.clone(),
2032            buckets_: H54Sub::<Alloc> {
2033                buckets_: allocate::<u32, _>(m, self.buckets_.len()),
2034            },
2035            h9_opts: self.h9_opts,
2036        };
2037        ret.buckets_
2038            .buckets_
2039            .slice_mut()
2040            .clone_from_slice(self.buckets_.buckets_.slice());
2041        ret
2042    }
2043}
2044impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc> for H9<Alloc> {
2045    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2046        let mut num = allocate::<u16, _>(m, self.num_.len());
2047        num.slice_mut().clone_from_slice(self.num_.slice());
2048        let mut buckets = allocate::<u32, _>(m, self.buckets_.len());
2049        buckets.slice_mut().clone_from_slice(self.buckets_.slice());
2050        H9::<Alloc> {
2051            num_: num,
2052            buckets_: buckets,
2053            dict_search_stats_: self.dict_search_stats_.clone(),
2054            h9_opts: self.h9_opts,
2055        }
2056    }
2057}
2058impl<
2059        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
2060        Special: AdvHashSpecialization + Sized + Clone,
2061    > CloneWithAlloc<Alloc> for AdvHasher<Special, Alloc>
2062{
2063    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2064        let mut num = allocate::<u16, _>(m, self.num.len());
2065        num.slice_mut().clone_from_slice(self.num.slice());
2066        let mut buckets = allocate::<u32, _>(m, self.buckets.len());
2067        buckets.slice_mut().clone_from_slice(self.buckets.slice());
2068        AdvHasher::<Special, Alloc> {
2069            GetHasherCommon: self.GetHasherCommon.clone(),
2070            specialization: self.specialization.clone(),
2071            num,
2072            buckets,
2073            h9_opts: self.h9_opts,
2074        }
2075    }
2076}
2077
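// UnionHasher is the runtime-dispatched union of all concrete hashers; the
// match_all_hashers!/match_all_hashers_mut! macros further down expand one arm per variant
// so AnyHasher calls are forwarded without boxing or trait objects.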
2078pub enum UnionHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
2079    Uninit,
2080    H2(BasicHasher<H2Sub<Alloc>>),
2081    H3(BasicHasher<H3Sub<Alloc>>),
2082    H4(BasicHasher<H4Sub<Alloc>>),
2083    H54(BasicHasher<H54Sub<Alloc>>),
2084    H5(AdvHasher<H5Sub, Alloc>),
2085    H5q7(AdvHasher<HQ7Sub, Alloc>),
2086    H5q5(AdvHasher<HQ5Sub, Alloc>),
2087    H6(AdvHasher<H6Sub, Alloc>),
2088    H9(H9<Alloc>),
2089    H10(H10<Alloc, H10Buckets<Alloc>, H10DefaultParams>),
2090}
2091impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> PartialEq<UnionHasher<Alloc>>
2092    for UnionHasher<Alloc>
2093{
2094    fn eq(&self, other: &UnionHasher<Alloc>) -> bool {
2095        match *self {
2096            UnionHasher::H2(ref hasher) => match *other {
2097                UnionHasher::H2(ref otherh) => *hasher == *otherh,
2098                _ => false,
2099            },
2100            UnionHasher::H3(ref hasher) => match *other {
2101                UnionHasher::H3(ref otherh) => *hasher == *otherh,
2102                _ => false,
2103            },
2104            UnionHasher::H4(ref hasher) => match *other {
2105                UnionHasher::H4(ref otherh) => *hasher == *otherh,
2106                _ => false,
2107            },
2108            UnionHasher::H54(ref hasher) => match *other {
2109                UnionHasher::H54(ref otherh) => *hasher == *otherh,
2110                _ => false,
2111            },
2112            UnionHasher::H5(ref hasher) => match *other {
2113                UnionHasher::H5(ref otherh) => *hasher == *otherh,
2114                _ => false,
2115            },
2116            UnionHasher::H5q7(ref hasher) => match *other {
2117                UnionHasher::H5q7(ref otherh) => *hasher == *otherh,
2118                _ => false,
2119            },
2120            UnionHasher::H5q5(ref hasher) => match *other {
2121                UnionHasher::H5q5(ref otherh) => *hasher == *otherh,
2122                _ => false,
2123            },
2124            UnionHasher::H6(ref hasher) => match *other {
2125                UnionHasher::H6(ref otherh) => *hasher == *otherh,
2126                _ => false,
2127            },
2128            UnionHasher::H9(ref hasher) => match *other {
2129                UnionHasher::H9(ref otherh) => *hasher == *otherh,
2130                _ => false,
2131            },
2132            UnionHasher::H10(ref hasher) => match *other {
2133                UnionHasher::H10(ref otherh) => *hasher == *otherh,
2134                _ => false,
2135            },
2136            UnionHasher::Uninit => match *other {
2137                UnionHasher::Uninit => true,
2138                _ => false,
2139            },
2140        }
2141    }
2142}
2143impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
2144    for UnionHasher<Alloc>
2145{
2146    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2147        match *self {
2148            UnionHasher::H2(ref hasher) => UnionHasher::H2(hasher.clone_with_alloc(m)),
2149            UnionHasher::H3(ref hasher) => UnionHasher::H3(hasher.clone_with_alloc(m)),
2150            UnionHasher::H4(ref hasher) => UnionHasher::H4(hasher.clone_with_alloc(m)),
2151            UnionHasher::H5(ref hasher) => UnionHasher::H5(hasher.clone_with_alloc(m)),
2152            UnionHasher::H5q7(ref hasher) => UnionHasher::H5q7(hasher.clone_with_alloc(m)),
2153            UnionHasher::H5q5(ref hasher) => UnionHasher::H5q5(hasher.clone_with_alloc(m)),
2154            UnionHasher::H6(ref hasher) => UnionHasher::H6(hasher.clone_with_alloc(m)),
2155            UnionHasher::H54(ref hasher) => UnionHasher::H54(hasher.clone_with_alloc(m)),
2156            UnionHasher::H9(ref hasher) => UnionHasher::H9(hasher.clone_with_alloc(m)),
2157            UnionHasher::H10(ref hasher) => UnionHasher::H10(hasher.clone_with_alloc(m)),
2158            UnionHasher::Uninit => UnionHasher::Uninit,
2159        }
2160    }
2161}
2162macro_rules! match_all_hashers_mut {
2163    ($xself : expr, $func_call : ident, $( $args:expr),*) => {
2164        match $xself {
2165     &mut UnionHasher::H2(ref mut hasher) => hasher.$func_call($($args),*),
2166     &mut UnionHasher::H3(ref mut hasher) => hasher.$func_call($($args),*),
2167     &mut UnionHasher::H4(ref mut hasher) => hasher.$func_call($($args),*),
2168     &mut UnionHasher::H5(ref mut hasher) => hasher.$func_call($($args),*),
2169     &mut UnionHasher::H5q7(ref mut hasher) => hasher.$func_call($($args),*),
2170     &mut UnionHasher::H5q5(ref mut hasher) => hasher.$func_call($($args),*),
2171     &mut UnionHasher::H6(ref mut hasher) => hasher.$func_call($($args),*),
2172     &mut UnionHasher::H54(ref mut hasher) => hasher.$func_call($($args),*),
2173     &mut UnionHasher::H9(ref mut hasher) => hasher.$func_call($($args),*),
2174     &mut UnionHasher::H10(ref mut hasher) => hasher.$func_call($($args),*),
2175     &mut UnionHasher::Uninit => panic!("UNINITIALIZED"),
2176        }
2177    };
2178}
2179macro_rules! match_all_hashers {
2180    ($xself : expr, $func_call : ident, $( $args:expr),*) => {
2181        match $xself {
2182     &UnionHasher::H2(ref hasher) => hasher.$func_call($($args),*),
2183     &UnionHasher::H3(ref hasher) => hasher.$func_call($($args),*),
2184     &UnionHasher::H4(ref hasher) => hasher.$func_call($($args),*),
2185     &UnionHasher::H5(ref hasher) => hasher.$func_call($($args),*),
2186     &UnionHasher::H5q7(ref hasher) => hasher.$func_call($($args),*),
2187     &UnionHasher::H5q5(ref hasher) => hasher.$func_call($($args),*),
2188     &UnionHasher::H6(ref hasher) => hasher.$func_call($($args),*),
2189     &UnionHasher::H54(ref hasher) => hasher.$func_call($($args),*),
2190     &UnionHasher::H9(ref hasher) => hasher.$func_call($($args),*),
2191     &UnionHasher::H10(ref hasher) => hasher.$func_call($($args),*),
2192     &UnionHasher::Uninit => panic!("UNINITIALIZED"),
2193        }
2194    };
2195}
2196impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> AnyHasher for UnionHasher<Alloc> {
2197    fn Opts(&self) -> H9Opts {
2198        match_all_hashers!(self, Opts,)
2199    }
2200    fn GetHasherCommon(&mut self) -> &mut Struct1 {
2201        match_all_hashers_mut!(self, GetHasherCommon,)
2202    } /*
2203      fn GetH10Tree(&mut self) -> Option<&mut H10<AllocU32, H10Buckets, H10DefaultParams>> {
2204        return match_all_hashers_mut!(self, GetH10Tree,);
2205      }*/
2206    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
2207        match_all_hashers_mut!(self, Prepare, one_shot, input_size, data)
2208    }
2209    fn HashBytes(&self, data: &[u8]) -> usize {
2210        match_all_hashers!(self, HashBytes, data)
2211    }
2212    fn HashTypeLength(&self) -> usize {
2213        match_all_hashers!(self, HashTypeLength,)
2214    }
2215    fn StoreLookahead(&self) -> usize {
2216        match_all_hashers!(self, StoreLookahead,)
2217    }
2218    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
2219        match_all_hashers!(self, PrepareDistanceCache, distance_cache)
2220    }
2221    fn StitchToPreviousBlock(
2222        &mut self,
2223        num_bytes: usize,
2224        position: usize,
2225        ringbuffer: &[u8],
2226        ringbuffer_mask: usize,
2227    ) {
2228        match_all_hashers_mut!(
2229            self,
2230            StitchToPreviousBlock,
2231            num_bytes,
2232            position,
2233            ringbuffer,
2234            ringbuffer_mask
2235        )
2236    }
2237    fn FindLongestMatch(
2238        &mut self,
2239        dictionary: Option<&BrotliDictionary>,
2240        dictionary_hash: &[u16],
2241        data: &[u8],
2242        ring_buffer_mask: usize,
2243        ring_buffer_break: Option<core::num::NonZeroUsize>,
2244        distance_cache: &[i32],
2245        cur_ix: usize,
2246        max_length: usize,
2247        max_backward: usize,
2248        gap: usize,
2249        max_distance: usize,
2250        out: &mut HasherSearchResult,
2251    ) -> bool {
2252        match_all_hashers_mut!(
2253            self,
2254            FindLongestMatch,
2255            dictionary,
2256            dictionary_hash,
2257            data,
2258            ring_buffer_mask,
2259            ring_buffer_break,
2260            distance_cache,
2261            cur_ix,
2262            max_length,
2263            max_backward,
2264            gap,
2265            max_distance,
2266            out
2267        )
2268    }
2269    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
2270        match_all_hashers_mut!(self, Store, data, mask, ix)
2271    }
2272    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
2273        match_all_hashers_mut!(self, StoreRange, data, mask, ix_start, ix_end)
2274    }
2275    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
2276        match_all_hashers_mut!(self, BulkStoreRange, data, mask, ix_start, ix_end)
2277    }
2278}
2279
2280impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> UnionHasher<Alloc> {
2281    pub fn free(&mut self, alloc: &mut Alloc) {
2282        match self {
2283            &mut UnionHasher::H2(ref mut hasher) => {
2284                <Alloc as Allocator<u32>>::free_cell(
2285                    alloc,
2286                    core::mem::take(&mut hasher.buckets_.buckets_),
2287                );
2288            }
2289            &mut UnionHasher::H3(ref mut hasher) => {
2290                <Alloc as Allocator<u32>>::free_cell(
2291                    alloc,
2292                    core::mem::take(&mut hasher.buckets_.buckets_),
2293                );
2294            }
2295            &mut UnionHasher::H4(ref mut hasher) => {
2296                <Alloc as Allocator<u32>>::free_cell(
2297                    alloc,
2298                    core::mem::take(&mut hasher.buckets_.buckets_),
2299                );
2300            }
2301            &mut UnionHasher::H54(ref mut hasher) => {
2302                <Alloc as Allocator<u32>>::free_cell(
2303                    alloc,
2304                    core::mem::take(&mut hasher.buckets_.buckets_),
2305                );
2306            }
2307            &mut UnionHasher::H5q7(ref mut hasher) => {
2308                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
2309                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
2310            }
2311            &mut UnionHasher::H5q5(ref mut hasher) => {
2312                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
2313                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
2314            }
2315            &mut UnionHasher::H5(ref mut hasher) => {
2316                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
2317                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
2318            }
2319            &mut UnionHasher::H6(ref mut hasher) => {
2320                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
2321                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
2322            }
2323            &mut UnionHasher::H9(ref mut hasher) => {
2324                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num_));
2325                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets_));
2326            }
2327            &mut UnionHasher::H10(ref mut hasher) => {
2328                hasher.free(alloc);
2329            }
2330            &mut UnionHasher::Uninit => {}
2331        }
2332        *self = UnionHasher::<Alloc>::default();
2333    }
2334}
2335
2336impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> Default for UnionHasher<Alloc> {
2337    fn default() -> Self {
2338        UnionHasher::Uninit
2339    }
2340}
2341
2342/*UnionHasher::H2(BasicHasher {
2343GetHasherCommon:Struct1{params:BrotliHasherParams{
2344 type_:2,
2345 block_bits: 8,
2346 bucket_bits:16,
2347 hash_len: 4,
2348 num_last_distances_to_check:0},
2349is_prepared_:0,
2350dict_num_lookups:0,
2351dict_num_matches:0,
2352},
2353buckets_:H2Sub{
2354buckets_:[0;65537],
2355},
2356})
2357*/
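// The greedy match loop, in outline: at each position run FindLongestMatch; when a match is
// found, peek one byte ahead (lazy matching) and prefer the later match only if its score
// beats the current one by cost_diff_lazy (175), deferring at most four times in a row. The
// winning match is emitted as a Command together with the pending literal run, the distance
// cache is rotated when the distance is not simply the last one used, hashes are stored over
// the copied region (capped at store_end), and the position jumps past the copy. When no
// match is found the position advances one byte, and once a long literal spree exceeds the
// random-heuristics window the loop skips ahead 8 or 16 bytes at a time, storing only sparse
// hash entries.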
2358fn CreateBackwardReferences<AH: AnyHasher>(
2359    dictionary: Option<&BrotliDictionary>,
2360    dictionary_hash: &[u16],
2361    num_bytes: usize,
2362    mut position: usize,
2363    ringbuffer: &[u8],
2364    ringbuffer_mask: usize,
2365    ringbuffer_break: Option<core::num::NonZeroUsize>,
2366    params: &BrotliEncoderParams,
2367    hasher: &mut AH,
2368    dist_cache: &mut [i32],
2369    last_insert_len: &mut usize,
2370    mut commands: &mut [Command],
2371    num_commands: &mut usize,
2372    num_literals: &mut usize,
2373) {
2374    let gap = 0usize;
2375    let max_backward_limit: usize = (1usize << params.lgwin).wrapping_sub(16);
2376    let mut new_commands_count: usize = 0;
2377    let mut insert_length: usize = *last_insert_len;
2378    let pos_end: usize = position.wrapping_add(num_bytes);
2379    let store_end: usize = if num_bytes >= hasher.StoreLookahead() {
2380        position
2381            .wrapping_add(num_bytes)
2382            .wrapping_sub(hasher.StoreLookahead())
2383            .wrapping_add(1)
2384    } else {
2385        position
2386    };
2387    let random_heuristics_window_size: usize = LiteralSpreeLengthForSparseSearch(params);
2388    let mut apply_random_heuristics: usize = position.wrapping_add(random_heuristics_window_size);
2389    let kMinScore: u64 = (30u64 * 8)
2390        .wrapping_mul(::core::mem::size_of::<u64>() as u64)
2391        .wrapping_add(100);
2392    hasher.PrepareDistanceCache(dist_cache);
2393    while position.wrapping_add(hasher.HashTypeLength()) < pos_end {
2394        let mut max_length: usize = pos_end.wrapping_sub(position);
2395        let mut max_distance: usize = min(position, max_backward_limit);
2396        let mut sr = HasherSearchResult {
2397            len: 0,
2398            len_x_code: 0,
2399            distance: 0,
2400            score: 0,
2401        };
2402        sr.len = 0usize;
2403        sr.len_x_code = 0usize;
2404        sr.distance = 0usize;
2405        sr.score = kMinScore;
2406        if hasher.FindLongestMatch(
2407            dictionary,
2408            dictionary_hash,
2409            ringbuffer,
2410            ringbuffer_mask,
2411            ringbuffer_break,
2412            dist_cache,
2413            position,
2414            max_length,
2415            max_distance,
2416            gap,
2417            params.dist.max_distance,
2418            &mut sr,
2419        ) {
2420            let mut delayed_backward_references_in_row: i32 = 0i32;
2421            max_length = max_length.wrapping_sub(1);
2422            'break6: loop {
2423                'continue7: loop {
2424                    let cost_diff_lazy: u64 = 175;
2425
2426                    let mut sr2 = HasherSearchResult {
2427                        len: 0,
2428                        len_x_code: 0,
2429                        distance: 0,
2430                        score: 0,
2431                    };
2432                    sr2.len = if params.quality < 5 {
2433                        min(sr.len.wrapping_sub(1), max_length)
2434                    } else {
2435                        0usize
2436                    };
2437                    sr2.len_x_code = 0usize;
2438                    sr2.distance = 0usize;
2439                    sr2.score = kMinScore;
2440                    max_distance = min(position.wrapping_add(1), max_backward_limit);
2441                    let is_match_found: bool = hasher.FindLongestMatch(
2442                        dictionary,
2443                        dictionary_hash,
2444                        ringbuffer,
2445                        ringbuffer_mask,
2446                        ringbuffer_break,
2447                        dist_cache,
2448                        position.wrapping_add(1),
2449                        max_length,
2450                        max_distance,
2451                        gap,
2452                        params.dist.max_distance,
2453                        &mut sr2,
2454                    );
2455                    if is_match_found && (sr2.score >= sr.score.wrapping_add(cost_diff_lazy)) {
2456                        position = position.wrapping_add(1);
2457                        insert_length = insert_length.wrapping_add(1);
2458                        sr = sr2;
2459                        if {
2460                            delayed_backward_references_in_row += 1;
2461                            delayed_backward_references_in_row
2462                        } < 4i32
2463                            && (position.wrapping_add(hasher.HashTypeLength()) < pos_end)
2464                        {
2465                            break 'continue7;
2466                        }
2467                    }
2468                    break 'break6;
2469                }
2470                max_length = max_length.wrapping_sub(1);
2471            }
2472            apply_random_heuristics = position
2473                .wrapping_add((2usize).wrapping_mul(sr.len))
2474                .wrapping_add(random_heuristics_window_size);
2475            max_distance = min(position, max_backward_limit);
2476            {
2477                let distance_code: usize =
2478                    ComputeDistanceCode(sr.distance, max_distance, dist_cache);
2479                if sr.distance <= max_distance && (distance_code > 0usize) {
2480                    dist_cache[3] = dist_cache[2];
2481                    dist_cache[2] = dist_cache[1];
2482                    dist_cache[1] = dist_cache[0];
2483                    dist_cache[0] = sr.distance as i32;
2484                    hasher.PrepareDistanceCache(dist_cache);
2485                }
2486                new_commands_count += 1;
2487
2488                let (old, new_commands) = core::mem::take(&mut commands).split_at_mut(1);
2489                commands = new_commands;
2490                old[0].init(
2491                    &params.dist,
2492                    insert_length,
2493                    sr.len,
2494                    sr.len ^ sr.len_x_code,
2495                    distance_code,
2496                );
2497            }
2498            *num_literals = num_literals.wrapping_add(insert_length);
2499            insert_length = 0usize;
2500            hasher.StoreRange(
2501                ringbuffer,
2502                ringbuffer_mask,
2503                position.wrapping_add(2),
2504                min(position.wrapping_add(sr.len), store_end),
2505            );
2506            position = position.wrapping_add(sr.len);
2507        } else {
2508            insert_length = insert_length.wrapping_add(1);
2509            position = position.wrapping_add(1);
2510
2511            if position > apply_random_heuristics {
2512                let kMargin: usize = max(hasher.StoreLookahead().wrapping_sub(1), 4);
2513                if position.wrapping_add(16) >= pos_end.wrapping_sub(kMargin) {
2514                    insert_length = insert_length.wrapping_add(pos_end - position);
2515                    position = pos_end;
2516                } else if position
2517                    > apply_random_heuristics
2518                        .wrapping_add((4usize).wrapping_mul(random_heuristics_window_size))
2519                {
2520                    hasher.Store4Vec4(ringbuffer, ringbuffer_mask, position);
2521                    insert_length = insert_length.wrapping_add(16);
2522                    position = position.wrapping_add(16);
2523                } else {
2524                    hasher.StoreEvenVec4(ringbuffer, ringbuffer_mask, position);
2525                    insert_length = insert_length.wrapping_add(8);
2526                    position = position.wrapping_add(8);
2527                }
2528            }
2529        }
2530    }
2531    insert_length = insert_length.wrapping_add(pos_end.wrapping_sub(position));
2532    *last_insert_len = insert_length;
2533    *num_commands = num_commands.wrapping_add(new_commands_count);
2534}
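// Public entry point: dispatch on the active UnionHasher variant. H10 routes to the
// Zopfli-style searches in backward_references_hq (the HQ variant when quality >= 11);
// every other variant runs the greedy CreateBackwardReferences above with the shared
// kStaticDictionaryHash table.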
2535pub fn BrotliCreateBackwardReferences<
2536    Alloc: alloc::Allocator<u16>
2537        + alloc::Allocator<u32>
2538        + alloc::Allocator<u64>
2539        + alloc::Allocator<floatX>
2540        + alloc::Allocator<ZopfliNode>,
2541>(
2542    alloc: &mut Alloc,
2543    dictionary: &BrotliDictionary,
2544    num_bytes: usize,
2545    position: usize,
2546    ringbuffer: &[u8],
2547    ringbuffer_mask: usize,
2548    ringbuffer_break: Option<core::num::NonZeroUsize>,
2550    params: &BrotliEncoderParams,
2551    hasher_union: &mut UnionHasher<Alloc>,
2552    dist_cache: &mut [i32],
2553    last_insert_len: &mut usize,
2554    commands: &mut [Command],
2555    num_commands: &mut usize,
2556    num_literals: &mut usize,
2557) {
2558    match hasher_union {
2559        &mut UnionHasher::Uninit => panic!("working with uninitialized hasher"),
2560        &mut UnionHasher::H10(ref mut hasher) => {
2561            if params.quality >= 11 {
2562                super::backward_references_hq::BrotliCreateHqZopfliBackwardReferences(
2563                    alloc,
2564                    if params.use_dictionary {
2565                        Some(dictionary)
2566                    } else {
2567                        None
2568                    },
2569                    num_bytes,
2570                    position,
2571                    ringbuffer,
2572                    ringbuffer_mask,
2573                    ringbuffer_break,
2574                    params,
2575                    hasher,
2576                    dist_cache,
2577                    last_insert_len,
2578                    commands,
2579                    num_commands,
2580                    num_literals,
2581                )
2582            } else {
2583                super::backward_references_hq::BrotliCreateZopfliBackwardReferences(
2584                    alloc,
2585                    if params.use_dictionary {
2586                        Some(dictionary)
2587                    } else {
2588                        None
2589                    },
2590                    num_bytes,
2591                    position,
2592                    ringbuffer,
2593                    ringbuffer_mask,
2594                    ringbuffer_break,
2595                    params,
2596                    hasher,
2597                    dist_cache,
2598                    last_insert_len,
2599                    commands,
2600                    num_commands,
2601                    num_literals,
2602                )
2603            }
2604        }
2605        &mut UnionHasher::H2(ref mut hasher) => CreateBackwardReferences(
2606            if params.use_dictionary {
2607                Some(dictionary)
2608            } else {
2609                None
2610            },
2611            &kStaticDictionaryHash[..],
2612            num_bytes,
2613            position,
2614            ringbuffer,
2615            ringbuffer_mask,
2616            ringbuffer_break,
2617            params,
2618            hasher,
2619            dist_cache,
2620            last_insert_len,
2621            commands,
2622            num_commands,
2623            num_literals,
2624        ),
2625        &mut UnionHasher::H3(ref mut hasher) => CreateBackwardReferences(
2626            if params.use_dictionary {
2627                Some(dictionary)
2628            } else {
2629                None
2630            },
2631            &kStaticDictionaryHash[..],
2632            num_bytes,
2633            position,
2634            ringbuffer,
2635            ringbuffer_mask,
2636            ringbuffer_break,
2637            params,
2638            hasher,
2639            dist_cache,
2640            last_insert_len,
2641            commands,
2642            num_commands,
2643            num_literals,
2644        ),
2645        &mut UnionHasher::H4(ref mut hasher) => CreateBackwardReferences(
2646            if params.use_dictionary {
2647                Some(dictionary)
2648            } else {
2649                None
2650            },
2651            &kStaticDictionaryHash[..],
2652            num_bytes,
2653            position,
2654            ringbuffer,
2655            ringbuffer_mask,
2656            ringbuffer_break,
2657            params,
2658            hasher,
2659            dist_cache,
2660            last_insert_len,
2661            commands,
2662            num_commands,
2663            num_literals,
2664        ),
2665        &mut UnionHasher::H5(ref mut hasher) => CreateBackwardReferences(
2666            if params.use_dictionary {
2667                Some(dictionary)
2668            } else {
2669                None
2670            },
2671            &kStaticDictionaryHash[..],
2672            num_bytes,
2673            position,
2674            ringbuffer,
2675            ringbuffer_mask,
2676            ringbuffer_break,
2677            params,
2678            hasher,
2679            dist_cache,
2680            last_insert_len,
2681            commands,
2682            num_commands,
2683            num_literals,
2684        ),
2685        &mut UnionHasher::H5q7(ref mut hasher) => CreateBackwardReferences(
2686            if params.use_dictionary {
2687                Some(dictionary)
2688            } else {
2689                None
2690            },
2691            &kStaticDictionaryHash[..],
2692            num_bytes,
2693            position,
2694            ringbuffer,
2695            ringbuffer_mask,
2696            ringbuffer_break,
2697            params,
2698            hasher,
2699            dist_cache,
2700            last_insert_len,
2701            commands,
2702            num_commands,
2703            num_literals,
2704        ),
2705        &mut UnionHasher::H5q5(ref mut hasher) => CreateBackwardReferences(
2706            if params.use_dictionary {
2707                Some(dictionary)
2708            } else {
2709                None
2710            },
2711            &kStaticDictionaryHash[..],
2712            num_bytes,
2713            position,
2714            ringbuffer,
2715            ringbuffer_mask,
2716            ringbuffer_break,
2717            params,
2718            hasher,
2719            dist_cache,
2720            last_insert_len,
2721            commands,
2722            num_commands,
2723            num_literals,
2724        ),
2725        &mut UnionHasher::H6(ref mut hasher) => CreateBackwardReferences(
2726            if params.use_dictionary {
2727                Some(dictionary)
2728            } else {
2729                None
2730            },
2731            &kStaticDictionaryHash[..],
2732            num_bytes,
2733            position,
2734            ringbuffer,
2735            ringbuffer_mask,
2736            ringbuffer_break,
2737            params,
2738            hasher,
2739            dist_cache,
2740            last_insert_len,
2741            commands,
2742            num_commands,
2743            num_literals,
2744        ),
2745        &mut UnionHasher::H9(ref mut hasher) => CreateBackwardReferences(
2746            if params.use_dictionary {
2747                Some(dictionary)
2748            } else {
2749                None
2750            },
2751            &kStaticDictionaryHash[..],
2752            num_bytes,
2753            position,
2754            ringbuffer,
2755            ringbuffer_mask,
2756            ringbuffer_break,
2757            params,
2758            hasher,
2759            dist_cache,
2760            last_insert_len,
2761            commands,
2762            num_commands,
2763            num_literals,
2764        ),
2765        &mut UnionHasher::H54(ref mut hasher) => CreateBackwardReferences(
2766            if params.use_dictionary {
2767                Some(dictionary)
2768            } else {
2769                None
2770            },
2771            &kStaticDictionaryHash[..],
2772            num_bytes,
2773            position,
2774            ringbuffer,
2775            ringbuffer_mask,
2776            ringbuffer_break,
2777            params,
2778            hasher,
2779            dist_cache,
2780            last_insert_len,
2781            commands,
2782            num_commands,
2783            num_literals,
2784        ),
2785    }
2786}
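
// Minimal illustrative check, kept separate from the existing `mod test` suite: it only
// exercises the pure Hash14 probe, whose result for an all-zero prefix is zero regardless
// of the multiplier value, so the assertion holds under any kHashMul32.
#[cfg(test)]
mod hash14_example {
    use super::Hash14;

    #[test]
    fn zero_prefix_maps_to_bucket_zero() {
        // 0 * kHashMul32 == 0, and 0 >> 18 == 0, so the 14-bit key is 0.
        assert_eq!(Hash14(&[0u8; 4]), 0);
    }
}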