/* Copyright 2010 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

/* A (forgetful) hash table to the data seen by the compressor, to
   help create backward references to previous data. */
10 #ifndef BROTLI_ENC_HASH_H_
11 #define BROTLI_ENC_HASH_H_
13 #include <string.h> /* memcmp, memset */
15 #include "../common/constants.h"
16 #include "../common/dictionary.h"
17 #include "../common/types.h"
18 #include "./dictionary_hash.h"
19 #include "./fast_log.h"
20 #include "./find_match_length.h"
23 #include "./quality.h"
24 #include "./static_dict.h"
26 #if defined(__cplusplus) || defined(c_plusplus)
30 #define MAX_TREE_SEARCH_DEPTH 64
31 #define MAX_TREE_COMP_LENGTH 128
32 #define score_t size_t
34 static const uint32_t kDistanceCacheIndex
[] = {
35 0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
37 static const int kDistanceCacheOffset
[] = {
38 0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3
41 static const uint32_t kCutoffTransformsCount
= 10;
42 static const uint8_t kCutoffTransforms
[] = {
43 0, 12, 27, 23, 42, 63, 56, 48, 59, 64
46 typedef struct HasherSearchResult
{
48 size_t len_x_code
; /* == len ^ len_code */
53 typedef struct DictionarySearchStatictics
{
56 } DictionarySearchStatictics
;
58 /* kHashMul32 multiplier has these properties:
59 * The multiplier must be odd. Otherwise we may lose the highest bit.
60 * No long streaks of 1s or 0s.
61 * There is no effort to ensure that it is a prime, the oddity is enough
63 * The number has been tuned heuristically against compression benchmarks. */
64 static const uint32_t kHashMul32
= 0x1e35a7bd;
66 static BROTLI_INLINE
uint32_t Hash14(const uint8_t* data
) {
67 uint32_t h
= BROTLI_UNALIGNED_LOAD32(data
) * kHashMul32
;
68 /* The higher bits contain more mixture from the multiplication,
69 so we take our results from there. */
70 return h
>> (32 - 14);
73 #define BROTLI_LITERAL_BYTE_SCORE 540
74 #define BROTLI_DISTANCE_BIT_PENALTY 120
75 /* Score must be positive after applying maximal penalty. */
76 #define BROTLI_SCORE_BASE (BROTLI_DISTANCE_BIT_PENALTY * 8 * sizeof(size_t))
78 /* Usually, we always choose the longest backward reference. This function
79 allows for the exception of that rule.
81 If we choose a backward reference that is further away, it will
82 usually be coded with more bits. We approximate this by assuming
83 log2(distance). If the distance can be expressed in terms of the
84 last four distances, we use some heuristic constants to estimate
85 the bits cost. For the first up to four literals we use the bit
86 cost of the literals from the literal cost model, after that we
87 use the average bit cost of the cost model.
89 This function is used to sometimes discard a longer backward reference
90 when it is not much longer and the bit cost for encoding it is more
91 than the saved literals.
93 backward_reference_offset MUST be positive. */
94 static BROTLI_INLINE score_t
BackwardReferenceScore(
95 size_t copy_length
, size_t backward_reference_offset
) {
96 return BROTLI_SCORE_BASE
+ BROTLI_LITERAL_BYTE_SCORE
* (score_t
)copy_length
-
97 BROTLI_DISTANCE_BIT_PENALTY
* Log2FloorNonZero(backward_reference_offset
);
100 static const score_t kDistanceShortCodeCost
[BROTLI_NUM_DISTANCE_SHORT_CODES
] = {
102 BROTLI_SCORE_BASE
+ 60,
103 /* 2nd, 3rd, 4th last */
104 BROTLI_SCORE_BASE
- 95,
105 BROTLI_SCORE_BASE
- 117,
106 BROTLI_SCORE_BASE
- 127,
107 /* Last with offset */
108 BROTLI_SCORE_BASE
- 93,
109 BROTLI_SCORE_BASE
- 93,
110 BROTLI_SCORE_BASE
- 96,
111 BROTLI_SCORE_BASE
- 96,
112 BROTLI_SCORE_BASE
- 99,
113 BROTLI_SCORE_BASE
- 99,
114 /* 2nd last with offset */
115 BROTLI_SCORE_BASE
- 105,
116 BROTLI_SCORE_BASE
- 105,
117 BROTLI_SCORE_BASE
- 115,
118 BROTLI_SCORE_BASE
- 115,
119 BROTLI_SCORE_BASE
- 125,
120 BROTLI_SCORE_BASE
- 125
123 static BROTLI_INLINE score_t
BackwardReferenceScoreUsingLastDistance(
124 size_t copy_length
, size_t distance_short_code
) {
125 return BROTLI_LITERAL_BYTE_SCORE
* (score_t
)copy_length
+
126 kDistanceShortCodeCost
[distance_short_code
];
129 static BROTLI_INLINE
void DictionarySearchStaticticsReset(
130 DictionarySearchStatictics
* self
) {
131 self
->num_lookups
= 0;
132 self
->num_matches
= 0;
135 static BROTLI_INLINE BROTLI_BOOL
TestStaticDictionaryItem(
136 size_t item
, const uint8_t* data
, size_t max_length
, size_t max_backward
,
137 HasherSearchResult
* out
) {
146 offset
= kBrotliDictionaryOffsetsByLength
[len
] + len
* dist
;
147 if (len
> max_length
) {
151 matchlen
= FindMatchLengthWithLimit(data
, &kBrotliDictionary
[offset
], len
);
152 if (matchlen
+ kCutoffTransformsCount
<= len
|| matchlen
== 0) {
156 size_t transform_id
= kCutoffTransforms
[len
- matchlen
];
157 backward
= max_backward
+ dist
+ 1 +
158 (transform_id
<< kBrotliDictionarySizeBitsByLength
[len
]);
160 score
= BackwardReferenceScore(matchlen
, backward
);
161 if (score
< out
->score
) {
165 out
->len_x_code
= len
^ matchlen
;
166 out
->distance
= backward
;
171 static BROTLI_INLINE BROTLI_BOOL
SearchInStaticDictionary(
172 DictionarySearchStatictics
* self
, const uint8_t* data
, size_t max_length
,
173 size_t max_backward
, HasherSearchResult
* out
, BROTLI_BOOL shallow
) {
176 BROTLI_BOOL is_match_found
= BROTLI_FALSE
;
177 if (self
->num_matches
< (self
->num_lookups
>> 7)) {
180 key
= Hash14(data
) << 1;
181 for (i
= 0; i
< (shallow
? 1 : 2); ++i
, ++key
) {
182 size_t item
= kStaticDictionaryHash
[key
];
185 TestStaticDictionaryItem(item
, data
, max_length
, max_backward
, out
)) {
187 is_match_found
= BROTLI_TRUE
;
190 return is_match_found
;
193 typedef struct BackwardMatch
{
195 uint32_t length_and_code
;
198 static BROTLI_INLINE
void InitBackwardMatch(BackwardMatch
* self
,
199 size_t dist
, size_t len
) {
200 self
->distance
= (uint32_t)dist
;
201 self
->length_and_code
= (uint32_t)(len
<< 5);
204 static BROTLI_INLINE
void InitDictionaryBackwardMatch(BackwardMatch
* self
,
205 size_t dist
, size_t len
, size_t len_code
) {
206 self
->distance
= (uint32_t)dist
;
207 self
->length_and_code
=
208 (uint32_t)((len
<< 5) | (len
== len_code
? 0 : len_code
));
211 static BROTLI_INLINE
size_t BackwardMatchLength(const BackwardMatch
* self
) {
212 return self
->length_and_code
>> 5;
215 static BROTLI_INLINE
size_t BackwardMatchLengthCode(const BackwardMatch
* self
) {
216 size_t code
= self
->length_and_code
& 31;
217 return code
? code
: BackwardMatchLength(self
);
220 #define EXPAND_CAT(a, b) CAT(a, b)
221 #define CAT(a, b) a ## b
222 #define FN(X) EXPAND_CAT(X, HASHER())
224 #define MAX_NUM_MATCHES_H10 (64 + MAX_TREE_SEARCH_DEPTH)
227 #define HashToBinaryTree HASHER()
229 #define BUCKET_BITS 17
230 #define BUCKET_SIZE (1 << BUCKET_BITS)
232 static size_t FN(HashTypeLength
)(void) { return 4; }
233 static size_t FN(StoreLookahead
)(void) { return MAX_TREE_COMP_LENGTH
; }
235 static uint32_t FN(HashBytes
)(const uint8_t *data
) {
236 uint32_t h
= BROTLI_UNALIGNED_LOAD32(data
) * kHashMul32
;
237 /* The higher bits contain more mixture from the multiplication,
238 so we take our results from there. */
239 return h
>> (32 - BUCKET_BITS
);
242 /* A (forgetful) hash table where each hash bucket contains a binary tree of
243 sequences whose first 4 bytes share the same hash code.
244 Each sequence is MAX_TREE_COMP_LENGTH long and is identified by its starting
245 position in the input data. The binary tree is sorted by the lexicographic
246 order of the sequences, and it is also a max-heap with respect to the
247 starting positions. */
248 typedef struct HashToBinaryTree
{
249 /* The window size minus 1 */
252 /* Hash table that maps the 4-byte hashes of the sequence to the last
253 position where this hash was found, which is the root of the binary
254 tree of sequences that share this hash bucket. */
255 uint32_t buckets_
[BUCKET_SIZE
];
257 /* The union of the binary trees of each hash bucket. The root of the tree
258 corresponding to a hash is a sequence starting at buckets_[hash] and
259 the left and right children of a sequence starting at pos are
260 forest_[2 * pos] and forest_[2 * pos + 1]. */
263 /* A position used to mark a non-existent sequence, i.e. a tree is empty if
264 its root is at invalid_pos_ and a node is a leaf if both its children
265 are at invalid_pos_. */
266 uint32_t invalid_pos_
;
269 BROTLI_BOOL is_dirty_
;
272 static void FN(Reset
)(HashToBinaryTree
* self
) {
273 self
->is_dirty_
= BROTLI_TRUE
;
276 static void FN(Initialize
)(HashToBinaryTree
* self
) {
277 self
->forest_
= NULL
;
278 self
->forest_size_
= 0;
282 static void FN(Cleanup
)(MemoryManager
* m
, HashToBinaryTree
* self
) {
283 BROTLI_FREE(m
, self
->forest_
);
286 static void FN(Init
)(
287 MemoryManager
* m
, HashToBinaryTree
* self
, const uint8_t* data
,
288 const BrotliEncoderParams
* params
, size_t position
, size_t bytes
,
289 BROTLI_BOOL is_last
) {
290 if (self
->is_dirty_
) {
291 uint32_t invalid_pos
;
295 self
->window_mask_
= (1u << params
->lgwin
) - 1u;
296 invalid_pos
= (uint32_t)(0 - self
->window_mask_
);
297 self
->invalid_pos_
= invalid_pos
;
298 for (i
= 0; i
< BUCKET_SIZE
; i
++) {
299 self
->buckets_
[i
] = invalid_pos
;
301 num_nodes
= (position
== 0 && is_last
) ? bytes
: self
->window_mask_
+ 1;
302 if (num_nodes
> self
->forest_size_
) {
303 BROTLI_FREE(m
, self
->forest_
);
304 self
->forest_
= BROTLI_ALLOC(m
, uint32_t, 2 * num_nodes
);
305 if (BROTLI_IS_OOM(m
)) return;
306 self
->forest_size_
= num_nodes
;
308 self
->is_dirty_
= BROTLI_FALSE
;
312 static BROTLI_INLINE
size_t FN(LeftChildIndex
)(HashToBinaryTree
* self
,
314 return 2 * (pos
& self
->window_mask_
);
317 static BROTLI_INLINE
size_t FN(RightChildIndex
)(HashToBinaryTree
* self
,
319 return 2 * (pos
& self
->window_mask_
) + 1;
322 /* Stores the hash of the next 4 bytes and in a single tree-traversal, the
323 hash bucket's binary tree is searched for matches and is re-rooted at the
326 If less than MAX_TREE_COMP_LENGTH data is available, the hash bucket of the
327 current position is searched for matches, but the state of the hash table
328 is not changed, since we can not know the final sorting order of the
329 current (incomplete) sequence.
331 This function must be called with increasing cur_ix positions. */
332 static BROTLI_INLINE BackwardMatch
* FN(StoreAndFindMatches
)(
333 HashToBinaryTree
* self
, const uint8_t* const BROTLI_RESTRICT data
,
334 const size_t cur_ix
, const size_t ring_buffer_mask
, const size_t max_length
,
335 const size_t max_backward
, size_t* const BROTLI_RESTRICT best_len
,
336 BackwardMatch
* BROTLI_RESTRICT matches
) {
337 const size_t cur_ix_masked
= cur_ix
& ring_buffer_mask
;
338 const size_t max_comp_len
=
339 BROTLI_MIN(size_t, max_length
, MAX_TREE_COMP_LENGTH
);
340 const BROTLI_BOOL should_reroot_tree
=
341 TO_BROTLI_BOOL(max_length
>= MAX_TREE_COMP_LENGTH
);
342 const uint32_t key
= FN(HashBytes
)(&data
[cur_ix_masked
]);
343 size_t prev_ix
= self
->buckets_
[key
];
344 /* The forest index of the rightmost node of the left subtree of the new
345 root, updated as we traverse and reroot the tree of the hash bucket. */
346 size_t node_left
= FN(LeftChildIndex
)(self
, cur_ix
);
347 /* The forest index of the leftmost node of the right subtree of the new
348 root, updated as we traverse and reroot the tree of the hash bucket. */
349 size_t node_right
= FN(RightChildIndex
)(self
, cur_ix
);
350 /* The match length of the rightmost node of the left subtree of the new
351 root, updated as we traverse and reroot the tree of the hash bucket. */
352 size_t best_len_left
= 0;
353 /* The match length of the leftmost node of the right subtree of the new
354 root, updated as we traverse and reroot the tree of the hash bucket. */
355 size_t best_len_right
= 0;
356 size_t depth_remaining
;
357 if (should_reroot_tree
) {
358 self
->buckets_
[key
] = (uint32_t)cur_ix
;
360 for (depth_remaining
= MAX_TREE_SEARCH_DEPTH
; ; --depth_remaining
) {
361 const size_t backward
= cur_ix
- prev_ix
;
362 const size_t prev_ix_masked
= prev_ix
& ring_buffer_mask
;
363 if (backward
== 0 || backward
> max_backward
|| depth_remaining
== 0) {
364 if (should_reroot_tree
) {
365 self
->forest_
[node_left
] = self
->invalid_pos_
;
366 self
->forest_
[node_right
] = self
->invalid_pos_
;
371 const size_t cur_len
= BROTLI_MIN(size_t, best_len_left
, best_len_right
);
373 assert(cur_len
<= MAX_TREE_COMP_LENGTH
);
375 FindMatchLengthWithLimit(&data
[cur_ix_masked
+ cur_len
],
376 &data
[prev_ix_masked
+ cur_len
],
377 max_length
- cur_len
);
378 assert(0 == memcmp(&data
[cur_ix_masked
], &data
[prev_ix_masked
], len
));
379 if (matches
&& len
> *best_len
) {
381 InitBackwardMatch(matches
++, backward
, len
);
383 if (len
>= max_comp_len
) {
384 if (should_reroot_tree
) {
385 self
->forest_
[node_left
] =
386 self
->forest_
[FN(LeftChildIndex
)(self
, prev_ix
)];
387 self
->forest_
[node_right
] =
388 self
->forest_
[FN(RightChildIndex
)(self
, prev_ix
)];
392 if (data
[cur_ix_masked
+ len
] > data
[prev_ix_masked
+ len
]) {
394 if (should_reroot_tree
) {
395 self
->forest_
[node_left
] = (uint32_t)prev_ix
;
397 node_left
= FN(RightChildIndex
)(self
, prev_ix
);
398 prev_ix
= self
->forest_
[node_left
];
400 best_len_right
= len
;
401 if (should_reroot_tree
) {
402 self
->forest_
[node_right
] = (uint32_t)prev_ix
;
404 node_right
= FN(LeftChildIndex
)(self
, prev_ix
);
405 prev_ix
= self
->forest_
[node_right
];
412 /* Finds all backward matches of &data[cur_ix & ring_buffer_mask] up to the
413 length of max_length and stores the position cur_ix in the hash table.
415 Sets *num_matches to the number of matches found, and stores the found
416 matches in matches[0] to matches[*num_matches - 1]. The matches will be
417 sorted by strictly increasing length and (non-strictly) increasing
419 static BROTLI_INLINE
size_t FN(FindAllMatches
)(HashToBinaryTree
* self
,
420 const uint8_t* data
, const size_t ring_buffer_mask
, const size_t cur_ix
,
421 const size_t max_length
, const size_t max_backward
,
422 const BrotliEncoderParams
* params
, BackwardMatch
* matches
) {
423 BackwardMatch
* const orig_matches
= matches
;
424 const size_t cur_ix_masked
= cur_ix
& ring_buffer_mask
;
426 const size_t short_match_max_backward
=
427 params
->quality
!= HQ_ZOPFLIFICATION_QUALITY
? 16 : 64;
428 size_t stop
= cur_ix
- short_match_max_backward
;
429 uint32_t dict_matches
[BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN
+ 1];
431 if (cur_ix
< short_match_max_backward
) { stop
= 0; }
432 for (i
= cur_ix
- 1; i
> stop
&& best_len
<= 2; --i
) {
434 const size_t backward
= cur_ix
- prev_ix
;
435 if (PREDICT_FALSE(backward
> max_backward
)) {
438 prev_ix
&= ring_buffer_mask
;
439 if (data
[cur_ix_masked
] != data
[prev_ix
] ||
440 data
[cur_ix_masked
+ 1] != data
[prev_ix
+ 1]) {
445 FindMatchLengthWithLimit(&data
[prev_ix
], &data
[cur_ix_masked
],
447 if (len
> best_len
) {
449 InitBackwardMatch(matches
++, backward
, len
);
453 if (best_len
< max_length
) {
454 matches
= FN(StoreAndFindMatches
)(self
, data
, cur_ix
, ring_buffer_mask
,
455 max_length
, max_backward
, &best_len
, matches
);
457 for (i
= 0; i
<= BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN
; ++i
) {
458 dict_matches
[i
] = kInvalidMatch
;
461 size_t minlen
= BROTLI_MAX(size_t, 4, best_len
+ 1);
462 if (BrotliFindAllStaticDictionaryMatches(&data
[cur_ix_masked
], minlen
,
463 max_length
, &dict_matches
[0])) {
464 size_t maxlen
= BROTLI_MIN(
465 size_t, BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN
, max_length
);
467 for (l
= minlen
; l
<= maxlen
; ++l
) {
468 uint32_t dict_id
= dict_matches
[l
];
469 if (dict_id
< kInvalidMatch
) {
470 InitDictionaryBackwardMatch(matches
++,
471 max_backward
+ (dict_id
>> 5) + 1, l
, dict_id
& 31);
476 return (size_t)(matches
- orig_matches
);
479 /* Stores the hash of the next 4 bytes and re-roots the binary tree at the
480 current sequence, without returning any matches.
481 REQUIRES: ix + MAX_TREE_COMP_LENGTH <= end-of-current-block */
482 static BROTLI_INLINE
void FN(Store
)(HashToBinaryTree
* self
, const uint8_t *data
,
483 const size_t mask
, const size_t ix
) {
484 /* Maximum distance is window size - 16, see section 9.1. of the spec. */
485 const size_t max_backward
= self
->window_mask_
- 15;
486 FN(StoreAndFindMatches
)(self
, data
, ix
, mask
, MAX_TREE_COMP_LENGTH
,
487 max_backward
, NULL
, NULL
);
490 static BROTLI_INLINE
void FN(StoreRange
)(HashToBinaryTree
* self
,
491 const uint8_t *data
, const size_t mask
, const size_t ix_start
,
492 const size_t ix_end
) {
493 size_t i
= ix_start
+ 63 <= ix_end
? ix_end
- 63 : ix_start
;
494 for (; i
< ix_end
; ++i
) {
495 FN(Store
)(self
, data
, mask
, i
);
499 static BROTLI_INLINE
void FN(StitchToPreviousBlock
)(HashToBinaryTree
* self
,
500 size_t num_bytes
, size_t position
, const uint8_t* ringbuffer
,
501 size_t ringbuffer_mask
) {
502 if (num_bytes
>= FN(HashTypeLength
)() - 1 &&
503 position
>= MAX_TREE_COMP_LENGTH
) {
504 /* Store the last `MAX_TREE_COMP_LENGTH - 1` positions in the hasher.
505 These could not be calculated before, since they require knowledge
506 of both the previous and the current block. */
507 const size_t i_start
= position
- MAX_TREE_COMP_LENGTH
+ 1;
508 const size_t i_end
= BROTLI_MIN(size_t, position
, i_start
+ num_bytes
);
510 for (i
= i_start
; i
< i_end
; ++i
) {
511 /* Maximum distance is window size - 16, see section 9.1. of the spec.
512 Furthermore, we have to make sure that we don't look further back
513 from the start of the next block than the window size, otherwise we
514 could access already overwritten areas of the ringbuffer. */
515 const size_t max_backward
=
516 self
->window_mask_
- BROTLI_MAX(size_t, 15, position
- i
);
517 /* We know that i + MAX_TREE_COMP_LENGTH <= position + num_bytes, i.e. the
518 end of the current block and that we have at least
519 MAX_TREE_COMP_LENGTH tail in the ringbuffer. */
520 FN(StoreAndFindMatches
)(self
, ringbuffer
, i
, ringbuffer_mask
,
521 MAX_TREE_COMP_LENGTH
, max_backward
, NULL
, NULL
);
531 /* For BUCKET_SWEEP == 1, enabling the dictionary lookup makes compression
532 a little faster (0.5% - 1%) and it compresses 0.15% better on small text
536 #define BUCKET_BITS 16
537 #define BUCKET_SWEEP 1
538 #define USE_DICTIONARY 1
539 #include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
541 #undef USE_DICTIONARY
545 #define BUCKET_SWEEP 2
546 #define USE_DICTIONARY 0
547 #include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
548 #undef USE_DICTIONARY
554 #define BUCKET_BITS 17
555 #define BUCKET_SWEEP 4
556 #define USE_DICTIONARY 1
557 #include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
558 #undef USE_DICTIONARY
564 #define BUCKET_BITS 14
566 #define NUM_LAST_DISTANCES_TO_CHECK 4
567 #include "./hash_longest_match_inc.h" /* NOLINT(build/include) */
573 #include "./hash_longest_match_inc.h" /* NOLINT(build/include) */
574 #undef NUM_LAST_DISTANCES_TO_CHECK
580 #define BUCKET_BITS 15
582 #define NUM_LAST_DISTANCES_TO_CHECK 10
583 #include "./hash_longest_match_inc.h" /* NOLINT(build/include) */
589 #include "./hash_longest_match_inc.h" /* NOLINT(build/include) */
590 #undef NUM_LAST_DISTANCES_TO_CHECK
596 #define NUM_LAST_DISTANCES_TO_CHECK 16
597 #include "./hash_longest_match_inc.h" /* NOLINT(build/include) */
598 #undef NUM_LAST_DISTANCES_TO_CHECK
603 #define BUCKET_BITS 15
605 #define NUM_LAST_DISTANCES_TO_CHECK 4
609 #include "./hash_forgetful_chain_inc.h" /* NOLINT(build/include) */
611 #undef NUM_LAST_DISTANCES_TO_CHECK
613 #define NUM_LAST_DISTANCES_TO_CHECK 10
615 #include "./hash_forgetful_chain_inc.h" /* NOLINT(build/include) */
617 #undef NUM_LAST_DISTANCES_TO_CHECK
621 #define NUM_LAST_DISTANCES_TO_CHECK 16
622 #define NUM_BANKS 512
625 #include "./hash_forgetful_chain_inc.h" /* NOLINT(build/include) */
627 #undef NUM_LAST_DISTANCES_TO_CHECK
637 #define FOR_GENERIC_HASHERS(H) H(2) H(3) H(4) H(5) H(6) H(7) H(8) H(9) \
639 #define FOR_ALL_HASHERS(H) FOR_GENERIC_HASHERS(H) H(10)
641 typedef struct Hashers
{
642 #define _MEMBER(N) H ## N* h ## N;
643 FOR_ALL_HASHERS(_MEMBER
)
647 static BROTLI_INLINE
void InitHashers(Hashers
* self
) {
648 #define _INIT(N) self->h ## N = 0;
649 FOR_ALL_HASHERS(_INIT
)
653 static BROTLI_INLINE
void DestroyHashers(MemoryManager
* m
, Hashers
* self
) {
654 if (self
->h10
) CleanupH10(m
, self
->h10
);
655 #define _CLEANUP(N) BROTLI_FREE(m, self->h ## N)
656 FOR_ALL_HASHERS(_CLEANUP
)
660 static BROTLI_INLINE
void HashersReset(Hashers
* self
, int type
) {
662 #define _RESET(N) case N: ResetH ## N(self->h ## N); break;
663 FOR_ALL_HASHERS(_RESET
)
669 static BROTLI_INLINE
void HashersSetup(
670 MemoryManager
* m
, Hashers
* self
, int type
) {
672 #define _SETUP(N) case N: self->h ## N = BROTLI_ALLOC(m, H ## N, 1); break;
673 FOR_ALL_HASHERS(_SETUP
)
677 if (BROTLI_IS_OOM(m
)) return;
678 if (type
== 10) InitializeH10(self
->h10
);
679 HashersReset(self
, type
);
682 #define _WARMUP_HASH(N) \
683 static BROTLI_INLINE void WarmupHashH ## N(MemoryManager* m, \
684 const BrotliEncoderParams* params, const size_t size, const uint8_t* dict, \
686 size_t overlap = (StoreLookaheadH ## N()) - 1; \
688 InitH ## N(m, hasher, dict, params, 0, size, BROTLI_FALSE); \
689 if (BROTLI_IS_OOM(m)) return; \
690 for (i = 0; i + overlap < size; i++) { \
691 StoreH ## N(hasher, dict, ~(size_t)0, i); \
694 FOR_ALL_HASHERS(_WARMUP_HASH
)
697 /* Custom LZ77 window. */
698 static BROTLI_INLINE
void HashersPrependCustomDictionary(
699 MemoryManager
* m
, Hashers
* self
, const BrotliEncoderParams
* params
,
700 const size_t size
, const uint8_t* dict
) {
701 int hasher_type
= ChooseHasher(params
);
702 switch (hasher_type
) {
703 #define _PREPEND(N) \
704 case N: WarmupHashH ## N(m, params, size, dict, self->h ## N); break;
705 FOR_ALL_HASHERS(_PREPEND
)
709 if (BROTLI_IS_OOM(m
)) return;
713 #if defined(__cplusplus) || defined(c_plusplus)
717 #endif /* BROTLI_ENC_HASH_H_ */