]>
Commit | Line | Data |
---|---|---|
11b7501a SB |
/* NOLINT(build/header_guard) */
/* Copyright 2010 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

/* template parameters: FN, BUCKET_BITS, BUCKET_SWEEP, USE_DICTIONARY */
/* Name of the hasher struct/type produced by this template instantiation. */
#define HashLongestMatchQuickly HASHER()

/* Number of buckets in the hash table. */
#define BUCKET_SIZE (1 << BUCKET_BITS)

/* Footprint measure used below to decide between full and partial init. */
#define HASH_MAP_SIZE (4 << BUCKET_BITS)
/* Number of input bytes consumed by one HashBytes() call (8-byte load). */
static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 8; }
/* Number of bytes that must be readable past a position passed to Store(). */
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 8; }
18 | \r | |
19 | /* HashBytes is the function that chooses the bucket to place\r | |
20 | the address in. The HashLongestMatch and HashLongestMatchQuickly\r | |
21 | classes have separate, different implementations of hashing. */\r | |
22 | static uint32_t FN(HashBytes)(const uint8_t *data) {\r | |
23 | /* Computing a hash based on 5 bytes works much better for\r | |
24 | qualities 1 and 3, where the next hash value is likely to replace */\r | |
25 | uint64_t h = (BROTLI_UNALIGNED_LOAD64(data) << 24) * kHashMul32;\r | |
26 | /* The higher bits contain more mixture from the multiplication,\r | |
27 | so we take our results from there. */\r | |
28 | return (uint32_t)(h >> (64 - BUCKET_BITS));\r | |
29 | }\r | |
30 | \r | |
/* A (forgetful) hash table to the data seen by the compressor, to
   help create backward references to previous data.

   This is a hash map of fixed size (BUCKET_SIZE). Starting from the
   given index, BUCKET_SWEEP buckets are used to store values of a key. */
typedef struct HashLongestMatchQuickly {
  /* Ring-buffer positions keyed by hash; the BUCKET_SWEEP extra slots let
     Store() write at key+offset without wrapping past the nominal end. */
  uint32_t buckets_[BUCKET_SIZE + BUCKET_SWEEP];
  /* True if buckets_ array needs to be initialized. */
  BROTLI_BOOL is_dirty_;
  /* Statistics for static-dictionary searches (fed to
     SearchInStaticDictionary in FindLongestMatch). */
  DictionarySearchStatictics dict_search_stats_;
} HashLongestMatchQuickly;
42 | \r | |
43 | static void FN(Reset)(HashLongestMatchQuickly* self) {\r | |
44 | self->is_dirty_ = BROTLI_TRUE;\r | |
45 | DictionarySearchStaticticsReset(&self->dict_search_stats_);\r | |
46 | }\r | |
47 | \r | |
48 | static void FN(InitEmpty)(HashLongestMatchQuickly* self) {\r | |
49 | if (self->is_dirty_) {\r | |
50 | /* It is not strictly necessary to fill this buffer here, but\r | |
51 | not filling will make the results of the compression stochastic\r | |
52 | (but correct). This is because random data would cause the\r | |
53 | system to find accidentally good backward references here and there. */\r | |
54 | memset(&self->buckets_[0], 0, sizeof(self->buckets_));\r | |
55 | self->is_dirty_ = BROTLI_FALSE;\r | |
56 | }\r | |
57 | }\r | |
58 | \r | |
59 | static void FN(InitForData)(HashLongestMatchQuickly* self, const uint8_t* data,\r | |
60 | size_t num) {\r | |
61 | size_t i;\r | |
62 | for (i = 0; i < num; ++i) {\r | |
63 | const uint32_t key = FN(HashBytes)(&data[i]);\r | |
64 | memset(&self->buckets_[key], 0, BUCKET_SWEEP * sizeof(self->buckets_[0]));\r | |
65 | }\r | |
66 | if (num != 0) {\r | |
67 | self->is_dirty_ = BROTLI_FALSE;\r | |
68 | }\r | |
69 | }\r | |
70 | \r | |
71 | static void FN(Init)(\r | |
72 | MemoryManager* m, HashLongestMatchQuickly* self, const uint8_t* data,\r | |
73 | const BrotliEncoderParams* params, size_t position, size_t bytes,\r | |
74 | BROTLI_BOOL is_last) {\r | |
75 | /* Choose which init method is faster.\r | |
76 | Init() is about 100 times faster than InitForData(). */\r | |
77 | const size_t kMaxBytesForPartialHashInit = HASH_MAP_SIZE >> 7;\r | |
78 | BROTLI_UNUSED(m);\r | |
79 | BROTLI_UNUSED(params);\r | |
80 | if (position == 0 && is_last && bytes <= kMaxBytesForPartialHashInit) {\r | |
81 | FN(InitForData)(self, data, bytes);\r | |
82 | } else {\r | |
83 | FN(InitEmpty)(self);\r | |
84 | }\r | |
85 | }\r | |
86 | \r | |
87 | /* Look at 5 bytes at &data[ix & mask].\r | |
88 | Compute a hash from these, and store the value somewhere within\r | |
89 | [ix .. ix+3]. */\r | |
90 | static BROTLI_INLINE void FN(Store)(HashLongestMatchQuickly* self,\r | |
91 | const uint8_t *data, const size_t mask, const size_t ix) {\r | |
92 | const uint32_t key = FN(HashBytes)(&data[ix & mask]);\r | |
93 | /* Wiggle the value with the bucket sweep range. */\r | |
94 | const uint32_t off = (ix >> 3) % BUCKET_SWEEP;\r | |
95 | self->buckets_[key + off] = (uint32_t)ix;\r | |
96 | }\r | |
97 | \r | |
98 | static BROTLI_INLINE void FN(StoreRange)(HashLongestMatchQuickly* self,\r | |
99 | const uint8_t *data, const size_t mask, const size_t ix_start,\r | |
100 | const size_t ix_end) {\r | |
101 | size_t i;\r | |
102 | for (i = ix_start; i < ix_end; ++i) {\r | |
103 | FN(Store)(self, data, mask, i);\r | |
104 | }\r | |
105 | }\r | |
106 | \r | |
107 | static BROTLI_INLINE void FN(StitchToPreviousBlock)(\r | |
108 | HashLongestMatchQuickly* self, size_t num_bytes, size_t position,\r | |
109 | const uint8_t* ringbuffer, size_t ringbuffer_mask) {\r | |
110 | if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {\r | |
111 | /* Prepare the hashes for three last bytes of the last write.\r | |
112 | These could not be calculated before, since they require knowledge\r | |
113 | of both the previous and the current block. */\r | |
114 | FN(Store)(self, ringbuffer, ringbuffer_mask, position - 3);\r | |
115 | FN(Store)(self, ringbuffer, ringbuffer_mask, position - 2);\r | |
116 | FN(Store)(self, ringbuffer, ringbuffer_mask, position - 1);\r | |
117 | }\r | |
118 | }\r | |
119 | \r | |
/* Find a longest backward match of &data[cur_ix & ring_buffer_mask]
   up to the length of max_length and stores the position cur_ix in the
   hash table.

   Does not look for matches longer than max_length.
   Does not look for matches further away than max_backward.
   Writes the best match into |out|.
   Returns true if match is found, otherwise false. */
static BROTLI_INLINE BROTLI_BOOL FN(FindLongestMatch)(
    HashLongestMatchQuickly* self, const uint8_t* BROTLI_RESTRICT data,
    const size_t ring_buffer_mask, const int* BROTLI_RESTRICT distance_cache,
    const size_t cur_ix, const size_t max_length, const size_t max_backward,
    HasherSearchResult* BROTLI_RESTRICT out) {
  const size_t best_len_in = out->len;
  const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
  const uint32_t key = FN(HashBytes)(&data[cur_ix_masked]);
  /* Byte just past the current best match; a single-byte pre-filter that is
     updated whenever best_len grows. */
  int compare_char = data[cur_ix_masked + best_len_in];
  score_t best_score = out->score;
  size_t best_len = best_len_in;
  size_t cached_backward = (size_t)distance_cache[0];
  size_t prev_ix = cur_ix - cached_backward;
  BROTLI_BOOL is_match_found = BROTLI_FALSE;
  out->len_x_code = 0;
  /* First candidate: the most recent distance from the cache. */
  if (prev_ix < cur_ix) {
    prev_ix &= (uint32_t)ring_buffer_mask;
    if (compare_char == data[prev_ix + best_len]) {
      size_t len = FindMatchLengthWithLimit(&data[prev_ix],
                                            &data[cur_ix_masked],
                                            max_length);
      if (len >= 4) {
        best_score = BackwardReferenceScoreUsingLastDistance(len, 0);
        best_len = len;
        out->len = len;
        out->distance = cached_backward;
        out->score = best_score;
        compare_char = data[cur_ix_masked + best_len];
        if (BUCKET_SWEEP == 1) {
          /* Single-slot table: store cur_ix and accept this match outright. */
          self->buckets_[key] = (uint32_t)cur_ix;
          return BROTLI_TRUE;
        } else {
          is_match_found = BROTLI_TRUE;
        }
      }
    }
  }
  if (BUCKET_SWEEP == 1) {
    size_t backward;
    size_t len;
    /* Only one to look for, don't bother to prepare for a loop. */
    /* NOTE: the old entry must be read before it is overwritten below. */
    prev_ix = self->buckets_[key];
    self->buckets_[key] = (uint32_t)cur_ix;
    backward = cur_ix - prev_ix;
    prev_ix &= (uint32_t)ring_buffer_mask;
    if (compare_char != data[prev_ix + best_len_in]) {
      return BROTLI_FALSE;
    }
    if (PREDICT_FALSE(backward == 0 || backward > max_backward)) {
      return BROTLI_FALSE;
    }
    len = FindMatchLengthWithLimit(&data[prev_ix],
                                   &data[cur_ix_masked],
                                   max_length);
    if (len >= 4) {
      out->len = len;
      out->distance = backward;
      out->score = BackwardReferenceScore(len, backward);
      return BROTLI_TRUE;
    }
  } else {
    /* Scan all BUCKET_SWEEP slots for this key, keeping the best score. */
    uint32_t *bucket = self->buckets_ + key;
    int i;
    prev_ix = *bucket++;
    for (i = 0; i < BUCKET_SWEEP; ++i, prev_ix = *bucket++) {
      const size_t backward = cur_ix - prev_ix;
      size_t len;
      prev_ix &= (uint32_t)ring_buffer_mask;
      if (compare_char != data[prev_ix + best_len]) {
        continue;
      }
      if (PREDICT_FALSE(backward == 0 || backward > max_backward)) {
        continue;
      }
      len = FindMatchLengthWithLimit(&data[prev_ix],
                                     &data[cur_ix_masked],
                                     max_length);
      if (len >= 4) {
        const score_t score = BackwardReferenceScore(len, backward);
        if (best_score < score) {
          best_score = score;
          best_len = len;
          out->len = best_len;
          out->distance = backward;
          out->score = score;
          compare_char = data[cur_ix_masked + best_len];
          is_match_found = BROTLI_TRUE;
        }
      }
    }
  }
  if (USE_DICTIONARY && !is_match_found) {
    /* No in-window match: fall back to the static dictionary. */
    is_match_found = SearchInStaticDictionary(&self->dict_search_stats_,
        &data[cur_ix_masked], max_length, max_backward, out, BROTLI_TRUE);
  }
  /* Record cur_ix in its sweep slot (the BUCKET_SWEEP==1 paths above have
     already returned, so this only runs for the multi-slot case or after a
     dictionary fallback). */
  self->buckets_[key + ((cur_ix >> 3) % BUCKET_SWEEP)] = (uint32_t)cur_ix;
  return is_match_found;
}
226 | \r | |
/* Undo the template-local macros so the file can be re-included with
   different parameters. */
#undef HASH_MAP_SIZE
#undef BUCKET_SIZE

#undef HashLongestMatchQuickly