/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

/* template parameters: EXPORT_FN, FN */

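/* Computes backward references (insert+copy commands) for the next num_bytes
   of input starting at `position` in the ring buffer, appending the commands
   and updating the literal/command counters and the pending insert length. */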
static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
    size_t num_bytes, size_t position,
    const uint8_t* ringbuffer, size_t ringbuffer_mask,
    ContextLut literal_context_lut, const BrotliEncoderParams* params,
    Hasher* hasher, int* dist_cache, size_t* last_insert_len,
    Command* commands, size_t* num_commands, size_t* num_literals) {
  HASHER()* privat = &hasher->privat.FN(_);
  /* Set maximum distance, see section 9.1 of the spec. */
  const size_t max_backward_limit = BROTLI_MAX_BACKWARD_LIMIT(params->lgwin);
  const size_t position_offset = params->stream_offset;

  const Command* const orig_commands = commands;
  size_t insert_length = *last_insert_len;
  const size_t pos_end = position + num_bytes;
  const size_t store_end = num_bytes >= FN(StoreLookahead)() ?
      position + num_bytes - FN(StoreLookahead)() + 1 : position;

  /* Speed-up heuristics for random data. */
  const size_t random_heuristics_window_size =
      LiteralSpreeLengthForSparseSearch(params);
  size_t apply_random_heuristics = position + random_heuristics_window_size;
  const size_t gap = params->dictionary.compound.total_size;
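  /* Compound dictionary bytes are addressable just beyond the regular
     back-reference window, so the distance limits below are extended by
     `gap`. */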

  /* Minimum score to accept a backward reference. */
  const score_t kMinScore = BROTLI_SCORE_BASE + 100;

  FN(PrepareDistanceCache)(privat, dist_cache);

  while (position + FN(HashTypeLength)() < pos_end) {
    size_t max_length = pos_end - position;
    size_t max_distance = BROTLI_MIN(size_t, position, max_backward_limit);
    size_t dictionary_start = BROTLI_MIN(size_t,
        position + position_offset, max_backward_limit);
    HasherSearchResult sr;
    int dict_id = 0;
    uint8_t p1 = 0;
    uint8_t p2 = 0;
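    /* With a contextual dictionary, the two preceding bytes select which
       attached dictionary is searched, via the literal context map. */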
    if (params->dictionary.contextual.context_based) {
      p1 = position >= 1 ?
          ringbuffer[(size_t)(position - 1) & ringbuffer_mask] : 0;
      p2 = position >= 2 ?
          ringbuffer[(size_t)(position - 2) & ringbuffer_mask] : 0;
      dict_id = params->dictionary.contextual.context_map[
          BROTLI_CONTEXT(p1, p2, literal_context_lut)];
    }
    sr.len = 0;
    sr.len_code_delta = 0;
    sr.distance = 0;
    sr.score = kMinScore;
    FN(FindLongestMatch)(privat, params->dictionary.contextual.dict[dict_id],
        ringbuffer, ringbuffer_mask, dist_cache, position, max_length,
        max_distance, dictionary_start + gap, params->dist.max_distance, &sr);
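    /* A compound dictionary match may still beat the result found above. */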
    if (ENABLE_COMPOUND_DICTIONARY) {
      LookupCompoundDictionaryMatch(&params->dictionary.compound, ringbuffer,
          ringbuffer_mask, dist_cache, position, max_length,
          dictionary_start, params->dist.max_distance, &sr);
    }
    if (sr.score > kMinScore) {
      /* Found a match. Let's look for something even better ahead. */
      int delayed_backward_references_in_row = 0;
      --max_length;
      for (;; --max_length) {
        const score_t cost_diff_lazy = 175;
        HasherSearchResult sr2;
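        /* For lower qualities, seed the search at position + 1 with the
           current match length, so only matches at least that long are
           considered; higher qualities search unconstrained. */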
        sr2.len = params->quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH ?
            BROTLI_MIN(size_t, sr.len - 1, max_length) : 0;
        sr2.len_code_delta = 0;
        sr2.distance = 0;
        sr2.score = kMinScore;
        max_distance = BROTLI_MIN(size_t, position + 1, max_backward_limit);
        dictionary_start = BROTLI_MIN(size_t,
            position + 1 + position_offset, max_backward_limit);
        if (params->dictionary.contextual.context_based) {
          p2 = p1;
          p1 = ringbuffer[position & ringbuffer_mask];
          dict_id = params->dictionary.contextual.context_map[
              BROTLI_CONTEXT(p1, p2, literal_context_lut)];
        }
        FN(FindLongestMatch)(privat,
            params->dictionary.contextual.dict[dict_id],
            ringbuffer, ringbuffer_mask, dist_cache, position + 1, max_length,
            max_distance, dictionary_start + gap, params->dist.max_distance,
            &sr2);
        if (ENABLE_COMPOUND_DICTIONARY) {
          LookupCompoundDictionaryMatch(
              &params->dictionary.compound, ringbuffer,
              ringbuffer_mask, dist_cache, position + 1, max_length,
              dictionary_start, params->dist.max_distance, &sr2);
        }
        if (sr2.score >= sr.score + cost_diff_lazy) {
          /* Ok, let's just write one byte for now and start a match from the
             next byte. */
          ++position;
          ++insert_length;
          sr = sr2;
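          /* Do not defer more than a few times in a row; otherwise the
             repeated look-ahead searches become too expensive. */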
          if (++delayed_backward_references_in_row < 4 &&
              position + FN(HashTypeLength)() < pos_end) {
            continue;
          }
        }
        break;
      }
      apply_random_heuristics =
          position + 2 * sr.len + random_heuristics_window_size;
      dictionary_start = BROTLI_MIN(size_t,
          position + position_offset, max_backward_limit);
      {
        /* The first 16 codes are special short-codes,
           and the minimum offset is 1. */
        size_t distance_code = ComputeDistanceCode(
            sr.distance, dictionary_start + gap, dist_cache);
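        /* Update the distance cache only for real window or compound
           dictionary distances; code 0 means the last distance was reused,
           so the cache already holds it. */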
        if ((sr.distance <= (dictionary_start + gap)) && distance_code > 0) {
          dist_cache[3] = dist_cache[2];
          dist_cache[2] = dist_cache[1];
          dist_cache[1] = dist_cache[0];
          dist_cache[0] = (int)sr.distance;
          FN(PrepareDistanceCache)(privat, dist_cache);
        }
        InitCommand(commands++, &params->dist, insert_length,
            sr.len, sr.len_code_delta, distance_code);
      }
      *num_literals += insert_length;
      insert_length = 0;
      /* Put the hash keys into the table, if there are enough bytes left.
         Depending on the hasher implementation, it can push all positions
         in the given range or only a subset of them.
         Avoid hash poisoning with RLE data. */
      {
        size_t range_start = position + 2;
        size_t range_end = BROTLI_MIN(size_t, position + sr.len, store_end);
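        /* A short distance relative to the copy length indicates run-like
           (RLE) data; store only positions near the end of the copy. */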
        if (sr.distance < (sr.len >> 2)) {
          range_start = BROTLI_MIN(size_t, range_end, BROTLI_MAX(size_t,
              range_start, position + sr.len - (sr.distance << 2)));
        }
        FN(StoreRange)(privat, ringbuffer, ringbuffer_mask, range_start,
                       range_end);
      }
      position += sr.len;
    } else {
      ++insert_length;
      ++position;
      /* If we have not seen matches for a long time, we can skip some
         match lookups. Unsuccessful match lookups are very expensive,
         and this kind of heuristic speeds up compression quite a lot. */
      if (position > apply_random_heuristics) {
        /* Going through incompressible data, jump ahead. */
        if (position >
            apply_random_heuristics + 4 * random_heuristics_window_size) {
          /* It has been a long time since we saw a copy, so we assume this
             data is not compressible and store hashes less often. Hashes of
             incompressible data are less likely to be useful in the future,
             so we store fewer of them to avoid flooding the hash table built
             from compressible data. */
          const size_t kMargin =
              BROTLI_MAX(size_t, FN(StoreLookahead)() - 1, 4);
          size_t pos_jump =
              BROTLI_MIN(size_t, position + 16, pos_end - kMargin);
          for (; position < pos_jump; position += 4) {
            FN(Store)(privat, ringbuffer, ringbuffer_mask, position);
            insert_length += 4;
          }
        } else {
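          /* Less certain that the data is incompressible: jump a shorter
             distance and store every other position. */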
          const size_t kMargin =
              BROTLI_MAX(size_t, FN(StoreLookahead)() - 1, 2);
          size_t pos_jump =
              BROTLI_MIN(size_t, position + 8, pos_end - kMargin);
          for (; position < pos_jump; position += 2) {
            FN(Store)(privat, ringbuffer, ringbuffer_mask, position);
            insert_length += 2;
          }
        }
      }
    }
  }
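  /* Account for the trailing bytes that were not covered by a command. */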
  insert_length += pos_end - position;
  *last_insert_len = insert_length;
  *num_commands += (size_t)(commands - orig_commands);
}