/* Copyright 2016 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

/* Macros for compiler / platform specific features and build options.

   Build options are:
    * BROTLI_BUILD_32_BIT disables 64-bit optimizations
    * BROTLI_BUILD_64_BIT forces use of 64-bit optimizations
    * BROTLI_BUILD_BIG_ENDIAN forces use of big-endian optimizations
    * BROTLI_BUILD_ENDIAN_NEUTRAL disables endian-aware optimizations
    * BROTLI_BUILD_LITTLE_ENDIAN forces use of little-endian optimizations
    * BROTLI_BUILD_PORTABLE disables dangerous optimizations, like unaligned
      reads and overlapping memcpy; this reduces decompression speed by 5%
    * BROTLI_BUILD_NO_RBIT disables the "rbit" optimization for ARM CPUs
    * BROTLI_DEBUG dumps file name and line number when the decoder detects a
      stream or memory error
    * BROTLI_ENABLE_LOG enables asserts and dumps various state information
*/
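
/* Illustrative only (not part of the original header): these options are
   plain preprocessor definitions, so a build can enable them on the compiler
   command line, e.g.

     cc -O2 -DBROTLI_BUILD_PORTABLE -DBROTLI_BUILD_ENDIAN_NEUTRAL -c foo.c

   or via the build system's equivalent (e.g. target_compile_definitions() in
   CMake). The exact flags and file name above are just an example. */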

#ifndef BROTLI_COMMON_PLATFORM_H_
#define BROTLI_COMMON_PLATFORM_H_

#include <string.h>  /* memcpy */

#include <brotli/port.h>
#include <brotli/types.h>

#if defined(OS_LINUX) || defined(OS_CYGWIN) || defined(__EMSCRIPTEN__)
#include <endian.h>
#elif defined(OS_FREEBSD)
#include <machine/endian.h>
#elif defined(OS_MACOSX)
#include <machine/endian.h>
/* Let's try and follow the Linux convention */
#define BROTLI_X_BYTE_ORDER BYTE_ORDER
#define BROTLI_X_LITTLE_ENDIAN LITTLE_ENDIAN
#define BROTLI_X_BIG_ENDIAN BIG_ENDIAN
#endif

#if BROTLI_MSVC_VERSION_CHECK(18, 0, 0)
#include <intrin.h>
#endif

#if defined(BROTLI_ENABLE_LOG) || defined(BROTLI_DEBUG)
#include <assert.h>
#include <stdio.h>
#endif

/* The following macros were borrowed from https://github.com/nemequ/hedley
 * with permission of the original author - Evan Nemerson <[email protected]> */

/* >>> >>> >>> hedley macros */

/* Define "BROTLI_PREDICT_TRUE" and "BROTLI_PREDICT_FALSE" macros for capable
   compilers.

   To apply a compiler hint, wrap the branching condition in one of these
   macros, like this:

     if (BROTLI_PREDICT_TRUE(zero == 0)) {
       // main execution path
     } else {
       // compiler should place this code outside of main execution path
     }

   OR:

     if (BROTLI_PREDICT_FALSE(something_rare_or_unexpected_happens)) {
       // compiler should place this code outside of main execution path
     }

*/
#if BROTLI_GNUC_HAS_BUILTIN(__builtin_expect, 3, 0, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
    BROTLI_SUNPRO_VERSION_CHECK(5, 15, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
    BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
    BROTLI_TI_VERSION_CHECK(7, 3, 0) || \
    BROTLI_TINYC_VERSION_CHECK(0, 9, 27)
#define BROTLI_PREDICT_TRUE(x) (__builtin_expect(!!(x), 1))
#define BROTLI_PREDICT_FALSE(x) (__builtin_expect(x, 0))
#else
#define BROTLI_PREDICT_FALSE(x) (x)
#define BROTLI_PREDICT_TRUE(x) (x)
#endif

#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && \
    !defined(__cplusplus)
#define BROTLI_RESTRICT restrict
#elif BROTLI_GNUC_VERSION_CHECK(3, 1, 0) || \
    BROTLI_MSVC_VERSION_CHECK(14, 0, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
    BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
    BROTLI_PGI_VERSION_CHECK(17, 10, 0) || \
    BROTLI_TI_VERSION_CHECK(8, 0, 0) || \
    BROTLI_IAR_VERSION_CHECK(8, 0, 0) || \
    (BROTLI_SUNPRO_VERSION_CHECK(5, 14, 0) && defined(__cplusplus))
#define BROTLI_RESTRICT __restrict
#elif BROTLI_SUNPRO_VERSION_CHECK(5, 3, 0) && !defined(__cplusplus)
#define BROTLI_RESTRICT _Restrict
#else
#define BROTLI_RESTRICT
#endif
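
/* Illustrative usage only (CopyBytes is a hypothetical helper, not part of
   this header): BROTLI_RESTRICT goes where the "restrict" qualifier would,
   promising the compiler that the pointers do not alias:

     static void CopyBytes(uint8_t* BROTLI_RESTRICT dst,
                           const uint8_t* BROTLI_RESTRICT src, size_t n) {
       size_t i;
       for (i = 0; i < n; ++i) dst[i] = src[i];
     }
*/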

#if (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \
    (defined(__cplusplus) && (__cplusplus >= 199711L))
#define BROTLI_MAYBE_INLINE inline
#elif defined(__GNUC_STDC_INLINE__) || defined(__GNUC_GNU_INLINE__) || \
    BROTLI_ARM_VERSION_CHECK(6, 2, 0)
#define BROTLI_MAYBE_INLINE __inline__
#elif BROTLI_MSVC_VERSION_CHECK(12, 0, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || BROTLI_TI_VERSION_CHECK(8, 0, 0)
#define BROTLI_MAYBE_INLINE __inline
#else
#define BROTLI_MAYBE_INLINE
#endif

#if BROTLI_GNUC_HAS_ATTRIBUTE(always_inline, 4, 0, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
    BROTLI_SUNPRO_VERSION_CHECK(5, 11, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
    BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
    BROTLI_TI_VERSION_CHECK(8, 0, 0) || \
    (BROTLI_TI_VERSION_CHECK(7, 3, 0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__))
#define BROTLI_INLINE BROTLI_MAYBE_INLINE __attribute__((__always_inline__))
#elif BROTLI_MSVC_VERSION_CHECK(12, 0, 0)
#define BROTLI_INLINE BROTLI_MAYBE_INLINE __forceinline
#elif BROTLI_TI_VERSION_CHECK(7, 0, 0) && defined(__cplusplus)
#define BROTLI_INLINE BROTLI_MAYBE_INLINE _Pragma("FUNC_ALWAYS_INLINE;")
#elif BROTLI_IAR_VERSION_CHECK(8, 0, 0)
#define BROTLI_INLINE BROTLI_MAYBE_INLINE _Pragma("inline=forced")
#else
#define BROTLI_INLINE BROTLI_MAYBE_INLINE
#endif

#if BROTLI_GNUC_HAS_ATTRIBUTE(noinline, 4, 0, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
    BROTLI_SUNPRO_VERSION_CHECK(5, 11, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
    BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
    BROTLI_TI_VERSION_CHECK(8, 0, 0) || \
    (BROTLI_TI_VERSION_CHECK(7, 3, 0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__))
#define BROTLI_NOINLINE __attribute__((__noinline__))
#elif BROTLI_MSVC_VERSION_CHECK(13, 10, 0)
#define BROTLI_NOINLINE __declspec(noinline)
#elif BROTLI_PGI_VERSION_CHECK(10, 2, 0)
#define BROTLI_NOINLINE _Pragma("noinline")
#elif BROTLI_TI_VERSION_CHECK(6, 0, 0) && defined(__cplusplus)
#define BROTLI_NOINLINE _Pragma("FUNC_CANNOT_INLINE;")
#elif BROTLI_IAR_VERSION_CHECK(8, 0, 0)
#define BROTLI_NOINLINE _Pragma("inline=never")
#else
#define BROTLI_NOINLINE
#endif

/* <<< <<< <<< end of hedley macros. */

#if BROTLI_GNUC_HAS_ATTRIBUTE(unused, 2, 7, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0)
#define BROTLI_UNUSED_FUNCTION static BROTLI_INLINE __attribute__ ((unused))
#else
#define BROTLI_UNUSED_FUNCTION static BROTLI_INLINE
#endif

#if BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0)
#define BROTLI_ALIGNED(N) __attribute__((aligned(N)))
#else
#define BROTLI_ALIGNED(N)
#endif

#if (defined(__ARM_ARCH) && (__ARM_ARCH == 7)) || \
    (defined(M_ARM) && (M_ARM == 7))
#define BROTLI_TARGET_ARMV7
#endif  /* ARMv7 */

#if (defined(__ARM_ARCH) && (__ARM_ARCH == 8)) || \
    defined(__aarch64__) || defined(__ARM64_ARCH_8__)
#define BROTLI_TARGET_ARMV8_ANY

#if defined(__ARM_32BIT_STATE)
#define BROTLI_TARGET_ARMV8_32
#elif defined(__ARM_64BIT_STATE)
#define BROTLI_TARGET_ARMV8_64
#endif

#endif  /* ARMv8 */

#if defined(__ARM_NEON__) || defined(__ARM_NEON)
#define BROTLI_TARGET_NEON
#endif

#if defined(__i386) || defined(_M_IX86)
#define BROTLI_TARGET_X86
#endif

#if defined(__x86_64__) || defined(_M_X64)
#define BROTLI_TARGET_X64
#endif

#if defined(__PPC64__)
#define BROTLI_TARGET_POWERPC64
#endif

#if defined(__riscv) && defined(__riscv_xlen) && __riscv_xlen == 64
#define BROTLI_TARGET_RISCV64
#endif

#if defined(BROTLI_BUILD_64_BIT)
#define BROTLI_64_BITS 1
#elif defined(BROTLI_BUILD_32_BIT)
#define BROTLI_64_BITS 0
#elif defined(BROTLI_TARGET_X64) || defined(BROTLI_TARGET_ARMV8_64) || \
    defined(BROTLI_TARGET_POWERPC64) || defined(BROTLI_TARGET_RISCV64)
#define BROTLI_64_BITS 1
#else
#define BROTLI_64_BITS 0
#endif

#if (BROTLI_64_BITS)
#define brotli_reg_t uint64_t
#else
#define brotli_reg_t uint32_t
#endif

#if defined(BROTLI_BUILD_BIG_ENDIAN)
#define BROTLI_BIG_ENDIAN 1
#elif defined(BROTLI_BUILD_LITTLE_ENDIAN)
#define BROTLI_LITTLE_ENDIAN 1
#elif defined(BROTLI_BUILD_ENDIAN_NEUTRAL)
/* Just break the elif chain. */
#elif defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
#define BROTLI_LITTLE_ENDIAN 1
#elif defined(_WIN32) || defined(BROTLI_TARGET_X64)
/* Win32 & x64 can currently always be assumed to be little endian */
#define BROTLI_LITTLE_ENDIAN 1
#elif defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
#define BROTLI_BIG_ENDIAN 1
#elif defined(BROTLI_X_BYTE_ORDER)
#if BROTLI_X_BYTE_ORDER == BROTLI_X_LITTLE_ENDIAN
#define BROTLI_LITTLE_ENDIAN 1
#elif BROTLI_X_BYTE_ORDER == BROTLI_X_BIG_ENDIAN
#define BROTLI_BIG_ENDIAN 1
#endif
#endif  /* BROTLI_X_BYTE_ORDER */

#if !defined(BROTLI_LITTLE_ENDIAN)
#define BROTLI_LITTLE_ENDIAN 0
#endif

#if !defined(BROTLI_BIG_ENDIAN)
#define BROTLI_BIG_ENDIAN 0
#endif

#if defined(BROTLI_X_BYTE_ORDER)
#undef BROTLI_X_BYTE_ORDER
#undef BROTLI_X_LITTLE_ENDIAN
#undef BROTLI_X_BIG_ENDIAN
#endif

#if defined(BROTLI_BUILD_PORTABLE)
#define BROTLI_ALIGNED_READ (!!1)
#elif defined(BROTLI_TARGET_X86) || defined(BROTLI_TARGET_X64) || \
    defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY) || \
    defined(BROTLI_TARGET_RISCV64)
/* Allow unaligned read only for white-listed CPUs. */
#define BROTLI_ALIGNED_READ (!!0)
#else
#define BROTLI_ALIGNED_READ (!!1)
#endif

#if BROTLI_ALIGNED_READ
/* Portable unaligned memory access: read / write values via memcpy. */
static BROTLI_INLINE uint16_t BrotliUnalignedRead16(const void* p) {
  uint16_t t;
  memcpy(&t, p, sizeof t);
  return t;
}
static BROTLI_INLINE uint32_t BrotliUnalignedRead32(const void* p) {
  uint32_t t;
  memcpy(&t, p, sizeof t);
  return t;
}
static BROTLI_INLINE uint64_t BrotliUnalignedRead64(const void* p) {
  uint64_t t;
  memcpy(&t, p, sizeof t);
  return t;
}
static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) {
  memcpy(p, &v, sizeof v);
}
#else  /* BROTLI_ALIGNED_READ */
/* Unaligned memory access is allowed: just cast pointer to requested type. */
#if BROTLI_SANITIZED
/* Consider an unaligned load/store of 4 bytes from address 0x...05.
   AddressSanitizer will treat it as a 3-byte access to the range 05:07 and
   will miss a bug if 08 is the first unaddressable byte.
   ThreadSanitizer will also treat this as a 3-byte access to 05:07 and will
   miss a race between this access and some other accesses to 08.
   MemorySanitizer will correctly propagate the shadow on unaligned stores
   and correctly report bugs on unaligned loads, but it may not properly
   update and report the origin of the uninitialized memory.
   For all three tools, replacing an unaligned access with a tool-specific
   callback solves the problem. */
#if defined(__cplusplus)
extern "C" {
#endif  /* __cplusplus */
uint16_t __sanitizer_unaligned_load16(const void* p);
uint32_t __sanitizer_unaligned_load32(const void* p);
uint64_t __sanitizer_unaligned_load64(const void* p);
void __sanitizer_unaligned_store64(void* p, uint64_t v);
#if defined(__cplusplus)
}  /* extern "C" */
#endif  /* __cplusplus */
#define BrotliUnalignedRead16 __sanitizer_unaligned_load16
#define BrotliUnalignedRead32 __sanitizer_unaligned_load32
#define BrotliUnalignedRead64 __sanitizer_unaligned_load64
#define BrotliUnalignedWrite64 __sanitizer_unaligned_store64
#else  /* BROTLI_SANITIZED */
static BROTLI_INLINE uint16_t BrotliUnalignedRead16(const void* p) {
  return *(const uint16_t*)p;
}
static BROTLI_INLINE uint32_t BrotliUnalignedRead32(const void* p) {
  return *(const uint32_t*)p;
}
#if (BROTLI_64_BITS)
static BROTLI_INLINE uint64_t BrotliUnalignedRead64(const void* p) {
  return *(const uint64_t*)p;
}
static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) {
  *(uint64_t*)p = v;
}
#else  /* BROTLI_64_BITS */
/* Avoid emitting LDRD / STRD, which require properly aligned address. */
/* If __attribute__(aligned) is available, use that. Otherwise, memcpy. */

#if BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0)
typedef BROTLI_ALIGNED(1) uint64_t brotli_unaligned_uint64_t;

static BROTLI_INLINE uint64_t BrotliUnalignedRead64(const void* p) {
  return (uint64_t) ((const brotli_unaligned_uint64_t*) p)[0];
}
static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) {
  brotli_unaligned_uint64_t* dwords = (brotli_unaligned_uint64_t*) p;
  dwords[0] = (brotli_unaligned_uint64_t) v;
}
#else  /* BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0) */
static BROTLI_INLINE uint64_t BrotliUnalignedRead64(const void* p) {
  uint64_t v;
  memcpy(&v, p, sizeof(uint64_t));
  return v;
}

static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) {
  memcpy(p, &v, sizeof(uint64_t));
}
#endif  /* BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0) */
#endif  /* BROTLI_64_BITS */
#endif  /* BROTLI_SANITIZED */
#endif  /* BROTLI_ALIGNED_READ */

#if BROTLI_LITTLE_ENDIAN
/* Straight endianness. Just read / write values. */
#define BROTLI_UNALIGNED_LOAD16LE BrotliUnalignedRead16
#define BROTLI_UNALIGNED_LOAD32LE BrotliUnalignedRead32
#define BROTLI_UNALIGNED_LOAD64LE BrotliUnalignedRead64
#define BROTLI_UNALIGNED_STORE64LE BrotliUnalignedWrite64
#elif BROTLI_BIG_ENDIAN  /* BROTLI_LITTLE_ENDIAN */
/* Instruct the compiler to byte-swap values. */
#define BROTLI_BSWAP16_(V) ((uint16_t)( \
  (((V) & 0xFFU) << 8) | \
  (((V) >> 8) & 0xFFU)))
static BROTLI_INLINE uint16_t BROTLI_UNALIGNED_LOAD16LE(const void* p) {
  uint16_t value = BrotliUnalignedRead16(p);
  return BROTLI_BSWAP16_(value);
}
#define BROTLI_BSWAP32_(V) ( \
  (((V) & 0xFFU) << 24) | (((V) & 0xFF00U) << 8) | \
  (((V) >> 8) & 0xFF00U) | (((V) >> 24) & 0xFFU))
static BROTLI_INLINE uint32_t BROTLI_UNALIGNED_LOAD32LE(const void* p) {
  uint32_t value = BrotliUnalignedRead32(p);
  return BROTLI_BSWAP32_(value);
}
#define BROTLI_BSWAP64_(V) ( \
  (((V) & 0xFFU) << 56) | (((V) & 0xFF00U) << 40) | \
  (((V) & 0xFF0000U) << 24) | (((V) & 0xFF000000U) << 8) | \
  (((V) >> 8) & 0xFF000000U) | (((V) >> 24) & 0xFF0000U) | \
  (((V) >> 40) & 0xFF00U) | (((V) >> 56) & 0xFFU))
static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64LE(const void* p) {
  uint64_t value = BrotliUnalignedRead64(p);
  return BROTLI_BSWAP64_(value);
}
static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64LE(void* p, uint64_t v) {
  uint64_t value = BROTLI_BSWAP64_(v);
  BrotliUnalignedWrite64(p, value);
}
#else  /* BROTLI_LITTLE_ENDIAN */
/* Read / store values byte-wise; the compiler should recognize the pattern. */
static BROTLI_INLINE uint16_t BROTLI_UNALIGNED_LOAD16LE(const void* p) {
  const uint8_t* in = (const uint8_t*)p;
  return (uint16_t)(in[0] | (in[1] << 8));
}
static BROTLI_INLINE uint32_t BROTLI_UNALIGNED_LOAD32LE(const void* p) {
  const uint8_t* in = (const uint8_t*)p;
  uint32_t value = (uint32_t)(in[0]);
  value |= (uint32_t)(in[1]) << 8;
  value |= (uint32_t)(in[2]) << 16;
  value |= (uint32_t)(in[3]) << 24;
  return value;
}
static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64LE(const void* p) {
  const uint8_t* in = (const uint8_t*)p;
  uint64_t value = (uint64_t)(in[0]);
  value |= (uint64_t)(in[1]) << 8;
  value |= (uint64_t)(in[2]) << 16;
  value |= (uint64_t)(in[3]) << 24;
  value |= (uint64_t)(in[4]) << 32;
  value |= (uint64_t)(in[5]) << 40;
  value |= (uint64_t)(in[6]) << 48;
  value |= (uint64_t)(in[7]) << 56;
  return value;
}
static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64LE(void* p, uint64_t v) {
  uint8_t* out = (uint8_t*)p;
  out[0] = (uint8_t)v;
  out[1] = (uint8_t)(v >> 8);
  out[2] = (uint8_t)(v >> 16);
  out[3] = (uint8_t)(v >> 24);
  out[4] = (uint8_t)(v >> 32);
  out[5] = (uint8_t)(v >> 40);
  out[6] = (uint8_t)(v >> 48);
  out[7] = (uint8_t)(v >> 56);
}
#endif  /* BROTLI_LITTLE_ENDIAN */

/* The BROTLI_IS_CONSTANT macro returns true for compile-time constants. */
#if BROTLI_GNUC_HAS_BUILTIN(__builtin_constant_p, 3, 0, 1) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0)
#define BROTLI_IS_CONSTANT(x) (!!__builtin_constant_p(x))
#else
#define BROTLI_IS_CONSTANT(x) (!!0)
#endif
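
/* Illustrative note (not from the original header): on compilers that provide
   __builtin_constant_p, BROTLI_IS_CONSTANT(42) evaluates to 1 while a runtime
   value yields 0; on other compilers it is always 0. A typical use is guarding
   a constant-folded fast path, e.g. (hypothetical snippet):

     if (BROTLI_IS_CONSTANT(n_bits) && n_bits == 0) return;
*/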

#if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY)
#define BROTLI_HAS_UBFX (!!1)
#else
#define BROTLI_HAS_UBFX (!!0)
#endif

#if defined(BROTLI_ENABLE_LOG)
#define BROTLI_LOG(x) printf x
#else
#define BROTLI_LOG(x)
#endif

#if defined(BROTLI_DEBUG) || defined(BROTLI_ENABLE_LOG)
#define BROTLI_DCHECK(x) assert(x)
static BROTLI_INLINE void BrotliDump(const char* f, int l, const char* fn) {
  fprintf(stderr, "%s:%d (%s)\n", f, l, fn);
  fflush(stderr);
}
#define BROTLI_DUMP() BrotliDump(__FILE__, __LINE__, __FUNCTION__)
#else
#define BROTLI_DCHECK(x)
#define BROTLI_DUMP() (void)(0)
#endif

/* TODO(eustas): add appropriate icc/sunpro/arm/ibm/ti checks. */
#if (BROTLI_GNUC_VERSION_CHECK(3, 0, 0) || defined(__llvm__)) && \
    !defined(BROTLI_BUILD_NO_RBIT)
#if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY)
/* TODO(eustas): detect ARMv6T2 and enable this code for it. */
static BROTLI_INLINE brotli_reg_t BrotliRBit(brotli_reg_t input) {
  brotli_reg_t output;
  __asm__("rbit %0, %1\n" : "=r"(output) : "r"(input));
  return output;
}
#define BROTLI_RBIT(x) BrotliRBit(x)
#endif  /* armv7 / armv8 */
#endif  /* gcc || clang */
#if !defined(BROTLI_RBIT)
static BROTLI_INLINE void BrotliRBit(void) { /* Should break build if used. */ }
#endif  /* BROTLI_RBIT */

#define BROTLI_REPEAT(N, X) { \
  if ((N & 1) != 0) {X;} \
  if ((N & 2) != 0) {X; X;} \
  if ((N & 4) != 0) {X; X; X; X;} \
}
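
/* Illustrative note (not from the original header): BROTLI_REPEAT executes X
   exactly N times for compile-time N in the range [0..7], by decomposing N
   into its 1/2/4 bits. For example, with a hypothetical statement DoWork(),

     BROTLI_REPEAT(5, DoWork())

   runs DoWork() once for the 1-bit and four times for the 4-bit, i.e. five
   invocations in total. */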

#define BROTLI_UNUSED(X) (void)(X)

#define BROTLI_MIN_MAX(T) \
  static BROTLI_INLINE T brotli_min_ ## T (T a, T b) { return a < b ? a : b; } \
  static BROTLI_INLINE T brotli_max_ ## T (T a, T b) { return a > b ? a : b; }
BROTLI_MIN_MAX(double) BROTLI_MIN_MAX(float) BROTLI_MIN_MAX(int)
BROTLI_MIN_MAX(size_t) BROTLI_MIN_MAX(uint32_t) BROTLI_MIN_MAX(uint8_t)
#undef BROTLI_MIN_MAX
#define BROTLI_MIN(T, A, B) (brotli_min_ ## T((A), (B)))
#define BROTLI_MAX(T, A, B) (brotli_max_ ## T((A), (B)))

#define BROTLI_SWAP(T, A, I, J) { \
  T __brotli_swap_tmp = (A)[(I)]; \
  (A)[(I)] = (A)[(J)]; \
  (A)[(J)] = __brotli_swap_tmp; \
}

#if BROTLI_64_BITS
#if BROTLI_GNUC_HAS_BUILTIN(__builtin_ctzll, 3, 4, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0)
#define BROTLI_TZCNT64 __builtin_ctzll
#elif BROTLI_MSVC_VERSION_CHECK(18, 0, 0)
#if defined(BROTLI_TARGET_X64)
#define BROTLI_TZCNT64 _tzcnt_u64
#else  /* BROTLI_TARGET_X64 */
static BROTLI_INLINE uint32_t BrotliBsf64Msvc(uint64_t x) {
  unsigned long lsb;  /* _BitScanForward64 expects an unsigned long*. */
  _BitScanForward64(&lsb, x);
  return (uint32_t)lsb;
}
#define BROTLI_TZCNT64 BrotliBsf64Msvc
#endif  /* BROTLI_TARGET_X64 */
#endif  /* __builtin_ctzll */
#endif  /* BROTLI_64_BITS */

#if BROTLI_GNUC_HAS_BUILTIN(__builtin_clz, 3, 4, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0)
#define BROTLI_BSR32(x) (31u ^ (uint32_t)__builtin_clz(x))
#elif BROTLI_MSVC_VERSION_CHECK(18, 0, 0)
static BROTLI_INLINE uint32_t BrotliBsr32Msvc(uint32_t x) {
  unsigned long msb;
  _BitScanReverse(&msb, x);
  return (uint32_t)msb;
}
#define BROTLI_BSR32 BrotliBsr32Msvc
#endif  /* __builtin_clz */
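
/* Illustrative note (not from the original header): when available,
   BROTLI_BSR32 yields the index of the highest set bit and BROTLI_TZCNT64 the
   number of trailing zero bits; both are undefined for a zero argument.
   For example, BROTLI_BSR32(1) == 0, BROTLI_BSR32(0x80000000u) == 31, and
   BROTLI_TZCNT64(0x8) == 3. */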

/* Default brotli_alloc_func */
BROTLI_COMMON_API void* BrotliDefaultAllocFunc(void* opaque, size_t size);

/* Default brotli_free_func */
BROTLI_COMMON_API void BrotliDefaultFreeFunc(void* opaque, void* address);

BROTLI_UNUSED_FUNCTION void BrotliSuppressUnusedFunctions(void) {
  BROTLI_UNUSED(&BrotliSuppressUnusedFunctions);
  BROTLI_UNUSED(&BrotliUnalignedRead16);
  BROTLI_UNUSED(&BrotliUnalignedRead32);
  BROTLI_UNUSED(&BrotliUnalignedRead64);
  BROTLI_UNUSED(&BrotliUnalignedWrite64);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_LOAD16LE);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_LOAD32LE);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_LOAD64LE);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_STORE64LE);
  BROTLI_UNUSED(&BrotliRBit);
  BROTLI_UNUSED(&brotli_min_double);
  BROTLI_UNUSED(&brotli_max_double);
  BROTLI_UNUSED(&brotli_min_float);
  BROTLI_UNUSED(&brotli_max_float);
  BROTLI_UNUSED(&brotli_min_int);
  BROTLI_UNUSED(&brotli_max_int);
  BROTLI_UNUSED(&brotli_min_size_t);
  BROTLI_UNUSED(&brotli_max_size_t);
  BROTLI_UNUSED(&brotli_min_uint32_t);
  BROTLI_UNUSED(&brotli_max_uint32_t);
  BROTLI_UNUSED(&brotli_min_uint8_t);
  BROTLI_UNUSED(&brotli_max_uint8_t);
  BROTLI_UNUSED(&BrotliDefaultAllocFunc);
  BROTLI_UNUSED(&BrotliDefaultFreeFunc);
#if defined(BROTLI_DEBUG) || defined(BROTLI_ENABLE_LOG)
  BROTLI_UNUSED(&BrotliDump);
#endif
}

#endif  /* BROTLI_COMMON_PLATFORM_H_ */