@@ -71,7 +71,7 @@ To apply compiler hint, enclose the branching condition into macros, like this:
71
71
*/
72
72
#if BROTLI_GNUC_HAS_BUILTIN(__builtin_expect, 3, 0, 0) || \
73
73
BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
74
- BROTLI_SUNPRO_VERSION_CHECK(5, 12, 0) || \
74
+ BROTLI_SUNPRO_VERSION_CHECK(5, 15, 0) || \
75
75
BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
76
76
BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
77
77
BROTLI_TI_VERSION_CHECK(7, 3, 0) || \
@@ -180,16 +180,33 @@ To apply compiler hint, enclose the branching condition into macros, like this:
180
180
#define BROTLI_UNUSED_FUNCTION static BROTLI_INLINE
181
181
#endif
182
182
183
+ #if BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0)
184
+ #define BROTLI_ALIGNED(N) __attribute__((aligned(N)))
185
+ #else
186
+ #define BROTLI_ALIGNED(N)
187
+ #endif
188
+
183
189
#if (defined(__ARM_ARCH) && (__ARM_ARCH == 7)) || \
184
190
(defined(M_ARM) && (M_ARM == 7))
185
191
#define BROTLI_TARGET_ARMV7
186
192
#endif /* ARMv7 */
187
193
188
194
#if (defined(__ARM_ARCH) && (__ARM_ARCH == 8)) || \
189
195
defined(__aarch64__) || defined(__ARM64_ARCH_8__)
190
- #define BROTLI_TARGET_ARMV8
196
+ #define BROTLI_TARGET_ARMV8_ANY
197
+
198
+ #if defined(__ARM_32BIT_STATE)
199
+ #define BROTLI_TARGET_ARMV8_32
200
+ #elif defined(__ARM_64BIT_STATE)
201
+ #define BROTLI_TARGET_ARMV8_64
202
+ #endif
203
+
191
204
#endif /* ARMv8 */
192
205
206
+ #if defined(__ARM_NEON__) || defined(__ARM_NEON)
207
+ #define BROTLI_TARGET_NEON
208
+ #endif
209
+
193
210
#if defined(__i386) || defined(_M_IX86)
194
211
#define BROTLI_TARGET_X86
195
212
#endif
@@ -210,7 +227,7 @@ To apply compiler hint, enclose the branching condition into macros, like this:
210
227
#define BROTLI_64_BITS 1
211
228
#elif defined(BROTLI_BUILD_32_BIT)
212
229
#define BROTLI_64_BITS 0
213
- #elif defined(BROTLI_TARGET_X64) || defined(BROTLI_TARGET_ARMV8) || \
230
+ #elif defined(BROTLI_TARGET_X64) || defined(BROTLI_TARGET_ARMV8_64) || \
214
231
defined(BROTLI_TARGET_POWERPC64) || defined(BROTLI_TARGET_RISCV64)
215
232
#define BROTLI_64_BITS 1
216
233
#else
@@ -261,7 +278,7 @@ To apply compiler hint, enclose the branching condition into macros, like this:
261
278
#if defined(BROTLI_BUILD_PORTABLE)
262
279
#define BROTLI_ALIGNED_READ (!!1)
263
280
#elif defined(BROTLI_TARGET_X86) || defined(BROTLI_TARGET_X64) || \
264
- defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8) || \
281
+ defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY) || \
265
282
defined(BROTLI_TARGET_RISCV64)
266
283
/* Allow unaligned read only for white-listed CPUs. */
267
284
#define BROTLI_ALIGNED_READ (!!0)
@@ -291,6 +308,33 @@ static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) {
291
308
}
292
309
#else /* BROTLI_ALIGNED_READ */
293
310
/* Unaligned memory access is allowed: just cast pointer to requested type. */
311
+ #if defined(ADDRESS_SANITIZER) || defined(THREAD_SANITIZER) || \
312
+ defined(MEMORY_SANITIZER)
313
+ /* Consider we have an unaligned load/store of 4 bytes from address 0x...05.
314
+ AddressSanitizer will treat it as a 3-byte access to the range 05:07 and
315
+ will miss a bug if 08 is the first unaddressable byte.
316
+ ThreadSanitizer will also treat this as a 3-byte access to 05:07 and will
317
+ miss a race between this access and some other accesses to 08.
318
+ MemorySanitizer will correctly propagate the shadow on unaligned stores
319
+ and correctly report bugs on unaligned loads, but it may not properly
320
+ update and report the origin of the uninitialized memory.
321
+ For all three tools, replacing an unaligned access with a tool-specific
322
+ callback solves the problem. */
323
+ #if defined(__cplusplus)
324
+ extern "C" {
325
+ #endif /* __cplusplus */
326
+ uint16_t __sanitizer_unaligned_load16(const void* p);
327
+ uint32_t __sanitizer_unaligned_load32(const void* p);
328
+ uint64_t __sanitizer_unaligned_load64(const void* p);
329
+ void __sanitizer_unaligned_store64(void* p, uint64_t v);
330
+ #if defined(__cplusplus)
331
+ } /* extern "C" */
332
+ #endif /* __cplusplus */
333
+ #define BrotliUnalignedRead16 __sanitizer_unaligned_load16
334
+ #define BrotliUnalignedRead32 __sanitizer_unaligned_load32
335
+ #define BrotliUnalignedRead64 __sanitizer_unaligned_load64
336
+ #define BrotliUnalignedWrite64 __sanitizer_unaligned_store64
337
+ #else
294
338
static BROTLI_INLINE uint16_t BrotliUnalignedRead16(const void* p) {
295
339
return *(const uint16_t*)p;
296
340
}
@@ -306,16 +350,31 @@ static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) {
306
350
}
307
351
#else /* BROTLI_64_BITS */
308
352
/* Avoid emitting LDRD / STRD, which require properly aligned address. */
353
+ /* If __attribute__(aligned) is available, use that. Otherwise, memcpy. */
354
+
355
+ #if BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0)
356
+ typedef BROTLI_ALIGNED(1) uint64_t brotli_unaligned_uint64_t;
357
+
358
+ static BROTLI_INLINE uint64_t BrotliUnalignedRead64(const void* p) {
359
+ return (uint64_t) ((brotli_unaligned_uint64_t*) p)[0];
360
+ }
361
+ static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) {
362
+ brotli_unaligned_uint64_t* dwords = (brotli_unaligned_uint64_t*) p;
363
+ dwords[0] = (brotli_unaligned_uint64_t) v;
364
+ }
365
+ #else /* BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0) */
309
366
static BROTLI_INLINE uint64_t BrotliUnalignedRead64(const void* p) {
310
- const uint32_t* dwords = (const uint32_t*)p;
311
- return dwords[0] | ((uint64_t)dwords[1] << 32);
367
+ uint64_t v;
368
+ memcpy(&v, p, sizeof(uint64_t));
369
+ return v;
312
370
}
371
+
313
372
static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) {
314
- uint32_t* dwords = (uint32_t*)p;
315
- dwords[0] = (uint32_t)v;
316
- dwords[1] = (uint32_t)(v >> 32);
373
+ memcpy(p, &v, sizeof(uint64_t));
317
374
}
375
+ #endif /* BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0) */
318
376
#endif /* BROTLI_64_BITS */
377
+ #endif /* ASAN / TSAN / MSAN */
319
378
#endif /* BROTLI_ALIGNED_READ */
320
379
321
380
#if BROTLI_LITTLE_ENDIAN
@@ -400,7 +459,7 @@ static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64LE(void* p, uint64_t v) {
400
459
#define BROTLI_IS_CONSTANT(x) (!!0)
401
460
#endif
402
461
403
- #if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8)
462
+ #if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY)
404
463
#define BROTLI_HAS_UBFX (!!1)
405
464
#else
406
465
#define BROTLI_HAS_UBFX (!!0)
@@ -427,7 +486,7 @@ static BROTLI_INLINE void BrotliDump(const char* f, int l, const char* fn) {
427
486
/* TODO: add appropriate icc/sunpro/arm/ibm/ti checks. */
428
487
#if (BROTLI_GNUC_VERSION_CHECK(3, 0, 0) || defined(__llvm__)) && \
429
488
!defined(BROTLI_BUILD_NO_RBIT)
430
- #if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8)
489
+ #if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY)
431
490
/* TODO: detect ARMv6T2 and enable this code for it. */
432
491
static BROTLI_INLINE brotli_reg_t BrotliRBit(brotli_reg_t input) {
433
492
brotli_reg_t output;
0 commit comments