Diffstat (limited to 'include/linux/compiler.h')
-rw-r--r--  include/linux/compiler.h | 39 +++++++++++++++++++++++++++------------
1 file changed, 27 insertions(+), 12 deletions(-)
diff --git a/include/linux/compiler.h b/include/linux/compiler.h
index a1c81f80978e..d1ec10a940ff 100644
--- a/include/linux/compiler.h
+++ b/include/linux/compiler.h
@@ -54,7 +54,11 @@ extern void __chk_io_ptr(const volatile void __iomem *);
 #include <linux/compiler-gcc.h>
 #endif
 
+#ifdef CC_USING_HOTPATCH
+#define notrace __attribute__((hotpatch(0,0)))
+#else
 #define notrace __attribute__((no_instrument_function))
+#endif
 
 /* Intel compiler defines __GNUC__. So we will overwrite implementations
  * coming from above header files here
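With CC_USING_HOTPATCH (gcc's -mhotpatch option, used on s390), the hotpatch(0,0) attribute requests zero halfwords of patchable space before and after the function entry, which leaves ftrace nothing to patch and so doubles as notrace; plain no_instrument_function has no effect on the hotpatch prologue. A minimal sketch of a notrace user, with a hypothetical function name:

static notrace unsigned long my_return_address(void)
{
	/* runs inside the tracing machinery, so it must not itself be traced */
	return (unsigned long)__builtin_return_address(0);
}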
@@ -215,7 +219,7 @@ static __always_inline void __read_once_size(volatile void *p, void *res, int si
 	}
 }
 
-static __always_inline void __assign_once_size(volatile void *p, void *res, int size)
+static __always_inline void __write_once_size(volatile void *p, void *res, int size)
 {
 	switch (size) {
 	case 1: *(volatile __u8 *)p = *(__u8 *)res; break;
@@ -235,15 +239,15 @@ static __always_inline void __assign_once_size(volatile void *p, void *res, int
 /*
  * Prevent the compiler from merging or refetching reads or writes. The
  * compiler is also forbidden from reordering successive instances of
- * READ_ONCE, ASSIGN_ONCE and ACCESS_ONCE (see below), but only when the
+ * READ_ONCE, WRITE_ONCE and ACCESS_ONCE (see below), but only when the
  * compiler is aware of some particular ordering. One way to make the
  * compiler aware of ordering is to put the two invocations of READ_ONCE,
- * ASSIGN_ONCE or ACCESS_ONCE() in different C statements.
+ * WRITE_ONCE or ACCESS_ONCE() in different C statements.
  *
  * In contrast to ACCESS_ONCE these two macros will also work on aggregate
  * data types like structs or unions. If the size of the accessed data
  * type exceeds the word size of the machine (e.g., 32 bits or 64 bits)
- * READ_ONCE() and ASSIGN_ONCE() will fall back to memcpy and print a
+ * READ_ONCE() and WRITE_ONCE() will fall back to memcpy and print a
  * compile-time warning.
  *
  * Their two major use cases are: (1) Mediating communication between
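As a rough illustration of use case (1), a sketch of process-level code polling a flag set from an interrupt handler; the flag and both functions are hypothetical, and note that these macros constrain only the compiler, not CPU reordering between processors:

#include <linux/interrupt.h>	/* irqreturn_t, IRQ_HANDLED */
#include <asm/processor.h>	/* cpu_relax() */

static int my_need_work;	/* hypothetical flag shared with irq context */

static irqreturn_t my_irq_handler(int irq, void *dev_id)
{
	WRITE_ONCE(my_need_work, 1);	/* one untorn, non-elided store */
	return IRQ_HANDLED;
}

static void my_wait_for_irq(void)
{
	while (!READ_ONCE(my_need_work))	/* fresh load on every pass */
		cpu_relax();
}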
@@ -257,8 +261,8 @@ static __always_inline void __assign_once_size(volatile void *p, void *res, int
 #define READ_ONCE(x) \
 	({ typeof(x) __val; __read_once_size(&x, &__val, sizeof(__val)); __val; })
 
-#define ASSIGN_ONCE(val, x) \
-	({ typeof(x) __val; __val = val; __assign_once_size(&x, &__val, sizeof(__val)); __val; })
+#define WRITE_ONCE(x, val) \
+	({ typeof(x) __val; __val = val; __write_once_size(&x, &__val, sizeof(__val)); __val; })
 
 #endif /* __KERNEL__ */
 
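Note that the rename also swaps the argument order: ASSIGN_ONCE took the value first, while WRITE_ONCE takes the destination first, mirroring READ_ONCE and plain assignment. A hypothetical caller changes like this:

/* before this change (value first): */
ASSIGN_ONCE(1, p->state);

/* after this change (destination first, value second): */
WRITE_ONCE(p->state, 1);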
@@ -385,7 +389,7 @@ static __always_inline void __assign_once_size(volatile void *p, void *res, int
 
 /* Is this type a native word size -- useful for atomic operations */
 #ifndef __native_word
-# define __native_word(t) (sizeof(t) == sizeof(int) || sizeof(t) == sizeof(long))
+# define __native_word(t) (sizeof(t) == sizeof(char) || sizeof(t) == sizeof(short) || sizeof(t) == sizeof(int) || sizeof(t) == sizeof(long))
 #endif
 
 /* Compile time object size, -1 for unknown */
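The practical effect is on the compile-time checks built from __native_word(), such as the one behind smp_store_release() and smp_load_acquire(): with the old definition, 8- and 16-bit operands were rejected even though such loads and stores are atomic on the supported architectures. A sketch, with a hypothetical flag:

#include <asm/barrier.h>	/* smp_store_release() */

static char my_stop_flag;	/* hypothetical one-byte flag */

static void my_request_stop(void)
{
	/*
	 * smp_store_release() checks __native_word() on its operand.
	 * With the old definition sizeof(char) matched neither int nor
	 * long, so this failed to build; now char and short pass too.
	 */
	smp_store_release(&my_stop_flag, 1);
}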
@@ -447,12 +451,23 @@ static __always_inline void __assign_once_size(volatile void *p, void *res, int
  * to make the compiler aware of ordering is to put the two invocations of
  * ACCESS_ONCE() in different C statements.
  *
- * This macro does absolutely -nothing- to prevent the CPU from reordering,
- * merging, or refetching absolutely anything at any time.  Its main intended
- * use is to mediate communication between process-level code and irq/NMI
- * handlers, all running on the same CPU.
+ * ACCESS_ONCE will only work on scalar types. For union types, ACCESS_ONCE
+ * on a union member will work as long as the size of the member matches the
+ * size of the union and the size is smaller than word size.
+ *
+ * The major use cases of ACCESS_ONCE used to be (1) Mediating communication
+ * between process-level code and irq/NMI handlers, all running on the same CPU,
+ * and (2) Ensuring that the compiler does not fold, spindle, or otherwise
+ * mutilate accesses that either do not require ordering or that interact
+ * with an explicit memory barrier or atomic instruction that provides the
+ * required ordering.
+ *
+ * If possible use READ_ONCE/WRITE_ONCE instead.
  */
-#define ACCESS_ONCE(x) (*(volatile typeof(x) *)&(x))
+#define __ACCESS_ONCE(x) ({ \
+	__maybe_unused typeof(x) __var = (__force typeof(x)) 0; \
+	(volatile typeof(x) *)&(x); })
+#define ACCESS_ONCE(x) (*__ACCESS_ONCE(x))
 
 /* Ignore/forbid kprobes attach on very low level functions marked by this attribute: */
 #ifdef CONFIG_KPROBES
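The dummy __var initialization is what enforces the scalar-types rule: (typeof(x)) 0 is a valid cast only for scalar (and, via the sparse __force, address-space-annotated) types, so applying ACCESS_ONCE to an aggregate now fails at compile time instead of producing a volatile struct access that gcc 4.6/4.7 are known to handle incorrectly. A sketch with hypothetical types:

static void my_access_once_demo(void)
{
	static int counter;
	static struct { int a, b; } pair;
	int v;

	v = ACCESS_ONCE(counter);	/* scalar: works as before */
	(void)v;

	/*
	 * Aggregate: the hidden "__var = (typeof(x)) 0" initializer cannot
	 * be formed for a struct, so the line below would now fail to build
	 * rather than compile into an access whose volatile qualifier the
	 * affected gcc versions may silently drop.
	 */
	/* pair = ACCESS_ONCE(pair); */
	(void)pair;
}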