about summary refs log tree commit diff stats
path: root/include/asm-x86_64/percpu.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/asm-x86_64/percpu.h')
-rw-r--r--  include/asm-x86_64/percpu.h  68
1 files changed, 0 insertions, 68 deletions
diff --git a/include/asm-x86_64/percpu.h b/include/asm-x86_64/percpu.h
deleted file mode 100644
index 5abd48270101..000000000000
--- a/include/asm-x86_64/percpu.h
+++ /dev/null
@@ -1,68 +0,0 @@
/*
 * NOTE(review): this span is the quoted body of a cgit "deleted file" diff
 * for include/asm-x86_64/percpu.h.  The leading "1".."68" on each line are
 * the diff listing's own line numbers, NOT part of the header source.
 * The comments added below annotate the quoted code; the quoted lines
 * themselves are reproduced unchanged.
 */
1#ifndef _ASM_X8664_PERCPU_H_
2#define _ASM_X8664_PERCPU_H_
3#include <linux/compiler.h>
4
5/* Same as asm-generic/percpu.h, except that we store the per cpu offset
6   in the PDA. Longer term the PDA and every per cpu variable
7   should be just put into a single section and referenced directly
8   from %gs */
9
10#ifdef CONFIG_SMP
11
12#include <asm/pda.h>
13
/* SMP: each CPU's per-cpu offset is kept in that CPU's PDA
 * (cpu_pda(cpu)->data_offset); __my_cpu_offset() fetches the current
 * CPU's copy via read_pda(). */
14#define __per_cpu_offset(cpu) (cpu_pda(cpu)->data_offset)
15#define __my_cpu_offset() read_pda(data_offset)
16
17#define per_cpu_offset(x) (__per_cpu_offset(x))
18
19/* Separate out the type, so (int[3], foo) works. */
20#define DEFINE_PER_CPU(type, name) \
21	__attribute__((__section__(".data.percpu"))) __typeof__(type) per_cpu__##name
22
23#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
24	__attribute__((__section__(".data.percpu.shared_aligned"))) \
25	__typeof__(type) per_cpu__##name \
26	____cacheline_internodealigned_in_smp
27
/* The dummy "extern int simple_identifier_##var(void)" declaration forces
 * 'var' to be a plain identifier — any other expression fails to paste into
 * a legal declaration.  RELOC_HIDE (from linux/compiler.h) then applies the
 * per-cpu offset to the variable's address; presumably it exists to keep
 * the compiler from seeing through the pointer adjustment — see its
 * definition in linux/compiler.h to confirm. */
28/* var is in discarded region: offset to particular copy we want */
29#define per_cpu(var, cpu) (*({ \
30	extern int simple_identifier_##var(void); \
31	RELOC_HIDE(&per_cpu__##var, __per_cpu_offset(cpu)); }))
32#define __get_cpu_var(var) (*({ \
33	extern int simple_identifier_##var(void); \
34	RELOC_HIDE(&per_cpu__##var, __my_cpu_offset()); }))
35#define __raw_get_cpu_var(var) (*({ \
36	extern int simple_identifier_##var(void); \
37	RELOC_HIDE(&per_cpu__##var, __my_cpu_offset()); }))
38
/* percpu_modcopy: memcpy 'size' bytes from 'src' into every possible
 * CPU's copy of the per-cpu area based at 'pcpudst'. */
39/* A macro to avoid #include hell... */
40#define percpu_modcopy(pcpudst, src, size) \
41do { \
42	unsigned int __i; \
43	for_each_possible_cpu(__i) \
44		memcpy((pcpudst)+__per_cpu_offset(__i), \
45			(src), (size)); \
46} while (0)
47
48extern void setup_per_cpu_areas(void);
49
50#else /* ! SMP */
51
/* UP: a single copy of each variable, no section placement or offset
 * arithmetic needed. */
52#define DEFINE_PER_CPU(type, name) \
53	__typeof__(type) per_cpu__##name
54#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
55	DEFINE_PER_CPU(type, name)
56
/* (void)(cpu) still evaluates the 'cpu' argument for its side effects
 * while discarding the value. */
57#define per_cpu(var, cpu) (*((void)(cpu), &per_cpu__##var))
58#define __get_cpu_var(var) per_cpu__##var
59#define __raw_get_cpu_var(var) per_cpu__##var
60
61#endif /* SMP */
62
63#define DECLARE_PER_CPU(type, name) extern __typeof__(type) per_cpu__##name
64
65#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(per_cpu__##var)
66#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(per_cpu__##var)
67
68#endif /* _ASM_X8664_PERCPU_H_ */