Diffstat (limited to 'include/asm-sparc64/percpu.h')
-rw-r--r--  include/asm-sparc64/percpu.h  38
1 files changed, 3 insertions, 35 deletions
diff --git a/include/asm-sparc64/percpu.h b/include/asm-sparc64/percpu.h
index a1f53a4da405..bee64593023e 100644
--- a/include/asm-sparc64/percpu.h
+++ b/include/asm-sparc64/percpu.h
@@ -7,7 +7,6 @@ register unsigned long __local_per_cpu_offset asm("g5");
 
 #ifdef CONFIG_SMP
 
-#define setup_per_cpu_areas() do { } while (0)
 extern void real_setup_per_cpu_areas(void);
 
 extern unsigned long __per_cpu_base;
@@ -16,45 +15,14 @@ extern unsigned long __per_cpu_shift;
 	(__per_cpu_base + ((unsigned long)(__cpu) << __per_cpu_shift))
 #define per_cpu_offset(x) (__per_cpu_offset(x))
 
-/* Separate out the type, so (int[3], foo) works. */
-#define DEFINE_PER_CPU(type, name) \
-    __attribute__((__section__(".data.percpu"))) __typeof__(type) per_cpu__##name
-
-#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
-	__attribute__((__section__(".data.percpu.shared_aligned"))) \
-	__typeof__(type) per_cpu__##name \
-	____cacheline_aligned_in_smp
-
-/* var is in discarded region: offset to particular copy we want */
-#define per_cpu(var, cpu) (*RELOC_HIDE(&per_cpu__##var, __per_cpu_offset(cpu)))
-#define __get_cpu_var(var) (*RELOC_HIDE(&per_cpu__##var, __local_per_cpu_offset))
-#define __raw_get_cpu_var(var) (*RELOC_HIDE(&per_cpu__##var, __local_per_cpu_offset))
-
-/* A macro to avoid #include hell... */
-#define percpu_modcopy(pcpudst, src, size) \
-do { \
-	unsigned int __i; \
-	for_each_possible_cpu(__i) \
-		memcpy((pcpudst)+__per_cpu_offset(__i), \
-		       (src), (size)); \
-} while (0)
+#define __my_cpu_offset __local_per_cpu_offset
+
 #else /* ! SMP */
 
 #define real_setup_per_cpu_areas() do { } while (0)
-#define DEFINE_PER_CPU(type, name) \
-    __typeof__(type) per_cpu__##name
-#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
-	DEFINE_PER_CPU(type, name)
-
-#define per_cpu(var, cpu) (*((void)cpu, &per_cpu__##var))
-#define __get_cpu_var(var) per_cpu__##var
-#define __raw_get_cpu_var(var) per_cpu__##var
 
 #endif /* SMP */
 
-#define DECLARE_PER_CPU(type, name) extern __typeof__(type) per_cpu__##name
-
-#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(per_cpu__##var)
-#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(per_cpu__##var)
+#include <asm-generic/percpu.h>
 
 #endif /* __ARCH_SPARC64_PERCPU__ */
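
The net effect of the patch is that sparc64 now supplies only its arch-specific hooks, namely per_cpu_offset() built from __per_cpu_base/__per_cpu_shift and __my_cpu_offset aliased to the %g5-resident __local_per_cpu_offset, and defers the common DEFINE_PER_CPU/per_cpu()/__get_cpu_var() machinery to asm-generic/percpu.h. The fragment below is a rough, from-memory sketch of how a generic percpu layer of that vintage consumes such hooks; helper names like SHIFT_PERCPU_PTR are recalled rather than quoted, so treat it as an illustration, not the verbatim generic header.

/*
 * Illustrative sketch (not the verbatim asm-generic/percpu.h): the generic
 * layer builds the per-CPU accessors on whatever offset hooks the arch gives it.
 */
#ifndef __my_cpu_offset
/* Arch provides no fast local offset: fall back to indexing by CPU number. */
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif

/* Displace a per-CPU symbol's link-time address into the target CPU's copy. */
#define SHIFT_PERCPU_PTR(ptr, off)	RELOC_HIDE((ptr), (off))

#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(&per_cpu__##var, per_cpu_offset(cpu)))
#define __get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(&per_cpu__##var, __my_cpu_offset))

With this patch applied, __my_cpu_offset on sparc64 expands to the per-CPU base kept in global register %g5, so the local accessors remain a single register-relative dereference rather than a lookup keyed by smp_processor_id().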