diff options
author    David Howells <dhowells@redhat.com>  2012-03-28 13:30:03 -0400
committer David Howells <dhowells@redhat.com>  2012-03-28 13:30:03 -0400
commit    b4816afa3986704d1404fc48e931da5135820472 (patch)
tree      e2d0d5779c55528e0439eee359d46bc9a2c017f7 /include/asm-generic
parent    885df91ca3571afd4b3f50f3391329bbf3c9e262 (diff)
Move the asm-generic/system.h xchg() implementation to asm-generic/cmpxchg.h
Move the asm-generic/system.h xchg() implementation to asm-generic/cmpxchg.h
to simplify disintegration of asm/system.h.
Signed-off-by: David Howells <dhowells@redhat.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Diffstat (limited to 'include/asm-generic')
-rw-r--r--  include/asm-generic/cmpxchg.h  87
-rw-r--r--  include/asm-generic/system.h   70
2 files changed, 80 insertions(+), 77 deletions(-)
diff --git a/include/asm-generic/cmpxchg.h b/include/asm-generic/cmpxchg.h index e0449af91f60..8a361834dc25 100644 --- a/include/asm-generic/cmpxchg.h +++ b/include/asm-generic/cmpxchg.h | |||
@@ -1,16 +1,87 @@ | |||
1 | /* | ||
2 | * Generic UP xchg and cmpxchg using interrupt disablement. Does not | ||
3 | * support SMP. | ||
4 | */ | ||
5 | |||
1 | #ifndef __ASM_GENERIC_CMPXCHG_H | 6 | #ifndef __ASM_GENERIC_CMPXCHG_H |
2 | #define __ASM_GENERIC_CMPXCHG_H | 7 | #define __ASM_GENERIC_CMPXCHG_H |
3 | 8 | ||
4 | /* | ||
5 | * Generic cmpxchg | ||
6 | * | ||
7 | * Uses the local cmpxchg. Does not support SMP. | ||
8 | */ | ||
9 | #ifdef CONFIG_SMP | 9 | #ifdef CONFIG_SMP |
10 | #error "Cannot use generic cmpxchg on SMP" | 10 | #error "Cannot use generic cmpxchg on SMP" |
11 | #endif | 11 | #endif |
12 | 12 | ||
13 | #include <asm-generic/cmpxchg-local.h> | 13 | #include <linux/irqflags.h> |
14 | |||
15 | #ifndef xchg | ||
16 | |||
17 | /* | ||
18 | * This function doesn't exist, so you'll get a linker error if | ||
19 | * something tries to do an invalidly-sized xchg(). | ||
20 | */ | ||
21 | extern void __xchg_called_with_bad_pointer(void); | ||
22 | |||
23 | static inline | ||
24 | unsigned long __xchg(unsigned long x, volatile void *ptr, int size) | ||
25 | { | ||
26 | unsigned long ret, flags; | ||
27 | |||
28 | switch (size) { | ||
29 | case 1: | ||
30 | #ifdef __xchg_u8 | ||
31 | return __xchg_u8(x, ptr); | ||
32 | #else | ||
33 | local_irq_save(flags); | ||
34 | ret = *(volatile u8 *)ptr; | ||
35 | *(volatile u8 *)ptr = x; | ||
36 | local_irq_restore(flags); | ||
37 | return ret; | ||
38 | #endif /* __xchg_u8 */ | ||
39 | |||
40 | case 2: | ||
41 | #ifdef __xchg_u16 | ||
42 | return __xchg_u16(x, ptr); | ||
43 | #else | ||
44 | local_irq_save(flags); | ||
45 | ret = *(volatile u16 *)ptr; | ||
46 | *(volatile u16 *)ptr = x; | ||
47 | local_irq_restore(flags); | ||
48 | return ret; | ||
49 | #endif /* __xchg_u16 */ | ||
50 | |||
51 | case 4: | ||
52 | #ifdef __xchg_u32 | ||
53 | return __xchg_u32(x, ptr); | ||
54 | #else | ||
55 | local_irq_save(flags); | ||
56 | ret = *(volatile u32 *)ptr; | ||
57 | *(volatile u32 *)ptr = x; | ||
58 | local_irq_restore(flags); | ||
59 | return ret; | ||
60 | #endif /* __xchg_u32 */ | ||
61 | |||
62 | #ifdef CONFIG_64BIT | ||
63 | case 8: | ||
64 | #ifdef __xchg_u64 | ||
65 | return __xchg_u64(x, ptr); | ||
66 | #else | ||
67 | local_irq_save(flags); | ||
68 | ret = *(volatile u64 *)ptr; | ||
69 | *(volatile u64 *)ptr = x; | ||
70 | local_irq_restore(flags); | ||
71 | return ret; | ||
72 | #endif /* __xchg_u64 */ | ||
73 | #endif /* CONFIG_64BIT */ | ||
74 | |||
75 | default: | ||
76 | __xchg_called_with_bad_pointer(); | ||
77 | return x; | ||
78 | } | ||
79 | } | ||
80 | |||
81 | #define xchg(ptr, x) \ | ||
82 | ((__typeof__(*(ptr))) __xchg((unsigned long)(x), (ptr), sizeof(*(ptr)))) | ||
83 | |||
84 | #endif /* xchg */ | ||
14 | 85 | ||
15 | /* | 86 | /* |
16 | * Atomic compare and exchange. | 87 | * Atomic compare and exchange. |
@@ -18,7 +89,9 @@ | |||
18 | * Do not define __HAVE_ARCH_CMPXCHG because we want to use it to check whether | 89 | * Do not define __HAVE_ARCH_CMPXCHG because we want to use it to check whether |
19 | * a cmpxchg primitive faster than repeated local irq save/restore exists. | 90 | * a cmpxchg primitive faster than repeated local irq save/restore exists. |
20 | */ | 91 | */ |
92 | #include <asm-generic/cmpxchg-local.h> | ||
93 | |||
21 | #define cmpxchg(ptr, o, n) cmpxchg_local((ptr), (o), (n)) | 94 | #define cmpxchg(ptr, o, n) cmpxchg_local((ptr), (o), (n)) |
22 | #define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n)) | 95 | #define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n)) |
23 | 96 | ||
24 | #endif | 97 | #endif /* __ASM_GENERIC_CMPXCHG_H */ |
diff --git a/include/asm-generic/system.h b/include/asm-generic/system.h index f98f693383c4..54cd124ea9a4 100644 --- a/include/asm-generic/system.h +++ b/include/asm-generic/system.h | |||
@@ -17,7 +17,6 @@ | |||
17 | #ifndef __ASSEMBLY__ | 17 | #ifndef __ASSEMBLY__ |
18 | 18 | ||
19 | #include <linux/types.h> | 19 | #include <linux/types.h> |
20 | #include <linux/irqflags.h> | ||
21 | 20 | ||
22 | #include <asm/barrier.h> | 21 | #include <asm/barrier.h> |
23 | #include <asm/cmpxchg.h> | 22 | #include <asm/cmpxchg.h> |
@@ -34,75 +33,6 @@ extern struct task_struct *__switch_to(struct task_struct *, | |||
34 | 33 | ||
35 | #define arch_align_stack(x) (x) | 34 | #define arch_align_stack(x) (x) |
36 | 35 | ||
37 | /* | ||
38 | * we make sure local_irq_enable() doesn't cause priority inversion | ||
39 | */ | ||
40 | |||
41 | /* This function doesn't exist, so you'll get a linker error | ||
42 | * if something tries to do an invalid xchg(). */ | ||
43 | extern void __xchg_called_with_bad_pointer(void); | ||
44 | |||
45 | static inline | ||
46 | unsigned long __xchg(unsigned long x, volatile void *ptr, int size) | ||
47 | { | ||
48 | unsigned long ret, flags; | ||
49 | |||
50 | switch (size) { | ||
51 | case 1: | ||
52 | #ifdef __xchg_u8 | ||
53 | return __xchg_u8(x, ptr); | ||
54 | #else | ||
55 | local_irq_save(flags); | ||
56 | ret = *(volatile u8 *)ptr; | ||
57 | *(volatile u8 *)ptr = x; | ||
58 | local_irq_restore(flags); | ||
59 | return ret; | ||
60 | #endif /* __xchg_u8 */ | ||
61 | |||
62 | case 2: | ||
63 | #ifdef __xchg_u16 | ||
64 | return __xchg_u16(x, ptr); | ||
65 | #else | ||
66 | local_irq_save(flags); | ||
67 | ret = *(volatile u16 *)ptr; | ||
68 | *(volatile u16 *)ptr = x; | ||
69 | local_irq_restore(flags); | ||
70 | return ret; | ||
71 | #endif /* __xchg_u16 */ | ||
72 | |||
73 | case 4: | ||
74 | #ifdef __xchg_u32 | ||
75 | return __xchg_u32(x, ptr); | ||
76 | #else | ||
77 | local_irq_save(flags); | ||
78 | ret = *(volatile u32 *)ptr; | ||
79 | *(volatile u32 *)ptr = x; | ||
80 | local_irq_restore(flags); | ||
81 | return ret; | ||
82 | #endif /* __xchg_u32 */ | ||
83 | |||
84 | #ifdef CONFIG_64BIT | ||
85 | case 8: | ||
86 | #ifdef __xchg_u64 | ||
87 | return __xchg_u64(x, ptr); | ||
88 | #else | ||
89 | local_irq_save(flags); | ||
90 | ret = *(volatile u64 *)ptr; | ||
91 | *(volatile u64 *)ptr = x; | ||
92 | local_irq_restore(flags); | ||
93 | return ret; | ||
94 | #endif /* __xchg_u64 */ | ||
95 | #endif /* CONFIG_64BIT */ | ||
96 | |||
97 | default: | ||
98 | __xchg_called_with_bad_pointer(); | ||
99 | return x; | ||
100 | } | ||
101 | } | ||
102 | |||
103 | #define xchg(ptr, x) \ | ||
104 | ((__typeof__(*(ptr))) __xchg((unsigned long)(x), (ptr), sizeof(*(ptr)))) | ||
105 | |||
106 | #endif /* !__ASSEMBLY__ */ | 36 | #endif /* !__ASSEMBLY__ */ |
107 | 37 | ||
108 | #endif /* __ASM_GENERIC_SYSTEM_H */ | 38 | #endif /* __ASM_GENERIC_SYSTEM_H */ |