diff options
author | Russell King <rmk+kernel@arm.linux.org.uk> | 2012-03-28 13:30:01 -0400 |
committer | David Howells <dhowells@redhat.com> | 2012-03-28 13:30:01 -0400 |
---|---|---|
commit | 15d07dc9c59eae51219c40253bdf920f62bb10f2 (patch) | |
tree | d830b428bf55526b1ab80139f6b4c24d4b38d627 /arch/arm/include/asm | |
parent | ec2212088c42ff7d1362629ec26dda4f3e8bdad3 (diff) |
ARM: move CP15 definitions to separate header file
Avoid namespace conflicts with drivers over the CP15 definitions by
moving CP15 related prototypes and definitions to a private header
file.
Acked-by: Stephen Warren <swarren@nvidia.com>
Tested-by: Stephen Warren <swarren@nvidia.com> [Tegra]
Acked-by: H Hartley Sweeten <hsweeten@visionengravers.com>
Tested-by: H Hartley Sweeten <hsweeten@visionengravers.com> [EP93xx]
Acked-by: Nicolas Pitre <nico@linaro.org>
Acked-by: Kukjin Kim <kgene.kim@samsung.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Signed-off-by: David Howells <dhowells@redhat.com>
Diffstat (limited to 'arch/arm/include/asm')
-rw-r--r-- | arch/arm/include/asm/cp15.h | 87 | ||||
-rw-r--r-- | arch/arm/include/asm/system.h | 77 |
2 files changed, 87 insertions, 77 deletions
diff --git a/arch/arm/include/asm/cp15.h b/arch/arm/include/asm/cp15.h new file mode 100644 index 000000000000..3dabd8dd4049 --- /dev/null +++ b/arch/arm/include/asm/cp15.h | |||
@@ -0,0 +1,87 @@ | |||
1 | #ifndef __ASM_ARM_CP15_H | ||
2 | #define __ASM_ARM_CP15_H | ||
3 | |||
4 | #include <asm/system.h> | ||
5 | |||
6 | /* | ||
7 | * CR1 bits (CP#15 CR1) | ||
8 | */ | ||
9 | #define CR_M (1 << 0) /* MMU enable */ | ||
10 | #define CR_A (1 << 1) /* Alignment abort enable */ | ||
11 | #define CR_C (1 << 2) /* Dcache enable */ | ||
12 | #define CR_W (1 << 3) /* Write buffer enable */ | ||
13 | #define CR_P (1 << 4) /* 32-bit exception handler */ | ||
14 | #define CR_D (1 << 5) /* 32-bit data address range */ | ||
15 | #define CR_L (1 << 6) /* Implementation defined */ | ||
16 | #define CR_B (1 << 7) /* Big endian */ | ||
17 | #define CR_S (1 << 8) /* System MMU protection */ | ||
18 | #define CR_R (1 << 9) /* ROM MMU protection */ | ||
19 | #define CR_F (1 << 10) /* Implementation defined */ | ||
20 | #define CR_Z (1 << 11) /* Implementation defined */ | ||
21 | #define CR_I (1 << 12) /* Icache enable */ | ||
22 | #define CR_V (1 << 13) /* Vectors relocated to 0xffff0000 */ | ||
23 | #define CR_RR (1 << 14) /* Round Robin cache replacement */ | ||
24 | #define CR_L4 (1 << 15) /* LDR pc can set T bit */ | ||
25 | #define CR_DT (1 << 16) | ||
26 | #define CR_IT (1 << 18) | ||
27 | #define CR_ST (1 << 19) | ||
28 | #define CR_FI (1 << 21) /* Fast interrupt (lower latency mode) */ | ||
29 | #define CR_U (1 << 22) /* Unaligned access operation */ | ||
30 | #define CR_XP (1 << 23) /* Extended page tables */ | ||
31 | #define CR_VE (1 << 24) /* Vectored interrupts */ | ||
32 | #define CR_EE (1 << 25) /* Exception (Big) Endian */ | ||
33 | #define CR_TRE (1 << 28) /* TEX remap enable */ | ||
34 | #define CR_AFE (1 << 29) /* Access flag enable */ | ||
35 | #define CR_TE (1 << 30) /* Thumb exception enable */ | ||
36 | |||
37 | #ifndef __ASSEMBLY__ | ||
38 | |||
39 | #if __LINUX_ARM_ARCH__ >= 4 | ||
40 | #define vectors_high() (cr_alignment & CR_V) | ||
41 | #else | ||
42 | #define vectors_high() (0) | ||
43 | #endif | ||
44 | |||
45 | extern unsigned long cr_no_alignment; /* defined in entry-armv.S */ | ||
46 | extern unsigned long cr_alignment; /* defined in entry-armv.S */ | ||
47 | |||
48 | static inline unsigned int get_cr(void) | ||
49 | { | ||
50 | unsigned int val; | ||
51 | asm("mrc p15, 0, %0, c1, c0, 0 @ get CR" : "=r" (val) : : "cc"); | ||
52 | return val; | ||
53 | } | ||
54 | |||
55 | static inline void set_cr(unsigned int val) | ||
56 | { | ||
57 | asm volatile("mcr p15, 0, %0, c1, c0, 0 @ set CR" | ||
58 | : : "r" (val) : "cc"); | ||
59 | isb(); | ||
60 | } | ||
61 | |||
62 | #ifndef CONFIG_SMP | ||
63 | extern void adjust_cr(unsigned long mask, unsigned long set); | ||
64 | #endif | ||
65 | |||
66 | #define CPACC_FULL(n) (3 << (n * 2)) | ||
67 | #define CPACC_SVC(n) (1 << (n * 2)) | ||
68 | #define CPACC_DISABLE(n) (0 << (n * 2)) | ||
69 | |||
70 | static inline unsigned int get_copro_access(void) | ||
71 | { | ||
72 | unsigned int val; | ||
73 | asm("mrc p15, 0, %0, c1, c0, 2 @ get copro access" | ||
74 | : "=r" (val) : : "cc"); | ||
75 | return val; | ||
76 | } | ||
77 | |||
78 | static inline void set_copro_access(unsigned int val) | ||
79 | { | ||
80 | asm volatile("mcr p15, 0, %0, c1, c0, 2 @ set copro access" | ||
81 | : : "r" (val) : "cc"); | ||
82 | isb(); | ||
83 | } | ||
84 | |||
85 | #endif | ||
86 | |||
87 | #endif | ||
diff --git a/arch/arm/include/asm/system.h b/arch/arm/include/asm/system.h index e4c96cc6ec0c..774c41e8addf 100644 --- a/arch/arm/include/asm/system.h +++ b/arch/arm/include/asm/system.h | |||
@@ -15,37 +15,6 @@ | |||
15 | #define CPU_ARCH_ARMv7 9 | 15 | #define CPU_ARCH_ARMv7 9 |
16 | 16 | ||
17 | /* | 17 | /* |
18 | * CR1 bits (CP#15 CR1) | ||
19 | */ | ||
20 | #define CR_M (1 << 0) /* MMU enable */ | ||
21 | #define CR_A (1 << 1) /* Alignment abort enable */ | ||
22 | #define CR_C (1 << 2) /* Dcache enable */ | ||
23 | #define CR_W (1 << 3) /* Write buffer enable */ | ||
24 | #define CR_P (1 << 4) /* 32-bit exception handler */ | ||
25 | #define CR_D (1 << 5) /* 32-bit data address range */ | ||
26 | #define CR_L (1 << 6) /* Implementation defined */ | ||
27 | #define CR_B (1 << 7) /* Big endian */ | ||
28 | #define CR_S (1 << 8) /* System MMU protection */ | ||
29 | #define CR_R (1 << 9) /* ROM MMU protection */ | ||
30 | #define CR_F (1 << 10) /* Implementation defined */ | ||
31 | #define CR_Z (1 << 11) /* Implementation defined */ | ||
32 | #define CR_I (1 << 12) /* Icache enable */ | ||
33 | #define CR_V (1 << 13) /* Vectors relocated to 0xffff0000 */ | ||
34 | #define CR_RR (1 << 14) /* Round Robin cache replacement */ | ||
35 | #define CR_L4 (1 << 15) /* LDR pc can set T bit */ | ||
36 | #define CR_DT (1 << 16) | ||
37 | #define CR_IT (1 << 18) | ||
38 | #define CR_ST (1 << 19) | ||
39 | #define CR_FI (1 << 21) /* Fast interrupt (lower latency mode) */ | ||
40 | #define CR_U (1 << 22) /* Unaligned access operation */ | ||
41 | #define CR_XP (1 << 23) /* Extended page tables */ | ||
42 | #define CR_VE (1 << 24) /* Vectored interrupts */ | ||
43 | #define CR_EE (1 << 25) /* Exception (Big) Endian */ | ||
44 | #define CR_TRE (1 << 28) /* TEX remap enable */ | ||
45 | #define CR_AFE (1 << 29) /* Access flag enable */ | ||
46 | #define CR_TE (1 << 30) /* Thumb exception enable */ | ||
47 | |||
48 | /* | ||
49 | * This is used to ensure the compiler did actually allocate the register we | 18 | * This is used to ensure the compiler did actually allocate the register we |
50 | * asked it for some inline assembly sequences. Apparently we can't trust | 19 | * asked it for some inline assembly sequences. Apparently we can't trust |
51 | * the compiler from one version to another so a bit of paranoia won't hurt. | 20 | * the compiler from one version to another so a bit of paranoia won't hurt. |
@@ -119,12 +88,6 @@ extern void (*arm_pm_restart)(char str, const char *cmd); | |||
119 | 88 | ||
120 | extern unsigned int user_debug; | 89 | extern unsigned int user_debug; |
121 | 90 | ||
122 | #if __LINUX_ARM_ARCH__ >= 4 | ||
123 | #define vectors_high() (cr_alignment & CR_V) | ||
124 | #else | ||
125 | #define vectors_high() (0) | ||
126 | #endif | ||
127 | |||
128 | #if __LINUX_ARM_ARCH__ >= 7 || \ | 91 | #if __LINUX_ARM_ARCH__ >= 7 || \ |
129 | (__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K)) | 92 | (__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K)) |
130 | #define sev() __asm__ __volatile__ ("sev" : : : "memory") | 93 | #define sev() __asm__ __volatile__ ("sev" : : : "memory") |
@@ -185,46 +148,6 @@ extern unsigned int user_debug; | |||
185 | #define set_mb(var, value) do { var = value; smp_mb(); } while (0) | 148 | #define set_mb(var, value) do { var = value; smp_mb(); } while (0) |
186 | #define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t"); | 149 | #define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t"); |
187 | 150 | ||
188 | extern unsigned long cr_no_alignment; /* defined in entry-armv.S */ | ||
189 | extern unsigned long cr_alignment; /* defined in entry-armv.S */ | ||
190 | |||
191 | static inline unsigned int get_cr(void) | ||
192 | { | ||
193 | unsigned int val; | ||
194 | asm("mrc p15, 0, %0, c1, c0, 0 @ get CR" : "=r" (val) : : "cc"); | ||
195 | return val; | ||
196 | } | ||
197 | |||
198 | static inline void set_cr(unsigned int val) | ||
199 | { | ||
200 | asm volatile("mcr p15, 0, %0, c1, c0, 0 @ set CR" | ||
201 | : : "r" (val) : "cc"); | ||
202 | isb(); | ||
203 | } | ||
204 | |||
205 | #ifndef CONFIG_SMP | ||
206 | extern void adjust_cr(unsigned long mask, unsigned long set); | ||
207 | #endif | ||
208 | |||
209 | #define CPACC_FULL(n) (3 << (n * 2)) | ||
210 | #define CPACC_SVC(n) (1 << (n * 2)) | ||
211 | #define CPACC_DISABLE(n) (0 << (n * 2)) | ||
212 | |||
213 | static inline unsigned int get_copro_access(void) | ||
214 | { | ||
215 | unsigned int val; | ||
216 | asm("mrc p15, 0, %0, c1, c0, 2 @ get copro access" | ||
217 | : "=r" (val) : : "cc"); | ||
218 | return val; | ||
219 | } | ||
220 | |||
221 | static inline void set_copro_access(unsigned int val) | ||
222 | { | ||
223 | asm volatile("mcr p15, 0, %0, c1, c0, 2 @ set copro access" | ||
224 | : : "r" (val) : "cc"); | ||
225 | isb(); | ||
226 | } | ||
227 | |||
228 | /* | 151 | /* |
229 | * switch_mm() may do a full cache flush over the context switch, | 152 | * switch_mm() may do a full cache flush over the context switch, |
230 | * so enable interrupts over the context switch to avoid high | 153 | * so enable interrupts over the context switch to avoid high |