author      Adrian Bunk <bunk@stusta.de>            2005-10-03 20:37:02 -0400
committer   David S. Miller <davem@davemloft.net>   2005-10-03 20:37:02 -0400
commit      3115624eda34d0f4e673fc6bcea36b7ad701ee33 (patch)
tree        a81c9e0f3d84a96725e109452d4ddc90f95b513a  /include/asm-sparc/pgtsrmmu.h
parent      ed39f731ab2e77e58122232f6e27333331d7793d (diff)
[SPARC]: "extern inline" doesn't make much sense.
Signed-off-by: Adrian Bunk <bunk@stusta.de>
Signed-off-by: David S. Miller <davem@davemloft.net>
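
For context, a minimal, self-contained C sketch of why "static inline" is the safer spelling for header functions; it is not part of the patch, and the add_one() helper is made up for illustration. Under GCC's traditional GNU89 semantics, an "extern inline" definition only supplies a body for inlining and never emits an out-of-line symbol, so any call the compiler declines to inline needs a separate external definition elsewhere or the link fails; "static inline" lets each translation unit fall back to its own local copy.

/*
 * Sketch only, not taken from this patch; add_one() is a made-up example.
 * With GNU89 semantics, "extern inline" emits no out-of-line copy, so a
 * non-inlined call would need an external definition somewhere else.
 * "static inline" avoids that: the compiler can always emit a file-local
 * copy if it chooses not to inline.
 */
#include <stdio.h>

static inline unsigned int add_one(unsigned int x)
{
	return x + 1;
}

int main(void)
{
	printf("%u\n", add_one(41));	/* prints 42 */
	return 0;
}
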
Diffstat (limited to 'include/asm-sparc/pgtsrmmu.h')
-rw-r--r--   include/asm-sparc/pgtsrmmu.h   30
1 file changed, 15 insertions, 15 deletions
diff --git a/include/asm-sparc/pgtsrmmu.h b/include/asm-sparc/pgtsrmmu.h
index ee3b9d93187c..edeb9811e728 100644
--- a/include/asm-sparc/pgtsrmmu.h
+++ b/include/asm-sparc/pgtsrmmu.h
@@ -148,7 +148,7 @@ extern void *srmmu_nocache_pool;
 #define __nocache_fix(VADDR) __va(__nocache_pa(VADDR))
 
 /* Accessing the MMU control register. */
-extern __inline__ unsigned int srmmu_get_mmureg(void)
+static inline unsigned int srmmu_get_mmureg(void)
 {
 	unsigned int retval;
 	__asm__ __volatile__("lda [%%g0] %1, %0\n\t" :
@@ -157,14 +157,14 @@ extern __inline__ unsigned int srmmu_get_mmureg(void)
 	return retval;
 }
 
-extern __inline__ void srmmu_set_mmureg(unsigned long regval)
+static inline void srmmu_set_mmureg(unsigned long regval)
 {
 	__asm__ __volatile__("sta %0, [%%g0] %1\n\t" : :
 			     "r" (regval), "i" (ASI_M_MMUREGS) : "memory");
 
 }
 
-extern __inline__ void srmmu_set_ctable_ptr(unsigned long paddr)
+static inline void srmmu_set_ctable_ptr(unsigned long paddr)
 {
 	paddr = ((paddr >> 4) & SRMMU_CTX_PMASK);
 	__asm__ __volatile__("sta %0, [%1] %2\n\t" : :
@@ -173,7 +173,7 @@ extern __inline__ void srmmu_set_ctable_ptr(unsigned long paddr)
 			     "memory");
 }
 
-extern __inline__ unsigned long srmmu_get_ctable_ptr(void)
+static inline unsigned long srmmu_get_ctable_ptr(void)
 {
 	unsigned int retval;
 
@@ -184,14 +184,14 @@ extern __inline__ unsigned long srmmu_get_ctable_ptr(void)
 	return (retval & SRMMU_CTX_PMASK) << 4;
 }
 
-extern __inline__ void srmmu_set_context(int context)
+static inline void srmmu_set_context(int context)
 {
 	__asm__ __volatile__("sta %0, [%1] %2\n\t" : :
 			     "r" (context), "r" (SRMMU_CTX_REG),
 			     "i" (ASI_M_MMUREGS) : "memory");
 }
 
-extern __inline__ int srmmu_get_context(void)
+static inline int srmmu_get_context(void)
 {
 	register int retval;
 	__asm__ __volatile__("lda [%1] %2, %0\n\t" :
@@ -201,7 +201,7 @@ extern __inline__ int srmmu_get_context(void)
 	return retval;
 }
 
-extern __inline__ unsigned int srmmu_get_fstatus(void)
+static inline unsigned int srmmu_get_fstatus(void)
 {
 	unsigned int retval;
 
@@ -211,7 +211,7 @@ extern __inline__ unsigned int srmmu_get_fstatus(void)
 	return retval;
 }
 
-extern __inline__ unsigned int srmmu_get_faddr(void)
+static inline unsigned int srmmu_get_faddr(void)
 {
 	unsigned int retval;
 
@@ -222,7 +222,7 @@ extern __inline__ unsigned int srmmu_get_faddr(void)
 }
 
 /* This is guaranteed on all SRMMU's. */
-extern __inline__ void srmmu_flush_whole_tlb(void)
+static inline void srmmu_flush_whole_tlb(void)
 {
 	__asm__ __volatile__("sta %%g0, [%0] %1\n\t": :
 			     "r" (0x400),        /* Flush entire TLB!! */
@@ -231,7 +231,7 @@ extern __inline__ void srmmu_flush_whole_tlb(void)
 }
 
 /* These flush types are not available on all chips... */
-extern __inline__ void srmmu_flush_tlb_ctx(void)
+static inline void srmmu_flush_tlb_ctx(void)
 {
 	__asm__ __volatile__("sta %%g0, [%0] %1\n\t": :
 			     "r" (0x300),        /* Flush TLB ctx.. */
@@ -239,7 +239,7 @@ extern __inline__ void srmmu_flush_tlb_ctx(void)
 
 }
 
-extern __inline__ void srmmu_flush_tlb_region(unsigned long addr)
+static inline void srmmu_flush_tlb_region(unsigned long addr)
 {
 	addr &= SRMMU_PGDIR_MASK;
 	__asm__ __volatile__("sta %%g0, [%0] %1\n\t": :
@@ -249,7 +249,7 @@ extern __inline__ void srmmu_flush_tlb_region(unsigned long addr)
 }
 
 
-extern __inline__ void srmmu_flush_tlb_segment(unsigned long addr)
+static inline void srmmu_flush_tlb_segment(unsigned long addr)
 {
 	addr &= SRMMU_REAL_PMD_MASK;
 	__asm__ __volatile__("sta %%g0, [%0] %1\n\t": :
@@ -258,7 +258,7 @@ extern __inline__ void srmmu_flush_tlb_segment(unsigned long addr)
 
 }
 
-extern __inline__ void srmmu_flush_tlb_page(unsigned long page)
+static inline void srmmu_flush_tlb_page(unsigned long page)
 {
 	page &= PAGE_MASK;
 	__asm__ __volatile__("sta %%g0, [%0] %1\n\t": :
@@ -267,7 +267,7 @@ extern __inline__ void srmmu_flush_tlb_page(unsigned long page)
 
 }
 
-extern __inline__ unsigned long srmmu_hwprobe(unsigned long vaddr)
+static inline unsigned long srmmu_hwprobe(unsigned long vaddr)
 {
 	unsigned long retval;
 
@@ -279,7 +279,7 @@ extern __inline__ unsigned long srmmu_hwprobe(unsigned long vaddr)
 	return retval;
 }
 
-extern __inline__ int
+static inline int
 srmmu_get_pte (unsigned long addr)
 {
 	register unsigned long entry;