author     Daniel Jacobowitz <drow@false.org>            2006-08-30 10:02:08 -0400
committer  Russell King <rmk+kernel@arm.linux.org.uk>    2006-09-20 09:58:35 -0400
commit     6a39dd6222dda5ee2414a1b42e8e62118742a49e (patch)
tree       8c4eeeb2d48b583aed900f2a3e8c57f0536a0193 /include
parent     681a4991f83742a0d2325afbf7b7f22045ad5b30 (diff)
[ARM] 3759/2: Remove uses of %?
Patch from Daniel Jacobowitz

The ARM kernel has several uses of asm("foo%?"). %? is a GCC internal
modifier used to output conditional execution predicates. However, no
version of GCC supports conditionalizing asm statements. GCC 4.2 will
correctly expand %? to the empty string in user asms. Earlier versions
may reuse the condition from the previous instruction. In
'if (foo) asm ("bar%?");' this is somewhat likely to be right... but not
reliable.

So, the only safe thing to do is to remove the uses of %?. I believe the
tlbflush.h occurrences were supposed to be removed before, based on the
comment about %? not working at the top of that file.

Old versions of GCC could omit branches around user asms if the asm
didn't mark the condition codes as clobbered. This problem hasn't been
seen on any recent (3.x or 4.x) GCC, but it could theoretically happen.
So, where %? was removed a "cc" clobber was added.

Signed-off-by: Daniel Jacobowitz <dan@codesourcery.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
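For illustration, the before/after pattern applied throughout the files below looks like this (a minimal sketch, not code taken from this commit; the helper name drain_write_buffer and its zero argument are invented for the example):

	/* Illustrative sketch only -- not part of this commit. */
	static inline void drain_write_buffer(unsigned long zero)
	{
		/*
		 * Before: "%?" asks GCC to emit a conditional-execution
		 * predicate, but user asm statements are never
		 * conditionalized; pre-4.2 compilers may substitute a stale
		 * condition left over from surrounding code:
		 *
		 *	asm("mcr%? p15, 0, %0, c7, c10, 4" : : "r" (zero));
		 *
		 * After: plain mnemonic plus a "cc" clobber, so older GCC
		 * versions cannot omit the branch around the asm by assuming
		 * the condition flags are untouched.
		 */
		asm("mcr p15, 0, %0, c7, c10, 4" : : "r" (zero) : "cc");
	}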
Diffstat (limited to 'include')
-rw-r--r--  include/asm-arm/arch-l7200/io.h  |  8
-rw-r--r--  include/asm-arm/tlbflush.h       | 76
2 files changed, 42 insertions(+), 42 deletions(-)
diff --git a/include/asm-arm/arch-l7200/io.h b/include/asm-arm/arch-l7200/io.h
index cd080d8384d9..d744d97c18a5 100644
--- a/include/asm-arm/arch-l7200/io.h
+++ b/include/asm-arm/arch-l7200/io.h
@@ -31,9 +31,9 @@
 static inline unsigned int __arch_getw(unsigned long a)
 {
 	unsigned int value;
-	__asm__ __volatile__("ldr%?h %0, [%1, #0] @ getw"
+	__asm__ __volatile__("ldrh %0, [%1, #0] @ getw"
 		: "=&r" (value)
-		: "r" (a));
+		: "r" (a) : "cc");
 	return value;
 }

@@ -42,8 +42,8 @@ static inline unsigned int __arch_getw(unsigned long a)

 static inline void __arch_putw(unsigned int value, unsigned long a)
 {
-	__asm__ __volatile__("str%?h %0, [%1, #0] @ putw"
-		: : "r" (value), "r" (a));
+	__asm__ __volatile__("strh %0, [%1, #0] @ putw"
+		: : "r" (value), "r" (a) : "cc");
 }

 /*
diff --git a/include/asm-arm/tlbflush.h b/include/asm-arm/tlbflush.h
index d97fc76189a5..cd10a0b5f8ae 100644
--- a/include/asm-arm/tlbflush.h
+++ b/include/asm-arm/tlbflush.h
@@ -247,16 +247,16 @@ static inline void local_flush_tlb_all(void)
 	const unsigned int __tlb_flag = __cpu_tlb_flags;

 	if (tlb_flag(TLB_WB))
-		asm("mcr%? p15, 0, %0, c7, c10, 4" : : "r" (zero));
+		asm("mcr p15, 0, %0, c7, c10, 4" : : "r" (zero) : "cc");

 	if (tlb_flag(TLB_V3_FULL))
-		asm("mcr%? p15, 0, %0, c6, c0, 0" : : "r" (zero));
+		asm("mcr p15, 0, %0, c6, c0, 0" : : "r" (zero) : "cc");
 	if (tlb_flag(TLB_V4_U_FULL | TLB_V6_U_FULL))
-		asm("mcr%? p15, 0, %0, c8, c7, 0" : : "r" (zero));
+		asm("mcr p15, 0, %0, c8, c7, 0" : : "r" (zero) : "cc");
 	if (tlb_flag(TLB_V4_D_FULL | TLB_V6_D_FULL))
-		asm("mcr%? p15, 0, %0, c8, c6, 0" : : "r" (zero));
+		asm("mcr p15, 0, %0, c8, c6, 0" : : "r" (zero) : "cc");
 	if (tlb_flag(TLB_V4_I_FULL | TLB_V6_I_FULL))
-		asm("mcr%? p15, 0, %0, c8, c5, 0" : : "r" (zero));
+		asm("mcr p15, 0, %0, c8, c5, 0" : : "r" (zero) : "cc");
 }

 static inline void local_flush_tlb_mm(struct mm_struct *mm)
@@ -266,25 +266,25 @@ static inline void local_flush_tlb_mm(struct mm_struct *mm)
 	const unsigned int __tlb_flag = __cpu_tlb_flags;

 	if (tlb_flag(TLB_WB))
-		asm("mcr%? p15, 0, %0, c7, c10, 4" : : "r" (zero));
+		asm("mcr p15, 0, %0, c7, c10, 4" : : "r" (zero) : "cc");

 	if (cpu_isset(smp_processor_id(), mm->cpu_vm_mask)) {
 		if (tlb_flag(TLB_V3_FULL))
-			asm("mcr%? p15, 0, %0, c6, c0, 0" : : "r" (zero));
+			asm("mcr p15, 0, %0, c6, c0, 0" : : "r" (zero) : "cc");
 		if (tlb_flag(TLB_V4_U_FULL))
-			asm("mcr%? p15, 0, %0, c8, c7, 0" : : "r" (zero));
+			asm("mcr p15, 0, %0, c8, c7, 0" : : "r" (zero) : "cc");
 		if (tlb_flag(TLB_V4_D_FULL))
-			asm("mcr%? p15, 0, %0, c8, c6, 0" : : "r" (zero));
+			asm("mcr p15, 0, %0, c8, c6, 0" : : "r" (zero) : "cc");
 		if (tlb_flag(TLB_V4_I_FULL))
-			asm("mcr%? p15, 0, %0, c8, c5, 0" : : "r" (zero));
+			asm("mcr p15, 0, %0, c8, c5, 0" : : "r" (zero) : "cc");
 	}

 	if (tlb_flag(TLB_V6_U_ASID))
-		asm("mcr%? p15, 0, %0, c8, c7, 2" : : "r" (asid));
+		asm("mcr p15, 0, %0, c8, c7, 2" : : "r" (asid) : "cc");
 	if (tlb_flag(TLB_V6_D_ASID))
-		asm("mcr%? p15, 0, %0, c8, c6, 2" : : "r" (asid));
+		asm("mcr p15, 0, %0, c8, c6, 2" : : "r" (asid) : "cc");
 	if (tlb_flag(TLB_V6_I_ASID))
-		asm("mcr%? p15, 0, %0, c8, c5, 2" : : "r" (asid));
+		asm("mcr p15, 0, %0, c8, c5, 2" : : "r" (asid) : "cc");
 }

 static inline void
@@ -296,27 +296,27 @@ local_flush_tlb_page(struct vm_area_struct *vma, unsigned long uaddr)
 	uaddr = (uaddr & PAGE_MASK) | ASID(vma->vm_mm);

 	if (tlb_flag(TLB_WB))
-		asm("mcr%? p15, 0, %0, c7, c10, 4" : : "r" (zero));
+		asm("mcr p15, 0, %0, c7, c10, 4" : : "r" (zero));

 	if (cpu_isset(smp_processor_id(), vma->vm_mm->cpu_vm_mask)) {
 		if (tlb_flag(TLB_V3_PAGE))
-			asm("mcr%? p15, 0, %0, c6, c0, 0" : : "r" (uaddr));
+			asm("mcr p15, 0, %0, c6, c0, 0" : : "r" (uaddr) : "cc");
 		if (tlb_flag(TLB_V4_U_PAGE))
-			asm("mcr%? p15, 0, %0, c8, c7, 1" : : "r" (uaddr));
+			asm("mcr p15, 0, %0, c8, c7, 1" : : "r" (uaddr) : "cc");
 		if (tlb_flag(TLB_V4_D_PAGE))
-			asm("mcr%? p15, 0, %0, c8, c6, 1" : : "r" (uaddr));
+			asm("mcr p15, 0, %0, c8, c6, 1" : : "r" (uaddr) : "cc");
 		if (tlb_flag(TLB_V4_I_PAGE))
-			asm("mcr%? p15, 0, %0, c8, c5, 1" : : "r" (uaddr));
+			asm("mcr p15, 0, %0, c8, c5, 1" : : "r" (uaddr) : "cc");
 		if (!tlb_flag(TLB_V4_I_PAGE) && tlb_flag(TLB_V4_I_FULL))
-			asm("mcr%? p15, 0, %0, c8, c5, 0" : : "r" (zero));
+			asm("mcr p15, 0, %0, c8, c5, 0" : : "r" (zero) : "cc");
 	}

 	if (tlb_flag(TLB_V6_U_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c7, 1" : : "r" (uaddr));
+		asm("mcr p15, 0, %0, c8, c7, 1" : : "r" (uaddr) : "cc");
 	if (tlb_flag(TLB_V6_D_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c6, 1" : : "r" (uaddr));
+		asm("mcr p15, 0, %0, c8, c6, 1" : : "r" (uaddr) : "cc");
 	if (tlb_flag(TLB_V6_I_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c5, 1" : : "r" (uaddr));
+		asm("mcr p15, 0, %0, c8, c5, 1" : : "r" (uaddr) : "cc");
 }

 static inline void local_flush_tlb_kernel_page(unsigned long kaddr)
@@ -327,31 +327,31 @@ static inline void local_flush_tlb_kernel_page(unsigned long kaddr)
 	kaddr &= PAGE_MASK;

 	if (tlb_flag(TLB_WB))
-		asm("mcr%? p15, 0, %0, c7, c10, 4" : : "r" (zero));
+		asm("mcr p15, 0, %0, c7, c10, 4" : : "r" (zero) : "cc");

 	if (tlb_flag(TLB_V3_PAGE))
-		asm("mcr%? p15, 0, %0, c6, c0, 0" : : "r" (kaddr));
+		asm("mcr p15, 0, %0, c6, c0, 0" : : "r" (kaddr) : "cc");
 	if (tlb_flag(TLB_V4_U_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c7, 1" : : "r" (kaddr));
+		asm("mcr p15, 0, %0, c8, c7, 1" : : "r" (kaddr) : "cc");
 	if (tlb_flag(TLB_V4_D_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c6, 1" : : "r" (kaddr));
+		asm("mcr p15, 0, %0, c8, c6, 1" : : "r" (kaddr) : "cc");
 	if (tlb_flag(TLB_V4_I_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c5, 1" : : "r" (kaddr));
+		asm("mcr p15, 0, %0, c8, c5, 1" : : "r" (kaddr) : "cc");
 	if (!tlb_flag(TLB_V4_I_PAGE) && tlb_flag(TLB_V4_I_FULL))
-		asm("mcr%? p15, 0, %0, c8, c5, 0" : : "r" (zero));
+		asm("mcr p15, 0, %0, c8, c5, 0" : : "r" (zero) : "cc");

 	if (tlb_flag(TLB_V6_U_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c7, 1" : : "r" (kaddr));
+		asm("mcr p15, 0, %0, c8, c7, 1" : : "r" (kaddr) : "cc");
 	if (tlb_flag(TLB_V6_D_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c6, 1" : : "r" (kaddr));
+		asm("mcr p15, 0, %0, c8, c6, 1" : : "r" (kaddr) : "cc");
 	if (tlb_flag(TLB_V6_I_PAGE))
-		asm("mcr%? p15, 0, %0, c8, c5, 1" : : "r" (kaddr));
+		asm("mcr p15, 0, %0, c8, c5, 1" : : "r" (kaddr) : "cc");

 	/* The ARM ARM states that the completion of a TLB maintenance
 	 * operation is only guaranteed by a DSB instruction
 	 */
 	if (tlb_flag(TLB_V6_U_PAGE | TLB_V6_D_PAGE | TLB_V6_I_PAGE))
-		asm("mcr%? p15, 0, %0, c7, c10, 4" : : "r" (zero));
+		asm("mcr p15, 0, %0, c7, c10, 4" : : "r" (zero) : "cc");
 }

 /*
@@ -373,11 +373,11 @@ static inline void flush_pmd_entry(pmd_t *pmd)
 	const unsigned int __tlb_flag = __cpu_tlb_flags;

 	if (tlb_flag(TLB_DCLEAN))
-		asm("mcr%? p15, 0, %0, c7, c10, 1 @ flush_pmd"
-			: : "r" (pmd));
+		asm("mcr p15, 0, %0, c7, c10, 1 @ flush_pmd"
+			: : "r" (pmd) : "cc");
 	if (tlb_flag(TLB_WB))
-		asm("mcr%? p15, 0, %0, c7, c10, 4 @ flush_pmd"
-			: : "r" (zero));
+		asm("mcr p15, 0, %0, c7, c10, 4 @ flush_pmd"
+			: : "r" (zero) : "cc");
 }

 static inline void clean_pmd_entry(pmd_t *pmd)
@@ -385,8 +385,8 @@ static inline void clean_pmd_entry(pmd_t *pmd)
 	const unsigned int __tlb_flag = __cpu_tlb_flags;

 	if (tlb_flag(TLB_DCLEAN))
-		asm("mcr%? p15, 0, %0, c7, c10, 1 @ flush_pmd"
-			: : "r" (pmd));
+		asm("mcr p15, 0, %0, c7, c10, 1 @ flush_pmd"
+			: : "r" (pmd) : "cc");
 }

 #undef tlb_flag