author    Joakim Tjernlund <joakim.tjernlund@transmode.se>   2009-11-19 19:21:09 -0500
committer Benjamin Herrenschmidt <benh@kernel.crashing.org>  2009-12-09 01:10:37 -0500
commit    15d914d72a3f4f1531c41c084cb556be22aa1d2e (patch)
tree      09ff4038394194058da9ca802871d35e6fe9a2d5 /arch/powerpc
parent    0c4661698c58db2a9efc44f403b893bd4d98f348 (diff)
powerpc/8xx: Start using dcbX instructions in various copy routines
Now that the 8xx can fix up dcbX instructions, start using them where possible, as every other PowerPC arch does.

Signed-off-by: Joakim Tjernlund <Joakim.Tjernlund@transmode.se>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
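For readers unfamiliar with the dcbX family, the following is a minimal C sketch (not part of the patch; the helper name clear_page_dcbz and the constants CACHE_LINE and PAGE_SZ are placeholder assumptions standing in for the kernel's L1_CACHE_BYTES and PAGE_SIZE) of the cache-block-zero pattern that clear_pages() keeps unconditionally after this change: zero a page one cache line at a time with dcbz instead of explicit stores.

/*
 * Illustrative sketch only -- mirrors in C what the dcbz loop in
 * clear_pages() does in assembly.  Requires a 32-bit PowerPC compiler.
 */
#define CACHE_LINE 16		/* assumed 8xx L1 cache line size */
#define PAGE_SZ    4096		/* assumed page size */

static void clear_page_dcbz(void *page)	/* hypothetical helper */
{
	char *p = (char *)page;
	char *end = p + PAGE_SZ;

	for (; p < end; p += CACHE_LINE)
		/* dcbz zeroes the whole cache block containing the address */
		__asm__ volatile("dcbz 0,%0" : : "r"(p) : "memory");
}

Since the 8xx can now fix up dcbX instructions, the CONFIG_8xx store-based fallbacks removed in the hunks below are no longer needed.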
Diffstat (limited to 'arch/powerpc')
-rw-r--r--  arch/powerpc/kernel/misc_32.S  18
-rw-r--r--  arch/powerpc/lib/copy_32.S     24
2 files changed, 0 insertions, 42 deletions
diff --git a/arch/powerpc/kernel/misc_32.S b/arch/powerpc/kernel/misc_32.S
index da9c0c4c10f3..8649f536f8df 100644
--- a/arch/powerpc/kernel/misc_32.S
+++ b/arch/powerpc/kernel/misc_32.S
@@ -502,15 +502,7 @@ _GLOBAL(clear_pages)
 	li	r0,PAGE_SIZE/L1_CACHE_BYTES
 	slw	r0,r0,r4
 	mtctr	r0
-#ifdef CONFIG_8xx
-	li	r4, 0
-1:	stw	r4, 0(r3)
-	stw	r4, 4(r3)
-	stw	r4, 8(r3)
-	stw	r4, 12(r3)
-#else
 1:	dcbz	0,r3
-#endif
 	addi	r3,r3,L1_CACHE_BYTES
 	bdnz	1b
 	blr
@@ -535,15 +527,6 @@ _GLOBAL(copy_page)
 	addi	r3,r3,-4
 	addi	r4,r4,-4
 
-#ifdef CONFIG_8xx
-	/* don't use prefetch on 8xx */
-	li	r0,4096/L1_CACHE_BYTES
-	mtctr	r0
-1:	COPY_16_BYTES
-	bdnz	1b
-	blr
-
-#else	/* not 8xx, we can prefetch */
 	li	r5,4
 
 #if MAX_COPY_PREFETCH > 1
@@ -584,7 +567,6 @@ _GLOBAL(copy_page)
 	li	r0,MAX_COPY_PREFETCH
 	li	r11,4
 	b	2b
-#endif	/* CONFIG_8xx */
 
 /*
  * void atomic_clear_mask(atomic_t mask, atomic_t *addr)
diff --git a/arch/powerpc/lib/copy_32.S b/arch/powerpc/lib/copy_32.S
index c657de59abca..74a7f4130b4c 100644
--- a/arch/powerpc/lib/copy_32.S
+++ b/arch/powerpc/lib/copy_32.S
@@ -98,20 +98,7 @@ _GLOBAL(cacheable_memzero)
 	bdnz	4b
 3:	mtctr	r9
 	li	r7,4
-#if !defined(CONFIG_8xx)
 10:	dcbz	r7,r6
-#else
-10:	stw	r4, 4(r6)
-	stw	r4, 8(r6)
-	stw	r4, 12(r6)
-	stw	r4, 16(r6)
-#if CACHE_LINE_SIZE >= 32
-	stw	r4, 20(r6)
-	stw	r4, 24(r6)
-	stw	r4, 28(r6)
-	stw	r4, 32(r6)
-#endif /* CACHE_LINE_SIZE */
-#endif
 	addi	r6,r6,CACHELINE_BYTES
 	bdnz	10b
 	clrlwi	r5,r8,32-LG_CACHELINE_BYTES
@@ -200,9 +187,7 @@ _GLOBAL(cacheable_memcpy)
 	mtctr	r0
 	beq	63f
 53:
-#if !defined(CONFIG_8xx)
 	dcbz	r11,r6
-#endif
 	COPY_16_BYTES
 #if L1_CACHE_BYTES >= 32
 	COPY_16_BYTES
@@ -356,14 +341,6 @@ _GLOBAL(__copy_tofrom_user)
 	li	r11,4
 	beq	63f
 
-#ifdef CONFIG_8xx
-	/* Don't use prefetch on 8xx */
-	mtctr	r0
-	li	r0,0
-53:	COPY_16_BYTES_WITHEX(0)
-	bdnz	53b
-
-#else /* not CONFIG_8xx */
 	/* Here we decide how far ahead to prefetch the source */
 	li	r3,4
 	cmpwi	r0,1
@@ -416,7 +393,6 @@ _GLOBAL(__copy_tofrom_user)
 	li	r3,4
 	li	r7,0
 	bne	114b
-#endif /* CONFIG_8xx */
 
 63:	srwi.	r0,r5,2
 	mtctr	r0