Diffstat (limited to 'arch/arm/lib/memmove.S')
-rw-r--r--	arch/arm/lib/memmove.S	12
1 file changed, 2 insertions(+), 10 deletions(-)
diff --git a/arch/arm/lib/memmove.S b/arch/arm/lib/memmove.S
index 018522c3ff26..2e301b7bd8f1 100644
--- a/arch/arm/lib/memmove.S
+++ b/arch/arm/lib/memmove.S
@@ -13,14 +13,6 @@
 #include <linux/linkage.h>
 #include <asm/assembler.h>
 
-/*
- * This can be used to enable code to cacheline align the source pointer.
- * Experiments on tested architectures (StrongARM and XScale) didn't show
- * this a worthwhile thing to do. That might be different in the future.
- */
-//#define CALGN(code...) code
-#define CALGN(code...)
-
 	.text
 
 /*
@@ -55,7 +47,7 @@ ENTRY(memmove)
 		stmfd	sp!, {r5 - r8}
 		blt	5f
 
-	CALGN(	ands	ip, r1, #31		)
+	CALGN(	ands	ip, r0, #31		)
 	CALGN(	sbcnes	r4, ip, r2		)  @ C is always set here
 	CALGN(	bcs	2f			)
 	CALGN(	adr	r4, 6f			)
@@ -139,7 +131,7 @@ ENTRY(memmove)
 		subs	r2, r2, #28
 		blt	14f
 
-	CALGN(	ands	ip, r1, #31		)
+	CALGN(	ands	ip, r0, #31		)
 	CALGN(	sbcnes	r4, ip, r2		)  @ C is always set here
 	CALGN(	subcc	r2, r2, ip		)
 	CALGN(	bcc	15f			)
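
For context, a minimal C sketch (not the kernel code) of what the CALGN guard above decides: take the offset of the pointer in r0 (the destination side of the copy) within a 32-byte cache line, matching the "#31" mask, and pre-align only when the copy is long enough to reach a line boundary. The helper name and the CACHE_LINE constant are assumptions for illustration.

#include <stddef.h>
#include <stdint.h>

#define CACHE_LINE 32u	/* matches the "#31" mask in the assembly */

/* Illustrative sketch of the "ands ip, r0, #31" / sbcnes / bcs test:
 * returns how many bytes would be copied before the pointer in r0 sits
 * on a cache-line boundary, or 0 when pre-alignment is skipped. */
static size_t calgn_prealign_bytes(const void *dst, size_t len)
{
	size_t off = (uintptr_t)dst & (CACHE_LINE - 1);	/* ands ip, r0, #31 */

	if (off == 0 || off >= len)	/* bcs: skip if aligned or count too small */
		return 0;
	return off;
}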