Diffstat (limited to 'arch/powerpc/lib/mem_64.S')
-rw-r--r-- | arch/powerpc/lib/mem_64.S | 119
1 file changed, 119 insertions, 0 deletions
diff --git a/arch/powerpc/lib/mem_64.S b/arch/powerpc/lib/mem_64.S
new file mode 100644
index 000000000000..68df20283ff5
--- /dev/null
+++ b/arch/powerpc/lib/mem_64.S
@@ -0,0 +1,119 @@
/*
 * String handling functions for PowerPC.
 *
 * Copyright (C) 1996 Paul Mackerras.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#include <asm/processor.h>
#include <asm/errno.h>
#include <asm/ppc_asm.h>

_GLOBAL(memset)
	neg	r0,r3
	rlwimi	r4,r4,8,16,23
	andi.	r0,r0,7			/* # bytes to be 8-byte aligned */
	rlwimi	r4,r4,16,0,15
	cmplw	cr1,r5,r0		/* do we get that far? */
	rldimi	r4,r4,32,0
	mtcrf	1,r0
	mr	r6,r3
	blt	cr1,8f
	beq+	3f			/* if already 8-byte aligned */
	subf	r5,r0,r5
	bf	31,1f
	stb	r4,0(r6)
	addi	r6,r6,1
1:	bf	30,2f
	sth	r4,0(r6)
	addi	r6,r6,2
2:	bf	29,3f
	stw	r4,0(r6)
	addi	r6,r6,4
3:	srdi.	r0,r5,6
	clrldi	r5,r5,58
	mtctr	r0
	beq	5f
4:	std	r4,0(r6)
	std	r4,8(r6)
	std	r4,16(r6)
	std	r4,24(r6)
	std	r4,32(r6)
	std	r4,40(r6)
	std	r4,48(r6)
	std	r4,56(r6)
	addi	r6,r6,64
	bdnz	4b
5:	srwi.	r0,r5,3
	clrlwi	r5,r5,29
	mtcrf	1,r0
	beq	8f
	bf	29,6f
	std	r4,0(r6)
	std	r4,8(r6)
	std	r4,16(r6)
	std	r4,24(r6)
	addi	r6,r6,32
6:	bf	30,7f
	std	r4,0(r6)
	std	r4,8(r6)
	addi	r6,r6,16
7:	bf	31,8f
	std	r4,0(r6)
	addi	r6,r6,8
8:	cmpwi	r5,0
	mtcrf	1,r5
	beqlr+
	bf	29,9f
	stw	r4,0(r6)
	addi	r6,r6,4
9:	bf	30,10f
	sth	r4,0(r6)
	addi	r6,r6,2
10:	bflr	31
	stb	r4,0(r6)
	blr
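
For readers tracking the register arithmetic: the three rlwimi/rldimi
inserts at the top of memset replicate the fill byte across all eight
bytes of r4, and the rest of the routine is a head/body/tail loop
(byte/halfword/word stores up to 8-byte alignment, unrolled doubleword
stores, then the leftover tail). Below is a minimal C sketch of that
structure, not the kernel's code; splat_byte and memset_sketch are
illustrative names only.

#include <stddef.h>
#include <stdint.h>

/* Sketch: mirrors the rlwimi/rlwimi/rldimi sequence that spreads the
 * fill byte across a 64-bit register. */
static uint64_t splat_byte(uint8_t c)
{
	uint64_t v = c;
	v |= v << 8;	/* byte -> halfword (rlwimi r4,r4,8,16,23) */
	v |= v << 16;	/* halfword -> word (rlwimi r4,r4,16,0,15) */
	v |= v << 32;	/* word -> doubleword (rldimi r4,r4,32,0)  */
	return v;
}

/* Sketch: the head/body/tail shape of the assembly above. */
static void *memset_sketch(void *dst, int c, size_t n)
{
	uint8_t *p = dst;
	uint64_t v = splat_byte((uint8_t)c);

	/* Head: single stores until p is 8-byte aligned
	 * (the stb/sth/stw ladder before label 3). */
	while (n && ((uintptr_t)p & 7)) {
		*p++ = (uint8_t)c;
		n--;
	}
	/* Body: aligned 8-byte stores (the unrolled std blocks). */
	for (; n >= 8; n -= 8, p += 8)
		*(uint64_t *)p = v;
	/* Tail: the remaining 0-7 bytes (the ladder after label 8). */
	while (n--)
		*p++ = (uint8_t)c;
	return dst;
}
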
_GLOBAL(memmove)
	cmplw	0,r3,r4
	bgt	.backwards_memcpy
	b	.memcpy
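
memmove itself is only a dispatch: when the destination starts above
the source, an overlapping forward copy would clobber source bytes
before reading them, so it branches to the backwards copier. A hedged
C sketch of that decision follows; memcpy_forwards and memcpy_backwards
are stand-ins for the .memcpy and .backwards_memcpy entry points, not
real kernel symbols.

#include <stddef.h>
#include <stdint.h>

void *memcpy_forwards(void *dst, const void *src, size_t n);
void *memcpy_backwards(void *dst, const void *src, size_t n);

/* Sketch: a forward copy is only unsafe when the buffers overlap with
 * dst above src, so one pointer comparison picks the direction. */
void *memmove_sketch(void *dst, const void *src, size_t n)
{
	if ((uintptr_t)dst > (uintptr_t)src)
		return memcpy_backwards(dst, src, n);
	return memcpy_forwards(dst, src, n);
}
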
_GLOBAL(backwards_memcpy)
	rlwinm.	r7,r5,32-3,3,31		/* r7 = r5 >> 3 */
	add	r6,r3,r5
	add	r4,r4,r5
	beq	2f
	andi.	r0,r6,3
	mtctr	r7
	bne	5f
1:	lwz	r7,-4(r4)
	lwzu	r8,-8(r4)
	stw	r7,-4(r6)
	stwu	r8,-8(r6)
	bdnz	1b
	andi.	r5,r5,7
2:	cmplwi	0,r5,4
	blt	3f
	lwzu	r0,-4(r4)
	subi	r5,r5,4
	stwu	r0,-4(r6)
3:	cmpwi	0,r5,0
	beqlr
	mtctr	r5
4:	lbzu	r0,-1(r4)
	stbu	r0,-1(r6)
	bdnz	4b
	blr
5:	mtctr	r0
6:	lbzu	r7,-1(r4)
	stbu	r7,-1(r6)
	bdnz	6b
	subf	r5,r0,r5
	rlwinm.	r7,r5,32-3,3,31
	beq	2b
	mtctr	r7
	b	1b
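
backwards_memcpy runs the copy from the top end down: if the
destination's end pointer is not word aligned it byte-copies until it
is (label 5), then moves two words per bdnz iteration (label 1), and
finishes with an optional word and a byte tail (labels 2 through 4).
Below is a byte-level C sketch of why a downward copy is overlap-safe
when dst > src; the alignment prologue and word-pair unrolling of the
assembly are deliberately omitted, and the function name is illustrative.

#include <stddef.h>

/* Sketch: the byte-level core of a backwards copy. */
static void *backwards_memcpy_sketch(void *dst, const void *src, size_t n)
{
	unsigned char *d = (unsigned char *)dst + n;
	const unsigned char *s = (const unsigned char *)src + n;

	/* Walk from high addresses to low: with dst above src, every
	 * source byte is read before the copy reaches and overwrites it. */
	while (n--)
		*--d = *--s;
	return dst;
}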