diff options
author | David S. Miller <davem@davemloft.net> | 2012-05-11 23:33:22 -0400 |
---|---|---|
committer | David S. Miller <davem@davemloft.net> | 2012-05-11 23:33:22 -0400 |
commit | 8695c37d06721c581385725eb80ba4e6d6bdf73f (patch) | |
tree | 072f37f58590aea8ca880b6175d127809edd4cec /arch/sparc/lib | |
parent | b55e81b9f8cf0256bcfc548360aef642630c2919 (diff) |
sparc: Convert some assembler over to linkage.h's ENTRY/ENDPROC
Use those, instead of doing it all by hand.
Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'arch/sparc/lib')
-rw-r--r-- | arch/sparc/lib/ashldi3.S | 7 | ||||
-rw-r--r-- | arch/sparc/lib/ashrdi3.S | 7 | ||||
-rw-r--r-- | arch/sparc/lib/atomic_64.S | 49 | ||||
-rw-r--r-- | arch/sparc/lib/bitops.S | 37 | ||||
-rw-r--r-- | arch/sparc/lib/blockops.S | 10 | ||||
-rw-r--r-- | arch/sparc/lib/bzero.S | 20 | ||||
-rw-r--r-- | arch/sparc/lib/ipcsum.S | 9 | ||||
-rw-r--r-- | arch/sparc/lib/lshrdi3.S | 5 | ||||
-rw-r--r-- | arch/sparc/lib/memmove.S | 9 | ||||
-rw-r--r-- | arch/sparc/lib/strlen_user_64.S | 8 | ||||
-rw-r--r-- | arch/sparc/lib/strncmp_32.S | 7 | ||||
-rw-r--r-- | arch/sparc/lib/strncmp_64.S | 8 | ||||
-rw-r--r-- | arch/sparc/lib/strncpy_from_user_32.S | 6 | ||||
-rw-r--r-- | arch/sparc/lib/strncpy_from_user_64.S | 8 | ||||
-rw-r--r-- | arch/sparc/lib/xor.S | 50 |
15 files changed, 97 insertions, 143 deletions
diff --git a/arch/sparc/lib/ashldi3.S b/arch/sparc/lib/ashldi3.S index 17912e608716..86f60de07b0a 100644 --- a/arch/sparc/lib/ashldi3.S +++ b/arch/sparc/lib/ashldi3.S | |||
@@ -5,10 +5,10 @@ | |||
5 | * Copyright (C) 1999 David S. Miller (davem@redhat.com) | 5 | * Copyright (C) 1999 David S. Miller (davem@redhat.com) |
6 | */ | 6 | */ |
7 | 7 | ||
8 | #include <linux/linkage.h> | ||
9 | |||
8 | .text | 10 | .text |
9 | .align 4 | 11 | ENTRY(__ashldi3) |
10 | .globl __ashldi3 | ||
11 | __ashldi3: | ||
12 | cmp %o2, 0 | 12 | cmp %o2, 0 |
13 | be 9f | 13 | be 9f |
14 | mov 0x20, %g2 | 14 | mov 0x20, %g2 |
@@ -32,3 +32,4 @@ __ashldi3: | |||
32 | 9: | 32 | 9: |
33 | retl | 33 | retl |
34 | nop | 34 | nop |
35 | ENDPROC(__ashldi3) | ||
diff --git a/arch/sparc/lib/ashrdi3.S b/arch/sparc/lib/ashrdi3.S index 85398fd6dcc9..6eb8ba2dd50e 100644 --- a/arch/sparc/lib/ashrdi3.S +++ b/arch/sparc/lib/ashrdi3.S | |||
@@ -5,10 +5,10 @@ | |||
5 | * Copyright (C) 1995 David S. Miller (davem@caip.rutgers.edu) | 5 | * Copyright (C) 1995 David S. Miller (davem@caip.rutgers.edu) |
6 | */ | 6 | */ |
7 | 7 | ||
8 | #include <linux/linkage.h> | ||
9 | |||
8 | .text | 10 | .text |
9 | .align 4 | 11 | ENTRY(__ashrdi3) |
10 | .globl __ashrdi3 | ||
11 | __ashrdi3: | ||
12 | tst %o2 | 12 | tst %o2 |
13 | be 3f | 13 | be 3f |
14 | or %g0, 32, %g2 | 14 | or %g0, 32, %g2 |
@@ -34,3 +34,4 @@ __ashrdi3: | |||
34 | 3: | 34 | 3: |
35 | jmpl %o7 + 8, %g0 | 35 | jmpl %o7 + 8, %g0 |
36 | nop | 36 | nop |
37 | ENDPROC(__ashrdi3) | ||
diff --git a/arch/sparc/lib/atomic_64.S b/arch/sparc/lib/atomic_64.S index 59186e0fcf39..4d502da3de78 100644 --- a/arch/sparc/lib/atomic_64.S +++ b/arch/sparc/lib/atomic_64.S | |||
@@ -3,6 +3,7 @@ | |||
3 | * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net) | 3 | * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net) |
4 | */ | 4 | */ |
5 | 5 | ||
6 | #include <linux/linkage.h> | ||
6 | #include <asm/asi.h> | 7 | #include <asm/asi.h> |
7 | #include <asm/backoff.h> | 8 | #include <asm/backoff.h> |
8 | 9 | ||
@@ -13,9 +14,7 @@ | |||
13 | * memory barriers, and a second which returns | 14 | * memory barriers, and a second which returns |
14 | * a value and does the barriers. | 15 | * a value and does the barriers. |
15 | */ | 16 | */ |
16 | .globl atomic_add | 17 | ENTRY(atomic_add) /* %o0 = increment, %o1 = atomic_ptr */ |
17 | .type atomic_add,#function | ||
18 | atomic_add: /* %o0 = increment, %o1 = atomic_ptr */ | ||
19 | BACKOFF_SETUP(%o2) | 18 | BACKOFF_SETUP(%o2) |
20 | 1: lduw [%o1], %g1 | 19 | 1: lduw [%o1], %g1 |
21 | add %g1, %o0, %g7 | 20 | add %g1, %o0, %g7 |
@@ -26,11 +25,9 @@ atomic_add: /* %o0 = increment, %o1 = atomic_ptr */ | |||
26 | retl | 25 | retl |
27 | nop | 26 | nop |
28 | 2: BACKOFF_SPIN(%o2, %o3, 1b) | 27 | 2: BACKOFF_SPIN(%o2, %o3, 1b) |
29 | .size atomic_add, .-atomic_add | 28 | ENDPROC(atomic_add) |
30 | 29 | ||
31 | .globl atomic_sub | 30 | ENTRY(atomic_sub) /* %o0 = decrement, %o1 = atomic_ptr */ |
32 | .type atomic_sub,#function | ||
33 | atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */ | ||
34 | BACKOFF_SETUP(%o2) | 31 | BACKOFF_SETUP(%o2) |
35 | 1: lduw [%o1], %g1 | 32 | 1: lduw [%o1], %g1 |
36 | sub %g1, %o0, %g7 | 33 | sub %g1, %o0, %g7 |
@@ -41,11 +38,9 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */ | |||
41 | retl | 38 | retl |
42 | nop | 39 | nop |
43 | 2: BACKOFF_SPIN(%o2, %o3, 1b) | 40 | 2: BACKOFF_SPIN(%o2, %o3, 1b) |
44 | .size atomic_sub, .-atomic_sub | 41 | ENDPROC(atomic_sub) |
45 | 42 | ||
46 | .globl atomic_add_ret | 43 | ENTRY(atomic_add_ret) /* %o0 = increment, %o1 = atomic_ptr */ |
47 | .type atomic_add_ret,#function | ||
48 | atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */ | ||
49 | BACKOFF_SETUP(%o2) | 44 | BACKOFF_SETUP(%o2) |
50 | 1: lduw [%o1], %g1 | 45 | 1: lduw [%o1], %g1 |
51 | add %g1, %o0, %g7 | 46 | add %g1, %o0, %g7 |
@@ -56,11 +51,9 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */ | |||
56 | retl | 51 | retl |
57 | sra %g1, 0, %o0 | 52 | sra %g1, 0, %o0 |
58 | 2: BACKOFF_SPIN(%o2, %o3, 1b) | 53 | 2: BACKOFF_SPIN(%o2, %o3, 1b) |
59 | .size atomic_add_ret, .-atomic_add_ret | 54 | ENDPROC(atomic_add_ret) |
60 | 55 | ||
61 | .globl atomic_sub_ret | 56 | ENTRY(atomic_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */ |
62 | .type atomic_sub_ret,#function | ||
63 | atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */ | ||
64 | BACKOFF_SETUP(%o2) | 57 | BACKOFF_SETUP(%o2) |
65 | 1: lduw [%o1], %g1 | 58 | 1: lduw [%o1], %g1 |
66 | sub %g1, %o0, %g7 | 59 | sub %g1, %o0, %g7 |
@@ -71,11 +64,9 @@ atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */ | |||
71 | retl | 64 | retl |
72 | sra %g1, 0, %o0 | 65 | sra %g1, 0, %o0 |
73 | 2: BACKOFF_SPIN(%o2, %o3, 1b) | 66 | 2: BACKOFF_SPIN(%o2, %o3, 1b) |
74 | .size atomic_sub_ret, .-atomic_sub_ret | 67 | ENDPROC(atomic_sub_ret) |
75 | 68 | ||
76 | .globl atomic64_add | 69 | ENTRY(atomic64_add) /* %o0 = increment, %o1 = atomic_ptr */ |
77 | .type atomic64_add,#function | ||
78 | atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */ | ||
79 | BACKOFF_SETUP(%o2) | 70 | BACKOFF_SETUP(%o2) |
80 | 1: ldx [%o1], %g1 | 71 | 1: ldx [%o1], %g1 |
81 | add %g1, %o0, %g7 | 72 | add %g1, %o0, %g7 |
@@ -86,11 +77,9 @@ atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */ | |||
86 | retl | 77 | retl |
87 | nop | 78 | nop |
88 | 2: BACKOFF_SPIN(%o2, %o3, 1b) | 79 | 2: BACKOFF_SPIN(%o2, %o3, 1b) |
89 | .size atomic64_add, .-atomic64_add | 80 | ENDPROC(atomic64_add) |
90 | 81 | ||
91 | .globl atomic64_sub | 82 | ENTRY(atomic64_sub) /* %o0 = decrement, %o1 = atomic_ptr */ |
92 | .type atomic64_sub,#function | ||
93 | atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */ | ||
94 | BACKOFF_SETUP(%o2) | 83 | BACKOFF_SETUP(%o2) |
95 | 1: ldx [%o1], %g1 | 84 | 1: ldx [%o1], %g1 |
96 | sub %g1, %o0, %g7 | 85 | sub %g1, %o0, %g7 |
@@ -101,11 +90,9 @@ atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */ | |||
101 | retl | 90 | retl |
102 | nop | 91 | nop |
103 | 2: BACKOFF_SPIN(%o2, %o3, 1b) | 92 | 2: BACKOFF_SPIN(%o2, %o3, 1b) |
104 | .size atomic64_sub, .-atomic64_sub | 93 | ENDPROC(atomic64_sub) |
105 | 94 | ||
106 | .globl atomic64_add_ret | 95 | ENTRY(atomic64_add_ret) /* %o0 = increment, %o1 = atomic_ptr */ |
107 | .type atomic64_add_ret,#function | ||
108 | atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */ | ||
109 | BACKOFF_SETUP(%o2) | 96 | BACKOFF_SETUP(%o2) |
110 | 1: ldx [%o1], %g1 | 97 | 1: ldx [%o1], %g1 |
111 | add %g1, %o0, %g7 | 98 | add %g1, %o0, %g7 |
@@ -116,11 +103,9 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */ | |||
116 | retl | 103 | retl |
117 | add %g1, %o0, %o0 | 104 | add %g1, %o0, %o0 |
118 | 2: BACKOFF_SPIN(%o2, %o3, 1b) | 105 | 2: BACKOFF_SPIN(%o2, %o3, 1b) |
119 | .size atomic64_add_ret, .-atomic64_add_ret | 106 | ENDPROC(atomic64_add_ret) |
120 | 107 | ||
121 | .globl atomic64_sub_ret | 108 | ENTRY(atomic64_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */ |
122 | .type atomic64_sub_ret,#function | ||
123 | atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */ | ||
124 | BACKOFF_SETUP(%o2) | 109 | BACKOFF_SETUP(%o2) |
125 | 1: ldx [%o1], %g1 | 110 | 1: ldx [%o1], %g1 |
126 | sub %g1, %o0, %g7 | 111 | sub %g1, %o0, %g7 |
@@ -131,4 +116,4 @@ atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */ | |||
131 | retl | 116 | retl |
132 | sub %g1, %o0, %o0 | 117 | sub %g1, %o0, %o0 |
133 | 2: BACKOFF_SPIN(%o2, %o3, 1b) | 118 | 2: BACKOFF_SPIN(%o2, %o3, 1b) |
134 | .size atomic64_sub_ret, .-atomic64_sub_ret | 119 | ENDPROC(atomic64_sub_ret) |
diff --git a/arch/sparc/lib/bitops.S b/arch/sparc/lib/bitops.S index 3dc61d5537c0..36f72cc0e67e 100644 --- a/arch/sparc/lib/bitops.S +++ b/arch/sparc/lib/bitops.S | |||
@@ -3,14 +3,13 @@ | |||
3 | * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net) | 3 | * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net) |
4 | */ | 4 | */ |
5 | 5 | ||
6 | #include <linux/linkage.h> | ||
6 | #include <asm/asi.h> | 7 | #include <asm/asi.h> |
7 | #include <asm/backoff.h> | 8 | #include <asm/backoff.h> |
8 | 9 | ||
9 | .text | 10 | .text |
10 | 11 | ||
11 | .globl test_and_set_bit | 12 | ENTRY(test_and_set_bit) /* %o0=nr, %o1=addr */ |
12 | .type test_and_set_bit,#function | ||
13 | test_and_set_bit: /* %o0=nr, %o1=addr */ | ||
14 | BACKOFF_SETUP(%o3) | 13 | BACKOFF_SETUP(%o3) |
15 | srlx %o0, 6, %g1 | 14 | srlx %o0, 6, %g1 |
16 | mov 1, %o2 | 15 | mov 1, %o2 |
@@ -29,11 +28,9 @@ test_and_set_bit: /* %o0=nr, %o1=addr */ | |||
29 | retl | 28 | retl |
30 | nop | 29 | nop |
31 | 2: BACKOFF_SPIN(%o3, %o4, 1b) | 30 | 2: BACKOFF_SPIN(%o3, %o4, 1b) |
32 | .size test_and_set_bit, .-test_and_set_bit | 31 | ENDPROC(test_and_set_bit) |
33 | 32 | ||
34 | .globl test_and_clear_bit | 33 | ENTRY(test_and_clear_bit) /* %o0=nr, %o1=addr */ |
35 | .type test_and_clear_bit,#function | ||
36 | test_and_clear_bit: /* %o0=nr, %o1=addr */ | ||
37 | BACKOFF_SETUP(%o3) | 34 | BACKOFF_SETUP(%o3) |
38 | srlx %o0, 6, %g1 | 35 | srlx %o0, 6, %g1 |
39 | mov 1, %o2 | 36 | mov 1, %o2 |
@@ -52,11 +49,9 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */ | |||
52 | retl | 49 | retl |
53 | nop | 50 | nop |
54 | 2: BACKOFF_SPIN(%o3, %o4, 1b) | 51 | 2: BACKOFF_SPIN(%o3, %o4, 1b) |
55 | .size test_and_clear_bit, .-test_and_clear_bit | 52 | ENDPROC(test_and_clear_bit) |
56 | 53 | ||
57 | .globl test_and_change_bit | 54 | ENTRY(test_and_change_bit) /* %o0=nr, %o1=addr */ |
58 | .type test_and_change_bit,#function | ||
59 | test_and_change_bit: /* %o0=nr, %o1=addr */ | ||
60 | BACKOFF_SETUP(%o3) | 55 | BACKOFF_SETUP(%o3) |
61 | srlx %o0, 6, %g1 | 56 | srlx %o0, 6, %g1 |
62 | mov 1, %o2 | 57 | mov 1, %o2 |
@@ -75,11 +70,9 @@ test_and_change_bit: /* %o0=nr, %o1=addr */ | |||
75 | retl | 70 | retl |
76 | nop | 71 | nop |
77 | 2: BACKOFF_SPIN(%o3, %o4, 1b) | 72 | 2: BACKOFF_SPIN(%o3, %o4, 1b) |
78 | .size test_and_change_bit, .-test_and_change_bit | 73 | ENDPROC(test_and_change_bit) |
79 | 74 | ||
80 | .globl set_bit | 75 | ENTRY(set_bit) /* %o0=nr, %o1=addr */ |
81 | .type set_bit,#function | ||
82 | set_bit: /* %o0=nr, %o1=addr */ | ||
83 | BACKOFF_SETUP(%o3) | 76 | BACKOFF_SETUP(%o3) |
84 | srlx %o0, 6, %g1 | 77 | srlx %o0, 6, %g1 |
85 | mov 1, %o2 | 78 | mov 1, %o2 |
@@ -96,11 +89,9 @@ set_bit: /* %o0=nr, %o1=addr */ | |||
96 | retl | 89 | retl |
97 | nop | 90 | nop |
98 | 2: BACKOFF_SPIN(%o3, %o4, 1b) | 91 | 2: BACKOFF_SPIN(%o3, %o4, 1b) |
99 | .size set_bit, .-set_bit | 92 | ENDPROC(set_bit) |
100 | 93 | ||
101 | .globl clear_bit | 94 | ENTRY(clear_bit) /* %o0=nr, %o1=addr */ |
102 | .type clear_bit,#function | ||
103 | clear_bit: /* %o0=nr, %o1=addr */ | ||
104 | BACKOFF_SETUP(%o3) | 95 | BACKOFF_SETUP(%o3) |
105 | srlx %o0, 6, %g1 | 96 | srlx %o0, 6, %g1 |
106 | mov 1, %o2 | 97 | mov 1, %o2 |
@@ -117,11 +108,9 @@ clear_bit: /* %o0=nr, %o1=addr */ | |||
117 | retl | 108 | retl |
118 | nop | 109 | nop |
119 | 2: BACKOFF_SPIN(%o3, %o4, 1b) | 110 | 2: BACKOFF_SPIN(%o3, %o4, 1b) |
120 | .size clear_bit, .-clear_bit | 111 | ENDPROC(clear_bit) |
121 | 112 | ||
122 | .globl change_bit | 113 | ENTRY(change_bit) /* %o0=nr, %o1=addr */ |
123 | .type change_bit,#function | ||
124 | change_bit: /* %o0=nr, %o1=addr */ | ||
125 | BACKOFF_SETUP(%o3) | 114 | BACKOFF_SETUP(%o3) |
126 | srlx %o0, 6, %g1 | 115 | srlx %o0, 6, %g1 |
127 | mov 1, %o2 | 116 | mov 1, %o2 |
@@ -138,4 +127,4 @@ change_bit: /* %o0=nr, %o1=addr */ | |||
138 | retl | 127 | retl |
139 | nop | 128 | nop |
140 | 2: BACKOFF_SPIN(%o3, %o4, 1b) | 129 | 2: BACKOFF_SPIN(%o3, %o4, 1b) |
141 | .size change_bit, .-change_bit | 130 | ENDPROC(change_bit) |
diff --git a/arch/sparc/lib/blockops.S b/arch/sparc/lib/blockops.S index 804be87f9a42..3c771011ff4b 100644 --- a/arch/sparc/lib/blockops.S +++ b/arch/sparc/lib/blockops.S | |||
@@ -4,6 +4,7 @@ | |||
4 | * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu) | 4 | * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu) |
5 | */ | 5 | */ |
6 | 6 | ||
7 | #include <linux/linkage.h> | ||
7 | #include <asm/page.h> | 8 | #include <asm/page.h> |
8 | 9 | ||
9 | /* Zero out 64 bytes of memory at (buf + offset). | 10 | /* Zero out 64 bytes of memory at (buf + offset). |
@@ -44,10 +45,7 @@ | |||
44 | */ | 45 | */ |
45 | 46 | ||
46 | .text | 47 | .text |
47 | .align 4 | 48 | ENTRY(bzero_1page) |
48 | .globl bzero_1page, __copy_1page | ||
49 | |||
50 | bzero_1page: | ||
51 | /* NOTE: If you change the number of insns of this routine, please check | 49 | /* NOTE: If you change the number of insns of this routine, please check |
52 | * arch/sparc/mm/hypersparc.S */ | 50 | * arch/sparc/mm/hypersparc.S */ |
53 | /* %o0 = buf */ | 51 | /* %o0 = buf */ |
@@ -65,8 +63,9 @@ bzero_1page: | |||
65 | 63 | ||
66 | retl | 64 | retl |
67 | nop | 65 | nop |
66 | ENDPROC(bzero_1page) | ||
68 | 67 | ||
69 | __copy_1page: | 68 | ENTRY(__copy_1page) |
70 | /* NOTE: If you change the number of insns of this routine, please check | 69 | /* NOTE: If you change the number of insns of this routine, please check |
71 | * arch/sparc/mm/hypersparc.S */ | 70 | * arch/sparc/mm/hypersparc.S */ |
72 | /* %o0 = dst, %o1 = src */ | 71 | /* %o0 = dst, %o1 = src */ |
@@ -87,3 +86,4 @@ __copy_1page: | |||
87 | 86 | ||
88 | retl | 87 | retl |
89 | nop | 88 | nop |
89 | ENDPROC(__copy_1page) | ||
diff --git a/arch/sparc/lib/bzero.S b/arch/sparc/lib/bzero.S index 615f401edf69..8c058114b649 100644 --- a/arch/sparc/lib/bzero.S +++ b/arch/sparc/lib/bzero.S | |||
@@ -4,11 +4,11 @@ | |||
4 | * Copyright (C) 2005 David S. Miller <davem@davemloft.net> | 4 | * Copyright (C) 2005 David S. Miller <davem@davemloft.net> |
5 | */ | 5 | */ |
6 | 6 | ||
7 | #include <linux/linkage.h> | ||
8 | |||
7 | .text | 9 | .text |
8 | 10 | ||
9 | .globl memset | 11 | ENTRY(memset) /* %o0=buf, %o1=pat, %o2=len */ |
10 | .type memset, #function | ||
11 | memset: /* %o0=buf, %o1=pat, %o2=len */ | ||
12 | and %o1, 0xff, %o3 | 12 | and %o1, 0xff, %o3 |
13 | mov %o2, %o1 | 13 | mov %o2, %o1 |
14 | sllx %o3, 8, %g1 | 14 | sllx %o3, 8, %g1 |
@@ -19,9 +19,7 @@ memset: /* %o0=buf, %o1=pat, %o2=len */ | |||
19 | ba,pt %xcc, 1f | 19 | ba,pt %xcc, 1f |
20 | or %g1, %o2, %o2 | 20 | or %g1, %o2, %o2 |
21 | 21 | ||
22 | .globl __bzero | 22 | ENTRY(__bzero) /* %o0=buf, %o1=len */ |
23 | .type __bzero, #function | ||
24 | __bzero: /* %o0=buf, %o1=len */ | ||
25 | clr %o2 | 23 | clr %o2 |
26 | 1: mov %o0, %o3 | 24 | 1: mov %o0, %o3 |
27 | brz,pn %o1, __bzero_done | 25 | brz,pn %o1, __bzero_done |
@@ -78,8 +76,8 @@ __bzero_tiny: | |||
78 | __bzero_done: | 76 | __bzero_done: |
79 | retl | 77 | retl |
80 | mov %o3, %o0 | 78 | mov %o3, %o0 |
81 | .size __bzero, .-__bzero | 79 | ENDPROC(__bzero) |
82 | .size memset, .-memset | 80 | ENDPROC(memset) |
83 | 81 | ||
84 | #define EX_ST(x,y) \ | 82 | #define EX_ST(x,y) \ |
85 | 98: x,y; \ | 83 | 98: x,y; \ |
@@ -89,9 +87,7 @@ __bzero_done: | |||
89 | .text; \ | 87 | .text; \ |
90 | .align 4; | 88 | .align 4; |
91 | 89 | ||
92 | .globl __clear_user | 90 | ENTRY(__clear_user) /* %o0=buf, %o1=len */ |
93 | .type __clear_user, #function | ||
94 | __clear_user: /* %o0=buf, %o1=len */ | ||
95 | brz,pn %o1, __clear_user_done | 91 | brz,pn %o1, __clear_user_done |
96 | cmp %o1, 16 | 92 | cmp %o1, 16 |
97 | bl,pn %icc, __clear_user_tiny | 93 | bl,pn %icc, __clear_user_tiny |
@@ -146,4 +142,4 @@ __clear_user_tiny: | |||
146 | __clear_user_done: | 142 | __clear_user_done: |
147 | retl | 143 | retl |
148 | clr %o0 | 144 | clr %o0 |
149 | .size __clear_user, .-__clear_user | 145 | ENDPROC(__clear_user) |
diff --git a/arch/sparc/lib/ipcsum.S b/arch/sparc/lib/ipcsum.S index 58ca5b9a8778..4742d59029ee 100644 --- a/arch/sparc/lib/ipcsum.S +++ b/arch/sparc/lib/ipcsum.S | |||
@@ -1,8 +1,7 @@ | |||
1 | #include <linux/linkage.h> | ||
2 | |||
1 | .text | 3 | .text |
2 | .align 32 | 4 | ENTRY(ip_fast_csum) /* %o0 = iph, %o1 = ihl */ |
3 | .globl ip_fast_csum | ||
4 | .type ip_fast_csum,#function | ||
5 | ip_fast_csum: /* %o0 = iph, %o1 = ihl */ | ||
6 | sub %o1, 4, %g7 | 5 | sub %o1, 4, %g7 |
7 | lduw [%o0 + 0x00], %o2 | 6 | lduw [%o0 + 0x00], %o2 |
8 | lduw [%o0 + 0x04], %g2 | 7 | lduw [%o0 + 0x04], %g2 |
@@ -31,4 +30,4 @@ ip_fast_csum: /* %o0 = iph, %o1 = ihl */ | |||
31 | set 0xffff, %o1 | 30 | set 0xffff, %o1 |
32 | retl | 31 | retl |
33 | and %o2, %o1, %o0 | 32 | and %o2, %o1, %o0 |
34 | .size ip_fast_csum, .-ip_fast_csum | 33 | ENDPROC(ip_fast_csum) |
diff --git a/arch/sparc/lib/lshrdi3.S b/arch/sparc/lib/lshrdi3.S index 47a1354c1602..60ebc7cdbee0 100644 --- a/arch/sparc/lib/lshrdi3.S +++ b/arch/sparc/lib/lshrdi3.S | |||
@@ -1,6 +1,6 @@ | |||
1 | #include <linux/linkage.h> | ||
1 | 2 | ||
2 | .globl __lshrdi3 | 3 | ENTRY(__lshrdi3) |
3 | __lshrdi3: | ||
4 | cmp %o2, 0 | 4 | cmp %o2, 0 |
5 | be 3f | 5 | be 3f |
6 | mov 0x20, %g2 | 6 | mov 0x20, %g2 |
@@ -24,3 +24,4 @@ __lshrdi3: | |||
24 | 3: | 24 | 3: |
25 | retl | 25 | retl |
26 | nop | 26 | nop |
27 | ENDPROC(__lshrdi3) | ||
diff --git a/arch/sparc/lib/memmove.S b/arch/sparc/lib/memmove.S index 97395802c23c..b7f6334e159f 100644 --- a/arch/sparc/lib/memmove.S +++ b/arch/sparc/lib/memmove.S | |||
@@ -4,11 +4,10 @@ | |||
4 | * Copyright (C) 1996, 1997, 1998, 1999 Jakub Jelinek (jj@ultra.linux.cz) | 4 | * Copyright (C) 1996, 1997, 1998, 1999 Jakub Jelinek (jj@ultra.linux.cz) |
5 | */ | 5 | */ |
6 | 6 | ||
7 | #include <linux/linkage.h> | ||
8 | |||
7 | .text | 9 | .text |
8 | .align 32 | 10 | ENTRY(memmove) /* o0=dst o1=src o2=len */ |
9 | .globl memmove | ||
10 | .type memmove,#function | ||
11 | memmove: /* o0=dst o1=src o2=len */ | ||
12 | mov %o0, %g1 | 11 | mov %o0, %g1 |
13 | cmp %o0, %o1 | 12 | cmp %o0, %o1 |
14 | bleu,pt %xcc, memcpy | 13 | bleu,pt %xcc, memcpy |
@@ -28,4 +27,4 @@ memmove: /* o0=dst o1=src o2=len */ | |||
28 | 27 | ||
29 | retl | 28 | retl |
30 | mov %g1, %o0 | 29 | mov %g1, %o0 |
31 | .size memmove, .-memmove | 30 | ENDPROC(memmove) |
diff --git a/arch/sparc/lib/strlen_user_64.S b/arch/sparc/lib/strlen_user_64.S index 114ed111e251..c3df71fa4928 100644 --- a/arch/sparc/lib/strlen_user_64.S +++ b/arch/sparc/lib/strlen_user_64.S | |||
@@ -8,16 +8,16 @@ | |||
8 | * Copyright (C) 1996,1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | 8 | * Copyright (C) 1996,1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz) |
9 | */ | 9 | */ |
10 | 10 | ||
11 | #include <linux/linkage.h> | ||
11 | #include <asm/asi.h> | 12 | #include <asm/asi.h> |
12 | 13 | ||
13 | #define LO_MAGIC 0x01010101 | 14 | #define LO_MAGIC 0x01010101 |
14 | #define HI_MAGIC 0x80808080 | 15 | #define HI_MAGIC 0x80808080 |
15 | 16 | ||
16 | .align 4 | 17 | .align 4 |
17 | .global __strlen_user, __strnlen_user | 18 | ENTRY(__strlen_user) |
18 | __strlen_user: | ||
19 | sethi %hi(32768), %o1 | 19 | sethi %hi(32768), %o1 |
20 | __strnlen_user: | 20 | ENTRY(__strnlen_user) |
21 | mov %o1, %g1 | 21 | mov %o1, %g1 |
22 | mov %o0, %o1 | 22 | mov %o0, %o1 |
23 | andcc %o0, 3, %g0 | 23 | andcc %o0, 3, %g0 |
@@ -78,6 +78,8 @@ __strnlen_user: | |||
78 | mov 2, %o0 | 78 | mov 2, %o0 |
79 | 23: retl | 79 | 23: retl |
80 | mov 3, %o0 | 80 | mov 3, %o0 |
81 | ENDPROC(__strlen_user) | ||
82 | ENDPROC(__strnlen_user) | ||
81 | 83 | ||
82 | .section .fixup,#alloc,#execinstr | 84 | .section .fixup,#alloc,#execinstr |
83 | .align 4 | 85 | .align 4 |
diff --git a/arch/sparc/lib/strncmp_32.S b/arch/sparc/lib/strncmp_32.S index 1476f574db22..c0d1b568c1c5 100644 --- a/arch/sparc/lib/strncmp_32.S +++ b/arch/sparc/lib/strncmp_32.S | |||
@@ -3,10 +3,10 @@ | |||
3 | * generic strncmp routine. | 3 | * generic strncmp routine. |
4 | */ | 4 | */ |
5 | 5 | ||
6 | #include <linux/linkage.h> | ||
7 | |||
6 | .text | 8 | .text |
7 | .align 4 | 9 | ENTRY(strncmp) |
8 | .global strncmp | ||
9 | strncmp: | ||
10 | mov %o0, %g3 | 10 | mov %o0, %g3 |
11 | mov 0, %o3 | 11 | mov 0, %o3 |
12 | 12 | ||
@@ -115,3 +115,4 @@ strncmp: | |||
115 | and %g2, 0xff, %o0 | 115 | and %g2, 0xff, %o0 |
116 | retl | 116 | retl |
117 | sub %o3, %o0, %o0 | 117 | sub %o3, %o0, %o0 |
118 | ENDPROC(strncmp) | ||
diff --git a/arch/sparc/lib/strncmp_64.S b/arch/sparc/lib/strncmp_64.S index 980e83751556..0656627166f3 100644 --- a/arch/sparc/lib/strncmp_64.S +++ b/arch/sparc/lib/strncmp_64.S | |||
@@ -4,13 +4,11 @@ | |||
4 | * Copyright (C) 1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | 4 | * Copyright (C) 1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz) |
5 | */ | 5 | */ |
6 | 6 | ||
7 | #include <linux/linkage.h> | ||
7 | #include <asm/asi.h> | 8 | #include <asm/asi.h> |
8 | 9 | ||
9 | .text | 10 | .text |
10 | .align 32 | 11 | ENTRY(strncmp) |
11 | .globl strncmp | ||
12 | .type strncmp,#function | ||
13 | strncmp: | ||
14 | brlez,pn %o2, 3f | 12 | brlez,pn %o2, 3f |
15 | lduba [%o0] (ASI_PNF), %o3 | 13 | lduba [%o0] (ASI_PNF), %o3 |
16 | 1: | 14 | 1: |
@@ -29,4 +27,4 @@ strncmp: | |||
29 | 3: | 27 | 3: |
30 | retl | 28 | retl |
31 | clr %o0 | 29 | clr %o0 |
32 | .size strncmp, .-strncmp | 30 | ENDPROC(strncmp) |
diff --git a/arch/sparc/lib/strncpy_from_user_32.S b/arch/sparc/lib/strncpy_from_user_32.S index d77198976a66..db0ed2964bdb 100644 --- a/arch/sparc/lib/strncpy_from_user_32.S +++ b/arch/sparc/lib/strncpy_from_user_32.S | |||
@@ -3,11 +3,11 @@ | |||
3 | * Copyright(C) 1996 David S. Miller | 3 | * Copyright(C) 1996 David S. Miller |
4 | */ | 4 | */ |
5 | 5 | ||
6 | #include <linux/linkage.h> | ||
6 | #include <asm/ptrace.h> | 7 | #include <asm/ptrace.h> |
7 | #include <asm/errno.h> | 8 | #include <asm/errno.h> |
8 | 9 | ||
9 | .text | 10 | .text |
10 | .align 4 | ||
11 | 11 | ||
12 | /* Must return: | 12 | /* Must return: |
13 | * | 13 | * |
@@ -16,8 +16,7 @@ | |||
16 | * bytes copied if we hit a null byte | 16 | * bytes copied if we hit a null byte |
17 | */ | 17 | */ |
18 | 18 | ||
19 | .globl __strncpy_from_user | 19 | ENTRY(__strncpy_from_user) |
20 | __strncpy_from_user: | ||
21 | /* %o0=dest, %o1=src, %o2=count */ | 20 | /* %o0=dest, %o1=src, %o2=count */ |
22 | mov %o2, %o3 | 21 | mov %o2, %o3 |
23 | 1: | 22 | 1: |
@@ -35,6 +34,7 @@ __strncpy_from_user: | |||
35 | add %o2, 1, %o0 | 34 | add %o2, 1, %o0 |
36 | retl | 35 | retl |
37 | sub %o3, %o0, %o0 | 36 | sub %o3, %o0, %o0 |
37 | ENDPROC(__strncpy_from_user) | ||
38 | 38 | ||
39 | .section .fixup,#alloc,#execinstr | 39 | .section .fixup,#alloc,#execinstr |
40 | .align 4 | 40 | .align 4 |
diff --git a/arch/sparc/lib/strncpy_from_user_64.S b/arch/sparc/lib/strncpy_from_user_64.S index 511c8f136f95..d1246b713077 100644 --- a/arch/sparc/lib/strncpy_from_user_64.S +++ b/arch/sparc/lib/strncpy_from_user_64.S | |||
@@ -4,6 +4,7 @@ | |||
4 | * Copyright (C) 1997, 1999 Jakub Jelinek (jj@ultra.linux.cz) | 4 | * Copyright (C) 1997, 1999 Jakub Jelinek (jj@ultra.linux.cz) |
5 | */ | 5 | */ |
6 | 6 | ||
7 | #include <linux/linkage.h> | ||
7 | #include <asm/asi.h> | 8 | #include <asm/asi.h> |
8 | #include <asm/errno.h> | 9 | #include <asm/errno.h> |
9 | 10 | ||
@@ -12,7 +13,6 @@ | |||
12 | 0: .xword 0x0101010101010101 | 13 | 0: .xword 0x0101010101010101 |
13 | 14 | ||
14 | .text | 15 | .text |
15 | .align 32 | ||
16 | 16 | ||
17 | /* Must return: | 17 | /* Must return: |
18 | * | 18 | * |
@@ -30,9 +30,7 @@ | |||
30 | * and average length is 18 or so. | 30 | * and average length is 18 or so. |
31 | */ | 31 | */ |
32 | 32 | ||
33 | .globl __strncpy_from_user | 33 | ENTRY(__strncpy_from_user) |
34 | .type __strncpy_from_user,#function | ||
35 | __strncpy_from_user: | ||
36 | /* %o0=dest, %o1=src, %o2=count */ | 34 | /* %o0=dest, %o1=src, %o2=count */ |
37 | andcc %o1, 7, %g0 ! IEU1 Group | 35 | andcc %o1, 7, %g0 ! IEU1 Group |
38 | bne,pn %icc, 30f ! CTI | 36 | bne,pn %icc, 30f ! CTI |
@@ -123,7 +121,7 @@ __strncpy_from_user: | |||
123 | mov %o2, %o0 | 121 | mov %o2, %o0 |
124 | 2: retl | 122 | 2: retl |
125 | add %o2, %o3, %o0 | 123 | add %o2, %o3, %o0 |
126 | .size __strncpy_from_user, .-__strncpy_from_user | 124 | ENDPROC(__strncpy_from_user) |
127 | 125 | ||
128 | .section __ex_table,"a" | 126 | .section __ex_table,"a" |
129 | .align 4 | 127 | .align 4 |
diff --git a/arch/sparc/lib/xor.S b/arch/sparc/lib/xor.S index f44f58f40234..2c05641c3263 100644 --- a/arch/sparc/lib/xor.S +++ b/arch/sparc/lib/xor.S | |||
@@ -8,6 +8,7 @@ | |||
8 | * Copyright (C) 2006 David S. Miller <davem@davemloft.net> | 8 | * Copyright (C) 2006 David S. Miller <davem@davemloft.net> |
9 | */ | 9 | */ |
10 | 10 | ||
11 | #include <linux/linkage.h> | ||
11 | #include <asm/visasm.h> | 12 | #include <asm/visasm.h> |
12 | #include <asm/asi.h> | 13 | #include <asm/asi.h> |
13 | #include <asm/dcu.h> | 14 | #include <asm/dcu.h> |
@@ -19,12 +20,9 @@ | |||
19 | * !(len & 127) && len >= 256 | 20 | * !(len & 127) && len >= 256 |
20 | */ | 21 | */ |
21 | .text | 22 | .text |
22 | .align 32 | ||
23 | 23 | ||
24 | /* VIS versions. */ | 24 | /* VIS versions. */ |
25 | .globl xor_vis_2 | 25 | ENTRY(xor_vis_2) |
26 | .type xor_vis_2,#function | ||
27 | xor_vis_2: | ||
28 | rd %fprs, %o5 | 26 | rd %fprs, %o5 |
29 | andcc %o5, FPRS_FEF|FPRS_DU, %g0 | 27 | andcc %o5, FPRS_FEF|FPRS_DU, %g0 |
30 | be,pt %icc, 0f | 28 | be,pt %icc, 0f |
@@ -91,11 +89,9 @@ xor_vis_2: | |||
91 | wr %g1, %g0, %asi | 89 | wr %g1, %g0, %asi |
92 | retl | 90 | retl |
93 | wr %g0, 0, %fprs | 91 | wr %g0, 0, %fprs |
94 | .size xor_vis_2, .-xor_vis_2 | 92 | ENDPROC(xor_vis_2) |
95 | 93 | ||
96 | .globl xor_vis_3 | 94 | ENTRY(xor_vis_3) |
97 | .type xor_vis_3,#function | ||
98 | xor_vis_3: | ||
99 | rd %fprs, %o5 | 95 | rd %fprs, %o5 |
100 | andcc %o5, FPRS_FEF|FPRS_DU, %g0 | 96 | andcc %o5, FPRS_FEF|FPRS_DU, %g0 |
101 | be,pt %icc, 0f | 97 | be,pt %icc, 0f |
@@ -159,11 +155,9 @@ xor_vis_3: | |||
159 | wr %g1, %g0, %asi | 155 | wr %g1, %g0, %asi |
160 | retl | 156 | retl |
161 | wr %g0, 0, %fprs | 157 | wr %g0, 0, %fprs |
162 | .size xor_vis_3, .-xor_vis_3 | 158 | ENDPROC(xor_vis_3) |
163 | 159 | ||
164 | .globl xor_vis_4 | 160 | ENTRY(xor_vis_4) |
165 | .type xor_vis_4,#function | ||
166 | xor_vis_4: | ||
167 | rd %fprs, %o5 | 161 | rd %fprs, %o5 |
168 | andcc %o5, FPRS_FEF|FPRS_DU, %g0 | 162 | andcc %o5, FPRS_FEF|FPRS_DU, %g0 |
169 | be,pt %icc, 0f | 163 | be,pt %icc, 0f |
@@ -246,11 +240,9 @@ xor_vis_4: | |||
246 | wr %g1, %g0, %asi | 240 | wr %g1, %g0, %asi |
247 | retl | 241 | retl |
248 | wr %g0, 0, %fprs | 242 | wr %g0, 0, %fprs |
249 | .size xor_vis_4, .-xor_vis_4 | 243 | ENDPROC(xor_vis_4) |
250 | 244 | ||
251 | .globl xor_vis_5 | 245 | ENTRY(xor_vis_5) |
252 | .type xor_vis_5,#function | ||
253 | xor_vis_5: | ||
254 | save %sp, -192, %sp | 246 | save %sp, -192, %sp |
255 | rd %fprs, %o5 | 247 | rd %fprs, %o5 |
256 | andcc %o5, FPRS_FEF|FPRS_DU, %g0 | 248 | andcc %o5, FPRS_FEF|FPRS_DU, %g0 |
@@ -354,12 +346,10 @@ xor_vis_5: | |||
354 | wr %g0, 0, %fprs | 346 | wr %g0, 0, %fprs |
355 | ret | 347 | ret |
356 | restore | 348 | restore |
357 | .size xor_vis_5, .-xor_vis_5 | 349 | ENDPROC(xor_vis_5) |
358 | 350 | ||
359 | /* Niagara versions. */ | 351 | /* Niagara versions. */ |
360 | .globl xor_niagara_2 | 352 | ENTRY(xor_niagara_2) /* %o0=bytes, %o1=dest, %o2=src */ |
361 | .type xor_niagara_2,#function | ||
362 | xor_niagara_2: /* %o0=bytes, %o1=dest, %o2=src */ | ||
363 | save %sp, -192, %sp | 353 | save %sp, -192, %sp |
364 | prefetch [%i1], #n_writes | 354 | prefetch [%i1], #n_writes |
365 | prefetch [%i2], #one_read | 355 | prefetch [%i2], #one_read |
@@ -402,11 +392,9 @@ xor_niagara_2: /* %o0=bytes, %o1=dest, %o2=src */ | |||
402 | wr %g7, 0x0, %asi | 392 | wr %g7, 0x0, %asi |
403 | ret | 393 | ret |
404 | restore | 394 | restore |
405 | .size xor_niagara_2, .-xor_niagara_2 | 395 | ENDPROC(xor_niagara_2) |
406 | 396 | ||
407 | .globl xor_niagara_3 | 397 | ENTRY(xor_niagara_3) /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2 */ |
408 | .type xor_niagara_3,#function | ||
409 | xor_niagara_3: /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2 */ | ||
410 | save %sp, -192, %sp | 398 | save %sp, -192, %sp |
411 | prefetch [%i1], #n_writes | 399 | prefetch [%i1], #n_writes |
412 | prefetch [%i2], #one_read | 400 | prefetch [%i2], #one_read |
@@ -465,11 +453,9 @@ xor_niagara_3: /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2 */ | |||
465 | wr %g7, 0x0, %asi | 453 | wr %g7, 0x0, %asi |
466 | ret | 454 | ret |
467 | restore | 455 | restore |
468 | .size xor_niagara_3, .-xor_niagara_3 | 456 | ENDPROC(xor_niagara_3) |
469 | 457 | ||
470 | .globl xor_niagara_4 | 458 | ENTRY(xor_niagara_4) /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2, %o4=src3 */ |
471 | .type xor_niagara_4,#function | ||
472 | xor_niagara_4: /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2, %o4=src3 */ | ||
473 | save %sp, -192, %sp | 459 | save %sp, -192, %sp |
474 | prefetch [%i1], #n_writes | 460 | prefetch [%i1], #n_writes |
475 | prefetch [%i2], #one_read | 461 | prefetch [%i2], #one_read |
@@ -549,11 +535,9 @@ xor_niagara_4: /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2, %o4=src3 */ | |||
549 | wr %g7, 0x0, %asi | 535 | wr %g7, 0x0, %asi |
550 | ret | 536 | ret |
551 | restore | 537 | restore |
552 | .size xor_niagara_4, .-xor_niagara_4 | 538 | ENDPROC(xor_niagara_4) |
553 | 539 | ||
554 | .globl xor_niagara_5 | 540 | ENTRY(xor_niagara_5) /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2, %o4=src3, %o5=src4 */ |
555 | .type xor_niagara_5,#function | ||
556 | xor_niagara_5: /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2, %o4=src3, %o5=src4 */ | ||
557 | save %sp, -192, %sp | 541 | save %sp, -192, %sp |
558 | prefetch [%i1], #n_writes | 542 | prefetch [%i1], #n_writes |
559 | prefetch [%i2], #one_read | 543 | prefetch [%i2], #one_read |
@@ -649,4 +633,4 @@ xor_niagara_5: /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2, %o4=src3, %o5=src4 * | |||
649 | wr %g7, 0x0, %asi | 633 | wr %g7, 0x0, %asi |
650 | ret | 634 | ret |
651 | restore | 635 | restore |
652 | .size xor_niagara_5, .-xor_niagara_5 | 636 | ENDPROC(xor_niagara_5) |