author:    Catalin Marinas <catalin.marinas@arm.com>  2008-08-28 06:22:32 -0400
committer: Russell King <rmk+kernel@arm.linux.org.uk>  2008-09-01 07:06:34 -0400
commit:    93ed3970114983543bbebd195bef65db84444ea2 (patch)
tree:      9df88b61a2a7b3cc493c6cfc5f4848448250f6b5 /arch/arm
parent:    8d5796d2ec6b5a4e7a52861144e63af438d6f8f7 (diff)

[ARM] 5227/1: Add the ENDPROC declarations to the .S files

This declaration specifies the "function" type and size for various assembly
functions, mainly needed for generating the correct branch instructions in
Thumb-2.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
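For context, the pattern the patch applies throughout is sketched below. This is a
minimal illustration with a hypothetical symbol name (my_func), not code from the
tree: ENTRY() exports and aligns the symbol, and the paired ENDPROC() marks it as a
%function of known size, which is what lets the assembler/linker pick the correct
interworking branch (BL vs BLX) when calling it from Thumb-2 code.

	@ Hypothetical ENTRY/ENDPROC pairing (assumed example, not from the patch)
	#include <linux/linkage.h>

	ENTRY(my_func)			@ expands to .globl my_func; alignment; my_func:
		mov	r0, #0		@ function body
		mov	pc, lr		@ return
	ENDPROC(my_func)		@ .type my_func, %function; .size my_func, . - my_func

Without the ENDPROC, the symbol has no function type and a Thumb-2 caller may get a
plain BL to ARM code, which is why the declarations are added file by file below.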
Diffstat (limited to 'arch/arm')
-rw-r--r--  arch/arm/boot/compressed/head.S | 1
-rw-r--r--  arch/arm/kernel/debug.S | 5
-rw-r--r--  arch/arm/kernel/entry-armv.S | 16
-rw-r--r--  arch/arm/kernel/entry-common.S | 25
-rw-r--r--  arch/arm/kernel/head-common.S | 19
-rw-r--r--  arch/arm/kernel/head-nommu.S | 4
-rw-r--r--  arch/arm/kernel/head.S | 12
-rw-r--r--  arch/arm/lib/ashldi3.S | 2
-rw-r--r--  arch/arm/lib/ashrdi3.S | 2
-rw-r--r--  arch/arm/lib/backtrace.S | 4
-rw-r--r--  arch/arm/lib/changebit.S | 2
-rw-r--r--  arch/arm/lib/clear_user.S | 1
-rw-r--r--  arch/arm/lib/clearbit.S | 2
-rw-r--r--  arch/arm/lib/copy_from_user.S | 2
-rw-r--r--  arch/arm/lib/copy_page.S | 1
-rw-r--r--  arch/arm/lib/copy_to_user.S | 2
-rw-r--r--  arch/arm/lib/csumipv6.S | 1
-rw-r--r--  arch/arm/lib/csumpartial.S | 1
-rw-r--r--  arch/arm/lib/csumpartialcopy.S | 1
-rw-r--r--  arch/arm/lib/csumpartialcopygeneric.S | 1
-rw-r--r--  arch/arm/lib/csumpartialcopyuser.S | 1
-rw-r--r--  arch/arm/lib/delay.S | 3
-rw-r--r--  arch/arm/lib/div64.S | 1
-rw-r--r--  arch/arm/lib/findbit.S | 8
-rw-r--r--  arch/arm/lib/getuser.S | 14
-rw-r--r--  arch/arm/lib/io-readsb.S | 1
-rw-r--r--  arch/arm/lib/io-readsl.S | 1
-rw-r--r--  arch/arm/lib/io-readsw-armv4.S | 1
-rw-r--r--  arch/arm/lib/io-writesb.S | 1
-rw-r--r--  arch/arm/lib/io-writesl.S | 1
-rw-r--r--  arch/arm/lib/io-writesw-armv4.S | 1
-rw-r--r--  arch/arm/lib/lib1funcs.S | 11
-rw-r--r--  arch/arm/lib/lshrdi3.S | 2
-rw-r--r--  arch/arm/lib/memchr.S | 1
-rw-r--r--  arch/arm/lib/memcpy.S | 1
-rw-r--r--  arch/arm/lib/memmove.S | 1
-rw-r--r--  arch/arm/lib/memset.S | 1
-rw-r--r--  arch/arm/lib/memzero.S | 1
-rw-r--r--  arch/arm/lib/muldi3.S | 2
-rw-r--r--  arch/arm/lib/putuser.S | 18
-rw-r--r--  arch/arm/lib/setbit.S | 2
-rw-r--r--  arch/arm/lib/sha1.S | 3
-rw-r--r--  arch/arm/lib/strchr.S | 1
-rw-r--r--  arch/arm/lib/strncpy_from_user.S | 1
-rw-r--r--  arch/arm/lib/strnlen_user.S | 1
-rw-r--r--  arch/arm/lib/strrchr.S | 1
-rw-r--r--  arch/arm/lib/testchangebit.S | 2
-rw-r--r--  arch/arm/lib/testclearbit.S | 2
-rw-r--r--  arch/arm/lib/testsetbit.S | 2
-rw-r--r--  arch/arm/lib/uaccess.S | 2
-rw-r--r--  arch/arm/lib/ucmpdi2.S | 4
-rw-r--r--  arch/arm/mm/abort-ev7.S | 1
-rw-r--r--  arch/arm/mm/abort-nommu.S | 1
-rw-r--r--  arch/arm/mm/cache-v7.S | 10
-rw-r--r--  arch/arm/mm/proc-v7.S | 8
-rw-r--r--  arch/arm/mm/tlb-v7.S | 2
-rw-r--r--  arch/arm/vfp/entry.S | 8
-rw-r--r--  arch/arm/vfp/vfphw.S | 25
58 files changed, 201 insertions(+), 50 deletions(-)
diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S
index d42f89b7760b..1f1729b76d01 100644
--- a/arch/arm/boot/compressed/head.S
+++ b/arch/arm/boot/compressed/head.S
@@ -421,6 +421,7 @@ __setup_mmu: sub r3, r4, #16384 @ Page directory size
 		add	r1, r1, #1048576
 		str	r1, [r0]
 		mov	pc, lr
+ENDPROC(__setup_mmu)
 
 __armv4_mmu_cache_on:
 		mov	r12, lr
diff --git a/arch/arm/kernel/debug.S b/arch/arm/kernel/debug.S
index 9550ff0ddde4..f53c58290543 100644
--- a/arch/arm/kernel/debug.S
+++ b/arch/arm/kernel/debug.S
@@ -89,10 +89,12 @@
 ENTRY(printhex8)
 		mov	r1, #8
 		b	printhex
+ENDPROC(printhex8)
 
 ENTRY(printhex4)
 		mov	r1, #4
 		b	printhex
+ENDPROC(printhex4)
 
 ENTRY(printhex2)
 		mov	r1, #2
@@ -110,6 +112,7 @@ printhex: adr r2, hexbuf
 		bne	1b
 		mov	r0, r2
 		b	printascii
+ENDPROC(printhex2)
 
 		.ltorg
 
@@ -127,11 +130,13 @@ ENTRY(printascii)
 		teqne	r1, #0
 		bne	1b
 		mov	pc, lr
+ENDPROC(printascii)
 
 ENTRY(printch)
 		addruart r3
 		mov	r1, r0
 		mov	r0, #0
 		b	1b
+ENDPROC(printch)
 
 hexbuf:		.space 16
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index 617e509d60df..77b047475539 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -76,14 +76,17 @@
 __pabt_invalid:
 	inv_entry BAD_PREFETCH
 	b	common_invalid
+ENDPROC(__pabt_invalid)
 
 __dabt_invalid:
 	inv_entry BAD_DATA
 	b	common_invalid
+ENDPROC(__dabt_invalid)
 
 __irq_invalid:
 	inv_entry BAD_IRQ
 	b	common_invalid
+ENDPROC(__irq_invalid)
 
 __und_invalid:
 	inv_entry BAD_UNDEFINSTR
@@ -107,6 +110,7 @@ common_invalid:
 
 	mov	r0, sp
 	b	bad_mode
+ENDPROC(__und_invalid)
 
 /*
  * SVC mode handlers
@@ -192,6 +196,7 @@ __dabt_svc:
 	ldr	r0, [sp, #S_PSR]
 	msr	spsr_cxsf, r0
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+ENDPROC(__dabt_svc)
 
 	.align	5
 __irq_svc:
@@ -223,6 +228,7 @@ __irq_svc:
 	bleq	trace_hardirqs_on
 #endif
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+ENDPROC(__irq_svc)
 
 	.ltorg
 
@@ -272,6 +278,7 @@ __und_svc:
 	ldr	lr, [sp, #S_PSR]		@ Get SVC cpsr
 	msr	spsr_cxsf, lr
 	ldmia	sp, {r0 - pc}^			@ Restore SVC registers
+ENDPROC(__und_svc)
 
 	.align	5
 __pabt_svc:
@@ -313,6 +320,7 @@ __pabt_svc:
 	ldr	r0, [sp, #S_PSR]
 	msr	spsr_cxsf, r0
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+ENDPROC(__pabt_svc)
 
 	.align	5
 .LCcralign:
@@ -412,6 +420,7 @@ __dabt_usr:
 	mov	r2, sp
 	adr	lr, ret_from_exception
 	b	do_DataAbort
+ENDPROC(__dabt_usr)
 
 	.align	5
 __irq_usr:
@@ -441,6 +450,7 @@ __irq_usr:
 
 	mov	why, #0
 	b	ret_to_user
+ENDPROC(__irq_usr)
 
 	.ltorg
 
@@ -474,6 +484,7 @@ __und_usr:
 #else
 	b	__und_usr_unknown
 #endif
+ENDPROC(__und_usr)
 
 	@
 	@ fallthrough to call_fpe
@@ -642,6 +653,7 @@ __und_usr_unknown:
 	mov	r0, sp
 	adr	lr, ret_from_exception
 	b	do_undefinstr
+ENDPROC(__und_usr_unknown)
 
 	.align	5
 __pabt_usr:
@@ -666,6 +678,8 @@ ENTRY(ret_from_exception)
 	get_thread_info tsk
 	mov	why, #0
 	b	ret_to_user
+ENDPROC(__pabt_usr)
+ENDPROC(ret_from_exception)
 
 /*
  * Register switch for ARMv3 and ARMv4 processors
@@ -702,6 +716,7 @@ ENTRY(__switch_to)
 	bl	atomic_notifier_call_chain
 	mov	r0, r5
 	ldmia	r4, {r4 - sl, fp, sp, pc}	@ Load all regs saved previously
+ENDPROC(__switch_to)
 
 	__INIT
 
@@ -1029,6 +1044,7 @@ vector_\name:
 	mov	r0, sp
 	ldr	lr, [pc, lr, lsl #2]
 	movs	pc, lr				@ branch to handler in SVC mode
+ENDPROC(vector_\name)
 	.endm
 
 	.globl	__stubs_start
diff --git a/arch/arm/kernel/entry-common.S b/arch/arm/kernel/entry-common.S
index 060d7e2e9f64..3aa14dcc5bab 100644
--- a/arch/arm/kernel/entry-common.S
+++ b/arch/arm/kernel/entry-common.S
@@ -77,6 +77,7 @@ no_work_pending:
 	mov	r0, r0
 	add	sp, sp, #S_FRAME_SIZE - S_PC
 	movs	pc, lr				@ return & move spsr_svc into cpsr
+ENDPROC(ret_to_user)
 
 /*
  * This is how we return from a fork.
@@ -92,7 +93,7 @@ ENTRY(ret_from_fork)
 	mov	r0, #1				@ trace exit [IP = 1]
 	bl	syscall_trace
 	b	ret_slow_syscall
-
+ENDPROC(ret_from_fork)
 
 	.equ NR_syscalls,0
 #define CALL(x) .equ NR_syscalls,NR_syscalls+1
@@ -269,6 +270,7 @@ ENTRY(vector_swi)
 	eor	r0, scno, #__NR_SYSCALL_BASE	@ put OS number back
 	bcs	arm_syscall
 	b	sys_ni_syscall			@ not private func
+ENDPROC(vector_swi)
 
 	/*
 	 * This is the really slow path.  We're going to be doing
@@ -326,7 +328,6 @@ ENTRY(sys_call_table)
  */
 @ r0 = syscall number
 @ r8 = syscall table
-		.type	sys_syscall, #function
 sys_syscall:
 		bic	scno, r0, #__NR_OABI_SYSCALL_BASE
 		cmp	scno, #__NR_syscall - __NR_SYSCALL_BASE
@@ -338,53 +339,65 @@ sys_syscall:
 		movlo	r3, r4
 		ldrlo	pc, [tbl, scno, lsl #2]
 		b	sys_ni_syscall
+ENDPROC(sys_syscall)
 
 sys_fork_wrapper:
 		add	r0, sp, #S_OFF
 		b	sys_fork
+ENDPROC(sys_fork_wrapper)
 
 sys_vfork_wrapper:
 		add	r0, sp, #S_OFF
 		b	sys_vfork
+ENDPROC(sys_vfork_wrapper)
 
 sys_execve_wrapper:
 		add	r3, sp, #S_OFF
 		b	sys_execve
+ENDPROC(sys_execve_wrapper)
 
 sys_clone_wrapper:
 		add	ip, sp, #S_OFF
 		str	ip, [sp, #4]
 		b	sys_clone
+ENDPROC(sys_clone_wrapper)
 
 sys_sigsuspend_wrapper:
 		add	r3, sp, #S_OFF
 		b	sys_sigsuspend
+ENDPROC(sys_sigsuspend_wrapper)
 
 sys_rt_sigsuspend_wrapper:
 		add	r2, sp, #S_OFF
 		b	sys_rt_sigsuspend
+ENDPROC(sys_rt_sigsuspend_wrapper)
 
 sys_sigreturn_wrapper:
 		add	r0, sp, #S_OFF
 		b	sys_sigreturn
+ENDPROC(sys_sigreturn_wrapper)
 
 sys_rt_sigreturn_wrapper:
 		add	r0, sp, #S_OFF
 		b	sys_rt_sigreturn
+ENDPROC(sys_rt_sigreturn_wrapper)
 
 sys_sigaltstack_wrapper:
 		ldr	r2, [sp, #S_OFF + S_SP]
 		b	do_sigaltstack
+ENDPROC(sys_sigaltstack_wrapper)
 
 sys_statfs64_wrapper:
 		teq	r1, #88
 		moveq	r1, #84
 		b	sys_statfs64
+ENDPROC(sys_statfs64_wrapper)
 
 sys_fstatfs64_wrapper:
 		teq	r1, #88
 		moveq	r1, #84
 		b	sys_fstatfs64
+ENDPROC(sys_fstatfs64_wrapper)
 
 /*
  * Note: off_4k (r5) is always units of 4K.  If we can't do the requested
@@ -402,11 +415,14 @@ sys_mmap2:
 		str	r5, [sp, #4]
 		b	do_mmap2
 #endif
+ENDPROC(sys_mmap2)
 
 ENTRY(pabort_ifar)
 		mrc	p15, 0, r0, cr6, cr0, 2
 ENTRY(pabort_noifar)
 		mov	pc, lr
+ENDPROC(pabort_ifar)
+ENDPROC(pabort_noifar)
 
 #ifdef CONFIG_OABI_COMPAT
 
@@ -417,26 +433,31 @@ ENTRY(pabort_noifar)
 sys_oabi_pread64:
 		stmia	sp, {r3, r4}
 		b	sys_pread64
+ENDPROC(sys_oabi_pread64)
 
 sys_oabi_pwrite64:
 		stmia	sp, {r3, r4}
 		b	sys_pwrite64
+ENDPROC(sys_oabi_pwrite64)
 
 sys_oabi_truncate64:
 		mov	r3, r2
 		mov	r2, r1
 		b	sys_truncate64
+ENDPROC(sys_oabi_truncate64)
 
 sys_oabi_ftruncate64:
 		mov	r3, r2
 		mov	r2, r1
 		b	sys_ftruncate64
+ENDPROC(sys_oabi_ftruncate64)
 
 sys_oabi_readahead:
 		str	r3, [sp]
 		mov	r3, r2
 		mov	r2, r1
 		b	sys_readahead
+ENDPROC(sys_oabi_readahead)
 
 /*
  * Let's declare a second syscall table for old ABI binaries
diff --git a/arch/arm/kernel/head-common.S b/arch/arm/kernel/head-common.S
index 1c3c6ea5f9e7..bde52df1c668 100644
--- a/arch/arm/kernel/head-common.S
+++ b/arch/arm/kernel/head-common.S
@@ -36,7 +36,6 @@ __switch_data:
  * r2  = atags pointer
  * r9  = processor ID
  */
-	.type	__mmap_switched, %function
 __mmap_switched:
 	adr	r3, __switch_data + 4
 
@@ -59,6 +58,7 @@ __mmap_switched:
 	bic	r4, r0, #CR_A			@ Clear 'A' bit
 	stmia	r7, {r0, r4}			@ Save control register values
 	b	start_kernel
+ENDPROC(__mmap_switched)
 
 /*
  * Exception handling.  Something went wrong and we can't proceed.  We
@@ -69,8 +69,6 @@ __mmap_switched:
  * and hope for the best (useful if bootloader fails to pass a proper
  * machine ID for example).
  */
-
-	.type	__error_p, %function
 __error_p:
 #ifdef CONFIG_DEBUG_LL
 	adr	r0, str_p1
@@ -84,8 +82,8 @@ str_p1: .asciz "\nError: unrecognized/unsupported processor variant (0x"
str_p2:	.asciz	").\n"
 	.align
 #endif
+ENDPROC(__error_p)
 
-	.type	__error_a, %function
 __error_a:
 #ifdef CONFIG_DEBUG_LL
 	mov	r4, r1				@ preserve machine ID
@@ -115,13 +113,14 @@ __error_a:
 	adr	r0, str_a3
 	bl	printascii
 	b	__error
+ENDPROC(__error_a)
+
str_a1:	.asciz	"\nError: unrecognized/unsupported machine ID (r1 = 0x"
str_a2:	.asciz	").\n\nAvailable machine support:\n\nID (hex)\tNAME\n"
str_a3:	.asciz	"\nPlease check your kernel config and/or bootloader.\n"
 	.align
 #endif
 
-	.type	__error, %function
 __error:
 #ifdef CONFIG_ARCH_RPC
 /*
@@ -138,6 +137,7 @@ __error:
 #endif
 1:	mov	r0, r0
 	b	1b
+ENDPROC(__error)
 
 
 /*
@@ -153,7 +153,6 @@ __error:
  * r5 = proc_info pointer in physical address space
  * r9 = cpuid (preserved)
  */
-	.type	__lookup_processor_type, %function
 __lookup_processor_type:
 	adr	r3, 3f
 	ldmda	r3, {r5 - r7}
@@ -169,6 +168,7 @@ __lookup_processor_type:
 	blo	1b
 	mov	r5, #0				@ unknown processor
 2:	mov	pc, lr
+ENDPROC(__lookup_processor_type)
 
 /*
  * This provides a C-API version of the above function.
@@ -179,6 +179,7 @@ ENTRY(lookup_processor_type)
 	bl	__lookup_processor_type
 	mov	r0, r5
 	ldmfd	sp!, {r4 - r7, r9, pc}
+ENDPROC(lookup_processor_type)
 
 /*
  * Look in <asm/procinfo.h> and arch/arm/kernel/arch.[ch] for
@@ -201,7 +202,6 @@ ENTRY(lookup_processor_type)
  *  r3, r4, r6 corrupted
 *  r5 = mach_info pointer in physical address space
  */
-	.type	__lookup_machine_type, %function
 __lookup_machine_type:
 	adr	r3, 3b
 	ldmia	r3, {r4, r5, r6}
@@ -216,6 +216,7 @@ __lookup_machine_type:
 	blo	1b
 	mov	r5, #0				@ unknown machine
 2:	mov	pc, lr
+ENDPROC(__lookup_machine_type)
 
 /*
  * This provides a C-API version of the above function.
@@ -226,6 +227,7 @@ ENTRY(lookup_machine_type)
 	bl	__lookup_machine_type
 	mov	r0, r5
 	ldmfd	sp!, {r4 - r6, pc}
+ENDPROC(lookup_machine_type)
 
 /* Determine validity of the r2 atags pointer.  The heuristic requires
  * that the pointer be aligned, in the first 16k of physical RAM and
@@ -239,8 +241,6 @@ ENTRY(lookup_machine_type)
  *  r2 either valid atags pointer, or zero
  *  r5, r6 corrupted
  */
-
-	.type	__vet_atags, %function
 __vet_atags:
 	tst	r2, #0x3			@ aligned?
 	bne	1f
@@ -257,3 +257,4 @@ __vet_atags:
 
 1:	mov	r2, #0
 	mov	pc, lr
+ENDPROC(__vet_atags)
diff --git a/arch/arm/kernel/head-nommu.S b/arch/arm/kernel/head-nommu.S
index 27329bd32037..cc87e1765ed2 100644
--- a/arch/arm/kernel/head-nommu.S
+++ b/arch/arm/kernel/head-nommu.S
@@ -33,7 +33,6 @@
  *
  */
 	.section ".text.head", "ax"
-	.type	stext, %function
 ENTRY(stext)
 	msr	cpsr_c, #PSR_F_BIT | PSR_I_BIT | SVC_MODE @ ensure svc mode
 						@ and irqs disabled
@@ -53,11 +52,11 @@ ENTRY(stext)
 						@ the initialization is done
 	adr	lr, __after_proc_init		@ return (PIC) address
 	add	pc, r10, #PROCINFO_INITFUNC
+ENDPROC(stext)
 
 /*
  * Set the Control Register and Read the process ID.
  */
-	.type	__after_proc_init, %function
 __after_proc_init:
 #ifdef CONFIG_CPU_CP15
 	mrc	p15, 0, r0, c1, c0, 0		@ read control reg
@@ -85,6 +84,7 @@ __after_proc_init:
 
 	mov	pc, r13				@ clear the BSS and jump
 						@ to start_kernel
+ENDPROC(__after_proc_init)
 	.ltorg
 
 #include "head-common.S"
diff --git a/arch/arm/kernel/head.S b/arch/arm/kernel/head.S
index bff4c6e90dd5..21e17dc94cb5 100644
--- a/arch/arm/kernel/head.S
+++ b/arch/arm/kernel/head.S
@@ -75,7 +75,6 @@
  * circumstances, zImage) is for.
  */
 	.section ".text.head", "ax"
-	.type	stext, %function
 ENTRY(stext)
 	msr	cpsr_c, #PSR_F_BIT | PSR_I_BIT | SVC_MODE @ ensure svc mode
 						@ and irqs disabled
@@ -100,9 +99,9 @@ ENTRY(stext)
 						@ mmu has been enabled
 	adr	lr, __enable_mmu		@ return (PIC) address
 	add	pc, r10, #PROCINFO_INITFUNC
+ENDPROC(stext)
 
 #if defined(CONFIG_SMP)
-	.type	secondary_startup, #function
 ENTRY(secondary_startup)
 	/*
 	 * Common entry point for secondary CPUs.
@@ -128,6 +127,7 @@ ENTRY(secondary_startup)
 	adr	lr, __enable_mmu		@ return address
 	add	pc, r10, #PROCINFO_INITFUNC	@ initialise processor
 						@ (return control reg)
+ENDPROC(secondary_startup)
 
 	/*
 	 * r6  = &secondary_data
@@ -136,6 +136,7 @@ ENTRY(__secondary_switched)
 	ldr	sp, [r7, #4]			@ get secondary_data.stack
 	mov	fp, #0
 	b	secondary_start_kernel
+ENDPROC(__secondary_switched)
 
 	.type	__secondary_data, %object
 __secondary_data:
@@ -151,7 +152,6 @@ __secondary_data:
  * this is just loading the page table pointer and domain access
 * registers.
  */
-	.type	__enable_mmu, %function
 __enable_mmu:
 #ifdef CONFIG_ALIGNMENT_TRAP
 	orr	r0, r0, #CR_A
@@ -174,6 +174,7 @@ __enable_mmu:
 	mcr	p15, 0, r5, c3, c0, 0		@ load domain access register
 	mcr	p15, 0, r4, c2, c0, 0		@ load page table pointer
 	b	__turn_mmu_on
+ENDPROC(__enable_mmu)
 
 /*
  * Enable the MMU.  This completely changes the structure of the visible
@@ -187,7 +188,6 @@ __enable_mmu:
  * other registers depend on the function called upon completion
  */
 	.align	5
-	.type	__turn_mmu_on, %function
 __turn_mmu_on:
 	mov	r0, r0
 	mcr	p15, 0, r0, c1, c0, 0		@ write control reg
@@ -195,7 +195,7 @@ __turn_mmu_on:
 	mov	r3, r3
 	mov	r3, r3
 	mov	pc, r13
-
+ENDPROC(__turn_mmu_on)
 
 
 /*
@@ -211,7 +211,6 @@ __turn_mmu_on:
  * r0, r3, r6, r7 corrupted
  * r4 = physical page table address
  */
-	.type	__create_page_tables, %function
 __create_page_tables:
 	pgtbl	r4				@ page table address
 
@@ -325,6 +324,7 @@ __create_page_tables:
 #endif
 #endif
 	mov	pc, lr
+ENDPROC(__create_page_tables)
 	.ltorg
 
 #include "head-common.S"
diff --git a/arch/arm/lib/ashldi3.S b/arch/arm/lib/ashldi3.S
index 55e57a1c2e6d..1154d924080b 100644
--- a/arch/arm/lib/ashldi3.S
+++ b/arch/arm/lib/ashldi3.S
@@ -47,3 +47,5 @@ ENTRY(__aeabi_llsl)
 	mov	al, al, lsl r2
 	mov	pc, lr
 
+ENDPROC(__ashldi3)
+ENDPROC(__aeabi_llsl)
diff --git a/arch/arm/lib/ashrdi3.S b/arch/arm/lib/ashrdi3.S
index 0b31398f89b2..9f8b35572f8c 100644
--- a/arch/arm/lib/ashrdi3.S
+++ b/arch/arm/lib/ashrdi3.S
@@ -47,3 +47,5 @@ ENTRY(__aeabi_lasr)
 	mov	ah, ah, asr r2
 	mov	pc, lr
 
+ENDPROC(__ashrdi3)
+ENDPROC(__aeabi_lasr)
diff --git a/arch/arm/lib/backtrace.S b/arch/arm/lib/backtrace.S
index 84dc890d2bf3..b0951d0e8b2c 100644
--- a/arch/arm/lib/backtrace.S
+++ b/arch/arm/lib/backtrace.S
@@ -30,6 +30,8 @@ ENTRY(c_backtrace)
 
 #if !defined(CONFIG_FRAME_POINTER) || !defined(CONFIG_PRINTK)
 	mov	pc, lr
+ENDPROC(__backtrace)
+ENDPROC(c_backtrace)
 #else
 	stmfd	sp!, {r4 - r8, lr}	@ Save an extra register so we have a location...
 	movs	frame, r0		@ if frame pointer is zero
@@ -103,6 +105,8 @@ for_each_frame: tst frame, mask @ Check for address exceptions
 	mov	r1, frame
 	bl	printk
no_frame:	ldmfd	sp!, {r4 - r8, pc}
+ENDPROC(__backtrace)
+ENDPROC(c_backtrace)
 
 	.section __ex_table,"a"
 	.align	3
diff --git a/arch/arm/lib/changebit.S b/arch/arm/lib/changebit.S
index 389567c24090..80f3115cbee2 100644
--- a/arch/arm/lib/changebit.S
+++ b/arch/arm/lib/changebit.S
@@ -19,3 +19,5 @@ ENTRY(_change_bit_be)
 	eor	r0, r0, #0x18		@ big endian byte ordering
 ENTRY(_change_bit_le)
 	bitop	eor
+ENDPROC(_change_bit_be)
+ENDPROC(_change_bit_le)
diff --git a/arch/arm/lib/clear_user.S b/arch/arm/lib/clear_user.S
index ecb28dcdaf7b..81041a3f8d50 100644
--- a/arch/arm/lib/clear_user.S
+++ b/arch/arm/lib/clear_user.S
@@ -44,6 +44,7 @@ USER( strnebt r2, [r0], #1)
 USER(		strnebt	r2, [r0], #1)
 		mov	r0, #0
 		ldmfd	sp!, {r1, pc}
+ENDPROC(__clear_user)
 
 		.section .fixup,"ax"
 		.align	0
diff --git a/arch/arm/lib/clearbit.S b/arch/arm/lib/clearbit.S
index 347516533025..1a63e43a1df0 100644
--- a/arch/arm/lib/clearbit.S
+++ b/arch/arm/lib/clearbit.S
@@ -20,3 +20,5 @@ ENTRY(_clear_bit_be)
 	eor	r0, r0, #0x18		@ big endian byte ordering
 ENTRY(_clear_bit_le)
 	bitop	bic
+ENDPROC(_clear_bit_be)
+ENDPROC(_clear_bit_le)
diff --git a/arch/arm/lib/copy_from_user.S b/arch/arm/lib/copy_from_user.S
index 6b7363ce749c..56799a165cc4 100644
--- a/arch/arm/lib/copy_from_user.S
+++ b/arch/arm/lib/copy_from_user.S
@@ -87,6 +87,8 @@ ENTRY(__copy_from_user)
 
 #include "copy_template.S"
 
+ENDPROC(__copy_from_user)
+
 	.section .fixup,"ax"
 	.align 0
 	copy_abort_preamble
diff --git a/arch/arm/lib/copy_page.S b/arch/arm/lib/copy_page.S
index 666c99cc0744..6ae04db1ca4f 100644
--- a/arch/arm/lib/copy_page.S
+++ b/arch/arm/lib/copy_page.S
@@ -44,3 +44,4 @@ ENTRY(copy_page)
 	PLD(	ldmeqia r1!, {r3, r4, ip, lr}	)
 	PLD(	beq	2b			)
 	ldmfd	sp!, {r4, pc}			@	3
+ENDPROC(copy_page)
diff --git a/arch/arm/lib/copy_to_user.S b/arch/arm/lib/copy_to_user.S
index 5224d94688d9..22f968bbdffd 100644
--- a/arch/arm/lib/copy_to_user.S
+++ b/arch/arm/lib/copy_to_user.S
@@ -90,6 +90,8 @@ ENTRY(__copy_to_user)
 
 #include "copy_template.S"
 
+ENDPROC(__copy_to_user)
+
 	.section .fixup,"ax"
 	.align 0
 	copy_abort_preamble
diff --git a/arch/arm/lib/csumipv6.S b/arch/arm/lib/csumipv6.S
index 9621469beec1..3ac6ef01bc43 100644
--- a/arch/arm/lib/csumipv6.S
+++ b/arch/arm/lib/csumipv6.S
@@ -29,4 +29,5 @@ ENTRY(__csum_ipv6_magic)
 	adcs	r0, r0, r2
 	adcs	r0, r0, #0
 	ldmfd	sp!, {pc}
+ENDPROC(__csum_ipv6_magic)
 
diff --git a/arch/arm/lib/csumpartial.S b/arch/arm/lib/csumpartial.S
index a78dae5a7b28..31d3cb34740d 100644
--- a/arch/arm/lib/csumpartial.S
+++ b/arch/arm/lib/csumpartial.S
@@ -139,3 +139,4 @@ ENTRY(csum_partial)
 	tst	len, #0x1c
 	bne	4b
 	b	.Lless4
+ENDPROC(csum_partial)
diff --git a/arch/arm/lib/csumpartialcopy.S b/arch/arm/lib/csumpartialcopy.S
index 21effe0dbf97..80aa2c795155 100644
--- a/arch/arm/lib/csumpartialcopy.S
+++ b/arch/arm/lib/csumpartialcopy.S
@@ -50,5 +50,6 @@
 		.endm
 
 #define FN_ENTRY	ENTRY(csum_partial_copy_nocheck)
+#define FN_EXIT		ENDPROC(csum_partial_copy_nocheck)
 
 #include "csumpartialcopygeneric.S"
diff --git a/arch/arm/lib/csumpartialcopygeneric.S b/arch/arm/lib/csumpartialcopygeneric.S
index c50e8f5285d1..d620a5f22a09 100644
--- a/arch/arm/lib/csumpartialcopygeneric.S
+++ b/arch/arm/lib/csumpartialcopygeneric.S
@@ -329,3 +329,4 @@ FN_ENTRY
 		adcs	sum, sum, r4, push #24
 		mov	r5, r4, get_byte_1
 		b	.Lexit
+FN_EXIT
diff --git a/arch/arm/lib/csumpartialcopyuser.S b/arch/arm/lib/csumpartialcopyuser.S
index c3b93e22ea25..e8b9c24551c2 100644
--- a/arch/arm/lib/csumpartialcopyuser.S
+++ b/arch/arm/lib/csumpartialcopyuser.S
@@ -82,6 +82,7 @@
  */
 
 #define FN_ENTRY	ENTRY(csum_partial_copy_from_user)
+#define FN_EXIT		ENDPROC(csum_partial_copy_from_user)
 
 #include "csumpartialcopygeneric.S"
 
diff --git a/arch/arm/lib/delay.S b/arch/arm/lib/delay.S
index 930a70259220..8d6a8762ab88 100644
--- a/arch/arm/lib/delay.S
+++ b/arch/arm/lib/delay.S
@@ -60,3 +60,6 @@ ENTRY(__delay)
 #endif
 		bhi	__delay
 		mov	pc, lr
+ENDPROC(__udelay)
+ENDPROC(__const_udelay)
+ENDPROC(__delay)
diff --git a/arch/arm/lib/div64.S b/arch/arm/lib/div64.S
index 58eef6607629..1425e789ba86 100644
--- a/arch/arm/lib/div64.S
+++ b/arch/arm/lib/div64.S
@@ -198,3 +198,4 @@ ENTRY(__do_div64)
 	mov	xh, #0
 	ldr	pc, [sp], #8
 
+ENDPROC(__do_div64)
diff --git a/arch/arm/lib/findbit.S b/arch/arm/lib/findbit.S
index a5ca0248aa4e..8c4defc4f3c4 100644
--- a/arch/arm/lib/findbit.S
+++ b/arch/arm/lib/findbit.S
@@ -33,6 +33,7 @@ ENTRY(_find_first_zero_bit_le)
 		blo	1b
3:		mov	r0, r1			@ no free bits
 		mov	pc, lr
+ENDPROC(_find_first_zero_bit_le)
 
 /*
  * Purpose  : Find next 'zero' bit
@@ -50,6 +51,7 @@ ENTRY(_find_next_zero_bit_le)
 		orr	r2, r2, #7		@ if zero, then no bits here
 		add	r2, r2, #1		@ align bit pointer
 		b	2b			@ loop for next bit
+ENDPROC(_find_next_zero_bit_le)
 
 /*
  * Purpose  : Find a 'one' bit
@@ -67,6 +69,7 @@ ENTRY(_find_first_bit_le)
 		blo	1b
3:		mov	r0, r1			@ no free bits
 		mov	pc, lr
+ENDPROC(_find_first_bit_le)
 
 /*
  * Purpose  : Find next 'one' bit
@@ -83,6 +86,7 @@ ENTRY(_find_next_bit_le)
 		orr	r2, r2, #7		@ if zero, then no bits here
 		add	r2, r2, #1		@ align bit pointer
 		b	2b			@ loop for next bit
+ENDPROC(_find_next_bit_le)
 
 #ifdef __ARMEB__
 
@@ -99,6 +103,7 @@ ENTRY(_find_first_zero_bit_be)
 		blo	1b
3:		mov	r0, r1			@ no free bits
 		mov	pc, lr
+ENDPROC(_find_first_zero_bit_be)
 
 ENTRY(_find_next_zero_bit_be)
 		teq	r1, #0
@@ -113,6 +118,7 @@ ENTRY(_find_next_zero_bit_be)
 		orr	r2, r2, #7		@ if zero, then no bits here
 		add	r2, r2, #1		@ align bit pointer
 		b	2b			@ loop for next bit
+ENDPROC(_find_next_zero_bit_be)
 
 ENTRY(_find_first_bit_be)
 		teq	r1, #0
@@ -127,6 +133,7 @@ ENTRY(_find_first_bit_be)
 		blo	1b
3:		mov	r0, r1			@ no free bits
 		mov	pc, lr
+ENDPROC(_find_first_bit_be)
 
 ENTRY(_find_next_bit_be)
 		teq	r1, #0
@@ -140,6 +147,7 @@ ENTRY(_find_next_bit_be)
 		orr	r2, r2, #7		@ if zero, then no bits here
 		add	r2, r2, #1		@ align bit pointer
 		b	2b			@ loop for next bit
+ENDPROC(_find_next_bit_be)
 
 #endif
 
diff --git a/arch/arm/lib/getuser.S b/arch/arm/lib/getuser.S
index 2034d4dbe6ad..6763088b7607 100644
--- a/arch/arm/lib/getuser.S
+++ b/arch/arm/lib/getuser.S
@@ -26,16 +26,16 @@
  * Note that ADDR_LIMIT is either 0 or 0xc0000000.
  * Note also that it is intended that __get_user_bad is not global.
  */
+#include <linux/linkage.h>
 #include <asm/errno.h>
 
-	.global	__get_user_1
-__get_user_1:
+ENTRY(__get_user_1)
1:	ldrbt	r2, [r0]
 	mov	r0, #0
 	mov	pc, lr
+ENDPROC(__get_user_1)
 
-	.global	__get_user_2
-__get_user_2:
+ENTRY(__get_user_2)
2:	ldrbt	r2, [r0], #1
3:	ldrbt	r3, [r0]
 #ifndef __ARMEB__
@@ -45,17 +45,19 @@ __get_user_2:
 #endif
 	mov	r0, #0
 	mov	pc, lr
+ENDPROC(__get_user_2)
 
-	.global	__get_user_4
-__get_user_4:
+ENTRY(__get_user_4)
4:	ldrt	r2, [r0]
 	mov	r0, #0
 	mov	pc, lr
+ENDPROC(__get_user_4)
 
 __get_user_bad:
 	mov	r2, #0
 	mov	r0, #-EFAULT
 	mov	pc, lr
+ENDPROC(__get_user_bad)
 
 .section __ex_table, "a"
 	.long	1b, __get_user_bad
diff --git a/arch/arm/lib/io-readsb.S b/arch/arm/lib/io-readsb.S
index fb966ad0276f..9f4238987fe9 100644
--- a/arch/arm/lib/io-readsb.S
+++ b/arch/arm/lib/io-readsb.S
@@ -120,3 +120,4 @@ ENTRY(__raw_readsb)
 		strgtb	r3, [r1]
 
 		ldmfd	sp!, {r4 - r6, pc}
+ENDPROC(__raw_readsb)
diff --git a/arch/arm/lib/io-readsl.S b/arch/arm/lib/io-readsl.S
index 75a9121cb23f..5fb97e7f9f4b 100644
--- a/arch/arm/lib/io-readsl.S
+++ b/arch/arm/lib/io-readsl.S
@@ -76,3 +76,4 @@ ENTRY(__raw_readsl)
8:		mov	r3, ip, get_byte_0
 		strb	r3, [r1, #0]
 		mov	pc, lr
+ENDPROC(__raw_readsl)
diff --git a/arch/arm/lib/io-readsw-armv4.S b/arch/arm/lib/io-readsw-armv4.S
index 4db1c5f0b219..1f393d42593d 100644
--- a/arch/arm/lib/io-readsw-armv4.S
+++ b/arch/arm/lib/io-readsw-armv4.S
@@ -128,3 +128,4 @@ ENTRY(__raw_readsw)
 	_BE_ONLY_(	movne	ip, ip, lsr #24	)
 		strneb	ip, [r1]
 		ldmfd	sp!, {r4, pc}
+ENDPROC(__raw_readsw)
diff --git a/arch/arm/lib/io-writesb.S b/arch/arm/lib/io-writesb.S
index 7eba2b6cc69f..68b92f4acaeb 100644
--- a/arch/arm/lib/io-writesb.S
+++ b/arch/arm/lib/io-writesb.S
@@ -91,3 +91,4 @@ ENTRY(__raw_writesb)
 		strgtb	r3, [r0]
 
 		ldmfd	sp!, {r4, r5, pc}
+ENDPROC(__raw_writesb)
diff --git a/arch/arm/lib/io-writesl.S b/arch/arm/lib/io-writesl.S
index f8f14dd227ca..8d3b7813725c 100644
--- a/arch/arm/lib/io-writesl.S
+++ b/arch/arm/lib/io-writesl.S
@@ -64,3 +64,4 @@ ENTRY(__raw_writesl)
 		str	ip, [r0]
 		bne	6b
 		mov	pc, lr
+ENDPROC(__raw_writesl)
diff --git a/arch/arm/lib/io-writesw-armv4.S b/arch/arm/lib/io-writesw-armv4.S
index c8e85bd653b7..d6585612c86b 100644
--- a/arch/arm/lib/io-writesw-armv4.S
+++ b/arch/arm/lib/io-writesw-armv4.S
@@ -94,3 +94,4 @@ ENTRY(__raw_writesw)
3:		movne	ip, r3, lsr #8
 		strneh	ip, [r0]
 		mov	pc, lr
+ENDPROC(__raw_writesw)
diff --git a/arch/arm/lib/lib1funcs.S b/arch/arm/lib/lib1funcs.S
index 4e492f4b3f0e..67964bcfc854 100644
--- a/arch/arm/lib/lib1funcs.S
+++ b/arch/arm/lib/lib1funcs.S
@@ -230,6 +230,8 @@ ENTRY(__aeabi_uidiv)
 	mov	r0, r0, lsr r2
 	mov	pc, lr
 
+ENDPROC(__udivsi3)
+ENDPROC(__aeabi_uidiv)
 
 ENTRY(__umodsi3)
 
@@ -245,6 +247,7 @@ ENTRY(__umodsi3)
 
 	mov	pc, lr
 
+ENDPROC(__umodsi3)
 
 ENTRY(__divsi3)
 ENTRY(__aeabi_idiv)
@@ -284,6 +287,8 @@ ENTRY(__aeabi_idiv)
 	rsbmi	r0, r0, #0
 	mov	pc, lr
 
+ENDPROC(__divsi3)
+ENDPROC(__aeabi_idiv)
 
 ENTRY(__modsi3)
 
@@ -305,6 +310,8 @@ ENTRY(__modsi3)
 	rsbmi	r0, r0, #0
 	mov	pc, lr
 
+ENDPROC(__modsi3)
+
 #ifdef CONFIG_AEABI
 
 ENTRY(__aeabi_uidivmod)
@@ -316,6 +323,8 @@ ENTRY(__aeabi_uidivmod)
 	sub	r1, r1, r3
 	mov	pc, lr
 
+ENDPROC(__aeabi_uidivmod)
+
 ENTRY(__aeabi_idivmod)
 
 	stmfd	sp!, {r0, r1, ip, lr}
@@ -325,6 +334,8 @@ ENTRY(__aeabi_idivmod)
 	sub	r1, r1, r3
 	mov	pc, lr
 
+ENDPROC(__aeabi_idivmod)
+
 #endif
 
 Ldiv0:
diff --git a/arch/arm/lib/lshrdi3.S b/arch/arm/lib/lshrdi3.S
index a86dbdd59cc4..99ea338bf87c 100644
--- a/arch/arm/lib/lshrdi3.S
+++ b/arch/arm/lib/lshrdi3.S
@@ -47,3 +47,5 @@ ENTRY(__aeabi_llsr)
 	mov	ah, ah, lsr r2
 	mov	pc, lr
 
+ENDPROC(__lshrdi3)
+ENDPROC(__aeabi_llsr)
diff --git a/arch/arm/lib/memchr.S b/arch/arm/lib/memchr.S
index e7ab1ea8ebaa..1da86991d700 100644
--- a/arch/arm/lib/memchr.S
+++ b/arch/arm/lib/memchr.S
@@ -23,3 +23,4 @@ ENTRY(memchr)
 	sub	r0, r0, #1
2:	movne	r0, #0
 	mov	pc, lr
+ENDPROC(memchr)
diff --git a/arch/arm/lib/memcpy.S b/arch/arm/lib/memcpy.S
index 7e71d6708a8d..e0d002641d3f 100644
--- a/arch/arm/lib/memcpy.S
+++ b/arch/arm/lib/memcpy.S
@@ -57,3 +57,4 @@ ENTRY(memcpy)
 
 #include "copy_template.S"
 
+ENDPROC(memcpy)
diff --git a/arch/arm/lib/memmove.S b/arch/arm/lib/memmove.S
index 2e301b7bd8f1..12549187088c 100644
--- a/arch/arm/lib/memmove.S
+++ b/arch/arm/lib/memmove.S
@@ -196,3 +196,4 @@ ENTRY(memmove)
 
18:	backward_copy_shift	push=24	pull=8
 
+ENDPROC(memmove)
diff --git a/arch/arm/lib/memset.S b/arch/arm/lib/memset.S
index b477d4ac88ef..761eefa76243 100644
--- a/arch/arm/lib/memset.S
+++ b/arch/arm/lib/memset.S
@@ -124,3 +124,4 @@ ENTRY(memset)
 	tst	r2, #1
 	strneb	r1, [r0], #1
 	mov	pc, lr
+ENDPROC(memset)
diff --git a/arch/arm/lib/memzero.S b/arch/arm/lib/memzero.S
index b8f79d80ee9b..3fbdef5f802a 100644
--- a/arch/arm/lib/memzero.S
+++ b/arch/arm/lib/memzero.S
@@ -122,3 +122,4 @@ ENTRY(__memzero)
 	tst	r1, #1			@ 1 a byte left over
 	strneb	r2, [r0], #1		@ 1
 	mov	pc, lr			@ 1
+ENDPROC(__memzero)
diff --git a/arch/arm/lib/muldi3.S b/arch/arm/lib/muldi3.S
index d89c60615794..36c91b4957e2 100644
--- a/arch/arm/lib/muldi3.S
+++ b/arch/arm/lib/muldi3.S
@@ -43,3 +43,5 @@ ENTRY(__aeabi_lmul)
 	adc	xh, xh, ip, lsr #16
 	mov	pc, lr
 
+ENDPROC(__muldi3)
+ENDPROC(__aeabi_lmul)
diff --git a/arch/arm/lib/putuser.S b/arch/arm/lib/putuser.S
index 08ec7dffa52e..864f3c1c4f18 100644
--- a/arch/arm/lib/putuser.S
+++ b/arch/arm/lib/putuser.S
@@ -26,16 +26,16 @@
  * Note that ADDR_LIMIT is either 0 or 0xc0000000
  * Note also that it is intended that __put_user_bad is not global.
  */
+#include <linux/linkage.h>
 #include <asm/errno.h>
 
-	.global	__put_user_1
-__put_user_1:
+ENTRY(__put_user_1)
1:	strbt	r2, [r0]
 	mov	r0, #0
 	mov	pc, lr
+ENDPROC(__put_user_1)
 
-	.global	__put_user_2
-__put_user_2:
+ENTRY(__put_user_2)
 	mov	ip, r2, lsr #8
 #ifndef __ARMEB__
2:	strbt	r2, [r0], #1
@@ -46,23 +46,25 @@ __put_user_2:
 #endif
 	mov	r0, #0
 	mov	pc, lr
+ENDPROC(__put_user_2)
 
-	.global	__put_user_4
-__put_user_4:
+ENTRY(__put_user_4)
4:	strt	r2, [r0]
 	mov	r0, #0
 	mov	pc, lr
+ENDPROC(__put_user_4)
 
-	.global	__put_user_8
-__put_user_8:
+ENTRY(__put_user_8)
5:	strt	r2, [r0], #4
6:	strt	r3, [r0]
 	mov	r0, #0
 	mov	pc, lr
+ENDPROC(__put_user_8)
 
 __put_user_bad:
 	mov	r0, #-EFAULT
 	mov	pc, lr
+ENDPROC(__put_user_bad)
 
 .section __ex_table, "a"
 	.long	1b, __put_user_bad
diff --git a/arch/arm/lib/setbit.S b/arch/arm/lib/setbit.S
index 83bc23d5b037..1dd7176c4b2b 100644
--- a/arch/arm/lib/setbit.S
+++ b/arch/arm/lib/setbit.S
@@ -20,3 +20,5 @@ ENTRY(_set_bit_be)
 	eor	r0, r0, #0x18		@ big endian byte ordering
 ENTRY(_set_bit_le)
 	bitop	orr
+ENDPROC(_set_bit_be)
+ENDPROC(_set_bit_le)
diff --git a/arch/arm/lib/sha1.S b/arch/arm/lib/sha1.S
index ff6ece487ffc..8a1c67fe0544 100644
--- a/arch/arm/lib/sha1.S
+++ b/arch/arm/lib/sha1.S
@@ -185,6 +185,8 @@ ENTRY(sha_transform)
 
 	ldmfd	sp!, {r4 - r8, pc}
 
+ENDPROC(sha_transform)
+
.L_sha_K:
 	.word	0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6
 
@@ -204,3 +206,4 @@ ENTRY(sha_init)
 	stmia	r0, {r1, r2, r3, ip, lr}
 	ldr	pc, [sp], #4
 
+ENDPROC(sha_init)
diff --git a/arch/arm/lib/strchr.S b/arch/arm/lib/strchr.S
index 9f18d6fdee6a..d8f2a1c1aea4 100644
--- a/arch/arm/lib/strchr.S
+++ b/arch/arm/lib/strchr.S
@@ -24,3 +24,4 @@ ENTRY(strchr)
 		movne	r0, #0
 		subeq	r0, r0, #1
 		mov	pc, lr
+ENDPROC(strchr)
diff --git a/arch/arm/lib/strncpy_from_user.S b/arch/arm/lib/strncpy_from_user.S
index 36e3741a3772..330373c26dd9 100644
--- a/arch/arm/lib/strncpy_from_user.S
+++ b/arch/arm/lib/strncpy_from_user.S
@@ -31,6 +31,7 @@ USER( ldrplbt r3, [r1], #1)
 	sub	r1, r1, #1	@ take NUL character out of count
2:	sub	r0, r1, ip
 	mov	pc, lr
+ENDPROC(__strncpy_from_user)
 
 	.section .fixup,"ax"
 	.align	0
diff --git a/arch/arm/lib/strnlen_user.S b/arch/arm/lib/strnlen_user.S
index 18d8fa4f925a..90bb9d020836 100644
--- a/arch/arm/lib/strnlen_user.S
+++ b/arch/arm/lib/strnlen_user.S
@@ -31,6 +31,7 @@ USER( ldrbt r3, [r0], #1)
 	add	r0, r0, #1
2:	sub	r0, r0, r2
 	mov	pc, lr
+ENDPROC(__strnlen_user)
 
 	.section .fixup,"ax"
 	.align	0
diff --git a/arch/arm/lib/strrchr.S b/arch/arm/lib/strrchr.S
index 538df220aa48..302f20cd2423 100644
--- a/arch/arm/lib/strrchr.S
+++ b/arch/arm/lib/strrchr.S
@@ -23,3 +23,4 @@ ENTRY(strrchr)
 		bne	1b
 		mov	r0, r3
 		mov	pc, lr
+ENDPROC(strrchr)
diff --git a/arch/arm/lib/testchangebit.S b/arch/arm/lib/testchangebit.S
index b25dcd2be53e..5c98dc567f0f 100644
--- a/arch/arm/lib/testchangebit.S
+++ b/arch/arm/lib/testchangebit.S
@@ -16,3 +16,5 @@ ENTRY(_test_and_change_bit_be)
 	eor	r0, r0, #0x18		@ big endian byte ordering
 ENTRY(_test_and_change_bit_le)
 	testop	eor, strb
+ENDPROC(_test_and_change_bit_be)
+ENDPROC(_test_and_change_bit_le)
diff --git a/arch/arm/lib/testclearbit.S b/arch/arm/lib/testclearbit.S
index 2dcc4b16b68e..543d7094d18e 100644
--- a/arch/arm/lib/testclearbit.S
+++ b/arch/arm/lib/testclearbit.S
@@ -16,3 +16,5 @@ ENTRY(_test_and_clear_bit_be)
 	eor	r0, r0, #0x18		@ big endian byte ordering
 ENTRY(_test_and_clear_bit_le)
 	testop	bicne, strneb
+ENDPROC(_test_and_clear_bit_be)
+ENDPROC(_test_and_clear_bit_le)
diff --git a/arch/arm/lib/testsetbit.S b/arch/arm/lib/testsetbit.S
index 9011c969761a..0b3f390401ce 100644
--- a/arch/arm/lib/testsetbit.S
+++ b/arch/arm/lib/testsetbit.S
@@ -16,3 +16,5 @@ ENTRY(_test_and_set_bit_be)
 	eor	r0, r0, #0x18		@ big endian byte ordering
 ENTRY(_test_and_set_bit_le)
 	testop	orreq, streqb
+ENDPROC(_test_and_set_bit_be)
+ENDPROC(_test_and_set_bit_le)
diff --git a/arch/arm/lib/uaccess.S b/arch/arm/lib/uaccess.S
index b48bd6d5fd83..ffdd27498cee 100644
--- a/arch/arm/lib/uaccess.S
+++ b/arch/arm/lib/uaccess.S
@@ -277,6 +277,7 @@ USER( strgebt r3, [r0], #1) @ May fault
 		ldrgtb	r3, [r1], #0
USER(		strgtbt	r3, [r0], #1)			@ May fault
 		b	.Lc2u_finished
+ENDPROC(__copy_to_user)
 
 		.section .fixup,"ax"
 		.align	0
@@ -542,6 +543,7 @@ USER( ldrgebt r3, [r1], #1) @ May fault
USER(		ldrgtbt	r3, [r1], #1)			@ May fault
 		strgtb	r3, [r0], #1
 		b	.Lcfu_finished
+ENDPROC(__copy_from_user)
 
 		.section .fixup,"ax"
 		.align	0
diff --git a/arch/arm/lib/ucmpdi2.S b/arch/arm/lib/ucmpdi2.S
index f76de07ac182..f0df6a91db04 100644
--- a/arch/arm/lib/ucmpdi2.S
+++ b/arch/arm/lib/ucmpdi2.S
@@ -33,6 +33,8 @@ ENTRY(__ucmpdi2)
 	movhi	r0, #2
 	mov	pc, lr
 
+ENDPROC(__ucmpdi2)
+
 #ifdef CONFIG_AEABI
 
 ENTRY(__aeabi_ulcmp)
@@ -44,5 +46,7 @@ ENTRY(__aeabi_ulcmp)
 	movhi	r0, #1
 	mov	pc, lr
 
+ENDPROC(__aeabi_ulcmp)
+
 #endif
 
diff --git a/arch/arm/mm/abort-ev7.S b/arch/arm/mm/abort-ev7.S
index eb90bce38e14..2e6dc040c654 100644
--- a/arch/arm/mm/abort-ev7.S
+++ b/arch/arm/mm/abort-ev7.S
@@ -30,3 +30,4 @@ ENTRY(v7_early_abort)
 	 * New designs should not need to patch up faults.
 	 */
 	mov	pc, lr
+ENDPROC(v7_early_abort)
diff --git a/arch/arm/mm/abort-nommu.S b/arch/arm/mm/abort-nommu.S
index a7cc7f9ee45d..625e580945b5 100644
--- a/arch/arm/mm/abort-nommu.S
+++ b/arch/arm/mm/abort-nommu.S
@@ -17,3 +17,4 @@ ENTRY(nommu_early_abort)
 	mov	r0, #0				@ clear r0, r1 (no FSR/FAR)
 	mov	r1, #0
 	mov	pc, lr
+ENDPROC(nommu_early_abort)
diff --git a/arch/arm/mm/cache-v7.S b/arch/arm/mm/cache-v7.S
index 35ffc4d95997..d19c2bec2b1f 100644
--- a/arch/arm/mm/cache-v7.S
+++ b/arch/arm/mm/cache-v7.S
@@ -66,6 +66,7 @@ finished:
 	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
 	isb
 	mov	pc, lr
+ENDPROC(v7_flush_dcache_all)
 
 /*
  *	v7_flush_cache_all()
@@ -85,6 +86,7 @@ ENTRY(v7_flush_kern_cache_all)
 	mcr	p15, 0, r0, c7, c5, 0		@ I+BTB cache invalidate
 	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}
 	mov	pc, lr
+ENDPROC(v7_flush_kern_cache_all)
 
 /*
  *	v7_flush_cache_all()
@@ -110,6 +112,8 @@ ENTRY(v7_flush_user_cache_all)
 */
 ENTRY(v7_flush_user_cache_range)
 	mov	pc, lr
+ENDPROC(v7_flush_user_cache_all)
+ENDPROC(v7_flush_user_cache_range)
 
 /*
  *	v7_coherent_kern_range(start,end)
@@ -155,6 +159,8 @@ ENTRY(v7_coherent_user_range)
 	dsb
 	isb
 	mov	pc, lr
+ENDPROC(v7_coherent_kern_range)
+ENDPROC(v7_coherent_user_range)
 
 /*
  *	v7_flush_kern_dcache_page(kaddr)
@@ -174,6 +180,7 @@ ENTRY(v7_flush_kern_dcache_page)
 	blo	1b
 	dsb
 	mov	pc, lr
+ENDPROC(v7_flush_kern_dcache_page)
 
 /*
  *	v7_dma_inv_range(start,end)
@@ -202,6 +209,7 @@ ENTRY(v7_dma_inv_range)
 	blo	1b
 	dsb
 	mov	pc, lr
+ENDPROC(v7_dma_inv_range)
 
 /*
  *	v7_dma_clean_range(start,end)
@@ -219,6 +227,7 @@ ENTRY(v7_dma_clean_range)
 	blo	1b
 	dsb
 	mov	pc, lr
+ENDPROC(v7_dma_clean_range)
 
 /*
  *	v7_dma_flush_range(start,end)
@@ -236,6 +245,7 @@ ENTRY(v7_dma_flush_range)
 	blo	1b
 	dsb
 	mov	pc, lr
+ENDPROC(v7_dma_flush_range)
 
 	__INITDATA
 
diff --git a/arch/arm/mm/proc-v7.S b/arch/arm/mm/proc-v7.S
index b49f9a4c82c8..dff967784626 100644
--- a/arch/arm/mm/proc-v7.S
+++ b/arch/arm/mm/proc-v7.S
@@ -25,9 +25,11 @@
 
 ENTRY(cpu_v7_proc_init)
 	mov	pc, lr
+ENDPROC(cpu_v7_proc_init)
 
 ENTRY(cpu_v7_proc_fin)
 	mov	pc, lr
+ENDPROC(cpu_v7_proc_fin)
 
 /*
  *	cpu_v7_reset(loc)
@@ -43,6 +45,7 @@ ENTRY(cpu_v7_proc_fin)
 	.align	5
 ENTRY(cpu_v7_reset)
 	mov	pc, r0
+ENDPROC(cpu_v7_reset)
 
 /*
  *	cpu_v7_do_idle()
@@ -54,6 +57,7 @@ ENTRY(cpu_v7_reset)
 ENTRY(cpu_v7_do_idle)
 	.long	0xe320f003			@ ARM V7 WFI instruction
 	mov	pc, lr
+ENDPROC(cpu_v7_do_idle)
 
 ENTRY(cpu_v7_dcache_clean_area)
 #ifndef TLB_CAN_READ_FROM_L1_CACHE
@@ -65,6 +69,7 @@ ENTRY(cpu_v7_dcache_clean_area)
 	dsb
 #endif
 	mov	pc, lr
+ENDPROC(cpu_v7_dcache_clean_area)
 
 /*
  *	cpu_v7_switch_mm(pgd_phys, tsk)
@@ -89,6 +94,7 @@ ENTRY(cpu_v7_switch_mm)
 	isb
 #endif
 	mov	pc, lr
+ENDPROC(cpu_v7_switch_mm)
 
 /*
  *	cpu_v7_set_pte_ext(ptep, pte)
@@ -141,6 +147,7 @@ ENTRY(cpu_v7_set_pte_ext)
 	mcr	p15, 0, r0, c7, c10, 1		@ flush_pte
 #endif
 	mov	pc, lr
+ENDPROC(cpu_v7_set_pte_ext)
 
cpu_v7_name:
 	.ascii	"ARMv7 Processor"
@@ -188,6 +195,7 @@ __v7_setup:
 	bic	r0, r0, r5			@ clear bits them
 	orr	r0, r0, r6			@ set them
 	mov	pc, lr				@ return to head.S:__ret
+ENDPROC(__v7_setup)
 
 	/*
 	 *         V X F   I D LR
diff --git a/arch/arm/mm/tlb-v7.S b/arch/arm/mm/tlb-v7.S
index b56dda8052f7..24ba5109f2e7 100644
--- a/arch/arm/mm/tlb-v7.S
+++ b/arch/arm/mm/tlb-v7.S
@@ -51,6 +51,7 @@ ENTRY(v7wbi_flush_user_tlb_range)
 	mcr	p15, 0, ip, c7, c5, 6		@ flush BTAC/BTB
 	dsb
 	mov	pc, lr
+ENDPROC(v7wbi_flush_user_tlb_range)
 
 /*
  *	v7wbi_flush_kern_tlb_range(start,end)
@@ -77,6 +78,7 @@ ENTRY(v7wbi_flush_kern_tlb_range)
 	dsb
 	isb
 	mov	pc, lr
+ENDPROC(v7wbi_flush_kern_tlb_range)
 
 	.section ".text.init", #alloc, #execinstr
 
diff --git a/arch/arm/vfp/entry.S b/arch/arm/vfp/entry.S
index 806ce26d5243..ba592a9e6fb3 100644
--- a/arch/arm/vfp/entry.S
+++ b/arch/arm/vfp/entry.S
@@ -21,13 +21,13 @@
 #include <asm/assembler.h>
 #include <asm/vfpmacros.h>
 
-	.globl	do_vfp
-do_vfp:
+ENTRY(do_vfp)
 	enable_irq
 	ldr	r4, .LCvfp
 	ldr	r11, [r10, #TI_CPU]	@ CPU number
 	add	r10, r10, #TI_VFPSTATE	@ r10 = workspace
 	ldr	pc, [r4]		@ call VFP entry point
+ENDPROC(do_vfp)
 
 ENTRY(vfp_null_entry)
 	mov	pc, lr
@@ -40,11 +40,11 @@ ENDPROC(vfp_null_entry)
 @ failure to the VFP initialisation code.
 
 	__INIT
-	.globl	vfp_testing_entry
-vfp_testing_entry:
+ENTRY(vfp_testing_entry)
 	ldr	r0, VFP_arch_address
 	str	r5, [r0]		@ known non-zero value
 	mov	pc, r9			@ we have handled the fault
+ENDPROC(vfp_testing_entry)
 
VFP_arch_address:
 	.word	VFP_arch
diff --git a/arch/arm/vfp/vfphw.S b/arch/arm/vfp/vfphw.S
index 353f9e5c7919..a62dcf7098ba 100644
--- a/arch/arm/vfp/vfphw.S
+++ b/arch/arm/vfp/vfphw.S
@@ -68,8 +68,7 @@
 @  r11 = CPU number
 @  lr  = failure return
 
-	.globl	vfp_support_entry
-vfp_support_entry:
+ENTRY(vfp_support_entry)
 	DBGSTR3	"instr %08x pc %08x state %p", r0, r2, r10
 
 	VFPFMRX	r1, FPEXC		@ Is the VFP enabled?
@@ -165,11 +164,10 @@ process_exception:
 					@ code will raise an exception if
 					@ required. If not, the user code will
 					@ retry the faulted instruction
+ENDPROC(vfp_support_entry)
 
 #ifdef CONFIG_SMP
-	.globl	vfp_save_state
-	.type	vfp_save_state, %function
-vfp_save_state:
+ENTRY(vfp_save_state)
 	@ Save the current VFP state
 	@ r0 - save location
 	@ r1 - FPEXC
@@ -182,13 +180,13 @@ vfp_save_state:
 	VFPFMRX	r12, FPINST2, NE	@ FPINST2 if needed (and present)
 	stmia	r0, {r1, r2, r3, r12}	@ save FPEXC, FPSCR, FPINST, FPINST2
 	mov	pc, lr
+ENDPROC(vfp_save_state)
 #endif
 
last_VFP_context_address:
 	.word	last_VFP_context
 
-	.globl	vfp_get_float
-vfp_get_float:
+ENTRY(vfp_get_float)
 	add	pc, pc, r0, lsl #3
 	mov	r0, r0
 	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
@@ -197,9 +195,9 @@ vfp_get_float:
 	mrc	p10, 0, r0, c\dr, c0, 4	@ fmrs	r0, s1
 	mov	pc, lr
 	.endr
+ENDPROC(vfp_get_float)
 
-	.globl	vfp_put_float
-vfp_put_float:
+ENTRY(vfp_put_float)
 	add	pc, pc, r1, lsl #3
 	mov	r0, r0
 	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
@@ -208,9 +206,9 @@ vfp_put_float:
 	mcr	p10, 0, r0, c\dr, c0, 4	@ fmsr	r0, s1
 	mov	pc, lr
 	.endr
+ENDPROC(vfp_put_float)
 
-	.globl	vfp_get_double
-vfp_get_double:
+ENTRY(vfp_get_double)
 	add	pc, pc, r0, lsl #3
 	mov	r0, r0
 	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
@@ -229,9 +227,9 @@ vfp_get_double:
 	mov	r0, #0
 	mov	r1, #0
 	mov	pc, lr
+ENDPROC(vfp_get_double)
 
-	.globl	vfp_put_double
-vfp_put_double:
+ENTRY(vfp_put_double)
 	add	pc, pc, r2, lsl #3
 	mov	r0, r0
 	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
@@ -245,3 +243,4 @@ vfp_put_double:
 	mov	pc, lr
 	.endr
 #endif
+ENDPROC(vfp_put_double)