path: root/arch/powerpc
author    Kumar Gala <galak@kernel.crashing.org>  2007-09-27 09:43:35 -0400
committer Kumar Gala <galak@kernel.crashing.org>  2007-10-04 12:03:06 -0400
commit    3c5df5c26ed17828760945d59653a2e22e3fb63f
tree      03bd5105a8cbae6c0ac83f07da1e01c15a5b2363 /arch/powerpc
parent    b6927bca245f83879bcb319aa108a1a347e36d8f
[POWERPC] Cleaned up whitespace in head_fsl_booke.S
Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
Diffstat (limited to 'arch/powerpc')
-rw-r--r--  arch/powerpc/kernel/head_fsl_booke.S  76
1 file changed, 38 insertions(+), 38 deletions(-)
diff --git a/arch/powerpc/kernel/head_fsl_booke.S b/arch/powerpc/kernel/head_fsl_booke.S
index bfc38703a8ac..ee33ddd97ef3 100644
--- a/arch/powerpc/kernel/head_fsl_booke.S
+++ b/arch/powerpc/kernel/head_fsl_booke.S
@@ -2,27 +2,27 @@
 * Kernel execution entry point code.
 *
 * Copyright (c) 1995-1996 Gary Thomas <gdt@linuxppc.org>
 *	Initial PowerPC version.
 * Copyright (c) 1996 Cort Dougan <cort@cs.nmt.edu>
 *	Rewritten for PReP
 * Copyright (c) 1996 Paul Mackerras <paulus@cs.anu.edu.au>
 *	Low-level exception handers, MMU support, and rewrite.
 * Copyright (c) 1997 Dan Malek <dmalek@jlc.net>
 *	PowerPC 8xx modifications.
 * Copyright (c) 1998-1999 TiVo, Inc.
 *	PowerPC 403GCX modifications.
 * Copyright (c) 1999 Grant Erickson <grant@lcse.umn.edu>
 *	PowerPC 403GCX/405GP modifications.
 * Copyright 2000 MontaVista Software Inc.
 *	PPC405 modifications
 *	PowerPC 403GCX/405GP modifications.
 *	Author: MontaVista Software, Inc.
 *		frank_rowand@mvista.com or source@mvista.com
 *		debbie_chu@mvista.com
 * Copyright 2002-2004 MontaVista Software, Inc.
 *	PowerPC 44x support, Matt Porter <mporter@kernel.crashing.org>
 * Copyright 2004 Freescale Semiconductor, Inc
 *	PowerPC e500 modifications, Kumar Gala <galak@kernel.crashing.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
@@ -146,13 +146,13 @@ skpinv: addi r6,r6,1 /* Increment */
	bne	1b			/* If not, repeat */

	/* Invalidate TLB0 */
	li	r6,0x04
	tlbivax	0,r6
#ifdef CONFIG_SMP
	tlbsync
#endif
	/* Invalidate TLB1 */
	li	r6,0x0c
	tlbivax	0,r6
#ifdef CONFIG_SMP
	tlbsync
@@ -211,7 +211,7 @@ skpinv: addi r6,r6,1 /* Increment */
	mtspr	SPRN_MAS1,r6
	tlbwe
	/* Invalidate TLB1 */
	li	r9,0x0c
	tlbivax	0,r9
#ifdef CONFIG_SMP
	tlbsync
@@ -254,7 +254,7 @@ skpinv: addi r6,r6,1 /* Increment */
	mtspr	SPRN_MAS1,r8
	tlbwe
	/* Invalidate TLB1 */
	li	r9,0x0c
	tlbivax	0,r9
#ifdef CONFIG_SMP
	tlbsync
@@ -294,7 +294,7 @@ skpinv: addi r6,r6,1 /* Increment */
#ifdef CONFIG_E200
	oris	r2,r2,MAS4_TLBSELD(1)@h
#endif
	mtspr	SPRN_MAS4, r2

#if 0
	/* Enable DOZE */
@@ -305,7 +305,7 @@ skpinv: addi r6,r6,1 /* Increment */
#ifdef CONFIG_E200
	/* enable dedicated debug exception handling resources (Debug APU) */
	mfspr	r2,SPRN_HID0
	ori	r2,r2,HID0_DAPUEN@l
	mtspr	SPRN_HID0,r2
#endif

@@ -391,7 +391,7 @@ skpinv: addi r6,r6,1 /* Increment */
#ifdef CONFIG_PTE_64BIT
#define PTE_FLAGS_OFFSET	4
#define FIND_PTE	\
	rlwinm	r12, r10, 13, 19, 29;	/* Compute pgdir/pmd offset */	\
	lwzx	r11, r12, r11;		/* Get pgd/pmd entry */		\
	rlwinm.	r12, r11, 0, 0, 20;	/* Extract pt base address */	\
	beq	2f;			/* Bail if no table */		\
@@ -487,7 +487,7 @@ interrupt_base:
	 */
	andi.	r11, r11, _PAGE_HWEXEC
	rlwimi	r11, r11, 31, 27, 27	/* SX <- _PAGE_HWEXEC */
	ori	r11, r11, (MAS3_UW|MAS3_SW|MAS3_UR|MAS3_SR)@l /* set static perms */

	/* update search PID in MAS6, AS = 0 */
	mfspr	r12, SPRN_PID0
@@ -694,7 +694,7 @@ interrupt_base:
	START_EXCEPTION(SPEUnavailable)
	NORMAL_EXCEPTION_PROLOG
	bne	load_up_spe
	addi	r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_EE_LITE(0x2010, KernelSPE)
#else
	EXCEPTION(0x2020, SPEUnavailable, unknown_exception, EXC_XFER_EE)
@@ -741,10 +741,10 @@ data_access:

 * Both the instruction and data TLB miss get to this
 * point to load the TLB.
 *	r10 - EA of fault
 *	r11 - TLB (info from Linux PTE)
 *	r12, r13 - available to use
 *	CR5 - results of addr < TASK_SIZE
 *	MAS0, MAS1 - loaded with proper value when we get here
 *	MAS2, MAS3 - will need additional info from Linux PTE
 *	Upon exit, we reload everything and RFI.
@@ -813,7 +813,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_BIG_PHYS)
	lwz	r13, tlbcam_index@l(r13)
	rlwimi	r12, r13, 0, 20, 31
7:
	mtspr	SPRN_MAS0,r12
#endif /* CONFIG_E200 */

	tlbwe
@@ -855,17 +855,17 @@ load_up_spe:
	beq	1f
	addi	r4,r4,THREAD		/* want THREAD of last_task_used_spe */
	SAVE_32EVRS(0,r10,r4)
	evxor	evr10, evr10, evr10	/* clear out evr10 */
	evmwumiaa evr10, evr10, evr10	/* evr10 <- ACC = 0 * 0 + ACC */
	li	r5,THREAD_ACC
	evstddx	evr10, r4, r5		/* save off accumulator */
	lwz	r5,PT_REGS(r4)
	lwz	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	lis	r10,MSR_SPE@h
	andc	r4,r4,r10		/* disable SPE for previous task */
	stw	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
-#endif /* CONFIG_SMP */
+#endif /* !CONFIG_SMP */
	/* enable use of SPE after return */
	oris	r9,r9,MSR_SPE@h
	mfspr	r5,SPRN_SPRG3		/* current task's THREAD (phys) */
@@ -878,7 +878,7 @@ load_up_spe:
#ifndef CONFIG_SMP
	subi	r4,r5,THREAD
	stw	r4,last_task_used_spe@l(r3)
-#endif /* CONFIG_SMP */
+#endif /* !CONFIG_SMP */
	/* restore registers and return */
2:	REST_4GPRS(3, r11)
	lwz	r10,_CCR(r11)
@@ -963,10 +963,10 @@ _GLOBAL(giveup_spe)
	lwz	r5,PT_REGS(r3)
	cmpi	0,r5,0
	SAVE_32EVRS(0, r4, r3)
	evxor	evr6, evr6, evr6	/* clear out evr6 */
	evmwumiaa evr6, evr6, evr6	/* evr6 <- ACC = 0 * 0 + ACC */
	li	r4,THREAD_ACC
	evstddx	evr6, r4, r3		/* save off accumulator */
	mfspr	r6,SPRN_SPEFSCR
	stw	r6,THREAD_SPEFSCR(r3)	/* save spefscr register value */
	beq	1f
@@ -979,7 +979,7 @@ _GLOBAL(giveup_spe)
	li	r5,0
	lis	r4,last_task_used_spe@ha
	stw	r5,last_task_used_spe@l(r4)
-#endif /* CONFIG_SMP */
+#endif /* !CONFIG_SMP */
	blr
#endif /* CONFIG_SPE */

@@ -1000,15 +1000,15 @@ _GLOBAL(giveup_fpu)
 */
_GLOBAL(abort)
	li	r13,0
	mtspr	SPRN_DBCR0,r13		/* disable all debug events */
	isync
	mfmsr	r13
	ori	r13,r13,MSR_DE@l	/* Enable Debug Events */
	mtmsr	r13
	isync
	mfspr	r13,SPRN_DBCR0
	lis	r13,(DBCR0_IDM|DBCR0_RST_CHIP)@h
	mtspr	SPRN_DBCR0,r13
	isync

_GLOBAL(set_context)
@@ -1043,7 +1043,7 @@ swapper_pg_dir:
/* Reserved 4k for the critical exception stack & 4k for the machine
 * check stack per CPU for kernel mode exceptions */
	.section .bss
	.align	12
exception_stack_bottom:
	.space	BOOKE_EXCEPTION_STACK_SIZE * NR_CPUS
	.globl	exception_stack_top