Diffstat (limited to 'arch/parisc/kernel/entry.S')
-rw-r--r--  arch/parisc/kernel/entry.S | 188
1 file changed, 85 insertions(+), 103 deletions(-)
diff --git a/arch/parisc/kernel/entry.S b/arch/parisc/kernel/entry.S
index be0f07f2fa58..c7e66ee5b083 100644
--- a/arch/parisc/kernel/entry.S
+++ b/arch/parisc/kernel/entry.S
@@ -30,14 +30,14 @@
  * - save registers to kernel stack and handle in assembly or C */
 
 
+#include <asm/psw.h>
 #include <asm/assembly.h>	/* for LDREG/STREG defines */
 #include <asm/pgtable.h>
-#include <asm/psw.h>
 #include <asm/signal.h>
 #include <asm/unistd.h>
 #include <asm/thread_info.h>
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 #define CMPIB	cmpib,*
 #define CMPB	cmpb,*
 #define COND(x)	*x
@@ -67,19 +67,22 @@
 
 	/* Switch to virtual mapping, trashing only %r1 */
 	.macro	virt_map
-	rsm	PSW_SM_Q,%r0
-	tovirt_r1 %r29
-	mfsp	%sr7, %r1
-	or,=	%r0,%r1,%r0	/* Only save sr7 in sr3 if sr7 != 0 */
-	mtsp	%r1, %sr3
+	/* pcxt_ssm_bug */
+	rsm	PSW_SM_I, %r0	/* barrier for "Relied upon Translation */
 	mtsp	%r0, %sr4
 	mtsp	%r0, %sr5
+	mfsp	%sr7, %r1
+	or,=	%r0,%r1,%r0	/* Only save sr7 in sr3 if sr7 != 0 */
+	mtsp	%r1, %sr3
+	tovirt_r1 %r29
+	load32	KERNEL_PSW, %r1
+
+	rsm	PSW_SM_QUIET,%r0	/* second "heavy weight" ctl op */
 	mtsp	%r0, %sr6
 	mtsp	%r0, %sr7
-	load32	KERNEL_PSW, %r1
-	mtctl	%r1, %cr22
 	mtctl	%r0, %cr17	/* Clear IIASQ tail */
 	mtctl	%r0, %cr17	/* Clear IIASQ head */
+	mtctl	%r1, %ipsw
 	load32	4f, %r1
 	mtctl	%r1, %cr18	/* Set IIAOQ tail */
 	ldo	4(%r1), %r1
@@ -214,7 +217,7 @@
 	va  = r8	/* virtual address for which the trap occured */
 	spc = r24	/* space for which the trap occured */
 
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 
 	/*
 	 * itlb miss interruption handler (parisc 1.1 - 32 bit)
@@ -236,7 +239,7 @@
 
 	.macro	itlb_20 code
 	mfctl	%pcsq, spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	b	itlb_miss_20w
 #else
 	b	itlb_miss_20
@@ -246,7 +249,7 @@
 	.align	32
 	.endm
 
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 	/*
 	 * naitlb miss interruption handler (parisc 1.1 - 32 bit)
 	 *
@@ -283,7 +286,7 @@
 	.macro	naitlb_20 code
 
 	mfctl	%isr,spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	b	itlb_miss_20w
 #else
 	b	itlb_miss_20
@@ -296,7 +299,7 @@
 	.align	32
 	.endm
 
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 	/*
 	 * dtlb miss interruption handler (parisc 1.1 - 32 bit)
 	 */
@@ -318,7 +321,7 @@
 	.macro	dtlb_20 code
 
 	mfctl	%isr, spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	b	dtlb_miss_20w
 #else
 	b	dtlb_miss_20
@@ -328,7 +331,7 @@
 	.align	32
 	.endm
 
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 	/* nadtlb miss interruption handler (parisc 1.1 - 32 bit) */
 
 	.macro	nadtlb_11 code
@@ -346,7 +349,7 @@
 	.macro	nadtlb_20 code
 
 	mfctl	%isr,spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	b	nadtlb_miss_20w
 #else
 	b	nadtlb_miss_20
@@ -356,7 +359,7 @@
 	.align	32
 	.endm
 
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 	/*
 	 * dirty bit trap interruption handler (parisc 1.1 - 32 bit)
 	 */
@@ -378,7 +381,7 @@
 	.macro	dbit_20 code
 
 	mfctl	%isr,spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	b	dbit_trap_20w
 #else
 	b	dbit_trap_20
@@ -391,7 +394,7 @@
 	/* The following are simple 32 vs 64 bit instruction
 	 * abstractions for the macros */
 	.macro	EXTR	reg1,start,length,reg2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	extrd,u	\reg1,32+\start,\length,\reg2
 #else
 	extrw,u	\reg1,\start,\length,\reg2
@@ -399,7 +402,7 @@
 	.endm
 
 	.macro	DEP	reg1,start,length,reg2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	depd	\reg1,32+\start,\length,\reg2
 #else
 	depw	\reg1,\start,\length,\reg2
@@ -407,7 +410,7 @@
 	.endm
 
 	.macro	DEPI	val,start,length,reg
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	depdi	\val,32+\start,\length,\reg
 #else
 	depwi	\val,\start,\length,\reg
@@ -418,7 +421,7 @@
 	 * fault.  We have to extract this and place it in the va,
 	 * zeroing the corresponding bits in the space register */
 	.macro	space_adjust	spc,va,tmp
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	extrd,u	\spc,63,SPACEID_SHIFT,\tmp
 	depd	%r0,63,SPACEID_SHIFT,\spc
 	depd	\tmp,31,SPACEID_SHIFT,\va
@@ -476,7 +479,7 @@
 	bb,>=,n		\pmd,_PxD_PRESENT_BIT,\fault
 	DEP		%r0,31,PxD_FLAG_SHIFT,\pmd	/* clear flags */
 	copy		\pmd,%r9
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	shld		%r9,PxD_VALUE_SHIFT,\pmd
 #else
 	shlw		%r9,PxD_VALUE_SHIFT,\pmd
@@ -607,7 +610,7 @@
 	.macro	do_alias	spc,tmp,tmp1,va,pte,prot,fault
 	cmpib,COND(<>),n 0,\spc,\fault
 	ldil	L%(TMPALIAS_MAP_START),\tmp
-#if defined(__LP64__) && (TMPALIAS_MAP_START >= 0x80000000)
+#if defined(CONFIG_64BIT) && (TMPALIAS_MAP_START >= 0x80000000)
 	/* on LP64, ldi will sign extend into the upper 32 bits,
 	 * which is behaviour we don't want */
 	depdi	0,31,32,\tmp
@@ -621,7 +624,7 @@
 	 * OK, it is in the temp alias region, check whether "from" or "to".
 	 * Check "subtle" note in pacache.S re: r23/r26.
 	 */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	extrd,u,*=	\va,41,1,%r0
 #else
 	extrw,u,=	\va,9,1,%r0
@@ -688,7 +691,7 @@ fault_vector_20:
 	def		30
 	def		31
 
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
 
 	.export fault_vector_11
 
@@ -761,7 +764,7 @@ __kernel_thread:
 
 	copy	%r30, %r1
 	ldo	PT_SZ_ALGN(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	/* Yo, function pointers in wide mode are little structs... -PB */
 	ldd	24(%r26), %r2
 	STREG	%r2, PT_GR27(%r1)	/* Store childs %dp */
@@ -777,7 +780,7 @@ __kernel_thread:
 	or	%r26, %r24, %r26	/* will have kernel mappings. */
 	ldi	1, %r25			/* stack_start, signals kernel thread */
 	stw	%r0, -52(%r30)		/* user_tid */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
 	BL	do_fork, %r2
@@ -806,7 +809,7 @@ ret_from_kernel_thread:
 
 	LDREG	TI_TASK-THREAD_SZ_ALGN(%r30), %r1
 	LDREG	TASK_PT_GR25(%r1), %r26
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	LDREG	TASK_PT_GR27(%r1), %r27
 	LDREG	TASK_PT_GR22(%r1), %r22
 #endif
@@ -814,11 +817,16 @@ ret_from_kernel_thread:
 	ble	0(%sr7, %r1)
 	copy	%r31, %r2
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 	loadgp				/* Thread could have been in a module */
 #endif
+#ifndef CONFIG_64BIT
 	b	sys_exit
+#else
+	load32	sys_exit, %r1
+	bv	%r0(%r1)
+#endif
 	ldi	0, %r26
 
 	.import sys_execve, code
@@ -830,7 +838,7 @@ __execve:
 	STREG	%r26, PT_GR26(%r16)
 	STREG	%r25, PT_GR25(%r16)
 	STREG	%r24, PT_GR24(%r16)
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
 	BL	sys_execve, %r2
@@ -855,6 +863,7 @@ __execve:
 _switch_to:
 	STREG	%r2, -RP_OFFSET(%r30)
 
+	callee_save_float
 	callee_save
 
 	load32	_switch_to_ret, %r2
@@ -871,6 +880,7 @@ _switch_to:
 _switch_to_ret:
 	mtctl	%r0, %cr0		/* Needed for single stepping */
 	callee_rest
+	callee_rest_float
 
 	LDREG	-RP_OFFSET(%r30), %r2
 	bv	%r0(%r2)
@@ -888,9 +898,6 @@ _switch_to_ret:
 	 * this way, then we will need to copy %sr3 in to PT_SR[3..7], and
 	 * adjust IASQ[0..1].
 	 *
-	 * Note that the following code uses a "relied upon translation".
-	 * See the parisc ACD for details. The ssm is necessary due to a
-	 * PCXT bug.
 	 */
 
 	.align 4096
@@ -911,7 +918,7 @@ syscall_exit_rfi:
 	STREG	%r19,PT_IAOQ1(%r16)
 	LDREG	PT_PSW(%r16),%r19
 	load32	USER_PSW_MASK,%r1
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	load32	USER_PSW_HI_MASK,%r20
 	depd	%r20,31,32,%r1
 #endif
@@ -955,7 +962,7 @@ intr_return:
 	/* shift left ____cacheline_aligned (aka L1_CACHE_BYTES) amount
 	** irq_stat[] is defined using ____cacheline_aligned.
 	*/
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	shld	%r1, 6, %r20
 #else
 	shlw	%r1, 5, %r20
@@ -963,9 +970,6 @@ intr_return:
 	add	%r19,%r20,%r19	/* now have &irq_stat[smp_processor_id()] */
 #endif /* CONFIG_SMP */
 
-	LDREG	IRQSTAT_SIRQ_PEND(%r19),%r20	/* hardirq.h: unsigned long */
-	cmpib,<>,n 0,%r20,intr_do_softirq	/* forward */
-
 intr_check_resched:
 
 	/* check for reschedule */
@@ -985,24 +989,19 @@ intr_restore:
 	rest_fp	%r1
 	rest_general %r29
 
-	/* Create a "relied upon translation" PA 2.0 Arch. F-5 */
-	ssm	0,%r0
-	nop
-	nop
-	nop
-	nop
-	nop
-	nop
-	nop
+	/* inverse of virt_map */
+	pcxt_ssm_bug
+	rsm	PSW_SM_QUIET,%r0	/* prepare for rfi */
 	tophys_r1 %r29
-	rsm	(PSW_SM_Q|PSW_SM_P|PSW_SM_D|PSW_SM_I),%r0
 
 	/* Restore space id's and special cr's from PT_REGS
-	 * structure pointed to by r29 */
+	 * structure pointed to by r29
+	 */
 	rest_specials %r29
 
-	/* Important: Note that rest_stack restores r29
-	 * last (we are using it)! It also restores r1 and r30. */
+	/* IMPORTANT: rest_stack restores r29 last (we are using it)!
+	 * It also restores r1 and r30.
+	 */
 	rest_stack
 
 	rfi
@@ -1015,17 +1014,6 @@ intr_restore:
 	nop
 	nop
 
-	.import do_softirq,code
-intr_do_softirq:
-	bl	do_softirq,%r2
-#ifdef __LP64__
-	ldo	-16(%r30),%r29		/* Reference param save area */
-#else
-	nop
-#endif
-	b	intr_check_resched
-	nop
-
 	.import schedule,code
 intr_do_resched:
 	/* Only do reschedule if we are returning to user space */
@@ -1036,12 +1024,17 @@ intr_do_resched:
 	CMPIB=	0,%r20,intr_restore	/* backward */
 	nop
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
 
 	ldil	L%intr_check_sig, %r2
+#ifndef CONFIG_64BIT
 	b	schedule
+#else
+	load32	schedule, %r20
+	bv	%r0(%r20)
+#endif
 	ldo	R%intr_check_sig(%r2), %r2
 
 
@@ -1064,7 +1057,7 @@ intr_do_signal:
 
 	copy	%r0, %r24		/* unsigned long in_syscall */
 	copy	%r16, %r25		/* struct pt_regs *regs */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
 
@@ -1088,7 +1081,7 @@ intr_extint:
 	mfctl	%cr31,%r1
 	copy	%r30,%r17
 	/* FIXME! depi below has hardcoded idea of interrupt stack size (32k)*/
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	depdi	0,63,15,%r17
 #else
 	depi	0,31,15,%r17
@@ -1115,7 +1108,7 @@ intr_extint:
 
 	ldil	L%intr_return, %r2
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29	/* Reference param save area */
 #endif
 
@@ -1153,15 +1146,17 @@ intr_save:
 
 	CMPIB=,n	6,%r26,skip_save_ior
 
-	/* save_specials left ipsw value in r8 for us to test */
 
 	mfctl	%cr20, %r16	/* isr */
+	nop			/* serialize mfctl on PA 2.0 to avoid 4 cycle penalty */
 	mfctl	%cr21, %r17	/* ior */
 
-#ifdef __LP64__
+
+#ifdef CONFIG_64BIT
 	/*
 	 * If the interrupted code was running with W bit off (32 bit),
 	 * clear the b bits (bits 0 & 1) in the ior.
+	 * save_specials left ipsw value in r8 for us to test.
 	 */
 	extrd,u,*<>	%r8,PSW_W_BIT,1,%r0
 	depdi	0,1,2,%r17
@@ -1192,7 +1187,7 @@ skip_save_ior:
 	loadgp
 
 	copy	%r29, %r25	/* arg1 is pt_regs */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29	/* Reference param save area */
 #endif
 
@@ -1230,7 +1225,7 @@ skip_save_ior:
 	spc = r24	/* space for which the trap occured */
 	ptp = r25	/* page directory/page table pointer */
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 
 dtlb_miss_20w:
 	space_adjust	spc,va,t0
@@ -1487,10 +1482,10 @@ nadtlb_emulate:
 	add,l	%r1,%r24,%r1	/* doesn't affect c/b bits */
 
 nadtlb_nullify:
-	mfctl	%cr22,%r8	/* Get ipsw */
+	mfctl	%ipsw,%r8
 	ldil	L%PSW_N,%r9
 	or	%r8,%r9,%r8	/* Set PSW_N */
-	mtctl	%r8,%cr22
+	mtctl	%r8,%ipsw
 
 	rfir
 	nop
@@ -1521,7 +1516,7 @@ nadtlb_probe_check:
 	nop
 
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 itlb_miss_20w:
 
 	/*
@@ -1588,7 +1583,7 @@ itlb_miss_20:
 
 #endif
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 
 dbit_trap_20w:
 	space_adjust	spc,va,t0
@@ -1797,7 +1792,7 @@ sys_fork_wrapper:
 
 	STREG	%r2,-RP_OFFSET(%r30)
 	ldo	FRAME_SIZE(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
 
@@ -1847,7 +1842,7 @@ sys_clone_wrapper:
 
 	STREG	%r2,-RP_OFFSET(%r30)
 	ldo	FRAME_SIZE(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
 
@@ -1869,7 +1864,7 @@ sys_vfork_wrapper:
 
 	STREG	%r2,-RP_OFFSET(%r30)
 	ldo	FRAME_SIZE(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
 
@@ -1897,10 +1892,10 @@ sys_vfork_wrapper:
 
 	STREG	%r2,-RP_OFFSET(%r30)
 	ldo	FRAME_SIZE(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
-	bl	\execve,%r2
+	BL	\execve,%r2
 	copy	%r1,%arg0
 
 	ldo	-FRAME_SIZE(%r30),%r30
@@ -1923,7 +1918,7 @@ error_\execve:
 sys_execve_wrapper:
 	execve_wrapper sys_execve
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	.export sys32_execve_wrapper
 	.import sys32_execve
 
@@ -1937,7 +1932,7 @@ sys_rt_sigreturn_wrapper:
 	ldo	TASK_REGS(%r26),%r26	/* get pt regs */
 	/* Don't save regs, we are going to restore them from sigcontext. */
 	STREG	%r2, -RP_OFFSET(%r30)
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	FRAME_SIZE(%r30), %r30
 	BL	sys_rt_sigreturn,%r2
 	ldo	-16(%r30),%r29		/* Reference param save area */
@@ -1968,7 +1963,7 @@ sys_sigaltstack_wrapper:
 	ldo	TASK_REGS(%r1),%r24	/* get pt regs */
 	LDREG	TASK_PT_GR30(%r24),%r24
 	STREG	%r2, -RP_OFFSET(%r30)
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	FRAME_SIZE(%r30), %r30
 	b,l	do_sigaltstack,%r2
 	ldo	-16(%r30),%r29		/* Reference param save area */
@@ -1982,7 +1977,7 @@ sys_sigaltstack_wrapper:
 	bv	%r0(%r2)
 	nop
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	.export sys32_sigaltstack_wrapper
 sys32_sigaltstack_wrapper:
 	/* Get the user stack pointer */
@@ -2006,7 +2001,7 @@ sys_rt_sigsuspend_wrapper:
 	reg_save %r24
 
 	STREG	%r2, -RP_OFFSET(%r30)
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	FRAME_SIZE(%r30), %r30
 	b,l	sys_rt_sigsuspend,%r2
 	ldo	-16(%r30),%r29		/* Reference param save area */
@@ -2079,7 +2074,7 @@ syscall_check_bh:
 	ldw	TI_CPU-THREAD_SZ_ALGN-FRAME_SIZE(%r30),%r26	/* cpu # */
 
 	/* shift left ____cacheline_aligned (aka L1_CACHE_BYTES) bits */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	shld	%r26, 6, %r20
 #else
 	shlw	%r26, 5, %r20
@@ -2087,9 +2082,6 @@ syscall_check_bh:
 	add	%r19,%r20,%r19	/* now have &irq_stat[smp_processor_id()] */
 #endif /* CONFIG_SMP */
 
-	LDREG	IRQSTAT_SIRQ_PEND(%r19),%r20	/* hardirq.h: unsigned long */
-	cmpib,<>,n 0,%r20,syscall_do_softirq	/* forward */
-
 syscall_check_resched:
 
 	/* check for reschedule */
@@ -2144,7 +2136,7 @@ syscall_restore:
 
 	depi	3,31,2,%r31	/* ensure return to user mode. */
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	/* decide whether to reset the wide mode bit
 	 *
 	 * For a syscall, the W bit is stored in the lowest bit
@@ -2227,20 +2219,10 @@ pt_regs_ok:
 	b	intr_restore
 	nop
 
-	.import do_softirq,code
-syscall_do_softirq:
-	bl	do_softirq,%r2
-	nop
-	/* NOTE: We enable I-bit incase we schedule later,
-	 * and we might be going back to userspace if we were
-	 * traced. */
-	b	syscall_check_resched
-	ssm	PSW_SM_I, %r0	/* do_softirq returns with I bit off */
-
 	.import schedule,code
 syscall_do_resched:
 	BL	schedule,%r2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29	/* Reference param save area */
 #else
 	nop
@@ -2260,7 +2242,7 @@ syscall_do_signal:
 
 	ldi	1, %r24			/* unsigned long in_syscall */
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	ldo	-16(%r30),%r29		/* Reference param save area */
 #endif
 	BL	do_signal,%r2