diff options
author | Kyle McMartin <kyle@mcmartin.ca> | 2008-05-15 10:53:57 -0400 |
---|---|---|
committer | Kyle McMartin <kyle@mcmartin.ca> | 2008-05-15 11:03:43 -0400 |
commit | 872f6debcae63309eb39bfc2cc9462fb83450ee0 (patch) | |
tree | 7a1bb365c00e791b8e3d8e848b6f0e97e9b3ab69 /arch/parisc/kernel | |
parent | f54d8a1b3fef79bb1aa2f0840dd356ce7bb180f9 (diff) |
parisc: use conditional macro for 64-bit wide ops
This work enables us to remove -traditional from $AFLAGS on
parisc.
Signed-off-by: Kyle McMartin <kyle@mcmartin.ca>
Diffstat (limited to 'arch/parisc/kernel')
-rw-r--r-- | arch/parisc/kernel/entry.S | 46 | ||||
-rw-r--r-- | arch/parisc/kernel/pacache.S | 70 |
2 files changed, 54 insertions(+), 62 deletions(-)
diff --git a/arch/parisc/kernel/entry.S b/arch/parisc/kernel/entry.S index 1a3935e61ab7..5d0837458c19 100644 --- a/arch/parisc/kernel/entry.S +++ b/arch/parisc/kernel/entry.S | |||
@@ -41,16 +41,8 @@ | |||
41 | #include <linux/init.h> | 41 | #include <linux/init.h> |
42 | 42 | ||
43 | #ifdef CONFIG_64BIT | 43 | #ifdef CONFIG_64BIT |
44 | #define CMPIB cmpib,* | ||
45 | #define CMPB cmpb,* | ||
46 | #define COND(x) *x | ||
47 | |||
48 | .level 2.0w | 44 | .level 2.0w |
49 | #else | 45 | #else |
50 | #define CMPIB cmpib, | ||
51 | #define CMPB cmpb, | ||
52 | #define COND(x) x | ||
53 | |||
54 | .level 2.0 | 46 | .level 2.0 |
55 | #endif | 47 | #endif |
56 | 48 | ||
@@ -958,9 +950,9 @@ intr_check_sig: | |||
958 | * Only do signals if we are returning to user space | 950 | * Only do signals if we are returning to user space |
959 | */ | 951 | */ |
960 | LDREG PT_IASQ0(%r16), %r20 | 952 | LDREG PT_IASQ0(%r16), %r20 |
961 | CMPIB=,n 0,%r20,intr_restore /* backward */ | 953 | cmpib,COND(=),n 0,%r20,intr_restore /* backward */ |
962 | LDREG PT_IASQ1(%r16), %r20 | 954 | LDREG PT_IASQ1(%r16), %r20 |
963 | CMPIB=,n 0,%r20,intr_restore /* backward */ | 955 | cmpib,COND(=),n 0,%r20,intr_restore /* backward */ |
964 | 956 | ||
965 | copy %r0, %r25 /* long in_syscall = 0 */ | 957 | copy %r0, %r25 /* long in_syscall = 0 */ |
966 | #ifdef CONFIG_64BIT | 958 | #ifdef CONFIG_64BIT |
@@ -1014,10 +1006,10 @@ intr_do_resched: | |||
1014 | * we jump back to intr_restore. | 1006 | * we jump back to intr_restore. |
1015 | */ | 1007 | */ |
1016 | LDREG PT_IASQ0(%r16), %r20 | 1008 | LDREG PT_IASQ0(%r16), %r20 |
1017 | CMPIB= 0, %r20, intr_do_preempt | 1009 | cmpib,COND(=) 0, %r20, intr_do_preempt |
1018 | nop | 1010 | nop |
1019 | LDREG PT_IASQ1(%r16), %r20 | 1011 | LDREG PT_IASQ1(%r16), %r20 |
1020 | CMPIB= 0, %r20, intr_do_preempt | 1012 | cmpib,COND(=) 0, %r20, intr_do_preempt |
1021 | nop | 1013 | nop |
1022 | 1014 | ||
1023 | #ifdef CONFIG_64BIT | 1015 | #ifdef CONFIG_64BIT |
@@ -1046,7 +1038,7 @@ intr_do_preempt: | |||
1046 | /* current_thread_info()->preempt_count */ | 1038 | /* current_thread_info()->preempt_count */ |
1047 | mfctl %cr30, %r1 | 1039 | mfctl %cr30, %r1 |
1048 | LDREG TI_PRE_COUNT(%r1), %r19 | 1040 | LDREG TI_PRE_COUNT(%r1), %r19 |
1049 | CMPIB<> 0, %r19, intr_restore /* if preempt_count > 0 */ | 1041 | cmpib,COND(<>) 0, %r19, intr_restore /* if preempt_count > 0 */ |
1050 | nop /* prev insn branched backwards */ | 1042 | nop /* prev insn branched backwards */ |
1051 | 1043 | ||
1052 | /* check if we interrupted a critical path */ | 1044 | /* check if we interrupted a critical path */ |
@@ -1065,7 +1057,7 @@ intr_do_preempt: | |||
1065 | */ | 1057 | */ |
1066 | 1058 | ||
1067 | intr_extint: | 1059 | intr_extint: |
1068 | CMPIB=,n 0,%r16,1f | 1060 | cmpib,COND(=),n 0,%r16,1f |
1069 | 1061 | ||
1070 | get_stack_use_cr30 | 1062 | get_stack_use_cr30 |
1071 | b,n 2f | 1063 | b,n 2f |
@@ -1100,7 +1092,7 @@ ENDPROC(syscall_exit_rfi) | |||
1100 | 1092 | ||
1101 | ENTRY(intr_save) /* for os_hpmc */ | 1093 | ENTRY(intr_save) /* for os_hpmc */ |
1102 | mfsp %sr7,%r16 | 1094 | mfsp %sr7,%r16 |
1103 | CMPIB=,n 0,%r16,1f | 1095 | cmpib,COND(=),n 0,%r16,1f |
1104 | get_stack_use_cr30 | 1096 | get_stack_use_cr30 |
1105 | b 2f | 1097 | b 2f |
1106 | copy %r8,%r26 | 1098 | copy %r8,%r26 |
@@ -1122,7 +1114,7 @@ ENTRY(intr_save) /* for os_hpmc */ | |||
1122 | * adjust isr/ior below. | 1114 | * adjust isr/ior below. |
1123 | */ | 1115 | */ |
1124 | 1116 | ||
1125 | CMPIB=,n 6,%r26,skip_save_ior | 1117 | cmpib,COND(=),n 6,%r26,skip_save_ior |
1126 | 1118 | ||
1127 | 1119 | ||
1128 | mfctl %cr20, %r16 /* isr */ | 1120 | mfctl %cr20, %r16 /* isr */ |
@@ -1451,11 +1443,11 @@ nadtlb_emulate: | |||
1451 | bb,>=,n %r9,26,nadtlb_nullify /* m bit not set, just nullify */ | 1443 | bb,>=,n %r9,26,nadtlb_nullify /* m bit not set, just nullify */ |
1452 | BL get_register,%r25 | 1444 | BL get_register,%r25 |
1453 | extrw,u %r9,15,5,%r8 /* Get index register # */ | 1445 | extrw,u %r9,15,5,%r8 /* Get index register # */ |
1454 | CMPIB=,n -1,%r1,nadtlb_fault /* have to use slow path */ | 1446 | cmpib,COND(=),n -1,%r1,nadtlb_fault /* have to use slow path */ |
1455 | copy %r1,%r24 | 1447 | copy %r1,%r24 |
1456 | BL get_register,%r25 | 1448 | BL get_register,%r25 |
1457 | extrw,u %r9,10,5,%r8 /* Get base register # */ | 1449 | extrw,u %r9,10,5,%r8 /* Get base register # */ |
1458 | CMPIB=,n -1,%r1,nadtlb_fault /* have to use slow path */ | 1450 | cmpib,COND(=),n -1,%r1,nadtlb_fault /* have to use slow path */ |
1459 | BL set_register,%r25 | 1451 | BL set_register,%r25 |
1460 | add,l %r1,%r24,%r1 /* doesn't affect c/b bits */ | 1452 | add,l %r1,%r24,%r1 /* doesn't affect c/b bits */ |
1461 | 1453 | ||
@@ -1487,7 +1479,7 @@ nadtlb_probe_check: | |||
1487 | cmpb,<>,n %r16,%r17,nadtlb_fault /* Must be probe,[rw]*/ | 1479 | cmpb,<>,n %r16,%r17,nadtlb_fault /* Must be probe,[rw]*/ |
1488 | BL get_register,%r25 /* Find the target register */ | 1480 | BL get_register,%r25 /* Find the target register */ |
1489 | extrw,u %r9,31,5,%r8 /* Get target register */ | 1481 | extrw,u %r9,31,5,%r8 /* Get target register */ |
1490 | CMPIB=,n -1,%r1,nadtlb_fault /* have to use slow path */ | 1482 | cmpib,COND(=),n -1,%r1,nadtlb_fault /* have to use slow path */ |
1491 | BL set_register,%r25 | 1483 | BL set_register,%r25 |
1492 | copy %r0,%r1 /* Write zero to target register */ | 1484 | copy %r0,%r1 /* Write zero to target register */ |
1493 | b nadtlb_nullify /* Nullify return insn */ | 1485 | b nadtlb_nullify /* Nullify return insn */ |
@@ -1571,12 +1563,12 @@ dbit_trap_20w: | |||
1571 | L3_ptep ptp,pte,t0,va,dbit_fault | 1563 | L3_ptep ptp,pte,t0,va,dbit_fault |
1572 | 1564 | ||
1573 | #ifdef CONFIG_SMP | 1565 | #ifdef CONFIG_SMP |
1574 | CMPIB=,n 0,spc,dbit_nolock_20w | 1566 | cmpib,COND(=),n 0,spc,dbit_nolock_20w |
1575 | load32 PA(pa_dbit_lock),t0 | 1567 | load32 PA(pa_dbit_lock),t0 |
1576 | 1568 | ||
1577 | dbit_spin_20w: | 1569 | dbit_spin_20w: |
1578 | LDCW 0(t0),t1 | 1570 | LDCW 0(t0),t1 |
1579 | cmpib,= 0,t1,dbit_spin_20w | 1571 | cmpib,COND(=) 0,t1,dbit_spin_20w |
1580 | nop | 1572 | nop |
1581 | 1573 | ||
1582 | dbit_nolock_20w: | 1574 | dbit_nolock_20w: |
@@ -1587,7 +1579,7 @@ dbit_nolock_20w: | |||
1587 | 1579 | ||
1588 | idtlbt pte,prot | 1580 | idtlbt pte,prot |
1589 | #ifdef CONFIG_SMP | 1581 | #ifdef CONFIG_SMP |
1590 | CMPIB=,n 0,spc,dbit_nounlock_20w | 1582 | cmpib,COND(=),n 0,spc,dbit_nounlock_20w |
1591 | ldi 1,t1 | 1583 | ldi 1,t1 |
1592 | stw t1,0(t0) | 1584 | stw t1,0(t0) |
1593 | 1585 | ||
@@ -1607,7 +1599,7 @@ dbit_trap_11: | |||
1607 | L2_ptep ptp,pte,t0,va,dbit_fault | 1599 | L2_ptep ptp,pte,t0,va,dbit_fault |
1608 | 1600 | ||
1609 | #ifdef CONFIG_SMP | 1601 | #ifdef CONFIG_SMP |
1610 | CMPIB=,n 0,spc,dbit_nolock_11 | 1602 | cmpib,COND(=),n 0,spc,dbit_nolock_11 |
1611 | load32 PA(pa_dbit_lock),t0 | 1603 | load32 PA(pa_dbit_lock),t0 |
1612 | 1604 | ||
1613 | dbit_spin_11: | 1605 | dbit_spin_11: |
@@ -1629,7 +1621,7 @@ dbit_nolock_11: | |||
1629 | 1621 | ||
1630 | mtsp t1, %sr1 /* Restore sr1 */ | 1622 | mtsp t1, %sr1 /* Restore sr1 */ |
1631 | #ifdef CONFIG_SMP | 1623 | #ifdef CONFIG_SMP |
1632 | CMPIB=,n 0,spc,dbit_nounlock_11 | 1624 | cmpib,COND(=),n 0,spc,dbit_nounlock_11 |
1633 | ldi 1,t1 | 1625 | ldi 1,t1 |
1634 | stw t1,0(t0) | 1626 | stw t1,0(t0) |
1635 | 1627 | ||
@@ -1647,7 +1639,7 @@ dbit_trap_20: | |||
1647 | L2_ptep ptp,pte,t0,va,dbit_fault | 1639 | L2_ptep ptp,pte,t0,va,dbit_fault |
1648 | 1640 | ||
1649 | #ifdef CONFIG_SMP | 1641 | #ifdef CONFIG_SMP |
1650 | CMPIB=,n 0,spc,dbit_nolock_20 | 1642 | cmpib,COND(=),n 0,spc,dbit_nolock_20 |
1651 | load32 PA(pa_dbit_lock),t0 | 1643 | load32 PA(pa_dbit_lock),t0 |
1652 | 1644 | ||
1653 | dbit_spin_20: | 1645 | dbit_spin_20: |
@@ -1666,7 +1658,7 @@ dbit_nolock_20: | |||
1666 | idtlbt pte,prot | 1658 | idtlbt pte,prot |
1667 | 1659 | ||
1668 | #ifdef CONFIG_SMP | 1660 | #ifdef CONFIG_SMP |
1669 | CMPIB=,n 0,spc,dbit_nounlock_20 | 1661 | cmpib,COND(=),n 0,spc,dbit_nounlock_20 |
1670 | ldi 1,t1 | 1662 | ldi 1,t1 |
1671 | stw t1,0(t0) | 1663 | stw t1,0(t0) |
1672 | 1664 | ||
@@ -1995,7 +1987,7 @@ ENTRY(syscall_exit) | |||
1995 | 1987 | ||
1996 | /* We can't use "CMPIB<> PER_HPUX" since "im5" field is sign extended */ | 1988 | /* We can't use "CMPIB<> PER_HPUX" since "im5" field is sign extended */ |
1997 | ldo -PER_HPUX(%r19), %r19 | 1989 | ldo -PER_HPUX(%r19), %r19 |
1998 | CMPIB<>,n 0,%r19,1f | 1990 | cmpib,COND(<>),n 0,%r19,1f |
1999 | 1991 | ||
2000 | /* Save other hpux returns if personality is PER_HPUX */ | 1992 | /* Save other hpux returns if personality is PER_HPUX */ |
2001 | STREG %r22,TASK_PT_GR22(%r1) | 1993 | STREG %r22,TASK_PT_GR22(%r1) |
diff --git a/arch/parisc/kernel/pacache.S b/arch/parisc/kernel/pacache.S index 7e4a33978907..e3246a5ca74f 100644 --- a/arch/parisc/kernel/pacache.S +++ b/arch/parisc/kernel/pacache.S | |||
@@ -86,7 +86,7 @@ ENTRY(flush_tlb_all_local) | |||
86 | LDREG ITLB_OFF_COUNT(%r1), %arg2 | 86 | LDREG ITLB_OFF_COUNT(%r1), %arg2 |
87 | LDREG ITLB_LOOP(%r1), %arg3 | 87 | LDREG ITLB_LOOP(%r1), %arg3 |
88 | 88 | ||
89 | ADDIB= -1, %arg3, fitoneloop /* Preadjust and test */ | 89 | addib,COND(=) -1, %arg3, fitoneloop /* Preadjust and test */ |
90 | movb,<,n %arg3, %r31, fitdone /* If loop < 0, skip */ | 90 | movb,<,n %arg3, %r31, fitdone /* If loop < 0, skip */ |
91 | copy %arg0, %r28 /* Init base addr */ | 91 | copy %arg0, %r28 /* Init base addr */ |
92 | 92 | ||
@@ -96,14 +96,14 @@ fitmanyloop: /* Loop if LOOP >= 2 */ | |||
96 | copy %arg2, %r29 /* Init middle loop count */ | 96 | copy %arg2, %r29 /* Init middle loop count */ |
97 | 97 | ||
98 | fitmanymiddle: /* Loop if LOOP >= 2 */ | 98 | fitmanymiddle: /* Loop if LOOP >= 2 */ |
99 | ADDIB> -1, %r31, fitmanymiddle /* Adjusted inner loop decr */ | 99 | addib,COND(>) -1, %r31, fitmanymiddle /* Adjusted inner loop decr */ |
100 | pitlbe 0(%sr1, %r28) | 100 | pitlbe 0(%sr1, %r28) |
101 | pitlbe,m %arg1(%sr1, %r28) /* Last pitlbe and addr adjust */ | 101 | pitlbe,m %arg1(%sr1, %r28) /* Last pitlbe and addr adjust */ |
102 | ADDIB> -1, %r29, fitmanymiddle /* Middle loop decr */ | 102 | addib,COND(>) -1, %r29, fitmanymiddle /* Middle loop decr */ |
103 | copy %arg3, %r31 /* Re-init inner loop count */ | 103 | copy %arg3, %r31 /* Re-init inner loop count */ |
104 | 104 | ||
105 | movb,tr %arg0, %r28, fitmanyloop /* Re-init base addr */ | 105 | movb,tr %arg0, %r28, fitmanyloop /* Re-init base addr */ |
106 | ADDIB<=,n -1, %r22, fitdone /* Outer loop count decr */ | 106 | addib,COND(<=),n -1, %r22, fitdone /* Outer loop count decr */ |
107 | 107 | ||
108 | fitoneloop: /* Loop if LOOP = 1 */ | 108 | fitoneloop: /* Loop if LOOP = 1 */ |
109 | mtsp %r20, %sr1 | 109 | mtsp %r20, %sr1 |
@@ -111,10 +111,10 @@ fitoneloop: /* Loop if LOOP = 1 */ | |||
111 | copy %arg2, %r29 /* init middle loop count */ | 111 | copy %arg2, %r29 /* init middle loop count */ |
112 | 112 | ||
113 | fitonemiddle: /* Loop if LOOP = 1 */ | 113 | fitonemiddle: /* Loop if LOOP = 1 */ |
114 | ADDIB> -1, %r29, fitonemiddle /* Middle loop count decr */ | 114 | addib,COND(>) -1, %r29, fitonemiddle /* Middle loop count decr */ |
115 | pitlbe,m %arg1(%sr1, %r28) /* pitlbe for one loop */ | 115 | pitlbe,m %arg1(%sr1, %r28) /* pitlbe for one loop */ |
116 | 116 | ||
117 | ADDIB> -1, %r22, fitoneloop /* Outer loop count decr */ | 117 | addib,COND(>) -1, %r22, fitoneloop /* Outer loop count decr */ |
118 | add %r21, %r20, %r20 /* increment space */ | 118 | add %r21, %r20, %r20 /* increment space */ |
119 | 119 | ||
120 | fitdone: | 120 | fitdone: |
@@ -129,7 +129,7 @@ fitdone: | |||
129 | LDREG DTLB_OFF_COUNT(%r1), %arg2 | 129 | LDREG DTLB_OFF_COUNT(%r1), %arg2 |
130 | LDREG DTLB_LOOP(%r1), %arg3 | 130 | LDREG DTLB_LOOP(%r1), %arg3 |
131 | 131 | ||
132 | ADDIB= -1, %arg3, fdtoneloop /* Preadjust and test */ | 132 | addib,COND(=) -1, %arg3, fdtoneloop /* Preadjust and test */ |
133 | movb,<,n %arg3, %r31, fdtdone /* If loop < 0, skip */ | 133 | movb,<,n %arg3, %r31, fdtdone /* If loop < 0, skip */ |
134 | copy %arg0, %r28 /* Init base addr */ | 134 | copy %arg0, %r28 /* Init base addr */ |
135 | 135 | ||
@@ -139,14 +139,14 @@ fdtmanyloop: /* Loop if LOOP >= 2 */ | |||
139 | copy %arg2, %r29 /* Init middle loop count */ | 139 | copy %arg2, %r29 /* Init middle loop count */ |
140 | 140 | ||
141 | fdtmanymiddle: /* Loop if LOOP >= 2 */ | 141 | fdtmanymiddle: /* Loop if LOOP >= 2 */ |
142 | ADDIB> -1, %r31, fdtmanymiddle /* Adjusted inner loop decr */ | 142 | addib,COND(>) -1, %r31, fdtmanymiddle /* Adjusted inner loop decr */ |
143 | pdtlbe 0(%sr1, %r28) | 143 | pdtlbe 0(%sr1, %r28) |
144 | pdtlbe,m %arg1(%sr1, %r28) /* Last pdtlbe and addr adjust */ | 144 | pdtlbe,m %arg1(%sr1, %r28) /* Last pdtlbe and addr adjust */ |
145 | ADDIB> -1, %r29, fdtmanymiddle /* Middle loop decr */ | 145 | addib,COND(>) -1, %r29, fdtmanymiddle /* Middle loop decr */ |
146 | copy %arg3, %r31 /* Re-init inner loop count */ | 146 | copy %arg3, %r31 /* Re-init inner loop count */ |
147 | 147 | ||
148 | movb,tr %arg0, %r28, fdtmanyloop /* Re-init base addr */ | 148 | movb,tr %arg0, %r28, fdtmanyloop /* Re-init base addr */ |
149 | ADDIB<=,n -1, %r22,fdtdone /* Outer loop count decr */ | 149 | addib,COND(<=),n -1, %r22,fdtdone /* Outer loop count decr */ |
150 | 150 | ||
151 | fdtoneloop: /* Loop if LOOP = 1 */ | 151 | fdtoneloop: /* Loop if LOOP = 1 */ |
152 | mtsp %r20, %sr1 | 152 | mtsp %r20, %sr1 |
@@ -154,10 +154,10 @@ fdtoneloop: /* Loop if LOOP = 1 */ | |||
154 | copy %arg2, %r29 /* init middle loop count */ | 154 | copy %arg2, %r29 /* init middle loop count */ |
155 | 155 | ||
156 | fdtonemiddle: /* Loop if LOOP = 1 */ | 156 | fdtonemiddle: /* Loop if LOOP = 1 */ |
157 | ADDIB> -1, %r29, fdtonemiddle /* Middle loop count decr */ | 157 | addib,COND(>) -1, %r29, fdtonemiddle /* Middle loop count decr */ |
158 | pdtlbe,m %arg1(%sr1, %r28) /* pdtlbe for one loop */ | 158 | pdtlbe,m %arg1(%sr1, %r28) /* pdtlbe for one loop */ |
159 | 159 | ||
160 | ADDIB> -1, %r22, fdtoneloop /* Outer loop count decr */ | 160 | addib,COND(>) -1, %r22, fdtoneloop /* Outer loop count decr */ |
161 | add %r21, %r20, %r20 /* increment space */ | 161 | add %r21, %r20, %r20 /* increment space */ |
162 | 162 | ||
163 | 163 | ||
@@ -210,18 +210,18 @@ ENTRY(flush_instruction_cache_local) | |||
210 | LDREG ICACHE_COUNT(%r1), %arg2 | 210 | LDREG ICACHE_COUNT(%r1), %arg2 |
211 | LDREG ICACHE_LOOP(%r1), %arg3 | 211 | LDREG ICACHE_LOOP(%r1), %arg3 |
212 | rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/ | 212 | rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/ |
213 | ADDIB= -1, %arg3, fioneloop /* Preadjust and test */ | 213 | addib,COND(=) -1, %arg3, fioneloop /* Preadjust and test */ |
214 | movb,<,n %arg3, %r31, fisync /* If loop < 0, do sync */ | 214 | movb,<,n %arg3, %r31, fisync /* If loop < 0, do sync */ |
215 | 215 | ||
216 | fimanyloop: /* Loop if LOOP >= 2 */ | 216 | fimanyloop: /* Loop if LOOP >= 2 */ |
217 | ADDIB> -1, %r31, fimanyloop /* Adjusted inner loop decr */ | 217 | addib,COND(>) -1, %r31, fimanyloop /* Adjusted inner loop decr */ |
218 | fice %r0(%sr1, %arg0) | 218 | fice %r0(%sr1, %arg0) |
219 | fice,m %arg1(%sr1, %arg0) /* Last fice and addr adjust */ | 219 | fice,m %arg1(%sr1, %arg0) /* Last fice and addr adjust */ |
220 | movb,tr %arg3, %r31, fimanyloop /* Re-init inner loop count */ | 220 | movb,tr %arg3, %r31, fimanyloop /* Re-init inner loop count */ |
221 | ADDIB<=,n -1, %arg2, fisync /* Outer loop decr */ | 221 | addib,COND(<=),n -1, %arg2, fisync /* Outer loop decr */ |
222 | 222 | ||
223 | fioneloop: /* Loop if LOOP = 1 */ | 223 | fioneloop: /* Loop if LOOP = 1 */ |
224 | ADDIB> -1, %arg2, fioneloop /* Outer loop count decr */ | 224 | addib,COND(>) -1, %arg2, fioneloop /* Outer loop count decr */ |
225 | fice,m %arg1(%sr1, %arg0) /* Fice for one loop */ | 225 | fice,m %arg1(%sr1, %arg0) /* Fice for one loop */ |
226 | 226 | ||
227 | fisync: | 227 | fisync: |
@@ -251,18 +251,18 @@ ENTRY(flush_data_cache_local) | |||
251 | LDREG DCACHE_COUNT(%r1), %arg2 | 251 | LDREG DCACHE_COUNT(%r1), %arg2 |
252 | LDREG DCACHE_LOOP(%r1), %arg3 | 252 | LDREG DCACHE_LOOP(%r1), %arg3 |
253 | rsm PSW_SM_I, %r22 | 253 | rsm PSW_SM_I, %r22 |
254 | ADDIB= -1, %arg3, fdoneloop /* Preadjust and test */ | 254 | addib,COND(=) -1, %arg3, fdoneloop /* Preadjust and test */ |
255 | movb,<,n %arg3, %r31, fdsync /* If loop < 0, do sync */ | 255 | movb,<,n %arg3, %r31, fdsync /* If loop < 0, do sync */ |
256 | 256 | ||
257 | fdmanyloop: /* Loop if LOOP >= 2 */ | 257 | fdmanyloop: /* Loop if LOOP >= 2 */ |
258 | ADDIB> -1, %r31, fdmanyloop /* Adjusted inner loop decr */ | 258 | addib,COND(>) -1, %r31, fdmanyloop /* Adjusted inner loop decr */ |
259 | fdce %r0(%sr1, %arg0) | 259 | fdce %r0(%sr1, %arg0) |
260 | fdce,m %arg1(%sr1, %arg0) /* Last fdce and addr adjust */ | 260 | fdce,m %arg1(%sr1, %arg0) /* Last fdce and addr adjust */ |
261 | movb,tr %arg3, %r31, fdmanyloop /* Re-init inner loop count */ | 261 | movb,tr %arg3, %r31, fdmanyloop /* Re-init inner loop count */ |
262 | ADDIB<=,n -1, %arg2, fdsync /* Outer loop decr */ | 262 | addib,COND(<=),n -1, %arg2, fdsync /* Outer loop decr */ |
263 | 263 | ||
264 | fdoneloop: /* Loop if LOOP = 1 */ | 264 | fdoneloop: /* Loop if LOOP = 1 */ |
265 | ADDIB> -1, %arg2, fdoneloop /* Outer loop count decr */ | 265 | addib,COND(>) -1, %arg2, fdoneloop /* Outer loop count decr */ |
266 | fdce,m %arg1(%sr1, %arg0) /* Fdce for one loop */ | 266 | fdce,m %arg1(%sr1, %arg0) /* Fdce for one loop */ |
267 | 267 | ||
268 | fdsync: | 268 | fdsync: |
@@ -343,7 +343,7 @@ ENTRY(copy_user_page_asm) | |||
343 | * non-taken backward branch. Note that .+4 is a backwards branch. | 343 | * non-taken backward branch. Note that .+4 is a backwards branch. |
344 | * The ldd should only get executed if the branch is taken. | 344 | * The ldd should only get executed if the branch is taken. |
345 | */ | 345 | */ |
346 | ADDIB>,n -1, %r1, 1b /* bundle 10 */ | 346 | addib,COND(>),n -1, %r1, 1b /* bundle 10 */ |
347 | ldd 0(%r25), %r19 /* start next loads */ | 347 | ldd 0(%r25), %r19 /* start next loads */ |
348 | 348 | ||
349 | #else | 349 | #else |
@@ -392,7 +392,7 @@ ENTRY(copy_user_page_asm) | |||
392 | stw %r21, 56(%r26) | 392 | stw %r21, 56(%r26) |
393 | stw %r22, 60(%r26) | 393 | stw %r22, 60(%r26) |
394 | ldo 64(%r26), %r26 | 394 | ldo 64(%r26), %r26 |
395 | ADDIB>,n -1, %r1, 1b | 395 | addib,COND(>),n -1, %r1, 1b |
396 | ldw 0(%r25), %r19 | 396 | ldw 0(%r25), %r19 |
397 | #endif | 397 | #endif |
398 | bv %r0(%r2) | 398 | bv %r0(%r2) |
@@ -516,7 +516,7 @@ ENTRY(copy_user_page_asm) | |||
516 | stw %r21, 56(%r28) | 516 | stw %r21, 56(%r28) |
517 | stw %r22, 60(%r28) | 517 | stw %r22, 60(%r28) |
518 | ldo 64(%r28), %r28 | 518 | ldo 64(%r28), %r28 |
519 | ADDIB> -1, %r1,1b | 519 | addib,COND(>) -1, %r1,1b |
520 | ldo 64(%r29), %r29 | 520 | ldo 64(%r29), %r29 |
521 | 521 | ||
522 | bv %r0(%r2) | 522 | bv %r0(%r2) |
@@ -575,7 +575,7 @@ ENTRY(__clear_user_page_asm) | |||
575 | std %r0, 104(%r28) | 575 | std %r0, 104(%r28) |
576 | std %r0, 112(%r28) | 576 | std %r0, 112(%r28) |
577 | std %r0, 120(%r28) | 577 | std %r0, 120(%r28) |
578 | ADDIB> -1, %r1, 1b | 578 | addib,COND(>) -1, %r1, 1b |
579 | ldo 128(%r28), %r28 | 579 | ldo 128(%r28), %r28 |
580 | 580 | ||
581 | #else /* ! CONFIG_64BIT */ | 581 | #else /* ! CONFIG_64BIT */ |
@@ -598,7 +598,7 @@ ENTRY(__clear_user_page_asm) | |||
598 | stw %r0, 52(%r28) | 598 | stw %r0, 52(%r28) |
599 | stw %r0, 56(%r28) | 599 | stw %r0, 56(%r28) |
600 | stw %r0, 60(%r28) | 600 | stw %r0, 60(%r28) |
601 | ADDIB> -1, %r1, 1b | 601 | addib,COND(>) -1, %r1, 1b |
602 | ldo 64(%r28), %r28 | 602 | ldo 64(%r28), %r28 |
603 | #endif /* CONFIG_64BIT */ | 603 | #endif /* CONFIG_64BIT */ |
604 | 604 | ||
@@ -641,7 +641,7 @@ ENTRY(flush_kernel_dcache_page_asm) | |||
641 | fdc,m %r23(%r26) | 641 | fdc,m %r23(%r26) |
642 | fdc,m %r23(%r26) | 642 | fdc,m %r23(%r26) |
643 | fdc,m %r23(%r26) | 643 | fdc,m %r23(%r26) |
644 | CMPB<< %r26, %r25,1b | 644 | cmpb,COND(<<) %r26, %r25,1b |
645 | fdc,m %r23(%r26) | 645 | fdc,m %r23(%r26) |
646 | 646 | ||
647 | sync | 647 | sync |
@@ -684,7 +684,7 @@ ENTRY(flush_user_dcache_page) | |||
684 | fdc,m %r23(%sr3, %r26) | 684 | fdc,m %r23(%sr3, %r26) |
685 | fdc,m %r23(%sr3, %r26) | 685 | fdc,m %r23(%sr3, %r26) |
686 | fdc,m %r23(%sr3, %r26) | 686 | fdc,m %r23(%sr3, %r26) |
687 | CMPB<< %r26, %r25,1b | 687 | cmpb,COND(<<) %r26, %r25,1b |
688 | fdc,m %r23(%sr3, %r26) | 688 | fdc,m %r23(%sr3, %r26) |
689 | 689 | ||
690 | sync | 690 | sync |
@@ -727,7 +727,7 @@ ENTRY(flush_user_icache_page) | |||
727 | fic,m %r23(%sr3, %r26) | 727 | fic,m %r23(%sr3, %r26) |
728 | fic,m %r23(%sr3, %r26) | 728 | fic,m %r23(%sr3, %r26) |
729 | fic,m %r23(%sr3, %r26) | 729 | fic,m %r23(%sr3, %r26) |
730 | CMPB<< %r26, %r25,1b | 730 | cmpb,COND(<<) %r26, %r25,1b |
731 | fic,m %r23(%sr3, %r26) | 731 | fic,m %r23(%sr3, %r26) |
732 | 732 | ||
733 | sync | 733 | sync |
@@ -770,7 +770,7 @@ ENTRY(purge_kernel_dcache_page) | |||
770 | pdc,m %r23(%r26) | 770 | pdc,m %r23(%r26) |
771 | pdc,m %r23(%r26) | 771 | pdc,m %r23(%r26) |
772 | pdc,m %r23(%r26) | 772 | pdc,m %r23(%r26) |
773 | CMPB<< %r26, %r25, 1b | 773 | cmpb,COND(<<) %r26, %r25, 1b |
774 | pdc,m %r23(%r26) | 774 | pdc,m %r23(%r26) |
775 | 775 | ||
776 | sync | 776 | sync |
@@ -834,7 +834,7 @@ ENTRY(flush_alias_page) | |||
834 | fdc,m %r23(%r28) | 834 | fdc,m %r23(%r28) |
835 | fdc,m %r23(%r28) | 835 | fdc,m %r23(%r28) |
836 | fdc,m %r23(%r28) | 836 | fdc,m %r23(%r28) |
837 | CMPB<< %r28, %r29, 1b | 837 | cmpb,COND(<<) %r28, %r29, 1b |
838 | fdc,m %r23(%r28) | 838 | fdc,m %r23(%r28) |
839 | 839 | ||
840 | sync | 840 | sync |
@@ -857,7 +857,7 @@ flush_user_dcache_range_asm: | |||
857 | ldo -1(%r23), %r21 | 857 | ldo -1(%r23), %r21 |
858 | ANDCM %r26, %r21, %r26 | 858 | ANDCM %r26, %r21, %r26 |
859 | 859 | ||
860 | 1: CMPB<<,n %r26, %r25, 1b | 860 | 1: cmpb,COND(<<),n %r26, %r25, 1b |
861 | fdc,m %r23(%sr3, %r26) | 861 | fdc,m %r23(%sr3, %r26) |
862 | 862 | ||
863 | sync | 863 | sync |
@@ -878,7 +878,7 @@ ENTRY(flush_kernel_dcache_range_asm) | |||
878 | ldo -1(%r23), %r21 | 878 | ldo -1(%r23), %r21 |
879 | ANDCM %r26, %r21, %r26 | 879 | ANDCM %r26, %r21, %r26 |
880 | 880 | ||
881 | 1: CMPB<<,n %r26, %r25,1b | 881 | 1: cmpb,COND(<<),n %r26, %r25,1b |
882 | fdc,m %r23(%r26) | 882 | fdc,m %r23(%r26) |
883 | 883 | ||
884 | sync | 884 | sync |
@@ -900,7 +900,7 @@ ENTRY(flush_user_icache_range_asm) | |||
900 | ldo -1(%r23), %r21 | 900 | ldo -1(%r23), %r21 |
901 | ANDCM %r26, %r21, %r26 | 901 | ANDCM %r26, %r21, %r26 |
902 | 902 | ||
903 | 1: CMPB<<,n %r26, %r25,1b | 903 | 1: cmpb,COND(<<),n %r26, %r25,1b |
904 | fic,m %r23(%sr3, %r26) | 904 | fic,m %r23(%sr3, %r26) |
905 | 905 | ||
906 | sync | 906 | sync |
@@ -943,7 +943,7 @@ ENTRY(flush_kernel_icache_page) | |||
943 | fic,m %r23(%sr4, %r26) | 943 | fic,m %r23(%sr4, %r26) |
944 | fic,m %r23(%sr4, %r26) | 944 | fic,m %r23(%sr4, %r26) |
945 | fic,m %r23(%sr4, %r26) | 945 | fic,m %r23(%sr4, %r26) |
946 | CMPB<< %r26, %r25, 1b | 946 | cmpb,COND(<<) %r26, %r25, 1b |
947 | fic,m %r23(%sr4, %r26) | 947 | fic,m %r23(%sr4, %r26) |
948 | 948 | ||
949 | sync | 949 | sync |
@@ -964,7 +964,7 @@ ENTRY(flush_kernel_icache_range_asm) | |||
964 | ldo -1(%r23), %r21 | 964 | ldo -1(%r23), %r21 |
965 | ANDCM %r26, %r21, %r26 | 965 | ANDCM %r26, %r21, %r26 |
966 | 966 | ||
967 | 1: CMPB<<,n %r26, %r25, 1b | 967 | 1: cmpb,COND(<<),n %r26, %r25, 1b |
968 | fic,m %r23(%sr4, %r26) | 968 | fic,m %r23(%sr4, %r26) |
969 | 969 | ||
970 | sync | 970 | sync |