path: root/arch/powerpc/lib
author	Paul Mackerras <paulus@samba.org>	2014-09-02 00:35:07 -0400
committer	Michael Ellerman <mpe@ellerman.id.au>	2014-09-25 09:14:51 -0400
commit	be96f63375a14ee8e690856ac77e579c75bd0bae (patch)
tree	ef054973ec062a761afcf10aa688e67ad047d7ff /arch/powerpc/lib
parent	ad72a279a2b874828d1b5070ef01cf6ee6b1d62c (diff)
powerpc: Split out instruction analysis part of emulate_step()
This splits out the instruction analysis part of emulate_step() into a separate analyse_instr() function, which decodes the instruction, but doesn't execute any load or store instructions. It does execute integer instructions and branches which can be executed purely by updating register values in the pt_regs struct. For other instructions, it returns the instruction type and other details in a new instruction_op struct. emulate_step() then uses that information to execute loads, stores, cache operations, mfmsr, mtmsr[d], and (on 64-bit) sc instructions.

The reason for doing this is so that the KVM code can use it instead of having its own separate instruction emulation code. Possibly the alignment interrupt handler could also use this.

Signed-off-by: Paul Mackerras <paulus@samba.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Diffstat (limited to 'arch/powerpc/lib')
-rw-r--r--	arch/powerpc/lib/sstep.c	897
1 file changed, 537 insertions(+), 360 deletions(-)
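
The new analyse_instr() communicates through a struct instruction_op, whose definition lives in arch/powerpc/include/asm/sstep.h and is therefore outside this diffstat. As an orientation aid before reading the diff, here is a sketch of that descriptor and its helper macros as they can be inferred from how the new code uses them; the field names and the UPDATE value (which the code below equates with instruction bit 0x40) come from the diff itself, while the remaining flag and size encodings are illustrative assumptions, not the authoritative header.

/*
 * Sketch only: the operation descriptor filled in by analyse_instr(),
 * reconstructed from its uses in sstep.c below.  The real definition is
 * in arch/powerpc/include/asm/sstep.h.
 */
struct instruction_op {
	int type;		/* instruction class, flags and access size */
	int reg;		/* target GPR/FPR/VMX/VSX register number */
	unsigned long val;	/* value to store, or MSR bit mask for MTMSR */
	unsigned long ea;	/* effective address for loads, stores, cacheops */
	int update_reg;		/* RA, written back by update-form accesses */
	int spr;		/* SPR number for MFSPR/MTSPR */
};

/*
 * Assumed layout of the type word: the instruction class (LOAD, STORE,
 * LARX, STCX, CACHEOP, BRANCH, ...) in the low bits, flag bits above it,
 * and the access size in a separate byte.  Only UPDATE == 0x40 is fixed
 * by the diff; the other values are placeholders.
 */
#define INSTR_TYPE_MASK	0x1f
#define SIGNEXT		0x20	/* sign-extend the loaded value */
#define UPDATE		0x40	/* update form: write ea back into RA */
#define BYTEREV		0x80	/* byte-reverse the value (lwbrx and friends) */
#define SIZE(n)		((n) << 8)
#define GETSIZE(w)	((w) >> 8)
#define MKOP(t, f, s)	((t) | (f) | SIZE(s))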
diff --git a/arch/powerpc/lib/sstep.c b/arch/powerpc/lib/sstep.c
index 5c09f365c842..3726a03179ab 100644
--- a/arch/powerpc/lib/sstep.c
+++ b/arch/powerpc/lib/sstep.c
@@ -98,13 +98,8 @@ static unsigned long __kprobes dform_ea(unsigned int instr, struct pt_regs *regs
98 98
99 ra = (instr >> 16) & 0x1f; 99 ra = (instr >> 16) & 0x1f;
100 ea = (signed short) instr; /* sign-extend */ 100 ea = (signed short) instr; /* sign-extend */
101 if (ra) { 101 if (ra)
102 ea += regs->gpr[ra]; 102 ea += regs->gpr[ra];
103 if (instr & 0x04000000) { /* update forms */
104 if ((instr>>26) != 47) /* stmw is not an update form */
105 regs->gpr[ra] = ea;
106 }
107 }
108 103
109 return truncate_if_32bit(regs->msr, ea); 104 return truncate_if_32bit(regs->msr, ea);
110} 105}
@@ -120,11 +115,8 @@ static unsigned long __kprobes dsform_ea(unsigned int instr, struct pt_regs *reg
120 115
121 ra = (instr >> 16) & 0x1f; 116 ra = (instr >> 16) & 0x1f;
122 ea = (signed short) (instr & ~3); /* sign-extend */ 117 ea = (signed short) (instr & ~3); /* sign-extend */
123 if (ra) { 118 if (ra)
124 ea += regs->gpr[ra]; 119 ea += regs->gpr[ra];
125 if ((instr & 3) == 1) /* update forms */
126 regs->gpr[ra] = ea;
127 }
128 120
129 return truncate_if_32bit(regs->msr, ea); 121 return truncate_if_32bit(regs->msr, ea);
130} 122}
@@ -133,8 +125,8 @@ static unsigned long __kprobes dsform_ea(unsigned int instr, struct pt_regs *reg
133/* 125/*
134 * Calculate effective address for an X-form instruction 126 * Calculate effective address for an X-form instruction
135 */ 127 */
136static unsigned long __kprobes xform_ea(unsigned int instr, struct pt_regs *regs, 128static unsigned long __kprobes xform_ea(unsigned int instr,
137 int do_update) 129 struct pt_regs *regs)
138{ 130{
139 int ra, rb; 131 int ra, rb;
140 unsigned long ea; 132 unsigned long ea;
@@ -142,11 +134,8 @@ static unsigned long __kprobes xform_ea(unsigned int instr, struct pt_regs *regs
142 ra = (instr >> 16) & 0x1f; 134 ra = (instr >> 16) & 0x1f;
143 rb = (instr >> 11) & 0x1f; 135 rb = (instr >> 11) & 0x1f;
144 ea = regs->gpr[rb]; 136 ea = regs->gpr[rb];
145 if (ra) { 137 if (ra)
146 ea += regs->gpr[ra]; 138 ea += regs->gpr[ra];
147 if (do_update) /* update forms */
148 regs->gpr[ra] = ea;
149 }
150 139
151 return truncate_if_32bit(regs->msr, ea); 140 return truncate_if_32bit(regs->msr, ea);
152} 141}
@@ -627,26 +616,27 @@ static void __kprobes do_cmp_unsigned(struct pt_regs *regs, unsigned long v1,
627#define ROTATE(x, n) ((n) ? (((x) << (n)) | ((x) >> (8 * sizeof(long) - (n)))) : (x)) 616#define ROTATE(x, n) ((n) ? (((x) << (n)) | ((x) >> (8 * sizeof(long) - (n)))) : (x))
628 617
629/* 618/*
630 * Emulate instructions that cause a transfer of control, 619 * Decode an instruction, and execute it if that can be done just by
631 * loads and stores, and a few other instructions. 620 * modifying *regs (i.e. integer arithmetic and logical instructions,
632 * Returns 1 if the step was emulated, 0 if not, 621 * branches, and barrier instructions).
633 * or -1 if the instruction is one that should not be stepped, 622 * Returns 1 if the instruction has been executed, or 0 if not.
634 * such as an rfid, or a mtmsrd that would clear MSR_RI. 623 * Sets *op to indicate what the instruction does.
635 */ 624 */
636int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr) 625int __kprobes analyse_instr(struct instruction_op *op, struct pt_regs *regs,
626 unsigned int instr)
637{ 627{
638 unsigned int opcode, ra, rb, rd, spr, u; 628 unsigned int opcode, ra, rb, rd, spr, u;
639 unsigned long int imm; 629 unsigned long int imm;
640 unsigned long int val, val2; 630 unsigned long int val, val2;
641 unsigned long int ea; 631 unsigned int mb, me, sh;
642 unsigned int cr, mb, me, sh;
643 int err;
644 unsigned long old_ra, val3;
645 long ival; 632 long ival;
646 633
634 op->type = COMPUTE;
635
647 opcode = instr >> 26; 636 opcode = instr >> 26;
648 switch (opcode) { 637 switch (opcode) {
649 case 16: /* bc */ 638 case 16: /* bc */
639 op->type = BRANCH;
650 imm = (signed short)(instr & 0xfffc); 640 imm = (signed short)(instr & 0xfffc);
651 if ((instr & 2) == 0) 641 if ((instr & 2) == 0)
652 imm += regs->nip; 642 imm += regs->nip;
@@ -659,26 +649,14 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
659 return 1; 649 return 1;
660#ifdef CONFIG_PPC64 650#ifdef CONFIG_PPC64
661 case 17: /* sc */ 651 case 17: /* sc */
662 /* 652 if ((instr & 0xfe2) == 2)
663 * N.B. this uses knowledge about how the syscall 653 op->type = SYSCALL;
664 * entry code works. If that is changed, this will 654 else
665 * need to be changed also. 655 op->type = UNKNOWN;
666 */ 656 return 0;
667 if (regs->gpr[0] == 0x1ebe &&
668 cpu_has_feature(CPU_FTR_REAL_LE)) {
669 regs->msr ^= MSR_LE;
670 goto instr_done;
671 }
672 regs->gpr[9] = regs->gpr[13];
673 regs->gpr[10] = MSR_KERNEL;
674 regs->gpr[11] = regs->nip + 4;
675 regs->gpr[12] = regs->msr & MSR_MASK;
676 regs->gpr[13] = (unsigned long) get_paca();
677 regs->nip = (unsigned long) &system_call_common;
678 regs->msr = MSR_KERNEL;
679 return 1;
680#endif 657#endif
681 case 18: /* b */ 658 case 18: /* b */
659 op->type = BRANCH;
682 imm = instr & 0x03fffffc; 660 imm = instr & 0x03fffffc;
683 if (imm & 0x02000000) 661 if (imm & 0x02000000)
684 imm -= 0x04000000; 662 imm -= 0x04000000;
@@ -693,6 +671,7 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
693 switch ((instr >> 1) & 0x3ff) { 671 switch ((instr >> 1) & 0x3ff) {
694 case 16: /* bclr */ 672 case 16: /* bclr */
695 case 528: /* bcctr */ 673 case 528: /* bcctr */
674 op->type = BRANCH;
696 imm = (instr & 0x400)? regs->ctr: regs->link; 675 imm = (instr & 0x400)? regs->ctr: regs->link;
697 regs->nip = truncate_if_32bit(regs->msr, regs->nip + 4); 676 regs->nip = truncate_if_32bit(regs->msr, regs->nip + 4);
698 imm = truncate_if_32bit(regs->msr, imm); 677 imm = truncate_if_32bit(regs->msr, imm);
@@ -703,9 +682,13 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
703 return 1; 682 return 1;
704 683
705 case 18: /* rfid, scary */ 684 case 18: /* rfid, scary */
706 return -1; 685 if (regs->msr & MSR_PR)
686 goto priv;
687 op->type = RFI;
688 return 0;
707 689
708 case 150: /* isync */ 690 case 150: /* isync */
691 op->type = BARRIER;
709 isync(); 692 isync();
710 goto instr_done; 693 goto instr_done;
711 694
@@ -731,6 +714,7 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
731 case 31: 714 case 31:
732 switch ((instr >> 1) & 0x3ff) { 715 switch ((instr >> 1) & 0x3ff) {
733 case 598: /* sync */ 716 case 598: /* sync */
717 op->type = BARRIER;
734#ifdef __powerpc64__ 718#ifdef __powerpc64__
735 switch ((instr >> 21) & 3) { 719 switch ((instr >> 21) & 3) {
736 case 1: /* lwsync */ 720 case 1: /* lwsync */
@@ -745,6 +729,7 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
745 goto instr_done; 729 goto instr_done;
746 730
747 case 854: /* eieio */ 731 case 854: /* eieio */
732 op->type = BARRIER;
748 eieio(); 733 eieio();
749 goto instr_done; 734 goto instr_done;
750 } 735 }
@@ -910,33 +895,30 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
910 switch ((instr >> 1) & 0x3ff) { 895 switch ((instr >> 1) & 0x3ff) {
911 case 83: /* mfmsr */ 896 case 83: /* mfmsr */
912 if (regs->msr & MSR_PR) 897 if (regs->msr & MSR_PR)
913 break; 898 goto priv;
914 regs->gpr[rd] = regs->msr & MSR_MASK; 899 op->type = MFMSR;
915 goto instr_done; 900 op->reg = rd;
901 return 0;
916 case 146: /* mtmsr */ 902 case 146: /* mtmsr */
917 if (regs->msr & MSR_PR) 903 if (regs->msr & MSR_PR)
918 break; 904 goto priv;
919 imm = regs->gpr[rd]; 905 op->type = MTMSR;
920 if ((imm & MSR_RI) == 0) 906 op->reg = rd;
921 /* can't step mtmsr that would clear MSR_RI */ 907 op->val = 0xffffffff & ~(MSR_ME | MSR_LE);
922 return -1; 908 return 0;
923 regs->msr = imm;
924 goto instr_done;
925#ifdef CONFIG_PPC64 909#ifdef CONFIG_PPC64
926 case 178: /* mtmsrd */ 910 case 178: /* mtmsrd */
927 /* only MSR_EE and MSR_RI get changed if bit 15 set */
928 /* mtmsrd doesn't change MSR_HV and MSR_ME */
929 if (regs->msr & MSR_PR) 911 if (regs->msr & MSR_PR)
930 break; 912 goto priv;
931 imm = (instr & 0x10000)? 0x8002: 0xefffffffffffefffUL; 913 op->type = MTMSR;
932 imm = (regs->msr & MSR_MASK & ~imm) 914 op->reg = rd;
933 | (regs->gpr[rd] & imm); 915 /* only MSR_EE and MSR_RI get changed if bit 15 set */
934 if ((imm & MSR_RI) == 0) 916 /* mtmsrd doesn't change MSR_HV, MSR_ME or MSR_LE */
935 /* can't step mtmsrd that would clear MSR_RI */ 917 imm = (instr & 0x10000)? 0x8002: 0xefffffffffffeffeUL;
936 return -1; 918 op->val = imm;
937 regs->msr = imm; 919 return 0;
938 goto instr_done;
939#endif 920#endif
921
940 case 19: /* mfcr */ 922 case 19: /* mfcr */
941 regs->gpr[rd] = regs->ccr; 923 regs->gpr[rd] = regs->ccr;
942 regs->gpr[rd] &= 0xffffffffUL; 924 regs->gpr[rd] &= 0xffffffffUL;
@@ -954,33 +936,43 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
954 goto instr_done; 936 goto instr_done;
955 937
956 case 339: /* mfspr */ 938 case 339: /* mfspr */
957 spr = (instr >> 11) & 0x3ff; 939 spr = ((instr >> 16) & 0x1f) | ((instr >> 6) & 0x3e0);
958 switch (spr) { 940 switch (spr) {
959 case 0x20: /* mfxer */ 941 case SPRN_XER: /* mfxer */
960 regs->gpr[rd] = regs->xer; 942 regs->gpr[rd] = regs->xer;
961 regs->gpr[rd] &= 0xffffffffUL; 943 regs->gpr[rd] &= 0xffffffffUL;
962 goto instr_done; 944 goto instr_done;
963 case 0x100: /* mflr */ 945 case SPRN_LR: /* mflr */
964 regs->gpr[rd] = regs->link; 946 regs->gpr[rd] = regs->link;
965 goto instr_done; 947 goto instr_done;
966 case 0x120: /* mfctr */ 948 case SPRN_CTR: /* mfctr */
967 regs->gpr[rd] = regs->ctr; 949 regs->gpr[rd] = regs->ctr;
968 goto instr_done; 950 goto instr_done;
951 default:
952 op->type = MFSPR;
953 op->reg = rd;
954 op->spr = spr;
955 return 0;
969 } 956 }
970 break; 957 break;
971 958
972 case 467: /* mtspr */ 959 case 467: /* mtspr */
973 spr = (instr >> 11) & 0x3ff; 960 spr = ((instr >> 16) & 0x1f) | ((instr >> 6) & 0x3e0);
974 switch (spr) { 961 switch (spr) {
975 case 0x20: /* mtxer */ 962 case SPRN_XER: /* mtxer */
976 regs->xer = (regs->gpr[rd] & 0xffffffffUL); 963 regs->xer = (regs->gpr[rd] & 0xffffffffUL);
977 goto instr_done; 964 goto instr_done;
978 case 0x100: /* mtlr */ 965 case SPRN_LR: /* mtlr */
979 regs->link = regs->gpr[rd]; 966 regs->link = regs->gpr[rd];
980 goto instr_done; 967 goto instr_done;
981 case 0x120: /* mtctr */ 968 case SPRN_CTR: /* mtctr */
982 regs->ctr = regs->gpr[rd]; 969 regs->ctr = regs->gpr[rd];
983 goto instr_done; 970 goto instr_done;
971 default:
972 op->type = MTSPR;
973 op->val = regs->gpr[rd];
974 op->spr = spr;
975 return 0;
984 } 976 }
985 break; 977 break;
986 978
@@ -1257,294 +1249,210 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
1257 * Cache instructions 1249 * Cache instructions
1258 */ 1250 */
1259 case 54: /* dcbst */ 1251 case 54: /* dcbst */
1260 ea = xform_ea(instr, regs, 0); 1252 op->type = MKOP(CACHEOP, DCBST, 0);
1261 if (!address_ok(regs, ea, 8)) 1253 op->ea = xform_ea(instr, regs);
1262 return 0; 1254 return 0;
1263 err = 0;
1264 __cacheop_user_asmx(ea, err, "dcbst");
1265 if (err)
1266 return 0;
1267 goto instr_done;
1268 1255
1269 case 86: /* dcbf */ 1256 case 86: /* dcbf */
1270 ea = xform_ea(instr, regs, 0); 1257 op->type = MKOP(CACHEOP, DCBF, 0);
1271 if (!address_ok(regs, ea, 8)) 1258 op->ea = xform_ea(instr, regs);
1272 return 0; 1259 return 0;
1273 err = 0;
1274 __cacheop_user_asmx(ea, err, "dcbf");
1275 if (err)
1276 return 0;
1277 goto instr_done;
1278 1260
1279 case 246: /* dcbtst */ 1261 case 246: /* dcbtst */
1280 if (rd == 0) { 1262 op->type = MKOP(CACHEOP, DCBTST, 0);
1281 ea = xform_ea(instr, regs, 0); 1263 op->ea = xform_ea(instr, regs);
1282 prefetchw((void *) ea); 1264 op->reg = rd;
1283 } 1265 return 0;
1284 goto instr_done;
1285 1266
1286 case 278: /* dcbt */ 1267 case 278: /* dcbt */
1287 if (rd == 0) { 1268 op->type = MKOP(CACHEOP, DCBTST, 0);
1288 ea = xform_ea(instr, regs, 0); 1269 op->ea = xform_ea(instr, regs);
1289 prefetch((void *) ea); 1270 op->reg = rd;
1290 } 1271 return 0;
1291 goto instr_done;
1292
1293 } 1272 }
1294 break; 1273 break;
1295 } 1274 }
1296 1275
1297 /* 1276 /*
1298 * Following cases are for loads and stores, so bail out 1277 * Loads and stores.
1299 * if we're in little-endian mode.
1300 */ 1278 */
1301 if (regs->msr & MSR_LE) 1279 op->type = UNKNOWN;
1302 return 0; 1280 op->update_reg = ra;
1303 1281 op->reg = rd;
1304 /* 1282 op->val = regs->gpr[rd];
1305 * Save register RA in case it's an update form load or store 1283 u = (instr >> 20) & UPDATE;
1306 * and the access faults.
1307 */
1308 old_ra = regs->gpr[ra];
1309 1284
1310 switch (opcode) { 1285 switch (opcode) {
1311 case 31: 1286 case 31:
1312 u = instr & 0x40; 1287 u = instr & UPDATE;
1288 op->ea = xform_ea(instr, regs);
1313 switch ((instr >> 1) & 0x3ff) { 1289 switch ((instr >> 1) & 0x3ff) {
1314 case 20: /* lwarx */ 1290 case 20: /* lwarx */
1315 ea = xform_ea(instr, regs, 0); 1291 op->type = MKOP(LARX, 0, 4);
1316 if (ea & 3) 1292 break;
1317 break; /* can't handle misaligned */
1318 err = -EFAULT;
1319 if (!address_ok(regs, ea, 4))
1320 goto ldst_done;
1321 err = 0;
1322 __get_user_asmx(val, ea, err, "lwarx");
1323 if (!err)
1324 regs->gpr[rd] = val;
1325 goto ldst_done;
1326 1293
1327 case 150: /* stwcx. */ 1294 case 150: /* stwcx. */
1328 ea = xform_ea(instr, regs, 0); 1295 op->type = MKOP(STCX, 0, 4);
1329 if (ea & 3) 1296 break;
1330 break; /* can't handle misaligned */
1331 err = -EFAULT;
1332 if (!address_ok(regs, ea, 4))
1333 goto ldst_done;
1334 err = 0;
1335 __put_user_asmx(regs->gpr[rd], ea, err, "stwcx.", cr);
1336 if (!err)
1337 regs->ccr = (regs->ccr & 0x0fffffff) |
1338 (cr & 0xe0000000) |
1339 ((regs->xer >> 3) & 0x10000000);
1340 goto ldst_done;
1341 1297
1342#ifdef __powerpc64__ 1298#ifdef __powerpc64__
1343 case 84: /* ldarx */ 1299 case 84: /* ldarx */
1344 ea = xform_ea(instr, regs, 0); 1300 op->type = MKOP(LARX, 0, 8);
1345 if (ea & 7) 1301 break;
1346 break; /* can't handle misaligned */
1347 err = -EFAULT;
1348 if (!address_ok(regs, ea, 8))
1349 goto ldst_done;
1350 err = 0;
1351 __get_user_asmx(val, ea, err, "ldarx");
1352 if (!err)
1353 regs->gpr[rd] = val;
1354 goto ldst_done;
1355 1302
1356 case 214: /* stdcx. */ 1303 case 214: /* stdcx. */
1357 ea = xform_ea(instr, regs, 0); 1304 op->type = MKOP(STCX, 0, 8);
1358 if (ea & 7) 1305 break;
1359 break; /* can't handle misaligned */
1360 err = -EFAULT;
1361 if (!address_ok(regs, ea, 8))
1362 goto ldst_done;
1363 err = 0;
1364 __put_user_asmx(regs->gpr[rd], ea, err, "stdcx.", cr);
1365 if (!err)
1366 regs->ccr = (regs->ccr & 0x0fffffff) |
1367 (cr & 0xe0000000) |
1368 ((regs->xer >> 3) & 0x10000000);
1369 goto ldst_done;
1370 1306
1371 case 21: /* ldx */ 1307 case 21: /* ldx */
1372 case 53: /* ldux */ 1308 case 53: /* ldux */
1373 err = read_mem(&regs->gpr[rd], xform_ea(instr, regs, u), 1309 op->type = MKOP(LOAD, u, 8);
1374 8, regs); 1310 break;
1375 goto ldst_done;
1376#endif 1311#endif
1377 1312
1378 case 23: /* lwzx */ 1313 case 23: /* lwzx */
1379 case 55: /* lwzux */ 1314 case 55: /* lwzux */
1380 err = read_mem(&regs->gpr[rd], xform_ea(instr, regs, u), 1315 op->type = MKOP(LOAD, u, 4);
1381 4, regs); 1316 break;
1382 goto ldst_done;
1383 1317
1384 case 87: /* lbzx */ 1318 case 87: /* lbzx */
1385 case 119: /* lbzux */ 1319 case 119: /* lbzux */
1386 err = read_mem(&regs->gpr[rd], xform_ea(instr, regs, u), 1320 op->type = MKOP(LOAD, u, 1);
1387 1, regs); 1321 break;
1388 goto ldst_done;
1389 1322
1390#ifdef CONFIG_ALTIVEC 1323#ifdef CONFIG_ALTIVEC
1391 case 103: /* lvx */ 1324 case 103: /* lvx */
1392 case 359: /* lvxl */ 1325 case 359: /* lvxl */
1393 if (!(regs->msr & MSR_VEC)) 1326 if (!(regs->msr & MSR_VEC))
1394 break; 1327 goto vecunavail;
1395 ea = xform_ea(instr, regs, 0); 1328 op->type = MKOP(LOAD_VMX, 0, 16);
1396 err = do_vec_load(rd, do_lvx, ea, regs); 1329 break;
1397 goto ldst_done;
1398 1330
1399 case 231: /* stvx */ 1331 case 231: /* stvx */
1400 case 487: /* stvxl */ 1332 case 487: /* stvxl */
1401 if (!(regs->msr & MSR_VEC)) 1333 if (!(regs->msr & MSR_VEC))
1402 break; 1334 goto vecunavail;
1403 ea = xform_ea(instr, regs, 0); 1335 op->type = MKOP(STORE_VMX, 0, 16);
1404 err = do_vec_store(rd, do_stvx, ea, regs); 1336 break;
1405 goto ldst_done;
1406#endif /* CONFIG_ALTIVEC */ 1337#endif /* CONFIG_ALTIVEC */
1407 1338
1408#ifdef __powerpc64__ 1339#ifdef __powerpc64__
1409 case 149: /* stdx */ 1340 case 149: /* stdx */
1410 case 181: /* stdux */ 1341 case 181: /* stdux */
1411 val = regs->gpr[rd]; 1342 op->type = MKOP(STORE, u, 8);
1412 err = write_mem(val, xform_ea(instr, regs, u), 8, regs); 1343 break;
1413 goto ldst_done;
1414#endif 1344#endif
1415 1345
1416 case 151: /* stwx */ 1346 case 151: /* stwx */
1417 case 183: /* stwux */ 1347 case 183: /* stwux */
1418 val = regs->gpr[rd]; 1348 op->type = MKOP(STORE, u, 4);
1419 err = write_mem(val, xform_ea(instr, regs, u), 4, regs); 1349 break;
1420 goto ldst_done;
1421 1350
1422 case 215: /* stbx */ 1351 case 215: /* stbx */
1423 case 247: /* stbux */ 1352 case 247: /* stbux */
1424 val = regs->gpr[rd]; 1353 op->type = MKOP(STORE, u, 1);
1425 err = write_mem(val, xform_ea(instr, regs, u), 1, regs); 1354 break;
1426 goto ldst_done;
1427 1355
1428 case 279: /* lhzx */ 1356 case 279: /* lhzx */
1429 case 311: /* lhzux */ 1357 case 311: /* lhzux */
1430 err = read_mem(&regs->gpr[rd], xform_ea(instr, regs, u), 1358 op->type = MKOP(LOAD, u, 2);
1431 2, regs); 1359 break;
1432 goto ldst_done;
1433 1360
1434#ifdef __powerpc64__ 1361#ifdef __powerpc64__
1435 case 341: /* lwax */ 1362 case 341: /* lwax */
1436 case 373: /* lwaux */ 1363 case 373: /* lwaux */
1437 err = read_mem(&regs->gpr[rd], xform_ea(instr, regs, u), 1364 op->type = MKOP(LOAD, SIGNEXT | u, 4);
1438 4, regs); 1365 break;
1439 if (!err)
1440 regs->gpr[rd] = (signed int) regs->gpr[rd];
1441 goto ldst_done;
1442#endif 1366#endif
1443 1367
1444 case 343: /* lhax */ 1368 case 343: /* lhax */
1445 case 375: /* lhaux */ 1369 case 375: /* lhaux */
1446 err = read_mem(&regs->gpr[rd], xform_ea(instr, regs, u), 1370 op->type = MKOP(LOAD, SIGNEXT | u, 2);
1447 2, regs); 1371 break;
1448 if (!err)
1449 regs->gpr[rd] = (signed short) regs->gpr[rd];
1450 goto ldst_done;
1451 1372
1452 case 407: /* sthx */ 1373 case 407: /* sthx */
1453 case 439: /* sthux */ 1374 case 439: /* sthux */
1454 val = regs->gpr[rd]; 1375 op->type = MKOP(STORE, u, 2);
1455 err = write_mem(val, xform_ea(instr, regs, u), 2, regs); 1376 break;
1456 goto ldst_done;
1457 1377
1458#ifdef __powerpc64__ 1378#ifdef __powerpc64__
1459 case 532: /* ldbrx */ 1379 case 532: /* ldbrx */
1460 err = read_mem(&val, xform_ea(instr, regs, 0), 8, regs); 1380 op->type = MKOP(LOAD, BYTEREV, 8);
1461 if (!err) 1381 break;
1462 regs->gpr[rd] = byterev_8(val);
1463 goto ldst_done;
1464 1382
1465#endif 1383#endif
1466 1384
1467 case 534: /* lwbrx */ 1385 case 534: /* lwbrx */
1468 err = read_mem(&val, xform_ea(instr, regs, 0), 4, regs); 1386 op->type = MKOP(LOAD, BYTEREV, 4);
1469 if (!err) 1387 break;
1470 regs->gpr[rd] = byterev_4(val);
1471 goto ldst_done;
1472 1388
1473#ifdef CONFIG_PPC_FPU 1389#ifdef CONFIG_PPC_FPU
1474 case 535: /* lfsx */ 1390 case 535: /* lfsx */
1475 case 567: /* lfsux */ 1391 case 567: /* lfsux */
1476 if (!(regs->msr & MSR_FP)) 1392 if (!(regs->msr & MSR_FP))
1477 break; 1393 goto fpunavail;
1478 ea = xform_ea(instr, regs, u); 1394 op->type = MKOP(LOAD_FP, u, 4);
1479 err = do_fp_load(rd, do_lfs, ea, 4, regs); 1395 break;
1480 goto ldst_done;
1481 1396
1482 case 599: /* lfdx */ 1397 case 599: /* lfdx */
1483 case 631: /* lfdux */ 1398 case 631: /* lfdux */
1484 if (!(regs->msr & MSR_FP)) 1399 if (!(regs->msr & MSR_FP))
1485 break; 1400 goto fpunavail;
1486 ea = xform_ea(instr, regs, u); 1401 op->type = MKOP(LOAD_FP, u, 8);
1487 err = do_fp_load(rd, do_lfd, ea, 8, regs); 1402 break;
1488 goto ldst_done;
1489 1403
1490 case 663: /* stfsx */ 1404 case 663: /* stfsx */
1491 case 695: /* stfsux */ 1405 case 695: /* stfsux */
1492 if (!(regs->msr & MSR_FP)) 1406 if (!(regs->msr & MSR_FP))
1493 break; 1407 goto fpunavail;
1494 ea = xform_ea(instr, regs, u); 1408 op->type = MKOP(STORE_FP, u, 4);
1495 err = do_fp_store(rd, do_stfs, ea, 4, regs); 1409 break;
1496 goto ldst_done;
1497 1410
1498 case 727: /* stfdx */ 1411 case 727: /* stfdx */
1499 case 759: /* stfdux */ 1412 case 759: /* stfdux */
1500 if (!(regs->msr & MSR_FP)) 1413 if (!(regs->msr & MSR_FP))
1501 break; 1414 goto fpunavail;
1502 ea = xform_ea(instr, regs, u); 1415 op->type = MKOP(STORE_FP, u, 8);
1503 err = do_fp_store(rd, do_stfd, ea, 8, regs); 1416 break;
1504 goto ldst_done;
1505#endif 1417#endif
1506 1418
1507#ifdef __powerpc64__ 1419#ifdef __powerpc64__
1508 case 660: /* stdbrx */ 1420 case 660: /* stdbrx */
1509 val = byterev_8(regs->gpr[rd]); 1421 op->type = MKOP(STORE, BYTEREV, 8);
1510 err = write_mem(val, xform_ea(instr, regs, 0), 8, regs); 1422 op->val = byterev_8(regs->gpr[rd]);
1511 goto ldst_done; 1423 break;
1512 1424
1513#endif 1425#endif
1514 case 662: /* stwbrx */ 1426 case 662: /* stwbrx */
1515 val = byterev_4(regs->gpr[rd]); 1427 op->type = MKOP(STORE, BYTEREV, 4);
1516 err = write_mem(val, xform_ea(instr, regs, 0), 4, regs); 1428 op->val = byterev_4(regs->gpr[rd]);
1517 goto ldst_done; 1429 break;
1518 1430
1519 case 790: /* lhbrx */ 1431 case 790: /* lhbrx */
1520 err = read_mem(&val, xform_ea(instr, regs, 0), 2, regs); 1432 op->type = MKOP(LOAD, BYTEREV, 2);
1521 if (!err) 1433 break;
1522 regs->gpr[rd] = byterev_2(val);
1523 goto ldst_done;
1524 1434
1525 case 918: /* sthbrx */ 1435 case 918: /* sthbrx */
1526 val = byterev_2(regs->gpr[rd]); 1436 op->type = MKOP(STORE, BYTEREV, 2);
1527 err = write_mem(val, xform_ea(instr, regs, 0), 2, regs); 1437 op->val = byterev_2(regs->gpr[rd]);
1528 goto ldst_done; 1438 break;
1529 1439
1530#ifdef CONFIG_VSX 1440#ifdef CONFIG_VSX
1531 case 844: /* lxvd2x */ 1441 case 844: /* lxvd2x */
1532 case 876: /* lxvd2ux */ 1442 case 876: /* lxvd2ux */
1533 if (!(regs->msr & MSR_VSX)) 1443 if (!(regs->msr & MSR_VSX))
1534 break; 1444 goto vsxunavail;
1535 rd |= (instr & 1) << 5; 1445 op->reg = rd | ((instr & 1) << 5);
1536 ea = xform_ea(instr, regs, u); 1446 op->type = MKOP(LOAD_VSX, u, 16);
1537 err = do_vsx_load(rd, do_lxvd2x, ea, regs); 1447 break;
1538 goto ldst_done;
1539 1448
1540 case 972: /* stxvd2x */ 1449 case 972: /* stxvd2x */
1541 case 1004: /* stxvd2ux */ 1450 case 1004: /* stxvd2ux */
1542 if (!(regs->msr & MSR_VSX)) 1451 if (!(regs->msr & MSR_VSX))
1543 break; 1452 goto vsxunavail;
1544 rd |= (instr & 1) << 5; 1453 op->reg = rd | ((instr & 1) << 5);
1545 ea = xform_ea(instr, regs, u); 1454 op->type = MKOP(STORE_VSX, u, 16);
1546 err = do_vsx_store(rd, do_stxvd2x, ea, regs); 1455 break;
1547 goto ldst_done;
1548 1456
1549#endif /* CONFIG_VSX */ 1457#endif /* CONFIG_VSX */
1550 } 1458 }
@@ -1552,178 +1460,124 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
1552 1460
1553 case 32: /* lwz */ 1461 case 32: /* lwz */
1554 case 33: /* lwzu */ 1462 case 33: /* lwzu */
1555 err = read_mem(&regs->gpr[rd], dform_ea(instr, regs), 4, regs); 1463 op->type = MKOP(LOAD, u, 4);
1556 goto ldst_done; 1464 op->ea = dform_ea(instr, regs);
1465 break;
1557 1466
1558 case 34: /* lbz */ 1467 case 34: /* lbz */
1559 case 35: /* lbzu */ 1468 case 35: /* lbzu */
1560 err = read_mem(&regs->gpr[rd], dform_ea(instr, regs), 1, regs); 1469 op->type = MKOP(LOAD, u, 1);
1561 goto ldst_done; 1470 op->ea = dform_ea(instr, regs);
1471 break;
1562 1472
1563 case 36: /* stw */ 1473 case 36: /* stw */
1564 val = regs->gpr[rd];
1565 err = write_mem(val, dform_ea(instr, regs), 4, regs);
1566 goto ldst_done;
1567
1568 case 37: /* stwu */ 1474 case 37: /* stwu */
1569 val = regs->gpr[rd]; 1475 op->type = MKOP(STORE, u, 4);
1570 val3 = dform_ea(instr, regs); 1476 op->ea = dform_ea(instr, regs);
1571 /* 1477 break;
1572 * For PPC32 we always use stwu to change stack point with r1. So
1573 * this emulated store may corrupt the exception frame, now we
1574 * have to provide the exception frame trampoline, which is pushed
1575 * below the kprobed function stack. So we only update gpr[1] but
1576 * don't emulate the real store operation. We will do real store
1577 * operation safely in exception return code by checking this flag.
1578 */
1579 if ((ra == 1) && !(regs->msr & MSR_PR) \
1580 && (val3 >= (regs->gpr[1] - STACK_INT_FRAME_SIZE))) {
1581#ifdef CONFIG_PPC32
1582 /*
1583 * Check if we will touch kernel sack overflow
1584 */
1585 if (val3 - STACK_INT_FRAME_SIZE <= current->thread.ksp_limit) {
1586 printk(KERN_CRIT "Can't kprobe this since Kernel stack overflow.\n");
1587 err = -EINVAL;
1588 break;
1589 }
1590#endif /* CONFIG_PPC32 */
1591 /*
1592 * Check if we already set since that means we'll
1593 * lose the previous value.
1594 */
1595 WARN_ON(test_thread_flag(TIF_EMULATE_STACK_STORE));
1596 set_thread_flag(TIF_EMULATE_STACK_STORE);
1597 err = 0;
1598 } else
1599 err = write_mem(val, val3, 4, regs);
1600 goto ldst_done;
1601 1478
1602 case 38: /* stb */ 1479 case 38: /* stb */
1603 case 39: /* stbu */ 1480 case 39: /* stbu */
1604 val = regs->gpr[rd]; 1481 op->type = MKOP(STORE, u, 1);
1605 err = write_mem(val, dform_ea(instr, regs), 1, regs); 1482 op->ea = dform_ea(instr, regs);
1606 goto ldst_done; 1483 break;
1607 1484
1608 case 40: /* lhz */ 1485 case 40: /* lhz */
1609 case 41: /* lhzu */ 1486 case 41: /* lhzu */
1610 err = read_mem(&regs->gpr[rd], dform_ea(instr, regs), 2, regs); 1487 op->type = MKOP(LOAD, u, 2);
1611 goto ldst_done; 1488 op->ea = dform_ea(instr, regs);
1489 break;
1612 1490
1613 case 42: /* lha */ 1491 case 42: /* lha */
1614 case 43: /* lhau */ 1492 case 43: /* lhau */
1615 err = read_mem(&regs->gpr[rd], dform_ea(instr, regs), 2, regs); 1493 op->type = MKOP(LOAD, SIGNEXT | u, 2);
1616 if (!err) 1494 op->ea = dform_ea(instr, regs);
1617 regs->gpr[rd] = (signed short) regs->gpr[rd]; 1495 break;
1618 goto ldst_done;
1619 1496
1620 case 44: /* sth */ 1497 case 44: /* sth */
1621 case 45: /* sthu */ 1498 case 45: /* sthu */
1622 val = regs->gpr[rd]; 1499 op->type = MKOP(STORE, u, 2);
1623 err = write_mem(val, dform_ea(instr, regs), 2, regs); 1500 op->ea = dform_ea(instr, regs);
1624 goto ldst_done; 1501 break;
1625 1502
1626 case 46: /* lmw */ 1503 case 46: /* lmw */
1627 ra = (instr >> 16) & 0x1f; 1504 ra = (instr >> 16) & 0x1f;
1628 if (ra >= rd) 1505 if (ra >= rd)
1629 break; /* invalid form, ra in range to load */ 1506 break; /* invalid form, ra in range to load */
1630 ea = dform_ea(instr, regs); 1507 op->type = MKOP(LOAD_MULTI, 0, 4);
1631 do { 1508 op->ea = dform_ea(instr, regs);
1632 err = read_mem(&regs->gpr[rd], ea, 4, regs); 1509 break;
1633 if (err)
1634 return 0;
1635 ea += 4;
1636 } while (++rd < 32);
1637 goto instr_done;
1638 1510
1639 case 47: /* stmw */ 1511 case 47: /* stmw */
1640 ea = dform_ea(instr, regs); 1512 op->type = MKOP(STORE_MULTI, 0, 4);
1641 do { 1513 op->ea = dform_ea(instr, regs);
1642 err = write_mem(regs->gpr[rd], ea, 4, regs); 1514 break;
1643 if (err)
1644 return 0;
1645 ea += 4;
1646 } while (++rd < 32);
1647 goto instr_done;
1648 1515
1649#ifdef CONFIG_PPC_FPU 1516#ifdef CONFIG_PPC_FPU
1650 case 48: /* lfs */ 1517 case 48: /* lfs */
1651 case 49: /* lfsu */ 1518 case 49: /* lfsu */
1652 if (!(regs->msr & MSR_FP)) 1519 if (!(regs->msr & MSR_FP))
1653 break; 1520 goto fpunavail;
1654 ea = dform_ea(instr, regs); 1521 op->type = MKOP(LOAD_FP, u, 4);
1655 err = do_fp_load(rd, do_lfs, ea, 4, regs); 1522 op->ea = dform_ea(instr, regs);
1656 goto ldst_done; 1523 break;
1657 1524
1658 case 50: /* lfd */ 1525 case 50: /* lfd */
1659 case 51: /* lfdu */ 1526 case 51: /* lfdu */
1660 if (!(regs->msr & MSR_FP)) 1527 if (!(regs->msr & MSR_FP))
1661 break; 1528 goto fpunavail;
1662 ea = dform_ea(instr, regs); 1529 op->type = MKOP(LOAD_FP, u, 8);
1663 err = do_fp_load(rd, do_lfd, ea, 8, regs); 1530 op->ea = dform_ea(instr, regs);
1664 goto ldst_done; 1531 break;
1665 1532
1666 case 52: /* stfs */ 1533 case 52: /* stfs */
1667 case 53: /* stfsu */ 1534 case 53: /* stfsu */
1668 if (!(regs->msr & MSR_FP)) 1535 if (!(regs->msr & MSR_FP))
1669 break; 1536 goto fpunavail;
1670 ea = dform_ea(instr, regs); 1537 op->type = MKOP(STORE_FP, u, 4);
1671 err = do_fp_store(rd, do_stfs, ea, 4, regs); 1538 op->ea = dform_ea(instr, regs);
1672 goto ldst_done; 1539 break;
1673 1540
1674 case 54: /* stfd */ 1541 case 54: /* stfd */
1675 case 55: /* stfdu */ 1542 case 55: /* stfdu */
1676 if (!(regs->msr & MSR_FP)) 1543 if (!(regs->msr & MSR_FP))
1677 break; 1544 goto fpunavail;
1678 ea = dform_ea(instr, regs); 1545 op->type = MKOP(STORE_FP, u, 8);
1679 err = do_fp_store(rd, do_stfd, ea, 8, regs); 1546 op->ea = dform_ea(instr, regs);
1680 goto ldst_done; 1547 break;
1681#endif 1548#endif
1682 1549
1683#ifdef __powerpc64__ 1550#ifdef __powerpc64__
1684 case 58: /* ld[u], lwa */ 1551 case 58: /* ld[u], lwa */
1552 op->ea = dsform_ea(instr, regs);
1685 switch (instr & 3) { 1553 switch (instr & 3) {
1686 case 0: /* ld */ 1554 case 0: /* ld */
1687 err = read_mem(&regs->gpr[rd], dsform_ea(instr, regs), 1555 op->type = MKOP(LOAD, 0, 8);
1688 8, regs); 1556 break;
1689 goto ldst_done;
1690 case 1: /* ldu */ 1557 case 1: /* ldu */
1691 err = read_mem(&regs->gpr[rd], dsform_ea(instr, regs), 1558 op->type = MKOP(LOAD, UPDATE, 8);
1692 8, regs); 1559 break;
1693 goto ldst_done;
1694 case 2: /* lwa */ 1560 case 2: /* lwa */
1695 err = read_mem(&regs->gpr[rd], dsform_ea(instr, regs), 1561 op->type = MKOP(LOAD, SIGNEXT, 4);
1696 4, regs); 1562 break;
1697 if (!err)
1698 regs->gpr[rd] = (signed int) regs->gpr[rd];
1699 goto ldst_done;
1700 } 1563 }
1701 break; 1564 break;
1702 1565
1703 case 62: /* std[u] */ 1566 case 62: /* std[u] */
1704 val = regs->gpr[rd]; 1567 op->ea = dsform_ea(instr, regs);
1705 switch (instr & 3) { 1568 switch (instr & 3) {
1706 case 0: /* std */ 1569 case 0: /* std */
1707 err = write_mem(val, dsform_ea(instr, regs), 8, regs); 1570 op->type = MKOP(STORE, 0, 8);
1708 goto ldst_done; 1571 break;
1709 case 1: /* stdu */ 1572 case 1: /* stdu */
1710 err = write_mem(val, dsform_ea(instr, regs), 8, regs); 1573 op->type = MKOP(STORE, UPDATE, 8);
1711 goto ldst_done; 1574 break;
1712 } 1575 }
1713 break; 1576 break;
1714#endif /* __powerpc64__ */ 1577#endif /* __powerpc64__ */
1715 1578
1716 } 1579 }
1717 err = -EINVAL; 1580 return 0;
1718
1719 ldst_done:
1720 if (err) {
1721 regs->gpr[ra] = old_ra;
1722 return 0; /* invoke DSI if -EFAULT? */
1723 }
1724 instr_done:
1725 regs->nip = truncate_if_32bit(regs->msr, regs->nip + 4);
1726 return 1;
1727 1581
1728 logical_done: 1582 logical_done:
1729 if (instr & 1) 1583 if (instr & 1)
@@ -1733,5 +1587,328 @@ int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
1733 arith_done: 1587 arith_done:
1734 if (instr & 1) 1588 if (instr & 1)
1735 set_cr0(regs, rd); 1589 set_cr0(regs, rd);
1736 goto instr_done; 1590
1591 instr_done:
1592 regs->nip = truncate_if_32bit(regs->msr, regs->nip + 4);
1593 return 1;
1594
1595 priv:
1596 op->type = INTERRUPT | 0x700;
1597 op->val = SRR1_PROGPRIV;
1598 return 0;
1599
1600#ifdef CONFIG_PPC_FPU
1601 fpunavail:
1602 op->type = INTERRUPT | 0x800;
1603 return 0;
1604#endif
1605
1606#ifdef CONFIG_ALTIVEC
1607 vecunavail:
1608 op->type = INTERRUPT | 0xf20;
1609 return 0;
1610#endif
1611
1612#ifdef CONFIG_VSX
1613 vsxunavail:
1614 op->type = INTERRUPT | 0xf40;
1615 return 0;
1616#endif
1617}
1618EXPORT_SYMBOL_GPL(analyse_instr);
1619
1620/*
1621 * For PPC32 we always use stwu with r1 to change the stack pointer.
1622 * So this emulated store may corrupt the exception frame, now we
1623 * have to provide the exception frame trampoline, which is pushed
1624 * below the kprobed function stack. So we only update gpr[1] but
1625 * don't emulate the real store operation. We will do real store
1626 * operation safely in exception return code by checking this flag.
1627 */
1628static __kprobes int handle_stack_update(unsigned long ea, struct pt_regs *regs)
1629{
1630#ifdef CONFIG_PPC32
1631 /*
1632 * Check if we will touch kernel stack overflow
1633 */
1634 if (ea - STACK_INT_FRAME_SIZE <= current->thread.ksp_limit) {
1635 printk(KERN_CRIT "Can't kprobe this since kernel stack would overflow.\n");
1636 return -EINVAL;
1637 }
1638#endif /* CONFIG_PPC32 */
1639 /*
1640 * Check if we already set since that means we'll
1641 * lose the previous value.
1642 */
1643 WARN_ON(test_thread_flag(TIF_EMULATE_STACK_STORE));
1644 set_thread_flag(TIF_EMULATE_STACK_STORE);
1645 return 0;
1646}
1647
1648static __kprobes void do_signext(unsigned long *valp, int size)
1649{
1650 switch (size) {
1651 case 2:
1652 *valp = (signed short) *valp;
1653 break;
1654 case 4:
1655 *valp = (signed int) *valp;
1656 break;
1657 }
1658}
1659
1660static __kprobes void do_byterev(unsigned long *valp, int size)
1661{
1662 switch (size) {
1663 case 2:
1664 *valp = byterev_2(*valp);
1665 break;
1666 case 4:
1667 *valp = byterev_4(*valp);
1668 break;
1669#ifdef __powerpc64__
1670 case 8:
1671 *valp = byterev_8(*valp);
1672 break;
1673#endif
1674 }
1675}
1676
1677/*
1678 * Emulate instructions that cause a transfer of control,
1679 * loads and stores, and a few other instructions.
1680 * Returns 1 if the step was emulated, 0 if not,
1681 * or -1 if the instruction is one that should not be stepped,
1682 * such as an rfid, or a mtmsrd that would clear MSR_RI.
1683 */
1684int __kprobes emulate_step(struct pt_regs *regs, unsigned int instr)
1685{
1686 struct instruction_op op;
1687 int r, err, size;
1688 unsigned long val;
1689 unsigned int cr;
1690 int rd;
1691
1692 r = analyse_instr(&op, regs, instr);
1693 if (r != 0)
1694 return r;
1695
1696 err = 0;
1697 size = GETSIZE(op.type);
1698 switch (op.type & INSTR_TYPE_MASK) {
1699 case CACHEOP:
1700 if (!address_ok(regs, op.ea, 8))
1701 return 0;
1702 switch (op.type & CACHEOP_MASK) {
1703 case DCBST:
1704 __cacheop_user_asmx(op.ea, err, "dcbst");
1705 break;
1706 case DCBF:
1707 __cacheop_user_asmx(op.ea, err, "dcbf");
1708 break;
1709 case DCBTST:
1710 if (op.reg == 0)
1711 prefetchw((void *) op.ea);
1712 break;
1713 case DCBT:
1714 if (op.reg == 0)
1715 prefetch((void *) op.ea);
1716 break;
1717 }
1718 if (err)
1719 return 0;
1720 goto instr_done;
1721
1722 case LARX:
1723 if (regs->msr & MSR_LE)
1724 return 0;
1725 if (op.ea & (size - 1))
1726 break; /* can't handle misaligned */
1727 err = -EFAULT;
1728 if (!address_ok(regs, op.ea, size))
1729 goto ldst_done;
1730 err = 0;
1731 switch (size) {
1732 case 4:
1733 __get_user_asmx(val, op.ea, err, "lwarx");
1734 break;
1735 case 8:
1736 __get_user_asmx(val, op.ea, err, "ldarx");
1737 break;
1738 default:
1739 return 0;
1740 }
1741 if (!err)
1742 regs->gpr[op.reg] = val;
1743 goto ldst_done;
1744
1745 case STCX:
1746 if (regs->msr & MSR_LE)
1747 return 0;
1748 if (op.ea & (size - 1))
1749 break; /* can't handle misaligned */
1750 err = -EFAULT;
1751 if (!address_ok(regs, op.ea, size))
1752 goto ldst_done;
1753 err = 0;
1754 switch (size) {
1755 case 4:
1756 __put_user_asmx(op.val, op.ea, err, "stwcx.", cr);
1757 break;
1758 case 8:
1759 __put_user_asmx(op.val, op.ea, err, "stdcx.", cr);
1760 break;
1761 default:
1762 return 0;
1763 }
1764 if (!err)
1765 regs->ccr = (regs->ccr & 0x0fffffff) |
1766 (cr & 0xe0000000) |
1767 ((regs->xer >> 3) & 0x10000000);
1768 goto ldst_done;
1769
1770 case LOAD:
1771 if (regs->msr & MSR_LE)
1772 return 0;
1773 err = read_mem(&regs->gpr[op.reg], op.ea, size, regs);
1774 if (!err) {
1775 if (op.type & SIGNEXT)
1776 do_signext(&regs->gpr[op.reg], size);
1777 if (op.type & BYTEREV)
1778 do_byterev(&regs->gpr[op.reg], size);
1779 }
1780 goto ldst_done;
1781
1782 case LOAD_FP:
1783 if (regs->msr & MSR_LE)
1784 return 0;
1785 if (size == 4)
1786 err = do_fp_load(op.reg, do_lfs, op.ea, size, regs);
1787 else
1788 err = do_fp_load(op.reg, do_lfd, op.ea, size, regs);
1789 goto ldst_done;
1790
1791#ifdef CONFIG_ALTIVEC
1792 case LOAD_VMX:
1793 if (regs->msr & MSR_LE)
1794 return 0;
1795 err = do_vec_load(op.reg, do_lvx, op.ea & ~0xfUL, regs);
1796 goto ldst_done;
1797#endif
1798#ifdef CONFIG_VSX
1799 case LOAD_VSX:
1800 if (regs->msr & MSR_LE)
1801 return 0;
1802 err = do_vsx_load(op.reg, do_lxvd2x, op.ea, regs);
1803 goto ldst_done;
1804#endif
1805 case LOAD_MULTI:
1806 if (regs->msr & MSR_LE)
1807 return 0;
1808 rd = op.reg;
1809 do {
1810 err = read_mem(&regs->gpr[rd], op.ea, 4, regs);
1811 if (err)
1812 return 0;
1813 op.ea += 4;
1814 } while (++rd < 32);
1815 goto instr_done;
1816
1817 case STORE:
1818 if (regs->msr & MSR_LE)
1819 return 0;
1820 if ((op.type & UPDATE) && size == sizeof(long) &&
1821 op.reg == 1 && op.update_reg == 1 &&
1822 !(regs->msr & MSR_PR) &&
1823 op.ea >= regs->gpr[1] - STACK_INT_FRAME_SIZE) {
1824 err = handle_stack_update(op.ea, regs);
1825 goto ldst_done;
1826 }
1827 err = write_mem(op.val, op.ea, size, regs);
1828 goto ldst_done;
1829
1830 case STORE_FP:
1831 if (regs->msr & MSR_LE)
1832 return 0;
1833 if (size == 4)
1834 err = do_fp_store(op.reg, do_stfs, op.ea, size, regs);
1835 else
1836 err = do_fp_store(op.reg, do_stfd, op.ea, size, regs);
1837 goto ldst_done;
1838
1839#ifdef CONFIG_ALTIVEC
1840 case STORE_VMX:
1841 if (regs->msr & MSR_LE)
1842 return 0;
1843 err = do_vec_store(op.reg, do_stvx, op.ea & ~0xfUL, regs);
1844 goto ldst_done;
1845#endif
1846#ifdef CONFIG_VSX
1847 case STORE_VSX:
1848 if (regs->msr & MSR_LE)
1849 return 0;
1850 err = do_vsx_store(op.reg, do_stxvd2x, op.ea, regs);
1851 goto ldst_done;
1852#endif
1853 case STORE_MULTI:
1854 if (regs->msr & MSR_LE)
1855 return 0;
1856 rd = op.reg;
1857 do {
1858 err = write_mem(regs->gpr[rd], op.ea, 4, regs);
1859 if (err)
1860 return 0;
1861 op.ea += 4;
1862 } while (++rd < 32);
1863 goto instr_done;
1864
1865 case MFMSR:
1866 regs->gpr[op.reg] = regs->msr & MSR_MASK;
1867 goto instr_done;
1868
1869 case MTMSR:
1870 val = regs->gpr[op.reg];
1871 if ((val & MSR_RI) == 0)
1872 /* can't step mtmsr[d] that would clear MSR_RI */
1873 return -1;
1874 /* here op.val is the mask of bits to change */
1875 regs->msr = (regs->msr & ~op.val) | (val & op.val);
1876 goto instr_done;
1877
1878#ifdef CONFIG_PPC64
1879 case SYSCALL: /* sc */
1880 /*
1881 * N.B. this uses knowledge about how the syscall
1882 * entry code works. If that is changed, this will
1883 * need to be changed also.
1884 */
1885 if (regs->gpr[0] == 0x1ebe &&
1886 cpu_has_feature(CPU_FTR_REAL_LE)) {
1887 regs->msr ^= MSR_LE;
1888 goto instr_done;
1889 }
1890 regs->gpr[9] = regs->gpr[13];
1891 regs->gpr[10] = MSR_KERNEL;
1892 regs->gpr[11] = regs->nip + 4;
1893 regs->gpr[12] = regs->msr & MSR_MASK;
1894 regs->gpr[13] = (unsigned long) get_paca();
1895 regs->nip = (unsigned long) &system_call_common;
1896 regs->msr = MSR_KERNEL;
1897 return 1;
1898
1899 case RFI:
1900 return -1;
1901#endif
1902 }
1903 return 0;
1904
1905 ldst_done:
1906 if (err)
1907 return 0;
1908 if (op.type & UPDATE)
1909 regs->gpr[op.update_reg] = op.ea;
1910
1911 instr_done:
1912 regs->nip = truncate_if_32bit(regs->msr, regs->nip + 4);
1913 return 1;
1737} 1914}
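
To show how the split is meant to be consumed, here is a minimal sketch (not part of the patch) of a caller in the style the commit message envisages for KVM: it lets analyse_instr() execute anything that only touches pt_regs and handles just the plain load and store cases itself. perform_load() and perform_store() are hypothetical stand-ins for whatever memory accessors such a caller already has; the other names are the ones introduced by the patch.

#include <asm/ptrace.h>
#include <asm/sstep.h>		/* analyse_instr(), struct instruction_op */

/* Hypothetical accessors: a real caller (e.g. KVM) would read or write
 * guest memory here; stubs keep the sketch self-contained. */
static int perform_load(struct pt_regs *regs, int reg, unsigned long ea, int size)
{
	/* read 'size' bytes at 'ea' into regs->gpr[reg]; return 0 on success */
	return 0;
}

static int perform_store(struct pt_regs *regs, unsigned long val, unsigned long ea, int size)
{
	/* write the low 'size' bytes of 'val' to 'ea'; return 0 on success */
	return 0;
}

/* Emulate one instruction; returns 1 if handled, 0 otherwise. */
static int handle_one_insn(struct pt_regs *regs, unsigned int instr)
{
	struct instruction_op op;
	int err;

	if (analyse_instr(&op, regs, instr) == 1)
		return 1;		/* executed purely by updating *regs */

	switch (op.type & INSTR_TYPE_MASK) {
	case LOAD:
		err = perform_load(regs, op.reg, op.ea, GETSIZE(op.type));
		break;
	case STORE:
		err = perform_store(regs, op.val, op.ea, GETSIZE(op.type));
		break;
	default:
		return 0;		/* leave everything else to the usual path */
	}
	if (err)
		return 0;
	if (op.type & UPDATE)		/* update form: write ea back to RA */
		regs->gpr[op.update_reg] = op.ea;
	regs->nip += 4;			/* emulate_step() also truncates this for 32-bit tasks */
	return 1;
}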