Commit d9bce9d99f4656ae0b0127f7472db9067b8f84ab
1 parent 5fd46862
Make it safe to use 64-bit GPRs and/or 64-bit host registers.
For "symetry", add 64 bits versions of all modified functions. As a side effect, add a lot of code provision for PowerPC 64 support. Move overflow and carry checks in common routines for simple cases. Add isel and popcntb instructions from PowerPC 2.03 specification. Remove remaining micro-operations helpers prototypes from op.c. Fix XER_BC field to be 7 bits long. Add power management support for PowerPC 603 & 604. Fix compilation warnings. git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@2482 c046a42c-6fe2-441c-8c8c-71466251a162
Showing 8 changed files with 1892 additions and 395 deletions.
Too many changes to show. To preserve performance, only 8 of 9 files are displayed.
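The commit message mentions isel and popcntb from the PowerPC 2.03 specification, but the op.c hunks below only add op_isel()/op_popcntb() wrappers around do_popcntb() helpers, so the actual bit counting is not visible in this view. As a reference, here is a minimal C sketch of the architected popcntb behaviour (each result byte holds the population count of the corresponding source byte); it illustrates the specification, not the commit's helper code.

    #include <stdint.h>

    /* Reference semantics of popcntb (Power ISA 2.03): every byte of the
     * result receives the number of set bits in the matching byte of the
     * source register.  Illustration only, not QEMU's do_popcntb(). */
    static uint32_t popcntb_ref(uint32_t rs)
    {
        uint32_t ra = 0;
        int i;

        for (i = 0; i < 32; i += 8) {
            uint32_t byte = (rs >> i) & 0xFF;
            int cnt = 0;

            while (byte != 0) {
                cnt += byte & 1;
                byte >>= 1;
            }
            ra |= (uint32_t)cnt << i;
        }
        return ra;
    }

For example, popcntb_ref(0x01030007) returns 0x01020003.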
target-ppc/cpu.h
... | ... | @@ -365,6 +365,8 @@ enum { |
365 | 365 | PPC_E500_VECTOR = 0x20000000, |
366 | 366 | /* PowerPC 4xx dedicated instructions */ |
367 | 367 | PPC_4xx_COMMON = 0x40000000, |
368 | + /* PowerPC 2.03 specification extensions */ | |
369 | + PPC_203 = 0x80000000, | |
368 | 370 | }; |
369 | 371 | |
370 | 372 | /* CPU run-time flags (MMU and exception model) */ |
... | ... | @@ -385,6 +387,8 @@ enum { |
385 | 387 | PPC_FLAGS_MMU_403 = 0x00000005, |
386 | 388 | /* Freescale e500 MMU model */ |
387 | 389 | PPC_FLAGS_MMU_e500 = 0x00000006, |
390 | + /* BookE MMU model */ | |
391 | + PPC_FLAGS_MMU_BOOKE = 0x00000007, | |
388 | 392 | /* Exception model */ |
389 | 393 | PPC_FLAGS_EXCP_MASK = 0x000000F0, |
390 | 394 | /* Standard PowerPC exception model */ |
... | ... | @@ -407,6 +411,8 @@ enum { |
407 | 411 | PPC_FLAGS_EXCP_74xx = 0x00000080, |
408 | 412 | /* PowerPC 970 exception model */ |
409 | 413 | PPC_FLAGS_EXCP_970 = 0x00000090, |
414 | + /* BookE exception model */ | |
415 | + PPC_FLAGS_EXCP_BOOKE = 0x000000A0, | |
410 | 416 | }; |
411 | 417 | |
412 | 418 | #define PPC_MMU(env) (env->flags & PPC_FLAGS_MMU_MASK) |
... | ... | @@ -437,11 +443,11 @@ enum { |
437 | 443 | /* PowerPC 440 */ |
438 | 444 | #define PPC_INSNS_440 (PPC_INSNS_EMB | PPC_CACHE_OPT | PPC_BOOKE | \ |
439 | 445 | PPC_4xx_COMMON | PPC_405_MAC | PPC_440_SPEC) |
440 | -#define PPC_FLAGS_440 (PPC_FLAGS_TODO) | |
446 | +#define PPC_FLAGS_440 (PPC_FLAGS_MMU_BOOKE | PPC_FLAGS_EXCP_BOOKE) | |
441 | 447 | /* Generic BookE PowerPC */ |
442 | 448 | #define PPC_INSNS_BOOKE (PPC_INSNS_EMB | PPC_BOOKE | PPC_MEM_EIEIO | \ |
443 | 449 | PPC_FLOAT | PPC_FLOAT_OPT | PPC_CACHE_OPT) |
444 | -#define PPC_FLAGS_BOOKE (PPC_FLAGS_MMU_SOFT_4xx | PPC_FLAGS_EXCP_40x) | |
450 | +#define PPC_FLAGS_BOOKE (PPC_FLAGS_MMU_BOOKE | PPC_FLAGS_EXCP_BOOKE) | |
445 | 451 | /* e500 core */ |
446 | 452 | #define PPC_INSNS_E500 (PPC_INSNS_EMB | PPC_BOOKE | PPC_MEM_EIEIO | \ |
447 | 453 | PPC_CACHE_OPT | PPC_E500_VECTOR) |
... | ... | @@ -502,7 +508,6 @@ typedef struct ppc_dcr_t ppc_dcr_t; |
502 | 508 | typedef struct ppc_avr_t ppc_avr_t; |
503 | 509 | typedef struct ppc_tlb_t ppc_tlb_t; |
504 | 510 | |
505 | - | |
506 | 511 | /* SPR access micro-ops generations callbacks */ |
507 | 512 | struct ppc_spr_t { |
508 | 513 | void (*uea_read)(void *opaque, int spr_num); |
... | ... | @@ -619,6 +624,8 @@ struct CPUPPCState { |
619 | 624 | */ |
620 | 625 | target_ulong t0, t1, t2; |
621 | 626 | #endif |
627 | + ppc_avr_t t0_avr, t1_avr, t2_avr; | |
628 | + | |
622 | 629 | /* general purpose registers */ |
623 | 630 | ppc_gpr_t gpr[32]; |
624 | 631 | /* LR */ |
... | ... | @@ -674,6 +681,9 @@ struct CPUPPCState { |
674 | 681 | /* Altivec registers */ |
675 | 682 | ppc_avr_t avr[32]; |
676 | 683 | uint32_t vscr; |
684 | + /* SPE registers */ | |
685 | + ppc_gpr_t spe_acc; | |
686 | + uint32_t spe_fscr; | |
677 | 687 | |
678 | 688 | /* Internal devices resources */ |
679 | 689 | /* Time base and decrementer */ |
... | ... | @@ -762,8 +772,10 @@ void do_store_dbatu (CPUPPCState *env, int nr, target_ulong value); |
762 | 772 | void do_store_dbatl (CPUPPCState *env, int nr, target_ulong value); |
763 | 773 | target_ulong do_load_sdr1 (CPUPPCState *env); |
764 | 774 | void do_store_sdr1 (CPUPPCState *env, target_ulong value); |
765 | -target_ulong do_load_asr (CPUPPCState *env); | |
766 | -void do_store_asr (CPUPPCState *env, target_ulong value); | |
775 | +#if defined(TARGET_PPC64) | |
776 | +target_ulong ppc_load_asr (CPUPPCState *env); | |
777 | +void ppc_store_asr (CPUPPCState *env, target_ulong value); | |
778 | +#endif | |
767 | 779 | target_ulong do_load_sr (CPUPPCState *env, int srnum); |
768 | 780 | void do_store_sr (CPUPPCState *env, int srnum, target_ulong value); |
769 | 781 | #endif |
... | ... | @@ -771,6 +783,7 @@ uint32_t ppc_load_xer (CPUPPCState *env); |
771 | 783 | void ppc_store_xer (CPUPPCState *env, uint32_t value); |
772 | 784 | target_ulong do_load_msr (CPUPPCState *env); |
773 | 785 | void do_store_msr (CPUPPCState *env, target_ulong value); |
786 | +void ppc_store_msr_32 (CPUPPCState *env, uint32_t value); | |
774 | 787 | |
775 | 788 | void do_compute_hflags (CPUPPCState *env); |
776 | 789 | |
... | ... | @@ -787,6 +800,16 @@ void cpu_ppc_store_tbu (CPUPPCState *env, uint32_t value); |
787 | 800 | void cpu_ppc_store_tbl (CPUPPCState *env, uint32_t value); |
788 | 801 | uint32_t cpu_ppc_load_decr (CPUPPCState *env); |
789 | 802 | void cpu_ppc_store_decr (CPUPPCState *env, uint32_t value); |
803 | +uint32_t cpu_ppc601_load_rtcl (CPUPPCState *env); | |
804 | +uint32_t cpu_ppc601_load_rtcu (CPUPPCState *env); | |
805 | +#if !defined(CONFIG_USER_ONLY) | |
806 | +void cpu_ppc601_store_rtcl (CPUPPCState *env, uint32_t value); | |
807 | +void cpu_ppc601_store_rtcu (CPUPPCState *env, uint32_t value); | |
808 | +target_ulong load_40x_pit (CPUPPCState *env); | |
809 | +void store_40x_pit (CPUPPCState *env, target_ulong val); | |
810 | +void store_booke_tcr (CPUPPCState *env, target_ulong val); | |
811 | +void store_booke_tsr (CPUPPCState *env, target_ulong val); | |
812 | +#endif | |
790 | 813 | #endif |
791 | 814 | |
792 | 815 | #define TARGET_PAGE_BITS 12 | ... | ... |
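cpu.h reserves a new instruction-set bit (PPC_203) and new BookE MMU/exception model codes. A CPU definition ORs such bits together and the rest of the code tests them with simple masks; the fragment below sketches that pattern with placeholder names (cpu_def, insns_flags and cpu_has_203 are illustrative, not identifiers from translate.c).

    #include <stdint.h>

    #define PPC_203 0x80000000 /* value taken from the hunk above */

    /* Hypothetical consumer of the PPC_203 feature bit. */
    struct cpu_def {
        uint32_t insns_flags; /* OR of PPC_* instruction-set bits */
    };

    /* Gate the new 2.03 instructions (isel, popcntb) on the feature bit. */
    static int cpu_has_203(const struct cpu_def *def)
    {
        return (def->insns_flags & PPC_203) != 0;
    }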
target-ppc/exec.h
... | ... | @@ -34,19 +34,25 @@ register struct CPUPPCState *env asm(AREG0); |
34 | 34 | #define T1 (env->t1) |
35 | 35 | #define T2 (env->t2) |
36 | 36 | #else |
37 | -/* This may be more efficient if HOST_LONG_BITS > TARGET_LONG_BITS | |
38 | - * To be set to one when we'll be sure it does not cause bugs.... | |
39 | - */ | |
40 | -#if 0 | |
41 | 37 | register unsigned long T0 asm(AREG1); |
42 | 38 | register unsigned long T1 asm(AREG2); |
43 | 39 | register unsigned long T2 asm(AREG3); |
44 | -#else | |
45 | -register target_ulong T0 asm(AREG1); | |
46 | -register target_ulong T1 asm(AREG2); | |
47 | -register target_ulong T2 asm(AREG3); | |
48 | 40 | #endif |
41 | +/* We may, sometime, need 64 bits registers on 32 bits target */ | |
42 | +#if defined(TARGET_PPC64) || (HOST_LONG_BITS == 64) | |
43 | +#define T0_64 T0 | |
44 | +#define T1_64 T1 | |
45 | +#define T2_64 T2 | |
46 | +#else | |
47 | +/* no registers can be used */ | |
48 | +#define T0_64 (env->t0) | |
49 | +#define T1_64 (env->t1) | |
50 | +#define T2_64 (env->t2) | |
49 | 51 | #endif |
52 | +/* Provision for Altivec */ | |
53 | +#define T0_avr (env->t0_avr) | |
54 | +#define T1_avr (env->t1_avr) | |
55 | +#define T2_avr (env->t2_avr) | |
50 | 56 | |
51 | 57 | /* XXX: to clean: remove this mess */ |
52 | 58 | #define PARAM(n) ((uint32_t)PARAM##n) | ... | ... |
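The exec.h hunk makes T0/T1/T2 plain unsigned long host registers again and introduces T0_64/T1_64/T2_64: when the target is PPC64 or the host long is already 64 bits wide, the 64-bit temporaries simply alias the register-allocated ones, otherwise they fall back to slots in the CPU state. A stripped-down sketch of that selection, with placeholder names and without the global register variables:

    #include <stdint.h>
    #include <limits.h>

    /* Placeholder CPU state and temporary; illustrative names only. */
    struct cpu_state { uint64_t t0; };
    static struct cpu_state state, *env = &state;
    static unsigned long T0; /* stands in for the register-allocated temp */

    #if ULONG_MAX >= UINT64_MAX
    /* Host long holds 64 bits: the 64-bit temporary aliases the fast one. */
    #define T0_64 T0
    #else
    /* 32-bit host needing a 64-bit value: fall back to a memory slot. */
    #define T0_64 (env->t0)
    #endif

    static uint64_t widen_demo(uint64_t v)
    {
        T0_64 = v;        /* the same source line works in either configuration */
        return T0_64 + 1;
    }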
target-ppc/helper.c
... | ... | @@ -37,12 +37,12 @@ |
37 | 37 | /*****************************************************************************/ |
38 | 38 | /* PowerPC MMU emulation */ |
39 | 39 | |
40 | -#if defined(CONFIG_USER_ONLY) | |
40 | +#if defined(CONFIG_USER_ONLY) | |
41 | 41 | int cpu_ppc_handle_mmu_fault (CPUState *env, uint32_t address, int rw, |
42 | 42 | int is_user, int is_softmmu) |
43 | 43 | { |
44 | 44 | int exception, error_code; |
45 | - | |
45 | + | |
46 | 46 | if (rw == 2) { |
47 | 47 | exception = EXCP_ISI; |
48 | 48 | error_code = 0; |
... | ... | @@ -277,7 +277,7 @@ static int ppc6xx_tlb_check (CPUState *env, mmu_ctx_t *ctx, |
277 | 277 | ppc_tlb_t *tlb; |
278 | 278 | int nr, best, way; |
279 | 279 | int ret; |
280 | - | |
280 | + | |
281 | 281 | best = -1; |
282 | 282 | ret = -1; /* No TLB found */ |
283 | 283 | for (way = 0; way < env->nb_ways; way++) { |
... | ... | @@ -672,7 +672,7 @@ int get_physical_address (CPUState *env, mmu_ctx_t *ctx, target_ulong eaddr, |
672 | 672 | if (loglevel > 0) { |
673 | 673 | fprintf(logfile, "%s\n", __func__); |
674 | 674 | } |
675 | -#endif | |
675 | +#endif | |
676 | 676 | if ((access_type == ACCESS_CODE && msr_ir == 0) || |
677 | 677 | (access_type != ACCESS_CODE && msr_dr == 0)) { |
678 | 678 | /* No address translation */ |
... | ... | @@ -693,7 +693,7 @@ int get_physical_address (CPUState *env, mmu_ctx_t *ctx, target_ulong eaddr, |
693 | 693 | __func__, eaddr, ctx->raddr); |
694 | 694 | } |
695 | 695 | #endif |
696 | - | |
696 | + | |
697 | 697 | return ret; |
698 | 698 | } |
699 | 699 | |
... | ... | @@ -715,7 +715,7 @@ int cpu_ppc_handle_mmu_fault (CPUState *env, uint32_t address, int rw, |
715 | 715 | int exception = 0, error_code = 0; |
716 | 716 | int access_type; |
717 | 717 | int ret = 0; |
718 | - | |
718 | + | |
719 | 719 | if (rw == 2) { |
720 | 720 | /* code access */ |
721 | 721 | rw = 0; |
... | ... | @@ -975,6 +975,21 @@ void do_store_dbatl (CPUPPCState *env, int nr, target_ulong value) |
975 | 975 | |
976 | 976 | /*****************************************************************************/ |
977 | 977 | /* Special registers manipulation */ |
978 | +#if defined(TARGET_PPC64) | |
979 | +target_ulong ppc_load_asr (CPUPPCState *env) | |
980 | +{ | |
981 | + return env->asr; | |
982 | +} | |
983 | + | |
984 | +void ppc_store_asr (CPUPPCState *env, target_ulong value) | |
985 | +{ | |
986 | + if (env->asr != value) { | |
987 | + env->asr = value; | |
988 | + tlb_flush(env, 1); | |
989 | + } | |
990 | +} | |
991 | +#endif | |
992 | + | |
978 | 993 | target_ulong do_load_sdr1 (CPUPPCState *env) |
979 | 994 | { |
980 | 995 | return env->sdr1; |
... | ... | @@ -1039,7 +1054,7 @@ void ppc_store_xer (CPUPPCState *env, uint32_t value) |
1039 | 1054 | xer_ov = (value >> XER_OV) & 0x01; |
1040 | 1055 | xer_ca = (value >> XER_CA) & 0x01; |
1041 | 1056 | xer_cmp = (value >> XER_CMP) & 0xFF; |
1042 | - xer_bc = (value >> XER_BC) & 0x3F; | |
1057 | + xer_bc = (value >> XER_BC) & 0x7F; | |
1043 | 1058 | } |
1044 | 1059 | |
1045 | 1060 | /* Swap temporary saved registers with GPRs */ |
... | ... | @@ -1066,34 +1081,34 @@ target_ulong do_load_msr (CPUPPCState *env) |
1066 | 1081 | { |
1067 | 1082 | return |
1068 | 1083 | #if defined (TARGET_PPC64) |
1069 | - (msr_sf << MSR_SF) | | |
1070 | - (msr_isf << MSR_ISF) | | |
1071 | - (msr_hv << MSR_HV) | | |
1084 | + ((target_ulong)msr_sf << MSR_SF) | | |
1085 | + ((target_ulong)msr_isf << MSR_ISF) | | |
1086 | + ((target_ulong)msr_hv << MSR_HV) | | |
1072 | 1087 | #endif |
1073 | - (msr_ucle << MSR_UCLE) | | |
1074 | - (msr_vr << MSR_VR) | /* VR / SPE */ | |
1075 | - (msr_ap << MSR_AP) | | |
1076 | - (msr_sa << MSR_SA) | | |
1077 | - (msr_key << MSR_KEY) | | |
1078 | - (msr_pow << MSR_POW) | /* POW / WE */ | |
1079 | - (msr_tlb << MSR_TLB) | /* TLB / TGPE / CE */ | |
1080 | - (msr_ile << MSR_ILE) | | |
1081 | - (msr_ee << MSR_EE) | | |
1082 | - (msr_pr << MSR_PR) | | |
1083 | - (msr_fp << MSR_FP) | | |
1084 | - (msr_me << MSR_ME) | | |
1085 | - (msr_fe0 << MSR_FE0) | | |
1086 | - (msr_se << MSR_SE) | /* SE / DWE / UBLE */ | |
1087 | - (msr_be << MSR_BE) | /* BE / DE */ | |
1088 | - (msr_fe1 << MSR_FE1) | | |
1089 | - (msr_al << MSR_AL) | | |
1090 | - (msr_ip << MSR_IP) | | |
1091 | - (msr_ir << MSR_IR) | /* IR / IS */ | |
1092 | - (msr_dr << MSR_DR) | /* DR / DS */ | |
1093 | - (msr_pe << MSR_PE) | /* PE / EP */ | |
1094 | - (msr_px << MSR_PX) | /* PX / PMM */ | |
1095 | - (msr_ri << MSR_RI) | | |
1096 | - (msr_le << MSR_LE); | |
1088 | + ((target_ulong)msr_ucle << MSR_UCLE) | | |
1089 | + ((target_ulong)msr_vr << MSR_VR) | /* VR / SPE */ | |
1090 | + ((target_ulong)msr_ap << MSR_AP) | | |
1091 | + ((target_ulong)msr_sa << MSR_SA) | | |
1092 | + ((target_ulong)msr_key << MSR_KEY) | | |
1093 | + ((target_ulong)msr_pow << MSR_POW) | /* POW / WE */ | |
1094 | + ((target_ulong)msr_tlb << MSR_TLB) | /* TLB / TGPE / CE */ | |
1095 | + ((target_ulong)msr_ile << MSR_ILE) | | |
1096 | + ((target_ulong)msr_ee << MSR_EE) | | |
1097 | + ((target_ulong)msr_pr << MSR_PR) | | |
1098 | + ((target_ulong)msr_fp << MSR_FP) | | |
1099 | + ((target_ulong)msr_me << MSR_ME) | | |
1100 | + ((target_ulong)msr_fe0 << MSR_FE0) | | |
1101 | + ((target_ulong)msr_se << MSR_SE) | /* SE / DWE / UBLE */ | |
1102 | + ((target_ulong)msr_be << MSR_BE) | /* BE / DE */ | |
1103 | + ((target_ulong)msr_fe1 << MSR_FE1) | | |
1104 | + ((target_ulong)msr_al << MSR_AL) | | |
1105 | + ((target_ulong)msr_ip << MSR_IP) | | |
1106 | + ((target_ulong)msr_ir << MSR_IR) | /* IR / IS */ | |
1107 | + ((target_ulong)msr_dr << MSR_DR) | /* DR / DS */ | |
1108 | + ((target_ulong)msr_pe << MSR_PE) | /* PE / EP */ | |
1109 | + ((target_ulong)msr_px << MSR_PX) | /* PX / PMM */ | |
1110 | + ((target_ulong)msr_ri << MSR_RI) | | |
1111 | + ((target_ulong)msr_le << MSR_LE); | |
1097 | 1112 | } |
1098 | 1113 | |
1099 | 1114 | void do_store_msr (CPUPPCState *env, target_ulong value) |
... | ... | @@ -1156,6 +1171,17 @@ void do_store_msr (CPUPPCState *env, target_ulong value) |
1156 | 1171 | |
1157 | 1172 | enter_pm = 0; |
1158 | 1173 | switch (PPC_EXCP(env)) { |
1174 | + case PPC_FLAGS_EXCP_603: | |
1175 | + /* Don't handle SLEEP mode: we should disable all clocks... | |
1176 | + * No dynamic power-management. | |
1177 | + */ | |
1178 | + if (msr_pow == 1 && (env->spr[SPR_HID0] & 0x00C00000) != 0) | |
1179 | + enter_pm = 1; | |
1180 | + break; | |
1181 | + case PPC_FLAGS_EXCP_604: | |
1182 | + if (msr_pow == 1) | |
1183 | + enter_pm = 1; | |
1184 | + break; | |
1159 | 1185 | case PPC_FLAGS_EXCP_7x0: |
1160 | 1186 | if (msr_pow == 1 && (env->spr[SPR_HID0] & 0x00E00000) != 0) |
1161 | 1187 | enter_pm = 1; |
... | ... | @@ -1171,15 +1197,22 @@ void do_store_msr (CPUPPCState *env, target_ulong value) |
1171 | 1197 | } |
1172 | 1198 | } |
1173 | 1199 | |
1200 | +#if defined(TARGET_PPC64) | |
1201 | +void ppc_store_msr_32 (CPUPPCState *env, target_ulong value) | |
1202 | +{ | |
1203 | + do_store_msr(env, (uint32_t)value); | |
1204 | +} | |
1205 | +#endif | |
1206 | + | |
1174 | 1207 | void do_compute_hflags (CPUPPCState *env) |
1175 | 1208 | { |
1176 | 1209 | /* Compute current hflags */ |
1177 | 1210 | env->hflags = (msr_pr << MSR_PR) | (msr_le << MSR_LE) | |
1178 | 1211 | (msr_fp << MSR_FP) | (msr_fe0 << MSR_FE0) | (msr_fe1 << MSR_FE1) | |
1179 | - (msr_vr << MSR_VR) | (msr_ap << MSR_AP) | (msr_sa << MSR_SA) | | |
1212 | + (msr_vr << MSR_VR) | (msr_ap << MSR_AP) | (msr_sa << MSR_SA) | | |
1180 | 1213 | (msr_se << MSR_SE) | (msr_be << MSR_BE); |
1181 | 1214 | #if defined (TARGET_PPC64) |
1182 | - env->hflags |= (msr_sf << MSR_SF) | (msr_hv << MSR_HV); | |
1215 | + env->hflags |= (msr_sf << (MSR_SF - 32)) | (msr_hv << (MSR_HV - 32)); | |
1183 | 1216 | #endif |
1184 | 1217 | } |
1185 | 1218 | |
... | ... | @@ -1193,8 +1226,8 @@ void do_interrupt (CPUState *env) |
1193 | 1226 | #else /* defined (CONFIG_USER_ONLY) */ |
1194 | 1227 | static void dump_syscall(CPUState *env) |
1195 | 1228 | { |
1196 | - fprintf(logfile, "syscall r0=0x%08x r3=0x%08x r4=0x%08x " | |
1197 | - "r5=0x%08x r6=0x%08x nip=0x%08x\n", | |
1229 | + fprintf(logfile, "syscall r0=0x" REGX " r3=0x" REGX " r4=0x" REGX | |
1230 | + " r5=0x" REGX " r6=0x" REGX " nip=0x" REGX "\n", | |
1198 | 1231 | env->gpr[0], env->gpr[3], env->gpr[4], |
1199 | 1232 | env->gpr[5], env->gpr[6], env->nip); |
1200 | 1233 | } | ... | ... |
target-ppc/op.c
... | ... | @@ -26,9 +26,6 @@ |
26 | 26 | |
27 | 27 | /* XXX: this is to be suppressed */ |
28 | 28 | #define regs (env) |
29 | -#define Ts0 (int32_t)T0 | |
30 | -#define Ts1 (int32_t)T1 | |
31 | -#define Ts2 (int32_t)T2 | |
32 | 29 | |
33 | 30 | #define FT0 (env->ft0) |
34 | 31 | #define FT1 (env->ft1) |
... | ... | @@ -157,15 +154,31 @@ void OPPROTO op_reset_T0 (void) |
157 | 154 | |
158 | 155 | PPC_OP(set_T0) |
159 | 156 | { |
160 | - T0 = PARAM(1); | |
157 | + T0 = (uint32_t)PARAM1; | |
161 | 158 | RETURN(); |
162 | 159 | } |
163 | 160 | |
161 | +#if defined(TARGET_PPC64) | |
162 | +void OPPROTO op_set_T0_64 (void) | |
163 | +{ | |
164 | + T0 = ((uint64_t)PARAM1 << 32) | (uint64_t)PARAM2; | |
165 | + RETURN(); | |
166 | +} | |
167 | +#endif | |
168 | + | |
164 | 169 | PPC_OP(set_T1) |
165 | 170 | { |
166 | - T1 = PARAM(1); | |
171 | + T1 = (uint32_t)PARAM1; | |
172 | + RETURN(); | |
173 | +} | |
174 | + | |
175 | +#if defined(TARGET_PPC64) | |
176 | +void OPPROTO op_set_T1_64 (void) | |
177 | +{ | |
178 | + T1 = ((uint64_t)PARAM1 << 32) | (uint64_t)PARAM2; | |
167 | 179 | RETURN(); |
168 | 180 | } |
181 | +#endif | |
169 | 182 | |
170 | 183 | #if 0 // unused |
171 | 184 | PPC_OP(set_T2) |
... | ... | @@ -181,6 +194,12 @@ void OPPROTO op_move_T1_T0 (void) |
181 | 194 | RETURN(); |
182 | 195 | } |
183 | 196 | |
197 | +void OPPROTO op_move_T2_T0 (void) | |
198 | +{ | |
199 | + T2 = T0; | |
200 | + RETURN(); | |
201 | +} | |
202 | + | |
184 | 203 | /* Generate exceptions */ |
185 | 204 | PPC_OP(raise_exception_err) |
186 | 205 | { |
... | ... | @@ -189,16 +208,23 @@ PPC_OP(raise_exception_err) |
189 | 208 | |
190 | 209 | PPC_OP(update_nip) |
191 | 210 | { |
192 | - env->nip = PARAM(1); | |
211 | + env->nip = (uint32_t)PARAM1; | |
193 | 212 | RETURN(); |
194 | 213 | } |
195 | 214 | |
215 | +#if defined(TARGET_PPC64) | |
216 | +void OPPROTO op_update_nip_64 (void) | |
217 | +{ | |
218 | + env->nip = ((uint64_t)PARAM1 << 32) | (uint64_t)PARAM2; | |
219 | + RETURN(); | |
220 | +} | |
221 | +#endif | |
222 | + | |
196 | 223 | PPC_OP(debug) |
197 | 224 | { |
198 | 225 | do_raise_exception(EXCP_DEBUG); |
199 | 226 | } |
200 | 227 | |
201 | - | |
202 | 228 | PPC_OP(exit_tb) |
203 | 229 | { |
204 | 230 | EXIT_TB(); |
... | ... | @@ -293,6 +319,20 @@ PPC_OP(store_sdr1) |
293 | 319 | RETURN(); |
294 | 320 | } |
295 | 321 | |
322 | +#if defined (TARGET_PPC64) | |
323 | +void OPPROTO op_load_asr (void) | |
324 | +{ | |
325 | + T0 = env->asr; | |
326 | + RETURN(); | |
327 | +} | |
328 | + | |
329 | +void OPPROTO op_store_asr (void) | |
330 | +{ | |
331 | + ppc_store_asr(env, T0); | |
332 | + RETURN(); | |
333 | +} | |
334 | +#endif | |
335 | + | |
296 | 336 | PPC_OP(load_msr) |
297 | 337 | { |
298 | 338 | T0 = do_load_msr(env); |
... | ... | @@ -304,6 +344,14 @@ PPC_OP(store_msr) |
304 | 344 | do_store_msr(env, T0); |
305 | 345 | RETURN(); |
306 | 346 | } |
347 | + | |
348 | +#if defined (TARGET_PPC64) | |
349 | +void OPPROTO op_store_msr_32 (void) | |
350 | +{ | |
351 | + ppc_store_msr_32(env, T0); | |
352 | + RETURN(); | |
353 | +} | |
354 | +#endif | |
307 | 355 | #endif |
308 | 356 | |
309 | 357 | /* SPR */ |
... | ... | @@ -459,7 +507,7 @@ PPC_OP(getbit_T1) |
459 | 507 | |
460 | 508 | PPC_OP(setcrfbit) |
461 | 509 | { |
462 | - T1 = (T1 & PARAM(1)) | (T0 << PARAM(2)); | |
510 | + T1 = (T1 & PARAM(1)) | (T0 << PARAM(2)); | |
463 | 511 | RETURN(); |
464 | 512 | } |
465 | 513 | |
... | ... | @@ -468,10 +516,18 @@ PPC_OP(setcrfbit) |
468 | 516 | |
469 | 517 | PPC_OP(setlr) |
470 | 518 | { |
471 | - regs->lr = PARAM1; | |
519 | + regs->lr = (uint32_t)PARAM1; | |
472 | 520 | RETURN(); |
473 | 521 | } |
474 | 522 | |
523 | +#if defined (TARGET_PPC64) | |
524 | +void OPPROTO op_setlr_64 (void) | |
525 | +{ | |
526 | + regs->lr = ((uint64_t)PARAM1 << 32) | (uint64_t)PARAM2; | |
527 | + RETURN(); | |
528 | +} | |
529 | +#endif | |
530 | + | |
475 | 531 | PPC_OP(goto_tb0) |
476 | 532 | { |
477 | 533 | GOTO_TB(op_goto_tb0, PARAM1, 0); |
... | ... | @@ -482,12 +538,20 @@ PPC_OP(goto_tb1) |
482 | 538 | GOTO_TB(op_goto_tb1, PARAM1, 1); |
483 | 539 | } |
484 | 540 | |
485 | -PPC_OP(b_T1) | |
541 | +void OPPROTO op_b_T1 (void) | |
486 | 542 | { |
487 | - regs->nip = T1 & ~3; | |
543 | + regs->nip = (uint32_t)(T1 & ~3); | |
488 | 544 | RETURN(); |
489 | 545 | } |
490 | 546 | |
547 | +#if defined (TARGET_PPC64) | |
548 | +void OPPROTO op_b_T1_64 (void) | |
549 | +{ | |
550 | + regs->nip = (uint64_t)(T1 & ~3); | |
551 | + RETURN(); | |
552 | +} | |
553 | +#endif | |
554 | + | |
491 | 555 | PPC_OP(jz_T0) |
492 | 556 | { |
493 | 557 | if (!T0) |
... | ... | @@ -495,16 +559,28 @@ PPC_OP(jz_T0) |
495 | 559 | RETURN(); |
496 | 560 | } |
497 | 561 | |
498 | -PPC_OP(btest_T1) | |
562 | +void OPPROTO op_btest_T1 (void) | |
499 | 563 | { |
500 | 564 | if (T0) { |
501 | - regs->nip = T1 & ~3; | |
565 | + regs->nip = (uint32_t)(T1 & ~3); | |
502 | 566 | } else { |
503 | - regs->nip = PARAM1; | |
567 | + regs->nip = (uint32_t)PARAM1; | |
504 | 568 | } |
505 | 569 | RETURN(); |
506 | 570 | } |
507 | 571 | |
572 | +#if defined (TARGET_PPC64) | |
573 | +void OPPROTO op_btest_T1_64 (void) | |
574 | +{ | |
575 | + if (T0) { | |
576 | + regs->nip = (uint64_t)(T1 & ~3); | |
577 | + } else { | |
578 | + regs->nip = ((uint64_t)PARAM1 << 32) | (uint64_t)PARAM2; | |
579 | + } | |
580 | + RETURN(); | |
581 | +} | |
582 | +#endif | |
583 | + | |
508 | 584 | PPC_OP(movl_T1_ctr) |
509 | 585 | { |
510 | 586 | T1 = regs->ctr; |
... | ... | @@ -518,42 +594,89 @@ PPC_OP(movl_T1_lr) |
518 | 594 | } |
519 | 595 | |
520 | 596 | /* tests with result in T0 */ |
597 | +void OPPROTO op_test_ctr (void) | |
598 | +{ | |
599 | + T0 = (uint32_t)regs->ctr; | |
600 | + RETURN(); | |
601 | +} | |
521 | 602 | |
522 | -PPC_OP(test_ctr) | |
603 | +#if defined(TARGET_PPC64) | |
604 | +void OPPROTO op_test_ctr_64 (void) | |
523 | 605 | { |
524 | - T0 = regs->ctr; | |
606 | + T0 = (uint64_t)regs->ctr; | |
607 | + RETURN(); | |
608 | +} | |
609 | +#endif | |
610 | + | |
611 | +void OPPROTO op_test_ctr_true (void) | |
612 | +{ | |
613 | + T0 = ((uint32_t)regs->ctr != 0 && (T0 & PARAM1) != 0); | |
525 | 614 | RETURN(); |
526 | 615 | } |
527 | 616 | |
528 | -PPC_OP(test_ctr_true) | |
617 | +#if defined(TARGET_PPC64) | |
618 | +void OPPROTO op_test_ctr_true_64 (void) | |
529 | 619 | { |
530 | - T0 = (regs->ctr != 0 && (T0 & PARAM(1)) != 0); | |
620 | + T0 = ((uint64_t)regs->ctr != 0 && (T0 & PARAM1) != 0); | |
531 | 621 | RETURN(); |
532 | 622 | } |
623 | +#endif | |
533 | 624 | |
534 | -PPC_OP(test_ctr_false) | |
625 | +void OPPROTO op_test_ctr_false (void) | |
535 | 626 | { |
536 | - T0 = (regs->ctr != 0 && (T0 & PARAM(1)) == 0); | |
627 | + T0 = ((uint32_t)regs->ctr != 0 && (T0 & PARAM1) == 0); | |
537 | 628 | RETURN(); |
538 | 629 | } |
539 | 630 | |
540 | -PPC_OP(test_ctrz) | |
631 | +#if defined(TARGET_PPC64) | |
632 | +void OPPROTO op_test_ctr_false_64 (void) | |
541 | 633 | { |
542 | - T0 = (regs->ctr == 0); | |
634 | + T0 = ((uint64_t)regs->ctr != 0 && (T0 & PARAM1) == 0); | |
543 | 635 | RETURN(); |
544 | 636 | } |
637 | +#endif | |
638 | + | |
639 | +void OPPROTO op_test_ctrz (void) | |
640 | +{ | |
641 | + T0 = ((uint32_t)regs->ctr == 0); | |
642 | + RETURN(); | |
643 | +} | |
644 | + | |
645 | +#if defined(TARGET_PPC64) | |
646 | +void OPPROTO op_test_ctrz_64 (void) | |
647 | +{ | |
648 | + T0 = ((uint64_t)regs->ctr == 0); | |
649 | + RETURN(); | |
650 | +} | |
651 | +#endif | |
652 | + | |
653 | +void OPPROTO op_test_ctrz_true (void) | |
654 | +{ | |
655 | + T0 = ((uint32_t)regs->ctr == 0 && (T0 & PARAM1) != 0); | |
656 | + RETURN(); | |
657 | +} | |
658 | + | |
659 | +#if defined(TARGET_PPC64) | |
660 | +void OPPROTO op_test_ctrz_true_64 (void) | |
661 | +{ | |
662 | + T0 = ((uint64_t)regs->ctr == 0 && (T0 & PARAM1) != 0); | |
663 | + RETURN(); | |
664 | +} | |
665 | +#endif | |
545 | 666 | |
546 | -PPC_OP(test_ctrz_true) | |
667 | +void OPPROTO op_test_ctrz_false (void) | |
547 | 668 | { |
548 | - T0 = (regs->ctr == 0 && (T0 & PARAM(1)) != 0); | |
669 | + T0 = ((uint32_t)regs->ctr == 0 && (T0 & PARAM1) == 0); | |
549 | 670 | RETURN(); |
550 | 671 | } |
551 | 672 | |
552 | -PPC_OP(test_ctrz_false) | |
673 | +#if defined(TARGET_PPC64) | |
674 | +void OPPROTO op_test_ctrz_false_64 (void) | |
553 | 675 | { |
554 | - T0 = (regs->ctr == 0 && (T0 & PARAM(1)) == 0); | |
676 | + T0 = ((uint64_t)regs->ctr == 0 && (T0 & PARAM1) == 0); | |
555 | 677 | RETURN(); |
556 | 678 | } |
679 | +#endif | |
557 | 680 | |
558 | 681 | PPC_OP(test_true) |
559 | 682 | { |
... | ... | @@ -582,30 +705,52 @@ PPC_OP(add) |
582 | 705 | RETURN(); |
583 | 706 | } |
584 | 707 | |
585 | -void OPPROTO op_addo (void) | |
708 | +void OPPROTO op_check_addo (void) | |
586 | 709 | { |
587 | - do_addo(); | |
588 | - RETURN(); | |
710 | + if (likely(!(((uint32_t)T2 ^ (uint32_t)T1 ^ UINT32_MAX) & | |
711 | + ((uint32_t)T2 ^ (uint32_t)T0) & (1UL << 31)))) { | |
712 | + xer_ov = 0; | |
713 | + } else { | |
714 | + xer_so = 1; | |
715 | + xer_ov = 1; | |
716 | + } | |
589 | 717 | } |
590 | 718 | |
591 | -/* add carrying */ | |
592 | -PPC_OP(addc) | |
719 | +#if defined(TARGET_PPC64) | |
720 | +void OPPROTO op_check_addo_64 (void) | |
593 | 721 | { |
594 | - T2 = T0; | |
595 | - T0 += T1; | |
596 | - if (T0 < T2) { | |
597 | - xer_ca = 1; | |
722 | + if (likely(!(((uint64_t)T2 ^ (uint64_t)T1 ^ UINT64_MAX) & | |
723 | +              ((uint64_t)T2 ^ (uint64_t)T0) & (1ULL << 63)))) { | |
724 | + xer_ov = 0; | |
598 | 725 | } else { |
726 | + xer_so = 1; | |
727 | + xer_ov = 1; | |
728 | + } | |
729 | +} | |
730 | +#endif | |
731 | + | |
732 | +/* add carrying */ | |
733 | +void OPPROTO op_check_addc (void) | |
734 | +{ | |
735 | + if (likely((uint32_t)T0 >= (uint32_t)T2)) { | |
599 | 736 | xer_ca = 0; |
737 | + } else { | |
738 | + xer_ca = 1; | |
600 | 739 | } |
601 | 740 | RETURN(); |
602 | 741 | } |
603 | 742 | |
604 | -void OPPROTO op_addco (void) | |
743 | +#if defined(TARGET_PPC64) | |
744 | +void OPPROTO op_check_addc_64 (void) | |
605 | 745 | { |
606 | - do_addco(); | |
746 | + if (likely((uint64_t)T0 >= (uint64_t)T2)) { | |
747 | + xer_ca = 0; | |
748 | + } else { | |
749 | + xer_ca = 1; | |
750 | + } | |
607 | 751 | RETURN(); |
608 | 752 | } |
753 | +#endif | |
609 | 754 | |
610 | 755 | /* add extended */ |
611 | 756 | void OPPROTO op_adde (void) |
... | ... | @@ -614,11 +759,13 @@ void OPPROTO op_adde (void) |
614 | 759 | RETURN(); |
615 | 760 | } |
616 | 761 | |
617 | -PPC_OP(addeo) | |
762 | +#if defined(TARGET_PPC64) | |
763 | +void OPPROTO op_adde_64 (void) | |
618 | 764 | { |
619 | - do_addeo(); | |
765 | + do_adde_64(); | |
620 | 766 | RETURN(); |
621 | 767 | } |
768 | +#endif | |
622 | 769 | |
623 | 770 | /* add immediate */ |
624 | 771 | PPC_OP(addi) |
... | ... | @@ -627,28 +774,24 @@ PPC_OP(addi) |
627 | 774 | RETURN(); |
628 | 775 | } |
629 | 776 | |
630 | -/* add immediate carrying */ | |
631 | -PPC_OP(addic) | |
777 | +/* add to minus one extended */ | |
778 | +void OPPROTO op_add_me (void) | |
632 | 779 | { |
633 | - T1 = T0; | |
634 | - T0 += PARAM(1); | |
635 | - if (T0 < T1) { | |
780 | + T0 += xer_ca + (-1); | |
781 | + if (likely((uint32_t)T1 != 0)) | |
636 | 782 | xer_ca = 1; |
637 | - } else { | |
638 | - xer_ca = 0; | |
639 | - } | |
640 | 783 | RETURN(); |
641 | 784 | } |
642 | 785 | |
643 | -/* add to minus one extended */ | |
644 | -PPC_OP(addme) | |
786 | +#if defined(TARGET_PPC64) | |
787 | +void OPPROTO op_add_me_64 (void) | |
645 | 788 | { |
646 | - T1 = T0; | |
647 | 789 | T0 += xer_ca + (-1); |
648 | - if (T1 != 0) | |
790 | + if (likely((uint64_t)T1 != 0)) | |
649 | 791 | xer_ca = 1; |
650 | 792 | RETURN(); |
651 | 793 | } |
794 | +#endif | |
652 | 795 | |
653 | 796 | void OPPROTO op_addmeo (void) |
654 | 797 | { |
... | ... | @@ -656,35 +799,43 @@ void OPPROTO op_addmeo (void) |
656 | 799 | RETURN(); |
657 | 800 | } |
658 | 801 | |
802 | +void OPPROTO op_addmeo_64 (void) | |
803 | +{ | |
804 | + do_addmeo(); | |
805 | + RETURN(); | |
806 | +} | |
807 | + | |
659 | 808 | /* add to zero extended */ |
660 | -PPC_OP(addze) | |
809 | +void OPPROTO op_add_ze (void) | |
661 | 810 | { |
662 | - T1 = T0; | |
663 | 811 | T0 += xer_ca; |
664 | - if (T0 < T1) { | |
665 | - xer_ca = 1; | |
666 | - } else { | |
667 | - xer_ca = 0; | |
668 | - } | |
669 | 812 | RETURN(); |
670 | 813 | } |
671 | 814 | |
672 | -void OPPROTO op_addzeo (void) | |
815 | +/* divide word */ | |
816 | +void OPPROTO op_divw (void) | |
673 | 817 | { |
674 | - do_addzeo(); | |
818 | + if (unlikely(((int32_t)T0 == INT32_MIN && (int32_t)T1 == -1) || | |
819 | + (int32_t)T1 == 0)) { | |
820 | + T0 = (int32_t)((-1) * ((uint32_t)T0 >> 31)); | |
821 | + } else { | |
822 | + T0 = (int32_t)T0 / (int32_t)T1; | |
823 | + } | |
675 | 824 | RETURN(); |
676 | 825 | } |
677 | 826 | |
678 | -/* divide word */ | |
679 | -PPC_OP(divw) | |
827 | +#if defined(TARGET_PPC64) | |
828 | +void OPPROTO op_divd (void) | |
680 | 829 | { |
681 | - if ((Ts0 == INT32_MIN && Ts1 == -1) || Ts1 == 0) { | |
682 | - T0 = (int32_t)((-1) * (T0 >> 31)); | |
830 | + if (unlikely(((int64_t)T0 == INT64_MIN && (int64_t)T1 == -1) || | |
831 | + (int64_t)T1 == 0)) { | |
832 | + T0 = (int64_t)((-1ULL) * ((uint64_t)T0 >> 63)); | |
683 | 833 | } else { |
684 | - T0 = (Ts0 / Ts1); | |
834 | + T0 = (int64_t)T0 / (int64_t)T1; | |
685 | 835 | } |
686 | 836 | RETURN(); |
687 | 837 | } |
838 | +#endif | |
688 | 839 | |
689 | 840 | void OPPROTO op_divwo (void) |
690 | 841 | { |
... | ... | @@ -692,16 +843,36 @@ void OPPROTO op_divwo (void) |
692 | 843 | RETURN(); |
693 | 844 | } |
694 | 845 | |
846 | +#if defined(TARGET_PPC64) | |
847 | +void OPPROTO op_divdo (void) | |
848 | +{ | |
849 | + do_divdo(); | |
850 | + RETURN(); | |
851 | +} | |
852 | +#endif | |
853 | + | |
695 | 854 | /* divide word unsigned */ |
696 | -PPC_OP(divwu) | |
855 | +void OPPROTO op_divwu (void) | |
856 | +{ | |
857 | + if (unlikely(T1 == 0)) { | |
858 | + T0 = 0; | |
859 | + } else { | |
860 | + T0 = (uint32_t)T0 / (uint32_t)T1; | |
861 | + } | |
862 | + RETURN(); | |
863 | +} | |
864 | + | |
865 | +#if defined(TARGET_PPC64) | |
866 | +void OPPROTO op_divdu (void) | |
697 | 867 | { |
698 | - if (T1 == 0) { | |
868 | + if (unlikely(T1 == 0)) { | |
699 | 869 | T0 = 0; |
700 | 870 | } else { |
701 | 871 | T0 /= T1; |
702 | 872 | } |
703 | 873 | RETURN(); |
704 | 874 | } |
875 | +#endif | |
705 | 876 | |
706 | 877 | void OPPROTO op_divwuo (void) |
707 | 878 | { |
... | ... | @@ -709,33 +880,71 @@ void OPPROTO op_divwuo (void) |
709 | 880 | RETURN(); |
710 | 881 | } |
711 | 882 | |
883 | +#if defined(TARGET_PPC64) | |
884 | +void OPPROTO op_divduo (void) | |
885 | +{ | |
886 | + do_divduo(); | |
887 | + RETURN(); | |
888 | +} | |
889 | +#endif | |
890 | + | |
712 | 891 | /* multiply high word */ |
713 | -PPC_OP(mulhw) | |
892 | +void OPPROTO op_mulhw (void) | |
714 | 893 | { |
715 | - T0 = ((int64_t)Ts0 * (int64_t)Ts1) >> 32; | |
894 | + T0 = ((int64_t)((int32_t)T0) * (int64_t)((int32_t)T1)) >> 32; | |
716 | 895 | RETURN(); |
717 | 896 | } |
718 | 897 | |
898 | +#if defined(TARGET_PPC64) | |
899 | +void OPPROTO op_mulhd (void) | |
900 | +{ | |
901 | + uint64_t tl, th; | |
902 | + | |
903 | + do_imul64(&tl, &th); | |
904 | + T0 = th; | |
905 | + RETURN(); | |
906 | +} | |
907 | +#endif | |
908 | + | |
719 | 909 | /* multiply high word unsigned */ |
720 | -PPC_OP(mulhwu) | |
910 | +void OPPROTO op_mulhwu (void) | |
721 | 911 | { |
722 | - T0 = ((uint64_t)T0 * (uint64_t)T1) >> 32; | |
912 | + T0 = ((uint64_t)(uint32_t)T0 * (uint64_t)(uint32_t)T1) >> 32; | |
723 | 913 | RETURN(); |
724 | 914 | } |
725 | 915 | |
916 | +#if defined(TARGET_PPC64) | |
917 | +void OPPROTO op_mulhdu (void) | |
918 | +{ | |
919 | + uint64_t tl, th; | |
920 | + | |
921 | + do_mul64(&tl, &th); | |
922 | + T0 = th; | |
923 | + RETURN(); | |
924 | +} | |
925 | +#endif | |
926 | + | |
726 | 927 | /* multiply low immediate */ |
727 | 928 | PPC_OP(mulli) |
728 | 929 | { |
729 | - T0 = (Ts0 * SPARAM(1)); | |
930 | + T0 = ((int32_t)T0 * (int32_t)PARAM1); | |
730 | 931 | RETURN(); |
731 | 932 | } |
732 | 933 | |
733 | 934 | /* multiply low word */ |
734 | 935 | PPC_OP(mullw) |
735 | 936 | { |
937 | + T0 = (int32_t)(T0 * T1); | |
938 | + RETURN(); | |
939 | +} | |
940 | + | |
941 | +#if defined(TARGET_PPC64) | |
942 | +void OPPROTO op_mulld (void) | |
943 | +{ | |
736 | 944 | T0 *= T1; |
737 | 945 | RETURN(); |
738 | 946 | } |
947 | +#endif | |
739 | 948 | |
740 | 949 | void OPPROTO op_mullwo (void) |
741 | 950 | { |
... | ... | @@ -743,21 +952,47 @@ void OPPROTO op_mullwo (void) |
743 | 952 | RETURN(); |
744 | 953 | } |
745 | 954 | |
955 | +#if defined(TARGET_PPC64) | |
956 | +void OPPROTO op_mulldo (void) | |
957 | +{ | |
958 | + do_mulldo(); | |
959 | + RETURN(); | |
960 | +} | |
961 | +#endif | |
962 | + | |
746 | 963 | /* negate */ |
747 | -PPC_OP(neg) | |
964 | +void OPPROTO op_neg (void) | |
748 | 965 | { |
749 | - if (T0 != 0x80000000) { | |
750 | - T0 = -Ts0; | |
966 | + if (likely(T0 != INT32_MIN)) { | |
967 | + T0 = -(int32_t)T0; | |
751 | 968 | } |
752 | 969 | RETURN(); |
753 | 970 | } |
754 | 971 | |
972 | +#if defined(TARGET_PPC64) | |
973 | +void OPPROTO op_neg_64 (void) | |
974 | +{ | |
975 | + if (likely(T0 != INT64_MIN)) { | |
976 | + T0 = -(int64_t)T0; | |
977 | + } | |
978 | + RETURN(); | |
979 | +} | |
980 | +#endif | |
981 | + | |
755 | 982 | void OPPROTO op_nego (void) |
756 | 983 | { |
757 | 984 | do_nego(); |
758 | 985 | RETURN(); |
759 | 986 | } |
760 | 987 | |
988 | +#if defined(TARGET_PPC64) | |
989 | +void OPPROTO op_nego_64 (void) | |
990 | +{ | |
991 | + do_nego_64(); | |
992 | + RETURN(); | |
993 | +} | |
994 | +#endif | |
995 | + | |
761 | 996 | /* substract from */ |
762 | 997 | PPC_OP(subf) |
763 | 998 | { |
... | ... | @@ -765,29 +1000,54 @@ PPC_OP(subf) |
765 | 1000 | RETURN(); |
766 | 1001 | } |
767 | 1002 | |
768 | -void OPPROTO op_subfo (void) | |
1003 | +void OPPROTO op_check_subfo (void) | |
1004 | +{ | |
1005 | + if (likely(!(((uint32_t)(~T2) ^ (uint32_t)T1 ^ UINT32_MAX) & | |
1006 | + ((uint32_t)(~T2) ^ (uint32_t)T0) & (1UL << 31)))) { | |
1007 | + xer_ov = 0; | |
1008 | + } else { | |
1009 | + xer_so = 1; | |
1010 | + xer_ov = 1; | |
1011 | + } | |
1012 | + RETURN(); | |
1013 | +} | |
1014 | + | |
1015 | +#if defined(TARGET_PPC64) | |
1016 | +void OPPROTO op_check_subfo_64 (void) | |
769 | 1017 | { |
770 | - do_subfo(); | |
1018 | + if (likely(!(((uint64_t)(~T2) ^ (uint64_t)T1 ^ UINT64_MAX) & | |
1019 | + ((uint64_t)(~T2) ^ (uint64_t)T0) & (1ULL << 63)))) { | |
1020 | + xer_ov = 0; | |
1021 | + } else { | |
1022 | + xer_so = 1; | |
1023 | + xer_ov = 1; | |
1024 | + } | |
771 | 1025 | RETURN(); |
772 | 1026 | } |
1027 | +#endif | |
773 | 1028 | |
774 | 1029 | /* substract from carrying */ |
775 | -PPC_OP(subfc) | |
1030 | +void OPPROTO op_check_subfc (void) | |
776 | 1031 | { |
777 | - T0 = T1 - T0; | |
778 | - if (T0 <= T1) { | |
779 | - xer_ca = 1; | |
780 | - } else { | |
1032 | + if (likely((uint32_t)T0 > (uint32_t)T1)) { | |
781 | 1033 | xer_ca = 0; |
1034 | + } else { | |
1035 | + xer_ca = 1; | |
782 | 1036 | } |
783 | 1037 | RETURN(); |
784 | 1038 | } |
785 | 1039 | |
786 | -void OPPROTO op_subfco (void) | |
1040 | +#if defined(TARGET_PPC64) | |
1041 | +void OPPROTO op_check_subfc_64 (void) | |
787 | 1042 | { |
788 | - do_subfco(); | |
1043 | + if (likely((uint64_t)T0 > (uint64_t)T1)) { | |
1044 | + xer_ca = 0; | |
1045 | + } else { | |
1046 | + xer_ca = 1; | |
1047 | + } | |
789 | 1048 | RETURN(); |
790 | 1049 | } |
1050 | +#endif | |
791 | 1051 | |
792 | 1052 | /* substract from extended */ |
793 | 1053 | void OPPROTO op_subfe (void) |
... | ... | @@ -796,17 +1056,19 @@ void OPPROTO op_subfe (void) |
796 | 1056 | RETURN(); |
797 | 1057 | } |
798 | 1058 | |
799 | -PPC_OP(subfeo) | |
1059 | +#if defined(TARGET_PPC64) | |
1060 | +void OPPROTO op_subfe_64 (void) | |
800 | 1061 | { |
801 | - do_subfeo(); | |
1062 | + do_subfe_64(); | |
802 | 1063 | RETURN(); |
803 | 1064 | } |
1065 | +#endif | |
804 | 1066 | |
805 | 1067 | /* substract from immediate carrying */ |
806 | -PPC_OP(subfic) | |
1068 | +void OPPROTO op_subfic (void) | |
807 | 1069 | { |
808 | - T0 = PARAM(1) + ~T0 + 1; | |
809 | - if (T0 <= PARAM(1)) { | |
1070 | + T0 = PARAM1 + ~T0 + 1; | |
1071 | + if ((uint32_t)T0 <= (uint32_t)PARAM1) { | |
810 | 1072 | xer_ca = 1; |
811 | 1073 | } else { |
812 | 1074 | xer_ca = 0; |
... | ... | @@ -814,15 +1076,37 @@ PPC_OP(subfic) |
814 | 1076 | RETURN(); |
815 | 1077 | } |
816 | 1078 | |
1079 | +#if defined(TARGET_PPC64) | |
1080 | +void OPPROTO op_subfic_64 (void) | |
1081 | +{ | |
1082 | + T0 = PARAM1 + ~T0 + 1; | |
1083 | + if ((uint64_t)T0 <= (uint64_t)PARAM1) { | |
1084 | + xer_ca = 1; | |
1085 | + } else { | |
1086 | + xer_ca = 0; | |
1087 | + } | |
1088 | + RETURN(); | |
1089 | +} | |
1090 | +#endif | |
1091 | + | |
817 | 1092 | /* substract from minus one extended */ |
818 | -PPC_OP(subfme) | |
1093 | +void OPPROTO op_subfme (void) | |
819 | 1094 | { |
820 | 1095 | T0 = ~T0 + xer_ca - 1; |
1096 | + if (likely((uint32_t)T0 != (uint32_t)-1)) | |
1097 | + xer_ca = 1; | |
1098 | + RETURN(); | |
1099 | +} | |
821 | 1100 | |
822 | - if (T0 != -1) | |
1101 | +#if defined(TARGET_PPC64) | |
1102 | +void OPPROTO op_subfme_64 (void) | |
1103 | +{ | |
1104 | + T0 = ~T0 + xer_ca - 1; | |
1105 | + if (likely((uint64_t)T0 != (uint64_t)-1)) | |
823 | 1106 | xer_ca = 1; |
824 | 1107 | RETURN(); |
825 | 1108 | } |
1109 | +#endif | |
826 | 1110 | |
827 | 1111 | void OPPROTO op_subfmeo (void) |
828 | 1112 | { |
... | ... | @@ -830,12 +1114,20 @@ void OPPROTO op_subfmeo (void) |
830 | 1114 | RETURN(); |
831 | 1115 | } |
832 | 1116 | |
1117 | +#if defined(TARGET_PPC64) | |
1118 | +void OPPROTO op_subfmeo_64 (void) | |
1119 | +{ | |
1120 | + do_subfmeo_64(); | |
1121 | + RETURN(); | |
1122 | +} | |
1123 | +#endif | |
1124 | + | |
833 | 1125 | /* substract from zero extended */ |
834 | -PPC_OP(subfze) | |
1126 | +void OPPROTO op_subfze (void) | |
835 | 1127 | { |
836 | 1128 | T1 = ~T0; |
837 | 1129 | T0 = T1 + xer_ca; |
838 | - if (T0 < T1) { | |
1130 | + if ((uint32_t)T0 < (uint32_t)T1) { | |
839 | 1131 | xer_ca = 1; |
840 | 1132 | } else { |
841 | 1133 | xer_ca = 0; |
... | ... | @@ -843,32 +1135,68 @@ PPC_OP(subfze) |
843 | 1135 | RETURN(); |
844 | 1136 | } |
845 | 1137 | |
1138 | +#if defined(TARGET_PPC64) | |
1139 | +void OPPROTO op_subfze_64 (void) | |
1140 | +{ | |
1141 | + T1 = ~T0; | |
1142 | + T0 = T1 + xer_ca; | |
1143 | + if ((uint64_t)T0 < (uint64_t)T1) { | |
1144 | + xer_ca = 1; | |
1145 | + } else { | |
1146 | + xer_ca = 0; | |
1147 | + } | |
1148 | + RETURN(); | |
1149 | +} | |
1150 | +#endif | |
1151 | + | |
846 | 1152 | void OPPROTO op_subfzeo (void) |
847 | 1153 | { |
848 | 1154 | do_subfzeo(); |
849 | 1155 | RETURN(); |
850 | 1156 | } |
851 | 1157 | |
1158 | +#if defined(TARGET_PPC64) | |
1159 | +void OPPROTO op_subfzeo_64 (void) | |
1160 | +{ | |
1161 | + do_subfzeo_64(); | |
1162 | + RETURN(); | |
1163 | +} | |
1164 | +#endif | |
1165 | + | |
852 | 1166 | /*** Integer comparison ***/ |
853 | 1167 | /* compare */ |
854 | -PPC_OP(cmp) | |
1168 | +void OPPROTO op_cmp (void) | |
1169 | +{ | |
1170 | + if ((int32_t)T0 < (int32_t)T1) { | |
1171 | + T0 = 0x08; | |
1172 | + } else if ((int32_t)T0 > (int32_t)T1) { | |
1173 | + T0 = 0x04; | |
1174 | + } else { | |
1175 | + T0 = 0x02; | |
1176 | + } | |
1177 | + RETURN(); | |
1178 | +} | |
1179 | + | |
1180 | +#if defined(TARGET_PPC64) | |
1181 | +void OPPROTO op_cmp_64 (void) | |
855 | 1182 | { |
856 | - if (Ts0 < Ts1) { | |
1183 | + if ((int64_t)T0 < (int64_t)T1) { | |
857 | 1184 | T0 = 0x08; |
858 | - } else if (Ts0 > Ts1) { | |
1185 | + } else if ((int64_t)T0 > (int64_t)T1) { | |
859 | 1186 | T0 = 0x04; |
860 | 1187 | } else { |
861 | 1188 | T0 = 0x02; |
862 | 1189 | } |
863 | 1190 | RETURN(); |
864 | 1191 | } |
1192 | +#endif | |
865 | 1193 | |
866 | 1194 | /* compare immediate */ |
867 | -PPC_OP(cmpi) | |
1195 | +void OPPROTO op_cmpi (void) | |
868 | 1196 | { |
869 | - if (Ts0 < SPARAM(1)) { | |
1197 | + if ((int32_t)T0 < (int32_t)PARAM1) { | |
870 | 1198 | T0 = 0x08; |
871 | - } else if (Ts0 > SPARAM(1)) { | |
1199 | + } else if ((int32_t)T0 > (int32_t)PARAM1) { | |
872 | 1200 | T0 = 0x04; |
873 | 1201 | } else { |
874 | 1202 | T0 = 0x02; |
... | ... | @@ -876,12 +1204,26 @@ PPC_OP(cmpi) |
876 | 1204 | RETURN(); |
877 | 1205 | } |
878 | 1206 | |
1207 | +#if defined(TARGET_PPC64) | |
1208 | +void OPPROTO op_cmpi_64 (void) | |
1209 | +{ | |
1210 | + if ((int64_t)T0 < (int64_t)((int32_t)PARAM1)) { | |
1211 | + T0 = 0x08; | |
1212 | + } else if ((int64_t)T0 > (int64_t)((int32_t)PARAM1)) { | |
1213 | + T0 = 0x04; | |
1214 | + } else { | |
1215 | + T0 = 0x02; | |
1216 | + } | |
1217 | + RETURN(); | |
1218 | +} | |
1219 | +#endif | |
1220 | + | |
879 | 1221 | /* compare logical */ |
880 | -PPC_OP(cmpl) | |
1222 | +void OPPROTO op_cmpl (void) | |
881 | 1223 | { |
882 | - if (T0 < T1) { | |
1224 | + if ((uint32_t)T0 < (uint32_t)T1) { | |
883 | 1225 | T0 = 0x08; |
884 | - } else if (T0 > T1) { | |
1226 | + } else if ((uint32_t)T0 > (uint32_t)T1) { | |
885 | 1227 | T0 = 0x04; |
886 | 1228 | } else { |
887 | 1229 | T0 = 0x02; |
... | ... | @@ -889,18 +1231,69 @@ PPC_OP(cmpl) |
889 | 1231 | RETURN(); |
890 | 1232 | } |
891 | 1233 | |
1234 | +#if defined(TARGET_PPC64) | |
1235 | +void OPPROTO op_cmpl_64 (void) | |
1236 | +{ | |
1237 | + if ((uint64_t)T0 < (uint64_t)T1) { | |
1238 | + T0 = 0x08; | |
1239 | + } else if ((uint64_t)T0 > (uint64_t)T1) { | |
1240 | + T0 = 0x04; | |
1241 | + } else { | |
1242 | + T0 = 0x02; | |
1243 | + } | |
1244 | + RETURN(); | |
1245 | +} | |
1246 | +#endif | |
1247 | + | |
892 | 1248 | /* compare logical immediate */ |
893 | -PPC_OP(cmpli) | |
1249 | +void OPPROTO op_cmpli (void) | |
1250 | +{ | |
1251 | + if ((uint32_t)T0 < (uint32_t)PARAM1) { | |
1252 | + T0 = 0x08; | |
1253 | + } else if ((uint32_t)T0 > (uint32_t)PARAM1) { | |
1254 | + T0 = 0x04; | |
1255 | + } else { | |
1256 | + T0 = 0x02; | |
1257 | + } | |
1258 | + RETURN(); | |
1259 | +} | |
1260 | + | |
1261 | +#if defined(TARGET_PPC64) | |
1262 | +void OPPROTO op_cmpli_64 (void) | |
894 | 1263 | { |
895 | - if (T0 < PARAM(1)) { | |
1264 | + if ((uint64_t)T0 < (uint64_t)PARAM1) { | |
896 | 1265 | T0 = 0x08; |
897 | - } else if (T0 > PARAM(1)) { | |
1266 | + } else if ((uint64_t)T0 > (uint64_t)PARAM1) { | |
898 | 1267 | T0 = 0x04; |
899 | 1268 | } else { |
900 | 1269 | T0 = 0x02; |
901 | 1270 | } |
902 | 1271 | RETURN(); |
903 | 1272 | } |
1273 | +#endif | |
1274 | + | |
1275 | +void OPPROTO op_isel (void) | |
1276 | +{ | |
1277 | + if (T0) | |
1278 | + T0 = T1; | |
1279 | + else | |
1280 | + T0 = T2; | |
1281 | + RETURN(); | |
1282 | +} | |
1283 | + | |
1284 | +void OPPROTO op_popcntb (void) | |
1285 | +{ | |
1286 | + do_popcntb(); | |
1287 | + RETURN(); | |
1288 | +} | |
1289 | + | |
1290 | +#if defined(TARGET_PPC64) | |
1291 | +void OPPROTO op_popcntb_64 (void) | |
1292 | +{ | |
1293 | + do_popcntb_64(); | |
1294 | + RETURN(); | |
1295 | +} | |
1296 | +#endif | |
904 | 1297 | |
905 | 1298 | /*** Integer logical ***/ |
906 | 1299 | /* and */ |
... | ... | @@ -963,6 +1356,80 @@ void OPPROTO op_cntlzw (void) |
963 | 1356 | RETURN(); |
964 | 1357 | } |
965 | 1358 | |
1359 | +#if defined(TARGET_PPC64) | |
1360 | +void OPPROTO op_cntlzd (void) | |
1361 | +{ | |
1362 | +#if HOST_LONG_BITS == 64 | |
1363 | + int cnt; | |
1364 | + | |
1365 | + cnt = 0; | |
1366 | + if (!(T0 & 0xFFFFFFFF00000000ULL)) { | |
1367 | + cnt += 32; | |
1368 | + T0 <<= 32; | |
1369 | + } | |
1370 | + if (!(T0 & 0xFFFF000000000000ULL)) { | |
1371 | + cnt += 16; | |
1372 | + T0 <<= 16; | |
1373 | + } | |
1374 | + if (!(T0 & 0xFF00000000000000ULL)) { | |
1375 | + cnt += 8; | |
1376 | + T0 <<= 8; | |
1377 | + } | |
1378 | + if (!(T0 & 0xF000000000000000ULL)) { | |
1379 | + cnt += 4; | |
1380 | + T0 <<= 4; | |
1381 | + } | |
1382 | + if (!(T0 & 0xC000000000000000ULL)) { | |
1383 | + cnt += 2; | |
1384 | + T0 <<= 2; | |
1385 | + } | |
1386 | + if (!(T0 & 0x8000000000000000ULL)) { | |
1387 | + cnt++; | |
1388 | + T0 <<= 1; | |
1389 | + } | |
1390 | + if (!(T0 & 0x8000000000000000ULL)) { | |
1391 | + cnt++; | |
1392 | + } | |
1393 | + T0 = cnt; | |
1394 | +#else | |
1395 | + uint32_t tmp; | |
1396 | + | |
1397 | + /* Make it easier on 32 bits host machines */ | |
1398 | + if (!(T0 >> 32)) { | |
1399 | + tmp = T0; | |
1400 | + T0 = 32; | |
1401 | + } else { | |
1402 | + tmp = T0 >> 32; | |
1403 | + T0 = 0; | |
1404 | + } | |
1405 | + if (!(tmp & 0xFFFF0000UL)) { | |
1406 | + T0 += 16; | |
1407 | + tmp <<= 16; | |
1408 | + } | |
1409 | + if (!(tmp & 0xFF000000UL)) { | |
1410 | + T0 += 8; | |
1411 | + tmp <<= 8; | |
1412 | + } | |
1413 | + if (!(tmp & 0xF0000000UL)) { | |
1414 | + T0 += 4; | |
1415 | + tmp <<= 4; | |
1416 | + } | |
1417 | + if (!(tmp & 0xC0000000UL)) { | |
1418 | + T0 += 2; | |
1419 | + tmp <<= 2; | |
1420 | + } | |
1421 | + if (!(tmp & 0x80000000UL)) { | |
1422 | + T0++; | |
1423 | + tmp <<= 1; | |
1424 | + } | |
1425 | + if (!(tmp & 0x80000000UL)) { | |
1426 | + T0++; | |
1427 | + } | |
1428 | +#endif | |
1429 | + RETURN(); | |
1430 | +} | |
1431 | +#endif | |
1432 | + | |
966 | 1433 | /* eqv */ |
967 | 1434 | PPC_OP(eqv) |
968 | 1435 | { |
... | ... | @@ -971,19 +1438,35 @@ PPC_OP(eqv) |
971 | 1438 | } |
972 | 1439 | |
973 | 1440 | /* extend sign byte */ |
974 | -PPC_OP(extsb) | |
1441 | +void OPPROTO op_extsb (void) | |
975 | 1442 | { |
976 | - T0 = (int32_t)((int8_t)(Ts0)); | |
1443 | +#if defined (TARGET_PPC64) | |
1444 | + T0 = (int64_t)((int8_t)T0); | |
1445 | +#else | |
1446 | + T0 = (int32_t)((int8_t)T0); | |
1447 | +#endif | |
977 | 1448 | RETURN(); |
978 | 1449 | } |
979 | 1450 | |
980 | 1451 | /* extend sign half word */ |
981 | -PPC_OP(extsh) | |
1452 | +void OPPROTO op_extsh (void) | |
982 | 1453 | { |
983 | - T0 = (int32_t)((int16_t)(Ts0)); | |
1454 | +#if defined (TARGET_PPC64) | |
1455 | + T0 = (int64_t)((int16_t)T0); | |
1456 | +#else | |
1457 | + T0 = (int32_t)((int16_t)T0); | |
1458 | +#endif | |
984 | 1459 | RETURN(); |
985 | 1460 | } |
986 | 1461 | |
1462 | +#if defined (TARGET_PPC64) | |
1463 | +void OPPROTO op_extsw (void) | |
1464 | +{ | |
1465 | + T0 = (int64_t)((int32_t)T0); | |
1466 | + RETURN(); | |
1467 | +} | |
1468 | +#endif | |
1469 | + | |
987 | 1470 | /* nand */ |
988 | 1471 | PPC_OP(nand) |
989 | 1472 | { |
... | ... | @@ -1048,15 +1531,27 @@ void OPPROTO op_rotli32_T0 (void) |
1048 | 1531 | |
1049 | 1532 | /*** Integer shift ***/ |
1050 | 1533 | /* shift left word */ |
1051 | -PPC_OP(slw) | |
1534 | +void OPPROTO op_slw (void) | |
1052 | 1535 | { |
1053 | 1536 | if (T1 & 0x20) { |
1054 | 1537 | T0 = 0; |
1055 | 1538 | } else { |
1539 | + T0 = (uint32_t)(T0 << T1); | |
1540 | + } | |
1541 | + RETURN(); | |
1542 | +} | |
1543 | + | |
1544 | +#if defined(TARGET_PPC64) | |
1545 | +void OPPROTO op_sld (void) | |
1546 | +{ | |
1547 | + if (T1 & 0x40) { | |
1548 | + T0 = 0; | |
1549 | + } else { | |
1056 | 1550 | T0 = T0 << T1; |
1057 | 1551 | } |
1058 | 1552 | RETURN(); |
1059 | 1553 | } |
1554 | +#endif | |
1060 | 1555 | |
1061 | 1556 | /* shift right algebraic word */ |
1062 | 1557 | void OPPROTO op_sraw (void) |
... | ... | @@ -1065,12 +1560,21 @@ void OPPROTO op_sraw (void) |
1065 | 1560 | RETURN(); |
1066 | 1561 | } |
1067 | 1562 | |
1563 | +#if defined(TARGET_PPC64) | |
1564 | +void OPPROTO op_srad (void) | |
1565 | +{ | |
1566 | + do_srad(); | |
1567 | + RETURN(); | |
1568 | +} | |
1569 | +#endif | |
1570 | + | |
1068 | 1571 | /* shift right algebraic word immediate */ |
1069 | -PPC_OP(srawi) | |
1572 | +void OPPROTO op_srawi (void) | |
1070 | 1573 | { |
1071 | - T1 = T0; | |
1072 | - T0 = (Ts0 >> PARAM(1)); | |
1073 | - if (Ts1 < 0 && (Ts1 & PARAM(2)) != 0) { | |
1574 | + uint32_t mask = (uint32_t)PARAM2; | |
1575 | + | |
1576 | + T0 = (int32_t)T0 >> PARAM1; | |
1577 | + if ((int32_t)T1 < 0 && (T1 & mask) != 0) { | |
1074 | 1578 | xer_ca = 1; |
1075 | 1579 | } else { |
1076 | 1580 | xer_ca = 0; |
... | ... | @@ -1078,16 +1582,43 @@ PPC_OP(srawi) |
1078 | 1582 | RETURN(); |
1079 | 1583 | } |
1080 | 1584 | |
1585 | +#if defined(TARGET_PPC64) | |
1586 | +void OPPROTO op_sradi (void) | |
1587 | +{ | |
1588 | + uint64_t mask = ((uint64_t)PARAM2 << 32) | (uint64_t)PARAM3; | |
1589 | + | |
1590 | + T0 = (int64_t)T0 >> PARAM1; | |
1591 | + if ((int64_t)T1 < 0 && ((uint64_t)T1 & mask) != 0) { | |
1592 | + xer_ca = 1; | |
1593 | + } else { | |
1594 | + xer_ca = 0; | |
1595 | + } | |
1596 | + RETURN(); | |
1597 | +} | |
1598 | +#endif | |
1599 | + | |
1081 | 1600 | /* shift right word */ |
1082 | -PPC_OP(srw) | |
1601 | +void OPPROTO op_srw (void) | |
1083 | 1602 | { |
1084 | 1603 | if (T1 & 0x20) { |
1085 | 1604 | T0 = 0; |
1086 | 1605 | } else { |
1087 | - T0 = T0 >> T1; | |
1606 | + T0 = (uint32_t)T0 >> T1; | |
1607 | + } | |
1608 | + RETURN(); | |
1609 | +} | |
1610 | + | |
1611 | +#if defined(TARGET_PPC64) | |
1612 | +void OPPROTO op_srd (void) | |
1613 | +{ | |
1614 | + if (T1 & 0x40) { | |
1615 | + T0 = 0; | |
1616 | + } else { | |
1617 | + T0 = (uint64_t)T0 >> T1; | |
1088 | 1618 | } |
1089 | 1619 | RETURN(); |
1090 | 1620 | } |
1621 | +#endif | |
1091 | 1622 | |
1092 | 1623 | void OPPROTO op_sl_T0_T1 (void) |
1093 | 1624 | { |
... | ... | @@ -1103,22 +1634,46 @@ void OPPROTO op_sli_T0 (void) |
1103 | 1634 | |
1104 | 1635 | void OPPROTO op_srl_T0_T1 (void) |
1105 | 1636 | { |
1106 | - T0 = T0 >> T1; | |
1637 | + T0 = (uint32_t)T0 >> T1; | |
1638 | + RETURN(); | |
1639 | +} | |
1640 | + | |
1641 | +#if defined(TARGET_PPC64) | |
1642 | +void OPPROTO op_srl_T0_T1_64 (void) | |
1643 | +{ | |
1644 | + T0 = (uint64_t)T0 >> T1; | |
1107 | 1645 | RETURN(); |
1108 | 1646 | } |
1647 | +#endif | |
1109 | 1648 | |
1110 | 1649 | void OPPROTO op_srli_T0 (void) |
1111 | 1650 | { |
1112 | - T0 = T0 >> PARAM1; | |
1651 | + T0 = (uint32_t)T0 >> PARAM1; | |
1113 | 1652 | RETURN(); |
1114 | 1653 | } |
1115 | 1654 | |
1655 | +#if defined(TARGET_PPC64) | |
1656 | +void OPPROTO op_srli_T0_64 (void) | |
1657 | +{ | |
1658 | + T0 = (uint64_t)T0 >> PARAM1; | |
1659 | + RETURN(); | |
1660 | +} | |
1661 | +#endif | |
1662 | + | |
1116 | 1663 | void OPPROTO op_srli_T1 (void) |
1117 | 1664 | { |
1118 | - T1 = T1 >> PARAM1; | |
1665 | + T1 = (uint32_t)T1 >> PARAM1; | |
1119 | 1666 | RETURN(); |
1120 | 1667 | } |
1121 | 1668 | |
1669 | +#if defined(TARGET_PPC64) | |
1670 | +void OPPROTO op_srli_T1_64 (void) | |
1671 | +{ | |
1672 | + T1 = (uint64_t)T1 >> PARAM1; | |
1673 | + RETURN(); | |
1674 | +} | |
1675 | +#endif | |
1676 | + | |
1122 | 1677 | /*** Floating-Point arithmetic ***/ |
1123 | 1678 | /* fadd - fadd. */ |
1124 | 1679 | PPC_OP(fadd) |
... | ... | @@ -1281,13 +1836,22 @@ PPC_OP(fneg) |
1281 | 1836 | #endif |
1282 | 1837 | |
1283 | 1838 | /* Special op to check and maybe clear reservation */ |
1284 | -PPC_OP(check_reservation) | |
1839 | +void OPPROTO op_check_reservation (void) | |
1285 | 1840 | { |
1286 | 1841 | if ((uint32_t)env->reserve == (uint32_t)(T0 & ~0x00000003)) |
1287 | 1842 | env->reserve = -1; |
1288 | 1843 | RETURN(); |
1289 | 1844 | } |
1290 | 1845 | |
1846 | +#if defined(TARGET_PPC64) | |
1847 | +void OPPROTO op_check_reservation_64 (void) | |
1848 | +{ | |
1849 | + if ((uint64_t)env->reserve == (uint64_t)(T0 & ~0x00000003)) | |
1850 | + env->reserve = -1; | |
1851 | + RETURN(); | |
1852 | +} | |
1853 | +#endif | |
1854 | + | |
1291 | 1855 | /* Return from interrupt */ |
1292 | 1856 | #if !defined(CONFIG_USER_ONLY) |
1293 | 1857 | void OPPROTO op_rfi (void) |
... | ... | @@ -1295,6 +1859,14 @@ void OPPROTO op_rfi (void) |
1295 | 1859 | do_rfi(); |
1296 | 1860 | RETURN(); |
1297 | 1861 | } |
1862 | + | |
1863 | +#if defined(TARGET_PPC64) | |
1864 | +void OPPROTO op_rfi_32 (void) | |
1865 | +{ | |
1866 | + do_rfi_32(); | |
1867 | + RETURN(); | |
1868 | +} | |
1869 | +#endif | |
1298 | 1870 | #endif |
1299 | 1871 | |
1300 | 1872 | /* Trap word */ |
... | ... | @@ -1304,13 +1876,29 @@ void OPPROTO op_tw (void) |
1304 | 1876 | RETURN(); |
1305 | 1877 | } |
1306 | 1878 | |
1879 | +#if defined(TARGET_PPC64) | |
1880 | +void OPPROTO op_td (void) | |
1881 | +{ | |
1882 | + do_td(PARAM1); | |
1883 | + RETURN(); | |
1884 | +} | |
1885 | +#endif | |
1886 | + | |
1307 | 1887 | /* Instruction cache block invalidate */ |
1308 | -PPC_OP(icbi) | |
1888 | +void OPPROTO op_icbi (void) | |
1309 | 1889 | { |
1310 | 1890 | do_icbi(); |
1311 | 1891 | RETURN(); |
1312 | 1892 | } |
1313 | 1893 | |
1894 | +#if defined(TARGET_PPC64) | |
1895 | +void OPPROTO op_icbi_64 (void) | |
1896 | +{ | |
1897 | + do_icbi_64(); | |
1898 | + RETURN(); | |
1899 | +} | |
1900 | +#endif | |
1901 | + | |
1314 | 1902 | #if !defined(CONFIG_USER_ONLY) |
1315 | 1903 | /* tlbia */ |
1316 | 1904 | PPC_OP(tlbia) |
... | ... | @@ -1320,11 +1908,33 @@ PPC_OP(tlbia) |
1320 | 1908 | } |
1321 | 1909 | |
1322 | 1910 | /* tlbie */ |
1323 | -PPC_OP(tlbie) | |
1911 | +void OPPROTO op_tlbie (void) | |
1324 | 1912 | { |
1325 | 1913 | do_tlbie(); |
1326 | 1914 | RETURN(); |
1327 | 1915 | } |
1916 | + | |
1917 | +#if defined(TARGET_PPC64) | |
1918 | +void OPPROTO op_tlbie_64 (void) | |
1919 | +{ | |
1920 | + do_tlbie_64(); | |
1921 | + RETURN(); | |
1922 | +} | |
1923 | +#endif | |
1924 | + | |
1925 | +#if defined(TARGET_PPC64) | |
1926 | +void OPPROTO op_slbia (void) | |
1927 | +{ | |
1928 | + do_slbia(); | |
1929 | + RETURN(); | |
1930 | +} | |
1931 | + | |
1932 | +void OPPROTO op_slbie (void) | |
1933 | +{ | |
1934 | + do_slbie(); | |
1935 | + RETURN(); | |
1936 | +} | |
1937 | +#endif | |
1328 | 1938 | #endif |
1329 | 1939 | |
1330 | 1940 | /* PowerPC 602/603/755 software TLB load instructions */ |
... | ... | @@ -1343,14 +1953,12 @@ void OPPROTO op_6xx_tlbli (void) |
1343 | 1953 | #endif |
1344 | 1954 | |
1345 | 1955 | /* 601 specific */ |
1346 | -uint32_t cpu_ppc601_load_rtcl (CPUState *env); | |
1347 | 1956 | void OPPROTO op_load_601_rtcl (void) |
1348 | 1957 | { |
1349 | 1958 | T0 = cpu_ppc601_load_rtcl(env); |
1350 | 1959 | RETURN(); |
1351 | 1960 | } |
1352 | 1961 | |
1353 | -uint32_t cpu_ppc601_load_rtcu (CPUState *env); | |
1354 | 1962 | void OPPROTO op_load_601_rtcu (void) |
1355 | 1963 | { |
1356 | 1964 | T0 = cpu_ppc601_load_rtcu(env); |
... | ... | @@ -1358,14 +1966,12 @@ void OPPROTO op_load_601_rtcu (void) |
1358 | 1966 | } |
1359 | 1967 | |
1360 | 1968 | #if !defined(CONFIG_USER_ONLY) |
1361 | -void cpu_ppc601_store_rtcl (CPUState *env, uint32_t value); | |
1362 | 1969 | void OPPROTO op_store_601_rtcl (void) |
1363 | 1970 | { |
1364 | 1971 | cpu_ppc601_store_rtcl(env, T0); |
1365 | 1972 | RETURN(); |
1366 | 1973 | } |
1367 | 1974 | |
1368 | -void cpu_ppc601_store_rtcu (CPUState *env, uint32_t value); | |
1369 | 1975 | void OPPROTO op_store_601_rtcu (void) |
1370 | 1976 | { |
1371 | 1977 | cpu_ppc601_store_rtcu(env, T0); |
... | ... | @@ -1449,7 +2055,7 @@ void OPPROTO op_POWER_divso (void) |
1449 | 2055 | |
1450 | 2056 | void OPPROTO op_POWER_doz (void) |
1451 | 2057 | { |
1452 | - if (Ts1 > Ts0) | |
2058 | + if ((int32_t)T1 > (int32_t)T0) | |
1453 | 2059 | T0 = T1 - T0; |
1454 | 2060 | else |
1455 | 2061 | T0 = 0; |
... | ... | @@ -1580,7 +2186,7 @@ void OPPROTO op_POWER_sraq (void) |
1580 | 2186 | if (T1 & 0x20UL) |
1581 | 2187 | T0 = -1L; |
1582 | 2188 | else |
1583 | - T0 = Ts0 >> T1; | |
2189 | + T0 = (int32_t)T0 >> T1; | |
1584 | 2190 | RETURN(); |
1585 | 2191 | } |
1586 | 2192 | |
... | ... | @@ -1588,7 +2194,7 @@ void OPPROTO op_POWER_sre (void) |
1588 | 2194 | { |
1589 | 2195 | T1 &= 0x1FUL; |
1590 | 2196 | env->spr[SPR_MQ] = rotl32(T0, 32 - T1); |
1591 | - T0 = Ts0 >> T1; | |
2197 | + T0 = (int32_t)T0 >> T1; | |
1592 | 2198 | RETURN(); |
1593 | 2199 | } |
1594 | 2200 | |
... | ... | @@ -1596,7 +2202,7 @@ void OPPROTO op_POWER_srea (void) |
1596 | 2202 | { |
1597 | 2203 | T1 &= 0x1FUL; |
1598 | 2204 | env->spr[SPR_MQ] = T0 >> T1; |
1599 | - T0 = Ts0 >> T1; | |
2205 | + T0 = (int32_t)T0 >> T1; | |
1600 | 2206 | RETURN(); |
1601 | 2207 | } |
1602 | 2208 | |
... | ... | @@ -1848,28 +2454,24 @@ void OPPROTO op_store_403_pb (void) |
1848 | 2454 | RETURN(); |
1849 | 2455 | } |
1850 | 2456 | |
1851 | -target_ulong load_40x_pit (CPUState *env); | |
1852 | 2457 | void OPPROTO op_load_40x_pit (void) |
1853 | 2458 | { |
1854 | 2459 | T0 = load_40x_pit(env); |
1855 | 2460 | RETURN(); |
1856 | 2461 | } |
1857 | 2462 | |
1858 | -void store_40x_pit (CPUState *env, target_ulong val); | |
1859 | 2463 | void OPPROTO op_store_40x_pit (void) |
1860 | 2464 | { |
1861 | 2465 | store_40x_pit(env, T0); |
1862 | 2466 | RETURN(); |
1863 | 2467 | } |
1864 | 2468 | |
1865 | -void store_booke_tcr (CPUState *env, target_ulong val); | |
1866 | 2469 | void OPPROTO op_store_booke_tcr (void) |
1867 | 2470 | { |
1868 | 2471 | store_booke_tcr(env, T0); |
1869 | 2472 | RETURN(); |
1870 | 2473 | } |
1871 | 2474 | |
1872 | -void store_booke_tsr (CPUState *env, target_ulong val); | |
1873 | 2475 | void OPPROTO op_store_booke_tsr (void) |
1874 | 2476 | { |
1875 | 2477 | store_booke_tsr(env, T0); |
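op_check_addo and op_check_subfo above centralise the overflow test that the removed do_addo()/do_subfo() helpers used to perform: with T2 holding the saved first operand, T1 the second and T0 the result, signed overflow occurred exactly when the operands share a sign and the result's sign differs. The standalone sketch below restates the identity on plain uint32_t values and checks a few cases (names are local to the sketch):

    #include <stdint.h>
    #include <assert.h>

    /* Signed-overflow test used by op_check_addo, restated for r = a + b:
     * overflow iff ((a ^ b ^ ~0) & (a ^ r)) has the sign bit set, i.e. the
     * operands agree in sign and the result does not. */
    static int add_overflows(uint32_t a, uint32_t b)
    {
        uint32_t r = a + b;

        return (((a ^ b ^ UINT32_MAX) & (a ^ r)) & (1U << 31)) != 0;
    }

    int main(void)
    {
        assert(add_overflows(0x7FFFFFFFU, 1U));          /* INT32_MAX + 1 */
        assert(!add_overflows(0xFFFFFFFFU, 1U));         /* -1 + 1 = 0    */
        assert(add_overflows(0x80000000U, 0x80000000U)); /* two INT32_MINs */
        return 0;
    }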
target-ppc/op_helper.c
... | ... | @@ -33,10 +33,6 @@ |
33 | 33 | //#define DEBUG_SOFTWARE_TLB |
34 | 34 | //#define FLUSH_ALL_TLBS |
35 | 35 | |
36 | -#define Ts0 (long)((target_long)T0) | |
37 | -#define Ts1 (long)((target_long)T1) | |
38 | -#define Ts2 (long)((target_long)T2) | |
39 | - | |
40 | 36 | /*****************************************************************************/ |
41 | 37 | /* Exceptions processing helpers */ |
42 | 38 | void cpu_loop_exit (void) |
... | ... | @@ -106,7 +102,7 @@ void do_store_xer (void) |
106 | 102 | xer_ov = (T0 >> XER_OV) & 0x01; |
107 | 103 | xer_ca = (T0 >> XER_CA) & 0x01; |
108 | 104 | xer_cmp = (T0 >> XER_CMP) & 0xFF; |
109 | - xer_bc = (T0 >> XER_BC) & 0x3F; | |
105 | + xer_bc = (T0 >> XER_BC) & 0x7F; | |
110 | 106 | } |
111 | 107 | |
112 | 108 | void do_load_fpscr (void) |
... | ... | @@ -122,7 +118,7 @@ void do_load_fpscr (void) |
122 | 118 | } u; |
123 | 119 | int i; |
124 | 120 | |
125 | -#ifdef WORDS_BIGENDIAN | |
121 | +#if defined(WORDS_BIGENDIAN) | |
126 | 122 | #define WORD0 0 |
127 | 123 | #define WORD1 1 |
128 | 124 | #else |
... | ... | @@ -182,68 +178,110 @@ void do_store_fpscr (uint32_t mask) |
182 | 178 | |
183 | 179 | /*****************************************************************************/ |
184 | 180 | /* Fixed point operations helpers */ |
185 | -void do_addo (void) | |
181 | +#if defined(TARGET_PPC64) | |
182 | +static void add128 (uint64_t *plow, uint64_t *phigh, uint64_t a, uint64_t b) | |
186 | 183 | { |
187 | - T2 = T0; | |
188 | - T0 += T1; | |
189 | - if (likely(!((T2 ^ T1 ^ (-1)) & (T2 ^ T0) & (1 << 31)))) { | |
190 | - xer_ov = 0; | |
191 | - } else { | |
192 | - xer_so = 1; | |
193 | - xer_ov = 1; | |
194 | - } | |
184 | + *plow += a; | |
185 | + /* carry test */ | |
186 | + if (*plow < a) | |
187 | + (*phigh)++; | |
188 | + *phigh += b; | |
195 | 189 | } |
196 | 190 | |
197 | -void do_addco (void) | |
191 | +static void neg128 (uint64_t *plow, uint64_t *phigh) | |
198 | 192 | { |
199 | - T2 = T0; | |
200 | - T0 += T1; | |
201 | - if (likely(T0 >= T2)) { | |
202 | - xer_ca = 0; | |
203 | - } else { | |
204 | - xer_ca = 1; | |
205 | - } | |
206 | - if (likely(!((T2 ^ T1 ^ (-1)) & (T2 ^ T0) & (1 << 31)))) { | |
207 | - xer_ov = 0; | |
208 | - } else { | |
209 | - xer_so = 1; | |
210 | - xer_ov = 1; | |
193 | + *plow = ~ *plow; | |
194 | + *phigh = ~ *phigh; | |
195 | + add128(plow, phigh, 1, 0); | |
196 | +} | |
197 | + | |
198 | +static void mul64 (uint64_t *plow, uint64_t *phigh, uint64_t a, uint64_t b) | |
199 | +{ | |
200 | + uint32_t a0, a1, b0, b1; | |
201 | + uint64_t v; | |
202 | + | |
203 | + a0 = a; | |
204 | + a1 = a >> 32; | |
205 | + | |
206 | + b0 = b; | |
207 | + b1 = b >> 32; | |
208 | + | |
209 | + v = (uint64_t)a0 * (uint64_t)b0; | |
210 | + *plow = v; | |
211 | + *phigh = 0; | |
212 | + | |
213 | + v = (uint64_t)a0 * (uint64_t)b1; | |
214 | + add128(plow, phigh, v << 32, v >> 32); | |
215 | + | |
216 | + v = (uint64_t)a1 * (uint64_t)b0; | |
217 | + add128(plow, phigh, v << 32, v >> 32); | |
218 | + | |
219 | + v = (uint64_t)a1 * (uint64_t)b1; | |
220 | + *phigh += v; | |
221 | +#if defined(DEBUG_MULDIV) | |
222 | + printf("mul: 0x%016llx * 0x%016llx = 0x%016llx%016llx\n", | |
223 | + a, b, *phigh, *plow); | |
224 | +#endif | |
225 | +} | |
226 | + | |
227 | +void do_mul64 (uint64_t *plow, uint64_t *phigh) | |
228 | +{ | |
229 | + mul64(plow, phigh, T0, T1); | |
230 | +} | |
231 | + | |
232 | +static void imul64(uint64_t *plow, uint64_t *phigh, int64_t a, int64_t b) | |
233 | +{ | |
234 | + int sa, sb; | |
235 | + sa = (a < 0); | |
236 | + if (sa) | |
237 | + a = -a; | |
238 | + sb = (b < 0); | |
239 | + if (sb) | |
240 | + b = -b; | |
241 | + mul64(plow, phigh, a, b); | |
242 | + if (sa ^ sb) { | |
243 | + neg128(plow, phigh); | |
211 | 244 | } |
212 | 245 | } |
213 | 246 | |
247 | +void do_imul64 (uint64_t *plow, uint64_t *phigh) | |
248 | +{ | |
249 | + imul64(plow, phigh, T0, T1); | |
250 | +} | |
251 | +#endif | |
252 | + | |
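The mul64/imul64 helpers above assemble the 128-bit product from four 32-bit partial products, propagating carries through add128. A minimal standalone sketch of the same decomposition (plain C; the name mul64_sketch is illustrative, not part of the patch):

    /* Sketch: 64x64 -> 128-bit unsigned multiply via 32-bit halves.
     * With a = a1:a0 and b = b1:b0, the product is
     * a0*b0 + ((a0*b1 + a1*b0) << 32) + ((a1*b1) << 64). */
    static void mul64_sketch(uint64_t *lo, uint64_t *hi, uint64_t a, uint64_t b)
    {
        uint32_t a0 = (uint32_t)a, a1 = (uint32_t)(a >> 32);
        uint32_t b0 = (uint32_t)b, b1 = (uint32_t)(b >> 32);
        uint64_t p, old;

        *lo = (uint64_t)a0 * b0;
        *hi = (uint64_t)a1 * b1;

        p = (uint64_t)a0 * b1;              /* cross product, weight 2^32 */
        old = *lo;
        *lo += p << 32;
        *hi += (p >> 32) + (*lo < old);     /* carry out of the low word */

        p = (uint64_t)a1 * b0;              /* second cross product */
        old = *lo;
        *lo += p << 32;
        *hi += (p >> 32) + (*lo < old);
    }

The signed variant only needs the sign handling shown in imul64: multiply the magnitudes, then negate the 128-bit result when exactly one operand was negative.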
214 | 253 | void do_adde (void) |
215 | 254 | { |
216 | 255 | T2 = T0; |
217 | 256 | T0 += T1 + xer_ca; |
218 | - if (likely(!(T0 < T2 || (xer_ca == 1 && T0 == T2)))) { | |
257 | + if (likely(!((uint32_t)T0 < (uint32_t)T2 || | |
258 | + (xer_ca == 1 && (uint32_t)T0 == (uint32_t)T2)))) { | |
219 | 259 | xer_ca = 0; |
220 | 260 | } else { |
221 | 261 | xer_ca = 1; |
222 | 262 | } |
223 | 263 | } |
224 | 264 | |
225 | -void do_addeo (void) | |
265 | +#if defined(TARGET_PPC64) | |
266 | +void do_adde_64 (void) | |
226 | 267 | { |
227 | 268 | T2 = T0; |
228 | 269 | T0 += T1 + xer_ca; |
229 | - if (likely(!(T0 < T2 || (xer_ca == 1 && T0 == T2)))) { | |
270 | + if (likely(!((uint64_t)T0 < (uint64_t)T2 || | |
271 | + (xer_ca == 1 && (uint64_t)T0 == (uint64_t)T2)))) { | |
230 | 272 | xer_ca = 0; |
231 | 273 | } else { |
232 | 274 | xer_ca = 1; |
233 | 275 | } |
234 | - if (likely(!((T2 ^ T1 ^ (-1)) & (T2 ^ T0) & (1 << 31)))) { | |
235 | - xer_ov = 0; | |
236 | - } else { | |
237 | - xer_so = 1; | |
238 | - xer_ov = 1; | |
239 | - } | |
240 | 276 | } |
277 | +#endif | |
241 | 278 | |
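The carry tests rewritten here rely on unsigned wrap-around: for a + b + ca truncated to the register width, a carry occurred exactly when the truncated sum is below the first addend, or equal to it while the incoming carry was set. A 32-bit sketch of that rule (illustrative name, not from the patch):

    /* Sketch: carry-out of a 32-bit add-with-carry, using the same
     * comparison pattern as do_adde. */
    static int carry_out32(uint32_t a, uint32_t b, int ca_in)
    {
        uint32_t sum = a + b + ca_in;
        return (sum < a) || (ca_in && sum == a);
    }

The 64-bit variants only change the casts, which is why the checks could move into common helpers.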
242 | 279 | void do_addmeo (void) |
243 | 280 | { |
244 | 281 | T1 = T0; |
245 | 282 | T0 += xer_ca + (-1); |
246 | - if (likely(!(T1 & (T1 ^ T0) & (1 << 31)))) { | |
283 | + if (likely(!((uint32_t)T1 & | |
284 | + ((uint32_t)T1 ^ (uint32_t)T0) & (1UL << 31)))) { | |
247 | 285 | xer_ov = 0; |
248 | 286 | } else { |
249 | 287 | xer_so = 1; |
... | ... | @@ -253,28 +291,29 @@ void do_addmeo (void) |
253 | 291 | xer_ca = 1; |
254 | 292 | } |
255 | 293 | |
256 | -void do_addzeo (void) | |
294 | +#if defined(TARGET_PPC64) | |
295 | +void do_addmeo_64 (void) | |
257 | 296 | { |
258 | 297 | T1 = T0; |
259 | - T0 += xer_ca; | |
260 | - if (likely(!((T1 ^ (-1)) & (T1 ^ T0) & (1 << 31)))) { | |
298 | + T0 += xer_ca + (-1); | |
299 | + if (likely(!((uint64_t)T1 & | |
300 | + ((uint64_t)T1 ^ (uint64_t)T0) & (1ULL << 63)))) { | |
261 | 301 | xer_ov = 0; |
262 | 302 | } else { |
263 | 303 | xer_so = 1; |
264 | 304 | xer_ov = 1; |
265 | 305 | } |
266 | - if (likely(T0 >= T1)) { | |
267 | - xer_ca = 0; | |
268 | - } else { | |
306 | + if (likely(T1 != 0)) | |
269 | 307 | xer_ca = 1; |
270 | - } | |
271 | 308 | } |
309 | +#endif | |
272 | 310 | |
273 | 311 | void do_divwo (void) |
274 | 312 | { |
275 | - if (likely(!((Ts0 == INT32_MIN && Ts1 == -1) || Ts1 == 0))) { | |
313 | + if (likely(!(((int32_t)T0 == INT32_MIN && (int32_t)T1 == -1) || | |
314 | + (int32_t)T1 == 0))) { | |
276 | 315 | xer_ov = 0; |
277 | - T0 = (Ts0 / Ts1); | |
316 | + T0 = (int32_t)T0 / (int32_t)T1; | |
278 | 317 | } else { |
279 | 318 | xer_so = 1; |
280 | 319 | xer_ov = 1; |
... | ... | @@ -282,6 +321,21 @@ void do_divwo (void) |
282 | 321 | } |
283 | 322 | } |
284 | 323 | |
324 | +#if defined(TARGET_PPC64) | |
325 | +void do_divdo (void) | |
326 | +{ | |
327 | + if (likely(!(((int64_t)T0 == INT64_MIN && (int64_t)T1 == -1ULL) || | |
328 | + (int64_t)T1 == 0))) { | |
329 | + xer_ov = 0; | |
330 | + T0 = (int64_t)T0 / (int64_t)T1; | |
331 | + } else { | |
332 | + xer_so = 1; | |
333 | + xer_ov = 1; | |
334 | + T0 = (-1ULL) * ((uint64_t)T0 >> 63); | |
335 | + } | |
336 | +} | |
337 | +#endif | |
338 | + | |
285 | 339 | void do_divwuo (void) |
286 | 340 | { |
287 | 341 | if (likely((uint32_t)T1 != 0)) { |
... | ... | @@ -294,9 +348,23 @@ void do_divwuo (void) |
294 | 348 | } |
295 | 349 | } |
296 | 350 | |
351 | +#if defined(TARGET_PPC64) | |
352 | +void do_divduo (void) | |
353 | +{ | |
354 | + if (likely((uint64_t)T1 != 0)) { | |
355 | + xer_ov = 0; | |
356 | + T0 = (uint64_t)T0 / (uint64_t)T1; | |
357 | + } else { | |
358 | + xer_so = 1; | |
359 | + xer_ov = 1; | |
360 | + T0 = 0; | |
361 | + } | |
362 | +} | |
363 | +#endif | |
364 | + | |
297 | 365 | void do_mullwo (void) |
298 | 366 | { |
299 | - int64_t res = (int64_t)Ts0 * (int64_t)Ts1; | |
367 | + int64_t res = (int64_t)T0 * (int64_t)T1; | |
300 | 368 | |
301 | 369 | if (likely((int32_t)res == res)) { |
302 | 370 | xer_ov = 0; |
... | ... | @@ -307,112 +375,148 @@ void do_mullwo (void) |
307 | 375 | T0 = (int32_t)res; |
308 | 376 | } |
309 | 377 | |
310 | -void do_nego (void) | |
378 | +#if defined(TARGET_PPC64) | |
379 | +void do_mulldo (void) | |
311 | 380 | { |
312 | - if (likely(T0 != INT32_MIN)) { | |
381 | + int64_t th; | |
382 | + uint64_t tl; | |
383 | + | |
384 | + do_imul64(&tl, &th); | |
385 | +    if (likely(th == ((int64_t)tl >> 63))) { | 
313 | 386 | xer_ov = 0; |
314 | - T0 = -Ts0; | |
315 | 387 | } else { |
316 | 388 | xer_ov = 1; |
317 | 389 | xer_so = 1; |
318 | 390 | } |
391 | + T0 = (int64_t)tl; | |
319 | 392 | } |
393 | +#endif | |
320 | 394 | |
321 | -void do_subfo (void) | |
395 | +void do_nego (void) | |
322 | 396 | { |
323 | - T2 = T0; | |
324 | - T0 = T1 - T0; | |
325 | - if (likely(!(((~T2) ^ T1 ^ (-1)) & ((~T2) ^ T0) & (1 << 31)))) { | |
397 | + if (likely((int32_t)T0 != INT32_MIN)) { | |
326 | 398 | xer_ov = 0; |
399 | + T0 = -(int32_t)T0; | |
327 | 400 | } else { |
328 | - xer_so = 1; | |
329 | 401 | xer_ov = 1; |
402 | + xer_so = 1; | |
330 | 403 | } |
331 | - RETURN(); | |
332 | 404 | } |
333 | 405 | |
334 | -void do_subfco (void) | |
406 | +#if defined(TARGET_PPC64) | |
407 | +void do_nego_64 (void) | |
335 | 408 | { |
336 | - T2 = T0; | |
337 | - T0 = T1 - T0; | |
338 | - if (likely(T0 > T1)) { | |
339 | - xer_ca = 0; | |
340 | - } else { | |
341 | - xer_ca = 1; | |
342 | - } | |
343 | - if (likely(!(((~T2) ^ T1 ^ (-1)) & ((~T2) ^ T0) & (1 << 31)))) { | |
409 | + if (likely((int64_t)T0 != INT64_MIN)) { | |
344 | 410 | xer_ov = 0; |
411 | + T0 = -(int64_t)T0; | |
345 | 412 | } else { |
346 | - xer_so = 1; | |
347 | 413 | xer_ov = 1; |
414 | + xer_so = 1; | |
348 | 415 | } |
349 | 416 | } |
417 | +#endif | |
350 | 418 | |
351 | 419 | void do_subfe (void) |
352 | 420 | { |
353 | 421 | T0 = T1 + ~T0 + xer_ca; |
354 | - if (likely(T0 >= T1 && (xer_ca == 0 || T0 != T1))) { | |
422 | + if (likely((uint32_t)T0 >= (uint32_t)T1 && | |
423 | + (xer_ca == 0 || (uint32_t)T0 != (uint32_t)T1))) { | |
355 | 424 | xer_ca = 0; |
356 | 425 | } else { |
357 | 426 | xer_ca = 1; |
358 | 427 | } |
359 | 428 | } |
360 | 429 | |
361 | -void do_subfeo (void) | |
430 | +#if defined(TARGET_PPC64) | |
431 | +void do_subfe_64 (void) | |
362 | 432 | { |
363 | - T2 = T0; | |
364 | 433 | T0 = T1 + ~T0 + xer_ca; |
365 | - if (likely(!((~T2 ^ T1 ^ (-1)) & (~T2 ^ T0) & (1 << 31)))) { | |
434 | + if (likely((uint64_t)T0 >= (uint64_t)T1 && | |
435 | + (xer_ca == 0 || (uint64_t)T0 != (uint64_t)T1))) { | |
436 | + xer_ca = 0; | |
437 | + } else { | |
438 | + xer_ca = 1; | |
439 | + } | |
440 | +} | |
441 | +#endif | |
442 | + | |
443 | +void do_subfmeo (void) | |
444 | +{ | |
445 | + T1 = T0; | |
446 | + T0 = ~T0 + xer_ca - 1; | |
447 | + if (likely(!((uint32_t)~T1 & ((uint32_t)~T1 ^ (uint32_t)T0) & | |
448 | + (1UL << 31)))) { | |
366 | 449 | xer_ov = 0; |
367 | 450 | } else { |
368 | 451 | xer_so = 1; |
369 | 452 | xer_ov = 1; |
370 | 453 | } |
371 | - if (likely(T0 >= T1 && (xer_ca == 0 || T0 != T1))) { | |
372 | - xer_ca = 0; | |
373 | - } else { | |
454 | + if (likely((uint32_t)T1 != UINT32_MAX)) | |
374 | 455 | xer_ca = 1; |
375 | - } | |
376 | 456 | } |
377 | 457 | |
378 | -void do_subfmeo (void) | |
458 | +#if defined(TARGET_PPC64) | |
459 | +void do_subfmeo_64 (void) | |
379 | 460 | { |
380 | 461 | T1 = T0; |
381 | 462 | T0 = ~T0 + xer_ca - 1; |
382 | - if (likely(!(~T1 & (~T1 ^ T0) & (1 << 31)))) { | |
463 | + if (likely(!((uint64_t)~T1 & ((uint64_t)~T1 ^ (uint64_t)T0) & | |
464 | + (1ULL << 63)))) { | |
383 | 465 | xer_ov = 0; |
384 | 466 | } else { |
385 | 467 | xer_so = 1; |
386 | 468 | xer_ov = 1; |
387 | 469 | } |
388 | - if (likely(T1 != -1)) | |
470 | + if (likely((uint64_t)T1 != UINT64_MAX)) | |
389 | 471 | xer_ca = 1; |
390 | 472 | } |
473 | +#endif | |
391 | 474 | |
392 | 475 | void do_subfzeo (void) |
393 | 476 | { |
394 | 477 | T1 = T0; |
395 | 478 | T0 = ~T0 + xer_ca; |
396 | - if (likely(!((~T1 ^ (-1)) & ((~T1) ^ T0) & (1 << 31)))) { | |
479 | + if (likely(!(((uint32_t)~T1 ^ UINT32_MAX) & | |
480 | + ((uint32_t)(~T1) ^ (uint32_t)T0) & (1UL << 31)))) { | |
397 | 481 | xer_ov = 0; |
398 | 482 | } else { |
399 | 483 | xer_ov = 1; |
400 | 484 | xer_so = 1; |
401 | 485 | } |
402 | - if (likely(T0 >= ~T1)) { | |
486 | + if (likely((uint32_t)T0 >= (uint32_t)~T1)) { | |
403 | 487 | xer_ca = 0; |
404 | 488 | } else { |
405 | 489 | xer_ca = 1; |
406 | 490 | } |
407 | 491 | } |
408 | 492 | |
493 | +#if defined(TARGET_PPC64) | |
494 | +void do_subfzeo_64 (void) | |
495 | +{ | |
496 | + T1 = T0; | |
497 | + T0 = ~T0 + xer_ca; | |
498 | + if (likely(!(((uint64_t)~T1 ^ UINT64_MAX) & | |
499 | + ((uint64_t)(~T1) ^ (uint64_t)T0) & (1ULL << 63)))) { | |
500 | + xer_ov = 0; | |
501 | + } else { | |
502 | + xer_ov = 1; | |
503 | + xer_so = 1; | |
504 | + } | |
505 | + if (likely((uint64_t)T0 >= (uint64_t)~T1)) { | |
506 | + xer_ca = 0; | |
507 | + } else { | |
508 | + xer_ca = 1; | |
509 | + } | |
510 | +} | |
511 | +#endif | |
512 | + | |
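The overflow checks use the classic sign-bit identity: signed addition overflows only when both operands have the same sign and the result's sign differs, which is what the (x ^ y ^ (-1)) & (x ^ sum) & sign-bit expressions test. A small sketch for 32-bit addition (illustrative only):

    /* Sketch: signed-overflow test for sum = a + b, mirroring the
     * ~(a ^ b) & (a ^ sum) & 0x80000000 pattern used by these helpers. */
    static int add_overflows32(uint32_t a, uint32_t b)
    {
        uint32_t sum = a + b;
        return ((~(a ^ b)) & (a ^ sum) & 0x80000000UL) != 0;
    }

Subtraction reuses the same test with one operand complemented, as in the subf helpers.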
409 | 513 | /* shift right arithmetic helper */ |
410 | 514 | void do_sraw (void) |
411 | 515 | { |
412 | 516 | int32_t ret; |
413 | 517 | |
414 | 518 | if (likely(!(T1 & 0x20UL))) { |
415 | - if (likely(T1 != 0)) { | |
519 | + if (likely((uint32_t)T1 != 0)) { | |
416 | 520 | ret = (int32_t)T0 >> (T1 & 0x1fUL); |
417 | 521 | if (likely(ret >= 0 || ((int32_t)T0 & ((1 << T1) - 1)) == 0)) { |
418 | 522 | xer_ca = 0; |
... | ... | @@ -434,6 +538,69 @@ void do_sraw (void) |
434 | 538 | T0 = ret; |
435 | 539 | } |
436 | 540 | |
541 | +#if defined(TARGET_PPC64) | |
542 | +void do_srad (void) | |
543 | +{ | |
544 | + int64_t ret; | |
545 | + | |
546 | + if (likely(!(T1 & 0x40UL))) { | |
547 | + if (likely((uint64_t)T1 != 0)) { | |
548 | + ret = (int64_t)T0 >> (T1 & 0x3FUL); | |
549 | +            if (likely(ret >= 0 || ((int64_t)T0 & ((1ULL << T1) - 1)) == 0)) { | 
550 | + xer_ca = 0; | |
551 | + } else { | |
552 | + xer_ca = 1; | |
553 | + } | |
554 | + } else { | |
555 | + ret = T0; | |
556 | + xer_ca = 0; | |
557 | + } | |
558 | + } else { | |
559 | + ret = (-1) * ((uint64_t)T0 >> 63); | |
560 | + if (likely(ret >= 0 || ((uint64_t)T0 & ~0x8000000000000000ULL) == 0)) { | |
561 | + xer_ca = 0; | |
562 | + } else { | |
563 | + xer_ca = 1; | |
564 | + } | |
565 | + } | |
566 | + T0 = ret; | |
567 | +} | |
568 | +#endif | |
569 | + | |
570 | +static inline int popcnt (uint32_t val) | |
571 | +{ | |
572 | + int i; | |
573 | + | |
574 | +    for (i = 0; val != 0; i++) | 
575 | +        val = val & (val - 1); | 
576 | + | |
577 | + return i; | |
578 | +} | |
579 | + | |
580 | +void do_popcntb (void) | |
581 | +{ | |
582 | + uint32_t ret; | |
583 | + int i; | |
584 | + | |
585 | + ret = 0; | |
586 | + for (i = 0; i < 32; i += 8) | |
587 | + ret |= popcnt((T0 >> i) & 0xFF) << i; | |
588 | + T0 = ret; | |
589 | +} | |
590 | + | |
591 | +#if defined(TARGET_PPC64) | |
592 | +void do_popcntb_64 (void) | |
593 | +{ | |
594 | + uint64_t ret; | |
595 | + int i; | |
596 | + | |
597 | + ret = 0; | |
598 | + for (i = 0; i < 64; i += 8) | |
599 | + ret |= popcnt((T0 >> i) & 0xFF) << i; | |
600 | + T0 = ret; | |
601 | +} | |
602 | +#endif | |
603 | + | |
437 | 604 | /*****************************************************************************/ |
438 | 605 | /* Floating point operations helpers */ |
439 | 606 | void do_fctiw (void) |
... | ... | @@ -459,7 +626,7 @@ void do_fctiwz (void) |
459 | 626 | } p; |
460 | 627 | |
461 | 628 | /* XXX: higher bits are not supposed to be significant. |
462 | - * to make tests easier, return the same as a real PowerPC 750 (aka G3) | |
629 | + * to make tests easier, return the same as a real PowerPC 750 (aka G3) | |
463 | 630 | */ |
464 | 631 | p.i = float64_to_int32_round_to_zero(FT0, &env->fp_status); |
465 | 632 | p.i |= 0xFFF80000ULL << 32; |
... | ... | @@ -596,26 +763,51 @@ void do_fcmpo (void) |
596 | 763 | #if !defined (CONFIG_USER_ONLY) |
597 | 764 | void do_rfi (void) |
598 | 765 | { |
599 | - env->nip = env->spr[SPR_SRR0] & ~0x00000003; | |
600 | - T0 = env->spr[SPR_SRR1] & ~0xFFFF0000UL; | |
766 | + env->nip = (target_ulong)(env->spr[SPR_SRR0] & ~0x00000003); | |
767 | + T0 = (target_ulong)(env->spr[SPR_SRR1] & ~0xFFFF0000UL); | |
601 | 768 | do_store_msr(env, T0); |
602 | 769 | #if defined (DEBUG_OP) |
603 | 770 | dump_rfi(); |
604 | 771 | #endif |
605 | 772 | env->interrupt_request |= CPU_INTERRUPT_EXITTB; |
606 | 773 | } |
774 | + | |
775 | +#if defined(TARGET_PPC64) | |
776 | +void do_rfi_32 (void) | |
777 | +{ | |
778 | + env->nip = (uint32_t)(env->spr[SPR_SRR0] & ~0x00000003); | |
779 | + T0 = (uint32_t)(env->spr[SPR_SRR1] & ~0xFFFF0000UL); | |
780 | + do_store_msr(env, T0); | |
781 | +#if defined (DEBUG_OP) | |
782 | + dump_rfi(); | |
783 | +#endif | |
784 | + env->interrupt_request |= CPU_INTERRUPT_EXITTB; | |
785 | +} | |
786 | +#endif | |
607 | 787 | #endif |
608 | 788 | |
609 | 789 | void do_tw (int flags) |
610 | 790 | { |
611 | - if (!likely(!((Ts0 < Ts1 && (flags & 0x10)) || | |
612 | - (Ts0 > Ts1 && (flags & 0x08)) || | |
613 | - (Ts0 == Ts1 && (flags & 0x04)) || | |
614 | - (T0 < T1 && (flags & 0x02)) || | |
615 | - (T0 > T1 && (flags & 0x01))))) | |
791 | + if (!likely(!(((int32_t)T0 < (int32_t)T1 && (flags & 0x10)) || | |
792 | + ((int32_t)T0 > (int32_t)T1 && (flags & 0x08)) || | |
793 | + ((int32_t)T0 == (int32_t)T1 && (flags & 0x04)) || | |
794 | + ((uint32_t)T0 < (uint32_t)T1 && (flags & 0x02)) || | |
795 | + ((uint32_t)T0 > (uint32_t)T1 && (flags & 0x01))))) | |
616 | 796 | do_raise_exception_err(EXCP_PROGRAM, EXCP_TRAP); |
617 | 797 | } |
618 | 798 | |
799 | +#if defined(TARGET_PPC64) | |
800 | +void do_td (int flags) | |
801 | +{ | |
802 | + if (!likely(!(((int64_t)T0 < (int64_t)T1 && (flags & 0x10)) || | |
803 | + ((int64_t)T0 > (int64_t)T1 && (flags & 0x08)) || | |
804 | + ((int64_t)T0 == (int64_t)T1 && (flags & 0x04)) || | |
805 | + ((uint64_t)T0 < (uint64_t)T1 && (flags & 0x02)) || | |
806 | + ((uint64_t)T0 > (uint64_t)T1 && (flags & 0x01))))) | |
807 | + do_raise_exception_err(EXCP_PROGRAM, EXCP_TRAP); | |
808 | +} | |
809 | +#endif | |
810 | + | |
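do_tw and do_td decode the TO field of the trap instructions: 0x10 traps on signed less-than, 0x08 on signed greater-than, 0x04 on equal, 0x02 on unsigned less-than and 0x01 on unsigned greater-than. A compact sketch of the same predicate outside the helper (illustrative only):

    /* Sketch: evaluate the tw trap condition for 32-bit operands,
     * with flags laid out as in do_tw above. */
    static int trap_condition32(uint32_t a, uint32_t b, int flags)
    {
        return ((int32_t)a < (int32_t)b && (flags & 0x10)) ||
               ((int32_t)a > (int32_t)b && (flags & 0x08)) ||
               (a == b && (flags & 0x04)) ||
               (a < b && (flags & 0x02)) ||
               (a > b && (flags & 0x01));
    }

A trap-if-equal test, for instance, corresponds to flags == 0x04.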
619 | 811 | /* Instruction cache invalidation helper */ |
620 | 812 | void do_icbi (void) |
621 | 813 | { |
... | ... | @@ -625,20 +817,31 @@ void do_icbi (void) |
625 | 817 | * (not a fetch) by the MMU. To be sure it will be so, |
626 | 818 | * do the load "by hand". |
627 | 819 | */ |
820 | + tmp = ldl_kernel((uint32_t)T0); | |
821 | + T0 &= ~(ICACHE_LINE_SIZE - 1); | |
822 | + tb_invalidate_page_range((uint32_t)T0, (uint32_t)(T0 + ICACHE_LINE_SIZE)); | |
823 | +} | |
824 | + | |
628 | 825 | #if defined(TARGET_PPC64) |
629 | - if (!msr_sf) | |
630 | - T0 &= 0xFFFFFFFFULL; | |
631 | -#endif | |
632 | - tmp = ldl_kernel(T0); | |
826 | +void do_icbi_64 (void) | |
827 | +{ | |
828 | + uint64_t tmp; | |
829 | + /* Invalidate one cache line : | |
830 | + * PowerPC specification says this is to be treated like a load | |
831 | + * (not a fetch) by the MMU. To be sure it will be so, | |
832 | + * do the load "by hand". | |
833 | + */ | |
834 | + tmp = ldq_kernel((uint64_t)T0); | |
633 | 835 | T0 &= ~(ICACHE_LINE_SIZE - 1); |
634 | - tb_invalidate_page_range(T0, T0 + ICACHE_LINE_SIZE); | |
836 | + tb_invalidate_page_range((uint64_t)T0, (uint64_t)(T0 + ICACHE_LINE_SIZE)); | |
635 | 837 | } |
838 | +#endif | |
636 | 839 | |
637 | 840 | /*****************************************************************************/ |
638 | 841 | /* PowerPC 601 specific instructions (POWER bridge) */ |
639 | 842 | void do_POWER_abso (void) |
640 | 843 | { |
641 | - if (T0 == INT32_MIN) { | |
844 | + if ((uint32_t)T0 == INT32_MIN) { | |
642 | 845 | T0 = INT32_MAX; |
643 | 846 | xer_ov = 1; |
644 | 847 | xer_so = 1; |
... | ... | @@ -679,13 +882,13 @@ void do_POWER_div (void) |
679 | 882 | { |
680 | 883 | uint64_t tmp; |
681 | 884 | |
682 | - if ((Ts0 == INT32_MIN && Ts1 == -1) || Ts1 == 0) { | |
885 | + if (((int32_t)T0 == INT32_MIN && (int32_t)T1 == -1) || (int32_t)T1 == 0) { | |
683 | 886 | T0 = (long)((-1) * (T0 >> 31)); |
684 | 887 | env->spr[SPR_MQ] = 0; |
685 | 888 | } else { |
686 | 889 | tmp = ((uint64_t)T0 << 32) | env->spr[SPR_MQ]; |
687 | 890 | env->spr[SPR_MQ] = tmp % T1; |
688 | - T0 = tmp / Ts1; | |
891 | + T0 = tmp / (int32_t)T1; | |
689 | 892 | } |
690 | 893 | } |
691 | 894 | |
... | ... | @@ -693,7 +896,7 @@ void do_POWER_divo (void) |
693 | 896 | { |
694 | 897 | int64_t tmp; |
695 | 898 | |
696 | - if ((Ts0 == INT32_MIN && Ts1 == -1) || Ts1 == 0) { | |
899 | + if (((int32_t)T0 == INT32_MIN && (int32_t)T1 == -1) || (int32_t)T1 == 0) { | |
697 | 900 | T0 = (long)((-1) * (T0 >> 31)); |
698 | 901 | env->spr[SPR_MQ] = 0; |
699 | 902 | xer_ov = 1; |
... | ... | @@ -701,7 +904,7 @@ void do_POWER_divo (void) |
701 | 904 | } else { |
702 | 905 | tmp = ((uint64_t)T0 << 32) | env->spr[SPR_MQ]; |
703 | 906 | env->spr[SPR_MQ] = tmp % T1; |
704 | - tmp /= Ts1; | |
907 | + tmp /= (int32_t)T1; | |
705 | 908 | if (tmp > (int64_t)INT32_MAX || tmp < (int64_t)INT32_MIN) { |
706 | 909 | xer_ov = 1; |
707 | 910 | xer_so = 1; |
... | ... | @@ -714,35 +917,36 @@ void do_POWER_divo (void) |
714 | 917 | |
715 | 918 | void do_POWER_divs (void) |
716 | 919 | { |
717 | - if ((Ts0 == INT32_MIN && Ts1 == -1) || Ts1 == 0) { | |
920 | + if (((int32_t)T0 == INT32_MIN && (int32_t)T1 == -1) || (int32_t)T1 == 0) { | |
718 | 921 | T0 = (long)((-1) * (T0 >> 31)); |
719 | 922 | env->spr[SPR_MQ] = 0; |
720 | 923 | } else { |
721 | 924 | env->spr[SPR_MQ] = T0 % T1; |
722 | - T0 = Ts0 / Ts1; | |
925 | + T0 = (int32_t)T0 / (int32_t)T1; | |
723 | 926 | } |
724 | 927 | } |
725 | 928 | |
726 | 929 | void do_POWER_divso (void) |
727 | 930 | { |
728 | - if ((Ts0 == INT32_MIN && Ts1 == -1) || Ts1 == 0) { | |
931 | + if (((int32_t)T0 == INT32_MIN && (int32_t)T1 == -1) || (int32_t)T1 == 0) { | |
729 | 932 | T0 = (long)((-1) * (T0 >> 31)); |
730 | 933 | env->spr[SPR_MQ] = 0; |
731 | 934 | xer_ov = 1; |
732 | 935 | xer_so = 1; |
733 | 936 | } else { |
734 | - T0 = Ts0 / Ts1; | |
735 | - env->spr[SPR_MQ] = Ts0 % Ts1; | |
937 | +        env->spr[SPR_MQ] = (int32_t)T0 % (int32_t)T1; | 
938 | +        T0 = (int32_t)T0 / (int32_t)T1; | 
736 | 939 | xer_ov = 0; |
737 | 940 | } |
738 | 941 | } |
739 | 942 | |
740 | 943 | void do_POWER_dozo (void) |
741 | 944 | { |
742 | - if (Ts1 > Ts0) { | |
945 | + if ((int32_t)T1 > (int32_t)T0) { | |
743 | 946 | T2 = T0; |
744 | 947 | T0 = T1 - T0; |
745 | - if (((~T2) ^ T1 ^ (-1)) & ((~T2) ^ T0) & (1 << 31)) { | |
948 | + if (((uint32_t)(~T2) ^ (uint32_t)T1 ^ UINT32_MAX) & | |
949 | + ((uint32_t)(~T2) ^ (uint32_t)T0) & (1UL << 31)) { | |
746 | 950 | xer_so = 1; |
747 | 951 | xer_ov = 1; |
748 | 952 | } else { |
... | ... | @@ -758,12 +962,12 @@ void do_POWER_maskg (void) |
758 | 962 | { |
759 | 963 | uint32_t ret; |
760 | 964 | |
761 | - if (T0 == T1 + 1) { | |
965 | + if ((uint32_t)T0 == (uint32_t)(T1 + 1)) { | |
762 | 966 | ret = -1; |
763 | 967 | } else { |
764 | - ret = (((uint32_t)(-1)) >> (T0)) ^ | |
765 | - (((uint32_t)(-1) >> (T1)) >> 1); | |
766 | - if (T0 > T1) | |
968 | + ret = (((uint32_t)(-1)) >> ((uint32_t)T0)) ^ | |
969 | + (((uint32_t)(-1) >> ((uint32_t)T1)) >> 1); | |
970 | + if ((uint32_t)T0 > (uint32_t)T1) | |
767 | 971 | ret = ~ret; |
768 | 972 | } |
769 | 973 | T0 = ret; |
... | ... | @@ -812,7 +1016,7 @@ void do_POWER_rfsvc (void) |
812 | 1016 | /* PowerPC 601 BAT management helper */ |
813 | 1017 | void do_store_601_batu (int nr) |
814 | 1018 | { |
815 | - do_store_ibatu(env, nr, T0); | |
1019 | + do_store_ibatu(env, nr, (uint32_t)T0); | |
816 | 1020 | env->DBAT[0][nr] = env->IBAT[0][nr]; |
817 | 1021 | env->DBAT[1][nr] = env->IBAT[1][nr]; |
818 | 1022 | } |
... | ... | @@ -826,7 +1030,7 @@ void do_store_601_batu (int nr) |
826 | 1030 | void do_op_602_mfrom (void) |
827 | 1031 | { |
828 | 1032 | if (likely(T0 < 602)) { |
829 | -#ifdef USE_MFROM_ROM_TABLE | |
1033 | +#if defined(USE_MFROM_ROM_TABLE) | |
830 | 1034 | #include "mfrom_table.c" |
831 | 1035 | T0 = mfrom_ROM_table[T0]; |
832 | 1036 | #else |
... | ... | @@ -854,7 +1058,8 @@ void do_op_602_mfrom (void) |
854 | 1058 | /* Embedded PowerPC specific helpers */ |
855 | 1059 | void do_405_check_ov (void) |
856 | 1060 | { |
857 | - if (likely(((T1 ^ T2) >> 31) || !((T0 ^ T2) >> 31))) { | |
1061 | + if (likely((((uint32_t)T1 ^ (uint32_t)T2) >> 31) || | |
1062 | + !(((uint32_t)T0 ^ (uint32_t)T2) >> 31))) { | |
858 | 1063 | xer_ov = 0; |
859 | 1064 | } else { |
860 | 1065 | xer_ov = 1; |
... | ... | @@ -864,7 +1069,8 @@ void do_405_check_ov (void) |
864 | 1069 | |
865 | 1070 | void do_405_check_sat (void) |
866 | 1071 | { |
867 | - if (!likely(((T1 ^ T2) >> 31) || !((T0 ^ T2) >> 31))) { | |
1072 | + if (!likely((((uint32_t)T1 ^ (uint32_t)T2) >> 31) || | |
1073 | + !(((uint32_t)T0 ^ (uint32_t)T2) >> 31))) { | |
868 | 1074 | /* Saturate result */ |
869 | 1075 | if (T2 >> 31) { |
870 | 1076 | T0 = INT32_MIN; |
... | ... | @@ -1010,6 +1216,7 @@ void do_tlbia (void) |
1010 | 1216 | |
1011 | 1217 | void do_tlbie (void) |
1012 | 1218 | { |
1219 | + T0 = (uint32_t)T0; | |
1013 | 1220 | #if !defined(FLUSH_ALL_TLBS) |
1014 | 1221 | if (unlikely(PPC_MMU(env) == PPC_FLAGS_MMU_SOFT_6xx)) { |
1015 | 1222 | ppc6xx_tlb_invalidate_virt(env, T0 & TARGET_PAGE_MASK, 0); |
... | ... | @@ -1050,13 +1257,78 @@ void do_tlbie (void) |
1050 | 1257 | #endif |
1051 | 1258 | } |
1052 | 1259 | |
1260 | +#if defined(TARGET_PPC64) | |
1261 | +void do_tlbie_64 (void) | |
1262 | +{ | |
1263 | + T0 = (uint64_t)T0; | |
1264 | +#if !defined(FLUSH_ALL_TLBS) | |
1265 | + if (unlikely(PPC_MMU(env) == PPC_FLAGS_MMU_SOFT_6xx)) { | |
1266 | + ppc6xx_tlb_invalidate_virt(env, T0 & TARGET_PAGE_MASK, 0); | |
1267 | + if (env->id_tlbs == 1) | |
1268 | + ppc6xx_tlb_invalidate_virt(env, T0 & TARGET_PAGE_MASK, 1); | |
1269 | + } else if (unlikely(PPC_MMU(env) == PPC_FLAGS_MMU_SOFT_4xx)) { | |
1270 | + /* XXX: TODO */ | |
1271 | +#if 0 | |
1272 | + ppcbooke_tlb_invalidate_virt(env, T0 & TARGET_PAGE_MASK, | |
1273 | + env->spr[SPR_BOOKE_PID]); | |
1274 | +#endif | |
1275 | + } else { | |
1276 | +        /* tlbie invalidates TLBs for all segments | 
1277 | + * As we have 2^36 segments, invalidate all qemu TLBs | |
1278 | + */ | |
1279 | +#if 0 | |
1280 | + T0 &= TARGET_PAGE_MASK; | |
1281 | + T0 &= ~((target_ulong)-1 << 28); | |
1282 | + /* XXX: this case should be optimized, | |
1283 | + * giving a mask to tlb_flush_page | |
1284 | + */ | |
1285 | + tlb_flush_page(env, T0 | (0x0 << 28)); | |
1286 | + tlb_flush_page(env, T0 | (0x1 << 28)); | |
1287 | + tlb_flush_page(env, T0 | (0x2 << 28)); | |
1288 | + tlb_flush_page(env, T0 | (0x3 << 28)); | |
1289 | + tlb_flush_page(env, T0 | (0x4 << 28)); | |
1290 | + tlb_flush_page(env, T0 | (0x5 << 28)); | |
1291 | + tlb_flush_page(env, T0 | (0x6 << 28)); | |
1292 | + tlb_flush_page(env, T0 | (0x7 << 28)); | |
1293 | + tlb_flush_page(env, T0 | (0x8 << 28)); | |
1294 | + tlb_flush_page(env, T0 | (0x9 << 28)); | |
1295 | + tlb_flush_page(env, T0 | (0xA << 28)); | |
1296 | + tlb_flush_page(env, T0 | (0xB << 28)); | |
1297 | + tlb_flush_page(env, T0 | (0xC << 28)); | |
1298 | + tlb_flush_page(env, T0 | (0xD << 28)); | |
1299 | + tlb_flush_page(env, T0 | (0xE << 28)); | |
1300 | + tlb_flush_page(env, T0 | (0xF << 28)); | |
1301 | +#else | |
1302 | + tlb_flush(env, 1); | |
1303 | +#endif | |
1304 | + } | |
1305 | +#else | |
1306 | + do_tlbia(); | |
1307 | +#endif | |
1308 | +} | |
1309 | +#endif | |
1310 | + | |
1311 | +#if defined(TARGET_PPC64) | |
1312 | +void do_slbia (void) | |
1313 | +{ | |
1314 | + /* XXX: TODO */ | |
1315 | + tlb_flush(env, 1); | |
1316 | +} | |
1317 | + | |
1318 | +void do_slbie (void) | |
1319 | +{ | |
1320 | + /* XXX: TODO */ | |
1321 | + tlb_flush(env, 1); | |
1322 | +} | |
1323 | +#endif | |
1324 | + | |
1053 | 1325 | /* Software driven TLBs management */ |
1054 | 1326 | /* PowerPC 602/603 software TLB load instructions helpers */ |
1055 | 1327 | void do_load_6xx_tlb (int is_code) |
1056 | 1328 | { |
1057 | 1329 | target_ulong RPN, CMP, EPN; |
1058 | 1330 | int way; |
1059 | - | |
1331 | + | |
1060 | 1332 | RPN = env->spr[SPR_RPA]; |
1061 | 1333 | if (is_code) { |
1062 | 1334 | CMP = env->spr[SPR_ICMP]; |
... | ... | @@ -1074,7 +1346,8 @@ void do_load_6xx_tlb (int is_code) |
1074 | 1346 | } |
1075 | 1347 | #endif |
1076 | 1348 | /* Store this TLB */ |
1077 | - ppc6xx_tlb_store(env, T0 & TARGET_PAGE_MASK, way, is_code, CMP, RPN); | |
1349 | + ppc6xx_tlb_store(env, (uint32_t)(T0 & TARGET_PAGE_MASK), | |
1350 | + way, is_code, CMP, RPN); | |
1078 | 1351 | } |
1079 | 1352 | |
1080 | 1353 | /* Helpers for 4xx TLB management */ | ... | ... |
target-ppc/op_helper.h
... | ... | @@ -35,6 +35,17 @@ void glue(do_POWER2_lfq_le, MEMSUFFIX) (void); |
35 | 35 | void glue(do_POWER2_stfq, MEMSUFFIX) (void); |
36 | 36 | void glue(do_POWER2_stfq_le, MEMSUFFIX) (void); |
37 | 37 | |
38 | +#if defined(TARGET_PPC64) | |
39 | +void glue(do_lsw_64, MEMSUFFIX) (int dst); | |
40 | +void glue(do_lsw_le_64, MEMSUFFIX) (int dst); | |
41 | +void glue(do_stsw_64, MEMSUFFIX) (int src); | |
42 | +void glue(do_stsw_le_64, MEMSUFFIX) (int src); | |
43 | +void glue(do_lmw_64, MEMSUFFIX) (int dst); | |
44 | +void glue(do_lmw_le_64, MEMSUFFIX) (int dst); | |
45 | +void glue(do_stmw_64, MEMSUFFIX) (int src); | |
46 | +void glue(do_stmw_le_64, MEMSUFFIX) (int src); | |
47 | +#endif | |
48 | + | |
38 | 49 | #else |
39 | 50 | |
40 | 51 | /* Registers load and stores */ |
... | ... | @@ -46,23 +57,34 @@ void do_load_fpscr (void); |
46 | 57 | void do_store_fpscr (uint32_t mask); |
47 | 58 | |
48 | 59 | /* Integer arithmetic helpers */ |
49 | -void do_addo (void); | |
50 | -void do_addco (void); | |
51 | 60 | void do_adde (void); |
52 | -void do_addeo (void); | |
53 | 61 | void do_addmeo (void); |
54 | -void do_addzeo (void); | |
55 | 62 | void do_divwo (void); |
56 | 63 | void do_divwuo (void); |
57 | 64 | void do_mullwo (void); |
58 | 65 | void do_nego (void); |
59 | -void do_subfo (void); | |
60 | -void do_subfco (void); | |
61 | 66 | void do_subfe (void); |
62 | -void do_subfeo (void); | |
63 | 67 | void do_subfmeo (void); |
64 | 68 | void do_subfzeo (void); |
65 | -void do_sraw(void); | |
69 | +void do_sraw (void); | |
70 | +#if defined(TARGET_PPC64) | |
71 | +void do_adde_64 (void); | |
72 | +void do_addmeo_64 (void); | |
73 | +void do_imul64 (uint64_t *tl, uint64_t *th); | |
74 | +void do_mul64 (uint64_t *tl, uint64_t *th); | |
75 | +void do_divdo (void); | |
76 | +void do_divduo (void); | |
77 | +void do_mulldo (void); | |
78 | +void do_nego_64 (void); | |
79 | +void do_subfe_64 (void); | |
80 | +void do_subfmeo_64 (void); | |
81 | +void do_subfzeo_64 (void); | |
82 | +void do_srad (void); | |
83 | +#endif | |
84 | +void do_popcntb (void); | |
85 | +#if defined(TARGET_PPC64) | |
86 | +void do_popcntb_64 (void); | |
87 | +#endif | |
66 | 88 | |
67 | 89 | /* Floating-point arithmetic helpers */ |
68 | 90 | void do_fsqrt (void); |
... | ... | @@ -77,13 +99,29 @@ void do_fcmpu (void); |
77 | 99 | void do_fcmpo (void); |
78 | 100 | |
79 | 101 | void do_tw (int flags); |
102 | +#if defined(TARGET_PPC64) | |
103 | +void do_td (int flags); | |
104 | +#endif | |
80 | 105 | void do_icbi (void); |
106 | +#if defined(TARGET_PPC64) | |
107 | +void do_icbi_64 (void); | |
108 | +#endif | |
81 | 109 | |
82 | 110 | #if !defined(CONFIG_USER_ONLY) |
83 | 111 | void do_rfi (void); |
112 | +#if defined(TARGET_PPC64) | |
113 | +void do_rfi_32 (void); | |
114 | +#endif | |
84 | 115 | void do_tlbia (void); |
85 | 116 | void do_tlbie (void); |
117 | +#if defined(TARGET_PPC64) | |
118 | +void do_tlbie_64 (void); | |
119 | +#endif | |
86 | 120 | void do_load_6xx_tlb (int is_code); |
121 | +#if defined(TARGET_PPC64) | |
122 | +void do_slbia (void); | |
123 | +void do_slbie (void); | |
124 | +#endif | |
87 | 125 | #endif |
88 | 126 | |
89 | 127 | /* POWER / PowerPC 601 specific helpers */ | ... | ... |
target-ppc/op_helper_mem.h
1 | 1 | /* |
2 | 2 | * PowerPC emulation micro-operations helpers for qemu. |
3 | - * | |
3 | + * | |
4 | 4 | * Copyright (c) 2003-2007 Jocelyn Mayer |
5 | 5 | * |
6 | 6 | * This library is free software; you can redistribute it and/or |
... | ... | @@ -37,98 +37,210 @@ static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, target_ulong data) |
37 | 37 | void glue(do_lmw, MEMSUFFIX) (int dst) |
38 | 38 | { |
39 | 39 | for (; dst < 32; dst++, T0 += 4) { |
40 | - ugpr(dst) = glue(ldl, MEMSUFFIX)(T0); | |
40 | + ugpr(dst) = glue(ldl, MEMSUFFIX)((uint32_t)T0); | |
41 | + } | |
42 | +} | |
43 | + | |
44 | +#if defined(TARGET_PPC64) | |
45 | +void glue(do_lmw_64, MEMSUFFIX) (int dst) | |
46 | +{ | |
47 | + for (; dst < 32; dst++, T0 += 4) { | |
48 | + ugpr(dst) = glue(ldl, MEMSUFFIX)((uint64_t)T0); | |
41 | 49 | } |
42 | 50 | } |
51 | +#endif | |
43 | 52 | |
44 | 53 | void glue(do_stmw, MEMSUFFIX) (int src) |
45 | 54 | { |
46 | 55 | for (; src < 32; src++, T0 += 4) { |
47 | - glue(stl, MEMSUFFIX)(T0, ugpr(src)); | |
56 | + glue(stl, MEMSUFFIX)((uint32_t)T0, ugpr(src)); | |
57 | + } | |
58 | +} | |
59 | + | |
60 | +#if defined(TARGET_PPC64) | |
61 | +void glue(do_stmw_64, MEMSUFFIX) (int src) | |
62 | +{ | |
63 | + for (; src < 32; src++, T0 += 4) { | |
64 | + glue(stl, MEMSUFFIX)((uint64_t)T0, ugpr(src)); | |
48 | 65 | } |
49 | 66 | } |
67 | +#endif | |
50 | 68 | |
51 | 69 | void glue(do_lmw_le, MEMSUFFIX) (int dst) |
52 | 70 | { |
53 | 71 | for (; dst < 32; dst++, T0 += 4) { |
54 | - ugpr(dst) = glue(ld32r, MEMSUFFIX)(T0); | |
72 | + ugpr(dst) = glue(ld32r, MEMSUFFIX)((uint32_t)T0); | |
73 | + } | |
74 | +} | |
75 | + | |
76 | +#if defined(TARGET_PPC64) | |
77 | +void glue(do_lmw_le_64, MEMSUFFIX) (int dst) | |
78 | +{ | |
79 | + for (; dst < 32; dst++, T0 += 4) { | |
80 | + ugpr(dst) = glue(ld32r, MEMSUFFIX)((uint64_t)T0); | |
55 | 81 | } |
56 | 82 | } |
83 | +#endif | |
57 | 84 | |
58 | 85 | void glue(do_stmw_le, MEMSUFFIX) (int src) |
59 | 86 | { |
60 | 87 | for (; src < 32; src++, T0 += 4) { |
61 | - glue(st32r, MEMSUFFIX)(T0, ugpr(src)); | |
88 | + glue(st32r, MEMSUFFIX)((uint32_t)T0, ugpr(src)); | |
62 | 89 | } |
63 | 90 | } |
64 | 91 | |
92 | +#if defined(TARGET_PPC64) | |
93 | +void glue(do_stmw_le_64, MEMSUFFIX) (int src) | |
94 | +{ | |
95 | + for (; src < 32; src++, T0 += 4) { | |
96 | + glue(st32r, MEMSUFFIX)((uint64_t)T0, ugpr(src)); | |
97 | + } | |
98 | +} | |
99 | +#endif | |
100 | + | |
65 | 101 | void glue(do_lsw, MEMSUFFIX) (int dst) |
66 | 102 | { |
67 | 103 | uint32_t tmp; |
68 | 104 | int sh; |
69 | 105 | |
70 | 106 | for (; T1 > 3; T1 -= 4, T0 += 4) { |
71 | - ugpr(dst++) = glue(ldl, MEMSUFFIX)(T0); | |
107 | + ugpr(dst++) = glue(ldl, MEMSUFFIX)((uint32_t)T0); | |
72 | 108 | if (unlikely(dst == 32)) |
73 | 109 | dst = 0; |
74 | 110 | } |
75 | 111 | if (unlikely(T1 != 0)) { |
76 | 112 | tmp = 0; |
77 | 113 | for (sh = 24; T1 > 0; T1--, T0++, sh -= 8) { |
78 | - tmp |= glue(ldub, MEMSUFFIX)(T0) << sh; | |
114 | + tmp |= glue(ldub, MEMSUFFIX)((uint32_t)T0) << sh; | |
79 | 115 | } |
80 | 116 | ugpr(dst) = tmp; |
81 | 117 | } |
82 | 118 | } |
83 | 119 | |
120 | +#if defined(TARGET_PPC64) | |
121 | +void glue(do_lsw_64, MEMSUFFIX) (int dst) | |
122 | +{ | |
123 | + uint32_t tmp; | |
124 | + int sh; | |
125 | + | |
126 | + for (; T1 > 3; T1 -= 4, T0 += 4) { | |
127 | + ugpr(dst++) = glue(ldl, MEMSUFFIX)((uint64_t)T0); | |
128 | + if (unlikely(dst == 32)) | |
129 | + dst = 0; | |
130 | + } | |
131 | + if (unlikely(T1 != 0)) { | |
132 | + tmp = 0; | |
133 | + for (sh = 24; T1 > 0; T1--, T0++, sh -= 8) { | |
134 | + tmp |= glue(ldub, MEMSUFFIX)((uint64_t)T0) << sh; | |
135 | + } | |
136 | + ugpr(dst) = tmp; | |
137 | + } | |
138 | +} | |
139 | +#endif | |
140 | + | |
84 | 141 | void glue(do_stsw, MEMSUFFIX) (int src) |
85 | 142 | { |
86 | 143 | int sh; |
87 | 144 | |
88 | 145 | for (; T1 > 3; T1 -= 4, T0 += 4) { |
89 | - glue(stl, MEMSUFFIX)(T0, ugpr(src++)); | |
146 | + glue(stl, MEMSUFFIX)((uint32_t)T0, ugpr(src++)); | |
90 | 147 | if (unlikely(src == 32)) |
91 | 148 | src = 0; |
92 | 149 | } |
93 | 150 | if (unlikely(T1 != 0)) { |
94 | 151 | for (sh = 24; T1 > 0; T1--, T0++, sh -= 8) |
95 | - glue(stb, MEMSUFFIX)(T0, (ugpr(src) >> sh) & 0xFF); | |
152 | + glue(stb, MEMSUFFIX)((uint32_t)T0, (ugpr(src) >> sh) & 0xFF); | |
96 | 153 | } |
97 | 154 | } |
98 | 155 | |
156 | +#if defined(TARGET_PPC64) | |
157 | +void glue(do_stsw_64, MEMSUFFIX) (int src) | |
158 | +{ | |
159 | + int sh; | |
160 | + | |
161 | + for (; T1 > 3; T1 -= 4, T0 += 4) { | |
162 | + glue(stl, MEMSUFFIX)((uint64_t)T0, ugpr(src++)); | |
163 | + if (unlikely(src == 32)) | |
164 | + src = 0; | |
165 | + } | |
166 | + if (unlikely(T1 != 0)) { | |
167 | + for (sh = 24; T1 > 0; T1--, T0++, sh -= 8) | |
168 | + glue(stb, MEMSUFFIX)((uint64_t)T0, (ugpr(src) >> sh) & 0xFF); | |
169 | + } | |
170 | +} | |
171 | +#endif | |
172 | + | |
99 | 173 | void glue(do_lsw_le, MEMSUFFIX) (int dst) |
100 | 174 | { |
101 | 175 | uint32_t tmp; |
102 | 176 | int sh; |
103 | 177 | |
104 | 178 | for (; T1 > 3; T1 -= 4, T0 += 4) { |
105 | - ugpr(dst++) = glue(ld32r, MEMSUFFIX)(T0); | |
179 | + ugpr(dst++) = glue(ld32r, MEMSUFFIX)((uint32_t)T0); | |
180 | + if (unlikely(dst == 32)) | |
181 | + dst = 0; | |
182 | + } | |
183 | + if (unlikely(T1 != 0)) { | |
184 | + tmp = 0; | |
185 | + for (sh = 0; T1 > 0; T1--, T0++, sh += 8) { | |
186 | + tmp |= glue(ldub, MEMSUFFIX)((uint32_t)T0) << sh; | |
187 | + } | |
188 | + ugpr(dst) = tmp; | |
189 | + } | |
190 | +} | |
191 | + | |
192 | +#if defined(TARGET_PPC64) | |
193 | +void glue(do_lsw_le_64, MEMSUFFIX) (int dst) | |
194 | +{ | |
195 | + uint32_t tmp; | |
196 | + int sh; | |
197 | + | |
198 | + for (; T1 > 3; T1 -= 4, T0 += 4) { | |
199 | + ugpr(dst++) = glue(ld32r, MEMSUFFIX)((uint64_t)T0); | |
106 | 200 | if (unlikely(dst == 32)) |
107 | 201 | dst = 0; |
108 | 202 | } |
109 | 203 | if (unlikely(T1 != 0)) { |
110 | 204 | tmp = 0; |
111 | 205 | for (sh = 0; T1 > 0; T1--, T0++, sh += 8) { |
112 | - tmp |= glue(ldub, MEMSUFFIX)(T0) << sh; | |
206 | + tmp |= glue(ldub, MEMSUFFIX)((uint64_t)T0) << sh; | |
113 | 207 | } |
114 | 208 | ugpr(dst) = tmp; |
115 | 209 | } |
116 | 210 | } |
211 | +#endif | |
117 | 212 | |
118 | 213 | void glue(do_stsw_le, MEMSUFFIX) (int src) |
119 | 214 | { |
120 | 215 | int sh; |
121 | 216 | |
122 | 217 | for (; T1 > 3; T1 -= 4, T0 += 4) { |
123 | - glue(st32r, MEMSUFFIX)(T0, ugpr(src++)); | |
218 | + glue(st32r, MEMSUFFIX)((uint32_t)T0, ugpr(src++)); | |
219 | + if (unlikely(src == 32)) | |
220 | + src = 0; | |
221 | + } | |
222 | + if (unlikely(T1 != 0)) { | |
223 | + for (sh = 0; T1 > 0; T1--, T0++, sh += 8) | |
224 | + glue(stb, MEMSUFFIX)((uint32_t)T0, (ugpr(src) >> sh) & 0xFF); | |
225 | + } | |
226 | +} | |
227 | + | |
228 | +#if defined(TARGET_PPC64) | |
229 | +void glue(do_stsw_le_64, MEMSUFFIX) (int src) | |
230 | +{ | |
231 | + int sh; | |
232 | + | |
233 | + for (; T1 > 3; T1 -= 4, T0 += 4) { | |
234 | + glue(st32r, MEMSUFFIX)((uint64_t)T0, ugpr(src++)); | |
124 | 235 | if (unlikely(src == 32)) |
125 | 236 | src = 0; |
126 | 237 | } |
127 | 238 | if (unlikely(T1 != 0)) { |
128 | 239 | for (sh = 0; T1 > 0; T1--, T0++, sh += 8) |
129 | - glue(stb, MEMSUFFIX)(T0, (ugpr(src) >> sh) & 0xFF); | |
240 | + glue(stb, MEMSUFFIX)((uint64_t)T0, (ugpr(src) >> sh) & 0xFF); | |
130 | 241 | } |
131 | 242 | } |
243 | +#endif | |
132 | 244 | |
133 | 245 | /* PPC 601 specific instructions (POWER bridge) */ |
134 | 246 | // XXX: to be tested |
... | ... | @@ -139,7 +251,7 @@ void glue(do_POWER_lscbx, MEMSUFFIX) (int dest, int ra, int rb) |
139 | 251 | d = 24; |
140 | 252 | reg = dest; |
141 | 253 | for (i = 0; i < T1; i++) { |
142 | - c = glue(ldub, MEMSUFFIX)(T0++); | |
254 | + c = glue(ldub, MEMSUFFIX)((uint32_t)T0++); | |
143 | 255 | /* ra (if not 0) and rb are never modified */ |
144 | 256 | if (likely(reg != rb && (ra == 0 || reg != ra))) { |
145 | 257 | ugpr(reg) = (ugpr(reg) & ~(0xFF << d)) | (c << d); |
... | ... | @@ -160,8 +272,8 @@ void glue(do_POWER_lscbx, MEMSUFFIX) (int dest, int ra, int rb) |
160 | 272 | /* XXX: TAGs are not managed */ |
161 | 273 | void glue(do_POWER2_lfq, MEMSUFFIX) (void) |
162 | 274 | { |
163 | - FT0 = glue(ldfq, MEMSUFFIX)(T0); | |
164 | - FT1 = glue(ldfq, MEMSUFFIX)(T0 + 4); | |
275 | + FT0 = glue(ldfq, MEMSUFFIX)((uint32_t)T0); | |
276 | + FT1 = glue(ldfq, MEMSUFFIX)((uint32_t)(T0 + 4)); | |
165 | 277 | } |
166 | 278 | |
167 | 279 | static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA) |
... | ... | @@ -186,14 +298,14 @@ static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA) |
186 | 298 | |
187 | 299 | void glue(do_POWER2_lfq_le, MEMSUFFIX) (void) |
188 | 300 | { |
189 | - FT0 = glue(ldfqr, MEMSUFFIX)(T0 + 4); | |
190 | - FT1 = glue(ldfqr, MEMSUFFIX)(T0); | |
301 | + FT0 = glue(ldfqr, MEMSUFFIX)((uint32_t)(T0 + 4)); | |
302 | + FT1 = glue(ldfqr, MEMSUFFIX)((uint32_t)T0); | |
191 | 303 | } |
192 | 304 | |
193 | 305 | void glue(do_POWER2_stfq, MEMSUFFIX) (void) |
194 | 306 | { |
195 | - glue(stfq, MEMSUFFIX)(T0, FT0); | |
196 | - glue(stfq, MEMSUFFIX)(T0 + 4, FT1); | |
307 | + glue(stfq, MEMSUFFIX)((uint32_t)T0, FT0); | |
308 | + glue(stfq, MEMSUFFIX)((uint32_t)(T0 + 4), FT1); | |
197 | 309 | } |
198 | 310 | |
199 | 311 | static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d) |
... | ... | @@ -217,8 +329,8 @@ static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d) |
217 | 329 | |
218 | 330 | void glue(do_POWER2_stfq_le, MEMSUFFIX) (void) |
219 | 331 | { |
220 | - glue(stfqr, MEMSUFFIX)(T0 + 4, FT0); | |
221 | - glue(stfqr, MEMSUFFIX)(T0, FT1); | |
332 | + glue(stfqr, MEMSUFFIX)((uint32_t)(T0 + 4), FT0); | |
333 | + glue(stfqr, MEMSUFFIX)((uint32_t)T0, FT1); | |
222 | 334 | } |
223 | 335 | |
224 | 336 | #undef MEMSUFFIX | ... | ... |
target-ppc/op_mem.h
... | ... | @@ -37,6 +37,33 @@ static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA) |
37 | 37 | ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24); |
38 | 38 | } |
39 | 39 | |
40 | +#if defined(TARGET_PPC64) | |
41 | +static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA) | |
42 | +{ | |
43 | + return (int32_t)glue(ldl, MEMSUFFIX)(EA); | |
44 | +} | |
45 | + | |
46 | +static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA) | |
47 | +{ | |
48 | + uint64_t tmp = glue(ldq, MEMSUFFIX)(EA); | |
49 | + return ((tmp & 0xFF00000000000000ULL) >> 56) | | |
50 | + ((tmp & 0x00FF000000000000ULL) >> 40) | | |
51 | + ((tmp & 0x0000FF0000000000ULL) >> 24) | | |
52 | + ((tmp & 0x000000FF00000000ULL) >> 8) | | |
53 | + ((tmp & 0x00000000FF000000ULL) << 8) | | |
54 | + ((tmp & 0x0000000000FF0000ULL) << 24) | | |
55 | + ((tmp & 0x000000000000FF00ULL) << 40) | | |
56 | +        ((tmp & 0x00000000000000FFULL) << 56); | 
57 | +} | |
58 | + | |
59 | +static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA) | |
60 | +{ | |
61 | + uint32_t tmp = glue(ldl, MEMSUFFIX)(EA); | |
62 | +    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) | | 
63 | +        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24)); | 
64 | +} | |
65 | +#endif | |
66 | + | |
40 | 67 | static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data) |
41 | 68 | { |
42 | 69 | uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8); |
... | ... | @@ -50,140 +77,328 @@ static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data) |
50 | 77 | glue(stl, MEMSUFFIX)(EA, tmp); |
51 | 78 | } |
52 | 79 | |
80 | +#if defined(TARGET_PPC64) | |
81 | +static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data) | |
82 | +{ | |
83 | + uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) | | |
84 | + ((data & 0x00FF000000000000ULL) >> 40) | | |
85 | + ((data & 0x0000FF0000000000ULL) >> 24) | | |
86 | + ((data & 0x000000FF00000000ULL) >> 8) | | |
87 | + ((data & 0x00000000FF000000ULL) << 8) | | |
88 | + ((data & 0x0000000000FF0000ULL) << 24) | | |
89 | + ((data & 0x000000000000FF00ULL) << 40) | | |
90 | + ((data & 0x00000000000000FFULL) << 56); | |
91 | + glue(stq, MEMSUFFIX)(EA, tmp); | |
92 | +} | |
93 | +#endif | |
94 | + | |
53 | 95 | /*** Integer load ***/ |
54 | 96 | #define PPC_LD_OP(name, op) \ |
55 | -PPC_OP(glue(glue(l, name), MEMSUFFIX)) \ | |
97 | +void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \ | |
56 | 98 | { \ |
57 | - T1 = glue(op, MEMSUFFIX)(T0); \ | |
99 | + T1 = glue(op, MEMSUFFIX)((uint32_t)T0); \ | |
58 | 100 | RETURN(); \ |
59 | 101 | } |
60 | 102 | |
103 | +#if defined(TARGET_PPC64) | |
104 | +#define PPC_LD_OP_64(name, op) \ | |
105 | +void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \ | |
106 | +{ \ | |
107 | + T1 = glue(op, MEMSUFFIX)((uint64_t)T0); \ | |
108 | + RETURN(); \ | |
109 | +} | |
110 | +#endif | |
111 | + | |
61 | 112 | #define PPC_ST_OP(name, op) \ |
62 | -PPC_OP(glue(glue(st, name), MEMSUFFIX)) \ | |
113 | +void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \ | |
63 | 114 | { \ |
64 | - glue(op, MEMSUFFIX)(T0, T1); \ | |
115 | + glue(op, MEMSUFFIX)((uint32_t)T0, T1); \ | |
65 | 116 | RETURN(); \ |
66 | 117 | } |
67 | 118 | |
119 | +#if defined(TARGET_PPC64) | |
120 | +#define PPC_ST_OP_64(name, op) \ | |
121 | +void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \ | |
122 | +{ \ | |
123 | + glue(op, MEMSUFFIX)((uint64_t)T0, T1); \ | |
124 | + RETURN(); \ | |
125 | +} | |
126 | +#endif | |
127 | + | |
68 | 128 | PPC_LD_OP(bz, ldub); |
69 | 129 | PPC_LD_OP(ha, ldsw); |
70 | 130 | PPC_LD_OP(hz, lduw); |
71 | 131 | PPC_LD_OP(wz, ldl); |
132 | +#if defined(TARGET_PPC64) | |
133 | +PPC_LD_OP(d, ldq); | |
134 | +PPC_LD_OP(wa, ldsl); | |
135 | +PPC_LD_OP_64(d, ldq); | |
136 | +PPC_LD_OP_64(wa, ldsl); | |
137 | +PPC_LD_OP_64(bz, ldub); | |
138 | +PPC_LD_OP_64(ha, ldsw); | |
139 | +PPC_LD_OP_64(hz, lduw); | |
140 | +PPC_LD_OP_64(wz, ldl); | |
141 | +#endif | |
72 | 142 | |
73 | 143 | PPC_LD_OP(ha_le, ld16rs); |
74 | 144 | PPC_LD_OP(hz_le, ld16r); |
75 | 145 | PPC_LD_OP(wz_le, ld32r); |
146 | +#if defined(TARGET_PPC64) | |
147 | +PPC_LD_OP(d_le, ld64r); | |
148 | +PPC_LD_OP(wa_le, ld32rs); | |
149 | +PPC_LD_OP_64(d_le, ld64r); | |
150 | +PPC_LD_OP_64(wa_le, ld32rs); | |
151 | +PPC_LD_OP_64(ha_le, ld16rs); | |
152 | +PPC_LD_OP_64(hz_le, ld16r); | |
153 | +PPC_LD_OP_64(wz_le, ld32r); | |
154 | +#endif | |
76 | 155 | |
77 | 156 | /*** Integer store ***/ |
78 | 157 | PPC_ST_OP(b, stb); |
79 | 158 | PPC_ST_OP(h, stw); |
80 | 159 | PPC_ST_OP(w, stl); |
160 | +#if defined(TARGET_PPC64) | |
161 | +PPC_ST_OP(d, stq); | |
162 | +PPC_ST_OP_64(d, stq); | |
163 | +PPC_ST_OP_64(b, stb); | |
164 | +PPC_ST_OP_64(h, stw); | |
165 | +PPC_ST_OP_64(w, stl); | |
166 | +#endif | |
81 | 167 | |
82 | 168 | PPC_ST_OP(h_le, st16r); |
83 | 169 | PPC_ST_OP(w_le, st32r); |
170 | +#if defined(TARGET_PPC64) | |
171 | +PPC_ST_OP(d_le, st64r); | |
172 | +PPC_ST_OP_64(d_le, st64r); | |
173 | +PPC_ST_OP_64(h_le, st16r); | |
174 | +PPC_ST_OP_64(w_le, st32r); | |
175 | +#endif | |
84 | 176 | |
85 | 177 | /*** Integer load and store with byte reverse ***/ |
86 | 178 | PPC_LD_OP(hbr, ld16r); |
87 | 179 | PPC_LD_OP(wbr, ld32r); |
88 | 180 | PPC_ST_OP(hbr, st16r); |
89 | 181 | PPC_ST_OP(wbr, st32r); |
182 | +#if defined(TARGET_PPC64) | |
183 | +PPC_LD_OP_64(hbr, ld16r); | |
184 | +PPC_LD_OP_64(wbr, ld32r); | |
185 | +PPC_ST_OP_64(hbr, st16r); | |
186 | +PPC_ST_OP_64(wbr, st32r); | |
187 | +#endif | |
90 | 188 | |
91 | 189 | PPC_LD_OP(hbr_le, lduw); |
92 | 190 | PPC_LD_OP(wbr_le, ldl); |
93 | 191 | PPC_ST_OP(hbr_le, stw); |
94 | 192 | PPC_ST_OP(wbr_le, stl); |
193 | +#if defined(TARGET_PPC64) | |
194 | +PPC_LD_OP_64(hbr_le, lduw); | |
195 | +PPC_LD_OP_64(wbr_le, ldl); | |
196 | +PPC_ST_OP_64(hbr_le, stw); | |
197 | +PPC_ST_OP_64(wbr_le, stl); | |
198 | +#endif | |
95 | 199 | |
96 | 200 | /*** Integer load and store multiple ***/ |
97 | -PPC_OP(glue(lmw, MEMSUFFIX)) | |
201 | +void OPPROTO glue(op_lmw, MEMSUFFIX) (void) | |
98 | 202 | { |
99 | 203 | glue(do_lmw, MEMSUFFIX)(PARAM1); |
100 | 204 | RETURN(); |
101 | 205 | } |
102 | 206 | |
103 | -PPC_OP(glue(lmw_le, MEMSUFFIX)) | |
207 | +#if defined(TARGET_PPC64) | |
208 | +void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void) | |
209 | +{ | |
210 | + glue(do_lmw_64, MEMSUFFIX)(PARAM1); | |
211 | + RETURN(); | |
212 | +} | |
213 | +#endif | |
214 | + | |
215 | +void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void) | |
104 | 216 | { |
105 | 217 | glue(do_lmw_le, MEMSUFFIX)(PARAM1); |
106 | 218 | RETURN(); |
107 | 219 | } |
108 | 220 | |
109 | -PPC_OP(glue(stmw, MEMSUFFIX)) | |
221 | +#if defined(TARGET_PPC64) | |
222 | +void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void) | |
223 | +{ | |
224 | + glue(do_lmw_le_64, MEMSUFFIX)(PARAM1); | |
225 | + RETURN(); | |
226 | +} | |
227 | +#endif | |
228 | + | |
229 | +void OPPROTO glue(op_stmw, MEMSUFFIX) (void) | |
110 | 230 | { |
111 | 231 | glue(do_stmw, MEMSUFFIX)(PARAM1); |
112 | 232 | RETURN(); |
113 | 233 | } |
114 | 234 | |
115 | -PPC_OP(glue(stmw_le, MEMSUFFIX)) | |
235 | +#if defined(TARGET_PPC64) | |
236 | +void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void) | |
237 | +{ | |
238 | + glue(do_stmw_64, MEMSUFFIX)(PARAM1); | |
239 | + RETURN(); | |
240 | +} | |
241 | +#endif | |
242 | + | |
243 | +void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void) | |
116 | 244 | { |
117 | 245 | glue(do_stmw_le, MEMSUFFIX)(PARAM1); |
118 | 246 | RETURN(); |
119 | 247 | } |
120 | 248 | |
249 | +#if defined(TARGET_PPC64) | |
250 | +void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void) | |
251 | +{ | |
252 | + glue(do_stmw_le_64, MEMSUFFIX)(PARAM1); | |
253 | + RETURN(); | |
254 | +} | |
255 | +#endif | |
256 | + | |
121 | 257 | /*** Integer load and store strings ***/ |
122 | -PPC_OP(glue(lswi, MEMSUFFIX)) | |
258 | +void OPPROTO glue(op_lswi, MEMSUFFIX) (void) | |
259 | +{ | |
260 | + glue(do_lsw, MEMSUFFIX)(PARAM1); | |
261 | + RETURN(); | |
262 | +} | |
263 | + | |
264 | +#if defined(TARGET_PPC64) | |
265 | +void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void) | |
123 | 266 | { |
124 | - glue(do_lsw, MEMSUFFIX)(PARAM(1)); | |
267 | + glue(do_lsw_64, MEMSUFFIX)(PARAM1); | |
125 | 268 | RETURN(); |
126 | 269 | } |
270 | +#endif | |
127 | 271 | |
128 | -PPC_OP(glue(lswi_le, MEMSUFFIX)) | |
272 | +void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void) | |
129 | 273 | { |
130 | - glue(do_lsw_le, MEMSUFFIX)(PARAM(1)); | |
274 | + glue(do_lsw_le, MEMSUFFIX)(PARAM1); | |
131 | 275 | RETURN(); |
132 | 276 | } |
133 | 277 | |
278 | +#if defined(TARGET_PPC64) | |
279 | +void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void) | |
280 | +{ | |
281 | + glue(do_lsw_le_64, MEMSUFFIX)(PARAM1); | |
282 | + RETURN(); | |
283 | +} | |
284 | +#endif | |
285 | + | |
134 | 286 | /* PPC32 specification says we must generate an exception if |
135 | 287 | * rA is in the range of registers to be loaded. |
136 | 288 |  * On the other hand, IBM says this is valid, but rA won't be loaded. | 
137 | 289 | * For now, I'll follow the spec... |
138 | 290 | */ |
139 | -PPC_OP(glue(lswx, MEMSUFFIX)) | |
291 | +void OPPROTO glue(op_lswx, MEMSUFFIX) (void) | |
292 | +{ | |
294 | +    /* Note: T1 comes from xer_bc, so no cast is needed */ | 
294 | + if (likely(T1 != 0)) { | |
295 | + if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) || | |
296 | + (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) { | |
297 | + do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX); | |
298 | + } else { | |
299 | + glue(do_lsw, MEMSUFFIX)(PARAM1); | |
300 | + } | |
301 | + } | |
302 | + RETURN(); | |
303 | +} | |
304 | + | |
305 | +#if defined(TARGET_PPC64) | |
306 | +void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void) | |
307 | +{ | |
308 | +    /* Note: T1 comes from xer_bc, so no cast is needed */ | 
309 | + if (likely(T1 != 0)) { | |
310 | + if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) || | |
311 | + (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) { | |
312 | + do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX); | |
313 | + } else { | |
314 | + glue(do_lsw_64, MEMSUFFIX)(PARAM1); | |
315 | + } | |
316 | + } | |
317 | + RETURN(); | |
318 | +} | |
319 | +#endif | |
320 | + | |
321 | +void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void) | |
140 | 322 | { |
141 | - if (unlikely(T1 > 0)) { | |
323 | +    /* Note: T1 comes from xer_bc, so no cast is needed */ | 
324 | + if (likely(T1 != 0)) { | |
142 | 325 | if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) || |
143 | 326 | (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) { |
144 | 327 | do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX); |
145 | 328 | } else { |
146 | - glue(do_lsw, MEMSUFFIX)(PARAM(1)); | |
329 | + glue(do_lsw_le, MEMSUFFIX)(PARAM1); | |
147 | 330 | } |
148 | 331 | } |
149 | 332 | RETURN(); |
150 | 333 | } |
151 | 334 | |
152 | -PPC_OP(glue(lswx_le, MEMSUFFIX)) | |
335 | +#if defined(TARGET_PPC64) | |
336 | +void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void) | |
153 | 337 | { |
154 | - if (unlikely(T1 > 0)) { | |
338 | + /* Note: T1 comes from xer_bc then no cast is needed */ | |
339 | + if (likely(T1 != 0)) { | |
155 | 340 | if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) || |
156 | 341 | (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) { |
157 | 342 | do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX); |
158 | 343 | } else { |
159 | - glue(do_lsw_le, MEMSUFFIX)(PARAM(1)); | |
344 | + glue(do_lsw_le_64, MEMSUFFIX)(PARAM1); | |
160 | 345 | } |
161 | 346 | } |
162 | 347 | RETURN(); |
163 | 348 | } |
349 | +#endif | |
350 | + | |
351 | +void OPPROTO glue(op_stsw, MEMSUFFIX) (void) | |
352 | +{ | |
353 | + glue(do_stsw, MEMSUFFIX)(PARAM1); | |
354 | + RETURN(); | |
355 | +} | |
356 | + | |
357 | +#if defined(TARGET_PPC64) | |
358 | +void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void) | |
359 | +{ | |
360 | + glue(do_stsw_64, MEMSUFFIX)(PARAM1); | |
361 | + RETURN(); | |
362 | +} | |
363 | +#endif | |
164 | 364 | |
165 | -PPC_OP(glue(stsw, MEMSUFFIX)) | |
365 | +void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void) | |
166 | 366 | { |
167 | - glue(do_stsw, MEMSUFFIX)(PARAM(1)); | |
367 | + glue(do_stsw_le, MEMSUFFIX)(PARAM1); | |
168 | 368 | RETURN(); |
169 | 369 | } |
170 | 370 | |
171 | -PPC_OP(glue(stsw_le, MEMSUFFIX)) | |
371 | +#if defined(TARGET_PPC64) | |
372 | +void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void) | |
172 | 373 | { |
173 | - glue(do_stsw_le, MEMSUFFIX)(PARAM(1)); | |
374 | + glue(do_stsw_le_64, MEMSUFFIX)(PARAM1); | |
174 | 375 | RETURN(); |
175 | 376 | } |
377 | +#endif | |
176 | 378 | |
177 | 379 | /*** Floating-point store ***/ |
178 | 380 | #define PPC_STF_OP(name, op) \ |
179 | -PPC_OP(glue(glue(st, name), MEMSUFFIX)) \ | |
381 | +void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \ | |
382 | +{ \ | |
383 | + glue(op, MEMSUFFIX)((uint32_t)T0, FT0); \ | |
384 | + RETURN(); \ | |
385 | +} | |
386 | + | |
387 | +#if defined(TARGET_PPC64) | |
388 | +#define PPC_STF_OP_64(name, op) \ | |
389 | +void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \ | |
180 | 390 | { \ |
181 | - glue(op, MEMSUFFIX)(T0, FT0); \ | |
391 | + glue(op, MEMSUFFIX)((uint64_t)T0, FT0); \ | |
182 | 392 | RETURN(); \ |
183 | 393 | } |
394 | +#endif | |
184 | 395 | |
185 | 396 | PPC_STF_OP(fd, stfq); |
186 | 397 | PPC_STF_OP(fs, stfl); |
398 | +#if defined(TARGET_PPC64) | |
399 | +PPC_STF_OP_64(fd, stfq); | |
400 | +PPC_STF_OP_64(fs, stfl); | |
401 | +#endif | |
187 | 402 | |
188 | 403 | static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d) |
189 | 404 | { |
... | ... | @@ -221,17 +436,34 @@ static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f) |
221 | 436 | |
222 | 437 | PPC_STF_OP(fd_le, stfqr); |
223 | 438 | PPC_STF_OP(fs_le, stflr); |
439 | +#if defined(TARGET_PPC64) | |
440 | +PPC_STF_OP_64(fd_le, stfqr); | |
441 | +PPC_STF_OP_64(fs_le, stflr); | |
442 | +#endif | |
224 | 443 | |
225 | 444 | /*** Floating-point load ***/ |
226 | 445 | #define PPC_LDF_OP(name, op) \ |
227 | -PPC_OP(glue(glue(l, name), MEMSUFFIX)) \ | |
446 | +void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \ | |
447 | +{ \ | |
448 | + FT0 = glue(op, MEMSUFFIX)((uint32_t)T0); \ | |
449 | + RETURN(); \ | |
450 | +} | |
451 | + | |
452 | +#if defined(TARGET_PPC64) | |
453 | +#define PPC_LDF_OP_64(name, op) \ | |
454 | +void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \ | |
228 | 455 | { \ |
229 | - FT0 = glue(op, MEMSUFFIX)(T0); \ | |
456 | + FT0 = glue(op, MEMSUFFIX)((uint64_t)T0); \ | |
230 | 457 | RETURN(); \ |
231 | 458 | } |
459 | +#endif | |
232 | 460 | |
233 | 461 | PPC_LDF_OP(fd, ldfq); |
234 | 462 | PPC_LDF_OP(fs, ldfl); |
463 | +#if defined(TARGET_PPC64) | |
464 | +PPC_LDF_OP_64(fd, ldfq); | |
465 | +PPC_LDF_OP_64(fs, ldfl); | |
466 | +#endif | |
235 | 467 | |
236 | 468 | static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA) |
237 | 469 | { |
... | ... | @@ -271,40 +503,142 @@ static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA) |
271 | 503 | |
272 | 504 | PPC_LDF_OP(fd_le, ldfqr); |
273 | 505 | PPC_LDF_OP(fs_le, ldflr); |
506 | +#if defined(TARGET_PPC64) | |
507 | +PPC_LDF_OP_64(fd_le, ldfqr); | |
508 | +PPC_LDF_OP_64(fs_le, ldflr); | |
509 | +#endif | |
274 | 510 | |
275 | 511 | /* Load and set reservation */ |
276 | -PPC_OP(glue(lwarx, MEMSUFFIX)) | |
512 | +void OPPROTO glue(op_lwarx, MEMSUFFIX) (void) | |
513 | +{ | |
514 | + if (unlikely(T0 & 0x03)) { | |
515 | + do_raise_exception(EXCP_ALIGN); | |
516 | + } else { | |
517 | + T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0); | |
518 | + regs->reserve = (uint32_t)T0; | |
519 | + } | |
520 | + RETURN(); | |
521 | +} | |
522 | + | |
523 | +#if defined(TARGET_PPC64) | |
524 | +void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void) | |
525 | +{ | |
526 | + if (unlikely(T0 & 0x03)) { | |
527 | + do_raise_exception(EXCP_ALIGN); | |
528 | + } else { | |
529 | + T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0); | |
530 | + regs->reserve = (uint64_t)T0; | |
531 | + } | |
532 | + RETURN(); | |
533 | +} | |
534 | + | |
535 | +void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void) | |
536 | +{ | |
537 | + if (unlikely(T0 & 0x03)) { | |
538 | + do_raise_exception(EXCP_ALIGN); | |
539 | + } else { | |
540 | + T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0); | |
541 | + regs->reserve = (uint64_t)T0; | |
542 | + } | |
543 | + RETURN(); | |
544 | +} | |
545 | +#endif | |
546 | + | |
547 | +void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void) | |
548 | +{ | |
549 | + if (unlikely(T0 & 0x03)) { | |
550 | + do_raise_exception(EXCP_ALIGN); | |
551 | + } else { | |
552 | + T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0); | |
553 | + regs->reserve = (uint32_t)T0; | |
554 | + } | |
555 | + RETURN(); | |
556 | +} | |
557 | + | |
558 | +#if defined(TARGET_PPC64) | |
559 | +void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void) | |
277 | 560 | { |
278 | 561 | if (unlikely(T0 & 0x03)) { |
279 | 562 | do_raise_exception(EXCP_ALIGN); |
280 | 563 | } else { |
281 | - T1 = glue(ldl, MEMSUFFIX)(T0); | |
282 | - regs->reserve = T0; | |
564 | + T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0); | |
565 | + regs->reserve = (uint64_t)T0; | |
283 | 566 | } |
284 | 567 | RETURN(); |
285 | 568 | } |
286 | 569 | |
287 | -PPC_OP(glue(lwarx_le, MEMSUFFIX)) | |
570 | +void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void) | |
288 | 571 | { |
289 | 572 | if (unlikely(T0 & 0x03)) { |
290 | 573 | do_raise_exception(EXCP_ALIGN); |
291 | 574 | } else { |
292 | - T1 = glue(ld32r, MEMSUFFIX)(T0); | |
293 | - regs->reserve = T0; | |
575 | + T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0); | |
576 | + regs->reserve = (uint64_t)T0; | |
294 | 577 | } |
295 | 578 | RETURN(); |
296 | 579 | } |
580 | +#endif | |
297 | 581 | |
298 | 582 | /* Store with reservation */ |
299 | -PPC_OP(glue(stwcx, MEMSUFFIX)) | |
583 | +void OPPROTO glue(op_stwcx, MEMSUFFIX) (void) | |
584 | +{ | |
585 | + if (unlikely(T0 & 0x03)) { | |
586 | + do_raise_exception(EXCP_ALIGN); | |
587 | + } else { | |
588 | + if (unlikely(regs->reserve != (uint32_t)T0)) { | |
589 | + env->crf[0] = xer_ov; | |
590 | + } else { | |
591 | + glue(stl, MEMSUFFIX)((uint32_t)T0, T1); | |
592 | + env->crf[0] = xer_ov | 0x02; | |
593 | + } | |
594 | + } | |
595 | + regs->reserve = -1; | |
596 | + RETURN(); | |
597 | +} | |
598 | + | |
599 | +#if defined(TARGET_PPC64) | |
600 | +void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void) | |
601 | +{ | |
602 | + if (unlikely(T0 & 0x03)) { | |
603 | + do_raise_exception(EXCP_ALIGN); | |
604 | + } else { | |
605 | + if (unlikely(regs->reserve != (uint64_t)T0)) { | |
606 | + env->crf[0] = xer_ov; | |
607 | + } else { | |
608 | + glue(stl, MEMSUFFIX)((uint64_t)T0, T1); | |
609 | + env->crf[0] = xer_ov | 0x02; | |
610 | + } | |
611 | + } | |
612 | + regs->reserve = -1; | |
613 | + RETURN(); | |
614 | +} | |
615 | + | |
616 | +void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void) | |
300 | 617 | { |
301 | 618 | if (unlikely(T0 & 0x03)) { |
302 | 619 | do_raise_exception(EXCP_ALIGN); |
303 | 620 | } else { |
304 | - if (unlikely(regs->reserve != T0)) { | |
621 | + if (unlikely(regs->reserve != (uint64_t)T0)) { | |
305 | 622 | env->crf[0] = xer_ov; |
306 | 623 | } else { |
307 | - glue(stl, MEMSUFFIX)(T0, T1); | |
624 | + glue(stq, MEMSUFFIX)((uint64_t)T0, T1); | |
625 | + env->crf[0] = xer_ov | 0x02; | |
626 | + } | |
627 | + } | |
628 | + regs->reserve = -1; | |
629 | + RETURN(); | |
630 | +} | |
631 | +#endif | |
632 | + | |
633 | +void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void) | |
634 | +{ | |
635 | + if (unlikely(T0 & 0x03)) { | |
636 | + do_raise_exception(EXCP_ALIGN); | |
637 | + } else { | |
638 | + if (unlikely(regs->reserve != (uint32_t)T0)) { | |
639 | + env->crf[0] = xer_ov; | |
640 | + } else { | |
641 | + glue(st32r, MEMSUFFIX)((uint32_t)T0, T1); | |
308 | 642 | env->crf[0] = xer_ov | 0x02; |
309 | 643 | } |
310 | 644 | } |
... | ... | @@ -312,15 +646,16 @@ PPC_OP(glue(stwcx, MEMSUFFIX)) |
312 | 646 | RETURN(); |
313 | 647 | } |
314 | 648 | |
315 | -PPC_OP(glue(stwcx_le, MEMSUFFIX)) | |
649 | +#if defined(TARGET_PPC64) | |
650 | +void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void) | |
316 | 651 | { |
317 | 652 | if (unlikely(T0 & 0x03)) { |
318 | 653 | do_raise_exception(EXCP_ALIGN); |
319 | 654 | } else { |
320 | - if (unlikely(regs->reserve != T0)) { | |
655 | + if (unlikely(regs->reserve != (uint64_t)T0)) { | |
321 | 656 | env->crf[0] = xer_ov; |
322 | 657 | } else { |
323 | - glue(st32r, MEMSUFFIX)(T0, T1); | |
658 | + glue(st32r, MEMSUFFIX)((uint64_t)T0, T1); | |
324 | 659 | env->crf[0] = xer_ov | 0x02; |
325 | 660 | } |
326 | 661 | } |
... | ... | @@ -328,61 +663,136 @@ PPC_OP(glue(stwcx_le, MEMSUFFIX)) |
328 | 663 | RETURN(); |
329 | 664 | } |
330 | 665 | |
331 | -PPC_OP(glue(dcbz, MEMSUFFIX)) | |
666 | +void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void) | |
667 | +{ | |
668 | + if (unlikely(T0 & 0x03)) { | |
669 | + do_raise_exception(EXCP_ALIGN); | |
670 | + } else { | |
671 | + if (unlikely(regs->reserve != (uint64_t)T0)) { | |
672 | + env->crf[0] = xer_ov; | |
673 | + } else { | |
674 | + glue(st64r, MEMSUFFIX)((uint64_t)T0, T1); | |
675 | + env->crf[0] = xer_ov | 0x02; | |
676 | + } | |
677 | + } | |
678 | + regs->reserve = -1; | |
679 | + RETURN(); | |
680 | +} | |
681 | +#endif | |
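
Taken together, the lwarx/ldarx and stwcx/stdcx micro-ops above model the PowerPC load-reserve/store-conditional protocol: the load records its effective address in regs->reserve, and the conditional store performs the access only when that reservation is still intact, setting the EQ bit (0x02) in CR0 on success and clearing the reservation in every case. A minimal sketch of that decision, with a hypothetical guest_store_u32 helper standing in for the glue(st*, MEMSUFFIX) call:

/* sketch of the store-conditional path; guest_store_u32 is hypothetical */
static int store_conditional_sketch(CPUPPCState *env, target_ulong ea, uint32_t val)
{
    int ok = 0;
    if (env->reserve == ea) {            /* reservation still held            */
        guest_store_u32(ea, val);        /* perform the store                 */
        ok = 1;                          /* caller sets CR0.EQ (0x02)         */
    }
    env->reserve = (target_ulong)-1;     /* reservation is lost either way    */
    return ok;
}

Both halves also raise an alignment exception first when the address is not word-aligned, as the unlikely(T0 & 0x03) checks show.
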
682 | + | |
683 | +void OPPROTO glue(op_dcbz, MEMSUFFIX) (void) | |
684 | +{ | |
685 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0); | |
686 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0); | |
687 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0); | |
688 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0); | |
689 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0); | |
690 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0); | |
691 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0); | |
692 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0); | |
693 | +#if DCACHE_LINE_SIZE == 64 | |
694 | + /* XXX: cache line size should be 64 for POWER & PowerPC 601 */ | |
695 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0); | |
696 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0); | |
697 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0); | |
698 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0); | |
699 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0); | |
700 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0); | |
701 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0); | |
702 | + glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0); | |
703 | +#endif | |
704 | + RETURN(); | |
705 | +} | |
706 | + | |
707 | +#if defined(TARGET_PPC64) | |
708 | +void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void) | |
332 | 709 | { |
333 | - glue(stl, MEMSUFFIX)(T0 + 0x00, 0); | |
334 | - glue(stl, MEMSUFFIX)(T0 + 0x04, 0); | |
335 | - glue(stl, MEMSUFFIX)(T0 + 0x08, 0); | |
336 | - glue(stl, MEMSUFFIX)(T0 + 0x0C, 0); | |
337 | - glue(stl, MEMSUFFIX)(T0 + 0x10, 0); | |
338 | - glue(stl, MEMSUFFIX)(T0 + 0x14, 0); | |
339 | - glue(stl, MEMSUFFIX)(T0 + 0x18, 0); | |
340 | - glue(stl, MEMSUFFIX)(T0 + 0x1C, 0); | |
710 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0); | |
711 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0); | |
712 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0); | |
713 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0); | |
714 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0); | |
715 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0); | |
716 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0); | |
717 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0); | |
341 | 718 | #if DCACHE_LINE_SIZE == 64 |
342 | 719 | /* XXX: cache line size should be 64 for POWER & PowerPC 601 */ |
343 | - glue(stl, MEMSUFFIX)(T0 + 0x20UL, 0); | |
344 | - glue(stl, MEMSUFFIX)(T0 + 0x24UL, 0); | |
345 | - glue(stl, MEMSUFFIX)(T0 + 0x28UL, 0); | |
346 | - glue(stl, MEMSUFFIX)(T0 + 0x2CUL, 0); | |
347 | - glue(stl, MEMSUFFIX)(T0 + 0x30UL, 0); | |
348 | - glue(stl, MEMSUFFIX)(T0 + 0x34UL, 0); | |
349 | - glue(stl, MEMSUFFIX)(T0 + 0x38UL, 0); | |
350 | - glue(stl, MEMSUFFIX)(T0 + 0x3CUL, 0); | |
720 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0); | |
721 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0); | |
722 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0); | |
723 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0); | |
724 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0); | |
725 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0); | |
726 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0); | |
727 | + glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0); | |
351 | 728 | #endif |
352 | 729 | RETURN(); |
353 | 730 | } |
731 | +#endif | |
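
The dcbz micro-ops above clear a full data-cache line with an unrolled run of 32-bit stores: eight for the default 32-byte line, sixteen when DCACHE_LINE_SIZE is 64. The loop-form equivalent, with a hypothetical store_u32 standing in for glue(stl, MEMSUFFIX):

/* sketch: loop form of the unrolled dcbz stores; store_u32 is hypothetical */
static void dcbz_sketch(target_ulong ea, int line_size)   /* line_size: 32 or 64 */
{
    int i;
    for (i = 0; i < line_size; i += 4) {
        store_u32(ea + i, 0);            /* clear each word of the line */
    }
}

As with the other micro-ops, the _64 variant differs only in not truncating the effective address to 32 bits.
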
354 | 732 | |
355 | 733 | /* External access */ |
356 | -PPC_OP(glue(eciwx, MEMSUFFIX)) | |
734 | +void OPPROTO glue(op_eciwx, MEMSUFFIX) (void) | |
735 | +{ | |
736 | + T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0); | |
737 | + RETURN(); | |
738 | +} | |
739 | + | |
740 | +#if defined(TARGET_PPC64) | |
741 | +void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void) | |
742 | +{ | |
743 | + T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0); | |
744 | + RETURN(); | |
745 | +} | |
746 | +#endif | |
747 | + | |
748 | +void OPPROTO glue(op_ecowx, MEMSUFFIX) (void) | |
749 | +{ | |
750 | + glue(stl, MEMSUFFIX)((uint32_t)T0, T1); | |
751 | + RETURN(); | |
752 | +} | |
753 | + | |
754 | +#if defined(TARGET_PPC64) | |
755 | +void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void) | |
357 | 756 | { |
358 | - T1 = glue(ldl, MEMSUFFIX)(T0); | |
757 | + glue(stl, MEMSUFFIX)((uint64_t)T0, T1); | |
359 | 758 | RETURN(); |
360 | 759 | } |
760 | +#endif | |
361 | 761 | |
362 | -PPC_OP(glue(ecowx, MEMSUFFIX)) | |
762 | +void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void) | |
363 | 763 | { |
364 | - glue(stl, MEMSUFFIX)(T0, T1); | |
764 | + T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0); | |
365 | 765 | RETURN(); |
366 | 766 | } |
367 | 767 | |
368 | -PPC_OP(glue(eciwx_le, MEMSUFFIX)) | |
768 | +#if defined(TARGET_PPC64) | |
769 | +void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void) | |
369 | 770 | { |
370 | - T1 = glue(ld32r, MEMSUFFIX)(T0); | |
771 | + T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0); | |
371 | 772 | RETURN(); |
372 | 773 | } |
774 | +#endif | |
373 | 775 | |
374 | -PPC_OP(glue(ecowx_le, MEMSUFFIX)) | |
776 | +void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void) | |
375 | 777 | { |
376 | - glue(st32r, MEMSUFFIX)(T0, T1); | |
778 | + glue(st32r, MEMSUFFIX)((uint32_t)T0, T1); | |
377 | 779 | RETURN(); |
378 | 780 | } |
379 | 781 | |
782 | +#if defined(TARGET_PPC64) | |
783 | +void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void) | |
784 | +{ | |
785 | + glue(st32r, MEMSUFFIX)((uint64_t)T0, T1); | |
786 | + RETURN(); | |
787 | +} | |
788 | +#endif | |
789 | + | |
380 | 790 | /* XXX: those micro-ops need tests ! */ |
381 | 791 | /* PowerPC 601 specific instructions (POWER bridge) */ |
382 | 792 | void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void) |
383 | 793 | { |
384 | 794 | /* When byte count is 0, do nothing */ |
385 | - if (likely(T1 > 0)) { | |
795 | + if (likely(T1 != 0)) { | |
386 | 796 | glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3); |
387 | 797 | } |
388 | 798 | RETURN(); | ... | ... |