Commit 6a6ae23f3c7c80e66e8e900ed9820c7134997a36 (1 parent: 38d14952)

target-ppc: convert SPE load/store to TCG

Signed-off-by: Aurelien Jarno <aurelien@aurel32.net>
git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@5804 c046a42c-6fe2-441c-8c8c-71466251a162

Showing 5 changed files with 330 additions and 570 deletions.
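Every SPE load/store follows the same conversion pattern: the dyngen micro-ops, which passed 64-bit values through fixed CPUPPCState temporaries (t0_64..t2_64, exposed as T0_64..T2_64 and cpu_T64[]), are removed, and each instruction handler now emits the equivalent TCG operations directly on cpu_gpr/cpu_gprh. A condensed before/after sketch of evldd on a 32-bit target, pieced together from the hunks below:

```c
/* Before: dispatch through a per-MMU-mode micro-op table; the micro-op
 * leaves the loaded value in the fixed env temporary T1_64. */
gen_addr_spe_imm_index(cpu_T[0], ctx, 3);      /* EA into the fixed T0 temp */
op_spe_ldst(spe_ldd);                          /* (*gen_op_spe_ldd[ctx->mem_idx])() */
gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]);  /* write T1_64 back to the GPR pair */

/* After: the load and the register writes are emitted inline; a TCG
 * temporary replaces the fixed env field, so t0_64..t2_64 can go away.
 * (addr holds the already-computed effective address.) */
TCGv_i64 t0 = tcg_temp_new_i64();
gen_qemu_ld64(t0, addr, ctx->mem_idx);         /* 64-bit load at the SPE EA */
tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
tcg_gen_shri_i64(t0, t0, 32);
tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
tcg_temp_free_i64(t0);
```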
target-ppc/cpu.h
| ... | ... | @@ -536,12 +536,6 @@ struct CPUPPCState { |
| 536 | 536 | #if (TARGET_LONG_BITS > HOST_LONG_BITS) || defined(HOST_I386) |
| 537 | 537 | target_ulong t2; |
| 538 | 538 | #endif |
| 539 | -#if !defined(TARGET_PPC64) | |
| 540 | - /* temporary fixed-point registers | |
| 541 | - * used to emulate 64 bits registers on 32 bits targets | |
| 542 | - */ | |
| 543 | - uint64_t t0_64, t1_64, t2_64; | |
| 544 | -#endif | |
| 545 | 539 | |
| 546 | 540 | /* general purpose registers */ |
| 547 | 541 | target_ulong gpr[32]; |
target-ppc/exec.h
| ... | ... | @@ -44,16 +44,6 @@ register target_ulong T1 asm(AREG2); |
| 44 | 44 | register target_ulong T2 asm(AREG3); |
| 45 | 45 | #define TDX "%016lx" |
| 46 | 46 | #endif |
| 47 | -/* We may, sometime, need 64 bits registers on 32 bits targets */ | |
| 48 | -#if !defined(TARGET_PPC64) | |
| 49 | -#define T0_64 (env->t0_64) | |
| 50 | -#define T1_64 (env->t1_64) | |
| 51 | -#define T2_64 (env->t2_64) | |
| 52 | -#else | |
| 53 | -#define T0_64 T0 | |
| 54 | -#define T1_64 T1 | |
| 55 | -#define T2_64 T2 | |
| 56 | -#endif | |
| 57 | 47 | |
| 58 | 48 | #define FT0 (env->ft0) |
| 59 | 49 | #define FT1 (env->ft1) |
target-ppc/op.c
| ... | ... | @@ -914,35 +914,3 @@ void OPPROTO op_store_booke_tsr (void) |
| 914 | 914 | } |
| 915 | 915 | #endif /* !defined(CONFIG_USER_ONLY) */ |
| 916 | 916 | |
| 917 | -/* SPE extension */ | |
| 918 | -void OPPROTO op_splatw_T1_64 (void) | |
| 919 | -{ | |
| 920 | - T1_64 = (T1_64 << 32) | (T1_64 & 0x00000000FFFFFFFFULL); | |
| 921 | - RETURN(); | |
| 922 | -} | |
| 923 | - | |
| 924 | -void OPPROTO op_extsh_T1_64 (void) | |
| 925 | -{ | |
| 926 | - T1_64 = (int32_t)((int16_t)T1_64); | |
| 927 | - RETURN(); | |
| 928 | -} | |
| 929 | - | |
| 930 | -void OPPROTO op_sli16_T1_64 (void) | |
| 931 | -{ | |
| 932 | - T1_64 = T1_64 << 16; | |
| 933 | - RETURN(); | |
| 934 | -} | |
| 935 | - | |
| 936 | -void OPPROTO op_sli32_T1_64 (void) | |
| 937 | -{ | |
| 938 | - T1_64 = T1_64 << 32; | |
| 939 | - RETURN(); | |
| 940 | -} | |
| 941 | - | |
| 942 | -void OPPROTO op_srli32_T1_64 (void) | |
| 943 | -{ | |
| 944 | - T1_64 = T1_64 >> 32; | |
| 945 | - RETURN(); | |
| 946 | -} | |
| 947 | - | |
| 948 | - | |
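The micro-ops removed above were only glue around the old SPE helpers; each has a short TCG equivalent that the new translator code emits inline. As a hedged illustration (the helper name below is hypothetical, not part of the commit), op_splatw_T1_64 corresponds to:

```c
/* Duplicate the low 32 bits of a 64-bit value into its high half, the TCG
 * counterpart of the removed op_splatw_T1_64:
 *   T1_64 = (T1_64 << 32) | (T1_64 & 0x00000000FFFFFFFFULL)          */
static void gen_splatw_i64(TCGv_i64 val)
{
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_ext32u_i64(tmp, val);    /* keep the low word only          */
    tcg_gen_shli_i64(val, tmp, 32);  /* copy it into the high half      */
    tcg_gen_or_i64(val, val, tmp);   /* and keep it in the low half too */
    tcg_temp_free_i64(tmp);
}
```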
target-ppc/op_mem.h
| ... | ... | @@ -642,262 +642,4 @@ void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void) |
| 642 | 642 | RETURN(); |
| 643 | 643 | } |
| 644 | 644 | |
| 645 | -/* SPE extension */ | |
| 646 | -#define _PPC_SPE_LD_OP(name, op) \ | |
| 647 | -void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void) \ | |
| 648 | -{ \ | |
| 649 | - T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0); \ | |
| 650 | - RETURN(); \ | |
| 651 | -} | |
| 652 | - | |
| 653 | -#if defined(TARGET_PPC64) | |
| 654 | -#define _PPC_SPE_LD_OP_64(name, op) \ | |
| 655 | -void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void) \ | |
| 656 | -{ \ | |
| 657 | - T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0); \ | |
| 658 | - RETURN(); \ | |
| 659 | -} | |
| 660 | -#define PPC_SPE_LD_OP(name, op) \ | |
| 661 | -_PPC_SPE_LD_OP(name, op); \ | |
| 662 | -_PPC_SPE_LD_OP_64(name, op) | |
| 663 | -#else | |
| 664 | -#define PPC_SPE_LD_OP(name, op) \ | |
| 665 | -_PPC_SPE_LD_OP(name, op) | |
| 666 | -#endif | |
| 667 | - | |
| 668 | -#define _PPC_SPE_ST_OP(name, op) \ | |
| 669 | -void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void) \ | |
| 670 | -{ \ | |
| 671 | - glue(op, MEMSUFFIX)((uint32_t)T0, T1_64); \ | |
| 672 | - RETURN(); \ | |
| 673 | -} | |
| 674 | - | |
| 675 | -#if defined(TARGET_PPC64) | |
| 676 | -#define _PPC_SPE_ST_OP_64(name, op) \ | |
| 677 | -void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void) \ | |
| 678 | -{ \ | |
| 679 | - glue(op, MEMSUFFIX)((uint64_t)T0, T1_64); \ | |
| 680 | - RETURN(); \ | |
| 681 | -} | |
| 682 | -#define PPC_SPE_ST_OP(name, op) \ | |
| 683 | -_PPC_SPE_ST_OP(name, op); \ | |
| 684 | -_PPC_SPE_ST_OP_64(name, op) | |
| 685 | -#else | |
| 686 | -#define PPC_SPE_ST_OP(name, op) \ | |
| 687 | -_PPC_SPE_ST_OP(name, op) | |
| 688 | -#endif | |
| 689 | - | |
| 690 | -PPC_SPE_LD_OP(dd, ldu64); | |
| 691 | -PPC_SPE_ST_OP(dd, st64); | |
| 692 | -PPC_SPE_LD_OP(dd_le, ldu64r); | |
| 693 | -PPC_SPE_ST_OP(dd_le, st64r); | |
| 694 | -static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA) | |
| 695 | -{ | |
| 696 | - uint64_t ret; | |
| 697 | - ret = (uint64_t)glue(ldu32, MEMSUFFIX)(EA) << 32; | |
| 698 | - ret |= (uint64_t)glue(ldu32, MEMSUFFIX)(EA + 4); | |
| 699 | - return ret; | |
| 700 | -} | |
| 701 | -PPC_SPE_LD_OP(dw, spe_ldw); | |
| 702 | -static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, | |
| 703 | - uint64_t data) | |
| 704 | -{ | |
| 705 | - glue(st32, MEMSUFFIX)(EA, data >> 32); | |
| 706 | - glue(st32, MEMSUFFIX)(EA + 4, data); | |
| 707 | -} | |
| 708 | -PPC_SPE_ST_OP(dw, spe_stdw); | |
| 709 | -static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA) | |
| 710 | -{ | |
| 711 | - uint64_t ret; | |
| 712 | - ret = (uint64_t)glue(ldu32r, MEMSUFFIX)(EA) << 32; | |
| 713 | - ret |= (uint64_t)glue(ldu32r, MEMSUFFIX)(EA + 4); | |
| 714 | - return ret; | |
| 715 | -} | |
| 716 | -PPC_SPE_LD_OP(dw_le, spe_ldw_le); | |
| 717 | -static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA, | |
| 718 | - uint64_t data) | |
| 719 | -{ | |
| 720 | - glue(st32r, MEMSUFFIX)(EA, data >> 32); | |
| 721 | - glue(st32r, MEMSUFFIX)(EA + 4, data); | |
| 722 | -} | |
| 723 | -PPC_SPE_ST_OP(dw_le, spe_stdw_le); | |
| 724 | -static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA) | |
| 725 | -{ | |
| 726 | - uint64_t ret; | |
| 727 | - ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48; | |
| 728 | - ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 32; | |
| 729 | - ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 4) << 16; | |
| 730 | - ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 6); | |
| 731 | - return ret; | |
| 732 | -} | |
| 733 | -PPC_SPE_LD_OP(dh, spe_ldh); | |
| 734 | -static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, | |
| 735 | - uint64_t data) | |
| 736 | -{ | |
| 737 | - glue(st16, MEMSUFFIX)(EA, data >> 48); | |
| 738 | - glue(st16, MEMSUFFIX)(EA + 2, data >> 32); | |
| 739 | - glue(st16, MEMSUFFIX)(EA + 4, data >> 16); | |
| 740 | - glue(st16, MEMSUFFIX)(EA + 6, data); | |
| 741 | -} | |
| 742 | -PPC_SPE_ST_OP(dh, spe_stdh); | |
| 743 | -static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA) | |
| 744 | -{ | |
| 745 | - uint64_t ret; | |
| 746 | - ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48; | |
| 747 | - ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 32; | |
| 748 | - ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 4) << 16; | |
| 749 | - ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 6); | |
| 750 | - return ret; | |
| 751 | -} | |
| 752 | -PPC_SPE_LD_OP(dh_le, spe_ldh_le); | |
| 753 | -static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA, | |
| 754 | - uint64_t data) | |
| 755 | -{ | |
| 756 | - glue(st16r, MEMSUFFIX)(EA, data >> 48); | |
| 757 | - glue(st16r, MEMSUFFIX)(EA + 2, data >> 32); | |
| 758 | - glue(st16r, MEMSUFFIX)(EA + 4, data >> 16); | |
| 759 | - glue(st16r, MEMSUFFIX)(EA + 6, data); | |
| 760 | -} | |
| 761 | -PPC_SPE_ST_OP(dh_le, spe_stdh_le); | |
| 762 | -static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA) | |
| 763 | -{ | |
| 764 | - uint64_t ret; | |
| 765 | - ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48; | |
| 766 | - ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 16; | |
| 767 | - return ret; | |
| 768 | -} | |
| 769 | -PPC_SPE_LD_OP(whe, spe_lwhe); | |
| 770 | -static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, | |
| 771 | - uint64_t data) | |
| 772 | -{ | |
| 773 | - glue(st16, MEMSUFFIX)(EA, data >> 48); | |
| 774 | - glue(st16, MEMSUFFIX)(EA + 2, data >> 16); | |
| 775 | -} | |
| 776 | -PPC_SPE_ST_OP(whe, spe_stwhe); | |
| 777 | -static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA) | |
| 778 | -{ | |
| 779 | - uint64_t ret; | |
| 780 | - ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48; | |
| 781 | - ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 16; | |
| 782 | - return ret; | |
| 783 | -} | |
| 784 | -PPC_SPE_LD_OP(whe_le, spe_lwhe_le); | |
| 785 | -static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA, | |
| 786 | - uint64_t data) | |
| 787 | -{ | |
| 788 | - glue(st16r, MEMSUFFIX)(EA, data >> 48); | |
| 789 | - glue(st16r, MEMSUFFIX)(EA + 2, data >> 16); | |
| 790 | -} | |
| 791 | -PPC_SPE_ST_OP(whe_le, spe_stwhe_le); | |
| 792 | -static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA) | |
| 793 | -{ | |
| 794 | - uint64_t ret; | |
| 795 | - ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 32; | |
| 796 | - ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2); | |
| 797 | - return ret; | |
| 798 | -} | |
| 799 | -PPC_SPE_LD_OP(whou, spe_lwhou); | |
| 800 | -static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA) | |
| 801 | -{ | |
| 802 | - uint64_t ret; | |
| 803 | - ret = ((uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA))) << 32; | |
| 804 | - ret |= (uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA + 2)); | |
| 805 | - return ret; | |
| 806 | -} | |
| 807 | -PPC_SPE_LD_OP(whos, spe_lwhos); | |
| 808 | -static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, | |
| 809 | - uint64_t data) | |
| 810 | -{ | |
| 811 | - glue(st16, MEMSUFFIX)(EA, data >> 32); | |
| 812 | - glue(st16, MEMSUFFIX)(EA + 2, data); | |
| 813 | -} | |
| 814 | -PPC_SPE_ST_OP(who, spe_stwho); | |
| 815 | -static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA) | |
| 816 | -{ | |
| 817 | - uint64_t ret; | |
| 818 | - ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 32; | |
| 819 | - ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2); | |
| 820 | - return ret; | |
| 821 | -} | |
| 822 | -PPC_SPE_LD_OP(whou_le, spe_lwhou_le); | |
| 823 | -static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA) | |
| 824 | -{ | |
| 825 | - uint64_t ret; | |
| 826 | - ret = ((uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA))) << 32; | |
| 827 | - ret |= (uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA + 2)); | |
| 828 | - return ret; | |
| 829 | -} | |
| 830 | -PPC_SPE_LD_OP(whos_le, spe_lwhos_le); | |
| 831 | -static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA, | |
| 832 | - uint64_t data) | |
| 833 | -{ | |
| 834 | - glue(st16r, MEMSUFFIX)(EA, data >> 32); | |
| 835 | - glue(st16r, MEMSUFFIX)(EA + 2, data); | |
| 836 | -} | |
| 837 | -PPC_SPE_ST_OP(who_le, spe_stwho_le); | |
| 838 | -static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, | |
| 839 | - uint64_t data) | |
| 840 | -{ | |
| 841 | - glue(st32, MEMSUFFIX)(EA, data); | |
| 842 | -} | |
| 843 | -PPC_SPE_ST_OP(wwo, spe_stwwo); | |
| 844 | -static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA, | |
| 845 | - uint64_t data) | |
| 846 | -{ | |
| 847 | - glue(st32r, MEMSUFFIX)(EA, data); | |
| 848 | -} | |
| 849 | -PPC_SPE_ST_OP(wwo_le, spe_stwwo_le); | |
| 850 | -static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA) | |
| 851 | -{ | |
| 852 | - uint16_t tmp; | |
| 853 | - tmp = glue(ldu16, MEMSUFFIX)(EA); | |
| 854 | - return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16); | |
| 855 | -} | |
| 856 | -PPC_SPE_LD_OP(h, spe_lh); | |
| 857 | -static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA) | |
| 858 | -{ | |
| 859 | - uint16_t tmp; | |
| 860 | - tmp = glue(ldu16r, MEMSUFFIX)(EA); | |
| 861 | - return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16); | |
| 862 | -} | |
| 863 | -PPC_SPE_LD_OP(h_le, spe_lh_le); | |
| 864 | -static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA) | |
| 865 | -{ | |
| 866 | - uint32_t tmp; | |
| 867 | - tmp = glue(ldu32, MEMSUFFIX)(EA); | |
| 868 | - return ((uint64_t)tmp << 32) | (uint64_t)tmp; | |
| 869 | -} | |
| 870 | -PPC_SPE_LD_OP(wwsplat, spe_lwwsplat); | |
| 871 | -static always_inline | |
| 872 | -uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA) | |
| 873 | -{ | |
| 874 | - uint32_t tmp; | |
| 875 | - tmp = glue(ldu32r, MEMSUFFIX)(EA); | |
| 876 | - return ((uint64_t)tmp << 32) | (uint64_t)tmp; | |
| 877 | -} | |
| 878 | -PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le); | |
| 879 | -static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA) | |
| 880 | -{ | |
| 881 | - uint64_t ret; | |
| 882 | - uint16_t tmp; | |
| 883 | - tmp = glue(ldu16, MEMSUFFIX)(EA); | |
| 884 | - ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32); | |
| 885 | - tmp = glue(ldu16, MEMSUFFIX)(EA + 2); | |
| 886 | - ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp; | |
| 887 | - return ret; | |
| 888 | -} | |
| 889 | -PPC_SPE_LD_OP(whsplat, spe_lwhsplat); | |
| 890 | -static always_inline | |
| 891 | -uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA) | |
| 892 | -{ | |
| 893 | - uint64_t ret; | |
| 894 | - uint16_t tmp; | |
| 895 | - tmp = glue(ldu16r, MEMSUFFIX)(EA); | |
| 896 | - ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32); | |
| 897 | - tmp = glue(ldu16r, MEMSUFFIX)(EA + 2); | |
| 898 | - ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp; | |
| 899 | - return ret; | |
| 900 | -} | |
| 901 | -PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le); | |
| 902 | - | |
| 903 | 645 | #undef MEMSUFFIX |
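Each removed op_mem.h helper was compiled once per memory-access suffix (raw, or user/kernel/hypv under the softmmu) and reached through an NB_MEM_FUNCS-sized op table. The replacement code in translate.c below emits the component loads/stores directly and passes ctx->mem_idx to gen_qemu_*, so one handler covers every access mode. A sketch of the correspondence for evldw, with the old helper shown expanded for MEMSUFFIX = _user (illustrative) next to the new 32-bit-target TCG sequence from this commit:

```c
/* Old: one helper instance per suffix, selected at translate time
 * through the gen_op_spe_ldw[ctx->mem_idx] table. */
static always_inline uint64_t spe_ldw_user(target_ulong EA)
{
    uint64_t ret;
    ret  = (uint64_t)ldu32_user(EA) << 32;   /* first (high) word  */
    ret |= (uint64_t)ldu32_user(EA + 4);     /* second (low) word  */
    return ret;
}

/* New: the same two 32-bit loads are generated inline; the MMU mode is
 * carried by ctx->mem_idx instead of being baked into the helper name. */
gen_qemu_ld32u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
tcg_gen_addi_tl(addr, addr, 4);
gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
```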
target-ppc/translate.c
| ... | ... | @@ -71,11 +71,6 @@ static TCGv_i32 cpu_access_type; |
| 71 | 71 | |
| 72 | 72 | /* dyngen register indexes */ |
| 73 | 73 | static TCGv cpu_T[3]; |
| 74 | -#if defined(TARGET_PPC64) | |
| 75 | -#define cpu_T64 cpu_T | |
| 76 | -#else | |
| 77 | -static TCGv_i64 cpu_T64[3]; | |
| 78 | -#endif | |
| 79 | 74 | static TCGv_i64 cpu_FT[2]; |
| 80 | 75 | |
| 81 | 76 | #include "gen-icount.h" |
| ... | ... | @@ -107,14 +102,6 @@ void ppc_translate_init(void) |
| 107 | 102 | cpu_T[2] = tcg_global_reg_new(TCG_AREG3, "T2"); |
| 108 | 103 | #endif |
| 109 | 104 | #endif |
| 110 | -#if !defined(TARGET_PPC64) | |
| 111 | - cpu_T64[0] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t0_64), | |
| 112 | - "T0_64"); | |
| 113 | - cpu_T64[1] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t1_64), | |
| 114 | - "T1_64"); | |
| 115 | - cpu_T64[2] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t2_64), | |
| 116 | - "T2_64"); | |
| 117 | -#endif | |
| 118 | 105 | |
| 119 | 106 | cpu_FT[0] = tcg_global_mem_new_i64(TCG_AREG0, |
| 120 | 107 | offsetof(CPUState, ft0), "FT0"); |
| ... | ... | @@ -6031,91 +6018,6 @@ static always_inline void gen_speundef (DisasContext *ctx) |
| 6031 | 6018 | GEN_EXCP_INVAL(ctx); |
| 6032 | 6019 | } |
| 6033 | 6020 | |
| 6034 | -/* SPE load and stores */ | |
| 6035 | -static always_inline void gen_addr_spe_imm_index (TCGv EA, DisasContext *ctx, int sh) | |
| 6036 | -{ | |
| 6037 | - target_long simm = rB(ctx->opcode); | |
| 6038 | - | |
| 6039 | - if (rA(ctx->opcode) == 0) | |
| 6040 | - tcg_gen_movi_tl(EA, simm << sh); | |
| 6041 | - else if (likely(simm != 0)) | |
| 6042 | - tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm << sh); | |
| 6043 | - else | |
| 6044 | - tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); | |
| 6045 | -} | |
| 6046 | - | |
| 6047 | -#define op_spe_ldst(name) (*gen_op_##name[ctx->mem_idx])() | |
| 6048 | -#define OP_SPE_LD_TABLE(name) \ | |
| 6049 | -static GenOpFunc *gen_op_spe_l##name[NB_MEM_FUNCS] = { \ | |
| 6050 | - GEN_MEM_FUNCS(spe_l##name), \ | |
| 6051 | -}; | |
| 6052 | -#define OP_SPE_ST_TABLE(name) \ | |
| 6053 | -static GenOpFunc *gen_op_spe_st##name[NB_MEM_FUNCS] = { \ | |
| 6054 | - GEN_MEM_FUNCS(spe_st##name), \ | |
| 6055 | -}; | |
| 6056 | - | |
| 6057 | -#define GEN_SPE_LD(name, sh) \ | |
| 6058 | -static always_inline void gen_evl##name (DisasContext *ctx) \ | |
| 6059 | -{ \ | |
| 6060 | - if (unlikely(!ctx->spe_enabled)) { \ | |
| 6061 | - GEN_EXCP_NO_AP(ctx); \ | |
| 6062 | - return; \ | |
| 6063 | - } \ | |
| 6064 | - gen_addr_spe_imm_index(cpu_T[0], ctx, sh); \ | |
| 6065 | - op_spe_ldst(spe_l##name); \ | |
| 6066 | - gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \ | |
| 6067 | -} | |
| 6068 | - | |
| 6069 | -#define GEN_SPE_LDX(name) \ | |
| 6070 | -static always_inline void gen_evl##name##x (DisasContext *ctx) \ | |
| 6071 | -{ \ | |
| 6072 | - if (unlikely(!ctx->spe_enabled)) { \ | |
| 6073 | - GEN_EXCP_NO_AP(ctx); \ | |
| 6074 | - return; \ | |
| 6075 | - } \ | |
| 6076 | - gen_addr_reg_index(cpu_T[0], ctx); \ | |
| 6077 | - op_spe_ldst(spe_l##name); \ | |
| 6078 | - gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \ | |
| 6079 | -} | |
| 6080 | - | |
| 6081 | -#define GEN_SPEOP_LD(name, sh) \ | |
| 6082 | -OP_SPE_LD_TABLE(name); \ | |
| 6083 | -GEN_SPE_LD(name, sh); \ | |
| 6084 | -GEN_SPE_LDX(name) | |
| 6085 | - | |
| 6086 | -#define GEN_SPE_ST(name, sh) \ | |
| 6087 | -static always_inline void gen_evst##name (DisasContext *ctx) \ | |
| 6088 | -{ \ | |
| 6089 | - if (unlikely(!ctx->spe_enabled)) { \ | |
| 6090 | - GEN_EXCP_NO_AP(ctx); \ | |
| 6091 | - return; \ | |
| 6092 | - } \ | |
| 6093 | - gen_addr_spe_imm_index(cpu_T[0], ctx, sh); \ | |
| 6094 | - gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \ | |
| 6095 | - op_spe_ldst(spe_st##name); \ | |
| 6096 | -} | |
| 6097 | - | |
| 6098 | -#define GEN_SPE_STX(name) \ | |
| 6099 | -static always_inline void gen_evst##name##x (DisasContext *ctx) \ | |
| 6100 | -{ \ | |
| 6101 | - if (unlikely(!ctx->spe_enabled)) { \ | |
| 6102 | - GEN_EXCP_NO_AP(ctx); \ | |
| 6103 | - return; \ | |
| 6104 | - } \ | |
| 6105 | - gen_addr_reg_index(cpu_T[0], ctx); \ | |
| 6106 | - gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \ | |
| 6107 | - op_spe_ldst(spe_st##name); \ | |
| 6108 | -} | |
| 6109 | - | |
| 6110 | -#define GEN_SPEOP_ST(name, sh) \ | |
| 6111 | -OP_SPE_ST_TABLE(name); \ | |
| 6112 | -GEN_SPE_ST(name, sh); \ | |
| 6113 | -GEN_SPE_STX(name) | |
| 6114 | - | |
| 6115 | -#define GEN_SPEOP_LDST(name, sh) \ | |
| 6116 | -GEN_SPEOP_LD(name, sh); \ | |
| 6117 | -GEN_SPEOP_ST(name, sh) | |
| 6118 | - | |
| 6119 | 6021 | /* SPE logic */ |
| 6120 | 6022 | #if defined(TARGET_PPC64) |
| 6121 | 6023 | #define GEN_SPEOP_LOGIC2(name, tcg_op) \ |
| ... | ... | @@ -6681,188 +6583,352 @@ GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); //// |
| 6681 | 6583 | GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); //// |
| 6682 | 6584 | GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); //// |
| 6683 | 6585 | |
| 6684 | -/* Load and stores */ | |
| 6685 | -GEN_SPEOP_LDST(dd, 3); | |
| 6686 | -GEN_SPEOP_LDST(dw, 3); | |
| 6687 | -GEN_SPEOP_LDST(dh, 3); | |
| 6688 | -GEN_SPEOP_LDST(whe, 2); | |
| 6689 | -GEN_SPEOP_LD(whou, 2); | |
| 6690 | -GEN_SPEOP_LD(whos, 2); | |
| 6691 | -GEN_SPEOP_ST(who, 2); | |
| 6692 | - | |
| 6693 | -#define _GEN_OP_SPE_STWWE(suffix) \ | |
| 6694 | -static always_inline void gen_op_spe_stwwe_##suffix (void) \ | |
| 6695 | -{ \ | |
| 6696 | - gen_op_srli32_T1_64(); \ | |
| 6697 | - gen_op_spe_stwwo_##suffix(); \ | |
| 6586 | +/* SPE load and stores */ | |
| 6587 | +static always_inline void gen_addr_spe_imm_index (TCGv EA, DisasContext *ctx, int sh) | |
| 6588 | +{ | |
| 6589 | + target_ulong uimm = rB(ctx->opcode); | |
| 6590 | + | |
| 6591 | + if (rA(ctx->opcode) == 0) | |
| 6592 | + tcg_gen_movi_tl(EA, uimm << sh); | |
| 6593 | + else | |
| 6594 | + tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh); | |
| 6698 | 6595 | } |
| 6699 | -#define _GEN_OP_SPE_STWWE_LE(suffix) \ | |
| 6700 | -static always_inline void gen_op_spe_stwwe_le_##suffix (void) \ | |
| 6701 | -{ \ | |
| 6702 | - gen_op_srli32_T1_64(); \ | |
| 6703 | - gen_op_spe_stwwo_le_##suffix(); \ | |
| 6596 | + | |
| 6597 | +static always_inline void gen_op_evldd(DisasContext *ctx, TCGv addr) | |
| 6598 | +{ | |
| 6599 | +#if defined(TARGET_PPC64) | |
| 6600 | + gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx); | |
| 6601 | +#else | |
| 6602 | + TCGv_i64 t0 = tcg_temp_new_i64(); | |
| 6603 | + gen_qemu_ld64(t0, addr, ctx->mem_idx); | |
| 6604 | + tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0); | |
| 6605 | + tcg_gen_shri_i64(t0, t0, 32); | |
| 6606 | + tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0); | |
| 6607 | + tcg_temp_free_i64(t0); | |
| 6608 | +#endif | |
| 6704 | 6609 | } |
| 6610 | + | |
| 6611 | +static always_inline void gen_op_evldw(DisasContext *ctx, TCGv addr) | |
| 6612 | +{ | |
| 6705 | 6613 | #if defined(TARGET_PPC64) |
| 6706 | -#define GEN_OP_SPE_STWWE(suffix) \ | |
| 6707 | -_GEN_OP_SPE_STWWE(suffix); \ | |
| 6708 | -_GEN_OP_SPE_STWWE_LE(suffix); \ | |
| 6709 | -static always_inline void gen_op_spe_stwwe_64_##suffix (void) \ | |
| 6710 | -{ \ | |
| 6711 | - gen_op_srli32_T1_64(); \ | |
| 6712 | - gen_op_spe_stwwo_64_##suffix(); \ | |
| 6713 | -} \ | |
| 6714 | -static always_inline void gen_op_spe_stwwe_le_64_##suffix (void) \ | |
| 6715 | -{ \ | |
| 6716 | - gen_op_srli32_T1_64(); \ | |
| 6717 | - gen_op_spe_stwwo_le_64_##suffix(); \ | |
| 6614 | + TCGv t0 = tcg_temp_new(); | |
| 6615 | + gen_qemu_ld32u(t0, addr, ctx->mem_idx); | |
| 6616 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
| 6617 | + tcg_gen_addi_tl(addr, addr, 4); | |
| 6618 | + gen_qemu_ld32u(t0, addr, ctx->mem_idx); | |
| 6619 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6620 | + tcg_temp_free(t0); | |
| 6621 | +#else | |
| 6622 | + gen_qemu_ld32u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx); | |
| 6623 | + tcg_gen_addi_tl(addr, addr, 4); | |
| 6624 | + gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx); | |
| 6625 | +#endif | |
| 6718 | 6626 | } |
| 6627 | + | |
| 6628 | +static always_inline void gen_op_evldh(DisasContext *ctx, TCGv addr) | |
| 6629 | +{ | |
| 6630 | + TCGv t0 = tcg_temp_new(); | |
| 6631 | +#if defined(TARGET_PPC64) | |
| 6632 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6633 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); | |
| 6634 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6635 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6636 | + tcg_gen_shli_tl(t0, t0, 32); | |
| 6637 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6638 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6639 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6640 | + tcg_gen_shli_tl(t0, t0, 16); | |
| 6641 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6642 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6643 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6644 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6719 | 6645 | #else |
| 6720 | -#define GEN_OP_SPE_STWWE(suffix) \ | |
| 6721 | -_GEN_OP_SPE_STWWE(suffix); \ | |
| 6722 | -_GEN_OP_SPE_STWWE_LE(suffix) | |
| 6646 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6647 | + tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); | |
| 6648 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6649 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6650 | + tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0); | |
| 6651 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6652 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6653 | + tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); | |
| 6654 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6655 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6656 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6723 | 6657 | #endif |
| 6724 | -#if defined(CONFIG_USER_ONLY) | |
| 6725 | -GEN_OP_SPE_STWWE(raw); | |
| 6726 | -#else /* defined(CONFIG_USER_ONLY) */ | |
| 6727 | -GEN_OP_SPE_STWWE(user); | |
| 6728 | -GEN_OP_SPE_STWWE(kernel); | |
| 6729 | -GEN_OP_SPE_STWWE(hypv); | |
| 6730 | -#endif /* defined(CONFIG_USER_ONLY) */ | |
| 6731 | -GEN_SPEOP_ST(wwe, 2); | |
| 6732 | -GEN_SPEOP_ST(wwo, 2); | |
| 6733 | - | |
| 6734 | -#define GEN_SPE_LDSPLAT(name, op, suffix) \ | |
| 6735 | -static always_inline void gen_op_spe_l##name##_##suffix (void) \ | |
| 6736 | -{ \ | |
| 6737 | - gen_op_##op##_##suffix(); \ | |
| 6738 | - gen_op_splatw_T1_64(); \ | |
| 6658 | + tcg_temp_free(t0); | |
| 6739 | 6659 | } |
| 6740 | 6660 | |
| 6741 | -#define GEN_OP_SPE_LHE(suffix) \ | |
| 6742 | -static always_inline void gen_op_spe_lhe_##suffix (void) \ | |
| 6743 | -{ \ | |
| 6744 | - gen_op_spe_lh_##suffix(); \ | |
| 6745 | - gen_op_sli16_T1_64(); \ | |
| 6661 | +static always_inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr) | |
| 6662 | +{ | |
| 6663 | + TCGv t0 = tcg_temp_new(); | |
| 6664 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6665 | +#if defined(TARGET_PPC64) | |
| 6666 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); | |
| 6667 | + tcg_gen_shli_tl(t0, t0, 16); | |
| 6668 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6669 | +#else | |
| 6670 | + tcg_gen_shli_tl(t0, t0, 16); | |
| 6671 | + tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0); | |
| 6672 | + tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
| 6673 | +#endif | |
| 6674 | + tcg_temp_free(t0); | |
| 6746 | 6675 | } |
| 6747 | 6676 | |
| 6748 | -#define GEN_OP_SPE_LHX(suffix) \ | |
| 6749 | -static always_inline void gen_op_spe_lhx_##suffix (void) \ | |
| 6750 | -{ \ | |
| 6751 | - gen_op_spe_lh_##suffix(); \ | |
| 6752 | - gen_op_extsh_T1_64(); \ | |
| 6677 | +static always_inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr) | |
| 6678 | +{ | |
| 6679 | + TCGv t0 = tcg_temp_new(); | |
| 6680 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6681 | +#if defined(TARGET_PPC64) | |
| 6682 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
| 6683 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6684 | +#else | |
| 6685 | + tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0); | |
| 6686 | + tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
| 6687 | +#endif | |
| 6688 | + tcg_temp_free(t0); | |
| 6753 | 6689 | } |
| 6754 | 6690 | |
| 6755 | -#if defined(CONFIG_USER_ONLY) | |
| 6756 | -GEN_OP_SPE_LHE(raw); | |
| 6757 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, raw); | |
| 6758 | -GEN_OP_SPE_LHE(le_raw); | |
| 6759 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_raw); | |
| 6760 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, raw); | |
| 6761 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_raw); | |
| 6762 | -GEN_OP_SPE_LHX(raw); | |
| 6763 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, raw); | |
| 6764 | -GEN_OP_SPE_LHX(le_raw); | |
| 6765 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_raw); | |
| 6691 | +static always_inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr) | |
| 6692 | +{ | |
| 6693 | + TCGv t0 = tcg_temp_new(); | |
| 6694 | + gen_qemu_ld16s(t0, addr, ctx->mem_idx); | |
| 6695 | +#if defined(TARGET_PPC64) | |
| 6696 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
| 6697 | + tcg_gen_ext32u_tl(t0, t0); | |
| 6698 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6699 | +#else | |
| 6700 | + tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0); | |
| 6701 | + tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
| 6702 | +#endif | |
| 6703 | + tcg_temp_free(t0); | |
| 6704 | +} | |
| 6705 | + | |
| 6706 | +static always_inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr) | |
| 6707 | +{ | |
| 6708 | + TCGv t0 = tcg_temp_new(); | |
| 6709 | +#if defined(TARGET_PPC64) | |
| 6710 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6711 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); | |
| 6712 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6713 | + tcg_gen_shli_tl(t0, t0, 16); | |
| 6714 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6715 | +#else | |
| 6716 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6717 | + tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); | |
| 6718 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6719 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6720 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16); | |
| 6721 | +#endif | |
| 6722 | + tcg_temp_free(t0); | |
| 6723 | +} | |
| 6724 | + | |
| 6725 | +static always_inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr) | |
| 6726 | +{ | |
| 6727 | +#if defined(TARGET_PPC64) | |
| 6728 | + TCGv t0 = tcg_temp_new(); | |
| 6729 | + gen_qemu_ld16u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx); | |
| 6730 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6731 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6732 | + tcg_gen_shli_tl(t0, t0, 32); | |
| 6733 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6734 | + tcg_temp_free(t0); | |
| 6735 | +#else | |
| 6736 | + gen_qemu_ld16u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx); | |
| 6737 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6738 | + gen_qemu_ld16u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx); | |
| 6739 | +#endif | |
| 6740 | +} | |
| 6741 | + | |
| 6742 | +static always_inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr) | |
| 6743 | +{ | |
| 6744 | +#if defined(TARGET_PPC64) | |
| 6745 | + TCGv t0 = tcg_temp_new(); | |
| 6746 | + gen_qemu_ld16s(t0, addr, ctx->mem_idx); | |
| 6747 | + tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
| 6748 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6749 | + gen_qemu_ld16s(t0, addr, ctx->mem_idx); | |
| 6750 | + tcg_gen_shli_tl(t0, t0, 32); | |
| 6751 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6752 | + tcg_temp_free(t0); | |
| 6753 | +#else | |
| 6754 | + gen_qemu_ld16s(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx); | |
| 6755 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6756 | + gen_qemu_ld16s(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx); | |
| 6757 | +#endif | |
| 6758 | +} | |
| 6759 | + | |
| 6760 | +static always_inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr) | |
| 6761 | +{ | |
| 6762 | + TCGv t0 = tcg_temp_new(); | |
| 6763 | + gen_qemu_ld32u(t0, addr, ctx->mem_idx); | |
| 6766 | 6764 | #if defined(TARGET_PPC64) |
| 6767 | -GEN_OP_SPE_LHE(64_raw); | |
| 6768 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_raw); | |
| 6769 | -GEN_OP_SPE_LHE(le_64_raw); | |
| 6770 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_raw); | |
| 6771 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_raw); | |
| 6772 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_raw); | |
| 6773 | -GEN_OP_SPE_LHX(64_raw); | |
| 6774 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_raw); | |
| 6775 | -GEN_OP_SPE_LHX(le_64_raw); | |
| 6776 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_raw); | |
| 6765 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
| 6766 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6767 | +#else | |
| 6768 | + tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0); | |
| 6769 | + tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
| 6770 | +#endif | |
| 6771 | + tcg_temp_free(t0); | |
| 6772 | +} | |
| 6773 | + | |
| 6774 | +static always_inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr) | |
| 6775 | +{ | |
| 6776 | + TCGv t0 = tcg_temp_new(); | |
| 6777 | +#if defined(TARGET_PPC64) | |
| 6778 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6779 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); | |
| 6780 | + tcg_gen_shli_tl(t0, t0, 32); | |
| 6781 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6782 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6783 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6784 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6785 | + tcg_gen_shli_tl(t0, t0, 16); | |
| 6786 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
| 6787 | +#else | |
| 6788 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6789 | + tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); | |
| 6790 | + tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0); | |
| 6791 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6792 | + gen_qemu_ld16u(t0, addr, ctx->mem_idx); | |
| 6793 | + tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16); | |
| 6794 | + tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0); | |
| 6777 | 6795 | #endif |
| 6796 | + tcg_temp_free(t0); | |
| 6797 | +} | |
| 6798 | + | |
| 6799 | +static always_inline void gen_op_evstdd(DisasContext *ctx, TCGv addr) | |
| 6800 | +{ | |
| 6801 | +#if defined(TARGET_PPC64) | |
| 6802 | + gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6778 | 6803 | #else |
| 6779 | -GEN_OP_SPE_LHE(user); | |
| 6780 | -GEN_OP_SPE_LHE(kernel); | |
| 6781 | -GEN_OP_SPE_LHE(hypv); | |
| 6782 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, user); | |
| 6783 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, kernel); | |
| 6784 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, hypv); | |
| 6785 | -GEN_OP_SPE_LHE(le_user); | |
| 6786 | -GEN_OP_SPE_LHE(le_kernel); | |
| 6787 | -GEN_OP_SPE_LHE(le_hypv); | |
| 6788 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_user); | |
| 6789 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_kernel); | |
| 6790 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_hypv); | |
| 6791 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, user); | |
| 6792 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, kernel); | |
| 6793 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, hypv); | |
| 6794 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_user); | |
| 6795 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_kernel); | |
| 6796 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_hypv); | |
| 6797 | -GEN_OP_SPE_LHX(user); | |
| 6798 | -GEN_OP_SPE_LHX(kernel); | |
| 6799 | -GEN_OP_SPE_LHX(hypv); | |
| 6800 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, user); | |
| 6801 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, kernel); | |
| 6802 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, hypv); | |
| 6803 | -GEN_OP_SPE_LHX(le_user); | |
| 6804 | -GEN_OP_SPE_LHX(le_kernel); | |
| 6805 | -GEN_OP_SPE_LHX(le_hypv); | |
| 6806 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_user); | |
| 6807 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_kernel); | |
| 6808 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_hypv); | |
| 6804 | + TCGv_i64 t0 = tcg_temp_new_i64(); | |
| 6805 | + tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]); | |
| 6806 | + gen_qemu_st64(t0, addr, ctx->mem_idx); | |
| 6807 | + tcg_temp_free_i64(t0); | |
| 6808 | +#endif | |
| 6809 | +} | |
| 6810 | + | |
| 6811 | +static always_inline void gen_op_evstdw(DisasContext *ctx, TCGv addr) | |
| 6812 | +{ | |
| 6809 | 6813 | #if defined(TARGET_PPC64) |
| 6810 | -GEN_OP_SPE_LHE(64_user); | |
| 6811 | -GEN_OP_SPE_LHE(64_kernel); | |
| 6812 | -GEN_OP_SPE_LHE(64_hypv); | |
| 6813 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_user); | |
| 6814 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_kernel); | |
| 6815 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_hypv); | |
| 6816 | -GEN_OP_SPE_LHE(le_64_user); | |
| 6817 | -GEN_OP_SPE_LHE(le_64_kernel); | |
| 6818 | -GEN_OP_SPE_LHE(le_64_hypv); | |
| 6819 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_user); | |
| 6820 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_kernel); | |
| 6821 | -GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_hypv); | |
| 6822 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_user); | |
| 6823 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_kernel); | |
| 6824 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_hypv); | |
| 6825 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_user); | |
| 6826 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_kernel); | |
| 6827 | -GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_hypv); | |
| 6828 | -GEN_OP_SPE_LHX(64_user); | |
| 6829 | -GEN_OP_SPE_LHX(64_kernel); | |
| 6830 | -GEN_OP_SPE_LHX(64_hypv); | |
| 6831 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_user); | |
| 6832 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_kernel); | |
| 6833 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_hypv); | |
| 6834 | -GEN_OP_SPE_LHX(le_64_user); | |
| 6835 | -GEN_OP_SPE_LHX(le_64_kernel); | |
| 6836 | -GEN_OP_SPE_LHX(le_64_hypv); | |
| 6837 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_user); | |
| 6838 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_kernel); | |
| 6839 | -GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_hypv); | |
| 6840 | -#endif | |
| 6841 | -#endif | |
| 6842 | -GEN_SPEOP_LD(hhesplat, 1); | |
| 6843 | -GEN_SPEOP_LD(hhousplat, 1); | |
| 6844 | -GEN_SPEOP_LD(hhossplat, 1); | |
| 6845 | -GEN_SPEOP_LD(wwsplat, 2); | |
| 6846 | -GEN_SPEOP_LD(whsplat, 2); | |
| 6847 | - | |
| 6848 | -GEN_SPE(evlddx, evldd, 0x00, 0x0C, 0x00000000, PPC_SPE); // | |
| 6849 | -GEN_SPE(evldwx, evldw, 0x01, 0x0C, 0x00000000, PPC_SPE); // | |
| 6850 | -GEN_SPE(evldhx, evldh, 0x02, 0x0C, 0x00000000, PPC_SPE); // | |
| 6851 | -GEN_SPE(evlhhesplatx, evlhhesplat, 0x04, 0x0C, 0x00000000, PPC_SPE); // | |
| 6852 | -GEN_SPE(evlhhousplatx, evlhhousplat, 0x06, 0x0C, 0x00000000, PPC_SPE); // | |
| 6853 | -GEN_SPE(evlhhossplatx, evlhhossplat, 0x07, 0x0C, 0x00000000, PPC_SPE); // | |
| 6854 | -GEN_SPE(evlwhex, evlwhe, 0x08, 0x0C, 0x00000000, PPC_SPE); // | |
| 6855 | -GEN_SPE(evlwhoux, evlwhou, 0x0A, 0x0C, 0x00000000, PPC_SPE); // | |
| 6856 | -GEN_SPE(evlwhosx, evlwhos, 0x0B, 0x0C, 0x00000000, PPC_SPE); // | |
| 6857 | -GEN_SPE(evlwwsplatx, evlwwsplat, 0x0C, 0x0C, 0x00000000, PPC_SPE); // | |
| 6858 | -GEN_SPE(evlwhsplatx, evlwhsplat, 0x0E, 0x0C, 0x00000000, PPC_SPE); // | |
| 6859 | -GEN_SPE(evstddx, evstdd, 0x10, 0x0C, 0x00000000, PPC_SPE); // | |
| 6860 | -GEN_SPE(evstdwx, evstdw, 0x11, 0x0C, 0x00000000, PPC_SPE); // | |
| 6861 | -GEN_SPE(evstdhx, evstdh, 0x12, 0x0C, 0x00000000, PPC_SPE); // | |
| 6862 | -GEN_SPE(evstwhex, evstwhe, 0x18, 0x0C, 0x00000000, PPC_SPE); // | |
| 6863 | -GEN_SPE(evstwhox, evstwho, 0x1A, 0x0C, 0x00000000, PPC_SPE); // | |
| 6864 | -GEN_SPE(evstwwex, evstwwe, 0x1C, 0x0C, 0x00000000, PPC_SPE); // | |
| 6865 | -GEN_SPE(evstwwox, evstwwo, 0x1E, 0x0C, 0x00000000, PPC_SPE); // | |
| 6814 | + TCGv t0 = tcg_temp_new(); | |
| 6815 | + tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32); | |
| 6816 | + gen_qemu_st32(t0, addr, ctx->mem_idx); | |
| 6817 | + tcg_temp_free(t0); | |
| 6818 | +#else | |
| 6819 | + gen_qemu_st32(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6820 | +#endif | |
| 6821 | + tcg_gen_addi_tl(addr, addr, 4); | |
| 6822 | + gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6823 | +} | |
| 6824 | + | |
| 6825 | +static always_inline void gen_op_evstdh(DisasContext *ctx, TCGv addr) | |
| 6826 | +{ | |
| 6827 | + TCGv t0 = tcg_temp_new(); | |
| 6828 | +#if defined(TARGET_PPC64) | |
| 6829 | + tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48); | |
| 6830 | +#else | |
| 6831 | + tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16); | |
| 6832 | +#endif | |
| 6833 | + gen_qemu_st16(t0, addr, ctx->mem_idx); | |
| 6834 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6835 | +#if defined(TARGET_PPC64) | |
| 6836 | + tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32); | |
| 6837 | + gen_qemu_st16(t0, addr, ctx->mem_idx); | |
| 6838 | +#else | |
| 6839 | + gen_qemu_st16(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6840 | +#endif | |
| 6841 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6842 | + tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16); | |
| 6843 | + gen_qemu_st16(t0, addr, ctx->mem_idx); | |
| 6844 | + tcg_temp_free(t0); | |
| 6845 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6846 | + gen_qemu_st16(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6847 | +} | |
| 6848 | + | |
| 6849 | +static always_inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr) | |
| 6850 | +{ | |
| 6851 | + TCGv t0 = tcg_temp_new(); | |
| 6852 | +#if defined(TARGET_PPC64) | |
| 6853 | + tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48); | |
| 6854 | +#else | |
| 6855 | + tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16); | |
| 6856 | +#endif | |
| 6857 | + gen_qemu_st16(t0, addr, ctx->mem_idx); | |
| 6858 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6859 | + tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16); | |
| 6860 | + gen_qemu_st16(t0, addr, ctx->mem_idx); | |
| 6861 | + tcg_temp_free(t0); | |
| 6862 | +} | |
| 6863 | + | |
| 6864 | +static always_inline void gen_op_evstwho(DisasContext *ctx, TCGv addr) | |
| 6865 | +{ | |
| 6866 | +#if defined(TARGET_PPC64) | |
| 6867 | + TCGv t0 = tcg_temp_new(); | |
| 6868 | + tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32); | |
| 6869 | + gen_qemu_st16(t0, addr, ctx->mem_idx); | |
| 6870 | + tcg_temp_free(t0); | |
| 6871 | +#else | |
| 6872 | + gen_qemu_st16(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6873 | +#endif | |
| 6874 | + tcg_gen_addi_tl(addr, addr, 2); | |
| 6875 | + gen_qemu_st16(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6876 | +} | |
| 6877 | + | |
| 6878 | +static always_inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr) | |
| 6879 | +{ | |
| 6880 | +#if defined(TARGET_PPC64) | |
| 6881 | + TCGv t0 = tcg_temp_new(); | |
| 6882 | + tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32); | |
| 6883 | + gen_qemu_st32(t0, addr, ctx->mem_idx); | |
| 6884 | + tcg_temp_free(t0); | |
| 6885 | +#else | |
| 6886 | + gen_qemu_st32(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6887 | +#endif | |
| 6888 | +} | |
| 6889 | + | |
| 6890 | +static always_inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr) | |
| 6891 | +{ | |
| 6892 | + gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx); | |
| 6893 | +} | |
| 6894 | + | |
| 6895 | +#define GEN_SPEOP_LDST(name, opc2, sh) \ | |
| 6896 | +GEN_HANDLER(gen_##name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE) \ | |
| 6897 | +{ \ | |
| 6898 | + TCGv t0; \ | |
| 6899 | + if (unlikely(!ctx->spe_enabled)) { \ | |
| 6900 | + GEN_EXCP_NO_AP(ctx); \ | |
| 6901 | + return; \ | |
| 6902 | + } \ | |
| 6903 | + t0 = tcg_temp_new(); \ | |
| 6904 | + if (Rc(ctx->opcode)) { \ | |
| 6905 | + gen_addr_spe_imm_index(t0, ctx, sh); \ | |
| 6906 | + } else { \ | |
| 6907 | + gen_addr_reg_index(t0, ctx); \ | |
| 6908 | + } \ | |
| 6909 | + gen_op_##name(ctx, t0); \ | |
| 6910 | + tcg_temp_free(t0); \ | |
| 6911 | +} | |
| 6912 | + | |
| 6913 | +GEN_SPEOP_LDST(evldd, 0x00, 3); | |
| 6914 | +GEN_SPEOP_LDST(evldw, 0x01, 3); | |
| 6915 | +GEN_SPEOP_LDST(evldh, 0x02, 3); | |
| 6916 | +GEN_SPEOP_LDST(evlhhesplat, 0x04, 1); | |
| 6917 | +GEN_SPEOP_LDST(evlhhousplat, 0x06, 1); | |
| 6918 | +GEN_SPEOP_LDST(evlhhossplat, 0x07, 1); | |
| 6919 | +GEN_SPEOP_LDST(evlwhe, 0x08, 2); | |
| 6920 | +GEN_SPEOP_LDST(evlwhou, 0x0A, 2); | |
| 6921 | +GEN_SPEOP_LDST(evlwhos, 0x0B, 2); | |
| 6922 | +GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2); | |
| 6923 | +GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2); | |
| 6924 | + | |
| 6925 | +GEN_SPEOP_LDST(evstdd, 0x10, 3); | |
| 6926 | +GEN_SPEOP_LDST(evstdw, 0x11, 3); | |
| 6927 | +GEN_SPEOP_LDST(evstdh, 0x12, 3); | |
| 6928 | +GEN_SPEOP_LDST(evstwhe, 0x18, 2); | |
| 6929 | +GEN_SPEOP_LDST(evstwho, 0x1A, 2); | |
| 6930 | +GEN_SPEOP_LDST(evstwwe, 0x1C, 2); | |
| 6931 | +GEN_SPEOP_LDST(evstwwo, 0x1E, 2); | |
| 6866 | 6932 | |
| 6867 | 6933 | /* Multiply and add - TODO */ |
| 6868 | 6934 | #if 0 |
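A note on the new decode, for reference: GEN_SPEOP_LDST registers one handler per load/store pair under major opcode 0x04 with opc3 0x0C, and the low bit of the extended opcode (the Rc position) selects the addressing form, so evldd and evlddx now share a handler instead of going through the old GEN_SPE tables. With sh = 3 the 5-bit rB field is scaled by 8. A hedged sketch of what the two address computations reduce to (register fields illustrative):

```c
/* Immediate form, e.g. evldd rD, 16(rA): Rc bit set, UIMM = 2, sh = 3,
 * generated by gen_addr_spe_imm_index() when rA != 0. */
tcg_gen_addi_tl(t0, cpu_gpr[rA(ctx->opcode)], 2 << 3);                  /* EA = rA + 16 */

/* Indexed form, e.g. evlddx rD, rA, rB: Rc bit clear, generated by
 * gen_addr_reg_index() (rA == 0 is treated as zero). */
tcg_gen_add_tl(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); /* EA = rA + rB */
```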