Commit 6ae20372d4d27c8fca02b70a76f51d031b3e2848
1 parent 32b6c812
Rename T[012] according to their roles
git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@4131 c046a42c-6fe2-441c-8c8c-71466251a162
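In short, this change stops referring to the anonymous globals cpu_T[0], cpu_T[1] and cpu_T[2] directly inside disas_sparc_insn() and instead goes through role-named TCGv aliases declared alongside the other globals. A minimal sketch of the mapping, assembled from the hunk at the top of disas_sparc_insn() in the diff below (cpu_cond's own assignment is not visible in these hunks; the systematic replacement of cpu_T[2] implies it aliases that temporary):

    /* Role-named aliases over the old cpu_T[] globals (sketch from the diff). */
    cpu_dst  = cpu_T[0];   /* destination of arithmetic/logic results       */
    cpu_src1 = cpu_T[0];   /* first source operand ("const" per the patch)  */
    cpu_src2 = cpu_T[1];   /* second source operand ("const" per the patch) */
    cpu_addr = cpu_T[0];   /* effective address for loads and stores        */
    cpu_val  = cpu_T[1];   /* value loaded or stored                        */
    /* cpu_cond stands in for cpu_T[2] wherever a condition temp is passed to
       save_state(), do_branch(), gen_trap_ifnofpu(), gen_mov_pc_npc(), ...;
       its initialization lies outside the hunks shown here. */

With the aliases in place, emitted ops read like tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2) instead of tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); that substitution accounts for the bulk of the mechanical changes below.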
Showing 1 changed file with 435 additions and 423 deletions
target-sparc/translate.c
... | ... | @@ -48,6 +48,7 @@ |
48 | 48 | /* global register indexes */ |
49 | 49 | static TCGv cpu_env, cpu_T[3], cpu_regwptr, cpu_cc_src, cpu_cc_src2, cpu_cc_dst; |
50 | 50 | static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8]; |
51 | +static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val; | |
51 | 52 | #ifdef TARGET_SPARC64 |
52 | 53 | static TCGv cpu_xcc; |
53 | 54 | #endif |
... | ... | @@ -1841,6 +1842,15 @@ static void disas_sparc_insn(DisasContext * dc) |
1841 | 1842 | opc = GET_FIELD(insn, 0, 1); |
1842 | 1843 | |
1843 | 1844 | rd = GET_FIELD(insn, 2, 6); |
1845 | + | |
1846 | + cpu_dst = cpu_T[0]; | |
1847 | + cpu_src1 = cpu_T[0]; // const | |
1848 | + cpu_src2 = cpu_T[1]; // const | |
1849 | + | |
1850 | + // loads and stores | |
1851 | + cpu_addr = cpu_T[0]; | |
1852 | + cpu_val = cpu_T[1]; | |
1853 | + | |
1844 | 1854 | switch (opc) { |
1845 | 1855 | case 0: /* branches/sethi */ |
1846 | 1856 | { |
... | ... | @@ -1857,9 +1867,9 @@ static void disas_sparc_insn(DisasContext * dc) |
1857 | 1867 | target <<= 2; |
1858 | 1868 | cc = GET_FIELD_SP(insn, 20, 21); |
1859 | 1869 | if (cc == 0) |
1860 | - do_branch(dc, target, insn, 0, cpu_T[2]); | |
1870 | + do_branch(dc, target, insn, 0, cpu_cond); | |
1861 | 1871 | else if (cc == 2) |
1862 | - do_branch(dc, target, insn, 1, cpu_T[2]); | |
1872 | + do_branch(dc, target, insn, 1, cpu_cond); | |
1863 | 1873 | else |
1864 | 1874 | goto illegal_insn; |
1865 | 1875 | goto jmp_insn; |
... | ... | @@ -1871,19 +1881,19 @@ static void disas_sparc_insn(DisasContext * dc) |
1871 | 1881 | target = sign_extend(target, 16); |
1872 | 1882 | target <<= 2; |
1873 | 1883 | rs1 = GET_FIELD(insn, 13, 17); |
1874 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
1875 | - do_branch_reg(dc, target, insn, cpu_T[2], cpu_T[0]); | |
1884 | + gen_movl_reg_TN(rs1, cpu_src1); | |
1885 | + do_branch_reg(dc, target, insn, cpu_cond, cpu_src1); | |
1876 | 1886 | goto jmp_insn; |
1877 | 1887 | } |
1878 | 1888 | case 0x5: /* V9 FBPcc */ |
1879 | 1889 | { |
1880 | 1890 | int cc = GET_FIELD_SP(insn, 20, 21); |
1881 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
1891 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
1882 | 1892 | goto jmp_insn; |
1883 | 1893 | target = GET_FIELD_SP(insn, 0, 18); |
1884 | 1894 | target = sign_extend(target, 19); |
1885 | 1895 | target <<= 2; |
1886 | - do_fbranch(dc, target, insn, cc, cpu_T[2]); | |
1896 | + do_fbranch(dc, target, insn, cc, cpu_cond); | |
1887 | 1897 | goto jmp_insn; |
1888 | 1898 | } |
1889 | 1899 | #else |
... | ... | @@ -1897,17 +1907,17 @@ static void disas_sparc_insn(DisasContext * dc) |
1897 | 1907 | target = GET_FIELD(insn, 10, 31); |
1898 | 1908 | target = sign_extend(target, 22); |
1899 | 1909 | target <<= 2; |
1900 | - do_branch(dc, target, insn, 0, cpu_T[2]); | |
1910 | + do_branch(dc, target, insn, 0, cpu_cond); | |
1901 | 1911 | goto jmp_insn; |
1902 | 1912 | } |
1903 | 1913 | case 0x6: /* FBN+x */ |
1904 | 1914 | { |
1905 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
1915 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
1906 | 1916 | goto jmp_insn; |
1907 | 1917 | target = GET_FIELD(insn, 10, 31); |
1908 | 1918 | target = sign_extend(target, 22); |
1909 | 1919 | target <<= 2; |
1910 | - do_fbranch(dc, target, insn, 0, cpu_T[2]); | |
1920 | + do_fbranch(dc, target, insn, 0, cpu_cond); | |
1911 | 1921 | goto jmp_insn; |
1912 | 1922 | } |
1913 | 1923 | case 0x4: /* SETHI */ |
... | ... | @@ -1916,8 +1926,8 @@ static void disas_sparc_insn(DisasContext * dc) |
1916 | 1926 | if (rd) { // nop |
1917 | 1927 | #endif |
1918 | 1928 | uint32_t value = GET_FIELD(insn, 10, 31); |
1919 | - tcg_gen_movi_tl(cpu_T[0], value << 10); | |
1920 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
1929 | + tcg_gen_movi_tl(cpu_dst, value << 10); | |
1930 | + gen_movl_TN_reg(rd, cpu_dst); | |
1921 | 1931 | #if defined(OPTIM) |
1922 | 1932 | } |
1923 | 1933 | #endif |
... | ... | @@ -1935,7 +1945,7 @@ static void disas_sparc_insn(DisasContext * dc) |
1935 | 1945 | |
1936 | 1946 | gen_movl_TN_reg(15, tcg_const_tl(dc->pc)); |
1937 | 1947 | target += dc->pc; |
1938 | - gen_mov_pc_npc(dc, cpu_T[2]); | |
1948 | + gen_mov_pc_npc(dc, cpu_cond); | |
1939 | 1949 | dc->npc = target; |
1940 | 1950 | } |
1941 | 1951 | goto jmp_insn; |
... | ... | @@ -1946,32 +1956,32 @@ static void disas_sparc_insn(DisasContext * dc) |
1946 | 1956 | int cond; |
1947 | 1957 | |
1948 | 1958 | rs1 = GET_FIELD(insn, 13, 17); |
1949 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
1959 | + gen_movl_reg_TN(rs1, cpu_src1); | |
1950 | 1960 | if (IS_IMM) { |
1951 | 1961 | rs2 = GET_FIELD(insn, 25, 31); |
1952 | - tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2); | |
1962 | + tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2); | |
1953 | 1963 | } else { |
1954 | 1964 | rs2 = GET_FIELD(insn, 27, 31); |
1955 | 1965 | #if defined(OPTIM) |
1956 | 1966 | if (rs2 != 0) { |
1957 | 1967 | #endif |
1958 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
1959 | - tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
1968 | + gen_movl_reg_TN(rs2, cpu_src2); | |
1969 | + tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2); | |
1960 | 1970 | #if defined(OPTIM) |
1961 | 1971 | } |
1962 | 1972 | #endif |
1963 | 1973 | } |
1964 | 1974 | cond = GET_FIELD(insn, 3, 6); |
1965 | 1975 | if (cond == 0x8) { |
1966 | - save_state(dc, cpu_T[2]); | |
1967 | - tcg_gen_helper_0_1(helper_trap, cpu_T[0]); | |
1976 | + save_state(dc, cpu_cond); | |
1977 | + tcg_gen_helper_0_1(helper_trap, cpu_dst); | |
1968 | 1978 | } else if (cond != 0) { |
1969 | 1979 | TCGv r_cond = tcg_temp_new(TCG_TYPE_TL); |
1970 | 1980 | #ifdef TARGET_SPARC64 |
1971 | 1981 | /* V9 icc/xcc */ |
1972 | 1982 | int cc = GET_FIELD_SP(insn, 11, 12); |
1973 | 1983 | |
1974 | - save_state(dc, cpu_T[2]); | |
1984 | + save_state(dc, cpu_cond); | |
1975 | 1985 | if (cc == 0) |
1976 | 1986 | gen_cond(r_cond, 0, cond); |
1977 | 1987 | else if (cc == 2) |
... | ... | @@ -1979,10 +1989,10 @@ static void disas_sparc_insn(DisasContext * dc) |
1979 | 1989 | else |
1980 | 1990 | goto illegal_insn; |
1981 | 1991 | #else |
1982 | - save_state(dc, cpu_T[2]); | |
1992 | + save_state(dc, cpu_cond); | |
1983 | 1993 | gen_cond(r_cond, 0, cond); |
1984 | 1994 | #endif |
1985 | - tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], r_cond); | |
1995 | + tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond); | |
1986 | 1996 | tcg_gen_discard_tl(r_cond); |
1987 | 1997 | } |
1988 | 1998 | gen_op_next_insn(); |
... | ... | @@ -2003,18 +2013,18 @@ static void disas_sparc_insn(DisasContext * dc) |
2003 | 2013 | SPARCv8 manual, rdy on the |
2004 | 2014 | microSPARC II */ |
2005 | 2015 | #endif |
2006 | - tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, y)); | |
2007 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2016 | + tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, y)); | |
2017 | + gen_movl_TN_reg(rd, cpu_dst); | |
2008 | 2018 | break; |
2009 | 2019 | #ifdef TARGET_SPARC64 |
2010 | 2020 | case 0x2: /* V9 rdccr */ |
2011 | - tcg_gen_helper_1_0(helper_rdccr, cpu_T[0]); | |
2012 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2021 | + tcg_gen_helper_1_0(helper_rdccr, cpu_dst); | |
2022 | + gen_movl_TN_reg(rd, cpu_dst); | |
2013 | 2023 | break; |
2014 | 2024 | case 0x3: /* V9 rdasi */ |
2015 | 2025 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, asi)); |
2016 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2017 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2026 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2027 | + gen_movl_TN_reg(rd, cpu_dst); | |
2018 | 2028 | break; |
2019 | 2029 | case 0x4: /* V9 rdtick */ |
2020 | 2030 | { |
... | ... | @@ -2023,32 +2033,32 @@ static void disas_sparc_insn(DisasContext * dc) |
2023 | 2033 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
2024 | 2034 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
2025 | 2035 | offsetof(CPUState, tick)); |
2026 | - tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0], | |
2036 | + tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst, | |
2027 | 2037 | r_tickptr); |
2028 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2038 | + gen_movl_TN_reg(rd, cpu_dst); | |
2029 | 2039 | tcg_gen_discard_ptr(r_tickptr); |
2030 | 2040 | } |
2031 | 2041 | break; |
2032 | 2042 | case 0x5: /* V9 rdpc */ |
2033 | - tcg_gen_movi_tl(cpu_T[0], dc->pc); | |
2034 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2043 | + tcg_gen_movi_tl(cpu_dst, dc->pc); | |
2044 | + gen_movl_TN_reg(rd, cpu_dst); | |
2035 | 2045 | break; |
2036 | 2046 | case 0x6: /* V9 rdfprs */ |
2037 | 2047 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fprs)); |
2038 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2039 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2048 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2049 | + gen_movl_TN_reg(rd, cpu_dst); | |
2040 | 2050 | break; |
2041 | 2051 | case 0xf: /* V9 membar */ |
2042 | 2052 | break; /* no effect */ |
2043 | 2053 | case 0x13: /* Graphics Status */ |
2044 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
2054 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
2045 | 2055 | goto jmp_insn; |
2046 | - tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, gsr)); | |
2047 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2056 | + tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, gsr)); | |
2057 | + gen_movl_TN_reg(rd, cpu_dst); | |
2048 | 2058 | break; |
2049 | 2059 | case 0x17: /* Tick compare */ |
2050 | - tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tick_cmpr)); | |
2051 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2060 | + tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tick_cmpr)); | |
2061 | + gen_movl_TN_reg(rd, cpu_dst); | |
2052 | 2062 | break; |
2053 | 2063 | case 0x18: /* System tick */ |
2054 | 2064 | { |
... | ... | @@ -2057,15 +2067,15 @@ static void disas_sparc_insn(DisasContext * dc) |
2057 | 2067 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
2058 | 2068 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
2059 | 2069 | offsetof(CPUState, stick)); |
2060 | - tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0], | |
2070 | + tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst, | |
2061 | 2071 | r_tickptr); |
2062 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2072 | + gen_movl_TN_reg(rd, cpu_dst); | |
2063 | 2073 | tcg_gen_discard_ptr(r_tickptr); |
2064 | 2074 | } |
2065 | 2075 | break; |
2066 | 2076 | case 0x19: /* System tick compare */ |
2067 | - tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, stick_cmpr)); | |
2068 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2077 | + tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, stick_cmpr)); | |
2078 | + gen_movl_TN_reg(rd, cpu_dst); | |
2069 | 2079 | break; |
2070 | 2080 | case 0x10: /* Performance Control */ |
2071 | 2081 | case 0x11: /* Performance Instrumentation Counter */ |
... | ... | @@ -2082,7 +2092,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2082 | 2092 | #ifndef TARGET_SPARC64 |
2083 | 2093 | if (!supervisor(dc)) |
2084 | 2094 | goto priv_insn; |
2085 | - tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]); | |
2095 | + tcg_gen_helper_1_0(helper_rdpsr, cpu_dst); | |
2086 | 2096 | #else |
2087 | 2097 | if (!hypervisor(dc)) |
2088 | 2098 | goto priv_insn; |
... | ... | @@ -2096,25 +2106,25 @@ static void disas_sparc_insn(DisasContext * dc) |
2096 | 2106 | break; |
2097 | 2107 | case 3: // hintp |
2098 | 2108 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hintp)); |
2099 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2109 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2100 | 2110 | break; |
2101 | 2111 | case 5: // htba |
2102 | 2112 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, htba)); |
2103 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2113 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2104 | 2114 | break; |
2105 | 2115 | case 6: // hver |
2106 | 2116 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hver)); |
2107 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2117 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2108 | 2118 | break; |
2109 | 2119 | case 31: // hstick_cmpr |
2110 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
2120 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
2111 | 2121 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hstick_cmpr)); |
2112 | 2122 | break; |
2113 | 2123 | default: |
2114 | 2124 | goto illegal_insn; |
2115 | 2125 | } |
2116 | 2126 | #endif |
2117 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2127 | + gen_movl_TN_reg(rd, cpu_dst); | |
2118 | 2128 | break; |
2119 | 2129 | } else if (xop == 0x2a) { /* rdwim / V9 rdpr */ |
2120 | 2130 | if (!supervisor(dc)) |
... | ... | @@ -2129,7 +2139,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2129 | 2139 | r_tsptr = tcg_temp_new(TCG_TYPE_PTR); |
2130 | 2140 | tcg_gen_ld_ptr(r_tsptr, cpu_env, |
2131 | 2141 | offsetof(CPUState, tsptr)); |
2132 | - tcg_gen_ld_tl(cpu_T[0], r_tsptr, | |
2142 | + tcg_gen_ld_tl(cpu_dst, r_tsptr, | |
2133 | 2143 | offsetof(trap_state, tpc)); |
2134 | 2144 | tcg_gen_discard_ptr(r_tsptr); |
2135 | 2145 | } |
... | ... | @@ -2141,7 +2151,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2141 | 2151 | r_tsptr = tcg_temp_new(TCG_TYPE_PTR); |
2142 | 2152 | tcg_gen_ld_ptr(r_tsptr, cpu_env, |
2143 | 2153 | offsetof(CPUState, tsptr)); |
2144 | - tcg_gen_ld_tl(cpu_T[0], r_tsptr, | |
2154 | + tcg_gen_ld_tl(cpu_dst, r_tsptr, | |
2145 | 2155 | offsetof(trap_state, tnpc)); |
2146 | 2156 | tcg_gen_discard_ptr(r_tsptr); |
2147 | 2157 | } |
... | ... | @@ -2153,7 +2163,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2153 | 2163 | r_tsptr = tcg_temp_new(TCG_TYPE_PTR); |
2154 | 2164 | tcg_gen_ld_ptr(r_tsptr, cpu_env, |
2155 | 2165 | offsetof(CPUState, tsptr)); |
2156 | - tcg_gen_ld_tl(cpu_T[0], r_tsptr, | |
2166 | + tcg_gen_ld_tl(cpu_dst, r_tsptr, | |
2157 | 2167 | offsetof(trap_state, tstate)); |
2158 | 2168 | tcg_gen_discard_ptr(r_tsptr); |
2159 | 2169 | } |
... | ... | @@ -2165,7 +2175,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2165 | 2175 | r_tsptr = tcg_temp_new(TCG_TYPE_PTR); |
2166 | 2176 | tcg_gen_ld_ptr(r_tsptr, cpu_env, |
2167 | 2177 | offsetof(CPUState, tsptr)); |
2168 | - tcg_gen_ld_i32(cpu_T[0], r_tsptr, | |
2178 | + tcg_gen_ld_i32(cpu_dst, r_tsptr, | |
2169 | 2179 | offsetof(trap_state, tt)); |
2170 | 2180 | tcg_gen_discard_ptr(r_tsptr); |
2171 | 2181 | } |
... | ... | @@ -2177,62 +2187,62 @@ static void disas_sparc_insn(DisasContext * dc) |
2177 | 2187 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
2178 | 2188 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
2179 | 2189 | offsetof(CPUState, tick)); |
2180 | - tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0], | |
2190 | + tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst, | |
2181 | 2191 | r_tickptr); |
2182 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2192 | + gen_movl_TN_reg(rd, cpu_dst); | |
2183 | 2193 | tcg_gen_discard_ptr(r_tickptr); |
2184 | 2194 | } |
2185 | 2195 | break; |
2186 | 2196 | case 5: // tba |
2187 | - tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tbr)); | |
2197 | + tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr)); | |
2188 | 2198 | break; |
2189 | 2199 | case 6: // pstate |
2190 | 2200 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, pstate)); |
2191 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2201 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2192 | 2202 | break; |
2193 | 2203 | case 7: // tl |
2194 | 2204 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, tl)); |
2195 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2205 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2196 | 2206 | break; |
2197 | 2207 | case 8: // pil |
2198 | 2208 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, psrpil)); |
2199 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2209 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2200 | 2210 | break; |
2201 | 2211 | case 9: // cwp |
2202 | - tcg_gen_helper_1_0(helper_rdcwp, cpu_T[0]); | |
2212 | + tcg_gen_helper_1_0(helper_rdcwp, cpu_dst); | |
2203 | 2213 | break; |
2204 | 2214 | case 10: // cansave |
2205 | 2215 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cansave)); |
2206 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2216 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2207 | 2217 | break; |
2208 | 2218 | case 11: // canrestore |
2209 | 2219 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, canrestore)); |
2210 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2220 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2211 | 2221 | break; |
2212 | 2222 | case 12: // cleanwin |
2213 | 2223 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cleanwin)); |
2214 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2224 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2215 | 2225 | break; |
2216 | 2226 | case 13: // otherwin |
2217 | 2227 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, otherwin)); |
2218 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2228 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2219 | 2229 | break; |
2220 | 2230 | case 14: // wstate |
2221 | 2231 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wstate)); |
2222 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2232 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2223 | 2233 | break; |
2224 | 2234 | case 16: // UA2005 gl |
2225 | 2235 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, gl)); |
2226 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2236 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2227 | 2237 | break; |
2228 | 2238 | case 26: // UA2005 strand status |
2229 | 2239 | if (!hypervisor(dc)) |
2230 | 2240 | goto priv_insn; |
2231 | 2241 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ssr)); |
2232 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2242 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2233 | 2243 | break; |
2234 | 2244 | case 31: // ver |
2235 | - tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, version)); | |
2245 | + tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, version)); | |
2236 | 2246 | break; |
2237 | 2247 | case 15: // fq |
2238 | 2248 | default: |
... | ... | @@ -2240,9 +2250,9 @@ static void disas_sparc_insn(DisasContext * dc) |
2240 | 2250 | } |
2241 | 2251 | #else |
2242 | 2252 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wim)); |
2243 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2253 | + tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32); | |
2244 | 2254 | #endif |
2245 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2255 | + gen_movl_TN_reg(rd, cpu_dst); | |
2246 | 2256 | break; |
2247 | 2257 | } else if (xop == 0x2b) { /* rdtbr / V9 flushw */ |
2248 | 2258 | #ifdef TARGET_SPARC64 |
... | ... | @@ -2250,13 +2260,13 @@ static void disas_sparc_insn(DisasContext * dc) |
2250 | 2260 | #else |
2251 | 2261 | if (!supervisor(dc)) |
2252 | 2262 | goto priv_insn; |
2253 | - tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tbr)); | |
2254 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2263 | + tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr)); | |
2264 | + gen_movl_TN_reg(rd, cpu_dst); | |
2255 | 2265 | #endif |
2256 | 2266 | break; |
2257 | 2267 | #endif |
2258 | 2268 | } else if (xop == 0x34) { /* FPU Operations */ |
2259 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
2269 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
2260 | 2270 | goto jmp_insn; |
2261 | 2271 | gen_op_clear_ieee_excp_and_FTT(); |
2262 | 2272 | rs1 = GET_FIELD(insn, 13, 17); |
... | ... | @@ -2631,7 +2641,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2631 | 2641 | #ifdef TARGET_SPARC64 |
2632 | 2642 | int cond; |
2633 | 2643 | #endif |
2634 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
2644 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
2635 | 2645 | goto jmp_insn; |
2636 | 2646 | gen_op_clear_ieee_excp_and_FTT(); |
2637 | 2647 | rs1 = GET_FIELD(insn, 13, 17); |
... | ... | @@ -2644,8 +2654,8 @@ static void disas_sparc_insn(DisasContext * dc) |
2644 | 2654 | l1 = gen_new_label(); |
2645 | 2655 | cond = GET_FIELD_SP(insn, 14, 17); |
2646 | 2656 | rs1 = GET_FIELD(insn, 13, 17); |
2647 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
2648 | - tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], | |
2657 | + gen_movl_reg_TN(rs1, cpu_src1); | |
2658 | + tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1, | |
2649 | 2659 | tcg_const_tl(0), l1); |
2650 | 2660 | gen_op_load_fpr_FT0(rs2); |
2651 | 2661 | gen_op_store_FT0_fpr(rd); |
... | ... | @@ -2657,8 +2667,8 @@ static void disas_sparc_insn(DisasContext * dc) |
2657 | 2667 | l1 = gen_new_label(); |
2658 | 2668 | cond = GET_FIELD_SP(insn, 14, 17); |
2659 | 2669 | rs1 = GET_FIELD(insn, 13, 17); |
2660 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
2661 | - tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], | |
2670 | + gen_movl_reg_TN(rs1, cpu_src1); | |
2671 | + tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1, | |
2662 | 2672 | tcg_const_tl(0), l1); |
2663 | 2673 | gen_op_load_fpr_DT0(DFPREG(rs2)); |
2664 | 2674 | gen_op_store_DT0_fpr(DFPREG(rd)); |
... | ... | @@ -2671,8 +2681,8 @@ static void disas_sparc_insn(DisasContext * dc) |
2671 | 2681 | l1 = gen_new_label(); |
2672 | 2682 | cond = GET_FIELD_SP(insn, 14, 17); |
2673 | 2683 | rs1 = GET_FIELD(insn, 13, 17); |
2674 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
2675 | - tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], | |
2684 | + gen_movl_reg_TN(rs1, cpu_src1); | |
2685 | + tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1, | |
2676 | 2686 | tcg_const_tl(0), l1); |
2677 | 2687 | gen_op_load_fpr_QT0(QFPREG(rs2)); |
2678 | 2688 | gen_op_store_QT0_fpr(QFPREG(rd)); |
... | ... | @@ -2848,259 +2858,259 @@ static void disas_sparc_insn(DisasContext * dc) |
2848 | 2858 | // or %g0, x, y -> mov T0, x; mov y, T0 |
2849 | 2859 | if (IS_IMM) { /* immediate */ |
2850 | 2860 | rs2 = GET_FIELDs(insn, 19, 31); |
2851 | - tcg_gen_movi_tl(cpu_T[0], (int)rs2); | |
2861 | + tcg_gen_movi_tl(cpu_dst, (int)rs2); | |
2852 | 2862 | } else { /* register */ |
2853 | 2863 | rs2 = GET_FIELD(insn, 27, 31); |
2854 | - gen_movl_reg_TN(rs2, cpu_T[0]); | |
2864 | + gen_movl_reg_TN(rs2, cpu_dst); | |
2855 | 2865 | } |
2856 | 2866 | } else { |
2857 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
2867 | + gen_movl_reg_TN(rs1, cpu_src1); | |
2858 | 2868 | if (IS_IMM) { /* immediate */ |
2859 | 2869 | rs2 = GET_FIELDs(insn, 19, 31); |
2860 | - tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2); | |
2870 | + tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2); | |
2861 | 2871 | } else { /* register */ |
2862 | 2872 | // or x, %g0, y -> mov T1, x; mov y, T1 |
2863 | 2873 | rs2 = GET_FIELD(insn, 27, 31); |
2864 | 2874 | if (rs2 != 0) { |
2865 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
2866 | - tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2875 | + gen_movl_reg_TN(rs2, cpu_src2); | |
2876 | + tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2); | |
2867 | 2877 | } |
2868 | 2878 | } |
2869 | 2879 | } |
2870 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2880 | + gen_movl_TN_reg(rd, cpu_dst); | |
2871 | 2881 | #endif |
2872 | 2882 | #ifdef TARGET_SPARC64 |
2873 | 2883 | } else if (xop == 0x25) { /* sll, V9 sllx */ |
2874 | 2884 | rs1 = GET_FIELD(insn, 13, 17); |
2875 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
2885 | + gen_movl_reg_TN(rs1, cpu_src1); | |
2876 | 2886 | if (IS_IMM) { /* immediate */ |
2877 | 2887 | rs2 = GET_FIELDs(insn, 20, 31); |
2878 | 2888 | if (insn & (1 << 12)) { |
2879 | - tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f); | |
2889 | + tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f); | |
2880 | 2890 | } else { |
2881 | - tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL); | |
2882 | - tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f); | |
2891 | + tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL); | |
2892 | + tcg_gen_shli_i64(cpu_dst, cpu_dst, rs2 & 0x1f); | |
2883 | 2893 | } |
2884 | 2894 | } else { /* register */ |
2885 | 2895 | rs2 = GET_FIELD(insn, 27, 31); |
2886 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
2896 | + gen_movl_reg_TN(rs2, cpu_src2); | |
2887 | 2897 | if (insn & (1 << 12)) { |
2888 | - tcg_gen_andi_i64(cpu_tmp0, cpu_T[1], 0x3f); | |
2889 | - tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_tmp0); | |
2898 | + tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f); | |
2899 | + tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0); | |
2890 | 2900 | } else { |
2891 | - tcg_gen_andi_i64(cpu_tmp0, cpu_T[1], 0x1f); | |
2892 | - tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL); | |
2893 | - tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_tmp0); | |
2901 | + tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f); | |
2902 | + tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL); | |
2903 | + tcg_gen_shl_i64(cpu_dst, cpu_dst, cpu_tmp0); | |
2894 | 2904 | } |
2895 | 2905 | } |
2896 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2906 | + gen_movl_TN_reg(rd, cpu_dst); | |
2897 | 2907 | } else if (xop == 0x26) { /* srl, V9 srlx */ |
2898 | 2908 | rs1 = GET_FIELD(insn, 13, 17); |
2899 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
2909 | + gen_movl_reg_TN(rs1, cpu_src1); | |
2900 | 2910 | if (IS_IMM) { /* immediate */ |
2901 | 2911 | rs2 = GET_FIELDs(insn, 20, 31); |
2902 | 2912 | if (insn & (1 << 12)) { |
2903 | - tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f); | |
2913 | + tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f); | |
2904 | 2914 | } else { |
2905 | - tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL); | |
2906 | - tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f); | |
2915 | + tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL); | |
2916 | + tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f); | |
2907 | 2917 | } |
2908 | 2918 | } else { /* register */ |
2909 | 2919 | rs2 = GET_FIELD(insn, 27, 31); |
2910 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
2920 | + gen_movl_reg_TN(rs2, cpu_src2); | |
2911 | 2921 | if (insn & (1 << 12)) { |
2912 | - tcg_gen_andi_i64(cpu_tmp0, cpu_T[1], 0x3f); | |
2913 | - tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_tmp0); | |
2922 | + tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f); | |
2923 | + tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0); | |
2914 | 2924 | } else { |
2915 | - tcg_gen_andi_i64(cpu_tmp0, cpu_T[1], 0x1f); | |
2916 | - tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL); | |
2917 | - tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_tmp0); | |
2925 | + tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f); | |
2926 | + tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL); | |
2927 | + tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0); | |
2918 | 2928 | } |
2919 | 2929 | } |
2920 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2930 | + gen_movl_TN_reg(rd, cpu_dst); | |
2921 | 2931 | } else if (xop == 0x27) { /* sra, V9 srax */ |
2922 | 2932 | rs1 = GET_FIELD(insn, 13, 17); |
2923 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
2933 | + gen_movl_reg_TN(rs1, cpu_src1); | |
2924 | 2934 | if (IS_IMM) { /* immediate */ |
2925 | 2935 | rs2 = GET_FIELDs(insn, 20, 31); |
2926 | 2936 | if (insn & (1 << 12)) { |
2927 | - tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f); | |
2937 | + tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f); | |
2928 | 2938 | } else { |
2929 | - tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL); | |
2930 | - tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]); | |
2931 | - tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f); | |
2939 | + tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL); | |
2940 | + tcg_gen_ext_i32_i64(cpu_dst, cpu_dst); | |
2941 | + tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f); | |
2932 | 2942 | } |
2933 | 2943 | } else { /* register */ |
2934 | 2944 | rs2 = GET_FIELD(insn, 27, 31); |
2935 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
2945 | + gen_movl_reg_TN(rs2, cpu_src2); | |
2936 | 2946 | if (insn & (1 << 12)) { |
2937 | - tcg_gen_andi_i64(cpu_tmp0, cpu_T[1], 0x3f); | |
2938 | - tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_tmp0); | |
2947 | + tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f); | |
2948 | + tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0); | |
2939 | 2949 | } else { |
2940 | - tcg_gen_andi_i64(cpu_tmp0, cpu_T[1], 0x1f); | |
2941 | - tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL); | |
2942 | - tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_tmp0); | |
2950 | + tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f); | |
2951 | + tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL); | |
2952 | + tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0); | |
2943 | 2953 | } |
2944 | 2954 | } |
2945 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
2955 | + gen_movl_TN_reg(rd, cpu_dst); | |
2946 | 2956 | #endif |
2947 | 2957 | } else if (xop < 0x36) { |
2948 | 2958 | rs1 = GET_FIELD(insn, 13, 17); |
2949 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
2959 | + gen_movl_reg_TN(rs1, cpu_src1); | |
2950 | 2960 | if (IS_IMM) { /* immediate */ |
2951 | 2961 | rs2 = GET_FIELDs(insn, 19, 31); |
2952 | - tcg_gen_movi_tl(cpu_T[1], (int)rs2); | |
2962 | + tcg_gen_movi_tl(cpu_src2, (int)rs2); | |
2953 | 2963 | } else { /* register */ |
2954 | 2964 | rs2 = GET_FIELD(insn, 27, 31); |
2955 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
2965 | + gen_movl_reg_TN(rs2, cpu_src2); | |
2956 | 2966 | } |
2957 | 2967 | if (xop < 0x20) { |
2958 | 2968 | switch (xop & ~0x10) { |
2959 | 2969 | case 0x0: |
2960 | 2970 | if (xop & 0x10) |
2961 | - gen_op_add_cc(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2971 | + gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2); | |
2962 | 2972 | else |
2963 | - tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2973 | + tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2); | |
2964 | 2974 | break; |
2965 | 2975 | case 0x1: |
2966 | - tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2976 | + tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2); | |
2967 | 2977 | if (xop & 0x10) |
2968 | - gen_op_logic_cc(cpu_T[0]); | |
2978 | + gen_op_logic_cc(cpu_dst); | |
2969 | 2979 | break; |
2970 | 2980 | case 0x2: |
2971 | - tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2981 | + tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2); | |
2972 | 2982 | if (xop & 0x10) |
2973 | - gen_op_logic_cc(cpu_T[0]); | |
2983 | + gen_op_logic_cc(cpu_dst); | |
2974 | 2984 | break; |
2975 | 2985 | case 0x3: |
2976 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2986 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2); | |
2977 | 2987 | if (xop & 0x10) |
2978 | - gen_op_logic_cc(cpu_T[0]); | |
2988 | + gen_op_logic_cc(cpu_dst); | |
2979 | 2989 | break; |
2980 | 2990 | case 0x4: |
2981 | 2991 | if (xop & 0x10) |
2982 | - gen_op_sub_cc(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2992 | + gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2); | |
2983 | 2993 | else |
2984 | - tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2994 | + tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2); | |
2985 | 2995 | break; |
2986 | 2996 | case 0x5: |
2987 | - tcg_gen_xori_tl(cpu_tmp0, cpu_T[1], -1); | |
2988 | - tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0); | |
2997 | + tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1); | |
2998 | + tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0); | |
2989 | 2999 | if (xop & 0x10) |
2990 | - gen_op_logic_cc(cpu_T[0]); | |
3000 | + gen_op_logic_cc(cpu_dst); | |
2991 | 3001 | break; |
2992 | 3002 | case 0x6: |
2993 | - tcg_gen_xori_tl(cpu_tmp0, cpu_T[1], -1); | |
2994 | - tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0); | |
3003 | + tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1); | |
3004 | + tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0); | |
2995 | 3005 | if (xop & 0x10) |
2996 | - gen_op_logic_cc(cpu_T[0]); | |
3006 | + gen_op_logic_cc(cpu_dst); | |
2997 | 3007 | break; |
2998 | 3008 | case 0x7: |
2999 | - tcg_gen_xori_tl(cpu_tmp0, cpu_T[1], -1); | |
3000 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0); | |
3009 | + tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1); | |
3010 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0); | |
3001 | 3011 | if (xop & 0x10) |
3002 | - gen_op_logic_cc(cpu_T[0]); | |
3012 | + gen_op_logic_cc(cpu_dst); | |
3003 | 3013 | break; |
3004 | 3014 | case 0x8: |
3005 | 3015 | if (xop & 0x10) |
3006 | - gen_op_addx_cc(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3016 | + gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2); | |
3007 | 3017 | else { |
3008 | 3018 | gen_mov_reg_C(cpu_tmp0, cpu_psr); |
3009 | - tcg_gen_add_tl(cpu_tmp0, cpu_T[1], cpu_tmp0); | |
3010 | - tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp0); | |
3019 | + tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0); | |
3020 | + tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0); | |
3011 | 3021 | } |
3012 | 3022 | break; |
3013 | 3023 | #ifdef TARGET_SPARC64 |
3014 | 3024 | case 0x9: /* V9 mulx */ |
3015 | - tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3025 | + tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2); | |
3016 | 3026 | break; |
3017 | 3027 | #endif |
3018 | 3028 | case 0xa: |
3019 | - gen_op_umul(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3029 | + gen_op_umul(cpu_dst, cpu_src1, cpu_src2); | |
3020 | 3030 | if (xop & 0x10) |
3021 | - gen_op_logic_cc(cpu_T[0]); | |
3031 | + gen_op_logic_cc(cpu_dst); | |
3022 | 3032 | break; |
3023 | 3033 | case 0xb: |
3024 | - gen_op_smul(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3034 | + gen_op_smul(cpu_dst, cpu_src1, cpu_src2); | |
3025 | 3035 | if (xop & 0x10) |
3026 | - gen_op_logic_cc(cpu_T[0]); | |
3036 | + gen_op_logic_cc(cpu_dst); | |
3027 | 3037 | break; |
3028 | 3038 | case 0xc: |
3029 | 3039 | if (xop & 0x10) |
3030 | - gen_op_subx_cc(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3040 | + gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2); | |
3031 | 3041 | else { |
3032 | 3042 | gen_mov_reg_C(cpu_tmp0, cpu_psr); |
3033 | - tcg_gen_add_tl(cpu_tmp0, cpu_T[1], cpu_tmp0); | |
3034 | - tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp0); | |
3043 | + tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0); | |
3044 | + tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0); | |
3035 | 3045 | } |
3036 | 3046 | break; |
3037 | 3047 | #ifdef TARGET_SPARC64 |
3038 | 3048 | case 0xd: /* V9 udivx */ |
3039 | - gen_trap_ifdivzero_tl(cpu_T[1]); | |
3040 | - tcg_gen_divu_i64(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3049 | + gen_trap_ifdivzero_tl(cpu_src2); | |
3050 | + tcg_gen_divu_i64(cpu_dst, cpu_src1, cpu_src2); | |
3041 | 3051 | break; |
3042 | 3052 | #endif |
3043 | 3053 | case 0xe: |
3044 | - tcg_gen_helper_1_2(helper_udiv, cpu_T[0], cpu_T[0], cpu_T[1]); | |
3054 | + tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1, cpu_src2); | |
3045 | 3055 | if (xop & 0x10) |
3046 | - gen_op_div_cc(cpu_T[0]); | |
3056 | + gen_op_div_cc(cpu_dst); | |
3047 | 3057 | break; |
3048 | 3058 | case 0xf: |
3049 | - tcg_gen_helper_1_2(helper_sdiv, cpu_T[0], cpu_T[0], cpu_T[1]); | |
3059 | + tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1, cpu_src2); | |
3050 | 3060 | if (xop & 0x10) |
3051 | - gen_op_div_cc(cpu_T[0]); | |
3061 | + gen_op_div_cc(cpu_dst); | |
3052 | 3062 | break; |
3053 | 3063 | default: |
3054 | 3064 | goto illegal_insn; |
3055 | 3065 | } |
3056 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3066 | + gen_movl_TN_reg(rd, cpu_dst); | |
3057 | 3067 | } else { |
3058 | 3068 | switch (xop) { |
3059 | 3069 | case 0x20: /* taddcc */ |
3060 | - gen_op_tadd_cc(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3061 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3070 | + gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2); | |
3071 | + gen_movl_TN_reg(rd, cpu_dst); | |
3062 | 3072 | break; |
3063 | 3073 | case 0x21: /* tsubcc */ |
3064 | - gen_op_tsub_cc(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3065 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3074 | + gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2); | |
3075 | + gen_movl_TN_reg(rd, cpu_dst); | |
3066 | 3076 | break; |
3067 | 3077 | case 0x22: /* taddcctv */ |
3068 | - save_state(dc, cpu_T[2]); | |
3069 | - gen_op_tadd_ccTV(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3070 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3078 | + save_state(dc, cpu_cond); | |
3079 | + gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2); | |
3080 | + gen_movl_TN_reg(rd, cpu_dst); | |
3071 | 3081 | break; |
3072 | 3082 | case 0x23: /* tsubcctv */ |
3073 | - save_state(dc, cpu_T[2]); | |
3074 | - gen_op_tsub_ccTV(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3075 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3083 | + save_state(dc, cpu_cond); | |
3084 | + gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2); | |
3085 | + gen_movl_TN_reg(rd, cpu_dst); | |
3076 | 3086 | break; |
3077 | 3087 | case 0x24: /* mulscc */ |
3078 | - gen_op_mulscc(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3079 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3088 | + gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2); | |
3089 | + gen_movl_TN_reg(rd, cpu_dst); | |
3080 | 3090 | break; |
3081 | 3091 | #ifndef TARGET_SPARC64 |
3082 | 3092 | case 0x25: /* sll */ |
3083 | - tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], 0x1f); | |
3084 | - tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0); | |
3085 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3093 | + tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f); | |
3094 | + tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0); | |
3095 | + gen_movl_TN_reg(rd, cpu_dst); | |
3086 | 3096 | break; |
3087 | 3097 | case 0x26: /* srl */ |
3088 | - tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], 0x1f); | |
3089 | - tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0); | |
3090 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3098 | + tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f); | |
3099 | + tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0); | |
3100 | + gen_movl_TN_reg(rd, cpu_dst); | |
3091 | 3101 | break; |
3092 | 3102 | case 0x27: /* sra */ |
3093 | - tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], 0x1f); | |
3094 | - tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_tmp0); | |
3095 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3103 | + tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f); | |
3104 | + tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0); | |
3105 | + gen_movl_TN_reg(rd, cpu_dst); | |
3096 | 3106 | break; |
3097 | 3107 | #endif |
3098 | 3108 | case 0x30: |
3099 | 3109 | { |
3100 | 3110 | switch(rd) { |
3101 | 3111 | case 0: /* wry */ |
3102 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3103 | - tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, y)); | |
3112 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2); | |
3113 | + tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, y)); | |
3104 | 3114 | break; |
3105 | 3115 | #ifndef TARGET_SPARC64 |
3106 | 3116 | case 0x01 ... 0x0f: /* undefined in the |
... | ... | @@ -3114,19 +3124,19 @@ static void disas_sparc_insn(DisasContext * dc) |
3114 | 3124 | break; |
3115 | 3125 | #else |
3116 | 3126 | case 0x2: /* V9 wrccr */ |
3117 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3118 | - tcg_gen_helper_0_1(helper_wrccr, cpu_T[0]); | |
3127 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2); | |
3128 | + tcg_gen_helper_0_1(helper_wrccr, cpu_dst); | |
3119 | 3129 | break; |
3120 | 3130 | case 0x3: /* V9 wrasi */ |
3121 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3122 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3131 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2); | |
3132 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3123 | 3133 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, asi)); |
3124 | 3134 | break; |
3125 | 3135 | case 0x6: /* V9 wrfprs */ |
3126 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3127 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3136 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2); | |
3137 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3128 | 3138 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fprs)); |
3129 | - save_state(dc, cpu_T[2]); | |
3139 | + save_state(dc, cpu_cond); | |
3130 | 3140 | gen_op_next_insn(); |
3131 | 3141 | tcg_gen_exit_tb(0); |
3132 | 3142 | dc->is_br = 1; |
... | ... | @@ -3138,10 +3148,10 @@ static void disas_sparc_insn(DisasContext * dc) |
3138 | 3148 | #endif |
3139 | 3149 | break; |
3140 | 3150 | case 0x13: /* Graphics Status */ |
3141 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
3151 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
3142 | 3152 | goto jmp_insn; |
3143 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3144 | - tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, gsr)); | |
3153 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2); | |
3154 | + tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, gsr)); | |
3145 | 3155 | break; |
3146 | 3156 | case 0x17: /* Tick compare */ |
3147 | 3157 | #if !defined(CONFIG_USER_ONLY) |
... | ... | @@ -3151,15 +3161,15 @@ static void disas_sparc_insn(DisasContext * dc) |
3151 | 3161 | { |
3152 | 3162 | TCGv r_tickptr; |
3153 | 3163 | |
3154 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], | |
3155 | - cpu_T[1]); | |
3156 | - tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, | |
3164 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, | |
3165 | + cpu_src2); | |
3166 | + tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, | |
3157 | 3167 | tick_cmpr)); |
3158 | 3168 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
3159 | 3169 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
3160 | 3170 | offsetof(CPUState, tick)); |
3161 | 3171 | tcg_gen_helper_0_2(helper_tick_set_limit, |
3162 | - r_tickptr, cpu_T[0]); | |
3172 | + r_tickptr, cpu_dst); | |
3163 | 3173 | tcg_gen_discard_ptr(r_tickptr); |
3164 | 3174 | } |
3165 | 3175 | break; |
... | ... | @@ -3171,13 +3181,13 @@ static void disas_sparc_insn(DisasContext * dc) |
3171 | 3181 | { |
3172 | 3182 | TCGv r_tickptr; |
3173 | 3183 | |
3174 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], | |
3175 | - cpu_T[1]); | |
3184 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, | |
3185 | + cpu_src2); | |
3176 | 3186 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
3177 | 3187 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
3178 | 3188 | offsetof(CPUState, stick)); |
3179 | 3189 | tcg_gen_helper_0_2(helper_tick_set_count, |
3180 | - r_tickptr, cpu_T[0]); | |
3190 | + r_tickptr, cpu_dst); | |
3181 | 3191 | tcg_gen_discard_ptr(r_tickptr); |
3182 | 3192 | } |
3183 | 3193 | break; |
... | ... | @@ -3189,15 +3199,15 @@ static void disas_sparc_insn(DisasContext * dc) |
3189 | 3199 | { |
3190 | 3200 | TCGv r_tickptr; |
3191 | 3201 | |
3192 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], | |
3193 | - cpu_T[1]); | |
3194 | - tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, | |
3202 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, | |
3203 | + cpu_src2); | |
3204 | + tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, | |
3195 | 3205 | stick_cmpr)); |
3196 | 3206 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
3197 | 3207 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
3198 | 3208 | offsetof(CPUState, stick)); |
3199 | 3209 | tcg_gen_helper_0_2(helper_tick_set_limit, |
3200 | - r_tickptr, cpu_T[0]); | |
3210 | + r_tickptr, cpu_dst); | |
3201 | 3211 | tcg_gen_discard_ptr(r_tickptr); |
3202 | 3212 | } |
3203 | 3213 | break; |
... | ... | @@ -3236,9 +3246,9 @@ static void disas_sparc_insn(DisasContext * dc) |
3236 | 3246 | goto illegal_insn; |
3237 | 3247 | } |
3238 | 3248 | #else |
3239 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3240 | - tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]); | |
3241 | - save_state(dc, cpu_T[2]); | |
3249 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2); | |
3250 | + tcg_gen_helper_0_1(helper_wrpsr, cpu_dst); | |
3251 | + save_state(dc, cpu_cond); | |
3242 | 3252 | gen_op_next_insn(); |
3243 | 3253 | tcg_gen_exit_tb(0); |
3244 | 3254 | dc->is_br = 1; |
... | ... | @@ -3249,7 +3259,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3249 | 3259 | { |
3250 | 3260 | if (!supervisor(dc)) |
3251 | 3261 | goto priv_insn; |
3252 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3262 | + tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2); | |
3253 | 3263 | #ifdef TARGET_SPARC64 |
3254 | 3264 | switch (rd) { |
3255 | 3265 | case 0: // tpc |
... | ... | @@ -3259,7 +3269,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3259 | 3269 | r_tsptr = tcg_temp_new(TCG_TYPE_PTR); |
3260 | 3270 | tcg_gen_ld_ptr(r_tsptr, cpu_env, |
3261 | 3271 | offsetof(CPUState, tsptr)); |
3262 | - tcg_gen_st_tl(cpu_T[0], r_tsptr, | |
3272 | + tcg_gen_st_tl(cpu_dst, r_tsptr, | |
3263 | 3273 | offsetof(trap_state, tpc)); |
3264 | 3274 | tcg_gen_discard_ptr(r_tsptr); |
3265 | 3275 | } |
... | ... | @@ -3271,7 +3281,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3271 | 3281 | r_tsptr = tcg_temp_new(TCG_TYPE_PTR); |
3272 | 3282 | tcg_gen_ld_ptr(r_tsptr, cpu_env, |
3273 | 3283 | offsetof(CPUState, tsptr)); |
3274 | - tcg_gen_st_tl(cpu_T[0], r_tsptr, | |
3284 | + tcg_gen_st_tl(cpu_dst, r_tsptr, | |
3275 | 3285 | offsetof(trap_state, tnpc)); |
3276 | 3286 | tcg_gen_discard_ptr(r_tsptr); |
3277 | 3287 | } |
... | ... | @@ -3283,7 +3293,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3283 | 3293 | r_tsptr = tcg_temp_new(TCG_TYPE_PTR); |
3284 | 3294 | tcg_gen_ld_ptr(r_tsptr, cpu_env, |
3285 | 3295 | offsetof(CPUState, tsptr)); |
3286 | - tcg_gen_st_tl(cpu_T[0], r_tsptr, | |
3296 | + tcg_gen_st_tl(cpu_dst, r_tsptr, | |
3287 | 3297 | offsetof(trap_state, tstate)); |
3288 | 3298 | tcg_gen_discard_ptr(r_tsptr); |
3289 | 3299 | } |
... | ... | @@ -3295,7 +3305,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3295 | 3305 | r_tsptr = tcg_temp_new(TCG_TYPE_PTR); |
3296 | 3306 | tcg_gen_ld_ptr(r_tsptr, cpu_env, |
3297 | 3307 | offsetof(CPUState, tsptr)); |
3298 | - tcg_gen_st_i32(cpu_T[0], r_tsptr, | |
3308 | + tcg_gen_st_i32(cpu_dst, r_tsptr, | |
3299 | 3309 | offsetof(trap_state, tt)); |
3300 | 3310 | tcg_gen_discard_ptr(r_tsptr); |
3301 | 3311 | } |
... | ... | @@ -3308,67 +3318,67 @@ static void disas_sparc_insn(DisasContext * dc) |
3308 | 3318 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
3309 | 3319 | offsetof(CPUState, tick)); |
3310 | 3320 | tcg_gen_helper_0_2(helper_tick_set_count, |
3311 | - r_tickptr, cpu_T[0]); | |
3321 | + r_tickptr, cpu_dst); | |
3312 | 3322 | tcg_gen_discard_ptr(r_tickptr); |
3313 | 3323 | } |
3314 | 3324 | break; |
3315 | 3325 | case 5: // tba |
3316 | - tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tbr)); | |
3326 | + tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr)); | |
3317 | 3327 | break; |
3318 | 3328 | case 6: // pstate |
3319 | - save_state(dc, cpu_T[2]); | |
3320 | - tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]); | |
3329 | + save_state(dc, cpu_cond); | |
3330 | + tcg_gen_helper_0_1(helper_wrpstate, cpu_dst); | |
3321 | 3331 | gen_op_next_insn(); |
3322 | 3332 | tcg_gen_exit_tb(0); |
3323 | 3333 | dc->is_br = 1; |
3324 | 3334 | break; |
3325 | 3335 | case 7: // tl |
3326 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3336 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3327 | 3337 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, tl)); |
3328 | 3338 | break; |
3329 | 3339 | case 8: // pil |
3330 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3340 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3331 | 3341 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, psrpil)); |
3332 | 3342 | break; |
3333 | 3343 | case 9: // cwp |
3334 | - tcg_gen_helper_0_1(helper_wrcwp, cpu_T[0]); | |
3344 | + tcg_gen_helper_0_1(helper_wrcwp, cpu_dst); | |
3335 | 3345 | break; |
3336 | 3346 | case 10: // cansave |
3337 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3347 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3338 | 3348 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cansave)); |
3339 | 3349 | break; |
3340 | 3350 | case 11: // canrestore |
3341 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3351 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3342 | 3352 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, canrestore)); |
3343 | 3353 | break; |
3344 | 3354 | case 12: // cleanwin |
3345 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3355 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3346 | 3356 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cleanwin)); |
3347 | 3357 | break; |
3348 | 3358 | case 13: // otherwin |
3349 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3359 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3350 | 3360 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, otherwin)); |
3351 | 3361 | break; |
3352 | 3362 | case 14: // wstate |
3353 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3363 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3354 | 3364 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wstate)); |
3355 | 3365 | break; |
3356 | 3366 | case 16: // UA2005 gl |
3357 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3367 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3358 | 3368 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, gl)); |
3359 | 3369 | break; |
3360 | 3370 | case 26: // UA2005 strand status |
3361 | 3371 | if (!hypervisor(dc)) |
3362 | 3372 | goto priv_insn; |
3363 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3373 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3364 | 3374 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ssr)); |
3365 | 3375 | break; |
3366 | 3376 | default: |
3367 | 3377 | goto illegal_insn; |
3368 | 3378 | } |
3369 | 3379 | #else |
3370 | - tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1)); | |
3371 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3380 | + tcg_gen_andi_tl(cpu_dst, cpu_dst, ((1 << NWINDOWS) - 1)); | |
3381 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3372 | 3382 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wim)); |
3373 | 3383 | #endif |
3374 | 3384 | } |
... | ... | @@ -3378,16 +3388,16 @@ static void disas_sparc_insn(DisasContext * dc) |
3378 | 3388 | #ifndef TARGET_SPARC64 |
3379 | 3389 | if (!supervisor(dc)) |
3380 | 3390 | goto priv_insn; |
3381 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3382 | - tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tbr)); | |
3391 | + tcg_gen_xor_tl(cpu_dst, cpu_dst, cpu_src2); | |
3392 | + tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr)); | |
3383 | 3393 | #else |
3384 | 3394 | if (!hypervisor(dc)) |
3385 | 3395 | goto priv_insn; |
3386 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3396 | + tcg_gen_xor_tl(cpu_dst, cpu_dst, cpu_src2); | |
3387 | 3397 | switch (rd) { |
3388 | 3398 | case 0: // hpstate |
3389 | 3399 | // XXX gen_op_wrhpstate(); |
3390 | - save_state(dc, cpu_T[2]); | |
3400 | + save_state(dc, cpu_cond); | |
3391 | 3401 | gen_op_next_insn(); |
3392 | 3402 | tcg_gen_exit_tb(0); |
3393 | 3403 | dc->is_br = 1; |
... | ... | @@ -3396,24 +3406,24 @@ static void disas_sparc_insn(DisasContext * dc) |
3396 | 3406 | // XXX gen_op_wrhtstate(); |
3397 | 3407 | break; |
3398 | 3408 | case 3: // hintp |
3399 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3409 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3400 | 3410 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hintp)); |
3401 | 3411 | break; |
3402 | 3412 | case 5: // htba |
3403 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3413 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst); | |
3404 | 3414 | tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, htba)); |
3405 | 3415 | break; |
3406 | 3416 | case 31: // hstick_cmpr |
3407 | 3417 | { |
3408 | 3418 | TCGv r_tickptr; |
3409 | 3419 | |
3410 | - tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, | |
3420 | + tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, | |
3411 | 3421 | hstick_cmpr)); |
3412 | 3422 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
3413 | 3423 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
3414 | 3424 | offsetof(CPUState, hstick)); |
3415 | 3425 | tcg_gen_helper_0_2(helper_tick_set_limit, |
3416 | - r_tickptr, cpu_T[0]); | |
3426 | + r_tickptr, cpu_dst); | |
3417 | 3427 | tcg_gen_discard_ptr(r_tickptr); |
3418 | 3428 | } |
3419 | 3429 | break; |
... | ... | @@ -3451,34 +3461,34 @@ static void disas_sparc_insn(DisasContext * dc) |
3451 | 3461 | tcg_const_tl(0), l1); |
3452 | 3462 | if (IS_IMM) { /* immediate */ |
3453 | 3463 | rs2 = GET_FIELD_SPs(insn, 0, 10); |
3454 | - tcg_gen_movi_tl(cpu_T[0], (int)rs2); | |
3464 | + tcg_gen_movi_tl(cpu_dst, (int)rs2); | |
3455 | 3465 | } else { |
3456 | 3466 | rs2 = GET_FIELD_SP(insn, 0, 4); |
3457 | - gen_movl_reg_TN(rs2, cpu_T[0]); | |
3467 | + gen_movl_reg_TN(rs2, cpu_dst); | |
3458 | 3468 | } |
3459 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3469 | + gen_movl_TN_reg(rd, cpu_dst); | |
3460 | 3470 | gen_set_label(l1); |
3461 | 3471 | tcg_gen_discard_tl(r_cond); |
3462 | 3472 | break; |
3463 | 3473 | } |
3464 | 3474 | case 0x2d: /* V9 sdivx */ |
3465 | - gen_op_sdivx(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3466 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3475 | + gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2); | |
3476 | + gen_movl_TN_reg(rd, cpu_dst); | |
3467 | 3477 | break; |
3468 | 3478 | case 0x2e: /* V9 popc */ |
3469 | 3479 | { |
3470 | 3480 | if (IS_IMM) { /* immediate */ |
3471 | 3481 | rs2 = GET_FIELD_SPs(insn, 0, 12); |
3472 | - tcg_gen_movi_tl(cpu_T[1], (int)rs2); | |
3482 | + tcg_gen_movi_tl(cpu_src2, (int)rs2); | |
3473 | 3483 | // XXX optimize: popc(constant) |
3474 | 3484 | } |
3475 | 3485 | else { |
3476 | 3486 | rs2 = GET_FIELD_SP(insn, 0, 4); |
3477 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
3487 | + gen_movl_reg_TN(rs2, cpu_src2); | |
3478 | 3488 | } |
3479 | - tcg_gen_helper_1_1(helper_popc, cpu_T[0], | |
3480 | - cpu_T[1]); | |
3481 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3489 | + tcg_gen_helper_1_1(helper_popc, cpu_dst, | |
3490 | + cpu_src2); | |
3491 | + gen_movl_TN_reg(rd, cpu_dst); | |
3482 | 3492 | } |
3483 | 3493 | case 0x2f: /* V9 movr */ |
3484 | 3494 | { |
... | ... | @@ -3486,20 +3496,20 @@ static void disas_sparc_insn(DisasContext * dc) |
3486 | 3496 | int l1; |
3487 | 3497 | |
3488 | 3498 | rs1 = GET_FIELD(insn, 13, 17); |
3489 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
3499 | + gen_movl_reg_TN(rs1, cpu_src1); | |
3490 | 3500 | |
3491 | 3501 | l1 = gen_new_label(); |
3492 | 3502 | |
3493 | - tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], | |
3503 | + tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1, | |
3494 | 3504 | tcg_const_tl(0), l1); |
3495 | 3505 | if (IS_IMM) { /* immediate */ |
3496 | 3506 | rs2 = GET_FIELD_SPs(insn, 0, 9); |
3497 | - tcg_gen_movi_tl(cpu_T[0], (int)rs2); | |
3507 | + tcg_gen_movi_tl(cpu_dst, (int)rs2); | |
3498 | 3508 | } else { |
3499 | 3509 | rs2 = GET_FIELD_SP(insn, 0, 4); |
3500 | - gen_movl_reg_TN(rs2, cpu_T[0]); | |
3510 | + gen_movl_reg_TN(rs2, cpu_dst); | |
3501 | 3511 | } |
3502 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3512 | + gen_movl_TN_reg(rd, cpu_dst); | |
3503 | 3513 | gen_set_label(l1); |
3504 | 3514 | break; |
3505 | 3515 | } |
... | ... | @@ -3513,7 +3523,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3513 | 3523 | int opf = GET_FIELD_SP(insn, 5, 13); |
3514 | 3524 | rs1 = GET_FIELD(insn, 13, 17); |
3515 | 3525 | rs2 = GET_FIELD(insn, 27, 31); |
3516 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
3526 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
3517 | 3527 | goto jmp_insn; |
3518 | 3528 | |
3519 | 3529 | switch (opf) { |
... | ... | @@ -3532,34 +3542,34 @@ static void disas_sparc_insn(DisasContext * dc) |
3532 | 3542 | // XXX |
3533 | 3543 | goto illegal_insn; |
3534 | 3544 | case 0x010: /* VIS I array8 */ |
3535 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
3536 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
3537 | - tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0], | |
3538 | - cpu_T[1]); | |
3539 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3545 | + gen_movl_reg_TN(rs1, cpu_src1); | |
3546 | + gen_movl_reg_TN(rs2, cpu_src2); | |
3547 | + tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1, | |
3548 | + cpu_src2); | |
3549 | + gen_movl_TN_reg(rd, cpu_dst); | |
3540 | 3550 | break; |
3541 | 3551 | case 0x012: /* VIS I array16 */ |
3542 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
3543 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
3544 | - tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0], | |
3545 | - cpu_T[1]); | |
3546 | - tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 1); | |
3547 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3552 | + gen_movl_reg_TN(rs1, cpu_src1); | |
3553 | + gen_movl_reg_TN(rs2, cpu_src2); | |
3554 | + tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1, | |
3555 | + cpu_src2); | |
3556 | + tcg_gen_shli_i64(cpu_dst, cpu_dst, 1); | |
3557 | + gen_movl_TN_reg(rd, cpu_dst); | |
3548 | 3558 | break; |
3549 | 3559 | case 0x014: /* VIS I array32 */ |
3550 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
3551 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
3552 | - tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0], | |
3553 | - cpu_T[1]); | |
3554 | - tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2); | |
3555 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3560 | + gen_movl_reg_TN(rs1, cpu_src1); | |
3561 | + gen_movl_reg_TN(rs2, cpu_src2); | |
3562 | + tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1, | |
3563 | + cpu_src2); | |
3564 | + tcg_gen_shli_i64(cpu_dst, cpu_dst, 2); | |
3565 | + gen_movl_TN_reg(rd, cpu_dst); | |
3556 | 3566 | break; |
3557 | 3567 | case 0x018: /* VIS I alignaddr */ |
3558 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
3559 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
3560 | - tcg_gen_helper_1_2(helper_alignaddr, cpu_T[0], cpu_T[0], | |
3561 | - cpu_T[1]); | |
3562 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
3568 | + gen_movl_reg_TN(rs1, cpu_src1); | |
3569 | + gen_movl_reg_TN(rs2, cpu_src2); | |
3570 | + tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1, | |
3571 | + cpu_src2); | |
3572 | + gen_movl_TN_reg(rd, cpu_dst); | |
3563 | 3573 | break; |
3564 | 3574 | case 0x019: /* VIS II bmask */ |
3565 | 3575 | case 0x01a: /* VIS I alignaddrl */ |
... | ... | @@ -3921,42 +3931,42 @@ static void disas_sparc_insn(DisasContext * dc) |
3921 | 3931 | #ifdef TARGET_SPARC64 |
3922 | 3932 | } else if (xop == 0x39) { /* V9 return */ |
3923 | 3933 | rs1 = GET_FIELD(insn, 13, 17); |
3924 | - save_state(dc, cpu_T[2]); | |
3925 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
3934 | + save_state(dc, cpu_cond); | |
3935 | + gen_movl_reg_TN(rs1, cpu_src1); | |
3926 | 3936 | if (IS_IMM) { /* immediate */ |
3927 | 3937 | rs2 = GET_FIELDs(insn, 19, 31); |
3928 | - tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2); | |
3938 | + tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2); | |
3929 | 3939 | } else { /* register */ |
3930 | 3940 | rs2 = GET_FIELD(insn, 27, 31); |
3931 | 3941 | #if defined(OPTIM) |
3932 | 3942 | if (rs2) { |
3933 | 3943 | #endif |
3934 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
3935 | - tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3944 | + gen_movl_reg_TN(rs2, cpu_src2); | |
3945 | + tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2); | |
3936 | 3946 | #if defined(OPTIM) |
3937 | 3947 | } |
3938 | 3948 | #endif |
3939 | 3949 | } |
3940 | 3950 | tcg_gen_helper_0_0(helper_restore); |
3941 | - gen_mov_pc_npc(dc, cpu_T[2]); | |
3942 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
3943 | - tcg_gen_mov_tl(cpu_npc, cpu_T[0]); | |
3951 | + gen_mov_pc_npc(dc, cpu_cond); | |
3952 | + tcg_gen_helper_0_2(helper_check_align, cpu_dst, tcg_const_i32(3)); | |
3953 | + tcg_gen_mov_tl(cpu_npc, cpu_dst); | |
3944 | 3954 | dc->npc = DYNAMIC_PC; |
3945 | 3955 | goto jmp_insn; |
3946 | 3956 | #endif |
3947 | 3957 | } else { |
3948 | 3958 | rs1 = GET_FIELD(insn, 13, 17); |
3949 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
3959 | + gen_movl_reg_TN(rs1, cpu_src1); | |
3950 | 3960 | if (IS_IMM) { /* immediate */ |
3951 | 3961 | rs2 = GET_FIELDs(insn, 19, 31); |
3952 | - tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2); | |
3962 | + tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2); | |
3953 | 3963 | } else { /* register */ |
3954 | 3964 | rs2 = GET_FIELD(insn, 27, 31); |
3955 | 3965 | #if defined(OPTIM) |
3956 | 3966 | if (rs2) { |
3957 | 3967 | #endif |
3958 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
3959 | - tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3968 | + gen_movl_reg_TN(rs2, cpu_src2); | |
3969 | + tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2); | |
3960 | 3970 | #if defined(OPTIM) |
3961 | 3971 | } |
3962 | 3972 | #endif |
... | ... | @@ -3968,9 +3978,9 @@ static void disas_sparc_insn(DisasContext * dc) |
3968 | 3978 | tcg_gen_movi_tl(cpu_tmp0, dc->pc); |
3969 | 3979 | gen_movl_TN_reg(rd, cpu_tmp0); |
3970 | 3980 | } |
3971 | - gen_mov_pc_npc(dc, cpu_T[2]); | |
3972 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
3973 | - tcg_gen_mov_tl(cpu_npc, cpu_T[0]); | |
3981 | + gen_mov_pc_npc(dc, cpu_cond); | |
3982 | + tcg_gen_helper_0_2(helper_check_align, cpu_dst, tcg_const_i32(3)); | |
3983 | + tcg_gen_mov_tl(cpu_npc, cpu_dst); | |
3974 | 3984 | dc->npc = DYNAMIC_PC; |
3975 | 3985 | } |
3976 | 3986 | goto jmp_insn; |
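Note (illustrative, not part of the commit): both the V9 return and the jmpl path in the hunk above compute the branch target as rs1 plus either a sign-extended immediate or rs2, then require 4-byte alignment (helper_check_align with mask 3) before installing it as the new npc. A small standalone model of that address path; names and values are illustrative only.

#include <stdint.h>
#include <stdio.h>

/* Returns 0 and fills *npc on success; -1 models the alignment trap the
   translated code raises through helper_check_align. */
static int jmpl_target(uint64_t rs1, uint64_t src2, uint64_t *npc)
{
    uint64_t target = rs1 + src2;
    if (target & 3)
        return -1;
    *npc = target;
    return 0;
}

int main(void)
{
    uint64_t npc = 0;
    printf("%d %llx\n", jmpl_target(0x1000, 0x20, &npc),
           (unsigned long long)npc);                 /* 0 1020 */
    printf("%d\n", jmpl_target(0x1000, 0x22, &npc)); /* -1: misaligned */
    return 0;
}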
... | ... | @@ -3979,26 +3989,26 @@ static void disas_sparc_insn(DisasContext * dc) |
3979 | 3989 | { |
3980 | 3990 | if (!supervisor(dc)) |
3981 | 3991 | goto priv_insn; |
3982 | - gen_mov_pc_npc(dc, cpu_T[2]); | |
3983 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
3984 | - tcg_gen_mov_tl(cpu_npc, cpu_T[0]); | |
3992 | + gen_mov_pc_npc(dc, cpu_cond); | |
3993 | + tcg_gen_helper_0_2(helper_check_align, cpu_dst, tcg_const_i32(3)); | |
3994 | + tcg_gen_mov_tl(cpu_npc, cpu_dst); | |
3985 | 3995 | dc->npc = DYNAMIC_PC; |
3986 | 3996 | tcg_gen_helper_0_0(helper_rett); |
3987 | 3997 | } |
3988 | 3998 | goto jmp_insn; |
3989 | 3999 | #endif |
3990 | 4000 | case 0x3b: /* flush */ |
3991 | - tcg_gen_helper_0_1(helper_flush, cpu_T[0]); | |
4001 | + tcg_gen_helper_0_1(helper_flush, cpu_dst); | |
3992 | 4002 | break; |
3993 | 4003 | case 0x3c: /* save */ |
3994 | - save_state(dc, cpu_T[2]); | |
4004 | + save_state(dc, cpu_cond); | |
3995 | 4005 | tcg_gen_helper_0_0(helper_save); |
3996 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
4006 | + gen_movl_TN_reg(rd, cpu_dst); | |
3997 | 4007 | break; |
3998 | 4008 | case 0x3d: /* restore */ |
3999 | - save_state(dc, cpu_T[2]); | |
4009 | + save_state(dc, cpu_cond); | |
4000 | 4010 | tcg_gen_helper_0_0(helper_restore); |
4001 | - gen_movl_TN_reg(rd, cpu_T[0]); | |
4011 | + gen_movl_TN_reg(rd, cpu_dst); | |
4002 | 4012 | break; |
4003 | 4013 | #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64) |
4004 | 4014 | case 0x3e: /* V9 done/retry */ |
... | ... | @@ -4035,23 +4045,23 @@ static void disas_sparc_insn(DisasContext * dc) |
4035 | 4045 | { |
4036 | 4046 | unsigned int xop = GET_FIELD(insn, 7, 12); |
4037 | 4047 | rs1 = GET_FIELD(insn, 13, 17); |
4038 | - save_state(dc, cpu_T[2]); | |
4039 | - gen_movl_reg_TN(rs1, cpu_T[0]); | |
4048 | + save_state(dc, cpu_cond); | |
4049 | + gen_movl_reg_TN(rs1, cpu_src1); | |
4040 | 4050 | if (xop == 0x3c || xop == 0x3e) |
4041 | 4051 | { |
4042 | 4052 | rs2 = GET_FIELD(insn, 27, 31); |
4043 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
4053 | + gen_movl_reg_TN(rs2, cpu_src2); | |
4044 | 4054 | } |
4045 | 4055 | else if (IS_IMM) { /* immediate */ |
4046 | 4056 | rs2 = GET_FIELDs(insn, 19, 31); |
4047 | - tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2); | |
4057 | + tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2); | |
4048 | 4058 | } else { /* register */ |
4049 | 4059 | rs2 = GET_FIELD(insn, 27, 31); |
4050 | 4060 | #if defined(OPTIM) |
4051 | 4061 | if (rs2 != 0) { |
4052 | 4062 | #endif |
4053 | - gen_movl_reg_TN(rs2, cpu_T[1]); | |
4054 | - tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
4063 | + gen_movl_reg_TN(rs2, cpu_src2); | |
4064 | + tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2); | |
4055 | 4065 | #if defined(OPTIM) |
4056 | 4066 | } |
4057 | 4067 | #endif |
... | ... | @@ -4061,55 +4071,55 @@ static void disas_sparc_insn(DisasContext * dc) |
4061 | 4071 | (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) { |
4062 | 4072 | switch (xop) { |
4063 | 4073 | case 0x0: /* load unsigned word */ |
4064 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4065 | - ABI32_MASK(cpu_T[0]); | |
4066 | - tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4074 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4075 | + ABI32_MASK(cpu_addr); | |
4076 | + tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx); | |
4067 | 4077 | break; |
4068 | 4078 | case 0x1: /* load unsigned byte */ |
4069 | - ABI32_MASK(cpu_T[0]); | |
4070 | - tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4079 | + ABI32_MASK(cpu_addr); | |
4080 | + tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx); | |
4071 | 4081 | break; |
4072 | 4082 | case 0x2: /* load unsigned halfword */ |
4073 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(1)); | |
4074 | - ABI32_MASK(cpu_T[0]); | |
4075 | - tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4083 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1)); | |
4084 | + ABI32_MASK(cpu_addr); | |
4085 | + tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx); | |
4076 | 4086 | break; |
4077 | 4087 | case 0x3: /* load double word */ |
4078 | 4088 | if (rd & 1) |
4079 | 4089 | goto illegal_insn; |
4080 | 4090 | else { |
4081 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4082 | - ABI32_MASK(cpu_T[0]); | |
4083 | - tcg_gen_qemu_ld64(cpu_tmp64, cpu_T[0], dc->mem_idx); | |
4091 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4092 | + ABI32_MASK(cpu_addr); | |
4093 | + tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx); | |
4084 | 4094 | tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64); |
4085 | 4095 | tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL); |
4086 | 4096 | gen_movl_TN_reg(rd + 1, cpu_tmp0); |
4087 | 4097 | tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32); |
4088 | - tcg_gen_trunc_i64_tl(cpu_T[1], cpu_tmp64); | |
4089 | - tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0xffffffffULL); | |
4098 | + tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64); | |
4099 | + tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL); | |
4090 | 4100 | } |
4091 | 4101 | break; |
4092 | 4102 | case 0x9: /* load signed byte */ |
4093 | - ABI32_MASK(cpu_T[0]); | |
4094 | - tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4103 | + ABI32_MASK(cpu_addr); | |
4104 | + tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx); | |
4095 | 4105 | break; |
4096 | 4106 | case 0xa: /* load signed halfword */ |
4097 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(1)); | |
4098 | - ABI32_MASK(cpu_T[0]); | |
4099 | - tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4107 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1)); | |
4108 | + ABI32_MASK(cpu_addr); | |
4109 | + tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx); | |
4100 | 4110 | break; |
4101 | 4111 | case 0xd: /* ldstub -- XXX: should be atomically */ |
4102 | - ABI32_MASK(cpu_T[0]); | |
4103 | - tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4104 | - tcg_gen_qemu_st8(tcg_const_tl(0xff), cpu_T[0], dc->mem_idx); | |
4112 | + ABI32_MASK(cpu_addr); | |
4113 | + tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx); | |
4114 | + tcg_gen_qemu_st8(tcg_const_tl(0xff), cpu_addr, dc->mem_idx); | |
4105 | 4115 | break; |
4106 | 4116 | case 0x0f: /* swap register with memory. Also atomically */ |
4107 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4108 | - gen_movl_reg_TN(rd, cpu_T[1]); | |
4109 | - ABI32_MASK(cpu_T[0]); | |
4110 | - tcg_gen_qemu_ld32u(cpu_tmp32, cpu_T[0], dc->mem_idx); | |
4111 | - tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4112 | - tcg_gen_extu_i32_tl(cpu_T[1], cpu_tmp32); | |
4117 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4118 | + gen_movl_reg_TN(rd, cpu_val); | |
4119 | + ABI32_MASK(cpu_addr); | |
4120 | + tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx); | |
4121 | + tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx); | |
4122 | + tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32); | |
4113 | 4123 | break; |
4114 | 4124 | #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64) |
4115 | 4125 | case 0x10: /* load word alternate */ |
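Note (illustrative, not part of the commit): in the load hunk above, the doubleword load (case 0x3) fetches 64 bits into cpu_tmp64 and splits them across an even/odd register pair: the low word is written to rd + 1 immediately, and the high word is left in cpu_val for the common write-back to rd further down. A tiny standalone model of that split, with made-up values.

#include <stdint.h>
#include <stdio.h>

static void ldd_split(uint64_t mem64, uint32_t *rd_even, uint32_t *rd_odd)
{
    *rd_odd  = (uint32_t)(mem64 & 0xffffffffULL);  /* written to rd + 1 */
    *rd_even = (uint32_t)(mem64 >> 32);            /* written to rd     */
}

int main(void)
{
    uint32_t even, odd;
    ldd_split(0x1122334455667788ULL, &even, &odd);
    printf("rd=%08x rd+1=%08x\n", even, odd);      /* 11223344 55667788 */
    return 0;
}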
... | ... | @@ -4119,8 +4129,8 @@ static void disas_sparc_insn(DisasContext * dc) |
4119 | 4129 | if (!supervisor(dc)) |
4120 | 4130 | goto priv_insn; |
4121 | 4131 | #endif |
4122 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4123 | - gen_ld_asi(cpu_T[1], cpu_T[0], insn, 4, 0); | |
4132 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4133 | + gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0); | |
4124 | 4134 | break; |
4125 | 4135 | case 0x11: /* load unsigned byte alternate */ |
4126 | 4136 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4129,7 +4139,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4129 | 4139 | if (!supervisor(dc)) |
4130 | 4140 | goto priv_insn; |
4131 | 4141 | #endif |
4132 | - gen_ld_asi(cpu_T[1], cpu_T[0], insn, 1, 0); | |
4142 | + gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0); | |
4133 | 4143 | break; |
4134 | 4144 | case 0x12: /* load unsigned halfword alternate */ |
4135 | 4145 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4138,8 +4148,8 @@ static void disas_sparc_insn(DisasContext * dc) |
4138 | 4148 | if (!supervisor(dc)) |
4139 | 4149 | goto priv_insn; |
4140 | 4150 | #endif |
4141 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(1)); | |
4142 | - gen_ld_asi(cpu_T[1], cpu_T[0], insn, 2, 0); | |
4151 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1)); | |
4152 | + gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0); | |
4143 | 4153 | break; |
4144 | 4154 | case 0x13: /* load double word alternate */ |
4145 | 4155 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4150,8 +4160,8 @@ static void disas_sparc_insn(DisasContext * dc) |
4150 | 4160 | #endif |
4151 | 4161 | if (rd & 1) |
4152 | 4162 | goto illegal_insn; |
4153 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4154 | - gen_ldda_asi(cpu_tmp0, cpu_T[1], cpu_T[0], insn); | |
4163 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4164 | + gen_ldda_asi(cpu_tmp0, cpu_val, cpu_addr, insn); | |
4155 | 4165 | gen_movl_TN_reg(rd + 1, cpu_tmp0); |
4156 | 4166 | break; |
4157 | 4167 | case 0x19: /* load signed byte alternate */ |
... | ... | @@ -4161,7 +4171,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4161 | 4171 | if (!supervisor(dc)) |
4162 | 4172 | goto priv_insn; |
4163 | 4173 | #endif |
4164 | - gen_ld_asi(cpu_T[1], cpu_T[0], insn, 1, 1); | |
4174 | + gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1); | |
4165 | 4175 | break; |
4166 | 4176 | case 0x1a: /* load signed halfword alternate */ |
4167 | 4177 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4170,8 +4180,8 @@ static void disas_sparc_insn(DisasContext * dc) |
4170 | 4180 | if (!supervisor(dc)) |
4171 | 4181 | goto priv_insn; |
4172 | 4182 | #endif |
4173 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(1)); | |
4174 | - gen_ld_asi(cpu_T[1], cpu_T[0], insn, 2, 1); | |
4183 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1)); | |
4184 | + gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1); | |
4175 | 4185 | break; |
4176 | 4186 | case 0x1d: /* ldstuba -- XXX: should be atomically */ |
4177 | 4187 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4180,7 +4190,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4180 | 4190 | if (!supervisor(dc)) |
4181 | 4191 | goto priv_insn; |
4182 | 4192 | #endif |
4183 | - gen_ldstub_asi(cpu_T[1], cpu_T[0], insn); | |
4193 | + gen_ldstub_asi(cpu_val, cpu_addr, insn); | |
4184 | 4194 | break; |
4185 | 4195 | case 0x1f: /* swap reg with alt. memory. Also atomically */ |
4186 | 4196 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4189,9 +4199,9 @@ static void disas_sparc_insn(DisasContext * dc) |
4189 | 4199 | if (!supervisor(dc)) |
4190 | 4200 | goto priv_insn; |
4191 | 4201 | #endif |
4192 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4193 | - gen_movl_reg_TN(rd, cpu_T[1]); | |
4194 | - gen_swap_asi(cpu_T[1], cpu_T[0], insn); | |
4202 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4203 | + gen_movl_reg_TN(rd, cpu_val); | |
4204 | + gen_swap_asi(cpu_val, cpu_addr, insn); | |
4195 | 4205 | break; |
4196 | 4206 | |
4197 | 4207 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4203,39 +4213,39 @@ static void disas_sparc_insn(DisasContext * dc) |
4203 | 4213 | #endif |
4204 | 4214 | #ifdef TARGET_SPARC64 |
4205 | 4215 | case 0x08: /* V9 ldsw */ |
4206 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4207 | - ABI32_MASK(cpu_T[0]); | |
4208 | - tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4216 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4217 | + ABI32_MASK(cpu_addr); | |
4218 | + tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx); | |
4209 | 4219 | break; |
4210 | 4220 | case 0x0b: /* V9 ldx */ |
4211 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4212 | - ABI32_MASK(cpu_T[0]); | |
4213 | - tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4221 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4222 | + ABI32_MASK(cpu_addr); | |
4223 | + tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx); | |
4214 | 4224 | break; |
4215 | 4225 | case 0x18: /* V9 ldswa */ |
4216 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4217 | - gen_ld_asi(cpu_T[1], cpu_T[0], insn, 4, 1); | |
4226 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4227 | + gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1); | |
4218 | 4228 | break; |
4219 | 4229 | case 0x1b: /* V9 ldxa */ |
4220 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4221 | - gen_ld_asi(cpu_T[1], cpu_T[0], insn, 8, 0); | |
4230 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4231 | + gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0); | |
4222 | 4232 | break; |
4223 | 4233 | case 0x2d: /* V9 prefetch, no effect */ |
4224 | 4234 | goto skip_move; |
4225 | 4235 | case 0x30: /* V9 ldfa */ |
4226 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4227 | - gen_ldf_asi(cpu_T[0], insn, 4, rd); | |
4236 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4237 | + gen_ldf_asi(cpu_addr, insn, 4, rd); | |
4228 | 4238 | goto skip_move; |
4229 | 4239 | case 0x33: /* V9 lddfa */ |
4230 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4231 | - gen_ldf_asi(cpu_T[0], insn, 8, DFPREG(rd)); | |
4240 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4241 | + gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd)); | |
4232 | 4242 | goto skip_move; |
4233 | 4243 | case 0x3d: /* V9 prefetcha, no effect */ |
4234 | 4244 | goto skip_move; |
4235 | 4245 | case 0x32: /* V9 ldqfa */ |
4236 | 4246 | #if defined(CONFIG_USER_ONLY) |
4237 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4238 | - gen_ldf_asi(cpu_T[0], insn, 16, QFPREG(rd)); | |
4247 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4248 | + gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd)); | |
4239 | 4249 | goto skip_move; |
4240 | 4250 | #else |
4241 | 4251 | goto nfpu_insn; |
... | ... | @@ -4244,30 +4254,30 @@ static void disas_sparc_insn(DisasContext * dc) |
4244 | 4254 | default: |
4245 | 4255 | goto illegal_insn; |
4246 | 4256 | } |
4247 | - gen_movl_TN_reg(rd, cpu_T[1]); | |
4257 | + gen_movl_TN_reg(rd, cpu_val); | |
4248 | 4258 | #ifdef TARGET_SPARC64 |
4249 | 4259 | skip_move: ; |
4250 | 4260 | #endif |
4251 | 4261 | } else if (xop >= 0x20 && xop < 0x24) { |
4252 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
4262 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
4253 | 4263 | goto jmp_insn; |
4254 | 4264 | switch (xop) { |
4255 | 4265 | case 0x20: /* load fpreg */ |
4256 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4257 | - tcg_gen_qemu_ld32u(cpu_tmp32, cpu_T[0], dc->mem_idx); | |
4266 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4267 | + tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx); | |
4258 | 4268 | tcg_gen_st_i32(cpu_tmp32, cpu_env, |
4259 | 4269 | offsetof(CPUState, fpr[rd])); |
4260 | 4270 | break; |
4261 | 4271 | case 0x21: /* load fsr */ |
4262 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4263 | - tcg_gen_qemu_ld32u(cpu_tmp32, cpu_T[0], dc->mem_idx); | |
4272 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4273 | + tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx); | |
4264 | 4274 | tcg_gen_st_i32(cpu_tmp32, cpu_env, |
4265 | 4275 | offsetof(CPUState, ft0)); |
4266 | 4276 | tcg_gen_helper_0_0(helper_ldfsr); |
4267 | 4277 | break; |
4268 | 4278 | case 0x22: /* load quad fpreg */ |
4269 | 4279 | #if defined(CONFIG_USER_ONLY) |
4270 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4280 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4271 | 4281 | gen_op_ldst(ldqf); |
4272 | 4282 | gen_op_store_QT0_fpr(QFPREG(rd)); |
4273 | 4283 | break; |
... | ... | @@ -4275,7 +4285,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4275 | 4285 | goto nfpu_insn; |
4276 | 4286 | #endif |
4277 | 4287 | case 0x23: /* load double fpreg */ |
4278 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4288 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4279 | 4289 | gen_op_ldst(lddf); |
4280 | 4290 | gen_op_store_DT0_fpr(DFPREG(rd)); |
4281 | 4291 | break; |
... | ... | @@ -4284,21 +4294,21 @@ static void disas_sparc_insn(DisasContext * dc) |
4284 | 4294 | } |
4285 | 4295 | } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \ |
4286 | 4296 | xop == 0xe || xop == 0x1e) { |
4287 | - gen_movl_reg_TN(rd, cpu_T[1]); | |
4297 | + gen_movl_reg_TN(rd, cpu_val); | |
4288 | 4298 | switch (xop) { |
4289 | 4299 | case 0x4: /* store word */ |
4290 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4291 | - ABI32_MASK(cpu_T[0]); | |
4292 | - tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4300 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4301 | + ABI32_MASK(cpu_addr); | |
4302 | + tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx); | |
4293 | 4303 | break; |
4294 | 4304 | case 0x5: /* store byte */ |
4295 | - ABI32_MASK(cpu_T[0]); | |
4296 | - tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4305 | + ABI32_MASK(cpu_addr); | |
4306 | + tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx); | |
4297 | 4307 | break; |
4298 | 4308 | case 0x6: /* store halfword */ |
4299 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(1)); | |
4300 | - ABI32_MASK(cpu_T[0]); | |
4301 | - tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4309 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1)); | |
4310 | + ABI32_MASK(cpu_addr); | |
4311 | + tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx); | |
4302 | 4312 | break; |
4303 | 4313 | case 0x7: /* store double word */ |
4304 | 4314 | if (rd & 1) |
... | ... | @@ -4307,17 +4317,17 @@ static void disas_sparc_insn(DisasContext * dc) |
4307 | 4317 | else { |
4308 | 4318 | TCGv r_low; |
4309 | 4319 | |
4310 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4320 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4311 | 4321 | r_low = tcg_temp_new(TCG_TYPE_I32); |
4312 | 4322 | gen_movl_reg_TN(rd + 1, r_low); |
4313 | - tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_T[1], | |
4323 | + tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val, | |
4314 | 4324 | r_low); |
4315 | - tcg_gen_qemu_st64(cpu_tmp64, cpu_T[0], dc->mem_idx); | |
4325 | + tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx); | |
4316 | 4326 | } |
4317 | 4327 | #else /* __i386__ */ |
4318 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4319 | - flush_cond(dc, cpu_T[2]); | |
4320 | - gen_movl_reg_TN(rd + 1, cpu_T[2]); | |
4328 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4329 | + flush_cond(dc, cpu_cond); | |
4330 | + gen_movl_reg_TN(rd + 1, cpu_cond); | |
4321 | 4331 | gen_op_ldst(std); |
4322 | 4332 | #endif /* __i386__ */ |
4323 | 4333 | break; |
... | ... | @@ -4329,8 +4339,8 @@ static void disas_sparc_insn(DisasContext * dc) |
4329 | 4339 | if (!supervisor(dc)) |
4330 | 4340 | goto priv_insn; |
4331 | 4341 | #endif |
4332 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4333 | - gen_st_asi(cpu_T[1], cpu_T[0], insn, 4); | |
4342 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4343 | + gen_st_asi(cpu_val, cpu_addr, insn, 4); | |
4334 | 4344 | break; |
4335 | 4345 | case 0x15: /* store byte alternate */ |
4336 | 4346 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4339,7 +4349,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4339 | 4349 | if (!supervisor(dc)) |
4340 | 4350 | goto priv_insn; |
4341 | 4351 | #endif |
4342 | - gen_st_asi(cpu_T[1], cpu_T[0], insn, 1); | |
4352 | + gen_st_asi(cpu_val, cpu_addr, insn, 1); | |
4343 | 4353 | break; |
4344 | 4354 | case 0x16: /* store halfword alternate */ |
4345 | 4355 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4348,8 +4358,8 @@ static void disas_sparc_insn(DisasContext * dc) |
4348 | 4358 | if (!supervisor(dc)) |
4349 | 4359 | goto priv_insn; |
4350 | 4360 | #endif |
4351 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(1)); | |
4352 | - gen_st_asi(cpu_T[1], cpu_T[0], insn, 2); | |
4361 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1)); | |
4362 | + gen_st_asi(cpu_val, cpu_addr, insn, 2); | |
4353 | 4363 | break; |
4354 | 4364 | case 0x17: /* store double word alternate */ |
4355 | 4365 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4361,49 +4371,49 @@ static void disas_sparc_insn(DisasContext * dc) |
4361 | 4371 | if (rd & 1) |
4362 | 4372 | goto illegal_insn; |
4363 | 4373 | else { |
4364 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4365 | - gen_stda_asi(cpu_T[1], cpu_T[0], insn, rd); | |
4374 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4375 | + gen_stda_asi(cpu_val, cpu_addr, insn, rd); | |
4366 | 4376 | } |
4367 | 4377 | break; |
4368 | 4378 | #endif |
4369 | 4379 | #ifdef TARGET_SPARC64 |
4370 | 4380 | case 0x0e: /* V9 stx */ |
4371 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4372 | - ABI32_MASK(cpu_T[0]); | |
4373 | - tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx); | |
4381 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4382 | + ABI32_MASK(cpu_addr); | |
4383 | + tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx); | |
4374 | 4384 | break; |
4375 | 4385 | case 0x1e: /* V9 stxa */ |
4376 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4377 | - gen_st_asi(cpu_T[1], cpu_T[0], insn, 8); | |
4386 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4387 | + gen_st_asi(cpu_val, cpu_addr, insn, 8); | |
4378 | 4388 | break; |
4379 | 4389 | #endif |
4380 | 4390 | default: |
4381 | 4391 | goto illegal_insn; |
4382 | 4392 | } |
4383 | 4393 | } else if (xop > 0x23 && xop < 0x28) { |
4384 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
4394 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
4385 | 4395 | goto jmp_insn; |
4386 | 4396 | switch (xop) { |
4387 | 4397 | case 0x24: /* store fpreg */ |
4388 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4398 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4389 | 4399 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, |
4390 | 4400 | offsetof(CPUState, fpr[rd])); |
4391 | - tcg_gen_qemu_st32(cpu_tmp32, cpu_T[0], dc->mem_idx); | |
4401 | + tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx); | |
4392 | 4402 | break; |
4393 | 4403 | case 0x25: /* stfsr, V9 stxfsr */ |
4394 | 4404 | #ifdef CONFIG_USER_ONLY |
4395 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4405 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4396 | 4406 | #endif |
4397 | 4407 | tcg_gen_helper_0_0(helper_stfsr); |
4398 | 4408 | tcg_gen_ld_i32(cpu_tmp32, cpu_env, |
4399 | 4409 | offsetof(CPUState, ft0)); |
4400 | - tcg_gen_qemu_st32(cpu_tmp32, cpu_T[0], dc->mem_idx); | |
4410 | + tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx); | |
4401 | 4411 | break; |
4402 | 4412 | case 0x26: |
4403 | 4413 | #ifdef TARGET_SPARC64 |
4404 | 4414 | #if defined(CONFIG_USER_ONLY) |
4405 | 4415 | /* V9 stqf, store quad fpreg */ |
4406 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4416 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4407 | 4417 | gen_op_load_fpr_QT0(QFPREG(rd)); |
4408 | 4418 | gen_op_ldst(stqf); |
4409 | 4419 | break; |
... | ... | @@ -4417,13 +4427,13 @@ static void disas_sparc_insn(DisasContext * dc) |
4417 | 4427 | #else |
4418 | 4428 | if (!supervisor(dc)) |
4419 | 4429 | goto priv_insn; |
4420 | - if (gen_trap_ifnofpu(dc, cpu_T[2])) | |
4430 | + if (gen_trap_ifnofpu(dc, cpu_cond)) | |
4421 | 4431 | goto jmp_insn; |
4422 | 4432 | goto nfq_insn; |
4423 | 4433 | #endif |
4424 | 4434 | #endif |
4425 | 4435 | case 0x27: |
4426 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4436 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4427 | 4437 | gen_op_load_fpr_DT0(DFPREG(rd)); |
4428 | 4438 | gen_op_ldst(stdf); |
4429 | 4439 | break; |
... | ... | @@ -4434,33 +4444,33 @@ static void disas_sparc_insn(DisasContext * dc) |
4434 | 4444 | switch (xop) { |
4435 | 4445 | #ifdef TARGET_SPARC64 |
4436 | 4446 | case 0x34: /* V9 stfa */ |
4437 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4447 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4438 | 4448 | gen_op_load_fpr_FT0(rd); |
4439 | - gen_stf_asi(cpu_T[0], insn, 4, rd); | |
4449 | + gen_stf_asi(cpu_addr, insn, 4, rd); | |
4440 | 4450 | break; |
4441 | 4451 | case 0x36: /* V9 stqfa */ |
4442 | 4452 | #if defined(CONFIG_USER_ONLY) |
4443 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4453 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4444 | 4454 | gen_op_load_fpr_QT0(QFPREG(rd)); |
4445 | - gen_stf_asi(cpu_T[0], insn, 16, QFPREG(rd)); | |
4455 | + gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd)); | |
4446 | 4456 | break; |
4447 | 4457 | #else |
4448 | 4458 | goto nfpu_insn; |
4449 | 4459 | #endif |
4450 | 4460 | case 0x37: /* V9 stdfa */ |
4451 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4461 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4452 | 4462 | gen_op_load_fpr_DT0(DFPREG(rd)); |
4453 | - gen_stf_asi(cpu_T[0], insn, 8, DFPREG(rd)); | |
4463 | + gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd)); | |
4454 | 4464 | break; |
4455 | 4465 | case 0x3c: /* V9 casa */ |
4456 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(3)); | |
4457 | - gen_cas_asi(cpu_T[1], cpu_T[0], cpu_T[1], insn, rd); | |
4458 | - gen_movl_TN_reg(rd, cpu_T[1]); | |
4466 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3)); | |
4467 | + gen_cas_asi(cpu_val, cpu_addr, cpu_val, insn, rd); | |
4468 | + gen_movl_TN_reg(rd, cpu_val); | |
4459 | 4469 | break; |
4460 | 4470 | case 0x3e: /* V9 casxa */ |
4461 | - tcg_gen_helper_0_2(helper_check_align, cpu_T[0], tcg_const_i32(7)); | |
4462 | - gen_casx_asi(cpu_T[1], cpu_T[0], cpu_T[1], insn, rd); | |
4463 | - gen_movl_TN_reg(rd, cpu_T[1]); | |
4471 | + tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7)); | |
4472 | + gen_casx_asi(cpu_val, cpu_addr, cpu_val, insn, rd); | |
4473 | + gen_movl_TN_reg(rd, cpu_val); | |
4464 | 4474 | break; |
4465 | 4475 | #else |
4466 | 4476 | case 0x34: /* stc */ |
... | ... | @@ -4484,7 +4494,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4484 | 4494 | gen_op_next_insn(); |
4485 | 4495 | } else if (dc->npc == JUMP_PC) { |
4486 | 4496 | /* we can do a static jump */ |
4487 | - gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]); | |
4497 | + gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond); | |
4488 | 4498 | dc->is_br = 1; |
4489 | 4499 | } else { |
4490 | 4500 | dc->pc = dc->npc; |
... | ... | @@ -4493,24 +4503,24 @@ static void disas_sparc_insn(DisasContext * dc) |
4493 | 4503 | jmp_insn: |
4494 | 4504 | return; |
4495 | 4505 | illegal_insn: |
4496 | - save_state(dc, cpu_T[2]); | |
4506 | + save_state(dc, cpu_cond); | |
4497 | 4507 | tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_ILL_INSN)); |
4498 | 4508 | dc->is_br = 1; |
4499 | 4509 | return; |
4500 | 4510 | #if !defined(CONFIG_USER_ONLY) |
4501 | 4511 | priv_insn: |
4502 | - save_state(dc, cpu_T[2]); | |
4512 | + save_state(dc, cpu_cond); | |
4503 | 4513 | tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_PRIV_INSN)); |
4504 | 4514 | dc->is_br = 1; |
4505 | 4515 | return; |
4506 | 4516 | nfpu_insn: |
4507 | - save_state(dc, cpu_T[2]); | |
4517 | + save_state(dc, cpu_cond); | |
4508 | 4518 | gen_op_fpexception_im(FSR_FTT_UNIMPFPOP); |
4509 | 4519 | dc->is_br = 1; |
4510 | 4520 | return; |
4511 | 4521 | #ifndef TARGET_SPARC64 |
4512 | 4522 | nfq_insn: |
4513 | - save_state(dc, cpu_T[2]); | |
4523 | + save_state(dc, cpu_cond); | |
4514 | 4524 | gen_op_fpexception_im(FSR_FTT_SEQ_ERROR); |
4515 | 4525 | dc->is_br = 1; |
4516 | 4526 | return; |
... | ... | @@ -4518,7 +4528,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4518 | 4528 | #endif |
4519 | 4529 | #ifndef TARGET_SPARC64 |
4520 | 4530 | ncp_insn: |
4521 | - save_state(dc, cpu_T[2]); | |
4531 | + save_state(dc, cpu_cond); | |
4522 | 4532 | tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_NCP_INSN)); |
4523 | 4533 | dc->is_br = 1; |
4524 | 4534 | return; |
... | ... | @@ -4551,12 +4561,14 @@ static inline int gen_intermediate_code_internal(TranslationBlock * tb, |
4551 | 4561 | cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32); |
4552 | 4562 | cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64); |
4553 | 4563 | |
4564 | + cpu_cond = cpu_T[2]; | |
4565 | + | |
4554 | 4566 | do { |
4555 | 4567 | if (env->nb_breakpoints > 0) { |
4556 | 4568 | for(j = 0; j < env->nb_breakpoints; j++) { |
4557 | 4569 | if (env->breakpoints[j] == dc->pc) { |
4558 | 4570 | if (dc->pc != pc_start) |
4559 | - save_state(dc, cpu_T[2]); | |
4571 | + save_state(dc, cpu_cond); | |
4560 | 4572 | tcg_gen_helper_0_0(helper_debug); |
4561 | 4573 | tcg_gen_exit_tb(0); |
4562 | 4574 | dc->is_br = 1; |
... | ... | @@ -4608,7 +4620,7 @@ static inline int gen_intermediate_code_internal(TranslationBlock * tb, |
4608 | 4620 | } else { |
4609 | 4621 | if (dc->pc != DYNAMIC_PC) |
4610 | 4622 | tcg_gen_movi_tl(cpu_pc, dc->pc); |
4611 | - save_npc(dc, cpu_T[2]); | |
4623 | + save_npc(dc, cpu_cond); | |
4612 | 4624 | tcg_gen_exit_tb(0); |
4613 | 4625 | } |
4614 | 4626 | } |
... | ...