Commit 2f5680ee3307b05a7e49ead7d2cff26cf64f9386
1 parent ccccab03
Remove some legacy definitions
git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@4108 c046a42c-6fe2-441c-8c8c-71466251a162
Showing 1 changed file with 209 additions and 278 deletions
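
The removed "legacy definitions" are thin inline wrappers around single TCG operations; every call site is rewritten to use the TCG call directly. A sketch of the mapping, assembled from the hunks below (summary for orientation only — names and arguments are those of the removed wrappers, not new code in the commit):

    /* removed wrapper                  direct form now used at call sites            */
    gen_movl_reg_T0(reg);        /* -> */  gen_movl_reg_TN(reg, cpu_T[0]);
    gen_movl_T0_reg(reg);        /* -> */  gen_movl_TN_reg(reg, cpu_T[0]);
    gen_movl_simm_T1(val);       /* -> */  tcg_gen_movi_tl(cpu_T[1], val);
    gen_op_add_T1_T0();          /* -> */  tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_jmp_im(pc);              /* -> */  tcg_gen_movi_tl(cpu_pc, pc);
    gen_movl_npc_im(npc);        /* -> */  tcg_gen_movi_tl(cpu_npc, npc);
    gen_op_exception(exception); /* -> */  tcg_gen_helper_0_1(raise_exception, tcg_const_i32(exception));
    gen_op_udiv_T1_T0();         /* -> */  tcg_gen_helper_1_2(helper_udiv, cpu_T[0], cpu_T[0], cpu_T[1]);
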
target-sparc/translate.c
... | ... | @@ -112,8 +112,6 @@ static int sign_extend(int x, int len) |
112 | 112 | |
113 | 113 | #define IS_IMM (insn & (1<<13)) |
114 | 114 | |
115 | -static void disas_sparc_insn(DisasContext * dc); | |
116 | - | |
117 | 115 | /* floating point registers moves */ |
118 | 116 | static void gen_op_load_fpr_FT0(unsigned int src) |
119 | 117 | { |
... | ... | @@ -236,11 +234,6 @@ OP_LD_TABLE(lddf); |
236 | 234 | #define ABI32_MASK(addr) |
237 | 235 | #endif |
238 | 236 | |
239 | -static inline void gen_movl_simm_T1(int32_t val) | |
240 | -{ | |
241 | - tcg_gen_movi_tl(cpu_T[1], val); | |
242 | -} | |
243 | - | |
244 | 237 | static inline void gen_movl_reg_TN(int reg, TCGv tn) |
245 | 238 | { |
246 | 239 | if (reg == 0) |
... | ... | @@ -252,23 +245,6 @@ static inline void gen_movl_reg_TN(int reg, TCGv tn) |
252 | 245 | } |
253 | 246 | } |
254 | 247 | |
255 | -static inline void gen_movl_reg_T0(int reg) | |
256 | -{ | |
257 | - gen_movl_reg_TN(reg, cpu_T[0]); | |
258 | -} | |
259 | - | |
260 | -static inline void gen_movl_reg_T1(int reg) | |
261 | -{ | |
262 | - gen_movl_reg_TN(reg, cpu_T[1]); | |
263 | -} | |
264 | - | |
265 | -#ifdef __i386__ | |
266 | -static inline void gen_movl_reg_T2(int reg) | |
267 | -{ | |
268 | - gen_movl_reg_TN(reg, cpu_T[2]); | |
269 | -} | |
270 | - | |
271 | -#endif /* __i386__ */ | |
272 | 248 | static inline void gen_movl_TN_reg(int reg, TCGv tn) |
273 | 249 | { |
274 | 250 | if (reg == 0) |
... | ... | @@ -280,63 +256,6 @@ static inline void gen_movl_TN_reg(int reg, TCGv tn) |
280 | 256 | } |
281 | 257 | } |
282 | 258 | |
283 | -static inline void gen_movl_T0_reg(int reg) | |
284 | -{ | |
285 | - gen_movl_TN_reg(reg, cpu_T[0]); | |
286 | -} | |
287 | - | |
288 | -static inline void gen_movl_T1_reg(int reg) | |
289 | -{ | |
290 | - gen_movl_TN_reg(reg, cpu_T[1]); | |
291 | -} | |
292 | - | |
293 | -static inline void gen_op_movl_T0_env(size_t offset) | |
294 | -{ | |
295 | - tcg_gen_ld_i32(cpu_tmp32, cpu_env, offset); | |
296 | - tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
297 | -} | |
298 | - | |
299 | -static inline void gen_op_movl_env_T0(size_t offset) | |
300 | -{ | |
301 | - tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
302 | - tcg_gen_st_i32(cpu_tmp32, cpu_env, offset); | |
303 | -} | |
304 | - | |
305 | -static inline void gen_op_movtl_T0_env(size_t offset) | |
306 | -{ | |
307 | - tcg_gen_ld_tl(cpu_T[0], cpu_env, offset); | |
308 | -} | |
309 | - | |
310 | -static inline void gen_op_movtl_env_T0(size_t offset) | |
311 | -{ | |
312 | - tcg_gen_st_tl(cpu_T[0], cpu_env, offset); | |
313 | -} | |
314 | - | |
315 | -static inline void gen_op_add_T1_T0(void) | |
316 | -{ | |
317 | - tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
318 | -} | |
319 | - | |
320 | -static inline void gen_op_or_T1_T0(void) | |
321 | -{ | |
322 | - tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
323 | -} | |
324 | - | |
325 | -static inline void gen_op_xor_T1_T0(void) | |
326 | -{ | |
327 | - tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
328 | -} | |
329 | - | |
330 | -static inline void gen_jmp_im(target_ulong pc) | |
331 | -{ | |
332 | - tcg_gen_movi_tl(cpu_pc, pc); | |
333 | -} | |
334 | - | |
335 | -static inline void gen_movl_npc_im(target_ulong npc) | |
336 | -{ | |
337 | - tcg_gen_movi_tl(cpu_npc, npc); | |
338 | -} | |
339 | - | |
340 | 259 | static inline void gen_goto_tb(DisasContext *s, int tb_num, |
341 | 260 | target_ulong pc, target_ulong npc) |
342 | 261 | { |
... | ... | @@ -347,13 +266,13 @@ static inline void gen_goto_tb(DisasContext *s, int tb_num, |
347 | 266 | (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) { |
348 | 267 | /* jump to same page: we can use a direct jump */ |
349 | 268 | tcg_gen_goto_tb(tb_num); |
350 | - gen_jmp_im(pc); | |
351 | - gen_movl_npc_im(npc); | |
269 | + tcg_gen_movi_tl(cpu_pc, pc); | |
270 | + tcg_gen_movi_tl(cpu_npc, npc); | |
352 | 271 | tcg_gen_exit_tb((long)tb + tb_num); |
353 | 272 | } else { |
354 | 273 | /* jump to another page: currently not optimized */ |
355 | - gen_jmp_im(pc); | |
356 | - gen_movl_npc_im(npc); | |
274 | + tcg_gen_movi_tl(cpu_pc, pc); | |
275 | + tcg_gen_movi_tl(cpu_npc, npc); | |
357 | 276 | tcg_gen_exit_tb(0); |
358 | 277 | } |
359 | 278 | } |
... | ... | @@ -387,12 +306,6 @@ static inline void gen_mov_reg_C(TCGv reg, TCGv src) |
387 | 306 | tcg_gen_andi_tl(reg, reg, 0x1); |
388 | 307 | } |
389 | 308 | |
390 | -static inline void gen_op_exception(int exception) | |
391 | -{ | |
392 | - tcg_gen_movi_i32(cpu_tmp32, exception); | |
393 | - tcg_gen_helper_0_1(raise_exception, cpu_tmp32); | |
394 | -} | |
395 | - | |
396 | 309 | static inline void gen_cc_clear(void) |
397 | 310 | { |
398 | 311 | tcg_gen_movi_i32(cpu_psr, 0); |
... | ... | @@ -518,7 +431,7 @@ static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2) |
518 | 431 | tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0); |
519 | 432 | tcg_gen_andi_tl(r_temp, r_temp, (1 << 31)); |
520 | 433 | tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1); |
521 | - gen_op_exception(TT_TOVF); | |
434 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF)); | |
522 | 435 | gen_set_label(l1); |
523 | 436 | #ifdef TARGET_SPARC64 |
524 | 437 | { |
... | ... | @@ -531,7 +444,7 @@ static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2) |
531 | 444 | tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0); |
532 | 445 | tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63)); |
533 | 446 | tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2); |
534 | - gen_op_exception(TT_TOVF); | |
447 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF)); | |
535 | 448 | gen_set_label(l2); |
536 | 449 | } |
537 | 450 | #endif |
... | ... | @@ -558,7 +471,7 @@ static inline void gen_tag_tv(TCGv src1, TCGv src2) |
558 | 471 | tcg_gen_or_tl(cpu_tmp0, src1, src2); |
559 | 472 | tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3); |
560 | 473 | tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1); |
561 | - gen_op_exception(TT_TOVF); | |
474 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF)); | |
562 | 475 | gen_set_label(l1); |
563 | 476 | } |
564 | 477 | |
... | ... | @@ -685,7 +598,7 @@ static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2) |
685 | 598 | tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0); |
686 | 599 | tcg_gen_andi_tl(r_temp, r_temp, (1 << 31)); |
687 | 600 | tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1); |
688 | - gen_op_exception(TT_TOVF); | |
601 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF)); | |
689 | 602 | gen_set_label(l1); |
690 | 603 | #ifdef TARGET_SPARC64 |
691 | 604 | { |
... | ... | @@ -697,7 +610,7 @@ static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2) |
697 | 610 | tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0); |
698 | 611 | tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63)); |
699 | 612 | tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2); |
700 | - gen_op_exception(TT_TOVF); | |
613 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF)); | |
701 | 614 | gen_set_label(l2); |
702 | 615 | } |
703 | 616 | #endif |
... | ... | @@ -852,16 +765,6 @@ static inline void gen_op_smul_T1_T0(void) |
852 | 765 | tcg_gen_discard_i64(r_temp2); |
853 | 766 | } |
854 | 767 | |
855 | -static inline void gen_op_udiv_T1_T0(void) | |
856 | -{ | |
857 | - tcg_gen_helper_1_2(helper_udiv, cpu_T[0], cpu_T[0], cpu_T[1]); | |
858 | -} | |
859 | - | |
860 | -static inline void gen_op_sdiv_T1_T0(void) | |
861 | -{ | |
862 | - tcg_gen_helper_1_2(helper_sdiv, cpu_T[0], cpu_T[0], cpu_T[1]); | |
863 | -} | |
864 | - | |
865 | 768 | #ifdef TARGET_SPARC64 |
866 | 769 | static inline void gen_trap_ifdivzero_tl(TCGv divisor) |
867 | 770 | { |
... | ... | @@ -869,7 +772,7 @@ static inline void gen_trap_ifdivzero_tl(TCGv divisor) |
869 | 772 | |
870 | 773 | l1 = gen_new_label(); |
871 | 774 | tcg_gen_brcond_tl(TCG_COND_NE, divisor, tcg_const_tl(0), l1); |
872 | - gen_op_exception(TT_DIV_ZERO); | |
775 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_DIV_ZERO)); | |
873 | 776 | gen_set_label(l1); |
874 | 777 | } |
875 | 778 | |
... | ... | @@ -1210,12 +1113,6 @@ static inline void gen_branch_a(DisasContext *dc, target_ulong pc1, |
1210 | 1113 | gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8); |
1211 | 1114 | } |
1212 | 1115 | |
1213 | -static inline void gen_branch(DisasContext *dc, target_ulong pc, | |
1214 | - target_ulong npc) | |
1215 | -{ | |
1216 | - gen_goto_tb(dc, 0, pc, npc); | |
1217 | -} | |
1218 | - | |
1219 | 1116 | static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2, |
1220 | 1117 | TCGv r_cond) |
1221 | 1118 | { |
... | ... | @@ -1226,11 +1123,11 @@ static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2, |
1226 | 1123 | |
1227 | 1124 | tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1); |
1228 | 1125 | |
1229 | - gen_movl_npc_im(npc1); | |
1126 | + tcg_gen_movi_tl(cpu_npc, npc1); | |
1230 | 1127 | tcg_gen_br(l2); |
1231 | 1128 | |
1232 | 1129 | gen_set_label(l1); |
1233 | - gen_movl_npc_im(npc2); | |
1130 | + tcg_gen_movi_tl(cpu_npc, npc2); | |
1234 | 1131 | gen_set_label(l2); |
1235 | 1132 | } |
1236 | 1133 | |
... | ... | @@ -1249,13 +1146,13 @@ static inline void save_npc(DisasContext * dc) |
1249 | 1146 | gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]); |
1250 | 1147 | dc->npc = DYNAMIC_PC; |
1251 | 1148 | } else if (dc->npc != DYNAMIC_PC) { |
1252 | - gen_movl_npc_im(dc->npc); | |
1149 | + tcg_gen_movi_tl(cpu_npc, dc->npc); | |
1253 | 1150 | } |
1254 | 1151 | } |
1255 | 1152 | |
1256 | 1153 | static inline void save_state(DisasContext * dc) |
1257 | 1154 | { |
1258 | - gen_jmp_im(dc->pc); | |
1155 | + tcg_gen_movi_tl(cpu_pc, dc->pc); | |
1259 | 1156 | save_npc(dc); |
1260 | 1157 | } |
1261 | 1158 | |
... | ... | @@ -1660,7 +1557,7 @@ static inline void gen_op_fpexception_im(int fsr_flags) |
1660 | 1557 | { |
1661 | 1558 | tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~FSR_FTT_MASK); |
1662 | 1559 | tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags); |
1663 | - gen_op_exception(TT_FP_EXCP); | |
1560 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_FP_EXCP)); | |
1664 | 1561 | } |
1665 | 1562 | |
1666 | 1563 | static int gen_trap_ifnofpu(DisasContext * dc) |
... | ... | @@ -1668,7 +1565,7 @@ static int gen_trap_ifnofpu(DisasContext * dc) |
1668 | 1565 | #if !defined(CONFIG_USER_ONLY) |
1669 | 1566 | if (!dc->fpu_enabled) { |
1670 | 1567 | save_state(dc); |
1671 | - gen_op_exception(TT_NFPU_INSN); | |
1568 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_NFPU_INSN)); | |
1672 | 1569 | dc->is_br = 1; |
1673 | 1570 | return 1; |
1674 | 1571 | } |
... | ... | @@ -1947,7 +1844,7 @@ static void disas_sparc_insn(DisasContext * dc) |
1947 | 1844 | target = sign_extend(target, 16); |
1948 | 1845 | target <<= 2; |
1949 | 1846 | rs1 = GET_FIELD(insn, 13, 17); |
1950 | - gen_movl_reg_T0(rs1); | |
1847 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
1951 | 1848 | do_branch_reg(dc, target, insn); |
1952 | 1849 | goto jmp_insn; |
1953 | 1850 | } |
... | ... | @@ -1993,7 +1890,7 @@ static void disas_sparc_insn(DisasContext * dc) |
1993 | 1890 | #endif |
1994 | 1891 | uint32_t value = GET_FIELD(insn, 10, 31); |
1995 | 1892 | tcg_gen_movi_tl(cpu_T[0], value << 10); |
1996 | - gen_movl_T0_reg(rd); | |
1893 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
1997 | 1894 | #if defined(OPTIM) |
1998 | 1895 | } |
1999 | 1896 | #endif |
... | ... | @@ -2022,7 +1919,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2022 | 1919 | int cond; |
2023 | 1920 | |
2024 | 1921 | rs1 = GET_FIELD(insn, 13, 17); |
2025 | - gen_movl_reg_T0(rs1); | |
1922 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
2026 | 1923 | if (IS_IMM) { |
2027 | 1924 | rs2 = GET_FIELD(insn, 25, 31); |
2028 | 1925 | tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2); |
... | ... | @@ -2031,8 +1928,8 @@ static void disas_sparc_insn(DisasContext * dc) |
2031 | 1928 | #if defined(OPTIM) |
2032 | 1929 | if (rs2 != 0) { |
2033 | 1930 | #endif |
2034 | - gen_movl_reg_T1(rs2); | |
2035 | - gen_op_add_T1_T0(); | |
1931 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
1932 | + tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2036 | 1933 | #if defined(OPTIM) |
2037 | 1934 | } |
2038 | 1935 | #endif |
... | ... | @@ -2079,17 +1976,18 @@ static void disas_sparc_insn(DisasContext * dc) |
2079 | 1976 | SPARCv8 manual, rdy on the |
2080 | 1977 | microSPARC II */ |
2081 | 1978 | #endif |
2082 | - gen_op_movtl_T0_env(offsetof(CPUSPARCState, y)); | |
2083 | - gen_movl_T0_reg(rd); | |
1979 | + tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, y)); | |
1980 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2084 | 1981 | break; |
2085 | 1982 | #ifdef TARGET_SPARC64 |
2086 | 1983 | case 0x2: /* V9 rdccr */ |
2087 | 1984 | tcg_gen_helper_1_0(helper_rdccr, cpu_T[0]); |
2088 | - gen_movl_T0_reg(rd); | |
1985 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2089 | 1986 | break; |
2090 | 1987 | case 0x3: /* V9 rdasi */ |
2091 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, asi)); | |
2092 | - gen_movl_T0_reg(rd); | |
1988 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, asi)); | |
1989 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
1990 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2093 | 1991 | break; |
2094 | 1992 | case 0x4: /* V9 rdtick */ |
2095 | 1993 | { |
... | ... | @@ -2100,29 +1998,30 @@ static void disas_sparc_insn(DisasContext * dc) |
2100 | 1998 | offsetof(CPUState, tick)); |
2101 | 1999 | tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0], |
2102 | 2000 | r_tickptr); |
2103 | - gen_movl_T0_reg(rd); | |
2001 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2104 | 2002 | tcg_gen_discard_ptr(r_tickptr); |
2105 | 2003 | } |
2106 | 2004 | break; |
2107 | 2005 | case 0x5: /* V9 rdpc */ |
2108 | 2006 | tcg_gen_movi_tl(cpu_T[0], dc->pc); |
2109 | - gen_movl_T0_reg(rd); | |
2007 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2110 | 2008 | break; |
2111 | 2009 | case 0x6: /* V9 rdfprs */ |
2112 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs)); | |
2113 | - gen_movl_T0_reg(rd); | |
2010 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fprs)); | |
2011 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2012 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2114 | 2013 | break; |
2115 | 2014 | case 0xf: /* V9 membar */ |
2116 | 2015 | break; /* no effect */ |
2117 | 2016 | case 0x13: /* Graphics Status */ |
2118 | 2017 | if (gen_trap_ifnofpu(dc)) |
2119 | 2018 | goto jmp_insn; |
2120 | - gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr)); | |
2121 | - gen_movl_T0_reg(rd); | |
2019 | + tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, gsr)); | |
2020 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2122 | 2021 | break; |
2123 | 2022 | case 0x17: /* Tick compare */ |
2124 | - gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr)); | |
2125 | - gen_movl_T0_reg(rd); | |
2023 | + tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tick_cmpr)); | |
2024 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2126 | 2025 | break; |
2127 | 2026 | case 0x18: /* System tick */ |
2128 | 2027 | { |
... | ... | @@ -2133,13 +2032,13 @@ static void disas_sparc_insn(DisasContext * dc) |
2133 | 2032 | offsetof(CPUState, stick)); |
2134 | 2033 | tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0], |
2135 | 2034 | r_tickptr); |
2136 | - gen_movl_T0_reg(rd); | |
2035 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2137 | 2036 | tcg_gen_discard_ptr(r_tickptr); |
2138 | 2037 | } |
2139 | 2038 | break; |
2140 | 2039 | case 0x19: /* System tick compare */ |
2141 | - gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr)); | |
2142 | - gen_movl_T0_reg(rd); | |
2040 | + tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, stick_cmpr)); | |
2041 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2143 | 2042 | break; |
2144 | 2043 | case 0x10: /* Performance Control */ |
2145 | 2044 | case 0x11: /* Performance Instrumentation Counter */ |
... | ... | @@ -2169,22 +2068,26 @@ static void disas_sparc_insn(DisasContext * dc) |
2169 | 2068 | // gen_op_rdhtstate(); |
2170 | 2069 | break; |
2171 | 2070 | case 3: // hintp |
2172 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp)); | |
2071 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hintp)); | |
2072 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2173 | 2073 | break; |
2174 | 2074 | case 5: // htba |
2175 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, htba)); | |
2075 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, htba)); | |
2076 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2176 | 2077 | break; |
2177 | 2078 | case 6: // hver |
2178 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, hver)); | |
2079 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hver)); | |
2080 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2179 | 2081 | break; |
2180 | 2082 | case 31: // hstick_cmpr |
2181 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr)); | |
2083 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
2084 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hstick_cmpr)); | |
2182 | 2085 | break; |
2183 | 2086 | default: |
2184 | 2087 | goto illegal_insn; |
2185 | 2088 | } |
2186 | 2089 | #endif |
2187 | - gen_movl_T0_reg(rd); | |
2090 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2188 | 2091 | break; |
2189 | 2092 | } else if (xop == 0x2a) { /* rdwim / V9 rdpr */ |
2190 | 2093 | if (!supervisor(dc)) |
... | ... | @@ -2249,59 +2152,70 @@ static void disas_sparc_insn(DisasContext * dc) |
2249 | 2152 | offsetof(CPUState, tick)); |
2250 | 2153 | tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0], |
2251 | 2154 | r_tickptr); |
2252 | - gen_movl_T0_reg(rd); | |
2155 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2253 | 2156 | tcg_gen_discard_ptr(r_tickptr); |
2254 | 2157 | } |
2255 | 2158 | break; |
2256 | 2159 | case 5: // tba |
2257 | - gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr)); | |
2160 | + tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tbr)); | |
2258 | 2161 | break; |
2259 | 2162 | case 6: // pstate |
2260 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate)); | |
2163 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, pstate)); | |
2164 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2261 | 2165 | break; |
2262 | 2166 | case 7: // tl |
2263 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, tl)); | |
2167 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, tl)); | |
2168 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2264 | 2169 | break; |
2265 | 2170 | case 8: // pil |
2266 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil)); | |
2171 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, psrpil)); | |
2172 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2267 | 2173 | break; |
2268 | 2174 | case 9: // cwp |
2269 | 2175 | tcg_gen_helper_1_0(helper_rdcwp, cpu_T[0]); |
2270 | 2176 | break; |
2271 | 2177 | case 10: // cansave |
2272 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave)); | |
2178 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cansave)); | |
2179 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2273 | 2180 | break; |
2274 | 2181 | case 11: // canrestore |
2275 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore)); | |
2182 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, canrestore)); | |
2183 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2276 | 2184 | break; |
2277 | 2185 | case 12: // cleanwin |
2278 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin)); | |
2186 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cleanwin)); | |
2187 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2279 | 2188 | break; |
2280 | 2189 | case 13: // otherwin |
2281 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin)); | |
2190 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, otherwin)); | |
2191 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2282 | 2192 | break; |
2283 | 2193 | case 14: // wstate |
2284 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate)); | |
2194 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wstate)); | |
2195 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2285 | 2196 | break; |
2286 | 2197 | case 16: // UA2005 gl |
2287 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, gl)); | |
2198 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, gl)); | |
2199 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2288 | 2200 | break; |
2289 | 2201 | case 26: // UA2005 strand status |
2290 | 2202 | if (!hypervisor(dc)) |
2291 | 2203 | goto priv_insn; |
2292 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr)); | |
2204 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ssr)); | |
2205 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2293 | 2206 | break; |
2294 | 2207 | case 31: // ver |
2295 | - gen_op_movtl_T0_env(offsetof(CPUSPARCState, version)); | |
2208 | + tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, version)); | |
2296 | 2209 | break; |
2297 | 2210 | case 15: // fq |
2298 | 2211 | default: |
2299 | 2212 | goto illegal_insn; |
2300 | 2213 | } |
2301 | 2214 | #else |
2302 | - gen_op_movl_T0_env(offsetof(CPUSPARCState, wim)); | |
2215 | + tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wim)); | |
2216 | + tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32); | |
2303 | 2217 | #endif |
2304 | - gen_movl_T0_reg(rd); | |
2218 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2305 | 2219 | break; |
2306 | 2220 | } else if (xop == 0x2b) { /* rdtbr / V9 flushw */ |
2307 | 2221 | #ifdef TARGET_SPARC64 |
... | ... | @@ -2309,8 +2223,8 @@ static void disas_sparc_insn(DisasContext * dc) |
2309 | 2223 | #else |
2310 | 2224 | if (!supervisor(dc)) |
2311 | 2225 | goto priv_insn; |
2312 | - gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr)); | |
2313 | - gen_movl_T0_reg(rd); | |
2226 | + tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tbr)); | |
2227 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2314 | 2228 | #endif |
2315 | 2229 | break; |
2316 | 2230 | #endif |
... | ... | @@ -2703,7 +2617,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2703 | 2617 | l1 = gen_new_label(); |
2704 | 2618 | cond = GET_FIELD_SP(insn, 14, 17); |
2705 | 2619 | rs1 = GET_FIELD(insn, 13, 17); |
2706 | - gen_movl_reg_T0(rs1); | |
2620 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
2707 | 2621 | tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], |
2708 | 2622 | tcg_const_tl(0), l1); |
2709 | 2623 | gen_op_load_fpr_FT0(rs2); |
... | ... | @@ -2716,7 +2630,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2716 | 2630 | l1 = gen_new_label(); |
2717 | 2631 | cond = GET_FIELD_SP(insn, 14, 17); |
2718 | 2632 | rs1 = GET_FIELD(insn, 13, 17); |
2719 | - gen_movl_reg_T0(rs1); | |
2633 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
2720 | 2634 | tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], |
2721 | 2635 | tcg_const_tl(0), l1); |
2722 | 2636 | gen_op_load_fpr_DT0(DFPREG(rs2)); |
... | ... | @@ -2730,7 +2644,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2730 | 2644 | l1 = gen_new_label(); |
2731 | 2645 | cond = GET_FIELD_SP(insn, 14, 17); |
2732 | 2646 | rs1 = GET_FIELD(insn, 13, 17); |
2733 | - gen_movl_reg_T0(rs1); | |
2647 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
2734 | 2648 | tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], |
2735 | 2649 | tcg_const_tl(0), l1); |
2736 | 2650 | gen_op_load_fpr_QT0(QFPREG(rs2)); |
... | ... | @@ -2910,10 +2824,10 @@ static void disas_sparc_insn(DisasContext * dc) |
2910 | 2824 | tcg_gen_movi_tl(cpu_T[0], (int)rs2); |
2911 | 2825 | } else { /* register */ |
2912 | 2826 | rs2 = GET_FIELD(insn, 27, 31); |
2913 | - gen_movl_reg_T0(rs2); | |
2827 | + gen_movl_reg_TN(rs2, cpu_T[0]); | |
2914 | 2828 | } |
2915 | 2829 | } else { |
2916 | - gen_movl_reg_T0(rs1); | |
2830 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
2917 | 2831 | if (IS_IMM) { /* immediate */ |
2918 | 2832 | rs2 = GET_FIELDs(insn, 19, 31); |
2919 | 2833 | tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2); |
... | ... | @@ -2921,17 +2835,17 @@ static void disas_sparc_insn(DisasContext * dc) |
2921 | 2835 | // or x, %g0, y -> mov T1, x; mov y, T1 |
2922 | 2836 | rs2 = GET_FIELD(insn, 27, 31); |
2923 | 2837 | if (rs2 != 0) { |
2924 | - gen_movl_reg_T1(rs2); | |
2925 | - gen_op_or_T1_T0(); | |
2838 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
2839 | + tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
2926 | 2840 | } |
2927 | 2841 | } |
2928 | 2842 | } |
2929 | - gen_movl_T0_reg(rd); | |
2843 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2930 | 2844 | #endif |
2931 | 2845 | #ifdef TARGET_SPARC64 |
2932 | 2846 | } else if (xop == 0x25) { /* sll, V9 sllx */ |
2933 | 2847 | rs1 = GET_FIELD(insn, 13, 17); |
2934 | - gen_movl_reg_T0(rs1); | |
2848 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
2935 | 2849 | if (IS_IMM) { /* immediate */ |
2936 | 2850 | rs2 = GET_FIELDs(insn, 20, 31); |
2937 | 2851 | if (insn & (1 << 12)) { |
... | ... | @@ -2942,7 +2856,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2942 | 2856 | } |
2943 | 2857 | } else { /* register */ |
2944 | 2858 | rs2 = GET_FIELD(insn, 27, 31); |
2945 | - gen_movl_reg_T1(rs2); | |
2859 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
2946 | 2860 | if (insn & (1 << 12)) { |
2947 | 2861 | tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f); |
2948 | 2862 | tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]); |
... | ... | @@ -2952,10 +2866,10 @@ static void disas_sparc_insn(DisasContext * dc) |
2952 | 2866 | tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]); |
2953 | 2867 | } |
2954 | 2868 | } |
2955 | - gen_movl_T0_reg(rd); | |
2869 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2956 | 2870 | } else if (xop == 0x26) { /* srl, V9 srlx */ |
2957 | 2871 | rs1 = GET_FIELD(insn, 13, 17); |
2958 | - gen_movl_reg_T0(rs1); | |
2872 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
2959 | 2873 | if (IS_IMM) { /* immediate */ |
2960 | 2874 | rs2 = GET_FIELDs(insn, 20, 31); |
2961 | 2875 | if (insn & (1 << 12)) { |
... | ... | @@ -2966,7 +2880,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2966 | 2880 | } |
2967 | 2881 | } else { /* register */ |
2968 | 2882 | rs2 = GET_FIELD(insn, 27, 31); |
2969 | - gen_movl_reg_T1(rs2); | |
2883 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
2970 | 2884 | if (insn & (1 << 12)) { |
2971 | 2885 | tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f); |
2972 | 2886 | tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]); |
... | ... | @@ -2976,10 +2890,10 @@ static void disas_sparc_insn(DisasContext * dc) |
2976 | 2890 | tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]); |
2977 | 2891 | } |
2978 | 2892 | } |
2979 | - gen_movl_T0_reg(rd); | |
2893 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
2980 | 2894 | } else if (xop == 0x27) { /* sra, V9 srax */ |
2981 | 2895 | rs1 = GET_FIELD(insn, 13, 17); |
2982 | - gen_movl_reg_T0(rs1); | |
2896 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
2983 | 2897 | if (IS_IMM) { /* immediate */ |
2984 | 2898 | rs2 = GET_FIELDs(insn, 20, 31); |
2985 | 2899 | if (insn & (1 << 12)) { |
... | ... | @@ -2991,7 +2905,7 @@ static void disas_sparc_insn(DisasContext * dc) |
2991 | 2905 | } |
2992 | 2906 | } else { /* register */ |
2993 | 2907 | rs2 = GET_FIELD(insn, 27, 31); |
2994 | - gen_movl_reg_T1(rs2); | |
2908 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
2995 | 2909 | if (insn & (1 << 12)) { |
2996 | 2910 | tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f); |
2997 | 2911 | tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]); |
... | ... | @@ -3001,17 +2915,17 @@ static void disas_sparc_insn(DisasContext * dc) |
3001 | 2915 | tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]); |
3002 | 2916 | } |
3003 | 2917 | } |
3004 | - gen_movl_T0_reg(rd); | |
2918 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3005 | 2919 | #endif |
3006 | 2920 | } else if (xop < 0x36) { |
3007 | 2921 | rs1 = GET_FIELD(insn, 13, 17); |
3008 | - gen_movl_reg_T0(rs1); | |
2922 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
3009 | 2923 | if (IS_IMM) { /* immediate */ |
3010 | 2924 | rs2 = GET_FIELDs(insn, 19, 31); |
3011 | - gen_movl_simm_T1(rs2); | |
2925 | + tcg_gen_movi_tl(cpu_T[1], (int)rs2); | |
3012 | 2926 | } else { /* register */ |
3013 | 2927 | rs2 = GET_FIELD(insn, 27, 31); |
3014 | - gen_movl_reg_T1(rs2); | |
2928 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3015 | 2929 | } |
3016 | 2930 | if (xop < 0x20) { |
3017 | 2931 | switch (xop & ~0x10) { |
... | ... | @@ -3019,7 +2933,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3019 | 2933 | if (xop & 0x10) |
3020 | 2934 | gen_op_add_T1_T0_cc(); |
3021 | 2935 | else |
3022 | - gen_op_add_T1_T0(); | |
2936 | + tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3023 | 2937 | break; |
3024 | 2938 | case 0x1: |
3025 | 2939 | tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]); |
... | ... | @@ -3100,66 +3014,66 @@ static void disas_sparc_insn(DisasContext * dc) |
3100 | 3014 | break; |
3101 | 3015 | #endif |
3102 | 3016 | case 0xe: |
3103 | - gen_op_udiv_T1_T0(); | |
3017 | + tcg_gen_helper_1_2(helper_udiv, cpu_T[0], cpu_T[0], cpu_T[1]); | |
3104 | 3018 | if (xop & 0x10) |
3105 | 3019 | gen_op_div_cc(); |
3106 | 3020 | break; |
3107 | 3021 | case 0xf: |
3108 | - gen_op_sdiv_T1_T0(); | |
3022 | + tcg_gen_helper_1_2(helper_sdiv, cpu_T[0], cpu_T[0], cpu_T[1]); | |
3109 | 3023 | if (xop & 0x10) |
3110 | 3024 | gen_op_div_cc(); |
3111 | 3025 | break; |
3112 | 3026 | default: |
3113 | 3027 | goto illegal_insn; |
3114 | 3028 | } |
3115 | - gen_movl_T0_reg(rd); | |
3029 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3116 | 3030 | } else { |
3117 | 3031 | switch (xop) { |
3118 | 3032 | case 0x20: /* taddcc */ |
3119 | 3033 | gen_op_tadd_T1_T0_cc(); |
3120 | - gen_movl_T0_reg(rd); | |
3034 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3121 | 3035 | break; |
3122 | 3036 | case 0x21: /* tsubcc */ |
3123 | 3037 | gen_op_tsub_T1_T0_cc(); |
3124 | - gen_movl_T0_reg(rd); | |
3038 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3125 | 3039 | break; |
3126 | 3040 | case 0x22: /* taddcctv */ |
3127 | 3041 | save_state(dc); |
3128 | 3042 | gen_op_tadd_T1_T0_ccTV(); |
3129 | - gen_movl_T0_reg(rd); | |
3043 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3130 | 3044 | break; |
3131 | 3045 | case 0x23: /* tsubcctv */ |
3132 | 3046 | save_state(dc); |
3133 | 3047 | gen_op_tsub_T1_T0_ccTV(); |
3134 | - gen_movl_T0_reg(rd); | |
3048 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3135 | 3049 | break; |
3136 | 3050 | case 0x24: /* mulscc */ |
3137 | 3051 | gen_op_mulscc_T1_T0(); |
3138 | - gen_movl_T0_reg(rd); | |
3052 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3139 | 3053 | break; |
3140 | 3054 | #ifndef TARGET_SPARC64 |
3141 | 3055 | case 0x25: /* sll */ |
3142 | 3056 | tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0x1f); |
3143 | 3057 | tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]); |
3144 | - gen_movl_T0_reg(rd); | |
3058 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3145 | 3059 | break; |
3146 | 3060 | case 0x26: /* srl */ |
3147 | 3061 | tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0x1f); |
3148 | 3062 | tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]); |
3149 | - gen_movl_T0_reg(rd); | |
3063 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3150 | 3064 | break; |
3151 | 3065 | case 0x27: /* sra */ |
3152 | 3066 | tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0x1f); |
3153 | 3067 | tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]); |
3154 | - gen_movl_T0_reg(rd); | |
3068 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3155 | 3069 | break; |
3156 | 3070 | #endif |
3157 | 3071 | case 0x30: |
3158 | 3072 | { |
3159 | 3073 | switch(rd) { |
3160 | 3074 | case 0: /* wry */ |
3161 | - gen_op_xor_T1_T0(); | |
3162 | - gen_op_movtl_env_T0(offsetof(CPUSPARCState, y)); | |
3075 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3076 | + tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, y)); | |
3163 | 3077 | break; |
3164 | 3078 | #ifndef TARGET_SPARC64 |
3165 | 3079 | case 0x01 ... 0x0f: /* undefined in the |
... | ... | @@ -3173,16 +3087,18 @@ static void disas_sparc_insn(DisasContext * dc) |
3173 | 3087 | break; |
3174 | 3088 | #else |
3175 | 3089 | case 0x2: /* V9 wrccr */ |
3176 | - gen_op_xor_T1_T0(); | |
3090 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3177 | 3091 | tcg_gen_helper_0_1(helper_wrccr, cpu_T[0]); |
3178 | 3092 | break; |
3179 | 3093 | case 0x3: /* V9 wrasi */ |
3180 | - gen_op_xor_T1_T0(); | |
3181 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, asi)); | |
3094 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3095 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3096 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, asi)); | |
3182 | 3097 | break; |
3183 | 3098 | case 0x6: /* V9 wrfprs */ |
3184 | - gen_op_xor_T1_T0(); | |
3185 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs)); | |
3099 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3100 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3101 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fprs)); | |
3186 | 3102 | save_state(dc); |
3187 | 3103 | gen_op_next_insn(); |
3188 | 3104 | tcg_gen_exit_tb(0); |
... | ... | @@ -3197,8 +3113,8 @@ static void disas_sparc_insn(DisasContext * dc) |
3197 | 3113 | case 0x13: /* Graphics Status */ |
3198 | 3114 | if (gen_trap_ifnofpu(dc)) |
3199 | 3115 | goto jmp_insn; |
3200 | - gen_op_xor_T1_T0(); | |
3201 | - gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr)); | |
3116 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3117 | + tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, gsr)); | |
3202 | 3118 | break; |
3203 | 3119 | case 0x17: /* Tick compare */ |
3204 | 3120 | #if !defined(CONFIG_USER_ONLY) |
... | ... | @@ -3208,8 +3124,9 @@ static void disas_sparc_insn(DisasContext * dc) |
3208 | 3124 | { |
3209 | 3125 | TCGv r_tickptr; |
3210 | 3126 | |
3211 | - gen_op_xor_T1_T0(); | |
3212 | - gen_op_movtl_env_T0(offsetof(CPUSPARCState, | |
3127 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], | |
3128 | + cpu_T[1]); | |
3129 | + tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, | |
3213 | 3130 | tick_cmpr)); |
3214 | 3131 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
3215 | 3132 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
... | ... | @@ -3227,7 +3144,8 @@ static void disas_sparc_insn(DisasContext * dc) |
3227 | 3144 | { |
3228 | 3145 | TCGv r_tickptr; |
3229 | 3146 | |
3230 | - gen_op_xor_T1_T0(); | |
3147 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], | |
3148 | + cpu_T[1]); | |
3231 | 3149 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
3232 | 3150 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
3233 | 3151 | offsetof(CPUState, stick)); |
... | ... | @@ -3244,8 +3162,9 @@ static void disas_sparc_insn(DisasContext * dc) |
3244 | 3162 | { |
3245 | 3163 | TCGv r_tickptr; |
3246 | 3164 | |
3247 | - gen_op_xor_T1_T0(); | |
3248 | - gen_op_movtl_env_T0(offsetof(CPUSPARCState, | |
3165 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], | |
3166 | + cpu_T[1]); | |
3167 | + tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, | |
3249 | 3168 | stick_cmpr)); |
3250 | 3169 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
3251 | 3170 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
... | ... | @@ -3290,7 +3209,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3290 | 3209 | goto illegal_insn; |
3291 | 3210 | } |
3292 | 3211 | #else |
3293 | - gen_op_xor_T1_T0(); | |
3212 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3294 | 3213 | tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]); |
3295 | 3214 | save_state(dc); |
3296 | 3215 | gen_op_next_insn(); |
... | ... | @@ -3303,7 +3222,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3303 | 3222 | { |
3304 | 3223 | if (!supervisor(dc)) |
3305 | 3224 | goto priv_insn; |
3306 | - gen_op_xor_T1_T0(); | |
3225 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3307 | 3226 | #ifdef TARGET_SPARC64 |
3308 | 3227 | switch (rd) { |
3309 | 3228 | case 0: // tpc |
... | ... | @@ -3367,7 +3286,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3367 | 3286 | } |
3368 | 3287 | break; |
3369 | 3288 | case 5: // tba |
3370 | - gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr)); | |
3289 | + tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tbr)); | |
3371 | 3290 | break; |
3372 | 3291 | case 6: // pstate |
3373 | 3292 | save_state(dc); |
... | ... | @@ -3377,43 +3296,53 @@ static void disas_sparc_insn(DisasContext * dc) |
3377 | 3296 | dc->is_br = 1; |
3378 | 3297 | break; |
3379 | 3298 | case 7: // tl |
3380 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, tl)); | |
3299 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3300 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, tl)); | |
3381 | 3301 | break; |
3382 | 3302 | case 8: // pil |
3383 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil)); | |
3303 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3304 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, psrpil)); | |
3384 | 3305 | break; |
3385 | 3306 | case 9: // cwp |
3386 | 3307 | tcg_gen_helper_0_1(helper_wrcwp, cpu_T[0]); |
3387 | 3308 | break; |
3388 | 3309 | case 10: // cansave |
3389 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave)); | |
3310 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3311 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cansave)); | |
3390 | 3312 | break; |
3391 | 3313 | case 11: // canrestore |
3392 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore)); | |
3314 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3315 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, canrestore)); | |
3393 | 3316 | break; |
3394 | 3317 | case 12: // cleanwin |
3395 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin)); | |
3318 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3319 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cleanwin)); | |
3396 | 3320 | break; |
3397 | 3321 | case 13: // otherwin |
3398 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin)); | |
3322 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3323 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, otherwin)); | |
3399 | 3324 | break; |
3400 | 3325 | case 14: // wstate |
3401 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate)); | |
3326 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3327 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wstate)); | |
3402 | 3328 | break; |
3403 | 3329 | case 16: // UA2005 gl |
3404 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, gl)); | |
3330 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3331 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, gl)); | |
3405 | 3332 | break; |
3406 | 3333 | case 26: // UA2005 strand status |
3407 | 3334 | if (!hypervisor(dc)) |
3408 | 3335 | goto priv_insn; |
3409 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr)); | |
3336 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3337 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ssr)); | |
3410 | 3338 | break; |
3411 | 3339 | default: |
3412 | 3340 | goto illegal_insn; |
3413 | 3341 | } |
3414 | 3342 | #else |
3415 | 3343 | tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1)); |
3416 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, wim)); | |
3344 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3345 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wim)); | |
3417 | 3346 | #endif |
3418 | 3347 | } |
3419 | 3348 | break; |
... | ... | @@ -3422,12 +3351,12 @@ static void disas_sparc_insn(DisasContext * dc) |
3422 | 3351 | #ifndef TARGET_SPARC64 |
3423 | 3352 | if (!supervisor(dc)) |
3424 | 3353 | goto priv_insn; |
3425 | - gen_op_xor_T1_T0(); | |
3426 | - gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr)); | |
3354 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3355 | + tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, tbr)); | |
3427 | 3356 | #else |
3428 | 3357 | if (!hypervisor(dc)) |
3429 | 3358 | goto priv_insn; |
3430 | - gen_op_xor_T1_T0(); | |
3359 | + tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3431 | 3360 | switch (rd) { |
3432 | 3361 | case 0: // hpstate |
3433 | 3362 | // XXX gen_op_wrhpstate(); |
... | ... | @@ -3440,16 +3369,18 @@ static void disas_sparc_insn(DisasContext * dc) |
3440 | 3369 | // XXX gen_op_wrhtstate(); |
3441 | 3370 | break; |
3442 | 3371 | case 3: // hintp |
3443 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp)); | |
3372 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3373 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hintp)); | |
3444 | 3374 | break; |
3445 | 3375 | case 5: // htba |
3446 | - gen_op_movl_env_T0(offsetof(CPUSPARCState, htba)); | |
3376 | + tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]); | |
3377 | + tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, htba)); | |
3447 | 3378 | break; |
3448 | 3379 | case 31: // hstick_cmpr |
3449 | 3380 | { |
3450 | 3381 | TCGv r_tickptr; |
3451 | 3382 | |
3452 | - gen_op_movtl_env_T0(offsetof(CPUSPARCState, | |
3383 | + tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, | |
3453 | 3384 | hstick_cmpr)); |
3454 | 3385 | r_tickptr = tcg_temp_new(TCG_TYPE_PTR); |
3455 | 3386 | tcg_gen_ld_ptr(r_tickptr, cpu_env, |
... | ... | @@ -3493,34 +3424,34 @@ static void disas_sparc_insn(DisasContext * dc) |
3493 | 3424 | tcg_const_tl(0), l1); |
3494 | 3425 | if (IS_IMM) { /* immediate */ |
3495 | 3426 | rs2 = GET_FIELD_SPs(insn, 0, 10); |
3496 | - gen_movl_simm_T1(rs2); | |
3427 | + tcg_gen_movi_tl(cpu_T[1], (int)rs2); | |
3497 | 3428 | } else { |
3498 | 3429 | rs2 = GET_FIELD_SP(insn, 0, 4); |
3499 | - gen_movl_reg_T1(rs2); | |
3430 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3500 | 3431 | } |
3501 | - gen_movl_T1_reg(rd); | |
3432 | + gen_movl_TN_reg(rd, cpu_T[1]); | |
3502 | 3433 | gen_set_label(l1); |
3503 | 3434 | tcg_gen_discard_tl(r_cond); |
3504 | 3435 | break; |
3505 | 3436 | } |
3506 | 3437 | case 0x2d: /* V9 sdivx */ |
3507 | 3438 | gen_op_sdivx_T1_T0(); |
3508 | - gen_movl_T0_reg(rd); | |
3439 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3509 | 3440 | break; |
3510 | 3441 | case 0x2e: /* V9 popc */ |
3511 | 3442 | { |
3512 | 3443 | if (IS_IMM) { /* immediate */ |
3513 | 3444 | rs2 = GET_FIELD_SPs(insn, 0, 12); |
3514 | - gen_movl_simm_T1(rs2); | |
3445 | + tcg_gen_movi_tl(cpu_T[1], (int)rs2); | |
3515 | 3446 | // XXX optimize: popc(constant) |
3516 | 3447 | } |
3517 | 3448 | else { |
3518 | 3449 | rs2 = GET_FIELD_SP(insn, 0, 4); |
3519 | - gen_movl_reg_T1(rs2); | |
3450 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3520 | 3451 | } |
3521 | 3452 | tcg_gen_helper_1_1(helper_popc, cpu_T[0], |
3522 | 3453 | cpu_T[1]); |
3523 | - gen_movl_T0_reg(rd); | |
3454 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3524 | 3455 | } |
3525 | 3456 | case 0x2f: /* V9 movr */ |
3526 | 3457 | { |
... | ... | @@ -3528,7 +3459,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3528 | 3459 | int l1; |
3529 | 3460 | |
3530 | 3461 | rs1 = GET_FIELD(insn, 13, 17); |
3531 | - gen_movl_reg_T0(rs1); | |
3462 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
3532 | 3463 | |
3533 | 3464 | l1 = gen_new_label(); |
3534 | 3465 | |
... | ... | @@ -3536,12 +3467,12 @@ static void disas_sparc_insn(DisasContext * dc) |
3536 | 3467 | tcg_const_tl(0), l1); |
3537 | 3468 | if (IS_IMM) { /* immediate */ |
3538 | 3469 | rs2 = GET_FIELD_SPs(insn, 0, 9); |
3539 | - gen_movl_simm_T1(rs2); | |
3470 | + tcg_gen_movi_tl(cpu_T[1], (int)rs2); | |
3540 | 3471 | } else { |
3541 | 3472 | rs2 = GET_FIELD_SP(insn, 0, 4); |
3542 | - gen_movl_reg_T1(rs2); | |
3473 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3543 | 3474 | } |
3544 | - gen_movl_T1_reg(rd); | |
3475 | + gen_movl_TN_reg(rd, cpu_T[1]); | |
3545 | 3476 | gen_set_label(l1); |
3546 | 3477 | break; |
3547 | 3478 | } |
... | ... | @@ -3574,34 +3505,34 @@ static void disas_sparc_insn(DisasContext * dc) |
3574 | 3505 | // XXX |
3575 | 3506 | goto illegal_insn; |
3576 | 3507 | case 0x010: /* VIS I array8 */ |
3577 | - gen_movl_reg_T0(rs1); | |
3578 | - gen_movl_reg_T1(rs2); | |
3508 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
3509 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3579 | 3510 | tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0], |
3580 | 3511 | cpu_T[1]); |
3581 | - gen_movl_T0_reg(rd); | |
3512 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3582 | 3513 | break; |
3583 | 3514 | case 0x012: /* VIS I array16 */ |
3584 | - gen_movl_reg_T0(rs1); | |
3585 | - gen_movl_reg_T1(rs2); | |
3515 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
3516 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3586 | 3517 | tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0], |
3587 | 3518 | cpu_T[1]); |
3588 | 3519 | tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 1); |
3589 | - gen_movl_T0_reg(rd); | |
3520 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3590 | 3521 | break; |
3591 | 3522 | case 0x014: /* VIS I array32 */ |
3592 | - gen_movl_reg_T0(rs1); | |
3593 | - gen_movl_reg_T1(rs2); | |
3523 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
3524 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3594 | 3525 | tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0], |
3595 | 3526 | cpu_T[1]); |
3596 | 3527 | tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2); |
3597 | - gen_movl_T0_reg(rd); | |
3528 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3598 | 3529 | break; |
3599 | 3530 | case 0x018: /* VIS I alignaddr */ |
3600 | - gen_movl_reg_T0(rs1); | |
3601 | - gen_movl_reg_T1(rs2); | |
3531 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
3532 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3602 | 3533 | tcg_gen_helper_1_2(helper_alignaddr, cpu_T[0], cpu_T[0], |
3603 | 3534 | cpu_T[1]); |
3604 | - gen_movl_T0_reg(rd); | |
3535 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
3605 | 3536 | break; |
3606 | 3537 | case 0x019: /* VIS II bmask */ |
3607 | 3538 | case 0x01a: /* VIS I alignaddrl */ |
... | ... | @@ -3964,7 +3895,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3964 | 3895 | } else if (xop == 0x39) { /* V9 return */ |
3965 | 3896 | rs1 = GET_FIELD(insn, 13, 17); |
3966 | 3897 | save_state(dc); |
3967 | - gen_movl_reg_T0(rs1); | |
3898 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
3968 | 3899 | if (IS_IMM) { /* immediate */ |
3969 | 3900 | rs2 = GET_FIELDs(insn, 19, 31); |
3970 | 3901 | tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2); |
... | ... | @@ -3973,8 +3904,8 @@ static void disas_sparc_insn(DisasContext * dc) |
3973 | 3904 | #if defined(OPTIM) |
3974 | 3905 | if (rs2) { |
3975 | 3906 | #endif |
3976 | - gen_movl_reg_T1(rs2); | |
3977 | - gen_op_add_T1_T0(); | |
3907 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3908 | + tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
3978 | 3909 | #if defined(OPTIM) |
3979 | 3910 | } |
3980 | 3911 | #endif |
... | ... | @@ -3988,7 +3919,7 @@ static void disas_sparc_insn(DisasContext * dc) |
3988 | 3919 | #endif |
3989 | 3920 | } else { |
3990 | 3921 | rs1 = GET_FIELD(insn, 13, 17); |
3991 | - gen_movl_reg_T0(rs1); | |
3922 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
3992 | 3923 | if (IS_IMM) { /* immediate */ |
3993 | 3924 | rs2 = GET_FIELDs(insn, 19, 31); |
3994 | 3925 | tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2); |
... | ... | @@ -3997,8 +3928,8 @@ static void disas_sparc_insn(DisasContext * dc) |
3997 | 3928 | #if defined(OPTIM) |
3998 | 3929 | if (rs2) { |
3999 | 3930 | #endif |
4000 | - gen_movl_reg_T1(rs2); | |
4001 | - gen_op_add_T1_T0(); | |
3931 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
3932 | + tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
4002 | 3933 | #if defined(OPTIM) |
4003 | 3934 | } |
4004 | 3935 | #endif |
... | ... | @@ -4008,7 +3939,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4008 | 3939 | { |
4009 | 3940 | if (rd != 0) { |
4010 | 3941 | tcg_gen_movi_tl(cpu_T[1], dc->pc); |
4011 | - gen_movl_T1_reg(rd); | |
3942 | + gen_movl_TN_reg(rd, cpu_T[1]); | |
4012 | 3943 | } |
4013 | 3944 | gen_mov_pc_npc(dc); |
4014 | 3945 | gen_op_check_align_T0_3(); |
... | ... | @@ -4035,12 +3966,12 @@ static void disas_sparc_insn(DisasContext * dc) |
4035 | 3966 | case 0x3c: /* save */ |
4036 | 3967 | save_state(dc); |
4037 | 3968 | tcg_gen_helper_0_0(helper_save); |
4038 | - gen_movl_T0_reg(rd); | |
3969 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
4039 | 3970 | break; |
4040 | 3971 | case 0x3d: /* restore */ |
4041 | 3972 | save_state(dc); |
4042 | 3973 | tcg_gen_helper_0_0(helper_restore); |
4043 | - gen_movl_T0_reg(rd); | |
3974 | + gen_movl_TN_reg(rd, cpu_T[0]); | |
4044 | 3975 | break; |
4045 | 3976 | #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64) |
4046 | 3977 | case 0x3e: /* V9 done/retry */ |
... | ... | @@ -4078,11 +4009,11 @@ static void disas_sparc_insn(DisasContext * dc) |
4078 | 4009 | unsigned int xop = GET_FIELD(insn, 7, 12); |
4079 | 4010 | rs1 = GET_FIELD(insn, 13, 17); |
4080 | 4011 | save_state(dc); |
4081 | - gen_movl_reg_T0(rs1); | |
4012 | + gen_movl_reg_TN(rs1, cpu_T[0]); | |
4082 | 4013 | if (xop == 0x3c || xop == 0x3e) |
4083 | 4014 | { |
4084 | 4015 | rs2 = GET_FIELD(insn, 27, 31); |
4085 | - gen_movl_reg_T1(rs2); | |
4016 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
4086 | 4017 | } |
4087 | 4018 | else if (IS_IMM) { /* immediate */ |
4088 | 4019 | rs2 = GET_FIELDs(insn, 19, 31); |
... | ... | @@ -4092,8 +4023,8 @@ static void disas_sparc_insn(DisasContext * dc) |
4092 | 4023 | #if defined(OPTIM) |
4093 | 4024 | if (rs2 != 0) { |
4094 | 4025 | #endif |
4095 | - gen_movl_reg_T1(rs2); | |
4096 | - gen_op_add_T1_T0(); | |
4026 | + gen_movl_reg_TN(rs2, cpu_T[1]); | |
4027 | + tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); | |
4097 | 4028 | #if defined(OPTIM) |
4098 | 4029 | } |
4099 | 4030 | #endif |
... | ... | @@ -4125,7 +4056,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4125 | 4056 | tcg_gen_qemu_ld64(cpu_tmp64, cpu_T[0], dc->mem_idx); |
4126 | 4057 | tcg_gen_trunc_i64_tl(cpu_T[0], cpu_tmp64); |
4127 | 4058 | tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffffffffULL); |
4128 | - gen_movl_T0_reg(rd + 1); | |
4059 | + gen_movl_TN_reg(rd + 1, cpu_T[0]); | |
4129 | 4060 | tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32); |
4130 | 4061 | tcg_gen_trunc_i64_tl(cpu_T[1], cpu_tmp64); |
4131 | 4062 | tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0xffffffffULL); |
... | ... | @@ -4147,7 +4078,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4147 | 4078 | break; |
4148 | 4079 | case 0x0f: /* swap register with memory. Also atomically */ |
4149 | 4080 | gen_op_check_align_T0_3(); |
4150 | - gen_movl_reg_T1(rd); | |
4081 | + gen_movl_reg_TN(rd, cpu_T[1]); | |
4151 | 4082 | ABI32_MASK(cpu_T[0]); |
4152 | 4083 | tcg_gen_qemu_ld32u(cpu_tmp32, cpu_T[0], dc->mem_idx); |
4153 | 4084 | tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx); |
... | ... | @@ -4194,7 +4125,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4194 | 4125 | goto illegal_insn; |
4195 | 4126 | gen_op_check_align_T0_7(); |
4196 | 4127 | gen_ldda_asi(insn); |
4197 | - gen_movl_T0_reg(rd + 1); | |
4128 | + gen_movl_TN_reg(rd + 1, cpu_T[0]); | |
4198 | 4129 | break; |
4199 | 4130 | case 0x19: /* load signed byte alternate */ |
4200 | 4131 | #ifndef TARGET_SPARC64 |
... | ... | @@ -4232,7 +4163,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4232 | 4163 | goto priv_insn; |
4233 | 4164 | #endif |
4234 | 4165 | gen_op_check_align_T0_3(); |
4235 | - gen_movl_reg_T1(rd); | |
4166 | + gen_movl_reg_TN(rd, cpu_T[1]); | |
4236 | 4167 | gen_swap_asi(insn); |
4237 | 4168 | break; |
4238 | 4169 | |
... | ... | @@ -4286,7 +4217,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4286 | 4217 | default: |
4287 | 4218 | goto illegal_insn; |
4288 | 4219 | } |
4289 | - gen_movl_T1_reg(rd); | |
4220 | + gen_movl_TN_reg(rd, cpu_T[1]); | |
4290 | 4221 | #ifdef TARGET_SPARC64 |
4291 | 4222 | skip_move: ; |
4292 | 4223 | #endif |
... | ... | @@ -4326,7 +4257,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4326 | 4257 | } |
4327 | 4258 | } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \ |
4328 | 4259 | xop == 0xe || xop == 0x1e) { |
4329 | - gen_movl_reg_T1(rd); | |
4260 | + gen_movl_reg_TN(rd, cpu_T[1]); | |
4330 | 4261 | switch (xop) { |
4331 | 4262 | case 0x4: /* store word */ |
4332 | 4263 | gen_op_check_align_T0_3(); |
... | ... | @@ -4359,7 +4290,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4359 | 4290 | #else /* __i386__ */ |
4360 | 4291 | gen_op_check_align_T0_7(); |
4361 | 4292 | flush_T2(dc); |
4362 | - gen_movl_reg_T2(rd + 1); | |
4293 | + gen_movl_reg_TN(rd + 1, cpu_T[2]); | |
4363 | 4294 | gen_op_ldst(std); |
4364 | 4295 | #endif /* __i386__ */ |
4365 | 4296 | break; |
... | ... | @@ -4497,12 +4428,12 @@ static void disas_sparc_insn(DisasContext * dc) |
4497 | 4428 | case 0x3c: /* V9 casa */ |
4498 | 4429 | gen_op_check_align_T0_3(); |
4499 | 4430 | gen_cas_asi(insn, rd); |
4500 | - gen_movl_T1_reg(rd); | |
4431 | + gen_movl_TN_reg(rd, cpu_T[1]); | |
4501 | 4432 | break; |
4502 | 4433 | case 0x3e: /* V9 casxa */ |
4503 | 4434 | gen_op_check_align_T0_7(); |
4504 | 4435 | gen_casx_asi(insn, rd); |
4505 | - gen_movl_T1_reg(rd); | |
4436 | + gen_movl_TN_reg(rd, cpu_T[1]); | |
4506 | 4437 | break; |
4507 | 4438 | #else |
4508 | 4439 | case 0x34: /* stc */ |
... | ... | @@ -4536,13 +4467,13 @@ static void disas_sparc_insn(DisasContext * dc) |
4536 | 4467 | return; |
4537 | 4468 | illegal_insn: |
4538 | 4469 | save_state(dc); |
4539 | - gen_op_exception(TT_ILL_INSN); | |
4470 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_ILL_INSN)); | |
4540 | 4471 | dc->is_br = 1; |
4541 | 4472 | return; |
4542 | 4473 | #if !defined(CONFIG_USER_ONLY) |
4543 | 4474 | priv_insn: |
4544 | 4475 | save_state(dc); |
4545 | - gen_op_exception(TT_PRIV_INSN); | |
4476 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_PRIV_INSN)); | |
4546 | 4477 | dc->is_br = 1; |
4547 | 4478 | return; |
4548 | 4479 | nfpu_insn: |
... | ... | @@ -4561,7 +4492,7 @@ static void disas_sparc_insn(DisasContext * dc) |
4561 | 4492 | #ifndef TARGET_SPARC64 |
4562 | 4493 | ncp_insn: |
4563 | 4494 | save_state(dc); |
4564 | - gen_op_exception(TT_NCP_INSN); | |
4495 | + tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_NCP_INSN)); | |
4565 | 4496 | dc->is_br = 1; |
4566 | 4497 | return; |
4567 | 4498 | #endif |
... | ... | @@ -4634,7 +4565,7 @@ static inline int gen_intermediate_code_internal(TranslationBlock * tb, |
4634 | 4565 | /* if single step mode, we generate only one instruction and |
4635 | 4566 | generate an exception */ |
4636 | 4567 | if (env->singlestep_enabled) { |
4637 | - gen_jmp_im(dc->pc); | |
4568 | + tcg_gen_movi_tl(cpu_pc, dc->pc); | |
4638 | 4569 | tcg_gen_exit_tb(0); |
4639 | 4570 | break; |
4640 | 4571 | } |
... | ... | @@ -4646,10 +4577,10 @@ static inline int gen_intermediate_code_internal(TranslationBlock * tb, |
4646 | 4577 | if (dc->pc != DYNAMIC_PC && |
4647 | 4578 | (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) { |
4648 | 4579 | /* static PC and NPC: we can use direct chaining */ |
4649 | - gen_branch(dc, dc->pc, dc->npc); | |
4580 | + gen_goto_tb(dc, 0, dc->pc, dc->npc); | |
4650 | 4581 | } else { |
4651 | 4582 | if (dc->pc != DYNAMIC_PC) |
4652 | - gen_jmp_im(dc->pc); | |
4583 | + tcg_gen_movi_tl(cpu_pc, dc->pc); | |
4653 | 4584 | save_npc(dc); |
4654 | 4585 | tcg_gen_exit_tb(0); |
4655 | 4586 | } |
... | ... |