Commit cf360a32af84ee473b4bcba1dcaa4646f10ef937

Authored by aurel32
1 parent bdb4b689

target-ppc: convert load/store with reservation instructions to TCG

Signed-off-by: Aurelien Jarno <aurelien@aurel32.net>

git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@5830 c046a42c-6fe2-441c-8c8c-71466251a162
target-ppc/op_mem.h
@@ -20,232 +20,6 @@
20 20
21 #include "op_mem_access.h" 21 #include "op_mem_access.h"
22 22
23 -/* Load and set reservation */  
24 -void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)  
25 -{  
26 - if (unlikely(T0 & 0x03)) {  
27 - raise_exception(env, POWERPC_EXCP_ALIGN);  
28 - } else {  
29 - T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);  
30 - env->reserve = (uint32_t)T0;  
31 - }  
32 - RETURN();  
33 -}  
34 -  
35 -#if defined(TARGET_PPC64)  
36 -void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)  
37 -{  
38 - if (unlikely(T0 & 0x03)) {  
39 - raise_exception(env, POWERPC_EXCP_ALIGN);  
40 - } else {  
41 - T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);  
42 - env->reserve = (uint64_t)T0;  
43 - }  
44 - RETURN();  
45 -}  
46 -  
47 -void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)  
48 -{  
49 - if (unlikely(T0 & 0x03)) {  
50 - raise_exception(env, POWERPC_EXCP_ALIGN);  
51 - } else {  
52 - T1 = glue(ldu64, MEMSUFFIX)((uint32_t)T0);  
53 - env->reserve = (uint32_t)T0;  
54 - }  
55 - RETURN();  
56 -}  
57 -  
58 -void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)  
59 -{  
60 - if (unlikely(T0 & 0x03)) {  
61 - raise_exception(env, POWERPC_EXCP_ALIGN);  
62 - } else {  
63 - T1 = glue(ldu64, MEMSUFFIX)((uint64_t)T0);  
64 - env->reserve = (uint64_t)T0;  
65 - }  
66 - RETURN();  
67 -}  
68 -#endif  
69 -  
70 -void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)  
71 -{  
72 - if (unlikely(T0 & 0x03)) {  
73 - raise_exception(env, POWERPC_EXCP_ALIGN);  
74 - } else {  
75 - T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);  
76 - env->reserve = (uint32_t)T0;  
77 - }  
78 - RETURN();  
79 -}  
80 -  
81 -#if defined(TARGET_PPC64)  
82 -void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)  
83 -{  
84 - if (unlikely(T0 & 0x03)) {  
85 - raise_exception(env, POWERPC_EXCP_ALIGN);  
86 - } else {  
87 - T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);  
88 - env->reserve = (uint64_t)T0;  
89 - }  
90 - RETURN();  
91 -}  
92 -  
93 -void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)  
94 -{  
95 - if (unlikely(T0 & 0x03)) {  
96 - raise_exception(env, POWERPC_EXCP_ALIGN);  
97 - } else {  
98 - T1 = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);  
99 - env->reserve = (uint32_t)T0;  
100 - }  
101 - RETURN();  
102 -}  
103 -  
104 -void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)  
105 -{  
106 - if (unlikely(T0 & 0x03)) {  
107 - raise_exception(env, POWERPC_EXCP_ALIGN);  
108 - } else {  
109 - T1 = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);  
110 - env->reserve = (uint64_t)T0;  
111 - }  
112 - RETURN();  
113 -}  
114 -#endif  
115 -  
116 -/* Store with reservation */  
117 -void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)  
118 -{  
119 - if (unlikely(T0 & 0x03)) {  
120 - raise_exception(env, POWERPC_EXCP_ALIGN);  
121 - } else {  
122 - if (unlikely(env->reserve != (uint32_t)T0)) {  
123 - env->crf[0] = xer_so;  
124 - } else {  
125 - glue(st32, MEMSUFFIX)((uint32_t)T0, T1);  
126 - env->crf[0] = xer_so | 0x02;  
127 - }  
128 - }  
129 - env->reserve = (target_ulong)-1ULL;  
130 - RETURN();  
131 -}  
132 -  
133 -#if defined(TARGET_PPC64)  
134 -void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)  
135 -{  
136 - if (unlikely(T0 & 0x03)) {  
137 - raise_exception(env, POWERPC_EXCP_ALIGN);  
138 - } else {  
139 - if (unlikely(env->reserve != (uint64_t)T0)) {  
140 - env->crf[0] = xer_so;  
141 - } else {  
142 - glue(st32, MEMSUFFIX)((uint64_t)T0, T1);  
143 - env->crf[0] = xer_so | 0x02;  
144 - }  
145 - }  
146 - env->reserve = (target_ulong)-1ULL;  
147 - RETURN();  
148 -}  
149 -  
150 -void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)  
151 -{  
152 - if (unlikely(T0 & 0x03)) {  
153 - raise_exception(env, POWERPC_EXCP_ALIGN);  
154 - } else {  
155 - if (unlikely(env->reserve != (uint32_t)T0)) {  
156 - env->crf[0] = xer_so;  
157 - } else {  
158 - glue(st64, MEMSUFFIX)((uint32_t)T0, T1);  
159 - env->crf[0] = xer_so | 0x02;  
160 - }  
161 - }  
162 - env->reserve = (target_ulong)-1ULL;  
163 - RETURN();  
164 -}  
165 -  
166 -void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)  
167 -{  
168 - if (unlikely(T0 & 0x03)) {  
169 - raise_exception(env, POWERPC_EXCP_ALIGN);  
170 - } else {  
171 - if (unlikely(env->reserve != (uint64_t)T0)) {  
172 - env->crf[0] = xer_so;  
173 - } else {  
174 - glue(st64, MEMSUFFIX)((uint64_t)T0, T1);  
175 - env->crf[0] = xer_so | 0x02;  
176 - }  
177 - }  
178 - env->reserve = (target_ulong)-1ULL;  
179 - RETURN();  
180 -}  
181 -#endif  
182 -  
183 -void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)  
184 -{  
185 - if (unlikely(T0 & 0x03)) {  
186 - raise_exception(env, POWERPC_EXCP_ALIGN);  
187 - } else {  
188 - if (unlikely(env->reserve != (uint32_t)T0)) {  
189 - env->crf[0] = xer_so;  
190 - } else {  
191 - glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);  
192 - env->crf[0] = xer_so | 0x02;  
193 - }  
194 - }  
195 - env->reserve = (target_ulong)-1ULL;  
196 - RETURN();  
197 -}  
198 -  
199 -#if defined(TARGET_PPC64)  
200 -void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)  
201 -{  
202 - if (unlikely(T0 & 0x03)) {  
203 - raise_exception(env, POWERPC_EXCP_ALIGN);  
204 - } else {  
205 - if (unlikely(env->reserve != (uint64_t)T0)) {  
206 - env->crf[0] = xer_so;  
207 - } else {  
208 - glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);  
209 - env->crf[0] = xer_so | 0x02;  
210 - }  
211 - }  
212 - env->reserve = (target_ulong)-1ULL;  
213 - RETURN();  
214 -}  
215 -  
216 -void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)  
217 -{  
218 - if (unlikely(T0 & 0x03)) {  
219 - raise_exception(env, POWERPC_EXCP_ALIGN);  
220 - } else {  
221 - if (unlikely(env->reserve != (uint32_t)T0)) {  
222 - env->crf[0] = xer_so;  
223 - } else {  
224 - glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);  
225 - env->crf[0] = xer_so | 0x02;  
226 - }  
227 - }  
228 - env->reserve = (target_ulong)-1ULL;  
229 - RETURN();  
230 -}  
231 -  
232 -void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)  
233 -{  
234 - if (unlikely(T0 & 0x03)) {  
235 - raise_exception(env, POWERPC_EXCP_ALIGN);  
236 - } else {  
237 - if (unlikely(env->reserve != (uint64_t)T0)) {  
238 - env->crf[0] = xer_so;  
239 - } else {  
240 - glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);  
241 - env->crf[0] = xer_so | 0x02;  
242 - }  
243 - }  
244 - env->reserve = (target_ulong)-1ULL;  
245 - RETURN();  
246 -}  
247 -#endif  
248 -  
249 /* External access */ 23 /* External access */
250 void OPPROTO glue(op_eciwx, MEMSUFFIX) (void) 24 void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
251 { 25 {
target-ppc/translate.c
@@ -66,6 +66,7 @@ static TCGv cpu_nip;
66 static TCGv cpu_ctr; 66 static TCGv cpu_ctr;
67 static TCGv cpu_lr; 67 static TCGv cpu_lr;
68 static TCGv cpu_xer; 68 static TCGv cpu_xer;
  69 +static TCGv cpu_reserve;
69 static TCGv_i32 cpu_fpscr; 70 static TCGv_i32 cpu_fpscr;
70 static TCGv_i32 cpu_access_type; 71 static TCGv_i32 cpu_access_type;
71 72
@@ -161,6 +162,9 @@ void ppc_translate_init(void)
161 cpu_xer = tcg_global_mem_new(TCG_AREG0, 162 cpu_xer = tcg_global_mem_new(TCG_AREG0,
162 offsetof(CPUState, xer), "xer"); 163 offsetof(CPUState, xer), "xer");
163 164
  165 + cpu_reserve = tcg_global_mem_new(TCG_AREG0,
  166 + offsetof(CPUState, reserve), "reserve");
  167 +
164 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0, 168 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
165 offsetof(CPUState, fpscr), "fpscr"); 169 offsetof(CPUState, fpscr), "fpscr");
166 170
@@ -2468,6 +2472,24 @@ static always_inline void gen_addr_register (TCGv EA,
2468 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2472 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2469 } 2473 }
2470 2474
  2475 +static always_inline void gen_check_align (DisasContext *ctx, TCGv EA, int mask)
  2476 +{
  2477 + int l1 = gen_new_label();
  2478 + TCGv t0 = tcg_temp_new();
  2479 + TCGv_i32 t1, t2;
  2480 + /* NIP cannot be restored if the memory exception comes from an helper */
  2481 + gen_update_nip(ctx, ctx->nip - 4);
  2482 + tcg_gen_andi_tl(t0, EA, mask);
  2483 + tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
  2484 + t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
  2485 + t2 = tcg_const_i32(0);
  2486 + gen_helper_raise_exception_err(t1, t2);
  2487 + tcg_temp_free_i32(t1);
  2488 + tcg_temp_free_i32(t2);
  2489 + gen_set_label(l1);
  2490 + tcg_temp_free(t0);
  2491 +}
  2492 +
2471 #if defined(TARGET_PPC64) 2493 #if defined(TARGET_PPC64)
2472 #define _GEN_MEM_FUNCS(name, mode) \ 2494 #define _GEN_MEM_FUNCS(name, mode) \
2473 &gen_op_##name##_##mode, \ 2495 &gen_op_##name##_##mode, \
@@ -3220,67 +3242,79 @@ GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
3220 GEN_STOP(ctx); 3242 GEN_STOP(ctx);
3221 } 3243 }
3222 3244
3223 -#define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])()  
3224 -#define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])()  
3225 -static GenOpFunc *gen_op_lwarx[NB_MEM_FUNCS] = {  
3226 - GEN_MEM_FUNCS(lwarx),  
3227 -};  
3228 -static GenOpFunc *gen_op_stwcx[NB_MEM_FUNCS] = {  
3229 - GEN_MEM_FUNCS(stwcx),  
3230 -};  
3231 -  
3232 /* lwarx */ 3245 /* lwarx */
3233 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES) 3246 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
3234 { 3247 {
3235 - /* NIP cannot be restored if the memory exception comes from an helper */  
3236 - gen_update_nip(ctx, ctx->nip - 4); 3248 + TCGv t0 = tcg_temp_local_new();
3237 gen_set_access_type(ACCESS_RES); 3249 gen_set_access_type(ACCESS_RES);
3238 - gen_addr_reg_index(cpu_T[0], ctx);  
3239 - op_lwarx();  
3240 - tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); 3250 + gen_addr_reg_index(t0, ctx);
  3251 + gen_check_align(ctx, t0, 0x03);
  3252 +#if defined(TARGET_PPC64)
  3253 + if (!ctx->sf_mode)
  3254 + tcg_gen_ext32u_tl(t0, t0);
  3255 +#endif
  3256 + gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
  3257 + tcg_gen_mov_tl(cpu_reserve, t0);
  3258 + tcg_temp_free(t0);
3241 } 3259 }
3242 3260
3243 /* stwcx. */ 3261 /* stwcx. */
3244 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES) 3262 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
3245 { 3263 {
3246 - /* NIP cannot be restored if the memory exception comes from an helper */  
3247 - gen_update_nip(ctx, ctx->nip - 4); 3264 + int l1 = gen_new_label();
  3265 + TCGv t0 = tcg_temp_local_new();
3248 gen_set_access_type(ACCESS_RES); 3266 gen_set_access_type(ACCESS_RES);
3249 - gen_addr_reg_index(cpu_T[0], ctx);  
3250 - tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);  
3251 - op_stwcx(); 3267 + gen_addr_reg_index(t0, ctx);
  3268 + gen_check_align(ctx, t0, 0x03);
  3269 +#if defined(TARGET_PPC64)
  3270 + if (!ctx->sf_mode)
  3271 + tcg_gen_ext32u_tl(t0, t0);
  3272 +#endif
  3273 + tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
  3274 + tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
  3275 + tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
  3276 + tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
  3277 + tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
  3278 + gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx);
  3279 + gen_set_label(l1);
  3280 + tcg_gen_movi_tl(cpu_reserve, -1);
  3281 + tcg_temp_free(t0);
3252 } 3282 }
3253 3283
3254 #if defined(TARGET_PPC64) 3284 #if defined(TARGET_PPC64)
3255 -#define op_ldarx() (*gen_op_ldarx[ctx->mem_idx])()  
3256 -#define op_stdcx() (*gen_op_stdcx[ctx->mem_idx])()  
3257 -static GenOpFunc *gen_op_ldarx[NB_MEM_FUNCS] = {  
3258 - GEN_MEM_FUNCS(ldarx),  
3259 -};  
3260 -static GenOpFunc *gen_op_stdcx[NB_MEM_FUNCS] = {  
3261 - GEN_MEM_FUNCS(stdcx),  
3262 -};  
3263 -  
3264 /* ldarx */ 3285 /* ldarx */
3265 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B) 3286 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
3266 { 3287 {
3267 - /* NIP cannot be restored if the memory exception comes from an helper */  
3268 - gen_update_nip(ctx, ctx->nip - 4); 3288 + TCGv t0 = tcg_temp_local_new();
3269 gen_set_access_type(ACCESS_RES); 3289 gen_set_access_type(ACCESS_RES);
3270 - gen_addr_reg_index(cpu_T[0], ctx);  
3271 - op_ldarx();  
3272 - tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); 3290 + gen_addr_reg_index(t0, ctx);
  3291 + gen_check_align(ctx, t0, 0x07);
  3292 + if (!ctx->sf_mode)
  3293 + tcg_gen_ext32u_tl(t0, t0);
  3294 + gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
  3295 + tcg_gen_mov_tl(cpu_reserve, t0);
  3296 + tcg_temp_free(t0);
3273 } 3297 }
3274 3298
3275 /* stdcx. */ 3299 /* stdcx. */
3276 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B) 3300 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
3277 { 3301 {
3278 - /* NIP cannot be restored if the memory exception comes from an helper */  
3279 - gen_update_nip(ctx, ctx->nip - 4); 3302 + int l1 = gen_new_label();
  3303 + TCGv t0 = tcg_temp_local_new();
3280 gen_set_access_type(ACCESS_RES); 3304 gen_set_access_type(ACCESS_RES);
3281 - gen_addr_reg_index(cpu_T[0], ctx);  
3282 - tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);  
3283 - op_stdcx(); 3305 + gen_addr_reg_index(t0, ctx);
  3306 + gen_check_align(ctx, t0, 0x07);
  3307 + if (!ctx->sf_mode)
  3308 + tcg_gen_ext32u_tl(t0, t0);
  3309 + tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
  3310 + tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
  3311 + tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
  3312 + tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
  3313 + tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
  3314 + gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx);
  3315 + gen_set_label(l1);
  3316 + tcg_gen_movi_tl(cpu_reserve, -1);
  3317 + tcg_temp_free(t0);
3284 } 3318 }
3285 #endif /* defined(TARGET_PPC64) */ 3319 #endif /* defined(TARGET_PPC64) */
3286 3320