Commit a4f30719a8cdffd49194774ef578c1ced88f9fe5
1 parent 9a87ce9b
PowerPC hypervisor mode is not fundamentally restricted to 64-bit PowerPC.
Remove the TARGET_PPC64 dependency and add provisions for defining a fake 32-bit CPU with hypervisor feature support.

git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@3678 c046a42c-6fe2-441c-8c8c-71466251a162
Showing 4 changed files with 73 additions and 108 deletions.
target-ppc/cpu.h
@@ -23,6 +23,8 @@
 #include "config.h"
 #include <inttypes.h>
 
+//#define PPC_EMULATE_32BITS_HYPV
+
 #if defined (TARGET_PPC64)
 /* PowerPC 64 definitions */
 typedef uint64_t ppc_gpr_t;
@@ -343,9 +345,10 @@ union ppc_tlb_t {
 /* Machine state register bits definition */
 #define MSR_SF 63 /* Sixty-four-bit mode hflags */
 #define MSR_ISF 61 /* Sixty-four-bit interrupt mode on 630 */
-#define MSR_HV 60 /* hypervisor state hflags */
+#define MSR_SHV 60 /* hypervisor state hflags */
 #define MSR_CM 31 /* Computation mode for BookE hflags */
 #define MSR_ICM 30 /* Interrupt computation mode for BookE */
+#define MSR_THV 29 /* hypervisor state for 32 bits PowerPC hflags */
 #define MSR_UCLE 26 /* User-mode cache lock enable for BookE */
 #define MSR_VR 25 /* altivec available x hflags */
 #define MSR_SPE 25 /* SPE enable for BookE x hflags */
@@ -379,9 +382,10 @@ union ppc_tlb_t {
 
 #define msr_sf ((env->msr >> MSR_SF) & 1)
 #define msr_isf ((env->msr >> MSR_ISF) & 1)
-#define msr_hv ((env->msr >> MSR_HV) & 1)
+#define msr_shv ((env->msr >> MSR_SHV) & 1)
 #define msr_cm ((env->msr >> MSR_CM) & 1)
 #define msr_icm ((env->msr >> MSR_ICM) & 1)
+#define msr_thv ((env->msr >> MSR_THV) & 1)
 #define msr_ucle ((env->msr >> MSR_UCLE) & 1)
 #define msr_vr ((env->msr >> MSR_VR) & 1)
 #define msr_spe ((env->msr >> MSR_SE) & 1)
@@ -412,6 +416,20 @@ union ppc_tlb_t {
 #define msr_pmm ((env->msr >> MSR_PMM) & 1)
 #define msr_ri ((env->msr >> MSR_RI) & 1)
 #define msr_le ((env->msr >> MSR_LE) & 1)
+/* Hypervisor bit is more specific */
+#if defined(TARGET_PPC64)
+#define MSR_HVB (1ULL << MSR_SHV)
+#define msr_hv msr_shv
+#else
+#if defined(PPC_EMULATE_32BITS_HYPV)
+#define MSR_HVB (1ULL << MSR_THV)
+#define msr_hv msr_thv
+#define
+#else
+#define MSR_HVB (0ULL)
+#define msr_hv (0)
+#endif
+#endif
 
 enum {
     POWERPC_FLAG_NONE = 0x00000000,
@@ -428,7 +446,7 @@ enum {
     /* Flag for MSR bit 9 signification (BE/DE) */
    POWERPC_FLAG_BE = 0x00000080,
    POWERPC_FLAG_DE = 0x00000100,
-    /* Flag for MSR but 2 signification (PX/PMM) */
+    /* Flag for MSR bit 2 signification (PX/PMM) */
    POWERPC_FLAG_PX = 0x00000200,
    POWERPC_FLAG_PMM = 0x00000400,
 };
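Note (illustration, not part of the patch): the new block above selects the hypervisor bit per build configuration, MSR[HV] at bit 60 on 64-bit targets, bit 29 under the experimental PPC_EMULATE_32BITS_HYPV switch, and a hard-wired zero otherwise. A minimal standalone sketch of that selection, reusing the patch's macro names but collapsing the nested #if into #elif (the test program around it is illustrative only):

#include <stdio.h>
#include <stdint.h>

/* Bit positions as defined in the patched cpu.h. */
#define MSR_SHV 60  /* hypervisor state, 64-bit PowerPC */
#define MSR_THV 29  /* hypervisor state, 32-bit PowerPC emulation */

/* Define one of these to model a 64-bit target or the fake 32-bit
 * hypervisor-capable CPU; with neither defined, MSR_HVB collapses to 0. */
/* #define TARGET_PPC64 */
/* #define PPC_EMULATE_32BITS_HYPV */

#if defined(TARGET_PPC64)
#define MSR_HVB (1ULL << MSR_SHV)
#elif defined(PPC_EMULATE_32BITS_HYPV)
#define MSR_HVB (1ULL << MSR_THV)
#else
#define MSR_HVB (0ULL)
#endif

int main(void)
{
    printf("MSR_HVB = 0x%016llx\n", (unsigned long long)MSR_HVB);
    return 0;
}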
target-ppc/helper.c
@@ -2100,7 +2100,7 @@ void ppc_store_xer (CPUPPCState *env, target_ulong value)
 /* GDBstub can read and write MSR... */
 void ppc_store_msr (CPUPPCState *env, target_ulong value)
 {
-    hreg_store_msr(env, value);
+    hreg_store_msr(env, value, 0);
 }
 
 /*****************************************************************************/
@@ -2134,10 +2134,7 @@ static always_inline void powerpc_excp (CPUState *env,
 {
     target_ulong msr, new_msr, vector;
     int srr0, srr1, asrr0, asrr1;
-    int lpes0, lpes1;
-#if defined(TARGET_PPC64)
-    int lev;
-#endif
+    int lpes0, lpes1, lev;
 
     if (0) {
         /* XXX: find a suitable condition to enable the hypervisor mode */
@@ -2198,12 +2195,10 @@ static always_inline void powerpc_excp (CPUState *env,
         }
         new_msr &= ~((target_ulong)1 << MSR_RI);
         new_msr &= ~((target_ulong)1 << MSR_ME);
-#if defined(TARGET_PPC64)
         if (0) {
             /* XXX: find a suitable condition to enable the hypervisor mode */
-            new_msr |= (target_ulong)1 << MSR_HV;
+            new_msr |= (target_ulong)MSR_HVB;
         }
-#endif
         /* XXX: should also have something loaded in DAR / DSISR */
         switch (excp_model) {
         case POWERPC_EXCP_40x:
@@ -2228,10 +2223,8 @@ static always_inline void powerpc_excp (CPUState *env,
         }
 #endif
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_ISI: /* Instruction storage exception */
 #if defined (DEBUG_EXCEPTIONS)
@@ -2241,25 +2234,19 @@ static always_inline void powerpc_excp (CPUState *env,
         }
 #endif
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         msr |= env->error_code;
         goto store_next;
     case POWERPC_EXCP_EXTERNAL: /* External input */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes0 == 1)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_ALIGN: /* Alignment exception */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         /* XXX: this is false */
         /* Get rS/rD and rA from faulting opcode */
         env->spr[SPR_DSISR] |= (ldl_code((env->nip - 4)) & 0x03FF0000) >> 16;
@@ -2278,10 +2265,8 @@ static always_inline void powerpc_excp (CPUState *env,
             return;
         }
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         msr |= 0x00100000;
         if (msr_fe0 == msr_fe1)
             goto store_next;
@@ -2295,26 +2280,20 @@ static always_inline void powerpc_excp (CPUState *env,
             }
 #endif
             new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
             if (lpes1 == 0)
-                new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+                new_msr |= (target_ulong)MSR_HVB;
             msr |= 0x00080000;
             break;
         case POWERPC_EXCP_PRIV:
             new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
             if (lpes1 == 0)
-                new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+                new_msr |= (target_ulong)MSR_HVB;
             msr |= 0x00040000;
             break;
         case POWERPC_EXCP_TRAP:
             new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
             if (lpes1 == 0)
-                new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+                new_msr |= (target_ulong)MSR_HVB;
             msr |= 0x00020000;
             break;
         default:
@@ -2326,10 +2305,8 @@ static always_inline void powerpc_excp (CPUState *env,
         goto store_current;
     case POWERPC_EXCP_FPU: /* Floating-point unavailable exception */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_current;
     case POWERPC_EXCP_SYSCALL: /* System call exception */
         /* NOTE: this is a temporary hack to support graphics OSI
@@ -2347,21 +2324,17 @@ static always_inline void powerpc_excp (CPUState *env,
             dump_syscall(env);
         }
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         lev = env->error_code;
         if (lev == 1 || (lpes0 == 0 && lpes1 == 0))
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_APU: /* Auxiliary processor unavailable */
         new_msr &= ~((target_ulong)1 << MSR_RI);
         goto store_current;
     case POWERPC_EXCP_DECR: /* Decrementer exception */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_FIT: /* Fixed-interval timer interrupt */
         /* FIT on 4xx */
@@ -2445,72 +2418,55 @@ static always_inline void powerpc_excp (CPUState *env,
         goto store_next;
     case POWERPC_EXCP_RESET: /* System reset exception */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
-        new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        if (0) {
+            /* XXX: find a suitable condition to enable the hypervisor mode */
+            new_msr |= (target_ulong)MSR_HVB;
+        }
         goto store_next;
     case POWERPC_EXCP_DSEG: /* Data segment exception */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_ISEG: /* Instruction segment exception */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_HDECR: /* Hypervisor decrementer exception */
         srr0 = SPR_HSRR0;
         srr1 = SPR_HSRR1;
-#if defined(TARGET_PPC64)
-        new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_TRACE: /* Trace exception */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_HDSI: /* Hypervisor data storage exception */
         srr0 = SPR_HSRR0;
         srr1 = SPR_HSRR1;
-#if defined(TARGET_PPC64)
-        new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_HISI: /* Hypervisor instruction storage exception */
         srr0 = SPR_HSRR0;
         srr1 = SPR_HSRR1;
-#if defined(TARGET_PPC64)
-        new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_HDSEG: /* Hypervisor data segment exception */
         srr0 = SPR_HSRR0;
         srr1 = SPR_HSRR1;
-#if defined(TARGET_PPC64)
-        new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_HISEG: /* Hypervisor instruction segment exception */
         srr0 = SPR_HSRR0;
         srr1 = SPR_HSRR1;
-#if defined(TARGET_PPC64)
-        new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        new_msr |= (target_ulong)MSR_HVB;
         goto store_next;
     case POWERPC_EXCP_VPU: /* Vector unavailable exception */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         goto store_current;
     case POWERPC_EXCP_PIT: /* Programmable interval timer interrupt */
 #if defined (DEBUG_EXCEPTIONS)
@@ -2534,10 +2490,8 @@ static always_inline void powerpc_excp (CPUState *env,
         goto store_next;
     case POWERPC_EXCP_IFTLB: /* Instruction fetch TLB error */
         new_msr &= ~((target_ulong)1 << MSR_RI); /* XXX: check this */
-#if defined(TARGET_PPC64) /* XXX: check this */
-        if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        if (lpes1 == 0) /* XXX: check this */
+            new_msr |= (target_ulong)MSR_HVB;
         switch (excp_model) {
         case POWERPC_EXCP_602:
         case POWERPC_EXCP_603:
@@ -2555,10 +2509,8 @@ static always_inline void powerpc_excp (CPUState *env,
         break;
     case POWERPC_EXCP_DLTLB: /* Data load TLB miss */
         new_msr &= ~((target_ulong)1 << MSR_RI); /* XXX: check this */
-#if defined(TARGET_PPC64) /* XXX: check this */
-        if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        if (lpes1 == 0) /* XXX: check this */
+            new_msr |= (target_ulong)MSR_HVB;
         switch (excp_model) {
         case POWERPC_EXCP_602:
         case POWERPC_EXCP_603:
@@ -2576,10 +2528,8 @@ static always_inline void powerpc_excp (CPUState *env,
         break;
     case POWERPC_EXCP_DSTLB: /* Data store TLB miss */
         new_msr &= ~((target_ulong)1 << MSR_RI); /* XXX: check this */
-#if defined(TARGET_PPC64) /* XXX: check this */
-        if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+        if (lpes1 == 0) /* XXX: check this */
+            new_msr |= (target_ulong)MSR_HVB;
         switch (excp_model) {
         case POWERPC_EXCP_602:
         case POWERPC_EXCP_603:
@@ -2678,10 +2628,8 @@ static always_inline void powerpc_excp (CPUState *env,
         goto store_next;
     case POWERPC_EXCP_PERFM: /* Embedded performance monitor interrupt */
         new_msr &= ~((target_ulong)1 << MSR_RI);
-#if defined(TARGET_PPC64)
         if (lpes1 == 0)
-            new_msr |= (target_ulong)1 << MSR_HV;
-#endif
+            new_msr |= (target_ulong)MSR_HVB;
         /* XXX: TODO */
         cpu_abort(env,
                   "Performance counter exception is not implemented yet !\n");
@@ -2768,8 +2716,7 @@ static always_inline void powerpc_excp (CPUState *env,
     /* XXX: we don't use hreg_store_msr here as already have treated
      * any special case that could occur. Just store MSR and update hflags
      */
-    env->msr = new_msr;
-    env->hflags_nmsr = 0x00000000;
+    env->msr = new_msr & env->msr_mask;
     hreg_compute_hflags(env);
     env->nip = vector;
     /* Reset exception state */
@@ -2784,9 +2731,7 @@ void do_interrupt (CPUState *env)
 
 void ppc_hw_interrupt (CPUPPCState *env)
 {
-#if defined(TARGET_PPC64)
     int hdice;
-#endif
 
 #if 0
     if (loglevel & CPU_LOG_INT) {
@@ -2815,7 +2760,6 @@ void ppc_hw_interrupt (CPUPPCState *env)
         return;
     }
 #endif
-#if defined(TARGET_PPC64)
     if (0) {
         /* XXX: find a suitable condition to enable the hypervisor mode */
         hdice = env->spr[SPR_LPCR] & 1;
@@ -2830,7 +2774,6 @@ void ppc_hw_interrupt (CPUPPCState *env)
             return;
         }
     }
-#endif
     if (msr_ce != 0) {
         /* External critical interrupt */
         if (env->pending_interrupts & (1 << PPC_INTERRUPT_CEXT)) {
@@ -2939,9 +2882,10 @@ void cpu_ppc_reset (void *opaque)
 
     env = opaque;
     msr = (target_ulong)0;
-#if defined(TARGET_PPC64)
-    msr |= (target_ulong)0 << MSR_HV; /* Should be 1... */
-#endif
+    if (0) {
+        /* XXX: find a suitable condition to enable the hypervisor mode */
+        msr |= (target_ulong)MSR_HVB;
+    }
     msr |= (target_ulong)0 << MSR_AP; /* TO BE CHECKED */
     msr |= (target_ulong)0 << MSR_SA; /* TO BE CHECKED */
     msr |= (target_ulong)1 << MSR_EP;
target-ppc/helper_regs.h
@@ -60,13 +60,12 @@ static always_inline void hreg_swap_gpr_tgpr (CPUPPCState *env)
 
 static always_inline void hreg_compute_mem_idx (CPUPPCState *env)
 {
-#if defined (TARGET_PPC64)
     /* Precompute MMU index */
-    if (msr_pr == 0 && msr_hv != 0)
+    if (msr_pr == 0 && msr_hv != 0) {
         env->mmu_idx = 2;
-    else
-#endif
+    } else {
         env->mmu_idx = 1 - msr_pr;
+    }
 }
 
 static always_inline void hreg_compute_hflags (CPUPPCState *env)
@@ -77,22 +76,26 @@ static always_inline void hreg_compute_hflags (CPUPPCState *env)
     hflags_mask = (1 << MSR_VR) | (1 << MSR_AP) | (1 << MSR_SA) |
         (1 << MSR_PR) | (1 << MSR_FP) | (1 << MSR_SE) | (1 << MSR_BE) |
         (1 << MSR_LE);
-#if defined (TARGET_PPC64)
-    hflags_mask |= (1ULL << MSR_CM) | (1ULL << MSR_SF) | (1ULL << MSR_HV);
-#endif
+    hflags_mask |= (1ULL << MSR_CM) | (1ULL << MSR_SF) | MSR_HVB;
     hreg_compute_mem_idx(env);
     env->hflags = env->msr & hflags_mask;
     /* Merge with hflags coming from other registers */
     env->hflags |= env->hflags_nmsr;
 }
 
-static always_inline int hreg_store_msr (CPUPPCState *env, target_ulong value)
+static always_inline int hreg_store_msr (CPUPPCState *env, target_ulong value,
+                                         int alter_hv)
 {
     int excp;
 
     excp = 0;
     value &= env->msr_mask;
 #if !defined (CONFIG_USER_ONLY)
+    if (!alter_hv) {
+        /* mtmsr cannot alter the hypervisor state */
+        value &= ~MSR_HVB;
+        value |= env->msr & MSR_HVB;
+    }
     if (((value >> MSR_IR) & 1) != msr_ir ||
         ((value >> MSR_DR) & 1) != msr_dr) {
         /* Flush all tlb when changing translation mode */
target-ppc/op_helper.c
@@ -1425,7 +1425,7 @@ void cpu_dump_rfi (target_ulong RA, target_ulong msr);
 
 void do_store_msr (void)
 {
-    T0 = hreg_store_msr(env, T0);
+    T0 = hreg_store_msr(env, T0, 0);
     if (T0 != 0) {
         env->interrupt_request |= CPU_INTERRUPT_EXITTB;
         do_raise_exception(T0);
@@ -1451,7 +1451,7 @@ static always_inline void __do_rfi (target_ulong nip, target_ulong msr,
 #endif
     /* XXX: beware: this is false if VLE is supported */
     env->nip = nip & ~((target_ulong)0x00000003);
-    hreg_store_msr(env, msr);
+    hreg_store_msr(env, msr, 1);
 #if defined (DEBUG_OP)
     cpu_dump_rfi(env->nip, env->msr);
 #endif
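Note (illustration, not part of the patch): the new alter_hv argument to hreg_store_msr distinguishes callers that may change the hypervisor state (the rfi path passes 1) from those that must preserve it (mtmsr via do_store_msr and the GDB stub path pass 0). A standalone sketch of the intended masking behaviour, with env and the QEMU register types simplified away:

#include <stdio.h>
#include <stdint.h>

#define MSR_HVB (1ULL << 60)  /* hypervisor bit mask, 64-bit layout assumed */

/* Simplified model of the hreg_store_msr() change: when alter_hv is 0 the
 * incoming value cannot flip MSR[HV], so the current HV state is carried
 * over; when alter_hv is 1 the new value takes effect as-is. */
static uint64_t store_msr(uint64_t cur_msr, uint64_t value, int alter_hv)
{
    if (!alter_hv) {
        value &= ~MSR_HVB;           /* drop the requested HV bit */
        value |= cur_msr & MSR_HVB;  /* keep the existing HV state */
    }
    return value;
}

int main(void)
{
    uint64_t msr = MSR_HVB;  /* currently running in hypervisor state */
    printf("mtmsr-style store keeps HV:   %d\n",
           (int)((store_msr(msr, 0, 0) >> 60) & 1));
    printf("rfi-style store may clear HV: %d\n",
           (int)((store_msr(msr, 0, 1) >> 60) & 1));
    return 0;
}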