Commit a4f30719a8cdffd49194774ef578c1ced88f9fe5
1 parent 9a87ce9b
PowerPC hypervisor mode is not fundamentally limited to PowerPC 64.
Remove the TARGET_PPC64 dependency and add a code provision to allow defining a fake 32-bit CPU with hypervisor feature support.

git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@3678 c046a42c-6fe2-441c-8c8c-71466251a162
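
In practice, the patch replaces the scattered #if defined(TARGET_PPC64) guards around the hypervisor bit with a single mask, MSR_HVB, that every build defines. A condensed sketch of the selection logic added to target-ppc/cpu.h (the full hunk is shown below; the real code uses nested #if blocks rather than #elif):

    /* Condensed sketch of the MSR_HVB selection this commit introduces. */
    #if defined(TARGET_PPC64)
    #define MSR_HVB (1ULL << MSR_SHV)   /* bit 60: hypervisor state on 64-bit CPUs */
    #define msr_hv  msr_shv
    #elif defined(PPC_EMULATE_32BITS_HYPV)
    #define MSR_HVB (1ULL << MSR_THV)   /* bit 29: fake hypervisor state on 32-bit CPUs */
    #define msr_hv  msr_thv
    #else
    #define MSR_HVB (0ULL)              /* no hypervisor support */
    #define msr_hv  (0)
    #endif

Because MSR_HVB collapses to 0ULL when hypervisor support is absent, code that ORs it into a new MSR value needs no per-target guard.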
Showing 4 changed files with 73 additions and 108 deletions
target-ppc/cpu.h
... | ... | @@ -23,6 +23,8 @@ |
23 | 23 | #include "config.h" |
24 | 24 | #include <inttypes.h> |
25 | 25 | |
26 | +//#define PPC_EMULATE_32BITS_HYPV | |
27 | + | |
26 | 28 | #if defined (TARGET_PPC64) |
27 | 29 | /* PowerPC 64 definitions */ |
28 | 30 | typedef uint64_t ppc_gpr_t; |
... | ... | @@ -343,9 +345,10 @@ union ppc_tlb_t { |
343 | 345 | /* Machine state register bits definition */ |
344 | 346 | #define MSR_SF 63 /* Sixty-four-bit mode hflags */ |
345 | 347 | #define MSR_ISF 61 /* Sixty-four-bit interrupt mode on 630 */ |
346 | -#define MSR_HV 60 /* hypervisor state hflags */ | |
348 | +#define MSR_SHV 60 /* hypervisor state hflags */ | |
347 | 349 | #define MSR_CM 31 /* Computation mode for BookE hflags */ |
348 | 350 | #define MSR_ICM 30 /* Interrupt computation mode for BookE */ |
351 | +#define MSR_THV 29 /* hypervisor state for 32 bits PowerPC hflags */ | |
349 | 352 | #define MSR_UCLE 26 /* User-mode cache lock enable for BookE */ |
350 | 353 | #define MSR_VR 25 /* altivec available x hflags */ |
351 | 354 | #define MSR_SPE 25 /* SPE enable for BookE x hflags */ |
... | ... | @@ -379,9 +382,10 @@ union ppc_tlb_t { |
379 | 382 | |
380 | 383 | #define msr_sf ((env->msr >> MSR_SF) & 1) |
381 | 384 | #define msr_isf ((env->msr >> MSR_ISF) & 1) |
382 | -#define msr_hv ((env->msr >> MSR_HV) & 1) | |
385 | +#define msr_shv ((env->msr >> MSR_SHV) & 1) | |
383 | 386 | #define msr_cm ((env->msr >> MSR_CM) & 1) |
384 | 387 | #define msr_icm ((env->msr >> MSR_ICM) & 1) |
388 | +#define msr_thv ((env->msr >> MSR_THV) & 1) | |
385 | 389 | #define msr_ucle ((env->msr >> MSR_UCLE) & 1) |
386 | 390 | #define msr_vr ((env->msr >> MSR_VR) & 1) |
387 | 391 | #define msr_spe ((env->msr >> MSR_SE) & 1) |
... | ... | @@ -412,6 +416,20 @@ union ppc_tlb_t { |
412 | 416 | #define msr_pmm ((env->msr >> MSR_PMM) & 1) |
413 | 417 | #define msr_ri ((env->msr >> MSR_RI) & 1) |
414 | 418 | #define msr_le ((env->msr >> MSR_LE) & 1) |
419 | +/* Hypervisor bit is more specific */ | |
420 | +#if defined(TARGET_PPC64) | |
421 | +#define MSR_HVB (1ULL << MSR_SHV) | |
422 | +#define msr_hv msr_shv | |
423 | +#else | |
424 | +#if defined(PPC_EMULATE_32BITS_HYPV) | |
425 | +#define MSR_HVB (1ULL << MSR_THV) | |
426 | +#define msr_hv msr_thv | |
427 | +#define | |
428 | +#else | |
429 | +#define MSR_HVB (0ULL) | |
430 | +#define msr_hv (0) | |
431 | +#endif | |
432 | +#endif | |
415 | 433 | |
416 | 434 | enum { |
417 | 435 | POWERPC_FLAG_NONE = 0x00000000, |
... | ... | @@ -428,7 +446,7 @@ enum { |
428 | 446 | /* Flag for MSR bit 9 signification (BE/DE) */ |
429 | 447 | POWERPC_FLAG_BE = 0x00000080, |
430 | 448 | POWERPC_FLAG_DE = 0x00000100, |
431 | - /* Flag for MSR but 2 signification (PX/PMM) */ | |
449 | + /* Flag for MSR bit 2 signification (PX/PMM) */ | |
432 | 450 | POWERPC_FLAG_PX = 0x00000200, |
433 | 451 | POWERPC_FLAG_PMM = 0x00000400, |
434 | 452 | }; |
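
With these definitions in place, a hypervisor-state test written against msr_hv compiles in every configuration; on builds without hypervisor support msr_hv is the constant 0 and the branch is simply dead code. A minimal illustration, using the same test hreg_compute_mem_idx() applies in helper_regs.h further below:

    /* Hypervisor state is supervisor mode (msr_pr == 0) with the HV bit set. */
    if (msr_pr == 0 && msr_hv != 0) {
        env->mmu_idx = 2;           /* hypervisor MMU index */
    } else {
        env->mmu_idx = 1 - msr_pr;  /* 1 = supervisor, 0 = user */
    }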
target-ppc/helper.c
... | ... | @@ -2100,7 +2100,7 @@ void ppc_store_xer (CPUPPCState *env, target_ulong value) |
2100 | 2100 | /* GDBstub can read and write MSR... */ |
2101 | 2101 | void ppc_store_msr (CPUPPCState *env, target_ulong value) |
2102 | 2102 | { |
2103 | - hreg_store_msr(env, value); | |
2103 | + hreg_store_msr(env, value, 0); | |
2104 | 2104 | } |
2105 | 2105 | |
2106 | 2106 | /*****************************************************************************/ |
... | ... | @@ -2134,10 +2134,7 @@ static always_inline void powerpc_excp (CPUState *env, |
2134 | 2134 | { |
2135 | 2135 | target_ulong msr, new_msr, vector; |
2136 | 2136 | int srr0, srr1, asrr0, asrr1; |
2137 | - int lpes0, lpes1; | |
2138 | -#if defined(TARGET_PPC64) | |
2139 | - int lev; | |
2140 | -#endif | |
2137 | + int lpes0, lpes1, lev; | |
2141 | 2138 | |
2142 | 2139 | if (0) { |
2143 | 2140 | /* XXX: find a suitable condition to enable the hypervisor mode */ |
... | ... | @@ -2198,12 +2195,10 @@ static always_inline void powerpc_excp (CPUState *env, |
2198 | 2195 | } |
2199 | 2196 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2200 | 2197 | new_msr &= ~((target_ulong)1 << MSR_ME); |
2201 | -#if defined(TARGET_PPC64) | |
2202 | 2198 | if (0) { |
2203 | 2199 | /* XXX: find a suitable condition to enable the hypervisor mode */ |
2204 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2200 | + new_msr |= (target_ulong)MSR_HVB; | |
2205 | 2201 | } |
2206 | -#endif | |
2207 | 2202 | /* XXX: should also have something loaded in DAR / DSISR */ |
2208 | 2203 | switch (excp_model) { |
2209 | 2204 | case POWERPC_EXCP_40x: |
... | ... | @@ -2228,10 +2223,8 @@ static always_inline void powerpc_excp (CPUState *env, |
2228 | 2223 | } |
2229 | 2224 | #endif |
2230 | 2225 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2231 | -#if defined(TARGET_PPC64) | |
2232 | 2226 | if (lpes1 == 0) |
2233 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2234 | -#endif | |
2227 | + new_msr |= (target_ulong)MSR_HVB; | |
2235 | 2228 | goto store_next; |
2236 | 2229 | case POWERPC_EXCP_ISI: /* Instruction storage exception */ |
2237 | 2230 | #if defined (DEBUG_EXCEPTIONS) |
... | ... | @@ -2241,25 +2234,19 @@ static always_inline void powerpc_excp (CPUState *env, |
2241 | 2234 | } |
2242 | 2235 | #endif |
2243 | 2236 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2244 | -#if defined(TARGET_PPC64) | |
2245 | 2237 | if (lpes1 == 0) |
2246 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2247 | -#endif | |
2238 | + new_msr |= (target_ulong)MSR_HVB; | |
2248 | 2239 | msr |= env->error_code; |
2249 | 2240 | goto store_next; |
2250 | 2241 | case POWERPC_EXCP_EXTERNAL: /* External input */ |
2251 | 2242 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2252 | -#if defined(TARGET_PPC64) | |
2253 | 2243 | if (lpes0 == 1) |
2254 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2255 | -#endif | |
2244 | + new_msr |= (target_ulong)MSR_HVB; | |
2256 | 2245 | goto store_next; |
2257 | 2246 | case POWERPC_EXCP_ALIGN: /* Alignment exception */ |
2258 | 2247 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2259 | -#if defined(TARGET_PPC64) | |
2260 | 2248 | if (lpes1 == 0) |
2261 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2262 | -#endif | |
2249 | + new_msr |= (target_ulong)MSR_HVB; | |
2263 | 2250 | /* XXX: this is false */ |
2264 | 2251 | /* Get rS/rD and rA from faulting opcode */ |
2265 | 2252 | env->spr[SPR_DSISR] |= (ldl_code((env->nip - 4)) & 0x03FF0000) >> 16; |
... | ... | @@ -2278,10 +2265,8 @@ static always_inline void powerpc_excp (CPUState *env, |
2278 | 2265 | return; |
2279 | 2266 | } |
2280 | 2267 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2281 | -#if defined(TARGET_PPC64) | |
2282 | 2268 | if (lpes1 == 0) |
2283 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2284 | -#endif | |
2269 | + new_msr |= (target_ulong)MSR_HVB; | |
2285 | 2270 | msr |= 0x00100000; |
2286 | 2271 | if (msr_fe0 == msr_fe1) |
2287 | 2272 | goto store_next; |
... | ... | @@ -2295,26 +2280,20 @@ static always_inline void powerpc_excp (CPUState *env, |
2295 | 2280 | } |
2296 | 2281 | #endif |
2297 | 2282 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2298 | -#if defined(TARGET_PPC64) | |
2299 | 2283 | if (lpes1 == 0) |
2300 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2301 | -#endif | |
2284 | + new_msr |= (target_ulong)MSR_HVB; | |
2302 | 2285 | msr |= 0x00080000; |
2303 | 2286 | break; |
2304 | 2287 | case POWERPC_EXCP_PRIV: |
2305 | 2288 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2306 | -#if defined(TARGET_PPC64) | |
2307 | 2289 | if (lpes1 == 0) |
2308 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2309 | -#endif | |
2290 | + new_msr |= (target_ulong)MSR_HVB; | |
2310 | 2291 | msr |= 0x00040000; |
2311 | 2292 | break; |
2312 | 2293 | case POWERPC_EXCP_TRAP: |
2313 | 2294 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2314 | -#if defined(TARGET_PPC64) | |
2315 | 2295 | if (lpes1 == 0) |
2316 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2317 | -#endif | |
2296 | + new_msr |= (target_ulong)MSR_HVB; | |
2318 | 2297 | msr |= 0x00020000; |
2319 | 2298 | break; |
2320 | 2299 | default: |
... | ... | @@ -2326,10 +2305,8 @@ static always_inline void powerpc_excp (CPUState *env, |
2326 | 2305 | goto store_current; |
2327 | 2306 | case POWERPC_EXCP_FPU: /* Floating-point unavailable exception */ |
2328 | 2307 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2329 | -#if defined(TARGET_PPC64) | |
2330 | 2308 | if (lpes1 == 0) |
2331 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2332 | -#endif | |
2309 | + new_msr |= (target_ulong)MSR_HVB; | |
2333 | 2310 | goto store_current; |
2334 | 2311 | case POWERPC_EXCP_SYSCALL: /* System call exception */ |
2335 | 2312 | /* NOTE: this is a temporary hack to support graphics OSI |
... | ... | @@ -2347,21 +2324,17 @@ static always_inline void powerpc_excp (CPUState *env, |
2347 | 2324 | dump_syscall(env); |
2348 | 2325 | } |
2349 | 2326 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2350 | -#if defined(TARGET_PPC64) | |
2351 | 2327 | lev = env->error_code; |
2352 | 2328 | if (lev == 1 || (lpes0 == 0 && lpes1 == 0)) |
2353 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2354 | -#endif | |
2329 | + new_msr |= (target_ulong)MSR_HVB; | |
2355 | 2330 | goto store_next; |
2356 | 2331 | case POWERPC_EXCP_APU: /* Auxiliary processor unavailable */ |
2357 | 2332 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2358 | 2333 | goto store_current; |
2359 | 2334 | case POWERPC_EXCP_DECR: /* Decrementer exception */ |
2360 | 2335 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2361 | -#if defined(TARGET_PPC64) | |
2362 | 2336 | if (lpes1 == 0) |
2363 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2364 | -#endif | |
2337 | + new_msr |= (target_ulong)MSR_HVB; | |
2365 | 2338 | goto store_next; |
2366 | 2339 | case POWERPC_EXCP_FIT: /* Fixed-interval timer interrupt */ |
2367 | 2340 | /* FIT on 4xx */ |
... | ... | @@ -2445,72 +2418,55 @@ static always_inline void powerpc_excp (CPUState *env, |
2445 | 2418 | goto store_next; |
2446 | 2419 | case POWERPC_EXCP_RESET: /* System reset exception */ |
2447 | 2420 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2448 | -#if defined(TARGET_PPC64) | |
2449 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2450 | -#endif | |
2421 | + if (0) { | |
2422 | + /* XXX: find a suitable condition to enable the hypervisor mode */ | |
2423 | + new_msr |= (target_ulong)MSR_HVB; | |
2424 | + } | |
2451 | 2425 | goto store_next; |
2452 | 2426 | case POWERPC_EXCP_DSEG: /* Data segment exception */ |
2453 | 2427 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2454 | -#if defined(TARGET_PPC64) | |
2455 | 2428 | if (lpes1 == 0) |
2456 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2457 | -#endif | |
2429 | + new_msr |= (target_ulong)MSR_HVB; | |
2458 | 2430 | goto store_next; |
2459 | 2431 | case POWERPC_EXCP_ISEG: /* Instruction segment exception */ |
2460 | 2432 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2461 | -#if defined(TARGET_PPC64) | |
2462 | 2433 | if (lpes1 == 0) |
2463 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2464 | -#endif | |
2434 | + new_msr |= (target_ulong)MSR_HVB; | |
2465 | 2435 | goto store_next; |
2466 | 2436 | case POWERPC_EXCP_HDECR: /* Hypervisor decrementer exception */ |
2467 | 2437 | srr0 = SPR_HSRR0; |
2468 | 2438 | srr1 = SPR_HSRR1; |
2469 | -#if defined(TARGET_PPC64) | |
2470 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2471 | -#endif | |
2439 | + new_msr |= (target_ulong)MSR_HVB; | |
2472 | 2440 | goto store_next; |
2473 | 2441 | case POWERPC_EXCP_TRACE: /* Trace exception */ |
2474 | 2442 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2475 | -#if defined(TARGET_PPC64) | |
2476 | 2443 | if (lpes1 == 0) |
2477 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2478 | -#endif | |
2444 | + new_msr |= (target_ulong)MSR_HVB; | |
2479 | 2445 | goto store_next; |
2480 | 2446 | case POWERPC_EXCP_HDSI: /* Hypervisor data storage exception */ |
2481 | 2447 | srr0 = SPR_HSRR0; |
2482 | 2448 | srr1 = SPR_HSRR1; |
2483 | -#if defined(TARGET_PPC64) | |
2484 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2485 | -#endif | |
2449 | + new_msr |= (target_ulong)MSR_HVB; | |
2486 | 2450 | goto store_next; |
2487 | 2451 | case POWERPC_EXCP_HISI: /* Hypervisor instruction storage exception */ |
2488 | 2452 | srr0 = SPR_HSRR0; |
2489 | 2453 | srr1 = SPR_HSRR1; |
2490 | -#if defined(TARGET_PPC64) | |
2491 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2492 | -#endif | |
2454 | + new_msr |= (target_ulong)MSR_HVB; | |
2493 | 2455 | goto store_next; |
2494 | 2456 | case POWERPC_EXCP_HDSEG: /* Hypervisor data segment exception */ |
2495 | 2457 | srr0 = SPR_HSRR0; |
2496 | 2458 | srr1 = SPR_HSRR1; |
2497 | -#if defined(TARGET_PPC64) | |
2498 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2499 | -#endif | |
2459 | + new_msr |= (target_ulong)MSR_HVB; | |
2500 | 2460 | goto store_next; |
2501 | 2461 | case POWERPC_EXCP_HISEG: /* Hypervisor instruction segment exception */ |
2502 | 2462 | srr0 = SPR_HSRR0; |
2503 | 2463 | srr1 = SPR_HSRR1; |
2504 | -#if defined(TARGET_PPC64) | |
2505 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2506 | -#endif | |
2464 | + new_msr |= (target_ulong)MSR_HVB; | |
2507 | 2465 | goto store_next; |
2508 | 2466 | case POWERPC_EXCP_VPU: /* Vector unavailable exception */ |
2509 | 2467 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2510 | -#if defined(TARGET_PPC64) | |
2511 | 2468 | if (lpes1 == 0) |
2512 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2513 | -#endif | |
2469 | + new_msr |= (target_ulong)MSR_HVB; | |
2514 | 2470 | goto store_current; |
2515 | 2471 | case POWERPC_EXCP_PIT: /* Programmable interval timer interrupt */ |
2516 | 2472 | #if defined (DEBUG_EXCEPTIONS) |
... | ... | @@ -2534,10 +2490,8 @@ static always_inline void powerpc_excp (CPUState *env, |
2534 | 2490 | goto store_next; |
2535 | 2491 | case POWERPC_EXCP_IFTLB: /* Instruction fetch TLB error */ |
2536 | 2492 | new_msr &= ~((target_ulong)1 << MSR_RI); /* XXX: check this */ |
2537 | -#if defined(TARGET_PPC64) /* XXX: check this */ | |
2538 | - if (lpes1 == 0) | |
2539 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2540 | -#endif | |
2493 | + if (lpes1 == 0) /* XXX: check this */ | |
2494 | + new_msr |= (target_ulong)MSR_HVB; | |
2541 | 2495 | switch (excp_model) { |
2542 | 2496 | case POWERPC_EXCP_602: |
2543 | 2497 | case POWERPC_EXCP_603: |
... | ... | @@ -2555,10 +2509,8 @@ static always_inline void powerpc_excp (CPUState *env, |
2555 | 2509 | break; |
2556 | 2510 | case POWERPC_EXCP_DLTLB: /* Data load TLB miss */ |
2557 | 2511 | new_msr &= ~((target_ulong)1 << MSR_RI); /* XXX: check this */ |
2558 | -#if defined(TARGET_PPC64) /* XXX: check this */ | |
2559 | - if (lpes1 == 0) | |
2560 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2561 | -#endif | |
2512 | + if (lpes1 == 0) /* XXX: check this */ | |
2513 | + new_msr |= (target_ulong)MSR_HVB; | |
2562 | 2514 | switch (excp_model) { |
2563 | 2515 | case POWERPC_EXCP_602: |
2564 | 2516 | case POWERPC_EXCP_603: |
... | ... | @@ -2576,10 +2528,8 @@ static always_inline void powerpc_excp (CPUState *env, |
2576 | 2528 | break; |
2577 | 2529 | case POWERPC_EXCP_DSTLB: /* Data store TLB miss */ |
2578 | 2530 | new_msr &= ~((target_ulong)1 << MSR_RI); /* XXX: check this */ |
2579 | -#if defined(TARGET_PPC64) /* XXX: check this */ | |
2580 | - if (lpes1 == 0) | |
2581 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2582 | -#endif | |
2531 | + if (lpes1 == 0) /* XXX: check this */ | |
2532 | + new_msr |= (target_ulong)MSR_HVB; | |
2583 | 2533 | switch (excp_model) { |
2584 | 2534 | case POWERPC_EXCP_602: |
2585 | 2535 | case POWERPC_EXCP_603: |
... | ... | @@ -2678,10 +2628,8 @@ static always_inline void powerpc_excp (CPUState *env, |
2678 | 2628 | goto store_next; |
2679 | 2629 | case POWERPC_EXCP_PERFM: /* Embedded performance monitor interrupt */ |
2680 | 2630 | new_msr &= ~((target_ulong)1 << MSR_RI); |
2681 | -#if defined(TARGET_PPC64) | |
2682 | 2631 | if (lpes1 == 0) |
2683 | - new_msr |= (target_ulong)1 << MSR_HV; | |
2684 | -#endif | |
2632 | + new_msr |= (target_ulong)MSR_HVB; | |
2685 | 2633 | /* XXX: TODO */ |
2686 | 2634 | cpu_abort(env, |
2687 | 2635 | "Performance counter exception is not implemented yet !\n"); |
... | ... | @@ -2768,8 +2716,7 @@ static always_inline void powerpc_excp (CPUState *env, |
2768 | 2716 | /* XXX: we don't use hreg_store_msr here as already have treated |
2769 | 2717 | * any special case that could occur. Just store MSR and update hflags |
2770 | 2718 | */ |
2771 | - env->msr = new_msr; | |
2772 | - env->hflags_nmsr = 0x00000000; | |
2719 | + env->msr = new_msr & env->msr_mask; | |
2773 | 2720 | hreg_compute_hflags(env); |
2774 | 2721 | env->nip = vector; |
2775 | 2722 | /* Reset exception state */ |
... | ... | @@ -2784,9 +2731,7 @@ void do_interrupt (CPUState *env) |
2784 | 2731 | |
2785 | 2732 | void ppc_hw_interrupt (CPUPPCState *env) |
2786 | 2733 | { |
2787 | -#if defined(TARGET_PPC64) | |
2788 | 2734 | int hdice; |
2789 | -#endif | |
2790 | 2735 | |
2791 | 2736 | #if 0 |
2792 | 2737 | if (loglevel & CPU_LOG_INT) { |
... | ... | @@ -2815,7 +2760,6 @@ void ppc_hw_interrupt (CPUPPCState *env) |
2815 | 2760 | return; |
2816 | 2761 | } |
2817 | 2762 | #endif |
2818 | -#if defined(TARGET_PPC64) | |
2819 | 2763 | if (0) { |
2820 | 2764 | /* XXX: find a suitable condition to enable the hypervisor mode */ |
2821 | 2765 | hdice = env->spr[SPR_LPCR] & 1; |
... | ... | @@ -2830,7 +2774,6 @@ void ppc_hw_interrupt (CPUPPCState *env) |
2830 | 2774 | return; |
2831 | 2775 | } |
2832 | 2776 | } |
2833 | -#endif | |
2834 | 2777 | if (msr_ce != 0) { |
2835 | 2778 | /* External critical interrupt */ |
2836 | 2779 | if (env->pending_interrupts & (1 << PPC_INTERRUPT_CEXT)) { |
... | ... | @@ -2939,9 +2882,10 @@ void cpu_ppc_reset (void *opaque) |
2939 | 2882 | |
2940 | 2883 | env = opaque; |
2941 | 2884 | msr = (target_ulong)0; |
2942 | -#if defined(TARGET_PPC64) | |
2943 | - msr |= (target_ulong)0 << MSR_HV; /* Should be 1... */ | |
2944 | -#endif | |
2885 | + if (0) { | |
2886 | + /* XXX: find a suitable condition to enable the hypervisor mode */ | |
2887 | + msr |= (target_ulong)MSR_HVB; | |
2888 | + } | |
2945 | 2889 | msr |= (target_ulong)0 << MSR_AP; /* TO BE CHECKED */ |
2946 | 2890 | msr |= (target_ulong)0 << MSR_SA; /* TO BE CHECKED */ |
2947 | 2891 | msr |= (target_ulong)1 << MSR_EP; |
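
The recurring pattern in powerpc_excp() above is the point of the cleanup: exceptions that may be delivered in hypervisor mode OR MSR_HVB into new_msr without any per-target guard, and the final store masks the result with env->msr_mask, so a CPU model whose MSR mask does not include the bit drops it again. Roughly:

    /* Sketch of the exception-entry sequence after this commit. */
    new_msr &= ~((target_ulong)1 << MSR_RI);
    if (lpes1 == 0)
        new_msr |= (target_ulong)MSR_HVB;  /* 0ULL on builds without hypervisor support */
    /* ... exception-specific handling ... */
    env->msr = new_msr & env->msr_mask;    /* unimplemented MSR bits are discarded here */
    hreg_compute_hflags(env);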
target-ppc/helper_regs.h
... | ... | @@ -60,13 +60,12 @@ static always_inline void hreg_swap_gpr_tgpr (CPUPPCState *env) |
60 | 60 | |
61 | 61 | static always_inline void hreg_compute_mem_idx (CPUPPCState *env) |
62 | 62 | { |
63 | -#if defined (TARGET_PPC64) | |
64 | 63 | /* Precompute MMU index */ |
65 | - if (msr_pr == 0 && msr_hv != 0) | |
64 | + if (msr_pr == 0 && msr_hv != 0) { | |
66 | 65 | env->mmu_idx = 2; |
67 | - else | |
68 | -#endif | |
66 | + } else { | |
69 | 67 | env->mmu_idx = 1 - msr_pr; |
68 | + } | |
70 | 69 | } |
71 | 70 | |
72 | 71 | static always_inline void hreg_compute_hflags (CPUPPCState *env) |
... | ... | @@ -77,22 +76,26 @@ static always_inline void hreg_compute_hflags (CPUPPCState *env) |
77 | 76 | hflags_mask = (1 << MSR_VR) | (1 << MSR_AP) | (1 << MSR_SA) | |
78 | 77 | (1 << MSR_PR) | (1 << MSR_FP) | (1 << MSR_SE) | (1 << MSR_BE) | |
79 | 78 | (1 << MSR_LE); |
80 | -#if defined (TARGET_PPC64) | |
81 | - hflags_mask |= (1ULL << MSR_CM) | (1ULL << MSR_SF) | (1ULL << MSR_HV); | |
82 | -#endif | |
79 | + hflags_mask |= (1ULL << MSR_CM) | (1ULL << MSR_SF) | MSR_HVB; | |
83 | 80 | hreg_compute_mem_idx(env); |
84 | 81 | env->hflags = env->msr & hflags_mask; |
85 | 82 | /* Merge with hflags coming from other registers */ |
86 | 83 | env->hflags |= env->hflags_nmsr; |
87 | 84 | } |
88 | 85 | |
89 | -static always_inline int hreg_store_msr (CPUPPCState *env, target_ulong value) | |
86 | +static always_inline int hreg_store_msr (CPUPPCState *env, target_ulong value, | |
87 | + int alter_hv) | |
90 | 88 | { |
91 | 89 | int excp; |
92 | 90 | |
93 | 91 | excp = 0; |
94 | 92 | value &= env->msr_mask; |
95 | 93 | #if !defined (CONFIG_USER_ONLY) |
94 | + if (!alter_hv) { | |
95 | + /* mtmsr cannot alter the hypervisor state */ | |
96 | + value &= ~MSR_HVB; | |
97 | + value |= env->msr & MSR_HVB; | |
98 | + } | |
96 | 99 | if (((value >> MSR_IR) & 1) != msr_ir || |
97 | 100 | ((value >> MSR_DR) & 1) != msr_dr) { |
98 | 101 | /* Flush all tlb when changing translation mode */ |
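
The new alter_hv argument encodes the rule that an ordinary MSR write must not move the CPU into or out of hypervisor state, while an interrupt return may. When alter_hv is 0, the HV bit currently in effect is spliced back into the incoming value; a trimmed sketch of that path (the full code, including the CONFIG_USER_ONLY guard, is in the hunk above):

    /* Inside hreg_store_msr(env, value, alter_hv): */
    value &= env->msr_mask;
    if (!alter_hv) {
        value &= ~MSR_HVB;            /* drop whatever HV bit the caller supplied */
        value |= env->msr & MSR_HVB;  /* and keep the one currently in effect */
    }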
target-ppc/op_helper.c
... | ... | @@ -1425,7 +1425,7 @@ void cpu_dump_rfi (target_ulong RA, target_ulong msr); |
1425 | 1425 | |
1426 | 1426 | void do_store_msr (void) |
1427 | 1427 | { |
1428 | - T0 = hreg_store_msr(env, T0); | |
1428 | + T0 = hreg_store_msr(env, T0, 0); | |
1429 | 1429 | if (T0 != 0) { |
1430 | 1430 | env->interrupt_request |= CPU_INTERRUPT_EXITTB; |
1431 | 1431 | do_raise_exception(T0); |
... | ... | @@ -1451,7 +1451,7 @@ static always_inline void __do_rfi (target_ulong nip, target_ulong msr, |
1451 | 1451 | #endif |
1452 | 1452 | /* XXX: beware: this is false if VLE is supported */ |
1453 | 1453 | env->nip = nip & ~((target_ulong)0x00000003); |
1454 | - hreg_store_msr(env, msr); | |
1454 | + hreg_store_msr(env, msr, 1); | |
1455 | 1455 | #if defined (DEBUG_OP) |
1456 | 1456 | cpu_dump_rfi(env->nip, env->msr); |
1457 | 1457 | #endif |
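
The two call sites make the split visible: do_store_msr(), which backs mtmsr, passes alter_hv = 0, so a guest cannot grant itself hypervisor state, while __do_rfi() passes 1, since an interrupt return is exactly where the HV bit is allowed to change. A minimal contrast, assuming a hypervisor-capable configuration:

    T0 = hreg_store_msr(env, T0, 0);  /* mtmsr: HV bit in T0 is ignored, current HV state kept */
    hreg_store_msr(env, msr, 1);      /* rfi: the restored MSR (e.g. from SRR1) may clear or set HV */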