Commit 111bfab3b5c6a47a7182e095647e6a5e0e17feb8

Authored by bellard
1 parent c7d344af

This patch adds little-endian mode support to PPC emulation.

This is needed by OS/2 and Windows NT and some programs like VirtualPC.
This patch has been tested using OS/2 bootloader (thanks to Tero
Kaarlela).
(Jocelyn Mayer)


git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@1379 c046a42c-6fe2-441c-8c8c-71466251a162
cpu-exec.c
... ... @@ -364,7 +364,8 @@ int cpu_exec(CPUState *env1)
364 364 cs_base = env->npc;
365 365 pc = env->pc;
366 366 #elif defined(TARGET_PPC)
367   - flags = (msr_pr << MSR_PR) | (msr_fp << MSR_FP) | (msr_se << MSR_SE);
  367 + flags = (msr_pr << MSR_PR) | (msr_fp << MSR_FP) |
  368 + (msr_se << MSR_SE) | (msr_le << MSR_LE);
368 369 cs_base = 0;
369 370 pc = env->nip;
370 371 #else
... ...
... ... @@ -141,6 +141,8 @@ void target_disas(FILE *out, target_ulong code, target_ulong size, int flags)
141 141 #elif defined(TARGET_SPARC)
142 142 print_insn = print_insn_sparc;
143 143 #elif defined(TARGET_PPC)
  144 + if (cpu_single_env->msr[MSR_LE])
  145 + disasm_info.endian = BFD_ENDIAN_LITTLE;
144 146 print_insn = print_insn_ppc;
145 147 #else
146 148 fprintf(out, "0x" TARGET_FMT_lx
... ...
target-ppc/op_helper_mem.h
... ... @@ -40,4 +40,53 @@ void glue(do_stsw, MEMSUFFIX) (int src)
40 40 }
41 41 }
42 42  
  43 +void glue(do_lsw_le, MEMSUFFIX) (int dst)
  44 +{
  45 + uint32_t tmp;
  46 + int sh;
  47 +
  48 + if (loglevel > 0) {
  49 + fprintf(logfile, "%s: addr=0x%08x count=%d reg=%d\n",
  50 + __func__, T0, T1, dst);
  51 + }
  52 + for (; T1 > 3; T1 -= 4, T0 += 4) {
  53 + tmp = glue(ldl, MEMSUFFIX)(T0);
  54 + ugpr(dst++) = ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
  55 + ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
  56 + if (dst == 32)
  57 + dst = 0;
  58 + }
  59 + if (T1 > 0) {
  60 + tmp = 0;
  61 + for (sh = 0; T1 > 0; T1--, T0++, sh += 8) {
  62 + tmp |= glue(ldub, MEMSUFFIX)(T0) << sh;
  63 + }
  64 + ugpr(dst) = tmp;
  65 + }
  66 +}
  67 +
  68 +void glue(do_stsw_le, MEMSUFFIX) (int src)
  69 +{
  70 + uint32_t tmp;
  71 + int sh;
  72 +
  73 + if (loglevel > 0) {
  74 + fprintf(logfile, "%s: addr=0x%08x count=%d reg=%d\n",
  75 + __func__, T0, T1, src);
  76 + }
  77 + for (; T1 > 3; T1 -= 4, T0 += 4) {
  78 + tmp = ((ugpr(src++) & 0xFF000000) >> 24);
  79 + tmp |= ((ugpr(src++) & 0x00FF0000) >> 8);
  80 + tmp |= ((ugpr(src++) & 0x0000FF00) << 8);
  81 + tmp |= ((ugpr(src++) & 0x000000FF) << 24);
  82 + glue(stl, MEMSUFFIX)(T0, tmp);
  83 + if (src == 32)
  84 + src = 0;
  85 + }
  86 + if (T1 > 0) {
  87 + for (sh = 0; T1 > 0; T1--, T0++, sh += 8)
  88 + glue(stb, MEMSUFFIX)(T0, (ugpr(src) >> sh) & 0xFF);
  89 + }
  90 +}
  91 +
43 92 #undef MEMSUFFIX
... ...
target-ppc/op_mem.h
... ... @@ -8,6 +8,12 @@ static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
8 8 return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
9 9 }
10 10  
  11 +static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
  12 +{
  13 + int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
  14 + return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
  15 +}
  16 +
11 17 static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
12 18 {
13 19 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
... ... @@ -48,17 +54,29 @@ PPC_LD_OP(ha, ldsw);
48 54 PPC_LD_OP(hz, lduw);
49 55 PPC_LD_OP(wz, ldl);
50 56  
  57 +PPC_LD_OP(ha_le, ld16rs);
  58 +PPC_LD_OP(hz_le, ld16r);
  59 +PPC_LD_OP(wz_le, ld32r);
  60 +
51 61 /*** Integer store ***/
52 62 PPC_ST_OP(b, stb);
53 63 PPC_ST_OP(h, stw);
54 64 PPC_ST_OP(w, stl);
55 65  
  66 +PPC_ST_OP(h_le, st16r);
  67 +PPC_ST_OP(w_le, st32r);
  68 +
56 69 /*** Integer load and store with byte reverse ***/
57 70 PPC_LD_OP(hbr, ld16r);
58 71 PPC_LD_OP(wbr, ld32r);
59 72 PPC_ST_OP(hbr, st16r);
60 73 PPC_ST_OP(wbr, st32r);
61 74  
  75 +PPC_LD_OP(hbr_le, lduw);
  76 +PPC_LD_OP(wbr_le, ldl);
  77 +PPC_ST_OP(hbr_le, stw);
  78 +PPC_ST_OP(wbr_le, stl);
  79 +
62 80 /*** Integer load and store multiple ***/
63 81 PPC_OP(glue(lmw, MEMSUFFIX))
64 82 {
... ... @@ -80,6 +98,26 @@ PPC_OP(glue(stmw, MEMSUFFIX))
80 98 RETURN();
81 99 }
82 100  
  101 +PPC_OP(glue(lmw_le, MEMSUFFIX))
  102 +{
  103 + int dst = PARAM(1);
  104 +
  105 + for (; dst < 32; dst++, T0 += 4) {
  106 + ugpr(dst) = glue(ld32r, MEMSUFFIX)(T0);
  107 + }
  108 + RETURN();
  109 +}
  110 +
  111 +PPC_OP(glue(stmw_le, MEMSUFFIX))
  112 +{
  113 + int src = PARAM(1);
  114 +
  115 + for (; src < 32; src++, T0 += 4) {
  116 + glue(st32r, MEMSUFFIX)(T0, ugpr(src));
  117 + }
  118 + RETURN();
  119 +}
  120 +
83 121 /*** Integer load and store strings ***/
84 122 PPC_OP(glue(lswi, MEMSUFFIX))
85 123 {
... ... @@ -87,6 +125,13 @@ PPC_OP(glue(lswi, MEMSUFFIX))
87 125 RETURN();
88 126 }
89 127  
/* Forward declaration: helper defined in op_helper_mem.h. */
void glue(do_lsw_le, MEMSUFFIX) (int dst);
/* lswi (little-endian mode): load string word immediate.
 * T0 = start address, T1 = byte count; PARAM(1) = first destination GPR. */
PPC_OP(glue(lswi_le, MEMSUFFIX))
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM(1));
    RETURN();
}
  134 +
90 135 /* PPC32 specification says we must generate an exception if
91 136 * rA is in the range of registers to be loaded.
92 137 * In an other hand, IBM says this is valid, but rA won't be loaded.
... ... @@ -105,12 +150,32 @@ PPC_OP(glue(lswx, MEMSUFFIX))
105 150 RETURN();
106 151 }
107 152  
/* lswx (little-endian mode): load string word indexed.
 * T1 = byte count (taken from XER); PARAM(1) = rD, PARAM(2) = rA,
 * PARAM(3) = rB.  PPC32 requires a program exception when the register
 * range to be loaded overlaps rA or rB (see the comment on the
 * big-endian lswx above).
 * NOTE(review): the overlap test adds the byte count T1 to the register
 * index rather than the register count (T1+3)/4 — mirrors the big-endian
 * version; confirm against the spec. */
PPC_OP(glue(lswx_le, MEMSUFFIX))
{
    if (T1 > 0) {
        if ((PARAM(1) < PARAM(2) && (PARAM(1) + T1) > PARAM(2)) ||
            (PARAM(1) < PARAM(3) && (PARAM(1) + T1) > PARAM(3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM(1));
        }
    }
    RETURN();
}
  165 +
108 166 PPC_OP(glue(stsw, MEMSUFFIX))
109 167 {
110 168 glue(do_stsw, MEMSUFFIX)(PARAM(1));
111 169 RETURN();
112 170 }
113 171  
/* Forward declaration: helper defined in op_helper_mem.h. */
void glue(do_stsw_le, MEMSUFFIX) (int src);
/* stswi/stswx (little-endian mode): store string word.
 * T0 = start address, T1 = byte count; PARAM(1) = first source GPR. */
PPC_OP(glue(stsw_le, MEMSUFFIX))
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM(1));
    RETURN();
}
  178 +
114 179 /*** Floating-point store ***/
115 180 #define PPC_STF_OP(name, op) \
116 181 PPC_OP(glue(glue(st, name), MEMSUFFIX)) \
... ... @@ -122,6 +187,43 @@ PPC_OP(glue(glue(st, name), MEMSUFFIX)) \
122 187 PPC_STF_OP(fd, stfq);
123 188 PPC_STF_OP(fs, stfl);
124 189  
  190 +static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
  191 +{
  192 + union {
  193 + double d;
  194 + uint64_t u;
  195 + } u;
  196 +
  197 + u.d = d;
  198 + u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
  199 + ((u.u & 0x00FF000000000000ULL) >> 40) |
  200 + ((u.u & 0x0000FF0000000000ULL) >> 24) |
  201 + ((u.u & 0x000000FF00000000ULL) >> 8) |
  202 + ((u.u & 0x00000000FF000000ULL) << 8) |
  203 + ((u.u & 0x0000000000FF0000ULL) << 24) |
  204 + ((u.u & 0x000000000000FF00ULL) << 40) |
  205 + ((u.u & 0x00000000000000FFULL) << 56);
  206 + glue(stfq, MEMSUFFIX)(EA, u.d);
  207 +}
  208 +
  209 +static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
  210 +{
  211 + union {
  212 + float f;
  213 + uint32_t u;
  214 + } u;
  215 +
  216 + u.f = f;
  217 + u.u = ((u.u & 0xFF000000UL) >> 24) |
  218 + ((u.u & 0x00FF0000ULL) >> 8) |
  219 + ((u.u & 0x0000FF00UL) << 8) |
  220 + ((u.u & 0x000000FFULL) << 24);
  221 + glue(stfl, MEMSUFFIX)(EA, u.f);
  222 +}
  223 +
  224 +PPC_STF_OP(fd_le, stfqr);
  225 +PPC_STF_OP(fs_le, stflr);
  226 +
125 227 /*** Floating-point load ***/
126 228 #define PPC_LDF_OP(name, op) \
127 229 PPC_OP(glue(glue(l, name), MEMSUFFIX)) \
... ... @@ -133,6 +235,45 @@ PPC_OP(glue(glue(l, name), MEMSUFFIX)) \
133 235 PPC_LDF_OP(fd, ldfq);
134 236 PPC_LDF_OP(fs, ldfl);
135 237  
  238 +static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
  239 +{
  240 + union {
  241 + double d;
  242 + uint64_t u;
  243 + } u;
  244 +
  245 + u.d = glue(ldfq, MEMSUFFIX)(EA);
  246 + u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
  247 + ((u.u & 0x00FF000000000000ULL) >> 40) |
  248 + ((u.u & 0x0000FF0000000000ULL) >> 24) |
  249 + ((u.u & 0x000000FF00000000ULL) >> 8) |
  250 + ((u.u & 0x00000000FF000000ULL) << 8) |
  251 + ((u.u & 0x0000000000FF0000ULL) << 24) |
  252 + ((u.u & 0x000000000000FF00ULL) << 40) |
  253 + ((u.u & 0x00000000000000FFULL) << 56);
  254 +
  255 + return u.d;
  256 +}
  257 +
  258 +static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
  259 +{
  260 + union {
  261 + float f;
  262 + uint32_t u;
  263 + } u;
  264 +
  265 + u.f = glue(ldfl, MEMSUFFIX)(EA);
  266 + u.u = ((u.u & 0xFF000000UL) >> 24) |
  267 + ((u.u & 0x00FF0000ULL) >> 8) |
  268 + ((u.u & 0x0000FF00UL) << 8) |
  269 + ((u.u & 0x000000FFULL) << 24);
  270 +
  271 + return u.f;
  272 +}
  273 +
  274 +PPC_LDF_OP(fd_le, ldfqr);
  275 +PPC_LDF_OP(fs_le, ldflr);
  276 +
136 277 /* Load and set reservation */
137 278 PPC_OP(glue(lwarx, MEMSUFFIX))
138 279 {
... ... @@ -145,6 +286,17 @@ PPC_OP(glue(lwarx, MEMSUFFIX))
145 286 RETURN();
146 287 }
147 288  
/* lwarx (little-endian mode): load word and reserve.
 * Word-aligned addresses only; loads the byte-reversed word into T1 and
 * records the reservation address for a later stwcx. */
PPC_OP(glue(lwarx_le, MEMSUFFIX))
{
    if (T0 & 0x03) {
        /* Misaligned lwarx raises an alignment exception. */
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)(T0);
        regs->reserve = T0;
    }
    RETURN();
}
  299 +
148 300 /* Store with reservation */
149 301 PPC_OP(glue(stwcx, MEMSUFFIX))
150 302 {
... ... @@ -162,6 +314,22 @@ PPC_OP(glue(stwcx, MEMSUFFIX))
162 314 RETURN();
163 315 }
164 316  
/* stwcx. (little-endian mode): store word conditional.
 * The byte-reversed store happens only when the reservation recorded by
 * lwarx still matches T0; CR0 gets EQ (0x02) on success.  The reservation
 * is cleared whether or not the store succeeded.
 * NOTE(review): CR0 is seeded from xer_ov, while the architecture
 * specifies XER[SO] here — presumably mirrors the big-endian stwcx;
 * confirm. */
PPC_OP(glue(stwcx_le, MEMSUFFIX))
{
    if (T0 & 0x03) {
        /* Misaligned stwcx. raises an alignment exception. */
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (regs->reserve != T0) {
            /* Reservation lost: store suppressed, EQ stays clear. */
            env->crf[0] = xer_ov;
        } else {
            glue(st32r, MEMSUFFIX)(T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = 0;
    RETURN();
}
  332 +
165 333 PPC_OP(glue(dcbz, MEMSUFFIX))
166 334 {
167 335 glue(stl, MEMSUFFIX)(T0 + 0x00, 0);
... ... @@ -188,4 +356,16 @@ PPC_OP(glue(ecowx, MEMSUFFIX))
188 356 RETURN();
189 357 }
190 358  
/* eciwx (little-endian mode): external control in word indexed —
 * byte-reversed word load from T0 into T1.  (The handler notes EAR[E]
 * and alignment should be checked; neither is checked here.) */
PPC_OP(glue(eciwx_le, MEMSUFFIX))
{
    T1 = glue(ld32r, MEMSUFFIX)(T0);
    RETURN();
}
  364 +
/* ecowx (little-endian mode): external control out word indexed —
 * byte-reversed word store of T1 to T0.  (Same caveat as eciwx: no
 * EAR[E]/alignment check here.) */
PPC_OP(glue(ecowx_le, MEMSUFFIX))
{
    glue(st32r, MEMSUFFIX)(T0, T1);
    RETURN();
}
  370 +
191 371 #undef MEMSUFFIX
... ...
target-ppc/translate.c
... ... @@ -1046,22 +1046,41 @@ GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
1046 1046 }
1047 1047  
1048 1048 /*** Integer load ***/
  1049 +#define op_ldst(name) (*gen_op_##name[ctx->mem_idx])()
1049 1050 #if defined(CONFIG_USER_ONLY)
1050   -#define op_ldst(name) gen_op_##name##_raw()
1051   -#define OP_LD_TABLE(width)
1052   -#define OP_ST_TABLE(width)
  1051 +#define OP_LD_TABLE(width) \
  1052 +static GenOpFunc *gen_op_l##width[] = { \
  1053 + &gen_op_l##width##_raw, \
  1054 + &gen_op_l##width##_le_raw, \
  1055 +};
  1056 +#define OP_ST_TABLE(width) \
  1057 +static GenOpFunc *gen_op_st##width[] = { \
  1058 + &gen_op_st##width##_raw, \
  1059 + &gen_op_st##width##_le_raw, \
  1060 +};
  1061 +/* Byte access routines are endian-safe */
  1062 +#define gen_op_stb_le_raw gen_op_stb_raw
  1063 +#define gen_op_lbz_le_raw gen_op_lbz_raw
1053 1064 #else
1054   -#define op_ldst(name) (*gen_op_##name[ctx->mem_idx])()
1055 1065 #define OP_LD_TABLE(width) \
1056 1066 static GenOpFunc *gen_op_l##width[] = { \
1057 1067 &gen_op_l##width##_user, \
  1068 + &gen_op_l##width##_le_user, \
1058 1069 &gen_op_l##width##_kernel, \
1059   -}
  1070 + &gen_op_l##width##_le_kernel, \
  1071 +};
1060 1072 #define OP_ST_TABLE(width) \
1061 1073 static GenOpFunc *gen_op_st##width[] = { \
1062 1074 &gen_op_st##width##_user, \
  1075 + &gen_op_st##width##_le_user, \
1063 1076 &gen_op_st##width##_kernel, \
1064   -}
  1077 + &gen_op_st##width##_le_kernel, \
  1078 +};
  1079 +/* Byte access routines are endian-safe */
  1080 +#define gen_op_stb_le_user gen_op_stb_user
  1081 +#define gen_op_lbz_le_user gen_op_lbz_user
  1082 +#define gen_op_stb_le_kernel gen_op_stb_kernel
  1083 +#define gen_op_lbz_le_kernel gen_op_lbz_kernel
1065 1084 #endif
1066 1085  
1067 1086 #define GEN_LD(width, opc) \
... ... @@ -1232,17 +1251,28 @@ OP_ST_TABLE(wbr);
1232 1251 GEN_STX(wbr, 0x16, 0x14);
1233 1252  
1234 1253 /*** Integer load and store multiple ***/
  1254 +#define op_ldstm(name, reg) (*gen_op_##name[ctx->mem_idx])(reg)
1235 1255 #if defined(CONFIG_USER_ONLY)
1236   -#define op_ldstm(name, reg) gen_op_##name##_raw(reg)
  1256 +static GenOpFunc1 *gen_op_lmw[] = {
  1257 + &gen_op_lmw_raw,
  1258 + &gen_op_lmw_le_raw,
  1259 +};
  1260 +static GenOpFunc1 *gen_op_stmw[] = {
  1261 + &gen_op_stmw_raw,
  1262 + &gen_op_stmw_le_raw,
  1263 +};
1237 1264 #else
1238   -#define op_ldstm(name, reg) (*gen_op_##name[ctx->mem_idx])(reg)
1239 1265 static GenOpFunc1 *gen_op_lmw[] = {
1240 1266 &gen_op_lmw_user,
  1267 + &gen_op_lmw_le_user,
1241 1268 &gen_op_lmw_kernel,
  1269 + &gen_op_lmw_le_kernel,
1242 1270 };
1243 1271 static GenOpFunc1 *gen_op_stmw[] = {
1244 1272 &gen_op_stmw_user,
  1273 + &gen_op_stmw_le_user,
1245 1274 &gen_op_stmw_kernel,
  1275 + &gen_op_stmw_le_kernel,
1246 1276 };
1247 1277 #endif
1248 1278  
... ... @@ -1277,23 +1307,39 @@ GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1277 1307 }
1278 1308  
1279 1309 /*** Integer load and store strings ***/
1280   -#if defined(CONFIG_USER_ONLY)
1281   -#define op_ldsts(name, start) gen_op_##name##_raw(start)
1282   -#define op_ldstsx(name, rd, ra, rb) gen_op_##name##_raw(rd, ra, rb)
1283   -#else
1284 1310 #define op_ldsts(name, start) (*gen_op_##name[ctx->mem_idx])(start)
1285 1311 #define op_ldstsx(name, rd, ra, rb) (*gen_op_##name[ctx->mem_idx])(rd, ra, rb)
  1312 +#if defined(CONFIG_USER_ONLY)
  1313 +static GenOpFunc1 *gen_op_lswi[] = {
  1314 + &gen_op_lswi_raw,
  1315 + &gen_op_lswi_le_raw,
  1316 +};
  1317 +static GenOpFunc3 *gen_op_lswx[] = {
  1318 + &gen_op_lswx_raw,
  1319 + &gen_op_lswx_le_raw,
  1320 +};
  1321 +static GenOpFunc1 *gen_op_stsw[] = {
  1322 + &gen_op_stsw_raw,
  1323 + &gen_op_stsw_le_raw,
  1324 +};
  1325 +#else
1286 1326 static GenOpFunc1 *gen_op_lswi[] = {
1287 1327 &gen_op_lswi_user,
  1328 + &gen_op_lswi_le_user,
1288 1329 &gen_op_lswi_kernel,
  1330 + &gen_op_lswi_le_kernel,
1289 1331 };
1290 1332 static GenOpFunc3 *gen_op_lswx[] = {
1291 1333 &gen_op_lswx_user,
  1334 + &gen_op_lswx_le_user,
1292 1335 &gen_op_lswx_kernel,
  1336 + &gen_op_lswx_le_kernel,
1293 1337 };
1294 1338 static GenOpFunc1 *gen_op_stsw[] = {
1295 1339 &gen_op_stsw_user,
  1340 + &gen_op_stsw_le_user,
1296 1341 &gen_op_stsw_kernel,
  1342 + &gen_op_stsw_le_kernel,
1297 1343 };
1298 1344 #endif
1299 1345  
... ... @@ -1389,23 +1435,33 @@ GEN_HANDLER(isync, 0x13, 0x16, 0xFF, 0x03FF0801, PPC_MEM)
1389 1435 {
1390 1436 }
1391 1437  
1392   -/* lwarx */
  1438 +#define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])()
  1439 +#define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])()
1393 1440 #if defined(CONFIG_USER_ONLY)
1394   -#define op_lwarx() gen_op_lwarx_raw()
1395   -#define op_stwcx() gen_op_stwcx_raw()
  1441 +static GenOpFunc *gen_op_lwarx[] = {
  1442 + &gen_op_lwarx_raw,
  1443 + &gen_op_lwarx_le_raw,
  1444 +};
  1445 +static GenOpFunc *gen_op_stwcx[] = {
  1446 + &gen_op_stwcx_raw,
  1447 + &gen_op_stwcx_le_raw,
  1448 +};
1396 1449 #else
1397   -#define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])()
1398 1450 static GenOpFunc *gen_op_lwarx[] = {
1399 1451 &gen_op_lwarx_user,
  1452 + &gen_op_lwarx_le_user,
1400 1453 &gen_op_lwarx_kernel,
  1454 + &gen_op_lwarx_le_kernel,
1401 1455 };
1402   -#define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])()
1403 1456 static GenOpFunc *gen_op_stwcx[] = {
1404 1457 &gen_op_stwcx_user,
  1458 + &gen_op_stwcx_le_user,
1405 1459 &gen_op_stwcx_kernel,
  1460 + &gen_op_stwcx_le_kernel,
1406 1461 };
1407 1462 #endif
1408 1463  
  1464 +/* lwarx */
1409 1465 GEN_HANDLER(lwarx, 0x1F, 0x14, 0xFF, 0x00000001, PPC_RES)
1410 1466 {
1411 1467 if (rA(ctx->opcode) == 0) {
... ... @@ -2498,23 +2554,33 @@ GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM)
2498 2554  
2499 2555 /*** External control ***/
2500 2556 /* Optional: */
2501   -/* eciwx */
2502   -#if defined(CONFIG_USER_ONLY)
2503   -#define op_eciwx() gen_op_eciwx_raw()
2504   -#define op_ecowx() gen_op_ecowx_raw()
2505   -#else
2506 2557 #define op_eciwx() (*gen_op_eciwx[ctx->mem_idx])()
2507 2558 #define op_ecowx() (*gen_op_ecowx[ctx->mem_idx])()
  2559 +#if defined(CONFIG_USER_ONLY)
  2560 +static GenOpFunc *gen_op_eciwx[] = {
  2561 + &gen_op_eciwx_raw,
  2562 + &gen_op_eciwx_le_raw,
  2563 +};
  2564 +static GenOpFunc *gen_op_ecowx[] = {
  2565 + &gen_op_ecowx_raw,
  2566 + &gen_op_ecowx_le_raw,
  2567 +};
  2568 +#else
2508 2569 static GenOpFunc *gen_op_eciwx[] = {
2509 2570 &gen_op_eciwx_user,
  2571 + &gen_op_eciwx_le_user,
2510 2572 &gen_op_eciwx_kernel,
  2573 + &gen_op_eciwx_le_kernel,
2511 2574 };
2512 2575 static GenOpFunc *gen_op_ecowx[] = {
2513 2576 &gen_op_ecowx_user,
  2577 + &gen_op_ecowx_le_user,
2514 2578 &gen_op_ecowx_kernel,
  2579 + &gen_op_ecowx_le_kernel,
2515 2580 };
2516 2581 #endif
2517 2582  
  2583 +/* eciwx */
2518 2584 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
2519 2585 {
2520 2586 /* Should check EAR[E] & alignment ! */
... ... @@ -3143,10 +3209,10 @@ int gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb,
3143 3209 ctx.tb = tb;
3144 3210 ctx.exception = EXCP_NONE;
3145 3211 #if defined(CONFIG_USER_ONLY)
3146   - ctx.mem_idx = 0;
  3212 + ctx.mem_idx = msr_le;
3147 3213 #else
3148 3214 ctx.supervisor = 1 - msr_pr;
3149   - ctx.mem_idx = 1 - msr_pr;
  3215 + ctx.mem_idx = ((1 - msr_pr) << 1) | msr_le;
3150 3216 #endif
3151 3217 ctx.fpu_enabled = msr_fp;
3152 3218 #if defined (DO_SINGLE_STEP)
... ... @@ -3173,11 +3239,17 @@ int gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb,
3173 3239 }
3174 3240 #endif
3175 3241 ctx.opcode = ldl_code(ctx.nip);
  3242 + if (msr_le) {
  3243 + ctx.opcode = ((ctx.opcode & 0xFF000000) >> 24) |
  3244 + ((ctx.opcode & 0x00FF0000) >> 8) |
  3245 + ((ctx.opcode & 0x0000FF00) << 8) |
  3246 + ((ctx.opcode & 0x000000FF) << 24);
  3247 + }
3176 3248 #if defined PPC_DEBUG_DISAS
3177 3249 if (loglevel & CPU_LOG_TB_IN_ASM) {
3178   - fprintf(logfile, "translate opcode %08x (%02x %02x %02x)\n",
  3250 + fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
3179 3251 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
3180   - opc3(ctx.opcode));
  3252 + opc3(ctx.opcode), msr_le ? "little" : "big");
3181 3253 }
3182 3254 #endif
3183 3255 ctx.nip += 4;
... ...