Commit b068d6a7138292de0f5c5fa6c99f0b79d4e1e7f0

Authored by j_mayer
1 parent ed26abdb

PowerPC target optimisations: make intensive use of always_inline.


git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@3347 c046a42c-6fe2-441c-8c8c-71466251a162
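Context for the change below: plain `static inline` is only a hint that GCC may ignore (for example at low optimisation levels, or for functions it judges too large or too frequently called), whereas GCC's `always_inline` attribute forces the inlining decision. The `always_inline` keyword used throughout this diff is a macro defined elsewhere in the QEMU tree; a minimal sketch of such a definition is shown here for reference. The exact header, compiler-version guard, and spelling are assumptions and are not part of this commit.

    /* Sketch of the always_inline wrapper assumed by this diff (not part of
     * the commit). On GCC it forces inlining; elsewhere it degrades to a
     * plain inline hint. */
    #if defined(__GNUC__) && (__GNUC__ >= 3)
    #define always_inline __attribute__((always_inline)) __inline__
    #else
    #define always_inline inline
    #endif

With that macro in place, each hunk below is a mechanical substitution of `static inline` with `static always_inline`, plus re-wrapping of parameter lists to keep lines within the usual width.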
hw/ppc.c
... ... @@ -424,7 +424,8 @@ struct ppc_tb_t {
424 424 void *opaque;
425 425 };
426 426  
427   -static inline uint64_t cpu_ppc_get_tb (ppc_tb_t *tb_env, int64_t tb_offset)
  427 +static always_inline uint64_t cpu_ppc_get_tb (ppc_tb_t *tb_env,
  428 + int64_t tb_offset)
428 429 {
429 430 /* TB time in tb periods */
430 431 return muldiv64(qemu_get_clock(vm_clock) + tb_env->tb_offset,
... ... @@ -446,7 +447,7 @@ uint32_t cpu_ppc_load_tbl (CPUState *env)
446 447 return tb & 0xFFFFFFFF;
447 448 }
448 449  
449   -static inline uint32_t _cpu_ppc_load_tbu (CPUState *env)
  450 +static always_inline uint32_t _cpu_ppc_load_tbu (CPUState *env)
450 451 {
451 452 ppc_tb_t *tb_env = env->tb_env;
452 453 uint64_t tb;
... ... @@ -466,8 +467,9 @@ uint32_t cpu_ppc_load_tbu (CPUState *env)
466 467 return _cpu_ppc_load_tbu(env);
467 468 }
468 469  
469   -static inline void cpu_ppc_store_tb (ppc_tb_t *tb_env, int64_t *tb_offsetp,
470   - uint64_t value)
  470 +static always_inline void cpu_ppc_store_tb (ppc_tb_t *tb_env,
  471 + int64_t *tb_offsetp,
  472 + uint64_t value)
471 473 {
472 474 *tb_offsetp = muldiv64(value, ticks_per_sec, tb_env->tb_freq)
473 475 - qemu_get_clock(vm_clock);
... ... @@ -489,7 +491,7 @@ void cpu_ppc_store_tbl (CPUState *env, uint32_t value)
489 491 cpu_ppc_store_tb(tb_env, &tb_env->tb_offset, tb | (uint64_t)value);
490 492 }
491 493  
492   -static inline void _cpu_ppc_store_tbu (CPUState *env, uint32_t value)
  494 +static always_inline void _cpu_ppc_store_tbu (CPUState *env, uint32_t value)
493 495 {
494 496 ppc_tb_t *tb_env = env->tb_env;
495 497 uint64_t tb;
... ... @@ -556,7 +558,8 @@ void cpu_ppc_store_atbu (CPUState *env, uint32_t value)
556 558 ((uint64_t)value << 32) | tb);
557 559 }
558 560  
559   -static inline uint32_t _cpu_ppc_load_decr (CPUState *env, uint64_t *next)
  561 +static always_inline uint32_t _cpu_ppc_load_decr (CPUState *env,
  562 + uint64_t *next)
560 563 {
561 564 ppc_tb_t *tb_env = env->tb_env;
562 565 uint32_t decr;
... ... @@ -605,7 +608,7 @@ uint64_t cpu_ppc_load_purr (CPUState *env)
605 608 /* When decrementer expires,
606 609 * all we need to do is generate or queue a CPU exception
607 610 */
608   -static inline void cpu_ppc_decr_excp (CPUState *env)
  611 +static always_inline void cpu_ppc_decr_excp (CPUState *env)
609 612 {
610 613 /* Raise it */
611 614 #ifdef PPC_DEBUG_TB
... ... @@ -616,7 +619,7 @@ static inline void cpu_ppc_decr_excp (CPUState *env)
616 619 ppc_set_irq(env, PPC_INTERRUPT_DECR, 1);
617 620 }
618 621  
619   -static inline void cpu_ppc_hdecr_excp (CPUState *env)
  622 +static always_inline void cpu_ppc_hdecr_excp (CPUState *env)
620 623 {
621 624 /* Raise it */
622 625 #ifdef PPC_DEBUG_TB
... ... @@ -657,9 +660,8 @@ static void __cpu_ppc_store_decr (CPUState *env, uint64_t *nextp,
657 660 (*raise_excp)(env);
658 661 }
659 662  
660   -
661   -static inline void _cpu_ppc_store_decr (CPUState *env, uint32_t decr,
662   - uint32_t value, int is_excp)
  663 +static always_inline void _cpu_ppc_store_decr (CPUState *env, uint32_t decr,
  664 + uint32_t value, int is_excp)
663 665 {
664 666 ppc_tb_t *tb_env = env->tb_env;
665 667  
... ... @@ -678,8 +680,8 @@ static void cpu_ppc_decr_cb (void *opaque)
678 680 }
679 681  
680 682 #if defined(TARGET_PPC64H)
681   -static inline void _cpu_ppc_store_hdecr (CPUState *env, uint32_t hdecr,
682   - uint32_t value, int is_excp)
  683 +static always_inline void _cpu_ppc_store_hdecr (CPUState *env, uint32_t hdecr,
  684 + uint32_t value, int is_excp)
683 685 {
684 686 ppc_tb_t *tb_env = env->tb_env;
685 687  
... ...
hw/ppc405_uc.c
... ... @@ -463,7 +463,7 @@ static uint32_t sdram_bcr (target_phys_addr_t ram_base,
463 463 return bcr;
464 464 }
465 465  
466   -static inline target_phys_addr_t sdram_base (uint32_t bcr)
  466 +static always_inline target_phys_addr_t sdram_base (uint32_t bcr)
467 467 {
468 468 return bcr & 0xFF800000;
469 469 }
... ...
hw/ppc_prep.c
... ... @@ -107,7 +107,7 @@ static void _PPC_intack_write (void *opaque,
107 107 // printf("%s: 0x%08x => 0x%08x\n", __func__, addr, value);
108 108 }
109 109  
110   -static inline uint32_t _PPC_intack_read (target_phys_addr_t addr)
  110 +static always_inline uint32_t _PPC_intack_read (target_phys_addr_t addr)
111 111 {
112 112 uint32_t retval = 0;
113 113  
... ... @@ -412,8 +412,9 @@ static uint32_t PREP_io_800_readb (void *opaque, uint32_t addr)
412 412 return retval;
413 413 }
414 414  
415   -static inline target_phys_addr_t prep_IO_address (sysctrl_t *sysctrl,
416   - target_phys_addr_t addr)
  415 +static always_inline target_phys_addr_t prep_IO_address (sysctrl_t *sysctrl,
  416 + target_phys_addr_t
  417 + addr)
417 418 {
418 419 if (sysctrl->contiguous_map == 0) {
419 420 /* 64 KB contiguous space for IOs */
... ...
target-ppc/exec.h
... ... @@ -68,23 +68,23 @@ register unsigned long T2 asm(AREG3);
68 68 # define RETURN() __asm__ __volatile__("" : : : "memory");
69 69 #endif
70 70  
71   -static inline target_ulong rotl8 (target_ulong i, int n)
  71 +static always_inline target_ulong rotl8 (target_ulong i, int n)
72 72 {
73 73 return (((uint8_t)i << n) | ((uint8_t)i >> (8 - n)));
74 74 }
75 75  
76   -static inline target_ulong rotl16 (target_ulong i, int n)
  76 +static always_inline target_ulong rotl16 (target_ulong i, int n)
77 77 {
78 78 return (((uint16_t)i << n) | ((uint16_t)i >> (16 - n)));
79 79 }
80 80  
81   -static inline target_ulong rotl32 (target_ulong i, int n)
  81 +static always_inline target_ulong rotl32 (target_ulong i, int n)
82 82 {
83 83 return (((uint32_t)i << n) | ((uint32_t)i >> (32 - n)));
84 84 }
85 85  
86 86 #if defined(TARGET_PPC64)
87   -static inline target_ulong rotl64 (target_ulong i, int n)
  87 +static always_inline target_ulong rotl64 (target_ulong i, int n)
88 88 {
89 89 return (((uint64_t)i << n) | ((uint64_t)i >> (64 - n)));
90 90 }
... ... @@ -103,18 +103,18 @@ int get_physical_address (CPUState *env, mmu_ctx_t *ctx, target_ulong vaddr,
103 103 void ppc6xx_tlb_store (CPUState *env, target_ulong EPN, int way, int is_code,
104 104 target_ulong pte0, target_ulong pte1);
105 105  
106   -static inline void env_to_regs (void)
  106 +static always_inline void env_to_regs (void)
107 107 {
108 108 }
109 109  
110   -static inline void regs_to_env (void)
  110 +static always_inline void regs_to_env (void)
111 111 {
112 112 }
113 113  
114 114 int cpu_ppc_handle_mmu_fault (CPUState *env, target_ulong address, int rw,
115 115 int is_user, int is_softmmu);
116 116  
117   -static inline int cpu_halted (CPUState *env)
  117 +static always_inline int cpu_halted (CPUState *env)
118 118 {
119 119 if (!env->halted)
120 120 return 0;
... ...
target-ppc/helper.c
... ... @@ -67,23 +67,23 @@ target_phys_addr_t cpu_get_phys_page_debug (CPUState *env, target_ulong addr)
67 67  
68 68 #else
69 69 /* Common routines used by software and hardware TLBs emulation */
70   -static inline int pte_is_valid (target_ulong pte0)
  70 +static always_inline int pte_is_valid (target_ulong pte0)
71 71 {
72 72 return pte0 & 0x80000000 ? 1 : 0;
73 73 }
74 74  
75   -static inline void pte_invalidate (target_ulong *pte0)
  75 +static always_inline void pte_invalidate (target_ulong *pte0)
76 76 {
77 77 *pte0 &= ~0x80000000;
78 78 }
79 79  
80 80 #if defined(TARGET_PPC64)
81   -static inline int pte64_is_valid (target_ulong pte0)
  81 +static always_inline int pte64_is_valid (target_ulong pte0)
82 82 {
83 83 return pte0 & 0x0000000000000001ULL ? 1 : 0;
84 84 }
85 85  
86   -static inline void pte64_invalidate (target_ulong *pte0)
  86 +static always_inline void pte64_invalidate (target_ulong *pte0)
87 87 {
88 88 *pte0 &= ~0x0000000000000001ULL;
89 89 }
... ... @@ -96,9 +96,9 @@ static inline void pte64_invalidate (target_ulong *pte0)
96 96 #define PTE64_CHECK_MASK (TARGET_PAGE_MASK | 0x7F)
97 97 #endif
98 98  
99   -static inline int _pte_check (mmu_ctx_t *ctx, int is_64b,
100   - target_ulong pte0, target_ulong pte1,
101   - int h, int rw)
  99 +static always_inline int _pte_check (mmu_ctx_t *ctx, int is_64b,
  100 + target_ulong pte0, target_ulong pte1,
  101 + int h, int rw)
102 102 {
103 103 target_ulong ptem, mmask;
104 104 int access, ret, pteh, ptev;
... ... @@ -258,9 +258,10 @@ static void ppc6xx_tlb_invalidate_all (CPUState *env)
258 258 tlb_flush(env, 1);
259 259 }
260 260  
261   -static inline void __ppc6xx_tlb_invalidate_virt (CPUState *env,
262   - target_ulong eaddr,
263   - int is_code, int match_epn)
  261 +static always_inline void __ppc6xx_tlb_invalidate_virt (CPUState *env,
  262 + target_ulong eaddr,
  263 + int is_code,
  264 + int match_epn)
264 265 {
265 266 #if !defined(FLUSH_ALL_TLBS)
266 267 ppc6xx_tlb_t *tlb;
... ... @@ -487,7 +488,7 @@ static int get_bat (CPUState *env, mmu_ctx_t *ctx,
487 488 }
488 489  
489 490 /* PTE table lookup */
490   -static inline int _find_pte (mmu_ctx_t *ctx, int is_64b, int h, int rw)
  491 +static always_inline int _find_pte (mmu_ctx_t *ctx, int is_64b, int h, int rw)
491 492 {
492 493 target_ulong base, pte0, pte1;
493 494 int i, good = -1;
... ... @@ -588,7 +589,8 @@ static int find_pte64 (mmu_ctx_t *ctx, int h, int rw)
588 589 }
589 590 #endif
590 591  
591   -static inline int find_pte (CPUState *env, mmu_ctx_t *ctx, int h, int rw)
  592 +static always_inline int find_pte (CPUState *env, mmu_ctx_t *ctx,
  593 + int h, int rw)
592 594 {
593 595 #if defined(TARGET_PPC64)
594 596 if (env->mmu_model == POWERPC_MMU_64B)
... ... @@ -720,10 +722,10 @@ void ppc_store_slb (CPUPPCState *env, int slb_nr, target_ulong rs)
720 722 #endif /* defined(TARGET_PPC64) */
721 723  
722 724 /* Perform segment based translation */
723   -static inline target_phys_addr_t get_pgaddr (target_phys_addr_t sdr1,
724   - int sdr_sh,
725   - target_phys_addr_t hash,
726   - target_phys_addr_t mask)
  725 +static always_inline target_phys_addr_t get_pgaddr (target_phys_addr_t sdr1,
  726 + int sdr_sh,
  727 + target_phys_addr_t hash,
  728 + target_phys_addr_t mask)
727 729 {
728 730 return (sdr1 & ((target_ulong)(-1ULL) << sdr_sh)) | (hash & mask);
729 731 }
... ... @@ -1594,8 +1596,9 @@ int cpu_ppc_handle_mmu_fault (CPUState *env, target_ulong address, int rw,
1594 1596 /*****************************************************************************/
1595 1597 /* BATs management */
1596 1598 #if !defined(FLUSH_ALL_TLBS)
1597   -static inline void do_invalidate_BAT (CPUPPCState *env,
1598   - target_ulong BATu, target_ulong mask)
  1599 +static always_inline void do_invalidate_BAT (CPUPPCState *env,
  1600 + target_ulong BATu,
  1601 + target_ulong mask)
1599 1602 {
1600 1603 target_ulong base, end, page;
1601 1604  
... ... @@ -1616,8 +1619,8 @@ static inline void do_invalidate_BAT (CPUPPCState *env,
1616 1619 }
1617 1620 #endif
1618 1621  
1619   -static inline void dump_store_bat (CPUPPCState *env, char ID, int ul, int nr,
1620   - target_ulong value)
  1622 +static always_inline void dump_store_bat (CPUPPCState *env, char ID,
  1623 + int ul, int nr, target_ulong value)
1621 1624 {
1622 1625 #if defined (DEBUG_BATS)
1623 1626 if (loglevel != 0) {
... ... @@ -1931,7 +1934,7 @@ void ppc_store_xer (CPUPPCState *env, target_ulong value)
1931 1934 }
1932 1935  
1933 1936 /* Swap temporary saved registers with GPRs */
1934   -static inline void swap_gpr_tgpr (CPUPPCState *env)
  1937 +static always_inline void swap_gpr_tgpr (CPUPPCState *env)
1935 1938 {
1936 1939 ppc_gpr_t tmp;
1937 1940  
... ...
target-ppc/op_helper.c
... ... @@ -601,7 +601,7 @@ void do_srad (void)
601 601 }
602 602 #endif
603 603  
604   -static inline int popcnt (uint32_t val)
  604 +static always_inline int popcnt (uint32_t val)
605 605 {
606 606 int i;
607 607  
... ... @@ -707,7 +707,7 @@ void do_fctidz (void)
707 707  
708 708 #endif
709 709  
710   -static inline void do_fri (int rounding_mode)
  710 +static always_inline void do_fri (int rounding_mode)
711 711 {
712 712 int curmode;
713 713  
... ... @@ -1430,12 +1430,12 @@ static uint8_t hbrev[16] = {
1430 1430 0x1, 0x9, 0x5, 0xD, 0x3, 0xB, 0x7, 0xF,
1431 1431 };
1432 1432  
1433   -static inline uint8_t byte_reverse (uint8_t val)
  1433 +static always_inline uint8_t byte_reverse (uint8_t val)
1434 1434 {
1435 1435 return hbrev[val >> 4] | (hbrev[val & 0xF] << 4);
1436 1436 }
1437 1437  
1438   -static inline uint32_t word_reverse (uint32_t val)
  1438 +static always_inline uint32_t word_reverse (uint32_t val)
1439 1439 {
1440 1440 return byte_reverse(val >> 24) | (byte_reverse(val >> 16) << 8) |
1441 1441 (byte_reverse(val >> 8) << 16) | (byte_reverse(val) << 24);
... ... @@ -1468,7 +1468,7 @@ void do_ev##name (void) \
1468 1468 }
1469 1469  
1470 1470 /* Fixed-point vector arithmetic */
1471   -static inline uint32_t _do_eabs (uint32_t val)
  1471 +static always_inline uint32_t _do_eabs (uint32_t val)
1472 1472 {
1473 1473 if (val != 0x80000000)
1474 1474 val &= ~0x80000000;
... ... @@ -1476,12 +1476,12 @@ static inline uint32_t _do_eabs (uint32_t val)
1476 1476 return val;
1477 1477 }
1478 1478  
1479   -static inline uint32_t _do_eaddw (uint32_t op1, uint32_t op2)
  1479 +static always_inline uint32_t _do_eaddw (uint32_t op1, uint32_t op2)
1480 1480 {
1481 1481 return op1 + op2;
1482 1482 }
1483 1483  
1484   -static inline int _do_ecntlsw (uint32_t val)
  1484 +static always_inline int _do_ecntlsw (uint32_t val)
1485 1485 {
1486 1486 if (val & 0x80000000)
1487 1487 return _do_cntlzw(~val);
... ... @@ -1489,12 +1489,12 @@ static inline int _do_ecntlsw (uint32_t val)
1489 1489 return _do_cntlzw(val);
1490 1490 }
1491 1491  
1492   -static inline int _do_ecntlzw (uint32_t val)
  1492 +static always_inline int _do_ecntlzw (uint32_t val)
1493 1493 {
1494 1494 return _do_cntlzw(val);
1495 1495 }
1496 1496  
1497   -static inline uint32_t _do_eneg (uint32_t val)
  1497 +static always_inline uint32_t _do_eneg (uint32_t val)
1498 1498 {
1499 1499 if (val != 0x80000000)
1500 1500 val ^= 0x80000000;
... ... @@ -1502,35 +1502,35 @@ static inline uint32_t _do_eneg (uint32_t val)
1502 1502 return val;
1503 1503 }
1504 1504  
1505   -static inline uint32_t _do_erlw (uint32_t op1, uint32_t op2)
  1505 +static always_inline uint32_t _do_erlw (uint32_t op1, uint32_t op2)
1506 1506 {
1507 1507 return rotl32(op1, op2);
1508 1508 }
1509 1509  
1510   -static inline uint32_t _do_erndw (uint32_t val)
  1510 +static always_inline uint32_t _do_erndw (uint32_t val)
1511 1511 {
1512 1512 return (val + 0x000080000000) & 0xFFFF0000;
1513 1513 }
1514 1514  
1515   -static inline uint32_t _do_eslw (uint32_t op1, uint32_t op2)
  1515 +static always_inline uint32_t _do_eslw (uint32_t op1, uint32_t op2)
1516 1516 {
1517 1517 /* No error here: 6 bits are used */
1518 1518 return op1 << (op2 & 0x3F);
1519 1519 }
1520 1520  
1521   -static inline int32_t _do_esrws (int32_t op1, uint32_t op2)
  1521 +static always_inline int32_t _do_esrws (int32_t op1, uint32_t op2)
1522 1522 {
1523 1523 /* No error here: 6 bits are used */
1524 1524 return op1 >> (op2 & 0x3F);
1525 1525 }
1526 1526  
1527   -static inline uint32_t _do_esrwu (uint32_t op1, uint32_t op2)
  1527 +static always_inline uint32_t _do_esrwu (uint32_t op1, uint32_t op2)
1528 1528 {
1529 1529 /* No error here: 6 bits are used */
1530 1530 return op1 >> (op2 & 0x3F);
1531 1531 }
1532 1532  
1533   -static inline uint32_t _do_esubfw (uint32_t op1, uint32_t op2)
  1533 +static always_inline uint32_t _do_esubfw (uint32_t op1, uint32_t op2)
1534 1534 {
1535 1535 return op2 - op1;
1536 1536 }
... ... @@ -1559,7 +1559,7 @@ DO_SPE_OP2(srwu);
1559 1559 DO_SPE_OP2(subfw);
1560 1560  
1561 1561 /* evsel is a little bit more complicated... */
1562   -static inline uint32_t _do_esel (uint32_t op1, uint32_t op2, int n)
  1562 +static always_inline uint32_t _do_esel (uint32_t op1, uint32_t op2, int n)
1563 1563 {
1564 1564 if (n)
1565 1565 return op1;
... ... @@ -1582,31 +1582,31 @@ void do_ev##name (void) \
1582 1582 _do_e##name(T0_64, T1_64)); \
1583 1583 }
1584 1584  
1585   -static inline uint32_t _do_evcmp_merge (int t0, int t1)
  1585 +static always_inline uint32_t _do_evcmp_merge (int t0, int t1)
1586 1586 {
1587 1587 return (t0 << 3) | (t1 << 2) | ((t0 | t1) << 1) | (t0 & t1);
1588 1588 }
1589   -static inline int _do_ecmpeq (uint32_t op1, uint32_t op2)
  1589 +static always_inline int _do_ecmpeq (uint32_t op1, uint32_t op2)
1590 1590 {
1591 1591 return op1 == op2 ? 1 : 0;
1592 1592 }
1593 1593  
1594   -static inline int _do_ecmpgts (int32_t op1, int32_t op2)
  1594 +static always_inline int _do_ecmpgts (int32_t op1, int32_t op2)
1595 1595 {
1596 1596 return op1 > op2 ? 1 : 0;
1597 1597 }
1598 1598  
1599   -static inline int _do_ecmpgtu (uint32_t op1, uint32_t op2)
  1599 +static always_inline int _do_ecmpgtu (uint32_t op1, uint32_t op2)
1600 1600 {
1601 1601 return op1 > op2 ? 1 : 0;
1602 1602 }
1603 1603  
1604   -static inline int _do_ecmplts (int32_t op1, int32_t op2)
  1604 +static always_inline int _do_ecmplts (int32_t op1, int32_t op2)
1605 1605 {
1606 1606 return op1 < op2 ? 1 : 0;
1607 1607 }
1608 1608  
1609   -static inline int _do_ecmpltu (uint32_t op1, uint32_t op2)
  1609 +static always_inline int _do_ecmpltu (uint32_t op1, uint32_t op2)
1610 1610 {
1611 1611 return op1 < op2 ? 1 : 0;
1612 1612 }
... ... @@ -1623,7 +1623,7 @@ DO_SPE_CMP(cmplts);
1623 1623 DO_SPE_CMP(cmpltu);
1624 1624  
1625 1625 /* Single precision floating-point conversions from/to integer */
1626   -static inline uint32_t _do_efscfsi (int32_t val)
  1626 +static always_inline uint32_t _do_efscfsi (int32_t val)
1627 1627 {
1628 1628 union {
1629 1629 uint32_t u;
... ... @@ -1635,7 +1635,7 @@ static inline uint32_t _do_efscfsi (int32_t val)
1635 1635 return u.u;
1636 1636 }
1637 1637  
1638   -static inline uint32_t _do_efscfui (uint32_t val)
  1638 +static always_inline uint32_t _do_efscfui (uint32_t val)
1639 1639 {
1640 1640 union {
1641 1641 uint32_t u;
... ... @@ -1647,7 +1647,7 @@ static inline uint32_t _do_efscfui (uint32_t val)
1647 1647 return u.u;
1648 1648 }
1649 1649  
1650   -static inline int32_t _do_efsctsi (uint32_t val)
  1650 +static always_inline int32_t _do_efsctsi (uint32_t val)
1651 1651 {
1652 1652 union {
1653 1653 int32_t u;
... ... @@ -1662,7 +1662,7 @@ static inline int32_t _do_efsctsi (uint32_t val)
1662 1662 return float32_to_int32(u.f, &env->spe_status);
1663 1663 }
1664 1664  
1665   -static inline uint32_t _do_efsctui (uint32_t val)
  1665 +static always_inline uint32_t _do_efsctui (uint32_t val)
1666 1666 {
1667 1667 union {
1668 1668 int32_t u;
... ... @@ -1677,7 +1677,7 @@ static inline uint32_t _do_efsctui (uint32_t val)
1677 1677 return float32_to_uint32(u.f, &env->spe_status);
1678 1678 }
1679 1679  
1680   -static inline int32_t _do_efsctsiz (uint32_t val)
  1680 +static always_inline int32_t _do_efsctsiz (uint32_t val)
1681 1681 {
1682 1682 union {
1683 1683 int32_t u;
... ... @@ -1692,7 +1692,7 @@ static inline int32_t _do_efsctsiz (uint32_t val)
1692 1692 return float32_to_int32_round_to_zero(u.f, &env->spe_status);
1693 1693 }
1694 1694  
1695   -static inline uint32_t _do_efsctuiz (uint32_t val)
  1695 +static always_inline uint32_t _do_efsctuiz (uint32_t val)
1696 1696 {
1697 1697 union {
1698 1698 int32_t u;
... ... @@ -1738,7 +1738,7 @@ void do_efsctuiz (void)
1738 1738 }
1739 1739  
1740 1740 /* Single precision floating-point conversion to/from fractional */
1741   -static inline uint32_t _do_efscfsf (uint32_t val)
  1741 +static always_inline uint32_t _do_efscfsf (uint32_t val)
1742 1742 {
1743 1743 union {
1744 1744 uint32_t u;
... ... @@ -1753,7 +1753,7 @@ static inline uint32_t _do_efscfsf (uint32_t val)
1753 1753 return u.u;
1754 1754 }
1755 1755  
1756   -static inline uint32_t _do_efscfuf (uint32_t val)
  1756 +static always_inline uint32_t _do_efscfuf (uint32_t val)
1757 1757 {
1758 1758 union {
1759 1759 uint32_t u;
... ... @@ -1768,7 +1768,7 @@ static inline uint32_t _do_efscfuf (uint32_t val)
1768 1768 return u.u;
1769 1769 }
1770 1770  
1771   -static inline int32_t _do_efsctsf (uint32_t val)
  1771 +static always_inline int32_t _do_efsctsf (uint32_t val)
1772 1772 {
1773 1773 union {
1774 1774 int32_t u;
... ... @@ -1786,7 +1786,7 @@ static inline int32_t _do_efsctsf (uint32_t val)
1786 1786 return float32_to_int32(u.f, &env->spe_status);
1787 1787 }
1788 1788  
1789   -static inline uint32_t _do_efsctuf (uint32_t val)
  1789 +static always_inline uint32_t _do_efsctuf (uint32_t val)
1790 1790 {
1791 1791 union {
1792 1792 int32_t u;
... ... @@ -1804,7 +1804,7 @@ static inline uint32_t _do_efsctuf (uint32_t val)
1804 1804 return float32_to_uint32(u.f, &env->spe_status);
1805 1805 }
1806 1806  
1807   -static inline int32_t _do_efsctsfz (uint32_t val)
  1807 +static always_inline int32_t _do_efsctsfz (uint32_t val)
1808 1808 {
1809 1809 union {
1810 1810 int32_t u;
... ... @@ -1822,7 +1822,7 @@ static inline int32_t _do_efsctsfz (uint32_t val)
1822 1822 return float32_to_int32_round_to_zero(u.f, &env->spe_status);
1823 1823 }
1824 1824  
1825   -static inline uint32_t _do_efsctufz (uint32_t val)
  1825 +static always_inline uint32_t _do_efsctufz (uint32_t val)
1826 1826 {
1827 1827 union {
1828 1828 int32_t u;
... ... @@ -1871,19 +1871,19 @@ void do_efsctufz (void)
1871 1871 }
1872 1872  
1873 1873 /* Double precision floating point helpers */
1874   -static inline int _do_efdcmplt (uint64_t op1, uint64_t op2)
  1874 +static always_inline int _do_efdcmplt (uint64_t op1, uint64_t op2)
1875 1875 {
1876 1876 /* XXX: TODO: test special values (NaN, infinites, ...) */
1877 1877 return _do_efdtstlt(op1, op2);
1878 1878 }
1879 1879  
1880   -static inline int _do_efdcmpgt (uint64_t op1, uint64_t op2)
  1880 +static always_inline int _do_efdcmpgt (uint64_t op1, uint64_t op2)
1881 1881 {
1882 1882 /* XXX: TODO: test special values (NaN, infinites, ...) */
1883 1883 return _do_efdtstgt(op1, op2);
1884 1884 }
1885 1885  
1886   -static inline int _do_efdcmpeq (uint64_t op1, uint64_t op2)
  1886 +static always_inline int _do_efdcmpeq (uint64_t op1, uint64_t op2)
1887 1887 {
1888 1888 /* XXX: TODO: test special values (NaN, infinites, ...) */
1889 1889 return _do_efdtsteq(op1, op2);
... ... @@ -1905,7 +1905,7 @@ void do_efdcmpeq (void)
1905 1905 }
1906 1906  
1907 1907 /* Double precision floating-point conversion to/from integer */
1908   -static inline uint64_t _do_efdcfsi (int64_t val)
  1908 +static always_inline uint64_t _do_efdcfsi (int64_t val)
1909 1909 {
1910 1910 union {
1911 1911 uint64_t u;
... ... @@ -1917,7 +1917,7 @@ static inline uint64_t _do_efdcfsi (int64_t val)
1917 1917 return u.u;
1918 1918 }
1919 1919  
1920   -static inline uint64_t _do_efdcfui (uint64_t val)
  1920 +static always_inline uint64_t _do_efdcfui (uint64_t val)
1921 1921 {
1922 1922 union {
1923 1923 uint64_t u;
... ... @@ -1929,7 +1929,7 @@ static inline uint64_t _do_efdcfui (uint64_t val)
1929 1929 return u.u;
1930 1930 }
1931 1931  
1932   -static inline int64_t _do_efdctsi (uint64_t val)
  1932 +static always_inline int64_t _do_efdctsi (uint64_t val)
1933 1933 {
1934 1934 union {
1935 1935 int64_t u;
... ... @@ -1944,7 +1944,7 @@ static inline int64_t _do_efdctsi (uint64_t val)
1944 1944 return float64_to_int64(u.f, &env->spe_status);
1945 1945 }
1946 1946  
1947   -static inline uint64_t _do_efdctui (uint64_t val)
  1947 +static always_inline uint64_t _do_efdctui (uint64_t val)
1948 1948 {
1949 1949 union {
1950 1950 int64_t u;
... ... @@ -1959,7 +1959,7 @@ static inline uint64_t _do_efdctui (uint64_t val)
1959 1959 return float64_to_uint64(u.f, &env->spe_status);
1960 1960 }
1961 1961  
1962   -static inline int64_t _do_efdctsiz (uint64_t val)
  1962 +static always_inline int64_t _do_efdctsiz (uint64_t val)
1963 1963 {
1964 1964 union {
1965 1965 int64_t u;
... ... @@ -1974,7 +1974,7 @@ static inline int64_t _do_efdctsiz (uint64_t val)
1974 1974 return float64_to_int64_round_to_zero(u.f, &env->spe_status);
1975 1975 }
1976 1976  
1977   -static inline uint64_t _do_efdctuiz (uint64_t val)
  1977 +static always_inline uint64_t _do_efdctuiz (uint64_t val)
1978 1978 {
1979 1979 union {
1980 1980 int64_t u;
... ... @@ -2020,7 +2020,7 @@ void do_efdctuiz (void)
2020 2020 }
2021 2021  
2022 2022 /* Double precision floating-point conversion to/from fractional */
2023   -static inline uint64_t _do_efdcfsf (int64_t val)
  2023 +static always_inline uint64_t _do_efdcfsf (int64_t val)
2024 2024 {
2025 2025 union {
2026 2026 uint64_t u;
... ... @@ -2035,7 +2035,7 @@ static inline uint64_t _do_efdcfsf (int64_t val)
2035 2035 return u.u;
2036 2036 }
2037 2037  
2038   -static inline uint64_t _do_efdcfuf (uint64_t val)
  2038 +static always_inline uint64_t _do_efdcfuf (uint64_t val)
2039 2039 {
2040 2040 union {
2041 2041 uint64_t u;
... ... @@ -2050,7 +2050,7 @@ static inline uint64_t _do_efdcfuf (uint64_t val)
2050 2050 return u.u;
2051 2051 }
2052 2052  
2053   -static inline int64_t _do_efdctsf (uint64_t val)
  2053 +static always_inline int64_t _do_efdctsf (uint64_t val)
2054 2054 {
2055 2055 union {
2056 2056 int64_t u;
... ... @@ -2068,7 +2068,7 @@ static inline int64_t _do_efdctsf (uint64_t val)
2068 2068 return float64_to_int32(u.f, &env->spe_status);
2069 2069 }
2070 2070  
2071   -static inline uint64_t _do_efdctuf (uint64_t val)
  2071 +static always_inline uint64_t _do_efdctuf (uint64_t val)
2072 2072 {
2073 2073 union {
2074 2074 int64_t u;
... ... @@ -2086,7 +2086,7 @@ static inline uint64_t _do_efdctuf (uint64_t val)
2086 2086 return float64_to_uint32(u.f, &env->spe_status);
2087 2087 }
2088 2088  
2089   -static inline int64_t _do_efdctsfz (uint64_t val)
  2089 +static always_inline int64_t _do_efdctsfz (uint64_t val)
2090 2090 {
2091 2091 union {
2092 2092 int64_t u;
... ... @@ -2104,7 +2104,7 @@ static inline int64_t _do_efdctsfz (uint64_t val)
2104 2104 return float64_to_int32_round_to_zero(u.f, &env->spe_status);
2105 2105 }
2106 2106  
2107   -static inline uint64_t _do_efdctufz (uint64_t val)
  2107 +static always_inline uint64_t _do_efdctufz (uint64_t val)
2108 2108 {
2109 2109 union {
2110 2110 int64_t u;
... ... @@ -2153,7 +2153,7 @@ void do_efdctufz (void)
2153 2153 }
2154 2154  
2155 2155 /* Floating point conversion between single and double precision */
2156   -static inline uint32_t _do_efscfd (uint64_t val)
  2156 +static always_inline uint32_t _do_efscfd (uint64_t val)
2157 2157 {
2158 2158 union {
2159 2159 uint64_t u;
... ... @@ -2170,7 +2170,7 @@ static inline uint32_t _do_efscfd (uint64_t val)
2170 2170 return u2.u;
2171 2171 }
2172 2172  
2173   -static inline uint64_t _do_efdcfs (uint32_t val)
  2173 +static always_inline uint64_t _do_efdcfs (uint32_t val)
2174 2174 {
2175 2175 union {
2176 2176 uint64_t u;
... ... @@ -2214,19 +2214,19 @@ DO_SPE_OP2(fsmul);
2214 2214 DO_SPE_OP2(fsdiv);
2215 2215  
2216 2216 /* Single-precision floating-point comparisons */
2217   -static inline int _do_efscmplt (uint32_t op1, uint32_t op2)
  2217 +static always_inline int _do_efscmplt (uint32_t op1, uint32_t op2)
2218 2218 {
2219 2219 /* XXX: TODO: test special values (NaN, infinites, ...) */
2220 2220 return _do_efststlt(op1, op2);
2221 2221 }
2222 2222  
2223   -static inline int _do_efscmpgt (uint32_t op1, uint32_t op2)
  2223 +static always_inline int _do_efscmpgt (uint32_t op1, uint32_t op2)
2224 2224 {
2225 2225 /* XXX: TODO: test special values (NaN, infinites, ...) */
2226 2226 return _do_efststgt(op1, op2);
2227 2227 }
2228 2228  
2229   -static inline int _do_efscmpeq (uint32_t op1, uint32_t op2)
  2229 +static always_inline int _do_efscmpeq (uint32_t op1, uint32_t op2)
2230 2230 {
2231 2231 /* XXX: TODO: test special values (NaN, infinites, ...) */
2232 2232 return _do_efststeq(op1, op2);
... ...
target-ppc/op_helper.h
... ... @@ -277,7 +277,7 @@ void do_evfsctuiz (void);
277 277  
278 278 /* Inlined helpers: used in micro-operation as well as helpers */
279 279 /* Generic fixed-point helpers */
280   -static inline int _do_cntlzw (uint32_t val)
  280 +static always_inline int _do_cntlzw (uint32_t val)
281 281 {
282 282 int cnt = 0;
283 283 if (!(val & 0xFFFF0000UL)) {
... ... @@ -306,7 +306,7 @@ static inline int _do_cntlzw (uint32_t val)
306 306 return cnt;
307 307 }
308 308  
309   -static inline int _do_cntlzd (uint64_t val)
  309 +static always_inline int _do_cntlzd (uint64_t val)
310 310 {
311 311 int cnt = 0;
312 312 #if HOST_LONG_BITS == 64
... ... @@ -350,19 +350,19 @@ static inline int _do_cntlzd (uint64_t val)
350 350 #if defined(TARGET_PPCEMB)
351 351 /* SPE extension */
352 352 /* Single precision floating-point helpers */
353   -static inline uint32_t _do_efsabs (uint32_t val)
  353 +static always_inline uint32_t _do_efsabs (uint32_t val)
354 354 {
355 355 return val & ~0x80000000;
356 356 }
357   -static inline uint32_t _do_efsnabs (uint32_t val)
  357 +static always_inline uint32_t _do_efsnabs (uint32_t val)
358 358 {
359 359 return val | 0x80000000;
360 360 }
361   -static inline uint32_t _do_efsneg (uint32_t val)
  361 +static always_inline uint32_t _do_efsneg (uint32_t val)
362 362 {
363 363 return val ^ 0x80000000;
364 364 }
365   -static inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2)
  365 +static always_inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2)
366 366 {
367 367 union {
368 368 uint32_t u;
... ... @@ -373,7 +373,7 @@ static inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2)
373 373 u1.f = float32_add(u1.f, u2.f, &env->spe_status);
374 374 return u1.u;
375 375 }
376   -static inline uint32_t _do_efssub (uint32_t op1, uint32_t op2)
  376 +static always_inline uint32_t _do_efssub (uint32_t op1, uint32_t op2)
377 377 {
378 378 union {
379 379 uint32_t u;
... ... @@ -384,7 +384,7 @@ static inline uint32_t _do_efssub (uint32_t op1, uint32_t op2)
384 384 u1.f = float32_sub(u1.f, u2.f, &env->spe_status);
385 385 return u1.u;
386 386 }
387   -static inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2)
  387 +static always_inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2)
388 388 {
389 389 union {
390 390 uint32_t u;
... ... @@ -395,7 +395,7 @@ static inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2)
395 395 u1.f = float32_mul(u1.f, u2.f, &env->spe_status);
396 396 return u1.u;
397 397 }
398   -static inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2)
  398 +static always_inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2)
399 399 {
400 400 union {
401 401 uint32_t u;
... ... @@ -407,7 +407,7 @@ static inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2)
407 407 return u1.u;
408 408 }
409 409  
410   -static inline int _do_efststlt (uint32_t op1, uint32_t op2)
  410 +static always_inline int _do_efststlt (uint32_t op1, uint32_t op2)
411 411 {
412 412 union {
413 413 uint32_t u;
... ... @@ -417,7 +417,7 @@ static inline int _do_efststlt (uint32_t op1, uint32_t op2)
417 417 u2.u = op2;
418 418 return float32_lt(u1.f, u2.f, &env->spe_status) ? 1 : 0;
419 419 }
420   -static inline int _do_efststgt (uint32_t op1, uint32_t op2)
  420 +static always_inline int _do_efststgt (uint32_t op1, uint32_t op2)
421 421 {
422 422 union {
423 423 uint32_t u;
... ... @@ -427,7 +427,7 @@ static inline int _do_efststgt (uint32_t op1, uint32_t op2)
427 427 u2.u = op2;
428 428 return float32_le(u1.f, u2.f, &env->spe_status) ? 0 : 1;
429 429 }
430   -static inline int _do_efststeq (uint32_t op1, uint32_t op2)
  430 +static always_inline int _do_efststeq (uint32_t op1, uint32_t op2)
431 431 {
432 432 union {
433 433 uint32_t u;
... ... @@ -438,7 +438,7 @@ static inline int _do_efststeq (uint32_t op1, uint32_t op2)
438 438 return float32_eq(u1.f, u2.f, &env->spe_status) ? 1 : 0;
439 439 }
440 440 /* Double precision floating-point helpers */
441   -static inline int _do_efdtstlt (uint64_t op1, uint64_t op2)
  441 +static always_inline int _do_efdtstlt (uint64_t op1, uint64_t op2)
442 442 {
443 443 union {
444 444 uint64_t u;
... ... @@ -448,7 +448,7 @@ static inline int _do_efdtstlt (uint64_t op1, uint64_t op2)
448 448 u2.u = op2;
449 449 return float64_lt(u1.f, u2.f, &env->spe_status) ? 1 : 0;
450 450 }
451   -static inline int _do_efdtstgt (uint64_t op1, uint64_t op2)
  451 +static always_inline int _do_efdtstgt (uint64_t op1, uint64_t op2)
452 452 {
453 453 union {
454 454 uint64_t u;
... ... @@ -458,7 +458,7 @@ static inline int _do_efdtstgt (uint64_t op1, uint64_t op2)
458 458 u2.u = op2;
459 459 return float64_le(u1.f, u2.f, &env->spe_status) ? 0 : 1;
460 460 }
461   -static inline int _do_efdtsteq (uint64_t op1, uint64_t op2)
  461 +static always_inline int _do_efdtsteq (uint64_t op1, uint64_t op2)
462 462 {
463 463 union {
464 464 uint64_t u;
... ...
target-ppc/op_helper_mem.h
... ... @@ -19,14 +19,15 @@
19 19 */
20 20  
21 21 /* Multiple word / string load and store */
22   -static inline target_ulong glue(ld32r, MEMSUFFIX) (target_ulong EA)
  22 +static always_inline target_ulong glue(ld32r, MEMSUFFIX) (target_ulong EA)
23 23 {
24 24 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
25 25 return ((tmp & 0xFF000000UL) >> 24) | ((tmp & 0x00FF0000UL) >> 8) |
26 26 ((tmp & 0x0000FF00UL) << 8) | ((tmp & 0x000000FFUL) << 24);
27 27 }
28 28  
29   -static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, target_ulong data)
  29 +static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
  30 + target_ulong data)
30 31 {
31 32 uint32_t tmp =
32 33 ((data & 0xFF000000UL) >> 24) | ((data & 0x00FF0000UL) >> 8) |
... ... @@ -399,7 +400,7 @@ void glue(do_POWER2_lfq, MEMSUFFIX) (void)
399 400 FT1 = glue(ldfq, MEMSUFFIX)((uint32_t)(T0 + 4));
400 401 }
401 402  
402   -static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
  403 +static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
403 404 {
404 405 union {
405 406 double d;
... ... @@ -431,7 +432,7 @@ void glue(do_POWER2_stfq, MEMSUFFIX) (void)
431 432 glue(stfq, MEMSUFFIX)((uint32_t)(T0 + 4), FT1);
432 433 }
433 434  
434   -static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
  435 +static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
435 436 {
436 437 union {
437 438 double d;
... ...
target-ppc/op_mem.h
... ... @@ -18,19 +18,19 @@
18 18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 19 */
20 20  
21   -static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
  21 +static always_inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
22 22 {
23 23 uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
24 24 return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
25 25 }
26 26  
27   -static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
  27 +static always_inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
28 28 {
29 29 int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
30 30 return (int16_t)((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
31 31 }
32 32  
33   -static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
  33 +static always_inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
34 34 {
35 35 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
36 36 return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
... ... @@ -38,7 +38,7 @@ static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
38 38 }
39 39  
40 40 #if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
41   -static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
  41 +static always_inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
42 42 {
43 43 uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
44 44 return ((tmp & 0xFF00000000000000ULL) >> 56) |
... ... @@ -53,12 +53,12 @@ static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
53 53 #endif
54 54  
55 55 #if defined(TARGET_PPC64)
56   -static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
  56 +static always_inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
57 57 {
58 58 return (int32_t)glue(ldl, MEMSUFFIX)(EA);
59 59 }
60 60  
61   -static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
  61 +static always_inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
62 62 {
63 63 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
64 64 return (int32_t)((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
... ... @@ -66,13 +66,15 @@ static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
66 66 }
67 67 #endif
68 68  
69   -static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
  69 +static always_inline void glue(st16r, MEMSUFFIX) (target_ulong EA,
  70 + uint16_t data)
70 71 {
71 72 uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
72 73 glue(stw, MEMSUFFIX)(EA, tmp);
73 74 }
74 75  
75   -static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
  76 +static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
  77 + uint32_t data)
76 78 {
77 79 uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
78 80 ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
... ... @@ -80,7 +82,8 @@ static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
80 82 }
81 83  
82 84 #if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
83   -static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
  85 +static always_inline void glue(st64r, MEMSUFFIX) (target_ulong EA,
  86 + uint64_t data)
84 87 {
85 88 uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
86 89 ((data & 0x00FF000000000000ULL) >> 40) |
... ... @@ -403,12 +406,12 @@ void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
403 406 }
404 407 #endif
405 408  
406   -static inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
  409 +static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
407 410 {
408 411 glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
409 412 }
410 413  
411   -static inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
  414 +static always_inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
412 415 {
413 416 union {
414 417 double d;
... ... @@ -429,7 +432,7 @@ PPC_STF_OP_64(fs, stfs);
429 432 PPC_STF_OP_64(fiwx, stfiwx);
430 433 #endif
431 434  
432   -static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
  435 +static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
433 436 {
434 437 union {
435 438 double d;
... ... @@ -448,7 +451,7 @@ static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
448 451 glue(stfq, MEMSUFFIX)(EA, u.d);
449 452 }
450 453  
451   -static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
  454 +static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
452 455 {
453 456 union {
454 457 float f;
... ... @@ -463,7 +466,7 @@ static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
463 466 glue(stfl, MEMSUFFIX)(EA, u.f);
464 467 }
465 468  
466   -static inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
  469 +static always_inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
467 470 {
468 471 union {
469 472 double d;
... ... @@ -506,7 +509,7 @@ void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
506 509 }
507 510 #endif
508 511  
509   -static inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
  512 +static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
510 513 {
511 514 return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
512 515 }
... ... @@ -518,7 +521,7 @@ PPC_LDF_OP_64(fd, ldfq);
518 521 PPC_LDF_OP_64(fs, ldfs);
519 522 #endif
520 523  
521   -static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
  524 +static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
522 525 {
523 526 union {
524 527 double d;
... ... @@ -538,7 +541,7 @@ static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
538 541 return u.d;
539 542 }
540 543  
541   -static inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
  544 +static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
542 545 {
543 546 union {
544 547 float f;
... ... @@ -1105,7 +1108,7 @@ PPC_SPE_ST_OP(dd, stq);
1105 1108 PPC_SPE_LD_OP(dd_le, ld64r);
1106 1109 PPC_SPE_ST_OP(dd_le, st64r);
1107 1110 #endif
1108   -static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
  1111 +static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
1109 1112 {
1110 1113 uint64_t ret;
1111 1114 ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
... ... @@ -1113,13 +1116,14 @@ static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
1113 1116 return ret;
1114 1117 }
1115 1118 PPC_SPE_LD_OP(dw, spe_ldw);
1116   -static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
  1119 +static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
  1120 + uint64_t data)
1117 1121 {
1118 1122 glue(stl, MEMSUFFIX)(EA, data >> 32);
1119 1123 glue(stl, MEMSUFFIX)(EA + 4, data);
1120 1124 }
1121 1125 PPC_SPE_ST_OP(dw, spe_stdw);
1122   -static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
  1126 +static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
1123 1127 {
1124 1128 uint64_t ret;
1125 1129 ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
... ... @@ -1127,14 +1131,14 @@ static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
1127 1131 return ret;
1128 1132 }
1129 1133 PPC_SPE_LD_OP(dw_le, spe_ldw_le);
1130   -static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
1131   - uint64_t data)
  1134 +static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
  1135 + uint64_t data)
1132 1136 {
1133 1137 glue(st32r, MEMSUFFIX)(EA, data >> 32);
1134 1138 glue(st32r, MEMSUFFIX)(EA + 4, data);
1135 1139 }
1136 1140 PPC_SPE_ST_OP(dw_le, spe_stdw_le);
1137   -static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
  1141 +static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
1138 1142 {
1139 1143 uint64_t ret;
1140 1144 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
... ... @@ -1144,7 +1148,8 @@ static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
1144 1148 return ret;
1145 1149 }
1146 1150 PPC_SPE_LD_OP(dh, spe_ldh);
1147   -static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
  1151 +static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
  1152 + uint64_t data)
1148 1153 {
1149 1154 glue(stw, MEMSUFFIX)(EA, data >> 48);
1150 1155 glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
... ... @@ -1152,7 +1157,7 @@ static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
1152 1157 glue(stw, MEMSUFFIX)(EA + 6, data);
1153 1158 }
1154 1159 PPC_SPE_ST_OP(dh, spe_stdh);
1155   -static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
  1160 +static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
1156 1161 {
1157 1162 uint64_t ret;
1158 1163 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
... ... @@ -1162,8 +1167,8 @@ static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
1162 1167 return ret;
1163 1168 }
1164 1169 PPC_SPE_LD_OP(dh_le, spe_ldh_le);
1165   -static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1166   - uint64_t data)
  1170 +static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
  1171 + uint64_t data)
1167 1172 {
1168 1173 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1169 1174 glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
... ... @@ -1171,7 +1176,7 @@ static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1171 1176 glue(st16r, MEMSUFFIX)(EA + 6, data);
1172 1177 }
1173 1178 PPC_SPE_ST_OP(dh_le, spe_stdh_le);
1174   -static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
  1179 +static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
1175 1180 {
1176 1181 uint64_t ret;
1177 1182 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
... ... @@ -1179,13 +1184,14 @@ static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
1179 1184 return ret;
1180 1185 }
1181 1186 PPC_SPE_LD_OP(whe, spe_lwhe);
1182   -static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
  1187 +static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
  1188 + uint64_t data)
1183 1189 {
1184 1190 glue(stw, MEMSUFFIX)(EA, data >> 48);
1185 1191 glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
1186 1192 }
1187 1193 PPC_SPE_ST_OP(whe, spe_stwhe);
1188   -static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
  1194 +static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1189 1195 {
1190 1196 uint64_t ret;
1191 1197 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
... ... @@ -1193,14 +1199,14 @@ static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1193 1199 return ret;
1194 1200 }
1195 1201 PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
1196   -static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
1197   - uint64_t data)
  1202 +static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
  1203 + uint64_t data)
1198 1204 {
1199 1205 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1200 1206 glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1201 1207 }
1202 1208 PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
1203   -static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
  1209 +static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1204 1210 {
1205 1211 uint64_t ret;
1206 1212 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
... ... @@ -1208,7 +1214,7 @@ static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1208 1214 return ret;
1209 1215 }
1210 1216 PPC_SPE_LD_OP(whou, spe_lwhou);
1211   -static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
  1217 +static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1212 1218 {
1213 1219 uint64_t ret;
1214 1220 ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
... ... @@ -1216,13 +1222,14 @@ static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1216 1222 return ret;
1217 1223 }
1218 1224 PPC_SPE_LD_OP(whos, spe_lwhos);
1219   -static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
  1225 +static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
  1226 + uint64_t data)
1220 1227 {
1221 1228 glue(stw, MEMSUFFIX)(EA, data >> 32);
1222 1229 glue(stw, MEMSUFFIX)(EA + 2, data);
1223 1230 }
1224 1231 PPC_SPE_ST_OP(who, spe_stwho);
1225   -static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
  1232 +static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1226 1233 {
1227 1234 uint64_t ret;
1228 1235 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
... ... @@ -1230,7 +1237,7 @@ static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1230 1237 return ret;
1231 1238 }
1232 1239 PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1233   -static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
  1240 +static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1234 1241 {
1235 1242 uint64_t ret;
1236 1243 ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
... ... @@ -1238,55 +1245,57 @@ static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1238 1245 return ret;
1239 1246 }
1240 1247 PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1241   -static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1242   - uint64_t data)
  1248 +static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
  1249 + uint64_t data)
1243 1250 {
1244 1251 glue(st16r, MEMSUFFIX)(EA, data >> 32);
1245 1252 glue(st16r, MEMSUFFIX)(EA + 2, data);
1246 1253 }
1247 1254 PPC_SPE_ST_OP(who_le, spe_stwho_le);
1248 1255 #if !defined(TARGET_PPC64)
1249   -static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
  1256 +static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
  1257 + uint64_t data)
1250 1258 {
1251 1259 glue(stl, MEMSUFFIX)(EA, data);
1252 1260 }
1253 1261 PPC_SPE_ST_OP(wwo, spe_stwwo);
1254   -static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1255   - uint64_t data)
  1262 +static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
  1263 + uint64_t data)
1256 1264 {
1257 1265 glue(st32r, MEMSUFFIX)(EA, data);
1258 1266 }
1259 1267 PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1260 1268 #endif
1261   -static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
  1269 +static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
1262 1270 {
1263 1271 uint16_t tmp;
1264 1272 tmp = glue(lduw, MEMSUFFIX)(EA);
1265 1273 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1266 1274 }
1267 1275 PPC_SPE_LD_OP(h, spe_lh);
1268   -static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
  1276 +static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
1269 1277 {
1270 1278 uint16_t tmp;
1271 1279 tmp = glue(ld16r, MEMSUFFIX)(EA);
1272 1280 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1273 1281 }
1274 1282 PPC_SPE_LD_OP(h_le, spe_lh_le);
1275   -static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
  1283 +static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
1276 1284 {
1277 1285 uint32_t tmp;
1278 1286 tmp = glue(ldl, MEMSUFFIX)(EA);
1279 1287 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1280 1288 }
1281 1289 PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
1282   -static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
  1290 +static always_inline
  1291 +uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1283 1292 {
1284 1293 uint32_t tmp;
1285 1294 tmp = glue(ld32r, MEMSUFFIX)(EA);
1286 1295 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1287 1296 }
1288 1297 PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1289   -static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
  1298 +static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1290 1299 {
1291 1300 uint64_t ret;
1292 1301 uint16_t tmp;
... ... @@ -1297,7 +1306,8 @@ static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1297 1306 return ret;
1298 1307 }
1299 1308 PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
1300   -static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
  1309 +static always_inline
  1310 +uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1301 1311 {
1302 1312 uint64_t ret;
1303 1313 uint16_t tmp;
... ...
target-ppc/translate.c
... ... @@ -53,7 +53,7 @@ static uint32_t *gen_opparam_ptr;
53 53  
54 54 #include "gen-op.h"
55 55  
56   -static inline void gen_set_T0 (target_ulong val)
  56 +static always_inline void gen_set_T0 (target_ulong val)
57 57 {
58 58 #if defined(TARGET_PPC64)
59 59 if (val >> 32)
... ... @@ -63,7 +63,7 @@ static inline void gen_set_T0 (target_ulong val)
63 63 gen_op_set_T0(val);
64 64 }
65 65  
66   -static inline void gen_set_T1 (target_ulong val)
  66 +static always_inline void gen_set_T1 (target_ulong val)
67 67 {
68 68 #if defined(TARGET_PPC64)
69 69 if (val >> 32)
... ... @@ -78,7 +78,7 @@ static GenOpFunc *NAME ## _table [8] = { \
78 78 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
79 79 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
80 80 }; \
81   -static inline void func(int n) \
  81 +static always_inline void func (int n) \
82 82 { \
83 83 NAME ## _table[n](); \
84 84 }
... ... @@ -90,7 +90,7 @@ NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
90 90 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
91 91 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
92 92 }; \
93   -static inline void func(int n) \
  93 +static always_inline void func (int n) \
94 94 { \
95 95 NAME ## _table[n](); \
96 96 }
... ... @@ -106,7 +106,7 @@ NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
106 106 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
107 107 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
108 108 }; \
109   -static inline void func(int n) \
  109 +static always_inline void func (int n) \
110 110 { \
111 111 NAME ## _table[n](); \
112 112 }
... ... @@ -121,7 +121,7 @@ GEN8(gen_op_store_T1_crf, gen_op_store_T1_crf_crf);
121 121 GEN8(gen_op_load_fpscr_T0, gen_op_load_fpscr_T0_fpscr);
122 122 GEN8(gen_op_store_T0_fpscr, gen_op_store_T0_fpscr_fpscr);
123 123 GEN8(gen_op_clear_fpscr, gen_op_clear_fpscr_fpscr);
124   -static inline void gen_op_store_T0_fpscri (int n, uint8_t param)
  124 +static always_inline void gen_op_store_T0_fpscri (int n, uint8_t param)
125 125 {
126 126 gen_op_set_T0(param);
127 127 gen_op_store_T0_fpscr(n);
... ... @@ -187,7 +187,7 @@ struct opc_handler_t {
187 187 #endif
188 188 };
189 189  
190   -static inline void gen_set_Rc0 (DisasContext *ctx)
  190 +static always_inline void gen_set_Rc0 (DisasContext *ctx)
191 191 {
192 192 #if defined(TARGET_PPC64)
193 193 if (ctx->sf_mode)
... ... @@ -198,7 +198,7 @@ static inline void gen_set_Rc0 (DisasContext *ctx)
198 198 gen_op_set_Rc0();
199 199 }
200 200  
201   -static inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
  201 +static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
202 202 {
203 203 #if defined(TARGET_PPC64)
204 204 if (ctx->sf_mode)
... ... @@ -236,14 +236,14 @@ GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
236 236 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
237 237  
238 238 /* Stop translation */
239   -static inline void GEN_STOP (DisasContext *ctx)
  239 +static always_inline void GEN_STOP (DisasContext *ctx)
240 240 {
241 241 gen_update_nip(ctx, ctx->nip);
242 242 ctx->exception = POWERPC_EXCP_STOP;
243 243 }
244 244  
245 245 /* No need to update nip here, as execution flow will change */
246   -static inline void GEN_SYNC (DisasContext *ctx)
  246 +static always_inline void GEN_SYNC (DisasContext *ctx)
247 247 {
248 248 ctx->exception = POWERPC_EXCP_SYNC;
249 249 }
... ... @@ -267,13 +267,13 @@ typedef struct opcode_t {
267 267 /*****************************************************************************/
268 268 /*** Instruction decoding ***/
269 269 #define EXTRACT_HELPER(name, shift, nb) \
270   -static inline uint32_t name (uint32_t opcode) \
  270 +static always_inline uint32_t name (uint32_t opcode) \
271 271 { \
272 272 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
273 273 }
274 274  
275 275 #define EXTRACT_SHELPER(name, shift, nb) \
276   -static inline int32_t name (uint32_t opcode) \
  276 +static always_inline int32_t name (uint32_t opcode) \
277 277 { \
278 278 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
279 279 }
... ... @@ -304,7 +304,7 @@ EXTRACT_HELPER(crbA, 16, 5);
304 304 EXTRACT_HELPER(crbB, 11, 5);
305 305 /* SPR / TBL */
306 306 EXTRACT_HELPER(_SPR, 11, 10);
307   -static inline uint32_t SPR (uint32_t opcode)
  307 +static always_inline uint32_t SPR (uint32_t opcode)
308 308 {
309 309 uint32_t sprn = _SPR(opcode);
310 310  
... ... @@ -336,12 +336,12 @@ EXTRACT_HELPER(FPIMM, 20, 4);
336 336 /* Displacement */
337 337 EXTRACT_SHELPER(d, 0, 16);
338 338 /* Immediate address */
339   -static inline target_ulong LI (uint32_t opcode)
  339 +static always_inline target_ulong LI (uint32_t opcode)
340 340 {
341 341 return (opcode >> 0) & 0x03FFFFFC;
342 342 }
343 343  
344   -static inline uint32_t BD (uint32_t opcode)
  344 +static always_inline uint32_t BD (uint32_t opcode)
345 345 {
346 346 return (opcode >> 0) & 0xFFFC;
347 347 }
... ... @@ -354,7 +354,7 @@ EXTRACT_HELPER(AA, 1, 1);
354 354 EXTRACT_HELPER(LK, 0, 1);
355 355  
356 356 /* Create a mask between <start> and <end> bits */
357   -static inline target_ulong MASK (uint32_t start, uint32_t end)
  357 +static always_inline target_ulong MASK (uint32_t start, uint32_t end)
358 358 {
359 359 target_ulong ret;
360 360  
... ... @@ -694,7 +694,7 @@ __GEN_INT_ARITH1_O_64(name##o, opc1, opc2, opc3 | 0x10, type)
694 694 #endif
695 695  
696 696 /* add add. addo addo. */
697   -static inline void gen_op_addo (void)
  697 +static always_inline void gen_op_addo (void)
698 698 {
699 699 gen_op_move_T2_T0();
700 700 gen_op_add();
... ... @@ -702,7 +702,7 @@ static inline void gen_op_addo (void)
702 702 }
703 703 #if defined(TARGET_PPC64)
704 704 #define gen_op_add_64 gen_op_add
705   -static inline void gen_op_addo_64 (void)
  705 +static always_inline void gen_op_addo_64 (void)
706 706 {
707 707 gen_op_move_T2_T0();
708 708 gen_op_add();
... ... @@ -711,13 +711,13 @@ static inline void gen_op_addo_64 (void)
711 711 #endif
712 712 GEN_INT_ARITH2_64 (add, 0x1F, 0x0A, 0x08, PPC_INTEGER);
713 713 /* addc addc. addco addco. */
714   -static inline void gen_op_addc (void)
  714 +static always_inline void gen_op_addc (void)
715 715 {
716 716 gen_op_move_T2_T0();
717 717 gen_op_add();
718 718 gen_op_check_addc();
719 719 }
720   -static inline void gen_op_addco (void)
  720 +static always_inline void gen_op_addco (void)
721 721 {
722 722 gen_op_move_T2_T0();
723 723 gen_op_add();
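
These add helpers all follow one pattern: gen_op_move_T2_T0() saves the first operand (T2 = T0) before gen_op_add() overwrites T0 with the sum, and a check micro-op then derives XER.CA and/or XER.OV/SO from T0, T1 and the saved T2. Conceptually the checks compute something like the following (a sketch; the actual micro-ops are generated from op.c, outside this diff):

    /* Assumed effect of gen_op_check_addc()/gen_op_check_addo(),
     * with T0 = T2 + T1 already computed: */
    xer_ca = (uint32_t)T0 < (uint32_t)T2;                            /* unsigned carry out */
    xer_ov = (((T2 ^ T1 ^ (uint32_t)-1) & (T2 ^ T0)) >> 31) & 1;     /* signed overflow */
    xer_so |= xer_ov;                                                /* SO is sticky */
    /* the *_64 variants repeat the comparisons on the full 64-bit values */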
... ... @@ -725,13 +725,13 @@ static inline void gen_op_addco (void)
725 725 gen_op_check_addo();
726 726 }
727 727 #if defined(TARGET_PPC64)
728   -static inline void gen_op_addc_64 (void)
  728 +static always_inline void gen_op_addc_64 (void)
729 729 {
730 730 gen_op_move_T2_T0();
731 731 gen_op_add();
732 732 gen_op_check_addc_64();
733 733 }
734   -static inline void gen_op_addco_64 (void)
  734 +static always_inline void gen_op_addco_64 (void)
735 735 {
736 736 gen_op_move_T2_T0();
737 737 gen_op_add();
... ... @@ -741,14 +741,14 @@ static inline void gen_op_addco_64 (void)
741 741 #endif
742 742 GEN_INT_ARITH2_64 (addc, 0x1F, 0x0A, 0x00, PPC_INTEGER);
743 743 /* adde adde. addeo addeo. */
744   -static inline void gen_op_addeo (void)
  744 +static always_inline void gen_op_addeo (void)
745 745 {
746 746 gen_op_move_T2_T0();
747 747 gen_op_adde();
748 748 gen_op_check_addo();
749 749 }
750 750 #if defined(TARGET_PPC64)
751   -static inline void gen_op_addeo_64 (void)
  751 +static always_inline void gen_op_addeo_64 (void)
752 752 {
753 753 gen_op_move_T2_T0();
754 754 gen_op_adde_64();
... ... @@ -757,13 +757,13 @@ static inline void gen_op_addeo_64 (void)
757 757 #endif
758 758 GEN_INT_ARITH2_64 (adde, 0x1F, 0x0A, 0x04, PPC_INTEGER);
759 759 /* addme addme. addmeo addmeo. */
760   -static inline void gen_op_addme (void)
  760 +static always_inline void gen_op_addme (void)
761 761 {
762 762 gen_op_move_T1_T0();
763 763 gen_op_add_me();
764 764 }
765 765 #if defined(TARGET_PPC64)
766   -static inline void gen_op_addme_64 (void)
  766 +static always_inline void gen_op_addme_64 (void)
767 767 {
768 768 gen_op_move_T1_T0();
769 769 gen_op_add_me_64();
... ... @@ -771,13 +771,13 @@ static inline void gen_op_addme_64 (void)
771 771 #endif
772 772 GEN_INT_ARITH1_64 (addme, 0x1F, 0x0A, 0x07, PPC_INTEGER);
773 773 /* addze addze. addzeo addzeo. */
774   -static inline void gen_op_addze (void)
  774 +static always_inline void gen_op_addze (void)
775 775 {
776 776 gen_op_move_T2_T0();
777 777 gen_op_add_ze();
778 778 gen_op_check_addc();
779 779 }
780   -static inline void gen_op_addzeo (void)
  780 +static always_inline void gen_op_addzeo (void)
781 781 {
782 782 gen_op_move_T2_T0();
783 783 gen_op_add_ze();
... ... @@ -785,13 +785,13 @@ static inline void gen_op_addzeo (void)
785 785 gen_op_check_addo();
786 786 }
787 787 #if defined(TARGET_PPC64)
788   -static inline void gen_op_addze_64 (void)
  788 +static always_inline void gen_op_addze_64 (void)
789 789 {
790 790 gen_op_move_T2_T0();
791 791 gen_op_add_ze();
792 792 gen_op_check_addc_64();
793 793 }
794   -static inline void gen_op_addzeo_64 (void)
  794 +static always_inline void gen_op_addzeo_64 (void)
795 795 {
796 796 gen_op_move_T2_T0();
797 797 gen_op_add_ze();
... ... @@ -813,7 +813,7 @@ GEN_INT_ARITH2 (mullw, 0x1F, 0x0B, 0x07, PPC_INTEGER);
813 813 /* neg neg. nego nego. */
814 814 GEN_INT_ARITH1_64 (neg, 0x1F, 0x08, 0x03, PPC_INTEGER);
815 815 /* subf subf. subfo subfo. */
816   -static inline void gen_op_subfo (void)
  816 +static always_inline void gen_op_subfo (void)
817 817 {
818 818 gen_op_move_T2_T0();
819 819 gen_op_subf();
... ... @@ -821,7 +821,7 @@ static inline void gen_op_subfo (void)
821 821 }
822 822 #if defined(TARGET_PPC64)
823 823 #define gen_op_subf_64 gen_op_subf
824   -static inline void gen_op_subfo_64 (void)
  824 +static always_inline void gen_op_subfo_64 (void)
825 825 {
826 826 gen_op_move_T2_T0();
827 827 gen_op_subf();
... ... @@ -830,12 +830,12 @@ static inline void gen_op_subfo_64 (void)
830 830 #endif
831 831 GEN_INT_ARITH2_64 (subf, 0x1F, 0x08, 0x01, PPC_INTEGER);
832 832 /* subfc subfc. subfco subfco. */
833   -static inline void gen_op_subfc (void)
  833 +static always_inline void gen_op_subfc (void)
834 834 {
835 835 gen_op_subf();
836 836 gen_op_check_subfc();
837 837 }
838   -static inline void gen_op_subfco (void)
  838 +static always_inline void gen_op_subfco (void)
839 839 {
840 840 gen_op_move_T2_T0();
841 841 gen_op_subf();
... ... @@ -843,12 +843,12 @@ static inline void gen_op_subfco (void)
843 843 gen_op_check_subfo();
844 844 }
845 845 #if defined(TARGET_PPC64)
846   -static inline void gen_op_subfc_64 (void)
  846 +static always_inline void gen_op_subfc_64 (void)
847 847 {
848 848 gen_op_subf();
849 849 gen_op_check_subfc_64();
850 850 }
851   -static inline void gen_op_subfco_64 (void)
  851 +static always_inline void gen_op_subfco_64 (void)
852 852 {
853 853 gen_op_move_T2_T0();
854 854 gen_op_subf();
... ... @@ -858,7 +858,7 @@ static inline void gen_op_subfco_64 (void)
858 858 #endif
859 859 GEN_INT_ARITH2_64 (subfc, 0x1F, 0x08, 0x00, PPC_INTEGER);
860 860 /* subfe subfe. subfeo subfeo. */
861   -static inline void gen_op_subfeo (void)
  861 +static always_inline void gen_op_subfeo (void)
862 862 {
863 863 gen_op_move_T2_T0();
864 864 gen_op_subfe();
... ... @@ -866,7 +866,7 @@ static inline void gen_op_subfeo (void)
866 866 }
867 867 #if defined(TARGET_PPC64)
868 868 #define gen_op_subfe_64 gen_op_subfe
869   -static inline void gen_op_subfeo_64 (void)
  869 +static always_inline void gen_op_subfeo_64 (void)
870 870 {
871 871 gen_op_move_T2_T0();
872 872 gen_op_subfe_64();
... ... @@ -1407,7 +1407,7 @@ GEN_HANDLER(name##3, opc1, opc2 | 0x11, 0xFF, 0x00000000, PPC_64B) \
1407 1407 gen_##name(ctx, 1, 1); \
1408 1408 }
1409 1409  
1410   -static inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask)
  1410 +static always_inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask)
1411 1411 {
1412 1412 if (mask >> 32)
1413 1413 gen_op_andi_T0_64(mask >> 32, mask & 0xFFFFFFFF);
... ... @@ -1415,7 +1415,7 @@ static inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask)
1415 1415 gen_op_andi_T0(mask);
1416 1416 }
1417 1417  
1418   -static inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask)
  1418 +static always_inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask)
1419 1419 {
1420 1420 if (mask >> 32)
1421 1421 gen_op_andi_T1_64(mask >> 32, mask & 0xFFFFFFFF);
... ... @@ -1423,8 +1423,8 @@ static inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask)
1423 1423 gen_op_andi_T1(mask);
1424 1424 }
1425 1425  
1426   -static inline void gen_rldinm (DisasContext *ctx, uint32_t mb, uint32_t me,
1427   - uint32_t sh)
  1426 +static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
  1427 + uint32_t me, uint32_t sh)
1428 1428 {
1429 1429 gen_op_load_gpr_T0(rS(ctx->opcode));
1430 1430 if (likely(sh == 0)) {
... ... @@ -1453,7 +1453,7 @@ static inline void gen_rldinm (DisasContext *ctx, uint32_t mb, uint32_t me,
1453 1453 gen_set_Rc0(ctx);
1454 1454 }
1455 1455 /* rldicl - rldicl. */
1456   -static inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
  1456 +static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1457 1457 {
1458 1458 uint32_t sh, mb;
1459 1459  
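
The mbn/shn arguments of the rld* helpers carry the extra high bit of the 6-bit SH and MB fields, which the 64-bit MD form splits across the opcode; each GEN_PPC64_R4 instantiation passes a different combination. The elided body is assumed to reassemble the fields and forward to gen_rldinm, roughly:

    /* Sketch of the elided rldicl body; the reassembly below is an assumption. */
    sh = SH(ctx->opcode) | (shn << 5);     /* 6-bit shift count, split across the opcode */
    mb = MB(ctx->opcode) | (mbn << 5);     /* 6-bit mask-begin, likewise split */
    gen_rldinm(ctx, mb, 63, sh);           /* rldicl: rotate, then clear left up to mb */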
... ... @@ -1463,7 +1463,7 @@ static inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1463 1463 }
1464 1464 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1465 1465 /* rldicr - rldicr. */
1466   -static inline void gen_rldicr (DisasContext *ctx, int men, int shn)
  1466 +static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1467 1467 {
1468 1468 uint32_t sh, me;
1469 1469  
... ... @@ -1473,7 +1473,7 @@ static inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1473 1473 }
1474 1474 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1475 1475 /* rldic - rldic. */
1476   -static inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
  1476 +static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1477 1477 {
1478 1478 uint32_t sh, mb;
1479 1479  
... ... @@ -1483,7 +1483,8 @@ static inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1483 1483 }
1484 1484 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1485 1485  
1486   -static inline void gen_rldnm (DisasContext *ctx, uint32_t mb, uint32_t me)
  1486 +static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
  1487 + uint32_t me)
1487 1488 {
1488 1489 gen_op_load_gpr_T0(rS(ctx->opcode));
1489 1490 gen_op_load_gpr_T1(rB(ctx->opcode));
... ... @@ -1497,7 +1498,7 @@ static inline void gen_rldnm (DisasContext *ctx, uint32_t mb, uint32_t me)
1497 1498 }
1498 1499  
1499 1500 /* rldcl - rldcl. */
1500   -static inline void gen_rldcl (DisasContext *ctx, int mbn)
  1501 +static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1501 1502 {
1502 1503 uint32_t mb;
1503 1504  
... ... @@ -1506,7 +1507,7 @@ static inline void gen_rldcl (DisasContext *ctx, int mbn)
1506 1507 }
1507 1508 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1508 1509 /* rldcr - rldcr. */
1509   -static inline void gen_rldcr (DisasContext *ctx, int men)
  1510 +static always_inline void gen_rldcr (DisasContext *ctx, int men)
1510 1511 {
1511 1512 uint32_t me;
1512 1513  
... ... @@ -1515,7 +1516,7 @@ static inline void gen_rldcr (DisasContext *ctx, int men)
1515 1516 }
1516 1517 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1517 1518 /* rldimi - rldimi. */
1518   -static inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
  1519 +static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1519 1520 {
1520 1521 uint64_t mask;
1521 1522 uint32_t sh, mb;
... ... @@ -1583,7 +1584,7 @@ __GEN_LOGICAL2(sld, 0x1B, 0x00, PPC_64B);
1583 1584 /* srad & srad. */
1584 1585 __GEN_LOGICAL2(srad, 0x1A, 0x18, PPC_64B);
1585 1586 /* sradi & sradi. */
1586   -static inline void gen_sradi (DisasContext *ctx, int n)
  1587 +static always_inline void gen_sradi (DisasContext *ctx, int n)
1587 1588 {
1588 1589 uint64_t mask;
1589 1590 int sh, mb, me;
... ... @@ -1937,7 +1938,8 @@ GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
1937 1938  
1938 1939 /*** Addressing modes ***/
1939 1940 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
1940   -static inline void gen_addr_imm_index (DisasContext *ctx, target_long maskl)
  1941 +static always_inline void gen_addr_imm_index (DisasContext *ctx,
  1942 + target_long maskl)
1941 1943 {
1942 1944 target_long simm = SIMM(ctx->opcode);
1943 1945  
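
Per the comment above, the effective address is (rA|0) + SIMM: an rA field of 0 stands for a literal zero, not GPR0, and maskl lets DS-form accesses strip the low displacement bits that double as opcode bits. A sketch of the elided rest of the body; the immediate-load and add-immediate op names are placeholders, only the ops visible elsewhere in this diff are known to exist:

    simm &= ~maskl;                        /* DS-form: drop the reserved low bits */
    if (rA(ctx->opcode) == 0) {
        gen_set_T0(simm);                  /* placeholder for "T0 = immediate" */
    } else {
        gen_op_load_gpr_T0(rA(ctx->opcode));
        if (simm != 0)
            gen_op_addi(simm);             /* placeholder for "T0 += immediate" */
    }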
... ... @@ -1954,7 +1956,7 @@ static inline void gen_addr_imm_index (DisasContext *ctx, target_long maskl)
1954 1956 #endif
1955 1957 }
1956 1958  
1957   -static inline void gen_addr_reg_index (DisasContext *ctx)
  1959 +static always_inline void gen_addr_reg_index (DisasContext *ctx)
1958 1960 {
1959 1961 if (rA(ctx->opcode) == 0) {
1960 1962 gen_op_load_gpr_T0(rB(ctx->opcode));
... ... @@ -1968,7 +1970,7 @@ static inline void gen_addr_reg_index (DisasContext *ctx)
1968 1970 #endif
1969 1971 }
1970 1972  
1971   -static inline void gen_addr_register (DisasContext *ctx)
  1973 +static always_inline void gen_addr_register (DisasContext *ctx)
1972 1974 {
1973 1975 if (rA(ctx->opcode) == 0) {
1974 1976 gen_op_reset_T0();
... ... @@ -2964,7 +2966,8 @@ OP_ST_TABLE(fiwx);
2964 2966 GEN_STXF(fiwx, 0x17, 0x1E, PPC_FLOAT_STFIWX);
2965 2967  
2966 2968 /*** Branch ***/
2967   -static inline void gen_goto_tb (DisasContext *ctx, int n, target_ulong dest)
  2969 +static always_inline void gen_goto_tb (DisasContext *ctx, int n,
  2970 + target_ulong dest)
2968 2971 {
2969 2972 TranslationBlock *tb;
2970 2973 tb = ctx->tb;
... ... @@ -2999,7 +3002,7 @@ static inline void gen_goto_tb (DisasContext *ctx, int n, target_ulong dest)
2999 3002 }
3000 3003 }
3001 3004  
3002   -static inline void gen_setlr (DisasContext *ctx, target_ulong nip)
  3005 +static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
3003 3006 {
3004 3007 #if defined(TARGET_PPC64)
3005 3008 if (ctx->sf_mode != 0 && (nip >> 32))
... ... @@ -3039,7 +3042,7 @@ GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3039 3042 #define BCOND_LR 1
3040 3043 #define BCOND_CTR 2
3041 3044  
3042   -static inline void gen_bcond (DisasContext *ctx, int type)
  3045 +static always_inline void gen_bcond (DisasContext *ctx, int type)
3043 3046 {
3044 3047 target_ulong target = 0;
3045 3048 target_ulong li;
... ... @@ -3399,7 +3402,7 @@ static void spr_noaccess (void *opaque, int sprn)
3399 3402 #endif
3400 3403  
3401 3404 /* mfspr */
3402   -static inline void gen_op_mfspr (DisasContext *ctx)
  3405 +static always_inline void gen_op_mfspr (DisasContext *ctx)
3403 3406 {
3404 3407 void (*read_cb)(void *opaque, int sprn);
3405 3408 uint32_t sprn = SPR(ctx->opcode);
... ... @@ -3765,7 +3768,8 @@ static GenOpFunc *gen_op_dcbz[4][4] = {
3765 3768 #endif
3766 3769 #endif
3767 3770  
3768   -static inline void handler_dcbz (DisasContext *ctx, int dcache_line_size)
  3771 +static always_inline void handler_dcbz (DisasContext *ctx,
  3772 + int dcache_line_size)
3769 3773 {
3770 3774 int n;
3771 3775  
... ... @@ -4913,8 +4917,9 @@ GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_BOOKE_EXT)
4913 4917 }
4914 4918  
4915 4919 /* All 405 MAC instructions are translated here */
4916   -static inline void gen_405_mulladd_insn (DisasContext *ctx, int opc2, int opc3,
4917   - int ra, int rb, int rt, int Rc)
  4920 +static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
  4921 + int opc2, int opc3,
  4922 + int ra, int rb, int rt, int Rc)
4918 4923 {
4919 4924 gen_op_load_gpr_T0(ra);
4920 4925 gen_op_load_gpr_T1(rb);
... ... @@ -5551,13 +5556,13 @@ GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
5551 5556 }
5552 5557  
5553 5558 /* Handler for undefined SPE opcodes */
5554   -static inline void gen_speundef (DisasContext *ctx)
  5559 +static always_inline void gen_speundef (DisasContext *ctx)
5555 5560 {
5556 5561 GEN_EXCP_INVAL(ctx);
5557 5562 }
5558 5563  
5559 5564 /* SPE load and stores */
5560   -static inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh)
  5565 +static always_inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh)
5561 5566 {
5562 5567 target_long simm = rB(ctx->opcode);
5563 5568  
... ... @@ -5678,7 +5683,7 @@ static GenOpFunc *gen_op_spe_st##name[] = { \
5678 5683 #endif /* defined(CONFIG_USER_ONLY) */
5679 5684  
5680 5685 #define GEN_SPE_LD(name, sh) \
5681   -static inline void gen_evl##name (DisasContext *ctx) \
  5686 +static always_inline void gen_evl##name (DisasContext *ctx) \
5682 5687 { \
5683 5688 if (unlikely(!ctx->spe_enabled)) { \
5684 5689 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5690,7 +5695,7 @@ static inline void gen_evl##name (DisasContext *ctx) \
5690 5695 }
5691 5696  
5692 5697 #define GEN_SPE_LDX(name) \
5693   -static inline void gen_evl##name##x (DisasContext *ctx) \
  5698 +static always_inline void gen_evl##name##x (DisasContext *ctx) \
5694 5699 { \
5695 5700 if (unlikely(!ctx->spe_enabled)) { \
5696 5701 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5707,7 +5712,7 @@ GEN_SPE_LD(name, sh); \
5707 5712 GEN_SPE_LDX(name)
5708 5713  
5709 5714 #define GEN_SPE_ST(name, sh) \
5710   -static inline void gen_evst##name (DisasContext *ctx) \
  5715 +static always_inline void gen_evst##name (DisasContext *ctx) \
5711 5716 { \
5712 5717 if (unlikely(!ctx->spe_enabled)) { \
5713 5718 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5719,7 +5724,7 @@ static inline void gen_evst##name (DisasContext *ctx) \
5719 5724 }
5720 5725  
5721 5726 #define GEN_SPE_STX(name) \
5722   -static inline void gen_evst##name##x (DisasContext *ctx) \
  5727 +static always_inline void gen_evst##name##x (DisasContext *ctx) \
5723 5728 { \
5724 5729 if (unlikely(!ctx->spe_enabled)) { \
5725 5730 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5741,7 +5746,7 @@ GEN_SPEOP_ST(name, sh)
5741 5746  
5742 5747 /* SPE arithmetic and logic */
5743 5748 #define GEN_SPEOP_ARITH2(name) \
5744   -static inline void gen_##name (DisasContext *ctx) \
  5749 +static always_inline void gen_##name (DisasContext *ctx) \
5745 5750 { \
5746 5751 if (unlikely(!ctx->spe_enabled)) { \
5747 5752 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5754,7 +5759,7 @@ static inline void gen_##name (DisasContext *ctx) \
5754 5759 }
5755 5760  
5756 5761 #define GEN_SPEOP_ARITH1(name) \
5757   -static inline void gen_##name (DisasContext *ctx) \
  5762 +static always_inline void gen_##name (DisasContext *ctx) \
5758 5763 { \
5759 5764 if (unlikely(!ctx->spe_enabled)) { \
5760 5765 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5766,7 +5771,7 @@ static inline void gen_##name (DisasContext *ctx) \
5766 5771 }
5767 5772  
5768 5773 #define GEN_SPEOP_COMP(name) \
5769   -static inline void gen_##name (DisasContext *ctx) \
  5774 +static always_inline void gen_##name (DisasContext *ctx) \
5770 5775 { \
5771 5776 if (unlikely(!ctx->spe_enabled)) { \
5772 5777 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5806,7 +5811,7 @@ GEN_SPEOP_ARITH1(evextsh);
5806 5811 GEN_SPEOP_ARITH1(evrndw);
5807 5812 GEN_SPEOP_ARITH1(evcntlzw);
5808 5813 GEN_SPEOP_ARITH1(evcntlsw);
5809   -static inline void gen_brinc (DisasContext *ctx)
  5814 +static always_inline void gen_brinc (DisasContext *ctx)
5810 5815 {
5811 5816 /* Note: brinc is usable even if SPE is disabled */
5812 5817 gen_op_load_gpr64_T0(rA(ctx->opcode));
... ... @@ -5816,7 +5821,7 @@ static inline void gen_brinc (DisasContext *ctx)
5816 5821 }
5817 5822  
5818 5823 #define GEN_SPEOP_ARITH_IMM2(name) \
5819   -static inline void gen_##name##i (DisasContext *ctx) \
  5824 +static always_inline void gen_##name##i (DisasContext *ctx) \
5820 5825 { \
5821 5826 if (unlikely(!ctx->spe_enabled)) { \
5822 5827 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5829,7 +5834,7 @@ static inline void gen_##name##i (DisasContext *ctx) \
5829 5834 }
5830 5835  
5831 5836 #define GEN_SPEOP_LOGIC_IMM2(name) \
5832   -static inline void gen_##name##i (DisasContext *ctx) \
  5837 +static always_inline void gen_##name##i (DisasContext *ctx) \
5833 5838 { \
5834 5839 if (unlikely(!ctx->spe_enabled)) { \
5835 5840 GEN_EXCP_NO_AP(ctx); \
... ... @@ -5852,7 +5857,7 @@ GEN_SPEOP_LOGIC_IMM2(evsrws);
5852 5857 #define gen_evsrwiu gen_evsrwui
5853 5858 GEN_SPEOP_LOGIC_IMM2(evrlw);
5854 5859  
5855   -static inline void gen_evsplati (DisasContext *ctx)
  5860 +static always_inline void gen_evsplati (DisasContext *ctx)
5856 5861 {
5857 5862 int32_t imm = (int32_t)(rA(ctx->opcode) << 27) >> 27;
5858 5863  
... ... @@ -5860,7 +5865,7 @@ static inline void gen_evsplati (DisasContext *ctx)
5860 5865 gen_op_store_T0_gpr64(rD(ctx->opcode));
5861 5866 }
5862 5867  
5863   -static inline void gen_evsplatfi (DisasContext *ctx)
  5868 +static always_inline void gen_evsplatfi (DisasContext *ctx)
5864 5869 {
5865 5870 uint32_t imm = rA(ctx->opcode) << 27;
5866 5871  
... ... @@ -5901,7 +5906,7 @@ GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
5901 5906 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
5902 5907 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
5903 5908  
5904   -static inline void gen_evsel (DisasContext *ctx)
  5909 +static always_inline void gen_evsel (DisasContext *ctx)
5905 5910 {
5906 5911 if (unlikely(!ctx->spe_enabled)) {
5907 5912 GEN_EXCP_NO_AP(ctx);
... ... @@ -5991,13 +5996,13 @@ GEN_SPEOP_ST(who, 2);
5991 5996 #endif
5992 5997 #endif
5993 5998 #define _GEN_OP_SPE_STWWE(suffix) \
5994   -static inline void gen_op_spe_stwwe_##suffix (void) \
  5999 +static always_inline void gen_op_spe_stwwe_##suffix (void) \
5995 6000 { \
5996 6001 gen_op_srli32_T1_64(); \
5997 6002 gen_op_spe_stwwo_##suffix(); \
5998 6003 }
5999 6004 #define _GEN_OP_SPE_STWWE_LE(suffix) \
6000   -static inline void gen_op_spe_stwwe_le_##suffix (void) \
  6005 +static always_inline void gen_op_spe_stwwe_le_##suffix (void) \
6001 6006 { \
6002 6007 gen_op_srli32_T1_64(); \
6003 6008 gen_op_spe_stwwo_le_##suffix(); \
... ... @@ -6006,12 +6011,12 @@ static inline void gen_op_spe_stwwe_le_##suffix (void) \
6006 6011 #define GEN_OP_SPE_STWWE(suffix) \
6007 6012 _GEN_OP_SPE_STWWE(suffix); \
6008 6013 _GEN_OP_SPE_STWWE_LE(suffix); \
6009   -static inline void gen_op_spe_stwwe_64_##suffix (void) \
  6014 +static always_inline void gen_op_spe_stwwe_64_##suffix (void) \
6010 6015 { \
6011 6016 gen_op_srli32_T1_64(); \
6012 6017 gen_op_spe_stwwo_64_##suffix(); \
6013 6018 } \
6014   -static inline void gen_op_spe_stwwe_le_64_##suffix (void) \
  6019 +static always_inline void gen_op_spe_stwwe_le_64_##suffix (void) \
6015 6020 { \
6016 6021 gen_op_srli32_T1_64(); \
6017 6022 gen_op_spe_stwwo_le_64_##suffix(); \
... ... @@ -6031,21 +6036,21 @@ GEN_SPEOP_ST(wwe, 2);
6031 6036 GEN_SPEOP_ST(wwo, 2);
6032 6037  
6033 6038 #define GEN_SPE_LDSPLAT(name, op, suffix) \
6034   -static inline void gen_op_spe_l##name##_##suffix (void) \
  6039 +static always_inline void gen_op_spe_l##name##_##suffix (void) \
6035 6040 { \
6036 6041 gen_op_##op##_##suffix(); \
6037 6042 gen_op_splatw_T1_64(); \
6038 6043 }
6039 6044  
6040 6045 #define GEN_OP_SPE_LHE(suffix) \
6041   -static inline void gen_op_spe_lhe_##suffix (void) \
  6046 +static always_inline void gen_op_spe_lhe_##suffix (void) \
6042 6047 { \
6043 6048 gen_op_spe_lh_##suffix(); \
6044 6049 gen_op_sli16_T1_64(); \
6045 6050 }
6046 6051  
6047 6052 #define GEN_OP_SPE_LHX(suffix) \
6048   -static inline void gen_op_spe_lhx_##suffix (void) \
  6053 +static always_inline void gen_op_spe_lhx_##suffix (void) \
6049 6054 { \
6050 6055 gen_op_spe_lh_##suffix(); \
6051 6056 gen_op_extsh_T1_64(); \
... ... @@ -6221,7 +6226,7 @@ GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
6221 6226  
6222 6227 /*** SPE floating-point extension ***/
6223 6228 #define GEN_SPEFPUOP_CONV(name) \
6224   -static inline void gen_##name (DisasContext *ctx) \
  6229 +static always_inline void gen_##name (DisasContext *ctx) \
6225 6230 { \
6226 6231 gen_op_load_gpr64_T0(rB(ctx->opcode)); \
6227 6232 gen_op_##name(); \
... ... @@ -6376,7 +6381,7 @@ GEN_OPCODE_MARK(end);
6376 6381  
6377 6382 /*****************************************************************************/
6378 6383 /* Misc PowerPC helpers */
6379   -static inline uint32_t load_xer (CPUState *env)
  6384 +static always_inline uint32_t load_xer (CPUState *env)
6380 6385 {
6381 6386 return (xer_so << XER_SO) |
6382 6387 (xer_ov << XER_OV) |
... ... @@ -6507,9 +6512,9 @@ void cpu_dump_statistics (CPUState *env, FILE*f,
6507 6512 }
6508 6513  
6509 6514 /*****************************************************************************/
6510   -static inline int gen_intermediate_code_internal (CPUState *env,
6511   - TranslationBlock *tb,
6512   - int search_pc)
  6515 +static always_inline int gen_intermediate_code_internal (CPUState *env,
  6516 + TranslationBlock *tb,
  6517 + int search_pc)
6513 6518 {
6514 6519 DisasContext ctx, *ctxp = &ctx;
6515 6520 opc_handler_t **table, *handler;
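
gen_intermediate_code_internal is the main translation loop, shared between normal translation (search_pc == 0) and the restore path that rebuilds the guest PC after a fault (search_pc != 0). The usual thin wrappers around it (assumed here, they are not part of this hunk) illustrate why forcing the inline pays off: each wrapper gets its own specialised copy with search_pc constant-folded away.

    /* Assumed wrappers, following the standard QEMU pattern of this era: */
    int gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
    {
        return gen_intermediate_code_internal(env, tb, 0);
    }

    int gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
    {
        return gen_intermediate_code_internal(env, tb, 1);
    }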
... ...