Commit b068d6a7138292de0f5c5fa6c99f0b79d4e1e7f0

Authored by j_mayer
1 parent ed26abdb

PowerPC target optimisations: make intensive use of always_inline.


git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@3347 c046a42c-6fe2-441c-8c8c-71466251a162
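Note: `always_inline` is not a C keyword but a QEMU helper macro that forces GCC to inline these hot helpers even when it would otherwise decline (e.g. at low optimisation levels). As a rough illustrative sketch only, under the assumption of a GCC-version guard in a shared header (the exact header and guards at this revision may differ):

```c
/* Illustrative sketch -- not copied from this revision of the tree. */
#if defined(__GNUC__) && (__GNUC__ < 3)
#define always_inline inline                              /* old GCC: plain inline */
#else
#define always_inline inline __attribute__((always_inline)) /* force inlining */
#endif

/* A helper converted by this commit keeps its static linkage and simply
 * swaps the keyword, exactly as the hunks below do: */
static always_inline int pte_is_valid_example (unsigned int pte0)
{
    return pte0 & 0x80000000 ? 1 : 0;
}
```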
hw/ppc.c
@@ -424,7 +424,8 @@ struct ppc_tb_t {
     void *opaque;
 };

-static inline uint64_t cpu_ppc_get_tb (ppc_tb_t *tb_env, int64_t tb_offset)
+static always_inline uint64_t cpu_ppc_get_tb (ppc_tb_t *tb_env,
+                                              int64_t tb_offset)
 {
     /* TB time in tb periods */
     return muldiv64(qemu_get_clock(vm_clock) + tb_env->tb_offset,
@@ -446,7 +447,7 @@ uint32_t cpu_ppc_load_tbl (CPUState *env)
     return tb & 0xFFFFFFFF;
 }

-static inline uint32_t _cpu_ppc_load_tbu (CPUState *env)
+static always_inline uint32_t _cpu_ppc_load_tbu (CPUState *env)
 {
     ppc_tb_t *tb_env = env->tb_env;
     uint64_t tb;
@@ -466,8 +467,9 @@ uint32_t cpu_ppc_load_tbu (CPUState *env)
     return _cpu_ppc_load_tbu(env);
 }

-static inline void cpu_ppc_store_tb (ppc_tb_t *tb_env, int64_t *tb_offsetp,
-                                     uint64_t value)
+static always_inline void cpu_ppc_store_tb (ppc_tb_t *tb_env,
+                                            int64_t *tb_offsetp,
+                                            uint64_t value)
 {
     *tb_offsetp = muldiv64(value, ticks_per_sec, tb_env->tb_freq)
         - qemu_get_clock(vm_clock);
@@ -489,7 +491,7 @@ void cpu_ppc_store_tbl (CPUState *env, uint32_t value)
     cpu_ppc_store_tb(tb_env, &tb_env->tb_offset, tb | (uint64_t)value);
 }

-static inline void _cpu_ppc_store_tbu (CPUState *env, uint32_t value)
+static always_inline void _cpu_ppc_store_tbu (CPUState *env, uint32_t value)
 {
     ppc_tb_t *tb_env = env->tb_env;
     uint64_t tb;
@@ -556,7 +558,8 @@ void cpu_ppc_store_atbu (CPUState *env, uint32_t value)
                      ((uint64_t)value << 32) | tb);
 }

-static inline uint32_t _cpu_ppc_load_decr (CPUState *env, uint64_t *next)
+static always_inline uint32_t _cpu_ppc_load_decr (CPUState *env,
+                                                  uint64_t *next)
 {
     ppc_tb_t *tb_env = env->tb_env;
     uint32_t decr;
@@ -605,7 +608,7 @@ uint64_t cpu_ppc_load_purr (CPUState *env)
 /* When decrementer expires,
  * all we need to do is generate or queue a CPU exception
  */
-static inline void cpu_ppc_decr_excp (CPUState *env)
+static always_inline void cpu_ppc_decr_excp (CPUState *env)
 {
     /* Raise it */
 #ifdef PPC_DEBUG_TB
@@ -616,7 +619,7 @@ static inline void cpu_ppc_decr_excp (CPUState *env)
     ppc_set_irq(env, PPC_INTERRUPT_DECR, 1);
 }

-static inline void cpu_ppc_hdecr_excp (CPUState *env)
+static always_inline void cpu_ppc_hdecr_excp (CPUState *env)
 {
     /* Raise it */
 #ifdef PPC_DEBUG_TB
@@ -657,9 +660,8 @@ static void __cpu_ppc_store_decr (CPUState *env, uint64_t *nextp,
     (*raise_excp)(env);
 }

-
-static inline void _cpu_ppc_store_decr (CPUState *env, uint32_t decr,
-                                        uint32_t value, int is_excp)
+static always_inline void _cpu_ppc_store_decr (CPUState *env, uint32_t decr,
+                                               uint32_t value, int is_excp)
 {
     ppc_tb_t *tb_env = env->tb_env;

@@ -678,8 +680,8 @@ static void cpu_ppc_decr_cb (void *opaque)
 }

 #if defined(TARGET_PPC64H)
-static inline void _cpu_ppc_store_hdecr (CPUState *env, uint32_t hdecr,
-                                         uint32_t value, int is_excp)
+static always_inline void _cpu_ppc_store_hdecr (CPUState *env, uint32_t hdecr,
+                                                uint32_t value, int is_excp)
 {
     ppc_tb_t *tb_env = env->tb_env;

hw/ppc405_uc.c
@@ -463,7 +463,7 @@ static uint32_t sdram_bcr (target_phys_addr_t ram_base,
     return bcr;
 }

-static inline target_phys_addr_t sdram_base (uint32_t bcr)
+static always_inline target_phys_addr_t sdram_base (uint32_t bcr)
 {
     return bcr & 0xFF800000;
 }
hw/ppc_prep.c
@@ -107,7 +107,7 @@ static void _PPC_intack_write (void *opaque,
 //    printf("%s: 0x%08x => 0x%08x\n", __func__, addr, value);
 }

-static inline uint32_t _PPC_intack_read (target_phys_addr_t addr)
+static always_inline uint32_t _PPC_intack_read (target_phys_addr_t addr)
 {
     uint32_t retval = 0;

@@ -412,8 +412,9 @@ static uint32_t PREP_io_800_readb (void *opaque, uint32_t addr)
     return retval;
 }

-static inline target_phys_addr_t prep_IO_address (sysctrl_t *sysctrl,
-                                                  target_phys_addr_t addr)
+static always_inline target_phys_addr_t prep_IO_address (sysctrl_t *sysctrl,
+                                                          target_phys_addr_t
+                                                          addr)
 {
     if (sysctrl->contiguous_map == 0) {
         /* 64 KB contiguous space for IOs */
target-ppc/exec.h
@@ -68,23 +68,23 @@ register unsigned long T2 asm(AREG3);
 # define RETURN() __asm__ __volatile__("" : : : "memory");
 #endif

-static inline target_ulong rotl8 (target_ulong i, int n)
+static always_inline target_ulong rotl8 (target_ulong i, int n)
 {
     return (((uint8_t)i << n) | ((uint8_t)i >> (8 - n)));
 }

-static inline target_ulong rotl16 (target_ulong i, int n)
+static always_inline target_ulong rotl16 (target_ulong i, int n)
 {
     return (((uint16_t)i << n) | ((uint16_t)i >> (16 - n)));
 }

-static inline target_ulong rotl32 (target_ulong i, int n)
+static always_inline target_ulong rotl32 (target_ulong i, int n)
 {
     return (((uint32_t)i << n) | ((uint32_t)i >> (32 - n)));
 }

 #if defined(TARGET_PPC64)
-static inline target_ulong rotl64 (target_ulong i, int n)
+static always_inline target_ulong rotl64 (target_ulong i, int n)
 {
     return (((uint64_t)i << n) | ((uint64_t)i >> (64 - n)));
 }
@@ -103,18 +103,18 @@ int get_physical_address (CPUState *env, mmu_ctx_t *ctx, target_ulong vaddr,
 void ppc6xx_tlb_store (CPUState *env, target_ulong EPN, int way, int is_code,
                        target_ulong pte0, target_ulong pte1);

-static inline void env_to_regs (void)
+static always_inline void env_to_regs (void)
 {
 }

-static inline void regs_to_env (void)
+static always_inline void regs_to_env (void)
 {
 }

 int cpu_ppc_handle_mmu_fault (CPUState *env, target_ulong address, int rw,
                               int is_user, int is_softmmu);

-static inline int cpu_halted (CPUState *env)
+static always_inline int cpu_halted (CPUState *env)
 {
     if (!env->halted)
         return 0;
target-ppc/helper.c
@@ -67,23 +67,23 @@ target_phys_addr_t cpu_get_phys_page_debug (CPUState *env, target_ulong addr)

 #else
 /* Common routines used by software and hardware TLBs emulation */
-static inline int pte_is_valid (target_ulong pte0)
+static always_inline int pte_is_valid (target_ulong pte0)
 {
     return pte0 & 0x80000000 ? 1 : 0;
 }

-static inline void pte_invalidate (target_ulong *pte0)
+static always_inline void pte_invalidate (target_ulong *pte0)
 {
     *pte0 &= ~0x80000000;
 }

 #if defined(TARGET_PPC64)
-static inline int pte64_is_valid (target_ulong pte0)
+static always_inline int pte64_is_valid (target_ulong pte0)
 {
     return pte0 & 0x0000000000000001ULL ? 1 : 0;
 }

-static inline void pte64_invalidate (target_ulong *pte0)
+static always_inline void pte64_invalidate (target_ulong *pte0)
 {
     *pte0 &= ~0x0000000000000001ULL;
 }
@@ -96,9 +96,9 @@ static inline void pte64_invalidate (target_ulong *pte0)
 #define PTE64_CHECK_MASK (TARGET_PAGE_MASK | 0x7F)
 #endif

-static inline int _pte_check (mmu_ctx_t *ctx, int is_64b,
-                              target_ulong pte0, target_ulong pte1,
-                              int h, int rw)
+static always_inline int _pte_check (mmu_ctx_t *ctx, int is_64b,
+                                     target_ulong pte0, target_ulong pte1,
+                                     int h, int rw)
 {
     target_ulong ptem, mmask;
     int access, ret, pteh, ptev;
@@ -258,9 +258,10 @@ static void ppc6xx_tlb_invalidate_all (CPUState *env)
     tlb_flush(env, 1);
 }

-static inline void __ppc6xx_tlb_invalidate_virt (CPUState *env,
-                                                 target_ulong eaddr,
-                                                 int is_code, int match_epn)
+static always_inline void __ppc6xx_tlb_invalidate_virt (CPUState *env,
+                                                        target_ulong eaddr,
+                                                        int is_code,
+                                                        int match_epn)
 {
 #if !defined(FLUSH_ALL_TLBS)
     ppc6xx_tlb_t *tlb;
@@ -487,7 +488,7 @@ static int get_bat (CPUState *env, mmu_ctx_t *ctx,
 }

 /* PTE table lookup */
-static inline int _find_pte (mmu_ctx_t *ctx, int is_64b, int h, int rw)
+static always_inline int _find_pte (mmu_ctx_t *ctx, int is_64b, int h, int rw)
 {
     target_ulong base, pte0, pte1;
     int i, good = -1;
@@ -588,7 +589,8 @@ static int find_pte64 (mmu_ctx_t *ctx, int h, int rw)
 }
 #endif

-static inline int find_pte (CPUState *env, mmu_ctx_t *ctx, int h, int rw)
+static always_inline int find_pte (CPUState *env, mmu_ctx_t *ctx,
+                                   int h, int rw)
 {
 #if defined(TARGET_PPC64)
     if (env->mmu_model == POWERPC_MMU_64B)
@@ -720,10 +722,10 @@ void ppc_store_slb (CPUPPCState *env, int slb_nr, target_ulong rs)
 #endif /* defined(TARGET_PPC64) */

 /* Perform segment based translation */
-static inline target_phys_addr_t get_pgaddr (target_phys_addr_t sdr1,
-                                             int sdr_sh,
-                                             target_phys_addr_t hash,
-                                             target_phys_addr_t mask)
+static always_inline target_phys_addr_t get_pgaddr (target_phys_addr_t sdr1,
+                                                    int sdr_sh,
+                                                    target_phys_addr_t hash,
+                                                    target_phys_addr_t mask)
 {
     return (sdr1 & ((target_ulong)(-1ULL) << sdr_sh)) | (hash & mask);
 }
@@ -1594,8 +1596,9 @@ int cpu_ppc_handle_mmu_fault (CPUState *env, target_ulong address, int rw,
 /*****************************************************************************/
 /* BATs management */
 #if !defined(FLUSH_ALL_TLBS)
-static inline void do_invalidate_BAT (CPUPPCState *env,
-                                      target_ulong BATu, target_ulong mask)
+static always_inline void do_invalidate_BAT (CPUPPCState *env,
+                                             target_ulong BATu,
+                                             target_ulong mask)
 {
     target_ulong base, end, page;

@@ -1616,8 +1619,8 @@ static inline void do_invalidate_BAT (CPUPPCState *env,
 }
 #endif

-static inline void dump_store_bat (CPUPPCState *env, char ID, int ul, int nr,
-                                   target_ulong value)
+static always_inline void dump_store_bat (CPUPPCState *env, char ID,
+                                          int ul, int nr, target_ulong value)
 {
 #if defined (DEBUG_BATS)
     if (loglevel != 0) {
@@ -1931,7 +1934,7 @@ void ppc_store_xer (CPUPPCState *env, target_ulong value)
 }

 /* Swap temporary saved registers with GPRs */
-static inline void swap_gpr_tgpr (CPUPPCState *env)
+static always_inline void swap_gpr_tgpr (CPUPPCState *env)
 {
     ppc_gpr_t tmp;

target-ppc/op_helper.c
@@ -601,7 +601,7 @@ void do_srad (void)
 }
 #endif

-static inline int popcnt (uint32_t val)
+static always_inline int popcnt (uint32_t val)
 {
     int i;

@@ -707,7 +707,7 @@ void do_fctidz (void)

 #endif

-static inline void do_fri (int rounding_mode)
+static always_inline void do_fri (int rounding_mode)
 {
     int curmode;

@@ -1430,12 +1430,12 @@ static uint8_t hbrev[16] = {
     0x1, 0x9, 0x5, 0xD, 0x3, 0xB, 0x7, 0xF,
 };

-static inline uint8_t byte_reverse (uint8_t val)
+static always_inline uint8_t byte_reverse (uint8_t val)
 {
     return hbrev[val >> 4] | (hbrev[val & 0xF] << 4);
 }

-static inline uint32_t word_reverse (uint32_t val)
+static always_inline uint32_t word_reverse (uint32_t val)
 {
     return byte_reverse(val >> 24) | (byte_reverse(val >> 16) << 8) |
         (byte_reverse(val >> 8) << 16) | (byte_reverse(val) << 24);
@@ -1468,7 +1468,7 @@ void do_ev##name (void) \
 }

 /* Fixed-point vector arithmetic */
-static inline uint32_t _do_eabs (uint32_t val)
+static always_inline uint32_t _do_eabs (uint32_t val)
 {
     if (val != 0x80000000)
         val &= ~0x80000000;
@@ -1476,12 +1476,12 @@ static inline uint32_t _do_eabs (uint32_t val)
     return val;
 }

-static inline uint32_t _do_eaddw (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_eaddw (uint32_t op1, uint32_t op2)
 {
     return op1 + op2;
 }

-static inline int _do_ecntlsw (uint32_t val)
+static always_inline int _do_ecntlsw (uint32_t val)
 {
     if (val & 0x80000000)
         return _do_cntlzw(~val);
@@ -1489,12 +1489,12 @@ static inline int _do_ecntlsw (uint32_t val)
         return _do_cntlzw(val);
 }

-static inline int _do_ecntlzw (uint32_t val)
+static always_inline int _do_ecntlzw (uint32_t val)
 {
     return _do_cntlzw(val);
 }

-static inline uint32_t _do_eneg (uint32_t val)
+static always_inline uint32_t _do_eneg (uint32_t val)
 {
     if (val != 0x80000000)
         val ^= 0x80000000;
@@ -1502,35 +1502,35 @@ static inline uint32_t _do_eneg (uint32_t val)
     return val;
 }

-static inline uint32_t _do_erlw (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_erlw (uint32_t op1, uint32_t op2)
 {
     return rotl32(op1, op2);
 }

-static inline uint32_t _do_erndw (uint32_t val)
+static always_inline uint32_t _do_erndw (uint32_t val)
 {
     return (val + 0x000080000000) & 0xFFFF0000;
 }

-static inline uint32_t _do_eslw (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_eslw (uint32_t op1, uint32_t op2)
 {
     /* No error here: 6 bits are used */
     return op1 << (op2 & 0x3F);
 }

-static inline int32_t _do_esrws (int32_t op1, uint32_t op2)
+static always_inline int32_t _do_esrws (int32_t op1, uint32_t op2)
 {
     /* No error here: 6 bits are used */
     return op1 >> (op2 & 0x3F);
 }

-static inline uint32_t _do_esrwu (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_esrwu (uint32_t op1, uint32_t op2)
 {
     /* No error here: 6 bits are used */
     return op1 >> (op2 & 0x3F);
 }

-static inline uint32_t _do_esubfw (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_esubfw (uint32_t op1, uint32_t op2)
 {
     return op2 - op1;
 }
@@ -1559,7 +1559,7 @@ DO_SPE_OP2(srwu);
 DO_SPE_OP2(subfw);

 /* evsel is a little bit more complicated... */
-static inline uint32_t _do_esel (uint32_t op1, uint32_t op2, int n)
+static always_inline uint32_t _do_esel (uint32_t op1, uint32_t op2, int n)
 {
     if (n)
         return op1;
@@ -1582,31 +1582,31 @@ void do_ev##name (void) \
                        _do_e##name(T0_64, T1_64)); \
 }

-static inline uint32_t _do_evcmp_merge (int t0, int t1)
+static always_inline uint32_t _do_evcmp_merge (int t0, int t1)
 {
     return (t0 << 3) | (t1 << 2) | ((t0 | t1) << 1) | (t0 & t1);
 }
-static inline int _do_ecmpeq (uint32_t op1, uint32_t op2)
+static always_inline int _do_ecmpeq (uint32_t op1, uint32_t op2)
 {
     return op1 == op2 ? 1 : 0;
 }

-static inline int _do_ecmpgts (int32_t op1, int32_t op2)
+static always_inline int _do_ecmpgts (int32_t op1, int32_t op2)
 {
     return op1 > op2 ? 1 : 0;
 }

-static inline int _do_ecmpgtu (uint32_t op1, uint32_t op2)
+static always_inline int _do_ecmpgtu (uint32_t op1, uint32_t op2)
 {
     return op1 > op2 ? 1 : 0;
 }

-static inline int _do_ecmplts (int32_t op1, int32_t op2)
+static always_inline int _do_ecmplts (int32_t op1, int32_t op2)
 {
     return op1 < op2 ? 1 : 0;
 }

-static inline int _do_ecmpltu (uint32_t op1, uint32_t op2)
+static always_inline int _do_ecmpltu (uint32_t op1, uint32_t op2)
 {
     return op1 < op2 ? 1 : 0;
 }
@@ -1623,7 +1623,7 @@ DO_SPE_CMP(cmplts);
 DO_SPE_CMP(cmpltu);

 /* Single precision floating-point conversions from/to integer */
-static inline uint32_t _do_efscfsi (int32_t val)
+static always_inline uint32_t _do_efscfsi (int32_t val)
 {
     union {
         uint32_t u;
@@ -1635,7 +1635,7 @@ static inline uint32_t _do_efscfsi (int32_t val)
     return u.u;
 }

-static inline uint32_t _do_efscfui (uint32_t val)
+static always_inline uint32_t _do_efscfui (uint32_t val)
 {
     union {
         uint32_t u;
@@ -1647,7 +1647,7 @@ static inline uint32_t _do_efscfui (uint32_t val)
     return u.u;
 }

-static inline int32_t _do_efsctsi (uint32_t val)
+static always_inline int32_t _do_efsctsi (uint32_t val)
 {
     union {
         int32_t u;
@@ -1662,7 +1662,7 @@ static inline int32_t _do_efsctsi (uint32_t val)
     return float32_to_int32(u.f, &env->spe_status);
 }

-static inline uint32_t _do_efsctui (uint32_t val)
+static always_inline uint32_t _do_efsctui (uint32_t val)
 {
     union {
         int32_t u;
@@ -1677,7 +1677,7 @@ static inline uint32_t _do_efsctui (uint32_t val)
     return float32_to_uint32(u.f, &env->spe_status);
 }

-static inline int32_t _do_efsctsiz (uint32_t val)
+static always_inline int32_t _do_efsctsiz (uint32_t val)
 {
     union {
         int32_t u;
@@ -1692,7 +1692,7 @@ static inline int32_t _do_efsctsiz (uint32_t val)
     return float32_to_int32_round_to_zero(u.f, &env->spe_status);
 }

-static inline uint32_t _do_efsctuiz (uint32_t val)
+static always_inline uint32_t _do_efsctuiz (uint32_t val)
 {
     union {
         int32_t u;
@@ -1738,7 +1738,7 @@ void do_efsctuiz (void)
 }

 /* Single precision floating-point conversion to/from fractional */
-static inline uint32_t _do_efscfsf (uint32_t val)
+static always_inline uint32_t _do_efscfsf (uint32_t val)
 {
     union {
         uint32_t u;
@@ -1753,7 +1753,7 @@ static inline uint32_t _do_efscfsf (uint32_t val)
     return u.u;
 }

-static inline uint32_t _do_efscfuf (uint32_t val)
+static always_inline uint32_t _do_efscfuf (uint32_t val)
 {
     union {
         uint32_t u;
@@ -1768,7 +1768,7 @@ static inline uint32_t _do_efscfuf (uint32_t val)
     return u.u;
 }

-static inline int32_t _do_efsctsf (uint32_t val)
+static always_inline int32_t _do_efsctsf (uint32_t val)
 {
     union {
         int32_t u;
@@ -1786,7 +1786,7 @@ static inline int32_t _do_efsctsf (uint32_t val)
     return float32_to_int32(u.f, &env->spe_status);
 }

-static inline uint32_t _do_efsctuf (uint32_t val)
+static always_inline uint32_t _do_efsctuf (uint32_t val)
 {
     union {
         int32_t u;
@@ -1804,7 +1804,7 @@ static inline uint32_t _do_efsctuf (uint32_t val)
     return float32_to_uint32(u.f, &env->spe_status);
 }

-static inline int32_t _do_efsctsfz (uint32_t val)
+static always_inline int32_t _do_efsctsfz (uint32_t val)
 {
     union {
         int32_t u;
@@ -1822,7 +1822,7 @@ static inline int32_t _do_efsctsfz (uint32_t val)
     return float32_to_int32_round_to_zero(u.f, &env->spe_status);
 }

-static inline uint32_t _do_efsctufz (uint32_t val)
+static always_inline uint32_t _do_efsctufz (uint32_t val)
 {
     union {
         int32_t u;
@@ -1871,19 +1871,19 @@ void do_efsctufz (void)
 }

 /* Double precision floating point helpers */
-static inline int _do_efdcmplt (uint64_t op1, uint64_t op2)
+static always_inline int _do_efdcmplt (uint64_t op1, uint64_t op2)
 {
     /* XXX: TODO: test special values (NaN, infinites, ...) */
     return _do_efdtstlt(op1, op2);
 }

-static inline int _do_efdcmpgt (uint64_t op1, uint64_t op2)
+static always_inline int _do_efdcmpgt (uint64_t op1, uint64_t op2)
 {
     /* XXX: TODO: test special values (NaN, infinites, ...) */
     return _do_efdtstgt(op1, op2);
 }

-static inline int _do_efdcmpeq (uint64_t op1, uint64_t op2)
+static always_inline int _do_efdcmpeq (uint64_t op1, uint64_t op2)
 {
     /* XXX: TODO: test special values (NaN, infinites, ...) */
     return _do_efdtsteq(op1, op2);
@@ -1905,7 +1905,7 @@ void do_efdcmpeq (void)
 }

 /* Double precision floating-point conversion to/from integer */
-static inline uint64_t _do_efdcfsi (int64_t val)
+static always_inline uint64_t _do_efdcfsi (int64_t val)
 {
     union {
         uint64_t u;
@@ -1917,7 +1917,7 @@ static inline uint64_t _do_efdcfsi (int64_t val)
     return u.u;
 }

-static inline uint64_t _do_efdcfui (uint64_t val)
+static always_inline uint64_t _do_efdcfui (uint64_t val)
 {
     union {
         uint64_t u;
@@ -1929,7 +1929,7 @@ static inline uint64_t _do_efdcfui (uint64_t val)
     return u.u;
 }

-static inline int64_t _do_efdctsi (uint64_t val)
+static always_inline int64_t _do_efdctsi (uint64_t val)
 {
     union {
         int64_t u;
@@ -1944,7 +1944,7 @@ static inline int64_t _do_efdctsi (uint64_t val)
     return float64_to_int64(u.f, &env->spe_status);
 }

-static inline uint64_t _do_efdctui (uint64_t val)
+static always_inline uint64_t _do_efdctui (uint64_t val)
 {
     union {
         int64_t u;
@@ -1959,7 +1959,7 @@ static inline uint64_t _do_efdctui (uint64_t val)
     return float64_to_uint64(u.f, &env->spe_status);
 }

-static inline int64_t _do_efdctsiz (uint64_t val)
+static always_inline int64_t _do_efdctsiz (uint64_t val)
 {
     union {
         int64_t u;
@@ -1974,7 +1974,7 @@ static inline int64_t _do_efdctsiz (uint64_t val)
     return float64_to_int64_round_to_zero(u.f, &env->spe_status);
 }

-static inline uint64_t _do_efdctuiz (uint64_t val)
+static always_inline uint64_t _do_efdctuiz (uint64_t val)
 {
     union {
         int64_t u;
@@ -2020,7 +2020,7 @@ void do_efdctuiz (void)
 }

 /* Double precision floating-point conversion to/from fractional */
-static inline uint64_t _do_efdcfsf (int64_t val)
+static always_inline uint64_t _do_efdcfsf (int64_t val)
 {
     union {
         uint64_t u;
@@ -2035,7 +2035,7 @@ static inline uint64_t _do_efdcfsf (int64_t val)
     return u.u;
 }

-static inline uint64_t _do_efdcfuf (uint64_t val)
+static always_inline uint64_t _do_efdcfuf (uint64_t val)
 {
     union {
         uint64_t u;
@@ -2050,7 +2050,7 @@ static inline uint64_t _do_efdcfuf (uint64_t val)
     return u.u;
 }

-static inline int64_t _do_efdctsf (uint64_t val)
+static always_inline int64_t _do_efdctsf (uint64_t val)
 {
     union {
         int64_t u;
@@ -2068,7 +2068,7 @@ static inline int64_t _do_efdctsf (uint64_t val)
     return float64_to_int32(u.f, &env->spe_status);
 }

-static inline uint64_t _do_efdctuf (uint64_t val)
+static always_inline uint64_t _do_efdctuf (uint64_t val)
 {
     union {
         int64_t u;
@@ -2086,7 +2086,7 @@ static inline uint64_t _do_efdctuf (uint64_t val)
     return float64_to_uint32(u.f, &env->spe_status);
 }

-static inline int64_t _do_efdctsfz (uint64_t val)
+static always_inline int64_t _do_efdctsfz (uint64_t val)
 {
     union {
         int64_t u;
@@ -2104,7 +2104,7 @@ static inline int64_t _do_efdctsfz (uint64_t val)
     return float64_to_int32_round_to_zero(u.f, &env->spe_status);
 }

-static inline uint64_t _do_efdctufz (uint64_t val)
+static always_inline uint64_t _do_efdctufz (uint64_t val)
 {
     union {
         int64_t u;
@@ -2153,7 +2153,7 @@ void do_efdctufz (void)
 }

 /* Floating point conversion between single and double precision */
-static inline uint32_t _do_efscfd (uint64_t val)
+static always_inline uint32_t _do_efscfd (uint64_t val)
 {
     union {
         uint64_t u;
@@ -2170,7 +2170,7 @@ static inline uint32_t _do_efscfd (uint64_t val)
     return u2.u;
 }

-static inline uint64_t _do_efdcfs (uint32_t val)
+static always_inline uint64_t _do_efdcfs (uint32_t val)
 {
     union {
         uint64_t u;
@@ -2214,19 +2214,19 @@ DO_SPE_OP2(fsmul);
 DO_SPE_OP2(fsdiv);

 /* Single-precision floating-point comparisons */
-static inline int _do_efscmplt (uint32_t op1, uint32_t op2)
+static always_inline int _do_efscmplt (uint32_t op1, uint32_t op2)
 {
     /* XXX: TODO: test special values (NaN, infinites, ...) */
     return _do_efststlt(op1, op2);
 }

-static inline int _do_efscmpgt (uint32_t op1, uint32_t op2)
+static always_inline int _do_efscmpgt (uint32_t op1, uint32_t op2)
 {
     /* XXX: TODO: test special values (NaN, infinites, ...) */
     return _do_efststgt(op1, op2);
 }

-static inline int _do_efscmpeq (uint32_t op1, uint32_t op2)
+static always_inline int _do_efscmpeq (uint32_t op1, uint32_t op2)
 {
     /* XXX: TODO: test special values (NaN, infinites, ...) */
     return _do_efststeq(op1, op2);
target-ppc/op_helper.h
@@ -277,7 +277,7 @@ void do_evfsctuiz (void);

 /* Inlined helpers: used in micro-operation as well as helpers */
 /* Generic fixed-point helpers */
-static inline int _do_cntlzw (uint32_t val)
+static always_inline int _do_cntlzw (uint32_t val)
 {
     int cnt = 0;
     if (!(val & 0xFFFF0000UL)) {
@@ -306,7 +306,7 @@ static inline int _do_cntlzw (uint32_t val)
     return cnt;
 }

-static inline int _do_cntlzd (uint64_t val)
+static always_inline int _do_cntlzd (uint64_t val)
 {
     int cnt = 0;
 #if HOST_LONG_BITS == 64
@@ -350,19 +350,19 @@ static inline int _do_cntlzd (uint64_t val)
 #if defined(TARGET_PPCEMB)
 /* SPE extension */
 /* Single precision floating-point helpers */
-static inline uint32_t _do_efsabs (uint32_t val)
+static always_inline uint32_t _do_efsabs (uint32_t val)
 {
     return val & ~0x80000000;
 }
-static inline uint32_t _do_efsnabs (uint32_t val)
+static always_inline uint32_t _do_efsnabs (uint32_t val)
 {
     return val | 0x80000000;
 }
-static inline uint32_t _do_efsneg (uint32_t val)
+static always_inline uint32_t _do_efsneg (uint32_t val)
 {
     return val ^ 0x80000000;
 }
-static inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2)
 {
     union {
         uint32_t u;
@@ -373,7 +373,7 @@ static inline uint32_t _do_efsadd (uint32_t op1, uint32_t op2)
     u1.f = float32_add(u1.f, u2.f, &env->spe_status);
     return u1.u;
 }
-static inline uint32_t _do_efssub (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_efssub (uint32_t op1, uint32_t op2)
 {
     union {
         uint32_t u;
@@ -384,7 +384,7 @@ static inline uint32_t _do_efssub (uint32_t op1, uint32_t op2)
     u1.f = float32_sub(u1.f, u2.f, &env->spe_status);
     return u1.u;
 }
-static inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2)
 {
     union {
         uint32_t u;
@@ -395,7 +395,7 @@ static inline uint32_t _do_efsmul (uint32_t op1, uint32_t op2)
     u1.f = float32_mul(u1.f, u2.f, &env->spe_status);
     return u1.u;
 }
-static inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2)
+static always_inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2)
 {
     union {
         uint32_t u;
@@ -407,7 +407,7 @@ static inline uint32_t _do_efsdiv (uint32_t op1, uint32_t op2)
     return u1.u;
 }

-static inline int _do_efststlt (uint32_t op1, uint32_t op2)
+static always_inline int _do_efststlt (uint32_t op1, uint32_t op2)
 {
     union {
         uint32_t u;
@@ -417,7 +417,7 @@ static inline int _do_efststlt (uint32_t op1, uint32_t op2)
     u2.u = op2;
     return float32_lt(u1.f, u2.f, &env->spe_status) ? 1 : 0;
 }
-static inline int _do_efststgt (uint32_t op1, uint32_t op2)
+static always_inline int _do_efststgt (uint32_t op1, uint32_t op2)
 {
     union {
         uint32_t u;
@@ -427,7 +427,7 @@ static inline int _do_efststgt (uint32_t op1, uint32_t op2)
     u2.u = op2;
     return float32_le(u1.f, u2.f, &env->spe_status) ? 0 : 1;
 }
-static inline int _do_efststeq (uint32_t op1, uint32_t op2)
+static always_inline int _do_efststeq (uint32_t op1, uint32_t op2)
 {
     union {
         uint32_t u;
@@ -438,7 +438,7 @@ static inline int _do_efststeq (uint32_t op1, uint32_t op2)
     return float32_eq(u1.f, u2.f, &env->spe_status) ? 1 : 0;
 }
 /* Double precision floating-point helpers */
-static inline int _do_efdtstlt (uint64_t op1, uint64_t op2)
+static always_inline int _do_efdtstlt (uint64_t op1, uint64_t op2)
 {
     union {
         uint64_t u;
@@ -448,7 +448,7 @@ static inline int _do_efdtstlt (uint64_t op1, uint64_t op2)
     u2.u = op2;
     return float64_lt(u1.f, u2.f, &env->spe_status) ? 1 : 0;
 }
-static inline int _do_efdtstgt (uint64_t op1, uint64_t op2)
+static always_inline int _do_efdtstgt (uint64_t op1, uint64_t op2)
 {
     union {
         uint64_t u;
@@ -458,7 +458,7 @@ static inline int _do_efdtstgt (uint64_t op1, uint64_t op2)
     u2.u = op2;
     return float64_le(u1.f, u2.f, &env->spe_status) ? 0 : 1;
 }
-static inline int _do_efdtsteq (uint64_t op1, uint64_t op2)
+static always_inline int _do_efdtsteq (uint64_t op1, uint64_t op2)
 {
     union {
         uint64_t u;
target-ppc/op_helper_mem.h
@@ -19,14 +19,15 @@
  */

 /* Multiple word / string load and store */
-static inline target_ulong glue(ld32r, MEMSUFFIX) (target_ulong EA)
+static always_inline target_ulong glue(ld32r, MEMSUFFIX) (target_ulong EA)
 {
     uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
     return ((tmp & 0xFF000000UL) >> 24) | ((tmp & 0x00FF0000UL) >> 8) |
         ((tmp & 0x0000FF00UL) << 8) | ((tmp & 0x000000FFUL) << 24);
 }

-static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, target_ulong data)
+static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
+                                                  target_ulong data)
 {
     uint32_t tmp =
         ((data & 0xFF000000UL) >> 24) | ((data & 0x00FF0000UL) >> 8) |
@@ -399,7 +400,7 @@ void glue(do_POWER2_lfq, MEMSUFFIX) (void)
     FT1 = glue(ldfq, MEMSUFFIX)((uint32_t)(T0 + 4));
 }

-static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
+static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
 {
     union {
         double d;
@@ -431,7 +432,7 @@ void glue(do_POWER2_stfq, MEMSUFFIX) (void)
     glue(stfq, MEMSUFFIX)((uint32_t)(T0 + 4), FT1);
 }

-static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
+static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
 {
     union {
         double d;
target-ppc/op_mem.h
@@ -18,19 +18,19 @@
  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  */

-static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
+static always_inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
 {
     uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
     return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
 }

-static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
+static always_inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
 {
     int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
     return (int16_t)((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
 }

-static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
+static always_inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
 {
     uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
     return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
@@ -38,7 +38,7 @@ static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
 }

 #if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
-static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
+static always_inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
 {
     uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
     return ((tmp & 0xFF00000000000000ULL) >> 56) |
@@ -53,12 +53,12 @@ static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
 #endif

 #if defined(TARGET_PPC64)
-static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
+static always_inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
 {
     return (int32_t)glue(ldl, MEMSUFFIX)(EA);
 }

-static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
+static always_inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
 {
     uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
     return (int32_t)((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
@@ -66,13 +66,15 @@ static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
 }
 #endif

-static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
+static always_inline void glue(st16r, MEMSUFFIX) (target_ulong EA,
+                                                  uint16_t data)
 {
     uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
     glue(stw, MEMSUFFIX)(EA, tmp);
 }

-static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
+static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
+                                                  uint32_t data)
 {
     uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
         ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
@@ -80,7 +82,8 @@ static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
 }

 #if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
-static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
+static always_inline void glue(st64r, MEMSUFFIX) (target_ulong EA,
+                                                  uint64_t data)
 {
     uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
         ((data & 0x00FF000000000000ULL) >> 40) |
@@ -403,12 +406,12 @@ void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
 }
 #endif

-static inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
+static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
 {
     glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
 }

-static inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
+static always_inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
 {
     union {
         double d;
@@ -429,7 +432,7 @@ PPC_STF_OP_64(fs, stfs);
 PPC_STF_OP_64(fiwx, stfiwx);
 #endif

-static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
+static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
 {
     union {
         double d;
@@ -448,7 +451,7 @@ static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
     glue(stfq, MEMSUFFIX)(EA, u.d);
 }

-static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
+static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
 {
     union {
         float f;
@@ -463,7 +466,7 @@ static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
     glue(stfl, MEMSUFFIX)(EA, u.f);
 }

-static inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
+static always_inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
 {
     union {
         double d;
@@ -506,7 +509,7 @@ void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
 }
 #endif

-static inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
+static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
 {
     return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
 }
@@ -518,7 +521,7 @@ PPC_LDF_OP_64(fd, ldfq);
 PPC_LDF_OP_64(fs, ldfs);
 #endif

-static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
+static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
 {
     union {
         double d;
@@ -538,7 +541,7 @@ static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
     return u.d;
 }

-static inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
+static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
 {
     union {
         float f;
@@ -1105,7 +1108,7 @@ PPC_SPE_ST_OP(dd, stq);
 PPC_SPE_LD_OP(dd_le, ld64r);
 PPC_SPE_ST_OP(dd_le, st64r);
 #endif
-static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
+static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
 {
     uint64_t ret;
     ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
@@ -1113,13 +1116,14 @@ static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
     return ret;
 }
 PPC_SPE_LD_OP(dw, spe_ldw);
-static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
+static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
+                                                     uint64_t data)
 {
     glue(stl, MEMSUFFIX)(EA, data >> 32);
     glue(stl, MEMSUFFIX)(EA + 4, data);
 }
 PPC_SPE_ST_OP(dw, spe_stdw);
-static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
+static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
 {
     uint64_t ret;
     ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
@@ -1127,14 +1131,14 @@ static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
     return ret;
 }
 PPC_SPE_LD_OP(dw_le, spe_ldw_le);
-static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
-                                                 uint64_t data)
+static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
+                                                        uint64_t data)
 {
     glue(st32r, MEMSUFFIX)(EA, data >> 32);
     glue(st32r, MEMSUFFIX)(EA + 4, data);
 }
 PPC_SPE_ST_OP(dw_le, spe_stdw_le);
-static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
+static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
 {
     uint64_t ret;
     ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
@@ -1144,7 +1148,8 @@ static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
     return ret;
 }
 PPC_SPE_LD_OP(dh, spe_ldh);
-static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
+static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
+                                                     uint64_t data)
 {
     glue(stw, MEMSUFFIX)(EA, data >> 48);
     glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
@@ -1152,7 +1157,7 @@ static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
     glue(stw, MEMSUFFIX)(EA + 6, data);
 }
 PPC_SPE_ST_OP(dh, spe_stdh);
-static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
+static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
 {
     uint64_t ret;
     ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
@@ -1162,8 +1167,8 @@ static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
     return ret;
 }
 PPC_SPE_LD_OP(dh_le, spe_ldh_le);
-static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
-                                                 uint64_t data)
+static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
+                                                        uint64_t data)
 {
     glue(st16r, MEMSUFFIX)(EA, data >> 48);
     glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
@@ -1171,7 +1176,7 @@ static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
     glue(st16r, MEMSUFFIX)(EA + 6, data);
 }
 PPC_SPE_ST_OP(dh_le, spe_stdh_le);
-static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
+static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
 {
     uint64_t ret;
     ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
@@ -1179,13 +1184,14 @@ static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
     return ret;
 }
 PPC_SPE_LD_OP(whe, spe_lwhe);
-static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
+static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
+                                                      uint64_t data)
 {
     glue(stw, MEMSUFFIX)(EA, data >> 48);
     glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
 }
 PPC_SPE_ST_OP(whe, spe_stwhe);
-static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
+static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
 {
     uint64_t ret;
     ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
@@ -1193,14 +1199,14 @@ static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA) @@ -1193,14 +1199,14 @@ static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1193 return ret; 1199 return ret;
1194 } 1200 }
1195 PPC_SPE_LD_OP(whe_le, spe_lwhe_le); 1201 PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
1196 -static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,  
1197 - uint64_t data) 1202 +static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
  1203 + uint64_t data)
1198 { 1204 {
1199 glue(st16r, MEMSUFFIX)(EA, data >> 48); 1205 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1200 glue(st16r, MEMSUFFIX)(EA + 2, data >> 16); 1206 glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1201 } 1207 }
1202 PPC_SPE_ST_OP(whe_le, spe_stwhe_le); 1208 PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
1203 -static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA) 1209 +static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1204 { 1210 {
1205 uint64_t ret; 1211 uint64_t ret;
1206 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32; 1212 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
@@ -1208,7 +1214,7 @@ static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA) @@ -1208,7 +1214,7 @@ static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1208 return ret; 1214 return ret;
1209 } 1215 }
1210 PPC_SPE_LD_OP(whou, spe_lwhou); 1216 PPC_SPE_LD_OP(whou, spe_lwhou);
1211 -static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA) 1217 +static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1212 { 1218 {
1213 uint64_t ret; 1219 uint64_t ret;
1214 ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32; 1220 ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
@@ -1216,13 +1222,14 @@ static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA) @@ -1216,13 +1222,14 @@ static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1216 return ret; 1222 return ret;
1217 } 1223 }
1218 PPC_SPE_LD_OP(whos, spe_lwhos); 1224 PPC_SPE_LD_OP(whos, spe_lwhos);
1219 -static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data) 1225 +static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
  1226 + uint64_t data)
1220 { 1227 {
1221 glue(stw, MEMSUFFIX)(EA, data >> 32); 1228 glue(stw, MEMSUFFIX)(EA, data >> 32);
1222 glue(stw, MEMSUFFIX)(EA + 2, data); 1229 glue(stw, MEMSUFFIX)(EA + 2, data);
1223 } 1230 }
1224 PPC_SPE_ST_OP(who, spe_stwho); 1231 PPC_SPE_ST_OP(who, spe_stwho);
1225 -static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA) 1232 +static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1226 { 1233 {
1227 uint64_t ret; 1234 uint64_t ret;
1228 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32; 1235 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
@@ -1230,7 +1237,7 @@ static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA) @@ -1230,7 +1237,7 @@ static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1230 return ret; 1237 return ret;
1231 } 1238 }
1232 PPC_SPE_LD_OP(whou_le, spe_lwhou_le); 1239 PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1233 -static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA) 1240 +static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1234 { 1241 {
1235 uint64_t ret; 1242 uint64_t ret;
1236 ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32; 1243 ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
@@ -1238,55 +1245,57 @@ static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA) @@ -1238,55 +1245,57 @@ static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1238 return ret; 1245 return ret;
1239 } 1246 }
1240 PPC_SPE_LD_OP(whos_le, spe_lwhos_le); 1247 PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1241 -static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,  
1242 - uint64_t data) 1248 +static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
  1249 + uint64_t data)
1243 { 1250 {
1244 glue(st16r, MEMSUFFIX)(EA, data >> 32); 1251 glue(st16r, MEMSUFFIX)(EA, data >> 32);
1245 glue(st16r, MEMSUFFIX)(EA + 2, data); 1252 glue(st16r, MEMSUFFIX)(EA + 2, data);
1246 } 1253 }
1247 PPC_SPE_ST_OP(who_le, spe_stwho_le); 1254 PPC_SPE_ST_OP(who_le, spe_stwho_le);
1248 #if !defined(TARGET_PPC64) 1255 #if !defined(TARGET_PPC64)
1249 -static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data) 1256 +static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
  1257 + uint64_t data)
1250 { 1258 {
1251 glue(stl, MEMSUFFIX)(EA, data); 1259 glue(stl, MEMSUFFIX)(EA, data);
1252 } 1260 }
1253 PPC_SPE_ST_OP(wwo, spe_stwwo); 1261 PPC_SPE_ST_OP(wwo, spe_stwwo);
1254 -static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,  
1255 - uint64_t data) 1262 +static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
  1263 + uint64_t data)
1256 { 1264 {
1257 glue(st32r, MEMSUFFIX)(EA, data); 1265 glue(st32r, MEMSUFFIX)(EA, data);
1258 } 1266 }
1259 PPC_SPE_ST_OP(wwo_le, spe_stwwo_le); 1267 PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1260 #endif 1268 #endif
1261 -static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA) 1269 +static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
1262 { 1270 {
1263 uint16_t tmp; 1271 uint16_t tmp;
1264 tmp = glue(lduw, MEMSUFFIX)(EA); 1272 tmp = glue(lduw, MEMSUFFIX)(EA);
1265 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16); 1273 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1266 } 1274 }
1267 PPC_SPE_LD_OP(h, spe_lh); 1275 PPC_SPE_LD_OP(h, spe_lh);
1268 -static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA) 1276 +static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
1269 { 1277 {
1270 uint16_t tmp; 1278 uint16_t tmp;
1271 tmp = glue(ld16r, MEMSUFFIX)(EA); 1279 tmp = glue(ld16r, MEMSUFFIX)(EA);
1272 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16); 1280 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1273 } 1281 }
1274 PPC_SPE_LD_OP(h_le, spe_lh_le); 1282 PPC_SPE_LD_OP(h_le, spe_lh_le);
1275 -static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA) 1283 +static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
1276 { 1284 {
1277 uint32_t tmp; 1285 uint32_t tmp;
1278 tmp = glue(ldl, MEMSUFFIX)(EA); 1286 tmp = glue(ldl, MEMSUFFIX)(EA);
1279 return ((uint64_t)tmp << 32) | (uint64_t)tmp; 1287 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1280 } 1288 }
1281 PPC_SPE_LD_OP(wwsplat, spe_lwwsplat); 1289 PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
1282 -static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA) 1290 +static always_inline
  1291 +uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1283 { 1292 {
1284 uint32_t tmp; 1293 uint32_t tmp;
1285 tmp = glue(ld32r, MEMSUFFIX)(EA); 1294 tmp = glue(ld32r, MEMSUFFIX)(EA);
1286 return ((uint64_t)tmp << 32) | (uint64_t)tmp; 1295 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1287 } 1296 }
1288 PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le); 1297 PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1289 -static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA) 1298 +static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1290 { 1299 {
1291 uint64_t ret; 1300 uint64_t ret;
1292 uint16_t tmp; 1301 uint16_t tmp;
@@ -1297,7 +1306,8 @@ static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA) @@ -1297,7 +1306,8 @@ static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1297 return ret; 1306 return ret;
1298 } 1307 }
1299 PPC_SPE_LD_OP(whsplat, spe_lwhsplat); 1308 PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
1300 -static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA) 1309 +static always_inline
  1310 +uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1301 { 1311 {
1302 uint64_t ret; 1312 uint64_t ret;
1303 uint16_t tmp; 1313 uint16_t tmp;
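All of the helpers in the hunks above are written once and stamped out per access mode: the file is included several times with a different MEMSUFFIX, and glue() token-pastes the suffix onto each function name. A minimal self-contained illustration of that pattern (the real glue/MEMSUFFIX machinery lives in QEMU's headers; names here are made up):

    #define xglue(a, b) a##b
    #define glue(a, b)  xglue(a, b)    /* extra level so arguments expand first */

    #define MEMSUFFIX _raw
    static inline unsigned glue(load_byte, MEMSUFFIX)(const unsigned char *p)
    {
        return *p;                     /* defines load_byte_raw() */
    }
    #undef MEMSUFFIX

    #define MEMSUFFIX _user
    static inline unsigned glue(load_byte, MEMSUFFIX)(const unsigned char *p)
    {
        return *p;                     /* defines load_byte_user() */
    }
    #undef MEMSUFFIX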
target-ppc/translate.c
@@ -53,7 +53,7 @@ static uint32_t *gen_opparam_ptr; @@ -53,7 +53,7 @@ static uint32_t *gen_opparam_ptr;
53 53
54 #include "gen-op.h" 54 #include "gen-op.h"
55 55
56 -static inline void gen_set_T0 (target_ulong val) 56 +static always_inline void gen_set_T0 (target_ulong val)
57 { 57 {
58 #if defined(TARGET_PPC64) 58 #if defined(TARGET_PPC64)
59 if (val >> 32) 59 if (val >> 32)
@@ -63,7 +63,7 @@ static inline void gen_set_T0 (target_ulong val) @@ -63,7 +63,7 @@ static inline void gen_set_T0 (target_ulong val)
63 gen_op_set_T0(val); 63 gen_op_set_T0(val);
64 } 64 }
65 65
66 -static inline void gen_set_T1 (target_ulong val) 66 +static always_inline void gen_set_T1 (target_ulong val)
67 { 67 {
68 #if defined(TARGET_PPC64) 68 #if defined(TARGET_PPC64)
69 if (val >> 32) 69 if (val >> 32)
@@ -78,7 +78,7 @@ static GenOpFunc *NAME ## _table [8] = { \ @@ -78,7 +78,7 @@ static GenOpFunc *NAME ## _table [8] = { \
78 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \ 78 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
79 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \ 79 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
80 }; \ 80 }; \
81 -static inline void func(int n) \ 81 +static always_inline void func (int n) \
82 { \ 82 { \
83 NAME ## _table[n](); \ 83 NAME ## _table[n](); \
84 } 84 }
@@ -90,7 +90,7 @@ NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \ @@ -90,7 +90,7 @@ NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
90 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \ 90 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
91 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \ 91 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
92 }; \ 92 }; \
93 -static inline void func(int n) \ 93 +static always_inline void func (int n) \
94 { \ 94 { \
95 NAME ## _table[n](); \ 95 NAME ## _table[n](); \
96 } 96 }
@@ -106,7 +106,7 @@ NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \ @@ -106,7 +106,7 @@ NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
106 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \ 106 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
107 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \ 107 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
108 }; \ 108 }; \
109 -static inline void func(int n) \ 109 +static always_inline void func (int n) \
110 { \ 110 { \
111 NAME ## _table[n](); \ 111 NAME ## _table[n](); \
112 } 112 }
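The GEN8/GEN16/GEN32 macros in these hunks build a function-pointer table out of numbered gen_op_* variants plus a one-line dispatcher, exactly the kind of trivial wrapper that benefits from forced inlining. A reduced, compilable sketch of the same construction (two entries instead of eight; the names are illustrative):

    #include <stdio.h>

    typedef void GenOpFunc(void);       /* function type, as in translate.c */

    static void op_hello0(void) { puts("variant 0"); }
    static void op_hello1(void) { puts("variant 1"); }

    #define GEN2(func, NAME)                                       \
    static GenOpFunc *NAME##_table[2] = { NAME##0, NAME##1 };      \
    static inline void func(int n)                                 \
    {                                                              \
        NAME##_table[n]();                                         \
    }

    GEN2(gen_hello, op_hello)

    int main(void)
    {
        gen_hello(1);                   /* prints "variant 1" */
        return 0;
    }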
@@ -121,7 +121,7 @@ GEN8(gen_op_store_T1_crf, gen_op_store_T1_crf_crf); @@ -121,7 +121,7 @@ GEN8(gen_op_store_T1_crf, gen_op_store_T1_crf_crf);
121 GEN8(gen_op_load_fpscr_T0, gen_op_load_fpscr_T0_fpscr); 121 GEN8(gen_op_load_fpscr_T0, gen_op_load_fpscr_T0_fpscr);
122 GEN8(gen_op_store_T0_fpscr, gen_op_store_T0_fpscr_fpscr); 122 GEN8(gen_op_store_T0_fpscr, gen_op_store_T0_fpscr_fpscr);
123 GEN8(gen_op_clear_fpscr, gen_op_clear_fpscr_fpscr); 123 GEN8(gen_op_clear_fpscr, gen_op_clear_fpscr_fpscr);
124 -static inline void gen_op_store_T0_fpscri (int n, uint8_t param) 124 +static always_inline void gen_op_store_T0_fpscri (int n, uint8_t param)
125 { 125 {
126 gen_op_set_T0(param); 126 gen_op_set_T0(param);
127 gen_op_store_T0_fpscr(n); 127 gen_op_store_T0_fpscr(n);
@@ -187,7 +187,7 @@ struct opc_handler_t { @@ -187,7 +187,7 @@ struct opc_handler_t {
187 #endif 187 #endif
188 }; 188 };
189 189
190 -static inline void gen_set_Rc0 (DisasContext *ctx) 190 +static always_inline void gen_set_Rc0 (DisasContext *ctx)
191 { 191 {
192 #if defined(TARGET_PPC64) 192 #if defined(TARGET_PPC64)
193 if (ctx->sf_mode) 193 if (ctx->sf_mode)
@@ -198,7 +198,7 @@ static inline void gen_set_Rc0 (DisasContext *ctx) @@ -198,7 +198,7 @@ static inline void gen_set_Rc0 (DisasContext *ctx)
198 gen_op_set_Rc0(); 198 gen_op_set_Rc0();
199 } 199 }
200 200
201 -static inline void gen_update_nip (DisasContext *ctx, target_ulong nip) 201 +static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
202 { 202 {
203 #if defined(TARGET_PPC64) 203 #if defined(TARGET_PPC64)
204 if (ctx->sf_mode) 204 if (ctx->sf_mode)
@@ -236,14 +236,14 @@ GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0) @@ -236,14 +236,14 @@ GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
236 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0) 236 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
237 237
238 /* Stop translation */ 238 /* Stop translation */
239 -static inline void GEN_STOP (DisasContext *ctx) 239 +static always_inline void GEN_STOP (DisasContext *ctx)
240 { 240 {
241 gen_update_nip(ctx, ctx->nip); 241 gen_update_nip(ctx, ctx->nip);
242 ctx->exception = POWERPC_EXCP_STOP; 242 ctx->exception = POWERPC_EXCP_STOP;
243 } 243 }
244 244
245 /* No need to update nip here, as execution flow will change */ 245 /* No need to update nip here, as execution flow will change */
246 -static inline void GEN_SYNC (DisasContext *ctx) 246 +static always_inline void GEN_SYNC (DisasContext *ctx)
247 { 247 {
248 ctx->exception = POWERPC_EXCP_SYNC; 248 ctx->exception = POWERPC_EXCP_SYNC;
249 } 249 }
@@ -267,13 +267,13 @@ typedef struct opcode_t { @@ -267,13 +267,13 @@ typedef struct opcode_t {
267 /*****************************************************************************/ 267 /*****************************************************************************/
268 /*** Instruction decoding ***/ 268 /*** Instruction decoding ***/
269 #define EXTRACT_HELPER(name, shift, nb) \ 269 #define EXTRACT_HELPER(name, shift, nb) \
270 -static inline uint32_t name (uint32_t opcode) \ 270 +static always_inline uint32_t name (uint32_t opcode) \
271 { \ 271 { \
272 return (opcode >> (shift)) & ((1 << (nb)) - 1); \ 272 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
273 } 273 }
274 274
275 #define EXTRACT_SHELPER(name, shift, nb) \ 275 #define EXTRACT_SHELPER(name, shift, nb) \
276 -static inline int32_t name (uint32_t opcode) \ 276 +static always_inline int32_t name (uint32_t opcode) \
277 { \ 277 { \
278 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \ 278 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
279 } 279 }
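EXTRACT_HELPER and EXTRACT_SHELPER generate one tiny accessor per instruction field by shifting and masking the 32-bit opcode; the signed variant additionally sign-extends through the int16_t cast. A worked standalone example with two fields, using the PowerPC addi encoding purely as an illustration:

    #include <stdint.h>
    #include <assert.h>

    #define EXTRACT_FIELD(name, shift, nb)                         \
    static inline uint32_t name(uint32_t opcode)                   \
    {                                                              \
        return (opcode >> (shift)) & ((1u << (nb)) - 1);           \
    }

    EXTRACT_FIELD(ex_opc, 26, 6)    /* primary opcode              */
    EXTRACT_FIELD(ex_rD, 21, 5)     /* destination register field  */

    int main(void)
    {
        uint32_t insn = 0x38600001;  /* addi r3, 0, 1  ("li r3, 1") */
        assert(ex_opc(insn) == 14);  /* primary opcode of addi      */
        assert(ex_rD(insn) == 3);    /* writes r3                   */
        return 0;
    }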
@@ -304,7 +304,7 @@ EXTRACT_HELPER(crbA, 16, 5); @@ -304,7 +304,7 @@ EXTRACT_HELPER(crbA, 16, 5);
304 EXTRACT_HELPER(crbB, 11, 5); 304 EXTRACT_HELPER(crbB, 11, 5);
305 /* SPR / TBL */ 305 /* SPR / TBL */
306 EXTRACT_HELPER(_SPR, 11, 10); 306 EXTRACT_HELPER(_SPR, 11, 10);
307 -static inline uint32_t SPR (uint32_t opcode) 307 +static always_inline uint32_t SPR (uint32_t opcode)
308 { 308 {
309 uint32_t sprn = _SPR(opcode); 309 uint32_t sprn = _SPR(opcode);
310 310
@@ -336,12 +336,12 @@ EXTRACT_HELPER(FPIMM, 20, 4); @@ -336,12 +336,12 @@ EXTRACT_HELPER(FPIMM, 20, 4);
336 /* Displacement */ 336 /* Displacement */
337 EXTRACT_SHELPER(d, 0, 16); 337 EXTRACT_SHELPER(d, 0, 16);
338 /* Immediate address */ 338 /* Immediate address */
339 -static inline target_ulong LI (uint32_t opcode) 339 +static always_inline target_ulong LI (uint32_t opcode)
340 { 340 {
341 return (opcode >> 0) & 0x03FFFFFC; 341 return (opcode >> 0) & 0x03FFFFFC;
342 } 342 }
343 343
344 -static inline uint32_t BD (uint32_t opcode) 344 +static always_inline uint32_t BD (uint32_t opcode)
345 { 345 {
346 return (opcode >> 0) & 0xFFFC; 346 return (opcode >> 0) & 0xFFFC;
347 } 347 }
@@ -354,7 +354,7 @@ EXTRACT_HELPER(AA, 1, 1); @@ -354,7 +354,7 @@ EXTRACT_HELPER(AA, 1, 1);
354 EXTRACT_HELPER(LK, 0, 1); 354 EXTRACT_HELPER(LK, 0, 1);
355 355
356 /* Create a mask between <start> and <end> bits */ 356 /* Create a mask between <start> and <end> bits */
357 -static inline target_ulong MASK (uint32_t start, uint32_t end) 357 +static always_inline target_ulong MASK (uint32_t start, uint32_t end)
358 { 358 {
359 target_ulong ret; 359 target_ulong ret;
360 360
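The body of MASK() is elided by this hunk, but its contract is the architected PowerPC one: a run of ones from bit start through bit end in IBM numbering (bit 0 is the most significant), wrapping around when start > end. A hedged 32-bit reference model of that contract, not the exact code above:

    #include <stdint.h>

    static inline uint32_t mask32(unsigned start, unsigned end)
    {
        uint32_t ret = ((uint32_t)-1 >> start)
                     ^ (end == 31 ? 0 : (uint32_t)-1 >> (end + 1));
        return start > end ? ~ret : ret;   /* wrap-around case, as in rlwnm */
    }
    /* mask32(0, 7)  == 0xFF000000 : IBM bits 0..7 (the top byte)
     * mask32(28, 3) == 0xF000000F : wrapped mask used by rotate-and-mask insns */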
@@ -694,7 +694,7 @@ __GEN_INT_ARITH1_O_64(name##o, opc1, opc2, opc3 | 0x10, type) @@ -694,7 +694,7 @@ __GEN_INT_ARITH1_O_64(name##o, opc1, opc2, opc3 | 0x10, type)
694 #endif 694 #endif
695 695
696 /* add add. addo addo. */ 696 /* add add. addo addo. */
697 -static inline void gen_op_addo (void) 697 +static always_inline void gen_op_addo (void)
698 { 698 {
699 gen_op_move_T2_T0(); 699 gen_op_move_T2_T0();
700 gen_op_add(); 700 gen_op_add();
@@ -702,7 +702,7 @@ static inline void gen_op_addo (void) @@ -702,7 +702,7 @@ static inline void gen_op_addo (void)
702 } 702 }
703 #if defined(TARGET_PPC64) 703 #if defined(TARGET_PPC64)
704 #define gen_op_add_64 gen_op_add 704 #define gen_op_add_64 gen_op_add
705 -static inline void gen_op_addo_64 (void) 705 +static always_inline void gen_op_addo_64 (void)
706 { 706 {
707 gen_op_move_T2_T0(); 707 gen_op_move_T2_T0();
708 gen_op_add(); 708 gen_op_add();
@@ -711,13 +711,13 @@ static inline void gen_op_addo_64 (void) @@ -711,13 +711,13 @@ static inline void gen_op_addo_64 (void)
711 #endif 711 #endif
712 GEN_INT_ARITH2_64 (add, 0x1F, 0x0A, 0x08, PPC_INTEGER); 712 GEN_INT_ARITH2_64 (add, 0x1F, 0x0A, 0x08, PPC_INTEGER);
713 /* addc addc. addco addco. */ 713 /* addc addc. addco addco. */
714 -static inline void gen_op_addc (void) 714 +static always_inline void gen_op_addc (void)
715 { 715 {
716 gen_op_move_T2_T0(); 716 gen_op_move_T2_T0();
717 gen_op_add(); 717 gen_op_add();
718 gen_op_check_addc(); 718 gen_op_check_addc();
719 } 719 }
720 -static inline void gen_op_addco (void) 720 +static always_inline void gen_op_addco (void)
721 { 721 {
722 gen_op_move_T2_T0(); 722 gen_op_move_T2_T0();
723 gen_op_add(); 723 gen_op_add();
@@ -725,13 +725,13 @@ static inline void gen_op_addco (void) @@ -725,13 +725,13 @@ static inline void gen_op_addco (void)
725 gen_op_check_addo(); 725 gen_op_check_addo();
726 } 726 }
727 #if defined(TARGET_PPC64) 727 #if defined(TARGET_PPC64)
728 -static inline void gen_op_addc_64 (void) 728 +static always_inline void gen_op_addc_64 (void)
729 { 729 {
730 gen_op_move_T2_T0(); 730 gen_op_move_T2_T0();
731 gen_op_add(); 731 gen_op_add();
732 gen_op_check_addc_64(); 732 gen_op_check_addc_64();
733 } 733 }
734 -static inline void gen_op_addco_64 (void) 734 +static always_inline void gen_op_addco_64 (void)
735 { 735 {
736 gen_op_move_T2_T0(); 736 gen_op_move_T2_T0();
737 gen_op_add(); 737 gen_op_add();
@@ -741,14 +741,14 @@ static inline void gen_op_addco_64 (void) @@ -741,14 +741,14 @@ static inline void gen_op_addco_64 (void)
741 #endif 741 #endif
742 GEN_INT_ARITH2_64 (addc, 0x1F, 0x0A, 0x00, PPC_INTEGER); 742 GEN_INT_ARITH2_64 (addc, 0x1F, 0x0A, 0x00, PPC_INTEGER);
743 /* adde adde. addeo addeo. */ 743 /* adde adde. addeo addeo. */
744 -static inline void gen_op_addeo (void) 744 +static always_inline void gen_op_addeo (void)
745 { 745 {
746 gen_op_move_T2_T0(); 746 gen_op_move_T2_T0();
747 gen_op_adde(); 747 gen_op_adde();
748 gen_op_check_addo(); 748 gen_op_check_addo();
749 } 749 }
750 #if defined(TARGET_PPC64) 750 #if defined(TARGET_PPC64)
751 -static inline void gen_op_addeo_64 (void) 751 +static always_inline void gen_op_addeo_64 (void)
752 { 752 {
753 gen_op_move_T2_T0(); 753 gen_op_move_T2_T0();
754 gen_op_adde_64(); 754 gen_op_adde_64();
@@ -757,13 +757,13 @@ static inline void gen_op_addeo_64 (void) @@ -757,13 +757,13 @@ static inline void gen_op_addeo_64 (void)
757 #endif 757 #endif
758 GEN_INT_ARITH2_64 (adde, 0x1F, 0x0A, 0x04, PPC_INTEGER); 758 GEN_INT_ARITH2_64 (adde, 0x1F, 0x0A, 0x04, PPC_INTEGER);
759 /* addme addme. addmeo addmeo. */ 759 /* addme addme. addmeo addmeo. */
760 -static inline void gen_op_addme (void) 760 +static always_inline void gen_op_addme (void)
761 { 761 {
762 gen_op_move_T1_T0(); 762 gen_op_move_T1_T0();
763 gen_op_add_me(); 763 gen_op_add_me();
764 } 764 }
765 #if defined(TARGET_PPC64) 765 #if defined(TARGET_PPC64)
766 -static inline void gen_op_addme_64 (void) 766 +static always_inline void gen_op_addme_64 (void)
767 { 767 {
768 gen_op_move_T1_T0(); 768 gen_op_move_T1_T0();
769 gen_op_add_me_64(); 769 gen_op_add_me_64();
@@ -771,13 +771,13 @@ static inline void gen_op_addme_64 (void) @@ -771,13 +771,13 @@ static inline void gen_op_addme_64 (void)
771 #endif 771 #endif
772 GEN_INT_ARITH1_64 (addme, 0x1F, 0x0A, 0x07, PPC_INTEGER); 772 GEN_INT_ARITH1_64 (addme, 0x1F, 0x0A, 0x07, PPC_INTEGER);
773 /* addze addze. addzeo addzeo. */ 773 /* addze addze. addzeo addzeo. */
774 -static inline void gen_op_addze (void) 774 +static always_inline void gen_op_addze (void)
775 { 775 {
776 gen_op_move_T2_T0(); 776 gen_op_move_T2_T0();
777 gen_op_add_ze(); 777 gen_op_add_ze();
778 gen_op_check_addc(); 778 gen_op_check_addc();
779 } 779 }
780 -static inline void gen_op_addzeo (void) 780 +static always_inline void gen_op_addzeo (void)
781 { 781 {
782 gen_op_move_T2_T0(); 782 gen_op_move_T2_T0();
783 gen_op_add_ze(); 783 gen_op_add_ze();
@@ -785,13 +785,13 @@ static inline void gen_op_addzeo (void) @@ -785,13 +785,13 @@ static inline void gen_op_addzeo (void)
785 gen_op_check_addo(); 785 gen_op_check_addo();
786 } 786 }
787 #if defined(TARGET_PPC64) 787 #if defined(TARGET_PPC64)
788 -static inline void gen_op_addze_64 (void) 788 +static always_inline void gen_op_addze_64 (void)
789 { 789 {
790 gen_op_move_T2_T0(); 790 gen_op_move_T2_T0();
791 gen_op_add_ze(); 791 gen_op_add_ze();
792 gen_op_check_addc_64(); 792 gen_op_check_addc_64();
793 } 793 }
794 -static inline void gen_op_addzeo_64 (void) 794 +static always_inline void gen_op_addzeo_64 (void)
795 { 795 {
796 gen_op_move_T2_T0(); 796 gen_op_move_T2_T0();
797 gen_op_add_ze(); 797 gen_op_add_ze();
@@ -813,7 +813,7 @@ GEN_INT_ARITH2 (mullw, 0x1F, 0x0B, 0x07, PPC_INTEGER); @@ -813,7 +813,7 @@ GEN_INT_ARITH2 (mullw, 0x1F, 0x0B, 0x07, PPC_INTEGER);
813 /* neg neg. nego nego. */ 813 /* neg neg. nego nego. */
814 GEN_INT_ARITH1_64 (neg, 0x1F, 0x08, 0x03, PPC_INTEGER); 814 GEN_INT_ARITH1_64 (neg, 0x1F, 0x08, 0x03, PPC_INTEGER);
815 /* subf subf. subfo subfo. */ 815 /* subf subf. subfo subfo. */
816 -static inline void gen_op_subfo (void) 816 +static always_inline void gen_op_subfo (void)
817 { 817 {
818 gen_op_move_T2_T0(); 818 gen_op_move_T2_T0();
819 gen_op_subf(); 819 gen_op_subf();
@@ -821,7 +821,7 @@ static inline void gen_op_subfo (void) @@ -821,7 +821,7 @@ static inline void gen_op_subfo (void)
821 } 821 }
822 #if defined(TARGET_PPC64) 822 #if defined(TARGET_PPC64)
823 #define gen_op_subf_64 gen_op_subf 823 #define gen_op_subf_64 gen_op_subf
824 -static inline void gen_op_subfo_64 (void) 824 +static always_inline void gen_op_subfo_64 (void)
825 { 825 {
826 gen_op_move_T2_T0(); 826 gen_op_move_T2_T0();
827 gen_op_subf(); 827 gen_op_subf();
@@ -830,12 +830,12 @@ static inline void gen_op_subfo_64 (void) @@ -830,12 +830,12 @@ static inline void gen_op_subfo_64 (void)
830 #endif 830 #endif
831 GEN_INT_ARITH2_64 (subf, 0x1F, 0x08, 0x01, PPC_INTEGER); 831 GEN_INT_ARITH2_64 (subf, 0x1F, 0x08, 0x01, PPC_INTEGER);
832 /* subfc subfc. subfco subfco. */ 832 /* subfc subfc. subfco subfco. */
833 -static inline void gen_op_subfc (void) 833 +static always_inline void gen_op_subfc (void)
834 { 834 {
835 gen_op_subf(); 835 gen_op_subf();
836 gen_op_check_subfc(); 836 gen_op_check_subfc();
837 } 837 }
838 -static inline void gen_op_subfco (void) 838 +static always_inline void gen_op_subfco (void)
839 { 839 {
840 gen_op_move_T2_T0(); 840 gen_op_move_T2_T0();
841 gen_op_subf(); 841 gen_op_subf();
@@ -843,12 +843,12 @@ static inline void gen_op_subfco (void) @@ -843,12 +843,12 @@ static inline void gen_op_subfco (void)
843 gen_op_check_subfo(); 843 gen_op_check_subfo();
844 } 844 }
845 #if defined(TARGET_PPC64) 845 #if defined(TARGET_PPC64)
846 -static inline void gen_op_subfc_64 (void) 846 +static always_inline void gen_op_subfc_64 (void)
847 { 847 {
848 gen_op_subf(); 848 gen_op_subf();
849 gen_op_check_subfc_64(); 849 gen_op_check_subfc_64();
850 } 850 }
851 -static inline void gen_op_subfco_64 (void) 851 +static always_inline void gen_op_subfco_64 (void)
852 { 852 {
853 gen_op_move_T2_T0(); 853 gen_op_move_T2_T0();
854 gen_op_subf(); 854 gen_op_subf();
@@ -858,7 +858,7 @@ static inline void gen_op_subfco_64 (void) @@ -858,7 +858,7 @@ static inline void gen_op_subfco_64 (void)
858 #endif 858 #endif
859 GEN_INT_ARITH2_64 (subfc, 0x1F, 0x08, 0x00, PPC_INTEGER); 859 GEN_INT_ARITH2_64 (subfc, 0x1F, 0x08, 0x00, PPC_INTEGER);
860 /* subfe subfe. subfeo subfeo. */ 860 /* subfe subfe. subfeo subfeo. */
861 -static inline void gen_op_subfeo (void) 861 +static always_inline void gen_op_subfeo (void)
862 { 862 {
863 gen_op_move_T2_T0(); 863 gen_op_move_T2_T0();
864 gen_op_subfe(); 864 gen_op_subfe();
@@ -866,7 +866,7 @@ static inline void gen_op_subfeo (void) @@ -866,7 +866,7 @@ static inline void gen_op_subfeo (void)
866 } 866 }
867 #if defined(TARGET_PPC64) 867 #if defined(TARGET_PPC64)
868 #define gen_op_subfe_64 gen_op_subfe 868 #define gen_op_subfe_64 gen_op_subfe
869 -static inline void gen_op_subfeo_64 (void) 869 +static always_inline void gen_op_subfeo_64 (void)
870 { 870 {
871 gen_op_move_T2_T0(); 871 gen_op_move_T2_T0();
872 gen_op_subfe_64(); 872 gen_op_subfe_64();
@@ -1407,7 +1407,7 @@ GEN_HANDLER(name##3, opc1, opc2 | 0x11, 0xFF, 0x00000000, PPC_64B) \ @@ -1407,7 +1407,7 @@ GEN_HANDLER(name##3, opc1, opc2 | 0x11, 0xFF, 0x00000000, PPC_64B) \
1407 gen_##name(ctx, 1, 1); \ 1407 gen_##name(ctx, 1, 1); \
1408 } 1408 }
1409 1409
1410 -static inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask) 1410 +static always_inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask)
1411 { 1411 {
1412 if (mask >> 32) 1412 if (mask >> 32)
1413 gen_op_andi_T0_64(mask >> 32, mask & 0xFFFFFFFF); 1413 gen_op_andi_T0_64(mask >> 32, mask & 0xFFFFFFFF);
@@ -1415,7 +1415,7 @@ static inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask) @@ -1415,7 +1415,7 @@ static inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask)
1415 gen_op_andi_T0(mask); 1415 gen_op_andi_T0(mask);
1416 } 1416 }
1417 1417
1418 -static inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask) 1418 +static always_inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask)
1419 { 1419 {
1420 if (mask >> 32) 1420 if (mask >> 32)
1421 gen_op_andi_T1_64(mask >> 32, mask & 0xFFFFFFFF); 1421 gen_op_andi_T1_64(mask >> 32, mask & 0xFFFFFFFF);
@@ -1423,8 +1423,8 @@ static inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask) @@ -1423,8 +1423,8 @@ static inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask)
1423 gen_op_andi_T1(mask); 1423 gen_op_andi_T1(mask);
1424 } 1424 }
1425 1425
1426 -static inline void gen_rldinm (DisasContext *ctx, uint32_t mb, uint32_t me,  
1427 - uint32_t sh) 1426 +static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
  1427 + uint32_t me, uint32_t sh)
1428 { 1428 {
1429 gen_op_load_gpr_T0(rS(ctx->opcode)); 1429 gen_op_load_gpr_T0(rS(ctx->opcode));
1430 if (likely(sh == 0)) { 1430 if (likely(sh == 0)) {
@@ -1453,7 +1453,7 @@ static inline void gen_rldinm (DisasContext *ctx, uint32_t mb, uint32_t me, @@ -1453,7 +1453,7 @@ static inline void gen_rldinm (DisasContext *ctx, uint32_t mb, uint32_t me,
1453 gen_set_Rc0(ctx); 1453 gen_set_Rc0(ctx);
1454 } 1454 }
1455 /* rldicl - rldicl. */ 1455 /* rldicl - rldicl. */
1456 -static inline void gen_rldicl (DisasContext *ctx, int mbn, int shn) 1456 +static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1457 { 1457 {
1458 uint32_t sh, mb; 1458 uint32_t sh, mb;
1459 1459
@@ -1463,7 +1463,7 @@ static inline void gen_rldicl (DisasContext *ctx, int mbn, int shn) @@ -1463,7 +1463,7 @@ static inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1463 } 1463 }
1464 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 1464 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1465 /* rldicr - rldicr. */ 1465 /* rldicr - rldicr. */
1466 -static inline void gen_rldicr (DisasContext *ctx, int men, int shn) 1466 +static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1467 { 1467 {
1468 uint32_t sh, me; 1468 uint32_t sh, me;
1469 1469
@@ -1473,7 +1473,7 @@ static inline void gen_rldicr (DisasContext *ctx, int men, int shn) @@ -1473,7 +1473,7 @@ static inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1473 } 1473 }
1474 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 1474 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1475 /* rldic - rldic. */ 1475 /* rldic - rldic. */
1476 -static inline void gen_rldic (DisasContext *ctx, int mbn, int shn) 1476 +static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1477 { 1477 {
1478 uint32_t sh, mb; 1478 uint32_t sh, mb;
1479 1479
@@ -1483,7 +1483,8 @@ static inline void gen_rldic (DisasContext *ctx, int mbn, int shn) @@ -1483,7 +1483,8 @@ static inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1483 } 1483 }
1484 GEN_PPC64_R4(rldic, 0x1E, 0x04); 1484 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1485 1485
1486 -static inline void gen_rldnm (DisasContext *ctx, uint32_t mb, uint32_t me) 1486 +static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
  1487 + uint32_t me)
1487 { 1488 {
1488 gen_op_load_gpr_T0(rS(ctx->opcode)); 1489 gen_op_load_gpr_T0(rS(ctx->opcode));
1489 gen_op_load_gpr_T1(rB(ctx->opcode)); 1490 gen_op_load_gpr_T1(rB(ctx->opcode));
@@ -1497,7 +1498,7 @@ static inline void gen_rldnm (DisasContext *ctx, uint32_t mb, uint32_t me) @@ -1497,7 +1498,7 @@ static inline void gen_rldnm (DisasContext *ctx, uint32_t mb, uint32_t me)
1497 } 1498 }
1498 1499
1499 /* rldcl - rldcl. */ 1500 /* rldcl - rldcl. */
1500 -static inline void gen_rldcl (DisasContext *ctx, int mbn) 1501 +static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1501 { 1502 {
1502 uint32_t mb; 1503 uint32_t mb;
1503 1504
@@ -1506,7 +1507,7 @@ static inline void gen_rldcl (DisasContext *ctx, int mbn) @@ -1506,7 +1507,7 @@ static inline void gen_rldcl (DisasContext *ctx, int mbn)
1506 } 1507 }
1507 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 1508 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1508 /* rldcr - rldcr. */ 1509 /* rldcr - rldcr. */
1509 -static inline void gen_rldcr (DisasContext *ctx, int men) 1510 +static always_inline void gen_rldcr (DisasContext *ctx, int men)
1510 { 1511 {
1511 uint32_t me; 1512 uint32_t me;
1512 1513
@@ -1515,7 +1516,7 @@ static inline void gen_rldcr (DisasContext *ctx, int men) @@ -1515,7 +1516,7 @@ static inline void gen_rldcr (DisasContext *ctx, int men)
1515 } 1516 }
1516 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 1517 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1517 /* rldimi - rldimi. */ 1518 /* rldimi - rldimi. */
1518 -static inline void gen_rldimi (DisasContext *ctx, int mbn, int shn) 1519 +static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1519 { 1520 {
1520 uint64_t mask; 1521 uint64_t mask;
1521 uint32_t sh, mb; 1522 uint32_t sh, mb;
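All of the rld* helpers converted in the hunks above reduce to "rotate the 64-bit source left, then AND with (or insert under) a mask derived from mb/me". A compact reference model of the rldicl case under the usual IBM bit numbering, offered for orientation rather than as the generated-op sequence used here:

    #include <stdint.h>

    static inline uint64_t rotl64(uint64_t v, unsigned sh)
    {
        return sh ? (v << sh) | (v >> (64 - sh)) : v;
    }

    /* rldicl rA, rS, sh, mb : rotate left by sh, then keep the run of ones
     * from IBM bit mb through bit 63. */
    static inline uint64_t rldicl_ref(uint64_t rs, unsigned sh, unsigned mb)
    {
        uint64_t mask = (uint64_t)-1 >> mb;
        return rotl64(rs, sh) & mask;
    }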
@@ -1583,7 +1584,7 @@ __GEN_LOGICAL2(sld, 0x1B, 0x00, PPC_64B); @@ -1583,7 +1584,7 @@ __GEN_LOGICAL2(sld, 0x1B, 0x00, PPC_64B);
1583 /* srad & srad. */ 1584 /* srad & srad. */
1584 __GEN_LOGICAL2(srad, 0x1A, 0x18, PPC_64B); 1585 __GEN_LOGICAL2(srad, 0x1A, 0x18, PPC_64B);
1585 /* sradi & sradi. */ 1586 /* sradi & sradi. */
1586 -static inline void gen_sradi (DisasContext *ctx, int n) 1587 +static always_inline void gen_sradi (DisasContext *ctx, int n)
1587 { 1588 {
1588 uint64_t mask; 1589 uint64_t mask;
1589 int sh, mb, me; 1590 int sh, mb, me;
@@ -1937,7 +1938,8 @@ GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT) @@ -1937,7 +1938,8 @@ GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
1937 1938
1938 /*** Addressing modes ***/ 1939 /*** Addressing modes ***/
1939 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 1940 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
1940 -static inline void gen_addr_imm_index (DisasContext *ctx, target_long maskl) 1941 +static always_inline void gen_addr_imm_index (DisasContext *ctx,
  1942 + target_long maskl)
1941 { 1943 {
1942 target_long simm = SIMM(ctx->opcode); 1944 target_long simm = SIMM(ctx->opcode);
1943 1945
@@ -1954,7 +1956,7 @@ static inline void gen_addr_imm_index (DisasContext *ctx, target_long maskl) @@ -1954,7 +1956,7 @@ static inline void gen_addr_imm_index (DisasContext *ctx, target_long maskl)
1954 #endif 1956 #endif
1955 } 1957 }
1956 1958
1957 -static inline void gen_addr_reg_index (DisasContext *ctx) 1959 +static always_inline void gen_addr_reg_index (DisasContext *ctx)
1958 { 1960 {
1959 if (rA(ctx->opcode) == 0) { 1961 if (rA(ctx->opcode) == 0) {
1960 gen_op_load_gpr_T0(rB(ctx->opcode)); 1962 gen_op_load_gpr_T0(rB(ctx->opcode));
@@ -1968,7 +1970,7 @@ static inline void gen_addr_reg_index (DisasContext *ctx) @@ -1968,7 +1970,7 @@ static inline void gen_addr_reg_index (DisasContext *ctx)
1968 #endif 1970 #endif
1969 } 1971 }
1970 1972
1971 -static inline void gen_addr_register (DisasContext *ctx) 1973 +static always_inline void gen_addr_register (DisasContext *ctx)
1972 { 1974 {
1973 if (rA(ctx->opcode) == 0) { 1975 if (rA(ctx->opcode) == 0) {
1974 gen_op_reset_T0(); 1976 gen_op_reset_T0();
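The three gen_addr_* helpers implement the standard PowerPC addressing modes, where rA == 0 reads as the literal value 0 rather than the contents of GPR0. A small reference model of the immediate-index form, EA = (rA|0) + SIMM (the register file and types are illustrative):

    #include <stdint.h>

    static inline uint64_t ea_imm_index(const uint64_t *gpr, unsigned ra, int16_t simm)
    {
        return (ra == 0 ? 0 : gpr[ra]) + (int64_t)simm;   /* (rA|0) + SIMM */
    }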
@@ -2964,7 +2966,8 @@ OP_ST_TABLE(fiwx); @@ -2964,7 +2966,8 @@ OP_ST_TABLE(fiwx);
2964 GEN_STXF(fiwx, 0x17, 0x1E, PPC_FLOAT_STFIWX); 2966 GEN_STXF(fiwx, 0x17, 0x1E, PPC_FLOAT_STFIWX);
2965 2967
2966 /*** Branch ***/ 2968 /*** Branch ***/
2967 -static inline void gen_goto_tb (DisasContext *ctx, int n, target_ulong dest) 2969 +static always_inline void gen_goto_tb (DisasContext *ctx, int n,
  2970 + target_ulong dest)
2968 { 2971 {
2969 TranslationBlock *tb; 2972 TranslationBlock *tb;
2970 tb = ctx->tb; 2973 tb = ctx->tb;
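gen_goto_tb is the direct-block-chaining helper; the hunk shows only its opening, but the decision it typically encodes is whether the branch target lies on the same guest page as the current translation block, since only then is patching a direct jump safe. A hedged sketch of that test, illustrating the idea rather than the elided body:

    #include <stdint.h>

    #define PAGE_MASK_EXAMPLE (~(uint64_t)0xFFF)   /* 4 KiB pages, assumed */

    static inline int may_chain_tb(uint64_t tb_start_pc, uint64_t dest_pc)
    {
        return (tb_start_pc & PAGE_MASK_EXAMPLE)
            == (dest_pc     & PAGE_MASK_EXAMPLE);
    }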
@@ -2999,7 +3002,7 @@ static inline void gen_goto_tb (DisasContext *ctx, int n, target_ulong dest) @@ -2999,7 +3002,7 @@ static inline void gen_goto_tb (DisasContext *ctx, int n, target_ulong dest)
2999 } 3002 }
3000 } 3003 }
3001 3004
3002 -static inline void gen_setlr (DisasContext *ctx, target_ulong nip) 3005 +static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
3003 { 3006 {
3004 #if defined(TARGET_PPC64) 3007 #if defined(TARGET_PPC64)
3005 if (ctx->sf_mode != 0 && (nip >> 32)) 3008 if (ctx->sf_mode != 0 && (nip >> 32))
@@ -3039,7 +3042,7 @@ GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW) @@ -3039,7 +3042,7 @@ GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3039 #define BCOND_LR 1 3042 #define BCOND_LR 1
3040 #define BCOND_CTR 2 3043 #define BCOND_CTR 2
3041 3044
3042 -static inline void gen_bcond (DisasContext *ctx, int type) 3045 +static always_inline void gen_bcond (DisasContext *ctx, int type)
3043 { 3046 {
3044 target_ulong target = 0; 3047 target_ulong target = 0;
3045 target_ulong li; 3048 target_ulong li;
@@ -3399,7 +3402,7 @@ static void spr_noaccess (void *opaque, int sprn) @@ -3399,7 +3402,7 @@ static void spr_noaccess (void *opaque, int sprn)
3399 #endif 3402 #endif
3400 3403
3401 /* mfspr */ 3404 /* mfspr */
3402 -static inline void gen_op_mfspr (DisasContext *ctx) 3405 +static always_inline void gen_op_mfspr (DisasContext *ctx)
3403 { 3406 {
3404 void (*read_cb)(void *opaque, int sprn); 3407 void (*read_cb)(void *opaque, int sprn);
3405 uint32_t sprn = SPR(ctx->opcode); 3408 uint32_t sprn = SPR(ctx->opcode);
@@ -3765,7 +3768,8 @@ static GenOpFunc *gen_op_dcbz[4][4] = { @@ -3765,7 +3768,8 @@ static GenOpFunc *gen_op_dcbz[4][4] = {
3765 #endif 3768 #endif
3766 #endif 3769 #endif
3767 3770
3768 -static inline void handler_dcbz (DisasContext *ctx, int dcache_line_size) 3771 +static always_inline void handler_dcbz (DisasContext *ctx,
  3772 + int dcache_line_size)
3769 { 3773 {
3770 int n; 3774 int n;
3771 3775
@@ -4913,8 +4917,9 @@ GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_BOOKE_EXT) @@ -4913,8 +4917,9 @@ GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_BOOKE_EXT)
4913 } 4917 }
4914 4918
4915 /* All 405 MAC instructions are translated here */ 4919 /* All 405 MAC instructions are translated here */
4916 -static inline void gen_405_mulladd_insn (DisasContext *ctx, int opc2, int opc3,  
4917 - int ra, int rb, int rt, int Rc) 4920 +static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
  4921 + int opc2, int opc3,
  4922 + int ra, int rb, int rt, int Rc)
4918 { 4923 {
4919 gen_op_load_gpr_T0(ra); 4924 gen_op_load_gpr_T0(ra);
4920 gen_op_load_gpr_T1(rb); 4925 gen_op_load_gpr_T1(rb);
@@ -5551,13 +5556,13 @@ GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \ @@ -5551,13 +5556,13 @@ GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
5551 } 5556 }
5552 5557
5553 /* Handler for undefined SPE opcodes */ 5558 /* Handler for undefined SPE opcodes */
5554 -static inline void gen_speundef (DisasContext *ctx) 5559 +static always_inline void gen_speundef (DisasContext *ctx)
5555 { 5560 {
5556 GEN_EXCP_INVAL(ctx); 5561 GEN_EXCP_INVAL(ctx);
5557 } 5562 }
5558 5563
5559 /* SPE load and stores */ 5564 /* SPE load and stores */
5560 -static inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh) 5565 +static always_inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh)
5561 { 5566 {
5562 target_long simm = rB(ctx->opcode); 5567 target_long simm = rB(ctx->opcode);
5563 5568
@@ -5678,7 +5683,7 @@ static GenOpFunc *gen_op_spe_st##name[] = { \ @@ -5678,7 +5683,7 @@ static GenOpFunc *gen_op_spe_st##name[] = { \
5678 #endif /* defined(CONFIG_USER_ONLY) */ 5683 #endif /* defined(CONFIG_USER_ONLY) */
5679 5684
5680 #define GEN_SPE_LD(name, sh) \ 5685 #define GEN_SPE_LD(name, sh) \
5681 -static inline void gen_evl##name (DisasContext *ctx) \ 5686 +static always_inline void gen_evl##name (DisasContext *ctx) \
5682 { \ 5687 { \
5683 if (unlikely(!ctx->spe_enabled)) { \ 5688 if (unlikely(!ctx->spe_enabled)) { \
5684 GEN_EXCP_NO_AP(ctx); \ 5689 GEN_EXCP_NO_AP(ctx); \
@@ -5690,7 +5695,7 @@ static inline void gen_evl##name (DisasContext *ctx) \ @@ -5690,7 +5695,7 @@ static inline void gen_evl##name (DisasContext *ctx) \
5690 } 5695 }
5691 5696
5692 #define GEN_SPE_LDX(name) \ 5697 #define GEN_SPE_LDX(name) \
5693 -static inline void gen_evl##name##x (DisasContext *ctx) \ 5698 +static always_inline void gen_evl##name##x (DisasContext *ctx) \
5694 { \ 5699 { \
5695 if (unlikely(!ctx->spe_enabled)) { \ 5700 if (unlikely(!ctx->spe_enabled)) { \
5696 GEN_EXCP_NO_AP(ctx); \ 5701 GEN_EXCP_NO_AP(ctx); \
@@ -5707,7 +5712,7 @@ GEN_SPE_LD(name, sh); \ @@ -5707,7 +5712,7 @@ GEN_SPE_LD(name, sh); \
5707 GEN_SPE_LDX(name) 5712 GEN_SPE_LDX(name)
5708 5713
5709 #define GEN_SPE_ST(name, sh) \ 5714 #define GEN_SPE_ST(name, sh) \
5710 -static inline void gen_evst##name (DisasContext *ctx) \ 5715 +static always_inline void gen_evst##name (DisasContext *ctx) \
5711 { \ 5716 { \
5712 if (unlikely(!ctx->spe_enabled)) { \ 5717 if (unlikely(!ctx->spe_enabled)) { \
5713 GEN_EXCP_NO_AP(ctx); \ 5718 GEN_EXCP_NO_AP(ctx); \
@@ -5719,7 +5724,7 @@ static inline void gen_evst##name (DisasContext *ctx) \ @@ -5719,7 +5724,7 @@ static inline void gen_evst##name (DisasContext *ctx) \
5719 } 5724 }
5720 5725
5721 #define GEN_SPE_STX(name) \ 5726 #define GEN_SPE_STX(name) \
5722 -static inline void gen_evst##name##x (DisasContext *ctx) \ 5727 +static always_inline void gen_evst##name##x (DisasContext *ctx) \
5723 { \ 5728 { \
5724 if (unlikely(!ctx->spe_enabled)) { \ 5729 if (unlikely(!ctx->spe_enabled)) { \
5725 GEN_EXCP_NO_AP(ctx); \ 5730 GEN_EXCP_NO_AP(ctx); \
@@ -5741,7 +5746,7 @@ GEN_SPEOP_ST(name, sh) @@ -5741,7 +5746,7 @@ GEN_SPEOP_ST(name, sh)
5741 5746
5742 /* SPE arithmetic and logic */ 5747 /* SPE arithmetic and logic */
5743 #define GEN_SPEOP_ARITH2(name) \ 5748 #define GEN_SPEOP_ARITH2(name) \
5744 -static inline void gen_##name (DisasContext *ctx) \ 5749 +static always_inline void gen_##name (DisasContext *ctx) \
5745 { \ 5750 { \
5746 if (unlikely(!ctx->spe_enabled)) { \ 5751 if (unlikely(!ctx->spe_enabled)) { \
5747 GEN_EXCP_NO_AP(ctx); \ 5752 GEN_EXCP_NO_AP(ctx); \
@@ -5754,7 +5759,7 @@ static inline void gen_##name (DisasContext *ctx) \ @@ -5754,7 +5759,7 @@ static inline void gen_##name (DisasContext *ctx) \
5754 } 5759 }
5755 5760
5756 #define GEN_SPEOP_ARITH1(name) \ 5761 #define GEN_SPEOP_ARITH1(name) \
5757 -static inline void gen_##name (DisasContext *ctx) \ 5762 +static always_inline void gen_##name (DisasContext *ctx) \
5758 { \ 5763 { \
5759 if (unlikely(!ctx->spe_enabled)) { \ 5764 if (unlikely(!ctx->spe_enabled)) { \
5760 GEN_EXCP_NO_AP(ctx); \ 5765 GEN_EXCP_NO_AP(ctx); \
@@ -5766,7 +5771,7 @@ static inline void gen_##name (DisasContext *ctx) \ @@ -5766,7 +5771,7 @@ static inline void gen_##name (DisasContext *ctx) \
5766 } 5771 }
5767 5772
5768 #define GEN_SPEOP_COMP(name) \ 5773 #define GEN_SPEOP_COMP(name) \
5769 -static inline void gen_##name (DisasContext *ctx) \ 5774 +static always_inline void gen_##name (DisasContext *ctx) \
5770 { \ 5775 { \
5771 if (unlikely(!ctx->spe_enabled)) { \ 5776 if (unlikely(!ctx->spe_enabled)) { \
5772 GEN_EXCP_NO_AP(ctx); \ 5777 GEN_EXCP_NO_AP(ctx); \
@@ -5806,7 +5811,7 @@ GEN_SPEOP_ARITH1(evextsh); @@ -5806,7 +5811,7 @@ GEN_SPEOP_ARITH1(evextsh);
5806 GEN_SPEOP_ARITH1(evrndw); 5811 GEN_SPEOP_ARITH1(evrndw);
5807 GEN_SPEOP_ARITH1(evcntlzw); 5812 GEN_SPEOP_ARITH1(evcntlzw);
5808 GEN_SPEOP_ARITH1(evcntlsw); 5813 GEN_SPEOP_ARITH1(evcntlsw);
5809 -static inline void gen_brinc (DisasContext *ctx) 5814 +static always_inline void gen_brinc (DisasContext *ctx)
5810 { 5815 {
5811 /* Note: brinc is usable even if SPE is disabled */ 5816 /* Note: brinc is usable even if SPE is disabled */
5812 gen_op_load_gpr64_T0(rA(ctx->opcode)); 5817 gen_op_load_gpr64_T0(rA(ctx->opcode));
@@ -5816,7 +5821,7 @@ static inline void gen_brinc (DisasContext *ctx) @@ -5816,7 +5821,7 @@ static inline void gen_brinc (DisasContext *ctx)
5816 } 5821 }
5817 5822
5818 #define GEN_SPEOP_ARITH_IMM2(name) \ 5823 #define GEN_SPEOP_ARITH_IMM2(name) \
5819 -static inline void gen_##name##i (DisasContext *ctx) \ 5824 +static always_inline void gen_##name##i (DisasContext *ctx) \
5820 { \ 5825 { \
5821 if (unlikely(!ctx->spe_enabled)) { \ 5826 if (unlikely(!ctx->spe_enabled)) { \
5822 GEN_EXCP_NO_AP(ctx); \ 5827 GEN_EXCP_NO_AP(ctx); \
@@ -5829,7 +5834,7 @@ static inline void gen_##name##i (DisasContext *ctx) \ @@ -5829,7 +5834,7 @@ static inline void gen_##name##i (DisasContext *ctx) \
5829 } 5834 }
5830 5835
5831 #define GEN_SPEOP_LOGIC_IMM2(name) \ 5836 #define GEN_SPEOP_LOGIC_IMM2(name) \
5832 -static inline void gen_##name##i (DisasContext *ctx) \ 5837 +static always_inline void gen_##name##i (DisasContext *ctx) \
5833 { \ 5838 { \
5834 if (unlikely(!ctx->spe_enabled)) { \ 5839 if (unlikely(!ctx->spe_enabled)) { \
5835 GEN_EXCP_NO_AP(ctx); \ 5840 GEN_EXCP_NO_AP(ctx); \
@@ -5852,7 +5857,7 @@ GEN_SPEOP_LOGIC_IMM2(evsrws); @@ -5852,7 +5857,7 @@ GEN_SPEOP_LOGIC_IMM2(evsrws);
5852 #define gen_evsrwiu gen_evsrwui 5857 #define gen_evsrwiu gen_evsrwui
5853 GEN_SPEOP_LOGIC_IMM2(evrlw); 5858 GEN_SPEOP_LOGIC_IMM2(evrlw);
5854 5859
5855 -static inline void gen_evsplati (DisasContext *ctx) 5860 +static always_inline void gen_evsplati (DisasContext *ctx)
5856 { 5861 {
5857 int32_t imm = (int32_t)(rA(ctx->opcode) << 27) >> 27; 5862 int32_t imm = (int32_t)(rA(ctx->opcode) << 27) >> 27;
5858 5863
@@ -5860,7 +5865,7 @@ static inline void gen_evsplati (DisasContext *ctx) @@ -5860,7 +5865,7 @@ static inline void gen_evsplati (DisasContext *ctx)
5860 gen_op_store_T0_gpr64(rD(ctx->opcode)); 5865 gen_op_store_T0_gpr64(rD(ctx->opcode));
5861 } 5866 }
5862 5867
5863 -static inline void gen_evsplatfi (DisasContext *ctx) 5868 +static always_inline void gen_evsplatfi (DisasContext *ctx)
5864 { 5869 {
5865 uint32_t imm = rA(ctx->opcode) << 27; 5870 uint32_t imm = rA(ctx->opcode) << 27;
5866 5871
@@ -5901,7 +5906,7 @@ GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); //// @@ -5901,7 +5906,7 @@ GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
5901 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); //// 5906 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
5902 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); //// 5907 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
5903 5908
5904 -static inline void gen_evsel (DisasContext *ctx) 5909 +static always_inline void gen_evsel (DisasContext *ctx)
5905 { 5910 {
5906 if (unlikely(!ctx->spe_enabled)) { 5911 if (unlikely(!ctx->spe_enabled)) {
5907 GEN_EXCP_NO_AP(ctx); 5912 GEN_EXCP_NO_AP(ctx);
@@ -5991,13 +5996,13 @@ GEN_SPEOP_ST(who, 2); @@ -5991,13 +5996,13 @@ GEN_SPEOP_ST(who, 2);
5991 #endif 5996 #endif
5992 #endif 5997 #endif
5993 #define _GEN_OP_SPE_STWWE(suffix) \ 5998 #define _GEN_OP_SPE_STWWE(suffix) \
5994 -static inline void gen_op_spe_stwwe_##suffix (void) \ 5999 +static always_inline void gen_op_spe_stwwe_##suffix (void) \
5995 { \ 6000 { \
5996 gen_op_srli32_T1_64(); \ 6001 gen_op_srli32_T1_64(); \
5997 gen_op_spe_stwwo_##suffix(); \ 6002 gen_op_spe_stwwo_##suffix(); \
5998 } 6003 }
5999 #define _GEN_OP_SPE_STWWE_LE(suffix) \ 6004 #define _GEN_OP_SPE_STWWE_LE(suffix) \
6000 -static inline void gen_op_spe_stwwe_le_##suffix (void) \ 6005 +static always_inline void gen_op_spe_stwwe_le_##suffix (void) \
6001 { \ 6006 { \
6002 gen_op_srli32_T1_64(); \ 6007 gen_op_srli32_T1_64(); \
6003 gen_op_spe_stwwo_le_##suffix(); \ 6008 gen_op_spe_stwwo_le_##suffix(); \
@@ -6006,12 +6011,12 @@ static inline void gen_op_spe_stwwe_le_##suffix (void) \ @@ -6006,12 +6011,12 @@ static inline void gen_op_spe_stwwe_le_##suffix (void) \
6006 #define GEN_OP_SPE_STWWE(suffix) \ 6011 #define GEN_OP_SPE_STWWE(suffix) \
6007 _GEN_OP_SPE_STWWE(suffix); \ 6012 _GEN_OP_SPE_STWWE(suffix); \
6008 _GEN_OP_SPE_STWWE_LE(suffix); \ 6013 _GEN_OP_SPE_STWWE_LE(suffix); \
6009 -static inline void gen_op_spe_stwwe_64_##suffix (void) \ 6014 +static always_inline void gen_op_spe_stwwe_64_##suffix (void) \
6010 { \ 6015 { \
6011 gen_op_srli32_T1_64(); \ 6016 gen_op_srli32_T1_64(); \
6012 gen_op_spe_stwwo_64_##suffix(); \ 6017 gen_op_spe_stwwo_64_##suffix(); \
6013 } \ 6018 } \
6014 -static inline void gen_op_spe_stwwe_le_64_##suffix (void) \ 6019 +static always_inline void gen_op_spe_stwwe_le_64_##suffix (void) \
6015 { \ 6020 { \
6016 gen_op_srli32_T1_64(); \ 6021 gen_op_srli32_T1_64(); \
6017 gen_op_spe_stwwo_le_64_##suffix(); \ 6022 gen_op_spe_stwwo_le_64_##suffix(); \
@@ -6031,21 +6036,21 @@ GEN_SPEOP_ST(wwe, 2); @@ -6031,21 +6036,21 @@ GEN_SPEOP_ST(wwe, 2);
6031 GEN_SPEOP_ST(wwo, 2); 6036 GEN_SPEOP_ST(wwo, 2);
6032 6037
6033 #define GEN_SPE_LDSPLAT(name, op, suffix) \ 6038 #define GEN_SPE_LDSPLAT(name, op, suffix) \
6034 -static inline void gen_op_spe_l##name##_##suffix (void) \ 6039 +static always_inline void gen_op_spe_l##name##_##suffix (void) \
6035 { \ 6040 { \
6036 gen_op_##op##_##suffix(); \ 6041 gen_op_##op##_##suffix(); \
6037 gen_op_splatw_T1_64(); \ 6042 gen_op_splatw_T1_64(); \
6038 } 6043 }
6039 6044
6040 #define GEN_OP_SPE_LHE(suffix) \ 6045 #define GEN_OP_SPE_LHE(suffix) \
6041 -static inline void gen_op_spe_lhe_##suffix (void) \ 6046 +static always_inline void gen_op_spe_lhe_##suffix (void) \
6042 { \ 6047 { \
6043 gen_op_spe_lh_##suffix(); \ 6048 gen_op_spe_lh_##suffix(); \
6044 gen_op_sli16_T1_64(); \ 6049 gen_op_sli16_T1_64(); \
6045 } 6050 }
6046 6051
6047 #define GEN_OP_SPE_LHX(suffix) \ 6052 #define GEN_OP_SPE_LHX(suffix) \
6048 -static inline void gen_op_spe_lhx_##suffix (void) \ 6053 +static always_inline void gen_op_spe_lhx_##suffix (void) \
6049 { \ 6054 { \
6050 gen_op_spe_lh_##suffix(); \ 6055 gen_op_spe_lh_##suffix(); \
6051 gen_op_extsh_T1_64(); \ 6056 gen_op_extsh_T1_64(); \
@@ -6221,7 +6226,7 @@ GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE); @@ -6221,7 +6226,7 @@ GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
6221 6226
6222 /*** SPE floating-point extension ***/ 6227 /*** SPE floating-point extension ***/
6223 #define GEN_SPEFPUOP_CONV(name) \ 6228 #define GEN_SPEFPUOP_CONV(name) \
6224 -static inline void gen_##name (DisasContext *ctx) \ 6229 +static always_inline void gen_##name (DisasContext *ctx) \
6225 { \ 6230 { \
6226 gen_op_load_gpr64_T0(rB(ctx->opcode)); \ 6231 gen_op_load_gpr64_T0(rB(ctx->opcode)); \
6227 gen_op_##name(); \ 6232 gen_op_##name(); \
@@ -6376,7 +6381,7 @@ GEN_OPCODE_MARK(end); @@ -6376,7 +6381,7 @@ GEN_OPCODE_MARK(end);
6376 6381
6377 /*****************************************************************************/ 6382 /*****************************************************************************/
6378 /* Misc PowerPC helpers */ 6383 /* Misc PowerPC helpers */
6379 -static inline uint32_t load_xer (CPUState *env) 6384 +static always_inline uint32_t load_xer (CPUState *env)
6380 { 6385 {
6381 return (xer_so << XER_SO) | 6386 return (xer_so << XER_SO) |
6382 (xer_ov << XER_OV) | 6387 (xer_ov << XER_OV) |
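load_xer reassembles the architected XER image from the flag variables the translator keeps split apart (xer_so, xer_ov, xer_ca and the byte count). A standalone sketch of the same packing, with QEMU-style bit positions assumed rather than quoted from this file:

    #include <stdint.h>

    enum { XER_SO = 31, XER_OV = 30, XER_CA = 29 };   /* assumed positions */

    static inline uint32_t pack_xer(uint32_t so, uint32_t ov, uint32_t ca,
                                    uint32_t byte_count)
    {
        return (so << XER_SO) | (ov << XER_OV) | (ca << XER_CA)
             | (byte_count & 0x7F);                   /* low bits: lswx/stswx count */
    }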
@@ -6507,9 +6512,9 @@ void cpu_dump_statistics (CPUState *env, FILE*f, @@ -6507,9 +6512,9 @@ void cpu_dump_statistics (CPUState *env, FILE*f,
6507 } 6512 }
6508 6513
6509 /*****************************************************************************/ 6514 /*****************************************************************************/
6510 -static inline int gen_intermediate_code_internal (CPUState *env,  
6511 - TranslationBlock *tb,  
6512 - int search_pc) 6515 +static always_inline int gen_intermediate_code_internal (CPUState *env,
  6516 + TranslationBlock *tb,
  6517 + int search_pc)
6513 { 6518 {
6514 DisasContext ctx, *ctxp = &ctx; 6519 DisasContext ctx, *ctxp = &ctx;
6515 opc_handler_t **table, *handler; 6520 opc_handler_t **table, *handler;