Commit 9f6839d4b1721524a10c77768f73697cf1724ce5
Committed by Anthony Liguori
1 parent 1db6947d
Remove dead i386 assembly code from softmmu_header.h
This patch removes dead i386 assembly code from softmmu_header.h. The code is conditional on ASM_SOFTMMU, which is never defined. Optimisation for the fast path is already handled by tcg_out_qemu_ld() and tcg_out_qemu_st(), so there seems to be little need for this code.

Signed-off-by: Stuart Brady <stuart.brady@gmail.com>
Signed-off-by: Anthony Liguori <aliguori@us.ibm.com>
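For readers unfamiliar with the removed fast path: it was the inline-assembly form of the usual softmmu TLB lookup, which TCG now generates directly through tcg_out_qemu_ld() and tcg_out_qemu_st(). Below is a minimal C sketch of that logic; the names and constants (tlb_entry_t, slow_ldl_mmu, TLB_SIZE, and so on) are illustrative stand-ins for this note only, not QEMU's actual definitions.

    /*
     * Hedged sketch only (not QEMU code): the TLB fast-path logic that the
     * removed inline assembly implemented, written out in plain C.
     */
    #include <stdint.h>

    #define TLB_SIZE   256   /* stand-in for CPU_TLB_SIZE */
    #define PAGE_BITS  12    /* stand-in for TARGET_PAGE_BITS */
    #define PAGE_MASK  (~((uint32_t)((1u << PAGE_BITS) - 1)))
    #define DATA_SIZE  4     /* the 32-bit load case */
    #define NB_MODES   2     /* stand-in for NB_MMU_MODES */

    typedef struct {
        uint32_t  addr_read;  /* page-aligned tag for readable pages */
        uintptr_t addend;     /* guest-virtual to host-virtual offset */
    } tlb_entry_t;

    extern tlb_entry_t tlb_table[NB_MODES][TLB_SIZE];
    extern uint32_t slow_ldl_mmu(uint32_t addr, int mmu_idx);  /* slow-path helper */

    static inline uint32_t fast_ldl(uint32_t addr, int mmu_idx)
    {
        unsigned idx = (addr >> PAGE_BITS) & (TLB_SIZE - 1);
        tlb_entry_t *e = &tlb_table[mmu_idx][idx];

        /* The tag compare covers the page bits plus the low alignment bits,
         * so an unaligned access also falls through to the slow path. */
        if ((addr & (PAGE_MASK | (DATA_SIZE - 1))) == e->addr_read) {
            return *(uint32_t *)(addr + e->addend);   /* TLB hit: direct load */
        }
        return slow_ldl_mmu(addr, mmu_idx);           /* TLB miss: call helper */
    }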
Showing 1 changed file with 0 additions and 146 deletions
softmmu_header.h
@@ -69,150 +69,6 @@
 #define ADDR_READ addr_read
 #endif
 
-#if (DATA_SIZE <= 4) && (TARGET_LONG_BITS == 32) && defined(__i386__) && \
-    (ACCESS_TYPE < NB_MMU_MODES) && defined(ASM_SOFTMMU)
-
-static inline RES_TYPE glue(glue(ld, USUFFIX), MEMSUFFIX)(target_ulong ptr)
-{
-    int res;
-
-    asm volatile ("movl %1, %%edx\n"
-                  "movl %1, %%eax\n"
-                  "shrl %3, %%edx\n"
-                  "andl %4, %%eax\n"
-                  "andl %2, %%edx\n"
-                  "leal %5(%%edx, %%ebp), %%edx\n"
-                  "cmpl (%%edx), %%eax\n"
-                  "movl %1, %%eax\n"
-                  "je 1f\n"
-                  "movl %6, %%edx\n"
-                  "call %7\n"
-                  "movl %%eax, %0\n"
-                  "jmp 2f\n"
-                  "1:\n"
-                  "addl 12(%%edx), %%eax\n"
-#if DATA_SIZE == 1
-                  "movzbl (%%eax), %0\n"
-#elif DATA_SIZE == 2
-                  "movzwl (%%eax), %0\n"
-#elif DATA_SIZE == 4
-                  "movl (%%eax), %0\n"
-#else
-#error unsupported size
-#endif
-                  "2:\n"
-                  : "=r" (res)
-                  : "r" (ptr),
-                    "i" ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_MASK | (DATA_SIZE - 1)),
-                    "m" (*(uint32_t *)offsetof(CPUState, tlb_table[CPU_MMU_INDEX][0].addr_read)),
-                    "i" (CPU_MMU_INDEX),
-                    "m" (*(uint8_t *)&glue(glue(__ld, SUFFIX), MMUSUFFIX))
-                  : "%eax", "%ecx", "%edx", "memory", "cc");
-    return res;
-}
-
-#if DATA_SIZE <= 2
-static inline int glue(glue(lds, SUFFIX), MEMSUFFIX)(target_ulong ptr)
-{
-    int res;
-
-    asm volatile ("movl %1, %%edx\n"
-                  "movl %1, %%eax\n"
-                  "shrl %3, %%edx\n"
-                  "andl %4, %%eax\n"
-                  "andl %2, %%edx\n"
-                  "leal %5(%%edx, %%ebp), %%edx\n"
-                  "cmpl (%%edx), %%eax\n"
-                  "movl %1, %%eax\n"
-                  "je 1f\n"
-                  "movl %6, %%edx\n"
-                  "call %7\n"
-#if DATA_SIZE == 1
-                  "movsbl %%al, %0\n"
-#elif DATA_SIZE == 2
-                  "movswl %%ax, %0\n"
-#else
-#error unsupported size
-#endif
-                  "jmp 2f\n"
-                  "1:\n"
-                  "addl 12(%%edx), %%eax\n"
-#if DATA_SIZE == 1
-                  "movsbl (%%eax), %0\n"
-#elif DATA_SIZE == 2
-                  "movswl (%%eax), %0\n"
-#else
-#error unsupported size
-#endif
-                  "2:\n"
-                  : "=r" (res)
-                  : "r" (ptr),
-                    "i" ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_MASK | (DATA_SIZE - 1)),
-                    "m" (*(uint32_t *)offsetof(CPUState, tlb_table[CPU_MMU_INDEX][0].addr_read)),
-                    "i" (CPU_MMU_INDEX),
-                    "m" (*(uint8_t *)&glue(glue(__ld, SUFFIX), MMUSUFFIX))
-                  : "%eax", "%ecx", "%edx", "memory", "cc");
-    return res;
-}
-#endif
-
-static inline void glue(glue(st, SUFFIX), MEMSUFFIX)(target_ulong ptr, RES_TYPE v)
-{
-    asm volatile ("movl %0, %%edx\n"
-                  "movl %0, %%eax\n"
-                  "shrl %3, %%edx\n"
-                  "andl %4, %%eax\n"
-                  "andl %2, %%edx\n"
-                  "leal %5(%%edx, %%ebp), %%edx\n"
-                  "cmpl (%%edx), %%eax\n"
-                  "movl %0, %%eax\n"
-                  "je 1f\n"
-#if DATA_SIZE == 1
-                  "movzbl %b1, %%edx\n"
-#elif DATA_SIZE == 2
-                  "movzwl %w1, %%edx\n"
-#elif DATA_SIZE == 4
-                  "movl %1, %%edx\n"
-#else
-#error unsupported size
-#endif
-                  "movl %6, %%ecx\n"
-                  "call %7\n"
-                  "jmp 2f\n"
-                  "1:\n"
-                  "addl 8(%%edx), %%eax\n"
-#if DATA_SIZE == 1
-                  "movb %b1, (%%eax)\n"
-#elif DATA_SIZE == 2
-                  "movw %w1, (%%eax)\n"
-#elif DATA_SIZE == 4
-                  "movl %1, (%%eax)\n"
-#else
-#error unsupported size
-#endif
-                  "2:\n"
-                  :
-                  : "r" (ptr),
-#if DATA_SIZE == 1
-                    "q" (v),
-#else
-                    "r" (v),
-#endif
-                    "i" ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_MASK | (DATA_SIZE - 1)),
-                    "m" (*(uint32_t *)offsetof(CPUState, tlb_table[CPU_MMU_INDEX][0].addr_write)),
-                    "i" (CPU_MMU_INDEX),
-                    "m" (*(uint8_t *)&glue(glue(__st, SUFFIX), MMUSUFFIX))
-                  : "%eax", "%ecx", "%edx", "memory", "cc");
-}
-
-#else
-
 /* generic load/store macros */
 
 static inline RES_TYPE glue(glue(ld, USUFFIX), MEMSUFFIX)(target_ulong ptr)
@@ -283,8 +139,6 @@ static inline void glue(glue(st, SUFFIX), MEMSUFFIX)(target_ulong ptr, RES_TYPE
 
 #endif /* ACCESS_TYPE != (NB_MMU_MODES + 1) */
 
-#endif /* !asm */
-
 #if ACCESS_TYPE != (NB_MMU_MODES + 1)
 
 #if DATA_SIZE == 8