x86: improve operand reversal
[deliverable/binutils-gdb.git] / gas / testsuite / gas / i386 / x86-64-pseudos.s
# Check 64bit instructions with pseudo prefixes for encoding
#
# Each group exercises one mnemonic under the gas pseudo prefixes:
#   {load}/{store}   - force the load (reg <- r/m) or store (r/m <- reg) form
#   {vex2}/{vex3}    - force 2- vs 3-byte VEX encoding
#   {evex}           - force EVEX encoding
#   {disp8}/{disp32} - force 8- vs 32-bit displacement
#   {rex}            - force a REX prefix (or warn where impossible)

	.text
_start:
	{vex3} vmovaps %xmm7,%xmm2
	{vex3} {load} vmovaps %xmm7,%xmm2
	{vex3} {store} vmovaps %xmm7,%xmm2
	vmovaps %xmm7,%xmm2
	{vex2} vmovaps %xmm7,%xmm2
	{vex2} {load} vmovaps %xmm7,%xmm2
	{vex2} {store} vmovaps %xmm7,%xmm2
	{vex3} vmovaps (%rax),%xmm2
	vmovaps (%rax),%xmm2
	{vex2} vmovaps (%rax),%xmm2
	{evex} vmovaps (%rax),%xmm2
	{disp32} vmovaps (%rax),%xmm2
	{evex} {disp8} vmovaps (%rax),%xmm2
	{evex} {disp32} vmovaps (%rax),%xmm2

	# Register-to-register forms of the two-way (reg,reg) ALU/mov
	# instructions: {load}/{store} select between the two opcodes.
	mov %rcx, %rax
	{load} mov %rcx, %rax
	{store} mov %rcx, %rax
	adc %ecx, %eax
	{load} adc %ecx, %eax
	{store} adc %ecx, %eax
	add %ecx, %eax
	{load} add %ecx, %eax
	{store} add %ecx, %eax
	and %ecx, %eax
	{load} and %ecx, %eax
	{store} and %ecx, %eax
	cmp %ecx, %eax
	{load} cmp %ecx, %eax
	{store} cmp %ecx, %eax
	or %ecx, %eax
	{load} or %ecx, %eax
	{store} or %ecx, %eax
	sbb %ecx, %eax
	{load} sbb %ecx, %eax
	{store} sbb %ecx, %eax
	sub %ecx, %eax
	{load} sub %ecx, %eax
	{store} sub %ecx, %eax
	xor %ecx, %eax
	{load} xor %ecx, %eax
	{store} xor %ecx, %eax

	# Memory forms: the pseudo prefix must not change which operand is
	# the memory operand, only the encoding chosen when both exist.
	{load} mov 0x12345678, %eax
	{load} mov %eax, 0x12345678
	{store} mov 0x12345678, %eax
	{store} mov %eax, 0x12345678
	{load} mov 0x123456789abcdef0, %eax
	{load} mov %eax, 0x123456789abcdef0
	{store} mov 0x123456789abcdef0, %eax
	{store} mov %eax, 0x123456789abcdef0
	{load} movabs 0x123456789abcdef0, %eax
	{load} movabs %eax, 0x123456789abcdef0
	{store} movabs 0x123456789abcdef0, %eax
	{store} movabs %eax, 0x123456789abcdef0
	{load} mov %eax, (%rdi)
	{load} mov (%rdi), %eax
	{store} mov %eax, (%rdi)
	{store} mov (%rdi), %eax
	{load} mov %es, %edi
	{load} mov %eax, %gs
	{store} mov %es, %edi
	{store} mov %eax, %gs
	{load} mov %cr0, %rdi
	{load} mov %rax, %cr7
	{store} mov %cr0, %rdi
	{store} mov %rax, %cr7
	{load} mov %dr0, %rdi
	{load} mov %rax, %dr7
	{store} mov %dr0, %rdi
	{store} mov %rax, %dr7
	{load} adc %eax, (%rdi)
	{load} adc (%rdi), %eax
	{store} adc %eax, (%rdi)
	{store} adc (%rdi), %eax
	{load} add %eax, (%rdi)
	{load} add (%rdi), %eax
	{store} add %eax, (%rdi)
	{store} add (%rdi), %eax
	{load} and %eax, (%rdi)
	{load} and (%rdi), %eax
	{store} and %eax, (%rdi)
	{store} and (%rdi), %eax
	{load} cmp %eax, (%rdi)
	{load} cmp (%rdi), %eax
	{store} cmp %eax, (%rdi)
	{store} cmp (%rdi), %eax
	{load} or %eax, (%rdi)
	{load} or (%rdi), %eax
	{store} or %eax, (%rdi)
	{store} or (%rdi), %eax
	{load} sbb %eax, (%rdi)
	{load} sbb (%rdi), %eax
	{store} sbb %eax, (%rdi)
	{store} sbb (%rdi), %eax
	{load} sub %eax, (%rdi)
	{load} sub (%rdi), %eax
	{store} sub %eax, (%rdi)
	{store} sub (%rdi), %eax
	{load} xor %eax, (%rdi)
	{load} xor (%rdi), %eax
	{store} xor %eax, (%rdi)
	{store} xor (%rdi), %eax

	# x87: two-way register forms (DC/D8 opcode spaces).
	fadd %st, %st
	{load} fadd %st, %st
	{store} fadd %st, %st
	fdiv %st, %st
	{load} fdiv %st, %st
	{store} fdiv %st, %st
	fdivr %st, %st
	{load} fdivr %st, %st
	{store} fdivr %st, %st
	fmul %st, %st
	{load} fmul %st, %st
	{store} fmul %st, %st
	fsub %st, %st
	{load} fsub %st, %st
	{store} fsub %st, %st
	fsubr %st, %st
	{load} fsubr %st, %st
	{store} fsubr %st, %st

	# MMX.
	movq %mm0, %mm7
	{load} movq %mm0, %mm7
	{store} movq %mm0, %mm7

	# SSE/AVX/AVX-512 register moves: each has distinct load and store
	# opcodes, selectable via {load}/{store}.
	movaps %xmm0, %xmm7
	{load} movaps %xmm0, %xmm7
	{store} movaps %xmm0, %xmm7
	movups %xmm0, %xmm7
	{load} movups %xmm0, %xmm7
	{store} movups %xmm0, %xmm7
	movss %xmm0, %xmm7
	{load} movss %xmm0, %xmm7
	{store} movss %xmm0, %xmm7
	movapd %xmm0, %xmm7
	{load} movapd %xmm0, %xmm7
	{store} movapd %xmm0, %xmm7
	movupd %xmm0, %xmm7
	{load} movupd %xmm0, %xmm7
	{store} movupd %xmm0, %xmm7
	movsd %xmm0, %xmm7
	{load} movsd %xmm0, %xmm7
	{store} movsd %xmm0, %xmm7
	movdqa %xmm0, %xmm7
	{load} movdqa %xmm0, %xmm7
	{store} movdqa %xmm0, %xmm7
	movdqu %xmm0, %xmm7
	{load} movdqu %xmm0, %xmm7
	{store} movdqu %xmm0, %xmm7
	movq %xmm0, %xmm7
	{load} movq %xmm0, %xmm7
	{store} movq %xmm0, %xmm7
	vmovaps %xmm0, %xmm7
	{load} vmovaps %xmm0, %xmm7
	{store} vmovaps %xmm0, %xmm7
	vmovaps %zmm0, %zmm7
	{load} vmovaps %zmm0, %zmm7
	{store} vmovaps %zmm0, %zmm7
	vmovaps %xmm0, %xmm7{%k7}
	{load} vmovaps %xmm0, %xmm7{%k7}
	{store} vmovaps %xmm0, %xmm7{%k7}
	vmovups %zmm0, %zmm7
	{load} vmovups %zmm0, %zmm7
	{store} vmovups %zmm0, %zmm7
	vmovups %xmm0, %xmm7
	{load} vmovups %xmm0, %xmm7
	{store} vmovups %xmm0, %xmm7
	vmovups %xmm0, %xmm7{%k7}
	{load} vmovups %xmm0, %xmm7{%k7}
	{store} vmovups %xmm0, %xmm7{%k7}
	vmovss %xmm0, %xmm1, %xmm7
	{load} vmovss %xmm0, %xmm1, %xmm7
	{store} vmovss %xmm0, %xmm1, %xmm7
	vmovss %xmm0, %xmm1, %xmm7{%k7}
	{load} vmovss %xmm0, %xmm1, %xmm7{%k7}
	{store} vmovss %xmm0, %xmm1, %xmm7{%k7}
	vmovapd %xmm0, %xmm7
	{load} vmovapd %xmm0, %xmm7
	{store} vmovapd %xmm0, %xmm7
	vmovapd %zmm0, %zmm7
	{load} vmovapd %zmm0, %zmm7
	{store} vmovapd %zmm0, %zmm7
	vmovapd %xmm0, %xmm7{%k7}
	{load} vmovapd %xmm0, %xmm7{%k7}
	{store} vmovapd %xmm0, %xmm7{%k7}
	vmovupd %xmm0, %xmm7
	{load} vmovupd %xmm0, %xmm7
	{store} vmovupd %xmm0, %xmm7
	vmovupd %zmm0, %zmm7
	{load} vmovupd %zmm0, %zmm7
	{store} vmovupd %zmm0, %zmm7
	vmovupd %xmm0, %xmm7{%k7}
	{load} vmovupd %xmm0, %xmm7{%k7}
	{store} vmovupd %xmm0, %xmm7{%k7}
	vmovsd %xmm0, %xmm1, %xmm7
	{load} vmovsd %xmm0, %xmm1, %xmm7
	{store} vmovsd %xmm0, %xmm1, %xmm7
	vmovsd %xmm0, %xmm1, %xmm7{%k7}
	{load} vmovsd %xmm0, %xmm1, %xmm7{%k7}
	{store} vmovsd %xmm0, %xmm1, %xmm7{%k7}
	vmovdqa %xmm0, %xmm7
	{load} vmovdqa %xmm0, %xmm7
	{store} vmovdqa %xmm0, %xmm7
	vmovdqa32 %zmm0, %zmm7
	{load} vmovdqa32 %zmm0, %zmm7
	{store} vmovdqa32 %zmm0, %zmm7
	vmovdqa32 %xmm0, %xmm7
	{load} vmovdqa32 %xmm0, %xmm7
	{store} vmovdqa32 %xmm0, %xmm7
	vmovdqa64 %zmm0, %zmm7
	{load} vmovdqa64 %zmm0, %zmm7
	{store} vmovdqa64 %zmm0, %zmm7
	vmovdqa64 %xmm0, %xmm7
	{load} vmovdqa64 %xmm0, %xmm7
	{store} vmovdqa64 %xmm0, %xmm7
	vmovdqu %xmm0, %xmm7
	{load} vmovdqu %xmm0, %xmm7
	{store} vmovdqu %xmm0, %xmm7
	vmovdqu8 %zmm0, %zmm7
	{load} vmovdqu8 %zmm0, %zmm7
	{store} vmovdqu8 %zmm0, %zmm7
	vmovdqu8 %xmm0, %xmm7
	{load} vmovdqu8 %xmm0, %xmm7
	# Fixed: this row previously read "{store} vmovdqu8 %zmm0, %zmm7", a
	# copy/paste from the zmm group above; every sibling xmm group uses
	# xmm registers in all three rows.  NOTE(review): the matching .d
	# expectation file must be updated in step with this change.
	{store} vmovdqu8 %xmm0, %xmm7
	vmovdqu16 %zmm0, %zmm7
	{load} vmovdqu16 %zmm0, %zmm7
	{store} vmovdqu16 %zmm0, %zmm7
	vmovdqu16 %xmm0, %xmm7
	{load} vmovdqu16 %xmm0, %xmm7
	{store} vmovdqu16 %xmm0, %xmm7
	vmovdqu32 %zmm0, %zmm7
	{load} vmovdqu32 %zmm0, %zmm7
	{store} vmovdqu32 %zmm0, %zmm7
	vmovdqu32 %xmm0, %xmm7
	{load} vmovdqu32 %xmm0, %xmm7
	{store} vmovdqu32 %xmm0, %xmm7
	vmovdqu64 %zmm0, %zmm7
	{load} vmovdqu64 %zmm0, %zmm7
	{store} vmovdqu64 %zmm0, %zmm7
	vmovdqu64 %xmm0, %xmm7
	{load} vmovdqu64 %xmm0, %xmm7
	{store} vmovdqu64 %xmm0, %xmm7
	vmovq %xmm0, %xmm7
	{load} vmovq %xmm0, %xmm7
	{store} vmovq %xmm0, %xmm7
	{evex} vmovq %xmm0, %xmm7
	{load} {evex} vmovq %xmm0, %xmm7
	{store} {evex} vmovq %xmm0, %xmm7

	# MPX.
	bndmov %bnd3, %bnd0
	{load} bndmov %bnd3, %bnd0
	{store} bndmov %bnd3, %bnd0

	# Displacement-size pseudo prefixes, including cases where the
	# requested size disagrees with the natural encoding.
	movaps (%rax),%xmm2
	{load} movaps (%rax),%xmm2
	{store} movaps (%rax),%xmm2
	{disp8} movaps (%rax),%xmm2
	{disp32} movaps (%rax),%xmm2
	movaps -1(%rax),%xmm2
	{disp8} movaps -1(%rax),%xmm2
	{disp32} movaps -1(%rax),%xmm2
	movaps 128(%rax),%xmm2
	{disp8} movaps 128(%rax),%xmm2
	{disp32} movaps 128(%rax),%xmm2
	# {rex} forcing, including cases that cannot take a REX prefix
	# (ah operand; VEX/EVEX-encoded insns) and should warn.
	{rex} mov %al,%ah
	{rex} movl %eax,%ebx
	{rex} movl %eax,%r14d
	{rex} movl %eax,(%r8)
	{rex} movaps %xmm7,%xmm2
	{rex} movaps %xmm7,%xmm12
	{rex} movaps (%rcx),%xmm2
	{rex} movaps (%r8),%xmm2
	{rex} phaddw (%rcx),%mm0
	{rex} phaddw (%r8),%mm0
	{rex} vmovaps %xmm7,%xmm2
	{rex} vmovaps %xmm17,%xmm2
	{rex} rorx $7,%eax,%ebx

	# Same coverage in Intel syntax (operand order reversed).
	.intel_syntax noprefix
	{vex3} vmovaps xmm2,xmm7
	{vex3} {load} vmovaps xmm2,xmm7
	{vex3} {store} vmovaps xmm2,xmm7
	vmovaps xmm2,xmm7
	{vex2} vmovaps xmm2,xmm7
	{vex2} {load} vmovaps xmm2,xmm7
	{vex2} {store} vmovaps xmm2,xmm7
	{vex3} vmovaps xmm2,XMMWORD PTR [rax]
	vmovaps xmm2,XMMWORD PTR [rax]
	{vex2} vmovaps xmm2,XMMWORD PTR [rax]
	{evex} vmovaps xmm2,XMMWORD PTR [rax]
	{disp32} vmovaps xmm2,XMMWORD PTR [rax]
	{evex} {disp8} vmovaps xmm2,XMMWORD PTR [rax]
	{evex} {disp32} vmovaps xmm2,XMMWORD PTR [rax]
	mov rax,rcx
	{load} mov rax,rcx
	{store} mov rax,rcx
	movaps xmm2,XMMWORD PTR [rax]
	{load} movaps xmm2,XMMWORD PTR [rax]
	{store} movaps xmm2,XMMWORD PTR [rax]
	{disp8} movaps xmm2,XMMWORD PTR [rax]
	{disp32} movaps xmm2,XMMWORD PTR [rax]
	movaps xmm2,XMMWORD PTR [rax-1]
	{disp8} movaps xmm2,XMMWORD PTR [rax-1]
	{disp32} movaps xmm2,XMMWORD PTR [rax-1]
	movaps xmm2,XMMWORD PTR [rax+128]
	{disp8} movaps xmm2,XMMWORD PTR [rax+128]
	{disp32} movaps xmm2,XMMWORD PTR [rax+128]
	{rex} mov ah,al
	{rex} mov ebx,eax
	{rex} mov r14d,eax
	{rex} mov DWORD PTR [r8],eax
	{rex} movaps xmm2,xmm7
	{rex} movaps xmm12,xmm7
	{rex} movaps xmm2,XMMWORD PTR [rcx]
	{rex} movaps xmm2,XMMWORD PTR [r8]
	{rex} phaddw mm0,QWORD PTR [rcx]
	{rex} phaddw mm0,QWORD PTR [r8]
	{rex} vmovaps xmm2,xmm7
	{rex} vmovaps xmm2,xmm17
	{rex} rorx ebx,eax,0x7
This page took 0.04041 seconds and 4 git commands to generate.