# Check AVX WIG instructions
#
# Assembler test input (GAS, AT&T syntax, 32-bit x86).  Each line is a
# VEX/XOP-encoded instruction whose VEX.W bit is "ignored" (WIG); the
# testsuite assembles this file and compares the emitted encodings
# against the expected-output dump.  Operand choices (%eax/%ecx bases,
# xmm/ymm 4/6/2) are arbitrary register patterns, not meaningful data.

	.allow_index_reg
	.text
_start:
	# VEX-encoded general-purpose instructions (BMI/BMI2/TBM/LWP).
	andn (%eax), %eax, %eax
	bextr %eax, (%eax), %eax
	bextr $0, (%eax), %eax
	blcfill (%eax), %eax
	blci (%eax), %eax
	blcic (%eax), %eax
	blcmsk (%eax), %eax
	blcs (%eax), %eax
	blsfill (%eax), %eax
	blsi (%eax), %eax
	blsic (%eax), %eax
	blsmsk (%eax), %eax
	blsr (%eax), %eax
	bzhi %eax, (%eax), %eax
	kmovd %eax, %k0
	kmovd %k0, %eax
	llwpcb %eax
	lwpins $0, (%eax), %eax
	lwpval $0, (%eax), %eax
	mulx (%eax), %eax, %eax
	pdep (%eax), %eax, %eax
	pext (%eax), %eax, %eax
	rorx $0, (%eax), %eax
	sarx %eax, (%eax), %eax
	shlx %eax, (%eax), %eax
	shrx %eax, (%eax), %eax
	slwpcb %eax
	t1mskc (%eax), %eax
	tzmsk (%eax), %eax

	# AVX / AES / PCLMUL vector instructions.
	vaddpd %ymm4,%ymm6,%ymm2
	vaddps %ymm4,%ymm6,%ymm2
	vaddsd %xmm4,%xmm6,%xmm2
	vaddss %xmm4,%xmm6,%xmm2
	vaddsubpd %ymm4,%ymm6,%ymm2
	vaddsubps %ymm4,%ymm6,%ymm2
	vaesdec %xmm4,%xmm6,%xmm2
	vaesdeclast %xmm4,%xmm6,%xmm2
	vaesenc %xmm4,%xmm6,%xmm2
	vaesenclast %xmm4,%xmm6,%xmm2
	vaesimc %xmm4,%xmm6
	vaeskeygenassist $7,%xmm4,%xmm6
	vandnpd %ymm4,%ymm6,%ymm2
	vandnps %ymm4,%ymm6,%ymm2
	vandpd %ymm4,%ymm6,%ymm2
	vandps %ymm4,%ymm6,%ymm2
	vblendpd $7,%ymm4,%ymm6,%ymm2
	vblendps $7,%ymm4,%ymm6,%ymm2
	vcmpeqpd %ymm4,%ymm6,%ymm2
	vcmpeqps %ymm4,%ymm6,%ymm2
	vcmpeqsd %xmm4,%xmm6,%xmm2
	vcmpeqss %xmm4,%xmm6,%xmm2
	vcmppd $7,%ymm4,%ymm6,%ymm2
	vcmpps $7,%ymm4,%ymm6,%ymm2
	vcmpsd $7,%xmm4,%xmm6,%xmm2
	vcmpss $7,%xmm4,%xmm6,%xmm2
	vcomisd %xmm4,%xmm6
	vcomiss %xmm4,%xmm6
	vcvtdq2pd %xmm4,%ymm4
	vcvtdq2ps %ymm4,%ymm6
	# The explicit x/y suffixes disambiguate operand size for the
	# pd->dq/ps conversions; repeated lines are intentional in the
	# original test and are preserved verbatim.
	vcvtpd2dqy %ymm4,%xmm4
	vcvtpd2dqx %xmm4,%xmm6
	vcvtpd2dqy %ymm4,%xmm4
	vcvtpd2psy %ymm4,%xmm4
	vcvtpd2psx %xmm4,%xmm6
	vcvtpd2psy %ymm4,%xmm4
	vcvtps2dq %ymm4,%ymm6
	vcvtps2pd %xmm4,%ymm4
	vcvtsd2ss %xmm4,%xmm6,%xmm2
	vcvtsi2ss %eax, %xmm0, %xmm0
	vcvtsi2ss (%eax), %xmm0, %xmm0
	vcvtsi2sd %eax, %xmm0, %xmm0
	vcvtsi2sd (%eax), %xmm0, %xmm0
	vcvtss2sd %xmm4,%xmm6,%xmm2
	vcvtss2si %xmm0, %eax
	vcvtsd2si %xmm0, %eax
	vcvttpd2dqy %ymm4,%xmm4
	vcvttpd2dqx %xmm4,%xmm6
	vcvttpd2dqy %ymm4,%xmm4
	vcvttps2dq %ymm4,%ymm6
	vcvttss2si %xmm0, %eax
	vcvttsd2si %xmm0, %eax
	vdivpd %ymm4,%ymm6,%ymm2
	vdivps %ymm4,%ymm6,%ymm2
	vdivsd %xmm4,%xmm6,%xmm2
	vdivss %xmm4,%xmm6,%xmm2
	vdppd $7,%xmm4,%xmm6,%xmm2
	vdpps $7,%ymm4,%ymm6,%ymm2
	vextractps $7,%xmm4,(%ecx)
	vhaddpd %ymm4,%ymm6,%ymm2
	vhaddps %ymm4,%ymm6,%ymm2
	vhsubpd %ymm4,%ymm6,%ymm2
	vhsubps %ymm4,%ymm6,%ymm2
	vinsertps $7,%xmm4,%xmm6,%xmm2
	vlddqu (%ecx),%ymm4
	vldmxcsr (%ecx)
	vmaskmovdqu %xmm4,%xmm6
	vmaxpd %ymm4,%ymm6,%ymm2
	vmaxps %ymm4,%ymm6,%ymm2
	vmaxsd %xmm4,%xmm6,%xmm2
	vmaxss %xmm4,%xmm6,%xmm2
	vminpd %ymm4,%ymm6,%ymm2
	vminps %ymm4,%ymm6,%ymm2
	vminsd %xmm4,%xmm6,%xmm2
	vminss %xmm4,%xmm6,%xmm2
	# {store} selects the store (mem/reg-destination) encoding form.
	vmovapd %ymm4,%ymm6
	vmovaps %ymm4,%ymm6
	{store} vmovapd %ymm4,%ymm6
	{store} vmovaps %ymm4,%ymm6
	vmovd %eax, %xmm0
	vmovd (%eax), %xmm0
	vmovd %xmm0, %eax
	vmovd %xmm0, (%eax)
	vmovddup %ymm4,%ymm6
	vmovdqa %ymm4,%ymm6
	vmovdqu %ymm4,%ymm6
	{store} vmovdqa %ymm4,%ymm6
	{store} vmovdqu %ymm4,%ymm6
	vmovhlps %xmm4,%xmm6,%xmm2
	vmovhpd (%ecx),%xmm4,%xmm6
	vmovhpd %xmm4,(%ecx)
	vmovhps (%ecx),%xmm4,%xmm6
	vmovhps %xmm4,(%ecx)
	vmovlhps %xmm4,%xmm6,%xmm2
	vmovlpd (%ecx),%xmm4,%xmm6
	vmovlpd %xmm4,(%ecx)
	vmovlps (%ecx),%xmm4,%xmm6
	vmovlps %xmm4,(%ecx)
	vmovmskpd %xmm4,%ecx
	vmovmskps %xmm4,%ecx
	vmovntdq %ymm4,(%ecx)
	vmovntdqa (%ecx),%xmm4
	vmovntpd %ymm4,(%ecx)
	vmovntps %ymm4,(%ecx)
	vmovq %xmm4,%xmm6
	vmovq %xmm4,(%ecx)
	vmovsd (%ecx),%xmm4
	vmovsd %xmm4,(%ecx)
	vmovshdup %ymm4,%ymm6
	vmovsldup %ymm4,%ymm6
	vmovss (%ecx),%xmm4
	vmovss %xmm4,(%ecx)
	vmovupd %ymm4,%ymm6
	vmovupd %ymm4,(%ecx)
	vmovups %ymm4,%ymm6
	vmovups %ymm4,(%ecx)
	vmpsadbw $7,%xmm4,%xmm6,%xmm2
	vmulpd %ymm4,%ymm6,%ymm2
	vmulps %ymm4,%ymm6,%ymm2
	vmulsd %xmm4,%xmm6,%xmm2
	vmulss %xmm4,%xmm6,%xmm2
	vorpd %ymm4,%ymm6,%ymm2
	vorps %ymm4,%ymm6,%ymm2
	vpabsb %xmm4,%xmm6
	vpabsd %xmm4,%xmm6
	vpabsw %xmm4,%xmm6
	vpackssdw %xmm4,%xmm6,%xmm2
	vpacksswb %xmm4,%xmm6,%xmm2
	vpackusdw %xmm4,%xmm6,%xmm2
	vpackuswb %xmm4,%xmm6,%xmm2
	vpaddb %xmm4,%xmm6,%xmm2
	vpaddd %xmm4,%xmm6,%xmm2
	vpaddq %xmm4,%xmm6,%xmm2
	vpaddsb %xmm4,%xmm6,%xmm2
	vpaddsw %xmm4,%xmm6,%xmm2
	vpaddusb %xmm4,%xmm6,%xmm2
	vpaddusw %xmm4,%xmm6,%xmm2
	vpaddw %xmm4,%xmm6,%xmm2
	vpalignr $7,%xmm4,%xmm6,%xmm2
	vpand %xmm4,%xmm6,%xmm2
	vpandn %xmm4,%xmm6,%xmm2
	vpavgb %xmm4,%xmm6,%xmm2
	vpavgw %xmm4,%xmm6,%xmm2
	vpblendw $7,%xmm4,%xmm6,%xmm2
	vpclmulhqhqdq %xmm4,%xmm6,%xmm2
	vpclmulhqlqdq %xmm4,%xmm6,%xmm2
	vpclmullqhqdq %xmm4,%xmm6,%xmm2
	vpclmullqlqdq %xmm4,%xmm6,%xmm2
	vpclmulqdq $7,%xmm4,%xmm6,%xmm2
	vpcmpeqb %xmm4,%xmm6,%xmm2
	vpcmpeqd %xmm4,%xmm6,%xmm2
	vpcmpeqq %xmm4,%xmm6,%xmm2
	vpcmpeqw %xmm4,%xmm6,%xmm2
	vpcmpestri $0, %xmm0, %xmm0
	vpcmpestrm $0, %xmm0, %xmm0
	vpcmpgtb %xmm4,%xmm6,%xmm2
	vpcmpgtd %xmm4,%xmm6,%xmm2
	vpcmpgtq %xmm4,%xmm6,%xmm2
	vpcmpgtw %xmm4,%xmm6,%xmm2
	vpcmpistri $7,%xmm4,%xmm6
	vpcmpistrm $7,%xmm4,%xmm6
	vpextrb $0, %xmm0, %eax
	vpextrb $0, %xmm0, (%eax)
	vpextrd $0, %xmm0, %eax
	vpextrd $0, %xmm0, (%eax)
	vpextrw $0, %xmm0, %eax
	{store} vpextrw $0, %xmm0, %eax
	vpextrw $0, %xmm0, (%eax)
	vphaddd %xmm4,%xmm6,%xmm2
	vphaddsw %xmm4,%xmm6,%xmm2
	vphaddw %xmm4,%xmm6,%xmm2
	vphminposuw %xmm4,%xmm6
	vphsubd %xmm4,%xmm6,%xmm2
	vphsubsw %xmm4,%xmm6,%xmm2
	vphsubw %xmm4,%xmm6,%xmm2
	vpinsrb $0, %eax, %xmm0, %xmm0
	vpinsrb $0, (%eax), %xmm0, %xmm0
	vpinsrd $0, %eax, %xmm0, %xmm0
	vpinsrd $0, (%eax), %xmm0, %xmm0
	vpinsrw $0, %eax, %xmm0, %xmm0
	vpinsrw $0, (%eax), %xmm0, %xmm0
	vpmaddubsw %xmm4,%xmm6,%xmm2
	vpmaddwd %xmm4,%xmm6,%xmm2
	vpmaxsb %xmm4,%xmm6,%xmm2
	vpmaxsd %xmm4,%xmm6,%xmm2
	vpmaxsw %xmm4,%xmm6,%xmm2
	vpmaxub %xmm4,%xmm6,%xmm2
	vpmaxud %xmm4,%xmm6,%xmm2
	vpmaxuw %xmm4,%xmm6,%xmm2
	vpminsb %xmm4,%xmm6,%xmm2
	vpminsd %xmm4,%xmm6,%xmm2
	vpminsw %xmm4,%xmm6,%xmm2
	vpminub %xmm4,%xmm6,%xmm2
	vpminud %xmm4,%xmm6,%xmm2
	vpminuw %xmm4,%xmm6,%xmm2
	vpmovmskb %xmm4,%ecx
	vpmovsxbd %xmm4,%xmm6
	vpmovsxbq %xmm4,%xmm6
	vpmovsxbw %xmm4,%xmm6
	vpmovsxdq %xmm4,%xmm6
	vpmovsxwd %xmm4,%xmm6
	vpmovsxwq %xmm4,%xmm6
	vpmovzxbd %xmm4,%xmm6
	vpmovzxbq %xmm4,%xmm6
	vpmovzxbw %xmm4,%xmm6
	vpmovzxdq %xmm4,%xmm6
	vpmovzxwd %xmm4,%xmm6
	vpmovzxwq %xmm4,%xmm6
	vpmuldq %xmm4,%xmm6,%xmm2
	vpmulhrsw %xmm4,%xmm6,%xmm2
	vpmulhuw %xmm4,%xmm6,%xmm2
	vpmulhw %xmm4,%xmm6,%xmm2
	vpmulld %xmm4,%xmm6,%xmm2
	vpmullw %xmm4,%xmm6,%xmm2
	vpmuludq %xmm4,%xmm6,%xmm2
	vpor %xmm4,%xmm6,%xmm2
	vpsadbw %xmm4,%xmm6,%xmm2
	vpshufb %xmm4,%xmm6,%xmm2
	vpshufd $7,%xmm4,%xmm6
	vpshufhw $7,%xmm4,%xmm6
	vpshuflw $7,%xmm4,%xmm6
	vpsignb %xmm4,%xmm6,%xmm2
	vpsignd %xmm4,%xmm6,%xmm2
	vpsignw %xmm4,%xmm6,%xmm2
	vpslld %xmm4,%xmm6,%xmm2
	vpslldq $7,%xmm4,%xmm6
	vpsllq %xmm4,%xmm6,%xmm2
	vpsllw %xmm4,%xmm6,%xmm2
	vpsrad %xmm4,%xmm6,%xmm2
	vpsraw %xmm4,%xmm6,%xmm2
	vpsrld %xmm4,%xmm6,%xmm2
	vpsrldq $7,%xmm4,%xmm6
	vpsrlq %xmm4,%xmm6,%xmm2
	vpsrlw %xmm4,%xmm6,%xmm2
	vpsubb %xmm4,%xmm6,%xmm2
	vpsubd %xmm4,%xmm6,%xmm2
	vpsubq %xmm4,%xmm6,%xmm2
	vpsubsb %xmm4,%xmm6,%xmm2
	vpsubsw %xmm4,%xmm6,%xmm2
	vpsubusb %xmm4,%xmm6,%xmm2
	vpsubusw %xmm4,%xmm6,%xmm2
	vpsubw %xmm4,%xmm6,%xmm2
	vptest %ymm4,%ymm6
	vpunpckhbw %xmm4,%xmm6,%xmm2
	vpunpckhdq %xmm4,%xmm6,%xmm2
	vpunpckhqdq %xmm4,%xmm6,%xmm2
	vpunpckhwd %xmm4,%xmm6,%xmm2
	vpunpcklbw %xmm4,%xmm6,%xmm2
	vpunpckldq %xmm4,%xmm6,%xmm2
	vpunpcklqdq %xmm4,%xmm6,%xmm2
	vpunpcklwd %xmm4,%xmm6,%xmm2
	vpxor %xmm4,%xmm6,%xmm2
	vrcpps %ymm4,%ymm6
	vrcpss %xmm4,%xmm6,%xmm2
	vroundpd $7,%ymm6,%ymm2
	vroundps $7,%ymm6,%ymm2
	vroundsd $7,%xmm4,%xmm6,%xmm2
	vroundss $7,%xmm4,%xmm6,%xmm2
	vrsqrtps %ymm4,%ymm6
	vrsqrtss %xmm4,%xmm6,%xmm2
	vshufpd $7,%ymm4,%ymm6,%ymm2
	vshufps $7,%ymm4,%ymm6,%ymm2
	vsqrtpd %ymm4,%ymm6
	vsqrtps %ymm4,%ymm6
	vsqrtsd %xmm4,%xmm6,%xmm2
	vsqrtss %xmm4,%xmm6,%xmm2
	vstmxcsr (%ecx)
	vsubpd %ymm4,%ymm6,%ymm2
	vsubps %ymm4,%ymm6,%ymm2
	vsubsd %xmm4,%xmm6,%xmm2
	vsubss %xmm4,%xmm6,%xmm2
	vucomisd %xmm4,%xmm6
	vucomiss %xmm4,%xmm6
	vunpckhpd %ymm4,%ymm6,%ymm2
	vunpckhps %ymm4,%ymm6,%ymm2
	vunpcklpd %ymm4,%ymm6,%ymm2
	vunpcklps %ymm4,%ymm6,%ymm2
	vxorpd %ymm4,%ymm6,%ymm2
	vxorps %ymm4,%ymm6,%ymm2
	vzeroall
	vzeroupper