1 //====- X86InstrSSE.td - Describe the X86 Instruction Set -------*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
// This file was developed by Evan Cheng and is distributed under
6 // the University of Illinois Open Source License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file describes the X86 SSE instruction set, defining the instructions,
11 // and properties of the instructions which are needed for code generation,
12 // machine code emission, and analysis.
14 //===----------------------------------------------------------------------===//
16 //===----------------------------------------------------------------------===//
17 // SSE specific DAG Nodes.
18 //===----------------------------------------------------------------------===//
// Packed load of a scalar FP value (used by the FsMOVAPS/FsMOVAPD aliases
// below).  It is a load, so it carries a chain.
// NOTE(review): the property lists of X86loadp/X86comi/X86ucomi were lost in
// this copy; restored as [SDNPHasChain] / [SDNPHasChain, SDNPOutFlag] to match
// their load and compare-and-set-flags semantics -- confirm against X86ISelLowering.
def X86loadp  : SDNode<"X86ISD::LOAD_PACK", SDTLoad,
                       [SDNPHasChain]>;
// Bitwise FP ops on scalar values kept in XMM registers.
def X86fand   : SDNode<"X86ISD::FAND", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86fxor   : SDNode<"X86ISD::FXOR", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
// (U)COMI compare-and-set-EFLAGS nodes.
def X86comi   : SDNode<"X86ISD::COMI", SDTX86CmpTest,
                       [SDNPHasChain, SDNPOutFlag]>;
def X86ucomi  : SDNode<"X86ISD::UCOMI", SDTX86CmpTest,
                       [SDNPHasChain, SDNPOutFlag]>;
// Scalar <-> vector moves and word extract/insert.
def X86s2vec  : SDNode<"X86ISD::S2VEC",
                       SDTypeProfile<1, 1, []>, []>;
def X86pextrw : SDNode<"X86ISD::PEXTRW",
                       SDTypeProfile<1, 2, []>, []>;
def X86pinsrw : SDNode<"X86ISD::PINSRW",
                       SDTypeProfile<1, 3, []>, []>;
37 //===----------------------------------------------------------------------===//
38 // SSE pattern fragments
39 //===----------------------------------------------------------------------===//
// A scalar FP value loaded with a 128-bit packed load (see FsMOVAPSrm /
// FsMOVAPDrm below, which disregard the upper bits).
def X86loadpf32 : PatFrag<(ops node:$ptr), (f32 (X86loadp node:$ptr))>;
def X86loadpf64 : PatFrag<(ops node:$ptr), (f64 (X86loadp node:$ptr))>;

// Typed 128-bit vector loads.
def loadv4f32 : PatFrag<(ops node:$ptr), (v4f32 (load node:$ptr))>;
def loadv2f64 : PatFrag<(ops node:$ptr), (v2f64 (load node:$ptr))>;
def loadv16i8 : PatFrag<(ops node:$ptr), (v16i8 (load node:$ptr))>;
def loadv8i16 : PatFrag<(ops node:$ptr), (v8i16 (load node:$ptr))>;
def loadv4i32 : PatFrag<(ops node:$ptr), (v4i32 (load node:$ptr))>;
def loadv2i64 : PatFrag<(ops node:$ptr), (v2i64 (load node:$ptr))>;

// Bitconverts into each of the 128-bit vector types.
def bc_v4f32 : PatFrag<(ops node:$in), (v4f32 (bitconvert node:$in))>;
def bc_v2f64 : PatFrag<(ops node:$in), (v2f64 (bitconvert node:$in))>;
def bc_v16i8 : PatFrag<(ops node:$in), (v16i8 (bitconvert node:$in))>;
def bc_v8i16 : PatFrag<(ops node:$in), (v8i16 (bitconvert node:$in))>;
def bc_v4i32 : PatFrag<(ops node:$in), (v4i32 (bitconvert node:$in))>;
def bc_v2i64 : PatFrag<(ops node:$in), (v2i64 (bitconvert node:$in))>;
// f32 immediate that is exactly +0.0 (materialized with pxor, see FsFLD0SS).
def fp32imm0 : PatLeaf<(f32 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

// PSLLDQ/PSRLDQ shift counts are in bytes; convert to the dword count.
def PSxLDQ_imm : SDNodeXForm<imm, [{
  // Transformation function: imm >> 3
  return getI32Imm(N->getValue() >> 3);
}]>;

// SHUFFLE_get_shuf_imm xform function: convert vector_shuffle mask to PSHUF*,
// SHUFP* etc. immediate.
def SHUFFLE_get_shuf_imm : SDNodeXForm<build_vector, [{
  return getI8Imm(X86::getShuffleSHUFImmediate(N));
}]>;

// SHUFFLE_get_pshufhw_imm xform function: convert vector_shuffle mask to
// PSHUFHW immediate.
def SHUFFLE_get_pshufhw_imm : SDNodeXForm<build_vector, [{
  return getI8Imm(X86::getShufflePSHUFHWImmediate(N));
}]>;

// SHUFFLE_get_pshuflw_imm xform function: convert vector_shuffle mask to
// PSHUFLW immediate.
def SHUFFLE_get_pshuflw_imm : SDNodeXForm<build_vector, [{
  return getI8Imm(X86::getShufflePSHUFLWImmediate(N));
}]>;
// Splat shuffle mask; the xform converts the build_vector mask into the
// SHUFPS/PSHUFD immediate encoding.
def SSE_splat_mask : PatLeaf<(build_vector), [{
  return X86::isSplatMask(N);
}], SHUFFLE_get_shuf_imm>;
// Splat mask for the 2-element (v2f64/v2i64) case.
// NOTE(review): the closing line was lost in this copy; xform restored to
// match SSE_splat_mask above -- confirm against the patterns that use it.
def SSE_splat_v2_mask : PatLeaf<(build_vector), [{
  return X86::isSplatMask(N);
}], SHUFFLE_get_shuf_imm>;

// Predicates classifying a vector_shuffle mask as matching one specific
// SSE shuffle/move/unpack instruction (implemented in X86ISelLowering).
def MOVHLPS_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVHLPSMask(N);
}]>;

def MOVHP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVHPMask(N);
}]>;

def MOVLP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVLPMask(N);
}]>;

def MOVL_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVLMask(N);
}]>;

def MOVSHDUP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVSHDUPMask(N);
}]>;

def MOVSLDUP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVSLDUPMask(N);
}]>;

def UNPCKL_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isUNPCKLMask(N);
}]>;

def UNPCKH_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isUNPCKHMask(N);
}]>;

def UNPCKL_v_undef_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isUNPCKL_v_undef_Mask(N);
}]>;
// Masks for the immediate-driven shuffles; each xform produces the matching
// instruction immediate from the build_vector mask.
def PSHUFD_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isPSHUFDMask(N);
}], SHUFFLE_get_shuf_imm>;

def PSHUFHW_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isPSHUFHWMask(N);
}], SHUFFLE_get_pshufhw_imm>;

def PSHUFLW_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isPSHUFLWMask(N);
}], SHUFFLE_get_pshuflw_imm>;

// Unary SHUFP: a PSHUFD-style mask realized with SHUFP* on one register.
def SHUFP_unary_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isPSHUFDMask(N);
}], SHUFFLE_get_shuf_imm>;

def SHUFP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isSHUFPMask(N);
}], SHUFFLE_get_shuf_imm>;

// Binary PSHUFD: a SHUFP-style (two-operand) mask realized with PSHUFD.
def PSHUFD_binary_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isSHUFPMask(N);
}], SHUFFLE_get_shuf_imm>;
153 //===----------------------------------------------------------------------===//
154 // SSE scalar FP Instructions
155 //===----------------------------------------------------------------------===//
157 // Instruction templates
158 // SSI - SSE1 instructions with XS prefix.
159 // SDI - SSE2 instructions with XD prefix.
160 // PSI - SSE1 instructions with TB prefix.
161 // PDI - SSE2 instructions with TB and OpSize prefixes.
162 // PSIi8 - SSE1 instructions with ImmT == Imm8 and TB prefix.
163 // PDIi8 - SSE2 instructions with ImmT == Imm8 and TB and OpSize prefixes.
164 // S3I - SSE3 instructions with TB and OpSize prefixes.
165 // S3SI - SSE3 instructions with XS prefix.
166 // S3DI - SSE3 instructions with XD prefix.
// Scalar-single (XS prefix, SSE1) instruction template.
class SSI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, XS, Requires<[HasSSE1]>;
// Scalar-double (XD prefix, SSE2) instruction template.
class SDI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, XD, Requires<[HasSSE2]>;
// Packed-single (TB prefix, SSE1) instruction template.
class PSI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, TB, Requires<[HasSSE1]>;
// Packed-double (TB + OpSize prefixes, SSE2) instruction template.
class PDI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, TB, OpSize, Requires<[HasSSE2]>;
// Imm8 variants of PSI/PDI.  These derive from X86Inst directly (not I), so
// the pattern list is set via an explicit `let` inside the class body.
// NOTE(review): the closing `}` of both class bodies was lost in this copy;
// restored.
class PSIi8<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : X86Inst<o, F, Imm8, ops, asm>, TB, Requires<[HasSSE1]> {
  let Pattern = pattern;
}
class PDIi8<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : X86Inst<o, F, Imm8, ops, asm>, TB, OpSize, Requires<[HasSSE2]> {
  let Pattern = pattern;
}
// SSE3 templates: XS-prefixed, XD-prefixed, and TB+OpSize-prefixed forms.
class S3SI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, XS, Requires<[HasSSE3]>;
class S3DI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, XD, Requires<[HasSSE3]>;
class S3I<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, TB, OpSize, Requires<[HasSSE3]>;
190 //===----------------------------------------------------------------------===//
191 // Helpers for defining instructions that directly correspond to intrinsics.
// Helpers for instructions that directly correspond to intrinsics.
// SS_* = SSE1 scalar-single (v4f32 in XMM), SD_* = SSE2 scalar-double
// (v2f64 in XMM).  *_Intr/_Intm are unary reg/mem; *_Intrr/_Intrm are
// binary reg-reg / reg-mem.
class SS_Intr<bits<8> o, string asm, Intrinsic IntId>
      : SSI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src), asm,
            [(set VR128:$dst, (v4f32 (IntId VR128:$src)))]>;
class SS_Intm<bits<8> o, string asm, Intrinsic IntId>
      : SSI<o, MRMSrcMem, (ops VR128:$dst, f32mem:$src), asm,
            [(set VR128:$dst, (v4f32 (IntId (load addr:$src))))]>;
class SD_Intr<bits<8> o, string asm, Intrinsic IntId>
      : SDI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src), asm,
            [(set VR128:$dst, (v2f64 (IntId VR128:$src)))]>;
class SD_Intm<bits<8> o, string asm, Intrinsic IntId>
      : SDI<o, MRMSrcMem, (ops VR128:$dst, f64mem:$src), asm,
            [(set VR128:$dst, (v2f64 (IntId (load addr:$src))))]>;

class SS_Intrr<bits<8> o, string asm, Intrinsic IntId>
      : SSI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
            [(set VR128:$dst, (v4f32 (IntId VR128:$src1, VR128:$src2)))]>;
class SS_Intrm<bits<8> o, string asm, Intrinsic IntId>
      : SSI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f32mem:$src2), asm,
            [(set VR128:$dst, (v4f32 (IntId VR128:$src1, (load addr:$src2))))]>;
class SD_Intrr<bits<8> o, string asm, Intrinsic IntId>
      : SDI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
            [(set VR128:$dst, (v2f64 (IntId VR128:$src1, VR128:$src2)))]>;
class SD_Intrm<bits<8> o, string asm, Intrinsic IntId>
      : SDI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2), asm,
            [(set VR128:$dst, (v2f64 (IntId VR128:$src1, (load addr:$src2))))]>;
// Packed intrinsic helpers: PS_* = SSE1 packed-single, PD_* = SSE2
// packed-double.  No explicit result type is imposed here; the intrinsic's
// own signature determines it.
class PS_Intr<bits<8> o, string asm, Intrinsic IntId>
      : PSI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src), asm,
            [(set VR128:$dst, (IntId VR128:$src))]>;
class PS_Intm<bits<8> o, string asm, Intrinsic IntId>
      : PSI<o, MRMSrcMem, (ops VR128:$dst, f32mem:$src), asm,
            [(set VR128:$dst, (IntId (loadv4f32 addr:$src)))]>;
class PD_Intr<bits<8> o, string asm, Intrinsic IntId>
      : PDI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src), asm,
            [(set VR128:$dst, (IntId VR128:$src))]>;
class PD_Intm<bits<8> o, string asm, Intrinsic IntId>
      : PDI<o, MRMSrcMem, (ops VR128:$dst, f64mem:$src), asm,
            [(set VR128:$dst, (IntId (loadv2f64 addr:$src)))]>;

class PS_Intrr<bits<8> o, string asm, Intrinsic IntId>
      : PSI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
            [(set VR128:$dst, (IntId VR128:$src1, VR128:$src2))]>;
class PS_Intrm<bits<8> o, string asm, Intrinsic IntId>
      : PSI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f32mem:$src2), asm,
            [(set VR128:$dst, (IntId VR128:$src1, (loadv4f32 addr:$src2)))]>;
class PD_Intrr<bits<8> o, string asm, Intrinsic IntId>
      : PDI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
            [(set VR128:$dst, (IntId VR128:$src1, VR128:$src2))]>;
class PD_Intrm<bits<8> o, string asm, Intrinsic IntId>
      : PDI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2), asm,
            [(set VR128:$dst, (IntId VR128:$src1, (loadv2f64 addr:$src2)))]>;
// SSE3 binary intrinsic helpers; memory forms take a full 128-bit operand.
class S3D_Intrr<bits<8> o, string asm, Intrinsic IntId>
      : S3DI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
             [(set VR128:$dst, (v4f32 (IntId VR128:$src1, VR128:$src2)))]>;
class S3D_Intrm<bits<8> o, string asm, Intrinsic IntId>
      : S3DI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2), asm,
             [(set VR128:$dst, (v4f32 (IntId VR128:$src1,
                                       (loadv4f32 addr:$src2))))]>;
class S3_Intrr<bits<8> o, string asm, Intrinsic IntId>
      : S3I<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
            [(set VR128:$dst, (v2f64 (IntId VR128:$src1, VR128:$src2)))]>;
class S3_Intrm<bits<8> o, string asm, Intrinsic IntId>
      : S3I<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2), asm,
            [(set VR128:$dst, (v2f64 (IntId VR128:$src1,
                                      (loadv2f64 addr:$src2))))]>;
// Some 'special' instructions
// Pseudo implicit-defs of the scalar FP register classes.
// FIX(review): the FR32 form required HasSSE2; FR32 operations elsewhere in
// this file (SSI/PSI templates, FsFLD0SS) are gated on HasSSE1, so require
// only SSE1 here.
def IMPLICIT_DEF_FR32 : I<0, Pseudo, (ops FR32:$dst),
                          "#IMPLICIT_DEF $dst",
                          [(set FR32:$dst, (undef))]>, Requires<[HasSSE1]>;
def IMPLICIT_DEF_FR64 : I<0, Pseudo, (ops FR64:$dst),
                          "#IMPLICIT_DEF $dst",
                          [(set FR64:$dst, (undef))]>, Requires<[HasSSE2]>;
// CMOV* - Used to implement the SSE SELECT DAG operation.  Expanded by the
// scheduler into a branch sequence.
// NOTE(review): the `[(set VR128:$dst,` lines of the vector forms and the
// closing `}` of the let block were lost in this copy; restored.
let usesCustomDAGSchedInserter = 1 in {  // Expanded by the scheduler.
  def CMOV_FR32 : I<0, Pseudo,
                    (ops FR32:$dst, FR32:$t, FR32:$f, i8imm:$cond),
                    "#CMOV_FR32 PSEUDO!",
                    [(set FR32:$dst, (X86cmov FR32:$t, FR32:$f, imm:$cond))]>;
  def CMOV_FR64 : I<0, Pseudo,
                    (ops FR64:$dst, FR64:$t, FR64:$f, i8imm:$cond),
                    "#CMOV_FR64 PSEUDO!",
                    [(set FR64:$dst, (X86cmov FR64:$t, FR64:$f, imm:$cond))]>;
  def CMOV_V4F32 : I<0, Pseudo,
                    (ops VR128:$dst, VR128:$t, VR128:$f, i8imm:$cond),
                    "#CMOV_V4F32 PSEUDO!",
                    [(set VR128:$dst,
                      (v4f32 (X86cmov VR128:$t, VR128:$f, imm:$cond)))]>;
  def CMOV_V2F64 : I<0, Pseudo,
                    (ops VR128:$dst, VR128:$t, VR128:$f, i8imm:$cond),
                    "#CMOV_V2F64 PSEUDO!",
                    [(set VR128:$dst,
                      (v2f64 (X86cmov VR128:$t, VR128:$f, imm:$cond)))]>;
  def CMOV_V2I64 : I<0, Pseudo,
                    (ops VR128:$dst, VR128:$t, VR128:$f, i8imm:$cond),
                    "#CMOV_V2I64 PSEUDO!",
                    [(set VR128:$dst,
                      (v2i64 (X86cmov VR128:$t, VR128:$f, imm:$cond)))]>;
}
// Scalar single/double moves: reg-reg, load, and store forms.
def MOVSSrr : SSI<0x10, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                  "movss {$src, $dst|$dst, $src}", []>;
def MOVSSrm : SSI<0x10, MRMSrcMem, (ops FR32:$dst, f32mem:$src),
                  "movss {$src, $dst|$dst, $src}",
                  [(set FR32:$dst, (loadf32 addr:$src))]>;
def MOVSDrr : SDI<0x10, MRMSrcReg, (ops FR64:$dst, FR64:$src),
                  "movsd {$src, $dst|$dst, $src}", []>;
def MOVSDrm : SDI<0x10, MRMSrcMem, (ops FR64:$dst, f64mem:$src),
                  "movsd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (loadf64 addr:$src))]>;

def MOVSSmr : SSI<0x11, MRMDestMem, (ops f32mem:$dst, FR32:$src),
                  "movss {$src, $dst|$dst, $src}",
                  [(store FR32:$src, addr:$dst)]>;
def MOVSDmr : SDI<0x11, MRMDestMem, (ops f64mem:$dst, FR64:$src),
                  "movsd {$src, $dst|$dst, $src}",
                  [(store FR64:$src, addr:$dst)]>;
// Arithmetic instructions
// All two-address; add/mul reg-reg forms are commutable.
// NOTE(review): the closing `}` lines of the let blocks were lost in this
// copy; restored.
let isTwoAddress = 1 in {
let isCommutable = 1 in {
def ADDSSrr : SSI<0x58, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "addss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fadd FR32:$src1, FR32:$src2))]>;
def ADDSDrr : SDI<0x58, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "addsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fadd FR64:$src1, FR64:$src2))]>;
def MULSSrr : SSI<0x59, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "mulss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fmul FR32:$src1, FR32:$src2))]>;
def MULSDrr : SDI<0x59, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "mulsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fmul FR64:$src1, FR64:$src2))]>;
} // isCommutable

def ADDSSrm : SSI<0x58, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "addss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fadd FR32:$src1, (loadf32 addr:$src2)))]>;
def ADDSDrm : SDI<0x58, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "addsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fadd FR64:$src1, (loadf64 addr:$src2)))]>;
def MULSSrm : SSI<0x59, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "mulss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fmul FR32:$src1, (loadf32 addr:$src2)))]>;
def MULSDrm : SDI<0x59, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "mulsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fmul FR64:$src1, (loadf64 addr:$src2)))]>;

def DIVSSrr : SSI<0x5E, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "divss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fdiv FR32:$src1, FR32:$src2))]>;
def DIVSSrm : SSI<0x5E, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "divss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fdiv FR32:$src1, (loadf32 addr:$src2)))]>;
def DIVSDrr : SDI<0x5E, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "divsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fdiv FR64:$src1, FR64:$src2))]>;
def DIVSDrm : SDI<0x5E, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "divsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fdiv FR64:$src1, (loadf64 addr:$src2)))]>;

def SUBSSrr : SSI<0x5C, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "subss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fsub FR32:$src1, FR32:$src2))]>;
def SUBSSrm : SSI<0x5C, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "subss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fsub FR32:$src1, (loadf32 addr:$src2)))]>;
def SUBSDrr : SDI<0x5C, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "subsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fsub FR64:$src1, FR64:$src2))]>;
def SUBSDrm : SDI<0x5C, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "subsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fsub FR64:$src1, (loadf64 addr:$src2)))]>;
} // isTwoAddress
// Square root; one-operand, so not two-address.
def SQRTSSr : SSI<0x51, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                  "sqrtss {$src, $dst|$dst, $src}",
                  [(set FR32:$dst, (fsqrt FR32:$src))]>;
def SQRTSSm : SSI<0x51, MRMSrcMem, (ops FR32:$dst, f32mem:$src),
                  "sqrtss {$src, $dst|$dst, $src}",
                  [(set FR32:$dst, (fsqrt (loadf32 addr:$src)))]>;
def SQRTSDr : SDI<0x51, MRMSrcReg, (ops FR64:$dst, FR64:$src),
                  "sqrtsd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (fsqrt FR64:$src))]>;
def SQRTSDm : SDI<0x51, MRMSrcMem, (ops FR64:$dst, f64mem:$src),
                  "sqrtsd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (fsqrt (loadf64 addr:$src)))]>;

// Reciprocal estimates; no ISD pattern (selected only via intrinsics below).
def RSQRTSSr : SSI<0x52, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                   "rsqrtss {$src, $dst|$dst, $src}", []>;
def RSQRTSSm : SSI<0x52, MRMSrcMem, (ops FR32:$dst, f32mem:$src),
                   "rsqrtss {$src, $dst|$dst, $src}", []>;
def RCPSSr : SSI<0x53, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                 "rcpss {$src, $dst|$dst, $src}", []>;
def RCPSSm : SSI<0x53, MRMSrcMem, (ops FR32:$dst, f32mem:$src),
                 "rcpss {$src, $dst|$dst, $src}", []>;
// Scalar max/min.  No ISD patterns; matched via intrinsics below.
// FIX(review): the *SD forms declared $src1 as FR32 while $dst is FR64; in a
// two-address instruction $src1 is tied to $dst, so both must be FR64.
// NOTE(review): the closing `}` lines of the let blocks were lost in this
// copy; restored.
let isTwoAddress = 1 in {
let isCommutable = 1 in {
def MAXSSrr : SSI<0x5F, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "maxss {$src2, $dst|$dst, $src2}", []>;
def MAXSDrr : SDI<0x5F, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "maxsd {$src2, $dst|$dst, $src2}", []>;
def MINSSrr : SSI<0x5D, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "minss {$src2, $dst|$dst, $src2}", []>;
def MINSDrr : SDI<0x5D, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "minsd {$src2, $dst|$dst, $src2}", []>;
} // isCommutable

def MAXSSrm : SSI<0x5F, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "maxss {$src2, $dst|$dst, $src2}", []>;
def MAXSDrm : SDI<0x5F, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "maxsd {$src2, $dst|$dst, $src2}", []>;
def MINSSrm : SSI<0x5D, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "minss {$src2, $dst|$dst, $src2}", []>;
def MINSDrm : SDI<0x5D, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "minsd {$src2, $dst|$dst, $src2}", []>;
} // isTwoAddress
// Aliases to match intrinsics which expect XMM operand(s).
// NOTE(review): the `int_x86_sse_*_ss` continuation lines and the closing
// `}` lines were lost in this copy; restored by analogy with the intact
// *_sd counterparts.
let isTwoAddress = 1 in {
let isCommutable = 1 in {
def Int_ADDSSrr : SS_Intrr<0x58, "addss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_add_ss>;
def Int_ADDSDrr : SD_Intrr<0x58, "addsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_add_sd>;
def Int_MULSSrr : SS_Intrr<0x59, "mulss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_mul_ss>;
def Int_MULSDrr : SD_Intrr<0x59, "mulsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_mul_sd>;
} // isCommutable

def Int_ADDSSrm : SS_Intrm<0x58, "addss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_add_ss>;
def Int_ADDSDrm : SD_Intrm<0x58, "addsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_add_sd>;
def Int_MULSSrm : SS_Intrm<0x59, "mulss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_mul_ss>;
def Int_MULSDrm : SD_Intrm<0x59, "mulsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_mul_sd>;

def Int_DIVSSrr : SS_Intrr<0x5E, "divss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_div_ss>;
def Int_DIVSSrm : SS_Intrm<0x5E, "divss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_div_ss>;
def Int_DIVSDrr : SD_Intrr<0x5E, "divsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_div_sd>;
def Int_DIVSDrm : SD_Intrm<0x5E, "divsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_div_sd>;

def Int_SUBSSrr : SS_Intrr<0x5C, "subss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_sub_ss>;
def Int_SUBSSrm : SS_Intrm<0x5C, "subss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_sub_ss>;
def Int_SUBSDrr : SD_Intrr<0x5C, "subsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_sub_sd>;
def Int_SUBSDrm : SD_Intrm<0x5C, "subsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_sub_sd>;
} // isTwoAddress
// Intrinsic forms of sqrt / reciprocal estimates (one XMM operand).
// NOTE(review): the `int_x86_sse_rcp_ss` continuation lines were lost in
// this copy; restored by analogy with the rsqrt defs above.
def Int_SQRTSSr : SS_Intr<0x51, "sqrtss {$src, $dst|$dst, $src}",
                          int_x86_sse_sqrt_ss>;
def Int_SQRTSSm : SS_Intm<0x51, "sqrtss {$src, $dst|$dst, $src}",
                          int_x86_sse_sqrt_ss>;
def Int_SQRTSDr : SD_Intr<0x51, "sqrtsd {$src, $dst|$dst, $src}",
                          int_x86_sse2_sqrt_sd>;
def Int_SQRTSDm : SD_Intm<0x51, "sqrtsd {$src, $dst|$dst, $src}",
                          int_x86_sse2_sqrt_sd>;

def Int_RSQRTSSr : SS_Intr<0x52, "rsqrtss {$src, $dst|$dst, $src}",
                           int_x86_sse_rsqrt_ss>;
def Int_RSQRTSSm : SS_Intm<0x52, "rsqrtss {$src, $dst|$dst, $src}",
                           int_x86_sse_rsqrt_ss>;
def Int_RCPSSr : SS_Intr<0x53, "rcpss {$src, $dst|$dst, $src}",
                         int_x86_sse_rcp_ss>;
def Int_RCPSSm : SS_Intm<0x53, "rcpss {$src, $dst|$dst, $src}",
                         int_x86_sse_rcp_ss>;
// Intrinsic forms of scalar max/min.
// NOTE(review): the `int_x86_sse_{max,min}_ss` continuation lines and the
// closing `}` lines were lost in this copy; restored by analogy with the
// intact *_sd counterparts.
let isTwoAddress = 1 in {
let isCommutable = 1 in {
def Int_MAXSSrr : SS_Intrr<0x5F, "maxss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_max_ss>;
def Int_MAXSDrr : SD_Intrr<0x5F, "maxsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_max_sd>;
def Int_MINSSrr : SS_Intrr<0x5D, "minss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_min_ss>;
def Int_MINSDrr : SD_Intrr<0x5D, "minsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_min_sd>;
} // isCommutable

def Int_MAXSSrm : SS_Intrm<0x5F, "maxss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_max_ss>;
def Int_MAXSDrm : SD_Intrm<0x5F, "maxsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_max_sd>;
def Int_MINSSrm : SS_Intrm<0x5D, "minss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_min_ss>;
def Int_MINSDrm : SD_Intrm<0x5D, "minsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_min_sd>;
} // isTwoAddress
// Conversion instructions
// cvtt* = truncating FP->int; cvtsi2* = int->FP; cvtsd2ss = double->single.
def CVTTSS2SIrr: SSI<0x2C, MRMSrcReg, (ops GR32:$dst, FR32:$src),
                     "cvttss2si {$src, $dst|$dst, $src}",
                     [(set GR32:$dst, (fp_to_sint FR32:$src))]>;
def CVTTSS2SIrm: SSI<0x2C, MRMSrcMem, (ops GR32:$dst, f32mem:$src),
                     "cvttss2si {$src, $dst|$dst, $src}",
                     [(set GR32:$dst, (fp_to_sint (loadf32 addr:$src)))]>;
def CVTTSD2SIrr: SDI<0x2C, MRMSrcReg, (ops GR32:$dst, FR64:$src),
                     "cvttsd2si {$src, $dst|$dst, $src}",
                     [(set GR32:$dst, (fp_to_sint FR64:$src))]>;
def CVTTSD2SIrm: SDI<0x2C, MRMSrcMem, (ops GR32:$dst, f64mem:$src),
                     "cvttsd2si {$src, $dst|$dst, $src}",
                     [(set GR32:$dst, (fp_to_sint (loadf64 addr:$src)))]>;
def CVTSD2SSrr: SDI<0x5A, MRMSrcReg, (ops FR32:$dst, FR64:$src),
                    "cvtsd2ss {$src, $dst|$dst, $src}",
                    [(set FR32:$dst, (fround FR64:$src))]>;
def CVTSD2SSrm: SDI<0x5A, MRMSrcMem, (ops FR32:$dst, f64mem:$src),
                    "cvtsd2ss {$src, $dst|$dst, $src}",
                    [(set FR32:$dst, (fround (loadf64 addr:$src)))]>;
def CVTSI2SSrr: SSI<0x2A, MRMSrcReg, (ops FR32:$dst, GR32:$src),
                    "cvtsi2ss {$src, $dst|$dst, $src}",
                    [(set FR32:$dst, (sint_to_fp GR32:$src))]>;
def CVTSI2SSrm: SSI<0x2A, MRMSrcMem, (ops FR32:$dst, i32mem:$src),
                    "cvtsi2ss {$src, $dst|$dst, $src}",
                    [(set FR32:$dst, (sint_to_fp (loadi32 addr:$src)))]>;
def CVTSI2SDrr: SDI<0x2A, MRMSrcReg, (ops FR64:$dst, GR32:$src),
                    "cvtsi2sd {$src, $dst|$dst, $src}",
                    [(set FR64:$dst, (sint_to_fp GR32:$src))]>;
def CVTSI2SDrm: SDI<0x2A, MRMSrcMem, (ops FR64:$dst, i32mem:$src),
                    "cvtsi2sd {$src, $dst|$dst, $src}",
                    [(set FR64:$dst, (sint_to_fp (loadi32 addr:$src)))]>;
// SSE2 instructions with XS prefix
// cvtss2sd is an SSE2 instruction but uses the XS prefix, so it cannot use
// the SDI template (which implies XD); spelled out with I + XS.
// NOTE(review): the `Requires<[HasSSE2]>;` lines were lost in this copy;
// restored (the comment above states these are SSE2 instructions).
def CVTSS2SDrr: I<0x5A, MRMSrcReg, (ops FR64:$dst, FR32:$src),
                  "cvtss2sd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (fextend FR32:$src))]>, XS,
                Requires<[HasSSE2]>;
def CVTSS2SDrm: I<0x5A, MRMSrcMem, (ops FR64:$dst, f32mem:$src),
                  "cvtss2sd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (extload addr:$src, f32))]>, XS,
                Requires<[HasSSE2]>;
// Match intrinsics which expect XMM operand(s).
// Rounding (non-truncating) FP->int conversions of the low element.
def Int_CVTSS2SIrr: SSI<0x2D, MRMSrcReg, (ops GR32:$dst, VR128:$src),
                        "cvtss2si {$src, $dst|$dst, $src}",
                        [(set GR32:$dst, (int_x86_sse_cvtss2si VR128:$src))]>;
def Int_CVTSS2SIrm: SSI<0x2D, MRMSrcMem, (ops GR32:$dst, f32mem:$src),
                        "cvtss2si {$src, $dst|$dst, $src}",
                        [(set GR32:$dst, (int_x86_sse_cvtss2si
                                          (loadv4f32 addr:$src)))]>;
def Int_CVTSD2SIrr: SDI<0x2D, MRMSrcReg, (ops GR32:$dst, VR128:$src),
                        "cvtsd2si {$src, $dst|$dst, $src}",
                        [(set GR32:$dst, (int_x86_sse2_cvtsd2si VR128:$src))]>;
def Int_CVTSD2SIrm: SDI<0x2D, MRMSrcMem, (ops GR32:$dst, f128mem:$src),
                        "cvtsd2si {$src, $dst|$dst, $src}",
                        [(set GR32:$dst, (int_x86_sse2_cvtsd2si
                                          (loadv2f64 addr:$src)))]>;
// Aliases for intrinsics
// Truncating FP->int conversions of the low element.
def Int_CVTTSS2SIrr: SSI<0x2C, MRMSrcReg, (ops GR32:$dst, VR128:$src),
                         "cvttss2si {$src, $dst|$dst, $src}",
                         [(set GR32:$dst, (int_x86_sse_cvttss2si VR128:$src))]>;
def Int_CVTTSS2SIrm: SSI<0x2C, MRMSrcMem, (ops GR32:$dst, f32mem:$src),
                         "cvttss2si {$src, $dst|$dst, $src}",
                         [(set GR32:$dst, (int_x86_sse_cvttss2si
                                           (loadv4f32 addr:$src)))]>;
def Int_CVTTSD2SIrr: SDI<0x2C, MRMSrcReg, (ops GR32:$dst, VR128:$src),
                         "cvttsd2si {$src, $dst|$dst, $src}",
                         [(set GR32:$dst, (int_x86_sse2_cvttsd2si VR128:$src))]>;
def Int_CVTTSD2SIrm: SDI<0x2C, MRMSrcMem, (ops GR32:$dst, f128mem:$src),
                         "cvttsd2si {$src, $dst|$dst, $src}",
                         [(set GR32:$dst, (int_x86_sse2_cvttsd2si
                                           (loadv2f64 addr:$src)))]>;
// int->float into the low element of an XMM register; two-address because
// the upper elements of $src1 pass through.
// NOTE(review): the `GR32:$src2))]>;` continuation and the closing `}` were
// lost in this copy; restored.
let isTwoAddress = 1 in {
def Int_CVTSI2SSrr: SSI<0x2A, MRMSrcReg,
                        (ops VR128:$dst, VR128:$src1, GR32:$src2),
                        "cvtsi2ss {$src2, $dst|$dst, $src2}",
                        [(set VR128:$dst, (int_x86_sse_cvtsi2ss VR128:$src1,
                                           GR32:$src2))]>;
def Int_CVTSI2SSrm: SSI<0x2A, MRMSrcMem,
                        (ops VR128:$dst, VR128:$src1, i32mem:$src2),
                        "cvtsi2ss {$src2, $dst|$dst, $src2}",
                        [(set VR128:$dst, (int_x86_sse_cvtsi2ss VR128:$src1,
                                           (loadi32 addr:$src2)))]>;
}
// Comparison instructions
// Produce an all-ones/all-zeros mask in the destination; $cc selects the
// predicate via the ${cc} asm substitution.  No ISD patterns (intrinsic
// forms below carry them).
// NOTE(review): CMPSSrr's pattern line and the closing `}` were lost in this
// copy; restored as the empty list used by its three siblings -- confirm.
let isTwoAddress = 1 in {
def CMPSSrr : SSI<0xC2, MRMSrcReg,
                  (ops FR32:$dst, FR32:$src1, FR32:$src, SSECC:$cc),
                  "cmp${cc}ss {$src, $dst|$dst, $src}",
                  []>;
def CMPSSrm : SSI<0xC2, MRMSrcMem,
                  (ops FR32:$dst, FR32:$src1, f32mem:$src, SSECC:$cc),
                  "cmp${cc}ss {$src, $dst|$dst, $src}", []>;
def CMPSDrr : SDI<0xC2, MRMSrcReg,
                  (ops FR64:$dst, FR64:$src1, FR64:$src, SSECC:$cc),
                  "cmp${cc}sd {$src, $dst|$dst, $src}", []>;
def CMPSDrm : SDI<0xC2, MRMSrcMem,
                  (ops FR64:$dst, FR64:$src1, f64mem:$src, SSECC:$cc),
                  "cmp${cc}sd {$src, $dst|$dst, $src}", []>;
}
// Unordered scalar FP compares; no register result (matched via X86cmp).
def UCOMISSrr: PSI<0x2E, MRMSrcReg, (ops FR32:$src1, FR32:$src2),
                   "ucomiss {$src2, $src1|$src1, $src2}",
                   [(X86cmp FR32:$src1, FR32:$src2)]>;
def UCOMISSrm: PSI<0x2E, MRMSrcMem, (ops FR32:$src1, f32mem:$src2),
                   "ucomiss {$src2, $src1|$src1, $src2}",
                   [(X86cmp FR32:$src1, (loadf32 addr:$src2))]>;
def UCOMISDrr: PDI<0x2E, MRMSrcReg, (ops FR64:$src1, FR64:$src2),
                   "ucomisd {$src2, $src1|$src1, $src2}",
                   [(X86cmp FR64:$src1, FR64:$src2)]>;
def UCOMISDrm: PDI<0x2E, MRMSrcMem, (ops FR64:$src1, f64mem:$src2),
                   "ucomisd {$src2, $src1|$src1, $src2}",
                   [(X86cmp FR64:$src1, (loadf64 addr:$src2))]>;
// Aliases to match intrinsics which expect XMM operand(s).
// NOTE(review): the closing `}` of the let block was lost in this copy;
// restored.
let isTwoAddress = 1 in {
def Int_CMPSSrr : SSI<0xC2, MRMSrcReg,
                      (ops VR128:$dst, VR128:$src1, VR128:$src, SSECC:$cc),
                      "cmp${cc}ss {$src, $dst|$dst, $src}",
                      [(set VR128:$dst, (int_x86_sse_cmp_ss VR128:$src1,
                                         VR128:$src, imm:$cc))]>;
def Int_CMPSSrm : SSI<0xC2, MRMSrcMem,
                      (ops VR128:$dst, VR128:$src1, f32mem:$src, SSECC:$cc),
                      "cmp${cc}ss {$src, $dst|$dst, $src}",
                      [(set VR128:$dst, (int_x86_sse_cmp_ss VR128:$src1,
                                         (load addr:$src), imm:$cc))]>;
def Int_CMPSDrr : SDI<0xC2, MRMSrcReg,
                      (ops VR128:$dst, VR128:$src1, VR128:$src, SSECC:$cc),
                      "cmp${cc}sd {$src, $dst|$dst, $src}", []>;
def Int_CMPSDrm : SDI<0xC2, MRMSrcMem,
                      (ops VR128:$dst, VR128:$src1, f64mem:$src, SSECC:$cc),
                      "cmp${cc}sd {$src, $dst|$dst, $src}", []>;
}
// Intrinsic forms of (u)comiss/(u)comisd on full XMM registers; no register
// result (matched via the X86ucomi/X86comi nodes).
def Int_UCOMISSrr: PSI<0x2E, MRMSrcReg, (ops VR128:$src1, VR128:$src2),
                       "ucomiss {$src2, $src1|$src1, $src2}",
                       [(X86ucomi (v4f32 VR128:$src1), VR128:$src2)]>;
def Int_UCOMISSrm: PSI<0x2E, MRMSrcMem, (ops VR128:$src1, f128mem:$src2),
                       "ucomiss {$src2, $src1|$src1, $src2}",
                       [(X86ucomi (v4f32 VR128:$src1), (loadv4f32 addr:$src2))]>;
def Int_UCOMISDrr: PDI<0x2E, MRMSrcReg, (ops VR128:$src1, VR128:$src2),
                       "ucomisd {$src2, $src1|$src1, $src2}",
                       [(X86ucomi (v2f64 VR128:$src1), (v2f64 VR128:$src2))]>;
def Int_UCOMISDrm: PDI<0x2E, MRMSrcMem, (ops VR128:$src1, f128mem:$src2),
                       "ucomisd {$src2, $src1|$src1, $src2}",
                       [(X86ucomi (v2f64 VR128:$src1), (loadv2f64 addr:$src2))]>;

def Int_COMISSrr: PSI<0x2F, MRMSrcReg, (ops VR128:$src1, VR128:$src2),
                      "comiss {$src2, $src1|$src1, $src2}",
                      [(X86comi (v4f32 VR128:$src1), VR128:$src2)]>;
def Int_COMISSrm: PSI<0x2F, MRMSrcMem, (ops VR128:$src1, f128mem:$src2),
                      "comiss {$src2, $src1|$src1, $src2}",
                      [(X86comi (v4f32 VR128:$src1), (loadv4f32 addr:$src2))]>;
def Int_COMISDrr: PDI<0x2F, MRMSrcReg, (ops VR128:$src1, VR128:$src2),
                      "comisd {$src2, $src1|$src1, $src2}",
                      [(X86comi (v2f64 VR128:$src1), (v2f64 VR128:$src2))]>;
def Int_COMISDrm: PDI<0x2F, MRMSrcMem, (ops VR128:$src1, f128mem:$src2),
                      "comisd {$src2, $src1|$src1, $src2}",
                      [(X86comi (v2f64 VR128:$src1), (loadv2f64 addr:$src2))]>;
// Aliases of packed instructions for scalar use. These all have names that
// start with 'Fs'.
660 // Alias instructions that map fld0 to pxor for sse.
661 // FIXME: remove when we can teach regalloc that xor reg, reg is ok.
// Materialize +0.0 with pxor (MRMInitReg: destination doubles as source).
def FsFLD0SS : I<0xEF, MRMInitReg, (ops FR32:$dst),
                 "pxor $dst, $dst", [(set FR32:$dst, fp32imm0)]>,
               Requires<[HasSSE1]>, TB, OpSize;
def FsFLD0SD : I<0xEF, MRMInitReg, (ops FR64:$dst),
                 "pxor $dst, $dst", [(set FR64:$dst, fp64imm0)]>,
               Requires<[HasSSE2]>, TB, OpSize;
// Alias instructions to do FR32 / FR64 reg-to-reg copy using movaps / movapd.
// Upper bits are disregarded.
def FsMOVAPSrr : PSI<0x28, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                     "movaps {$src, $dst|$dst, $src}", []>;
def FsMOVAPDrr : PDI<0x28, MRMSrcReg, (ops FR64:$dst, FR64:$src),
                     "movapd {$src, $dst|$dst, $src}", []>;
// Alias instructions to load FR32 / FR64 from f128mem using movaps / movapd.
// Upper bits are disregarded (matched via the X86loadp fragments).
def FsMOVAPSrm : PSI<0x28, MRMSrcMem, (ops FR32:$dst, f128mem:$src),
                     "movaps {$src, $dst|$dst, $src}",
                     [(set FR32:$dst, (X86loadpf32 addr:$src))]>;
def FsMOVAPDrm : PDI<0x28, MRMSrcMem, (ops FR64:$dst, f128mem:$src),
                     "movapd {$src, $dst|$dst, $src}",
                     [(set FR64:$dst, (X86loadpf64 addr:$src))]>;
// Alias bitwise logical operations using SSE logical ops on packed FP values.
// AND/XOR carry X86fand/X86fxor patterns; OR/ANDN have no patterns here.
// NOTE(review): the closing `}` lines of the let blocks were lost in this
// copy; restored (ANDN is not commutative, so its defs sit outside the
// isCommutable block).
let isTwoAddress = 1 in {
let isCommutable = 1 in {
def FsANDPSrr : PSI<0x54, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                    "andps {$src2, $dst|$dst, $src2}",
                    [(set FR32:$dst, (X86fand FR32:$src1, FR32:$src2))]>;
def FsANDPDrr : PDI<0x54, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                    "andpd {$src2, $dst|$dst, $src2}",
                    [(set FR64:$dst, (X86fand FR64:$src1, FR64:$src2))]>;
def FsORPSrr : PSI<0x56, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                   "orps {$src2, $dst|$dst, $src2}", []>;
def FsORPDrr : PDI<0x56, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                   "orpd {$src2, $dst|$dst, $src2}", []>;
def FsXORPSrr : PSI<0x57, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                    "xorps {$src2, $dst|$dst, $src2}",
                    [(set FR32:$dst, (X86fxor FR32:$src1, FR32:$src2))]>;
def FsXORPDrr : PDI<0x57, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                    "xorpd {$src2, $dst|$dst, $src2}",
                    [(set FR64:$dst, (X86fxor FR64:$src1, FR64:$src2))]>;
} // isCommutable

def FsANDPSrm : PSI<0x54, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f128mem:$src2),
                    "andps {$src2, $dst|$dst, $src2}",
                    [(set FR32:$dst, (X86fand FR32:$src1,
                                      (X86loadpf32 addr:$src2)))]>;
def FsANDPDrm : PDI<0x54, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f128mem:$src2),
                    "andpd {$src2, $dst|$dst, $src2}",
                    [(set FR64:$dst, (X86fand FR64:$src1,
                                      (X86loadpf64 addr:$src2)))]>;
def FsORPSrm : PSI<0x56, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f128mem:$src2),
                   "orps {$src2, $dst|$dst, $src2}", []>;
def FsORPDrm : PDI<0x56, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f128mem:$src2),
                   "orpd {$src2, $dst|$dst, $src2}", []>;
def FsXORPSrm : PSI<0x57, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f128mem:$src2),
                    "xorps {$src2, $dst|$dst, $src2}",
                    [(set FR32:$dst, (X86fxor FR32:$src1,
                                      (X86loadpf32 addr:$src2)))]>;
def FsXORPDrm : PDI<0x57, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f128mem:$src2),
                    "xorpd {$src2, $dst|$dst, $src2}",
                    [(set FR64:$dst, (X86fxor FR64:$src1,
                                      (X86loadpf64 addr:$src2)))]>;

def FsANDNPSrr : PSI<0x55, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                     "andnps {$src2, $dst|$dst, $src2}", []>;
def FsANDNPSrm : PSI<0x55, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f128mem:$src2),
                     "andnps {$src2, $dst|$dst, $src2}", []>;
def FsANDNPDrr : PDI<0x55, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                     "andnpd {$src2, $dst|$dst, $src2}", []>;
def FsANDNPDrm : PDI<0x55, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f128mem:$src2),
                     "andnpd {$src2, $dst|$dst, $src2}", []>;
} // isTwoAddress
// Pseudo that defines a VR128 register without emitting any machine code;
// matched by (v4f32 (undef)) so undef vector values get a register.
// NOTE(review): the original trailing lines of this def (after the `>,` on
// line 743, presumably a Requires<...> clause and terminator) are missing
// from this listing -- restore from the original file.
736 //===----------------------------------------------------------------------===//
737 // SSE packed FP Instructions
738 //===----------------------------------------------------------------------===//
740 // Some 'special' instructions
741 def IMPLICIT_DEF_VR128 : I<0, Pseudo, (ops VR128:$dst),
742 "#IMPLICIT_DEF $dst",
743 [(set VR128:$dst, (v4f32 (undef)))]>,
// Aligned 128-bit FP moves (MOVAPS/MOVAPD, opcodes 0x28 load / 0x29 store).
// The reg-reg forms carry no pattern (register copies are handled
// elsewhere); the memory forms match plain aligned vector load/store.
747 def MOVAPSrr : PSI<0x28, MRMSrcReg, (ops VR128:$dst, VR128:$src),
748 "movaps {$src, $dst|$dst, $src}", []>;
749 def MOVAPSrm : PSI<0x28, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
750 "movaps {$src, $dst|$dst, $src}",
751 [(set VR128:$dst, (loadv4f32 addr:$src))]>;
752 def MOVAPDrr : PDI<0x28, MRMSrcReg, (ops VR128:$dst, VR128:$src),
753 "movapd {$src, $dst|$dst, $src}", []>;
754 def MOVAPDrm : PDI<0x28, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
755 "movapd {$src, $dst|$dst, $src}",
756 [(set VR128:$dst, (loadv2f64 addr:$src))]>;
758 def MOVAPSmr : PSI<0x29, MRMDestMem, (ops f128mem:$dst, VR128:$src),
759 "movaps {$src, $dst|$dst, $src}",
760 [(store (v4f32 VR128:$src), addr:$dst)]>;
761 def MOVAPDmr : PDI<0x29, MRMDestMem, (ops f128mem:$dst, VR128:$src),
762 "movapd {$src, $dst|$dst, $src}",
763 [(store (v2f64 VR128:$src), addr:$dst)]>;
// Unaligned 128-bit FP moves (MOVUPS/MOVUPD, 0x10 load / 0x11 store).
// Memory forms are matched only through the loadu/storeu intrinsics, never
// through plain load/store, so ordinary accesses keep the aligned forms.
765 def MOVUPSrr : PSI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src),
766 "movups {$src, $dst|$dst, $src}", []>;
767 def MOVUPSrm : PSI<0x10, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
768 "movups {$src, $dst|$dst, $src}",
769 [(set VR128:$dst, (int_x86_sse_loadu_ps addr:$src))]>;
770 def MOVUPSmr : PSI<0x11, MRMDestMem, (ops f128mem:$dst, VR128:$src),
771 "movups {$src, $dst|$dst, $src}",
772 [(int_x86_sse_storeu_ps addr:$dst, VR128:$src)]>;
773 def MOVUPDrr : PDI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src),
774 "movupd {$src, $dst|$dst, $src}", []>;
775 def MOVUPDrm : PDI<0x10, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
776 "movupd {$src, $dst|$dst, $src}",
777 [(set VR128:$dst, (int_x86_sse2_loadu_pd addr:$src))]>;
778 def MOVUPDmr : PDI<0x11, MRMDestMem, (ops f128mem:$dst, VR128:$src),
779 "movupd {$src, $dst|$dst, $src}",
780 [(int_x86_sse2_storeu_pd addr:$dst, VR128:$src)]>;
// MOVLPS/MOVLPD (0x12/0x13) and MOVHPS/MOVHPD (0x16/0x17): merge a 64-bit
// memory value into the low/high half of an XMM register, or store one half.
// The load forms are selected as a vector_shuffle of the register with a
// scalar_to_vector'd f64 load; AddedComplexity = 20 makes these shuffle
// patterns win over generic shuffle lowering.
// NOTE(review): several original lines are missing from this listing --
// the `[(set VR128:$dst,` line of each *rm pattern (orig. 786/792/798/804)
// and the trailing `addr:$dst)]>;` of MOVHPSmr/MOVHPDmr (orig. 828/834).
// Restore from the original file.
782 let isTwoAddress = 1 in {
783 let AddedComplexity = 20 in {
784 def MOVLPSrm : PSI<0x12, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2),
785 "movlps {$src2, $dst|$dst, $src2}",
787 (v4f32 (vector_shuffle VR128:$src1,
788 (bc_v4f32 (v2f64 (scalar_to_vector (loadf64 addr:$src2)))),
789 MOVLP_shuffle_mask)))]>;
790 def MOVLPDrm : PDI<0x12, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2),
791 "movlpd {$src2, $dst|$dst, $src2}",
793 (v2f64 (vector_shuffle VR128:$src1,
794 (scalar_to_vector (loadf64 addr:$src2)),
795 MOVLP_shuffle_mask)))]>;
796 def MOVHPSrm : PSI<0x16, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2),
797 "movhps {$src2, $dst|$dst, $src2}",
799 (v4f32 (vector_shuffle VR128:$src1,
800 (bc_v4f32 (v2f64 (scalar_to_vector (loadf64 addr:$src2)))),
801 MOVHP_shuffle_mask)))]>;
802 def MOVHPDrm : PDI<0x16, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2),
803 "movhpd {$src2, $dst|$dst, $src2}",
805 (v2f64 (vector_shuffle VR128:$src1,
806 (scalar_to_vector (loadf64 addr:$src2)),
807 MOVHP_shuffle_mask)))]>;
811 def MOVLPSmr : PSI<0x13, MRMDestMem, (ops f64mem:$dst, VR128:$src),
812 "movlps {$src, $dst|$dst, $src}",
813 [(store (f64 (vector_extract (bc_v2f64 (v4f32 VR128:$src)),
814 (iPTR 0))), addr:$dst)]>;
815 def MOVLPDmr : PDI<0x13, MRMDestMem, (ops f64mem:$dst, VR128:$src),
816 "movlpd {$src, $dst|$dst, $src}",
817 [(store (f64 (vector_extract (v2f64 VR128:$src),
818 (iPTR 0))), addr:$dst)]>;
820 // v2f64 extract element 1 is always custom lowered to unpack high to low
821 // and extract element 0 so the non-store version isn't too horrible.
822 def MOVHPSmr : PSI<0x17, MRMDestMem, (ops f64mem:$dst, VR128:$src),
823 "movhps {$src, $dst|$dst, $src}",
824 [(store (f64 (vector_extract
825 (v2f64 (vector_shuffle
826 (bc_v2f64 (v4f32 VR128:$src)), (undef),
827 UNPCKH_shuffle_mask)), (iPTR 0))),
829 def MOVHPDmr : PDI<0x17, MRMDestMem, (ops f64mem:$dst, VR128:$src),
830 "movhpd {$src, $dst|$dst, $src}",
831 [(store (f64 (vector_extract
832 (v2f64 (vector_shuffle VR128:$src, (undef),
833 UNPCKH_shuffle_mask)), (iPTR 0))),
// MOVLHPS/MOVHLPS: reg-reg half moves, matched as shuffles of two sources.
// MOVSHDUP/MOVSLDUP (SSE3): duplicate odd/even f32 elements; MOVDDUP (SSE3):
// splat the low f64.  Reg and mem forms of each match the same shuffle mask
// with either a register source or a load.
// NOTE(review): this listing is missing the `[(set VR128:$dst,` lines of the
// MOVLHPS/MOVHLPS patterns (orig. 840/846) and the `VR128:$src, (undef),`
// operand lines of the *DUPrr patterns (orig. 855/866/877), plus the
// `(undef),` of MOVDDUPrm (orig. 883).  Restore from the original file.
836 let isTwoAddress = 1 in {
837 let AddedComplexity = 20 in {
838 def MOVLHPSrr : PSI<0x16, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
839 "movlhps {$src2, $dst|$dst, $src2}",
841 (v4f32 (vector_shuffle VR128:$src1, VR128:$src2,
842 MOVHP_shuffle_mask)))]>;
844 def MOVHLPSrr : PSI<0x12, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
845 "movhlps {$src2, $dst|$dst, $src2}",
847 (v4f32 (vector_shuffle VR128:$src1, VR128:$src2,
848 MOVHLPS_shuffle_mask)))]>;
852 def MOVSHDUPrr : S3SI<0x16, MRMSrcReg, (ops VR128:$dst, VR128:$src),
853 "movshdup {$src, $dst|$dst, $src}",
854 [(set VR128:$dst, (v4f32 (vector_shuffle
856 MOVSHDUP_shuffle_mask)))]>;
857 def MOVSHDUPrm : S3SI<0x16, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
858 "movshdup {$src, $dst|$dst, $src}",
859 [(set VR128:$dst, (v4f32 (vector_shuffle
860 (loadv4f32 addr:$src), (undef),
861 MOVSHDUP_shuffle_mask)))]>;
863 def MOVSLDUPrr : S3SI<0x12, MRMSrcReg, (ops VR128:$dst, VR128:$src),
864 "movsldup {$src, $dst|$dst, $src}",
865 [(set VR128:$dst, (v4f32 (vector_shuffle
867 MOVSLDUP_shuffle_mask)))]>;
868 def MOVSLDUPrm : S3SI<0x12, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
869 "movsldup {$src, $dst|$dst, $src}",
870 [(set VR128:$dst, (v4f32 (vector_shuffle
871 (loadv4f32 addr:$src), (undef),
872 MOVSLDUP_shuffle_mask)))]>;
874 def MOVDDUPrr : S3DI<0x12, MRMSrcReg, (ops VR128:$dst, VR128:$src),
875 "movddup {$src, $dst|$dst, $src}",
876 [(set VR128:$dst, (v2f64 (vector_shuffle
878 SSE_splat_v2_mask)))]>;
879 def MOVDDUPrm : S3DI<0x12, MRMSrcMem, (ops VR128:$dst, f64mem:$src),
880 "movddup {$src, $dst|$dst, $src}",
881 [(set VR128:$dst, (v2f64 (vector_shuffle
882 (scalar_to_vector (loadf64 addr:$src)),
884 SSE_splat_v2_mask)))]>;
// Packed int<->FP conversions matched from SSE2 intrinsics.  CVTDQ2PS is an
// SSE2 instruction encoded with no OpSize prefix, so it uses the raw `I`
// class with TB + an explicit HasSSE2 predicate instead of PDI; CVTDQ2PD
// likewise needs an explicit XS prefix.  Memory forms load v2i64 and bitcast
// to the element type the intrinsic expects.
886 // SSE2 instructions without OpSize prefix
887 def Int_CVTDQ2PSrr : I<0x5B, MRMSrcReg, (ops VR128:$dst, VR128:$src),
888 "cvtdq2ps {$src, $dst|$dst, $src}",
889 [(set VR128:$dst, (int_x86_sse2_cvtdq2ps VR128:$src))]>,
890 TB, Requires<[HasSSE2]>;
891 def Int_CVTDQ2PSrm : I<0x5B, MRMSrcMem, (ops VR128:$dst, i128mem:$src),
892 "cvtdq2ps {$src, $dst|$dst, $src}",
893 [(set VR128:$dst, (int_x86_sse2_cvtdq2ps
894 (bc_v4i32 (loadv2i64 addr:$src))))]>,
895 TB, Requires<[HasSSE2]>;
897 // SSE2 instructions with XS prefix
898 def Int_CVTDQ2PDrr : I<0xE6, MRMSrcReg, (ops VR128:$dst, VR128:$src),
899 "cvtdq2pd {$src, $dst|$dst, $src}",
900 [(set VR128:$dst, (int_x86_sse2_cvtdq2pd VR128:$src))]>,
901 XS, Requires<[HasSSE2]>;
902 def Int_CVTDQ2PDrm : I<0xE6, MRMSrcMem, (ops VR128:$dst, i64mem:$src),
903 "cvtdq2pd {$src, $dst|$dst, $src}",
904 [(set VR128:$dst, (int_x86_sse2_cvtdq2pd
905 (bc_v4i32 (loadv2i64 addr:$src))))]>,
906 XS, Requires<[HasSSE2]>;
908 def Int_CVTPS2DQrr : PDI<0x5B, MRMSrcReg, (ops VR128:$dst, VR128:$src),
909 "cvtps2dq {$src, $dst|$dst, $src}",
910 [(set VR128:$dst, (int_x86_sse2_cvtps2dq VR128:$src))]>;
911 def Int_CVTPS2DQrm : PDI<0x5B, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
912 "cvtps2dq {$src, $dst|$dst, $src}",
913 [(set VR128:$dst, (int_x86_sse2_cvtps2dq
914 (loadv4f32 addr:$src)))]>;
// Truncating / double-to-int packed conversions from SSE2 intrinsics.
// CVTTPS2DQ requires the XS prefix and CVTPD2DQ the XD prefix, so both use
// the raw `I` class with an explicit HasSSE2 predicate; CVTTPD2DQ takes the
// normal 0x66-prefixed PDI class.
915 // SSE2 packed instructions with XS prefix
916 def Int_CVTTPS2DQrr : I<0x5B, MRMSrcReg, (ops VR128:$dst, VR128:$src),
917 "cvttps2dq {$src, $dst|$dst, $src}",
918 [(set VR128:$dst, (int_x86_sse2_cvttps2dq VR128:$src))]>,
919 XS, Requires<[HasSSE2]>;
920 def Int_CVTTPS2DQrm : I<0x5B, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
921 "cvttps2dq {$src, $dst|$dst, $src}",
922 [(set VR128:$dst, (int_x86_sse2_cvttps2dq
923 (loadv4f32 addr:$src)))]>,
924 XS, Requires<[HasSSE2]>;
926 // SSE2 packed instructions with XD prefix
927 def Int_CVTPD2DQrr : I<0xE6, MRMSrcReg, (ops VR128:$dst, VR128:$src),
928 "cvtpd2dq {$src, $dst|$dst, $src}",
929 [(set VR128:$dst, (int_x86_sse2_cvtpd2dq VR128:$src))]>,
930 XD, Requires<[HasSSE2]>;
931 def Int_CVTPD2DQrm : I<0xE6, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
932 "cvtpd2dq {$src, $dst|$dst, $src}",
933 [(set VR128:$dst, (int_x86_sse2_cvtpd2dq
934 (loadv2f64 addr:$src)))]>,
935 XD, Requires<[HasSSE2]>;
936 def Int_CVTTPD2DQrr : PDI<0xE6, MRMSrcReg, (ops VR128:$dst, VR128:$src),
937 "cvttpd2dq {$src, $dst|$dst, $src}",
938 [(set VR128:$dst, (int_x86_sse2_cvttpd2dq VR128:$src))]>;
939 def Int_CVTTPD2DQrm : PDI<0xE6, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
940 "cvttpd2dq {$src, $dst|$dst, $src}",
941 [(set VR128:$dst, (int_x86_sse2_cvttpd2dq
942 (loadv2f64 addr:$src)))]>;
// CVTPS2PD (0x5A, no OpSize prefix): convert two packed f32 to two f64,
// matched from the SSE2 intrinsic.
944 // SSE2 instructions without OpSize prefix
945 def Int_CVTPS2PDrr : I<0x5A, MRMSrcReg, (ops VR128:$dst, VR128:$src),
946 "cvtps2pd {$src, $dst|$dst, $src}",
947 [(set VR128:$dst, (int_x86_sse2_cvtps2pd VR128:$src))]>,
948 TB, Requires<[HasSSE2]>;
// FIX: the memory form must use MRMSrcMem.  It takes an f64mem operand and
// its pattern performs a load; the previous MRMSrcReg would emit a
// register-register ModRM byte for a memory operand.
949 def Int_CVTPS2PDrm : I<0x5A, MRMSrcMem, (ops VR128:$dst, f64mem:$src),
950 "cvtps2pd {$src, $dst|$dst, $src}",
951 [(set VR128:$dst, (int_x86_sse2_cvtps2pd
952 (loadv4f32 addr:$src)))]>,
953 TB, Requires<[HasSSE2]>;
// CVTPD2PS (0x66 0x5A): convert two packed f64 to two f32, matched from the
// SSE2 intrinsic.
955 def Int_CVTPD2PSrr : PDI<0x5A, MRMSrcReg, (ops VR128:$dst, VR128:$src),
956 "cvtpd2ps {$src, $dst|$dst, $src}",
957 [(set VR128:$dst, (int_x86_sse2_cvtpd2ps VR128:$src))]>;
// FIX: the memory form must use MRMSrcMem.  It takes an f128mem operand and
// its pattern performs a load; the previous MRMSrcReg would encode the
// memory operand as a register.
958 def Int_CVTPD2PSrm : PDI<0x5A, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
959 "cvtpd2ps {$src, $dst|$dst, $src}",
960 [(set VR128:$dst, (int_x86_sse2_cvtpd2ps
961 (loadv2f64 addr:$src)))]>;
// Scalar conversion intrinsics (cvtsi2sd / cvtsd2ss / cvtss2sd).  These are
// two-address: $src1 supplies the untouched upper elements of the result.
// NOTE(review): several original continuation lines are missing from this
// listing -- the second intrinsic operand / terminator lines of the rr forms
// (orig. 970, 980, 990-991) and the tail of Int_CVTSS2SDrm (orig. 997-1000,
// including its Requires clause and the closing `}` of the let scope).
// Restore from the original file.
963 // Match intrinsics which expect XMM operand(s).
964 // Aliases for intrinsics
965 let isTwoAddress = 1 in {
966 def Int_CVTSI2SDrr: SDI<0x2A, MRMSrcReg,
967 (ops VR128:$dst, VR128:$src1, GR32:$src2),
968 "cvtsi2sd {$src2, $dst|$dst, $src2}",
969 [(set VR128:$dst, (int_x86_sse2_cvtsi2sd VR128:$src1,
971 def Int_CVTSI2SDrm: SDI<0x2A, MRMSrcMem,
972 (ops VR128:$dst, VR128:$src1, i32mem:$src2),
973 "cvtsi2sd {$src2, $dst|$dst, $src2}",
974 [(set VR128:$dst, (int_x86_sse2_cvtsi2sd VR128:$src1,
975 (loadi32 addr:$src2)))]>;
976 def Int_CVTSD2SSrr: SDI<0x5A, MRMSrcReg,
977 (ops VR128:$dst, VR128:$src1, VR128:$src2),
978 "cvtsd2ss {$src2, $dst|$dst, $src2}",
979 [(set VR128:$dst, (int_x86_sse2_cvtsd2ss VR128:$src1,
981 def Int_CVTSD2SSrm: SDI<0x5A, MRMSrcMem,
982 (ops VR128:$dst, VR128:$src1, f64mem:$src2),
983 "cvtsd2ss {$src2, $dst|$dst, $src2}",
984 [(set VR128:$dst, (int_x86_sse2_cvtsd2ss VR128:$src1,
985 (loadv2f64 addr:$src2)))]>;
986 def Int_CVTSS2SDrr: I<0x5A, MRMSrcReg,
987 (ops VR128:$dst, VR128:$src1, VR128:$src2),
988 "cvtss2sd {$src2, $dst|$dst, $src2}",
989 [(set VR128:$dst, (int_x86_sse2_cvtss2sd VR128:$src1,
992 def Int_CVTSS2SDrm: I<0x5A, MRMSrcMem,
993 (ops VR128:$dst, VR128:$src1, f32mem:$src2),
994 "cvtss2sd {$src2, $dst|$dst, $src2}",
995 [(set VR128:$dst, (int_x86_sse2_cvtss2sd VR128:$src1,
996 (loadv4f32 addr:$src2)))]>, XS,
// Packed FP arithmetic (ADD 0x58, MUL 0x59, SUB 0x5C, DIV 0x5E), matched
// directly from the generic fadd/fmul/fsub/fdiv nodes.  ADD and MUL are
// commutable; SUB and DIV are not.  Memory forms fold a 128-bit load of the
// second operand.
// NOTE(review): the closing `}` of the isCommutable scope (orig. 1015-1016)
// is missing from this listing -- restore from the original file.
1001 let isTwoAddress = 1 in {
1002 let isCommutable = 1 in {
1003 def ADDPSrr : PSI<0x58, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1004 "addps {$src2, $dst|$dst, $src2}",
1005 [(set VR128:$dst, (v4f32 (fadd VR128:$src1, VR128:$src2)))]>;
1006 def ADDPDrr : PDI<0x58, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1007 "addpd {$src2, $dst|$dst, $src2}",
1008 [(set VR128:$dst, (v2f64 (fadd VR128:$src1, VR128:$src2)))]>;
1009 def MULPSrr : PSI<0x59, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1010 "mulps {$src2, $dst|$dst, $src2}",
1011 [(set VR128:$dst, (v4f32 (fmul VR128:$src1, VR128:$src2)))]>;
1012 def MULPDrr : PDI<0x59, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1013 "mulpd {$src2, $dst|$dst, $src2}",
1014 [(set VR128:$dst, (v2f64 (fmul VR128:$src1, VR128:$src2)))]>;
1017 def ADDPSrm : PSI<0x58, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1018 "addps {$src2, $dst|$dst, $src2}",
1019 [(set VR128:$dst, (v4f32 (fadd VR128:$src1,
1020 (load addr:$src2))))]>;
1021 def ADDPDrm : PDI<0x58, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1022 "addpd {$src2, $dst|$dst, $src2}",
1023 [(set VR128:$dst, (v2f64 (fadd VR128:$src1,
1024 (load addr:$src2))))]>;
1025 def MULPSrm : PSI<0x59, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1026 "mulps {$src2, $dst|$dst, $src2}",
1027 [(set VR128:$dst, (v4f32 (fmul VR128:$src1,
1028 (load addr:$src2))))]>;
1029 def MULPDrm : PDI<0x59, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1030 "mulpd {$src2, $dst|$dst, $src2}",
1031 [(set VR128:$dst, (v2f64 (fmul VR128:$src1,
1032 (load addr:$src2))))]>;
1034 def DIVPSrr : PSI<0x5E, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1035 "divps {$src2, $dst|$dst, $src2}",
1036 [(set VR128:$dst, (v4f32 (fdiv VR128:$src1, VR128:$src2)))]>;
1037 def DIVPSrm : PSI<0x5E, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1038 "divps {$src2, $dst|$dst, $src2}",
1039 [(set VR128:$dst, (v4f32 (fdiv VR128:$src1,
1040 (load addr:$src2))))]>;
1041 def DIVPDrr : PDI<0x5E, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1042 "divpd {$src2, $dst|$dst, $src2}",
1043 [(set VR128:$dst, (v2f64 (fdiv VR128:$src1, VR128:$src2)))]>;
1044 def DIVPDrm : PDI<0x5E, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1045 "divpd {$src2, $dst|$dst, $src2}",
1046 [(set VR128:$dst, (v2f64 (fdiv VR128:$src1,
1047 (load addr:$src2))))]>;
1049 def SUBPSrr : PSI<0x5C, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1050 "subps {$src2, $dst|$dst, $src2}",
1051 [(set VR128:$dst, (v4f32 (fsub VR128:$src1, VR128:$src2)))]>;
1052 def SUBPSrm : PSI<0x5C, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1053 "subps {$src2, $dst|$dst, $src2}",
1054 [(set VR128:$dst, (v4f32 (fsub VR128:$src1,
1055 (load addr:$src2))))]>;
1056 def SUBPDrr : PDI<0x5C, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1057 "subpd {$src2, $dst|$dst, $src2}",
1058 [(set VR128:$dst, (v2f64 (fsub VR128:$src1, VR128:$src2)))]>;
1059 def SUBPDrm : PDI<0x5C, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1060 "subpd {$src2, $dst|$dst, $src2}",
1061 [(set VR128:$dst, (v2f64 (fsub VR128:$src1,
1062 (load addr:$src2))))]>;
// SSE3 ADDSUBPS/ADDSUBPD (0xD0): alternating subtract/add of packed
// elements, matched only from the SSE3 intrinsics.
// NOTE(review): the second intrinsic operand lines of the rr forms
// (orig. 1068 and 1078, e.g. `VR128:$src2))]>;`) are missing from this
// listing -- restore from the original file.
1064 def ADDSUBPSrr : S3DI<0xD0, MRMSrcReg,
1065 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1066 "addsubps {$src2, $dst|$dst, $src2}",
1067 [(set VR128:$dst, (int_x86_sse3_addsub_ps VR128:$src1,
1069 def ADDSUBPSrm : S3DI<0xD0, MRMSrcMem,
1070 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1071 "addsubps {$src2, $dst|$dst, $src2}",
1072 [(set VR128:$dst, (int_x86_sse3_addsub_ps VR128:$src1,
1073 (loadv4f32 addr:$src2)))]>;
1074 def ADDSUBPDrr : S3I<0xD0, MRMSrcReg,
1075 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1076 "addsubpd {$src2, $dst|$dst, $src2}",
1077 [(set VR128:$dst, (int_x86_sse3_addsub_pd VR128:$src1,
1079 def ADDSUBPDrm : S3I<0xD0, MRMSrcMem,
1080 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1081 "addsubpd {$src2, $dst|$dst, $src2}",
1082 [(set VR128:$dst, (int_x86_sse3_addsub_pd VR128:$src1,
1083 (loadv2f64 addr:$src2)))]>;
// Packed square-root / reciprocal approximations, via the PS_Int*/PD_Int*
// helper classes: SQRT (0x51) for both PS and PD, RSQRT (0x52) and RCP
// (0x53) exist only for PS.  Each has a register and a memory form wired to
// the corresponding intrinsic.
1086 def SQRTPSr : PS_Intr<0x51, "sqrtps {$src, $dst|$dst, $src}",
1087 int_x86_sse_sqrt_ps>;
1088 def SQRTPSm : PS_Intm<0x51, "sqrtps {$src, $dst|$dst, $src}",
1089 int_x86_sse_sqrt_ps>;
1090 def SQRTPDr : PD_Intr<0x51, "sqrtpd {$src, $dst|$dst, $src}",
1091 int_x86_sse2_sqrt_pd>;
1092 def SQRTPDm : PD_Intm<0x51, "sqrtpd {$src, $dst|$dst, $src}",
1093 int_x86_sse2_sqrt_pd>;
1095 def RSQRTPSr : PS_Intr<0x52, "rsqrtps {$src, $dst|$dst, $src}",
1096 int_x86_sse_rsqrt_ps>;
1097 def RSQRTPSm : PS_Intm<0x52, "rsqrtps {$src, $dst|$dst, $src}",
1098 int_x86_sse_rsqrt_ps>;
1099 def RCPPSr : PS_Intr<0x53, "rcpps {$src, $dst|$dst, $src}",
1100 int_x86_sse_rcp_ps>;
1101 def RCPPSm : PS_Intm<0x53, "rcpps {$src, $dst|$dst, $src}",
1102 int_x86_sse_rcp_ps>;
// Packed MAX (0x5F) / MIN (0x5D), matched from the SSE intrinsics via the
// two-operand PS_Intrr/PD_Intrr (register) and *_Intrm (memory) helper
// classes.  Register forms are commutable.
// NOTE(review): the closing `}` lines for the isCommutable and isTwoAddress
// scopes (orig. 1114 and 1123-1124) are missing from this listing --
// restore from the original file.
1104 let isTwoAddress = 1 in {
1105 let isCommutable = 1 in {
1106 def MAXPSrr : PS_Intrr<0x5F, "maxps {$src2, $dst|$dst, $src2}",
1107 int_x86_sse_max_ps>;
1108 def MAXPDrr : PD_Intrr<0x5F, "maxpd {$src2, $dst|$dst, $src2}",
1109 int_x86_sse2_max_pd>;
1110 def MINPSrr : PS_Intrr<0x5D, "minps {$src2, $dst|$dst, $src2}",
1111 int_x86_sse_min_ps>;
1112 def MINPDrr : PD_Intrr<0x5D, "minpd {$src2, $dst|$dst, $src2}",
1113 int_x86_sse2_min_pd>;
1115 def MAXPSrm : PS_Intrm<0x5F, "maxps {$src2, $dst|$dst, $src2}",
1116 int_x86_sse_max_ps>;
1117 def MAXPDrm : PD_Intrm<0x5F, "maxpd {$src2, $dst|$dst, $src2}",
1118 int_x86_sse2_max_pd>;
1119 def MINPSrm : PS_Intrm<0x5D, "minps {$src2, $dst|$dst, $src2}",
1120 int_x86_sse_min_ps>;
1121 def MINPDrm : PD_Intrm<0x5D, "minpd {$src2, $dst|$dst, $src2}",
1122 int_x86_sse2_min_pd>;
// Packed logical ops on 128-bit values (AND 0x54, ANDN 0x55, OR 0x56,
// XOR 0x57).  The patterns operate on v2i64 with bc_v2i64 bitcasts so one
// instruction covers all element types; ANDN is expressed as
// and(xor/vnot(src1, all-ones), src2).
// NOTE(review): several `[(set VR128:$dst,` lines of the PD forms
// (orig. 1133, 1141, 1149, 1159, 1168, 1177, 1192, 1197) and the second
// operand of ANDNPSrr (orig. 1184) are missing from this listing --
// restore from the original file.
1126 let isTwoAddress = 1 in {
1127 let isCommutable = 1 in {
1128 def ANDPSrr : PSI<0x54, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1129 "andps {$src2, $dst|$dst, $src2}",
1130 [(set VR128:$dst, (v2i64 (and VR128:$src1, VR128:$src2)))]>;
1131 def ANDPDrr : PDI<0x54, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1132 "andpd {$src2, $dst|$dst, $src2}",
1134 (and (bc_v2i64 (v2f64 VR128:$src1)),
1135 (bc_v2i64 (v2f64 VR128:$src2))))]>;
1136 def ORPSrr : PSI<0x56, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1137 "orps {$src2, $dst|$dst, $src2}",
1138 [(set VR128:$dst, (v2i64 (or VR128:$src1, VR128:$src2)))]>;
1139 def ORPDrr : PDI<0x56, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1140 "orpd {$src2, $dst|$dst, $src2}",
1142 (or (bc_v2i64 (v2f64 VR128:$src1)),
1143 (bc_v2i64 (v2f64 VR128:$src2))))]>;
1144 def XORPSrr : PSI<0x57, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1145 "xorps {$src2, $dst|$dst, $src2}",
1146 [(set VR128:$dst, (v2i64 (xor VR128:$src1, VR128:$src2)))]>;
1147 def XORPDrr : PDI<0x57, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1148 "xorpd {$src2, $dst|$dst, $src2}",
1150 (xor (bc_v2i64 (v2f64 VR128:$src1)),
1151 (bc_v2i64 (v2f64 VR128:$src2))))]>;
1153 def ANDPSrm : PSI<0x54, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1154 "andps {$src2, $dst|$dst, $src2}",
1155 [(set VR128:$dst, (and VR128:$src1,
1156 (bc_v2i64 (loadv4f32 addr:$src2))))]>;
1157 def ANDPDrm : PDI<0x54, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1158 "andpd {$src2, $dst|$dst, $src2}",
1160 (and (bc_v2i64 (v2f64 VR128:$src1)),
1161 (bc_v2i64 (loadv2f64 addr:$src2))))]>;
1162 def ORPSrm : PSI<0x56, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1163 "orps {$src2, $dst|$dst, $src2}",
1164 [(set VR128:$dst, (or VR128:$src1,
1165 (bc_v2i64 (loadv4f32 addr:$src2))))]>;
1166 def ORPDrm : PDI<0x56, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1167 "orpd {$src2, $dst|$dst, $src2}",
1169 (or (bc_v2i64 (v2f64 VR128:$src1)),
1170 (bc_v2i64 (loadv2f64 addr:$src2))))]>;
1171 def XORPSrm : PSI<0x57, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1172 "xorps {$src2, $dst|$dst, $src2}",
1173 [(set VR128:$dst, (xor VR128:$src1,
1174 (bc_v2i64 (loadv4f32 addr:$src2))))]>;
1175 def XORPDrm : PDI<0x57, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1176 "xorpd {$src2, $dst|$dst, $src2}",
1178 (xor (bc_v2i64 (v2f64 VR128:$src1)),
1179 (bc_v2i64 (loadv2f64 addr:$src2))))]>;
1180 def ANDNPSrr : PSI<0x55, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1181 "andnps {$src2, $dst|$dst, $src2}",
1182 [(set VR128:$dst, (v2i64 (and (xor VR128:$src1,
1183 (bc_v2i64 (v4i32 immAllOnesV))),
1185 def ANDNPSrm : PSI<0x55, MRMSrcMem, (ops VR128:$dst, VR128:$src1,f128mem:$src2),
1186 "andnps {$src2, $dst|$dst, $src2}",
1187 [(set VR128:$dst, (v2i64 (and (xor VR128:$src1,
1188 (bc_v2i64 (v4i32 immAllOnesV))),
1189 (bc_v2i64 (loadv4f32 addr:$src2)))))]>;
1190 def ANDNPDrr : PDI<0x55, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1191 "andnpd {$src2, $dst|$dst, $src2}",
1193 (and (vnot (bc_v2i64 (v2f64 VR128:$src1))),
1194 (bc_v2i64 (v2f64 VR128:$src2))))]>;
1195 def ANDNPDrm : PDI<0x55, MRMSrcMem, (ops VR128:$dst, VR128:$src1,f128mem:$src2),
1196 "andnpd {$src2, $dst|$dst, $src2}",
1198 (and (vnot (bc_v2i64 (v2f64 VR128:$src1))),
1199 (bc_v2i64 (loadv2f64 addr:$src2))))]>;
// Packed compares (0xC2).  The SSECC operand is the condition-code
// immediate; the asm string splices it into the mnemonic (cmp${cc}ps), and
// the pattern passes it through to the cmp intrinsic as imm:$cc.
1202 let isTwoAddress = 1 in {
1203 def CMPPSrri : PSIi8<0xC2, MRMSrcReg,
1204 (ops VR128:$dst, VR128:$src1, VR128:$src, SSECC:$cc),
1205 "cmp${cc}ps {$src, $dst|$dst, $src}",
1206 [(set VR128:$dst, (int_x86_sse_cmp_ps VR128:$src1,
1207 VR128:$src, imm:$cc))]>;
1208 def CMPPSrmi : PSIi8<0xC2, MRMSrcMem,
1209 (ops VR128:$dst, VR128:$src1, f128mem:$src, SSECC:$cc),
1210 "cmp${cc}ps {$src, $dst|$dst, $src}",
1211 [(set VR128:$dst, (int_x86_sse_cmp_ps VR128:$src1,
1212 (load addr:$src), imm:$cc))]>;
1213 def CMPPDrri : PDIi8<0xC2, MRMSrcReg,
1214 (ops VR128:$dst, VR128:$src1, VR128:$src, SSECC:$cc),
1215 "cmp${cc}pd {$src, $dst|$dst, $src}",
1216 [(set VR128:$dst, (int_x86_sse2_cmp_pd VR128:$src1,
1217 VR128:$src, imm:$cc))]>;
1218 def CMPPDrmi : PDIi8<0xC2, MRMSrcMem,
1219 (ops VR128:$dst, VR128:$src1, f128mem:$src, SSECC:$cc),
1220 "cmp${cc}pd {$src, $dst|$dst, $src}",
1221 [(set VR128:$dst, (int_x86_sse2_cmp_pd VR128:$src1,
1222 (load addr:$src), imm:$cc))]>;
// SHUFPS/SHUFPD (0xC6): two-source shuffle selected by an immediate mask.
// SHUFPS is marked convertible-to-three-address so the register allocator
// can turn it into PSHUFD when both sources are the same register.
1225 // Shuffle and unpack instructions
1226 let isTwoAddress = 1 in {
1227 let isCommutable = 1, isConvertibleToThreeAddress = 1 in // Convert to pshufd
1228 def SHUFPSrri : PSIi8<0xC6, MRMSrcReg,
1229 (ops VR128:$dst, VR128:$src1, VR128:$src2, i32i8imm:$src3),
1230 "shufps {$src3, $src2, $dst|$dst, $src2, $src3}",
1231 [(set VR128:$dst, (v4f32 (vector_shuffle
1232 VR128:$src1, VR128:$src2,
1233 SHUFP_shuffle_mask:$src3)))]>;
1234 def SHUFPSrmi : PSIi8<0xC6, MRMSrcMem,
1235 (ops VR128:$dst, VR128:$src1, f128mem:$src2, i32i8imm:$src3),
1236 "shufps {$src3, $src2, $dst|$dst, $src2, $src3}",
1237 [(set VR128:$dst, (v4f32 (vector_shuffle
1238 VR128:$src1, (load addr:$src2),
1239 SHUFP_shuffle_mask:$src3)))]>;
1240 let isCommutable = 1 in
1241 def SHUFPDrri : PDIi8<0xC6, MRMSrcReg,
1242 (ops VR128:$dst, VR128:$src1, VR128:$src2, i8imm:$src3),
1243 "shufpd {$src3, $src2, $dst|$dst, $src2, $src3}",
1244 [(set VR128:$dst, (v2f64 (vector_shuffle
1245 VR128:$src1, VR128:$src2,
1246 SHUFP_shuffle_mask:$src3)))]>;
1247 def SHUFPDrmi : PDIi8<0xC6, MRMSrcMem,
1248 (ops VR128:$dst, VR128:$src1, f128mem:$src2, i8imm:$src3),
1249 "shufpd {$src3, $src2, $dst|$dst, $src2, $src3}",
1250 [(set VR128:$dst, (v2f64 (vector_shuffle
1251 VR128:$src1, (load addr:$src2),
1252 SHUFP_shuffle_mask:$src3)))]>;
// UNPCKHPS/PD (0x15) and UNPCKLPS/PD (0x14): interleave high/low halves of
// two sources.  AddedComplexity = 10 prefers these patterns over generic
// shuffle lowering when the mask matches.
1254 let AddedComplexity = 10 in {
1255 def UNPCKHPSrr : PSI<0x15, MRMSrcReg,
1256 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1257 "unpckhps {$src2, $dst|$dst, $src2}",
1258 [(set VR128:$dst, (v4f32 (vector_shuffle
1259 VR128:$src1, VR128:$src2,
1260 UNPCKH_shuffle_mask)))]>;
1261 def UNPCKHPSrm : PSI<0x15, MRMSrcMem,
1262 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1263 "unpckhps {$src2, $dst|$dst, $src2}",
1264 [(set VR128:$dst, (v4f32 (vector_shuffle
1265 VR128:$src1, (load addr:$src2),
1266 UNPCKH_shuffle_mask)))]>;
1267 def UNPCKHPDrr : PDI<0x15, MRMSrcReg,
1268 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1269 "unpckhpd {$src2, $dst|$dst, $src2}",
1270 [(set VR128:$dst, (v2f64 (vector_shuffle
1271 VR128:$src1, VR128:$src2,
1272 UNPCKH_shuffle_mask)))]>;
1273 def UNPCKHPDrm : PDI<0x15, MRMSrcMem,
1274 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1275 "unpckhpd {$src2, $dst|$dst, $src2}",
1276 [(set VR128:$dst, (v2f64 (vector_shuffle
1277 VR128:$src1, (load addr:$src2),
1278 UNPCKH_shuffle_mask)))]>;
1280 def UNPCKLPSrr : PSI<0x14, MRMSrcReg,
1281 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1282 "unpcklps {$src2, $dst|$dst, $src2}",
1283 [(set VR128:$dst, (v4f32 (vector_shuffle
1284 VR128:$src1, VR128:$src2,
1285 UNPCKL_shuffle_mask)))]>;
1286 def UNPCKLPSrm : PSI<0x14, MRMSrcMem,
1287 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1288 "unpcklps {$src2, $dst|$dst, $src2}",
1289 [(set VR128:$dst, (v4f32 (vector_shuffle
1290 VR128:$src1, (load addr:$src2),
1291 UNPCKL_shuffle_mask)))]>;
1292 def UNPCKLPDrr : PDI<0x14, MRMSrcReg,
1293 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1294 "unpcklpd {$src2, $dst|$dst, $src2}",
1295 [(set VR128:$dst, (v2f64 (vector_shuffle
1296 VR128:$src1, VR128:$src2,
1297 UNPCKL_shuffle_mask)))]>;
1298 def UNPCKLPDrm : PDI<0x14, MRMSrcMem,
1299 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1300 "unpcklpd {$src2, $dst|$dst, $src2}",
1301 [(set VR128:$dst, (v2f64 (vector_shuffle
1302 VR128:$src1, (load addr:$src2),
1303 UNPCKL_shuffle_mask)))]>;
1304 } // AddedComplexity
// SSE3 horizontal add/subtract (HADD 0x7C, HSUB 0x7D) via the S3D_Int*
// (single-precision, XD-class) and S3_Int* (double-precision) helper
// classes, matched only from the SSE3 intrinsics.
1308 let isTwoAddress = 1 in {
1309 def HADDPSrr : S3D_Intrr<0x7C, "haddps {$src2, $dst|$dst, $src2}",
1310 int_x86_sse3_hadd_ps>;
1311 def HADDPSrm : S3D_Intrm<0x7C, "haddps {$src2, $dst|$dst, $src2}",
1312 int_x86_sse3_hadd_ps>;
1313 def HADDPDrr : S3_Intrr<0x7C, "haddpd {$src2, $dst|$dst, $src2}",
1314 int_x86_sse3_hadd_pd>;
1315 def HADDPDrm : S3_Intrm<0x7C, "haddpd {$src2, $dst|$dst, $src2}",
1316 int_x86_sse3_hadd_pd>;
1317 def HSUBPSrr : S3D_Intrr<0x7D, "hsubps {$src2, $dst|$dst, $src2}",
1318 int_x86_sse3_hsub_ps>;
1319 def HSUBPSrm : S3D_Intrm<0x7D, "hsubps {$src2, $dst|$dst, $src2}",
1320 int_x86_sse3_hsub_ps>;
1321 def HSUBPDrr : S3_Intrr<0x7D, "hsubpd {$src2, $dst|$dst, $src2}",
1322 int_x86_sse3_hsub_pd>;
1323 def HSUBPDrm : S3_Intrm<0x7D, "hsubpd {$src2, $dst|$dst, $src2}",
1324 int_x86_sse3_hsub_pd>;
// 128-bit integer moves: MOVDQA (aligned, 0x6F/0x7F) matches plain v2i64
// load/store; MOVDQU (unaligned, XS prefix) and LDDQU (SSE3) are matched
// only from their intrinsics.
1327 //===----------------------------------------------------------------------===//
1328 // SSE integer instructions
1329 //===----------------------------------------------------------------------===//
1331 // Move Instructions
1332 def MOVDQArr : PDI<0x6F, MRMSrcReg, (ops VR128:$dst, VR128:$src),
1333 "movdqa {$src, $dst|$dst, $src}", []>;
1334 def MOVDQArm : PDI<0x6F, MRMSrcMem, (ops VR128:$dst, i128mem:$src),
1335 "movdqa {$src, $dst|$dst, $src}",
1336 [(set VR128:$dst, (loadv2i64 addr:$src))]>;
1337 def MOVDQAmr : PDI<0x7F, MRMDestMem, (ops i128mem:$dst, VR128:$src),
1338 "movdqa {$src, $dst|$dst, $src}",
1339 [(store (v2i64 VR128:$src), addr:$dst)]>;
1340 def MOVDQUrm : I<0x6F, MRMSrcMem, (ops VR128:$dst, i128mem:$src),
1341 "movdqu {$src, $dst|$dst, $src}",
1342 [(set VR128:$dst, (int_x86_sse2_loadu_dq addr:$src))]>,
1343 XS, Requires<[HasSSE2]>;
1344 def MOVDQUmr : I<0x7F, MRMDestMem, (ops i128mem:$dst, VR128:$src),
1345 "movdqu {$src, $dst|$dst, $src}",
1346 [(int_x86_sse2_storeu_dq addr:$dst, VR128:$src)]>,
1347 XS, Requires<[HasSSE2]>;
1348 def LDDQUrm : S3DI<0xF0, MRMSrcMem, (ops VR128:$dst, i128mem:$src),
1349 "lddqu {$src, $dst|$dst, $src}",
1350 [(set VR128:$dst, (int_x86_sse3_ldu_dq addr:$src))]>;
// Packed integer adds, register forms (PADDB 0xFC, PADDW 0xFD, PADDD 0xFE,
// PADDQ 0xD4), matched from the generic `add` node at each element width.
1352 // 128-bit Integer Arithmetic
1353 let isTwoAddress = 1 in {
1354 let isCommutable = 1 in {
1355 def PADDBrr : PDI<0xFC, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1356 "paddb {$src2, $dst|$dst, $src2}",
1357 [(set VR128:$dst, (v16i8 (add VR128:$src1, VR128:$src2)))]>;
1358 def PADDWrr : PDI<0xFD, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1359 "paddw {$src2, $dst|$dst, $src2}",
1360 [(set VR128:$dst, (v8i16 (add VR128:$src1, VR128:$src2)))]>;
1361 def PADDDrr : PDI<0xFE, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1362 "paddd {$src2, $dst|$dst, $src2}",
1363 [(set VR128:$dst, (v4i32 (add VR128:$src1, VR128:$src2)))]>;
1365 def PADDQrr : PDI<0xD4, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1366 "paddq {$src2, $dst|$dst, $src2}",
1367 [(set VR128:$dst, (v2i64 (add VR128:$src1, VR128:$src2)))]>;
// Packed integer adds with a folded 128-bit load of the second operand.
// Loads are matched as v2i64 and bitcast to the element width.
1369 def PADDBrm : PDI<0xFC, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1370 "paddb {$src2, $dst|$dst, $src2}",
1371 [(set VR128:$dst, (add VR128:$src1,
1372 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1373 def PADDWrm : PDI<0xFD, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1374 "paddw {$src2, $dst|$dst, $src2}",
1375 [(set VR128:$dst, (add VR128:$src1,
1376 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1377 def PADDDrm : PDI<0xFE, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1378 "paddd {$src2, $dst|$dst, $src2}",
1379 [(set VR128:$dst, (add VR128:$src1,
1380 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
// PADDQ with folded load (opcode 0xD4, quadword add).
// FIX: the asm string previously said "paddd" (the doubleword add), which
// made this instruction print/parse under the wrong mnemonic.  It now
// matches PADDQrr's "paddq", consistent with the 0xD4 opcode and the
// v2i64 add pattern.
1381 def PADDQrm : PDI<0xD4, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1382 "paddq {$src2, $dst|$dst, $src2}",
1383 [(set VR128:$dst, (add VR128:$src1,
1384 (loadv2i64 addr:$src2)))]>;
// Saturating packed adds: signed (PADDSB 0xEC / PADDSW 0xED) and unsigned
// (PADDUSB 0xDC / PADDUSW 0xDD), matched from the SSE2 intrinsics.
// NOTE(review): the second intrinsic operand lines of the rr forms
// (orig. 1390, 1394, 1398, 1402, e.g. `VR128:$src2))]>;`) and the closing
// `}` of the isCommutable scope (orig. 1403) are missing from this
// listing -- restore from the original file.
1386 let isCommutable = 1 in {
1387 def PADDSBrr : PDI<0xEC, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1388 "paddsb {$src2, $dst|$dst, $src2}",
1389 [(set VR128:$dst, (int_x86_sse2_padds_b VR128:$src1,
1391 def PADDSWrr : PDI<0xED, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1392 "paddsw {$src2, $dst|$dst, $src2}",
1393 [(set VR128:$dst, (int_x86_sse2_padds_w VR128:$src1,
1395 def PADDUSBrr : PDI<0xDC, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1396 "paddusb {$src2, $dst|$dst, $src2}",
1397 [(set VR128:$dst, (int_x86_sse2_paddus_b VR128:$src1,
1399 def PADDUSWrr : PDI<0xDD, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1400 "paddusw {$src2, $dst|$dst, $src2}",
1401 [(set VR128:$dst, (int_x86_sse2_paddus_w VR128:$src1,
1404 def PADDSBrm : PDI<0xEC, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1405 "paddsb {$src2, $dst|$dst, $src2}",
1406 [(set VR128:$dst, (int_x86_sse2_padds_b VR128:$src1,
1407 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1408 def PADDSWrm : PDI<0xED, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1409 "paddsw {$src2, $dst|$dst, $src2}",
1410 [(set VR128:$dst, (int_x86_sse2_padds_w VR128:$src1,
1411 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1412 def PADDUSBrm : PDI<0xDC, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1413 "paddusb {$src2, $dst|$dst, $src2}",
1414 [(set VR128:$dst, (int_x86_sse2_paddus_b VR128:$src1,
1415 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1416 def PADDUSWrm : PDI<0xDD, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1417 "paddusw {$src2, $dst|$dst, $src2}",
1418 [(set VR128:$dst, (int_x86_sse2_paddus_w VR128:$src1,
1419 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
// Packed integer subtracts, register forms (PSUBB 0xF8, PSUBW 0xF9,
// PSUBD 0xFA, PSUBQ 0xFB), matched from the generic `sub` node.  Not
// commutable, unlike the PADD group above.
1422 def PSUBBrr : PDI<0xF8, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1423 "psubb {$src2, $dst|$dst, $src2}",
1424 [(set VR128:$dst, (v16i8 (sub VR128:$src1, VR128:$src2)))]>;
1425 def PSUBWrr : PDI<0xF9, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1426 "psubw {$src2, $dst|$dst, $src2}",
1427 [(set VR128:$dst, (v8i16 (sub VR128:$src1, VR128:$src2)))]>;
1428 def PSUBDrr : PDI<0xFA, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1429 "psubd {$src2, $dst|$dst, $src2}",
1430 [(set VR128:$dst, (v4i32 (sub VR128:$src1, VR128:$src2)))]>;
1431 def PSUBQrr : PDI<0xFB, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1432 "psubq {$src2, $dst|$dst, $src2}",
1433 [(set VR128:$dst, (v2i64 (sub VR128:$src1, VR128:$src2)))]>;
// Packed integer subtracts with a folded 128-bit load of the subtrahend.
1435 def PSUBBrm : PDI<0xF8, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1436 "psubb {$src2, $dst|$dst, $src2}",
1437 [(set VR128:$dst, (sub VR128:$src1,
1438 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1439 def PSUBWrm : PDI<0xF9, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1440 "psubw {$src2, $dst|$dst, $src2}",
1441 [(set VR128:$dst, (sub VR128:$src1,
1442 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1443 def PSUBDrm : PDI<0xFA, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1444 "psubd {$src2, $dst|$dst, $src2}",
1445 [(set VR128:$dst, (sub VR128:$src1,
1446 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
// PSUBQ with folded load (opcode 0xFB, quadword subtract).
// FIX: the asm string previously said "psubd" (the doubleword subtract),
// which made this instruction print/parse under the wrong mnemonic.  It
// now matches PSUBQrr's "psubq", consistent with the 0xFB opcode and the
// v2i64 sub pattern.
1447 def PSUBQrm : PDI<0xFB, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1448 "psubq {$src2, $dst|$dst, $src2}",
1449 [(set VR128:$dst, (sub VR128:$src1,
1450 (loadv2i64 addr:$src2)))]>;
1452 def PSUBSBrr : PDI<0xE8, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1453 "psubsb {$src2, $dst|$dst, $src2}",
1454 [(set VR128:$dst, (int_x86_sse2_psubs_b VR128:$src1,
1456 def PSUBSWrr : PDI<0xE9, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1457 "psubsw {$src2, $dst|$dst, $src2}",
1458 [(set VR128:$dst, (int_x86_sse2_psubs_w VR128:$src1,
1460 def PSUBUSBrr : PDI<0xD8, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1461 "psubusb {$src2, $dst|$dst, $src2}",
1462 [(set VR128:$dst, (int_x86_sse2_psubus_b VR128:$src1,
1464 def PSUBUSWrr : PDI<0xD9, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1465 "psubusw {$src2, $dst|$dst, $src2}",
1466 [(set VR128:$dst, (int_x86_sse2_psubus_w VR128:$src1,
// SSE2 signed (psubs*) and unsigned (psubus*) saturating subtract,
// register-memory forms.  Saturating arithmetic has no generic ISD node
// here, so these match the target intrinsics directly; the 128-bit
// memory operand is loaded as v2i64 and bitcast to the element width.
1469 def PSUBSBrm : PDI<0xE8, MRMSrcMem,
1470 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1471 "psubsb {$src2, $dst|$dst, $src2}",
1472 [(set VR128:$dst, (int_x86_sse2_psubs_b VR128:$src1,
1473 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1474 def PSUBSWrm : PDI<0xE9, MRMSrcMem,
1475 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1476 "psubsw {$src2, $dst|$dst, $src2}",
1477 [(set VR128:$dst, (int_x86_sse2_psubs_w VR128:$src1,
1478 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1479 def PSUBUSBrm : PDI<0xD8, MRMSrcMem,
1480 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1481 "psubusb {$src2, $dst|$dst, $src2}",
1482 [(set VR128:$dst, (int_x86_sse2_psubus_b VR128:$src1,
1483 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1484 def PSUBUSWrm : PDI<0xD9, MRMSrcMem,
1485 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1486 "psubusw {$src2, $dst|$dst, $src2}",
1487 [(set VR128:$dst, (int_x86_sse2_psubus_w VR128:$src1,
1488 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1490 let isCommutable = 1 in {
1491 def PMULHUWrr : PDI<0xE4, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1492 "pmulhuw {$src2, $dst|$dst, $src2}",
1493 [(set VR128:$dst, (int_x86_sse2_pmulhu_w VR128:$src1,
1495 def PMULHWrr : PDI<0xE5, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1496 "pmulhw {$src2, $dst|$dst, $src2}",
1497 [(set VR128:$dst, (int_x86_sse2_pmulh_w VR128:$src1,
1499 def PMULLWrr : PDI<0xD5, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1500 "pmullw {$src2, $dst|$dst, $src2}",
1501 [(set VR128:$dst, (v8i16 (mul VR128:$src1, VR128:$src2)))]>;
1502 def PMULUDQrr : PDI<0xF4, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1503 "pmuludq {$src2, $dst|$dst, $src2}",
1504 [(set VR128:$dst, (int_x86_sse2_pmulu_dq VR128:$src1,
1507 def PMULHUWrm : PDI<0xE4, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1508 "pmulhuw {$src2, $dst|$dst, $src2}",
1509 [(set VR128:$dst, (int_x86_sse2_pmulhu_w VR128:$src1,
1510 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1511 def PMULHWrm : PDI<0xE5, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1512 "pmulhw {$src2, $dst|$dst, $src2}",
1513 [(set VR128:$dst, (int_x86_sse2_pmulh_w VR128:$src1,
1514 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1515 def PMULLWrm : PDI<0xD5, MRMSrcMem,
1516 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1517 "pmullw {$src2, $dst|$dst, $src2}",
1518 [(set VR128:$dst, (v8i16 (mul VR128:$src1,
1519 (bc_v8i16 (loadv2i64 addr:$src2)))))]>;
1520 def PMULUDQrm : PDI<0xF4, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1521 "pmuludq {$src2, $dst|$dst, $src2}",
1522 [(set VR128:$dst, (int_x86_sse2_pmulu_dq VR128:$src1,
1523 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1525 let isCommutable = 1 in {
1526 def PMADDWDrr : PDI<0xF5, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1527 "pmaddwd {$src2, $dst|$dst, $src2}",
1528 [(set VR128:$dst, (int_x86_sse2_pmadd_wd VR128:$src1,
1531 def PMADDWDrm : PDI<0xF5, MRMSrcMem,
1532 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1533 "pmaddwd {$src2, $dst|$dst, $src2}",
1534 [(set VR128:$dst, (int_x86_sse2_pmadd_wd VR128:$src1,
1535 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1537 let isCommutable = 1 in {
1538 def PAVGBrr : PDI<0xE0, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1539 "pavgb {$src2, $dst|$dst, $src2}",
1540 [(set VR128:$dst, (int_x86_sse2_pavg_b VR128:$src1,
1542 def PAVGWrr : PDI<0xE3, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1543 "pavgw {$src2, $dst|$dst, $src2}",
1544 [(set VR128:$dst, (int_x86_sse2_pavg_w VR128:$src1,
1547 def PAVGBrm : PDI<0xE0, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1548 "pavgb {$src2, $dst|$dst, $src2}",
1549 [(set VR128:$dst, (int_x86_sse2_pavg_b VR128:$src1,
1550 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1551 def PAVGWrm : PDI<0xE3, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1552 "pavgw {$src2, $dst|$dst, $src2}",
1553 [(set VR128:$dst, (int_x86_sse2_pavg_w VR128:$src1,
1554 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1556 let isCommutable = 1 in {
1557 def PMAXUBrr : PDI<0xDE, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1558 "pmaxub {$src2, $dst|$dst, $src2}",
1559 [(set VR128:$dst, (int_x86_sse2_pmaxu_b VR128:$src1,
1561 def PMAXSWrr : PDI<0xEE, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1562 "pmaxsw {$src2, $dst|$dst, $src2}",
1563 [(set VR128:$dst, (int_x86_sse2_pmaxs_w VR128:$src1,
1566 def PMAXUBrm : PDI<0xDE, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1567 "pmaxub {$src2, $dst|$dst, $src2}",
1568 [(set VR128:$dst, (int_x86_sse2_pmaxu_b VR128:$src1,
1569 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1570 def PMAXSWrm : PDI<0xEE, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1571 "pmaxsw {$src2, $dst|$dst, $src2}",
1572 [(set VR128:$dst, (int_x86_sse2_pmaxs_w VR128:$src1,
1573 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1575 let isCommutable = 1 in {
1576 def PMINUBrr : PDI<0xDA, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1577 "pminub {$src2, $dst|$dst, $src2}",
1578 [(set VR128:$dst, (int_x86_sse2_pminu_b VR128:$src1,
1580 def PMINSWrr : PDI<0xEA, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1581 "pminsw {$src2, $dst|$dst, $src2}",
1582 [(set VR128:$dst, (int_x86_sse2_pmins_w VR128:$src1,
1585 def PMINUBrm : PDI<0xDA, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1586 "pminub {$src2, $dst|$dst, $src2}",
1587 [(set VR128:$dst, (int_x86_sse2_pminu_b VR128:$src1,
1588 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1589 def PMINSWrm : PDI<0xEA, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1590 "pminsw {$src2, $dst|$dst, $src2}",
1591 [(set VR128:$dst, (int_x86_sse2_pmins_w VR128:$src1,
1592 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
// PSADBW: packed sum of absolute differences of unsigned bytes.
1595 let isCommutable = 1 in {
// Bug fix: both PSADBW forms used opcode 0xE0, which is PAVGB's opcode
// (see PAVGBrr above) -- the two would collide in the opcode tables.
// PSADBW encodes as 0x66 0x0F 0xF6 per the Intel SDM, so the opcode
// byte must be 0xF6.
1596 def PSADBWrr : PDI<0xF6, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1597 "psadbw {$src2, $dst|$dst, $src2}",
1598 [(set VR128:$dst, (int_x86_sse2_psad_bw VR128:$src1,
1601 def PSADBWrm : PDI<0xF6, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1602 "psadbw {$src2, $dst|$dst, $src2}",
1603 [(set VR128:$dst, (int_x86_sse2_psad_bw VR128:$src1,
1604 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1607 let isTwoAddress = 1 in {
1608 def PSLLWrr : PDIi8<0xF1, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1609 "psllw {$src2, $dst|$dst, $src2}",
1610 [(set VR128:$dst, (int_x86_sse2_psll_w VR128:$src1,
1612 def PSLLWrm : PDIi8<0xF1, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1613 "psllw {$src2, $dst|$dst, $src2}",
1614 [(set VR128:$dst, (int_x86_sse2_psll_w VR128:$src1,
1615 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1616 def PSLLWri : PDIi8<0x71, MRM6r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1617 "psllw {$src2, $dst|$dst, $src2}",
1618 [(set VR128:$dst, (int_x86_sse2_psll_w VR128:$src1,
1619 (scalar_to_vector (i32 imm:$src2))))]>;
1620 def PSLLDrr : PDIi8<0xF2, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1621 "pslld {$src2, $dst|$dst, $src2}",
1622 [(set VR128:$dst, (int_x86_sse2_psll_d VR128:$src1,
1624 def PSLLDrm : PDIi8<0xF2, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1625 "pslld {$src2, $dst|$dst, $src2}",
1626 [(set VR128:$dst, (int_x86_sse2_psll_d VR128:$src1,
1627 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1628 def PSLLDri : PDIi8<0x72, MRM6r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1629 "pslld {$src2, $dst|$dst, $src2}",
1630 [(set VR128:$dst, (int_x86_sse2_psll_d VR128:$src1,
1631 (scalar_to_vector (i32 imm:$src2))))]>;
1632 def PSLLQrr : PDIi8<0xF3, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1633 "psllq {$src2, $dst|$dst, $src2}",
1634 [(set VR128:$dst, (int_x86_sse2_psll_q VR128:$src1,
1636 def PSLLQrm : PDIi8<0xF3, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1637 "psllq {$src2, $dst|$dst, $src2}",
1638 [(set VR128:$dst, (int_x86_sse2_psll_q VR128:$src1,
1639 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1640 def PSLLQri : PDIi8<0x73, MRM6r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1641 "psllq {$src2, $dst|$dst, $src2}",
1642 [(set VR128:$dst, (int_x86_sse2_psll_q VR128:$src1,
1643 (scalar_to_vector (i32 imm:$src2))))]>;
1644 def PSLLDQri : PDIi8<0x73, MRM7r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1645 "pslldq {$src2, $dst|$dst, $src2}", []>;
1647 def PSRLWrr : PDIi8<0xD1, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1648 "psrlw {$src2, $dst|$dst, $src2}",
1649 [(set VR128:$dst, (int_x86_sse2_psrl_w VR128:$src1,
1651 def PSRLWrm : PDIi8<0xD1, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1652 "psrlw {$src2, $dst|$dst, $src2}",
1653 [(set VR128:$dst, (int_x86_sse2_psrl_w VR128:$src1,
1654 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1655 def PSRLWri : PDIi8<0x71, MRM2r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1656 "psrlw {$src2, $dst|$dst, $src2}",
1657 [(set VR128:$dst, (int_x86_sse2_psrl_w VR128:$src1,
1658 (scalar_to_vector (i32 imm:$src2))))]>;
1659 def PSRLDrr : PDIi8<0xD2, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1660 "psrld {$src2, $dst|$dst, $src2}",
1661 [(set VR128:$dst, (int_x86_sse2_psrl_d VR128:$src1,
1663 def PSRLDrm : PDIi8<0xD2, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1664 "psrld {$src2, $dst|$dst, $src2}",
1665 [(set VR128:$dst, (int_x86_sse2_psrl_d VR128:$src1,
1666 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1667 def PSRLDri : PDIi8<0x72, MRM2r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1668 "psrld {$src2, $dst|$dst, $src2}",
1669 [(set VR128:$dst, (int_x86_sse2_psrl_d VR128:$src1,
1670 (scalar_to_vector (i32 imm:$src2))))]>;
1671 def PSRLQrr : PDIi8<0xD3, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1672 "psrlq {$src2, $dst|$dst, $src2}",
1673 [(set VR128:$dst, (int_x86_sse2_psrl_q VR128:$src1,
1675 def PSRLQrm : PDIi8<0xD3, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1676 "psrlq {$src2, $dst|$dst, $src2}",
1677 [(set VR128:$dst, (int_x86_sse2_psrl_q VR128:$src1,
1678 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1679 def PSRLQri : PDIi8<0x73, MRM2r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1680 "psrlq {$src2, $dst|$dst, $src2}",
1681 [(set VR128:$dst, (int_x86_sse2_psrl_q VR128:$src1,
1682 (scalar_to_vector (i32 imm:$src2))))]>;
1683 def PSRLDQri : PDIi8<0x73, MRM3r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1684 "psrldq {$src2, $dst|$dst, $src2}", []>;
1686 def PSRAWrr : PDIi8<0xE1, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1687 "psraw {$src2, $dst|$dst, $src2}",
1688 [(set VR128:$dst, (int_x86_sse2_psra_w VR128:$src1,
1690 def PSRAWrm : PDIi8<0xE1, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1691 "psraw {$src2, $dst|$dst, $src2}",
1692 [(set VR128:$dst, (int_x86_sse2_psra_w VR128:$src1,
1693 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1694 def PSRAWri : PDIi8<0x71, MRM4r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1695 "psraw {$src2, $dst|$dst, $src2}",
1696 [(set VR128:$dst, (int_x86_sse2_psra_w VR128:$src1,
1697 (scalar_to_vector (i32 imm:$src2))))]>;
1698 def PSRADrr : PDIi8<0xE2, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1699 "psrad {$src2, $dst|$dst, $src2}",
1700 [(set VR128:$dst, (int_x86_sse2_psra_d VR128:$src1,
1702 def PSRADrm : PDIi8<0xE2, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1703 "psrad {$src2, $dst|$dst, $src2}",
1704 [(set VR128:$dst, (int_x86_sse2_psra_d VR128:$src1,
1705 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1706 def PSRADri : PDIi8<0x72, MRM4r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1707 "psrad {$src2, $dst|$dst, $src2}",
1708 [(set VR128:$dst, (int_x86_sse2_psra_d VR128:$src1,
1709 (scalar_to_vector (i32 imm:$src2))))]>;
1713 let isTwoAddress = 1 in {
1714 let isCommutable = 1 in {
1715 def PANDrr : PDI<0xDB, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1716 "pand {$src2, $dst|$dst, $src2}",
1717 [(set VR128:$dst, (v2i64 (and VR128:$src1, VR128:$src2)))]>;
1718 def PORrr : PDI<0xEB, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1719 "por {$src2, $dst|$dst, $src2}",
1720 [(set VR128:$dst, (v2i64 (or VR128:$src1, VR128:$src2)))]>;
1721 def PXORrr : PDI<0xEF, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1722 "pxor {$src2, $dst|$dst, $src2}",
1723 [(set VR128:$dst, (v2i64 (xor VR128:$src1, VR128:$src2)))]>;
1726 def PANDrm : PDI<0xDB, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1727 "pand {$src2, $dst|$dst, $src2}",
1728 [(set VR128:$dst, (v2i64 (and VR128:$src1,
1729 (load addr:$src2))))]>;
1730 def PORrm : PDI<0xEB, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1731 "por {$src2, $dst|$dst, $src2}",
1732 [(set VR128:$dst, (v2i64 (or VR128:$src1,
1733 (load addr:$src2))))]>;
1734 def PXORrm : PDI<0xEF, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1735 "pxor {$src2, $dst|$dst, $src2}",
1736 [(set VR128:$dst, (v2i64 (xor VR128:$src1,
1737 (load addr:$src2))))]>;
1739 def PANDNrr : PDI<0xDF, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1740 "pandn {$src2, $dst|$dst, $src2}",
1741 [(set VR128:$dst, (v2i64 (and (vnot VR128:$src1),
1744 def PANDNrm : PDI<0xDF, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1745 "pandn {$src2, $dst|$dst, $src2}",
1746 [(set VR128:$dst, (v2i64 (and (vnot VR128:$src1),
1747 (load addr:$src2))))]>;
1750 // SSE2 Integer comparison
1751 let isTwoAddress = 1 in {
1752 def PCMPEQBrr : PDI<0x74, MRMSrcReg,
1753 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1754 "pcmpeqb {$src2, $dst|$dst, $src2}",
1755 [(set VR128:$dst, (int_x86_sse2_pcmpeq_b VR128:$src1,
1757 def PCMPEQBrm : PDI<0x74, MRMSrcMem,
1758 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1759 "pcmpeqb {$src2, $dst|$dst, $src2}",
1760 [(set VR128:$dst, (int_x86_sse2_pcmpeq_b VR128:$src1,
1761 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1762 def PCMPEQWrr : PDI<0x75, MRMSrcReg,
1763 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1764 "pcmpeqw {$src2, $dst|$dst, $src2}",
1765 [(set VR128:$dst, (int_x86_sse2_pcmpeq_w VR128:$src1,
1767 def PCMPEQWrm : PDI<0x75, MRMSrcMem,
1768 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1769 "pcmpeqw {$src2, $dst|$dst, $src2}",
1770 [(set VR128:$dst, (int_x86_sse2_pcmpeq_w VR128:$src1,
1771 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1772 def PCMPEQDrr : PDI<0x76, MRMSrcReg,
1773 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1774 "pcmpeqd {$src2, $dst|$dst, $src2}",
1775 [(set VR128:$dst, (int_x86_sse2_pcmpeq_d VR128:$src1,
1777 def PCMPEQDrm : PDI<0x76, MRMSrcMem,
1778 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1779 "pcmpeqd {$src2, $dst|$dst, $src2}",
1780 [(set VR128:$dst, (int_x86_sse2_pcmpeq_d VR128:$src1,
1781 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1783 def PCMPGTBrr : PDI<0x64, MRMSrcReg,
1784 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1785 "pcmpgtb {$src2, $dst|$dst, $src2}",
1786 [(set VR128:$dst, (int_x86_sse2_pcmpgt_b VR128:$src1,
1788 def PCMPGTBrm : PDI<0x64, MRMSrcMem,
1789 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1790 "pcmpgtb {$src2, $dst|$dst, $src2}",
1791 [(set VR128:$dst, (int_x86_sse2_pcmpgt_b VR128:$src1,
1792 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1793 def PCMPGTWrr : PDI<0x65, MRMSrcReg,
1794 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1795 "pcmpgtw {$src2, $dst|$dst, $src2}",
1796 [(set VR128:$dst, (int_x86_sse2_pcmpgt_w VR128:$src1,
1798 def PCMPGTWrm : PDI<0x65, MRMSrcMem,
1799 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1800 "pcmpgtw {$src2, $dst|$dst, $src2}",
1801 [(set VR128:$dst, (int_x86_sse2_pcmpgt_w VR128:$src1,
1802 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1803 def PCMPGTDrr : PDI<0x66, MRMSrcReg,
1804 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1805 "pcmpgtd {$src2, $dst|$dst, $src2}",
1806 [(set VR128:$dst, (int_x86_sse2_pcmpgt_d VR128:$src1,
1808 def PCMPGTDrm : PDI<0x66, MRMSrcMem,
1809 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1810 "pcmpgtd {$src2, $dst|$dst, $src2}",
1811 [(set VR128:$dst, (int_x86_sse2_pcmpgt_d VR128:$src1,
1812 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1815 // Pack instructions
1816 let isTwoAddress = 1 in {
1817 def PACKSSWBrr : PDI<0x63, MRMSrcReg, (ops VR128:$dst, VR128:$src1,
1819 "packsswb {$src2, $dst|$dst, $src2}",
1820 [(set VR128:$dst, (v8i16 (int_x86_sse2_packsswb_128
// PACKSSWB: pack words to bytes with signed saturation, memory form.
1823 def PACKSSWBrm : PDI<0x63, MRMSrcMem, (ops VR128:$dst, VR128:$src1,
1825 "packsswb {$src2, $dst|$dst, $src2}",
1826 [(set VR128:$dst, (v8i16 (int_x86_sse2_packsswb_128
// Consistency fix: this pattern loaded the memory operand as v2f64
// (loadv2f64) and bitcast it to v8i16; every sibling pack pattern
// (PACKSSDWrm, PACKUSWBrm) loads the integer form.  Use loadv2i64.
1828 (bc_v8i16 (loadv2i64 addr:$src2)))))]>;
1829 def PACKSSDWrr : PDI<0x6B, MRMSrcReg, (ops VR128:$dst, VR128:$src1,
1831 "packssdw {$src2, $dst|$dst, $src2}",
1832 [(set VR128:$dst, (v4i32 (int_x86_sse2_packssdw_128
1835 def PACKSSDWrm : PDI<0x6B, MRMSrcMem, (ops VR128:$dst, VR128:$src1,
1837 "packssdw {$src2, $dst|$dst, $src2}",
1838 [(set VR128:$dst, (v4i32 (int_x86_sse2_packssdw_128
1840 (bc_v4i32 (loadv2i64 addr:$src2)))))]>;
1841 def PACKUSWBrr : PDI<0x67, MRMSrcReg, (ops VR128:$dst, VR128:$src1,
1843 "packuswb {$src2, $dst|$dst, $src2}",
1844 [(set VR128:$dst, (v8i16 (int_x86_sse2_packuswb_128
1847 def PACKUSWBrm : PDI<0x67, MRMSrcMem, (ops VR128:$dst, VR128:$src1,
1849 "packuswb {$src2, $dst|$dst, $src2}",
1850 [(set VR128:$dst, (v8i16 (int_x86_sse2_packuswb_128
1852 (bc_v8i16 (loadv2i64 addr:$src2)))))]>;
1855 // Shuffle and unpack instructions
1856 def PSHUFDri : PDIi8<0x70, MRMSrcReg,
1857 (ops VR128:$dst, VR128:$src1, i8imm:$src2),
1858 "pshufd {$src2, $src1, $dst|$dst, $src1, $src2}",
1859 [(set VR128:$dst, (v4i32 (vector_shuffle
1860 VR128:$src1, (undef),
1861 PSHUFD_shuffle_mask:$src2)))]>;
1862 def PSHUFDmi : PDIi8<0x70, MRMSrcMem,
1863 (ops VR128:$dst, i128mem:$src1, i8imm:$src2),
1864 "pshufd {$src2, $src1, $dst|$dst, $src1, $src2}",
1865 [(set VR128:$dst, (v4i32 (vector_shuffle
1866 (bc_v4i32 (loadv2i64 addr:$src1)),
1868 PSHUFD_shuffle_mask:$src2)))]>;
1870 // SSE2 with ImmT == Imm8 and XS prefix.
1871 def PSHUFHWri : Ii8<0x70, MRMSrcReg,
1872 (ops VR128:$dst, VR128:$src1, i8imm:$src2),
1873 "pshufhw {$src2, $src1, $dst|$dst, $src1, $src2}",
1874 [(set VR128:$dst, (v8i16 (vector_shuffle
1875 VR128:$src1, (undef),
1876 PSHUFHW_shuffle_mask:$src2)))]>,
1877 XS, Requires<[HasSSE2]>;
1878 def PSHUFHWmi : Ii8<0x70, MRMSrcMem,
1879 (ops VR128:$dst, i128mem:$src1, i8imm:$src2),
1880 "pshufhw {$src2, $src1, $dst|$dst, $src1, $src2}",
1881 [(set VR128:$dst, (v8i16 (vector_shuffle
1882 (bc_v8i16 (loadv2i64 addr:$src1)),
1884 PSHUFHW_shuffle_mask:$src2)))]>,
1885 XS, Requires<[HasSSE2]>;
1887 // SSE2 with ImmT == Imm8 and XD prefix.
1888 def PSHUFLWri : Ii8<0x70, MRMSrcReg,
1889 (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1890 "pshuflw {$src2, $src1, $dst|$dst, $src1, $src2}",
1891 [(set VR128:$dst, (v8i16 (vector_shuffle
1892 VR128:$src1, (undef),
1893 PSHUFLW_shuffle_mask:$src2)))]>,
1894 XD, Requires<[HasSSE2]>;
1895 def PSHUFLWmi : Ii8<0x70, MRMSrcMem,
1896 (ops VR128:$dst, i128mem:$src1, i32i8imm:$src2),
1897 "pshuflw {$src2, $src1, $dst|$dst, $src1, $src2}",
1898 [(set VR128:$dst, (v8i16 (vector_shuffle
1899 (bc_v8i16 (loadv2i64 addr:$src1)),
1901 PSHUFLW_shuffle_mask:$src2)))]>,
1902 XD, Requires<[HasSSE2]>;
1904 let isTwoAddress = 1 in {
1905 def PUNPCKLBWrr : PDI<0x60, MRMSrcReg,
1906 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1907 "punpcklbw {$src2, $dst|$dst, $src2}",
1909 (v16i8 (vector_shuffle VR128:$src1, VR128:$src2,
1910 UNPCKL_shuffle_mask)))]>;
1911 def PUNPCKLBWrm : PDI<0x60, MRMSrcMem,
1912 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1913 "punpcklbw {$src2, $dst|$dst, $src2}",
1915 (v16i8 (vector_shuffle VR128:$src1,
1916 (bc_v16i8 (loadv2i64 addr:$src2)),
1917 UNPCKL_shuffle_mask)))]>;
1918 def PUNPCKLWDrr : PDI<0x61, MRMSrcReg,
1919 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1920 "punpcklwd {$src2, $dst|$dst, $src2}",
1922 (v8i16 (vector_shuffle VR128:$src1, VR128:$src2,
1923 UNPCKL_shuffle_mask)))]>;
1924 def PUNPCKLWDrm : PDI<0x61, MRMSrcMem,
1925 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1926 "punpcklwd {$src2, $dst|$dst, $src2}",
1928 (v8i16 (vector_shuffle VR128:$src1,
1929 (bc_v8i16 (loadv2i64 addr:$src2)),
1930 UNPCKL_shuffle_mask)))]>;
1931 def PUNPCKLDQrr : PDI<0x62, MRMSrcReg,
1932 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1933 "punpckldq {$src2, $dst|$dst, $src2}",
1935 (v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
1936 UNPCKL_shuffle_mask)))]>;
1937 def PUNPCKLDQrm : PDI<0x62, MRMSrcMem,
1938 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1939 "punpckldq {$src2, $dst|$dst, $src2}",
1941 (v4i32 (vector_shuffle VR128:$src1,
1942 (bc_v4i32 (loadv2i64 addr:$src2)),
1943 UNPCKL_shuffle_mask)))]>;
1944 def PUNPCKLQDQrr : PDI<0x6C, MRMSrcReg,
1945 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1946 "punpcklqdq {$src2, $dst|$dst, $src2}",
1948 (v2i64 (vector_shuffle VR128:$src1, VR128:$src2,
1949 UNPCKL_shuffle_mask)))]>;
1950 def PUNPCKLQDQrm : PDI<0x6C, MRMSrcMem,
1951 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1952 "punpcklqdq {$src2, $dst|$dst, $src2}",
1954 (v2i64 (vector_shuffle VR128:$src1,
1955 (loadv2i64 addr:$src2),
1956 UNPCKL_shuffle_mask)))]>;
1958 def PUNPCKHBWrr : PDI<0x68, MRMSrcReg,
1959 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1960 "punpckhbw {$src2, $dst|$dst, $src2}",
1962 (v16i8 (vector_shuffle VR128:$src1, VR128:$src2,
1963 UNPCKH_shuffle_mask)))]>;
1964 def PUNPCKHBWrm : PDI<0x68, MRMSrcMem,
1965 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1966 "punpckhbw {$src2, $dst|$dst, $src2}",
1968 (v16i8 (vector_shuffle VR128:$src1,
1969 (bc_v16i8 (loadv2i64 addr:$src2)),
1970 UNPCKH_shuffle_mask)))]>;
1971 def PUNPCKHWDrr : PDI<0x69, MRMSrcReg,
1972 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1973 "punpckhwd {$src2, $dst|$dst, $src2}",
1975 (v8i16 (vector_shuffle VR128:$src1, VR128:$src2,
1976 UNPCKH_shuffle_mask)))]>;
1977 def PUNPCKHWDrm : PDI<0x69, MRMSrcMem,
1978 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1979 "punpckhwd {$src2, $dst|$dst, $src2}",
1981 (v8i16 (vector_shuffle VR128:$src1,
1982 (bc_v8i16 (loadv2i64 addr:$src2)),
1983 UNPCKH_shuffle_mask)))]>;
1984 def PUNPCKHDQrr : PDI<0x6A, MRMSrcReg,
1985 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1986 "punpckhdq {$src2, $dst|$dst, $src2}",
1988 (v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
1989 UNPCKH_shuffle_mask)))]>;
1990 def PUNPCKHDQrm : PDI<0x6A, MRMSrcMem,
1991 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1992 "punpckhdq {$src2, $dst|$dst, $src2}",
1994 (v4i32 (vector_shuffle VR128:$src1,
1995 (bc_v4i32 (loadv2i64 addr:$src2)),
1996 UNPCKH_shuffle_mask)))]>;
1997 def PUNPCKHQDQrr : PDI<0x6D, MRMSrcReg,
1998 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1999 "punpckhqdq {$src2, $dst|$dst, $src2}",
2001 (v2i64 (vector_shuffle VR128:$src1, VR128:$src2,
2002 UNPCKH_shuffle_mask)))]>;
2003 def PUNPCKHQDQrm : PDI<0x6D, MRMSrcMem,
2004 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
2005 "punpckhqdq {$src2, $dst|$dst, $src2}",
2007 (v2i64 (vector_shuffle VR128:$src1,
2008 (loadv2i64 addr:$src2),
2009 UNPCKH_shuffle_mask)))]>;
2013 def PEXTRWri : PDIi8<0xC5, MRMSrcReg,
2014 (ops GR32:$dst, VR128:$src1, i32i8imm:$src2),
2015 "pextrw {$src2, $src1, $dst|$dst, $src1, $src2}",
2016 [(set GR32:$dst, (X86pextrw (v8i16 VR128:$src1),
2017 (i32 imm:$src2)))]>;
2018 let isTwoAddress = 1 in {
2019 def PINSRWrri : PDIi8<0xC4, MRMSrcReg,
2020 (ops VR128:$dst, VR128:$src1, GR32:$src2, i32i8imm:$src3),
2021 "pinsrw {$src3, $src2, $dst|$dst, $src2, $src3}",
2022 [(set VR128:$dst, (v8i16 (X86pinsrw (v8i16 VR128:$src1),
2023 GR32:$src2, (iPTR imm:$src3))))]>;
2024 def PINSRWrmi : PDIi8<0xC4, MRMSrcMem,
2025 (ops VR128:$dst, VR128:$src1, i16mem:$src2, i32i8imm:$src3),
2026 "pinsrw {$src3, $src2, $dst|$dst, $src2, $src3}",
2028 (v8i16 (X86pinsrw (v8i16 VR128:$src1),
2029 (i32 (anyext (loadi16 addr:$src2))),
2030 (iPTR imm:$src3))))]>;
2033 //===----------------------------------------------------------------------===//
2034 // Miscellaneous Instructions
2035 //===----------------------------------------------------------------------===//
2038 def MOVMSKPSrr : PSI<0x50, MRMSrcReg, (ops GR32:$dst, VR128:$src),
2039 "movmskps {$src, $dst|$dst, $src}",
2040 [(set GR32:$dst, (int_x86_sse_movmsk_ps VR128:$src))]>;
2041 def MOVMSKPDrr : PSI<0x50, MRMSrcReg, (ops GR32:$dst, VR128:$src),
2042 "movmskpd {$src, $dst|$dst, $src}",
2043 [(set GR32:$dst, (int_x86_sse2_movmsk_pd VR128:$src))]>;
2045 def PMOVMSKBrr : PDI<0xD7, MRMSrcReg, (ops GR32:$dst, VR128:$src),
2046 "pmovmskb {$src, $dst|$dst, $src}",
2047 [(set GR32:$dst, (int_x86_sse2_pmovmskb_128 VR128:$src))]>;
2049 // Conditional store
2050 def MASKMOVDQU : PDI<0xF7, RawFrm, (ops VR128:$src, VR128:$mask),
2051 "maskmovdqu {$mask, $src|$src, $mask}",
2052 [(int_x86_sse2_maskmov_dqu VR128:$src, VR128:$mask, EDI)]>,
2055 // Prefetching loads
// SSE prefetch hints: all share opcode 0x0F 0x18 and are distinguished
// by the ModRM reg field (MRM1m = t0, MRM2m = t1, MRM3m = t2; the nta
// form, MRM0m, is defined below).  No selection patterns are attached
// ([]), so these are not produced by ordinary DAG matching.
2056 def PREFETCHT0 : PSI<0x18, MRM1m, (ops i8mem:$src),
2057 "prefetcht0 $src", []>;
2058 def PREFETCHT1 : PSI<0x18, MRM2m, (ops i8mem:$src),
2059 "prefetcht1 $src", []>;
2060 def PREFETCHT2 : PSI<0x18, MRM3m, (ops i8mem:$src),
2061 "prefetcht2 $src", []>;
// Non-temporal prefetch hint (0x0F 0x18 /0).
2062 def PREFETCHTNTA : PSI<0x18, MRM0m, (ops i8mem:$src),
// Bug fix: the mnemonic is "prefetchnta"; "prefetchtnta" (extra 't') is
// not a valid instruction name and assemblers reject it.  The record
// name is kept unchanged so external references still resolve.
2063 "prefetchnta $src", []>;
2065 // Non-temporal stores
// Cache-bypassing stores of a full XMM register; each maps the matching
// movnt intrinsic directly to a store-form instruction.
2066 def MOVNTPSmr : PSI<0x2B, MRMDestMem, (ops i128mem:$dst, VR128:$src),
2067 "movntps {$src, $dst|$dst, $src}",
2068 [(int_x86_sse_movnt_ps addr:$dst, VR128:$src)]>;
2069 def MOVNTPDmr : PDI<0x2B, MRMDestMem, (ops i128mem:$dst, VR128:$src),
2070 "movntpd {$src, $dst|$dst, $src}",
2071 [(int_x86_sse2_movnt_pd addr:$dst, VR128:$src)]>;
// Non-temporal store of integer vector data.
2072 def MOVNTDQmr : PDI<0xE7, MRMDestMem, (ops i128mem:$dst, VR128:$src),
// Consistency fix: the destination operand was declared f128mem, but
// movntdq is an integer-domain store; use i128mem like the other packed
// integer memory operands in this file.
2073 "movntdq {$src, $dst|$dst, $src}",
2074 [(int_x86_sse2_movnt_dq addr:$dst, VR128:$src)]>;
2075 def MOVNTImr : I<0xC3, MRMDestMem, (ops i32mem:$dst, GR32:$src),
2076 "movnti {$src, $dst|$dst, $src}",
2077 [(int_x86_sse2_movnt_i addr:$dst, GR32:$src)]>,
2078 TB, Requires<[HasSSE2]>;
2081 def CLFLUSH : I<0xAE, MRM7m, (ops i8mem:$src),
2082 "clflush $src", [(int_x86_sse2_clflush addr:$src)]>,
2083 TB, Requires<[HasSSE2]>;
2085 // Load, store, and memory fence
// sfence/lfence/mfence all share opcode 0x0F 0xAE and are distinguished
// by the ModRM reg field (/7, /5, /6); each is gated on the subtarget
// feature that introduced it (sfence: SSE1, lfence/mfence: SSE2).
2086 def SFENCE : I<0xAE, MRM7m, (ops),
2087 "sfence", [(int_x86_sse_sfence)]>, TB, Requires<[HasSSE1]>;
2088 def LFENCE : I<0xAE, MRM5m, (ops),
2089 "lfence", [(int_x86_sse2_lfence)]>, TB, Requires<[HasSSE2]>;
2090 def MFENCE : I<0xAE, MRM6m, (ops),
2091 "mfence", [(int_x86_sse2_mfence)]>, TB, Requires<[HasSSE2]>;
2094 def LDMXCSR : I<0xAE, MRM5m, (ops i32mem:$src),
2096 [(int_x86_sse_ldmxcsr addr:$src)]>, TB, Requires<[HasSSE1]>;
2097 def STMXCSR : I<0xAE, MRM3m, (ops i32mem:$dst),
2099 [(int_x86_sse_stmxcsr addr:$dst)]>, TB, Requires<[HasSSE1]>;
2101 // Thread synchronization
// SSE3 monitor/mwait; operands are implicit in EAX/ECX/EDX as encoded
// in the intrinsic patterns below.
// NOTE(review): monitor/mwait encode as 0x0F 0x01 0xC8/0xC9 (three
// bytes); a TB (0x0F) prefix plus a single 0xC8/0xC9 opcode byte looks
// one byte short -- verify the emitter produces the 0x01 byte.
2102 def MONITOR : I<0xC8, RawFrm, (ops), "monitor",
2103 [(int_x86_sse3_monitor EAX, ECX, EDX)]>,
2104 TB, Requires<[HasSSE3]>;
2105 def MWAIT : I<0xC9, RawFrm, (ops), "mwait",
2106 [(int_x86_sse3_mwait ECX, EAX)]>,
2107 TB, Requires<[HasSSE3]>;
2109 //===----------------------------------------------------------------------===//
2110 // Alias Instructions
2111 //===----------------------------------------------------------------------===//
2113 // Alias instructions that map zero vector to pxor / xorp* for sse.
2114 // FIXME: remove when we can teach regalloc that xor reg, reg is ok.
2115 def V_SET0_PI : PDI<0xEF, MRMInitReg, (ops VR128:$dst),
2117 [(set VR128:$dst, (v2i64 immAllZerosV))]>;
2118 def V_SET0_PS : PSI<0x57, MRMInitReg, (ops VR128:$dst),
2120 [(set VR128:$dst, (v4f32 immAllZerosV))]>;
2121 def V_SET0_PD : PDI<0x57, MRMInitReg, (ops VR128:$dst),
2123 [(set VR128:$dst, (v2f64 immAllZerosV))]>;
2125 def V_SETALLONES : PDI<0x76, MRMInitReg, (ops VR128:$dst),
2126 "pcmpeqd $dst, $dst",
2127 [(set VR128:$dst, (v2f64 immAllOnesV))]>;
2129 // FR32 / FR64 to 128-bit vector conversion.
2130 def MOVSS2PSrr : SSI<0x10, MRMSrcReg, (ops VR128:$dst, FR32:$src),
2131 "movss {$src, $dst|$dst, $src}",
2133 (v4f32 (scalar_to_vector FR32:$src)))]>;
2134 def MOVSS2PSrm : SSI<0x10, MRMSrcMem, (ops VR128:$dst, f32mem:$src),
2135 "movss {$src, $dst|$dst, $src}",
2137 (v4f32 (scalar_to_vector (loadf32 addr:$src))))]>;
2138 def MOVSD2PDrr : SDI<0x10, MRMSrcReg, (ops VR128:$dst, FR64:$src),
2139 "movsd {$src, $dst|$dst, $src}",
2141 (v2f64 (scalar_to_vector FR64:$src)))]>;
2142 def MOVSD2PDrm : SDI<0x10, MRMSrcMem, (ops VR128:$dst, f64mem:$src),
2143 "movsd {$src, $dst|$dst, $src}",
2145 (v2f64 (scalar_to_vector (loadf64 addr:$src))))]>;
2147 def MOVDI2PDIrr : PDI<0x6E, MRMSrcReg, (ops VR128:$dst, GR32:$src),
2148 "movd {$src, $dst|$dst, $src}",
2150 (v4i32 (scalar_to_vector GR32:$src)))]>;
2151 def MOVDI2PDIrm : PDI<0x6E, MRMSrcMem, (ops VR128:$dst, i32mem:$src),
2152 "movd {$src, $dst|$dst, $src}",
2154 (v4i32 (scalar_to_vector (loadi32 addr:$src))))]>;
2155 // SSE2 instructions with XS prefix
2156 def MOVQI2PQIrr : I<0x7E, MRMSrcReg, (ops VR128:$dst, VR64:$src),
2157 "movq {$src, $dst|$dst, $src}",
2159 (v2i64 (scalar_to_vector VR64:$src)))]>, XS,
2160 Requires<[HasSSE2]>;
2161 def MOVQI2PQIrm : I<0x7E, MRMSrcMem, (ops VR128:$dst, i64mem:$src),
2162 "movq {$src, $dst|$dst, $src}",
2164 (v2i64 (scalar_to_vector (loadi64 addr:$src))))]>, XS,
2165 Requires<[HasSSE2]>;
2166 // FIXME: may not be able to eliminate this movss with coalescing the src and
2167 // dest register classes are different. We really want to write this pattern
2169 // def : Pat<(f32 (vector_extract (v4f32 VR128:$src), (iPTR 0))),
2170 // (f32 FR32:$src)>;
2171 def MOVPS2SSrr : SSI<0x10, MRMSrcReg, (ops FR32:$dst, VR128:$src),
2172 "movss {$src, $dst|$dst, $src}",
2173 [(set FR32:$dst, (vector_extract (v4f32 VR128:$src),
2175 def MOVPS2SSmr : SSI<0x11, MRMDestMem, (ops f32mem:$dst, VR128:$src),
2176 "movss {$src, $dst|$dst, $src}",
2177 [(store (f32 (vector_extract (v4f32 VR128:$src),
2178 (iPTR 0))), addr:$dst)]>;
2179 def MOVPD2SDrr : SDI<0x10, MRMSrcReg, (ops FR64:$dst, VR128:$src),
2180 "movsd {$src, $dst|$dst, $src}",
2181 [(set FR64:$dst, (vector_extract (v2f64 VR128:$src),
2183 def MOVPD2SDmr : SDI<0x11, MRMDestMem, (ops f64mem:$dst, VR128:$src),
2184 "movsd {$src, $dst|$dst, $src}",
2185 [(store (f64 (vector_extract (v2f64 VR128:$src),
2186 (iPTR 0))), addr:$dst)]>;
2187 def MOVPDI2DIrr : PDI<0x7E, MRMDestReg, (ops GR32:$dst, VR128:$src),
2188 "movd {$src, $dst|$dst, $src}",
2189 [(set GR32:$dst, (vector_extract (v4i32 VR128:$src),
2191 def MOVPDI2DImr : PDI<0x7E, MRMDestMem, (ops i32mem:$dst, VR128:$src),
2192 "movd {$src, $dst|$dst, $src}",
2193 [(store (i32 (vector_extract (v4i32 VR128:$src),
2194 (iPTR 0))), addr:$dst)]>;
2196 // Move to lower bits of a VR128, leaving upper bits alone.
2197 // Three operand (but two address) aliases.
2198 let isTwoAddress = 1 in {
2199 def MOVLSS2PSrr : SSI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src1, FR32:$src2),
2200 "movss {$src2, $dst|$dst, $src2}", []>;
2201 def MOVLSD2PDrr : SDI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src1, FR64:$src2),
2202 "movsd {$src2, $dst|$dst, $src2}", []>;
2204 let AddedComplexity = 20 in {
2205 def MOVLPSrr : SSI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
2206 "movss {$src2, $dst|$dst, $src2}",
2208 (v4f32 (vector_shuffle VR128:$src1, VR128:$src2,
2209 MOVL_shuffle_mask)))]>;
2210 def MOVLPDrr : SDI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
2211 "movsd {$src2, $dst|$dst, $src2}",
2213 (v2f64 (vector_shuffle VR128:$src1, VR128:$src2,
2214 MOVL_shuffle_mask)))]>;
2218 // Store / copy lower 64-bits of a XMM register.
2219 def MOVLQ128mr : PDI<0xD6, MRMDestMem, (ops i64mem:$dst, VR128:$src),
2220 "movq {$src, $dst|$dst, $src}",
2221 [(int_x86_sse2_storel_dq addr:$dst, VR128:$src)]>;
// Move to lower bits of a VR128 and zeroing upper bits.
// Loading from memory automatically zeroing upper bits.
let AddedComplexity = 20 in {
// movss mem->xmm: load f32 into lane 0, zero lanes 1-3.
def MOVZSS2PSrm : SSI<0x10, MRMSrcMem, (ops VR128:$dst, f32mem:$src),
                      "movss {$src, $dst|$dst, $src}",
                      [(set VR128:$dst, (v4f32 (vector_shuffle immAllZerosV,
                                 (v4f32 (scalar_to_vector (loadf32 addr:$src))),
                                 MOVL_shuffle_mask)))]>;
// movsd mem->xmm: load f64 into lane 0, zero lane 1.
def MOVZSD2PDrm : SDI<0x10, MRMSrcMem, (ops VR128:$dst, f64mem:$src),
                      "movsd {$src, $dst|$dst, $src}",
                      [(set VR128:$dst, (v2f64 (vector_shuffle immAllZerosV,
                                 (v2f64 (scalar_to_vector (loadf64 addr:$src))),
                                 MOVL_shuffle_mask)))]>;
// movd / movq to XMM register zero-extends
def MOVZDI2PDIrr : PDI<0x6E, MRMSrcReg, (ops VR128:$dst, GR32:$src),
                       "movd {$src, $dst|$dst, $src}",
                       [(set VR128:$dst, (v4i32 (vector_shuffle immAllZerosV,
                                  (v4i32 (scalar_to_vector GR32:$src)),
                                  MOVL_shuffle_mask)))]>;
def MOVZDI2PDIrm : PDI<0x6E, MRMSrcMem, (ops VR128:$dst, i32mem:$src),
                       "movd {$src, $dst|$dst, $src}",
                       [(set VR128:$dst, (v4i32 (vector_shuffle immAllZerosV,
                                  (v4i32 (scalar_to_vector (loadi32 addr:$src))),
                                  MOVL_shuffle_mask)))]>;
// Moving from XMM to XMM but still clear upper 64 bits.
// Uses a bare I<> with the XS prefix (f3 0f 7e encoding), so the SSE2
// requirement is attached explicitly.
def MOVZQI2PQIrr : I<0x7E, MRMSrcReg, (ops VR128:$dst, VR128:$src),
                     "movq {$src, $dst|$dst, $src}",
                     [(set VR128:$dst, (int_x86_sse2_movl_dq VR128:$src))]>,
                   XS, Requires<[HasSSE2]>;
def MOVZQI2PQIrm : I<0x7E, MRMSrcMem, (ops VR128:$dst, i64mem:$src),
                     "movq {$src, $dst|$dst, $src}",
                     [(set VR128:$dst, (int_x86_sse2_movl_dq
                                        (bc_v4i32 (loadv2i64 addr:$src))))]>,
                   XS, Requires<[HasSSE2]>;
//===----------------------------------------------------------------------===//
// Non-Instruction Patterns
//===----------------------------------------------------------------------===//

// 128-bit vector undef's.
// All 128-bit undefs map onto a single IMPLICIT_DEF of a VR128 register.
def : Pat<(v2f64 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
def : Pat<(v16i8 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
def : Pat<(v8i16 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
def : Pat<(v4i32 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
def : Pat<(v2i64 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
// 128-bit vector all zero's.
def : Pat<(v16i8 immAllZerosV), (v16i8 (V_SET0_PI))>, Requires<[HasSSE2]>;
def : Pat<(v8i16 immAllZerosV), (v8i16 (V_SET0_PI))>, Requires<[HasSSE2]>;
def : Pat<(v4i32 immAllZerosV), (v4i32 (V_SET0_PI))>, Requires<[HasSSE2]>;

// 128-bit vector all one's.
def : Pat<(v16i8 immAllOnesV), (v16i8 (V_SETALLONES))>, Requires<[HasSSE2]>;
def : Pat<(v8i16 immAllOnesV), (v8i16 (V_SETALLONES))>, Requires<[HasSSE2]>;
def : Pat<(v4i32 immAllOnesV), (v4i32 (V_SETALLONES))>, Requires<[HasSSE2]>;
def : Pat<(v2i64 immAllOnesV), (v2i64 (V_SETALLONES))>, Requires<[HasSSE2]>;
// NOTE(review): this v4f32 pattern is guarded by HasSSE1 while all of its
// siblings use HasSSE2 -- confirm V_SETALLONES is legal on SSE1-only targets.
def : Pat<(v4f32 immAllOnesV), (v4f32 (V_SETALLONES))>, Requires<[HasSSE1]>;
// Store 128-bit integer vector values.
// All integer-vector stores are emitted as movdqa.
// NOTE(review): movdqa requires 16-byte alignment; this mapping assumes the
// address is aligned -- confirm against how stores are formed upstream.
def : Pat<(store (v16i8 VR128:$src), addr:$dst),
          (MOVDQAmr addr:$dst, VR128:$src)>, Requires<[HasSSE2]>;
def : Pat<(store (v8i16 VR128:$src), addr:$dst),
          (MOVDQAmr addr:$dst, VR128:$src)>, Requires<[HasSSE2]>;
def : Pat<(store (v4i32 VR128:$src), addr:$dst),
          (MOVDQAmr addr:$dst, VR128:$src)>, Requires<[HasSSE2]>;
// Scalar to v8i16 / v16i8. The source may be a GR32, but only the lower 8 or
// 16 bits are meaningful; a plain movd provides the whole 32 bits.
def : Pat<(v8i16 (X86s2vec GR32:$src)), (v8i16 (MOVDI2PDIrr GR32:$src))>,
      Requires<[HasSSE2]>;
def : Pat<(v16i8 (X86s2vec GR32:$src)), (v16i8 (MOVDI2PDIrr GR32:$src))>,
      Requires<[HasSSE2]>;
// Bitconverts into v2i64 are free: the register contents are reinterpreted,
// not moved, so each pattern just re-types the source register.
def : Pat<(v2i64 (bitconvert (v4i32 VR128:$src))), (v2i64 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2i64 (bitconvert (v8i16 VR128:$src))), (v2i64 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2i64 (bitconvert (v16i8 VR128:$src))), (v2i64 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2i64 (bitconvert (v2f64 VR128:$src))), (v2i64 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2i64 (bitconvert (v4f32 VR128:$src))), (v2i64 VR128:$src)>,
      Requires<[HasSSE2]>;
// Bitconverts into v4i32 are likewise free register re-typings.
def : Pat<(v4i32 (bitconvert (v2i64 VR128:$src))), (v4i32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v4i32 (bitconvert (v8i16 VR128:$src))), (v4i32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v4i32 (bitconvert (v16i8 VR128:$src))), (v4i32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v4i32 (bitconvert (v2f64 VR128:$src))), (v4i32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v4i32 (bitconvert (v4f32 VR128:$src))), (v4i32 VR128:$src)>,
      Requires<[HasSSE2]>;
// Bitconverts into v8i16 are free register re-typings.
// Fixed: the result of each Pat must carry the destination type (v8i16);
// the v2i64/v4i32/v16i8 sources previously produced (v4i32 VR128:$src),
// inconsistent with every other bitconvert group in this section.
def : Pat<(v8i16 (bitconvert (v2i64 VR128:$src))), (v8i16 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v8i16 (bitconvert (v4i32 VR128:$src))), (v8i16 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v8i16 (bitconvert (v16i8 VR128:$src))), (v8i16 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v8i16 (bitconvert (v2f64 VR128:$src))), (v8i16 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v8i16 (bitconvert (v4f32 VR128:$src))), (v8i16 VR128:$src)>,
      Requires<[HasSSE2]>;
// Bitconverts into v16i8 are free register re-typings.
// Fixed: the result of each Pat must carry the destination type (v16i8);
// the v2i64/v4i32/v8i16 sources previously produced (v4i32 VR128:$src),
// inconsistent with every other bitconvert group in this section.
def : Pat<(v16i8 (bitconvert (v2i64 VR128:$src))), (v16i8 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v16i8 (bitconvert (v4i32 VR128:$src))), (v16i8 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v16i8 (bitconvert (v8i16 VR128:$src))), (v16i8 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v16i8 (bitconvert (v2f64 VR128:$src))), (v16i8 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v16i8 (bitconvert (v4f32 VR128:$src))), (v16i8 VR128:$src)>,
      Requires<[HasSSE2]>;
// Bitconverts into the floating-point vector types are also free.
def : Pat<(v4f32 (bitconvert (v2i64 VR128:$src))), (v4f32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v4f32 (bitconvert (v4i32 VR128:$src))), (v4f32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v4f32 (bitconvert (v8i16 VR128:$src))), (v4f32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v4f32 (bitconvert (v16i8 VR128:$src))), (v4f32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v4f32 (bitconvert (v2f64 VR128:$src))), (v4f32 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2f64 (bitconvert (v2i64 VR128:$src))), (v2f64 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2f64 (bitconvert (v4i32 VR128:$src))), (v2f64 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2f64 (bitconvert (v8i16 VR128:$src))), (v2f64 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2f64 (bitconvert (v16i8 VR128:$src))), (v2f64 VR128:$src)>,
      Requires<[HasSSE2]>;
def : Pat<(v2f64 (bitconvert (v4f32 VR128:$src))), (v2f64 VR128:$src)>,
      Requires<[HasSSE2]>;
// Move scalar to XMM zero-extended
// movd to XMM register zero-extends
let AddedComplexity = 20 in {
// Zero-extending a GR32 scalar into the low lane of a v8i16 / v16i8 is the
// same movd-with-zero instruction used for v4i32 above.
def : Pat<(v8i16 (vector_shuffle immAllZerosV,
                  (v8i16 (X86s2vec GR32:$src)), MOVL_shuffle_mask)),
          (v8i16 (MOVZDI2PDIrr GR32:$src))>, Requires<[HasSSE2]>;
def : Pat<(v16i8 (vector_shuffle immAllZerosV,
                  (v16i8 (X86s2vec GR32:$src)), MOVL_shuffle_mask)),
          (v16i8 (MOVZDI2PDIrr GR32:$src))>, Requires<[HasSSE2]>;
// Zeroing a VR128 then do a MOVS{S|D} to the lower bits.
def : Pat<(v2f64 (vector_shuffle immAllZerosV,
                  (v2f64 (scalar_to_vector FR64:$src)), MOVL_shuffle_mask)),
          (v2f64 (MOVLSD2PDrr (V_SET0_PD), FR64:$src))>, Requires<[HasSSE2]>;
// NOTE(review): movss and V_SET0_PS are SSE1 operations, yet this pattern
// requires HasSSE2 -- confirm whether HasSSE1 was intended.
def : Pat<(v4f32 (vector_shuffle immAllZerosV,
                  (v4f32 (scalar_to_vector FR32:$src)), MOVL_shuffle_mask)),
          (v4f32 (MOVLSS2PSrr (V_SET0_PS), FR32:$src))>, Requires<[HasSSE2]>;
// Splat v2f64 / v2i64
// A two-element splat is just an unpack-low of the register with itself.
let AddedComplexity = 10 in {
def : Pat<(vector_shuffle (v2f64 VR128:$src), (undef), SSE_splat_v2_mask:$sm),
          (v2f64 (UNPCKLPDrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
def : Pat<(vector_shuffle (v2i64 VR128:$src), (undef), SSE_splat_v2_mask:$sm),
          (v2i64 (PUNPCKLQDQrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
// Splat v4f32: shufps of the register with itself, reusing the splat mask
// as the shuffle immediate.
def : Pat<(vector_shuffle (v4f32 VR128:$src), (undef), SSE_splat_mask:$sm),
          (v4f32 (SHUFPSrri VR128:$src, VR128:$src, SSE_splat_mask:$sm))>,
      Requires<[HasSSE1]>;

// Special unary SHUFPSrri case.
// FIXME: when we want non two-address code, then we should use PSHUFD?
def : Pat<(vector_shuffle (v4f32 VR128:$src1), (undef),
           SHUFP_unary_shuffle_mask:$sm),
          (v4f32 (SHUFPSrri VR128:$src1, VR128:$src1, SHUFP_unary_shuffle_mask:$sm))>,
      Requires<[HasSSE1]>;
// Unary v4f32 shuffle with PSHUF* in order to fold a load.
def : Pat<(vector_shuffle (loadv4f32 addr:$src1), (undef),
           SHUFP_unary_shuffle_mask:$sm),
          (v4f32 (PSHUFDmi addr:$src1, SHUFP_unary_shuffle_mask:$sm))>,
      Requires<[HasSSE2]>;
// Special binary v4i32 shuffle cases with SHUFPS.
// shufps on integer data is fine: it only moves bits, and avoids needing a
// second integer-shuffle instruction.
def : Pat<(vector_shuffle (v4i32 VR128:$src1), (v4i32 VR128:$src2),
           PSHUFD_binary_shuffle_mask:$sm),
          (v4i32 (SHUFPSrri VR128:$src1, VR128:$src2,
                  PSHUFD_binary_shuffle_mask:$sm))>, Requires<[HasSSE2]>;
def : Pat<(vector_shuffle (v4i32 VR128:$src1),
           (bc_v4i32 (loadv2i64 addr:$src2)), PSHUFD_binary_shuffle_mask:$sm),
          (v4i32 (SHUFPSrmi VR128:$src1, addr:$src2,
                  PSHUFD_binary_shuffle_mask:$sm))>, Requires<[HasSSE2]>;
// vector_shuffle v1, <undef>, <0, 0, 1, 1, ...>
// Unpack-low of a register with itself duplicates each low element.
// Fixed: the Requires predicates on the first and last patterns were
// transposed -- UNPCKLPS is an SSE1 instruction, while the 128-bit
// PUNPCKL{BW,WD,DQ} forms all require SSE2.
let AddedComplexity = 10 in {
def : Pat<(v4f32 (vector_shuffle VR128:$src, (undef),
                  UNPCKL_v_undef_shuffle_mask)),
          (v4f32 (UNPCKLPSrr VR128:$src, VR128:$src))>, Requires<[HasSSE1]>;
def : Pat<(v16i8 (vector_shuffle VR128:$src, (undef),
                  UNPCKL_v_undef_shuffle_mask)),
          (v16i8 (PUNPCKLBWrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
def : Pat<(v8i16 (vector_shuffle VR128:$src, (undef),
                  UNPCKL_v_undef_shuffle_mask)),
          (v8i16 (PUNPCKLWDrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
def : Pat<(v4i32 (vector_shuffle VR128:$src, (undef),
                  UNPCKL_v_undef_shuffle_mask)),
          (v4i32 (PUNPCKLDQrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
let AddedComplexity = 20 in {
// vector_shuffle v1, <undef> <1, 1, 3, 3>
// SSE3 movshdup duplicates the odd elements into the even lanes.
def : Pat<(v4i32 (vector_shuffle VR128:$src, (undef),
                  MOVSHDUP_shuffle_mask)),
          (v4i32 (MOVSHDUPrr VR128:$src))>, Requires<[HasSSE3]>;
def : Pat<(v4i32 (vector_shuffle (bc_v4i32 (loadv2i64 addr:$src)), (undef),
                  MOVSHDUP_shuffle_mask)),
          (v4i32 (MOVSHDUPrm addr:$src))>, Requires<[HasSSE3]>;

// vector_shuffle v1, <undef> <0, 0, 2, 2>
// SSE3 movsldup duplicates the even elements into the odd lanes.
def : Pat<(v4i32 (vector_shuffle VR128:$src, (undef),
                  MOVSLDUP_shuffle_mask)),
          (v4i32 (MOVSLDUPrr VR128:$src))>, Requires<[HasSSE3]>;
def : Pat<(v4i32 (vector_shuffle (bc_v4i32 (loadv2i64 addr:$src)), (undef),
                  MOVSLDUP_shuffle_mask)),
          (v4i32 (MOVSLDUPrm addr:$src))>, Requires<[HasSSE3]>;
let AddedComplexity = 20 in {
// vector_shuffle v1, v2 <0, 1, 4, 5> using MOVLHPS
// NOTE(review): these four patterns carry no Requires<> clause, unlike the
// rest of this section -- confirm the v4i32 type legality alone is the
// intended guard.
def : Pat<(v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
                  MOVHP_shuffle_mask)),
          (v4i32 (MOVLHPSrr VR128:$src1, VR128:$src2))>;

// vector_shuffle v1, v2 <6, 7, 2, 3> using MOVHLPS
def : Pat<(v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
                  MOVHLPS_shuffle_mask)),
          (v4i32 (MOVHLPSrr VR128:$src1, VR128:$src2))>;

// vector_shuffle v1, undef <2, 3, ?, ?> using MOVHLPS
// Copying the high pair to the low pair of the same register.
def : Pat<(v4f32 (vector_shuffle VR128:$src1, (undef),
                  UNPCKH_shuffle_mask)),
          (v4f32 (MOVHLPSrr VR128:$src1, VR128:$src1))>;
def : Pat<(v4i32 (vector_shuffle VR128:$src1, (undef),
                  UNPCKH_shuffle_mask)),
          (v4i32 (MOVHLPSrr VR128:$src1, VR128:$src1))>;
// vector_shuffle v1, (load v2) <4, 5, 2, 3> using MOVLPS
// vector_shuffle v1, (load v2) <0, 1, 4, 5> using MOVHPS
// Folding the load of the second operand into the low/high-half move.
def : Pat<(v4f32 (vector_shuffle VR128:$src1, (loadv4f32 addr:$src2),
                  MOVLP_shuffle_mask)),
          (v4f32 (MOVLPSrm VR128:$src1, addr:$src2))>, Requires<[HasSSE1]>;
def : Pat<(v2f64 (vector_shuffle VR128:$src1, (loadv2f64 addr:$src2),
                  MOVLP_shuffle_mask)),
          (v2f64 (MOVLPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(v4f32 (vector_shuffle VR128:$src1, (loadv4f32 addr:$src2),
                  MOVHP_shuffle_mask)),
          (v4f32 (MOVHPSrm VR128:$src1, addr:$src2))>, Requires<[HasSSE1]>;
def : Pat<(v2f64 (vector_shuffle VR128:$src1, (loadv2f64 addr:$src2),
                  MOVHP_shuffle_mask)),
          (v2f64 (MOVHPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;

// Integer-typed versions of the same folds.
def : Pat<(v4i32 (vector_shuffle VR128:$src1, (bc_v4i32 (loadv2i64 addr:$src2)),
                  MOVLP_shuffle_mask)),
          (v4i32 (MOVLPSrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(v2i64 (vector_shuffle VR128:$src1, (loadv2i64 addr:$src2),
                  MOVLP_shuffle_mask)),
          (v2i64 (MOVLPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
// v4i32 MOVHP fold: loadv2i64 / bc_v4i32 are SSE2-only fragments, so this
// requires HasSSE2 (the sibling v4i32 MOVLP pattern above already does);
// the original HasSSE1 guard could never be satisfied on SSE1-only targets.
def : Pat<(v4i32 (vector_shuffle VR128:$src1, (bc_v4i32 (loadv2i64 addr:$src2)),
                  MOVHP_shuffle_mask)),
          (v4i32 (MOVHPSrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
// v2i64 MOVHP fold. The original repeated the v2i64 MOVLP pattern from a few
// lines above verbatim; per the section comment ("<0, 1, 4, 5> using MOVHPS")
// and the parallel v4f32/v2f64/v4i32 structure, the MOVHP variant was
// intended here.
def : Pat<(v2i64 (vector_shuffle VR128:$src1, (loadv2i64 addr:$src2),
                  MOVHP_shuffle_mask)),
          (v2i64 (MOVHPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
// Setting the lowest element in the vector.
def : Pat<(v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
                  MOVL_shuffle_mask)),
          (v4i32 (MOVLPSrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
def : Pat<(v2i64 (vector_shuffle VR128:$src1, VR128:$src2,
                  MOVL_shuffle_mask)),
          (v2i64 (MOVLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;

// vector_shuffle v1, v2 <4, 5, 2, 3> using MOVLPDrr (movsd)
// movsd replaces the whole low 64 bits, which covers two f32 / i32 lanes.
def : Pat<(v4f32 (vector_shuffle VR128:$src1, VR128:$src2,
                  MOVLP_shuffle_mask)),
          (v4f32 (MOVLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
def : Pat<(v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
                  MOVLP_shuffle_mask)),
          (v4i32 (MOVLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
// Set lowest element and zero upper elements.
// A zero-merged f64 load re-typed to v2i64 is exactly movq mem->xmm.
def : Pat<(bc_v2i64 (vector_shuffle immAllZerosV,
                     (v2f64 (scalar_to_vector (loadf64 addr:$src))),
                     MOVL_shuffle_mask)),
          (v2i64 (MOVZQI2PQIrm addr:$src))>, Requires<[HasSSE2]>;
// FIXME: Temporary workaround since 2-wide shuffle is broken.
// Map the SSE2 double-precision shuffle/unpack intrinsics directly onto
// their instructions instead of going through vector_shuffle.
def : Pat<(int_x86_sse2_movs_d VR128:$src1, VR128:$src2),
          (v2f64 (MOVLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_loadh_pd VR128:$src1, addr:$src2),
          (v2f64 (MOVHPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_loadl_pd VR128:$src1, addr:$src2),
          (v2f64 (MOVLPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_shuf_pd VR128:$src1, VR128:$src2, imm:$src3),
          (v2f64 (SHUFPDrri VR128:$src1, VR128:$src2, imm:$src3))>,
      Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_shuf_pd VR128:$src1, (load addr:$src2), imm:$src3),
          (v2f64 (SHUFPDrmi VR128:$src1, addr:$src2, imm:$src3))>,
      Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_unpckh_pd VR128:$src1, VR128:$src2),
          (v2f64 (UNPCKHPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_unpckh_pd VR128:$src1, (load addr:$src2)),
          (v2f64 (UNPCKHPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_unpckl_pd VR128:$src1, VR128:$src2),
          (v2f64 (UNPCKLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_unpckl_pd VR128:$src1, (load addr:$src2)),
          (v2f64 (UNPCKLPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_punpckh_qdq VR128:$src1, VR128:$src2),
          (v2i64 (PUNPCKHQDQrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_punpckh_qdq VR128:$src1, (load addr:$src2)),
          (v2i64 (PUNPCKHQDQrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_punpckl_qdq VR128:$src1, VR128:$src2),
          (v2i64 (PUNPCKLQDQrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
// NOTE(review): unlike its siblings, this result carries no (v2i64 ...)
// type wrapper -- harmless if inference succeeds, but inconsistent.
def : Pat<(int_x86_sse2_punpckl_qdq VR128:$src1, (load addr:$src2)),
          (PUNPCKLQDQrm VR128:$src1, addr:$src2)>, Requires<[HasSSE2]>;
// 128-bit logical shifts
// The pslldq/psrldq intrinsics take a bit count; PSxLDQ_imm rescales the
// immediate to the byte count the instructions expect.
def : Pat<(int_x86_sse2_psll_dq VR128:$src1, imm:$src2),
          (v2i64 (PSLLDQri VR128:$src1, (PSxLDQ_imm imm:$src2)))>,
      Requires<[HasSSE2]>;
def : Pat<(int_x86_sse2_psrl_dq VR128:$src1, imm:$src2),
          (v2i64 (PSRLDQri VR128:$src1, (PSxLDQ_imm imm:$src2)))>,
      Requires<[HasSSE2]>;
// Some special case pandn patterns.
// (and (xor x, all-ones), y) is (pandn x, y); the bc_v2i64 wrappers let the
// all-ones immediate be matched at any integer element width.
// NOTE(review): the register-register patterns below appear to be missing
// their "VR128:$src2))" operand lines in this listing -- verify against the
// in-tree file.
def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v4i32 immAllOnesV))),
          (v2i64 (PANDNrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v8i16 immAllOnesV))),
          (v2i64 (PANDNrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v16i8 immAllOnesV))),
          (v2i64 (PANDNrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;

// The same patterns with the second operand folded from memory.
def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v4i32 immAllOnesV))),
                  (load addr:$src2))),
          (v2i64 (PANDNrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v8i16 immAllOnesV))),
                  (load addr:$src2))),
          (v2i64 (PANDNrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v16i8 immAllOnesV))),
                  (load addr:$src2))),
          (v2i64 (PANDNrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;