//====- X86InstrSSE.td - Describe the X86 Instruction Set -------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by Evan Cheng and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file describes the X86 SSE instruction set, defining the instructions,
// and properties of the instructions which are needed for code generation,
// machine code emission, and analysis.
//
//===----------------------------------------------------------------------===//
//===----------------------------------------------------------------------===//
// SSE specific DAG Nodes.
//===----------------------------------------------------------------------===//

// Load of a packed FP value used as a scalar; carries a chain like any load.
def X86loadp  : SDNode<"X86ISD::LOAD_PACK", SDTLoad,
                       [SDNPHasChain]>;
// Bitwise AND / XOR on FP values (commutative and associative, so the
// combiner may reassociate them freely).
def X86fand   : SDNode<"X86ISD::FAND", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86fxor   : SDNode<"X86ISD::FXOR", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
// Ordered / unordered scalar FP compares; both produce EFLAGS.
def X86comi   : SDNode<"X86ISD::COMI", SDTX86CmpTest,
                       [SDNPOutFlag]>;
def X86ucomi  : SDNode<"X86ISD::UCOMI", SDTX86CmpTest,
                       [SDNPOutFlag]>;
// Scalar-to-vector, and PEXTRW / PINSRW word extract/insert.
def X86s2vec  : SDNode<"X86ISD::S2VEC",
                       SDTypeProfile<1, 1, []>, []>;
def X86pextrw : SDNode<"X86ISD::PEXTRW",
                       SDTypeProfile<1, 2, []>, []>;
def X86pinsrw : SDNode<"X86ISD::PINSRW",
                       SDTypeProfile<1, 3, []>, []>;
//===----------------------------------------------------------------------===//
// SSE pattern fragments
//===----------------------------------------------------------------------===//

// Scalar FP loads via the packed-load node (whole 128-bit load, low element
// used).
def X86loadpf32 : PatFrag<(ops node:$ptr), (f32 (X86loadp node:$ptr))>;
def X86loadpf64 : PatFrag<(ops node:$ptr), (f64 (X86loadp node:$ptr))>;

// 128-bit vector loads, one fragment per element type.
def loadv4f32 : PatFrag<(ops node:$ptr), (v4f32 (load node:$ptr))>;
def loadv2f64 : PatFrag<(ops node:$ptr), (v2f64 (load node:$ptr))>;
def loadv16i8 : PatFrag<(ops node:$ptr), (v16i8 (load node:$ptr))>;
def loadv8i16 : PatFrag<(ops node:$ptr), (v8i16 (load node:$ptr))>;
def loadv4i32 : PatFrag<(ops node:$ptr), (v4i32 (load node:$ptr))>;
def loadv2i64 : PatFrag<(ops node:$ptr), (v2i64 (load node:$ptr))>;

// Bitcasts to each 128-bit vector type.
def bc_v4f32 : PatFrag<(ops node:$in), (v4f32 (bitconvert node:$in))>;
def bc_v2f64 : PatFrag<(ops node:$in), (v2f64 (bitconvert node:$in))>;
def bc_v16i8 : PatFrag<(ops node:$in), (v16i8 (bitconvert node:$in))>;
def bc_v8i16 : PatFrag<(ops node:$in), (v8i16 (bitconvert node:$in))>;
def bc_v4i32 : PatFrag<(ops node:$in), (v4i32 (bitconvert node:$in))>;
def bc_v2i64 : PatFrag<(ops node:$in), (v2i64 (bitconvert node:$in))>;
// Matches the f32 immediate +0.0 (used to select pxor for fld0).
def fp32imm0 : PatLeaf<(f32 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

// PSLLDQ/PSRLDQ shift amounts are in bytes; the DAG expresses them in bits.
def PSxLDQ_imm : SDNodeXForm<imm, [{
  // Transformation function: imm >> 3
  return getI32Imm(N->getValue() >> 3);
}]>;
// SHUFFLE_get_shuf_imm xform function: convert vector_shuffle mask to PSHUF*,
// SHUFP* etc. imm.
def SHUFFLE_get_shuf_imm : SDNodeXForm<build_vector, [{
  return getI8Imm(X86::getShuffleSHUFImmediate(N));
}]>;

// SHUFFLE_get_pshufhw_imm xform function: convert vector_shuffle mask to
// PSHUFHW imm.
def SHUFFLE_get_pshufhw_imm : SDNodeXForm<build_vector, [{
  return getI8Imm(X86::getShufflePSHUFHWImmediate(N));
}]>;

// SHUFFLE_get_pshuflw_imm xform function: convert vector_shuffle mask to
// PSHUFLW imm.
def SHUFFLE_get_pshuflw_imm : SDNodeXForm<build_vector, [{
  return getI8Imm(X86::getShufflePSHUFLWImmediate(N));
}]>;
// Shuffle-mask predicates.  Each PatLeaf matches a build_vector shuffle mask
// recognized by the corresponding X86::is*Mask predicate; leafs with an xform
// also convert the mask into the instruction's 8-bit immediate.
def SSE_splat_mask : PatLeaf<(build_vector), [{
  return X86::isSplatMask(N);
}], SHUFFLE_get_shuf_imm>;

def SSE_splat_v2_mask : PatLeaf<(build_vector), [{
  return X86::isSplatMask(N);
}]>;

def MOVHLPS_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVHLPSMask(N);
}]>;

def MOVHP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVHPMask(N);
}]>;

def MOVLP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVLPMask(N);
}]>;

def MOVL_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVLMask(N);
}]>;

def MOVSHDUP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVSHDUPMask(N);
}]>;

def MOVSLDUP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isMOVSLDUPMask(N);
}]>;

def UNPCKL_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isUNPCKLMask(N);
}]>;

def UNPCKH_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isUNPCKHMask(N);
}]>;

def UNPCKL_v_undef_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isUNPCKL_v_undef_Mask(N);
}]>;

def PSHUFD_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isPSHUFDMask(N);
}], SHUFFLE_get_shuf_imm>;

def PSHUFHW_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isPSHUFHWMask(N);
}], SHUFFLE_get_pshufhw_imm>;

def PSHUFLW_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isPSHUFLWMask(N);
}], SHUFFLE_get_pshuflw_imm>;

// SHUFPS with a single input register uses the PSHUFD mask test.
def SHUFP_unary_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isPSHUFDMask(N);
}], SHUFFLE_get_shuf_imm>;

def SHUFP_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isSHUFPMask(N);
}], SHUFFLE_get_shuf_imm>;

// PSHUFD emulating a two-input SHUFP-style mask.
def PSHUFD_binary_shuffle_mask : PatLeaf<(build_vector), [{
  return X86::isSHUFPMask(N);
}], SHUFFLE_get_shuf_imm>;
//===----------------------------------------------------------------------===//
// SSE scalar FP Instructions
//===----------------------------------------------------------------------===//

// Instruction templates
// SSI   - SSE1 instructions with XS prefix.
// SDI   - SSE2 instructions with XD prefix.
// PSI   - SSE1 instructions with TB prefix.
// PDI   - SSE2 instructions with TB and OpSize prefixes.
// PSIi8 - SSE1 instructions with ImmT == Imm8 and TB prefix.
// PDIi8 - SSE2 instructions with ImmT == Imm8 and TB and OpSize prefixes.
// S3I   - SSE3 instructions with TB and OpSize prefixes.
// S3SI  - SSE3 instructions with XS prefix.
// S3DI  - SSE3 instructions with XD prefix.
class SSI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, XS, Requires<[HasSSE1]>;
class SDI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, XD, Requires<[HasSSE2]>;
class PSI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, TB, Requires<[HasSSE1]>;
class PDI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, TB, OpSize, Requires<[HasSSE2]>;
class PSIi8<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : X86Inst<o, F, Imm8, ops, asm>, TB, Requires<[HasSSE1]> {
  let Pattern = pattern;
}
class PDIi8<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : X86Inst<o, F, Imm8, ops, asm>, TB, OpSize, Requires<[HasSSE2]> {
  let Pattern = pattern;
}
class S3SI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, XS, Requires<[HasSSE3]>;
class S3DI<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, XD, Requires<[HasSSE3]>;
class S3I<bits<8> o, Format F, dag ops, string asm, list<dag> pattern>
      : I<o, F, ops, asm, pattern>, TB, OpSize, Requires<[HasSSE3]>;
//===----------------------------------------------------------------------===//
// Helpers for defining instructions that directly correspond to intrinsics.

// Scalar-SSE unary intrinsic ops on an XMM register (r) or memory (m).
class SS_Intr<bits<8> o, string asm, Intrinsic IntId>
  : SSI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src), asm,
        [(set VR128:$dst, (v4f32 (IntId VR128:$src)))]>;
class SS_Intm<bits<8> o, string asm, Intrinsic IntId>
  : SSI<o, MRMSrcMem, (ops VR128:$dst, f32mem:$src), asm,
        [(set VR128:$dst, (v4f32 (IntId (load addr:$src))))]>;
class SD_Intr<bits<8> o, string asm, Intrinsic IntId>
  : SDI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src), asm,
        [(set VR128:$dst, (v2f64 (IntId VR128:$src)))]>;
class SD_Intm<bits<8> o, string asm, Intrinsic IntId>
  : SDI<o, MRMSrcMem, (ops VR128:$dst, f64mem:$src), asm,
        [(set VR128:$dst, (v2f64 (IntId (load addr:$src))))]>;

// Scalar-SSE binary intrinsic ops (two-address: $src1 ties to $dst).
class SS_Intrr<bits<8> o, string asm, Intrinsic IntId>
  : SSI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
        [(set VR128:$dst, (v4f32 (IntId VR128:$src1, VR128:$src2)))]>;
class SS_Intrm<bits<8> o, string asm, Intrinsic IntId>
  : SSI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f32mem:$src2), asm,
        [(set VR128:$dst, (v4f32 (IntId VR128:$src1, (load addr:$src2))))]>;
class SD_Intrr<bits<8> o, string asm, Intrinsic IntId>
  : SDI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
        [(set VR128:$dst, (v2f64 (IntId VR128:$src1, VR128:$src2)))]>;
class SD_Intrm<bits<8> o, string asm, Intrinsic IntId>
  : SDI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2), asm,
        [(set VR128:$dst, (v2f64 (IntId VR128:$src1, (load addr:$src2))))]>;

// Packed-SSE unary intrinsic ops.
class PS_Intr<bits<8> o, string asm, Intrinsic IntId>
  : PSI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src), asm,
        [(set VR128:$dst, (IntId VR128:$src))]>;
class PS_Intm<bits<8> o, string asm, Intrinsic IntId>
  : PSI<o, MRMSrcMem, (ops VR128:$dst, f32mem:$src), asm,
        [(set VR128:$dst, (IntId (loadv4f32 addr:$src)))]>;
class PD_Intr<bits<8> o, string asm, Intrinsic IntId>
  : PDI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src), asm,
        [(set VR128:$dst, (IntId VR128:$src))]>;
class PD_Intm<bits<8> o, string asm, Intrinsic IntId>
  : PDI<o, MRMSrcMem, (ops VR128:$dst, f64mem:$src), asm,
        [(set VR128:$dst, (IntId (loadv2f64 addr:$src)))]>;

// Packed-SSE binary intrinsic ops.
class PS_Intrr<bits<8> o, string asm, Intrinsic IntId>
  : PSI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
        [(set VR128:$dst, (IntId VR128:$src1, VR128:$src2))]>;
class PS_Intrm<bits<8> o, string asm, Intrinsic IntId>
  : PSI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f32mem:$src2), asm,
        [(set VR128:$dst, (IntId VR128:$src1, (loadv4f32 addr:$src2)))]>;
class PD_Intrr<bits<8> o, string asm, Intrinsic IntId>
  : PDI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
        [(set VR128:$dst, (IntId VR128:$src1, VR128:$src2))]>;
class PD_Intrm<bits<8> o, string asm, Intrinsic IntId>
  : PDI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2), asm,
        [(set VR128:$dst, (IntId VR128:$src1, (loadv2f64 addr:$src2)))]>;

// SSE3 binary intrinsic ops (haddps/hsubps style, f128mem operands).
class S3D_Intrr<bits<8> o, string asm, Intrinsic IntId>
  : S3DI<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
         [(set VR128:$dst, (v4f32 (IntId VR128:$src1, VR128:$src2)))]>;
class S3D_Intrm<bits<8> o, string asm, Intrinsic IntId>
  : S3DI<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2), asm,
         [(set VR128:$dst, (v4f32 (IntId VR128:$src1,
                                   (loadv4f32 addr:$src2))))]>;
class S3_Intrr<bits<8> o, string asm, Intrinsic IntId>
  : S3I<o, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2), asm,
        [(set VR128:$dst, (v2f64 (IntId VR128:$src1, VR128:$src2)))]>;
class S3_Intrm<bits<8> o, string asm, Intrinsic IntId>
  : S3I<o, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2), asm,
        [(set VR128:$dst, (v2f64 (IntId VR128:$src1,
                                  (loadv2f64 addr:$src2))))]>;
// Some 'special' instructions
// NOTE(review): IMPLICIT_DEF_FR32 requires HasSSE2, not HasSSE1 — looks
// inconsistent with the FR32 register class; confirm before changing.
def IMPLICIT_DEF_FR32 : I<0, Pseudo, (ops FR32:$dst),
                          "#IMPLICIT_DEF $dst",
                          [(set FR32:$dst, (undef))]>, Requires<[HasSSE2]>;
def IMPLICIT_DEF_FR64 : I<0, Pseudo, (ops FR64:$dst),
                          "#IMPLICIT_DEF $dst",
                          [(set FR64:$dst, (undef))]>, Requires<[HasSSE2]>;
// CMOV* - Used to implement the SSE SELECT DAG operation.  Expanded by the
// scheduler into a branch sequence.
let usesCustomDAGSchedInserter = 1 in {  // Expanded by the scheduler.
  def CMOV_FR32 : I<0, Pseudo,
                    (ops FR32:$dst, FR32:$t, FR32:$f, i8imm:$cond),
                    "#CMOV_FR32 PSEUDO!",
                    [(set FR32:$dst, (X86cmov FR32:$t, FR32:$f, imm:$cond))]>;
  def CMOV_FR64 : I<0, Pseudo,
                    (ops FR64:$dst, FR64:$t, FR64:$f, i8imm:$cond),
                    "#CMOV_FR64 PSEUDO!",
                    [(set FR64:$dst, (X86cmov FR64:$t, FR64:$f, imm:$cond))]>;
  def CMOV_V4F32 : I<0, Pseudo,
                    (ops VR128:$dst, VR128:$t, VR128:$f, i8imm:$cond),
                    "#CMOV_V4F32 PSEUDO!",
                    [(set VR128:$dst,
                      (v4f32 (X86cmov VR128:$t, VR128:$f, imm:$cond)))]>;
  def CMOV_V2F64 : I<0, Pseudo,
                    (ops VR128:$dst, VR128:$t, VR128:$f, i8imm:$cond),
                    "#CMOV_V2F64 PSEUDO!",
                    [(set VR128:$dst,
                      (v2f64 (X86cmov VR128:$t, VR128:$f, imm:$cond)))]>;
  def CMOV_V2I64 : I<0, Pseudo,
                    (ops VR128:$dst, VR128:$t, VR128:$f, i8imm:$cond),
                    "#CMOV_V2I64 PSEUDO!",
                    [(set VR128:$dst,
                      (v2i64 (X86cmov VR128:$t, VR128:$f, imm:$cond)))]>;
}
// Move Instructions
def MOVSSrr : SSI<0x10, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                  "movss {$src, $dst|$dst, $src}", []>;
def MOVSSrm : SSI<0x10, MRMSrcMem, (ops FR32:$dst, f32mem:$src),
                  "movss {$src, $dst|$dst, $src}",
                  [(set FR32:$dst, (loadf32 addr:$src))]>;
def MOVSDrr : SDI<0x10, MRMSrcReg, (ops FR64:$dst, FR64:$src),
                  "movsd {$src, $dst|$dst, $src}", []>;
def MOVSDrm : SDI<0x10, MRMSrcMem, (ops FR64:$dst, f64mem:$src),
                  "movsd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (loadf64 addr:$src))]>;

def MOVSSmr : SSI<0x11, MRMDestMem, (ops f32mem:$dst, FR32:$src),
                  "movss {$src, $dst|$dst, $src}",
                  [(store FR32:$src, addr:$dst)]>;
def MOVSDmr : SDI<0x11, MRMDestMem, (ops f64mem:$dst, FR64:$src),
                  "movsd {$src, $dst|$dst, $src}",
                  [(store FR64:$src, addr:$dst)]>;
// Arithmetic instructions
let isTwoAddress = 1 in {

let isCommutable = 1 in {
def ADDSSrr : SSI<0x58, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "addss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fadd FR32:$src1, FR32:$src2))]>;
def ADDSDrr : SDI<0x58, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "addsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fadd FR64:$src1, FR64:$src2))]>;
def MULSSrr : SSI<0x59, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "mulss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fmul FR32:$src1, FR32:$src2))]>;
def MULSDrr : SDI<0x59, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "mulsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fmul FR64:$src1, FR64:$src2))]>;
}

def ADDSSrm : SSI<0x58, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "addss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fadd FR32:$src1, (loadf32 addr:$src2)))]>;
def ADDSDrm : SDI<0x58, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "addsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fadd FR64:$src1, (loadf64 addr:$src2)))]>;
def MULSSrm : SSI<0x59, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "mulss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fmul FR32:$src1, (loadf32 addr:$src2)))]>;
def MULSDrm : SDI<0x59, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "mulsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fmul FR64:$src1, (loadf64 addr:$src2)))]>;

def DIVSSrr : SSI<0x5E, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "divss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fdiv FR32:$src1, FR32:$src2))]>;
def DIVSSrm : SSI<0x5E, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "divss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fdiv FR32:$src1, (loadf32 addr:$src2)))]>;
def DIVSDrr : SDI<0x5E, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "divsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fdiv FR64:$src1, FR64:$src2))]>;
def DIVSDrm : SDI<0x5E, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "divsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fdiv FR64:$src1, (loadf64 addr:$src2)))]>;

def SUBSSrr : SSI<0x5C, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "subss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fsub FR32:$src1, FR32:$src2))]>;
def SUBSSrm : SSI<0x5C, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "subss {$src2, $dst|$dst, $src2}",
                  [(set FR32:$dst, (fsub FR32:$src1, (loadf32 addr:$src2)))]>;
def SUBSDrr : SDI<0x5C, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "subsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fsub FR64:$src1, FR64:$src2))]>;
def SUBSDrm : SDI<0x5C, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "subsd {$src2, $dst|$dst, $src2}",
                  [(set FR64:$dst, (fsub FR64:$src1, (loadf64 addr:$src2)))]>;
}
def SQRTSSr : SSI<0x51, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                  "sqrtss {$src, $dst|$dst, $src}",
                  [(set FR32:$dst, (fsqrt FR32:$src))]>;
def SQRTSSm : SSI<0x51, MRMSrcMem, (ops FR32:$dst, f32mem:$src),
                  "sqrtss {$src, $dst|$dst, $src}",
                  [(set FR32:$dst, (fsqrt (loadf32 addr:$src)))]>;
def SQRTSDr : SDI<0x51, MRMSrcReg, (ops FR64:$dst, FR64:$src),
                  "sqrtsd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (fsqrt FR64:$src))]>;
def SQRTSDm : SDI<0x51, MRMSrcMem, (ops FR64:$dst, f64mem:$src),
                  "sqrtsd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (fsqrt (loadf64 addr:$src)))]>;

// Approximate reciprocal-sqrt / reciprocal: no DAG patterns, selected only
// via intrinsics (see Int_RSQRTSS* / Int_RCPSS* below).
def RSQRTSSr : SSI<0x52, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                   "rsqrtss {$src, $dst|$dst, $src}", []>;
def RSQRTSSm : SSI<0x52, MRMSrcMem, (ops FR32:$dst, f32mem:$src),
                   "rsqrtss {$src, $dst|$dst, $src}", []>;
def RCPSSr : SSI<0x53, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                 "rcpss {$src, $dst|$dst, $src}", []>;
def RCPSSm : SSI<0x53, MRMSrcMem, (ops FR32:$dst, f32mem:$src),
                 "rcpss {$src, $dst|$dst, $src}", []>;
// Scalar max/min.  A two-address instruction's tied source must share the
// destination's register class, so the SD forms use FR64:$src1 (the previous
// FR32:$src1 on MAXSD/MINSD was a typo).
let isTwoAddress = 1 in {
def MAXSSrr : SSI<0x5F, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "maxss {$src2, $dst|$dst, $src2}", []>;
def MAXSSrm : SSI<0x5F, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "maxss {$src2, $dst|$dst, $src2}", []>;
def MAXSDrr : SDI<0x5F, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "maxsd {$src2, $dst|$dst, $src2}", []>;
def MAXSDrm : SDI<0x5F, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "maxsd {$src2, $dst|$dst, $src2}", []>;
def MINSSrr : SSI<0x5D, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                  "minss {$src2, $dst|$dst, $src2}", []>;
def MINSSrm : SSI<0x5D, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f32mem:$src2),
                  "minss {$src2, $dst|$dst, $src2}", []>;
def MINSDrr : SDI<0x5D, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                  "minsd {$src2, $dst|$dst, $src2}", []>;
def MINSDrm : SDI<0x5D, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f64mem:$src2),
                  "minsd {$src2, $dst|$dst, $src2}", []>;
}
// Aliases to match intrinsics which expect XMM operand(s).
let isTwoAddress = 1 in {

let isCommutable = 1 in {
def Int_ADDSSrr : SS_Intrr<0x58, "addss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_add_ss>;
def Int_ADDSDrr : SD_Intrr<0x58, "addsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_add_sd>;
def Int_MULSSrr : SS_Intrr<0x59, "mulss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_mul_ss>;
def Int_MULSDrr : SD_Intrr<0x59, "mulsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_mul_sd>;
}

def Int_ADDSSrm : SS_Intrm<0x58, "addss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_add_ss>;
def Int_ADDSDrm : SD_Intrm<0x58, "addsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_add_sd>;
def Int_MULSSrm : SS_Intrm<0x59, "mulss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_mul_ss>;
def Int_MULSDrm : SD_Intrm<0x59, "mulsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_mul_sd>;

def Int_DIVSSrr : SS_Intrr<0x5E, "divss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_div_ss>;
def Int_DIVSSrm : SS_Intrm<0x5E, "divss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_div_ss>;
def Int_DIVSDrr : SD_Intrr<0x5E, "divsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_div_sd>;
def Int_DIVSDrm : SD_Intrm<0x5E, "divsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_div_sd>;

def Int_SUBSSrr : SS_Intrr<0x5C, "subss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_sub_ss>;
def Int_SUBSSrm : SS_Intrm<0x5C, "subss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_sub_ss>;
def Int_SUBSDrr : SD_Intrr<0x5C, "subsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_sub_sd>;
def Int_SUBSDrm : SD_Intrm<0x5C, "subsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_sub_sd>;
}

def Int_SQRTSSr : SS_Intr<0x51, "sqrtss {$src, $dst|$dst, $src}",
                          int_x86_sse_sqrt_ss>;
def Int_SQRTSSm : SS_Intm<0x51, "sqrtss {$src, $dst|$dst, $src}",
                          int_x86_sse_sqrt_ss>;
def Int_SQRTSDr : SD_Intr<0x51, "sqrtsd {$src, $dst|$dst, $src}",
                          int_x86_sse2_sqrt_sd>;
def Int_SQRTSDm : SD_Intm<0x51, "sqrtsd {$src, $dst|$dst, $src}",
                          int_x86_sse2_sqrt_sd>;

def Int_RSQRTSSr : SS_Intr<0x52, "rsqrtss {$src, $dst|$dst, $src}",
                           int_x86_sse_rsqrt_ss>;
def Int_RSQRTSSm : SS_Intm<0x52, "rsqrtss {$src, $dst|$dst, $src}",
                           int_x86_sse_rsqrt_ss>;
def Int_RCPSSr : SS_Intr<0x53, "rcpss {$src, $dst|$dst, $src}",
                         int_x86_sse_rcp_ss>;
def Int_RCPSSm : SS_Intm<0x53, "rcpss {$src, $dst|$dst, $src}",
                         int_x86_sse_rcp_ss>;

let isTwoAddress = 1 in {
def Int_MAXSSrr : SS_Intrr<0x5F, "maxss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_max_ss>;
def Int_MAXSSrm : SS_Intrm<0x5F, "maxss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_max_ss>;
def Int_MAXSDrr : SD_Intrr<0x5F, "maxsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_max_sd>;
def Int_MAXSDrm : SD_Intrm<0x5F, "maxsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_max_sd>;
def Int_MINSSrr : SS_Intrr<0x5D, "minss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_min_ss>;
def Int_MINSSrm : SS_Intrm<0x5D, "minss {$src2, $dst|$dst, $src2}",
                           int_x86_sse_min_ss>;
def Int_MINSDrr : SD_Intrr<0x5D, "minsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_min_sd>;
def Int_MINSDrm : SD_Intrm<0x5D, "minsd {$src2, $dst|$dst, $src2}",
                           int_x86_sse2_min_sd>;
}
// Conversion instructions
def CVTTSS2SIrr: SSI<0x2C, MRMSrcReg, (ops GR32:$dst, FR32:$src),
                     "cvttss2si {$src, $dst|$dst, $src}",
                     [(set GR32:$dst, (fp_to_sint FR32:$src))]>;
def CVTTSS2SIrm: SSI<0x2C, MRMSrcMem, (ops GR32:$dst, f32mem:$src),
                     "cvttss2si {$src, $dst|$dst, $src}",
                     [(set GR32:$dst, (fp_to_sint (loadf32 addr:$src)))]>;
def CVTTSD2SIrr: SDI<0x2C, MRMSrcReg, (ops GR32:$dst, FR64:$src),
                     "cvttsd2si {$src, $dst|$dst, $src}",
                     [(set GR32:$dst, (fp_to_sint FR64:$src))]>;
def CVTTSD2SIrm: SDI<0x2C, MRMSrcMem, (ops GR32:$dst, f64mem:$src),
                     "cvttsd2si {$src, $dst|$dst, $src}",
                     [(set GR32:$dst, (fp_to_sint (loadf64 addr:$src)))]>;
def CVTSD2SSrr: SDI<0x5A, MRMSrcReg, (ops FR32:$dst, FR64:$src),
                    "cvtsd2ss {$src, $dst|$dst, $src}",
                    [(set FR32:$dst, (fround FR64:$src))]>;
def CVTSD2SSrm: SDI<0x5A, MRMSrcMem, (ops FR32:$dst, f64mem:$src),
                    "cvtsd2ss {$src, $dst|$dst, $src}",
                    [(set FR32:$dst, (fround (loadf64 addr:$src)))]>;
def CVTSI2SSrr: SSI<0x2A, MRMSrcReg, (ops FR32:$dst, GR32:$src),
                    "cvtsi2ss {$src, $dst|$dst, $src}",
                    [(set FR32:$dst, (sint_to_fp GR32:$src))]>;
def CVTSI2SSrm: SSI<0x2A, MRMSrcMem, (ops FR32:$dst, i32mem:$src),
                    "cvtsi2ss {$src, $dst|$dst, $src}",
                    [(set FR32:$dst, (sint_to_fp (loadi32 addr:$src)))]>;
def CVTSI2SDrr: SDI<0x2A, MRMSrcReg, (ops FR64:$dst, GR32:$src),
                    "cvtsi2sd {$src, $dst|$dst, $src}",
                    [(set FR64:$dst, (sint_to_fp GR32:$src))]>;
def CVTSI2SDrm: SDI<0x2A, MRMSrcMem, (ops FR64:$dst, i32mem:$src),
                    "cvtsi2sd {$src, $dst|$dst, $src}",
                    [(set FR64:$dst, (sint_to_fp (loadi32 addr:$src)))]>;
// SSE2 instructions with XS prefix (so the plain SSI/SDI templates do not
// apply: XS prefix but a HasSSE2 predicate).
def CVTSS2SDrr: I<0x5A, MRMSrcReg, (ops FR64:$dst, FR32:$src),
                  "cvtss2sd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (fextend FR32:$src))]>, XS,
                Requires<[HasSSE2]>;
def CVTSS2SDrm: I<0x5A, MRMSrcMem, (ops FR64:$dst, f32mem:$src),
                  "cvtss2sd {$src, $dst|$dst, $src}",
                  [(set FR64:$dst, (extload addr:$src, f32))]>, XS,
                Requires<[HasSSE2]>;
// Match intrinsics which expect XMM operand(s).
def CVTSS2SIrr: SSI<0x2D, MRMSrcReg, (ops GR32:$dst, VR128:$src),
                    "cvtss2si {$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (int_x86_sse_cvtss2si VR128:$src))]>;
def CVTSS2SIrm: SSI<0x2D, MRMSrcMem, (ops GR32:$dst, f32mem:$src),
                    "cvtss2si {$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (int_x86_sse_cvtss2si
                                      (loadv4f32 addr:$src)))]>;
def CVTSD2SIrr: SDI<0x2D, MRMSrcReg, (ops GR32:$dst, VR128:$src),
                    "cvtsd2si {$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (int_x86_sse2_cvtsd2si VR128:$src))]>;
def CVTSD2SIrm: SDI<0x2D, MRMSrcMem, (ops GR32:$dst, f128mem:$src),
                    "cvtsd2si {$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (int_x86_sse2_cvtsd2si
                                      (loadv2f64 addr:$src)))]>;
// Aliases for intrinsics
def Int_CVTTSS2SIrr: SSI<0x2C, MRMSrcReg, (ops GR32:$dst, VR128:$src),
                         "cvttss2si {$src, $dst|$dst, $src}",
                         [(set GR32:$dst, (int_x86_sse_cvttss2si VR128:$src))]>;
def Int_CVTTSS2SIrm: SSI<0x2C, MRMSrcMem, (ops GR32:$dst, f32mem:$src),
                         "cvttss2si {$src, $dst|$dst, $src}",
                         [(set GR32:$dst, (int_x86_sse_cvttss2si
                                           (loadv4f32 addr:$src)))]>;
def Int_CVTTSD2SIrr: SDI<0x2C, MRMSrcReg, (ops GR32:$dst, VR128:$src),
                         "cvttsd2si {$src, $dst|$dst, $src}",
                         [(set GR32:$dst, (int_x86_sse2_cvttsd2si VR128:$src))]>;
def Int_CVTTSD2SIrm: SDI<0x2C, MRMSrcMem, (ops GR32:$dst, f128mem:$src),
                         "cvttsd2si {$src, $dst|$dst, $src}",
                         [(set GR32:$dst, (int_x86_sse2_cvttsd2si
                                           (loadv2f64 addr:$src)))]>;
// cvtsi2ss merges into the low element of $src1, hence the two-address form.
let isTwoAddress = 1 in {
def Int_CVTSI2SSrr: SSI<0x2A, MRMSrcReg,
                        (ops VR128:$dst, VR128:$src1, GR32:$src2),
                        "cvtsi2ss {$src2, $dst|$dst, $src2}",
                        [(set VR128:$dst, (int_x86_sse_cvtsi2ss VR128:$src1,
                                           GR32:$src2))]>;
def Int_CVTSI2SSrm: SSI<0x2A, MRMSrcMem,
                        (ops VR128:$dst, VR128:$src1, i32mem:$src2),
                        "cvtsi2ss {$src2, $dst|$dst, $src2}",
                        [(set VR128:$dst, (int_x86_sse_cvtsi2ss VR128:$src1,
                                           (loadi32 addr:$src2)))]>;
}
// Comparison instructions (no DAG patterns; selected via the Int_ aliases).
let isTwoAddress = 1 in {
def CMPSSrr : SSI<0xC2, MRMSrcReg,
                  (ops FR32:$dst, FR32:$src1, FR32:$src, SSECC:$cc),
                  "cmp${cc}ss {$src, $dst|$dst, $src}", []>;
def CMPSSrm : SSI<0xC2, MRMSrcMem,
                  (ops FR32:$dst, FR32:$src1, f32mem:$src, SSECC:$cc),
                  "cmp${cc}ss {$src, $dst|$dst, $src}", []>;
def CMPSDrr : SDI<0xC2, MRMSrcReg,
                  (ops FR64:$dst, FR64:$src1, FR64:$src, SSECC:$cc),
                  "cmp${cc}sd {$src, $dst|$dst, $src}", []>;
def CMPSDrm : SDI<0xC2, MRMSrcMem,
                  (ops FR64:$dst, FR64:$src1, f64mem:$src, SSECC:$cc),
                  "cmp${cc}sd {$src, $dst|$dst, $src}", []>;
}
// Unordered compare-and-set-EFLAGS on scalar FP registers.
def UCOMISSrr: PSI<0x2E, MRMSrcReg, (ops FR32:$src1, FR32:$src2),
                   "ucomiss {$src2, $src1|$src1, $src2}",
                   [(X86cmp FR32:$src1, FR32:$src2)]>;
def UCOMISSrm: PSI<0x2E, MRMSrcMem, (ops FR32:$src1, f32mem:$src2),
                   "ucomiss {$src2, $src1|$src1, $src2}",
                   [(X86cmp FR32:$src1, (loadf32 addr:$src2))]>;
def UCOMISDrr: PDI<0x2E, MRMSrcReg, (ops FR64:$src1, FR64:$src2),
                   "ucomisd {$src2, $src1|$src1, $src2}",
                   [(X86cmp FR64:$src1, FR64:$src2)]>;
def UCOMISDrm: PDI<0x2E, MRMSrcMem, (ops FR64:$src1, f64mem:$src2),
                   "ucomisd {$src2, $src1|$src1, $src2}",
                   [(X86cmp FR64:$src1, (loadf64 addr:$src2))]>;
// Aliases to match intrinsics which expect XMM operand(s).
let isTwoAddress = 1 in {
def Int_CMPSSrr : SSI<0xC2, MRMSrcReg,
                      (ops VR128:$dst, VR128:$src1, VR128:$src, SSECC:$cc),
                      "cmp${cc}ss {$src, $dst|$dst, $src}",
                      [(set VR128:$dst, (int_x86_sse_cmp_ss VR128:$src1,
                                         VR128:$src, imm:$cc))]>;
def Int_CMPSSrm : SSI<0xC2, MRMSrcMem,
                      (ops VR128:$dst, VR128:$src1, f32mem:$src, SSECC:$cc),
                      "cmp${cc}ss {$src, $dst|$dst, $src}",
                      [(set VR128:$dst, (int_x86_sse_cmp_ss VR128:$src1,
                                         (load addr:$src), imm:$cc))]>;
def Int_CMPSDrr : SDI<0xC2, MRMSrcReg,
                      (ops VR128:$dst, VR128:$src1, VR128:$src, SSECC:$cc),
                      "cmp${cc}sd {$src, $dst|$dst, $src}", []>;
def Int_CMPSDrm : SDI<0xC2, MRMSrcMem,
                      (ops VR128:$dst, VR128:$src1, f64mem:$src, SSECC:$cc),
                      "cmp${cc}sd {$src, $dst|$dst, $src}", []>;
}
// Intrinsic forms of (u)comiss / (u)comisd on XMM registers.
def Int_UCOMISSrr: PSI<0x2E, MRMSrcReg, (ops VR128:$src1, VR128:$src2),
                       "ucomiss {$src2, $src1|$src1, $src2}",
                       [(X86ucomi (v4f32 VR128:$src1), VR128:$src2)]>;
def Int_UCOMISSrm: PSI<0x2E, MRMSrcMem, (ops VR128:$src1, f128mem:$src2),
                       "ucomiss {$src2, $src1|$src1, $src2}",
                       [(X86ucomi (v4f32 VR128:$src1), (loadv4f32 addr:$src2))]>;
def Int_UCOMISDrr: PDI<0x2E, MRMSrcReg, (ops VR128:$src1, VR128:$src2),
                       "ucomisd {$src2, $src1|$src1, $src2}",
                       [(X86ucomi (v2f64 VR128:$src1), (v2f64 VR128:$src2))]>;
def Int_UCOMISDrm: PDI<0x2E, MRMSrcMem, (ops VR128:$src1, f128mem:$src2),
                       "ucomisd {$src2, $src1|$src1, $src2}",
                       [(X86ucomi (v2f64 VR128:$src1), (loadv2f64 addr:$src2))]>;

def Int_COMISSrr: PSI<0x2F, MRMSrcReg, (ops VR128:$src1, VR128:$src2),
                      "comiss {$src2, $src1|$src1, $src2}",
                      [(X86comi (v4f32 VR128:$src1), VR128:$src2)]>;
def Int_COMISSrm: PSI<0x2F, MRMSrcMem, (ops VR128:$src1, f128mem:$src2),
                      "comiss {$src2, $src1|$src1, $src2}",
                      [(X86comi (v4f32 VR128:$src1), (loadv4f32 addr:$src2))]>;
def Int_COMISDrr: PDI<0x2F, MRMSrcReg, (ops VR128:$src1, VR128:$src2),
                      "comisd {$src2, $src1|$src1, $src2}",
                      [(X86comi (v2f64 VR128:$src1), (v2f64 VR128:$src2))]>;
def Int_COMISDrm: PDI<0x2F, MRMSrcMem, (ops VR128:$src1, f128mem:$src2),
                      "comisd {$src2, $src1|$src1, $src2}",
                      [(X86comi (v2f64 VR128:$src1), (loadv2f64 addr:$src2))]>;
// Aliases of packed instructions for scalar use. These all have names that
// start with 'Fs'.

// Alias instructions that map fld0 to pxor for sse.
// FIXME: remove when we can teach regalloc that xor reg, reg is ok.
// NOTE(review): fp64imm0 is not defined in this file's visible fragments;
// confirm it is declared elsewhere (cf. fp32imm0 above).
def FsFLD0SS : I<0xEF, MRMInitReg, (ops FR32:$dst),
                 "pxor $dst, $dst", [(set FR32:$dst, fp32imm0)]>,
               Requires<[HasSSE1]>, TB, OpSize;
def FsFLD0SD : I<0xEF, MRMInitReg, (ops FR64:$dst),
                 "pxor $dst, $dst", [(set FR64:$dst, fp64imm0)]>,
               Requires<[HasSSE2]>, TB, OpSize;
// Alias instructions to do FR32 / FR64 reg-to-reg copy using movaps / movapd.
// Upper bits are disregarded.
def FsMOVAPSrr : PSI<0x28, MRMSrcReg, (ops FR32:$dst, FR32:$src),
                     "movaps {$src, $dst|$dst, $src}", []>;
def FsMOVAPDrr : PDI<0x28, MRMSrcReg, (ops FR64:$dst, FR64:$src),
                     "movapd {$src, $dst|$dst, $src}", []>;

// Alias instructions to load FR32 / FR64 from f128mem using movaps / movapd.
// Upper bits are disregarded.
def FsMOVAPSrm : PSI<0x28, MRMSrcMem, (ops FR32:$dst, f128mem:$src),
                     "movaps {$src, $dst|$dst, $src}",
                     [(set FR32:$dst, (X86loadpf32 addr:$src))]>;
def FsMOVAPDrm : PDI<0x28, MRMSrcMem, (ops FR64:$dst, f128mem:$src),
                     "movapd {$src, $dst|$dst, $src}",
                     [(set FR64:$dst, (X86loadpf64 addr:$src))]>;
// Alias bitwise logical operations using SSE logical ops on packed FP values.
let isTwoAddress = 1 in {

let isCommutable = 1 in {
def FsANDPSrr : PSI<0x54, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                    "andps {$src2, $dst|$dst, $src2}",
                    [(set FR32:$dst, (X86fand FR32:$src1, FR32:$src2))]>;
def FsANDPDrr : PDI<0x54, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                    "andpd {$src2, $dst|$dst, $src2}",
                    [(set FR64:$dst, (X86fand FR64:$src1, FR64:$src2))]>;
def FsORPSrr  : PSI<0x56, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                    "orps {$src2, $dst|$dst, $src2}", []>;
def FsORPDrr  : PDI<0x56, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                    "orpd {$src2, $dst|$dst, $src2}", []>;
def FsXORPSrr : PSI<0x57, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                    "xorps {$src2, $dst|$dst, $src2}",
                    [(set FR32:$dst, (X86fxor FR32:$src1, FR32:$src2))]>;
def FsXORPDrr : PDI<0x57, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                    "xorpd {$src2, $dst|$dst, $src2}",
                    [(set FR64:$dst, (X86fxor FR64:$src1, FR64:$src2))]>;
}

def FsANDPSrm : PSI<0x54, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f128mem:$src2),
                    "andps {$src2, $dst|$dst, $src2}",
                    [(set FR32:$dst, (X86fand FR32:$src1,
                                      (X86loadpf32 addr:$src2)))]>;
def FsANDPDrm : PDI<0x54, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f128mem:$src2),
                    "andpd {$src2, $dst|$dst, $src2}",
                    [(set FR64:$dst, (X86fand FR64:$src1,
                                      (X86loadpf64 addr:$src2)))]>;
def FsORPSrm  : PSI<0x56, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f128mem:$src2),
                    "orps {$src2, $dst|$dst, $src2}", []>;
def FsORPDrm  : PDI<0x56, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f128mem:$src2),
                    "orpd {$src2, $dst|$dst, $src2}", []>;
def FsXORPSrm : PSI<0x57, MRMSrcMem, (ops FR32:$dst, FR32:$src1, f128mem:$src2),
                    "xorps {$src2, $dst|$dst, $src2}",
                    [(set FR32:$dst, (X86fxor FR32:$src1,
                                      (X86loadpf32 addr:$src2)))]>;
def FsXORPDrm : PDI<0x57, MRMSrcMem, (ops FR64:$dst, FR64:$src1, f128mem:$src2),
                    "xorpd {$src2, $dst|$dst, $src2}",
                    [(set FR64:$dst, (X86fxor FR64:$src1,
                                      (X86loadpf64 addr:$src2)))]>;

def FsANDNPSrr : PSI<0x55, MRMSrcReg, (ops FR32:$dst, FR32:$src1, FR32:$src2),
                     "andnps {$src2, $dst|$dst, $src2}", []>;
def FsANDNPSrm : PSI<0x55, MRMSrcMem,
                     (ops FR32:$dst, FR32:$src1, f128mem:$src2),
                     "andnps {$src2, $dst|$dst, $src2}", []>;
def FsANDNPDrr : PDI<0x55, MRMSrcReg, (ops FR64:$dst, FR64:$src1, FR64:$src2),
                     "andnpd {$src2, $dst|$dst, $src2}", []>;
def FsANDNPDrm : PDI<0x55, MRMSrcMem,
                     (ops FR64:$dst, FR64:$src1, f128mem:$src2),
                     "andnpd {$src2, $dst|$dst, $src2}", []>;
}
732 //===----------------------------------------------------------------------===//
733 // SSE packed FP Instructions
734 //===----------------------------------------------------------------------===//
736 // Some 'special' instructions
// Pseudo that defines a VR128 without reading anything (v4f32 undef).
// NOTE(review): line 740-742 (trailing Requires<...>?) missing from this
// extraction -- the record appears truncated; confirm against the full file.
737 def IMPLICIT_DEF_VR128 : I<0, Pseudo, (ops VR128:$dst),
738 "#IMPLICIT_DEF $dst",
739 [(set VR128:$dst, (v4f32 (undef)))]>,
// Aligned packed moves: rr forms have empty patterns (pure register copies);
// rm forms match aligned vector loads, mr forms match vector stores.
743 def MOVAPSrr : PSI<0x28, MRMSrcReg, (ops VR128:$dst, VR128:$src),
744 "movaps {$src, $dst|$dst, $src}", []>;
745 def MOVAPSrm : PSI<0x28, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
746 "movaps {$src, $dst|$dst, $src}",
747 [(set VR128:$dst, (loadv4f32 addr:$src))]>;
748 def MOVAPDrr : PDI<0x28, MRMSrcReg, (ops VR128:$dst, VR128:$src),
749 "movapd {$src, $dst|$dst, $src}", []>;
750 def MOVAPDrm : PDI<0x28, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
751 "movapd {$src, $dst|$dst, $src}",
752 [(set VR128:$dst, (loadv2f64 addr:$src))]>;
754 def MOVAPSmr : PSI<0x29, MRMDestMem, (ops f128mem:$dst, VR128:$src),
755 "movaps {$src, $dst|$dst, $src}",
756 [(store (v4f32 VR128:$src), addr:$dst)]>;
757 def MOVAPDmr : PDI<0x29, MRMDestMem, (ops f128mem:$dst, VR128:$src),
758 "movapd {$src, $dst|$dst, $src}",
759 [(store (v2f64 VR128:$src), addr:$dst)]>;
// Unaligned moves are only matched via the loadu/storeu intrinsics, so plain
// (possibly unaligned) vector loads never select MOVUPS/MOVUPD implicitly.
761 def MOVUPSrr : PSI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src),
762 "movups {$src, $dst|$dst, $src}", []>;
763 def MOVUPSrm : PSI<0x10, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
764 "movups {$src, $dst|$dst, $src}",
765 [(set VR128:$dst, (int_x86_sse_loadu_ps addr:$src))]>;
766 def MOVUPSmr : PSI<0x11, MRMDestMem, (ops f128mem:$dst, VR128:$src),
767 "movups {$src, $dst|$dst, $src}",
768 [(int_x86_sse_storeu_ps addr:$dst, VR128:$src)]>;
769 def MOVUPDrr : PDI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src),
770 "movupd {$src, $dst|$dst, $src}", []>;
771 def MOVUPDrm : PDI<0x10, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
772 "movupd {$src, $dst|$dst, $src}",
773 [(set VR128:$dst, (int_x86_sse2_loadu_pd addr:$src))]>;
774 def MOVUPDmr : PDI<0x11, MRMDestMem, (ops f128mem:$dst, VR128:$src),
775 "movupd {$src, $dst|$dst, $src}",
776 [(int_x86_sse2_storeu_pd addr:$dst, VR128:$src)]>;
// MOVLPS/MOVLPD/MOVHPS/MOVHPD load forms: merge a 64-bit memory value into
// the low (MOVLP) or high (MOVHP) half of $src1 via a vector_shuffle with a
// fixed mask. AddedComplexity prefers these over generic shuffle selection.
// NOTE(review): several "[(set VR128:$dst," lines (782, 788, 794, 800, 836,
// 842, ...) are missing from this extraction; patterns below are truncated.
778 let isTwoAddress = 1 in {
779 let AddedComplexity = 20 in {
780 def MOVLPSrm : PSI<0x12, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2),
781 "movlps {$src2, $dst|$dst, $src2}",
783 (v4f32 (vector_shuffle VR128:$src1,
784 (bc_v4f32 (v2f64 (scalar_to_vector (loadf64 addr:$src2)))),
785 MOVLP_shuffle_mask)))]>;
786 def MOVLPDrm : PDI<0x12, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2),
787 "movlpd {$src2, $dst|$dst, $src2}",
789 (v2f64 (vector_shuffle VR128:$src1,
790 (scalar_to_vector (loadf64 addr:$src2)),
791 MOVLP_shuffle_mask)))]>;
792 def MOVHPSrm : PSI<0x16, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2),
793 "movhps {$src2, $dst|$dst, $src2}",
795 (v4f32 (vector_shuffle VR128:$src1,
796 (bc_v4f32 (v2f64 (scalar_to_vector (loadf64 addr:$src2)))),
797 MOVHP_shuffle_mask)))]>;
798 def MOVHPDrm : PDI<0x16, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f64mem:$src2),
799 "movhpd {$src2, $dst|$dst, $src2}",
801 (v2f64 (vector_shuffle VR128:$src1,
802 (scalar_to_vector (loadf64 addr:$src2)),
803 MOVHP_shuffle_mask)))]>;
// Store forms: write element 0 (after an optional bitcast/shuffle) to memory.
807 def MOVLPSmr : PSI<0x13, MRMDestMem, (ops f64mem:$dst, VR128:$src),
808 "movlps {$src, $dst|$dst, $src}",
809 [(store (f64 (vector_extract (bc_v2f64 (v4f32 VR128:$src)),
810 (i32 0))), addr:$dst)]>;
811 def MOVLPDmr : PDI<0x13, MRMDestMem, (ops f64mem:$dst, VR128:$src),
812 "movlpd {$src, $dst|$dst, $src}",
813 [(store (f64 (vector_extract (v2f64 VR128:$src),
814 (i32 0))), addr:$dst)]>;
816 // v2f64 extract element 1 is always custom lowered to unpack high to low
817 // and extract element 0 so the non-store version isn't too horrible.
818 def MOVHPSmr : PSI<0x17, MRMDestMem, (ops f64mem:$dst, VR128:$src),
819 "movhps {$src, $dst|$dst, $src}",
820 [(store (f64 (vector_extract
821 (v2f64 (vector_shuffle
822 (bc_v2f64 (v4f32 VR128:$src)), (undef),
823 UNPCKH_shuffle_mask)), (i32 0))),
825 def MOVHPDmr : PDI<0x17, MRMDestMem, (ops f64mem:$dst, VR128:$src),
826 "movhpd {$src, $dst|$dst, $src}",
827 [(store (f64 (vector_extract
828 (v2f64 (vector_shuffle VR128:$src, (undef),
829 UNPCKH_shuffle_mask)), (i32 0))),
// Register-to-register half moves (two-address, mask-driven shuffles).
832 let isTwoAddress = 1 in {
833 let AddedComplexity = 20 in {
834 def MOVLHPSrr : PSI<0x16, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
835 "movlhps {$src2, $dst|$dst, $src2}",
837 (v4f32 (vector_shuffle VR128:$src1, VR128:$src2,
838 MOVHP_shuffle_mask)))]>;
840 def MOVHLPSrr : PSI<0x12, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
841 "movhlps {$src2, $dst|$dst, $src2}",
843 (v4f32 (vector_shuffle VR128:$src1, VR128:$src2,
844 MOVHLPS_shuffle_mask)))]>;
// SSE3 duplicate moves: shuffle with fixed dup masks (S3SI = SSE3 scalar-
// single prefix class, S3DI = SSE3 double class, per the classes used below).
848 def MOVSHDUPrr : S3SI<0x16, MRMSrcReg, (ops VR128:$dst, VR128:$src),
849 "movshdup {$src, $dst|$dst, $src}",
850 [(set VR128:$dst, (v4f32 (vector_shuffle
852 MOVSHDUP_shuffle_mask)))]>;
853 def MOVSHDUPrm : S3SI<0x16, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
854 "movshdup {$src, $dst|$dst, $src}",
855 [(set VR128:$dst, (v4f32 (vector_shuffle
856 (loadv4f32 addr:$src), (undef),
857 MOVSHDUP_shuffle_mask)))]>;
859 def MOVSLDUPrr : S3SI<0x12, MRMSrcReg, (ops VR128:$dst, VR128:$src),
860 "movsldup {$src, $dst|$dst, $src}",
861 [(set VR128:$dst, (v4f32 (vector_shuffle
863 MOVSLDUP_shuffle_mask)))]>;
864 def MOVSLDUPrm : S3SI<0x12, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
865 "movsldup {$src, $dst|$dst, $src}",
866 [(set VR128:$dst, (v4f32 (vector_shuffle
867 (loadv4f32 addr:$src), (undef),
868 MOVSLDUP_shuffle_mask)))]>;
870 def MOVDDUPrr : S3DI<0x12, MRMSrcReg, (ops VR128:$dst, VR128:$src),
871 "movddup {$src, $dst|$dst, $src}",
872 [(set VR128:$dst, (v2f64 (vector_shuffle
874 SSE_splat_v2_mask)))]>;
875 def MOVDDUPrm : S3DI<0x12, MRMSrcMem, (ops VR128:$dst, f64mem:$src),
876 "movddup {$src, $dst|$dst, $src}",
877 [(set VR128:$dst, (v2f64 (vector_shuffle
878 (scalar_to_vector (loadf64 addr:$src)),
880 SSE_splat_v2_mask)))]>;
// Packed int<->FP conversions. These SSE2 instructions share opcodes with
// other prefix groups, so the prefix is specified explicitly (TB/XS/XD) with
// Requires<[HasSSE2]> instead of using the PDI/PSI classes.
882 // SSE2 instructions without OpSize prefix
883 def CVTDQ2PSrr : I<0x5B, MRMSrcReg, (ops VR128:$dst, VR128:$src),
884 "cvtdq2ps {$src, $dst|$dst, $src}",
885 [(set VR128:$dst, (int_x86_sse2_cvtdq2ps VR128:$src))]>,
886 TB, Requires<[HasSSE2]>;
887 def CVTDQ2PSrm : I<0x5B, MRMSrcMem, (ops VR128:$dst, i128mem:$src),
888 "cvtdq2ps {$src, $dst|$dst, $src}",
889 [(set VR128:$dst, (int_x86_sse2_cvtdq2ps
890 (bc_v4i32 (loadv2i64 addr:$src))))]>,
891 TB, Requires<[HasSSE2]>;
893 // SSE2 instructions with XS prefix
894 def CVTDQ2PDrr : I<0xE6, MRMSrcReg, (ops VR128:$dst, VR128:$src),
895 "cvtdq2pd {$src, $dst|$dst, $src}",
896 [(set VR128:$dst, (int_x86_sse2_cvtdq2pd VR128:$src))]>,
897 XS, Requires<[HasSSE2]>;
898 def CVTDQ2PDrm : I<0xE6, MRMSrcMem, (ops VR128:$dst, i64mem:$src),
899 "cvtdq2pd {$src, $dst|$dst, $src}",
900 [(set VR128:$dst, (int_x86_sse2_cvtdq2pd
901 (bc_v4i32 (loadv2i64 addr:$src))))]>,
902 XS, Requires<[HasSSE2]>;
904 def CVTPS2DQrr : PDI<0x5B, MRMSrcReg, (ops VR128:$dst, VR128:$src),
905 "cvtps2dq {$src, $dst|$dst, $src}",
906 [(set VR128:$dst, (int_x86_sse2_cvtps2dq VR128:$src))]>;
907 def CVTPS2DQrm : PDI<0x5B, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
908 "cvtps2dq {$src, $dst|$dst, $src}",
909 [(set VR128:$dst, (int_x86_sse2_cvtps2dq
910 (loadv4f32 addr:$src)))]>;
911 // SSE2 packed instructions with XS prefix
912 def CVTTPS2DQrr : I<0x5B, MRMSrcReg, (ops VR128:$dst, VR128:$src),
913 "cvttps2dq {$src, $dst|$dst, $src}",
914 [(set VR128:$dst, (int_x86_sse2_cvttps2dq VR128:$src))]>,
915 XS, Requires<[HasSSE2]>;
916 def CVTTPS2DQrm : I<0x5B, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
917 "cvttps2dq {$src, $dst|$dst, $src}",
918 [(set VR128:$dst, (int_x86_sse2_cvttps2dq
919 (loadv4f32 addr:$src)))]>,
920 XS, Requires<[HasSSE2]>;
922 // SSE2 packed instructions with XD prefix
923 def CVTPD2DQrr : I<0xE6, MRMSrcReg, (ops VR128:$dst, VR128:$src),
924 "cvtpd2dq {$src, $dst|$dst, $src}",
925 [(set VR128:$dst, (int_x86_sse2_cvtpd2dq VR128:$src))]>,
926 XD, Requires<[HasSSE2]>;
927 def CVTPD2DQrm : I<0xE6, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
928 "cvtpd2dq {$src, $dst|$dst, $src}",
929 [(set VR128:$dst, (int_x86_sse2_cvtpd2dq
930 (loadv2f64 addr:$src)))]>,
931 XD, Requires<[HasSSE2]>;
932 def CVTTPD2DQrr : PDI<0xE6, MRMSrcReg, (ops VR128:$dst, VR128:$src),
933 "cvttpd2dq {$src, $dst|$dst, $src}",
934 [(set VR128:$dst, (int_x86_sse2_cvttpd2dq VR128:$src))]>;
935 def CVTTPD2DQrm : PDI<0xE6, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
936 "cvttpd2dq {$src, $dst|$dst, $src}",
937 [(set VR128:$dst, (int_x86_sse2_cvttpd2dq
938 (loadv2f64 addr:$src)))]>;
940 // SSE2 instructions without OpSize prefix
941 def CVTPS2PDrr : I<0x5A, MRMSrcReg, (ops VR128:$dst, VR128:$src),
942 "cvtps2pd {$src, $dst|$dst, $src}",
943 [(set VR128:$dst, (int_x86_sse2_cvtps2pd VR128:$src))]>,
944 TB, Requires<[HasSSE2]>;
// CVTPS2PD memory form: converts two packed f32s loaded from memory to two
// packed f64s.
// Fix: this record takes a memory operand (f64mem:$src) but was declared with
// form MRMSrcReg, which encodes/decodes ModRM as a register-register
// instruction; the memory form must be MRMSrcMem (matching every other *rm
// record in this file, e.g. CVTDQ2PSrm/CVTPS2DQrm above).
// NOTE(review): (loadv4f32 ...) on an f64mem operand looks suspicious
// (64-bit slot, 128-bit load pattern) -- left as-is, confirm intended types.
945 def CVTPS2PDrm : I<0x5A, MRMSrcMem, (ops VR128:$dst, f64mem:$src),
946 "cvtps2pd {$src, $dst|$dst, $src}",
947 [(set VR128:$dst, (int_x86_sse2_cvtps2pd
948 (loadv4f32 addr:$src)))]>,
949 TB, Requires<[HasSSE2]>;
// CVTPD2PS register form: packed f64 -> packed f32 via the SSE2 intrinsic.
951 def CVTPD2PSrr : PDI<0x5A, MRMSrcReg, (ops VR128:$dst, VR128:$src),
952 "cvtpd2ps {$src, $dst|$dst, $src}",
953 [(set VR128:$dst, (int_x86_sse2_cvtpd2ps VR128:$src))]>;
// CVTPD2PS memory form: converts two packed f64s loaded from memory to
// packed f32s.
// Fix: memory-operand record (f128mem:$src) was declared MRMSrcReg, which
// produces a register-form ModRM encoding; it must be MRMSrcMem like the
// sibling *rm definitions (CVTTPD2DQrm, CVTPS2DQrm) above.
954 def CVTPD2PSrm : PDI<0x5A, MRMSrcMem, (ops VR128:$dst, f128mem:$src),
955 "cvtpd2ps {$src, $dst|$dst, $src}",
956 [(set VR128:$dst, (int_x86_sse2_cvtpd2ps
957 (loadv2f64 addr:$src)))]>;
959 // Match intrinsics which expect XMM operand(s).
960 // Aliases for intrinsics
// Two-address scalar conversion intrinsics; $src1 is tied to $dst.
// NOTE(review): lines 966, 976, 986-987, 993-996 are missing from this
// extraction, so several rr patterns and trailing modifiers are truncated.
961 let isTwoAddress = 1 in {
962 def Int_CVTSI2SDrr: SDI<0x2A, MRMSrcReg,
963 (ops VR128:$dst, VR128:$src1, GR32:$src2),
964 "cvtsi2sd {$src2, $dst|$dst, $src2}",
965 [(set VR128:$dst, (int_x86_sse2_cvtsi2sd VR128:$src1,
967 def Int_CVTSI2SDrm: SDI<0x2A, MRMSrcMem,
968 (ops VR128:$dst, VR128:$src1, i32mem:$src2),
969 "cvtsi2sd {$src2, $dst|$dst, $src2}",
970 [(set VR128:$dst, (int_x86_sse2_cvtsi2sd VR128:$src1,
971 (loadi32 addr:$src2)))]>;
972 def Int_CVTSD2SSrr: SDI<0x5A, MRMSrcReg,
973 (ops VR128:$dst, VR128:$src1, VR128:$src2),
974 "cvtsd2ss {$src2, $dst|$dst, $src2}",
975 [(set VR128:$dst, (int_x86_sse2_cvtsd2ss VR128:$src1,
977 def Int_CVTSD2SSrm: SDI<0x5A, MRMSrcMem,
978 (ops VR128:$dst, VR128:$src1, f64mem:$src2),
979 "cvtsd2ss {$src2, $dst|$dst, $src2}",
980 [(set VR128:$dst, (int_x86_sse2_cvtsd2ss VR128:$src1,
981 (loadv2f64 addr:$src2)))]>;
982 def Int_CVTSS2SDrr: I<0x5A, MRMSrcReg,
983 (ops VR128:$dst, VR128:$src1, VR128:$src2),
984 "cvtss2sd {$src2, $dst|$dst, $src2}",
985 [(set VR128:$dst, (int_x86_sse2_cvtss2sd VR128:$src1,
988 def Int_CVTSS2SDrm: I<0x5A, MRMSrcMem,
989 (ops VR128:$dst, VR128:$src1, f32mem:$src2),
990 "cvtss2sd {$src2, $dst|$dst, $src2}",
991 [(set VR128:$dst, (int_x86_sse2_cvtss2sd VR128:$src1,
992 (loadv4f32 addr:$src2)))]>, XS,
// Packed FP arithmetic: two-address; add/mul are commutative, sub/div are
// not. Memory forms fold a 128-bit load into the second operand.
997 let isTwoAddress = 1 in {
998 let isCommutable = 1 in {
999 def ADDPSrr : PSI<0x58, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1000 "addps {$src2, $dst|$dst, $src2}",
1001 [(set VR128:$dst, (v4f32 (fadd VR128:$src1, VR128:$src2)))]>;
1002 def ADDPDrr : PDI<0x58, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1003 "addpd {$src2, $dst|$dst, $src2}",
1004 [(set VR128:$dst, (v2f64 (fadd VR128:$src1, VR128:$src2)))]>;
1005 def MULPSrr : PSI<0x59, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1006 "mulps {$src2, $dst|$dst, $src2}",
1007 [(set VR128:$dst, (v4f32 (fmul VR128:$src1, VR128:$src2)))]>;
1008 def MULPDrr : PDI<0x59, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1009 "mulpd {$src2, $dst|$dst, $src2}",
1010 [(set VR128:$dst, (v2f64 (fmul VR128:$src1, VR128:$src2)))]>;
1013 def ADDPSrm : PSI<0x58, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1014 "addps {$src2, $dst|$dst, $src2}",
1015 [(set VR128:$dst, (v4f32 (fadd VR128:$src1,
1016 (load addr:$src2))))]>;
1017 def ADDPDrm : PDI<0x58, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1018 "addpd {$src2, $dst|$dst, $src2}",
1019 [(set VR128:$dst, (v2f64 (fadd VR128:$src1,
1020 (load addr:$src2))))]>;
1021 def MULPSrm : PSI<0x59, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1022 "mulps {$src2, $dst|$dst, $src2}",
1023 [(set VR128:$dst, (v4f32 (fmul VR128:$src1,
1024 (load addr:$src2))))]>;
1025 def MULPDrm : PDI<0x59, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1026 "mulpd {$src2, $dst|$dst, $src2}",
1027 [(set VR128:$dst, (v2f64 (fmul VR128:$src1,
1028 (load addr:$src2))))]>;
1030 def DIVPSrr : PSI<0x5E, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1031 "divps {$src2, $dst|$dst, $src2}",
1032 [(set VR128:$dst, (v4f32 (fdiv VR128:$src1, VR128:$src2)))]>;
1033 def DIVPSrm : PSI<0x5E, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1034 "divps {$src2, $dst|$dst, $src2}",
1035 [(set VR128:$dst, (v4f32 (fdiv VR128:$src1,
1036 (load addr:$src2))))]>;
1037 def DIVPDrr : PDI<0x5E, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1038 "divpd {$src2, $dst|$dst, $src2}",
1039 [(set VR128:$dst, (v2f64 (fdiv VR128:$src1, VR128:$src2)))]>;
1040 def DIVPDrm : PDI<0x5E, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1041 "divpd {$src2, $dst|$dst, $src2}",
1042 [(set VR128:$dst, (v2f64 (fdiv VR128:$src1,
1043 (load addr:$src2))))]>;
1045 def SUBPSrr : PSI<0x5C, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1046 "subps {$src2, $dst|$dst, $src2}",
1047 [(set VR128:$dst, (v4f32 (fsub VR128:$src1, VR128:$src2)))]>;
1048 def SUBPSrm : PSI<0x5C, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1049 "subps {$src2, $dst|$dst, $src2}",
1050 [(set VR128:$dst, (v4f32 (fsub VR128:$src1,
1051 (load addr:$src2))))]>;
1052 def SUBPDrr : PDI<0x5C, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1053 "subpd {$src2, $dst|$dst, $src2}",
1054 [(set VR128:$dst, (v2f64 (fsub VR128:$src1, VR128:$src2)))]>;
1055 def SUBPDrm : PDI<0x5C, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1056 "subpd {$src2, $dst|$dst, $src2}",
1057 [(set VR128:$dst, (v2f64 (fsub VR128:$src1,
1058 (load addr:$src2))))]>;
// SSE3 horizontal add/sub-alternating; matched only via intrinsics.
// NOTE(review): lines 1064, 1074 ("VR128:$src2))]>;") missing from extraction.
1060 def ADDSUBPSrr : S3DI<0xD0, MRMSrcReg,
1061 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1062 "addsubps {$src2, $dst|$dst, $src2}",
1063 [(set VR128:$dst, (int_x86_sse3_addsub_ps VR128:$src1,
1065 def ADDSUBPSrm : S3DI<0xD0, MRMSrcMem,
1066 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1067 "addsubps {$src2, $dst|$dst, $src2}",
1068 [(set VR128:$dst, (int_x86_sse3_addsub_ps VR128:$src1,
1069 (loadv4f32 addr:$src2)))]>;
1070 def ADDSUBPDrr : S3I<0xD0, MRMSrcReg,
1071 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1072 "addsubpd {$src2, $dst|$dst, $src2}",
1073 [(set VR128:$dst, (int_x86_sse3_addsub_pd VR128:$src1,
1075 def ADDSUBPDrm : S3I<0xD0, MRMSrcMem,
1076 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1077 "addsubpd {$src2, $dst|$dst, $src2}",
1078 [(set VR128:$dst, (int_x86_sse3_addsub_pd VR128:$src1,
1079 (loadv2f64 addr:$src2)))]>;
// sqrt / reciprocal-sqrt / reciprocal via intrinsic helper classes.
1082 def SQRTPSr : PS_Intr<0x51, "sqrtps {$src, $dst|$dst, $src}",
1083 int_x86_sse_sqrt_ps>;
1084 def SQRTPSm : PS_Intm<0x51, "sqrtps {$src, $dst|$dst, $src}",
1085 int_x86_sse_sqrt_ps>;
1086 def SQRTPDr : PD_Intr<0x51, "sqrtpd {$src, $dst|$dst, $src}",
1087 int_x86_sse2_sqrt_pd>;
1088 def SQRTPDm : PD_Intm<0x51, "sqrtpd {$src, $dst|$dst, $src}",
1089 int_x86_sse2_sqrt_pd>;
1091 def RSQRTPSr : PS_Intr<0x52, "rsqrtps {$src, $dst|$dst, $src}",
1092 int_x86_sse_rsqrt_ps>;
1093 def RSQRTPSm : PS_Intm<0x52, "rsqrtps {$src, $dst|$dst, $src}",
1094 int_x86_sse_rsqrt_ps>;
1095 def RCPPSr : PS_Intr<0x53, "rcpps {$src, $dst|$dst, $src}",
1096 int_x86_sse_rcp_ps>;
1097 def RCPPSm : PS_Intm<0x53, "rcpps {$src, $dst|$dst, $src}",
1098 int_x86_sse_rcp_ps>;
// Packed min/max via two-address intrinsic helper classes.
1100 let isTwoAddress = 1 in {
1101 def MAXPSrr : PS_Intrr<0x5F, "maxps {$src2, $dst|$dst, $src2}",
1102 int_x86_sse_max_ps>;
1103 def MAXPSrm : PS_Intrm<0x5F, "maxps {$src2, $dst|$dst, $src2}",
1104 int_x86_sse_max_ps>;
1105 def MAXPDrr : PD_Intrr<0x5F, "maxpd {$src2, $dst|$dst, $src2}",
1106 int_x86_sse2_max_pd>;
1107 def MAXPDrm : PD_Intrm<0x5F, "maxpd {$src2, $dst|$dst, $src2}",
1108 int_x86_sse2_max_pd>;
1109 def MINPSrr : PS_Intrr<0x5D, "minps {$src2, $dst|$dst, $src2}",
1110 int_x86_sse_min_ps>;
1111 def MINPSrm : PS_Intrm<0x5D, "minps {$src2, $dst|$dst, $src2}",
1112 int_x86_sse_min_ps>;
1113 def MINPDrr : PD_Intrr<0x5D, "minpd {$src2, $dst|$dst, $src2}",
1114 int_x86_sse2_min_pd>;
1115 def MINPDrm : PD_Intrm<0x5D, "minpd {$src2, $dst|$dst, $src2}",
1116 int_x86_sse2_min_pd>;
// Packed logicals: PS forms operate on v2i64 directly; PD forms bitcast their
// v2f64 operands to v2i64 first (bc_v2i64) so both map to integer and/or/xor.
// NOTE(review): several "[(set VR128:$dst," lines (1127, 1135, 1143, 1153,
// 1162, 1171, 1186, 1191) are missing from this extraction.
1120 let isTwoAddress = 1 in {
1121 let isCommutable = 1 in {
1122 def ANDPSrr : PSI<0x54, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1123 "andps {$src2, $dst|$dst, $src2}",
1124 [(set VR128:$dst, (v2i64 (and VR128:$src1, VR128:$src2)))]>;
1125 def ANDPDrr : PDI<0x54, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1126 "andpd {$src2, $dst|$dst, $src2}",
1128 (and (bc_v2i64 (v2f64 VR128:$src1)),
1129 (bc_v2i64 (v2f64 VR128:$src2))))]>;
1130 def ORPSrr : PSI<0x56, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1131 "orps {$src2, $dst|$dst, $src2}",
1132 [(set VR128:$dst, (v2i64 (or VR128:$src1, VR128:$src2)))]>;
1133 def ORPDrr : PDI<0x56, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1134 "orpd {$src2, $dst|$dst, $src2}",
1136 (or (bc_v2i64 (v2f64 VR128:$src1)),
1137 (bc_v2i64 (v2f64 VR128:$src2))))]>;
1138 def XORPSrr : PSI<0x57, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1139 "xorps {$src2, $dst|$dst, $src2}",
1140 [(set VR128:$dst, (v2i64 (xor VR128:$src1, VR128:$src2)))]>;
1141 def XORPDrr : PDI<0x57, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1142 "xorpd {$src2, $dst|$dst, $src2}",
1144 (xor (bc_v2i64 (v2f64 VR128:$src1)),
1145 (bc_v2i64 (v2f64 VR128:$src2))))]>;
1147 def ANDPSrm : PSI<0x54, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1148 "andps {$src2, $dst|$dst, $src2}",
1149 [(set VR128:$dst, (and VR128:$src1,
1150 (bc_v2i64 (loadv4f32 addr:$src2))))]>;
1151 def ANDPDrm : PDI<0x54, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1152 "andpd {$src2, $dst|$dst, $src2}",
1154 (and (bc_v2i64 (v2f64 VR128:$src1)),
1155 (bc_v2i64 (loadv2f64 addr:$src2))))]>;
1156 def ORPSrm : PSI<0x56, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1157 "orps {$src2, $dst|$dst, $src2}",
1158 [(set VR128:$dst, (or VR128:$src1,
1159 (bc_v2i64 (loadv4f32 addr:$src2))))]>;
1160 def ORPDrm : PDI<0x56, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1161 "orpd {$src2, $dst|$dst, $src2}",
1163 (or (bc_v2i64 (v2f64 VR128:$src1)),
1164 (bc_v2i64 (loadv2f64 addr:$src2))))]>;
1165 def XORPSrm : PSI<0x57, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1166 "xorps {$src2, $dst|$dst, $src2}",
1167 [(set VR128:$dst, (xor VR128:$src1,
1168 (bc_v2i64 (loadv4f32 addr:$src2))))]>;
1169 def XORPDrm : PDI<0x57, MRMSrcMem, (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1170 "xorpd {$src2, $dst|$dst, $src2}",
1172 (xor (bc_v2i64 (v2f64 VR128:$src1)),
1173 (bc_v2i64 (loadv2f64 addr:$src2))))]>;
// ANDN: (~src1) & src2. PS spells NOT as xor-with-all-ones; PD uses vnot.
1174 def ANDNPSrr : PSI<0x55, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1175 "andnps {$src2, $dst|$dst, $src2}",
1176 [(set VR128:$dst, (v2i64 (and (xor VR128:$src1,
1177 (bc_v2i64 (v4i32 immAllOnesV))),
1179 def ANDNPSrm : PSI<0x55, MRMSrcMem, (ops VR128:$dst, VR128:$src1,f128mem:$src2),
1180 "andnps {$src2, $dst|$dst, $src2}",
1181 [(set VR128:$dst, (v2i64 (and (xor VR128:$src1,
1182 (bc_v2i64 (v4i32 immAllOnesV))),
1183 (bc_v2i64 (loadv4f32 addr:$src2)))))]>;
1184 def ANDNPDrr : PDI<0x55, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1185 "andnpd {$src2, $dst|$dst, $src2}",
1187 (and (vnot (bc_v2i64 (v2f64 VR128:$src1))),
1188 (bc_v2i64 (v2f64 VR128:$src2))))]>;
1189 def ANDNPDrm : PDI<0x55, MRMSrcMem, (ops VR128:$dst, VR128:$src1,f128mem:$src2),
1190 "andnpd {$src2, $dst|$dst, $src2}",
1192 (and (vnot (bc_v2i64 (v2f64 VR128:$src1))),
1193 (bc_v2i64 (loadv2f64 addr:$src2))))]>;
// Packed compares: $cc selects the condition and is baked into the mnemonic
// via ${cc}; matched through the cmp_ps/cmp_pd intrinsics with imm:$cc.
1196 let isTwoAddress = 1 in {
1197 def CMPPSrri : PSIi8<0xC2, MRMSrcReg,
1198 (ops VR128:$dst, VR128:$src1, VR128:$src, SSECC:$cc),
1199 "cmp${cc}ps {$src, $dst|$dst, $src}",
1200 [(set VR128:$dst, (int_x86_sse_cmp_ps VR128:$src1,
1201 VR128:$src, imm:$cc))]>;
1202 def CMPPSrmi : PSIi8<0xC2, MRMSrcMem,
1203 (ops VR128:$dst, VR128:$src1, f128mem:$src, SSECC:$cc),
1204 "cmp${cc}ps {$src, $dst|$dst, $src}",
1205 [(set VR128:$dst, (int_x86_sse_cmp_ps VR128:$src1,
1206 (load addr:$src), imm:$cc))]>;
1207 def CMPPDrri : PDIi8<0xC2, MRMSrcReg,
1208 (ops VR128:$dst, VR128:$src1, VR128:$src, SSECC:$cc),
1209 "cmp${cc}pd {$src, $dst|$dst, $src}",
1210 [(set VR128:$dst, (int_x86_sse2_cmp_pd VR128:$src1,
1211 VR128:$src, imm:$cc))]>;
1212 def CMPPDrmi : PDIi8<0xC2, MRMSrcMem,
1213 (ops VR128:$dst, VR128:$src1, f128mem:$src, SSECC:$cc),
1214 "cmp${cc}pd {$src, $dst|$dst, $src}",
1215 [(set VR128:$dst, (int_x86_sse2_cmp_pd VR128:$src1,
1216 (load addr:$src), imm:$cc))]>;
1219 // Shuffle and unpack instructions
// SHUFP: immediate-controlled shuffle; the SHUFPS register form can be
// converted to PSHUFD by the two-address pass (isConvertibleToThreeAddress).
1220 let isTwoAddress = 1 in {
1221 let isConvertibleToThreeAddress = 1 in // Convert to pshufd
1222 def SHUFPSrri : PSIi8<0xC6, MRMSrcReg,
1223 (ops VR128:$dst, VR128:$src1, VR128:$src2, i32i8imm:$src3),
1224 "shufps {$src3, $src2, $dst|$dst, $src2, $src3}",
1225 [(set VR128:$dst, (v4f32 (vector_shuffle
1226 VR128:$src1, VR128:$src2,
1227 SHUFP_shuffle_mask:$src3)))]>;
1228 def SHUFPSrmi : PSIi8<0xC6, MRMSrcMem,
1229 (ops VR128:$dst, VR128:$src1, f128mem:$src2, i32i8imm:$src3),
1230 "shufps {$src3, $src2, $dst|$dst, $src2, $src3}",
1231 [(set VR128:$dst, (v4f32 (vector_shuffle
1232 VR128:$src1, (load addr:$src2),
1233 SHUFP_shuffle_mask:$src3)))]>;
1234 def SHUFPDrri : PDIi8<0xC6, MRMSrcReg,
1235 (ops VR128:$dst, VR128:$src1, VR128:$src2, i8imm:$src3),
1236 "shufpd {$src3, $src2, $dst|$dst, $src2, $src3}",
1237 [(set VR128:$dst, (v2f64 (vector_shuffle
1238 VR128:$src1, VR128:$src2,
1239 SHUFP_shuffle_mask:$src3)))]>;
1240 def SHUFPDrmi : PDIi8<0xC6, MRMSrcMem,
1241 (ops VR128:$dst, VR128:$src1, f128mem:$src2, i8imm:$src3),
1242 "shufpd {$src3, $src2, $dst|$dst, $src2, $src3}",
1243 [(set VR128:$dst, (v2f64 (vector_shuffle
1244 VR128:$src1, (load addr:$src2),
1245 SHUFP_shuffle_mask:$src3)))]>;
// UNPCKH/UNPCKL: interleave high/low elements; AddedComplexity prefers these
// over generic shuffle lowering.
1247 let AddedComplexity = 10 in {
1248 def UNPCKHPSrr : PSI<0x15, MRMSrcReg,
1249 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1250 "unpckhps {$src2, $dst|$dst, $src2}",
1251 [(set VR128:$dst, (v4f32 (vector_shuffle
1252 VR128:$src1, VR128:$src2,
1253 UNPCKH_shuffle_mask)))]>;
1254 def UNPCKHPSrm : PSI<0x15, MRMSrcMem,
1255 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1256 "unpckhps {$src2, $dst|$dst, $src2}",
1257 [(set VR128:$dst, (v4f32 (vector_shuffle
1258 VR128:$src1, (load addr:$src2),
1259 UNPCKH_shuffle_mask)))]>;
1260 def UNPCKHPDrr : PDI<0x15, MRMSrcReg,
1261 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1262 "unpckhpd {$src2, $dst|$dst, $src2}",
1263 [(set VR128:$dst, (v2f64 (vector_shuffle
1264 VR128:$src1, VR128:$src2,
1265 UNPCKH_shuffle_mask)))]>;
1266 def UNPCKHPDrm : PDI<0x15, MRMSrcMem,
1267 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1268 "unpckhpd {$src2, $dst|$dst, $src2}",
1269 [(set VR128:$dst, (v2f64 (vector_shuffle
1270 VR128:$src1, (load addr:$src2),
1271 UNPCKH_shuffle_mask)))]>;
1273 def UNPCKLPSrr : PSI<0x14, MRMSrcReg,
1274 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1275 "unpcklps {$src2, $dst|$dst, $src2}",
1276 [(set VR128:$dst, (v4f32 (vector_shuffle
1277 VR128:$src1, VR128:$src2,
1278 UNPCKL_shuffle_mask)))]>;
1279 def UNPCKLPSrm : PSI<0x14, MRMSrcMem,
1280 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1281 "unpcklps {$src2, $dst|$dst, $src2}",
1282 [(set VR128:$dst, (v4f32 (vector_shuffle
1283 VR128:$src1, (load addr:$src2),
1284 UNPCKL_shuffle_mask)))]>;
1285 def UNPCKLPDrr : PDI<0x14, MRMSrcReg,
1286 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1287 "unpcklpd {$src2, $dst|$dst, $src2}",
1288 [(set VR128:$dst, (v2f64 (vector_shuffle
1289 VR128:$src1, VR128:$src2,
1290 UNPCKL_shuffle_mask)))]>;
1291 def UNPCKLPDrm : PDI<0x14, MRMSrcMem,
1292 (ops VR128:$dst, VR128:$src1, f128mem:$src2),
1293 "unpcklpd {$src2, $dst|$dst, $src2}",
1294 [(set VR128:$dst, (v2f64 (vector_shuffle
1295 VR128:$src1, (load addr:$src2),
1296 UNPCKL_shuffle_mask)))]>;
1297 } // AddedComplexity
// SSE3 horizontal add/sub via two-address intrinsic helper classes.
1301 let isTwoAddress = 1 in {
1302 def HADDPSrr : S3D_Intrr<0x7C, "haddps {$src2, $dst|$dst, $src2}",
1303 int_x86_sse3_hadd_ps>;
1304 def HADDPSrm : S3D_Intrm<0x7C, "haddps {$src2, $dst|$dst, $src2}",
1305 int_x86_sse3_hadd_ps>;
1306 def HADDPDrr : S3_Intrr<0x7C, "haddpd {$src2, $dst|$dst, $src2}",
1307 int_x86_sse3_hadd_pd>;
1308 def HADDPDrm : S3_Intrm<0x7C, "haddpd {$src2, $dst|$dst, $src2}",
1309 int_x86_sse3_hadd_pd>;
1310 def HSUBPSrr : S3D_Intrr<0x7D, "hsubps {$src2, $dst|$dst, $src2}",
1311 int_x86_sse3_hsub_ps>;
1312 def HSUBPSrm : S3D_Intrm<0x7D, "hsubps {$src2, $dst|$dst, $src2}",
1313 int_x86_sse3_hsub_ps>;
1314 def HSUBPDrr : S3_Intrr<0x7D, "hsubpd {$src2, $dst|$dst, $src2}",
1315 int_x86_sse3_hsub_pd>;
1316 def HSUBPDrm : S3_Intrm<0x7D, "hsubpd {$src2, $dst|$dst, $src2}",
1317 int_x86_sse3_hsub_pd>;
1320 //===----------------------------------------------------------------------===//
1321 // SSE integer instructions
1322 //===----------------------------------------------------------------------===//
1324 // Move Instructions
// Aligned (MOVDQA) and unaligned (MOVDQU, via intrinsics) 128-bit integer
// moves; LDDQU is the SSE3 unaligned load intrinsic.
1325 def MOVDQArr : PDI<0x6F, MRMSrcReg, (ops VR128:$dst, VR128:$src),
1326 "movdqa {$src, $dst|$dst, $src}", []>;
1327 def MOVDQArm : PDI<0x6F, MRMSrcMem, (ops VR128:$dst, i128mem:$src),
1328 "movdqa {$src, $dst|$dst, $src}",
1329 [(set VR128:$dst, (loadv2i64 addr:$src))]>;
1330 def MOVDQAmr : PDI<0x7F, MRMDestMem, (ops i128mem:$dst, VR128:$src),
1331 "movdqa {$src, $dst|$dst, $src}",
1332 [(store (v2i64 VR128:$src), addr:$dst)]>;
1333 def MOVDQUrm : I<0x6F, MRMSrcMem, (ops VR128:$dst, i128mem:$src),
1334 "movdqu {$src, $dst|$dst, $src}",
1335 [(set VR128:$dst, (int_x86_sse2_loadu_dq addr:$src))]>,
1336 XS, Requires<[HasSSE2]>;
1337 def MOVDQUmr : I<0x7F, MRMDestMem, (ops i128mem:$dst, VR128:$src),
1338 "movdqu {$src, $dst|$dst, $src}",
1339 [(int_x86_sse2_storeu_dq addr:$dst, VR128:$src)]>,
1340 XS, Requires<[HasSSE2]>;
1341 def LDDQUrm : S3DI<0xF0, MRMSrcMem, (ops VR128:$dst, i128mem:$src),
1342 "lddqu {$src, $dst|$dst, $src}",
1343 [(set VR128:$dst, (int_x86_sse3_ldu_dq addr:$src))]>;
1345 // 128-bit Integer Arithmetic
// Packed add: element widths b/w/d/q map to v16i8/v8i16/v4i32/v2i64. Memory
// forms load v2i64 and bitcast to the element type (except the q form).
1346 let isTwoAddress = 1 in {
1347 let isCommutable = 1 in {
1348 def PADDBrr : PDI<0xFC, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1349 "paddb {$src2, $dst|$dst, $src2}",
1350 [(set VR128:$dst, (v16i8 (add VR128:$src1, VR128:$src2)))]>;
1351 def PADDWrr : PDI<0xFD, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1352 "paddw {$src2, $dst|$dst, $src2}",
1353 [(set VR128:$dst, (v8i16 (add VR128:$src1, VR128:$src2)))]>;
1354 def PADDDrr : PDI<0xFE, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1355 "paddd {$src2, $dst|$dst, $src2}",
1356 [(set VR128:$dst, (v4i32 (add VR128:$src1, VR128:$src2)))]>;
1358 def PADDQrr : PDI<0xD4, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1359 "paddq {$src2, $dst|$dst, $src2}",
1360 [(set VR128:$dst, (v2i64 (add VR128:$src1, VR128:$src2)))]>;
1362 def PADDBrm : PDI<0xFC, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1363 "paddb {$src2, $dst|$dst, $src2}",
1364 [(set VR128:$dst, (add VR128:$src1,
1365 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1366 def PADDWrm : PDI<0xFD, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1367 "paddw {$src2, $dst|$dst, $src2}",
1368 [(set VR128:$dst, (add VR128:$src1,
1369 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1370 def PADDDrm : PDI<0xFE, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1371 "paddd {$src2, $dst|$dst, $src2}",
1372 [(set VR128:$dst, (add VR128:$src1,
1373 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
// PADDQ memory form: packed 64-bit integer add with the second operand
// folded from memory (no bitcast needed -- v2i64 is the load's natural type).
// Fix: the asm string said "paddd" (copy/paste from PADDDrm above); this
// record is PADDQ (opcode 0xD4, matching PADDQrr), so it must print/parse as
// "paddq" or the assembler/disassembler output is wrong.
1374 def PADDQrm : PDI<0xD4, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1375 "paddq {$src2, $dst|$dst, $src2}",
1376 [(set VR128:$dst, (add VR128:$src1,
1377 (loadv2i64 addr:$src2)))]>;
// Saturating packed adds (signed padds, unsigned paddus), via intrinsics.
// NOTE(review): the closing "VR128:$src2))]>;"-style lines (1383, 1387,
// 1391, 1395-1396, 1448, 1452, 1456, 1460-1461) are missing from this
// extraction; the rr patterns below are truncated.
1379 let isCommutable = 1 in {
1380 def PADDSBrr : PDI<0xEC, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1381 "paddsb {$src2, $dst|$dst, $src2}",
1382 [(set VR128:$dst, (int_x86_sse2_padds_b VR128:$src1,
1384 def PADDSWrr : PDI<0xED, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1385 "paddsw {$src2, $dst|$dst, $src2}",
1386 [(set VR128:$dst, (int_x86_sse2_padds_w VR128:$src1,
1388 def PADDUSBrr : PDI<0xDC, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1389 "paddusb {$src2, $dst|$dst, $src2}",
1390 [(set VR128:$dst, (int_x86_sse2_paddus_b VR128:$src1,
1392 def PADDUSWrr : PDI<0xDD, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1393 "paddusw {$src2, $dst|$dst, $src2}",
1394 [(set VR128:$dst, (int_x86_sse2_paddus_w VR128:$src1,
1397 def PADDSBrm : PDI<0xEC, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1398 "paddsb {$src2, $dst|$dst, $src2}",
1399 [(set VR128:$dst, (int_x86_sse2_padds_b VR128:$src1,
1400 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1401 def PADDSWrm : PDI<0xED, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1402 "paddsw {$src2, $dst|$dst, $src2}",
1403 [(set VR128:$dst, (int_x86_sse2_padds_w VR128:$src1,
1404 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1405 def PADDUSBrm : PDI<0xDC, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1406 "paddusb {$src2, $dst|$dst, $src2}",
1407 [(set VR128:$dst, (int_x86_sse2_paddus_b VR128:$src1,
1408 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1409 def PADDUSWrm : PDI<0xDD, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1410 "paddusw {$src2, $dst|$dst, $src2}",
1411 [(set VR128:$dst, (int_x86_sse2_paddus_w VR128:$src1,
1412 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
// Packed subtract (non-commutative), element widths b/w/d/q.
1415 def PSUBBrr : PDI<0xF8, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1416 "psubb {$src2, $dst|$dst, $src2}",
1417 [(set VR128:$dst, (v16i8 (sub VR128:$src1, VR128:$src2)))]>;
1418 def PSUBWrr : PDI<0xF9, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1419 "psubw {$src2, $dst|$dst, $src2}",
1420 [(set VR128:$dst, (v8i16 (sub VR128:$src1, VR128:$src2)))]>;
1421 def PSUBDrr : PDI<0xFA, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1422 "psubd {$src2, $dst|$dst, $src2}",
1423 [(set VR128:$dst, (v4i32 (sub VR128:$src1, VR128:$src2)))]>;
1424 def PSUBQrr : PDI<0xFB, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1425 "psubq {$src2, $dst|$dst, $src2}",
1426 [(set VR128:$dst, (v2i64 (sub VR128:$src1, VR128:$src2)))]>;
1428 def PSUBBrm : PDI<0xF8, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1429 "psubb {$src2, $dst|$dst, $src2}",
1430 [(set VR128:$dst, (sub VR128:$src1,
1431 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1432 def PSUBWrm : PDI<0xF9, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1433 "psubw {$src2, $dst|$dst, $src2}",
1434 [(set VR128:$dst, (sub VR128:$src1,
1435 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1436 def PSUBDrm : PDI<0xFA, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1437 "psubd {$src2, $dst|$dst, $src2}",
1438 [(set VR128:$dst, (sub VR128:$src1,
1439 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
// PSUBQ memory form: packed 64-bit integer subtract with the second operand
// folded from memory (v2i64 is the load's natural type, no bitcast).
// Fix: the asm string said "psubd" (copy/paste from PSUBDrm above); this
// record is PSUBQ (opcode 0xFB, matching PSUBQrr), so it must print/parse as
// "psubq" or the assembler/disassembler output is wrong.
1440 def PSUBQrm : PDI<0xFB, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1441 "psubq {$src2, $dst|$dst, $src2}",
1442 [(set VR128:$dst, (sub VR128:$src1,
1443 (loadv2i64 addr:$src2)))]>;
1445 def PSUBSBrr : PDI<0xE8, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1446 "psubsb {$src2, $dst|$dst, $src2}",
1447 [(set VR128:$dst, (int_x86_sse2_psubs_b VR128:$src1,
1449 def PSUBSWrr : PDI<0xE9, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1450 "psubsw {$src2, $dst|$dst, $src2}",
1451 [(set VR128:$dst, (int_x86_sse2_psubs_w VR128:$src1,
1453 def PSUBUSBrr : PDI<0xD8, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1454 "psubusb {$src2, $dst|$dst, $src2}",
1455 [(set VR128:$dst, (int_x86_sse2_psubus_b VR128:$src1,
1457 def PSUBUSWrr : PDI<0xD9, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1458 "psubusw {$src2, $dst|$dst, $src2}",
1459 [(set VR128:$dst, (int_x86_sse2_psubus_w VR128:$src1,
// Saturating subtract, memory forms: psubs* = signed saturation,
// psubus* = unsigned saturation.  All load the operand as v2i64 and
// bitcast to the intrinsic's element type.
1462 def PSUBSBrm : PDI<0xE8, MRMSrcMem,
1463 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1464 "psubsb {$src2, $dst|$dst, $src2}",
1465 [(set VR128:$dst, (int_x86_sse2_psubs_b VR128:$src1,
1466 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1467 def PSUBSWrm : PDI<0xE9, MRMSrcMem,
1468 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1469 "psubsw {$src2, $dst|$dst, $src2}",
1470 [(set VR128:$dst, (int_x86_sse2_psubs_w VR128:$src1,
1471 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1472 def PSUBUSBrm : PDI<0xD8, MRMSrcMem,
1473 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1474 "psubusb {$src2, $dst|$dst, $src2}",
1475 [(set VR128:$dst, (int_x86_sse2_psubus_b VR128:$src1,
1476 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1477 def PSUBUSWrm : PDI<0xD9, MRMSrcMem,
1478 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1479 "psubusw {$src2, $dst|$dst, $src2}",
1480 [(set VR128:$dst, (int_x86_sse2_psubus_w VR128:$src1,
1481 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1483 let isCommutable = 1 in {
1484 def PMULHUWrr : PDI<0xE4, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1485 "pmulhuw {$src2, $dst|$dst, $src2}",
1486 [(set VR128:$dst, (int_x86_sse2_pmulhu_w VR128:$src1,
1488 def PMULHWrr : PDI<0xE5, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1489 "pmulhw {$src2, $dst|$dst, $src2}",
1490 [(set VR128:$dst, (int_x86_sse2_pmulh_w VR128:$src1,
1492 def PMULLWrr : PDI<0xD5, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1493 "pmullw {$src2, $dst|$dst, $src2}",
1494 [(set VR128:$dst, (v8i16 (mul VR128:$src1, VR128:$src2)))]>;
1495 def PMULUDQrr : PDI<0xF4, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1496 "pmuludq {$src2, $dst|$dst, $src2}",
1497 [(set VR128:$dst, (int_x86_sse2_pmulu_dq VR128:$src1,
1500 def PMULHUWrm : PDI<0xE4, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1501 "pmulhuw {$src2, $dst|$dst, $src2}",
1502 [(set VR128:$dst, (int_x86_sse2_pmulhu_w VR128:$src1,
1503 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1504 def PMULHWrm : PDI<0xE5, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1505 "pmulhw {$src2, $dst|$dst, $src2}",
1506 [(set VR128:$dst, (int_x86_sse2_pmulh_w VR128:$src1,
1507 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1508 def PMULLWrm : PDI<0xD5, MRMSrcMem,
1509 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1510 "pmullw {$src2, $dst|$dst, $src2}",
1511 [(set VR128:$dst, (v8i16 (mul VR128:$src1,
1512 (bc_v8i16 (loadv2i64 addr:$src2)))))]>;
1513 def PMULUDQrm : PDI<0xF4, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1514 "pmuludq {$src2, $dst|$dst, $src2}",
1515 [(set VR128:$dst, (int_x86_sse2_pmulu_dq VR128:$src1,
1516 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1518 let isCommutable = 1 in {
1519 def PMADDWDrr : PDI<0xF5, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1520 "pmaddwd {$src2, $dst|$dst, $src2}",
1521 [(set VR128:$dst, (int_x86_sse2_pmadd_wd VR128:$src1,
1524 def PMADDWDrm : PDI<0xF5, MRMSrcMem,
1525 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1526 "pmaddwd {$src2, $dst|$dst, $src2}",
1527 [(set VR128:$dst, (int_x86_sse2_pmadd_wd VR128:$src1,
1528 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1530 let isCommutable = 1 in {
1531 def PAVGBrr : PDI<0xE0, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1532 "pavgb {$src2, $dst|$dst, $src2}",
1533 [(set VR128:$dst, (int_x86_sse2_pavg_b VR128:$src1,
1535 def PAVGWrr : PDI<0xE3, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1536 "pavgw {$src2, $dst|$dst, $src2}",
1537 [(set VR128:$dst, (int_x86_sse2_pavg_w VR128:$src1,
1540 def PAVGBrm : PDI<0xE0, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1541 "pavgb {$src2, $dst|$dst, $src2}",
1542 [(set VR128:$dst, (int_x86_sse2_pavg_b VR128:$src1,
1543 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1544 def PAVGWrm : PDI<0xE3, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1545 "pavgw {$src2, $dst|$dst, $src2}",
1546 [(set VR128:$dst, (int_x86_sse2_pavg_w VR128:$src1,
1547 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1549 let isCommutable = 1 in {
1550 def PMAXUBrr : PDI<0xDE, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1551 "pmaxub {$src2, $dst|$dst, $src2}",
1552 [(set VR128:$dst, (int_x86_sse2_pmaxu_b VR128:$src1,
1554 def PMAXSWrr : PDI<0xEE, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1555 "pmaxsw {$src2, $dst|$dst, $src2}",
1556 [(set VR128:$dst, (int_x86_sse2_pmaxs_w VR128:$src1,
1559 def PMAXUBrm : PDI<0xDE, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1560 "pmaxub {$src2, $dst|$dst, $src2}",
1561 [(set VR128:$dst, (int_x86_sse2_pmaxu_b VR128:$src1,
1562 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1563 def PMAXSWrm : PDI<0xEE, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1564 "pmaxsw {$src2, $dst|$dst, $src2}",
1565 [(set VR128:$dst, (int_x86_sse2_pmaxs_w VR128:$src1,
1566 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1568 let isCommutable = 1 in {
1569 def PMINUBrr : PDI<0xDA, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1570 "pminub {$src2, $dst|$dst, $src2}",
1571 [(set VR128:$dst, (int_x86_sse2_pminu_b VR128:$src1,
1573 def PMINSWrr : PDI<0xEA, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1574 "pminsw {$src2, $dst|$dst, $src2}",
1575 [(set VR128:$dst, (int_x86_sse2_pmins_w VR128:$src1,
1578 def PMINUBrm : PDI<0xDA, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1579 "pminub {$src2, $dst|$dst, $src2}",
1580 [(set VR128:$dst, (int_x86_sse2_pminu_b VR128:$src1,
1581 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1582 def PMINSWrm : PDI<0xEA, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1583 "pminsw {$src2, $dst|$dst, $src2}",
1584 [(set VR128:$dst, (int_x86_sse2_pmins_w VR128:$src1,
1585 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
// psadbw: packed sum of absolute byte differences.
// NOTE(review): opcode 0xE0 here collides with PAVGBrr above; per the
// Intel SDM, PSADBW encodes as 66 0F F6 /r, so this should almost
// certainly be 0xF6 — confirm and fix.
1588 let isCommutable = 1 in {
1589 def PSADBWrr : PDI<0xE0, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1590 "psadbw {$src2, $dst|$dst, $src2}",
1591 [(set VR128:$dst, (int_x86_sse2_psad_bw VR128:$src1,
// psadbw memory form: packed sum of absolute byte differences.
// Fixed opcode: PSADBW is 66 0F F6 /r per the Intel SDM; the previous
// value 0xE0 is PAVGB's opcode and collided with PAVGBrm above.
1594 def PSADBWrm : PDI<0xF6, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1595 "psadbw {$src2, $dst|$dst, $src2}",
1596 [(set VR128:$dst, (int_x86_sse2_psad_bw VR128:$src1,
1597 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1600 let isTwoAddress = 1 in {
1601 def PSLLWrr : PDIi8<0xF1, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1602 "psllw {$src2, $dst|$dst, $src2}",
1603 [(set VR128:$dst, (int_x86_sse2_psll_w VR128:$src1,
1605 def PSLLWrm : PDIi8<0xF1, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1606 "psllw {$src2, $dst|$dst, $src2}",
1607 [(set VR128:$dst, (int_x86_sse2_psll_w VR128:$src1,
1608 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1609 def PSLLWri : PDIi8<0x71, MRM6r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1610 "psllw {$src2, $dst|$dst, $src2}",
1611 [(set VR128:$dst, (int_x86_sse2_psll_w VR128:$src1,
1612 (scalar_to_vector (i32 imm:$src2))))]>;
1613 def PSLLDrr : PDIi8<0xF2, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1614 "pslld {$src2, $dst|$dst, $src2}",
1615 [(set VR128:$dst, (int_x86_sse2_psll_d VR128:$src1,
1617 def PSLLDrm : PDIi8<0xF2, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1618 "pslld {$src2, $dst|$dst, $src2}",
1619 [(set VR128:$dst, (int_x86_sse2_psll_d VR128:$src1,
1620 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1621 def PSLLDri : PDIi8<0x72, MRM6r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1622 "pslld {$src2, $dst|$dst, $src2}",
1623 [(set VR128:$dst, (int_x86_sse2_psll_d VR128:$src1,
1624 (scalar_to_vector (i32 imm:$src2))))]>;
1625 def PSLLQrr : PDIi8<0xF3, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1626 "psllq {$src2, $dst|$dst, $src2}",
1627 [(set VR128:$dst, (int_x86_sse2_psll_q VR128:$src1,
1629 def PSLLQrm : PDIi8<0xF3, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1630 "psllq {$src2, $dst|$dst, $src2}",
1631 [(set VR128:$dst, (int_x86_sse2_psll_q VR128:$src1,
1632 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1633 def PSLLQri : PDIi8<0x73, MRM6r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1634 "psllq {$src2, $dst|$dst, $src2}",
1635 [(set VR128:$dst, (int_x86_sse2_psll_q VR128:$src1,
1636 (scalar_to_vector (i32 imm:$src2))))]>;
1637 def PSLLDQri : PDIi8<0x73, MRM7r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1638 "pslldq {$src2, $dst|$dst, $src2}", []>;
1640 def PSRLWrr : PDIi8<0xD1, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1641 "psrlw {$src2, $dst|$dst, $src2}",
1642 [(set VR128:$dst, (int_x86_sse2_psrl_w VR128:$src1,
1644 def PSRLWrm : PDIi8<0xD1, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1645 "psrlw {$src2, $dst|$dst, $src2}",
1646 [(set VR128:$dst, (int_x86_sse2_psrl_w VR128:$src1,
1647 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1648 def PSRLWri : PDIi8<0x71, MRM2r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1649 "psrlw {$src2, $dst|$dst, $src2}",
1650 [(set VR128:$dst, (int_x86_sse2_psrl_w VR128:$src1,
1651 (scalar_to_vector (i32 imm:$src2))))]>;
1652 def PSRLDrr : PDIi8<0xD2, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1653 "psrld {$src2, $dst|$dst, $src2}",
1654 [(set VR128:$dst, (int_x86_sse2_psrl_d VR128:$src1,
1656 def PSRLDrm : PDIi8<0xD2, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1657 "psrld {$src2, $dst|$dst, $src2}",
1658 [(set VR128:$dst, (int_x86_sse2_psrl_d VR128:$src1,
1659 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1660 def PSRLDri : PDIi8<0x72, MRM2r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1661 "psrld {$src2, $dst|$dst, $src2}",
1662 [(set VR128:$dst, (int_x86_sse2_psrl_d VR128:$src1,
1663 (scalar_to_vector (i32 imm:$src2))))]>;
1664 def PSRLQrr : PDIi8<0xD3, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1665 "psrlq {$src2, $dst|$dst, $src2}",
1666 [(set VR128:$dst, (int_x86_sse2_psrl_q VR128:$src1,
1668 def PSRLQrm : PDIi8<0xD3, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1669 "psrlq {$src2, $dst|$dst, $src2}",
1670 [(set VR128:$dst, (int_x86_sse2_psrl_q VR128:$src1,
1671 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1672 def PSRLQri : PDIi8<0x73, MRM2r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1673 "psrlq {$src2, $dst|$dst, $src2}",
1674 [(set VR128:$dst, (int_x86_sse2_psrl_q VR128:$src1,
1675 (scalar_to_vector (i32 imm:$src2))))]>;
1676 def PSRLDQri : PDIi8<0x73, MRM3r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1677 "psrldq {$src2, $dst|$dst, $src2}", []>;
1679 def PSRAWrr : PDIi8<0xE1, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1680 "psraw {$src2, $dst|$dst, $src2}",
1681 [(set VR128:$dst, (int_x86_sse2_psra_w VR128:$src1,
1683 def PSRAWrm : PDIi8<0xE1, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1684 "psraw {$src2, $dst|$dst, $src2}",
1685 [(set VR128:$dst, (int_x86_sse2_psra_w VR128:$src1,
1686 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1687 def PSRAWri : PDIi8<0x71, MRM4r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1688 "psraw {$src2, $dst|$dst, $src2}",
1689 [(set VR128:$dst, (int_x86_sse2_psra_w VR128:$src1,
1690 (scalar_to_vector (i32 imm:$src2))))]>;
1691 def PSRADrr : PDIi8<0xE2, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1692 "psrad {$src2, $dst|$dst, $src2}",
1693 [(set VR128:$dst, (int_x86_sse2_psra_d VR128:$src1,
1695 def PSRADrm : PDIi8<0xE2, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1696 "psrad {$src2, $dst|$dst, $src2}",
1697 [(set VR128:$dst, (int_x86_sse2_psra_d VR128:$src1,
1698 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1699 def PSRADri : PDIi8<0x72, MRM4r, (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1700 "psrad {$src2, $dst|$dst, $src2}",
1701 [(set VR128:$dst, (int_x86_sse2_psra_d VR128:$src1,
1702 (scalar_to_vector (i32 imm:$src2))))]>;
// 128-bit bitwise logical ops.  These are typed as v2i64 operations;
// and/or/xor are commutative, pandn (x86's "and-not") is not.
1706 let isTwoAddress = 1 in {
1707 let isCommutable = 1 in {
1708 def PANDrr : PDI<0xDB, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1709 "pand {$src2, $dst|$dst, $src2}",
1710 [(set VR128:$dst, (v2i64 (and VR128:$src1, VR128:$src2)))]>;
1711 def PORrr : PDI<0xEB, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1712 "por {$src2, $dst|$dst, $src2}",
1713 [(set VR128:$dst, (v2i64 (or VR128:$src1, VR128:$src2)))]>;
1714 def PXORrr : PDI<0xEF, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1715 "pxor {$src2, $dst|$dst, $src2}",
1716 [(set VR128:$dst, (v2i64 (xor VR128:$src1, VR128:$src2)))]>;
// Memory forms: the v2i64 load needs no bitcast.
1719 def PANDrm : PDI<0xDB, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1720 "pand {$src2, $dst|$dst, $src2}",
1721 [(set VR128:$dst, (v2i64 (and VR128:$src1,
1722 (load addr:$src2))))]>;
1723 def PORrm : PDI<0xEB, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1724 "por {$src2, $dst|$dst, $src2}",
1725 [(set VR128:$dst, (v2i64 (or VR128:$src1,
1726 (load addr:$src2))))]>;
1727 def PXORrm : PDI<0xEF, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1728 "pxor {$src2, $dst|$dst, $src2}",
1729 [(set VR128:$dst, (v2i64 (xor VR128:$src1,
1730 (load addr:$src2))))]>;
// pandn: dst = ~src1 & src2, matched via the vnot fragment.
1732 def PANDNrr : PDI<0xDF, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
1733 "pandn {$src2, $dst|$dst, $src2}",
1734 [(set VR128:$dst, (v2i64 (and (vnot VR128:$src1),
1737 def PANDNrm : PDI<0xDF, MRMSrcMem, (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1738 "pandn {$src2, $dst|$dst, $src2}",
1739 [(set VR128:$dst, (v2i64 (and (vnot VR128:$src1),
1740 (load addr:$src2))))]>;
1743 // SSE2 Integer comparison
1744 let isTwoAddress = 1 in {
1745 def PCMPEQBrr : PDI<0x74, MRMSrcReg,
1746 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1747 "pcmpeqb {$src2, $dst|$dst, $src2}",
1748 [(set VR128:$dst, (int_x86_sse2_pcmpeq_b VR128:$src1,
1750 def PCMPEQBrm : PDI<0x74, MRMSrcMem,
1751 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1752 "pcmpeqb {$src2, $dst|$dst, $src2}",
1753 [(set VR128:$dst, (int_x86_sse2_pcmpeq_b VR128:$src1,
1754 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1755 def PCMPEQWrr : PDI<0x75, MRMSrcReg,
1756 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1757 "pcmpeqw {$src2, $dst|$dst, $src2}",
1758 [(set VR128:$dst, (int_x86_sse2_pcmpeq_w VR128:$src1,
1760 def PCMPEQWrm : PDI<0x75, MRMSrcMem,
1761 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1762 "pcmpeqw {$src2, $dst|$dst, $src2}",
1763 [(set VR128:$dst, (int_x86_sse2_pcmpeq_w VR128:$src1,
1764 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1765 def PCMPEQDrr : PDI<0x76, MRMSrcReg,
1766 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1767 "pcmpeqd {$src2, $dst|$dst, $src2}",
1768 [(set VR128:$dst, (int_x86_sse2_pcmpeq_d VR128:$src1,
1770 def PCMPEQDrm : PDI<0x76, MRMSrcMem,
1771 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1772 "pcmpeqd {$src2, $dst|$dst, $src2}",
1773 [(set VR128:$dst, (int_x86_sse2_pcmpeq_d VR128:$src1,
1774 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1776 def PCMPGTBrr : PDI<0x64, MRMSrcReg,
1777 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1778 "pcmpgtb {$src2, $dst|$dst, $src2}",
1779 [(set VR128:$dst, (int_x86_sse2_pcmpgt_b VR128:$src1,
1781 def PCMPGTBrm : PDI<0x64, MRMSrcMem,
1782 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1783 "pcmpgtb {$src2, $dst|$dst, $src2}",
1784 [(set VR128:$dst, (int_x86_sse2_pcmpgt_b VR128:$src1,
1785 (bc_v16i8 (loadv2i64 addr:$src2))))]>;
1786 def PCMPGTWrr : PDI<0x65, MRMSrcReg,
1787 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1788 "pcmpgtw {$src2, $dst|$dst, $src2}",
1789 [(set VR128:$dst, (int_x86_sse2_pcmpgt_w VR128:$src1,
1791 def PCMPGTWrm : PDI<0x65, MRMSrcMem,
1792 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1793 "pcmpgtw {$src2, $dst|$dst, $src2}",
1794 [(set VR128:$dst, (int_x86_sse2_pcmpgt_w VR128:$src1,
1795 (bc_v8i16 (loadv2i64 addr:$src2))))]>;
1796 def PCMPGTDrr : PDI<0x66, MRMSrcReg,
1797 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1798 "pcmpgtd {$src2, $dst|$dst, $src2}",
1799 [(set VR128:$dst, (int_x86_sse2_pcmpgt_d VR128:$src1,
1801 def PCMPGTDrm : PDI<0x66, MRMSrcMem,
1802 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1803 "pcmpgtd {$src2, $dst|$dst, $src2}",
1804 [(set VR128:$dst, (int_x86_sse2_pcmpgt_d VR128:$src1,
1805 (bc_v4i32 (loadv2i64 addr:$src2))))]>;
1808 // Pack instructions
// packss* narrow with signed saturation, packuswb with unsigned
// saturation; each takes two full-width sources and packs both into dst.
1809 let isTwoAddress = 1 in {
1810 def PACKSSWBrr : PDI<0x63, MRMSrcReg, (ops VR128:$dst, VR128:$src1,
1812 "packsswb {$src2, $dst|$dst, $src2}",
1813 [(set VR128:$dst, (v8i16 (int_x86_sse2_packsswb_128
// NOTE(review): the loadv2f64 in PACKSSWBrm below is inconsistent with
// every sibling memory pattern, which uses the integer loadv2i64 —
// likely a typo; confirm and change to loadv2i64.
1816 def PACKSSWBrm : PDI<0x63, MRMSrcMem, (ops VR128:$dst, VR128:$src1,
1818 "packsswb {$src2, $dst|$dst, $src2}",
1819 [(set VR128:$dst, (v8i16 (int_x86_sse2_packsswb_128
1821 (bc_v8i16 (loadv2f64 addr:$src2)))))]>;
1822 def PACKSSDWrr : PDI<0x6B, MRMSrcReg, (ops VR128:$dst, VR128:$src1,
1824 "packssdw {$src2, $dst|$dst, $src2}",
1825 [(set VR128:$dst, (v4i32 (int_x86_sse2_packssdw_128
1828 def PACKSSDWrm : PDI<0x6B, MRMSrcMem, (ops VR128:$dst, VR128:$src1,
1830 "packssdw {$src2, $dst|$dst, $src2}",
1831 [(set VR128:$dst, (v4i32 (int_x86_sse2_packssdw_128
1833 (bc_v4i32 (loadv2i64 addr:$src2)))))]>;
1834 def PACKUSWBrr : PDI<0x67, MRMSrcReg, (ops VR128:$dst, VR128:$src1,
1836 "packuswb {$src2, $dst|$dst, $src2}",
1837 [(set VR128:$dst, (v8i16 (int_x86_sse2_packuswb_128
1840 def PACKUSWBrm : PDI<0x67, MRMSrcMem, (ops VR128:$dst, VR128:$src1,
1842 "packuswb {$src2, $dst|$dst, $src2}",
1843 [(set VR128:$dst, (v8i16 (int_x86_sse2_packuswb_128
1845 (bc_v8i16 (loadv2i64 addr:$src2)))))]>;
1848 // Shuffle and unpack instructions
1849 def PSHUFDri : PDIi8<0x70, MRMSrcReg,
1850 (ops VR128:$dst, VR128:$src1, i8imm:$src2),
1851 "pshufd {$src2, $src1, $dst|$dst, $src1, $src2}",
1852 [(set VR128:$dst, (v4i32 (vector_shuffle
1853 VR128:$src1, (undef),
1854 PSHUFD_shuffle_mask:$src2)))]>;
1855 def PSHUFDmi : PDIi8<0x70, MRMSrcMem,
1856 (ops VR128:$dst, i128mem:$src1, i8imm:$src2),
1857 "pshufd {$src2, $src1, $dst|$dst, $src1, $src2}",
1858 [(set VR128:$dst, (v4i32 (vector_shuffle
1859 (bc_v4i32 (loadv2i64 addr:$src1)),
1861 PSHUFD_shuffle_mask:$src2)))]>;
1863 // SSE2 with ImmT == Imm8 and XS prefix.
1864 def PSHUFHWri : Ii8<0x70, MRMSrcReg,
1865 (ops VR128:$dst, VR128:$src1, i8imm:$src2),
1866 "pshufhw {$src2, $src1, $dst|$dst, $src1, $src2}",
1867 [(set VR128:$dst, (v8i16 (vector_shuffle
1868 VR128:$src1, (undef),
1869 PSHUFHW_shuffle_mask:$src2)))]>,
1870 XS, Requires<[HasSSE2]>;
1871 def PSHUFHWmi : Ii8<0x70, MRMSrcMem,
1872 (ops VR128:$dst, i128mem:$src1, i8imm:$src2),
1873 "pshufhw {$src2, $src1, $dst|$dst, $src1, $src2}",
1874 [(set VR128:$dst, (v8i16 (vector_shuffle
1875 (bc_v8i16 (loadv2i64 addr:$src1)),
1877 PSHUFHW_shuffle_mask:$src2)))]>,
1878 XS, Requires<[HasSSE2]>;
1880 // SSE2 with ImmT == Imm8 and XD prefix.
1881 def PSHUFLWri : Ii8<0x70, MRMSrcReg,
1882 (ops VR128:$dst, VR128:$src1, i32i8imm:$src2),
1883 "pshuflw {$src2, $src1, $dst|$dst, $src1, $src2}",
1884 [(set VR128:$dst, (v8i16 (vector_shuffle
1885 VR128:$src1, (undef),
1886 PSHUFLW_shuffle_mask:$src2)))]>,
1887 XD, Requires<[HasSSE2]>;
1888 def PSHUFLWmi : Ii8<0x70, MRMSrcMem,
1889 (ops VR128:$dst, i128mem:$src1, i32i8imm:$src2),
1890 "pshuflw {$src2, $src1, $dst|$dst, $src1, $src2}",
1891 [(set VR128:$dst, (v8i16 (vector_shuffle
1892 (bc_v8i16 (loadv2i64 addr:$src1)),
1894 PSHUFLW_shuffle_mask:$src2)))]>,
1895 XD, Requires<[HasSSE2]>;
1897 let isTwoAddress = 1 in {
1898 def PUNPCKLBWrr : PDI<0x60, MRMSrcReg,
1899 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1900 "punpcklbw {$src2, $dst|$dst, $src2}",
1902 (v16i8 (vector_shuffle VR128:$src1, VR128:$src2,
1903 UNPCKL_shuffle_mask)))]>;
1904 def PUNPCKLBWrm : PDI<0x60, MRMSrcMem,
1905 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1906 "punpcklbw {$src2, $dst|$dst, $src2}",
1908 (v16i8 (vector_shuffle VR128:$src1,
1909 (bc_v16i8 (loadv2i64 addr:$src2)),
1910 UNPCKL_shuffle_mask)))]>;
1911 def PUNPCKLWDrr : PDI<0x61, MRMSrcReg,
1912 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1913 "punpcklwd {$src2, $dst|$dst, $src2}",
1915 (v8i16 (vector_shuffle VR128:$src1, VR128:$src2,
1916 UNPCKL_shuffle_mask)))]>;
1917 def PUNPCKLWDrm : PDI<0x61, MRMSrcMem,
1918 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1919 "punpcklwd {$src2, $dst|$dst, $src2}",
1921 (v8i16 (vector_shuffle VR128:$src1,
1922 (bc_v8i16 (loadv2i64 addr:$src2)),
1923 UNPCKL_shuffle_mask)))]>;
1924 def PUNPCKLDQrr : PDI<0x62, MRMSrcReg,
1925 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1926 "punpckldq {$src2, $dst|$dst, $src2}",
1928 (v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
1929 UNPCKL_shuffle_mask)))]>;
1930 def PUNPCKLDQrm : PDI<0x62, MRMSrcMem,
1931 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1932 "punpckldq {$src2, $dst|$dst, $src2}",
1934 (v4i32 (vector_shuffle VR128:$src1,
1935 (bc_v4i32 (loadv2i64 addr:$src2)),
1936 UNPCKL_shuffle_mask)))]>;
1937 def PUNPCKLQDQrr : PDI<0x6C, MRMSrcReg,
1938 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1939 "punpcklqdq {$src2, $dst|$dst, $src2}",
1941 (v2i64 (vector_shuffle VR128:$src1, VR128:$src2,
1942 UNPCKL_shuffle_mask)))]>;
1943 def PUNPCKLQDQrm : PDI<0x6C, MRMSrcMem,
1944 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1945 "punpcklqdq {$src2, $dst|$dst, $src2}",
1947 (v2i64 (vector_shuffle VR128:$src1,
1948 (loadv2i64 addr:$src2),
1949 UNPCKL_shuffle_mask)))]>;
1951 def PUNPCKHBWrr : PDI<0x68, MRMSrcReg,
1952 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1953 "punpckhbw {$src2, $dst|$dst, $src2}",
1955 (v16i8 (vector_shuffle VR128:$src1, VR128:$src2,
1956 UNPCKH_shuffle_mask)))]>;
1957 def PUNPCKHBWrm : PDI<0x68, MRMSrcMem,
1958 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1959 "punpckhbw {$src2, $dst|$dst, $src2}",
1961 (v16i8 (vector_shuffle VR128:$src1,
1962 (bc_v16i8 (loadv2i64 addr:$src2)),
1963 UNPCKH_shuffle_mask)))]>;
1964 def PUNPCKHWDrr : PDI<0x69, MRMSrcReg,
1965 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1966 "punpckhwd {$src2, $dst|$dst, $src2}",
1968 (v8i16 (vector_shuffle VR128:$src1, VR128:$src2,
1969 UNPCKH_shuffle_mask)))]>;
1970 def PUNPCKHWDrm : PDI<0x69, MRMSrcMem,
1971 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1972 "punpckhwd {$src2, $dst|$dst, $src2}",
1974 (v8i16 (vector_shuffle VR128:$src1,
1975 (bc_v8i16 (loadv2i64 addr:$src2)),
1976 UNPCKH_shuffle_mask)))]>;
1977 def PUNPCKHDQrr : PDI<0x6A, MRMSrcReg,
1978 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1979 "punpckhdq {$src2, $dst|$dst, $src2}",
1981 (v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
1982 UNPCKH_shuffle_mask)))]>;
1983 def PUNPCKHDQrm : PDI<0x6A, MRMSrcMem,
1984 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1985 "punpckhdq {$src2, $dst|$dst, $src2}",
1987 (v4i32 (vector_shuffle VR128:$src1,
1988 (bc_v4i32 (loadv2i64 addr:$src2)),
1989 UNPCKH_shuffle_mask)))]>;
1990 def PUNPCKHQDQrr : PDI<0x6D, MRMSrcReg,
1991 (ops VR128:$dst, VR128:$src1, VR128:$src2),
1992 "punpckhqdq {$src2, $dst|$dst, $src2}",
1994 (v2i64 (vector_shuffle VR128:$src1, VR128:$src2,
1995 UNPCKH_shuffle_mask)))]>;
1996 def PUNPCKHQDQrm : PDI<0x6D, MRMSrcMem,
1997 (ops VR128:$dst, VR128:$src1, i128mem:$src2),
1998 "punpckhqdq {$src2, $dst|$dst, $src2}",
2000 (v2i64 (vector_shuffle VR128:$src1,
2001 (loadv2i64 addr:$src2),
2002 UNPCKH_shuffle_mask)))]>;
// pextrw: extract one 16-bit element (selected by the immediate) into a
// 32-bit GPR, via the target-specific X86pextrw node.
2006 def PEXTRWri : PDIi8<0xC5, MRMSrcReg,
2007 (ops GR32:$dst, VR128:$src1, i32i8imm:$src2),
2008 "pextrw {$src2, $src1, $dst|$dst, $src1, $src2}",
2009 [(set GR32:$dst, (X86pextrw (v8i16 VR128:$src1),
2010 (i32 imm:$src2)))]>;
// pinsrw: insert the low 16 bits of a GPR (or a 16-bit memory word,
// any-extended to i32) into the element selected by the immediate.
2011 let isTwoAddress = 1 in {
2012 def PINSRWrri : PDIi8<0xC4, MRMSrcReg,
2013 (ops VR128:$dst, VR128:$src1, GR32:$src2, i32i8imm:$src3),
2014 "pinsrw {$src3, $src2, $dst|$dst, $src2, $src3}",
2015 [(set VR128:$dst, (v8i16 (X86pinsrw (v8i16 VR128:$src1),
2016 GR32:$src2, (i32 imm:$src3))))]>;
2017 def PINSRWrmi : PDIi8<0xC4, MRMSrcMem,
2018 (ops VR128:$dst, VR128:$src1, i16mem:$src2, i32i8imm:$src3),
2019 "pinsrw {$src3, $src2, $dst|$dst, $src2, $src3}",
2021 (v8i16 (X86pinsrw (v8i16 VR128:$src1),
2022 (i32 (anyext (loadi16 addr:$src2))),
2023 (i32 imm:$src3))))]>;
2026 //===----------------------------------------------------------------------===//
2027 // Miscellaneous Instructions
2028 //===----------------------------------------------------------------------===//
// movmskps: gather the sign bits of the four singles into a GR32.
2031 def MOVMSKPSrr : PSI<0x50, MRMSrcReg, (ops GR32:$dst, VR128:$src),
2032 "movmskps {$src, $dst|$dst, $src}",
2033 [(set GR32:$dst, (int_x86_sse_movmsk_ps VR128:$src))]>;
// movmskpd: gather the sign bits of the two doubles into a GR32.
// Fixed instruction class: movmskpd is a 66-prefixed SSE2 instruction
// (it implements int_x86_sse2_movmsk_pd), so it must use PDI; the PSI
// class it had would emit the unprefixed SSE1 movmskps encoding.
2034 def MOVMSKPDrr : PDI<0x50, MRMSrcReg, (ops GR32:$dst, VR128:$src),
2035 "movmskpd {$src, $dst|$dst, $src}",
2036 [(set GR32:$dst, (int_x86_sse2_movmsk_pd VR128:$src))]>;
// pmovmskb: gather the sign bits of the sixteen bytes into a GR32.
2038 def PMOVMSKBrr : PDI<0xD7, MRMSrcReg, (ops GR32:$dst, VR128:$src),
2039 "pmovmskb {$src, $dst|$dst, $src}",
2040 [(set GR32:$dst, (int_x86_sse2_pmovmskb_128 VR128:$src))]>;
2042 // Conditional store
2043 def MASKMOVDQU : PDI<0xF7, RawFrm, (ops VR128:$src, VR128:$mask),
2044 "maskmovdqu {$mask, $src|$src, $mask}",
2045 [(int_x86_sse2_maskmov_dqu VR128:$src, VR128:$mask, EDI)]>,
2048 // Prefetching loads
// 0F 18 with the ModRM reg field as the locality hint: /1 = t0,
// /2 = t1, /3 = t2 (and /0 = nta, defined below).
2049 def PREFETCHT0 : PSI<0x18, MRM1m, (ops i8mem:$src),
2050 "prefetcht0 $src", []>;
2051 def PREFETCHT1 : PSI<0x18, MRM2m, (ops i8mem:$src),
2052 "prefetcht1 $src", []>;
2053 def PREFETCHT2 : PSI<0x18, MRM3m, (ops i8mem:$src),
2054 "prefetcht2 $src", []>;
// Non-temporal prefetch hint, 0F 18 /0.
// Fixed mnemonic: the instruction is spelled "prefetchnta" (it was
// "prefetchtnta", an extra 't').  The record name is kept unchanged so
// existing references to PREFETCHTNTA still resolve.
2055 def PREFETCHTNTA : PSI<0x18, MRM0m, (ops i8mem:$src),
2056 "prefetchnta $src", []>;
2058 // Non-temporal stores
// movntps: non-temporal store of packed single-FP data.
// Fixed operand class: the destination holds packed floats, so it is
// f128mem (it was i128mem — swapped with the integer movntdq).
2059 def MOVNTPSmr : PSI<0x2B, MRMDestMem, (ops f128mem:$dst, VR128:$src),
2060 "movntps {$src, $dst|$dst, $src}",
2061 [(int_x86_sse_movnt_ps addr:$dst, VR128:$src)]>;
// movntpd: non-temporal store of packed double-FP data.
// Fixed operand class: the destination holds packed doubles, so it is
// f128mem (it was i128mem).
2062 def MOVNTPDmr : PDI<0x2B, MRMDestMem, (ops f128mem:$dst, VR128:$src),
2063 "movntpd {$src, $dst|$dst, $src}",
2064 [(int_x86_sse2_movnt_pd addr:$dst, VR128:$src)]>;
// movntdq: non-temporal store of packed integer data.
// Fixed operand class: the destination holds integer data, so it is
// i128mem (it was f128mem — swapped with the float movntps).
2065 def MOVNTDQmr : PDI<0xE7, MRMDestMem, (ops i128mem:$dst, VR128:$src),
2066 "movntdq {$src, $dst|$dst, $src}",
2067 [(int_x86_sse2_movnt_dq addr:$dst, VR128:$src)]>;
// movnti: non-temporal store of a 32-bit GPR (plain 0F prefix, SSE2).
2068 def MOVNTImr : I<0xC3, MRMDestMem, (ops i32mem:$dst, GR32:$src),
2069 "movnti {$src, $dst|$dst, $src}",
2070 [(int_x86_sse2_movnt_i addr:$dst, GR32:$src)]>,
2071 TB, Requires<[HasSSE2]>;
// clflush: flush the cache line containing the operand (0F AE /7 mem).
2074 def CLFLUSH : I<0xAE, MRM7m, (ops i8mem:$src),
2075 "clflush $src", [(int_x86_sse2_clflush addr:$src)]>,
2076 TB, Requires<[HasSSE2]>;
2078 // Load, store, and memory fence
2079 def SFENCE : I<0xAE, MRM7m, (ops),
2080 "sfence", [(int_x86_sse_sfence)]>, TB, Requires<[HasSSE1]>;
2081 def LFENCE : I<0xAE, MRM5m, (ops),
2082 "lfence", [(int_x86_sse2_lfence)]>, TB, Requires<[HasSSE2]>;
2083 def MFENCE : I<0xAE, MRM6m, (ops),
2084 "mfence", [(int_x86_sse2_mfence)]>, TB, Requires<[HasSSE2]>;
2087 def LDMXCSR : I<0xAE, MRM5m, (ops i32mem:$src),
2089 [(int_x86_sse_ldmxcsr addr:$src)]>, TB, Requires<[HasSSE1]>;
2090 def STMXCSR : I<0xAE, MRM3m, (ops i32mem:$dst),
2092 [(int_x86_sse_stmxcsr addr:$dst)]>, TB, Requires<[HasSSE1]>;
2094 // Thread synchronization
// NOTE(review): per the Intel SDM, MONITOR/MWAIT encode as 0F 01 C8 and
// 0F 01 C9.  TB + RawFrm 0xC8/0xC9 as written emits 0F C8 / 0F C9
// (the BSWAP encodings) — confirm how the emitter handles these and fix
// the encoding if needed.
2095 def MONITOR : I<0xC8, RawFrm, (ops), "monitor",
2096 [(int_x86_sse3_monitor EAX, ECX, EDX)]>,
2097 TB, Requires<[HasSSE3]>;
2098 def MWAIT : I<0xC9, RawFrm, (ops), "mwait",
2099 [(int_x86_sse3_mwait ECX, EAX)]>,
2100 TB, Requires<[HasSSE3]>;
2102 //===----------------------------------------------------------------------===//
2103 // Alias Instructions
2104 //===----------------------------------------------------------------------===//
2106 // Alias instructions that map zero vector to pxor / xorp* for sse.
2107 // FIXME: remove when we can teach regalloc that xor reg, reg is ok.
2108 def V_SET0_PI : PDI<0xEF, MRMInitReg, (ops VR128:$dst),
2110 [(set VR128:$dst, (v2i64 immAllZerosV))]>;
2111 def V_SET0_PS : PSI<0x57, MRMInitReg, (ops VR128:$dst),
2113 [(set VR128:$dst, (v4f32 immAllZerosV))]>;
2114 def V_SET0_PD : PDI<0x57, MRMInitReg, (ops VR128:$dst),
2116 [(set VR128:$dst, (v2f64 immAllZerosV))]>;
// Alias: materialize all-ones with pcmpeqd reg,reg (reg always equals
// itself, so every element compares equal and sets all bits).
// NOTE(review): the pattern types the result as v2f64 immAllOnesV even
// though pcmpeqd produces an integer all-ones pattern; an integer vector
// type (e.g. v4i32) may be more appropriate — confirm against the
// patterns that consume immAllOnesV.
2118 def V_SETALLONES : PDI<0x76, MRMInitReg, (ops VR128:$dst),
2119 "pcmpeqd $dst, $dst",
2120 [(set VR128:$dst, (v2f64 immAllOnesV))]>;
2122 // FR32 / FR64 to 128-bit vector conversion.
2123 def MOVSS2PSrr : SSI<0x10, MRMSrcReg, (ops VR128:$dst, FR32:$src),
2124 "movss {$src, $dst|$dst, $src}",
2126 (v4f32 (scalar_to_vector FR32:$src)))]>;
2127 def MOVSS2PSrm : SSI<0x10, MRMSrcMem, (ops VR128:$dst, f32mem:$src),
2128 "movss {$src, $dst|$dst, $src}",
2130 (v4f32 (scalar_to_vector (loadf32 addr:$src))))]>;
2131 def MOVSD2PDrr : SDI<0x10, MRMSrcReg, (ops VR128:$dst, FR64:$src),
2132 "movsd {$src, $dst|$dst, $src}",
2134 (v2f64 (scalar_to_vector FR64:$src)))]>;
2135 def MOVSD2PDrm : SDI<0x10, MRMSrcMem, (ops VR128:$dst, f64mem:$src),
2136 "movsd {$src, $dst|$dst, $src}",
2138 (v2f64 (scalar_to_vector (loadf64 addr:$src))))]>;
2140 def MOVDI2PDIrr : PDI<0x6E, MRMSrcReg, (ops VR128:$dst, GR32:$src),
2141 "movd {$src, $dst|$dst, $src}",
2143 (v4i32 (scalar_to_vector GR32:$src)))]>;
2144 def MOVDI2PDIrm : PDI<0x6E, MRMSrcMem, (ops VR128:$dst, i32mem:$src),
2145 "movd {$src, $dst|$dst, $src}",
2147 (v4i32 (scalar_to_vector (loadi32 addr:$src))))]>;
2148 // SSE2 instructions with XS prefix
2149 def MOVQI2PQIrr : I<0x7E, MRMSrcReg, (ops VR128:$dst, VR64:$src),
2150 "movq {$src, $dst|$dst, $src}",
2152 (v2i64 (scalar_to_vector VR64:$src)))]>, XS,
2153 Requires<[HasSSE2]>;
2154 def MOVQI2PQIrm : I<0x7E, MRMSrcMem, (ops VR128:$dst, i64mem:$src),
2155 "movq {$src, $dst|$dst, $src}",
2157 (v2i64 (scalar_to_vector (loadi64 addr:$src))))]>, XS,
2158 Requires<[HasSSE2]>;
2159 // FIXME: may not be able to eliminate this movss with coalescing the src and
2160 // dest register classes are different. We really want to write this pattern
2162 // def : Pat<(f32 (vector_extract (v4f32 VR128:$src), (i32 0))),
2163 // (f32 FR32:$src)>;
2164 def MOVPS2SSrr : SSI<0x10, MRMSrcReg, (ops FR32:$dst, VR128:$src),
2165 "movss {$src, $dst|$dst, $src}",
2166 [(set FR32:$dst, (vector_extract (v4f32 VR128:$src),
2168 def MOVPS2SSmr : SSI<0x11, MRMDestMem, (ops f32mem:$dst, VR128:$src),
2169 "movss {$src, $dst|$dst, $src}",
2170 [(store (f32 (vector_extract (v4f32 VR128:$src),
2171 (i32 0))), addr:$dst)]>;
2172 def MOVPD2SDrr : SDI<0x10, MRMSrcReg, (ops FR64:$dst, VR128:$src),
2173 "movsd {$src, $dst|$dst, $src}",
2174 [(set FR64:$dst, (vector_extract (v2f64 VR128:$src),
2176 def MOVPD2SDmr : SDI<0x11, MRMDestMem, (ops f64mem:$dst, VR128:$src),
2177 "movsd {$src, $dst|$dst, $src}",
2178 [(store (f64 (vector_extract (v2f64 VR128:$src),
2179 (i32 0))), addr:$dst)]>;
2180 def MOVPDI2DIrr : PDI<0x7E, MRMDestReg, (ops GR32:$dst, VR128:$src),
2181 "movd {$src, $dst|$dst, $src}",
2182 [(set GR32:$dst, (vector_extract (v4i32 VR128:$src),
2184 def MOVPDI2DImr : PDI<0x7E, MRMDestMem, (ops i32mem:$dst, VR128:$src),
2185 "movd {$src, $dst|$dst, $src}",
2186 [(store (i32 (vector_extract (v4i32 VR128:$src),
2187 (i32 0))), addr:$dst)]>;
2189 // Move to lower bits of a VR128, leaving upper bits alone.
2190 // Three operand (but two address) aliases.
2191 let isTwoAddress = 1 in {
2192 def MOVLSS2PSrr : SSI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src1, FR32:$src2),
2193 "movss {$src2, $dst|$dst, $src2}", []>;
2194 def MOVLSD2PDrr : SDI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src1, FR64:$src2),
2195 "movsd {$src2, $dst|$dst, $src2}", []>;
2197 let AddedComplexity = 20 in {
2198 def MOVLPSrr : SSI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
2199 "movss {$src2, $dst|$dst, $src2}",
2201 (v4f32 (vector_shuffle VR128:$src1, VR128:$src2,
2202 MOVL_shuffle_mask)))]>;
2203 def MOVLPDrr : SDI<0x10, MRMSrcReg, (ops VR128:$dst, VR128:$src1, VR128:$src2),
2204 "movsd {$src2, $dst|$dst, $src2}",
2206 (v2f64 (vector_shuffle VR128:$src1, VR128:$src2,
2207 MOVL_shuffle_mask)))]>;
2211 // Store / copy lower 64-bits of a XMM register.
2212 def MOVLQ128mr : PDI<0xD6, MRMDestMem, (ops i64mem:$dst, VR128:$src),
2213 "movq {$src, $dst|$dst, $src}",
2214 [(int_x86_sse2_storel_dq addr:$dst, VR128:$src)]>;
2216 // Move to lower bits of a VR128 and zeroing upper bits.
2217 // Loading from memory automatically zeroing upper bits.
2218 let AddedComplexity = 20 in {
// Load a scalar and zero-extend it into a full vector: the shuffle of an
// all-zeros vector with a scalar_to_vector through MOVL_shuffle_mask is
// exactly what the plain movss/movsd load form does in hardware.
2219 def MOVZSS2PSrm : SSI<0x10, MRMSrcMem, (ops VR128:$dst, f32mem:$src),
2220 "movss {$src, $dst|$dst, $src}",
2221 [(set VR128:$dst, (v4f32 (vector_shuffle immAllZerosV,
2222 (v4f32 (scalar_to_vector (loadf32 addr:$src))),
2223 MOVL_shuffle_mask)))]>;
2224 def MOVZSD2PDrm : SDI<0x10, MRMSrcMem, (ops VR128:$dst, f64mem:$src),
2225 "movsd {$src, $dst|$dst, $src}",
2226 [(set VR128:$dst, (v2f64 (vector_shuffle immAllZerosV,
2227 (v2f64 (scalar_to_vector (loadf64 addr:$src))),
2228 MOVL_shuffle_mask)))]>;
2229 // movd / movq to XMM register zero-extends
2230 def MOVZDI2PDIrr : PDI<0x6E, MRMSrcReg, (ops VR128:$dst, GR32:$src),
2231 "movd {$src, $dst|$dst, $src}",
2232 [(set VR128:$dst, (v4i32 (vector_shuffle immAllZerosV,
2233 (v4i32 (scalar_to_vector GR32:$src)),
2234 MOVL_shuffle_mask)))]>;
2235 def MOVZDI2PDIrm : PDI<0x6E, MRMSrcMem, (ops VR128:$dst, i32mem:$src),
2236 "movd {$src, $dst|$dst, $src}",
2237 [(set VR128:$dst, (v4i32 (vector_shuffle immAllZerosV,
2238 (v4i32 (scalar_to_vector (loadi32 addr:$src))),
2239 MOVL_shuffle_mask)))]>;
2240 // Moving from XMM to XMM but still clear upper 64 bits.
// Uses the XS (movq xmm,xmm) encoding; matched via the movl_dq intrinsic.
2241 def MOVZQI2PQIrr : I<0x7E, MRMSrcReg, (ops VR128:$dst, VR128:$src),
2242 "movq {$src, $dst|$dst, $src}",
2243 [(set VR128:$dst, (int_x86_sse2_movl_dq VR128:$src))]>,
2244 XS, Requires<[HasSSE2]>;
2245 def MOVZQI2PQIrm : I<0x7E, MRMSrcMem, (ops VR128:$dst, i64mem:$src),
2246 "movq {$src, $dst|$dst, $src}",
2247 [(set VR128:$dst, (int_x86_sse2_movl_dq
2248 (bc_v4i32 (loadv2i64 addr:$src))))]>,
2249 XS, Requires<[HasSSE2]>;
2252 //===----------------------------------------------------------------------===//
2253 // Non-Instruction Patterns
2254 //===----------------------------------------------------------------------===//
2256 // 128-bit vector undef's.
// Every 128-bit undef maps to the same IMPLICIT_DEF of a VR128 register.
2257 def : Pat<(v2f64 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
2258 def : Pat<(v16i8 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
2259 def : Pat<(v8i16 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
2260 def : Pat<(v4i32 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
2261 def : Pat<(v2i64 (undef)), (IMPLICIT_DEF_VR128)>, Requires<[HasSSE2]>;
2263 // 128-bit vector all zero's.
2264 def : Pat<(v16i8 immAllZerosV), (v16i8 (V_SET0_PI))>, Requires<[HasSSE2]>;
2265 def : Pat<(v8i16 immAllZerosV), (v8i16 (V_SET0_PI))>, Requires<[HasSSE2]>;
2266 def : Pat<(v4i32 immAllZerosV), (v4i32 (V_SET0_PI))>, Requires<[HasSSE2]>;
2268 // 128-bit vector all one's.
2269 def : Pat<(v16i8 immAllOnesV), (v16i8 (V_SETALLONES))>, Requires<[HasSSE2]>;
2270 def : Pat<(v8i16 immAllOnesV), (v8i16 (V_SETALLONES))>, Requires<[HasSSE2]>;
2271 def : Pat<(v4i32 immAllOnesV), (v4i32 (V_SETALLONES))>, Requires<[HasSSE2]>;
2272 def : Pat<(v2i64 immAllOnesV), (v2i64 (V_SETALLONES))>, Requires<[HasSSE2]>;
// NOTE(review): the v4f32 case below requires only HasSSE1 while every
// sibling requires HasSSE2; presumably V_SETALLONES expands to an SSE2
// instruction (pcmpeqd) — confirm the predicate is intentional.
2273 def : Pat<(v4f32 immAllOnesV), (v4f32 (V_SETALLONES))>, Requires<[HasSSE1]>;
2275 // Store 128-bit integer vector values.
// NOTE(review): all three use the aligned store form (MOVDQAmr);
// presumably 128-bit vector stores are guaranteed aligned here — confirm.
2276 def : Pat<(store (v16i8 VR128:$src), addr:$dst),
2277 (MOVDQAmr addr:$dst, VR128:$src)>, Requires<[HasSSE2]>;
2278 def : Pat<(store (v8i16 VR128:$src), addr:$dst),
2279 (MOVDQAmr addr:$dst, VR128:$src)>, Requires<[HasSSE2]>;
2280 def : Pat<(store (v4i32 VR128:$src), addr:$dst),
2281 (MOVDQAmr addr:$dst, VR128:$src)>, Requires<[HasSSE2]>;
2283 // Scalar to v8i16 / v16i8. The source may be a GR32, but only the lower 8 or
// 16 bits of it are relevant; movd's zero-extension of the rest is benign.
2285 def : Pat<(v8i16 (X86s2vec GR32:$src)), (v8i16 (MOVDI2PDIrr GR32:$src))>,
2286 Requires<[HasSSE2]>;
2287 def : Pat<(v16i8 (X86s2vec GR32:$src)), (v16i8 (MOVDI2PDIrr GR32:$src))>,
2288 Requires<[HasSSE2]>;
// 128-bit bitconvert (bitcast) patterns: a bitcast between any two 128-bit
// vector types is a register no-op, so each pattern simply re-types the
// source VR128 register. The result-type tag on the output must match the
// pattern's destination type; the v8i16 and v16i8 groups previously
// re-typed their results as v4i32 (copy-paste error), which is corrected
// below.
2291 def : Pat<(v2i64 (bitconvert (v4i32 VR128:$src))), (v2i64 VR128:$src)>,
2292 Requires<[HasSSE2]>;
2293 def : Pat<(v2i64 (bitconvert (v8i16 VR128:$src))), (v2i64 VR128:$src)>,
2294 Requires<[HasSSE2]>;
2295 def : Pat<(v2i64 (bitconvert (v16i8 VR128:$src))), (v2i64 VR128:$src)>,
2296 Requires<[HasSSE2]>;
2297 def : Pat<(v2i64 (bitconvert (v2f64 VR128:$src))), (v2i64 VR128:$src)>,
2298 Requires<[HasSSE2]>;
2299 def : Pat<(v2i64 (bitconvert (v4f32 VR128:$src))), (v2i64 VR128:$src)>,
2300 Requires<[HasSSE2]>;
2301 def : Pat<(v4i32 (bitconvert (v2i64 VR128:$src))), (v4i32 VR128:$src)>,
2302 Requires<[HasSSE2]>;
2303 def : Pat<(v4i32 (bitconvert (v8i16 VR128:$src))), (v4i32 VR128:$src)>,
2304 Requires<[HasSSE2]>;
2305 def : Pat<(v4i32 (bitconvert (v16i8 VR128:$src))), (v4i32 VR128:$src)>,
2306 Requires<[HasSSE2]>;
2307 def : Pat<(v4i32 (bitconvert (v2f64 VR128:$src))), (v4i32 VR128:$src)>,
2308 Requires<[HasSSE2]>;
2309 def : Pat<(v4i32 (bitconvert (v4f32 VR128:$src))), (v4i32 VR128:$src)>,
2310 Requires<[HasSSE2]>;
2311 def : Pat<(v8i16 (bitconvert (v2i64 VR128:$src))), (v8i16 VR128:$src)>,
2312 Requires<[HasSSE2]>;
2313 def : Pat<(v8i16 (bitconvert (v4i32 VR128:$src))), (v8i16 VR128:$src)>,
2314 Requires<[HasSSE2]>;
2315 def : Pat<(v8i16 (bitconvert (v16i8 VR128:$src))), (v8i16 VR128:$src)>,
2316 Requires<[HasSSE2]>;
2317 def : Pat<(v8i16 (bitconvert (v2f64 VR128:$src))), (v8i16 VR128:$src)>,
2318 Requires<[HasSSE2]>;
2319 def : Pat<(v8i16 (bitconvert (v4f32 VR128:$src))), (v8i16 VR128:$src)>,
2320 Requires<[HasSSE2]>;
2321 def : Pat<(v16i8 (bitconvert (v2i64 VR128:$src))), (v16i8 VR128:$src)>,
2322 Requires<[HasSSE2]>;
2323 def : Pat<(v16i8 (bitconvert (v4i32 VR128:$src))), (v16i8 VR128:$src)>,
2324 Requires<[HasSSE2]>;
2325 def : Pat<(v16i8 (bitconvert (v8i16 VR128:$src))), (v16i8 VR128:$src)>,
2326 Requires<[HasSSE2]>;
2327 def : Pat<(v16i8 (bitconvert (v2f64 VR128:$src))), (v16i8 VR128:$src)>,
2328 Requires<[HasSSE2]>;
2329 def : Pat<(v16i8 (bitconvert (v4f32 VR128:$src))), (v16i8 VR128:$src)>,
2330 Requires<[HasSSE2]>;
2331 def : Pat<(v4f32 (bitconvert (v2i64 VR128:$src))), (v4f32 VR128:$src)>,
2332 Requires<[HasSSE2]>;
2333 def : Pat<(v4f32 (bitconvert (v4i32 VR128:$src))), (v4f32 VR128:$src)>,
2334 Requires<[HasSSE2]>;
2335 def : Pat<(v4f32 (bitconvert (v8i16 VR128:$src))), (v4f32 VR128:$src)>,
2336 Requires<[HasSSE2]>;
2337 def : Pat<(v4f32 (bitconvert (v16i8 VR128:$src))), (v4f32 VR128:$src)>,
2338 Requires<[HasSSE2]>;
2339 def : Pat<(v4f32 (bitconvert (v2f64 VR128:$src))), (v4f32 VR128:$src)>,
2340 Requires<[HasSSE2]>;
2341 def : Pat<(v2f64 (bitconvert (v2i64 VR128:$src))), (v2f64 VR128:$src)>,
2342 Requires<[HasSSE2]>;
2343 def : Pat<(v2f64 (bitconvert (v4i32 VR128:$src))), (v2f64 VR128:$src)>,
2344 Requires<[HasSSE2]>;
2345 def : Pat<(v2f64 (bitconvert (v8i16 VR128:$src))), (v2f64 VR128:$src)>,
2346 Requires<[HasSSE2]>;
2347 def : Pat<(v2f64 (bitconvert (v16i8 VR128:$src))), (v2f64 VR128:$src)>,
2348 Requires<[HasSSE2]>;
2349 def : Pat<(v2f64 (bitconvert (v4f32 VR128:$src))), (v2f64 VR128:$src)>,
2350 Requires<[HasSSE2]>;
2352 // Move scalar to XMM zero-extended
2353 // movd to XMM register zero-extends
2354 let AddedComplexity = 20 in {
// Integer scalar: a zero-shuffle of an X86s2vec is a plain movd.
2355 def : Pat<(v8i16 (vector_shuffle immAllZerosV,
2356 (v8i16 (X86s2vec GR32:$src)), MOVL_shuffle_mask)),
2357 (v8i16 (MOVZDI2PDIrr GR32:$src))>, Requires<[HasSSE2]>;
2358 def : Pat<(v16i8 (vector_shuffle immAllZerosV,
2359 (v16i8 (X86s2vec GR32:$src)), MOVL_shuffle_mask)),
2360 (v16i8 (MOVZDI2PDIrr GR32:$src))>, Requires<[HasSSE2]>;
2361 // Zeroing a VR128 then do a MOVS{S|D} to the lower bits.
2362 def : Pat<(v2f64 (vector_shuffle immAllZerosV,
2363 (v2f64 (scalar_to_vector FR64:$src)), MOVL_shuffle_mask)),
2364 (v2f64 (MOVLSD2PDrr (V_SET0_PD), FR64:$src))>, Requires<[HasSSE2]>;
// NOTE(review): this v4f32 pattern requires HasSSE2 although movss/xorps
// are SSE1 instructions — confirm whether HasSSE1 was intended.
2365 def : Pat<(v4f32 (vector_shuffle immAllZerosV,
2366 (v4f32 (scalar_to_vector FR32:$src)), MOVL_shuffle_mask)),
2367 (v4f32 (MOVLSS2PSrr (V_SET0_PS), FR32:$src))>, Requires<[HasSSE2]>;
2370 // Splat v2f64 / v2i64
// Two-element splats are just an unpack-low of the register with itself.
2371 let AddedComplexity = 10 in {
2372 def : Pat<(vector_shuffle (v2f64 VR128:$src), (undef), SSE_splat_v2_mask:$sm),
2373 (v2f64 (UNPCKLPDrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
2374 def : Pat<(vector_shuffle (v2i64 VR128:$src), (undef), SSE_splat_v2_mask:$sm),
2375 (v2i64 (PUNPCKLQDQrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
// Splat v4f32: shufps of the register with itself, reusing the splat mask
// as the shuffle immediate.
2379 def : Pat<(vector_shuffle (v4f32 VR128:$src), (undef), SSE_splat_mask:$sm),
2380 (v4f32 (SHUFPSrri VR128:$src, VR128:$src, SSE_splat_mask:$sm))>,
2381 Requires<[HasSSE1]>;
2383 // Special unary SHUFPSrri case.
2384 // FIXME: when we want non two-address code, then we should use PSHUFD?
2385 def : Pat<(vector_shuffle (v4f32 VR128:$src1), (undef),
2386 SHUFP_unary_shuffle_mask:$sm),
2387 (v4f32 (SHUFPSrri VR128:$src1, VR128:$src1, SHUFP_unary_shuffle_mask:$sm))>,
2388 Requires<[HasSSE1]>;
2389 // Unary v4f32 shuffle with PSHUF* in order to fold a load.
// PSHUFD takes its source from memory directly, unlike the two-address
// SHUFPS form above.
2390 def : Pat<(vector_shuffle (loadv4f32 addr:$src1), (undef),
2391 SHUFP_unary_shuffle_mask:$sm),
2392 (v4f32 (PSHUFDmi addr:$src1, SHUFP_unary_shuffle_mask:$sm))>,
2393 Requires<[HasSSE2]>;
2394 // Special binary v4i32 shuffle cases with SHUFPS.
2395 def : Pat<(vector_shuffle (v4i32 VR128:$src1), (v4i32 VR128:$src2),
2396 PSHUFD_binary_shuffle_mask:$sm),
2397 (v4i32 (SHUFPSrri VR128:$src1, VR128:$src2,
2398 PSHUFD_binary_shuffle_mask:$sm))>, Requires<[HasSSE2]>;
2399 def : Pat<(vector_shuffle (v4i32 VR128:$src1),
2400 (bc_v4i32 (loadv2i64 addr:$src2)), PSHUFD_binary_shuffle_mask:$sm),
2401 (v4i32 (SHUFPSrmi VR128:$src1, addr:$src2,
2402 PSHUFD_binary_shuffle_mask:$sm))>, Requires<[HasSSE2]>;
2404 // vector_shuffle v1, <undef>, <0, 0, 1, 1, ...>
2405 let AddedComplexity = 10 in {
// Predicate fix: UNPCKLPS is an SSE1 instruction while PUNPCKLBW/WD/DQ are
// SSE2; the predicates on the first and last patterns were swapped, which
// could emit punpckldq on an SSE1-only target.
2406 def : Pat<(v4f32 (vector_shuffle VR128:$src, (undef),
2407 UNPCKL_v_undef_shuffle_mask)),
2408 (v4f32 (UNPCKLPSrr VR128:$src, VR128:$src))>, Requires<[HasSSE1]>;
2409 def : Pat<(v16i8 (vector_shuffle VR128:$src, (undef),
2410 UNPCKL_v_undef_shuffle_mask)),
2411 (v16i8 (PUNPCKLBWrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
2412 def : Pat<(v8i16 (vector_shuffle VR128:$src, (undef),
2413 UNPCKL_v_undef_shuffle_mask)),
2414 (v8i16 (PUNPCKLWDrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
2415 def : Pat<(v4i32 (vector_shuffle VR128:$src, (undef),
2416 UNPCKL_v_undef_shuffle_mask)),
2417 (v4i32 (PUNPCKLDQrr VR128:$src, VR128:$src))>, Requires<[HasSSE2]>;
2420 let AddedComplexity = 20 in {
2421 // vector_shuffle v1, <undef> <1, 1, 3, 3>
// SSE3 movshdup duplicates the odd elements; register and load-folded forms.
2422 def : Pat<(v4i32 (vector_shuffle VR128:$src, (undef),
2423 MOVSHDUP_shuffle_mask)),
2424 (v4i32 (MOVSHDUPrr VR128:$src))>, Requires<[HasSSE3]>;
2425 def : Pat<(v4i32 (vector_shuffle (bc_v4i32 (loadv2i64 addr:$src)), (undef),
2426 MOVSHDUP_shuffle_mask)),
2427 (v4i32 (MOVSHDUPrm addr:$src))>, Requires<[HasSSE3]>;
2429 // vector_shuffle v1, <undef> <0, 0, 2, 2>
// SSE3 movsldup duplicates the even elements; register and load-folded forms.
2430 def : Pat<(v4i32 (vector_shuffle VR128:$src, (undef),
2431 MOVSLDUP_shuffle_mask)),
2432 (v4i32 (MOVSLDUPrr VR128:$src))>, Requires<[HasSSE3]>;
2433 def : Pat<(v4i32 (vector_shuffle (bc_v4i32 (loadv2i64 addr:$src)), (undef),
2434 MOVSLDUP_shuffle_mask)),
2435 (v4i32 (MOVSLDUPrm addr:$src))>, Requires<[HasSSE3]>;
2438 let AddedComplexity = 20 in {
2439 // vector_shuffle v1, v2 <0, 1, 4, 5> using MOVLHPS
2440 def : Pat<(v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
2441 MOVHP_shuffle_mask)),
2442 (v4i32 (MOVLHPSrr VR128:$src1, VR128:$src2))>;
2444 // vector_shuffle v1, v2 <6, 7, 2, 3> using MOVHLPS
2445 def : Pat<(v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
2446 MOVHLPS_shuffle_mask)),
2447 (v4i32 (MOVHLPSrr VR128:$src1, VR128:$src2))>;
2449 // vector_shuffle v1, (load v2) <4, 5, 2, 3> using MOVLPS
2450 // vector_shuffle v1, (load v2) <0, 1, 4, 5> using MOVHPS
2451 def : Pat<(v4f32 (vector_shuffle VR128:$src1, (loadv4f32 addr:$src2),
2452 MOVLP_shuffle_mask)),
2453 (v4f32 (MOVLPSrm VR128:$src1, addr:$src2))>, Requires<[HasSSE1]>;
2454 def : Pat<(v2f64 (vector_shuffle VR128:$src1, (loadv2f64 addr:$src2),
2455 MOVLP_shuffle_mask)),
2456 (v2f64 (MOVLPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2457 def : Pat<(v4f32 (vector_shuffle VR128:$src1, (loadv4f32 addr:$src2),
2458 MOVHP_shuffle_mask)),
2459 (v4f32 (MOVHPSrm VR128:$src1, addr:$src2))>, Requires<[HasSSE1]>;
2460 def : Pat<(v2f64 (vector_shuffle VR128:$src1, (loadv2f64 addr:$src2),
2461 MOVHP_shuffle_mask)),
2462 (v2f64 (MOVHPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
// Integer versions of the same MOVLP/MOVHP load-folding patterns. The
// loadv2i64/bc_v4i32 operands are SSE2 vector types, so all four require
// HasSSE2 (the v4i32 MOVHPSrm entry previously required only HasSSE1), and
// the final pattern is the MOVHP/MOVHPDrm case (it was previously an exact
// duplicate of the MOVLP/MOVLPDrm pattern above it).
2464 def : Pat<(v4i32 (vector_shuffle VR128:$src1, (bc_v4i32 (loadv2i64 addr:$src2)),
2465 MOVLP_shuffle_mask)),
2466 (v4i32 (MOVLPSrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2467 def : Pat<(v2i64 (vector_shuffle VR128:$src1, (loadv2i64 addr:$src2),
2468 MOVLP_shuffle_mask)),
2469 (v2i64 (MOVLPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2470 def : Pat<(v4i32 (vector_shuffle VR128:$src1, (bc_v4i32 (loadv2i64 addr:$src2)),
2471 MOVHP_shuffle_mask)),
2472 (v4i32 (MOVHPSrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2473 def : Pat<(v2i64 (vector_shuffle VR128:$src1, (loadv2i64 addr:$src2),
2474 MOVHP_shuffle_mask)),
2475 (v2i64 (MOVHPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2477 // Setting the lowest element in the vector.
2478 def : Pat<(v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
2479 MOVL_shuffle_mask)),
2480 (v4i32 (MOVLPSrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2481 def : Pat<(v2i64 (vector_shuffle VR128:$src1, VR128:$src2,
2482 MOVL_shuffle_mask)),
2483 (v2i64 (MOVLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2485 // vector_shuffle v1, v2 <4, 5, 2, 3> using MOVLPDrr (movsd)
// Deliberately uses the SD-domain movsd even for v4f32/v4i32 operands:
// the mask moves the low 64 bits of $src2 into the low half of $src1.
2486 def : Pat<(v4f32 (vector_shuffle VR128:$src1, VR128:$src2,
2487 MOVLP_shuffle_mask)),
2488 (v4f32 (MOVLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2489 def : Pat<(v4i32 (vector_shuffle VR128:$src1, VR128:$src2,
2490 MOVLP_shuffle_mask)),
2491 (v4i32 (MOVLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2493 // Set lowest element and zero upper elements.
2494 def : Pat<(bc_v2i64 (vector_shuffle immAllZerosV,
2495 (v2f64 (scalar_to_vector (loadf64 addr:$src))),
2496 MOVL_shuffle_mask)),
2497 (v2i64 (MOVZQI2PQIrm addr:$src))>, Requires<[HasSSE2]>;
2500 // FIXME: Temporary workaround since 2-wide shuffle is broken.
// Map the SSE2 mov/shuf/unpck intrinsics directly onto their instructions
// instead of going through vector_shuffle selection.
2501 def : Pat<(int_x86_sse2_movs_d VR128:$src1, VR128:$src2),
2502 (v2f64 (MOVLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2503 def : Pat<(int_x86_sse2_loadh_pd VR128:$src1, addr:$src2),
2504 (v2f64 (MOVHPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2505 def : Pat<(int_x86_sse2_loadl_pd VR128:$src1, addr:$src2),
2506 (v2f64 (MOVLPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2507 def : Pat<(int_x86_sse2_shuf_pd VR128:$src1, VR128:$src2, imm:$src3),
2508 (v2f64 (SHUFPDrri VR128:$src1, VR128:$src2, imm:$src3))>,
2509 Requires<[HasSSE2]>;
2510 def : Pat<(int_x86_sse2_shuf_pd VR128:$src1, (load addr:$src2), imm:$src3),
2511 (v2f64 (SHUFPDrmi VR128:$src1, addr:$src2, imm:$src3))>,
2512 Requires<[HasSSE2]>;
2513 def : Pat<(int_x86_sse2_unpckh_pd VR128:$src1, VR128:$src2),
2514 (v2f64 (UNPCKHPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2515 def : Pat<(int_x86_sse2_unpckh_pd VR128:$src1, (load addr:$src2)),
2516 (v2f64 (UNPCKHPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2517 def : Pat<(int_x86_sse2_unpckl_pd VR128:$src1, VR128:$src2),
2518 (v2f64 (UNPCKLPDrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2519 def : Pat<(int_x86_sse2_unpckl_pd VR128:$src1, (load addr:$src2)),
2520 (v2f64 (UNPCKLPDrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2521 def : Pat<(int_x86_sse2_punpckh_qdq VR128:$src1, VR128:$src2),
2522 (v2i64 (PUNPCKHQDQrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2523 def : Pat<(int_x86_sse2_punpckh_qdq VR128:$src1, (load addr:$src2)),
2524 (v2i64 (PUNPCKHQDQrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2525 def : Pat<(int_x86_sse2_punpckl_qdq VR128:$src1, VR128:$src2),
2526 (v2i64 (PUNPCKLQDQrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
// NOTE(review): unlike every sibling above, this result omits the
// (v2i64 ...) type cast — likely an oversight, though probably harmless.
2527 def : Pat<(int_x86_sse2_punpckl_qdq VR128:$src1, (load addr:$src2)),
2528 (PUNPCKLQDQrm VR128:$src1, addr:$src2)>, Requires<[HasSSE2]>;
2530 // 128-bit logical shifts
// NOTE(review): PSxLDQ_imm presumably rescales the intrinsic's immediate to
// the byte count pslldq/psrldq expect — confirm against its definition.
2531 def : Pat<(int_x86_sse2_psll_dq VR128:$src1, imm:$src2),
2532 (v2i64 (PSLLDQri VR128:$src1, (PSxLDQ_imm imm:$src2)))>,
2533 Requires<[HasSSE2]>;
2534 def : Pat<(int_x86_sse2_psrl_dq VR128:$src1, imm:$src2),
2535 (v2i64 (PSRLDQri VR128:$src1, (PSxLDQ_imm imm:$src2)))>,
2536 Requires<[HasSSE2]>;
2538 // Some special case pandn patterns.
// pandn computes (~op1 & op2); match "and(xor(x, all-ones), y)" for each
// bitcast flavor of the all-ones constant, register and memory forms.
2539 def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v4i32 immAllOnesV))),
2541 (v2i64 (PANDNrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2542 def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v8i16 immAllOnesV))),
2544 (v2i64 (PANDNrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2545 def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v16i8 immAllOnesV))),
2547 (v2i64 (PANDNrr VR128:$src1, VR128:$src2))>, Requires<[HasSSE2]>;
2549 def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v4i32 immAllOnesV))),
2550 (load addr:$src2))),
2551 (v2i64 (PANDNrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2552 def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v8i16 immAllOnesV))),
2553 (load addr:$src2))),
2554 (v2i64 (PANDNrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;
2555 def : Pat<(v2i64 (and (xor VR128:$src1, (bc_v2i64 (v16i8 immAllOnesV))),
2556 (load addr:$src2))),
2557 (v2i64 (PANDNrm VR128:$src1, addr:$src2))>, Requires<[HasSSE2]>;