1 //===- PPCInstrVSX.td - The PowerPC VSX Extension --*- tablegen -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file describes the VSX extension to the PowerPC instruction set.
12 //===----------------------------------------------------------------------===//
14 // *********************************** NOTE ***********************************
15 // ** For POWER8 Little Endian, the VSX swap optimization relies on knowing **
16 // ** which VMX and VSX instructions are lane-sensitive and which are not. **
17 // ** A lane-sensitive instruction relies, implicitly or explicitly, on **
18 // ** whether lanes are numbered from left to right. An instruction like **
19 // ** VADDFP is not lane-sensitive, because each lane of the result vector **
20 // ** relies only on the corresponding lane of the source vectors. However, **
21 // ** an instruction like VMULESB is lane-sensitive, because "even" and **
22 // ** "odd" lanes are different for big-endian and little-endian numbering. **
24 // ** When adding new VMX and VSX instructions, please consider whether they **
25 // ** are lane-sensitive. If so, they must be added to a switch statement **
26 // ** in PPCVSXSwapRemoval::gatherVectorInstructions(). **
27 // ****************************************************************************
// Assembly-parser operand classes and register operands for the three VSX
// register-class views: full vector (VSRC), scalar double (VSFRC), and
// scalar single (VSSRC). All three validate register numbers with the same
// parser predicate, isVSRegNumber.
// NOTE(review): the closing '}' lines of these defs appear to have been lost
// in extraction (source line numbering jumps, e.g. 30 -> 32); the code text
// itself is preserved unchanged.
29 def PPCRegVSRCAsmOperand : AsmOperandClass {
30 let Name = "RegVSRC"; let PredicateMethod = "isVSRegNumber";
// Operand wrapper used in (ins)/(outs) lists for full 128-bit VSX vectors.
32 def vsrc : RegisterOperand<VSRC> {
33 let ParserMatchClass = PPCRegVSRCAsmOperand;
36 def PPCRegVSFRCAsmOperand : AsmOperandClass {
37 let Name = "RegVSFRC"; let PredicateMethod = "isVSRegNumber";
// Operand wrapper for the scalar double-precision (f64) view of a VSX register.
39 def vsfrc : RegisterOperand<VSFRC> {
40 let ParserMatchClass = PPCRegVSFRCAsmOperand;
43 def PPCRegVSSRCAsmOperand : AsmOperandClass {
44 let Name = "RegVSSRC"; let PredicateMethod = "isVSRegNumber";
// Operand wrapper for the scalar single-precision (f32) view of a VSX register.
46 def vssrc : RegisterOperand<VSSRC> {
47 let ParserMatchClass = PPCRegVSSRCAsmOperand;
50 // Little-endian-specific nodes.
// Type profiles for the custom ISD nodes below:
//   SDT_PPClxvd2x  - one result (v2f64), one pointer operand (the address).
//   SDT_PPCstxvd2x - no results, two operands: v2f64 value + pointer.
51 def SDT_PPClxvd2x : SDTypeProfile<1, 1, [
52 SDTCisVT<0, v2f64>, SDTCisPtrTy<1>
54 def SDT_PPCstxvd2x : SDTypeProfile<0, 2, [
55 SDTCisVT<0, v2f64>, SDTCisPtrTy<1>
// SDT_PPCxxswapd: one result, one operand; its constraint list (original
// lines 58-60) was lost in extraction -- presumably result/operand must have
// the same vector type; verify against upstream PPCInstrVSX.td.
57 def SDT_PPCxxswapd : SDTypeProfile<1, 1, [
// SelectionDAG nodes matched by the LXVD2X/STXVD2X/XXPERMDI patterns further
// down. The load/store nodes carry a chain plus mayLoad/mayStore flags.
61 def PPClxvd2x : SDNode<"PPCISD::LXVD2X", SDT_PPClxvd2x,
62 [SDNPHasChain, SDNPMayLoad]>;
63 def PPCstxvd2x : SDNode<"PPCISD::STXVD2X", SDT_PPCstxvd2x,
64 [SDNPHasChain, SDNPMayStore]>;
65 def PPCxxswapd : SDNode<"PPCISD::XXSWAPD", SDT_PPCxxswapd, [SDNPHasChain]>;
// Register moves between the VSX and GPR domains (no chain, no side effects).
66 def PPCmfvsr : SDNode<"PPCISD::MFVSR", SDTUnaryOp, []>;
67 def PPCmtvsra : SDNode<"PPCISD::MTVSRA", SDTUnaryOp, []>;
68 def PPCmtvsrz : SDNode<"PPCISD::MTVSRZ", SDTUnaryOp, []>;
// Emits a pair of XX3-form compare instructions sharing one BaseName:
//   NAME  - plain form, pattern calls the intrinsic Int.
//   NAMEo - record form ("." suffix mnemonic), pattern uses PPCvcmp_o so the
//           CR6 side effect is modeled.
// Used by the XVCMPEQ/GE/GT defm's below.
70 multiclass XX3Form_Rcr<bits<6> opcode, bits<7> xo, string asmbase,
71 string asmstr, InstrItinClass itin, Intrinsic Int,
72 ValueType OutTy, ValueType InTy> {
73 let BaseName = asmbase in {
74 def NAME : XX3Form_Rc<opcode, xo, (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
75 !strconcat(asmbase, !strconcat(" ", asmstr)), itin,
76 [(set OutTy:$XT, (Int InTy:$XA, InTy:$XB))]>;
// Record form: same encoding with Rc set; note original lines 77, 80 and
// 82-85 (including the '[(set ...' opener and closing braces) were dropped
// by the extraction.
78 def o : XX3Form_Rc<opcode, xo, (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
79 !strconcat(asmbase, !strconcat(". ", asmstr)), itin,
81 (InTy (PPCvcmp_o InTy:$XA, InTy:$XB, xo)))]>,
// Subtarget predicates gating the patterns in this file.
86 def HasVSX : Predicate<"PPCSubTarget->hasVSX()">;
87 def IsLittleEndian : Predicate<"PPCSubTarget->isLittleEndian()">;
88 def IsBigEndian : Predicate<"!PPCSubTarget->isLittleEndian()">;
// Everything from here down is guarded on HasVSX; AddedComplexity = 400 makes
// these patterns win over equivalent non-VSX (FPR/Altivec) patterns.
90 let Predicates = [HasVSX] in {
91 let AddedComplexity = 400 in { // Prefer VSX patterns over non-VSX patterns.
92 let hasSideEffects = 0 in { // VSX instructions don't have side effects.
95 // Load indexed instructions
// lxsdx: scalar f64 load, X-form (reg+reg) address.
97 def LXSDX : XX1Form<31, 588,
98 (outs vsfrc:$XT), (ins memrr:$src),
99 "lxsdx $XT, $src", IIC_LdStLFD,
100 [(set f64:$XT, (load xoaddr:$src))]>;
// lxvd2x/lxvw4x: vector loads selected via target intrinsics here; the
// generic (load ...) patterns for them appear near the end of this file.
102 def LXVD2X : XX1Form<31, 844,
103 (outs vsrc:$XT), (ins memrr:$src),
104 "lxvd2x $XT, $src", IIC_LdStLFD,
105 [(set v2f64:$XT, (int_ppc_vsx_lxvd2x xoaddr:$src))]>;
// lxvdsx (load + splat doubleword): no pattern; selected manually.
107 def LXVDSX : XX1Form<31, 332,
108 (outs vsrc:$XT), (ins memrr:$src),
109 "lxvdsx $XT, $src", IIC_LdStLFD, []>;
111 def LXVW4X : XX1Form<31, 780,
112 (outs vsrc:$XT), (ins memrr:$src),
113 "lxvw4x $XT, $src", IIC_LdStLFD,
114 [(set v4i32:$XT, (int_ppc_vsx_lxvw4x xoaddr:$src))]>;
117 // Store indexed instructions
// mayStore must be stated explicitly because hasSideEffects = 0 above.
118 let mayStore = 1 in {
119 def STXSDX : XX1Form<31, 716,
120 (outs), (ins vsfrc:$XT, memrr:$dst),
121 "stxsdx $XT, $dst", IIC_LdStSTFD,
122 [(store f64:$XT, xoaddr:$dst)]>;
124 def STXVD2X : XX1Form<31, 972,
125 (outs), (ins vsrc:$XT, memrr:$dst),
126 "stxvd2x $XT, $dst", IIC_LdStSTFD,
127 [(store v2f64:$XT, xoaddr:$dst)]>;
129 def STXVW4X : XX1Form<31, 908,
130 (outs), (ins vsrc:$XT, memrr:$dst),
131 "stxvw4x $XT, $dst", IIC_LdStSTFD,
132 [(store v4i32:$XT, xoaddr:$dst)]>;
136 // Add/Mul Instructions
// Scalar (xs*, f64) and vector (xv*, v2f64/v4f32) add and multiply.
// fadd/fmul commute, so isCommutable aids two-address/scheduling decisions.
137 let isCommutable = 1 in {
138 def XSADDDP : XX3Form<60, 32,
139 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
140 "xsadddp $XT, $XA, $XB", IIC_VecFP,
141 [(set f64:$XT, (fadd f64:$XA, f64:$XB))]>;
142 def XSMULDP : XX3Form<60, 48,
143 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
144 "xsmuldp $XT, $XA, $XB", IIC_VecFP,
145 [(set f64:$XT, (fmul f64:$XA, f64:$XB))]>;
147 def XVADDDP : XX3Form<60, 96,
148 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
149 "xvadddp $XT, $XA, $XB", IIC_VecFP,
150 [(set v2f64:$XT, (fadd v2f64:$XA, v2f64:$XB))]>;
152 def XVADDSP : XX3Form<60, 64,
153 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
154 "xvaddsp $XT, $XA, $XB", IIC_VecFP,
155 [(set v4f32:$XT, (fadd v4f32:$XA, v4f32:$XB))]>;
157 def XVMULDP : XX3Form<60, 112,
158 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
159 "xvmuldp $XT, $XA, $XB", IIC_VecFP,
160 [(set v2f64:$XT, (fmul v2f64:$XA, v2f64:$XB))]>;
162 def XVMULSP : XX3Form<60, 80,
163 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
164 "xvmulsp $XT, $XA, $XB", IIC_VecFP,
165 [(set v4f32:$XT, (fmul v4f32:$XA, v4f32:$XB))]>;
168 // Subtract Instructions
// fsub is not commutative, hence outside the isCommutable region.
169 def XSSUBDP : XX3Form<60, 40,
170 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
171 "xssubdp $XT, $XA, $XB", IIC_VecFP,
172 [(set f64:$XT, (fsub f64:$XA, f64:$XB))]>;
174 def XVSUBDP : XX3Form<60, 104,
175 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
176 "xvsubdp $XT, $XA, $XB", IIC_VecFP,
177 [(set v2f64:$XT, (fsub v2f64:$XA, v2f64:$XB))]>;
178 def XVSUBSP : XX3Form<60, 72,
179 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
180 "xvsubsp $XT, $XA, $XB", IIC_VecFP,
181 [(set v4f32:$XT, (fsub v4f32:$XA, v4f32:$XB))]>;
// Scalar double-precision fused multiply-add family. Each BaseName group
// pairs the A-form (XT = XA*XB +/- XT, carries the ISel pattern) with the
// M-form alternate (IsVSXFMAAlt = 1, no pattern) so the VSX FMA mutation
// pass can pick whichever form avoids a copy. The accumulator is modeled as
// the tied input $XTi (RegConstraint "$XTi = $XT", not encoded separately).
184 let BaseName = "XSMADDADP" in {
185 let isCommutable = 1 in
186 def XSMADDADP : XX3Form<60, 33,
187 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
188 "xsmaddadp $XT, $XA, $XB", IIC_VecFP,
189 [(set f64:$XT, (fma f64:$XA, f64:$XB, f64:$XTi))]>,
190 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
192 let IsVSXFMAAlt = 1 in
193 def XSMADDMDP : XX3Form<60, 41,
194 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
195 "xsmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
196 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// msub: XA*XB - XTi (accumulator negated).
200 let BaseName = "XSMSUBADP" in {
201 let isCommutable = 1 in
202 def XSMSUBADP : XX3Form<60, 49,
203 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
204 "xsmsubadp $XT, $XA, $XB", IIC_VecFP,
205 [(set f64:$XT, (fma f64:$XA, f64:$XB, (fneg f64:$XTi)))]>,
206 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
208 let IsVSXFMAAlt = 1 in
209 def XSMSUBMDP : XX3Form<60, 57,
210 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
211 "xsmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
212 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// nmadd: -(XA*XB + XTi).
216 let BaseName = "XSNMADDADP" in {
217 let isCommutable = 1 in
218 def XSNMADDADP : XX3Form<60, 161,
219 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
220 "xsnmaddadp $XT, $XA, $XB", IIC_VecFP,
221 [(set f64:$XT, (fneg (fma f64:$XA, f64:$XB, f64:$XTi)))]>,
222 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
224 let IsVSXFMAAlt = 1 in
225 def XSNMADDMDP : XX3Form<60, 169,
226 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
227 "xsnmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
228 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// nmsub: -(XA*XB - XTi).
232 let BaseName = "XSNMSUBADP" in {
233 let isCommutable = 1 in
234 def XSNMSUBADP : XX3Form<60, 177,
235 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
236 "xsnmsubadp $XT, $XA, $XB", IIC_VecFP,
237 [(set f64:$XT, (fneg (fma f64:$XA, f64:$XB, (fneg f64:$XTi))))]>,
238 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
240 let IsVSXFMAAlt = 1 in
241 def XSNMSUBMDP : XX3Form<60, 185,
242 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
243 "xsnmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
244 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// Vector FMA family (v2f64 "dp" and v4f32 "sp" variants), structured exactly
// like the scalar groups above: A-form with the ISel pattern + M-form
// alternate flagged IsVSXFMAAlt for the FMA mutation pass; accumulator tied
// via $XTi.
248 let BaseName = "XVMADDADP" in {
249 let isCommutable = 1 in
250 def XVMADDADP : XX3Form<60, 97,
251 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
252 "xvmaddadp $XT, $XA, $XB", IIC_VecFP,
253 [(set v2f64:$XT, (fma v2f64:$XA, v2f64:$XB, v2f64:$XTi))]>,
254 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
256 let IsVSXFMAAlt = 1 in
257 def XVMADDMDP : XX3Form<60, 105,
258 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
259 "xvmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
260 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
264 let BaseName = "XVMADDASP" in {
265 let isCommutable = 1 in
266 def XVMADDASP : XX3Form<60, 65,
267 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
268 "xvmaddasp $XT, $XA, $XB", IIC_VecFP,
269 [(set v4f32:$XT, (fma v4f32:$XA, v4f32:$XB, v4f32:$XTi))]>,
270 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
272 let IsVSXFMAAlt = 1 in
273 def XVMADDMSP : XX3Form<60, 73,
274 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
275 "xvmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
276 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// msub: XA*XB - XTi.
280 let BaseName = "XVMSUBADP" in {
281 let isCommutable = 1 in
282 def XVMSUBADP : XX3Form<60, 113,
283 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
284 "xvmsubadp $XT, $XA, $XB", IIC_VecFP,
285 [(set v2f64:$XT, (fma v2f64:$XA, v2f64:$XB, (fneg v2f64:$XTi)))]>,
286 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
288 let IsVSXFMAAlt = 1 in
289 def XVMSUBMDP : XX3Form<60, 121,
290 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
291 "xvmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
292 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
296 let BaseName = "XVMSUBASP" in {
297 let isCommutable = 1 in
298 def XVMSUBASP : XX3Form<60, 81,
299 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
300 "xvmsubasp $XT, $XA, $XB", IIC_VecFP,
301 [(set v4f32:$XT, (fma v4f32:$XA, v4f32:$XB, (fneg v4f32:$XTi)))]>,
302 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
304 let IsVSXFMAAlt = 1 in
305 def XVMSUBMSP : XX3Form<60, 89,
306 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
307 "xvmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
308 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// nmadd: -(XA*XB + XTi).
312 let BaseName = "XVNMADDADP" in {
313 let isCommutable = 1 in
314 def XVNMADDADP : XX3Form<60, 225,
315 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
316 "xvnmaddadp $XT, $XA, $XB", IIC_VecFP,
317 [(set v2f64:$XT, (fneg (fma v2f64:$XA, v2f64:$XB, v2f64:$XTi)))]>,
318 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
320 let IsVSXFMAAlt = 1 in
321 def XVNMADDMDP : XX3Form<60, 233,
322 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
323 "xvnmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
324 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
328 let BaseName = "XVNMADDASP" in {
329 let isCommutable = 1 in
330 def XVNMADDASP : XX3Form<60, 193,
331 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
332 "xvnmaddasp $XT, $XA, $XB", IIC_VecFP,
333 [(set v4f32:$XT, (fneg (fma v4f32:$XA, v4f32:$XB, v4f32:$XTi)))]>,
334 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
336 let IsVSXFMAAlt = 1 in
337 def XVNMADDMSP : XX3Form<60, 201,
338 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
339 "xvnmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
340 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// nmsub: -(XA*XB - XTi).
344 let BaseName = "XVNMSUBADP" in {
345 let isCommutable = 1 in
346 def XVNMSUBADP : XX3Form<60, 241,
347 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
348 "xvnmsubadp $XT, $XA, $XB", IIC_VecFP,
349 [(set v2f64:$XT, (fneg (fma v2f64:$XA, v2f64:$XB, (fneg v2f64:$XTi))))]>,
350 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
352 let IsVSXFMAAlt = 1 in
353 def XVNMSUBMDP : XX3Form<60, 249,
354 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
355 "xvnmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
356 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
360 let BaseName = "XVNMSUBASP" in {
361 let isCommutable = 1 in
362 def XVNMSUBASP : XX3Form<60, 209,
363 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
364 "xvnmsubasp $XT, $XA, $XB", IIC_VecFP,
365 [(set v4f32:$XT, (fneg (fma v4f32:$XA, v4f32:$XB, (fneg v4f32:$XTi))))]>,
366 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
368 let IsVSXFMAAlt = 1 in
369 def XVNMSUBMSP : XX3Form<60, 217,
370 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
371 "xvnmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
372 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
376 // Division Instructions
// Scalar divide/sqrt plus the reciprocal (PPCfre) and reciprocal-sqrt
// (PPCfrsqrte) estimate instructions used for software divide/sqrt expansion.
377 def XSDIVDP : XX3Form<60, 56,
378 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
379 "xsdivdp $XT, $XA, $XB", IIC_FPDivD,
380 [(set f64:$XT, (fdiv f64:$XA, f64:$XB))]>;
381 def XSSQRTDP : XX2Form<60, 75,
382 (outs vsfrc:$XT), (ins vsfrc:$XB),
383 "xssqrtdp $XT, $XB", IIC_FPSqrtD,
384 [(set f64:$XT, (fsqrt f64:$XB))]>;
386 def XSREDP : XX2Form<60, 90,
387 (outs vsfrc:$XT), (ins vsfrc:$XB),
388 "xsredp $XT, $XB", IIC_VecFP,
389 [(set f64:$XT, (PPCfre f64:$XB))]>;
390 def XSRSQRTEDP : XX2Form<60, 74,
391 (outs vsfrc:$XT), (ins vsfrc:$XB),
392 "xsrsqrtedp $XT, $XB", IIC_VecFP,
393 [(set f64:$XT, (PPCfrsqrte f64:$XB))]>;
// "Test for software divide/sqrt" instructions write a CR field only;
// no ISel patterns (used via custom lowering if at all).
395 def XSTDIVDP : XX3Form_1<60, 61,
396 (outs crrc:$crD), (ins vsfrc:$XA, vsfrc:$XB),
397 "xstdivdp $crD, $XA, $XB", IIC_FPCompare, []>;
398 def XSTSQRTDP : XX2Form_1<60, 106,
399 (outs crrc:$crD), (ins vsfrc:$XB),
400 "xstsqrtdp $crD, $XB", IIC_FPCompare, []>;
// Vector counterparts.
402 def XVDIVDP : XX3Form<60, 120,
403 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
404 "xvdivdp $XT, $XA, $XB", IIC_FPDivD,
405 [(set v2f64:$XT, (fdiv v2f64:$XA, v2f64:$XB))]>;
406 def XVDIVSP : XX3Form<60, 88,
407 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
408 "xvdivsp $XT, $XA, $XB", IIC_FPDivS,
409 [(set v4f32:$XT, (fdiv v4f32:$XA, v4f32:$XB))]>;
411 def XVSQRTDP : XX2Form<60, 203,
412 (outs vsrc:$XT), (ins vsrc:$XB),
413 "xvsqrtdp $XT, $XB", IIC_FPSqrtD,
414 [(set v2f64:$XT, (fsqrt v2f64:$XB))]>;
415 def XVSQRTSP : XX2Form<60, 139,
416 (outs vsrc:$XT), (ins vsrc:$XB),
417 "xvsqrtsp $XT, $XB", IIC_FPSqrtS,
418 [(set v4f32:$XT, (fsqrt v4f32:$XB))]>;
420 def XVTDIVDP : XX3Form_1<60, 125,
421 (outs crrc:$crD), (ins vsrc:$XA, vsrc:$XB),
422 "xvtdivdp $crD, $XA, $XB", IIC_FPCompare, []>;
423 def XVTDIVSP : XX3Form_1<60, 93,
424 (outs crrc:$crD), (ins vsrc:$XA, vsrc:$XB),
425 "xvtdivsp $crD, $XA, $XB", IIC_FPCompare, []>;
427 def XVTSQRTDP : XX2Form_1<60, 234,
428 (outs crrc:$crD), (ins vsrc:$XB),
429 "xvtsqrtdp $crD, $XB", IIC_FPCompare, []>;
430 def XVTSQRTSP : XX2Form_1<60, 170,
431 (outs crrc:$crD), (ins vsrc:$XB),
432 "xvtsqrtsp $crD, $XB", IIC_FPCompare, []>;
434 def XVREDP : XX2Form<60, 218,
435 (outs vsrc:$XT), (ins vsrc:$XB),
436 "xvredp $XT, $XB", IIC_VecFP,
437 [(set v2f64:$XT, (PPCfre v2f64:$XB))]>;
438 def XVRESP : XX2Form<60, 154,
439 (outs vsrc:$XT), (ins vsrc:$XB),
440 "xvresp $XT, $XB", IIC_VecFP,
441 [(set v4f32:$XT, (PPCfre v4f32:$XB))]>;
443 def XVRSQRTEDP : XX2Form<60, 202,
444 (outs vsrc:$XT), (ins vsrc:$XB),
445 "xvrsqrtedp $XT, $XB", IIC_VecFP,
446 [(set v2f64:$XT, (PPCfrsqrte v2f64:$XB))]>;
447 def XVRSQRTESP : XX2Form<60, 138,
448 (outs vsrc:$XT), (ins vsrc:$XB),
449 "xvrsqrtesp $XT, $XB", IIC_VecFP,
450 [(set v4f32:$XT, (PPCfrsqrte v4f32:$XB))]>;
452 // Compare Instructions
// Scalar compares write a CR field; no patterns (custom-selected).
453 def XSCMPODP : XX3Form_1<60, 43,
454 (outs crrc:$crD), (ins vsfrc:$XA, vsfrc:$XB),
455 "xscmpodp $crD, $XA, $XB", IIC_FPCompare, []>;
456 def XSCMPUDP : XX3Form_1<60, 35,
457 (outs crrc:$crD), (ins vsfrc:$XA, vsfrc:$XB),
458 "xscmpudp $crD, $XA, $XB", IIC_FPCompare, []>;
// Vector compares produce a mask register result (v2i64/v4i32); each defm
// expands via XX3Form_Rcr to the plain and record ('.') forms.
460 defm XVCMPEQDP : XX3Form_Rcr<60, 99,
461 "xvcmpeqdp", "$XT, $XA, $XB", IIC_VecFPCompare,
462 int_ppc_vsx_xvcmpeqdp, v2i64, v2f64>;
463 defm XVCMPEQSP : XX3Form_Rcr<60, 67,
464 "xvcmpeqsp", "$XT, $XA, $XB", IIC_VecFPCompare,
465 int_ppc_vsx_xvcmpeqsp, v4i32, v4f32>;
466 defm XVCMPGEDP : XX3Form_Rcr<60, 115,
467 "xvcmpgedp", "$XT, $XA, $XB", IIC_VecFPCompare,
468 int_ppc_vsx_xvcmpgedp, v2i64, v2f64>;
469 defm XVCMPGESP : XX3Form_Rcr<60, 83,
470 "xvcmpgesp", "$XT, $XA, $XB", IIC_VecFPCompare,
471 int_ppc_vsx_xvcmpgesp, v4i32, v4f32>;
472 defm XVCMPGTDP : XX3Form_Rcr<60, 107,
473 "xvcmpgtdp", "$XT, $XA, $XB", IIC_VecFPCompare,
474 int_ppc_vsx_xvcmpgtdp, v2i64, v2f64>;
475 defm XVCMPGTSP : XX3Form_Rcr<60, 75,
476 "xvcmpgtsp", "$XT, $XA, $XB", IIC_VecFPCompare,
477 int_ppc_vsx_xvcmpgtsp, v4i32, v4f32>;
// Sign-manipulation instructions: abs, negated-abs, negate, copy-sign.
// Note the fcopysign operand order: the ISD node is (fcopysign mag, sign)
// while the instruction takes the sign source in $XA, hence ($XB, $XA).
480 def XSABSDP : XX2Form<60, 345,
481 (outs vsfrc:$XT), (ins vsfrc:$XB),
482 "xsabsdp $XT, $XB", IIC_VecFP,
483 [(set f64:$XT, (fabs f64:$XB))]>;
484 def XSNABSDP : XX2Form<60, 361,
485 (outs vsfrc:$XT), (ins vsfrc:$XB),
486 "xsnabsdp $XT, $XB", IIC_VecFP,
487 [(set f64:$XT, (fneg (fabs f64:$XB)))]>;
488 def XSNEGDP : XX2Form<60, 377,
489 (outs vsfrc:$XT), (ins vsfrc:$XB),
490 "xsnegdp $XT, $XB", IIC_VecFP,
491 [(set f64:$XT, (fneg f64:$XB))]>;
492 def XSCPSGNDP : XX3Form<60, 176,
493 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
494 "xscpsgndp $XT, $XA, $XB", IIC_VecFP,
495 [(set f64:$XT, (fcopysign f64:$XB, f64:$XA))]>;
497 def XVABSDP : XX2Form<60, 473,
498 (outs vsrc:$XT), (ins vsrc:$XB),
499 "xvabsdp $XT, $XB", IIC_VecFP,
500 [(set v2f64:$XT, (fabs v2f64:$XB))]>;
502 def XVABSSP : XX2Form<60, 409,
503 (outs vsrc:$XT), (ins vsrc:$XB),
504 "xvabssp $XT, $XB", IIC_VecFP,
505 [(set v4f32:$XT, (fabs v4f32:$XB))]>;
507 def XVCPSGNDP : XX3Form<60, 240,
508 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
509 "xvcpsgndp $XT, $XA, $XB", IIC_VecFP,
510 [(set v2f64:$XT, (fcopysign v2f64:$XB, v2f64:$XA))]>;
511 def XVCPSGNSP : XX3Form<60, 208,
512 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
513 "xvcpsgnsp $XT, $XA, $XB", IIC_VecFP,
514 [(set v4f32:$XT, (fcopysign v4f32:$XB, v4f32:$XA))]>;
516 def XVNABSDP : XX2Form<60, 489,
517 (outs vsrc:$XT), (ins vsrc:$XB),
518 "xvnabsdp $XT, $XB", IIC_VecFP,
519 [(set v2f64:$XT, (fneg (fabs v2f64:$XB)))]>;
520 def XVNABSSP : XX2Form<60, 425,
521 (outs vsrc:$XT), (ins vsrc:$XB),
522 "xvnabssp $XT, $XB", IIC_VecFP,
523 [(set v4f32:$XT, (fneg (fabs v4f32:$XB)))]>;
525 def XVNEGDP : XX2Form<60, 505,
526 (outs vsrc:$XT), (ins vsrc:$XB),
527 "xvnegdp $XT, $XB", IIC_VecFP,
528 [(set v2f64:$XT, (fneg v2f64:$XB))]>;
529 def XVNEGSP : XX2Form<60, 441,
530 (outs vsrc:$XT), (ins vsrc:$XB),
531 "xvnegsp $XT, $XB", IIC_VecFP,
532 [(set v4f32:$XT, (fneg v4f32:$XB))]>;
534 // Conversion Instructions
// Naming: xscv/xvcv + source format + destination format
// (dp=f64, sp=f32, sxds/uxds=signed/unsigned i64, sxws/uxws=signed/unsigned
// i32). Defs with empty pattern lists are selected by custom lowering or
// the swap-aware patterns elsewhere.
535 def XSCVDPSP : XX2Form<60, 265,
536 (outs vsfrc:$XT), (ins vsfrc:$XB),
537 "xscvdpsp $XT, $XB", IIC_VecFP, []>;
// Scalar f64 -> integer conversions map onto the PPC fcti* truncation nodes.
538 def XSCVDPSXDS : XX2Form<60, 344,
539 (outs vsfrc:$XT), (ins vsfrc:$XB),
540 "xscvdpsxds $XT, $XB", IIC_VecFP,
541 [(set f64:$XT, (PPCfctidz f64:$XB))]>;
542 def XSCVDPSXWS : XX2Form<60, 88,
543 (outs vsfrc:$XT), (ins vsfrc:$XB),
544 "xscvdpsxws $XT, $XB", IIC_VecFP,
545 [(set f64:$XT, (PPCfctiwz f64:$XB))]>;
546 def XSCVDPUXDS : XX2Form<60, 328,
547 (outs vsfrc:$XT), (ins vsfrc:$XB),
548 "xscvdpuxds $XT, $XB", IIC_VecFP,
549 [(set f64:$XT, (PPCfctiduz f64:$XB))]>;
550 def XSCVDPUXWS : XX2Form<60, 72,
551 (outs vsfrc:$XT), (ins vsfrc:$XB),
552 "xscvdpuxws $XT, $XB", IIC_VecFP,
553 [(set f64:$XT, (PPCfctiwuz f64:$XB))]>;
554 def XSCVSPDP : XX2Form<60, 329,
555 (outs vsfrc:$XT), (ins vsfrc:$XB),
556 "xscvspdp $XT, $XB", IIC_VecFP, []>;
// Scalar integer -> f64 via the PPC fcfid* nodes.
557 def XSCVSXDDP : XX2Form<60, 376,
558 (outs vsfrc:$XT), (ins vsfrc:$XB),
559 "xscvsxddp $XT, $XB", IIC_VecFP,
560 [(set f64:$XT, (PPCfcfid f64:$XB))]>;
561 def XSCVUXDDP : XX2Form<60, 360,
562 (outs vsfrc:$XT), (ins vsfrc:$XB),
563 "xscvuxddp $XT, $XB", IIC_VecFP,
564 [(set f64:$XT, (PPCfcfidu f64:$XB))]>;
// Vector conversions; the doubleword forms pattern-match the generic
// fp_to_sint/fp_to_uint/sint_to_fp/uint_to_fp nodes directly.
566 def XVCVDPSP : XX2Form<60, 393,
567 (outs vsrc:$XT), (ins vsrc:$XB),
568 "xvcvdpsp $XT, $XB", IIC_VecFP, []>;
569 def XVCVDPSXDS : XX2Form<60, 472,
570 (outs vsrc:$XT), (ins vsrc:$XB),
571 "xvcvdpsxds $XT, $XB", IIC_VecFP,
572 [(set v2i64:$XT, (fp_to_sint v2f64:$XB))]>;
573 def XVCVDPSXWS : XX2Form<60, 216,
574 (outs vsrc:$XT), (ins vsrc:$XB),
575 "xvcvdpsxws $XT, $XB", IIC_VecFP, []>;
576 def XVCVDPUXDS : XX2Form<60, 456,
577 (outs vsrc:$XT), (ins vsrc:$XB),
578 "xvcvdpuxds $XT, $XB", IIC_VecFP,
579 [(set v2i64:$XT, (fp_to_uint v2f64:$XB))]>;
580 def XVCVDPUXWS : XX2Form<60, 200,
581 (outs vsrc:$XT), (ins vsrc:$XB),
582 "xvcvdpuxws $XT, $XB", IIC_VecFP, []>;
584 def XVCVSPDP : XX2Form<60, 457,
585 (outs vsrc:$XT), (ins vsrc:$XB),
586 "xvcvspdp $XT, $XB", IIC_VecFP, []>;
587 def XVCVSPSXDS : XX2Form<60, 408,
588 (outs vsrc:$XT), (ins vsrc:$XB),
589 "xvcvspsxds $XT, $XB", IIC_VecFP, []>;
590 def XVCVSPSXWS : XX2Form<60, 152,
591 (outs vsrc:$XT), (ins vsrc:$XB),
592 "xvcvspsxws $XT, $XB", IIC_VecFP, []>;
593 def XVCVSPUXDS : XX2Form<60, 392,
594 (outs vsrc:$XT), (ins vsrc:$XB),
595 "xvcvspuxds $XT, $XB", IIC_VecFP, []>;
596 def XVCVSPUXWS : XX2Form<60, 136,
597 (outs vsrc:$XT), (ins vsrc:$XB),
598 "xvcvspuxws $XT, $XB", IIC_VecFP, []>;
599 def XVCVSXDDP : XX2Form<60, 504,
600 (outs vsrc:$XT), (ins vsrc:$XB),
601 "xvcvsxddp $XT, $XB", IIC_VecFP,
602 [(set v2f64:$XT, (sint_to_fp v2i64:$XB))]>;
603 def XVCVSXDSP : XX2Form<60, 440,
604 (outs vsrc:$XT), (ins vsrc:$XB),
605 "xvcvsxdsp $XT, $XB", IIC_VecFP, []>;
606 def XVCVSXWDP : XX2Form<60, 248,
607 (outs vsrc:$XT), (ins vsrc:$XB),
608 "xvcvsxwdp $XT, $XB", IIC_VecFP, []>;
609 def XVCVSXWSP : XX2Form<60, 184,
610 (outs vsrc:$XT), (ins vsrc:$XB),
611 "xvcvsxwsp $XT, $XB", IIC_VecFP, []>;
612 def XVCVUXDDP : XX2Form<60, 488,
613 (outs vsrc:$XT), (ins vsrc:$XB),
614 "xvcvuxddp $XT, $XB", IIC_VecFP,
615 [(set v2f64:$XT, (uint_to_fp v2i64:$XB))]>;
616 def XVCVUXDSP : XX2Form<60, 424,
617 (outs vsrc:$XT), (ins vsrc:$XB),
618 "xvcvuxdsp $XT, $XB", IIC_VecFP, []>;
619 def XVCVUXWDP : XX2Form<60, 232,
620 (outs vsrc:$XT), (ins vsrc:$XB),
621 "xvcvuxwdp $XT, $XB", IIC_VecFP, []>;
622 def XVCVUXWSP : XX2Form<60, 168,
623 (outs vsrc:$XT), (ins vsrc:$XB),
624 "xvcvuxwsp $XT, $XB", IIC_VecFP, []>;
626 // Rounding Instructions
// Suffix -> DAG node mapping (same for scalar xsrdpi*, vector xvrdpi*/xvrspi*):
//   (none) frnd (round to nearest),  c fnearbyint,  m ffloor,
//   p fceil,  z ftrunc.
627 def XSRDPI : XX2Form<60, 73,
628 (outs vsfrc:$XT), (ins vsfrc:$XB),
629 "xsrdpi $XT, $XB", IIC_VecFP,
630 [(set f64:$XT, (frnd f64:$XB))]>;
631 def XSRDPIC : XX2Form<60, 107,
632 (outs vsfrc:$XT), (ins vsfrc:$XB),
633 "xsrdpic $XT, $XB", IIC_VecFP,
634 [(set f64:$XT, (fnearbyint f64:$XB))]>;
635 def XSRDPIM : XX2Form<60, 121,
636 (outs vsfrc:$XT), (ins vsfrc:$XB),
637 "xsrdpim $XT, $XB", IIC_VecFP,
638 [(set f64:$XT, (ffloor f64:$XB))]>;
639 def XSRDPIP : XX2Form<60, 105,
640 (outs vsfrc:$XT), (ins vsfrc:$XB),
641 "xsrdpip $XT, $XB", IIC_VecFP,
642 [(set f64:$XT, (fceil f64:$XB))]>;
643 def XSRDPIZ : XX2Form<60, 89,
644 (outs vsfrc:$XT), (ins vsfrc:$XB),
645 "xsrdpiz $XT, $XB", IIC_VecFP,
646 [(set f64:$XT, (ftrunc f64:$XB))]>;
648 def XVRDPI : XX2Form<60, 201,
649 (outs vsrc:$XT), (ins vsrc:$XB),
650 "xvrdpi $XT, $XB", IIC_VecFP,
651 [(set v2f64:$XT, (frnd v2f64:$XB))]>;
652 def XVRDPIC : XX2Form<60, 235,
653 (outs vsrc:$XT), (ins vsrc:$XB),
654 "xvrdpic $XT, $XB", IIC_VecFP,
655 [(set v2f64:$XT, (fnearbyint v2f64:$XB))]>;
656 def XVRDPIM : XX2Form<60, 249,
657 (outs vsrc:$XT), (ins vsrc:$XB),
658 "xvrdpim $XT, $XB", IIC_VecFP,
659 [(set v2f64:$XT, (ffloor v2f64:$XB))]>;
660 def XVRDPIP : XX2Form<60, 233,
661 (outs vsrc:$XT), (ins vsrc:$XB),
662 "xvrdpip $XT, $XB", IIC_VecFP,
663 [(set v2f64:$XT, (fceil v2f64:$XB))]>;
664 def XVRDPIZ : XX2Form<60, 217,
665 (outs vsrc:$XT), (ins vsrc:$XB),
666 "xvrdpiz $XT, $XB", IIC_VecFP,
667 [(set v2f64:$XT, (ftrunc v2f64:$XB))]>;
669 def XVRSPI : XX2Form<60, 137,
670 (outs vsrc:$XT), (ins vsrc:$XB),
671 "xvrspi $XT, $XB", IIC_VecFP,
672 [(set v4f32:$XT, (frnd v4f32:$XB))]>;
673 def XVRSPIC : XX2Form<60, 171,
674 (outs vsrc:$XT), (ins vsrc:$XB),
675 "xvrspic $XT, $XB", IIC_VecFP,
676 [(set v4f32:$XT, (fnearbyint v4f32:$XB))]>;
677 def XVRSPIM : XX2Form<60, 185,
678 (outs vsrc:$XT), (ins vsrc:$XB),
679 "xvrspim $XT, $XB", IIC_VecFP,
680 [(set v4f32:$XT, (ffloor v4f32:$XB))]>;
681 def XVRSPIP : XX2Form<60, 169,
682 (outs vsrc:$XT), (ins vsrc:$XB),
683 "xvrspip $XT, $XB", IIC_VecFP,
684 [(set v4f32:$XT, (fceil v4f32:$XB))]>;
685 def XVRSPIZ : XX2Form<60, 153,
686 (outs vsrc:$XT), (ins vsrc:$XB),
687 "xvrspiz $XT, $XB", IIC_VecFP,
688 [(set v4f32:$XT, (ftrunc v4f32:$XB))]>;
690 // Max/Min Instructions
// Selected through target intrinsics rather than generic fmaxnum/fminnum
// nodes. NOTE(review): the '[(set <type>:$XT,' opener lines of each pattern
// (e.g. original lines 695, 700, 706) were lost in extraction.
691 let isCommutable = 1 in {
692 def XSMAXDP : XX3Form<60, 160,
693 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
694 "xsmaxdp $XT, $XA, $XB", IIC_VecFP,
696 (int_ppc_vsx_xsmaxdp vsfrc:$XA, vsfrc:$XB))]>;
697 def XSMINDP : XX3Form<60, 168,
698 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
699 "xsmindp $XT, $XA, $XB", IIC_VecFP,
701 (int_ppc_vsx_xsmindp vsfrc:$XA, vsfrc:$XB))]>;
703 def XVMAXDP : XX3Form<60, 224,
704 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
705 "xvmaxdp $XT, $XA, $XB", IIC_VecFP,
707 (int_ppc_vsx_xvmaxdp vsrc:$XA, vsrc:$XB))]>;
708 def XVMINDP : XX3Form<60, 232,
709 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
710 "xvmindp $XT, $XA, $XB", IIC_VecFP,
712 (int_ppc_vsx_xvmindp vsrc:$XA, vsrc:$XB))]>;
714 def XVMAXSP : XX3Form<60, 192,
715 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
716 "xvmaxsp $XT, $XA, $XB", IIC_VecFP,
718 (int_ppc_vsx_xvmaxsp vsrc:$XA, vsrc:$XB))]>;
719 def XVMINSP : XX3Form<60, 200,
720 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
721 "xvminsp $XT, $XA, $XB", IIC_VecFP,
723 (int_ppc_vsx_xvminsp vsrc:$XA, vsrc:$XB))]>;
727 // Logical Instructions
// Bitwise ops on the full 128-bit register; patterns are written in v4i32
// (other vector types reach these via the bitconvert patterns below).
728 let isCommutable = 1 in
729 def XXLAND : XX3Form<60, 130,
730 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
731 "xxland $XT, $XA, $XB", IIC_VecGeneral,
732 [(set v4i32:$XT, (and v4i32:$XA, v4i32:$XB))]>;
// andc = XA & ~XB; not commutative.
733 def XXLANDC : XX3Form<60, 138,
734 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
735 "xxlandc $XT, $XA, $XB", IIC_VecGeneral,
736 [(set v4i32:$XT, (and v4i32:$XA,
737 (vnot_ppc v4i32:$XB)))]>;
738 let isCommutable = 1 in {
// nor = ~(XA | XB); continuation of the pattern (original line 743) was
// lost in extraction.
739 def XXLNOR : XX3Form<60, 162,
740 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
741 "xxlnor $XT, $XA, $XB", IIC_VecGeneral,
742 [(set v4i32:$XT, (vnot_ppc (or v4i32:$XA,
744 def XXLOR : XX3Form<60, 146,
745 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
746 "xxlor $XT, $XA, $XB", IIC_VecGeneral,
747 [(set v4i32:$XT, (or v4i32:$XA, v4i32:$XB))]>;
// Scalar-register (vsfrc) clone of xxlor, codegen-only, same encoding.
748 let isCodeGenOnly = 1 in
749 def XXLORf: XX3Form<60, 146,
750 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
751 "xxlor $XT, $XA, $XB", IIC_VecGeneral, []>;
752 def XXLXOR : XX3Form<60, 154,
753 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
754 "xxlxor $XT, $XA, $XB", IIC_VecGeneral,
755 [(set v4i32:$XT, (xor v4i32:$XA, v4i32:$XB))]>;
758 // Permutation Instructions
// No ISel patterns here; these are reached by explicit Pat<> rules and
// custom lowering (see the XXPERMDI-based patterns/aliases further down).
759 def XXMRGHW : XX3Form<60, 18,
760 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
761 "xxmrghw $XT, $XA, $XB", IIC_VecPerm, []>;
762 def XXMRGLW : XX3Form<60, 50,
763 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
764 "xxmrglw $XT, $XA, $XB", IIC_VecPerm, []>;
// $DM is a 2-bit doubleword-select immediate; DM=2 with XA==XB swaps the
// two doublewords (the xxswapd idiom).
766 def XXPERMDI : XX3Form_2<60, 10,
767 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, u2imm:$DM),
768 "xxpermdi $XT, $XA, $XB, $DM", IIC_VecPerm, []>;
769 def XXSEL : XX4Form<60, 3,
770 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, vsrc:$XC),
771 "xxsel $XT, $XA, $XB, $XC", IIC_VecPerm, []>;
773 def XXSLDWI : XX3Form_2<60, 2,
774 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, u2imm:$SHW),
775 "xxsldwi $XT, $XA, $XB, $SHW", IIC_VecPerm, []>;
776 def XXSPLTW : XX2Form_2<60, 164,
777 (outs vsrc:$XT), (ins vsrc:$XB, u2imm:$UIM),
778 "xxspltw $XT, $XB, $UIM", IIC_VecPerm, []>;
781 // SELECT_CC_* - Used to implement the SELECT_CC DAG operation. Expanded after
782 // instruction selection into a branch sequence.
// One pseudo pair per VSX register view: VSRC (v2f64), VSFRC (f64, f8rc),
// VSSRC (f32, f4rc). The *_CC variants take a CR field + branch opcode; the
// plain SELECT_* variants take a single CR bit. Several asm-string and
// pattern lines (original 785, 788-789, 792-793, 798, 801-802, 807, 810-811)
// were lost in extraction.
783 let usesCustomInserter = 1, // Expanded after instruction selection.
784 PPC970_Single = 1 in {
786 def SELECT_CC_VSRC: Pseudo<(outs vsrc:$dst),
787 (ins crrc:$cond, vsrc:$T, vsrc:$F, i32imm:$BROPC),
790 def SELECT_VSRC: Pseudo<(outs vsrc:$dst),
791 (ins crbitrc:$cond, vsrc:$T, vsrc:$F),
794 (select i1:$cond, v2f64:$T, v2f64:$F))]>;
795 def SELECT_CC_VSFRC: Pseudo<(outs f8rc:$dst),
796 (ins crrc:$cond, f8rc:$T, f8rc:$F,
797 i32imm:$BROPC), "#SELECT_CC_VSFRC",
799 def SELECT_VSFRC: Pseudo<(outs f8rc:$dst),
800 (ins crbitrc:$cond, f8rc:$T, f8rc:$F),
803 (select i1:$cond, f64:$T, f64:$F))]>;
804 def SELECT_CC_VSSRC: Pseudo<(outs f4rc:$dst),
805 (ins crrc:$cond, f4rc:$T, f4rc:$F,
806 i32imm:$BROPC), "#SELECT_CC_VSSRC",
808 def SELECT_VSSRC: Pseudo<(outs f4rc:$dst),
809 (ins crbitrc:$cond, f4rc:$T, f4rc:$F),
812 (select i1:$cond, f32:$T, f32:$F))]>;
813 } // usesCustomInserter
// Extended-mnemonic aliases. xvmov* = copy-sign with both operands equal;
// xxspltd/xxmrghd/xxmrgld/xxswapd are xxpermdi with fixed DM values
// (splat lo uses DM=3 because XA==XB, swap uses DM=2).
816 def : InstAlias<"xvmovdp $XT, $XB",
817 (XVCPSGNDP vsrc:$XT, vsrc:$XB, vsrc:$XB)>;
818 def : InstAlias<"xvmovsp $XT, $XB",
819 (XVCPSGNSP vsrc:$XT, vsrc:$XB, vsrc:$XB)>;
821 def : InstAlias<"xxspltd $XT, $XB, 0",
822 (XXPERMDI vsrc:$XT, vsrc:$XB, vsrc:$XB, 0)>;
823 def : InstAlias<"xxspltd $XT, $XB, 1",
824 (XXPERMDI vsrc:$XT, vsrc:$XB, vsrc:$XB, 3)>;
825 def : InstAlias<"xxmrghd $XT, $XA, $XB",
826 (XXPERMDI vsrc:$XT, vsrc:$XA, vsrc:$XB, 0)>;
827 def : InstAlias<"xxmrgld $XT, $XA, $XB",
828 (XXPERMDI vsrc:$XT, vsrc:$XA, vsrc:$XB, 3)>;
829 def : InstAlias<"xxswapd $XT, $XB",
830 (XXPERMDI vsrc:$XT, vsrc:$XB, vsrc:$XB, 2)>;
832 let AddedComplexity = 400 in { // Prefer VSX patterns over non-VSX patterns.
// scalar_to_vector / vector_extract differ by endianness: on LE the scalar
// lives in the opposite doubleword, so extraction of element 0 needs an
// xxpermdi swap (DM=2) first, and scalar_to_vector splats via xxpermdi so
// either lane is valid.
834 let Predicates = [IsBigEndian] in {
835 def : Pat<(v2f64 (scalar_to_vector f64:$A)),
836 (v2f64 (SUBREG_TO_REG (i64 1), $A, sub_64))>;
838 def : Pat<(f64 (vector_extract v2f64:$S, 0)),
839 (f64 (EXTRACT_SUBREG $S, sub_64))>;
840 def : Pat<(f64 (vector_extract v2f64:$S, 1)),
841 (f64 (EXTRACT_SUBREG (XXPERMDI $S, $S, 2), sub_64))>;
844 let Predicates = [IsLittleEndian] in {
845 def : Pat<(v2f64 (scalar_to_vector f64:$A)),
846 (v2f64 (XXPERMDI (SUBREG_TO_REG (i64 1), $A, sub_64),
847 (SUBREG_TO_REG (i64 1), $A, sub_64), 0))>;
849 def : Pat<(f64 (vector_extract v2f64:$S, 0)),
850 (f64 (EXTRACT_SUBREG (XXPERMDI $S, $S, 2), sub_64))>;
851 def : Pat<(f64 (vector_extract v2f64:$S, 1)),
852 (f64 (EXTRACT_SUBREG $S, sub_64))>;
855 // Additional fnmsub patterns: -a*c + b == -(a*c - b)
// Maps (fma (fneg a), c, b) — in either negated-operand position — onto the
// A-form nmsub instructions. Note the operand rotation: the addend $B
// becomes the tied accumulator operand of XS/XV NMSUBADP/ASP.
856 def : Pat<(fma (fneg f64:$A), f64:$C, f64:$B),
857 (XSNMSUBADP $B, $C, $A)>;
858 def : Pat<(fma f64:$A, (fneg f64:$C), f64:$B),
859 (XSNMSUBADP $B, $C, $A)>;
861 def : Pat<(fma (fneg v2f64:$A), v2f64:$C, v2f64:$B),
862 (XVNMSUBADP $B, $C, $A)>;
863 def : Pat<(fma v2f64:$A, (fneg v2f64:$C), v2f64:$B),
864 (XVNMSUBADP $B, $C, $A)>;
866 def : Pat<(fma (fneg v4f32:$A), v4f32:$C, v4f32:$B),
867 (XVNMSUBASP $B, $C, $A)>;
868 def : Pat<(fma v4f32:$A, (fneg v4f32:$C), v4f32:$B),
869 (XVNMSUBASP $B, $C, $A)>;
// Bitcasts between vector types are register-class copies only; no
// instruction is emitted. Casts whose destination is a v2f64/v2i64 VSX type
// go to VSRC; the others target VRRC.
871 def : Pat<(v2f64 (bitconvert v4f32:$A)),
872 (COPY_TO_REGCLASS $A, VSRC)>;
873 def : Pat<(v2f64 (bitconvert v4i32:$A)),
874 (COPY_TO_REGCLASS $A, VSRC)>;
875 def : Pat<(v2f64 (bitconvert v8i16:$A)),
876 (COPY_TO_REGCLASS $A, VSRC)>;
877 def : Pat<(v2f64 (bitconvert v16i8:$A)),
878 (COPY_TO_REGCLASS $A, VSRC)>;
880 def : Pat<(v4f32 (bitconvert v2f64:$A)),
881 (COPY_TO_REGCLASS $A, VRRC)>;
882 def : Pat<(v4i32 (bitconvert v2f64:$A)),
883 (COPY_TO_REGCLASS $A, VRRC)>;
884 def : Pat<(v8i16 (bitconvert v2f64:$A)),
885 (COPY_TO_REGCLASS $A, VRRC)>;
886 def : Pat<(v16i8 (bitconvert v2f64:$A)),
887 (COPY_TO_REGCLASS $A, VRRC)>;
889 def : Pat<(v2i64 (bitconvert v4f32:$A)),
890 (COPY_TO_REGCLASS $A, VSRC)>;
891 def : Pat<(v2i64 (bitconvert v4i32:$A)),
892 (COPY_TO_REGCLASS $A, VSRC)>;
893 def : Pat<(v2i64 (bitconvert v8i16:$A)),
894 (COPY_TO_REGCLASS $A, VSRC)>;
895 def : Pat<(v2i64 (bitconvert v16i8:$A)),
896 (COPY_TO_REGCLASS $A, VSRC)>;
898 def : Pat<(v4f32 (bitconvert v2i64:$A)),
899 (COPY_TO_REGCLASS $A, VRRC)>;
900 def : Pat<(v4i32 (bitconvert v2i64:$A)),
901 (COPY_TO_REGCLASS $A, VRRC)>;
902 def : Pat<(v8i16 (bitconvert v2i64:$A)),
903 (COPY_TO_REGCLASS $A, VRRC)>;
904 def : Pat<(v16i8 (bitconvert v2i64:$A)),
905 (COPY_TO_REGCLASS $A, VRRC)>;
// NOTE(review): v2f64<->v2i64 and v2f64<->v1i128 casts copy to VRRC even
// though both sides are VSX-native types — verify against upstream whether
// VSRC was intended here.
907 def : Pat<(v2f64 (bitconvert v2i64:$A)),
908 (COPY_TO_REGCLASS $A, VRRC)>;
909 def : Pat<(v2i64 (bitconvert v2f64:$A)),
910 (COPY_TO_REGCLASS $A, VRRC)>;
912 def : Pat<(v2f64 (bitconvert v1i128:$A)),
913 (COPY_TO_REGCLASS $A, VRRC)>;
914 def : Pat<(v1i128 (bitconvert v2f64:$A)),
915 (COPY_TO_REGCLASS $A, VRRC)>;
917 // sign extension patterns
918 // To extend "in place" from v2i32 to v2i64, we have input data like:
919 // | undef | i32 | undef | i32 |
920 // but xvcvsxwdp expects the input in big-Endian format:
921 // | i32 | undef | i32 | undef |
922 // so we need to shift everything to the left by one i32 (word) before
// converting.  XXSLDWI by 1 word performs that shift; XVCVSXWDP widens the
// words to f64, and XVCVDPSXDS converts back to i64 for the pure-integer case.
924 def : Pat<(sext_inreg v2i64:$C, v2i32),
925 (XVCVDPSXDS (XVCVSXWDP (XXSLDWI $C, $C, 1)))>;
// When the extended value feeds sint_to_fp, the round trip through i64 is
// skipped and the f64 conversion result is used directly.
926 def : Pat<(v2f64 (sint_to_fp (sext_inreg v2i64:$C, v2i32))),
927 (XVCVSXWDP (XXSLDWI $C, $C, 1))>;
// Vector loads map to the element-ordered VSX loads (LXVD2X/LXVW4X).
930 def : Pat<(v2f64 (load xoaddr:$src)), (LXVD2X xoaddr:$src)>;
931 def : Pat<(v2i64 (load xoaddr:$src)), (LXVD2X xoaddr:$src)>;
932 def : Pat<(v4i32 (load xoaddr:$src)), (LXVW4X xoaddr:$src)>;
// Target-specific node for an explicit lxvd2x (used by the LE swap lowering).
933 def : Pat<(v2f64 (PPClxvd2x xoaddr:$src)), (LXVD2X xoaddr:$src)>;
// Stores: both the intrinsic forms and the generic store node select the
// corresponding STXVD2X/STXVW4X instructions.
936 def : Pat<(int_ppc_vsx_stxvd2x v2f64:$rS, xoaddr:$dst),
937 (STXVD2X $rS, xoaddr:$dst)>;
938 def : Pat<(store v2i64:$rS, xoaddr:$dst), (STXVD2X $rS, xoaddr:$dst)>;
939 def : Pat<(int_ppc_vsx_stxvw4x v4i32:$rS, xoaddr:$dst),
940 (STXVW4X $rS, xoaddr:$dst)>;
941 def : Pat<(PPCstxvd2x v2f64:$rS, xoaddr:$dst), (STXVD2X $rS, xoaddr:$dst)>;
// PPCxxswapd (doubleword swap) is XXPERMDI with selector 2, for every
// 128-bit vector type.
944 def : Pat<(v2f64 (PPCxxswapd v2f64:$src)), (XXPERMDI $src, $src, 2)>;
945 def : Pat<(v2i64 (PPCxxswapd v2i64:$src)), (XXPERMDI $src, $src, 2)>;
946 def : Pat<(v4f32 (PPCxxswapd v4f32:$src)), (XXPERMDI $src, $src, 2)>;
947 def : Pat<(v4i32 (PPCxxswapd v4i32:$src)), (XXPERMDI $src, $src, 2)>;
// Select-on-i1-compare for v2f64: each condition code is realized by a CR
// logical op combining the two i1 operands, feeding the SELECT_VSRC pseudo.
// Note the signed/unsigned pairs simply swap $lhs/$rhs into the same CR op
// (e.g. SETLT uses CRANDC $lhs,$rhs while SETULT uses CRANDC $rhs,$lhs).
950 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETLT)),
951 (SELECT_VSRC (CRANDC $lhs, $rhs), $tval, $fval)>;
952 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETULT)),
953 (SELECT_VSRC (CRANDC $rhs, $lhs), $tval, $fval)>;
954 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETLE)),
955 (SELECT_VSRC (CRORC $lhs, $rhs), $tval, $fval)>;
956 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETULE)),
957 (SELECT_VSRC (CRORC $rhs, $lhs), $tval, $fval)>;
958 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETEQ)),
959 (SELECT_VSRC (CREQV $lhs, $rhs), $tval, $fval)>;
960 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETGE)),
961 (SELECT_VSRC (CRORC $rhs, $lhs), $tval, $fval)>;
962 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETUGE)),
963 (SELECT_VSRC (CRORC $lhs, $rhs), $tval, $fval)>;
964 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETGT)),
965 (SELECT_VSRC (CRANDC $rhs, $lhs), $tval, $fval)>;
966 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETUGT)),
967 (SELECT_VSRC (CRANDC $lhs, $rhs), $tval, $fval)>;
968 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETNE)),
969 (SELECT_VSRC (CRXOR $lhs, $rhs), $tval, $fval)>;
// Same select-on-i1-compare lattice as the v2f64 patterns above, but for
// scalar f64 in VSX scalar registers (SELECT_VSFRC pseudo).
971 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETLT)),
972 (SELECT_VSFRC (CRANDC $lhs, $rhs), $tval, $fval)>;
973 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETULT)),
974 (SELECT_VSFRC (CRANDC $rhs, $lhs), $tval, $fval)>;
975 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETLE)),
976 (SELECT_VSFRC (CRORC $lhs, $rhs), $tval, $fval)>;
977 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETULE)),
978 (SELECT_VSFRC (CRORC $rhs, $lhs), $tval, $fval)>;
979 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETEQ)),
980 (SELECT_VSFRC (CREQV $lhs, $rhs), $tval, $fval)>;
981 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETGE)),
982 (SELECT_VSFRC (CRORC $rhs, $lhs), $tval, $fval)>;
983 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETUGE)),
984 (SELECT_VSFRC (CRORC $lhs, $rhs), $tval, $fval)>;
985 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETGT)),
986 (SELECT_VSFRC (CRANDC $rhs, $lhs), $tval, $fval)>;
987 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETUGT)),
988 (SELECT_VSFRC (CRANDC $lhs, $rhs), $tval, $fval)>;
989 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETNE)),
990 (SELECT_VSFRC (CRXOR $lhs, $rhs), $tval, $fval)>;
// Intrinsic-to-instruction patterns for divide, reciprocal estimate, and
// reciprocal-square-root estimate.
// NOTE(review): the result DAG line of each Pat below (e.g. "(XVDIVSP $A,
// $B)>;") is missing from this copy of the file -- the extraction dropped
// the intervening original lines.  Restore from the upstream source before
// building; the records as shown are syntactically incomplete.
993 def : Pat<(int_ppc_vsx_xvdivsp v4f32:$A, v4f32:$B),
995 def : Pat<(int_ppc_vsx_xvdivdp v2f64:$A, v2f64:$B),
998 // Reciprocal estimate
999 def : Pat<(int_ppc_vsx_xvresp v4f32:$A),
1001 def : Pat<(int_ppc_vsx_xvredp v2f64:$A),
1004 // Recip. square root estimate
1005 def : Pat<(int_ppc_vsx_xvrsqrtesp v4f32:$A),
1007 def : Pat<(int_ppc_vsx_xvrsqrtedp v2f64:$A),
1010 } // AddedComplexity
1013 // The following VSX instructions were introduced in Power ISA 2.07
1014 /* FIXME: if the operands are v2i64, these patterns will not match.
1015 we should define new patterns or otherwise match the same patterns
1016 when the elements are larger than i32.
// Subtarget predicates gating the ISA 2.07 (POWER8) VSX additions.
1018 def HasP8Vector : Predicate<"PPCSubTarget->hasP8Vector()">;
1019 def HasDirectMove : Predicate<"PPCSubTarget->hasDirectMove()">;
1020 let Predicates = [HasP8Vector] in {
1021 let AddedComplexity = 400 in { // Prefer VSX patterns over non-VSX patterns.
// Vector logical operations added in ISA 2.07: eqv, nand, orc.  These are
// bitwise and therefore not lane-sensitive (see the swap-optimization note
// at the top of the file).
1022 let isCommutable = 1 in {
1023 def XXLEQV : XX3Form<60, 186,
1024 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1025 "xxleqv $XT, $XA, $XB", IIC_VecGeneral,
1026 [(set v4i32:$XT, (vnot_ppc (xor v4i32:$XA, v4i32:$XB)))]>;
1027 def XXLNAND : XX3Form<60, 178,
1028 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1029 "xxlnand $XT, $XA, $XB", IIC_VecGeneral,
// NOTE(review): the tail of this pattern (the "$XB))))]>;" continuation and
// the closing of the isCommutable block) is missing from this copy -- the
// extraction dropped original lines 1031-1033.  Restore from upstream.
1030 [(set v4i32:$XT, (vnot_ppc (and v4i32:$XA,
// NOTE(review): the result DAG of this intrinsic pattern is likewise missing.
1034 def : Pat<(int_ppc_vsx_xxleqv v4i32:$A, v4i32:$B),
1037 def XXLORC : XX3Form<60, 170,
1038 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1039 "xxlorc $XT, $XA, $XB", IIC_VecGeneral,
1040 [(set v4i32:$XT, (or v4i32:$XA, (vnot_ppc v4i32:$XB)))]>;
1042 // VSX scalar loads introduced in ISA 2.07
// LXSSPX loads a single-precision scalar; LXSIWAX/LXSIWZX load a word with
// algebraic (sign) / zero extension into the FP doubleword format, matching
// the PPClfiwax/PPClfiwzx target nodes.
1043 let mayLoad = 1 in {
1044 def LXSSPX : XX1Form<31, 524, (outs vssrc:$XT), (ins memrr:$src),
1045 "lxsspx $XT, $src", IIC_LdStLFD,
1046 [(set f32:$XT, (load xoaddr:$src))]>;
1047 def LXSIWAX : XX1Form<31, 76, (outs vsfrc:$XT), (ins memrr:$src),
1048 "lxsiwax $XT, $src", IIC_LdStLFD,
1049 [(set f64:$XT, (PPClfiwax xoaddr:$src))]>;
1050 def LXSIWZX : XX1Form<31, 12, (outs vsfrc:$XT), (ins memrr:$src),
1051 "lxsiwzx $XT, $src", IIC_LdStLFD,
1052 [(set f64:$XT, (PPClfiwzx xoaddr:$src))]>;
1055 // VSX scalar stores introduced in ISA 2.07
1056 let mayStore = 1 in {
1057 def STXSSPX : XX1Form<31, 652, (outs), (ins vssrc:$XT, memrr:$dst),
1058 "stxsspx $XT, $dst", IIC_LdStSTFD,
1059 [(store f32:$XT, xoaddr:$dst)]>;
1060 def STXSIWX : XX1Form<31, 140, (outs), (ins vsfrc:$XT, memrr:$dst),
1061 "stxsiwx $XT, $dst", IIC_LdStSTFD,
1062 [(PPCstfiwx f64:$XT, xoaddr:$dst)]>;
// f32->f64 widening: an extending load reuses LXSSPX; a register fextend is
// a pure register-class copy (the SP value is already stored in DP format
// in the VSX register, so no conversion instruction is needed).
1065 def : Pat<(f64 (extloadf32 xoaddr:$src)),
1066 (COPY_TO_REGCLASS (LXSSPX xoaddr:$src), VSFRC)>;
1067 def : Pat<(f64 (fextend f32:$src)),
1068 (COPY_TO_REGCLASS $src, VSFRC)>;
// f32 select-on-i1-compare patterns: identical CR-logical structure to the
// f64/v2f64 versions earlier in the file, targeting the SELECT_VSSRC pseudo
// (single-precision VSX scalar register class).
1070 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETLT)),
1071 (SELECT_VSSRC (CRANDC $lhs, $rhs), $tval, $fval)>;
1072 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETULT)),
1073 (SELECT_VSSRC (CRANDC $rhs, $lhs), $tval, $fval)>;
1074 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETLE)),
1075 (SELECT_VSSRC (CRORC $lhs, $rhs), $tval, $fval)>;
1076 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETULE)),
1077 (SELECT_VSSRC (CRORC $rhs, $lhs), $tval, $fval)>;
1078 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETEQ)),
1079 (SELECT_VSSRC (CREQV $lhs, $rhs), $tval, $fval)>;
1080 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETGE)),
1081 (SELECT_VSSRC (CRORC $rhs, $lhs), $tval, $fval)>;
1082 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETUGE)),
1083 (SELECT_VSSRC (CRORC $lhs, $rhs), $tval, $fval)>;
1084 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETGT)),
1085 (SELECT_VSSRC (CRANDC $rhs, $lhs), $tval, $fval)>;
1086 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETUGT)),
1087 (SELECT_VSSRC (CRANDC $lhs, $rhs), $tval, $fval)>;
1088 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETNE)),
1089 (SELECT_VSSRC (CRXOR $lhs, $rhs), $tval, $fval)>;
1091 // VSX Elementary Scalar FP arithmetic (SP)
// add/mul are commutable; div, sqrt, and the estimate ops are not.
1092 let isCommutable = 1 in {
1093 def XSADDSP : XX3Form<60, 0,
1094 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1095 "xsaddsp $XT, $XA, $XB", IIC_VecFP,
1096 [(set f32:$XT, (fadd f32:$XA, f32:$XB))]>;
1097 def XSMULSP : XX3Form<60, 16,
1098 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1099 "xsmulsp $XT, $XA, $XB", IIC_VecFP,
// NOTE(review): the closing "} // isCommutable" (original lines 1101-1102)
// is missing from this copy of the file.
1100 [(set f32:$XT, (fmul f32:$XA, f32:$XB))]>;
1103 def XSDIVSP : XX3Form<60, 24,
1104 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1105 "xsdivsp $XT, $XA, $XB", IIC_FPDivS,
1106 [(set f32:$XT, (fdiv f32:$XA, f32:$XB))]>;
// PPCfre / PPCfrsqrte are the target nodes for the hardware reciprocal and
// reciprocal-square-root estimates (used by fast-math div/sqrt expansion).
1107 def XSRESP : XX2Form<60, 26,
1108 (outs vssrc:$XT), (ins vssrc:$XB),
1109 "xsresp $XT, $XB", IIC_VecFP,
1110 [(set f32:$XT, (PPCfre f32:$XB))]>;
1111 def XSSQRTSP : XX2Form<60, 11,
1112 (outs vssrc:$XT), (ins vssrc:$XB),
1113 "xssqrtsp $XT, $XB", IIC_FPSqrtS,
1114 [(set f32:$XT, (fsqrt f32:$XB))]>;
1115 def XSRSQRTESP : XX2Form<60, 10,
1116 (outs vssrc:$XT), (ins vssrc:$XB),
1117 "xsrsqrtesp $XT, $XB", IIC_VecFP,
1118 [(set f32:$XT, (PPCfrsqrte f32:$XB))]>;
1119 def XSSUBSP : XX3Form<60, 8,
1120 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1121 "xssubsp $XT, $XA, $XB", IIC_VecFP,
1122 [(set f32:$XT, (fsub f32:$XA, f32:$XB))]>;
// Single-precision FMA family.  Each BaseName group pairs the A-form
// (pattern-matched; accumulator in $XTi, tied to $XT) with the M-form
// (IsVSXFMAAlt, no pattern) so the FMA mutation pass can flip between them.
// NOTE(review): throughout this section the "(outs vssrc:$XT)," line of each
// def and several trailing continuation lines (e.g. original 1128, 1133,
// 1136, 1140-1142, ...) are missing from this copy -- the records are
// syntactically incomplete as shown; restore from the upstream file.
1125 let BaseName = "XSMADDASP" in {
1126 let isCommutable = 1 in
1127 def XSMADDASP : XX3Form<60, 1,
1129 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1130 "xsmaddasp $XT, $XA, $XB", IIC_VecFP,
1131 [(set f32:$XT, (fma f32:$XA, f32:$XB, f32:$XTi))]>,
1132 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1134 let IsVSXFMAAlt = 1 in
1135 def XSMADDMSP : XX3Form<60, 9,
1137 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1138 "xsmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
1139 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// a*b - c
1143 let BaseName = "XSMSUBASP" in {
1144 let isCommutable = 1 in
1145 def XSMSUBASP : XX3Form<60, 17,
1147 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1148 "xsmsubasp $XT, $XA, $XB", IIC_VecFP,
1149 [(set f32:$XT, (fma f32:$XA, f32:$XB,
1150 (fneg f32:$XTi)))]>,
1151 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1153 let IsVSXFMAAlt = 1 in
1154 def XSMSUBMSP : XX3Form<60, 25,
1156 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1157 "xsmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
1158 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// -(a*b + c)
1162 let BaseName = "XSNMADDASP" in {
1163 let isCommutable = 1 in
1164 def XSNMADDASP : XX3Form<60, 129,
1166 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1167 "xsnmaddasp $XT, $XA, $XB", IIC_VecFP,
1168 [(set f32:$XT, (fneg (fma f32:$XA, f32:$XB,
1170 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1172 let IsVSXFMAAlt = 1 in
1173 def XSNMADDMSP : XX3Form<60, 137,
1175 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1176 "xsnmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
1177 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// -(a*b - c)
1181 let BaseName = "XSNMSUBASP" in {
1182 let isCommutable = 1 in
1183 def XSNMSUBASP : XX3Form<60, 145,
1185 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1186 "xsnmsubasp $XT, $XA, $XB", IIC_VecFP,
1187 [(set f32:$XT, (fneg (fma f32:$XA, f32:$XB,
1188 (fneg f32:$XTi))))]>,
1189 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1191 let IsVSXFMAAlt = 1 in
1192 def XSNMSUBMSP : XX3Form<60, 153,
1194 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1195 "xsnmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
1196 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1200 // Single Precision Conversions (FP <-> INT)
// Signed/unsigned doubleword-integer to single-precision; matches the
// PPCfcfids/PPCfcfidus target nodes produced by int-to-fp lowering.
1201 def XSCVSXDSP : XX2Form<60, 312,
1202 (outs vssrc:$XT), (ins vsfrc:$XB),
1203 "xscvsxdsp $XT, $XB", IIC_VecFP,
1204 [(set f32:$XT, (PPCfcfids f64:$XB))]>;
1205 def XSCVUXDSP : XX2Form<60, 296,
1206 (outs vssrc:$XT), (ins vsfrc:$XB),
1207 "xscvuxdsp $XT, $XB", IIC_VecFP,
1208 [(set f32:$XT, (PPCfcfidus f64:$XB))]>;
1210 // Conversions between vector and scalar single precision
// No ISel patterns: these "non-signalling" converts are selected manually
// (e.g. by the scalar_to_vector patterns near the end of this chunk).
1211 def XSCVDPSPN : XX2Form<60, 267, (outs vsrc:$XT), (ins vssrc:$XB),
1212 "xscvdpspn $XT, $XB", IIC_VecFP, []>;
1213 def XSCVSPDPN : XX2Form<60, 331, (outs vssrc:$XT), (ins vsrc:$XB),
1214 "xscvspdpn $XT, $XB", IIC_VecFP, []>;
1216 } // AddedComplexity = 400
// GPR <-> VSR direct-move instructions (ISA 2.07).  The 64-bit moves
// (MFVSRD/MTVSRD) require 64-bit mode; the word forms do not.  MTVSRWA
// sign-extends the word, MTVSRWZ zero-extends it (PPCmtvsra vs. PPCmtvsrz).
1219 let Predicates = [HasDirectMove, HasVSX] in {
1220 // VSX direct move instructions
1221 def MFVSRD : XX1_RS6_RD5_XO<31, 51, (outs g8rc:$rA), (ins vsfrc:$XT),
1222 "mfvsrd $rA, $XT", IIC_VecGeneral,
1223 [(set i64:$rA, (PPCmfvsr f64:$XT))]>,
1224 Requires<[In64BitMode]>;
1225 def MFVSRWZ : XX1_RS6_RD5_XO<31, 115, (outs gprc:$rA), (ins vsfrc:$XT),
1226 "mfvsrwz $rA, $XT", IIC_VecGeneral,
1227 [(set i32:$rA, (PPCmfvsr f64:$XT))]>;
1228 def MTVSRD : XX1_RS6_RD5_XO<31, 179, (outs vsfrc:$XT), (ins g8rc:$rA),
1229 "mtvsrd $XT, $rA", IIC_VecGeneral,
1230 [(set f64:$XT, (PPCmtvsra i64:$rA))]>,
1231 Requires<[In64BitMode]>;
1232 def MTVSRWA : XX1_RS6_RD5_XO<31, 211, (outs vsfrc:$XT), (ins gprc:$rA),
1233 "mtvsrwa $XT, $rA", IIC_VecGeneral,
1234 [(set f64:$XT, (PPCmtvsra i32:$rA))]>;
1235 def MTVSRWZ : XX1_RS6_RD5_XO<31, 243, (outs vsfrc:$XT), (ins gprc:$rA),
1236 "mtvsrwz $XT, $rA", IIC_VecGeneral,
1237 [(set f64:$XT, (PPCmtvsrz i32:$rA))]>;
1238 } // HasDirectMove, HasVSX
1240 /* Direct moves of various size entities from GPR's into VSR's. Each lines
1241 the value up into element 0 (both BE and LE). Namely, entities smaller than
1242 a doubleword are shifted left and moved for BE. For LE, they're moved, then
1243 swapped to go into the least significant element of the VSR.
// These dags are members of a helper class/multiclass ("Moves", per the
// references below) whose header is outside this chunk.  NOTE(review): the
// RLDICR wrapper lines of the BE_* shift-and-move dags (original 1247,
// 1250, 1253) are missing from this copy -- restore from upstream.
1246 dag BE_BYTE_0 = (MTVSRD
1248 (INSERT_SUBREG (i64 (IMPLICIT_DEF)), $A, sub_32), 56, 7));
1249 dag BE_HALF_0 = (MTVSRD
1251 (INSERT_SUBREG (i64 (IMPLICIT_DEF)), $A, sub_32), 48, 15));
1252 dag BE_WORD_0 = (MTVSRD
1254 (INSERT_SUBREG (i64 (IMPLICIT_DEF)), $A, sub_32), 32, 31));
1255 dag BE_DWORD_0 = (MTVSRD $A);
// LE variants: move first, then XXPERMDI-swap so the value lands in the
// least-significant (element 0 for LE numbering) doubleword.
1257 dag LE_MTVSRW = (MTVSRD (INSERT_SUBREG (i64 (IMPLICIT_DEF)), $A, sub_32));
1258 dag LE_WORD_1 = (v2i64 (COPY_TO_REGCLASS LE_MTVSRW, VSRC));
1259 dag LE_WORD_0 = (XXPERMDI LE_WORD_1, LE_WORD_1, 2);
1260 dag LE_DWORD_1 = (v2i64 (COPY_TO_REGCLASS BE_DWORD_0, VSRC));
1261 dag LE_DWORD_0 = (XXPERMDI LE_DWORD_1, LE_DWORD_1, 2);
// Endian-specific scalar_to_vector lowering using the Moves.* dags above.
// These patterns are lane-sensitive by construction (separate BE/LE bodies),
// so they need no entry in the swap-removal pass's instruction list.
1264 let Predicates = [IsBigEndian, HasP8Vector] in {
1265 def : Pat<(v4f32 (scalar_to_vector f32:$A)),
1266 (v4f32 (XSCVDPSPN $A))>;
1267 } // IsBigEndian, HasP8Vector
1269 let Predicates = [IsBigEndian, HasDirectMove] in {
1270 def : Pat<(v16i8 (scalar_to_vector i32:$A)),
1271 (v16i8 (COPY_TO_REGCLASS Moves.BE_BYTE_0, VSRC))>;
1272 def : Pat<(v8i16 (scalar_to_vector i32:$A)),
1273 (v8i16 (COPY_TO_REGCLASS Moves.BE_HALF_0, VSRC))>;
1274 def : Pat<(v4i32 (scalar_to_vector i32:$A)),
1275 (v4i32 (COPY_TO_REGCLASS Moves.BE_WORD_0, VSRC))>;
1276 def : Pat<(v2i64 (scalar_to_vector i64:$A)),
1277 (v2i64 (COPY_TO_REGCLASS Moves.BE_DWORD_0, VSRC))>;
1278 } // IsBigEndian, HasDirectMove
// LE: XSCVDPSPN splats into the wrong word for LE numbering, so rotate the
// result by one word with XXSLDWI.
1280 let Predicates = [IsLittleEndian, HasP8Vector] in {
1281 def : Pat<(v4f32 (scalar_to_vector f32:$A)),
1282 (v4f32 (XXSLDWI (XSCVDPSPN $A), (XSCVDPSPN $A), 1))>;
1283 } // IsLittleEndian, HasP8Vector
// LE integer cases: byte/half/word all funnel through LE_WORD_0; only the
// result type differs.
1285 let Predicates = [IsLittleEndian, HasDirectMove] in {
1286 def : Pat<(v16i8 (scalar_to_vector i32:$A)),
1287 (v16i8 (COPY_TO_REGCLASS Moves.LE_WORD_0, VSRC))>;
1288 def : Pat<(v8i16 (scalar_to_vector i32:$A)),
1289 (v8i16 (COPY_TO_REGCLASS Moves.LE_WORD_0, VSRC))>;
1290 def : Pat<(v4i32 (scalar_to_vector i32:$A)),
1291 (v4i32 (COPY_TO_REGCLASS Moves.LE_WORD_0, VSRC))>;
1292 def : Pat<(v2i64 (scalar_to_vector i64:$A)),
1293 (v2i64 Moves.LE_DWORD_0)>;
1294 } // IsLittleEndian, HasDirectMove