1 //===- PPCInstrVSX.td - The PowerPC VSX Extension --*- tablegen -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file describes the VSX extension to the PowerPC instruction set.
12 //===----------------------------------------------------------------------===//
14 // *********************************** NOTE ***********************************
15 // ** For POWER8 Little Endian, the VSX swap optimization relies on knowing **
16 // ** which VMX and VSX instructions are lane-sensitive and which are not. **
17 // ** A lane-sensitive instruction relies, implicitly or explicitly, on **
18 // ** whether lanes are numbered from left to right. An instruction like **
19 // ** VADDFP is not lane-sensitive, because each lane of the result vector **
20 // ** relies only on the corresponding lane of the source vectors. However, **
21 // ** an instruction like VMULESB is lane-sensitive, because "even" and **
22 // ** "odd" lanes are different for big-endian and little-endian numbering. **
24 // ** When adding new VMX and VSX instructions, please consider whether they **
25 // ** are lane-sensitive. If so, they must be added to a switch statement **
26 // ** in PPCVSXSwapRemoval::gatherVectorInstructions(). **
27 // ****************************************************************************
// Operand definitions for the three VSX register classes.  Each
// RegisterOperand is paired with an AsmOperandClass so the asm parser can
// match it; all three share the isVSRegNumber range check.
// NOTE(review): the closing braces were lost in this numbered copy and have
// been restored; the stray line-number prefixes have been stripped.

// Full 128-bit VSX vector registers (VSRC).
def PPCRegVSRCAsmOperand : AsmOperandClass {
  let Name = "RegVSRC"; let PredicateMethod = "isVSRegNumber";
}
def vsrc : RegisterOperand<VSRC> {
  let ParserMatchClass = PPCRegVSRCAsmOperand;
}

// VSX registers used for scalar double-precision values (VSFRC).
def PPCRegVSFRCAsmOperand : AsmOperandClass {
  let Name = "RegVSFRC"; let PredicateMethod = "isVSRegNumber";
}
def vsfrc : RegisterOperand<VSFRC> {
  let ParserMatchClass = PPCRegVSFRCAsmOperand;
}

// VSX registers used for scalar single-precision values (VSSRC).
def PPCRegVSSRCAsmOperand : AsmOperandClass {
  let Name = "RegVSSRC"; let PredicateMethod = "isVSRegNumber";
}
def vssrc : RegisterOperand<VSSRC> {
  let ParserMatchClass = PPCRegVSSRCAsmOperand;
}
// Little-endian-specific nodes.
// Type profiles for the lane-correcting load/store/swap nodes used by the
// LE lowering of VSX memory operations.
// NOTE(review): the list closers and the SDT_PPCxxswapd constraint body
// were lost in this numbered copy; restored from the upstream definition
// (result has the same type as the input) -- verify against LLVM's
// PPCInstrVSX.td.
def SDT_PPClxvd2x : SDTypeProfile<1, 1, [
  SDTCisVT<0, v2f64>, SDTCisPtrTy<1>
]>;
def SDT_PPCstxvd2x : SDTypeProfile<0, 2, [
  SDTCisVT<0, v2f64>, SDTCisPtrTy<1>
]>;
def SDT_PPCxxswapd : SDTypeProfile<1, 1, [
  SDTCisSameAs<0, 1>
]>;

// Chained element-order-correcting load/store nodes and the doubleword
// swap node used to fix up lane numbering on little-endian targets.
def PPClxvd2x  : SDNode<"PPCISD::LXVD2X", SDT_PPClxvd2x,
                        [SDNPHasChain, SDNPMayLoad]>;
def PPCstxvd2x : SDNode<"PPCISD::STXVD2X", SDT_PPCstxvd2x,
                        [SDNPHasChain, SDNPMayStore]>;
def PPCxxswapd : SDNode<"PPCISD::XXSWAPD", SDT_PPCxxswapd, [SDNPHasChain]>;
// Direct moves between the VSX and GPR register files.
def PPCmfvsr : SDNode<"PPCISD::MFVSR", SDTUnaryOp, []>;
def PPCmtvsra : SDNode<"PPCISD::MTVSRA", SDTUnaryOp, []>;
def PPCmtvsrz : SDNode<"PPCISD::MTVSRZ", SDTUnaryOp, []>;
// Emits both the base form and the record ("." ) form of an XX3
// instruction.  The record form sets CR6 and carries no ISel pattern;
// both share BaseName so they can be related as a pair.
// NOTE(review): the trailing parameter, the pattern argument of the base
// def, the CR6 Defs, the isDOT marker and the closing braces were lost in
// this numbered copy and have been restored from the upstream definition
// -- verify against LLVM's PPCInstrVSX.td.
multiclass XX3Form_Rcr<bits<6> opcode, bits<7> xo, dag OOL, dag IOL,
                    string asmbase, string asmstr, InstrItinClass itin,
                    list<dag> pattern> {
  let BaseName = asmbase in {
    def NAME : XX3Form_Rc<opcode, xo, OOL, IOL,
                       !strconcat(asmbase, !strconcat(" ", asmstr)), itin,
                       pattern>;
    let Defs = [CR6] in
    def o : XX3Form_Rc<opcode, xo, OOL, IOL,
                       !strconcat(asmbase, !strconcat(". ", asmstr)), itin,
                       []>, isDOT;
  }
}
// Subtarget predicates: HasVSX guards every definition below;
// IsLittleEndian/IsBigEndian select the endian-specific patterns near the
// end of this file.
84 def HasVSX : Predicate<"PPCSubTarget->hasVSX()">;
85 def IsLittleEndian : Predicate<"PPCSubTarget->isLittleEndian()">;
86 def IsBigEndian : Predicate<"!PPCSubTarget->isLittleEndian()">;
// Everything below is VSX-only (HasVSX), preferred over non-VSX patterns
// (AddedComplexity = 400) and free of side effects.
// NOTE(review): the embedded numbering shows lines were dropped from this
// copy (e.g. the closers of these let scopes); code left byte-identical.
88 let Predicates = [HasVSX] in {
89 let AddedComplexity = 400 in { // Prefer VSX patterns over non-VSX patterns.
90 let hasSideEffects = 0 in { // VSX instructions don't have side effects.
93 // Load indexed instructions
// Scalar f64 load; LXVD2X/LXVW4X select the vsx load intrinsics so swap
// optimization can reason about them; LXVDSX (load-and-splat) has no
// pattern here.
95 def LXSDX : XX1Form<31, 588,
96 (outs vsfrc:$XT), (ins memrr:$src),
97 "lxsdx $XT, $src", IIC_LdStLFD,
98 [(set f64:$XT, (load xoaddr:$src))]>;
100 def LXVD2X : XX1Form<31, 844,
101 (outs vsrc:$XT), (ins memrr:$src),
102 "lxvd2x $XT, $src", IIC_LdStLFD,
103 [(set v2f64:$XT, (int_ppc_vsx_lxvd2x xoaddr:$src))]>;
105 def LXVDSX : XX1Form<31, 332,
106 (outs vsrc:$XT), (ins memrr:$src),
107 "lxvdsx $XT, $src", IIC_LdStLFD, []>;
109 def LXVW4X : XX1Form<31, 780,
110 (outs vsrc:$XT), (ins memrr:$src),
111 "lxvw4x $XT, $src", IIC_LdStLFD,
112 [(set v4i32:$XT, (int_ppc_vsx_lxvw4x xoaddr:$src))]>;
115 // Store indexed instructions
// Explicitly marked mayStore since the store patterns alone do not set it
// under hasSideEffects = 0.
116 let mayStore = 1 in {
117 def STXSDX : XX1Form<31, 716,
118 (outs), (ins vsfrc:$XT, memrr:$dst),
119 "stxsdx $XT, $dst", IIC_LdStSTFD,
120 [(store f64:$XT, xoaddr:$dst)]>;
122 def STXVD2X : XX1Form<31, 972,
123 (outs), (ins vsrc:$XT, memrr:$dst),
124 "stxvd2x $XT, $dst", IIC_LdStSTFD,
125 [(store v2f64:$XT, xoaddr:$dst)]>;
127 def STXVW4X : XX1Form<31, 908,
128 (outs), (ins vsrc:$XT, memrr:$dst),
129 "stxvw4x $XT, $dst", IIC_LdStSTFD,
130 [(store v4i32:$XT, xoaddr:$dst)]>;
// Scalar (XS*, f64) and vector (XV*, v2f64/v4f32) add/multiply, all
// commutative; subtracts follow and are not commutable.
// NOTE(review): numbering gaps show the closer of this isCommutable scope
// was dropped from this copy; code left byte-identical.
134 // Add/Mul Instructions
135 let isCommutable = 1 in {
136 def XSADDDP : XX3Form<60, 32,
137 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
138 "xsadddp $XT, $XA, $XB", IIC_VecFP,
139 [(set f64:$XT, (fadd f64:$XA, f64:$XB))]>;
140 def XSMULDP : XX3Form<60, 48,
141 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
142 "xsmuldp $XT, $XA, $XB", IIC_VecFP,
143 [(set f64:$XT, (fmul f64:$XA, f64:$XB))]>;
145 def XVADDDP : XX3Form<60, 96,
146 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
147 "xvadddp $XT, $XA, $XB", IIC_VecFP,
148 [(set v2f64:$XT, (fadd v2f64:$XA, v2f64:$XB))]>;
150 def XVADDSP : XX3Form<60, 64,
151 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
152 "xvaddsp $XT, $XA, $XB", IIC_VecFP,
153 [(set v4f32:$XT, (fadd v4f32:$XA, v4f32:$XB))]>;
155 def XVMULDP : XX3Form<60, 112,
156 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
157 "xvmuldp $XT, $XA, $XB", IIC_VecFP,
158 [(set v2f64:$XT, (fmul v2f64:$XA, v2f64:$XB))]>;
160 def XVMULSP : XX3Form<60, 80,
161 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
162 "xvmulsp $XT, $XA, $XB", IIC_VecFP,
163 [(set v4f32:$XT, (fmul v4f32:$XA, v4f32:$XB))]>;
166 // Subtract Instructions
167 def XSSUBDP : XX3Form<60, 40,
168 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
169 "xssubdp $XT, $XA, $XB", IIC_VecFP,
170 [(set f64:$XT, (fsub f64:$XA, f64:$XB))]>;
172 def XVSUBDP : XX3Form<60, 104,
173 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
174 "xvsubdp $XT, $XA, $XB", IIC_VecFP,
175 [(set v2f64:$XT, (fsub v2f64:$XA, v2f64:$XB))]>;
176 def XVSUBSP : XX3Form<60, 72,
177 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
178 "xvsubsp $XT, $XA, $XB", IIC_VecFP,
179 [(set v4f32:$XT, (fsub v4f32:$XA, v4f32:$XB))]>;
// Scalar double-precision FMA families.  Each BaseName group pairs the
// "A" form (accumulator $XTi tied to $XT via RegConstraint/NoEncode, and
// carrying the fma ISel pattern) with an "M" alternate form marked
// IsVSXFMAAlt that has no pattern; the FMA mutation pass picks between
// them.  NOTE(review): numbering gaps show the trailing attribute lines
// and group closers were dropped from this copy; code left byte-identical.
182 let BaseName = "XSMADDADP" in {
183 let isCommutable = 1 in
184 def XSMADDADP : XX3Form<60, 33,
185 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
186 "xsmaddadp $XT, $XA, $XB", IIC_VecFP,
187 [(set f64:$XT, (fma f64:$XA, f64:$XB, f64:$XTi))]>,
188 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
190 let IsVSXFMAAlt = 1 in
191 def XSMADDMDP : XX3Form<60, 41,
192 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
193 "xsmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
194 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// xsmsub: XT = XA*XB - XTi (note the fneg on the accumulator).
198 let BaseName = "XSMSUBADP" in {
199 let isCommutable = 1 in
200 def XSMSUBADP : XX3Form<60, 49,
201 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
202 "xsmsubadp $XT, $XA, $XB", IIC_VecFP,
203 [(set f64:$XT, (fma f64:$XA, f64:$XB, (fneg f64:$XTi)))]>,
204 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
206 let IsVSXFMAAlt = 1 in
207 def XSMSUBMDP : XX3Form<60, 57,
208 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
209 "xsmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
210 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// xsnmadd: XT = -(XA*XB + XTi).
214 let BaseName = "XSNMADDADP" in {
215 let isCommutable = 1 in
216 def XSNMADDADP : XX3Form<60, 161,
217 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
218 "xsnmaddadp $XT, $XA, $XB", IIC_VecFP,
219 [(set f64:$XT, (fneg (fma f64:$XA, f64:$XB, f64:$XTi)))]>,
220 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
222 let IsVSXFMAAlt = 1 in
223 def XSNMADDMDP : XX3Form<60, 169,
224 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
225 "xsnmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
226 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// xsnmsub: XT = -(XA*XB - XTi).
230 let BaseName = "XSNMSUBADP" in {
231 let isCommutable = 1 in
232 def XSNMSUBADP : XX3Form<60, 177,
233 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
234 "xsnmsubadp $XT, $XA, $XB", IIC_VecFP,
235 [(set f64:$XT, (fneg (fma f64:$XA, f64:$XB, (fneg f64:$XTi))))]>,
236 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
238 let IsVSXFMAAlt = 1 in
239 def XSNMSUBMDP : XX3Form<60, 185,
240 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
241 "xsnmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
242 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// Vector FMA families (v2f64 "DP" and v4f32 "SP"), structured exactly like
// the scalar groups above: the "A" form carries the fma pattern and ties
// $XTi to $XT; the "M" form is the IsVSXFMAAlt alternate with no pattern.
// NOTE(review): numbering gaps show trailing attribute lines and group
// closers were dropped from this copy; code left byte-identical.
246 let BaseName = "XVMADDADP" in {
247 let isCommutable = 1 in
248 def XVMADDADP : XX3Form<60, 97,
249 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
250 "xvmaddadp $XT, $XA, $XB", IIC_VecFP,
251 [(set v2f64:$XT, (fma v2f64:$XA, v2f64:$XB, v2f64:$XTi))]>,
252 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
254 let IsVSXFMAAlt = 1 in
255 def XVMADDMDP : XX3Form<60, 105,
256 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
257 "xvmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
258 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
262 let BaseName = "XVMADDASP" in {
263 let isCommutable = 1 in
264 def XVMADDASP : XX3Form<60, 65,
265 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
266 "xvmaddasp $XT, $XA, $XB", IIC_VecFP,
267 [(set v4f32:$XT, (fma v4f32:$XA, v4f32:$XB, v4f32:$XTi))]>,
268 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
270 let IsVSXFMAAlt = 1 in
271 def XVMADDMSP : XX3Form<60, 73,
272 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
273 "xvmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
274 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// xvmsub: XT = XA*XB - XTi.
278 let BaseName = "XVMSUBADP" in {
279 let isCommutable = 1 in
280 def XVMSUBADP : XX3Form<60, 113,
281 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
282 "xvmsubadp $XT, $XA, $XB", IIC_VecFP,
283 [(set v2f64:$XT, (fma v2f64:$XA, v2f64:$XB, (fneg v2f64:$XTi)))]>,
284 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
286 let IsVSXFMAAlt = 1 in
287 def XVMSUBMDP : XX3Form<60, 121,
288 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
289 "xvmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
290 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
294 let BaseName = "XVMSUBASP" in {
295 let isCommutable = 1 in
296 def XVMSUBASP : XX3Form<60, 81,
297 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
298 "xvmsubasp $XT, $XA, $XB", IIC_VecFP,
299 [(set v4f32:$XT, (fma v4f32:$XA, v4f32:$XB, (fneg v4f32:$XTi)))]>,
300 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
302 let IsVSXFMAAlt = 1 in
303 def XVMSUBMSP : XX3Form<60, 89,
304 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
305 "xvmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
306 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// xvnmadd: XT = -(XA*XB + XTi).
310 let BaseName = "XVNMADDADP" in {
311 let isCommutable = 1 in
312 def XVNMADDADP : XX3Form<60, 225,
313 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
314 "xvnmaddadp $XT, $XA, $XB", IIC_VecFP,
315 [(set v2f64:$XT, (fneg (fma v2f64:$XA, v2f64:$XB, v2f64:$XTi)))]>,
316 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
318 let IsVSXFMAAlt = 1 in
319 def XVNMADDMDP : XX3Form<60, 233,
320 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
321 "xvnmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
322 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
326 let BaseName = "XVNMADDASP" in {
327 let isCommutable = 1 in
328 def XVNMADDASP : XX3Form<60, 193,
329 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
330 "xvnmaddasp $XT, $XA, $XB", IIC_VecFP,
331 [(set v4f32:$XT, (fneg (fma v4f32:$XA, v4f32:$XB, v4f32:$XTi)))]>,
332 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
334 let IsVSXFMAAlt = 1 in
335 def XVNMADDMSP : XX3Form<60, 201,
336 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
337 "xvnmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
338 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// xvnmsub: XT = -(XA*XB - XTi).
342 let BaseName = "XVNMSUBADP" in {
343 let isCommutable = 1 in
344 def XVNMSUBADP : XX3Form<60, 241,
345 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
346 "xvnmsubadp $XT, $XA, $XB", IIC_VecFP,
347 [(set v2f64:$XT, (fneg (fma v2f64:$XA, v2f64:$XB, (fneg v2f64:$XTi))))]>,
348 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
350 let IsVSXFMAAlt = 1 in
351 def XVNMSUBMDP : XX3Form<60, 249,
352 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
353 "xvnmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
354 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
358 let BaseName = "XVNMSUBASP" in {
359 let isCommutable = 1 in
360 def XVNMSUBASP : XX3Form<60, 209,
361 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
362 "xvnmsubasp $XT, $XA, $XB", IIC_VecFP,
363 [(set v4f32:$XT, (fneg (fma v4f32:$XA, v4f32:$XB, (fneg v4f32:$XTi))))]>,
364 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
366 let IsVSXFMAAlt = 1 in
367 def XVNMSUBMSP : XX3Form<60, 217,
368 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
369 "xvnmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
370 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
// Division, square root, test-for-safe-divide/sqrt (results go to a CR
// field), and reciprocal/rsqrt estimates (PPCfre/PPCfrsqrte nodes used by
// the fdiv/fsqrt software-estimate expansions).
374 // Division Instructions
375 def XSDIVDP : XX3Form<60, 56,
376 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
377 "xsdivdp $XT, $XA, $XB", IIC_FPDivD,
378 [(set f64:$XT, (fdiv f64:$XA, f64:$XB))]>;
379 def XSSQRTDP : XX2Form<60, 75,
380 (outs vsfrc:$XT), (ins vsfrc:$XB),
381 "xssqrtdp $XT, $XB", IIC_FPSqrtD,
382 [(set f64:$XT, (fsqrt f64:$XB))]>;
384 def XSREDP : XX2Form<60, 90,
385 (outs vsfrc:$XT), (ins vsfrc:$XB),
386 "xsredp $XT, $XB", IIC_VecFP,
387 [(set f64:$XT, (PPCfre f64:$XB))]>;
388 def XSRSQRTEDP : XX2Form<60, 74,
389 (outs vsfrc:$XT), (ins vsfrc:$XB),
390 "xsrsqrtedp $XT, $XB", IIC_VecFP,
391 [(set f64:$XT, (PPCfrsqrte f64:$XB))]>;
393 def XSTDIVDP : XX3Form_1<60, 61,
394 (outs crrc:$crD), (ins vsfrc:$XA, vsfrc:$XB),
395 "xstdivdp $crD, $XA, $XB", IIC_FPCompare, []>;
396 def XSTSQRTDP : XX2Form_1<60, 106,
397 (outs crrc:$crD), (ins vsfrc:$XB),
398 "xstsqrtdp $crD, $XB", IIC_FPCompare, []>;
400 def XVDIVDP : XX3Form<60, 120,
401 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
402 "xvdivdp $XT, $XA, $XB", IIC_FPDivD,
403 [(set v2f64:$XT, (fdiv v2f64:$XA, v2f64:$XB))]>;
404 def XVDIVSP : XX3Form<60, 88,
405 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
406 "xvdivsp $XT, $XA, $XB", IIC_FPDivS,
407 [(set v4f32:$XT, (fdiv v4f32:$XA, v4f32:$XB))]>;
409 def XVSQRTDP : XX2Form<60, 203,
410 (outs vsrc:$XT), (ins vsrc:$XB),
411 "xvsqrtdp $XT, $XB", IIC_FPSqrtD,
412 [(set v2f64:$XT, (fsqrt v2f64:$XB))]>;
413 def XVSQRTSP : XX2Form<60, 139,
414 (outs vsrc:$XT), (ins vsrc:$XB),
415 "xvsqrtsp $XT, $XB", IIC_FPSqrtS,
416 [(set v4f32:$XT, (fsqrt v4f32:$XB))]>;
418 def XVTDIVDP : XX3Form_1<60, 125,
419 (outs crrc:$crD), (ins vsrc:$XA, vsrc:$XB),
420 "xvtdivdp $crD, $XA, $XB", IIC_FPCompare, []>;
421 def XVTDIVSP : XX3Form_1<60, 93,
422 (outs crrc:$crD), (ins vsrc:$XA, vsrc:$XB),
423 "xvtdivsp $crD, $XA, $XB", IIC_FPCompare, []>;
425 def XVTSQRTDP : XX2Form_1<60, 234,
426 (outs crrc:$crD), (ins vsrc:$XB),
427 "xvtsqrtdp $crD, $XB", IIC_FPCompare, []>;
428 def XVTSQRTSP : XX2Form_1<60, 170,
429 (outs crrc:$crD), (ins vsrc:$XB),
430 "xvtsqrtsp $crD, $XB", IIC_FPCompare, []>;
432 def XVREDP : XX2Form<60, 218,
433 (outs vsrc:$XT), (ins vsrc:$XB),
434 "xvredp $XT, $XB", IIC_VecFP,
435 [(set v2f64:$XT, (PPCfre v2f64:$XB))]>;
436 def XVRESP : XX2Form<60, 154,
437 (outs vsrc:$XT), (ins vsrc:$XB),
438 "xvresp $XT, $XB", IIC_VecFP,
439 [(set v4f32:$XT, (PPCfre v4f32:$XB))]>;
441 def XVRSQRTEDP : XX2Form<60, 202,
442 (outs vsrc:$XT), (ins vsrc:$XB),
443 "xvrsqrtedp $XT, $XB", IIC_VecFP,
444 [(set v2f64:$XT, (PPCfrsqrte v2f64:$XB))]>;
445 def XVRSQRTESP : XX2Form<60, 138,
446 (outs vsrc:$XT), (ins vsrc:$XB),
447 "xvrsqrtesp $XT, $XB", IIC_VecFP,
448 [(set v4f32:$XT, (PPCfrsqrte v4f32:$XB))]>;
// Compare Instructions
// Scalar compares (ordered/unordered) write a CR field and have no ISel
// pattern; vector compares produce an all-ones/all-zeros element mask and
// use XX3Form_Rcr so a record form (sets CR6) is also emitted.
// NOTE(review): the "[(set ..." result lines of the defm's were truncated
// in this numbered copy; restored as the v2i64 (DP) / v4i32 (SP) mask
// results from the upstream definition -- verify against LLVM's
// PPCInstrVSX.td.  The stray line-number prefixes have been stripped.
def XSCMPODP : XX3Form_1<60, 43,
                         (outs crrc:$crD), (ins vsfrc:$XA, vsfrc:$XB),
                         "xscmpodp $crD, $XA, $XB", IIC_FPCompare, []>;
def XSCMPUDP : XX3Form_1<60, 35,
                         (outs crrc:$crD), (ins vsfrc:$XA, vsfrc:$XB),
                         "xscmpudp $crD, $XA, $XB", IIC_FPCompare, []>;

defm XVCMPEQDP : XX3Form_Rcr<60, 99,
                  (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
                  "xvcmpeqdp", "$XT, $XA, $XB", IIC_VecFPCompare,
                  [(set v2i64:$XT,
                        (int_ppc_vsx_xvcmpeqdp v2f64:$XA, v2f64:$XB))]>;
defm XVCMPEQSP : XX3Form_Rcr<60, 67,
                  (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
                  "xvcmpeqsp", "$XT, $XA, $XB", IIC_VecFPCompare,
                  [(set v4i32:$XT,
                        (int_ppc_vsx_xvcmpeqsp v4f32:$XA, v4f32:$XB))]>;
defm XVCMPGEDP : XX3Form_Rcr<60, 115,
                  (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
                  "xvcmpgedp", "$XT, $XA, $XB", IIC_VecFPCompare,
                  [(set v2i64:$XT,
                        (int_ppc_vsx_xvcmpgedp v2f64:$XA, v2f64:$XB))]>;
defm XVCMPGESP : XX3Form_Rcr<60, 83,
                  (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
                  "xvcmpgesp", "$XT, $XA, $XB", IIC_VecFPCompare,
                  [(set v4i32:$XT,
                        (int_ppc_vsx_xvcmpgesp v4f32:$XA, v4f32:$XB))]>;
defm XVCMPGTDP : XX3Form_Rcr<60, 107,
                  (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
                  "xvcmpgtdp", "$XT, $XA, $XB", IIC_VecFPCompare,
                  [(set v2i64:$XT,
                        (int_ppc_vsx_xvcmpgtdp v2f64:$XA, v2f64:$XB))]>;
defm XVCMPGTSP : XX3Form_Rcr<60, 75,
                  (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
                  "xvcmpgtsp", "$XT, $XA, $XB", IIC_VecFPCompare,
                  [(set v4i32:$XT,
                        (int_ppc_vsx_xvcmpgtsp v4f32:$XA, v4f32:$XB))]>;
// Sign-bit manipulation: abs, negated-abs, negate and copysign, scalar
// and vector.  Note fcopysign operand order: the sign comes from $XA, the
// magnitude from $XB.
490 def XSABSDP : XX2Form<60, 345,
491 (outs vsfrc:$XT), (ins vsfrc:$XB),
492 "xsabsdp $XT, $XB", IIC_VecFP,
493 [(set f64:$XT, (fabs f64:$XB))]>;
494 def XSNABSDP : XX2Form<60, 361,
495 (outs vsfrc:$XT), (ins vsfrc:$XB),
496 "xsnabsdp $XT, $XB", IIC_VecFP,
497 [(set f64:$XT, (fneg (fabs f64:$XB)))]>;
498 def XSNEGDP : XX2Form<60, 377,
499 (outs vsfrc:$XT), (ins vsfrc:$XB),
500 "xsnegdp $XT, $XB", IIC_VecFP,
501 [(set f64:$XT, (fneg f64:$XB))]>;
502 def XSCPSGNDP : XX3Form<60, 176,
503 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
504 "xscpsgndp $XT, $XA, $XB", IIC_VecFP,
505 [(set f64:$XT, (fcopysign f64:$XB, f64:$XA))]>;
507 def XVABSDP : XX2Form<60, 473,
508 (outs vsrc:$XT), (ins vsrc:$XB),
509 "xvabsdp $XT, $XB", IIC_VecFP,
510 [(set v2f64:$XT, (fabs v2f64:$XB))]>;
512 def XVABSSP : XX2Form<60, 409,
513 (outs vsrc:$XT), (ins vsrc:$XB),
514 "xvabssp $XT, $XB", IIC_VecFP,
515 [(set v4f32:$XT, (fabs v4f32:$XB))]>;
517 def XVCPSGNDP : XX3Form<60, 240,
518 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
519 "xvcpsgndp $XT, $XA, $XB", IIC_VecFP,
520 [(set v2f64:$XT, (fcopysign v2f64:$XB, v2f64:$XA))]>;
521 def XVCPSGNSP : XX3Form<60, 208,
522 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
523 "xvcpsgnsp $XT, $XA, $XB", IIC_VecFP,
524 [(set v4f32:$XT, (fcopysign v4f32:$XB, v4f32:$XA))]>;
526 def XVNABSDP : XX2Form<60, 489,
527 (outs vsrc:$XT), (ins vsrc:$XB),
528 "xvnabsdp $XT, $XB", IIC_VecFP,
529 [(set v2f64:$XT, (fneg (fabs v2f64:$XB)))]>;
530 def XVNABSSP : XX2Form<60, 425,
531 (outs vsrc:$XT), (ins vsrc:$XB),
532 "xvnabssp $XT, $XB", IIC_VecFP,
533 [(set v4f32:$XT, (fneg (fabs v4f32:$XB)))]>;
535 def XVNEGDP : XX2Form<60, 505,
536 (outs vsrc:$XT), (ins vsrc:$XB),
537 "xvnegdp $XT, $XB", IIC_VecFP,
538 [(set v2f64:$XT, (fneg v2f64:$XB))]>;
539 def XVNEGSP : XX2Form<60, 441,
540 (outs vsrc:$XT), (ins vsrc:$XB),
541 "xvnegsp $XT, $XB", IIC_VecFP,
542 [(set v4f32:$XT, (fneg v4f32:$XB))]>;
// Conversions between floating-point widths and to/from signed/unsigned
// integers.  Scalar word/doubleword conversions select via the PPCfcti*/
// PPCfcfid* nodes; many lane-sensitive vector conversions deliberately
// carry no pattern here (selected by endian-aware code elsewhere).
544 // Conversion Instructions
545 def XSCVDPSP : XX2Form<60, 265,
546 (outs vsfrc:$XT), (ins vsfrc:$XB),
547 "xscvdpsp $XT, $XB", IIC_VecFP, []>;
548 def XSCVDPSXDS : XX2Form<60, 344,
549 (outs vsfrc:$XT), (ins vsfrc:$XB),
550 "xscvdpsxds $XT, $XB", IIC_VecFP,
551 [(set f64:$XT, (PPCfctidz f64:$XB))]>;
552 def XSCVDPSXWS : XX2Form<60, 88,
553 (outs vsfrc:$XT), (ins vsfrc:$XB),
554 "xscvdpsxws $XT, $XB", IIC_VecFP,
555 [(set f64:$XT, (PPCfctiwz f64:$XB))]>;
556 def XSCVDPUXDS : XX2Form<60, 328,
557 (outs vsfrc:$XT), (ins vsfrc:$XB),
558 "xscvdpuxds $XT, $XB", IIC_VecFP,
559 [(set f64:$XT, (PPCfctiduz f64:$XB))]>;
560 def XSCVDPUXWS : XX2Form<60, 72,
561 (outs vsfrc:$XT), (ins vsfrc:$XB),
562 "xscvdpuxws $XT, $XB", IIC_VecFP,
563 [(set f64:$XT, (PPCfctiwuz f64:$XB))]>;
564 def XSCVSPDP : XX2Form<60, 329,
565 (outs vsfrc:$XT), (ins vsfrc:$XB),
566 "xscvspdp $XT, $XB", IIC_VecFP, []>;
567 def XSCVSXDDP : XX2Form<60, 376,
568 (outs vsfrc:$XT), (ins vsfrc:$XB),
569 "xscvsxddp $XT, $XB", IIC_VecFP,
570 [(set f64:$XT, (PPCfcfid f64:$XB))]>;
571 def XSCVUXDDP : XX2Form<60, 360,
572 (outs vsfrc:$XT), (ins vsfrc:$XB),
573 "xscvuxddp $XT, $XB", IIC_VecFP,
574 [(set f64:$XT, (PPCfcfidu f64:$XB))]>;
576 def XVCVDPSP : XX2Form<60, 393,
577 (outs vsrc:$XT), (ins vsrc:$XB),
578 "xvcvdpsp $XT, $XB", IIC_VecFP, []>;
579 def XVCVDPSXDS : XX2Form<60, 472,
580 (outs vsrc:$XT), (ins vsrc:$XB),
581 "xvcvdpsxds $XT, $XB", IIC_VecFP,
582 [(set v2i64:$XT, (fp_to_sint v2f64:$XB))]>;
583 def XVCVDPSXWS : XX2Form<60, 216,
584 (outs vsrc:$XT), (ins vsrc:$XB),
585 "xvcvdpsxws $XT, $XB", IIC_VecFP, []>;
586 def XVCVDPUXDS : XX2Form<60, 456,
587 (outs vsrc:$XT), (ins vsrc:$XB),
588 "xvcvdpuxds $XT, $XB", IIC_VecFP,
589 [(set v2i64:$XT, (fp_to_uint v2f64:$XB))]>;
590 def XVCVDPUXWS : XX2Form<60, 200,
591 (outs vsrc:$XT), (ins vsrc:$XB),
592 "xvcvdpuxws $XT, $XB", IIC_VecFP, []>;
594 def XVCVSPDP : XX2Form<60, 457,
595 (outs vsrc:$XT), (ins vsrc:$XB),
596 "xvcvspdp $XT, $XB", IIC_VecFP, []>;
597 def XVCVSPSXDS : XX2Form<60, 408,
598 (outs vsrc:$XT), (ins vsrc:$XB),
599 "xvcvspsxds $XT, $XB", IIC_VecFP, []>;
600 def XVCVSPSXWS : XX2Form<60, 152,
601 (outs vsrc:$XT), (ins vsrc:$XB),
602 "xvcvspsxws $XT, $XB", IIC_VecFP, []>;
603 def XVCVSPUXDS : XX2Form<60, 392,
604 (outs vsrc:$XT), (ins vsrc:$XB),
605 "xvcvspuxds $XT, $XB", IIC_VecFP, []>;
606 def XVCVSPUXWS : XX2Form<60, 136,
607 (outs vsrc:$XT), (ins vsrc:$XB),
608 "xvcvspuxws $XT, $XB", IIC_VecFP, []>;
609 def XVCVSXDDP : XX2Form<60, 504,
610 (outs vsrc:$XT), (ins vsrc:$XB),
611 "xvcvsxddp $XT, $XB", IIC_VecFP,
612 [(set v2f64:$XT, (sint_to_fp v2i64:$XB))]>;
613 def XVCVSXDSP : XX2Form<60, 440,
614 (outs vsrc:$XT), (ins vsrc:$XB),
615 "xvcvsxdsp $XT, $XB", IIC_VecFP, []>;
616 def XVCVSXWDP : XX2Form<60, 248,
617 (outs vsrc:$XT), (ins vsrc:$XB),
618 "xvcvsxwdp $XT, $XB", IIC_VecFP, []>;
619 def XVCVSXWSP : XX2Form<60, 184,
620 (outs vsrc:$XT), (ins vsrc:$XB),
621 "xvcvsxwsp $XT, $XB", IIC_VecFP, []>;
622 def XVCVUXDDP : XX2Form<60, 488,
623 (outs vsrc:$XT), (ins vsrc:$XB),
624 "xvcvuxddp $XT, $XB", IIC_VecFP,
625 [(set v2f64:$XT, (uint_to_fp v2i64:$XB))]>;
626 def XVCVUXDSP : XX2Form<60, 424,
627 (outs vsrc:$XT), (ins vsrc:$XB),
628 "xvcvuxdsp $XT, $XB", IIC_VecFP, []>;
629 def XVCVUXWDP : XX2Form<60, 232,
630 (outs vsrc:$XT), (ins vsrc:$XB),
631 "xvcvuxwdp $XT, $XB", IIC_VecFP, []>;
632 def XVCVUXWSP : XX2Form<60, 168,
633 (outs vsrc:$XT), (ins vsrc:$XB),
634 "xvcvuxwsp $XT, $XB", IIC_VecFP, []>;
// Round-to-integral: I = round-to-nearest-away (frnd), IC = current
// rounding mode (fnearbyint), IM = floor, IP = ceil, IZ = trunc; scalar
// DP, vector DP and vector SP variants.
636 // Rounding Instructions
637 def XSRDPI : XX2Form<60, 73,
638 (outs vsfrc:$XT), (ins vsfrc:$XB),
639 "xsrdpi $XT, $XB", IIC_VecFP,
640 [(set f64:$XT, (frnd f64:$XB))]>;
641 def XSRDPIC : XX2Form<60, 107,
642 (outs vsfrc:$XT), (ins vsfrc:$XB),
643 "xsrdpic $XT, $XB", IIC_VecFP,
644 [(set f64:$XT, (fnearbyint f64:$XB))]>;
645 def XSRDPIM : XX2Form<60, 121,
646 (outs vsfrc:$XT), (ins vsfrc:$XB),
647 "xsrdpim $XT, $XB", IIC_VecFP,
648 [(set f64:$XT, (ffloor f64:$XB))]>;
649 def XSRDPIP : XX2Form<60, 105,
650 (outs vsfrc:$XT), (ins vsfrc:$XB),
651 "xsrdpip $XT, $XB", IIC_VecFP,
652 [(set f64:$XT, (fceil f64:$XB))]>;
653 def XSRDPIZ : XX2Form<60, 89,
654 (outs vsfrc:$XT), (ins vsfrc:$XB),
655 "xsrdpiz $XT, $XB", IIC_VecFP,
656 [(set f64:$XT, (ftrunc f64:$XB))]>;
658 def XVRDPI : XX2Form<60, 201,
659 (outs vsrc:$XT), (ins vsrc:$XB),
660 "xvrdpi $XT, $XB", IIC_VecFP,
661 [(set v2f64:$XT, (frnd v2f64:$XB))]>;
662 def XVRDPIC : XX2Form<60, 235,
663 (outs vsrc:$XT), (ins vsrc:$XB),
664 "xvrdpic $XT, $XB", IIC_VecFP,
665 [(set v2f64:$XT, (fnearbyint v2f64:$XB))]>;
666 def XVRDPIM : XX2Form<60, 249,
667 (outs vsrc:$XT), (ins vsrc:$XB),
668 "xvrdpim $XT, $XB", IIC_VecFP,
669 [(set v2f64:$XT, (ffloor v2f64:$XB))]>;
670 def XVRDPIP : XX2Form<60, 233,
671 (outs vsrc:$XT), (ins vsrc:$XB),
672 "xvrdpip $XT, $XB", IIC_VecFP,
673 [(set v2f64:$XT, (fceil v2f64:$XB))]>;
674 def XVRDPIZ : XX2Form<60, 217,
675 (outs vsrc:$XT), (ins vsrc:$XB),
676 "xvrdpiz $XT, $XB", IIC_VecFP,
677 [(set v2f64:$XT, (ftrunc v2f64:$XB))]>;
679 def XVRSPI : XX2Form<60, 137,
680 (outs vsrc:$XT), (ins vsrc:$XB),
681 "xvrspi $XT, $XB", IIC_VecFP,
682 [(set v4f32:$XT, (frnd v4f32:$XB))]>;
683 def XVRSPIC : XX2Form<60, 171,
684 (outs vsrc:$XT), (ins vsrc:$XB),
685 "xvrspic $XT, $XB", IIC_VecFP,
686 [(set v4f32:$XT, (fnearbyint v4f32:$XB))]>;
687 def XVRSPIM : XX2Form<60, 185,
688 (outs vsrc:$XT), (ins vsrc:$XB),
689 "xvrspim $XT, $XB", IIC_VecFP,
690 [(set v4f32:$XT, (ffloor v4f32:$XB))]>;
691 def XVRSPIP : XX2Form<60, 169,
692 (outs vsrc:$XT), (ins vsrc:$XB),
693 "xvrspip $XT, $XB", IIC_VecFP,
694 [(set v4f32:$XT, (fceil v4f32:$XB))]>;
695 def XVRSPIZ : XX2Form<60, 153,
696 (outs vsrc:$XT), (ins vsrc:$XB),
697 "xvrspiz $XT, $XB", IIC_VecFP,
698 [(set v4f32:$XT, (ftrunc v4f32:$XB))]>;
// Max/min, selected via the target intrinsics (not fmaxnum/fminnum); all
// commutative.
// NOTE(review): numbering gaps show the "[(set ..." opening line of each
// pattern and the isCommutable closer were dropped from this copy; code
// left byte-identical -- restore from upstream before building.
700 // Max/Min Instructions
701 let isCommutable = 1 in {
702 def XSMAXDP : XX3Form<60, 160,
703 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
704 "xsmaxdp $XT, $XA, $XB", IIC_VecFP,
706 (int_ppc_vsx_xsmaxdp vsfrc:$XA, vsfrc:$XB))]>;
707 def XSMINDP : XX3Form<60, 168,
708 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
709 "xsmindp $XT, $XA, $XB", IIC_VecFP,
711 (int_ppc_vsx_xsmindp vsfrc:$XA, vsfrc:$XB))]>;
713 def XVMAXDP : XX3Form<60, 224,
714 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
715 "xvmaxdp $XT, $XA, $XB", IIC_VecFP,
717 (int_ppc_vsx_xvmaxdp vsrc:$XA, vsrc:$XB))]>;
718 def XVMINDP : XX3Form<60, 232,
719 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
720 "xvmindp $XT, $XA, $XB", IIC_VecFP,
722 (int_ppc_vsx_xvmindp vsrc:$XA, vsrc:$XB))]>;
724 def XVMAXSP : XX3Form<60, 192,
725 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
726 "xvmaxsp $XT, $XA, $XB", IIC_VecFP,
728 (int_ppc_vsx_xvmaxsp vsrc:$XA, vsrc:$XB))]>;
729 def XVMINSP : XX3Form<60, 200,
730 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
731 "xvminsp $XT, $XA, $XB", IIC_VecFP,
733 (int_ppc_vsx_xvminsp vsrc:$XA, vsrc:$XB))]>;
// Bitwise logical ops on the full 128-bit register, patterned on v4i32
// (bitcast patterns below extend them to the other vector types).
// XXLORf is a codegen-only copy of XXLOR restricted to the scalar
// (vsfrc) class.
// NOTE(review): numbering gaps show the second operand line of XXLNOR's
// pattern and the isCommutable closer were dropped from this copy; code
// left byte-identical.
737 // Logical Instructions
738 let isCommutable = 1 in
739 def XXLAND : XX3Form<60, 130,
740 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
741 "xxland $XT, $XA, $XB", IIC_VecGeneral,
742 [(set v4i32:$XT, (and v4i32:$XA, v4i32:$XB))]>;
743 def XXLANDC : XX3Form<60, 138,
744 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
745 "xxlandc $XT, $XA, $XB", IIC_VecGeneral,
746 [(set v4i32:$XT, (and v4i32:$XA,
747 (vnot_ppc v4i32:$XB)))]>;
748 let isCommutable = 1 in {
749 def XXLNOR : XX3Form<60, 162,
750 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
751 "xxlnor $XT, $XA, $XB", IIC_VecGeneral,
752 [(set v4i32:$XT, (vnot_ppc (or v4i32:$XA,
754 def XXLOR : XX3Form<60, 146,
755 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
756 "xxlor $XT, $XA, $XB", IIC_VecGeneral,
757 [(set v4i32:$XT, (or v4i32:$XA, v4i32:$XB))]>;
758 let isCodeGenOnly = 1 in
759 def XXLORf: XX3Form<60, 146,
760 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
761 "xxlor $XT, $XA, $XB", IIC_VecGeneral, []>;
762 def XXLXOR : XX3Form<60, 154,
763 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
764 "xxlxor $XT, $XA, $XB", IIC_VecGeneral,
765 [(set v4i32:$XT, (xor v4i32:$XA, v4i32:$XB))]>;
// Permutation: word merges, doubleword permute-immediate (XXPERMDI, the
// basis of the xxswapd/xxspltd/xxmrg*d aliases below), select, word
// shift-left-immediate and word splat.  All lane-sensitive; no ISel
// patterns here.
768 // Permutation Instructions
769 def XXMRGHW : XX3Form<60, 18,
770 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
771 "xxmrghw $XT, $XA, $XB", IIC_VecPerm, []>;
772 def XXMRGLW : XX3Form<60, 50,
773 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
774 "xxmrglw $XT, $XA, $XB", IIC_VecPerm, []>;
776 def XXPERMDI : XX3Form_2<60, 10,
777 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, u2imm:$DM),
778 "xxpermdi $XT, $XA, $XB, $DM", IIC_VecPerm, []>;
779 def XXSEL : XX4Form<60, 3,
780 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, vsrc:$XC),
781 "xxsel $XT, $XA, $XB, $XC", IIC_VecPerm, []>;
783 def XXSLDWI : XX3Form_2<60, 2,
784 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, u2imm:$SHW),
785 "xxsldwi $XT, $XA, $XB, $SHW", IIC_VecPerm, []>;
786 def XXSPLTW : XX2Form_2<60, 164,
787 (outs vsrc:$XT), (ins vsrc:$XB, u2imm:$UIM),
788 "xxspltw $XT, $XB, $UIM", IIC_VecPerm, []>;
// Pseudo-instructions for select: expanded into branch sequences by the
// custom inserter after ISel.  One pair per VSX register class (vector,
// scalar f64, scalar f32).
// NOTE(review): numbering gaps show the asm-string/pattern lines of the
// SELECT_CC_* defs were dropped from this copy; code left byte-identical.
791 // SELECT_CC_* - Used to implement the SELECT_CC DAG operation. Expanded after
792 // instruction selection into a branch sequence.
793 let usesCustomInserter = 1, // Expanded after instruction selection.
794 PPC970_Single = 1 in {
796 def SELECT_CC_VSRC: Pseudo<(outs vsrc:$dst),
797 (ins crrc:$cond, vsrc:$T, vsrc:$F, i32imm:$BROPC),
800 def SELECT_VSRC: Pseudo<(outs vsrc:$dst),
801 (ins crbitrc:$cond, vsrc:$T, vsrc:$F),
804 (select i1:$cond, v2f64:$T, v2f64:$F))]>;
805 def SELECT_CC_VSFRC: Pseudo<(outs f8rc:$dst),
806 (ins crrc:$cond, f8rc:$T, f8rc:$F,
807 i32imm:$BROPC), "#SELECT_CC_VSFRC",
809 def SELECT_VSFRC: Pseudo<(outs f8rc:$dst),
810 (ins crbitrc:$cond, f8rc:$T, f8rc:$F),
813 (select i1:$cond, f64:$T, f64:$F))]>;
814 def SELECT_CC_VSSRC: Pseudo<(outs f4rc:$dst),
815 (ins crrc:$cond, f4rc:$T, f4rc:$F,
816 i32imm:$BROPC), "#SELECT_CC_VSSRC",
818 def SELECT_VSSRC: Pseudo<(outs f4rc:$dst),
819 (ins crbitrc:$cond, f4rc:$T, f4rc:$F),
822 (select i1:$cond, f32:$T, f32:$F))]>;
823 } // usesCustomInserter
// Assembler aliases: move (via copysign with identical operands) and the
// extended XXPERMDI mnemonics.  DM encodings: 0 = high|high (splat 0 /
// mrghd), 3 = low|low (splat 1 / mrgld), 2 = swap doublewords.
826 def : InstAlias<"xvmovdp $XT, $XB",
827 (XVCPSGNDP vsrc:$XT, vsrc:$XB, vsrc:$XB)>;
828 def : InstAlias<"xvmovsp $XT, $XB",
829 (XVCPSGNSP vsrc:$XT, vsrc:$XB, vsrc:$XB)>;
831 def : InstAlias<"xxspltd $XT, $XB, 0",
832 (XXPERMDI vsrc:$XT, vsrc:$XB, vsrc:$XB, 0)>;
833 def : InstAlias<"xxspltd $XT, $XB, 1",
834 (XXPERMDI vsrc:$XT, vsrc:$XB, vsrc:$XB, 3)>;
835 def : InstAlias<"xxmrghd $XT, $XA, $XB",
836 (XXPERMDI vsrc:$XT, vsrc:$XA, vsrc:$XB, 0)>;
837 def : InstAlias<"xxmrgld $XT, $XA, $XB",
838 (XXPERMDI vsrc:$XT, vsrc:$XA, vsrc:$XB, 3)>;
839 def : InstAlias<"xxswapd $XT, $XB",
840 (XXPERMDI vsrc:$XT, vsrc:$XB, vsrc:$XB, 2)>;
842 let AddedComplexity = 400 in { // Prefer VSX patterns over non-VSX patterns.
844 let Predicates = [IsBigEndian] in {
845 def : Pat<(v2f64 (scalar_to_vector f64:$A)),
846 (v2f64 (SUBREG_TO_REG (i64 1), $A, sub_64))>;
848 def : Pat<(f64 (vector_extract v2f64:$S, 0)),
849 (f64 (EXTRACT_SUBREG $S, sub_64))>;
850 def : Pat<(f64 (vector_extract v2f64:$S, 1)),
851 (f64 (EXTRACT_SUBREG (XXPERMDI $S, $S, 2), sub_64))>;
854 let Predicates = [IsLittleEndian] in {
855 def : Pat<(v2f64 (scalar_to_vector f64:$A)),
856 (v2f64 (XXPERMDI (SUBREG_TO_REG (i64 1), $A, sub_64),
857 (SUBREG_TO_REG (i64 1), $A, sub_64), 0))>;
859 def : Pat<(f64 (vector_extract v2f64:$S, 0)),
860 (f64 (EXTRACT_SUBREG (XXPERMDI $S, $S, 2), sub_64))>;
861 def : Pat<(f64 (vector_extract v2f64:$S, 1)),
862 (f64 (EXTRACT_SUBREG $S, sub_64))>;
865 // Additional fnmsub patterns: -a*c + b == -(a*c - b)
866 def : Pat<(fma (fneg f64:$A), f64:$C, f64:$B),
867 (XSNMSUBADP $B, $C, $A)>;
868 def : Pat<(fma f64:$A, (fneg f64:$C), f64:$B),
869 (XSNMSUBADP $B, $C, $A)>;
871 def : Pat<(fma (fneg v2f64:$A), v2f64:$C, v2f64:$B),
872 (XVNMSUBADP $B, $C, $A)>;
873 def : Pat<(fma v2f64:$A, (fneg v2f64:$C), v2f64:$B),
874 (XVNMSUBADP $B, $C, $A)>;
876 def : Pat<(fma (fneg v4f32:$A), v4f32:$C, v4f32:$B),
877 (XVNMSUBASP $B, $C, $A)>;
878 def : Pat<(fma v4f32:$A, (fneg v4f32:$C), v4f32:$B),
879 (XVNMSUBASP $B, $C, $A)>;
// Bitconverts between the 128-bit vector types are pure register-class
// copies — no instruction is emitted, only a COPY_TO_REGCLASS to the class
// in which the destination type is legal.
// v2f64 results from the VMX-typed vectors go to VSRC:
881 def : Pat<(v2f64 (bitconvert v4f32:$A)),
882 (COPY_TO_REGCLASS $A, VSRC)>;
883 def : Pat<(v2f64 (bitconvert v4i32:$A)),
884 (COPY_TO_REGCLASS $A, VSRC)>;
885 def : Pat<(v2f64 (bitconvert v8i16:$A)),
886 (COPY_TO_REGCLASS $A, VSRC)>;
887 def : Pat<(v2f64 (bitconvert v16i8:$A)),
888 (COPY_TO_REGCLASS $A, VSRC)>;
// VMX-typed results from v2f64 go to VRRC:
890 def : Pat<(v4f32 (bitconvert v2f64:$A)),
891 (COPY_TO_REGCLASS $A, VRRC)>;
892 def : Pat<(v4i32 (bitconvert v2f64:$A)),
893 (COPY_TO_REGCLASS $A, VRRC)>;
894 def : Pat<(v8i16 (bitconvert v2f64:$A)),
895 (COPY_TO_REGCLASS $A, VRRC)>;
896 def : Pat<(v16i8 (bitconvert v2f64:$A)),
897 (COPY_TO_REGCLASS $A, VRRC)>;
// Same scheme with v2i64 as the VSX-side type:
899 def : Pat<(v2i64 (bitconvert v4f32:$A)),
900 (COPY_TO_REGCLASS $A, VSRC)>;
901 def : Pat<(v2i64 (bitconvert v4i32:$A)),
902 (COPY_TO_REGCLASS $A, VSRC)>;
903 def : Pat<(v2i64 (bitconvert v8i16:$A)),
904 (COPY_TO_REGCLASS $A, VSRC)>;
905 def : Pat<(v2i64 (bitconvert v16i8:$A)),
906 (COPY_TO_REGCLASS $A, VSRC)>;
908 def : Pat<(v4f32 (bitconvert v2i64:$A)),
909 (COPY_TO_REGCLASS $A, VRRC)>;
910 def : Pat<(v4i32 (bitconvert v2i64:$A)),
911 (COPY_TO_REGCLASS $A, VRRC)>;
912 def : Pat<(v8i16 (bitconvert v2i64:$A)),
913 (COPY_TO_REGCLASS $A, VRRC)>;
914 def : Pat<(v16i8 (bitconvert v2i64:$A)),
915 (COPY_TO_REGCLASS $A, VRRC)>;
// v2f64 <-> v2i64 and v2f64 <-> v1i128 copy to VRRC.
// NOTE(review): this differs from the VSRC destinations used above for
// v2f64/v2i64 results — confirm against the full file that VRRC is intended
// here rather than an inconsistency.
917 def : Pat<(v2f64 (bitconvert v2i64:$A)),
918 (COPY_TO_REGCLASS $A, VRRC)>;
919 def : Pat<(v2i64 (bitconvert v2f64:$A)),
920 (COPY_TO_REGCLASS $A, VRRC)>;
922 def : Pat<(v2f64 (bitconvert v1i128:$A)),
923 (COPY_TO_REGCLASS $A, VRRC)>;
924 def : Pat<(v1i128 (bitconvert v2f64:$A)),
925 (COPY_TO_REGCLASS $A, VRRC)>;
927 // sign extension patterns
928 // To extend "in place" from v2i32 to v2i64, we have input data like:
929 // | undef | i32 | undef | i32 |
930 // but xvcvsxwdp expects the input in big-Endian format:
931 // | i32 | undef | i32 | undef |
932 // so we need to shift everything to the left by one i32 (word) before
// the conversion. XXSLDWI $C, $C, 1 performs that one-word rotate.
// The sext_inreg itself is implemented as word->double FP convert
// (XVCVSXWDP) followed by FP->signed-doubleword convert (XVCVDPSXDS);
// when the user actually wants sint_to_fp, the second pattern folds away
// the back-conversion and keeps only XVCVSXWDP.
934 def : Pat<(sext_inreg v2i64:$C, v2i32),
935 (XVCVDPSXDS (XVCVSXWDP (XXSLDWI $C, $C, 1)))>;
936 def : Pat<(v2f64 (sint_to_fp (sext_inreg v2i64:$C, v2i32))),
937 (XVCVSXWDP (XXSLDWI $C, $C, 1))>;
// Vector loads: doubleword-element types use LXVD2X, word-element types
// LXVW4X. The PPClxvd2x node (element-order-preserving load) also maps
// straight to LXVD2X.
940 def : Pat<(v2f64 (load xoaddr:$src)), (LXVD2X xoaddr:$src)>;
941 def : Pat<(v2i64 (load xoaddr:$src)), (LXVD2X xoaddr:$src)>;
942 def : Pat<(v4i32 (load xoaddr:$src)), (LXVW4X xoaddr:$src)>;
943 def : Pat<(v2f64 (PPClxvd2x xoaddr:$src)), (LXVD2X xoaddr:$src)>;
// Stores, including the ppc_vsx_stxv* intrinsics and the PPCstxvd2x node:
946 def : Pat<(int_ppc_vsx_stxvd2x v2f64:$rS, xoaddr:$dst),
947 (STXVD2X $rS, xoaddr:$dst)>;
948 def : Pat<(store v2i64:$rS, xoaddr:$dst), (STXVD2X $rS, xoaddr:$dst)>;
949 def : Pat<(int_ppc_vsx_stxvw4x v4i32:$rS, xoaddr:$dst),
950 (STXVW4X $rS, xoaddr:$dst)>;
951 def : Pat<(PPCstxvd2x v2f64:$rS, xoaddr:$dst), (STXVD2X $rS, xoaddr:$dst)>;
// PPCxxswapd (doubleword swap, used by the LE swap-removal machinery) is
// XXPERMDI with both sources equal and immediate 2, for every vector type:
954 def : Pat<(v2f64 (PPCxxswapd v2f64:$src)), (XXPERMDI $src, $src, 2)>;
955 def : Pat<(v2i64 (PPCxxswapd v2i64:$src)), (XXPERMDI $src, $src, 2)>;
956 def : Pat<(v4f32 (PPCxxswapd v4f32:$src)), (XXPERMDI $src, $src, 2)>;
957 def : Pat<(v4i32 (PPCxxswapd v4i32:$src)), (XXPERMDI $src, $src, 2)>;
// selectcc on two i1 operands, lowered to a CR logical op feeding a
// SELECT_* pseudo. The CR op computes the condition from the i1 bits:
//   LT  -> CRANDC $lhs, $rhs      GT/ULT -> CRANDC with operands swapped
//   LE  -> CRORC  $lhs, $rhs      GE/ULE -> CRORC  with operands swapped
//   EQ  -> CREQV                  NE     -> CRXOR
// (unsigned and signed forms differ only by operand order).
// v2f64 selects use SELECT_VSRC:
960 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETLT)),
961 (SELECT_VSRC (CRANDC $lhs, $rhs), $tval, $fval)>;
962 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETULT)),
963 (SELECT_VSRC (CRANDC $rhs, $lhs), $tval, $fval)>;
964 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETLE)),
965 (SELECT_VSRC (CRORC $lhs, $rhs), $tval, $fval)>;
966 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETULE)),
967 (SELECT_VSRC (CRORC $rhs, $lhs), $tval, $fval)>;
968 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETEQ)),
969 (SELECT_VSRC (CREQV $lhs, $rhs), $tval, $fval)>;
970 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETGE)),
971 (SELECT_VSRC (CRORC $rhs, $lhs), $tval, $fval)>;
972 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETUGE)),
973 (SELECT_VSRC (CRORC $lhs, $rhs), $tval, $fval)>;
974 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETGT)),
975 (SELECT_VSRC (CRANDC $rhs, $lhs), $tval, $fval)>;
976 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETUGT)),
977 (SELECT_VSRC (CRANDC $lhs, $rhs), $tval, $fval)>;
978 def : Pat<(v2f64 (selectcc i1:$lhs, i1:$rhs, v2f64:$tval, v2f64:$fval, SETNE)),
979 (SELECT_VSRC (CRXOR $lhs, $rhs), $tval, $fval)>;
// f64 selects: identical scheme, SELECT_VSFRC pseudo:
981 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETLT)),
982 (SELECT_VSFRC (CRANDC $lhs, $rhs), $tval, $fval)>;
983 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETULT)),
984 (SELECT_VSFRC (CRANDC $rhs, $lhs), $tval, $fval)>;
985 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETLE)),
986 (SELECT_VSFRC (CRORC $lhs, $rhs), $tval, $fval)>;
987 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETULE)),
988 (SELECT_VSFRC (CRORC $rhs, $lhs), $tval, $fval)>;
989 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETEQ)),
990 (SELECT_VSFRC (CREQV $lhs, $rhs), $tval, $fval)>;
991 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETGE)),
992 (SELECT_VSFRC (CRORC $rhs, $lhs), $tval, $fval)>;
993 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETUGE)),
994 (SELECT_VSFRC (CRORC $lhs, $rhs), $tval, $fval)>;
995 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETGT)),
996 (SELECT_VSFRC (CRANDC $rhs, $lhs), $tval, $fval)>;
997 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETUGT)),
998 (SELECT_VSFRC (CRANDC $lhs, $rhs), $tval, $fval)>;
999 def : Pat<(f64 (selectcc i1:$lhs, i1:$rhs, f64:$tval, f64:$fval, SETNE)),
1000 (SELECT_VSFRC (CRXOR $lhs, $rhs), $tval, $fval)>;
// Divide / reciprocal-estimate / rsqrt-estimate intrinsics mapped to the
// corresponding XV* instructions.
// NOTE(review): the result dag of each pattern below (presumably
// (XVDIVSP $A, $B)>; etc.) is absent from this excerpt — the embedded line
// numbering jumps over them, which looks like extraction loss rather than
// source code. Verify against the full file before relying on this text.
1003 def : Pat<(int_ppc_vsx_xvdivsp v4f32:$A, v4f32:$B),
1005 def : Pat<(int_ppc_vsx_xvdivdp v2f64:$A, v2f64:$B),
1008 // Reciprocal estimate
1009 def : Pat<(int_ppc_vsx_xvresp v4f32:$A),
1011 def : Pat<(int_ppc_vsx_xvredp v2f64:$A),
1014 // Recip. square root estimate
1015 def : Pat<(int_ppc_vsx_xvrsqrtesp v4f32:$A),
1017 def : Pat<(int_ppc_vsx_xvrsqrtedp v2f64:$A),
// Closes the AddedComplexity = 400 scope opened earlier in the file.
1020 } // AddedComplexity
1023 // The following VSX instructions were introduced in Power ISA 2.07
1024 /* FIXME: if the operands are v2i64, these patterns will not match.
1025 we should define new patterns or otherwise match the same patterns
1026 when the elements are larger than i32.
// NOTE(review): the closing */ of the FIXME block comment above is missing
// from this excerpt — likely dropped by extraction.
// Subtarget predicates gating the ISA 2.07 definitions that follow:
1028 def HasP8Vector : Predicate<"PPCSubTarget->hasP8Vector()">;
1029 def HasDirectMove : Predicate<"PPCSubTarget->hasDirectMove()">;
1030 let Predicates = [HasP8Vector] in {
1031 let AddedComplexity = 400 in { // Prefer VSX patterns over non-VSX patterns.
// Logical operations added by ISA 2.07: eqv, nand (commutative) and orc.
1032 let isCommutable = 1 in {
1033 def XXLEQV : XX3Form<60, 186,
1034 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1035 "xxleqv $XT, $XA, $XB", IIC_VecGeneral,
// eqv == not(xor):
1036 [(set v4i32:$XT, (vnot_ppc (xor v4i32:$XA, v4i32:$XB)))]>;
1037 def XXLNAND : XX3Form<60, 178,
1038 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1039 "xxlnand $XT, $XA, $XB", IIC_VecGeneral,
// NOTE(review): the continuation of this nand pattern (the second operand
// and closing "]>;", plus the "} // isCommutable" brace) is missing from
// this excerpt — verify against the full file.
1040 [(set v4i32:$XT, (vnot_ppc (and v4i32:$XA,
// Map the eqv intrinsic onto XXLEQV (result dag truncated in this excerpt).
1044 def : Pat<(int_ppc_vsx_xxleqv v4i32:$A, v4i32:$B),
1047 def XXLORC : XX3Form<60, 170,
1048 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1049 "xxlorc $XT, $XA, $XB", IIC_VecGeneral,
// orc == a | ~b:
1050 [(set v4i32:$XT, (or v4i32:$XA, (vnot_ppc v4i32:$XB)))]>;
1052 // VSX scalar loads introduced in ISA 2.07
1053 let mayLoad = 1 in {
// Single-precision scalar load:
1054 def LXSSPX : XX1Form<31, 524, (outs vssrc:$XT), (ins memrr:$src),
1055 "lxsspx $XT, $src", IIC_LdStLFD,
1056 [(set f32:$XT, (load xoaddr:$src))]>;
// Integer word loads with sign (lfiwax) / zero (lfiwzx) extension,
// producing an f64-typed VSX scalar:
1057 def LXSIWAX : XX1Form<31, 76, (outs vsfrc:$XT), (ins memrr:$src),
1058 "lxsiwax $XT, $src", IIC_LdStLFD,
1059 [(set f64:$XT, (PPClfiwax xoaddr:$src))]>;
1060 def LXSIWZX : XX1Form<31, 12, (outs vsfrc:$XT), (ins memrr:$src),
1061 "lxsiwzx $XT, $src", IIC_LdStLFD,
1062 [(set f64:$XT, (PPClfiwzx xoaddr:$src))]>;
// NOTE(review): the "} // mayLoad" closing brace appears to be missing
// from this excerpt.
1065 // VSX scalar stores introduced in ISA 2.07
1066 let mayStore = 1 in {
1067 def STXSSPX : XX1Form<31, 652, (outs), (ins vssrc:$XT, memrr:$dst),
1068 "stxsspx $XT, $dst", IIC_LdStSTFD,
1069 [(store f32:$XT, xoaddr:$dst)]>;
1070 def STXSIWX : XX1Form<31, 140, (outs), (ins vsfrc:$XT, memrr:$dst),
1071 "stxsiwx $XT, $dst", IIC_LdStSTFD,
1072 [(PPCstfiwx f64:$XT, xoaddr:$dst)]>;
// NOTE(review): the "} // mayStore" closing brace appears to be missing
// from this excerpt.
// f32->f64 extension: an extending load reuses LXSSPX; a register fextend
// is a pure register-class copy (the value is already in double format in
// the VSX register).
1075 def : Pat<(f64 (extloadf32 xoaddr:$src)),
1076 (COPY_TO_REGCLASS (LXSSPX xoaddr:$src), VSFRC)>;
1077 def : Pat<(f64 (fextend f32:$src)),
1078 (COPY_TO_REGCLASS $src, VSFRC)>;
// f32 selectcc on i1 operands: same CR-logical lowering scheme as the
// f64/v2f64 versions earlier in the file, using the SELECT_VSSRC pseudo.
1080 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETLT)),
1081 (SELECT_VSSRC (CRANDC $lhs, $rhs), $tval, $fval)>;
1082 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETULT)),
1083 (SELECT_VSSRC (CRANDC $rhs, $lhs), $tval, $fval)>;
1084 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETLE)),
1085 (SELECT_VSSRC (CRORC $lhs, $rhs), $tval, $fval)>;
1086 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETULE)),
1087 (SELECT_VSSRC (CRORC $rhs, $lhs), $tval, $fval)>;
1088 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETEQ)),
1089 (SELECT_VSSRC (CREQV $lhs, $rhs), $tval, $fval)>;
1090 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETGE)),
1091 (SELECT_VSSRC (CRORC $rhs, $lhs), $tval, $fval)>;
1092 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETUGE)),
1093 (SELECT_VSSRC (CRORC $lhs, $rhs), $tval, $fval)>;
1094 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETGT)),
1095 (SELECT_VSSRC (CRANDC $rhs, $lhs), $tval, $fval)>;
1096 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETUGT)),
1097 (SELECT_VSSRC (CRANDC $lhs, $rhs), $tval, $fval)>;
1098 def : Pat<(f32 (selectcc i1:$lhs, i1:$rhs, f32:$tval, f32:$fval, SETNE)),
1099 (SELECT_VSSRC (CRXOR $lhs, $rhs), $tval, $fval)>;
1101 // VSX Elementary Scalar FP arithmetic (SP)
// add/mul are commutative; div, reciprocal estimate, sqrt, rsqrt estimate
// and sub follow outside the isCommutable scope.
1102 let isCommutable = 1 in {
1103 def XSADDSP : XX3Form<60, 0,
1104 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1105 "xsaddsp $XT, $XA, $XB", IIC_VecFP,
1106 [(set f32:$XT, (fadd f32:$XA, f32:$XB))]>;
1107 def XSMULSP : XX3Form<60, 16,
1108 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1109 "xsmulsp $XT, $XA, $XB", IIC_VecFP,
1110 [(set f32:$XT, (fmul f32:$XA, f32:$XB))]>;
// NOTE(review): the "} // isCommutable" brace (original line 1111) is
// missing from this excerpt.
1113 def XSDIVSP : XX3Form<60, 24,
1114 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1115 "xsdivsp $XT, $XA, $XB", IIC_FPDivS,
1116 [(set f32:$XT, (fdiv f32:$XA, f32:$XB))]>;
// Reciprocal estimate (PPCfre):
1117 def XSRESP : XX2Form<60, 26,
1118 (outs vssrc:$XT), (ins vssrc:$XB),
1119 "xsresp $XT, $XB", IIC_VecFP,
1120 [(set f32:$XT, (PPCfre f32:$XB))]>;
1121 def XSSQRTSP : XX2Form<60, 11,
1122 (outs vssrc:$XT), (ins vssrc:$XB),
1123 "xssqrtsp $XT, $XB", IIC_FPSqrtS,
1124 [(set f32:$XT, (fsqrt f32:$XB))]>;
// Reciprocal square-root estimate (PPCfrsqrte):
1125 def XSRSQRTESP : XX2Form<60, 10,
1126 (outs vssrc:$XT), (ins vssrc:$XB),
1127 "xsrsqrtesp $XT, $XB", IIC_VecFP,
1128 [(set f32:$XT, (PPCfrsqrte f32:$XB))]>;
1129 def XSSUBSP : XX3Form<60, 8,
1130 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1131 "xssubsp $XT, $XA, $XB", IIC_VecFP,
1132 [(set f32:$XT, (fsub f32:$XA, f32:$XB))]>;
// Single-precision scalar FMA instructions. Each BaseName scope pairs the
// A-form (XT is the addend/accumulator, carries the ISel pattern) with the
// pattern-less M-form alternate (IsVSXFMAAlt = 1). RegConstraint ties the
// input accumulator $XTi to the output $XT; NoEncode keeps $XTi out of the
// binary encoding.
// NOTE(review): several "(outs vssrc:$XT)," lines and the "AltVSXFMARel;" /
// closing-brace lines appear to be missing from this excerpt (the embedded
// numbering skips them) — verify against the full file.
1135 let BaseName = "XSMADDASP" in {
1136 let isCommutable = 1 in
1137 def XSMADDASP : XX3Form<60, 1,
1139 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1140 "xsmaddasp $XT, $XA, $XB", IIC_VecFP,
// fma: XT = XA * XB + XTi
1141 [(set f32:$XT, (fma f32:$XA, f32:$XB, f32:$XTi))]>,
1142 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1144 let IsVSXFMAAlt = 1 in
1145 def XSMADDMSP : XX3Form<60, 9,
1147 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1148 "xsmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
1149 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1153 let BaseName = "XSMSUBASP" in {
1154 let isCommutable = 1 in
1155 def XSMSUBASP : XX3Form<60, 17,
1157 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1158 "xsmsubasp $XT, $XA, $XB", IIC_VecFP,
// msub: XT = XA * XB - XTi
1159 [(set f32:$XT, (fma f32:$XA, f32:$XB,
1160 (fneg f32:$XTi)))]>,
1161 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1163 let IsVSXFMAAlt = 1 in
1164 def XSMSUBMSP : XX3Form<60, 25,
1166 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1167 "xsmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
1168 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1172 let BaseName = "XSNMADDASP" in {
1173 let isCommutable = 1 in
1174 def XSNMADDASP : XX3Form<60, 129,
1176 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1177 "xsnmaddasp $XT, $XA, $XB", IIC_VecFP,
// nmadd: XT = -(XA * XB + XTi); the pattern's addend line is truncated here.
1178 [(set f32:$XT, (fneg (fma f32:$XA, f32:$XB,
1180 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1182 let IsVSXFMAAlt = 1 in
1183 def XSNMADDMSP : XX3Form<60, 137,
1185 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1186 "xsnmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
1187 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1191 let BaseName = "XSNMSUBASP" in {
1192 let isCommutable = 1 in
1193 def XSNMSUBASP : XX3Form<60, 145,
1195 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1196 "xsnmsubasp $XT, $XA, $XB", IIC_VecFP,
// nmsub: XT = -(XA * XB - XTi)
1197 [(set f32:$XT, (fneg (fma f32:$XA, f32:$XB,
1198 (fneg f32:$XTi))))]>,
1199 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1201 let IsVSXFMAAlt = 1 in
1202 def XSNMSUBMSP : XX3Form<60, 153,
1204 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1205 "xsnmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
1206 RegConstraint<"$XTi = $XT">, NoEncode<"$XTi">,
1210 // Single Precision Conversions (FP <-> INT)
// Signed / unsigned doubleword integer to single-precision FP, via the
// PPCfcfids / PPCfcfidus nodes (f64-typed integer input in a VSX register):
1211 def XSCVSXDSP : XX2Form<60, 312,
1212 (outs vssrc:$XT), (ins vsfrc:$XB),
1213 "xscvsxdsp $XT, $XB", IIC_VecFP,
1214 [(set f32:$XT, (PPCfcfids f64:$XB))]>;
1215 def XSCVUXDSP : XX2Form<60, 296,
1216 (outs vssrc:$XT), (ins vsfrc:$XB),
1217 "xscvuxdsp $XT, $XB", IIC_VecFP,
1218 [(set f32:$XT, (PPCfcfidus f64:$XB))]>;
1220 // Conversions between vector and scalar single precision
// No ISel patterns here; these are selected explicitly by the
// scalar_to_vector patterns later in the file.
1221 def XSCVDPSPN : XX2Form<60, 267, (outs vsrc:$XT), (ins vssrc:$XB),
1222 "xscvdpspn $XT, $XB", IIC_VecFP, []>;
1223 def XSCVSPDPN : XX2Form<60, 331, (outs vssrc:$XT), (ins vsrc:$XB),
1224 "xscvspdpn $XT, $XB", IIC_VecFP, []>;
// NOTE(review): the matching "} // HasP8Vector" close appears to be missing
// from this excerpt.
1226 } // AddedComplexity = 400
1229 let Predicates = [HasDirectMove, HasVSX] in {
1230 // VSX direct move instructions
// GPR <-> VSR moves (ISA 2.07 direct-move facility). The 64-bit forms
// (MFVSRD / MTVSRD) require 64-bit mode; the word forms sign-extend
// (MTVSRWA), zero-extend (MTVSRWZ), or truncate (MFVSRWZ).
1231 def MFVSRD : XX1_RS6_RD5_XO<31, 51, (outs g8rc:$rA), (ins vsfrc:$XT),
1232 "mfvsrd $rA, $XT", IIC_VecGeneral,
1233 [(set i64:$rA, (PPCmfvsr f64:$XT))]>,
1234 Requires<[In64BitMode]>;
1235 def MFVSRWZ : XX1_RS6_RD5_XO<31, 115, (outs gprc:$rA), (ins vsfrc:$XT),
1236 "mfvsrwz $rA, $XT", IIC_VecGeneral,
1237 [(set i32:$rA, (PPCmfvsr f64:$XT))]>;
1238 def MTVSRD : XX1_RS6_RD5_XO<31, 179, (outs vsfrc:$XT), (ins g8rc:$rA),
1239 "mtvsrd $XT, $rA", IIC_VecGeneral,
1240 [(set f64:$XT, (PPCmtvsra i64:$rA))]>,
1241 Requires<[In64BitMode]>;
1242 def MTVSRWA : XX1_RS6_RD5_XO<31, 211, (outs vsfrc:$XT), (ins gprc:$rA),
1243 "mtvsrwa $XT, $rA", IIC_VecGeneral,
1244 [(set f64:$XT, (PPCmtvsra i32:$rA))]>;
1245 def MTVSRWZ : XX1_RS6_RD5_XO<31, 243, (outs vsfrc:$XT), (ins gprc:$rA),
1246 "mtvsrwz $XT, $rA", IIC_VecGeneral,
1247 [(set f64:$XT, (PPCmtvsrz i32:$rA))]>;
1248 } // HasDirectMove, HasVSX
1250 /* Direct moves of various size entities from GPR's into VSR's. Each lines
1251 the value up into element 0 (both BE and LE). Namely, entities smaller than
1252 a doubleword are shifted left and moved for BE. For LE, they're moved, then
1253 swapped to go into the least significant element of the VSR.
// NOTE(review): the "def Moves {" opener (referenced below as Moves.*) and
// the RLDICR shift lines of the BE dags appear to be missing from this
// excerpt — the embedded numbering skips them. Verify against the full file.
// BE dags: widen the 32-bit GPR value to i64, (shift it into the high bits
// for sub-doubleword entities — truncated here), then MTVSRD into the VSR.
1256 dag BE_BYTE_0 = (MTVSRD
1258 (INSERT_SUBREG (i64 (IMPLICIT_DEF)), $A, sub_32), 56, 7));
1259 dag BE_HALF_0 = (MTVSRD
1261 (INSERT_SUBREG (i64 (IMPLICIT_DEF)), $A, sub_32), 48, 15));
1262 dag BE_WORD_0 = (MTVSRD
1264 (INSERT_SUBREG (i64 (IMPLICIT_DEF)), $A, sub_32), 32, 31));
1265 dag BE_DWORD_0 = (MTVSRD $A);
// LE dags: move first, then XXPERMDI-swap (imm 2) so the value lands in
// the least significant element.
1267 dag LE_MTVSRW = (MTVSRD (INSERT_SUBREG (i64 (IMPLICIT_DEF)), $A, sub_32));
1268 dag LE_WORD_1 = (v2i64 (COPY_TO_REGCLASS LE_MTVSRW, VSRC));
1269 dag LE_WORD_0 = (XXPERMDI LE_WORD_1, LE_WORD_1, 2);
1270 dag LE_DWORD_1 = (v2i64 (COPY_TO_REGCLASS BE_DWORD_0, VSRC));
1271 dag LE_DWORD_0 = (XXPERMDI LE_DWORD_1, LE_DWORD_1, 2);
// scalar_to_vector lowering, split by endianness.
// BE float: XSCVDPSPN splats the converted single into the vector register.
1274 let Predicates = [IsBigEndian, HasP8Vector] in {
1275 def : Pat<(v4f32 (scalar_to_vector f32:$A)),
1276 (v4f32 (XSCVDPSPN $A))>;
1277 } // IsBigEndian, HasP8Vector
// BE integers: use the size-specific BE_* direct-move dags defined above.
1279 let Predicates = [IsBigEndian, HasDirectMove] in {
1280 def : Pat<(v16i8 (scalar_to_vector i32:$A)),
1281 (v16i8 (COPY_TO_REGCLASS Moves.BE_BYTE_0, VSRC))>;
1282 def : Pat<(v8i16 (scalar_to_vector i32:$A)),
1283 (v8i16 (COPY_TO_REGCLASS Moves.BE_HALF_0, VSRC))>;
1284 def : Pat<(v4i32 (scalar_to_vector i32:$A)),
1285 (v4i32 (COPY_TO_REGCLASS Moves.BE_WORD_0, VSRC))>;
1286 def : Pat<(v2i64 (scalar_to_vector i64:$A)),
1287 (v2i64 (COPY_TO_REGCLASS Moves.BE_DWORD_0, VSRC))>;
1288 } // IsBigEndian, HasDirectMove
// LE float: convert, then XXSLDWI word-rotate by 1 to position the element.
1290 let Predicates = [IsLittleEndian, HasP8Vector] in {
1291 def : Pat<(v4f32 (scalar_to_vector f32:$A)),
1292 (v4f32 (XXSLDWI (XSCVDPSPN $A), (XSCVDPSPN $A), 1))>;
1293 } // IsLittleEndian, HasP8Vector
// LE integers: all sub-doubleword sizes share the swapped LE_WORD_0 dag;
// doublewords use LE_DWORD_0.
1295 let Predicates = [IsLittleEndian, HasDirectMove] in {
1296 def : Pat<(v16i8 (scalar_to_vector i32:$A)),
1297 (v16i8 (COPY_TO_REGCLASS Moves.LE_WORD_0, VSRC))>;
1298 def : Pat<(v8i16 (scalar_to_vector i32:$A)),
1299 (v8i16 (COPY_TO_REGCLASS Moves.LE_WORD_0, VSRC))>;
1300 def : Pat<(v4i32 (scalar_to_vector i32:$A)),
1301 (v4i32 (COPY_TO_REGCLASS Moves.LE_WORD_0, VSRC))>;
1302 def : Pat<(v2i64 (scalar_to_vector i64:$A)),
1303 (v2i64 Moves.LE_DWORD_0)>;
1304 } // IsLittleEndian, HasDirectMove