V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
instruction-selector-ppc.cc
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/base/adapters.h"
6 #include "src/compiler/backend/instruction-selector-impl.h"
7 #include "src/compiler/node-matchers.h"
8 #include "src/compiler/node-properties.h"
9 #include "src/ppc/frame-constants-ppc.h"
10 
11 namespace v8 {
12 namespace internal {
13 namespace compiler {
14 
// Immediate operand classes used by the PPC instruction selector. Each mode
// names the range of values that can be folded into an instruction as an
// immediate (see PPCOperandGenerator::CanBeImmediate for the exact checks).
enum ImmediateMode {
  kInt16Imm,               // signed 16-bit immediate
  kInt16Imm_Unsigned,      // unsigned 16-bit immediate
  kInt16Imm_Negate,        // negated value fits in signed 16 bits
  kInt16Imm_4ByteAligned,  // signed 16-bit with the low two bits clear
  kShift32Imm,             // shift amount in [0, 32)
  kShift64Imm,             // shift amount in [0, 64)
  kNoImmediate             // operand must live in a register
};
24 
25 // Adds PPC-specific methods for generating operands.
26 class PPCOperandGenerator final : public OperandGenerator {
27  public:
28  explicit PPCOperandGenerator(InstructionSelector* selector)
29  : OperandGenerator(selector) {}
30 
31  InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
32  if (CanBeImmediate(node, mode)) {
33  return UseImmediate(node);
34  }
35  return UseRegister(node);
36  }
37 
38  bool CanBeImmediate(Node* node, ImmediateMode mode) {
39  int64_t value;
40  if (node->opcode() == IrOpcode::kInt32Constant)
41  value = OpParameter<int32_t>(node->op());
42  else if (node->opcode() == IrOpcode::kInt64Constant)
43  value = OpParameter<int64_t>(node->op());
44  else
45  return false;
46  return CanBeImmediate(value, mode);
47  }
48 
49  bool CanBeImmediate(int64_t value, ImmediateMode mode) {
50  switch (mode) {
51  case kInt16Imm:
52  return is_int16(value);
53  case kInt16Imm_Unsigned:
54  return is_uint16(value);
55  case kInt16Imm_Negate:
56  return is_int16(-value);
57  case kInt16Imm_4ByteAligned:
58  return is_int16(value) && !(value & 3);
59  case kShift32Imm:
60  return 0 <= value && value < 32;
61  case kShift64Imm:
62  return 0 <= value && value < 64;
63  case kNoImmediate:
64  return false;
65  }
66  return false;
67  }
68 
69  // Use the stack pointer if the node is LoadStackPointer, otherwise assign a
70  // register.
71  InstructionOperand UseRegisterOrStackPointer(Node* node) {
72  if (node->opcode() == IrOpcode::kLoadStackPointer) {
73  return LocationOperand(LocationOperand::EXPLICIT,
74  LocationOperand::REGISTER,
75  MachineRepresentation::kWord32, sp.code());
76  }
77  return UseRegister(node);
78  }
79 };
80 
81 namespace {
82 
83 void VisitRR(InstructionSelector* selector, InstructionCode opcode,
84  Node* node) {
85  PPCOperandGenerator g(selector);
86  selector->Emit(opcode, g.DefineAsRegister(node),
87  g.UseRegister(node->InputAt(0)));
88 }
89 
90 void VisitRRR(InstructionSelector* selector, InstructionCode opcode,
91  Node* node) {
92  PPCOperandGenerator g(selector);
93  selector->Emit(opcode, g.DefineAsRegister(node),
94  g.UseRegister(node->InputAt(0)),
95  g.UseRegister(node->InputAt(1)));
96 }
97 
98 void VisitRRO(InstructionSelector* selector, InstructionCode opcode, Node* node,
99  ImmediateMode operand_mode) {
100  PPCOperandGenerator g(selector);
101  selector->Emit(opcode, g.DefineAsRegister(node),
102  g.UseRegister(node->InputAt(0)),
103  g.UseOperand(node->InputAt(1), operand_mode));
104 }
105 
#if V8_TARGET_ARCH_PPC64
// Emits a truncating float-to-integer conversion. If projection 1 of the
// node is used, a second output is defined carrying the success flag.
void VisitTryTruncateDouble(InstructionSelector* selector,
                            InstructionCode opcode, Node* node) {
  PPCOperandGenerator g(selector);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  if (Node* success_output = NodeProperties::FindProjection(node, 1)) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  selector->Emit(opcode, output_count, outputs, 1, inputs);
}
#endif
123 
// Shared routine for multiple binary operations. Emits |opcode| with the
// left operand in a register and the right operand folded into an immediate
// when |operand_mode| allows; the flags continuation |cont| attaches any
// branch/deopt/set behavior.
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, ImmediateMode operand_mode,
                FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Matcher m(node);
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  inputs[input_count++] = g.UseRegister(m.left().node());
  inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);

  if (cont->IsDeoptimize()) {
    // If we can deoptimize as a result of the binop, we need to make sure that
    // the deopt inputs are not overwritten by the binop result. One way
    // to achieve that is to declare the output register as same-as-first.
    outputs[output_count++] = g.DefineSameAsFirst(node);
  } else {
    outputs[output_count++] = g.DefineAsRegister(node);
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
                                 inputs, cont);
}
156 
// Shared routine for multiple binary operations, for callers that need no
// flags continuation (plain value-producing binops).
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, ImmediateMode operand_mode) {
  FlagsContinuation cont;
  VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
}
164 
165 } // namespace
166 
// Allocates a spill slot in the current frame and materializes its address
// into the node's output register.
void InstructionSelector::VisitStackSlot(Node* node) {
  StackSlotRepresentation rep = StackSlotRepresentationOf(node->op());
  int slot = frame_->AllocateSpillSlot(rep.size());
  OperandGenerator g(this);

  Emit(kArchStackSlot, g.DefineAsRegister(node),
       sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
175 
// Emits the debug-abort sequence. The message operand is pinned to r4 —
// presumably the register the abort stub reads; confirm in the code
// generator if changing.
void InstructionSelector::VisitDebugAbort(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), r4));
}
180 
// Selects a load instruction: picks the opcode from the machine
// representation, then chooses register+immediate addressing (MRI) when
// either base or offset is an encodable constant, falling back to
// register+register (MRR). Also serves poisoned and atomic loads.
void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  // |mode| is the immediate class a folded displacement must satisfy;
  // 64-bit loads additionally require 4-byte alignment (DS-form).
  InstructionCode opcode = kArchNop;
  ImmediateMode mode = kInt16Imm;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kPPC_LoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kPPC_LoadDouble;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kPPC_LoadWordS8 : kPPC_LoadWordU8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kPPC_LoadWordS16 : kPPC_LoadWordU16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kPPC_LoadWordU32;
      break;
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:         // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kPPC_LoadWord64;
      mode = kInt16Imm_4ByteAligned;
      break;
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }

  // Fold the poisoning flag into the opcode unless mitigation is off.
  if (node->opcode() == IrOpcode::kPoisonedLoad &&
      poisoning_level_ != PoisoningMitigationLevel::kDontPoison) {
    opcode |= MiscField::encode(kMemoryAccessPoisoned);
  }

  // Atomic loads share this visitor; the flag travels to the code generator
  // as an extra immediate input.
  bool is_atomic = (node->opcode() == IrOpcode::kWord32AtomicLoad ||
                    node->opcode() == IrOpcode::kWord64AtomicLoad);

  // base + offset is commutative, so either side may supply the immediate.
  if (g.CanBeImmediate(offset, mode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset),
         g.UseImmediate(is_atomic));
  } else if (g.CanBeImmediate(base, mode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base),
         g.UseImmediate(is_atomic));
  } else {
    Emit(opcode | AddressingModeField::encode(kMode_MRR),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
         g.UseImmediate(is_atomic));
  }
}
240 
// Poisoned loads differ from plain loads only in the poison bit that
// VisitLoad folds into the opcode, so simply delegate.
void InstructionSelector::VisitPoisonedLoad(Node* node) { VisitLoad(node); }

void InstructionSelector::VisitProtectedLoad(Node* node) {
  // TODO(eholk)
  UNIMPLEMENTED();
}
247 
// Selects a store instruction. Stores needing a write barrier go through the
// generic kArchStoreWithWriteBarrier path; all others pick a representation-
// specific PPC store with MRI or MRR addressing. Also serves atomic stores
// (which never take the write-barrier path).
void InstructionSelector::VisitStore(Node* node) {
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  Node* value = node->InputAt(2);

  bool is_atomic = (node->opcode() == IrOpcode::kWord32AtomicStore ||
                    node->opcode() == IrOpcode::kWord64AtomicStore);

  MachineRepresentation rep;
  WriteBarrierKind write_barrier_kind = kNoWriteBarrier;

  // Atomic stores carry their representation in a different operator
  // parameter and have no write barrier.
  if (is_atomic) {
    rep = AtomicStoreRepresentationOf(node->op());
  } else {
    StoreRepresentation store_rep = StoreRepresentationOf(node->op());
    write_barrier_kind = store_rep.write_barrier_kind();
    rep = store_rep.representation();
  }

  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK(CanBeTaggedPointer(rep));
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    // OutOfLineRecordWrite uses the offset in an 'add' instruction as well as
    // for the store itself, so we must check compatibility with both.
    if (g.CanBeImmediate(offset, kInt16Imm)
#if V8_TARGET_ARCH_PPC64
        && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
#endif
            ) {
      inputs[input_count++] = g.UseImmediate(offset);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(offset);
      addressing_mode = kMode_MRR;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    // Narrow the record-write work based on what the barrier kind proves
    // about the stored value.
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    // Atomic stores must never reach the write-barrier path.
    CHECK_EQ(is_atomic, false);
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = kArchNop;
    // Immediate class a folded displacement must satisfy; tightened to
    // 4-byte-aligned for 64-bit (DS-form) stores below.
    ImmediateMode mode = kInt16Imm;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kPPC_StoreFloat32;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kPPC_StoreDouble;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kPPC_StoreWord8;
        break;
      case MachineRepresentation::kWord16:
        opcode = kPPC_StoreWord16;
        break;
#if !V8_TARGET_ARCH_PPC64
      case MachineRepresentation::kTaggedSigned:   // Fall through.
      case MachineRepresentation::kTaggedPointer:  // Fall through.
      case MachineRepresentation::kTagged:         // Fall through.
#endif
      case MachineRepresentation::kWord32:
        opcode = kPPC_StoreWord32;
        break;
#if V8_TARGET_ARCH_PPC64
      case MachineRepresentation::kTaggedSigned:   // Fall through.
      case MachineRepresentation::kTaggedPointer:  // Fall through.
      case MachineRepresentation::kTagged:         // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kPPC_StoreWord64;
        mode = kInt16Imm_4ByteAligned;
        break;
#else
      case MachineRepresentation::kWord64:  // Fall through.
#endif
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }

    // As in VisitLoad: base + offset is commutative, so either side may
    // supply the immediate displacement.
    if (g.CanBeImmediate(offset, mode)) {
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value),
           g.UseImmediate(is_atomic));
    } else if (g.CanBeImmediate(base, mode)) {
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value),
           g.UseImmediate(is_atomic));
    } else {
      Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
           g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value),
           g.UseImmediate(is_atomic));
    }
  }
}
367 
void InstructionSelector::VisitProtectedStore(Node* node) {
  // TODO(eholk)
  UNIMPLEMENTED();
}

// Architecture supports unaligned access, therefore VisitLoad is used instead
void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }

// Architecture supports unaligned access, therefore VisitStore is used instead
void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
378 
// Shared routine for Word32/Word64 And/Or. When one operand is Xor(x, -1)
// (i.e. bitwise complement) and this node can cover it, the complement is
// folded into the AndComplement/OrComplement instruction; otherwise the
// plain binop is emitted via VisitBinop.
template <typename Matcher>
static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
                         ArchOpcode opcode, bool left_can_cover,
                         bool right_can_cover, ImmediateMode imm_mode) {
  PPCOperandGenerator g(selector);

  // Map instruction to equivalent operation with inverted right input.
  ArchOpcode inv_opcode = opcode;
  switch (opcode) {
    case kPPC_And:
      inv_opcode = kPPC_AndComplement;
      break;
    case kPPC_Or:
      inv_opcode = kPPC_OrComplement;
      break;
    default:
      UNREACHABLE();
  }

  // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
  if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
    Matcher mleft(m->left().node());
    if (mleft.right().Is(-1)) {
      // Note: operands are swapped so the complemented input is second.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->right().node()),
                     g.UseRegister(mleft.left().node()));
      return;
    }
  }

  // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
  if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
      right_can_cover) {
    Matcher mright(m->right().node());
    if (mright.right().Is(-1)) {
      // TODO(all): support shifted operand on right.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->left().node()),
                     g.UseRegister(mright.left().node()));
      return;
    }
  }

  VisitBinop<Matcher>(selector, node, opcode, imm_mode);
}
424 
425 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
426  int mask_width = base::bits::CountPopulation(value);
427  int mask_msb = base::bits::CountLeadingZeros32(value);
428  int mask_lsb = base::bits::CountTrailingZeros32(value);
429  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
430  return false;
431  *mb = mask_lsb + mask_width - 1;
432  *me = mask_lsb;
433  return true;
434 }
435 
#if V8_TARGET_ARCH_PPC64
// 64-bit analogue of IsContiguousMask32: yields the mb/me immediates for the
// 64-bit rotate-and-mask instructions when |value|'s set bits are one
// contiguous run.
static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
  int const population = base::bits::CountPopulation(value);
  int const leading = base::bits::CountLeadingZeros64(value);
  int const trailing = base::bits::CountTrailingZeros64(value);
  // Leading zeros + run + trailing zeros must account for all 64 bits.
  if (population == 0 || leading + population + trailing != 64) return false;
  *mb = trailing + population - 1;
  *me = trailing;
  return true;
}
#endif
448 
// TODO(mbrandy): Absorb rotate-right into rlwinm?
// AND with a contiguous mask becomes a single rotate-left-and-mask
// instruction, optionally absorbing a shift of the left operand into the
// rotate amount; otherwise falls back to VisitLogical.
void InstructionSelector::VisitWord32And(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
        CanCover(node, left)) {
      // Try to absorb left/right shift into rlwinm
      Int32BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 31)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord32Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 31 - sh) mb = 31 - sh;
          // Right shift by sh is a left rotate by (32 - sh) mod 32.
          sh = (32 - sh) & 0x1F;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    // mb >= me means the (possibly clipped) mask is still non-empty.
    if (mb >= me) {
      Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node), g.UseRegister(left),
           g.TempImmediate(sh), g.TempImmediate(mb), g.TempImmediate(me));
      return;
    }
  }
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kPPC_And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
485 
#if V8_TARGET_ARCH_PPC64
// TODO(mbrandy): Absorb rotate-right into rldic?
// 64-bit analogue of VisitWord32And: AND with a contiguous mask becomes one
// of the rotate-and-clear instructions (left-clear, right-clear, or both),
// optionally absorbing a shift of the left operand.
void InstructionSelector::VisitWord64And(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
        CanCover(node, left)) {
      // Try to absorb left/right shift into rldic
      Int64BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 63)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord64Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 63 - sh) mb = 63 - sh;
          // Right shift by sh is a left rotate by (64 - sh) mod 64.
          sh = (64 - sh) & 0x3F;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    if (mb >= me) {
      // Unlike the 32-bit case there is no single mb/me instruction; pick
      // the variant whose single mask immediate can express this run.
      bool match = false;
      ArchOpcode opcode;
      int mask;
      if (me == 0) {
        // Mask extends to bit 0: clear only the high bits.
        match = true;
        opcode = kPPC_RotLeftAndClearLeft64;
        mask = mb;
      } else if (mb == 63) {
        // Mask extends to bit 63: clear only the low bits.
        match = true;
        opcode = kPPC_RotLeftAndClearRight64;
        mask = me;
      } else if (sh && me <= sh && m.left().IsWord64Shl()) {
        // Interior mask reachable via rldic-style clear on a left shift.
        match = true;
        opcode = kPPC_RotLeftAndClear64;
        mask = mb;
      }
      if (match) {
        Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
             g.TempImmediate(sh), g.TempImmediate(mask));
        return;
      }
    }
  }
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kPPC_And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
#endif
542 
// Logical OR; VisitLogical may fold a complemented operand into
// kPPC_OrComplement.
void InstructionSelector::VisitWord32Or(Node* node) {
  Int32BinopMatcher m(node);
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
549 
#if V8_TARGET_ARCH_PPC64
// 64-bit logical OR; same complement folding as the 32-bit variant.
void InstructionSelector::VisitWord64Or(Node* node) {
  Int64BinopMatcher m(node);
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
#endif
558 
559 void InstructionSelector::VisitWord32Xor(Node* node) {
560  PPCOperandGenerator g(this);
561  Int32BinopMatcher m(node);
562  if (m.right().Is(-1)) {
563  Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
564  } else {
565  VisitBinop<Int32BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
566  }
567 }
568 
#if V8_TARGET_ARCH_PPC64
// 64-bit XOR; x ^ -1 is strength-reduced to the dedicated NOT instruction.
void InstructionSelector::VisitWord64Xor(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (!m.right().Is(-1)) {
    VisitBinop<Int64BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
    return;
  }
  Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
}
#endif
580 
// Left shift; (x & mask) << sh with a still-contiguous shifted mask fuses
// into a single rotate-left-and-mask instruction.
void InstructionSelector::VisitWord32Shl(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
    // Try to absorb logical-and into rlwinm
    Int32BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
             g.TempImmediate(mb), g.TempImmediate(me));
        return;
      }
    }
  }
  VisitRRO(this, kPPC_ShiftLeft32, node, kShift32Imm);
}
604 
#if V8_TARGET_ARCH_PPC64
// 64-bit left shift; (x & mask) << sh with a still-contiguous shifted mask
// fuses into one of the rotate-and-clear instructions (see VisitWord64And
// for the opcode triage).
void InstructionSelector::VisitWord64Shl(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  // TODO(mbrandy): eliminate left sign extension if right >= 32
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    // Try to absorb logical-and into rldic
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          // Mask reaches bit 0: clear only the high bits.
          match = true;
          opcode = kPPC_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          // Mask reaches bit 63: clear only the low bits.
          match = true;
          opcode = kPPC_RotLeftAndClearRight64;
          mask = me;
        } else if (sh && me <= sh) {
          // Interior mask expressible with an rldic-style clear.
          match = true;
          opcode = kPPC_RotLeftAndClear64;
          mask = mb;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  VisitRRO(this, kPPC_ShiftLeft64, node, kShift64Imm);
}
#endif
649 
// Logical right shift; (x & mask) >> sh with a still-contiguous shifted mask
// fuses into a single rotate-left-and-mask (right shift expressed as a left
// rotate by 32 - sh).
void InstructionSelector::VisitWord32Shr(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
    // Try to absorb logical-and into rlwinm
    Int32BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 31 - sh) mb = 31 - sh;
      sh = (32 - sh) & 0x1F;
      if (mb >= me) {
        Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
             g.TempImmediate(mb), g.TempImmediate(me));
        return;
      }
    }
  }
  VisitRRO(this, kPPC_ShiftRight32, node, kShift32Imm);
}
674 
#if V8_TARGET_ARCH_PPC64
// 64-bit logical right shift; (x & mask) >> sh with a still-contiguous
// shifted mask fuses into a rotate-and-clear instruction. Only the edge-
// anchored mask variants apply here (no interior-clear case, unlike Shl).
void InstructionSelector::VisitWord64Shr(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    // Try to absorb logical-and into rldic
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 63 - sh) mb = 63 - sh;
      // Right shift by sh is a left rotate by (64 - sh) mod 64.
      sh = (64 - sh) & 0x3F;
      if (mb >= me) {
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          match = true;
          opcode = kPPC_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          match = true;
          opcode = kPPC_RotLeftAndClearRight64;
          mask = me;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  VisitRRO(this, kPPC_ShiftRight64, node, kShift64Imm);
}
#endif
715 
// Arithmetic right shift.
void InstructionSelector::VisitWord32Sar(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  // Replace with sign extension for (x << K) >> K where K is 16 or 24.
  if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      // (x << 16) >> 16 == sign-extend the low halfword.
      Emit(kPPC_ExtendSignWord16, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      // (x << 24) >> 24 == sign-extend the low byte.
      Emit(kPPC_ExtendSignWord8, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()));
      return;
    }
  }
  VisitRRO(this, kPPC_ShiftRightAlg32, node, kShift32Imm);
}
734 
735 #if !V8_TARGET_ARCH_PPC64
736 void VisitPairBinop(InstructionSelector* selector, InstructionCode opcode,
737  InstructionCode opcode2, Node* node) {
738  PPCOperandGenerator g(selector);
739 
740  Node* projection1 = NodeProperties::FindProjection(node, 1);
741  if (projection1) {
742  // We use UseUniqueRegister here to avoid register sharing with the output
743  // registers.
744  InstructionOperand inputs[] = {
745  g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
746  g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
747 
748  InstructionOperand outputs[] = {
749  g.DefineAsRegister(node),
750  g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
751 
752  selector->Emit(opcode, 2, outputs, 4, inputs);
753  } else {
754  // The high word of the result is not used, so we emit the standard 32 bit
755  // instruction.
756  selector->Emit(opcode2, g.DefineSameAsFirst(node),
757  g.UseRegister(node->InputAt(0)),
758  g.UseRegister(node->InputAt(2)));
759  }
760 }
761 
// 64-bit add/sub lowered as register pairs on 32-bit PPC.
void InstructionSelector::VisitInt32PairAdd(Node* node) {
  VisitPairBinop(this, kPPC_AddPair, kPPC_Add32, node);
}

void InstructionSelector::VisitInt32PairSub(Node* node) {
  VisitPairBinop(this, kPPC_SubPair, kPPC_Sub, node);
}
769 
770 void InstructionSelector::VisitInt32PairMul(Node* node) {
771  PPCOperandGenerator g(this);
772  Node* projection1 = NodeProperties::FindProjection(node, 1);
773  if (projection1) {
774  InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
775  g.UseUniqueRegister(node->InputAt(1)),
776  g.UseUniqueRegister(node->InputAt(2)),
777  g.UseUniqueRegister(node->InputAt(3))};
778 
779  InstructionOperand outputs[] = {
780  g.DefineAsRegister(node),
781  g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
782 
783  InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
784 
785  Emit(kPPC_MulPair, 2, outputs, 4, inputs, 2, temps);
786  } else {
787  // The high word of the result is not used, so we emit the standard 32 bit
788  // instruction.
789  Emit(kPPC_Mul32, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)),
790  g.UseRegister(node->InputAt(2)));
791  }
792 }
793 
794 namespace {
// Shared routine for multiple shift operations on register pairs
// (low word = node itself, high word = projection 1).
void VisitPairShift(InstructionSelector* selector, InstructionCode opcode,
                    Node* node) {
  PPCOperandGenerator g(selector);
  // We use g.UseUniqueRegister here to guarantee that there is
  // no register aliasing of input registers with output registers.
  Int32Matcher m(node->InputAt(2));
  InstructionOperand shift_operand;
  if (m.HasValue()) {
    shift_operand = g.UseImmediate(m.node());
  } else {
    shift_operand = g.UseUniqueRegister(m.node());
  }

  InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
                                 g.UseUniqueRegister(node->InputAt(1)),
                                 shift_operand};

  Node* projection1 = NodeProperties::FindProjection(node, 1);

  InstructionOperand outputs[2];
  InstructionOperand temps[1];
  int32_t output_count = 0;
  int32_t temp_count = 0;

  // The high word either defines the projection or, when unused, goes to a
  // scratch register so the instruction still has somewhere to write it.
  outputs[output_count++] = g.DefineAsRegister(node);
  if (projection1) {
    outputs[output_count++] = g.DefineAsRegister(projection1);
  } else {
    temps[temp_count++] = g.TempRegister();
  }

  selector->Emit(opcode, output_count, outputs, 3, inputs, temp_count, temps);
}
829 } // namespace
830 
// 64-bit shifts lowered as register-pair shifts on 32-bit PPC.
void InstructionSelector::VisitWord32PairShl(Node* node) {
  VisitPairShift(this, kPPC_ShiftLeftPair, node);
}

void InstructionSelector::VisitWord32PairShr(Node* node) {
  VisitPairShift(this, kPPC_ShiftRightPair, node);
}

void InstructionSelector::VisitWord32PairSar(Node* node) {
  VisitPairShift(this, kPPC_ShiftRightAlgPair, node);
}
842 #endif
843 
#if V8_TARGET_ARCH_PPC64
// 64-bit arithmetic right shift.
void InstructionSelector::VisitWord64Sar(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
      m.right().Is(32)) {
    // Just load and sign-extend the interesting 4 bytes instead. This happens,
    // for example, when we're loading and untagging SMIs.
    BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
                                                AddressOption::kAllowAll);
    if (mleft.matches() && mleft.index() == nullptr) {
      int64_t offset = 0;
      Node* displacement = mleft.displacement();
      if (displacement != nullptr) {
        Int64Matcher mdisplacement(displacement);
        DCHECK(mdisplacement.HasValue());
        offset = mdisplacement.Value();
      }
      // Shift the displacement to address the upper word holding the Smi.
      offset = SmiWordOffset(offset);
      if (g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)) {
        // Trailing 0 immediate marks the load as non-atomic (see VisitLoad).
        Emit(kPPC_LoadWordS32 | AddressingModeField::encode(kMode_MRI),
             g.DefineAsRegister(node), g.UseRegister(mleft.base()),
             g.TempImmediate(offset), g.UseImmediate(0));
        return;
      }
    }
  }
  VisitRRO(this, kPPC_ShiftRightAlg64, node, kShift64Imm);
}
#endif
874 
// TODO(mbrandy): Absorb logical-and into rlwinm?
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kPPC_RotRight32, node, kShift32Imm);
}

#if V8_TARGET_ARCH_PPC64
// TODO(mbrandy): Absorb logical-and into rldic?
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitRRO(this, kPPC_RotRight64, node, kShift64Imm);
}
#endif

// Count leading zeros.
void InstructionSelector::VisitWord32Clz(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Cntlz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Clz(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Cntlz64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}
#endif

// Population count.
void InstructionSelector::VisitWord32Popcnt(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Popcnt32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Popcnt(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Popcnt64, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
#endif

// Count-trailing-zeros and bit-reverse are not selected on this backend.
void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
#endif

void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
#endif

// Byte swap. The 64-bit variant needs a scratch register and a unique input
// so the temp does not alias it.
void InstructionSelector::VisitWord64ReverseBytes(Node* node) {
  PPCOperandGenerator g(this);
  InstructionOperand temp[] = {g.TempRegister()};
  Emit(kPPC_ByteRev64, g.DefineAsRegister(node),
       g.UseUniqueRegister(node->InputAt(0)), 1, temp);
}

void InstructionSelector::VisitWord32ReverseBytes(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_ByteRev32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

// No dedicated speculation fence is selected on this backend.
void InstructionSelector::VisitSpeculationFence(Node* node) { UNREACHABLE(); }
939 
// Integer addition; a signed 16-bit immediate operand is allowed (kInt16Imm).
void InstructionSelector::VisitInt32Add(Node* node) {
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_Add32, kInt16Imm);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Add(Node* node) {
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm);
}
#endif
949 
950 void InstructionSelector::VisitInt32Sub(Node* node) {
951  PPCOperandGenerator g(this);
952  Int32BinopMatcher m(node);
953  if (m.left().Is(0)) {
954  Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
955  } else {
956  VisitBinop<Int32BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
957  }
958 }
959 
#if V8_TARGET_ARCH_PPC64
// 64-bit counterpart of VisitInt32Sub: `0 - x` becomes a negate, anything
// else a subtract with an optional negatable 16-bit immediate.
void InstructionSelector::VisitInt64Sub(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().Is(0)) {
    // 0 - x  =>  neg x.
    Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
    return;
  }
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
}
#endif
971 
namespace {

// Forward declaration; the definition lives in the shared-comparison
// section of this file.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont);
// Lowers Int32MulWithOverflow. Emits a 32x32 multiply producing both the
// low 32-bit result and the high 32 bits of the full product; the multiply
// overflowed iff the high word differs from the sign-extension of the low
// word (low >> 31). The compare of those two values feeds the continuation
// (callers set the condition to kNotEqual, see the uses below).
void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node,
                              FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand result_operand = g.DefineAsRegister(node);
  InstructionOperand high32_operand = g.TempRegister();
  InstructionOperand temp_operand = g.TempRegister();
  {
    InstructionOperand outputs[] = {result_operand, high32_operand};
    InstructionOperand inputs[] = {g.UseRegister(m.left().node()),
                                   g.UseRegister(m.right().node())};
    selector->Emit(kPPC_Mul32WithHigh32, 2, outputs, 2, inputs);
  }
  {
    // temp = result >> 31 (arithmetic) — the high word we would expect if
    // the product fit in 32 bits.
    InstructionOperand shift_31 = g.UseImmediate(31);
    InstructionOperand outputs[] = {temp_operand};
    InstructionOperand inputs[] = {result_operand, shift_31};
    selector->Emit(kPPC_ShiftRightAlg32, 1, outputs, 2, inputs);
  }

  VisitCompare(selector, kPPC_Cmp32, high32_operand, temp_operand, cont);
}

}  // namespace
1001 
// Multiply, divide and modulus all lower to simple three-register
// instructions on PPC.
void InstructionSelector::VisitInt32Mul(Node* node) {
  VisitRRR(this, kPPC_Mul32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Mul(Node* node) {
  VisitRRR(this, kPPC_Mul64, node);
}
#endif

// MulHigh yields only the upper 32 bits of the full 64-bit product.
void InstructionSelector::VisitInt32MulHigh(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_MulHigh32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}

void InstructionSelector::VisitUint32MulHigh(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_MulHighU32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}

void InstructionSelector::VisitInt32Div(Node* node) {
  VisitRRR(this, kPPC_Div32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Div(Node* node) {
  VisitRRR(this, kPPC_Div64, node);
}
#endif

void InstructionSelector::VisitUint32Div(Node* node) {
  VisitRRR(this, kPPC_DivU32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitUint64Div(Node* node) {
  VisitRRR(this, kPPC_DivU64, node);
}
#endif

void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitRRR(this, kPPC_Mod32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitRRR(this, kPPC_Mod64, node);
}
#endif

void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitRRR(this, kPPC_ModU32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitRRR(this, kPPC_ModU64, node);
}
#endif
1063 
// Numeric conversions: each maps 1:1 onto a single conversion opcode.
void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Float32ToDouble, node);
}

void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  VisitRR(this, kPPC_Int32ToFloat32, node);
}

void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  VisitRR(this, kPPC_Uint32ToFloat32, node);
}

void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Int32ToDouble, node);
}

void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Uint32ToDouble, node);
}

void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}

void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}

void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}

void InstructionSelector::VisitSignExtendWord8ToInt32(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_ExtendSignWord8, node);
}

void InstructionSelector::VisitSignExtendWord16ToInt32(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_ExtendSignWord16, node);
}
1105 
#if V8_TARGET_ARCH_PPC64
// 64-bit-only conversions. The TryTruncate* variants go through
// VisitTryTruncateDouble (defined earlier in this file), which handles the
// optional success projection.
void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
}

void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
}

void InstructionSelector::VisitTruncateFloat64ToInt64(Node* node) {
  VisitRR(this, kPPC_DoubleToInt64, node);
}

void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
}

void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
}

void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_ExtendSignWord32, node);
}

void InstructionSelector::VisitSignExtendWord8ToInt64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_ExtendSignWord8, node);
}

void InstructionSelector::VisitSignExtendWord16ToInt64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_ExtendSignWord16, node);
}

void InstructionSelector::VisitSignExtendWord32ToInt64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_ExtendSignWord32, node);
}

void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_Uint32ToUint64, node);
}

void InstructionSelector::VisitChangeFloat64ToUint64(Node* node) {
  VisitRR(this, kPPC_DoubleToUint64, node);
}

void InstructionSelector::VisitChangeFloat64ToInt64(Node* node) {
  VisitRR(this, kPPC_DoubleToInt64, node);
}
#endif
1160 
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRR(this, kPPC_DoubleToFloat32, node);
}

// JS-style double-to-int32 truncation uses the arch-generic opcode.
void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, kArchTruncateDoubleToI, node);
}

void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}

void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}

void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_Int64ToInt32, node);
}

void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  VisitRR(this, kPPC_Int64ToFloat32, node);
}

void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  VisitRR(this, kPPC_Int64ToDouble, node);
}

void InstructionSelector::VisitChangeInt64ToFloat64(Node* node) {
  VisitRR(this, kPPC_Int64ToDouble, node);
}

void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  VisitRR(this, kPPC_Uint64ToFloat32, node);
}

void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  VisitRR(this, kPPC_Uint64ToDouble, node);
}
#endif

// Bitcasts reinterpret the bits between FP and integer registers without
// converting the value.
void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kPPC_BitcastFloat32ToInt32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  VisitRR(this, kPPC_BitcastDoubleToInt64, node);
}
#endif

void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  VisitRR(this, kPPC_BitcastInt32ToFloat32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  VisitRR(this, kPPC_BitcastInt64ToDouble, node);
}
#endif
1227 
// Floating-point arithmetic. The *Double opcodes are shared between
// float32 and float64; MiscField::encode(1) tags the single-precision
// variant (every Float32 visitor below sets it, the Float64 ones do not).
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitRRR(this, kPPC_AddDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Add(Node* node) {
  // TODO(mbrandy): detect multiply-add
  VisitRRR(this, kPPC_AddDouble, node);
}

void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitRRR(this, kPPC_SubDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Sub(Node* node) {
  // TODO(mbrandy): detect multiply-subtract
  VisitRRR(this, kPPC_SubDouble, node);
}

void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kPPC_MulDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Mul(Node* node) {
  // TODO(mbrandy): detect negate
  VisitRRR(this, kPPC_MulDouble, node);
}

void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kPPC_DivDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kPPC_DivDouble, node);
}
1262 
// Float64 modulus is emitted as a call (MarkAsCall): the operands are
// fixed to the d1/d2 double argument registers and the result comes back
// in d1.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_ModDouble, g.DefineAsFixed(node, d1),
       g.UseFixed(node->InputAt(0), d1), g.UseFixed(node->InputAt(1), d2))
      ->MarkAsCall();
}
1269 
// Min/max/abs/sqrt follow the same single-vs-double MiscField convention
// as the arithmetic visitors above.
void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRR(this, kPPC_MaxDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRR(this, kPPC_MaxDouble, node);
}

void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
  VisitRR(this, kPPC_Float64SilenceNaN, node);
}

void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRR(this, kPPC_MinDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRR(this, kPPC_MinDouble, node);
}

void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kPPC_AbsDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kPPC_AbsDouble, node);
}

void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kPPC_SqrtDouble | MiscField::encode(1), node);
}
1301 
// IEEE-754 math helpers are emitted as calls with a fixed double-register
// convention: argument(s) in d1 (and d2 for binops), result in d1.
void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  PPCOperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1))
      ->MarkAsCall();
}

void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  PPCOperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1),
       g.UseFixed(node->InputAt(1), d2))
      ->MarkAsCall();
}
1316 
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRR(this, kPPC_SqrtDouble, node);
}

// Directed rounding: floor (down), ceil (up), truncate (toward zero) and
// round-ties-away each map to a dedicated opcode.
void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, kPPC_FloorDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kPPC_FloorDouble, node);
}

void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, kPPC_CeilDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, kPPC_CeilDouble, node);
}

void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, kPPC_TruncateDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kPPC_TruncateDouble, node);
}

void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  VisitRR(this, kPPC_RoundDouble, node);
}

// Round-to-nearest-even is not supported by this backend.
void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  UNREACHABLE();
}

// Negation uses the same opcode for both widths — presumably a sign-bit
// flip is precision-independent, hence no MiscField tag here.
void InstructionSelector::VisitFloat32Neg(Node* node) {
  VisitRR(this, kPPC_NegDouble, node);
}

void InstructionSelector::VisitFloat64Neg(Node* node) {
  VisitRR(this, kPPC_NegDouble, node);
}
1363 }
1364 
// Add/sub with overflow: when the overflow projection is actually used,
// the instruction is combined with a ForSet continuation on kOverflow;
// otherwise an empty continuation is passed and only the value output is
// produced.
void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32,
                                         kInt16Imm, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32, kInt16Imm,
                                &cont);
}

void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
                                         kInt16Imm_Negate, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
                                kInt16Imm_Negate, &cont);
}

#if V8_TARGET_ARCH_PPC64
// The 64-bit variants reuse the plain add/sub opcodes.
void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm,
                                         &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm, &cont);
}

void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate,
                                         &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, &cont);
}
#endif
1408 
1409 static bool CompareLogical(FlagsContinuation* cont) {
1410  switch (cont->condition()) {
1411  case kUnsignedLessThan:
1412  case kUnsignedGreaterThanOrEqual:
1413  case kUnsignedLessThanOrEqual:
1414  case kUnsignedGreaterThan:
1415  return true;
1416  default:
1417  return false;
1418  }
1419  UNREACHABLE();
1420 }
1421 
namespace {

// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  selector->EmitWithContinuation(opcode, left, right, cont);
}

// Shared routine for multiple word compare operations.
// Prefers an immediate on either side; when the immediate sits on the left
// of a non-commutative compare, the continuation's condition is commuted to
// compensate for swapping the operands.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      bool commutative, ImmediateMode immediate_mode) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right, immediate_mode)) {
    VisitCompare(selector, opcode, g.UseRegisterOrStackPointer(left),
                 g.UseImmediate(right), cont);
  } else if (g.CanBeImmediate(left, immediate_mode)) {
    if (!commutative) cont->Commute();
    VisitCompare(selector, opcode, g.UseRegisterOrStackPointer(right),
                 g.UseImmediate(left), cont);
  } else {
    VisitCompare(selector, opcode, g.UseRegisterOrStackPointer(left),
                 g.UseRegisterOrStackPointer(right), cont);
  }
}

// Word compares pick the immediate encoding based on signedness of the
// condition (see CompareLogical above).
void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
  VisitWordCompare(selector, node, kPPC_Cmp32, cont, false, mode);
}

#if V8_TARGET_ARCH_PPC64
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
  VisitWordCompare(selector, node, kPPC_Cmp64, cont, false, mode);
}
#endif

// Shared routine for multiple float32 compare operations.
// Note: the same kPPC_CmpDouble opcode serves both float widths.
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
               g.UseRegister(right), cont);
}

// Shared routine for multiple float64 compare operations.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
               g.UseRegister(right), cont);
}

}  // namespace
1488 
// Shared routine for word comparisons against zero.
// Attempts to fold the zero-compare into a covered producer (a compare, a
// flag-setting binop's overflow projection, a sub, or an and-for-test);
// falls back to an explicit compare against zero.
void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
                                               FlagsContinuation* cont) {
  // Try to combine with comparisons against 0 by simply inverting the branch.
  // Each stripped (x == 0) layer negates the continuation instead.
  while (value->opcode() == IrOpcode::kWord32Equal && CanCover(user, value)) {
    Int32BinopMatcher m(value);
    if (!m.right().Is(0)) break;

    user = value;
    value = m.left().node();
    cont->Negate();
  }

  if (CanCover(user, value)) {
    switch (value->opcode()) {
      // Comparisons: overwrite the condition (negating if the walk above
      // flipped it an odd number of times) and emit the compare directly.
      case IrOpcode::kWord32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(this, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(this, value, cont);
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kWord64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(this, value, cont);
#endif
      // Float comparisons use the unsigned condition encodings.
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat32Compare(this, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat64Compare(this, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    this, node, kPPC_AddWithOverflow32, kInt16Imm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    this, node, kPPC_SubWithOverflow32, kInt16Imm_Negate, cont);
              case IrOpcode::kInt32MulWithOverflow:
                // Mul overflow is detected by comparing high word vs sign
                // extension, hence kNotEqual (see EmitInt32MulWithOverflow).
                cont->OverwriteAndNegateIfEqual(kNotEqual);
                return EmitInt32MulWithOverflow(this, node, cont);
#if V8_TARGET_ARCH_PPC64
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64,
                                                     kInt16Imm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub,
                                                     kInt16Imm_Negate, cont);
#endif
              default:
                break;
            }
          }
        }
        break;
      // A subtraction compared against zero is just a compare of its inputs.
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(this, value, cont);
      // An AND compared against zero becomes a test instruction.
      case IrOpcode::kWord32And:
        // TODO(mbandy): opportunity for rlwinm?
        return VisitWordCompare(this, value, kPPC_Tst32, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt32Add:
// case IrOpcode::kWord32Or:
// case IrOpcode::kWord32Xor:
// case IrOpcode::kWord32Sar:
// case IrOpcode::kWord32Shl:
// case IrOpcode::kWord32Shr:
// case IrOpcode::kWord32Ror:
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kInt64Sub:
        return VisitWord64Compare(this, value, cont);
      case IrOpcode::kWord64And:
        // TODO(mbandy): opportunity for rldic?
        return VisitWordCompare(this, value, kPPC_Tst64, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt64Add:
// case IrOpcode::kWord64Or:
// case IrOpcode::kWord64Xor:
// case IrOpcode::kWord64Sar:
// case IrOpcode::kWord64Shl:
// case IrOpcode::kWord64Shr:
// case IrOpcode::kWord64Ror:
#endif
      default:
        break;
    }
  }

  // Branch could not be combined with a compare, emit compare against 0.
  PPCOperandGenerator g(this);
  VisitCompare(this, kPPC_Cmp32, g.UseRegister(value), g.TempImmediate(0),
               cont);
}
1634 
// Selects between a jump table and a binary-search dispatch for a switch,
// based on a simple space/time cost heuristic over the case count and the
// value range.
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  PPCOperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  if (enable_switch_jump_table_ == kEnableSwitchJumpTable) {
    static const size_t kMaxTableSwitchValueRange = 2 << 16;
    size_t table_space_cost = 4 + sw.value_range();
    size_t table_time_cost = 3;
    size_t lookup_space_cost = 3 + 2 * sw.case_count();
    size_t lookup_time_cost = sw.case_count();
    if (sw.case_count() > 0 &&
        table_space_cost + 3 * table_time_cost <=
            lookup_space_cost + 3 * lookup_time_cost &&
        sw.min_value() > std::numeric_limits<int32_t>::min() &&
        sw.value_range() <= kMaxTableSwitchValueRange) {
      InstructionOperand index_operand = value_operand;
      if (sw.min_value()) {
        // Bias the switch value so the table is zero-based.
        index_operand = g.TempRegister();
        Emit(kPPC_Sub, index_operand, value_operand,
             g.TempImmediate(sw.min_value()));
      }
      // Generate a table lookup.
      return EmitTableSwitch(sw, index_operand);
    }
  }

  // Generate a tree of conditional jumps.
  return EmitBinarySearchSwitch(sw, value_operand);
}
1665 
// Materializing (flag-setting) comparison visitors: each builds a ForSet
// continuation and defers to the shared word-compare routine.
void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}
#endif
1721 
// Int32MulWithOverflow: the overflow check compares the product's high
// word against the sign extension of the low word, so the "overflow"
// condition is kNotEqual (see EmitInt32MulWithOverflow).
void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf);
    return EmitInt32MulWithOverflow(this, node, &cont);
  }
  FlagsContinuation cont;
  EmitInt32MulWithOverflow(this, node, &cont);
}

// Float comparison visitors; ordering comparisons use the unsigned
// condition encodings.
void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}
1762 
// Lays out outgoing call arguments. C calls store ("poke") each argument
// into a fixed outgoing stack slot; JS calls push arguments in reverse
// order so the first argument ends up closest to the new frame.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* call_descriptor,
    Node* node) {
  PPCOperandGenerator g(this);

  // Prepare for C function call.
  if (call_descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction | MiscField::encode(static_cast<int>(
                                         call_descriptor->ParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    int slot = kStackFrameExtraParamSlot;
    for (PushParameter input : (*arguments)) {
      // nullptr entries are alignment holes; they occupy no slot here.
      if (input.node == nullptr) continue;
      Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node),
           g.TempImmediate(slot));
      ++slot;
    }
  } else {
    // Push any stack arguments.
    for (PushParameter input : base::Reversed(*arguments)) {
      // Skip any alignment holes in pushed nodes.
      if (input.node == nullptr) continue;
      Emit(kPPC_Push, g.NoOutput(), g.UseRegister(input.node));
    }
  }
}
1791 
// Tail-call target addresses cannot be encoded as immediates on PPC; they
// must be materialized into a register.
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

// Number of scratch registers required when tail-calling from a JSFunction.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
1795 
// Extracts the low/high 32-bit half of a float64's bit pattern into an
// integer register.
void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_DoubleExtractLowWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_DoubleExtractHighWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
1807 
// Replaces the low 32 bits of a double. If the incoming double was itself
// produced by a Float64InsertHighWord32 consumed only by this node, the two
// inserts are fused into a single kPPC_DoubleConstruct that builds the
// double from the two word halves; both fusion sites here pass operands in
// (high word, low word) order.
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  PPCOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    // Take the high word that fed the covered insert; the original double
    // is fully overwritten, so it never needs to be read.
    left = left->InputAt(1);
    Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
         g.UseRegister(right));
    return;
  }
  // General case: modify the double in place (output aliases input 0).
  Emit(kPPC_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}
1822 
// Replaces the high 32 bits of a double. Mirror of
// VisitFloat64InsertLowWord32: if the incoming double comes from a
// Float64InsertLowWord32 consumed only by this node, the pair is fused into
// one kPPC_DoubleConstruct, again passing (high word, low word).
void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  PPCOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    // `left` becomes the low word that fed the covered insert; the original
    // double is fully overwritten and need not be read.
    left = left->InputAt(1);
    Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
         g.UseRegister(left));
    return;
  }
  // General case: modify the double in place (output aliases input 0).
  Emit(kPPC_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}
1837 
// Atomic loads and stores are selected exactly like plain loads and stores.
// NOTE(review): any memory-ordering/barrier requirements are presumably
// handled during code generation of the selected opcodes — confirm in the
// PPC code generator.
void InstructionSelector::VisitWord32AtomicLoad(Node* node) { VisitLoad(node); }

void InstructionSelector::VisitWord64AtomicLoad(Node* node) { VisitLoad(node); }

void InstructionSelector::VisitWord32AtomicStore(Node* node) {
  VisitStore(node);
}

void InstructionSelector::VisitWord64AtomicStore(Node* node) {
  VisitStore(node);
}
1849 
// Shared lowering for Word32/Word64 atomic exchange: atomically stores
// `value` at [base + index] and produces the previous memory value as the
// node's result. All operands are forced into unique registers —
// presumably because the emitted sequence is a load-reserve /
// store-conditional retry loop that must not alias its operands (confirm
// against the PPC code generator).
void VisitAtomicExchange(InstructionSelector* selector, Node* node,
                         ArchOpcode opcode) {
  PPCOperandGenerator g(selector);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  // Register+register addressing for the memory operand.
  AddressingMode addressing_mode = kMode_MRR;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(value);
  InstructionOperand outputs[1];
  // NOTE(review): the output is created with UseUniqueRegister rather than
  // DefineAsRegister (as VisitAtomicCompareExchange below does) — looks
  // inconsistent; confirm whether this is intentional.
  outputs[0] = g.UseUniqueRegister(node);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  selector->Emit(code, 1, outputs, input_count, inputs);
}
1868 
1869 void InstructionSelector::VisitWord32AtomicExchange(Node* node) {
1870  ArchOpcode opcode = kArchNop;
1871  MachineType type = AtomicOpType(node->op());
1872  if (type == MachineType::Int8()) {
1873  opcode = kWord32AtomicExchangeInt8;
1874  } else if (type == MachineType::Uint8()) {
1875  opcode = kPPC_AtomicExchangeUint8;
1876  } else if (type == MachineType::Int16()) {
1877  opcode = kWord32AtomicExchangeInt16;
1878  } else if (type == MachineType::Uint16()) {
1879  opcode = kPPC_AtomicExchangeUint16;
1880  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
1881  opcode = kPPC_AtomicExchangeWord32;
1882  } else {
1883  UNREACHABLE();
1884  return;
1885  }
1886  VisitAtomicExchange(this, node, opcode);
1887 }
1888 
1889 void InstructionSelector::VisitWord64AtomicExchange(Node* node) {
1890  ArchOpcode opcode = kArchNop;
1891  MachineType type = AtomicOpType(node->op());
1892  if (type == MachineType::Uint8()) {
1893  opcode = kPPC_AtomicExchangeUint8;
1894  } else if (type == MachineType::Uint16()) {
1895  opcode = kPPC_AtomicExchangeUint16;
1896  } else if (type == MachineType::Uint32()) {
1897  opcode = kPPC_AtomicExchangeWord32;
1898  } else if (type == MachineType::Uint64()) {
1899  opcode = kPPC_AtomicExchangeWord64;
1900  } else {
1901  UNREACHABLE();
1902  return;
1903  }
1904  VisitAtomicExchange(this, node, opcode);
1905 }
1906 
1907 void VisitAtomicCompareExchange(InstructionSelector* selector, Node* node,
1908  ArchOpcode opcode) {
1909  PPCOperandGenerator g(selector);
1910  Node* base = node->InputAt(0);
1911  Node* index = node->InputAt(1);
1912  Node* old_value = node->InputAt(2);
1913  Node* new_value = node->InputAt(3);
1914 
1915  AddressingMode addressing_mode = kMode_MRR;
1916  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
1917 
1918  InstructionOperand inputs[4];
1919  size_t input_count = 0;
1920  inputs[input_count++] = g.UseUniqueRegister(base);
1921  inputs[input_count++] = g.UseUniqueRegister(index);
1922  inputs[input_count++] = g.UseUniqueRegister(old_value);
1923  inputs[input_count++] = g.UseUniqueRegister(new_value);
1924 
1925  InstructionOperand outputs[1];
1926  size_t output_count = 0;
1927  outputs[output_count++] = g.DefineAsRegister(node);
1928 
1929  selector->Emit(code, output_count, outputs, input_count, inputs);
1930 }
1931 
1932 void InstructionSelector::VisitWord32AtomicCompareExchange(Node* node) {
1933  MachineType type = AtomicOpType(node->op());
1934  ArchOpcode opcode = kArchNop;
1935  if (type == MachineType::Int8()) {
1936  opcode = kWord32AtomicCompareExchangeInt8;
1937  } else if (type == MachineType::Uint8()) {
1938  opcode = kPPC_AtomicCompareExchangeUint8;
1939  } else if (type == MachineType::Int16()) {
1940  opcode = kWord32AtomicCompareExchangeInt16;
1941  } else if (type == MachineType::Uint16()) {
1942  opcode = kPPC_AtomicCompareExchangeUint16;
1943  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
1944  opcode = kPPC_AtomicCompareExchangeWord32;
1945  } else {
1946  UNREACHABLE();
1947  return;
1948  }
1949  VisitAtomicCompareExchange(this, node, opcode);
1950 }
1951 
1952 void InstructionSelector::VisitWord64AtomicCompareExchange(Node* node) {
1953  MachineType type = AtomicOpType(node->op());
1954  ArchOpcode opcode = kArchNop;
1955  if (type == MachineType::Uint8()) {
1956  opcode = kPPC_AtomicCompareExchangeUint8;
1957  } else if (type == MachineType::Uint16()) {
1958  opcode = kPPC_AtomicCompareExchangeUint16;
1959  } else if (type == MachineType::Uint32()) {
1960  opcode = kPPC_AtomicCompareExchangeWord32;
1961  } else if (type == MachineType::Uint64()) {
1962  opcode = kPPC_AtomicCompareExchangeWord64;
1963  } else {
1964  UNREACHABLE();
1965  return;
1966  }
1967  VisitAtomicCompareExchange(this, node, opcode);
1968 }
1969 
1970 void VisitAtomicBinaryOperation(InstructionSelector* selector, Node* node,
1971  ArchOpcode int8_op, ArchOpcode uint8_op,
1972  ArchOpcode int16_op, ArchOpcode uint16_op,
1973  ArchOpcode int32_op, ArchOpcode uint32_op,
1974  ArchOpcode int64_op, ArchOpcode uint64_op) {
1975  PPCOperandGenerator g(selector);
1976  Node* base = node->InputAt(0);
1977  Node* index = node->InputAt(1);
1978  Node* value = node->InputAt(2);
1979  MachineType type = AtomicOpType(node->op());
1980 
1981  ArchOpcode opcode = kArchNop;
1982 
1983  if (type == MachineType::Int8()) {
1984  opcode = int8_op;
1985  } else if (type == MachineType::Uint8()) {
1986  opcode = uint8_op;
1987  } else if (type == MachineType::Int16()) {
1988  opcode = int16_op;
1989  } else if (type == MachineType::Uint16()) {
1990  opcode = uint16_op;
1991  } else if (type == MachineType::Int32()) {
1992  opcode = int32_op;
1993  } else if (type == MachineType::Uint32()) {
1994  opcode = uint32_op;
1995  } else if (type == MachineType::Int64()) {
1996  opcode = int64_op;
1997  } else if (type == MachineType::Uint64()) {
1998  opcode = uint64_op;
1999  } else {
2000  UNREACHABLE();
2001  return;
2002  }
2003 
2004  AddressingMode addressing_mode = kMode_MRR;
2005  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
2006  InstructionOperand inputs[3];
2007 
2008  size_t input_count = 0;
2009  inputs[input_count++] = g.UseUniqueRegister(base);
2010  inputs[input_count++] = g.UseUniqueRegister(index);
2011  inputs[input_count++] = g.UseUniqueRegister(value);
2012 
2013  InstructionOperand outputs[1];
2014  size_t output_count = 0;
2015  outputs[output_count++] = g.DefineAsRegister(node);
2016 
2017  selector->Emit(code, output_count, outputs, input_count, inputs);
2018 }
2019 
// The architecture-independent atomic-binop entry points are not used on
// PPC; the concrete visitors are generated directly by the
// VISIT_ATOMIC_BINOP macro below, which forwards to
// VisitAtomicBinaryOperation.
void InstructionSelector::VisitWord32AtomicBinaryOperation(
    Node* node, ArchOpcode int8_op, ArchOpcode uint8_op, ArchOpcode int16_op,
    ArchOpcode uint16_op, ArchOpcode word32_op) {
  // Unused
  UNREACHABLE();
}

void InstructionSelector::VisitWord64AtomicBinaryOperation(
    Node* node, ArchOpcode uint8_op, ArchOpcode uint16_op, ArchOpcode uint32_op,
    ArchOpcode uint64_op) {
  // Unused
  UNREACHABLE();
}
2033 
// Generates the Word32 and Word64 visitors for each atomic binop. Both
// widths forward to VisitAtomicBinaryOperation with the full per-type
// opcode set; the actual access width is selected there from AtomicOpType.
#define VISIT_ATOMIC_BINOP(op)                                     \
  void InstructionSelector::VisitWord32Atomic##op(Node* node) {    \
    VisitAtomicBinaryOperation(                                    \
        this, node, kPPC_Atomic##op##Int8, kPPC_Atomic##op##Uint8, \
        kPPC_Atomic##op##Int16, kPPC_Atomic##op##Uint16,           \
        kPPC_Atomic##op##Int32, kPPC_Atomic##op##Uint32,           \
        kPPC_Atomic##op##Int64, kPPC_Atomic##op##Uint64);          \
  }                                                                \
  void InstructionSelector::VisitWord64Atomic##op(Node* node) {    \
    VisitAtomicBinaryOperation(                                    \
        this, node, kPPC_Atomic##op##Int8, kPPC_Atomic##op##Uint8, \
        kPPC_Atomic##op##Int16, kPPC_Atomic##op##Uint16,           \
        kPPC_Atomic##op##Int32, kPPC_Atomic##op##Uint32,           \
        kPPC_Atomic##op##Int64, kPPC_Atomic##op##Uint64);          \
  }
VISIT_ATOMIC_BINOP(Add)
VISIT_ATOMIC_BINOP(Sub)
VISIT_ATOMIC_BINOP(And)
VISIT_ATOMIC_BINOP(Or)
VISIT_ATOMIC_BINOP(Xor)
#undef VISIT_ATOMIC_BINOP
2055 
// Abs-with-overflow is not enabled for this backend, so these visitors are
// never reached.
void InstructionSelector::VisitInt32AbsWithOverflow(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
  UNREACHABLE();
}
2063 
// --- SIMD (128-bit) operations --------------------------------------------
// Not implemented in the PPC backend; every visitor below aborts via
// UNIMPLEMENTED().
void InstructionSelector::VisitI32x4Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ExtractLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI32x4GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ExtractLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8AddSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8SubSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8AddSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SubSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI16x8GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ExtractLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16AddSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16SubSaturateS(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16MinS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16MaxS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GtS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GeS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16AddSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16SubSaturateU(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16MinU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16MaxU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GtU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16GeU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128And(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Or(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Xor(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Not(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Zero(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Eq(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Ne(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Lt(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Le(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Splat(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4ExtractLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4ReplaceLane(Node* node) { UNIMPLEMENTED(); }
2231 
// Prepares results returned from a call. Currently a no-op on PPC —
// presumably stack-returned results are not yet handled here; confirm
// against other backends when porting.
void InstructionSelector::EmitPrepareResults(
    ZoneVector<PushParameter>* results, const CallDescriptor* call_descriptor,
    Node* node) {
  // TODO(John): Port.
}
2237 
// SIMD (128-bit) operations, continued: still unimplemented on PPC.
void InstructionSelector::VisitF32x4Add(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Sub(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Mul(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Min(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Max(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS128Select(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Neg(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4Abs(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4RecipSqrtApprox(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitF32x4RecipApprox(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8AddHoriz(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitF32x4SConvertI32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitF32x4UConvertI32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4SConvertF32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4UConvertF32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4SConvertI16x8Low(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4SConvertI16x8High(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4UConvertI16x8Low(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI32x4UConvertI16x8High(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SConvertI8x16Low(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SConvertI8x16High(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8UConvertI8x16Low(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8UConvertI8x16High(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI16x8SConvertI32x4(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitI16x8UConvertI32x4(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16SConvertI16x8(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitI8x16UConvertI16x8(Node* node) {
  UNIMPLEMENTED();
}

void InstructionSelector::VisitS1x4AnyTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x4AllTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x8AnyTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x8AllTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x16AnyTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitS1x16AllTrue(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Shl(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ShrS(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16ShrU(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::VisitI8x16Mul(Node* node) { UNIMPLEMENTED(); }
2346 
// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  // Machine operations this backend selects directly; anything not listed
  // here is lowered elsewhere before reaching instruction selection.
  return MachineOperatorBuilder::kFloat32RoundDown |
         MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat32RoundUp |
         MachineOperatorBuilder::kFloat64RoundUp |
         MachineOperatorBuilder::kFloat32RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTiesAway |
         MachineOperatorBuilder::kWord32Popcnt |
         MachineOperatorBuilder::kWord64Popcnt;
  // We omit kWord32ShiftIsSafe as s[rl]w use 0x3F as a mask rather than 0x1F.
}
2361 
// static
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  // Advertise full unaligned-access support, so the machine operator
  // builder never requests alignment fixups for loads/stores.
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}
2368 
2369 } // namespace compiler
2370 } // namespace internal
2371 } // namespace v8
// Definition: libplatform.h:13