V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
effect-control-linearizer.cc
1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/compiler/effect-control-linearizer.h"
6 
7 #include "src/code-factory.h"
8 #include "src/compiler/access-builder.h"
9 #include "src/compiler/compiler-source-position-table.h"
10 #include "src/compiler/js-graph.h"
11 #include "src/compiler/linkage.h"
12 #include "src/compiler/node-matchers.h"
13 #include "src/compiler/node-origin-table.h"
14 #include "src/compiler/node-properties.h"
15 #include "src/compiler/node.h"
16 #include "src/compiler/schedule.h"
17 #include "src/heap/factory-inl.h"
18 
19 namespace v8 {
20 namespace internal {
21 namespace compiler {
22 
23 EffectControlLinearizer::EffectControlLinearizer(
24  JSGraph* js_graph, Schedule* schedule, Zone* temp_zone,
25  SourcePositionTable* source_positions, NodeOriginTable* node_origins,
26  MaskArrayIndexEnable mask_array_index)
27  : js_graph_(js_graph),
28  schedule_(schedule),
29  temp_zone_(temp_zone),
30  mask_array_index_(mask_array_index),
31  source_positions_(source_positions),
32  node_origins_(node_origins),
33  graph_assembler_(js_graph, nullptr, nullptr, temp_zone),
34  frame_state_zapper_(nullptr) {}
35 
36 Graph* EffectControlLinearizer::graph() const { return js_graph_->graph(); }
37 CommonOperatorBuilder* EffectControlLinearizer::common() const {
38  return js_graph_->common();
39 }
40 SimplifiedOperatorBuilder* EffectControlLinearizer::simplified() const {
41  return js_graph_->simplified();
42 }
43 MachineOperatorBuilder* EffectControlLinearizer::machine() const {
44  return js_graph_->machine();
45 }
46 
47 namespace {
48 
49 struct BlockEffectControlData {
50  Node* current_effect = nullptr; // New effect.
51  Node* current_control = nullptr; // New control.
52  Node* current_frame_state = nullptr; // New frame state.
53 };
54 
55 class BlockEffectControlMap {
56  public:
57  explicit BlockEffectControlMap(Zone* temp_zone) : map_(temp_zone) {}
58 
59  BlockEffectControlData& For(BasicBlock* from, BasicBlock* to) {
60  return map_[std::make_pair(from->rpo_number(), to->rpo_number())];
61  }
62 
63  const BlockEffectControlData& For(BasicBlock* from, BasicBlock* to) const {
64  return map_.at(std::make_pair(from->rpo_number(), to->rpo_number()));
65  }
66 
67  private:
68  typedef std::pair<int32_t, int32_t> Key;
69  typedef ZoneMap<Key, BlockEffectControlData> Map;
70 
71  Map map_;
72 };
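// Note that the map above is keyed by the (from, to) pair of RPO numbers, so
// effect, control and frame state are tracked per control-flow edge rather
// than per block. This matters for TryCloneBranch below, which gives the two
// successor edges of a block different effects. Minimal usage sketch (b1, b2
// and some_effect_node are hypothetical, for illustration only):
//
//   BlockEffectControlMap block_effects(temp_zone);
//   block_effects.For(b1, b2).current_effect = some_effect_node;
//   Node* effect_on_edge = block_effects.For(b1, b2).current_effect;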
73 
74 // Effect phis that need to be updated after the first pass.
75 struct PendingEffectPhi {
76  Node* effect_phi;
77  BasicBlock* block;
78 
79  PendingEffectPhi(Node* effect_phi, BasicBlock* block)
80  : effect_phi(effect_phi), block(block) {}
81 };
82 
83 void ConnectUnreachableToEnd(Node* effect, Node* control, JSGraph* jsgraph) {
84  Graph* graph = jsgraph->graph();
85  CommonOperatorBuilder* common = jsgraph->common();
86  if (effect->opcode() == IrOpcode::kDead) return;
87  if (effect->opcode() != IrOpcode::kUnreachable) {
88  effect = graph->NewNode(common->Unreachable(), effect, control);
89  }
90  Node* throw_node = graph->NewNode(common->Throw(), effect, control);
91  NodeProperties::MergeControlToEnd(graph, common, throw_node);
92 }
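// In other words: a dead effect is ignored; any other effect is capped with an
// Unreachable node (unless it already is one) and then terminated by a Throw
// node that is merged into the graph's End, so the unreachable control path
// ends the graph instead of dangling.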
93 
94 void UpdateEffectPhi(Node* node, BasicBlock* block,
95  BlockEffectControlMap* block_effects, JSGraph* jsgraph) {
96  // Update all inputs to an effect phi with the effects from the given
97  // block->effect map.
98  DCHECK_EQ(IrOpcode::kEffectPhi, node->opcode());
99  DCHECK_EQ(static_cast<size_t>(node->op()->EffectInputCount()),
100  block->PredecessorCount());
101  for (int i = 0; i < node->op()->EffectInputCount(); i++) {
102  Node* input = node->InputAt(i);
103  BasicBlock* predecessor = block->PredecessorAt(static_cast<size_t>(i));
104  const BlockEffectControlData& block_effect =
105  block_effects->For(predecessor, block);
106  Node* effect = block_effect.current_effect;
107  if (input != effect) {
108  node->ReplaceInput(i, effect);
109  }
110  }
111 }
112 
113 void UpdateBlockControl(BasicBlock* block,
114  BlockEffectControlMap* block_effects) {
115  Node* control = block->NodeAt(0);
116  DCHECK(NodeProperties::IsControl(control));
117 
118  // Do not rewire the end node.
119  if (control->opcode() == IrOpcode::kEnd) return;
120 
121  // Update all inputs to the given control node with the correct control.
122  DCHECK(control->opcode() == IrOpcode::kMerge ||
123  static_cast<size_t>(control->op()->ControlInputCount()) ==
124  block->PredecessorCount());
125  if (static_cast<size_t>(control->op()->ControlInputCount()) !=
126  block->PredecessorCount()) {
127  return; // We already re-wired the control inputs of this node.
128  }
129  for (int i = 0; i < control->op()->ControlInputCount(); i++) {
130  Node* input = NodeProperties::GetControlInput(control, i);
131  BasicBlock* predecessor = block->PredecessorAt(static_cast<size_t>(i));
132  const BlockEffectControlData& block_effect =
133  block_effects->For(predecessor, block);
134  if (input != block_effect.current_control) {
135  NodeProperties::ReplaceControlInput(control, block_effect.current_control,
136  i);
137  }
138  }
139 }
140 
141 bool HasIncomingBackEdges(BasicBlock* block) {
142  for (BasicBlock* pred : block->predecessors()) {
143  if (pred->rpo_number() >= block->rpo_number()) {
144  return true;
145  }
146  }
147  return false;
148 }
149 
150 void RemoveRenameNode(Node* node) {
151  DCHECK(IrOpcode::kFinishRegion == node->opcode() ||
152  IrOpcode::kBeginRegion == node->opcode() ||
153  IrOpcode::kTypeGuard == node->opcode());
154  // Update the value/context uses to the value input of the finish node and
155  // the effect uses to the effect input.
156  for (Edge edge : node->use_edges()) {
157  DCHECK(!edge.from()->IsDead());
158  if (NodeProperties::IsEffectEdge(edge)) {
159  edge.UpdateTo(NodeProperties::GetEffectInput(node));
160  } else {
161  DCHECK(!NodeProperties::IsControlEdge(edge));
162  DCHECK(!NodeProperties::IsFrameStateEdge(edge));
163  edge.UpdateTo(node->InputAt(0));
164  }
165  }
166  node->Kill();
167 }
168 
169 void TryCloneBranch(Node* node, BasicBlock* block, Zone* temp_zone,
170  Graph* graph, CommonOperatorBuilder* common,
171  BlockEffectControlMap* block_effects,
172  SourcePositionTable* source_positions,
173  NodeOriginTable* node_origins) {
174  DCHECK_EQ(IrOpcode::kBranch, node->opcode());
175 
176  // This optimization is a special case of (super)block cloning. It takes an
177  // input graph as shown below and clones the Branch node for every predecessor
178  // to the Merge, essentially removing the Merge completely. This avoids
179  // materializing the bit for the Phi and may offer potential for further
180  // branch folding optimizations (i.e. because one or more inputs to the Phi
181  // are constants). Note that there may be more Phi nodes hanging off the
182  // Merge, but we can only handle a certain subset of them currently (actually
183  // only Phi and EffectPhi nodes whose uses have either the IfTrue or IfFalse
184  // as control input).
185 
186  //   Control1 ... ControlN
187  //      ^            ^
188  //      |            |   Cond1 ... CondN
189  //      +----+  +----+     ^         ^
190  //           |  |          |         |
191  //           |  |     +----+         |
192  //          Merge<--+ | +------------+
193  //            ^      \|/
194  //            |      Phi
195  //            |       |
196  //          Branch----+
197  //            ^
198  //            |
199  //      +-----+-----+
200  //      |           |
201  //   IfTrue      IfFalse
202  //      ^           ^
203  //      |           |
204 
205  // The resulting graph (modulo the Phi and EffectPhi nodes) looks like this:
206 
207  //   Control1 Cond1  ...  ControlN CondN
208  //      ^       ^            ^       ^
209  //       \     /              \     /
210  //        Branch     ...       Branch
211  //          ^                    ^
212  //          |                    |
213  //      +---+---+            +---+----+
214  //      |       |            |        |
215  //   IfTrue  IfFalse ...  IfTrue   IfFalse
216  //      ^       ^            ^        ^
217  //      |       |            |        |
218  //      +--+    +---------------+     |
219  //         | +---------------+  | +---+
220  //         | |                  | |
221  //        Merge                Merge
222  //          ^                    ^
223  //          |                    |
224 
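  // Illustrative example (not from the original source): with two
  // predecessors where the Phi's inputs are TrueConstant and FalseConstant,
  // cloning yields one Branch per predecessor whose condition is that
  // constant, so later branch folding can resolve each cloned Branch
  // statically and the materialized boolean disappears entirely.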
225  SourcePositionTable::Scope scope(source_positions,
226  source_positions->GetSourcePosition(node));
227  NodeOriginTable::Scope origin_scope(node_origins, "clone branch", node);
228  Node* branch = node;
229  Node* cond = NodeProperties::GetValueInput(branch, 0);
230  if (!cond->OwnedBy(branch) || cond->opcode() != IrOpcode::kPhi) return;
231  Node* merge = NodeProperties::GetControlInput(branch);
232  if (merge->opcode() != IrOpcode::kMerge ||
233  NodeProperties::GetControlInput(cond) != merge) {
234  return;
235  }
236  // Grab the IfTrue/IfFalse projections of the Branch.
237  BranchMatcher matcher(branch);
238  // Check/collect other Phi/EffectPhi nodes hanging off the Merge.
239  NodeVector phis(temp_zone);
240  for (Node* const use : merge->uses()) {
241  if (use == branch || use == cond) continue;
242  // We cannot currently deal with non-Phi/EffectPhi nodes hanging off the
243  // Merge. Ideally, we would just clone the nodes (and everything that
244  // depends on them up to some distant join point), but that requires knowledge
245  // about dominance/post-dominance.
246  if (!NodeProperties::IsPhi(use)) return;
247  for (Edge edge : use->use_edges()) {
248  // Right now we can only handle Phi/EffectPhi nodes whose uses are
249  // directly control-dependent on either the IfTrue or the IfFalse
250  // successor, because we know exactly how to update those uses.
251  if (edge.from()->op()->ControlInputCount() != 1) return;
252  Node* control = NodeProperties::GetControlInput(edge.from());
253  if (NodeProperties::IsPhi(edge.from())) {
254  control = NodeProperties::GetControlInput(control, edge.index());
255  }
256  if (control != matcher.IfTrue() && control != matcher.IfFalse()) return;
257  }
258  phis.push_back(use);
259  }
260  BranchHint const hint = BranchHintOf(branch->op());
261  int const input_count = merge->op()->ControlInputCount();
262  DCHECK_LE(1, input_count);
263  Node** const inputs = graph->zone()->NewArray<Node*>(2 * input_count);
264  Node** const merge_true_inputs = &inputs[0];
265  Node** const merge_false_inputs = &inputs[input_count];
266  for (int index = 0; index < input_count; ++index) {
267  Node* cond1 = NodeProperties::GetValueInput(cond, index);
268  Node* control1 = NodeProperties::GetControlInput(merge, index);
269  Node* branch1 = graph->NewNode(common->Branch(hint), cond1, control1);
270  merge_true_inputs[index] = graph->NewNode(common->IfTrue(), branch1);
271  merge_false_inputs[index] = graph->NewNode(common->IfFalse(), branch1);
272  }
273  Node* const merge_true = matcher.IfTrue();
274  Node* const merge_false = matcher.IfFalse();
275  merge_true->TrimInputCount(0);
276  merge_false->TrimInputCount(0);
277  for (int i = 0; i < input_count; ++i) {
278  merge_true->AppendInput(graph->zone(), merge_true_inputs[i]);
279  merge_false->AppendInput(graph->zone(), merge_false_inputs[i]);
280  }
281  DCHECK_EQ(2u, block->SuccessorCount());
282  NodeProperties::ChangeOp(matcher.IfTrue(), common->Merge(input_count));
283  NodeProperties::ChangeOp(matcher.IfFalse(), common->Merge(input_count));
284  int const true_index =
285  block->SuccessorAt(0)->NodeAt(0) == matcher.IfTrue() ? 0 : 1;
286  BlockEffectControlData* true_block_data =
287  &block_effects->For(block, block->SuccessorAt(true_index));
288  BlockEffectControlData* false_block_data =
289  &block_effects->For(block, block->SuccessorAt(true_index ^ 1));
290  for (Node* const phi : phis) {
291  for (int index = 0; index < input_count; ++index) {
292  inputs[index] = phi->InputAt(index);
293  }
294  inputs[input_count] = merge_true;
295  Node* phi_true = graph->NewNode(phi->op(), input_count + 1, inputs);
296  inputs[input_count] = merge_false;
297  Node* phi_false = graph->NewNode(phi->op(), input_count + 1, inputs);
298  if (phi->UseCount() == 0) {
299  DCHECK_EQ(phi->opcode(), IrOpcode::kEffectPhi);
300  } else {
301  for (Edge edge : phi->use_edges()) {
302  Node* control = NodeProperties::GetControlInput(edge.from());
303  if (NodeProperties::IsPhi(edge.from())) {
304  control = NodeProperties::GetControlInput(control, edge.index());
305  }
306  DCHECK(control == matcher.IfTrue() || control == matcher.IfFalse());
307  edge.UpdateTo((control == matcher.IfTrue()) ? phi_true : phi_false);
308  }
309  }
310  if (phi->opcode() == IrOpcode::kEffectPhi) {
311  true_block_data->current_effect = phi_true;
312  false_block_data->current_effect = phi_false;
313  }
314  phi->Kill();
315  }
316  // Fix up IfTrue and IfFalse and kill all dead nodes.
317  if (branch == block->control_input()) {
318  true_block_data->current_control = merge_true;
319  false_block_data->current_control = merge_false;
320  }
321  branch->Kill();
322  cond->Kill();
323  merge->Kill();
324 }
325 
326 } // namespace
327 
328 void EffectControlLinearizer::Run() {
329  BlockEffectControlMap block_effects(temp_zone());
330  ZoneVector<PendingEffectPhi> pending_effect_phis(temp_zone());
331  ZoneVector<BasicBlock*> pending_block_controls(temp_zone());
332  NodeVector inputs_buffer(temp_zone());
333 
334  for (BasicBlock* block : *(schedule()->rpo_order())) {
335  size_t instr = 0;
336 
337  // The control node should be the first.
338  Node* control = block->NodeAt(instr);
339  DCHECK(NodeProperties::IsControl(control));
340  // Update the control inputs.
341  if (HasIncomingBackEdges(block)) {
342  // If there are back edges, we need to update later because we have not
343  // computed the control yet. This should only happen for loops.
344  DCHECK_EQ(IrOpcode::kLoop, control->opcode());
345  pending_block_controls.push_back(block);
346  } else {
347  // If there are no back edges, we can update now.
348  UpdateBlockControl(block, &block_effects);
349  }
350  instr++;
351 
352  // Iterate over the phis and update the effect phis.
353  Node* effect_phi = nullptr;
354  Node* terminate = nullptr;
355  for (; instr < block->NodeCount(); instr++) {
356  Node* node = block->NodeAt(instr);
357  // Only go through the phis and effect phis.
358  if (node->opcode() == IrOpcode::kEffectPhi) {
359  // There should be at most one effect phi in a block.
360  DCHECK_NULL(effect_phi);
361  // IfException blocks should not have effect phis.
362  DCHECK_NE(IrOpcode::kIfException, control->opcode());
363  effect_phi = node;
364  } else if (node->opcode() == IrOpcode::kPhi) {
365  // Just skip phis.
366  } else if (node->opcode() == IrOpcode::kTerminate) {
367  DCHECK_NULL(terminate);
368  terminate = node;
369  } else {
370  break;
371  }
372  }
373 
374  if (effect_phi) {
375  // Make sure we update the effect phi's inputs to the incoming blocks' effects.
376  if (HasIncomingBackEdges(block)) {
377  // In case of loops, we do not update the effect phi immediately
378  // because the back predecessor has not been handled yet. We just
379  // record the effect phi for later processing.
380  pending_effect_phis.push_back(PendingEffectPhi(effect_phi, block));
381  } else {
382  UpdateEffectPhi(effect_phi, block, &block_effects, jsgraph());
383  }
384  }
385 
386  Node* effect = effect_phi;
387  if (effect == nullptr) {
388  // There was no effect phi.
389 
390  // Since a loop should have at least a StackCheck, only loops in
391  // unreachable code can have no effect phi.
392  DCHECK_IMPLIES(
393  HasIncomingBackEdges(block),
394  block_effects.For(block->PredecessorAt(0), block)
395  .current_effect->opcode() == IrOpcode::kUnreachable);
396  if (block == schedule()->start()) {
397  // Start block => effect is start.
398  DCHECK_EQ(graph()->start(), control);
399  effect = graph()->start();
400  } else if (control->opcode() == IrOpcode::kEnd) {
401  // End block is just a dummy, no effect needed.
402  DCHECK_EQ(BasicBlock::kNone, block->control());
403  DCHECK_EQ(1u, block->size());
404  effect = nullptr;
405  } else {
406  // If all the predecessors have the same effect, we can use it as our
407  // current effect.
408  for (size_t i = 0; i < block->PredecessorCount(); ++i) {
409  const BlockEffectControlData& data =
410  block_effects.For(block->PredecessorAt(i), block);
411  if (!effect) effect = data.current_effect;
412  if (data.current_effect != effect) {
413  effect = nullptr;
414  break;
415  }
416  }
417  if (effect == nullptr) {
418  DCHECK_NE(IrOpcode::kIfException, control->opcode());
419  // The input blocks do not have the same effect. We have
420  // to create an effect phi node.
421  inputs_buffer.clear();
422  inputs_buffer.resize(block->PredecessorCount(), jsgraph()->Dead());
423  inputs_buffer.push_back(control);
424  effect = graph()->NewNode(
425  common()->EffectPhi(static_cast<int>(block->PredecessorCount())),
426  static_cast<int>(inputs_buffer.size()), &(inputs_buffer.front()));
427  // For loops, we update the effect phi node later to break cycles.
428  if (control->opcode() == IrOpcode::kLoop) {
429  pending_effect_phis.push_back(PendingEffectPhi(effect, block));
430  } else {
431  UpdateEffectPhi(effect, block, &block_effects, jsgraph());
432  }
433  } else if (control->opcode() == IrOpcode::kIfException) {
434  // The IfException is connected into the effect chain, so we need
435  // to update the effect here.
436  NodeProperties::ReplaceEffectInput(control, effect);
437  effect = control;
438  }
439  }
440  }
441 
442  // Fixup the Terminate node.
443  if (terminate != nullptr) {
444  NodeProperties::ReplaceEffectInput(terminate, effect);
445  }
446 
447  // The frame state at block entry is determined by the frame states leaving
448  // all predecessors. In case there is no frame state dominating this block,
449  // we can rely on a checkpoint being present before the next deoptimization.
450  // TODO(mstarzinger): Eventually we will need to go hunt for a frame state
451  // once deoptimizing nodes roam freely through the schedule.
452  Node* frame_state = nullptr;
453  if (block != schedule()->start()) {
454  // If all the predecessors have the same frame state, we can use it
455  // as our current frame state.
456  frame_state =
457  block_effects.For(block->PredecessorAt(0), block).current_frame_state;
458  for (size_t i = 1; i < block->PredecessorCount(); i++) {
459  if (block_effects.For(block->PredecessorAt(i), block)
460  .current_frame_state != frame_state) {
461  frame_state = nullptr;
462  frame_state_zapper_ = graph()->end();
463  break;
464  }
465  }
466  }
467 
468  // Process the ordinary instructions.
469  for (; instr < block->NodeCount(); instr++) {
470  Node* node = block->NodeAt(instr);
471  ProcessNode(node, &frame_state, &effect, &control);
472  }
473 
474  switch (block->control()) {
475  case BasicBlock::kGoto:
476  case BasicBlock::kNone:
477  break;
478 
479  case BasicBlock::kCall:
480  case BasicBlock::kTailCall:
481  case BasicBlock::kSwitch:
482  case BasicBlock::kReturn:
483  case BasicBlock::kDeoptimize:
484  case BasicBlock::kThrow:
485  ProcessNode(block->control_input(), &frame_state, &effect, &control);
486  break;
487 
488  case BasicBlock::kBranch:
489  ProcessNode(block->control_input(), &frame_state, &effect, &control);
490  TryCloneBranch(block->control_input(), block, temp_zone(), graph(),
491  common(), &block_effects, source_positions_,
492  node_origins_);
493  break;
494  }
495 
496  // Store the effect, control and frame state for later use.
497  for (BasicBlock* successor : block->successors()) {
498  BlockEffectControlData* data = &block_effects.For(block, successor);
499  if (data->current_effect == nullptr) {
500  data->current_effect = effect;
501  }
502  if (data->current_control == nullptr) {
503  data->current_control = control;
504  }
505  data->current_frame_state = frame_state;
506  }
507  }
508 
509  for (BasicBlock* pending_block_control : pending_block_controls) {
510  UpdateBlockControl(pending_block_control, &block_effects);
511  }
512  // Update the incoming edges of the effect phis that could not be processed
513  // during the first pass (because they could have incoming back edges).
514  for (const PendingEffectPhi& pending_effect_phi : pending_effect_phis) {
515  UpdateEffectPhi(pending_effect_phi.effect_phi, pending_effect_phi.block,
516  &block_effects, jsgraph());
517  }
518 }
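// Overview of the pass above: the main loop walks the schedule in reverse
// post-order, threading every node onto a single effect and control chain per
// block and recording the resulting chain state per outgoing edge in
// {block_effects}. Loop headers are the exception: their back edges have not
// been visited yet, so their control nodes and effect phis are queued in
// {pending_block_controls} and {pending_effect_phis} and patched by the two
// fix-up loops at the end, once the back-edge effects are known.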
519 
520 void EffectControlLinearizer::ProcessNode(Node* node, Node** frame_state,
521  Node** effect, Node** control) {
522  SourcePositionTable::Scope scope(source_positions_,
523  source_positions_->GetSourcePosition(node));
524  NodeOriginTable::Scope origin_scope(node_origins_, "process node", node);
525 
526  // If the node needs to be wired into the effect/control chain, do this
527  // here. Pass current frame state for lowering to eager deoptimization.
528  if (TryWireInStateEffect(node, *frame_state, effect, control)) {
529  return;
530  }
531 
532  // If the node has a visible effect, then there must be a checkpoint in the
533  // effect chain before we are allowed to place another eager deoptimization
534  // point. We zap the frame state to ensure this invariant is maintained.
535  if (region_observability_ == RegionObservability::kObservable &&
536  !node->op()->HasProperty(Operator::kNoWrite)) {
537  *frame_state = nullptr;
538  frame_state_zapper_ = node;
539  }
540 
541  // Remove the end markers of an 'atomic' allocation region because the
542  // region should be wired in now.
543  if (node->opcode() == IrOpcode::kFinishRegion) {
544  // Reset the current region observability.
545  region_observability_ = RegionObservability::kObservable;
546  // Update the value uses to the value input of the finish node and
547  // the effect uses to the effect input.
548  return RemoveRenameNode(node);
549  }
550  if (node->opcode() == IrOpcode::kBeginRegion) {
551  // Determine the observability for this region and use that for all
552  // nodes inside the region (i.e. ignore the absence of kNoWrite on
553  // StoreField and other operators).
554  DCHECK_NE(RegionObservability::kNotObservable, region_observability_);
555  region_observability_ = RegionObservabilityOf(node->op());
556  // Update the value uses to the value input of the finish node and
557  // the effect uses to the effect input.
558  return RemoveRenameNode(node);
559  }
560  if (node->opcode() == IrOpcode::kTypeGuard) {
561  return RemoveRenameNode(node);
562  }
563 
564  // Special treatment for checkpoint nodes.
565  if (node->opcode() == IrOpcode::kCheckpoint) {
566  // Unlink the checkpoint; effect uses will be updated to the incoming
567  // effect that is passed. The frame state is preserved for lowering.
568  DCHECK_EQ(RegionObservability::kObservable, region_observability_);
569  *frame_state = NodeProperties::GetFrameStateInput(node);
570  return;
571  }
572 
573  // The IfSuccess nodes should always start a basic block (and basic block
574  // start nodes are not handled in the ProcessNode method).
575  DCHECK_NE(IrOpcode::kIfSuccess, node->opcode());
576 
577  // If the node takes an effect, replace with the current one.
578  if (node->op()->EffectInputCount() > 0) {
579  DCHECK_EQ(1, node->op()->EffectInputCount());
580  Node* input_effect = NodeProperties::GetEffectInput(node);
581 
582  if (input_effect != *effect) {
583  NodeProperties::ReplaceEffectInput(node, *effect);
584  }
585 
586  // If the node produces an effect, update our current effect. (However,
587  // ignore new effect chains started with ValueEffect.)
588  if (node->op()->EffectOutputCount() > 0) {
589  DCHECK_EQ(1, node->op()->EffectOutputCount());
590  *effect = node;
591  }
592  } else {
593  // New effect chain is only started with a Start or ValueEffect node.
594  DCHECK(node->op()->EffectOutputCount() == 0 ||
595  node->opcode() == IrOpcode::kStart);
596  }
597 
598  // Rewire control inputs.
599  for (int i = 0; i < node->op()->ControlInputCount(); i++) {
600  NodeProperties::ReplaceControlInput(node, *control, i);
601  }
602  // Update the current control.
603  if (node->op()->ControlOutputCount() > 0) {
604  *control = node;
605  }
606 
607  // Break the effect chain on {Unreachable} and reconnect to the graph end.
608  // Mark the following code for deletion by connecting to the {Dead} node.
609  if (node->opcode() == IrOpcode::kUnreachable) {
610  ConnectUnreachableToEnd(*effect, *control, jsgraph());
611  *effect = *control = jsgraph()->Dead();
612  }
613 }
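// For example, a scheduled LoadField node that reaches the generic path above
// has its effect input replaced by {*effect}, becomes the new {*effect}
// itself, and gets its control input rewired to {*control}; a Checkpoint node
// instead only refreshes {*frame_state} and is unlinked from the new chain.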
614 
615 bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
616  Node* frame_state,
617  Node** effect,
618  Node** control) {
619  gasm()->Reset(*effect, *control);
620  Node* result = nullptr;
621  switch (node->opcode()) {
622  case IrOpcode::kChangeBitToTagged:
623  result = LowerChangeBitToTagged(node);
624  break;
625  case IrOpcode::kChangeInt31ToTaggedSigned:
626  result = LowerChangeInt31ToTaggedSigned(node);
627  break;
628  case IrOpcode::kChangeInt32ToTagged:
629  result = LowerChangeInt32ToTagged(node);
630  break;
631  case IrOpcode::kChangeInt64ToTagged:
632  result = LowerChangeInt64ToTagged(node);
633  break;
634  case IrOpcode::kChangeUint32ToTagged:
635  result = LowerChangeUint32ToTagged(node);
636  break;
637  case IrOpcode::kChangeUint64ToTagged:
638  result = LowerChangeUint64ToTagged(node);
639  break;
640  case IrOpcode::kChangeFloat64ToTagged:
641  result = LowerChangeFloat64ToTagged(node);
642  break;
643  case IrOpcode::kChangeFloat64ToTaggedPointer:
644  result = LowerChangeFloat64ToTaggedPointer(node);
645  break;
646  case IrOpcode::kChangeTaggedSignedToInt32:
647  result = LowerChangeTaggedSignedToInt32(node);
648  break;
649  case IrOpcode::kChangeTaggedSignedToInt64:
650  result = LowerChangeTaggedSignedToInt64(node);
651  break;
652  case IrOpcode::kChangeTaggedToBit:
653  result = LowerChangeTaggedToBit(node);
654  break;
655  case IrOpcode::kChangeTaggedToInt32:
656  result = LowerChangeTaggedToInt32(node);
657  break;
658  case IrOpcode::kChangeTaggedToUint32:
659  result = LowerChangeTaggedToUint32(node);
660  break;
661  case IrOpcode::kChangeTaggedToInt64:
662  result = LowerChangeTaggedToInt64(node);
663  break;
664  case IrOpcode::kChangeTaggedToFloat64:
665  result = LowerChangeTaggedToFloat64(node);
666  break;
667  case IrOpcode::kChangeTaggedToTaggedSigned:
668  result = LowerChangeTaggedToTaggedSigned(node);
669  break;
670  case IrOpcode::kTruncateTaggedToBit:
671  result = LowerTruncateTaggedToBit(node);
672  break;
673  case IrOpcode::kTruncateTaggedPointerToBit:
674  result = LowerTruncateTaggedPointerToBit(node);
675  break;
676  case IrOpcode::kTruncateTaggedToFloat64:
677  result = LowerTruncateTaggedToFloat64(node);
678  break;
679  case IrOpcode::kPoisonIndex:
680  result = LowerPoisonIndex(node);
681  break;
682  case IrOpcode::kCheckMaps:
683  LowerCheckMaps(node, frame_state);
684  break;
685  case IrOpcode::kCompareMaps:
686  result = LowerCompareMaps(node);
687  break;
688  case IrOpcode::kCheckNumber:
689  result = LowerCheckNumber(node, frame_state);
690  break;
691  case IrOpcode::kCheckReceiver:
692  result = LowerCheckReceiver(node, frame_state);
693  break;
694  case IrOpcode::kCheckReceiverOrNullOrUndefined:
695  result = LowerCheckReceiverOrNullOrUndefined(node, frame_state);
696  break;
697  case IrOpcode::kCheckSymbol:
698  result = LowerCheckSymbol(node, frame_state);
699  break;
700  case IrOpcode::kCheckString:
701  result = LowerCheckString(node, frame_state);
702  break;
703  case IrOpcode::kCheckInternalizedString:
704  result = LowerCheckInternalizedString(node, frame_state);
705  break;
706  case IrOpcode::kCheckIf:
707  LowerCheckIf(node, frame_state);
708  break;
709  case IrOpcode::kCheckedInt32Add:
710  result = LowerCheckedInt32Add(node, frame_state);
711  break;
712  case IrOpcode::kCheckedInt32Sub:
713  result = LowerCheckedInt32Sub(node, frame_state);
714  break;
715  case IrOpcode::kCheckedInt32Div:
716  result = LowerCheckedInt32Div(node, frame_state);
717  break;
718  case IrOpcode::kCheckedInt32Mod:
719  result = LowerCheckedInt32Mod(node, frame_state);
720  break;
721  case IrOpcode::kCheckedUint32Div:
722  result = LowerCheckedUint32Div(node, frame_state);
723  break;
724  case IrOpcode::kCheckedUint32Mod:
725  result = LowerCheckedUint32Mod(node, frame_state);
726  break;
727  case IrOpcode::kCheckedInt32Mul:
728  result = LowerCheckedInt32Mul(node, frame_state);
729  break;
730  case IrOpcode::kCheckedInt32ToTaggedSigned:
731  result = LowerCheckedInt32ToTaggedSigned(node, frame_state);
732  break;
733  case IrOpcode::kCheckedInt64ToInt32:
734  result = LowerCheckedInt64ToInt32(node, frame_state);
735  break;
736  case IrOpcode::kCheckedInt64ToTaggedSigned:
737  result = LowerCheckedInt64ToTaggedSigned(node, frame_state);
738  break;
739  case IrOpcode::kCheckedUint32Bounds:
740  result = LowerCheckedUint32Bounds(node, frame_state);
741  break;
742  case IrOpcode::kCheckedUint32ToInt32:
743  result = LowerCheckedUint32ToInt32(node, frame_state);
744  break;
745  case IrOpcode::kCheckedUint32ToTaggedSigned:
746  result = LowerCheckedUint32ToTaggedSigned(node, frame_state);
747  break;
748  case IrOpcode::kCheckedUint64Bounds:
749  result = LowerCheckedUint64Bounds(node, frame_state);
750  break;
751  case IrOpcode::kCheckedUint64ToInt32:
752  result = LowerCheckedUint64ToInt32(node, frame_state);
753  break;
754  case IrOpcode::kCheckedUint64ToTaggedSigned:
755  result = LowerCheckedUint64ToTaggedSigned(node, frame_state);
756  break;
757  case IrOpcode::kCheckedFloat64ToInt32:
758  result = LowerCheckedFloat64ToInt32(node, frame_state);
759  break;
760  case IrOpcode::kCheckedFloat64ToInt64:
761  result = LowerCheckedFloat64ToInt64(node, frame_state);
762  break;
763  case IrOpcode::kCheckedTaggedSignedToInt32:
764  if (frame_state == nullptr) {
765  FATAL("No frame state (zapped by #%d: %s)", frame_state_zapper_->id(),
766  frame_state_zapper_->op()->mnemonic());
767  }
768  result = LowerCheckedTaggedSignedToInt32(node, frame_state);
769  break;
770  case IrOpcode::kCheckedTaggedToInt32:
771  result = LowerCheckedTaggedToInt32(node, frame_state);
772  break;
773  case IrOpcode::kCheckedTaggedToInt64:
774  result = LowerCheckedTaggedToInt64(node, frame_state);
775  break;
776  case IrOpcode::kCheckedTaggedToFloat64:
777  result = LowerCheckedTaggedToFloat64(node, frame_state);
778  break;
779  case IrOpcode::kCheckedTaggedToTaggedSigned:
780  result = LowerCheckedTaggedToTaggedSigned(node, frame_state);
781  break;
782  case IrOpcode::kCheckedTaggedToTaggedPointer:
783  result = LowerCheckedTaggedToTaggedPointer(node, frame_state);
784  break;
785  case IrOpcode::kTruncateTaggedToWord32:
786  result = LowerTruncateTaggedToWord32(node);
787  break;
788  case IrOpcode::kCheckedTruncateTaggedToWord32:
789  result = LowerCheckedTruncateTaggedToWord32(node, frame_state);
790  break;
791  case IrOpcode::kNumberToString:
792  result = LowerNumberToString(node);
793  break;
794  case IrOpcode::kObjectIsArrayBufferView:
795  result = LowerObjectIsArrayBufferView(node);
796  break;
797  case IrOpcode::kObjectIsBigInt:
798  result = LowerObjectIsBigInt(node);
799  break;
800  case IrOpcode::kObjectIsCallable:
801  result = LowerObjectIsCallable(node);
802  break;
803  case IrOpcode::kObjectIsConstructor:
804  result = LowerObjectIsConstructor(node);
805  break;
806  case IrOpcode::kObjectIsDetectableCallable:
807  result = LowerObjectIsDetectableCallable(node);
808  break;
809  case IrOpcode::kObjectIsMinusZero:
810  result = LowerObjectIsMinusZero(node);
811  break;
812  case IrOpcode::kNumberIsMinusZero:
813  result = LowerNumberIsMinusZero(node);
814  break;
815  case IrOpcode::kObjectIsNaN:
816  result = LowerObjectIsNaN(node);
817  break;
818  case IrOpcode::kNumberIsNaN:
819  result = LowerNumberIsNaN(node);
820  break;
821  case IrOpcode::kObjectIsNonCallable:
822  result = LowerObjectIsNonCallable(node);
823  break;
824  case IrOpcode::kObjectIsNumber:
825  result = LowerObjectIsNumber(node);
826  break;
827  case IrOpcode::kObjectIsReceiver:
828  result = LowerObjectIsReceiver(node);
829  break;
830  case IrOpcode::kObjectIsSmi:
831  result = LowerObjectIsSmi(node);
832  break;
833  case IrOpcode::kObjectIsString:
834  result = LowerObjectIsString(node);
835  break;
836  case IrOpcode::kObjectIsSymbol:
837  result = LowerObjectIsSymbol(node);
838  break;
839  case IrOpcode::kObjectIsUndetectable:
840  result = LowerObjectIsUndetectable(node);
841  break;
842  case IrOpcode::kArgumentsFrame:
843  result = LowerArgumentsFrame(node);
844  break;
845  case IrOpcode::kArgumentsLength:
846  result = LowerArgumentsLength(node);
847  break;
848  case IrOpcode::kToBoolean:
849  result = LowerToBoolean(node);
850  break;
851  case IrOpcode::kTypeOf:
852  result = LowerTypeOf(node);
853  break;
854  case IrOpcode::kNewDoubleElements:
855  result = LowerNewDoubleElements(node);
856  break;
857  case IrOpcode::kNewSmiOrObjectElements:
858  result = LowerNewSmiOrObjectElements(node);
859  break;
860  case IrOpcode::kNewArgumentsElements:
861  result = LowerNewArgumentsElements(node);
862  break;
863  case IrOpcode::kNewConsString:
864  result = LowerNewConsString(node);
865  break;
866  case IrOpcode::kSameValue:
867  result = LowerSameValue(node);
868  break;
869  case IrOpcode::kDeadValue:
870  result = LowerDeadValue(node);
871  break;
872  case IrOpcode::kStringConcat:
873  result = LowerStringConcat(node);
874  break;
875  case IrOpcode::kStringFromSingleCharCode:
876  result = LowerStringFromSingleCharCode(node);
877  break;
878  case IrOpcode::kStringFromSingleCodePoint:
879  result = LowerStringFromSingleCodePoint(node);
880  break;
881  case IrOpcode::kStringIndexOf:
882  result = LowerStringIndexOf(node);
883  break;
884  case IrOpcode::kStringLength:
885  result = LowerStringLength(node);
886  break;
887  case IrOpcode::kStringToNumber:
888  result = LowerStringToNumber(node);
889  break;
890  case IrOpcode::kStringCharCodeAt:
891  result = LowerStringCharCodeAt(node);
892  break;
893  case IrOpcode::kStringCodePointAt:
894  result = LowerStringCodePointAt(node, UnicodeEncodingOf(node->op()));
895  break;
896  case IrOpcode::kStringToLowerCaseIntl:
897  result = LowerStringToLowerCaseIntl(node);
898  break;
899  case IrOpcode::kStringToUpperCaseIntl:
900  result = LowerStringToUpperCaseIntl(node);
901  break;
902  case IrOpcode::kStringSubstring:
903  result = LowerStringSubstring(node);
904  break;
905  case IrOpcode::kStringEqual:
906  result = LowerStringEqual(node);
907  break;
908  case IrOpcode::kStringLessThan:
909  result = LowerStringLessThan(node);
910  break;
911  case IrOpcode::kStringLessThanOrEqual:
912  result = LowerStringLessThanOrEqual(node);
913  break;
914  case IrOpcode::kNumberIsFloat64Hole:
915  result = LowerNumberIsFloat64Hole(node);
916  break;
917  case IrOpcode::kNumberIsFinite:
918  result = LowerNumberIsFinite(node);
919  break;
920  case IrOpcode::kObjectIsFiniteNumber:
921  result = LowerObjectIsFiniteNumber(node);
922  break;
923  case IrOpcode::kNumberIsInteger:
924  result = LowerNumberIsInteger(node);
925  break;
926  case IrOpcode::kObjectIsInteger:
927  result = LowerObjectIsInteger(node);
928  break;
929  case IrOpcode::kNumberIsSafeInteger:
930  result = LowerNumberIsSafeInteger(node);
931  break;
932  case IrOpcode::kObjectIsSafeInteger:
933  result = LowerObjectIsSafeInteger(node);
934  break;
935  case IrOpcode::kCheckFloat64Hole:
936  result = LowerCheckFloat64Hole(node, frame_state);
937  break;
938  case IrOpcode::kCheckNotTaggedHole:
939  result = LowerCheckNotTaggedHole(node, frame_state);
940  break;
941  case IrOpcode::kConvertTaggedHoleToUndefined:
942  result = LowerConvertTaggedHoleToUndefined(node);
943  break;
944  case IrOpcode::kCheckEqualsInternalizedString:
945  LowerCheckEqualsInternalizedString(node, frame_state);
946  break;
947  case IrOpcode::kAllocate:
948  result = LowerAllocate(node);
949  break;
950  case IrOpcode::kCheckEqualsSymbol:
951  LowerCheckEqualsSymbol(node, frame_state);
952  break;
953  case IrOpcode::kPlainPrimitiveToNumber:
954  result = LowerPlainPrimitiveToNumber(node);
955  break;
956  case IrOpcode::kPlainPrimitiveToWord32:
957  result = LowerPlainPrimitiveToWord32(node);
958  break;
959  case IrOpcode::kPlainPrimitiveToFloat64:
960  result = LowerPlainPrimitiveToFloat64(node);
961  break;
962  case IrOpcode::kEnsureWritableFastElements:
963  result = LowerEnsureWritableFastElements(node);
964  break;
965  case IrOpcode::kMaybeGrowFastElements:
966  result = LowerMaybeGrowFastElements(node, frame_state);
967  break;
968  case IrOpcode::kTransitionElementsKind:
969  LowerTransitionElementsKind(node);
970  break;
971  case IrOpcode::kLoadFieldByIndex:
972  result = LowerLoadFieldByIndex(node);
973  break;
974  case IrOpcode::kLoadTypedElement:
975  result = LowerLoadTypedElement(node);
976  break;
977  case IrOpcode::kLoadDataViewElement:
978  result = LowerLoadDataViewElement(node);
979  break;
980  case IrOpcode::kStoreTypedElement:
981  LowerStoreTypedElement(node);
982  break;
983  case IrOpcode::kStoreDataViewElement:
984  LowerStoreDataViewElement(node);
985  break;
986  case IrOpcode::kStoreSignedSmallElement:
987  LowerStoreSignedSmallElement(node);
988  break;
989  case IrOpcode::kFindOrderedHashMapEntry:
990  result = LowerFindOrderedHashMapEntry(node);
991  break;
992  case IrOpcode::kFindOrderedHashMapEntryForInt32Key:
993  result = LowerFindOrderedHashMapEntryForInt32Key(node);
994  break;
995  case IrOpcode::kTransitionAndStoreNumberElement:
996  LowerTransitionAndStoreNumberElement(node);
997  break;
998  case IrOpcode::kTransitionAndStoreNonNumberElement:
999  LowerTransitionAndStoreNonNumberElement(node);
1000  break;
1001  case IrOpcode::kTransitionAndStoreElement:
1002  LowerTransitionAndStoreElement(node);
1003  break;
1004  case IrOpcode::kRuntimeAbort:
1005  LowerRuntimeAbort(node);
1006  break;
1007  case IrOpcode::kConvertReceiver:
1008  result = LowerConvertReceiver(node);
1009  break;
1010  case IrOpcode::kFloat64RoundUp:
1011  if (!LowerFloat64RoundUp(node).To(&result)) {
1012  return false;
1013  }
1014  break;
1015  case IrOpcode::kFloat64RoundDown:
1016  if (!LowerFloat64RoundDown(node).To(&result)) {
1017  return false;
1018  }
1019  break;
1020  case IrOpcode::kFloat64RoundTruncate:
1021  if (!LowerFloat64RoundTruncate(node).To(&result)) {
1022  return false;
1023  }
1024  break;
1025  case IrOpcode::kFloat64RoundTiesEven:
1026  if (!LowerFloat64RoundTiesEven(node).To(&result)) {
1027  return false;
1028  }
1029  break;
1030  case IrOpcode::kDateNow:
1031  result = LowerDateNow(node);
1032  break;
1033  default:
1034  return false;
1035  }
1036 
1037  if ((result ? 1 : 0) != node->op()->ValueOutputCount()) {
1038  FATAL(
1039  "Effect control linearizer lowering of '%s':"
1040  " value output count does not agree.",
1041  node->op()->mnemonic());
1042  }
1043 
1044  *effect = gasm()->ExtractCurrentEffect();
1045  *control = gasm()->ExtractCurrentControl();
1046  NodeProperties::ReplaceUses(node, result, *effect, *control);
1047  return true;
1048 }
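// Each Lower* helper above builds its replacement subgraph through the graph
// assembler: gasm() is reset to the incoming effect/control, the helper emits
// nodes against it, and the accumulated effect and control are extracted
// afterwards so ReplaceUses() can redirect the original node's value, effect
// and control uses to the lowered result. The "__" macro below is shorthand
// for gasm()-> inside those helpers.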
1049 
1050 #define __ gasm()->
1051 
1052 Node* EffectControlLinearizer::LowerChangeFloat64ToTagged(Node* node) {
1053  CheckForMinusZeroMode mode = CheckMinusZeroModeOf(node->op());
1054  Node* value = node->InputAt(0);
1055 
1056  auto done = __ MakeLabel(MachineRepresentation::kTagged);
1057  auto if_heapnumber = __ MakeDeferredLabel();
1058  auto if_int32 = __ MakeLabel();
1059 
1060  Node* value32 = __ RoundFloat64ToInt32(value);
1061  __ GotoIf(__ Float64Equal(value, __ ChangeInt32ToFloat64(value32)),
1062  &if_int32);
1063  __ Goto(&if_heapnumber);
1064 
1065  __ Bind(&if_int32);
1066  {
1067  if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
1068  Node* zero = __ Int32Constant(0);
1069  auto if_zero = __ MakeDeferredLabel();
1070  auto if_smi = __ MakeLabel();
1071 
1072  __ GotoIf(__ Word32Equal(value32, zero), &if_zero);
1073  __ Goto(&if_smi);
1074 
1075  __ Bind(&if_zero);
1076  {
1077  // In case of 0, we need to check the high bits for the IEEE -0 pattern.
1078  __ GotoIf(__ Int32LessThan(__ Float64ExtractHighWord32(value), zero),
1079  &if_heapnumber);
1080  __ Goto(&if_smi);
1081  }
1082 
1083  __ Bind(&if_smi);
1084  }
1085 
1086  if (SmiValuesAre32Bits()) {
1087  Node* value_smi = ChangeInt32ToSmi(value32);
1088  __ Goto(&done, value_smi);
1089  } else {
1090  DCHECK(SmiValuesAre31Bits());
1091  Node* add = __ Int32AddWithOverflow(value32, value32);
1092  Node* ovf = __ Projection(1, add);
1093  __ GotoIf(ovf, &if_heapnumber);
1094  Node* value_smi = __ Projection(0, add);
1095  value_smi = ChangeInt32ToIntPtr(value_smi);
1096  __ Goto(&done, value_smi);
1097  }
1098  }
1099 
1100  __ Bind(&if_heapnumber);
1101  {
1102  Node* value_number = AllocateHeapNumberWithValue(value);
1103  __ Goto(&done, value_number);
1104  }
1105 
1106  __ Bind(&done);
1107  return done.PhiAt(0);
1108 }
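// Illustrative values for the lowering above: 7.0 round-trips through
// RoundFloat64ToInt32 (7.0 == ChangeInt32ToFloat64(7)), so it takes the Smi
// path; -0.0 also round-trips via 0, but its high word is 0x80000000, which is
// negative as an int32, so with kCheckForMinusZero it is routed to the
// HeapNumber path and keeps its sign.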
1109 
1110 Node* EffectControlLinearizer::LowerChangeFloat64ToTaggedPointer(Node* node) {
1111  Node* value = node->InputAt(0);
1112  return AllocateHeapNumberWithValue(value);
1113 }
1114 
1115 Node* EffectControlLinearizer::LowerChangeBitToTagged(Node* node) {
1116  Node* value = node->InputAt(0);
1117 
1118  auto if_true = __ MakeLabel();
1119  auto done = __ MakeLabel(MachineRepresentation::kTagged);
1120 
1121  __ GotoIf(value, &if_true);
1122  __ Goto(&done, __ FalseConstant());
1123 
1124  __ Bind(&if_true);
1125  __ Goto(&done, __ TrueConstant());
1126 
1127  __ Bind(&done);
1128  return done.PhiAt(0);
1129 }
1130 
1131 Node* EffectControlLinearizer::LowerChangeInt31ToTaggedSigned(Node* node) {
1132  Node* value = node->InputAt(0);
1133  return ChangeInt32ToSmi(value);
1134 }
1135 
1136 Node* EffectControlLinearizer::LowerChangeInt32ToTagged(Node* node) {
1137  Node* value = node->InputAt(0);
1138 
1139  if (SmiValuesAre32Bits()) {
1140  return ChangeInt32ToSmi(value);
1141  }
1142  DCHECK(SmiValuesAre31Bits());
1143 
1144  auto if_overflow = __ MakeDeferredLabel();
1145  auto done = __ MakeLabel(MachineRepresentation::kTagged);
1146 
1147  Node* add = __ Int32AddWithOverflow(value, value);
1148  Node* ovf = __ Projection(1, add);
1149  __ GotoIf(ovf, &if_overflow);
1150  Node* value_smi = __ Projection(0, add);
1151  value_smi = ChangeInt32ToIntPtr(value_smi);
1152  __ Goto(&done, value_smi);
1153 
1154  __ Bind(&if_overflow);
1155  Node* number = AllocateHeapNumberWithValue(__ ChangeInt32ToFloat64(value));
1156  __ Goto(&done, number);
1157 
1158  __ Bind(&done);
1159  return done.PhiAt(0);
1160 }
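// Illustrative values for the 31-bit Smi path above: Int32AddWithOverflow(v, v)
// doubles v, i.e. shifts it left by one so the low (tag) bit becomes 0. For
// v = 5 the resulting word is 10 (5 shifted left with a zero tag bit), while
// v = 0x40000000 overflows the addition and falls back to allocating a
// HeapNumber.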
1161 
1162 Node* EffectControlLinearizer::LowerChangeInt64ToTagged(Node* node) {
1163  Node* value = node->InputAt(0);
1164 
1165  auto if_not_in_smi_range = __ MakeDeferredLabel();
1166  auto done = __ MakeLabel(MachineRepresentation::kTagged);
1167 
1168  Node* value32 = __ TruncateInt64ToInt32(value);
1169  __ GotoIfNot(__ Word64Equal(__ ChangeInt32ToInt64(value32), value),
1170  &if_not_in_smi_range);
1171 
1172  if (SmiValuesAre32Bits()) {
1173  Node* value_smi = ChangeInt64ToSmi(value);
1174  __ Goto(&done, value_smi);
1175  } else {
1176  Node* add = __ Int32AddWithOverflow(value32, value32);
1177  Node* ovf = __ Projection(1, add);
1178  __ GotoIf(ovf, &if_not_in_smi_range);
1179  Node* value_smi = ChangeInt32ToIntPtr(__ Projection(0, add));
1180  __ Goto(&done, value_smi);
1181  }
1182 
1183  __ Bind(&if_not_in_smi_range);
1184  Node* number = AllocateHeapNumberWithValue(__ ChangeInt64ToFloat64(value));
1185  __ Goto(&done, number);
1186 
1187  __ Bind(&done);
1188  return done.PhiAt(0);
1189 }
1190 
1191 Node* EffectControlLinearizer::LowerChangeUint32ToTagged(Node* node) {
1192  Node* value = node->InputAt(0);
1193 
1194  auto if_not_in_smi_range = __ MakeDeferredLabel();
1195  auto done = __ MakeLabel(MachineRepresentation::kTagged);
1196 
1197  Node* check = __ Uint32LessThanOrEqual(value, SmiMaxValueConstant());
1198  __ GotoIfNot(check, &if_not_in_smi_range);
1199  __ Goto(&done, ChangeUint32ToSmi(value));
1200 
1201  __ Bind(&if_not_in_smi_range);
1202  Node* number = AllocateHeapNumberWithValue(__ ChangeUint32ToFloat64(value));
1203 
1204  __ Goto(&done, number);
1205  __ Bind(&done);
1206 
1207  return done.PhiAt(0);
1208 }
1209 
1210 Node* EffectControlLinearizer::LowerChangeUint64ToTagged(Node* node) {
1211  Node* value = node->InputAt(0);
1212 
1213  auto if_not_in_smi_range = __ MakeDeferredLabel();
1214  auto done = __ MakeLabel(MachineRepresentation::kTagged);
1215 
1216  Node* check =
1217  __ Uint64LessThanOrEqual(value, __ Int64Constant(Smi::kMaxValue));
1218  __ GotoIfNot(check, &if_not_in_smi_range);
1219  __ Goto(&done, ChangeInt64ToSmi(value));
1220 
1221  __ Bind(&if_not_in_smi_range);
1222  Node* number = AllocateHeapNumberWithValue(__ ChangeInt64ToFloat64(value));
1223 
1224  __ Goto(&done, number);
1225  __ Bind(&done);
1226 
1227  return done.PhiAt(0);
1228 }
1229 
1230 Node* EffectControlLinearizer::LowerChangeTaggedSignedToInt32(Node* node) {
1231  Node* value = node->InputAt(0);
1232  return ChangeSmiToInt32(value);
1233 }
1234 
1235 Node* EffectControlLinearizer::LowerChangeTaggedSignedToInt64(Node* node) {
1236  Node* value = node->InputAt(0);
1237  return ChangeSmiToInt64(value);
1238 }
1239 
1240 Node* EffectControlLinearizer::LowerChangeTaggedToBit(Node* node) {
1241  Node* value = node->InputAt(0);
1242  return __ WordEqual(value, __ TrueConstant());
1243 }
1244 
1245 void EffectControlLinearizer::TruncateTaggedPointerToBit(
1246  Node* node, GraphAssemblerLabel<1>* done) {
1247  Node* value = node->InputAt(0);
1248 
1249  auto if_heapnumber = __ MakeDeferredLabel();
1250  auto if_bigint = __ MakeDeferredLabel();
1251 
1252  Node* zero = __ Int32Constant(0);
1253  Node* fzero = __ Float64Constant(0.0);
1254 
1255  // Check if {value} is false.
1256  __ GotoIf(__ WordEqual(value, __ FalseConstant()), done, zero);
1257 
1258  // Check if {value} is the empty string.
1259  __ GotoIf(__ WordEqual(value, __ EmptyStringConstant()), done, zero);
1260 
1261  // Load the map of {value}.
1262  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1263 
1264  // Check if the {value} is undetectable and immediately return false.
1265  // This includes undefined and null.
1266  Node* value_map_bitfield =
1267  __ LoadField(AccessBuilder::ForMapBitField(), value_map);
1268  __ GotoIfNot(
1269  __ Word32Equal(
1270  __ Word32And(value_map_bitfield,
1271  __ Int32Constant(Map::IsUndetectableBit::kMask)),
1272  zero),
1273  done, zero);
1274 
1275  // Check if {value} is a HeapNumber.
1276  __ GotoIf(__ WordEqual(value_map, __ HeapNumberMapConstant()),
1277  &if_heapnumber);
1278 
1279  // Check if {value} is a BigInt.
1280  Node* value_instance_type =
1281  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1282  __ GotoIf(__ Word32Equal(value_instance_type, __ Int32Constant(BIGINT_TYPE)),
1283  &if_bigint);
1284 
1285  // All other values that reach here are true.
1286  __ Goto(done, __ Int32Constant(1));
1287 
1288  __ Bind(&if_heapnumber);
1289  {
1290  // For HeapNumber {value}, just check that its value is not 0.0, -0.0 or
1291  // NaN.
1292  Node* value_value =
1293  __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1294  __ Goto(done, __ Float64LessThan(fzero, __ Float64Abs(value_value)));
1295  }
1296 
1297  __ Bind(&if_bigint);
1298  {
1299  Node* bitfield = __ LoadField(AccessBuilder::ForBigIntBitfield(), value);
1300  Node* length_is_zero = __ WordEqual(
1301  __ WordAnd(bitfield, __ IntPtrConstant(BigInt::LengthBits::kMask)),
1302  __ IntPtrConstant(0));
1303  __ Goto(done, __ Word32Equal(length_is_zero, zero));
1304  }
1305 }
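// The helper above implements ToBoolean for heap objects: false, the empty
// string, and values with an undetectable map (which covers undefined and
// null) produce 0; a HeapNumber produces 1 only if 0.0 < |value|, so +0, -0
// and NaN produce 0; a BigInt produces 1 only if its length bits are non-zero;
// every other heap object produces 1.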
1306 
1307 Node* EffectControlLinearizer::LowerTruncateTaggedToBit(Node* node) {
1308  auto done = __ MakeLabel(MachineRepresentation::kBit);
1309  auto if_smi = __ MakeDeferredLabel();
1310 
1311  Node* value = node->InputAt(0);
1312  __ GotoIf(ObjectIsSmi(value), &if_smi);
1313 
1314  TruncateTaggedPointerToBit(node, &done);
1315 
1316  __ Bind(&if_smi);
1317  {
1318  // If {value} is a Smi, then we only need to check that it's not zero.
1319  __ Goto(&done, __ Word32Equal(__ WordEqual(value, __ IntPtrConstant(0)),
1320  __ Int32Constant(0)));
1321  }
1322 
1323  __ Bind(&done);
1324  return done.PhiAt(0);
1325 }
1326 
1327 Node* EffectControlLinearizer::LowerTruncateTaggedPointerToBit(Node* node) {
1328  auto done = __ MakeLabel(MachineRepresentation::kBit);
1329 
1330  TruncateTaggedPointerToBit(node, &done);
1331 
1332  __ Bind(&done);
1333  return done.PhiAt(0);
1334 }
1335 
1336 Node* EffectControlLinearizer::LowerChangeTaggedToInt32(Node* node) {
1337  Node* value = node->InputAt(0);
1338 
1339  auto if_not_smi = __ MakeDeferredLabel();
1340  auto done = __ MakeLabel(MachineRepresentation::kWord32);
1341 
1342  Node* check = ObjectIsSmi(value);
1343  __ GotoIfNot(check, &if_not_smi);
1344  __ Goto(&done, ChangeSmiToInt32(value));
1345 
1346  __ Bind(&if_not_smi);
1347  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1348  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1349  vfalse = __ ChangeFloat64ToInt32(vfalse);
1350  __ Goto(&done, vfalse);
1351 
1352  __ Bind(&done);
1353  return done.PhiAt(0);
1354 }
1355 
1356 Node* EffectControlLinearizer::LowerChangeTaggedToUint32(Node* node) {
1357  Node* value = node->InputAt(0);
1358 
1359  auto if_not_smi = __ MakeDeferredLabel();
1360  auto done = __ MakeLabel(MachineRepresentation::kWord32);
1361 
1362  Node* check = ObjectIsSmi(value);
1363  __ GotoIfNot(check, &if_not_smi);
1364  __ Goto(&done, ChangeSmiToInt32(value));
1365 
1366  __ Bind(&if_not_smi);
1367  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1368  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1369  vfalse = __ ChangeFloat64ToUint32(vfalse);
1370  __ Goto(&done, vfalse);
1371 
1372  __ Bind(&done);
1373  return done.PhiAt(0);
1374 }
1375 
1376 Node* EffectControlLinearizer::LowerChangeTaggedToInt64(Node* node) {
1377  Node* value = node->InputAt(0);
1378 
1379  auto if_not_smi = __ MakeDeferredLabel();
1380  auto done = __ MakeLabel(MachineRepresentation::kWord64);
1381 
1382  Node* check = ObjectIsSmi(value);
1383  __ GotoIfNot(check, &if_not_smi);
1384  __ Goto(&done, ChangeSmiToInt64(value));
1385 
1386  __ Bind(&if_not_smi);
1387  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1388  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1389  vfalse = __ ChangeFloat64ToInt64(vfalse);
1390  __ Goto(&done, vfalse);
1391 
1392  __ Bind(&done);
1393  return done.PhiAt(0);
1394 }
1395 
1396 Node* EffectControlLinearizer::LowerChangeTaggedToFloat64(Node* node) {
1397  return LowerTruncateTaggedToFloat64(node);
1398 }
1399 
1400 Node* EffectControlLinearizer::LowerChangeTaggedToTaggedSigned(Node* node) {
1401  Node* value = node->InputAt(0);
1402 
1403  auto if_not_smi = __ MakeDeferredLabel();
1404  auto done = __ MakeLabel(MachineRepresentation::kWord32);
1405 
1406  Node* check = ObjectIsSmi(value);
1407  __ GotoIfNot(check, &if_not_smi);
1408  __ Goto(&done, value);
1409 
1410  __ Bind(&if_not_smi);
1411  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1412  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1413  vfalse = __ ChangeFloat64ToInt32(vfalse);
1414  vfalse = ChangeInt32ToSmi(vfalse);
1415  __ Goto(&done, vfalse);
1416 
1417  __ Bind(&done);
1418  return done.PhiAt(0);
1419 }
1420 
1421 Node* EffectControlLinearizer::LowerTruncateTaggedToFloat64(Node* node) {
1422  Node* value = node->InputAt(0);
1423 
1424  auto if_not_smi = __ MakeDeferredLabel();
1425  auto done = __ MakeLabel(MachineRepresentation::kFloat64);
1426 
1427  Node* check = ObjectIsSmi(value);
1428  __ GotoIfNot(check, &if_not_smi);
1429  Node* vtrue = ChangeSmiToInt32(value);
1430  vtrue = __ ChangeInt32ToFloat64(vtrue);
1431  __ Goto(&done, vtrue);
1432 
1433  __ Bind(&if_not_smi);
1434  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1435  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1436  __ Goto(&done, vfalse);
1437 
1438  __ Bind(&done);
1439  return done.PhiAt(0);
1440 }
1441 
1442 Node* EffectControlLinearizer::LowerPoisonIndex(Node* node) {
1443  Node* index = node->InputAt(0);
1444  if (mask_array_index_ == kMaskArrayIndex) {
1445  index = __ Word32PoisonOnSpeculation(index);
1446  }
1447  return index;
1448 }
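// When index masking is enabled (kMaskArrayIndex), the index is routed through
// Word32PoisonOnSpeculation so that a misspeculated bounds check cannot leak
// data through the index (a Spectre-style mitigation); otherwise the node is a
// plain pass-through of its input.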
1449 
1450 void EffectControlLinearizer::LowerCheckMaps(Node* node, Node* frame_state) {
1451  CheckMapsParameters const& p = CheckMapsParametersOf(node->op());
1452  Node* value = node->InputAt(0);
1453 
1454  ZoneHandleSet<Map> const& maps = p.maps();
1455  size_t const map_count = maps.size();
1456 
1457  if (p.flags() & CheckMapsFlag::kTryMigrateInstance) {
1458  auto done = __ MakeLabel();
1459  auto migrate = __ MakeDeferredLabel();
1460 
1461  // Load the current map of the {value}.
1462  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1463 
1464  // Perform the map checks.
1465  for (size_t i = 0; i < map_count; ++i) {
1466  Node* map = __ HeapConstant(maps[i]);
1467  Node* check = __ WordEqual(value_map, map);
1468  if (i == map_count - 1) {
1469  __ Branch(check, &done, &migrate, IsSafetyCheck::kCriticalSafetyCheck);
1470  } else {
1471  auto next_map = __ MakeLabel();
1472  __ Branch(check, &done, &next_map, IsSafetyCheck::kCriticalSafetyCheck);
1473  __ Bind(&next_map);
1474  }
1475  }
1476 
1477  // Perform the (deferred) instance migration.
1478  __ Bind(&migrate);
1479  {
1480  // If the map is not deprecated, the migration attempt does not make sense.
1481  Node* bitfield3 =
1482  __ LoadField(AccessBuilder::ForMapBitField3(), value_map);
1483  Node* if_not_deprecated = __ WordEqual(
1484  __ Word32And(bitfield3,
1485  __ Int32Constant(Map::IsDeprecatedBit::kMask)),
1486  __ Int32Constant(0));
1487  __ DeoptimizeIf(DeoptimizeReason::kWrongMap, p.feedback(),
1488  if_not_deprecated, frame_state,
1489  IsSafetyCheck::kCriticalSafetyCheck);
1490 
1491  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
1492  Runtime::FunctionId id = Runtime::kTryMigrateInstance;
1493  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
1494  graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
1495  Node* result = __ Call(call_descriptor, __ CEntryStubConstant(1), value,
1496  __ ExternalConstant(ExternalReference::Create(id)),
1497  __ Int32Constant(1), __ NoContextConstant());
1498  Node* check = ObjectIsSmi(result);
1499  __ DeoptimizeIf(DeoptimizeReason::kInstanceMigrationFailed, p.feedback(),
1500  check, frame_state, IsSafetyCheck::kCriticalSafetyCheck);
1501  }
1502 
1503  // Reload the current map of the {value}.
1504  value_map = __ LoadField(AccessBuilder::ForMap(), value);
1505 
1506  // Perform the map checks again.
1507  for (size_t i = 0; i < map_count; ++i) {
1508  Node* map = __ HeapConstant(maps[i]);
1509  Node* check = __ WordEqual(value_map, map);
1510  if (i == map_count - 1) {
1511  __ DeoptimizeIfNot(DeoptimizeReason::kWrongMap, p.feedback(), check,
1512  frame_state, IsSafetyCheck::kCriticalSafetyCheck);
1513  } else {
1514  auto next_map = __ MakeLabel();
1515  __ Branch(check, &done, &next_map, IsSafetyCheck::kCriticalSafetyCheck);
1516  __ Bind(&next_map);
1517  }
1518  }
1519 
1520  __ Goto(&done);
1521  __ Bind(&done);
1522  } else {
1523  auto done = __ MakeLabel();
1524 
1525  // Load the current map of the {value}.
1526  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1527 
1528  for (size_t i = 0; i < map_count; ++i) {
1529  Node* map = __ HeapConstant(maps[i]);
1530  Node* check = __ WordEqual(value_map, map);
1531  if (i == map_count - 1) {
1532  __ DeoptimizeIfNot(DeoptimizeReason::kWrongMap, p.feedback(), check,
1533  frame_state, IsSafetyCheck::kCriticalSafetyCheck);
1534  } else {
1535  auto next_map = __ MakeLabel();
1536  __ Branch(check, &done, &next_map, IsSafetyCheck::kCriticalSafetyCheck);
1537  __ Bind(&next_map);
1538  }
1539  }
1540  __ Goto(&done);
1541  __ Bind(&done);
1542  }
1543 }
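// The two arms above differ only in how a map mismatch is handled. Without
// kTryMigrateInstance, the value's map is compared against each expected map
// and a final mismatch deoptimizes with kWrongMap. With kTryMigrateInstance,
// a mismatch first enters deferred code that deoptimizes if the current map is
// not deprecated, otherwise calls Runtime::kTryMigrateInstance (deoptimizing
// with kInstanceMigrationFailed if that returns a Smi), then reloads the map
// and repeats the checks.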
1544 
1545 Node* EffectControlLinearizer::LowerCompareMaps(Node* node) {
1546  ZoneHandleSet<Map> const& maps = CompareMapsParametersOf(node->op()).maps();
1547  size_t const map_count = maps.size();
1548  Node* value = node->InputAt(0);
1549 
1550  auto done = __ MakeLabel(MachineRepresentation::kBit);
1551 
1552  // Load the current map of the {value}.
1553  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1554 
1555  for (size_t i = 0; i < map_count; ++i) {
1556  Node* map = __ HeapConstant(maps[i]);
1557  Node* check = __ WordEqual(value_map, map);
1558  auto next_map = __ MakeLabel();
1559  auto passed = __ MakeLabel();
1560  __ Branch(check, &passed, &next_map, IsSafetyCheck::kCriticalSafetyCheck);
1561 
1562  __ Bind(&passed);
1563  __ Goto(&done, __ Int32Constant(1));
1564 
1565  __ Bind(&next_map);
1566  }
1567  __ Goto(&done, __ Int32Constant(0));
1568 
1569  __ Bind(&done);
1570  return done.PhiAt(0);
1571 }
1572 
1573 Node* EffectControlLinearizer::LowerCheckNumber(Node* node, Node* frame_state) {
1574  Node* value = node->InputAt(0);
1575  const CheckParameters& params = CheckParametersOf(node->op());
1576 
1577  auto if_not_smi = __ MakeDeferredLabel();
1578  auto done = __ MakeLabel();
1579 
1580  Node* check0 = ObjectIsSmi(value);
1581  __ GotoIfNot(check0, &if_not_smi);
1582  __ Goto(&done);
1583 
1584  __ Bind(&if_not_smi);
1585  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1586  Node* check1 = __ WordEqual(value_map, __ HeapNumberMapConstant());
1587  __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, params.feedback(),
1588  check1, frame_state);
1589  __ Goto(&done);
1590 
1591  __ Bind(&done);
1592  return value;
1593 }
1594 
1595 Node* EffectControlLinearizer::LowerCheckReceiver(Node* node,
1596  Node* frame_state) {
1597  Node* value = node->InputAt(0);
1598 
1599  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1600  Node* value_instance_type =
1601  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1602 
1603  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
1604  Node* check = __ Uint32LessThanOrEqual(
1605  __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
1606  __ DeoptimizeIfNot(DeoptimizeReason::kNotAJavaScriptObject, VectorSlotPair(),
1607  check, frame_state);
1608  return value;
1609 }
1610 
1611 Node* EffectControlLinearizer::LowerCheckReceiverOrNullOrUndefined(
1612  Node* node, Node* frame_state) {
1613  Node* value = node->InputAt(0);
1614 
1615  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1616  Node* value_instance_type =
1617  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1618 
1619  // Rule out all primitives except oddballs (true, false, undefined, null).
1620  STATIC_ASSERT(LAST_PRIMITIVE_TYPE == ODDBALL_TYPE);
1621  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
1622  Node* check0 = __ Uint32LessThanOrEqual(__ Uint32Constant(ODDBALL_TYPE),
1623  value_instance_type);
1624  __ DeoptimizeIfNot(DeoptimizeReason::kNotAJavaScriptObjectOrNullOrUndefined,
1625  VectorSlotPair(), check0, frame_state);
1626 
1627  // Rule out booleans.
1628  Node* check1 = __ WordEqual(value_map, __ BooleanMapConstant());
1629  __ DeoptimizeIf(DeoptimizeReason::kNotAJavaScriptObjectOrNullOrUndefined,
1630  VectorSlotPair(), check1, frame_state);
1631  return value;
1632 }
1633 
1634 Node* EffectControlLinearizer::LowerCheckSymbol(Node* node, Node* frame_state) {
1635  Node* value = node->InputAt(0);
1636 
1637  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1638 
1639  Node* check =
1640  __ WordEqual(value_map, __ HeapConstant(factory()->symbol_map()));
1641  __ DeoptimizeIfNot(DeoptimizeReason::kNotASymbol, VectorSlotPair(), check,
1642  frame_state);
1643  return value;
1644 }
1645 
1646 Node* EffectControlLinearizer::LowerCheckString(Node* node, Node* frame_state) {
1647  Node* value = node->InputAt(0);
1648  const CheckParameters& params = CheckParametersOf(node->op());
1649 
1650  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1651  Node* value_instance_type =
1652  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1653 
1654  Node* check = __ Uint32LessThan(value_instance_type,
1655  __ Uint32Constant(FIRST_NONSTRING_TYPE));
1656  __ DeoptimizeIfNot(DeoptimizeReason::kNotAString, params.feedback(), check,
1657  frame_state);
1658  return value;
1659 }
1660 
1661 Node* EffectControlLinearizer::LowerCheckInternalizedString(Node* node,
1662  Node* frame_state) {
1663  Node* value = node->InputAt(0);
1664 
1665  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1666  Node* value_instance_type =
1667  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1668 
1669  Node* check = __ Word32Equal(
1670  __ Word32And(value_instance_type,
1671  __ Int32Constant(kIsNotStringMask | kIsNotInternalizedMask)),
1672  __ Int32Constant(kInternalizedTag));
1673  __ DeoptimizeIfNot(DeoptimizeReason::kWrongInstanceType, VectorSlotPair(),
1674  check, frame_state);
1675 
1676  return value;
1677 }
1678 
1679 void EffectControlLinearizer::LowerCheckIf(Node* node, Node* frame_state) {
1680  Node* value = node->InputAt(0);
1681  const CheckIfParameters& p = CheckIfParametersOf(node->op());
1682  __ DeoptimizeIfNot(p.reason(), p.feedback(), value, frame_state);
1683 }
1684 
1685 Node* EffectControlLinearizer::LowerStringConcat(Node* node) {
1686  Node* lhs = node->InputAt(1);
1687  Node* rhs = node->InputAt(2);
1688 
1689  Callable const callable =
1690  CodeFactory::StringAdd(isolate(), STRING_ADD_CHECK_NONE);
1691  auto call_descriptor = Linkage::GetStubCallDescriptor(
1692  graph()->zone(), callable.descriptor(),
1693  callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
1694  Operator::kNoDeopt | Operator::kNoWrite | Operator::kNoThrow);
1695 
1696  Node* value =
1697  __ Call(call_descriptor, jsgraph()->HeapConstant(callable.code()), lhs,
1698  rhs, __ NoContextConstant());
1699 
1700  return value;
1701 }
1702 
1703 Node* EffectControlLinearizer::LowerCheckedInt32Add(Node* node,
1704  Node* frame_state) {
1705  Node* lhs = node->InputAt(0);
1706  Node* rhs = node->InputAt(1);
1707 
1708  Node* value = __ Int32AddWithOverflow(lhs, rhs);
1709  Node* check = __ Projection(1, value);
1710  __ DeoptimizeIf(DeoptimizeReason::kOverflow, VectorSlotPair(), check,
1711  frame_state);
1712  return __ Projection(0, value);
1713 }
1714 
1715 Node* EffectControlLinearizer::LowerCheckedInt32Sub(Node* node,
1716  Node* frame_state) {
1717  Node* lhs = node->InputAt(0);
1718  Node* rhs = node->InputAt(1);
1719 
1720  Node* value = __ Int32SubWithOverflow(lhs, rhs);
1721  Node* check = __ Projection(1, value);
1722  __ DeoptimizeIf(DeoptimizeReason::kOverflow, VectorSlotPair(), check,
1723  frame_state);
1724  return __ Projection(0, value);
1725 }
1726 
1727 Node* EffectControlLinearizer::LowerCheckedInt32Div(Node* node,
1728  Node* frame_state) {
1729  Node* lhs = node->InputAt(0);
1730  Node* rhs = node->InputAt(1);
1731  Node* zero = __ Int32Constant(0);
1732 
1733  // Check if the {rhs} is a known power of two.
1734  Int32Matcher m(rhs);
1735  if (m.IsPowerOf2()) {
1736  // Since we know that {rhs} is a power of two, we can perform a fast
1737  // check to see if the relevant least significant bits of the {lhs}
1738  // are all zero, and if so we know that we can perform a division
1739  // safely (and fast by doing an arithmetic - aka sign preserving -
1740  // right shift on {lhs}).
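      // For example, with {rhs} == 8 the mask is 7 and the shift is 3: an
      // {lhs} of 24 passes the check (24 & 7 == 0) and yields 24 >> 3 == 3,
      // while an {lhs} of 25 has non-zero low bits and deopts with
      // kLostPrecision.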
1741  int32_t divisor = m.Value();
1742  Node* mask = __ Int32Constant(divisor - 1);
1743  Node* shift = __ Int32Constant(WhichPowerOf2(divisor));
1744  Node* check = __ Word32Equal(__ Word32And(lhs, mask), zero);
1745  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, VectorSlotPair(),
1746  check, frame_state);
1747  return __ Word32Sar(lhs, shift);
1748  } else {
1749  auto if_rhs_positive = __ MakeLabel();
1750  auto if_rhs_negative = __ MakeDeferredLabel();
1751  auto done = __ MakeLabel(MachineRepresentation::kWord32);
1752 
1753  // Check if {rhs} is positive (and not zero).
1754  Node* check_rhs_positive = __ Int32LessThan(zero, rhs);
1755  __ Branch(check_rhs_positive, &if_rhs_positive, &if_rhs_negative);
1756 
1757  __ Bind(&if_rhs_positive);
1758  {
1759  // Fast case, no additional checking required.
1760  __ Goto(&done, __ Int32Div(lhs, rhs));
1761  }
1762 
1763  __ Bind(&if_rhs_negative);
1764  {
1765  auto if_lhs_minint = __ MakeDeferredLabel();
1766  auto if_lhs_notminint = __ MakeLabel();
1767 
1768  // Check if {rhs} is zero.
1769  Node* check_rhs_zero = __ Word32Equal(rhs, zero);
1770  __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, VectorSlotPair(),
1771  check_rhs_zero, frame_state);
1772 
1773  // Check if {lhs} is zero, as that would produce minus zero.
1774  Node* check_lhs_zero = __ Word32Equal(lhs, zero);
1775  __ DeoptimizeIf(DeoptimizeReason::kMinusZero, VectorSlotPair(),
1776  check_lhs_zero, frame_state);
1777 
1778  // Check if {lhs} is kMinInt and {rhs} is -1, in which case we'd have
1779  // to return -kMinInt, which is not representable as Word32.
1780  Node* check_lhs_minint = graph()->NewNode(machine()->Word32Equal(), lhs,
1781  __ Int32Constant(kMinInt));
1782  __ Branch(check_lhs_minint, &if_lhs_minint, &if_lhs_notminint);
1783 
1784  __ Bind(&if_lhs_minint);
1785  {
1786  // Check that {rhs} is not -1, otherwise result would be -kMinInt.
1787  Node* check_rhs_minusone = __ Word32Equal(rhs, __ Int32Constant(-1));
1788  __ DeoptimizeIf(DeoptimizeReason::kOverflow, VectorSlotPair(),
1789  check_rhs_minusone, frame_state);
1790 
1791  // Perform the actual integer division.
1792  __ Goto(&done, __ Int32Div(lhs, rhs));
1793  }
1794 
1795  __ Bind(&if_lhs_notminint);
1796  {
1797  // Perform the actual integer division.
1798  __ Goto(&done, __ Int32Div(lhs, rhs));
1799  }
1800  }
1801 
1802  __ Bind(&done);
1803  Node* value = done.PhiAt(0);
1804 
1805  // Check if the remainder is non-zero.
1806  Node* check = __ Word32Equal(lhs, __ Int32Mul(value, rhs));
1807  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, VectorSlotPair(),
1808  check, frame_state);
1809 
1810  return value;
1811  }
1812 }
1813 
1814 Node* EffectControlLinearizer::BuildUint32Mod(Node* lhs, Node* rhs) {
1815  auto if_rhs_power_of_two = __ MakeLabel();
1816  auto done = __ MakeLabel(MachineRepresentation::kWord32);
1817 
1818  // Compute the mask for the {rhs}.
1819  Node* one = __ Int32Constant(1);
1820  Node* msk = __ Int32Sub(rhs, one);
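      // If {rhs} is a power of two, e.g. 16, then msk == 15 and lhs & msk
      // equals lhs % rhs for any unsigned lhs (37 & 15 == 5 == 37 % 16);
      // otherwise we fall back to a generic Uint32Mod below.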
1821 
1822  // Check if the {rhs} is a power of two.
1823  __ GotoIf(__ Word32Equal(__ Word32And(rhs, msk), __ Int32Constant(0)),
1824  &if_rhs_power_of_two);
1825  {
1826  // The {rhs} is not a power of two, do a generic Uint32Mod.
1827  __ Goto(&done, __ Uint32Mod(lhs, rhs));
1828  }
1829 
1830  __ Bind(&if_rhs_power_of_two);
1831  {
1832  // The {rhs} is a power of two, just do a fast bit masking.
1833  __ Goto(&done, __ Word32And(lhs, msk));
1834  }
1835 
1836  __ Bind(&done);
1837  return done.PhiAt(0);
1838 }
1839 
1840 Node* EffectControlLinearizer::LowerCheckedInt32Mod(Node* node,
1841  Node* frame_state) {
1842  // General case for signed integer modulus, with optimization for (unknown)
1843  // power of 2 right hand side.
1844  //
1845  // if rhs <= 0 then
1846  // rhs = -rhs
1847  // deopt if rhs == 0
1848  // let msk = rhs - 1 in
1849  // if lhs < 0 then
1850  // let lhs_abs = -lhs in
1851  // let res = if rhs & msk == 0 then
1852  // lhs_abs & msk
1853  // else
1854  // lhs_abs % rhs in
1855  // if lhs < 0 then
1856  // deopt if res == 0
1857  // -res
1858  // else
1859  // res
1860  // else
1861  // if rhs & msk == 0 then
1862  // lhs & msk
1863  // else
1864  // lhs % rhs
1865  //
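      // For example, lhs == -5, rhs == 3 takes the negative-lhs path: lhs_abs
      // is 5, res is 5 % 3 == 2, and the result is -2 (matching JavaScript's
      // -5 % 3). For lhs == -3, rhs == 3 the remainder is 0, which would have
      // to be -0 and therefore deopts with kMinusZero.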
1866  Node* lhs = node->InputAt(0);
1867  Node* rhs = node->InputAt(1);
1868 
1869  auto if_rhs_not_positive = __ MakeDeferredLabel();
1870  auto if_lhs_negative = __ MakeDeferredLabel();
1871  auto if_rhs_power_of_two = __ MakeLabel();
1872  auto rhs_checked = __ MakeLabel(MachineRepresentation::kWord32);
1873  auto done = __ MakeLabel(MachineRepresentation::kWord32);
1874 
1875  Node* zero = __ Int32Constant(0);
1876 
1877  // Check if {rhs} is not strictly positive.
1878  Node* check0 = __ Int32LessThanOrEqual(rhs, zero);
1879  __ GotoIf(check0, &if_rhs_not_positive);
1880  __ Goto(&rhs_checked, rhs);
1881 
1882  __ Bind(&if_rhs_not_positive);
1883  {
1884  // Negate {rhs}; this might still produce a negative result in the case
1885  // of -2^31, but that is handled safely below.
1886  Node* vtrue0 = __ Int32Sub(zero, rhs);
1887 
1888  // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
1889  __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, VectorSlotPair(),
1890  __ Word32Equal(vtrue0, zero), frame_state);
1891  __ Goto(&rhs_checked, vtrue0);
1892  }
1893 
1894  __ Bind(&rhs_checked);
1895  rhs = rhs_checked.PhiAt(0);
1896 
1897  __ GotoIf(__ Int32LessThan(lhs, zero), &if_lhs_negative);
1898  {
1899  // The {lhs} is a non-negative integer.
1900  __ Goto(&done, BuildUint32Mod(lhs, rhs));
1901  }
1902 
1903  __ Bind(&if_lhs_negative);
1904  {
1905  // The {lhs} is a negative integer. This is very unlikely and
1906  // we intentionally don't use BuildUint32Mod() here, which
1907  // would try to figure out whether {rhs} is a power of two,
1908  // since this is intended to be a slow path.
1909  Node* res = __ Uint32Mod(__ Int32Sub(zero, lhs), rhs);
1910 
1911  // Check if we would have to return -0.
1912  __ DeoptimizeIf(DeoptimizeReason::kMinusZero, VectorSlotPair(),
1913  __ Word32Equal(res, zero), frame_state);
1914  __ Goto(&done, __ Int32Sub(zero, res));
1915  }
1916 
1917  __ Bind(&done);
1918  return done.PhiAt(0);
1919 }
1920 
1921 Node* EffectControlLinearizer::LowerCheckedUint32Div(Node* node,
1922  Node* frame_state) {
1923  Node* lhs = node->InputAt(0);
1924  Node* rhs = node->InputAt(1);
1925  Node* zero = __ Int32Constant(0);
1926 
1927  // Check if the {rhs} is a known power of two.
1928  Uint32Matcher m(rhs);
1929  if (m.IsPowerOf2()) {
1930  // Since we know that {rhs} is a power of two, we can perform a fast
1931  // check to see if the relevant least significant bits of the {lhs}
1932  // are all zero, and if so we know that we can perform a division
1933  // safely (and fast by doing a logical - aka zero extending - right
1934  // shift on {lhs}).
1935  uint32_t divisor = m.Value();
1936  Node* mask = __ Uint32Constant(divisor - 1);
1937  Node* shift = __ Uint32Constant(WhichPowerOf2(divisor));
1938  Node* check = __ Word32Equal(__ Word32And(lhs, mask), zero);
1939  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, VectorSlotPair(),
1940  check, frame_state);
1941  return __ Word32Shr(lhs, shift);
1942  } else {
1943  // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
1944  Node* check = __ Word32Equal(rhs, zero);
1945  __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, VectorSlotPair(), check,
1946  frame_state);
1947 
1948  // Perform the actual unsigned integer division.
1949  Node* value = __ Uint32Div(lhs, rhs);
1950 
1951  // Check if the remainder is non-zero.
1952  check = __ Word32Equal(lhs, __ Int32Mul(rhs, value));
1953  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, VectorSlotPair(),
1954  check, frame_state);
1955  return value;
1956  }
1957 }
1958 
1959 Node* EffectControlLinearizer::LowerCheckedUint32Mod(Node* node,
1960  Node* frame_state) {
1961  Node* lhs = node->InputAt(0);
1962  Node* rhs = node->InputAt(1);
1963 
1964  Node* zero = __ Int32Constant(0);
1965 
1966  // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
1967  Node* check = __ Word32Equal(rhs, zero);
1968  __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, VectorSlotPair(), check,
1969  frame_state);
1970 
1971  // Perform the actual unsigned integer modulus.
1972  return BuildUint32Mod(lhs, rhs);
1973 }
1974 
1975 Node* EffectControlLinearizer::LowerCheckedInt32Mul(Node* node,
1976  Node* frame_state) {
1977  CheckForMinusZeroMode mode = CheckMinusZeroModeOf(node->op());
1978  Node* lhs = node->InputAt(0);
1979  Node* rhs = node->InputAt(1);
1980 
1981  Node* projection = __ Int32MulWithOverflow(lhs, rhs);
1982  Node* check = __ Projection(1, projection);
1983  __ DeoptimizeIf(DeoptimizeReason::kOverflow, VectorSlotPair(), check,
1984  frame_state);
1985 
1986  Node* value = __ Projection(0, projection);
1987 
1988  if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
1989  auto if_zero = __ MakeDeferredLabel();
1990  auto check_done = __ MakeLabel();
1991  Node* zero = __ Int32Constant(0);
1992  Node* check_zero = __ Word32Equal(value, zero);
1993  __ GotoIf(check_zero, &if_zero);
1994  __ Goto(&check_done);
1995 
1996  __ Bind(&if_zero);
1997  // We may need to return negative zero.
1998  Node* check_or = __ Int32LessThan(__ Word32Or(lhs, rhs), zero);
1999  __ DeoptimizeIf(DeoptimizeReason::kMinusZero, VectorSlotPair(), check_or,
2000  frame_state);
2001  __ Goto(&check_done);
2002 
2003  __ Bind(&check_done);
2004  }
2005 
2006  return value;
2007 }
2008 
2009 Node* EffectControlLinearizer::LowerCheckedInt32ToTaggedSigned(
2010  Node* node, Node* frame_state) {
2011  DCHECK(SmiValuesAre31Bits());
2012  Node* value = node->InputAt(0);
2013  const CheckParameters& params = CheckParametersOf(node->op());
2014 
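      // Adding {value} to itself is the same as shifting it left by one,
      // which is exactly the 31-bit Smi encoding; the overflow bit therefore
      // tells us whether the value fits in the Smi range.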
2015  Node* add = __ Int32AddWithOverflow(value, value);
2016  Node* check = __ Projection(1, add);
2017  __ DeoptimizeIf(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2018  frame_state);
2019  Node* result = __ Projection(0, add);
2020  result = ChangeInt32ToIntPtr(result);
2021  return result;
2022 }
2023 
2024 Node* EffectControlLinearizer::LowerCheckedInt64ToInt32(Node* node,
2025  Node* frame_state) {
2026  Node* value = node->InputAt(0);
2027  const CheckParameters& params = CheckParametersOf(node->op());
2028 
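      // Truncating to 32 bits and sign-extending back reproduces {value} only
      // if it fits in the int32 range; otherwise we deopt with kLostPrecision.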
2029  Node* value32 = __ TruncateInt64ToInt32(value);
2030  Node* check = __ Word64Equal(__ ChangeInt32ToInt64(value32), value);
2031  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2032  frame_state);
2033  return value32;
2034 }
2035 
2036 Node* EffectControlLinearizer::LowerCheckedInt64ToTaggedSigned(
2037  Node* node, Node* frame_state) {
2038  Node* value = node->InputAt(0);
2039  const CheckParameters& params = CheckParametersOf(node->op());
2040 
2041  Node* value32 = __ TruncateInt64ToInt32(value);
2042  Node* check = __ Word64Equal(__ ChangeInt32ToInt64(value32), value);
2043  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2044  frame_state);
2045 
2046  if (SmiValuesAre32Bits()) {
2047  return ChangeInt64ToSmi(value);
2048  } else {
2049  Node* add = __ Int32AddWithOverflow(value32, value32);
2050  Node* check = __ Projection(1, add);
2051  __ DeoptimizeIf(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2052  frame_state);
2053  Node* result = __ Projection(0, add);
2054  result = ChangeInt32ToIntPtr(result);
2055  return result;
2056  }
2057 }
2058 
2059 Node* EffectControlLinearizer::LowerCheckedUint32Bounds(Node* node,
2060  Node* frame_state) {
2061  Node* index = node->InputAt(0);
2062  Node* limit = node->InputAt(1);
2063  const CheckParameters& params = CheckParametersOf(node->op());
2064 
2065  Node* check = __ Uint32LessThan(index, limit);
2066  __ DeoptimizeIfNot(DeoptimizeReason::kOutOfBounds, params.feedback(), check,
2067  frame_state, IsSafetyCheck::kCriticalSafetyCheck);
2068  return index;
2069 }
2070 
2071 Node* EffectControlLinearizer::LowerCheckedUint32ToInt32(Node* node,
2072  Node* frame_state) {
2073  Node* value = node->InputAt(0);
2074  const CheckParameters& params = CheckParametersOf(node->op());
2075  Node* unsafe = __ Int32LessThan(value, __ Int32Constant(0));
2076  __ DeoptimizeIf(DeoptimizeReason::kLostPrecision, params.feedback(), unsafe,
2077  frame_state);
2078  return value;
2079 }
2080 
2081 Node* EffectControlLinearizer::LowerCheckedUint32ToTaggedSigned(
2082  Node* node, Node* frame_state) {
2083  Node* value = node->InputAt(0);
2084  const CheckParameters& params = CheckParametersOf(node->op());
2085  Node* check = __ Uint32LessThanOrEqual(value, SmiMaxValueConstant());
2086  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2087  frame_state);
2088  return ChangeUint32ToSmi(value);
2089 }
2090 
2091 Node* EffectControlLinearizer::LowerCheckedUint64Bounds(Node* node,
2092  Node* frame_state) {
2093  CheckParameters const& params = CheckParametersOf(node->op());
2094  Node* const index = node->InputAt(0);
2095  Node* const limit = node->InputAt(1);
2096 
2097  Node* check = __ Uint64LessThan(index, limit);
2098  __ DeoptimizeIfNot(DeoptimizeReason::kOutOfBounds, params.feedback(), check,
2099  frame_state, IsSafetyCheck::kCriticalSafetyCheck);
2100  return index;
2101 }
2102 
2103 Node* EffectControlLinearizer::LowerCheckedUint64ToInt32(Node* node,
2104  Node* frame_state) {
2105  Node* value = node->InputAt(0);
2106  const CheckParameters& params = CheckParametersOf(node->op());
2107 
2108  Node* check = __ Uint64LessThanOrEqual(value, __ Int64Constant(kMaxInt));
2109  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2110  frame_state);
2111  return __ TruncateInt64ToInt32(value);
2112 }
2113 
2114 Node* EffectControlLinearizer::LowerCheckedUint64ToTaggedSigned(
2115  Node* node, Node* frame_state) {
2116  Node* value = node->InputAt(0);
2117  const CheckParameters& params = CheckParametersOf(node->op());
2118 
2119  Node* check =
2120  __ Uint64LessThanOrEqual(value, __ Int64Constant(Smi::kMaxValue));
2121  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2122  frame_state);
2123  return ChangeInt64ToSmi(value);
2124 }
2125 
2126 Node* EffectControlLinearizer::BuildCheckedFloat64ToInt32(
2127  CheckForMinusZeroMode mode, const VectorSlotPair& feedback, Node* value,
2128  Node* frame_state) {
2129  Node* value32 = __ RoundFloat64ToInt32(value);
2130  Node* check_same = __ Float64Equal(value, __ ChangeInt32ToFloat64(value32));
2131  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecisionOrNaN, feedback,
2132  check_same, frame_state);
2133 
2134  if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
2135  // Check if {value} is -0.
2136  auto if_zero = __ MakeDeferredLabel();
2137  auto check_done = __ MakeLabel();
2138 
2139  Node* check_zero = __ Word32Equal(value32, __ Int32Constant(0));
2140  __ GotoIf(check_zero, &if_zero);
2141  __ Goto(&check_done);
2142 
2143  __ Bind(&if_zero);
2144  // In case of 0, we need to check the high bits for the IEEE -0 pattern.
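      // (The high word is negative as a signed 32-bit integer exactly when
      //  the sign bit is set, which here means the value is -0.0.)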
2145  Node* check_negative = __ Int32LessThan(__ Float64ExtractHighWord32(value),
2146  __ Int32Constant(0));
2147  __ DeoptimizeIf(DeoptimizeReason::kMinusZero, feedback, check_negative,
2148  frame_state);
2149  __ Goto(&check_done);
2150 
2151  __ Bind(&check_done);
2152  }
2153  return value32;
2154 }
2155 
2156 Node* EffectControlLinearizer::LowerCheckedFloat64ToInt32(Node* node,
2157  Node* frame_state) {
2158  const CheckMinusZeroParameters& params =
2159  CheckMinusZeroParametersOf(node->op());
2160  Node* value = node->InputAt(0);
2161  return BuildCheckedFloat64ToInt32(params.mode(), params.feedback(), value,
2162  frame_state);
2163 }
2164 
2165 Node* EffectControlLinearizer::BuildCheckedFloat64ToInt64(
2166  CheckForMinusZeroMode mode, const VectorSlotPair& feedback, Node* value,
2167  Node* frame_state) {
2168  Node* value64 = __ TruncateFloat64ToInt64(value);
2169  Node* check_same = __ Float64Equal(value, __ ChangeInt64ToFloat64(value64));
2170  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecisionOrNaN, feedback,
2171  check_same, frame_state);
2172 
2173  if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
2174  // Check if {value} is -0.
2175  auto if_zero = __ MakeDeferredLabel();
2176  auto check_done = __ MakeLabel();
2177 
2178  Node* check_zero = __ Word64Equal(value64, __ Int64Constant(0));
2179  __ GotoIf(check_zero, &if_zero);
2180  __ Goto(&check_done);
2181 
2182  __ Bind(&if_zero);
2183  // In case of 0, we need to check the high bits for the IEEE -0 pattern.
2184  Node* check_negative = __ Int32LessThan(__ Float64ExtractHighWord32(value),
2185  __ Int32Constant(0));
2186  __ DeoptimizeIf(DeoptimizeReason::kMinusZero, feedback, check_negative,
2187  frame_state);
2188  __ Goto(&check_done);
2189 
2190  __ Bind(&check_done);
2191  }
2192  return value64;
2193 }
2194 
2195 Node* EffectControlLinearizer::LowerCheckedFloat64ToInt64(Node* node,
2196  Node* frame_state) {
2197  const CheckMinusZeroParameters& params =
2198  CheckMinusZeroParametersOf(node->op());
2199  Node* value = node->InputAt(0);
2200  return BuildCheckedFloat64ToInt64(params.mode(), params.feedback(), value,
2201  frame_state);
2202 }
2203 
2204 Node* EffectControlLinearizer::LowerCheckedTaggedSignedToInt32(
2205  Node* node, Node* frame_state) {
2206  Node* value = node->InputAt(0);
2207  const CheckParameters& params = CheckParametersOf(node->op());
2208  Node* check = ObjectIsSmi(value);
2209  __ DeoptimizeIfNot(DeoptimizeReason::kNotASmi, params.feedback(), check,
2210  frame_state);
2211  return ChangeSmiToInt32(value);
2212 }
2213 
2214 Node* EffectControlLinearizer::LowerCheckedTaggedToInt32(Node* node,
2215  Node* frame_state) {
2216  const CheckMinusZeroParameters& params =
2217  CheckMinusZeroParametersOf(node->op());
2218  Node* value = node->InputAt(0);
2219 
2220  auto if_not_smi = __ MakeDeferredLabel();
2221  auto done = __ MakeLabel(MachineRepresentation::kWord32);
2222 
2223  Node* check = ObjectIsSmi(value);
2224  __ GotoIfNot(check, &if_not_smi);
2225  // In the Smi case, just convert to int32.
2226  __ Goto(&done, ChangeSmiToInt32(value));
2227 
2228  // In the non-Smi case, check the heap numberness, load the number and convert
2229  // to int32.
2230  __ Bind(&if_not_smi);
2231  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2232  Node* check_map = __ WordEqual(value_map, __ HeapNumberMapConstant());
2233  __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, params.feedback(),
2234  check_map, frame_state);
2235  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2236  vfalse = BuildCheckedFloat64ToInt32(params.mode(), params.feedback(), vfalse,
2237  frame_state);
2238  __ Goto(&done, vfalse);
2239 
2240  __ Bind(&done);
2241  return done.PhiAt(0);
2242 }
2243 
2244 Node* EffectControlLinearizer::LowerCheckedTaggedToInt64(Node* node,
2245  Node* frame_state) {
2246  const CheckMinusZeroParameters& params =
2247  CheckMinusZeroParametersOf(node->op());
2248  Node* value = node->InputAt(0);
2249 
2250  auto if_not_smi = __ MakeDeferredLabel();
2251  auto done = __ MakeLabel(MachineRepresentation::kWord64);
2252 
2253  Node* check = ObjectIsSmi(value);
2254  __ GotoIfNot(check, &if_not_smi);
2255  // In the Smi case, just convert to int64.
2256  __ Goto(&done, ChangeSmiToInt64(value));
2257 
2258  // In the non-Smi case, check the heap numberness, load the number and convert
2259  // to int64.
2260  __ Bind(&if_not_smi);
2261  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2262  Node* check_map = __ WordEqual(value_map, __ HeapNumberMapConstant());
2263  __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, params.feedback(),
2264  check_map, frame_state);
2265  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2266  vfalse = BuildCheckedFloat64ToInt64(params.mode(), params.feedback(), vfalse,
2267  frame_state);
2268  __ Goto(&done, vfalse);
2269 
2270  __ Bind(&done);
2271  return done.PhiAt(0);
2272 }
2273 
2274 Node* EffectControlLinearizer::BuildCheckedHeapNumberOrOddballToFloat64(
2275  CheckTaggedInputMode mode, const VectorSlotPair& feedback, Node* value,
2276  Node* frame_state) {
2277  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2278  Node* check_number = __ WordEqual(value_map, __ HeapNumberMapConstant());
2279  switch (mode) {
2280  case CheckTaggedInputMode::kNumber: {
2281  __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, feedback,
2282  check_number, frame_state);
2283  break;
2284  }
2285  case CheckTaggedInputMode::kNumberOrOddball: {
2286  auto check_done = __ MakeLabel();
2287 
2288  __ GotoIf(check_number, &check_done);
2289  // Oddballs also contain the numeric value, so let us just check that
2290  // we have an oddball here.
2291  Node* instance_type =
2292  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2293  Node* check_oddball =
2294  __ Word32Equal(instance_type, __ Int32Constant(ODDBALL_TYPE));
2295  __ DeoptimizeIfNot(DeoptimizeReason::kNotANumberOrOddball, feedback,
2296  check_oddball, frame_state);
2297  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
2298  __ Goto(&check_done);
2299 
2300  __ Bind(&check_done);
2301  break;
2302  }
2303  }
2304  return __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2305 }
2306 
2307 Node* EffectControlLinearizer::LowerCheckedTaggedToFloat64(Node* node,
2308  Node* frame_state) {
2309  CheckTaggedInputParameters const& p =
2310  CheckTaggedInputParametersOf(node->op());
2311  Node* value = node->InputAt(0);
2312 
2313  auto if_smi = __ MakeLabel();
2314  auto done = __ MakeLabel(MachineRepresentation::kFloat64);
2315 
2316  Node* check = ObjectIsSmi(value);
2317  __ GotoIf(check, &if_smi);
2318 
2319  // In the Smi case, just convert to int32 and then float64.
2320  // Otherwise, check heap numberness and load the number.
2321  Node* number = BuildCheckedHeapNumberOrOddballToFloat64(
2322  p.mode(), p.feedback(), value, frame_state);
2323  __ Goto(&done, number);
2324 
2325  __ Bind(&if_smi);
2326  Node* from_smi = ChangeSmiToInt32(value);
2327  from_smi = __ ChangeInt32ToFloat64(from_smi);
2328  __ Goto(&done, from_smi);
2329 
2330  __ Bind(&done);
2331  return done.PhiAt(0);
2332 }
2333 
2334 Node* EffectControlLinearizer::LowerCheckedTaggedToTaggedSigned(
2335  Node* node, Node* frame_state) {
2336  Node* value = node->InputAt(0);
2337  const CheckParameters& params = CheckParametersOf(node->op());
2338 
2339  Node* check = ObjectIsSmi(value);
2340  __ DeoptimizeIfNot(DeoptimizeReason::kNotASmi, params.feedback(), check,
2341  frame_state);
2342 
2343  return value;
2344 }
2345 
2346 Node* EffectControlLinearizer::LowerCheckedTaggedToTaggedPointer(
2347  Node* node, Node* frame_state) {
2348  Node* value = node->InputAt(0);
2349  const CheckParameters& params = CheckParametersOf(node->op());
2350 
2351  Node* check = ObjectIsSmi(value);
2352  __ DeoptimizeIf(DeoptimizeReason::kSmi, params.feedback(), check,
2353  frame_state);
2354  return value;
2355 }
2356 
2357 Node* EffectControlLinearizer::LowerTruncateTaggedToWord32(Node* node) {
2358  Node* value = node->InputAt(0);
2359 
2360  auto if_not_smi = __ MakeDeferredLabel();
2361  auto done = __ MakeLabel(MachineRepresentation::kWord32);
2362 
2363  Node* check = ObjectIsSmi(value);
2364  __ GotoIfNot(check, &if_not_smi);
2365  __ Goto(&done, ChangeSmiToInt32(value));
2366 
2367  __ Bind(&if_not_smi);
2368  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
2369  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2370  vfalse = __ TruncateFloat64ToWord32(vfalse);
2371  __ Goto(&done, vfalse);
2372 
2373  __ Bind(&done);
2374  return done.PhiAt(0);
2375 }
2376 
2377 Node* EffectControlLinearizer::LowerCheckedTruncateTaggedToWord32(
2378  Node* node, Node* frame_state) {
2379  const CheckTaggedInputParameters& params =
2380  CheckTaggedInputParametersOf(node->op());
2381  Node* value = node->InputAt(0);
2382 
2383  auto if_not_smi = __ MakeLabel();
2384  auto done = __ MakeLabel(MachineRepresentation::kWord32);
2385 
2386  Node* check = ObjectIsSmi(value);
2387  __ GotoIfNot(check, &if_not_smi);
2388  // In the Smi case, just convert to int32.
2389  __ Goto(&done, ChangeSmiToInt32(value));
2390 
2391  // Otherwise, check that it's a heap number or oddball and truncate the value
2392  // to int32.
2393  __ Bind(&if_not_smi);
2394  Node* number = BuildCheckedHeapNumberOrOddballToFloat64(
2395  params.mode(), params.feedback(), value, frame_state);
2396  number = __ TruncateFloat64ToWord32(number);
2397  __ Goto(&done, number);
2398 
2399  __ Bind(&done);
2400  return done.PhiAt(0);
2401 }
2402 
2403 Node* EffectControlLinearizer::LowerAllocate(Node* node) {
2404  Node* size = node->InputAt(0);
2405  PretenureFlag pretenure = PretenureFlagOf(node->op());
2406  Node* new_node = __ Allocate(pretenure, size);
2407  return new_node;
2408 }
2409 
2410 Node* EffectControlLinearizer::LowerNumberToString(Node* node) {
2411  Node* argument = node->InputAt(0);
2412 
2413  Callable const callable =
2414  Builtins::CallableFor(isolate(), Builtins::kNumberToString);
2415  Operator::Properties properties = Operator::kEliminatable;
2416  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
2417  auto call_descriptor = Linkage::GetStubCallDescriptor(
2418  graph()->zone(), callable.descriptor(),
2419  callable.descriptor().GetStackParameterCount(), flags, properties);
2420  return __ Call(call_descriptor, __ HeapConstant(callable.code()), argument,
2421  __ NoContextConstant());
2422 }
2423 
2424 Node* EffectControlLinearizer::LowerObjectIsArrayBufferView(Node* node) {
2425  Node* value = node->InputAt(0);
2426 
2427  auto if_smi = __ MakeDeferredLabel();
2428  auto done = __ MakeLabel(MachineRepresentation::kBit);
2429 
2430  Node* check = ObjectIsSmi(value);
2431  __ GotoIf(check, &if_smi);
2432 
2433  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2434  Node* value_instance_type =
2435  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2436  STATIC_ASSERT(JS_TYPED_ARRAY_TYPE + 1 == JS_DATA_VIEW_TYPE);
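      // The unsigned comparison below is a range check: it is true exactly
      // for the two consecutive instance types JS_TYPED_ARRAY_TYPE and
      // JS_DATA_VIEW_TYPE, since smaller values wrap around to large unsigned
      // numbers.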
2437  Node* vfalse = __ Uint32LessThan(
2438  __ Int32Sub(value_instance_type, __ Int32Constant(JS_TYPED_ARRAY_TYPE)),
2439  __ Int32Constant(2));
2440  __ Goto(&done, vfalse);
2441 
2442  __ Bind(&if_smi);
2443  __ Goto(&done, __ Int32Constant(0));
2444 
2445  __ Bind(&done);
2446  return done.PhiAt(0);
2447 }
2448 
2449 Node* EffectControlLinearizer::LowerObjectIsBigInt(Node* node) {
2450  Node* value = node->InputAt(0);
2451 
2452  auto if_smi = __ MakeDeferredLabel();
2453  auto done = __ MakeLabel(MachineRepresentation::kBit);
2454 
2455  Node* check = ObjectIsSmi(value);
2456  __ GotoIf(check, &if_smi);
2457  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2458  Node* value_instance_type =
2459  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2460  Node* vfalse =
2461  __ Word32Equal(value_instance_type, __ Uint32Constant(BIGINT_TYPE));
2462  __ Goto(&done, vfalse);
2463 
2464  __ Bind(&if_smi);
2465  __ Goto(&done, __ Int32Constant(0));
2466 
2467  __ Bind(&done);
2468  return done.PhiAt(0);
2469 }
2470 
2471 Node* EffectControlLinearizer::LowerObjectIsCallable(Node* node) {
2472  Node* value = node->InputAt(0);
2473 
2474  auto if_smi = __ MakeDeferredLabel();
2475  auto done = __ MakeLabel(MachineRepresentation::kBit);
2476 
2477  Node* check = ObjectIsSmi(value);
2478  __ GotoIf(check, &if_smi);
2479 
2480  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2481  Node* value_bit_field =
2482  __ LoadField(AccessBuilder::ForMapBitField(), value_map);
2483  Node* vfalse =
2484  __ Word32Equal(__ Int32Constant(Map::IsCallableBit::kMask),
2485  __ Word32And(value_bit_field,
2486  __ Int32Constant(Map::IsCallableBit::kMask)));
2487  __ Goto(&done, vfalse);
2488 
2489  __ Bind(&if_smi);
2490  __ Goto(&done, __ Int32Constant(0));
2491 
2492  __ Bind(&done);
2493  return done.PhiAt(0);
2494 }
2495 
2496 Node* EffectControlLinearizer::LowerObjectIsConstructor(Node* node) {
2497  Node* value = node->InputAt(0);
2498 
2499  auto if_smi = __ MakeDeferredLabel();
2500  auto done = __ MakeLabel(MachineRepresentation::kBit);
2501 
2502  Node* check = ObjectIsSmi(value);
2503  __ GotoIf(check, &if_smi);
2504 
2505  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2506  Node* value_bit_field =
2507  __ LoadField(AccessBuilder::ForMapBitField(), value_map);
2508  Node* vfalse = __ Word32Equal(
2509  __ Int32Constant(Map::IsConstructorBit::kMask),
2510  __ Word32And(value_bit_field,
2511  __ Int32Constant(Map::IsConstructorBit::kMask)));
2512  __ Goto(&done, vfalse);
2513 
2514  __ Bind(&if_smi);
2515  __ Goto(&done, __ Int32Constant(0));
2516 
2517  __ Bind(&done);
2518  return done.PhiAt(0);
2519 }
2520 
2521 Node* EffectControlLinearizer::LowerObjectIsDetectableCallable(Node* node) {
2522  Node* value = node->InputAt(0);
2523 
2524  auto if_smi = __ MakeDeferredLabel();
2525  auto done = __ MakeLabel(MachineRepresentation::kBit);
2526 
2527  Node* check = ObjectIsSmi(value);
2528  __ GotoIf(check, &if_smi);
2529 
2530  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2531  Node* value_bit_field =
2532  __ LoadField(AccessBuilder::ForMapBitField(), value_map);
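      // A detectable callable has the IsCallable bit set and the
      // IsUndetectable bit clear, so masking with both bits must yield
      // exactly Map::IsCallableBit::kMask.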
2533  Node* vfalse = __ Word32Equal(
2534  __ Int32Constant(Map::IsCallableBit::kMask),
2535  __ Word32And(value_bit_field,
2536  __ Int32Constant((Map::IsCallableBit::kMask) |
2537  (Map::IsUndetectableBit::kMask))));
2538  __ Goto(&done, vfalse);
2539 
2540  __ Bind(&if_smi);
2541  __ Goto(&done, __ Int32Constant(0));
2542 
2543  __ Bind(&done);
2544  return done.PhiAt(0);
2545 }
2546 
2547 Node* EffectControlLinearizer::LowerNumberIsFloat64Hole(Node* node) {
2548  Node* value = node->InputAt(0);
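      // The hole is stored as a NaN with a distinguished upper word, so
      // comparing just the high 32 bits against kHoleNanUpper32 identifies it.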
2549  Node* check = __ Word32Equal(__ Float64ExtractHighWord32(value),
2550  __ Int32Constant(kHoleNanUpper32));
2551  return check;
2552 }
2553 
2554 Node* EffectControlLinearizer::LowerNumberIsFinite(Node* node) {
2555  Node* number = node->InputAt(0);
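      // number - number is 0 for every finite value but NaN for NaN and for
      // +/-Infinity, so the self-equality check below is true exactly for
      // finite numbers.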
2556  Node* diff = __ Float64Sub(number, number);
2557  Node* check = __ Float64Equal(diff, diff);
2558  return check;
2559 }
2560 
2561 Node* EffectControlLinearizer::LowerObjectIsFiniteNumber(Node* node) {
2562  Node* object = node->InputAt(0);
2563  Node* zero = __ Int32Constant(0);
2564  Node* one = __ Int32Constant(1);
2565 
2566  auto done = __ MakeLabel(MachineRepresentation::kBit);
2567 
2568  // Check if {object} is a Smi.
2569  __ GotoIf(ObjectIsSmi(object), &done, one);
2570 
2571  // Check if {object} is a HeapNumber.
2572  Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
2573  __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2574  zero);
2575 
2576  // {object} is a HeapNumber.
2577  Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
2578  Node* diff = __ Float64Sub(value, value);
2579  Node* check = __ Float64Equal(diff, diff);
2580  __ Goto(&done, check);
2581 
2582  __ Bind(&done);
2583  return done.PhiAt(0);
2584 }
2585 
2586 Node* EffectControlLinearizer::LowerNumberIsInteger(Node* node) {
2587  Node* number = node->InputAt(0);
2588  Node* trunc = BuildFloat64RoundTruncate(number);
2589  Node* diff = __ Float64Sub(number, trunc);
2590  Node* check = __ Float64Equal(diff, __ Float64Constant(0));
2591  return check;
2592 }
2593 
2594 Node* EffectControlLinearizer::LowerObjectIsInteger(Node* node) {
2595  Node* object = node->InputAt(0);
2596  Node* zero = __ Int32Constant(0);
2597  Node* one = __ Int32Constant(1);
2598 
2599  auto done = __ MakeLabel(MachineRepresentation::kBit);
2600 
2601  // Check if {object} is a Smi.
2602  __ GotoIf(ObjectIsSmi(object), &done, one);
2603 
2604  // Check if {object} is a HeapNumber.
2605  Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
2606  __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2607  zero);
2608 
2609  // {object} is a HeapNumber.
2610  Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
2611  Node* trunc = BuildFloat64RoundTruncate(value);
2612  Node* diff = __ Float64Sub(value, trunc);
2613  Node* check = __ Float64Equal(diff, __ Float64Constant(0));
2614  __ Goto(&done, check);
2615 
2616  __ Bind(&done);
2617  return done.PhiAt(0);
2618 }
2619 
2620 Node* EffectControlLinearizer::LowerNumberIsSafeInteger(Node* node) {
2621  Node* number = node->InputAt(0);
2622  Node* zero = __ Int32Constant(0);
2623  auto done = __ MakeLabel(MachineRepresentation::kBit);
2624 
2625  Node* trunc = BuildFloat64RoundTruncate(number);
2626  Node* diff = __ Float64Sub(number, trunc);
2627  Node* check = __ Float64Equal(diff, __ Float64Constant(0));
2628  __ GotoIfNot(check, &done, zero);
2629  Node* in_range = __ Float64LessThanOrEqual(
2630  __ Float64Abs(trunc), __ Float64Constant(kMaxSafeInteger));
2631  __ Goto(&done, in_range);
2632 
2633  __ Bind(&done);
2634  return done.PhiAt(0);
2635 }
2636 
2637 Node* EffectControlLinearizer::LowerObjectIsSafeInteger(Node* node) {
2638  Node* object = node->InputAt(0);
2639  Node* zero = __ Int32Constant(0);
2640  Node* one = __ Int32Constant(1);
2641 
2642  auto done = __ MakeLabel(MachineRepresentation::kBit);
2643 
2644  // Check if {object} is a Smi.
2645  __ GotoIf(ObjectIsSmi(object), &done, one);
2646 
2647  // Check if {object} is a HeapNumber.
2648  Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
2649  __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2650  zero);
2651 
2652  // {object} is a HeapNumber.
2653  Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
2654  Node* trunc = BuildFloat64RoundTruncate(value);
2655  Node* diff = __ Float64Sub(value, trunc);
2656  Node* check = __ Float64Equal(diff, __ Float64Constant(0));
2657  __ GotoIfNot(check, &done, zero);
2658  Node* in_range = __ Float64LessThanOrEqual(
2659  __ Float64Abs(trunc), __ Float64Constant(kMaxSafeInteger));
2660  __ Goto(&done, in_range);
2661 
2662  __ Bind(&done);
2663  return done.PhiAt(0);
2664 }
2665 
2666 namespace {
2667 
2668 const int64_t kMinusZeroBits = bit_cast<int64_t>(-0.0);
2669 const int32_t kMinusZeroLoBits = static_cast<int32_t>(kMinusZeroBits);
2670 const int32_t kMinusZeroHiBits = static_cast<int32_t>(kMinusZeroBits >> 32);
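     // -0.0 is the IEEE 754 double with only the sign bit set, so a full
     // 64-bit compare (or a low/high word pair compare on 32-bit targets)
     // against these constants identifies it exactly.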
2671 
2672 } // namespace
2673 
2674 Node* EffectControlLinearizer::LowerObjectIsMinusZero(Node* node) {
2675  Node* value = node->InputAt(0);
2676  Node* zero = __ Int32Constant(0);
2677 
2678  auto done = __ MakeLabel(MachineRepresentation::kBit);
2679 
2680  // Check if {value} is a Smi.
2681  __ GotoIf(ObjectIsSmi(value), &done, zero);
2682 
2683  // Check if {value} is a HeapNumber.
2684  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2685  __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2686  zero);
2687 
2688  // Check if {value} contains -0.
2689  Node* value_value = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2690  if (machine()->Is64()) {
2691  Node* value64 = __ BitcastFloat64ToInt64(value_value);
2692  __ Goto(&done, __ Word64Equal(value64, __ Int64Constant(kMinusZeroBits)));
2693  } else {
2694  Node* value_lo = __ Float64ExtractLowWord32(value_value);
2695  __ GotoIfNot(__ Word32Equal(value_lo, __ Int32Constant(kMinusZeroLoBits)),
2696  &done, zero);
2697  Node* value_hi = __ Float64ExtractHighWord32(value_value);
2698  __ Goto(&done,
2699  __ Word32Equal(value_hi, __ Int32Constant(kMinusZeroHiBits)));
2700  }
2701 
2702  __ Bind(&done);
2703  return done.PhiAt(0);
2704 }
2705 
2706 Node* EffectControlLinearizer::LowerNumberIsMinusZero(Node* node) {
2707  Node* value = node->InputAt(0);
2708 
2709  if (machine()->Is64()) {
2710  Node* value64 = __ BitcastFloat64ToInt64(value);
2711  return __ Word64Equal(value64, __ Int64Constant(kMinusZeroBits));
2712  } else {
2713  auto done = __ MakeLabel(MachineRepresentation::kBit);
2714 
2715  Node* value_lo = __ Float64ExtractLowWord32(value);
2716  __ GotoIfNot(__ Word32Equal(value_lo, __ Int32Constant(kMinusZeroLoBits)),
2717  &done, __ Int32Constant(0));
2718  Node* value_hi = __ Float64ExtractHighWord32(value);
2719  __ Goto(&done,
2720  __ Word32Equal(value_hi, __ Int32Constant(kMinusZeroHiBits)));
2721 
2722  __ Bind(&done);
2723  return done.PhiAt(0);
2724  }
2725 }
2726 
2727 Node* EffectControlLinearizer::LowerObjectIsNaN(Node* node) {
2728  Node* value = node->InputAt(0);
2729  Node* zero = __ Int32Constant(0);
2730 
2731  auto done = __ MakeLabel(MachineRepresentation::kBit);
2732 
2733  // Check if {value} is a Smi.
2734  __ GotoIf(ObjectIsSmi(value), &done, zero);
2735 
2736  // Check if {value} is a HeapNumber.
2737  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2738  __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2739  zero);
2740 
2741  // Check if {value} contains a NaN.
2742  Node* value_value = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2743  __ Goto(&done,
2744  __ Word32Equal(__ Float64Equal(value_value, value_value), zero));
2745 
2746  __ Bind(&done);
2747  return done.PhiAt(0);
2748 }
2749 
2750 Node* EffectControlLinearizer::LowerNumberIsNaN(Node* node) {
2751  Node* number = node->InputAt(0);
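      // NaN is the only value that compares unequal to itself, so the
      // self-equality check below yields 0 exactly for NaN.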
2752  Node* diff = __ Float64Equal(number, number);
2753  Node* check = __ Word32Equal(diff, __ Int32Constant(0));
2754  return check;
2755 }
2756 
2757 Node* EffectControlLinearizer::LowerObjectIsNonCallable(Node* node) {
2758  Node* value = node->InputAt(0);
2759 
2760  auto if_primitive = __ MakeDeferredLabel();
2761  auto done = __ MakeLabel(MachineRepresentation::kBit);
2762 
2763  Node* check0 = ObjectIsSmi(value);
2764  __ GotoIf(check0, &if_primitive);
2765 
2766  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2767  Node* value_instance_type =
2768  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2769  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2770  Node* check1 = __ Uint32LessThanOrEqual(
2771  __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
2772  __ GotoIfNot(check1, &if_primitive);
2773 
2774  Node* value_bit_field =
2775  __ LoadField(AccessBuilder::ForMapBitField(), value_map);
2776  Node* check2 =
2777  __ Word32Equal(__ Int32Constant(0),
2778  __ Word32And(value_bit_field,
2779  __ Int32Constant(Map::IsCallableBit::kMask)));
2780  __ Goto(&done, check2);
2781 
2782  __ Bind(&if_primitive);
2783  __ Goto(&done, __ Int32Constant(0));
2784 
2785  __ Bind(&done);
2786  return done.PhiAt(0);
2787 }
2788 
2789 Node* EffectControlLinearizer::LowerObjectIsNumber(Node* node) {
2790  Node* value = node->InputAt(0);
2791 
2792  auto if_smi = __ MakeLabel();
2793  auto done = __ MakeLabel(MachineRepresentation::kBit);
2794 
2795  __ GotoIf(ObjectIsSmi(value), &if_smi);
2796  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2797  __ Goto(&done, __ WordEqual(value_map, __ HeapNumberMapConstant()));
2798 
2799  __ Bind(&if_smi);
2800  __ Goto(&done, __ Int32Constant(1));
2801 
2802  __ Bind(&done);
2803  return done.PhiAt(0);
2804 }
2805 
2806 Node* EffectControlLinearizer::LowerObjectIsReceiver(Node* node) {
2807  Node* value = node->InputAt(0);
2808 
2809  auto if_smi = __ MakeDeferredLabel();
2810  auto done = __ MakeLabel(MachineRepresentation::kBit);
2811 
2812  __ GotoIf(ObjectIsSmi(value), &if_smi);
2813 
2814  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2815  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2816  Node* value_instance_type =
2817  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2818  Node* result = __ Uint32LessThanOrEqual(
2819  __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
2820  __ Goto(&done, result);
2821 
2822  __ Bind(&if_smi);
2823  __ Goto(&done, __ Int32Constant(0));
2824 
2825  __ Bind(&done);
2826  return done.PhiAt(0);
2827 }
2828 
2829 Node* EffectControlLinearizer::LowerObjectIsSmi(Node* node) {
2830  Node* value = node->InputAt(0);
2831  return ObjectIsSmi(value);
2832 }
2833 
2834 Node* EffectControlLinearizer::LowerObjectIsString(Node* node) {
2835  Node* value = node->InputAt(0);
2836 
2837  auto if_smi = __ MakeDeferredLabel();
2838  auto done = __ MakeLabel(MachineRepresentation::kBit);
2839 
2840  Node* check = ObjectIsSmi(value);
2841  __ GotoIf(check, &if_smi);
2842  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2843  Node* value_instance_type =
2844  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2845  Node* vfalse = __ Uint32LessThan(value_instance_type,
2846  __ Uint32Constant(FIRST_NONSTRING_TYPE));
2847  __ Goto(&done, vfalse);
2848 
2849  __ Bind(&if_smi);
2850  __ Goto(&done, __ Int32Constant(0));
2851 
2852  __ Bind(&done);
2853  return done.PhiAt(0);
2854 }
2855 
2856 Node* EffectControlLinearizer::LowerObjectIsSymbol(Node* node) {
2857  Node* value = node->InputAt(0);
2858 
2859  auto if_smi = __ MakeDeferredLabel();
2860  auto done = __ MakeLabel(MachineRepresentation::kBit);
2861 
2862  Node* check = ObjectIsSmi(value);
2863  __ GotoIf(check, &if_smi);
2864  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2865  Node* value_instance_type =
2866  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2867  Node* vfalse =
2868  __ Word32Equal(value_instance_type, __ Uint32Constant(SYMBOL_TYPE));
2869  __ Goto(&done, vfalse);
2870 
2871  __ Bind(&if_smi);
2872  __ Goto(&done, __ Int32Constant(0));
2873 
2874  __ Bind(&done);
2875  return done.PhiAt(0);
2876 }
2877 
2878 Node* EffectControlLinearizer::LowerObjectIsUndetectable(Node* node) {
2879  Node* value = node->InputAt(0);
2880 
2881  auto if_smi = __ MakeDeferredLabel();
2882  auto done = __ MakeLabel(MachineRepresentation::kBit);
2883 
2884  Node* check = ObjectIsSmi(value);
2885  __ GotoIf(check, &if_smi);
2886 
2887  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2888  Node* value_bit_field =
2889  __ LoadField(AccessBuilder::ForMapBitField(), value_map);
2890  Node* vfalse = __ Word32Equal(
2891  __ Word32Equal(
2892  __ Int32Constant(0),
2893  __ Word32And(value_bit_field,
2894  __ Int32Constant(Map::IsUndetectableBit::kMask))),
2895  __ Int32Constant(0));
2896  __ Goto(&done, vfalse);
2897 
2898  __ Bind(&if_smi);
2899  __ Goto(&done, __ Int32Constant(0));
2900 
2901  __ Bind(&done);
2902  return done.PhiAt(0);
2903 }
2904 
2905 Node* EffectControlLinearizer::LowerTypeOf(Node* node) {
2906  Node* obj = node->InputAt(0);
2907  Callable const callable = Builtins::CallableFor(isolate(), Builtins::kTypeof);
2908  Operator::Properties const properties = Operator::kEliminatable;
2909  CallDescriptor::Flags const flags = CallDescriptor::kNoAllocate;
2910  auto call_descriptor = Linkage::GetStubCallDescriptor(
2911  graph()->zone(), callable.descriptor(),
2912  callable.descriptor().GetStackParameterCount(), flags, properties);
2913  return __ Call(call_descriptor, __ HeapConstant(callable.code()), obj,
2914  __ NoContextConstant());
2915 }
2916 
2917 Node* EffectControlLinearizer::LowerToBoolean(Node* node) {
2918  Node* obj = node->InputAt(0);
2919  Callable const callable =
2920  Builtins::CallableFor(isolate(), Builtins::kToBoolean);
2921  Operator::Properties const properties = Operator::kEliminatable;
2922  CallDescriptor::Flags const flags = CallDescriptor::kNoAllocate;
2923  auto call_descriptor = Linkage::GetStubCallDescriptor(
2924  graph()->zone(), callable.descriptor(),
2925  callable.descriptor().GetStackParameterCount(), flags, properties);
2926  return __ Call(call_descriptor, __ HeapConstant(callable.code()), obj,
2927  __ NoContextConstant());
2928 }
2929 
2930 Node* EffectControlLinearizer::LowerArgumentsLength(Node* node) {
2931  Node* arguments_frame = NodeProperties::GetValueInput(node, 0);
2932  int formal_parameter_count = FormalParameterCountOf(node->op());
2933  bool is_rest_length = IsRestLengthOf(node->op());
2934  DCHECK_LE(0, formal_parameter_count);
2935 
2936  if (is_rest_length) {
2937  // The ArgumentsLength node is computing the number of rest parameters,
2938  // which is max(0, actual_parameter_count - formal_parameter_count).
2939  // We have to distinguish the case when there is an arguments adaptor frame
2940  // (i.e., arguments_frame != LoadFramePointer()).
2941  auto if_adaptor_frame = __ MakeLabel();
2942  auto done = __ MakeLabel(MachineRepresentation::kTaggedSigned);
2943 
2944  Node* frame = __ LoadFramePointer();
2945  __ GotoIf(__ WordEqual(arguments_frame, frame), &done, __ SmiConstant(0));
2946  __ Goto(&if_adaptor_frame);
2947 
2948  __ Bind(&if_adaptor_frame);
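      // The arguments adaptor frame stores the actual argument count as a
      // Smi (TaggedSigned) at kLengthOffset.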
2949  Node* arguments_length = __ Load(
2950  MachineType::TaggedSigned(), arguments_frame,
2951  __ IntPtrConstant(ArgumentsAdaptorFrameConstants::kLengthOffset));
2952 
2953  Node* rest_length =
2954  __ IntSub(arguments_length, __ SmiConstant(formal_parameter_count));
2955  __ GotoIf(__ IntLessThan(rest_length, __ SmiConstant(0)), &done,
2956  __ SmiConstant(0));
2957  __ Goto(&done, rest_length);
2958 
2959  __ Bind(&done);
2960  return done.PhiAt(0);
2961  } else {
2962  // The ArgumentsLength node is computing the actual number of arguments.
2963  // We have to distinguish the case when there is an arguments adaptor frame
2964  // (i.e., arguments_frame != LoadFramePointer()).
2965  auto if_adaptor_frame = __ MakeLabel();
2966  auto done = __ MakeLabel(MachineRepresentation::kTaggedSigned);
2967 
2968  Node* frame = __ LoadFramePointer();
2969  __ GotoIf(__ WordEqual(arguments_frame, frame), &done,
2970  __ SmiConstant(formal_parameter_count));
2971  __ Goto(&if_adaptor_frame);
2972 
2973  __ Bind(&if_adaptor_frame);
2974  Node* arguments_length = __ Load(
2975  MachineType::TaggedSigned(), arguments_frame,
2976  __ IntPtrConstant(ArgumentsAdaptorFrameConstants::kLengthOffset));
2977  __ Goto(&done, arguments_length);
2978 
2979  __ Bind(&done);
2980  return done.PhiAt(0);
2981  }
2982 }
2983 
2984 Node* EffectControlLinearizer::LowerArgumentsFrame(Node* node) {
2985  auto done = __ MakeLabel(MachineType::PointerRepresentation());
2986 
2987  Node* frame = __ LoadFramePointer();
2988  Node* parent_frame =
2989  __ Load(MachineType::Pointer(), frame,
2990  __ IntPtrConstant(StandardFrameConstants::kCallerFPOffset));
2991  Node* parent_frame_type = __ Load(
2992  MachineType::AnyTagged(), parent_frame,
2993  __ IntPtrConstant(CommonFrameConstants::kContextOrFrameTypeOffset));
2994  __ GotoIf(__ WordEqual(parent_frame_type,
2995  __ IntPtrConstant(StackFrame::TypeToMarker(
2996  StackFrame::ARGUMENTS_ADAPTOR))),
2997  &done, parent_frame);
2998  __ Goto(&done, frame);
2999 
3000  __ Bind(&done);
3001  return done.PhiAt(0);
3002 }
3003 
3004 Node* EffectControlLinearizer::LowerNewDoubleElements(Node* node) {
3005  PretenureFlag const pretenure = PretenureFlagOf(node->op());
3006  Node* length = node->InputAt(0);
3007 
3008  auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
3009  Node* zero_length = __ Word32Equal(length, __ Int32Constant(0));
3010  __ GotoIf(zero_length, &done,
3011  jsgraph()->HeapConstant(factory()->empty_fixed_array()));
3012 
3013  // Compute the effective size of the backing store.
3014  Node* size =
3015  __ Int32Add(__ Word32Shl(length, __ Int32Constant(kDoubleSizeLog2)),
3016  __ Int32Constant(FixedDoubleArray::kHeaderSize));
3017 
3018  // Allocate the result and initialize the header.
3019  Node* result = __ Allocate(pretenure, size);
3020  __ StoreField(AccessBuilder::ForMap(), result,
3021  __ FixedDoubleArrayMapConstant());
3022  __ StoreField(AccessBuilder::ForFixedArrayLength(), result,
3023  ChangeInt32ToSmi(length));
3024 
3025  // Initialize the backing store with holes.
3026  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
3027  Node* limit = ChangeUint32ToUintPtr(length);
3028  Node* the_hole =
3029  __ LoadField(AccessBuilder::ForHeapNumberValue(), __ TheHoleConstant());
3030  auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
3031  __ Goto(&loop, __ IntPtrConstant(0));
3032  __ Bind(&loop);
3033  {
3034  // Check if we've initialized everything.
3035  Node* index = loop.PhiAt(0);
3036  Node* check = __ UintLessThan(index, limit);
3037  __ GotoIfNot(check, &done, result);
3038 
3039  // Storing "the_hole" doesn't need a write barrier.
3040  StoreRepresentation rep(MachineRepresentation::kFloat64, kNoWriteBarrier);
3041  Node* offset = __ IntAdd(
3042  __ WordShl(index, __ IntPtrConstant(kDoubleSizeLog2)),
3043  __ IntPtrConstant(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
3044  __ Store(rep, result, offset, the_hole);
3045 
3046  // Advance the {index}.
3047  index = __ IntAdd(index, __ IntPtrConstant(1));
3048  __ Goto(&loop, index);
3049  }
3050 
3051  __ Bind(&done);
3052  return done.PhiAt(0);
3053 }
3054 
3055 Node* EffectControlLinearizer::LowerNewSmiOrObjectElements(Node* node) {
3056  PretenureFlag const pretenure = PretenureFlagOf(node->op());
3057  Node* length = node->InputAt(0);
3058 
3059  auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
3060  Node* zero_length = __ Word32Equal(length, __ Int32Constant(0));
3061  __ GotoIf(zero_length, &done,
3062  jsgraph()->HeapConstant(factory()->empty_fixed_array()));
3063 
3064  // Compute the effective size of the backing store.
3065  Node* size =
3066  __ Int32Add(__ Word32Shl(length, __ Int32Constant(kPointerSizeLog2)),
3067  __ Int32Constant(FixedArray::kHeaderSize));
3068 
3069  // Allocate the result and initialize the header.
3070  Node* result = __ Allocate(pretenure, size);
3071  __ StoreField(AccessBuilder::ForMap(), result, __ FixedArrayMapConstant());
3072  __ StoreField(AccessBuilder::ForFixedArrayLength(), result,
3073  ChangeInt32ToSmi(length));
3074 
3075  // Initialize the backing store with holes.
3076  Node* limit = ChangeUint32ToUintPtr(length);
3077  Node* the_hole = __ TheHoleConstant();
3078  auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
3079  __ Goto(&loop, __ IntPtrConstant(0));
3080  __ Bind(&loop);
3081  {
3082  // Check if we've initialized everything.
3083  Node* index = loop.PhiAt(0);
3084  Node* check = __ UintLessThan(index, limit);
3085  __ GotoIfNot(check, &done, result);
3086 
3087  // Storing "the_hole" doesn't need a write barrier.
3088  StoreRepresentation rep(MachineRepresentation::kTagged, kNoWriteBarrier);
3089  Node* offset =
3090  __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2)),
3091  __ IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
3092  __ Store(rep, result, offset, the_hole);
3093 
3094  // Advance the {index}.
3095  index = __ IntAdd(index, __ IntPtrConstant(1));
3096  __ Goto(&loop, index);
3097  }
3098 
3099  __ Bind(&done);
3100  return done.PhiAt(0);
3101 }
3102 
3103 Node* EffectControlLinearizer::LowerNewArgumentsElements(Node* node) {
3104  Node* frame = NodeProperties::GetValueInput(node, 0);
3105  Node* length = NodeProperties::GetValueInput(node, 1);
3106  int mapped_count = NewArgumentsElementsMappedCountOf(node->op());
3107 
3108  Callable const callable =
3109  Builtins::CallableFor(isolate(), Builtins::kNewArgumentsElements);
3110  Operator::Properties const properties = node->op()->properties();
3111  CallDescriptor::Flags const flags = CallDescriptor::kNoFlags;
3112  auto call_descriptor = Linkage::GetStubCallDescriptor(
3113  graph()->zone(), callable.descriptor(),
3114  callable.descriptor().GetStackParameterCount(), flags, properties);
3115  return __ Call(call_descriptor, __ HeapConstant(callable.code()), frame,
3116  length, __ SmiConstant(mapped_count), __ NoContextConstant());
3117 }
3118 
3119 Node* EffectControlLinearizer::LowerNewConsString(Node* node) {
3120  Node* length = node->InputAt(0);
3121  Node* first = node->InputAt(1);
3122  Node* second = node->InputAt(2);
3123 
3124  // Determine the instance types of {first} and {second}.
3125  Node* first_map = __ LoadField(AccessBuilder::ForMap(), first);
3126  Node* first_instance_type =
3127  __ LoadField(AccessBuilder::ForMapInstanceType(), first_map);
3128  Node* second_map = __ LoadField(AccessBuilder::ForMap(), second);
3129  Node* second_instance_type =
3130  __ LoadField(AccessBuilder::ForMapInstanceType(), second_map);
3131 
3132  // Determine the proper map for the resulting ConsString.
3133  // If both {first} and {second} are one-byte strings, we
3134  // create a new ConsOneByteString, otherwise we create a
3135  // new ConsString instead.
3136  auto if_onebyte = __ MakeLabel();
3137  auto if_twobyte = __ MakeLabel();
3138  auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
3139  STATIC_ASSERT(kOneByteStringTag != 0);
3140  STATIC_ASSERT(kTwoByteStringTag == 0);
3141  Node* instance_type = __ Word32And(first_instance_type, second_instance_type);
3142  Node* encoding =
3143  __ Word32And(instance_type, __ Int32Constant(kStringEncodingMask));
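  // Note that the bitwise AND of the two instance types can be tested
  // directly: kTwoByteStringTag is 0 and kOneByteStringTag is a single
  // non-zero bit (see the STATIC_ASSERTs above), so {encoding} reports
  // one-byte only if both inputs are one-byte strings.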
3144  __ Branch(__ Word32Equal(encoding, __ Int32Constant(kTwoByteStringTag)),
3145  &if_twobyte, &if_onebyte);
3146  __ Bind(&if_onebyte);
3147  __ Goto(&done,
3148  jsgraph()->HeapConstant(factory()->cons_one_byte_string_map()));
3149  __ Bind(&if_twobyte);
3150  __ Goto(&done, jsgraph()->HeapConstant(factory()->cons_string_map()));
3151  __ Bind(&done);
3152  Node* result_map = done.PhiAt(0);
3153 
3154  // Allocate the resulting ConsString.
3155  Node* result = __ Allocate(NOT_TENURED, __ Int32Constant(ConsString::kSize));
3156  __ StoreField(AccessBuilder::ForMap(), result, result_map);
3157  __ StoreField(AccessBuilder::ForNameHashField(), result,
3158  __ Int32Constant(Name::kEmptyHashField));
3159  __ StoreField(AccessBuilder::ForStringLength(), result, length);
3160  __ StoreField(AccessBuilder::ForConsStringFirst(), result, first);
3161  __ StoreField(AccessBuilder::ForConsStringSecond(), result, second);
3162  return result;
3163 }
3164 
3165 Node* EffectControlLinearizer::LowerSameValue(Node* node) {
3166  Node* lhs = node->InputAt(0);
3167  Node* rhs = node->InputAt(1);
3168 
3169  Callable const callable =
3170  Builtins::CallableFor(isolate(), Builtins::kSameValue);
3171  Operator::Properties properties = Operator::kEliminatable;
3172  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3173  auto call_descriptor = Linkage::GetStubCallDescriptor(
3174  graph()->zone(), callable.descriptor(),
3175  callable.descriptor().GetStackParameterCount(), flags, properties);
3176  return __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs, rhs,
3177  __ NoContextConstant());
3178 }
3179 
3180 Node* EffectControlLinearizer::LowerDeadValue(Node* node) {
3181  Node* input = NodeProperties::GetValueInput(node, 0);
3182  if (input->opcode() != IrOpcode::kUnreachable) {
3183  Node* unreachable = __ Unreachable();
3184  NodeProperties::ReplaceValueInput(node, unreachable, 0);
3185  }
3186  return node;
3187 }
3188 
3189 Node* EffectControlLinearizer::LowerStringToNumber(Node* node) {
3190  Node* string = node->InputAt(0);
3191 
3192  Callable const callable =
3193  Builtins::CallableFor(isolate(), Builtins::kStringToNumber);
3194  Operator::Properties properties = Operator::kEliminatable;
3195  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3196  auto call_descriptor = Linkage::GetStubCallDescriptor(
3197  graph()->zone(), callable.descriptor(),
3198  callable.descriptor().GetStackParameterCount(), flags, properties);
3199  return __ Call(call_descriptor, __ HeapConstant(callable.code()), string,
3200  __ NoContextConstant());
3201 }
3202 
3203 Node* EffectControlLinearizer::LowerStringCharCodeAt(Node* node) {
3204  Node* receiver = node->InputAt(0);
3205  Node* position = node->InputAt(1);
3206 
3207  // We need a loop here to properly deal with indirect strings
3208  // (SlicedString, ConsString and ThinString).
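  // For example, a ThinString re-enters the loop on its actual string, a
  // SlicedString re-enters on its parent with {position} advanced by the
  // slice offset, and a ConsString whose second part is the empty string
  // re-enters on its first part; the remaining cases are handled directly
  // below or fall back to the runtime.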
3209  auto loop = __ MakeLoopLabel(MachineRepresentation::kTagged,
3210  MachineType::PointerRepresentation());
3211  auto loop_next = __ MakeLabel(MachineRepresentation::kTagged,
3212  MachineType::PointerRepresentation());
3213  auto loop_done = __ MakeLabel(MachineRepresentation::kWord32);
3214  __ Goto(&loop, receiver, position);
3215  __ Bind(&loop);
3216  {
3217  Node* receiver = loop.PhiAt(0);
3218  Node* position = loop.PhiAt(1);
3219  Node* receiver_map = __ LoadField(AccessBuilder::ForMap(), receiver);
3220  Node* receiver_instance_type =
3221  __ LoadField(AccessBuilder::ForMapInstanceType(), receiver_map);
3222  Node* receiver_representation = __ Word32And(
3223  receiver_instance_type, __ Int32Constant(kStringRepresentationMask));
3224 
 3225  // Dispatch on the current {receiver}'s string representation.
3226  auto if_seqstring = __ MakeLabel();
3227  auto if_consstring = __ MakeLabel();
3228  auto if_thinstring = __ MakeLabel();
3229  auto if_externalstring = __ MakeLabel();
3230  auto if_slicedstring = __ MakeLabel();
3231  auto if_runtime = __ MakeDeferredLabel();
3232  __ GotoIf(__ Word32Equal(receiver_representation,
3233  __ Int32Constant(kSeqStringTag)),
3234  &if_seqstring);
3235  __ GotoIf(__ Word32Equal(receiver_representation,
3236  __ Int32Constant(kConsStringTag)),
3237  &if_consstring);
3238  __ GotoIf(__ Word32Equal(receiver_representation,
3239  __ Int32Constant(kThinStringTag)),
3240  &if_thinstring);
3241  __ GotoIf(__ Word32Equal(receiver_representation,
3242  __ Int32Constant(kExternalStringTag)),
3243  &if_externalstring);
3244  __ Branch(__ Word32Equal(receiver_representation,
3245  __ Int32Constant(kSlicedStringTag)),
3246  &if_slicedstring, &if_runtime);
3247 
3248  __ Bind(&if_seqstring);
3249  {
3250  Node* receiver_is_onebyte = __ Word32Equal(
3251  __ Word32Equal(__ Word32And(receiver_instance_type,
3252  __ Int32Constant(kStringEncodingMask)),
3253  __ Int32Constant(kTwoByteStringTag)),
3254  __ Int32Constant(0));
3255  Node* result = LoadFromSeqString(receiver, position, receiver_is_onebyte);
3256  __ Goto(&loop_done, result);
3257  }
3258 
3259  __ Bind(&if_thinstring);
3260  {
3261  Node* receiver_actual =
3262  __ LoadField(AccessBuilder::ForThinStringActual(), receiver);
3263  __ Goto(&loop_next, receiver_actual, position);
3264  }
3265 
3266  __ Bind(&if_consstring);
3267  {
3268  Node* receiver_second =
3269  __ LoadField(AccessBuilder::ForConsStringSecond(), receiver);
3270  __ GotoIfNot(__ WordEqual(receiver_second, __ EmptyStringConstant()),
3271  &if_runtime);
3272  Node* receiver_first =
3273  __ LoadField(AccessBuilder::ForConsStringFirst(), receiver);
3274  __ Goto(&loop_next, receiver_first, position);
3275  }
3276 
3277  __ Bind(&if_externalstring);
3278  {
 3279  // We need to bail out to the runtime for uncached external strings.
3280  __ GotoIf(__ Word32Equal(
3281  __ Word32And(receiver_instance_type,
3282  __ Int32Constant(kUncachedExternalStringMask)),
3283  __ Int32Constant(kUncachedExternalStringTag)),
3284  &if_runtime);
3285 
3286  Node* receiver_data = __ LoadField(
3287  AccessBuilder::ForExternalStringResourceData(), receiver);
3288 
3289  auto if_onebyte = __ MakeLabel();
3290  auto if_twobyte = __ MakeLabel();
3291  __ Branch(
3292  __ Word32Equal(__ Word32And(receiver_instance_type,
3293  __ Int32Constant(kStringEncodingMask)),
3294  __ Int32Constant(kTwoByteStringTag)),
3295  &if_twobyte, &if_onebyte);
3296 
3297  __ Bind(&if_onebyte);
3298  {
3299  Node* result = __ Load(MachineType::Uint8(), receiver_data, position);
3300  __ Goto(&loop_done, result);
3301  }
3302 
3303  __ Bind(&if_twobyte);
3304  {
3305  Node* result = __ Load(MachineType::Uint16(), receiver_data,
3306  __ WordShl(position, __ IntPtrConstant(1)));
3307  __ Goto(&loop_done, result);
3308  }
3309  }
3310 
3311  __ Bind(&if_slicedstring);
3312  {
3313  Node* receiver_offset =
3314  __ LoadField(AccessBuilder::ForSlicedStringOffset(), receiver);
3315  Node* receiver_parent =
3316  __ LoadField(AccessBuilder::ForSlicedStringParent(), receiver);
3317  __ Goto(&loop_next, receiver_parent,
3318  __ IntAdd(position, ChangeSmiToIntPtr(receiver_offset)));
3319  }
3320 
3321  __ Bind(&if_runtime);
3322  {
3323  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
3324  Runtime::FunctionId id = Runtime::kStringCharCodeAt;
3325  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
3326  graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
3327  Node* result = __ Call(call_descriptor, __ CEntryStubConstant(1),
3328  receiver, ChangeIntPtrToSmi(position),
3329  __ ExternalConstant(ExternalReference::Create(id)),
3330  __ Int32Constant(2), __ NoContextConstant());
3331  __ Goto(&loop_done, ChangeSmiToInt32(result));
3332  }
3333 
3334  __ Bind(&loop_next);
3335  __ Goto(&loop, loop_next.PhiAt(0), loop_next.PhiAt(1));
3336  }
3337  __ Bind(&loop_done);
3338  return loop_done.PhiAt(0);
3339 }
3340 
3341 Node* EffectControlLinearizer::LowerStringCodePointAt(
3342  Node* node, UnicodeEncoding encoding) {
3343  Node* receiver = node->InputAt(0);
3344  Node* position = node->InputAt(1);
3345 
3346  Builtins::Name builtin = encoding == UnicodeEncoding::UTF16
3347  ? Builtins::kStringCodePointAtUTF16
3348  : Builtins::kStringCodePointAtUTF32;
3349 
3350  Callable const callable = Builtins::CallableFor(isolate(), builtin);
3351  Operator::Properties properties = Operator::kNoThrow | Operator::kNoWrite;
3352  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3353  auto call_descriptor = Linkage::GetStubCallDescriptor(
3354  graph()->zone(), callable.descriptor(),
3355  callable.descriptor().GetStackParameterCount(), flags, properties);
3356  return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
3357  position, __ NoContextConstant());
3358 }
3359 
3360 Node* EffectControlLinearizer::LoadFromSeqString(Node* receiver, Node* position,
3361  Node* is_one_byte) {
3362  auto one_byte_load = __ MakeLabel();
3363  auto done = __ MakeLabel(MachineRepresentation::kWord32);
3364  __ GotoIf(is_one_byte, &one_byte_load);
3365  Node* two_byte_result = __ LoadElement(
3366  AccessBuilder::ForSeqTwoByteStringCharacter(), receiver, position);
3367  __ Goto(&done, two_byte_result);
3368 
3369  __ Bind(&one_byte_load);
3370  Node* one_byte_element = __ LoadElement(
3371  AccessBuilder::ForSeqOneByteStringCharacter(), receiver, position);
3372  __ Goto(&done, one_byte_element);
3373 
3374  __ Bind(&done);
3375  return done.PhiAt(0);
3376 }
3377 
3378 Node* EffectControlLinearizer::LowerStringFromSingleCharCode(Node* node) {
3379  Node* value = node->InputAt(0);
3380  Node* code = __ Word32And(value, __ Uint32Constant(0xFFFF));
3381 
3382  auto if_not_one_byte = __ MakeDeferredLabel();
3383  auto cache_miss = __ MakeDeferredLabel();
3384  auto done = __ MakeLabel(MachineRepresentation::kTagged);
3385 
3386  // Check if the {code} is a one byte character
3387  Node* check1 = __ Uint32LessThanOrEqual(
3388  code, __ Uint32Constant(String::kMaxOneByteCharCode));
3389  __ GotoIfNot(check1, &if_not_one_byte);
3390  {
 3391  // Load the isolate-wide single character string cache.
3392  Node* cache = __ HeapConstant(factory()->single_character_string_cache());
3393 
3394  // Compute the {cache} index for {code}.
3395  Node* index = machine()->Is32() ? code : __ ChangeUint32ToUint64(code);
3396 
3397  // Check if we have an entry for the {code} in the single character string
3398  // cache already.
3399  Node* entry =
3400  __ LoadElement(AccessBuilder::ForFixedArrayElement(), cache, index);
3401 
3402  Node* check2 = __ WordEqual(entry, __ UndefinedConstant());
3403  __ GotoIf(check2, &cache_miss);
3404 
3405  // Use the {entry} from the {cache}.
3406  __ Goto(&done, entry);
3407 
3408  __ Bind(&cache_miss);
3409  {
3410  // Allocate a new SeqOneByteString for {code}.
3411  Node* vtrue2 = __ Allocate(
3412  NOT_TENURED, __ Int32Constant(SeqOneByteString::SizeFor(1)));
3413  __ StoreField(AccessBuilder::ForMap(), vtrue2,
3414  __ HeapConstant(factory()->one_byte_string_map()));
3415  __ StoreField(AccessBuilder::ForNameHashField(), vtrue2,
3416  __ Int32Constant(Name::kEmptyHashField));
3417  __ StoreField(AccessBuilder::ForStringLength(), vtrue2,
3418  __ Int32Constant(1));
3419  __ Store(
3420  StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier),
3421  vtrue2,
3422  __ IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag),
3423  code);
3424 
3425  // Remember it in the {cache}.
3426  __ StoreElement(AccessBuilder::ForFixedArrayElement(), cache, index,
3427  vtrue2);
3428  __ Goto(&done, vtrue2);
3429  }
3430  }
3431 
3432  __ Bind(&if_not_one_byte);
3433  {
3434  // Allocate a new SeqTwoByteString for {code}.
3435  Node* vfalse1 = __ Allocate(NOT_TENURED,
3436  __ Int32Constant(SeqTwoByteString::SizeFor(1)));
3437  __ StoreField(AccessBuilder::ForMap(), vfalse1,
3438  __ HeapConstant(factory()->string_map()));
3439  __ StoreField(AccessBuilder::ForNameHashField(), vfalse1,
3440  __ Int32Constant(Name::kEmptyHashField));
3441  __ StoreField(AccessBuilder::ForStringLength(), vfalse1,
3442  __ Int32Constant(1));
3443  __ Store(
3444  StoreRepresentation(MachineRepresentation::kWord16, kNoWriteBarrier),
3445  vfalse1,
3446  __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
3447  code);
3448  __ Goto(&done, vfalse1);
3449  }
3450 
3451  __ Bind(&done);
3452  return done.PhiAt(0);
3453 }
3454 
3455 #ifdef V8_INTL_SUPPORT
3456 
3457 Node* EffectControlLinearizer::LowerStringToLowerCaseIntl(Node* node) {
3458  Node* receiver = node->InputAt(0);
3459 
3460  Callable callable =
3461  Builtins::CallableFor(isolate(), Builtins::kStringToLowerCaseIntl);
3462  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
3463  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3464  auto call_descriptor = Linkage::GetStubCallDescriptor(
3465  graph()->zone(), callable.descriptor(),
3466  callable.descriptor().GetStackParameterCount(), flags, properties);
3467  return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
3468  __ NoContextConstant());
3469 }
3470 
3471 Node* EffectControlLinearizer::LowerStringToUpperCaseIntl(Node* node) {
3472  Node* receiver = node->InputAt(0);
3473  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
3474  Runtime::FunctionId id = Runtime::kStringToUpperCaseIntl;
3475  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
3476  graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
3477  return __ Call(call_descriptor, __ CEntryStubConstant(1), receiver,
3478  __ ExternalConstant(ExternalReference::Create(id)),
3479  __ Int32Constant(1), __ NoContextConstant());
3480 }
3481 
3482 #else
3483 
3484 Node* EffectControlLinearizer::LowerStringToLowerCaseIntl(Node* node) {
3485  UNREACHABLE();
3486  return nullptr;
3487 }
3488 
3489 Node* EffectControlLinearizer::LowerStringToUpperCaseIntl(Node* node) {
3490  UNREACHABLE();
3491  return nullptr;
3492 }
3493 
3494 #endif // V8_INTL_SUPPORT
3495 
3496 Node* EffectControlLinearizer::LowerStringFromSingleCodePoint(Node* node) {
3497  Node* value = node->InputAt(0);
3498  Node* code = value;
3499 
3500  auto if_not_single_code = __ MakeDeferredLabel();
3501  auto if_not_one_byte = __ MakeDeferredLabel();
3502  auto cache_miss = __ MakeDeferredLabel();
3503  auto done = __ MakeLabel(MachineRepresentation::kTagged);
3504 
3505  // Check if the {code} is a single code unit
3506  Node* check0 = __ Uint32LessThanOrEqual(code, __ Uint32Constant(0xFFFF));
3507  __ GotoIfNot(check0, &if_not_single_code);
3508 
3509  {
3510  // Check if the {code} is a one byte character
3511  Node* check1 = __ Uint32LessThanOrEqual(
3512  code, __ Uint32Constant(String::kMaxOneByteCharCode));
3513  __ GotoIfNot(check1, &if_not_one_byte);
3514  {
 3515  // Load the isolate-wide single character string cache.
3516  Node* cache = __ HeapConstant(factory()->single_character_string_cache());
3517 
3518  // Compute the {cache} index for {code}.
3519  Node* index = machine()->Is32() ? code : __ ChangeUint32ToUint64(code);
3520 
3521  // Check if we have an entry for the {code} in the single character string
3522  // cache already.
3523  Node* entry =
3524  __ LoadElement(AccessBuilder::ForFixedArrayElement(), cache, index);
3525 
3526  Node* check2 = __ WordEqual(entry, __ UndefinedConstant());
3527  __ GotoIf(check2, &cache_miss);
3528 
3529  // Use the {entry} from the {cache}.
3530  __ Goto(&done, entry);
3531 
3532  __ Bind(&cache_miss);
3533  {
3534  // Allocate a new SeqOneByteString for {code}.
3535  Node* vtrue2 = __ Allocate(
3536  NOT_TENURED, __ Int32Constant(SeqOneByteString::SizeFor(1)));
3537  __ StoreField(AccessBuilder::ForMap(), vtrue2,
3538  __ HeapConstant(factory()->one_byte_string_map()));
3539  __ StoreField(AccessBuilder::ForNameHashField(), vtrue2,
3540  __ Int32Constant(Name::kEmptyHashField));
3541  __ StoreField(AccessBuilder::ForStringLength(), vtrue2,
3542  __ Int32Constant(1));
3543  __ Store(
3544  StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier),
3545  vtrue2,
3546  __ IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag),
3547  code);
3548 
3549  // Remember it in the {cache}.
3550  __ StoreElement(AccessBuilder::ForFixedArrayElement(), cache, index,
3551  vtrue2);
3552  __ Goto(&done, vtrue2);
3553  }
3554  }
3555 
3556  __ Bind(&if_not_one_byte);
3557  {
3558  // Allocate a new SeqTwoByteString for {code}.
3559  Node* vfalse1 = __ Allocate(
3560  NOT_TENURED, __ Int32Constant(SeqTwoByteString::SizeFor(1)));
3561  __ StoreField(AccessBuilder::ForMap(), vfalse1,
3562  __ HeapConstant(factory()->string_map()));
3563  __ StoreField(AccessBuilder::ForNameHashField(), vfalse1,
 3564  __ Int32Constant(Name::kEmptyHashField));
3565  __ StoreField(AccessBuilder::ForStringLength(), vfalse1,
3566  __ Int32Constant(1));
3567  __ Store(
3568  StoreRepresentation(MachineRepresentation::kWord16, kNoWriteBarrier),
3569  vfalse1,
3570  __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
3571  code);
3572  __ Goto(&done, vfalse1);
3573  }
3574  }
3575 
3576  __ Bind(&if_not_single_code);
3577  // Generate surrogate pair string
3578  {
3579  switch (UnicodeEncodingOf(node->op())) {
3580  case UnicodeEncoding::UTF16:
3581  break;
3582 
3583  case UnicodeEncoding::UTF32: {
 3584  // Convert UTF32 to UTF16 code units and store them as a 32-bit word.
3585  Node* lead_offset = __ Int32Constant(0xD800 - (0x10000 >> 10));
3586 
3587  // lead = (codepoint >> 10) + LEAD_OFFSET
3588  Node* lead =
3589  __ Int32Add(__ Word32Shr(code, __ Int32Constant(10)), lead_offset);
3590 
3591  // trail = (codepoint & 0x3FF) + 0xDC00;
3592  Node* trail = __ Int32Add(__ Word32And(code, __ Int32Constant(0x3FF)),
3593  __ Int32Constant(0xDC00));
3594 
 3595  // codepoint = (trail << 16) | lead;
3596 #if V8_TARGET_BIG_ENDIAN
3597  code = __ Word32Or(__ Word32Shl(lead, __ Int32Constant(16)), trail);
3598 #else
3599  code = __ Word32Or(__ Word32Shl(trail, __ Int32Constant(16)), lead);
3600 #endif
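      // As a worked example: for the code point U+1F600, lead_offset is
      // 0xD800 - 0x40 = 0xD7C0, so lead = (0x1F600 >> 10) + 0xD7C0 = 0xD83D
      // and trail = (0x1F600 & 0x3FF) + 0xDC00 = 0xDE00; on a little-endian
      // target {code} thus becomes (0xDE00 << 16) | 0xD83D = 0xDE00D83D.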
3601  break;
3602  }
3603  }
3604 
3605  // Allocate a new SeqTwoByteString for {code}.
3606  Node* vfalse0 = __ Allocate(NOT_TENURED,
3607  __ Int32Constant(SeqTwoByteString::SizeFor(2)));
3608  __ StoreField(AccessBuilder::ForMap(), vfalse0,
3609  __ HeapConstant(factory()->string_map()));
3610  __ StoreField(AccessBuilder::ForNameHashField(), vfalse0,
3611  __ Int32Constant(Name::kEmptyHashField));
3612  __ StoreField(AccessBuilder::ForStringLength(), vfalse0,
3613  __ Int32Constant(2));
3614  __ Store(
3615  StoreRepresentation(MachineRepresentation::kWord32, kNoWriteBarrier),
3616  vfalse0,
3617  __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
3618  code);
3619  __ Goto(&done, vfalse0);
3620  }
3621 
3622  __ Bind(&done);
3623  return done.PhiAt(0);
3624 }
3625 
3626 Node* EffectControlLinearizer::LowerStringIndexOf(Node* node) {
3627  Node* subject = node->InputAt(0);
3628  Node* search_string = node->InputAt(1);
3629  Node* position = node->InputAt(2);
3630 
3631  Callable callable =
3632  Builtins::CallableFor(isolate(), Builtins::kStringIndexOf);
3633  Operator::Properties properties = Operator::kEliminatable;
3634  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3635  auto call_descriptor = Linkage::GetStubCallDescriptor(
3636  graph()->zone(), callable.descriptor(),
3637  callable.descriptor().GetStackParameterCount(), flags, properties);
3638  return __ Call(call_descriptor, __ HeapConstant(callable.code()), subject,
3639  search_string, position, __ NoContextConstant());
3640 }
3641 
3642 Node* EffectControlLinearizer::LowerStringLength(Node* node) {
3643  Node* subject = node->InputAt(0);
3644 
3645  return __ LoadField(AccessBuilder::ForStringLength(), subject);
3646 }
3647 
3648 Node* EffectControlLinearizer::LowerStringComparison(Callable const& callable,
3649  Node* node) {
3650  Node* lhs = node->InputAt(0);
3651  Node* rhs = node->InputAt(1);
3652 
3653  Operator::Properties properties = Operator::kEliminatable;
3654  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3655  auto call_descriptor = Linkage::GetStubCallDescriptor(
3656  graph()->zone(), callable.descriptor(),
3657  callable.descriptor().GetStackParameterCount(), flags, properties);
3658  return __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs, rhs,
3659  __ NoContextConstant());
3660 }
3661 
3662 Node* EffectControlLinearizer::LowerStringSubstring(Node* node) {
3663  Node* receiver = node->InputAt(0);
3664  Node* start = ChangeInt32ToIntPtr(node->InputAt(1));
3665  Node* end = ChangeInt32ToIntPtr(node->InputAt(2));
3666 
3667  Callable callable =
3668  Builtins::CallableFor(isolate(), Builtins::kStringSubstring);
3669  Operator::Properties properties = Operator::kEliminatable;
3670  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3671  auto call_descriptor = Linkage::GetStubCallDescriptor(
3672  graph()->zone(), callable.descriptor(),
3673  callable.descriptor().GetStackParameterCount(), flags, properties);
3674  return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
3675  start, end, __ NoContextConstant());
3676 }
3677 
3678 Node* EffectControlLinearizer::LowerStringEqual(Node* node) {
3679  return LowerStringComparison(
3680  Builtins::CallableFor(isolate(), Builtins::kStringEqual), node);
3681 }
3682 
3683 Node* EffectControlLinearizer::LowerStringLessThan(Node* node) {
3684  return LowerStringComparison(
3685  Builtins::CallableFor(isolate(), Builtins::kStringLessThan), node);
3686 }
3687 
3688 Node* EffectControlLinearizer::LowerStringLessThanOrEqual(Node* node) {
3689  return LowerStringComparison(
3690  Builtins::CallableFor(isolate(), Builtins::kStringLessThanOrEqual), node);
3691 }
3692 
3693 Node* EffectControlLinearizer::LowerCheckFloat64Hole(Node* node,
3694  Node* frame_state) {
 3695  // If we reach this point without eliminating the {node} that's marked
3696  // with allow-return-hole, we cannot do anything, so just deoptimize
3697  // in case of the hole NaN.
3698  CheckFloat64HoleParameters const& params =
3699  CheckFloat64HoleParametersOf(node->op());
3700  Node* value = node->InputAt(0);
3701 
3702  auto if_nan = __ MakeDeferredLabel();
3703  auto done = __ MakeLabel();
3704 
3705  // First check whether {value} is a NaN at all...
3706  __ Branch(__ Float64Equal(value, value), &done, &if_nan);
3707 
3708  __ Bind(&if_nan);
3709  {
3710  // ...and only if {value} is a NaN, perform the expensive bit
3711  // check. See http://crbug.com/v8/8264 for details.
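      // (The hole is canonicalized to a NaN whose upper 32 bits equal
      // kHoleNanUpper32, so an ordinary NaN produced by arithmetic does not
      // match this pattern and will not deoptimize here.)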
3712  Node* check = __ Word32Equal(__ Float64ExtractHighWord32(value),
3713  __ Int32Constant(kHoleNanUpper32));
3714  __ DeoptimizeIf(DeoptimizeReason::kHole, params.feedback(), check,
3715  frame_state);
3716  __ Goto(&done);
3717  }
3718 
3719  __ Bind(&done);
3720  return value;
3721 }
3722 
3723 Node* EffectControlLinearizer::LowerCheckNotTaggedHole(Node* node,
3724  Node* frame_state) {
3725  Node* value = node->InputAt(0);
3726  Node* check = __ WordEqual(value, __ TheHoleConstant());
3727  __ DeoptimizeIf(DeoptimizeReason::kHole, VectorSlotPair(), check,
3728  frame_state);
3729  return value;
3730 }
3731 
3732 Node* EffectControlLinearizer::LowerConvertTaggedHoleToUndefined(Node* node) {
3733  Node* value = node->InputAt(0);
3734 
3735  auto if_is_hole = __ MakeDeferredLabel();
3736  auto done = __ MakeLabel(MachineRepresentation::kTagged);
3737 
3738  Node* check = __ WordEqual(value, __ TheHoleConstant());
3739  __ GotoIf(check, &if_is_hole);
3740  __ Goto(&done, value);
3741 
3742  __ Bind(&if_is_hole);
3743  __ Goto(&done, __ UndefinedConstant());
3744 
3745  __ Bind(&done);
3746  return done.PhiAt(0);
3747 }
3748 
3749 void EffectControlLinearizer::LowerCheckEqualsInternalizedString(
3750  Node* node, Node* frame_state) {
3751  Node* exp = node->InputAt(0);
3752  Node* val = node->InputAt(1);
3753 
3754  auto if_same = __ MakeLabel();
3755  auto if_notsame = __ MakeDeferredLabel();
3756  auto if_thinstring = __ MakeLabel();
3757  auto if_notthinstring = __ MakeLabel();
3758 
3759  // Check if {exp} and {val} are the same, which is the likely case.
3760  __ Branch(__ WordEqual(exp, val), &if_same, &if_notsame);
3761 
3762  __ Bind(&if_notsame);
3763  {
3764  // Now {val} could still be a non-internalized String that matches {exp}.
3765  __ DeoptimizeIf(DeoptimizeReason::kWrongName, VectorSlotPair(),
3766  ObjectIsSmi(val), frame_state);
3767  Node* val_map = __ LoadField(AccessBuilder::ForMap(), val);
3768  Node* val_instance_type =
3769  __ LoadField(AccessBuilder::ForMapInstanceType(), val_map);
3770 
3771  // Check for the common case of ThinString first.
3772  __ GotoIf(__ Word32Equal(val_instance_type,
3773  __ Int32Constant(THIN_ONE_BYTE_STRING_TYPE)),
3774  &if_thinstring);
3775  __ Branch(
3776  __ Word32Equal(val_instance_type, __ Int32Constant(THIN_STRING_TYPE)),
3777  &if_thinstring, &if_notthinstring);
3778 
3779  __ Bind(&if_notthinstring);
3780  {
 3781  // Check that {val} is a non-internalized String; if it's anything
 3782  // else it cannot match the recorded feedback {exp} anyway.
3783  __ DeoptimizeIfNot(
3784  DeoptimizeReason::kWrongName, VectorSlotPair(),
3785  __ Word32Equal(__ Word32And(val_instance_type,
3786  __ Int32Constant(kIsNotStringMask |
3787  kIsNotInternalizedMask)),
3788  __ Int32Constant(kStringTag | kNotInternalizedTag)),
3789  frame_state);
3790 
3791  // Try to find the {val} in the string table.
3792  MachineSignature::Builder builder(graph()->zone(), 1, 2);
3793  builder.AddReturn(MachineType::AnyTagged());
3794  builder.AddParam(MachineType::Pointer());
3795  builder.AddParam(MachineType::AnyTagged());
3796  Node* try_internalize_string_function = __ ExternalConstant(
3797  ExternalReference::try_internalize_string_function());
3798  Node* const isolate_ptr =
3799  __ ExternalConstant(ExternalReference::isolate_address(isolate()));
3800  auto call_descriptor =
3801  Linkage::GetSimplifiedCDescriptor(graph()->zone(), builder.Build());
3802  Node* val_internalized =
3803  __ Call(common()->Call(call_descriptor),
3804  try_internalize_string_function, isolate_ptr, val);
3805 
3806  // Now see if the results match.
3807  __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, VectorSlotPair(),
3808  __ WordEqual(exp, val_internalized), frame_state);
3809  __ Goto(&if_same);
3810  }
3811 
3812  __ Bind(&if_thinstring);
3813  {
3814  // The {val} is a ThinString, let's check the actual value.
3815  Node* val_actual =
3816  __ LoadField(AccessBuilder::ForThinStringActual(), val);
3817  __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, VectorSlotPair(),
3818  __ WordEqual(exp, val_actual), frame_state);
3819  __ Goto(&if_same);
3820  }
3821  }
3822 
3823  __ Bind(&if_same);
3824 }
3825 
3826 void EffectControlLinearizer::LowerCheckEqualsSymbol(Node* node,
3827  Node* frame_state) {
3828  Node* exp = node->InputAt(0);
3829  Node* val = node->InputAt(1);
3830  Node* check = __ WordEqual(exp, val);
3831  __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, VectorSlotPair(), check,
3832  frame_state);
3833 }
3834 
3835 Node* EffectControlLinearizer::AllocateHeapNumberWithValue(Node* value) {
3836  Node* result = __ Allocate(NOT_TENURED, __ Int32Constant(HeapNumber::kSize));
3837  __ StoreField(AccessBuilder::ForMap(), result, __ HeapNumberMapConstant());
3838  __ StoreField(AccessBuilder::ForHeapNumberValue(), result, value);
3839  return result;
3840 }
3841 
3842 Node* EffectControlLinearizer::ChangeIntPtrToSmi(Node* value) {
3843  // Do shift on 32bit values if Smis are stored in the lower word.
3844  if (machine()->Is64() && SmiValuesAre31Bits()) {
3845  return __ ChangeInt32ToInt64(
3846  __ Word32Shl(__ TruncateInt64ToInt32(value), SmiShiftBitsConstant()));
3847  }
3848  return __ WordShl(value, SmiShiftBitsConstant());
3849 }
3850 
3851 Node* EffectControlLinearizer::ChangeInt32ToIntPtr(Node* value) {
3852  if (machine()->Is64()) {
3853  value = __ ChangeInt32ToInt64(value);
3854  }
3855  return value;
3856 }
3857 
3858 Node* EffectControlLinearizer::ChangeIntPtrToInt32(Node* value) {
3859  if (machine()->Is64()) {
3860  value = __ TruncateInt64ToInt32(value);
3861  }
3862  return value;
3863 }
3864 
3865 Node* EffectControlLinearizer::ChangeInt32ToSmi(Node* value) {
3866  // Do shift on 32bit values if Smis are stored in the lower word.
3867  if (machine()->Is64() && SmiValuesAre31Bits()) {
3868  return __ ChangeInt32ToInt64(__ Word32Shl(value, SmiShiftBitsConstant()));
3869  }
3870  return ChangeIntPtrToSmi(ChangeInt32ToIntPtr(value));
3871 }
3872 
3873 Node* EffectControlLinearizer::ChangeInt64ToSmi(Node* value) {
3874  DCHECK(machine()->Is64());
3875  return ChangeIntPtrToSmi(value);
3876 }
3877 
3878 Node* EffectControlLinearizer::ChangeUint32ToUintPtr(Node* value) {
3879  if (machine()->Is64()) {
3880  value = __ ChangeUint32ToUint64(value);
3881  }
3882  return value;
3883 }
3884 
3885 Node* EffectControlLinearizer::ChangeUint32ToSmi(Node* value) {
3886  // Do shift on 32bit values if Smis are stored in the lower word.
3887  if (machine()->Is64() && SmiValuesAre31Bits()) {
3888  return __ ChangeUint32ToUint64(__ Word32Shl(value, SmiShiftBitsConstant()));
3889  } else {
3890  return __ WordShl(ChangeUint32ToUintPtr(value), SmiShiftBitsConstant());
3891  }
3892 }
3893 
3894 Node* EffectControlLinearizer::ChangeSmiToIntPtr(Node* value) {
3895  // Do shift on 32bit values if Smis are stored in the lower word.
3896  if (machine()->Is64() && SmiValuesAre31Bits()) {
3897  return __ ChangeInt32ToInt64(
3898  __ Word32Sar(__ TruncateInt64ToInt32(value), SmiShiftBitsConstant()));
3899  }
3900  return __ WordSar(value, SmiShiftBitsConstant());
3901 }
3902 
3903 Node* EffectControlLinearizer::ChangeSmiToInt32(Node* value) {
3904  // Do shift on 32bit values if Smis are stored in the lower word.
3905  if (machine()->Is64() && SmiValuesAre31Bits()) {
3906  return __ Word32Sar(__ TruncateInt64ToInt32(value), SmiShiftBitsConstant());
3907  }
3908  if (machine()->Is64()) {
3909  return __ TruncateInt64ToInt32(ChangeSmiToIntPtr(value));
3910  }
3911  return ChangeSmiToIntPtr(value);
3912 }
3913 
3914 Node* EffectControlLinearizer::ChangeSmiToInt64(Node* value) {
3915  CHECK(machine()->Is64());
3916  return ChangeSmiToIntPtr(value);
3917 }
3918 
3919 Node* EffectControlLinearizer::ObjectIsSmi(Node* value) {
3920  return __ WordEqual(__ WordAnd(value, __ IntPtrConstant(kSmiTagMask)),
3921  __ IntPtrConstant(kSmiTag));
3922 }
3923 
3924 Node* EffectControlLinearizer::SmiMaxValueConstant() {
3925  return __ Int32Constant(Smi::kMaxValue);
3926 }
3927 
3928 Node* EffectControlLinearizer::SmiShiftBitsConstant() {
3929  return __ IntPtrConstant(kSmiShiftSize + kSmiTagSize);
3930 }
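 // A minimal sketch of the Smi encodings assumed by the helpers above: on a
 // 32-bit target (or a 64-bit target with SmiValuesAre31Bits()), kSmiShiftSize
 // is 0 and kSmiTagSize is 1, so ChangeInt32ToSmi(5) produces 5 << 1 = 0xA and
 // ChangeSmiToInt32 undoes it with an arithmetic shift right by 1. On a 64-bit
 // target with 32-bit Smi values the total shift is kSmiShiftSize + kSmiTagSize
 // == 32, so the payload lives in the upper half of the word. In both cases
 // ObjectIsSmi simply tests that the low bit (kSmiTagMask) equals kSmiTag == 0.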
3931 
3932 Node* EffectControlLinearizer::LowerPlainPrimitiveToNumber(Node* node) {
3933  Node* value = node->InputAt(0);
3934  return __ ToNumber(value);
3935 }
3936 
3937 Node* EffectControlLinearizer::LowerPlainPrimitiveToWord32(Node* node) {
3938  Node* value = node->InputAt(0);
3939 
3940  auto if_not_smi = __ MakeDeferredLabel();
3941  auto if_to_number_smi = __ MakeLabel();
3942  auto done = __ MakeLabel(MachineRepresentation::kWord32);
3943 
3944  Node* check0 = ObjectIsSmi(value);
3945  __ GotoIfNot(check0, &if_not_smi);
3946  __ Goto(&done, ChangeSmiToInt32(value));
3947 
3948  __ Bind(&if_not_smi);
3949  Node* to_number = __ ToNumber(value);
3950 
3951  Node* check1 = ObjectIsSmi(to_number);
3952  __ GotoIf(check1, &if_to_number_smi);
3953  Node* number = __ LoadField(AccessBuilder::ForHeapNumberValue(), to_number);
3954  __ Goto(&done, __ TruncateFloat64ToWord32(number));
3955 
3956  __ Bind(&if_to_number_smi);
3957  __ Goto(&done, ChangeSmiToInt32(to_number));
3958 
3959  __ Bind(&done);
3960  return done.PhiAt(0);
3961 }
3962 
3963 Node* EffectControlLinearizer::LowerPlainPrimitiveToFloat64(Node* node) {
3964  Node* value = node->InputAt(0);
3965 
3966  auto if_not_smi = __ MakeDeferredLabel();
3967  auto if_to_number_smi = __ MakeLabel();
3968  auto done = __ MakeLabel(MachineRepresentation::kFloat64);
3969 
3970  Node* check0 = ObjectIsSmi(value);
3971  __ GotoIfNot(check0, &if_not_smi);
3972  Node* from_smi = ChangeSmiToInt32(value);
3973  __ Goto(&done, __ ChangeInt32ToFloat64(from_smi));
3974 
3975  __ Bind(&if_not_smi);
3976  Node* to_number = __ ToNumber(value);
3977  Node* check1 = ObjectIsSmi(to_number);
3978  __ GotoIf(check1, &if_to_number_smi);
3979 
3980  Node* number = __ LoadField(AccessBuilder::ForHeapNumberValue(), to_number);
3981  __ Goto(&done, number);
3982 
3983  __ Bind(&if_to_number_smi);
3984  Node* number_from_smi = ChangeSmiToInt32(to_number);
3985  number_from_smi = __ ChangeInt32ToFloat64(number_from_smi);
3986  __ Goto(&done, number_from_smi);
3987 
3988  __ Bind(&done);
3989  return done.PhiAt(0);
3990 }
3991 
3992 Node* EffectControlLinearizer::LowerEnsureWritableFastElements(Node* node) {
3993  Node* object = node->InputAt(0);
3994  Node* elements = node->InputAt(1);
3995 
3996  auto if_not_fixed_array = __ MakeDeferredLabel();
3997  auto done = __ MakeLabel(MachineRepresentation::kTagged);
3998 
3999  // Load the current map of {elements}.
4000  Node* elements_map = __ LoadField(AccessBuilder::ForMap(), elements);
4001 
4002  // Check if {elements} is not a copy-on-write FixedArray.
4003  Node* check = __ WordEqual(elements_map, __ FixedArrayMapConstant());
4004  __ GotoIfNot(check, &if_not_fixed_array);
4005  // Nothing to do if the {elements} are not copy-on-write.
4006  __ Goto(&done, elements);
4007 
4008  __ Bind(&if_not_fixed_array);
4009  // We need to take a copy of the {elements} and set them up for {object}.
4010  Operator::Properties properties = Operator::kEliminatable;
4011  Callable callable =
4012  Builtins::CallableFor(isolate(), Builtins::kCopyFastSmiOrObjectElements);
4013  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4014  auto call_descriptor = Linkage::GetStubCallDescriptor(
4015  graph()->zone(), callable.descriptor(),
4016  callable.descriptor().GetStackParameterCount(), flags, properties);
4017  Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
4018  object, __ NoContextConstant());
4019  __ Goto(&done, result);
4020 
4021  __ Bind(&done);
4022  return done.PhiAt(0);
4023 }
4024 
4025 Node* EffectControlLinearizer::LowerMaybeGrowFastElements(Node* node,
4026  Node* frame_state) {
4027  GrowFastElementsParameters params = GrowFastElementsParametersOf(node->op());
4028  Node* object = node->InputAt(0);
4029  Node* elements = node->InputAt(1);
4030  Node* index = node->InputAt(2);
4031  Node* elements_length = node->InputAt(3);
4032 
4033  auto done = __ MakeLabel(MachineRepresentation::kTagged);
4034  auto if_grow = __ MakeDeferredLabel();
4035  auto if_not_grow = __ MakeLabel();
4036 
4037  // Check if we need to grow the {elements} backing store.
4038  Node* check = __ Uint32LessThan(index, elements_length);
4039  __ GotoIfNot(check, &if_grow);
4040  __ Goto(&done, elements);
4041 
4042  __ Bind(&if_grow);
4043  // We need to grow the {elements} for {object}.
4044  Operator::Properties properties = Operator::kEliminatable;
4045  Callable callable =
4046  (params.mode() == GrowFastElementsMode::kDoubleElements)
4047  ? Builtins::CallableFor(isolate(), Builtins::kGrowFastDoubleElements)
4048  : Builtins::CallableFor(isolate(),
4049  Builtins::kGrowFastSmiOrObjectElements);
4050  CallDescriptor::Flags call_flags = CallDescriptor::kNoFlags;
4051  auto call_descriptor = Linkage::GetStubCallDescriptor(
4052  graph()->zone(), callable.descriptor(),
4053  callable.descriptor().GetStackParameterCount(), call_flags, properties);
4054  Node* new_elements =
4055  __ Call(call_descriptor, __ HeapConstant(callable.code()), object,
4056  ChangeInt32ToSmi(index), __ NoContextConstant());
4057 
4058  // Ensure that we were able to grow the {elements}.
4059  __ DeoptimizeIf(DeoptimizeReason::kCouldNotGrowElements, params.feedback(),
4060  ObjectIsSmi(new_elements), frame_state);
4061  __ Goto(&done, new_elements);
4062 
4063  __ Bind(&done);
4064  return done.PhiAt(0);
4065 }
4066 
4067 void EffectControlLinearizer::LowerTransitionElementsKind(Node* node) {
4068  ElementsTransition const transition = ElementsTransitionOf(node->op());
4069  Node* object = node->InputAt(0);
4070 
4071  auto if_map_same = __ MakeDeferredLabel();
4072  auto done = __ MakeLabel();
4073 
4074  Node* source_map = __ HeapConstant(transition.source());
4075  Node* target_map = __ HeapConstant(transition.target());
4076 
4077  // Load the current map of {object}.
4078  Node* object_map = __ LoadField(AccessBuilder::ForMap(), object);
4079 
4080  // Check if {object_map} is the same as {source_map}.
4081  Node* check = __ WordEqual(object_map, source_map);
4082  __ GotoIf(check, &if_map_same);
4083  __ Goto(&done);
4084 
4085  __ Bind(&if_map_same);
4086  switch (transition.mode()) {
4087  case ElementsTransition::kFastTransition:
4088  // In-place migration of {object}, just store the {target_map}.
4089  __ StoreField(AccessBuilder::ForMap(), object, target_map);
4090  break;
4091  case ElementsTransition::kSlowTransition: {
4092  // Instance migration, call out to the runtime for {object}.
4093  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
4094  Runtime::FunctionId id = Runtime::kTransitionElementsKind;
4095  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
4096  graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
4097  __ Call(call_descriptor, __ CEntryStubConstant(1), object, target_map,
4098  __ ExternalConstant(ExternalReference::Create(id)),
4099  __ Int32Constant(2), __ NoContextConstant());
4100  break;
4101  }
4102  }
4103  __ Goto(&done);
4104 
4105  __ Bind(&done);
4106 }
4107 
4108 Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
4109  Node* object = node->InputAt(0);
4110  Node* index = node->InputAt(1);
4111  Node* zero = __ IntPtrConstant(0);
4112  Node* one = __ IntPtrConstant(1);
4113 
4114  // Sign-extend the {index} on 64-bit architectures.
4115  if (machine()->Is64()) {
4116  index = __ ChangeInt32ToInt64(index);
4117  }
4118 
4119  auto if_double = __ MakeDeferredLabel();
4120  auto done = __ MakeLabel(MachineRepresentation::kTagged);
4121 
4122  // Check if field is a mutable double field.
4123  __ GotoIfNot(__ WordEqual(__ WordAnd(index, one), zero), &if_double);
4124 
4125  // The field is a proper Tagged field on {object}. The {index} is shifted
4126  // to the left by one in the code below.
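  // For instance, on a 64-bit target an even {index} of 6 denotes in-object
  // tagged field 3 (offset 6 << (kPointerSizeLog2 - 1) == 24 bytes past the
  // header), while an odd {index} of 7 denotes double field 3 (7 >> 1 == 3),
  // handled in the deferred code further below; negative values select the
  // out-of-object properties backing store instead.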
4127  {
4128  // Check if field is in-object or out-of-object.
4129  auto if_outofobject = __ MakeLabel();
4130  __ GotoIf(__ IntLessThan(index, zero), &if_outofobject);
4131 
4132  // The field is located in the {object} itself.
4133  {
4134  Node* offset =
4135  __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2 - 1)),
4136  __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
4137  Node* result = __ Load(MachineType::AnyTagged(), object, offset);
4138  __ Goto(&done, result);
4139  }
4140 
4141  // The field is located in the properties backing store of {object}.
 4142  // The {index} is equal to the negated out-of-object property index plus 1.
4143  __ Bind(&if_outofobject);
4144  {
4145  Node* properties =
4146  __ LoadField(AccessBuilder::ForJSObjectPropertiesOrHash(), object);
4147  Node* offset =
4148  __ IntAdd(__ WordShl(__ IntSub(zero, index),
4149  __ IntPtrConstant(kPointerSizeLog2 - 1)),
4150  __ IntPtrConstant((FixedArray::kHeaderSize - kPointerSize) -
4151  kHeapObjectTag));
4152  Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
4153  __ Goto(&done, result);
4154  }
4155  }
4156 
 4157  // The field is a Double field, either unboxed in the object on 64-bit
 4158  // architectures, or stored as a MutableHeapNumber.
4159  __ Bind(&if_double);
4160  {
4161  auto done_double = __ MakeLabel(MachineRepresentation::kFloat64);
4162 
4163  index = __ WordSar(index, one);
4164 
4165  // Check if field is in-object or out-of-object.
4166  auto if_outofobject = __ MakeLabel();
4167  __ GotoIf(__ IntLessThan(index, zero), &if_outofobject);
4168 
4169  // The field is located in the {object} itself.
4170  {
4171  Node* offset =
4172  __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2)),
4173  __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
4174  if (FLAG_unbox_double_fields) {
4175  Node* result = __ Load(MachineType::Float64(), object, offset);
4176  __ Goto(&done_double, result);
4177  } else {
4178  Node* result = __ Load(MachineType::AnyTagged(), object, offset);
4179  result = __ LoadField(AccessBuilder::ForHeapNumberValue(), result);
4180  __ Goto(&done_double, result);
4181  }
4182  }
4183 
4184  __ Bind(&if_outofobject);
4185  {
4186  Node* properties =
4187  __ LoadField(AccessBuilder::ForJSObjectPropertiesOrHash(), object);
4188  Node* offset =
4189  __ IntAdd(__ WordShl(__ IntSub(zero, index),
4190  __ IntPtrConstant(kPointerSizeLog2)),
4191  __ IntPtrConstant((FixedArray::kHeaderSize - kPointerSize) -
4192  kHeapObjectTag));
4193  Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
4194  result = __ LoadField(AccessBuilder::ForHeapNumberValue(), result);
4195  __ Goto(&done_double, result);
4196  }
4197 
4198  __ Bind(&done_double);
4199  {
4200  Node* result = AllocateHeapNumberWithValue(done_double.PhiAt(0));
4201  __ Goto(&done, result);
4202  }
4203  }
4204 
4205  __ Bind(&done);
4206  return done.PhiAt(0);
4207 }
4208 
4209 Node* EffectControlLinearizer::BuildReverseBytes(ExternalArrayType type,
4210  Node* value) {
4211  switch (type) {
4212  case kExternalInt8Array:
4213  case kExternalUint8Array:
4214  case kExternalUint8ClampedArray:
4215  return value;
4216 
4217  case kExternalInt16Array: {
4218  Node* result = __ Word32ReverseBytes(value);
4219  result = __ Word32Sar(result, __ Int32Constant(16));
4220  return result;
4221  }
4222 
4223  case kExternalUint16Array: {
4224  Node* result = __ Word32ReverseBytes(value);
4225  result = __ Word32Shr(result, __ Int32Constant(16));
4226  return result;
4227  }
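    // E.g. for an Int16 value 0x0000ABCD: Word32ReverseBytes gives 0xCDAB0000
    // and the arithmetic shift by 16 yields the sign-extended 0xFFFFCDAB,
    // whereas the Uint16 case uses a logical shift and yields 0x0000CDAB.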
4228 
4229  case kExternalInt32Array: // Fall through.
4230  case kExternalUint32Array:
4231  return __ Word32ReverseBytes(value);
4232 
4233  case kExternalFloat32Array: {
4234  Node* result = __ BitcastFloat32ToInt32(value);
4235  result = __ Word32ReverseBytes(result);
4236  result = __ BitcastInt32ToFloat32(result);
4237  return result;
4238  }
4239 
4240  case kExternalFloat64Array: {
4241  if (machine()->Is64()) {
4242  Node* result = __ BitcastFloat64ToInt64(value);
4243  result = __ Word64ReverseBytes(result);
4244  result = __ BitcastInt64ToFloat64(result);
4245  return result;
4246  } else {
4247  Node* lo = __ Word32ReverseBytes(__ Float64ExtractLowWord32(value));
4248  Node* hi = __ Word32ReverseBytes(__ Float64ExtractHighWord32(value));
4249  Node* result = __ Float64Constant(0.0);
4250  result = __ Float64InsertLowWord32(result, hi);
4251  result = __ Float64InsertHighWord32(result, lo);
4252  return result;
4253  }
4254  }
4255 
4256  case kExternalBigInt64Array:
4257  case kExternalBigUint64Array:
4258  UNREACHABLE();
4259  }
4260 }
4261 
4262 Node* EffectControlLinearizer::LowerLoadDataViewElement(Node* node) {
4263  ExternalArrayType element_type = ExternalArrayTypeOf(node->op());
4264  Node* buffer = node->InputAt(0);
4265  Node* storage = node->InputAt(1);
4266  Node* byte_offset = node->InputAt(2);
4267  Node* index = node->InputAt(3);
4268  Node* is_little_endian = node->InputAt(4);
4269 
4270  // We need to keep the {buffer} alive so that the GC will not release the
4271  // ArrayBuffer (if there's any) as long as we are still operating on it.
4272  __ Retain(buffer);
4273 
4274  // Compute the effective offset.
4275  Node* offset = __ IntAdd(byte_offset, index);
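  // For example, a DataView created at byteOffset 8 reading element index 16
  // ends up accessing byte 8 + 16 = 24 of the underlying {storage}.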
4276 
4277  MachineType const machine_type =
4278  AccessBuilder::ForTypedArrayElement(element_type, true).machine_type;
4279 
4280  Node* value = __ LoadUnaligned(machine_type, storage, offset);
4281  auto big_endian = __ MakeLabel();
4282  auto done = __ MakeLabel(machine_type.representation());
4283 
4284  __ GotoIfNot(is_little_endian, &big_endian);
4285  { // Little-endian load.
4286 #if V8_TARGET_LITTLE_ENDIAN
4287  __ Goto(&done, value);
4288 #else
4289  __ Goto(&done, BuildReverseBytes(element_type, value));
4290 #endif // V8_TARGET_LITTLE_ENDIAN
4291  }
4292 
4293  __ Bind(&big_endian);
4294  { // Big-endian load.
4295 #if V8_TARGET_LITTLE_ENDIAN
4296  __ Goto(&done, BuildReverseBytes(element_type, value));
4297 #else
4298  __ Goto(&done, value);
4299 #endif // V8_TARGET_LITTLE_ENDIAN
4300  }
4301 
4302  // We're done, return {result}.
4303  __ Bind(&done);
4304  return done.PhiAt(0);
4305 }
4306 
4307 void EffectControlLinearizer::LowerStoreDataViewElement(Node* node) {
4308  ExternalArrayType element_type = ExternalArrayTypeOf(node->op());
4309  Node* buffer = node->InputAt(0);
4310  Node* storage = node->InputAt(1);
4311  Node* byte_offset = node->InputAt(2);
4312  Node* index = node->InputAt(3);
4313  Node* value = node->InputAt(4);
4314  Node* is_little_endian = node->InputAt(5);
4315 
4316  // We need to keep the {buffer} alive so that the GC will not release the
4317  // ArrayBuffer (if there's any) as long as we are still operating on it.
4318  __ Retain(buffer);
4319 
4320  // Compute the effective offset.
4321  Node* offset = __ IntAdd(byte_offset, index);
4322 
4323  MachineType const machine_type =
4324  AccessBuilder::ForTypedArrayElement(element_type, true).machine_type;
4325 
4326  auto big_endian = __ MakeLabel();
4327  auto done = __ MakeLabel(machine_type.representation());
4328 
4329  __ GotoIfNot(is_little_endian, &big_endian);
4330  { // Little-endian store.
4331 #if V8_TARGET_LITTLE_ENDIAN
4332  __ Goto(&done, value);
4333 #else
4334  __ Goto(&done, BuildReverseBytes(element_type, value));
4335 #endif // V8_TARGET_LITTLE_ENDIAN
4336  }
4337 
4338  __ Bind(&big_endian);
4339  { // Big-endian store.
4340 #if V8_TARGET_LITTLE_ENDIAN
4341  __ Goto(&done, BuildReverseBytes(element_type, value));
4342 #else
4343  __ Goto(&done, value);
4344 #endif // V8_TARGET_LITTLE_ENDIAN
4345  }
4346 
4347  __ Bind(&done);
4348  __ StoreUnaligned(machine_type.representation(), storage, offset,
4349  done.PhiAt(0));
4350 }
4351 
4352 Node* EffectControlLinearizer::LowerLoadTypedElement(Node* node) {
4353  ExternalArrayType array_type = ExternalArrayTypeOf(node->op());
4354  Node* buffer = node->InputAt(0);
4355  Node* base = node->InputAt(1);
4356  Node* external = node->InputAt(2);
4357  Node* index = node->InputAt(3);
4358 
4359  // We need to keep the {buffer} alive so that the GC will not release the
4360  // ArrayBuffer (if there's any) as long as we are still operating on it.
4361  __ Retain(buffer);
4362 
4363  // Compute the effective storage pointer, handling the case where the
4364  // {external} pointer is the effective storage pointer (i.e. the {base}
4365  // is Smi zero).
4366  Node* storage = IntPtrMatcher(base).Is(0)
4367  ? external
4368  : __ UnsafePointerAdd(base, external);
4369 
4370  // Perform the actual typed element access.
4371  return __ LoadElement(AccessBuilder::ForTypedArrayElement(
4372  array_type, true, LoadSensitivity::kCritical),
4373  storage, index);
4374 }
4375 
4376 void EffectControlLinearizer::LowerStoreTypedElement(Node* node) {
4377  ExternalArrayType array_type = ExternalArrayTypeOf(node->op());
4378  Node* buffer = node->InputAt(0);
4379  Node* base = node->InputAt(1);
4380  Node* external = node->InputAt(2);
4381  Node* index = node->InputAt(3);
4382  Node* value = node->InputAt(4);
4383 
4384  // We need to keep the {buffer} alive so that the GC will not release the
4385  // ArrayBuffer (if there's any) as long as we are still operating on it.
4386  __ Retain(buffer);
4387 
4388  // Compute the effective storage pointer, handling the case where the
4389  // {external} pointer is the effective storage pointer (i.e. the {base}
4390  // is Smi zero).
4391  Node* storage = IntPtrMatcher(base).Is(0)
4392  ? external
4393  : __ UnsafePointerAdd(base, external);
4394 
4395  // Perform the actual typed element access.
4396  __ StoreElement(AccessBuilder::ForTypedArrayElement(array_type, true),
4397  storage, index, value);
4398 }
4399 
4400 void EffectControlLinearizer::TransitionElementsTo(Node* node, Node* array,
4401  ElementsKind from,
4402  ElementsKind to) {
4403  DCHECK(IsMoreGeneralElementsKindTransition(from, to));
4404  DCHECK(to == HOLEY_ELEMENTS || to == HOLEY_DOUBLE_ELEMENTS);
4405 
4406  Handle<Map> target(to == HOLEY_ELEMENTS ? FastMapParameterOf(node->op())
4407  : DoubleMapParameterOf(node->op()));
4408  Node* target_map = __ HeapConstant(target);
4409 
4410  if (IsSimpleMapChangeTransition(from, to)) {
4411  __ StoreField(AccessBuilder::ForMap(), array, target_map);
4412  } else {
4413  // Instance migration, call out to the runtime for {array}.
4414  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
4415  Runtime::FunctionId id = Runtime::kTransitionElementsKind;
4416  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
4417  graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
4418  __ Call(call_descriptor, __ CEntryStubConstant(1), array, target_map,
4419  __ ExternalConstant(ExternalReference::Create(id)),
4420  __ Int32Constant(2), __ NoContextConstant());
4421  }
4422 }
4423 
4424 Node* EffectControlLinearizer::IsElementsKindGreaterThan(
4425  Node* kind, ElementsKind reference_kind) {
4426  Node* ref_kind = __ Int32Constant(reference_kind);
4427  Node* ret = __ Int32LessThan(ref_kind, kind);
4428  return ret;
4429 }
4430 
4431 void EffectControlLinearizer::LowerTransitionAndStoreElement(Node* node) {
4432  Node* array = node->InputAt(0);
4433  Node* index = node->InputAt(1);
4434  Node* value = node->InputAt(2);
4435 
4436  // Possibly transition array based on input and store.
4437  //
4438  // -- TRANSITION PHASE -----------------
4439  // kind = ElementsKind(array)
4440  // if value is not smi {
4441  // if kind == HOLEY_SMI_ELEMENTS {
4442  // if value is heap number {
4443  // Transition array to HOLEY_DOUBLE_ELEMENTS
4444  // kind = HOLEY_DOUBLE_ELEMENTS
4445  // } else {
4446  // Transition array to HOLEY_ELEMENTS
4447  // kind = HOLEY_ELEMENTS
4448  // }
4449  // } else if kind == HOLEY_DOUBLE_ELEMENTS {
4450  // if value is not heap number {
4451  // Transition array to HOLEY_ELEMENTS
4452  // kind = HOLEY_ELEMENTS
4453  // }
4454  // }
4455  // }
4456  //
4457  // -- STORE PHASE ----------------------
4458  // [make sure {kind} is up-to-date]
4459  // if kind == HOLEY_DOUBLE_ELEMENTS {
4460  // if value is smi {
4461  // float_value = convert smi to float
4462  // Store array[index] = float_value
4463  // } else {
4464  // float_value = value
4465  // Store array[index] = float_value
4466  // }
4467  // } else {
4468  // // kind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS
4469  // Store array[index] = value
4470  // }
4471  //
4472  Node* map = __ LoadField(AccessBuilder::ForMap(), array);
4473  Node* kind;
4474  {
4475  Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
4476  Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
4477  Node* andit = __ Word32And(bit_field2, mask);
4478  Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
4479  kind = __ Word32Shr(andit, shift);
4480  }
4481 
4482  auto do_store = __ MakeLabel(MachineRepresentation::kWord32);
4483  // We can store a smi anywhere.
4484  __ GotoIf(ObjectIsSmi(value), &do_store, kind);
4485 
4486  // {value} is a HeapObject.
4487  auto transition_smi_array = __ MakeDeferredLabel();
4488  auto transition_double_to_fast = __ MakeDeferredLabel();
4489  {
4490  __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
4491  &transition_smi_array);
4492  __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS), &do_store,
4493  kind);
4494 
 4495  // We have a double elements kind. Only a HeapNumber can be stored
 4496  // without causing a transition.
4497  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
4498  Node* heap_number_map = __ HeapNumberMapConstant();
4499  Node* check = __ WordEqual(value_map, heap_number_map);
4500  __ GotoIfNot(check, &transition_double_to_fast);
4501  __ Goto(&do_store, kind);
4502  }
4503 
4504  __ Bind(&transition_smi_array); // deferred code.
4505  {
4506  // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_DOUBLE_ELEMENTS or
4507  // to HOLEY_ELEMENTS.
4508  auto if_value_not_heap_number = __ MakeLabel();
4509  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
4510  Node* heap_number_map = __ HeapNumberMapConstant();
4511  Node* check = __ WordEqual(value_map, heap_number_map);
4512  __ GotoIfNot(check, &if_value_not_heap_number);
4513  {
4514  // {value} is a HeapNumber.
4515  TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS,
4516  HOLEY_DOUBLE_ELEMENTS);
4517  __ Goto(&do_store, __ Int32Constant(HOLEY_DOUBLE_ELEMENTS));
4518  }
4519  __ Bind(&if_value_not_heap_number);
4520  {
4521  TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS);
4522  __ Goto(&do_store, __ Int32Constant(HOLEY_ELEMENTS));
4523  }
4524  }
4525 
4526  __ Bind(&transition_double_to_fast); // deferred code.
4527  {
4528  TransitionElementsTo(node, array, HOLEY_DOUBLE_ELEMENTS, HOLEY_ELEMENTS);
4529  __ Goto(&do_store, __ Int32Constant(HOLEY_ELEMENTS));
4530  }
4531 
4532  // Make sure kind is up-to-date.
4533  __ Bind(&do_store);
4534  kind = do_store.PhiAt(0);
4535 
4536  Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
4537  auto if_kind_is_double = __ MakeLabel();
4538  auto done = __ MakeLabel();
4539  __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
4540  &if_kind_is_double);
4541  {
4542  // Our ElementsKind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS.
4543  __ StoreElement(AccessBuilder::ForFixedArrayElement(HOLEY_ELEMENTS),
4544  elements, index, value);
4545  __ Goto(&done);
4546  }
4547  __ Bind(&if_kind_is_double);
4548  {
4549  // Our ElementsKind is HOLEY_DOUBLE_ELEMENTS.
4550  auto do_double_store = __ MakeLabel();
4551  __ GotoIfNot(ObjectIsSmi(value), &do_double_store);
4552  {
4553  Node* int_value = ChangeSmiToInt32(value);
4554  Node* float_value = __ ChangeInt32ToFloat64(int_value);
4555  __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
4556  index, float_value);
4557  __ Goto(&done);
4558  }
4559  __ Bind(&do_double_store);
4560  {
4561  Node* float_value =
4562  __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
4563  __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
4564  index, float_value);
4565  __ Goto(&done);
4566  }
4567  }
4568 
4569  __ Bind(&done);
4570 }
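All four Transition*/Store* lowerings in this file begin the same way: they read the array's map, load bit_field2, and decode the ElementsKind with a mask and a shift (the Word32And/Word32Shr pair above). Below is a rough standalone sketch of that decode; the field parameters are made-up placeholders, not the real Map::ElementsKindBits layout.

#include <cstdint>
#include <iostream>

// Hypothetical bit-field parameters; the real values live in Map::ElementsKindBits.
constexpr uint32_t kElementsKindShift = 3;
constexpr uint32_t kElementsKindBits = 5;
constexpr uint32_t kElementsKindMask =
    ((1u << kElementsKindBits) - 1) << kElementsKindShift;

// Mirrors the Word32And + Word32Shr sequence emitted by the lowerings.
uint32_t DecodeElementsKind(uint32_t bit_field2) {
  return (bit_field2 & kElementsKindMask) >> kElementsKindShift;
}

int main() {
  uint32_t bit_field2 = (4u << kElementsKindShift) | 0x1;  // kind 4 plus unrelated bits
  std::cout << DecodeElementsKind(bit_field2) << "\n";     // prints 4
}

Everything that follows the decode only compares and branches on this small integer, which is why the lowering can keep the kind in a plain word32 value (and feed it through a word32 phi at do_store).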
4571 
4572 void EffectControlLinearizer::LowerTransitionAndStoreNumberElement(Node* node) {
4573  Node* array = node->InputAt(0);
4574  Node* index = node->InputAt(1);
4575  Node* value = node->InputAt(2); // This is a Float64, not tagged.
4576 
4577  // Possibly transition array based on input and store.
4578  //
4579  // -- TRANSITION PHASE -----------------
4580  // kind = ElementsKind(array)
4581  // if kind == HOLEY_SMI_ELEMENTS {
4582  // Transition array to HOLEY_DOUBLE_ELEMENTS
4583  // } else if kind != HOLEY_DOUBLE_ELEMENTS {
4584  // This is UNREACHABLE, execute a debug break.
4585  // }
4586  //
4587  // -- STORE PHASE ----------------------
4588  // Store array[index] = value (it's a float)
4589  //
4590  Node* map = __ LoadField(AccessBuilder::ForMap(), array);
4591  Node* kind;
4592  {
4593  Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
4594  Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
4595  Node* andit = __ Word32And(bit_field2, mask);
4596  Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
4597  kind = __ Word32Shr(andit, shift);
4598  }
4599 
4600  auto do_store = __ MakeLabel();
4601 
4602  // {value} is a float64.
4603  auto transition_smi_array = __ MakeDeferredLabel();
4604  {
4605  __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
4606  &transition_smi_array);
4607  // We expect that our input array started at HOLEY_SMI_ELEMENTS, and
4608  // climbs the lattice up to HOLEY_DOUBLE_ELEMENTS. Force a debug break
4609  // if this assumption is broken. Note that loop peeling can also break
4610  // this assumption.
4611  __ GotoIf(__ Word32Equal(kind, __ Int32Constant(HOLEY_DOUBLE_ELEMENTS)),
4612  &do_store);
4613  // TODO(turbofan): It would be good to have an "Unreachable()" node type.
4614  __ DebugBreak();
4615  __ Goto(&do_store);
4616  }
4617 
4618  __ Bind(&transition_smi_array); // deferred code.
4619  {
4620  // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_DOUBLE_ELEMENTS.
4621  TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS,
4622  HOLEY_DOUBLE_ELEMENTS);
4623  __ Goto(&do_store);
4624  }
4625 
4626  __ Bind(&do_store);
4627 
4628  Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
4629  __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements, index,
4630  value);
4631 }
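In prose, the transition phase above only ever moves an array one step up the lattice, from HOLEY_SMI_ELEMENTS to HOLEY_DOUBLE_ELEMENTS, before an unboxed double is written; any other starting kind is expected to already be HOLEY_DOUBLE_ELEMENTS. A simplified sketch of that decision, using an illustrative ElementsKind enum and a stand-in Transition helper rather than V8's types:

#include <cassert>

// Simplified stand-ins for V8's ElementsKind values and transition helper.
enum ElementsKind { HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS, HOLEY_DOUBLE_ELEMENTS };

struct Array { ElementsKind kind; };

void Transition(Array* a, ElementsKind from, ElementsKind to) {
  assert(a->kind == from);
  a->kind = to;
}

// Mirrors the transition phase of the number-element store: smi arrays are
// promoted to double arrays; anything else must already hold doubles (the
// lowering emits a DebugBreak() for that impossible case).
void PrepareForDoubleStore(Array* a) {
  if (a->kind == HOLEY_SMI_ELEMENTS) {
    Transition(a, HOLEY_SMI_ELEMENTS, HOLEY_DOUBLE_ELEMENTS);
  } else {
    assert(a->kind == HOLEY_DOUBLE_ELEMENTS);
  }
}

int main() {
  Array a{HOLEY_SMI_ELEMENTS};
  PrepareForDoubleStore(&a);
  return a.kind == HOLEY_DOUBLE_ELEMENTS ? 0 : 1;
}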
4632 
4633 void EffectControlLinearizer::LowerTransitionAndStoreNonNumberElement(
4634  Node* node) {
4635  Node* array = node->InputAt(0);
4636  Node* index = node->InputAt(1);
4637  Node* value = node->InputAt(2);
4638 
4639  // Possibly transition array based on input and store.
4640  //
4641  // -- TRANSITION PHASE -----------------
4642  // kind = ElementsKind(array)
4643  // if kind == HOLEY_SMI_ELEMENTS {
4644  // Transition array to HOLEY_ELEMENTS
4645  // } else if kind == HOLEY_DOUBLE_ELEMENTS {
4646  // Transition array to HOLEY_ELEMENTS
4647  // }
4648  //
4649  // -- STORE PHASE ----------------------
4650  // // kind is HOLEY_ELEMENTS
4651  // Store array[index] = value
4652  //
4653  Node* map = __ LoadField(AccessBuilder::ForMap(), array);
4654  Node* kind;
4655  {
4656  Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
4657  Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
4658  Node* andit = __ Word32And(bit_field2, mask);
4659  Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
4660  kind = __ Word32Shr(andit, shift);
4661  }
4662 
4663  auto do_store = __ MakeLabel();
4664 
4665  auto transition_smi_array = __ MakeDeferredLabel();
4666  auto transition_double_to_fast = __ MakeDeferredLabel();
4667  {
4668  __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
4669  &transition_smi_array);
4670  __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
4671  &transition_double_to_fast);
4672  __ Goto(&do_store);
4673  }
4674 
4675  __ Bind(&transition_smi_array); // deferred code.
4676  {
4677  // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_ELEMENTS.
4678  TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS);
4679  __ Goto(&do_store);
4680  }
4681 
4682  __ Bind(&transition_double_to_fast); // deferred code.
4683  {
4684  TransitionElementsTo(node, array, HOLEY_DOUBLE_ELEMENTS, HOLEY_ELEMENTS);
4685  __ Goto(&do_store);
4686  }
4687 
4688  __ Bind(&do_store);
4689 
4690  Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
4691  // Our ElementsKind is HOLEY_ELEMENTS.
4692  ElementAccess access = AccessBuilder::ForFixedArrayElement(HOLEY_ELEMENTS);
4693  Type value_type = ValueTypeParameterOf(node->op());
4694  if (value_type.Is(Type::BooleanOrNullOrUndefined())) {
4695  access.type = value_type;
4696  access.write_barrier_kind = kNoWriteBarrier;
4697  }
4698  __ StoreElement(access, elements, index, value);
4699 }
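One detail worth noting above: when the typer has proven the stored value to be a boolean, null, or undefined, the store drops its write barrier, since those values are immortal, immovable heap objects the GC never needs to record. A schematic sketch of that barrier selection; the Type struct here is an illustrative stand-in, not V8's Type.

// Illustrative stand-ins; the real decision uses Type::BooleanOrNullOrUndefined().
enum WriteBarrierKind { kNoWriteBarrier, kFullWriteBarrier };

struct Type { bool is_boolean_null_or_undefined; };

WriteBarrierKind SelectWriteBarrier(const Type& value_type) {
  // Immortal immovable values never need to be recorded by the GC.
  return value_type.is_boolean_null_or_undefined ? kNoWriteBarrier
                                                 : kFullWriteBarrier;
}

int main() {
  return SelectWriteBarrier(Type{true}) == kNoWriteBarrier ? 0 : 1;
}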
4700 
4701 void EffectControlLinearizer::LowerStoreSignedSmallElement(Node* node) {
4702  Node* array = node->InputAt(0);
4703  Node* index = node->InputAt(1);
4704  Node* value = node->InputAt(2); // int32
4705 
4706  // Store a signed small in an output array.
4707  //
4708  // kind = ElementsKind(array)
4709  //
4710  // -- STORE PHASE ----------------------
4711  // if kind == HOLEY_DOUBLE_ELEMENTS {
4712  // float_value = convert int32 to float
4713  // Store array[index] = float_value
4714  // } else {
4715  // // kind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS
4716  // smi_value = convert int32 to smi
4717  // Store array[index] = smi_value
4718  // }
4719  //
4720  Node* map = __ LoadField(AccessBuilder::ForMap(), array);
4721  Node* kind;
4722  {
4723  Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
4724  Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
4725  Node* andit = __ Word32And(bit_field2, mask);
4726  Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
4727  kind = __ Word32Shr(andit, shift);
4728  }
4729 
4730  Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
4731  auto if_kind_is_double = __ MakeLabel();
4732  auto done = __ MakeLabel();
4733  __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
4734  &if_kind_is_double);
4735  {
4736  // Our ElementsKind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS.
4737  // In this case, we know our value is a signed small, and we can optimize
4738  // the ElementAccess information.
4739  ElementAccess access = AccessBuilder::ForFixedArrayElement();
4740  access.type = Type::SignedSmall();
4741  access.machine_type = MachineType::TaggedSigned();
4742  access.write_barrier_kind = kNoWriteBarrier;
4743  Node* smi_value = ChangeInt32ToSmi(value);
4744  __ StoreElement(access, elements, index, smi_value);
4745  __ Goto(&done);
4746  }
4747  __ Bind(&if_kind_is_double);
4748  {
4749  // Our ElementsKind is HOLEY_DOUBLE_ELEMENTS.
4750  Node* float_value = __ ChangeInt32ToFloat64(value);
4751  __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
4752  index, float_value);
4753  __ Goto(&done);
4754  }
4755 
4756  __ Bind(&done);
4757 }
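The signed-small store keeps the int32 value untagged just long enough to pick a representation: tagged arrays receive it re-tagged as a Smi (so no write barrier is needed), double arrays receive it widened to float64. A standalone sketch of that dispatch, assuming a hypothetical 64-bit Smi encoding with the payload in the upper 32 bits:

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical 64-bit Smi tagging: payload in the upper 32 bits, tag bit clear.
int64_t ChangeInt32ToSmi(int32_t value) {
  return static_cast<int64_t>(value) << 32;
}

enum ElementsKind { HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS, HOLEY_DOUBLE_ELEMENTS };

struct Backing {
  std::vector<int64_t> tagged;  // stand-in for FixedArray slots
  std::vector<double> unboxed;  // stand-in for FixedDoubleArray slots
};

// Mirrors the store dispatch above: double arrays get the value as float64,
// everything else gets it as a Smi, which needs no write barrier.
void StoreSignedSmallElement(Backing* elements, ElementsKind kind,
                             std::size_t index, int32_t value) {
  if (kind == HOLEY_DOUBLE_ELEMENTS) {
    elements->unboxed[index] = static_cast<double>(value);
  } else {
    elements->tagged[index] = ChangeInt32ToSmi(value);
  }
}

int main() {
  Backing b{std::vector<int64_t>(2), std::vector<double>(2)};
  StoreSignedSmallElement(&b, HOLEY_ELEMENTS, 0, 42);
  StoreSignedSmallElement(&b, HOLEY_DOUBLE_ELEMENTS, 1, 42);
}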
4758 
4759 void EffectControlLinearizer::LowerRuntimeAbort(Node* node) {
4760  AbortReason reason = AbortReasonOf(node->op());
4761  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
4762  Runtime::FunctionId id = Runtime::kAbort;
4763  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
4764  graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
4765  __ Call(call_descriptor, __ CEntryStubConstant(1),
4766  jsgraph()->SmiConstant(static_cast<int>(reason)),
4767  __ ExternalConstant(ExternalReference::Create(id)),
4768  __ Int32Constant(1), __ NoContextConstant());
4769 }
4770 
4771 Node* EffectControlLinearizer::LowerConvertReceiver(Node* node) {
4772  ConvertReceiverMode const mode = ConvertReceiverModeOf(node->op());
4773  Node* value = node->InputAt(0);
4774  Node* global_proxy = node->InputAt(1);
4775 
4776  switch (mode) {
4777  case ConvertReceiverMode::kNullOrUndefined: {
4778  return global_proxy;
4779  }
4780  case ConvertReceiverMode::kNotNullOrUndefined: {
4781  auto convert_to_object = __ MakeDeferredLabel();
4782  auto done_convert = __ MakeLabel(MachineRepresentation::kTagged);
4783 
4784  // Check if {value} is already a JSReceiver.
4785  __ GotoIf(ObjectIsSmi(value), &convert_to_object);
4786  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
4787  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
4788  Node* value_instance_type =
4789  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
4790  Node* check = __ Uint32LessThan(
4791  value_instance_type, __ Uint32Constant(FIRST_JS_RECEIVER_TYPE));
4792  __ GotoIf(check, &convert_to_object);
4793  __ Goto(&done_convert, value);
4794 
4795  // Wrap the primitive {value} into a JSValue.
4796  __ Bind(&convert_to_object);
4797  Operator::Properties properties = Operator::kEliminatable;
4798  Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
4799  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4800  auto call_descriptor = Linkage::GetStubCallDescriptor(
4801  graph()->zone(), callable.descriptor(),
4802  callable.descriptor().GetStackParameterCount(), flags, properties);
4803  Node* native_context = __ LoadField(
4804  AccessBuilder::ForJSGlobalProxyNativeContext(), global_proxy);
4805  Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
4806  value, native_context);
4807  __ Goto(&done_convert, result);
4808 
4809  __ Bind(&done_convert);
4810  return done_convert.PhiAt(0);
4811  }
4812  case ConvertReceiverMode::kAny: {
4813  auto convert_to_object = __ MakeDeferredLabel();
4814  auto convert_global_proxy = __ MakeDeferredLabel();
4815  auto done_convert = __ MakeLabel(MachineRepresentation::kTagged);
4816 
4817  // Check if {value} is already a JSReceiver, or null/undefined.
4818  __ GotoIf(ObjectIsSmi(value), &convert_to_object);
4819  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
4820  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
4821  Node* value_instance_type =
4822  __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
4823  Node* check = __ Uint32LessThan(
4824  value_instance_type, __ Uint32Constant(FIRST_JS_RECEIVER_TYPE));
4825  __ GotoIf(check, &convert_to_object);
4826  __ Goto(&done_convert, value);
4827 
4828  // Wrap the primitive {value} into a JSValue.
4829  __ Bind(&convert_to_object);
4830  __ GotoIf(__ WordEqual(value, __ UndefinedConstant()),
4831  &convert_global_proxy);
4832  __ GotoIf(__ WordEqual(value, __ NullConstant()), &convert_global_proxy);
4833  Operator::Properties properties = Operator::kEliminatable;
4834  Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
4835  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4836  auto call_descriptor = Linkage::GetStubCallDescriptor(
4837  graph()->zone(), callable.descriptor(),
4838  callable.descriptor().GetStackParameterCount(), flags, properties);
4839  Node* native_context = __ LoadField(
4840  AccessBuilder::ForJSGlobalProxyNativeContext(), global_proxy);
4841  Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
4842  value, native_context);
4843  __ Goto(&done_convert, result);
4844 
4845  // Replace the {value} with the {global_proxy}.
4846  __ Bind(&convert_global_proxy);
4847  __ Goto(&done_convert, global_proxy);
4848 
4849  __ Bind(&done_convert);
4850  return done_convert.PhiAt(0);
4851  }
4852  }
4853 
4854  UNREACHABLE();
4855  return nullptr;
4856 }
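Taken together, the three modes above implement the receiver coercion used for sloppy-mode calls: null or undefined becomes the global proxy, other primitives are wrapped via the ToObject builtin, and anything that is already a JSReceiver passes through unchanged. A compact sketch of that decision table; the Value struct and ToObject helper below are illustrative stand-ins, not V8 types.

// Illustrative stand-ins; the lowering works on tagged Node* values instead.
struct Value {
  bool is_receiver;            // already a JSReceiver
  bool is_null_or_undefined;
};

enum class ConvertReceiverMode { kNullOrUndefined, kNotNullOrUndefined, kAny };

// Stand-in for the ToObject builtin call on the deferred path.
Value ToObject(Value v) {
  v.is_receiver = true;
  return v;
}

Value ConvertReceiver(ConvertReceiverMode mode, Value value, Value global_proxy) {
  if (mode == ConvertReceiverMode::kNullOrUndefined) return global_proxy;
  if (value.is_receiver) return value;
  if (mode == ConvertReceiverMode::kAny && value.is_null_or_undefined)
    return global_proxy;
  return ToObject(value);
}

int main() {
  Value undefined{false, true};
  Value global_proxy{true, false};
  Value r = ConvertReceiver(ConvertReceiverMode::kAny, undefined, global_proxy);
  return r.is_receiver ? 0 : 1;
}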
4857 
4858 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundUp(Node* node) {
4859  // Nothing to be done if a fast hardware instruction is available.
4860  if (machine()->Float64RoundUp().IsSupported()) {
4861  return Nothing<Node*>();
4862  }
4863 
4864  Node* const input = node->InputAt(0);
4865 
4866  // General case for ceil.
4867  //
4868  // if 0.0 < input then
4869  // if 2^52 <= input then
4870  // input
4871  // else
4872  // let temp1 = (2^52 + input) - 2^52 in
4873  // if temp1 < input then
4874  // temp1 + 1
4875  // else
4876  // temp1
4877  // else
4878  // if input == 0 then
4879  // input
4880  // else
4881  // if input <= -2^52 then
4882  // input
4883  // else
4884  // let temp1 = -0 - input in
4885  // let temp2 = (2^52 + temp1) - 2^52 in
4886  // let temp3 = (if temp1 < temp2 then temp2 - 1 else temp2) in
4887  // -0 - temp3
4888 
4889  auto if_not_positive = __ MakeDeferredLabel();
4890  auto if_greater_than_two_52 = __ MakeDeferredLabel();
4891  auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
4892  auto if_zero = __ MakeDeferredLabel();
4893  auto done_temp3 = __ MakeLabel(MachineRepresentation::kFloat64);
4894  auto done = __ MakeLabel(MachineRepresentation::kFloat64);
4895 
4896  Node* const zero = __ Float64Constant(0.0);
4897  Node* const two_52 = __ Float64Constant(4503599627370496.0E0);
4898  Node* const one = __ Float64Constant(1.0);
4899 
4900  Node* check0 = __ Float64LessThan(zero, input);
4901  __ GotoIfNot(check0, &if_not_positive);
4902  {
4903  Node* check1 = __ Float64LessThanOrEqual(two_52, input);
4904  __ GotoIf(check1, &if_greater_than_two_52);
4905  {
4906  Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
4907  __ GotoIfNot(__ Float64LessThan(temp1, input), &done, temp1);
4908  __ Goto(&done, __ Float64Add(temp1, one));
4909  }
4910 
4911  __ Bind(&if_greater_than_two_52);
4912  __ Goto(&done, input);
4913  }
4914 
4915  __ Bind(&if_not_positive);
4916  {
4917  Node* check1 = __ Float64Equal(input, zero);
4918  __ GotoIf(check1, &if_zero);
4919 
4920  Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
4921  Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
4922  __ GotoIf(check2, &if_less_than_minus_two_52);
4923 
4924  {
4925  Node* const minus_zero = __ Float64Constant(-0.0);
4926  Node* temp1 = __ Float64Sub(minus_zero, input);
4927  Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
4928  Node* check3 = __ Float64LessThan(temp1, temp2);
4929  __ GotoIfNot(check3, &done_temp3, temp2);
4930  __ Goto(&done_temp3, __ Float64Sub(temp2, one));
4931 
4932  __ Bind(&done_temp3);
4933  Node* temp3 = done_temp3.PhiAt(0);
4934  __ Goto(&done, __ Float64Sub(minus_zero, temp3));
4935  }
4936  __ Bind(&if_less_than_minus_two_52);
4937  __ Goto(&done, input);
4938 
4939  __ Bind(&if_zero);
4940  __ Goto(&done, input);
4941  }
4942  __ Bind(&done);
4943  return Just(done.PhiAt(0));
4944 }
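When no Float64RoundUp machine instruction is available, the lowering builds ceil out of the classic 2^52 trick: adding and then subtracting 2^52 in double precision rounds the magnitude to an integer, after which at most one correction step is needed. A plain C++ transcription of the pseudocode above, as a sketch for ordinary finite inputs (it follows the comment rather than adding explicit NaN handling):

#include <cstdio>

// Software ceil via the 2^52 trick, following the pseudocode above.
double RoundUp(double input) {
  const double two_52 = 4503599627370496.0;  // 2^52
  if (0.0 < input) {
    if (two_52 <= input) return input;           // already an integer
    double temp1 = (two_52 + input) - two_52;    // round-to-nearest integer
    return temp1 < input ? temp1 + 1.0 : temp1;  // correct upward if rounded down
  }
  if (input == 0.0) return input;                // preserves -0.0
  if (input <= -two_52) return input;
  double temp1 = -0.0 - input;
  double temp2 = (two_52 + temp1) - two_52;
  double temp3 = temp1 < temp2 ? temp2 - 1.0 : temp2;  // floor of |input|
  return -0.0 - temp3;                           // -floor(-input) == ceil(input)
}

int main() {
  std::printf("%g %g %g\n", RoundUp(1.2), RoundUp(-1.2), RoundUp(2.0));  // 2 -1 2
}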
4945 
4946 Node* EffectControlLinearizer::BuildFloat64RoundDown(Node* value) {
4947  if (machine()->Float64RoundDown().IsSupported()) {
4948  return __ Float64RoundDown(value);
4949  }
4950 
4951  Node* const input = value;
4952 
4953  // General case for floor.
4954  //
4955  // if 0.0 < input then
4956  // if 2^52 <= input then
4957  // input
4958  // else
4959  // let temp1 = (2^52 + input) - 2^52 in
4960  // if input < temp1 then
4961  // temp1 - 1
4962  // else
4963  // temp1
4964  // else
4965  // if input == 0 then
4966  // input
4967  // else
4968  // if input <= -2^52 then
4969  // input
4970  // else
4971  // let temp1 = -0 - input in
4972  // let temp2 = (2^52 + temp1) - 2^52 in
4973  // if temp2 < temp1 then
4974  // -1 - temp2
4975  // else
4976  // -0 - temp2
4977 
4978  auto if_not_positive = __ MakeDeferredLabel();
4979  auto if_greater_than_two_52 = __ MakeDeferredLabel();
4980  auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
4981  auto if_temp2_lt_temp1 = __ MakeLabel();
4982  auto if_zero = __ MakeDeferredLabel();
4983  auto done = __ MakeLabel(MachineRepresentation::kFloat64);
4984 
4985  Node* const zero = __ Float64Constant(0.0);
4986  Node* const two_52 = __ Float64Constant(4503599627370496.0E0);
4987 
4988  Node* check0 = __ Float64LessThan(zero, input);
4989  __ GotoIfNot(check0, &if_not_positive);
4990  {
4991  Node* check1 = __ Float64LessThanOrEqual(two_52, input);
4992  __ GotoIf(check1, &if_greater_than_two_52);
4993  {
4994  Node* const one = __ Float64Constant(1.0);
4995  Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
4996  __ GotoIfNot(__ Float64LessThan(input, temp1), &done, temp1);
4997  __ Goto(&done, __ Float64Sub(temp1, one));
4998  }
4999 
5000  __ Bind(&if_greater_than_two_52);
5001  __ Goto(&done, input);
5002  }
5003 
5004  __ Bind(&if_not_positive);
5005  {
5006  Node* check1 = __ Float64Equal(input, zero);
5007  __ GotoIf(check1, &if_zero);
5008 
5009  Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
5010  Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
5011  __ GotoIf(check2, &if_less_than_minus_two_52);
5012 
5013  {
5014  Node* const minus_zero = __ Float64Constant(-0.0);
5015  Node* temp1 = __ Float64Sub(minus_zero, input);
5016  Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
5017  Node* check3 = __ Float64LessThan(temp2, temp1);
5018  __ GotoIf(check3, &if_temp2_lt_temp1);
5019  __ Goto(&done, __ Float64Sub(minus_zero, temp2));
5020 
5021  __ Bind(&if_temp2_lt_temp1);
5022  __ Goto(&done, __ Float64Sub(__ Float64Constant(-1.0), temp2));
5023  }
5024  __ Bind(&if_less_than_minus_two_52);
5025  __ Goto(&done, input);
5026 
5027  __ Bind(&if_zero);
5028  __ Goto(&done, input);
5029  }
5030  __ Bind(&done);
5031  return done.PhiAt(0);
5032 }
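The floor fallback is the mirror image of the ceil fallback: round the magnitude with the same 2^52 trick, then correct downward instead of upward. For reference, the pseudocode above as plain C++ (again only a sketch for finite inputs):

#include <cstdio>

// Software floor via the 2^52 trick, following BuildFloat64RoundDown.
double RoundDown(double input) {
  const double two_52 = 4503599627370496.0;  // 2^52
  if (0.0 < input) {
    if (two_52 <= input) return input;
    double temp1 = (two_52 + input) - two_52;    // round-to-nearest integer
    return input < temp1 ? temp1 - 1.0 : temp1;  // correct downward if rounded up
  }
  if (input == 0.0) return input;                // preserves -0.0
  if (input <= -two_52) return input;
  double temp1 = -0.0 - input;
  double temp2 = (two_52 + temp1) - two_52;
  return temp2 < temp1 ? -1.0 - temp2 : -0.0 - temp2;
}

int main() {
  std::printf("%g %g\n", RoundDown(1.8), RoundDown(-1.2));  // 1 -2
}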
5033 
5034 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundDown(Node* node) {
5035  // Nothing to be done if a fast hardware instruction is available.
5036  if (machine()->Float64RoundDown().IsSupported()) {
5037  return Nothing<Node*>();
5038  }
5039 
5040  Node* const input = node->InputAt(0);
5041  return Just(BuildFloat64RoundDown(input));
5042 }
5043 
5044 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundTiesEven(Node* node) {
5045  // Nothing to be done if a fast hardware instruction is available.
5046  if (machine()->Float64RoundTiesEven().IsSupported()) {
5047  return Nothing<Node*>();
5048  }
5049 
5050  Node* const input = node->InputAt(0);
5051 
5052  // Generate case for round ties to even:
5053  //
5054  // let value = floor(input) in
5055  // let temp1 = input - value in
5056  // if temp1 < 0.5 then
5057  // value
5058  // else if 0.5 < temp1 then
5059  // value + 1.0
5060  // else
5061  // let temp2 = value % 2.0 in
5062  // if temp2 == 0.0 then
5063  // value
5064  // else
5065  // value + 1.0
5066 
5067  auto if_is_half = __ MakeLabel();
5068  auto done = __ MakeLabel(MachineRepresentation::kFloat64);
5069 
5070  Node* value = BuildFloat64RoundDown(input);
5071  Node* temp1 = __ Float64Sub(input, value);
5072 
5073  Node* const half = __ Float64Constant(0.5);
5074  Node* check0 = __ Float64LessThan(temp1, half);
5075  __ GotoIf(check0, &done, value);
5076 
5077  Node* const one = __ Float64Constant(1.0);
5078  Node* check1 = __ Float64LessThan(half, temp1);
5079  __ GotoIfNot(check1, &if_is_half);
5080  __ Goto(&done, __ Float64Add(value, one));
5081 
5082  __ Bind(&if_is_half);
5083  Node* temp2 = __ Float64Mod(value, __ Float64Constant(2.0));
5084  Node* check2 = __ Float64Equal(temp2, __ Float64Constant(0.0));
5085  __ GotoIf(check2, &done, value);
5086  __ Goto(&done, __ Float64Add(value, one));
5087 
5088  __ Bind(&done);
5089  return Just(done.PhiAt(0));
5090 }
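Ties-to-even is built entirely on top of the floor helper: take the fractional part, round down or up when it is clearly below or above one half, and break exact ties toward the even neighbour. The same logic in ordinary C++, with std::floor and std::fmod standing in for the graph-assembler helpers (the function name is mine, not the lowering's):

#include <cmath>
#include <cstdio>

// Round half to even, following the pseudocode above.
double RoundTiesEven(double input) {
  double value = std::floor(input);
  double temp1 = input - value;  // fractional part in [0, 1)
  if (temp1 < 0.5) return value;
  if (0.5 < temp1) return value + 1.0;
  // Exact tie: round to the even neighbour.
  return std::fmod(value, 2.0) == 0.0 ? value : value + 1.0;
}

int main() {
  std::printf("%g %g %g\n", RoundTiesEven(0.5), RoundTiesEven(1.5),
              RoundTiesEven(2.5));  // 0 2 2
}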
5091 
5092 Node* EffectControlLinearizer::BuildFloat64RoundTruncate(Node* input) {
5093  if (machine()->Float64RoundTruncate().IsSupported()) {
5094  return __ Float64RoundTruncate(input);
5095  }
5096  // General case for trunc.
5097  //
5098  // if 0.0 < input then
5099  // if 2^52 <= input then
5100  // input
5101  // else
5102  // let temp1 = (2^52 + input) - 2^52 in
5103  // if input < temp1 then
5104  // temp1 - 1
5105  // else
5106  // temp1
5107  // else
5108  // if input == 0 then
5109  // input
5110  // else
5111  // if input <= -2^52 then
5112  // input
5113  // else
5114  // let temp1 = -0 - input in
5115  // let temp2 = (2^52 + temp1) - 2^52 in
5116  // let temp3 = (if temp1 < temp2 then temp2 - 1 else temp2) in
5117  // -0 - temp3
5118  //
5119  // Note: We do not use the Diamond helper class here, because it really hurts
5120  // readability with nested diamonds.
5121 
5122  auto if_not_positive = __ MakeDeferredLabel();
5123  auto if_greater_than_two_52 = __ MakeDeferredLabel();
5124  auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
5125  auto if_zero = __ MakeDeferredLabel();
5126  auto done_temp3 = __ MakeLabel(MachineRepresentation::kFloat64);
5127  auto done = __ MakeLabel(MachineRepresentation::kFloat64);
5128 
5129  Node* const zero = __ Float64Constant(0.0);
5130  Node* const two_52 = __ Float64Constant(4503599627370496.0E0);
5131  Node* const one = __ Float64Constant(1.0);
5132 
5133  Node* check0 = __ Float64LessThan(zero, input);
5134  __ GotoIfNot(check0, &if_not_positive);
5135  {
5136  Node* check1 = __ Float64LessThanOrEqual(two_52, input);
5137  __ GotoIf(check1, &if_greater_than_two_52);
5138  {
5139  Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
5140  __ GotoIfNot(__ Float64LessThan(input, temp1), &done, temp1);
5141  __ Goto(&done, __ Float64Sub(temp1, one));
5142  }
5143 
5144  __ Bind(&if_greater_than_two_52);
5145  __ Goto(&done, input);
5146  }
5147 
5148  __ Bind(&if_not_positive);
5149  {
5150  Node* check1 = __ Float64Equal(input, zero);
5151  __ GotoIf(check1, &if_zero);
5152 
5153  Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
5154  Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
5155  __ GotoIf(check2, &if_less_than_minus_two_52);
5156 
5157  {
5158  Node* const minus_zero = __ Float64Constant(-0.0);
5159  Node* temp1 = __ Float64Sub(minus_zero, input);
5160  Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
5161  Node* check3 = __ Float64LessThan(temp1, temp2);
5162  __ GotoIfNot(check3, &done_temp3, temp2);
5163  __ Goto(&done_temp3, __ Float64Sub(temp2, one));
5164 
5165  __ Bind(&done_temp3);
5166  Node* temp3 = done_temp3.PhiAt(0);
5167  __ Goto(&done, __ Float64Sub(minus_zero, temp3));
5168  }
5169  __ Bind(&if_less_than_minus_two_52);
5170  __ Goto(&done, input);
5171 
5172  __ Bind(&if_zero);
5173  __ Goto(&done, input);
5174  }
5175  __ Bind(&done);
5176  return done.PhiAt(0);
5177 }
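The truncation fallback repeats the floor structure for positive inputs and the ceil structure for negative ones, because rounding toward zero is floor above zero and ceil below it. The 2^52 expansion is the same as in the two sketches earlier; the identity it implements is simply the following (using libm calls here only to keep the sketch short):

#include <cmath>

// Rounding toward zero: floor for positive inputs, ceil for negative ones.
double RoundTruncate(double input) {
  return 0.0 < input ? std::floor(input) : std::ceil(input);
}

int main() { return RoundTruncate(-1.7) == -1.0 ? 0 : 1; }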
5178 
5179 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundTruncate(Node* node) {
5180  // Nothing to be done if a fast hardware instruction is available.
5181  if (machine()->Float64RoundTruncate().IsSupported()) {
5182  return Nothing<Node*>();
5183  }
5184 
5185  Node* const input = node->InputAt(0);
5186  return Just(BuildFloat64RoundTruncate(input));
5187 }
5188 
5189 Node* EffectControlLinearizer::LowerFindOrderedHashMapEntry(Node* node) {
5190  Node* table = NodeProperties::GetValueInput(node, 0);
5191  Node* key = NodeProperties::GetValueInput(node, 1);
5192 
5193  {
5194  Callable const callable =
5195  Builtins::CallableFor(isolate(), Builtins::kFindOrderedHashMapEntry);
5196  Operator::Properties const properties = node->op()->properties();
5197  CallDescriptor::Flags const flags = CallDescriptor::kNoFlags;
5198  auto call_descriptor = Linkage::GetStubCallDescriptor(
5199  graph()->zone(), callable.descriptor(),
5200  callable.descriptor().GetStackParameterCount(), flags, properties);
5201  return __ Call(call_descriptor, __ HeapConstant(callable.code()), table,
5202  key, __ NoContextConstant());
5203  }
5204 }
5205 
5206 Node* EffectControlLinearizer::ComputeUnseededHash(Node* value) {
5207  // See v8::internal::ComputeUnseededHash()
5208  value = __ Int32Add(__ Word32Xor(value, __ Int32Constant(0xFFFFFFFF)),
5209  __ Word32Shl(value, __ Int32Constant(15)));
5210  value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(12)));
5211  value = __ Int32Add(value, __ Word32Shl(value, __ Int32Constant(2)));
5212  value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(4)));
5213  value = __ Int32Mul(value, __ Int32Constant(2057));
5214  value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(16)));
5215  value = __ Word32And(value, __ Int32Constant(0x3FFFFFFF));
5216  return value;
5217 }
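ComputeUnseededHash transliterates the runtime's 32-bit integer hash into graph-assembler operations: a short sequence of shifts, adds and xors, ending with a mask to 30 bits so the result always fits in a Smi. As plain C++ the same mix reads:

#include <cstdint>
#include <cstdio>

// The same mixing steps as the graph-assembler sequence above.
uint32_t UnseededHash(uint32_t v) {
  v = (v ^ 0xFFFFFFFFu) + (v << 15);
  v = v ^ (v >> 12);
  v = v + (v << 2);
  v = v ^ (v >> 4);
  v = v * 2057u;
  v = v ^ (v >> 16);
  return v & 0x3FFFFFFFu;  // 30 bits, so the hash always fits in a Smi
}

int main() { std::printf("%u\n", UnseededHash(42)); }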
5218 
5219 Node* EffectControlLinearizer::LowerFindOrderedHashMapEntryForInt32Key(
5220  Node* node) {
5221  Node* table = NodeProperties::GetValueInput(node, 0);
5222  Node* key = NodeProperties::GetValueInput(node, 1);
5223 
5224  // Compute the integer hash code.
5225  Node* hash = ChangeUint32ToUintPtr(ComputeUnseededHash(key));
5226 
5227  Node* number_of_buckets = ChangeSmiToIntPtr(__ LoadField(
5228  AccessBuilder::ForOrderedHashMapOrSetNumberOfBuckets(), table));
5229  hash = __ WordAnd(hash, __ IntSub(number_of_buckets, __ IntPtrConstant(1)));
5230  Node* first_entry = ChangeSmiToIntPtr(__ Load(
5231  MachineType::TaggedSigned(), table,
5232  __ IntAdd(__ WordShl(hash, __ IntPtrConstant(kPointerSizeLog2)),
5233  __ IntPtrConstant(OrderedHashMap::kHashTableStartOffset -
5234  kHeapObjectTag))));
5235 
5236  auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
5237  auto done = __ MakeLabel(MachineType::PointerRepresentation());
5238  __ Goto(&loop, first_entry);
5239  __ Bind(&loop);
5240  {
5241  Node* entry = loop.PhiAt(0);
5242  Node* check =
5243  __ WordEqual(entry, __ IntPtrConstant(OrderedHashMap::kNotFound));
5244  __ GotoIf(check, &done, entry);
5245  entry = __ IntAdd(
5246  __ IntMul(entry, __ IntPtrConstant(OrderedHashMap::kEntrySize)),
5247  number_of_buckets);
5248 
5249  Node* candidate_key = __ Load(
5250  MachineType::AnyTagged(), table,
5251  __ IntAdd(__ WordShl(entry, __ IntPtrConstant(kPointerSizeLog2)),
5252  __ IntPtrConstant(OrderedHashMap::kHashTableStartOffset -
5253  kHeapObjectTag)));
5254 
5255  auto if_match = __ MakeLabel();
5256  auto if_notmatch = __ MakeLabel();
5257  auto if_notsmi = __ MakeDeferredLabel();
5258  __ GotoIfNot(ObjectIsSmi(candidate_key), &if_notsmi);
5259  __ Branch(__ Word32Equal(ChangeSmiToInt32(candidate_key), key), &if_match,
5260  &if_notmatch);
5261 
5262  __ Bind(&if_notsmi);
5263  __ GotoIfNot(
5264  __ WordEqual(__ LoadField(AccessBuilder::ForMap(), candidate_key),
5265  __ HeapNumberMapConstant()),
5266  &if_notmatch);
5267  __ Branch(__ Float64Equal(__ LoadField(AccessBuilder::ForHeapNumberValue(),
5268  candidate_key),
5269  __ ChangeInt32ToFloat64(key)),
5270  &if_match, &if_notmatch);
5271 
5272  __ Bind(&if_match);
5273  __ Goto(&done, entry);
5274 
5275  __ Bind(&if_notmatch);
5276  {
5277  Node* next_entry = ChangeSmiToIntPtr(__ Load(
5278  MachineType::TaggedSigned(), table,
5279  __ IntAdd(
5280  __ WordShl(entry, __ IntPtrConstant(kPointerSizeLog2)),
5281  __ IntPtrConstant(OrderedHashMap::kHashTableStartOffset +
5282  OrderedHashMap::kChainOffset * kPointerSize -
5283  kHeapObjectTag))));
5284  __ Goto(&loop, next_entry);
5285  }
5286  }
5287 
5288  __ Bind(&done);
5289  return done.PhiAt(0);
5290 }
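The generated loop walks a standard bucketed hash table with chaining: the hash (masked to the power-of-two bucket count) selects a bucket, the bucket holds the index of the first entry, and each entry carries a chain link to the next candidate in the same bucket; Smi keys are compared as int32 and HeapNumber keys by their float64 value. A simplified C++ model of that lookup, with an illustrative layout rather than OrderedHashMap's real field offsets:

#include <cstdint>
#include <vector>

constexpr int kNotFound = -1;

// Illustrative stand-in for the pieces of OrderedHashMap the lookup touches.
struct SimpleOrderedHashMap {
  std::vector<int> buckets;    // bucket -> first entry index, or kNotFound
  std::vector<int32_t> keys;   // entry -> key
  std::vector<int> chains;     // entry -> next entry in the same bucket
};

int FindEntryForInt32Key(const SimpleOrderedHashMap& table, int32_t key,
                         uint32_t hash) {
  // The bucket count is a power of two, so masking replaces a modulo.
  int entry = table.buckets[hash & (table.buckets.size() - 1)];
  while (entry != kNotFound) {
    if (table.keys[entry] == key) return entry;
    entry = table.chains[entry];  // follow the collision chain
  }
  return kNotFound;
}

int main() {
  // Two buckets; bucket 0 chains entry 0 (key 7) to entry 1 (key 9).
  SimpleOrderedHashMap table{{0, kNotFound}, {7, 9}, {1, kNotFound}};
  return FindEntryForInt32Key(table, 9, 0) == 1 ? 0 : 1;
}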
5291 
5292 Node* EffectControlLinearizer::LowerDateNow(Node* node) {
5293  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
5294  Runtime::FunctionId id = Runtime::kDateCurrentTime;
5295  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
5296  graph()->zone(), id, 0, properties, CallDescriptor::kNoFlags);
5297  return __ Call(call_descriptor, __ CEntryStubConstant(1),
5298  __ ExternalConstant(ExternalReference::Create(id)),
5299  __ Int32Constant(0), __ NoContextConstant());
5300 }
5301 
5302 #undef __
5303 
5304 Factory* EffectControlLinearizer::factory() const {
5305  return isolate()->factory();
5306 }
5307 
5308 Isolate* EffectControlLinearizer::isolate() const {
5309  return jsgraph()->isolate();
5310 }
5311 
5312 } // namespace compiler
5313 } // namespace internal
5314 } // namespace v8