1// Copyright 2016 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/code-stub-assembler.h"
6
7#include "src/code-factory.h"
8#include "src/counters.h"
9#include "src/frames-inl.h"
10#include "src/frames.h"
11#include "src/function-kind.h"
12#include "src/heap/heap-inl.h" // For Page/MemoryChunk. TODO(jkummerow): Drop.
13#include "src/objects/api-callbacks.h"
14#include "src/objects/cell.h"
15#include "src/objects/descriptor-array.h"
16#include "src/objects/heap-number.h"
17#include "src/objects/oddball.h"
18#include "src/objects/ordered-hash-table-inl.h"
19#include "src/objects/property-cell.h"
20#include "src/wasm/wasm-objects.h"
21
22namespace v8 {
23namespace internal {
24
25using compiler::Node;
26template <class T>
27using TNode = compiler::TNode<T>;
28template <class T>
29using SloppyTNode = compiler::SloppyTNode<T>;
30
31CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
32 : compiler::CodeAssembler(state), BaseBuiltinsFromDSLAssembler(state) {
33 if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
34 HandleBreakOnNode();
35 }
36}
37
38void CodeStubAssembler::HandleBreakOnNode() {
39 // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
40 // string specifying the name of a stub and NODE is number specifying node id.
41 const char* name = state()->name();
42 size_t name_length = strlen(name);
43 if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
44 // Different name.
45 return;
46 }
47 size_t option_length = strlen(FLAG_csa_trap_on_node);
48 if (option_length < name_length + 2 ||
49 FLAG_csa_trap_on_node[name_length] != ',') {
50 // Option is too short.
51 return;
52 }
53 const char* start = &FLAG_csa_trap_on_node[name_length + 1];
54 char* end;
55 int node_id = static_cast<int>(strtol(start, &end, 10));
56 if (start == end) {
57 // Bad node id.
58 return;
59 }
60 BreakOnNode(node_id);
61}
62
63void CodeStubAssembler::Assert(const BranchGenerator& branch,
64 const char* message, const char* file, int line,
65 Node* extra_node1, const char* extra_node1_name,
66 Node* extra_node2, const char* extra_node2_name,
67 Node* extra_node3, const char* extra_node3_name,
68 Node* extra_node4, const char* extra_node4_name,
69 Node* extra_node5,
70 const char* extra_node5_name) {
71#if defined(DEBUG)
72 if (FLAG_debug_code) {
73 Check(branch, message, file, line, extra_node1, extra_node1_name,
74 extra_node2, extra_node2_name, extra_node3, extra_node3_name,
75 extra_node4, extra_node4_name, extra_node5, extra_node5_name);
76 }
77#endif
78}
79
80void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
81 const char* message, const char* file, int line,
82 Node* extra_node1, const char* extra_node1_name,
83 Node* extra_node2, const char* extra_node2_name,
84 Node* extra_node3, const char* extra_node3_name,
85 Node* extra_node4, const char* extra_node4_name,
86 Node* extra_node5,
87 const char* extra_node5_name) {
88#if defined(DEBUG)
89 if (FLAG_debug_code) {
90 Check(condition_body, message, file, line, extra_node1, extra_node1_name,
91 extra_node2, extra_node2_name, extra_node3, extra_node3_name,
92 extra_node4, extra_node4_name, extra_node5, extra_node5_name);
93 }
94#endif
95}
96
#ifdef DEBUG
namespace {
// Emits a runtime call printing {node} prefixed by {node_name}, but only
// when a node was actually supplied; used to dump extra assert context.
void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
                            const char* node_name) {
  if (node == nullptr) return;
  csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
                   csa->StringConstant(node_name), node);
}
}  // namespace
#endif
108
109void CodeStubAssembler::Check(const BranchGenerator& branch,
110 const char* message, const char* file, int line,
111 Node* extra_node1, const char* extra_node1_name,
112 Node* extra_node2, const char* extra_node2_name,
113 Node* extra_node3, const char* extra_node3_name,
114 Node* extra_node4, const char* extra_node4_name,
115 Node* extra_node5, const char* extra_node5_name) {
116 Label ok(this);
117 Label not_ok(this, Label::kDeferred);
118 if (message != nullptr && FLAG_code_comments) {
119 Comment("[ Assert: ", message);
120 } else {
121 Comment("[ Assert");
122 }
123 branch(&ok, &not_ok);
124
125 BIND(&not_ok);
126 FailAssert(message, file, line, extra_node1, extra_node1_name, extra_node2,
127 extra_node2_name, extra_node3, extra_node3_name, extra_node4,
128 extra_node4_name, extra_node5, extra_node5_name);
129
130 BIND(&ok);
131 Comment("] Assert");
132}
133
134void CodeStubAssembler::Check(const NodeGenerator& condition_body,
135 const char* message, const char* file, int line,
136 Node* extra_node1, const char* extra_node1_name,
137 Node* extra_node2, const char* extra_node2_name,
138 Node* extra_node3, const char* extra_node3_name,
139 Node* extra_node4, const char* extra_node4_name,
140 Node* extra_node5, const char* extra_node5_name) {
141 BranchGenerator branch = [=](Label* ok, Label* not_ok) {
142 Node* condition = condition_body();
143 DCHECK_NOT_NULL(condition);
144 Branch(condition, ok, not_ok);
145 };
146
147 Check(branch, message, file, line, extra_node1, extra_node1_name, extra_node2,
148 extra_node2_name, extra_node3, extra_node3_name, extra_node4,
149 extra_node4_name, extra_node5, extra_node5_name);
150}
151
152void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
153 Label ok(this), not_ok(this, Label::kDeferred);
154 Branch(condition, &ok, &not_ok);
155 BIND(&not_ok);
156 {
157 DebugBreak();
158 Goto(&ok);
159 }
160 BIND(&ok);
161}
162
163void CodeStubAssembler::FailAssert(
164 const char* message, const char* file, int line, Node* extra_node1,
165 const char* extra_node1_name, Node* extra_node2,
166 const char* extra_node2_name, Node* extra_node3,
167 const char* extra_node3_name, Node* extra_node4,
168 const char* extra_node4_name, Node* extra_node5,
169 const char* extra_node5_name) {
170 DCHECK_NOT_NULL(message);
171 char chars[1024];
172 Vector<char> buffer(chars);
173 if (file != nullptr) {
174 SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
175 } else {
176 SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
177 }
178 Node* message_node = StringConstant(&(buffer[0]));
179
180#ifdef DEBUG
181 // Only print the extra nodes in debug builds.
182 MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
183 MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
184 MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
185 MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
186 MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
187#endif
188
189 DebugAbort(message_node);
190 Unreachable();
191}
192
193Node* CodeStubAssembler::SelectImpl(TNode<BoolT> condition,
194 const NodeGenerator& true_body,
195 const NodeGenerator& false_body,
196 MachineRepresentation rep) {
197 VARIABLE(value, rep);
198 Label vtrue(this), vfalse(this), end(this);
199 Branch(condition, &vtrue, &vfalse);
200
201 BIND(&vtrue);
202 {
203 value.Bind(true_body());
204 Goto(&end);
205 }
206 BIND(&vfalse);
207 {
208 value.Bind(false_body());
209 Goto(&end);
210 }
211
212 BIND(&end);
213 return value.value();
214}
215
216TNode<Int32T> CodeStubAssembler::SelectInt32Constant(
217 SloppyTNode<BoolT> condition, int true_value, int false_value) {
218 return SelectConstant<Int32T>(condition, Int32Constant(true_value),
219 Int32Constant(false_value));
220}
221
222TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(
223 SloppyTNode<BoolT> condition, int true_value, int false_value) {
224 return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
225 IntPtrConstant(false_value));
226}
227
228TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
229 SloppyTNode<BoolT> condition) {
230 return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
231}
232
233TNode<Smi> CodeStubAssembler::SelectSmiConstant(SloppyTNode<BoolT> condition,
234 Smi true_value,
235 Smi false_value) {
236 return SelectConstant<Smi>(condition, SmiConstant(true_value),
237 SmiConstant(false_value));
238}
239
// The sentinel "no context" value, encoded as a Smi.
TNode<Object> CodeStubAssembler::NoContextConstant() {
  return SmiConstant(Context::kNoContext);
}
243
// Generates a <Name>Constant() accessor for every root listed in
// HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST. The return type is derived via
// decltype from the corresponding Heap accessor; the value is loaded
// through LoadRoot at code-generation time.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)        \
  compiler::TNode<std::remove_pointer<std::remove_reference<decltype(        \
      std::declval<Heap>().rootAccessorName())>::type>::type>                \
      CodeStubAssembler::name##Constant() {                                  \
    return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
        std::declval<Heap>().rootAccessorName())>::type>::type>(             \
        LoadRoot(RootIndex::k##rootIndexName));                              \
  }
HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
#undef HEAP_CONSTANT_ACCESSOR
254
// Same as above, but for read-only roots: the accessor return type comes
// from ReadOnlyRoots instead of Heap.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)        \
  compiler::TNode<std::remove_pointer<std::remove_reference<decltype(        \
      std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>       \
      CodeStubAssembler::name##Constant() {                                  \
    return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
        std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>(    \
        LoadRoot(RootIndex::k##rootIndexName));                              \
  }
HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
#undef HEAP_CONSTANT_ACCESSOR
265
// Generates Is<Name>/IsNot<Name> predicates for every immovable root:
// because the objects never move, identity can be tested with a raw word
// comparison against the root constant.
#define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name)  \
  compiler::TNode<BoolT> CodeStubAssembler::Is##name(              \
      SloppyTNode<Object> value) {                                 \
    return WordEqual(value, name##Constant());                     \
  }                                                                \
  compiler::TNode<BoolT> CodeStubAssembler::IsNot##name(           \
      SloppyTNode<Object> value) {                                 \
    return WordNotEqual(value, name##Constant());                  \
  }
HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST)
#undef HEAP_CONSTANT_TEST
277
278Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
279 if (mode == SMI_PARAMETERS) {
280 return SmiConstant(value);
281 } else {
282 DCHECK_EQ(INTPTR_PARAMETERS, mode);
283 return IntPtrConstant(value);
284 }
285}
286
287bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
288 ParameterMode mode) {
289 int32_t constant_test;
290 Smi smi_test;
291 if (mode == INTPTR_PARAMETERS) {
292 if (ToInt32Constant(test, constant_test) && constant_test == 0) {
293 return true;
294 }
295 } else {
296 DCHECK_EQ(mode, SMI_PARAMETERS);
297 if (ToSmiConstant(test, &smi_test) && smi_test->value() == 0) {
298 return true;
299 }
300 }
301 return false;
302}
303
304bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
305 int* value,
306 ParameterMode mode) {
307 int32_t int32_constant;
308 if (mode == INTPTR_PARAMETERS) {
309 if (ToInt32Constant(maybe_constant, int32_constant)) {
310 *value = int32_constant;
311 return true;
312 }
313 } else {
314 DCHECK_EQ(mode, SMI_PARAMETERS);
315 Smi smi_constant;
316 if (ToSmiConstant(maybe_constant, &smi_constant)) {
317 *value = Smi::ToInt(smi_constant);
318 return true;
319 }
320 }
321 return false;
322}
323
324TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
325 TNode<IntPtrT> value) {
326 Comment("IntPtrRoundUpToPowerOfTwo32");
327 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
328 value = Signed(IntPtrSub(value, IntPtrConstant(1)));
329 for (int i = 1; i <= 16; i *= 2) {
330 value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
331 }
332 return Signed(IntPtrAdd(value, IntPtrConstant(1)));
333}
334
335Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
336 if (mode == SMI_PARAMETERS) {
337 return TaggedIsSmi(value);
338 } else {
339 return Int32Constant(1);
340 }
341}
342
// Tests whether {value} is a non-zero power of two via the classic identity
// value && !(value & (value - 1)). The Select maps value == 0 to a non-zero
// word (1) so the final equality against zero yields false for zero input.
TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
  // value && !(value & (value - 1))
  return WordEqual(
      Select<IntPtrT>(
          WordEqual(value, IntPtrConstant(0)),
          [=] { return IntPtrConstant(1); },
          [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
      IntPtrConstant(0));
}
352
353TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
354 Node* one = Float64Constant(1.0);
355 Node* one_half = Float64Constant(0.5);
356
357 Label return_x(this);
358
359 // Round up {x} towards Infinity.
360 VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));
361
362 GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
363 &return_x);
364 var_x.Bind(Float64Sub(var_x.value(), one));
365 Goto(&return_x);
366
367 BIND(&return_x);
368 return TNode<Float64T>::UncheckedCast(var_x.value());
369}
370
// Rounds {x} up towards +Infinity. Uses the hardware round-up instruction
// when available; otherwise emulates it with the 2^52 addition trick, which
// forces rounding at the integer boundary for doubles of magnitude < 2^52.
TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundUpSupported()) {
    return Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  // 2^52 is the smallest double at which the ULP reaches 1.0.
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
422
// Rounds {x} down towards -Infinity. Uses the hardware round-down
// instruction when available; otherwise emulates it with the same 2^52
// addition trick as Float64Ceil above, correcting in the other direction.
TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundDownSupported()) {
    return Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  // 2^52 is the smallest double at which the ULP reaches 1.0.
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
474
// Rounds {x} to the nearest integer, resolving ties towards the even
// neighbor (banker's rounding), as used by ToUint8Clamp.
TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundTiesEvenSupported()) {
    return Float64RoundTiesEven(x);
  }
  // See ES#sec-touint8clamp for details.
  Node* f = Float64Floor(x);
  Node* f_and_half = Float64Add(f, Float64Constant(0.5));

  VARIABLE(var_result, MachineRepresentation::kFloat64);
  Label return_f(this), return_f_plus_one(this), done(this);

  GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
  GotoIf(Float64LessThan(x, f_and_half), &return_f);
  {
    // Exactly halfway: pick whichever of f / f+1 is even.
    Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
    Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
           &return_f_plus_one);
  }

  BIND(&return_f);
  var_result.Bind(f);
  Goto(&done);

  BIND(&return_f_plus_one);
  var_result.Bind(Float64Add(f, Float64Constant(1.0)));
  Goto(&done);

  BIND(&done);
  return TNode<Float64T>::UncheckedCast(var_result.value());
}
505
// Rounds {x} towards zero. Uses the hardware truncate when available;
// otherwise picks round-down for positive and round-up for negative inputs,
// falling back to the 2^52 addition trick where neither is supported.
TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
  if (IsFloat64RoundTruncateSupported()) {
    return Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  // 2^52 is the smallest double at which the ULP reaches 1.0.
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  BIND(&if_xgreaterthanzero);
  {
    if (IsFloat64RoundDownSupported()) {
      var_x.Bind(Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  BIND(&if_xnotgreaterthanzero);
  {
    if (IsFloat64RoundUpSupported()) {
      var_x.Bind(Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless its in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoIfNot(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  BIND(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  BIND(&return_x);
  return TNode<Float64T>::UncheckedCast(var_x.value());
}
566
567TNode<BoolT> CodeStubAssembler::IsValidSmi(TNode<Smi> smi) {
568 if (SmiValuesAre31Bits() && kSystemPointerSize == kInt64Size) {
569 // Check that the Smi value is properly sign-extended.
570 TNode<IntPtrT> value = Signed(BitcastTaggedToWord(smi));
571 return WordEqual(value, ChangeInt32ToIntPtr(TruncateIntPtrToInt32(value)));
572 }
573 return Int32TrueConstant();
574}
575
// Total number of bits a value is shifted left when tagged as a Smi
// (payload shift plus the tag bit itself).
Node* CodeStubAssembler::SmiShiftBitsConstant() {
  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}
579
580TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
581 TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
582 TNode<Smi> smi =
583 BitcastWordToTaggedSigned(WordShl(value_intptr, SmiShiftBitsConstant()));
584 return smi;
585}
586
587TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
588 intptr_t constant_value;
589 if (ToIntPtrConstant(value, constant_value)) {
590 return (static_cast<uintptr_t>(constant_value) <=
591 static_cast<uintptr_t>(Smi::kMaxValue))
592 ? Int32TrueConstant()
593 : Int32FalseConstant();
594 }
595
596 return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
597}
598
599TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
600 int32_t constant_value;
601 if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
602 return SmiConstant(constant_value);
603 }
604 TNode<Smi> smi =
605 BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
606 return smi;
607}
608
609TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
610 intptr_t constant_value;
611 if (ToIntPtrConstant(value, constant_value)) {
612 return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
613 }
614 return Signed(WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
615}
616
617TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
618 TNode<IntPtrT> result = SmiUntag(value);
619 return TruncateIntPtrToInt32(result);
620}
621
622TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
623 return ChangeInt32ToFloat64(SmiToInt32(value));
624}
625
626TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
627 return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
628}
629
630TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
631 return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
632}
633
634TNode<IntPtrT> CodeStubAssembler::TryIntPtrAdd(TNode<IntPtrT> a,
635 TNode<IntPtrT> b,
636 Label* if_overflow) {
637 TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
638 TNode<BoolT> overflow = Projection<1>(pair);
639 GotoIf(overflow, if_overflow);
640 return Projection<0>(pair);
641}
642
643TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
644 Label* if_overflow) {
645 if (SmiValuesAre32Bits()) {
646 return BitcastWordToTaggedSigned(TryIntPtrAdd(
647 BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs), if_overflow));
648 } else {
649 DCHECK(SmiValuesAre31Bits());
650 TNode<PairT<Int32T, BoolT>> pair =
651 Int32AddWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
652 TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
653 TNode<BoolT> overflow = Projection<1>(pair);
654 GotoIf(overflow, if_overflow);
655 TNode<Int32T> result = Projection<0>(pair);
656 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
657 }
658}
659
660TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
661 Label* if_overflow) {
662 if (SmiValuesAre32Bits()) {
663 TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(
664 BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
665 TNode<BoolT> overflow = Projection<1>(pair);
666 GotoIf(overflow, if_overflow);
667 TNode<IntPtrT> result = Projection<0>(pair);
668 return BitcastWordToTaggedSigned(result);
669 } else {
670 DCHECK(SmiValuesAre31Bits());
671 TNode<PairT<Int32T, BoolT>> pair =
672 Int32SubWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
673 TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
674 TNode<BoolT> overflow = Projection<1>(pair);
675 GotoIf(overflow, if_overflow);
676 TNode<Int32T> result = Projection<0>(pair);
677 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
678 }
679}
680
681TNode<Number> CodeStubAssembler::NumberMax(SloppyTNode<Number> a,
682 SloppyTNode<Number> b) {
683 // TODO(danno): This could be optimized by specifically handling smi cases.
684 TVARIABLE(Number, result);
685 Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
686 GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
687 GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
688 result = NanConstant();
689 Goto(&done);
690 BIND(&greater_than_equal_a);
691 result = a;
692 Goto(&done);
693 BIND(&greater_than_equal_b);
694 result = b;
695 Goto(&done);
696 BIND(&done);
697 return result.value();
698}
699
700TNode<Number> CodeStubAssembler::NumberMin(SloppyTNode<Number> a,
701 SloppyTNode<Number> b) {
702 // TODO(danno): This could be optimized by specifically handling smi cases.
703 TVARIABLE(Number, result);
704 Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
705 GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
706 GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
707 result = NanConstant();
708 Goto(&done);
709 BIND(&greater_than_equal_a);
710 result = b;
711 Goto(&done);
712 BIND(&greater_than_equal_b);
713 result = a;
714 Goto(&done);
715 BIND(&done);
716 return result.value();
717}
718
// Converts {index} into an absolute index clamped to [0, length], as used
// by Array.prototype.slice-style relative indexing: negative indices count
// back from {length}, and out-of-range values clamp to the nearest bound.
TNode<IntPtrT> CodeStubAssembler::ConvertToRelativeIndex(
    TNode<Context> context, TNode<Object> index, TNode<IntPtrT> length) {
  TVARIABLE(IntPtrT, result);

  // ToInteger with -0 truncated to +0, so the Smi path covers all integers
  // in Smi range.
  TNode<Number> const index_int =
      ToInteger_Inline(context, index, CodeStubAssembler::kTruncateMinusZero);
  TNode<IntPtrT> zero = IntPtrConstant(0);

  Label done(this);
  Label if_issmi(this), if_isheapnumber(this, Label::kDeferred);
  Branch(TaggedIsSmi(index_int), &if_issmi, &if_isheapnumber);

  BIND(&if_issmi);
  {
    TNode<Smi> const index_smi = CAST(index_int);
    result = Select<IntPtrT>(
        IntPtrLessThan(SmiUntag(index_smi), zero),
        [=] { return IntPtrMax(IntPtrAdd(length, SmiUntag(index_smi)), zero); },
        [=] { return IntPtrMin(SmiUntag(index_smi), length); });
    Goto(&done);
  }

  BIND(&if_isheapnumber);
  {
    // If {index} is a heap number, it is definitely out of bounds. If it is
    // negative, {index} = max({length} + {index}, 0) = 0. If it is positive,
    // set {index} to {length}.
    TNode<HeapNumber> const index_hn = CAST(index_int);
    TNode<Float64T> const float_zero = Float64Constant(0.);
    TNode<Float64T> const index_float = LoadHeapNumberValue(index_hn);
    result = SelectConstant<IntPtrT>(Float64LessThan(index_float, float_zero),
                                     zero, length);
    Goto(&done);
  }
  BIND(&done);
  return result.value();
}
756
// Computes {a} % {b} with JavaScript semantics: the result takes the sign
// of the dividend {a} (so a negative {a} with zero remainder yields -0),
// and a zero divisor yields NaN. Returns a Smi when representable,
// otherwise a HeapNumber.
TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  Label return_result(this, &var_result),
      return_minuszero(this, Label::kDeferred),
      return_nan(this, Label::kDeferred);

  // Untag {a} and {b}.
  TNode<Int32T> int_a = SmiToInt32(a);
  TNode<Int32T> int_b = SmiToInt32(b);

  // Return NaN if {b} is zero.
  GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);

  // Check if {a} is non-negative.
  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
  Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
         &if_aisnegative);

  BIND(&if_aisnotnegative);
  {
    // Fast case, don't need to check any other edge cases.
    TNode<Int32T> r = Int32Mod(int_a, int_b);
    var_result = SmiFromInt32(r);
    Goto(&return_result);
  }

  BIND(&if_aisnegative);
  {
    if (SmiValuesAre32Bits()) {
      // Check if {a} is kMinInt and {b} is -1 (only relevant if the
      // kMinInt is actually representable as a Smi).
      Label join(this);
      GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
      GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
      Goto(&join);
      BIND(&join);
    }

    // Perform the integer modulus operation.
    TNode<Int32T> r = Int32Mod(int_a, int_b);

    // Check if {r} is zero, and if so return -0, because we have to
    // take the sign of the left hand side {a}, which is negative.
    GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);

    // The remainder {r} can be outside the valid Smi range on 32bit
    // architectures, so we cannot just say SmiFromInt32(r) here.
    var_result = ChangeInt32ToTagged(r);
    Goto(&return_result);
  }

  BIND(&return_minuszero);
  var_result = MinusZeroConstant();
  Goto(&return_result);

  BIND(&return_nan);
  var_result = NanConstant();
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
819
// Multiplies two Smis with JavaScript semantics: returns a Smi when the
// product fits, falls back to a HeapNumber on 32-bit overflow, and
// produces -0.0 when the product is zero but one operand was negative.
TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
  VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  Node* lhs32 = SmiToInt32(a);
  Node* rhs32 = SmiToInt32(b);
  Node* pair = Int32MulWithOverflow(lhs32, rhs32);

  Node* overflow = Projection(1, pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    Node* answer = Projection(0, pair);
    Node* zero = Int32Constant(0);
    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
    BIND(&answer_not_zero);
    {
      var_result = ChangeInt32ToTagged(answer);
      Goto(&return_result);
    }
    BIND(&answer_zero);
    {
      // The sign bit of the OR of both inputs is set iff at least one input
      // was negative, in which case 0 * negative must be -0.0.
      Node* or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      BIND(&if_should_be_negative_zero);
      {
        var_result = MinusZeroConstant();
        Goto(&return_result);
      }
      BIND(&if_should_be_zero);
      {
        var_result = SmiConstant(0);
        Goto(&return_result);
      }
    }
  }
  BIND(&if_overflow);
  {
    // Overflowed: redo the multiplication in float64 and box the result.
    var_lhs_float64.Bind(SmiToFloat64(a));
    var_rhs_float64.Bind(SmiToFloat64(b));
    Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    var_result = AllocateHeapNumberWithValue(value);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
879
// Divides {dividend} by {divisor}, returning the quotient as a Smi, or
// jumps to {bailout} whenever the exact result is not a representable Smi:
// division by zero, a -0 result (0 / negative), kMinInt / -1 overflow, or a
// non-zero remainder.
TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
                                        Label* bailout) {
  // Both inputs are Smis. Bailout to floating point division if {divisor}
  // is zero.
  GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);

  // Do floating point division if {dividend} is zero and {divisor} is
  // negative.
  Label dividend_is_zero(this), dividend_is_not_zero(this);
  Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero,
         &dividend_is_not_zero);

  BIND(&dividend_is_zero);
  {
    GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
    Goto(&dividend_is_not_zero);
  }
  BIND(&dividend_is_not_zero);

  TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
  TNode<Int32T> untagged_dividend = SmiToInt32(dividend);

  // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
  // if the Smi size is 31) and {divisor} is -1.
  Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
  Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
         &divisor_is_minus_one, &divisor_is_not_minus_one);

  BIND(&divisor_is_minus_one);
  {
    GotoIf(Word32Equal(
               untagged_dividend,
               Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
           bailout);
    Goto(&divisor_is_not_minus_one);
  }
  BIND(&divisor_is_not_minus_one);

  TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
  TNode<Int32T> truncated = Signed(Int32Mul(untagged_result, untagged_divisor));

  // Do floating point division if the remainder is not 0.
  GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);

  return SmiFromInt32(untagged_result);
}
926
927TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
928 TNode<Smi> y) {
929 TNode<ExternalReference> smi_lexicographic_compare =
930 ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
931 TNode<ExternalReference> isolate_ptr =
932 ExternalConstant(ExternalReference::isolate_address(isolate()));
933 return CAST(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged(),
934 std::make_pair(MachineType::Pointer(), isolate_ptr),
935 std::make_pair(MachineType::AnyTagged(), x),
936 std::make_pair(MachineType::AnyTagged(), y)));
937}
938
939TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
940 SloppyTNode<IntPtrT> value) {
941 if (Is64()) {
942 return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
943 }
944 return ReinterpretCast<Int32T>(value);
945}
946
947TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
948 return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
949 IntPtrConstant(0));
950}
951
952TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
953 return WordEqual(
954 WordAnd(BitcastMaybeObjectToWord(a), IntPtrConstant(kSmiTagMask)),
955 IntPtrConstant(0));
956}
957
958TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
959 return WordNotEqual(
960 WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
961 IntPtrConstant(0));
962}
963
964TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
965 return WordEqual(WordAnd(BitcastTaggedToWord(a),
966 IntPtrConstant(kSmiTagMask | kSmiSignMask)),
967 IntPtrConstant(0));
968}
969
970TNode<BoolT> CodeStubAssembler::WordIsAligned(SloppyTNode<WordT> word,
971 size_t alignment) {
972 DCHECK(base::bits::IsPowerOfTwo(alignment));
973 return WordEqual(IntPtrConstant(0),
974 WordAnd(word, IntPtrConstant(alignment - 1)));
975}
976
#if DEBUG
// Debug-only overload: forwards the extra debug info to the base assembler
// when binding the label.
void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
  CodeAssembler::Bind(label, debug_info);
}
#endif  // DEBUG
982
983void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
984
// Loads the double at |index| (a Smi) from |array|, jumping to |if_hole| if
// the element is the hole sentinel.
TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
    TNode<FixedDoubleArray> array, TNode<Smi> index, Label* if_hole) {
  return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
                                     SMI_PARAMETERS, if_hole);
}
990
// As above, but with an untagged IntPtrT index.
TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
    TNode<FixedDoubleArray> array, TNode<IntPtrT> index, Label* if_hole) {
  return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
                                     INTPTR_PARAMETERS, if_hole);
}
996
// Walks the prototype chain starting at |receiver_map| and branches to
// |definitely_no_elements| if every prototype up to null has empty elements
// (the empty FixedArray or the empty slow element dictionary); otherwise
// branches to |possibly_elements|.
void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
    Node* receiver_map, Label* definitely_no_elements,
    Label* possibly_elements) {
  CSA_SLOW_ASSERT(this, IsMap(receiver_map));
  VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
  Label loop_body(this, &var_map);
  Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);
  Node* empty_slow_element_dictionary =
      LoadRoot(RootIndex::kEmptySlowElementDictionary);
  Goto(&loop_body);

  BIND(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    // End of the chain without finding any elements.
    GotoIf(IsNull(prototype), definitely_no_elements);
    Node* prototype_map = LoadMap(prototype);
    TNode<Int32T> prototype_instance_type = LoadMapInstanceType(prototype_map);

    // Pessimistically assume elements if a Proxy, Special API Object,
    // or JSValue wrapper is found on the prototype chain. After this
    // instance type check, it's not necessary to check for interceptors or
    // access checks.
    Label if_custom(this, Label::kDeferred), if_notcustom(this);
    Branch(IsCustomElementsReceiverInstanceType(prototype_instance_type),
           &if_custom, &if_notcustom);

    BIND(&if_custom);
    {
      // For string JSValue wrappers we still support the checks as long
      // as they wrap the empty string.
      GotoIfNot(InstanceTypeEqual(prototype_instance_type, JS_VALUE_TYPE),
                possibly_elements);
      Node* prototype_value = LoadJSValueValue(prototype);
      Branch(IsEmptyString(prototype_value), &if_notcustom, possibly_elements);
    }

    BIND(&if_notcustom);
    {
      Node* prototype_elements = LoadElements(prototype);
      var_map.Bind(prototype_map);
      // Empty elements: continue walking with the prototype's map.
      GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
      Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
             &loop_body, possibly_elements);
    }
  }
}
1044
// Branches to |if_true| if |object| is a JSReceiver, to |if_false| otherwise
// (Smis are rejected first so the map load in IsJSReceiver is safe).
void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
                                           Label* if_false) {
  GotoIf(TaggedIsSmi(object), if_false);
  // JSReceiver instance types form the last range of the instance type enum.
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  Branch(IsJSReceiver(object), if_true, if_false);
}
1051
// Jumps to |if_true| when the isolate's force-slow-path byte is non-zero.
// Compiles to nothing unless V8_ENABLE_FORCE_SLOW_PATH is defined.
void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
#ifdef V8_ENABLE_FORCE_SLOW_PATH
  Node* const force_slow_path_addr =
      ExternalConstant(ExternalReference::force_slow_path(isolate()));
  Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);

  GotoIf(force_slow, if_true);
#endif
}
1061
1062void CodeStubAssembler::GotoIfDebugExecutionModeChecksSideEffects(
1063 Label* if_true) {
1064 STATIC_ASSERT(sizeof(DebugInfo::ExecutionMode) >= sizeof(int32_t));
1065
1066 TNode<ExternalReference> execution_mode_address = ExternalConstant(
1067 ExternalReference::debug_execution_mode_address(isolate()));
1068 TNode<Int32T> execution_mode =
1069 UncheckedCast<Int32T>(Load(MachineType::Int32(), execution_mode_address));
1070
1071 GotoIf(Word32Equal(execution_mode, Int32Constant(DebugInfo::kSideEffects)),
1072 if_true);
1073}
1074
// Bump-pointer allocates |size_in_bytes| in the space described by the
// |top_address|/|limit_address| pair, falling back to the runtime when the
// space is exhausted, double alignment requires a filler, a large-object
// allocation is requested, or the size is not a valid positive Smi.
TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
                                                 AllocationFlags flags,
                                                 TNode<RawPtrT> top_address,
                                                 TNode<RawPtrT> limit_address) {
  Label if_out_of_memory(this, Label::kDeferred);

  // TODO(jgruber,jkummerow): Extract the slow paths (= probably everything
  // but bump pointer allocation) into a builtin to save code space. The
  // size_in_bytes check may be moved there as well since a non-smi
  // size_in_bytes probably doesn't fit into the bump pointer region
  // (double-check that).

  intptr_t size_in_bytes_constant;
  bool size_in_bytes_is_constant = false;
  if (ToIntPtrConstant(size_in_bytes, size_in_bytes_constant)) {
    // Constant sizes are validated at stub-generation time.
    size_in_bytes_is_constant = true;
    CHECK(Internals::IsValidSmi(size_in_bytes_constant));
    CHECK_GT(size_in_bytes_constant, 0);
  } else {
    // Dynamic sizes are validated at runtime.
    GotoIfNot(IsValidPositiveSmi(size_in_bytes), &if_out_of_memory);
  }

  TNode<RawPtrT> top =
      UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), top_address));
  TNode<RawPtrT> limit =
      UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), limit_address));

  // If there's not enough space, call the runtime.
  TVARIABLE(Object, result);
  Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);

  bool needs_double_alignment = flags & kDoubleAlignment;

  if (flags & kAllowLargeObjectAllocation) {
    // Oversized objects always go through the runtime, which places them in
    // large object space.
    Label next(this);
    GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);

    if (FLAG_young_generation_large_objects) {
      result = CallRuntime(Runtime::kAllocateInYoungGeneration,
                           NoContextConstant(), SmiTag(size_in_bytes));
    } else {
      TNode<Smi> alignment_flag = SmiConstant(Smi::FromInt(
          AllocateDoubleAlignFlag::encode(needs_double_alignment)));
      result =
          CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                      SmiTag(size_in_bytes), alignment_flag);
    }
    Goto(&out);

    BIND(&next);
  }

  TVARIABLE(IntPtrT, adjusted_size, size_in_bytes);

  if (needs_double_alignment) {
    // If the current top is misaligned, reserve 4 extra bytes for a filler.
    Label next(this);
    GotoIfNot(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &next);

    adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
    Goto(&next);

    BIND(&next);
  }

  TNode<IntPtrT> new_top =
      IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());

  Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
         &no_runtime_call);

  BIND(&runtime_call);
  {
    // Out of bump-pointer space: let the runtime allocate (and possibly GC).
    if (flags & kPretenured) {
      TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
          AllocateDoubleAlignFlag::encode(needs_double_alignment)));
      result =
          CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                      SmiTag(size_in_bytes), runtime_flags);
    } else {
      result = CallRuntime(Runtime::kAllocateInYoungGeneration,
                           NoContextConstant(), SmiTag(size_in_bytes));
    }
    Goto(&out);
  }

  // When there is enough space, return `top' and bump it up.
  BIND(&no_runtime_call);
  {
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                        new_top);

    TVARIABLE(IntPtrT, address, UncheckedCast<IntPtrT>(top));

    if (needs_double_alignment) {
      Label next(this);
      // adjusted_size == size_in_bytes means no filler was reserved.
      GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);

      // Store a filler and increase the address by 4.
      StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
                          LoadRoot(RootIndex::kOnePointerFillerMap));
      address = IntPtrAdd(UncheckedCast<IntPtrT>(top), IntPtrConstant(4));
      Goto(&next);

      BIND(&next);
    }

    // Tag the raw address to form the HeapObject pointer.
    result = BitcastWordToTagged(
        IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
    Goto(&out);
  }

  if (!size_in_bytes_is_constant) {
    BIND(&if_out_of_memory);
    CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
                NoContextConstant());
    Unreachable();
  }

  BIND(&out);
  return UncheckedCast<HeapObject>(result.value());
}
1196
// Like AllocateRaw, but the caller guarantees no double alignment is needed.
TNode<HeapObject> CodeStubAssembler::AllocateRawUnaligned(
    TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
    TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
  DCHECK_EQ(flags & kDoubleAlignment, 0);
  return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
}
1203
// AllocateRaw with double alignment: forces the kDoubleAlignment flag on
// 32-bit hosts, and strips it on 64-bit hosts where allocations are already
// naturally double aligned.
TNode<HeapObject> CodeStubAssembler::AllocateRawDoubleAligned(
    TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
    TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
#if defined(V8_HOST_ARCH_32_BIT)
  return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
                     limit_address);
#elif defined(V8_HOST_ARCH_64_BIT)
#ifdef V8_COMPRESS_POINTERS
  // TODO(ishell, v8:8875): Consider using aligned allocations once the
  // allocation alignment inconsistency is fixed. For now we keep using
  // unaligned access since both x64 and arm64 architectures (where pointer
  // compression is supported) allow unaligned access to doubles and full words.
#endif  // V8_COMPRESS_POINTERS
  // Allocation on 64 bit machine is naturally double aligned
  return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
                     limit_address);
#else
#error Architecture not supported
#endif
}
1224
// Allocates |size_in_bytes| in new space; the size must be a regular
// (non-large) heap object size and only alignment flags are accepted.
TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
    TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
  DCHECK(flags == kNone || flags == kDoubleAlignment);
  CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
  return Allocate(size_in_bytes, flags);
}
1231
// Central allocation entry point: chooses between the optimized allocation
// node (simple young/old allocation) and the explicit bump-pointer helpers
// (needed for double alignment or large objects), selecting the space from
// the kPretenured flag.
TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
                                              AllocationFlags flags) {
  Comment("Allocate");
  bool const new_space = !(flags & kPretenured);
  if (!(flags & kAllowLargeObjectAllocation)) {
    intptr_t size_constant;
    if (ToIntPtrConstant(size_in_bytes, size_constant)) {
      // Without the large-object flag, constant sizes must fit a regular
      // heap object; enforce at stub-generation time.
      CHECK_LE(size_constant, kMaxRegularHeapObjectSize);
    }
  }
  if (!(flags & kDoubleAlignment) && !(flags & kAllowLargeObjectAllocation)) {
    // Common case: let the backend emit its optimized allocation sequence.
    return OptimizedAllocate(size_in_bytes, new_space ? AllocationType::kYoung
                                                      : AllocationType::kOld);
  }
  TNode<ExternalReference> top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  // The limit address is derived from the top address by a fixed offset;
  // verify that layout assumption for both spaces.
  DCHECK_EQ(kSystemPointerSize,
            ExternalReference::new_space_allocation_limit_address(isolate())
                    .address() -
                ExternalReference::new_space_allocation_top_address(isolate())
                    .address());
  DCHECK_EQ(kSystemPointerSize,
            ExternalReference::old_space_allocation_limit_address(isolate())
                    .address() -
                ExternalReference::old_space_allocation_top_address(isolate())
                    .address());
  TNode<IntPtrT> limit_address =
      IntPtrAdd(ReinterpretCast<IntPtrT>(top_address),
                IntPtrConstant(kSystemPointerSize));

  if (flags & kDoubleAlignment) {
    return AllocateRawDoubleAligned(size_in_bytes, flags,
                                    ReinterpretCast<RawPtrT>(top_address),
                                    ReinterpretCast<RawPtrT>(limit_address));
  } else {
    return AllocateRawUnaligned(size_in_bytes, flags,
                                ReinterpretCast<RawPtrT>(top_address),
                                ReinterpretCast<RawPtrT>(limit_address));
  }
}
1274
// Convenience overload for a compile-time-constant size in new space.
TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
                                                        AllocationFlags flags) {
  CHECK(flags == kNone || flags == kDoubleAlignment);
  DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
}
1281
// Convenience overload for a compile-time-constant size.
TNode<HeapObject> CodeStubAssembler::Allocate(int size_in_bytes,
                                              AllocationFlags flags) {
  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
}
1286
1287TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1288 TNode<IntPtrT> offset) {
1289 return UncheckedCast<HeapObject>(
1290 BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset)));
1291}
1292
// Convenience overload taking a constant byte offset.
TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
                                                   int offset) {
  return InnerAllocate(previous, IntPtrConstant(offset));
}
1297
1298TNode<BoolT> CodeStubAssembler::IsRegularHeapObjectSize(TNode<IntPtrT> size) {
1299 return UintPtrLessThanOrEqual(size,
1300 IntPtrConstant(kMaxRegularHeapObjectSize));
1301}
1302
// Implements the ToBoolean conversion as a branch: jumps to |if_true| or
// |if_false| according to the abstract boolean value of |value| (falsy:
// false, 0, -0, NaN, empty string, null/undefined/document.all via the
// undetectable bit, and BigInt zero).
void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
                                                Label* if_false) {
  Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
      if_bigint(this, Label::kDeferred);
  // Rule out false {value}.
  GotoIf(WordEqual(value, FalseConstant()), if_false);

  // Check if {value} is a Smi or a HeapObject.
  Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);

  BIND(&if_smi);
  {
    // The {value} is a Smi, only need to check against zero.
    BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
  }

  BIND(&if_notsmi);
  {
    // Check if {value} is the empty string.
    GotoIf(IsEmptyString(value), if_false);

    // The {value} is a HeapObject, load its map.
    Node* value_map = LoadMap(value);

    // Only null, undefined and document.all have the undetectable bit set,
    // so we can return false immediately when that bit is set.
    GotoIf(IsUndetectableMap(value_map), if_false);

    // We still need to handle numbers specially, but all other {value}s
    // that make it here yield true.
    GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
    Branch(IsBigInt(value), &if_bigint, if_true);

    BIND(&if_heapnumber);
    {
      // Load the floating point value of {value}.
      Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
                                          MachineType::Float64());

      // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
      Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
             if_true, if_false);
    }

    BIND(&if_bigint);
    {
      // BigInt truthiness is decided by the runtime (zero vs non-zero).
      Node* result =
          CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
      CSA_ASSERT(this, IsBoolean(result));
      Branch(WordEqual(result, TrueConstant()), if_true, if_false);
    }
  }
}
1356
1357Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
1358 Node* frame_pointer = LoadParentFramePointer();
1359 return Load(rep, frame_pointer, IntPtrConstant(offset));
1360}
1361
1362Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
1363 MachineType rep) {
1364 return Load(rep, buffer, IntPtrConstant(offset));
1365}
1366
// Loads the field at constant |offset| (a tagged-object offset, i.e.
// including kHeapObjectTag) from |object| with representation |rep|.
Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
                                         int offset, MachineType rep) {
  CSA_ASSERT(this, IsStrong(object));
  // Subtract the tag to obtain the raw field address.
  return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
}
1372
// As above, with a dynamic |offset| (still including kHeapObjectTag).
Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
                                         SloppyTNode<IntPtrT> offset,
                                         MachineType rep) {
  CSA_ASSERT(this, IsStrong(object));
  return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
}
1379
// Loads a Smi field and untags it to an IntPtrT. With 32-bit Smis (on
// 64-bit targets) only the 32-bit payload half of the field is read.
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
    SloppyTNode<HeapObject> object, int offset) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the payload lives at the higher-addressed half.
    offset += 4;
#endif
    return ChangeInt32ToIntPtr(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToIntPtr(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}
1393
// Loads a Smi field and untags it to an Int32. With 32-bit Smis the payload
// half can be read directly without a shift.
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
                                                                 int offset) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the payload lives at the higher-addressed half.
    offset += 4;
#endif
    return UncheckedCast<Int32T>(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToInt32(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}
1407
// Loads a Smi from a raw |base| pointer at byte offset |index| and untags it
// to an IntPtrT (payload-half load with 32-bit Smis).
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
  if (SmiValuesAre32Bits()) {
#if V8_TARGET_LITTLE_ENDIAN
    // On little-endian the payload lives at the higher-addressed half.
    index += 4;
#endif
    return ChangeInt32ToIntPtr(
        Load(MachineType::Int32(), base, IntPtrConstant(index)));
  } else {
    return SmiToIntPtr(
        Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
  }
}
1420
// Stores integer |value| as a Smi at byte offset |offset| from raw pointer
// |base|. With 32-bit Smis the payload is written to one half of the field
// and the other half is explicitly zeroed.
void CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
  if (SmiValuesAre32Bits()) {
    int zero_offset = offset + 4;
    int payload_offset = offset;
#if V8_TARGET_LITTLE_ENDIAN
    // Endianness decides which half holds the payload and which is zeroed.
    std::swap(zero_offset, payload_offset);
#endif
    StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                        IntPtrConstant(zero_offset), Int32Constant(0));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                        IntPtrConstant(payload_offset),
                        TruncateInt64ToInt32(value));
  } else {
    StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
                        IntPtrConstant(offset), SmiTag(value));
  }
}
1438
1439TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1440 SloppyTNode<HeapNumber> object) {
1441 return TNode<Float64T>::UncheckedCast(LoadObjectField(
1442 object, HeapNumber::kValueOffset, MachineType::Float64()));
1443}
1444
1445TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
1446 return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset,
1447 MachineType::TaggedPointer()));
1448}
1449
1450TNode<Int32T> CodeStubAssembler::LoadInstanceType(
1451 SloppyTNode<HeapObject> object) {
1452 return LoadMapInstanceType(LoadMap(object));
1453}
1454
1455TNode<BoolT> CodeStubAssembler::HasInstanceType(SloppyTNode<HeapObject> object,
1456 InstanceType instance_type) {
1457 return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1458}
1459
1460TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1461 SloppyTNode<HeapObject> object, InstanceType instance_type) {
1462 return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1463}
1464
// Returns true when |any_tagged| is a Smi OR has an instance type different
// from |type|; the Select guards the map load against Smis.
TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
    SloppyTNode<HeapObject> any_tagged, InstanceType type) {
  /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
  TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
  return Select<BoolT>(
      tagged_is_smi, [=]() { return tagged_is_smi; },
      [=]() { return DoesntHaveInstanceType(any_tagged, type); });
}
1473
// Returns the property backing store of a non-dictionary-mode |object|. A
// Smi in the properties-or-hash slot means "no properties"; the empty fixed
// array is returned in that case.
TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
    SloppyTNode<JSObject> object) {
  CSA_SLOW_ASSERT(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
  TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
  return Select<HeapObject>(TaggedIsSmi(properties),
                            [=] { return EmptyFixedArrayConstant(); },
                            [=] { return CAST(properties); });
}
1482
// Returns the property dictionary of a dictionary-mode |object|. A Smi in
// the properties-or-hash slot means "no properties"; the empty property
// dictionary is returned in that case.
TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
    SloppyTNode<JSObject> object) {
  CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
  TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
  return Select<HeapObject>(TaggedIsSmi(properties),
                            [=] { return EmptyPropertyDictionaryConstant(); },
                            [=] { return CAST(properties); });
}
1491
1492TNode<Number> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
1493 CSA_ASSERT(this, IsJSArray(array));
1494 return CAST(LoadObjectField(array, JSArray::kLengthOffset));
1495}
1496
// Loads the length field of an arguments object; unlike JSArray, the result
// is not statically known to be a Number.
TNode<Object> CodeStubAssembler::LoadJSArgumentsObjectWithLength(
    SloppyTNode<JSArgumentsObjectWithLength> array) {
  return LoadObjectField(array, JSArgumentsObjectWithLength::kLengthOffset);
}
1501
// Loads the length of a fast-elements (or sealed/frozen packed) JSArray,
// which is guaranteed to be a positive Smi.
TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
    SloppyTNode<JSArray> array) {
  TNode<Object> length = LoadJSArrayLength(array);
  CSA_ASSERT(this, Word32Or(IsFastElementsKind(LoadElementsKind(array)),
                            IsElementsKindInRange(LoadElementsKind(array),
                                                  PACKED_SEALED_ELEMENTS,
                                                  PACKED_FROZEN_ELEMENTS)));
  // JSArray length is always a positive Smi for fast arrays.
  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
  return UncheckedCast<Smi>(length);
}
1513
// Loads the Smi length of a FixedArrayBase (but not of WeakFixedArray
// subclasses, which have a different layout).
TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
    SloppyTNode<FixedArrayBase> array) {
  CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
  return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
}
1519
// Loads a FixedArrayBase length already untagged to an IntPtrT.
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
    SloppyTNode<FixedArrayBase> array) {
  return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
}
1524
1525TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1526 TNode<FeedbackVector> vector) {
1527 return ChangeInt32ToIntPtr(
1528 LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1529}
1530
1531TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1532 TNode<WeakFixedArray> array) {
1533 return CAST(LoadObjectField(array, WeakFixedArray::kLengthOffset));
1534}
1535
// Loads a WeakFixedArray length already untagged to an IntPtrT.
TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
    SloppyTNode<WeakFixedArray> array) {
  return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
}
1540
// Reads the 16-bit number-of-descriptors field of a DescriptorArray,
// widened to Int32.
TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
    TNode<DescriptorArray> array) {
  return UncheckedCast<Int32T>(
      LoadObjectField(array, DescriptorArray::kNumberOfDescriptorsOffset,
                      MachineType::Int16()));
}
1547
1548TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
1549 CSA_SLOW_ASSERT(this, IsMap(map));
1550 return UncheckedCast<Int32T>(
1551 LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
1552}
1553
1554TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
1555 CSA_SLOW_ASSERT(this, IsMap(map));
1556 return UncheckedCast<Int32T>(
1557 LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
1558}
1559
1560TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
1561 CSA_SLOW_ASSERT(this, IsMap(map));
1562 return UncheckedCast<Uint32T>(
1563 LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
1564}
1565
1566TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
1567 return UncheckedCast<Int32T>(
1568 LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint16()));
1569}
1570
1571TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
1572 CSA_SLOW_ASSERT(this, IsMap(map));
1573 Node* bit_field2 = LoadMapBitField2(map);
1574 return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
1575}
1576
1577TNode<Int32T> CodeStubAssembler::LoadElementsKind(
1578 SloppyTNode<HeapObject> object) {
1579 return LoadMapElementsKind(LoadMap(object));
1580}
1581
1582TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
1583 SloppyTNode<Map> map) {
1584 CSA_SLOW_ASSERT(this, IsMap(map));
1585 return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
1586}
1587
1588TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
1589 CSA_SLOW_ASSERT(this, IsMap(map));
1590 return CAST(LoadObjectField(map, Map::kPrototypeOffset));
1591}
1592
// Returns the PrototypeInfo of |map|, jumping to |if_no_proto_info| when the
// transitions-or-prototype-info slot holds anything else (a Smi, a cleared
// or weak reference, or a strong object that is not a PrototypeInfo).
TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
    SloppyTNode<Map> map, Label* if_no_proto_info) {
  Label if_strong_heap_object(this);
  CSA_ASSERT(this, IsMap(map));
  TNode<MaybeObject> maybe_prototype_info =
      LoadMaybeWeakObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
  TVARIABLE(Object, prototype_info);
  // Only the strong-heap-object case continues; all other states bail out.
  DispatchMaybeObject(maybe_prototype_info, if_no_proto_info, if_no_proto_info,
                      if_no_proto_info, &if_strong_heap_object,
                      &prototype_info);

  BIND(&if_strong_heap_object);
  // The slot is shared with TransitionArray; verify via the map.
  GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
                      LoadRoot(RootIndex::kPrototypeInfoMap)),
            if_no_proto_info);
  return CAST(prototype_info.value());
}
1610
// Reads the byte-sized instance-size-in-words field of |map|, widened to
// pointer size.
TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(
    SloppyTNode<Map> map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return ChangeInt32ToIntPtr(LoadObjectField(
      map, Map::kInstanceSizeInWordsOffset, MachineType::Uint8()));
}
1617
// Reads the in-object-properties start offset (in words) from |map|. The
// field is shared with the constructor-function index, so the map must
// belong to a JSObject.
TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
    SloppyTNode<Map> map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  // See Map::GetInObjectPropertiesStartInWords() for details.
  CSA_ASSERT(this, IsJSObjectMap(map));
  return ChangeInt32ToIntPtr(LoadObjectField(
      map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
      MachineType::Uint8()));
}
1627
1628TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1629 SloppyTNode<Map> map) {
1630 CSA_SLOW_ASSERT(this, IsMap(map));
1631 // See Map::GetConstructorFunctionIndex() for details.
1632