// V8 API Reference, 7.2.502.16 (for Deno 0.2.4) — fixed-array-inl.h
1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_OBJECTS_FIXED_ARRAY_INL_H_
6 #define V8_OBJECTS_FIXED_ARRAY_INL_H_
7 
8 #include "src/objects/fixed-array.h"
9 
10 #include "src/conversions.h"
11 #include "src/handles-inl.h"
12 #include "src/heap/heap-write-barrier.h"
13 #include "src/objects-inl.h" // Needed for write barriers
14 #include "src/objects/bigint.h"
15 #include "src/objects/map.h"
16 #include "src/objects/maybe-object-inl.h"
17 #include "src/objects/slots.h"
18 
19 // Has to be the last include (doesn't have include guards):
20 #include "src/objects/object-macros.h"
21 
22 namespace v8 {
23 namespace internal {
24 
// Boilerplate constructors for the fixed-array family. Each *_IMPL expands to
// a constructor wrapping a raw tagged Address (macros from object-macros.h).
OBJECT_CONSTRUCTORS_IMPL(FixedArrayBase, HeapObjectPtr)
OBJECT_CONSTRUCTORS_IMPL(FixedArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(FixedDoubleArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(FixedTypedArrayBase, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(ArrayList, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(ByteArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(TemplateList, FixedArray)
32 
// Constructs a FixedArrayBase from a raw tagged pointer.
// When |allow_smi| is kAllowBeingASmi the stored value may legitimately be a
// Smi instead of a heap object; otherwise it must be a real FixedArrayBase.
FixedArrayBase::FixedArrayBase(Address ptr, AllowInlineSmiStorage allow_smi)
    : HeapObjectPtr(ptr, allow_smi) {
  SLOW_DCHECK(
      (allow_smi == AllowInlineSmiStorage::kAllowBeingASmi && IsSmi()) ||
      IsFixedArrayBase());
}
39 
// Constructs a ByteArray from a raw tagged pointer; same Smi-tolerance rules
// as the FixedArrayBase constructor above.
ByteArray::ByteArray(Address ptr, AllowInlineSmiStorage allow_smi)
    : FixedArrayBase(ptr, allow_smi) {
  SLOW_DCHECK(
      (allow_smi == AllowInlineSmiStorage::kAllowBeingASmi && IsSmi()) ||
      IsByteArray());
}
46 
// Checked Type::cast() helpers. CAST_ACCESSOR2 generates the ObjectPtr-based
// variant; CAST_ACCESSOR the Object*-based one (see object-macros.h).
CAST_ACCESSOR2(ArrayList)
CAST_ACCESSOR2(ByteArray)
CAST_ACCESSOR2(FixedArray)
CAST_ACCESSOR2(FixedArrayBase)
CAST_ACCESSOR2(FixedDoubleArray)
CAST_ACCESSOR2(FixedTypedArrayBase)
CAST_ACCESSOR2(TemplateList)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakArrayList)
56 
// length/capacity are Smi-encoded header fields. The SYNCHRONIZED_* variants
// additionally generate atomically-synchronized accessors (cf. the
// acquire-load in unchecked_synchronized_length below).
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(WeakFixedArray, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(WeakFixedArray, length, kLengthOffset)

SMI_ACCESSORS(WeakArrayList, capacity, kCapacityOffset)
SYNCHRONIZED_SMI_ACCESSORS(WeakArrayList, capacity, kCapacityOffset)
SMI_ACCESSORS(WeakArrayList, length, kLengthOffset)
65 
// Acquire-loads the raw length field without decoding or checking that it is
// a Smi (hence "unchecked").
Object* FixedArrayBase::unchecked_synchronized_length() const {
  return ACQUIRE_READ_FIELD(this, kLengthOffset);
}
69 
70 ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
71 
// Returns a slot pointing at element 0 of the backing store.
ObjectSlot FixedArray::GetFirstElementAddress() {
  return ObjectSlot(FIELD_ADDR(this, OffsetOfElementAt(0)));
}
75 
76 bool FixedArray::ContainsOnlySmisOrHoles() {
77  Object* the_hole = GetReadOnlyRoots().the_hole_value();
78  ObjectSlot current = GetFirstElementAddress();
79  for (int i = 0; i < length(); ++i, ++current) {
80  Object* candidate = *current;
81  if (!candidate->IsSmi() && candidate != the_hole) return false;
82  }
83  return true;
84 }
85 
// Relaxed (non-synchronized) element read; |index| must be in bounds.
Object* FixedArray::get(int index) const {
  DCHECK(index >= 0 && index < this->length());
  return RELAXED_READ_FIELD(this, kHeaderSize + index * kTaggedSize);
}

// Handlified convenience wrapper around get().
Handle<Object> FixedArray::get(FixedArray array, int index, Isolate* isolate) {
  return handle(array->get(index), isolate);
}
94 
95 template <class T>
96 MaybeHandle<T> FixedArray::GetValue(Isolate* isolate, int index) const {
97  Object* obj = get(index);
98  if (obj->IsUndefined(isolate)) return MaybeHandle<T>();
99  return Handle<T>(T::cast(obj), isolate);
100 }
101 
// Like GetValue, but CHECK-fails (release builds included) if the slot holds
// undefined.
template <class T>
Handle<T> FixedArray::GetValueChecked(Isolate* isolate, int index) const {
  Object* obj = get(index);
  CHECK(!obj->IsUndefined(isolate));
  return Handle<T>(T::cast(obj), isolate);
}
108 
// True iff element |index| is the hole sentinel.
bool FixedArray::is_the_hole(Isolate* isolate, int index) {
  return get(index)->IsTheHole(isolate);
}
112 
// Stores a Smi; no write barrier is needed since Smis are not heap pointers.
// Writing to copy-on-write arrays is forbidden (first DCHECK).
void FixedArray::set(int index, Smi value) {
  DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_LT(index, this->length());
  DCHECK(ObjectPtr(value).IsSmi());
  int offset = kHeaderSize + index * kTaggedSize;
  RELAXED_WRITE_FIELD(this, offset, value);
}
120 
// Stores a tagged value with an unconditional write barrier. The relaxed
// store must precede the barrier; do not reorder.
void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetReadOnlyRoots().fixed_cow_array_map(), map());
  DCHECK(IsFixedArray());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->length());
  int offset = kHeaderSize + index * kTaggedSize;
  RELAXED_WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset, value);
}

// As above, but the caller controls the barrier via |mode| (e.g.
// SKIP_WRITE_BARRIER when the value is known not to need one).
void FixedArray::set(int index, Object* value, WriteBarrierMode mode) {
  DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->length());
  int offset = kHeaderSize + index * kTaggedSize;
  RELAXED_WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(this, offset, value, mode);
}
139 
// Barrier-free store; only legal when |value| is not in new space (asserted
// below), so no remembered-set entry is ever required.
void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object* value) {
  DCHECK_NE(array->map(), array->GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, array->length());
  DCHECK(!Heap::InNewSpace(value));
  RELAXED_WRITE_FIELD(array, kHeaderSize + index * kTaggedSize, value);
}
147 
// Sentinel writers: store undefined / null / the-hole from the read-only
// roots. These go through NoWriteBarrierSet, which asserts the value is not
// in new space, so no barrier is needed.
void FixedArray::set_undefined(int index) {
  set_undefined(GetReadOnlyRoots(), index);
}

void FixedArray::set_undefined(Isolate* isolate, int index) {
  set_undefined(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_undefined(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.undefined_value());
}

void FixedArray::set_null(int index) { set_null(GetReadOnlyRoots(), index); }

void FixedArray::set_null(Isolate* isolate, int index) {
  set_null(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_null(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.null_value());
}

void FixedArray::set_the_hole(int index) {
  set_the_hole(GetReadOnlyRoots(), index);
}

void FixedArray::set_the_hole(Isolate* isolate, int index) {
  set_the_hole(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_the_hole(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.the_hole_value());
}
181 
182 void FixedArray::FillWithHoles(int from, int to) {
183  for (int i = from; i < to; i++) {
184  set_the_hole(i);
185  }
186 }
187 
// Slot pointing at element 0.
ObjectSlot FixedArray::data_start() {
  return RawField(OffsetOfElementAt(0));
}

// Slot pointing at element |index| (no bounds check here).
ObjectSlot FixedArray::RawFieldOfElementAt(int index) {
  return RawField(OffsetOfElementAt(index));
}
195 
// Delegates the (possibly overlapping) element move to the heap, which also
// handles the write barriers per |mode|. GC is disallowed for the duration.
void FixedArray::MoveElements(Heap* heap, int dst_index, int src_index, int len,
                              WriteBarrierMode mode) {
  DisallowHeapAllocation no_gc;
  heap->MoveElements(*this, dst_index, src_index, len, mode);
}
201 
// Reads element |index| as a double; the slot must not contain the hole
// (use get_representation/is_the_hole to probe first).
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}

// Raw 64-bit representation of element |index|; safe to call on holes
// (the hole is the kHoleNanInt64 bit pattern).
uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(this, offset);
}
217 
218 Handle<Object> FixedDoubleArray::get(FixedDoubleArray array, int index,
219  Isolate* isolate) {
220  if (array->is_the_hole(index)) {
221  return isolate->factory()->the_hole_value();
222  } else {
223  return isolate->factory()->NewNumber(array->get_scalar(index));
224  }
225 }
226 
227 void FixedDoubleArray::set(int index, double value) {
228  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
229  map() != GetReadOnlyRoots().fixed_array_map());
230  int offset = kHeaderSize + index * kDoubleSize;
231  if (std::isnan(value)) {
232  WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
233  } else {
234  WRITE_DOUBLE_FIELD(this, offset, value);
235  }
236  DCHECK(!is_the_hole(index));
237 }
238 
// Isolate-taking overload kept for interface parity; the isolate is unused.
void FixedDoubleArray::set_the_hole(Isolate* isolate, int index) {
  set_the_hole(index);
}

// Writes the hole as its raw 64-bit NaN pattern (kHoleNanInt64).
void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}
249 
// Isolate-taking overload kept for interface parity; the isolate is unused.
bool FixedDoubleArray::is_the_hole(Isolate* isolate, int index) {
  return is_the_hole(index);
}

// Compares raw bits against the hole NaN pattern.
bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}
257 
// Raw memmove of |len| doubles; doubles are unboxed so no barrier can ever
// be needed, hence the SKIP_WRITE_BARRIER assertion.
void FixedDoubleArray::MoveElements(Heap* heap, int dst_index, int src_index,
                                    int len, WriteBarrierMode mode) {
  DCHECK_EQ(SKIP_WRITE_BARRIER, mode);
  double* data_start = reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
  MemMove(data_start + dst_index, data_start + src_index, len * kDoubleSize);
}
264 
265 void FixedDoubleArray::FillWithHoles(int from, int to) {
266  for (int i = from; i < to; i++) {
267  set_the_hole(i);
268  }
269 }
270 
// Relaxed read of a possibly-weak element.
MaybeObject WeakFixedArray::Get(int index) const {
  DCHECK(index >= 0 && index < this->length());
  return RELAXED_READ_WEAK_FIELD(this, OffsetOfElementAt(index));
}

// Stores a possibly-weak value with an unconditional weak write barrier.
// The relaxed store must precede the barrier; do not reorder.
void WeakFixedArray::Set(int index, MaybeObject value) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length());
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_WEAK_FIELD(this, offset, value);
  WEAK_WRITE_BARRIER(this, offset, value);
}

// As above, but the caller controls the barrier via |mode|.
void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length());
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_WEAK_FIELD(this, offset, value);
  CONDITIONAL_WEAK_WRITE_BARRIER(this, offset, value, mode);
}
291 
// Slot pointing at element 0 of the weak backing store.
MaybeObjectSlot WeakFixedArray::data_start() {
  return HeapObject::RawMaybeWeakField(this, kHeaderSize);
}

// Slot pointing at weak element |index| (no bounds check here).
MaybeObjectSlot WeakFixedArray::RawFieldOfElementAt(int index) {
  return HeapObject::RawMaybeWeakField(this, OffsetOfElementAt(index));
}
299 
// Relaxed read; note bounds are against capacity(), not length() — slots
// beyond length() are addressable storage.
MaybeObject WeakArrayList::Get(int index) const {
  DCHECK(index >= 0 && index < this->capacity());
  return RELAXED_READ_WEAK_FIELD(this, OffsetOfElementAt(index));
}

// Stores a possibly-weak value; barrier controlled by |mode|. The relaxed
// store must precede the barrier; do not reorder.
void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->capacity());
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_WEAK_FIELD(this, offset, value);
  CONDITIONAL_WEAK_WRITE_BARRIER(this, offset, value, mode);
}

// Slot pointing at element 0.
MaybeObjectSlot WeakArrayList::data_start() {
  return HeapObject::RawMaybeWeakField(this, kHeaderSize);
}
316 
// Returns the next live (non-cleared) object, skipping cleared weak slots,
// or nullptr once the list is exhausted. Every slot is expected to be weak
// or cleared (asserted). Exhaustion is latched by nulling array_ so
// subsequent calls stay cheap and return nullptr.
HeapObject* WeakArrayList::Iterator::Next() {
  if (array_ != nullptr) {
    while (index_ < array_->length()) {
      MaybeObject item = array_->Get(index_++);
      DCHECK(item->IsWeakOrCleared());
      if (!item->IsCleared()) return item->GetHeapObjectAssumeWeak();
    }
    array_ = nullptr;
  }
  return nullptr;
}
328 
329 int ArrayList::Length() const {
330  if (FixedArray::cast(*this)->length() == 0) return 0;
331  return Smi::ToInt(FixedArray::cast(*this)->get(kLengthIndex));
332 }
333 
// Stores the used-element count as a Smi at kLengthIndex.
void ArrayList::SetLength(int length) {
  return FixedArray::cast(*this)->set(kLengthIndex, Smi::FromInt(length));
}

// User elements start at kFirstIndex; |index| is relative to that.
Object* ArrayList::Get(int index) const {
  return FixedArray::cast(*this)->get(kFirstIndex + index);
}

// Raw slot for user element |index|.
ObjectSlot ArrayList::Slot(int index) {
  return RawField(OffsetOfElementAt(kFirstIndex + index));
}

// Stores a user element; barrier controlled by |mode|.
void ArrayList::Set(int index, Object* obj, WriteBarrierMode mode) {
  FixedArray::cast(*this)->set(kFirstIndex + index, obj, mode);
}

// Resets a user element to undefined. The caller supplies the undefined
// value; since it lives in the read-only heap the barrier is skipped.
void ArrayList::Clear(int index, Object* undefined) {
  DCHECK(undefined->IsUndefined());
  FixedArray::cast(*this)->set(kFirstIndex + index, undefined,
                               SKIP_WRITE_BARRIER);
}
355 
// Total object size in bytes: header + payload, rounded up to pointer size.
int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kTaggedSize); }

// Single-byte read at |index|.
byte ByteArray::get(int index) const {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

// Single-byte write at |index|.
void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}
367 
// Bulk-copies |length| bytes from |buffer| into the array at |index|.
// The kMaxInt guard protects the index + length addition from overflow.
void ByteArray::copy_in(int index, const byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->length());
  Address dst_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
  memcpy(reinterpret_cast<void*>(dst_addr), buffer, length);
}

// Bulk-copies |length| bytes out of the array at |index| into |buffer|.
void ByteArray::copy_out(int index, byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->length());
  Address src_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
  memcpy(buffer, reinterpret_cast<void*>(src_addr), length);
}
381 
// Word-granular accessors. Note |index| counts whole ints/uint32s, not
// bytes, so bounds are checked against length() / element size.
int ByteArray::get_int(int index) const {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}

void ByteArray::set_int(int index, int value) {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  WRITE_INT_FIELD(this, kHeaderSize + index * kIntSize, value);
}

uint32_t ByteArray::get_uint32(int index) const {
  DCHECK(index >= 0 && index < this->length() / kUInt32Size);
  return READ_UINT32_FIELD(this, kHeaderSize + index * kUInt32Size);
}

void ByteArray::set_uint32(int index, uint32_t value) {
  DCHECK(index >= 0 && index < this->length() / kUInt32Size);
  WRITE_UINT32_FIELD(this, kHeaderSize + index * kUInt32Size, value);
}
401 
// Zeroes the alignment padding between the end of the payload and the
// rounded-up object size (Size()), keeping the heap deterministic.
void ByteArray::clear_padding() {
  int data_size = length() + kHeaderSize;
  memset(reinterpret_cast<void*>(address() + data_size), 0, Size() - data_size);
}

// Recovers the ByteArray whose payload starts at |address| by subtracting
// the header and re-adding the heap-object tag.
ByteArray ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return ByteArray::cast(ObjectPtr(address - kHeaderSize + kHeapObjectTag));
}
411 
// Payload size in bytes, rounded up to pointer size.
int ByteArray::DataSize() const { return RoundUp(length(), kTaggedSize); }

// Total object size for this array's length.
int ByteArray::ByteArraySize() { return SizeFor(this->length()); }

// First byte of the payload.
byte* ByteArray::GetDataStartAddress() {
  return reinterpret_cast<byte*>(address() + kHeaderSize);
}

// One-past-the-end of the payload (not of the padded object).
byte* ByteArray::GetDataEndAddress() {
  return GetDataStartAddress() + length();
}
423 
// PodArray<T>: a ByteArray reinterpreted as a flat array of POD T values.
template <class T>
PodArray<T>::PodArray(Address ptr) : ByteArray(ptr) {}

template <class T>
PodArray<T> PodArray<T>::cast(Object* object) {
  return PodArray<T>(object->ptr());
}

template <class T>
PodArray<T> PodArray<T>::cast(ObjectPtr object) {
  return PodArray<T>(object.ptr());
}

// static
// Allocates backing storage for |length| T values (length * sizeof(T)
// bytes).
template <class T>
Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
                                     PretenureFlag pretenure) {
  return Handle<PodArray<T>>::cast(
      isolate->factory()->NewByteArray(length * sizeof(T), pretenure));
}

// Length in T elements, not bytes.
template <class T>
int PodArray<T>::length() const {
  return ByteArray::length() / sizeof(T);
}
449 
// Raw untagged pointer stored in the external-pointer slot.
void* FixedTypedArrayBase::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}

// NOTE(review): the |mode| parameter is unused here — the store is plain
// (the pointer is untagged, so presumably no barrier applies); confirm.
void FixedTypedArrayBase::set_external_pointer(void* value,
                                               WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}

// Effective data address: base_pointer + external_pointer. This single
// formula covers both on-heap (base set) and off-heap (base is Smi zero)
// backing stores — see DataSize(type) below, which keys off base_pointer.
void* FixedTypedArrayBase::DataPtr() {
  return reinterpret_cast<void*>(
      reinterpret_cast<intptr_t>(base_pointer()) +
      reinterpret_cast<intptr_t>(external_pointer()));
}
466 
// Maps a FIXED_*_ARRAY_TYPE instance type to its element width in bytes,
// via the TYPED_ARRAYS x-macro list.
int FixedTypedArrayBase::ElementSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case FIXED_##TYPE##_ARRAY_TYPE:                 \
    element_size = sizeof(ctype);                 \
    break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
  }
  return element_size;
}
482 
// On-heap data size in bytes; zero when the base pointer is Smi zero
// (i.e. the data lives off-heap, addressed purely by external_pointer).
int FixedTypedArrayBase::DataSize(InstanceType type) const {
  if (base_pointer() == Smi::kZero) return 0;
  return length() * ElementSize(type);
}

int FixedTypedArrayBase::DataSize() const {
  return DataSize(map()->instance_type());
}

// Byte length regardless of on/off-heap placement; widened to size_t so the
// multiplication cannot overflow int.
size_t FixedTypedArrayBase::ByteLength() const {
  return static_cast<size_t>(length()) *
         static_cast<size_t>(ElementSize(map()->instance_type()));
}

// Heap-object size: header plus on-heap data, pointer-aligned.
int FixedTypedArrayBase::size() const {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}

int FixedTypedArrayBase::TypedArraySize(InstanceType type) const {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}

// static
int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}
509 
// Per-type default element values, used e.g. when SetValue receives
// undefined (see FixedTypedArray<Traits>::SetValue below): zero for the
// integer types, NaN for the float types.
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }

uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }

int8_t Int8ArrayTraits::defaultValue() { return 0; }

uint16_t Uint16ArrayTraits::defaultValue() { return 0; }

int16_t Int16ArrayTraits::defaultValue() { return 0; }

uint32_t Uint32ArrayTraits::defaultValue() { return 0; }

int32_t Int32ArrayTraits::defaultValue() { return 0; }

float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}

double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}
531 
// Bounds-checked element read; defers to the raw data-pointer variant.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  return FixedTypedArray<Traits>::get_scalar_from_data_ptr(DataPtr(), index);
}

// static
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar_from_data_ptr(
    void* data_ptr, int index) {
  typename Traits::ElementType* ptr = reinterpret_cast<ElementType*>(data_ptr);
  // The JavaScript memory model allows for racy reads and writes to a
  // SharedArrayBuffer's backing store, which will always be a FixedTypedArray.
  // ThreadSanitizer will catch these racy accesses and warn about them, so we
  // disable TSAN for these reads and writes using annotations.
  //
  // We don't use relaxed atomics here, as it is not a requirement of the
  // JavaScript memory model to have tear-free reads of overlapping accesses,
  // and using relaxed atomics may introduce overhead.
  TSAN_ANNOTATE_IGNORE_READS_BEGIN;
  auto result = ptr[index];
  TSAN_ANNOTATE_IGNORE_READS_END;
  return result;
}
556 
// Bounds-CHECKed (release builds too) element write; racy by design for
// SharedArrayBuffer backing stores, hence the TSAN annotations.
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  CHECK((index >= 0) && (index < this->length()));
  // See the comment in FixedTypedArray<Traits>::get_scalar.
  auto* ptr = reinterpret_cast<ElementType*>(DataPtr());
  TSAN_ANNOTATE_IGNORE_WRITES_BEGIN;
  ptr[index] = value;
  TSAN_ANNOTATE_IGNORE_WRITES_END;
}
566 
// from(int): converts a host int to the element type. The generic version is
// a plain cast; Uint8Clamped clamps to [0, 255]; the BigInt element types
// are unreachable from plain numbers.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(int value) {
  return static_cast<ElementType>(value);
}

template <>
inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(int value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(int value) {
  UNREACHABLE();
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(int value) {
  UNREACHABLE();
}

// from(uint32_t): same scheme for unsigned 32-bit inputs.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(uint32_t value) {
  return static_cast<ElementType>(value);
}

template <>
inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(uint32_t value) {
  // We need this special case for Uint32 -> Uint8Clamped, because the highest
  // Uint32 values will be negative as an int, clamping to 0, rather than 255.
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(uint32_t value) {
  UNREACHABLE();
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(uint32_t value) {
  UNREACHABLE();
}
611 
// from(double): the generic (integer-element) version truncates via
// DoubleToInt32; float/double specializations pass the value through;
// Uint8Clamped rounds-to-nearest and clamps; BigInt types are unreachable.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}

template <>
inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(double value) {
  // Handle NaNs and less than zero values which clamp to zero.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(double value) {
  UNREACHABLE();
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(double value) {
  UNREACHABLE();
}

template <>
inline float FixedTypedArray<Float32ArrayTraits>::from(double value) {
  return static_cast<float>(value);
}

template <>
inline double FixedTypedArray<Float64ArrayTraits>::from(double value) {
  return value;
}
644 
// from(int64_t)/from(uint64_t): only the BigInt element types accept 64-bit
// integers; all other element types are unreachable. Cross-signedness
// conversions are plain two's-complement casts.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(int64_t value) {
  UNREACHABLE();
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from(uint64_t value) {
  UNREACHABLE();
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(int64_t value) {
  return value;
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(uint64_t value) {
  return value;
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::from(int64_t value) {
  return static_cast<uint64_t>(value);
}

template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::from(uint64_t value) {
  return static_cast<int64_t>(value);
}
674 
// Converts a handlified Smi or HeapNumber to the element type. |lossless| is
// only written by the BigInt specializations below.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::FromHandle(
    Handle<Object> value, bool* lossless) {
  if (value->IsSmi()) {
    return from(Smi::ToInt(*value));
  }
  DCHECK(value->IsHeapNumber());
  return from(HeapNumber::cast(*value)->value());
}

// BigInt element types expect a BigInt; *lossless reports whether the
// conversion preserved the value.
template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::FromHandle(
    Handle<Object> value, bool* lossless) {
  DCHECK(value->IsBigInt());
  return BigInt::cast(*value)->AsInt64(lossless);
}

template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::FromHandle(
    Handle<Object> value, bool* lossless) {
  DCHECK(value->IsBigInt());
  return BigInt::cast(*value)->AsUint64(lossless);
}
698 
// Boxes element |index| via the traits' ToHandle (Smi, number, or BigInt).
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(Isolate* isolate,
                                            FixedTypedArray<Traits> array,
                                            int index) {
  return Traits::ToHandle(isolate, array->get_scalar(index));
}
705 
// Stores a tagged numeric |value| (Smi, HeapNumber, or undefined) at |index|,
// converting through from(). Undefined maps to the traits' default value.
template <class Traits>
void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
  ElementType cast_value = Traits::defaultValue();
  if (value->IsSmi()) {
    int int_value = Smi::ToInt(value);
    cast_value = from(int_value);
  } else if (value->IsHeapNumber()) {
    double double_value = HeapNumber::cast(value)->value();
    cast_value = from(double_value);
  } else {
    // Clamp undefined to the default value. All other types have been
    // converted to a number type further up in the call chain.
    DCHECK(value->IsUndefined());
  }
  set(index, cast_value);
}

// BigInt element types only accept BigInt values.
template <>
inline void FixedTypedArray<BigInt64ArrayTraits>::SetValue(uint32_t index,
                                                           Object* value) {
  DCHECK(value->IsBigInt());
  set(index, BigInt::cast(value)->AsInt64());
}

template <>
inline void FixedTypedArray<BigUint64ArrayTraits>::SetValue(uint32_t index,
                                                            Object* value) {
  DCHECK(value->IsBigInt());
  set(index, BigInt::cast(value)->AsUint64());
}
736 
// Per-type boxing of scalars. Types that always fit in a Smi box directly;
// 32-bit types may need a HeapNumber (factory NewNumberFrom*); floats become
// numbers; 64-bit types become BigInts.
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}

Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}

Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}

Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}

Handle<Object> BigInt64ArrayTraits::ToHandle(Isolate* isolate, int64_t scalar) {
  return BigInt::FromInt64(isolate, scalar);
}

Handle<Object> BigUint64ArrayTraits::ToHandle(Isolate* isolate,
                                              uint64_t scalar) {
  return BigInt::FromUint64(isolate, scalar);
}
782 
// static
// Out-of-class definition required for the in-class-initialized static
// constant.
template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;

// Constructs from a raw tagged pointer, asserting the instance type matches
// the traits.
template <class Traits>
FixedTypedArray<Traits>::FixedTypedArray(Address ptr)
    : FixedTypedArrayBase(ptr) {
  DCHECK(IsHeapObject() && map()->instance_type() == Traits::kInstanceType);
}

// Checked casts (both Object* and ObjectPtr flavors).
template <class Traits>
FixedTypedArray<Traits> FixedTypedArray<Traits>::cast(Object* object) {
  return FixedTypedArray<Traits>(object->ptr());
}

template <class Traits>
FixedTypedArray<Traits> FixedTypedArray<Traits>::cast(ObjectPtr object) {
  return FixedTypedArray<Traits>(object.ptr());
}
803 
// TemplateList stores its element count as a Smi at kLengthIndex; user
// elements start at kFirstElementIndex.
int TemplateList::length() const {
  return Smi::ToInt(FixedArray::cast(*this)->get(kLengthIndex));
}

Object* TemplateList::get(int index) const {
  return FixedArray::cast(*this)->get(kFirstElementIndex + index);
}

void TemplateList::set(int index, Object* value) {
  FixedArray::cast(*this)->set(kFirstElementIndex + index, value);
}
815 
816 } // namespace internal
817 } // namespace v8
818 
819 #include "src/objects/object-macros-undef.h"
820 
821 #endif // V8_OBJECTS_FIXED_ARRAY_INL_H_
// Definition: libplatform.h:13