#if !defined(DART_PRECOMPILED_RUNTIME)
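// Materialization of deferred slots during deoptimization.
// DeferredDouble::Materialize: the unboxed double is boxed and the resulting
// Double object is written into the reserved frame slot.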
DoublePtr* double_slot = reinterpret_cast<DoublePtr*>(slot());
*double_slot = Double::New(value());

if (FLAG_trace_deoptimization_verbose) {
  OS::PrintErr("materializing double at %" Px ": %g\n",
               reinterpret_cast<uword>(slot()), value());
}
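// DeferredMint::Materialize: the deferred 64-bit value does not fit into a
// Smi, so it is boxed as a Mint before being stored into the slot.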
MintPtr* mint_slot = reinterpret_cast<MintPtr*>(slot());
ASSERT(!Smi::IsValid(value()));
Mint& mint = Mint::Handle();
mint ^= Integer::New(value());
*mint_slot = mint.ptr();

if (FLAG_trace_deoptimization_verbose) {
  OS::PrintErr("materializing mint at %" Px ": %" Pd64 "\n",
               reinterpret_cast<uword>(slot()), value());
}
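// DeferredFloat32x4::Materialize: a Float32x4 box is allocated for the
// deferred SIMD value; the verbose trace prints its four lanes.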
Float32x4Ptr* float32x4_slot = reinterpret_cast<Float32x4Ptr*>(slot());
Float32x4Ptr raw_float32x4 = Float32x4::New(value());
*float32x4_slot = raw_float32x4;

if (FLAG_trace_deoptimization_verbose) {
  float x = raw_float32x4->untag()->x();
  float y = raw_float32x4->untag()->y();
  float z = raw_float32x4->untag()->z();
  float w = raw_float32x4->untag()->w();
  OS::PrintErr("materializing Float32x4 at %" Px ": %g,%g,%g,%g\n",
               reinterpret_cast<uword>(slot()), x, y, z, w);
}
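// DeferredFloat64x2::Materialize: same pattern for the two-lane double SIMD
// value.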
Float64x2Ptr* float64x2_slot = reinterpret_cast<Float64x2Ptr*>(slot());
Float64x2Ptr raw_float64x2 = Float64x2::New(value());
*float64x2_slot = raw_float64x2;

if (FLAG_trace_deoptimization_verbose) {
  double x = raw_float64x2->untag()->x();
  double y = raw_float64x2->untag()->y();
  OS::PrintErr("materializing Float64x2 at %" Px ": %g,%g\n",
               reinterpret_cast<uword>(slot()), x, y);
}
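// DeferredInt32x4::Materialize: same pattern for the four-lane integer SIMD
// value.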
Int32x4Ptr* int32x4_slot = reinterpret_cast<Int32x4Ptr*>(slot());
Int32x4Ptr raw_int32x4 = Int32x4::New(value());
*int32x4_slot = raw_int32x4;

if (FLAG_trace_deoptimization_verbose) {
  uint32_t x = raw_int32x4->untag()->x();
  uint32_t y = raw_int32x4->untag()->y();
  uint32_t z = raw_int32x4->untag()->z();
  uint32_t w = raw_int32x4->untag()->w();
  OS::PrintErr("materializing Int32x4 at %" Px ": %x,%x,%x,%x\n",
               reinterpret_cast<uword>(slot()), x, y, z, w);
}
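// DeferredObjectRef::Materialize: a minimal sketch, assuming the class-lookup
// trace used elsewhere in this file; the already-materialized object is
// stored into its slot and, under the verbose flag, its class is logged.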
DeferredObject* obj = deopt_context->GetDeferredObject(index());
*slot() = obj->object();
if (FLAG_trace_deoptimization_verbose) {
  const Class& cls = Class::Handle(IsolateGroup::Current()->class_table()->At(
      Object::Handle(obj->object()).GetClassId()));
  OS::PrintErr("writing instance of class %s ref at %" Px ".\n",
               cls.ToCString(), reinterpret_cast<uword>(slot()));
}
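// DeferredRetAddr::Materialize: after making sure unoptimized code exists for
// the function, the saved return address is rewritten so that execution
// resumes at the matching deopt descriptor of the unoptimized code.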
if (!error.IsNull()) {
  Exceptions::PropagateError(error);
}

uword continue_at_pc =
    code.GetPcForDeoptId(deopt_id_, UntaggedPcDescriptors::kDeopt);
if (continue_at_pc == 0) {
  FATAL("Can't locate continuation PC for deoptid %" Pd " within %s\n",
        deopt_id_, function.ToFullyQualifiedCString());
}
uword* dest_addr = reinterpret_cast<uword*>(slot());
*dest_addr = continue_at_pc;

if (FLAG_trace_deoptimization_verbose) {
  THR_Print("materializing return addr at 0x%" Px ": 0x%" Px "\n",
            reinterpret_cast<uword>(slot()), continue_at_pc);
}

uword pc = code.GetPcForDeoptId(deopt_id_, UntaggedPcDescriptors::kIcCall);
// If pc != 0 the deopt happened at an IC call and the deopt reason is
// recorded on the corresponding ICData. Otherwise the function is marked so
// the offending optimizations are not repeated; each call below sits behind
// the matching deopt_context->HasDeoptFlag(...) check:
function.SetProhibitsInstructionHoisting(true);

function.SetProhibitsBoundsCheckGeneralization(true);
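// DeferredPcMarker::Materialize: the frame's pc marker slot receives the
// unoptimized Code object, and the function's deoptimization count is traced
// when the flags are set.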
if (!error.IsNull()) {
  Exceptions::PropagateError(error);
}

if (FLAG_trace_deoptimization_verbose) {
  THR_Print("materializing pc marker at 0x%" Px ": %s, %s\n",
            reinterpret_cast<uword>(slot()), code.ToCString(),
            function.ToCString());
}

if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
  THR_Print("Deoptimizing '%s' (count %d)\n",
            function.ToFullyQualifiedCString(),
            function.deoptimization_counter());
}
if (!error.IsNull()) {
  Exceptions::PropagateError(error);
}

if (FLAG_trace_deoptimization_verbose) {
  THR_Print("materializing pp at 0x%" Px ": 0x%" Px "\n",
            reinterpret_cast<uword>(slot()),
            static_cast<uword>(code.GetObjectPool()));
}
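// DeferredObject materializes lazily: object() allocates the instance on
// first use via Create() and its fields are filled in afterwards.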
ObjectPtr DeferredObject::object() {
  if (object_ == nullptr) {
    Create();
  }
  return object_->ptr();
}
void DeferredObject::Create() {
  if (object_ != nullptr) {
    return;
  }
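// Create() only allocates the object; the dispatch below is on the recorded
// class id (context, array, record, typed data, or a plain instance).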
const intptr_t num_variables =
    Smi::Cast(Object::Handle(GetLengthOrShape())).Value();
if (FLAG_trace_deoptimization_verbose) {
  OS::PrintErr(
      "materializing context of length %" Pd " (%" Px ", %" Pd " vars)\n",
      num_variables, reinterpret_cast<uword>(args_), field_count_);
}
object_ = &Context::ZoneHandle(Context::New(num_variables));
const intptr_t num_elements =
    Smi::Cast(Object::Handle(GetLengthOrShape())).Value();
if (FLAG_trace_deoptimization_verbose) {
  OS::PrintErr("materializing array of length %" Pd " (%" Px ", %" Pd
               " elements)\n",
               num_elements, reinterpret_cast<uword>(args_), field_count_);
}
object_ = &Array::ZoneHandle(Array::New(num_elements));
const RecordShape shape(Smi::RawCast(GetLengthOrShape()));
if (FLAG_trace_deoptimization_verbose) {
  OS::PrintErr(
      "materializing record of shape %" Px " (%" Px ", %" Pd " fields)\n",
      shape.AsInt(), reinterpret_cast<uword>(args_), field_count_);
}
object_ = &Record::ZoneHandle(Record::New(shape));
if (IsTypedDataClassId(cls.id())) {
  const intptr_t num_elements =
      Smi::Cast(Object::Handle(GetLengthOrShape())).Value();
  if (FLAG_trace_deoptimization_verbose) {
    OS::PrintErr("materializing typed data cid %" Pd " of length %" Pd
                 " (%" Px ", %" Pd " elements)\n",
                 cls.id(), num_elements, reinterpret_cast<uword>(args_),
                 field_count_);
  }
  object_ = &TypedData::ZoneHandle(TypedData::New(cls.id(), num_elements));
} else {
  if (FLAG_trace_deoptimization_verbose) {
    OS::PrintErr("materializing instance of %s (%" Px ", %" Pd " fields)\n",
                 cls.ToCString(), reinterpret_cast<uword>(args_),
                 field_count_);
  }
  object_ = &Instance::ZoneHandle(Instance::New(cls));
}
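// Fill() writes the saved field values into the freshly created object. For
// a Context, a slot's byte offset is mapped back to a variable index via
// ToContextIndex, roughly (offset - Context::variable_offset(0)) /
// Context::kBytesPerElement.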
const Context& context = Context::Cast(*object_);

for (intptr_t i = 0; i < field_count_; i++) {
  offset ^= GetFieldOffset(i);
  if (offset.Value() == Context::parent_offset()) {
    Context& parent = Context::Handle();
    parent ^= GetValue(i);
    context.set_parent(parent);
    if (FLAG_trace_deoptimization_verbose) {
      OS::PrintErr("    ctx@parent (offset %" Pd ") <- %s\n", offset.Value(),
                   parent.ToCString());
    }
  } else {
    const intptr_t context_index = ToContextIndex(offset.Value());
    value = GetValue(i);
    context.SetAt(context_index, value);
    if (FLAG_trace_deoptimization_verbose) {
      OS::PrintErr("    ctx@%" Pd " (offset %" Pd ") <- %s\n", context_index,
                   offset.Value(), value.ToCString());
    }
  }
}
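// For an Array, the type-arguments slot is handled specially; any other
// offset is converted to an element index with Array::index_at_offset.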
const Array& array = Array::Cast(*object_);

for (intptr_t i = 0; i < field_count_; i++) {
  offset ^= GetFieldOffset(i);
  if (offset.Value() == Array::type_arguments_offset()) {
    TypeArguments& type_args = TypeArguments::Handle();
    type_args ^= GetValue(i);
    array.SetTypeArguments(type_args);
    if (FLAG_trace_deoptimization_verbose) {
      OS::PrintErr("    array@type_args (offset %" Pd ") <- %s\n",
                   offset.Value(), type_args.ToCString());
    }
  } else {
    const intptr_t index = Array::index_at_offset(offset.Value());
    value = GetValue(i);
    array.SetAt(index, value);
    if (FLAG_trace_deoptimization_verbose) {
      OS::PrintErr("    array@%" Pd " (offset %" Pd ") <- %s\n", index,
                   offset.Value(), value.ToCString());
    }
  }
}
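// An FFI Pointer is rebuilt from exactly two recorded fields: its native
// address and its type arguments.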
const int kDataIndex = 0;
const int kTypeArgIndex = 1;
ASSERT(field_count_ == 2);
const auto& pointer = Pointer::Cast(*object_);
const size_t address =
    Integer::Cast(Object::Handle(GetValue(kDataIndex))).AsInt64Value();
pointer.SetNativeAddress(address);
TypeArguments& type_args = TypeArguments::Handle();
type_args ^= GetValue(kTypeArgIndex);
pointer.SetTypeArguments(type_args);
if (FLAG_trace_deoptimization_verbose) {
  OS::PrintErr("    pointer@type_args <- %s\n", type_args.ToCString());
}
const Record& record = Record::Cast(*object_);

for (intptr_t i = 0; i < field_count_; i++) {
  offset ^= GetFieldOffset(i);
  const intptr_t index = Record::field_index_at_offset(offset.Value());
  value = GetValue(i);
  record.SetFieldAt(index, value);
  if (FLAG_trace_deoptimization_verbose) {
    OS::PrintErr("    record@%" Pd " (offset %" Pd ") <- %s\n", index,
                 offset.Value(), value.ToCString());
  }
}
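// For typed data, the recorded offset is an element offset and the stored
// value is unboxed according to the array's element type.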
const TypedData& typed_data = TypedData::Cast(*object_);
const auto cid = cls.id();

for (intptr_t i = 0; i < field_count_; i++) {
  offset ^= GetFieldOffset(i);
  const intptr_t element_offset = offset.Value();
  value = GetValue(i);
  switch (cid) {
    case kTypedDataInt8ArrayCid:
      typed_data.SetInt8(
          element_offset,
          static_cast<int8_t>(Integer::Cast(value).AsInt64Value()));
      break;
    case kTypedDataUint8ArrayCid:
    case kTypedDataUint8ClampedArrayCid:
      typed_data.SetUint8(
          element_offset,
          static_cast<uint8_t>(Integer::Cast(value).AsInt64Value()));
      break;
    case kTypedDataInt16ArrayCid:
      typed_data.SetInt16(
          element_offset,
          static_cast<int16_t>(Integer::Cast(value).AsInt64Value()));
      break;
    case kTypedDataUint16ArrayCid:
      typed_data.SetUint16(
          element_offset,
          static_cast<uint16_t>(Integer::Cast(value).AsInt64Value()));
      break;
    case kTypedDataInt32ArrayCid:
      typed_data.SetInt32(
          element_offset,
          static_cast<int32_t>(Integer::Cast(value).AsInt64Value()));
      break;
    case kTypedDataUint32ArrayCid:
      typed_data.SetUint32(
          element_offset,
          static_cast<uint32_t>(Integer::Cast(value).AsInt64Value()));
      break;
    case kTypedDataInt64ArrayCid:
      typed_data.SetInt64(element_offset,
                          Integer::Cast(value).AsInt64Value());
      break;
    case kTypedDataUint64ArrayCid:
      typed_data.SetUint64(
          element_offset,
          static_cast<uint64_t>(Integer::Cast(value).AsInt64Value()));
      break;
    case kTypedDataFloat32ArrayCid:
      typed_data.SetFloat32(element_offset,
                            static_cast<float>(Double::Cast(value).value()));
      break;
    case kTypedDataFloat64ArrayCid:
      typed_data.SetFloat64(element_offset, Double::Cast(value).value());
      break;
    case kTypedDataFloat32x4ArrayCid:
      typed_data.SetFloat32x4(element_offset, Float32x4::Cast(value).value());
      break;
    case kTypedDataInt32x4ArrayCid:
      typed_data.SetInt32x4(element_offset, Int32x4::Cast(value).value());
      break;
    case kTypedDataFloat64x2ArrayCid:
      typed_data.SetFloat64x2(element_offset, Float64x2::Cast(value).value());
      break;
    default:
      UNREACHABLE();
  }
  if (FLAG_trace_deoptimization_verbose) {
    OS::PrintErr("    typed_data (offset %" Pd ") <- %s\n", element_offset,
                 value.ToCString());
  }
}
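// For a generic instance, each offset is looked up in the class's
// offset-to-field map; known, non-sentinel values are set through the Field,
// everything else is written at the raw offset. Typed data views additionally
// need their inner data pointer recomputed.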
const Instance& obj = Instance::Cast(*object_);
const Array& offset_map = Array::Handle(cls.OffsetToFieldMap());

for (intptr_t i = 0; i < field_count_; i++) {
  offset ^= GetFieldOffset(i);
  field ^= offset_map.At(offset.Value() / kCompressedWordSize);
  value = GetValue(i);
  ASSERT((value.ptr() != Object::sentinel().ptr()) ||
         (!field.IsNull() && field.is_late()));
  if (!field.IsNull() && (value.ptr() != Object::sentinel().ptr())) {
    obj.SetField(field, value);
    if (FLAG_trace_deoptimization_verbose) {
      OS::PrintErr("    %s <- %s\n",
                   String::Handle(field.name()).ToCString(),
                   value.ToCString());
    }
  } else {
    // No usable Field here (or the value is the sentinel for a late field):
    // the value is written straight into the instance at the recorded offset.
    if (FLAG_trace_deoptimization_verbose) {
      OS::PrintErr(
          "    %s @ offset(%" Pd ") <- %s\n",
          (field.IsNull() ? "null Field"
                          : String::Handle(field.name()).ToCString()),
          offset.Value(), value.ToCString());
    }
  }
}

if (obj.IsTypedDataView()) {
  // The view caches a pointer into its backing store; it must be recomputed
  // after the fields above have been materialized.
  TypedDataView::Cast(obj).RecomputeDataField();
}