deferred_objects.cc
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#if !defined(DART_PRECOMPILED_RUNTIME)

#include "vm/deferred_objects.h"

#include "vm/code_patcher.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/deopt_instructions.h"
#include "vm/flags.h"
#include "vm/object.h"
#include "vm/object_store.h"

namespace dart {

DECLARE_FLAG(bool, trace_deoptimization);
DECLARE_FLAG(bool, trace_deoptimization_verbose);

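// Each materializer below re-creates a value that optimized code kept in an
// unboxed or otherwise raw form and stores the resulting object (or raw word)
// into the destination slot chosen by the deoptimizer.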
void DeferredDouble::Materialize(DeoptContext* deopt_context) {
  DoublePtr* double_slot = reinterpret_cast<DoublePtr*>(slot());
  *double_slot = Double::New(value());

  if (FLAG_trace_deoptimization_verbose) {
    OS::PrintErr("materializing double at %" Px ": %g\n",
                 reinterpret_cast<uword>(slot()), value());
  }
}

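// A deferred Mint is only recorded for 64-bit values outside the Smi range
// (asserted below); Integer::New allocates the boxed representation.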
void DeferredMint::Materialize(DeoptContext* deopt_context) {
  MintPtr* mint_slot = reinterpret_cast<MintPtr*>(slot());
  ASSERT(!Smi::IsValid(value()));
  Mint& mint = Mint::Handle();
  mint ^= Integer::New(value());
  *mint_slot = mint.ptr();

  if (FLAG_trace_deoptimization_verbose) {
    OS::PrintErr("materializing mint at %" Px ": %" Pd64 "\n",
                 reinterpret_cast<uword>(slot()), value());
  }
}

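// The three SIMD materializers (Float32x4, Float64x2, Int32x4) follow the
// same pattern: box the 128-bit value and store the result in the slot.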
void DeferredFloat32x4::Materialize(DeoptContext* deopt_context) {
  Float32x4Ptr* float32x4_slot = reinterpret_cast<Float32x4Ptr*>(slot());
  Float32x4Ptr raw_float32x4 = Float32x4::New(value());
  *float32x4_slot = raw_float32x4;

  if (FLAG_trace_deoptimization_verbose) {
    float x = raw_float32x4->untag()->x();
    float y = raw_float32x4->untag()->y();
    float z = raw_float32x4->untag()->z();
    float w = raw_float32x4->untag()->w();
    OS::PrintErr("materializing Float32x4 at %" Px ": %g,%g,%g,%g\n",
                 reinterpret_cast<uword>(slot()), x, y, z, w);
  }
}

void DeferredFloat64x2::Materialize(DeoptContext* deopt_context) {
  Float64x2Ptr* float64x2_slot = reinterpret_cast<Float64x2Ptr*>(slot());
  Float64x2Ptr raw_float64x2 = Float64x2::New(value());
  *float64x2_slot = raw_float64x2;

  if (FLAG_trace_deoptimization_verbose) {
    double x = raw_float64x2->untag()->x();
    double y = raw_float64x2->untag()->y();
    OS::PrintErr("materializing Float64x2 at %" Px ": %g,%g\n",
                 reinterpret_cast<uword>(slot()), x, y);
  }
}

void DeferredInt32x4::Materialize(DeoptContext* deopt_context) {
  Int32x4Ptr* int32x4_slot = reinterpret_cast<Int32x4Ptr*>(slot());
  Int32x4Ptr raw_int32x4 = Int32x4::New(value());
  *int32x4_slot = raw_int32x4;

  if (FLAG_trace_deoptimization_verbose) {
    uint32_t x = raw_int32x4->untag()->x();
    uint32_t y = raw_int32x4->untag()->y();
    uint32_t z = raw_int32x4->untag()->z();
    uint32_t w = raw_int32x4->untag()->w();
    OS::PrintErr("materializing Int32x4 at %" Px ": %x,%x,%x,%x\n",
                 reinterpret_cast<uword>(slot()), x, y, z, w);
  }
}

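// Writes a reference to a lazily materialized DeferredObject (typically an
// allocation that was sunk by the optimizer) into the slot; obj->object()
// allocates the instance on first use.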
void DeferredObjectRef::Materialize(DeoptContext* deopt_context) {
  DeferredObject* obj = deopt_context->GetDeferredObject(index());
  *slot() = obj->object();
  if (FLAG_trace_deoptimization_verbose) {
    const Class& cls = Class::Handle(IsolateGroup::Current()->class_table()->At(
        Object::Handle(obj->object()).GetClassId()));
    OS::PrintErr("writing instance of class %s ref at %" Px ".\n",
                 cls.ToCString(), reinterpret_cast<uword>(slot()));
  }
}

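// Points the return-address slot of the deoptimized frame at the matching
// continuation PC in the function's unoptimized code (compiling it first if
// necessary), and records the deoptimization reason in the ICData (or sets
// the corresponding prohibit flags) to avoid repeating the same
// deoptimization.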
void DeferredRetAddr::Materialize(DeoptContext* deopt_context) {
  Thread* thread = deopt_context->thread();
  Zone* zone = deopt_context->zone();
  Function& function = Function::Handle(zone);
  function ^= deopt_context->ObjectAt(index_);
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
  const Code& code = Code::Handle(zone, function.unoptimized_code());

  uword continue_at_pc =
      code.GetPcForDeoptId(deopt_id_, UntaggedPcDescriptors::kDeopt);
  if (continue_at_pc == 0) {
    FATAL("Can't locate continuation PC for deoptid %" Pd " within %s\n",
          deopt_id_, function.ToFullyQualifiedCString());
  }
  uword* dest_addr = reinterpret_cast<uword*>(slot());
  *dest_addr = continue_at_pc;

  if (FLAG_trace_deoptimization_verbose) {
    OS::PrintErr("materializing return addr at 0x%" Px ": 0x%" Px "\n",
                 reinterpret_cast<uword>(slot()), continue_at_pc);
  }

  uword pc = code.GetPcForDeoptId(deopt_id_, UntaggedPcDescriptors::kIcCall);
  if (pc != 0) {
    // If the deoptimization happened at an IC call, update the IC data
    // to avoid repeated deoptimization at the same site next time around.
    // We cannot use CodePatcher::GetInstanceCallAt because the call site
    // may have switched from referencing an ICData to a target Code or
    // MegamorphicCache.
    ICData& ic_data = ICData::Handle(zone, function.FindICData(deopt_id_));
    ic_data.AddDeoptReason(deopt_context->deopt_reason());
    // Propagate the reason to all ICData-s with the same deopt_id since
    // only unoptimized-code ICData (IC calls) are propagated.
    function.SetDeoptReasonForAll(ic_data.deopt_id(),
                                  deopt_context->deopt_reason());
  } else {
    if (deopt_context->HasDeoptFlag(ICData::kHoisted)) {
      // Prevent excessive deoptimization.
      function.SetProhibitsInstructionHoisting(true);
    }

    if (deopt_context->HasDeoptFlag(ICData::kGeneralized)) {
      function.SetProhibitsBoundsCheckGeneralization(true);
    }
  }
}

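// Rewrites the PC-marker slot to the function's unoptimized Code and does the
// bookkeeping for leaving optimized code: bumps the deoptimization counter
// (when the code itself is being deoptimized), resets the usage counter, and
// switches the function back to unoptimized code if needed.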
void DeferredPcMarker::Materialize(DeoptContext* deopt_context) {
  Thread* thread = deopt_context->thread();
  Zone* zone = deopt_context->zone();
  uword* dest_addr = reinterpret_cast<uword*>(slot());
  Function& function = Function::Handle(zone);
  function ^= deopt_context->ObjectAt(index_);
  ASSERT(!function.IsNull());
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
  const Code& code = Code::Handle(zone, function.unoptimized_code());
  ASSERT(!code.IsNull());
  ASSERT(function.HasCode());
  *reinterpret_cast<ObjectPtr*>(dest_addr) = code.ptr();

  if (FLAG_trace_deoptimization_verbose) {
    THR_Print("materializing pc marker at 0x%" Px ": %s, %s\n",
              reinterpret_cast<uword>(slot()), code.ToCString(),
              function.ToCString());
  }

  // Increment the deoptimization counter. This effectively increments it
  // for each function occurring in the optimized frame.
  if (deopt_context->deoptimizing_code()) {
    function.set_deoptimization_counter(function.deoptimization_counter() + 1);
  }
  if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
    THR_Print("Deoptimizing '%s' (count %d)\n",
              function.ToFullyQualifiedCString(),
              function.deoptimization_counter());
  }
  // Clear the invocation counter so that the function is hopefully
  // reoptimized only after more feedback has been collected.
  function.SetUsageCounter(0);
  if (function.HasOptimizedCode()) {
    function.SwitchToUnoptimizedCode();
  }
}

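// Restores the saved pool-pointer slot of the frame with the object pool of
// the function's unoptimized code.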
void DeferredPp::Materialize(DeoptContext* deopt_context) {
  Thread* thread = deopt_context->thread();
  Zone* zone = deopt_context->zone();
  Function& function = Function::Handle(zone);
  function ^= deopt_context->ObjectAt(index_);
  ASSERT(!function.IsNull());
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
  const Code& code = Code::Handle(zone, function.unoptimized_code());
  ASSERT(!code.IsNull());
  ASSERT(code.GetObjectPool() != Object::null());
  *slot() = code.GetObjectPool();

  if (FLAG_trace_deoptimization_verbose) {
    OS::PrintErr("materializing pp at 0x%" Px ": 0x%" Px "\n",
                 reinterpret_cast<uword>(slot()),
                 static_cast<uword>(code.GetObjectPool()));
  }
}

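// Returns the materialized object, allocating it on first access. Field
// values are copied in separately by Fill().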
ObjectPtr DeferredObject::object() {
  if (object_ == nullptr) {
    Create();
  }
  return object_->ptr();
}

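// Allocates the object to be materialized without initializing its fields.
// Contexts, arrays, records and typed data need their length or shape up
// front; every other class is allocated as a plain instance.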
void DeferredObject::Create() {
  if (object_ != nullptr) {
    return;
  }

  Class& cls = Class::Handle();
  cls ^= GetClass();

  switch (cls.id()) {
    case kContextCid: {
      const intptr_t num_variables =
          Smi::Cast(Object::Handle(GetLengthOrShape())).Value();
      if (FLAG_trace_deoptimization_verbose) {
        OS::PrintErr(
            "materializing context of length %" Pd " (%" Px ", %" Pd " vars)\n",
            num_variables, reinterpret_cast<uword>(args_), field_count_);
      }
      object_ = &Context::ZoneHandle(Context::New(num_variables));
    } break;
    case kArrayCid: {
      const intptr_t num_elements =
          Smi::Cast(Object::Handle(GetLengthOrShape())).Value();
      if (FLAG_trace_deoptimization_verbose) {
        OS::PrintErr("materializing array of length %" Pd " (%" Px ", %" Pd
                     " elements)\n",
                     num_elements, reinterpret_cast<uword>(args_),
                     field_count_);
      }
      object_ = &Array::ZoneHandle(Array::New(num_elements));
    } break;
    case kRecordCid: {
      const RecordShape shape(Smi::RawCast(GetLengthOrShape()));
      if (FLAG_trace_deoptimization_verbose) {
        OS::PrintErr(
            "materializing record of shape %" Px " (%" Px ", %" Pd " fields)\n",
            shape.AsInt(), reinterpret_cast<uword>(args_), field_count_);
      }
      object_ = &Record::ZoneHandle(Record::New(shape));
    } break;
    default:
      if (IsTypedDataClassId(cls.id())) {
        const intptr_t num_elements =
            Smi::Cast(Object::Handle(GetLengthOrShape())).Value();
        if (FLAG_trace_deoptimization_verbose) {
          OS::PrintErr("materializing typed data cid %" Pd " of length %" Pd
                       " (%" Px ", %" Pd " elements)\n",
                       cls.id(), num_elements, reinterpret_cast<uword>(args_),
                       field_count_);
        }
        object_ =
            &TypedData::ZoneHandle(TypedData::New(cls.id(), num_elements));

      } else {
        if (FLAG_trace_deoptimization_verbose) {
          OS::PrintErr(
              "materializing instance of %s (%" Px ", %" Pd " fields)\n",
              cls.ToCString(), reinterpret_cast<uword>(args_), field_count_);
        }

        object_ = &Instance::ZoneHandle(Instance::New(cls));
      }
  }
}

static intptr_t ToContextIndex(intptr_t offset_in_bytes) {
  intptr_t result = (offset_in_bytes - Context::variable_offset(0)) /
                    Context::kBytesPerElement;
  ASSERT(result >= 0);
  return result;
}

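// Copies the recorded field values into the object allocated by Create().
// Contexts, arrays, FFI pointers, records and typed data are filled through
// their dedicated setters; everything else is treated as a generic instance
// whose fields are resolved via the class's offset-to-field map.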
void DeferredObject::Fill() {
  Create();  // Ensure instance is created.

  Class& cls = Class::Handle();
  cls ^= GetClass();

  switch (cls.id()) {
    case kContextCid: {
      const Context& context = Context::Cast(*object_);

      Smi& offset = Smi::Handle();
      Object& value = Object::Handle();

      for (intptr_t i = 0; i < field_count_; i++) {
        offset ^= GetFieldOffset(i);
        if (offset.Value() == Context::parent_offset()) {
          // Copy parent.
          Context& parent = Context::Handle();
          parent ^= GetValue(i);
          context.set_parent(parent);
          if (FLAG_trace_deoptimization_verbose) {
            OS::PrintErr("    ctx@parent (offset %" Pd ") <- %s\n",
                         offset.Value(), parent.ToCString());
          }
        } else {
          intptr_t context_index = ToContextIndex(offset.Value());
          value = GetValue(i);
          context.SetAt(context_index, value);
          if (FLAG_trace_deoptimization_verbose) {
            OS::PrintErr("    ctx@%" Pd " (offset %" Pd ") <- %s\n",
                         context_index, offset.Value(), value.ToCString());
          }
        }
      }
    } break;
    case kArrayCid: {
      const Array& array = Array::Cast(*object_);

      Smi& offset = Smi::Handle();
      Object& value = Object::Handle();

      for (intptr_t i = 0; i < field_count_; i++) {
        offset ^= GetFieldOffset(i);
        if (offset.Value() == Array::type_arguments_offset()) {
          TypeArguments& type_args = TypeArguments::Handle();
          type_args ^= GetValue(i);
          array.SetTypeArguments(type_args);
          if (FLAG_trace_deoptimization_verbose) {
            OS::PrintErr("    array@type_args (offset %" Pd ") <- %s\n",
                         offset.Value(), type_args.ToCString());
          }
        } else {
          const intptr_t index = Array::index_at_offset(offset.Value());
          value = GetValue(i);
          array.SetAt(index, value);
          if (FLAG_trace_deoptimization_verbose) {
            OS::PrintErr("    array@%" Pd " (offset %" Pd ") <- %s\n", index,
                         offset.Value(), value.ToCString());
          }
        }
      }
    } break;
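    // An FFI Pointer is restored from its raw native address; its type
    // argument vector is reset to the canonical <Never> vector cached in the
    // object store.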
    case kPointerCid: {
      auto* const zone = Thread::Current()->zone();
      const int kDataIndex = 0;
      const int kTypeArgIndex = 1;
      ASSERT(field_count_ == 2);
      ASSERT(Smi::Cast(Object::Handle(zone, GetFieldOffset(kDataIndex)))
                 .AsInt64Value() == PointerBase::data_offset());
      ASSERT(Smi::Cast(Object::Handle(zone, GetFieldOffset(kTypeArgIndex)))
                 .AsInt64Value() == Pointer::type_arguments_offset());

      const auto& pointer = Pointer::Cast(*object_);
      const size_t address =
          Integer::Cast(Object::Handle(zone, GetValue(kDataIndex)))
              .AsInt64Value();
      pointer.SetNativeAddress(address);
      const auto& type_args = TypeArguments::Handle(
          zone, IsolateGroup::Current()->object_store()->type_argument_never());
      pointer.SetTypeArguments(type_args);
      if (FLAG_trace_deoptimization_verbose) {
        OS::PrintErr("    pointer@data <- 0x%" Px "\n", address);
        OS::PrintErr("    pointer@type_args <- %s\n", type_args.ToCString());
      }
    } break;
    case kRecordCid: {
      const Record& record = Record::Cast(*object_);

      Smi& offset = Smi::Handle();
      Object& value = Object::Handle();

      for (intptr_t i = 0; i < field_count_; i++) {
        offset ^= GetFieldOffset(i);
        const intptr_t index = Record::field_index_at_offset(offset.Value());
        value = GetValue(i);
        record.SetFieldAt(index, value);
        if (FLAG_trace_deoptimization_verbose) {
          OS::PrintErr("    record@%" Pd " (offset %" Pd ") <- %s\n", index,
                       offset.Value(), value.ToCString());
        }
      }
    } break;
    default:
      if (IsTypedDataClassId(cls.id())) {
        const TypedData& typed_data = TypedData::Cast(*object_);

        Smi& offset = Smi::Handle();
        Object& value = Object::Handle();
        const auto cid = cls.id();

        for (intptr_t i = 0; i < field_count_; i++) {
          offset ^= GetFieldOffset(i);
          const intptr_t element_offset = offset.Value();
          value = GetValue(i);
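          // Each deferred value was captured as a boxed Integer, Double or
          // SIMD object; unbox it according to the typed data's element type.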
          switch (cid) {
            case kTypedDataInt8ArrayCid:
              typed_data.SetInt8(
                  element_offset,
                  static_cast<int8_t>(Integer::Cast(value).AsInt64Value()));
              break;
            case kTypedDataUint8ArrayCid:
            case kTypedDataUint8ClampedArrayCid:
              typed_data.SetUint8(
                  element_offset,
                  static_cast<uint8_t>(Integer::Cast(value).AsInt64Value()));
              break;
            case kTypedDataInt16ArrayCid:
              typed_data.SetInt16(
                  element_offset,
                  static_cast<int16_t>(Integer::Cast(value).AsInt64Value()));
              break;
            case kTypedDataUint16ArrayCid:
              typed_data.SetUint16(
                  element_offset,
                  static_cast<uint16_t>(Integer::Cast(value).AsInt64Value()));
              break;
            case kTypedDataInt32ArrayCid:
              typed_data.SetInt32(
                  element_offset,
                  static_cast<int32_t>(Integer::Cast(value).AsInt64Value()));
              break;
            case kTypedDataUint32ArrayCid:
              typed_data.SetUint32(
                  element_offset,
                  static_cast<uint32_t>(Integer::Cast(value).AsInt64Value()));
              break;
            case kTypedDataInt64ArrayCid:
              typed_data.SetInt64(element_offset,
                                  Integer::Cast(value).AsInt64Value());
              break;
            case kTypedDataUint64ArrayCid:
              typed_data.SetUint64(
                  element_offset,
                  static_cast<uint64_t>(Integer::Cast(value).AsInt64Value()));
              break;
            case kTypedDataFloat32ArrayCid:
              typed_data.SetFloat32(
                  element_offset,
                  static_cast<float>(Double::Cast(value).value()));
              break;
            case kTypedDataFloat64ArrayCid:
              typed_data.SetFloat64(element_offset,
                                    Double::Cast(value).value());
              break;
            case kTypedDataFloat32x4ArrayCid:
              typed_data.SetFloat32x4(element_offset,
                                      Float32x4::Cast(value).value());
              break;
            case kTypedDataInt32x4ArrayCid:
              typed_data.SetInt32x4(element_offset,
                                    Int32x4::Cast(value).value());
              break;
            case kTypedDataFloat64x2ArrayCid:
              typed_data.SetFloat64x2(element_offset,
                                      Float64x2::Cast(value).value());
              break;
            default:
              UNREACHABLE();
          }
          if (FLAG_trace_deoptimization_verbose) {
            OS::PrintErr("    typed_data (offset %" Pd ") <- %s\n",
                         element_offset, value.ToCString());
          }
        }
      } else {
        const Instance& obj = Instance::Cast(*object_);

        Smi& offset = Smi::Handle();
        Field& field = Field::Handle();
        Object& value = Object::Handle();
        const Array& offset_map = Array::Handle(cls.OffsetToFieldMap());

        for (intptr_t i = 0; i < field_count_; i++) {
          offset ^= GetFieldOffset(i);
          field ^= offset_map.At(offset.Value() / kCompressedWordSize);
          value = GetValue(i);
          ASSERT((value.ptr() != Object::sentinel().ptr()) ||
                 (!field.IsNull() && field.is_late()));
          if (!field.IsNull() && (value.ptr() != Object::sentinel().ptr())) {
            obj.SetField(field, value);
            if (FLAG_trace_deoptimization_verbose) {
              OS::PrintErr("    %s <- %s\n",
                           String::Handle(field.name()).ToCString(),
                           value.ToCString());
            }
          } else {
            // In addition to the type arguments vector we can also have lazy
            // materialization of e.g. _ByteDataView objects which don't have
            // explicit fields in Dart (all accesses to the fields are done via
            // recognized native methods).
            ASSERT(offset.Value() < cls.host_instance_size());
            obj.SetFieldAtOffset(offset.Value(), value);
            if (FLAG_trace_deoptimization_verbose) {
              OS::PrintErr(
                  "    %s @ offset(%" Pd ") <- %s\n",
                  (field.IsNull() ? "null Field"
                                  : String::Handle(field.name()).ToCString()),
                  offset.Value(), value.ToCString());
            }
          }
        }

        if (obj.IsTypedDataView()) {
          // The data field does not get materialized for typed data views
          // because it is not a safe untagged pointer and must be recomputed.
          TypedDataView::Cast(obj).RecomputeDataField();
        }
      }
      break;
  }
}
}  // namespace dart

#endif  // !defined(DART_PRECOMPILED_RUNTIME)