marshaller.cc
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/ffi/marshaller.h"

#include "platform/assert.h"
#include "platform/globals.h"
#include "vm/class_id.h"
#include "vm/compiler/backend/locations.h"
#include "vm/compiler/ffi/frame_rebase.h"
#include "vm/compiler/ffi/native_calling_convention.h"
#include "vm/compiler/ffi/native_location.h"
#include "vm/compiler/ffi/native_type.h"
#include "vm/exceptions.h"
#include "vm/ffi_callback_metadata.h"
#include "vm/log.h"
#include "vm/object_store.h"
#include "vm/raw_object.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"
#include "vm/tagged_pointer.h"

namespace dart {

namespace compiler {

namespace ffi {

// Argument #0 is the function pointer.
const intptr_t kNativeParamsStartAt = 1;

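// Note on variadic signatures: when a parameter's type class is VarArgs, its
// single type argument is a record type, and each record field contributes
// one native argument representation below. For example (illustrative only),
// a trailing VarArgs<(Int32, Double)> adds an int32 and a double
// representation, and variadic_arguments_index records where the variadic
// part of the signature starts.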
// Representations of the arguments and return value of a C signature function.
const NativeFunctionType* NativeFunctionTypeFromFunctionType(
    Zone* zone,
    const FunctionType& c_signature,
    const char** error) {
  ASSERT(c_signature.NumOptionalParameters() == 0);
  ASSERT(c_signature.NumOptionalPositionalParameters() == 0);
  ObjectStore* object_store = IsolateGroup::Current()->object_store();

  const intptr_t num_arguments =
      c_signature.num_fixed_parameters() - kNativeParamsStartAt;
  auto& argument_representations =
      *new ZoneGrowableArray<const NativeType*>(zone, num_arguments);
  AbstractType& arg_type = AbstractType::Handle(zone);
  intptr_t variadic_arguments_index = NativeFunctionType::kNoVariadicArguments;
  for (intptr_t i = 0; i < num_arguments; i++) {
    arg_type = c_signature.ParameterTypeAt(i + kNativeParamsStartAt);
    const bool varargs = arg_type.type_class() == object_store->varargs_class();
    if (varargs) {
      arg_type = TypeArguments::Handle(zone, Type::Cast(arg_type).arguments())
                     .TypeAt(0);
      variadic_arguments_index = i;
      ASSERT(arg_type.IsRecordType());
      const auto& record_type = RecordType::Cast(arg_type);
      const intptr_t num_fields = record_type.NumFields();
      auto& field_type = AbstractType::Handle(zone);
      for (intptr_t i = 0; i < num_fields; i++) {
        field_type ^= record_type.FieldTypeAt(i);
        const auto rep = NativeType::FromAbstractType(zone, field_type, error);
        if (*error != nullptr) {
          return nullptr;
        }
        argument_representations.Add(rep);
      }
    } else {
      const auto rep = NativeType::FromAbstractType(zone, arg_type, error);
      if (*error != nullptr) {
        return nullptr;
      }
      argument_representations.Add(rep);
    }
  }

  const auto& result_type =
      AbstractType::Handle(zone, c_signature.result_type());
  const auto result_representation =
      NativeType::FromAbstractType(zone, result_type, error);
  if (*error != nullptr) {
    return nullptr;
  }

  const auto result = new (zone)
      NativeFunctionType(argument_representations, *result_representation,
                         variadic_arguments_index);
  return result;
}

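// Creates the marshaller for an FFI call: derives the native function type
// from the C signature, computes the native calling convention from it, and
// wraps both in a zone-allocated CallMarshaller. On failure, *error is set
// and nullptr is returned.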
CallMarshaller* CallMarshaller::FromFunction(Zone* zone,
                                             const Function& function,
                                             intptr_t function_params_start_at,
                                             const FunctionType& c_signature,
                                             const char** error) {
  DEBUG_ASSERT(function.IsNotTemporaryScopedHandle());
  DEBUG_ASSERT(c_signature.IsNotTemporaryScopedHandle());
  const auto native_function_signature =
      NativeFunctionTypeFromFunctionType(zone, c_signature, error);
  if (*error != nullptr) {
    return nullptr;
  }
  const auto& native_calling_convention =
      NativeCallingConvention::FromSignature(zone, *native_function_signature);
  return new (zone) CallMarshaller(zone, function, function_params_start_at,
                                   c_signature, native_calling_convention);
}

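// Returns the C type of the argument at arg_index, or of the return value
// when arg_index is kResultIndex. Argument #0 of the C signature is the
// function pointer and is skipped; for variadic signatures, C types at or
// beyond the last parameter are unpacked from the trailing VarArgs record.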
AbstractTypePtr BaseMarshaller::CType(intptr_t arg_index) const {
  if (arg_index == kResultIndex) {
    return c_signature_.result_type();
  }

  Zone* zone = Thread::Current()->zone();
  const auto& parameter_types =
      Array::Handle(zone, c_signature_.parameter_types());
  const intptr_t parameter_type_length = parameter_types.Length();
  const intptr_t last_param_index = parameter_type_length - 1;
  const auto& last_arg_type = AbstractType::Handle(
      zone, c_signature_.ParameterTypeAt(last_param_index));
  ObjectStore* object_store = IsolateGroup::Current()->object_store();
  const bool has_varargs =
      last_arg_type.type_class() == object_store->varargs_class();

  // Skip #0 argument, the function pointer.
  const intptr_t real_arg_index = arg_index + kNativeParamsStartAt;

  if (has_varargs && real_arg_index >= last_param_index) {
    // The C-type is nested in a VarArgs.
    const auto& var_args_type_arg = AbstractType::Handle(
        zone, TypeArguments::Handle(zone, Type::Cast(last_arg_type).arguments())
                  .TypeAt(0));
    if (var_args_type_arg.IsRecordType()) {
      const intptr_t index_in_record = real_arg_index - last_param_index;
      const auto& record_type = RecordType::Cast(var_args_type_arg);
      ASSERT(index_in_record < record_type.NumFields());
      return record_type.FieldTypeAt(index_in_record);
    } else {
      ASSERT(!var_args_type_arg.IsNull());
      return var_args_type_arg.ptr();
    }
  }

  ASSERT(!AbstractType::Handle(zone,
                               c_signature_.ParameterTypeAt(real_arg_index))
              .IsNull());
  return c_signature_.ParameterTypeAt(real_arg_index);
}

AbstractTypePtr BaseMarshaller::DartType(intptr_t arg_index) const {
  if (arg_index == kResultIndex) {
    return dart_signature_.result_type();
  }
  const intptr_t real_arg_index = arg_index + dart_signature_params_start_at_;
  ASSERT(real_arg_index < dart_signature_.NumParameters());
  ASSERT(!AbstractType::Handle(zone_,
                               dart_signature_.ParameterTypeAt(real_arg_index))
              .IsNull());
  return dart_signature_.ParameterTypeAt(real_arg_index);
}

bool BaseMarshaller::IsPointerCType(intptr_t arg_index) const {
  return AbstractType::Handle(zone_, CType(arg_index)).type_class_id() ==
         kPointerCid;
}

bool BaseMarshaller::IsPointerDartType(intptr_t arg_index) const {
  return AbstractType::Handle(zone_, DartType(arg_index)).type_class_id() ==
         kPointerCid;
}

bool BaseMarshaller::IsPointerPointer(intptr_t arg_index) const {
  if (DartType(arg_index) == Object::null()) {
    // TODO(https://dartbug.com/54173): BuildGraphOfSyncFfiCallback provides a
    // function object with its type arguments not initialized.
    return IsPointerCType(arg_index);
  }
  return IsPointerDartType(arg_index) && IsPointerCType(arg_index);
}

bool BaseMarshaller::IsTypedDataPointer(intptr_t arg_index) const {
  if (!IsPointerCType(arg_index)) {
    return false;
  }
  if (IsHandleCType(arg_index)) {
    return false;
  }

  if (DartType(arg_index) == Object::null()) {
    // TODO(https://dartbug.com/54173): BuildGraphOfSyncFfiCallback provides a
    // function object with its type arguments not initialized. Change this
    // to an assert when addressing that issue.
    return false;
  }

  const auto& type = AbstractType::Handle(zone_, DartType(arg_index));
  return type.type_class() ==
         Thread::Current()->compiler_state().TypedDataClass().ptr();
}

static bool IsCompound(Zone* zone, const AbstractType& type) {
  auto& compiler_state = Thread::Current()->compiler_state();
  auto& cls = Class::Handle(zone, type.type_class());
  if (cls.id() == compiler_state.CompoundClass().id() ||
      cls.id() == compiler_state.ArrayClass().id()) {
    return true;
  }
  cls ^= cls.SuperClass();
  if (cls.id() == compiler_state.StructClass().id() ||
      cls.id() == compiler_state.UnionClass().id()) {
    return true;
  }
  return false;
}

bool BaseMarshaller::IsCompoundPointer(intptr_t arg_index) const {
  if (!IsPointerCType(arg_index)) {
    return false;
  }
  if (DartType(arg_index) == Object::null()) {
    // TODO(https://dartbug.com/54173): BuildGraphOfSyncFfiCallback provides a
    // function object with its type arguments not initialized.
    return false;
  }

  const auto& dart_type = AbstractType::Handle(zone_, DartType(arg_index));
  return IsCompound(this->zone_, dart_type);
}

bool BaseMarshaller::IsHandleCType(intptr_t arg_index) const {
  return AbstractType::Handle(zone_, CType(arg_index)).type_class_id() ==
         kFfiHandleCid;
}

bool BaseMarshaller::IsBool(intptr_t arg_index) const {
  return AbstractType::Handle(zone_, CType(arg_index)).type_class_id() ==
         kFfiBoolCid;
}

// Keep consistent with Function::FfiCSignatureReturnsStruct.
bool BaseMarshaller::IsCompoundCType(intptr_t arg_index) const {
  const auto& c_type = AbstractType::Handle(zone_, CType(arg_index));
  return IsCompound(this->zone_, c_type);
}

bool BaseMarshaller::ContainsHandles() const {
  return c_signature_.ContainsHandles();
}

intptr_t BaseMarshaller::NumArgumentDefinitions() const {
  intptr_t total = 0;
  for (intptr_t i = 0; i < num_args(); i++) {
    total += NumDefinitions(i);
  }
  return total;
}

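// Each argument corresponds to one or more IL definitions. Primitive values
// are a single definition; a compound passed as a pointer becomes a
// (TypedDataBase, offsetInBytes) pair; a compound split over multiple native
// locations or over the stack becomes one definition per location or per
// word-sized chunk.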
intptr_t BaseMarshaller::NumDefinitions(intptr_t arg_index) const {
  if (ArgumentIndexIsReturn(arg_index)) {
    return NumReturnDefinitions();
  }

  const auto& loc = Location(arg_index);
  const auto& type = loc.payload_type();

  if (IsCompoundPointer(arg_index)) {
    // typed data base and offset.
    return 2;
  }
  if (type.IsPrimitive()) {
    // All non-struct arguments are 1 definition in IL. Even 64 bit values
    // on 32 bit architectures.
    return 1;
  }

  ASSERT(type.IsCompound());
  ASSERT(!loc.IsPointerToMemory());  // Handled in overrides.
  if (loc.IsMultiple()) {
    // One IL definition for every nested location.
    const auto& multiple = loc.AsMultiple();
    return multiple.locations().length();
  }

  ASSERT(loc.IsStack());
  // For stack, word size definitions in IL. In FFI calls passed into the
  // native call, in FFI callbacks read in separate NativeParams.
  const intptr_t size_in_bytes = type.SizeInBytes();
  const intptr_t num_defs =
      Utils::RoundUp(size_in_bytes, compiler::target::kWordSize) /
      compiler::target::kWordSize;
  return num_defs;
}

intptr_t CallMarshaller::NumDefinitions(intptr_t arg_index) const {
  if (!ArgumentIndexIsReturn(arg_index)) {
    const auto& loc = Location(arg_index);
    const auto& type = loc.payload_type();
    if (type.IsCompound() && loc.IsPointerToMemory()) {
      // For FFI calls, pass in TypedDataBase and offsetInBytes in IL, and copy
      // contents to stack and pass pointer in right location in MC.
      return 2;
    }
  }
  return BaseMarshaller::NumDefinitions(arg_index);
}

intptr_t CallbackMarshaller::NumDefinitions(intptr_t arg_index) const {
  if (!ArgumentIndexIsReturn(arg_index)) {
    const auto& loc = Location(arg_index);
    const auto& type = loc.payload_type();
    if (type.IsCompound() && loc.IsPointerToMemory()) {
      // For FFI callbacks, get the pointer in a NativeParameter and construct
      // the TypedDataBase in IL (always offset in bytes 0).
      return 1;
    }
  }
  return BaseMarshaller::NumDefinitions(arg_index);
}

intptr_t CallMarshaller::NumReturnDefinitions() const {
  // An FFICall is a Definition, and Definitions currently only have one
  // return value.
  //
  // For compound returns, the IL generated by the flow graph builder allocates
  // a TypedDataBase object that is passed into the FfiCall. The generated
  // machine code for the FfiCall instruction then fills in the contents.
  // After the call, the generated IL wraps the TypedDataBase object in a
  // Compound object with an offset in bytes of 0.
  return 1;
}

intptr_t CallbackMarshaller::NumReturnDefinitions() const {
  const auto& loc = Location(kResultIndex);

  if (loc.IsMultiple()) {
    const auto& type = loc.payload_type();
    ASSERT(type.IsCompound());
    // For multiple locations, some native locations cannot be expressed as
    // Locations, which means the flow graph builder cannot generate appropriate
    // IL for those cases.
    //
    // Instead, the flow graph builder generates IL to extract the
    // _typedDataBase and _offsetInBytes fields of the Dart value and passes
    // those into the NativeReturn instruction as separate arguments. The
    // generated machine code for the NativeReturn instruction then
    // appropriately copies the contents to a non-GC-managed block of memory. A
    // pointer to that block of memory is returned to the C code.
    return 2;
  }

  // If it's a compound and the native ABI is passing a pointer, copy to it in
  // IL. If non-compound, also 1 definition. If it's a primitive, the flow graph
  // builder generates IL to create an appropriate Dart value from the single
  // value returned from C.
  return 1;
}

bool BaseMarshaller::ArgumentIndexIsReturn(intptr_t arg_index) const {
  ASSERT(arg_index == kResultIndex || arg_index >= 0);
  return arg_index == kResultIndex;
}

// Definitions in return value count down.
bool BaseMarshaller::DefinitionIndexIsReturn(intptr_t def_index_global) const {
  return def_index_global <= kResultIndex;
}

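// Global definition indices share a single numbering: argument definitions
// count up from 0 in argument order, while return-value definitions count
// down from kResultIndex. As an illustration, with two arguments of 1 and 2
// definitions each, global indices 0, 1, and 2 map to argument indices 0, 1,
// and 1 respectively.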
intptr_t BaseMarshaller::ArgumentIndex(intptr_t def_index_global) const {
  if (DefinitionIndexIsReturn(def_index_global)) {
    const intptr_t def = DefinitionInArgument(def_index_global, kResultIndex);
    ASSERT(def < NumReturnDefinitions());
    return kResultIndex;
  }
  ASSERT(def_index_global < NumArgumentDefinitions());
  intptr_t defs = 0;
  intptr_t arg_index = 0;
  for (; arg_index < num_args(); arg_index++) {
    defs += NumDefinitions(arg_index);
    if (defs > def_index_global) {
      return arg_index;
    }
  }
  UNREACHABLE();
}

intptr_t BaseMarshaller::FirstDefinitionIndex(intptr_t arg_index) const {
  if (arg_index <= kResultIndex) {
    return kResultIndex;
  }
  ASSERT(arg_index < num_args());
  intptr_t num_defs = 0;
  for (intptr_t i = 0; i < arg_index; i++) {
    num_defs += NumDefinitions(i);
  }
  return num_defs;
}

intptr_t BaseMarshaller::DefinitionInArgument(intptr_t def_index_global,
                                              intptr_t arg_index) const {
  if (ArgumentIndexIsReturn(arg_index)) {
    // Counting down for return definitions.
    const intptr_t def = kResultIndex - def_index_global;
    ASSERT(def < NumReturnDefinitions());
    return def;
  } else {
    // Counting up for arguments in consecutive order.
    const intptr_t def = def_index_global - FirstDefinitionIndex(arg_index);
    ASSERT(def < NumDefinitions(arg_index));
    return def;
  }
}

intptr_t BaseMarshaller::DefinitionIndex(intptr_t def_index_in_arg,
                                         intptr_t arg_index) const {
  ASSERT(def_index_in_arg < NumDefinitions(arg_index));
  if (ArgumentIndexIsReturn(arg_index)) {
    return kResultIndex - def_index_in_arg;
  } else {
    return FirstDefinitionIndex(arg_index) + def_index_in_arg;
  }
}

static Representation SelectRepresentationInIL(Zone* zone,
                                               const NativeLocation& location) {
  if (location.container_type().IsInt() && location.payload_type().IsFloat()) {
    // IL can only pass integers to integer Locations, so pass as integer if
    // the Location requires it to be an integer.
    return location.container_type().AsRepresentationOverApprox(zone);
  }
  // Representations do not support 8 or 16 bit ints, over approximate to 32
  // bits.
  return location.payload_type().AsRepresentationOverApprox(zone);
}

Representation BaseMarshaller::RepInDart(intptr_t arg_index) const {
  // This should never be called on Pointers or Handles, which are specially
  // handled during marshalling/unmarshalling.
  ASSERT(!IsHandleCType(arg_index));
  ASSERT(!IsPointerPointer(arg_index));
  return Location(arg_index).payload_type().AsRepresentationOverApprox(zone_);
}

// Implemented partially in BaseMarshaller because most Representations are
// the same in Calls and Callbacks.
Representation BaseMarshaller::RepInFfiCall(intptr_t def_index_global) const {
  intptr_t arg_index = ArgumentIndex(def_index_global);

  // Handled appropriately in the subclasses.
  ASSERT(!IsHandleCType(arg_index));

  // The IL extracts the address stored in the Pointer object as an untagged
  // pointer before passing it to C, and creates a new Pointer object to store
  // the received untagged pointer when receiving a pointer from C.
  if (IsPointerPointer(arg_index)) return kUntagged;

  const auto& location = Location(arg_index);
  if (location.container_type().IsPrimitive()) {
    return SelectRepresentationInIL(zone_, location);
  }
  ASSERT(location.container_type().IsCompound());

  if (location.IsStack()) {
    // Split the struct in architecture size chunks.
    return kUnboxedWord;
  }

  if (location.IsMultiple()) {
    const intptr_t def_index_in_arg =
        DefinitionInArgument(def_index_global, arg_index);
    const auto& def_loc =
        *(location.AsMultiple().locations()[def_index_in_arg]);
    return SelectRepresentationInIL(zone_, def_loc);
  }

  UNREACHABLE();  // Implemented in subclasses.
}

static const intptr_t kTypedDataBaseIndex = 0;
static const intptr_t kOffsetInBytesIndex = 1;

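// For compounds represented by two IL definitions, definition
// kTypedDataBaseIndex is the tagged TypedDataBase holding the bytes and
// definition kOffsetInBytesIndex is the unboxed offset into it.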
Representation CallMarshaller::RepInFfiCall(intptr_t def_index_global) const {
  intptr_t arg_index = ArgumentIndex(def_index_global);
  if (IsHandleCType(arg_index)) {
    // For FfiCall arguments, the FfiCall instruction takes a tagged pointer
    // from the IL. (It then creates a handle on the stack and passes a
    // pointer to the newly allocated handle to C.)
    //
    // For FfiCall returns, FfiCall returns the untagged pointer to the handle
    // to the IL, which then extracts the ptr field of the handle to retrieve
    // the tagged pointer.
    return ArgumentIndexIsReturn(arg_index) ? kUntagged : kTagged;
  }
  if (ArgumentIndexIsReturn(arg_index) && ReturnsCompound()) {
    // The IL creates a TypedData object which is stored on the stack, and the
    // FfiCall copies the compound value, however it is returned, into that
    // TypedData object. In order to make the return value of the definition
    // defined, the same TypedData object is returned from the FfiCall.
    return kTagged;
  }
  const auto& location = Location(arg_index);
  if (location.IsPointerToMemory() || IsCompoundPointer(arg_index) ||
      IsTypedDataPointer(arg_index)) {
    // For arguments, the compound data being passed as a pointer is first
    // collected into a TypedData object by the IL, and that object is what is
    // passed to the FfiCall instruction. (The machine code generated by
    // FfiCall handles copying the data into non-GC-moveable memory and
    // passing a pointer to that memory to the C code.)
    const intptr_t def_index_in_arg =
        def_index_global - FirstDefinitionIndex(arg_index);
    if (def_index_in_arg == kTypedDataBaseIndex) {
      // The TypedDataBase object is managed by the GC, so the payload address
      // cannot be eagerly extracted in the IL, as that would create an
      // unsafe untagged address as an input to a GC-triggering instruction.
      return kTagged;
    } else {
      ASSERT_EQUAL(def_index_in_arg, kOffsetInBytesIndex);
      ASSERT(!IsTypedDataPointer(arg_index));
      return kUnboxedUword;
    }
  }
  return BaseMarshaller::RepInFfiCall(def_index_global);
}

Representation CallbackMarshaller::RepInFfiCall(
    intptr_t def_index_global) const {
  intptr_t arg_index = ArgumentIndex(def_index_global);
  if (IsHandleCType(arg_index)) {
    // Dart objects are passed to C as untagged pointers to newly created
    // handles in the IL, and the ptr field of untagged pointers to handles are
    // extracted when the IL receives handles from C code.
    return kUntagged;
  }
  const auto& location = Location(arg_index);
  if (location.IsPointerToMemory()) {
    // The IL gets an untagged pointer to memory both for arguments and for
    // returns. If this is an argument, then the IL creates a Dart
    // representation of the compound object from the pointed at memory.
    // For returns, the IL copies the data from the compound object into
    // the memory being pointed at before returning to C.
    return kUntagged;
  }
  if (ArgumentIndexIsReturn(arg_index) && location.IsMultiple()) {
    // To return a compound object broken up over multiple native locations,
    // the IL loads the compound object into a single TypedData object and
    // passes that TypedData object to NativeReturn, which handles extracting
    // the data to the appropriate native locations.
    return kTagged;
  }
  return BaseMarshaller::RepInFfiCall(def_index_global);
}

void BaseMarshaller::RepsInFfiCall(intptr_t arg_index,
                                   GrowableArray<Representation>* out) const {
  const intptr_t num_definitions = NumDefinitions(arg_index);
  const intptr_t first_def = FirstDefinitionIndex(arg_index);
  for (int i = 0; i < num_definitions; i++) {
    out->Add(RepInFfiCall(first_def + i));
  }
}

// Helper method for `LocInFfiCall` to turn a stack location into either any
// location or a pair of two any locations.
static Location ConvertToAnyLocation(const NativeStackLocation& loc,
                                     Representation rep_in_ffi_call) {
  // Floating point values are never split: they are either in a single "FPU"
  // register or a contiguous 64-bit slot on the stack. Unboxed 64-bit integer
  // values, in contrast, can be split between any two registers on a 32-bit
  // system.
  //
  // There is an exception for iOS and Android 32-bit ARM, where
  // floating-point values are treated as integers as far as the calling
  // convention is concerned. However, the representations of these arguments
  // are already set to kUnboxedInt32 or kUnboxedInt64, so we don't have to
  // account for that here.
  const bool is_atomic =
      rep_in_ffi_call == kUnboxedDouble || rep_in_ffi_call == kUnboxedFloat;

  if (loc.payload_type().IsPrimitive() &&
      loc.payload_type().SizeInBytes() == 2 * compiler::target::kWordSize &&
      !is_atomic) {
    return Location::Pair(Location::Any(), Location::Any());
  }
  return Location::Any();
}

static Location SelectFpuLocationInIL(Zone* zone,
                                      const NativeLocation& location) {
  ASSERT((location.IsFpuRegisters()));
#if defined(TARGET_ARCH_ARM)
  // Only pin FPU register if it is the lowest bits.
  const auto& fpu_loc = location.AsFpuRegisters();
  if (fpu_loc.IsLowestBits()) {
    return fpu_loc.WidenToQFpuRegister(zone).AsLocation();
  }
  return Location::Any();
#endif  // defined(TARGET_ARCH_ARM)

  return location.AsLocation();
}

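// Chooses the register-allocator constraint for each IL input of the FfiCall:
// values already in fixed registers are pinned, stack-based values are
// relaxed to Location::Any() so they can be moved freely, and handles as well
// as compound returns are forced onto the stack.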
Location CallMarshaller::LocInFfiCall(intptr_t def_index_global) const {
  const intptr_t arg_index = ArgumentIndex(def_index_global);
  const NativeLocation& loc = this->Location(arg_index);

  if (ArgumentIndexIsReturn(arg_index)) {
    if (loc.IsRegisters() || loc.IsFpuRegisters()) {
      return loc.AsLocation();
    }
    ASSERT(ReturnsCompound());
    // No location at all, because we store into TypedData passed to the
    // FfiCall instruction. But we have to supply a location.
    return Location::RequiresStack();
  }

  // Force all handles to be Stack locations.
  if (IsHandleCType(arg_index)) {
    return Location::RequiresStack();
  }

  if (loc.IsMultiple()) {
    const intptr_t def_index_in_arg =
        def_index_global - FirstDefinitionIndex(arg_index);
    const auto& def_loc = *(loc.AsMultiple().locations()[def_index_in_arg]);
    if (def_loc.IsStack()) {
      // Don't pin stack locations, they need to be moved anyway.
      return ConvertToAnyLocation(def_loc.AsStack(),
                                  RepInFfiCall(def_index_global));
    }

    if (def_loc.IsFpuRegisters()) {
      return SelectFpuLocationInIL(zone_, def_loc);
    }

    return def_loc.AsLocation();
  }

  if (loc.IsPointerToMemory()) {
    const intptr_t def_index_in_arg =
        def_index_global - FirstDefinitionIndex(arg_index);
    if (def_index_in_arg == kTypedDataBaseIndex) {
      const auto& pointer_location = loc.AsPointerToMemory().pointer_location();
      if (pointer_location.IsStack()) {
        // Don't pin stack locations, they need to be moved anyway.
        return ConvertToAnyLocation(pointer_location.AsStack(),
                                    RepInFfiCall(def_index_global));
      }
      return pointer_location.AsLocation();
    } else {
      ASSERT_EQUAL(def_index_in_arg, kOffsetInBytesIndex);
      return Location::Any();
    }
  }

  if (IsCompoundPointer(arg_index)) {
    const intptr_t def_index_in_arg =
        def_index_global - FirstDefinitionIndex(arg_index);
    if (def_index_in_arg == kOffsetInBytesIndex) {
      // The typed data is passed in the location from the calling convention.
      // The offset in bytes can be passed in any location.
      return Location::Any();
    }
  }

  if (loc.IsStack()) {
    return ConvertToAnyLocation(loc.AsStack(), RepInFfiCall(def_index_global));
  }

  if (loc.IsFpuRegisters()) {
    return SelectFpuLocationInIL(zone_, loc);
  }

  if (loc.IsBoth()) {
    const auto& fpu_reg_loc = loc.AsBoth().location(0).AsFpuRegisters();
    return SelectFpuLocationInIL(zone_, fpu_reg_loc);
  }

  ASSERT(loc.IsRegisters());
  return loc.AsLocation();
}

bool CallMarshaller::ReturnsCompound() const {
  return IsCompoundCType(compiler::ffi::kResultIndex);
}

intptr_t CallMarshaller::CompoundReturnSizeInBytes() const {
  ASSERT(ReturnsCompound());
  return Utils::RoundUp(
      Location(compiler::ffi::kResultIndex).payload_type().SizeInBytes(),
      compiler::target::kWordSize);
}

// Const to be able to look up the `RequiredStackSpaceInBytes` in
// `PassByPointerStackOffset`.
const intptr_t kAfterLastArgumentIndex = kIntptrMax;

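// The by-pointer area of the FFI call's stack frame is laid out as follows,
// from the stack pointer upwards: first the outgoing native arguments
// (dictated by the native ABI), then word-alignment padding, then space for a
// compound result that is returned by pointer (if any), and finally every
// compound argument that is passed by pointer, in argument order. Passing
// kAfterLastArgumentIndex yields the total space required.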
intptr_t CallMarshaller::PassByPointerStackOffset(intptr_t arg_index) const {
  ASSERT(arg_index == kResultIndex ||
         (arg_index >= 0 && arg_index < num_args()) ||
         arg_index == kAfterLastArgumentIndex);

  intptr_t stack_offset = 0;

  // First the native arguments are on the stack.
  // This is governed by the native ABI; the rest we can choose freely.
  stack_offset += native_calling_convention_.StackTopInBytes();
#if (defined(DART_TARGET_OS_MACOS_IOS) || defined(DART_TARGET_OS_MACOS)) && \
    defined(TARGET_ARCH_ARM64)
  // Add extra padding for possibly non stack-aligned word-size writes.
  // TODO(https://dartbug.com/48806): Re-engineer the moves to not over-
  // approximate struct sizes on stack.
  stack_offset += 4;
#endif
  stack_offset = Utils::RoundUp(stack_offset, compiler::target::kWordSize);
  if (arg_index == kResultIndex) {
    return stack_offset;
  }

  // Then save space for the result.
  const auto& result_location = Location(compiler::ffi::kResultIndex);
  if (result_location.IsPointerToMemory()) {
    stack_offset += result_location.payload_type().SizeInBytes();
    stack_offset = Utils::RoundUp(stack_offset, compiler::target::kWordSize);
  }

  // And finally put the arguments on the stack that are passed by pointer.
  for (int i = 0; i < num_args(); i++) {
    if (arg_index == i) {
      return stack_offset;
    }
    const auto& arg_location = Location(i);
    if (arg_location.IsPointerToMemory()) {
      stack_offset += arg_location.payload_type().SizeInBytes();
      stack_offset = Utils::RoundUp(stack_offset, compiler::target::kWordSize);
    }
  }

  // The total stack space we need.
  ASSERT(arg_index == kAfterLastArgumentIndex);
  return stack_offset;
}

intptr_t CallMarshaller::RequiredStackSpaceInBytes() const {
  return PassByPointerStackOffset(kAfterLastArgumentIndex);
}

// This class translates the ABI location of arguments into the locations they
// will inhabit after entry-frame setup in the invocation of a native callback.
//
// Native -> Dart callbacks must push all the arguments before executing any
// Dart code, because reading the Thread from TLS requires calling a native
// stub, and the argument registers are volatile on all ABIs we support.
//
// To avoid complicating initial definitions, all callback arguments are read
// off the stack from their pushed locations, so this class updates the
// argument positions to account for this.
//
// See 'NativeEntryInstr::EmitNativeCode' for details.
class CallbackArgumentTranslator : public ValueObject {
 public:
  static NativeLocations& TranslateArgumentLocations(
      Zone* zone,
      const NativeLocations& argument_locations,
      const NativeLocation& return_loc) {
    const bool treat_return_loc = return_loc.IsPointerToMemory();

    auto& pushed_locs = *(new (zone) NativeLocations(
        argument_locations.length() + (treat_return_loc ? 1 : 0)));

    CallbackArgumentTranslator translator;
    for (intptr_t i = 0, n = argument_locations.length(); i < n; i++) {
      translator.AllocateArgument(*argument_locations[i]);
    }
    if (treat_return_loc) {
      translator.AllocateArgument(return_loc);
    }
    for (intptr_t i = 0, n = argument_locations.length(); i < n; ++i) {
      pushed_locs.Add(
          &translator.TranslateArgument(zone, *argument_locations[i]));
    }
    if (treat_return_loc) {
      pushed_locs.Add(&translator.TranslateArgument(zone, return_loc));
    }

    return pushed_locs;
  }

 private:
  void AllocateArgument(const NativeLocation& arg) {
    if (arg.IsStack()) return;

    if (arg.IsRegisters()) {
      argument_slots_required_ += arg.AsRegisters().num_regs();
    } else if (arg.IsFpuRegisters()) {
      argument_slots_required_ += 8 / target::kWordSize;
    } else if (arg.IsPointerToMemory()) {
      if (arg.AsPointerToMemory().pointer_location().IsRegisters()) {
        argument_slots_required_ += 1;
      }
    } else if (arg.IsMultiple()) {
      const auto& multiple = arg.AsMultiple();
      for (intptr_t i = 0; i < multiple.locations().length(); i++) {
        AllocateArgument(*multiple.locations().At(i));
      }
    } else {
      ASSERT(arg.IsBoth());
      const auto& both = arg.AsBoth();
      AllocateArgument(both.location(0));
    }
  }

  const NativeLocation& TranslateArgument(Zone* zone,
                                          const NativeLocation& arg) {
    if (arg.IsStack()) {
      // Add extra slots after the saved arguments for the return address and
      // frame pointer of the dummy arguments frame, which will be between the
      // saved argument registers and stack arguments. Also add slots for the
      // shadow space if present (factored into
      // kCallbackSlotsBeforeSavedArguments).
      //
      // Finally, for NativeCallbackTrampolines, factor in the extra stack space
      // corresponding to those trampolines' frames (above the entry frame).
      const intptr_t stack_delta =
          kCallbackSlotsBeforeSavedArguments +
          FfiCallbackMetadata::kNativeCallbackTrampolineStackDelta;
      FrameRebase rebase(
          zone,
          /*old_base=*/SPREG, /*new_base=*/SPREG,
          /*stack_delta=*/(argument_slots_required_ + stack_delta) *
              compiler::target::kWordSize);
      return rebase.Rebase(arg);
    }

    if (arg.IsRegisters()) {
      const auto& result = *new (zone) NativeStackLocation(
          arg.payload_type(), arg.container_type(), SPREG,
          argument_slots_used_ * compiler::target::kWordSize);
      argument_slots_used_ += arg.AsRegisters().num_regs();
      return result;
    }

    if (arg.IsFpuRegisters()) {
      const auto& result = *new (zone) NativeStackLocation(
          arg.payload_type(), arg.container_type(), SPREG,
          argument_slots_used_ * compiler::target::kWordSize);
      argument_slots_used_ += 8 / target::kWordSize;
      return result;
    }

    if (arg.IsPointerToMemory()) {
      const auto& pointer_loc = arg.AsPointerToMemory().pointer_location();
      const auto& pointer_ret_loc =
          arg.AsPointerToMemory().pointer_return_location();
      const auto& pointer_translated = TranslateArgument(zone, pointer_loc);
      return *new (zone) PointerToMemoryLocation(
          pointer_translated, pointer_ret_loc, arg.payload_type().AsCompound());
    }

    if (arg.IsMultiple()) {
      const auto& multiple = arg.AsMultiple();
      NativeLocations& multiple_locations =
          *new (zone) NativeLocations(multiple.locations().length());
      for (intptr_t i = 0; i < multiple.locations().length(); i++) {
        multiple_locations.Add(
            &TranslateArgument(zone, *multiple.locations().At(i)));
      }
      return *new (zone) MultipleNativeLocations(
          multiple.payload_type().AsCompound(), multiple_locations);
    }

    ASSERT(arg.IsBoth());
    const auto& both = arg.AsBoth();
    // We only need one.
    return TranslateArgument(zone, both.location(0));
  }

  intptr_t argument_slots_used_ = 0;
  intptr_t argument_slots_required_ = 0;
};

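// Creates the marshaller for an FFI callback: the native calling convention
// is computed from the C signature, and the ABI argument locations are
// translated (by CallbackArgumentTranslator above) into the stack locations
// the arguments occupy after being pushed in the callback's entry frame.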
CallbackMarshaller* CallbackMarshaller::FromFunction(Zone* zone,
                                                     const Function& function,
                                                     const char** error) {
  DEBUG_ASSERT(function.IsNotTemporaryScopedHandle());
  const auto& c_signature =
      FunctionType::ZoneHandle(zone, function.FfiCSignature());
  const auto native_function_signature =
      NativeFunctionTypeFromFunctionType(zone, c_signature, error);
  if (*error != nullptr) {
    return nullptr;
  }
  const auto& native_calling_convention =
      NativeCallingConvention::FromSignature(zone, *native_function_signature);
  const auto& callback_locs =
      CallbackArgumentTranslator::TranslateArgumentLocations(
          zone, native_calling_convention.argument_locations(),
          native_calling_convention.return_location());
  return new (zone) CallbackMarshaller(
      zone, function, c_signature, native_calling_convention, callback_locs);
}

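// Returns the native location from which the IL reads the parameter backing
// the given definition, after the callback's entry frame has pushed all
// argument registers. Compounds spanning several definitions are split into
// word-sized slices of their stack location.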
const NativeLocation& CallbackMarshaller::NativeLocationOfNativeParameter(
    intptr_t def_index) const {
  const intptr_t arg_index = ArgumentIndex(def_index);
  if (arg_index == kResultIndex) {
    const auto& result_loc = Location(arg_index);
    if (result_loc.IsPointerToMemory()) {
      // If it's a pointer, we return the last pushed location.
      return *callback_locs_.At(callback_locs_.length() - 1);
    }
    // The other return types are not translated.
    return result_loc;
  }

  // Check that we only have stack arguments.
  const auto& loc = *callback_locs_.At(arg_index);
  ASSERT(loc.IsStack() || loc.IsPointerToMemory() || loc.IsMultiple());
  if (loc.IsStack()) {
    ASSERT(loc.AsStack().base_register() == SPREG);
    if (loc.payload_type().IsPrimitive()) {
      return loc;
    }
    const intptr_t index = DefinitionInArgument(def_index, arg_index);
    const intptr_t count = NumDefinitions(arg_index);
    return loc.Split(zone_, count, index);
  } else if (loc.IsPointerToMemory()) {
    const auto& pointer_loc = loc.AsPointerToMemory().pointer_location();
    ASSERT(pointer_loc.IsStack() &&
           pointer_loc.AsStack().base_register() == SPREG);
    return loc;
  }
  const auto& multiple = loc.AsMultiple();
  const intptr_t index = DefinitionInArgument(def_index, arg_index);
  const auto& multi_loc = *multiple.locations().At(index);
  ASSERT(multi_loc.IsStack() && multi_loc.AsStack().base_register() == SPREG);
  return multi_loc;
}

}  // namespace ffi

}  // namespace compiler

}  // namespace dart