Flutter Engine
FlutterView.mm
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// (The file's #import / #include block did not survive extraction and is omitted here.)
@interface FlutterView ()
@property(nonatomic, weak) id<FlutterViewEngineDelegate> delegate;
@property(nonatomic, weak) UIWindowScene* previousScene;
@end

@implementation FlutterView {
  BOOL _isWideGamutEnabled;
}

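// The plain UIView initializers are deliberately disabled: a FlutterView is only
// meaningful with an engine delegate attached, so callers must go through
// -initWithDelegate:opaque:enableWideGamut: instead.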
- (instancetype)init {
  NSAssert(NO, @"FlutterView must initWithDelegate");
  return nil;
}

- (instancetype)initWithFrame:(CGRect)frame {
  NSAssert(NO, @"FlutterView must initWithDelegate");
  return nil;
}

- (instancetype)initWithCoder:(NSCoder*)aDecoder {
  NSAssert(NO, @"FlutterView must initWithDelegate");
  return nil;
}

- (UIScreen*)screen {
  return self.window.windowScene.screen;
}

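// Reports the pixel format of the backing CAMetalLayer, falling back to
// BGRA8Unorm when the view is not Metal-backed.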
- (MTLPixelFormat)pixelFormat {
  if ([self.layer isKindOfClass:[CAMetalLayer class]]) {
// It is a known Apple bug that CAMetalLayer incorrectly reports its supported
// SDKs. It is, in fact, available since iOS 8.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
    CAMetalLayer* layer = (CAMetalLayer*)self.layer;
#pragma clang diagnostic pop
    return layer.pixelFormat;
  }
  return MTLPixelFormatBGRA8Unorm;
}
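
// Whether the display backing this view can show colors outside of sRGB. Wide
// gamut rendering is only attempted when this returns YES *and* the engine was
// created with wide gamut enabled (see -layoutSubviews).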
- (BOOL)isWideGamutSupported {
  FML_DCHECK(self.screen);

  // Wide Gamut is not supported for iOS Extensions due to memory limitations
  // (see https://github.com/flutter/flutter/issues/165086).
  if (FlutterSharedApplication.isAppExtension) {
    return NO;
  }

  // This predicates the decision on the capabilities of the iOS device's
  // display. This means external displays will not support wide gamut if the
  // device's display doesn't support it. In practice that should never be the case.
  return self.screen.traitCollection.displayGamut != UIDisplayGamutSRGB;
}

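// Designated initializer. The delegate (in practice the FlutterEngine) supplies
// screenshots for snapshotting and receives accessibility callbacks; the view
// keeps only a weak reference to it.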
- (instancetype)initWithDelegate:(id<FlutterViewEngineDelegate>)delegate
                          opaque:(BOOL)opaque
                 enableWideGamut:(BOOL)isWideGamutEnabled {
  if (delegate == nil) {
    NSLog(@"FlutterView delegate was nil.");
    return nil;
  }

  self = [super initWithFrame:CGRectNull];

  if (self) {
    _delegate = delegate;
    _isWideGamutEnabled = isWideGamutEnabled;
    self.layer.opaque = opaque;
  }

  return self;
}

static void PrintWideGamutWarningOnce() {
  static BOOL did_print = NO;
  if (did_print) {
    return;
  }
  FML_DLOG(WARNING) << "Rendering wide gamut colors is turned on but isn't "
                       "supported, downgrading the color gamut to sRGB.";
  did_print = YES;
}

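// Configures the backing CAMetalLayer on every layout pass: matches the layer's
// scale factors to the screen, and opts into an extended-sRGB color space with a
// BGRA10_XR pixel format when wide gamut rendering is both requested and
// supported; otherwise it logs the downgrade warning once.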
- (void)layoutSubviews {
  if ([self.layer isKindOfClass:[CAMetalLayer class]]) {
// It is a known Apple bug that CAMetalLayer incorrectly reports its supported
// SDKs. It is, in fact, available since iOS 8.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
    CAMetalLayer* layer = (CAMetalLayer*)self.layer;
#pragma clang diagnostic pop
    CGFloat screenScale = self.screen.scale;
    layer.allowsGroupOpacity = YES;
    layer.contentsScale = screenScale;
    layer.rasterizationScale = screenScale;
    layer.framebufferOnly = flutter::Settings::kSurfaceDataAccessible ? NO : YES;
    if (_isWideGamutEnabled && self.isWideGamutSupported) {
      fml::CFRef<CGColorSpaceRef> srgb(CGColorSpaceCreateWithName(kCGColorSpaceExtendedSRGB));
      layer.colorspace = srgb;
      layer.pixelFormat = MTLPixelFormatBGRA10_XR;
    } else if (_isWideGamutEnabled && !self.isWideGamutSupported) {
      PrintWideGamutWarningOnce();
    }
  }

  [super layoutSubviews];
}

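// UIKit asks this class method for the view's backing layer class; the answer
// depends on the rendering API selected for the process.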
+ (Class)layerClass {
  return flutter::GetCoreAnimationLayerClassForRenderingAPI(
      flutter::GetRenderingAPIForProcess(/*force_software=*/false));
}

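// Backs CALayer-driven drawing (e.g. snapshotting the view) by asking the
// delegate's rasterizer for an uncompressed screenshot of the last frame and
// drawing it into the provided Core Graphics context, flipping the context
// vertically to account for the differing coordinate systems.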
- (void)drawLayer:(CALayer*)layer inContext:(CGContextRef)context {
  TRACE_EVENT0("flutter", "SnapshotFlutterView");

  if (layer != self.layer || context == nullptr) {
    return;
  }

  auto screenshot = [_delegate takeScreenshot:flutter::Rasterizer::ScreenshotType::UncompressedImage
                              asBase64Encoded:NO];

  if (!screenshot.data || screenshot.data->isEmpty() || screenshot.frame_size.IsEmpty()) {
    return;
  }

  NSData* data = [NSData dataWithBytes:const_cast<void*>(screenshot.data->data())
                                length:screenshot.data->size()];

  fml::CFRef<CGDataProviderRef> image_data_provider(
      CGDataProviderCreateWithCFData(reinterpret_cast<CFDataRef>(data)));

  fml::CFRef<CGColorSpaceRef> colorspace(CGColorSpaceCreateDeviceRGB());

  // Defaults for RGBA8888.
  size_t bits_per_component = 8u;
  size_t bits_per_pixel = 32u;
  size_t bytes_per_row_multiplier = 4u;
  CGBitmapInfo bitmap_info =
      static_cast<CGBitmapInfo>(static_cast<uint32_t>(kCGImageAlphaPremultipliedLast) |
                                static_cast<uint32_t>(kCGBitmapByteOrder32Big));

  switch (screenshot.pixel_format) {
    case flutter::Rasterizer::ScreenshotFormat::kUnknown:
    case flutter::Rasterizer::ScreenshotFormat::kR8G8B8A8UNormInt:
      // Assume unknown is Skia and is RGBA8888. Keep defaults.
      break;
    case flutter::Rasterizer::ScreenshotFormat::kB8G8R8A8UNormInt:
      // Treat this as little endian with the alpha first so that it's read backwards.
      bitmap_info =
          static_cast<CGBitmapInfo>(static_cast<uint32_t>(kCGImageAlphaPremultipliedFirst) |
                                    static_cast<uint32_t>(kCGBitmapByteOrder32Little));
      break;
    case flutter::Rasterizer::ScreenshotFormat::kR16G16B16A16Float:
      bits_per_component = 16u;
      bits_per_pixel = 64u;
      bytes_per_row_multiplier = 8u;
      bitmap_info =
          static_cast<CGBitmapInfo>(static_cast<uint32_t>(kCGImageAlphaPremultipliedLast) |
                                    static_cast<uint32_t>(kCGBitmapFloatComponents) |
                                    static_cast<uint32_t>(kCGBitmapByteOrder16Little));
      break;
  }

  fml::CFRef<CGImageRef> image(CGImageCreate(
      screenshot.frame_size.width,                             // size_t width
      screenshot.frame_size.height,                            // size_t height
      bits_per_component,                                      // size_t bitsPerComponent
      bits_per_pixel,                                          // size_t bitsPerPixel,
      bytes_per_row_multiplier * screenshot.frame_size.width,  // size_t bytesPerRow
      colorspace,                                              // CGColorSpaceRef space
      bitmap_info,                                             // CGBitmapInfo bitmapInfo
      image_data_provider,                                     // CGDataProviderRef provider
      nullptr,                                                 // const CGFloat* decode
      false,                                                   // bool shouldInterpolate
      kCGRenderingIntentDefault                                // CGColorRenderingIntent intent
      ));

  const CGRect frame_rect =
      CGRectMake(0.0, 0.0, screenshot.frame_size.width, screenshot.frame_size.height);
  CGContextSaveGState(context);
  // If the CGContext is not a bitmap based context, this returns zero.
  CGFloat height = CGBitmapContextGetHeight(context);
  if (height == 0) {
    height = CGFloat(screenshot.frame_size.height);
  }
  CGContextTranslateCTM(context, 0.0, height);
  CGContextScaleCTM(context, 1.0, -1.0);
  CGContextDrawImage(context, frame_rect, image);
  CGContextRestoreGState(context);
}

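// FlutterView itself is never an accessibility element; the semantics tree
// generated by the framework provides the real accessibility elements. The
// override also doubles as a signal that an assistive technology is running.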
- (BOOL)isAccessibilityElement {
  // iOS does not provide an API to query whether voice control
  // is turned on or off. It is likely that at least one of the assistive
  // technologies is turned on if this method is called. If we do
  // not catch it in the notification center, we will catch it here.
  //
  // TODO(chunhtai): Remove this workaround once iOS provides an
  // API to query whether voice control is enabled.
  // https://github.com/flutter/flutter/issues/76808.
  [self.delegate flutterViewAccessibilityDidCall];
  return NO;
}

// Enables keyboard-based navigation when the user turns on
// full keyboard access (FKA), using existing accessibility information.
//
// iOS does not provide any API for monitoring or querying whether FKA is on,
// but it does call isAccessibilityElement if FKA is on,
// so the isAccessibilityElement implementation above will be called
// when the view appears and the accessibility information will most likely
// be available by the time the user starts to interact with the app using FKA.
//
// See SemanticsObject+UIFocusSystem.mm for more details.
- (NSArray<id<UIFocusItem>>*)focusItemsInRect:(CGRect)rect {
  NSObject* rootAccessibilityElement =
      [self.accessibilityElements count] > 0 ? self.accessibilityElements[0] : nil;
  return [rootAccessibilityElement isKindOfClass:[SemanticsObjectContainer class]]
             ? @[ [rootAccessibilityElement accessibilityElementAtIndex:0] ]
             : nil;
}

- (NSArray<id<UIFocusEnvironment>>*)preferredFocusEnvironments {
  // Occasionally we add subviews to FlutterView (text fields, for example).
  // These views shouldn't be directly visible to the iOS focus engine; instead,
  // the focus engine should only interact with the designated focus items
  // (SemanticsObjects).
  return nil;
}

- (void)willMoveToWindow:(UIWindow*)newWindow {
  // When a FlutterView moves windows, it may also be moving scenes. Add/remove the FlutterEngine
  // from the FlutterSceneLifeCycleProvider.sceneLifeCycleDelegate if it changes scenes.
  UIWindowScene* newScene = newWindow.windowScene;
  UIWindowScene* currentScene = self.window.windowScene;

  if (newScene == currentScene) {
    return;
  }

  // Remove the engine from the previous scene if it's no longer in that window and scene.
  FlutterPluginSceneLifeCycleDelegate* previousSceneLifeCycleDelegate =
      [FlutterPluginSceneLifeCycleDelegate fromScene:self.previousScene];
  if (previousSceneLifeCycleDelegate) {
    [previousSceneLifeCycleDelegate removeFlutterManagedEngine:(FlutterEngine*)self.delegate];
    self.previousScene = nil;
  }

  if (newScene) {
    // Add the engine to the new scene's lifecycle delegate.
    FlutterPluginSceneLifeCycleDelegate* newSceneLifeCycleDelegate =
        [FlutterPluginSceneLifeCycleDelegate fromScene:newScene];
    if (newSceneLifeCycleDelegate) {
      [newSceneLifeCycleDelegate addFlutterManagedEngine:(FlutterEngine*)self.delegate];
    }
  } else {
    // If the view is being removed from a window, store the current scene to remove the engine
    // from it later when the view is added to a new window.
    self.previousScene = currentScene;
  }
}

@end