# databasebuilder.py — IDL database builder (Dart SDK tooling, mirrored in the
# Flutter engine). Doxygen documentation-viewer navigation header removed.
1#!/usr/bin/env python3
2# Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
3# for details. All rights reserved. Use of this source code is governed by a
4# BSD-style license that can be found in the LICENSE file.
5
6import copy
7import database
8import logging
9import monitored
10import multiprocessing
11import os
12import os.path
13import re
14import sys
15import tempfile
16import time
17import traceback
18
19import idl_validator
20
21import compiler
22import compute_interfaces_info_individual
23from compute_interfaces_info_individual import InterfaceInfoCollector
24import idl_definitions
25
26from idlnode import *
27
28_logger = logging.getLogger('databasebuilder')
29
30# Used in source annotations to specify the parent interface declaring
31# a displaced declaration. The 'via' attribute specifies the parent interface
32# which implements a displaced declaration.
33_VIA_ANNOTATION_ATTR_NAME = 'via'
34
35
class DatabaseBuilderOptions(object):
    """Used in specifying options when importing new interfaces"""

    # NOTE(review): the class-header line was dropped by the documentation
    # scrape this file was recovered from; reconstructed from the Dart SDK
    # original — confirm against upstream.
    def __init__(self,
                 idl_defines=None,
                 source=None,
                 source_attributes=None,
                 rename_operation_arguments_on_merge=False,
                 add_new_interfaces=True,
                 obsolete_old_declarations=False,
                 logging_level=logging.WARNING):
        """Constructor.
        Args:
            idl_defines -- list of definitions for the idl gcc pre-processor
            source -- the origin of the IDL file, used for annotating the
                database.
            source_attributes -- this map of attributes is used as
                annotation attributes.
            rename_operation_arguments_on_merge -- if True, will rename
                operation arguments when merging using the new name rather
                than the old.
            add_new_interfaces -- when False, if an interface is a new
                addition, it will be ignored.
            obsolete_old_declarations -- when True, if a declaration
                from a certain source is not re-declared, it will be removed.
        """
        self.source = source
        # Avoid the shared-mutable-default pitfall: every instance gets its
        # own list/dict instead of one shared across all constructions.
        self.source_attributes = \
            {} if source_attributes is None else source_attributes
        self.idl_defines = [] if idl_defines is None else idl_defines
        self.rename_operation_arguments_on_merge = \
            rename_operation_arguments_on_merge
        self.add_new_interfaces = add_new_interfaces
        self.obsolete_old_declarations = obsolete_old_declarations
        # Adjusts the module-level logger shared by the whole builder.
        _logger.setLevel(logging_level)
70
71
def format_exception(e):
    """Return the currently-handled exception as a full traceback string.

    Combines the call stack leading here (minus this helper's own two
    frames) with the traceback and message of the exception currently
    being handled, mimicking the interpreter's default report. Must be
    called from inside an ``except`` block so sys.exc_info() is populated.

    NOTE(review): the ``def`` line was dropped by the documentation scrape;
    reconstructed from the Dart SDK original (callers at original lines
    99/117 pass the caught exception object).
    """
    exception_list = traceback.format_stack()
    # Drop this helper's own frame and its immediate caller's frame.
    exception_list = exception_list[:-2]
    exception_list.extend(traceback.format_tb(sys.exc_info()[2]))
    exception_list.extend(
        traceback.format_exception_only(sys.exc_info()[0],
                                        sys.exc_info()[1]))

    exception_str = "Traceback (most recent call last):\n"
    exception_str += "".join(exception_list)
    # Removing the last \n
    exception_str = exception_str[:-1]

    return exception_str
86
87
# Compile IDL using Blink's IDL compiler.
def _compile_idl_file(build, file_name, import_options):
    """Compile one IDL file and return its AST.

    Args:
        build -- Build instance whose idl_compiler performs the work.
        import_options -- unused here; kept for signature parity with
            _load_idl_file.

    Returns the compiled AST, or 1 on failure (the error is printed).
    """
    try:
        idl_file_fullpath = os.path.realpath(file_name)
        idl_definition = build.idl_compiler.compile_file(idl_file_fullpath)
        return idl_definition
    except Exception as err:
        print('ERROR: idl_compiler.py: ' + os.path.basename(file_name))
        print(err)
        print()
        print('Stack Dump:')
        # Restored: this line was dropped by the documentation scrape.
        print(format_exception(err))

        return 1
102
103
# Create the Model (IDLFile) from the new AST of the compiled IDL file.
def _load_idl_file(build, file_name, import_options):
    """Build an IDLFile model from the cached AST for file_name.

    Looks the AST up in the module-global new_asts cache (populated by
    DatabaseBuilder._process_ast). Returns the IDLFile, or 1 on failure.
    """
    try:
        # Compute interface name from IDL filename (it's one for one in WebKit).
        name = os.path.splitext(os.path.basename(file_name))[0]

        idl_definition = new_asts[name]
        return IDLFile(idl_definition, file_name)
    except Exception as err:
        print('ERROR: loading AST from cache: ' + os.path.basename(file_name))
        print(err)
        print()
        print('Stack Dump:')
        # Restored: this line was dropped by the documentation scrape.
        print(format_exception(err))

        return 1
120
121
# New IDL parser builder.
class Build():
    """Wraps the Blink IDL compiler used to produce ASTs for the builder."""

    def __init__(self, provider):
        # TODO(terry): Consider using the generator to do the work today we're
        #              driven by the databasebuilder. Blink compiler requires
        #              an output directory even though we don't use (yet). Might
        #              use the code generator portion of the new IDL compiler
        #              then we'd have a real output directory. Today we use the
        #              compiler to only create an AST.
        self.output_directory = tempfile.mkdtemp()
        attrib_file = os.path.join('Source',
                                   idl_validator.EXTENDED_ATTRIBUTES_FILENAME)
        # Create compiler.
        # NOTE(review): this assignment's opening line was dropped by the
        # documentation scrape; reconstructed — confirm the factory name
        # against the compiler module.
        self.idl_compiler = compiler.IdlCompilerDart(
            self.output_directory,
            attrib_file,
            interfaces_info=provider._info_collector.interfaces_info,
            only_if_changed=True)

    def format_exception(self, e):
        """Return the currently-handled exception as a traceback string
        (same algorithm as the module-level format_exception)."""
        exception_list = traceback.format_stack()
        exception_list = exception_list[:-2]
        exception_list.extend(traceback.format_tb(sys.exc_info()[2]))
        exception_list.extend(
            traceback.format_exception_only(sys.exc_info()[0],
                                            sys.exc_info()[1]))

        exception_str = "Traceback (most recent call last):\n"
        exception_str += "".join(exception_list)
        # Removing the last \n
        exception_str = exception_str[:-1]

        return exception_str

    def generate_from_idl(self, idl_file):
        """Compile idl_file and return an IDLFile model, or 1 on failure."""
        try:
            idl_file_fullpath = os.path.realpath(idl_file)
            # Fixed: capture the compiled AST; the original discarded it and
            # then returned the undefined names 'idl_ast'/'file_name', which
            # raised NameError on the success path.
            idl_ast = self.idl_compiler.compile_file(idl_file_fullpath)
        except Exception as err:
            print('ERROR: idl_compiler.py: ' + os.path.basename(idl_file))
            print(err)
            print()
            print('Stack Dump:')
            print(self.format_exception(err))

            return 1

        return IDLFile(idl_ast, idl_file)
171
172
173class DatabaseBuilder(object):
174
175 def __init__(self, database):
176 """DatabaseBuilder is used for importing and merging interfaces into
177 the Database"""
178 self._info_collector = InterfaceInfoCollector()
179
180 self._database = database
181 self._imported_interfaces = []
182 self._impl_stmts = []
184
185 # Spin up the new IDL parser.
186 self.build = Build(self)
187
188 # Global typedef to mapping.
190 'databasebuilder.global_type_defs', {
191 'Transferable': 'MessagePort',
192 })
193
194 # TODO(terry): Consider keeping richer type information (e.g.,
195 # IdlArrayOrSequenceType from the Blink parser) instead of just
196 # a type name.
197 def _resolve_type_defs(self, idl_file):
198 for type_node in idl_file.all(IDLType):
199 resolved = False
200 type_name = type_node.id
201 for typedef in self.global_type_defs:
202 seq_name_typedef = 'sequence<%s>' % typedef
203 if type_name == typedef:
204 type_node.id = self.global_type_defs[typedef]
205 resolved = True
206 elif type_name == seq_name_typedef:
207 type_node.id = 'sequence<%s>' % self.global_type_defs[
208 typedef]
209 resolved = True
210 if not (resolved):
211 for typedef in idl_file.typeDefs:
212 if type_name == typedef.id:
213 type_node.id = typedef.type.id
214 resolved = True
215
216 def _strip_ext_attributes(self, idl_file):
217 """Strips unuseful extended attributes."""
218 for ext_attrs in idl_file.all(IDLExtAttrs):
219 # TODO: Decide which attributes are uninteresting.
220 pass
221
222 def _rename_types(self, idl_file, import_options):
223 """Rename interface and type names with names provided in the
224 options. Also clears scopes from scoped names"""
225
226 strip_modules = lambda name: name.split('::')[-1]
227
228 def rename_node(idl_node):
229 idl_node.reset_id(strip_modules(idl_node.id))
230
231 def rename_ext_attrs(ext_attrs_node):
232 for type_valued_attribute_name in ['DartSupplemental']:
233 if type_valued_attribute_name in ext_attrs_node:
234 value = ext_attrs_node[type_valued_attribute_name]
235 if isinstance(value, str):
236 ext_attrs_node[
237 type_valued_attribute_name] = strip_modules(value)
238
239 list(map(rename_node, idl_file.all(IDLInterface)))
240 list(map(rename_node, idl_file.all(IDLType)))
241 list(map(rename_ext_attrs, idl_file.all(IDLExtAttrs)))
242
243 def _annotate(self, interface, import_options):
244 """Adds @ annotations based on the source and source_attributes
245 members of import_options."""
246
247 source = import_options.source
248 if not source:
249 return
250
251 def add_source_annotation(idl_node):
252 annotation = IDLAnnotation(
253 copy.deepcopy(import_options.source_attributes))
254 idl_node.annotations[source] = annotation
255 if ((isinstance(idl_node, IDLInterface) or
256 isinstance(idl_node, IDLMember)) and
257 idl_node.is_fc_suppressed):
258 annotation['suppressed'] = None
259
260 add_source_annotation(interface)
261
262 list(map(add_source_annotation, interface.parents))
263 list(map(add_source_annotation, interface.constants))
264 list(map(add_source_annotation, interface.attributes))
265 list(map(add_source_annotation, interface.operations))
266
    def _sign(self, node):
        """Computes a unique signature for the node, for merging purposed, by
        concatenating types and names in the declaration.

        Raises TypeError for node kinds that cannot be signed.
        """
        if isinstance(node, IDLType):
            res = node.id
            # Signedness is ignored so 'unsigned long' and 'long' compare equal.
            if res.startswith('unsigned '):
                res = res[len('unsigned '):]
            if hasattr(node, 'nullable') and node.nullable:
                res += '?'
            return res

        res = []
        if isinstance(node, IDLInterface):
            res = ['interface', node.id]
        elif isinstance(node, IDLParentInterface):
            res = ['parent', self._sign(node.type)]
        elif isinstance(node, IDLOperation):
            # Operation signature: specials, name, argument types, return type.
            res = ['op']
            for special in node.specials:
                res.append(special)
            if node.id is not None:
                res.append(node.id)
            for arg in node.arguments:
                res.append(self._sign(arg.type))
            res.append(self._sign(node.type))
        elif isinstance(node, IDLAttribute):
            res = []
            if node.is_read_only:
                res.append('readonly')
            res.append(node.id)
            res.append(self._sign(node.type))
        elif isinstance(node, IDLConstant):
            res = []
            res.append('const')
            res.append(node.id)
            res.append(node.value)
            res.append(self._sign(node.type))
        else:
            raise TypeError("Can't sign input of type %s" % type(node))
        return ':'.join(res)
307
    def _build_signatures_map(self, idl_node_list):
        """Creates a hash table mapping signatures to idl_nodes for the
        given list of nodes.

        Raises RuntimeError when two nodes with the same suppression state
        share a signature, since they could not be told apart on merge.
        """
        res = {}
        for idl_node in idl_node_list:
            sig = self._sign(idl_node)
            if sig is None:
                continue
            if sig in res:
                op = res[sig]
                # Only report if the operations that match are either both suppressed
                # or both not suppressed. Optional args aren't part of type signature
                # for this routine. Suppressing a non-optional type and supplementing
                # with an optional type appear the same.
                if idl_node.is_fc_suppressed == op.is_fc_suppressed:
                    raise RuntimeError(
                        'Warning: Multiple members have the same '
                        ' signature: "%s"' % sig)
            # Last node with a given signature wins.
            res[sig] = idl_node
        return res
328
329 def _get_parent_interfaces(self, interface):
330 """Return a list of all the parent interfaces of a given interface"""
331 res = []
332
333 def recurse(current_interface):
334 if current_interface in res:
335 return
336 res.append(current_interface)
337 for parent in current_interface.parents:
338 parent_name = parent.type.id
339 if self._database.HasInterface(parent_name):
340 recurse(self._database.GetInterface(parent_name))
341
342 recurse(interface)
343 return res[1:]
344
345 def _merge_ext_attrs(self, old_attrs, new_attrs):
346 """Merges two sets of extended attributes.
347
348 Returns: True if old_attrs has changed.
349 """
350 changed = False
351 for (name, value) in new_attrs.items():
352 if name in old_attrs and old_attrs[name] == value:
353 pass # Identical
354 else:
355 if name == 'ImplementedAs' and name in old_attrs:
356 continue
357 old_attrs[name] = value
358 changed = True
359 return changed
360
    def _merge_nodes(self, old_list, new_list, import_options):
        """Merges two lists of nodes. Annotates nodes with the source of each
        node.

        Returns:
            True if the old_list has changed.

        Args:
            old_list -- the list to merge into.
            new_list -- list containing more nodes.
            import_options -- controls how merging is done.
        """
        changed = False

        source = import_options.source

        old_signatures_map = self._build_signatures_map(old_list)
        new_signatures_map = self._build_signatures_map(new_list)

        # Merge new items
        for (sig, new_node) in new_signatures_map.items():
            if sig not in old_signatures_map:
                # New node:
                old_list.append(new_node)
                changed = True
            else:
                # Merge old and new nodes:
                old_node = old_signatures_map[sig]
                if (source not in old_node.annotations and
                        source in new_node.annotations):
                    old_node.annotations[source] = new_node.annotations[source]
                    changed = True
                # Maybe rename arguments:
                # NOTE(review): matching signatures imply matching argument
                # type lists, so indexing new_node.arguments[i] is presumed
                # safe — confirm optional-argument handling upstream.
                if isinstance(old_node, IDLOperation):
                    for i in range(0, len(old_node.arguments)):
                        old_arg = old_node.arguments[i]
                        new_arg = new_node.arguments[i]

                        # Rename placeholder names ('arg', '*Arg') or rename
                        # unconditionally when the option requests it.
                        old_arg_name = old_arg.id
                        new_arg_name = new_arg.id
                        if (old_arg_name != new_arg_name and
                            (old_arg_name == 'arg' or
                             old_arg_name.endswith('Arg') or
                             import_options.rename_operation_arguments_on_merge)
                           ):
                            old_node.arguments[i].id = new_arg_name
                            changed = True

                        if self._merge_ext_attrs(old_arg.ext_attrs,
                                                 new_arg.ext_attrs):
                            changed = True

                        # Merge in [Default=Undefined] and DOMString a = null handling in
                        # IDL. The IDL model (IDLArgument) coalesces these two different
                        # default value syntaxes into the default_value* models.
                        old_default_value = old_arg.default_value
                        new_default_value = new_arg.default_value
                        old_default_value_is_null = old_arg.default_value_is_null
                        new_default_value_is_null = new_arg.default_value_is_null
                        if old_default_value != new_default_value:
                            old_arg.default_value = new_default_value
                            changed = True
                        if old_default_value_is_null != new_default_value_is_null:
                            old_arg.default_value_is_null = new_default_value_is_null
                            changed = True

                        # Merge in any optional argument differences.
                        old_optional = old_arg.optional
                        new_optional = new_arg.optional
                        if old_optional != new_optional:
                            old_arg.optional = new_optional
                            changed = True
                # Maybe merge annotations:
                if (isinstance(old_node, IDLAttribute) or
                        isinstance(old_node, IDLOperation)):
                    if self._merge_ext_attrs(old_node.ext_attrs,
                                             new_node.ext_attrs):
                        changed = True

        # Remove annotations on obsolete items from the same source
        if import_options.obsolete_old_declarations:
            for (sig, old_node) in old_signatures_map.items():
                if (source in old_node.annotations and
                        sig not in new_signatures_map):
                    _logger.warn(
                        '%s not available in %s anymore' % (sig, source))
                    del old_node.annotations[source]
                    changed = True

        return changed
451
    def _merge_interfaces(self, old_interface, new_interface, import_options):
        """Merges the new_interface into the old_interface, annotating the
        interface with the sources of each change.

        Returns True if old_interface was modified.
        """

        changed = False

        source = import_options.source
        # Propagate the interface-level annotation from the new source,
        # but never from a supplemental declaration.
        if (source and source not in old_interface.annotations and
                source in new_interface.annotations and
                not new_interface.is_supplemental):
            old_interface.annotations[source] = new_interface.annotations[
                source]
            changed = True

        def merge_list(what):
            # 'what' is the member-list attribute name:
            # 'parents' / 'constants' / 'attributes' / 'operations'.
            old_list = old_interface.__dict__[what]
            new_list = new_interface.__dict__[what]

            # When members arrive via a differently-named interface, record
            # where they should be documented and implemented.
            if what != 'parents' and old_interface.id != new_interface.id:
                for node in new_list:
                    node.doc_js_interface_name = old_interface.id
                    node.ext_attrs['ImplementedBy'] = new_interface.id

            changed = self._merge_nodes(old_list, new_list, import_options)

            # Delete list items with zero remaining annotations.
            if changed and import_options.obsolete_old_declarations:

                def has_annotations(idl_node):
                    return len(idl_node.annotations)

                old_interface.__dict__[what] = \
                    list(filter(has_annotations, old_list))

            return changed

        # Smartly merge various declarations:
        if merge_list('parents'):
            changed = True
        if merge_list('constants'):
            changed = True
        if merge_list('attributes'):
            changed = True
        if merge_list('operations'):
            changed = True

        if self._merge_ext_attrs(old_interface.ext_attrs,
                                 new_interface.ext_attrs):
            changed = True

        _logger.info('merged interface %s (changed=%s, supplemental=%s)' %
                     (old_interface.id, changed, new_interface.is_supplemental))

        return changed
506
    def _merge_impl_stmt(self, impl_stmt, import_options):
        """Applies "X implements Y" statements on the proper places in the
        database"""
        implementor_name = impl_stmt.implementor.id
        implemented_name = impl_stmt.implemented.id
        _logger.info('merging impl stmt %s implements %s' % (implementor_name,
                                                             implemented_name))

        # NOTE(review): statements whose implementor is not present in the
        # database are silently ignored.
        source = import_options.source
        if self._database.HasInterface(implementor_name):
            interface = self._database.GetInterface(implementor_name)
            if interface.parents is None:
                interface.parents = []
            # If the parent relationship already exists, just annotate it.
            for parent in interface.parents:
                if parent.type.id == implemented_name:
                    if source and source not in parent.annotations:
                        parent.annotations[source] = IDLAnnotation(
                            import_options.source_attributes)
                    return
            # not found, so add new one
            parent = IDLParentInterface(None)
            parent.type = IDLType(implemented_name)
            if source:
                parent.annotations[source] = IDLAnnotation(
                    import_options.source_attributes)
            interface.parents.append(parent)
533
535 """Merges all imported interfaces and loads them into the DB."""
536 imported_interfaces = self._imported_interfaces
537
538 # Step 1: Pre process imported interfaces
539 for interface, import_options in imported_interfaces:
540 self._annotate(interface, import_options)
541
542 # Step 2: Add all new interfaces and merge overlapping ones
543 for interface, import_options in imported_interfaces:
544 if not interface.is_supplemental:
545 if self._database.HasInterface(interface.id):
546 old_interface = self._database.GetInterface(interface.id)
547 self._merge_interfaces(old_interface, interface,
548 import_options)
549 else:
550 if import_options.add_new_interfaces:
551 self._database.AddInterface(interface)
552
553 # Step 3: Merge in supplemental interfaces
554 for interface, import_options in imported_interfaces:
555 if interface.is_supplemental:
556 target = interface.id
557 if self._database.HasInterface(target):
558 old_interface = self._database.GetInterface(target)
559 self._merge_interfaces(old_interface, interface,
560 import_options)
561 else:
562 _logger.warning("Supplemental target '%s' not found",
563 target)
564
565 # Step 4: Resolve 'implements' statements
566 for impl_stmt, import_options in self._impl_stmts:
567 self._merge_impl_stmt(impl_stmt, import_options)
568
569 self._impl_stmts = []
570 self._imported_interfaces = []
571
572 def _compute_dart_idl_implements(self, idl_filename):
573 full_path = os.path.realpath(idl_filename)
574
575 with open(full_path) as f:
576 idl_file_contents = f.read()
577
578 implements_re = (r'^\s*' r'(\w+)\s+' r'implements\s+' r'(\w+)\s*' r';')
579
580 implements_matches = re.finditer(implements_re, idl_file_contents,
581 re.MULTILINE)
582 return [match.groups() for match in implements_matches]
583
    # Compile the IDL file with the Blink compiler and remember each AST for the
    # IDL.
    def _blink_compile_idl_files(self, file_paths, import_options, is_dart_idl):
        """Compile each IDL file and cache its AST via _process_ast.

        For regular IDL files, per-file dependency information is collected
        first; for dart.idl (is_dart_idl=True) only its implements pairs are
        recorded, under the special '__dart_idl___' key.
        """
        if not (is_dart_idl):
            start_time = time.time()

            # Compute information for individual files
            # Information is stored in global variables interfaces_info and
            # partial_interface_files.
            for file_path in file_paths:
                self._info_collector.collect_info(file_path)

            end_time = time.time()
            print('Compute dependencies %s seconds' % round(
                (end_time - start_time), 2))
        else:
            # Compute the interface_info for dart.idl for implements defined. This
            # file is special in that more than one interface can exist in this file.
            implement_pairs = self._compute_dart_idl_implements(file_paths[0])

            self._info_collector.interfaces_info['__dart_idl___'] = {
                'implement_pairs': implement_pairs,
            }

        # Parse the IDL files serially.
        start_time = time.time()

        for file_path in file_paths:
            file_path = os.path.normpath(file_path)
            ast = _compile_idl_file(self.build, file_path, import_options)
            # Key the AST cache by the interface (base file) name.
            self._process_ast(
                os.path.splitext(os.path.basename(file_path))[0], ast)

        end_time = time.time()
        print('Compiled %s IDL files in %s seconds' %
              (len(file_paths), round((end_time - start_time), 2)))
620
621 def _process_ast(self, filename, ast):
622 if len(ast) == 1:
623 ast = next(iter(ast.values()))
624 else:
625 print('ERROR: Processing AST: ' + os.path.basename(file_name))
626 new_asts[filename] = ast
627
    def import_idl_files(self, file_paths, import_options, is_dart_idl):
        """Compile, load and queue a set of IDL files for merging.

        Compiles every file (caching ASTs), then builds an IDLFile model
        per file and feeds it to _process_idl_file. Union-to-any warnings
        are reported at the end.
        """
        self._blink_compile_idl_files(file_paths, import_options, is_dart_idl)

        start_time = time.time()

        # Parse the IDL files in serial.
        for file_path in file_paths:
            file_path = os.path.normpath(file_path)
            idl_file = _load_idl_file(self.build, file_path, import_options)
            _logger.info('Processing %s' % os.path.splitext(
                os.path.basename(file_path))[0])
            self._process_idl_file(idl_file, import_options, is_dart_idl)

        end_time = time.time()

        # report_unions_to_any comes from the idlnode star-import.
        for warning in report_unions_to_any():
            _logger.warning(warning)

        print('Total %s files %sprocessed in databasebuilder in %s seconds' % \
            (len(file_paths), '', round((end_time - start_time), 2)))
648
    def _process_idl_file(self, idl_file, import_options, dart_idl=False):
        """Resolve typedefs, rename types, then queue the file's contents
        (interfaces, implements statements, enums, dictionaries, typedefs)
        for merging into the database."""
        # TODO(terry): strip_ext_attributes on an idl_file does nothing.
        #self._strip_ext_attributes(idl_file)
        self._resolve_type_defs(idl_file)
        self._rename_types(idl_file, import_options)

        def enabled(idl_node):
            # True when the node's [Conditional] (if any) is satisfied.
            return self._is_node_enabled(idl_node, import_options.idl_defines)

        for interface in idl_file.interfaces:
            if not self._is_node_enabled(interface, import_options.idl_defines):
                _logger.info('skipping interface %s (source=%s)' %
                             (interface.id, import_options.source))
                continue

            _logger.info('importing interface %s (source=%s file=%s)' %
                         (interface.id, import_options.source,
                          os.path.basename(idl_file.filename)))

            # Drop members disabled by [Conditional] before queueing the
            # interface for later merging.
            interface.attributes = list(filter(enabled, interface.attributes))
            interface.operations = list(filter(enabled, interface.operations))
            self._imported_interfaces.append((interface, import_options))

        # If an IDL dictionary then there is no implementsStatements.
        if hasattr(idl_file, 'implementsStatements'):
            for implStmt in idl_file.implementsStatements:
                self._impl_stmts.append((implStmt, import_options))

        for enum in idl_file.enums:
            self._database.AddEnum(enum)

        for dictionary in idl_file.dictionaries:
            self._database.AddDictionary(dictionary)

        # TODO(terry): Hack to remember all typedef unions they're mapped to any
        #              - no type.
        for typedef in idl_file.typeDefs:
            self._database.AddTypeDef(typedef)
687
688 def _is_node_enabled(self, node, idl_defines):
689 if not 'Conditional' in node.ext_attrs:
690 return True
691
692 def enabled(condition):
693 return 'ENABLE_%s' % condition in idl_defines
694
695 conditional = node.ext_attrs['Conditional']
696 if conditional.find('&') != -1:
697 for condition in conditional.split('&'):
698 condition = condition.strip()
699 self.conditionals_met.add(condition)
700 if not enabled(condition):
701 return False
702 return True
703
704 for condition in conditional.split('|'):
705 condition = condition.strip()
706 self.conditionals_met.add(condition)
707 if enabled(condition):
708 return True
709 return False
710
    def fix_displacements(self, source):
        """E.g. In W3C, something is declared on HTMLDocument but in WebKit
        its on Document, so we need to mark that something in HTMLDocument
        with @WebKit(via=Document). The 'via' attribute specifies the
        parent interface that has the declaration."""

        for interface in self._database.GetInterfaces():
            changed = False

            _logger.info('fixing displacements in %s' % interface.id)

            for parent_interface in self._get_parent_interfaces(interface):
                _logger.info('scanning parent %s of %s' % (parent_interface.id,
                                                           interface.id))

                def fix_nodes(local_list, parent_list):
                    # Annotate local members that share a signature with a
                    # parent member annotated for this source (and not
                    # already carrying a 'via' marker there).
                    changed = False
                    parent_signatures_map = self._build_signatures_map(
                        parent_list)
                    for idl_node in local_list:
                        sig = self._sign(idl_node)
                        if sig in parent_signatures_map:
                            parent_member = parent_signatures_map[sig]
                            if (source in parent_member.annotations and
                                    source not in idl_node.annotations and
                                    _VIA_ANNOTATION_ATTR_NAME not in
                                    parent_member.annotations[source]):
                                idl_node.annotations[source] = IDLAnnotation({
                                    _VIA_ANNOTATION_ATTR_NAME:
                                    parent_interface.id
                                })
                                changed = True
                    return changed

                # The outer 'changed' accumulates across all parents.
                changed = fix_nodes(interface.constants,
                                    parent_interface.constants) or changed
                changed = fix_nodes(interface.attributes,
                                    parent_interface.attributes) or changed
                changed = fix_nodes(interface.operations,
                                    parent_interface.operations) or changed
            if changed:
                _logger.info(
                    'fixed displaced declarations in %s' % interface.id)
754
    def normalize_annotations(self, sources):
        """Makes the IDLs less verbose by removing annotation attributes
        that are identical to the ones defined at the interface level.

        Args:
            sources -- list of source names to normalize."""
        for interface in self._database.GetInterfaces():
            _logger.debug('normalizing annotations for %s' % interface.id)
            for source in sources:
                if (source not in interface.annotations or
                        not interface.annotations[source]):
                    continue
                top_level_annotation = interface.annotations[source]

                def normalize(idl_node):
                    # Remove member-level attributes that duplicate the
                    # interface-level annotation exactly.
                    if (source in idl_node.annotations and
                            idl_node.annotations[source]):
                        annotation = idl_node.annotations[source]
                        for name, value in list(annotation.items()):
                            if (name in top_level_annotation and
                                    value == top_level_annotation[name]):
                                del annotation[name]

                list(map(normalize, interface.parents))
                list(map(normalize, interface.constants))
                list(map(normalize, interface.attributes))
                list(map(normalize, interface.operations))
782
784 """Changes the type of operations/constructors arguments from an IDL
785 dictionary to a Dictionary. The IDL dictionary is just an enums of
786 strings which are checked at run-time."""
787
788 def dictionary_to_map(type_node):
789 if self._database.HasDictionary(type_node.id):
790 type_node.dictionary = type_node.id
791 type_node.id = 'Dictionary'
792
793 def all_types(node):
794 list(map(dictionary_to_map, node.all(IDLType)))
795
796 for interface in self._database.GetInterfaces():
797 list(map(all_types, interface.all(IDLExtAttrFunctionValue)))
798 list(map(all_types, interface.attributes))
799 list(map(all_types, interface.operations))
800
801 def fetch_constructor_data(self, options):
802 window_interface = self._database.GetInterface('Window')
803 for attr in window_interface.attributes:
804 type = attr.type.id
805 if not type.endswith('Constructor'):
806 continue
807 type = re.sub('(Constructor)+$', '', type)
808 # TODO(antonm): Ideally we'd like to have pristine copy of WebKit IDLs and fetch
809 # this information directly from it. Unfortunately right now database is massaged
810 # a lot so it's difficult to maintain necessary information on Window itself.
811 interface = self._database.GetInterface(type)
812 if 'V8EnabledPerContext' in attr.ext_attrs:
813 interface.ext_attrs['synthesizedV8EnabledPerContext'] = \
814 attr.ext_attrs['V8EnabledPerContext']
815 if 'V8EnabledAtRuntime' in attr.ext_attrs:
816 interface.ext_attrs['synthesizedV8EnabledAtRuntime'] = \
817 attr.ext_attrs['V8EnabledAtRuntime'] or attr.id
818
819 # Iterate of the database looking for relationships between dictionaries and
820 # interfaces marked with NoInterfaceObject. This mechanism can be used for
821 # other IDL analysis.
823 # Contains list of dictionary structure: {'dictionary': dictionary, 'usages': []}
824 self._diag_dictionaries = []
826
827 # Record any dictionary.
828 for dictionary in self._database.GetDictionaries():
830 'dictionary': dictionary,
831 'usages': []
832 })
833
834 # Contains list of NoInterfaceObject structures: {'no_interface_object': dictionary, 'usages': []}
835 self._diag_no_interfaces = []
837
838 # Record any interface with Blink IDL Extended Attribute 'NoInterfaceObject'.
839 for interface in self._database.GetInterfaces():
840 if interface.is_no_interface_object:
842 'no_interface_object':
843 interface,
844 'usages': []
845 })
846
847 for interface in self._database.GetInterfaces():
848 self._constructors(interface)
849 self._constructors(interface, check_dictionaries=False)
850
851 for attribute in interface.attributes:
852 self._attribute_operation(interface, attribute)
854 interface, attribute, check_dictionaries=False)
855
856 for operation in interface.operations:
857 self._attribute_operation(interface, operation)
859 interface, operation, check_dictionaries=False)
860
861 # Report all dictionaries and their usage.
863 # Report all interface marked with NoInterfaceObject and their usage.
864 self._output_examination(check_dictionaries=False)
865
866 print('''
867Key:
868 (READ-ONLY) - read-only attribute has relationship
869 (GET/SET) - attribute has relationship
870 RETURN - operation\'s returned value has relationship
871 (ARGUMENT) - operation\'s argument(s) has relationship
872
873 (New) - After dictionary name if constructor(s) exist
874 (Ops,Props,New) after a NoInterfaceObject name is defined as:
875 Ops - number of operations for a NoInterfaceObject
876 Props - number of properties for a NoInterfaceObject
877 New - T(#) number constructors for a NoInterfaceObject
878 F no constructors for a NoInterfaceObject
879 e.g., an interface 5 operations, 3 properties and 2
880 constructors would display (5,3,T(2))
881
882
883Examination Complete
884''')
885
    def _output_examination(self, check_dictionaries=True):
        """Print one diagnostics table: dictionaries (or NoInterfaceObject
        interfaces) against the interfaces/members that use them."""
        # Output diagnostics. First columns is Dictionary or NoInterfaceObject e.g.,
        # | Dictionary | Used In Interface | Usage Operation/Attribute |
        print('\n\n')
        title_bar = ['Dictionary', 'Used In Interface', 'Usage Operation/Attribute'] if check_dictionaries \
            else ['NoInterfaceObject (Ops,Props,New)', 'Used In Interface', 'Usage Operation/Attribute']
        self._tabulate_title(title_bar)
        diags = self._diag_dictionaries if check_dictionaries else self._diag_no_interfaces
        for diag in diags:
            if not (check_dictionaries):
                # First column: "Name (ops,props,T(#constructors)|F)".
                interface = diag['no_interface_object']
                ops_count = len(interface.operations)
                properties_count = len(interface.attributes)
                any_constructors = 'Constructor' in interface.ext_attrs
                constructors = 'T(%s)' % len(interface.ext_attrs['Constructor']
                                            ) if any_constructors else 'F'
                interface_detail = '%s (%s,%s,%s)' % \
                    (diag['no_interface_object'].id,
                     ops_count,
                     properties_count,
                     constructors)
                self._tabulate([interface_detail, '', ''])
            else:
                # First column: dictionary name, "(New)" if constructible.
                dictionary = diag['dictionary']
                any_constructors = 'Constructor' in dictionary.ext_attrs
                self._tabulate([
                    '%s%s' % (dictionary.id,
                              ' (New)' if any_constructors else ''), '', ''
                ])
            for usage in diag['usages']:
                detail = ''
                if 'attribute' in usage:
                    attribute_type = 'READ-ONLY' if not usage[
                        'argument'] else 'GET/SET'
                    detail = '(%s) %s' % (attribute_type, usage['attribute'])
                elif 'operation' in usage:
                    detail = '%s %s%s' % ('RETURN' if usage['result'] else '',
                                          usage['operation'], '(ARGUMENT)'
                                          if usage['argument'] else '')
                self._tabulate([None, usage['interface'], detail])
            self._tabulate_break()
927
    # operation_or_attribute either IDLOperation or IDLAttribute if None then
    # its a constructor (IDLExtAttrFunctionValue).
    def _mark_usage(self,
                    interface,
                    operation_or_attribute=None,
                    check_dictionaries=True):
        """Fill in interface/operation/attribute details on the usage
        records previously stubbed out by _remember_usage."""
        for diag in self._diag_dictionaries if check_dictionaries else self._diag_no_interfaces:
            for usage in diag['usages']:
                # Only complete records not yet attributed to an interface.
                if not usage['interface']:
                    usage['interface'] = interface.id
                    if isinstance(operation_or_attribute, IDLOperation):
                        usage['operation'] = operation_or_attribute.id
                        # 'result': does the return type refer to this diag's
                        # dictionary / NoInterfaceObject interface?
                        if check_dictionaries:
                            usage['result'] = hasattr(operation_or_attribute.type, 'dictionary') and \
                                operation_or_attribute.type.dictionary == diag['dictionary'].id
                        else:
                            usage[
                                'result'] = operation_or_attribute.type.id == diag[
                                    'no_interface_object'].id
                        # 'argument': does any argument type refer to it?
                        usage['argument'] = False
                        for argument in operation_or_attribute.arguments:
                            if check_dictionaries:
                                arg = hasattr(
                                    argument.type, 'dictionary'
                                ) and argument.type.dictionary == diag[
                                    'dictionary'].id
                            else:
                                arg = argument.type.id == diag[
                                    'no_interface_object'].id
                            if arg:
                                usage['argument'] = arg
                    elif isinstance(operation_or_attribute, IDLAttribute):
                        usage['attribute'] = operation_or_attribute.id
                        usage['result'] = True
                        # Writable attributes count as "argument" usage too.
                        usage[
                            'argument'] = not operation_or_attribute.is_read_only
                    elif not operation_or_attribute:
                        # Its a constructor only argument is dictionary or interface with NoInterfaceObject.
                        usage['operation'] = 'constructor'
                        usage['result'] = False
                        usage['argument'] = True
969
970 def _remember_usage(self, node, check_dictionaries=True):
971 if check_dictionaries:
972 used_types = self._dictionaries_used_types
973 diag_list = self._diag_dictionaries
974 diag_name = 'dictionary'
975 else:
976 used_types = self._no_interfaces_used_types
977 diag_list = self._diag_no_interfaces
978 diag_name = 'no_interface_object'
979
980 if len(used_types) > 0:
981 normalized_used = list(set(used_types))
982 for recorded_id in normalized_used:
983 for diag in diag_list:
984 if diag[diag_name].id == recorded_id:
985 diag['usages'].append({'interface': None, 'node': node})
986
987 # Iterator function to look for any IDLType that is a dictionary then remember
988 # that dictionary.
989 def _dictionary_used(self, type_node):
990 if hasattr(type_node, 'dictionary'):
991 dictionary_id = type_node.dictionary
992 if self._database.HasDictionary(dictionary_id):
993 for diag_dictionary in self._diag_dictionaries:
994 if diag_dictionary['dictionary'].id == dictionary_id:
995 # Record the dictionary that was referenced.
996 self._dictionaries_used_types.append(dictionary_id)
997 return
998
999 # If we get to this point, the IDL dictionary was never defined ... oops.
1000 print('DIAGNOSE_ERROR: IDL Dictionary %s doesn\'t exist.' %
1001 dictionary_id)
1002
1003 # Iterator function to look for any IDLType that is an interface marked with
1004 # NoInterfaceObject then remember that interface.
1005 def _no_interface_used(self, type_node):
1006 if hasattr(type_node, 'id'):
1007 no_interface_id = type_node.id
1008 if self._database.HasInterface(no_interface_id):
1009 no_interface = self._database.GetInterface(no_interface_id)
1010 if no_interface.is_no_interface_object:
1011 for diag_no_interface in self._diag_no_interfaces:
1012 if diag_no_interface[
1013 'no_interface_object'].id == no_interface_id:
1014 # Record the interface marked with NoInterfaceObject.
1016 no_interface_id)
1017 return
1018
1019 def _constructors(self, interface, check_dictionaries=True):
1020 if check_dictionaries:
1021 self._dictionaries_used_types = []
1022 constructor_function = self._dictionary_constructor_types
1023 else:
1025 constructor_function = self._no_interface_constructor_types
1026
1027 list(map(constructor_function, interface.all(IDLExtAttrFunctionValue)))
1028
1029 self._mark_usage(interface, check_dictionaries=check_dictionaries)
1030
1031 # Scan an attribute or operation for a dictionary or interface with NoInterfaceObject
1032 # reference.
1033 def _attribute_operation(self,
1034 interface,
1035 operation_attribute,
1036 check_dictionaries=True):
1037 if check_dictionaries:
1038 self._dictionaries_used_types = []
1039 used = self._dictionary_used
1040 else:
1042 used = self._no_interface_used
1043
1044 list(map(used, operation_attribute.all(IDLType)))
1045
1046 self._remember_usage(
1047 operation_attribute, check_dictionaries=check_dictionaries)
1048 self._mark_usage(
1049 interface,
1050 operation_attribute,
1051 check_dictionaries=check_dictionaries)
1052
1053 # Iterator function for map to iterate over all constructor types
1054 # (IDLExtAttrFunctionValue) that have a dictionary reference.
1055 def _dictionary_constructor_types(self, node):
1056 self._dictionaries_used_types = []
1057 list(map(self._dictionary_used, node.all(IDLType)))
1058 self._remember_usage(node)
1059
1060 # Iterator function for map to iterate over all constructor types
1061 # (IDLExtAttrFunctionValue) that reference an interface with NoInterfaceObject.
1062 def _no_interface_constructor_types(self, node):
1064 list(map(self._no_interface_used, node.all(IDLType)))
1065 self._remember_usage(node, check_dictionaries=False)
1066
1067 # Maximum width of each column.
1068 def _TABULATE_WIDTH(self):
1069 return 45
1070
1071 def _tabulate_title(self, row_title):
1072 title_separator = "=" * self._TABULATE_WIDTH()
1073 self._tabulate([title_separator, title_separator, title_separator])
1074 self._tabulate(row_title)
1075 self._tabulate([title_separator, title_separator, title_separator])
1076
1077 def _tabulate_break(self):
1078 break_separator = "-" * self._TABULATE_WIDTH()
1079 self._tabulate([break_separator, break_separator, break_separator])
1080
1081 def _tabulate(self, columns):
1082 """Tabulate a list of columns for a row. Each item in columns is a column
1083 value each column will be padded up to _TABULATE_WIDTH. Each
1084 column starts/ends with a vertical bar '|' the format a row:
1085
1086 | columns[0] | columns[1] | columns[2] | ... |
1087 """
1088 if len(columns) > 0:
1089 for column in columns:
1090 value = '' if not column else column
1091 sys.stdout.write('|{0:^{1}}'.format(value,
1092 self._TABULATE_WIDTH()))
1093 else:
1094 sys.stdout.write('|{0:^{1}}'.format('', self._TABULATE_WIDTH()))
1095
1096 sys.stdout.write('|\n')
static void round(SkPoint *p)
static float next(float f)
GLenum type
def __init__(self, provider)
def generate_from_idl(self, idl_file)
def __init__(self, idl_defines=[], source=None, source_attributes={}, rename_operation_arguments_on_merge=False, add_new_interfaces=True, obsolete_old_declarations=False, logging_level=logging.WARNING)
def _mark_usage(self, interface, operation_or_attribute=None, check_dictionaries=True)
def _merge_interfaces(self, old_interface, new_interface, import_options)
def _merge_nodes(self, old_list, new_list, import_options)
def _build_signatures_map(self, idl_node_list)
def _is_node_enabled(self, node, idl_defines)
def _attribute_operation(self, interface, operation_attribute, check_dictionaries=True)
def _constructors(self, interface, check_dictionaries=True)
def _compute_dart_idl_implements(self, idl_filename)
def import_idl_files(self, file_paths, import_options, is_dart_idl)
def _output_examination(self, check_dictionaries=True)
def _merge_impl_stmt(self, impl_stmt, import_options)
def _merge_ext_attrs(self, old_attrs, new_attrs)
def _blink_compile_idl_files(self, file_paths, import_options, is_dart_idl)
def _remember_usage(self, node, check_dictionaries=True)
def _process_idl_file(self, idl_file, import_options, dart_idl=False)
def _annotate(self, interface, import_options)
def _rename_types(self, idl_file, import_options)
static void append(char **dst, size_t *count, const char *src, size_t n)
Definition: editor.cpp:211
uint32_t uint32_t * format
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not set
Definition: switches.h:76
def print(*args, **kwargs)
Definition: run_tests.py:49
def report_unions_to_any()
Definition: idlnode.py:20
SIN Vec< N, float > normalize(const Vec< N, float > &v)
Definition: SkVx.h:995
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
Definition: SkVx.h:680
#define T
Definition: precompiler.cc:65
static SkString join(const CommandLineFlags::StringArray &)
Definition: skpbench.cpp:741