Flutter Engine
The Flutter Engine
Classes | Functions
explain_binary_size_delta Namespace Reference

Classes

class  CrunchStatsData
 
class  DeltaInfo
 
class  SymbolDelta
 

Functions

def CalculateSharedAddresses (symbols)
 
def CalculateEffectiveSize (share_count, address, symbol_size)
 
def Compare (symbols1, symbols2)
 
def DeltaStr (number)
 
def SharedInfoStr (symbol_info)
 
def CrunchStats (added, removed, changed, unchanged, showsources, showsymbols)
 
def main ()
 

Function Documentation

◆ CalculateEffectiveSize()

def explain_binary_size_delta.CalculateEffectiveSize (   share_count,
  address,
  symbol_size 
)
Given a raw symbol_size and an address, this method returns the
size we should blame on this symbol considering it might share the
machine code/data with other symbols. Using the raw symbol_size for
each symbol would in those cases overestimate the true cost of that
block.

Definition at line 61 of file explain_binary_size_delta.py.

def CalculateEffectiveSize(share_count, address, symbol_size):
  """Given a raw symbol_size and an address, this method returns the
  size we should blame on this symbol considering it might share the
  machine code/data with other symbols. Using the raw symbol_size for
  each symbol would in those cases overestimate the true cost of that
  block.

  Args:
    share_count: mapping (e.g. a Counter) from address to the number of
      symbols occupying that address.
    address: the symbol's address; used as the key into share_count.
    symbol_size: the raw size of the symbol, in bytes.

  Returns:
    int: symbol_size when the address is unshared, otherwise the size
    divided evenly among the sharers, rounded up.
  """
  shared_count = share_count[address]
  if shared_count == 1:
    return symbol_size

  assert shared_count > 1
  # Exact integer ceiling division. The previous
  # int(ceil(symbol_size / float(shared_count))) went through a float and
  # could round incorrectly for sizes above 2**53; pure integer arithmetic
  # gives the same result for normal sizes and stays exact for huge ones.
  return -(-symbol_size // shared_count)
def CalculateEffectiveSize(share_count, address, symbol_size)
SIN Vec< N, float > ceil(const Vec< N, float > &x)
Definition: SkVx.h:702

◆ CalculateSharedAddresses()

def explain_binary_size_delta.CalculateSharedAddresses (   symbols)
Checks how many symbols share the same memory space. This returns a
Counter result where result[address] will tell you how many times address was
used by symbols.

Definition at line 50 of file explain_binary_size_delta.py.

def CalculateSharedAddresses(symbols):
  """Count how many symbols occupy each address.

  Returns a Counter where result[address] is the number of symbols that
  reported that address.
  """
  return Counter(address for _, _, _, _, address in symbols)

◆ Compare()

def explain_binary_size_delta.Compare (   symbols1,
  symbols2 
)
Executes a comparison of the symbols in symbols1 and symbols2.

Returns:
  tuple of lists: (added_symbols, removed_symbols, changed_symbols, others)
  where each list contains DeltaInfo objects.

Definition at line 124 of file explain_binary_size_delta.py.

def Compare(symbols1, symbols2):
  """Executes a comparison of the symbols in symbols1 and symbols2.

  Symbols are 5-tuples of (name, type, size, file_path, address).

  Returns:
    tuple of lists: (added_symbols, removed_symbols, changed_symbols, others)
    where each list contains DeltaInfo objects.
  """
  added = []  # DeltaInfo objects
  removed = []  # DeltaInfo objects
  changed = []  # DeltaInfo objects
  unchanged = []  # DeltaInfo objects

  cache1 = {}
  cache2 = {}
  # Make a map of (file, symbol_type) : (symbol_name, effective_symbol_size)
  share_count1 = CalculateSharedAddresses(symbols1)
  share_count2 = CalculateSharedAddresses(symbols2)
  for cache, symbols, share_count in ((cache1, symbols1, share_count1),
                                      (cache2, symbols2, share_count2)):
    for symbol_name, symbol_type, symbol_size, file_path, address in symbols:
      if 'vtable for ' in symbol_name:
        symbol_type = '@'  # hack to categorize these separately
      if file_path:
        file_path = os.path.normpath(file_path)
        if sys.platform.startswith('win'):
          file_path = file_path.replace('\\', '/')
      else:
        file_path = '(No Path)'
      # Take into consideration that multiple symbols might share the same
      # block of code.
      effective_symbol_size = CalculateEffectiveSize(share_count, address,
                                                     symbol_size)
      key = (file_path, symbol_type)
      bucket = cache.setdefault(key, {})
      size_list = bucket.setdefault(symbol_name, [])
      size_list.append(
          (effective_symbol_size, effective_symbol_size != symbol_size))

  # Now diff them. We iterate over the elements in cache1. For each symbol
  # that we find in cache2, we record whether it was deleted, changed, or
  # unchanged. We then remove it from cache2; all the symbols that remain
  # in cache2 at the end of the iteration over cache1 are the 'new' symbols.
  #
  # BUG FIX: iterate over a snapshot of cache1's items, because the loop
  # body deletes exhausted keys from cache1 below; mutating a dict while
  # iterating its view raises RuntimeError on Python 3.
  for key, bucket1 in list(cache1.items()):
    bucket2 = cache2.get(key)
    file_path, symbol_type = key
    if not bucket2:
      # A file was removed. Everything in bucket1 is dead.
      for symbol_name, symbol_size_list in bucket1.items():
        for (symbol_size, shared) in symbol_size_list:
          delta_info = DeltaInfo(file_path, symbol_type, symbol_name, shared)
          delta_info.old_size = symbol_size
          removed.append(delta_info)
    else:
      # File still exists, look for changes within.
      for symbol_name, symbol_size_list in bucket1.items():
        size_list2 = bucket2.get(symbol_name)
        if size_list2 is None:
          # Symbol no longer exists in bucket2.
          for (symbol_size, shared) in symbol_size_list:
            delta_info = DeltaInfo(file_path, symbol_type, symbol_name, shared)
            delta_info.old_size = symbol_size
            removed.append(delta_info)
        else:
          # Symbol is not new, delete from cache2.
          del bucket2[symbol_name]
          if len(symbol_size_list) == 1 and len(size_list2) == 1:
            # Simple case: the symbol occurs once on each side.
            symbol_size, shared1 = symbol_size_list[0]
            size2, shared2 = size_list2[0]
            delta_info = DeltaInfo(file_path, symbol_type, symbol_name,
                                   shared1 or shared2)
            delta_info.old_size = symbol_size
            delta_info.new_size = size2
            if symbol_size != size2:
              # Symbol has changed size in bucket.
              changed.append(delta_info)
            else:
              # Symbol is unchanged.
              unchanged.append(delta_info)
          else:
            # Complex comparison for when a symbol exists multiple times
            # in the same file (where file can be "unknown file").
            symbol_size_counter = collections.Counter(symbol_size_list)
            delta_counter = collections.Counter(symbol_size_list)
            delta_counter.subtract(size_list2)
            for delta_counter_key in sorted(delta_counter.keys()):
              delta = delta_counter[delta_counter_key]
              unchanged_count = symbol_size_counter[delta_counter_key]
              (symbol_size, shared) = delta_counter_key
              if delta > 0:
                unchanged_count -= delta
              for _ in range(unchanged_count):
                delta_info = DeltaInfo(file_path, symbol_type, symbol_name,
                                       shared)
                delta_info.old_size = symbol_size
                delta_info.new_size = symbol_size
                unchanged.append(delta_info)
              if delta > 0:  # Used to be more of these than there is now.
                for _ in range(delta):
                  delta_info = DeltaInfo(file_path, symbol_type, symbol_name,
                                         shared)
                  delta_info.old_size = symbol_size
                  removed.append(delta_info)
              elif delta < 0:  # More of this (symbol,size) now.
                for _ in range(-delta):
                  delta_info = DeltaInfo(file_path, symbol_type, symbol_name,
                                         shared)
                  delta_info.new_size = symbol_size
                  added.append(delta_info)

      if len(bucket2) == 0:
        # Entire bucket is accounted for; drop the key from cache1. (The
        # original comment said "delete from cache2", but the code deletes
        # from cache1 — safe now thanks to the list() snapshot above.)
        del cache1[key]

  # We have now analyzed all symbols that are in cache1 and removed all of
  # the encountered symbols from cache2. What's left in cache2 is the new
  # symbols.
  for key, bucket2 in cache2.items():
    file_path, symbol_type = key
    for symbol_name, symbol_size_list in bucket2.items():
      for (symbol_size, shared) in symbol_size_list:
        delta_info = DeltaInfo(file_path, symbol_type, symbol_name, shared)
        delta_info.new_size = symbol_size
        added.append(delta_info)
  return (added, removed, changed, unchanged)

◆ CrunchStats()

def explain_binary_size_delta.CrunchStats (   added,
  removed,
  changed,
  unchanged,
  showsources,
  showsymbols 
)
Outputs to stdout a summary of changes based on the symbol lists.

Definition at line 288 of file explain_binary_size_delta.py.

def CrunchStats(added, removed, changed, unchanged, showsources, showsymbols):
  """Outputs to stdout a summary of changes based on the symbol lists.

  Args:
    added, removed, changed, unchanged: lists of DeltaInfo objects as
      produced by Compare().
    showsources: if True, also print per-source statistics.
    showsymbols: if True, also print per-symbol details for each source.
  """
  # Split changed into grown and shrunk because that is easier to
  # discuss.
  grown = []
  shrunk = []
  for item in changed:
    if item.old_size < item.new_size:
      grown.append(item)
    else:
      shrunk.append(item)

  # Aggregate each category: total before/after byte counts, the set of
  # source files involved, and a per-path bucket of
  # (symbol_name, symbol_type, SymbolDelta) entries.
  new_symbols = CrunchStatsData(added)
  removed_symbols = CrunchStatsData(removed)
  grown_symbols = CrunchStatsData(grown)
  shrunk_symbols = CrunchStatsData(shrunk)
  sections = [new_symbols, removed_symbols, grown_symbols, shrunk_symbols]
  for section in sections:
    for item in section.symbols:
      section.sources.add(item.file_path)
      if item.old_size is not None:
        section.before_size += item.old_size
      if item.new_size is not None:
        section.after_size += item.new_size
      bucket = section.symbols_by_path.setdefault(item.file_path, [])
      bucket.append(
          (item.symbol_name, item.symbol_type, item.ExtractSymbolDelta()))

  total_change = sum(s.after_size - s.before_size for s in sections)
  summary = 'Total change: %s bytes' % DeltaStr(total_change)
  print(summary)
  print('=' * len(summary))
  for section in sections:
    if not section.symbols:
      continue
    # before_size == 0 means every symbol in this section is new;
    # after_size == 0 means every symbol was removed.
    if section.before_size == 0:
      description = ('added, totalling %s bytes' % DeltaStr(section.after_size))
    elif section.after_size == 0:
      description = (
          'removed, totalling %s bytes' % DeltaStr(-section.before_size))
    else:
      if section.after_size > section.before_size:
        type_str = 'grown'
      else:
        type_str = 'shrunk'
      description = ('%s, for a net change of %s bytes '
                     '(%d bytes before, %d bytes after)' %
                     (type_str, DeltaStr(section.after_size -
                                         section.before_size),
                      section.before_size, section.after_size))
    print('  %d %s across %d sources' %
          (len(section.symbols), description, len(section.sources)))

  # Unchanged symbols are tracked separately; for them old_size == new_size.
  maybe_unchanged_sources = set()
  unchanged_symbols_size = 0
  for item in unchanged:
    maybe_unchanged_sources.add(item.file_path)
    unchanged_symbols_size += item.old_size  # == item.new_size
  print('  %d unchanged, totalling %d bytes' %
        (len(unchanged), unchanged_symbols_size))

  # High level analysis, always output.
  unchanged_sources = maybe_unchanged_sources
  for section in sections:
    unchanged_sources = unchanged_sources - section.sources
  new_sources = (
      new_symbols.sources - maybe_unchanged_sources - removed_symbols.sources)
  removed_sources = (
      removed_symbols.sources - maybe_unchanged_sources - new_symbols.sources)
  partially_changed_sources = (
      grown_symbols.sources | shrunk_symbols.sources | new_symbols.sources |
      removed_symbols.sources) - removed_sources - new_sources
  allFiles = set()
  for section in sections:
    allFiles = allFiles | section.sources
  allFiles = allFiles | maybe_unchanged_sources
  print('Source stats:')
  print('  %d sources encountered.' % len(allFiles))
  print('  %d completely new.' % len(new_sources))
  print('  %d removed completely.' % len(removed_sources))
  print('  %d partially changed.' % len(partially_changed_sources))
  print('  %d completely unchanged.' % len(unchanged_sources))
  # Sanity check: the four categories above must partition allFiles.
  remainder = (allFiles - new_sources - removed_sources -
               partially_changed_sources - unchanged_sources)
  assert len(remainder) == 0

  if not showsources:
    return  # Per-source analysis, only if requested
  print('Per-source Analysis:')
  # delta_by_path accumulates, per source file, the total bytes gained
  # ('plus') and lost ('minus') across all four sections.
  delta_by_path = {}
  for section in sections:
    for path in section.symbols_by_path:
      entry = delta_by_path.get(path)
      if not entry:
        entry = {'plus': 0, 'minus': 0}
        delta_by_path[path] = entry
      for symbol_name, symbol_type, symbol_delta in \
          section.symbols_by_path[path]:
        # old_size is None for added symbols; new_size is None for removed.
        if symbol_delta.old_size is None:
          delta = symbol_delta.new_size
        elif symbol_delta.new_size is None:
          delta = -symbol_delta.old_size
        else:
          delta = symbol_delta.new_size - symbol_delta.old_size

        if delta > 0:
          entry['plus'] += delta
        else:
          entry['minus'] += (-1 * delta)

  # Sort key: net growth per path; combined with reverse=True below this
  # lists the biggest gainers first.
  def delta_sort_key(item):
    _path, size_data = item
    growth = size_data['plus'] - size_data['minus']
    return growth

  for path, size_data in sorted(delta_by_path.items(),
                                key=delta_sort_key,
                                reverse=True):
    gain = size_data['plus']
    loss = size_data['minus']
    delta = size_data['plus'] - size_data['minus']
    header = '  %s - Source: %s - (gained %d, lost %d)' % (DeltaStr(delta),
                                                           path, gain, loss)
    divider = '-' * len(header)
    print('')
    print(divider)
    print(header)
    print(divider)
    if showsymbols:

      # key= helpers for sorting the per-symbol listings below.
      def ExtractNewSize(tup):
        symbol_delta = tup[2]
        return symbol_delta.new_size

      def ExtractOldSize(tup):
        symbol_delta = tup[2]
        return symbol_delta.old_size

      if path in new_symbols.symbols_by_path:
        print('  New symbols:')
        for symbol_name, symbol_type, symbol_delta in \
            sorted(new_symbols.symbols_by_path[path],
                   key=ExtractNewSize,
                   reverse=True):
          print('   %8s: %s type=%s, size=%d bytes%s' %
                (DeltaStr(symbol_delta.new_size), symbol_name, symbol_type,
                 symbol_delta.new_size, SharedInfoStr(symbol_delta)))
      if path in removed_symbols.symbols_by_path:
        print('  Removed symbols:')
        for symbol_name, symbol_type, symbol_delta in \
            sorted(removed_symbols.symbols_by_path[path],
                   key=ExtractOldSize):
          print('   %8s: %s type=%s, size=%d bytes%s' %
                (DeltaStr(-symbol_delta.old_size), symbol_name, symbol_type,
                 symbol_delta.old_size, SharedInfoStr(symbol_delta)))
      for (changed_symbols_by_path,
           type_str) in [(grown_symbols.symbols_by_path, "Grown"),
                         (shrunk_symbols.symbols_by_path, "Shrunk")]:
        if path in changed_symbols_by_path:
          print('  %s symbols:' % type_str)

          # Ascending (old - new, name): the most-grown symbols (most
          # negative old - new) sort first within the section.
          def changed_symbol_sortkey(item):
            symbol_name, _symbol_type, symbol_delta = item
            return (symbol_delta.old_size - symbol_delta.new_size, symbol_name)

          for symbol_name, symbol_type, symbol_delta in \
              sorted(changed_symbols_by_path[path],
                     key=changed_symbol_sortkey):
            print('   %8s: %s type=%s, (was %d bytes, now %d bytes)%s' %
                  (DeltaStr(symbol_delta.new_size - symbol_delta.old_size),
                   symbol_name, symbol_type, symbol_delta.old_size,
                   symbol_delta.new_size, SharedInfoStr(symbol_delta)))
def CrunchStats(added, removed, changed, unchanged, showsources, showsymbols)
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not set
Definition: switches.h:76
def print(*args, **kwargs)
Definition: run_tests.py:49

◆ DeltaStr()

def explain_binary_size_delta.DeltaStr (   number)
Returns the number as a string with a '+' prefix if it's > 0 and
a '-' prefix if it's < 0.

Definition at line 257 of file explain_binary_size_delta.py.

def DeltaStr(number):
  """Render *number* as a string with an explicit '+' sign when positive.

  Negative numbers already carry a '-' from str(); zero stays unsigned.
  """
  return ('+' if number > 0 else '') + str(number)

◆ main()

def explain_binary_size_delta.main ( )

Definition at line 468 of file explain_binary_size_delta.py.

def main():
  # Command-line entry point: parse options, load both nm dumps, diff the
  # symbol lists, and print the summary/analysis.
  usage = """%prog [options]

  Analyzes the symbolic differences between two binary files
  (typically, not necessarily, two different builds of the same
  library) and produces a detailed description of symbols that have
  been added, removed, or whose size has changed.

  Example:
       explain_binary_size_delta.py --nm1 /tmp/nm1.dump --nm2 /tmp/nm2.dump

  Options are available via '--help'.
  """
  parser = optparse.OptionParser(usage=usage)
  parser.add_option(
      '--nm1', metavar='PATH', help='the nm dump of the first library')
  parser.add_option(
      '--nm2', metavar='PATH', help='the nm dump of the second library')
  parser.add_option(
      '--showsources',
      action='store_true',
      default=False,
      help='show per-source statistics')
  parser.add_option(
      '--showsymbols',
      action='store_true',
      default=False,
      help='show all symbol information; implies --showsources')
  parser.add_option(
      '--verbose',
      action='store_true',
      default=False,
      help='output internal debugging stuff')
  opts, _args = parser.parse_args()

  # Both dumps are mandatory; exit with a usage error otherwise.
  if not opts.nm1:
    parser.error('--nm1 is required')
  if not opts.nm2:
    parser.error('--nm2 is required')
  symbols = []
  for path in [opts.nm1, opts.nm2]:
    with open(path, 'r') as nm_input:
      if opts.verbose:
        print('parsing ' + path + '...')
      symbols.append(list(binary_size_utils.ParseNm(nm_input)))
  (added, removed, changed, unchanged) = Compare(symbols[0], symbols[1])
  # --showsymbols implies --showsources (bitwise | on bools acts as or).
  CrunchStats(added, removed, changed, unchanged,
              opts.showsources | opts.showsymbols, opts.showsymbols)
def ParseNm(nm_lines)

◆ SharedInfoStr()

def explain_binary_size_delta.SharedInfoStr (   symbol_info)
Returns a string (prefixed by a space) explaining that numbers are
adjusted because of shared space between symbols, or an empty string
if no space was shared.

Definition at line 266 of file explain_binary_size_delta.py.

def SharedInfoStr(symbol_info):
  """Return a space-prefixed note that sizes were adjusted for memory
  shared between symbols, or the empty string when nothing was shared."""
  if not symbol_info.shares_space_with_other_symbols:
    return ""

  return " (adjusted sizes because of memory sharing)"