5"""Describe the size difference of two binaries.
7Generates a description of the size difference of two binaries based
8on the difference of the size of various symbols.
10This tool needs "nm" dumps of each binary with full symbol
11information. You can obtain the necessary dumps by running the
12run_binary_size_analysis.py script upon each binary, with the
13"--nm-out" parameter set to the location in which you want to save the
18 buildtools/ninja/ninja -C out/Release binary_size_tool
19 tools/binary_size/run_binary_size_analysis \
20 --library <path_to_library>
21 --destdir /tmp/throwaway
22 --nm-out /tmp/nm1.dump
26 buildtools/ninja/ninja -C out/Release binary_size_tool
27 tools/binary_size/run_binary_size_analysis \
28 --library <path_to_library>
29 --destdir /tmp/throwaway
30 --nm-out /tmp/nm2.dump
36 explain_binary_size_delta.py --nm1 /tmp/nm1.dump --nm2 /tmp/nm2.dump

import collections
from collections import Counter
from math import ceil
import optparse
import os
import sys

import binary_size_utils


def CalculateSharedAddresses(symbols):
  """Checks how many symbols share the same memory space. Returns a
  Counter where result[address] tells you how many symbols were mapped
  to that address."""
  count = Counter()
  for _, _, _, _, address in symbols:
    count[address] += 1

  return count
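
# For example, if identical code folding maps two hypothetical symbols
# 'Foo()' and 'Bar()' onto address 0x1000, the returned Counter has
# count[0x1000] == 2, and CalculateEffectiveSize() below will split the
# block's size evenly between the two symbols.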
62 """Given a raw symbol_size and an address, this method returns the
63 size we should blame on this symbol considering it might share the
64 machine code/data with other symbols. Using the raw symbol_size
for
65 each symbol would
in those cases over estimate the true cost of that
69 shared_count = share_count[address]
73 assert shared_count > 1
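
# Worked example (not part of the original tool): with three symbols folded
# onto one address and a raw block size of 91 bytes, each symbol is blamed
# for ceil(91 / 3) = 31 bytes:
#
#   CalculateEffectiveSize(Counter({0x1000: 3}), 0x1000, 91)  # -> 31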
78 """Stores old size, new size and some metadata."""
86 return (self.
old_size == other.old_size
and
89 shares_space_with_other_symbols)
92 return not self.
__eq__(other)
96 symbol_delta.old_size = self.
old_size
97 symbol_delta.new_size = self.
new_size
102 """Summary of a the change for one symbol between two instances."""
104 def __init__(self, file_path, symbol_type, symbol_name, shared):
105 SymbolDelta.__init__(self, shared)
111 return (self.
file_path == other.file_path
and
114 SymbolDelta.__eq__(self, other))
120 """Returns a copy of the SymbolDelta for this DeltaInfo."""
121 return SymbolDelta.copy_symbol_delta(self)
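
# A DeltaInfo records one symbol's transition between the two binaries:
# old_size is None for a symbol that only exists in the second binary
# (added), new_size is None for one that only exists in the first
# (removed), and both are set for changed or unchanged symbols.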
125 """Executes a comparison of the symbols in symbols1 and symbols2.
128 tuple of lists: (added_symbols, removed_symbols, changed_symbols, others)
129 where each list contains DeltaInfo objects.
141 for cache, symbols, share_count
in ((cache1, symbols1, share_count1),
142 (cache2, symbols2, share_count2)):
143 for symbol_name, symbol_type, symbol_size, file_path, address
in symbols:
144 if 'vtable for ' in symbol_name:
147 file_path = os.path.normpath(file_path)
148 if sys.platform.startswith(
'win'):
149 file_path = file_path.replace(
'\\',
'/')
151 file_path =
'(No Path)'
155 share_count, address, symbol_size)
156 key = (file_path, symbol_type)
157 bucket = cache.setdefault(key, {})
158 size_list = bucket.setdefault(symbol_name, [])
159 size_list.append((effective_symbol_size,
160 effective_symbol_size != symbol_size))
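
  # At this point each cache has the following shape (illustrative values,
  # not taken from a real binary):
  #
  #   {('content/common/foo.cc', 't'): {'Foo::Bar()': [(112, False)]},
  #    ('(No Path)', 'r'): {'kTable': [(64, True), (64, True)]}}
  #
  # i.e. (file_path, symbol_type) -> symbol_name -> list of
  # (effective_size, was_adjusted_for_sharing) entries.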
  # Now diff the two caches. We iterate over cache1; each symbol found in
  # cache2 is recorded as removed, changed or unchanged and then deleted
  # from cache2, so whatever remains in cache2 afterwards is new.
  for key, bucket1 in cache1.items():
    bucket2 = cache2.get(key)
    file_path, symbol_type = key
    if not bucket2:
      # The file was removed; everything in bucket1 is gone.
      for symbol_name, symbol_size_list in bucket1.items():
        for (symbol_size, shared) in symbol_size_list:
          delta_info = DeltaInfo(file_path, symbol_type, symbol_name, shared)
          delta_info.old_size = symbol_size
          removed.append(delta_info)
    else:
      # The file still exists; look for changes within it.
      for symbol_name, symbol_size_list in bucket1.items():
        size_list2 = bucket2.get(symbol_name)
        if size_list2 is None:
          # The symbol no longer exists in the second binary.
          for (symbol_size, shared) in symbol_size_list:
            delta_info = DeltaInfo(file_path, symbol_type,
                                   symbol_name, shared)
            delta_info.old_size = symbol_size
            removed.append(delta_info)
        else:
          del bucket2[symbol_name]  # Not new; remove it from cache2.
          if len(symbol_size_list) == 1 and len(size_list2) == 1:
            # The symbol occurs exactly once in each binary.
            symbol_size, shared1 = symbol_size_list[0]
            size2, shared2 = size_list2[0]
            delta_info = DeltaInfo(file_path, symbol_type,
                                   symbol_name, shared1 or shared2)
            delta_info.old_size = symbol_size
            delta_info.new_size = size2
            if symbol_size != size2:
              changed.append(delta_info)
            else:
              unchanged.append(delta_info)
          else:
            # The symbol occurs multiple times in at least one of the
            # binaries; match up counts of identical (size, shared) pairs.
            symbol_size_counter = collections.Counter(symbol_size_list)
            delta_counter = collections.Counter(symbol_size_list)
            delta_counter.subtract(size_list2)
            for delta_counter_key in sorted(delta_counter.keys()):
              delta = delta_counter[delta_counter_key]
              unchanged_count = symbol_size_counter[delta_counter_key]
              (symbol_size, shared) = delta_counter_key
              if delta > 0:
                unchanged_count -= delta
              for _ in range(unchanged_count):
                delta_info = DeltaInfo(file_path, symbol_type,
                                       symbol_name, shared)
                delta_info.old_size = symbol_size
                delta_info.new_size = symbol_size
                unchanged.append(delta_info)
              if delta > 0:  # More instances of this pair before.
                for _ in range(delta):
                  delta_info = DeltaInfo(
                      file_path, symbol_type, symbol_name, shared)
                  delta_info.old_size = symbol_size
                  removed.append(delta_info)
              elif delta < 0:  # More instances of this pair now.
                for _ in range(-delta):
                  delta_info = DeltaInfo(
                      file_path, symbol_type, symbol_name, shared)
                  delta_info.new_size = symbol_size
                  added.append(delta_info)
      if len(bucket2) == 0:
        # Everything in this bucket was matched; drop it so the final
        # pass over cache2 only sees buckets containing new symbols.
        del cache2[key]

  # Whatever is left in cache2 was not present in the first binary:
  # these are the new symbols.
  for key, bucket2 in cache2.items():
    file_path, symbol_type = key
    for symbol_name, symbol_size_list in bucket2.items():
      for (symbol_size, shared) in symbol_size_list:
        delta_info = DeltaInfo(file_path, symbol_type, symbol_name, shared)
        delta_info.new_size = symbol_size
        added.append(delta_info)
  return (added, removed, changed, unchanged)
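
# Minimal usage sketch (hypothetical symbol tuples, in the
# (name, type, size, path, address) order consumed by Compare above):
#
#   old = [('Foo()', 't', 48, 'foo.cc', 0x100)]
#   new = [('Foo()', 't', 52, 'foo.cc', 0x100),
#          ('Bar()', 't', 16, 'bar.cc', 0x200)]
#   added, removed, changed, unchanged = Compare(old, new)
#   # 'Bar()' ends up in added; 'Foo()' ends up in changed (48 -> 52 bytes).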
258 """Returns the number as a string with a '+' prefix if it's > 0 and
259 a '-' prefix
if it
's < 0."""
262 result =
'+' + result
267 """Returns a string (prefixed by space) explaining that numbers are
268 adjusted because of shared space between symbols, or an empty string
269 if space had
not been shared.
"""
271 if symbol_info.shares_space_with_other_symbols:
272 return " (adjusted sizes because of memory sharing)"
278 """Stores a summary of data of a certain kind."""


def CrunchStats(added, removed, changed, unchanged, showsources, showsymbols):
  """Outputs to stdout a summary of changes based on the symbol lists."""
  # Split changed into grown and shrunk because that is easier to discuss.
  grown = []
  shrunk = []
  for item in changed:
    if item.old_size < item.new_size:
      grown.append(item)
    else:
      shrunk.append(item)

  new_symbols = CrunchStatsData(added)
  removed_symbols = CrunchStatsData(removed)
  grown_symbols = CrunchStatsData(grown)
  shrunk_symbols = CrunchStatsData(shrunk)
  sections = [new_symbols, removed_symbols, grown_symbols, shrunk_symbols]
  for section in sections:
    for item in section.symbols:
      section.sources.add(item.file_path)
      if item.old_size is not None:
        section.before_size += item.old_size
      if item.new_size is not None:
        section.after_size += item.new_size
      bucket = section.symbols_by_path.setdefault(item.file_path, [])
      bucket.append((item.symbol_name, item.symbol_type,
                     item.ExtractSymbolDelta()))
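
  # Each CrunchStatsData in `sections` now knows which sources it touches,
  # its total size before and after, and, per path, the
  # (symbol_name, symbol_type, SymbolDelta) tuples used below.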

  total_change = sum(s.after_size - s.before_size for s in sections)
  summary = 'Total change: %s bytes' % DeltaStr(total_change)
  print(summary)
  print('=' * len(summary))
  for section in sections:
    if not section.symbols:
      continue
    if section.before_size == 0:
      description = ('added, totalling %s bytes' %
                     DeltaStr(section.after_size))
    elif section.after_size == 0:
      description = ('removed, totalling %s bytes' %
                     DeltaStr(-section.before_size))
    else:
      if section.after_size > section.before_size:
        type_str = 'grown'
      else:
        type_str = 'shrunk'
      description = ('%s, for a net change of %s bytes '
                     '(%d bytes before, %d bytes after)' %
                     (type_str,
                      DeltaStr(section.after_size - section.before_size),
                      section.before_size, section.after_size))
    print(' %d %s across %d sources' %
          (len(section.symbols), description, len(section.sources)))

  maybe_unchanged_sources = set()
  unchanged_symbols_size = 0
  for item in unchanged:
    maybe_unchanged_sources.add(item.file_path)
    unchanged_symbols_size += item.old_size
  print(' %d unchanged, totalling %d bytes' % (len(unchanged),
                                               unchanged_symbols_size))

  # High-level analysis, always output.
  unchanged_sources = maybe_unchanged_sources
  for section in sections:
    unchanged_sources = unchanged_sources - section.sources
  new_sources = (
      new_symbols.sources - maybe_unchanged_sources - removed_symbols.sources)
  removed_sources = (
      removed_symbols.sources - maybe_unchanged_sources - new_symbols.sources)
  partially_changed_sources = (
      grown_symbols.sources | shrunk_symbols.sources | new_symbols.sources |
      removed_symbols.sources) - removed_sources - new_sources
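
  # For example, a source that only appears among grown symbols lands in
  # partially_changed_sources, while a source that appears only among new
  # symbols (and nowhere else, not even among unchanged ones) lands in
  # new_sources.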

  allFiles = set()
  for section in sections:
    allFiles = allFiles | section.sources
  allFiles = allFiles | maybe_unchanged_sources
  print('Source stats:')
  print(' %d sources encountered.' % len(allFiles))
  print(' %d completely new.' % len(new_sources))
  print(' %d removed completely.' % len(removed_sources))
  print(' %d partially changed.' % len(partially_changed_sources))
  print(' %d completely unchanged.' % len(unchanged_sources))
  remainder = (allFiles - new_sources - removed_sources -
               partially_changed_sources - unchanged_sources)
  assert len(remainder) == 0

  if not showsources:
    return  # Per-source analysis only when requested.

  print('Per-source Analysis:')
  delta_by_path = {}
  for section in sections:
    for path in section.symbols_by_path:
      entry = delta_by_path.get(path)
      if not entry:
        entry = {'plus': 0, 'minus': 0}
        delta_by_path[path] = entry
      for symbol_name, symbol_type, symbol_delta in \
          section.symbols_by_path[path]:
        if symbol_delta.old_size is None:
          delta = symbol_delta.new_size
        elif symbol_delta.new_size is None:
          delta = -symbol_delta.old_size
        else:
          delta = symbol_delta.new_size - symbol_delta.old_size

        if delta > 0:
          entry['plus'] += delta
        else:
          entry['minus'] += (-1 * delta)

  def delta_sort_key(item):
    _path, size_data = item
    growth = size_data['plus'] - size_data['minus']
    return growth

  for path, size_data in sorted(delta_by_path.items(), key=delta_sort_key,
                                reverse=True):
    gain = size_data['plus']
    loss = size_data['minus']
    delta = size_data['plus'] - size_data['minus']
    header = ' %s - Source: %s - (gained %d, lost %d)' % (DeltaStr(delta),
                                                          path, gain, loss)
    divider = '-' * len(header)
    print('')
    print(divider)
    print(header)
    print(divider)

    if showsymbols:
      def ExtractNewSize(tup):
        symbol_delta = tup[2]
        return symbol_delta.new_size

      def ExtractOldSize(tup):
        symbol_delta = tup[2]
        return symbol_delta.old_size

      if path in new_symbols.symbols_by_path:
        print(' New symbols:')
        for symbol_name, symbol_type, symbol_delta in \
            sorted(new_symbols.symbols_by_path[path],
                   key=ExtractNewSize, reverse=True):
          print(' %8s: %s type=%s, size=%d bytes%s' %
                (DeltaStr(symbol_delta.new_size), symbol_name,
                 symbol_type, symbol_delta.new_size,
                 SharedInfoStr(symbol_delta)))
      if path in removed_symbols.symbols_by_path:
        print(' Removed symbols:')
        for symbol_name, symbol_type, symbol_delta in \
            sorted(removed_symbols.symbols_by_path[path],
                   key=ExtractOldSize):
          print(' %8s: %s type=%s, size=%d bytes%s' %
                (DeltaStr(-symbol_delta.old_size), symbol_name,
                 symbol_type, symbol_delta.old_size,
                 SharedInfoStr(symbol_delta)))
      for (changed_symbols_by_path, type_str) in [
          (grown_symbols.symbols_by_path, "Grown"),
          (shrunk_symbols.symbols_by_path, "Shrunk")]:
        if path in changed_symbols_by_path:
          print(' %s symbols:' % type_str)

          def changed_symbol_sortkey(item):
            symbol_name, _symbol_type, symbol_delta = item
            return (symbol_delta.old_size - symbol_delta.new_size,
                    symbol_name)

          for symbol_name, symbol_type, symbol_delta in \
              sorted(changed_symbols_by_path[path],
                     key=changed_symbol_sortkey):
            print(' %8s: %s type=%s, (was %d bytes, now %d bytes)%s'
                  % (DeltaStr(symbol_delta.new_size -
                              symbol_delta.old_size), symbol_name,
                     symbol_type,
                     symbol_delta.old_size, symbol_delta.new_size,
                     SharedInfoStr(symbol_delta)))


def main():
  usage = """%prog [options]

  Analyzes the symbolic differences between two binary files
  (typically, but not necessarily, two different builds of the same
  library) and produces a detailed description of symbols that have
  been added, removed, or whose size has changed.

  Example:
       explain_binary_size_delta.py --nm1 /tmp/nm1.dump --nm2 /tmp/nm2.dump

  Options are available via '--help'.
  """
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('--nm1', metavar='PATH',
                    help='the nm dump of the first library')
  parser.add_option('--nm2', metavar='PATH',
                    help='the nm dump of the second library')
  parser.add_option('--showsources', action='store_true', default=False,
                    help='show per-source statistics')
  parser.add_option('--showsymbols', action='store_true', default=False,
                    help='show all symbol information; implies --showsources')
  parser.add_option('--verbose', action='store_true', default=False,
                    help='output internal debugging stuff')
  opts, _args = parser.parse_args()

  if opts.nm1 is None:
    parser.error('--nm1 is required')
  if opts.nm2 is None:
    parser.error('--nm2 is required')
  symbols = []
  for path in [opts.nm1, opts.nm2]:
    with open(path, 'r') as nm_input:
      if opts.verbose:
        print('parsing ' + path + '...')
      # ParseNm yields (name, type, size, path, address) tuples.
      symbols.append(list(binary_size_utils.ParseNm(nm_input)))
  (added, removed, changed, unchanged) = Compare(symbols[0], symbols[1])
  CrunchStats(added, removed, changed, unchanged,
              opts.showsources | opts.showsymbols, opts.showsymbols)


if __name__ == '__main__':
  sys.exit(main())