Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
utils.py
Go to the documentation of this file.
1# Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2# for details. All rights reserved. Use of this source code is governed by a
3# BSD-style license that can be found in the LICENSE file.
4
5# This file contains a set of utilities functions used by other Python-based
6# scripts.
7
8from __future__ import print_function
9
10import contextlib
11import datetime
12from functools import total_ordering
13import glob
14import importlib.util
15import importlib.machinery
16import json
17import os
18import platform
19import re
20import shutil
21import subprocess
22import sys
23import tarfile
24import tempfile
25import uuid
26
try:
    # Not available on Windows.
    import resource
except ImportError:
    # Leave `resource` undefined; the classes that use it are only
    # instantiated on POSIX platforms.
    pass
32
SEMANTIC_VERSION_PATTERN = r'^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$'


# To eliminate clashing with older archived builds on bleeding edge we add
# a base number bigger than the largest svn revision (this also gives us an
# easy way of seeing if an archive comes from git based or svn based commits).
GIT_NUMBER_BASE = 100000

# Mapping table between build mode and build configuration.
BUILD_MODES = {
    'debug': 'Debug',
    'release': 'Release',
    'product': 'Product',
}

# Mapping table between sanitizer name and build configuration suffix.
BUILD_SANITIZERS = {
    None: '',
    'none': '',
    'asan': 'ASAN',
    'lsan': 'LSAN',
    'msan': 'MSAN',
    'tsan': 'TSAN',
    'ubsan': 'UBSAN',
}

# Mapping table between OS and build output location.
BUILD_ROOT = {
    'win32': 'out',
    'linux': 'out',
    'freebsd': 'out',
    'macos': 'xcodebuild',
}

# Note: gn expects these to be lower case.
ARCH_FAMILY = {
    'ia32': 'ia32',
    'x64': 'ia32',
    'arm': 'arm',
    'arm64': 'arm',
    'arm_x64': 'arm',
    'arm_arm64': 'arm',
    'simarm': 'ia32',
    'simarm64': 'ia32',
    'simarm_x64': 'ia32',
    'simarm_arm64': 'arm',
    'x64c': 'ia32',
    'arm64c': 'arm',
    'simarm64c': 'ia32',
    'simriscv32': 'ia32',
    'simriscv64': 'ia32',
    'simx64': 'arm',
    'simx64c': 'arm',
    'riscv32': 'riscv',
    'riscv64': 'riscv',
}

BASE_DIR = os.path.abspath(os.path.join(os.curdir, '..'))
DART_DIR = os.path.abspath(os.path.join(__file__, '..', '..'))
VERSION_FILE = os.path.join(DART_DIR, 'tools', 'VERSION')
93
94
def GetArchFamily(arch):
    """Return the gn architecture family for a target architecture."""
    return ARCH_FAMILY[arch]
97
98
def GetBuildDir(host_os):
    """Return the top-level build output directory for the host OS."""
    return BUILD_ROOT[host_os]
101
102
def GetBuildMode(mode):
    """Return the build configuration name for a build mode (e.g. 'debug' -> 'Debug')."""
    return BUILD_MODES[mode]
105
106
def GetBuildSanitizer(sanitizer):
    """Return the configuration-name suffix for a sanitizer (None/'none' -> '')."""
    return BUILD_SANITIZERS[sanitizer]
109
110
def GetBaseDir():
    """Return the parent of the current working directory (BASE_DIR)."""
    return BASE_DIR
113
114
def load_source(modname, filename):
    """Load a Python module from a file path without registering it.

    Replacement for the removed imp.load_source; the module is executed
    every time and deliberately NOT cached in sys.modules.
    """
    loader = importlib.machinery.SourceFileLoader(modname, filename)
    spec = importlib.util.spec_from_file_location(modname,
                                                  filename,
                                                  loader=loader)
    module = importlib.util.module_from_spec(spec)
    # The module is always executed and not cached in sys.modules.
    # Uncomment the following line to cache the module.
    # sys.modules[module.__name__] = module
    loader.exec_module(module)
    return module
126
127
def GetBotUtils(repo_path=DART_DIR):
    '''Dynamically load the tools/bots/bot_utils.py python module.'''
    return load_source('bot_utils',
                       os.path.join(repo_path, 'tools', 'bots', 'bot_utils.py'))
132
133
def GetMinidumpUtils(repo_path=DART_DIR):
    '''Dynamically load the tools/minidump.py python module.'''
    return load_source('minidump',
                       os.path.join(repo_path, 'tools', 'minidump.py'))
138
139
@total_ordering
class Version(object):
    """A Dart SDK version: major.minor.patch plus prerelease info and channel.

    All components are stored as strings (as parsed from the VERSION file or
    a semantic version string); comparisons convert to int where numeric.
    """

    def __init__(self,
                 channel=None,
                 major=None,
                 minor=None,
                 patch=None,
                 prerelease=None,
                 prerelease_patch=None,
                 version=None):
        self.channel = channel
        self.major = major
        self.minor = minor
        self.patch = patch
        self.prerelease = prerelease
        self.prerelease_patch = prerelease_patch
        if version:
            # A full semver string overrides the individual components.
            self.set_version(version)

    def set_version(self, version):
        """Parse a semantic version string into this object's fields.

        A version without a prerelease part is 'stable'; otherwise the
        prerelease is expected to be '<prerelease>.<prerelease_patch>.<channel>'.
        """
        match = re.match(SEMANTIC_VERSION_PATTERN, version)
        assert match, '%s must be a valid version' % version
        self.channel = 'stable'
        self.major = match['major']
        self.minor = match['minor']
        self.patch = match['patch']
        self.prerelease = '0'
        self.prerelease_patch = '0'
        if match['prerelease']:
            subversions = match['prerelease'].split('.')
            self.prerelease = subversions[0]
            self.prerelease_patch = subversions[1]
            self.channel = subversions[2]

    def __str__(self):
        result = '%s.%s.%s' % (self.major, self.minor, self.patch)
        if self.channel != 'stable':
            result += '-%s.%s.%s' % (self.prerelease, self.prerelease_patch,
                                     self.channel)
        return result

    def __eq__(self, other):
        return self.channel == other.channel and \
               self.major == other.major and \
               self.minor == other.minor and \
               self.patch == other.patch and \
               self.prerelease == other.prerelease and \
               self.prerelease_patch == other.prerelease_patch

    def __lt__(self, other):
        # Compare the numeric triplet first.
        if int(self.major) < int(other.major):
            return True
        if int(self.major) > int(other.major):
            return False
        if int(self.minor) < int(other.minor):
            return True
        if int(self.minor) > int(other.minor):
            return False
        if int(self.patch) < int(other.patch):
            return True
        if int(self.patch) > int(other.patch):
            return False
        # The stable channel is ahead of the other channels on the same triplet.
        if self.channel != 'stable' and other.channel == 'stable':
            return True
        if self.channel == 'stable' and other.channel != 'stable':
            return False
        # The main channel is ahead of the other channels on the same triplet.
        if self.channel != 'main' and other.channel == 'main':
            return True
        if self.channel == 'main' and other.channel != 'main':
            return False
        # The be channel existed before it was renamed to main.
        if self.channel != 'be' and other.channel == 'be':
            return True
        if self.channel == 'be' and other.channel != 'be':
            return False
        if int(self.prerelease) < int(other.prerelease):
            return True
        if int(self.prerelease) > int(other.prerelease):
            return False
        if int(self.prerelease_patch) < int(other.prerelease_patch):
            return True
        if int(self.prerelease_patch) > int(other.prerelease_patch):
            return False
        return False
227
228
# Try to guess the host operating system.
def GuessOS():
    """Return the canonical OS name for this machine, or None if unknown."""
    return {
        'Linux': 'linux',
        'Darwin': 'macos',
        # On Windows Vista platform.system() can return 'Microsoft' with some
        # versions of Python, see http://bugs.python.org/issue1082 for details.
        'Windows': 'win32',
        'Microsoft': 'win32',
        'FreeBSD': 'freebsd',
        'OpenBSD': 'openbsd',
        'SunOS': 'solaris',
    }.get(platform.system())
248
249
# Returns true if the currently executing python interpreter is running under
# Rosetta. I.e., python3 is an x64 executable and we're on an arm64 Mac.
def IsRosetta():
    """Return True when running as a translated (x64) process on an arm64 Mac."""
    if platform.system() == 'Darwin':
        p = subprocess.Popen(['sysctl', '-in', 'sysctl.proc_translated'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        output, _ = p.communicate()
        return output.decode('utf-8').strip() == '1'
    return False
260
261
# Returns the architectures that can run on the current machine.
def HostArchitectures():
    """Return the list of architectures runnable on this machine.

    The first entry is the preferred (native) architecture. Raises if the
    machine type is not recognized.
    """
    m = platform.machine()
    if platform.system() == 'Darwin':
        if m == 'arm64' or IsRosetta():
            # ARM64 Macs also support X64.
            return ['arm64', 'x64']
        if m == 'x86_64':
            # X64 Macs no longer support IA32.
            return ['x64']
    # Icky use of CIPD_ARCHITECTURE should be effectively dead whenever the
    # Python on bots becomes native ARM64.
    if ((platform.system() == 'Windows') and
        (os.environ.get("CIPD_ARCHITECTURE") == "arm64")):
        # ARM64 Windows also can emulate X64.
        return ['arm64', 'x64']

    if m in ['aarch64', 'arm64', 'arm64e', 'ARM64']:
        return ['arm64']
    if m in ['armv7l', 'armv8l']:
        return ['arm']
    if m in ['i386', 'i686', 'ia32', 'x86']:
        return ['x86', 'ia32']
    if m in ['x64', 'x86-64', 'x86_64', 'amd64', 'AMD64']:
        return ['x64', 'x86', 'ia32']
    if m in ['riscv64']:
        return ['riscv64']
    # Fix: the original passed the values as extra Exception args ('%s %s'
    # was never interpolated); format them into the message instead.
    raise Exception('Failed to determine host architectures for {} {}'.format(
        platform.machine(), platform.system()))
291
292
# Try to guess the host architecture.
def GuessArchitecture():
    """Return the preferred (native) architecture of this machine."""
    return HostArchitectures()[0]
296
# Try to guess the number of cpus on this machine.
def GuessCpus():
    """Return the number of logical CPUs, preferring DART_NUMBER_OF_CORES.

    Falls back to /proc/cpuinfo (Linux), hostinfo (macOS), the
    NUMBER_OF_PROCESSORS environment variable (Windows), then 2.
    """
    if os.getenv('DART_NUMBER_OF_CORES') is not None:
        return int(os.getenv('DART_NUMBER_OF_CORES'))
    if os.path.exists('/proc/cpuinfo'):
        return int(
            subprocess.check_output(
                'grep -E \'^processor\' /proc/cpuinfo | wc -l', shell=True))
    if os.path.exists('/usr/bin/hostinfo'):
        return int(
            subprocess.check_output(
                '/usr/bin/hostinfo |'
                ' grep "processors are logically available." |'
                ' awk "{ print \\$1 }"',
                shell=True))
    win_cpu_count = os.getenv("NUMBER_OF_PROCESSORS")
    if win_cpu_count:
        return int(win_cpu_count)
    # Conservative default when nothing else worked.
    return 2
316
317
# Returns true if we're running under Windows.
def IsWindows():
    """Return True when the host OS is Windows."""
    return GuessOS() == 'win32'
321
322
def IsCrossBuild(target_os, arch):
    """Return True when building for an OS/arch the host cannot run natively.

    Simulator architectures ('sim*') always run on the host; a trailing 'c'
    ('compressed pointers') does not affect crossness.
    """
    if (target_os not in [None, 'host']) and (target_os != GuessOS()):
        return True
    if arch.startswith('sim'):
        return False
    if arch.endswith('c'):
        # Strip 'compressed' suffix.
        arch = arch[:-1]
    if arch in HostArchitectures():
        return False
    return True
334
335
def GetBuildConf(mode, arch, conf_os=None, sanitizer=None):
    """Build the configuration directory name, e.g. 'ReleaseX64' or 'DebugAndroidARM'.

    A non-host conf_os is encoded in the name instead of the sanitizer/cross
    markers.
    """
    if conf_os is not None and conf_os != GuessOS() and conf_os != 'host':
        return '{}{}{}'.format(GetBuildMode(mode), conf_os.title(),
                               arch.upper())

    # Ask for a cross build if the host and target architectures don't match.
    cross_build = ''
    if IsCrossBuild(conf_os, arch):
        cross_build = 'X'
    return '{}{}{}{}'.format(GetBuildMode(mode), GetBuildSanitizer(sanitizer),
                             cross_build, arch.upper())
347
348
def GetBuildRoot(host_os, mode=None, arch=None, target_os=None, sanitizer=None):
    """Return the build output directory, optionally including the configuration
    subdirectory when a mode is given."""
    build_root = GetBuildDir(host_os)
    if mode:
        build_root = os.path.join(
            build_root, GetBuildConf(mode, arch, target_os, sanitizer))
    return build_root
355
356
def GetVersion(no_git_hash=False, version_file=None, git_revision_file=None):
    """Return the full SDK version string, or None if VERSION can't be read.

    The channel determines the suffix: 'main'/'be' get '-edge[.<git hash>]',
    'beta'/'dev' get '-<prerelease>.<prerelease_patch>.<channel>', and
    'stable' gets no suffix.
    """
    version = ReadVersionFile(version_file)
    if not version:
        return None

    suffix = ''
    if version.channel in ['main', 'be']:
        suffix = '-edge' if no_git_hash else '-edge.{}'.format(
            GetGitRevision(git_revision_file))
    elif version.channel in ('beta', 'dev'):
        suffix = '-{}.{}.{}'.format(version.prerelease,
                                    version.prerelease_patch, version.channel)
    else:
        assert version.channel == 'stable'

    return '{}.{}.{}{}'.format(version.major, version.minor, version.patch,
                               suffix)
374
375
def GetChannel(version_file=None):
    """Return the release channel recorded in the VERSION file."""
    version = ReadVersionFile(version_file)
    return version.channel
379
380
def ReadVersionFile(version_file=None):
    """Parse a VERSION file into a Version object.

    Returns None (with a warning printed) when the file is unreadable or
    malformed. Defaults to tools/VERSION in the repository.
    """

    def match_against(pattern, file_content):
        # Return the first capture group of pattern, or None.
        match = re.search(pattern, file_content, flags=re.MULTILINE)
        if match:
            return match.group(1)
        return None

    if version_file is None:
        version_file = VERSION_FILE

    try:
        with open(version_file) as fd:
            content = fd.read()
    except OSError:
        print('Warning: Could not read VERSION file ({})'.format(version_file))
        return None

    channel = match_against('^CHANNEL ([A-Za-z0-9]+)$', content)
    major = match_against('^MAJOR (\\d+)$', content)
    minor = match_against('^MINOR (\\d+)$', content)
    patch = match_against('^PATCH (\\d+)$', content)
    prerelease = match_against('^PRERELEASE (\\d+)$', content)
    prerelease_patch = match_against('^PRERELEASE_PATCH (\\d+)$', content)

    # Fix: 'patch' was missing from the original validity check, allowing a
    # VERSION file without a PATCH line to yield a Version with patch=None.
    if (channel and major and minor and patch and prerelease and
            prerelease_patch):
        return Version(channel, major, minor, patch, prerelease,
                       prerelease_patch)

    print('Warning: VERSION file ({}) has wrong format'.format(version_file))
    return None
413
414
def GetGitRevision(git_revision_file=None, repo_path=DART_DIR):
    """Return the full 40-char git hash of HEAD, or None if unavailable.

    Prefers the tools/GIT_REVISION file (present in tarball builds) and
    falls back to invoking git.
    """
    # When building from tarball use tools/GIT_REVISION.
    if git_revision_file is None:
        git_revision_file = os.path.join(repo_path, 'tools', 'GIT_REVISION')
    try:
        with open(git_revision_file) as fd:
            return fd.read().strip()
    except OSError:
        pass  # No revision file; ask git instead.
    p = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=IsWindows(),
                         cwd=repo_path)
    out, err = p.communicate()
    # TODO(https://github.com/dart-lang/sdk/issues/51865): Don't ignore errors.
    # if p.wait() != 0:
    #     raise Exception('git rev-parse failed: ' + str(err))
    revision = out.decode('utf-8').strip()
    # We expect a full git hash
    if len(revision) != 40:
        print('Warning: Could not parse git commit, output was {}'.format(
            revision),
              file=sys.stderr)
        return None
    return revision
441
442
def GetShortGitHash(repo_path=DART_DIR):
    """Return the abbreviated (10-char) git hash of HEAD, or None on failure."""
    p = subprocess.Popen(['git', 'rev-parse', '--short=10', 'HEAD'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=IsWindows(),
                         cwd=repo_path)
    out, err = p.communicate()
    if p.wait() != 0:
        # TODO(https://github.com/dart-lang/sdk/issues/51865): Don't ignore errors.
        # raise Exception('git rev-parse failed: ' + str(err))
        return None
    revision = out.decode('utf-8').strip()
    return revision
456
457
def GetGitTimestamp(git_timestamp_file=None, repo_path=DART_DIR):
    """Return the commit date of HEAD, or None on failure.

    Prefers the tools/GIT_TIMESTAMP file (present in tarball builds) and
    falls back to `git log`.
    """
    # When building from tarball use tools/GIT_TIMESTAMP.
    if git_timestamp_file is None:
        git_timestamp_file = os.path.join(repo_path, 'tools', 'GIT_TIMESTAMP')
    try:
        with open(git_timestamp_file) as fd:
            return fd.read().strip()
    except OSError:
        pass  # No timestamp file; ask git instead.
    p = subprocess.Popen(['git', 'log', '-n', '1', '--pretty=format:%cd'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=IsWindows(),
                         cwd=repo_path)
    out, err = p.communicate()
    if p.wait() != 0:
        # TODO(https://github.com/dart-lang/sdk/issues/51865): Don't ignore errors.
        # raise Exception('git log failed: ' + str(err))
        return None
    timestamp = out.decode('utf-8').strip()
    return timestamp
479
480
# TODO(42528): Can we remove this? It's basically just an alias for Exception.
class Error(Exception):
    """Generic error raised by the Dart tooling scripts."""
    pass
484
485
def IsCrashExitCode(exit_code):
    """Return truthy when exit_code indicates the process crashed.

    On Windows crashes set the high bit of the 32-bit exit code; on POSIX a
    negative code means the process died from a signal.
    """
    if IsWindows():
        return 0x80000000 & exit_code
    return exit_code < 0
490
491
def DiagnoseExitCode(exit_code, command):
    """Print a crash diagnostic for command to stderr when exit_code is a crash."""
    if IsCrashExitCode(exit_code):
        sys.stderr.write(
            'Command: {}\nCRASHED with exit code {} (0x{:x})\n'.format(
                ' '.join(command), exit_code, exit_code & 0xffffffff))
497
498
def CheckedInSdkPath():
    """Return the path of the checked-in Dart SDK (tools/sdks/dart-sdk)."""
    tools_dir = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(tools_dir, 'sdks', 'dart-sdk')
502
503
def CheckedInSdkExecutable():
    """Return the path of the `dart` binary inside the checked-in SDK."""
    name = 'dart'
    if IsWindows():
        name = 'dart.exe'
    return os.path.join(CheckedInSdkPath(), 'bin', name)
509
510
def CheckLinuxCoreDumpPattern(fatal=False):
    """Verify that /proc/sys/kernel/core_pattern is 'core.%p'.

    Returns True when correct; otherwise raises (fatal=True) or prints a
    message and returns False.
    """
    core_pattern_file = '/proc/sys/kernel/core_pattern'
    # Use a context manager so the proc file handle is not leaked.
    with open(core_pattern_file) as f:
        core_pattern = f.read()

    expected_core_pattern = 'core.%p'
    if core_pattern.strip() != expected_core_pattern:
        message = (
            'Invalid core_pattern configuration. '
            'The configuration of core dump handling is *not* correct for '
            'a buildbot. The content of {0} must be "{1}" instead of "{2}".'.
            format(core_pattern_file, expected_core_pattern, core_pattern))
        if fatal:
            raise Exception(message)
        print(message)
        return False
    return True
527
528
class TempDir(object):
    """Context manager that creates a temp directory and removes it on exit."""

    def __init__(self, prefix=''):
        self._temp_dir = None
        self._prefix = prefix

    def __enter__(self):
        self._temp_dir = tempfile.mkdtemp(self._prefix)
        return self._temp_dir

    def __exit__(self, *_):
        # Best-effort removal; ignore races and permission errors.
        shutil.rmtree(self._temp_dir, ignore_errors=True)
541
542
class UnexpectedCrash(object):
    """Record of an unexpected test crash: test name, pid, involved binaries."""

    def __init__(self, test, pid, *binaries):
        self.test = test
        self.pid = pid
        self.binaries = binaries

    def __str__(self):
        return 'Crash({}: {} {})'.format(self.test, self.pid,
                                         ', '.join(self.binaries))
553
554
556
class PosixCoreDumpEnabler(object):
    """Context manager that raises the core-dump size limit to unlimited and
    restores the previous limits on exit."""

    def __init__(self):
        self._old_limits = None

    def __enter__(self):
        self._old_limits = resource.getrlimit(resource.RLIMIT_CORE)
        # (-1, -1) means unlimited soft and hard core size.
        resource.setrlimit(resource.RLIMIT_CORE, (-1, -1))

    def __exit__(self, *_):
        if self._old_limits != None:
            resource.setrlimit(resource.RLIMIT_CORE, self._old_limits)
567
568
class LinuxCoreDumpEnabler(PosixCoreDumpEnabler):
    """PosixCoreDumpEnabler that only bumps limits when the kernel
    core_pattern is configured correctly for the buildbot."""

    def __enter__(self):
        # Bump core limits to unlimited if core_pattern is correctly configured.
        if CheckLinuxCoreDumpPattern(fatal=False):
            super(LinuxCoreDumpEnabler, self).__enter__()

    def __exit__(self, *args):
        # Re-check (and report) the pattern on exit, then restore limits.
        CheckLinuxCoreDumpPattern(fatal=False)
        super(LinuxCoreDumpEnabler, self).__exit__(*args)
580
class WindowsCoreDumpEnabler(object):
    """This enabler assumes that Dart binary was built with Crashpad support.
    In this case DART_CRASHPAD_CRASHES_DIR environment variable allows to
    specify the location of Crashpad crashes database. Actual minidumps will
    be written into reports subfolder of the database.
    """
    CRASHPAD_DB_FOLDER = os.path.join(DART_DIR, 'crashes')
    DUMPS_FOLDER = os.path.join(CRASHPAD_DB_FOLDER, 'reports')

    def __init__(self):
        pass

    def __enter__(self):
        print('INFO: Enabling coredump archiving into {}'.format(
            WindowsCoreDumpEnabler.CRASHPAD_DB_FOLDER))
        os.environ[
            'DART_CRASHPAD_CRASHES_DIR'] = WindowsCoreDumpEnabler.CRASHPAD_DB_FOLDER

    def __exit__(self, *_):
        del os.environ['DART_CRASHPAD_CRASHES_DIR']
601
602
def TryUnlink(file):
    """Remove file, printing (not raising) an error when removal fails."""
    try:
        os.unlink(file)
    except Exception as error:
        print('ERROR: Failed to remove {}: {}'.format(file, error))
608
609
class BaseCoreDumpArchiver(object):
    """This class reads coredumps file written by UnexpectedCrashDumpArchiver
    into the current working directory and uploads all cores and binaries
    listed in it into Cloud Storage (see
    pkg/test_runner/lib/src/test_progress.dart).
    """

    # test.dart will write a line for each unexpected crash into this file.
    _UNEXPECTED_CRASHES_FILE = 'unexpected-crashes'

    def __init__(self, search_dir, output_directory):
        self._bucket = 'dart-temp-crash-archive'
        self._binaries_dir = os.getcwd()
        self._search_dir = search_dir
        self._output_directory = output_directory

    def _safe_cleanup(self):
        # Run _cleanup but never let cleanup failures propagate.
        try:
            return self._cleanup()
        except Exception as error:
            print('ERROR: Failure during cleanup: {}'.format(error))
            return False

    def __enter__(self):
        print('INFO: Core dump archiving is activated')

        # Cleanup any stale files
        if self._safe_cleanup():
            print('WARNING: Found and removed stale coredumps')

    def __exit__(self, *_):
        try:
            crashes = self._find_unexpected_crashes()
            if crashes:
                # If we get a ton of crashes, only archive 10 dumps.
                archive_crashes = crashes[:10]
                print('Archiving coredumps for crash (if possible):')
                for crash in archive_crashes:
                    print('----> {}'.format(crash))

                sys.stdout.flush()

                self._archive(archive_crashes)
            else:
                print('INFO: No unexpected crashes recorded')
                dumps = self._find_all_coredumps()
                if dumps:
                    print('INFO: However there are {} core dumps found'.format(
                        len(dumps)))
                    for dump in dumps:
                        print('INFO: -> {}'.format(dump))
                    print()
        except Exception as error:
            print('ERROR: Failed to archive crashes: {}'.format(error))
            raise

        finally:
            self._safe_cleanup()

    def _archive(self, crashes):
        # Collect the set of files (binaries + cores) to preserve; record
        # crashes whose core file could not be located.
        files = set()
        missing = []
        for crash in crashes:
            files.update(crash.binaries)
            core = self._find_coredump_file(crash)
            if core:
                files.add(core)
            else:
                missing.append(crash)
        if self._output_directory is not None and self._is_shard():
            print(
                "INFO: Moving collected dumps and binaries into output directory\n"
                "INFO: They will be uploaded to isolate server. Look for \"isolated"
                " out\" under the failed step on the build page.\n"
                "INFO: For more information see runtime/docs/infra/coredumps.md"
            )
            self._move(files)
        else:
            print(
                "INFO: Uploading collected dumps and binaries into Cloud Storage\n"
                "INFO: Use `gsutil.py cp from-url to-path` to download them.\n"
                "INFO: For more information see runtime/docs/infra/coredumps.md"
            )
            self._upload(files)

        if missing:
            self._report_missing_crashes(missing, throw=False)

    # todo(athom): move the logic to decide where to copy core dumps into the recipes.
    def _is_shard(self):
        return 'BUILDBOT_BUILDERNAME' not in os.environ

    def _report_missing_crashes(self, missing, throw=False):
        missing_as_string = ', '.join([str(c) for c in missing])
        other_files = list(glob.glob(os.path.join(self._search_dir, '*')))
        # Fix: the original message read "directory are are:".
        sys.stderr.write(
            "Could not find crash dumps for '{}' in search directory '{}'.\n"
            "Existing files which *did not* match the pattern inside the search "
            "directory are:\n  {}\n".format(missing_as_string,
                                            self._search_dir,
                                            '\n  '.join(other_files)))
        # TODO: Figure out why windows coredump generation does not work.
        # See http://dartbug.com/36469
        if throw and GuessOS() != 'win32':
            raise Exception(
                'Missing crash dumps for: {}'.format(missing_as_string))

    def _get_file_name(self, file):
        # Sanitize the name: actual cores follow 'core.%d' pattern, crashed
        # binaries are copied next to cores and named
        # 'binary.<mode>_<arch>_<binary_name>'.
        # This should match the code in testing/dart/test_progress.dart
        name = os.path.basename(file)
        (prefix, suffix) = name.split('.', 1)
        is_binary = prefix == 'binary'
        if is_binary:
            (mode, arch, binary_name) = suffix.split('_', 2)
            name = binary_name
        return (name, is_binary)

    def _move(self, files):
        for file in files:
            print('+++ Moving {} to output_directory ({})'.format(
                file, self._output_directory))
            (name, is_binary) = self._get_file_name(file)
            destination = os.path.join(self._output_directory, name)
            shutil.move(file, destination)
            if is_binary and os.path.exists(file + '.pdb'):
                # Also move a PDB file if there is one.
                pdb = os.path.join(self._output_directory, name + '.pdb')
                shutil.move(file + '.pdb', pdb)

    def _tar(self, file):
        (name, is_binary) = self._get_file_name(file)
        tarname = '{}.tar.gz'.format(name)

        # Compress the file; the context manager guarantees the archive is
        # closed even if adding a member fails.
        with tarfile.open(tarname, mode='w:gz') as tar:
            tar.add(file, arcname=name)
            if is_binary and os.path.exists(file + '.pdb'):
                # Also add a PDB file if there is one.
                tar.add(file + '.pdb', arcname=name + '.pdb')
        return tarname

    def _upload(self, files):
        bot_utils = GetBotUtils()
        gsutil = bot_utils.GSUtil()
        storage_path = '{}/{}/'.format(self._bucket, uuid.uuid4())
        gs_prefix = 'gs://{}'.format(storage_path)
        http_prefix = 'https://storage.cloud.google.com/{}'.format(storage_path)

        print('\n--- Uploading into {} ({}) ---'.format(gs_prefix, http_prefix))
        for file in files:
            tarname = self._tar(file)

            # Remove / from absolute path to not have // in gs path.
            gs_url = '{}{}'.format(gs_prefix, tarname)
            http_url = '{}{}'.format(http_prefix, tarname)

            try:
                gsutil.upload(tarname, gs_url)
                print('+++ Uploaded {} ({})'.format(gs_url, http_url))
            except Exception as error:
                print('!!! Failed to upload {}, error: {}'.format(
                    tarname, error))

            TryUnlink(tarname)

        print('--- Done ---\n')

    def _find_all_coredumps(self):
        """Return coredumps that were recorded (if supported by the platform).
        This method will be overridden by concrete platform specific implementations.
        """
        return []

    def _find_unexpected_crashes(self):
        """Load coredumps file. Each line has the following format:

        test-name,pid,binary-file1,binary-file2,...
        """
        try:
            with open(BaseCoreDumpArchiver._UNEXPECTED_CRASHES_FILE) as f:
                return [
                    UnexpectedCrash(*ln.strip('\n').split(','))
                    for ln in f.readlines()
                ]
        except Exception:
            # Missing or malformed crashes file means no crashes to report.
            return []

    def _cleanup(self):
        found = False
        if os.path.exists(BaseCoreDumpArchiver._UNEXPECTED_CRASHES_FILE):
            os.unlink(BaseCoreDumpArchiver._UNEXPECTED_CRASHES_FILE)
            found = True
        for binary in glob.glob(os.path.join(self._binaries_dir, 'binary.*')):
            found = True
            TryUnlink(binary)

        return found
811
812
814
class PosixCoreDumpArchiver(BaseCoreDumpArchiver):
    """Archiver for POSIX systems where cores appear as 'core.<pid>' files in
    the search directory."""

    def __init__(self, search_dir, output_directory):
        super(PosixCoreDumpArchiver, self).__init__(search_dir,
                                                    output_directory)

    def _cleanup(self):
        found = super(PosixCoreDumpArchiver, self)._cleanup()
        # Fix: the attribute name was garbled to '_search_dir_search_dir'.
        for core in glob.glob(os.path.join(self._search_dir, 'core.*')):
            found = True
            TryUnlink(core)
        return found

    def _find_coredump_file(self, crash):
        core_filename = os.path.join(self._search_dir,
                                     'core.{}'.format(crash.pid))
        if os.path.exists(core_filename):
            return core_filename
831
832
834
class LinuxCoreDumpArchiver(PosixCoreDumpArchiver):
    """PosixCoreDumpArchiver searching the current working directory (where
    the Linux kernel writes 'core.%p' with the expected core_pattern)."""

    def __init__(self, output_directory):
        super(LinuxCoreDumpArchiver, self).__init__(os.getcwd(),
                                                    output_directory)
838
839
841
class MacOSCoreDumpArchiver(PosixCoreDumpArchiver):
    """PosixCoreDumpArchiver searching /cores, where macOS writes core files."""

    def __init__(self, output_directory):
        super(MacOSCoreDumpArchiver, self).__init__('/cores', output_directory)
844
845
847
class WindowsCoreDumpArchiver(BaseCoreDumpArchiver):
    """Archiver for Windows, where Crashpad writes minidumps (*.dmp) into the
    DUMPS_FOLDER configured by WindowsCoreDumpEnabler."""

    def __init__(self, output_directory):
        super(WindowsCoreDumpArchiver, self).__init__(
            WindowsCoreDumpEnabler.DUMPS_FOLDER, output_directory)
        # Lazily-built map from pid (as str) to minidump filename.
        self._dumps_by_pid = None

    # Find CDB.exe in the win_toolchain that we are using.
    def _find_cdb(self):
        win_toolchain_json_path = os.path.join(DART_DIR, 'build',
                                               'win_toolchain.json')
        if not os.path.exists(win_toolchain_json_path):
            return None

        with open(win_toolchain_json_path, 'r') as f:
            win_toolchain_info = json.loads(f.read())

        win_sdk_path = win_toolchain_info['win_sdk']

        # We assume that we are running on 64-bit Windows.
        # Note: x64 CDB can work with both X64 and IA32 dumps.
        cdb_path = os.path.join(win_sdk_path, 'Debuggers', 'x64', 'cdb.exe')
        if not os.path.exists(cdb_path):
            return None

        return cdb_path

    CDBG_PROMPT_RE = re.compile(r'^\d+:\d+>')

    def _dump_all_stacks(self):
        # On Windows due to crashpad integration crashes do not produce any
        # stacktraces. Dump stack traces from dumps Crashpad collected using
        # CDB (if available).
        cdb_path = self._find_cdb()
        if cdb_path is None:
            return

        dumps = self._find_all_coredumps()
        if not dumps:
            return

        print('### Collected {} crash dumps'.format(len(dumps)))
        for dump in dumps:
            print()
            print('### Dumping stacks from {} using CDB'.format(dump))
            cdb_output = subprocess.check_output(
                '"{}" -z "{}" -kqm -c "!uniqstack -b -v -p;qd"'.format(
                    cdb_path, dump),
                stderr=subprocess.STDOUT)
            # Extract output of uniqstack from the whole output of CDB.
            # Fix: check_output returns bytes, which cannot be split with a
            # str separator; decode first.
            output = False
            for line in cdb_output.decode('utf-8',
                                          errors='replace').split('\n'):
                if re.match(WindowsCoreDumpArchiver.CDBG_PROMPT_RE, line):
                    output = True
                elif line.startswith('quit:'):
                    break
                elif output:
                    print(line)
        print()
        print('#############################################')
        print()

    def __exit__(self, *args):
        try:
            self._dump_all_stacks()
        except Exception as error:
            print('ERROR: Unable to dump stacks from dumps: {}'.format(error))

        super(WindowsCoreDumpArchiver, self).__exit__(*args)

    def _cleanup(self):
        found = super(WindowsCoreDumpArchiver, self)._cleanup()
        # Fix: the attribute name was garbled to '_search_dir_search_dir'.
        for core in glob.glob(os.path.join(self._search_dir, '*')):
            found = True
            TryUnlink(core)
        return found

    def _find_all_coredumps(self):
        return glob.glob(os.path.join(self._search_dir, '*.dmp'))

    def _find_coredump_file(self, crash):
        if self._dumps_by_pid is None:
            # If this function is invoked the first time then look through the
            # directory that contains crashes for all dump files and collect
            # pid -> filename mapping.
            self._dumps_by_pid = {}
            minidump = GetMinidumpUtils()
            pattern = os.path.join(self._search_dir, '*.dmp')
            for core_filename in glob.glob(pattern):
                pid = minidump.GetProcessIdFromDump(core_filename)
                if pid != -1:
                    self._dumps_by_pid[str(pid)] = core_filename
        if crash.pid in self._dumps_by_pid:
            return self._dumps_by_pid[crash.pid]

    def _report_missing_crashes(self, missing, throw=False):
        # Let's only print the debugging information and not throw. We'll do more
        # validation for werfault.exe and throw afterwards.
        super(WindowsCoreDumpArchiver, self)._report_missing_crashes(
            missing, throw=False)

        if throw:
            missing_as_string = ', '.join([str(c) for c in missing])
            raise Exception(
                'Missing crash dumps for: {}'.format(missing_as_string))
952
953
955
class IncreasedNumberOfFileDescriptors(object):
    """Context manager that raises RLIMIT_NOFILE to `nofiles` and restores
    the previous limits on exit."""

    def __init__(self, nofiles):
        self._old_limits = None
        self._limits = (nofiles, nofiles)

    def __enter__(self):
        self._old_limits = resource.getrlimit(resource.RLIMIT_NOFILE)
        resource.setrlimit(resource.RLIMIT_NOFILE, self._limits)

    def __exit__(self, *_):
        # Fix: the original restored RLIMIT_CORE with the saved NOFILE limits,
        # which both clobbered the core limit and leaked the raised NOFILE
        # limit past the context.
        resource.setrlimit(resource.RLIMIT_NOFILE, self._old_limits)
966
967
@contextlib.contextmanager
def NooptContextManager():
    """A context manager that does nothing."""
    yield
971
972
def CoreDumpArchiver(args):
    """Return a tuple of context managers enabling and archiving core dumps.

    Activated by '--copy-coredumps' in args; '--output-directory=<dir>'
    selects where dumps are moved. Returns a no-op manager when disabled or
    on unsupported platforms.
    """
    enabled = '--copy-coredumps' in args
    prefix = '--output-directory='
    output_directory = next(
        (arg[len(prefix):] for arg in args if arg.startswith(prefix)), None)

    if not enabled:
        return (NooptContextManager(),)

    osname = GuessOS()
    if osname == 'linux':
        return (LinuxCoreDumpEnabler(), LinuxCoreDumpArchiver(output_directory))
    elif osname == 'macos':
        return (PosixCoreDumpEnabler(), MacOSCoreDumpArchiver(output_directory))
    elif osname == 'win32':
        return (WindowsCoreDumpEnabler(),
                WindowsCoreDumpArchiver(output_directory))

    # Unsupported platform: fall back to a no-op context manager.
    return (NooptContextManager(),)
993
994
def FileDescriptorLimitIncreaser():
    """Return a context manager raising the fd limit where needed (macOS)."""
    osname = GuessOS()
    if osname == 'macos':
        return IncreasedNumberOfFileDescriptors(nofiles=10000)

    assert osname in ('linux', 'win32')
    # The default file descriptor limit is only a problem on macOS.
    return NooptContextManager()
1003
1004
def Main():
    """Print the values of the main host-detection helpers (smoke test)."""
    print('GuessOS() -> ', GuessOS())
    print('GuessArchitecture() -> ', GuessArchitecture())
    print('GuessCpus() -> ', GuessCpus())
    print('IsWindows() -> ', IsWindows())
    print('GetGitRevision() -> ', GetGitRevision())
    print('GetGitTimestamp() -> ', GetGitTimestamp())
    print('ReadVersionFile() -> ', ReadVersionFile())
1013
1014
1015if __name__ == '__main__':
1016 Main()
static float next(float f)
static bool read(SkStream *stream, void *buffer, size_t amount)
Type::kYUV Type::kRGBA() int(0.7 *637)
void print(void *str)
Definition bridge.cpp:126
_report_missing_crashes(self, missing, throw=False)
Definition utils.py:702
__init__(self, search_dir, output_directory)
Definition utils.py:620
_upload(self, files)
Definition utils.py:755
_archive(self, crashes)
Definition utils.py:669
_get_file_name(self, file)
Definition utils.py:717
__init__(self, output_directory)
Definition utils.py:835
__exit__(self, *args)
Definition utils.py:576
__init__(self, output_directory)
Definition utils.py:842
_find_coredump_file(self, crash)
Definition utils.py:826
__init__(self, search_dir, output_directory)
Definition utils.py:815
__init__(self, prefix='')
Definition utils.py:531
__enter__(self)
Definition utils.py:535
__exit__(self, *_)
Definition utils.py:539
__init__(self, test, pid, *binaries)
Definition utils.py:545
__eq__(self, other)
Definition utils.py:182
__init__(self, channel=None, major=None, minor=None, patch=None, prerelease=None, prerelease_patch=None, version=None)
Definition utils.py:150
__lt__(self, other)
Definition utils.py:190
set_version(self, version)
Definition utils.py:160
__str__(self)
Definition utils.py:175
_find_coredump_file(self, crash)
Definition utils.py:927
_report_missing_crashes(self, missing, throw=False)
Definition utils.py:942
__init__(self, output_directory)
Definition utils.py:848
uint32_t uint32_t * format
GetProcessIdFromDump(path)
Definition minidump.py:179
GetChannel(version_file=None)
Definition utils.py:376
CheckedInSdkPath()
Definition utils.py:499
HostArchitectures()
Definition utils.py:263
GetBuildMode(mode)
Definition utils.py:134
GetMinidumpUtils(repo_path=DART_DIR)
Definition utils.py:134
CoreDumpArchiver(args)
Definition utils.py:973
FileDescriptorLimitIncreaser()
Definition utils.py:995
GetShortGitHash(repo_path=DART_DIR)
Definition utils.py:443
IsRosetta()
Definition utils.py:252
IsCrossBuild(target_os, arch)
Definition utils.py:323
Main()
Definition utils.py:1005
GuessArchitecture()
Definition utils.py:42
GetBaseDir()
Definition utils.py:111
GetGitRevision(git_revision_file=None, repo_path=DART_DIR)
Definition utils.py:415
IsWindows()
Definition utils.py:72
CheckedInSdkExecutable()
Definition utils.py:504
GetBuildDir(host_os)
Definition utils.py:99
GetBuildConf(mode, arch)
Definition utils.py:139
DiagnoseExitCode(exit_code, command)
Definition utils.py:492
GetBuildRoot(host_os, mode=None, arch=None, sanitizer=None)
Definition utils.py:143
GetBuildSanitizer(sanitizer)
Definition utils.py:107
CheckLinuxCoreDumpPattern(fatal=False)
Definition utils.py:511
GuessCpus()
Definition utils.py:55
load_source(modname, filename)
Definition utils.py:115
TryUnlink(file)
Definition utils.py:603
GetGitTimestamp(git_timestamp_file=None, repo_path=DART_DIR)
Definition utils.py:458
GetVersion(no_git_hash=False, version_file=None, git_revision_file=None)
Definition utils.py:357
GetArchFamily(arch)
Definition utils.py:95
NooptContextManager()
Definition utils.py:969
ReadVersionFile(version_file=None)
Definition utils.py:381
IsCrashExitCode(exit_code)
Definition utils.py:486
GuessOS()
Definition utils.py:21
GetBotUtils(repo_path=DART_DIR)
Definition utils.py:128