from __future__ import print_function

from functools import total_ordering

import contextlib
import glob
import importlib.machinery
import importlib.util
import json
import os
import platform
import re
import shutil
import subprocess
import sys
import tarfile

try:
    # Not available on Windows.
    import resource
except ImportError:
    pass
# Matches strict semantic version strings, with a named group for each part.
SEMANTIC_VERSION_PATTERN = r'^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$'
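# Illustrative match (not from the original source):
#   m = re.match(SEMANTIC_VERSION_PATTERN, '2.19.0-11.0.beta')
# yields m['major'] == '2', m['minor'] == '19', m['patch'] == '0',
# m['prerelease'] == '11.0.beta' and m['buildmetadata'] is None.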
GIT_NUMBER_BASE = 100000
BUILD_ROOT = {
    'macos': 'xcodebuild',
    # ... (entries for other host OSes elided)
}

ARCH_FAMILY = {
    'simarm_arm64': 'arm',
    # ... (entries for other architectures elided)
}
BASE_DIR = os.path.abspath(os.path.join(os.curdir, '..'))
DART_DIR = os.path.abspath(os.path.join(__file__, '..', '..'))
VERSION_FILE = os.path.join(DART_DIR, 'tools', 'VERSION')
def GetArchFamily(arch):
    return ARCH_FAMILY[arch]


def GetBuildDir(host_os):
    return BUILD_ROOT[host_os]


def GetBuildMode(mode):
    return BUILD_MODES[mode]


def GetBuildSanitizer(sanitizer):
    return BUILD_SANITIZERS[sanitizer]
def load_source(modname, filename):
    loader = importlib.machinery.SourceFileLoader(modname, filename)
    spec = importlib.util.spec_from_file_location(modname,
                                                  filename,
                                                  loader=loader)
    module = importlib.util.module_from_spec(spec)
    loader.exec_module(module)
    return module
def GetBotUtils(repo_path=DART_DIR):
    '''Dynamically load the tools/bots/bot_utils.py python module.'''
    return load_source(
        'bot_utils',
        os.path.join(repo_path, 'tools', 'bots', 'bot_utils.py'))
def GetMinidumpUtils(repo_path=DART_DIR):
    '''Dynamically load the tools/minidump.py python module.'''
    return load_source('minidump',
                       os.path.join(repo_path, 'tools', 'minidump.py'))
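# Illustrative use, mirroring how _upload below obtains a GSUtil wrapper:
#   bot_utils = GetBotUtils()
#   gsutil = bot_utils.GSUtil()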
@total_ordering
class Version(object):

    def __init__(self,
                 channel=None,
                 major=None,
                 minor=None,
                 patch=None,
                 prerelease=None,
                 prerelease_patch=None,
                 version=None):
        ...

    def set_version(self, version):
        match = re.match(SEMANTIC_VERSION_PATTERN, version)
        assert match, '%s must be a valid version' % version
        self.major = match['major']
        self.minor = match['minor']
        self.patch = match['patch']
        ...
        if match['prerelease']:
            subversions = match['prerelease'].split('.')
            ...
    def __eq__(self, other):
        return self.channel == other.channel and \
            self.major == other.major and \
            self.minor == other.minor and \
            self.patch == other.patch and \
            ...
    def __lt__(self, other):
        ...
        if self.channel != 'stable' and other.channel == 'stable':
            ...
        if self.channel == 'stable' and other.channel != 'stable':
            ...
        if self.channel != 'main' and other.channel == 'main':
            ...
        if self.channel == 'main' and other.channel != 'main':
            ...
        if self.channel != 'be' and other.channel == 'be':
            ...
        if self.channel == 'be' and other.channel != 'be':
            ...
        ...
def GuessOS():
    os_id = platform.system()
    if os_id == 'Linux':
        return 'linux'
    elif os_id == 'Darwin':
        return 'macos'
    elif os_id == 'Windows' or os_id == 'Microsoft':
        # Some Python versions report 'Microsoft' on Windows Vista.
        return 'win32'
    elif os_id == 'FreeBSD':
        ...
    elif os_id == 'OpenBSD':
        ...
    elif os_id == 'SunOS':
        ...
def IsRosetta():
    # Detect whether this process runs under Rosetta translation on macOS
    # (i.e. an x64 binary on an arm64 host).
    if platform.system() == 'Darwin':
        p = subprocess.Popen(['sysctl', '-in', 'sysctl.proc_translated'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        output, _ = p.communicate()
        return output.decode('utf-8').strip() == '1'
    return False
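# Note: `sysctl -in sysctl.proc_translated` prints 1 for a process translated
# by Rosetta, 0 for a native process on Apple Silicon, and (thanks to -i)
# nothing on Intel Macs where the key does not exist, so the check above is
# safe on every Mac.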
def HostArchitectures():
    m = platform.machine()
    if platform.system() == 'Darwin':
        ...
        return ['arm64', 'x64']

    if ((platform.system() == 'Windows') and
            (os.environ.get("CIPD_ARCHITECTURE") == "arm64")):
        # ARM64 Windows can also run x64 binaries under emulation.
        return ['arm64', 'x64']

    if m in ['aarch64', 'arm64', 'arm64e', 'ARM64']:
        ...
    if m in ['armv7l', 'armv8l']:
        ...
    if m in ['i386', 'i686', 'ia32', 'x86']:
        return ['x86', 'ia32']
    if m in ['x64', 'x86-64', 'x86_64', 'amd64', 'AMD64']:
        return ['x64', 'x86', 'ia32']

    raise Exception('Failed to determine host architectures for %s %s' %
                    (platform.machine(), platform.system()))
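# Illustrative results: on Linux with platform.machine() == 'x86_64' this
# returns ['x64', 'x86', 'ia32']; on an Apple Silicon Mac the Darwin branch
# returns ['arm64', 'x64'] since Rosetta can also run x64 binaries.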
def GuessCpus():
    if os.getenv('DART_NUMBER_OF_CORES') is not None:
        return int(os.getenv('DART_NUMBER_OF_CORES'))
    if os.path.exists('/proc/cpuinfo'):
        return int(
            subprocess.check_output(
                'grep -E \'^processor\' /proc/cpuinfo | wc -l', shell=True))
    if os.path.exists('/usr/bin/hostinfo'):
        return int(
            subprocess.check_output(
                '/usr/bin/hostinfo |'
                ' grep "processors are logically available." |'
                ' awk "{ print \\$1 }"',
                shell=True))
    win_cpu_count = os.getenv("NUMBER_OF_PROCESSORS")
    if win_cpu_count:
        return int(win_cpu_count)
    ...
def IsCrossBuild(target_os, arch):
    if (target_os not in [None, 'host']) and (target_os != GuessOS()):
        ...
    if arch.startswith('sim'):
        ...
    if arch.endswith('c'):
        ...


def GetBuildConf(mode, arch, conf_os=None, sanitizer=None):
    if conf_os is not None and conf_os != GuessOS() and conf_os != 'host':
        ...
    cross_build = ...
    return '{}{}{}{}'.format(GetBuildMode(mode), GetBuildSanitizer(sanitizer),
                             cross_build, arch.upper())
def GetBuildRoot(host_os, mode=None, arch=None, target_os=None, sanitizer=None):
    build_root = GetBuildDir(host_os)
    if mode:
        build_root = os.path.join(
            build_root, GetBuildConf(mode, arch, target_os, sanitizer))
    return build_root
def GetVersion(no_git_hash=False, version_file=None, git_revision_file=None):
    version = ReadVersionFile(version_file)
    ...
    if version.channel in ['main', 'be']:
        suffix = '-edge' if no_git_hash else '-edge.{}'.format(
            GetGitRevision(git_revision_file))
    elif version.channel in ('beta', 'dev'):
        suffix = '-{}.{}.{}'.format(version.prerelease,
                                    version.prerelease_patch, version.channel)
    else:
        assert version.channel == 'stable'
        suffix = ''
    return '{}.{}.{}{}'.format(version.major, version.minor, version.patch,
                               suffix)
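# Resulting version strings, for illustration: '3.2.0-edge' (main/be with
# no_git_hash), '3.2.0-edge.<revision>' (main/be otherwise),
# '3.2.0-114.1.beta' (beta/dev), and plain '3.2.0' on stable.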
def GetChannel(version_file=None):
    version = ReadVersionFile(version_file)
    return version.channel
def ReadVersionFile(version_file=None):

    def match_against(pattern, file_content):
        match = re.search(pattern, file_content, flags=re.MULTILINE)
        if match:
            return match.group(1)
        return None

    if version_file is None:
        version_file = VERSION_FILE

    try:
        with open(version_file) as fd:
            content = fd.read()
    except:
        print('Warning: Could not read VERSION file ({})'.format(version_file))
        return None

    channel = match_against('^CHANNEL ([A-Za-z0-9]+)$', content)
    major = match_against('^MAJOR (\\d+)$', content)
    minor = match_against('^MINOR (\\d+)$', content)
    patch = match_against('^PATCH (\\d+)$', content)
    prerelease = match_against('^PRERELEASE (\\d+)$', content)
    prerelease_patch = match_against('^PRERELEASE_PATCH (\\d+)$', content)

    if channel and major and minor and prerelease and prerelease_patch:
        return Version(channel, major, minor, patch, prerelease,
                       prerelease_patch)

    print('Warning: VERSION file ({}) has wrong format'.format(version_file))
    return None
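# The tools/VERSION file parsed above looks like this (values illustrative):
#
#   CHANNEL be
#   MAJOR 3
#   MINOR 2
#   PATCH 0
#   PRERELEASE 0
#   PRERELEASE_PATCH 0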
def GetGitRevision(git_revision_file=None, repo_path=DART_DIR):
    if git_revision_file is None:
        git_revision_file = os.path.join(repo_path, 'tools', 'GIT_REVISION')
    if os.path.exists(git_revision_file):
        with open(git_revision_file) as fd:
            return fd.read().strip()

    p = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         cwd=repo_path)
    out, err = p.communicate()
    ...
    revision = out.decode('utf-8').strip()
    # A full git commit hash is 40 hex characters long.
    if len(revision) != 40:
        print('Warning: Could not parse git commit, output was {}'.format(
            revision))
        return None
    return revision
def GetShortGitHash(repo_path=DART_DIR):
    p = subprocess.Popen(['git', 'rev-parse', '--short=10', 'HEAD'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         cwd=repo_path)
    out, err = p.communicate()
    ...
    revision = out.decode('utf-8').strip()
    return revision
def GetGitTimestamp(git_timestamp_file=None, repo_path=DART_DIR):
    if git_timestamp_file is None:
        git_timestamp_file = os.path.join(repo_path, 'tools', 'GIT_TIMESTAMP')
    if os.path.exists(git_timestamp_file):
        with open(git_timestamp_file) as fd:
            return fd.read().strip()

    p = subprocess.Popen(['git', 'log', '-n', '1', '--pretty=format:%cd'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         cwd=repo_path)
    out, err = p.communicate()
    ...
    timestamp = out.decode('utf-8').strip()
    return timestamp
class Error(Exception):
    pass


def IsCrashExitCode(exit_code):
    # A Windows crash exit code has the high bit set (e.g. NTSTATUS values
    # such as 0xC0000005, an access violation).
    return 0x80000000 & exit_code


def DiagnoseExitCode(exit_code, command):
    if IsCrashExitCode(exit_code):
        sys.stderr.write(
            'Command: {}\nCRASHED with exit code {} (0x{:x})\n'.format(
                ' '.join(command), exit_code, exit_code & 0xffffffff))
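# For example, a process killed by an access violation on Windows exits with
# NTSTATUS 0xC0000005; 0xC0000005 & 0x80000000 is non-zero, so IsCrashExitCode
# treats it as a crash and DiagnoseExitCode prints the code as 0xc0000005.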
def CheckedInSdkPath():
    tools_dir = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(tools_dir, 'sdks', 'dart-sdk')
def CheckLinuxCoreDumpPattern(fatal=False):
    core_pattern_file = '/proc/sys/kernel/core_pattern'
    core_pattern = open(core_pattern_file).read()

    expected_core_pattern = 'core.%p'
    if core_pattern.strip() != expected_core_pattern:
        message = (
            'Invalid core_pattern configuration. '
            'The configuration of core dump handling is *not* correct for '
            'a buildbot. The content of {0} must be "{1}" instead of "{2}".'.
            format(core_pattern_file, expected_core_pattern, core_pattern))
        if fatal:
            raise Exception(message)
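# One way to satisfy this check on a Linux machine (illustrative, needs root):
#
#   echo core.%p | sudo tee /proc/sys/kernel/core_pattern
#
# With core.%p each core file is written as core.<pid> into the crashed
# process's working directory, which is the name PosixCoreDumpArchiver
# searches for below.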
    def __exit__(self, *args):
        shutil.rmtree(self._temp_dir, ignore_errors=True)
class PosixCoreDumpEnabler(object):

    def __enter__(self):
        self._old_limits = resource.getrlimit(resource.RLIMIT_CORE)
        # Remove the core file size limit so that cores are actually written.
        resource.setrlimit(resource.RLIMIT_CORE, (-1, -1))

    def __exit__(self, *args):
        resource.setrlimit(resource.RLIMIT_CORE, self._old_limits)
class LinuxCoreDumpEnabler(PosixCoreDumpEnabler):

    def __enter__(self):
        super(LinuxCoreDumpEnabler, self).__enter__()
        ...

    def __exit__(self, *args):
        super(LinuxCoreDumpEnabler, self).__exit__(*args)
        ...
582 """This enabler assumes that Dart binary was built with Crashpad support.
583 In this case DART_CRASHPAD_CRASHES_DIR environment variable allows to
584 specify the location of Crashpad crashes database. Actual minidumps will
585 be written into reports subfolder of the database.
587 CRASHPAD_DB_FOLDER = os.path.join(DART_DIR, 'crashes')
588 DUMPS_FOLDER = os.path.join(CRASHPAD_DB_FOLDER,
'reports')
    def __enter__(self):
        print('INFO: Enabling coredump archiving into {}'.format(
            WindowsCoreDumpEnabler.CRASHPAD_DB_FOLDER))
        os.environ[
            'DART_CRASHPAD_CRASHES_DIR'] = WindowsCoreDumpEnabler.CRASHPAD_DB_FOLDER

    def __exit__(self, *args):
        del os.environ['DART_CRASHPAD_CRASHES_DIR']
def TryUnlink(file):
    try:
        os.unlink(file)
    except Exception as error:
        print('ERROR: Failed to remove {}: {}'.format(file, error))
611 """This class reads coredumps file written by UnexpectedCrashDumpArchiver
612 into the current working directory and uploads all cores
and binaries
613 listed
in it into Cloud Storage (see
614 pkg/test_runner/lib/src/test_progress.dart).
618 _UNEXPECTED_CRASHES_FILE =
'unexpected-crashes'
621 self.
_bucket =
'dart-temp-crash-archive'
    def _safe_cleanup(self):
        try:
            return self._cleanup()
        except Exception as error:
            print('ERROR: Failure during cleanup: {}'.format(error))
    def __enter__(self):
        print('INFO: Core dump archiving is activated')
        if self._safe_cleanup():
            print('WARNING: Found and removed stale coredumps')

    def __exit__(self, *args):
        try:
            crashes = self._find_unexpected_crashes()
            if crashes:
                # If there is a flood of crashes, only archive the first 10.
                archive_crashes = crashes[:10]
                print('Archiving coredumps for crash (if possible):')
                for crash in archive_crashes:
                    print('  {}'.format(crash))
                self._archive(archive_crashes)
            else:
                print('INFO: No unexpected crashes recorded')
                coredumps = self._find_all_coredumps()
                if coredumps:
                    print('INFO: However there are {} core dumps found'.format(
                        len(coredumps)))
                    ...
        except Exception as error:
            print('ERROR: Failed to archive crashes: {}'.format(error))
    def _archive(self, crashes):
        files = set()
        missing = []
        for crash in crashes:
            files.update(crash.binaries)
            core = self._find_coredump_file(crash)
            if core:
                files.add(core)
            else:
                missing.append(crash)
        if self._output_directory is not None and self._is_shard():
            print(
                "INFO: Moving collected dumps and binaries into output directory\n"
                "INFO: They will be uploaded to isolate server. Look for \"isolated"
                " out\" under the failed step on the build page.\n"
                "INFO: For more information see runtime/docs/infra/coredumps.md")
            self._move(files)
        else:
            print(
                "INFO: Uploading collected dumps and binaries into Cloud Storage\n"
                "INFO: Use `gsutil.py cp from-url to-path` to download them.\n"
                "INFO: For more information see runtime/docs/infra/coredumps.md")
            self._upload(files)
        if missing:
            self._report_missing_crashes(missing, throw=True)
    def _is_shard(self):
        return 'BUILDBOT_BUILDERNAME' not in os.environ
    def _report_missing_crashes(self, missing, throw=False):
        missing_as_string = ', '.join([str(c) for c in missing])
        other_files = list(glob.glob(os.path.join(self._search_dir, '*')))
        print(
            "Could not find crash dumps for '{}' in search directory '{}'.\n"
            "Existing files which *did not* match the pattern inside the search "
            "directory are:\n  {}\n".format(missing_as_string, self._search_dir,
                                            '\n  '.join(other_files)))
        if throw and GuessOS() != 'win32':
            raise Exception(
                'Missing crash dumps for: {}'.format(missing_as_string))
    def _get_file_name(self, file):
        # Sample names, for illustration: binary.release_x64_dart, core.12345.
        name = os.path.basename(file)
        (prefix, suffix) = name.split('.', 1)
        is_binary = prefix == 'binary'
        if is_binary:
            (mode, arch, binary_name) = suffix.split('_', 2)
            name = binary_name
        return (name, is_binary)
    def _move(self, files):
        for file in files:
            print('+++ Moving {} to output_directory ({})'.format(
                file, self._output_directory))
            (name, is_binary) = self._get_file_name(file)
            destination = os.path.join(self._output_directory, name)
            shutil.move(file, destination)
            if is_binary and os.path.exists(file + '.pdb'):
                # Also move the .pdb file if there is one.
                pdb = os.path.join(self._output_directory, name + '.pdb')
                shutil.move(file + '.pdb', pdb)
    def _tar(self, file):
        (name, is_binary) = self._get_file_name(file)
        tarname = '{}.tar.gz'.format(name)

        tar = tarfile.open(tarname, mode='w:gz')
        tar.add(file, arcname=name)
        if is_binary and os.path.exists(file + '.pdb'):
            # Also archive the .pdb file if there is one.
            tar.add(file + '.pdb', arcname=name + '.pdb')
        tar.close()
        return tarname
    def _upload(self, files):
        bot_utils = GetBotUtils()
        gsutil = bot_utils.GSUtil()
        storage_path = ...  # subpath under self._bucket (construction elided)
        gs_prefix = 'gs://{}'.format(storage_path)
        http_prefix = 'https://storage.cloud.google.com/{}'.format(storage_path)

        print('\n--- Uploading into {} ({}) ---'.format(gs_prefix, http_prefix))
        for file in files:
            tarname = self._tar(file)
            gs_url = '{}{}'.format(gs_prefix, tarname)
            http_url = '{}{}'.format(http_prefix, tarname)
            try:
                gsutil.upload(tarname, gs_url)
                print('+++ Uploaded {} ({})'.format(gs_url, http_url))
            except Exception as error:
                ...
        print('--- Done ---\n')
    def _find_all_coredumps(self):
        """Return coredumps that were recorded (if supported by the platform).
        This method will be overridden by concrete platform-specific
        implementations.
        """
        return []
    def _find_unexpected_crashes(self):
        """Load the coredumps file. Each line has the following format:

            test-name,pid,binary-file1,binary-file2,...
        """
        try:
            with open(BaseCoreDumpArchiver._UNEXPECTED_CRASHES_FILE) as f:
                return [
                    UnexpectedCrash(*ln.strip('\n').split(','))
                    for ln in f.readlines()
                ]
        except:
            return []
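    # An unexpected-crashes line, for illustration (test name, pid and binary
    # path are hypothetical):
    #   ffi/regress_test,24150,out/ReleaseX64/dart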
    def _cleanup(self):
        found = False
        if os.path.exists(BaseCoreDumpArchiver._UNEXPECTED_CRASHES_FILE):
            os.unlink(BaseCoreDumpArchiver._UNEXPECTED_CRASHES_FILE)
            found = True
        for binary in glob.glob(os.path.join(self._binaries_dir, 'binary.*')):
            found = True
            TryUnlink(binary)
        return found
class PosixCoreDumpArchiver(BaseCoreDumpArchiver):

    def __init__(self, search_dir, output_directory):
        super(PosixCoreDumpArchiver, self).__init__(search_dir,
                                                    output_directory)

    def _cleanup(self):
        found = super(PosixCoreDumpArchiver, self)._cleanup()
        for core in glob.glob(os.path.join(self._search_dir, 'core.*')):
            found = True
            TryUnlink(core)
        return found

    def _find_coredump_file(self, crash):
        core_filename = os.path.join(self._search_dir,
                                     'core.{}'.format(crash.pid))
        if os.path.exists(core_filename):
            return core_filename
class LinuxCoreDumpArchiver(PosixCoreDumpArchiver):

    def __init__(self, output_directory):
        super(LinuxCoreDumpArchiver, self).__init__(os.getcwd(),
                                                    output_directory)
class MacOSCoreDumpArchiver(PosixCoreDumpArchiver):

    def __init__(self, output_directory):
        super(MacOSCoreDumpArchiver, self).__init__('/cores', output_directory)
class WindowsCoreDumpArchiver(BaseCoreDumpArchiver):

    def __init__(self, output_directory):
        super(WindowsCoreDumpArchiver, self).__init__(
            WindowsCoreDumpEnabler.DUMPS_FOLDER, output_directory)
    @staticmethod
    def _find_cdb():
        win_toolchain_json_path = os.path.join(DART_DIR, 'build',
                                               'win_toolchain.json')
        if not os.path.exists(win_toolchain_json_path):
            return None

        with open(win_toolchain_json_path, 'r') as f:
            win_toolchain_info = json.loads(f.read())

        win_sdk_path = win_toolchain_info['win_sdk']
        cdb_path = os.path.join(win_sdk_path, 'Debuggers', 'x64', 'cdb.exe')
        if not os.path.exists(cdb_path):
            return None

        return cdb_path

    CDBG_PROMPT_RE = re.compile(r'^\d+:\d+>')
    def _dump_all_stacks(self):
        # With Crashpad integration, crashes on Windows do not print stack
        # traces; extract them from the collected minidumps with CDB instead.
        try:
            cdb_path = self._find_cdb()
            if cdb_path is None:
                return

            for dump in self._find_all_coredumps():
                print('### Dumping stacks from {} using CDB'.format(dump))
                cdb_output = subprocess.check_output(
                    '"{}" -z "{}" -kqm -c "!uniqstack -b -v -p;qd"'.format(
                        cdb_path, dump),
                    stderr=subprocess.STDOUT)
                # Echo the !uniqstack output, which starts at the CDB prompt
                # and ends at the quit command.
                for line in cdb_output.decode('utf-8').split('\n'):
                    if re.match(WindowsCoreDumpArchiver.CDBG_PROMPT_RE, line):
                        ...
                    elif line.startswith('quit:'):
                        ...
                print('#############################################')
        except Exception as error:
            print('ERROR: Unable to dump stacks from dumps: {}'.format(error))
    def __exit__(self, *args):
        ...
        super(WindowsCoreDumpArchiver, self).__exit__(*args)
    def _cleanup(self):
        found = super(WindowsCoreDumpArchiver, self)._cleanup()
        for core in glob.glob(os.path.join(self._search_dir, '*')):
            found = True
            TryUnlink(core)
        return found
    def _find_all_coredumps(self):
        pattern = os.path.join(self._search_dir, '*.dmp')
        return [core_filename for core_filename in glob.glob(pattern)]
    def _find_coredump_file(self, crash):
        # Crashpad names minidumps by UUID, so scan every dump and match the
        # process id recorded inside it against the crashed pid.
        minidump = GetMinidumpUtils()
        pattern = os.path.join(self._search_dir, '*.dmp')
        for core_filename in glob.glob(pattern):
            pid = minidump.GetProcessIdFromDump(core_filename)
            if pid == int(crash.pid):
                return core_filename
    def _report_missing_crashes(self, missing, throw=False):
        # Have the base class print the debugging information without
        # throwing; raise unconditionally below when asked to throw.
        super(WindowsCoreDumpArchiver, self)._report_missing_crashes(
            missing, throw=False)
        if throw:
            missing_as_string = ', '.join([str(c) for c in missing])
            raise Exception(
                'Missing crash dumps for: {}'.format(missing_as_string))
class IncreasedFileDescriptorLimit(object):

    def __init__(self, nofiles):
        self._limits = (nofiles, nofiles)

    def __enter__(self):
        self._old_limits = resource.getrlimit(resource.RLIMIT_NOFILE)
        resource.setrlimit(resource.RLIMIT_NOFILE, self._limits)

    def __exit__(self, *args):
        resource.setrlimit(resource.RLIMIT_NOFILE, self._old_limits)
@contextlib.contextmanager
def NooptContextManager():
    yield


def CoreDumpArchiver(args):
    enabled = '--copy-coredumps' in args
    prefix = '--output-directory='
    output_directory = next(
        (arg[len(prefix):] for arg in args if arg.startswith(prefix)), None)
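    # For illustration: given args ['--copy-coredumps',
    # '--output-directory=/tmp/out'], enabled is True and output_directory is
    # '/tmp/out'; when no --output-directory= argument is present it is None.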
    if not enabled:
        return (NooptContextManager(),)

    osname = GuessOS()
    if osname == 'linux':
        return (LinuxCoreDumpEnabler(), LinuxCoreDumpArchiver(output_directory))
    elif osname == 'macos':
        return (PosixCoreDumpEnabler(), MacOSCoreDumpArchiver(output_directory))
    elif osname == 'win32':
        return (WindowsCoreDumpEnabler(),
                WindowsCoreDumpArchiver(output_directory))
def FileDescriptorLimitIncreaser():
    osname = GuessOS()
    if osname == 'macos':
        ...
    else:
        assert osname in ('linux', 'win32')
    ...
if __name__ == '__main__':
    ...