Apply consistent style to the whole repo.
Bug: https://github.com/flutter/flutter/issues/124737
Change-Id: I64c904f5f3822f62bfd692277c64eba4f62f5149
Reviewed-on: https://flutter-review.googlesource.com/c/recipes/+/43120
Reviewed-by: Ricardo Amador <ricardoamador@google.com>
Commit-Queue: Godofredo Contreras <godofredoc@google.com>
Reviewed-by: Keyong Han <keyonghan@google.com>
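
The hunks below apply a single mechanical convention across the repo, consistent with running a Python auto-formatter such as YAPF against the repository's style file (the commit does not name the tool, so treat that as an assumption): calls, dicts, and test declarations that fit within the column limit stay on one line, while longer ones wrap one argument per line with a four-space continuation indent and a dedented closing bracket. A minimal standalone sketch of the convention, using a hypothetical run_test helper that is not part of the repo:

# Sketch only: run_test stands in for the recipe calls reformatted below
# (e.g. test_utils.run_test); the names here are purely illustrative.
def run_test(name, resources, timeout_secs=None):
  # Two-space block indent, per the recipes style.
  print(name, resources, timeout_secs)

# A call that fits the column limit stays on one line:
run_test('docs', ['resource.sh'])

# A call that exceeds it wraps one argument per line, with a four-space
# continuation indent and the closing parenthesis dedented:
run_test(
    'docs',
    ['resource.sh'],
    timeout_secs=4500  # 75 minutes
)
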
diff --git a/recipe_modules/adhoc_validation/api.py b/recipe_modules/adhoc_validation/api.py
index 3808203..ea8d4f6 100644
--- a/recipe_modules/adhoc_validation/api.py
+++ b/recipe_modules/adhoc_validation/api.py
@@ -59,28 +59,31 @@
with self.m.context(env=env, env_prefixes=env_prefixes):
self.m.flutter_bcid.report_stage(BcidStage.COMPILE.value)
self.m.test_utils.run_test(
- validation,
- [resource_name],
- timeout_secs=4500 # 75 minutes
+ validation,
+ [resource_name],
+ timeout_secs=4500 # 75 minutes
)
else:
- git_ref = self.m.properties.get('release_ref') or self.m.buildbucket.gitiles_commit.ref
+ git_ref = self.m.properties.get(
+ 'release_ref'
+ ) or self.m.buildbucket.gitiles_commit.ref
      # Post-processing of docs requires LUCI_BRANCH to be set when running from dart-internal.
env['LUCI_BRANCH'] = git_ref.replace('refs/heads/', '')
# Override LUCI_BRANCH for docs and release candidate branches. Docs built from
      # release candidate branches need to be built as stable to ensure they are processed
# correctly.
checkout_path = self.m.repo_util.sdk_checkout_path()
- if (validation == 'docs') and self.m.repo_util.is_release_candidate_branch(checkout_path):
+ if (validation == 'docs'
+ ) and self.m.repo_util.is_release_candidate_branch(checkout_path):
env['LUCI_BRANCH'] = 'stable'
env['LUCI_CI'] = True
with self.m.context(env=env, env_prefixes=env_prefixes):
self.m.flutter_bcid.report_stage(BcidStage.COMPILE.value)
self.m.test_utils.run_test(
- validation,
- [resource_name],
- timeout_secs=4500 # 75 minutes
+ validation,
+ [resource_name],
+ timeout_secs=4500 # 75 minutes
)
if ((validation == 'docs' or validation == 'docs_deploy') and
self.m.properties.get('firebase_project')):
diff --git a/recipe_modules/adhoc_validation/examples/full.py b/recipe_modules/adhoc_validation/examples/full.py
index d32c44c..7056bb4 100644
--- a/recipe_modules/adhoc_validation/examples/full.py
+++ b/recipe_modules/adhoc_validation/examples/full.py
@@ -52,13 +52,14 @@
api.repo_util.flutter_environment_data(checkout_path)
)
yield api.test(
- 'docs', api.platform.name('linux'),
- api.properties(firebase_project='myproject',
- git_branch=''),
+ 'docs',
+ api.platform.name('linux'),
+ api.properties(firebase_project='myproject', git_branch=''),
api.repo_util.flutter_environment_data(checkout_path),
api.step_data(
'Docs.Identify branches.git branch',
- stdout=api.raw_io.output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
+ stdout=api.raw_io
+ .output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
),
api.buildbucket.ci_build(
project='flutter',
diff --git a/recipe_modules/android_virtual_device/api.py b/recipe_modules/android_virtual_device/api.py
index a3f0324..f3e25ea 100644
--- a/recipe_modules/android_virtual_device/api.py
+++ b/recipe_modules/android_virtual_device/api.py
@@ -23,7 +23,8 @@
self.version = version
with self.m.step.nest('download avd package'):
self.m.file.ensure_directory('Ensure avd cache', self.avd_root)
- with self.m.context(env=env, env_prefixes=env_prefixes, cwd=self.avd_root), self.m.depot_tools.on_path():
+ with self.m.context(env=env, env_prefixes=env_prefixes,
+ cwd=self.avd_root), self.m.depot_tools.on_path():
# Download and install AVD
self.m.cipd.ensure(
self.avd_root,
@@ -70,21 +71,28 @@
self.version = version or self.version or '31'
self.emulator_pid = ''
with self.m.step.nest('start avd'):
- with self.m.context(env=env, env_prefixes=env_prefixes, cwd=self.avd_root), self.m.depot_tools.on_path():
+ with self.m.context(env=env, env_prefixes=env_prefixes,
+ cwd=self.avd_root), self.m.depot_tools.on_path():
avd_script_path = self.avd_root.join(
'src', 'tools', 'android', 'avd', 'avd.py'
)
avd_config = self.avd_root.join(
- 'src', 'tools', 'android', 'avd', 'proto', 'generic_android%s.textpb' % self.version
+ 'src', 'tools', 'android', 'avd', 'proto',
+ 'generic_android%s.textpb' % self.version
)
self.m.step(
- 'Install Android emulator (API level %s)' % self.version,
- ['vpython3', avd_script_path, 'install', '--avd-config', avd_config],
+ 'Install Android emulator (API level %s)' % self.version, [
+ 'vpython3', avd_script_path, 'install', '--avd-config',
+ avd_config
+ ],
stdout=self.m.raw_io.output_text(add_output_log=True)
)
output = self.m.step(
- 'Start Android emulator (API level %s)' % self.version,
- ['vpython3', avd_script_path, 'start', '--no-read-only', '--wipe-data', '--writable-system', '--debug-tags', 'all', '--avd-config', avd_config],
+ 'Start Android emulator (API level %s)' % self.version, [
+ 'vpython3', avd_script_path, 'start', '--no-read-only',
+ '--wipe-data', '--writable-system', '--debug-tags', 'all',
+ '--avd-config', avd_config
+ ],
stdout=self.m.raw_io.output_text(add_output_log=True)
).stdout
@@ -102,7 +110,8 @@
env_prefixes(dict): Current environment prefixes variables.
"""
with self.m.step.nest('avd setup'):
- with self.m.context(env=env, env_prefixes=env_prefixes, cwd=self.avd_root):
+ with self.m.context(env=env, env_prefixes=env_prefixes,
+ cwd=self.avd_root):
# Only supported on linux. Do not run this on other platforms.
resource_name = self.resource('avd_setup.sh')
self.m.step(
@@ -110,7 +119,10 @@
['chmod', '755', resource_name],
infra_step=True,
)
- self.m.test_utils.run_test('avd_setup.sh', [resource_name, str(self.adb_path)], infra_step=True)
+ self.m.test_utils.run_test(
+ 'avd_setup.sh', [resource_name, str(self.adb_path)],
+ infra_step=True
+ )
def stop_if_requested(self, env, emulator_pid=None):
"""Stops the emulator and cleans up any zombie QEMU processes.
@@ -136,10 +148,11 @@
self.m.step('Kill emulator cleanup', ['kill', '-9', pid_to_kill])
# Kill zombie processes left over by QEMU on the host.
- step_result = self.m.step('list processes',
- ['ps', '-axww'],
+ step_result = self.m.step(
+ 'list processes', ['ps', '-axww'],
stdout=self.m.raw_io.output_text(add_output_log=True),
- stderr=self.m.raw_io.output_text(add_output_log=True))
+ stderr=self.m.raw_io.output_text(add_output_log=True)
+ )
zombieList = ['qemu-system']
killCommand = ['kill', '-9']
for line in step_result.stdout.splitlines():
diff --git a/recipe_modules/android_virtual_device/examples/full.py b/recipe_modules/android_virtual_device/examples/full.py
index 3550379..7547152 100644
--- a/recipe_modules/android_virtual_device/examples/full.py
+++ b/recipe_modules/android_virtual_device/examples/full.py
@@ -8,51 +8,45 @@
'recipe_engine/raw_io',
]
+
def RunSteps(api):
- env = {
- 'USE_EMULATOR': True
- }
+ env = {'USE_EMULATOR': True}
env_prefixes = {}
avd_root = api.path['cache'].join('builder', 'avd')
api.android_virtual_device.download(
- avd_root=avd_root,
- env=env,
- env_prefixes=env_prefixes,
- version='31'
+ avd_root=avd_root, env=env, env_prefixes=env_prefixes, version='31'
)
api.android_virtual_device.start_if_requested(
env=env,
env_prefixes=env_prefixes,
version='31',
)
- api.android_virtual_device.stop_if_requested(
- env=env,
- )
+ api.android_virtual_device.stop_if_requested(env=env,)
def GenTests(api):
avd_api_version = '31'
yield api.test(
- 'demo',
- api.step_data(
- 'start avd.Start Android emulator (API level %s)' % avd_api_version,
- stdout=api.raw_io.output_text(
- 'android_' + avd_api_version + '_google_apis_x86|emulator-5554 started (pid: 17687)'
- )
- ),
+ 'demo',
+ api.step_data(
+ 'start avd.Start Android emulator (API level %s)' % avd_api_version,
+ stdout=api.raw_io.output_text(
+ 'android_' + avd_api_version +
+ '_google_apis_x86|emulator-5554 started (pid: 17687)'
+ )
+ ),
)
yield api.test(
- 'demo zombie processes',
- api.step_data(
- 'start avd.Start Android emulator (API level %s)' % avd_api_version,
- stdout=api.raw_io.output_text(
- 'android_' + avd_api_version + '_google_apis_x86|emulator-5554 started (pid: 17687)'
- )
- ),
- api.step_data(
- 'kill and cleanup avd.list processes',
+ 'demo zombie processes',
+ api.step_data(
+ 'start avd.Start Android emulator (API level %s)' % avd_api_version,
stdout=api.raw_io.output_text(
- '12345 qemu-system blah'
+ 'android_' + avd_api_version +
+ '_google_apis_x86|emulator-5554 started (pid: 17687)'
)
),
+ api.step_data(
+ 'kill and cleanup avd.list processes',
+ stdout=api.raw_io.output_text('12345 qemu-system blah')
+ ),
)
diff --git a/recipe_modules/archives/api.py b/recipe_modules/archives/api.py
index 263c986..431df9d 100644
--- a/recipe_modules/archives/api.py
+++ b/recipe_modules/archives/api.py
@@ -35,7 +35,6 @@
'x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom'
)
-
# Bucket + initial prefix for artifact destination.
LUCI_TO_GCS_PREFIX = {
'flutter': 'flutter_infra_release',
@@ -48,21 +47,15 @@
# Bucket + initial prefix for artifact destination.
LUCI_TO_ANDROID_GCS_PREFIX = {
- 'flutter': '',
- MONOREPO: 'flutter_archives_v2/monorepo',
- 'prod': '',
- 'staging': 'flutter_archives_v2',
- 'try': 'flutter_archives_v2',
+ 'flutter': '', MONOREPO: 'flutter_archives_v2/monorepo', 'prod': '',
+ 'staging': 'flutter_archives_v2', 'try': 'flutter_archives_v2',
'try.shadow': 'flutter_archives_v2'
}
# Subpath for realms. A realm is used to separate file destinations
# within the same configuration. E.g. production environment with
# an experimental realm and production environment with a production realm.
-REALM_TO_PATH = {
- 'production': '',
- 'experimental': 'experimental'
-}
+REALM_TO_PATH = {'production': '', 'experimental': 'experimental'}
class ArchivesApi(recipe_api.RecipeApi):
@@ -82,19 +75,19 @@
"""
results = []
self.m.path.mock_add_paths(
- self.m.path['start_dir'].join(
- 'out/android_profile/zip_archives/download.flutter.io'),
- DIRECTORY
+ self.m.path['start_dir']
+ .join('out/android_profile/zip_archives/download.flutter.io'), DIRECTORY
)
for include_path in archive_config.get('include_paths', []):
full_include_path = self.m.path.abspath(checkout.join(include_path))
if self.m.path.isdir(full_include_path):
- test_data = [
-
- ]
+ test_data = []
paths = self.m.file.listdir(
- 'Expand directory', checkout.join(include_path),
- recursive=True, test_data=(MOCK_JAR_PATH, MOCK_POM_PATH))
+ 'Expand directory',
+ checkout.join(include_path),
+ recursive=True,
+ test_data=(MOCK_JAR_PATH, MOCK_POM_PATH)
+ )
paths = [self.m.path.abspath(p) for p in paths]
results.extend(paths)
else:
@@ -149,9 +142,7 @@
dst: A string with the local destination for the file.
"""
bucket, path = self._split_dst_parts(src)
- self.m.gsutil.download(
- bucket, path, dst, name="download %s" % src
- )
+ self.m.gsutil.download(bucket, path, dst, name="download %s" % src)
def engine_v2_gcs_paths(self, checkout, archive_config):
"""Calculates engine v2 GCS paths from an archive config.
@@ -168,7 +159,9 @@
    # The artifacts bucket is calculated from the LUCI bucket, but we also use the
    # realm to upload artifacts to the same bucket under a different path when the
    # build configuration uses an experimental realm. Defaults to experimental.
- artifact_realm = REALM_TO_PATH.get(archive_config.get('realm', ''), 'experimental')
+ artifact_realm = REALM_TO_PATH.get(
+ archive_config.get('realm', ''), 'experimental'
+ )
# Do not archive if this is a monorepo try build.
if self.m.monorepo.is_monorepo_try_build:
return results
@@ -185,7 +178,9 @@
for include_path in file_list:
is_android_artifact = ANDROID_ARTIFACTS_BUCKET in include_path
dir_part = self.m.path.dirname(include_path)
- full_base_path = self.m.path.abspath(checkout.join(archive_config.get('base_path','')))
+ full_base_path = self.m.path.abspath(
+ checkout.join(archive_config.get('base_path', ''))
+ )
rel_path = self.m.path.relpath(dir_part, full_base_path)
rel_path = '' if rel_path == '.' else rel_path
base_name = self.m.path.basename(include_path)
@@ -196,23 +191,29 @@
artifact_path = '%s/%s' % (rel_path, base_name)
# Replace ANDROID_ARTIFACTS_BUCKET to include the realm.
old_location = '/'.join([ANDROID_ARTIFACTS_BUCKET, 'io', 'flutter'])
- new_location = '/'.join(filter(
- bool,
- [ANDROID_ARTIFACTS_BUCKET, 'io', 'flutter', artifact_realm])
+ new_location = '/'.join(
+ filter(
+ bool,
+ [ANDROID_ARTIFACTS_BUCKET, 'io', 'flutter', artifact_realm]
+ )
)
artifact_path = artifact_path.replace(old_location, new_location)
bucket_and_prefix = LUCI_TO_ANDROID_GCS_PREFIX.get(bucket)
- artifact_path = '/'.join(filter(bool, [bucket_and_prefix, artifact_path]))
+ artifact_path = '/'.join(
+ filter(bool, [bucket_and_prefix, artifact_path])
+ )
else:
bucket_and_prefix = LUCI_TO_GCS_PREFIX.get(bucket)
- artifact_path = '/'.join(filter(bool, [bucket_and_prefix, 'flutter', artifact_realm, commit, rel_path, base_name]))
+ artifact_path = '/'.join(
+ filter(
+ bool, [
+ bucket_and_prefix, 'flutter', artifact_realm, commit,
+ rel_path, base_name
+ ]
+ )
+ )
- results.append(
- ArchivePaths(
- include_path,
- 'gs://%s' % artifact_path
- )
- )
+ results.append(ArchivePaths(include_path, 'gs://%s' % artifact_path))
return results
def global_generator_paths(self, checkout, archives):
@@ -246,12 +247,16 @@
  # The artifacts bucket is calculated from the LUCI bucket, but we also use the
  # realm to upload artifacts to the same bucket under a different path when the
  # build configuration uses an experimental realm. Defaults to experimental.
- artifact_realm = REALM_TO_PATH.get(archive.get('realm', ''), 'experimental')
+ artifact_realm = REALM_TO_PATH.get(
+ archive.get('realm', ''), 'experimental'
+ )
source = checkout.join(archive.get('source'))
artifact_path = '/'.join(
filter(
- bool, [bucket_and_prefix, 'flutter', artifact_realm, commit,
- archive.get('destination')]
+ bool, [
+ bucket_and_prefix, 'flutter', artifact_realm, commit,
+ archive.get('destination')
+ ]
)
)
dst = 'gs://%s' % artifact_path
diff --git a/recipe_modules/archives/examples/engine_v2_gcs_paths.py b/recipe_modules/archives/examples/engine_v2_gcs_paths.py
index 712a653..2be6b91 100644
--- a/recipe_modules/archives/examples/engine_v2_gcs_paths.py
+++ b/recipe_modules/archives/examples/engine_v2_gcs_paths.py
@@ -20,10 +20,11 @@
def RunSteps(api):
checkout = api.path['start_dir']
config = {
- "name": "android_profile",
- "type": "gcs",
- "base_path": "out/android_profile/zip_archives/",
- "realm": "production",
+ "name":
+ "android_profile", "type":
+ "gcs", "base_path":
+ "out/android_profile/zip_archives/", "realm":
+ "production",
"include_paths": [
"out/android_profile/zip_archives/artifact1.zip",
"out/android_profile/zip_archives/android-arm-profile/artifacts.zip",
@@ -36,68 +37,119 @@
results = api.archives.engine_v2_gcs_paths(checkout, config)
expected_prod_results = [
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/artifact1.zip')),
+ local=str(
+ api.path['start_dir']
+ .join('out/android_profile/zip_archives/artifact1.zip')
+ ),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/artifact1.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/android-arm-profile/artifacts.zip')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/android-arm-profile/artifacts.zip'
+ )
+ ),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/android-arm-profile/artifacts.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/android-arm-profile/linux-x64.zip')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/android-arm-profile/linux-x64.zip'
+ )
+ ),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/android-arm-profile/linux-x64.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/android-arm-profile/symbols.zip')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/android-arm-profile/symbols.zip'
+ )
+ ),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/android-arm-profile/symbols.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.jar')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.jar'
+ )
+ ),
remote='gs://download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.jar'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom'
+ )
+ ),
remote='gs://download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/sky_engine.zip')),
+ local=str(
+ api.path['start_dir']
+ .join('out/android_profile/zip_archives/sky_engine.zip')
+ ),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/sky_engine.zip'
)
]
expected_try_results = [
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/artifact1.zip')),
+ local=str(
+ api.path['start_dir']
+ .join('out/android_profile/zip_archives/artifact1.zip')
+ ),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/artifact1.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/android-arm-profile/artifacts.zip')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/android-arm-profile/artifacts.zip'
+ )
+ ),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/android-arm-profile/artifacts.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/android-arm-profile/linux-x64.zip')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/android-arm-profile/linux-x64.zip'
+ )
+ ),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/android-arm-profile/linux-x64.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/android-arm-profile/symbols.zip')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/android-arm-profile/symbols.zip'
+ )
+ ),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/android-arm-profile/symbols.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.jar')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.jar'
+ )
+ ),
remote='gs://flutter_archives_v2/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.jar'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom')),
+ local=str(
+ api.path['start_dir'].join(
+ 'out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom'
+ )
+ ),
remote='gs://flutter_archives_v2/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('out/android_profile/zip_archives/sky_engine.zip')),
+ local=str(
+ api.path['start_dir']
+ .join('out/android_profile/zip_archives/sky_engine.zip')
+ ),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/sky_engine.zip'
)
]
config = api.properties.get('config')
expected_results = {
- 'prod': expected_prod_results,
- 'try': expected_try_results
+ 'prod': expected_prod_results, 'try': expected_try_results
}
api.assertions.assertListEqual(expected_results[config], results)
@@ -110,8 +162,7 @@
bucket='prod',
git_repo='https://flutter.googlesource.com/mirrors/engine',
git_ref='refs/heads/main'
- ),
- api.properties(config='prod'),
+ ), api.properties(config='prod'),
api.step_data(
'git rev-parse',
stdout=api.raw_io
@@ -125,8 +176,7 @@
bucket='try',
git_repo='https://flutter.googlesource.com/mirrors/engine',
git_ref='refs/heads/main'
- ),
- api.properties(config='try'),
+ ), api.properties(config='try'),
api.step_data(
'git rev-parse',
stdout=api.raw_io
diff --git a/recipe_modules/archives/examples/full.py b/recipe_modules/archives/examples/full.py
index 72db93d..9219470 100644
--- a/recipe_modules/archives/examples/full.py
+++ b/recipe_modules/archives/examples/full.py
@@ -23,17 +23,19 @@
for result in results:
if result.remote not in expected_destinations:
assert False, 'Unexpected file generated %s' % result.remote
- if not results: return
+ if not results:
+ return
api.archives.upload_artifact(results[0].local, results[0].remote)
api.archives.download(results[0].remote, results[0].local)
def GenTests(api):
archive_config = {
- "name": "android_profile",
- "type": "gcs",
- "realm": "production",
- "base_path": "out/android_profile/zip_archives/",
+ "name":
+ "android_profile", "type":
+ "gcs", "realm":
+ "production", "base_path":
+ "out/android_profile/zip_archives/",
"include_paths": [
"out/android_profile/zip_archives/android-arm-profile/artifacts.zip",
"out/android_profile/zip_archives/android-arm-profile/linux-x64.zip",
@@ -52,7 +54,10 @@
]
yield api.test(
'try_pool_production_realm',
- api.properties(config=archive_config, expected_destinations=try_pool_production_realm),
+ api.properties(
+ config=archive_config,
+ expected_destinations=try_pool_production_realm
+ ),
api.buildbucket.ci_build(
project='flutter',
bucket='try',
@@ -78,7 +83,10 @@
try_pool_experimental_realm_config['realm'] = 'experimental'
yield api.test(
'try_pool_experimental_realm',
- api.properties(config=try_pool_experimental_realm_config, expected_destinations=try_pool_experimental_realm),
+ api.properties(
+ config=try_pool_experimental_realm_config,
+ expected_destinations=try_pool_experimental_realm
+ ),
api.buildbucket.ci_build(
project='flutter',
bucket='try',
@@ -102,7 +110,10 @@
]
yield api.test(
'prod_pool_production_realm',
- api.properties(config=archive_config, expected_destinations=prod_pool_production_realm),
+ api.properties(
+ config=archive_config,
+ expected_destinations=prod_pool_production_realm
+ ),
api.buildbucket.ci_build(
project='flutter',
bucket='prod',
@@ -128,7 +139,10 @@
prod_pool_experimental_realm_config['realm'] = 'experimental'
yield api.test(
'prod_pool_experimental_realm',
- api.properties(config=prod_pool_experimental_realm_config, expected_destinations=prod_pool_experimental_realm),
+ api.properties(
+ config=prod_pool_experimental_realm_config,
+ expected_destinations=prod_pool_experimental_realm
+ ),
api.buildbucket.ci_build(
project='flutter',
bucket='prod',
@@ -152,7 +166,10 @@
]
yield api.test(
'flutter_pool_production_realm',
- api.properties(config=archive_config, expected_destinations=flutter_pool_production_realm),
+ api.properties(
+ config=archive_config,
+ expected_destinations=flutter_pool_production_realm
+ ),
api.buildbucket.ci_build(
project='dart-internal',
bucket='flutter',
@@ -178,7 +195,10 @@
flutter_pool_experimental_realm_config['realm'] = 'experimental'
yield api.test(
'flutter_pool_experimental_realm',
- api.properties(config=flutter_pool_experimental_realm_config, expected_destinations=flutter_pool_production_realm),
+ api.properties(
+ config=flutter_pool_experimental_realm_config,
+ expected_destinations=flutter_pool_production_realm
+ ),
api.buildbucket.ci_build(
project='dart-internal',
bucket='flutter',
@@ -202,7 +222,10 @@
]
yield api.test(
'staging_pool_production_realm',
- api.properties(config=archive_config, expected_destinations=staging_pool_production_realm),
+ api.properties(
+ config=archive_config,
+ expected_destinations=staging_pool_production_realm
+ ),
api.buildbucket.ci_build(
project='flutter',
bucket='staging',
@@ -228,7 +251,10 @@
staging_pool_experimental_realm_config['realm'] = 'experimental'
yield api.test(
'staging_pool_experimental_realm',
- api.properties(config=staging_pool_experimental_realm_config, expected_destinations=staging_pool_production_realm),
+ api.properties(
+ config=staging_pool_experimental_realm_config,
+ expected_destinations=staging_pool_production_realm
+ ),
api.buildbucket.ci_build(
project='flutter',
bucket='staging',
@@ -252,7 +278,10 @@
]
yield api.test(
'monorepo_ci', api.monorepo.ci_build(),
- api.properties(config=archive_config, expected_destinations=monorepo_production_realm),
+ api.properties(
+ config=archive_config,
+ expected_destinations=monorepo_production_realm
+ ),
api.step_data(
'git rev-parse',
stdout=api.raw_io
@@ -272,7 +301,10 @@
]
yield api.test(
'monorepo_ci_experimental_realm', api.monorepo.ci_build(),
- api.properties(config=monorepo_experimental_realm_config, expected_destinations=monorepo_experimental_realm),
+ api.properties(
+ config=monorepo_experimental_realm_config,
+ expected_destinations=monorepo_experimental_realm
+ ),
api.step_data(
'git rev-parse',
stdout=api.raw_io
@@ -281,28 +313,32 @@
)
# Monorepo try with "production" realm in build configuration file.
- monorepo_try_realm = [
- ]
+ monorepo_try_realm = []
yield api.test(
'monorepo_try_production_realm',
- api.properties(config=archive_config, expected_destinations=monorepo_try_realm),
+ api.properties(
+ config=archive_config, expected_destinations=monorepo_try_realm
+ ),
api.monorepo.try_build(),
)
# Monorepo try with "experimental" realm in build configuration file.
- monorepo_try_realm = [
- ]
+ monorepo_try_realm = []
monorepo_experimental_realm_config = copy.deepcopy(archive_config)
monorepo_experimental_realm_config['realm'] = 'experimental'
yield api.test(
'monorepo_try_experimental_realm',
- api.properties(config=archive_config, expected_destinations=monorepo_try_realm),
+ api.properties(
+ config=archive_config, expected_destinations=monorepo_try_realm
+ ),
api.monorepo.try_build(),
)
yield api.test(
'failure',
- api.properties(config=archive_config, expected_destinations=['/abc/cde.zip']),
+ api.properties(
+ config=archive_config, expected_destinations=['/abc/cde.zip']
+ ),
api.buildbucket.ci_build(
project='flutter',
bucket='try',
@@ -312,8 +348,7 @@
api.step_data(
'git rev-parse',
stdout=api.raw_io
- .output_text('12345abcde12345abcde12345abcde12345abcde\n')
- ),
- api.expect_exception('AssertionError'),
+ .output_text('12345abcde12345abcde12345abcde12345abcde\n')
+ ), api.expect_exception('AssertionError'),
api.post_process(StatusException)
)
diff --git a/recipe_modules/archives/examples/global_generator_paths.py b/recipe_modules/archives/examples/global_generator_paths.py
index cb51250..05cd980 100644
--- a/recipe_modules/archives/examples/global_generator_paths.py
+++ b/recipe_modules/archives/examples/global_generator_paths.py
@@ -19,31 +19,31 @@
def RunSteps(api):
checkout = api.path['start_dir'].join('src')
- archives = [
- {
- "source": "out/debug/artifacts.zip",
- "destination": "ios/artifacts.zip"
- },
- {
- "source": "out/release-nobitcode/Flutter.dSYM.zip",
- "destination": "ios-release-nobitcode/Flutter.dSYM.zip"
- },
- {
- "source": "out/release/Flutter.dSYM.zip",
- "destination": "ios-release/Flutter.dSYM.zip"
- }
- ]
+ archives = [{
+ "source": "out/debug/artifacts.zip", "destination": "ios/artifacts.zip"
+ }, {
+ "source": "out/release-nobitcode/Flutter.dSYM.zip",
+ "destination": "ios-release-nobitcode/Flutter.dSYM.zip"
+ }, {
+ "source": "out/release/Flutter.dSYM.zip",
+ "destination": "ios-release/Flutter.dSYM.zip"
+ }]
expected_results = [
ArchivePaths(
local=str(api.path['start_dir'].join('src/out/debug/artifacts.zip')),
remote='gs://flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios/artifacts.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/release-nobitcode/Flutter.dSYM.zip')),
+ local=str(
+ api.path['start_dir']
+ .join('src/out/release-nobitcode/Flutter.dSYM.zip')
+ ),
remote='gs://flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios-release-nobitcode/Flutter.dSYM.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')),
+ local=str(
+ api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')
+ ),
remote='gs://flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios-release/Flutter.dSYM.zip'
)
]
@@ -53,11 +53,16 @@
remote='gs://flutter_archives_v2/monorepo/flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios/artifacts.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/release-nobitcode/Flutter.dSYM.zip')),
+ local=str(
+ api.path['start_dir']
+ .join('src/out/release-nobitcode/Flutter.dSYM.zip')
+ ),
remote='gs://flutter_archives_v2/monorepo/flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios-release-nobitcode/Flutter.dSYM.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')),
+ local=str(
+ api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')
+ ),
remote='gs://flutter_archives_v2/monorepo/flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios-release/Flutter.dSYM.zip'
)
]
@@ -67,19 +72,22 @@
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios/artifacts.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/release-nobitcode/Flutter.dSYM.zip')),
+ local=str(
+ api.path['start_dir']
+ .join('src/out/release-nobitcode/Flutter.dSYM.zip')
+ ),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios-release-nobitcode/Flutter.dSYM.zip'
),
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')),
+ local=str(
+ api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')
+ ),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/experimental/12345abcde12345abcde12345abcde12345abcde/ios-release/Flutter.dSYM.zip'
)
]
env_to_results = {
- 'production': expected_results,
- 'monorepo': expected_monorepo_results,
- 'monorepo_try': [],
- 'try': expected_try_results
+ 'production': expected_results, 'monorepo': expected_monorepo_results,
+ 'monorepo_try': [], 'try': expected_try_results
}
config = api.properties.get('config')
results = api.archives.global_generator_paths(checkout, archives)
@@ -88,8 +96,7 @@
def GenTests(api):
yield api.test(
- 'basic',
- api.properties(config='production'),
+ 'basic', api.properties(config='production'),
api.buildbucket.ci_build(
project='flutter',
bucket='prod',
@@ -113,8 +120,7 @@
api.monorepo.try_build(),
)
yield api.test(
- 'try',
- api.properties(config='try'),
+ 'try', api.properties(config='try'),
api.buildbucket.ci_build(
project='flutter',
bucket='try',
diff --git a/recipe_modules/bucket_util/api.py b/recipe_modules/bucket_util/api.py
index aed7cb0..1e7d43c 100644
--- a/recipe_modules/bucket_util/api.py
+++ b/recipe_modules/bucket_util/api.py
@@ -79,7 +79,9 @@
local_zip = temp_dir.join(zip_name)
remote_zip = self.get_cloud_path(remote_name)
if isinstance(parent_directory, str):
- parent_directory = self.m.path['cache'].join('builder', parent_directory)
+ parent_directory = self.m.path['cache'].join(
+ 'builder', parent_directory
+ )
pkg = self.m.zip.make_package(parent_directory, local_zip)
pkg.add_directory(parent_directory.join(folder_name))
@@ -89,7 +91,6 @@
pkg.zip('Zip %s' % folder_name)
if self.should_upload_packages():
return self.safe_upload(local_zip, remote_zip, bucket_name=bucket_name)
-
def safe_upload(
self,
@@ -129,7 +130,9 @@
if not self.m.path.exists(local_path):
with self.m.step.nest('%s not found' % local_path) as presentation:
parent_dir = self.m.path.abs_to_path(self.m.path.dirname(local_path))
- self.m.file.listdir('Files in parent directory of safe_upload request', parent_dir)
+ self.m.file.listdir(
+ 'Files in parent directory of safe_upload request', parent_dir
+ )
presentation.status = self.m.step.FAILURE
raise AssertionError('File not found %s' % local_path)
diff --git a/recipe_modules/bucket_util/examples/full.py b/recipe_modules/bucket_util/examples/full.py
index 4863dbf..f2c8560 100644
--- a/recipe_modules/bucket_util/examples/full.py
+++ b/recipe_modules/bucket_util/examples/full.py
@@ -16,25 +16,28 @@
def RunSteps(api):
api.bucket_util.upload_folder(
- 'Upload test.zip', # dir_label
- 'src', # parent_directory
- 'build', # folder_name
- 'test1.zip') # zip_name
+ 'Upload test.zip', # dir_label
+ 'src', # parent_directory
+ 'build', # folder_name
+ 'test1.zip'
+ ) # zip_name
api.bucket_util.upload_folder_and_files(
- 'Upload test.zip', # dir_label
- 'src', # parent_directory
- 'build', # folder_name
+ 'Upload test.zip', # dir_label
+ 'src', # parent_directory
+ 'build', # folder_name
'test2.zip', # zip_name
- file_paths=['a.txt'])
+ file_paths=['a.txt']
+ )
api.bucket_util.upload_folder_and_files(
- 'Upload test.zip', # dir_label
- 'src', # parent_directory
- 'build', # folder_name
- 'test3.zip', # zip_name
+ 'Upload test.zip', # dir_label
+ 'src', # parent_directory
+ 'build', # folder_name
+ 'test3.zip', # zip_name
platform='parent_directory',
- file_paths=['a.txt'])
+ file_paths=['a.txt']
+ )
# Prepare files.
temp = api.path.mkdtemp('bucketutil-example')
@@ -48,24 +51,24 @@
package.zip('zipping')
api.bucket_util.safe_upload(
- local_zip, # local_path
- "foo", # remote_path
- skip_on_duplicate=True)
+ local_zip, # local_path
+ "foo", # remote_path
+ skip_on_duplicate=True
+ )
if api.properties.get('try_bad_file', False):
api.bucket_util.safe_upload(
- temp.join('A_file_that_does_not_exist'), # local_path
- 'bar', # remote_path
+ temp.join('A_file_that_does_not_exist'), # local_path
+ 'bar', # remote_path
skip_on_duplicate=True,
- add_mock=False)
+ add_mock=False
+ )
def GenTests(api):
yield api.test(
'basic',
- api.properties(
- upload_packages=False,
- ),
+ api.properties(upload_packages=False,),
)
yield api.test(
'basic with fail',
@@ -73,7 +76,7 @@
upload_packages=False,
try_bad_file=True,
),
- api.expect_exception('AssertionError'), # the non-existent file
+ api.expect_exception('AssertionError'), # the non-existent file
# Expectation file would contain a brittle stack trace.
# TODO: Re-enable the expectation file after Python 2 support is no longer
# required.
@@ -81,9 +84,7 @@
)
yield api.test(
'upload_packages',
- api.properties(
- upload_packages=True,
- ),
+ api.properties(upload_packages=True,),
      # These ids are UUIDs derived from a fixed seed.
# To get new ids, just run the test and use the ids generated
# by the uuid module.
@@ -106,12 +107,11 @@
)
yield api.test(
'upload_packages_if_commit_is_present',
- api.properties(
- upload_packages=True,
- ),
+ api.properties(upload_packages=True,),
api.buildbucket.ci_build(
git_repo='github.com/flutter/engine',
- revision='8b3cd40a25a512033cc8c0797e41de9ecfc2432c'),
+ revision='8b3cd40a25a512033cc8c0797e41de9ecfc2432c'
+ ),
api.step_data(
'Ensure flutter/8b3cd40a25a512033cc8c0797e41de9ecfc2432c/test1.zip '
'does not already exist on cloud storage',
@@ -131,9 +131,7 @@
)
yield api.test(
'upload_packages_tiggers_exception_and_package_exists',
- api.properties(
- upload_packages=True,
- ),
+ api.properties(upload_packages=True,),
api.expect_exception('AssertionError'),
# Expectation file would contain a brittle stack trace.
# TODO: Re-enable the expectation file after Python 2 support is no longer
@@ -143,7 +141,5 @@
yield api.test(
'upload_packages_experimental_runtime',
api.runtime(is_experimental=True),
- api.properties(
- upload_packages=True,
- ),
+ api.properties(upload_packages=True,),
)
diff --git a/recipe_modules/build_util/api.py b/recipe_modules/build_util/api.py
index 7d22c15..400a543 100644
--- a/recipe_modules/build_util/api.py
+++ b/recipe_modules/build_util/api.py
@@ -76,7 +76,8 @@
targets(list): A list of string with the ninja targets to build.
"""
build_dir = checkout_path.join('out/%s' % config)
- concurrent_jobs = self.m.properties.get('concurrent_jobs') or self._calculate_j_value()
+ concurrent_jobs = self.m.properties.get('concurrent_jobs'
+ ) or self._calculate_j_value()
ninja_args = [tool, '-C', build_dir, '-j', concurrent_jobs]
ninja_args.extend(targets)
with self.m.depot_tools.on_path():
diff --git a/recipe_modules/build_util/examples/full.py b/recipe_modules/build_util/examples/full.py
index 3cef13f..8d14350 100644
--- a/recipe_modules/build_util/examples/full.py
+++ b/recipe_modules/build_util/examples/full.py
@@ -26,7 +26,5 @@
def GenTests(api):
yield api.test('basic', api.properties(no_lto=True))
- yield api.test('win', api.properties(no_lto=True),
- api.platform('win', 64))
- yield api.test('mac', api.properties(no_lto=True),
- api.platform('mac', 64))
+ yield api.test('win', api.properties(no_lto=True), api.platform('win', 64))
+ yield api.test('mac', api.properties(no_lto=True), api.platform('mac', 64))
diff --git a/recipe_modules/devicelab_osx_sdk/examples/full.py b/recipe_modules/devicelab_osx_sdk/examples/full.py
index f0b7f99..cd60a18 100644
--- a/recipe_modules/devicelab_osx_sdk/examples/full.py
+++ b/recipe_modules/devicelab_osx_sdk/examples/full.py
@@ -3,10 +3,10 @@
# found in the LICENSE file.
DEPS = [
- 'flutter/devicelab_osx_sdk',
- 'recipe_engine/platform',
- 'recipe_engine/properties',
- 'recipe_engine/step',
+ 'flutter/devicelab_osx_sdk',
+ 'recipe_engine/platform',
+ 'recipe_engine/properties',
+ 'recipe_engine/step',
]
@@ -18,16 +18,15 @@
def GenTests(api):
for platform in ('linux', 'mac', 'win'):
- yield (api.test(platform) +
- api.platform.name(platform) +
- api.properties(**{'$flutter/devicelab_osx_sdk': {
- 'sdk_version': 'deadbeef',
- }}))
+ yield (
+ api.test(platform) + api.platform.name(platform) + api.properties(
+ **{'$flutter/devicelab_osx_sdk': {'sdk_version': 'deadbeef',}}
+ )
+ )
yield api.test(
- 'explicit_version',
- api.platform.name('mac'),
- api.properties(**{'$flutter/devicelab_osx_sdk': {
- 'sdk_version': 'deadbeef',
- }})
+ 'explicit_version', api.platform.name('mac'),
+ api.properties(
+ **{'$flutter/devicelab_osx_sdk': {'sdk_version': 'deadbeef',}}
+ )
)
diff --git a/recipe_modules/display_util/examples/display_builds.py b/recipe_modules/display_util/examples/display_builds.py
index 391df25..c17f8fc 100644
--- a/recipe_modules/display_util/examples/display_builds.py
+++ b/recipe_modules/display_util/examples/display_builds.py
@@ -13,13 +13,16 @@
"raise_on_failure": Property(kind=bool, default=True),
}
+
def RunSteps(api, raise_on_failure):
- builds = api.buildbucket.collect_builds(build_ids=[
- 123456789012345678,
- 987654321098765432,
- 112233445566778899,
- 199887766554433221,
- ])
+ builds = api.buildbucket.collect_builds(
+ build_ids=[
+ 123456789012345678,
+ 987654321098765432,
+ 112233445566778899,
+ 199887766554433221,
+ ]
+ )
api.display_util.display_builds(
step_name="display builds",
builds=sorted(builds.values(), key=lambda b: b.id),
@@ -28,15 +31,15 @@
def GenTests(api):
+
def build(summary_markdown=None, **kwargs):
- b = api.buildbucket.ci_build_message(**kwargs)
- if summary_markdown:
- b.summary_markdown = summary_markdown
- return b
+ b = api.buildbucket.ci_build_message(**kwargs)
+ if summary_markdown:
+ b.summary_markdown = summary_markdown
+ return b
yield (
- api.test(
- "mixed_with_infra_failures", status="INFRA_FAILURE") +
+ api.test("mixed_with_infra_failures", status="INFRA_FAILURE") +
# Exercise all status colors.
# Purple failures prioritized over red failures.
api.buildbucket.simulated_collect_output([
@@ -58,11 +61,11 @@
build_id=199887766554433221,
status="SCHEDULED",
),
- ]))
+ ])
+ )
yield (
- api.test(
- "canceled_buildss", status="INFRA_FAILURE") +
+ api.test("canceled_buildss", status="INFRA_FAILURE") +
# Exercise all status colors.
# Purple failures prioritized over red failures.
api.buildbucket.simulated_collect_output([
@@ -79,7 +82,8 @@
build_id=199887766554433221,
status="SCHEDULED",
),
- ]))
+ ])
+ )
yield (
api.test("mixed_without_infra_failures", status="FAILURE") +
@@ -101,7 +105,8 @@
build_id=199887766554433221,
status="SCHEDULED",
),
- ]))
+ ])
+ )
yield (
api.test("all_passed") +
@@ -111,4 +116,5 @@
build_id=123456789012345678,
status="SUCCESS",
),
- ]))
+ ])
+ )
diff --git a/recipe_modules/display_util/examples/display_subbuilds.py b/recipe_modules/display_util/examples/display_subbuilds.py
index 98742f2..1e13bdc 100644
--- a/recipe_modules/display_util/examples/display_subbuilds.py
+++ b/recipe_modules/display_util/examples/display_subbuilds.py
@@ -22,20 +22,23 @@
SCHEDULED_BUILD_ID = 199887766554433221
CANCELED_BUILD_ID = 987654321098765433
+
def RunSteps(api, raise_on_failure):
  # Collect current build status using the buildbucket API. The build ids
  # list passed to the API limits the query to only the build ids that
  # we are interested in. This API returns a Build object only if the build
  # exists in buildbucket.
- builds = api.buildbucket.collect_builds(build_ids=[
- # Builds with the following ids are mocked in the GenTests section
- # with different properties and status depending on the test.
- SUCCESS_BUILD_ID,
- INFRA_FAILURE_BUILD_ID,
- FAILURE_BUILD_ID,
- SCHEDULED_BUILD_ID,
- CANCELED_BUILD_ID,
- ])
+ builds = api.buildbucket.collect_builds(
+ build_ids=[
+ # Builds with the following ids are mocked in the GenTests section
+ # with different properties and status depending on the test.
+ SUCCESS_BUILD_ID,
+ INFRA_FAILURE_BUILD_ID,
+ FAILURE_BUILD_ID,
+ SCHEDULED_BUILD_ID,
+ CANCELED_BUILD_ID,
+ ]
+ )
final_builds = {}
for key in builds:
build = builds[key]
@@ -54,21 +57,22 @@
def GenTests(api):
+
def build(summary_markdown=None, **kwargs):
- b = api.buildbucket.ci_build_message(**kwargs)
- if summary_markdown:
- b.summary_markdown = summary_markdown
- return b
+ b = api.buildbucket.ci_build_message(**kwargs)
+ if summary_markdown:
+ b.summary_markdown = summary_markdown
+ return b
# Mock builds injected in the different tests.
success_build = build(
- build_id=SUCCESS_BUILD_ID,
- status="SUCCESS",
+ build_id=SUCCESS_BUILD_ID,
+ status="SUCCESS",
)
infra_failure_build = build(
- build_id=INFRA_FAILURE_BUILD_ID,
- status="INFRA_FAILURE",
- summary_markdown="something failed related to infra",
+ build_id=INFRA_FAILURE_BUILD_ID,
+ status="INFRA_FAILURE",
+ summary_markdown="something failed related to infra",
)
failure_build = build(
build_id=FAILURE_BUILD_ID,
@@ -91,8 +95,7 @@
)
yield (
- api.test(
- "mixed_with_infra_failures", status="INFRA_FAILURE") +
+ api.test("mixed_with_infra_failures", status="INFRA_FAILURE") +
# Exercise all status colors.
# Purple failures prioritized over red failures.
api.buildbucket.simulated_collect_output([
@@ -100,18 +103,19 @@
infra_failure_build,
failure_build,
scheduled_build,
- ]))
+ ])
+ )
yield (
- api.test(
- "canceled_builds", status="INFRA_FAILURE") +
+ api.test("canceled_builds", status="INFRA_FAILURE") +
# Exercise all status colors.
# Purple failures prioritized over red failures.
api.buildbucket.simulated_collect_output([
success_build,
canceled_build,
scheduled_build,
- ]))
+ ])
+ )
yield (
api.test("mixed_without_infra_failures", status="FAILURE") +
@@ -121,11 +125,13 @@
failure_build,
failure_build_2,
scheduled_build,
- ]))
+ ])
+ )
yield (
api.test("all_passed") +
# With just red failures, raise red.
api.buildbucket.simulated_collect_output([
success_build,
- ]))
+ ])
+ )
diff --git a/recipe_modules/display_util/examples/display_tasks.py b/recipe_modules/display_util/examples/display_tasks.py
index 473c272..d14b1bd 100644
--- a/recipe_modules/display_util/examples/display_tasks.py
+++ b/recipe_modules/display_util/examples/display_tasks.py
@@ -44,8 +44,7 @@
def GenTests(api):
yield (
- api.test("Test_Infra_Failure", status="INFRA_FAILURE") +
- api.step_data(
+ api.test("Test_Infra_Failure", status="INFRA_FAILURE") + api.step_data(
"Trigger Tests", api.swarming.trigger(["task1", "task2"],
initial_id=0)
) + api.step_data(
diff --git a/recipe_modules/flutter_bcid/api.py b/recipe_modules/flutter_bcid/api.py
index bcfed80..3ab2a52 100644
--- a/recipe_modules/flutter_bcid/api.py
+++ b/recipe_modules/flutter_bcid/api.py
@@ -10,12 +10,12 @@
class BcidStage(Enum):
  """Enum representing valid bcid stages."""
- START='start'
- FETCH='fetch'
- COMPILE='compile'
- UPLOAD='upload'
- UPLOAD_COMPLETE='upload-complete'
- TEST='test'
+ START = 'start'
+ FETCH = 'fetch'
+ COMPILE = 'compile'
+ UPLOAD = 'upload'
+ UPLOAD_COMPLETE = 'upload-complete'
+ TEST = 'test'
class FlutterBcidApi(recipe_api.RecipeApi):
@@ -46,8 +46,4 @@
"""
if self.is_official_build():
sha256 = self.m.file.file_hash(local_artifact_path)
- self.m.bcid_reporter.report_gcs(
- sha256,
- remote_artifact_path
- )
-
+ self.m.bcid_reporter.report_gcs(sha256, remote_artifact_path)
diff --git a/recipe_modules/flutter_bcid/examples/full.py b/recipe_modules/flutter_bcid/examples/full.py
index b21b673..d423d09 100644
--- a/recipe_modules/flutter_bcid/examples/full.py
+++ b/recipe_modules/flutter_bcid/examples/full.py
@@ -12,8 +12,7 @@
def RunSteps(api):
api.flutter_bcid.report_stage('one')
api.flutter_bcid.upload_provenance(
- api.path['cache'].join('file.zip'),
- 'gs://bucket/final_path/file.txt'
+ api.path['cache'].join('file.zip'), 'gs://bucket/final_path/file.txt'
)
api.flutter_bcid.is_official_build()
api.flutter_bcid.is_prod_build()
diff --git a/recipe_modules/flutter_deps/api.py b/recipe_modules/flutter_deps/api.py
index 1cb0ad7..d97a0fb 100644
--- a/recipe_modules/flutter_deps/api.py
+++ b/recipe_modules/flutter_deps/api.py
@@ -39,8 +39,7 @@
self.m.cas.download(
'Download engine from CAS', cas_hash, checkout_engine
)
- local_engine = checkout_engine.join(
- local_engine or 'host_debug_unopt')
+ local_engine = checkout_engine.join(local_engine or 'host_debug_unopt')
dart_bin = local_engine.join('dart-sdk', 'bin')
paths = env_prefixes.get('PATH', [])
paths.insert(0, dart_bin)
@@ -54,9 +53,10 @@
self.m.cas.download(
'Download web sdk from CAS', web_sdk_cas_hash, checkout_src
)
- local_web_sdk = checkout_src.join(
- 'out', local_web_sdk or 'wasm_release')
- dart_bin = checkout_src.join('flutter', 'prebuilts', '${platform}', 'dart-sdk', 'bin')
+ local_web_sdk = checkout_src.join('out', local_web_sdk or 'wasm_release')
+ dart_bin = checkout_src.join(
+ 'flutter', 'prebuilts', '${platform}', 'dart-sdk', 'bin'
+ )
paths = env_prefixes.get('PATH', [])
paths.insert(0, dart_bin)
env_prefixes['PATH'] = paths
@@ -92,7 +92,9 @@
'go_sdk': self.go_sdk,
'goldctl': self.goldctl,
'gradle_cache': self.gradle_cache,
- 'ios_signing': self.apple_signing, # TODO(drewroen): Remove this line once ios_signing is not being referenced
+ 'ios_signing':
+ self.
+ apple_signing, # TODO(drewroen): Remove this line once ios_signing is not being referenced
'jazzy': self.jazzy,
'ninja': self.ninja,
'open_jdk': self.open_jdk,
@@ -157,14 +159,16 @@
with self.m.step.nest('Arm Tools dependency'):
arm_tools_cache_dir = self.m.path['cache'].join('arm-tools')
self.m.cipd.ensure(
- self.m.path['cache'],
- self.m.cipd.EnsureFile().add_package(
- 'flutter_internal/tools/arm-tools', version
- )
+ self.m.path['cache'],
+ self.m.cipd.EnsureFile().add_package(
+ 'flutter_internal/tools/arm-tools', version
+ )
)
self.m.file.listdir('arm-tools contents', arm_tools_cache_dir)
- self.m.file.listdir('arm-tools malioc contents',
- arm_tools_cache_dir.join('mali_offline_compiler'))
+ self.m.file.listdir(
+ 'arm-tools malioc contents',
+ arm_tools_cache_dir.join('mali_offline_compiler')
+ )
env['ARM_TOOLS'] = arm_tools_cache_dir
def goldctl(self, env, env_prefixes, version):
@@ -290,7 +294,8 @@
with self.m.context(env=env, env_prefixes=env_prefixes):
self.m.step(
'Install dashing',
- ['go', 'install', 'github.com/technosophos/dashing@%s' % version],
+ ['go', 'install',
+ 'github.com/technosophos/dashing@%s' % version],
infra_step=True,
)
@@ -317,7 +322,7 @@
),
)
# Setup environment variables
- if (version == 'version:29.0'): # Handle the legacy case
+ if (version == 'version:29.0'): # Handle the legacy case
env['ANDROID_SDK_ROOT'] = sdk_root
env['ANDROID_HOME'] = sdk_root
env['ANDROID_NDK_PATH'] = sdk_root.join('ndk-bundle')
@@ -365,12 +370,13 @@
gemfile_dir(Path): The path to the location of the repository gemfile.
"""
deps_list = self.m.properties.get('dependencies', [])
- deps = {d['dependency']:d.get('version') for d in deps_list}
+ deps = {d['dependency']: d.get('version') for d in deps_list}
if 'gems' not in deps.keys():
# Noop if gems property is not set.
return
version = deps['gems']
- gemfile_dir = gem_dir or self.m.repo_util.sdk_checkout_path().join('dev', 'ci', 'mac')
+ gemfile_dir = gem_dir or self.m.repo_util.sdk_checkout_path(
+ ).join('dev', 'ci', 'mac')
gem_destination = self.m.path['start_dir'].join('gems')
env['GEM_HOME'] = gem_destination
self._install_ruby(env, env_prefixes, version)
@@ -390,16 +396,19 @@
lib_path = self.m.path['cache'].join('ruby')
self.m.step(
'Set ffi build flags',
- ['bundle', 'config',
- 'build.ffi', '--with-opt-dir=%s/gmp:%s' % (opt_path, lib_path)],
+ [
+ 'bundle', 'config', 'build.ffi',
+ '--with-opt-dir=%s/gmp:%s' % (opt_path, lib_path)
+ ],
infra_step=True,
)
self.m.step('install gems', ['bundler', 'install'], infra_step=True)
# Find major/minor ruby version
ruby_version = self.m.step(
- 'Ruby version', ['ruby', '-e', 'puts RUBY_VERSION'],
- stdout=self.m.raw_io.output_text(), ok_ret='any'
- ).stdout.rstrip()
+ 'Ruby version', ['ruby', '-e', 'puts RUBY_VERSION'],
+ stdout=self.m.raw_io.output_text(),
+ ok_ret='any'
+ ).stdout.rstrip()
parts = ruby_version.split('.')
parts[-1] = '0'
ruby_version = '.'.join(parts)
@@ -613,11 +622,15 @@
version = version or 'latest'
mobileprovision_path = self.m.path.mkdtemp().join('mobileprovision')
mobileprovision = self.m.cipd.EnsureFile()
- mobileprovision.add_package('flutter_internal/mac/mobileprovision/${platform}', version)
+ mobileprovision.add_package(
+ 'flutter_internal/mac/mobileprovision/${platform}', version
+ )
with self.m.step.nest('Installing Mac mobileprovision'):
self.m.cipd.ensure(mobileprovision_path, mobileprovision)
- mobileprovision_profile = mobileprovision_path.join('development.mobileprovision')
+ mobileprovision_profile = mobileprovision_path.join(
+ 'development.mobileprovision'
+ )
copy_script = self.resource('copy_mobileprovisioning_profile.sh')
self.m.step('Set execute permission', ['chmod', '755', copy_script])
self.m.step(
@@ -647,7 +660,8 @@
with self.m.context(env=env, env_prefixes=env_prefixes):
ruby_version = self.m.step(
'Ruby version', ['ruby', '-e', 'puts RUBY_VERSION'],
- stdout=self.m.raw_io.output_text(), ok_ret='any'
+ stdout=self.m.raw_io.output_text(),
+ ok_ret='any'
).stdout.rstrip()
parts = ruby_version.split('.')
parts[-1] = '0'
diff --git a/recipe_modules/flutter_deps/examples/full.py b/recipe_modules/flutter_deps/examples/full.py
index b509059..6e0565c 100644
--- a/recipe_modules/flutter_deps/examples/full.py
+++ b/recipe_modules/flutter_deps/examples/full.py
@@ -35,8 +35,8 @@
api.assertions.assertEqual(
env_prefixes.get('PATH'), [
api.path['cache'].join('chrome', 'chrome'),
- api.path['cache'].join('chrome', 'drivers'),
- api.path['cache'].join('firefox')
+ api.path['cache'].join('chrome',
+ 'drivers'), api.path['cache'].join('firefox')
]
)
api.flutter_deps.go_sdk(env, env_prefixes, 'v4')
@@ -56,7 +56,9 @@
api.flutter_deps.gradle_cache(env, env_prefixes, '')
api.flutter_deps.flutter_engine(env, env_prefixes)
api.flutter_deps.firebase(env, env_prefixes)
- api.flutter_deps.cmake(env, env_prefixes, version='build_id:8787856497187628321')
+ api.flutter_deps.cmake(
+ env, env_prefixes, version='build_id:8787856497187628321'
+ )
api.flutter_deps.codesign(env, env_prefixes, 'latest')
api.flutter_deps.cosign(env, env_prefixes)
api.flutter_deps.ninja(env, env_prefixes)
@@ -92,12 +94,16 @@
api.repo_util.flutter_environment_data(checkout_path),
)
yield api.test(
- 'with-gems', api.properties(dependencies=[{"dependency": "gems", "version": "v3.1.3"}]),
+ 'with-gems',
+ api.properties(
+ dependencies=[{"dependency": "gems", "version": "v3.1.3"}]
+ ),
api.repo_util.flutter_environment_data(checkout_path),
)
yield api.test(
- 'with-arm64ruby', api.properties(dependencies=[{"dependency": "gems"}]),
+ 'with-arm64ruby',
+ api.properties(dependencies=[{"dependency": "gems"}]),
api.repo_util.flutter_environment_data(checkout_path),
api.platform.arch('arm'),
)
@@ -121,17 +127,22 @@
api.repo_util.flutter_environment_data(checkout_path),
)
yield api.test(
- 'flutter_engine', api.properties(local_engine_cas_hash='abceqwe/7',),
+ 'flutter_engine',
+ api.properties(local_engine_cas_hash='abceqwe/7',),
api.repo_util.flutter_environment_data(checkout_path),
)
yield api.test(
- 'local_engine_cas', api.properties(local_engine_cas_hash='abceqwe/7',
- local_engine='host-release'),
+ 'local_engine_cas',
+ api.properties(
+ local_engine_cas_hash='abceqwe/7', local_engine='host-release'
+ ),
api.repo_util.flutter_environment_data(checkout_path),
)
yield api.test(
- 'local_web_sdk_cas', api.properties(local_web_sdk_cas_hash='abceqwe/7',
- local_web_sdk='wasm-release'),
+ 'local_web_sdk_cas',
+ api.properties(
+ local_web_sdk_cas_hash='abceqwe/7', local_web_sdk='wasm-release'
+ ),
api.repo_util.flutter_environment_data(checkout_path),
)
yield api.test(
@@ -140,7 +151,8 @@
api.repo_util.flutter_environment_data(checkout_path),
)
yield api.test(
- 'windows', api.properties(gold_tryjob=True, git_ref='refs/pull/1/head'),
+ 'windows',
+ api.properties(gold_tryjob=True, git_ref='refs/pull/1/head'),
api.repo_util.flutter_environment_data(checkout_path),
api.platform.name('win'),
)
diff --git a/recipe_modules/gcloud/api.py b/recipe_modules/gcloud/api.py
index 71aa085..9676fff 100644
--- a/recipe_modules/gcloud/api.py
+++ b/recipe_modules/gcloud/api.py
@@ -6,57 +6,56 @@
class GCloudApi(recipe_api.RecipeApi):
- """GCloudApi provides support for common gcloud operations."""
+ """GCloudApi provides support for common gcloud operations."""
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self.gcloud_patched = False
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.gcloud_patched = False
- @property
- def _gcloud_executable(self):
- with self.m.step.nest('ensure gcloud'):
- gcloud_dir = self.m.path['start_dir'].join('gcloud')
- gcloud_package = 'infra/3pp/tools/gcloud/${platform}'
- gcloud = self.m.cipd.EnsureFile().add_package(
- gcloud_package, "version:2@428.0.0.chromium.3")
- self.m.cipd.ensure(
- gcloud_dir,
- gcloud
- )
- tool_name = 'gcloud.cmd' if self.m.platform.is_win else 'gcloud'
- return gcloud_dir.join('bin', tool_name)
+ @property
+ def _gcloud_executable(self):
+ with self.m.step.nest('ensure gcloud'):
+ gcloud_dir = self.m.path['start_dir'].join('gcloud')
+ gcloud_package = 'infra/3pp/tools/gcloud/${platform}'
+ gcloud = self.m.cipd.EnsureFile().add_package(
+ gcloud_package, "version:2@428.0.0.chromium.3"
+ )
+ self.m.cipd.ensure(gcloud_dir, gcloud)
+ tool_name = 'gcloud.cmd' if self.m.platform.is_win else 'gcloud'
+ return gcloud_dir.join('bin', tool_name)
- def __call__(self, *args, **kwargs):
- """Executes specified gcloud command."""
- step_name = kwargs.pop("step_name", f"gcloud {args[0]}")
- cmd = [self._gcloud_executable] + list(args)
- return self.m.step(step_name, cmd, **kwargs)
+ def __call__(self, *args, **kwargs):
+ """Executes specified gcloud command."""
+ step_name = kwargs.pop("step_name", f"gcloud {args[0]}")
+ cmd = [self._gcloud_executable] + list(args)
+ return self.m.step(step_name, cmd, **kwargs)
- def container_image_exists(self, image):
- step_result = self(
- "container",
- "images",
- "describe",
- image,
- ok_ret="any",
- step_name=f"check existence of {image}",
- )
- return step_result.retcode == 0
+ def container_image_exists(self, image):
+ step_result = self(
+ "container",
+ "images",
+ "describe",
+ image,
+ ok_ret="any",
+ step_name=f"check existence of {image}",
+ )
+ return step_result.retcode == 0
- def patch_gcloud_invoker(self):
- """GCloud invoker has issues when running on bots, this API
+ def patch_gcloud_invoker(self):
+ """GCloud invoker has issues when running on bots, this API
patches the invoker to make it compatible with bots' python binary.
"""
- if self.gcloud_patched or not self._gcloud_executable:
- return
- gcloud_path = self.m.path.join(
- self.m.path.dirname(self.m.path.dirname(self._gcloud_executable)),
- "bin", "gcloud",
- )
- self.m.file.remove("remove gcloud wrapper", gcloud_path)
- self.m.file.copy(
- "copy patched gcloud",
- self.resource("gcloud"),
- gcloud_path,
- )
- self.gcloud_patched = True
+ if self.gcloud_patched or not self._gcloud_executable:
+ return
+ gcloud_path = self.m.path.join(
+ self.m.path.dirname(self.m.path.dirname(self._gcloud_executable)),
+ "bin",
+ "gcloud",
+ )
+ self.m.file.remove("remove gcloud wrapper", gcloud_path)
+ self.m.file.copy(
+ "copy patched gcloud",
+ self.resource("gcloud"),
+ gcloud_path,
+ )
+ self.gcloud_patched = True
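A minimal usage sketch for the module above (hypothetical recipe code, not part of this change; the image name and the `builds submit` invocation are assumptions):

    def RunSteps(api):
      image = 'gcr.io/example/my-image'  # hypothetical image
      # Skip the expensive build when the image is already in the registry.
      if not api.gcloud.container_image_exists(image):
        api.gcloud('builds', 'submit', '--tag', image, step_name='build image')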
diff --git a/recipe_modules/gcloud/tests/full.py b/recipe_modules/gcloud/tests/full.py
index c4ce46c..4038fbf 100644
--- a/recipe_modules/gcloud/tests/full.py
+++ b/recipe_modules/gcloud/tests/full.py
@@ -9,20 +9,20 @@
def RunSteps(api):
- api.gcloud(
- "alpha",
- "remote-build-execution",
- "worker-pools",
- "list",
- "--project=goma-fuchsia",
- "--instance=default_instance",
- )
- api.gcloud("help")
- api.gcloud.container_image_exists("gcr.io/goma_fuchsia/fuchsia_linux/base")
- api.gcloud.patch_gcloud_invoker()
- api.gcloud.patch_gcloud_invoker()
+ api.gcloud(
+ "alpha",
+ "remote-build-execution",
+ "worker-pools",
+ "list",
+ "--project=goma-fuchsia",
+ "--instance=default_instance",
+ )
+ api.gcloud("help")
+ api.gcloud.container_image_exists("gcr.io/goma_fuchsia/fuchsia_linux/base")
+ api.gcloud.patch_gcloud_invoker()
+ api.gcloud.patch_gcloud_invoker()
def GenTests(api):
- yield api.test("example")
- yield api.test("windows", api.platform.name("win"))
+ yield api.test("example")
+ yield api.test("windows", api.platform.name("win"))
diff --git a/recipe_modules/gerrit_util/__init__.py b/recipe_modules/gerrit_util/__init__.py
index 0d19893..2b171f6 100644
--- a/recipe_modules/gerrit_util/__init__.py
+++ b/recipe_modules/gerrit_util/__init__.py
@@ -1,3 +1 @@
-DEPS = [
- 'depot_tools/gerrit'
-]
+DEPS = ['depot_tools/gerrit']
diff --git a/recipe_modules/gerrit_util/api.py b/recipe_modules/gerrit_util/api.py
index b7287ac..005ef22 100644
--- a/recipe_modules/gerrit_util/api.py
+++ b/recipe_modules/gerrit_util/api.py
@@ -16,9 +16,10 @@
cl_number: The number of the requested CL.
"""
cl_information = self.m.gerrit.call_raw_api(
- 'https://%s' % host,
- '/changes/%s' % cl_number,
- accept_statuses=[200],
- name='get cl info %s' % cl_number)
+ 'https://%s' % host,
+ '/changes/%s' % cl_number,
+ accept_statuses=[200],
+ name='get cl info %s' % cl_number
+ )
return cl_information
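A hedged consumption sketch for `get_gerrit_cl_details` (assumes the step result decodes to a standard Gerrit ChangeInfo dict; field names follow the Gerrit REST API):

    cl = api.gerrit_util.get_gerrit_cl_details('flutter.googlesource.com', '12345')
    # 'branch' and 'status' are standard ChangeInfo fields.
    target_branch = cl.get('branch', 'main')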
diff --git a/recipe_modules/gerrit_util/examples/full.py b/recipe_modules/gerrit_util/examples/full.py
index 7631117..8c549b4 100644
--- a/recipe_modules/gerrit_util/examples/full.py
+++ b/recipe_modules/gerrit_util/examples/full.py
@@ -2,23 +2,17 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-DEPS = [
- 'flutter/gerrit_util',
- 'recipe_engine/json'
-]
+DEPS = ['flutter/gerrit_util', 'recipe_engine/json']
def RunSteps(api):
- api.gerrit_util.get_gerrit_cl_details(
- 'flutter.googlesource.com', '12345'
- )
+ api.gerrit_util.get_gerrit_cl_details('flutter.googlesource.com', '12345')
def GenTests(api):
- yield api.test(
- 'basic',
- api.step_data(
- 'gerrit get cl info 12345',
- api.json.output([('branch', 'main')])
- )
- )
+ yield api.test(
+ 'basic',
+ api.step_data(
+ 'gerrit get cl info 12345', api.json.output([('branch', 'main')])
+ )
+ )
diff --git a/recipe_modules/goma/api.py b/recipe_modules/goma/api.py
index 457c0d1..6e3eda5 100644
--- a/recipe_modules/goma/api.py
+++ b/recipe_modules/goma/api.py
@@ -8,195 +8,197 @@
class GomaApi(recipe_api.RecipeApi):
- """GomaApi contains helper functions for using goma."""
+ """GomaApi contains helper functions for using goma."""
- def __init__(self, props, *args, **kwargs):
- super().__init__(*args, **kwargs)
+ def __init__(self, props, *args, **kwargs):
+ super().__init__(*args, **kwargs)
- self._enable_arbitrary_toolchains = props.enable_arbitrary_toolchains
- self._goma_dir = props.goma_dir
- self._jobs = props.jobs
- self._server = (
- props.server or "rbe-prod1.endpoints.fuchsia-infra-goma-prod.cloud.goog"
- )
- self._goma_started = False
- self._goma_log_dir = None
+ self._enable_arbitrary_toolchains = props.enable_arbitrary_toolchains
+ self._goma_dir = props.goma_dir
+ self._jobs = props.jobs
+ self._server = (
+ props.server or "rbe-prod1.endpoints.fuchsia-infra-goma-prod.cloud.goog"
+ )
+ self._goma_started = False
+ self._goma_log_dir = None
- @contextmanager
- def __call__(self):
- """Make context wrapping goma start/stop."""
- # Some environment variables need to be set for both compiler_proxy and gomacc.
- # Push those variables used by both into context so the build can use
- # them.
- with self.m.context(
- env={
- # Allow user to override from the command line.
- "GOMA_TMP_DIR": self.m.context.env.get(
- "GOMA_TMP_DIR", self.m.path["cleanup"].join("goma")
- ),
- "GOMA_USE_LOCAL": False,
- }
- ):
- with self.m.step.nest("setup goma"):
- self._start()
- try:
- yield
- finally:
- if not self.m.runtime.in_global_shutdown:
- with self.m.step.nest("teardown goma"):
- self._stop()
+ @contextmanager
+ def __call__(self):
+ """Make context wrapping goma start/stop."""
+ # Some environment variables need to be set for both compiler_proxy and gomacc.
+ # Push those variables used by both into context so the build can use
+ # them.
+ with self.m.context(env={
+ # Allow user to override from the command line.
+ "GOMA_TMP_DIR": self.m.context.env.get(
+ "GOMA_TMP_DIR", self.m.path["cleanup"].join("goma")),
+ "GOMA_USE_LOCAL": False,
+ }):
+ with self.m.step.nest("setup goma"):
+ self._start()
+ try:
+ yield
+ finally:
+ if not self.m.runtime.in_global_shutdown:
+ with self.m.step.nest("teardown goma"):
+ self._stop()
- @property
- def jobs(self):
- """Returns number of jobs for parallel build using Goma."""
- if self._jobs:
- return self._jobs
- # Based on measurements, anything beyond 10*cpu_count won't improve
- # build speed. For safety, set an upper limit of 1000.
- return min(10 * self.m.platform.cpu_count, 1000)
+ @property
+ def jobs(self):
+ """Returns number of jobs for parallel build using Goma."""
+ if self._jobs:
+ return self._jobs
+ # Based on measurements, anything beyond 10*cpu_count won't improve
+ # build speed. For safety, set an upper limit of 1000.
+ return min(10 * self.m.platform.cpu_count, 1000)
- @property
- def goma_dir(self):
- if not self._goma_dir:
- self._ensure()
- return self._goma_dir
+ @property
+ def goma_dir(self):
+ if not self._goma_dir:
+ self._ensure()
+ return self._goma_dir
- @property
- def _stats_path(self):
- return self.m.path.join(self.goma_dir, "goma_stats.json")
+ @property
+ def _stats_path(self):
+ return self.m.path.join(self.goma_dir, "goma_stats.json")
- def initialize(self):
- self._goma_log_dir = self.m.path["cleanup"]
- if self.m.platform.is_win:
- self._enable_arbitrary_toolchains = True
+ def initialize(self):
+ self._goma_log_dir = self.m.path["cleanup"]
+ if self.m.platform.is_win:
+ self._enable_arbitrary_toolchains = True
- def set_path(self, path):
- self._goma_dir = path
+ def set_path(self, path):
+ self._goma_dir = path
- def _ensure(self):
- if self._goma_dir:
- return
+ def _ensure(self):
+ if self._goma_dir:
+ return
- with self.m.step.nest("ensure goma"), self.m.context(infra_steps=True):
- self._goma_dir = self.m.path["cache"].join("goma", "client")
- if self.m.platform.is_mac:
- # On mac always use the x64 package.
- # TODO(godofredoc): Remove this workaround and unfork once fuchsia has an arm package.
- package_path = "fuchsia/third_party/goma/client/mac-amd64"
- else:
- package_path = "fuchsia/third_party/goma/client/${platform}"
+ with self.m.step.nest("ensure goma"), self.m.context(infra_steps=True):
+ self._goma_dir = self.m.path["cache"].join("goma", "client")
+ if self.m.platform.is_mac:
+ # On mac always use the x64 package.
+ # TODO(godofredoc): Remove this workaround and unfork once fuchsia has an arm package.
+ package_path = "fuchsia/third_party/goma/client/mac-amd64"
+ else:
+ package_path = "fuchsia/third_party/goma/client/${platform}"
- self.m.cipd.ensure(
- self._goma_dir,
- self.m.cipd.EnsureFile().add_package(
- package_path, "integration"
- ),
- )
+ self.m.cipd.ensure(
+ self._goma_dir,
+ self.m.cipd.EnsureFile().add_package(package_path, "integration"),
+ )
- def _goma_ctl(self, step_name, args, **kwargs):
- """Run a goma_ctl.py subcommand."""
- env = {
- "GLOG_log_dir": self._goma_log_dir,
- "GOMA_CACHE_DIR": self.m.path["cache"].join("goma"),
- "GOMA_DEPS_CACHE_FILE": "goma_deps_cache",
- "GOMA_LOCAL_OUTPUT_CACHE_DIR": self.m.path["cache"].join(
- "goma", "localoutputcache"
- ),
- "GOMA_STORE_LOCAL_RUN_OUTPUT": True,
- "GOMA_SERVER_HOST": self._server,
- "GOMA_DUMP_STATS_FILE": self._stats_path,
- # The next power of 2 larger than the currently known largest
- # output (153565624) from the core.x64 profile build.
- "GOMA_MAX_SUM_OUTPUT_SIZE_IN_MB": 256,
- }
- if self._enable_arbitrary_toolchains:
- env["GOMA_ARBITRARY_TOOLCHAIN_SUPPORT"] = True
-
- with self.m.context(env=env, infra_steps=True):
- return self.m.python3(
- step_name,
- [self.m.path.join(self.goma_dir, "goma_ctl.py")] + list(args),
- **kwargs,
- )
-
- def _run_jsonstatus(self):
- step = self._goma_ctl(
- "goma jsonstatus",
- ["jsonstatus", self.m.json.output(add_json_log=True)],
- step_test_data=lambda: self.m.json.test_api.output({"foo": "bar"}),
- )
- if step.json.output is None:
- step.presentation.status = self.m.step.WARNING
-
- def _upload_goma_stats(self):
- stats = self.m.file.read_json(
- "read goma_stats.json",
+ def _goma_ctl(self, step_name, args, **kwargs):
+ """Run a goma_ctl.py subcommand."""
+ env = {
+ "GLOG_log_dir":
+ self._goma_log_dir,
+ "GOMA_CACHE_DIR":
+ self.m.path["cache"].join("goma"),
+ "GOMA_DEPS_CACHE_FILE":
+ "goma_deps_cache",
+ "GOMA_LOCAL_OUTPUT_CACHE_DIR":
+ self.m.path["cache"].join("goma", "localoutputcache"),
+ "GOMA_STORE_LOCAL_RUN_OUTPUT":
+ True,
+ "GOMA_SERVER_HOST":
+ self._server,
+ "GOMA_DUMP_STATS_FILE":
self._stats_path,
- test_data={},
- include_log=False,
+ # The next power of 2 larger than the currently known largest
+ # output (153565624) from the core.x64 profile build.
+ "GOMA_MAX_SUM_OUTPUT_SIZE_IN_MB":
+ 256,
+ }
+ if self._enable_arbitrary_toolchains:
+ env["GOMA_ARBITRARY_TOOLCHAIN_SUPPORT"] = True
+
+ with self.m.context(env=env, infra_steps=True):
+ return self.m.python3(
+ step_name,
+ [self.m.path.join(self.goma_dir, "goma_ctl.py")] + list(args),
+ **kwargs,
+ )
+
+ def _run_jsonstatus(self):
+ step = self._goma_ctl(
+ "goma jsonstatus",
+ ["jsonstatus", self.m.json.output(add_json_log=True)],
+ step_test_data=lambda: self.m.json.test_api.output({"foo": "bar"}),
+ )
+ if step.json.output is None:
+ step.presentation.status = self.m.step.WARNING
+
+ def _upload_goma_stats(self):
+ stats = self.m.file.read_json(
+ "read goma_stats.json",
+ self._stats_path,
+ test_data={},
+ include_log=False,
+ )
+ if not (self.m.buildbucket.builder_name and self.m.buildbucket.build.id):
+ # Skip the upload if the build input information is unavailable.
+ return
+ stats["build_info"] = {
+ "build_id": self.m.buildbucket.build.id,
+ "builder": self.m.buildbucket.builder_name,
+ "time_stamp": str(self.m.time.utcnow()),
+ "time_stamp_int": self.m.time.ms_since_epoch(),
+ }
+ self.m.step.active_result.presentation.logs["json.output"
+ ] = self.m.json.dumps(
+ stats, indent=4
+ ).splitlines()
+
+ self.m.bqupload.insert(
+ step_name="upload goma stats to bigquery",
+ project="fuchsia-infra",
+ dataset="artifacts",
+ table="builds_beta_goma",
+ rows=[stats],
+ ok_ret="all",
+ )
+
+ def _start(self):
+ """Start goma compiler proxy."""
+ assert not self._goma_started
+
+ self._ensure()
+
+ try:
+ self._goma_ctl("start goma", ["restart"])
+ self._goma_started = True
+ except self.m.step.StepFailure: # pragma: no cover
+ with self.m.step.defer_results():
+ self._run_jsonstatus()
+ self._goma_ctl("stop goma (start failure)", ["stop"])
+ raise
+
+ def _stop(self):
+ """Stop goma compiler proxy."""
+ assert self._goma_started
+
+ with self.m.step.defer_results():
+ self._run_jsonstatus()
+ self._goma_ctl("goma stats", ["stat"])
+ self._goma_ctl("stop goma", ["stop"])
+
+ self._goma_started = False
+
+ compiler_proxy_warning_log_path = self._goma_log_dir.join(
+ "compiler_proxy.WARNING"
+ )
+ # Not all builds use goma, so it might not exist.
+ self.m.path.mock_add_paths(compiler_proxy_warning_log_path)
+ if self.m.path.exists(compiler_proxy_warning_log_path):
+ try:
+ self.m.file.read_text(
+ "read goma_client warning log",
+ compiler_proxy_warning_log_path,
+ test_data="test log",
)
- if not (self.m.buildbucket.builder_name and self.m.buildbucket.build.id):
- # Skip the upload if the build input information is unavailable.
- return
- stats["build_info"] = {
- "build_id": self.m.buildbucket.build.id,
- "builder": self.m.buildbucket.builder_name,
- "time_stamp": str(self.m.time.utcnow()),
- "time_stamp_int": self.m.time.ms_since_epoch(),
- }
- self.m.step.active_result.presentation.logs["json.output"] = self.m.json.dumps(
- stats, indent=4
- ).splitlines()
+ except self.m.step.StepFailure: # pragma: no cover
+ # Ignore. Not a big deal.
+ pass
- self.m.bqupload.insert(
- step_name="upload goma stats to bigquery",
- project="fuchsia-infra",
- dataset="artifacts",
- table="builds_beta_goma",
- rows=[stats],
- ok_ret="all",
- )
-
- def _start(self):
- """Start goma compiler proxy."""
- assert not self._goma_started
-
- self._ensure()
-
- try:
- self._goma_ctl("start goma", ["restart"])
- self._goma_started = True
- except self.m.step.StepFailure: # pragma: no cover
- with self.m.step.defer_results():
- self._run_jsonstatus()
- self._goma_ctl("stop goma (start failure)", ["stop"])
- raise
-
- def _stop(self):
- """Stop goma compiler proxy."""
- assert self._goma_started
-
- with self.m.step.defer_results():
- self._run_jsonstatus()
- self._goma_ctl("goma stats", ["stat"])
- self._goma_ctl("stop goma", ["stop"])
-
- self._goma_started = False
-
- compiler_proxy_warning_log_path = self._goma_log_dir.join(
- "compiler_proxy.WARNING"
- )
- # Not all builds use goma, so it might not exist.
- self.m.path.mock_add_paths(compiler_proxy_warning_log_path)
- if self.m.path.exists(compiler_proxy_warning_log_path):
- try:
- self.m.file.read_text(
- "read goma_client warning log",
- compiler_proxy_warning_log_path,
- test_data="test log",
- )
- except self.m.step.StepFailure: # pragma: no cover
- # Ignore. Not a big deal.
- pass
-
- self._upload_goma_stats()
+ self._upload_goma_stats()
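A sketch of the wrapping pattern the goma context manager above is designed for (the ninja invocation and output directory are assumptions, not part of this change):

    with api.goma():
      # GOMA_TMP_DIR and GOMA_USE_LOCAL are already set in this context.
      api.step('build', ['ninja', '-C', 'out/Release', '-j', str(api.goma.jobs)])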
diff --git a/recipe_modules/goma/tests/full.py b/recipe_modules/goma/tests/full.py
index e7ba04b..6ef4822 100644
--- a/recipe_modules/goma/tests/full.py
+++ b/recipe_modules/goma/tests/full.py
@@ -15,65 +15,63 @@
def RunSteps(api):
- # Use goma_dir outside of the context.
- api.goma.goma_dir
- with api.goma():
- # build something using goma.
- api.step("echo goma jobs", ["echo", str(api.goma.jobs)])
+ # Use goma_dir outside of the context.
+ api.goma.goma_dir
+ with api.goma():
+ # build something using goma.
+ api.step("echo goma jobs", ["echo", str(api.goma.jobs)])
- api.goma.set_path(api.goma.goma_dir)
+ api.goma.set_path(api.goma.goma_dir)
def GenTests(api):
- def goma_properties(**kwargs):
- return api.properties(**{"$flutter/goma": InputProperties(**kwargs)})
- yield api.test("mac") + api.platform.name("mac")
+ def goma_properties(**kwargs):
+ return api.properties(**{"$flutter/goma": InputProperties(**kwargs)})
- yield api.test("win") + api.platform.name("win")
+ yield api.test("mac") + api.platform.name("mac")
- yield (
- api.test("linux_goma_dir")
- + api.platform.name("linux")
- + goma_properties(goma_dir="path/to/goma")
- )
+ yield api.test("win") + api.platform.name("win")
- yield (
- api.test("linux_jobs") + api.platform.name("linux") + goma_properties(jobs=80)
- )
+ yield (
+ api.test("linux_goma_dir") + api.platform.name("linux") +
+ goma_properties(goma_dir="path/to/goma")
+ )
- yield (
- api.test("linux_non_default_server")
- + api.platform.name("linux")
- + goma_properties(server="goma.fuchsia.dev")
- )
+ yield (
+ api.test("linux_jobs") + api.platform.name("linux") +
+ goma_properties(jobs=80)
+ )
- yield (
- api.test("linux_arbitrary_toolchain")
- + api.platform.name("linux")
- + goma_properties(enable_arbitrary_toolchains=True)
- )
+ yield (
+ api.test("linux_non_default_server") + api.platform.name("linux") +
+ goma_properties(server="goma.fuchsia.dev")
+ )
- yield (
- api.test("linux_start_goma_failed", status="INFRA_FAILURE")
- + api.platform.name("linux")
- + api.step_data("setup goma.start goma", retcode=1)
- )
+ yield (
+ api.test("linux_arbitrary_toolchain") + api.platform.name("linux") +
+ goma_properties(enable_arbitrary_toolchains=True)
+ )
- yield (
- api.test("linux_stop_goma_failed", status="INFRA_FAILURE")
- + api.platform.name("linux")
- + api.step_data("teardown goma.stop goma", retcode=1)
- )
+ yield (
+ api.test("linux_start_goma_failed", status="INFRA_FAILURE") +
+ api.platform.name("linux") +
+ api.step_data("setup goma.start goma", retcode=1)
+ )
- yield (
- api.test("linux_invalid_goma_jsonstatus")
- + api.platform.name("linux")
- + api.step_data("teardown goma.goma jsonstatus", api.json.output(data=None))
- )
+ yield (
+ api.test("linux_stop_goma_failed", status="INFRA_FAILURE") +
+ api.platform.name("linux") +
+ api.step_data("teardown goma.stop goma", retcode=1)
+ )
- yield (
- api.test("valid_buildname_and_build_id")
- + api.platform.name("linux")
- + api.buildbucket.try_build(project="test", builder="test")
- )
+ yield (
+ api.test("linux_invalid_goma_jsonstatus") + api.platform.name("linux") +
+ api
+ .step_data("teardown goma.goma jsonstatus", api.json.output(data=None))
+ )
+
+ yield (
+ api.test("valid_buildname_and_build_id") + api.platform.name("linux") +
+ api.buildbucket.try_build(project="test", builder="test")
+ )
diff --git a/recipe_modules/gsutil/api.py b/recipe_modules/gsutil/api.py
index d5ce30d..79e3c32 100644
--- a/recipe_modules/gsutil/api.py
+++ b/recipe_modules/gsutil/api.py
@@ -6,25 +6,25 @@
class GSUtilApi(recipe_api.RecipeApi):
- """GSUtilApi provides support for GSUtil."""
+ """GSUtilApi provides support for GSUtil."""
- @recipe_api.non_step
- def join(self, *parts):
- """Constructs a GS path from composite parts."""
- return "/".join(p.strip("/") for p in parts)
+ @recipe_api.non_step
+ def join(self, *parts):
+ """Constructs a GS path from composite parts."""
+ return "/".join(p.strip("/") for p in parts)
- def upload_namespaced_file(
- self,
- source,
- bucket,
- subpath,
- namespace=None,
- metadata=None,
- no_clobber=True,
- unauthenticated_url=False,
- **kwargs,
- ):
- """Uploads a file to GCS under a subpath specific to the given build.
+ def upload_namespaced_file(
+ self,
+ source,
+ bucket,
+ subpath,
+ namespace=None,
+ metadata=None,
+ no_clobber=True,
+ unauthenticated_url=False,
+ **kwargs,
+ ):
+ """Uploads a file to GCS under a subpath specific to the given build.
Will upload the file to:
gs://<bucket>/<build id>/<subpath or basename of file>
@@ -43,22 +43,22 @@
unauthenticated_url (bool): Whether to present a URL that requires
no authentication in the GCP web UI.
"""
- kwargs.setdefault("link_name", subpath)
- return self.upload(
- bucket=bucket,
- src=source,
- dst=self.namespaced_gcs_path(subpath, namespace),
- metadata=metadata,
- no_clobber=no_clobber,
- unauthenticated_url=unauthenticated_url,
- name=f"upload {subpath} to {bucket}",
- **kwargs,
- )
+ kwargs.setdefault("link_name", subpath)
+ return self.upload(
+ bucket=bucket,
+ src=source,
+ dst=self.namespaced_gcs_path(subpath, namespace),
+ metadata=metadata,
+ no_clobber=no_clobber,
+ unauthenticated_url=unauthenticated_url,
+ name=f"upload {subpath} to {bucket}",
+ **kwargs,
+ )
- def upload_namespaced_directory(
- self, source, bucket, subpath, namespace=None, rsync=True, **kwargs
- ):
- """Uploads a directory to GCS under a subpath specific to the given build.
+ def upload_namespaced_directory(
+ self, source, bucket, subpath, namespace=None, rsync=True, **kwargs
+ ):
+ """Uploads a directory to GCS under a subpath specific to the given build.
Will upload the directory to:
gs://<bucket>/<build id>/<subpath>
@@ -73,178 +73,180 @@
rsync (bool): Whether to use rsync, which is idempotent but
sometimes less reliable.
"""
- kwargs.setdefault("link_name", subpath)
- func = self.upload
- if rsync:
- func = self.rsync
- return func(
- bucket=bucket,
- src=source,
- dst=self.namespaced_gcs_path(subpath, namespace),
- recursive=True,
- multithreaded=True,
- no_clobber=True,
- name=f"upload {subpath} to {bucket}",
- **kwargs,
- )
+ kwargs.setdefault("link_name", subpath)
+ func = self.upload
+ if rsync:
+ func = self.rsync
+ return func(
+ bucket=bucket,
+ src=source,
+ dst=self.namespaced_gcs_path(subpath, namespace),
+ recursive=True,
+ multithreaded=True,
+ no_clobber=True,
+ name=f"upload {subpath} to {bucket}",
+ **kwargs,
+ )
- def namespaced_gcs_path(self, relative_path, namespace=None):
- if not namespace:
- namespace = self.m.buildbucket_util.id
- return f"builds/{namespace}/{relative_path}"
+ def namespaced_gcs_path(self, relative_path, namespace=None):
+ if not namespace:
+ namespace = self.m.buildbucket_util.id
+ return f"builds/{namespace}/{relative_path}"
- def http_url(self, bucket, dest, unauthenticated_url=False):
- base = (
- "https://storage.googleapis.com"
- if unauthenticated_url
- else "https://storage.cloud.google.com"
- )
- return f"{base}/{bucket}/{self.m.url.quote(dest)}"
+ def http_url(self, bucket, dest, unauthenticated_url=False):
+ base = (
+ "https://storage.googleapis.com"
+ if unauthenticated_url else "https://storage.cloud.google.com"
+ )
+ return f"{base}/{bucket}/{self.m.url.quote(dest)}"
- def _directory_listing_url(self, bucket, dest):
- """Returns the URL for a GCS bucket subdirectory listing in the GCP console."""
- return (
- f"https://console.cloud.google.com/storage/browser/{bucket}/"
- f"{self.m.url.quote(dest)}"
- )
+ def _directory_listing_url(self, bucket, dest):
+ """Returns the URL for a GCS bucket subdirectory listing in the GCP console."""
+ return (
+ f"https://console.cloud.google.com/storage/browser/{bucket}/"
+ f"{self.m.url.quote(dest)}"
+ )
- def namespaced_directory_url(self, bucket, subpath="", namespace=None):
- return self._directory_listing_url(
- bucket,
- self.namespaced_gcs_path(subpath, namespace),
- )
+ def namespaced_directory_url(self, bucket, subpath="", namespace=None):
+ return self._directory_listing_url(
+ bucket,
+ self.namespaced_gcs_path(subpath, namespace),
+ )
- @staticmethod
- def _get_metadata_field(name, provider_prefix=None):
- """Returns: (str) the metadata field to use with Google Storage
+ @staticmethod
+ def _get_metadata_field(name, provider_prefix=None):
+ """Returns: (str) the metadata field to use with Google Storage
The Google Storage specification for metadata can be found at:
https://developers.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata
"""
- # Already contains custom provider prefix
- if name.lower().startswith("x-"):
- return name
+ # Already contains custom provider prefix
+ if name.lower().startswith("x-"):
+ return name
- # See if it's innately supported by Google Storage
- if name in (
- "Cache-Control",
- "Content-Disposition",
- "Content-Encoding",
- "Content-Language",
- "Content-MD5",
- "Content-Type",
- "Custom-Time",
- ):
- return name
-
- # Add provider prefix
- if not provider_prefix:
- provider_prefix = "x-goog-meta"
- return f"{provider_prefix}-{name}"
-
- @staticmethod
- def unauthenticated_url(url):
- """Transform an authenticated URL to an unauthenticated URL."""
- return url.replace(
- "https://storage.cloud.google.com/", "https://storage.googleapis.com/"
- )
-
- def _add_custom_time(self, metadata):
- if not metadata:
- metadata = {}
- metadata["Custom-Time"] = self.m.time.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
- return metadata
-
- def upload(
- self,
- bucket,
- src,
- dst,
- link_name="gsutil.upload",
- unauthenticated_url=False,
- recursive=False,
- no_clobber=False,
- gzip_exts=(),
- **kwargs,
+ # See if it's innately supported by Google Storage
+ if name in (
+ "Cache-Control",
+ "Content-Disposition",
+ "Content-Encoding",
+ "Content-Language",
+ "Content-MD5",
+ "Content-Type",
+ "Custom-Time",
):
- kwargs["metadata"] = self._add_custom_time(kwargs.pop("metadata", {}))
- args = ["cp"]
- if recursive:
- args.append("-r")
- if no_clobber:
- args.append("-n")
- if gzip_exts:
- args.extend(["-j"] + gzip_exts)
- args.extend([src, f"gs://{bucket}/{dst}"])
- if not recursive or no_clobber:
- # gsutil supports resumable uploads if we run the same command
- # again, but it's only safe to resume uploading if we're only
- # uploading a single file, or if we're operating in no_clobber mode.
- step = self.m.utils.retry(
- lambda: self._run(*args, **kwargs),
- max_attempts=3,
- )
- else:
- step = self._run(*args, **kwargs)
- if link_name:
- link_url = self.http_url(
- bucket, dst, unauthenticated_url=unauthenticated_url
- )
- step.presentation.links[link_name] = link_url
- return step
+ return name
- def rsync(
- self,
- bucket,
- src,
- dst,
- link_name="gsutil.rsync",
- recursive=True,
- no_clobber=False,
- gzip_exts=(),
- **kwargs,
- ):
- kwargs["metadata"] = self._add_custom_time(kwargs.pop("metadata", {}))
- args = ["rsync"]
- if recursive:
- args.append("-r")
- if no_clobber:
- # This will skip files already existing in dst with a later
- # timestamp.
- args.append("-u")
- if gzip_exts:
- args.extend(["-j"] + gzip_exts)
- args.extend([src, f"gs://{bucket}/{dst}"])
- step = self.m.utils.retry(lambda: self._run(*args, **kwargs), max_attempts=3)
- if link_name:
- link_url = self._directory_listing_url(bucket, dst)
- step.presentation.links[link_name] = link_url
- return step
+ # Add provider prefix
+ if not provider_prefix:
+ provider_prefix = "x-goog-meta"
+ return f"{provider_prefix}-{name}"
- def copy(
- self,
- src_bucket,
- src,
- dst_bucket,
- dst,
- link_name="gsutil.copy",
- unauthenticated_url=False,
- recursive=False,
- **kwargs,
- ):
- args = ["cp"]
- if recursive:
- args.append("-r")
- args.extend([f"gs://{src_bucket}/{src}", f"gs://{dst_bucket}/{dst}"])
- step = self._run(*args, **kwargs)
- if link_name:
- step.presentation.links[link_name] = self.http_url(
- dst_bucket, dst, unauthenticated_url=unauthenticated_url
- )
- return step
+ @staticmethod
+ def unauthenticated_url(url):
+ """Transform an authenticated URL to an unauthenticated URL."""
+ return url.replace(
+ "https://storage.cloud.google.com/", "https://storage.googleapis.com/"
+ )
- def download(self, src_bucket, src, dest, recursive=False, **kwargs):
- """Downloads gcs bucket file to local disk.
+ def _add_custom_time(self, metadata):
+ if not metadata:
+ metadata = {}
+ metadata["Custom-Time"] = self.m.time.utcnow(
+ ).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+ return metadata
+
+ def upload(
+ self,
+ bucket,
+ src,
+ dst,
+ link_name="gsutil.upload",
+ unauthenticated_url=False,
+ recursive=False,
+ no_clobber=False,
+ gzip_exts=(),
+ **kwargs,
+ ):
+ kwargs["metadata"] = self._add_custom_time(kwargs.pop("metadata", {}))
+ args = ["cp"]
+ if recursive:
+ args.append("-r")
+ if no_clobber:
+ args.append("-n")
+ if gzip_exts:
+ args.extend(["-j"] + gzip_exts)
+ args.extend([src, f"gs://{bucket}/{dst}"])
+ if not recursive or no_clobber:
+ # gsutil supports resumable uploads if we run the same command
+ # again, but it's only safe to resume uploading if we're only
+ # uploading a single file, or if we're operating in no_clobber mode.
+ step = self.m.utils.retry(
+ lambda: self._run(*args, **kwargs),
+ max_attempts=3,
+ )
+ else:
+ step = self._run(*args, **kwargs)
+ if link_name:
+ link_url = self.http_url(
+ bucket, dst, unauthenticated_url=unauthenticated_url
+ )
+ step.presentation.links[link_name] = link_url
+ return step
+
+ def rsync(
+ self,
+ bucket,
+ src,
+ dst,
+ link_name="gsutil.rsync",
+ recursive=True,
+ no_clobber=False,
+ gzip_exts=(),
+ **kwargs,
+ ):
+ kwargs["metadata"] = self._add_custom_time(kwargs.pop("metadata", {}))
+ args = ["rsync"]
+ if recursive:
+ args.append("-r")
+ if no_clobber:
+ # This will skip files already existing in dst with a later
+ # timestamp.
+ args.append("-u")
+ if gzip_exts:
+ args.extend(["-j"] + gzip_exts)
+ args.extend([src, f"gs://{bucket}/{dst}"])
+ step = self.m.utils.retry(
+ lambda: self._run(*args, **kwargs), max_attempts=3
+ )
+ if link_name:
+ link_url = self._directory_listing_url(bucket, dst)
+ step.presentation.links[link_name] = link_url
+ return step
+
+ def copy(
+ self,
+ src_bucket,
+ src,
+ dst_bucket,
+ dst,
+ link_name="gsutil.copy",
+ unauthenticated_url=False,
+ recursive=False,
+ **kwargs,
+ ):
+ args = ["cp"]
+ if recursive:
+ args.append("-r")
+ args.extend([f"gs://{src_bucket}/{src}", f"gs://{dst_bucket}/{dst}"])
+ step = self._run(*args, **kwargs)
+ if link_name:
+ step.presentation.links[link_name] = self.http_url(
+ dst_bucket, dst, unauthenticated_url=unauthenticated_url
+ )
+ return step
+
+ def download(self, src_bucket, src, dest, recursive=False, **kwargs):
+ """Downloads gcs bucket file to local disk.
Args:
src_bucket (str): gcs bucket name.
@@ -252,38 +254,40 @@
recursive (bool): whether to copy recursively.
dest (str): local file path root to copy to.
"""
- args = ["cp"]
- if recursive:
- args.append("-r")
- args.extend([f"gs://{src_bucket}/{src}", dest])
- return self._run(*args, **kwargs)
+ args = ["cp"]
+ if recursive:
+ args.append("-r")
+ args.extend([f"gs://{src_bucket}/{src}", dest])
+ return self._run(*args, **kwargs)
- @property
- def _gsutil_tool(self):
- return self.m.ensure_tool("gsutil", self.resource("tool_manifest.json"))
+ @property
+ def _gsutil_tool(self):
+ return self.m.ensure_tool("gsutil", self.resource("tool_manifest.json"))
- def _run(self, *args, **kwargs):
- """Return a step to run arbitrary gsutil command."""
- assert self._gsutil_tool
- name = kwargs.pop("name", "gsutil " + args[0])
- infra_step = kwargs.pop("infra_step", True)
- cmd_prefix = [self._gsutil_tool]
- # Note that metadata arguments have to be passed before the command.
- metadata = kwargs.pop("metadata", [])
- if metadata:
- for k, v in sorted(metadata.items()):
- field = self._get_metadata_field(k)
- param = (field) if v is None else (f"{field}:{v}")
- cmd_prefix.extend(["-h", param])
- options = kwargs.pop("options", {})
- options["software_update_check_period"] = 0
- if options:
- for k, v in sorted(options.items()):
- cmd_prefix.extend(["-o", f"GSUtil:{k}={v}"])
- if kwargs.pop("multithreaded", False):
- cmd_prefix.extend(["-m"])
+ def _run(self, *args, **kwargs):
+ """Return a step to run arbitrary gsutil command."""
+ assert self._gsutil_tool
+ name = kwargs.pop("name", "gsutil " + args[0])
+ infra_step = kwargs.pop("infra_step", True)
+ cmd_prefix = [self._gsutil_tool]
+ # Note that metadata arguments have to be passed before the command.
+ metadata = kwargs.pop("metadata", [])
+ if metadata:
+ for k, v in sorted(metadata.items()):
+ field = self._get_metadata_field(k)
+ param = (field) if v is None else (f"{field}:{v}")
+ cmd_prefix.extend(["-h", param])
+ options = kwargs.pop("options", {})
+ options["software_update_check_period"] = 0
+ if options:
+ for k, v in sorted(options.items()):
+ cmd_prefix.extend(["-o", f"GSUtil:{k}={v}"])
+ if kwargs.pop("multithreaded", False):
+ cmd_prefix.extend(["-m"])
- # The `gsutil` executable is a Python script with a shebang, and Windows
- # doesn't support shebangs, so we have to run it via Python.
- step_func = self.m.python3 if self.m.platform.is_win else self.m.step
- return step_func(name, cmd_prefix + list(args), infra_step=infra_step, **kwargs)
+ # The `gsutil` executable is a Python script with a shebang, and Windows
+ # doesn't support shebangs, so we have to run it via Python.
+ step_func = self.m.python3 if self.m.platform.is_win else self.m.step
+ return step_func(
+ name, cmd_prefix + list(args), infra_step=infra_step, **kwargs
+ )
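A worked example of the flag construction in `_run` above (values illustrative; the Custom-Time header added by `_add_custom_time` is omitted for brevity):

    # metadata={'Test-Field': 'value', 'Remove-Me': None} sorts to
    # ('Remove-Me', None), ('Test-Field', 'value') and yields:
    #   -h x-goog-meta-Remove-Me -h x-goog-meta-Test-Field:value
    # followed by the always-present option:
    #   -o GSUtil:software_update_check_period=0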
diff --git a/recipe_modules/gsutil/tests/full.py b/recipe_modules/gsutil/tests/full.py
index 2b9e291..bde8403 100644
--- a/recipe_modules/gsutil/tests/full.py
+++ b/recipe_modules/gsutil/tests/full.py
@@ -13,49 +13,51 @@
def RunSteps(api):
- api.gsutil.upload_namespaced_file(
- BUCKET,
- api.path["cleanup"].join("file"),
- api.gsutil.join("path", "to", "file"),
- metadata={
- "Test-Field": "value",
- "Remove-Me": None,
- "x-custom-field": "custom-value",
- "Cache-Control": "no-cache",
- },
- unauthenticated_url=True,
- options={"parallel_composite_upload_threshold": "50M"},
- )
+ api.gsutil.upload_namespaced_file(
+ BUCKET,
+ api.path["cleanup"].join("file"),
+ api.gsutil.join("path", "to", "file"),
+ metadata={
+ "Test-Field": "value",
+ "Remove-Me": None,
+ "x-custom-field": "custom-value",
+ "Cache-Control": "no-cache",
+ },
+ unauthenticated_url=True,
+ options={"parallel_composite_upload_threshold": "50M"},
+ )
- api.gsutil.upload_namespaced_directory(
- api.path["cleanup"].join("dir"),
- BUCKET,
- "rsync_subpath",
- gzip_exts=["html"],
- )
- api.gsutil.upload_namespaced_directory(
- api.path["cleanup"].join("dir"),
- BUCKET,
- "cp_subpath",
- rsync=False,
- gzip_exts=["html"],
- )
- api.gsutil.upload(BUCKET, api.path["cleanup"].join("dir"), "dir", recursive=True)
+ api.gsutil.upload_namespaced_directory(
+ api.path["cleanup"].join("dir"),
+ BUCKET,
+ "rsync_subpath",
+ gzip_exts=["html"],
+ )
+ api.gsutil.upload_namespaced_directory(
+ api.path["cleanup"].join("dir"),
+ BUCKET,
+ "cp_subpath",
+ rsync=False,
+ gzip_exts=["html"],
+ )
+ api.gsutil.upload(
+ BUCKET, api.path["cleanup"].join("dir"), "dir", recursive=True
+ )
- api.gsutil.copy(BUCKET, "foo", BUCKET, "bar", recursive=True)
- api.gsutil.download(BUCKET, "foo", "tmp/", recursive=True)
+ api.gsutil.copy(BUCKET, "foo", BUCKET, "bar", recursive=True)
+ api.gsutil.download(BUCKET, "foo", "tmp/", recursive=True)
- api.gsutil.unauthenticated_url("https://storage.cloud.google.com/foo/bar")
+ api.gsutil.unauthenticated_url("https://storage.cloud.google.com/foo/bar")
- dir_url = api.gsutil.namespaced_directory_url("bucket", "foo")
- assert dir_url.endswith("builds/8945511751514863184/foo"), dir_url
+ dir_url = api.gsutil.namespaced_directory_url("bucket", "foo")
+ assert dir_url.endswith("builds/8945511751514863184/foo"), dir_url
def GenTests(api):
- yield api.buildbucket_util.test("basic")
- yield (
- api.buildbucket_util.test("retry_on_failure")
- # Cover the windows-specific codepath.
- + api.platform.name("win")
- + api.step_data(f"upload cp_subpath to {BUCKET}", retcode=1)
- )
+ yield api.buildbucket_util.test("basic")
+ yield (
+ api.buildbucket_util.test("retry_on_failure")
+ # Cover the windows-specific codepath.
+ + api.platform.name("win") +
+ api.step_data(f"upload cp_subpath to {BUCKET}", retcode=1)
+ )
diff --git a/recipe_modules/logs_util/api.py b/recipe_modules/logs_util/api.py
index bb513d3..ba41e09 100644
--- a/recipe_modules/logs_util/api.py
+++ b/recipe_modules/logs_util/api.py
@@ -85,14 +85,14 @@
return
uuid = self.m.uuid.random()
self.m.gsutil.upload(
- bucket='flutter_logs',
- source=file_path,
- dest='flutter/%s/%s/%s' % (git_hash, task_name, uuid),
- link_name='archive logs',
- args=['-r'],
- multithreaded=True,
- name='upload logs %s' % git_hash,
- unauthenticated_url=True
+ bucket='flutter_logs',
+ source=file_path,
+ dest='flutter/%s/%s/%s' % (git_hash, task_name, uuid),
+ link_name='archive logs',
+ args=['-r'],
+ multithreaded=True,
+ name='upload logs %s' % git_hash,
+ unauthenticated_url=True
)
def show_logs_stdout(self, file_path):
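For reference, the upload above writes logs to a path keyed by commit, task name, and a random uuid; a sketch with illustrative values:

    # git_hash='abc123', task_name='build_tests' uploads to:
    #   gs://flutter_logs/flutter/abc123/build_tests/<uuid>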
diff --git a/recipe_modules/logs_util/examples/full.py b/recipe_modules/logs_util/examples/full.py
index b44219c..dd1c3b6 100644
--- a/recipe_modules/logs_util/examples/full.py
+++ b/recipe_modules/logs_util/examples/full.py
@@ -5,11 +5,7 @@
from PB.recipe_modules.recipe_engine.swarming import properties
from recipe_engine.post_process import DoesNotRun, Filter, StatusFailure
-DEPS = [
- 'flutter/logs_util',
- 'recipe_engine/path',
- 'recipe_engine/file'
-]
+DEPS = ['flutter/logs_util', 'recipe_engine/path', 'recipe_engine/file']
def RunSteps(api):
diff --git a/recipe_modules/os_utils/api.py b/recipe_modules/os_utils/api.py
index 078ad25..5ea7bf5 100644
--- a/recipe_modules/os_utils/api.py
+++ b/recipe_modules/os_utils/api.py
@@ -10,6 +10,7 @@
TIMEOUT_PROPERTY = 'ios_debug_symbol_doctor_timeout_seconds'
+
class OsUtilsApi(recipe_api.RecipeApi):
"""Operating system utilities."""
@@ -108,7 +109,6 @@
infra_step=True,
)
-
def kill_simulators(self):
"""Kills any open simulators.
@@ -116,7 +116,8 @@
"""
if self.m.platform.is_mac:
self.m.step(
- 'kill dart', ['killall', '-9', 'com.apple.CoreSimulator.CoreSimulatorDevice'],
+ 'kill dart',
+ ['killall', '-9', 'com.apple.CoreSimulator.CoreSimulatorDevice'],
ok_ret='any',
infra_step=True
)
@@ -232,9 +233,9 @@
def _get_initial_timeout(self):
return self.m.properties.get(
- # This is not set by the builder config, but can be provided via LED
- TIMEOUT_PROPERTY,
- 120, # 2 minutes
+ # This is not set by the builder config, but can be provided via LED
+ TIMEOUT_PROPERTY,
+ 120, # 2 minutes
)
def ios_debug_symbol_doctor(self, diagnose_first=True):
@@ -260,39 +261,41 @@
# timeout of 16 minutes
retry_count = 4
with self.m.context(cwd=cocoon_path.join('device_doctor'),
- infra_steps=True):
- self.m.step(
- 'pub get device_doctor',
- ['dart', 'pub', 'get'],
+ infra_steps=True):
+ self.m.step(
+ 'pub get device_doctor',
+ ['dart', 'pub', 'get'],
+ )
+ if diagnose_first:
+ clean_results = self._diagnose_debug_symbols(
+ entrypoint, timeout, cocoon_path
)
- if diagnose_first:
- clean_results = self._diagnose_debug_symbols(entrypoint, timeout, cocoon_path)
- if clean_results:
- # It doesn't appear the debug symbols need to be synced; we are finished.
- return
- for _ in range(retry_count):
- self._recover_debug_symbols(entrypoint, timeout, cocoon_path)
- result = self._diagnose_debug_symbols(entrypoint, timeout, cocoon_path)
- if result:
- # attached devices don't have errors
- return
- # Try for twice as long next time
- timeout *= 2
+ if clean_results:
+ # It doesn't appear the debug symbols need to be synced; we are finished.
+ return
+ for _ in range(retry_count):
+ self._recover_debug_symbols(entrypoint, timeout, cocoon_path)
+ result = self._diagnose_debug_symbols(
+ entrypoint, timeout, cocoon_path
+ )
+ if result:
+ # attached devices don't have errors
+ return
+ # Try for twice as long next time
+ timeout *= 2
- message = '''
+ message = '''
The ios_debug_symbol_doctor detected attached phones with errors and failed
to recover this bot with a timeout of %s seconds.
See https://github.com/flutter/flutter/issues/103511 for more context.
''' % timeout
- # raise purple
- self.m.step.empty(
- 'Recovery failed after %s attempts' % retry_count,
- status=self.m.step.INFRA_FAILURE,
- step_text=message,
- )
-
-
+ # raise purple
+ self.m.step.empty(
+ 'Recovery failed after %s attempts' % retry_count,
+ status=self.m.step.INFRA_FAILURE,
+ step_text=message,
+ )
def dismiss_dialogs(self):
"""Dismisses iOS dialogs to avoid problems.
@@ -324,9 +327,7 @@
def _checkout_cocoon(self):
"""Checkout cocoon at HEAD to the cache and return the path."""
cocoon_path = self.m.path['cache'].join('cocoon')
- self.m.repo_util.checkout(
- 'cocoon', cocoon_path, ref='refs/heads/main'
- )
+ self.m.repo_util.checkout('cocoon', cocoon_path, ref='refs/heads/main')
return cocoon_path
def _diagnose_debug_symbols(self, entrypoint, timeout, cocoon_path):
@@ -339,13 +340,13 @@
Returns a boolean for whether or not the initial diagnose succeeded.
"""
try:
- self.m.step(
- 'diagnose',
- ['dart', entrypoint, 'diagnose'],
- )
- return True
+ self.m.step(
+ 'diagnose',
+ ['dart', entrypoint, 'diagnose'],
+ )
+ return True
except self.m.step.StepFailure as e:
- return False
+ return False
def _recover_debug_symbols(self, entrypoint, timeout, cocoon_path):
self.m.step(
diff --git a/recipe_modules/os_utils/test_api.py b/recipe_modules/os_utils/test_api.py
index a347e84..77a559e 100644
--- a/recipe_modules/os_utils/test_api.py
+++ b/recipe_modules/os_utils/test_api.py
@@ -4,6 +4,7 @@
from recipe_engine import recipe_test_api
+
class OsUtilsTestApi(recipe_test_api.RecipeTestApi):
@recipe_test_api.mod_test_data
diff --git a/recipe_modules/osx_sdk/api.py b/recipe_modules/osx_sdk/api.py
index daf6fda..8733928 100644
--- a/recipe_modules/osx_sdk/api.py
+++ b/recipe_modules/osx_sdk/api.py
@@ -11,7 +11,6 @@
from recipe_engine import recipe_api
-
# Rationalized from https://en.wikipedia.org/wiki/Xcode.
#
# Maps from OS version to the maximum supported version of Xcode for that OS.
@@ -21,18 +20,10 @@
('10.13.6', '10b61'), ('10.14.3', '10g8'),
('10.14.4', '11b52'), ('10.15.4', '12a7209')]
-
_RUNTIMESPATH = [
- 'Contents',
- 'Developer',
- 'Platforms',
- 'iPhoneOS.platform',
- 'Library',
- 'Developer',
- 'CoreSimulator',
- 'Profiles',
- 'Runtimes'
- ]
+ 'Contents', 'Developer', 'Platforms', 'iPhoneOS.platform', 'Library',
+ 'Developer', 'CoreSimulator', 'Profiles', 'Runtimes'
+]
_XCODE_CACHE_PATH = 'osx_sdk'
@@ -162,16 +153,9 @@
self.m.step(
'install xcode',
[
- cache_dir.join('mac_toolchain'),
- 'install',
- '-kind',
- kind,
- '-xcode-version',
- self._sdk_version,
- '-output-dir',
- sdk_app,
- '-cipd-package-prefix',
- 'flutter_internal/ios/xcode',
+ cache_dir.join('mac_toolchain'), 'install', '-kind', kind,
+ '-xcode-version', self._sdk_version, '-output-dir', sdk_app,
+ '-cipd-package-prefix', 'flutter_internal/ios/xcode',
'-with-runtime=%s' % (not bool(self._runtime_versions))
],
)
@@ -179,9 +163,12 @@
# installs each runtime version under `osx_sdk` for cache sharing,
# and then copies over to the destination.
if self._runtime_versions:
- self.m.file.ensure_directory('Ensuring runtimes directory', sdk_app.join(*_RUNTIMESPATH))
+ self.m.file.ensure_directory(
+ 'Ensuring runtimes directory', sdk_app.join(*_RUNTIMESPATH)
+ )
for version in self._runtime_versions:
- runtime_name = 'iOS %s.simruntime' % version.lower().replace('ios-', '').replace('-', '.')
+ runtime_name = 'iOS %s.simruntime' % version.lower(
+ ).replace('ios-', '').replace('-', '.')
dest = sdk_app.join(*_RUNTIMESPATH).join(runtime_name)
if not self.m.path.exists(dest):
runtime_cache_dir = self.m.path['cache'].join(_XCODE_CACHE_PATH).join(
@@ -204,8 +191,12 @@
path_with_version = runtime_cache_dir.join(runtime_name)
# If the runtime was the default for Xcode, the CIPD bundle contains a directory called iOS.simruntime; otherwise
# it contains a folder called "iOS <version>.simruntime".
- source = path_with_version if self.m.path.exists(path_with_version) else runtime_cache_dir.join('iOS.simruntime')
- self.m.file.copytree('Copy runtime to %s' % dest, source, dest, symlinks=True)
+ source = path_with_version if self.m.path.exists(
+ path_with_version
+ ) else runtime_cache_dir.join('iOS.simruntime')
+ self.m.file.copytree(
+ 'Copy runtime to %s' % dest, source, dest, symlinks=True
+ )
return sdk_app
def _cache_polluted(self):
@@ -231,7 +222,10 @@
return cache_polluted
if not self._runtime_exists():
cache_polluted = True
- self.m.step('cache polluted due to missing runtime', ['echo', 'xcode is installed without runtime'])
+ self.m.step(
+ 'cache polluted due to missing runtime',
+ ['echo', 'xcode is installed without runtime']
+ )
return cache_polluted
def _cache_dir(self):
@@ -260,11 +254,15 @@
sdk_app_dir = self._cache_dir().join('XCode.app')
if self._runtime_versions:
for version in self._runtime_versions:
- runtime_name = 'iOS %s.simruntime' % version.lower().replace('ios-', '').replace('-', '.')
+ runtime_name = 'iOS %s.simruntime' % version.lower(
+ ).replace('ios-', '').replace('-', '.')
runtime_path = sdk_app_dir.join(*_RUNTIMESPATH).join(runtime_name)
if not self.m.path.exists(runtime_path):
runtime_exists = False
- self.m.step('runtime: %s does not exist' % runtime_name, ['echo', runtime_path])
+ self.m.step(
+ 'runtime: %s does not exist' % runtime_name,
+ ['echo', runtime_path]
+ )
break
else:
runtime_path = sdk_app_dir.join(*_RUNTIMESPATH).join('iOS.simruntime')
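A worked example of the runtime-name derivation used in the hunks above (illustrative version string):

    version = 'ios-13-0'
    runtime_name = 'iOS %s.simruntime' % version.lower(
    ).replace('ios-', '').replace('-', '.')
    # => 'iOS 13.0.simruntime'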
diff --git a/recipe_modules/osx_sdk/examples/full.py b/recipe_modules/osx_sdk/examples/full.py
index c428caa..f8c7c0f 100644
--- a/recipe_modules/osx_sdk/examples/full.py
+++ b/recipe_modules/osx_sdk/examples/full.py
@@ -3,13 +3,13 @@
# found in the LICENSE file.
DEPS = [
- 'flutter/os_utils',
- 'flutter/osx_sdk',
- 'recipe_engine/file',
- 'recipe_engine/path',
- 'recipe_engine/platform',
- 'recipe_engine/properties',
- 'recipe_engine/step',
+ 'flutter/os_utils',
+ 'flutter/osx_sdk',
+ 'recipe_engine/file',
+ 'recipe_engine/path',
+ 'recipe_engine/platform',
+ 'recipe_engine/properties',
+ 'recipe_engine/step',
]
@@ -21,51 +21,63 @@
def GenTests(api):
for platform in ('linux', 'mac', 'win'):
- yield (api.test(platform) +
- api.platform.name(platform))
+ yield (api.test(platform) + api.platform.name(platform))
yield api.test(
- 'explicit_version',
- api.platform.name('mac'),
- api.properties(**{'$flutter/osx_sdk': {
- 'sdk_version': 'deadbeef', 'toolchain_ver': '123abc',
- 'cleanup_cache': True
- }})
+ 'explicit_version', api.platform.name('mac'),
+ api.properties(
+ **{
+ '$flutter/osx_sdk': {
+ 'sdk_version': 'deadbeef', 'toolchain_ver': '123abc',
+ 'cleanup_cache': True
+ }
+ }
+ )
)
runtime_path = api.path['cache'].join(
- 'osx_sdk', 'xcode_deadbeef_runtime_ios-14-0_ios-13-0',
- 'XCode.app', 'Contents', 'Developer', 'Platforms',
- 'iPhoneOS.platform', 'Library', 'Developer', 'CoreSimulator',
- 'Profiles', 'Runtimes', 'iOS 13.0.simruntime')
+ 'osx_sdk', 'xcode_deadbeef_runtime_ios-14-0_ios-13-0', 'XCode.app',
+ 'Contents', 'Developer', 'Platforms', 'iPhoneOS.platform', 'Library',
+ 'Developer', 'CoreSimulator', 'Profiles', 'Runtimes',
+ 'iOS 13.0.simruntime'
+ )
- sdk_app_path = api.path['cache'].join('osx_sdk',
- 'xcode_deadbeef',
- 'XCode.app')
+ sdk_app_path = api.path['cache'].join(
+ 'osx_sdk', 'xcode_deadbeef', 'XCode.app'
+ )
yield api.test(
'explicit_runtime_version',
api.platform.name('mac'),
- api.properties(**{'$flutter/osx_sdk': {
- 'sdk_version': 'deadbeef', 'toolchain_ver': '123abc',
- 'runtime_versions': ['ios-13-0', 'ios-14-0']
- }}),
+ api.properties(
+ **{
+ '$flutter/osx_sdk': {
+ 'sdk_version': 'deadbeef', 'toolchain_ver': '123abc',
+ 'runtime_versions': ['ios-13-0', 'ios-14-0']
+ }
+ }
+ ),
api.os_utils.is_symlink(False),
api.path.exists(runtime_path),
)
runtime_path = api.path['cache'].join(
- 'osx_sdk', 'xcode_deadbeef',
- 'XCode.app', 'Contents', 'Developer', 'Platforms',
- 'iPhoneOS.platform', 'Library', 'Developer', 'CoreSimulator',
- 'Profiles', 'Runtimes', 'iOS.simruntime')
+ 'osx_sdk', 'xcode_deadbeef', 'XCode.app', 'Contents', 'Developer',
+ 'Platforms', 'iPhoneOS.platform', 'Library', 'Developer', 'CoreSimulator',
+ 'Profiles', 'Runtimes', 'iOS.simruntime'
+ )
yield api.test(
'no_runtime_version',
api.platform.name('mac'),
- api.properties(**{'$flutter/osx_sdk': {
- 'sdk_version': 'deadbeef', 'toolchain_ver': '123abc',
- }}),
+ api.properties(
+ **{
+ '$flutter/osx_sdk': {
+ 'sdk_version': 'deadbeef',
+ 'toolchain_ver': '123abc',
+ }
+ }
+ ),
api.os_utils.is_symlink(False),
api.path.exists(runtime_path),
api.path.exists(sdk_app_path),
diff --git a/recipe_modules/recipe_testing/api.py b/recipe_modules/recipe_testing/api.py
index ab19523..53ada32 100644
--- a/recipe_modules/recipe_testing/api.py
+++ b/recipe_modules/recipe_testing/api.py
@@ -29,155 +29,151 @@
class Build(swarming_retry_api.LedTask):
- # This warning is spurious because LedTask defines _led_data.
- # pylint: disable=attribute-defined-outside-init
+ # This warning is spurious because LedTask defines _led_data.
+ # pylint: disable=attribute-defined-outside-init
- def include_cl(self, cl):
- self._led_data = self._led_data.then("edit-cr-cl", cl)
+ def include_cl(self, cl):
+ self._led_data = self._led_data.then("edit-cr-cl", cl)
- def include_recipe_bundle(self):
- self._led_data = self._led_data.then("edit-recipe-bundle")
+ def include_recipe_bundle(self):
+ self._led_data = self._led_data.then("edit-recipe-bundle")
- def use_realms(self):
- self._led_data = self._led_data.then(
- "edit", "-experiment", "luci.use_realms=true"
- )
+ def use_realms(self):
+ self._led_data = self._led_data.then(
+ "edit", "-experiment", "luci.use_realms=true"
+ )
- def set_properties(self, properties):
- args = []
- for k, v in properties.items():
- args += ["-pa", "%s=%s" % (k, self._api.json.dumps(v))]
- self._led_data = self._led_data.then("edit", *args)
+ def set_properties(self, properties):
+ args = []
+ for k, v in properties.items():
+ args += ["-pa", "%s=%s" % (k, self._api.json.dumps(v))]
+ self._led_data = self._led_data.then("edit", *args)
class RecipeTestingApi(recipe_api.RecipeApi):
- """API for running tests and processing test results."""
+ """API for running tests and processing test results."""
- def __init__(self, props, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self._recipe_depth = props.recipe_depth
- self.enabled = props.enabled
- self.max_build_age_seconds = int(datetime.timedelta(days=28).total_seconds())
- self.projects = ("flutter",)
+ def __init__(self, props, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._recipe_depth = props.recipe_depth
+ self.enabled = props.enabled
+ self.max_build_age_seconds = int(
+ datetime.timedelta(days=28).total_seconds()
+ )
+ self.projects = ("flutter",)
- def _get_affected_recipes(self, recipes_path):
- """Collect affected recipes.
+ def _get_affected_recipes(self, recipes_path):
+ """Collect affected recipes.
For now assume we care about all recipes.
"""
- with self.m.step.nest("get_affected_recipes") as parent_step:
- recipes_dir = recipes_path.join("recipes")
- recipe_files = self.m.file.listdir(
- "ls-recipes", recipes_dir, recursive=True
- )
+ with self.m.step.nest("get_affected_recipes") as parent_step:
+ recipes_dir = recipes_path.join("recipes")
+ recipe_files = self.m.file.listdir(
+ "ls-recipes", recipes_dir, recursive=True
+ )
- all_recipes = []
- for recipe_file in recipe_files:
- path = self.m.path.relpath(
- self.m.path.realpath(recipe_file), self.m.path.realpath(recipes_dir)
- )
- # Files inside folders that end in ".resources" are never recipes.
- if self.m.path.dirname(path).endswith(".resources"):
- continue
+ all_recipes = []
+ for recipe_file in recipe_files:
+ path = self.m.path.relpath(
+ self.m.path.realpath(recipe_file),
+ self.m.path.realpath(recipes_dir)
+ )
+ # Files inside folders that end in ".resources" are never recipes.
+ if self.m.path.dirname(path).endswith(".resources"):
+ continue
- name, ext = self.m.path.splitext(path)
- if ext == ".py":
- all_recipes.append(name)
+ name, ext = self.m.path.splitext(path)
+ if ext == ".py":
+ all_recipes.append(name)
- parent_step.logs["all recipes"] = all_recipes
+ parent_step.logs["all recipes"] = all_recipes
- with self.m.context(cwd=recipes_path):
- changed_files = self.m.git.get_changed_files(commit="HEAD")
- parent_step.logs["changed files (raw)"] = changed_files
+ with self.m.context(cwd=recipes_path):
+ changed_files = self.m.git.get_changed_files(commit="HEAD")
+ parent_step.logs["changed files (raw)"] = changed_files
- def is_expected_json(path):
- # We want to ignore expected JSON files--they won't affect how recipes
- # run. It's possible there are JSON files used as data for recipes
- # instead of as expected test outputs, so determine which files to
- # ignore very narrowly.
- return self.m.path.splitext(path)[1] == ".json" and self.m.path.dirname(
- path
- ).endswith(".expected")
+ def is_expected_json(path):
+ # We want to ignore expected JSON files--they won't affect how recipes
+ # run. It's possible there are JSON files used as data for recipes
+ # instead of as expected test outputs, so determine which files to
+ # ignore very narrowly.
+ return self.m.path.splitext(path)[
+ 1] == ".json" and self.m.path.dirname(path).endswith(".expected")
- def is_python_test_file(path):
- """Return True if this is a test file that we should ignore."""
- # We want to ignore test_api.py files--they won't affect how recipes
- # run in led, they only affect how recipes run in
- # './recipes test run', and we test that every time any recipe is
- # changed.
- if (
- self.m.path.basename(path) == "test_api.py"
- and self.m.path.dirname(self.m.path.dirname(path))
- == "recipe_modules"
- ):
- return True
+ def is_python_test_file(path):
+ """Return True if this is a test file that we should ignore."""
+ # We want to ignore test_api.py files--they won't affect how recipes
+ # run in led, they only affect how recipes run in
+ # './recipes test run', and we test that every time any recipe is
+ # changed.
+ if (self.m.path.basename(path) == "test_api.py" and
+ self.m.path.dirname(self.m.path.dirname(path)) == "recipe_modules"):
+ return True
- # Also ignore test definitions themselves. By convention these are
- # given the filename 'full.py' in Fuchsia, but there is no
- # guarantee this will remain the case.
- test_dir_names = ("tests", "examples")
- if (
- self.m.path.splitext(path)[1] == ".py"
- and self.m.path.basename(self.m.path.dirname(path))
- in test_dir_names
- ):
- return True
+ # Also ignore test definitions themselves. By convention these are
+ # given the filename 'full.py' in Fuchsia, but there is no
+ # guarantee this will remain the case.
+ test_dir_names = ("tests", "examples")
+ if (self.m.path.splitext(path)[1] == ".py" and
+ self.m.path.basename(self.m.path.dirname(path)) in test_dir_names):
+ return True
- return False
+ return False
- def is_ignored_file(path):
- return is_expected_json(path) or is_python_test_file(path)
+ def is_ignored_file(path):
+ return is_expected_json(path) or is_python_test_file(path)
- filtered_changed_files = [
- x for x in changed_files if not is_ignored_file(x)
- ]
- parent_step.logs["changed files (filtered)"] = filtered_changed_files or [
- "no changed files"
- ]
+ filtered_changed_files = [
+ x for x in changed_files if not is_ignored_file(x)
+ ]
+ parent_step.logs["changed files (filtered)"] = filtered_changed_files or [
+ "no changed files"
+ ]
- res = self.m.step(
- "recipes-analyze",
- [
- recipes_path.join("recipes.py"),
- "analyze",
- self.m.json.input(
- {"recipes": all_recipes, "files": filtered_changed_files}
- ),
- self.m.json.output(),
- ],
- )
+ res = self.m.step(
+ "recipes-analyze",
+ [
+ recipes_path.join("recipes.py"),
+ "analyze",
+ self.m.json.input({
+ "recipes": all_recipes, "files": filtered_changed_files
+ }),
+ self.m.json.output(),
+ ],
+ )
- affected_recipes = res.json.output["recipes"]
+ affected_recipes = res.json.output["recipes"]
- def should_test_all_recipes(path):
- globs = (
- "infra/config/recipes.cfg",
- # We particularly care about running CQ for flutter.proto changes.
- "recipe_proto/*.proto",
- )
- return any(fnmatch.fnmatch(path, glob) for glob in globs)
+ def should_test_all_recipes(path):
+ globs = (
+ "infra/config/recipes.cfg",
+ # We particularly care about running CQ for flutter.proto changes.
+ "recipe_proto/*.proto",
+ )
+ return any(fnmatch.fnmatch(path, glob) for glob in globs)
- special_changed_files = [
- f for f in changed_files if should_test_all_recipes(f)
- ]
- if special_changed_files:
- step = self.m.step.empty("mark all recipes as affected")
- step.presentation.step_summary_text = (
- "because these files were changed:"
- )
- step.presentation.step_text = "\n" + "\n".join(special_changed_files)
- affected_recipes = all_recipes
+ special_changed_files = [
+ f for f in changed_files if should_test_all_recipes(f)
+ ]
+ if special_changed_files:
+ step = self.m.step.empty("mark all recipes as affected")
+ step.presentation.step_summary_text = (
+ "because these files were changed:"
+ )
+ step.presentation.step_text = "\n" + "\n".join(special_changed_files)
+ affected_recipes = all_recipes
- parent_step.logs["affected recipes"] = affected_recipes
+ parent_step.logs["affected recipes"] = affected_recipes
- # Skip running recipes in the `recipes/contrib` directory, because
- # they are generally lower-priority and not worth running by default
- # in recipes CQ.
- return {r for r in affected_recipes if not r.startswith("contrib/")}
+ # Skip running recipes in the `recipes/contrib` directory, because
+ # they are generally lower-priority and not worth running by default
+ # in recipes CQ.
+ return {r for r in affected_recipes if not r.startswith("contrib/")}
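
For reference, the JSON handed to and returned by `recipes.py analyze` has roughly this shape (field names match the test data in test_api.py further below; the values here are illustrative):

analyze_input = {
    "recipes": ["flutter", "recipes"],  # all recipes found by ls-recipes
    "files": ["recipes/flutter.py"],  # changed files, post-filtering
}
analyze_output = {
    "recipes": ["flutter"],  # recipes affected by the changed files
    "error": "",
    "invalidRecipes": [],
}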
- def _get_last_green_build(self, builder, cl_branch='main'):
- """Returns the build proto for a builder's most recent successful build.
+ def _get_last_green_build(self, builder, cl_branch='main'):
+ """Returns the build proto for a builder's most recent successful build.
If no build younger than `self.max_build_age_seconds` is found, returns
None. Also ensures that the returned build was run on the same branch
@@ -186,82 +182,81 @@
Args:
builder: builder protobuf object
"""
- project, bucket, builder = builder.split('/')
- predicate = builds_service_pb2.BuildPredicate()
- predicate.builder.project = project
- predicate.builder.bucket = bucket
- predicate.builder.builder = builder
- predicate.status = common_pb2.SUCCESS
+ project, bucket, builder = builder.split('/')
+ predicate = builds_service_pb2.BuildPredicate()
+ predicate.builder.project = project
+ predicate.builder.bucket = bucket
+ predicate.builder.builder = builder
+ predicate.status = common_pb2.SUCCESS
- builds = self.m.buildbucket.search(predicate, limit=MAX_BUILD_RESULTS)
+ builds = self.m.buildbucket.search(predicate, limit=MAX_BUILD_RESULTS)
- def built_on_branch(build, branch):
- """Return True if build was built on the provided branch."""
- current_branch = \
- 'refs/heads/%s' % (branch or DEFAULT_BRANCH)
- build_properties = \
- build.input.properties
- if 'exe_cipd_version' in build_properties.keys():
- build_branch = build_properties['exe_cipd_version']
- else:
- build_branch = 'refs/heads/%s' % DEFAULT_BRANCH
- # Some recipes do not specify the branch, so in the case where the
- # branch is None, ensure a match can still be found.
- return build_branch in [current_branch, None]
+ def built_on_branch(build, branch):
+ """Return True if build was built on the provided branch."""
+ current_branch = \
+ 'refs/heads/%s' % (branch or DEFAULT_BRANCH)
+ build_properties = \
+ build.input.properties
+ if 'exe_cipd_version' in build_properties.keys():
+ build_branch = build_properties['exe_cipd_version']
+ else:
+ build_branch = 'refs/heads/%s' % DEFAULT_BRANCH
+ # Some recipes do not specify the branch, so in the case where the
+ # branch is None, ensure a match can still be found.
+ return build_branch in [current_branch, None]
- builds_with_current_branch = \
- list(filter(
- lambda build: built_on_branch(build, cl_branch), builds
- ))
+ builds_with_current_branch = \
+ list(filter(
+ lambda build: built_on_branch(build, cl_branch), builds
+ ))
- builds_with_current_branch.sort(
- reverse=True, key=lambda build: build.start_time.seconds)
+ builds_with_current_branch.sort(
+ reverse=True, key=lambda build: build.start_time.seconds
+ )
- if not builds_with_current_branch:
- return None
+ if not builds_with_current_branch:
+ return None
- build = builds_with_current_branch[0]
- age_seconds = self.m.time.time() - build.end_time.seconds
- if age_seconds > self.max_build_age_seconds:
- return None
- return build
+ build = builds_with_current_branch[0]
+ age_seconds = self.m.time.time() - build.end_time.seconds
+ if age_seconds > self.max_build_age_seconds:
+ return None
+ return build
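
A condensed sketch of the selection logic above, assuming builds are plain dicts and that the default branch is main (the module operates on build protos instead):

def pick_last_green(builds, branch, now, max_age):
  # Keep builds whose exe_cipd_version matches the CL's branch; builds
  # without the property are treated as built from the default branch.
  ref = 'refs/heads/%s' % (branch or 'main')
  matching = [
      b for b in builds
      if b.get('exe_cipd_version', 'refs/heads/main') == ref
  ]
  # Newest first; discard the result if it is older than the cutoff.
  matching.sort(reverse=True, key=lambda b: b['start_time'])
  if not matching or now - matching[0]['end_time'] > max_age:
    return None
  return matching[0]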
- def _create_led_build(self, orig_build, selftest_cl):
- builder = orig_build.builder
- # By default the priority is increased by 10 (resulting in a "lower"
- # priority), but we want it to stay the same.
- led_data = self.m.led(
- "get-build", "-adjust-priority", "0", orig_build.id
- )
+ def _create_led_build(self, orig_build, selftest_cl):
+ builder = orig_build.builder
+ # By default the priority is increased by 10 (resulting in a "lower"
+ # priority), but we want it to stay the same.
+ led_data = self.m.led("get-build", "-adjust-priority", "0", orig_build.id)
- build = Build(api=self.m, name=builder.builder, led_data=led_data)
+ build = Build(api=self.m, name=builder.builder, led_data=led_data)
- if orig_build.input.properties["recipe"] == "recipes":
- build.include_cl(selftest_cl)
- elif orig_build.input.gerrit_changes:
- orig_cl = orig_build.input.gerrit_changes[0]
- cl_id, patchset = self._get_latest_cl(orig_cl.host, orig_cl.project)
- # Setting the CL to a more recent CL helps avoid rebase errors, but
- # if unable to find a recent CL, fall back to the original build's
- # triggering CL. It usually works.
- if not cl_id:
- cl_id = orig_cl.change
- patchset = orig_cl.patchset
- url = "https://%s/c/%s/+/%d/%d" % (
- orig_cl.host,
- orig_cl.project,
- cl_id,
- patchset,
- )
- build.include_cl(url)
- build.set_properties(self._tryjob_properties())
- build.use_realms()
+ if orig_build.input.properties["recipe"] == "recipes":
+ build.include_cl(selftest_cl)
+ elif orig_build.input.gerrit_changes:
+ orig_cl = orig_build.input.gerrit_changes[0]
+ cl_id, patchset = self._get_latest_cl(orig_cl.host, orig_cl.project)
+ # Setting the CL to a more recent CL helps avoid rebase errors, but
+ # if unable to find a recent CL, fall back to the original build's
+ # triggering CL. It usually works.
+ if not cl_id:
+ cl_id = orig_cl.change
+ patchset = orig_cl.patchset
+ url = "https://%s/c/%s/+/%d/%d" % (
+ orig_cl.host,
+ orig_cl.project,
+ cl_id,
+ patchset,
+ )
+ build.include_cl(url)
+ build.set_properties(self._tryjob_properties())
+ build.use_realms()
- return build
+ return build
- @functools.lru_cache(maxsize=None)
- def _get_latest_cl(self, gerrit_host, project):
- """Returns number and patchset for a project's most recently landed CL.
+ @functools.lru_cache(maxsize=None)
+ def _get_latest_cl(self, gerrit_host, project):
+ """Returns number and patchset for a project's most recently landed CL.
Args:
gerrit_host (str): E.g., flutter-review.googlesource.com
@@ -273,73 +268,73 @@
the tip of the main branch.
* The last patchset of that CL.
"""
- gitiles_host = gerrit_host.replace("-review", "")
- remote = "https://%s/%s" % (gitiles_host, project)
- ref = self.m.git.get_default_remote_branch(remote)
- log = self.m.gitiles.log(remote, ref, limit=10, step_name="log %s" % project)
+ gitiles_host = gerrit_host.replace("-review", "")
+ remote = "https://%s/%s" % (gitiles_host, project)
+ ref = self.m.git.get_default_remote_branch(remote)
+ log = self.m.gitiles.log(
+ remote, ref, limit=10, step_name="log %s" % project
+ )
- for log_entry in log:
- commit_hash = log_entry["id"]
- step = self.m.gerrit.change_details(
- "latest change details for %s" % project,
- commit_hash,
- query_params=("CURRENT_REVISION",),
- host=gerrit_host,
- test_data=self.m.json.test_api.output(
- {
- "_number": 12345,
- "current_revision": "5" * 40,
- "revisions": {"5" * 40: {"_number": 6}},
- }
- ),
- ok_ret=(0, 1),
- )
-        # A commit landed directly without code review won't have a
-        # corresponding Gerrit CL, so fetching it will fail (which is fine; we
-        # just skip it and try the next one).
- if step.retcode == 0:
- cl_number = step.json.output["_number"]
- rev = step.json.output["current_revision"]
- ps_number = step.json.output["revisions"][rev]["_number"]
- return (cl_number, ps_number)
- return None, None
+ for log_entry in log:
+ commit_hash = log_entry["id"]
+ step = self.m.gerrit.change_details(
+ "latest change details for %s" % project,
+ commit_hash,
+ query_params=("CURRENT_REVISION",),
+ host=gerrit_host,
+ test_data=self.m.json.test_api.output({
+ "_number": 12345,
+ "current_revision": "5" * 40,
+ "revisions": {"5" * 40: {"_number": 6}},
+ }),
+ ok_ret=(0, 1),
+ )
+      # A commit landed directly without code review won't have a
+      # corresponding Gerrit CL, so fetching it will fail (which is fine; we
+      # just skip it and try the next one).
+ if step.retcode == 0:
+ cl_number = step.json.output["_number"]
+ rev = step.json.output["current_revision"]
+ ps_number = step.json.output["revisions"][rev]["_number"]
+ return (cl_number, ps_number)
+ return None, None
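
With the mocked change details above, this returns (12345, 6); `_create_led_build` earlier in this file then formats the pair into a CL URL, e.g. (host and project values are illustrative):

host, project = "flutter-review.googlesource.com", "flutter"
cl_id, patchset = 12345, 6
url = "https://%s/c/%s/+/%d/%d" % (host, project, cl_id, patchset)
# -> https://flutter-review.googlesource.com/c/flutter/+/12345/6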
- def run_lint(self, recipes_path, allowlist=""):
- """Run lint on recipes.
+ def run_lint(self, recipes_path, allowlist=""):
+ """Run lint on recipes.
Args:
recipes_path (Path): The path to the root of the recipes repo.
allowlist (str): A regex of import names to allow.
"""
- args = ["lint"]
- if allowlist:
- args.extend(["--allowlist", allowlist])
- with self.m.context(cwd=recipes_path):
- self.m.step(
- "lint",
- cmd=[self.m.context.cwd.join("recipes.py")] + args,
- )
+ args = ["lint"]
+ if allowlist:
+ args.extend(["--allowlist", allowlist])
+ with self.m.context(cwd=recipes_path):
+ self.m.step(
+ "lint",
+ cmd=[self.m.context.cwd.join("recipes.py")] + args,
+ )
- def run_unit_tests(self, recipes_path):
- """Run the recipe unit tests."""
- with self.m.context(cwd=recipes_path):
- self.m.step(
- "test",
- cmd=[
- self.m.context.cwd.join("recipes.py"),
- "test",
- "run",
- ],
- )
+ def run_unit_tests(self, recipes_path):
+ """Run the recipe unit tests."""
+ with self.m.context(cwd=recipes_path):
+ self.m.step(
+ "test",
+ cmd=[
+ self.m.context.cwd.join("recipes.py"),
+ "test",
+ "run",
+ ],
+ )
- def run_tests(
- self,
- recipes_path,
- selftest_cl,
- config,
- selftest_builder=None,
- ):
- """Launch CQ builders.
+ def run_tests(
+ self,
+ recipes_path,
+ selftest_cl,
+ config,
+ selftest_builder=None,
+ ):
+ """Launch CQ builders.
Args:
recipes_path (Path): Path to recipes repo checkout.
@@ -351,220 +346,203 @@
selftest_builder (str|None): Builder to use to guarantee that we
exercise the scheduling codepath when `use_buildbucket` is True.
"""
- # When run against a change to the recipes recipe, this is what the
- # swarming task stack should look like:
- #
- # * recipes.py from current recipe bundle, run against current CL
- # * recipes.py from current CL, run against SELFTEST_CL
- # * cobalt.py from current CL, run against current CL
- #
- # This works, but in case something goes wrong we need to make sure we
- # don't enter infinite recursion. We should never get to a third call to
- # recipes.py, so if we do we should exit.
- if self._recipe_depth >= 2:
- raise self.m.step.InfraFailure("recursion limit reached")
+ # When run against a change to the recipes recipe, this is what the
+ # swarming task stack should look like:
+ #
+ # * recipes.py from current recipe bundle, run against current CL
+ # * recipes.py from current CL, run against SELFTEST_CL
+ # * cobalt.py from current CL, run against current CL
+ #
+ # This works, but in case something goes wrong we need to make sure we
+ # don't enter infinite recursion. We should never get to a third call to
+ # recipes.py, so if we do we should exit.
+ if self._recipe_depth >= 2:
+ raise self.m.step.InfraFailure("recursion limit reached")
- builders = set()
- for project in config.projects:
- project_builders = set(
- self.m.commit_queue.all_tryjobs(
- project=project.name,
- include_unrestricted=project.include_unrestricted,
- include_restricted=project.include_restricted,
- config_name=project.cq_config_name or "commit-queue.cfg",
- )
- )
+ builders = set()
+ for project in config.projects:
+ project_builders = set(
+ self.m.commit_queue.all_tryjobs(
+ project=project.name,
+ include_unrestricted=project.include_unrestricted,
+ include_restricted=project.include_restricted,
+ config_name=project.cq_config_name or "commit-queue.cfg",
+ )
+ )
- for excluded_bucket in project.excluded_buckets:
- excluded_builders = set()
- for builder in project_builders:
- # Retrieve "<bucket>" from "<project>/<bucket>/<builder>".
- bucket = builder.split("/")[1]
- if bucket == excluded_bucket:
- excluded_builders.add(builder)
+ for excluded_bucket in project.excluded_buckets:
+ excluded_builders = set()
+ for builder in project_builders:
+ # Retrieve "<bucket>" from "<project>/<bucket>/<builder>".
+ bucket = builder.split("/")[1]
+ if bucket == excluded_bucket:
+ excluded_builders.add(builder)
- if excluded_builders:
- project_builders -= excluded_builders
- with self.m.step.nest(
- "excluding {} builders from bucket {}/{}".format(
- len(excluded_builders),
- project.name,
- excluded_bucket,
- )
- ) as pres:
- pres.step_summary_text = "\n".join(sorted(excluded_builders))
+ if excluded_builders:
+ project_builders -= excluded_builders
+ with self.m.step.nest(
+ "excluding {} builders from bucket {}/{}".format(
+ len(excluded_builders),
+ project.name,
+ excluded_bucket,
+ )) as pres:
+ pres.step_summary_text = "\n".join(sorted(excluded_builders))
- builders.update(project_builders)
+ builders.update(project_builders)
- builders = sorted(builders)
+ builders = sorted(builders)
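
For reference, the bucket extraction in the exclusion loop above works on fully qualified builder names:

builder = "flutter/try/foo.bar-debug"  # format: <project>/<bucket>/<builder>
bucket = builder.split("/")[1]  # -> "try"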
- affected_recipes = self._get_affected_recipes(recipes_path=recipes_path)
- if not affected_recipes:
- return
+ affected_recipes = self._get_affected_recipes(recipes_path=recipes_path)
+ if not affected_recipes:
+ return
- cl_branch = self._get_current_merging_branch()
+ cl_branch = self._get_current_merging_branch()
- if config.use_buildbucket:
- self._run_buildbucket_tests(
- selftest_builder,
- builders,
- affected_recipes,
- cl_branch)
- else:
- self._run_led_tests(
- recipes_path,
- selftest_cl,
- builders,
- affected_recipes,
- cl_branch)
+ if config.use_buildbucket:
+ self._run_buildbucket_tests(
+ selftest_builder, builders, affected_recipes, cl_branch
+ )
+ else:
+ self._run_led_tests(
+ recipes_path, selftest_cl, builders, affected_recipes, cl_branch
+ )
- def _is_build_affected(self, orig_build, affected_recipes, presentation):
- if not orig_build:
- presentation.step_summary_text = "no recent builds found"
- return False
+ def _is_build_affected(self, orig_build, affected_recipes, presentation):
+ if not orig_build:
+ presentation.step_summary_text = "no recent builds found"
+ return False
- recipe = orig_build.input.properties["recipe"]
- assert recipe
+ recipe = orig_build.input.properties["recipe"]
+ assert recipe
- is_recipe_affected = recipe in affected_recipes
- presentation.step_summary_text = "SELECTED" if is_recipe_affected else "skipped"
- presentation.logs["recipe_used"] = recipe
- return is_recipe_affected
+ is_recipe_affected = recipe in affected_recipes
+ presentation.step_summary_text = "SELECTED" if is_recipe_affected else "skipped"
+ presentation.logs["recipe_used"] = recipe
+ return is_recipe_affected
- def _get_green_tryjobs(self, expiry_secs=24 * 60 * 60):
- """Return the set of tryjobs that are green on the current patchset.
+ def _get_green_tryjobs(self, expiry_secs=24 * 60 * 60):
+ """Return the set of tryjobs that are green on the current patchset.
Args:
expiry_secs (int): Do not return tryjobs which are older than this
value in seconds.
"""
- builds = self.m.buildbucket.search(
- builds_service_pb2.BuildPredicate(
- gerrit_changes=list(self.m.buildbucket.build.input.gerrit_changes),
- status=common_pb2.SUCCESS,
- create_time=common_pb2.TimeRange(
- start_time=timestamp_pb2.Timestamp(
- seconds=int(self.m.time.time()) - expiry_secs,
- ),
+ builds = self.m.buildbucket.search(
+ builds_service_pb2.BuildPredicate(
+ gerrit_changes=list(self.m.buildbucket.build.input.gerrit_changes),
+ status=common_pb2.SUCCESS,
+ create_time=common_pb2.TimeRange(
+ start_time=timestamp_pb2.Timestamp(
+ seconds=int(self.m.time.time()) - expiry_secs,
),
),
- fields=["builder"],
- step_name="get green tryjobs",
- )
- return {self.m.buildbucket_util.full_builder_name(b.builder) for b in builds}
+ ),
+ fields=["builder"],
+ step_name="get green tryjobs",
+ )
+ return {
+ self.m.buildbucket_util.full_builder_name(b.builder) for b in builds
+ }
- def _run_buildbucket_tests(
- self,
- selftest_builder,
- builders,
- affected_recipes,
- cl_branch):
- affected_builders = []
- recipes_is_affected = False
+ def _run_buildbucket_tests(
+ self, selftest_builder, builders, affected_recipes, cl_branch
+ ):
+ affected_builders = []
+ recipes_is_affected = False
- with self.m.step.nest("get builders"), self.m.context(infra_steps=True):
- green_tryjobs = self._get_green_tryjobs()
- builders = [b for b in builders if b not in green_tryjobs]
- for builder in builders:
- with self.m.step.nest(builder) as presentation:
- orig_build = self._get_last_green_build(builder, cl_branch)
- if self._is_build_affected(
- orig_build, affected_recipes, presentation
- ):
- # With recipe versioning, the `recipes` recipe is
- # already tested in this invocation, so don't schedule
- # any more `recipes` builds.
- if orig_build.input.properties["recipe"] == "recipes":
- recipes_is_affected = True
- continue
- affected_builders.append(builder)
+ with self.m.step.nest("get builders"), self.m.context(infra_steps=True):
+ green_tryjobs = self._get_green_tryjobs()
+ builders = [b for b in builders if b not in green_tryjobs]
+ for builder in builders:
+ with self.m.step.nest(builder) as presentation:
+ orig_build = self._get_last_green_build(builder, cl_branch)
+ if self._is_build_affected(orig_build, affected_recipes,
+ presentation):
+ # With recipe versioning, the `recipes` recipe is
+ # already tested in this invocation, so don't schedule
+ # any more `recipes` builds.
+ if orig_build.input.properties["recipe"] == "recipes":
+ recipes_is_affected = True
+ continue
+ affected_builders.append(builder)
- # If `affected_builders` is empty, but the current recipe was affected,
- # then we should schedule one self-test builder so we can still exercise
- # the scheduling codepath.
- if not affected_builders and recipes_is_affected:
- affected_builders = [selftest_builder]
+ # If `affected_builders` is empty, but the current recipe was affected,
+ # then we should schedule one self-test builder so we can still exercise
+ # the scheduling codepath.
+ if not affected_builders and recipes_is_affected:
+ affected_builders = [selftest_builder]
- with self.m.step.nest("launch builds") as presentation:
- builds = self.m.subbuild.launch(
- # TODO(atyfto): Fix subbuild.launch so it can accept builders
- # with `bucket`s and/or `project`s which don't necessarily match
- # the current build's.
- builder_names=[b.split("/")[-1] for b in affected_builders],
- extra_properties=self._tryjob_properties(),
- presentation=presentation,
- # Present tryjobs in Gerrit since they are effectively
- # top-level builds.
- hide_in_gerrit=False,
- )
- with self.m.step.nest("collect builds"):
- results = self.m.subbuild.collect(
- build_ids=[b.build_id for b in builds.values()],
- )
- self.m.buildbucket_util.display_builds(
- "check builds",
- [b.build_proto for b in results.values()],
- raise_on_failure=True,
- )
+ with self.m.step.nest("launch builds") as presentation:
+ builds = self.m.subbuild.launch(
+ # TODO(atyfto): Fix subbuild.launch so it can accept builders
+ # with `bucket`s and/or `project`s which don't necessarily match
+ # the current build's.
+ builder_names=[b.split("/")[-1] for b in affected_builders],
+ extra_properties=self._tryjob_properties(),
+ presentation=presentation,
+ # Present tryjobs in Gerrit since they are effectively
+ # top-level builds.
+ hide_in_gerrit=False,
+ )
+ with self.m.step.nest("collect builds"):
+ results = self.m.subbuild.collect(
+ build_ids=[b.build_id for b in builds.values()],
+ )
+ self.m.buildbucket_util.display_builds(
+ "check builds",
+ [b.build_proto for b in results.values()],
+ raise_on_failure=True,
+ )
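
The green-tryjob filtering above amounts to a simple membership test; a sketch using the builder names from tests/full.py below:

green_tryjobs = {"fuchsia/try/core.arm64-release"}
builders = ["fuchsia/try/core.arm64-release", "fuchsia/try/core.x64-debug"]
remaining = [b for b in builders if b not in green_tryjobs]
# -> ["fuchsia/try/core.x64-debug"]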
- def _run_led_tests(
- self,
- recipes_path,
- selftest_cl,
- builders,
- affected_recipes,
- cl_branch):
- builds = []
- with self.m.step.nest("get builders") as nest, self.m.context(
- cwd=recipes_path, infra_steps=True
- ):
- for builder in builders:
- with self.m.step.nest(builder) as presentation:
- orig_build = self._get_last_green_build(builder, cl_branch)
- if self._is_build_affected(
- orig_build, affected_recipes, presentation
- ):
- build = self._create_led_build(orig_build, selftest_cl)
- build.include_recipe_bundle()
- builds.append(build)
+ def _run_led_tests(
+ self, recipes_path, selftest_cl, builders, affected_recipes, cl_branch
+ ):
+ builds = []
+ with self.m.step.nest("get builders") as nest, self.m.context(
+ cwd=recipes_path, infra_steps=True):
+ for builder in builders:
+ with self.m.step.nest(builder) as presentation:
+ orig_build = self._get_last_green_build(builder, cl_branch)
+ if self._is_build_affected(orig_build, affected_recipes,
+ presentation):
+ build = self._create_led_build(orig_build, selftest_cl)
+ build.include_recipe_bundle()
+ builds.append(build)
- nest.step_summary_text = "selected {} builds".format(len(builds))
+ nest.step_summary_text = "selected {} builds".format(len(builds))
- if not builds:
- return
- self.m.swarming_retry.run_and_present_tasks(builds)
+ if not builds:
+ return
+ self.m.swarming_retry.run_and_present_tasks(builds)
- def _tryjob_properties(self):
- """Properties that should be set on each launched tryjob."""
- props = properties_pb2.InputProperties(
- # Signal to the launched build that it's being tested by this module.
- enabled=True,
- # Increment the recipe depth. This only has an effect on builds that
- # use this module.
- recipe_depth=self._recipe_depth + 1,
- )
- return {
- "$flutter/recipe_testing": jsonpb.MessageToDict(
- props, preserving_proto_field_name=True
- )
- }
+ def _tryjob_properties(self):
+ """Properties that should be set on each launched tryjob."""
+ props = properties_pb2.InputProperties(
+ # Signal to the launched build that it's being tested by this module.
+ enabled=True,
+ # Increment the recipe depth. This only has an effect on builds that
+ # use this module.
+ recipe_depth=self._recipe_depth + 1,
+ )
+ return {
+ "$flutter/recipe_testing":
+ jsonpb.MessageToDict(props, preserving_proto_field_name=True)
+ }
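
For a top-level run (recipe depth 0), the dict returned above is equivalent to the following, assuming no other fields of the proto are populated:

tryjob_properties = {
    "$flutter/recipe_testing": {
        "enabled": True,
        "recipe_depth": 1,
    },
}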
- def _get_current_merging_branch(self):
- """Returns the branch that the current CL is being merged into.
+ def _get_current_merging_branch(self):
+ """Returns the branch that the current CL is being merged into.
If the buildset is not available in the recipe, then DEFAULT_BRANCH is
used.
"""
- tags = self.m.buildbucket.build.tags
- buildset_tag = list(
- filter(lambda tag: tag.key=='buildset', tags)
- )
- buildset_property = self.m.properties.get('buildset')
- if not buildset_tag and not buildset_property:
- return DEFAULT_BRANCH
- else:
- buildset = buildset_tag[0].value if buildset_tag else buildset_property
- host, cl_number = buildset.split('/')[2:4]
- cl_information = \
- self.m.gerrit_util.get_gerrit_cl_details(host, cl_number)
- return cl_information.get('branch')
+ tags = self.m.buildbucket.build.tags
+ buildset_tag = list(filter(lambda tag: tag.key == 'buildset', tags))
+ buildset_property = self.m.properties.get('buildset')
+ if not buildset_tag and not buildset_property:
+ return DEFAULT_BRANCH
+ else:
+ buildset = buildset_tag[0].value if buildset_tag else buildset_property
+ host, cl_number = buildset.split('/')[2:4]
+ cl_information = \
+ self.m.gerrit_util.get_gerrit_cl_details(host, cl_number)
+ return cl_information.get('branch')
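
The buildset value parsed above has the form exercised by the 'with_buildbucket' test below:

buildset = 'patch/gerrit/flutter-review.googlesource.com/12345/1'
host, cl_number = buildset.split('/')[2:4]
# host -> 'flutter-review.googlesource.com', cl_number -> '12345'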
diff --git a/recipe_modules/recipe_testing/test_api.py b/recipe_modules/recipe_testing/test_api.py
index 6e72cf2..be89083 100644
--- a/recipe_modules/recipe_testing/test_api.py
+++ b/recipe_modules/recipe_testing/test_api.py
@@ -20,172 +20,175 @@
class RecipeTestingTestApi(recipe_test_api.RecipeTestApi):
- def project(
- self,
- name="flutter",
- include_unrestricted=True,
- include_restricted=False,
- cq_config_name="commit-queue.cfg",
- excluded_buckets=(),
- ):
- return options_pb2.Project(
- name=name,
- include_unrestricted=include_unrestricted,
- include_restricted=include_restricted,
- cq_config_name=cq_config_name,
- excluded_buckets=excluded_buckets,
+
+ def project(
+ self,
+ name="flutter",
+ include_unrestricted=True,
+ include_restricted=False,
+ cq_config_name="commit-queue.cfg",
+ excluded_buckets=(),
+ ):
+ return options_pb2.Project(
+ name=name,
+ include_unrestricted=include_unrestricted,
+ include_restricted=include_restricted,
+ cq_config_name=cq_config_name,
+ excluded_buckets=excluded_buckets,
+ )
+
+ def options(self, projects=(), use_buildbucket=False, **kwargs):
+ if not projects:
+ projects = [self.project()]
+ return self.m.properties(
+ recipe_testing_options=options_pb2.Options(
+ projects=list(projects), use_buildbucket=use_buildbucket, **kwargs
)
+ )
- def options(self, projects=(), use_buildbucket=False, **kwargs):
- if not projects:
- projects = [self.project()]
- return self.m.properties(
- recipe_testing_options=options_pb2.Options(
- projects=list(projects), use_buildbucket=use_buildbucket, **kwargs
- )
- )
+ def build_data(
+ self,
+ name,
+ recipe,
+ age_seconds=ONE_DAY,
+ cl_cached=False,
+ skip=False,
+ num_log_entries=1,
+ project="flutter",
+ bucket="try",
+ # used for both buildbucket build id and swarming task id.
+ fake_id=100,
+ using_led=True,
+ exe_cipd_version=None
+ ):
+      # This time is taken from the recipe_engine time module. I see no way
+      # of getting it programmatically.
+ curr_time = 1337000000
+ end_time = curr_time - age_seconds
- def build_data(
- self,
- name,
- recipe,
- age_seconds=ONE_DAY,
- cl_cached=False,
- skip=False,
- num_log_entries=1,
- project="flutter",
- bucket="try",
- # used for both buildbucket build id and swarming task id.
- fake_id=100,
- using_led=True,
- exe_cipd_version=None
- ):
-        # This time is taken from the recipe_engine time module. I see no way
-        # of getting it programmatically.
- curr_time = 1337000000
- end_time = curr_time - age_seconds
+ orig_build = build_pb2.Build(id=fake_id, status=common_pb2.SUCCESS)
+ orig_build.end_time.seconds = end_time
+ orig_build.builder.project = project
+ orig_build.builder.bucket = bucket
+ orig_build.builder.builder = name
+ orig_build.input.properties["recipe"] = recipe
+ cl = orig_build.input.gerrit_changes.add()
+ cl.host = "flutter-review.googlesource.com"
+ cl.project = project
- orig_build = build_pb2.Build(id=fake_id, status=common_pb2.SUCCESS)
- orig_build.end_time.seconds = end_time
- orig_build.builder.project = project
- orig_build.builder.bucket = bucket
- orig_build.builder.builder = name
- orig_build.input.properties["recipe"] = recipe
- cl = orig_build.input.gerrit_changes.add()
- cl.host = "flutter-review.googlesource.com"
- cl.project = project
+ if exe_cipd_version:
+ orig_build.input.properties["exe_cipd_version"] = exe_cipd_version
- if exe_cipd_version:
- orig_build.input.properties["exe_cipd_version"] = exe_cipd_version
+ result = self.m.buildbucket.simulated_search_results([
+ orig_build
+ ], "get builders.{}.buildbucket.search".format(name))
- result = self.m.buildbucket.simulated_search_results(
- [orig_build], "get builders.{}.buildbucket.search".format(name)
- )
+ if skip or age_seconds > MAX_BUILD_AGE_SECONDS:
+ return result
- if skip or age_seconds > MAX_BUILD_AGE_SECONDS:
- return result
+ job = job_pb2.Definition()
+ build = self.m.buildbucket.ci_build_message(
+ priority=34500, project=project, bucket=bucket, builder=name
+ )
+ build.input.properties["recipe"] = recipe
- job = job_pb2.Definition()
- build = self.m.buildbucket.ci_build_message(
- priority=34500, project=project, bucket=bucket, builder=name
- )
- build.input.properties["recipe"] = recipe
+ # Don't inject test data for led steps when not using led, i.e. using
+ # the Buildbucket scheduling codepath.
+ if not using_led:
+ return result
- # Don't inject test data for led steps when not using led, i.e. using
- # the Buildbucket scheduling codepath.
- if not using_led:
- return result
+ # It's unrealistic for the get-build response to have a task ID set,
+ # but the only way of mocking the task ID returned by `led launch` is
+ # to set the task ID on the input to `led launch`, which, for recipe
+ # testing, is the `led get-build` response.
+ build.infra.swarming.task_id = str(fake_id)
+ job.buildbucket.bbagent_args.build.CopyFrom(build)
+ result += self.m.led.mock_get_build(
+ job,
+ fake_id,
+ )
- # It's unrealistic for the get-build response to have a task ID set,
- # but the only way of mocking the task ID returned by `led launch` is
- # to set the task ID on the input to `led launch`, which, for recipe
- # testing, is the `led get-build` response.
- build.infra.swarming.task_id = str(fake_id)
- job.buildbucket.bbagent_args.build.CopyFrom(build)
- result += self.m.led.mock_get_build(
- job,
- fake_id,
- )
+ if recipe != "recipes" and not cl_cached:
+ result += self.m.gitiles.log(
+ "get builders.{}.log {}".format(name, cl.project),
+ "A",
+ n=num_log_entries,
+ )
- if recipe != "recipes" and not cl_cached:
- result += self.m.gitiles.log(
- "get builders.{}.log {}".format(name, cl.project),
- "A",
- n=num_log_entries,
- )
+ return result
- return result
+ def no_build(self, name):
+ return self.m.buildbucket.simulated_search_results(
+ [], "get builders.{}.buildbucket.search".format(name)
+ )
- def no_build(self, name):
- return self.m.buildbucket.simulated_search_results(
- [], "get builders.{}.buildbucket.search".format(name)
- )
+ def affected_recipes_data(
+ self,
+ affected_recipes,
+ recipe_files=None,
+ changed_files=None,
+ error=None,
+ invalid_recipes=(),
+ step_name="get_affected_recipes.recipes-analyze",
+ ):
+ if not recipe_files:
+ recipe_files = ["foo", "flutter.py", "recipes.py", "sdk.expected"]
+ res = self.step_data(
+ "get_affected_recipes.ls-recipes",
+ stdout=self.m.raw_io.output_text(
+ "".join("{}\n".format(x) for x in recipe_files)
+ ),
+ )
- def affected_recipes_data(
- self,
- affected_recipes,
- recipe_files=None,
- changed_files=None,
- error=None,
- invalid_recipes=(),
- step_name="get_affected_recipes.recipes-analyze",
- ):
- if not recipe_files:
- recipe_files = ["foo", "flutter.py", "recipes.py", "sdk.expected"]
- res = self.step_data(
- "get_affected_recipes.ls-recipes",
- stdout=self.m.raw_io.output_text(
- "".join("{}\n".format(x) for x in recipe_files)
- ),
- )
+ if not changed_files:
+ changed_files = [
+ "recipes/flutter.py",
+ "recipes/foo",
+ "recipes/non_expected_json_file.json",
+ "recipe_modules/foo/examples/full.expected/bar.json",
+ "recipe_modules/foo/examples/full.py",
+ "recipe_modules/foo/test_api.py",
+ ]
+ res += self.m.git.get_changed_files(
+ "get_affected_recipes.git diff-tree",
+ changed_files,
+ )
- if not changed_files:
- changed_files = [
- "recipes/flutter.py",
- "recipes/foo",
- "recipes/non_expected_json_file.json",
- "recipe_modules/foo/examples/full.expected/bar.json",
- "recipe_modules/foo/examples/full.py",
- "recipe_modules/foo/test_api.py",
- ]
- res += self.m.git.get_changed_files(
- "get_affected_recipes.git diff-tree",
- changed_files,
- )
+ output = {
+ "recipes": list(affected_recipes),
+ "error": error or "",
+ "invalidRecipes": list(invalid_recipes),
+ }
+ retcode = -1 if error else 0
+ res += self.step_data(
+ step_name, self.m.json.output(output), retcode=retcode
+ )
- output = {
- "recipes": list(affected_recipes),
- "error": error or "",
- "invalidRecipes": list(invalid_recipes),
- }
- retcode = -1 if error else 0
- res += self.step_data(step_name, self.m.json.output(output), retcode=retcode)
+ return res
- return res
+ def task_result(self, task_id, name, failed=False):
+ return self.m.swarming.task_result(
+ id=task_id,
+ name="recipes-cq:%s" % name,
+ state=None if not name else self.m.swarming.TaskState.COMPLETED,
+ failure=failed,
+ )
- def task_result(self, task_id, name, failed=False):
- return self.m.swarming.task_result(
- id=task_id,
- name="recipes-cq:%s" % name,
- state=None if not name else self.m.swarming.TaskState.COMPLETED,
- failure=failed,
- )
-
- def existing_green_tryjobs(self, tryjobs):
- search_results = []
- for builder_name in tryjobs:
- project, bucket, builder = builder_name.split("/")
- search_results.append(
- build_pb2.Build(
- builder=builder_common_pb2.BuilderID(
- project=project,
- bucket=bucket,
- builder=builder,
- ),
- create_time=timestamp_pb2.Timestamp(seconds=1527292217),
- )
- )
- return self.m.buildbucket.simulated_search_results(
- search_results,
- step_name="get builders.get green tryjobs",
- )
+ def existing_green_tryjobs(self, tryjobs):
+ search_results = []
+ for builder_name in tryjobs:
+ project, bucket, builder = builder_name.split("/")
+ search_results.append(
+ build_pb2.Build(
+ builder=builder_common_pb2.BuilderID(
+ project=project,
+ bucket=bucket,
+ builder=builder,
+ ),
+ create_time=timestamp_pb2.Timestamp(seconds=1527292217),
+ )
+ )
+ return self.m.buildbucket.simulated_search_results(
+ search_results,
+ step_name="get builders.get green tryjobs",
+ )
diff --git a/recipe_modules/recipe_testing/tests/full.py b/recipe_modules/recipe_testing/tests/full.py
index df24c99..a45900a 100644
--- a/recipe_modules/recipe_testing/tests/full.py
+++ b/recipe_modules/recipe_testing/tests/full.py
@@ -1,6 +1,7 @@
# Copyright 2020 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
"""API for recipe_engine testing."""
import datetime
@@ -25,201 +26,171 @@
def RunSteps(api, props): # pylint: disable=invalid-name
- recipes_path = api.path["start_dir"].join("recipe_path")
+ recipes_path = api.path["start_dir"].join("recipe_path")
- api.recipe_testing.run_lint(recipes_path, allowlist=r"allowed_module")
- api.recipe_testing.run_unit_tests(recipes_path)
+ api.recipe_testing.run_lint(recipes_path, allowlist=r"allowed_module")
+ api.recipe_testing.run_unit_tests(recipes_path)
- selftest_cl = "https://flutter-review.googlesource.com/c/recipes/+/123456"
- selftest_builder = "flutter/try/foo.bar-debug"
- api.recipe_testing.run_tests(
- recipes_path,
- selftest_cl,
- props.recipe_testing_options,
- selftest_builder=selftest_builder,
- )
+ selftest_cl = "https://flutter-review.googlesource.com/c/recipes/+/123456"
+ selftest_builder = "flutter/try/foo.bar-debug"
+ api.recipe_testing.run_tests(
+ recipes_path,
+ selftest_cl,
+ props.recipe_testing_options,
+ selftest_builder=selftest_builder,
+ )
def GenTests(api): # pylint: disable=invalid-name
- test = api.recipe_testing
+ test = api.recipe_testing
- project = "flutter"
+ project = "flutter"
- yield (
- api.buildbucket_util.test("recursive_ls")
- + api.recipe_testing.options()
- + api.commit_queue.test_data(project, "empty")
- + test.affected_recipes_data(
- affected_recipes=[],
- recipe_files=["flutter/flutter.py", "abc.resources/bar.py", "abc.py"],
- )
- )
+ yield (
+ api.buildbucket_util.test("recursive_ls") + api.recipe_testing.options() +
+ api.commit_queue.test_data(project, "empty") + test.affected_recipes_data(
+ affected_recipes=[],
+ recipe_files=["flutter/flutter.py", "abc.resources/bar.py", "abc.py"],
+ )
+ )
- yield (
- api.buildbucket_util.test("recipes_cfg")
- + api.recipe_testing.options()
- + api.commit_queue.test_data(project, "empty")
- + test.affected_recipes_data(
- affected_recipes=[],
- recipe_files=["a.py", "b.py", "c.py", "d.py", "e.py"],
- changed_files=["infra/config/recipes.cfg"],
- )
- )
+ yield (
+ api.buildbucket_util.test("recipes_cfg") + api.recipe_testing.options() +
+ api.commit_queue.test_data(project, "empty") + test.affected_recipes_data(
+ affected_recipes=[],
+ recipe_files=["a.py", "b.py", "c.py", "d.py", "e.py"],
+ changed_files=["infra/config/recipes.cfg"],
+ )
+ )
- yield (
- api.buildbucket_util.test("recipe_proto")
- + api.recipe_testing.options()
- + api.commit_queue.test_data(project)
- + test.affected_recipes_data(
- affected_recipes=[],
- changed_files=["recipe_proto/infra/flutter.proto"],
- )
- )
+ yield (
+ api.buildbucket_util.test("recipe_proto") + api.recipe_testing.options() +
+ api.commit_queue.test_data(project) + test.affected_recipes_data(
+ affected_recipes=[],
+ changed_files=["recipe_proto/infra/flutter.proto"],
+ )
+ )
- yield (
- api.buildbucket_util.test("no_build_old_build_ignored_build")
- + api.recipe_testing.options()
- + api.commit_queue.test_data(project)
- + test.affected_recipes_data(["flutter"])
- + test.build_data(
- "fuchsia/try/cobalt-x64-linux",
- "cobalt",
- age_seconds=MAX_BUILD_AGE_SECONDS - ONE_DAY,
- skip=True,
- )
- + test.build_data(
- "fuchsia/try/core.x64-debug",
- "fuchsia",
- age_seconds=MAX_BUILD_AGE_SECONDS + ONE_DAY,
- )
- + test.no_build("fuchsia/try/core.arm64-debug")
- )
+ yield (
+ api.buildbucket_util.test("no_build_old_build_ignored_build") +
+ api.recipe_testing.options() + api.commit_queue.test_data(project) +
+ test.affected_recipes_data(["flutter"]) + test.build_data(
+ "fuchsia/try/cobalt-x64-linux",
+ "cobalt",
+ age_seconds=MAX_BUILD_AGE_SECONDS - ONE_DAY,
+ skip=True,
+ ) + test.build_data(
+ "fuchsia/try/core.x64-debug",
+ "fuchsia",
+ age_seconds=MAX_BUILD_AGE_SECONDS + ONE_DAY,
+ ) + test.no_build("fuchsia/try/core.arm64-debug")
+ )
- yield (
- api.buildbucket_util.test("excluded")
- + api.recipe_testing.options(
- [api.recipe_testing.project(excluded_buckets=("try",))]
- )
- + api.properties(ignored_buckets=["try"])
- + api.commit_queue.test_data(project)
- + test.affected_recipes_data(["flutter"])
- + api.post_process(
- post_process.MustRun,
- "excluding 3 builders from bucket flutter/try",
- )
- )
+ yield (
+ api.buildbucket_util.test("excluded") + api.recipe_testing.options([
+ api.recipe_testing.project(excluded_buckets=("try",))
+ ]) + api.properties(ignored_buckets=["try"]) +
+ api.commit_queue.test_data(project) +
+ test.affected_recipes_data(["flutter"]) + api.post_process(
+ post_process.MustRun,
+ "excluding 3 builders from bucket flutter/try",
+ )
+ )
- yield (
- api.buildbucket_util.test("two_pass_one_skip")
- + api.recipe_testing.options()
- + api.commit_queue.test_data(project)
- + test.affected_recipes_data(["fuchsia"])
- + test.build_data("fuchsia/try/cobalt-x64-linux", "cobalt", skip=True)
- + test.build_data(
- "fuchsia/try/core.x64-debug", "fuchsia", cl_cached=True, fake_id=100
- )
- + test.build_data("fuchsia/try/core.arm64-debug", "fuchsia", fake_id=200)
- + api.swarming_retry.collect_data(
- [
- test.task_result(100, "fuchsia/try/core.x64-debug"),
- test.task_result(200, "fuchsia/try/core.arm64-debug"),
- ]
- )
- )
+ yield (
+ api.buildbucket_util.test("two_pass_one_skip") +
+ api.recipe_testing.options() + api.commit_queue.test_data(project) +
+ test.affected_recipes_data(["fuchsia"]) +
+ test.build_data("fuchsia/try/cobalt-x64-linux", "cobalt", skip=True) +
+ test.build_data(
+ "fuchsia/try/core.x64-debug", "fuchsia", cl_cached=True, fake_id=100
+ ) +
+ test.build_data("fuchsia/try/core.arm64-debug", "fuchsia", fake_id=200) +
+ api.swarming_retry.collect_data([
+ test.task_result(100, "fuchsia/try/core.x64-debug"),
+ test.task_result(200, "fuchsia/try/core.arm64-debug"),
+ ])
+ )
- yield (
- api.buildbucket_util.test("fuchsia_recipe_unaffected")
- + api.recipe_testing.options()
- + api.commit_queue.test_data(project)
- + test.affected_recipes_data(["qemu"])
- + test.build_data("fuchsia/try/cobalt-x64-linux", "cobalt", skip=True)
- + test.build_data("fuchsia/try/core.x64-debug", "fuchsia", skip=True)
- + test.build_data("fuchsia/try/core.arm64-debug", "fuchsia", skip=True)
- )
+ yield (
+ api.buildbucket_util.test("fuchsia_recipe_unaffected") +
+ api.recipe_testing.options() + api.commit_queue.test_data(project) +
+ test.affected_recipes_data(["qemu"]) +
+ test.build_data("fuchsia/try/cobalt-x64-linux", "cobalt", skip=True) +
+ test.build_data("fuchsia/try/core.x64-debug", "fuchsia", skip=True) +
+ test.build_data("fuchsia/try/core.arm64-debug", "fuchsia", skip=True)
+ )
- yield (
- api.buildbucket_util.test("recipes")
- + api.recipe_testing.options()
- + api.commit_queue.test_data(project, "recipes-only")
- + test.affected_recipes_data(["recipes"])
- + test.build_data("fuchsia/try/recipes", "recipes")
- + api.swarming_retry.collect_data(
- [test.task_result(100, "fuchsia/try/recipes")]
- )
- )
+ yield (
+ api.buildbucket_util.test("recipes") + api.recipe_testing.options() +
+ api.commit_queue.test_data(project, "recipes-only") +
+ test.affected_recipes_data(["recipes"]) +
+ test.build_data("fuchsia/try/recipes", "recipes") +
+ api.swarming_retry.collect_data([
+ test.task_result(100, "fuchsia/try/recipes")
+ ])
+ )
- yield (
- api.buildbucket_util.test("with_buildbucket")
- + api.commit_queue.test_data(project)
- + test.affected_recipes_data(["fuchsia"])
- + test.build_data(
- "fuchsia/try/cobalt-x64-linux",
- "cobalt",
- skip=True,
- using_led=False,
- exe_cipd_version="refs/heads/main"
- )
- + test.build_data(
- "fuchsia/try/core.x64-debug",
- "fuchsia",
- cl_cached=True,
- fake_id=100,
- using_led=False,
- exe_cipd_version="refs/heads/main"
- )
- + test.build_data(
- "fuchsia/try/core.arm64-debug",
- "fuchsia",
- fake_id=200,
- using_led=False,
- exe_cipd_version="refs/heads/main"
- )
- + test.existing_green_tryjobs(["fuchsia/try/core.arm64-release"])
- # This line only affects coverage. It's sufficiently tested in other
- # modules that use this module.
- + api.recipe_testing.options(
- use_buildbucket=True,
- projects=(api.recipe_testing.project(),),
- )
- + api.step_data(
- "gerrit get cl info 12345",
- api.json.output({})
- )
- + api.properties(
- buildset='patch/gerrit/flutter-review.googlesource.com/12345/1'
- )
- )
+ yield (
+ api.buildbucket_util.test("with_buildbucket") +
+ api.commit_queue.test_data(project) +
+ test.affected_recipes_data(["fuchsia"]) + test.build_data(
+ "fuchsia/try/cobalt-x64-linux",
+ "cobalt",
+ skip=True,
+ using_led=False,
+ exe_cipd_version="refs/heads/main"
+ ) + test.build_data(
+ "fuchsia/try/core.x64-debug",
+ "fuchsia",
+ cl_cached=True,
+ fake_id=100,
+ using_led=False,
+ exe_cipd_version="refs/heads/main"
+ ) + test.build_data(
+ "fuchsia/try/core.arm64-debug",
+ "fuchsia",
+ fake_id=200,
+ using_led=False,
+ exe_cipd_version="refs/heads/main"
+ ) + test.existing_green_tryjobs(["fuchsia/try/core.arm64-release"])
+ # This line only affects coverage. It's sufficiently tested in other
+ # modules that use this module.
+ + api.recipe_testing.options(
+ use_buildbucket=True,
+ projects=(api.recipe_testing.project(),),
+ ) + api.step_data("gerrit get cl info 12345", api.json.output({})) +
+ api.properties(
+ buildset='patch/gerrit/flutter-review.googlesource.com/12345/1'
+ )
+ )
- yield (
- api.buildbucket_util.test("recipes_with_buildbucket")
- + api.commit_queue.test_data(project, "recipes-only")
- + test.affected_recipes_data(["recipes"])
- + test.build_data("fuchsia/try/recipes", "recipes", using_led=False)
- + api.recipe_testing.options(use_buildbucket=True)
- )
+ yield (
+ api.buildbucket_util.test("recipes_with_buildbucket") +
+ api.commit_queue.test_data(project, "recipes-only") +
+ test.affected_recipes_data(["recipes"]) +
+ test.build_data("fuchsia/try/recipes", "recipes", using_led=False) +
+ api.recipe_testing.options(use_buildbucket=True)
+ )
- yield (
- api.buildbucket_util.test("no_latest_cl")
- + api.recipe_testing.options()
- + api.commit_queue.test_data(project)
- + test.affected_recipes_data(["fuchsia"])
- + test.build_data("fuchsia/try/core.x64-debug", "fuchsia", cl_cached=True)
- + test.build_data(
- "fuchsia/try/core.arm64-debug",
- "fuchsia",
- num_log_entries=0,
- fake_id=200,
- )
- + api.swarming_retry.collect_data(
- [
- test.task_result(100, "fuchsia/try/core.x64-debug"),
- test.task_result(200, "fuchsia/try/core.arm64-debug"),
- ]
- )
- )
+ yield (
+ api.buildbucket_util.test("no_latest_cl") + api.recipe_testing.options() +
+ api.commit_queue.test_data(project) +
+ test.affected_recipes_data(["fuchsia"]) +
+ test.build_data("fuchsia/try/core.x64-debug", "fuchsia", cl_cached=True) +
+ test.build_data(
+ "fuchsia/try/core.arm64-debug",
+ "fuchsia",
+ num_log_entries=0,
+ fake_id=200,
+ ) + api.swarming_retry.collect_data([
+ test.task_result(100, "fuchsia/try/core.x64-debug"),
+ test.task_result(200, "fuchsia/try/core.arm64-debug"),
+ ])
+ )
- yield (
- api.buildbucket_util.test("depth", status="INFRA_FAILURE")
- + api.properties(**{"$flutter/recipe_testing": {"recipe_depth": 2}})
- )
+ yield (
+ api.buildbucket_util.test("depth", status="INFRA_FAILURE") +
+ api.properties(**{"$flutter/recipe_testing": {"recipe_depth": 2}})
+ )
diff --git a/recipe_modules/repo_util/api.py b/recipe_modules/repo_util/api.py
index 28d092e..dcaa751 100644
--- a/recipe_modules/repo_util/api.py
+++ b/recipe_modules/repo_util/api.py
@@ -49,7 +49,8 @@
clobber = self.m.properties.get('clobber', False)
# Grab any gclient custom variables passed as properties.
local_custom_vars = self.m.shard_util_v2.unfreeze_dict(
- self.m.properties.get('gclient_variables', {}))
+ self.m.properties.get('gclient_variables', {})
+ )
# Pass a special gclient variable to identify release candidate branch checkouts. This
# is required to prevent trying to download experimental dependencies on release candidate
# branches.
@@ -120,8 +121,7 @@
max_attempts=4
)
- def monorepo_checkout(
- self, checkout_path, env, env_prefixes):
+ def monorepo_checkout(self, checkout_path, env, env_prefixes):
"""Checkout code using gclient.
Args:
@@ -137,7 +137,9 @@
# Pass a special gclient variable to identify release candidate branch checkouts. This
# is required to prevent trying to download experimental dependencies on release candidate
# branches.
- local_custom_vars = self.m.shard_util_v2.unfreeze_dict(self.m.properties.get('gclient_variables', {}))
+ local_custom_vars = self.m.shard_util_v2.unfreeze_dict(
+ self.m.properties.get('gclient_variables', {})
+ )
if (self.m.properties.get('git_branch', '').startswith('flutter-') or
self.m.properties.get('git_branch', '') in ['beta', 'stable']):
local_custom_vars['release_candidate'] = True
@@ -209,7 +211,9 @@
max_attempts=4
)
- def checkout(self, name, checkout_path, url=None, ref=None, override_sha=False):
+ def checkout(
+ self, name, checkout_path, url=None, ref=None, override_sha=False
+ ):
"""Checks out a repo and returns sha1 of checked out revision.
The supported repository names and their urls are defined in the global
@@ -237,9 +241,9 @@
# the flutter repo. This just allows us to override that and use the original
# ref, which for the coming change is just the tot master branch.
git_ref = ref if override_sha else (
- self.m.buildbucket.gitiles_commit.id or
- self.m.buildbucket.gitiles_commit.ref or ref
- )
+ self.m.buildbucket.gitiles_commit.id or
+ self.m.buildbucket.gitiles_commit.ref or ref
+ )
else:
git_ref = (
ref or self.m.buildbucket.gitiles_commit.id or
@@ -276,10 +280,13 @@
def get_commit(self, checkout_path):
with self.m.context(cwd=checkout_path):
- step_test_data=lambda: self.m.raw_io.test_api.stream_output_text(
- '12345abcde12345abcde12345abcde12345abcde\n')
+ step_test_data = lambda: self.m.raw_io.test_api.stream_output_text(
+ '12345abcde12345abcde12345abcde12345abcde\n'
+ )
commit = self.m.git(
- 'rev-parse', 'HEAD', stdout=self.m.raw_io.output_text(),
+ 'rev-parse',
+ 'HEAD',
+ stdout=self.m.raw_io.output_text(),
step_test_data=step_test_data
).stdout.strip()
return commit
@@ -386,7 +393,9 @@
'src', 'third_party', 'dart', 'tools', 'sdks', 'dart-sdk', 'bin'
)
git_ref = self.m.properties.get('git_ref', '')
- android_home = checkout_path.join('src', 'third_party', 'android_tools', 'sdk')
+ android_home = checkout_path.join(
+ 'src', 'third_party', 'android_tools', 'sdk'
+ )
env = {
# Windows Packaging script assumes this is set.
'DEPOT_TOOLS':
@@ -417,10 +426,13 @@
def monorepo_environment(self, checkout_path):
"""Returns env and env_prefixes of a monorepo command environment."""
dart_bin = checkout_path.join(
- 'engine', 'src', 'third_party', 'dart', 'tools', 'sdks', 'dart-sdk', 'bin'
+ 'engine', 'src', 'third_party', 'dart', 'tools', 'sdks', 'dart-sdk',
+ 'bin'
)
git_ref = self.m.properties.get('git_ref', '')
- android_home = checkout_path.join('engine', 'src', 'third_party', 'android_tools', 'sdk')
+ android_home = checkout_path.join(
+ 'engine', 'src', 'third_party', 'android_tools', 'sdk'
+ )
env = {
# Windows Packaging script assumes this is set.
'DEPOT_TOOLS':
diff --git a/recipe_modules/repo_util/examples/full.expected/monorepo_wrong_host.json b/recipe_modules/repo_util/examples/full.expected/monorepo_wrong_host.json
index 433d071..17322d3 100644
--- a/recipe_modules/repo_util/examples/full.expected/monorepo_wrong_host.json
+++ b/recipe_modules/repo_util/examples/full.expected/monorepo_wrong_host.json
@@ -1144,7 +1144,7 @@
"Traceback (most recent call last):",
" File \"RECIPE_REPO[flutter]/recipe_modules/repo_util/examples/full.py\", line 48, in RunSteps",
" api.repo_util.monorepo_checkout(checkout_path, {}, {})",
- " File \"RECIPE_REPO[flutter]/recipe_modules/repo_util/api.py\", line 151, in monorepo_checkout",
+ " File \"RECIPE_REPO[flutter]/recipe_modules/repo_util/api.py\", line 153, in monorepo_checkout",
" raise ValueError(",
"ValueError('Input reference is not on dart.googlesource.com/monorepo')"
]
diff --git a/recipe_modules/repo_util/examples/full.py b/recipe_modules/repo_util/examples/full.py
index 0dfa00a..d76a32f 100644
--- a/recipe_modules/repo_util/examples/full.py
+++ b/recipe_modules/repo_util/examples/full.py
@@ -71,15 +71,18 @@
),
api.step_data(
'Identify branches.git branch',
- stdout=api.raw_io.output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
+ stdout=api.raw_io
+ .output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
),
api.step_data(
'Identify branches (2).git branch',
- stdout=api.raw_io.output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
+ stdout=api.raw_io
+ .output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
),
api.step_data(
'Identify branches (3).git branch',
- stdout=api.raw_io.output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
+ stdout=api.raw_io
+ .output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
)
)
)
@@ -131,10 +134,10 @@
)
yield api.test(
'first_bot_update_failed',
- api.properties(
- git_url='https://github.com/flutter/engine',
- git_ref='refs/pull/1/head'
- ),
+ api.properties(
+ git_url='https://github.com/flutter/engine',
+ git_ref='refs/pull/1/head'
+ ),
# Next line force a fail condition for the bot update
# first execution.
api.step_data("Checkout source code.bot_update", retcode=1),
@@ -148,8 +151,8 @@
),
# Next line force a fail condition for the bot update
# first execution.
- api.path.exists(api.path['cache'].join('git'),
- api.path['start_dir'].join('engine')
+ api.path.exists(
+ api.path['cache'].join('git'), api.path['start_dir'].join('engine')
),
api.override_step_data(
"Checkout source code.bot_update",
diff --git a/recipe_modules/repo_util/examples/unsupported.expected/unsupported.json b/recipe_modules/repo_util/examples/unsupported.expected/unsupported.json
index c312914..339300e 100644
--- a/recipe_modules/repo_util/examples/unsupported.expected/unsupported.json
+++ b/recipe_modules/repo_util/examples/unsupported.expected/unsupported.json
@@ -9,7 +9,7 @@
"Traceback (most recent call last):",
" File \"RECIPE_REPO[flutter]/recipe_modules/repo_util/examples/unsupported.py\", line 13, in RunSteps",
" api.repo_util.checkout('unsupported_repo', repo_dir)",
- " File \"RECIPE_REPO[flutter]/recipe_modules/repo_util/api.py\", line 226, in checkout",
+ " File \"RECIPE_REPO[flutter]/recipe_modules/repo_util/api.py\", line 230, in checkout",
" raise ValueError('Unsupported repo: %s' % name)",
"ValueError('Unsupported repo: unsupported_repo')"
]
diff --git a/recipe_modules/retry/api.py b/recipe_modules/retry/api.py
index 044b466..fe6b78f 100644
--- a/recipe_modules/retry/api.py
+++ b/recipe_modules/retry/api.py
@@ -96,14 +96,9 @@
sleep *= backoff_factor
def basic_wrap(
- self,
- func,
- max_attempts=3,
- sleep=5.0,
- backoff_factor=1.5,
- **kwargs
- ):
- """Retry basic wrapped function without step support.
+ self, func, max_attempts=3, sleep=5.0, backoff_factor=1.5, **kwargs
+ ):
+ """Retry basic wrapped function without step support.
Args:
func (callable): A function that performs the action that should be
retried on failure. If it raises a `StepFailure`, it will be retried.
@@ -115,15 +110,15 @@
Returns:
The result of executing func.
"""
- for attempt in range(max_attempts):
- try:
- result = func()
- return result
- except self.m.step.StepFailure:
- if attempt == max_attempts - 1:
- raise
- self.m.time.sleep(sleep)
- sleep *= backoff_factor
+ for attempt in range(max_attempts):
+ try:
+ result = func()
+ return result
+ except self.m.step.StepFailure:
+ if attempt == max_attempts - 1:
+ raise
+ self.m.time.sleep(sleep)
+ sleep *= backoff_factor
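
A standalone sketch of the same retry-with-backoff pattern, using time.sleep and a generic exception in place of the recipe engine's step module:

import time

def basic_wrap(func, max_attempts=3, sleep=5.0, backoff_factor=1.5):
  # Retry func on failure, growing the delay geometrically between
  # attempts; re-raise after the final attempt. With the defaults,
  # failed attempts sleep 5.0s, then 7.5s.
  for attempt in range(max_attempts):
    try:
      return func()
    except Exception:
      if attempt == max_attempts - 1:
        raise
      time.sleep(sleep)
      sleep *= backoff_factor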
def run_flutter_doctor(self):
self.step(
diff --git a/recipe_modules/retry/examples/full.py b/recipe_modules/retry/examples/full.py
index 3bd57b9..3b1929d 100644
--- a/recipe_modules/retry/examples/full.py
+++ b/recipe_modules/retry/examples/full.py
@@ -41,7 +41,9 @@
def func2():
api.step('test: mytest_func_basic', ['ls', '-a'])
- api.retry.wrap(func1, step_name='test: mytest_func', max_attempts=max_attempts)
+ api.retry.wrap(
+ func1, step_name='test: mytest_func', max_attempts=max_attempts
+ )
api.retry.basic_wrap(func2, max_attempts=max_attempts)
api.retry.run_flutter_doctor()
@@ -67,8 +69,7 @@
status='FAILURE'
)
yield api.test(
- 'pass_with_retries',
- api.properties(max_attempts=2),
+ 'pass_with_retries', api.properties(max_attempts=2),
api.step_data('test: mytest', retcode=1),
api.step_data('test: mytest_func', retcode=1),
api.step_data('test: mytest_func_basic', retcode=1)
diff --git a/recipe_modules/shard_util_v2/api.py b/recipe_modules/shard_util_v2/api.py
index bb2704e..a2cdd9d 100644
--- a/recipe_modules/shard_util_v2/api.py
+++ b/recipe_modules/shard_util_v2/api.py
@@ -22,16 +22,13 @@
# Internal properties that should be set for builds running on BuildBucket.
PROPERTIES_TO_REMOVE = [
- '$recipe_engine/buildbucket',
- 'buildername', '$recipe_engine/runtime',
+ '$recipe_engine/buildbucket', 'buildername', '$recipe_engine/runtime',
'is_experimental'
]
# Environments map to calculate the environment from the bucket.
ENVIRONMENTS_MAP = {
- 'try': '',
- 'staging': 'Staging ',
- 'flutter': 'Production ',
+ 'try': '', 'staging': 'Staging ', 'flutter': 'Production ',
'prod': 'Production '
}
@@ -90,7 +87,7 @@
properties = target.get('properties')
new_props = {}
for k, v in properties.items():
- if isinstance(v,str) and (v.startswith('[') or v.startswith('{')):
+ if isinstance(v, str) and (v.startswith('[') or v.startswith('{')):
new_props[k] = json.loads(v)
else:
new_props[k] = v
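
A standalone sketch of the JSON-string coercion above (the property names come from the example in shard_util_v2/examples/full.py below):

import json

props = {
    '$flutter/osx_sdk': '{"cleanup_cache": true, "sdk_version": "14a5294e"}',
    'validation': 'docs',
}
new_props = {
    k: json.loads(v) if isinstance(v, str) and
    (v.startswith('[') or v.startswith('{')) else v
    for k, v in props.items()
}
# new_props['$flutter/osx_sdk'] is a dict; 'validation' stays a string.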
@@ -126,8 +123,7 @@
build = self.unfreeze_dict(b)
build['recipe'] = build.get('recipe') or 'engine_v2/builder'
updated_builds.append(build)
- return self.schedule(updated_builds, presentation,
- branch=branch)
+ return self.schedule(updated_builds, presentation, branch=branch)
def schedule_tests(self, tests, build_results, presentation):
"""Schedule tests using build_results for dependencies.
@@ -205,7 +201,8 @@
# Buildbucket properties are not propagated to sub-builds when running with
# led. Copy the bb gitiles_commit properties to git_ref and git_url if not
# already set.
- if not (drone_properties.get('git_ref') or drone_properties.get('git_url')):
+ if not (drone_properties.get('git_ref') or
+ drone_properties.get('git_url')):
host = self.m.buildbucket.gitiles_commit.host
project = self.m.buildbucket.gitiles_commit.project
drone_properties['git_url'] = f'https://{host}/{project}'
@@ -217,7 +214,8 @@
environment = ENVIRONMENTS_MAP.get(bucket, '')
builder_name = build.get(
'drone_builder_name',
- '%s %sEngine Drone' % (platform_name, environment))
+ '%s %sEngine Drone' % (platform_name, environment)
+ )
suffix = drone_properties.get('builder_name_suffix')
if suffix:
builder_name = '%s%s' % (builder_name, suffix)
@@ -241,8 +239,8 @@
led_data = led_data.then('edit', '-r', build['recipe'])
for d in drone_dimensions:
led_data = led_data.then('edit', '-d', d)
- for k,v in ci_yaml_dimensions.items():
- led_data = led_data.then('edit', "-d", '%s=%s' % (k,v))
+ for k, v in ci_yaml_dimensions.items():
+ led_data = led_data.then('edit', "-d", '%s=%s' % (k, v))
led_data = self.m.led.inject_input_recipes(led_data)
launch_res = led_data.then('launch', '-modernize', '-real-build')
# real-build is being used and only build_id is being populated
@@ -252,8 +250,7 @@
launch_res.launch_result.swarming_hostname,
)
build_url_bb = 'https://%s/build/%s' % (
- launch_res.launch_result.buildbucket_hostname,
- task_id
+ launch_res.launch_result.buildbucket_hostname, task_id
)
build_url = build_url_swarming if launch_res.launch_result.task_id else build_url_bb
results[task_name] = SubbuildResult(
@@ -296,7 +293,8 @@
environment = ENVIRONMENTS_MAP.get(bucket, '')
builder_name = build.get(
'drone_builder_name',
- '%s %sEngine Drone' % (platform_name, environment))
+ '%s %sEngine Drone' % (platform_name, environment)
+ )
suffix = drone_properties.get('builder_name_suffix')
if suffix:
builder_name = '%s%s' % (builder_name, suffix)
@@ -305,7 +303,7 @@
for d in drone_dimensions:
k, v = d.split('=')
task_dimensions.append(common_pb2.RequestedDimension(key=k, value=v))
- for k,v in ci_yaml_dimensions.items():
+ for k, v in ci_yaml_dimensions.items():
task_dimensions.append(common_pb2.RequestedDimension(key=k, value=v))
# Override recipe.
drone_properties['recipe'] = build['recipe']
@@ -332,8 +330,9 @@
# Set priority to be the same as the main build temporarily to help triage
# https://github.com/flutter/flutter/issues/124155
priority=30,
- exe_cipd_version=self.m.properties.get('exe_cipd_version',
- 'refs/heads/%s' % branch)
+ exe_cipd_version=self.m.properties.get(
+ 'exe_cipd_version', 'refs/heads/%s' % branch
+ )
)
# Increase timeout if no_goma, since the runtime is going to
# be much longer.
@@ -446,8 +445,7 @@
if 'full_build' in cas_out_dict:
self.m.cas.download(
'Download for build %s and cas key %s' % (build_id, build_name),
- cas_out_dict['full_build'],
- out_build_paths
+ cas_out_dict['full_build'], out_build_paths
)
def archive_full_build(self, build_dir, target):
@@ -465,7 +463,9 @@
self.m.file.copytree('Copy host_debug_unopt', build_dir, cas_engine)
def _upload():
- return self.m.cas_util.upload(cas_dir, step_name='Archive full build for %s' % target)
+ return self.m.cas_util.upload(
+ cas_dir, step_name='Archive full build for %s' % target
+ )
# Windows CAS upload is flaky, hashes are calculated before files are fully synced to disk.
return self.m.retry.basic_wrap(
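
retry.basic_wrap itself is not shown in this CL; a minimal stand-in for what such a wrapper is assumed to do, per the flaky-upload comment above (signature and backoff are guesses, not the module's real API):

import time

def basic_wrap(fn, max_attempts=3, sleep_secs=1):
  # Retry a flaky callable a bounded number of times, re-raising the
  # last failure once the budget is exhausted.
  for attempt in range(max_attempts):
    try:
      return fn()
    except Exception:
      if attempt == max_attempts - 1:
        raise
      time.sleep(sleep_secs)

assert basic_wrap(lambda: 'ok') == 'ok'
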
diff --git a/recipe_modules/shard_util_v2/examples/full.py b/recipe_modules/shard_util_v2/examples/full.py
index 60ef57d..1df6a46 100644
--- a/recipe_modules/shard_util_v2/examples/full.py
+++ b/recipe_modules/shard_util_v2/examples/full.py
@@ -20,13 +20,14 @@
def RunSteps(api):
build_configs = api.properties.get('builds', [])
test_configs = api.properties.get('tests', [])
- props = api.shard_util_v2.pre_process_properties(
- {'properties': {
- '$flutter/osx_sdk': '{"cleanup_cache": true, "sdk_version": "14a5294e"}',
- 'validation': 'docs'
- }
- }
- )
+ props = api.shard_util_v2.pre_process_properties({
+ 'properties': {
+ '$flutter/osx_sdk':
+ '{"cleanup_cache": true, "sdk_version": "14a5294e"}',
+ 'validation':
+ 'docs'
+ }
+ })
assert isinstance(props['properties']['$flutter/osx_sdk'], dict)
assert props['properties']['validation'] == 'docs'
with api.step.nest("launch builds") as presentation:
@@ -36,8 +37,12 @@
for build in builds.values():
if build.build_proto.status != common_pb2.SUCCESS:
raise api.step.StepFailure("build %s failed" % build.build_id)
- api.shard_util_v2.archive_full_build(api.path['start_dir'].join('out', 'host_debug'), 'host_debug')
- api.shard_util_v2.download_full_builds(builds, api.path['cleanup'].join('out'))
+ api.shard_util_v2.archive_full_build(
+ api.path['start_dir'].join('out', 'host_debug'), 'host_debug'
+ )
+ api.shard_util_v2.download_full_builds(
+ builds, api.path['cleanup'].join('out')
+ )
with api.step.nest("launch builds") as presentation:
reqs = api.shard_util_v2.schedule_tests(test_configs, builds, presentation)
@@ -48,7 +53,9 @@
builder='ios_debug',
input_props={'task_name': 'mytask'},
output_props={
- 'cas_output_hash': {'web_tests': 'abc', 'ios_debug': 'bcd', 'full_build': '123'}
+ 'cas_output_hash': {
+ 'web_tests': 'abc', 'ios_debug': 'bcd', 'full_build': '123'
+ }
},
status='SUCCESS',
)
@@ -74,7 +81,9 @@
builder='ios_debug',
input_props={'task_name': 'mytask'},
output_props={
- 'cas_output_hash': {'web_tests': 'abc', 'ios_debug': 'bcd', 'full_build': '123'}
+ 'cas_output_hash': {
+ 'web_tests': 'abc', 'ios_debug': 'bcd', 'full_build': '123'
+ }
},
status='SUCCESS',
)
@@ -110,8 +119,7 @@
}
props_bb = {
'task_name': 'mytask', 'builds': [{
- 'name': 'ios_debug', 'gn': ['--ios'],
- 'dimensions': {'cpu': 'arm64'},
+ 'name': 'ios_debug', 'gn': ['--ios'], 'dimensions': {'cpu': 'arm64'},
'ninja': {'config': 'ios_debug',
'targets': []}, 'drone_dimensions': ['dimension1=abc'],
'generators': [{'name': 'generator1', 'script': 'script1.sh'}]
diff --git a/recipe_modules/shard_util_v2/test_api.py b/recipe_modules/shard_util_v2/test_api.py
index cbdaa75..2ef19be 100644
--- a/recipe_modules/shard_util_v2/test_api.py
+++ b/recipe_modules/shard_util_v2/test_api.py
@@ -12,7 +12,7 @@
class ShardUtilTestApi(recipe_test_api.RecipeTestApi):
-
+
def try_build_message(
self, builder, input_props=None, output_props=None, **kwargs
):
@@ -37,7 +37,8 @@
builder=msg.builder.builder,
build_id=msg.id,
build_name=builder,
- build_proto= msg)
+ build_proto=msg
+ )
return subbuild
def child_build_steps(
@@ -51,7 +52,11 @@
responses = []
for subbuild in subbuilds:
responses.append(
- dict(schedule_build=dict(id=subbuild.build_id, builder=subbuild.build_proto.builder))
+ dict(
+ schedule_build=dict(
+ id=subbuild.build_id, builder=subbuild.build_proto.builder
+ )
+ )
)
mock_schedule_data = self.m.buildbucket.simulated_schedule_output(
step_name="%s" % launch_step,
diff --git a/recipe_modules/signing/api.py b/recipe_modules/signing/api.py
index 5e80642..18727b5 100644
--- a/recipe_modules/signing/api.py
+++ b/recipe_modules/signing/api.py
@@ -71,12 +71,14 @@
def _codesign_environment(self, env, env_prefixes):
with self.m.step.nest('Setup codesign environment'):
secrets_dict = {
- 'FLUTTER_P12': 'flutter_p12.encrypted',
- 'FLUTTER_P12_PASSWORD': 'p12_password.encrypted',
- 'CODESIGN_TEAM_ID': 'codesign_team_id.encrypted',
+ 'FLUTTER_P12':
+ 'flutter_p12.encrypted', 'FLUTTER_P12_PASSWORD':
+ 'p12_password.encrypted', 'CODESIGN_TEAM_ID':
+ 'codesign_team_id.encrypted',
'CODESIGN_APP_SPECIFIC_PASSWORD':
'codesign_app_specific_password.encrypted',
- 'CODESIGN_APP_STORE_ID': 'codesign_app_store_id.encrypted'
+ 'CODESIGN_APP_STORE_ID':
+ 'codesign_app_store_id.encrypted'
}
self.m.kms.decrypt_secrets(env, secrets_dict)
env['CODESIGN_PATH'] = self.codesign_binary
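
A toy stand-in for the contract assumed of kms.decrypt_secrets above: each env var name maps to an encrypted blob whose plaintext ends up in env (the decrypt callable here is hypothetical):

def decrypt_secrets(env, secrets_dict, decrypt=lambda blob: 'decrypted:' + blob):
  # For each env var name, decrypt the named blob and export the plaintext.
  for var_name, encrypted_file in secrets_dict.items():
    env[var_name] = decrypt(encrypted_file)

env = {}
decrypt_secrets(env, {'FLUTTER_P12': 'flutter_p12.encrypted'})
assert env['FLUTTER_P12'].startswith('decrypted:')
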
diff --git a/recipe_modules/signing/examples/code_sign.py b/recipe_modules/signing/examples/code_sign.py
index f540951..4becc34 100644
--- a/recipe_modules/signing/examples/code_sign.py
+++ b/recipe_modules/signing/examples/code_sign.py
@@ -21,17 +21,14 @@
def GenTests(api):
yield api.test(
- 'non_mac',
- api.platform.name('linux'),
- api.properties(expected_result=False)
+ 'non_mac', api.platform.name('linux'),
+ api.properties(expected_result=False)
)
yield api.test(
- 'mac_require_signing',
- api.platform.name('mac'),
- api.properties(expected_result=True)
+ 'mac_require_signing', api.platform.name('mac'),
+ api.properties(expected_result=True)
)
yield api.test(
- 'no_signing_identity',
- api.platform.name('mac'),
- api.properties(expected_result=False)
+ 'no_signing_identity', api.platform.name('mac'),
+ api.properties(expected_result=False)
)
diff --git a/recipe_modules/signing/examples/requires_signing.py b/recipe_modules/signing/examples/requires_signing.py
index b0dbfcf..25e13d2 100644
--- a/recipe_modules/signing/examples/requires_signing.py
+++ b/recipe_modules/signing/examples/requires_signing.py
@@ -21,14 +21,25 @@
def _create_zip(api, include_entitlements=False):
with api.step.nest('Create test file'):
directory = api.path.mkdtemp()
- api.file.write_text('write file', directory.join('content', 'myfile.txt'), 'myfile')
+ api.file.write_text(
+ 'write file', directory.join('content', 'myfile.txt'), 'myfile'
+ )
if include_entitlements:
- api.file.write_text('write entitlements.txt', directory.join('content', 'entitlements.txt'), '')
- api.file.write_text('write without_entitlements.txt', directory.join('content', 'without_entitlements.txt'), '')
- api.zip.directory('create zip', directory.join('content'), directory.join('myzip.zip'))
+ api.file.write_text(
+ 'write entitlements.txt',
+ directory.join('content', 'entitlements.txt'), ''
+ )
+ api.file.write_text(
+ 'write without_entitlements.txt',
+ directory.join('content', 'without_entitlements.txt'), ''
+ )
+ api.zip.directory(
+ 'create zip', directory.join('content'), directory.join('myzip.zip')
+ )
yield directory.join('myzip.zip')
api.file.rmtree('Delete tmp folder', directory)
+
def RunSteps(api):
expected_result = api.properties.get('expected_result')
with _create_zip(api, expected_result) as zip_file_name:
@@ -38,24 +49,27 @@
def GenTests(api):
yield api.test(
- 'non_mac',
- api.platform.name('linux'),
- api.properties(expected_result=False),
+ 'non_mac',
+ api.platform.name('linux'),
+ api.properties(expected_result=False),
)
yield api.test(
- 'mac_require_signing_entitlements',
- api.platform.name('mac'),
- api.properties(expected_result=True),
- api.zip.namelist('Create test file.namelist', ['myfile.txt', 'entitlements.txt'])
+ 'mac_require_signing_entitlements', api.platform.name('mac'),
+ api.properties(expected_result=True),
+ api.zip.namelist(
+ 'Create test file.namelist', ['myfile.txt', 'entitlements.txt']
+ )
)
yield api.test(
- 'mac_require_signing_without_entitlements',
- api.platform.name('mac'),
- api.properties(expected_result=True),
- api.zip.namelist('Create test file.namelist', ['myfile.txt', 'without_entitlements.txt'])
+ 'mac_require_signing_without_entitlements', api.platform.name('mac'),
+ api.properties(expected_result=True),
+ api.zip.namelist(
+ 'Create test file.namelist',
+ ['myfile.txt', 'without_entitlements.txt']
+ )
)
yield api.test(
- 'mac_does_not_require_signing',
- api.platform.name('mac'),
- api.properties(expected_result=False),
+ 'mac_does_not_require_signing',
+ api.platform.name('mac'),
+ api.properties(expected_result=False),
)
diff --git a/recipe_modules/ssh/examples/full.py b/recipe_modules/ssh/examples/full.py
index 503bb27..9234325 100644
--- a/recipe_modules/ssh/examples/full.py
+++ b/recipe_modules/ssh/examples/full.py
@@ -24,6 +24,6 @@
api.path['cache'].join('builder/ssh/ssh_host_key.pub'),
api.path['cache'].join('builder/ssh/ssh_host_key'),
)
- )
+ )
yield api.test('ssh_paths_missing', status='FAILURE')
diff --git a/recipe_modules/status_check/api.py b/recipe_modules/status_check/api.py
index e314914..bc7e7cf 100644
--- a/recipe_modules/status_check/api.py
+++ b/recipe_modules/status_check/api.py
@@ -8,4 +8,4 @@
class StatusCheckApi(recipe_api.RecipeApi):
- pass
+ pass
diff --git a/recipe_modules/status_check/test_api.py b/recipe_modules/status_check/test_api.py
index b457789..13d09df 100644
--- a/recipe_modules/status_check/test_api.py
+++ b/recipe_modules/status_check/test_api.py
@@ -1,6 +1,7 @@
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
"""Allow tests to assert recipe success or failure.
TODO(crbug/1010715) Remove this and use functionality in core recipe
@@ -12,15 +13,16 @@
class StatusCheckTestApi(recipe_test_api.RecipeTestApi):
- def test(self, name, status="success"):
- if " " in name: # pragma: no cover
- raise Exception(
- f"Invalid recipe test name {name!r}. Test names should use underscores, not spaces. See http://go/fuchsia-recipe-docs#test-case-naming"
- )
- return super(StatusCheckTestApi, self).test(name) + self.status(status)
- def status(self, status="success"):
- """Returns step data to check status of recipe at end of test.
+ def test(self, name, status="success"):
+ if " " in name: # pragma: no cover
+ raise Exception(
+ f"Invalid recipe test name {name!r}. Test names should use underscores, not spaces. See http://go/fuchsia-recipe-docs#test-case-naming"
+ )
+ return super(StatusCheckTestApi, self).test(name) + self.status(status)
+
+ def status(self, status="success"):
+ """Returns step data to check status of recipe at end of test.
Args:
status: One of 'success' (default), 'failure', 'infra_failure', or
@@ -28,10 +30,10 @@
match this.
"""
- assertion_type = {
- "exception": post_process.StatusException,
- "failure": post_process.StatusFailure,
- "infra_failure": post_process.StatusException,
- "success": post_process.StatusSuccess,
- }[status]
- return self.post_process(assertion_type)
+ assertion_type = {
+ "exception": post_process.StatusException,
+ "failure": post_process.StatusFailure,
+ "infra_failure": post_process.StatusException,
+ "success": post_process.StatusSuccess,
+ }[status]
+ return self.post_process(assertion_type)
diff --git a/recipe_modules/status_check/tests/full.py b/recipe_modules/status_check/tests/full.py
index 8ddfb7d..e7711aa 100644
--- a/recipe_modules/status_check/tests/full.py
+++ b/recipe_modules/status_check/tests/full.py
@@ -11,8 +11,8 @@
def RunSteps(api):
- del api # Unused.
+ del api # Unused.
def GenTests(api):
- yield api.status_check.test("basic", status="success")
+ yield api.status_check.test("basic", status="success")
diff --git a/recipe_modules/status_reporting/__init__.py b/recipe_modules/status_reporting/__init__.py
index b37189a..f0997e7 100644
--- a/recipe_modules/status_reporting/__init__.py
+++ b/recipe_modules/status_reporting/__init__.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
DEPS = [
'fuchsia/gcloud',
'recipe_engine/file',
diff --git a/recipe_modules/status_reporting/api.py b/recipe_modules/status_reporting/api.py
index 7953045..5630eaa 100644
--- a/recipe_modules/status_reporting/api.py
+++ b/recipe_modules/status_reporting/api.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
from recipe_engine import recipe_api
from google.protobuf import json_format
@@ -24,9 +23,10 @@
return json_format.MessageToJson(build)
def publish_builds(
- self,
- subbuilds,
- topic='projects/flutter-dashboard/topics/luci-builds-prod'):
+ self,
+ subbuilds,
+ topic='projects/flutter-dashboard/topics/luci-builds-prod'
+ ):
"""Publish builds to a pubsub topic.
Args:
diff --git a/recipe_modules/status_reporting/examples/full.py b/recipe_modules/status_reporting/examples/full.py
index d7ccf56..6f74caf 100644
--- a/recipe_modules/status_reporting/examples/full.py
+++ b/recipe_modules/status_reporting/examples/full.py
@@ -8,18 +8,21 @@
from PB.go.chromium.org.luci.buildbucket.proto import builder_common as builder_pb2
from RECIPE_MODULES.flutter.shard_util_v2.api import SubbuildResult
+DEPS = ['flutter/status_reporting']
-DEPS = [
- 'flutter/status_reporting'
-]
def RunSteps(api):
- build = build_pb2.Build(
- builder=builder_pb2.BuilderID(project='flutter', bucket='try', builder='mybuild')
+ build = build_pb2.Build(
+ builder=builder_pb2
+ .BuilderID(project='flutter', bucket='try', builder='mybuild')
)
result = SubbuildResult(
- builder='mybuild', build_id=123, build_name='build_name',
- url='https://123', build_proto=build)
+ builder='mybuild',
+ build_id=123,
+ build_name='build_name',
+ url='https://123',
+ build_proto=build
+ )
api.status_reporting.publish_builds({'mybuild': result})
diff --git a/recipe_modules/subbuild/api.py b/recipe_modules/subbuild/api.py
index d29efc1..bbfdb01 100644
--- a/recipe_modules/subbuild/api.py
+++ b/recipe_modules/subbuild/api.py
@@ -53,36 +53,36 @@
@attr.s
class SubbuildResult:
- """Subbuild result metadata."""
+ """Subbuild result metadata."""
- builder = attr.ib(type=str)
- build_id = attr.ib(type=str, converter=str)
- url = attr.ib(type=str, default=None)
- build_proto = attr.ib(type=build_pb2.Build, default=None)
+ builder = attr.ib(type=str)
+ build_id = attr.ib(type=str, converter=str)
+ url = attr.ib(type=str, default=None)
+ build_proto = attr.ib(type=build_pb2.Build, default=None)
@attr.s
class _SplitBuilder:
- """Project/bucket/name triplet."""
+ """Project/bucket/name triplet."""
- project = attr.ib(type=str)
- bucket = attr.ib(type=str)
- name = attr.ib(type=str)
+ project = attr.ib(type=str)
+ bucket = attr.ib(type=str)
+ name = attr.ib(type=str)
class SubbuildApi(recipe_api.RecipeApi):
- """API for launching subbuilds and collecting the results."""
+ """API for launching subbuilds and collecting the results."""
- def launch(
- self,
- builder_names,
- presentation,
- extra_properties=None,
- set_swarming_parent_run_id=True,
- hide_in_gerrit=True,
- include_sub_invs=True,
- ):
- """Launches builds with buildbucket or led.
+ def launch(
+ self,
+ builder_names,
+ presentation,
+ extra_properties=None,
+ set_swarming_parent_run_id=True,
+ hide_in_gerrit=True,
+ include_sub_invs=True,
+ ):
+ """Launches builds with buildbucket or led.
If the current task was launched with led, then subbuilds will also be
launched with led.
@@ -103,135 +103,134 @@
launched_builds (dict): The launched_builds is a map from builder name
to the corresponding SubbuildResult.
"""
- parent_properties = self.m.properties.thaw()
- properties = {
- key: val
- for key, val in parent_properties.items()
- if key and key in PASS_THROUGH_PROPERTIES
- }
- if extra_properties:
- properties.update(extra_properties)
+ parent_properties = self.m.properties.thaw()
+ properties = {
+ key: val
+ for key, val in parent_properties.items()
+ if key and key in PASS_THROUGH_PROPERTIES
+ }
+ if extra_properties:
+ properties.update(extra_properties)
- # If this task was launched by led, we launch the child with led as well.
- # This lets us ensure that the parent and child use the same version of
- # the recipes code. That is a requirement for testing, as well as for
- # avoiding the need to do soft transitions when updating the interface
- # between the parent and child recipes.
- if self.m.led.launched_by_led:
- builds = self._launch_with_led(builder_names, properties)
- else:
- builds = self._launch_with_buildbucket(
- builder_names,
- properties,
- set_swarming_parent_run_id=set_swarming_parent_run_id,
- hide_in_gerrit=hide_in_gerrit,
- include_sub_invs=include_sub_invs,
- )
- for builder, build in builds.items():
- presentation.links[builder] = build.url
- return builds
+ # If this task was launched by led, we launch the child with led as well.
+ # This lets us ensure that the parent and child use the same version of
+ # the recipes code. That is a requirement for testing, as well as for
+ # avoiding the need to do soft transitions when updating the interface
+ # between the parent and child recipes.
+ if self.m.led.launched_by_led:
+ builds = self._launch_with_led(builder_names, properties)
+ else:
+ builds = self._launch_with_buildbucket(
+ builder_names,
+ properties,
+ set_swarming_parent_run_id=set_swarming_parent_run_id,
+ hide_in_gerrit=hide_in_gerrit,
+ include_sub_invs=include_sub_invs,
+ )
+ for builder, build in builds.items():
+ presentation.links[builder] = build.url
+ return builds
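
Condensed, the dispatch launch() implements, with plain callables standing in for the led/buildbucket recipe modules:

def launch(builder_names, launched_by_led, launch_with_led, launch_with_bb,
           presentation_links):
  # Parent and child must run the same recipes version, so a led parent
  # always launches led children.
  if launched_by_led:
    builds = launch_with_led(builder_names)
  else:
    builds = launch_with_bb(builder_names)
  for builder, build in builds.items():
    presentation_links[builder] = build['url']
  return builds

links = {}
launch(['b1'], True, lambda names: {b: {'url': 'https://x'} for b in names},
       None, links)
assert links['b1'] == 'https://x'
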
- def split_builder(self, builder_name):
- """Split a builder name into parts, filling in from current build."""
- parent = self.m.buildbucket.build.builder
+ def split_builder(self, builder_name):
+ """Split a builder name into parts, filling in from current build."""
+ parent = self.m.buildbucket.build.builder
- *prefixes, name = builder_name.split("/")
- assert len(prefixes) <= 2, f"bad builder_name {builder_name}"
- if len(prefixes) == 2:
- project, bucket = prefixes
- elif len(prefixes) == 1:
- project = parent.project
- bucket = prefixes[0]
- else:
- project = parent.project
- bucket = parent.bucket
+ *prefixes, name = builder_name.split("/")
+ assert len(prefixes) <= 2, f"bad builder_name {builder_name}"
+ if len(prefixes) == 2:
+ project, bucket = prefixes
+ elif len(prefixes) == 1:
+ project = parent.project
+ bucket = prefixes[0]
+ else:
+ project = parent.project
+ bucket = parent.bucket
- return _SplitBuilder(project, bucket, name)
+ return _SplitBuilder(project, bucket, name)
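
split_builder is a pure function at heart; an equivalent standalone version with the three accepted name shapes exercised (the parent defaults are sample values):

def split_builder(builder_name, parent_project='fuchsia', parent_bucket='try'):
  # Up to two leading path components override the parent's project/bucket.
  *prefixes, name = builder_name.split('/')
  assert len(prefixes) <= 2, f'bad builder_name {builder_name}'
  if len(prefixes) == 2:
    project, bucket = prefixes
  elif len(prefixes) == 1:
    project, bucket = parent_project, prefixes[0]
  else:
    project, bucket = parent_project, parent_bucket
  return project, bucket, name

assert split_builder('flutter/prod/builder') == ('flutter', 'prod', 'builder')
assert split_builder('ci/builder') == ('fuchsia', 'ci', 'builder')
assert split_builder('builder') == ('fuchsia', 'try', 'builder')
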
- def _launch_with_led(self, builder_names, properties):
- edit_args = []
- for k, v in sorted(properties.items()):
- edit_args.extend(["-p", f"{k}={self.m.json.dumps(v)}"])
- edit_cr_cl_arg = None
- bb_input = self.m.buildbucket.build_input
- if bb_input.gerrit_changes:
- gerrit_change = bb_input.gerrit_changes[0]
- edit_cr_cl_arg = f"https://{gerrit_change.host}/c/{gerrit_change.project}/+/{int(gerrit_change.change)}"
+ def _launch_with_led(self, builder_names, properties):
+ edit_args = []
+ for k, v in sorted(properties.items()):
+ edit_args.extend(["-p", f"{k}={self.m.json.dumps(v)}"])
+ edit_cr_cl_arg = None
+ bb_input = self.m.buildbucket.build_input
+ if bb_input.gerrit_changes:
+ gerrit_change = bb_input.gerrit_changes[0]
+ edit_cr_cl_arg = f"https://{gerrit_change.host}/c/{gerrit_change.project}/+/{int(gerrit_change.change)}"
- builds = {}
- for builder_name in builder_names:
- builder = self.split_builder(builder_name)
- led_data = self.m.led(
- "get-builder",
- # By default, led reduces the priority of tasks from their
- # values in buildbucket which we do not want.
- "-adjust-priority",
- "0",
- f"{builder.project}/{builder.bucket}/{builder.name}",
- )
- led_data = led_data.then("edit", *edit_args)
- if edit_cr_cl_arg:
- led_data = led_data.then("edit-cr-cl", edit_cr_cl_arg)
- led_data = self.m.led.inject_input_recipes(led_data)
- launch_res = led_data.then("launch", "-modernize", "-real-build")
- task_id = launch_res.launch_result.task_id or launch_res.launch_result.build_id
- build_url_swarming = 'https://ci.chromium.org/swarming/task/%s?server=%s' % (
- task_id,
- launch_res.launch_result.swarming_hostname,
- )
- build_url_bb = 'https://%s/build/%s' % (
- launch_res.launch_result.buildbucket_hostname,
- task_id
- )
- build_url = build_url_swarming if launch_res.launch_result.task_id else build_url_bb
- builds[builder_name] = SubbuildResult(
- builder=builder_name, build_id=task_id, url=build_url
- )
- return builds
+ builds = {}
+ for builder_name in builder_names:
+ builder = self.split_builder(builder_name)
+ led_data = self.m.led(
+ "get-builder",
+ # By default, led reduces the priority of tasks from their
+ # values in buildbucket which we do not want.
+ "-adjust-priority",
+ "0",
+ f"{builder.project}/{builder.bucket}/{builder.name}",
+ )
+ led_data = led_data.then("edit", *edit_args)
+ if edit_cr_cl_arg:
+ led_data = led_data.then("edit-cr-cl", edit_cr_cl_arg)
+ led_data = self.m.led.inject_input_recipes(led_data)
+ launch_res = led_data.then("launch", "-modernize", "-real-build")
+ task_id = launch_res.launch_result.task_id or launch_res.launch_result.build_id
+ build_url_swarming = 'https://ci.chromium.org/swarming/task/%s?server=%s' % (
+ task_id,
+ launch_res.launch_result.swarming_hostname,
+ )
+ build_url_bb = 'https://%s/build/%s' % (
+ launch_res.launch_result.buildbucket_hostname, task_id
+ )
+ build_url = build_url_swarming if launch_res.launch_result.task_id else build_url_bb
+ builds[builder_name] = SubbuildResult(
+ builder=builder_name, build_id=task_id, url=build_url
+ )
+ return builds
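
A standalone version of the URL selection above: led -real-build launches may only populate build_id, in which case the buildbucket page is linked instead of the swarming task page (hosts below are sample values):

def build_url(task_id, build_id, swarming_host, bb_host):
  # Prefer the swarming task page whenever a task_id exists.
  chosen_id = task_id or build_id
  if task_id:
    return 'https://ci.chromium.org/swarming/task/%s?server=%s' % (
        chosen_id, swarming_host)
  return 'https://%s/build/%s' % (bb_host, chosen_id)

assert (build_url(None, 123, 'sw.example', 'cr-buildbucket.appspot.com') ==
        'https://cr-buildbucket.appspot.com/build/123')
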
- def _launch_with_buildbucket(
- self,
- builder_names,
- properties,
- set_swarming_parent_run_id,
- hide_in_gerrit,
- include_sub_invs,
- ):
- reqs = []
- swarming_parent_run_id = (
- self.m.swarming.task_id if set_swarming_parent_run_id else None
- )
- bb_tags = {"skip-retry-in-gerrit": "subbuild"}
- if hide_in_gerrit:
- bb_tags["hide-in-gerrit"] = "subbuild"
- for builder_name in builder_names:
- builder = self.split_builder(builder_name)
- reqs.append(
- self.m.buildbucket.schedule_request(
- project=builder.project,
- bucket=builder.bucket,
- builder=builder.name,
- properties=properties,
- swarming_parent_run_id=swarming_parent_run_id,
- priority=None, # Leave unset to avoid overriding priority from configs.
- tags=self.m.buildbucket.tags(**bb_tags),
- )
- )
+ def _launch_with_buildbucket(
+ self,
+ builder_names,
+ properties,
+ set_swarming_parent_run_id,
+ hide_in_gerrit,
+ include_sub_invs,
+ ):
+ reqs = []
+ swarming_parent_run_id = (
+ self.m.swarming.task_id if set_swarming_parent_run_id else None
+ )
+ bb_tags = {"skip-retry-in-gerrit": "subbuild"}
+ if hide_in_gerrit:
+ bb_tags["hide-in-gerrit"] = "subbuild"
+ for builder_name in builder_names:
+ builder = self.split_builder(builder_name)
+ reqs.append(
+ self.m.buildbucket.schedule_request(
+ project=builder.project,
+ bucket=builder.bucket,
+ builder=builder.name,
+ properties=properties,
+ swarming_parent_run_id=swarming_parent_run_id,
+ priority=None, # Leave unset to avoid overriding priority from configs.
+ tags=self.m.buildbucket.tags(**bb_tags),
+ )
+ )
- scheduled_builds = self.m.buildbucket.schedule(
- reqs, step_name="schedule", include_sub_invs=include_sub_invs
- )
+ scheduled_builds = self.m.buildbucket.schedule(
+ reqs, step_name="schedule", include_sub_invs=include_sub_invs
+ )
- builds = {}
- for build in scheduled_builds:
- build_url = f"https://ci.chromium.org/b/{build.id}"
- builds[build.builder.builder] = SubbuildResult(
- builder=build.builder.builder, build_id=build.id, url=build_url
- )
- return builds
+ builds = {}
+ for build in scheduled_builds:
+ build_url = f"https://ci.chromium.org/b/{build.id}"
+ builds[build.builder.builder] = SubbuildResult(
+ builder=build.builder.builder, build_id=build.id, url=build_url
+ )
+ return builds
- def collect(self, build_ids, launched_by_led=None, extra_fields=frozenset()):
- """Collects builds with the provided build_ids.
+ def collect(self, build_ids, launched_by_led=None, extra_fields=frozenset()):
+ """Collects builds with the provided build_ids.
Args:
build_ids (list(str)): The list of build ids to collect results for.
@@ -245,97 +244,98 @@
Returns:
A map from build IDs to the corresponding SubbuildResult.
"""
- if launched_by_led is None:
- launched_by_led = self.m.led.launched_by_led
- if launched_by_led:
- builds = self._collect_from_led(build_ids)
- else:
- builds = self._collect_from_buildbucket(build_ids, extra_fields)
- return collections.OrderedDict(
- sorted(builds.items(), key=lambda item: (item[1].builder, item[0]))
- )
+ if launched_by_led is None:
+ launched_by_led = self.m.led.launched_by_led
+ if launched_by_led:
+ builds = self._collect_from_led(build_ids)
+ else:
+ builds = self._collect_from_buildbucket(build_ids, extra_fields)
+ return collections.OrderedDict(
+ sorted(builds.items(), key=lambda item: (item[1].builder, item[0]))
+ )
- def get_property(self, build_proto, property_name):
- """Retrieve an output property from a subbuild's Build proto.
+ def get_property(self, build_proto, property_name):
+ """Retrieve an output property from a subbuild's Build proto.
Ensures a clear and unified missing property error message across all
builders that use this recipe module.
"""
- try:
- return build_proto.output.properties[property_name]
- except ValueError:
- raise self.m.step.InfraFailure(
- f"Subbuild did not set the {property_name!r} output property"
- )
+ try:
+ return build_proto.output.properties[property_name]
+ except ValueError:
+ raise self.m.step.InfraFailure(
+ f"Subbuild did not set the {property_name!r} output property"
+ )
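
A dict-backed sketch of the contract get_property normalizes; proto property maps raise ValueError where a plain dict raises KeyError, and InfraFailure is replaced with a plain exception here:

def get_property(output_properties, property_name):
  try:
    return output_properties[property_name]
  except KeyError:
    raise RuntimeError(
        f'Subbuild did not set the {property_name!r} output property')

assert get_property({'test_orchestration_inputs_hash': 'abc'},
                    'test_orchestration_inputs_hash') == 'abc'
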
- def _collect_from_led(self, task_ids):
- swarming_results = self.m.swarming.collect(
- "collect", task_ids, output_dir=self.m.path["cleanup"]
- )
- builds = {}
- for result in swarming_results:
- task_id = result.id
- # Led launch ensures this file is present in the task root dir.
- build_proto_path = result.output_dir.join("build.proto.json")
- build_proto = self.m.file.read_proto(
- "read build.proto.json", build_proto_path, build_pb2.Build, "JSONPB"
- )
- builds[task_id] = SubbuildResult(
- builder=build_proto.builder.builder,
- build_id=task_id,
- build_proto=build_proto,
- )
- return builds
+ def _collect_from_led(self, task_ids):
+ swarming_results = self.m.swarming.collect(
+ "collect", task_ids, output_dir=self.m.path["cleanup"]
+ )
+ builds = {}
+ for result in swarming_results:
+ task_id = result.id
+ # Led launch ensures this file is present in the task root dir.
+ build_proto_path = result.output_dir.join("build.proto.json")
+ build_proto = self.m.file.read_proto(
+ "read build.proto.json", build_proto_path, build_pb2.Build, "JSONPB"
+ )
+ builds[task_id] = SubbuildResult(
+ builder=build_proto.builder.builder,
+ build_id=task_id,
+ build_proto=build_proto,
+ )
+ return builds
- def _collect_from_buildbucket(self, build_ids, extra_fields):
- bb_fields = self.m.buildbucket.DEFAULT_FIELDS.union(
- {"infra.swarming.task_id", "summary_markdown"}
- ).union(extra_fields)
+ def _collect_from_buildbucket(self, build_ids, extra_fields):
+ bb_fields = self.m.buildbucket.DEFAULT_FIELDS.union({
+ "infra.swarming.task_id", "summary_markdown"
+ }).union(extra_fields)
- builds = self.m.buildbucket.collect_builds(
- [int(build_id) for build_id in build_ids],
- interval=20, # Lower from default of 60 b/c we're impatient.
- timeout=COLLECT_TIMEOUT,
- step_name="collect",
- fields=bb_fields,
- )
+ builds = self.m.buildbucket.collect_builds(
+ [int(build_id) for build_id in build_ids],
+ interval=20, # Lower from default of 60 b/c we're impatient.
+ timeout=COLLECT_TIMEOUT,
+ step_name="collect",
+ fields=bb_fields,
+ )
- failed_builds = [b for b in builds.values() if b.status != common_pb2.SUCCESS]
- if failed_builds:
- task_ids = [b.infra.swarming.task_id for b in failed_builds]
- # Make sure task IDs are non-empty.
- assert all(task_ids), task_ids
+ failed_builds = [
+ b for b in builds.values() if b.status != common_pb2.SUCCESS
+ ]
+ if failed_builds:
+ task_ids = [b.infra.swarming.task_id for b in failed_builds]
+ # Make sure task IDs are non-empty.
+ assert all(task_ids), task_ids
- # Wait for the underlying Swarming tasks to complete. The Swarming
- # task for a Buildbucket build can take significantly longer to
- # complete than the build itself due to post-processing outside the
- # scope of the build's recipe (e.g. cache pruning). If the parent
- # build and its Swarming task both complete before the subbuild's
- # Swarming task finishes post-processing, then the subbuild's
- # Swarming task will be killed by Swarming due to the parent being
- # complete.
- #
- # That is actually working as intended. However, it's confusing for
- # a subbuild to be marked as killed when the recipe actually exited
- # normally; "killed" usually only happens for CQ builds, when a
- # build is canceled by CQ because a new patchset of the triggering
- # CL is uploaded. So it's convenient to have dashboards and queries
- # ignore "killed" tasks. We use this workaround to ensure that
- # failed subbuilds with long post-processing steps have time to
- # complete and exit cleanly with a plain old "COMPLETED (FAILURE)"
- # status.
- #
- # We only do this if the subbuild failed as a latency optimization.
- # If all subbuilds passed, the parent will go on to do some more
- # steps using the results of the subbuilds, leaving time for the
- # subbuilds' tasks to complete asynchronously, so we don't want to
- # block here while the tasks complete.
- self.m.swarming.collect(
- f"wait for {pluralize('task', task_ids)} to complete", task_ids
- )
- return {
- str(build.id): SubbuildResult(
- builder=build.builder.builder, build_id=build.id, build_proto=build
- )
- for build in builds.values()
- }
+ # Wait for the underlying Swarming tasks to complete. The Swarming
+ # task for a Buildbucket build can take significantly longer to
+ # complete than the build itself due to post-processing outside the
+ # scope of the build's recipe (e.g. cache pruning). If the parent
+ # build and its Swarming task both complete before the subbuild's
+ # Swarming task finishes post-processing, then the subbuild's
+ # Swarming task will be killed by Swarming due to the parent being
+ # complete.
+ #
+ # That is actually working as intended. However, it's confusing for
+ # a subbuild to be marked as killed when the recipe actually exited
+ # normally; "killed" usually only happens for CQ builds, when a
+ # build is canceled by CQ because a new patchset of the triggering
+ # CL is uploaded. So it's convenient to have dashboards and queries
+ # ignore "killed" tasks. We use this workaround to ensure that
+ # failed subbuilds with long post-processing steps have time to
+ # complete and exit cleanly with a plain old "COMPLETED (FAILURE)"
+ # status.
+ #
+ # We only do this if the subbuild failed as a latency optimization.
+ # If all subbuilds passed, the parent will go on to do some more
+ # steps using the results of the subbuilds, leaving time for the
+ # subbuilds' tasks to complete asynchronously, so we don't want to
+ # block here while the tasks complete.
+ self.m.swarming.collect(
+ f"wait for {pluralize('task', task_ids)} to complete", task_ids
+ )
+ return {
+ str(build.id): SubbuildResult(
+ builder=build.builder.builder, build_id=build.id, build_proto=build
+ ) for build in builds.values()
+ }
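
Condensed, the collect flow the long comment above justifies: block on the underlying Swarming tasks only when something failed, since on success the parent keeps working while tasks finish post-processing (statuses and fields below are simplified stand-ins for the build protos):

def collect_builds(builds, swarming_collect):
  # Only wait for Swarming when a subbuild failed.
  failed = [b for b in builds if b['status'] != 'SUCCESS']
  if failed:
    task_ids = [b['task_id'] for b in failed]
    assert all(task_ids), task_ids
    swarming_collect(task_ids)
  return {b['id']: b for b in builds}

waited = []
collect_builds([{'id': '1', 'status': 'FAILURE', 'task_id': 't1'}],
               waited.extend)
assert waited == ['t1']
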
diff --git a/recipe_modules/subbuild/test_api.py b/recipe_modules/subbuild/test_api.py
index e228165..624afc7 100644
--- a/recipe_modules/subbuild/test_api.py
+++ b/recipe_modules/subbuild/test_api.py
@@ -10,8 +10,11 @@
class SubbuildTestApi(recipe_test_api.RecipeTestApi):
- def ci_build_message(self, builder, input_props=None, output_props=None, **kwargs):
- """Generates a CI Buildbucket Build message.
+
+ def ci_build_message(
+ self, builder, input_props=None, output_props=None, **kwargs
+ ):
+ """Generates a CI Buildbucket Build message.
Args:
builder (str): The builder name.
@@ -21,17 +24,19 @@
See BuildBucketTestApi.ci_build_message for full parameter documentation.
"""
- project = kwargs.pop("project", "fuchsia")
- msg = self.m.buildbucket.ci_build_message(
- builder=builder, project=project, **kwargs
- )
- msg.infra.swarming.task_id = "abc123"
- msg.input.properties.update(input_props if input_props else {})
- msg.output.properties.update(output_props if output_props else {})
- return msg
+ project = kwargs.pop("project", "fuchsia")
+ msg = self.m.buildbucket.ci_build_message(
+ builder=builder, project=project, **kwargs
+ )
+ msg.infra.swarming.task_id = "abc123"
+ msg.input.properties.update(input_props if input_props else {})
+ msg.output.properties.update(output_props if output_props else {})
+ return msg
- def try_build_message(self, builder, input_props=None, output_props=None, **kwargs):
- """Generates a try Buildbucket Build message.
+ def try_build_message(
+ self, builder, input_props=None, output_props=None, **kwargs
+ ):
+ """Generates a try Buildbucket Build message.
Args:
builder (str): The builder name.
@@ -41,86 +46,90 @@
See BuildBucketTestApi.try_build_message for full parameter documentation.
"""
- project = kwargs.pop("project", "fuchsia")
- msg = self.m.buildbucket.try_build_message(
- builder=builder, project=project, **kwargs
- )
- msg.infra.swarming.task_id = "abc123"
- msg.input.properties.update(input_props if input_props else {})
- msg.output.properties.update(output_props if output_props else {})
- return msg
+ project = kwargs.pop("project", "fuchsia")
+ msg = self.m.buildbucket.try_build_message(
+ builder=builder, project=project, **kwargs
+ )
+ msg.infra.swarming.task_id = "abc123"
+ msg.input.properties.update(input_props if input_props else {})
+ msg.output.properties.update(output_props if output_props else {})
+ return msg
- def child_build_steps(
- self, builds, launch_step="build", collect_step="build", collect_attempt=1
- ):
- """Generates step data to schedule and collect from child builds.
+ def child_build_steps(
+ self,
+ builds,
+ launch_step="build",
+ collect_step="build",
+ collect_attempt=1
+ ):
+ """Generates step data to schedule and collect from child builds.
Args:
builds (list(build_pb2.Build)): The builds to schedule and collect from.
"""
- responses = []
- for build in builds:
- responses.append(
- dict(schedule_build=dict(id=build.id, builder=build.builder))
- )
- mock_schedule_data = self.m.buildbucket.simulated_schedule_output(
- step_name=f"{launch_step}.schedule",
- batch_response=builds_service_pb2.BatchResponse(responses=responses),
- )
+ responses = []
+ for build in builds:
+ responses.append(
+ dict(schedule_build=dict(id=build.id, builder=build.builder))
+ )
+ mock_schedule_data = self.m.buildbucket.simulated_schedule_output(
+ step_name=f"{launch_step}.schedule",
+ batch_response=builds_service_pb2.BatchResponse(responses=responses),
+ )
- index = "" if collect_attempt <= 1 else (f" ({collect_attempt})")
- mock_collect_data = self.m.buildbucket.simulated_collect_output(
- step_name=f"{collect_step}.collect{index}",
- builds=builds,
- )
- return mock_schedule_data + mock_collect_data
+ index = "" if collect_attempt <= 1 else (f" ({collect_attempt})")
+ mock_collect_data = self.m.buildbucket.simulated_collect_output(
+ step_name=f"{collect_step}.collect{index}",
+ builds=builds,
+ )
+ return mock_schedule_data + mock_collect_data
- def child_led_steps(
- self,
- builds,
- collect_step="build",
- ):
- """Generates step data to schedule and collect from child builds.
+ def child_led_steps(
+ self,
+ builds,
+ collect_step="build",
+ ):
+ """Generates step data to schedule and collect from child builds.
Args:
builds (list(build_pb2.Build)): The builds to schedule and collect from.
"""
- step_data = []
- task_results = []
- i = 0
- for build in builds:
- i += 1
- suffix = ""
- if i > 1:
- suffix = f" ({int(i)})"
+ step_data = []
+ task_results = []
+ i = 0
+ for build in builds:
+ i += 1
+ suffix = ""
+ if i > 1:
+ suffix = f" ({int(i)})"
- task_id = f"fake-task-id-{int(i)}"
+ task_id = f"fake-task-id-{int(i)}"
- # led launch mock will take ....infra.swarming.task_id as this
- # build's launched swarming ID.
- jd = job_pb2.Definition()
- jd.buildbucket.bbagent_args.build.CopyFrom(build)
- jd.buildbucket.bbagent_args.build.infra.swarming.task_id = task_id
- step_data.append(
- self.m.led.mock_get_builder(
- jd,
- build.builder.project,
- build.builder.bucket,
- build.builder.builder,
- )
- )
- task_results.append(
- self.m.swarming.task_result(id=task_id, name=build.builder.builder)
- )
- step_data.append(
- self.step_data(
- f"{collect_step}.read build.proto.json{suffix}",
- self.m.file.read_proto(build),
- )
- )
- ret = self.step_data(
- f"{collect_step}.collect", self.m.swarming.collect(task_results)
- )
- for s in step_data:
- ret += s
- return ret
+ # led launch mock will take ....infra.swarming.task_id as this
+ # build's launched swarming ID.
+ jd = job_pb2.Definition()
+ jd.buildbucket.bbagent_args.build.CopyFrom(build)
+ jd.buildbucket.bbagent_args.build.infra.swarming.task_id = task_id
+ step_data.append(
+ self.m.led.mock_get_builder(
+ jd,
+ build.builder.project,
+ build.builder.bucket,
+ build.builder.builder,
+ )
+ )
+ task_results.append(
+ self.m.swarming.task_result(id=task_id, name=build.builder.builder)
+ )
+ step_data.append(
+ self.step_data(
+ f"{collect_step}.read build.proto.json{suffix}",
+ self.m.file.read_proto(build),
+ )
+ )
+ ret = self.step_data(
+ f"{collect_step}.collect", self.m.swarming.collect(task_results)
+ )
+ for s in step_data:
+ ret += s
+ return ret
diff --git a/recipe_modules/subbuild/tests/full.py b/recipe_modules/subbuild/tests/full.py
index 912b3ea..5bade05 100644
--- a/recipe_modules/subbuild/tests/full.py
+++ b/recipe_modules/subbuild/tests/full.py
@@ -17,153 +17,146 @@
]
PROPERTIES = {
- "builder_names": Property(
- kind=List(str), help="The names of the builders to launch"
- ),
- "extra_properties": Property(
- kind=dict,
- help="The extra properties to launch the subbuilds with",
- default=None,
- ),
+ "builder_names":
+ Property(kind=List(str), help="The names of the builders to launch"),
+ "extra_properties":
+ Property(
+ kind=dict,
+ help="The extra properties to launch the subbuilds with",
+ default=None,
+ ),
}
def RunSteps(api, builder_names, extra_properties):
- with api.step.nest("launch builds") as presentation:
- builds = api.subbuild.launch(
- builder_names, presentation, extra_properties=extra_properties
- )
- with api.step.nest("collect builds", status="last"):
- builds = api.subbuild.collect([build.build_id for build in builds.values()])
- for build in builds.values():
- if build.build_proto.status != common_pb2.SUCCESS:
- raise api.step.StepFailure(f"build {build.build_id} failed")
- assert api.subbuild.get_property(
- build.build_proto, "test_orchestration_inputs_hash"
- )
+ with api.step.nest("launch builds") as presentation:
+ builds = api.subbuild.launch(
+ builder_names, presentation, extra_properties=extra_properties
+ )
+ with api.step.nest("collect builds", status="last"):
+ builds = api.subbuild.collect([build.build_id for build in builds.values()])
+ for build in builds.values():
+ if build.build_proto.status != common_pb2.SUCCESS:
+ raise api.step.StepFailure(f"build {build.build_id} failed")
+ assert api.subbuild.get_property(
+ build.build_proto, "test_orchestration_inputs_hash"
+ )
def GenTests(api):
- ci_subbuild1 = api.subbuild.ci_build_message(
- build_id=8945511751514863184,
- builder="builder-subbuild1",
- output_props={"test_orchestration_inputs_hash": "abc"},
- status="SUCCESS",
- )
- ci_subbuild2 = api.subbuild.ci_build_message(
- build_id=8945511751514863185,
- builder="builder-subbuild2",
- output_props={"test_orchestration_inputs_hash": "abc"},
- status="SUCCESS",
- )
- try_subbuild1 = api.subbuild.try_build_message(
- build_id=8945511751514863186,
- builder="builder-subbuild1",
- output_props={"test_orchestration_inputs_hash": "abc"},
- status="SUCCESS",
- )
- try_subbuild2 = api.subbuild.try_build_message(
- build_id=8945511751514863187,
- builder="builder-subbuild2",
- output_props={"test_orchestration_inputs_hash": "abc"},
- status="SUCCESS",
- )
- subbuild_missing_property = api.subbuild.try_build_message(
- build_id=8945511751514863187,
- builder="builder-subbuild2",
- output_props={},
- status="SUCCESS",
- )
- failed_subbuild = api.subbuild.try_build_message(
- build_id=8945511751514863187,
- builder="builder-subbuild2",
- status="FAILURE",
- )
+ ci_subbuild1 = api.subbuild.ci_build_message(
+ build_id=8945511751514863184,
+ builder="builder-subbuild1",
+ output_props={"test_orchestration_inputs_hash": "abc"},
+ status="SUCCESS",
+ )
+ ci_subbuild2 = api.subbuild.ci_build_message(
+ build_id=8945511751514863185,
+ builder="builder-subbuild2",
+ output_props={"test_orchestration_inputs_hash": "abc"},
+ status="SUCCESS",
+ )
+ try_subbuild1 = api.subbuild.try_build_message(
+ build_id=8945511751514863186,
+ builder="builder-subbuild1",
+ output_props={"test_orchestration_inputs_hash": "abc"},
+ status="SUCCESS",
+ )
+ try_subbuild2 = api.subbuild.try_build_message(
+ build_id=8945511751514863187,
+ builder="builder-subbuild2",
+ output_props={"test_orchestration_inputs_hash": "abc"},
+ status="SUCCESS",
+ )
+ subbuild_missing_property = api.subbuild.try_build_message(
+ build_id=8945511751514863187,
+ builder="builder-subbuild2",
+ output_props={},
+ status="SUCCESS",
+ )
+ failed_subbuild = api.subbuild.try_build_message(
+ build_id=8945511751514863187,
+ builder="builder-subbuild2",
+ status="FAILURE",
+ )
- def properties(project=None, bucket=None, **kwargs):
- if project:
- assert bucket
- project = f"{project}/" if project else ""
- bucket = f"{bucket}/" if bucket else ""
- props = dict(
- builder_names=[
- f"{project}{bucket}builder-subbuild1",
- "builder-subbuild2",
- ],
- extra_properties={
- "parent_id": "parentid",
- # This should be passed through from the parent to the subbuild.
- "integration_base_revision": "abc123",
- },
- )
- props.update(**kwargs)
- return api.properties(**props)
-
- # Use different sets of options for different cases so we get coverage of
- # the logic to split the builder name without adding more tests.
-
- def ci_properties(**kwargs):
- return properties(project="fuchsia", bucket="ci", **kwargs)
-
- def try_properties(**kwargs):
- return properties(bucket="try", **kwargs)
-
- yield (
- api.buildbucket_util.test("launch_builds_ci")
- + ci_properties()
- + api.subbuild.child_build_steps(
- builds=[ci_subbuild1, ci_subbuild2],
- launch_step="launch builds",
- collect_step="collect builds",
- )
+ def properties(project=None, bucket=None, **kwargs):
+ if project:
+ assert bucket
+ project = f"{project}/" if project else ""
+ bucket = f"{bucket}/" if bucket else ""
+ props = dict(
+ builder_names=[
+ f"{project}{bucket}builder-subbuild1",
+ "builder-subbuild2",
+ ],
+ extra_properties={
+ "parent_id": "parentid",
+ # This should be passed through from the parent to the subbuild.
+ "integration_base_revision": "abc123",
+ },
)
+ props.update(**kwargs)
+ return api.properties(**props)
- yield (
- api.buildbucket_util.test("missing_property", status="INFRA_FAILURE")
- + properties()
- + api.subbuild.child_build_steps(
- builds=[subbuild_missing_property],
- launch_step="launch builds",
- collect_step="collect builds",
- )
- )
+ # Use different sets of options for different cases so we get coverage of
+ # the logic to split the builder name without adding more tests.
- yield (
- api.buildbucket_util.test("launch_builds_with_led_ci")
- + ci_properties(
- **{
- "$recipe_engine/led": LedInputProperties(
- led_run_id="led/user_example.com/abc123",
- ),
- }
- )
- + api.subbuild.child_led_steps(
- builds=[ci_subbuild1, ci_subbuild2],
- collect_step="collect builds",
- )
- )
+ def ci_properties(**kwargs):
+ return properties(project="fuchsia", bucket="ci", **kwargs)
- yield (
- api.buildbucket_util.test("launch_builds_with_led_cq", tryjob=True)
- + try_properties(
- **{
- "$recipe_engine/led": LedInputProperties(
- led_run_id="led/user_example.com/abc123",
- ),
- }
- )
- + api.subbuild.child_led_steps(
- builds=[try_subbuild1, try_subbuild2],
- collect_step="collect builds",
- )
- )
+ def try_properties(**kwargs):
+ return properties(bucket="try", **kwargs)
- yield (
- api.buildbucket_util.test("failed_subbuild", tryjob=True, status="FAILURE")
- + properties()
- + api.subbuild.child_build_steps(
- builds=[failed_subbuild],
- launch_step="launch builds",
- collect_step="collect builds",
- )
- )
+ yield (
+ api.buildbucket_util.test("launch_builds_ci") + ci_properties() +
+ api.subbuild.child_build_steps(
+ builds=[ci_subbuild1, ci_subbuild2],
+ launch_step="launch builds",
+ collect_step="collect builds",
+ )
+ )
+
+ yield (
+ api.buildbucket_util.test("missing_property", status="INFRA_FAILURE") +
+ properties() + api.subbuild.child_build_steps(
+ builds=[subbuild_missing_property],
+ launch_step="launch builds",
+ collect_step="collect builds",
+ )
+ )
+
+ yield (
+ api.buildbucket_util.test("launch_builds_with_led_ci") + ci_properties(
+ **{
+ "$recipe_engine/led":
+ LedInputProperties(led_run_id="led/user_example.com/abc123",),
+ }
+ ) + api.subbuild.child_led_steps(
+ builds=[ci_subbuild1, ci_subbuild2],
+ collect_step="collect builds",
+ )
+ )
+
+ yield (
+ api.buildbucket_util.test("launch_builds_with_led_cq", tryjob=True) +
+ try_properties(
+ **{
+ "$recipe_engine/led":
+ LedInputProperties(led_run_id="led/user_example.com/abc123",),
+ }
+ ) + api.subbuild.child_led_steps(
+ builds=[try_subbuild1, try_subbuild2],
+ collect_step="collect builds",
+ )
+ )
+
+ yield (
+ api.buildbucket_util
+ .test("failed_subbuild", tryjob=True, status="FAILURE") + properties() +
+ api.subbuild.child_build_steps(
+ builds=[failed_subbuild],
+ launch_step="launch builds",
+ collect_step="collect builds",
+ )
+ )
diff --git a/recipe_modules/swarming_retry/api.py b/recipe_modules/swarming_retry/api.py
index 26fc854..7c706a1 100644
--- a/recipe_modules/swarming_retry/api.py
+++ b/recipe_modules/swarming_retry/api.py
@@ -1,6 +1,7 @@
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
"""Launch and retry swarming jobs until they pass or we hit max attempts."""
import itertools
@@ -16,223 +17,220 @@
@attr.s
class Attempt:
- """References a specific attempt of a task."""
+ """References a specific attempt of a task."""
- task_id = attr.ib(type=str)
- index = attr.ib(type=int, default=None) # Number of prior attempts.
- host = attr.ib(type=str, default=None)
- task_ui_link = attr.ib(type=str, default=None)
- # api.swarming.TaskResult from api.swarming.collect() call.
- result = attr.ib(default=None)
- # This attribute should be set by overrides of Task.process_result(). It
- # indicates that even though at the swarming level the task may have
-    # passed, something failed inside that larger task.
- failure_reason = attr.ib(type=str, default="")
- has_flakes = attr.ib(type=bool, default=False)
- task_outputs_link = attr.ib(type=str, default=None)
- logs = attr.ib(type=dict, default=attr.Factory(dict))
+ task_id = attr.ib(type=str)
+ index = attr.ib(type=int, default=None) # Number of prior attempts.
+ host = attr.ib(type=str, default=None)
+ task_ui_link = attr.ib(type=str, default=None)
+ # api.swarming.TaskResult from api.swarming.collect() call.
+ result = attr.ib(default=None)
+ # This attribute should be set by overrides of Task.process_result(). It
+ # indicates that even though at the swarming level the task may have
+  # passed, something failed inside that larger task.
+ failure_reason = attr.ib(type=str, default="")
+ has_flakes = attr.ib(type=bool, default=False)
+ task_outputs_link = attr.ib(type=str, default=None)
+ logs = attr.ib(type=dict, default=attr.Factory(dict))
- def __attrs_post_init__(self):
- # The led module gives the host and the id, but the swarming module
- # gives the link and the id. Require the id (since it has no default
- # above) and require either the host or task_ui_link attributes.
- assert self.host or self.task_ui_link
- if not self.task_ui_link:
- self.task_ui_link = f"https://{self.host}/task?id={self.task_id}"
- elif not self.host:
- self.host = urlparse(self.task_ui_link).netloc
+ def __attrs_post_init__(self):
+ # The led module gives the host and the id, but the swarming module
+ # gives the link and the id. Require the id (since it has no default
+ # above) and require either the host or task_ui_link attributes.
+ assert self.host or self.task_ui_link
+ if not self.task_ui_link:
+ self.task_ui_link = f"https://{self.host}/task?id={self.task_id}"
+ elif not self.host:
+ self.host = urlparse(self.task_ui_link).netloc
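
A standalone version of the host/link reconciliation in __attrs_post_init__ above: led supplies a host, swarming supplies a link, and the missing one is derived:

from urllib.parse import urlparse

def normalize(host=None, task_ui_link=None, task_id='abc'):
  # Require the id plus either the host or the link.
  assert host or task_ui_link
  if not task_ui_link:
    task_ui_link = f'https://{host}/task?id={task_id}'
  elif not host:
    host = urlparse(task_ui_link).netloc
  return host, task_ui_link

assert normalize(host='sw.example') == (
    'sw.example', 'https://sw.example/task?id=abc')
assert normalize(task_ui_link='https://sw.example/task?id=abc')[0] == 'sw.example'
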
- @property
- def bot_id(self):
- return self.result.bot_id if self.result else None
+ @property
+ def bot_id(self):
+ return self.result.bot_id if self.result else None
- @property
- def bot_ui_link(self):
- return f"https://{self.host}/bot?id={self.bot_id}" if self.bot_id else None
+ @property
+ def bot_ui_link(self):
+ return f"https://{self.host}/bot?id={self.bot_id}" if self.bot_id else None
- @property
- def name(self):
- return f"attempt {int(self.index)}"
+ @property
+ def name(self):
+ return f"attempt {int(self.index)}"
- # TODO(mohrr) add hook for pass/fail beyond swarming task level.
- # In some cases may need to examine task outputs to determine pass/fail.
- @property
- def success(self):
- if self.failure_reason:
- return False
+ # TODO(mohrr) add hook for pass/fail beyond swarming task level.
+ # In some cases may need to examine task outputs to determine pass/fail.
+ @property
+ def success(self):
+ if self.failure_reason:
+ return False
- if not self.result: # pragma: nocover
- return False
+ if not self.result: # pragma: nocover
+ return False
- try:
- self.result.analyze()
- return True
- except recipe_api.StepFailure:
- return False
+ try:
+ self.result.analyze()
+ return True
+ except recipe_api.StepFailure:
+ return False
class TaskTracker:
- """TaskTracker tracks state about attempts to run a task.
+ """TaskTracker tracks state about attempts to run a task.
TaskTracker runs the task until we get run_count successes. Usually
run_count is 1, for running regular tests, but run_count may be >1 when
gathering results of performance tests.
"""
- # States returned by _get_state()
- _LAUNCH_MORE = "launch_more"
- _IN_PROGRESS = "in_progress"
- _OVERALL_SUCCESS = "overall_success"
- _OVERALL_FAILURE = "overall_failure"
+ # States returned by _get_state()
+ _LAUNCH_MORE = "launch_more"
+ _IN_PROGRESS = "in_progress"
+ _OVERALL_SUCCESS = "overall_success"
+ _OVERALL_FAILURE = "overall_failure"
- def __init__(self, api, task, run_count):
- """
+ def __init__(self, api, task, run_count):
+ """
Args:
api: recipe_api.RecipeApiPlain object.
task: Task object.
run_count: number of successful runs we want to get for the task.
"""
- self._api = api
- self._task = task
- self._attempts = []
- self._in_progress_attempts = []
- self._successes_required = run_count
- self._successes_got = 0
- self._failures_got = 0
- self._flakes_got = 0
+ self._api = api
+ self._task = task
+ self._attempts = []
+ self._in_progress_attempts = []
+ self._successes_required = run_count
+ self._successes_got = 0
+ self._failures_got = 0
+ self._flakes_got = 0
- @property
- def name(self):
- return self._task.name
+ @property
+ def name(self):
+ return self._task.name
- @property
- def abort_early_if_failed(self):
- return self._task.abort_early_if_failed
+ @property
+ def abort_early_if_failed(self):
+ return self._task.abort_early_if_failed
- # Returns a pair (state, number_to_launch), where number_to_launch
- # is the number of new task attempts to be launched.
- def _get_state(self):
- if len(self._in_progress_attempts) != 0:
- return self._IN_PROGRESS, 0
+ # Returns a pair (state, number_to_launch), where number_to_launch
+ # is the number of new task attempts to be launched.
+ def _get_state(self):
+ if len(self._in_progress_attempts) != 0:
+ return self._IN_PROGRESS, 0
- if self._successes_got >= self._successes_required:
- return self._OVERALL_SUCCESS, 0
- # We treat the max_attempts parameter as a multiplier, basically
- # "max attempts per successful run needed", so that the same
- # max_attempts value can be used for both perfcompare and regular
- # builders.
- attempts_allowed = self._task.max_attempts * self._successes_required
- remaining_needed = self._successes_required - self._successes_got
- remaining_allowed = attempts_allowed - len(self._attempts)
- if remaining_needed > remaining_allowed:
- return self._OVERALL_FAILURE, 0
- # Apply the "no futile retries" strategy: If we need multiple
- # successful runs but we see no successes in the first batch of
- # attempts, don't do any retries, on the grounds that the build
- # we're testing is probably bad (i.e. it won't pass if retried).
- # This is intended to avoid wasting time and infra capacity.
- if (
- self._successes_required > 1
- and self._successes_got == 0
- and len(self._attempts) >= self._successes_required
- ):
- return self._OVERALL_FAILURE, 0
- return self._LAUNCH_MORE, remaining_needed
+ if self._successes_got >= self._successes_required:
+ return self._OVERALL_SUCCESS, 0
+ # We treat the max_attempts parameter as a multiplier, basically
+ # "max attempts per successful run needed", so that the same
+ # max_attempts value can be used for both perfcompare and regular
+ # builders.
+ attempts_allowed = self._task.max_attempts * self._successes_required
+ remaining_needed = self._successes_required - self._successes_got
+ remaining_allowed = attempts_allowed - len(self._attempts)
+ if remaining_needed > remaining_allowed:
+ return self._OVERALL_FAILURE, 0
+ # Apply the "no futile retries" strategy: If we need multiple
+ # successful runs but we see no successes in the first batch of
+ # attempts, don't do any retries, on the grounds that the build
+ # we're testing is probably bad (i.e. it won't pass if retried).
+ # This is intended to avoid wasting time and infra capacity.
+ if (self._successes_required > 1 and self._successes_got == 0 and
+ len(self._attempts) >= self._successes_required):
+ return self._OVERALL_FAILURE, 0
+ return self._LAUNCH_MORE, remaining_needed
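
The retry-budget rules above, as a standalone function with two worked cases (perfcompare-style run_count of 3, max_attempts of 2):

def get_state(attempts, in_progress, successes, max_attempts, required):
  if in_progress:
    return 'in_progress', 0
  if successes >= required:
    return 'overall_success', 0
  attempts_allowed = max_attempts * required  # budget scales with run_count
  remaining_needed = required - successes
  if remaining_needed > attempts_allowed - attempts:
    return 'overall_failure', 0
  # No futile retries: no success in the first full batch means the build
  # under test is probably bad, so stop.
  if required > 1 and successes == 0 and attempts >= required:
    return 'overall_failure', 0
  return 'launch_more', remaining_needed

# 3 attempts, 0 successes: the first batch all failed, so give up.
assert get_state(3, 0, 0, 2, 3) == ('overall_failure', 0)
# 3 attempts, 2 successes: one more success is still needed and allowed.
assert get_state(3, 0, 2, 2, 3) == ('launch_more', 1)
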
- def should_launch(self):
- _, number_to_launch = self._get_state()
- return number_to_launch > 0
+ def should_launch(self):
+ _, number_to_launch = self._get_state()
+ return number_to_launch > 0
- # Launch one or more task attempts. This assumes that should_launch()
- # was previously called and returned True.
- def launch(self):
- state, number_to_launch = self._get_state()
- assert state == self._LAUNCH_MORE, state
- assert number_to_launch > 0
+ # Launch one or more task attempts. This assumes that should_launch()
+ # was previously called and returned True.
+ def launch(self):
+ state, number_to_launch = self._get_state()
+ assert state == self._LAUNCH_MORE, state
+ assert number_to_launch > 0
- # Don't increase the priority if we need multiple successful runs (used
- # for perfcompare mode).
- if self._successes_required > 1:
- priority_boost_amount = 0
- else:
- # Boost the priority by the number of previous attempts. This means
- # that second attempts will take priority over first attempts, third
- # attempts will take priority over second attempts, etc.
- #
- # This means that if there is a long queue for Swarming tasks to run,
- # only the first attempts should wait. Subsequent attempts should
- # jump ahead in the queue.
- priority_boost_amount = len(self._attempts)
+ # Don't increase the priority if we need multiple successful runs (used
+ # for perfcompare mode).
+ if self._successes_required > 1:
+ priority_boost_amount = 0
+ else:
+ # Boost the priority by the number of previous attempts. This means
+ # that second attempts will take priority over first attempts, third
+ # attempts will take priority over second attempts, etc.
+ #
+ # This means that if there is a long queue for Swarming tasks to run,
+ # only the first attempts should wait. Subsequent attempts should
+ # jump ahead in the queue.
+ priority_boost_amount = len(self._attempts)
- task_ids = []
- for _ in range(number_to_launch):
- attempt_index = len(self._attempts)
- task_name = f"{self.name} (attempt {int(attempt_index)})"
- with self._api.step.nest(task_name) as presentation:
- attempt = self._task.launch(priority_boost_amount)
- attempt.index = attempt_index
- self._attempts.append(attempt)
- self._in_progress_attempts.append(attempt)
- task_ids.append(attempt.task_id)
- presentation.links["Swarming task"] = attempt.task_ui_link
- return task_ids
+ task_ids = []
+ for _ in range(number_to_launch):
+ attempt_index = len(self._attempts)
+ task_name = f"{self.name} (attempt {int(attempt_index)})"
+ with self._api.step.nest(task_name) as presentation:
+ attempt = self._task.launch(priority_boost_amount)
+ attempt.index = attempt_index
+ self._attempts.append(attempt)
+ self._in_progress_attempts.append(attempt)
+ task_ids.append(attempt.task_id)
+ presentation.links["Swarming task"] = attempt.task_ui_link
+ return task_ids
- @property
- def attempts(self): # pragma: no cover
- return self._attempts[:]
+ @property
+ def attempts(self): # pragma: no cover
+ return self._attempts[:]
- @property
- def in_progress(self):
- state, _ = self._get_state()
- return state == self._IN_PROGRESS
+ @property
+ def in_progress(self):
+ state, _ = self._get_state()
+ return state == self._IN_PROGRESS
- @property
- def success(self):
- state, _ = self._get_state()
- return state == self._OVERALL_SUCCESS
+ @property
+ def success(self):
+ state, _ = self._get_state()
+ return state == self._OVERALL_SUCCESS
- @property
- def failed(self):
- return not self.success and not self.in_progress
+ @property
+ def failed(self):
+ return not self.success and not self.in_progress
- def failed_after_max_attempts(self):
- state, _ = self._get_state()
- return state == self._OVERALL_FAILURE
+ def failed_after_max_attempts(self):
+ state, _ = self._get_state()
+ return state == self._OVERALL_FAILURE
- def has_flakes(self):
- return self._flakes_got > 0 or (
- self._successes_got > 0 and self._failures_got > 0
- )
+ def has_flakes(self):
+ return self._flakes_got > 0 or (
+ self._successes_got > 0 and self._failures_got > 0
+ )
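# Illustrative sketch (not part of the diff): the flake rule above. A task
# has flakes if any attempt self-reported flakes, or if it both failed and
# passed across attempts; the argument names are hypothetical.
def task_has_flakes(flakes_got, successes_got, failures_got):
    return flakes_got > 0 or (successes_got > 0 and failures_got > 0)

assert task_has_flakes(0, 1, 1)      # failed once, then passed: flaky
assert not task_has_flakes(0, 2, 0)  # clean passes only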
- def process_result(self, attempt, result):
- with self._api.step.nest(result.name):
- self._in_progress_attempts.remove(attempt)
- attempt.result = result
- try:
- self._task.process_result(attempt)
- except recipe_api.StepFailure as e:
- error_step = self._api.step.empty("exception")
- error_step.presentation.step_summary_text = str(e)
- attempt.failure_reason = "exception during result processing"
- if e.name and e.exc_result:
- # The error name generally contains the name of the step
- # that failed. The full step name will already be namespaced
- # by task name, so present everything after the task name
- # since the failure_reason will only be presented in the
- # context of this task.
- attempt.failure_reason += f": {e.name.split(self.name + '.')[-1]} (retcode {e.exc_result.retcode})"
- trace_lines = self._api.utils.traceback_format_exc().splitlines()
- attempt.logs["exception"] = trace_lines
- error_step.presentation.logs["exception"] = trace_lines
- if attempt.success:
- self._successes_got += 1
- if attempt.has_flakes:
- self._flakes_got += 1
- else:
- self._failures_got += 1
+ def process_result(self, attempt, result):
+ with self._api.step.nest(result.name):
+ self._in_progress_attempts.remove(attempt)
+ attempt.result = result
+ try:
+ self._task.process_result(attempt)
+ except recipe_api.StepFailure as e:
+ error_step = self._api.step.empty("exception")
+ error_step.presentation.step_summary_text = str(e)
+ attempt.failure_reason = "exception during result processing"
+ if e.name and e.exc_result:
+ # The error name generally contains the name of the step
+ # that failed. The full step name will already be namespaced
+ # by task name, so present everything after the task name
+ # since the failure_reason will only be presented in the
+ # context of this task.
+ attempt.failure_reason += f": {e.name.split(self.name + '.')[-1]} (retcode {e.exc_result.retcode})"
+ trace_lines = self._api.utils.traceback_format_exc().splitlines()
+ attempt.logs["exception"] = trace_lines
+ error_step.presentation.logs["exception"] = trace_lines
+ if attempt.success:
+ self._successes_got += 1
+ if attempt.has_flakes:
+ self._flakes_got += 1
+ else:
+ self._failures_got += 1
- def present(self, **kwargs):
- """Present this task when summarizing results at the end of the run.
+ def present(self, **kwargs):
+ """Present this task when summarizing results at the end of the run.
Args:
**kwargs (Dict): passed through to present_attempt()
@@ -240,20 +238,20 @@
Returns:
None
"""
- with self._api.step.nest(self.name) as task_step_presentation:
- for attempt in self._attempts:
- self._task.present_attempt(task_step_presentation, attempt, **kwargs)
+ with self._api.step.nest(self.name) as task_step_presentation:
+ for attempt in self._attempts:
+ self._task.present_attempt(task_step_presentation, attempt, **kwargs)
- # Show incomplete tasks in green so as not to be confused with
- # actual failures.
- if self.success or self.in_progress:
- task_step_presentation.status = self._api.step.SUCCESS
- else:
- task_step_presentation.status = self._api.step.FAILURE
+ # Show incomplete tasks in green so as not to be confused with
+ # actual failures.
+ if self.success or self.in_progress:
+ task_step_presentation.status = self._api.step.SUCCESS
+ else:
+ task_step_presentation.status = self._api.step.FAILURE
class Task:
- """A Task object describes:
+ """A Task object describes:
* How to launch a task.
* How to process and present the results from a task.
@@ -266,20 +264,20 @@
default for other tasks, set max_attempts to that number.
"""
- def __init__(self, api, name):
- """Initializer.
+ def __init__(self, api, name):
+ """Initializer.
Args:
api: recipe_api.RecipeApiPlain object.
name: str, human readable name of this task
"""
- self._api = api
- self.name = name
- self.max_attempts = None
- self.abort_early_if_failed = False
+ self._api = api
+ self.name = name
+ self.max_attempts = None
+ self.abort_early_if_failed = False
- def process_result(self, attempt):
- """Examine the result in the given attempt for failures.
+ def process_result(self, attempt):
+ """Examine the result in the given attempt for failures.
Subclasses can set attempt.failure_reason if they find a failure inside
attempt.result. failure_reason should be a short summary of the failure
@@ -295,8 +293,8 @@
None
"""
- def present_attempt(self, task_step_presentation, attempt, **kwargs):
- """Present an Attempt when summarizing results at the end of the run.
+ def present_attempt(self, task_step_presentation, attempt, **kwargs):
+ """Present an Attempt when summarizing results at the end of the run.
Args:
task_step_presentation (StepPresentation): assuming present() was not
@@ -313,13 +311,13 @@
Returns:
None
"""
- del kwargs # Unused.
- name = f"{attempt.name} ({'pass' if attempt.success else 'fail'})"
- task_step_presentation.links[name] = attempt.task_ui_link
- task_step_presentation.links[attempt.bot_id] = attempt.bot_ui_link
+ del kwargs # Unused.
+ name = f"{attempt.name} ({'pass' if attempt.success else 'fail'})"
+ task_step_presentation.links[name] = attempt.task_ui_link
+ task_step_presentation.links[attempt.bot_id] = attempt.bot_ui_link
- def launch(self, priority_boost_amount):
- """Launch the task (using Swarming, led, or something else).
+ def launch(self, priority_boost_amount):
+ """Launch the task (using Swarming, led, or something else).
Args:
priority_boost_amount (int): Non-negative integer specifying how much
@@ -329,138 +327,141 @@
Attempt object, with the task_id or host property filled out
from the Swarming or led result.
"""
- assert False, "Subclasses must define launch() method." # pragma: no cover
+ assert False, "Subclasses must define launch() method." # pragma: no cover
class TriggeredTask(Task):
- def __init__(self, request, api, **kwargs):
- super().__init__(api, request.name, **kwargs)
- self._request = request
- def launch(self, priority_boost_amount):
- assert self._request
+ def __init__(self, request, api, **kwargs):
+ super().__init__(api, request.name, **kwargs)
+ self._request = request
- request = self._request
- if priority_boost_amount != 0:
- old_priority = request.priority
- # For Swarming tasks, numerically lower priority values are logically
- # higher priorities, so use subtraction here.
- request = request.with_priority(old_priority - priority_boost_amount)
- with self._api.step.nest("increase priority") as pres:
- pres.step_summary_text = (
- f"from {int(old_priority)} to {int(request.priority)}"
- )
+ def launch(self, priority_boost_amount):
+ assert self._request
- metadata = self._api.swarming.trigger("trigger", [request])
- assert len(metadata) == 1
- metadata = metadata[0]
-
- return self._api.swarming_retry.Attempt(
- task_ui_link=metadata.task_ui_link,
- task_id=metadata.id,
+ request = self._request
+ if priority_boost_amount != 0:
+ old_priority = request.priority
+ # For Swarming tasks, numerically lower priority values are logically
+ # higher priorities, so use subtraction here.
+ request = request.with_priority(old_priority - priority_boost_amount)
+ with self._api.step.nest("increase priority") as pres:
+ pres.step_summary_text = (
+ f"from {int(old_priority)} to {int(request.priority)}"
)
+ metadata = self._api.swarming.trigger("trigger", [request])
+ assert len(metadata) == 1
+ metadata = metadata[0]
+
+ return self._api.swarming_retry.Attempt(
+ task_ui_link=metadata.task_ui_link,
+ task_id=metadata.id,
+ )
+
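# Illustrative sketch (not part of the diff): Swarming priorities are
# inverted, so "boost by N" means subtracting N; the values below are
# hypothetical.
old_priority = 200
boosted = old_priority - 2  # 198 is scheduled ahead of 200
assert boosted < old_priority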
class LedTask(Task):
- def __init__(self, led_data, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self._led_data = led_data
- build = led_data.result.buildbucket.bbagent_args.build
- self._swarming_pb = build.infra.swarming
- self._original_priority = self._swarming_pb.priority
+ def __init__(self, led_data, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._led_data = led_data
- def launch(self, priority_boost_amount):
- assert self._led_data
+ build = led_data.result.buildbucket.bbagent_args.build
+ self._swarming_pb = build.infra.swarming
+ self._original_priority = self._swarming_pb.priority
- # For Swarming tasks, numerically lower priority values are logically
- # higher priorities, so use subtraction here.
- new_priority = self._original_priority - priority_boost_amount
- # Although we modify this data structure in place, one launch()
- # invocation should not affect later launch() invocations because this
- # 'priority' field is always overwritten.
- self._swarming_pb.priority = new_priority
- if priority_boost_amount != 0:
- with self._api.step.nest("increase priority") as pres:
- pres.step_summary_text = (
- f"from {int(self._original_priority)} to {int(new_priority)}"
- )
+ def launch(self, priority_boost_amount):
+ assert self._led_data
- res = self._led_data.then("launch", "-modernize", "-real-build")
- host = res.launch_result.swarming_hostname
- task_id = res.launch_result.task_id or str(res.launch_result.build_id)
- build_url_swarming = 'https://ci.chromium.org/swarming/task/%s?server=%s' % (
- task_id,
- res.launch_result.swarming_hostname,
+ # For Swarming tasks, numerically lower priority values are logically
+ # higher priorities, so use subtraction here.
+ new_priority = self._original_priority - priority_boost_amount
+ # Although we modify this data structure in place, one launch()
+ # invocation should not affect later launch() invocations because this
+ # 'priority' field is always overwritten.
+ self._swarming_pb.priority = new_priority
+ if priority_boost_amount != 0:
+ with self._api.step.nest("increase priority") as pres:
+ pres.step_summary_text = (
+ f"from {int(self._original_priority)} to {int(new_priority)}"
)
- build_url_bb = 'https://%s/build/%s' % (
- res.launch_result.buildbucket_hostname,
- task_id
- )
- build_url = build_url_swarming if res.launch_result.task_id else build_url_bb
- return self._api.swarming_retry.Attempt(
- host=host,
- task_id=task_id,
- # Use Milo since this task is running a recipe.
- task_ui_link=build_url,
- )
+
+ res = self._led_data.then("launch", "-modernize", "-real-build")
+ host = res.launch_result.swarming_hostname
+ task_id = res.launch_result.task_id or str(res.launch_result.build_id)
+ build_url_swarming = 'https://ci.chromium.org/swarming/task/%s?server=%s' % (
+ task_id,
+ res.launch_result.swarming_hostname,
+ )
+ build_url_bb = 'https://%s/build/%s' % (
+ res.launch_result.buildbucket_hostname, task_id
+ )
+ build_url = build_url_swarming if res.launch_result.task_id else build_url_bb
+ return self._api.swarming_retry.Attempt(
+ host=host,
+ task_id=task_id,
+ # Use Milo since this task is running a recipe.
+ task_ui_link=build_url,
+ )
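# Illustrative sketch (not part of the diff): because each launch()
# recomputes the priority from the saved original, boosts never compound
# across launches. The class below is a hypothetical stand-in for the
# swarming proto.
class FakeSwarmingPb:
    priority = 200

pb, original_priority = FakeSwarmingPb(), 200
for boost in (0, 1, 2):
    pb.priority = original_priority - boost  # overwritten every launch
assert pb.priority == 198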
class RetrySwarmingApi(recipe_api.RecipeApi):
- """Launch and retry swarming jobs until they pass or we hit max attempts."""
+ """Launch and retry swarming jobs until they pass or we hit max attempts."""
- Task = Task # pylint: disable=invalid-name
- LedTask = LedTask # pylint: disable=invalid-name
- TriggeredTask = TriggeredTask # pylint: disable=invalid-name
+ Task = Task # pylint: disable=invalid-name
+ LedTask = LedTask # pylint: disable=invalid-name
+ TriggeredTask = TriggeredTask # pylint: disable=invalid-name
- Attempt = Attempt # pylint: disable=invalid-name
+ Attempt = Attempt # pylint: disable=invalid-name
- DEFAULT_MAX_ATTEMPTS = DEFAULT_MAX_ATTEMPTS
+ DEFAULT_MAX_ATTEMPTS = DEFAULT_MAX_ATTEMPTS
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self._task_ids_seen = set()
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._task_ids_seen = set()
- def run_and_present_tasks(self, tasks, **kwargs):
- tasks = self.run_tasks(tasks, **kwargs)
- self.present_tasks(tasks)
- self.raise_failures(tasks)
+ def run_and_present_tasks(self, tasks, **kwargs):
+ tasks = self.run_tasks(tasks, **kwargs)
+ self.present_tasks(tasks)
+ self.raise_failures(tasks)
- def _is_complete(self, result):
- # At the moment results have a bunch of fields set to None if incomplete.
- # On the assumption this will be changed at some point I'm also checking
- # the state explicitly.
- if result.name is None:
- return False
+ def _is_complete(self, result):
+    # At the moment, incomplete results have many fields set to None.
+    # On the assumption this will change at some point, we also check
+    # the state explicitly.
+ if result.name is None:
+ return False
- return result.state not in {
- self.m.swarming.TaskState.RUNNING,
- self.m.swarming.TaskState.PENDING,
- }
+ return result.state not in {
+ self.m.swarming.TaskState.RUNNING,
+ self.m.swarming.TaskState.PENDING,
+ }
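# Illustrative sketch (not part of the diff): the completeness rule above
# over plain strings standing in for swarming.TaskState values.
def is_complete(name, state):
    if name is None:
        return False
    return state not in {"RUNNING", "PENDING"}

assert not is_complete(None, "COMPLETED")
assert not is_complete("task", "PENDING")
assert is_complete("task", "TIMED_OUT")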
- def _get_tasks_to_launch(self, tasks):
- if any(task.failed_after_max_attempts() for task in tasks):
- # The build has failed overall, so disable launching any
- # further task attempts.
- return []
- return [task for task in tasks if task.should_launch()]
+ def _get_tasks_to_launch(self, tasks):
+ if any(task.failed_after_max_attempts() for task in tasks):
+ # The build has failed overall, so disable launching any
+ # further task attempts.
+ return []
+ return [task for task in tasks if task.should_launch()]
- def _launch(self, tasks):
- for task in tasks:
- task_ids = task.launch()
- # Check whether we got any duplicate task IDs. This is just a
- # rationality check for testing. With the current testing
- # framework, it is easy for multiple launch attempts to return
- # the same ID, because the default testing output always
- # returns the same task ID value.
- for task_id in task_ids:
- assert (
- task_id not in self._task_ids_seen
- ), f"Duplicate task ID seen: {repr(task_id)}"
- self._task_ids_seen.add(task_id)
+ def _launch(self, tasks):
+ for task in tasks:
+ task_ids = task.launch()
+ # Check whether we got any duplicate task IDs. This is just a
+ # rationality check for testing. With the current testing
+ # framework, it is easy for multiple launch attempts to return
+ # the same ID, because the default testing output always
+ # returns the same task ID value.
+ for task_id in task_ids:
+ assert (
+ task_id not in self._task_ids_seen
+ ), f"Duplicate task ID seen: {repr(task_id)}"
+ self._task_ids_seen.add(task_id)
- def _launch_and_collect(self, tasks, collect_output_dir, summary_presentation):
- """Launch necessary tasks and process those that complete.
+ def _launch_and_collect(
+ self, tasks, collect_output_dir, summary_presentation
+ ):
+ """Launch necessary tasks and process those that complete.
Launch any tasks that are not currently running, have not passed,
and have not exceeded max_attempts.
@@ -485,125 +486,127 @@
Number of jobs still running or to be relaunched. As long as this
is positive the caller should continue calling this method.
"""
- summary = []
+ summary = []
- def summary_entry(list_name, task_list):
- if len(task_list) == 1:
- count_or_name = task_list[0].name
- else:
- count_or_name = len(task_list)
- return f"{count_or_name} {list_name}"
+ def summary_entry(list_name, task_list):
+ if len(task_list) == 1:
+ count_or_name = task_list[0].name
+ else:
+ count_or_name = len(task_list)
+ return f"{count_or_name} {list_name}"
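# Illustrative sketch (not part of the diff): summary_entry() above shows
# the lone task's name, or a count when there are several.
def summary_entry(list_name, names):
    count_or_name = names[0] if len(names) == 1 else len(names)
    return f"{count_or_name} {list_name}"

assert summary_entry("passed", ["foo"]) == "foo passed"
assert summary_entry("failed", ["a", "b", "c"]) == "3 failed"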
- to_launch = self._get_tasks_to_launch(tasks)
- if to_launch:
- with self.m.step.nest("launch"):
- self._launch(to_launch)
+ to_launch = self._get_tasks_to_launch(tasks)
+ if to_launch:
+ with self.m.step.nest("launch"):
+ self._launch(to_launch)
- # Wait on tasks that are in-progress.
- tasks_by_id = {}
- for task in tasks:
- for attempt in task._in_progress_attempts:
- assert attempt.task_id not in tasks_by_id
- tasks_by_id[attempt.task_id] = (task, attempt)
- results = []
- if tasks_by_id:
- results = self.m.swarming.collect(
- "collect",
- sorted(tasks_by_id.keys()),
- output_dir=collect_output_dir,
- eager=True,
- verbose=True,
- )
+ # Wait on tasks that are in-progress.
+ tasks_by_id = {}
+ for task in tasks:
+ for attempt in task._in_progress_attempts:
+ assert attempt.task_id not in tasks_by_id
+ tasks_by_id[attempt.task_id] = (task, attempt)
+ results = []
+ if tasks_by_id:
+ results = self.m.swarming.collect(
+ "collect",
+ sorted(tasks_by_id.keys()),
+ output_dir=collect_output_dir,
+ eager=True,
+ verbose=True,
+ )
- # 'collect' takes a list of task IDs and returns a list specifying
- # whether each task has completed or is still running. However,
- # sometimes the list it returns misses tasks that haven't
- # completed. That makes no difference here because we only look at
- # the completed tasks.
- completed_results = [result for result in results if self._is_complete(result)]
+ # 'collect' takes a list of task IDs and returns a list specifying
+ # whether each task has completed or is still running. However,
+ # sometimes the list it returns misses tasks that haven't
+ # completed. That makes no difference here because we only look at
+ # the completed tasks.
+ completed_results = [
+ result for result in results if self._is_complete(result)
+ ]
- passed_tasks = []
- failed_tasks = []
- if completed_results:
- with self.m.step.nest("process results") as process_results_presentation:
- for result in completed_results:
- task, attempt = tasks_by_id[result.id]
- task.process_result(attempt, result)
- if attempt.success:
- passed_tasks.append((task, attempt))
- else:
- failed_tasks.append((task, attempt))
+ passed_tasks = []
+ failed_tasks = []
+ if completed_results:
+ with self.m.step.nest("process results") as process_results_presentation:
+ for result in completed_results:
+ task, attempt = tasks_by_id[result.id]
+ task.process_result(attempt, result)
+ if attempt.success:
+ passed_tasks.append((task, attempt))
+ else:
+ failed_tasks.append((task, attempt))
- # Prevent failure states from the child log steps being
- # propagated up the log step tree by the recipe log step
- # system. This is desirable because although
- # task.process_result() may internally catch and suppress
- # an exception, the exception will still be reported
- # through recipe log step system.
- process_results_presentation.status = self.m.step.SUCCESS
+ # Prevent failure states from the child log steps being
+ # propagated up the log step tree by the recipe log step
+ # system. This is desirable because although
+ # task.process_result() may internally catch and suppress
+ # an exception, the exception will still be reported
+ # through recipe log step system.
+        # through the recipe log step system.
- for list_name, task_list in [
- ("passed", passed_tasks),
- ("failed", failed_tasks),
- ]:
- if not task_list:
- continue
- links = []
- for task, attempt in task_list:
- name = f"{task.name} ({attempt.name})"
- links.append((name, attempt.task_ui_link))
- with self.m.step.nest(f"{list_name} tasks") as list_step_presentation:
- list_step_presentation.links.update(links)
+ for list_name, task_list in [
+ ("passed", passed_tasks),
+ ("failed", failed_tasks),
+ ]:
+ if not task_list:
+ continue
+ links = []
+ for task, attempt in task_list:
+ name = f"{task.name} ({attempt.name})"
+ links.append((name, attempt.task_ui_link))
+ with self.m.step.nest(f"{list_name} tasks") as list_step_presentation:
+ list_step_presentation.links.update(links)
- summary.append(summary_entry(list_name, [task for task, _ in task_list]))
+ summary.append(summary_entry(list_name, [task for task, _ in task_list]))
- incomplete_tasks = [task for task in tasks if task.in_progress]
- # Do minimal presentation of all in-progress Attempts.
- links = []
- for task in tasks:
- for attempt in task._in_progress_attempts:
- name = f"{task.name} ({attempt.name})"
- links.append((name, attempt.task_ui_link))
- if links:
- with self.m.step.nest("incomplete tasks") as list_step_presentation:
- list_step_presentation.links.update(links)
- summary.append(summary_entry("incomplete", incomplete_tasks))
+ incomplete_tasks = [task for task in tasks if task.in_progress]
+ # Do minimal presentation of all in-progress Attempts.
+ links = []
+ for task in tasks:
+ for attempt in task._in_progress_attempts:
+ name = f"{task.name} ({attempt.name})"
+ links.append((name, attempt.task_ui_link))
+ if links:
+ with self.m.step.nest("incomplete tasks") as list_step_presentation:
+ list_step_presentation.links.update(links)
+ summary.append(summary_entry("incomplete", incomplete_tasks))
- to_be_relaunched = self._get_tasks_to_launch(tasks)
- failed_after_max_attempts = [
- task for task in tasks if task.failed_after_max_attempts()
- ]
- if failed_after_max_attempts:
- summary.append(
- summary_entry("failed after max attempts", failed_after_max_attempts)
- )
+ to_be_relaunched = self._get_tasks_to_launch(tasks)
+ failed_after_max_attempts = [
+ task for task in tasks if task.failed_after_max_attempts()
+ ]
+ if failed_after_max_attempts:
+ summary.append(
+ summary_entry("failed after max attempts", failed_after_max_attempts)
+ )
- summary_presentation.step_summary_text = ", ".join(summary)
+ summary_presentation.step_summary_text = ", ".join(summary)
- # Check if all abort_early_if_failed tasks are finished. If one or more
- # fail, don't wait on remaining tasks. They will be automatically
- # forcibly terminated when the build's Swarming task completes.
- abort_early_tasks = [task for task in tasks if task.abort_early_if_failed]
- abort_early_tasks_in_progress = [
- task for task in abort_early_tasks if task.in_progress
- ]
- if not abort_early_tasks_in_progress:
- failed_abort_early_tasks = [
- task for task in abort_early_tasks if task.failed_after_max_attempts()
- ]
- if failed_abort_early_tasks:
- return 0
+ # Check if all abort_early_if_failed tasks are finished. If one or more
+ # fail, don't wait on remaining tasks. They will be automatically
+ # forcibly terminated when the build's Swarming task completes.
+ abort_early_tasks = [task for task in tasks if task.abort_early_if_failed]
+ abort_early_tasks_in_progress = [
+ task for task in abort_early_tasks if task.in_progress
+ ]
+ if not abort_early_tasks_in_progress:
+ failed_abort_early_tasks = [
+ task for task in abort_early_tasks if task.failed_after_max_attempts()
+ ]
+ if failed_abort_early_tasks:
+ return 0
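# Illustrative sketch (not part of the diff): the abort-early rule above
# over plain dicts standing in for TaskTracker objects.
def should_stop_early(tasks):
    abort_early = [t for t in tasks if t["abort_early_if_failed"]]
    if any(t["in_progress"] for t in abort_early):
        return False  # wait until every abort-early task settles
    return any(t["failed_after_max_attempts"] for t in abort_early)

assert should_stop_early([{
    "abort_early_if_failed": True,
    "in_progress": False,
    "failed_after_max_attempts": True,
}])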
- return len(to_be_relaunched) + len(incomplete_tasks)
+ return len(to_be_relaunched) + len(incomplete_tasks)
- def run_tasks(
- self,
- tasks,
- max_attempts=0,
- collect_output_dir=None,
- run_count=1,
- ):
- """Launch all tasks, retry until max_attempts reached.
+ def run_tasks(
+ self,
+ tasks,
+ max_attempts=0,
+ collect_output_dir=None,
+ run_count=1,
+ ):
+ """Launch all tasks, retry until max_attempts reached.
Args:
tasks (seq[Task]): tasks to execute
@@ -613,28 +616,28 @@
api.swarming.collect()
"""
- max_attempts = max_attempts or DEFAULT_MAX_ATTEMPTS
+ max_attempts = max_attempts or DEFAULT_MAX_ATTEMPTS
- for task in tasks:
- if not task.max_attempts:
- task.max_attempts = max_attempts
+ for task in tasks:
+ if not task.max_attempts:
+ task.max_attempts = max_attempts
- tasks = [TaskTracker(self.m, task, run_count) for task in tasks]
+ tasks = [TaskTracker(self.m, task, run_count) for task in tasks]
- with self.m.step.nest("launch/collect"), self.m.context(infra_steps=True):
- for i in itertools.count(0):
- with self.m.step.nest(str(i)) as presentation:
- if not self._launch_and_collect(
- tasks=tasks,
- collect_output_dir=collect_output_dir,
- summary_presentation=presentation,
- ):
- break
+ with self.m.step.nest("launch/collect"), self.m.context(infra_steps=True):
+ for i in itertools.count(0):
+ with self.m.step.nest(str(i)) as presentation:
+ if not self._launch_and_collect(
+ tasks=tasks,
+ collect_output_dir=collect_output_dir,
+ summary_presentation=presentation,
+ ):
+ break
- return tasks
+ return tasks
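# Illustrative sketch (not part of the diff): a hypothetical recipe helper
# driving the API above; `requests` is an assumed list of prepared
# swarming task requests built elsewhere.
def run_all(api, requests):
    tasks = [api.swarming_retry.TriggeredTask(req, api) for req in requests]
    api.swarming_retry.run_and_present_tasks(tasks, max_attempts=3)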
- def present_tasks(self, tasks):
- """Present results as steps.
+ def present_tasks(self, tasks):
+ """Present results as steps.
Examine tasks for pass/fail status and create step data for displaying
that status. Group all passes under one step and all failures under
@@ -644,50 +647,50 @@
Args:
tasks (seq[Task]): tasks to examine
"""
- # TODO(mohrr) add hooks to include task-specific data beyond pass/fail.
- passed_tasks = [x for x in tasks if x.success]
- # Some tasks may be incomplete if the launch_and_collect loop exited
- # early due to failures.
- incomplete_tasks = [x for x in tasks if x.in_progress]
- failed_tasks = [x for x in tasks if x.failed]
- flaked_tasks = [x for x in tasks if x.has_flakes()]
+ # TODO(mohrr) add hooks to include task-specific data beyond pass/fail.
+ passed_tasks = [x for x in tasks if x.success]
+ # Some tasks may be incomplete if the launch_and_collect loop exited
+ # early due to failures.
+ incomplete_tasks = [x for x in tasks if x.in_progress]
+ failed_tasks = [x for x in tasks if x.failed]
+ flaked_tasks = [x for x in tasks if x.has_flakes()]
- with self.m.step.nest("passes") as step_presentation:
- for task in passed_tasks:
- task.present(category="passes")
- step_presentation.step_summary_text = f"{len(passed_tasks)} passed"
+ with self.m.step.nest("passes") as step_presentation:
+ for task in passed_tasks:
+ task.present(category="passes")
+ step_presentation.step_summary_text = f"{len(passed_tasks)} passed"
- with self.m.step.nest("flakes") as step_presentation:
- for task in flaked_tasks:
- task.present(category="flakes")
- step_presentation.step_summary_text = f"{len(flaked_tasks)} flaked"
+ with self.m.step.nest("flakes") as step_presentation:
+ for task in flaked_tasks:
+ task.present(category="flakes")
+ step_presentation.step_summary_text = f"{len(flaked_tasks)} flaked"
- with self.m.step.nest("failures") as step_presentation:
- for task in failed_tasks:
- task.present(category="failures")
- step_presentation.step_summary_text = f"{len(failed_tasks)} failed"
+ with self.m.step.nest("failures") as step_presentation:
+ for task in failed_tasks:
+ task.present(category="failures")
+ step_presentation.step_summary_text = f"{len(failed_tasks)} failed"
- if incomplete_tasks:
- with self.m.step.nest("incomplete") as step_presentation:
- for task in incomplete_tasks:
- task.present(category="incomplete")
- step_presentation.step_summary_text = (
- f"{len(incomplete_tasks)} incomplete"
- )
+ if incomplete_tasks:
+ with self.m.step.nest("incomplete") as step_presentation:
+ for task in incomplete_tasks:
+ task.present(category="incomplete")
+ step_presentation.step_summary_text = (
+ f"{len(incomplete_tasks)} incomplete"
+ )
- if not failed_tasks and not incomplete_tasks:
- self.m.step.empty("all tasks passed") # pragma: no cover
+ if not failed_tasks and not incomplete_tasks:
+ self.m.step.empty("all tasks passed") # pragma: no cover
- def raise_failures(self, tasks):
- """Raise an exception if any tasks failed.
+ def raise_failures(self, tasks):
+ """Raise an exception if any tasks failed.
Examine tasks for pass/fail status. If any failed, raise a StepFailure.
Args:
tasks (seq[Task]): tasks to examine
"""
- failed = [x for x in tasks if x.failed]
- if failed:
- raise self.m.step.StepFailure(
- f"{pluralize('task', failed)} failed: {', '.join(x.name for x in failed)}"
- )
+ failed = [x for x in tasks if x.failed]
+ if failed:
+ raise self.m.step.StepFailure(
+ f"{pluralize('task', failed)} failed: {', '.join(x.name for x in failed)}"
+ )
diff --git a/recipe_modules/swarming_retry/test_api.py b/recipe_modules/swarming_retry/test_api.py
index 1ab240e..654d14e 100644
--- a/recipe_modules/swarming_retry/test_api.py
+++ b/recipe_modules/swarming_retry/test_api.py
@@ -6,41 +6,48 @@
class FuchsiaSwarmingRetryTestApi(recipe_test_api.RecipeTestApi):
- def _attempt(self, attempt, iteration):
- """If not given guess the attempt number from context."""
- # First, if the attempt number is given use it. The next couple
- # statements make some assumptions but this is the way to ignore
- # those assumptions.
- if attempt is not None: # pragma: no cover
- return attempt
+ def _attempt(self, attempt, iteration):
+    """If not given, guess the attempt number from context."""
- # If no attempt number is given assume attempts starting at iteration
- # 0 have attempt number 0.
- if iteration == 0:
- return 0
+    # First, if the attempt number is given, use it. The next couple of
+    # statements make some assumptions; passing an explicit attempt is
+    # the way to sidestep them.
+ if attempt is not None: # pragma: no cover
+ return attempt
- # If not at iteration 0 assume we're relaunching and since the max
- # attempts is currently 2 this has to be attempt 1.
- return 1
+ # If no attempt number is given assume attempts starting at iteration
+    # If no attempt number is given, assume attempts starting at
+    # iteration 0 have attempt number 0.
+ return 0
- def trigger_data(self, name, task_id, iteration=0, attempt=None):
- """Like led_data() above, but for mocking api.swarming.trigger."""
+    # If not at iteration 0, assume we're relaunching; since the max
+    # attempts is currently 2, this has to be attempt 1.
+ return 1
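# Illustrative sketch (not part of the diff): the guessing rule above as a
# pure function.
def guess_attempt(attempt, iteration):
    if attempt is not None:
        return attempt
    return 0 if iteration == 0 else 1

assert guess_attempt(5, 0) == 5
assert guess_attempt(None, 0) == 0
assert guess_attempt(None, 3) == 1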
- attempt = self._attempt(attempt=attempt, iteration=iteration)
+ def trigger_data(self, name, task_id, iteration=0, attempt=None):
+ """Like led_data() above, but for mocking api.swarming.trigger."""
- step_name = (
- f"launch/collect.{iteration}.launch.{name} (attempt {attempt}).trigger"
- )
- launch_data = self.m.swarming.trigger(
- task_names=[name], initial_id=int(task_id)
- )
- return self.step_data(step_name, launch_data)
+ attempt = self._attempt(attempt=attempt, iteration=iteration)
- def task_result(
- self, name, task_id, failed=False, incomplete=False, timed_out=False, **kwargs
- ):
- """Mock data for call to api.swarming.collect().
+ step_name = (
+ f"launch/collect.{iteration}.launch.{name} (attempt {attempt}).trigger"
+ )
+ launch_data = self.m.swarming.trigger(
+ task_names=[name], initial_id=int(task_id)
+ )
+ return self.step_data(step_name, launch_data)
+
+ def task_result(
+ self,
+ name,
+ task_id,
+ failed=False,
+ incomplete=False,
+ timed_out=False,
+ **kwargs
+ ):
+ """Mock data for call to api.swarming.collect().
Args:
name (str): name of task
@@ -50,36 +57,37 @@
timed_out (bool): if the task timed out (implies failed)
**kwargs (dict): additional args to pass to swarming.task_result()
"""
- assert not (failed and incomplete)
- assert not (timed_out and incomplete)
+ assert not (failed and incomplete)
+ assert not (timed_out and incomplete)
- failed = failed or timed_out
+ failed = failed or timed_out
- state = self.m.swarming.TaskState.COMPLETED
- if incomplete:
- state = None
- name = None
- elif timed_out:
- state = self.m.swarming.TaskState.TIMED_OUT
+ state = self.m.swarming.TaskState.COMPLETED
+ if incomplete:
+ state = None
+ name = None
+ elif timed_out:
+ state = self.m.swarming.TaskState.TIMED_OUT
- return self.m.swarming.task_result(
- id=str(task_id), name=name, state=state, failure=failed, **kwargs
- )
+ return self.m.swarming.task_result(
+ id=str(task_id), name=name, state=state, failure=failed, **kwargs
+ )
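# Illustrative sketch (not part of the diff): the flag combinations
# task_result() accepts; timed_out implies failed, and incomplete excludes
# both. The state strings stand in for swarming.TaskState values.
def classify(failed=False, incomplete=False, timed_out=False):
    assert not (failed and incomplete)
    assert not (timed_out and incomplete)
    failed = failed or timed_out
    if incomplete:
        return None, failed  # no final state yet
    return ("TIMED_OUT" if timed_out else "COMPLETED"), failed

assert classify(timed_out=True) == ("TIMED_OUT", True)
assert classify() == ("COMPLETED", False)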
- # These methods are just for convenience to make tests more readable.
- def incomplete_task(self, name, task_id, **kwargs):
- return self.task_result(name, task_id, incomplete=True, **kwargs)
+ # These methods are just for convenience to make tests more readable.
+ def incomplete_task(self, name, task_id, **kwargs):
+ return self.task_result(name, task_id, incomplete=True, **kwargs)
- def failed_task(self, name, task_id, **kwargs):
- return self.task_result(name, task_id, failed=True, **kwargs)
+ def failed_task(self, name, task_id, **kwargs):
+ return self.task_result(name, task_id, failed=True, **kwargs)
- def timed_out_task(self, name, task_id, **kwargs):
- return self.task_result(name, task_id, timed_out=True, **kwargs)
+ def timed_out_task(self, name, task_id, **kwargs):
+ return self.task_result(name, task_id, timed_out=True, **kwargs)
- def passed_task(self, name, task_id, **kwargs):
- return self.task_result(name, task_id, **kwargs)
+ def passed_task(self, name, task_id, **kwargs):
+ return self.task_result(name, task_id, **kwargs)
- def collect_data(self, results, iteration=0):
- return self.override_step_data(
- f"launch/collect.{int(iteration)}.collect", self.m.swarming.collect(results)
- )
+ def collect_data(self, results, iteration=0):
+ return self.override_step_data(
+ f"launch/collect.{int(iteration)}.collect",
+ self.m.swarming.collect(results)
+ )
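# Illustrative sketch (not part of the diff): the nested step names these
# mocks target, for task "foo" at iteration 1, attempt 1:
#   trigger_data -> "launch/collect.1.launch.foo (attempt 1).trigger"
#   collect_data -> "launch/collect.1.collect"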
diff --git a/recipe_modules/swarming_retry/tests/full.py b/recipe_modules/swarming_retry/tests/full.py
index 6c9f80b..08b602e 100644
--- a/recipe_modules/swarming_retry/tests/full.py
+++ b/recipe_modules/swarming_retry/tests/full.py
@@ -18,45 +18,51 @@
]
PROPERTIES = {
- "full": Property(
- kind=bool,
- default=False,
- help="Whether to run six tasks or just one.",
- ),
- "run_count": Property(
- kind=int,
- default=1,
- help="Number of successful runs we want for each task.",
- ),
- "task_type": Property(
- kind=str,
- default="test",
- help="Type of tasks to create. Options: "
- '"test", "internal_failure", "raising", "led", "triggered".',
- ),
- "max_attempts": Property(kind=int, default=2, help="Overall max attempts."),
- "last_task_max_attempts": Property(
- kind=int,
- default=None,
- help="Override the overall max attempts by setting on "
- "Task.max_attempts. Only set on last task.",
- ),
- "abort_early": Property(
- kind=bool,
- default=False,
- help="Whether to run a task that will fail and abort early",
- ),
+ "full":
+ Property(
+ kind=bool,
+ default=False,
+ help="Whether to run six tasks or just one.",
+ ),
+ "run_count":
+ Property(
+ kind=int,
+ default=1,
+ help="Number of successful runs we want for each task.",
+ ),
+ "task_type":
+ Property(
+ kind=str,
+ default="test",
+ help="Type of tasks to create. Options: "
+ '"test", "internal_failure", "raising", "led", "triggered".',
+ ),
+ "max_attempts":
+ Property(kind=int, default=2, help="Overall max attempts."),
+ "last_task_max_attempts":
+ Property(
+ kind=int,
+ default=None,
+ help="Override the overall max attempts by setting on "
+ "Task.max_attempts. Only set on last task.",
+ ),
+ "abort_early":
+ Property(
+ kind=bool,
+ default=False,
+ help="Whether to run a task that will fail and abort early",
+ ),
}
class Task(swarming_retry_api.Task):
- """Required subclass for testing swarming_retry.
+ """Required subclass for testing swarming_retry.
Defined inside a function because base class is inside api object.
"""
- def __init__(self, initial_task_id, *args, **kwargs):
- """Construct a Task object.
+ def __init__(self, initial_task_id, *args, **kwargs):
+ """Construct a Task object.
Args:
initial_task_id (int or str): integer decimal value (since this needs
@@ -65,76 +71,81 @@
int(initial_task_id) works)
"""
- abort_early = kwargs.pop("abort_early", False)
- super().__init__(*args, **kwargs)
- self._next_task_id = int(initial_task_id)
- self.abort_early_if_failed = abort_early
+ abort_early = kwargs.pop("abort_early", False)
+ super().__init__(*args, **kwargs)
+ self._next_task_id = int(initial_task_id)
+ self.abort_early_if_failed = abort_early
- def launch(self, _):
- kwargs = {
- "task_id": str(self._next_task_id),
- }
+ def launch(self, _):
+ kwargs = {
+ "task_id": str(self._next_task_id),
+ }
- self._next_task_id += 1
+ self._next_task_id += 1
- # This looks funny but it's needed to ensure coverage of
- # Attempt.task_ui_link.
- if self._next_task_id % 2 == 0:
- kwargs["host"] = "testhost"
- else:
- kwargs["task_ui_link"] = f"https://testhost/task?id={kwargs['task_id']}"
+ # This looks funny but it's needed to ensure coverage of
+ # Attempt.task_ui_link.
+ if self._next_task_id % 2 == 0:
+ kwargs["host"] = "testhost"
+ else:
+ kwargs["task_ui_link"] = f"https://testhost/task?id={kwargs['task_id']}"
- attempt = self._api.swarming_retry.Attempt(**kwargs)
- self._api.step.empty(f"launch {self.name}", step_text=attempt.task_id)
- return attempt
+ attempt = self._api.swarming_retry.Attempt(**kwargs)
+ self._api.step.empty(f"launch {self.name}", step_text=attempt.task_id)
+ return attempt
class FlakeTask(Task):
- def process_result(self, attempt):
- attempt.has_flakes = True
+
+ def process_result(self, attempt):
+ attempt.has_flakes = True
class InternalFailureTask(Task):
- def process_result(self, attempt):
- attempt.failure_reason = "internal failure"
+
+ def process_result(self, attempt):
+ attempt.failure_reason = "internal failure"
class RaisingTask(Task):
- def process_result(self, _):
- self._api.step.empty("failing step", status=self._api.step.FAILURE)
+
+ def process_result(self, _):
+ self._api.step.empty("failing step", status=self._api.step.FAILURE)
class LedTask(swarming_retry_api.LedTask):
- def __init__(self, initial_task_id, api, **kwargs):
- ir = api.led("get-builder", "project/bucket:builder")
- build_proto = ir.result.buildbucket.bbagent_args.build
- build_proto.id = int(initial_task_id)
- build_proto.infra.swarming.task_id = str(initial_task_id)
- super().__init__(ir, api=api, **kwargs)
- def launch(self, priority_boost_amount):
- ret = super().launch(priority_boost_amount)
+ def __init__(self, initial_task_id, api, **kwargs):
+ ir = api.led("get-builder", "project/bucket:builder")
+ build_proto = ir.result.buildbucket.bbagent_args.build
+ build_proto.id = int(initial_task_id)
+ build_proto.infra.swarming.task_id = str(initial_task_id)
+ super().__init__(ir, api=api, **kwargs)
- build_proto = self._led_data.result.buildbucket.bbagent_args.build
- cur_id = int(build_proto.infra.swarming.task_id)
- build_proto.infra.swarming.task_id = str(cur_id + 1)
- build_proto.id = cur_id + 1
- return ret
+ def launch(self, priority_boost_amount):
+ ret = super().launch(priority_boost_amount)
+
+ build_proto = self._led_data.result.buildbucket.bbagent_args.build
+ cur_id = int(build_proto.infra.swarming.task_id)
+ build_proto.infra.swarming.task_id = str(cur_id + 1)
+ build_proto.id = cur_id + 1
+ return ret
class TriggeredTask(swarming_retry_api.TriggeredTask):
- def __init__(self, api, name, initial_task_id, **kwargs):
- del initial_task_id # Unused.
- dimensions = {
- "pool": "pool",
- "device_type": "device_type",
- }
+ def __init__(self, api, name, initial_task_id, **kwargs):
+ del initial_task_id # Unused.
- request = api.swarming.task_request().with_name(name)
- request = request.with_slice(0, request[0].with_dimensions(**dimensions))
+ dimensions = {
+ "pool": "pool",
+ "device_type": "device_type",
+ }
- super().__init__(request, api, **kwargs)
+ request = api.swarming.task_request().with_name(name)
+ request = request.with_slice(0, request[0].with_dimensions(**dimensions))
+
+ super().__init__(request, api, **kwargs)
# pylint: disable=invalid-name
@@ -147,248 +158,252 @@
run_count,
abort_early,
):
- task_types = {
- "test": Task,
- "flake_task": FlakeTask,
- "internal_failure": InternalFailureTask,
- "raising": RaisingTask,
- "led": LedTask,
- "triggered": TriggeredTask,
- }
+ task_types = {
+ "test": Task,
+ "flake_task": FlakeTask,
+ "internal_failure": InternalFailureTask,
+ "raising": RaisingTask,
+ "led": LedTask,
+ "triggered": TriggeredTask,
+ }
- _create_task = task_types[task_type] # pylint: disable=invalid-name
+ _create_task = task_types[task_type] # pylint: disable=invalid-name
- if full:
- tasks = [
- _create_task(api=api, name="pass", initial_task_id=100),
- _create_task(api=api, name="flake", initial_task_id=200),
- _create_task(api=api, name="fail", initial_task_id=300),
- _create_task(api=api, name="pass_long", initial_task_id=400),
- _create_task(api=api, name="flake_long", initial_task_id=500),
- _create_task(api=api, name="fail_long", initial_task_id=600),
- ]
+ if full:
+ tasks = [
+ _create_task(api=api, name="pass", initial_task_id=100),
+ _create_task(api=api, name="flake", initial_task_id=200),
+ _create_task(api=api, name="fail", initial_task_id=300),
+ _create_task(api=api, name="pass_long", initial_task_id=400),
+ _create_task(api=api, name="flake_long", initial_task_id=500),
+ _create_task(api=api, name="fail_long", initial_task_id=600),
+ ]
- else:
- tasks = [_create_task(api=api, name="task", initial_task_id=100)]
+ else:
+ tasks = [_create_task(api=api, name="task", initial_task_id=100)]
- if abort_early:
- tasks.append(
- _create_task(
- api=api,
- name="abort_early_task",
- initial_task_id=700,
- abort_early=True,
- )
+ if abort_early:
+ tasks.append(
+ _create_task(
+ api=api,
+ name="abort_early_task",
+ initial_task_id=700,
+ abort_early=True,
)
-
- if last_task_max_attempts:
- tasks[-1].max_attempts = last_task_max_attempts
-
- api.swarming_retry.run_and_present_tasks(
- tasks, max_attempts=max_attempts, run_count=run_count
)
+ if last_task_max_attempts:
+ tasks[-1].max_attempts = last_task_max_attempts
+
+ api.swarming_retry.run_and_present_tasks(
+ tasks, max_attempts=max_attempts, run_count=run_count
+ )
+
def GenTests(api): # pylint: disable=invalid-name
- test_api = api.swarming_retry
+ test_api = api.swarming_retry
- def led_build_data(priority=100):
- build = api.buildbucket.ci_build_message(priority=priority)
+ def led_build_data(priority=100):
+ build = api.buildbucket.ci_build_message(priority=priority)
- job_def = job_pb2.Definition()
- job_def.buildbucket.bbagent_args.build.CopyFrom(build)
- return api.led.mock_get_builder(job_def)
+ job_def = job_pb2.Definition()
+ job_def.buildbucket.bbagent_args.build.CopyFrom(build)
+ return api.led.mock_get_builder(job_def)
- yield (
- api.test("full_test", status="FAILURE")
- + api.properties(full=True)
- + test_api.collect_data(
- [
- test_api.passed_task("pass", 100),
- test_api.failed_task("flake", 200),
- test_api.failed_task("fail", 300),
- ],
- iteration=0,
- )
- + test_api.collect_data(
- [test_api.passed_task("flake", 201), test_api.failed_task("fail", 301)],
- iteration=1,
- )
- +
- # `fail` task failed max times so remaining long tasks should only be run
- # once.
- test_api.collect_data(
- [
- test_api.incomplete_task("pass_long", 400),
- test_api.incomplete_task("flake_long", 500),
- test_api.incomplete_task("fail_long", 600),
- ],
- iteration=2,
- )
- + test_api.collect_data([], iteration=3)
- + test_api.collect_data([test_api.passed_task("pass_long", 400)], iteration=4)
- + test_api.collect_data([test_api.failed_task("flake_long", 500)], iteration=5)
- + test_api.collect_data([test_api.failed_task("fail_long", 600)], iteration=6)
- )
+ yield (
+ api.test("full_test", status="FAILURE") + api.properties(full=True) +
+ test_api.collect_data(
+ [
+ test_api.passed_task("pass", 100),
+ test_api.failed_task("flake", 200),
+ test_api.failed_task("fail", 300),
+ ],
+ iteration=0,
+ ) + test_api.collect_data(
+ [
+ test_api.passed_task("flake", 201),
+ test_api.failed_task("fail", 301)
+ ],
+ iteration=1,
+ ) +
+    # The `fail` task failed the max number of times, so the remaining
+    # long tasks should only be run once.
+ test_api.collect_data(
+ [
+ test_api.incomplete_task("pass_long", 400),
+ test_api.incomplete_task("flake_long", 500),
+ test_api.incomplete_task("fail_long", 600),
+ ],
+ iteration=2,
+ ) + test_api.collect_data([], iteration=3) +
+ test_api.collect_data([test_api.passed_task("pass_long", 400)],
+ iteration=4) +
+ test_api.collect_data([test_api.failed_task("flake_long", 500)],
+ iteration=5) +
+ test_api.collect_data([test_api.failed_task("fail_long", 600)],
+ iteration=6)
+ )
- yield (
- api.test("timeout_then_pass")
- + api.properties(full=False)
- + test_api.collect_data([test_api.timed_out_task("task", 100)])
- + test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
- )
+ yield (
+ api.test("timeout_then_pass") + api.properties(full=False) +
+ test_api.collect_data([test_api.timed_out_task("task", 100)]) +
+ test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
+ )
- yield (
- api.test("internal_failure", status="FAILURE")
- + api.properties(full=False, task_type="internal_failure")
- + test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
- )
+ yield (
+ api.test("internal_failure", status="FAILURE") +
+ api.properties(full=False, task_type="internal_failure") +
+ test_api.collect_data([test_api.passed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
+ )
- yield (
- api.test("raising_process_results", status="FAILURE")
- + api.properties(full=False, task_type="raising")
- + test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
- )
+ yield (
+ api.test("raising_process_results", status="FAILURE") +
+ api.properties(full=False, task_type="raising") +
+ test_api.collect_data([test_api.passed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
+ )
- yield (
- api.test("led_task")
- + api.properties(full=False, task_type="led")
- + led_build_data()
- + test_api.collect_data([test_api.failed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
- )
+ yield (
+ api.test("led_task") + api.properties(full=False, task_type="led") +
+ led_build_data() +
+ test_api.collect_data([test_api.failed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
+ )
- yield (
- api.test("led_task_hardcoded_attempt")
- + api.properties(full=False, task_type="led")
- + led_build_data()
- )
+ yield (
+ api.test("led_task_hardcoded_attempt") +
+ api.properties(full=False, task_type="led") + led_build_data()
+ )
- yield (
- api.test("triggered_task")
- + api.properties(full=False, task_type="triggered")
- + api.swarming_retry.trigger_data("task", 1, iteration=0)
- + test_api.collect_data([test_api.failed_task("task", 1)], iteration=0)
- + api.swarming_retry.trigger_data("task", 2, iteration=1)
- + test_api.collect_data([test_api.passed_task("task", 2)], iteration=1)
- )
+ yield (
+ api.test("triggered_task") +
+ api.properties(full=False, task_type="triggered") +
+ api.swarming_retry.trigger_data("task", 1, iteration=0) +
+ test_api.collect_data([test_api.failed_task("task", 1)], iteration=0) +
+ api.swarming_retry.trigger_data("task", 2, iteration=1) +
+ test_api.collect_data([test_api.passed_task("task", 2)], iteration=1)
+ )
- yield (
- api.test("max_attempts_three", status="FAILURE")
- + api.properties(full=False, task_type="raising", max_attempts=3)
- + test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
- + test_api.collect_data([test_api.passed_task("task", 102)], iteration=2)
- )
+ yield (
+ api.test("max_attempts_three", status="FAILURE") +
+ api.properties(full=False, task_type="raising", max_attempts=3) +
+ test_api.collect_data([test_api.passed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.passed_task("task", 101)], iteration=1) +
+ test_api.collect_data([test_api.passed_task("task", 102)], iteration=2)
+ )
- yield (
- api.test("last_task_max_attempts_low", status="FAILURE")
- + api.properties(
- full=False, task_type="raising", max_attempts=3, last_task_max_attempts=1
- )
- + test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
- )
+ yield (
+ api.test("last_task_max_attempts_low", status="FAILURE") + api.properties(
+ full=False,
+ task_type="raising",
+ max_attempts=3,
+ last_task_max_attempts=1
+ ) +
+ test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
+ )
- yield (
- api.test("last_task_max_attempts_high", status="FAILURE")
- + api.properties(
- full=False, task_type="raising", max_attempts=3, last_task_max_attempts=5
- )
- + test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
- + test_api.collect_data([test_api.passed_task("task", 102)], iteration=2)
- + test_api.collect_data([test_api.passed_task("task", 103)], iteration=3)
- + test_api.collect_data([test_api.passed_task("task", 104)], iteration=4)
- )
+ yield (
+ api.test("last_task_max_attempts_high", status="FAILURE") +
+ api.properties(
+ full=False,
+ task_type="raising",
+ max_attempts=3,
+ last_task_max_attempts=5
+ ) +
+ test_api.collect_data([test_api.passed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.passed_task("task", 101)], iteration=1) +
+ test_api.collect_data([test_api.passed_task("task", 102)], iteration=2) +
+ test_api.collect_data([test_api.passed_task("task", 103)], iteration=3) +
+ test_api.collect_data([test_api.passed_task("task", 104)], iteration=4)
+ )
- # Test case where we want to get multiple successful runs of the same
- # task (run_count > 1).
+ # Test case where we want to get multiple successful runs of the same
+ # task (run_count > 1).
- # Test the simple case where there are no failures of the task.
- yield (
- api.test("multirun_without_failures", status="SUCCESS")
- + api.properties(run_count=2, task_type="triggered")
- # Enforce that both of these task attempt are launched in the first
- # iteration. (This requires using task_type="triggered".)
- + api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0)
- + api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1)
- + test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
- )
+ # Test the simple case where there are no failures of the task.
+ yield (
+ api.test("multirun_without_failures", status="SUCCESS") +
+ api.properties(run_count=2, task_type="triggered")
+      # Enforce that both of these task attempts are launched in the first
+ # iteration. (This requires using task_type="triggered".)
+ + api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0) +
+ api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1) +
+ test_api.collect_data([test_api.passed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.passed_task("task", 101)], iteration=1)
+ )
- # Test the case where a task must be retried (multiple times) but
- # eventually passes.
- yield (
- api.test("multirun_retry_overall_pass", status="SUCCESS")
- + api.properties(run_count=2, task_type="triggered")
- + api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0)
- + api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1)
- + test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.failed_task("task", 101)], iteration=1)
- + api.swarming_retry.trigger_data("task", 102, iteration=2, attempt=2)
- + test_api.collect_data([test_api.failed_task("task", 102)], iteration=2)
- + api.swarming_retry.trigger_data("task", 103, iteration=3, attempt=3)
- + test_api.collect_data([test_api.passed_task("task", 103)], iteration=3)
- )
+ # Test the case where a task must be retried (multiple times) but
+ # eventually passes.
+ yield (
+ api.test("multirun_retry_overall_pass", status="SUCCESS") +
+ api.properties(run_count=2, task_type="triggered") +
+ api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0) +
+ api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1) +
+ test_api.collect_data([test_api.passed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.failed_task("task", 101)], iteration=1) +
+ api.swarming_retry.trigger_data("task", 102, iteration=2, attempt=2) +
+ test_api.collect_data([test_api.failed_task("task", 102)], iteration=2) +
+ api.swarming_retry.trigger_data("task", 103, iteration=3, attempt=3) +
+ test_api.collect_data([test_api.passed_task("task", 103)], iteration=3)
+ )
- # Test the case where a task is retried, but ultimately we do not get
- # enough passes within the max_attempts retry limit.
- yield (
- api.test("multirun_retry_overall_fail", status="FAILURE")
- + api.properties(run_count=2, task_type="triggered")
- + api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0)
- + api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1)
- + test_api.collect_data([test_api.passed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.failed_task("task", 101)], iteration=1)
- + api.swarming_retry.trigger_data("task", 102, iteration=2, attempt=2)
- + test_api.collect_data([test_api.failed_task("task", 102)], iteration=2)
- + api.swarming_retry.trigger_data("task", 103, iteration=3, attempt=3)
- + test_api.collect_data([test_api.failed_task("task", 103)], iteration=3)
- )
+ # Test the case where a task is retried, but ultimately we do not get
+ # enough passes within the max_attempts retry limit.
+ yield (
+ api.test("multirun_retry_overall_fail", status="FAILURE") +
+ api.properties(run_count=2, task_type="triggered") +
+ api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0) +
+ api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1) +
+ test_api.collect_data([test_api.passed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.failed_task("task", 101)], iteration=1) +
+ api.swarming_retry.trigger_data("task", 102, iteration=2, attempt=2) +
+ test_api.collect_data([test_api.failed_task("task", 102)], iteration=2) +
+ api.swarming_retry.trigger_data("task", 103, iteration=3, attempt=3) +
+ test_api.collect_data([test_api.failed_task("task", 103)], iteration=3)
+ )
- # If the last attempt in the list passed but the task failed overall,
- # it should not be treated as passed.
- #
- # Also, if the last attempt in the list completes before other attempts
- # have completed, the task should be treated as in-progress not
- # finished.
- yield (
- api.test("multirun_fail_pass", status="FAILURE")
- + api.properties(run_count=2, max_attempts=1, task_type="triggered")
- + api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0)
- + api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1)
- # The second attempt passes first.
- + test_api.collect_data([test_api.passed_task("task", 101)], iteration=0)
- + test_api.collect_data([test_api.failed_task("task", 100)], iteration=1)
- )
+ # If the last attempt in the list passed but the task failed overall,
+ # it should not be treated as passed.
+ #
+ # Also, if the last attempt in the list completes before other attempts
+  # have completed, the task should be treated as in-progress, not
+  # finished.
+ yield (
+ api.test("multirun_fail_pass", status="FAILURE") +
+ api.properties(run_count=2, max_attempts=1, task_type="triggered") +
+ api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0) +
+ api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1)
+ # The second attempt passes first.
+ +
+ test_api.collect_data([test_api.passed_task("task", 101)], iteration=0) +
+ test_api.collect_data([test_api.failed_task("task", 100)], iteration=1)
+ )
- # Test that the "no futile retries" strategy is applied: If all of the
- # attempts in the first batch fail, there should be no retries.
- yield (
- api.test("multirun_no_futile_retries", status="FAILURE")
- + api.properties(run_count=2, task_type="triggered")
- + api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0)
- + api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1)
- + test_api.collect_data([test_api.failed_task("task", 100)], iteration=0)
- + test_api.collect_data([test_api.failed_task("task", 101)], iteration=1)
- )
+ # Test that the "no futile retries" strategy is applied: If all of the
+ # attempts in the first batch fail, there should be no retries.
+ yield (
+ api.test("multirun_no_futile_retries", status="FAILURE") +
+ api.properties(run_count=2, task_type="triggered") +
+ api.swarming_retry.trigger_data("task", 100, iteration=0, attempt=0) +
+ api.swarming_retry.trigger_data("task", 101, iteration=0, attempt=1) +
+ test_api.collect_data([test_api.failed_task("task", 100)], iteration=0) +
+ test_api.collect_data([test_api.failed_task("task", 101)], iteration=1)
+ )
- yield (
- api.test("no_collect_after_failed_abort_early_task", status="FAILURE")
- + api.properties(full=True, abort_early=True, task_type="flake_task")
- + test_api.collect_data(
- [
- test_api.failed_task("abort_early_task", 700),
- test_api.passed_task("pass", 100),
- test_api.failed_task("flake", 200),
- test_api.failed_task("fail", 300),
- ],
- iteration=0,
- )
- + test_api.collect_data(
- [test_api.failed_task("abort_early_task", 701)],
- iteration=1,
- )
- )
+ yield (
+ api.test("no_collect_after_failed_abort_early_task", status="FAILURE") +
+ api.properties(full=True, abort_early=True, task_type="flake_task") +
+ test_api.collect_data(
+ [
+ test_api.failed_task("abort_early_task", 700),
+ test_api.passed_task("pass", 100),
+ test_api.failed_task("flake", 200),
+ test_api.failed_task("fail", 300),
+ ],
+ iteration=0,
+ ) + test_api.collect_data(
+ [test_api.failed_task("abort_early_task", 701)],
+ iteration=1,
+ )
+ )
diff --git a/recipe_modules/tar/api.py b/recipe_modules/tar/api.py
index d72f230..f64f332 100644
--- a/recipe_modules/tar/api.py
+++ b/recipe_modules/tar/api.py
@@ -6,34 +6,34 @@
class TarApi(recipe_api.RecipeApi):
- """Provides steps to tar and untar files."""
+ """Provides steps to tar and untar files."""
- COMPRESSION_OPTS = ["gzip", "bzip2", "xz", "lzma"]
+ COMPRESSION_OPTS = ["gzip", "bzip2", "xz", "lzma"]
- def __call__(self, step_name, cmd):
- full_cmd = [self._bsdtar_path] + list(cmd)
- return self.m.step(step_name, full_cmd)
+ def __call__(self, step_name, cmd):
+ full_cmd = [self._bsdtar_path] + list(cmd)
+ return self.m.step(step_name, full_cmd)
- @property
- def _bsdtar_path(self):
- """Ensures that bsdtar is installed."""
- return self.m.ensure_tool("bsdtar", self.resource("tool_manifest.json"))
+ @property
+ def _bsdtar_path(self):
+ """Ensures that bsdtar is installed."""
+ return self.m.ensure_tool("bsdtar", self.resource("tool_manifest.json"))
- def create(self, path, compression=None):
- """Returns TarArchive object that can be used to compress a set of files.
+ def create(self, path, compression=None):
+ """Returns TarArchive object that can be used to compress a set of files.
Args:
path: path of the archive file to be created.
compression: str, one of COMPRESSION_OPTS or None to disable compression.
"""
- assert not compression or compression in TarApi.COMPRESSION_OPTS, (
- "compression must be one of %s",
- TarApi.COMPRESSION_OPTS,
- )
- return TarArchive(self.m, path, compression)
+ assert not compression or compression in TarApi.COMPRESSION_OPTS, (
+ "compression must be one of %s",
+ TarApi.COMPRESSION_OPTS,
+ )
+ return TarArchive(self.m, path, compression)
- def extract(self, step_name, path, directory=None, strip_components=None):
- """Uncompress |archive| file.
+ def extract(self, step_name, path, directory=None, strip_components=None):
+ """Uncompress |archive| file.
Args:
step_name: name of the step.
@@ -41,72 +41,68 @@
directory: directory to extract the archive in.
strip_components: strip number of leading components from file names.
"""
- # We use long-form options whenever possible, but for options with
- # arguments, we have to use the short form. The recipe engine tests require
- # objects which might be placeholders (in this case |path|) to be their own
- # argument, and the version of tar we're using doesn't support
- # '--long-opt arg'. It only supports '--long-opt=arg' or short-form like
- # '-s arg'.
- cmd = [
- "--extract",
- "--verbose",
- "-f",
- path,
- ]
- if directory:
- cmd.extend(["-C", directory])
- if strip_components:
- cmd.extend(["--strip-components", str(int(strip_components))])
- return self(step_name, cmd)
+ # We use long-form options whenever possible, but for options with
+ # arguments, we have to use the short form. The recipe engine tests require
+ # objects which might be placeholders (in this case |path|) to be their own
+ # argument, and the version of tar we're using doesn't support
+ # '--long-opt arg'. It only supports '--long-opt=arg' or short-form like
+ # '-s arg'.
+ cmd = [
+ "--extract",
+ "--verbose",
+ "-f",
+ path,
+ ]
+ if directory:
+ cmd.extend(["-C", directory])
+ if strip_components:
+ cmd.extend(["--strip-components", str(int(strip_components))])
+ return self(step_name, cmd)
class TarArchive:
- """Used to gather a list of files to tar."""
+ """Used to gather a list of files to tar."""
- def __init__(self, api, path, compression):
- self._api = api
- self._path = path
- self._compression = compression
- self._entries = {}
+ def __init__(self, api, path, compression):
+ self._api = api
+ self._path = path
+ self._compression = compression
+ self._entries = {}
- @property
- def path(self):
- return self._path
+ @property
+ def path(self):
+ return self._path
- def add(self, path, directory=None):
- """Stages single file to be added to the package.
+ def add(self, path, directory=None):
+ """Stages single file to be added to the package.
Args:
path: absolute path to a file, should be a child of |directory|.
directory: ancestor directory of |path|. The name of the file
inside the archive will not include |directory|. Defaults to $CWD.
"""
- if not directory:
- directory = self._api.context.cwd
- assert directory.is_parent_of(
- path
- ), "directory must be a parent of path. directory: %s.%s, path: %s.%s" % (
- directory.base,
- directory.pieces,
- path.base,
- path.pieces,
- )
- self._entries.setdefault(str(directory), []).append(str(path))
+ if not directory:
+ directory = self._api.context.cwd
+ assert directory.is_parent_of(
+ path
+ ), "directory must be a parent of path. directory: %s.%s, path: %s.%s" % (
+ directory.base,
+ directory.pieces,
+ path.base,
+ path.pieces,
+ )
+ self._entries.setdefault(str(directory), []).append(str(path))
- def tar(self, step_name):
- """Step to tar all staged files."""
- cmd = ["--create", "-f", self._path]
- if self._compression:
- cmd.append("--%s" % self._compression)
- for directory in sorted(self._entries):
- cmd.extend(
- ["-C", directory]
- + [
- self._api.path.relpath(p, directory)
- for p in self._entries[directory]
- ]
- )
+ def tar(self, step_name):
+ """Step to tar all staged files."""
+ cmd = ["--create", "-f", self._path]
+ if self._compression:
+ cmd.append("--%s" % self._compression)
+ for directory in sorted(self._entries):
+ cmd.extend(["-C", directory] + [
+ self._api.path.relpath(p, directory) for p in self._entries[directory]
+ ])
- step_result = self._api.tar(step_name, cmd)
- self._api.path.mock_add_paths(self._path)
- return step_result
+ step_result = self._api.tar(step_name, cmd)
+ self._api.path.mock_add_paths(self._path)
+ return step_result
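Note on extract() above: long-form flags are preferred, but options taking a
placeholder argument must use the short form. A minimal sketch, outside the
recipe engine, of the equivalent bsdtar invocation; 'a.tar' and 'out' are
hypothetical names, and the bsdtar binary is assumed to be on PATH:

    import subprocess

    # Roughly the command list extract() assembles for a call like
    # api.tar.extract('untar', 'a.tar', directory='out', strip_components=1).
    subprocess.run([
        'bsdtar', '--extract', '--verbose',
        '-f', 'a.tar',                 # short form: arg may be a placeholder
        '-C', 'out',
        '--strip-components', '1',
    ], check=True)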
diff --git a/recipe_modules/tar/tests/full.py b/recipe_modules/tar/tests/full.py
index 2bc4bab..3b4b11f 100644
--- a/recipe_modules/tar/tests/full.py
+++ b/recipe_modules/tar/tests/full.py
@@ -13,38 +13,38 @@
def RunSteps(api):
- # Prepare files.
- temp = api.path.mkdtemp("tar-example")
- api.step("touch a", ["touch", temp.join("a")])
- api.step("touch b", ["touch", temp.join("b")])
- api.file.ensure_directory("mkdirs", temp.join("sub", "dir"))
- api.step("touch c", ["touch", temp.join("sub", "dir", "c")])
+ # Prepare files.
+ temp = api.path.mkdtemp("tar-example")
+ api.step("touch a", ["touch", temp.join("a")])
+ api.step("touch b", ["touch", temp.join("b")])
+ api.file.ensure_directory("mkdirs", temp.join("sub", "dir"))
+ api.step("touch c", ["touch", temp.join("sub", "dir", "c")])
- # Build a tar file.
- archive = api.tar.create(temp.join("more.tar.gz"), compression="gzip")
- archive.add(temp.join("a"), temp)
- with api.context(cwd=temp):
- archive.add(temp.join("b"))
- archive.add(temp.join("sub", "dir", "c"), temp.join("sub"))
- archive.tar("taring more")
+ # Build a tar file.
+ archive = api.tar.create(temp.join("more.tar.gz"), compression="gzip")
+ archive.add(temp.join("a"), temp)
+ with api.context(cwd=temp):
+ archive.add(temp.join("b"))
+ archive.add(temp.join("sub", "dir", "c"), temp.join("sub"))
+ archive.tar("taring more")
- # Coverage for 'output' property.
- api.step("report", ["echo", archive.path])
+ # Coverage for 'output' property.
+ api.step("report", ["echo", archive.path])
- # Extract the archive into a directory stripping one path component.
- api.tar.extract(
- "untaring",
- temp.join("output.tar"),
- directory=temp.join("output"),
- strip_components=1,
- )
- # List untarped content.
- with api.context(cwd=temp.join("output")):
- api.step("listing", ["find"])
- # Clean up.
- api.file.rmtree("rmtree %s" % temp, temp)
+ # Extract the archive into a directory stripping one path component.
+ api.tar.extract(
+ "untaring",
+ temp.join("output.tar"),
+ directory=temp.join("output"),
+ strip_components=1,
+ )
+  # List untarred content.
+ with api.context(cwd=temp.join("output")):
+ api.step("listing", ["find"])
+ # Clean up.
+ api.file.rmtree("rmtree %s" % temp, temp)
def GenTests(api):
- for platform in ("linux", "mac"):
- yield api.test(platform) + api.platform.name(platform)
+ for platform in ("linux", "mac"):
+ yield api.test(platform) + api.platform.name(platform)
diff --git a/recipe_modules/test_utils/api.py b/recipe_modules/test_utils/api.py
index 7e43bef..7ef46e7 100644
--- a/recipe_modules/test_utils/api.py
+++ b/recipe_modules/test_utils/api.py
@@ -101,7 +101,14 @@
str(self.m.swarming.bot_id).startswith('flutter-win')
)
- def run_test(self, step_name, command_list, timeout_secs=TIMEOUT_SECS, infra_step=False, suppress_log=False):
+ def run_test(
+ self,
+ step_name,
+ command_list,
+ timeout_secs=TIMEOUT_SECS,
+ infra_step=False,
+ suppress_log=False
+ ):
"""Recipe's step wrapper to collect stdout and add it to step_summary.
Args:
@@ -132,8 +139,8 @@
# Truncate stderr
stderr = self._truncateString(result.stderr)
raise self.m.step.StepFailure(
- '\n\n```%s```\n' % (stdout or stderr),
- result=result,
+ '\n\n```%s```\n' % (stdout or stderr),
+ result=result,
)
finally:
if not suppress_log:
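For context, a hedged sketch of a call against the reflowed run_test()
signature; the step name and command here are hypothetical, not taken from
this repo:

    # Inside a recipe with the test_utils module in DEPS (names made up).
    api.test_utils.run_test(
        'unit tests',            # step_name
        ['dart', 'test'],        # command_list
        timeout_secs=600,        # override the TIMEOUT_SECS default
        infra_step=False,
        suppress_log=False,
    )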
diff --git a/recipe_modules/test_utils/examples/full.py b/recipe_modules/test_utils/examples/full.py
index 94fbdb6..fd4a941 100644
--- a/recipe_modules/test_utils/examples/full.py
+++ b/recipe_modules/test_utils/examples/full.py
@@ -39,9 +39,7 @@
api.step_data(
'mytest',
stdout=api.raw_io.output_text('#success\nthis is a success'),
- ),
- api.platform.name('mac'),
- api.properties(tags=['ios']),
+ ), api.platform.name('mac'), api.properties(tags=['ios']),
api.step_data(
'Find device type',
stdout=api.raw_io.output_text('iPhone8,1'),
@@ -52,8 +50,7 @@
api.step_data(
'mytest',
stdout=api.raw_io.output_text('#flaky\nthis is a flaky\nflaky: true'),
- ),
- api.properties(tags=['hostonly', 'android']),
+ ), api.properties(tags=['hostonly', 'android']),
api.platform.name('linux')
)
yield api.test(
diff --git a/recipe_modules/token_util/api.py b/recipe_modules/token_util/api.py
index 7c9618f..facb840 100644
--- a/recipe_modules/token_util/api.py
+++ b/recipe_modules/token_util/api.py
@@ -45,4 +45,3 @@
include_log=False
)
return cocoon_access_token_path
-
diff --git a/recipe_modules/yaml/api.py b/recipe_modules/yaml/api.py
index 6de234e..13d19da 100644
--- a/recipe_modules/yaml/api.py
+++ b/recipe_modules/yaml/api.py
@@ -28,11 +28,11 @@
content = self.m.file.read_text('read', file_path)
presentation.logs['yaml'] = content
return self.m.step(
- 'parse',
- [
- 'vpython3', self.resource('parse_yaml.py'),
- '--yaml_file', file_path,
- '--json_file', self.m.json.output()
+ 'parse', [
+ 'vpython3',
+ self.resource('parse_yaml.py'), '--yaml_file', file_path,
+ '--json_file',
+ self.m.json.output()
],
infra_step=True
- )
\ No newline at end of file
+ )
diff --git a/recipe_modules/yaml/examples/full.py b/recipe_modules/yaml/examples/full.py
index 6442545..4bae24b 100644
--- a/recipe_modules/yaml/examples/full.py
+++ b/recipe_modules/yaml/examples/full.py
@@ -24,15 +24,13 @@
def GenTests(api):
yield api.test(
- 'passing',
- api.step_data('yaml.parse',
- api.json.output({'key': 'value'})),
- api.step_data('yaml.read',
- api.file.read_text(text_content=YAML_CONTENT))
+ 'passing', api.step_data('yaml.parse', api.json.output({'key': 'value'})),
+ api.step_data('yaml.read', api.file.read_text(text_content=YAML_CONTENT))
)
yield api.test(
'fail_to_read',
- api.step_data('yaml.read', retcode=1,
- stderr=api.raw_io.output_text('fail')),
+ api.step_data(
+ 'yaml.read', retcode=1, stderr=api.raw_io.output_text('fail')
+ ),
status='INFRA_FAILURE'
)
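The yaml module's parse step shells out to its parse_yaml.py resource with the
flags shown in api.py above; a sketch of the equivalent standalone invocation
under that reading, with hypothetical file names:

    import subprocess

    # parse_yaml.py converts the YAML file into JSON on disk (paths made up).
    subprocess.run([
        'vpython3', 'parse_yaml.py',
        '--yaml_file', 'config.yaml',
        '--json_file', 'config.json',
    ], check=True)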
diff --git a/recipe_modules/zip/__init__.py b/recipe_modules/zip/__init__.py
index 61dd06f..6ac63fe 100644
--- a/recipe_modules/zip/__init__.py
+++ b/recipe_modules/zip/__init__.py
@@ -3,7 +3,7 @@
# found in the LICENSE file.
DEPS = [
- 'recipe_engine/json',
- 'recipe_engine/path',
- 'recipe_engine/step',
+ 'recipe_engine/json',
+ 'recipe_engine/path',
+ 'recipe_engine/step',
]
diff --git a/recipe_modules/zip/api.py b/recipe_modules/zip/api.py
index c150ad2..7e45c10 100644
--- a/recipe_modules/zip/api.py
+++ b/recipe_modules/zip/api.py
@@ -49,13 +49,13 @@
zip_file: path to a zip file to get its namelist, should exist.
"""
script_input = {
- 'zip_file': str(zip_file),
+ 'zip_file': str(zip_file),
}
names_step = self.m.step(
- step_name,
- [ 'python3', self.resource('namelist.py') ],
- stdin=self.m.json.input(script_input),
- stdout=self.m.json.output(),
+ step_name,
+ ['python3', self.resource('namelist.py')],
+ stdin=self.m.json.input(script_input),
+ stdout=self.m.json.output(),
)
return names_step.stdout or []
@@ -76,14 +76,13 @@
"""
# TODO(vadimsh): Use 7zip on Windows if available?
script_input = {
- 'output': str(output),
- 'zip_file': str(zip_file),
- 'quiet': quiet,
+ 'output': str(output),
+ 'zip_file': str(zip_file),
+ 'quiet': quiet,
}
self.m.step(
- step_name,
- [ 'python3', self.resource('unzip.py') ],
- stdin=self.m.json.input(script_input)
+ step_name, ['python3', self.resource('unzip.py')],
+ stdin=self.m.json.input(script_input)
)
@@ -113,9 +112,7 @@
"""
assert self._root.is_parent_of(path), path
self._entries.append({
- 'type': 'file',
- 'path': str(path),
- 'archive_name': archive_name
+ 'type': 'file', 'path': str(path), 'archive_name': archive_name
})
def add_directory(self, path):
@@ -127,20 +124,20 @@
# TODO(vadimsh): Implement 'exclude' filter.
assert self._root.is_parent_of(path) or path == self._root, path
self._entries.append({
- 'type': 'dir',
- 'path': str(path),
+ 'type': 'dir',
+ 'path': str(path),
})
def zip(self, step_name):
"""Step to zip all staged files."""
script_input = {
- 'entries': self._entries,
- 'output': str(self._output),
- 'root': str(self._root),
+ 'entries': self._entries,
+ 'output': str(self._output),
+ 'root': str(self._root),
}
step_result = self._api.step(
- step_name,
- [ 'python3', self._api.zip.resource('zip.py') ],
- stdin=self._api.json.input(script_input))
+ step_name, ['python3', self._api.zip.resource('zip.py')],
+ stdin=self._api.json.input(script_input)
+ )
self._api.path.mock_add_paths(self._output)
return step_result
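Each step in the zip module hands its inputs to a resource script as a JSON
object on stdin; a sketch of the payload unzip() builds, with hypothetical
paths:

    import json

    # Mirrors the script_input dict in unzip() above; paths are made up.
    payload = json.dumps({
        'output': '/tmp/out',      # where to extract
        'zip_file': '/tmp/a.zip',  # archive to read
        'quiet': True,             # suppress per-file output
    })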
diff --git a/recipe_modules/zip/examples/full.py b/recipe_modules/zip/examples/full.py
index a3aaf08..463ab3d 100644
--- a/recipe_modules/zip/examples/full.py
+++ b/recipe_modules/zip/examples/full.py
@@ -3,15 +3,16 @@
# found in the LICENSE file.
DEPS = [
- 'flutter/zip',
- 'recipe_engine/context',
- 'recipe_engine/file',
- 'recipe_engine/path',
- 'recipe_engine/platform',
- 'recipe_engine/step',
- 'recipe_engine/json',
+ 'flutter/zip',
+ 'recipe_engine/context',
+ 'recipe_engine/file',
+ 'recipe_engine/path',
+ 'recipe_engine/platform',
+ 'recipe_engine/step',
+ 'recipe_engine/json',
]
+
def RunSteps(api):
# Prepare files.
temp = api.path.mkdtemp('zip-example')
@@ -34,8 +35,9 @@
api.step('report', ['echo', package.output])
# Unzip the package.
- api.zip.unzip('unzipping', temp.join('output.zip'), temp.join('output'),
- quiet=True)
+ api.zip.unzip(
+ 'unzipping', temp.join('output.zip'), temp.join('output'), quiet=True
+ )
# List unzipped content.
with api.context(cwd=temp.join('output')):
api.step('listing', ['find'])
@@ -52,7 +54,6 @@
def GenTests(api):
for platform in ('linux', 'win', 'mac'):
yield api.test(
- platform,
- api.platform.name(platform),
+ platform, api.platform.name(platform),
api.zip.namelist('namelist', ['/a/b/c.txt'])
)
diff --git a/recipe_modules/zip/resources/namelist.py b/recipe_modules/zip/resources/namelist.py
index 4b15da0..e2ec250 100644
--- a/recipe_modules/zip/resources/namelist.py
+++ b/recipe_modules/zip/resources/namelist.py
@@ -7,6 +7,7 @@
import sys
import zipfile
+
def main():
# See zip/api.py, def unzip(...) for format of |data|.
data = json.load(sys.stdin)
@@ -20,5 +21,6 @@
sys.stdout.write(json.dumps(artifact_zip.namelist()))
return 0
+
if __name__ == '__main__':
sys.exit(main())
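namelist.py consumes that JSON payload on stdin and prints the archive's name
list as JSON on stdout, which api.py decodes via json.output(). A sketch of
driving the script by hand, with a hypothetical archive name:

    import json
    import subprocess

    proc = subprocess.run(
        ['python3', 'namelist.py'],
        input=json.dumps({'zip_file': 'a.zip'}),  # 'a.zip' is made up
        capture_output=True, text=True, check=True,
    )
    names = json.loads(proc.stdout)  # e.g. ['a/b/c.txt']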
diff --git a/recipe_modules/zip/resources/unzip.py b/recipe_modules/zip/resources/unzip.py
index 6dbfdcb..9846daa 100644
--- a/recipe_modules/zip/resources/unzip.py
+++ b/recipe_modules/zip/resources/unzip.py
@@ -33,9 +33,7 @@
args += ['-q']
args += [zip_file]
- return subprocess.call(
- args=args,
- cwd=output)
+ return subprocess.call(args=args, cwd=output)
def unzip_with_python(zip_file, output):
diff --git a/recipe_modules/zip/resources/zip.py b/recipe_modules/zip/resources/zip.py
index 79d6331..2b458a1 100644
--- a/recipe_modules/zip/resources/zip.py
+++ b/recipe_modules/zip/resources/zip.py
@@ -55,31 +55,34 @@
proc = subprocess.Popen(
args=['zip', '-1', '--recurse-paths', '--symlinks', '-@', output],
stdin=subprocess.PIPE,
- cwd=root)
+ cwd=root
+ )
items_to_zip_bytes = []
for item in items_to_zip:
- items_to_zip_bytes.append(item if isinstance(item, bytes) else bytes(item, 'UTF-8'))
-
+ items_to_zip_bytes.append(
+ item if isinstance(item, bytes) else bytes(item, 'UTF-8')
+ )
+
proc.communicate(b'\n'.join(items_to_zip_bytes))
return proc.returncode
def walk_dir_and_do(directory_path, callback):
- for cur, _, files in os.walk(directory_path):
- for name in files:
- callback(os.path.join(cur, name))
+ for cur, _, files in os.walk(directory_path):
+ for name in files:
+ callback(os.path.join(cur, name))
def hash_file(file_path):
- BUFFER_SIZE = 1 << 16 # 64kb
- sha = hashlib.sha256()
- with open(file_path, 'rb') as f:
- data = f.read(BUFFER_SIZE)
- while data:
- sha.update(data)
- data = f.read(BUFFER_SIZE)
- digest = sha.hexdigest()
- print('sha256 digest for %s is:\n%s\n' % (file_path, digest))
+ BUFFER_SIZE = 1 << 16 # 64kb
+ sha = hashlib.sha256()
+ with open(file_path, 'rb') as f:
+ data = f.read(BUFFER_SIZE)
+ while data:
+ sha.update(data)
+ data = f.read(BUFFER_SIZE)
+ digest = sha.hexdigest()
+ print('sha256 digest for %s is:\n%s\n' % (file_path, digest))
def zip_with_python(root, output, entries):
@@ -97,6 +100,7 @@
"""
with zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED,
allowZip64=True) as zip_file:
+
def add(path, archive_name):
assert path.startswith(root), path
# Do not add itself to archive.
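zip_with_zip above leans on Info-ZIP's -@ flag, which reads the member list
from stdin; that is why the entries are encoded to bytes and joined with
newlines before communicate(). A minimal sketch under that assumption, with
hypothetical file names:

    import subprocess

    proc = subprocess.Popen(
        ['zip', '-1', '--recurse-paths', '--symlinks', '-@', 'out.zip'],
        stdin=subprocess.PIPE,
    )
    proc.communicate(b'\n'.join([b'a.txt', b'sub/dir']))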
diff --git a/recipe_modules/zip/test_api.py b/recipe_modules/zip/test_api.py
index 185dbf8..8f2a2e4 100644
--- a/recipe_modules/zip/test_api.py
+++ b/recipe_modules/zip/test_api.py
@@ -6,7 +6,8 @@
from recipe_engine import recipe_test_api
-class ZipTestApi(recipe_test_api.RecipeTestApi): # pragma: no cover
+
+class ZipTestApi(recipe_test_api.RecipeTestApi): # pragma: no cover
"""Test api for zip module."""
def namelist(self, name, output):
@@ -18,8 +19,5 @@
inside the zip file.
"""
return self.override_step_data(
- name,
- stdout=self.m.json.output(output),
- retcode=0
+ name, stdout=self.m.json.output(output), retcode=0
)
-
diff --git a/recipes.py b/recipes.py
index 7c534c2..94232fd 100755
--- a/recipes.py
+++ b/recipes.py
@@ -11,8 +11,10 @@
# This little shell hack is a triple-quoted noop in python, but in sh it
# evaluates to re-exec'ing this script in unbuffered mode.
# pylint: disable=pointless-string-statement
+
''''exec python3 -u -- "$0" ${1+"$@"} # '''
# vi: syntax=python
+
"""Bootstrap script to clone and forward to the recipe engine tool.
*******************
@@ -79,8 +81,9 @@
try:
if pb['api_version'] != 2:
- raise MalformedRecipesCfg('unknown version %d' % pb['api_version'],
- recipes_cfg_path)
+ raise MalformedRecipesCfg(
+ 'unknown version %d' % pb['api_version'], recipes_cfg_path
+ )
# If we're running ./recipes.py from the recipe_engine repo itself, then
# return None to signal that there's no EngineDep.
@@ -95,7 +98,8 @@
if 'url' not in engine:
raise MalformedRecipesCfg(
'Required field "url" in dependency "recipe_engine" not found',
- recipes_cfg_path)
+ recipes_cfg_path
+ )
engine.setdefault('revision', '')
engine.setdefault('branch', 'refs/heads/main')
@@ -105,8 +109,9 @@
if not engine['branch'].startswith('refs/'):
engine['branch'] = 'refs/heads/' + engine['branch']
- recipes_path = os.path.join(repo_root,
- recipes_path.replace('/', os.path.sep))
+ recipes_path = os.path.join(
+ repo_root, recipes_path.replace('/', os.path.sep)
+ )
return EngineDep(**engine), recipes_path, py3_only
except KeyError as ex:
raise MalformedRecipesCfg(str(ex), recipes_cfg_path)
@@ -213,8 +218,9 @@
os.remove(index_lock)
except OSError as exc:
if exc.errno != errno.ENOENT:
- logging.warn('failed to remove %r, reset will fail: %s', index_lock,
- exc)
+ logging.warn(
+ 'failed to remove %r, reset will fail: %s', index_lock, exc
+ )
_git_check_call(['reset', '-q', '--hard', revision], cwd=engine_path)
# If the engine has refactored/moved modules we need to clean all .pyc files
@@ -238,16 +244,20 @@
if recipes_cfg_path:
# calculate repo_root from recipes_cfg_path
repo_root = os.path.dirname(
- os.path.dirname(os.path.dirname(recipes_cfg_path)))
+ os.path.dirname(os.path.dirname(recipes_cfg_path))
+ )
else:
# find repo_root with git and calculate recipes_cfg_path
repo_root = (
_git_output(['rev-parse', '--show-toplevel'],
- cwd=os.path.abspath(os.path.dirname(__file__))).strip())
+ cwd=os.path.abspath(os.path.dirname(__file__))).strip()
+ )
repo_root = os.path.abspath(repo_root).decode()
recipes_cfg_path = os.path.join(repo_root, 'infra', 'config', 'recipes.cfg')
args = ['--package', recipes_cfg_path] + args
- engine_path, py3_only = checkout_engine(engine_override, repo_root, recipes_cfg_path)
+ engine_path, py3_only = checkout_engine(
+ engine_override, repo_root, recipes_cfg_path
+ )
using_py3 = py3_only or os.getenv('RECIPES_USE_PY3') == 'true'
vpython = ('vpython' + ('3' if using_py3 else '') + _BAT)
@@ -255,7 +265,9 @@
return 'Required binary is not found on PATH: %s' % vpython
argv = ([
- vpython, '-u', os.path.join(engine_path, 'recipe_engine', 'main.py'),
+ vpython,
+ '-u',
+ os.path.join(engine_path, 'recipe_engine', 'main.py'),
] + args)
if IS_WIN:
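Tracing the bootstrap above end to end, the final argv has roughly this shape;
the engine path is illustrative, and '--package' comes from the args
prepended earlier in main():

    import os

    engine_path = '/work/recipe_engine'  # hypothetical checkout_engine() result
    argv = [
        'vpython3',  # plain 'vpython' when the repo is not py3-only
        '-u',
        os.path.join(engine_path, 'recipe_engine', 'main.py'),
        '--package', 'infra/config/recipes.cfg',
    ]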
diff --git a/recipes/cipd/cosign.py b/recipes/cipd/cosign.py
index dce0c3d..99244bf 100644
--- a/recipes/cipd/cosign.py
+++ b/recipes/cipd/cosign.py
@@ -17,6 +17,7 @@
'recipe_engine/step',
]
+
# This recipe builds the cosign CIPD package.
def RunSteps(api):
env = {}
@@ -50,35 +51,36 @@
Args:
api: luci api object.
"""
- cosign_releases_raw_response = api.step('Get cosign releases from github',
- cmd=['curl', 'https://api.github.com/repos/sigstore/cosign/releases'],
- stdout=api.raw_io.output_text()
- ).stdout
+ cosign_releases_raw_response = api.step(
+ 'Get cosign releases from github',
+ cmd=['curl', 'https://api.github.com/repos/sigstore/cosign/releases'],
+ stdout=api.raw_io.output_text()
+ ).stdout
cosign_releases = json.loads(cosign_releases_raw_response)
latest_release = max(
- cosign_releases,
- key=lambda release: datetime.strptime(
- release.get('published_at'), '%Y-%m-%dT%H:%M:%SZ')
+ cosign_releases,
+      key=lambda release: datetime.strptime(
+          release.get('published_at'), '%Y-%m-%dT%H:%M:%SZ')
).get('url')
release_artifacts_raw_response = api.step(
- 'Get artifacts from sigstore/cosign for a specific release version',
- cmd=['curl', latest_release],
- stdout=api.raw_io.output_text()
- ).stdout
+ 'Get artifacts from sigstore/cosign for a specific release version',
+ cmd=['curl', latest_release],
+ stdout=api.raw_io.output_text()
+ ).stdout
release_artifacts = json.loads(release_artifacts_raw_response)
release_artifacts_download_uris = list(
- map(
- lambda asset:
- asset.get('browser_download_url'),
- release_artifacts.get('assets')
- )
+ map(
+ lambda asset: asset.get('browser_download_url'),
+ release_artifacts.get('assets')
+ )
)
return release_artifacts_download_uris
+
def DownloadCosignArtifacts(api, cosign_dir, platform, cosign_download_uris):
"""Downloads the latest cosign binary, certificate, and signature.
@@ -99,67 +101,54 @@
cosign_base_name = 'cosign-%s-amd64%s' % (platform, exe)
cosign_binary_download_uri = next(
- filter(
- lambda uri:
- uri.endswith(cosign_base_name),
- cosign_download_uris
- )
+ filter(lambda uri: uri.endswith(cosign_base_name), cosign_download_uris)
)
cosign_certificate_download_uri = next(
- filter(
- lambda uri:
- uri.endswith('%s-keyless.pem' % cosign_base_name),
- cosign_download_uris
- )
+ filter(
+ lambda uri: uri.endswith('%s-keyless.pem' % cosign_base_name),
+ cosign_download_uris
+ )
)
cosign_signature_download_uri = next(
- filter(
- lambda uri:
- uri.endswith('%s-keyless.sig' % cosign_base_name),
- cosign_download_uris
- )
+ filter(
+ lambda uri: uri.endswith('%s-keyless.sig' % cosign_base_name),
+ cosign_download_uris
+ )
)
api.step(
- 'Download %s cosign binary' % platform,
- [
- 'curl', '-L', cosign_binary_download_uri,
- '-o', cosign_dir.join('bin', 'cosign%s' % exe),
- '--create-dirs'
- ],
- infra_step=True
+ 'Download %s cosign binary' % platform, [
+ 'curl', '-L', cosign_binary_download_uri, '-o',
+ cosign_dir.join('bin', 'cosign%s' % exe), '--create-dirs'
+ ],
+ infra_step=True
)
api.step(
- 'Download %s cosign certificate' % platform,
- [
- 'curl', '-L', cosign_certificate_download_uri,
- '-o', cosign_dir.join("certificate", "cosign-cert%s.pem" % exe),
- '--create-dirs'
- ],
- infra_step=True
+ 'Download %s cosign certificate' % platform, [
+ 'curl', '-L', cosign_certificate_download_uri, '-o',
+ cosign_dir.join("certificate", "cosign-cert%s.pem" % exe),
+ '--create-dirs'
+ ],
+ infra_step=True
)
api.step(
- 'Download %s cosign signature' % platform,
- [
- 'curl', '-L', cosign_signature_download_uri,
- '-o', cosign_dir.join("certificate", "cosign-sig%s.sig" % exe),
- '--create-dirs'
- ],
- infra_step=True
+ 'Download %s cosign signature' % platform, [
+ 'curl', '-L', cosign_signature_download_uri, '-o',
+ cosign_dir.join("certificate", "cosign-sig%s.sig" % exe),
+ '--create-dirs'
+ ],
+ infra_step=True
)
if platform == 'linux' or platform == 'darwin':
api.step(
- 'Make %s cosign binary executable' % platform,
- [
- 'chmod',
- '755',
- cosign_dir.join('bin', 'cosign%s' % exe)
- ]
+ 'Make %s cosign binary executable' % platform,
+ ['chmod', '755',
+ cosign_dir.join('bin', 'cosign%s' % exe)]
)
@@ -179,16 +168,13 @@
exe = '.exe' if platform == 'windows' else ''
api.step(
- 'Verify %s cosign binary is legitimate' % platform,
- [
- 'cosign',
- 'verify-blob',
- '--cert',
- cosign_dir.join("certificate", "cosign-cert%s.pem" % exe),
- '--signature',
- cosign_dir.join("certificate", "cosign-sig%s.sig" % exe),
- cosign_dir.join("bin", "cosign%s" % exe)
- ]
+ 'Verify %s cosign binary is legitimate' % platform, [
+ 'cosign', 'verify-blob', '--cert',
+ cosign_dir.join("certificate", "cosign-cert%s.pem" % exe),
+ '--signature',
+ cosign_dir.join("certificate", "cosign-sig%s.sig" % exe),
+ cosign_dir.join("bin", "cosign%s" % exe)
+ ]
)
@@ -213,12 +199,12 @@
def GenTests(api):
yield api.test(
- 'cosign',
- api.properties(cosign_version='v1.0'),
- api.platform('linux', 64),
- api.step_data(
- 'Get cosign releases from github',
- stdout=api.raw_io.output_text('''
+ 'cosign', api.properties(cosign_version='v1.0'),
+ api.platform('linux', 64),
+ api.step_data(
+ 'Get cosign releases from github',
+ stdout=api.raw_io.output_text(
+ '''
[
{
"url": "https://api.github.com/releases/1",
@@ -229,11 +215,12 @@
"published_at": "2022-06-02T14:08:35Z"
}
]
- ''')
- ) +
- api.step_data(
- 'Get artifacts from sigstore/cosign for a specific release version',
- stdout=api.raw_io.output_text('''
+ '''
+ )
+ ) + api.step_data(
+ 'Get artifacts from sigstore/cosign for a specific release version',
+ stdout=api.raw_io.output_text(
+ '''
{
"assets":[
{
@@ -268,6 +255,7 @@
}
]
}
- ''')
+ '''
+ )
)
- )
+ )
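The release-lookup helper above picks the newest release by parsing each
published_at timestamp; a self-contained sketch of that selection, with sample
data modeled on the test above (the first timestamp is invented):

    from datetime import datetime

    releases = [
        {'url': 'https://api.github.com/releases/1',
         'published_at': '2021-01-01T00:00:00Z'},  # invented
        {'url': 'https://api.github.com/releases/2',
         'published_at': '2022-06-02T14:08:35Z'},  # from the test data
    ]
    latest = max(
        releases,
        key=lambda r: datetime.strptime(r['published_at'], '%Y-%m-%dT%H:%M:%SZ')
    ).get('url')  # -> 'https://api.github.com/releases/2'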
diff --git a/recipes/cocoon/cipd.py b/recipes/cocoon/cipd.py
index c960e0a..ce50e63 100644
--- a/recipes/cocoon/cipd.py
+++ b/recipes/cocoon/cipd.py
@@ -61,7 +61,7 @@
api.cipd.build(project_path.join('build'), cipd_zip_path, cipd_full_name)
if api.buildbucket.build.builder.bucket == 'prod' and should_upload:
- api.cipd.register(cipd_full_name, cipd_zip_path, refs = ["latest"])
+ api.cipd.register(cipd_full_name, cipd_zip_path, refs=["latest"])
def GenTests(api):
diff --git a/recipes/contrib/salt_packer.py b/recipes/contrib/salt_packer.py
index 8f8c19a..69c16a5 100644
--- a/recipes/contrib/salt_packer.py
+++ b/recipes/contrib/salt_packer.py
@@ -1,6 +1,7 @@
# Copyright 2021 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
"""
Recipe to build GCE images with Packer.
"""
@@ -23,120 +24,123 @@
]
PROPERTIES = {
- "repo": Property(kind=str, help="Salt repository to checkout."),
- "dry_run": Property(
- kind=bool, help="Exit early instead of creating a disk image.", default=True
- ),
+ "repo":
+ Property(kind=str, help="Salt repository to checkout."),
+ "dry_run":
+ Property(
+ kind=bool,
+ help="Exit early instead of creating a disk image.",
+ default=True
+ ),
}
TEMPLATE_SUFFIX = ".packer.generated.json"
def RunSteps(api, repo, dry_run):
- salt_path, revision = api.git_checkout(repo, rebase_merges=True)
+ salt_path, revision = api.git_checkout(repo, rebase_merges=True)
- # Get a short revision, image names must be < 64 characters
- with api.context(cwd=salt_path):
- revision = api.git(
- "git rev-parse",
- "rev-parse",
- "--short",
- revision,
- stdout=api.raw_io.output_text(),
- ).stdout.rstrip()
+  # Get a short revision; image names must be < 64 characters.
+ with api.context(cwd=salt_path):
+ revision = api.git(
+ "git rev-parse",
+ "rev-parse",
+ "--short",
+ revision,
+ stdout=api.raw_io.output_text(),
+ ).stdout.rstrip()
- packer_dir = salt_path.join("packer")
- template_paths = api.file.glob_paths(
- "find packer templates", packer_dir, "*" + TEMPLATE_SUFFIX
- )
+ packer_dir = salt_path.join("packer")
+ template_paths = api.file.glob_paths(
+ "find packer templates", packer_dir, "*" + TEMPLATE_SUFFIX
+ )
- env = {
- # Disable update checks.
- "CHECKPOINT_DISABLE": "1",
- # Enable verbose logging.
- "PACKER_LOG": "1",
- # Disable color in logging.
- "PACKER_NO_COLOR": "1",
- }
+ env = {
+ # Disable update checks.
+ "CHECKPOINT_DISABLE": "1",
+ # Enable verbose logging.
+ "PACKER_LOG": "1",
+ # Disable color in logging.
+ "PACKER_NO_COLOR": "1",
+ }
- with api.context(env=env, cwd=salt_path):
- builds = []
- for template_path in template_paths:
- api.step(
- "packer validate",
- [
- "packer",
- "validate",
- "-var",
- "revision={}".format(revision),
- template_path,
- ],
- )
- builds.append(
- api.futures.spawn(
- _do_packer_builder,
- api,
- revision,
- dry_run,
- template_path,
- )
- )
- api.futures.wait(builds)
- if any(not build.result() for build in builds):
- raise api.step.StepFailure("BUILDS FAILED")
+ with api.context(env=env, cwd=salt_path):
+ builds = []
+ for template_path in template_paths:
+ api.step(
+ "packer validate",
+ [
+ "packer",
+ "validate",
+ "-var",
+ "revision={}".format(revision),
+ template_path,
+ ],
+ )
+ builds.append(
+ api.futures.spawn(
+ _do_packer_builder,
+ api,
+ revision,
+ dry_run,
+ template_path,
+ )
+ )
+ api.futures.wait(builds)
+ if any(not build.result() for build in builds):
+ raise api.step.StepFailure("BUILDS FAILED")
def _do_packer_builder(api, revision, dry_run, template_path):
- build = api.path.basename(template_path).replace(TEMPLATE_SUFFIX, "")
- with api.step.nest(build):
- result = api.step(
- "packer build",
- [
- "packer",
- "build",
- "-var",
- "revision={}".format(revision),
- "-var",
- "dry_run={}".format(str(dry_run).lower()),
- "-var",
- "use_internal_ip=true",
- "-only={}".format(build),
- template_path,
- ],
- stdout=api.raw_io.output_text(),
- ok_ret="any",
- # By default recipe_engine assigns a `cost` of 500 mCPU per step,
- # this limits our parallelism to 2*NUM_CORES but these steps are
- # simply waiting 99% of the time we can run far more in parallel.
- cost=None,
- )
- output = result.stdout
- result.presentation.logs["output"] = output.splitlines()
+ build = api.path.basename(template_path).replace(TEMPLATE_SUFFIX, "")
+ with api.step.nest(build):
+ result = api.step(
+ "packer build",
+ [
+ "packer",
+ "build",
+ "-var",
+ "revision={}".format(revision),
+ "-var",
+ "dry_run={}".format(str(dry_run).lower()),
+ "-var",
+ "use_internal_ip=true",
+ "-only={}".format(build),
+ template_path,
+ ],
+ stdout=api.raw_io.output_text(),
+ ok_ret="any",
+          # By default recipe_engine assigns a `cost` of 500 mCPU per step,
+          # which limits our parallelism to 2*NUM_CORES, but these steps are
+          # simply waiting 99% of the time, so we can run far more in parallel.
+ cost=None,
+ )
+ output = result.stdout
+ result.presentation.logs["output"] = output.splitlines()
- if result.retcode != 0:
- if dry_run:
- for line in output.splitlines():
- if "DRYRUN SUCCEEDED" in line:
- result.presentation.step_text = "DRYRUN SUCCEEDED"
- return True
- result.presentation.step_text = "DRYRUN FAILED"
- result.presentation.status = api.step.FAILURE
- result.presentation.status = api.step.FAILURE
- else:
+ if result.retcode != 0:
+ if dry_run:
+ for line in output.splitlines():
+ if "DRYRUN SUCCEEDED" in line:
+ result.presentation.step_text = "DRYRUN SUCCEEDED"
return True
- if result.presentation.status == api.step.FAILURE:
- failures_regex = re.compile(
- "(\s*ID:\s(.*?\n).*?Result:\sFalse\n.*?Changes:.*?)\n\s*{}:\s-{{10}}".format(
- build
- ),
- re.DOTALL | re.MULTILINE,
- )
- for f in re.findall(failures_regex, output):
- result.presentation.logs[f[1]] = f[0].splitlines()
- return False
+ result.presentation.step_text = "DRYRUN FAILED"
+ result.presentation.status = api.step.FAILURE
+ result.presentation.status = api.step.FAILURE
+ else:
+ return True
+ if result.presentation.status == api.step.FAILURE:
+ failures_regex = re.compile(
+ "(\s*ID:\s(.*?\n).*?Result:\sFalse\n.*?Changes:.*?)\n\s*{}:\s-{{10}}"
+ .format(build),
+ re.DOTALL | re.MULTILINE,
+ )
+ for f in re.findall(failures_regex, output):
+ result.presentation.logs[f[1]] = f[0].splitlines()
+ return False
def GenTests(api):
- state_failures = """
+ state_failures = """
fail: ----------
fail: ID: /etc/systemd/system/gce-provider-start-agent.service
fail: Function: file.managed
@@ -166,75 +170,57 @@
fail: ----------
"""
- repo = "https://dash-internal.googlesource.com/salt"
- yield (
- api.buildbucket_util.test("ci_failure", status="FAILURE", git_repo=repo)
- + api.properties(repo=repo, dry_run=False)
- + api.step_data(
- "find packer templates",
- api.file.glob_paths(
- [
- "pass.packer.generated.json",
- "fail.packer.generated.json",
- ]
- ),
- )
- + api.step_data("pass.packer build", retcode=0)
- + api.step_data(
- "fail.packer build",
- api.raw_io.stream_output_text(state_failures),
- retcode=1,
- )
- )
+ repo = "https://dash-internal.googlesource.com/salt"
+ yield (
+ api.buildbucket_util.test("ci_failure", status="FAILURE", git_repo=repo) +
+ api.properties(repo=repo, dry_run=False) + api.step_data(
+ "find packer templates",
+ api.file.glob_paths([
+ "pass.packer.generated.json",
+ "fail.packer.generated.json",
+ ]),
+ ) + api.step_data("pass.packer build", retcode=0) + api.step_data(
+ "fail.packer build",
+ api.raw_io.stream_output_text(state_failures),
+ retcode=1,
+ )
+ )
- yield (
- api.buildbucket_util.test("ci_success", git_repo=repo)
- + api.properties(repo=repo, dry_run=False)
- + api.step_data(
- "find packer templates",
- api.file.glob_paths(
- [
- "pass.packer.generated.json",
- ]
- ),
- )
- + api.step_data("pass.packer build", retcode=0)
- )
+ yield (
+ api.buildbucket_util.test("ci_success", git_repo=repo) +
+ api.properties(repo=repo, dry_run=False) + api.step_data(
+ "find packer templates",
+ api.file.glob_paths([
+ "pass.packer.generated.json",
+ ]),
+ ) + api.step_data("pass.packer build", retcode=0)
+ )
- yield (
- api.buildbucket_util.test("try_failure", status="FAILURE", git_repo=repo)
- + api.properties(repo=repo, dry_run=True)
- + api.step_data(
- "find packer templates",
- api.file.glob_paths(
- [
- "pass.packer.generated.json",
- "fail.packer.generated.json",
- ]
- ),
- )
- + api.step_data("pass.packer build", retcode=0)
- + api.step_data(
- "fail.packer build",
- api.raw_io.stream_output_text(state_failures),
- retcode=1,
- )
- )
+ yield (
+ api.buildbucket_util.test("try_failure", status="FAILURE", git_repo=repo)
+ + api.properties(repo=repo, dry_run=True) + api.step_data(
+ "find packer templates",
+ api.file.glob_paths([
+ "pass.packer.generated.json",
+ "fail.packer.generated.json",
+ ]),
+ ) + api.step_data("pass.packer build", retcode=0) + api.step_data(
+ "fail.packer build",
+ api.raw_io.stream_output_text(state_failures),
+ retcode=1,
+ )
+ )
- yield (
- api.buildbucket_util.test("try_success", tryjob=True, git_repo=repo)
- + api.properties(repo=repo, dry_run=True)
- + api.step_data(
- "find packer templates",
- api.file.glob_paths(
- [
- "pass.packer.generated.json",
- ]
- ),
- )
- + api.step_data(
- "pass.packer build",
- api.raw_io.stream_output_text("DRYRUN SUCCEEDED"),
- retcode=1,
- )
- )
+ yield (
+ api.buildbucket_util.test("try_success", tryjob=True, git_repo=repo) +
+ api.properties(repo=repo, dry_run=True) + api.step_data(
+ "find packer templates",
+ api.file.glob_paths([
+ "pass.packer.generated.json",
+ ]),
+ ) + api.step_data(
+ "pass.packer build",
+ api.raw_io.stream_output_text("DRYRUN SUCCEEDED"),
+ retcode=1,
+ )
+ )
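The cost=None comment above encodes a capacity argument worth spelling out:
with the default 500 mCPU per step and a bot budget assumed at 1000 mCPU per
core, step concurrency caps at 2*NUM_CORES. A sketch of that arithmetic, under
the 1000 mCPU/core assumption:

    NUM_CORES = 8                       # hypothetical bot
    DEFAULT_STEP_COST_MCPU = 500        # recipe_engine default, per the comment
    bot_budget_mcpu = 1000 * NUM_CORES  # assumed 1000 mCPU per core
    max_parallel = bot_budget_mcpu // DEFAULT_STEP_COST_MCPU  # 16 == 2*NUM_CORES
    # Passing cost=None removes the packer steps from this accounting, since
    # they spend almost all of their time waiting.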
diff --git a/recipes/contrib/salt_packer_base_image_roller.py b/recipes/contrib/salt_packer_base_image_roller.py
index d48e76b..0a27193 100644
--- a/recipes/contrib/salt_packer_base_image_roller.py
+++ b/recipes/contrib/salt_packer_base_image_roller.py
@@ -1,6 +1,7 @@
# Copyright 2021 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
"""
Recipe to roll base/source_images for Packer.
"""
@@ -22,110 +23,110 @@
]
PROPERTIES = {
- "repo": Property(kind=str, help="Salt repository to checkout."),
- "dry_run": Property(
- kind=bool, help="Exit early instead of committing a change.", default=True
- ),
+ "repo":
+ Property(kind=str, help="Salt repository to checkout."),
+ "dry_run":
+ Property(
+ kind=bool,
+ help="Exit early instead of committing a change.",
+ default=True
+ ),
}
def RunSteps(api, repo, dry_run):
- salt_path, revision = api.git_checkout(repo)
+ salt_path, revision = api.git_checkout(repo)
- # Get a short revision, image names must be < 64 characters
- with api.context(cwd=salt_path):
- revision = api.git(
- "git rev-parse",
- "rev-parse",
- "--short",
- revision,
- stdout=api.raw_io.output_text(),
- ).stdout.rstrip()
+  # Get a short revision; image names must be < 64 characters.
+ with api.context(cwd=salt_path):
+ revision = api.git(
+ "git rev-parse",
+ "rev-parse",
+ "--short",
+ revision,
+ stdout=api.raw_io.output_text(),
+ ).stdout.rstrip()
- json_path = salt_path.join("starlark", "packer-source-image.json")
+ json_path = salt_path.join("starlark", "packer-source-image.json")
- source_image = api.file.read_json(
- name="load packer source image json", source=json_path
+ source_image = api.file.read_json(
+ name="load packer source image json", source=json_path
+ )
+
+ commit_message = "Rolling Salt Packer Base Images:\n\n"
+ for family, config in source_image.items():
+ project = config["project"]
+ old_image = config["image"]
+ result = api.gcloud(
+ "compute",
+ "images",
+ "describe-from-family",
+ "{}".format(family),
+ "--project={}".format(project),
+ "--format=json",
+ ok_ret="any",
+ stdout=api.json.output(),
+ step_name="get latest image for {}/{}".format(project, family),
)
+ if result.retcode != 0 or "name" not in result.stdout:
+ raise api.step.StepFailure("Unable to find image for {}".format(family))
+ new_image = result.stdout["name"]
+ if old_image != new_image:
+ commit_message += "{}: {} -> {}\n".format(family, old_image, new_image)
+ source_image[family]["image"] = new_image
- commit_message = "Rolling Salt Packer Base Images:\n\n"
- for family, config in source_image.items():
- project = config["project"]
- old_image = config["image"]
- result = api.gcloud(
- "compute",
- "images",
- "describe-from-family",
- "{}".format(family),
- "--project={}".format(project),
- "--format=json",
- ok_ret="any",
- stdout=api.json.output(),
- step_name="get latest image for {}/{}".format(project, family),
- )
- if result.retcode != 0 or "name" not in result.stdout:
- raise api.step.StepFailure("Unable to find image for {}".format(family))
- new_image = result.stdout["name"]
- if old_image != new_image:
- commit_message += "{}: {} -> {}\n".format(family, old_image, new_image)
- source_image[family]["image"] = new_image
+ api.file.write_json(
+ name="update packer source image template",
+ dest=json_path,
+ data=source_image,
+ indent=4,
+ )
+ api.step("regen starlark", [salt_path.join("gen.sh")])
- api.file.write_json(
- name="update packer source image template",
- dest=json_path,
- data=source_image,
- indent=4,
- )
- api.step("regen starlark", [salt_path.join("gen.sh")])
+ env = {
+ # Disable update checks.
+ "CHECKPOINT_DISABLE": "1",
+ # Enable verbose logging.
+ "PACKER_LOG": "1",
+ # Disable color in logging.
+ "PACKER_NO_COLOR": "1",
+ }
- env = {
- # Disable update checks.
- "CHECKPOINT_DISABLE": "1",
- # Enable verbose logging.
- "PACKER_LOG": "1",
- # Disable color in logging.
- "PACKER_NO_COLOR": "1",
- }
-
- api.auto_roller.attempt_roll(
- api.auto_roller.Options(
- remote=repo,
- dry_run=dry_run,
- ),
- repo_dir=salt_path,
- commit_message=commit_message,
- )
+ api.auto_roller.attempt_roll(
+ api.auto_roller.Options(
+ remote=repo,
+ dry_run=dry_run,
+ ),
+ repo_dir=salt_path,
+ commit_message=commit_message,
+ )
def GenTests(api):
- repo = "https://dash-internal.googlesource.com/salt"
- yield (
- api.buildbucket_util.test("update", git_repo=repo)
- + api.properties(repo=repo, dry_run=False)
- + api.step_data(
- "load packer source image json",
- api.file.read_json(
- json_content={"bar": {"image": "old", "project": "foo"}}
- ),
- )
- + api.step_data(
- "get latest image for foo/bar",
- stdout=api.json.output({"name": "new"}),
- retcode=0,
- )
- + api.auto_roller.success()
- )
+ repo = "https://dash-internal.googlesource.com/salt"
+ yield (
+ api.buildbucket_util.test("update", git_repo=repo) +
+ api.properties(repo=repo, dry_run=False) + api.step_data(
+ "load packer source image json",
+ api.file.read_json(
+ json_content={"bar": {"image": "old", "project": "foo"}}
+ ),
+ ) + api.step_data(
+ "get latest image for foo/bar",
+ stdout=api.json.output({"name": "new"}),
+ retcode=0,
+ ) + api.auto_roller.success()
+ )
- yield (
- api.buildbucket_util.test("get_latest_failure", status="FAILURE", git_repo=repo)
- + api.properties(repo=repo, dry_run=False)
- + api.step_data(
- "load packer source image json",
- api.file.read_json(
- json_content={"bar": {"image": "old", "project": "foo"}}
- ),
- )
- + api.step_data(
- "get latest image for foo/bar", stdout=api.json.output({}), retcode=1
- )
- )
+ yield (
+ api.buildbucket_util
+ .test("get_latest_failure", status="FAILURE", git_repo=repo) +
+ api.properties(repo=repo, dry_run=False) + api.step_data(
+ "load packer source image json",
+ api.file.read_json(
+ json_content={"bar": {"image": "old", "project": "foo"}}
+ ),
+ ) + api.step_data(
+ "get latest image for foo/bar", stdout=api.json.output({}), retcode=1
+ )
+ )
diff --git a/recipes/devicelab/devicelab_drone.expected/no-task-name.json b/recipes/devicelab/devicelab_drone.expected/no-task-name.json
index 1b63109..db7713e 100644
--- a/recipes/devicelab/devicelab_drone.expected/no-task-name.json
+++ b/recipes/devicelab/devicelab_drone.expected/no-task-name.json
@@ -19,7 +19,7 @@
"The recipe has crashed at point 'Uncaught exception'!",
"",
"Traceback (most recent call last):",
- " File \"RECIPE_REPO[flutter]/recipes/devicelab/devicelab_drone.py\", line 43, in RunSteps",
+ " File \"RECIPE_REPO[flutter]/recipes/devicelab/devicelab_drone.py\", line 44, in RunSteps",
" raise ValueError('A task_name property is required')",
"ValueError('A task_name property is required')"
]
diff --git a/recipes/devicelab/devicelab_drone.py b/recipes/devicelab/devicelab_drone.py
index 45f3068..606824b 100644
--- a/recipes/devicelab/devicelab_drone.py
+++ b/recipes/devicelab/devicelab_drone.py
@@ -33,6 +33,7 @@
MAX_DEFAULT_TIMEOUT_SECS = 30 * 60
+
def RunSteps(api):
api.flutter_bcid.report_stage(BcidStage.START.value)
# Collect memory/cpu/process before task execution.
@@ -53,7 +54,9 @@
api.properties.get('git_ref'),
)
- test_timeout_secs = api.properties.get('test_timeout_secs', MAX_DEFAULT_TIMEOUT_SECS)
+ test_timeout_secs = api.properties.get(
+ 'test_timeout_secs', MAX_DEFAULT_TIMEOUT_SECS
+ )
with api.context(cwd=flutter_path):
commit_time = api.git(
@@ -91,7 +94,9 @@
api.flutter_deps.required_deps(env, env_prefixes, deps)
target_tags = api.properties.get('tags', [])
- device_tags = api.test_utils.collect_benchmark_tags(env, env_prefixes, target_tags)
+ device_tags = api.test_utils.collect_benchmark_tags(
+ env, env_prefixes, target_tags
+ )
benchmark_tags = api.json.dumps(device_tags)
# Check to see if an emulator has been requested.
@@ -129,20 +134,21 @@
if str(api.swarming.bot_id).startswith('flutter-devicelab'):
with api.devicelab_osx_sdk('ios'):
test_status = mac_test(
- api, env, env_prefixes, flutter_path, task_name, runner_params, suppress_log, test_timeout_secs
+ api, env, env_prefixes, flutter_path, task_name, runner_params,
+ suppress_log, test_timeout_secs
)
else:
with api.osx_sdk('ios'):
test_status = mac_test(
- api, env, env_prefixes, flutter_path, task_name, runner_params, suppress_log, test_timeout_secs
+ api, env, env_prefixes, flutter_path, task_name, runner_params,
+ suppress_log, test_timeout_secs
)
else:
with api.context(env=env, env_prefixes=env_prefixes):
# Start an emulator if it is requested, it must be started before the doctor to avoid issues.
emulator_commands, emulator_pid = api.android_virtual_device.start_if_requested(
- env,
- env_prefixes,
- dep_list.get('android_virtual_device', None))
+ env, env_prefixes, dep_list.get('android_virtual_device', None)
+ )
api.retry.step(
'flutter doctor',
@@ -173,9 +179,17 @@
api.test_utils.flaky_step('run %s' % task_name)
with api.context(env=env, env_prefixes=env_prefixes, cwd=devicelab_path):
uploadResults(
- api, env, env_prefixes, results_path, test_status == 'flaky',
- git_branch, api.properties.get('buildername'), commit_time,
- task_name, benchmark_tags, suppress_log=suppress_log
+ api,
+ env,
+ env_prefixes,
+ results_path,
+ test_status == 'flaky',
+ git_branch,
+ api.properties.get('buildername'),
+ commit_time,
+ task_name,
+ benchmark_tags,
+ suppress_log=suppress_log
)
uploadMetricsToCas(api, results_path)
@@ -189,7 +203,10 @@
api.os_utils.collect_os_info()
-def mac_test(api, env, env_prefixes, flutter_path, task_name, runner_params, suppress_log, test_timeout_secs):
+def mac_test(
+ api, env, env_prefixes, flutter_path, task_name, runner_params,
+ suppress_log, test_timeout_secs
+):
"""Runs a devicelab mac test."""
api.flutter_deps.gems(
env, env_prefixes, flutter_path.join('dev', 'ci', 'mac')
@@ -239,6 +256,7 @@
else:
return False
+
def uploadResults(
api,
env,
@@ -345,14 +363,13 @@
yield api.test(
"basic",
api.properties(
- buildername='Linux abc',
- task_name='abc',
- git_branch='master',
- openpay=True,
- dependencies=[{
- "dependency": "android_virtual_device",
- "version": "31"
- }],
+ buildername='Linux abc',
+ task_name='abc',
+ git_branch='master',
+ openpay=True,
+ dependencies=[{
+ "dependency": "android_virtual_device", "version": "31"
+ }],
),
api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.step_data(
@@ -367,30 +384,27 @@
api.runtime(is_experimental=True),
)
yield api.test(
- "emulator-test",
- api.properties(
- buildername='Linux abc',
- task_name='abc',
- git_branch='master',
- use_emulator="true",
- dependencies=[{
- "dependency": "android_virtual_device",
- "version": "31"
- }],
- ),
- api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
- api.step_data(
- 'run abc',
- stdout=api.raw_io.output_text('#flaky\nthis is a flaky\nflaky: true'),
- retcode=0
- ),
- api.step_data(
- 'start avd.Start Android emulator (API level 31)',
- stdout=api.raw_io.output_text(
- 'android_31_google_apis_x86|emulator-5554 started (pid: 17687)'
- )
- ),
- api.runtime(is_experimental=True)
+ "emulator-test",
+ api.properties(
+ buildername='Linux abc',
+ task_name='abc',
+ git_branch='master',
+ use_emulator="true",
+ dependencies=[{
+ "dependency": "android_virtual_device", "version": "31"
+ }],
+ ), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
+ api.step_data(
+ 'run abc',
+ stdout=api.raw_io.output_text('#flaky\nthis is a flaky\nflaky: true'),
+ retcode=0
+ ),
+ api.step_data(
+ 'start avd.Start Android emulator (API level 31)',
+ stdout=api.raw_io.output_text(
+ 'android_31_google_apis_x86|emulator-5554 started (pid: 17687)'
+ )
+ ), api.runtime(is_experimental=True)
)
yield api.test(
"xcode-devicelab",
@@ -400,9 +414,7 @@
tags=['ios'],
dependencies=[{'dependency': 'xcode'}],
git_branch='master',
- **{'$flutter/devicelab_osx_sdk': {
- 'sdk_version': 'deadbeef',
- }}
+ **{'$flutter/devicelab_osx_sdk': {'sdk_version': 'deadbeef',}}
), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.platform.name('mac'),
api.buildbucket.ci_build(git_ref='refs/heads/master',),
@@ -425,10 +437,9 @@
dependencies=[{'dependency': 'xcode'}],
test_timeout_secs=1,
git_branch='master',
- **{'$flutter/devicelab_osx_sdk': {
- 'sdk_version': 'deadbeef',
- }}
- ), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
+ **{'$flutter/devicelab_osx_sdk': {'sdk_version': 'deadbeef',}}
+ ),
+ api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.platform.name('mac'),
api.buildbucket.ci_build(git_ref='refs/heads/master',),
api.step_data(
@@ -439,7 +450,8 @@
'run abc',
times_out_after=2,
had_timeout=True,
- ), api.swarming.properties(bot_id='flutter-devicelab-mac-1'),
+ ),
+ api.swarming.properties(bot_id='flutter-devicelab-mac-1'),
status='FAILURE',
)
yield api.test(
@@ -467,8 +479,7 @@
upload_metrics=True,
git_branch='master',
dependencies=[{
- "dependency": "android_virtual_device",
- "version": "31"
+ "dependency": "android_virtual_device", "version": "31"
}],
),
api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
@@ -504,8 +515,7 @@
upload_metrics_to_cas=True,
git_branch='master',
dependencies=[{
- "dependency": "android_virtual_device",
- "version": "31"
+ "dependency": "android_virtual_device", "version": "31"
}],
), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.buildbucket.ci_build(
@@ -522,8 +532,7 @@
git_branch='master',
xvfb=1,
dependencies=[{
- "dependency": "android_virtual_device",
- "version": "31"
+ "dependency": "android_virtual_device", "version": "31"
}],
), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.buildbucket.ci_build(
@@ -541,8 +550,7 @@
git_branch='master',
openpay=True,
dependencies=[{
- "dependency": "android_virtual_device",
- "version": "31"
+ "dependency": "android_virtual_device", "version": "31"
}],
), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.buildbucket.ci_build(
@@ -559,8 +567,7 @@
local_engine='host-release',
git_branch='master',
dependencies=[{
- "dependency": "android_virtual_device",
- "version": "31"
+ "dependency": "android_virtual_device", "version": "31"
}],
), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.buildbucket.ci_build(
diff --git a/recipes/devicelab/devicelab_drone_build_test.py b/recipes/devicelab/devicelab_drone_build_test.py
index 029e838..49aaa2b 100644
--- a/recipes/devicelab/devicelab_drone_build_test.py
+++ b/recipes/devicelab/devicelab_drone_build_test.py
@@ -97,20 +97,17 @@
test_props = {
'dependencies': [api.shard_util_v2.unfreeze_dict(dep) for dep in deps],
'task_name': task_name,
- 'parent_builder': api.properties.get('buildername'),
- 'artifact': artifact,
- 'git_branch': api.properties.get('git_branch'),
- 'tags': tags,
- '$flutter/devicelab_osx_sdk': {'sdk_version': api.properties.get('xcode')}
- }
- reqs.append(
- {
- 'name': task_name,
- 'properties': test_props,
- 'drone_dimensions': api.properties.get('drone_dimensions', []),
- 'recipe': 'devicelab/devicelab_test_drone'
+ 'parent_builder': api.properties.get('buildername'), 'artifact': artifact,
+ 'git_branch': api.properties.get('git_branch'), 'tags': tags,
+ '$flutter/devicelab_osx_sdk': {
+ 'sdk_version': api.properties.get('xcode')
}
- )
+ }
+ reqs.append({
+ 'name': task_name, 'properties': test_props,
+ 'drone_dimensions': api.properties.get('drone_dimensions', []),
+ 'recipe': 'devicelab/devicelab_test_drone'
+ })
return reqs
@@ -271,9 +268,7 @@
tags=['ios'],
dependencies=[{'dependency': 'xcode'}],
git_branch='master',
- **{'$flutter/osx_sdk': {
- 'sdk_version': 'deadbeef',
- }}
+ **{'$flutter/osx_sdk': {'sdk_version': 'deadbeef',}}
), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.platform.name('mac'),
api.buildbucket.ci_build(git_ref='refs/heads/master',)
diff --git a/recipes/devicelab/devicelab_test_drone.py b/recipes/devicelab/devicelab_test_drone.py
index 3f34d63..a2acaef 100644
--- a/recipes/devicelab/devicelab_test_drone.py
+++ b/recipes/devicelab/devicelab_test_drone.py
@@ -125,11 +125,11 @@
with api.context(env=env, env_prefixes=env_prefixes, cwd=devicelab_path):
uploadResults(
api, env, env_prefixes, results_path, test_status == 'flaky',
- git_branch, parent_builder, commit_time, task_name,
- benchmark_tags
+ git_branch, parent_builder, commit_time, task_name, benchmark_tags
)
uploadMetricsToCas(api, results_path)
+
def run_test(api, task_name, runner_params):
'''Run the devicelab test.'''
api.retry.run_flutter_doctor()
@@ -137,15 +137,16 @@
test_runner_command.extend(runner_params)
try:
test_status = api.test_utils.run_test(
- 'run %s' % task_name,
- test_runner_command,
- timeout_secs=MAX_TIMEOUT_SECS
+ 'run %s' % task_name,
+ test_runner_command,
+ timeout_secs=MAX_TIMEOUT_SECS
)
finally:
debug_after_failure(api, task_name)
if test_status == 'flaky':
api.test_utils.flaky_step('run %s' % task_name)
+
def download_artifact(api, artifact, artifact_destination_dir):
'''Download pre-build artifact.'''
commit_sha = api.repo_util.get_env_ref()
@@ -378,9 +379,7 @@
tags=['ios'],
dependencies=[{'dependency': 'xcode'}],
git_branch='master',
- **{'$flutter/devicelab_osx_sdk': {
- 'sdk_version': 'deadbeef',
- }},
+ **{'$flutter/devicelab_osx_sdk': {'sdk_version': 'deadbeef',}},
artifact='def',
parent_builder='ghi'
), api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
diff --git a/recipes/engine/engine.py b/recipes/engine/engine.py
index 70eaf3c..a0d4495 100644
--- a/recipes/engine/engine.py
+++ b/recipes/engine/engine.py
@@ -53,16 +53,13 @@
FUCHSIA_ARTIFACTS_BUCKET_NAME = 'fuchsia-artifacts-release'
FUCHSIA_ARTIFACTS_DEBUG_NAMESPACE = 'debug'
ICU_DATA_PATH = 'third_party/icu/flutter/icudtl.dat'
-GIT_REPO = (
- 'https://flutter.googlesource.com/mirrors/engine'
-)
+GIT_REPO = 'https://flutter.googlesource.com/mirrors/engine'
PROPERTIES = InputProperties
ENV_PROPERTIES = EnvProperties
IMPELLERC_SHADER_LIB_PATH = 'shader_lib'
-
# Relative paths used to mock paths for testing.
MOCK_JAR_PATH = (
'io/flutter/x86_debug/'
@@ -81,43 +78,35 @@
def UploadArtifact(api, config, platform, artifact_name):
path = GetCheckoutPath(api).join(
- 'out',
- config,
- 'zip_archives',
- platform,
- artifact_name
+ 'out', config, 'zip_archives', platform, artifact_name
)
api.path.mock_add_file(path)
assert api.path.exists(path), '%s does not exist' % str(path)
if not api.flutter_bcid.is_prod_build():
return
dst = '%s/%s' % (platform, artifact_name) if platform else artifact_name
- api.bucket_util.safe_upload(
- path,
- GetCloudPath(api, dst)
- )
+ api.bucket_util.safe_upload(path, GetCloudPath(api, dst))
def UploadToDownloadFlutterIO(api, config):
src = GetCheckoutPath(api).join(
- 'out',
- config,
- 'zip_archives',
- 'download.flutter.io'
+ 'out', config, 'zip_archives', 'download.flutter.io'
)
api.path.mock_add_file(src)
assert api.path.exists(src), '%s does not exist' % str(src)
if not api.flutter_bcid.is_prod_build():
return
paths = api.file.listdir(
- 'Expand directory', src,
- recursive=True, test_data=(MOCK_JAR_PATH, MOCK_POM_PATH))
+ 'Expand directory',
+ src,
+ recursive=True,
+ test_data=(MOCK_JAR_PATH, MOCK_POM_PATH)
+ )
paths = [api.path.abspath(p) for p in paths]
experimental = 'experimental' if api.runtime.is_experimental else ''
for path in paths:
dst_list = [
- 'gs://download.flutter.io',
- experimental,
+ 'gs://download.flutter.io', experimental,
str(path).split('download.flutter.io/')[1]
]
dst = '/'.join(filter(bool, dst_list))
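
The destination join above drops empty components, so the experimental segment disappears on production runs. Standalone, with a fabricated local path:

# Sketch only: fabricated path; mirrors the gs:// destination construction.
path = '/b/out/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0/a.jar'
experimental = ''  # 'experimental' on experimental runs
dst_list = [
    'gs://download.flutter.io', experimental,
    path.split('download.flutter.io/')[1]
]
dst = '/'.join(filter(bool, dst_list))
print(dst)  # gs://download.flutter.io/io/flutter/x86_debug/1.0/a.jar
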
@@ -152,11 +141,8 @@
# TODO(godofredoc): use .vpython from engine when files are available.
venv_path = api.depot_tools.root.join('.vpython3')
args = [
- 'vpython3', '-vpython-spec', venv_path,
- script_path,
- '--variant', out_dir,
- '--type', types,
- '--engine-capture-core-dump'
+ 'vpython3', '-vpython-spec', venv_path, script_path, '--variant', out_dir,
+ '--type', types, '--engine-capture-core-dump'
]
if android_out_dir:
args.extend(['--android-variant', android_out_dir])
@@ -193,7 +179,9 @@
# Set priority to be the same as the main build temporarily to help triage
# https://github.com/flutter/flutter/issues/124155
priority=30,
- exe_cipd_version=api.properties.get('exe_cipd_version', 'refs/heads/main')
+ exe_cipd_version=api.properties.get(
+ 'exe_cipd_version', 'refs/heads/main'
+ )
)
return api.buildbucket.schedule([req])
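
The rewrap isolates the exe_cipd_version default; behaviorally it is a plain lookup with a pinned fallback ref:

# Sketch only: hypothetical properties with no override supplied.
properties = {}
print(properties.get('exe_cipd_version', 'refs/heads/main'))  # refs/heads/main
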
@@ -240,7 +228,12 @@
def BuildAndPackageFuchsia(api, build_script, git_rev):
RunGN(
- api, '--fuchsia', '--fuchsia-cpu', 'x64', '--runtime-mode', 'debug',
+ api,
+ '--fuchsia',
+ '--fuchsia-cpu',
+ 'x64',
+ '--runtime-mode',
+ 'debug',
'--no-lto',
)
Build(api, 'fuchsia_debug_x64', *GetFlutterFuchsiaBuildTargets(False, True))
@@ -259,13 +252,25 @@
# TODO(akbiggs): Clean this up if we feel brave.
if api.platform.is_linux:
fuchsia_package_cmd = [
- 'python3', build_script, '--engine-version', git_rev, '--skip-build',
- '--archs', 'x64', '--runtime-mode', 'debug',
+ 'python3',
+ build_script,
+ '--engine-version',
+ git_rev,
+ '--skip-build',
+ '--archs',
+ 'x64',
+ '--runtime-mode',
+ 'debug',
]
api.step('Package Fuchsia Artifacts', fuchsia_package_cmd)
RunGN(
- api, '--fuchsia', '--fuchsia-cpu', 'arm64', '--runtime-mode', 'debug',
+ api,
+ '--fuchsia',
+ '--fuchsia-cpu',
+ 'arm64',
+ '--runtime-mode',
+ 'debug',
'--no-lto',
)
Build(api, 'fuchsia_debug_arm64', *GetFlutterFuchsiaBuildTargets(False, True))
@@ -431,15 +436,18 @@
Build(api, build_output_dir, *aot_variant.GetNinjaTargets())
env = {
- 'STORAGE_BUCKET': 'gs://flutter_firebase_testlab_staging',
- 'GCP_PROJECT': 'flutter-infra-staging'
+ 'STORAGE_BUCKET': 'gs://flutter_firebase_testlab_staging',
+ 'GCP_PROJECT': 'flutter-infra-staging'
}
with api.context(env=env, cwd=checkout):
args = [
- 'python3', './flutter/ci/firebase_testlab.py',
- '--variant', build_output_dir,
- '--build-id', swarming_task_id,
+ 'python3',
+ './flutter/ci/firebase_testlab.py',
+ '--variant',
+ build_output_dir,
+ '--build-id',
+ swarming_task_id,
]
step_name = api.test_utils.test_step_name('Android Firebase Test')
@@ -465,15 +473,10 @@
android_triple='aarch64-linux-android',
abi='arm64_v8a',
gn_args=[
- '--runtime-mode',
- 'profile',
- '--android',
- '--android-cpu',
- 'arm64'
+ '--runtime-mode', 'profile', '--android', '--android-cpu', 'arm64'
],
ninja_targets=[
- 'default',
- 'clang_x64/gen_snapshot',
+ 'default', 'clang_x64/gen_snapshot',
'flutter/shell/platform/android:abi_jars',
'flutter/shell/platform/android:analyze_snapshot'
]
@@ -486,15 +489,10 @@
android_triple='aarch64-linux-android',
abi='arm64_v8a',
gn_args=[
- '--runtime-mode',
- 'release',
- '--android',
- '--android-cpu',
- 'arm64'
+ '--runtime-mode', 'release', '--android', '--android-cpu', 'arm64'
],
ninja_targets=[
- 'default',
- 'clang_x64/gen_snapshot',
+ 'default', 'clang_x64/gen_snapshot',
'flutter/shell/platform/android:abi_jars',
'flutter/shell/platform/android:analyze_snapshot'
]
@@ -507,14 +505,10 @@
android_triple='arm-linux-androideabi',
abi='armeabi_v7a',
gn_args=[
- '--runtime-mode',
- 'profile',
- '--android',
- '--android-cpu', 'arm'
+ '--runtime-mode', 'profile', '--android', '--android-cpu', 'arm'
],
ninja_targets=[
- 'default',
- 'clang_x64/gen_snapshot',
+ 'default', 'clang_x64/gen_snapshot',
'flutter/shell/platform/android:embedding_jars',
'flutter/shell/platform/android:abi_jars'
]
@@ -527,15 +521,10 @@
android_triple='arm-linux-androideabi',
abi='armeabi_v7a',
gn_args=[
- '--runtime-mode',
- 'release',
- '--android',
- '--android-cpu',
- 'arm'
+ '--runtime-mode', 'release', '--android', '--android-cpu', 'arm'
],
ninja_targets=[
- 'default',
- 'clang_x64/gen_snapshot',
+ 'default', 'clang_x64/gen_snapshot',
'flutter/shell/platform/android:embedding_jars',
'flutter/shell/platform/android:abi_jars'
]
@@ -548,15 +537,10 @@
android_triple='x86_64-linux-android',
abi='x86_64',
gn_args=[
- '--runtime-mode',
- 'profile',
- '--android',
- '--android-cpu',
- 'x64'
+ '--runtime-mode', 'profile', '--android', '--android-cpu', 'x64'
],
ninja_targets=[
- 'default',
- 'clang_x64/gen_snapshot',
+ 'default', 'clang_x64/gen_snapshot',
'flutter/shell/platform/android:abi_jars',
'flutter/shell/platform/android:analyze_snapshot'
]
@@ -569,14 +553,10 @@
android_triple='x86_64-linux-android',
abi='x86_64',
gn_args=[
- '--runtime-mode',
- 'release',
- '--android',
- '--android-cpu', 'x64'
+ '--runtime-mode', 'release', '--android', '--android-cpu', 'x64'
],
ninja_targets=[
- 'default',
- 'clang_x64/gen_snapshot',
+ 'default', 'clang_x64/gen_snapshot',
'flutter/shell/platform/android:abi_jars',
'flutter/shell/platform/android:analyze_snapshot'
]
@@ -590,8 +570,7 @@
continue
props = {
'builds': [{
- 'gn_args': aot_variant.GetGNArgs(),
- 'dir': build_out_dir,
+ 'gn_args': aot_variant.GetGNArgs(), 'dir': build_out_dir,
'targets': aot_variant.GetNinjaTargets(),
'output_files': ['zip_archives', 'libflutter.so']
}],
@@ -624,67 +603,155 @@
build_props = builds[build_id].output.properties
if 'cas_output_hash' in build_props:
api.cas.download(
- 'Download for build %s' % build_id,
- build_props['cas_output_hash'], GetCheckoutPath(api)
+ 'Download for build %s' % build_id, build_props['cas_output_hash'],
+ GetCheckoutPath(api)
)
# Explicitly upload artifacts.
# Artifacts.zip
- UploadArtifact(api, config='android_profile', platform='android-arm-profile',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='android_profile_x64', platform='android-x64-profile',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='android_profile_arm64', platform='android-arm64-profile',
- artifact_name='artifacts.zip')
+ UploadArtifact(
+ api,
+ config='android_profile',
+ platform='android-arm-profile',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_x64',
+ platform='android-x64-profile',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_arm64',
+ platform='android-arm64-profile',
+ artifact_name='artifacts.zip'
+ )
- UploadArtifact(api, config='android_release', platform='android-arm-release',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='android_release_x64', platform='android-x64-release',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='android_release_arm64', platform='android-arm64-release',
- artifact_name='artifacts.zip')
+ UploadArtifact(
+ api,
+ config='android_release',
+ platform='android-arm-release',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_x64',
+ platform='android-x64-release',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_arm64',
+ platform='android-arm64-release',
+ artifact_name='artifacts.zip'
+ )
# Linux-x64.zip.
- UploadArtifact(api, config='android_profile', platform='android-arm-profile',
- artifact_name='linux-x64.zip')
- UploadArtifact(api, config='android_profile_x64', platform='android-x64-profile',
- artifact_name='linux-x64.zip')
- UploadArtifact(api, config='android_profile_arm64', platform='android-arm64-profile',
- artifact_name='linux-x64.zip')
+ UploadArtifact(
+ api,
+ config='android_profile',
+ platform='android-arm-profile',
+ artifact_name='linux-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_x64',
+ platform='android-x64-profile',
+ artifact_name='linux-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_arm64',
+ platform='android-arm64-profile',
+ artifact_name='linux-x64.zip'
+ )
- UploadArtifact(api, config='android_release', platform='android-arm-release',
- artifact_name='linux-x64.zip')
- UploadArtifact(api, config='android_release_x64', platform='android-x64-release',
- artifact_name='linux-x64.zip')
- UploadArtifact(api, config='android_release_arm64', platform='android-arm64-release',
- artifact_name='linux-x64.zip')
+ UploadArtifact(
+ api,
+ config='android_release',
+ platform='android-arm-release',
+ artifact_name='linux-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_x64',
+ platform='android-x64-release',
+ artifact_name='linux-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_arm64',
+ platform='android-arm64-release',
+ artifact_name='linux-x64.zip'
+ )
# Symbols.zip
- UploadArtifact(api, config='android_profile', platform='android-arm-profile',
- artifact_name='symbols.zip')
- UploadArtifact(api, config='android_profile_x64', platform='android-x64-profile',
- artifact_name='symbols.zip')
- UploadArtifact(api, config='android_profile_arm64', platform='android-arm64-profile',
- artifact_name='symbols.zip')
+ UploadArtifact(
+ api,
+ config='android_profile',
+ platform='android-arm-profile',
+ artifact_name='symbols.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_x64',
+ platform='android-x64-profile',
+ artifact_name='symbols.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_arm64',
+ platform='android-arm64-profile',
+ artifact_name='symbols.zip'
+ )
- UploadArtifact(api, config='android_release', platform='android-arm-release',
- artifact_name='symbols.zip')
- UploadArtifact(api, config='android_release_x64', platform='android-x64-release',
- artifact_name='symbols.zip')
- UploadArtifact(api, config='android_release_arm64', platform='android-arm64-release',
- artifact_name='symbols.zip')
+ UploadArtifact(
+ api,
+ config='android_release',
+ platform='android-arm-release',
+ artifact_name='symbols.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_x64',
+ platform='android-x64-release',
+ artifact_name='symbols.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_arm64',
+ platform='android-arm64-release',
+ artifact_name='symbols.zip'
+ )
# analyze-snapshot-linux-x64.zip
- UploadArtifact(api, config='android_profile_x64', platform='android-x64-profile',
- artifact_name='analyze-snapshot-linux-x64.zip')
- UploadArtifact(api, config='android_profile_arm64', platform='android-arm64-profile',
- artifact_name='analyze-snapshot-linux-x64.zip')
+ UploadArtifact(
+ api,
+ config='android_profile_x64',
+ platform='android-x64-profile',
+ artifact_name='analyze-snapshot-linux-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_arm64',
+ platform='android-arm64-profile',
+ artifact_name='analyze-snapshot-linux-x64.zip'
+ )
- UploadArtifact(api, config='android_release_x64', platform='android-x64-release',
- artifact_name='analyze-snapshot-linux-x64.zip')
- UploadArtifact(api, config='android_release_arm64', platform='android-arm64-release',
- artifact_name='analyze-snapshot-linux-x64.zip')
+ UploadArtifact(
+ api,
+ config='android_release_x64',
+ platform='android-x64-release',
+ artifact_name='analyze-snapshot-linux-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_arm64',
+ platform='android-arm64-release',
+ artifact_name='analyze-snapshot-linux-x64.zip'
+ )
# Jar, pom, embedding files.
UploadToDownloadFlutterIO(api, 'android_profile')
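
Every call in the block above varies only in its (config, platform) pair per artifact name; a table-driven equivalent, shown purely as an illustrative sketch and not part of this change:

# Sketch only: a stub stands in for the real UploadArtifact above.
def UploadArtifact(api, config, platform, artifact_name):
  print(config, platform, artifact_name)

ANDROID_UPLOADS = [
    ('android_profile', 'android-arm-profile'),
    ('android_profile_x64', 'android-x64-profile'),
    ('android_profile_arm64', 'android-arm64-profile'),
    ('android_release', 'android-arm-release'),
    ('android_release_x64', 'android-x64-release'),
    ('android_release_arm64', 'android-arm64-release'),
]

for artifact in ('artifacts.zip', 'linux-x64.zip', 'symbols.zip'):
  for config, platform in ANDROID_UPLOADS:
    UploadArtifact('api', config=config, platform=platform,
                   artifact_name=artifact)
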
@@ -708,26 +775,20 @@
aot_variant.GetLibFlutterPath()
)
- if aot_variant.GetBuildOutDir() in ['android_release_arm64', 'android_release']:
+ if aot_variant.GetBuildOutDir() in ['android_release_arm64',
+ 'android_release']:
triple = aot_variant.android_triple
UploadTreeMap(api, upload_dir, unstripped_lib_flutter_path, triple)
def BuildLinuxAndroid(api, swarming_task_id):
if api.properties.get('build_android_jit_release', True):
- RunGN(
- api,
- '--android',
- '--android-cpu=x86',
- '--runtime-mode=jit_release'
- )
+ RunGN(api, '--android', '--android-cpu=x86', '--runtime-mode=jit_release')
Build(
- api,
- 'android_jit_release_x86',
- 'flutter',
- 'flutter/shell/platform/android:abi_jars',
- 'flutter/shell/platform/android:embedding_jars',
- 'flutter/shell/platform/android:robolectric_tests'
+ api, 'android_jit_release_x86', 'flutter',
+ 'flutter/shell/platform/android:abi_jars',
+ 'flutter/shell/platform/android:embedding_jars',
+ 'flutter/shell/platform/android:robolectric_tests'
)
# Upload artifacts.zip
@@ -747,119 +808,146 @@
if api.properties.get('build_android_debug', True):
debug_variants = [
AndroidAotVariant(
- android_cpu='x86',
- out_dir='android_debug_x86',
- artifact_dir='android-x86',
- clang_dir='',
- android_triple='',
- abi='x86',
- gn_args=[
- '--android',
- '--android-cpu=x86',
- '--no-lto'
- ],
- ninja_targets=[
- 'flutter',
- 'flutter/shell/platform/android:abi_jars',
- 'flutter/shell/platform/android:robolectric_tests'
- ]
+ android_cpu='x86',
+ out_dir='android_debug_x86',
+ artifact_dir='android-x86',
+ clang_dir='',
+ android_triple='',
+ abi='x86',
+ gn_args=['--android', '--android-cpu=x86', '--no-lto'],
+ ninja_targets=[
+ 'flutter', 'flutter/shell/platform/android:abi_jars',
+ 'flutter/shell/platform/android:robolectric_tests'
+ ]
),
AndroidAotVariant(
- android_cpu='x64',
- out_dir='android_debug_x64',
- artifact_dir='android-x64',
- clang_dir='',
- android_triple='',
- abi='x86_64',
- gn_args=[
- '--android',
- '--android-cpu=x64',
- '--no-lto'
- ],
- ninja_targets=[
- 'flutter',
- 'flutter/shell/platform/android:abi_jars'
- ]
+ android_cpu='x64',
+ out_dir='android_debug_x64',
+ artifact_dir='android-x64',
+ clang_dir='',
+ android_triple='',
+ abi='x86_64',
+ gn_args=['--android', '--android-cpu=x64', '--no-lto'],
+ ninja_targets=[
+ 'flutter', 'flutter/shell/platform/android:abi_jars'
+ ]
),
AndroidAotVariant(
- android_cpu='arm',
- out_dir='android_debug',
- artifact_dir='android-arm',
- clang_dir='',
- android_triple='',
- abi='armeabi_v7a',
- gn_args=[
- '--android',
- '--android-cpu=arm',
- '--no-lto'
- ],
- ninja_targets=[
- 'flutter',
- 'flutter/sky/dist:zip_old_location',
- 'flutter/shell/platform/android:embedding_jars',
- 'flutter/shell/platform/android:abi_jars'
- ]
+ android_cpu='arm',
+ out_dir='android_debug',
+ artifact_dir='android-arm',
+ clang_dir='',
+ android_triple='',
+ abi='armeabi_v7a',
+ gn_args=['--android', '--android-cpu=arm', '--no-lto'],
+ ninja_targets=[
+ 'flutter', 'flutter/sky/dist:zip_old_location',
+ 'flutter/shell/platform/android:embedding_jars',
+ 'flutter/shell/platform/android:abi_jars'
+ ]
),
AndroidAotVariant(
- android_cpu='arm64',
- out_dir='android_debug_arm64',
- artifact_dir='android-arm64',
- clang_dir='',
- android_triple='',
- abi='arm64_v8a',
- gn_args=[
- '--android',
- '--android-cpu=arm64',
- '--no-lto'
- ],
- ninja_targets=[
- 'flutter',
- 'flutter/shell/platform/android:abi_jars'
- ]
+ android_cpu='arm64',
+ out_dir='android_debug_arm64',
+ artifact_dir='android-arm64',
+ clang_dir='',
+ android_triple='',
+ abi='arm64_v8a',
+ gn_args=['--android', '--android-cpu=arm64', '--no-lto'],
+ ninja_targets=[
+ 'flutter', 'flutter/shell/platform/android:abi_jars'
+ ]
)
]
for debug_variant in debug_variants:
RunGN(api, *(debug_variant.GetGNArgs()))
- Build(api, debug_variant.GetBuildOutDir(), *(debug_variant.GetNinjaTargets()))
+ Build(
+ api, debug_variant.GetBuildOutDir(),
+ *(debug_variant.GetNinjaTargets())
+ )
# Run tests
RunGN(api, '--android', '--unoptimized', '--runtime-mode=debug', '--no-lto')
- Build(api, 'android_debug', 'flutter/shell/platform/android:robolectric_tests')
- RunTests(api, 'android_debug', android_out_dir='android_debug', types='java')
+ Build(
+ api, 'android_debug', 'flutter/shell/platform/android:robolectric_tests'
+ )
+ RunTests(
+ api, 'android_debug', android_out_dir='android_debug', types='java'
+ )
# Explicitly upload artifacts.
# Artifacts.zip
- UploadArtifact(api, config='android_debug_x86', platform='android-x86',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='android_debug_x64', platform='android-x64',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='android_debug', platform='android-arm',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='android_debug_arm64', platform='android-arm64',
- artifact_name='artifacts.zip')
+ UploadArtifact(
+ api,
+ config='android_debug_x86',
+ platform='android-x86',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_debug_x64',
+ platform='android-x64',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_debug',
+ platform='android-arm',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_debug_arm64',
+ platform='android-arm64',
+ artifact_name='artifacts.zip'
+ )
# Symbols.zip
- UploadArtifact(api, config='android_debug_x86', platform='android-x86',
- artifact_name='symbols.zip')
- UploadArtifact(api, config='android_debug_x64', platform='android-x64',
- artifact_name='symbols.zip')
- UploadArtifact(api, config='android_debug', platform='android-arm',
- artifact_name='symbols.zip')
- UploadArtifact(api, config='android_debug_arm64', platform='android-arm64',
- artifact_name='symbols.zip')
+ UploadArtifact(
+ api,
+ config='android_debug_x86',
+ platform='android-x86',
+ artifact_name='symbols.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_debug_x64',
+ platform='android-x64',
+ artifact_name='symbols.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_debug',
+ platform='android-arm',
+ artifact_name='symbols.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_debug_arm64',
+ platform='android-arm64',
+ artifact_name='symbols.zip'
+ )
# Jar, pom, embedding files.
UploadToDownloadFlutterIO(api, 'android_debug_x86')
UploadToDownloadFlutterIO(api, 'android_debug_x64')
- UploadToDownloadFlutterIO(api, 'android_debug') #arm
+ UploadToDownloadFlutterIO(api, 'android_debug') #arm
UploadToDownloadFlutterIO(api, 'android_debug_arm64')
# Additional artifacts for android_debug
- UploadArtifact(api, config='android_debug', platform='',
- artifact_name='sky_engine.zip')
- UploadArtifact(api, config='android_debug', platform='',
- artifact_name='android-javadoc.zip')
+ UploadArtifact(
+ api,
+ config='android_debug',
+ platform='',
+ artifact_name='sky_engine.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_debug',
+ platform='',
+ artifact_name='android-javadoc.zip'
+ )
# Upload to CIPD.
# TODO(godofredoc): Validate if this can be removed.
@@ -871,80 +959,135 @@
def BuildLinux(api):
checkout = GetCheckoutPath(api)
- RunGN(api, '--runtime-mode', 'debug', '--prebuilt-dart-sdk', '--build-embedder-examples')
+ RunGN(
+ api, '--runtime-mode', 'debug', '--prebuilt-dart-sdk',
+ '--build-embedder-examples'
+ )
RunGN(api, '--runtime-mode', 'debug', '--unoptimized', '--prebuilt-dart-sdk')
- RunGN(api, '--runtime-mode', 'profile', '--no-lto', '--prebuilt-dart-sdk', '--build-embedder-examples')
- RunGN(api, '--runtime-mode', 'release', '--prebuilt-dart-sdk', '--build-embedder-examples')
+ RunGN(
+ api, '--runtime-mode', 'profile', '--no-lto', '--prebuilt-dart-sdk',
+ '--build-embedder-examples'
+ )
+ RunGN(
+ api, '--runtime-mode', 'release', '--prebuilt-dart-sdk',
+ '--build-embedder-examples'
+ )
# flutter/sky/packages from host_debug_unopt is needed for RunTests 'dart'
# type.
Build(api, 'host_debug_unopt', 'flutter/sky/packages')
- Build(api, 'host_debug',
- 'flutter/build/archives:artifacts',
- 'flutter/build/archives:dart_sdk_archive',
- 'flutter/build/archives:embedder',
- 'flutter/build/archives:flutter_patched_sdk',
- 'flutter/build/dart:copy_dart_sdk',
- 'flutter/tools/font-subset',
- 'flutter:unittests',
+ Build(
+ api,
+ 'host_debug',
+ 'flutter/build/archives:artifacts',
+ 'flutter/build/archives:dart_sdk_archive',
+ 'flutter/build/archives:embedder',
+ 'flutter/build/archives:flutter_patched_sdk',
+ 'flutter/build/dart:copy_dart_sdk',
+ 'flutter/tools/font-subset',
+ 'flutter:unittests',
)
# 'engine' suite has failing tests in host_debug.
# https://github.com/flutter/flutter/issues/103757
RunTests(api, 'host_debug', types='dart')
- Build(api, 'host_profile',
- 'flutter/shell/testing',
- 'flutter/tools/path_ops',
- 'flutter/build/dart:copy_dart_sdk',
- 'flutter/shell/testing',
- 'flutter:unittests',
+ Build(
+ api,
+ 'host_profile',
+ 'flutter/shell/testing',
+ 'flutter/tools/path_ops',
+ 'flutter/build/dart:copy_dart_sdk',
+ 'flutter/shell/testing',
+ 'flutter:unittests',
)
RunTests(api, 'host_profile', types='dart,engine')
- Build(api, 'host_release',
- 'flutter/build/archives:flutter_patched_sdk',
- 'flutter/build/dart:copy_dart_sdk',
- 'flutter/display_list:display_list_benchmarks',
- 'flutter/display_list:display_list_builder_benchmarks',
- 'flutter/fml:fml_benchmarks',
- 'flutter/impeller/geometry:geometry_benchmarks',
- 'flutter/lib/ui:ui_benchmarks',
- 'flutter/shell/common:shell_benchmarks',
- 'flutter/shell/testing',
- 'flutter/third_party/txt:txt_benchmarks',
- 'flutter/tools/path_ops',
- 'flutter:unittests'
+ Build(
+ api, 'host_release', 'flutter/build/archives:flutter_patched_sdk',
+ 'flutter/build/dart:copy_dart_sdk',
+ 'flutter/display_list:display_list_benchmarks',
+ 'flutter/display_list:display_list_builder_benchmarks',
+ 'flutter/fml:fml_benchmarks',
+ 'flutter/impeller/geometry:geometry_benchmarks',
+ 'flutter/lib/ui:ui_benchmarks', 'flutter/shell/common:shell_benchmarks',
+ 'flutter/shell/testing', 'flutter/third_party/txt:txt_benchmarks',
+ 'flutter/tools/path_ops', 'flutter:unittests'
)
RunTests(api, 'host_release', types='dart,engine,benchmarks')
# host_debug
- UploadArtifact(api, config='host_debug', platform='linux-x64',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='host_debug', platform='linux-x64',
- artifact_name='linux-x64-embedder.zip')
- UploadArtifact(api, config='host_debug', platform='linux-x64',
- artifact_name='font-subset.zip')
- UploadArtifact(api, config='host_debug', platform='',
- artifact_name='flutter_patched_sdk.zip')
- UploadArtifact(api, config='host_release', platform='',
- artifact_name='flutter_patched_sdk_product.zip')
- UploadArtifact(api, config='host_debug', platform='',
- artifact_name='dart-sdk-linux-x64.zip')
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='linux-x64',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='linux-x64',
+ artifact_name='linux-x64-embedder.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='linux-x64',
+ artifact_name='font-subset.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='',
+ artifact_name='flutter_patched_sdk.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_release',
+ platform='',
+ artifact_name='flutter_patched_sdk_product.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='',
+ artifact_name='dart-sdk-linux-x64.zip'
+ )
# Rebuild with fontconfig support enabled for the desktop embedding, since it
# should be on for libflutter_linux_gtk.so, but not libflutter_engine.so.
- RunGN(api, '--runtime-mode', 'debug', '--enable-fontconfig', '--prebuilt-dart-sdk')
- RunGN(api, '--runtime-mode', 'profile', '--no-lto', '--enable-fontconfig', '--prebuilt-dart-sdk')
- RunGN(api, '--runtime-mode', 'release', '--enable-fontconfig', '--prebuilt-dart-sdk')
+ RunGN(
+ api, '--runtime-mode', 'debug', '--enable-fontconfig',
+ '--prebuilt-dart-sdk'
+ )
+ RunGN(
+ api, '--runtime-mode', 'profile', '--no-lto', '--enable-fontconfig',
+ '--prebuilt-dart-sdk'
+ )
+ RunGN(
+ api, '--runtime-mode', 'release', '--enable-fontconfig',
+ '--prebuilt-dart-sdk'
+ )
Build(api, 'host_debug', 'flutter/shell/platform/linux:flutter_gtk')
Build(api, 'host_profile', 'flutter/shell/platform/linux:flutter_gtk')
Build(api, 'host_release', 'flutter/shell/platform/linux:flutter_gtk')
- UploadArtifact(api, config='host_debug', platform='linux-x64-debug',
- artifact_name='linux-x64-flutter-gtk.zip')
- UploadArtifact(api, config='host_profile', platform='linux-x64-profile',
- artifact_name='linux-x64-flutter-gtk.zip')
- UploadArtifact(api, config='host_release', platform='linux-x64-release',
- artifact_name='linux-x64-flutter-gtk.zip')
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='linux-x64-debug',
+ artifact_name='linux-x64-flutter-gtk.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_profile',
+ platform='linux-x64-profile',
+ artifact_name='linux-x64-flutter-gtk.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_release',
+ platform='linux-x64-release',
+ artifact_name='linux-x64-flutter-gtk.zip'
+ )
def GetRemoteFileName(exec_path):
@@ -1010,7 +1153,11 @@
debug_symbols_cmd += [
'--target-arch', arch, '--out-dir', temp_dir, '--symbol-dirs'
] + symbol_dirs
- api.step('Upload to CIPD for arch: %s' % arch, cmd=debug_symbols_cmd, infra_step=True)
+ api.step(
+ 'Upload to CIPD for arch: %s' % arch,
+ cmd=debug_symbols_cmd,
+ infra_step=True
+ )
def UploadFuchsiaDebugSymbols(api, upload):
@@ -1113,8 +1260,8 @@
build_props = builds[build_id].output.properties
if 'cas_output_hash' in build_props:
api.cas.download(
- 'Download for build %s' % build_id,
- build_props['cas_output_hash'], GetCheckoutPath(api)
+ 'Download for build %s' % build_id, build_props['cas_output_hash'],
+ GetCheckoutPath(api)
)
fuchsia_package_cmd = [
@@ -1125,9 +1272,11 @@
'--skip-build',
]
- upload = (api.bucket_util.should_upload_packages() and
+ upload = (
+ api.bucket_util.should_upload_packages() and
not api.runtime.is_experimental and
- ShouldPublishToCIPD(api, 'flutter/fuchsia', git_rev))
+ ShouldPublishToCIPD(api, 'flutter/fuchsia', git_rev)
+ )
if upload:
fuchsia_package_cmd += ['--upload']
@@ -1191,8 +1340,7 @@
with api.context(cwd=checkout):
api.step(
- 'Create macOS %s gen_snapshot' % label,
- create_macos_gen_snapshot_cmd
+ 'Create macOS %s gen_snapshot' % label, create_macos_gen_snapshot_cmd
)
api.zip.directory(
@@ -1202,13 +1350,15 @@
)
UploadArtifacts(
- api, bucket_name, [
+ api,
+ bucket_name, [
'out/%s/FlutterMacOS.framework.zip' % label,
],
archive_name='FlutterMacOS.framework.zip'
)
UploadArtifacts(
- api, bucket_name, [
+ api,
+ bucket_name, [
'out/%s/gen_snapshot_x64' % label,
'out/%s/gen_snapshot_arm64' % label,
],
@@ -1217,12 +1367,12 @@
if label == 'release':
api.zip.directory(
- 'Archive FlutterMacOS.dSYM',
- label_dir.join('FlutterMacOS.dSYM'),
+ 'Archive FlutterMacOS.dSYM', label_dir.join('FlutterMacOS.dSYM'),
label_dir.join('FlutterMacOS.dSYM.zip')
)
UploadArtifacts(
- api, bucket_name, [
+ api,
+ bucket_name, [
'out/%s/FlutterMacOS.dSYM.zip' % label,
],
archive_name='FlutterMacOS.dSYM.zip'
@@ -1233,41 +1383,28 @@
if api.properties.get('build_host', True):
# Host Debug x64
RunGN(
- api,
- '--runtime-mode',
- 'debug',
- '--no-lto',
- '--prebuilt-dart-sdk',
+ api, '--runtime-mode', 'debug', '--no-lto', '--prebuilt-dart-sdk',
'--build-embedder-examples'
)
Build(
- api,
- 'host_debug',
- 'flutter/build/archives:archive_gen_snapshot',
+ api, 'host_debug', 'flutter/build/archives:archive_gen_snapshot',
'flutter/build/archives:artifacts',
'flutter/build/archives:dart_sdk_archive',
'flutter/build/archives:flutter_embedder_framework',
'flutter/build/dart:copy_dart_sdk',
'flutter/shell/platform/darwin/macos:zip_macos_flutter_framework',
- 'flutter/tools/font-subset',
- 'flutter:unittests'
+ 'flutter/tools/font-subset', 'flutter:unittests'
)
RunTests(api, 'host_debug', types='dart')
# Host Profile x64
RunGN(
- api,
- '--runtime-mode',
- 'profile', '--no-lto',
- '--prebuilt-dart-sdk',
+ api, '--runtime-mode', 'profile', '--no-lto', '--prebuilt-dart-sdk',
'--build-embedder-examples'
)
Build(
- api,
- 'host_profile',
- 'flutter/build/archives:archive_gen_snapshot',
- 'flutter/build/archives:artifacts',
- 'flutter/build/dart:copy_dart_sdk',
+ api, 'host_profile', 'flutter/build/archives:archive_gen_snapshot',
+ 'flutter/build/archives:artifacts', 'flutter/build/dart:copy_dart_sdk',
'flutter/shell/platform/darwin/macos:zip_macos_flutter_framework',
'flutter:unittests'
)
@@ -1275,19 +1412,12 @@
# Host release x64
RunGN(
- api,
- '--runtime-mode',
- 'release',
- '--no-lto',
- '--prebuilt-dart-sdk',
+ api, '--runtime-mode', 'release', '--no-lto', '--prebuilt-dart-sdk',
'--build-embedder-examples'
)
Build(
- api,
- 'host_release',
- 'flutter/build/archives:archive_gen_snapshot',
- 'flutter/build/archives:artifacts',
- 'flutter/build/dart:copy_dart_sdk',
+ api, 'host_release', 'flutter/build/archives:archive_gen_snapshot',
+ 'flutter/build/archives:artifacts', 'flutter/build/dart:copy_dart_sdk',
'flutter/shell/platform/darwin/macos:zip_macos_flutter_framework',
'flutter:unittests'
)
@@ -1295,19 +1425,11 @@
# Host debug arm64
RunGN(
- api,
- '--mac',
- '--mac-cpu',
- 'arm64',
- '--runtime-mode',
- 'debug',
- '--no-lto',
- '--prebuilt-dart-sdk'
+ api, '--mac', '--mac-cpu', 'arm64', '--runtime-mode', 'debug',
+ '--no-lto', '--prebuilt-dart-sdk'
)
Build(
- api,
- 'mac_debug_arm64',
- 'flutter/build/archives:archive_gen_snapshot',
+ api, 'mac_debug_arm64', 'flutter/build/archives:archive_gen_snapshot',
'flutter/build/archives:artifacts',
'flutter/build/archives:dart_sdk_archive',
'flutter/shell/platform/darwin/macos:zip_macos_flutter_framework',
@@ -1316,37 +1438,21 @@
# Host profile arm64
RunGN(
- api,
- '--mac',
- '--mac-cpu',
- 'arm64',
- '--runtime-mode',
- 'profile',
- '--no-lto',
- '--prebuilt-dart-sdk'
+ api, '--mac', '--mac-cpu', 'arm64', '--runtime-mode', 'profile',
+ '--no-lto', '--prebuilt-dart-sdk'
)
Build(
- api,
- 'mac_profile_arm64',
- 'flutter/build/archives:artifacts',
+ api, 'mac_profile_arm64', 'flutter/build/archives:artifacts',
'flutter/shell/platform/darwin/macos:zip_macos_flutter_framework'
)
# Host release arm64
RunGN(
- api,
- '--mac',
- '--mac-cpu',
- 'arm64',
- '--runtime-mode',
- 'release',
- '--no-lto',
- '--prebuilt-dart-sdk'
+ api, '--mac', '--mac-cpu', 'arm64', '--runtime-mode', 'release',
+ '--no-lto', '--prebuilt-dart-sdk'
)
Build(
- api,
- 'mac_release_arm64',
- 'flutter/build/archives:artifacts',
+ api, 'mac_release_arm64', 'flutter/build/archives:artifacts',
'flutter/shell/platform/darwin/macos:zip_macos_flutter_framework'
)
@@ -1434,25 +1540,19 @@
api, 'debug', 'mac_debug_arm64', 'host_debug', 'darwin-x64'
)
PackageMacOSVariant(
- api, 'profile', 'mac_profile_arm64', 'host_profile', 'darwin-x64-profile'
+ api, 'profile', 'mac_profile_arm64', 'host_profile',
+ 'darwin-x64-profile'
)
PackageMacOSVariant(
- api, 'release', 'mac_release_arm64', 'host_release', 'darwin-x64-release'
+ api, 'release', 'mac_release_arm64', 'host_release',
+ 'darwin-x64-release'
)
-
if api.properties.get('build_android_aot', True):
# Profile arm
- RunGN(
- api,
- '--runtime-mode',
- 'profile',
- '--android'
- )
+ RunGN(api, '--runtime-mode', 'profile', '--android')
Build(
- api,
- 'android_profile',
- 'flutter/lib/snapshot',
+ api, 'android_profile', 'flutter/lib/snapshot',
'flutter/shell/platform/android:gen_snapshot'
)
UploadArtifact(
@@ -1463,17 +1563,9 @@
)
# Profile arm64
- RunGN(
- api,
- '--runtime-mode',
- 'profile',
- '--android',
- '--android-cpu=arm64'
- )
+ RunGN(api, '--runtime-mode', 'profile', '--android', '--android-cpu=arm64')
Build(
- api,
- 'android_profile_arm64',
- 'flutter/lib/snapshot',
+ api, 'android_profile_arm64', 'flutter/lib/snapshot',
'flutter/shell/platform/android:gen_snapshot'
)
UploadArtifact(
@@ -1484,17 +1576,9 @@
)
# Profile x64
- RunGN(
- api,
- '--runtime-mode',
- 'profile',
- '--android',
- '--android-cpu=x64'
- )
+ RunGN(api, '--runtime-mode', 'profile', '--android', '--android-cpu=x64')
Build(
- api,
- 'android_profile_x64',
- 'flutter/lib/snapshot',
+ api, 'android_profile_x64', 'flutter/lib/snapshot',
'flutter/shell/platform/android:gen_snapshot'
)
UploadArtifact(
@@ -1505,16 +1589,9 @@
)
# Release arm
- RunGN(
- api,
- '--runtime-mode',
- 'release',
- '--android'
- )
+ RunGN(api, '--runtime-mode', 'release', '--android')
Build(
- api,
- 'android_release',
- 'flutter/lib/snapshot',
+ api, 'android_release', 'flutter/lib/snapshot',
'flutter/shell/platform/android:gen_snapshot'
)
UploadArtifact(
@@ -1525,17 +1602,9 @@
)
# Release arm64
- RunGN(
- api,
- '--runtime-mode',
- 'release',
- '--android',
- '--android-cpu=arm64'
- )
+ RunGN(api, '--runtime-mode', 'release', '--android', '--android-cpu=arm64')
Build(
- api,
- 'android_release_arm64',
- 'flutter/lib/snapshot',
+ api, 'android_release_arm64', 'flutter/lib/snapshot',
'flutter/shell/platform/android:gen_snapshot'
)
UploadArtifact(
@@ -1546,19 +1615,10 @@
)
# Release x64
- RunGN(
- api,
- '--runtime-mode',
- 'release',
- '--android',
- '--android-cpu=x64'
- )
+ RunGN(api, '--runtime-mode', 'release', '--android', '--android-cpu=x64')
Build(
- api,
- 'android_release_x64',
- 'flutter/lib/snapshot',
+ api, 'android_release_x64', 'flutter/lib/snapshot',
'flutter/shell/platform/android:gen_snapshot'
-
)
UploadArtifact(
api,
@@ -1663,86 +1723,124 @@
RunGN(
api, '--ios', '--runtime-mode', 'debug', '--simulator',
- '--simulator-cpu=arm64', '--no-lto',
- '--prebuilt-impellerc', impellerc_path
+ '--simulator-cpu=arm64', '--no-lto', '--prebuilt-impellerc',
+ impellerc_path
)
Build(api, 'ios_debug_sim_arm64')
if api.properties.get('ios_debug', True):
RunGN(
- api, '--ios', '--runtime-mode', 'debug',
- '--prebuilt-impellerc', impellerc_path
+ api, '--ios', '--runtime-mode', 'debug', '--prebuilt-impellerc',
+ impellerc_path
)
Build(api, 'ios_debug')
BuildObjcDoc(api, env, env_prefixes)
PackageIOSVariant(
- api, 'debug', 'ios_debug', 'ios_debug_sim',
- 'ios_debug_sim_arm64', 'ios'
+ api, 'debug', 'ios_debug', 'ios_debug_sim', 'ios_debug_sim_arm64', 'ios'
)
if api.properties.get('ios_profile', True):
RunGN(
- api, '--ios', '--runtime-mode', 'profile',
- '--prebuilt-impellerc', impellerc_path
+ api, '--ios', '--runtime-mode', 'profile', '--prebuilt-impellerc',
+ impellerc_path
)
Build(api, 'ios_profile')
PackageIOSVariant(
- api, 'profile', 'ios_profile', 'ios_debug_sim',
- 'ios_debug_sim_arm64', 'ios-profile'
+ api, 'profile', 'ios_profile', 'ios_debug_sim', 'ios_debug_sim_arm64',
+ 'ios-profile'
)
if api.properties.get('ios_release', True):
RunGN(
- api, '--ios', '--runtime-mode', 'release',
- '--prebuilt-impellerc', impellerc_path
+ api, '--ios', '--runtime-mode', 'release', '--prebuilt-impellerc',
+ impellerc_path
)
Build(api, 'ios_release')
PackageIOSVariant(
- api, 'release', 'ios_release', 'ios_debug_sim',
- 'ios_debug_sim_arm64', 'ios-release'
+ api, 'release', 'ios_release', 'ios_debug_sim', 'ios_debug_sim_arm64',
+ 'ios-release'
)
def BuildWindows(api):
if api.properties.get('build_host', True):
RunGN(api, '--runtime-mode', 'debug', '--no-lto', '--prebuilt-dart-sdk')
- Build(api, 'host_debug', 'flutter:unittests', 'flutter/build/archives:artifacts',
- 'flutter/build/archives:embedder', 'flutter/tools/font-subset',
- 'flutter/build/archives:dart_sdk_archive',
- 'flutter/shell/platform/windows/client_wrapper:client_wrapper_archive',
- 'flutter/build/archives:windows_flutter'
+ Build(
+ api, 'host_debug', 'flutter:unittests',
+ 'flutter/build/archives:artifacts', 'flutter/build/archives:embedder',
+ 'flutter/tools/font-subset', 'flutter/build/archives:dart_sdk_archive',
+ 'flutter/shell/platform/windows/client_wrapper:client_wrapper_archive',
+ 'flutter/build/archives:windows_flutter'
)
RunTests(api, 'host_debug', types='engine')
RunGN(api, '--runtime-mode', 'profile', '--no-lto', '--prebuilt-dart-sdk')
- Build(api, 'host_profile', 'windows', 'flutter:gen_snapshot', 'flutter/build/archives:windows_flutter')
+ Build(
+ api, 'host_profile', 'windows', 'flutter:gen_snapshot',
+ 'flutter/build/archives:windows_flutter'
+ )
RunGN(api, '--runtime-mode', 'release', '--no-lto', '--prebuilt-dart-sdk')
- Build(api, 'host_release', 'windows', 'flutter:gen_snapshot', 'flutter/build/archives:windows_flutter')
+ Build(
+ api, 'host_release', 'windows', 'flutter:gen_snapshot',
+ 'flutter/build/archives:windows_flutter'
+ )
# host_debug
- UploadArtifact(api, config='host_debug', platform='windows-x64',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='host_debug', platform='windows-x64',
- artifact_name='windows-x64-embedder.zip')
- UploadArtifact(api, config='host_debug', platform='windows-x64-debug',
- artifact_name='windows-x64-flutter.zip')
- UploadArtifact(api, config='host_debug', platform='windows-x64',
- artifact_name='flutter-cpp-client-wrapper.zip')
- UploadArtifact(api, config='host_debug', platform='windows-x64',
- artifact_name='font-subset.zip')
- UploadArtifact(api, config='host_debug', platform='',
- artifact_name='dart-sdk-windows-x64.zip')
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='windows-x64',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='windows-x64',
+ artifact_name='windows-x64-embedder.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='windows-x64-debug',
+ artifact_name='windows-x64-flutter.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='windows-x64',
+ artifact_name='flutter-cpp-client-wrapper.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='windows-x64',
+ artifact_name='font-subset.zip'
+ )
+ UploadArtifact(
+ api,
+ config='host_debug',
+ platform='',
+ artifact_name='dart-sdk-windows-x64.zip'
+ )
# Host_profile
- UploadArtifact(api, config='host_profile', platform='windows-x64-profile',
- artifact_name='windows-x64-flutter.zip')
+ UploadArtifact(
+ api,
+ config='host_profile',
+ platform='windows-x64-profile',
+ artifact_name='windows-x64-flutter.zip'
+ )
# Host_release
- UploadArtifact(api, config='host_release', platform='windows-x64-release',
- artifact_name='windows-x64-flutter.zip')
+ UploadArtifact(
+ api,
+ config='host_release',
+ platform='windows-x64-release',
+ artifact_name='windows-x64-flutter.zip'
+ )
if api.properties.get('build_android_aot', True):
RunGN(api, '--runtime-mode', 'profile', '--android')
@@ -1753,24 +1851,66 @@
RunGN(api, '--runtime-mode', 'release', '--android', '--android-cpu=arm64')
RunGN(api, '--runtime-mode', 'release', '--android', '--android-cpu=x64')
- Build(api, 'android_profile', 'flutter/build/archives:archive_win_gen_snapshot')
- Build(api, 'android_profile_arm64', 'flutter/build/archives:archive_win_gen_snapshot')
- Build(api, 'android_profile_x64', 'flutter/build/archives:archive_win_gen_snapshot')
- Build(api, 'android_release', 'flutter/build/archives:archive_win_gen_snapshot')
- Build(api, 'android_release_arm64', 'flutter/build/archives:archive_win_gen_snapshot')
- Build(api, 'android_release_x64', 'flutter/build/archives:archive_win_gen_snapshot')
- UploadArtifact(api, config='android_profile', platform='android-arm-profile',
- artifact_name='windows-x64.zip')
- UploadArtifact(api, config='android_profile_arm64', platform='android-arm64-profile',
- artifact_name='windows-x64.zip')
- UploadArtifact(api, config='android_profile_x64', platform='android-x64-profile',
- artifact_name='windows-x64.zip')
- UploadArtifact(api, config='android_release', platform='android-arm-release',
- artifact_name='windows-x64.zip')
- UploadArtifact(api, config='android_release_arm64', platform='android-arm64-release',
- artifact_name='windows-x64.zip')
- UploadArtifact(api, config='android_release_x64', platform='android-x64-release',
- artifact_name='windows-x64.zip')
+ Build(
+ api, 'android_profile',
+ 'flutter/build/archives:archive_win_gen_snapshot'
+ )
+ Build(
+ api, 'android_profile_arm64',
+ 'flutter/build/archives:archive_win_gen_snapshot'
+ )
+ Build(
+ api, 'android_profile_x64',
+ 'flutter/build/archives:archive_win_gen_snapshot'
+ )
+ Build(
+ api, 'android_release',
+ 'flutter/build/archives:archive_win_gen_snapshot'
+ )
+ Build(
+ api, 'android_release_arm64',
+ 'flutter/build/archives:archive_win_gen_snapshot'
+ )
+ Build(
+ api, 'android_release_x64',
+ 'flutter/build/archives:archive_win_gen_snapshot'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile',
+ platform='android-arm-profile',
+ artifact_name='windows-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_arm64',
+ platform='android-arm64-profile',
+ artifact_name='windows-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_profile_x64',
+ platform='android-x64-profile',
+ artifact_name='windows-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release',
+ platform='android-arm-release',
+ artifact_name='windows-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_arm64',
+ platform='android-arm64-release',
+ artifact_name='windows-x64.zip'
+ )
+ UploadArtifact(
+ api,
+ config='android_release_x64',
+ platform='android-x64-release',
+ artifact_name='windows-x64.zip'
+ )
def BuildObjcDoc(api, env, env_prefixes):
@@ -1779,7 +1919,8 @@
checkout = GetCheckoutPath(api)
with api.os_utils.make_temp_directory('BuildObjcDoc') as temp_dir:
objcdoc_cmd = [checkout.join('flutter/tools/gen_objcdoc.sh'), temp_dir]
- with api.context(env=env, env_prefixes=env_prefixes, cwd=checkout.join('flutter')):
+ with api.context(env=env, env_prefixes=env_prefixes,
+ cwd=checkout.join('flutter')):
api.step('build obj-c doc', objcdoc_cmd)
api.zip.directory(
'archive obj-c doc', temp_dir, checkout.join('out/ios-objcdoc.zip')
@@ -1809,11 +1950,13 @@
android_home = checkout.join('third_party', 'android_tools', 'sdk')
env = {
- 'ANDROID_HOME': str(android_home),
+ 'ANDROID_HOME': str(android_home),
}
- use_prebuilt_dart = (api.properties.get('build_host', True) or
- api.properties.get('build_android_aot', True))
+ use_prebuilt_dart = (
+ api.properties.get('build_host', True) or
+ api.properties.get('build_android_aot', True)
+ )
if use_prebuilt_dart:
env['FLUTTER_PREBUILT_DART_SDK'] = 'True'
@@ -1843,7 +1986,9 @@
env_prefixes=env_prefixes), api.depot_tools.on_path():
api.gclient.runhooks()
- gclient_vars = api.shard_util_v2.unfreeze_dict(api.properties.get('gclient_variables', {}))
+ gclient_vars = api.shard_util_v2.unfreeze_dict(
+ api.properties.get('gclient_variables', {})
+ )
try:
if api.platform.is_linux:
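
gclient_variables arrives as a frozen mapping from the recipe engine, hence the unfreeze_dict call above before the value is passed along. A toy equivalent, assuming a recursive copy into plain dicts is all the real helper (which lives in flutter/shard_util_v2) needs to do:

# Sketch only: toy unfreeze; behavior of the real helper is an assumption.
def unfreeze_dict(frozen):
  return {
      k: unfreeze_dict(v) if isinstance(v, dict) else v
      for k, v in dict(frozen).items()
  }

print(unfreeze_dict({'upload_fuchsia_sdk': True, 'fuchsia_sdk_hash': 'abc'}))
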
@@ -1901,11 +2046,10 @@
test = api.test(
'%s%s%s%s%s%s_%s_%s' % (
platform, '_upload' if should_upload else '',
- '_maven' if maven else '', '_publish_cipd'
- if should_publish_cipd else '', '_no_lto' if no_lto else '',
- '_font_subset' if font_subset else '',
- bucket,
- branch
+ '_maven' if maven else '',
+ '_publish_cipd' if should_publish_cipd else '',
+ '_no_lto' if no_lto else '',
+ '_font_subset' if font_subset else '', bucket, branch
),
api.platform(platform, 64),
api.buildbucket.ci_build(
@@ -1995,17 +2139,13 @@
api.runtime(is_experimental=True),
api.properties(
**{
- 'clobber': True,
- 'git_url': 'https://github.com/flutter/engine',
- 'goma_jobs': '200',
- 'git_ref': 'refs/pull/1/head',
- 'fuchsia_ctl_version': 'version:0.0.2',
- 'build_host': True,
- 'build_fuchsia': True,
- 'build_android_aot': True,
- 'build_android_debug': True,
- 'android_sdk_license': 'android_sdk_hash',
- 'android_sdk_preview_license': 'android_sdk_preview_hash'
+ 'clobber': True, 'git_url': 'https://github.com/flutter/engine',
+ 'goma_jobs': '200', 'git_ref':
+ 'refs/pull/1/head', 'fuchsia_ctl_version': 'version:0.0.2',
+ 'build_host': True, 'build_fuchsia': True, 'build_android_aot':
+ True, 'build_android_debug': True, 'android_sdk_license':
+ 'android_sdk_hash', 'android_sdk_preview_license':
+ 'android_sdk_preview_hash'
}
),
)
@@ -2031,7 +2171,8 @@
'build_android_debug': True,
'android_sdk_license': 'android_sdk_hash',
'android_sdk_preview_license': 'android_sdk_preview_hash',
- 'gclient_variables': {'upload_fuchsia_sdk': True, 'fuchsia_sdk_hash': 'thehash'},
+ 'gclient_variables':
+ {'upload_fuchsia_sdk': True, 'fuchsia_sdk_hash': 'thehash'},
}
),
)
@@ -2051,19 +2192,13 @@
collect_build_output,
api.properties(
**{
- 'clobber': False,
- 'goma_jobs': '1024',
- 'fuchsia_ctl_version': 'version:0.0.2',
- 'build_host': False,
- 'build_fuchsia': True,
- 'build_android_aot': False,
- 'build_android_jit_release': False,
- 'build_android_debug': False,
- 'no_maven': True,
- 'upload_packages': True,
- 'android_sdk_license': 'android_sdk_hash',
- 'android_sdk_preview_license': 'android_sdk_preview_hash',
- 'force_upload': False
+ 'clobber': False, 'goma_jobs': '1024', 'fuchsia_ctl_version':
+ 'version:0.0.2', 'build_host': False, 'build_fuchsia': True,
+ 'build_android_aot': False, 'build_android_jit_release': False,
+ 'build_android_debug': False, 'no_maven': True, 'upload_packages':
+ True, 'android_sdk_license':
+ 'android_sdk_hash', 'android_sdk_preview_license':
+ 'android_sdk_preview_hash', 'force_upload': False
}
),
api.properties.environ(EnvProperties(SWARMING_TASK_ID='deadbeef')),
@@ -2081,18 +2216,12 @@
),
api.properties(
**{
- 'clobber': False,
- 'goma_jobs': '1024',
- 'fuchsia_ctl_version': 'version:0.0.2',
- 'build_host': False,
- 'build_fuchsia': True,
- 'build_android_aot': False,
- 'build_android_debug': False,
- 'no_maven': False,
- 'upload_packages': True,
- 'android_sdk_license': 'android_sdk_hash',
- 'android_sdk_preview_license': 'android_sdk_preview_hash',
- 'force_upload': True
+ 'clobber': False, 'goma_jobs': '1024', 'fuchsia_ctl_version':
+ 'version:0.0.2', 'build_host': False, 'build_fuchsia': True,
+ 'build_android_aot': False, 'build_android_debug': False,
+ 'no_maven': False, 'upload_packages': True, 'android_sdk_license':
+ 'android_sdk_hash', 'android_sdk_preview_license':
+ 'android_sdk_preview_hash', 'force_upload': True
}
),
api.properties.environ(EnvProperties(SWARMING_TASK_ID='deadbeef')),
@@ -2111,24 +2240,16 @@
),
api.properties(
**{
- 'clobber': False,
- 'goma_jobs': '1024',
- 'fuchsia_ctl_version': 'version:0.0.2',
- 'build_host': False,
- 'build_fuchsia': False,
- 'build_android_aot': True,
- 'build_android_debug': False,
- 'dependencies': [
- {
- 'dependency': 'open_jdk',
- 'version': 'version:11',
- }
- ],
- 'no_maven': False,
- 'upload_packages': True,
- 'android_sdk_license': 'android_sdk_hash',
- 'android_sdk_preview_license': 'android_sdk_preview_hash',
- 'force_upload': True
+ 'clobber': False, 'goma_jobs': '1024', 'fuchsia_ctl_version':
+ 'version:0.0.2',
+ 'build_host': False, 'build_fuchsia': False, 'build_android_aot':
+ True, 'build_android_debug': False, 'dependencies': [{
+ 'dependency': 'open_jdk',
+ 'version': 'version:11',
+ }],
+ 'no_maven': False, 'upload_packages': True, 'android_sdk_license':
+ 'android_sdk_hash', 'android_sdk_preview_license':
+ 'android_sdk_preview_hash', 'force_upload': True
}
),
api.properties.environ(EnvProperties(SWARMING_TASK_ID='deadbeef')),
diff --git a/recipes/engine/engine_arm.py b/recipes/engine/engine_arm.py
index 0864cfd..6c654b5 100644
--- a/recipes/engine/engine_arm.py
+++ b/recipes/engine/engine_arm.py
@@ -37,8 +37,7 @@
BUCKET_NAME = 'flutter_infra_release'
MAVEN_BUCKET_NAME = 'download.flutter.io'
ICU_DATA_PATH = 'third_party/icu/flutter/icudtl.dat'
-GIT_REPO = (
- 'https://flutter.googlesource.com/mirrors/engine')
+GIT_REPO = ('https://flutter.googlesource.com/mirrors/engine')
IMPELLERC_SHADER_LIB_PATH = 'shader_lib'
PROPERTIES = InputProperties
@@ -80,60 +79,78 @@
def UploadArtifact(api, config, platform, artifact_name):
path = GetCheckoutPath(api).join(
- 'out',
- config,
- 'zip_archives',
- platform,
- artifact_name
+ 'out', config, 'zip_archives', platform, artifact_name
)
api.path.mock_add_file(path)
assert api.path.exists(path), '%s does not exist' % str(path)
if not api.flutter_bcid.is_prod_build():
return
dst = '%s/%s' % (platform, artifact_name) if platform else artifact_name
- api.bucket_util.safe_upload(
- path,
- GetCloudPath(api, dst)
- )
+ api.bucket_util.safe_upload(path, GetCloudPath(api, dst))
def BuildLinux(api):
- RunGN(api, '--runtime-mode', 'debug', '--target-os=linux',
- '--linux-cpu=arm64', '--prebuilt-dart-sdk')
- Build(api, 'linux_debug_arm64',
- 'flutter/build/archives:artifacts',
- 'flutter/build/archives:dart_sdk_archive',
- 'flutter/tools/font-subset',
- 'flutter/shell/platform/linux:flutter_gtk')
-
- RunGN(api, '--runtime-mode', 'profile', '--no-lto', '--target-os=linux',
- '--linux-cpu=arm64', '--prebuilt-dart-sdk')
+ RunGN(
+ api, '--runtime-mode', 'debug', '--target-os=linux', '--linux-cpu=arm64',
+ '--prebuilt-dart-sdk'
+ )
Build(
- api, 'linux_profile_arm64',
- 'flutter/shell/platform/linux:flutter_gtk')
+ api, 'linux_debug_arm64', 'flutter/build/archives:artifacts',
+ 'flutter/build/archives:dart_sdk_archive', 'flutter/tools/font-subset',
+ 'flutter/shell/platform/linux:flutter_gtk'
+ )
- RunGN(api, '--runtime-mode', 'release', '--target-os=linux',
- '--linux-cpu=arm64', '--prebuilt-dart-sdk')
- Build(api, 'linux_release_arm64',
- 'flutter/shell/platform/linux:flutter_gtk')
+ RunGN(
+ api, '--runtime-mode', 'profile', '--no-lto', '--target-os=linux',
+ '--linux-cpu=arm64', '--prebuilt-dart-sdk'
+ )
+ Build(api, 'linux_profile_arm64', 'flutter/shell/platform/linux:flutter_gtk')
+ RunGN(
+ api, '--runtime-mode', 'release', '--target-os=linux',
+ '--linux-cpu=arm64', '--prebuilt-dart-sdk'
+ )
+ Build(api, 'linux_release_arm64', 'flutter/shell/platform/linux:flutter_gtk')
# linux_debug_arm64
- UploadArtifact(api, config='linux_debug_arm64', platform='linux-arm64',
- artifact_name='artifacts.zip')
- UploadArtifact(api, config='linux_debug_arm64', platform='linux-arm64',
- artifact_name='font-subset.zip')
- UploadArtifact(api, config='linux_debug_arm64', platform='',
- artifact_name='dart-sdk-linux-arm64.zip')
-
+ UploadArtifact(
+ api,
+ config='linux_debug_arm64',
+ platform='linux-arm64',
+ artifact_name='artifacts.zip'
+ )
+ UploadArtifact(
+ api,
+ config='linux_debug_arm64',
+ platform='linux-arm64',
+ artifact_name='font-subset.zip'
+ )
+ UploadArtifact(
+ api,
+ config='linux_debug_arm64',
+ platform='',
+ artifact_name='dart-sdk-linux-arm64.zip'
+ )
# Desktop embedding.
- UploadArtifact(api, config='linux_debug_arm64', platform='linux-arm64-debug',
- artifact_name='linux-arm64-flutter-gtk.zip')
- UploadArtifact(api, config='linux_profile_arm64', platform='linux-arm64-profile',
- artifact_name='linux-arm64-flutter-gtk.zip')
- UploadArtifact(api, config='linux_release_arm64', platform='linux-arm64-release',
- artifact_name='linux-arm64-flutter-gtk.zip')
+ UploadArtifact(
+ api,
+ config='linux_debug_arm64',
+ platform='linux-arm64-debug',
+ artifact_name='linux-arm64-flutter-gtk.zip'
+ )
+ UploadArtifact(
+ api,
+ config='linux_profile_arm64',
+ platform='linux-arm64-profile',
+ artifact_name='linux-arm64-flutter-gtk.zip'
+ )
+ UploadArtifact(
+ api,
+ config='linux_release_arm64',
+ platform='linux-arm64-release',
+ artifact_name='linux-arm64-flutter-gtk.zip'
+ )
def RunSteps(api, properties, env_properties):
@@ -146,14 +163,15 @@
api.file.rmtree('Clobber build output', checkout.join('out'))
api.file.ensure_directory('Ensure checkout cache', cache_root)
- dart_bin = checkout.join('third_party', 'dart', 'tools', 'sdks', 'dart-sdk',
- 'bin')
+ dart_bin = checkout.join(
+ 'third_party', 'dart', 'tools', 'sdks', 'dart-sdk', 'bin'
+ )
android_home = checkout.join('third_party', 'android_tools', 'sdk')
env = {
- 'ANDROID_HOME': str(android_home),
- 'FLUTTER_PREBUILT_DART_SDK': 'True',
+ 'ANDROID_HOME': str(android_home),
+ 'FLUTTER_PREBUILT_DART_SDK': 'True',
}
env_prefixes = {}
@@ -163,9 +181,8 @@
api.os_utils.clean_derived_data()
# Various scripts we run assume access to depot_tools on path for `ninja`.
- with api.context(
- cwd=cache_root, env=env,
- env_prefixes=env_prefixes), api.depot_tools.on_path():
+ with api.context(cwd=cache_root, env=env,
+ env_prefixes=env_prefixes), api.depot_tools.on_path():
api.gclient.runhooks()
@@ -193,10 +210,19 @@
git_revision = 'abcd1234'
for platform in ('mac', 'linux', 'win'):
for should_upload in (True, False):
- for bucket in ('prod', 'staging',):
- for experimental in (True, False,):
+ for bucket in (
+ 'prod',
+ 'staging',
+ ):
+ for experimental in (
+ True,
+ False,
+ ):
test = api.test(
- '%s%s_%s_%s' % (platform, '_upload' if should_upload else '', bucket, experimental),
+ '%s%s_%s_%s' % (
+ platform, '_upload' if should_upload else '', bucket,
+ experimental
+ ),
api.platform(platform, 64),
api.buildbucket.ci_build(
builder='%s Engine' % platform.capitalize(),
@@ -217,9 +243,11 @@
build_android_debug=True,
upload_packages=should_upload,
force_upload=True,
- ),),
+ ),
+ ),
api.properties.environ(
- EnvProperties(SWARMING_TASK_ID='deadbeef')),
+ EnvProperties(SWARMING_TASK_ID='deadbeef')
+ ),
status='FAILURE' if platform in ['mac', 'win'] else 'SUCCESS'
)
yield test
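
The retupled ('prod', 'staging') and (True, False) literals above feed a four-way permutation; the same test names the nested loops generate, sketched standalone:

# Sketch only: enumerates the same test-name permutations as the loops above.
import itertools

for platform, should_upload, bucket, experimental in itertools.product(
    ('mac', 'linux', 'win'), (True, False), ('prod', 'staging'),
    (True, False)):
  print('%s%s_%s_%s' % (
      platform, '_upload' if should_upload else '', bucket, experimental
  ))
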
@@ -240,14 +268,16 @@
android_sdk_license='android_sdk_hash',
android_sdk_preview_license='android_sdk_preview_hash',
upload_packages=should_upload,
- )),
+ )
+ ),
)
yield api.test(
'clobber',
api.buildbucket.ci_build(
builder='Linux Host Engine',
git_repo='https://github.com/flutter/engine',
- project='flutter'),
+ project='flutter'
+ ),
api.runtime(is_experimental=True),
api.properties(
InputProperties(
@@ -261,14 +291,17 @@
build_android_aot=True,
build_android_debug=True,
android_sdk_license='android_sdk_hash',
- android_sdk_preview_license='android_sdk_preview_hash')),
+ android_sdk_preview_license='android_sdk_preview_hash'
+ )
+ ),
)
yield api.test(
'pull_request',
api.buildbucket.ci_build(
builder='Linux Host Engine',
git_repo='https://github.com/flutter/engine',
- project='flutter'),
+ project='flutter'
+ ),
api.runtime(is_experimental=True),
api.properties(
InputProperties(
@@ -282,5 +315,7 @@
build_android_aot=True,
build_android_debug=True,
android_sdk_license='android_sdk_hash',
- android_sdk_preview_license='android_sdk_preview_hash')),
+ android_sdk_preview_license='android_sdk_preview_hash'
+ )
+ ),
)
diff --git a/recipes/engine/engine_builder.py b/recipes/engine/engine_builder.py
index ab707bc..8d86b37 100644
--- a/recipes/engine/engine_builder.py
+++ b/recipes/engine/engine_builder.py
@@ -8,20 +8,20 @@
from PB.recipes.flutter.engine.engine_builder import InputProperties, EngineBuild
DEPS = [
- 'depot_tools/depot_tools',
- 'flutter/goma',
- 'flutter/os_utils',
- 'flutter/osx_sdk',
- 'flutter/repo_util',
- 'flutter/shard_util_v2',
- 'recipe_engine/buildbucket',
- 'recipe_engine/cas',
- 'recipe_engine/context',
- 'recipe_engine/file',
- 'recipe_engine/path',
- 'recipe_engine/platform',
- 'recipe_engine/properties',
- 'recipe_engine/step',
+ 'depot_tools/depot_tools',
+ 'flutter/goma',
+ 'flutter/os_utils',
+ 'flutter/osx_sdk',
+ 'flutter/repo_util',
+ 'flutter/shard_util_v2',
+ 'recipe_engine/buildbucket',
+ 'recipe_engine/cas',
+ 'recipe_engine/context',
+ 'recipe_engine/file',
+ 'recipe_engine/path',
+ 'recipe_engine/platform',
+ 'recipe_engine/properties',
+ 'recipe_engine/step',
]
GIT_REPO = \
@@ -39,7 +39,7 @@
ninja_args = [ninja_path, '-C', build_dir]
ninja_args.extend(targets)
with api.goma():
- name='build %s' % ' '.join([config] + list(targets))
+ name = 'build %s' % ' '.join([config] + list(targets))
api.step(name, ninja_args)
else:
ninja_args = [ninja_path, '-C', build_dir]
@@ -60,7 +60,6 @@
api.step('gn %s' % ' '.join(args), gn_cmd)
-
def CasOutputs(api, output_files, output_dirs):
out_dir = api.path['cache'].join('builder', 'src')
dirs = output_files + output_dirs
@@ -89,10 +88,12 @@
Build(api, build.dir, build.disable_goma, *build.targets)
for output_file in build.output_files:
output_files.append(
- cache_root.join('src', 'out', build.dir, output_file))
+ cache_root.join('src', 'out', build.dir, output_file)
+ )
for output_dir in build.output_dirs:
output_dirs.append(
- cache_root.join('src', 'out', build.dir, output_dir))
+ cache_root.join('src', 'out', build.dir, output_dir)
+ )
# This is to clean up leaked processes.
api.os_utils.kill_processes()
# Collect memory/cpu/process after task execution.
@@ -127,7 +128,8 @@
disable_goma=False,
gn_args=['--unoptimized'],
dir='host_debug_unopt',
- output_files=['shell_unittests'])
+ output_files=['shell_unittests']
+ )
]
)
)
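
The output collection above joins each requested file or directory onto the build's out directory before archiving to CAS. With os.path standing in for the recipe path API:

# Sketch only: os.path stands in for the recipe engine's path objects.
import os

def collect_outputs(cache_root, build_dir, output_files, output_dirs):
  return [
      os.path.join(cache_root, 'src', 'out', build_dir, name)
      for name in output_files + output_dirs
  ]

print(collect_outputs('/b/cache/builder', 'host_debug_unopt',
                      ['shell_unittests'], []))
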
diff --git a/recipes/engine/engine_license.py b/recipes/engine/engine_license.py
index 209d878..5998159 100644
--- a/recipes/engine/engine_license.py
+++ b/recipes/engine/engine_license.py
@@ -61,8 +61,8 @@
android_home = checkout.join('third_party', 'android_tools', 'sdk')
env = {
- 'ANDROID_HOME': str(android_home),
- 'FLUTTER_PREBUILT_DART_SDK': 'True',
+ 'ANDROID_HOME': str(android_home),
+ 'FLUTTER_PREBUILT_DART_SDK': 'True',
}
env_prefixes = {'PATH': [dart_bin]}
@@ -109,13 +109,7 @@
project='flutter',
),
api.runtime(is_experimental=False),
- api.properties(
- InputProperties(
- goma_jobs='1024',
- ),
- ),
- api.properties.environ(
- EnvProperties(SWARMING_TASK_ID='deadbeef')
- ),
+ api.properties(InputProperties(goma_jobs='1024',),),
+ api.properties.environ(EnvProperties(SWARMING_TASK_ID='deadbeef')),
)
yield test
diff --git a/recipes/engine/engine_lint.py b/recipes/engine/engine_lint.py
index 59df88c..44faae6 100644
--- a/recipes/engine/engine_lint.py
+++ b/recipes/engine/engine_lint.py
@@ -79,8 +79,12 @@
elif api.platform.is_mac:
with api.osx_sdk('ios'):
cpu = api.properties.get('cpu', 'x86')
- ios_command = ['--ios', '--runtime-mode', 'debug', '--simulator', '--no-lto']
- host_command = ['--runtime-mode', 'debug', '--prebuilt-dart-sdk', '--no-lto']
+ ios_command = [
+ '--ios', '--runtime-mode', 'debug', '--simulator', '--no-lto'
+ ]
+ host_command = [
+ '--runtime-mode', 'debug', '--prebuilt-dart-sdk', '--no-lto'
+ ]
if (cpu == 'arm64'):
ios_command += ['--force-mac-arm64']
host_command += ['--force-mac-arm64']
@@ -112,8 +116,8 @@
android_home = checkout.join('third_party', 'android_tools', 'sdk')
env = {
- 'ANDROID_HOME': str(android_home),
- 'FLUTTER_PREBUILT_DART_SDK': 'True',
+ 'ANDROID_HOME': str(android_home),
+ 'FLUTTER_PREBUILT_DART_SDK': 'True',
}
env_prefixes = {'PATH': [dart_bin]}
@@ -162,7 +166,9 @@
continue
test = api.test(
'%s %s %s' % (
- platform, lint_target, lint_set,
+ platform,
+ lint_target,
+ lint_set,
),
api.platform(platform, 64),
api.buildbucket.ci_build(
@@ -181,9 +187,7 @@
lint_ios=lint_target == 'ios',
),
),
- api.properties.environ(
- EnvProperties(SWARMING_TASK_ID='deadbeef')
- ),
+ api.properties.environ(EnvProperties(SWARMING_TASK_ID='deadbeef')),
)
yield test
@@ -191,18 +195,18 @@
for cpu in ('arm64', 'x86'):
yield api.test(
'%s arch %s' % (
- lint_target, cpu,
+ lint_target,
+ cpu,
),
api.platform('mac', 64),
api.runtime(is_experimental=False),
api.properties(
- cpu='%s' % cpu,
- goma_jobs='1024',
- lint_all=False,
- lint_head=False,
- lint_host=lint_target == 'host',
- lint_android=False,
- lint_ios=lint_target == 'ios',
+ cpu='%s' % cpu,
+ goma_jobs='1024',
+ lint_all=False,
+ lint_head=False,
+ lint_host=lint_target == 'host',
+ lint_android=False,
+ lint_ios=lint_target == 'ios',
),
)
-
diff --git a/recipes/engine/engine_unopt.py b/recipes/engine/engine_unopt.py
index 7aa7119..14bd52a 100644
--- a/recipes/engine/engine_unopt.py
+++ b/recipes/engine/engine_unopt.py
@@ -48,20 +48,20 @@
api.build_util.build(config, checkout, targets)
-def RunTests(api, out_dir, android_out_dir=None, ios_out_dir=None, types='all', suppress_sanitizers=False):
+def RunTests(
+ api,
+ out_dir,
+ android_out_dir=None,
+ ios_out_dir=None,
+ types='all',
+ suppress_sanitizers=False
+):
script_path = GetCheckoutPath(api).join('flutter', 'testing', 'run_tests.py')
# TODO(godofredoc): use .vpython from engine when files are available.
venv_path = api.depot_tools.root.join('.vpython3')
args = [
- 'vpython3',
- '-vpython-spec',
- venv_path,
- script_path,
- '--variant',
- out_dir,
- '--type',
- types,
- '--engine-capture-core-dump'
+ 'vpython3', '-vpython-spec', venv_path, script_path, '--variant', out_dir,
+ '--type', types, '--engine-capture-core-dump'
]
if android_out_dir:
args.extend(['--android-variant', android_out_dir])
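For reviewers sanity-checking the flattened argument list above, here is a self-contained sketch of how the run_tests.py invocation is assembled (flag names are copied from the hunk; the helper name and sample values are hypothetical):

def make_run_tests_cmd(venv_path, script_path, out_dir, types='all',
                       android_out_dir=None):
  # Mirrors the args list built in RunTests above.
  args = [
      'vpython3', '-vpython-spec', venv_path, script_path, '--variant',
      out_dir, '--type', types, '--engine-capture-core-dump'
  ]
  if android_out_dir:
    args.extend(['--android-variant', android_out_dir])
  return args

cmd = make_run_tests_cmd('.vpython3', 'run_tests.py', 'host_debug_unopt',
                         types='dart,engine')
assert cmd[:2] == ['vpython3', '-vpython-spec']
assert '--android-variant' not in cmd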
@@ -112,15 +112,10 @@
# TODO(godofredoc): use .vpython from engine when files are available.
venv_path = api.depot_tools.root.join('.vpython3')
args = [
- 'vpython3',
- '-vpython-spec',
- venv_path,
- script_path,
- '--before',
+ 'vpython3', '-vpython-spec', venv_path, script_path, '--before',
GetCheckoutPath(api).join('flutter', 'impeller', 'tools', 'malioc.json'),
'--after',
- GetCheckoutPath(api).join('out', out_dir, 'gen', 'malioc'),
- '--print-diff'
+ GetCheckoutPath(api).join('out', out_dir, 'gen', 'malioc'), '--print-diff'
]
api.step('malioc diff', args)
@@ -128,15 +123,14 @@
def BuildLinuxAndroid(api, env, swarming_task_id):
# Build Android Unopt and run tests
RunGN(
- api,
- '--android',
- '--unoptimized',
- '--malioc-path',
+ api, '--android', '--unoptimized', '--malioc-path',
api.path.join(env['ARM_TOOLS'], 'mali_offline_compiler', 'malioc')
)
- Build(api, 'android_debug_unopt',
- 'flutter/shell/platform/android:robolectric_tests',
- 'flutter/impeller',
+ Build(
+ api,
+ 'android_debug_unopt',
+ 'flutter/shell/platform/android:robolectric_tests',
+ 'flutter/impeller',
)
RunTests(
api,
@@ -149,17 +143,13 @@
def BuildLinux(api):
RunGN(
- api,
- '--runtime-mode',
- 'debug',
- '--unoptimized',
- '--prebuilt-dart-sdk',
- '--asan',
- '--lsan',
- '--dart-debug'
+ api, '--runtime-mode', 'debug', '--unoptimized', '--prebuilt-dart-sdk',
+ '--asan', '--lsan', '--dart-debug'
)
Build(api, 'host_debug_unopt')
- RunTests(api, 'host_debug_unopt', types='dart,engine', suppress_sanitizers=True)
+ RunTests(
+ api, 'host_debug_unopt', types='dart,engine', suppress_sanitizers=True
+ )
def TestObservatory(api):
@@ -185,7 +175,10 @@
def BuildMac(api):
- RunGN(api, '--runtime-mode', 'debug', '--unoptimized', '--no-lto', '--prebuilt-dart-sdk')
+ RunGN(
+ api, '--runtime-mode', 'debug', '--unoptimized', '--no-lto',
+ '--prebuilt-dart-sdk'
+ )
Build(api, 'host_debug_unopt')
RunTests(api, 'host_debug_unopt', types='dart,engine')
@@ -234,8 +227,8 @@
android_home = checkout.join('third_party', 'android_tools', 'sdk')
env = {
- 'ANDROID_HOME': str(android_home),
- 'FLUTTER_PREBUILT_DART_SDK': 'True',
+ 'ANDROID_HOME': str(android_home),
+ 'FLUTTER_PREBUILT_DART_SDK': 'True',
}
env_prefixes = {'PATH': [dart_bin]}
@@ -302,14 +295,10 @@
project='flutter',
),
api.runtime(is_experimental=False),
- api.properties(
- InputProperties(
- goma_jobs='1024',
- no_lto=no_lto,
- ),
- ),
- api.properties.environ(
- EnvProperties(SWARMING_TASK_ID='deadbeef')
- ),
+ api.properties(InputProperties(
+ goma_jobs='1024',
+ no_lto=no_lto,
+ ),),
+ api.properties.environ(EnvProperties(SWARMING_TASK_ID='deadbeef')),
)
yield test
diff --git a/recipes/engine/femu_test.py b/recipes/engine/femu_test.py
index 2611f5c..75f2ce1 100644
--- a/recipes/engine/femu_test.py
+++ b/recipes/engine/femu_test.py
@@ -74,15 +74,25 @@
def BuildAndTestFuchsia(api, build_script, git_rev):
arch = GetEmulatorArch(api)
# Prepares build files for debug/JIT Fuchsia
- RunGN(api, '--fuchsia', '--fuchsia-cpu', arch, '--runtime-mode', 'debug',
- '--no-lto')
+ RunGN(
+ api, '--fuchsia', '--fuchsia-cpu', arch, '--runtime-mode', 'debug',
+ '--no-lto'
+ )
# Prepares build files for profile/AOT Fuchsia
- RunGN(api, '--fuchsia', '--fuchsia-cpu', arch, '--runtime-mode', 'profile',
- '--no-lto')
+ RunGN(
+ api, '--fuchsia', '--fuchsia-cpu', arch, '--runtime-mode', 'profile',
+ '--no-lto'
+ )
# Builds debug/JIT Fuchsia
- Build(api, 'fuchsia_debug_%s' % arch, *GetFlutterFuchsiaBuildTargets(False, True))
+ Build(
+ api, 'fuchsia_debug_%s' % arch,
+ *GetFlutterFuchsiaBuildTargets(False, True)
+ )
# Builds profile/AOT Fuchsia
- Build(api, 'fuchsia_profile_%s' % arch, *GetFlutterFuchsiaBuildTargets(False, True))
+ Build(
+ api, 'fuchsia_profile_%s' % arch,
+ *GetFlutterFuchsiaBuildTargets(False, True)
+ )
# Package the build artifacts.
#
@@ -92,8 +102,15 @@
#
# TODO(akbiggs): Clean this up if we feel brave.
fuchsia_debug_package_cmd = [
- 'python3', build_script, '--engine-version', git_rev, '--skip-build',
- '--archs', arch, '--runtime-mode', 'debug',
+ 'python3',
+ build_script,
+ '--engine-version',
+ git_rev,
+ '--skip-build',
+ '--archs',
+ arch,
+ '--runtime-mode',
+ 'debug',
]
fuchsia_profile_package_cmd = [
'python3', build_script, '--engine-version', git_rev, '--skip-build',
@@ -116,8 +133,9 @@
def GetFuchsiaBuildId(api):
checkout = GetCheckoutPath(api)
- manifest_path = checkout.join('fuchsia', 'sdk', 'linux', 'meta',
- 'manifest.json')
+ manifest_path = checkout.join(
+ 'fuchsia', 'sdk', 'linux', 'meta', 'manifest.json'
+ )
manifest_data = api.file.read_json('read manifest', manifest_path)
return manifest_data['id']
@@ -140,13 +158,17 @@
def addFlutterTests():
arch = GetEmulatorArch(api)
add(
- checkout.join('out', 'fuchsia_bucket', 'flutter', arch, 'debug', 'aot',
- 'flutter_aot_runner-0.far'), 'flutter_aot_runner-0.far')
+ checkout.join(
+ 'out', 'fuchsia_bucket', 'flutter', arch, 'debug', 'aot',
+ 'flutter_aot_runner-0.far'
+ ), 'flutter_aot_runner-0.far'
+ )
test_suites_file = checkout.join(
- 'flutter', 'testing', 'fuchsia', 'test_suites.yaml')
+ 'flutter', 'testing', 'fuchsia', 'test_suites.yaml'
+ )
- for suite in api.yaml.read('retrieve list of test suites',
- test_suites_file, api.json.output()).json.output:
+ for suite in api.yaml.read('retrieve list of test suites', test_suites_file,
+ api.json.output()).json.output:
# Default behavior is to run all tests on x64 if "emulator_arch" isn't present
# If "emulator_arch" is present, we run based on the emulator_arch specified
# x64 - femu_test.py
@@ -156,16 +178,22 @@
# Ensure command is well-formed.
# See https://fuchsia.dev/fuchsia-src/concepts/packages/package_url.
- match = re.match(r'^(test run) (?P<test_far_file>fuchsia-pkg://[0-9a-z\-_\.]+/(?P<name>[0-9a-z\-_\.]+)#meta/[0-9a-z\-_\.]+(\.cm|\.cmx))( +[0-9a-zA-Z\-_*\.: =]+)?$', suite['test_command'])
+ match = re.match(
+ r'^(test run) (?P<test_far_file>fuchsia-pkg://[0-9a-z\-_\.]+/(?P<name>[0-9a-z\-_\.]+)#meta/[0-9a-z\-_\.]+(\.cm|\.cmx))( +[0-9a-zA-Z\-_*\.: =]+)?$',
+ suite['test_command']
+ )
if not match:
- raise api.step.StepFailure('Invalid test command: %s' % suite['test_command'])
+ raise api.step.StepFailure(
+ 'Invalid test command: %s' % suite['test_command']
+ )
suite['name'] = match.group('name')
- suite['run_with_dart_aot'] = 'run_with_dart_aot' in suite and suite['run_with_dart_aot'] == 'true'
+ suite['run_with_dart_aot'] = (
+ 'run_with_dart_aot' in suite and suite['run_with_dart_aot'] == 'true'
+ )
suite['test_far_file'] = match.group('test_far_file')
if 'packages' not in suite:
- suite['packages'] = [ suite['package'] ]
+ suite['packages'] = [suite['package']]
suite['package_basenames'] = []
for path in suite['packages']:
# Captures the FAR name (long/path/to/far/file/actual_far.far would output actual_far.far)
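The well-formedness check above is a pure-regex gate, so it can be exercised outside the recipe engine. A minimal standalone sketch (the pattern is copied verbatim from the hunk; the sample commands are invented):

import re

TEST_COMMAND_RE = re.compile(
    r'^(test run) (?P<test_far_file>fuchsia-pkg://[0-9a-z\-_\.]+/'
    r'(?P<name>[0-9a-z\-_\.]+)#meta/[0-9a-z\-_\.]+(\.cm|\.cmx))'
    r'( +[0-9a-zA-Z\-_*\.: =]+)?$'
)

# A well-formed command parses into its package name and far file.
m = TEST_COMMAND_RE.match(
    'test run fuchsia-pkg://fuchsia.com/pkg#meta/pkg.cmx --some-flag'
)
assert m and m.group('name') == 'pkg'
assert m.group('test_far_file') == 'fuchsia-pkg://fuchsia.com/pkg#meta/pkg.cmx'

# Shell metacharacters such as ';', '$(' and backticks fall outside the
# allowed character classes, so injection attempts fail to match and the
# recipe raises a StepFailure instead of running them.
assert TEST_COMMAND_RE.match(
    'test run fuchsia-pkg://fuchsia.com/pkg#meta/pkg.cmx; suspicious command'
) is None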
@@ -180,7 +208,9 @@
addFlutterTests()
cas_tree.create_links("create tree of runfiles")
- cas_hash = api.cas_util.upload(cas_tree.root, step_name='archive FEMU Run Files')
+ cas_hash = api.cas_util.upload(
+ cas_tree.root, step_name='archive FEMU Run Files'
+ )
return test_suites, root_dir, cas_hash
@@ -370,7 +400,8 @@
"""
checkout = GetCheckoutPath(api)
build_script = str(
- checkout.join('flutter/tools/fuchsia/build_fuchsia_artifacts.py'))
+ checkout.join('flutter/tools/fuchsia/build_fuchsia_artifacts.py')
+ )
git_rev = api.buildbucket.gitiles_commit.id or 'HEAD'
BuildAndTestFuchsia(api, build_script, git_rev)
@@ -380,22 +411,22 @@
checkout = GetCheckoutPath(api)
api.file.rmtree('clobber build output', checkout.join('out'))
api.file.ensure_directory('ensure checkout cache', cache_root)
- dart_bin = checkout.join('third_party', 'dart', 'tools', 'sdks', 'dart-sdk',
- 'bin')
+ dart_bin = checkout.join(
+ 'third_party', 'dart', 'tools', 'sdks', 'dart-sdk', 'bin'
+ )
ffx_isolate_dir = api.path.mkdtemp('ffx_isolate_files')
env = {
- 'FFX_ISOLATE_DIR': ffx_isolate_dir,
+ 'FFX_ISOLATE_DIR': ffx_isolate_dir,
}
env_prefixes = {'PATH': [dart_bin]}
api.repo_util.engine_checkout(cache_root, env, env_prefixes)
# Various scripts we run assume access to depot_tools on path for `ninja`.
- with api.context(
- cwd=cache_root, env=env,
- env_prefixes=env_prefixes), api.depot_tools.on_path():
+ with api.context(cwd=cache_root, env=env,
+ env_prefixes=env_prefixes), api.depot_tools.on_path():
if api.platform.is_linux and api.properties.get('build_fuchsia', True):
BuildFuchsia(api)
@@ -407,7 +438,8 @@
output_props = struct_pb2.Struct()
output_props['cas_output_hash'] = 'deadbeef'
build = api.buildbucket.try_build_message(
- builder='FEMU Test', project='flutter')
+ builder='FEMU Test', project='flutter'
+ )
build.output.CopyFrom(build_pb2.Build.Output(properties=output_props))
def ffx_repo_list_step_data(step_name):
@@ -655,26 +687,35 @@
git_url='https://github.com/flutter/engine',
git_ref='refs/pull/1/head',
clobber=False,
- ), clobber=False,),
+ ),
+ clobber=False,
+ ),
api.step_data(
'retrieve list of test suites.parse',
api.json.output([{
- 'package': 'ordinary_package1.far',
- 'test_command': 'suspicious command'
+ 'package': 'ordinary_package1.far',
+ 'test_command': 'suspicious command'
}, {
- 'package': 'ordinary_package2.far',
- 'test_command': 'test run fuchsia-pkg://fuchsia.com/ordinary_package2#meta/ordinary_package2.cmx; suspicious command'
+ 'package':
+ 'ordinary_package2.far',
+ 'test_command':
+ 'test run fuchsia-pkg://fuchsia.com/ordinary_package2#meta/ordinary_package2.cmx; suspicious command'
}, {
- 'package': 'ordinary_package3.far',
- 'test_command': 'test run fuchsia-pkg://fuchsia.com/ordinary_package3#meta/ordinary_package3.cmx $(suspicious command)'
+ 'package':
+ 'ordinary_package3.far',
+ 'test_command':
+ 'test run fuchsia-pkg://fuchsia.com/ordinary_package3#meta/ordinary_package3.cmx $(suspicious command)'
}, {
- 'package': 'ordinary_package4.far',
- 'test_command': 'test run fuchsia-pkg://fuchsia.com/ordinary_package4#meta/ordinary_package4.cmx `suspicious command`'
+ 'package':
+ 'ordinary_package4.far',
+ 'test_command':
+ 'test run fuchsia-pkg://fuchsia.com/ordinary_package4#meta/ordinary_package4.cmx `suspicious command`'
}])
),
api.step_data(
'retrieve list of test suites.read',
- api.file.read_text('''
+ api.file.read_text(
+ '''
- package: ordinary_package1.far
test_command: suspicious command
- package: ordinary_package2.far
@@ -682,7 +723,8 @@
- package: ordinary_package3.far
test_command: test run fuchsia-pkg://fuchsia.com/ordinary_package3#meta/ordinary_package3.cmx $(suspicious command)
- package: ordinary_package4.far
- test_command: test run fuchsia-pkg://fuchsia.com/ordinary_package4#meta/ordinary_package4.cmx `suspicious command`''')
+ test_command: test run fuchsia-pkg://fuchsia.com/ordinary_package4#meta/ordinary_package4.cmx `suspicious command`'''
+ )
),
api.step_data(
'read manifest',
diff --git a/recipes/engine/framework_smoke.py b/recipes/engine/framework_smoke.py
index 4f7dc7e..478c9a0 100644
--- a/recipes/engine/framework_smoke.py
+++ b/recipes/engine/framework_smoke.py
@@ -34,7 +34,7 @@
cache_root = api.path['cache'].join('builder')
api.file.ensure_directory('Ensure checkout cache', cache_root)
env = {
- 'FLUTTER_PREBUILT_DART_SDK': 'True',
+ 'FLUTTER_PREBUILT_DART_SDK': 'True',
}
env_prefixes = {}
diff --git a/recipes/engine/scenarios.py b/recipes/engine/scenarios.py
index ed1f919..5bc7ada 100644
--- a/recipes/engine/scenarios.py
+++ b/recipes/engine/scenarios.py
@@ -52,17 +52,23 @@
with api.context(env=env):
api.step('gn %s' % ' '.join(args), gn_cmd)
+
def RunAndroidUnitTests(api, env, env_prefixes):
"""Runs the unit tests for the Android embedder on a x64 Android Emulator."""
engine_checkout = GetCheckoutPath(api)
test_dir = engine_checkout.join('flutter', 'testing')
- exe_path = engine_checkout.join('out', 'android_debug_x64', 'flutter_shell_native_unittests')
+ exe_path = engine_checkout.join(
+ 'out', 'android_debug_x64', 'flutter_shell_native_unittests'
+ )
with api.context(cwd=test_dir, env=env, env_prefixes=env_prefixes):
result = api.step(
- 'Android Unit Tests',
- ['./run_tests.py', '--android-variant', 'android_debug_x64', '--type', 'android', '--adb-path', env['ADB_PATH']]
+ 'Android Unit Tests', [
+ './run_tests.py', '--android-variant', 'android_debug_x64',
+ '--type', 'android', '--adb-path', env['ADB_PATH']
+ ]
)
+
def RunAndroidScenarioTests(api, env, env_prefixes):
"""Runs the scenario test app on a x64 Android emulator.
@@ -78,13 +84,15 @@
# file containing the python scripts.
gradle_home_bin_dir = scenario_app_tests.join('android', 'gradle-home', 'bin')
with api.context(cwd=scenario_app_tests,
- env_prefixes={'PATH': [gradle_home_bin_dir]}), api.step.defer_results():
+ env_prefixes={'PATH': [gradle_home_bin_dir]}
+ ), api.step.defer_results():
result = api.step(
'Scenario App Integration Tests',
['./run_android_tests.sh', 'android_debug_x64'],
)
+
def RunSteps(api, properties, env_properties):
# Collect memory/cpu/process after task execution.
api.os_utils.collect_os_info()
@@ -100,8 +108,8 @@
)
android_home = checkout.join('third_party', 'android_tools', 'sdk')
env = {
- 'ANDROID_HOME': str(android_home),
- 'FLUTTER_PREBUILT_DART_SDK': 'True',
+ 'ANDROID_HOME': str(android_home),
+ 'FLUTTER_PREBUILT_DART_SDK': 'True',
}
env_prefixes = {'PATH': [dart_bin]}
@@ -115,7 +123,10 @@
with api.context(cwd=cache_root, env=env,
env_prefixes=env_prefixes), api.depot_tools.on_path():
RunGN(api, '--android', '--android-cpu=x64', '--no-lto')
- Build(api, 'android_debug_x64', 'scenario_app', 'flutter_shell_native_unittests')
+ Build(
+ api, 'android_debug_x64', 'scenario_app',
+ 'flutter_shell_native_unittests'
+ )
RunAndroidUnitTests(api, env, env_prefixes)
RunAndroidScenarioTests(api, env, env_prefixes)
@@ -134,27 +145,24 @@
)
avd_api_version = '31'
yield api.test(
- 'without_failure_upload',
- api.properties(
- dependencies=[
- {'dependency':'android_virtual_device', 'version':'31'},
- ]
- ),
- api.buildbucket.ci_build(
- builder='Linux Engine',
- git_repo='https://flutter.googlesource.com/mirrors/engine',
- project='flutter',
- revision='abcd1234',
- ),
- api.properties(
- InputProperties(
- goma_jobs='1024',
- ),
- ),
- api.step_data(
- 'start avd.Start Android emulator (API level %s)' % avd_api_version,
- stdout=api.raw_io.output_text(
- 'android_' + avd_api_version + '_google_apis_x86|emulator-5554 started (pid: 17687)'
- )
- ),
+ 'without_failure_upload',
+ api.properties(
+ dependencies=[
+ {'dependency': 'android_virtual_device', 'version': '31'},
+ ]
+ ),
+ api.buildbucket.ci_build(
+ builder='Linux Engine',
+ git_repo='https://flutter.googlesource.com/mirrors/engine',
+ project='flutter',
+ revision='abcd1234',
+ ),
+ api.properties(InputProperties(goma_jobs='1024',),),
+ api.step_data(
+ 'start avd.Start Android emulator (API level %s)' % avd_api_version,
+ stdout=api.raw_io.output_text(
+ 'android_' + avd_api_version +
+ '_google_apis_x86|emulator-5554 started (pid: 17687)'
+ )
+ ),
)
diff --git a/recipes/engine/web_engine.py b/recipes/engine/web_engine.py
index 8a74f58..2270bf4 100644
--- a/recipes/engine/web_engine.py
+++ b/recipes/engine/web_engine.py
@@ -34,10 +34,10 @@
'recipe_engine/step',
]
-
PROPERTIES = InputProperties
ENV_PROPERTIES = EnvProperties
+
def GetCheckoutPath(api):
return api.path['cache'].join('builder', 'src')
@@ -77,21 +77,20 @@
)
with api.context(cwd=cache_root, env=env,
- env_prefixes=env_prefixes), api.depot_tools.on_path():
+ env_prefixes=env_prefixes), api.depot_tools.on_path():
felt_name = 'felt.bat' if api.platform.is_win else 'felt'
- felt_cmd = [
- checkout.join('flutter', 'lib', 'web_ui', 'dev', felt_name)
- ]
+ felt_cmd = [checkout.join('flutter', 'lib', 'web_ui', 'dev', felt_name)]
cas_hash = ''
builds = []
if api.platform.is_linux:
- api.build_util.run_gn(['--build-canvaskit', '--web', '--runtime-mode=release', '--no-goma'],
- checkout)
+ api.build_util.run_gn([
+ '--build-canvaskit', '--web', '--runtime-mode=release', '--no-goma'
+ ], checkout)
api.build_util.build('wasm_release', checkout, [])
wasm_cas_hash = api.shard_util_v2.archive_full_build(
- checkout.join('out', 'wasm_release'),
- 'wasm_release')
+ checkout.join('out', 'wasm_release'), 'wasm_release'
+ )
targets = generate_targets(api, cas_hash, wasm_cas_hash)
# Update dart packages and run tests.
@@ -119,7 +118,8 @@
felt_test.append('--require-skia-gold')
felt_test.append('--browser=safari')
api.step(
- api.test_utils.test_step_name('Run tests on macOS Safari'), felt_test
+ api.test_utils.test_step_name('Run tests on macOS Safari'),
+ felt_test
)
CleanUpProcesses(api)
else:
@@ -148,7 +148,9 @@
properties['command_name'] = 'chrome-unit-linux'
properties['name'] = properties['command_name']
# These are the felt commands which will be used.
- properties['command_args'] = ['test', '--browser=chrome', '--require-skia-gold']
+ properties['command_args'] = [
+ 'test', '--browser=chrome', '--require-skia-gold'
+ ]
properties['recipe'] = 'engine/web_engine_drone'
targets.append(properties)
@@ -158,8 +160,7 @@
properties['name'] = properties['command_name']
# These are the felt commands which will be used.
properties['command_args'] = [
- 'test', '--browser=chrome', '--require-skia-gold',
- '--use-local-canvaskit'
+ 'test', '--browser=chrome', '--require-skia-gold', '--use-local-canvaskit'
]
properties['recipe'] = 'engine/web_engine_drone'
targets.append(properties)
@@ -170,8 +171,7 @@
properties['name'] = properties['command_name']
# These are the felt commands which will be used.
properties['command_args'] = [
- 'test', '--browser=chrome', '--require-skia-gold',
- '--wasm'
+ 'test', '--browser=chrome', '--require-skia-gold', '--wasm'
]
properties['recipe'] = 'engine/web_engine_drone'
targets.append(properties)
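Taken together, the three hunks above just populate per-shard target dicts. A condensed, hypothetical sketch of the pattern (the real code assigns a distinct command_name per shard; the helper below reuses one name purely for illustration):

def chrome_target(extra_args):
  name = 'chrome-unit-linux'
  return {
      'command_name': name,
      'name': name,
      'command_args': ['test', '--browser=chrome', '--require-skia-gold'] +
                      extra_args,
      'recipe': 'engine/web_engine_drone',
  }

targets = [
    chrome_target([]),
    chrome_target(['--use-local-canvaskit']),
    chrome_target(['--wasm']),
]
assert all(t['recipe'] == 'engine/web_engine_drone' for t in targets)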
@@ -189,43 +189,42 @@
def GenTests(api):
yield api.test(
- 'basic',
- api.properties(clobber=True),
- api.buildbucket.try_build(
+ 'basic',
+ api.properties(clobber=True),
+ api.buildbucket.try_build(
project='proj',
builder='try-builder',
bucket='try',
git_repo='https://flutter.googlesource.com/mirrors/engine',
revision='a' * 40,
build_number=123,
- ),
+ ),
)
yield api.test(
- 'mac-post-submit',
- api.properties(goma_jobs='200', gclient_variables={'download_emsdk': True}),
- api.platform('mac', 64),
- api.runtime(is_experimental=False),
- api.buildbucket.try_build(
+ 'mac-post-submit',
+ api.properties(
+ goma_jobs='200', gclient_variables={'download_emsdk': True}
+ ),
+ api.platform('mac', 64),
+ api.runtime(is_experimental=False),
+ api.buildbucket.try_build(
project='proj',
builder='try-builder',
bucket='try',
git_repo='https://flutter.googlesource.com/mirrors/engine',
revision='a' * 40,
build_number=123,
- ),
+ ),
)
yield api.test(
- 'windows-post-submit',
- api.properties(
- gclient_variables={'download_emsdk': True}
- ),
- api.platform('win', 64),
- api.runtime(is_experimental=False),
- api.buildbucket.ci_build(
+ 'windows-post-submit',
+ api.properties(gclient_variables={'download_emsdk': True}),
+ api.platform('win', 64),
+ api.runtime(is_experimental=False),
+ api.buildbucket.ci_build(
project='flutter',
bucket='prod',
git_repo='https://flutter.googlesource.com/mirrors/engine',
git_ref='refs/heads/main'
- ),
+ ),
)
-
diff --git a/recipes/engine/web_engine_drone.py b/recipes/engine/web_engine_drone.py
index a521227..82487b5 100644
--- a/recipes/engine/web_engine_drone.py
+++ b/recipes/engine/web_engine_drone.py
@@ -77,15 +77,15 @@
# Download local CanvasKit build.
wasm_cas_hash = build.get('wasm_release_cas_hash')
out_dir = checkout.join('out')
- api.cas.download('Download CanvasKit build from CAS', wasm_cas_hash, out_dir)
+ api.cas.download(
+ 'Download CanvasKit build from CAS', wasm_cas_hash, out_dir
+ )
command_args = build.get('command_args', ['test'])
command_name = build.get('command_name', 'test')
felt_name = 'felt.bat' if api.platform.is_win else 'felt'
- felt_cmd = [
- checkout.join('flutter', 'lib', 'web_ui', 'dev', felt_name)
- ]
+ felt_cmd = [checkout.join('flutter', 'lib', 'web_ui', 'dev', felt_name)]
felt_cmd.extend(command_args)
with api.context(cwd=cache_root, env=env,
@@ -100,18 +100,18 @@
def GenTests(api):
build = {
- 'command_args': ['test', '--browser=chrome', '--require-skia-gold'],
- 'command_name': 'chrome-unit-linux',
- 'git_ref': 'refs/heads/master',
- 'inherited_dependencies': [
- {'dependency': 'chrome_and_driver'},
- {'dependency': 'firefox'},
- {'dependency': 'goldctl'},
- {'dependency': 'open_jdk'},
- {'dependency': 'gradle_cache'}
- ],
- 'name': 'chrome-unit-linux',
- 'wasm_release_cas_hash': '7a4348cb77de16aac05401c635950c2a75566e3f268fd60e7113b0c70cd4fbcb/87',
+ 'command_args': ['test', '--browser=chrome', '--require-skia-gold'],
+ 'command_name': 'chrome-unit-linux',
+ 'git_ref': 'refs/heads/master',
+ 'inherited_dependencies': [
+ {'dependency': 'chrome_and_driver'},
+ {'dependency': 'firefox'},
+ {'dependency': 'goldctl'},
+ {'dependency': 'open_jdk'},
+ {'dependency': 'gradle_cache'}
+ ],
+ 'name': 'chrome-unit-linux',
+ 'wasm_release_cas_hash':
+ '7a4348cb77de16aac05401c635950c2a75566e3f268fd60e7113b0c70cd4fbcb/87',
'web_dependencies': ['chrome']
}
yield api.test(
diff --git a/recipes/engine/web_engine_framework.py b/recipes/engine/web_engine_framework.py
index 5d44d01..62257cb 100644
--- a/recipes/engine/web_engine_framework.py
+++ b/recipes/engine/web_engine_framework.py
@@ -96,7 +96,9 @@
# Create new environment variables for Framework.
# Note that the `dart binary` location is not the same for Framework and the
# engine.
- f_env, f_env_prefix = api.repo_util.flutter_environment(flutter_checkout_path)
+ f_env, f_env_prefix = api.repo_util.flutter_environment(
+ flutter_checkout_path
+ )
f_env['FLUTTER_CLONE_REPO_PATH'] = flutter_checkout_path
deps = api.properties.get('dependencies', [])
@@ -121,13 +123,13 @@
# side is kept in `ref`.
targets = generate_targets(api, cas_hash, ref.strip(), url, deps)
with api.step.nest('launch builds') as presentation:
- tasks = api.shard_util_v2.schedule(targets, presentation)
+ tasks = api.shard_util_v2.schedule(targets, presentation)
with api.step.nest('collect builds') as presentation:
- build_results = api.shard_util_v2.collect(tasks)
+ build_results = api.shard_util_v2.collect(tasks)
api.display_util.display_subbuilds(
- step_name='display builds',
- subbuilds=build_results,
- raise_on_failure=True,
+ step_name='display builds',
+ subbuilds=build_results,
+ raise_on_failure=True,
)
@@ -147,14 +149,12 @@
}
drone_props['git_url'] = url
drone_props['git_ref'] = ref
- targets.append(
- {
- 'name': task_name,
- 'properties': drone_props,
- 'recipe': 'flutter/flutter_drone',
- 'drone_dimensions': api.properties.get('drone_dimensions', []),
- }
- )
+ targets.append({
+ 'name': task_name,
+ 'properties': drone_props,
+ 'recipe': 'flutter/flutter_drone',
+ 'drone_dimensions': api.properties.get('drone_dimensions', []),
+ })
return targets
@@ -163,7 +163,9 @@
'linux-pre-submit',
api.repo_util.flutter_environment_data(api.path['cache'].join('flutter')),
api.properties(
- dependencies=[{'dependency': 'chrome_and_driver', 'version': 'version:96.2'}],
+ dependencies=[{
+ 'dependency': 'chrome_and_driver', 'version': 'version:96.2'
+ }],
shard='web_tests',
subshards=['0', '1_last'],
goma_jobs='200',
diff --git a/recipes/engine_v2/builder.py b/recipes/engine_v2/builder.py
index 30e1f5e..2f5b93d 100644
--- a/recipes/engine_v2/builder.py
+++ b/recipes/engine_v2/builder.py
@@ -65,7 +65,6 @@
ENV_PROPERTIES = EnvProperties
ANDROID_ARTIFACTS_BUCKET = 'download.flutter.io'
-
# Relative paths used to mock paths for testing.
MOCK_JAR_PATH = (
'io/flutter/x86_debug/'
@@ -86,12 +85,8 @@
"""Runs sub-builds generators."""
# Run pub on all of the pub_dirs.
for pub in pub_dirs:
- pub_dir = api.path.abs_to_path(
- api.path.dirname(
- checkout.join(pub))
- )
- with api.context(env=env, env_prefixes=env_prefixes,
- cwd=pub_dir):
+ pub_dir = api.path.abs_to_path(api.path.dirname(checkout.join(pub)))
+ with api.context(env=env, env_prefixes=env_prefixes, cwd=pub_dir):
api.step('dart pub get', ['dart', 'pub', 'get'])
for generator_task in generator_tasks:
# Generators must run from inside flutter folder.
@@ -141,8 +136,8 @@
# of files.
api.path.mock_add_paths(
api.path['cache'].join(
- 'builder/src/out/android_jit_release_x86/zip_archives/download.flutter.io'),
- DIRECTORY
+ 'builder/src/out/android_jit_release_x86/zip_archives/download.flutter.io'
+ ), DIRECTORY
)
ninja_tool = {
@@ -156,10 +151,8 @@
if gn:
api.build_util.run_gn(build.get('gn'), checkout)
ninja = build.get('ninja')
- ninja_tool[ninja.get('tool', 'ninja')](
- ninja.get('config'),
- checkout,
- ninja.get('targets', []))
+ ninja_tool[ninja.get('tool', 'ninja')](
+ ninja.get('config'), checkout, ninja.get('targets', [])
+ )
generator_tasks = build.get('generators', {}).get('tasks', [])
pub_dirs = build.get('generators', {}).get('pub_dirs', [])
archives = build.get('archives', [])
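The reflowed subscript-call is the only dense line in this hunk: it looks up a build function by tool name and invokes it with a shared (config, checkout, targets) signature. A standalone sketch of the dispatch, with a stand-in build function:

def build_with_ninja(config, checkout, targets):
  # Stand-in body; the recipe shells out to the real build tool here.
  return ('ninja', config, targets)

ninja_tool = {'ninja': build_with_ninja}
ninja = {'config': 'ios_debug', 'targets': []}
# Mirrors ninja_tool[ninja.get('tool', 'ninja')](...) above; a build
# config without a 'tool' key falls back to plain ninja.
result = ninja_tool[ninja.get('tool', 'ninja')](
    ninja.get('config'), None, ninja.get('targets', [])
)
assert result == ('ninja', 'ios_debug', [])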
@@ -175,11 +168,12 @@
api.flutter_bcid.report_stage('upload-complete')
# Archive full build. This is inefficient but necessary for global generators.
full_build_hash = api.shard_util_v2.archive_full_build(
- checkout.join('out', build.get('name')), build.get('name'))
+ checkout.join('out', build.get('name')), build.get('name')
+ )
outputs['full_build'] = full_build_hash
-def Archive(api, checkout, archive_config):
+def Archive(api, checkout, archive_config):
paths = api.archives.engine_v2_gcs_paths(checkout, archive_config)
# Sign artifacts if running on mac and a release candidate branch.
is_release_branch = api.repo_util.is_release_candidate_branch(
@@ -187,16 +181,15 @@
)
if api.platform.is_mac and is_release_branch:
signing_paths = [
- path.local for path in paths
+ path.local
+ for path in paths
if api.signing.requires_signing(path.local)
]
api.signing.code_sign(signing_paths)
for path in paths:
api.archives.upload_artifact(path.local, path.remote)
- api.flutter_bcid.upload_provenance(
- path.local,
- path.remote
- )
+ api.flutter_bcid.upload_provenance(path.local, path.remote)
+
def RunSteps(api, properties, env_properties):
api.flutter_bcid.report_stage('start')
@@ -207,7 +200,9 @@
# Enable long path support on Windows.
api.os_utils.enable_long_paths()
- env, env_prefixes = api.repo_util.engine_environment(api.path['cache'].join('builder'))
+ env, env_prefixes = api.repo_util.engine_environment(
+ api.path['cache'].join('builder')
+ )
# Engine path is used inconsistently across the engine repo. We'll start
# with [cache]/builder and adjust the code to use it consistently.
@@ -231,35 +226,26 @@
def GenTests(api):
build = {
- "archives": [
- {
- "name": "android_jit_release_x86",
- "type": "gcs",
- "realm": "production",
- "base_path": "out/android_jit_release_x86/zip_archives/",
- "include_paths": [
- "out/android_jit_release_x86/zip_archives/android-x86-jit-release/artifacts.zip",
- "out/android_jit_release_x86/zip_archives/download.flutter.io"
- ]
- }
- ],
- "gn": ["--ios"], "ninja": {"config": "ios_debug", "targets": []},
- "generators": {
- "pub_dirs": ["dev"],
- "tasks": [
- {
- "name": "generator1",
- "scripts": ["script1.sh", "dev/felt.dart"],
- "parameters": ["--argument1"]
- }
+ "archives": [{
+ "name":
+ "android_jit_release_x86", "type":
+ "gcs", "realm":
+ "production", "base_path":
+ "out/android_jit_release_x86/zip_archives/",
+ "include_paths": [
+ "out/android_jit_release_x86/zip_archives/android-x86-jit-release/artifacts.zip",
+ "out/android_jit_release_x86/zip_archives/download.flutter.io"
]
- },
- "tests": [
- {
- "name": "mytest", "script": "myscript.sh",
- "parameters": ["param1", "param2"], "type": "local"
- }
- ]
+ }], "gn": ["--ios"], "ninja": {"config": "ios_debug", "targets": []},
+ "generators": {
+ "pub_dirs": ["dev"], "tasks": [{
+ "name": "generator1", "scripts": ["script1.sh", "dev/felt.dart"],
+ "parameters": ["--argument1"]
+ }]
+ }, "tests": [{
+ "name": "mytest", "script": "myscript.sh",
+ "parameters": ["param1", "param2"], "type": "local"
+ }]
}
yield api.test(
'basic',
@@ -290,9 +276,8 @@
api.step_data(
'Identify branches.git branch',
stdout=api.raw_io
- .output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
+ .output_text('branch1\nbranch2\nflutter-3.2-candidate.5')
),
-
)
yield api.test(
'monorepo',
@@ -309,7 +294,8 @@
build_custom["gclient_variables"] = {"example_custom_var": True}
build_custom["tests"] = []
yield api.test(
- 'dart-internal-flutter', api.properties(build=build, no_goma=True),
+ 'dart-internal-flutter',
+ api.properties(build=build, no_goma=True),
api.buildbucket.ci_build(
project='dart-internal',
bucket='flutter',
diff --git a/recipes/engine_v2/engine_v2.py b/recipes/engine_v2/engine_v2.py
index bc23034..15d649e 100644
--- a/recipes/engine_v2/engine_v2.py
+++ b/recipes/engine_v2/engine_v2.py
@@ -118,9 +118,13 @@
api.repo_util.is_release_candidate_branch(checkout_path)):
# Generators, archives and codesign require a full engine checkout.
full_engine_checkout = api.path['cache'].join('builder')
- api.file.ensure_directory('Ensure full engine checkout folder', full_engine_checkout)
+ api.file.ensure_directory(
+ 'Ensure full engine checkout folder', full_engine_checkout
+ )
if api.monorepo.is_monorepo_ci_build or api.monorepo.is_monorepo_try_build:
- env, env_prefixes = api.repo_util.monorepo_environment(full_engine_checkout)
+ env, env_prefixes = api.repo_util.monorepo_environment(
+ full_engine_checkout
+ )
api.repo_util.monorepo_checkout(full_engine_checkout, env, env_prefixes)
full_engine_checkout = full_engine_checkout.join('engine')
else:
@@ -129,7 +133,9 @@
# The checkouts use a cache that may contain stale artifacts in the out
# directory. Clean it to ensure the build starts from an empty out
# folder.
- api.file.rmtree('Clobber build output', full_engine_checkout.join('src', 'out'))
+ api.file.rmtree(
+ 'Clobber build output', full_engine_checkout.join('src', 'out')
+ )
if generators:
# Download sub-builds
@@ -181,19 +187,22 @@
# release, debug or profile depending on the runtime mode.
# So far we are uploading files only.
files_to_archive = api.archives.global_generator_paths(
- full_engine_checkout.join('src'), archives)
+ full_engine_checkout.join('src'), archives
+ )
# Sign artifacts if running on mac.
is_release_candidate = api.repo_util.is_release_candidate_branch(
full_engine_checkout.join('src', 'flutter')
)
signing_paths = [
- path.local for path in files_to_archive
- if api.signing.requires_signing(path.local)
+ path.local
+ for path in files_to_archive
+ if api.signing.requires_signing(path.local)
]
if api.platform.is_mac and is_release_candidate:
signing_paths = [
- path.local for path in files_to_archive
+ path.local
+ for path in files_to_archive
if api.signing.requires_signing(path.local)
]
api.signing.code_sign(signing_paths)
@@ -223,9 +232,15 @@
for generator_task in generators['tasks']:
# Generators must run from inside flutter folder.
# If platform is mac we need to run the generator from an xcode context.
- with api.context(env=env, env_prefixes=env_prefixes, cwd=full_engine_checkout):
- cmd = [generator_task.get('language')] if generator_task.get('language') else []
- api.file.listdir('List checkout', full_engine_checkout.join('src', 'out'), recursive=True)
+ with api.context(env=env, env_prefixes=env_prefixes,
+ cwd=full_engine_checkout):
+ cmd = ([generator_task.get('language')]
+ if generator_task.get('language') else [])
+ api.file.listdir(
+ 'List checkout',
+ full_engine_checkout.join('src', 'out'),
+ recursive=True
+ )
script = generator_task.get('script')
full_path_script = full_engine_checkout.join('src', script)
cmd.append(full_path_script)
@@ -256,22 +271,14 @@
"generators": [{"name": "generator1", "script": "script1.sh"}]
}]
generators = {
- "tasks":
- [
- {
- "language": "python3",
- "name": "Debug-FlutterMacOS.framework",
- "parameters": [
- "--variant",
- "host_profile",
- "--type",
- "engine",
- "--engine-capture-core-dump"
- ],
- "script": "flutter/sky/tools/create_macos_framework.py",
- "type": "local"
- }
- ]
+ "tasks": [{
+ "language": "python3", "name": "Debug-FlutterMacOS.framework",
+ "parameters": [
+ "--variant", "host_profile", "--type", "engine",
+ "--engine-capture-core-dump"
+ ], "script": "flutter/sky/tools/create_macos_framework.py",
+ "type": "local"
+ }]
}
archives = [{
'source': '/a/b/c.txt', 'destination': 'bucket/c.txt', 'name': 'c.txt'
@@ -414,13 +421,9 @@
),
api.step_data(
'Read build config file',
- api.file.read_json(
- {
- 'builds': builds,
- 'archives': archives,
- 'generators': generators
- }
- )
+ api.file.read_json({
+ 'builds': builds, 'archives': archives, 'generators': generators
+ })
),
api.step_data(
'Identify branches.git rev-parse',
@@ -476,8 +479,11 @@
collect_step="collect tests",
),
api.step_data(
- 'Read build config file', api.file.read_json({'builds': builds,
- 'tests': tests, 'generators': generators, 'archives': archives})
+ 'Read build config file',
+ api.file.read_json({
+ 'builds': builds, 'tests': tests, 'generators': generators,
+ 'archives': archives
+ })
),
api.step_data(
'Identify branches.git branch',
diff --git a/recipes/engine_v2/tester.py b/recipes/engine_v2/tester.py
index 0db60b7..7bab61b 100644
--- a/recipes/engine_v2/tester.py
+++ b/recipes/engine_v2/tester.py
@@ -92,6 +92,7 @@
flutter.join('dev', 'bots', 'test.dart')]
)
+
def GenTests(api):
build = {'shard': 'framework_coverage'}
yield api.test(
@@ -149,8 +150,9 @@
'framework_tests',
'subshard':
'slow',
- 'test_dependencies': [{"dependency": "android_sdk",
- "version": "version:33v6"}],
+ 'test_dependencies': [{
+ "dependency": "android_sdk", "version": "version:33v6"
+ }],
}
yield api.test(
diff --git a/recipes/engine_v2/tester_engine.py b/recipes/engine_v2/tester_engine.py
index acc5c5b..cf81033 100644
--- a/recipes/engine_v2/tester_engine.py
+++ b/recipes/engine_v2/tester_engine.py
@@ -71,6 +71,7 @@
PROPERTIES = InputProperties
ENV_PROPERTIES = EnvProperties
+
def run_tests(api, test, checkout, env, env_prefixes):
"""Runs sub-build tests."""
# Install dependencies.
@@ -81,11 +82,7 @@
# Download build dependencies.
for dep in test.get('resolved_deps', []):
out_hash = dep.get('full_build')
- api.cas.download(
- f'Download {out_hash}',
- out_hash,
- out_path
- )
+ api.cas.download(f'Download {out_hash}', out_hash, out_path)
for task in test.get('tasks', []):
command = [task.get('language')] if task.get('language') else []
# Ideally local tests should be completely hermetic and in theory we can run
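For context on the collapsed api.cas.download call: each resolved dependency carries a CAS digest under 'full_build', and every digest is fetched into the shared out directory. A minimal sketch with the download call stubbed out (names mirror the hunk; nothing here is recipe-engine API):

def download_resolved_deps(cas_download, test, out_path):
  for dep in test.get('resolved_deps', []):
    out_hash = dep.get('full_build')
    cas_download('Download %s' % out_hash, out_hash, out_path)

downloaded = []
download_resolved_deps(
    lambda name, digest, path: downloaded.append(digest),
    {'resolved_deps': [{'full_build': 'deadbeef/42'}]},
    'out',
)
assert downloaded == ['deadbeef/42']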
@@ -124,7 +121,9 @@
api.file.rmtree('Clobber build output', checkout.join('out'))
cache_root = api.path['cache'].join('builder')
api.file.ensure_directory('Ensure checkout cache', cache_root)
- env, env_prefixes = api.repo_util.engine_environment(api.path['cache'].join('builder'))
+ env, env_prefixes = api.repo_util.engine_environment(
+ api.path['cache'].join('builder')
+ )
# Engine path is used inconsistently across the engine repo. We'll start
# with [cache]/builder and adjust the code to use it consistently.
env['ENGINE_PATH'] = api.path['cache'].join('builder')
@@ -134,31 +133,19 @@
def GenTests(api):
build = {
- "test_dependencies": [
- {
- "dependency": "chrome_and_driver", "version": "version:111.0"
- }
- ],
- "resolved_deps": [
- {
- "full_build": "f5b9de6cc9f4b05833aa128717d3112c133e2363e4303df9a1951540c79e72a3/87"
- },
- {
- "full_build": "32b40edba8bfbf7729374eaa4aa44bf0d89c385f080f64b56c9fbce7172e4a71/84"
- }
- ],
- 'tasks': [
- {
- 'language': 'dart',
- 'name': 'felt test: chrome-unit-linux',
- 'parameters': [
- 'test',
- '--browser=chrome',
- '--require-skia-gold'
- ],
- 'script': 'flutter/lib/web_ui/dev/felt'
- }
- ]
+ "test_dependencies": [{
+ "dependency": "chrome_and_driver", "version": "version:111.0"
+ }], "resolved_deps": [{
+ "full_build":
+ "f5b9de6cc9f4b05833aa128717d3112c133e2363e4303df9a1951540c79e72a3/87"
+ }, {
+ "full_build":
+ "32b40edba8bfbf7729374eaa4aa44bf0d89c385f080f64b56c9fbce7172e4a71/84"
+ }], 'tasks': [{
+ 'language': 'dart', 'name': 'felt test: chrome-unit-linux',
+ 'parameters': ['test', '--browser=chrome', '--require-skia-gold'],
+ 'script': 'flutter/lib/web_ui/dev/felt'
+ }]
}
yield api.test(
'basic',
diff --git a/recipes/firebaselab/firebaselab.py b/recipes/firebaselab/firebaselab.py
index f4f1af2..a72d0cf 100644
--- a/recipes/firebaselab/firebaselab.py
+++ b/recipes/firebaselab/firebaselab.py
@@ -93,7 +93,8 @@
with api.context(env=env, env_prefixes=env_prefixes, cwd=checkout_path):
api.step('flutter doctor', ['flutter', 'doctor', '-v'])
api.step(
- 'download dependencies', ['flutter', 'update-packages', '-v'],
+ 'download dependencies',
+ ['flutter', 'update-packages', '-v'],
infra_step=True,
)
@@ -129,7 +130,9 @@
# allow CI to pass in that case rather than block the tree.
infra_failure_codes = (1, 15, 20)
try:
- api.retry.wrap(run_firebase, max_attempts=3, retriable_codes=infra_failure_codes)
+ api.retry.wrap(
+ run_firebase, max_attempts=3, retriable_codes=infra_failure_codes
+ )
except api.step.StepFailure:
if api.step.active_result.retcode in infra_failure_codes:
# FTL is having some infra outage. Don't block the tree. Still
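The reflowed api.retry.wrap call retries run_firebase only while it exits with one of the FTL infra codes. A toy model of that retry contract, in plain Python rather than the recipe_engine retry module (the exact semantics of api.retry.wrap are assumed from this call site and the tests below):

class StepFailure(Exception):
  def __init__(self, retcode):
    super().__init__('retcode %d' % retcode)
    self.retcode = retcode

def wrap(func, max_attempts, retriable_codes):
  # Re-run func while it fails with a retriable code; the final failure
  # propagates so callers can still inspect the retcode.
  for attempt in range(max_attempts):
    try:
      return func()
    except StepFailure as e:
      if e.retcode not in retriable_codes or attempt == max_attempts - 1:
        raise

calls = []
def flaky():
  calls.append(1)
  raise StepFailure(15)

try:
  wrap(flaky, max_attempts=3, retriable_codes=(1, 15, 20))
except StepFailure as e:
  # Three attempts, matching the three 'gcloud firebase' steps in the
  # succeed_on_infra_failure test case below.
  assert e.retcode == 15 and len(calls) == 3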
@@ -153,29 +156,19 @@
api.properties(task_name='the_task'),
# A return code of 1 from grep means no error messages were found in
# logcat and is the only acceptable return code.
- api.step_data(
- 'test_execution.analyze_logcat', retcode=1
- ),
+ api.step_data('test_execution.analyze_logcat', retcode=1),
)
yield api.test(
'succeed_on_infra_failure',
api.repo_util.flutter_environment_data(),
- api.step_data(
- 'test_execution.gcloud firebase', retcode=15
- ),
- api.step_data(
- 'test_execution.gcloud firebase (2)', retcode=15
- ),
- api.step_data(
- 'test_execution.gcloud firebase (3)', retcode=15
- ),
+ api.step_data('test_execution.gcloud firebase', retcode=15),
+ api.step_data('test_execution.gcloud firebase (2)', retcode=15),
+ api.step_data('test_execution.gcloud firebase (3)', retcode=15),
status='FAILURE'
)
yield api.test(
'failure 10',
api.repo_util.flutter_environment_data(),
- api.step_data(
- 'test_execution.gcloud firebase', retcode=10
- ),
+ api.step_data('test_execution.gcloud firebase', retcode=10),
status='FAILURE'
)
diff --git a/recipes/flutter/android_views.py b/recipes/flutter/android_views.py
index 500a282..910b815 100644
--- a/recipes/flutter/android_views.py
+++ b/recipes/flutter/android_views.py
@@ -28,6 +28,7 @@
PROPERTIES = InputProperties
ENV_PROPERTIES = EnvProperties
+
def RunSteps(api, properties, env_properties):
# Collect memory/cpu/process before task execution.
api.os_utils.collect_os_info()
@@ -74,18 +75,17 @@
['flutter', 'update-packages', '-v'],
infra_step=True,
)
- views_test_dir = checkout_path.join('dev', 'integration_tests', 'android_views')
- with api.context(env=env, env_prefixes=env_prefixes, cwd=views_test_dir), api.step.defer_results():
+ views_test_dir = checkout_path.join(
+ 'dev', 'integration_tests', 'android_views'
+ )
+ with api.context(env=env, env_prefixes=env_prefixes,
+ cwd=views_test_dir), api.step.defer_results():
api.step(
'Android Views Integration Tests',
[
- 'flutter',
- 'drive',
- '--browser-name=android-chrome',
- '--android-emulator',
- '--no-start-paused',
- '--purge-persistent-cache',
- '--device-timeout=30'
+ 'flutter', 'drive', '--browser-name=android-chrome',
+ '--android-emulator', '--no-start-paused',
+ '--purge-persistent-cache', '--device-timeout=30'
],
timeout=700,
)
@@ -95,46 +95,43 @@
# Collect memory/cpu/process after task execution.
api.os_utils.collect_os_info()
+
def GenTests(api):
checkout_path = api.path['start_dir'].join('flutter sdk')
avd_api_version = '31'
yield api.test(
'flutter_drive_clean_exit',
api.properties(
- dependencies=[
- {'dependency':'android_sdk'},
- {'dependency':'android_virtual_device', 'version':'31'},
- {'dependency':'curl'}
- ]
+ dependencies=[{'dependency': 'android_sdk'}, {
+ 'dependency': 'android_virtual_device', 'version': '31'
+ }, {'dependency': 'curl'}]
),
api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.step_data(
'start avd.Start Android emulator (API level %s)' % avd_api_version,
stdout=api.raw_io.output_text(
- 'android_' + avd_api_version + '_google_apis_x86|emulator-5554 started (pid: 17687)'
+ 'android_' + avd_api_version +
+ '_google_apis_x86|emulator-5554 started (pid: 17687)'
)
),
)
yield api.test(
'flutter_drive_zombie_process',
api.properties(
- dependencies=[
- {'dependency':'android_sdk'},
- {'dependency':'android_virtual_device', 'version':'31'},
- {'dependency':'curl'}
- ]
+ dependencies=[{'dependency': 'android_sdk'}, {
+ 'dependency': 'android_virtual_device', 'version': '31'
+ }, {'dependency': 'curl'}]
),
api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.step_data(
'start avd.Start Android emulator (API level %s)' % avd_api_version,
stdout=api.raw_io.output_text(
- 'android_' + avd_api_version + '_google_apis_x86|emulator-5554 started (pid: 17687)'
+ 'android_' + avd_api_version +
+ '_google_apis_x86|emulator-5554 started (pid: 17687)'
)
),
api.step_data(
'kill and cleanup avd.list processes',
- stdout=api.raw_io.output_text(
- '12345 qemu-system blah'
- )
+ stdout=api.raw_io.output_text('12345 qemu-system blah')
),
)
diff --git a/recipes/flutter/coverage.py b/recipes/flutter/coverage.py
index ff3aede..922acee 100644
--- a/recipes/flutter/coverage.py
+++ b/recipes/flutter/coverage.py
@@ -48,18 +48,15 @@
)
lcov_path = packages_path.join('coverage', 'lcov.info')
api.gsutil.upload(
- bucket='flutter_infra_release',
- source=lcov_path,
- dest='flutter/coverage/lcov.info',
- link_name='lcov.info',
- multithreaded=True,
- name='upload lcov.info',
- unauthenticated_url=True
- )
+ bucket='flutter_infra_release',
+ source=lcov_path,
+ dest='flutter/coverage/lcov.info',
+ link_name='lcov.info',
+ multithreaded=True,
+ name='upload lcov.info',
+ unauthenticated_url=True
+ )
def GenTests(api):
- yield api.test(
- 'coverage',
- api.repo_util.flutter_environment_data()
- )
+ yield api.test('coverage', api.repo_util.flutter_environment_data())
diff --git a/recipes/flutter/deferred_components.py b/recipes/flutter/deferred_components.py
index 8495112..e8dba76 100644
--- a/recipes/flutter/deferred_components.py
+++ b/recipes/flutter/deferred_components.py
@@ -34,6 +34,7 @@
PROPERTIES = InputProperties
ENV_PROPERTIES = EnvProperties
+
def RunSteps(api, properties, env_properties):
# Collect memory/cpu/process before task execution.
api.os_utils.collect_os_info()
@@ -52,7 +53,7 @@
ref=api.properties.get('git_ref'),
)
- avd_api_version = '31' # 31 is the first version that supports x86_64
+ avd_api_version = '31' # 31 is the first version that supports x86_64
for dep in api.properties.get('dependencies', []):
if dep['dependency'] == 'android_virtual_device':
avd_api_version = dep['version']
@@ -95,18 +96,18 @@
'0xeDa85nRhdQfi3iN2dK8PPluwI73z9San_Afuj3CfgC'
)
)
- test_dir = checkout_path.join('dev', 'integration_tests', 'deferred_components_test')
- with api.context(env=env, env_prefixes=env_prefixes, cwd=test_dir), api.step.defer_results():
+ test_dir = checkout_path.join(
+ 'dev', 'integration_tests', 'deferred_components_test'
+ )
+ with api.context(env=env, env_prefixes=env_prefixes,
+ cwd=test_dir), api.step.defer_results():
# These assets are not allowed to be checked into the repo,
# so they are downloaded separately here.
api.step('download assets script', ['./download_assets.sh'])
api.step(
'Deferred components release tests',
- [
- './run_release_test.sh',
- str(bundletool_jar),
- env['ADB_PATH']
- ],
+ ['./run_release_test.sh',
+ str(bundletool_jar), env['ADB_PATH']],
timeout=700,
)
# TODO(garyq): add flutter drive tests after https://github.com/flutter/flutter/issues/88906 is resolved
@@ -124,40 +125,36 @@
yield api.test(
'flutter_release_clean_exit',
api.properties(
- dependencies=[
- {'dependency':'android_sdk'},
- {'dependency':'android_virtual_device', 'version':'31'},
- {'dependency':'curl'}
- ]
+ dependencies=[{'dependency': 'android_sdk'}, {
+ 'dependency': 'android_virtual_device', 'version': '31'
+ }, {'dependency': 'curl'}]
),
api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.step_data(
'start avd.Start Android emulator (API level %s)' % avd_api_version,
stdout=api.raw_io.output_text(
- 'android_' + avd_api_version + '_google_apis_x86|emulator-5554 started (pid: 17687)'
+ 'android_' + avd_api_version +
+ '_google_apis_x86|emulator-5554 started (pid: 17687)'
)
),
)
yield api.test(
'flutter_release_zombie_process',
api.properties(
- dependencies=[
- {'dependency':'android_sdk'},
- {'dependency':'android_virtual_device', 'version':'31'},
- {'dependency':'curl'}
- ]
+ dependencies=[{'dependency': 'android_sdk'}, {
+ 'dependency': 'android_virtual_device', 'version': '31'
+ }, {'dependency': 'curl'}]
),
api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
api.step_data(
'start avd.Start Android emulator (API level %s)' % avd_api_version,
stdout=api.raw_io.output_text(
- 'android_' + avd_api_version + '_google_apis_x86|emulator-5554 started (pid: 17687)'
+ 'android_' + avd_api_version +
+ '_google_apis_x86|emulator-5554 started (pid: 17687)'
)
),
api.step_data(
'kill and cleanup avd.list processes',
- stdout=api.raw_io.output_text(
- '12345 qemu-system blah'
- )
+ stdout=api.raw_io.output_text('12345 qemu-system blah')
),
)
diff --git a/recipes/flutter/flutter_drone.py b/recipes/flutter/flutter_drone.py
index 7daf474..3a62aa6 100644
--- a/recipes/flutter/flutter_drone.py
+++ b/recipes/flutter/flutter_drone.py
@@ -57,8 +57,8 @@
)
api.logs_util.show_logs_stdout(checkout_path.join('error.log'))
api.logs_util.upload_test_metrics(
- checkout_path.join('test_results.json'),
- '%s_%s' % (api.properties.get('shard'), api.properties.get('subshard'))
+ checkout_path.join('test_results.json'), '%s_%s' %
+ (api.properties.get('shard'), api.properties.get('subshard'))
)
@@ -127,11 +127,13 @@
def GenTests(api):
for should_run_reduced in (True, False):
yield api.test(
- 'no_requirements%s' % ( '_reduced' if should_run_reduced else ''), api.repo_util.flutter_environment_data(),
+ 'no_requirements%s' % ('_reduced' if should_run_reduced else ''),
+ api.repo_util.flutter_environment_data(),
api.properties(reduced_test_set=should_run_reduced)
)
yield api.test(
- 'android_sdk%s' % ( '_reduced' if should_run_reduced else ''), api.repo_util.flutter_environment_data(),
+ 'android_sdk%s' % ('_reduced' if should_run_reduced else ''),
+ api.repo_util.flutter_environment_data(),
api.properties(
dependencies=[{'dependency': 'android_sdk'}],
android_sdk=True,
@@ -141,13 +143,18 @@
)
)
yield api.test(
- 'web_engine%s' % ( '_reduced' if should_run_reduced else ''), api.repo_util.flutter_environment_data(),
+ 'web_engine%s' % ('_reduced' if should_run_reduced else ''),
+ api.repo_util.flutter_environment_data(),
api.properties(
- local_web_sdk_cas_hash='abceqwe',
- reduced_test_set=should_run_reduced
+ local_web_sdk_cas_hash='abceqwe',
+ reduced_test_set=should_run_reduced
)
)
yield api.test(
- 'xcode%s' % ( '_reduced' if should_run_reduced else ''), api.repo_util.flutter_environment_data(),
- api.properties(dependencies=[{'dependency': 'xcode'}], reduced_test_set=should_run_reduced)
+ 'xcode%s' % ('_reduced' if should_run_reduced else ''),
+ api.repo_util.flutter_environment_data(),
+ api.properties(
+ dependencies=[{'dependency': 'xcode'}],
+ reduced_test_set=should_run_reduced
+ )
)
diff --git a/recipes/infra/ci_yaml.py b/recipes/infra/ci_yaml.py
index 15de5ef..8ae7c39 100644
--- a/recipes/infra/ci_yaml.py
+++ b/recipes/infra/ci_yaml.py
@@ -19,6 +19,7 @@
'recipe_engine/step',
]
+
def _is_postsubmit(api):
"""Returns True if the current build is not in try, otherwise False."""
return api.buildbucket.build.builder.bucket != 'try'
@@ -28,6 +29,7 @@
"""Returns True if branch is master or main."""
return branch in ("main", "master")
+
def RunSteps(api):
"""Steps to checkout infra, dependencies, and generate new config."""
start_path = api.path['start_dir']
@@ -40,18 +42,10 @@
api.repo_util.checkout('flutter', flutter_path, ref=flutter_git_ref)
# Checkout latest version of flutter/cocoon.
- api.repo_util.checkout(
- 'cocoon',
- cocoon_path,
- ref='refs/heads/main'
- )
+ api.repo_util.checkout('cocoon', cocoon_path, ref='refs/heads/main')
# Checkout latest version of flutter/infra
- api.repo_util.checkout(
- 'infra',
- infra_path,
- ref='refs/heads/main'
- )
+ api.repo_util.checkout('infra', infra_path, ref='refs/heads/main')
# Install protoc to compile latest scheduler.proto
protoc_path = start_path.join('protoc')
@@ -69,34 +63,50 @@
git_ref = api.buildbucket.gitiles_commit.id
else:
# github pull request info
- git_ref = 'main' # Default to master for LED runs
+ git_ref = 'main' # Default to main for LED runs
for tag in api.buildbucket.build.tags:
if 'sha/git/' in tag.value:
- git_ref = tag.value.replace('sha/git/', '')
+ git_ref = tag.value.replace('sha/git/', '')
# The context adds dart-sdk tools to PATH and sets PUB_CACHE.
env, env_prefixes = api.repo_util.flutter_environment(flutter_path)
- with api.context(env=env, env_prefixes=env_prefixes, cwd=cocoon_path.join('app_dart')):
+ with api.context(env=env, env_prefixes=env_prefixes,
+ cwd=cocoon_path.join('app_dart')):
api.step('flutter doctor', cmd=['flutter', 'doctor'])
api.step('dart pub get', cmd=['dart', 'pub', 'get'])
- generate_jspb_path = cocoon_path.join('app_dart', 'bin', 'generate_jspb.dart')
+ generate_jspb_path = cocoon_path.join(
+ 'app_dart', 'bin', 'generate_jspb.dart'
+ )
config_name = '%s_config.json' % repo
if git_branch and not _is_default_branch(git_branch):
config_name = '%s_%s_config.json' % (repo, git_branch)
- infra_config_path = infra_path.join('config', 'generated', 'ci_yaml', config_name)
+ infra_config_path = infra_path.join(
+ 'config', 'generated', 'ci_yaml', config_name
+ )
# Generate_jspb
- jspb_step = api.step('generate jspb',
+ jspb_step = api.step(
+ 'generate jspb',
cmd=['dart', generate_jspb_path, repo, git_ref],
- stdout=api.raw_io.output_text(), stderr=api.raw_io.output_text())
+ stdout=api.raw_io.output_text(),
+ stderr=api.raw_io.output_text()
+ )
api.file.write_raw('write jspb', infra_config_path, jspb_step.stdout)
# Roll scheduler.proto
with api.context(env_prefixes={'PATH': [protoc_path.join('bin')]}):
- scheduler_proto_src = cocoon_path.join('app_dart', 'lib', 'src', 'model',
- 'proto', 'internal', 'scheduler.proto')
+ scheduler_proto_src = cocoon_path.join(
+ 'app_dart', 'lib', 'src', 'model', 'proto', 'internal',
+ 'scheduler.proto'
+ )
scheduler_proto_dst = infra_path.join('config', 'lib', 'ci_yaml')
- api.step('Roll scheduler.proto', ['cp', scheduler_proto_src, scheduler_proto_dst])
- api.step('Compile scheduler.proto', ['bash', scheduler_proto_dst.join('compile_proto.sh')])
+ api.step(
+ 'Roll scheduler.proto',
+ ['cp', scheduler_proto_src, scheduler_proto_dst]
+ )
+ api.step(
+ 'Compile scheduler.proto',
+ ['bash', scheduler_proto_dst.join('compile_proto.sh')]
+ )
with api.context(cwd=infra_path):
# Generate luci configs
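One behavior worth calling out in this hunk: the generated config file name embeds the branch only when it is not a default branch. A hypothetical distillation of that naming rule:

def config_name(repo, git_branch):
  # Default branches map to '<repo>_config.json'; other branches get the
  # branch name embedded, matching the recipe logic above.
  if git_branch and git_branch not in ('main', 'master'):
    return '%s_%s_config.json' % (repo, git_branch)
  return '%s_config.json' % repo

assert config_name('engine', 'main') == 'engine_config.json'
assert config_name('engine', 'dev') == 'engine_dev_config.json'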
@@ -105,16 +115,16 @@
api.step('luci validate', cmd=['lucicfg', 'validate', 'config/main.star'])
# Only send rolls on postsubmit
if _is_postsubmit(api):
- api.auto_roller.attempt_roll(
- api.auto_roller.Options(
- remote = 'https://flutter.googlesource.com/infra',
- cc_on_failure_emails = ['flutter-infra@grotations.appspotmail.com'],
- labels_to_set = {'Commit-Queue': 2},
- bot_commit = True,
- ),
- repo_dir = infra_path,
- commit_message = 'Roll %s to %s' % (repo, git_ref),
- )
+ api.auto_roller.attempt_roll(
+ api.auto_roller.Options(
+ remote='https://flutter.googlesource.com/infra',
+ cc_on_failure_emails=['flutter-infra@grotations.appspotmail.com'],
+ labels_to_set={'Commit-Queue': 2},
+ bot_commit=True,
+ ),
+ repo_dir=infra_path,
+ commit_message='Roll %s to %s' % (repo, git_ref),
+ )
def GenTests(api):
@@ -123,63 +133,50 @@
api.buildbucket.ci_build(
bucket='prod',
git_repo='https://flutter.googlesource.com/mirrors/engine',
- revision = 'abc123'
- ),
- api.properties(
- git_branch='main',
- git_repo='engine'
- ),
+ revision='abc123'
+ ), api.properties(git_branch='main', git_repo='engine'),
api.repo_util.flutter_environment_data(
api.path['start_dir'].join('flutter')
),
- api.step_data('generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')),
- api.auto_roller.success()
+ api.step_data(
+ 'generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')
+ ), api.auto_roller.success()
)
yield api.test(
'release',
api.buildbucket.ci_build(
bucket='prod',
git_repo='https://flutter.googlesource.com/mirrors/engine',
- revision = 'abc123'
- ),
- api.properties(
- git_branch='dev',
- git_repo='engine'
- ),
+ revision='abc123'
+ ), api.properties(git_branch='dev', git_repo='engine'),
api.repo_util.flutter_environment_data(
api.path['start_dir'].join('flutter')
),
- api.step_data('generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')),
- api.auto_roller.success()
+ api.step_data(
+ 'generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')
+ ), api.auto_roller.success()
)
yield api.test(
'staging',
api.buildbucket.ci_build(
bucket='staging',
git_repo='https://flutter.googlesource.com/mirrors/engine',
- revision = 'abc123'
- ),
- api.properties(
- git_branch='main',
- git_repo='engine'
- ),
+ revision='abc123'
+ ), api.properties(git_branch='main', git_repo='engine'),
api.repo_util.flutter_environment_data(
api.path['start_dir'].join('flutter')
),
- api.step_data('generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')),
- api.auto_roller.success()
+ api.step_data(
+ 'generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')
+ ), api.auto_roller.success()
)
yield api.test(
'presubmit',
api.buildbucket.try_build(
bucket='try',
- tags=api.buildbucket.tags(
- buildset=['sha/git/def123', 'sha/pr/1']
- )
+ tags=api.buildbucket.tags(buildset=['sha/git/def123', 'sha/pr/1'])
),
- api.properties(
- git_repo='engine'
- ),
+ api.properties(git_repo='engine'),
api.repo_util.flutter_environment_data(
api.path['start_dir'].join('flutter')
),
diff --git a/recipes/packages/packages.py b/recipes/packages/packages.py
index f610f80..817d153 100644
--- a/recipes/packages/packages.py
+++ b/recipes/packages/packages.py
@@ -71,13 +71,14 @@
if 'xcode' in dep_list:
with api.osx_sdk('ios'):
api.flutter_deps.gems(
- env, env_prefixes, flutter_checkout_path.join('dev', 'ci', 'mac')
+ env, env_prefixes, flutter_checkout_path.join('dev', 'ci', 'mac')
)
with api.context(env=env, env_prefixes=env_prefixes):
run_test(api, result, packages_checkout_path, env)
else:
run_test(api, result, packages_checkout_path, env)
+
def run_test(api, result, packages_checkout_path, env):
"""Run tests sequentially following the script"""
for task in result.json.output['tasks']:
@@ -93,22 +94,22 @@
finally:
api.logs_util.upload_logs(task['name'])
+
def GenTests(api):
flutter_path = api.path['start_dir'].join('flutter')
- tasks_dict = {'tasks': [{'name': 'one', 'script': 'myscript', 'args': ['arg1', 'arg2']}]}
+ tasks_dict = {
+ 'tasks': [{'name': 'one', 'script': 'myscript', 'args': ['arg1', 'arg2']}]
+ }
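# Minimal fake task list returned by the mocked 'read yaml.parse' step below.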
yield api.test(
'master_channel', api.repo_util.flutter_environment_data(flutter_path),
api.properties(
channel='master',
version_file='flutter_master.version',
- ),
- api.step_data('read yaml.parse', api.json.output(tasks_dict))
+ ), api.step_data('read yaml.parse', api.json.output(tasks_dict))
)
yield api.test(
'stable_channel', api.repo_util.flutter_environment_data(flutter_path),
- api.properties(
- channel='stable',
- ),
+ api.properties(channel='stable',),
api.step_data('read yaml.parse', api.json.output(tasks_dict))
)
yield api.test(
@@ -117,6 +118,5 @@
channel='master',
version_file='flutter_master.version',
dependencies=[{'dependency': 'xcode'}],
- ),
- api.step_data('read yaml.parse', api.json.output(tasks_dict))
+ ), api.step_data('read yaml.parse', api.json.output(tasks_dict))
)
diff --git a/recipes/packaging/packaging.py b/recipes/packaging/packaging.py
index 4e4ec10..a17bf72 100644
--- a/recipes/packaging/packaging.py
+++ b/recipes/packaging/packaging.py
@@ -26,11 +26,7 @@
PACKAGED_REF_RE = re.compile(r'^refs/heads/(.+)$')
-PLATFORMS_MAP = {
- 'win': 'windows',
- 'mac': 'macos',
- 'linux': 'linux'
-}
+PLATFORMS_MAP = {'win': 'windows', 'mac': 'macos', 'linux': 'linux'}
@contextmanager
@@ -61,7 +57,9 @@
dart_executable = 'dart' if not api.platform.is_win else 'dart.exe'
work_dir = api.path['start_dir'].join('archive')
api.step('flutter doctor', [flutter_executable, 'doctor'])
- api.step('download dependencies', [flutter_executable, 'update-packages', '-v'])
+ api.step(
+ 'download dependencies', [flutter_executable, 'update-packages', '-v']
+ )
api.flutter_bcid.report_stage(BcidStage.COMPILE.value)
api.file.rmtree('clean archive work directory', work_dir)
api.file.ensure_directory('(re)create archive work directory', work_dir)
@@ -94,15 +92,20 @@
# latency between publishing a release and it being available on the
# site.
headers = {'Cache-Control': 'max-age=60'}
- api.archives.upload_artifact(metadata_absolute_path, metadata_gs_path, metadata=headers)
+ api.archives.upload_artifact(
+ metadata_absolute_path, metadata_gs_path, metadata=headers
+ )
else:
# add experimental subpath if branch is not beta or stable
pkg_gs_path = '%s/%s/%s' % (dest_gs, 'experimental', file_name)
# Do not upload on presubmit.
- if ((not api.runtime.is_experimental) and (api.flutter_bcid.is_official_build() or
- api.flutter_bcid.is_prod_build())):
+ if ((not api.runtime.is_experimental) and
+ (api.flutter_bcid.is_official_build() or
+ api.flutter_bcid.is_prod_build())):
api.archives.upload_artifact(flutter_pkg_absolute_path, pkg_gs_path)
- api.flutter_bcid.upload_provenance(flutter_pkg_absolute_path, pkg_gs_path)
+ api.flutter_bcid.upload_provenance(
+ flutter_pkg_absolute_path, pkg_gs_path
+ )
api.flutter_bcid.report_stage(BcidStage.UPLOAD_COMPLETE.value)
@@ -116,10 +119,15 @@
str: Fully qualified path to flutter package directory.
"""
suffix = 'tar.xz' if api.platform.is_linux else 'zip'
- files = api.file.glob_paths('get flutter archive file name', work_dir,
- '*flutter*.%s' % suffix, test_data=['flutter-archive-package.%s' % suffix])
+ files = api.file.glob_paths(
+ 'get flutter archive file name',
+ work_dir,
+ '*flutter*.%s' % suffix,
+ test_data=['flutter-archive-package.%s' % suffix]
+ )
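# Exactly one archive is expected; return None when the glob is empty or ambiguous.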
return files[0] if len(files) == 1 else None
+
def GetFlutterMetadataAbsolutePath(api, work_dir):
"""Gets local metadata absolute path.
@@ -133,10 +141,15 @@
metadata_filename += "macos.json"
elif api.platform.is_win:
metadata_filename += "windows.json"
- files = api.file.glob_paths('get flutter archive file name', work_dir,
- metadata_filename, test_data=['releases_linux.json'])
+ files = api.file.glob_paths(
+ 'get flutter archive file name',
+ work_dir,
+ metadata_filename,
+ test_data=['releases_linux.json']
+ )
return files[0] if len(files) == 1 else None
+
def RunSteps(api):
api.flutter_bcid.report_stage(BcidStage.START.value)
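# The git_ref property, when set, overrides the ref of the triggering commit.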
git_ref = api.properties.get('git_ref') or api.buildbucket.gitiles_commit.ref
@@ -144,7 +157,9 @@
api.flutter_bcid.report_stage(BcidStage.FETCH.value)
checkout_path = api.path['start_dir'].join('flutter')
- git_url = api.properties.get('git_url') or 'https://flutter.googlesource.com/mirrors/flutter'
+ git_url = api.properties.get(
+ 'git_url'
+ ) or 'https://flutter.googlesource.com/mirrors/flutter'
# Call this just to obtain release_git_hash so the script knows which commit
# to release
with api.step.nest('determine release revision'):
@@ -169,15 +184,15 @@
env, env_prefixes, api.properties.get('dependencies', [])
)
- packaging_script = checkout_path.join(
- 'dev', 'bots', 'prepare_package.dart'
- )
+ packaging_script = checkout_path.join('dev', 'bots', 'prepare_package.dart')
with api.context(env=env, env_prefixes=env_prefixes):
with api.depot_tools.on_path():
match = PACKAGED_REF_RE.match(git_ref)
if match and not api.runtime.is_experimental:
branch = match.group(1)
- CreateAndUploadFlutterPackage(api, release_git_hash, branch, packaging_script)
+ CreateAndUploadFlutterPackage(
+ api, release_git_hash, branch, packaging_script
+ )
# Nothing left to do on a packaging branch.
return
api.step('Running in test mode - no uploads will happen', [])
@@ -188,27 +203,22 @@
for should_upload in (True, False):
for platform in ('mac', 'linux', 'win'):
for branch in ('master', 'beta', 'stable', 'flutter-release-test'):
- for bucket in ('prod', 'staging', 'flutter'):
- for git_ref in ('refs/heads/' + branch,
- 'invalid' + branch):
- test = api.test(
- '%s_%s%s%s_%s' % (
- platform,
- git_ref,
- '_experimental' if experimental else '',
- '_upload' if should_upload else '',
- bucket
- ), api.platform(platform, 64),
- api.buildbucket.ci_build(
- git_ref=git_ref,
- revision=None,
- bucket=bucket
- ), api.properties(
- shard='tests',
- fuchsia_ctl_version='version:0.0.2',
- upload_packages=should_upload,
- gold_tryjob=not should_upload,
- ), api.runtime(is_experimental=experimental),
- api.repo_util.flutter_environment_data()
- )
- yield test
+ for bucket in ('prod', 'staging', 'flutter'):
+ for git_ref in ('refs/heads/' + branch, 'invalid' + branch):
+ test = api.test(
+ '%s_%s%s%s_%s' % (
+ platform, git_ref, '_experimental' if experimental else
+ '', '_upload' if should_upload else '', bucket
+ ), api.platform(platform, 64),
+ api.buildbucket.ci_build(
+ git_ref=git_ref, revision=None, bucket=bucket
+ ),
+ api.properties(
+ shard='tests',
+ fuchsia_ctl_version='version:0.0.2',
+ upload_packages=should_upload,
+ gold_tryjob=not should_upload,
+ ), api.runtime(is_experimental=experimental),
+ api.repo_util.flutter_environment_data()
+ )
+ yield test
diff --git a/recipes/pub_autoroller/pub_autoroller.py b/recipes/pub_autoroller/pub_autoroller.py
index ba7c45e..61d2a4b 100644
--- a/recipes/pub_autoroller/pub_autoroller.py
+++ b/recipes/pub_autoroller/pub_autoroller.py
@@ -46,14 +46,17 @@
# This should be an https url; an ssh url cannot be authenticated via an
# access token.
mirror_remote = api.properties.get(
- 'mirror_remote', 'https://github.com/fluttergithubbot/flutter.git')
- api.step('run roll-packages script', [
- autoroll_script,
- '--token',
- token_file,
- '--mirror-remote',
- mirror_remote,
- ])
+ 'mirror_remote', 'https://github.com/fluttergithubbot/flutter.git'
+ )
+ api.step(
+ 'run roll-packages script', [
+ autoroll_script,
+ '--token',
+ token_file,
+ '--mirror-remote',
+ mirror_remote,
+ ]
+ )
def GenTests(api):
diff --git a/recipes/release/release_builder.py b/recipes/release/release_builder.py
index 6ced46f..b6b0b23 100644
--- a/recipes/release/release_builder.py
+++ b/recipes/release/release_builder.py
@@ -7,7 +7,6 @@
# This recipe reads <engine_checkout>/.ci.yaml and, for every target
# marked with release_build: true, spawns a subbuild.
-
import json
from contextlib import contextmanager
@@ -19,16 +18,16 @@
from google.protobuf import struct_pb2
DEPS = [
- 'flutter/yaml',
- 'flutter/display_util',
- 'flutter/repo_util',
- 'flutter/shard_util_v2',
- 'recipe_engine/buildbucket',
- 'recipe_engine/json',
- 'recipe_engine/path',
- 'recipe_engine/platform',
- 'recipe_engine/properties',
- 'recipe_engine/step',
+ 'flutter/yaml',
+ 'flutter/display_util',
+ 'flutter/repo_util',
+ 'flutter/shard_util_v2',
+ 'recipe_engine/buildbucket',
+ 'recipe_engine/json',
+ 'recipe_engine/path',
+ 'recipe_engine/platform',
+ 'recipe_engine/properties',
+ 'recipe_engine/step',
]
PROPERTIES = InputProperties
@@ -46,25 +45,23 @@
(git_ref not in RELEASE_CHANNELS)):
return True
# Packaging for the flutter repository.
- if (target.get('scheduler') == 'release' and for_this_platform
- and (git_ref in RELEASE_CHANNELS)
- and git_ref.replace('refs/heads/', '') in target.get('enabled_branches', [])
- ):
+ if (target.get('scheduler') == 'release' and for_this_platform and
+ (git_ref in RELEASE_CHANNELS) and
+ git_ref.replace('refs/heads/', '') in target.get('enabled_branches', [])):
return True
return False
def RunSteps(api, properties, env_properties):
- repository = api.properties.get('git_repo') or api.buildbucket.gitiles_commit.project
+ repository = api.properties.get(
+ 'git_repo'
+ ) or api.buildbucket.gitiles_commit.project
repository_parts = repository.split('/')
checkout_path = api.path['start_dir'].join(*repository_parts)
git_ref = api.properties.get('git_ref') or api.buildbucket.gitiles_commit.ref
git_url = api.properties.get('git_url') or REPOS[repository]
api.repo_util.checkout(
- repository,
- checkout_path=checkout_path,
- url=git_url,
- ref=git_ref
+ repository, checkout_path=checkout_path, url=git_url, ref=git_ref
)
ci_yaml_path = checkout_path.join('.ci.yaml')
@@ -78,62 +75,56 @@
for target in ci_yaml.json.output['targets']:
if ShouldRun(api, git_ref, target):
target = api.shard_util_v2.pre_process_properties(target)
- tasks.update(api.shard_util_v2.schedule(
- [target, ], presentation))
+ tasks.update(api.shard_util_v2.schedule([
+ target,
+ ], presentation))
with api.step.nest('collect builds') as presentation:
build_results = api.shard_util_v2.collect(tasks)
api.display_util.display_subbuilds(
- step_name='display builds',
- subbuilds=build_results,
- raise_on_failure=True,
+ step_name='display builds',
+ subbuilds=build_results,
+ raise_on_failure=True,
)
def GenTests(api):
try_subbuild1 = api.shard_util_v2.try_build_message(
- build_id=8945511751514863186,
- builder="builder-subbuild1",
- output_props={"test_orchestration_inputs_hash": "abc"},
- status="SUCCESS",
+ build_id=8945511751514863186,
+ builder="builder-subbuild1",
+ output_props={"test_orchestration_inputs_hash": "abc"},
+ status="SUCCESS",
)
- tasks_dict = {'targets': [
- {
+ tasks_dict = {
+ 'targets': [{
'name': 'linux one',
'recipe': 'engine/something',
'properties': {
'release_build': True,
'$flutter/osx_sdk': '{"sdk_version": "14a5294e"}'
},
- },
- {
- 'name': 'linux packaging one',
- 'recipe': 'release/something',
+ }, {
+ 'name': 'linux packaging one', 'recipe': 'release/something',
'scheduler': 'release',
- 'properties': {
- '$flutter/osx_sdk': '{"sdk_version": "14a5294e"}'
- },
+ 'properties': {'$flutter/osx_sdk': '{"sdk_version": "14a5294e"}'},
'enabled_branches': ['beta', 'main']
- }
- ]
+ }]
}
for git_ref in ['main', 'beta']:
yield api.test(
- 'basic_linux_%s' % git_ref,
- api.platform.name('linux'),
- api.properties(environment='Staging', repository='engine'),
- api.buildbucket.try_build(
- project='prod',
- builder='try-builder',
- git_repo='https://flutter.googlesource.com/mirrors/engine',
- revision='a' * 40,
- build_number=123,
- git_ref='refs/heads/%s' % git_ref,
- ),
- api.shard_util_v2.child_build_steps(
- subbuilds=[try_subbuild1],
- launch_step="launch builds.schedule",
- collect_step="collect builds",
- ),
- api.step_data('read ci yaml.parse', api.json.output(tasks_dict))
- )
+ 'basic_linux_%s' % git_ref, api.platform.name('linux'),
+ api.properties(environment='Staging', repository='engine'),
+ api.buildbucket.try_build(
+ project='prod',
+ builder='try-builder',
+ git_repo='https://flutter.googlesource.com/mirrors/engine',
+ revision='a' * 40,
+ build_number=123,
+ git_ref='refs/heads/%s' % git_ref,
+ ),
+ api.shard_util_v2.child_build_steps(
+ subbuilds=[try_subbuild1],
+ launch_step="launch builds.schedule",
+ collect_step="collect builds",
+ ), api.step_data('read ci yaml.parse', api.json.output(tasks_dict))
+ )
diff --git a/recipes/release/release_publish.py b/recipes/release/release_publish.py
index 791dcd5..2fc9cd5 100644
--- a/recipes/release/release_publish.py
+++ b/recipes/release/release_publish.py
@@ -23,11 +23,13 @@
stableTagRegex = r'^(\d+)\.(\d+)\.(\d+)$'
betaTagRegex = r'^(\d+)\.(\d+)\.(\d+)-(\d+)\.(\d+)\.pre$'
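# e.g. '1.2.3' matches stableTagRegex and '1.2.3-4.5.pre' matches betaTagRegex,
# the same tags exercised in GenTests below.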
+
def isValidTag(tag):
stable = re.search(stableTagRegex, tag)
development = re.search(betaTagRegex, tag)
return stable or development
+
"""
This recipe executes the tag and publishing stages of a flutter release.
To trigger this recipe, tool proxy must be invoked with multi-party approval.
@@ -41,12 +43,16 @@
The recipe will tag and push to github unless triggered
from an experimental run.
"""
+
+
def RunSteps(api):
git_branch = api.properties.get('git_branch')
tag = api.properties.get('tag')
release_channel = api.properties.get('release_channel')
# Default the force push flag to False.
- force = False if api.runtime.is_experimental else api.properties.get('force', False)
+ force = False if api.runtime.is_experimental else api.properties.get(
+ 'force', False
+ )
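# Experimental runs never force-push, regardless of the 'force' property.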
assert git_branch and tag and release_channel in ('stable', 'beta')
flutter_checkout = api.path['start_dir'].join('flutter')
@@ -63,21 +69,25 @@
with api.step.nest('checkout flutter release branch'):
flutter_rel_hash = api.repo_util.checkout(
- 'flutter',
- flutter_checkout,
- url=flutter_git_url,
- ref="refs/heads/%s" % git_branch,
+ 'flutter',
+ flutter_checkout,
+ url=flutter_git_url,
+ ref="refs/heads/%s" % git_branch,
)
with api.step.nest('checkout engine release branch'):
api.repo_util.checkout(
- 'engine',
- api.path['start_dir'].join('engine'),
- url=engine_git_url,
- ref='refs/heads/%s' % git_branch,
+ 'engine',
+ api.path['start_dir'].join('engine'),
+ url=engine_git_url,
+ ref='refs/heads/%s' % git_branch,
)
- env_flutter, env_flutter_prefixes = api.repo_util.flutter_environment(flutter_checkout)
- env_engine, env_engine_prefixes = api.repo_util.engine_environment(engine_checkout)
+ env_flutter, env_flutter_prefixes = api.repo_util.flutter_environment(
+ flutter_checkout
+ )
+ env_engine, env_engine_prefixes = api.repo_util.engine_environment(
+ engine_checkout
+ )
api.flutter_deps.required_deps(
env_flutter,
env_flutter_prefixes,
@@ -97,22 +107,28 @@
)
for repo in ('flutter', 'engine'):
- env = env_flutter if repo=='flutter' else env_engine
- env_prefixes = env_flutter_prefixes if repo=='flutter' else env_engine_prefixes
- checkout = flutter_checkout if repo=='flutter' else engine_checkout
- rel_hash = flutter_rel_hash if repo=='flutter' else GetEngineVersion(api, flutter_checkout)
+ env = env_flutter if repo == 'flutter' else env_engine
+ env_prefixes = env_flutter_prefixes if repo == 'flutter' else env_engine_prefixes
+ checkout = flutter_checkout if repo == 'flutter' else engine_checkout
+ rel_hash = flutter_rel_hash if repo == 'flutter' else GetEngineVersion(
+ api, flutter_checkout
+ )
with api.context(env=env, env_prefixes=env_prefixes, cwd=checkout):
token_decrypted = api.path['cleanup'].join('token.txt')
- api.kms.get_secret('flutter-release-github-token.encrypted', token_decrypted)
+ api.kms.get_secret(
+ 'flutter-release-github-token.encrypted', token_decrypted
+ )
- env['FORCE_FLAG'] = '--force-with-lease=%s:%s' % (release_channel, rel_hash) if force else ''
+ env['FORCE_FLAG'] = '--force-with-lease=%s:%s' % (
+ release_channel, rel_hash
+ ) if force else ''
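# --force-with-lease=<ref>:<hash> only rewrites the remote ref if it still
# points at <hash>, guarding against clobbering a newer release.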
env['TOKEN_PATH'] = token_decrypted
env['TAG'] = tag
env['REL_HASH'] = rel_hash
env['RELEASE_CHANNEL'] = release_channel
env['GIT_BRANCH'] = git_branch
env['GITHUB_USER'] = 'fluttergithubbot'
- env['REPO'] = 'flutter' if repo=='flutter' else 'engine'
+ env['REPO'] = 'flutter' if repo == 'flutter' else 'engine'
# Run script within a new context to use the new env variables.
# Tag and push flutter/flutter first, then use hash found in
@@ -120,32 +136,38 @@
with api.context(env=env, env_prefixes=env_prefixes):
api.step('Tag and push release on flutter/%s' % repo, [resource_name])
+
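# The flutter checkout pins its engine revision in bin/internal/engine.version.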
def GetEngineVersion(api, flutter_checkout):
- return api.file.read_text('read engine hash', str(flutter_checkout)+'/bin/internal/engine.version').strip()
+ return api.file.read_text(
+ 'read engine hash',
+ str(flutter_checkout) + '/bin/internal/engine.version'
+ ).strip()
+
def GenTests(api):
- checkout_path = api.path['start_dir'].join('flutter')
- for tag in ('1.2.3-4.5.pre', '1.2.3'):
- for release_channel in ('stable', 'beta'):
- for force in ('True', 'False'):
- test = api.test(
- '%s_%s_%s%s' % (
- 'flutter-2.8-candidate.9',
- tag,
- release_channel,
- '_force' if force=='True' else 'False'
- ), api.platform('linux', 64),
- api.properties(
- git_branch='flutter-2.8-candidate.9',
- tag=tag,
- release_channel=release_channel,
- force=force
- ),
- api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
- api.post_process(post_process.MustRun,
- 'Tag and push release on flutter/flutter'),
- api.post_process(post_process.MustRun,
- 'Tag and push release on flutter/engine'),
- api.post_process(post_process.StatusSuccess),
- )
- yield test
+ checkout_path = api.path['start_dir'].join('flutter')
+ for tag in ('1.2.3-4.5.pre', '1.2.3'):
+ for release_channel in ('stable', 'beta'):
+ for force in ('True', 'False'):
+ test = api.test(
+ '%s_%s_%s%s' % (
+ 'flutter-2.8-candidate.9', tag, release_channel,
+ '_force' if force == 'True' else 'False'
+ ),
+ api.platform('linux', 64),
+ api.properties(
+ git_branch='flutter-2.8-candidate.9',
+ tag=tag,
+ release_channel=release_channel,
+ force=force
+ ),
+ api.repo_util.flutter_environment_data(checkout_dir=checkout_path),
+ api.post_process(
+ post_process.MustRun, 'Tag and push release on flutter/flutter'
+ ),
+ api.post_process(
+ post_process.MustRun, 'Tag and push release on flutter/engine'
+ ),
+ api.post_process(post_process.StatusSuccess),
+ )
+ yield test