Remove deprecated uses of the path subscript operator
Subscript accessors like api.path['start_dir'] are deprecated.
Use the equivalent property, e.g. api.path.start_dir, instead.
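
As an illustrative sketch of the rewrite (the paths shown are taken from
hunks in this change and stand in for every affected call site; the
property names are the ones provided by the recipe_engine path module):

    # Before: deprecated subscript accessor.
    avd_root = api.path['cache'].join('avd')
    checkout = api.path['start_dir']

    # After: equivalent property accessor.
    avd_root = api.path.cache_dir.join('avd')
    checkout = api.path.start_dir

Mapping applied throughout: 'start_dir' -> start_dir,
'cache' -> cache_dir, 'cleanup' -> cleanup_dir, 'home' -> home_dir,
'checkout' -> checkout_dir.
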
Bug: https://crbug.com/329113288
Change-Id: I5adc09b99af085726726d926635ede0468466b5e
Reviewed-on: https://flutter-review.googlesource.com/c/recipes/+/59420
Reviewed-by: Christopher Fujino <fujino@google.com>
Commit-Queue: Alexander Thomas <athom@google.com>
diff --git a/recipe_modules/android_virtual_device/api.py b/recipe_modules/android_virtual_device/api.py
index fde16a2..f0b291b 100644
--- a/recipe_modules/android_virtual_device/api.py
+++ b/recipe_modules/android_virtual_device/api.py
@@ -28,7 +28,7 @@
env['AVD_ROOT'] = self.avd_root
env['ADB_PATH'] = self.adb_path
return
- self.avd_root = self.m.path['cache'].join('avd')
+ self.avd_root = self.m.path.cache_dir.join('avd')
self.download(
env=env,
env_prefixes=env_prefixes,
@@ -89,7 +89,7 @@
def _get_config_version(self, version):
"""Get the config if given an integer version
-
+
Args:
"""
avd_config = None
diff --git a/recipe_modules/archives/api.py b/recipe_modules/archives/api.py
index b37e117..a5d1203 100644
--- a/recipe_modules/archives/api.py
+++ b/recipe_modules/archives/api.py
@@ -110,7 +110,7 @@
"""
results = []
self.m.path.mock_add_directory(
- self.m.path['start_dir']
+ self.m.path.start_dir
.join('out/android_profile/zip_archives/download.flutter.io')
)
for include_path in archive_config.get('include_paths', []):
diff --git a/recipe_modules/archives/examples/engine_v2_gcs_paths.py b/recipe_modules/archives/examples/engine_v2_gcs_paths.py
index c39b57f..0090425 100644
--- a/recipe_modules/archives/examples/engine_v2_gcs_paths.py
+++ b/recipe_modules/archives/examples/engine_v2_gcs_paths.py
@@ -18,7 +18,7 @@
def RunSteps(api):
- checkout = api.path['start_dir']
+ checkout = api.path.start_dir
config = api.properties.get('config')
config_prod_realm = {
"name":
@@ -63,14 +63,14 @@
expected_prod_results = [
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('out/android_profile/zip_archives/artifact1.zip')
),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/artifact1.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/android-arm-profile/artifacts.zip'
)
),
@@ -78,7 +78,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/android-arm-profile/linux-x64.zip'
)
),
@@ -86,7 +86,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/android-arm-profile/symbols.zip'
)
),
@@ -94,7 +94,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.jar'
)
),
@@ -102,7 +102,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom'
)
),
@@ -110,7 +110,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('out/android_profile/zip_archives/sky_engine.zip')
),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/sky_engine.zip'
@@ -119,14 +119,14 @@
expected_try_results = [
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('out/android_profile/zip_archives/artifact1.zip')
),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/artifact1.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/android-arm-profile/artifacts.zip'
)
),
@@ -134,7 +134,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/android-arm-profile/linux-x64.zip'
)
),
@@ -142,7 +142,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/android-arm-profile/symbols.zip'
)
),
@@ -150,7 +150,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.jar'
)
),
@@ -158,7 +158,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir'].join(
+ api.path.start_dir.join(
'out/android_profile/zip_archives/download.flutter.io/io/flutter/x86_debug/1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584/x86_debug-1.0.0-0005149dca9b248663adcde4bdd7c6c915a76584.pom'
)
),
@@ -166,7 +166,7 @@
),
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('out/android_profile/zip_archives/sky_engine.zip')
),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/sky_engine.zip'
diff --git a/recipe_modules/archives/examples/full.py b/recipe_modules/archives/examples/full.py
index c4b4799..7445860 100644
--- a/recipe_modules/archives/examples/full.py
+++ b/recipe_modules/archives/examples/full.py
@@ -16,7 +16,7 @@
def RunSteps(api):
- checkout = api.path['start_dir']
+ checkout = api.path.start_dir
config = api.properties.get('config')
expected_destinations = api.properties.get('expected_destinations')
results = api.archives.engine_v2_gcs_paths(checkout, config)
diff --git a/recipe_modules/archives/examples/global_generator_paths.py b/recipe_modules/archives/examples/global_generator_paths.py
index 3859628..b3a4c8b 100644
--- a/recipe_modules/archives/examples/global_generator_paths.py
+++ b/recipe_modules/archives/examples/global_generator_paths.py
@@ -18,7 +18,7 @@
def RunSteps(api):
- checkout = api.path['start_dir']
+ checkout = api.path.start_dir
if api.monorepo.is_monorepo:
checkout = checkout / 'monorepo'
checkout = checkout / 'src'
@@ -36,95 +36,95 @@
}]
expected_results = [
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/debug/artifacts.zip')),
+ local=str(api.path.start_dir.join('src/out/debug/artifacts.zip')),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios/artifacts.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('src/out/release-nobitcode/Flutter.dSYM.zip')
),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios-release-nobitcode/Flutter.dSYM.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')
+ api.path.start_dir.join('src/out/release/Flutter.dSYM.zip')
),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios-release/Flutter.dSYM.zip'
)
]
expected_results_flutter = [
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/debug/artifacts.zip')),
+ local=str(api.path.start_dir.join('src/out/debug/artifacts.zip')),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios/artifacts.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('src/out/release-nobitcode/Flutter.dSYM.zip')
),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios-release-nobitcode/Flutter.dSYM.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')
+ api.path.start_dir.join('src/out/release/Flutter.dSYM.zip')
),
remote='gs://flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios-release/Flutter.dSYM.zip'
)
]
expected_monorepo_results = [
ArchivePaths(
- local=str(api.path['start_dir'].join('monorepo/src/out/debug/artifacts.zip')),
+ local=str(api.path.start_dir.join('monorepo/src/out/debug/artifacts.zip')),
remote='gs://flutter_archives_v2/monorepo/123/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios/artifacts.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('monorepo/src/out/release-nobitcode/Flutter.dSYM.zip')
),
remote='gs://flutter_archives_v2/monorepo/123/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios-release-nobitcode/Flutter.dSYM.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir'].join('monorepo/src/out/release/Flutter.dSYM.zip')
+ api.path.start_dir.join('monorepo/src/out/release/Flutter.dSYM.zip')
),
remote='gs://flutter_archives_v2/monorepo/123/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios-release/Flutter.dSYM.zip'
)
]
expected_monorepo_try_results = [
ArchivePaths(
- local=str(api.path['start_dir'].join('monorepo/src/out/debug/artifacts.zip')),
+ local=str(api.path.start_dir.join('monorepo/src/out/debug/artifacts.zip')),
remote='gs://flutter_archives_v2/monorepo_try/123/flutter_infra_release/flutter/123/ios/artifacts.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('monorepo/src/out/release-nobitcode/Flutter.dSYM.zip')
),
remote='gs://flutter_archives_v2/monorepo_try/123/flutter_infra_release/flutter/123/ios-release-nobitcode/Flutter.dSYM.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir'].join('monorepo/src/out/release/Flutter.dSYM.zip')
+ api.path.start_dir.join('monorepo/src/out/release/Flutter.dSYM.zip')
),
remote='gs://flutter_archives_v2/monorepo_try/123/flutter_infra_release/flutter/123/ios-release/Flutter.dSYM.zip'
)
]
expected_try_results = [
ArchivePaths(
- local=str(api.path['start_dir'].join('src/out/debug/artifacts.zip')),
+ local=str(api.path.start_dir.join('src/out/debug/artifacts.zip')),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios/artifacts.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir']
+ api.path.start_dir
.join('src/out/release-nobitcode/Flutter.dSYM.zip')
),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios-release-nobitcode/Flutter.dSYM.zip'
),
ArchivePaths(
local=str(
- api.path['start_dir'].join('src/out/release/Flutter.dSYM.zip')
+ api.path.start_dir.join('src/out/release/Flutter.dSYM.zip')
),
remote='gs://flutter_archives_v2/flutter_infra_release/flutter/12345abcde12345abcde12345abcde12345abcde/ios-release/Flutter.dSYM.zip'
)
diff --git a/recipe_modules/bucket_util/api.py b/recipe_modules/bucket_util/api.py
index 1e7d43c..701b30b 100644
--- a/recipe_modules/bucket_util/api.py
+++ b/recipe_modules/bucket_util/api.py
@@ -79,7 +79,7 @@
local_zip = temp_dir.join(zip_name)
remote_zip = self.get_cloud_path(remote_name)
if isinstance(parent_directory, str):
- parent_directory = self.m.path['cache'].join(
+ parent_directory = self.m.path.cache_dir.join(
'builder', parent_directory
)
pkg = self.m.zip.make_package(parent_directory, local_zip)
diff --git a/recipe_modules/build_util/examples/full.py b/recipe_modules/build_util/examples/full.py
index 7cebc56..b0d9ab5 100644
--- a/recipe_modules/build_util/examples/full.py
+++ b/recipe_modules/build_util/examples/full.py
@@ -14,23 +14,23 @@
def RunSteps(api):
- checkout = api.path['start_dir']
+ checkout = api.path.start_dir
env_prefixes = {}
with api.context(env_prefixes=env_prefixes):
api.build_util.run_gn([], checkout)
api.build_util.build(
'profile', checkout, ['mytarget'], {
- 'CLANG_CRASH_DIAGNOSTICS_DIR': api.path['start_dir'],
- 'FLUTTER_LOGS_DIR': api.path['start_dir']
+ 'CLANG_CRASH_DIAGNOSTICS_DIR': api.path.start_dir,
+ 'FLUTTER_LOGS_DIR': api.path.start_dir
},
- rbe_working_path=api.path["cleanup"].join("rbe"),
+ rbe_working_path=api.path.cleanup_dir.join("rbe"),
)
with api.context(env_prefixes=env_prefixes):
api.build_util.run_gn(['--no-goma', '--no-rbe'], checkout)
api.build_util.build(
'release', checkout, ['mytarget'], {
- 'CLANG_CRASH_DIAGNOSTICS_DIR': api.path['start_dir'],
- 'FLUTTER_LOGS_DIR': api.path['start_dir']
+ 'CLANG_CRASH_DIAGNOSTICS_DIR': api.path.start_dir,
+ 'FLUTTER_LOGS_DIR': api.path.start_dir
}
)
with api.context(env_prefixes=env_prefixes):
@@ -40,10 +40,10 @@
checkout,
['rbe_target'],
{
- 'CLANG_CRASH_DIAGNOSTICS_DIR': api.path['start_dir'],
- 'FLUTTER_LOGS_DIR': api.path['start_dir']
+ 'CLANG_CRASH_DIAGNOSTICS_DIR': api.path.start_dir,
+ 'FLUTTER_LOGS_DIR': api.path.start_dir
},
- rbe_working_path=api.path["cleanup"].join("rbe"),
+ rbe_working_path=api.path.cleanup_dir.join("rbe"),
)
@@ -56,7 +56,7 @@
'build profile mytarget',
retcode=1,
),
- api.path.exists(api.path['start_dir'].join('foo.sh')),
+ api.path.exists(api.path.start_dir.join('foo.sh')),
status='FAILURE',
)
yield api.test(
diff --git a/recipe_modules/cache/api.py b/recipe_modules/cache/api.py
index 6d075bf..9ec60b6 100644
--- a/recipe_modules/cache/api.py
+++ b/recipe_modules/cache/api.py
@@ -76,7 +76,7 @@
hash_value = self.m.cas.archive('Archive %s' % name, path, log_level='debug')
cache_metadata['hashes'][name] = hash_value
platform = self.m.platform.name
- local_cache_path = self.m.path['cleanup'].join(
+ local_cache_path = self.m.path.cleanup_dir.join(
'%s-%s.json' % (cache_name, platform)
)
self.m.file.write_json(
@@ -106,7 +106,7 @@
force (bool): Whether to recreate the caches or skip them if they already exist..
"""
with self.m.step.nest('Mount caches'):
- cache_root = cache_root or self.m.path['cache']
+ cache_root = cache_root or self.m.path.cache_dir
cloud_path = self._cache_path(cache_name)
metadata = self.m.gsutil.cat(
cloud_path, stdout=self.m.json.output()
diff --git a/recipe_modules/cache/tests/refresh.py b/recipe_modules/cache/tests/refresh.py
index 6ee3906..0a3f046 100644
--- a/recipe_modules/cache/tests/refresh.py
+++ b/recipe_modules/cache/tests/refresh.py
@@ -14,12 +14,12 @@
result = api.cache.requires_refresh('builder')
api.assertions.assertTrue(result)
paths = [
- api.path['cache'].join('builder'),
- api.path['cache'].join('git'),
+ api.path.cache_dir.join('builder'),
+ api.path.cache_dir.join('git'),
]
api.cache.write('builder', paths, 60)
- api.cache.mount_cache('builder', api.path['cache'])
- api.cache.should_force_mount(api.path['cache'].join('builder'))
+ api.cache.mount_cache('builder', api.path.cache_dir)
+ api.cache.should_force_mount(api.path.cache_dir.join('builder'))
def GenTests(api):
diff --git a/recipe_modules/cache_micro_manager/examples/existing_cache_file.py b/recipe_modules/cache_micro_manager/examples/existing_cache_file.py
index ef424e2..ed13a87 100644
--- a/recipe_modules/cache_micro_manager/examples/existing_cache_file.py
+++ b/recipe_modules/cache_micro_manager/examples/existing_cache_file.py
@@ -17,7 +17,7 @@
def RunSteps(api):
- cache_target_dir = api.path['cache'].join('osx_sdk')
+ cache_target_dir = api.path.cache_dir.join('osx_sdk')
fake_dirdep_1_path = cache_target_dir.join('fake_dep_package_1')
fake_filedep_1_path = cache_target_dir.join('fake_dep_file_1')
diff --git a/recipe_modules/cache_micro_manager/examples/no_cache_file.py b/recipe_modules/cache_micro_manager/examples/no_cache_file.py
index 25af149..12dde71 100644
--- a/recipe_modules/cache_micro_manager/examples/no_cache_file.py
+++ b/recipe_modules/cache_micro_manager/examples/no_cache_file.py
@@ -13,7 +13,7 @@
from unittest.mock import Mock
def RunSteps(api):
- cache_target_dir = api.path['cache'].join('osx_sdk')
+ cache_target_dir = api.path.cache_dir.join('osx_sdk')
fake_dirdep_1_path = cache_target_dir.join('fake_dep_package_1')
fake_filedep_1_path = cache_target_dir.join('fake_dep_file_1')
diff --git a/recipe_modules/display_util/examples/display_tasks.py b/recipe_modules/display_util/examples/display_tasks.py
index d14b1bd..1288d3a 100644
--- a/recipe_modules/display_util/examples/display_tasks.py
+++ b/recipe_modules/display_util/examples/display_tasks.py
@@ -29,7 +29,7 @@
metadata = api.swarming.trigger("Trigger Tests", requests=[request])
links = {m.id: m.task_ui_link for m in metadata}
# Collect the result of the task by metadata.
- fuchsia_output = api.path["cleanup"].join("fuchsia_test_output")
+ fuchsia_output = api.path.cleanup_dir.join("fuchsia_test_output")
api.file.ensure_directory("swarming output", fuchsia_output)
results = api.swarming.collect(
"collect", metadata, output_dir=fuchsia_output, timeout="30m"
diff --git a/recipe_modules/firebase/examples/full.py b/recipe_modules/firebase/examples/full.py
index 78a0493..9ecc708 100644
--- a/recipe_modules/firebase/examples/full.py
+++ b/recipe_modules/firebase/examples/full.py
@@ -9,7 +9,7 @@
def RunSteps(api):
- docs_path = api.path['start_dir'].join('flutter', 'dev', 'docs')
+ docs_path = api.path.start_dir.join('flutter', 'dev', 'docs')
api.firebase.deploy_docs({}, {}, docs_path, 'myproject')
diff --git a/recipe_modules/flutter_bcid/examples/full.py b/recipe_modules/flutter_bcid/examples/full.py
index 08b22cb..040fc88 100644
--- a/recipe_modules/flutter_bcid/examples/full.py
+++ b/recipe_modules/flutter_bcid/examples/full.py
@@ -13,7 +13,7 @@
def RunSteps(api):
api.flutter_bcid.report_stage('one')
api.flutter_bcid.upload_provenance(
- api.path['cache'].join('file.zip'), 'gs://bucket/final_path/file.txt'
+ api.path.cache_dir.join('file.zip'), 'gs://bucket/final_path/file.txt'
)
api.flutter_bcid.is_official_build()
api.flutter_bcid.is_prod_build()
diff --git a/recipe_modules/flutter_deps/api.py b/recipe_modules/flutter_deps/api.py
index 1864dc4..a172ff7 100644
--- a/recipe_modules/flutter_deps/api.py
+++ b/recipe_modules/flutter_deps/api.py
@@ -32,7 +32,7 @@
# No-op if `local_engine_cas_hash` property is empty
cas_hash = self.m.properties.get('local_engine_cas_hash')
if cas_hash:
- checkout_engine = self.m.path['cleanup'].join('builder', 'src', 'out')
+ checkout_engine = self.m.path.cleanup_dir.join('builder', 'src', 'out')
# Download built engines from CAS.
if cas_hash:
self.m.cas.download(
@@ -50,7 +50,7 @@
web_sdk_cas_hash = self.m.properties.get('local_web_sdk_cas_hash')
local_web_sdk = self.m.properties.get('local_web_sdk')
if web_sdk_cas_hash:
- checkout_src = self.m.path['cleanup'].join('builder', 'src')
+ checkout_src = self.m.path.cleanup_dir.join('builder', 'src')
self.m.cas.download(
'Download web sdk from CAS', web_sdk_cas_hash, checkout_src
)
@@ -159,7 +159,7 @@
"""
version = version or 'version:11'
with self.m.step.nest('OpenJDK dependency'):
- java_cache_dir = self.m.path['cache'].join('java')
+ java_cache_dir = self.m.path.cache_dir.join('java')
self.m.cipd.ensure(
java_cache_dir,
self.m.cipd.EnsureFile().add_package(
@@ -185,9 +185,9 @@
"""
version = version or 'last_updated:2023-02-03T15:32:01-0800'
with self.m.step.nest('Arm Tools dependency'):
- arm_tools_cache_dir = self.m.path['cache'].join('arm-tools')
+ arm_tools_cache_dir = self.m.path.cache_dir.join('arm-tools')
self.m.cipd.ensure(
- self.m.path['cache'],
+ self.m.path.cache_dir,
self.m.cipd.EnsureFile().add_package(
'flutter_internal/tools/arm-tools', version
)
@@ -230,7 +230,7 @@
version = version or 'git_revision:720a542f6fe4f92922c3b8f0fdcc4d2ac6bb83cd'
with self.m.step.nest('Download goldctl'):
- goldctl_cache_dir = self.m.path['cache'].join('gold')
+ goldctl_cache_dir = self.m.path.cache_dir.join('gold')
self.m.cipd.ensure(
goldctl_cache_dir,
self.m.cipd.EnsureFile().add_package(
@@ -254,11 +254,11 @@
version = version or 'latest'
with self.m.step.nest('Chrome and driver dependency'):
env['CHROME_NO_SANDBOX'] = 'true'
- chrome_path = self.m.path['cache'].join('chrome', 'chrome')
+ chrome_path = self.m.path.cache_dir.join('chrome', 'chrome')
pkgs = self.m.cipd.EnsureFile()
pkgs.add_package('flutter_internal/browsers/chrome/${platform}', version)
self.m.cipd.ensure(chrome_path, pkgs)
- chrome_driver_path = self.m.path['cache'].join('chrome', 'drivers')
+ chrome_driver_path = self.m.path.cache_dir.join('chrome', 'drivers')
pkgdriver = self.m.cipd.EnsureFile()
pkgdriver.add_package(
'flutter_internal/browser-drivers/chrome/${platform}', version
@@ -297,7 +297,7 @@
"""
version = version or 'latest'
with self.m.step.nest('Firefox dependency'):
- firefox_path = self.m.path['cache'].join('firefox')
+ firefox_path = self.m.path.cache_dir.join('firefox')
pkgs = self.m.cipd.EnsureFile()
pkgs.add_package('flutter_internal/browsers/firefox/${platform}', version)
self.m.cipd.ensure(firefox_path, pkgs)
@@ -309,7 +309,7 @@
def gh_cli(self, env, env_prefixes, version):
"""Installs GitHub CLI."""
version = version or 'latest'
- gh_path = self.m.path['cache'].join('gh-cli')
+ gh_path = self.m.path.cache_dir.join('gh-cli')
gh_file = self.m.cipd.EnsureFile()
gh_file.add_package('flutter_internal/tools/gh-cli/${platform}', version)
self.m.cipd.ensure(gh_path, gh_file)
@@ -320,14 +320,14 @@
def go_sdk(self, env, env_prefixes, version):
"""Installs go sdk."""
- go_path = self.m.path['cache'].join('go')
+ go_path = self.m.path.cache_dir.join('go')
go = self.m.cipd.EnsureFile()
go.add_package('infra/3pp/tools/go/${platform}', version)
self.m.cipd.ensure(go_path, go)
paths = env_prefixes.get('PATH', [])
paths.append(go_path.join('bin'))
# Setup GOPATH and add to the env.
- bin_path = self.m.path['cleanup'].join('go_path')
+ bin_path = self.m.path.cleanup_dir.join('go_path')
self.m.file.ensure_directory('Ensure go path', bin_path)
env['GOPATH'] = bin_path
paths.append(bin_path.join('bin'))
@@ -370,7 +370,7 @@
def android_sdk(self, env, env_prefixes, version):
"""Installs android sdk."""
version = version or 'latest'
- sdk_root = self.m.path['cache'].join('android')
+ sdk_root = self.m.path.cache_dir.join('android')
self.m.cipd.ensure(
sdk_root,
self.m.cipd.EnsureFile().add_package(
@@ -396,13 +396,13 @@
# Specify the location of the shared cache used by Gradle builds.
# This cache contains dependencies downloaded from network when a Gradle task is run.
# When a cache hit occurs, the dependency is immediately provided to the Gradle build.
- env['GRADLE_USER_HOME'] = self.m.path['cache'].join('gradle')
+ env['GRADLE_USER_HOME'] = self.m.path.cache_dir.join('gradle')
# Disable the Gradle daemon. Some builders aren't ephemeral, which means that state leaks out potentially
# leaving the bot in a bad state.
# For more, see CI section on https://docs.gradle.org/current/userguide/gradle_daemon.html#sec:disabling_the_daemon
env['GRADLE_OPTS'] = '-Dorg.gradle.daemon=false'
self.m.file.listdir(
- 'gradle cache', self.m.path['cache'].join('gradle'), recursive=True
+ 'gradle cache', self.m.path.cache_dir.join('gradle'), recursive=True
)
def firebase(self, env, env_prefixes, version='latest'):
@@ -414,7 +414,7 @@
env(dict): Current environment variables.
env_prefixes(dict): Current environment prefixes variables.
"""
- firebase_dir = self.m.path['start_dir'].join('firebase')
+ firebase_dir = self.m.path.start_dir.join('firebase')
self.m.file.ensure_directory('ensure directory', firebase_dir)
with self.m.step.nest('Install firebase'):
self.m.step(
@@ -443,7 +443,7 @@
env_prefixes(dict): Current environment prefixes variables.
"""
version = version or 'git_revision:7e9747b50bcb1be28d4a3236571e8050835497a6'
- clang_path = self.m.path['cache'].join('clang')
+ clang_path = self.m.path.cache_dir.join('clang')
clang = self.m.cipd.EnsureFile()
clang.add_package('fuchsia/third_party/clang/${platform}', version)
with self.m.step.nest('Install clang'):
@@ -459,7 +459,7 @@
env(dict): Current environment variables.
env_prefixes(dict): Current environment prefixes variables.
"""
- cmake_path = self.m.path['cache'].join('cmake')
+ cmake_path = self.m.path.cache_dir.join('cmake')
cmake = self.m.cipd.EnsureFile()
version = version or 'build_id:8787856497187628321'
cmake.add_package('infra/3pp/tools/cmake/${platform}', version)
@@ -495,7 +495,7 @@
env_prefixes(dict): Current environment prefixes variables.
"""
version = version or 'latest'
- cosign_path = self.m.path['cache'].join('cosign')
+ cosign_path = self.m.path.cache_dir.join('cosign')
cosign = self.m.cipd.EnsureFile()
cosign.add_package('flutter/tools/cosign/${platform}', version)
with self.m.step.nest('Install cosign'):
@@ -512,7 +512,7 @@
env_prefixes(dict): Current environment prefixes variables.
"""
version = version or 'version:1.9.0'
- ninja_path = self.m.path['cache'].join('ninja')
+ ninja_path = self.m.path.cache_dir.join('ninja')
ninja = self.m.cipd.EnsureFile()
ninja.add_package("infra/ninja/${platform}", version)
with self.m.step.nest('Install ninja'):
@@ -529,7 +529,7 @@
env_prefixes(dict): Current environment prefixes variables.
"""
version = version or 'stable'
- dart_sdk_path = self.m.path['cache'].join('dart_sdk')
+ dart_sdk_path = self.m.path.cache_dir.join('dart_sdk')
dart_sdk = self.m.cipd.EnsureFile()
dart_sdk.add_package("dart/dart-sdk/${platform}", version)
with self.m.step.nest('Install dart sdk'):
@@ -549,7 +549,7 @@
# noop for non windows platforms.
return
version = version or 'latest'
- certs_path = self.m.path['cache'].join('certs')
+ certs_path = self.m.path.cache_dir.join('certs')
certs = self.m.cipd.EnsureFile()
certs.add_package("flutter_internal/certs", version)
with self.m.step.nest('Install certs'):
@@ -577,7 +577,7 @@
# noop for non Mac platforms.
return
version = version or 'latest'
- swift_format_path = self.m.path['cache'].join('swift_format')
+ swift_format_path = self.m.path.cache_dir.join('swift_format')
sf = self.m.cipd.EnsureFile()
sf.add_package("infra/3pp/tools/swift-format/${platform}", version)
with self.m.step.nest('Install swift-format'):
@@ -598,7 +598,7 @@
return
version = version or 'latest'
- vs_path = self.m.path['cache'].join('vsbuild')
+ vs_path = self.m.path.cache_dir.join('vsbuild')
vs = self.m.cipd.EnsureFile()
vs.add_package("flutter_internal/windows/vsbuild", version)
with self.m.step.nest('VSBuild') as presentation:
@@ -618,7 +618,7 @@
Write-Output $process.ExitCode
exit $process.ExitCode
"""
- install_script_path = self.m.path['cleanup'].join('install.ps1')
+ install_script_path = self.m.path.cleanup_dir.join('install.ps1')
self.m.file.write_text(
'Write install script', install_script_path, installation_script
)
@@ -639,7 +639,7 @@
Copy-Item "$env:TEMP\dd_setup_*_errors.log" "$destination"
Copy-Item "$env:TEMP\dd_vs_setup_*" "$destination"
"""
- copy_script_path = self.m.path['cleanup'].join('copy.ps1')
+ copy_script_path = self.m.path.cleanup_dir.join('copy.ps1')
self.m.file.write_text(
'Write copy script', copy_script_path, copy_script
)
@@ -716,7 +716,7 @@
"""
version = version or 'latest'
with self.m.step.nest('Install ruby'):
- ruby_path = self.m.path['cache'].join('ruby')
+ ruby_path = self.m.path.cache_dir.join('ruby')
ruby = self.m.cipd.EnsureFile()
ruby.add_package('flutter/ruby/${platform}', version)
self.m.cipd.ensure(ruby_path, ruby)
@@ -736,7 +736,7 @@
"""
version = version or 'latest'
with self.m.step.nest('Install ktlint'):
- ktlint_path = self.m.path['cache'].join('ktlint')
+ ktlint_path = self.m.path.cache_dir.join('ktlint')
ktlint = self.m.cipd.EnsureFile()
ktlint.add_package('flutter/ktlint/${platform}', version)
self.m.cipd.ensure(ktlint_path, ktlint)
diff --git a/recipe_modules/flutter_deps/examples/full.py b/recipe_modules/flutter_deps/examples/full.py
index 20f0368..dea4256 100644
--- a/recipe_modules/flutter_deps/examples/full.py
+++ b/recipe_modules/flutter_deps/examples/full.py
@@ -50,9 +50,9 @@
api.assertions.assertTrue(env.get('FIREFOX_EXECUTABLE'))
api.assertions.assertEqual(
env_prefixes.get('PATH'), [
- api.path['cache'].join('chrome', 'chrome'),
- api.path['cache'].join('chrome',
- 'drivers'), api.path['cache'].join('firefox')
+ api.path.cache_dir.join('chrome', 'chrome'),
+ api.path.cache_dir.join('chrome',
+ 'drivers'), api.path.cache_dir.join('firefox')
]
)
api.flutter_deps.go_sdk(env, env_prefixes, 'v4')
@@ -100,12 +100,12 @@
api.flutter_deps.gh_cli(env, env_prefixes, 'latest')
# Gems dependency requires to run from a flutter_environment.
- checkout_path = api.path['start_dir'].join('flutter sdk')
+ checkout_path = api.path.start_dir.join('flutter sdk')
env, env_prefixes = api.repo_util.flutter_environment(checkout_path)
def GenTests(api):
- checkout_path = api.path['start_dir'].join('flutter sdk')
+ checkout_path = api.path.start_dir.join('flutter sdk')
yield api.test(
'basic',
api.repo_util.flutter_environment_data(checkout_path),
@@ -124,10 +124,10 @@
),
api.swarming.properties(bot_id='flutter-devicelab-mac-1'),
api.path.exists(
- api.path['cache'].join(
+ api.path.cache_dir.join(
'osx_sdk/XCode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift'
),
- api.path['cache'].join(
+ api.path.cache_dir.join(
'osx_sdk/XCode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-5.0'
),
),
@@ -145,10 +145,10 @@
),
api.swarming.properties(bot_id='flutter-devicelab-mac-1'),
api.path.exists(
- api.path['cache'].join(
+ api.path.cache_dir.join(
'osx_sdk/XCode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift'
),
- api.path['cache'].join(
+ api.path.cache_dir.join(
'osx_sdk/XCode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-5.0'
),
),
diff --git a/recipe_modules/gcloud/api.py b/recipe_modules/gcloud/api.py
index 9676fff..8296974 100644
--- a/recipe_modules/gcloud/api.py
+++ b/recipe_modules/gcloud/api.py
@@ -15,7 +15,7 @@
@property
def _gcloud_executable(self):
with self.m.step.nest('ensure gcloud'):
- gcloud_dir = self.m.path['start_dir'].join('gcloud')
+ gcloud_dir = self.m.path.start_dir.join('gcloud')
gcloud_package = 'infra/3pp/tools/gcloud/${platform}'
gcloud = self.m.cipd.EnsureFile().add_package(
gcloud_package, "version:2@428.0.0.chromium.3"
diff --git a/recipe_modules/goma/api.py b/recipe_modules/goma/api.py
index 73a0402..ff2c58e 100644
--- a/recipe_modules/goma/api.py
+++ b/recipe_modules/goma/api.py
@@ -31,7 +31,7 @@
with self.m.context(env={
# Allow user to override from the command line.
"GOMA_TMP_DIR": self.m.context.env.get(
- "GOMA_TMP_DIR", self.m.path["cleanup"].join("goma")),
+ "GOMA_TMP_DIR", self.m.path.cleanup_dir.join("goma")),
"GOMA_USE_LOCAL": False,
}):
with self.m.step.nest("setup goma"):
@@ -63,7 +63,7 @@
return self.m.path.join(self.goma_dir, "goma_stats.json")
def initialize(self):
- self._goma_log_dir = self.m.path["cleanup"]
+ self._goma_log_dir = self.m.path.cleanup_dir
if self.m.platform.is_win:
self._enable_arbitrary_toolchains = True
@@ -75,7 +75,7 @@
return
with self.m.step.nest("ensure goma"), self.m.context(infra_steps=True):
- self._goma_dir = self.m.path["cache"].join("goma", "client")
+ self._goma_dir = self.m.path.cache_dir.join("goma", "client")
if self.m.platform.is_mac:
# On mac always use x64 package.
# TODO(godofredoc): Remove this workaround and unfork once fuchsia has an arm package.
@@ -94,11 +94,11 @@
"GLOG_log_dir":
self._goma_log_dir,
"GOMA_CACHE_DIR":
- self.m.path["cache"].join("goma"),
+ self.m.path.cache_dir.join("goma"),
"GOMA_DEPS_CACHE_FILE":
"goma_deps_cache",
"GOMA_LOCAL_OUTPUT_CACHE_DIR":
- self.m.path["cache"].join("goma", "localoutputcache"),
+ self.m.path.cache_dir.join("goma", "localoutputcache"),
"GOMA_STORE_LOCAL_RUN_OUTPUT":
True,
"GOMA_SERVER_HOST":
diff --git a/recipe_modules/gsutil/tests/full.py b/recipe_modules/gsutil/tests/full.py
index bde8403..e015c39 100644
--- a/recipe_modules/gsutil/tests/full.py
+++ b/recipe_modules/gsutil/tests/full.py
@@ -15,7 +15,7 @@
def RunSteps(api):
api.gsutil.upload_namespaced_file(
BUCKET,
- api.path["cleanup"].join("file"),
+ api.path.cleanup_dir.join("file"),
api.gsutil.join("path", "to", "file"),
metadata={
"Test-Field": "value",
@@ -28,20 +28,20 @@
)
api.gsutil.upload_namespaced_directory(
- api.path["cleanup"].join("dir"),
+ api.path.cleanup_dir.join("dir"),
BUCKET,
"rsync_subpath",
gzip_exts=["html"],
)
api.gsutil.upload_namespaced_directory(
- api.path["cleanup"].join("dir"),
+ api.path.cleanup_dir.join("dir"),
BUCKET,
"cp_subpath",
rsync=False,
gzip_exts=["html"],
)
api.gsutil.upload(
- BUCKET, api.path["cleanup"].join("dir"), "dir", recursive=True
+ BUCKET, api.path.cleanup_dir.join("dir"), "dir", recursive=True
)
api.gsutil.copy(BUCKET, "foo", BUCKET, "bar", recursive=True)
diff --git a/recipe_modules/kms/api.py b/recipe_modules/kms/api.py
index ad9c269..c1462a6 100644
--- a/recipe_modules/kms/api.py
+++ b/recipe_modules/kms/api.py
@@ -15,13 +15,13 @@
input_file (str): path on GCS to encrypted file of the secret relative to 'flutter_configs'.
secret_path (Path): path of decrypted secret.
"""
- cloudkms_dir = self.m.path['start_dir'].join('cloudkms')
+ cloudkms_dir = self.m.path.start_dir.join('cloudkms')
cloudkms_package = 'infra/tools/luci/cloudkms/${platform}'
self.m.cipd.ensure(
cloudkms_dir,
self.m.cipd.EnsureFile().add_package(cloudkms_package, 'latest')
)
- encrypt_file = self.m.path['cleanup'].join(input_file)
+ encrypt_file = self.m.path.cleanup_dir.join(input_file)
self.m.gsutil.download('flutter_configs', input_file, encrypt_file)
cloudkms = cloudkms_dir.join(
'cloudkms.exe' if self.m.platform.name == 'win' else 'cloudkms'
@@ -49,6 +49,6 @@
decrypted file and the value is the path to the encrypted file in gcs.
"""
for k, v in secrets.items():
- secret_path = self.m.path['cleanup'].join(k)
+ secret_path = self.m.path.cleanup_dir.join(k)
self.m.kms.get_secret(v, secret_path)
env[k] = secret_path
diff --git a/recipe_modules/kms/examples/full.py b/recipe_modules/kms/examples/full.py
index b24f366..ce41aae 100644
--- a/recipe_modules/kms/examples/full.py
+++ b/recipe_modules/kms/examples/full.py
@@ -14,7 +14,7 @@
def RunSteps(api):
env = {}
api.kms.decrypt_secrets(env, {'a': 'a'})
- api.kms.get_secret('in', api.path['cleanup'].join('out'))
+ api.kms.get_secret('in', api.path.cleanup_dir.join('out'))
def GenTests(api):
diff --git a/recipe_modules/logs_util/api.py b/recipe_modules/logs_util/api.py
index 5d98bb2..a69379d 100644
--- a/recipe_modules/logs_util/api.py
+++ b/recipe_modules/logs_util/api.py
@@ -22,7 +22,7 @@
# Create a temp folder to keep logs until we can upload them to gcs
# at the end of the execution of the test.
with self.m.step.nest('Initialize logs'):
- logs_path = self.m.path['cleanup'].join('flutter_logs_dir')
+ logs_path = self.m.path.cleanup_dir.join('flutter_logs_dir')
self.m.file.ensure_directory('Ensure %s' % logs_path, logs_path)
env['FLUTTER_LOGS_DIR'] = logs_path
# Ensure that any test outputs, e.g. timelines/timeline summaries are
@@ -45,7 +45,7 @@
# UUID is used in LED and try jobs.
uuid = self.m.uuid.random()
invocation_id = git_hash if git_hash else uuid
- logs_path = self.m.path['cleanup'].join('flutter_logs_dir')
+ logs_path = self.m.path.cleanup_dir.join('flutter_logs_dir')
with self.m.step.nest('process logs'):
self.m.gsutil.upload(
bucket='flutter_logs',
@@ -99,7 +99,7 @@
"""Outputs to sdout the connect of file_path
:param file_path: str
- :return:
+ :return:
"""
if self.m.path.exists(file_path):
self.m.file.read_text('Read log file', file_path)
diff --git a/recipe_modules/logs_util/examples/full.py b/recipe_modules/logs_util/examples/full.py
index dd1c3b6..e2b133a 100644
--- a/recipe_modules/logs_util/examples/full.py
+++ b/recipe_modules/logs_util/examples/full.py
@@ -12,7 +12,7 @@
env = {}
api.logs_util.initialize_logs_collection(env)
api.logs_util.upload_logs('mytaskname')
- s = api.path['cleanup'].join('flutter_logs_dir')
+ s = api.path.cleanup_dir.join('flutter_logs_dir')
api.logs_util.upload_test_metrics(s, 'taskname', 'hash')
api.logs_util.upload_test_metrics('/path/to/tmp/json', 'taskname2')
api.file.write_json('write file', s.join('errors.log'), {'a': 'b'})
diff --git a/recipe_modules/os_utils/api.py b/recipe_modules/os_utils/api.py
index e8d2dd9..793ec8e 100644
--- a/recipe_modules/os_utils/api.py
+++ b/recipe_modules/os_utils/api.py
@@ -74,7 +74,7 @@
sdks are used in the same bot. To prevent those failures we will start
deleting the folder before every task.
"""
- derived_data_path = self.m.path['home'].join(
+ derived_data_path = self.m.path.home_dir.join(
'Library', 'Developer', 'Xcode', 'DerivedData'
)
if self.m.platform.is_mac:
@@ -794,7 +794,7 @@
def _checkout_cocoon(self):
"""Checkout cocoon at HEAD to the cache and return the path."""
- cocoon_path = self.m.path['cache'].join('cocoon')
+ cocoon_path = self.m.path.cache_dir.join('cocoon')
self.m.repo_util.checkout('cocoon', cocoon_path, ref='refs/heads/main')
return cocoon_path
diff --git a/recipe_modules/osx_sdk/api.py b/recipe_modules/osx_sdk/api.py
index 15bf0e4..ec1cfaf 100644
--- a/recipe_modules/osx_sdk/api.py
+++ b/recipe_modules/osx_sdk/api.py
@@ -212,7 +212,7 @@
"""
if devicelab:
return '/opt/flutter/xcode'
- return self.m.path['cache'].join('osx_sdk')
+ return self.m.path.cache_dir.join('osx_sdk')
def _setup_osx_sdk(self, kind, devicelab):
app = None
diff --git a/recipe_modules/osx_sdk/examples/full.py b/recipe_modules/osx_sdk/examples/full.py
index 849fd25..769b42e 100644
--- a/recipe_modules/osx_sdk/examples/full.py
+++ b/recipe_modules/osx_sdk/examples/full.py
@@ -42,7 +42,7 @@
)
)
- sdk_app_path = api.path['cache'].join(
+ sdk_app_path = api.path.cache_dir.join(
'osx_sdk', 'xcode_deadbeef', 'XCode.app'
)
diff --git a/recipe_modules/rbe/api.py b/recipe_modules/rbe/api.py
index 192a918..36d5541 100644
--- a/recipe_modules/rbe/api.py
+++ b/recipe_modules/rbe/api.py
@@ -122,7 +122,7 @@
return self._reclient_path.join("bootstrap")
def _environment(self, working_dir):
- cache_dir = self.m.path["cache"].join("rbe")
+ cache_dir = self.m.path.cache_dir.join("rbe")
deps_cache_dir = cache_dir.join("deps")
self.m.file.ensure_directory("create rbe cache dir", deps_cache_dir)
rbe_server_address = 'pipe://reproxy.pipe' if self.m.platform.is_win else f"unix://{working_dir.join('reproxy.sock')}"
diff --git a/recipe_modules/rbe/tests/full.py b/recipe_modules/rbe/tests/full.py
index 6880462..3e14ce5 100644
--- a/recipe_modules/rbe/tests/full.py
+++ b/recipe_modules/rbe/tests/full.py
@@ -15,27 +15,27 @@
def RunSteps(api):
- with api.rbe(reclient_path=api.path["cleanup"].join("rbe"),
- working_path=api.path["cleanup"].join("rbe")):
+ with api.rbe(reclient_path=api.path.cleanup_dir.join("rbe"),
+ working_path=api.path.cleanup_dir.join("rbe")):
# build something using rbe.
api.step("build", ["echo", "Mission Complete!"])
- with api.rbe(config_path=api.path["cleanup"].join("configs"),
- working_path=api.path["cleanup"].join("rbe")):
+ with api.rbe(config_path=api.path.cleanup_dir.join("configs"),
+ working_path=api.path.cleanup_dir.join("rbe")):
# build something using rbe.
api.step("build", ["echo", "Misison Accomplished!"])
api.rbe.wait_and_collect_logs(
- working_dir=api.path["cleanup"].join("rbe"), collect_rbe_logs_latency=-1
+ working_dir=api.path.cleanup_dir.join("rbe"), collect_rbe_logs_latency=-1
)
api.rbe.set_rbe_triggered(False)
api.rbe.wait_and_collect_logs(
- working_dir=api.path["cleanup"].join("rbe"), collect_rbe_logs_latency=61
+ working_dir=api.path.cleanup_dir.join("rbe"), collect_rbe_logs_latency=61
)
api.rbe.set_rbe_triggered(True)
api.rbe.wait_and_collect_logs(
- working_dir=api.path["cleanup"].join("rbe"), collect_rbe_logs_latency=61
+ working_dir=api.path.cleanup_dir.join("rbe"), collect_rbe_logs_latency=61
)
- api.rbe.prepare_rbe_gn(api.path["cleanup"].join("rbe"), [])
+ api.rbe.prepare_rbe_gn(api.path.cleanup_dir.join("rbe"), [])
def GenTests(api):
diff --git a/recipe_modules/recipe_testing/tests/full.py b/recipe_modules/recipe_testing/tests/full.py
index 99351c3..5803ffc 100644
--- a/recipe_modules/recipe_testing/tests/full.py
+++ b/recipe_modules/recipe_testing/tests/full.py
@@ -25,7 +25,7 @@
def RunSteps(api, props): # pylint: disable=invalid-name
- recipes_path = api.path["start_dir"].join("recipe_path")
+ recipes_path = api.path.start_dir.join("recipe_path")
api.recipe_testing.run_lint(recipes_path, allowlist=r"allowed_module")
api.recipe_testing.run_unit_tests(recipes_path)
diff --git a/recipe_modules/repo_util/api.py b/recipe_modules/repo_util/api.py
index e7f36a2..e42d5a7 100644
--- a/recipe_modules/repo_util/api.py
+++ b/recipe_modules/repo_util/api.py
@@ -40,19 +40,19 @@
def _setup_win_toolchain(self, env):
"""Setups local win toolchain if available."""
if self.m.platform.is_win:
- toolchain_metadata_src = self.m.path['cache'].join(
+ toolchain_metadata_src = self.m.path.cache_dir.join(
'builder', 'vs_toolchain_root', 'data.json'
)
self.m.path.mock_add_paths(toolchain_metadata_src)
if self.m.path.exists(toolchain_metadata_src):
- toolchain_metadata_dst = self.m.path['cache'].join(
+ toolchain_metadata_dst = self.m.path.cache_dir.join(
'builder', 'src', 'build', 'win_toolchain.json'
)
self.m.file.copy(
'copy win_toolchain_metadata', toolchain_metadata_src,
toolchain_metadata_dst
)
- data_file = self.m.path['cache'].join(
+ data_file = self.m.path.cache_dir.join(
'builder', 'vs_toolchain_root', 'data.json'
)
metadata = self.m.file.read_json(
@@ -69,7 +69,7 @@
with self.m.depot_tools.on_path():
if self.m.path.exists(checkout_path):
self.m.file.rmcontents('Clobber cache', checkout_path)
- git_cache_path = self.m.path['cache'].join('git')
+ git_cache_path = self.m.path.cache_dir.join('git')
self.m.path.mock_add_directory(git_cache_path)
if self.m.path.exists(git_cache_path):
self.m.file.rmtree('Clobber git cache', git_cache_path)
@@ -89,7 +89,7 @@
# Set vs_toolchain env to cache it.
if self.m.platform.is_win:
# Set win toolchain root to a directory inside cache/builder to cache it.
- env['DEPOT_TOOLS_WIN_TOOLCHAIN_ROOT'] = self.m.path['cache'].join(
+ env['DEPOT_TOOLS_WIN_TOOLCHAIN_ROOT'] = self.m.path.cache_dir.join(
'builder', 'vs_toolchain_root'
)
env['DEPOT_TOOLS_WIN_TOOLCHAIN'] = 1
@@ -100,10 +100,10 @@
# Calculate if we need to mount the cache and mount it if required.
mount_git = self.m.cache.should_force_mount(
- self.m.path['cache'].join('git')
+ self.m.path.cache_dir.join('git')
)
mount_builder = self.m.cache.should_force_mount(
- self.m.path['cache'].join('builder')
+ self.m.path.cache_dir.join('builder')
)
if (not clobber) and (bucket != OFFICIAL_BUILD_BUCKET):
self.m.cache.mount_cache('builder', force=True)
@@ -412,7 +412,7 @@
'Flutter Environment', status=self.m.step.FAILURE, step_text=msg
)
git_ref = self.m.properties.get('git_ref', '')
- pub_cache_path = self.m.path['start_dir'].join('.pub-cache')
+ pub_cache_path = self.m.path.start_dir.join('.pub-cache')
env = {
# Setup our own pub_cache to not affect other slaves on this machine,
# and so that the pre-populated pub cache is contained in the package.
@@ -506,9 +506,9 @@
'ANDROID_USER_HOME':
str(android_tmp.join('.android')),
'LUCI_WORKDIR':
- str(self.m.path['start_dir']),
+ str(self.m.path.start_dir),
'LUCI_CLEANUP':
- str(self.m.path['cleanup']),
+ str(self.m.path.cleanup_dir),
'REVISION':
self.m.buildbucket.gitiles_commit.id or '',
'CLANG_CRASH_DIAGNOSTICS_DIR':
@@ -562,7 +562,7 @@
'ANDROID_USER_HOME':
str(android_tmp.join('.android')),
'LUCI_WORKDIR':
- str(self.m.path['start_dir']),
+ str(self.m.path.start_dir),
'REVISION':
self.m.buildbucket.gitiles_commit.id or ''
}
diff --git a/recipe_modules/repo_util/examples/full.py b/recipe_modules/repo_util/examples/full.py
index fd7e1c7..9dacfb8 100644
--- a/recipe_modules/repo_util/examples/full.py
+++ b/recipe_modules/repo_util/examples/full.py
@@ -17,7 +17,7 @@
def RunSteps(api):
- flutter_checkout_path = api.path['start_dir'].join('flutter')
+ flutter_checkout_path = api.path.start_dir.join('flutter')
api.repo_util.get_branch(flutter_checkout_path)
is_release_candidate = api.repo_util.is_release_candidate_branch(
flutter_checkout_path
@@ -30,13 +30,13 @@
'flutter', flutter_checkout_path, ref='refs/heads/master'
)
api.repo_util.checkout(
- 'engine', api.path['start_dir'].join('engine'), ref='refs/heads/main'
+ 'engine', api.path.start_dir.join('engine'), ref='refs/heads/main'
)
api.repo_util.checkout(
- 'cocoon', api.path['start_dir'].join('cocoon'), ref='refs/heads/main'
+ 'cocoon', api.path.start_dir.join('cocoon'), ref='refs/heads/main'
)
api.repo_util.checkout(
- 'packages', api.path['start_dir'].join('packages'), ref='refs/heads/main'
+ 'packages', api.path.start_dir.join('packages'), ref='refs/heads/main'
)
# we need an override because all of the previous step calls on checkout directly overrides the ref variable
api.repo_util.checkout(
@@ -48,7 +48,7 @@
env, env_paths = api.repo_util.monorepo_environment(flutter_checkout_path)
env, env_paths = api.repo_util.flutter_environment(flutter_checkout_path)
api.repo_util.in_release_and_main(flutter_checkout_path)
- checkout_path = api.path['start_dir']
+ checkout_path = api.path.start_dir
if api.monorepo.is_monorepo_ci_build or api.monorepo.is_monorepo_try_build:
api.file.ensure_directory('ensure directory', checkout_path)
api.repo_util.monorepo_checkout(checkout_path, {}, {})
@@ -57,7 +57,7 @@
api.repo_util.engine_checkout(checkout_path.join('engine'), {}, {})
with api.context(env=env, env_prefixes=env_paths):
api.repo_util.sdk_checkout_path()
- api.repo_util.get_build(api.path['start_dir'])
+ api.repo_util.get_build(api.path.start_dir)
def GenTests(api):
@@ -293,7 +293,7 @@
# Next line force a fail condition for the bot update
# first execution.
api.path.exists(
- api.path['cache'].join('git'), api.path['start_dir'].join('engine')
+ api.path.cache_dir.join('git'), api.path.start_dir.join('engine')
),
api.step_data(
"Checkout source code.bot_update",
diff --git a/recipe_modules/repo_util/examples/unsupported.py b/recipe_modules/repo_util/examples/unsupported.py
index 8bb58e3..8b60226 100644
--- a/recipe_modules/repo_util/examples/unsupported.py
+++ b/recipe_modules/repo_util/examples/unsupported.py
@@ -9,7 +9,7 @@
def RunSteps(api):
- repo_dir = api.path['start_dir'].join('unsupported_repo')
+ repo_dir = api.path.start_dir.join('unsupported_repo')
api.repo_util.checkout('unsupported_repo', repo_dir)
diff --git a/recipe_modules/repo_util/test_api.py b/recipe_modules/repo_util/test_api.py
index e8ebfcf..4637ad8 100644
--- a/recipe_modules/repo_util/test_api.py
+++ b/recipe_modules/repo_util/test_api.py
@@ -9,7 +9,7 @@
def flutter_environment_data(self, checkout_dir=''):
"""Provides flutter environment data for tests."""
- checkout_path = checkout_dir or self.m.path['checkout']
+ checkout_path = checkout_dir or self.m.path.checkout_dir
dart_bin = checkout_path.join('bin', 'cache', 'dart-sdk', 'bin')
flutter_bin = checkout_path.join('bin')
return self.m.path.exists(dart_bin, flutter_bin)
diff --git a/recipe_modules/shard_util/examples/full.py b/recipe_modules/shard_util/examples/full.py
index b72d786..64da8b6 100644
--- a/recipe_modules/shard_util/examples/full.py
+++ b/recipe_modules/shard_util/examples/full.py
@@ -46,9 +46,9 @@
if build.build_proto.status != common_pb2.SUCCESS:
raise api.step.StepFailure("build %s failed" % build.build_id)
api.shard_util.archive_full_build(
- api.path['start_dir'].join('out', 'host_debug'), 'host_debug'
+ api.path.start_dir.join('out', 'host_debug'), 'host_debug'
)
- api.shard_util.download_full_builds(builds, api.path['cleanup'].join('out'))
+ api.shard_util.download_full_builds(builds, api.path.cleanup_dir.join('out'))
with api.step.nest("launch builds") as presentation:
reqs = api.shard_util.schedule_tests(test_configs, builds, presentation)
api.shard_util.get_base_bucket_name()
diff --git a/recipe_modules/signing/api.py b/recipe_modules/signing/api.py
index 495c19c..df51109 100644
--- a/recipe_modules/signing/api.py
+++ b/recipe_modules/signing/api.py
@@ -106,9 +106,9 @@
infra_step=True,
)
# Only filepath with a .p12 suffix will be recognized.
- p12_suffix_filepath = self.m.path['cleanup'].join('flutter.p12')
+ p12_suffix_filepath = self.m.path.cleanup_dir.join('flutter.p12')
env['P12_SUFFIX_FILEPATH'] = p12_suffix_filepath
- setup_keychain_log_file = self.m.path['cleanup'].join('setup_keychain_logs.txt')
+ setup_keychain_log_file = self.m.path.cleanup_dir.join('setup_keychain_logs.txt')
env['SETUP_KEYCHAIN_LOGS_PATH'] = setup_keychain_log_file
with self.m.context(env=env, env_prefixes=env_prefixes):
diff --git a/recipe_modules/ssh/examples/full.py b/recipe_modules/ssh/examples/full.py
index 9234325..7da125a 100644
--- a/recipe_modules/ssh/examples/full.py
+++ b/recipe_modules/ssh/examples/full.py
@@ -11,7 +11,7 @@
def RunSteps(api):
ssh_paths = api.ssh.ssh_paths
api.ssh.generate_ssh_config(
- private_key_path=ssh_paths.id_private, dest=api.path['cache']
+ private_key_path=ssh_paths.id_private, dest=api.path.cache_dir
)
@@ -19,10 +19,10 @@
yield api.test(
'ssh_paths',
api.path.exists(
- api.path['cache'].join('builder/ssh/id_ed25519.pub'),
- api.path['cache'].join('builder/ssh/id_ed25519'),
- api.path['cache'].join('builder/ssh/ssh_host_key.pub'),
- api.path['cache'].join('builder/ssh/ssh_host_key'),
+ api.path.cache_dir.join('builder/ssh/id_ed25519.pub'),
+ api.path.cache_dir.join('builder/ssh/id_ed25519'),
+ api.path.cache_dir.join('builder/ssh/ssh_host_key.pub'),
+ api.path.cache_dir.join('builder/ssh/ssh_host_key'),
)
)
diff --git a/recipes/cipd/cosign.py b/recipes/cipd/cosign.py
index 99244bf..d6f2822 100644
--- a/recipes/cipd/cosign.py
+++ b/recipes/cipd/cosign.py
@@ -26,7 +26,7 @@
env, env_prefixes, api.properties.get('dependencies', [])
)
- cosign_default_dir = api.path['start_dir'].join('cosign')
+ cosign_default_dir = api.path.start_dir.join('cosign')
cosign_download_uris = GetLatestCosignDownloadUris(api)
diff --git a/recipes/cocoon/cipd.py b/recipes/cocoon/cipd.py
index 610ec80..093b4f2 100644
--- a/recipes/cocoon/cipd.py
+++ b/recipes/cocoon/cipd.py
@@ -26,7 +26,7 @@
# This recipe builds the codesign CIPD package.
def RunSteps(api):
- start_path = api.path['start_dir']
+ start_path = api.path.start_dir
cocoon_dir = start_path.join('cocoon')
cocoon_git_rev = api.repo_util.checkout(
'cocoon',
diff --git a/recipes/cocoon/cocoon.py b/recipes/cocoon/cocoon.py
index 52922d9..89a004d 100644
--- a/recipes/cocoon/cocoon.py
+++ b/recipes/cocoon/cocoon.py
@@ -21,7 +21,7 @@
"""Steps to checkout cocoon, dependencies and execute tests."""
# Collect memory/cpu/process before task execution.
api.os_utils.collect_os_info()
- start_path = api.path['start_dir']
+ start_path = api.path.start_dir
cocoon_path = start_path.join('cocoon')
flutter_path = start_path.join('flutter')
@@ -75,6 +75,6 @@
git_ref='refs/pull/1/head'
),
api.repo_util.flutter_environment_data(
- api.path['start_dir'].join('flutter')
+ api.path.start_dir.join('flutter')
), api.step_data('read yaml.parse', api.json.output(tasks_dict))
)
diff --git a/recipes/devicelab/devicelab_drone.py b/recipes/devicelab/devicelab_drone.py
index 02a1431..389009b 100644
--- a/recipes/devicelab/devicelab_drone.py
+++ b/recipes/devicelab/devicelab_drone.py
@@ -298,7 +298,7 @@
def GenTests(api):
- checkout_path = api.path['cleanup'].join('tmp_tmp_1', 'flutter sdk')
+ checkout_path = api.path.cleanup_dir.join('tmp_tmp_1', 'flutter sdk')
avd_version = "android_31_google_apis_x64.textpb"
avd_cipd_version = "AVDCIPDVERSION"
yield api.test(
diff --git a/recipes/devicelab/devicelab_drone_build_test.py b/recipes/devicelab/devicelab_drone_build_test.py
index b75d025..af50acb 100644
--- a/recipes/devicelab/devicelab_drone_build_test.py
+++ b/recipes/devicelab/devicelab_drone_build_test.py
@@ -219,7 +219,7 @@
def GenTests(api):
- checkout_path = api.path['cleanup'].join('tmp_tmp_1', 'flutter sdk')
+ checkout_path = api.path.cleanup_dir.join('tmp_tmp_1', 'flutter sdk')
yield api.test(
"no-task-name",
api.expect_exception('ValueError'),
diff --git a/recipes/devicelab/devicelab_test_drone.py b/recipes/devicelab/devicelab_test_drone.py
index 8d60262..725ea8b 100644
--- a/recipes/devicelab/devicelab_test_drone.py
+++ b/recipes/devicelab/devicelab_test_drone.py
@@ -281,7 +281,7 @@
def GenTests(api):
- checkout_path = api.path['cleanup'].join('tmp_tmp_1', 'flutter sdk')
+ checkout_path = api.path.cleanup_dir.join('tmp_tmp_1', 'flutter sdk')
yield api.test(
"no-task-name",
api.expect_exception('ValueError'),
diff --git a/recipes/engine/web_engine_framework.py b/recipes/engine/web_engine_framework.py
index 429e0f4..60534b8 100644
--- a/recipes/engine/web_engine_framework.py
+++ b/recipes/engine/web_engine_framework.py
@@ -52,14 +52,14 @@
def GetCheckoutPath(api):
- return api.path['cache'].join('builder', 'src')
+ return api.path.cache_dir.join('builder', 'src')
def RunSteps(api, properties, env_properties):
"""Steps to checkout flutter engine and execute web tests."""
# Collect memory/cpu/process before task execution.
api.os_utils.collect_os_info()
- cache_root = api.path['cache'].join('builder')
+ cache_root = api.path.cache_dir.join('builder')
checkout = GetCheckoutPath(api)
if properties.clobber:
@@ -93,7 +93,7 @@
url = 'https://github.com/flutter/flutter'
# Checkout flutter to run the web integration tests with the local engine.
- flutter_checkout_path = api.path['cache'].join('flutter')
+ flutter_checkout_path = api.path.cache_dir.join('flutter')
api.repo_util.checkout(
'flutter', checkout_path=flutter_checkout_path, url=url, ref=ref
)
@@ -166,7 +166,7 @@
def GenTests(api):
yield api.test(
'linux-pre-submit',
- api.repo_util.flutter_environment_data(api.path['cache'].join('flutter')),
+ api.repo_util.flutter_environment_data(api.path.cache_dir.join('flutter')),
api.properties(
dependencies=[{
'dependency': 'chrome_and_driver', 'version': 'version:96.2'
diff --git a/recipes/engine_v2/builder.py b/recipes/engine_v2/builder.py
index 5175f95..fa640f4 100644
--- a/recipes/engine_v2/builder.py
+++ b/recipes/engine_v2/builder.py
@@ -184,7 +184,7 @@
# Mock data for tests. This is required for the archive api to expand the directory to full path
# of files.
api.path.mock_add_directory(
- api.path['cache'].join(
+ api.path.cache_dir.join(
'builder/src/out/android_jit_release_x86/zip_archives/download.flutter.io'
)
)
@@ -295,9 +295,9 @@
api.osx_sdk.reset_xcode()
api.flutter_bcid.report_stage('start')
- checkout = api.path['cache'].join('builder', 'src')
+ checkout = api.path.cache_dir.join('builder', 'src')
api.file.rmtree('Clobber build output', checkout.join('out'))
- cache_root = api.path['cache'].join('builder')
+ cache_root = api.path.cache_dir.join('builder')
api.file.ensure_directory('Ensure checkout cache', cache_root)
with api.os_utils.make_temp_directory('standalone_repo') as temp_checkout:
@@ -310,13 +310,13 @@
api.flutter_bcid.report_stage('fetch')
if api.monorepo.is_monorepo_ci_build or api.monorepo.is_monorepo_try_build:
env, env_prefixes = api.repo_util.monorepo_environment(
- api.path['cache'].join('builder')
+ api.path.cache_dir.join('builder')
)
api.repo_util.monorepo_checkout(cache_root, env, env_prefixes)
- checkout = api.path['cache'].join('builder', 'engine', 'src')
+ checkout = api.path.cache_dir.join('builder', 'engine', 'src')
else:
env, env_prefixes = api.repo_util.engine_environment(
- api.path['cache'].join('builder')
+ api.path.cache_dir.join('builder')
)
api.repo_util.engine_checkout(
cache_root, env, env_prefixes, gclient_variables=gclient_variables
diff --git a/recipes/engine_v2/cache.py b/recipes/engine_v2/cache.py
index 5141b0e..2dd0834 100644
--- a/recipes/engine_v2/cache.py
+++ b/recipes/engine_v2/cache.py
@@ -11,27 +11,27 @@
def RunSteps(api):
# Sets the engine environment and checkouts the source code.
- checkout = api.path['cache'].join('builder', 'src')
+ checkout = api.path.cache_dir.join('builder', 'src')
api.file.rmtree('Clobber build output', checkout.join('out'))
- builder_root = api.path['cache'].join('builder')
+ builder_root = api.path.cache_dir.join('builder')
api.file.ensure_directory('Ensure checkout cache', builder_root)
env, env_prefixes = api.repo_util.engine_environment(builder_root)
# Engine path is used inconsistently across the engine repo. We'll start
# with [cache]/builder and will adjust it to start using it consistently.
- env['ENGINE_PATH'] = api.path['cache'].join('builder')
+ env['ENGINE_PATH'] = api.path.cache_dir.join('builder')
cache_ttl = api.properties.get('cache_ttl', 3600 * 4)
cache_name = api.properties.get('cache_name')
if api.cache.requires_refresh(cache_name):
api.repo_util.engine_checkout(builder_root, env, env_prefixes)
paths = [
- api.path['cache'].join(p)
+ api.path.cache_dir.join(p)
for p in api.properties.get('cache_paths', [])
]
- api.path.mock_add_directory(api.path['cache'].join('builder', 'fake'))
+ api.path.mock_add_directory(api.path.cache_dir.join('builder', 'fake'))
ignore_paths = [
- api.path['cache'].join(p)
+ api.path.cache_dir.join(p)
for p in api.properties.get('ignore_cache_paths', [])
]
diff --git a/recipes/engine_v2/engine_v2.py b/recipes/engine_v2/engine_v2.py
index e9d8089..7964f14 100644
--- a/recipes/engine_v2/engine_v2.py
+++ b/recipes/engine_v2/engine_v2.py
@@ -62,7 +62,7 @@
# Only check out the repository, not dependencies.
api.flutter_bcid.report_stage(BcidStage.FETCH.value)
- checkout_path = api.path['start_dir'].join(project)
+ checkout_path = api.path.start_dir.join(project)
parent_commit = api.repo_util.checkout(
project,
checkout_path=checkout_path,
@@ -121,7 +121,7 @@
checkout_path and
api.repo_util.is_release_candidate_branch(checkout_path)):
# Generators, archives and codesign require a full engine checkout.
- full_engine_checkout = api.path['cache'].join('builder')
+ full_engine_checkout = api.path.cache_dir.join('builder')
api.file.ensure_directory(
'Ensure full engine checkout folder', full_engine_checkout
)
diff --git a/recipes/engine_v2/tester.py b/recipes/engine_v2/tester.py
index e055730..1d8cb29 100644
--- a/recipes/engine_v2/tester.py
+++ b/recipes/engine_v2/tester.py
@@ -31,7 +31,7 @@
def get_monorepo_framework(api):
- monorepo = api.path['cache'].join('builder', 'monorepo')
+ monorepo = api.path.cache_dir.join('builder', 'monorepo')
api.repo_util.checkout('monorepo', monorepo)
commits = api.file.read_json(
'get commits from monorepo',
@@ -86,7 +86,7 @@
def RunSteps(api):
# Collect memory/cpu/process before task execution.
api.os_utils.collect_os_info()
- builder = api.path['cache'].join('builder')
+ builder = api.path.cache_dir.join('builder')
flutter = builder.join('flutter')
if api.monorepo.is_monorepo_try_build:
framework_ref = 'refs/heads/main'
diff --git a/recipes/engine_v2/tester_engine.py b/recipes/engine_v2/tester_engine.py
index 2b7ff57..e1c3bc7 100644
--- a/recipes/engine_v2/tester_engine.py
+++ b/recipes/engine_v2/tester_engine.py
@@ -134,16 +134,16 @@
def RunSteps(api):
# Sets the engine environment and checkouts the source code.
- checkout = api.path['cache'].join('builder', 'src')
+ checkout = api.path.cache_dir.join('builder', 'src')
api.file.rmtree('Clobber build output', checkout.join('out'))
- cache_root = api.path['cache'].join('builder')
+ cache_root = api.path.cache_dir.join('builder')
api.file.ensure_directory('Ensure checkout cache', cache_root)
env, env_prefixes = api.repo_util.engine_environment(
- api.path['cache'].join('builder')
+ api.path.cache_dir.join('builder')
)
# Engine path is used inconsistently across the engine repo. We'll start
# with [cache]/builder and will adjust it to start using it consistently.
- env['ENGINE_PATH'] = api.path['cache'].join('builder')
+ env['ENGINE_PATH'] = api.path.cache_dir.join('builder')
api.repo_util.engine_checkout(cache_root, env, env_prefixes)
Test(api, checkout, env, env_prefixes)
diff --git a/recipes/firebaselab/firebaselab.py b/recipes/firebaselab/firebaselab.py
index 85dc0cb..1607e35 100644
--- a/recipes/firebaselab/firebaselab.py
+++ b/recipes/firebaselab/firebaselab.py
@@ -28,11 +28,11 @@
def RunSteps(api):
api.os_utils.collect_os_info()
- checkout_path = api.path['start_dir'].join('flutter')
+ checkout_path = api.path.start_dir.join('flutter')
# Bucket to upload apks and logs.
gcs_bucket = 'flutter_firebase_testlab_staging'
# Checkout flutter/flutter.
- checkout_path = api.path['start_dir'].join('flutter')
+ checkout_path = api.path.start_dir.join('flutter')
api.repo_util.checkout(
'flutter',
checkout_path=checkout_path,
@@ -141,8 +141,8 @@
# Download the test logcat files.
logcat_path = '%s/%s/*/logcat' % (task_name, task_id)
- tmp_logcat = api.path['cleanup'].join('logcat')
- api.gsutil.download(gcs_bucket, logcat_path, api.path['cleanup'])
+ tmp_logcat = api.path.cleanup_dir.join('logcat')
+ api.gsutil.download(gcs_bucket, logcat_path, api.path.cleanup_dir)
# Read the logcat files and add them to the step logs.
content = api.file.read_text('read', tmp_logcat)
presentation.logs['logcat'] = content
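
A hedged sketch of the logcat-collection pattern above. The 'flutter/gsutil' DEPS name and the bucket/object arguments are assumptions for illustration; the download(bucket, source, dest) shape follows the call shown in the hunk.

DEPS = [
    'flutter/gsutil',
    'recipe_engine/file',
    'recipe_engine/path',
    'recipe_engine/step',
]


def RunSteps(api):
  # cleanup_dir is per-run scratch space, so downloaded logs vanish with it.
  tmp_logcat = api.path.cleanup_dir.join('logcat')
  api.gsutil.download(
      'flutter_firebase_testlab_staging',  # illustrative bucket
      'task/123/*/logcat',  # illustrative object pattern
      api.path.cleanup_dir
  )
  content = api.file.read_text('read', tmp_logcat)
  # Surface the log on the reading step, mirroring presentation.logs above.
  api.step.active_result.presentation.logs['logcat'] = content
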
diff --git a/recipes/flutter/coverage.py b/recipes/flutter/coverage.py
index 8b91d5b..6d37e17 100644
--- a/recipes/flutter/coverage.py
+++ b/recipes/flutter/coverage.py
@@ -16,7 +16,7 @@
def RunSteps(api):
"""Recipe to collect coverage used by the flutter tool."""
- checkout_path = api.path['start_dir'].join('flutter sdk')
+ checkout_path = api.path.start_dir.join('flutter sdk')
with api.step.nest('checkout source code'):
api.repo_util.checkout(
'flutter',
diff --git a/recipes/flutter/docs.py b/recipes/flutter/docs.py
index 0bfa584..4d90537 100644
--- a/recipes/flutter/docs.py
+++ b/recipes/flutter/docs.py
@@ -100,7 +100,7 @@
# Collect memory/cpu/process before task execution.
api.os_utils.collect_os_info()
- checkout_path = api.path['start_dir'].join('flutter')
+ checkout_path = api.path.start_dir.join('flutter')
api.flutter_bcid.report_stage(BcidStage.FETCH.value)
api.repo_util.checkout(
'flutter',
diff --git a/recipes/flutter/flutter_drone.py b/recipes/flutter/flutter_drone.py
index 72dc9a2..6c5c538 100644
--- a/recipes/flutter/flutter_drone.py
+++ b/recipes/flutter/flutter_drone.py
@@ -69,7 +69,7 @@
# If on macOS, reset Xcode in case a previous build failed to do so.
api.osx_sdk.reset_xcode()
- checkout_path = api.path['start_dir'].join('flutter')
+ checkout_path = api.path.start_dir.join('flutter')
api.flutter_bcid.report_stage(BcidStage.FETCH.value)
api.repo_util.checkout(
'flutter',
diff --git a/recipes/infra/ci_yaml.py b/recipes/infra/ci_yaml.py
index 7f293a3..b69d4b5 100644
--- a/recipes/infra/ci_yaml.py
+++ b/recipes/infra/ci_yaml.py
@@ -32,7 +32,7 @@
def RunSteps(api):
"""Steps to checkout infra, dependencies, and generate new config."""
- start_path = api.path['start_dir']
+ start_path = api.path.start_dir
cocoon_path = start_path.join('cocoon')
flutter_path = start_path.join('flutter')
infra_path = start_path.join('infra')
@@ -137,7 +137,7 @@
revision='abc123'
), api.properties(git_branch='main', git_repo='engine'),
api.repo_util.flutter_environment_data(
- api.path['start_dir'].join('flutter')
+ api.path.start_dir.join('flutter')
),
api.step_data(
'generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')
@@ -151,7 +151,7 @@
revision='abc123'
), api.properties(git_branch='dev', git_repo='engine'),
api.repo_util.flutter_environment_data(
- api.path['start_dir'].join('flutter')
+ api.path.start_dir.join('flutter')
),
api.step_data(
'generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')
@@ -165,7 +165,7 @@
revision='abc123'
), api.properties(git_branch='main', git_repo='engine'),
api.repo_util.flutter_environment_data(
- api.path['start_dir'].join('flutter')
+ api.path.start_dir.join('flutter')
),
api.step_data(
'generate jspb', stdout=api.raw_io.output_text('{"hello": "world"}')
@@ -179,6 +179,6 @@
),
api.properties(git_repo='engine'),
api.repo_util.flutter_environment_data(
- api.path['start_dir'].join('flutter')
+ api.path.start_dir.join('flutter')
),
)
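
For illustration, a minimal sketch of start_dir-based test data, assuming the 'flutter/repo_util' DEPS name; arguments to checkout() beyond checkout_path (url, ref) are elided here.

DEPS = [
    'flutter/repo_util',
    'recipe_engine/path',
    'recipe_engine/properties',
]


def RunSteps(api):
  flutter_path = api.path.start_dir.join('flutter')
  api.repo_util.checkout('flutter', checkout_path=flutter_path)


def GenTests(api):
  yield api.test(
      'basic',
      api.properties(git_branch='main'),
      # The environment fixture is keyed off the same start_dir checkout path.
      api.repo_util.flutter_environment_data(api.path.start_dir.join('flutter')),
  )
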
diff --git a/recipes/infra/luci_config.py b/recipes/infra/luci_config.py
index 490ded5..79c3f73 100644
--- a/recipes/infra/luci_config.py
+++ b/recipes/infra/luci_config.py
@@ -15,7 +15,7 @@
def RunSteps(api):
- start_path = api.path['start_dir']
+ start_path = api.path.start_dir
infra_path = start_path.join('infra')
# Checkout flutter/infra
api.git_checkout('https://flutter.googlesource.com/infra', path=infra_path)
diff --git a/recipes/infra/test_ownership.py b/recipes/infra/test_ownership.py
index b57cb44..6dede85 100644
--- a/recipes/infra/test_ownership.py
+++ b/recipes/infra/test_ownership.py
@@ -22,7 +22,7 @@
def RunSteps(api):
"""Steps to checkout cocoon, dependencies and execute tests."""
- start_path = api.path['start_dir']
+ start_path = api.path.start_dir
cocoon_path = start_path.join('cocoon')
flutter_path = start_path.join('flutter')
@@ -63,7 +63,7 @@
yield api.test(
'basic',
api.repo_util.flutter_environment_data(
- api.path['start_dir'].join('flutter')
+ api.path.start_dir.join('flutter')
),
api.properties(
git_ref='refs/pull/123/head',
diff --git a/recipes/ios_usb_dependencies/ios-usb-dependencies.py b/recipes/ios_usb_dependencies/ios-usb-dependencies.py
index 0759ddd..8499183 100644
--- a/recipes/ios_usb_dependencies/ios-usb-dependencies.py
+++ b/recipes/ios_usb_dependencies/ios-usb-dependencies.py
@@ -260,7 +260,7 @@
update_library_path(bool): a flag indicating whether there are LIBRARY_PATH updates.
update_pkg_config_path(bool): a flag indicating whether there are PKG_CONFIG_PATH updates.
"""
- work_dir = api.path['start_dir']
+ work_dir = api.path.start_dir
src_dir = work_dir.join('src')
package_src_dir = work_dir.join('src').join(package_name)
package_install_dir = work_dir.join('src').join('%s_install' % package_name)
@@ -343,7 +343,7 @@
yield api.test(
'basic',
api.path.exists(
- api.path['start_dir'].join('src').join('ios-deploy'
+ api.path.start_dir.join('src').join('ios-deploy'
).join('commit_sha.txt'),
),
api.step_data(
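
A small sketch of the path-existence test hook used above: GenTests marks the start_dir-rooted file as present with api.path.exists, and RunSteps checks the same Path. The echo step is illustrative only.

DEPS = [
    'recipe_engine/path',
    'recipe_engine/step',
]


def RunSteps(api):
  marker = api.path.start_dir.join('src', 'ios-deploy', 'commit_sha.txt')
  if api.path.exists(marker):
    api.step('report marker', ['echo', 'commit_sha.txt present'])


def GenTests(api):
  yield api.test('basic')
  yield api.test(
      'marker_present',
      api.path.exists(
          api.path.start_dir.join('src', 'ios-deploy', 'commit_sha.txt')
      ),
  )
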
diff --git a/recipes/packages/packages.py b/recipes/packages/packages.py
index 57c9f37..884badb 100644
--- a/recipes/packages/packages.py
+++ b/recipes/packages/packages.py
@@ -31,8 +31,8 @@
# If on macOS, reset Xcode in case a previous build failed to do so.
api.osx_sdk.reset_xcode()
- packages_checkout_path = api.path['start_dir'].join('packages')
- flutter_checkout_path = api.path['start_dir'].join('flutter')
+ packages_checkout_path = api.path.start_dir.join('packages')
+ flutter_checkout_path = api.path.start_dir.join('flutter')
channel = api.properties.get('channel')
version_file_name = api.properties.get('version_file', '')
with api.step.nest('checkout source code'):
@@ -149,7 +149,7 @@
def GenTests(api):
- flutter_path = api.path['start_dir'].join('flutter')
+ flutter_path = api.path.start_dir.join('flutter')
tasks_dict = {
'tasks': [{'name': 'one', 'script': 'myscript', 'args': ['arg1', 'arg2']}]
}
@@ -173,7 +173,7 @@
**{'$flutter/osx_sdk': {'sdk_version': 'deadbeef',}},
), api.step_data('read yaml.parse', api.json.output(tasks_dict))
)
- checkout_path = api.path['cleanup'].join('tmp_tmp_1', 'flutter sdk')
+ checkout_path = api.path.cleanup_dir.join('tmp_tmp_1', 'flutter sdk')
yield api.test(
"emulator-test", api.repo_util.flutter_environment_data(flutter_path),
api.properties(
diff --git a/recipes/packaging/packaging.py b/recipes/packaging/packaging.py
index c8466e3..4e9c723 100644
--- a/recipes/packaging/packaging.py
+++ b/recipes/packaging/packaging.py
@@ -34,7 +34,7 @@
@contextmanager
def Install7za(api):
if api.platform.is_win:
- sevenzip_cache_dir = api.path['cache'].join('builder', '7za')
+ sevenzip_cache_dir = api.path.cache_dir.join('builder', '7za')
api.cipd.ensure(
sevenzip_cache_dir,
api.cipd.EnsureFile().add_package(
@@ -57,7 +57,7 @@
"""
flutter_executable = 'flutter' if not api.platform.is_win else 'flutter.bat'
dart_executable = 'dart' if not api.platform.is_win else 'dart.exe'
- work_dir = api.path['start_dir'].join('archive')
+ work_dir = api.path.start_dir.join('archive')
api.step('flutter doctor', [flutter_executable, 'doctor'])
api.step(
'download dependencies', [flutter_executable, 'update-packages', '-v']
@@ -66,7 +66,7 @@
api.file.rmtree('clean archive work directory', work_dir)
api.file.ensure_directory('(re)create archive work directory', work_dir)
with Install7za(api):
- with api.context(cwd=api.path['start_dir']):
+ with api.context(cwd=api.path.start_dir):
step_args = [
dart_executable, packaging_script,
'--temp_dir=%s' % work_dir,
@@ -193,7 +193,7 @@
assert git_ref
api.flutter_bcid.report_stage(BcidStage.FETCH.value)
- checkout_path = api.path['start_dir'].join('flutter')
+ checkout_path = api.path.start_dir.join('flutter')
git_url = api.properties.get(
'git_url'
) or 'https://flutter.googlesource.com/mirrors/flutter'
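
A minimal sketch of the cwd pattern above, assuming standard recipe_engine modules; the Dart script name and flag are placeholders rather than the repo's real packaging entry point.

DEPS = [
    'recipe_engine/context',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/step',
]


def RunSteps(api):
  dart_executable = 'dart' if not api.platform.is_win else 'dart.exe'
  work_dir = api.path.start_dir.join('archive')
  # Run the packaging tool from start_dir so relative paths resolve there.
  with api.context(cwd=api.path.start_dir):
    api.step(
        'package archive',
        [dart_executable, 'prepare_package.dart', '--temp_dir=%s' % work_dir],
    )
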
diff --git a/recipes/pub_autoroller/pub_autoroller.py b/recipes/pub_autoroller/pub_autoroller.py
index 38da95f..9492f26 100644
--- a/recipes/pub_autoroller/pub_autoroller.py
+++ b/recipes/pub_autoroller/pub_autoroller.py
@@ -16,7 +16,7 @@
def RunSteps(api):
assert api.platform.is_linux, 'This recipe should only be run once per commit, on Linux'
- checkout_path = api.path['start_dir'].join('flutter')
+ checkout_path = api.path.start_dir.join('flutter')
api.repo_util.checkout(
'flutter',
checkout_path=checkout_path,
diff --git a/recipes/recipes.py b/recipes/recipes.py
index 78fec69..8886fd2 100755
--- a/recipes/recipes.py
+++ b/recipes/recipes.py
@@ -102,7 +102,7 @@
def RunSteps(api, remote, unittest_only):
- checkout_path = api.path['start_dir'].join('recipes')
+ checkout_path = api.path.start_dir.join('recipes')
api.git_checkout(remote, path=checkout_path)
with api.context(cwd=checkout_path):
api.git('log', 'log', '--oneline', '-n', '10')
diff --git a/recipes/release/release_builder.py b/recipes/release/release_builder.py
index e5dc355..a30fcaf 100644
--- a/recipes/release/release_builder.py
+++ b/recipes/release/release_builder.py
@@ -85,7 +85,7 @@
'git_repo'
) or api.buildbucket.gitiles_commit.project
repository_parts = repository.split('/')
- checkout_path = api.path['start_dir'].join(*repository_parts)
+ checkout_path = api.path.start_dir.join(*repository_parts)
git_ref = api.properties.get('git_ref') or api.buildbucket.gitiles_commit.ref
git_url = api.properties.get('git_url') or REPOS[repository]
api.repo_util.checkout(
diff --git a/recipes/release/release_publish.py b/recipes/release/release_publish.py
index f29928c..d2d230d 100644
--- a/recipes/release/release_publish.py
+++ b/recipes/release/release_publish.py
@@ -59,8 +59,8 @@
)
assert git_branch and tag and release_channel in ('stable', 'beta')
- flutter_checkout = api.path['start_dir'].join('flutter')
- engine_checkout = api.path['start_dir'].join('engine')
+ flutter_checkout = api.path.start_dir.join('flutter')
+ engine_checkout = api.path.start_dir.join('engine')
flutter_git_url = 'https://github.com/flutter/flutter'
engine_git_url = 'https://github.com/flutter/engine'
@@ -81,7 +81,7 @@
with api.step.nest('checkout engine release branch'):
engine_tot = api.repo_util.checkout(
'engine',
- api.path['start_dir'].join('engine'),
+ api.path.start_dir.join('engine'),
url=engine_git_url,
ref='refs/heads/%s' % git_branch,
)
@@ -124,7 +124,7 @@
rel_hash = flutter_rel_hash if repo == 'flutter' else engine_version
with api.context(env=env, env_prefixes=env_prefixes, cwd=checkout):
- token_decrypted = api.path['cleanup'].join('token.txt')
+ token_decrypted = api.path.cleanup_dir.join('token.txt')
api.kms.get_secret(
'flutter-release-github-token.encrypted', token_decrypted
)
@@ -153,7 +153,7 @@
def GenTests(api):
- checkout_path = api.path['start_dir'].join('flutter')
+ checkout_path = api.path.start_dir.join('flutter')
for tag in ('1.2.3-4.5.pre', '1.2.3'):
for release_channel in ('stable', 'beta'):
for force in ('True', 'False'):
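
A short sketch of the token-handling pattern above; 'flutter/kms' is an assumed DEPS name, while the get_secret arguments mirror the call in the hunk.

DEPS = [
    'flutter/kms',
    'recipe_engine/path',
]


def RunSteps(api):
  # Decrypted secrets land in cleanup_dir so they are wiped with the run.
  token_decrypted = api.path.cleanup_dir.join('token.txt')
  api.kms.get_secret(
      'flutter-release-github-token.encrypted', token_decrypted
  )
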
diff --git a/recipes/tricium/tricium.py b/recipes/tricium/tricium.py
index 4ed64ce..9eca520 100644
--- a/recipes/tricium/tricium.py
+++ b/recipes/tricium/tricium.py
@@ -41,7 +41,7 @@
def RunSteps(api, props):
with api.context(infra_steps=True):
- checkout_path = api.path['start_dir'].join('recipes')
+ checkout_path = api.path.start_dir.join('recipes')
api.git_checkout(REMOTE, path=checkout_path)
# tricium is expecting a dictionary as a checkout.
checkout = _CheckoutResult(checkout_path, '')
@@ -50,7 +50,7 @@
if props.cipd_packages:
with api.step.nest("ensure_packages"):
with api.context(infra_steps=True):
- cipd_dir = api.path['start_dir'].join("cipd")
+ cipd_dir = api.path.start_dir.join("cipd")
pkgs = api.cipd.EnsureFile()
for package in props.cipd_packages:
pkgs.add_package(package.name, package.version, subdir=package.subdir)