blob: 28c821085db2cb0ec926fd557a4ba11da8213a6c [file] [log] [blame]
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe to run firebase lab tests.
# This recipe uses the standard flutter dependencies model and a single property
# task_name to identify the test to run.
from contextlib import contextmanager
import re
DEPS = [
def RunSteps(api):
checkout_path = api.path['start_dir'].join('flutter')
gcs_bucket = 'flutter_firebase_testlab'
env, env_prefixes = api.repo_util.flutter_environment(checkout_path)
deps ='dependencies', [])
api.flutter_deps.required_deps(env, env_prefixes, deps)
task_name ='task_name')
physical_devices = [
# Physical devices - use only highly available devices to avoid timeouts.
# Pixel 3
# Pixel 5
# Moto Z XT1650
virtual_devices = [
# Virtual devices for API level coverage.
# SDK 20 not available virtually or physically.
# SDK 24 is run on a physical griffin/Moto Z above.
# SDK 28 is run on a physical blueline/Pixel 3 above.
# SDK 30 is run on a physical redfin/Pixel 5 above.
test_configurations = (
'Build appbundle', [
'flutter', 'build', 'appbundle', '--target-platform',
], 'build/app/outputs/bundle/release/app-release.aab',
'Build apk', [
'flutter', 'build', 'apk', '--debug', '--target-platform',
], 'build/app/outputs/flutter-apk/app-debug.apk', virtual_devices
with api.context(env=env, env_prefixes=env_prefixes, cwd=checkout_path):
api.step('flutter doctor', ['flutter', 'doctor', '-v'])
'download dependencies', ['flutter', 'update-packages'],
test_path = checkout_path.join('dev', 'integration_tests', task_name)
with api.step.nest('test_execution') as presentation:
with api.context(env=env, env_prefixes=env_prefixes, cwd=test_path):
task_id = api.swarming.task_id
for step_name, build_command, binary, devices in test_configurations:
api.step(step_name, build_command)
firebase_cmd = [
'firebase', 'test', 'android', 'run', '--type', 'robo', '--app',
binary, '--timeout', '2m',
'--results-bucket=gs://%s' % gcs_bucket,
'--results-dir=%s/%s' % (task_name, task_id)
] + devices
# See
# If the firebase command fails with 1, it's likely an HTTP issue that
# will resolve on a retry. If it fails on 15 or 20, it's explicitly
# an infra failure on the FTL side, so we should just retry.
def run_firebase():
return api.gcloud(*firebase_cmd)
api.retry.wrap(run_firebase, max_attempts=3, retriable_ret=(1, 15, 20))
logcat_path = '%s/%s/*/logcat' % (task_name, task_id)
tmp_logcat = api.path['cleanup'].join('logcat'), logcat_path, api.path['cleanup'])
content = api.file.read_text('read', tmp_logcat)
presentation.logs['logcat'] = content
api.step('analyze_logcat', ['grep', 'E/flutter', tmp_logcat], ok_ret=(1,))
def GenTests(api):
yield api.test(
'basic', api.repo_util.flutter_environment_data(),'the_task')
yield api.test('failure 15',
api.repo_util.flutter_environment_data()) + api.step_data(
'test_execution.gcloud firebase', retcode=15
yield api.test('failure 10',
api.repo_util.flutter_environment_data()) + api.step_data(
'test_execution.gcloud firebase', retcode=10