[web_benchmarks] Migrate to null safety (#879)
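
Migrates the web_benchmarks package (and its test app under
testing/test_app) to null safety. The changes follow the standard
migration patterns:

* `@required` from package:meta becomes the built-in `required`
  keyword, and the now-unneeded `package:meta` imports are removed.
* Values that may legitimately be absent become nullable (for example
  `ChromeOptions.debugPort`), while non-nullable fields that are only
  assigned after construction become `late` (for example the
  recorders' `Profile`).
* Dependencies move to their null-safe releases, the Dart SDK
  constraint becomes ">=2.17.0 <3.0.0", and the package version is
  bumped to 0.1.0 because required parameters are now non-nullable, a
  breaking change for callers that previously passed null.

As a representative sketch of the constructor migration (adapted from
benchmark_result.dart in this diff):

    // Before (pre-null-safety): null could be passed at runtime, so
    // an assert guarded against it.
    BenchmarkScore({
      @required this.metric,
      @required this.value,
    }) : assert(metric != null && value != null);

    // After: `required` makes the parameters mandatory and their
    // non-nullable types reject null at compile time. (The assert is
    // kept by this change, though it is now redundant.)
    BenchmarkScore({
      required this.metric,
      required this.value,
    }) : assert(metric != null && value != null);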

diff --git a/packages/web_benchmarks/CHANGELOG.md b/packages/web_benchmarks/CHANGELOG.md
index 3fcfe56..13e6cfd 100644
--- a/packages/web_benchmarks/CHANGELOG.md
+++ b/packages/web_benchmarks/CHANGELOG.md
@@ -1,3 +1,9 @@
+## 0.1.0
+
+* Migrates to null safety.
+* **BREAKING CHANGES**:
+    * Required parameters are non-nullable.
+
 ## 0.0.7+1
 
 * Updates text theme parameters to avoid deprecation issues.
diff --git a/packages/web_benchmarks/lib/client.dart b/packages/web_benchmarks/lib/client.dart
index e6571da..ae0c453 100644
--- a/packages/web_benchmarks/lib/client.dart
+++ b/packages/web_benchmarks/lib/client.dart
@@ -7,8 +7,6 @@
 import 'dart:html' as html;
 import 'dart:math' as math;
 
-import 'package:meta/meta.dart';
-
 import 'src/common.dart';
 import 'src/recorder.dart';
 export 'src/recorder.dart';
@@ -20,7 +18,7 @@
 ///
 /// When adding a new benchmark, add it to this map. Make sure that the name
 /// of your benchmark is unique.
-Map<String, RecorderFactory> _benchmarks;
+late Map<String, RecorderFactory> _benchmarks;
 
 final LocalBenchmarkServerClient _client = LocalBenchmarkServerClient();
 
@@ -50,8 +48,8 @@
   html.window.location.reload();
 }
 
-Future<void> _runBenchmark(String benchmarkName) async {
-  final RecorderFactory recorderFactory = _benchmarks[benchmarkName];
+Future<void> _runBenchmark(String? benchmarkName) async {
+  final RecorderFactory? recorderFactory = _benchmarks[benchmarkName];
 
   if (recorderFactory == null) {
     _fallbackToManual('Benchmark $benchmarkName not found.');
@@ -105,7 +103,7 @@
 }
 
 void _fallbackToManual(String error) {
-  html.document.body.appendHtml('''
+  html.document.body!.appendHtml('''
     <div id="manual-panel">
       <h3>$error</h3>
 
@@ -122,9 +120,10 @@
         ..allowInlineStyles());
 
   for (final String benchmarkName in _benchmarks.keys) {
-    final html.Element button = html.document.querySelector('#$benchmarkName');
+    // Find the button element added above.
+    final html.Element button = html.document.querySelector('#$benchmarkName')!;
     button.addEventListener('click', (_) {
-      final html.Element manualPanel =
+      final html.Element? manualPanel =
           html.document.querySelector('#manual-panel');
       manualPanel?.remove();
       _runBenchmark(benchmarkName);
@@ -134,12 +133,14 @@
 
 /// Visualizes results on the Web page for manual inspection.
 void _printResultsToScreen(Profile profile) {
-  html.document.body.innerHtml = '<h2>${profile.name}</h2>';
+  final html.BodyElement body = html.document.body!;
+
+  body.innerHtml = '<h2>${profile.name}</h2>';
 
   profile.scoreData.forEach((String scoreKey, Timeseries timeseries) {
-    html.document.body.appendHtml('<h2>$scoreKey</h2>');
-    html.document.body.appendHtml('<pre>${timeseries.computeStats()}</pre>');
-    html.document.body.append(TimeseriesVisualization(timeseries).render());
+    body.appendHtml('<h2>$scoreKey</h2>');
+    body.appendHtml('<pre>${timeseries.computeStats()}</pre>');
+    body.append(TimeseriesVisualization(timeseries).render());
   });
 }
 
@@ -149,7 +150,7 @@
   TimeseriesVisualization(this._timeseries) {
     _stats = _timeseries.computeStats();
     _canvas = html.CanvasElement();
-    _screenWidth = html.window.screen.width;
+    _screenWidth = html.window.screen!.width!;
     _canvas.width = _screenWidth;
     _canvas.height = (_kCanvasHeight * html.window.devicePixelRatio).round();
     _canvas.style
@@ -171,13 +172,13 @@
   static const double _kCanvasHeight = 200;
 
   final Timeseries _timeseries;
-  TimeseriesStats _stats;
-  html.CanvasElement _canvas;
-  html.CanvasRenderingContext2D _ctx;
-  int _screenWidth;
+  late TimeseriesStats _stats;
+  late html.CanvasElement _canvas;
+  late html.CanvasRenderingContext2D _ctx;
+  late int _screenWidth;
 
   // Used to normalize benchmark values to chart height.
-  double _maxValueChartRange;
+  late double _maxValueChartRange;
 
   /// Converts a sample value to vertical canvas coordinates.
   ///
@@ -205,7 +206,7 @@
       final AnnotatedSample sample = _stats.samples[i];
 
       if (sample.isWarmUpValue) {
-        // Put gray background behing warm-up samples.
+        // Put gray background behind warm-up samples.
         _ctx.fillStyle = 'rgba(200,200,200,1)';
         _ctx.fillRect(xOffset, 0, barWidth, _normalized(_maxValueChartRange));
       }
@@ -262,7 +263,7 @@
   /// This happens when you run benchmarks using plain `flutter run` rather
   /// than the devicelab test harness. The test harness spins up a special
   /// server with an API for automatically picking the next benchmark to run.
-  bool isInManualMode;
+  late bool isInManualMode;
 
   /// Asks the local server for the name of the next benchmark to run.
   ///
@@ -284,7 +285,7 @@
     }
 
     isInManualMode = false;
-    return request.responseText;
+    return request.responseText ?? kManualFallback;
   }
 
   void _checkNotManualMode() {
@@ -298,7 +299,7 @@
   /// This uses the chrome://tracing tracer, which is not available from within
   /// the page itself, and therefore must be controlled from outside using the
   /// DevTools Protocol.
-  Future<void> startPerformanceTracing(String benchmarkName) async {
+  Future<void> startPerformanceTracing(String? benchmarkName) async {
     _checkNotManualMode();
     await html.HttpRequest.request(
       '/start-performance-tracing?label=$benchmarkName',
@@ -364,13 +365,12 @@
   /// crash on 404, which we use to detect `flutter run`.
   Future<html.HttpRequest> _requestXhr(
     String url, {
-    @required String method,
-    @required String mimeType,
-    @required dynamic sendData,
+    required String method,
+    required String mimeType,
+    required dynamic sendData,
   }) {
     final Completer<html.HttpRequest> completer = Completer<html.HttpRequest>();
     final html.HttpRequest xhr = html.HttpRequest();
-    method ??= 'GET';
     xhr.open(method, url, async: true);
     xhr.overrideMimeType(mimeType);
     xhr.onLoad.listen((html.ProgressEvent e) {
diff --git a/packages/web_benchmarks/lib/server.dart b/packages/web_benchmarks/lib/server.dart
index a2be754..7327e1a 100644
--- a/packages/web_benchmarks/lib/server.dart
+++ b/packages/web_benchmarks/lib/server.dart
@@ -6,7 +6,6 @@
 import 'dart:io' as io;
 
 import 'package:logging/logging.dart';
-import 'package:meta/meta.dart';
 
 import 'src/benchmark_result.dart';
 import 'src/runner.dart';
@@ -41,9 +40,9 @@
 /// If [headless] is true, runs Chrome without UI. In particular, this is
 /// useful in environments (e.g. CI) that don't have a display.
 Future<BenchmarkResults> serveWebBenchmark({
-  @required io.Directory benchmarkAppDirectory,
-  @required String entryPoint,
-  @required bool useCanvasKit,
+  required io.Directory benchmarkAppDirectory,
+  required String entryPoint,
+  required bool useCanvasKit,
   int benchmarkServerPort = defaultBenchmarkServerPort,
   int chromeDebugPort = defaultChromeDebugPort,
   bool headless = true,
diff --git a/packages/web_benchmarks/lib/src/benchmark_result.dart b/packages/web_benchmarks/lib/src/benchmark_result.dart
index 255d5ca..f432629 100644
--- a/packages/web_benchmarks/lib/src/benchmark_result.dart
+++ b/packages/web_benchmarks/lib/src/benchmark_result.dart
@@ -2,16 +2,14 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-import 'package:meta/meta.dart';
-
 /// A single benchmark score value collected from the benchmark.
 class BenchmarkScore {
   /// Creates a benchmark score.
   ///
   /// [metric] and [value] must not be null.
   BenchmarkScore({
-    @required this.metric,
-    @required this.value,
+    required this.metric,
+    required this.value,
   }) : assert(metric != null && value != null);
 
   /// The name of the metric that this score is categorized under.
diff --git a/packages/web_benchmarks/lib/src/browser.dart b/packages/web_benchmarks/lib/src/browser.dart
index 3a92cf9..b6b9cf5 100644
--- a/packages/web_benchmarks/lib/src/browser.dart
+++ b/packages/web_benchmarks/lib/src/browser.dart
@@ -7,7 +7,6 @@
 import 'dart:io' as io;
 import 'dart:math' as math;
 
-import 'package:meta/meta.dart';
 import 'package:path/path.dart' as path;
 import 'package:webkit_inspection_protocol/webkit_inspection_protocol.dart';
 
@@ -16,22 +15,20 @@
 /// Options passed to Chrome when launching it.
 class ChromeOptions {
   /// Creates chrome options.
-  ///
-  /// [windowWidth], [windowHeight], and [headless] must not be null.
   ChromeOptions({
     this.userDataDirectory,
     this.url,
     this.windowWidth = 1024,
     this.windowHeight = 1024,
-    this.headless,
+    required this.headless,
     this.debugPort,
   });
 
   /// If not null, passed as `--user-data-dir`.
-  final String userDataDirectory;
+  final String? userDataDirectory;
 
   /// If not null, launches a Chrome tab at this URL.
-  final String url;
+  final String? url;
 
   /// The width of the Chrome window.
   ///
@@ -52,7 +49,7 @@
   /// If null, Chrome is launched without debugging. When running in headless
   /// mode without a debug port, Chrome quits immediately. For most tests it is
   /// typical to set [headless] to true and set a non-null debug port.
-  final int debugPort;
+  final int? debugPort;
 }
 
 /// A function called when the Chrome process encounters an error.
@@ -82,7 +79,7 @@
   /// process encounters an error. In particular, [onError] is called when the
   /// Chrome process exits prematurely, i.e. before [stop] is called.
   static Future<Chrome> launch(ChromeOptions options,
-      {String workingDirectory, @required ChromeErrorCallback onError}) async {
+      {String? workingDirectory, required ChromeErrorCallback onError}) async {
     if (!io.Platform.isWindows) {
       final io.ProcessResult versionResult = io.Process.runSync(
           _findSystemChromeExecutable(), const <String>['--version']);
@@ -90,12 +87,13 @@
     } else {
       print('Launching Chrome...');
     }
-
+    final String? url = options.url;
     final bool withDebugging = options.debugPort != null;
+
     final List<String> args = <String>[
       if (options.userDataDirectory != null)
         '--user-data-dir=${options.userDataDirectory}',
-      if (options.url != null) options.url,
+      if (url != null) url,
       if (io.Platform.environment['CHROME_NO_SANDBOX'] == 'true')
         '--no-sandbox',
       if (options.headless) '--headless',
@@ -116,10 +114,11 @@
       workingDirectory: workingDirectory,
     );
 
-    WipConnection debugConnection;
-    if (withDebugging) {
+    WipConnection? debugConnection;
+    final int? debugPort = options.debugPort;
+    if (debugPort != null) {
       debugConnection =
-          await _connectToChromeDebugPort(chromeProcess, options.debugPort);
+          await _connectToChromeDebugPort(chromeProcess, debugPort);
     }
 
     return Chrome._(chromeProcess, onError, debugConnection, options.headless);
@@ -127,12 +126,12 @@
 
   final io.Process _chromeProcess;
   final ChromeErrorCallback _onError;
-  final WipConnection _debugConnection;
+  final WipConnection? _debugConnection;
   bool _isStopped = false;
 
-  Completer<void> _tracingCompleter;
-  StreamSubscription<WipEvent> _tracingSubscription;
-  List<Map<String, dynamic>> _tracingData;
+  Completer<void>? _tracingCompleter;
+  StreamSubscription<WipEvent>? _tracingSubscription;
+  List<Map<String, dynamic>>? _tracingData;
 
   /// Starts recording a performance trace.
   ///
@@ -140,7 +139,7 @@
   /// [endRecordingPerformance] before starting a new tracing session.
   ///
   /// The [label] is for debugging convenience.
-  Future<void> beginRecordingPerformance(String label) async {
+  Future<void> beginRecordingPerformance(String? label) async {
     if (_tracingCompleter != null) {
       throw StateError(
           'Cannot start a new performance trace. A tracing session labeled '
@@ -152,25 +151,26 @@
     // Subscribe to tracing events prior to calling "Tracing.start". Otherwise,
     // we'll miss tracing data.
     _tracingSubscription =
-        _debugConnection.onNotification.listen((WipEvent event) {
+        _debugConnection?.onNotification.listen((WipEvent event) {
       // We receive data as a sequence of "Tracing.dataCollected" followed by
       // "Tracing.tracingComplete" at the end. Until "Tracing.tracingComplete"
       // is received, the data may be incomplete.
       if (event.method == 'Tracing.tracingComplete') {
-        _tracingCompleter.complete();
-        _tracingSubscription.cancel();
+        _tracingCompleter!.complete();
+        _tracingSubscription?.cancel();
         _tracingSubscription = null;
       } else if (event.method == 'Tracing.dataCollected') {
-        final dynamic value = event.params['value'];
+        final dynamic value = event.params!['value'];
         if (value is! List) {
           throw FormatException(
               '"Tracing.dataCollected" returned malformed data. '
               'Expected a List but got: ${value.runtimeType}');
         }
-        _tracingData.addAll(event.params['value'].cast<Map<String, dynamic>>());
+        _tracingData
+            ?.addAll(event.params!['value'].cast<Map<String, dynamic>>());
       }
     });
-    await _debugConnection.sendCommand('Tracing.start', <String, dynamic>{
+    await _debugConnection?.sendCommand('Tracing.start', <String, dynamic>{
       // The choice of categories is as follows:
       //
       // blink:
@@ -192,10 +192,10 @@
   /// Stops a performance tracing session started by [beginRecordingPerformance].
   ///
   /// Returns all the collected tracing data unfiltered.
-  Future<List<Map<String, dynamic>>> endRecordingPerformance() async {
-    await _debugConnection.sendCommand('Tracing.end');
-    await _tracingCompleter.future;
-    final List<Map<String, dynamic>> data = _tracingData;
+  Future<List<Map<String, dynamic>>?> endRecordingPerformance() async {
+    await _debugConnection?.sendCommand('Tracing.end');
+    await _tracingCompleter?.future;
+    final List<Map<String, dynamic>>? data = _tracingData;
     _tracingCompleter = null;
     _tracingData = null;
     return data;
@@ -204,6 +204,7 @@
   /// Stops the Chrome process.
   void stop() {
     _isStopped = true;
+    _tracingSubscription?.cancel();
     _chromeProcess.kill();
   }
 
@@ -217,7 +218,7 @@
   // In some environments, such as the Dart HHH tester, Chrome resides in a
   // non-standard location and is provided via the following environment
   // variable.
-  final String envExecutable = io.Platform.environment['CHROME_EXECUTABLE'];
+  final String? envExecutable = io.Platform.environment['CHROME_EXECUTABLE'];
   if (envExecutable != null) {
     return envExecutable;
   }
@@ -237,14 +238,15 @@
   } else if (io.Platform.isWindows) {
     const String kWindowsExecutable = r'Google\Chrome\Application\chrome.exe';
     final List<String> kWindowsPrefixes = <String>[
-      io.Platform.environment['LOCALAPPDATA'],
-      io.Platform.environment['PROGRAMFILES'],
-      io.Platform.environment['PROGRAMFILES(X86)'],
+      for (String? item in <String?>[
+        io.Platform.environment['LOCALAPPDATA'],
+        io.Platform.environment['PROGRAMFILES'],
+        io.Platform.environment['PROGRAMFILES(X86)'],
+      ])
+        if (item != null) item
     ];
+
     final String windowsPrefix = kWindowsPrefixes.firstWhere((String prefix) {
-      if (prefix == null) {
-        return false;
-      }
       final String expectedPath = path.join(prefix, kWindowsExecutable);
       return io.File(expectedPath).existsSync();
     }, orElse: () => '.');
@@ -297,8 +299,8 @@
   final io.HttpClientRequest request =
       await client.getUrl(base.resolve('/json/list'));
   final io.HttpClientResponse response = await request.close();
-  final List<dynamic> jsonObject =
-      await json.fuse(utf8).decoder.bind(response).single;
+  final List<dynamic>? jsonObject =
+      await json.fuse(utf8).decoder.bind(response).single as List<dynamic>?;
   if (jsonObject == null || jsonObject.isEmpty) {
     return base;
   }
@@ -308,13 +310,13 @@
 /// Summarizes a Blink trace down to a few interesting values.
 class BlinkTraceSummary {
   BlinkTraceSummary._({
-    @required this.averageBeginFrameTime,
-    @required this.averageUpdateLifecyclePhasesTime,
+    required this.averageBeginFrameTime,
+    required this.averageUpdateLifecyclePhasesTime,
   }) : averageTotalUIFrameTime =
             averageBeginFrameTime + averageUpdateLifecyclePhasesTime;
 
   /// Summarizes Blink trace from the raw JSON trace.
-  static BlinkTraceSummary fromJson(List<Map<String, dynamic>> traceJson) {
+  static BlinkTraceSummary? fromJson(List<Map<String, dynamic>> traceJson) {
     try {
       // Convert raw JSON data to BlinkTraceEvent objects sorted by timestamp.
       List<BlinkTraceEvent> events = traceJson
@@ -385,10 +387,13 @@
 
       // Compute averages and summarize.
       return BlinkTraceSummary._(
-        averageBeginFrameTime: _computeAverageDuration(
-            frames.map((BlinkFrame frame) => frame.beginFrame).toList()),
+        averageBeginFrameTime: _computeAverageDuration(frames
+            .map((BlinkFrame frame) => frame.beginFrame)
+            .whereType<BlinkTraceEvent>()
+            .toList()),
         averageUpdateLifecyclePhasesTime: _computeAverageDuration(frames
             .map((BlinkFrame frame) => frame.updateAllLifecyclePhases)
+            .whereType<BlinkTraceEvent>()
             .toList()),
       );
     } catch (_, __) {
@@ -430,16 +435,16 @@
 /// Contains events pertaining to a single frame in the Blink trace data.
 class BlinkFrame {
   /// Corresponds to 'WebViewImpl::beginFrame' event.
-  BlinkTraceEvent beginFrame;
+  BlinkTraceEvent? beginFrame;
 
   /// Corresponds to 'WebViewImpl::updateAllLifecyclePhases' event.
-  BlinkTraceEvent updateAllLifecyclePhases;
+  BlinkTraceEvent? updateAllLifecyclePhases;
 
   /// Corresponds to 'measured_frame' begin event.
-  BlinkTraceEvent beginMeasuredFrame;
+  BlinkTraceEvent? beginMeasuredFrame;
 
   /// Corresponds to 'measured_frame' end event.
-  BlinkTraceEvent endMeasuredFrame;
+  BlinkTraceEvent? endMeasuredFrame;
 }
 
 /// Takes a list of events that have non-null [BlinkTraceEvent.tdur] and computes
@@ -449,10 +454,11 @@
   final double sum = events
       .skip(math.max(events.length - kMeasuredSampleCount, 0))
       .fold(0.0, (double previousValue, BlinkTraceEvent event) {
-    if (event.tdur == null) {
+    final int? threadClockDuration = event.tdur;
+    if (threadClockDuration == null) {
       throw FormatException('Trace event lacks "tdur" field: $event');
     }
-    return previousValue + event.tdur;
+    return previousValue + threadClockDuration;
   });
   final int sampleCount = math.min(events.length, kMeasuredSampleCount);
   return Duration(microseconds: sum ~/ sampleCount);
@@ -464,15 +470,15 @@
 ///  * https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview
 class BlinkTraceEvent {
   BlinkTraceEvent._({
-    @required this.args,
-    @required this.cat,
-    @required this.name,
-    @required this.ph,
-    @required this.pid,
-    @required this.tid,
-    @required this.ts,
-    @required this.tts,
-    @required this.tdur,
+    required this.args,
+    required this.cat,
+    required this.name,
+    required this.ph,
+    required this.pid,
+    required this.tid,
+    required this.ts,
+    required this.tts,
+    required this.tdur,
   });
 
   /// Parses an event from its JSON representation.
@@ -501,13 +507,13 @@
   /// https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview
   static BlinkTraceEvent fromJson(Map<String, dynamic> json) {
     return BlinkTraceEvent._(
-      args: json['args'],
-      cat: json['cat'],
-      name: json['name'],
-      ph: json['ph'],
-      pid: _readInt(json, 'pid'),
-      tid: _readInt(json, 'tid'),
-      ts: _readInt(json, 'ts'),
+      args: json['args'] as Map<String, dynamic>,
+      cat: json['cat'] as String,
+      name: json['name'] as String,
+      ph: json['ph'] as String,
+      pid: _readInt(json, 'pid')!,
+      tid: _readInt(json, 'tid')!,
+      ts: _readInt(json, 'ts')!,
       tts: _readInt(json, 'tts'),
       tdur: _readInt(json, 'tdur'),
     );
@@ -535,10 +541,10 @@
   final int ts;
 
   /// Timestamp in microseconds using thread clock.
-  final int tts;
+  final int? tts;
 
   /// Event duration in microseconds.
-  final int tdur;
+  final int? tdur;
 
   /// A "begin frame" event contains all of the scripting time of an animation
   /// frame (JavaScript, WebAssembly), plus a negligible amount of internal
@@ -604,8 +610,8 @@
 /// validation and conversion is needed.
 ///
 /// Returns null if the value is null.
-int _readInt(Map<String, dynamic> json, String key) {
-  final num jsonValue = json[key];
+int? _readInt(Map<String, dynamic> json, String key) {
+  final num? jsonValue = json[key];
 
   if (jsonValue == null) {
     return null;
diff --git a/packages/web_benchmarks/lib/src/recorder.dart b/packages/web_benchmarks/lib/src/recorder.dart
index 95d9032..9e5b182 100644
--- a/packages/web_benchmarks/lib/src/recorder.dart
+++ b/packages/web_benchmarks/lib/src/recorder.dart
@@ -2,8 +2,6 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-// @dart = 2.7
-
 import 'dart:async';
 import 'dart:html' as html;
 import 'dart:js';
@@ -65,7 +63,7 @@
   ///
   /// All arguments must not be null.
   Runner({
-    @required this.recorder,
+    required this.recorder,
     this.setUpAllDidRun = _dummyAsyncVoidCallback,
     this.tearDownAllWillRun = _dummyAsyncVoidCallback,
   });
@@ -115,13 +113,13 @@
   /// Returns the recorded profile.
   ///
   /// This value is only available while the benchmark is running.
-  Profile get profile;
+  Profile? get profile;
 
   /// Whether the benchmark should continue running.
   ///
   /// Returns `false` if the benchmark collected enough data and it's time to
   /// stop.
-  bool shouldContinue() => profile.shouldContinue();
+  bool shouldContinue() => profile!.shouldContinue();
 
   /// Called once before all runs of this benchmark recorder.
   ///
@@ -166,7 +164,7 @@
   /// Creates a raw benchmark recorder with a name.
   ///
   /// [name] must not be null.
-  RawRecorder({@required String name}) : super._(name, false);
+  RawRecorder({required String name}) : super._(name, false);
 
   /// The body of the benchmark.
   ///
@@ -175,7 +173,7 @@
 
   @override
   Profile get profile => _profile;
-  Profile _profile;
+  late Profile _profile;
 
   @override
   @nonVirtual
@@ -218,11 +216,11 @@
   /// Creates a [SceneBuilder] benchmark recorder.
   ///
   /// [name] must not be null.
-  SceneBuilderRecorder({@required String name}) : super._(name, true);
+  SceneBuilderRecorder({required String name}) : super._(name, true);
 
   @override
   Profile get profile => _profile;
-  Profile _profile;
+  late Profile _profile;
 
   /// Called from [Window.onBeginFrame].
   @mustCallSuper
@@ -349,7 +347,7 @@
   /// benchmark is expected to call [Profile.stopWarmingUp] to signal that
   /// the warm-up phase is finished.
   WidgetRecorder({
-    @required String name,
+    required String name,
     this.useCustomWarmUp = false,
   }) : super._(name, true);
 
@@ -361,19 +359,22 @@
   Widget createWidget();
 
   final List<VoidCallback> _didStopCallbacks = <VoidCallback>[];
+
   @override
   void registerDidStop(VoidCallback fn) {
     _didStopCallbacks.add(fn);
   }
 
   @override
-  Profile profile;
-  Completer<void> _runCompleter;
+  late Profile profile;
+
+  // This will be initialized in [run].
+  late Completer<void> _runCompleter;
 
   /// Whether to delimit warm-up frames in a custom way.
   final bool useCustomWarmUp;
 
-  Stopwatch _drawFrameStopwatch;
+  late Stopwatch _drawFrameStopwatch;
 
   @override
   @mustCallSuper
@@ -400,7 +401,7 @@
   }
 
   @override
-  void _onError(dynamic error, StackTrace stackTrace) {
+  void _onError(dynamic error, StackTrace? stackTrace) {
     _runCompleter.completeError(error, stackTrace);
   }
 
@@ -436,8 +437,6 @@
     } finally {
       stopListeningToEngineBenchmarkValues(kProfilePrerollFrame);
       stopListeningToEngineBenchmarkValues(kProfileApplyFrame);
-      _runCompleter = null;
-      profile = null;
     }
   }
 }
@@ -453,7 +452,7 @@
   /// Creates a widget build benchmark recorder.
   ///
   /// [name] must not be null.
-  WidgetBuildRecorder({@required String name}) : super._(name, true);
+  WidgetBuildRecorder({required String name}) : super._(name, true);
 
   /// Creates a widget to be benchmarked.
   ///
@@ -463,16 +462,17 @@
   Widget createWidget();
 
   final List<VoidCallback> _didStopCallbacks = <VoidCallback>[];
+
   @override
   void registerDidStop(VoidCallback fn) {
     _didStopCallbacks.add(fn);
   }
 
   @override
-  Profile profile;
-  Completer<void> _runCompleter;
+  late Profile profile;
+  Completer<void>? _runCompleter;
 
-  Stopwatch _drawFrameStopwatch;
+  late Stopwatch _drawFrameStopwatch;
 
   /// Whether in this frame we should call [createWidget] and render it.
   ///
@@ -480,9 +480,9 @@
   bool showWidget = true;
 
   /// The state that hosts the widget under test.
-  _WidgetBuildRecorderHostState _hostState;
+  late _WidgetBuildRecorderHostState _hostState;
 
-  Widget _getWidgetForFrame() {
+  Widget? _getWidgetForFrame() {
     if (showWidget) {
       return createWidget();
     } else {
@@ -516,13 +516,13 @@
       for (final VoidCallback fn in _didStopCallbacks) {
         fn();
       }
-      _runCompleter.complete();
+      _runCompleter!.complete();
     }
   }
 
   @override
-  void _onError(dynamic error, StackTrace stackTrace) {
-    _runCompleter.completeError(error, stackTrace);
+  void _onError(dynamic error, StackTrace? stackTrace) {
+    _runCompleter!.completeError(error, stackTrace);
   }
 
   @override
@@ -534,11 +534,10 @@
     binding._beginRecording(this, _WidgetBuildRecorderHost(this));
 
     try {
-      await _runCompleter.future;
+      await _runCompleter!.future;
       return localProfile;
     } finally {
       _runCompleter = null;
-      profile = null;
     }
   }
 }
@@ -604,11 +603,11 @@
 
   /// The number of frames ignored as warm-up frames, used only
   /// when [useCustomWarmUp] is true.
-  int _warmUpFrameCount;
+  int? _warmUpFrameCount;
 
   /// The number of frames ignored as warm-up frames.
   int get warmUpFrameCount =>
-      useCustomWarmUp ? _warmUpFrameCount : count - kMeasuredSampleCount;
+      useCustomWarmUp ? _warmUpFrameCount! : count - kMeasuredSampleCount;
 
   /// List of all the values that have been recorded.
   ///
@@ -697,7 +696,7 @@
   }
 
   /// Adds a value to this timeseries.
-  void add(double value, {@required bool isWarmUpValue}) {
+  void add(double value, {required bool isWarmUpValue}) {
     if (value < 0.0) {
       throw StateError(
         'Timeseries $name: negative metric values are not supported. Got: $value',
@@ -705,7 +704,7 @@
     }
     _allValues.add(value);
     if (useCustomWarmUp && isWarmUpValue) {
-      _warmUpFrameCount += 1;
+      _warmUpFrameCount = warmUpFrameCount + 1;
     }
   }
 }
@@ -717,15 +716,15 @@
 class TimeseriesStats {
   /// Creates statistics for a time series.
   const TimeseriesStats({
-    @required this.name,
-    @required this.average,
-    @required this.outlierCutOff,
-    @required this.outlierAverage,
-    @required this.standardDeviation,
-    @required this.noise,
-    @required this.cleanSampleCount,
-    @required this.outlierSampleCount,
-    @required this.samples,
+    required this.name,
+    required this.average,
+    required this.outlierCutOff,
+    required this.outlierAverage,
+    required this.standardDeviation,
+    required this.noise,
+    required this.cleanSampleCount,
+    required this.outlierSampleCount,
+    required this.samples,
   });
 
   /// The label used to refer to the corresponding timeseries.
@@ -800,9 +799,9 @@
 class AnnotatedSample {
   /// Creates an annotated measurement sample.
   const AnnotatedSample({
-    @required this.magnitude,
-    @required this.isOutlier,
-    @required this.isWarmUpValue,
+    required this.magnitude,
+    required this.isOutlier,
+    required this.isWarmUpValue,
   });
 
   /// The non-negative raw result of the measurement.
@@ -826,7 +825,7 @@
   /// Creates an empty profile.
   ///
   /// [name] and [useCustomWarmUp] must not be null.
-  Profile({@required this.name, this.useCustomWarmUp = false})
+  Profile({required this.name, this.useCustomWarmUp = false})
       : assert(name != null),
         _isWarmingUp = useCustomWarmUp;
 
@@ -864,8 +863,7 @@
   final Map<String, dynamic> extraData = <String, dynamic>{};
 
   /// Invokes [callback] and records the duration of its execution under [key].
-  Duration record(String key, VoidCallback callback,
-      {@required bool reported}) {
+  Duration record(String key, VoidCallback callback, {required bool reported}) {
     final Duration duration = timeAction(callback);
     addDataPoint(key, duration, reported: reported);
     return duration;
@@ -877,7 +875,7 @@
   ///
   /// Set [reported] to `false` to store the data, but not show it on the
   /// dashboard UI.
-  void addDataPoint(String key, Duration duration, {@required bool reported}) {
+  void addDataPoint(String key, Duration duration, {required bool reported}) {
     scoreData
         .putIfAbsent(
           key,
@@ -904,7 +902,7 @@
     // We have recorded something, but do we have enough samples? If every
     // timeseries has collected enough samples, stop the benchmark.
     return !scoreData.keys
-        .every((String key) => scoreData[key].count >= kTotalSampleCount);
+        .every((String key) => scoreData[key]!.count >= kTotalSampleCount);
   }
 
   /// Returns a JSON representation of the profile that will be sent to the
@@ -917,7 +915,7 @@
     };
 
     for (final String key in scoreData.keys) {
-      final Timeseries timeseries = scoreData[key];
+      final Timeseries timeseries = scoreData[key]!;
 
       if (timeseries.isReported) {
         scoreKeys.add('$key.average');
@@ -944,7 +942,7 @@
     final StringBuffer buffer = StringBuffer();
     buffer.writeln('name: $name');
     for (final String key in scoreData.keys) {
-      final Timeseries timeseries = scoreData[key];
+      final Timeseries timeseries = scoreData[key]!;
       final TimeseriesStats stats = timeseries.computeStats();
       buffer.writeln(stats.toString());
     }
@@ -1014,7 +1012,7 @@
   /// Reports an error.
   ///
   /// The implementation is expected to halt benchmark execution as soon as possible.
-  void _onError(dynamic error, StackTrace stackTrace);
+  void _onError(dynamic error, StackTrace? stackTrace);
 }
 
 /// A variant of [WidgetsBinding] that collaborates with a [Recorder] to decide
@@ -1040,18 +1038,20 @@
     _instance = this;
   }
 
-  static _RecordingWidgetsBinding get instance => _instance;
-  static _RecordingWidgetsBinding _instance;
+  static _RecordingWidgetsBinding get instance =>
+      BindingBase.checkInstance(_instance);
+  static _RecordingWidgetsBinding? _instance;
 
   /// Makes an instance of [_RecordingWidgetsBinding] the current binding.
   static _RecordingWidgetsBinding ensureInitialized() {
-    if (_RecordingWidgetsBinding.instance == null) {
+    if (_instance == null) {
       _RecordingWidgetsBinding();
     }
     return _RecordingWidgetsBinding.instance;
   }
 
-  FrameRecorder _recorder;
+  // This will not be null while the benchmark is running.
+  FrameRecorder? _recorder;
   bool _hasErrored = false;
 
   /// To short-circuit all frame lifecycle methods when the benchmark has
@@ -1064,7 +1064,7 @@
         'Cannot call _RecordingWidgetsBinding._beginRecording more than once',
       );
     }
-    final FlutterExceptionHandler originalOnError = FlutterError.onError;
+    final FlutterExceptionHandler? originalOnError = FlutterError.onError;
 
     recorder.registerDidStop(() {
       _benchmarkStopped = true;
@@ -1073,22 +1073,22 @@
     // Fail hard and fast on errors. Benchmarks should not have any errors.
     FlutterError.onError = (FlutterErrorDetails details) {
       _haltBenchmarkWithError(details.exception, details.stack);
-      originalOnError(details);
+      originalOnError!(details);
     };
     _recorder = recorder;
     runApp(widget);
   }
 
-  void _haltBenchmarkWithError(dynamic error, StackTrace stackTrace) {
+  void _haltBenchmarkWithError(dynamic error, StackTrace? stackTrace) {
     if (_hasErrored) {
       return;
     }
-    _recorder._onError(error, stackTrace);
+    _recorder!._onError(error, stackTrace);
     _hasErrored = true;
   }
 
   @override
-  void handleBeginFrame(Duration rawTimeStamp) {
+  void handleBeginFrame(Duration? rawTimeStamp) {
     // Don't keep on truckin' if there's an error or the benchmark has stopped.
     if (_hasErrored || _benchmarkStopped) {
       return;
@@ -1117,9 +1117,9 @@
       return;
     }
     try {
-      _recorder.frameWillDraw();
+      _recorder!.frameWillDraw();
       super.handleDrawFrame();
-      _recorder.frameDidDraw();
+      _recorder!.frameDidDraw();
     } catch (error, stackTrace) {
       _haltBenchmarkWithError(error, stackTrace);
       rethrow;
@@ -1247,7 +1247,8 @@
 //
 // If there are no listeners registered for [name], ignores the value.
 void _dispatchEngineBenchmarkValue(String name, double value) {
-  final EngineBenchmarkValueListener listener = _engineBenchmarkListeners[name];
+  final EngineBenchmarkValueListener? listener =
+      _engineBenchmarkListeners[name];
   if (listener != null) {
     listener(value);
   }
diff --git a/packages/web_benchmarks/lib/src/runner.dart b/packages/web_benchmarks/lib/src/runner.dart
index 595b487..3c9dd72 100644
--- a/packages/web_benchmarks/lib/src/runner.dart
+++ b/packages/web_benchmarks/lib/src/runner.dart
@@ -7,7 +7,6 @@
 import 'dart:io' as io;
 
 import 'package:logging/logging.dart';
-import 'package:meta/meta.dart';
 import 'package:path/path.dart' as path;
 import 'package:process/process.dart';
 import 'package:shelf/shelf.dart';
@@ -47,12 +46,12 @@
   /// If [headless] is true, runs Chrome without UI. In particular, this is
   /// useful in environments (e.g. CI) that don't have a display.
   BenchmarkServer({
-    @required this.benchmarkAppDirectory,
-    @required this.entryPoint,
-    @required this.useCanvasKit,
-    @required this.benchmarkServerPort,
-    @required this.chromeDebugPort,
-    @required this.headless,
+    required this.benchmarkAppDirectory,
+    required this.entryPoint,
+    required this.useCanvasKit,
+    required this.benchmarkServerPort,
+    required this.chromeDebugPort,
+    required this.headless,
   });
 
   final ProcessManager _processManager = const LocalProcessManager();
@@ -117,16 +116,16 @@
         Completer<List<Map<String, dynamic>>>();
     final List<Map<String, dynamic>> collectedProfiles =
         <Map<String, dynamic>>[];
-    List<String> benchmarks;
-    Iterator<String> benchmarkIterator;
+    List<String>? benchmarks;
+    late Iterator<String> benchmarkIterator;
 
     // This future fixes a race condition between the web-page loading and
     // asking to run a benchmark, and us connecting to Chrome's DevTools port.
     // Sometimes one wins. Other times, the other wins.
-    Future<Chrome> whenChromeIsReady;
-    Chrome chrome;
-    io.HttpServer server;
-    List<Map<String, dynamic>> latestPerformanceTrace;
+    Future<Chrome>? whenChromeIsReady;
+    Chrome? chrome;
+    late io.HttpServer server;
+    List<Map<String, dynamic>>? latestPerformanceTrace;
     Cascade cascade = Cascade();
 
     // Serves the static files built for the app (html, js, images, fonts, etc)
@@ -143,11 +142,11 @@
         if (request.requestedUri.path.endsWith('/profile-data')) {
           final Map<String, dynamic> profile =
               json.decode(await request.readAsString());
-          final String benchmarkName = profile['name'];
+          final String? benchmarkName = profile['name'];
           if (benchmarkName != benchmarkIterator.current) {
             profileData.completeError(Exception(
               'Browser returned benchmark results from a wrong benchmark.\n'
-              'Requested to run bechmark ${benchmarkIterator.current}, but '
+              'Requested to run benchmark ${benchmarkIterator.current}, but '
               'got results for $benchmarkName.',
             ));
             server.close();
@@ -155,10 +154,10 @@
 
           // Trace data is null when the benchmark is not frame-based, such as RawRecorder.
           if (latestPerformanceTrace != null) {
-            final BlinkTraceSummary traceSummary =
-                BlinkTraceSummary.fromJson(latestPerformanceTrace);
+            final BlinkTraceSummary? traceSummary =
+                BlinkTraceSummary.fromJson(latestPerformanceTrace!);
             profile['totalUiFrame.average'] =
-                traceSummary.averageTotalUIFrameTime.inMicroseconds;
+                traceSummary?.averageTotalUIFrameTime.inMicroseconds;
             profile['scoreKeys'] ??=
                 <dynamic>[]; // using dynamic for consistency with JSON
             profile['scoreKeys'].add('totalUiFrame.average');
@@ -169,12 +168,12 @@
         } else if (request.requestedUri.path
             .endsWith('/start-performance-tracing')) {
           latestPerformanceTrace = null;
-          await chrome.beginRecordingPerformance(
+          await chrome!.beginRecordingPerformance(
               request.requestedUri.queryParameters['label']);
           return Response.ok('Started performance tracing');
         } else if (request.requestedUri.path
             .endsWith('/stop-performance-tracing')) {
-          latestPerformanceTrace = await chrome.endRecordingPerformance();
+          latestPerformanceTrace = await chrome!.endRecordingPerformance();
           return Response.ok('Stopped performance tracing');
         } else if (request.requestedUri.path.endsWith('/on-error')) {
           final Map<String, dynamic> errorDetails =
@@ -193,7 +192,7 @@
           if (benchmarks == null) {
             benchmarks =
                 (json.decode(await request.readAsString())).cast<String>();
-            benchmarkIterator = benchmarks.iterator;
+            benchmarkIterator = benchmarks!.iterator;
           }
           if (benchmarkIterator.moveNext()) {
             final String nextBenchmark = benchmarkIterator.current;
@@ -312,7 +311,7 @@
         );
         await chrome?.whenExits;
       }
-      server?.close();
+      server.close();
     }
   }
 }
diff --git a/packages/web_benchmarks/pubspec.yaml b/packages/web_benchmarks/pubspec.yaml
index c613091..b82a743 100644
--- a/packages/web_benchmarks/pubspec.yaml
+++ b/packages/web_benchmarks/pubspec.yaml
@@ -2,23 +2,22 @@
 description: A benchmark harness for performance-testing Flutter apps in Chrome.
 repository: https://github.com/flutter/packages/tree/main/packages/web_benchmarks
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+web_benchmarks%22
-version: 0.0.7+1
+version: 0.1.0
 
 environment:
-  sdk: ">=2.7.0 <3.0.0"
-  flutter: ">=2.10.0"
+  sdk: ">=2.17.0 <3.0.0"
+  flutter: ">=3.0.0"
 
-# Using +2 upper limit on some packages to allow null-safe versions
 dependencies:
   flutter:
     sdk: flutter
   flutter_test:
     sdk: flutter
-  logging: ">=0.11.4 <2.0.0"
-  meta: ">=1.0.0 <2.0.0"
-  path: ">=1.7.0 <2.0.0"
-  process: ">=3.0.13 <5.0.0"
-  shelf: ">=0.7.5 <2.0.0"
-  shelf_static: ">=0.2.8 <2.0.0"
-  test: ">=1.15.0 <3.0.0"
-  webkit_inspection_protocol: ">=0.7.3 <2.0.0"
+  logging: ^1.0.2
+  meta: ^1.7.0
+  path: ^1.8.0
+  process: ^4.2.4
+  shelf: ^1.2.0
+  shelf_static: ^1.1.0
+  test: ^1.19.5
+  webkit_inspection_protocol: ^1.0.0
diff --git a/packages/web_benchmarks/testing/test_app/lib/aboutpage.dart b/packages/web_benchmarks/testing/test_app/lib/aboutpage.dart
index ac3507e..ebe5271 100644
--- a/packages/web_benchmarks/testing/test_app/lib/aboutpage.dart
+++ b/packages/web_benchmarks/testing/test_app/lib/aboutpage.dart
@@ -7,7 +7,7 @@
 const ValueKey<String> backKey = ValueKey<String>('backKey');
 
 class AboutPage extends StatelessWidget {
-  const AboutPage({Key key}) : super(key: key);
+  const AboutPage({Key? key}) : super(key: key);
 
   @override
   Widget build(BuildContext context) {
diff --git a/packages/web_benchmarks/testing/test_app/lib/benchmarks/runner.dart b/packages/web_benchmarks/testing/test_app/lib/benchmarks/runner.dart
index 62cb922..c8f340e 100644
--- a/packages/web_benchmarks/testing/test_app/lib/benchmarks/runner.dart
+++ b/packages/web_benchmarks/testing/test_app/lib/benchmarks/runner.dart
@@ -12,7 +12,7 @@
 
 /// A recorder that measures frame building durations.
 abstract class AppRecorder extends WidgetRecorder {
-  AppRecorder({@required this.benchmarkName}) : super(name: benchmarkName);
+  AppRecorder({required this.benchmarkName}) : super(name: benchmarkName);
 
   final String benchmarkName;
 
@@ -37,7 +37,7 @@
   @override
   Future<void> automate() async {
     final ScrollableState scrollable =
-        Scrollable.of(find.byKey(textKey).evaluate().single);
+        Scrollable.of(find.byKey(textKey).evaluate().single)!;
     await scrollable.position.animateTo(
       30000,
       curve: Curves.linear,
diff --git a/packages/web_benchmarks/testing/test_app/lib/homepage.dart b/packages/web_benchmarks/testing/test_app/lib/homepage.dart
index c065429..7343713 100644
--- a/packages/web_benchmarks/testing/test_app/lib/homepage.dart
+++ b/packages/web_benchmarks/testing/test_app/lib/homepage.dart
@@ -8,7 +8,7 @@
 const ValueKey<String> aboutPageKey = ValueKey<String>('aboutPageKey');
 
 class HomePage extends StatefulWidget {
-  const HomePage({Key key, this.title}) : super(key: key);
+  const HomePage({Key? key, required this.title}) : super(key: key);
 
   final String title;
 
diff --git a/packages/web_benchmarks/testing/test_app/lib/main.dart b/packages/web_benchmarks/testing/test_app/lib/main.dart
index 6823d71..8607bf3 100644
--- a/packages/web_benchmarks/testing/test_app/lib/main.dart
+++ b/packages/web_benchmarks/testing/test_app/lib/main.dart
@@ -12,7 +12,7 @@
 }
 
 class MyApp extends StatelessWidget {
-  const MyApp({Key key}) : super(key: key);
+  const MyApp({Key? key}) : super(key: key);
 
   @override
   Widget build(BuildContext context) {
diff --git a/packages/web_benchmarks/testing/test_app/pubspec.yaml b/packages/web_benchmarks/testing/test_app/pubspec.yaml
index 4d9a9f6..3a124ba 100644
--- a/packages/web_benchmarks/testing/test_app/pubspec.yaml
+++ b/packages/web_benchmarks/testing/test_app/pubspec.yaml
@@ -6,7 +6,7 @@
 version: 1.0.0+1
 
 environment:
-  sdk: ">=2.7.0 <3.0.0"
+  sdk: ">=2.17.0 <3.0.0"
 
 dependencies:
   cupertino_icons: ^0.1.3
diff --git a/packages/web_benchmarks/testing/web_benchmarks_test.dart b/packages/web_benchmarks/testing/web_benchmarks_test.dart
index 4560113..3f4db8e 100644
--- a/packages/web_benchmarks/testing/web_benchmarks_test.dart
+++ b/packages/web_benchmarks/testing/web_benchmarks_test.dart
@@ -30,14 +30,14 @@
           'noise',
         ]) {
           expect(
-            taskResult.scores[benchmarkName].where((BenchmarkScore score) =>
+            taskResult.scores[benchmarkName]!.where((BenchmarkScore score) =>
                 score.metric == '$metricName.$valueName'),
             hasLength(1),
           );
         }
       }
       expect(
-        taskResult.scores[benchmarkName].where(
+        taskResult.scores[benchmarkName]!.where(
             (BenchmarkScore score) => score.metric == 'totalUiFrame.average'),
         hasLength(1),
       );