[video_player_web] Stop buffering when browser canPlayThrough. (#5068)

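The web implementation used to clear the `buffering` state as soon as the
browser fired `canplay` (enough data to show the current frame). With this
change, `buffering` is only cleared when the browser fires `canplaythrough`
(it estimates it can play to the end without stalling), when playback is
actually progressing, or on end/error. To make this testable, the private
`_VideoPlayer` class is extracted into `lib/src/video_player.dart` as
`VideoPlayer`, with an injectable event `StreamController`.

A trimmed sketch of the new listener wiring (taken from the `VideoPlayer`
class added in this PR, simplified for illustration):

    // `canplay`: a frame is available -> only report "initialized".
    _videoElement.onCanPlay.listen((dynamic _) {
      if (!_isInitialized) {
        _isInitialized = true;
        _sendInitialized();
      }
    });

    // `canplaythrough`: the browser expects to play through -> clear "buffering".
    _videoElement.onCanPlayThrough.listen((dynamic _) {
      setBuffering(false);
    });

Issue: https://github.com/flutter/flutter/issues/94630
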
diff --git a/packages/video_player/video_player_web/CHANGELOG.md b/packages/video_player/video_player_web/CHANGELOG.md
index 1cd428c..3310660 100644
--- a/packages/video_player/video_player_web/CHANGELOG.md
+++ b/packages/video_player/video_player_web/CHANGELOG.md
@@ -1,3 +1,11 @@
+## 2.0.8
+
+* Ensures the `buffering` state is only cleared once the browser reports that
+  enough data has been buffered for the video to likely play through without
+  stopping (`onCanPlayThrough`). Issue [#94630](https://github.com/flutter/flutter/issues/94630).
+* Improves testability of the private `_VideoPlayer` class (moved to `src/video_player.dart`).
+* Ensures that tests listening to a Stream fail fast (1-second max timeout).
+
 ## 2.0.7
 
 * Internal code cleanup for stricter analysis options.
diff --git a/packages/video_player/video_player_web/example/integration_test/utils.dart b/packages/video_player/video_player_web/example/integration_test/utils.dart
new file mode 100644
index 0000000..b011851
--- /dev/null
+++ b/packages/video_player/video_player_web/example/integration_test/utils.dart
@@ -0,0 +1,16 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Returns the URL to load an asset of the video_player example app as a network source.
+//
+// TODO(stuartmorgan): Convert this to a local `HttpServer` that vends the
+// assets directly, https://github.com/flutter/flutter/issues/95420
+String getUrlForAssetAsNetworkSource(String assetKey) {
+  return 'https://github.com/flutter/plugins/blob/'
+      // This hash can be rolled forward to pick up newly-added assets.
+      'cb381ced070d356799dddf24aca38ce0579d3d7b'
+      '/packages/video_player/video_player/example/'
+      '$assetKey'
+      '?raw=true';
+}
diff --git a/packages/video_player/video_player_web/example/integration_test/video_player_test.dart b/packages/video_player/video_player_web/example/integration_test/video_player_test.dart
new file mode 100644
index 0000000..41aba97
--- /dev/null
+++ b/packages/video_player/video_player_web/example/integration_test/video_player_test.dart
@@ -0,0 +1,195 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html' as html;
+
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:video_player_platform_interface/video_player_platform_interface.dart';
+import 'package:video_player_web/src/video_player.dart';
+
+void main() {
+  IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+  group('VideoPlayer', () {
+    late html.VideoElement video;
+
+    setUp(() {
+      // Never set "src" on the video, so this test doesn't hit the network!
+      video = html.VideoElement()
+        ..controls = true
+        ..setAttribute('playsinline', 'false');
+    });
+
+    testWidgets('fixes critical video element config', (WidgetTester _) async {
+      VideoPlayer(videoElement: video).initialize();
+
+      expect(video.controls, isFalse,
+          reason: 'Video is controlled through code');
+      expect(video.getAttribute('autoplay'), 'false',
+          reason: 'Cannot autoplay on the web');
+      expect(video.getAttribute('playsinline'), 'true',
+          reason: 'Needed by safari iOS');
+    });
+
+    testWidgets('setVolume', (WidgetTester tester) async {
+      final VideoPlayer player = VideoPlayer(videoElement: video)..initialize();
+
+      player.setVolume(0);
+
+      expect(video.volume, isZero, reason: 'Volume should be zero');
+      expect(video.muted, isTrue, reason: 'muted attribute should be true');
+
+      expect(() {
+        player.setVolume(-0.0001);
+      }, throwsAssertionError, reason: 'Volume cannot be < 0');
+
+      expect(() {
+        player.setVolume(1.0001);
+      }, throwsAssertionError, reason: 'Volume cannot be > 1');
+    });
+
+    testWidgets('setPlaybackSpeed', (WidgetTester tester) async {
+      final VideoPlayer player = VideoPlayer(videoElement: video)..initialize();
+
+      expect(() {
+        player.setPlaybackSpeed(-1);
+      }, throwsAssertionError, reason: 'Playback speed cannot be < 0');
+
+      expect(() {
+        player.setPlaybackSpeed(0);
+      }, throwsAssertionError, reason: 'Playback speed cannot be == 0');
+    });
+
+    testWidgets('seekTo', (WidgetTester tester) async {
+      final VideoPlayer player = VideoPlayer(videoElement: video)..initialize();
+
+      expect(() {
+        player.seekTo(const Duration(seconds: -1));
+      }, throwsAssertionError, reason: 'Cannot seek into negative numbers');
+    });
+
+    // The events tested in this group do *not* represent the actual sequence
+    // of events from a real "video" element. They're crafted to test the
+    // behavior of the VideoPlayer in different states with different events.
+    group('events', () {
+      late StreamController<VideoEvent> streamController;
+      late VideoPlayer player;
+      late Stream<VideoEvent> timedStream;
+
+      final Set<VideoEventType> bufferingEvents = <VideoEventType>{
+        VideoEventType.bufferingStart,
+        VideoEventType.bufferingEnd,
+      };
+
+      setUp(() {
+        streamController = StreamController<VideoEvent>();
+        player =
+            VideoPlayer(videoElement: video, eventController: streamController)
+              ..initialize();
+
+        // This stream will automatically close after 100 ms without seeing any events
+        timedStream = streamController.stream.timeout(
+          const Duration(milliseconds: 100),
+          onTimeout: (EventSink<VideoEvent> sink) {
+            sink.close();
+          },
+        );
+      });
+
+      testWidgets('buffering dispatches only when it changes',
+          (WidgetTester tester) async {
+        // Take all the "buffering" events that we see before the stream times out
+        final Future<List<bool>> stream = timedStream
+            .where(
+                (VideoEvent event) => bufferingEvents.contains(event.eventType))
+            .map((VideoEvent event) =>
+                event.eventType == VideoEventType.bufferingStart)
+            .toList();
+
+        // Simulate some events coming from the player...
+        player.setBuffering(true);
+        player.setBuffering(true);
+        player.setBuffering(true);
+        player.setBuffering(false);
+        player.setBuffering(false);
+        player.setBuffering(true);
+        player.setBuffering(false);
+        player.setBuffering(true);
+        player.setBuffering(false);
+
+        final List<bool> events = await stream;
+
+        expect(events, hasLength(6));
+        expect(events, <bool>[true, false, true, false, true, false]);
+      });
+
+      testWidgets('canplay event does not change buffering state',
+          (WidgetTester tester) async {
+        // Take all the "buffering" events that we see before the stream times out
+        final Future<List<bool>> stream = timedStream
+            .where(
+                (VideoEvent event) => bufferingEvents.contains(event.eventType))
+            .map((VideoEvent event) =>
+                event.eventType == VideoEventType.bufferingStart)
+            .toList();
+
+        player.setBuffering(true);
+
+        // Simulate "canplay" event...
+        video.dispatchEvent(html.Event('canplay'));
+
+        final List<bool> events = await stream;
+
+        expect(events, hasLength(1));
+        expect(events, <bool>[true]);
+      });
+
+      testWidgets('canplaythrough event does change buffering state',
+          (WidgetTester tester) async {
+        // Take all the "buffering" events that we see before the stream times out
+        final Future<List<bool>> stream = timedStream
+            .where(
+                (VideoEvent event) => bufferingEvents.contains(event.eventType))
+            .map((VideoEvent event) =>
+                event.eventType == VideoEventType.bufferingStart)
+            .toList();
+
+        player.setBuffering(true);
+
+        // Simulate "canplaythrough" event...
+        video.dispatchEvent(html.Event('canplaythrough'));
+
+        final List<bool> events = await stream;
+
+        expect(events, hasLength(2));
+        expect(events, <bool>[true, false]);
+      });
+
+      testWidgets('initialized dispatches only once',
+          (WidgetTester tester) async {
+        // Dispatch some bogus "canplay" events from the video object
+        video.dispatchEvent(html.Event('canplay'));
+        video.dispatchEvent(html.Event('canplay'));
+        video.dispatchEvent(html.Event('canplay'));
+
+        // Take all the "initialized" events that we see before the stream times out
+        final Future<List<VideoEvent>> stream = timedStream
+            .where((VideoEvent event) =>
+                event.eventType == VideoEventType.initialized)
+            .toList();
+
+        video.dispatchEvent(html.Event('canplay'));
+        video.dispatchEvent(html.Event('canplay'));
+        video.dispatchEvent(html.Event('canplay'));
+
+        final List<VideoEvent> events = await stream;
+
+        expect(events, hasLength(1));
+        expect(events[0].eventType, VideoEventType.initialized);
+      });
+    });
+  });
+}
diff --git a/packages/video_player/video_player_web/example/integration_test/video_player_web_test.dart b/packages/video_player/video_player_web/example/integration_test/video_player_web_test.dart
index 97b0364..5053ea6 100644
--- a/packages/video_player/video_player_web/example/integration_test/video_player_web_test.dart
+++ b/packages/video_player/video_player_web/example/integration_test/video_player_web_test.dart
@@ -11,10 +11,15 @@
 import 'package:video_player_platform_interface/video_player_platform_interface.dart';
 import 'package:video_player_web/video_player_web.dart';
 
+import 'utils.dart';
+
+// Use WebM to allow CI to run tests in Chromium.
+const String _videoAssetKey = 'assets/Butterfly-209.webm';
+
 void main() {
   IntegrationTestWidgetsFlutterBinding.ensureInitialized();
 
-  group('VideoPlayer for Web', () {
+  group('VideoPlayerWeb plugin (hits network)', () {
     late Future<int> textureId;
 
     setUp(() {
@@ -23,8 +28,7 @@
           .create(
             DataSource(
               sourceType: DataSourceType.network,
-              uri:
-                  'https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4',
+              uri: getUrlForAssetAsNetworkSource(_videoAssetKey),
             ),
           )
           .then((int? textureId) => textureId!);
@@ -38,9 +42,9 @@
       expect(
           VideoPlayerPlatform.instance.create(
             DataSource(
-                sourceType: DataSourceType.network,
-                uri:
-                    'https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4'),
+              sourceType: DataSourceType.network,
+              uri: getUrlForAssetAsNetworkSource(_videoAssetKey),
+            ),
           ),
           completion(isNonZero));
     });
@@ -100,9 +104,9 @@
         (WidgetTester tester) async {
       final int videoPlayerId = (await VideoPlayerPlatform.instance.create(
         DataSource(
-            sourceType: DataSourceType.network,
-            uri:
-                'https://flutter.github.io/assets-for-api-docs/assets/videos/_non_existent_video.mp4'),
+          sourceType: DataSourceType.network,
+          uri: getUrlForAssetAsNetworkSource('assets/__non_existent.webm'),
+        ),
       ))!;
 
       final Stream<VideoEvent> eventStream =
@@ -113,7 +117,7 @@
       await VideoPlayerPlatform.instance.play(videoPlayerId);
 
       expect(() async {
-        await eventStream.last;
+        await eventStream.timeout(const Duration(seconds: 5)).last;
       }, throwsA(isA<PlatformException>()));
     });
 
@@ -164,5 +168,40 @@
       expect(VideoPlayerPlatform.instance.setMixWithOthers(true), completes);
       expect(VideoPlayerPlatform.instance.setMixWithOthers(false), completes);
     });
+
+    testWidgets('video playback lifecycle', (WidgetTester tester) async {
+      final int videoPlayerId = await textureId;
+      final Stream<VideoEvent> eventStream =
+          VideoPlayerPlatform.instance.videoEventsFor(videoPlayerId);
+
+      final Future<List<VideoEvent>> stream = eventStream.timeout(
+        const Duration(seconds: 1),
+        onTimeout: (EventSink<VideoEvent> sink) {
+          sink.close();
+        },
+      ).toList();
+
+      await VideoPlayerPlatform.instance.setVolume(videoPlayerId, 0);
+      await VideoPlayerPlatform.instance.play(videoPlayerId);
+
+      // Let the video play until we stop seeing events for a second
+      final List<VideoEvent> events = await stream;
+
+      await VideoPlayerPlatform.instance.pause(videoPlayerId);
+
+      // The expected list of event types should look like this:
+      // 1. bufferingStart,
+      // 2. bufferingUpdate (videoElement.onWaiting),
+      // 3. initialized (videoElement.onCanPlay),
+      // 4. bufferingEnd (videoElement.onCanPlayThrough),
+      expect(
+          events.map((VideoEvent e) => e.eventType),
+          equals(<VideoEventType>[
+            VideoEventType.bufferingStart,
+            VideoEventType.bufferingUpdate,
+            VideoEventType.initialized,
+            VideoEventType.bufferingEnd
+          ]));
+    });
   });
 }
diff --git a/packages/video_player/video_player_web/lib/src/video_player.dart b/packages/video_player/video_player_web/lib/src/video_player.dart
new file mode 100644
index 0000000..eda188c
--- /dev/null
+++ b/packages/video_player/video_player_web/lib/src/video_player.dart
@@ -0,0 +1,254 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html' as html;
+
+import 'package:flutter/foundation.dart' show visibleForTesting;
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+import 'package:video_player_platform_interface/video_player_platform_interface.dart';
+
+// An error code value to error name Map.
+// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
+const Map<int, String> _kErrorValueToErrorName = <int, String>{
+  1: 'MEDIA_ERR_ABORTED',
+  2: 'MEDIA_ERR_NETWORK',
+  3: 'MEDIA_ERR_DECODE',
+  4: 'MEDIA_ERR_SRC_NOT_SUPPORTED',
+};
+
+// An error code value to description Map.
+// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
+const Map<int, String> _kErrorValueToErrorDescription = <int, String>{
+  1: 'The user canceled the fetching of the video.',
+  2: 'A network error occurred while fetching the video, despite having previously been available.',
+  3: 'An error occurred while trying to decode the video, despite having previously been determined to be usable.',
+  4: 'The video has been found to be unsuitable (missing or in a format not supported by your browser).',
+};
+
+// The default error message, when the error is an empty string
+// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/message
+const String _kDefaultErrorMessage =
+    'No further diagnostic information can be determined or provided.';
+
+/// Wraps a [html.VideoElement] so its API complies with what is expected by the plugin.
+class VideoPlayer {
+  /// Create a [VideoPlayer] from a [html.VideoElement] instance.
+  VideoPlayer({
+    required html.VideoElement videoElement,
+    @visibleForTesting StreamController<VideoEvent>? eventController,
+  })  : _videoElement = videoElement,
+        _eventController = eventController ?? StreamController<VideoEvent>();
+
+  final StreamController<VideoEvent> _eventController;
+  final html.VideoElement _videoElement;
+
+  bool _isInitialized = false;
+  bool _isBuffering = false;
+
+  /// Returns the [Stream] of [VideoEvent]s from the inner [html.VideoElement].
+  Stream<VideoEvent> get events => _eventController.stream;
+
+  /// Initializes the wrapped [html.VideoElement].
+  ///
+  /// This method sets the required DOM attributes so videos can [play] programmatically,
+  /// and attaches listeners to the internal events from the [html.VideoElement]
+  /// to react to them and expose them through the [VideoPlayer.events] stream.
+  void initialize() {
+    _videoElement
+      ..autoplay = false
+      ..controls = false;
+
+    // Allows Safari iOS to play the video inline
+    _videoElement.setAttribute('playsinline', 'true');
+
+    // Set autoplay to false since most browsers won't autoplay a video unless it is muted
+    _videoElement.setAttribute('autoplay', 'false');
+
+    _videoElement.onCanPlay.listen((dynamic _) {
+      if (!_isInitialized) {
+        _isInitialized = true;
+        _sendInitialized();
+      }
+    });
+
+    _videoElement.onCanPlayThrough.listen((dynamic _) {
+      setBuffering(false);
+    });
+
+    _videoElement.onPlaying.listen((dynamic _) {
+      setBuffering(false);
+    });
+
+    _videoElement.onWaiting.listen((dynamic _) {
+      setBuffering(true);
+      _sendBufferingRangesUpdate();
+    });
+
+    // The error event fires when some form of error occurs while attempting to load or play the media.
+    _videoElement.onError.listen((html.Event _) {
+      setBuffering(false);
+      // The Event itself (_) doesn't contain info about the actual error.
+      // We need to look at the HTMLMediaElement.error.
+      // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error
+      final html.MediaError error = _videoElement.error!;
+      _eventController.addError(PlatformException(
+        code: _kErrorValueToErrorName[error.code]!,
+        message: error.message != '' ? error.message : _kDefaultErrorMessage,
+        details: _kErrorValueToErrorDescription[error.code],
+      ));
+    });
+
+    _videoElement.onEnded.listen((dynamic _) {
+      setBuffering(false);
+      _eventController.add(VideoEvent(eventType: VideoEventType.completed));
+    });
+  }
+
+  /// Attempts to play the video.
+  ///
+  /// If this method is called programmatically (without user interaction), it
+  /// might fail unless the video is completely muted (or it has no audio tracks).
+  ///
+  /// When called from some user interaction (a tap on a button), the above
+  /// limitation should disappear.
+  Future<void> play() {
+    return _videoElement.play().catchError((Object e) {
+      // play() attempts to begin playback of the media. It returns
+      // a Promise which can get rejected in case of failure to begin
+      // playback for any reason, such as permission issues.
+      // The rejection handler is called with a DomException.
+      // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/play
+      final html.DomException exception = e as html.DomException;
+      _eventController.addError(PlatformException(
+        code: exception.name,
+        message: exception.message,
+      ));
+    }, test: (Object e) => e is html.DomException);
+  }
+
+  /// Pauses the video in the current position.
+  void pause() {
+    _videoElement.pause();
+  }
+
+  /// Controls whether the video should start again after it finishes.
+  void setLooping(bool value) {
+    _videoElement.loop = value;
+  }
+
+  /// Sets the volume at which the media will be played.
+  ///
+  /// Values must fall between 0 and 1, where 0 is muted and 1 is the loudest.
+  ///
+  /// When volume is set to 0, the `muted` property is also applied to the
+  /// [html.VideoElement]. This is required for auto-play on the web.
+  void setVolume(double volume) {
+    assert(volume >= 0 && volume <= 1);
+
+    // TODO(ditman): Do we need to expose a "muted" API?
+    // https://github.com/flutter/flutter/issues/60721
+    _videoElement.muted = !(volume > 0.0);
+    _videoElement.volume = volume;
+  }
+
+  /// Sets the playback `speed`.
+  ///
+  /// A `speed` of 1.0 is "normal speed"; values lower than 1.0 make the media
+  /// play slower than normal, and higher values make it play faster.
+  ///
+  /// `speed` cannot be negative.
+  ///
+  /// The audio is muted when the playback rate is outside a useful range (for
+  /// example, Gecko mutes the sound outside the range 0.25 to 4.0).
+  ///
+  /// The pitch of the audio is corrected by default.
+  void setPlaybackSpeed(double speed) {
+    assert(speed > 0);
+
+    _videoElement.playbackRate = speed;
+  }
+
+  /// Moves the playback head to a new `position`.
+  ///
+  /// `position` cannot be negative.
+  void seekTo(Duration position) {
+    assert(!position.isNegative);
+
+    _videoElement.currentTime = position.inMilliseconds.toDouble() / 1000;
+  }
+
+  /// Returns the current playback head position as a [Duration].
+  Duration getPosition() {
+    _sendBufferingRangesUpdate();
+    return Duration(milliseconds: (_videoElement.currentTime * 1000).round());
+  }
+
+  /// Disposes of the current [html.VideoElement].
+  void dispose() {
+    _videoElement.removeAttribute('src');
+    _videoElement.load();
+  }
+
+  // Sends a [VideoEventType.initialized] [VideoEvent] with info about the wrapped video.
+  void _sendInitialized() {
+    final Duration? duration = !_videoElement.duration.isNaN
+        ? Duration(
+            milliseconds: (_videoElement.duration * 1000).round(),
+          )
+        : null;
+
+    final Size? size = !_videoElement.videoHeight.isNaN
+        ? Size(
+            _videoElement.videoWidth.toDouble(),
+            _videoElement.videoHeight.toDouble(),
+          )
+        : null;
+
+    _eventController.add(
+      VideoEvent(
+        eventType: VideoEventType.initialized,
+        duration: duration,
+        size: size,
+      ),
+    );
+  }
+
+  /// Caches the current "buffering" state of the video.
+  ///
+  /// If the current buffering state is different from the previous one
+  /// ([_isBuffering]), this dispatches a [VideoEvent].
+  @visibleForTesting
+  void setBuffering(bool buffering) {
+    if (_isBuffering != buffering) {
+      _isBuffering = buffering;
+      _eventController.add(VideoEvent(
+        eventType: _isBuffering
+            ? VideoEventType.bufferingStart
+            : VideoEventType.bufferingEnd,
+      ));
+    }
+  }
+
+  // Broadcasts the [html.VideoElement.buffered] status through the [events] stream.
+  void _sendBufferingRangesUpdate() {
+    _eventController.add(VideoEvent(
+      buffered: _toDurationRange(_videoElement.buffered),
+      eventType: VideoEventType.bufferingUpdate,
+    ));
+  }
+
+  // Converts from [html.TimeRanges] to our own List<DurationRange>.
+  List<DurationRange> _toDurationRange(html.TimeRanges buffered) {
+    final List<DurationRange> durationRange = <DurationRange>[];
+    for (int i = 0; i < buffered.length; i++) {
+      durationRange.add(DurationRange(
+        Duration(milliseconds: (buffered.start(i) * 1000).round()),
+        Duration(milliseconds: (buffered.end(i) * 1000).round()),
+      ));
+    }
+    return durationRange;
+  }
+}
diff --git a/packages/video_player/video_player_web/lib/video_player_web.dart b/packages/video_player/video_player_web/lib/video_player_web.dart
index a676850..e52fd83 100644
--- a/packages/video_player/video_player_web/lib/video_player_web.dart
+++ b/packages/video_player/video_player_web/lib/video_player_web.dart
@@ -6,34 +6,11 @@
 import 'dart:html';
 
 import 'package:flutter/material.dart';
-import 'package:flutter/services.dart';
 import 'package:flutter_web_plugins/flutter_web_plugins.dart';
 import 'package:video_player_platform_interface/video_player_platform_interface.dart';
 
 import 'src/shims/dart_ui.dart' as ui;
-
-// An error code value to error name Map.
-// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
-const Map<int, String> _kErrorValueToErrorName = <int, String>{
-  1: 'MEDIA_ERR_ABORTED',
-  2: 'MEDIA_ERR_NETWORK',
-  3: 'MEDIA_ERR_DECODE',
-  4: 'MEDIA_ERR_SRC_NOT_SUPPORTED',
-};
-
-// An error code value to description Map.
-// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
-const Map<int, String> _kErrorValueToErrorDescription = <int, String>{
-  1: 'The user canceled the fetching of the video.',
-  2: 'A network error occurred while fetching the video, despite having previously been available.',
-  3: 'An error occurred while trying to decode the video, despite having previously been determined to be usable.',
-  4: 'The video has been found to be unsuitable (missing or in a format not supported by your browser).',
-};
-
-// The default error message, when the error is an empty string
-// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/message
-const String _kDefaultErrorMessage =
-    'No further diagnostic information can be determined or provided.';
+import 'src/video_player.dart';
 
 /// The web implementation of [VideoPlayerPlatform].
 ///
@@ -44,8 +21,10 @@
     VideoPlayerPlatform.instance = VideoPlayerPlugin();
   }
 
-  final Map<int, _VideoPlayer> _videoPlayers = <int, _VideoPlayer>{};
+  // Map of textureId -> VideoPlayer instances
+  final Map<int, VideoPlayer> _videoPlayers = <int, VideoPlayer>{};
 
+  // Simulates the native "textureId" counter.
   int _textureCounter = 1;
 
   @override
@@ -55,13 +34,13 @@
 
   @override
   Future<void> dispose(int textureId) async {
-    _videoPlayers[textureId]!.dispose();
+    _player(textureId).dispose();
     _videoPlayers.remove(textureId);
     return;
   }
 
   void _disposeAllPlayers() {
-    for (final _VideoPlayer videoPlayer in _videoPlayers.values) {
+    for (final VideoPlayer videoPlayer in _videoPlayers.values) {
       videoPlayer.dispose();
     }
     _videoPlayers.clear();
@@ -69,8 +48,7 @@
 
   @override
   Future<int> create(DataSource dataSource) async {
-    final int textureId = _textureCounter;
-    _textureCounter++;
+    final int textureId = _textureCounter++;
 
     late String uri;
     switch (dataSource.sourceType) {
@@ -95,58 +73,69 @@
             'web implementation of video_player cannot play content uri'));
     }
 
-    final _VideoPlayer player = _VideoPlayer(
-      uri: uri,
-      textureId: textureId,
-    );
+    final VideoElement videoElement = VideoElement()
+      ..id = 'videoElement-$textureId'
+      ..src = uri
+      ..style.border = 'none'
+      ..style.height = '100%'
+      ..style.width = '100%';
 
-    player.initialize();
+    // TODO(hterkelsen): Use initialization parameters once they are available
+    ui.platformViewRegistry.registerViewFactory(
+        'videoPlayer-$textureId', (int viewId) => videoElement);
+
+    final VideoPlayer player = VideoPlayer(videoElement: videoElement)
+      ..initialize();
 
     _videoPlayers[textureId] = player;
+
     return textureId;
   }
 
   @override
   Future<void> setLooping(int textureId, bool looping) async {
-    return _videoPlayers[textureId]!.setLooping(looping);
+    return _player(textureId).setLooping(looping);
   }
 
   @override
   Future<void> play(int textureId) async {
-    return _videoPlayers[textureId]!.play();
+    return _player(textureId).play();
   }
 
   @override
   Future<void> pause(int textureId) async {
-    return _videoPlayers[textureId]!.pause();
+    return _player(textureId).pause();
   }
 
   @override
   Future<void> setVolume(int textureId, double volume) async {
-    return _videoPlayers[textureId]!.setVolume(volume);
+    return _player(textureId).setVolume(volume);
   }
 
   @override
   Future<void> setPlaybackSpeed(int textureId, double speed) async {
-    assert(speed > 0);
-
-    return _videoPlayers[textureId]!.setPlaybackSpeed(speed);
+    return _player(textureId).setPlaybackSpeed(speed);
   }
 
   @override
   Future<void> seekTo(int textureId, Duration position) async {
-    return _videoPlayers[textureId]!.seekTo(position);
+    return _player(textureId).seekTo(position);
   }
 
   @override
   Future<Duration> getPosition(int textureId) async {
-    _videoPlayers[textureId]!.sendBufferingUpdate();
-    return _videoPlayers[textureId]!.getPosition();
+    return _player(textureId).getPosition();
   }
 
   @override
   Stream<VideoEvent> videoEventsFor(int textureId) {
-    return _videoPlayers[textureId]!.eventController.stream;
+    return _player(textureId).events;
+  }
+
+  // Retrieves a [VideoPlayer] by its internal `id`.
+  // It must have been created earlier by the [create] method.
+  VideoPlayer _player(int id) {
+    return _videoPlayers[id]!;
   }
 
   @override
@@ -158,171 +147,3 @@
   @override
   Future<void> setMixWithOthers(bool mixWithOthers) => Future<void>.value();
 }
-
-class _VideoPlayer {
-  _VideoPlayer({required this.uri, required this.textureId});
-
-  final StreamController<VideoEvent> eventController =
-      StreamController<VideoEvent>();
-
-  final String uri;
-  final int textureId;
-  late VideoElement videoElement;
-  bool isInitialized = false;
-  bool isBuffering = false;
-
-  void setBuffering(bool buffering) {
-    if (isBuffering != buffering) {
-      isBuffering = buffering;
-      eventController.add(VideoEvent(
-          eventType: isBuffering
-              ? VideoEventType.bufferingStart
-              : VideoEventType.bufferingEnd));
-    }
-  }
-
-  void initialize() {
-    videoElement = VideoElement()
-      ..src = uri
-      ..autoplay = false
-      ..controls = false
-      ..style.border = 'none'
-      ..style.height = '100%'
-      ..style.width = '100%';
-
-    // Allows Safari iOS to play the video inline
-    videoElement.setAttribute('playsinline', 'true');
-
-    // Set autoplay to false since most browsers won't autoplay a video unless it is muted
-    videoElement.setAttribute('autoplay', 'false');
-
-    // TODO(hterkelsen): Use initialization parameters once they are available
-    ui.platformViewRegistry.registerViewFactory(
-        'videoPlayer-$textureId', (int viewId) => videoElement);
-
-    videoElement.onCanPlay.listen((dynamic _) {
-      if (!isInitialized) {
-        isInitialized = true;
-        sendInitialized();
-      }
-      setBuffering(false);
-    });
-
-    videoElement.onCanPlayThrough.listen((dynamic _) {
-      setBuffering(false);
-    });
-
-    videoElement.onPlaying.listen((dynamic _) {
-      setBuffering(false);
-    });
-
-    videoElement.onWaiting.listen((dynamic _) {
-      setBuffering(true);
-      sendBufferingUpdate();
-    });
-
-    // The error event fires when some form of error occurs while attempting to load or perform the media.
-    videoElement.onError.listen((Event _) {
-      setBuffering(false);
-      // The Event itself (_) doesn't contain info about the actual error.
-      // We need to look at the HTMLMediaElement.error.
-      // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error
-      final MediaError error = videoElement.error!;
-      eventController.addError(PlatformException(
-        code: _kErrorValueToErrorName[error.code]!,
-        message: error.message != '' ? error.message : _kDefaultErrorMessage,
-        details: _kErrorValueToErrorDescription[error.code],
-      ));
-    });
-
-    videoElement.onEnded.listen((dynamic _) {
-      setBuffering(false);
-      eventController.add(VideoEvent(eventType: VideoEventType.completed));
-    });
-  }
-
-  void sendBufferingUpdate() {
-    eventController.add(VideoEvent(
-      buffered: _toDurationRange(videoElement.buffered),
-      eventType: VideoEventType.bufferingUpdate,
-    ));
-  }
-
-  Future<void> play() {
-    return videoElement.play().catchError((Object e) {
-      // play() attempts to begin playback of the media. It returns
-      // a Promise which can get rejected in case of failure to begin
-      // playback for any reason, such as permission issues.
-      // The rejection handler is called with a DomException.
-      // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/play
-      final DomException exception = e as DomException;
-      eventController.addError(PlatformException(
-        code: exception.name,
-        message: exception.message,
-      ));
-    }, test: (Object e) => e is DomException);
-  }
-
-  void pause() {
-    videoElement.pause();
-  }
-
-  void setLooping(bool value) {
-    videoElement.loop = value;
-  }
-
-  void setVolume(double value) {
-    // TODO(ditman): Do we need to expose a "muted" API? https://github.com/flutter/flutter/issues/60721
-    if (value > 0.0) {
-      videoElement.muted = false;
-    } else {
-      videoElement.muted = true;
-    }
-    videoElement.volume = value;
-  }
-
-  void setPlaybackSpeed(double speed) {
-    assert(speed > 0);
-
-    videoElement.playbackRate = speed;
-  }
-
-  void seekTo(Duration position) {
-    videoElement.currentTime = position.inMilliseconds.toDouble() / 1000;
-  }
-
-  Duration getPosition() {
-    return Duration(milliseconds: (videoElement.currentTime * 1000).round());
-  }
-
-  void sendInitialized() {
-    eventController.add(
-      VideoEvent(
-        eventType: VideoEventType.initialized,
-        duration: Duration(
-          milliseconds: (videoElement.duration * 1000).round(),
-        ),
-        size: Size(
-          videoElement.videoWidth.toDouble(),
-          videoElement.videoHeight.toDouble(),
-        ),
-      ),
-    );
-  }
-
-  void dispose() {
-    videoElement.removeAttribute('src');
-    videoElement.load();
-  }
-
-  List<DurationRange> _toDurationRange(TimeRanges buffered) {
-    final List<DurationRange> durationRange = <DurationRange>[];
-    for (int i = 0; i < buffered.length; i++) {
-      durationRange.add(DurationRange(
-        Duration(milliseconds: (buffered.start(i) * 1000).round()),
-        Duration(milliseconds: (buffered.end(i) * 1000).round()),
-      ));
-    }
-    return durationRange;
-  }
-}
diff --git a/packages/video_player/video_player_web/pubspec.yaml b/packages/video_player/video_player_web/pubspec.yaml
index 69a2df4..064517e 100644
--- a/packages/video_player/video_player_web/pubspec.yaml
+++ b/packages/video_player/video_player_web/pubspec.yaml
@@ -2,7 +2,7 @@
 description: Web platform implementation of video_player.
 repository: https://github.com/flutter/plugins/tree/main/packages/video_player/video_player_web
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+video_player%22
-version: 2.0.7
+version: 2.0.8
 
 environment:
   sdk: ">=2.12.0 <3.0.0"