[video_player] Use platform interface (#2276)

* [video_player] Use platform interface

* Move tests (leftover from part1)

* Merge master

* Export DurationRange and DataSourceType

* Export VideoFormat

* ignore: unused_element

* Format

* Update video_player_platform_interface dependency
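
A minimal sketch of the pattern this PR introduces, for reviewers skimming the diff: the plugin no longer talks to the `flutter.io/videoPlayer` method/event channels itself; creation, playback control, and the event stream are all routed through `VideoPlayerPlatform.instance` from `video_player_platform_interface`. Only names that appear in the diff below are used; `createNetworkPlayer` is a hypothetical helper, not plugin code.

```dart
import 'package:video_player_platform_interface/video_player_platform_interface.dart';

/// Hypothetical helper illustrating the delegation pattern; not part of the plugin.
Future<int> createNetworkPlayer(String uri) async {
  // Before: the plugin built a Map and called _channel.invokeMapMethod('create', ...).
  // After: it hands a typed DataSource to the platform interface singleton.
  final int textureId = await VideoPlayerPlatform.instance.create(
    DataSource(sourceType: DataSourceType.network, uri: uri),
  );

  // Player events now arrive as typed VideoEvent objects instead of raw maps.
  VideoPlayerPlatform.instance.videoEventsFor(textureId).listen((VideoEvent event) {
    if (event.eventType == VideoEventType.initialized) {
      print('initialized: duration=${event.duration}, size=${event.size}');
    }
  });

  return textureId;
}
```
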
diff --git a/packages/video_player/video_player/CHANGELOG.md b/packages/video_player/video_player/CHANGELOG.md
index d4824c4..50c528d 100644
--- a/packages/video_player/video_player/CHANGELOG.md
+++ b/packages/video_player/video_player/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.10.4
+
+* Port plugin code to use the federated Platform Interface instead of a MethodChannel directly.
+
 ## 0.10.3+3
 
 * Add DartDocs and unit tests.
diff --git a/packages/video_player/example/test_driver/video_player.dart b/packages/video_player/video_player/example/test_driver/video_player.dart
similarity index 100%
rename from packages/video_player/example/test_driver/video_player.dart
rename to packages/video_player/video_player/example/test_driver/video_player.dart
diff --git a/packages/video_player/example/test_driver/video_player_test.dart b/packages/video_player/video_player/example/test_driver/video_player_test.dart
similarity index 100%
rename from packages/video_player/example/test_driver/video_player_test.dart
rename to packages/video_player/video_player/example/test_driver/video_player_test.dart
diff --git a/packages/video_player/video_player/lib/video_player.dart b/packages/video_player/video_player/lib/video_player.dart
index f92693e..3a4768f 100644
--- a/packages/video_player/video_player/lib/video_player.dart
+++ b/packages/video_player/video_player/lib/video_player.dart
@@ -5,77 +5,19 @@
 import 'dart:async';
 import 'dart:io';
 
+import 'package:flutter/foundation.dart';
 import 'package:flutter/services.dart';
 import 'package:flutter/material.dart';
 import 'package:meta/meta.dart';
 
-final MethodChannel _channel = const MethodChannel('flutter.io/videoPlayer')
-  // This will clear all open videos on the platform when a full restart is
-  // performed.
-  ..invokeMethod<void>('init');
+import 'package:video_player_platform_interface/video_player_platform_interface.dart';
+export 'package:video_player_platform_interface/video_player_platform_interface.dart'
+    show DurationRange, DataSourceType, VideoFormat;
 
-/// Describes a discrete segment of time within a video using a [start] and
-/// [end] [Duration].
-class DurationRange {
-  /// Trusts that the given [start] and [end] are actually in order. They should
-  /// both be non-null.
-  DurationRange(this.start, this.end);
-
-  /// The beginning of the segment described relative to the beginning of the
-  /// entire video. Should be shorter than or equal to [end].
-  ///
-  /// For example, if the entire video is 4 minutes long and the range is from
-  /// 1:00-2:00, this should be a `Duration` of one minute.
-  final Duration start;
-
-  /// The end of the segment described as a duration relative to the beginning of
-  /// the entire video. This is expected to be non-null and longer than or equal
-  /// to [start].
-  ///
-  /// For example, if the entire video is 4 minutes long and the range is from
-  /// 1:00-2:00, this should be a `Duration` of two minutes.
-  final Duration end;
-
-  /// Assumes that [duration] is the total length of the video that this
-  /// DurationRange is a segment form. It returns the percentage that [start] is
-  /// through the entire video.
-  ///
-  /// For example, assume that the entire video is 4 minutes long. If [start] has
-  /// a duration of one minute, this will return `0.25` since the DurationRange
-  /// starts 25% of the way through the video's total length.
-  double startFraction(Duration duration) {
-    return start.inMilliseconds / duration.inMilliseconds;
-  }
-
-  /// Assumes that [duration] is the total length of the video that this
-  /// DurationRange is a segment form. It returns the percentage that [start] is
-  /// through the entire video.
-  ///
-  /// For example, assume that the entire video is 4 minutes long. If [end] has a
-  /// duration of two minutes, this will return `0.5` since the DurationRange
-  /// ends 50% of the way through the video's total length.
-  double endFraction(Duration duration) {
-    return end.inMilliseconds / duration.inMilliseconds;
-  }
-
-  @override
-  String toString() => '$runtimeType(start: $start, end: $end)';
-}
-
-/// The file format of the given video.
-enum VideoFormat {
-  /// Dynamic Adaptive Streaming over HTTP, also known as MPEG-DASH.
-  dash,
-
-  /// HTTP Live Streaming.
-  hls,
-
-  /// Smooth Streaming.
-  ss,
-
-  /// Any format other than the other ones defined in this enum.
-  other
-}
+// This will clear all open videos on the platform when a full restart is
+// performed.
+// ignore: unused_element
+final VideoPlayerPlatform _ = VideoPlayerPlatform.instance..init();
 
 /// The duration, current position, buffering state, error state and settings
 /// of a [VideoPlayerController].
@@ -187,20 +129,6 @@
   }
 }
 
-/// The way in which the video was originally loaded. This has nothing to do
-/// with the video's file type. It's just the place from which the video is
-/// fetched from.
-enum DataSourceType {
-  /// The video was included in the app's asset files.
-  asset,
-
-  /// The video was downloaded from the internet.
-  network,
-
-  /// The video was loaded off of the local filesystem.
-  file
-}
-
 /// Controls a platform video player, and provides updates when the state is
 /// changing.
 ///
@@ -277,75 +205,66 @@
     _lifeCycleObserver = _VideoAppLifeCycleObserver(this);
     _lifeCycleObserver.initialize();
     _creatingCompleter = Completer<void>();
-    Map<dynamic, dynamic> dataSourceDescription;
+
+    DataSource dataSourceDescription;
     switch (dataSourceType) {
       case DataSourceType.asset:
-        dataSourceDescription = <String, dynamic>{
-          'asset': dataSource,
-          'package': package
-        };
+        dataSourceDescription = DataSource(
+          sourceType: DataSourceType.asset,
+          asset: dataSource,
+          package: package,
+        );
         break;
       case DataSourceType.network:
-        dataSourceDescription = <String, dynamic>{
-          'uri': dataSource,
-          'formatHint': _videoFormatStringMap[formatHint]
-        };
+        dataSourceDescription = DataSource(
+          sourceType: DataSourceType.network,
+          uri: dataSource,
+          formatHint: formatHint,
+        );
         break;
       case DataSourceType.file:
-        dataSourceDescription = <String, dynamic>{'uri': dataSource};
+        dataSourceDescription = DataSource(
+          sourceType: DataSourceType.file,
+          uri: dataSource,
+        );
         break;
     }
-    final Map<String, dynamic> response =
-        await _channel.invokeMapMethod<String, dynamic>(
-      'create',
-      dataSourceDescription,
-    );
-    _textureId = response['textureId'];
+    _textureId =
+        await VideoPlayerPlatform.instance.create(dataSourceDescription);
     _creatingCompleter.complete(null);
     final Completer<void> initializingCompleter = Completer<void>();
 
-    DurationRange toDurationRange(dynamic value) {
-      final List<dynamic> pair = value;
-      return DurationRange(
-        Duration(milliseconds: pair[0]),
-        Duration(milliseconds: pair[1]),
-      );
-    }
-
-    void eventListener(dynamic event) {
+    void eventListener(VideoEvent event) {
       if (_isDisposed) {
         return;
       }
 
-      final Map<dynamic, dynamic> map = event;
-      switch (map['event']) {
-        case 'initialized':
+      switch (event.eventType) {
+        case VideoEventType.initialized:
           value = value.copyWith(
-            duration: Duration(milliseconds: map['duration']),
-            size: Size(map['width']?.toDouble() ?? 0.0,
-                map['height']?.toDouble() ?? 0.0),
+            duration: event.duration,
+            size: event.size,
           );
           initializingCompleter.complete(null);
           _applyLooping();
           _applyVolume();
           _applyPlayPause();
           break;
-        case 'completed':
+        case VideoEventType.completed:
           value = value.copyWith(isPlaying: false, position: value.duration);
           _timer?.cancel();
           break;
-        case 'bufferingUpdate':
-          final List<dynamic> values = map['values'];
-          value = value.copyWith(
-            buffered: values.map<DurationRange>(toDurationRange).toList(),
-          );
+        case VideoEventType.bufferingUpdate:
+          value = value.copyWith(buffered: event.buffered);
           break;
-        case 'bufferingStart':
+        case VideoEventType.bufferingStart:
           value = value.copyWith(isBuffering: true);
           break;
-        case 'bufferingEnd':
+        case VideoEventType.bufferingEnd:
           value = value.copyWith(isBuffering: false);
           break;
+        case VideoEventType.unknown:
+          break;
       }
     }
 
@@ -355,16 +274,12 @@
       _timer?.cancel();
     }
 
-    _eventSubscription = _eventChannelFor(_textureId)
-        .receiveBroadcastStream()
+    _eventSubscription = VideoPlayerPlatform.instance
+        .videoEventsFor(_textureId)
         .listen(eventListener, onError: errorListener);
     return initializingCompleter.future;
   }
 
-  EventChannel _eventChannelFor(int textureId) {
-    return EventChannel('flutter.io/videoPlayer/videoEvents$textureId');
-  }
-
   @override
   Future<void> dispose() async {
     if (_creatingCompleter != null) {
@@ -373,10 +288,7 @@
         _isDisposed = true;
         _timer?.cancel();
         await _eventSubscription?.cancel();
-        await _channel.invokeMethod<void>(
-          'dispose',
-          <String, dynamic>{'textureId': _textureId},
-        );
+        await VideoPlayerPlatform.instance.dispose(_textureId);
       }
       _lifeCycleObserver.dispose();
     }
@@ -411,10 +323,7 @@
     if (!value.initialized || _isDisposed) {
       return;
     }
-    _channel.invokeMethod<void>(
-      'setLooping',
-      <String, dynamic>{'textureId': _textureId, 'looping': value.isLooping},
-    );
+    VideoPlayerPlatform.instance.setLooping(_textureId, value.isLooping);
   }
 
   Future<void> _applyPlayPause() async {
@@ -422,10 +331,7 @@
       return;
     }
     if (value.isPlaying) {
-      await _channel.invokeMethod<void>(
-        'play',
-        <String, dynamic>{'textureId': _textureId},
-      );
+      VideoPlayerPlatform.instance.play(_textureId);
       _timer = Timer.periodic(
         const Duration(milliseconds: 500),
         (Timer timer) async {
@@ -441,10 +347,7 @@
       );
     } else {
       _timer?.cancel();
-      await _channel.invokeMethod<void>(
-        'pause',
-        <String, dynamic>{'textureId': _textureId},
-      );
+      VideoPlayerPlatform.instance.pause(_textureId);
     }
   }
 
@@ -452,10 +355,7 @@
     if (!value.initialized || _isDisposed) {
       return;
     }
-    await _channel.invokeMethod<void>(
-      'setVolume',
-      <String, dynamic>{'textureId': _textureId, 'volume': value.volume},
-    );
+    VideoPlayerPlatform.instance.setVolume(_textureId, value.volume);
   }
 
   /// The position in the current video.
@@ -463,12 +363,7 @@
     if (_isDisposed) {
       return null;
     }
-    return Duration(
-      milliseconds: await _channel.invokeMethod<int>(
-        'position',
-        <String, dynamic>{'textureId': _textureId},
-      ),
-    );
+    return await VideoPlayerPlatform.instance.getPosition(_textureId);
   }
 
-  /// Sets the video's current timestamp to be at [moment]. The next
+  /// Sets the video's current timestamp to be at [position]. The next
@@ -476,20 +371,17 @@
   ///
-  /// If [moment] is outside of the video's full range it will be automatically
+  /// If [position] is outside of the video's full range it will be automatically
   /// and silently clamped.
-  Future<void> seekTo(Duration moment) async {
+  Future<void> seekTo(Duration position) async {
     if (_isDisposed) {
       return;
     }
-    if (moment > value.duration) {
-      moment = value.duration;
-    } else if (moment < const Duration()) {
-      moment = const Duration();
+    if (position > value.duration) {
+      position = value.duration;
+    } else if (position < const Duration()) {
+      position = const Duration();
     }
-    await _channel.invokeMethod<void>('seekTo', <String, dynamic>{
-      'textureId': _textureId,
-      'location': moment.inMilliseconds,
-    });
-    value = value.copyWith(position: moment);
+    VideoPlayerPlatform.instance.seekTo(_textureId, position);
+    value = value.copyWith(position: position);
   }
 
   /// Sets the audio volume of [this].
@@ -500,14 +392,6 @@
     value = value.copyWith(volume: volume.clamp(0.0, 1.0));
     await _applyVolume();
   }
-
-  static const Map<VideoFormat, String> _videoFormatStringMap =
-      <VideoFormat, String>{
-    VideoFormat.ss: 'ss',
-    VideoFormat.hls: 'hls',
-    VideoFormat.dash: 'dash',
-    VideoFormat.other: 'other',
-  };
 }
 
 class _VideoAppLifeCycleObserver extends Object with WidgetsBindingObserver {
@@ -594,7 +478,9 @@
 
   @override
   Widget build(BuildContext context) {
-    return _textureId == null ? Container() : Texture(textureId: _textureId);
+    return _textureId == null
+        ? Container()
+        : VideoPlayerPlatform.instance.buildView(_textureId);
   }
 }
 
diff --git a/packages/video_player/video_player/pubspec.yaml b/packages/video_player/video_player/pubspec.yaml
index cf85568..e191377 100644
--- a/packages/video_player/video_player/pubspec.yaml
+++ b/packages/video_player/video_player/pubspec.yaml
@@ -2,8 +2,8 @@
 description: Flutter plugin for displaying inline video with other Flutter
   widgets on Android and iOS.
 author: Flutter Team <flutter-dev@googlegroups.com>
-version: 0.10.3+3
-homepage: https://github.com/flutter/plugins/tree/master/packages/video_player
+version: 0.10.4
+homepage: https://github.com/flutter/plugins/tree/master/packages/video_player/video_player
 
 flutter:
   plugin:
@@ -13,6 +13,8 @@
 
 dependencies:
   meta: "^1.0.5"
+  video_player_platform_interface: ^1.0.1
+
   flutter:
     sdk: flutter
 
diff --git a/packages/video_player/video_player/test/video_player_test.dart b/packages/video_player/video_player/test/video_player_test.dart
index 10b5754..f0aac12 100644
--- a/packages/video_player/video_player/test/video_player_test.dart
+++ b/packages/video_player/video_player/test/video_player_test.dart
@@ -9,6 +9,7 @@
 import 'package:flutter/widgets.dart';
 import 'package:video_player/video_player.dart';
 import 'package:flutter_test/flutter_test.dart';
+import 'package:video_player_platform_interface/video_player_platform_interface.dart';
 
 class FakeController extends ValueNotifier<VideoPlayerValue>
     implements VideoPlayerController {
diff --git a/packages/video_player/video_player_platform_interface/lib/video_player_platform_interface.dart b/packages/video_player/video_player_platform_interface/lib/video_player_platform_interface.dart
index 031baf6..46b04d4 100644
--- a/packages/video_player/video_player_platform_interface/lib/video_player_platform_interface.dart
+++ b/packages/video_player/video_player_platform_interface/lib/video_player_platform_interface.dart
@@ -135,17 +135,33 @@
   final String package;
 }
 
+/// The way in which the video was originally loaded. This has nothing to do
+/// with the video's file type. It's just the place from which the video is
+/// fetched.
 enum DataSourceType {
+  /// The video was included in the app's asset files.
   asset,
+
+  /// The video was downloaded from the internet.
   network,
-  file,
+
+  /// The video was loaded off of the local filesystem.
+  file
 }
 
+/// The file format of the given video.
 enum VideoFormat {
+  /// Dynamic Adaptive Streaming over HTTP, also known as MPEG-DASH.
   dash,
+
+  /// HTTP Live Streaming.
   hls,
+
+  /// Smooth Streaming.
   ss,
-  other,
+
+  /// Any format other than the other ones defined in this enum.
+  other
 }
 
 class VideoEvent {
@@ -189,16 +205,46 @@
   unknown,
 }
 
+/// Describes a discrete segment of time within a video using a [start] and
+/// [end] [Duration].
 class DurationRange {
+  /// Trusts that the given [start] and [end] are actually in order. They should
+  /// both be non-null.
   DurationRange(this.start, this.end);
 
+  /// The beginning of the segment described relative to the beginning of the
+  /// entire video. Should be shorter than or equal to [end].
+  ///
+  /// For example, if the entire video is 4 minutes long and the range is from
+  /// 1:00-2:00, this should be a `Duration` of one minute.
   final Duration start;
+
+  /// The end of the segment described as a duration relative to the beginning of
+  /// the entire video. This is expected to be non-null and longer than or equal
+  /// to [start].
+  ///
+  /// For example, if the entire video is 4 minutes long and the range is from
+  /// 1:00-2:00, this should be a `Duration` of two minutes.
   final Duration end;
 
+  /// Assumes that [duration] is the total length of the video that this
+  /// DurationRange is a segment of. It returns the percentage that [start] is
+  /// through the entire video.
+  ///
+  /// For example, assume that the entire video is 4 minutes long. If [start] has
+  /// a duration of one minute, this will return `0.25` since the DurationRange
+  /// starts 25% of the way through the video's total length.
   double startFraction(Duration duration) {
     return start.inMilliseconds / duration.inMilliseconds;
   }
 
+  /// Assumes that [duration] is the total length of the video that this
+  /// DurationRange is a segment of. It returns the percentage that [end] is
+  /// through the entire video.
+  ///
+  /// For example, assume that the entire video is 4 minutes long. If [end] has a
+  /// duration of two minutes, this will return `0.5` since the DurationRange
+  /// ends 50% of the way through the video's total length.
   double endFraction(Duration duration) {
     return end.inMilliseconds / duration.inMilliseconds;
   }