[camera] Switch to internal method channels (#5943)
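This PR moves the Dart side of the Android camera implementation into `camera_android` itself: a new `AndroidCamera` class talks to the platform over channels prefixed `plugins.flutter.io/camera_android` instead of the shared `plugins.flutter.io/camera` names, and the package advertises it through a `dartPluginClass` entry in its pubspec. As a rough sketch of what that registration amounts to (not code from this diff), an app or test can also select the implementation explicitly, which is what the updated example integration test does in `setUpAll()`:

```dart
import 'package:camera_android/camera_android.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';

void main() {
  // This is what AndroidCamera.registerWith() does when the Flutter tool
  // invokes the dartPluginClass; the integration test sets it by hand.
  CameraPlatform.instance = AndroidCamera();
}
```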
diff --git a/CODEOWNERS b/CODEOWNERS
index 68e4cb7..16bf98b 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -26,7 +26,7 @@
packages/**/*_web/** @ditman
# - Android
-packages/camera/camera/android/** @camsim99
+packages/camera/camera_android/** @camsim99
packages/espresso/** @blasten
packages/flutter_plugin_android_lifecycle/** @blasten
packages/google_maps_flutter/google_maps_flutter/android/** @GaryQian
@@ -40,7 +40,7 @@
packages/video_player/video_player_android/** @blasten
# - iOS
-packages/camera/camera/ios/** @hellohuanlin
+packages/camera/camera_avfoundation/** @hellohuanlin
packages/google_maps_flutter/google_maps_flutter/ios/** @cyanglaz
packages/google_sign_in/google_sign_in_ios/** @jmagman
packages/image_picker/image_picker_ios/** @cyanglaz
diff --git a/packages/camera/camera_android/CHANGELOG.md b/packages/camera/camera_android/CHANGELOG.md
index c57f301..5bc7c8a 100644
--- a/packages/camera/camera_android/CHANGELOG.md
+++ b/packages/camera/camera_android/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.8
+
+* Switches to internal method channel implementation.
+
## 0.9.7+1
* Splits from `camera` as a federated implementation.
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/DartMessenger.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/DartMessenger.java
index dc62fce..e15078e 100644
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/DartMessenger.java
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/DartMessenger.java
@@ -64,8 +64,9 @@
* the main thread. The handler is mainly supplied so it will be easier to test this class.
*/
DartMessenger(BinaryMessenger messenger, long cameraId, @NonNull Handler handler) {
- cameraChannel = new MethodChannel(messenger, "flutter.io/cameraPlugin/camera" + cameraId);
- deviceChannel = new MethodChannel(messenger, "flutter.io/cameraPlugin/device");
+ cameraChannel =
+ new MethodChannel(messenger, "plugins.flutter.io/camera_android/camera" + cameraId);
+ deviceChannel = new MethodChannel(messenger, "plugins.flutter.io/camera_android/fromPlatform");
this.handler = handler;
}
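(The renamed channels above pair with the ones the new Dart implementation opens; a condensed sketch for cross-reference follows. The helper name `cameraChannelFor` is illustrative, not from this diff.)

```dart
import 'package:flutter/services.dart';

// Dart counterparts from android_camera.dart later in this diff: device-level
// events arrive on ".../fromPlatform", per-camera events on ".../camera<id>".
const MethodChannel deviceChannel =
    MethodChannel('plugins.flutter.io/camera_android/fromPlatform');

MethodChannel cameraChannelFor(int cameraId) =>
    MethodChannel('plugins.flutter.io/camera_android/camera$cameraId');
```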
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
index 35cc2b0..38201e1 100644
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
@@ -49,8 +49,9 @@
this.permissionsRegistry = permissionsAdder;
this.textureRegistry = textureRegistry;
- methodChannel = new MethodChannel(messenger, "plugins.flutter.io/camera");
- imageStreamChannel = new EventChannel(messenger, "plugins.flutter.io/camera/imageStream");
+ methodChannel = new MethodChannel(messenger, "plugins.flutter.io/camera_android");
+ imageStreamChannel =
+ new EventChannel(messenger, "plugins.flutter.io/camera_android/imageStream");
methodChannel.setMethodCallHandler(this);
}
diff --git a/packages/camera/camera_android/example/integration_test/camera_test.dart b/packages/camera/camera_android/example/integration_test/camera_test.dart
index 05c6693..99029fc 100644
--- a/packages/camera/camera_android/example/integration_test/camera_test.dart
+++ b/packages/camera/camera_android/example/integration_test/camera_test.dart
@@ -5,6 +5,7 @@
import 'dart:io';
import 'dart:ui';
+import 'package:camera_android/camera_android.dart';
import 'package:camera_example/camera_controller.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/painting.dart';
@@ -19,6 +20,7 @@
IntegrationTestWidgetsFlutterBinding.ensureInitialized();
setUpAll(() async {
+ CameraPlatform.instance = AndroidCamera();
final Directory extDir = await getTemporaryDirectory();
testDir = await Directory('${extDir.path}/test').create(recursive: true);
});
diff --git a/packages/camera/camera_android/lib/camera_android.dart b/packages/camera/camera_android/lib/camera_android.dart
new file mode 100644
index 0000000..93e3e17
--- /dev/null
+++ b/packages/camera/camera_android/lib/camera_android.dart
@@ -0,0 +1,5 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'src/android_camera.dart';
diff --git a/packages/camera/camera_android/lib/src/android_camera.dart b/packages/camera/camera_android/lib/src/android_camera.dart
new file mode 100644
index 0000000..5fb3443
--- /dev/null
+++ b/packages/camera/camera_android/lib/src/android_camera.dart
@@ -0,0 +1,587 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:stream_transform/stream_transform.dart';
+
+import 'type_conversion.dart';
+import 'utils.dart';
+
+const MethodChannel _channel =
+ MethodChannel('plugins.flutter.io/camera_android');
+
+/// The Android implementation of [CameraPlatform] that uses method channels.
+class AndroidCamera extends CameraPlatform {
+ /// Construct a new method channel camera instance.
+ AndroidCamera() {
+ const MethodChannel channel =
+ MethodChannel('plugins.flutter.io/camera_android/fromPlatform');
+ channel.setMethodCallHandler(
+ (MethodCall call) => handleDeviceMethodCall(call));
+ }
+
+ /// Registers this class as the default instance of [CameraPlatform].
+ static void registerWith() {
+ CameraPlatform.instance = AndroidCamera();
+ }
+
+ final Map<int, MethodChannel> _channels = <int, MethodChannel>{};
+
+ /// The controller we need to broadcast the different events coming
+ /// from handleMethodCall, specific to camera events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ final StreamController<CameraEvent> cameraEventStreamController =
+ StreamController<CameraEvent>.broadcast();
+
+ /// The controller we need to broadcast the different events coming
+ /// from handleMethodCall, specific to general device events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ final StreamController<DeviceEvent> deviceEventStreamController =
+ StreamController<DeviceEvent>.broadcast();
+
+ // The stream to receive frames from the native code.
+ StreamSubscription<dynamic>? _platformImageStreamSubscription;
+
+ // The stream for vending frames to platform interface clients.
+ StreamController<CameraImageData>? _frameStreamController;
+
+ Stream<CameraEvent> _cameraEvents(int cameraId) =>
+ cameraEventStreamController.stream
+ .where((CameraEvent event) => event.cameraId == cameraId);
+
+ @override
+ Future<List<CameraDescription>> availableCameras() async {
+ try {
+ final List<Map<dynamic, dynamic>>? cameras = await _channel
+ .invokeListMethod<Map<dynamic, dynamic>>('availableCameras');
+
+ if (cameras == null) {
+ return <CameraDescription>[];
+ }
+
+ return cameras.map((Map<dynamic, dynamic> camera) {
+ return CameraDescription(
+ name: camera['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(camera['lensFacing']! as String),
+ sensorOrientation: camera['sensorOrientation']! as int,
+ );
+ }).toList();
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<int> createCamera(
+ CameraDescription cameraDescription,
+ ResolutionPreset? resolutionPreset, {
+ bool enableAudio = false,
+ }) async {
+ try {
+ final Map<String, dynamic>? reply = await _channel
+ .invokeMapMethod<String, dynamic>('create', <String, dynamic>{
+ 'cameraName': cameraDescription.name,
+ 'resolutionPreset': resolutionPreset != null
+ ? _serializeResolutionPreset(resolutionPreset)
+ : null,
+ 'enableAudio': enableAudio,
+ });
+
+ return reply!['cameraId']! as int;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> initializeCamera(
+ int cameraId, {
+ ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+ }) {
+ _channels.putIfAbsent(cameraId, () {
+ final MethodChannel channel =
+ MethodChannel('plugins.flutter.io/camera_android/camera$cameraId');
+ channel.setMethodCallHandler(
+ (MethodCall call) => handleCameraMethodCall(call, cameraId));
+ return channel;
+ });
+
+ final Completer<void> _completer = Completer<void>();
+
+ onCameraInitialized(cameraId).first.then((CameraInitializedEvent value) {
+ _completer.complete();
+ });
+
+ _channel.invokeMapMethod<String, dynamic>(
+ 'initialize',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'imageFormatGroup': imageFormatGroup.name(),
+ },
+ ).catchError(
+ (Object error, StackTrace stackTrace) {
+ if (error is! PlatformException) {
+ throw error;
+ }
+ _completer.completeError(
+ CameraException(error.code, error.message),
+ stackTrace,
+ );
+ },
+ );
+
+ return _completer.future;
+ }
+
+ @override
+ Future<void> dispose(int cameraId) async {
+ if (_channels.containsKey(cameraId)) {
+ final MethodChannel? cameraChannel = _channels[cameraId];
+ cameraChannel?.setMethodCallHandler(null);
+ _channels.remove(cameraId);
+ }
+
+ await _channel.invokeMethod<void>(
+ 'dispose',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
+ }
+
+ @override
+ Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraResolutionChangedEvent>();
+ }
+
+ @override
+ Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
+ }
+
+ @override
+ Stream<CameraErrorEvent> onCameraError(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraErrorEvent>();
+ }
+
+ @override
+ Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
+ return _cameraEvents(cameraId).whereType<VideoRecordedEvent>();
+ }
+
+ @override
+ Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+ return deviceEventStreamController.stream
+ .whereType<DeviceOrientationChangedEvent>();
+ }
+
+ @override
+ Future<void> lockCaptureOrientation(
+ int cameraId,
+ DeviceOrientation orientation,
+ ) async {
+ await _channel.invokeMethod<String>(
+ 'lockCaptureOrientation',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'orientation': serializeDeviceOrientation(orientation)
+ },
+ );
+ }
+
+ @override
+ Future<void> unlockCaptureOrientation(int cameraId) async {
+ await _channel.invokeMethod<String>(
+ 'unlockCaptureOrientation',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<XFile> takePicture(int cameraId) async {
+ final String? path = await _channel.invokeMethod<String>(
+ 'takePicture',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ if (path == null) {
+ throw CameraException(
+ 'INVALID_PATH',
+ 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+ );
+ }
+
+ return XFile(path);
+ }
+
+ @override
+ Future<void> prepareForVideoRecording() =>
+ _channel.invokeMethod<void>('prepareForVideoRecording');
+
+ @override
+ Future<void> startVideoRecording(int cameraId,
+ {Duration? maxVideoDuration}) async {
+ await _channel.invokeMethod<void>(
+ 'startVideoRecording',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': maxVideoDuration?.inMilliseconds,
+ },
+ );
+ }
+
+ @override
+ Future<XFile> stopVideoRecording(int cameraId) async {
+ final String? path = await _channel.invokeMethod<String>(
+ 'stopVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ if (path == null) {
+ throw CameraException(
+ 'INVALID_PATH',
+ 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+ );
+ }
+
+ return XFile(path);
+ }
+
+ @override
+ Future<void> pauseVideoRecording(int cameraId) => _channel.invokeMethod<void>(
+ 'pauseVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ @override
+ Future<void> resumeVideoRecording(int cameraId) =>
+ _channel.invokeMethod<void>(
+ 'resumeVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ @override
+ Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
+ {CameraImageStreamOptions? options}) {
+ _frameStreamController = StreamController<CameraImageData>(
+ onListen: _onFrameStreamListen,
+ onPause: _onFrameStreamPauseResume,
+ onResume: _onFrameStreamPauseResume,
+ onCancel: _onFrameStreamCancel,
+ );
+ return _frameStreamController!.stream;
+ }
+
+ void _onFrameStreamListen() {
+ _startPlatformStream();
+ }
+
+ Future<void> _startPlatformStream() async {
+ await _channel.invokeMethod<void>('startImageStream');
+ const EventChannel cameraEventChannel =
+ EventChannel('plugins.flutter.io/camera_android/imageStream');
+ _platformImageStreamSubscription =
+ cameraEventChannel.receiveBroadcastStream().listen((dynamic imageData) {
+ _frameStreamController!
+ .add(cameraImageFromPlatformData(imageData as Map<dynamic, dynamic>));
+ });
+ }
+
+ FutureOr<void> _onFrameStreamCancel() async {
+ await _channel.invokeMethod<void>('stopImageStream');
+ await _platformImageStreamSubscription?.cancel();
+ _platformImageStreamSubscription = null;
+ _frameStreamController = null;
+ }
+
+ void _onFrameStreamPauseResume() {
+ throw CameraException('InvalidCall',
+ 'Pause and resume are not supported for onStreamedFrameAvailable');
+ }
+
+ @override
+ Future<void> setFlashMode(int cameraId, FlashMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setFlashMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': _serializeFlashMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setExposureMode(int cameraId, ExposureMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setExposureMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': serializeExposureMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setExposurePoint(int cameraId, Point<double>? point) {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ return _channel.invokeMethod<void>(
+ 'setExposurePoint',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'reset': point == null,
+ 'x': point?.x,
+ 'y': point?.y,
+ },
+ );
+ }
+
+ @override
+ Future<double> getMinExposureOffset(int cameraId) async {
+ final double? minExposureOffset = await _channel.invokeMethod<double>(
+ 'getMinExposureOffset',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return minExposureOffset!;
+ }
+
+ @override
+ Future<double> getMaxExposureOffset(int cameraId) async {
+ final double? maxExposureOffset = await _channel.invokeMethod<double>(
+ 'getMaxExposureOffset',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return maxExposureOffset!;
+ }
+
+ @override
+ Future<double> getExposureOffsetStepSize(int cameraId) async {
+ final double? stepSize = await _channel.invokeMethod<double>(
+ 'getExposureOffsetStepSize',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return stepSize!;
+ }
+
+ @override
+ Future<double> setExposureOffset(int cameraId, double offset) async {
+ final double? appliedOffset = await _channel.invokeMethod<double>(
+ 'setExposureOffset',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'offset': offset,
+ },
+ );
+
+ return appliedOffset!;
+ }
+
+ @override
+ Future<void> setFocusMode(int cameraId, FocusMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setFocusMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': serializeFocusMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setFocusPoint(int cameraId, Point<double>? point) {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ return _channel.invokeMethod<void>(
+ 'setFocusPoint',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'reset': point == null,
+ 'x': point?.x,
+ 'y': point?.y,
+ },
+ );
+ }
+
+ @override
+ Future<double> getMaxZoomLevel(int cameraId) async {
+ final double? maxZoomLevel = await _channel.invokeMethod<double>(
+ 'getMaxZoomLevel',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return maxZoomLevel!;
+ }
+
+ @override
+ Future<double> getMinZoomLevel(int cameraId) async {
+ final double? minZoomLevel = await _channel.invokeMethod<double>(
+ 'getMinZoomLevel',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return minZoomLevel!;
+ }
+
+ @override
+ Future<void> setZoomLevel(int cameraId, double zoom) async {
+ try {
+ await _channel.invokeMethod<double>(
+ 'setZoomLevel',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'zoom': zoom,
+ },
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> pausePreview(int cameraId) async {
+ await _channel.invokeMethod<double>(
+ 'pausePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<void> resumePreview(int cameraId) async {
+ await _channel.invokeMethod<double>(
+ 'resumePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Widget buildPreview(int cameraId) {
+ return Texture(textureId: cameraId);
+ }
+
+ /// Returns the flash mode as a String.
+ String _serializeFlashMode(FlashMode flashMode) {
+ switch (flashMode) {
+ case FlashMode.off:
+ return 'off';
+ case FlashMode.auto:
+ return 'auto';
+ case FlashMode.always:
+ return 'always';
+ case FlashMode.torch:
+ return 'torch';
+ default:
+ throw ArgumentError('Unknown FlashMode value');
+ }
+ }
+
+ /// Returns the resolution preset as a String.
+ String _serializeResolutionPreset(ResolutionPreset resolutionPreset) {
+ switch (resolutionPreset) {
+ case ResolutionPreset.max:
+ return 'max';
+ case ResolutionPreset.ultraHigh:
+ return 'ultraHigh';
+ case ResolutionPreset.veryHigh:
+ return 'veryHigh';
+ case ResolutionPreset.high:
+ return 'high';
+ case ResolutionPreset.medium:
+ return 'medium';
+ case ResolutionPreset.low:
+ return 'low';
+ default:
+ throw ArgumentError('Unknown ResolutionPreset value');
+ }
+ }
+
+ /// Converts messages received from the native platform into device events.
+ ///
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ Future<dynamic> handleDeviceMethodCall(MethodCall call) async {
+ switch (call.method) {
+ case 'orientation_changed':
+ deviceEventStreamController.add(DeviceOrientationChangedEvent(
+ deserializeDeviceOrientation(
+ call.arguments['orientation']! as String)));
+ break;
+ default:
+ throw MissingPluginException();
+ }
+ }
+
+ /// Converts messages received from the native platform into camera events.
+ ///
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ Future<dynamic> handleCameraMethodCall(MethodCall call, int cameraId) async {
+ switch (call.method) {
+ case 'initialized':
+ cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ call.arguments['previewWidth']! as double,
+ call.arguments['previewHeight']! as double,
+ deserializeExposureMode(call.arguments['exposureMode']! as String),
+ call.arguments['exposurePointSupported']! as bool,
+ deserializeFocusMode(call.arguments['focusMode']! as String),
+ call.arguments['focusPointSupported']! as bool,
+ ));
+ break;
+ case 'resolution_changed':
+ cameraEventStreamController.add(CameraResolutionChangedEvent(
+ cameraId,
+ call.arguments['captureWidth']! as double,
+ call.arguments['captureHeight']! as double,
+ ));
+ break;
+ case 'camera_closing':
+ cameraEventStreamController.add(CameraClosingEvent(
+ cameraId,
+ ));
+ break;
+ case 'video_recorded':
+ cameraEventStreamController.add(VideoRecordedEvent(
+ cameraId,
+ XFile(call.arguments['path']! as String),
+ call.arguments['maxVideoDuration'] != null
+ ? Duration(
+ milliseconds: call.arguments['maxVideoDuration']! as int)
+ : null,
+ ));
+ break;
+ case 'error':
+ cameraEventStreamController.add(CameraErrorEvent(
+ cameraId,
+ call.arguments['description']! as String,
+ ));
+ break;
+ default:
+ throw MissingPluginException();
+ }
+ }
+}
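For orientation, here is a minimal usage sketch of the surface defined above, assuming a real Android device with camera permission granted; the function name `takeOnePicture` is illustrative and not part of this diff:

```dart
import 'package:camera_android/camera_android.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';

// Hypothetical helper exercising the calls AndroidCamera forwards over
// 'plugins.flutter.io/camera_android'.
Future<void> takeOnePicture() async {
  final AndroidCamera camera = AndroidCamera();
  final List<CameraDescription> cameras = await camera.availableCameras();

  // 'create' replies with the camera id used by every later call.
  final int cameraId =
      await camera.createCamera(cameras.first, ResolutionPreset.medium);

  // Completes once the 'initialized' event arrives on
  // plugins.flutter.io/camera_android/camera<id>.
  await camera.initializeCamera(cameraId);

  final XFile picture = await camera.takePicture(cameraId);
  print('Saved picture to ${picture.path}');

  await camera.dispose(cameraId);
}
```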
diff --git a/packages/camera/camera_android/lib/src/type_conversion.dart b/packages/camera/camera_android/lib/src/type_conversion.dart
new file mode 100644
index 0000000..754a5a0
--- /dev/null
+++ b/packages/camera/camera_android/lib/src/type_conversion.dart
@@ -0,0 +1,49 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+
+/// Converts method channel call [data] for `receivedImageStreamData` to a
+/// [CameraImageData].
+CameraImageData cameraImageFromPlatformData(Map<dynamic, dynamic> data) {
+ return CameraImageData(
+ format: _cameraImageFormatFromPlatformData(data['format']),
+ height: data['height'] as int,
+ width: data['width'] as int,
+ lensAperture: data['lensAperture'] as double?,
+ sensorExposureTime: data['sensorExposureTime'] as int?,
+ sensorSensitivity: data['sensorSensitivity'] as double?,
+ planes: List<CameraImagePlane>.unmodifiable(
+ (data['planes'] as List<dynamic>).map<CameraImagePlane>(
+ (dynamic planeData) => _cameraImagePlaneFromPlatformData(
+ planeData as Map<dynamic, dynamic>))));
+}
+
+CameraImageFormat _cameraImageFormatFromPlatformData(dynamic data) {
+ return CameraImageFormat(_imageFormatGroupFromPlatformData(data), raw: data);
+}
+
+ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
+ switch (data) {
+ case 35: // android.graphics.ImageFormat.YUV_420_888
+ return ImageFormatGroup.yuv420;
+ case 256: // android.graphics.ImageFormat.JPEG
+ return ImageFormatGroup.jpeg;
+ }
+
+ return ImageFormatGroup.unknown;
+}
+
+CameraImagePlane _cameraImagePlaneFromPlatformData(Map<dynamic, dynamic> data) {
+ return CameraImagePlane(
+ bytes: data['bytes'] as Uint8List,
+ bytesPerPixel: data['bytesPerPixel'] as int?,
+ bytesPerRow: data['bytesPerRow'] as int,
+ height: data['height'] as int?,
+ width: data['width'] as int?);
+}
diff --git a/packages/camera/camera_android/lib/src/utils.dart b/packages/camera/camera_android/lib/src/utils.dart
new file mode 100644
index 0000000..663ec6d
--- /dev/null
+++ b/packages/camera/camera_android/lib/src/utils.dart
@@ -0,0 +1,51 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+
+/// Parses a string into a corresponding CameraLensDirection.
+CameraLensDirection parseCameraLensDirection(String string) {
+ switch (string) {
+ case 'front':
+ return CameraLensDirection.front;
+ case 'back':
+ return CameraLensDirection.back;
+ case 'external':
+ return CameraLensDirection.external;
+ }
+ throw ArgumentError('Unknown CameraLensDirection value');
+}
+
+/// Returns the device orientation as a String.
+String serializeDeviceOrientation(DeviceOrientation orientation) {
+ switch (orientation) {
+ case DeviceOrientation.portraitUp:
+ return 'portraitUp';
+ case DeviceOrientation.portraitDown:
+ return 'portraitDown';
+ case DeviceOrientation.landscapeRight:
+ return 'landscapeRight';
+ case DeviceOrientation.landscapeLeft:
+ return 'landscapeLeft';
+ default:
+ throw ArgumentError('Unknown DeviceOrientation value');
+ }
+}
+
+/// Returns the device orientation for a given String.
+DeviceOrientation deserializeDeviceOrientation(String str) {
+ switch (str) {
+ case 'portraitUp':
+ return DeviceOrientation.portraitUp;
+ case 'portraitDown':
+ return DeviceOrientation.portraitDown;
+ case 'landscapeRight':
+ return DeviceOrientation.landscapeRight;
+ case 'landscapeLeft':
+ return DeviceOrientation.landscapeLeft;
+ default:
+ throw ArgumentError('"$str" is not a valid DeviceOrientation value');
+ }
+}
diff --git a/packages/camera/camera_android/pubspec.yaml b/packages/camera/camera_android/pubspec.yaml
index 908d55f..73eeaa7 100644
--- a/packages/camera/camera_android/pubspec.yaml
+++ b/packages/camera/camera_android/pubspec.yaml
@@ -2,7 +2,7 @@
description: Android implementation of the camera plugin.
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_android
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.7+1
+version: 0.9.8
environment:
sdk: ">=2.14.0 <3.0.0"
@@ -15,14 +15,17 @@
android:
package: io.flutter.plugins.camera
pluginClass: CameraPlugin
+ dartPluginClass: AndroidCamera
dependencies:
camera_platform_interface: ^2.2.0
flutter:
sdk: flutter
flutter_plugin_android_lifecycle: ^2.0.2
+ stream_transform: ^2.0.0
dev_dependencies:
+ async: ^2.5.0
flutter_driver:
sdk: flutter
flutter_test:
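The new `dartPluginClass: AndroidCamera` entry is what lets the Flutter tool register the Dart implementation without any app-side code. Roughly (this is an approximation of the generated registrant, not output copied from the tool) it boils down to:

```dart
import 'package:camera_android/camera_android.dart';

void registerPlugins() {
  // Sets CameraPlatform.instance = AndroidCamera(); see registerWith() above.
  AndroidCamera.registerWith();
}
```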
diff --git a/packages/camera/camera_android/test/android_camera_test.dart b/packages/camera/camera_android/test/android_camera_test.dart
new file mode 100644
index 0000000..9674b0c
--- /dev/null
+++ b/packages/camera/camera_android/test/android_camera_test.dart
@@ -0,0 +1,1075 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:async/async.dart';
+import 'package:camera_android/src/android_camera.dart';
+import 'package:camera_android/src/utils.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'method_channel_mock.dart';
+
+const String _channelName = 'plugins.flutter.io/camera_android';
+
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ test('registers instance', () async {
+ AndroidCamera.registerWith();
+ expect(CameraPlatform.instance, isA<AndroidCamera>());
+ });
+
+ group('Creation, Initialization & Disposal Tests', () {
+ test('Should send creation data and receive back a camera id', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ }
+ });
+ final AndroidCamera camera = AndroidCamera();
+
+ // Act
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0),
+ ResolutionPreset.high,
+ );
+
+ // Assert
+ expect(cameraMockChannel.log, <Matcher>[
+ isMethodCall(
+ 'create',
+ arguments: <String, Object?>{
+ 'cameraName': 'Test',
+ 'resolutionPreset': 'high',
+ 'enableAudio': false
+ },
+ ),
+ ]);
+ expect(cameraId, 1);
+ });
+
+ test('Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final AndroidCamera camera = AndroidCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final AndroidCamera camera = AndroidCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test(
+ 'Should throw CameraException when initialize throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'initialize': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ },
+ );
+ final AndroidCamera camera = AndroidCamera();
+
+ // Act
+ expect(
+ () => camera.initializeCamera(0),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having(
+ (CameraException e) => e.description,
+ 'description',
+ 'Mock error message used during testing.',
+ ),
+ ),
+ );
+ },
+ );
+
+ test('Should send initialization data', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ 'initialize': null
+ });
+ final AndroidCamera camera = AndroidCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+
+ // Act
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ isMethodCall(
+ 'initialize',
+ arguments: <String, Object?>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ ),
+ ]);
+ });
+
+ test('Should send a disposal call on dispose', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null,
+ 'dispose': <String, dynamic>{'cameraId': 1}
+ });
+
+ final AndroidCamera camera = AndroidCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Act
+ await camera.dispose(cameraId);
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ anything,
+ isMethodCall(
+ 'dispose',
+ arguments: <String, Object?>{'cameraId': 1},
+ ),
+ ]);
+ });
+ });
+
+ group('Event Tests', () {
+ late AndroidCamera camera;
+ late int cameraId;
+ setUp(() async {
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = AndroidCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+ });
+
+ test('Should receive initialized event', () async {
+ // Act
+ final Stream<CameraInitializedEvent> eventStream =
+ camera.onCameraInitialized(cameraId);
+ final StreamQueue<CameraInitializedEvent> streamQueue =
+ StreamQueue<CameraInitializedEvent>(eventStream);
+
+ // Emit test events
+ final CameraInitializedEvent event = CameraInitializedEvent(
+ cameraId,
+ 3840,
+ 2160,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ );
+ await camera.handleCameraMethodCall(
+ MethodCall('initialized', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive resolution changes', () async {
+ // Act
+ final Stream<CameraResolutionChangedEvent> resolutionStream =
+ camera.onCameraResolutionChanged(cameraId);
+ final StreamQueue<CameraResolutionChangedEvent> streamQueue =
+ StreamQueue<CameraResolutionChangedEvent>(resolutionStream);
+
+ // Emit test events
+ final CameraResolutionChangedEvent fhdEvent =
+ CameraResolutionChangedEvent(cameraId, 1920, 1080);
+ final CameraResolutionChangedEvent uhdEvent =
+ CameraResolutionChangedEvent(cameraId, 3840, 2160);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera closing events', () async {
+ // Act
+ final Stream<CameraClosingEvent> eventStream =
+ camera.onCameraClosing(cameraId);
+ final StreamQueue<CameraClosingEvent> streamQueue =
+ StreamQueue<CameraClosingEvent>(eventStream);
+
+ // Emit test events
+ final CameraClosingEvent event = CameraClosingEvent(cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera error events', () async {
+ // Act
+ final Stream<CameraErrorEvent> errorStream =
+ camera.onCameraError(cameraId);
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(errorStream);
+
+ // Emit test events
+ final CameraErrorEvent event =
+ CameraErrorEvent(cameraId, 'Error Description');
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive device orientation change events', () async {
+ // Act
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ camera.onDeviceOrientationChanged();
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+
+ // Emit test events
+ const DeviceOrientationChangedEvent event =
+ DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+ });
+
+ group('Function Tests', () {
+ late AndroidCamera camera;
+ late int cameraId;
+
+ setUp(() async {
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = AndroidCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(
+ CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ),
+ );
+ await initializeFuture;
+ });
+
+ test('Should fetch CameraDescription instances for available cameras',
+ () async {
+ // Arrange
+ final List<dynamic> returnData = <dynamic>[
+ <String, dynamic>{
+ 'name': 'Test 1',
+ 'lensFacing': 'front',
+ 'sensorOrientation': 1
+ },
+ <String, dynamic>{
+ 'name': 'Test 2',
+ 'lensFacing': 'back',
+ 'sensorOrientation': 2
+ }
+ ];
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'availableCameras': returnData},
+ );
+
+ // Act
+ final List<CameraDescription> cameras = await camera.availableCameras();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('availableCameras', arguments: null),
+ ]);
+ expect(cameras.length, returnData.length);
+ for (int i = 0; i < returnData.length; i++) {
+ final CameraDescription cameraDescription = CameraDescription(
+ name: returnData[i]['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(returnData[i]['lensFacing']! as String),
+ sensorOrientation: returnData[i]['sensorOrientation']! as int,
+ );
+ expect(cameras[i], cameraDescription);
+ }
+ });
+
+ test(
+ 'Should throw CameraException when availableCameras throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'availableCameras': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+
+ // Act
+ expect(
+ camera.availableCameras,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should take a picture and return an XFile instance', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'takePicture': '/test/path.jpg'});
+
+ // Act
+ final XFile file = await camera.takePicture(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('takePicture', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.jpg');
+ });
+
+ test('Should prepare for video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'prepareForVideoRecording': null},
+ );
+
+ // Act
+ await camera.prepareForVideoRecording();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('prepareForVideoRecording', arguments: null),
+ ]);
+ });
+
+ test('Should start recording a video', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': null,
+ }),
+ ]);
+ });
+
+ test('Should pass maxVideoDuration when starting recording a video',
+ () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(
+ cameraId,
+ maxVideoDuration: const Duration(seconds: 10),
+ );
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': 10000
+ }),
+ ]);
+ });
+
+ test('Should stop a video recording and return the file', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'stopVideoRecording': '/test/path.mp4'},
+ );
+
+ // Act
+ final XFile file = await camera.stopVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('stopVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.mp4');
+ });
+
+ test('Should pause a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'pauseVideoRecording': null},
+ );
+
+ // Act
+ await camera.pauseVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pauseVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should resume a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'resumeVideoRecording': null},
+ );
+
+ // Act
+ await camera.resumeVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumeVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the flash mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFlashMode': null},
+ );
+
+ // Act
+ await camera.setFlashMode(cameraId, FlashMode.torch);
+ await camera.setFlashMode(cameraId, FlashMode.always);
+ await camera.setFlashMode(cameraId, FlashMode.auto);
+ await camera.setFlashMode(cameraId, FlashMode.off);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'torch'
+ }),
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'always'
+ }),
+ isMethodCall('setFlashMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setFlashMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'off'}),
+ ]);
+ });
+
+ test('Should set the exposure mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposureMode': null},
+ );
+
+ // Act
+ await camera.setExposureMode(cameraId, ExposureMode.auto);
+ await camera.setExposureMode(cameraId, ExposureMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setExposureMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+ test('Should set the exposure point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposurePoint': null},
+ );
+
+ // Act
+ await camera.setExposurePoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setExposurePoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should get the min exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMinExposureOffset': 2.0},
+ );
+
+ // Act
+ final double minExposureOffset =
+ await camera.getMinExposureOffset(cameraId);
+
+ // Assert
+ expect(minExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the max exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMaxExposureOffset': 2.0},
+ );
+
+ // Act
+ final double maxExposureOffset =
+ await camera.getMaxExposureOffset(cameraId);
+
+ // Assert
+ expect(maxExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the exposure offset step size', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getExposureOffsetStepSize': 0.25},
+ );
+
+ // Act
+ final double stepSize = await camera.getExposureOffsetStepSize(cameraId);
+
+ // Assert
+ expect(stepSize, 0.25);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getExposureOffsetStepSize', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposureOffset': 0.6},
+ );
+
+ // Act
+ final double actualOffset = await camera.setExposureOffset(cameraId, 0.5);
+
+ // Assert
+ expect(actualOffset, 0.6);
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'offset': 0.5,
+ }),
+ ]);
+ });
+
+ test('Should set the focus mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFocusMode': null},
+ );
+
+ // Act
+ await camera.setFocusMode(cameraId, FocusMode.auto);
+ await camera.setFocusMode(cameraId, FocusMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setFocusMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+ test('Should set the focus point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFocusPoint': null},
+ );
+
+ // Act
+ await camera.setFocusPoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setFocusPoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should build a texture widget as preview widget', () async {
+ // Act
+ final Widget widget = camera.buildPreview(cameraId);
+
+ // Assert
+ expect(widget is Texture, isTrue);
+ expect((widget as Texture).textureId, cameraId);
+ });
+
+ test('Should throw MissingPluginException when handling unknown method',
+ () {
+ final AndroidCamera camera = AndroidCamera();
+
+ expect(
+ () => camera.handleCameraMethodCall(
+ const MethodCall('unknown_method'), 1),
+ throwsA(isA<MissingPluginException>()));
+ });
+
+ test('Should get the max zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMaxZoomLevel': 10.0},
+ );
+
+ // Act
+ final double maxZoomLevel = await camera.getMaxZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 10.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the min zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMinZoomLevel': 1.0},
+ );
+
+ // Act
+ final double minZoomLevel = await camera.getMinZoomLevel(cameraId);
+
+ // Assert
+ expect(minZoomLevel, 1.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setZoomLevel': null},
+ );
+
+ // Act
+ await camera.setZoomLevel(cameraId, 2.0);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setZoomLevel',
+ arguments: <String, Object?>{'cameraId': cameraId, 'zoom': 2.0}),
+ ]);
+ });
+
+ test('Should throw CameraException when illegal zoom level is supplied',
+ () async {
+ // Arrange
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'setZoomLevel': PlatformException(
+ code: 'ZOOM_ERROR',
+ message: 'Illegal zoom error',
+ details: null,
+ )
+ },
+ );
+
+ // Act & assert
+ expect(
+ () => camera.setZoomLevel(cameraId, -1.0),
+ throwsA(isA<CameraException>()
+ .having((CameraException e) => e.code, 'code', 'ZOOM_ERROR')
+ .having((CameraException e) => e.description, 'description',
+ 'Illegal zoom error')));
+ });
+
+ test('Should lock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'lockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.lockCaptureOrientation(
+ cameraId, DeviceOrientation.portraitUp);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('lockCaptureOrientation', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'orientation': 'portraitUp'
+ }),
+ ]);
+ });
+
+ test('Should unlock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'unlockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.unlockCaptureOrientation(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('unlockCaptureOrientation',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should pause the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'pausePreview': null},
+ );
+
+ // Act
+ await camera.pausePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pausePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should resume the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'resumePreview': null},
+ );
+
+ // Act
+ await camera.resumePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should start streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ ]);
+
+ subscription.cancel();
+ });
+
+ test('Should stop streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+ subscription.cancel();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ isMethodCall('stopImageStream', arguments: null),
+ ]);
+ });
+ });
+}
diff --git a/packages/camera/camera_android/test/method_channel_mock.dart b/packages/camera/camera_android/test/method_channel_mock.dart
new file mode 100644
index 0000000..413c106
--- /dev/null
+++ b/packages/camera/camera_android/test/method_channel_mock.dart
@@ -0,0 +1,39 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+class MethodChannelMock {
+ MethodChannelMock({
+ required String channelName,
+ this.delay,
+ required this.methods,
+ }) : methodChannel = MethodChannel(channelName) {
+ methodChannel.setMockMethodCallHandler(_handler);
+ }
+
+ final Duration? delay;
+ final MethodChannel methodChannel;
+ final Map<String, dynamic> methods;
+ final List<MethodCall> log = <MethodCall>[];
+
+ Future<dynamic> _handler(MethodCall methodCall) async {
+ log.add(methodCall);
+
+ if (!methods.containsKey(methodCall.method)) {
+ throw MissingPluginException('No implementation found for method '
+ '${methodCall.method} on channel ${methodChannel.name}');
+ }
+
+ return Future<dynamic>.delayed(delay ?? Duration.zero, () {
+ final dynamic result = methods[methodCall.method];
+ if (result is Exception) {
+ throw result;
+ }
+
+ return Future<dynamic>.value(result);
+ });
+ }
+}
diff --git a/packages/camera/camera_android/test/type_conversion_test.dart b/packages/camera/camera_android/test/type_conversion_test.dart
new file mode 100644
index 0000000..b07466d
--- /dev/null
+++ b/packages/camera/camera_android/test/type_conversion_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_android/src/type_conversion.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('CameraImageData can be created', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 1,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.height, 1);
+ expect(cameraImage.width, 4);
+ expect(cameraImage.format.group, ImageFormatGroup.unknown);
+ expect(cameraImage.planes.length, 1);
+ });
+
+ test('CameraImageData has ImageFormatGroup.yuv420', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 35,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ });
+}
diff --git a/packages/camera/camera_android/test/utils_test.dart b/packages/camera/camera_android/test/utils_test.dart
new file mode 100644
index 0000000..6f426bc
--- /dev/null
+++ b/packages/camera/camera_android/test/utils_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android/src/utils.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ group('Utility methods', () {
+ test(
+ 'Should return CameraLensDirection when valid value is supplied when parsing camera lens direction',
+ () {
+ expect(
+ parseCameraLensDirection('back'),
+ CameraLensDirection.back,
+ );
+ expect(
+ parseCameraLensDirection('front'),
+ CameraLensDirection.front,
+ );
+ expect(
+ parseCameraLensDirection('external'),
+ CameraLensDirection.external,
+ );
+ });
+
+ test(
+ 'Should throw ArgumentException when invalid value is supplied when parsing camera lens direction',
+ () {
+ expect(
+ () => parseCameraLensDirection('test'),
+ throwsA(isArgumentError),
+ );
+ });
+
+ test('serializeDeviceOrientation() should serialize correctly', () {
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitUp),
+ 'portraitUp');
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitDown),
+ 'portraitDown');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeRight),
+ 'landscapeRight');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeLeft),
+ 'landscapeLeft');
+ });
+
+ test('deserializeDeviceOrientation() should deserialize correctly', () {
+ expect(deserializeDeviceOrientation('portraitUp'),
+ DeviceOrientation.portraitUp);
+ expect(deserializeDeviceOrientation('portraitDown'),
+ DeviceOrientation.portraitDown);
+ expect(deserializeDeviceOrientation('landscapeRight'),
+ DeviceOrientation.landscapeRight);
+ expect(deserializeDeviceOrientation('landscapeLeft'),
+ DeviceOrientation.landscapeLeft);
+ });
+ });
+}
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index c57f301..5bc7c8a 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.8
+
+* Switches to internal method channel implementation.
+
## 0.9.7+1
* Splits from `camera` as a federated implementation.
diff --git a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart
index 6ac7a68..51eab63 100644
--- a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart
+++ b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart
@@ -6,6 +6,7 @@
import 'dart:io';
import 'dart:ui';
+import 'package:camera_avfoundation/camera_avfoundation.dart';
import 'package:camera_example/camera_controller.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/painting.dart';
@@ -20,6 +21,7 @@
IntegrationTestWidgetsFlutterBinding.ensureInitialized();
setUpAll(() async {
+ CameraPlatform.instance = AVFoundationCamera();
final Directory extDir = await getTemporaryDirectory();
testDir = await Directory('${extDir.path}/test').create(recursive: true);
});
diff --git a/packages/camera/camera_avfoundation/example/test_driver/integration_test.dart b/packages/camera/camera_avfoundation/example/test_driver/integration_test.dart
index 4ec97e6..4f10f2a 100644
--- a/packages/camera/camera_avfoundation/example/test_driver/integration_test.dart
+++ b/packages/camera/camera_avfoundation/example/test_driver/integration_test.dart
@@ -2,63 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-import 'dart:async';
-import 'dart:convert';
-import 'dart:io';
+import 'package:integration_test/integration_test_driver.dart';
-import 'package:flutter_driver/flutter_driver.dart';
-
-const String _examplePackage = 'io.flutter.plugins.cameraexample';
-
-Future<void> main() async {
- if (!(Platform.isLinux || Platform.isMacOS)) {
- print('This test must be run on a POSIX host. Skipping...');
- exit(0);
- }
- final bool adbExists =
- Process.runSync('which', <String>['adb']).exitCode == 0;
- if (!adbExists) {
- print(r'This test needs ADB to exist on the $PATH. Skipping...');
- exit(0);
- }
- print('Granting camera permissions...');
- Process.runSync('adb', <String>[
- 'shell',
- 'pm',
- 'grant',
- _examplePackage,
- 'android.permission.CAMERA'
- ]);
- Process.runSync('adb', <String>[
- 'shell',
- 'pm',
- 'grant',
- _examplePackage,
- 'android.permission.RECORD_AUDIO'
- ]);
- print('Starting test.');
- final FlutterDriver driver = await FlutterDriver.connect();
- final String data = await driver.requestData(
- null,
- timeout: const Duration(minutes: 1),
- );
- await driver.close();
- print('Test finished. Revoking camera permissions...');
- Process.runSync('adb', <String>[
- 'shell',
- 'pm',
- 'revoke',
- _examplePackage,
- 'android.permission.CAMERA'
- ]);
- Process.runSync('adb', <String>[
- 'shell',
- 'pm',
- 'revoke',
- _examplePackage,
- 'android.permission.RECORD_AUDIO'
- ]);
-
- final Map<String, dynamic> result = jsonDecode(data) as Map<String, dynamic>;
- exit(result['result'] == 'true' ? 0 : 1);
-}
+Future<void> main() => integrationDriver();
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
index 90327e3..cb19c09 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
@@ -26,7 +26,7 @@
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
FlutterMethodChannel *channel =
- [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/camera"
+ [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/camera_avfoundation"
binaryMessenger:[registrar messenger]];
CameraPlugin *instance = [[CameraPlugin alloc] initWithRegistry:[registrar textures]
messenger:[registrar messenger]];
@@ -49,9 +49,9 @@
}
- (void)initDeviceEventMethodChannel {
- FlutterMethodChannel *methodChannel =
- [FlutterMethodChannel methodChannelWithName:@"flutter.io/cameraPlugin/device"
- binaryMessenger:_messenger];
+ FlutterMethodChannel *methodChannel = [FlutterMethodChannel
+ methodChannelWithName:@"plugins.flutter.io/camera_avfoundation/fromPlatform"
+ binaryMessenger:_messenger];
_deviceEventMethodChannel =
[[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:methodChannel];
}
@@ -162,8 +162,9 @@
}
};
FlutterMethodChannel *methodChannel = [FlutterMethodChannel
- methodChannelWithName:[NSString stringWithFormat:@"flutter.io/cameraPlugin/camera%lu",
- (unsigned long)cameraId]
+ methodChannelWithName:
+ [NSString stringWithFormat:@"plugins.flutter.io/camera_avfoundation/camera%lu",
+ (unsigned long)cameraId]
binaryMessenger:_messenger];
FLTThreadSafeMethodChannel *threadSafeMethodChannel =
[[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:methodChannel];
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
index 7af505b..f267604 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
@@ -910,9 +910,9 @@
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler {
if (!_isStreamingImages) {
- FlutterEventChannel *eventChannel =
- [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream"
- binaryMessenger:messenger];
+ FlutterEventChannel *eventChannel = [FlutterEventChannel
+ eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream"
+ binaryMessenger:messenger];
FLTThreadSafeEventChannel *threadSafeEventChannel =
[[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];
diff --git a/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart
new file mode 100644
index 0000000..e07a440
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart
@@ -0,0 +1,5 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'src/avfoundation_camera.dart';
diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
new file mode 100644
index 0000000..1bff901
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
@@ -0,0 +1,592 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:stream_transform/stream_transform.dart';
+
+import 'type_conversion.dart';
+import 'utils.dart';
+
+const MethodChannel _channel =
+ MethodChannel('plugins.flutter.io/camera_avfoundation');
+
+/// An iOS implementation of [CameraPlatform] based on AVFoundation.
+class AVFoundationCamera extends CameraPlatform {
+ /// Construct a new method channel camera instance.
+ AVFoundationCamera() {
+ const MethodChannel channel =
+ MethodChannel('plugins.flutter.io/camera_avfoundation/fromPlatform');
+ channel.setMethodCallHandler(
+ (MethodCall call) => handleDeviceMethodCall(call));
+ }
+
+ /// Registers this class as the default instance of [CameraPlatform].
+ static void registerWith() {
+ CameraPlatform.instance = AVFoundationCamera();
+ }
+
+ final Map<int, MethodChannel> _channels = <int, MethodChannel>{};
+
+ /// The controller we need to broadcast the different events coming
+  /// from handleCameraMethodCall, specific to camera events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ final StreamController<CameraEvent> cameraEventStreamController =
+ StreamController<CameraEvent>.broadcast();
+
+ /// The controller we need to broadcast the different events coming
+  /// from handleDeviceMethodCall, specific to general device events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ final StreamController<DeviceEvent> deviceEventStreamController =
+ StreamController<DeviceEvent>.broadcast();
+
+  // The subscription used to receive frames from the native code.
+ StreamSubscription<dynamic>? _platformImageStreamSubscription;
+
+  // The controller used to vend frames to platform interface clients.
+ StreamController<CameraImageData>? _frameStreamController;
+
+ Stream<CameraEvent> _cameraEvents(int cameraId) =>
+ cameraEventStreamController.stream
+ .where((CameraEvent event) => event.cameraId == cameraId);
+
+ @override
+ Future<List<CameraDescription>> availableCameras() async {
+ try {
+ final List<Map<dynamic, dynamic>>? cameras = await _channel
+ .invokeListMethod<Map<dynamic, dynamic>>('availableCameras');
+
+ if (cameras == null) {
+ return <CameraDescription>[];
+ }
+
+ return cameras.map((Map<dynamic, dynamic> camera) {
+ return CameraDescription(
+ name: camera['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(camera['lensFacing']! as String),
+ sensorOrientation: camera['sensorOrientation']! as int,
+ );
+ }).toList();
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<int> createCamera(
+ CameraDescription cameraDescription,
+ ResolutionPreset? resolutionPreset, {
+ bool enableAudio = false,
+ }) async {
+ try {
+ final Map<String, dynamic>? reply = await _channel
+ .invokeMapMethod<String, dynamic>('create', <String, dynamic>{
+ 'cameraName': cameraDescription.name,
+ 'resolutionPreset': resolutionPreset != null
+ ? _serializeResolutionPreset(resolutionPreset)
+ : null,
+ 'enableAudio': enableAudio,
+ });
+
+ return reply!['cameraId']! as int;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> initializeCamera(
+ int cameraId, {
+ ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+ }) {
+ _channels.putIfAbsent(cameraId, () {
+ final MethodChannel channel = MethodChannel(
+ 'plugins.flutter.io/camera_avfoundation/camera$cameraId');
+ channel.setMethodCallHandler(
+ (MethodCall call) => handleCameraMethodCall(call, cameraId));
+ return channel;
+ });
+
+ final Completer<void> _completer = Completer<void>();
+
+ onCameraInitialized(cameraId).first.then((CameraInitializedEvent value) {
+ _completer.complete();
+ });
+
+ _channel.invokeMapMethod<String, dynamic>(
+ 'initialize',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'imageFormatGroup': imageFormatGroup.name(),
+ },
+ ).catchError(
+ (Object error, StackTrace stackTrace) {
+ if (error is! PlatformException) {
+ throw error;
+ }
+ _completer.completeError(
+ CameraException(error.code, error.message),
+ stackTrace,
+ );
+ },
+ );
+
+ return _completer.future;
+ }
+
+ @override
+ Future<void> dispose(int cameraId) async {
+ if (_channels.containsKey(cameraId)) {
+ final MethodChannel? cameraChannel = _channels[cameraId];
+ cameraChannel?.setMethodCallHandler(null);
+ _channels.remove(cameraId);
+ }
+
+ await _channel.invokeMethod<void>(
+ 'dispose',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
+ }
+
+ @override
+ Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraResolutionChangedEvent>();
+ }
+
+ @override
+ Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
+ }
+
+ @override
+ Stream<CameraErrorEvent> onCameraError(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraErrorEvent>();
+ }
+
+ @override
+ Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
+ return _cameraEvents(cameraId).whereType<VideoRecordedEvent>();
+ }
+
+ @override
+ Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+ return deviceEventStreamController.stream
+ .whereType<DeviceOrientationChangedEvent>();
+ }
+
+ @override
+ Future<void> lockCaptureOrientation(
+ int cameraId,
+ DeviceOrientation orientation,
+ ) async {
+ await _channel.invokeMethod<String>(
+ 'lockCaptureOrientation',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'orientation': serializeDeviceOrientation(orientation)
+ },
+ );
+ }
+
+ @override
+ Future<void> unlockCaptureOrientation(int cameraId) async {
+ await _channel.invokeMethod<String>(
+ 'unlockCaptureOrientation',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<XFile> takePicture(int cameraId) async {
+ final String? path = await _channel.invokeMethod<String>(
+ 'takePicture',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ if (path == null) {
+ throw CameraException(
+ 'INVALID_PATH',
+ 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+ );
+ }
+
+ return XFile(path);
+ }
+
+ @override
+ Future<void> prepareForVideoRecording() =>
+ _channel.invokeMethod<void>('prepareForVideoRecording');
+
+ @override
+ Future<void> startVideoRecording(int cameraId,
+ {Duration? maxVideoDuration}) async {
+ await _channel.invokeMethod<void>(
+ 'startVideoRecording',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': maxVideoDuration?.inMilliseconds,
+ },
+ );
+ }
+
+ @override
+ Future<XFile> stopVideoRecording(int cameraId) async {
+ final String? path = await _channel.invokeMethod<String>(
+ 'stopVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ if (path == null) {
+ throw CameraException(
+ 'INVALID_PATH',
+ 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+ );
+ }
+
+ return XFile(path);
+ }
+
+ @override
+ Future<void> pauseVideoRecording(int cameraId) => _channel.invokeMethod<void>(
+ 'pauseVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ @override
+ Future<void> resumeVideoRecording(int cameraId) =>
+ _channel.invokeMethod<void>(
+ 'resumeVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ @override
+ Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
+ {CameraImageStreamOptions? options}) {
+ _frameStreamController = StreamController<CameraImageData>(
+ onListen: _onFrameStreamListen,
+ onPause: _onFrameStreamPauseResume,
+ onResume: _onFrameStreamPauseResume,
+ onCancel: _onFrameStreamCancel,
+ );
+ return _frameStreamController!.stream;
+ }
+
+ void _onFrameStreamListen() {
+ _startPlatformStream();
+ }
+
+ Future<void> _startPlatformStream() async {
+ await _channel.invokeMethod<void>('startImageStream');
+ const EventChannel cameraEventChannel =
+ EventChannel('plugins.flutter.io/camera_avfoundation/imageStream');
+ _platformImageStreamSubscription =
+ cameraEventChannel.receiveBroadcastStream().listen((dynamic imageData) {
+ try {
+ _channel.invokeMethod<void>('receivedImageStreamData');
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ _frameStreamController!
+ .add(cameraImageFromPlatformData(imageData as Map<dynamic, dynamic>));
+ });
+ }
+
+ FutureOr<void> _onFrameStreamCancel() async {
+ await _channel.invokeMethod<void>('stopImageStream');
+ await _platformImageStreamSubscription?.cancel();
+ _platformImageStreamSubscription = null;
+ _frameStreamController = null;
+ }
+
+ void _onFrameStreamPauseResume() {
+ throw CameraException('InvalidCall',
+ 'Pause and resume are not supported for onStreamedFrameAvailable');
+ }
+
+ @override
+ Future<void> setFlashMode(int cameraId, FlashMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setFlashMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': _serializeFlashMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setExposureMode(int cameraId, ExposureMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setExposureMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': serializeExposureMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setExposurePoint(int cameraId, Point<double>? point) {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ return _channel.invokeMethod<void>(
+ 'setExposurePoint',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'reset': point == null,
+ 'x': point?.x,
+ 'y': point?.y,
+ },
+ );
+ }
+
+ @override
+ Future<double> getMinExposureOffset(int cameraId) async {
+ final double? minExposureOffset = await _channel.invokeMethod<double>(
+ 'getMinExposureOffset',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return minExposureOffset!;
+ }
+
+ @override
+ Future<double> getMaxExposureOffset(int cameraId) async {
+ final double? maxExposureOffset = await _channel.invokeMethod<double>(
+ 'getMaxExposureOffset',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return maxExposureOffset!;
+ }
+
+ @override
+ Future<double> getExposureOffsetStepSize(int cameraId) async {
+ final double? stepSize = await _channel.invokeMethod<double>(
+ 'getExposureOffsetStepSize',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return stepSize!;
+ }
+
+ @override
+ Future<double> setExposureOffset(int cameraId, double offset) async {
+ final double? appliedOffset = await _channel.invokeMethod<double>(
+ 'setExposureOffset',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'offset': offset,
+ },
+ );
+
+ return appliedOffset!;
+ }
+
+ @override
+ Future<void> setFocusMode(int cameraId, FocusMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setFocusMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': serializeFocusMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setFocusPoint(int cameraId, Point<double>? point) {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ return _channel.invokeMethod<void>(
+ 'setFocusPoint',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'reset': point == null,
+ 'x': point?.x,
+ 'y': point?.y,
+ },
+ );
+ }
+
+ @override
+ Future<double> getMaxZoomLevel(int cameraId) async {
+ final double? maxZoomLevel = await _channel.invokeMethod<double>(
+ 'getMaxZoomLevel',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return maxZoomLevel!;
+ }
+
+ @override
+ Future<double> getMinZoomLevel(int cameraId) async {
+ final double? minZoomLevel = await _channel.invokeMethod<double>(
+ 'getMinZoomLevel',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return minZoomLevel!;
+ }
+
+ @override
+ Future<void> setZoomLevel(int cameraId, double zoom) async {
+ try {
+ await _channel.invokeMethod<double>(
+ 'setZoomLevel',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'zoom': zoom,
+ },
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> pausePreview(int cameraId) async {
+ await _channel.invokeMethod<double>(
+ 'pausePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<void> resumePreview(int cameraId) async {
+ await _channel.invokeMethod<double>(
+ 'resumePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Widget buildPreview(int cameraId) {
+ return Texture(textureId: cameraId);
+ }
+
+ /// Returns the flash mode as a String.
+ String _serializeFlashMode(FlashMode flashMode) {
+ switch (flashMode) {
+ case FlashMode.off:
+ return 'off';
+ case FlashMode.auto:
+ return 'auto';
+ case FlashMode.always:
+ return 'always';
+ case FlashMode.torch:
+ return 'torch';
+ default:
+ throw ArgumentError('Unknown FlashMode value');
+ }
+ }
+
+ /// Returns the resolution preset as a String.
+ String _serializeResolutionPreset(ResolutionPreset resolutionPreset) {
+ switch (resolutionPreset) {
+ case ResolutionPreset.max:
+ return 'max';
+ case ResolutionPreset.ultraHigh:
+ return 'ultraHigh';
+ case ResolutionPreset.veryHigh:
+ return 'veryHigh';
+ case ResolutionPreset.high:
+ return 'high';
+ case ResolutionPreset.medium:
+ return 'medium';
+ case ResolutionPreset.low:
+ return 'low';
+ default:
+ throw ArgumentError('Unknown ResolutionPreset value');
+ }
+ }
+
+ /// Converts messages received from the native platform into device events.
+ ///
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ Future<dynamic> handleDeviceMethodCall(MethodCall call) async {
+ switch (call.method) {
+ case 'orientation_changed':
+ deviceEventStreamController.add(DeviceOrientationChangedEvent(
+ deserializeDeviceOrientation(
+ call.arguments['orientation']! as String)));
+ break;
+ default:
+ throw MissingPluginException();
+ }
+ }
+
+ /// Converts messages received from the native platform into camera events.
+ ///
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ Future<dynamic> handleCameraMethodCall(MethodCall call, int cameraId) async {
+ switch (call.method) {
+ case 'initialized':
+ cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ call.arguments['previewWidth']! as double,
+ call.arguments['previewHeight']! as double,
+ deserializeExposureMode(call.arguments['exposureMode']! as String),
+ call.arguments['exposurePointSupported']! as bool,
+ deserializeFocusMode(call.arguments['focusMode']! as String),
+ call.arguments['focusPointSupported']! as bool,
+ ));
+ break;
+ case 'resolution_changed':
+ cameraEventStreamController.add(CameraResolutionChangedEvent(
+ cameraId,
+ call.arguments['captureWidth']! as double,
+ call.arguments['captureHeight']! as double,
+ ));
+ break;
+ case 'camera_closing':
+ cameraEventStreamController.add(CameraClosingEvent(
+ cameraId,
+ ));
+ break;
+ case 'video_recorded':
+ cameraEventStreamController.add(VideoRecordedEvent(
+ cameraId,
+ XFile(call.arguments['path']! as String),
+ call.arguments['maxVideoDuration'] != null
+ ? Duration(
+ milliseconds: call.arguments['maxVideoDuration']! as int)
+ : null,
+ ));
+ break;
+ case 'error':
+ cameraEventStreamController.add(CameraErrorEvent(
+ cameraId,
+ call.arguments['description']! as String,
+ ));
+ break;
+ default:
+ throw MissingPluginException();
+ }
+ }
+}
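
Taken together, the implementation uses one package-wide method channel (`plugins.flutter.io/camera_avfoundation`) for commands, the `.../fromPlatform` channel for device-level events, a per-camera `.../camera<cameraId>` channel registered in `initializeCamera` for camera events, and the `.../imageStream` event channel for frames. A minimal caller-side sketch of that flow through the platform interface — illustrative only; the helper name `openFirstCamera` is not from this PR:

import 'package:camera_avfoundation/camera_avfoundation.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';

Future<void> openFirstCamera() async {
  // Normally the generated plugin registrant calls registerWith(); doing it
  // here mirrors what the updated integration test does explicitly.
  CameraPlatform.instance = AVFoundationCamera();

  final List<CameraDescription> cameras =
      await CameraPlatform.instance.availableCameras();
  if (cameras.isEmpty) {
    return;
  }

  // 'create' replies with the camera id that names the per-camera channel
  // plugins.flutter.io/camera_avfoundation/camera<cameraId>.
  final int cameraId = await CameraPlatform.instance
      .createCamera(cameras.first, ResolutionPreset.medium);

  // Completes once the 'initialized' event arrives on that channel.
  await CameraPlatform.instance.initializeCamera(cameraId);
}
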
diff --git a/packages/camera/camera_avfoundation/lib/src/type_conversion.dart b/packages/camera/camera_avfoundation/lib/src/type_conversion.dart
new file mode 100644
index 0000000..c2a539a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/src/type_conversion.dart
@@ -0,0 +1,50 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+
+/// Converts method channel call [data] for `receivedImageStreamData` to a
+/// [CameraImageData].
+CameraImageData cameraImageFromPlatformData(Map<dynamic, dynamic> data) {
+ return CameraImageData(
+ format: _cameraImageFormatFromPlatformData(data['format']),
+ height: data['height'] as int,
+ width: data['width'] as int,
+ lensAperture: data['lensAperture'] as double?,
+ sensorExposureTime: data['sensorExposureTime'] as int?,
+ sensorSensitivity: data['sensorSensitivity'] as double?,
+ planes: List<CameraImagePlane>.unmodifiable(
+ (data['planes'] as List<dynamic>).map<CameraImagePlane>(
+ (dynamic planeData) => _cameraImagePlaneFromPlatformData(
+ planeData as Map<dynamic, dynamic>))));
+}
+
+CameraImageFormat _cameraImageFormatFromPlatformData(dynamic data) {
+ return CameraImageFormat(_imageFormatGroupFromPlatformData(data), raw: data);
+}
+
+ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
+ switch (data) {
+ case 875704438: // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ return ImageFormatGroup.yuv420;
+
+ case 1111970369: // kCVPixelFormatType_32BGRA
+ return ImageFormatGroup.bgra8888;
+ }
+
+ return ImageFormatGroup.unknown;
+}
+
+CameraImagePlane _cameraImagePlaneFromPlatformData(Map<dynamic, dynamic> data) {
+ return CameraImagePlane(
+ bytes: data['bytes'] as Uint8List,
+ bytesPerPixel: data['bytesPerPixel'] as int?,
+ bytesPerRow: data['bytesPerRow'] as int,
+ height: data['height'] as int?,
+ width: data['width'] as int?);
+}
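
The conversion keys off the Core Video pixel-format codes that the iOS side sends with each frame: 875704438 (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) maps to `ImageFormatGroup.yuv420`, 1111970369 (kCVPixelFormatType_32BGRA) maps to `ImageFormatGroup.bgra8888`, and anything else falls back to `unknown`, with the raw code preserved in `format.raw`. A small illustrative sketch — the payload below is fabricated, and optional keys such as `lensAperture` are simply omitted and come through as null:

import 'dart:typed_data';

import 'package:camera_avfoundation/src/type_conversion.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';

void main() {
  // A BGRA frame: the FourCC 1111970369 is kCVPixelFormatType_32BGRA.
  final CameraImageData image =
      cameraImageFromPlatformData(<dynamic, dynamic>{
    'format': 1111970369,
    'height': 1,
    'width': 4,
    'planes': <dynamic>[
      <dynamic, dynamic>{
        'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
        'bytesPerRow': 4,
      }
    ],
  });

  assert(image.format.group == ImageFormatGroup.bgra8888);
  assert(image.format.raw == 1111970369);
  assert(image.lensAperture == null); // Optional metadata was omitted.
}
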
diff --git a/packages/camera/camera_avfoundation/lib/src/utils.dart b/packages/camera/camera_avfoundation/lib/src/utils.dart
new file mode 100644
index 0000000..663ec6d
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/src/utils.dart
@@ -0,0 +1,51 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+
+/// Parses a string into a corresponding CameraLensDirection.
+CameraLensDirection parseCameraLensDirection(String string) {
+ switch (string) {
+ case 'front':
+ return CameraLensDirection.front;
+ case 'back':
+ return CameraLensDirection.back;
+ case 'external':
+ return CameraLensDirection.external;
+ }
+ throw ArgumentError('Unknown CameraLensDirection value');
+}
+
+/// Returns the device orientation as a String.
+String serializeDeviceOrientation(DeviceOrientation orientation) {
+ switch (orientation) {
+ case DeviceOrientation.portraitUp:
+ return 'portraitUp';
+ case DeviceOrientation.portraitDown:
+ return 'portraitDown';
+ case DeviceOrientation.landscapeRight:
+ return 'landscapeRight';
+ case DeviceOrientation.landscapeLeft:
+ return 'landscapeLeft';
+ default:
+ throw ArgumentError('Unknown DeviceOrientation value');
+ }
+}
+
+/// Returns the device orientation for a given String.
+DeviceOrientation deserializeDeviceOrientation(String str) {
+ switch (str) {
+ case 'portraitUp':
+ return DeviceOrientation.portraitUp;
+ case 'portraitDown':
+ return DeviceOrientation.portraitDown;
+ case 'landscapeRight':
+ return DeviceOrientation.landscapeRight;
+ case 'landscapeLeft':
+ return DeviceOrientation.landscapeLeft;
+ default:
+ throw ArgumentError('"$str" is not a valid DeviceOrientation value');
+ }
+}
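
These helpers are plain string mappings between the camera_platform_interface enums and the wire strings exchanged over the method channels; a quick round-trip sketch (illustrative only):

import 'package:camera_avfoundation/src/utils.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/services.dart';

void main() {
  // Orientations round-trip through the strings used on the wire.
  const DeviceOrientation orientation = DeviceOrientation.landscapeLeft;
  final String wire = serializeDeviceOrientation(orientation); // 'landscapeLeft'
  assert(deserializeDeviceOrientation(wire) == orientation);

  // Lens directions arrive as strings in the 'availableCameras' reply.
  assert(parseCameraLensDirection('front') == CameraLensDirection.front);
}
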
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index 237f5ad..231ce26 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.7+1
+version: 0.9.8
environment:
sdk: ">=2.14.0 <3.0.0"
@@ -14,13 +14,16 @@
platforms:
ios:
pluginClass: CameraPlugin
+ dartPluginClass: AVFoundationCamera
dependencies:
camera_platform_interface: ^2.2.0
flutter:
sdk: flutter
+ stream_transform: ^2.0.0
dev_dependencies:
+ async: ^2.5.0
flutter_driver:
sdk: flutter
flutter_test:
diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
new file mode 100644
index 0000000..4b32d2e
--- /dev/null
+++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
@@ -0,0 +1,1044 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:async/async.dart';
+import 'package:camera_avfoundation/src/avfoundation_camera.dart';
+import 'package:camera_avfoundation/src/utils.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'method_channel_mock.dart';
+
+const String _channelName = 'plugins.flutter.io/camera_avfoundation';
+
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ test('registers instance', () async {
+ AVFoundationCamera.registerWith();
+ expect(CameraPlatform.instance, isA<AVFoundationCamera>());
+ });
+
+ group('Creation, Initialization & Disposal Tests', () {
+ test('Should send creation data and receive back a camera id', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ }
+ });
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ // Act
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0),
+ ResolutionPreset.high,
+ );
+
+ // Assert
+ expect(cameraMockChannel.log, <Matcher>[
+ isMethodCall(
+ 'create',
+ arguments: <String, Object?>{
+ 'cameraName': 'Test',
+ 'resolutionPreset': 'high',
+ 'enableAudio': false
+ },
+ ),
+ ]);
+ expect(cameraId, 1);
+ });
+
+ test('Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test(
+ 'Should throw CameraException when initialize throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'initialize': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ },
+ );
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ // Act
+ expect(
+ () => camera.initializeCamera(0),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having(
+ (CameraException e) => e.description,
+ 'description',
+ 'Mock error message used during testing.',
+ ),
+ ),
+ );
+ },
+ );
+
+ test('Should send initialization data', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ 'initialize': null
+ });
+ final AVFoundationCamera camera = AVFoundationCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+
+ // Act
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ isMethodCall(
+ 'initialize',
+ arguments: <String, Object?>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ ),
+ ]);
+ });
+
+ test('Should send a disposal call on dispose', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null,
+ 'dispose': <String, dynamic>{'cameraId': 1}
+ });
+
+ final AVFoundationCamera camera = AVFoundationCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Act
+ await camera.dispose(cameraId);
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ anything,
+ isMethodCall(
+ 'dispose',
+ arguments: <String, Object?>{'cameraId': 1},
+ ),
+ ]);
+ });
+ });
+
+ group('Event Tests', () {
+ late AVFoundationCamera camera;
+ late int cameraId;
+ setUp(() async {
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = AVFoundationCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+ });
+
+ test('Should receive initialized event', () async {
+ // Act
+ final Stream<CameraInitializedEvent> eventStream =
+ camera.onCameraInitialized(cameraId);
+ final StreamQueue<CameraInitializedEvent> streamQueue =
+ StreamQueue<CameraInitializedEvent>(eventStream);
+
+ // Emit test events
+ final CameraInitializedEvent event = CameraInitializedEvent(
+ cameraId,
+ 3840,
+ 2160,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ );
+ await camera.handleCameraMethodCall(
+ MethodCall('initialized', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive resolution changes', () async {
+ // Act
+ final Stream<CameraResolutionChangedEvent> resolutionStream =
+ camera.onCameraResolutionChanged(cameraId);
+ final StreamQueue<CameraResolutionChangedEvent> streamQueue =
+ StreamQueue<CameraResolutionChangedEvent>(resolutionStream);
+
+ // Emit test events
+ final CameraResolutionChangedEvent fhdEvent =
+ CameraResolutionChangedEvent(cameraId, 1920, 1080);
+ final CameraResolutionChangedEvent uhdEvent =
+ CameraResolutionChangedEvent(cameraId, 3840, 2160);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera closing events', () async {
+ // Act
+ final Stream<CameraClosingEvent> eventStream =
+ camera.onCameraClosing(cameraId);
+ final StreamQueue<CameraClosingEvent> streamQueue =
+ StreamQueue<CameraClosingEvent>(eventStream);
+
+ // Emit test events
+ final CameraClosingEvent event = CameraClosingEvent(cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera error events', () async {
+ // Act
+ final Stream<CameraErrorEvent> errorStream =
+ camera.onCameraError(cameraId);
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(errorStream);
+
+ // Emit test events
+ final CameraErrorEvent event =
+ CameraErrorEvent(cameraId, 'Error Description');
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive device orientation change events', () async {
+ // Act
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ camera.onDeviceOrientationChanged();
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+
+ // Emit test events
+ const DeviceOrientationChangedEvent event =
+ DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+ });
+
+ group('Function Tests', () {
+ late AVFoundationCamera camera;
+ late int cameraId;
+
+ setUp(() async {
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = AVFoundationCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(
+ CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ),
+ );
+ await initializeFuture;
+ });
+
+ test('Should fetch CameraDescription instances for available cameras',
+ () async {
+ // Arrange
+ final List<dynamic> returnData = <dynamic>[
+ <String, dynamic>{
+ 'name': 'Test 1',
+ 'lensFacing': 'front',
+ 'sensorOrientation': 1
+ },
+ <String, dynamic>{
+ 'name': 'Test 2',
+ 'lensFacing': 'back',
+ 'sensorOrientation': 2
+ }
+ ];
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'availableCameras': returnData},
+ );
+
+ // Act
+ final List<CameraDescription> cameras = await camera.availableCameras();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('availableCameras', arguments: null),
+ ]);
+ expect(cameras.length, returnData.length);
+ for (int i = 0; i < returnData.length; i++) {
+ final CameraDescription cameraDescription = CameraDescription(
+ name: returnData[i]['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(returnData[i]['lensFacing']! as String),
+ sensorOrientation: returnData[i]['sensorOrientation']! as int,
+ );
+ expect(cameras[i], cameraDescription);
+ }
+ });
+
+ test(
+ 'Should throw CameraException when availableCameras throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'availableCameras': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+
+ // Act
+ expect(
+ camera.availableCameras,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should take a picture and return an XFile instance', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'takePicture': '/test/path.jpg'});
+
+ // Act
+ final XFile file = await camera.takePicture(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('takePicture', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.jpg');
+ });
+
+ test('Should prepare for video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'prepareForVideoRecording': null},
+ );
+
+ // Act
+ await camera.prepareForVideoRecording();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('prepareForVideoRecording', arguments: null),
+ ]);
+ });
+
+ test('Should start recording a video', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': null,
+ }),
+ ]);
+ });
+
+ test('Should pass maxVideoDuration when starting recording a video',
+ () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(
+ cameraId,
+ maxVideoDuration: const Duration(seconds: 10),
+ );
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': 10000
+ }),
+ ]);
+ });
+
+ test('Should stop a video recording and return the file', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'stopVideoRecording': '/test/path.mp4'},
+ );
+
+ // Act
+ final XFile file = await camera.stopVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('stopVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.mp4');
+ });
+
+ test('Should pause a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'pauseVideoRecording': null},
+ );
+
+ // Act
+ await camera.pauseVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pauseVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should resume a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'resumeVideoRecording': null},
+ );
+
+ // Act
+ await camera.resumeVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumeVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the flash mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFlashMode': null},
+ );
+
+ // Act
+ await camera.setFlashMode(cameraId, FlashMode.torch);
+ await camera.setFlashMode(cameraId, FlashMode.always);
+ await camera.setFlashMode(cameraId, FlashMode.auto);
+ await camera.setFlashMode(cameraId, FlashMode.off);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'torch'
+ }),
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'always'
+ }),
+ isMethodCall('setFlashMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setFlashMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'off'}),
+ ]);
+ });
+
+ test('Should set the exposure mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposureMode': null},
+ );
+
+ // Act
+ await camera.setExposureMode(cameraId, ExposureMode.auto);
+ await camera.setExposureMode(cameraId, ExposureMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setExposureMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+ test('Should set the exposure point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposurePoint': null},
+ );
+
+ // Act
+ await camera.setExposurePoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setExposurePoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should get the min exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMinExposureOffset': 2.0},
+ );
+
+ // Act
+ final double minExposureOffset =
+ await camera.getMinExposureOffset(cameraId);
+
+ // Assert
+ expect(minExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the max exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMaxExposureOffset': 2.0},
+ );
+
+ // Act
+ final double maxExposureOffset =
+ await camera.getMaxExposureOffset(cameraId);
+
+ // Assert
+ expect(maxExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the exposure offset step size', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getExposureOffsetStepSize': 0.25},
+ );
+
+ // Act
+ final double stepSize = await camera.getExposureOffsetStepSize(cameraId);
+
+ // Assert
+ expect(stepSize, 0.25);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getExposureOffsetStepSize', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposureOffset': 0.6},
+ );
+
+ // Act
+ final double actualOffset = await camera.setExposureOffset(cameraId, 0.5);
+
+ // Assert
+ expect(actualOffset, 0.6);
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'offset': 0.5,
+ }),
+ ]);
+ });
+
+ test('Should set the focus mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFocusMode': null},
+ );
+
+ // Act
+ await camera.setFocusMode(cameraId, FocusMode.auto);
+ await camera.setFocusMode(cameraId, FocusMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setFocusMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+    test('Should set the focus point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFocusPoint': null},
+ );
+
+ // Act
+ await camera.setFocusPoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setFocusPoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should build a texture widget as preview widget', () async {
+ // Act
+ final Widget widget = camera.buildPreview(cameraId);
+
+      // Assert
+ expect(widget is Texture, isTrue);
+ expect((widget as Texture).textureId, cameraId);
+ });
+
+ test('Should throw MissingPluginException when handling unknown method',
+ () {
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ expect(
+ () => camera.handleCameraMethodCall(
+ const MethodCall('unknown_method'), 1),
+ throwsA(isA<MissingPluginException>()));
+ });
+
+ test('Should get the max zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMaxZoomLevel': 10.0},
+ );
+
+ // Act
+ final double maxZoomLevel = await camera.getMaxZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 10.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the min zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMinZoomLevel': 1.0},
+ );
+
+ // Act
+      final double minZoomLevel = await camera.getMinZoomLevel(cameraId);
+
+      // Assert
+      expect(minZoomLevel, 1.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setZoomLevel': null},
+ );
+
+ // Act
+ await camera.setZoomLevel(cameraId, 2.0);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setZoomLevel',
+ arguments: <String, Object?>{'cameraId': cameraId, 'zoom': 2.0}),
+ ]);
+ });
+
+ test('Should throw CameraException when illegal zoom level is supplied',
+ () async {
+ // Arrange
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'setZoomLevel': PlatformException(
+ code: 'ZOOM_ERROR',
+ message: 'Illegal zoom error',
+ details: null,
+ )
+ },
+ );
+
+ // Act & assert
+ expect(
+ () => camera.setZoomLevel(cameraId, -1.0),
+ throwsA(isA<CameraException>()
+ .having((CameraException e) => e.code, 'code', 'ZOOM_ERROR')
+ .having((CameraException e) => e.description, 'description',
+ 'Illegal zoom error')));
+ });
+
+ test('Should lock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'lockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.lockCaptureOrientation(
+ cameraId, DeviceOrientation.portraitUp);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('lockCaptureOrientation', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'orientation': 'portraitUp'
+ }),
+ ]);
+ });
+
+ test('Should unlock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'unlockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.unlockCaptureOrientation(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('unlockCaptureOrientation',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should pause the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'pausePreview': null},
+ );
+
+ // Act
+ await camera.pausePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pausePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should resume the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'resumePreview': null},
+ );
+
+ // Act
+ await camera.resumePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should start streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ ]);
+
+ subscription.cancel();
+ });
+
+ test('Should stop streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+ await subscription.cancel();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ isMethodCall('stopImageStream', arguments: null),
+ ]);
+ });
+ });
+}
diff --git a/packages/camera/camera_avfoundation/test/method_channel_mock.dart b/packages/camera/camera_avfoundation/test/method_channel_mock.dart
new file mode 100644
index 0000000..413c106
--- /dev/null
+++ b/packages/camera/camera_avfoundation/test/method_channel_mock.dart
@@ -0,0 +1,44 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
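+ /// A test double that registers itself as the mock handler for the channel
+ /// named [channelName], records every incoming [MethodCall] in [log], and
+ /// replies with the canned response registered for that method in [methods].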
+class MethodChannelMock {
+ MethodChannelMock({
+ required String channelName,
+ this.delay,
+ required this.methods,
+ }) : methodChannel = MethodChannel(channelName) {
+ methodChannel.setMockMethodCallHandler(_handler);
+ }
+
+ final Duration? delay;
+ final MethodChannel methodChannel;
+ final Map<String, dynamic> methods;
+ final List<MethodCall> log = <MethodCall>[];
+
+ Future<dynamic> _handler(MethodCall methodCall) async {
+ log.add(methodCall);
+
+ if (!methods.containsKey(methodCall.method)) {
+ throw MissingPluginException('No implementation found for method '
+ '${methodCall.method} on channel ${methodChannel.name}');
+ }
+
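+ // Reply after the optional [delay]; a canned response that is an Exception
+ // is thrown instead of returned.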
+ return Future<dynamic>.delayed(delay ?? Duration.zero, () {
+ final dynamic result = methods[methodCall.method];
+ if (result is Exception) {
+ throw result;
+ }
+
+ return Future<dynamic>.value(result);
+ });
+ }
+}
diff --git a/packages/camera/camera_avfoundation/test/type_conversion_test.dart b/packages/camera/camera_avfoundation/test/type_conversion_test.dart
new file mode 100644
index 0000000..282f4ae
--- /dev/null
+++ b/packages/camera/camera_avfoundation/test/type_conversion_test.dart
@@ -0,0 +1,65 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_avfoundation/src/type_conversion.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('CameraImageData can be created', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
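+ // 1 is not a recognized pixel format constant, so the format group is
+ // expected to fall back to ImageFormatGroup.unknown.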
+ 'format': 1,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.height, 1);
+ expect(cameraImage.width, 4);
+ expect(cameraImage.format.group, ImageFormatGroup.unknown);
+ expect(cameraImage.planes.length, 1);
+ });
+
+ test('CameraImageData has ImageFormatGroup.yuv420', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
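+ // 875704438 is the FourCC code '420v'
+ // (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), which maps to
+ // ImageFormatGroup.yuv420.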
+ 'format': 875704438,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ });
+}
diff --git a/packages/camera/camera_avfoundation/test/utils_test.dart b/packages/camera/camera_avfoundation/test/utils_test.dart
new file mode 100644
index 0000000..bd28abb
--- /dev/null
+++ b/packages/camera/camera_avfoundation/test/utils_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_avfoundation/src/utils.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ group('Utility methods', () {
+ test(
+ 'Should return CameraLensDirection when parsing a valid camera lens direction value',
+ () {
+ expect(
+ parseCameraLensDirection('back'),
+ CameraLensDirection.back,
+ );
+ expect(
+ parseCameraLensDirection('front'),
+ CameraLensDirection.front,
+ );
+ expect(
+ parseCameraLensDirection('external'),
+ CameraLensDirection.external,
+ );
+ });
+
+ test(
+ 'Should throw ArgumentError when parsing an invalid camera lens direction value',
+ () {
+ expect(
+ () => parseCameraLensDirection('test'),
+ throwsA(isArgumentError),
+ );
+ });
+
+ test('serializeDeviceOrientation() should serialize correctly', () {
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitUp),
+ 'portraitUp');
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitDown),
+ 'portraitDown');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeRight),
+ 'landscapeRight');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeLeft),
+ 'landscapeLeft');
+ });
+
+ test('deserializeDeviceOrientation() should deserialize correctly', () {
+ expect(deserializeDeviceOrientation('portraitUp'),
+ DeviceOrientation.portraitUp);
+ expect(deserializeDeviceOrientation('portraitDown'),
+ DeviceOrientation.portraitDown);
+ expect(deserializeDeviceOrientation('landscapeRight'),
+ DeviceOrientation.landscapeRight);
+ expect(deserializeDeviceOrientation('landscapeLeft'),
+ DeviceOrientation.landscapeLeft);
+ });
+ });
+}