// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:async/async.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/widgets.dart';
import 'package:stream_transform/stream_transform.dart';
import 'analyzer.dart';
import 'camera.dart';
import 'camera_control.dart';
import 'camera_info.dart';
import 'camera_selector.dart';
import 'camera_state.dart';
import 'camerax_library.g.dart';
import 'exposure_state.dart';
import 'fallback_strategy.dart';
import 'image_analysis.dart';
import 'image_capture.dart';
import 'image_proxy.dart';
import 'live_data.dart';
import 'observer.dart';
import 'pending_recording.dart';
import 'plane_proxy.dart';
import 'preview.dart';
import 'process_camera_provider.dart';
import 'quality_selector.dart';
import 'recorder.dart';
import 'recording.dart';
import 'resolution_selector.dart';
import 'resolution_strategy.dart';
import 'surface.dart';
import 'system_services.dart';
import 'use_case.dart';
import 'video_capture.dart';
import 'zoom_state.dart';
/// The Android implementation of [CameraPlatform] that uses the CameraX library.
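///
/// A minimal usage sketch; in a real app, this class is registered via
/// [registerWith] and driven through [CameraPlatform.instance] by the
/// app-facing `camera` package rather than being constructed directly:
///
/// ```dart
/// Future<int> openFirstCamera() async {
///   final CameraPlatform camerax = AndroidCameraCameraX();
///   final List<CameraDescription> cameras = await camerax.availableCameras();
///   // The returned ID identifies the surface texture the preview draws to.
///   return camerax.createCamera(cameras.first, ResolutionPreset.medium);
/// }
/// ```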
class AndroidCameraCameraX extends CameraPlatform {
/// Constructs an [AndroidCameraCameraX].
AndroidCameraCameraX() : _shouldCreateDetachedObjectForTesting = false;
/// Constructs an [AndroidCameraCameraX] that allows setting
/// [_shouldCreateDetachedObjectForTesting] so that detached objects can be
/// created for testing purposes only.
@visibleForTesting
AndroidCameraCameraX.forTesting(
{bool shouldCreateDetachedObjectForTesting = false})
: _shouldCreateDetachedObjectForTesting =
shouldCreateDetachedObjectForTesting;
/// Registers this class as the default instance of [CameraPlatform].
static void registerWith() {
CameraPlatform.instance = AndroidCameraCameraX();
}
/// The [ProcessCameraProvider] instance used to access camera functionality.
@visibleForTesting
ProcessCameraProvider? processCameraProvider;
/// The [Camera] instance returned by the [processCameraProvider] when a [UseCase] is
/// bound to the lifecycle of the camera it manages.
@visibleForTesting
Camera? camera;
/// The [CameraInfo] instance that corresponds to the [camera] instance.
@visibleForTesting
CameraInfo? cameraInfo;
/// The [LiveData] of the [CameraState] that represents the state of the
/// [camera] instance.
LiveData<CameraState>? liveCameraState;
/// The [Preview] instance that can be configured to present a live camera preview.
@visibleForTesting
Preview? preview;
/// The [VideoCapture] instance that can be instantiated and configured to
/// handle video recording.
@visibleForTesting
VideoCapture? videoCapture;
/// The [Recorder] instance handling the creation of a new [PendingRecording].
@visibleForTesting
Recorder? recorder;
/// The [PendingRecording] instance used to create an active [Recording].
@visibleForTesting
PendingRecording? pendingRecording;
/// The [Recording] instance representing the current recording.
@visibleForTesting
Recording? recording;
/// The path at which the video file will be saved for the current [Recording].
@visibleForTesting
String? videoOutputPath;
bool _previewIsPaused = false;
/// The prefix used to create the filename for video recording files.
@visibleForTesting
final String videoPrefix = 'MOV';
/// The [ImageCapture] instance that can be configured to capture a still image.
@visibleForTesting
ImageCapture? imageCapture;
/// The flash mode currently configured for [imageCapture].
int? _currentFlashMode;
/// Whether or not torch flash mode has been enabled for the [camera].
@visibleForTesting
bool torchEnabled = false;
/// The [ImageAnalysis] instance that can be configured to analyze individual
/// frames.
ImageAnalysis? imageAnalysis;
/// The [CameraSelector] used to configure the [processCameraProvider] to use
/// the desired camera.
@visibleForTesting
CameraSelector? cameraSelector;
/// The controller used to broadcast the different camera events.
///
/// It is a broadcast stream controller because multiple listeners may
/// subscribe to different filtered views of its stream.
///
/// This is only exposed for test purposes. It shouldn't be used by clients of
/// the plugin as it may break or change at any time.
@visibleForTesting
final StreamController<CameraEvent> cameraEventStreamController =
StreamController<CameraEvent>.broadcast();
/// The stream of camera events.
Stream<CameraEvent> _cameraEvents(int cameraId) =>
cameraEventStreamController.stream
.where((CameraEvent event) => event.cameraId == cameraId);
/// Whether or not detached objects should be created for testing their
/// callback methods.
final bool _shouldCreateDetachedObjectForTesting;
/// The controller we need to stream image data.
@visibleForTesting
StreamController<CameraImageData>? cameraImageDataStreamController;
/// Constant representing the multi-plane Android YUV 420 image format.
///
/// See https://developer.android.com/reference/android/graphics/ImageFormat#YUV_420_888.
static const int imageFormatYuv420_888 = 35;
/// Constant representing the compressed JPEG image format.
///
/// See https://developer.android.com/reference/android/graphics/ImageFormat#JPEG.
static const int imageFormatJpeg = 256;
/// Error code indicating a [ZoomState] was requested, but one has not been
/// set for the camera in use.
static const String zoomStateNotSetErrorCode = 'zoomStateNotSet';
/// Returns a list of all available cameras and their descriptions.
@override
Future<List<CameraDescription>> availableCameras() async {
final List<CameraDescription> cameraDescriptions = <CameraDescription>[];
processCameraProvider ??= await ProcessCameraProvider.getInstance();
final List<CameraInfo> cameraInfos =
await processCameraProvider!.getAvailableCameraInfos();
CameraLensDirection? cameraLensDirection;
int cameraCount = 0;
int? cameraSensorOrientation;
String? cameraName;
for (final CameraInfo cameraInfo in cameraInfos) {
// Determine the lens direction by filtering the CameraInfo
// TODO(gmackall): replace this with call to CameraInfo.getLensFacing when changes containing that method are available
if ((await createCameraSelector(CameraSelector.lensFacingBack)
.filter(<CameraInfo>[cameraInfo]))
.isNotEmpty) {
cameraLensDirection = CameraLensDirection.back;
} else if ((await createCameraSelector(CameraSelector.lensFacingFront)
.filter(<CameraInfo>[cameraInfo]))
.isNotEmpty) {
cameraLensDirection = CameraLensDirection.front;
} else {
// Skip this CameraInfo as its lens direction is unknown.
continue;
}
cameraSensorOrientation = await cameraInfo.getSensorRotationDegrees();
cameraName = 'Camera $cameraCount';
cameraCount++;
cameraDescriptions.add(CameraDescription(
name: cameraName,
lensDirection: cameraLensDirection,
sensorOrientation: cameraSensorOrientation));
}
return cameraDescriptions;
}
/// Creates an uninitialized camera instance and returns the camera ID.
///
/// In the CameraX library, cameras are accessed by binding [UseCase]s to an
/// instance of a [ProcessCameraProvider]. Thus, to create an
/// uninitialized camera instance, this method retrieves a
/// [ProcessCameraProvider] instance.
///
/// To return the camera ID, which is equivalent to the ID of the surface texture
/// that a camera preview can be drawn to, a [Preview] instance is configured
/// and bound to the [ProcessCameraProvider] instance.
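///
/// A sketch of the expected call, assuming camera permissions will be
/// granted (the description shown is hypothetical; in practice, use one
/// returned by [availableCameras]):
///
/// ```dart
/// Future<int> createBackCamera() async {
///   // Hypothetical description; normally obtained from availableCameras().
///   const CameraDescription description = CameraDescription(
///       name: 'Camera 0',
///       lensDirection: CameraLensDirection.back,
///       sensorOrientation: 90);
///   return CameraPlatform.instance
///       .createCamera(description, ResolutionPreset.high, enableAudio: true);
/// }
/// ```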
@override
Future<int> createCamera(
CameraDescription cameraDescription,
ResolutionPreset? resolutionPreset, {
bool enableAudio = false,
}) async {
// Must obtain proper permissions before attempting to access a camera.
await requestCameraPermissions(enableAudio);
// Save CameraSelector that matches cameraDescription.
final int cameraSelectorLensDirection =
_getCameraSelectorLensDirection(cameraDescription.lensDirection);
final bool cameraIsFrontFacing =
cameraSelectorLensDirection == CameraSelector.lensFacingFront;
cameraSelector = createCameraSelector(cameraSelectorLensDirection);
// Start listening for device orientation changes preceding camera creation.
startListeningForDeviceOrientationChange(
cameraIsFrontFacing, cameraDescription.sensorOrientation);
// Determine ResolutionSelector and QualitySelector based on
// resolutionPreset for camera UseCases.
final ResolutionSelector? presetResolutionSelector =
_getResolutionSelectorFromPreset(resolutionPreset);
final QualitySelector? presetQualitySelector =
_getQualitySelectorFromPreset(resolutionPreset);
// Retrieve the ProcessCameraProvider instance if one has not yet been retrieved.
processCameraProvider ??= await ProcessCameraProvider.getInstance();
processCameraProvider!.unbindAll();
// Configure Preview instance.
final int targetRotation =
_getTargetRotation(cameraDescription.sensorOrientation);
preview = createPreview(
targetRotation: targetRotation,
resolutionSelector: presetResolutionSelector);
final int flutterSurfaceTextureId = await preview!.setSurfaceProvider();
// Configure ImageCapture instance.
imageCapture = createImageCapture(presetResolutionSelector);
// Configure ImageAnalysis instance.
// Defaults to YUV_420_888 image format.
imageAnalysis = createImageAnalysis(presetResolutionSelector);
// Configure VideoCapture and Recorder instances.
recorder = createRecorder(presetQualitySelector);
videoCapture = await createVideoCapture(recorder!);
// Bind configured UseCases to ProcessCameraProvider instance & mark Preview
// instance as bound but not paused. Video capture is bound at first use
// instead of here.
camera = await processCameraProvider!.bindToLifecycle(
cameraSelector!, <UseCase>[preview!, imageCapture!, imageAnalysis!]);
await _updateCameraInfoAndLiveCameraState(flutterSurfaceTextureId);
_previewIsPaused = false;
return flutterSurfaceTextureId;
}
/// Initializes the camera on the device.
///
/// Since initialization of a camera does not directly map as an operation to
/// the CameraX library, this method just retrieves information about the
/// camera and sends a [CameraInitializedEvent].
///
/// [imageFormatGroup] is used to specify the image format used for image
/// streaming, but CameraX currently only supports YUV_420_888 (supported by
/// Flutter) and RGBA (not supported by Flutter). CameraX uses YUV_420_888
/// by default, so [imageFormatGroup] is not used.
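///
/// A sketch of initializing and awaiting the resulting
/// [CameraInitializedEvent]; the listener is attached before initializing so
/// the event is not missed (`cameraId` comes from `createCamera`):
///
/// ```dart
/// Future<CameraInitializedEvent> initialize(int cameraId) async {
///   final Future<CameraInitializedEvent> initialized =
///       CameraPlatform.instance.onCameraInitialized(cameraId).first;
///   await CameraPlatform.instance.initializeCamera(cameraId);
///   return initialized;
/// }
/// ```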
@override
Future<void> initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
}) async {
// Configure CameraInitializedEvent to send as representation of a
// configured camera:
// Retrieve preview resolution.
if (preview == null) {
// No camera has been created; createCamera must be called before initializeCamera.
throw CameraException(
'cameraNotFound',
"Camera not found. Please call the 'create' method before calling 'initialize'",
);
}
final ResolutionInfo previewResolutionInfo =
await preview!.getResolutionInfo();
// Retrieve exposure and focus mode configurations:
// TODO(camsim99): Implement support for retrieving exposure mode configuration.
// https://github.com/flutter/flutter/issues/120468
const ExposureMode exposureMode = ExposureMode.auto;
const bool exposurePointSupported = false;
// TODO(camsim99): Implement support for retrieving focus mode configuration.
// https://github.com/flutter/flutter/issues/120467
const FocusMode focusMode = FocusMode.auto;
const bool focusPointSupported = false;
cameraEventStreamController.add(CameraInitializedEvent(
cameraId,
previewResolutionInfo.width.toDouble(),
previewResolutionInfo.height.toDouble(),
exposureMode,
exposurePointSupported,
focusMode,
focusPointSupported));
}
/// Releases the resources of the accessed camera.
///
/// [cameraId] not used.
@override
Future<void> dispose(int cameraId) async {
preview?.releaseFlutterSurfaceTexture();
unawaited(liveCameraState?.removeObservers());
processCameraProvider?.unbindAll();
unawaited(imageAnalysis?.clearAnalyzer());
}
/// The camera has been initialized.
@override
Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
}
/// The camera's resolution has changed.
///
/// This stream currently has no events being added to it from this plugin.
@override
Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
return _cameraEvents(cameraId).whereType<CameraResolutionChangedEvent>();
}
/// The camera started to close.
@override
Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
}
/// The camera experienced an error.
@override
Stream<CameraErrorEvent> onCameraError(int cameraId) {
return StreamGroup.mergeBroadcast<
CameraErrorEvent>(<Stream<CameraErrorEvent>>[
SystemServices.cameraErrorStreamController.stream
.map<CameraErrorEvent>((String errorDescription) {
return CameraErrorEvent(cameraId, errorDescription);
}),
_cameraEvents(cameraId).whereType<CameraErrorEvent>()
]);
}
/// The camera finished recording a video.
@override
Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
return _cameraEvents(cameraId).whereType<VideoRecordedEvent>();
}
/// Gets the minimum supported exposure offset for the selected camera in EV units.
///
/// [cameraId] not used.
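///
/// For example, with a hypothetical compensation range of [-9, 9] and a step
/// of 1/3 EV, this returns -9 * (1/3) = -3.0 EV.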
@override
Future<double> getMinExposureOffset(int cameraId) async {
final ExposureState exposureState = await cameraInfo!.getExposureState();
return exposureState.exposureCompensationRange.minCompensation *
exposureState.exposureCompensationStep;
}
/// Gets the maximum supported exposure offset for the selected camera in EV units.
///
/// [cameraId] not used.
@override
Future<double> getMaxExposureOffset(int cameraId) async {
final ExposureState exposureState = await cameraInfo!.getExposureState();
return exposureState.exposureCompensationRange.maxCompensation *
exposureState.exposureCompensationStep;
}
/// Gets the supported step size for exposure offset for the selected camera in EV units.
///
/// Returns 0 when exposure compensation is not supported.
///
/// [cameraId] not used.
@override
Future<double> getExposureOffsetStepSize(int cameraId) async {
final ExposureState exposureState = await cameraInfo!.getExposureState();
return exposureState.exposureCompensationStep;
}
/// Gets the maximum supported zoom level for the selected camera.
///
/// [cameraId] not used.
@override
Future<double> getMaxZoomLevel(int cameraId) async {
final LiveData<ZoomState> liveZoomState = await cameraInfo!.getZoomState();
final ZoomState? zoomState = await liveZoomState.getValue();
if (zoomState == null) {
throw CameraException(
zoomStateNotSetErrorCode,
'No explicit ZoomState has been set on the LiveData instance for the camera in use.',
);
}
return zoomState.maxZoomRatio;
}
/// Gets the minimum supported zoom level for the selected camera.
///
/// [cameraId] not used.
@override
Future<double> getMinZoomLevel(int cameraId) async {
final LiveData<ZoomState> liveZoomState = await cameraInfo!.getZoomState();
final ZoomState? zoomState = await liveZoomState.getValue();
if (zoomState == null) {
throw CameraException(
zoomStateNotSetErrorCode,
'No explicit ZoomState has been set on the LiveData instance for the camera in use.',
);
}
return zoomState.minZoomRatio;
}
/// The UI orientation changed.
@override
Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
return SystemServices.deviceOrientationChangedStreamController.stream;
}
/// Pause the active preview on the current frame for the selected camera.
///
/// [cameraId] not used.
@override
Future<void> pausePreview(int cameraId) async {
_previewIsPaused = true;
await _unbindUseCaseFromLifecycle(preview!);
}
/// Resume the paused preview for the selected camera.
///
/// [cameraId] not used.
@override
Future<void> resumePreview(int cameraId) async {
_previewIsPaused = false;
await _bindPreviewToLifecycle(cameraId);
}
/// Returns a widget showing a live camera preview.
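///
/// A sketch of embedding the preview in a widget tree, assuming a valid
/// `cameraId` from `createCamera` (hypothetical helper name):
///
/// ```dart
/// Widget cameraPreview(int cameraId) {
///   return AspectRatio(
///     aspectRatio: 16 / 9, // Hypothetical; derive from the preview size.
///     child: CameraPlatform.instance.buildPreview(cameraId),
///   );
/// }
/// ```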
@override
Widget buildPreview(int cameraId) {
return FutureBuilder<void>(
future: _bindPreviewToLifecycle(cameraId),
builder: (BuildContext context, AsyncSnapshot<void> snapshot) {
switch (snapshot.connectionState) {
case ConnectionState.none:
case ConnectionState.waiting:
case ConnectionState.active:
// Do nothing while waiting for the preview to be bound to the lifecycle.
return const SizedBox.shrink();
case ConnectionState.done:
return Texture(textureId: cameraId);
}
});
}
/// Captures an image and returns the file where it was saved.
///
/// [cameraId] is not used.
@override
Future<XFile> takePicture(int cameraId) async {
if (_currentFlashMode != null) {
await imageCapture!.setFlashMode(_currentFlashMode!);
} else if (torchEnabled) {
// Ensure any previously set flash modes are unset when torch mode has
// been enabled.
await imageCapture!.setFlashMode(ImageCapture.flashModeOff);
}
final String picturePath = await imageCapture!.takePicture();
return XFile(picturePath);
}
/// Sets the flash mode for the selected camera.
///
/// When [FlashMode.torch] is enabled, any [FlashMode] previously set with
/// this method will be disabled, just as with any other [FlashMode]; while
/// this is not default native Android behavior as defined by the CameraX API,
/// this behavior is compliant with the plugin platform interface.
///
/// This method combines the notion of setting the flash mode of the
/// [imageCapture] UseCase and enabling the camera torch, as described
/// by https://developer.android.com/reference/androidx/camera/core/ImageCapture
/// and https://developer.android.com/reference/androidx/camera/core/CameraControl#enableTorch(boolean),
/// respectively.
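///
/// A sketch of enabling the torch before capturing a still image, assuming a
/// valid `cameraId` (hypothetical helper name):
///
/// ```dart
/// Future<XFile> captureWithTorch(int cameraId) async {
///   await CameraPlatform.instance.setFlashMode(cameraId, FlashMode.torch);
///   return CameraPlatform.instance.takePicture(cameraId);
/// }
/// ```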
@override
Future<void> setFlashMode(int cameraId, FlashMode mode) async {
CameraControl? cameraControl;
// Turn off torch mode if it is enabled and the new mode is not torch.
if (mode != FlashMode.torch && torchEnabled) {
cameraControl = await camera!.getCameraControl();
await cameraControl.enableTorch(false);
torchEnabled = false;
}
switch (mode) {
case FlashMode.off:
_currentFlashMode = ImageCapture.flashModeOff;
break;
case FlashMode.auto:
_currentFlashMode = ImageCapture.flashModeAuto;
break;
case FlashMode.always:
_currentFlashMode = ImageCapture.flashModeOn;
break;
case FlashMode.torch:
_currentFlashMode = null;
if (torchEnabled) {
// Torch mode enabled already.
return;
}
cameraControl = await camera!.getCameraControl();
await cameraControl.enableTorch(true);
torchEnabled = true;
break;
}
}
/// Configures and starts a video recording. Returns silently without doing
/// anything if there is currently an active recording.
///
/// Note that the preset resolution is used to configure the recording, but
/// 240p ([ResolutionPreset.low]) is unsupported and will fall back to
/// configure the recording as the next highest available quality.
///
/// This method is deprecated in favour of [startVideoCapturing].
@override
Future<void> startVideoRecording(int cameraId,
{Duration? maxVideoDuration}) async {
return startVideoCapturing(
VideoCaptureOptions(cameraId, maxDuration: maxVideoDuration));
}
/// Starts a video recording and/or streaming session.
///
/// Please see [VideoCaptureOptions] for documentation on the
/// configuration options. Currently, maxVideoDuration and streamOptions
/// are unsupported due to the limitations of CameraX and the platform
/// interface, respectively.
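///
/// A sketch of a record-then-stop round trip, assuming a valid `cameraId`
/// (hypothetical helper; error handling omitted):
///
/// ```dart
/// Future<XFile> recordClip(int cameraId, Duration length) async {
///   await CameraPlatform.instance
///       .startVideoCapturing(VideoCaptureOptions(cameraId));
///   await Future<void>.delayed(length);
///   return CameraPlatform.instance.stopVideoRecording(cameraId);
/// }
/// ```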
@override
Future<void> startVideoCapturing(VideoCaptureOptions options) async {
if (recording != null) {
// There is currently an active recording, so do not start a new one.
return;
}
if (!(await processCameraProvider!.isBound(videoCapture!))) {
camera = await processCameraProvider!
.bindToLifecycle(cameraSelector!, <UseCase>[videoCapture!]);
}
videoOutputPath =
await SystemServices.getTempFilePath(videoPrefix, '.temp');
pendingRecording = await recorder!.prepareRecording(videoOutputPath!);
recording = await pendingRecording!.start();
if (options.streamCallback != null) {
onStreamedFrameAvailable(options.cameraId).listen(options.streamCallback);
}
}
/// Stops the video recording and returns the file where it was saved.
///
/// Throws a [CameraException] if no recording is in progress or if
/// [videoOutputPath] is null.
///
/// If [videoOutputPath] is null, the recording objects are cleaned up
/// so starting a new recording is possible.
@override
Future<XFile> stopVideoRecording(int cameraId) async {
if (recording == null) {
throw CameraException(
'videoRecordingFailed',
'Attempting to stop a '
'video recording while no recording is in progress.');
}
if (videoOutputPath == null) {
// Stop the current active recording as we will be unable to complete it
// in this error case.
unawaited(recording!.close());
recording = null;
pendingRecording = null;
throw CameraException(
'INVALID_PATH',
'The platform did not return a path '
'while reporting success. The platform should always '
'return a valid path or report an error.');
}
unawaited(recording!.close());
recording = null;
pendingRecording = null;
return XFile(videoOutputPath!);
}
/// Pause the current video recording if it is not null.
@override
Future<void> pauseVideoRecording(int cameraId) async {
if (recording != null) {
await recording!.pause();
}
}
/// Resume the current video recording if it is not null.
@override
Future<void> resumeVideoRecording(int cameraId) async {
if (recording != null) {
await recording!.resume();
}
}
/// A new streamed frame is available.
///
/// Listening to this stream will start streaming, and canceling will stop.
/// To temporarily stop receiving frames, cancel, then listen again later.
/// Pausing/resuming is not supported, as pausing the stream would cause
/// very high memory usage, and will throw an exception due to the
/// implementation using a broadcast [StreamController], which does not
/// support those operations.
///
/// [cameraId] and [options] are not used.
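///
/// A sketch of streaming frames for a short period, assuming a valid
/// `cameraId` (hypothetical helper; cancelling the subscription stops the
/// stream):
///
/// ```dart
/// Future<void> streamBriefly(int cameraId) async {
///   final StreamSubscription<CameraImageData> subscription =
///       CameraPlatform.instance
///           .onStreamedFrameAvailable(cameraId)
///           .listen((CameraImageData imageData) {
///     // Each frame arrives in the YUV_420_888 format by default.
///   });
///   await Future<void>.delayed(const Duration(seconds: 1));
///   await subscription.cancel();
/// }
/// ```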
@override
Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
{CameraImageStreamOptions? options}) {
cameraImageDataStreamController = StreamController<CameraImageData>(
onListen: () => _onFrameStreamListen(cameraId),
onCancel: _onFrameStreamCancel,
);
return cameraImageDataStreamController!.stream;
}
// Methods for binding UseCases to the lifecycle of the camera controlled
// by a ProcessCameraProvider instance:
/// Binds [preview] instance to the camera lifecycle controlled by the
/// [processCameraProvider].
///
/// The provided [cameraId] is used to build [CameraEvent]s, so that they can
/// be filtered based on the camera ID received from calling
/// `createCamera(...)`.
Future<void> _bindPreviewToLifecycle(int cameraId) async {
final bool previewIsBound = await processCameraProvider!.isBound(preview!);
if (previewIsBound || _previewIsPaused) {
// The preview is already bound or is intentionally paused, so do not rebind.
return;
}
camera = await processCameraProvider!
.bindToLifecycle(cameraSelector!, <UseCase>[preview!]);
await _updateCameraInfoAndLiveCameraState(cameraId);
}
/// Configures the [imageAnalysis] instance for image streaming and binds it
/// to the camera lifecycle controlled by the [processCameraProvider].
Future<void> _configureAndBindImageAnalysisToLifecycle(int cameraId) async {
// Create Analyzer that can read image data for image streaming.
final WeakReference<AndroidCameraCameraX> weakThis =
WeakReference<AndroidCameraCameraX>(this);
Future<void> analyze(ImageProxy imageProxy) async {
final List<PlaneProxy> planes = await imageProxy.getPlanes();
final List<CameraImagePlane> cameraImagePlanes = <CameraImagePlane>[];
for (final PlaneProxy plane in planes) {
cameraImagePlanes.add(CameraImagePlane(
bytes: plane.buffer,
bytesPerRow: plane.rowStride,
bytesPerPixel: plane.pixelStride));
}
final int format = imageProxy.format;
final CameraImageFormat cameraImageFormat = CameraImageFormat(
_imageFormatGroupFromPlatformData(format),
raw: format);
final CameraImageData cameraImageData = CameraImageData(
format: cameraImageFormat,
planes: cameraImagePlanes,
height: imageProxy.height,
width: imageProxy.width);
weakThis.target!.cameraImageDataStreamController!.add(cameraImageData);
unawaited(imageProxy.close());
}
// _shouldCreateDetachedObjectForTesting is used to create an Analyzer
// detached from the native side only to test the logic of the Analyzer
// instance that will be used for image streaming.
final Analyzer analyzer = _shouldCreateDetachedObjectForTesting
? Analyzer.detached(analyze: analyze)
: Analyzer(analyze: analyze);
await imageAnalysis!.setAnalyzer(analyzer);
}
/// Unbinds [useCase] from the camera lifecycle controlled by the
/// [processCameraProvider].
Future<void> _unbindUseCaseFromLifecycle(UseCase useCase) async {
final bool useCaseIsBound = await processCameraProvider!.isBound(useCase);
if (!useCaseIsBound) {
return;
}
processCameraProvider!.unbind(<UseCase>[useCase]);
}
// Methods for configuring image streaming:
/// The [onListen] callback for the stream controller used for image
/// streaming.
Future<void> _onFrameStreamListen(int cameraId) async {
await _configureAndBindImageAnalysisToLifecycle(cameraId);
}
/// The [onCancel] callback for the stream controller used for image
/// streaming.
///
/// Removes the previously set analyzer on the [imageAnalysis] instance, since
/// image information should no longer be streamed.
FutureOr<void> _onFrameStreamCancel() async {
unawaited(imageAnalysis!.clearAnalyzer());
}
/// Converts between Android ImageFormat constants and [ImageFormatGroup]s.
///
/// See https://developer.android.com/reference/android/graphics/ImageFormat.
ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
switch (data) {
case imageFormatYuv420_888: // android.graphics.ImageFormat.YUV_420_888
return ImageFormatGroup.yuv420;
case imageFormatJpeg: // android.graphics.ImageFormat.JPEG
return ImageFormatGroup.jpeg;
}
return ImageFormatGroup.unknown;
}
// Methods concerning camera state:
/// Updates [cameraInfo] to the information corresponding to [camera] and
/// adds observers to the [LiveData] of the [CameraState] of the current
/// [camera], saved as [liveCameraState].
///
/// If a previous [liveCameraState] was stored, existing observers are
/// removed, as well.
Future<void> _updateCameraInfoAndLiveCameraState(int cameraId) async {
cameraInfo = await camera!.getCameraInfo();
await liveCameraState?.removeObservers();
liveCameraState = await cameraInfo!.getCameraState();
await liveCameraState!.observe(_createCameraClosingObserver(cameraId));
}
/// Creates [Observer] of the [CameraState] that will:
///
/// * Send a [CameraClosingEvent] if the [CameraState] indicates that the
/// camera has begun to close.
/// * Send a [CameraErrorEvent] if the [CameraState] indicates that the
/// camera is in error state.
Observer<CameraState> _createCameraClosingObserver(int cameraId) {
final WeakReference<AndroidCameraCameraX> weakThis =
WeakReference<AndroidCameraCameraX>(this);
// Callback method used to implement the behavior described above:
void onChanged(Object stateAsObject) {
// This cast is safe because the Observer implementation ensures
// that the runtime type of stateAsObject matches the type parameter of
// the Observer this callback is attached to.
final CameraState state = stateAsObject as CameraState;
if (state.type == CameraStateType.closing) {
weakThis.target!.cameraEventStreamController
.add(CameraClosingEvent(cameraId));
}
if (state.error != null) {
weakThis.target!.cameraEventStreamController
.add(CameraErrorEvent(cameraId, state.error!.getDescription()));
}
}
// _shouldCreateDetachedObjectForTesting is used to create an Observer
// detached from the native side only to test the logic of the Observer
// instance that will be used to track the camera state.
return _shouldCreateDetachedObjectForTesting
? Observer<CameraState>.detached(onChanged: onChanged)
: Observer<CameraState>(onChanged: onChanged);
}
// Methods for mapping Flutter camera constants to CameraX constants:
/// Returns [CameraSelector] lens direction that maps to specified
/// [CameraLensDirection].
int _getCameraSelectorLensDirection(CameraLensDirection lensDirection) {
switch (lensDirection) {
case CameraLensDirection.front:
return CameraSelector.lensFacingFront;
case CameraLensDirection.back:
return CameraSelector.lensFacingBack;
case CameraLensDirection.external:
return CameraSelector.lensFacingExternal;
}
}
/// Returns [Surface] target rotation constant that maps to specified sensor
/// orientation.
int _getTargetRotation(int sensorOrientation) {
switch (sensorOrientation) {
case 90:
return Surface.ROTATION_90;
case 180:
return Surface.ROTATION_180;
case 270:
return Surface.ROTATION_270;
case 0:
return Surface.ROTATION_0;
default:
throw ArgumentError(
'"$sensorOrientation" is not a valid sensor orientation value');
}
}
/// Returns the [ResolutionSelector] that maps to the specified resolution
/// preset for camera [UseCase]s.
///
/// If the specified [preset] is unavailable, the camera will fall back to the
/// closest lower resolution available.
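///
/// The preset-to-bound-size mapping used below is:
///
/// * [ResolutionPreset.low]: 320x240
/// * [ResolutionPreset.medium]: 720x480
/// * [ResolutionPreset.high]: 1280x720
/// * [ResolutionPreset.veryHigh]: 1920x1080
/// * [ResolutionPreset.ultraHigh]: 3840x2160
/// * [ResolutionPreset.max]: the highest available resolution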
ResolutionSelector? _getResolutionSelectorFromPreset(
ResolutionPreset? preset) {
const int fallbackRule = ResolutionStrategy.fallbackRuleClosestLower;
Size? boundSize;
ResolutionStrategy? resolutionStrategy;
switch (preset) {
case ResolutionPreset.low:
boundSize = const Size(320, 240);
break;
case ResolutionPreset.medium:
boundSize = const Size(720, 480);
break;
case ResolutionPreset.high:
boundSize = const Size(1280, 720);
break;
case ResolutionPreset.veryHigh:
boundSize = const Size(1920, 1080);
break;
case ResolutionPreset.ultraHigh:
boundSize = const Size(3840, 2160);
break;
case ResolutionPreset.max:
// Automatically set the strategy to choose the highest available resolution.
resolutionStrategy = _shouldCreateDetachedObjectForTesting
? ResolutionStrategy.detachedHighestAvailableStrategy()
: ResolutionStrategy.highestAvailableStrategy();
break;
case null:
// If no preset is specified, default to CameraX's default behavior
// for each UseCase.
return null;
}
if (_shouldCreateDetachedObjectForTesting) {
resolutionStrategy ??= ResolutionStrategy.detached(
boundSize: boundSize, fallbackRule: fallbackRule);
return ResolutionSelector.detached(
resolutionStrategy: resolutionStrategy);
}
resolutionStrategy ??=
ResolutionStrategy(boundSize: boundSize!, fallbackRule: fallbackRule);
// Reuse the strategy chosen above; for ResolutionPreset.max, boundSize is
// null, so constructing another ResolutionStrategy here would throw.
return ResolutionSelector(resolutionStrategy: resolutionStrategy);
}
/// Returns the [QualitySelector] that maps to the specified resolution
/// preset for the camera used only for video capture.
///
/// If the specified [preset] is unavailable, the camera will fall back to the
/// closest lower resolution available.
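///
/// The preset-to-[VideoQuality] mapping used below is:
///
/// * [ResolutionPreset.low] and [ResolutionPreset.medium]: [VideoQuality.SD]
/// * [ResolutionPreset.high]: [VideoQuality.HD]
/// * [ResolutionPreset.veryHigh]: [VideoQuality.FHD]
/// * [ResolutionPreset.ultraHigh]: [VideoQuality.UHD]
/// * [ResolutionPreset.max]: [VideoQuality.highest]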
QualitySelector? _getQualitySelectorFromPreset(ResolutionPreset? preset) {
VideoQuality? videoQuality;
switch (preset) {
case ResolutionPreset.low:
// 240p is not supported by CameraX; fall through to 480p (SD).
case ResolutionPreset.medium:
videoQuality = VideoQuality.SD;
break;
case ResolutionPreset.high:
videoQuality = VideoQuality.HD;
break;
case ResolutionPreset.veryHigh:
videoQuality = VideoQuality.FHD;
break;
case ResolutionPreset.ultraHigh:
videoQuality = VideoQuality.UHD;
break;
case ResolutionPreset.max:
videoQuality = VideoQuality.highest;
break;
case null:
// If no preset is specified, default to CameraX's default behavior
// for each UseCase.
return null;
}
// We will choose the next lower video quality if the one desired
// is unavailable.
const VideoResolutionFallbackRule fallbackRule =
VideoResolutionFallbackRule.lowerQualityThan;
final FallbackStrategy fallbackStrategy =
_shouldCreateDetachedObjectForTesting
? FallbackStrategy.detached(
quality: videoQuality, fallbackRule: fallbackRule)
: FallbackStrategy(
quality: videoQuality, fallbackRule: fallbackRule);
return _shouldCreateDetachedObjectForTesting
? QualitySelector.detached(qualityList: <VideoQualityData>[
VideoQualityData(quality: videoQuality)
], fallbackStrategy: fallbackStrategy)
: QualitySelector.from(
quality: VideoQualityData(quality: videoQuality),
fallbackStrategy: fallbackStrategy);
}
// Methods for calls that need to be tested:
/// Requests camera permissions.
@visibleForTesting
Future<void> requestCameraPermissions(bool enableAudio) async {
await SystemServices.requestCameraPermissions(enableAudio);
}
/// Subscribes the plugin as a listener to changes in device orientation.
@visibleForTesting
void startListeningForDeviceOrientationChange(
bool cameraIsFrontFacing, int sensorOrientation) {
SystemServices.startListeningForDeviceOrientationChange(
cameraIsFrontFacing, sensorOrientation);
}
/// Returns a [CameraSelector] based on the specified camera lens direction.
@visibleForTesting
CameraSelector createCameraSelector(int cameraSelectorLensDirection) {
switch (cameraSelectorLensDirection) {
case CameraSelector.lensFacingFront:
return CameraSelector.getDefaultFrontCamera();
case CameraSelector.lensFacingBack:
return CameraSelector.getDefaultBackCamera();
default:
return CameraSelector(lensFacing: cameraSelectorLensDirection);
}
}
/// Returns a [Preview] configured with the specified target rotation and
/// [ResolutionSelector].
@visibleForTesting
Preview createPreview(
{required int targetRotation, ResolutionSelector? resolutionSelector}) {
return Preview(
targetRotation: targetRotation, resolutionSelector: resolutionSelector);
}
/// Returns an [ImageCapture] configured with the specified
/// [ResolutionSelector].
@visibleForTesting
ImageCapture createImageCapture(ResolutionSelector? resolutionSelector) {
return ImageCapture(resolutionSelector: resolutionSelector);
}
/// Returns a [Recorder] for use in video capture configured with the
/// specified [QualitySelector].
@visibleForTesting
Recorder createRecorder(QualitySelector? qualitySelector) {
return Recorder(qualitySelector: qualitySelector);
}
/// Returns a [VideoCapture] associated with the provided [Recorder].
@visibleForTesting
Future<VideoCapture> createVideoCapture(Recorder recorder) async {
return VideoCapture.withOutput(recorder);
}
/// Returns an [ImageAnalysis] configured with the specified
/// [ResolutionSelector].
@visibleForTesting
ImageAnalysis createImageAnalysis(ResolutionSelector? resolutionSelector) {
return ImageAnalysis(resolutionSelector: resolutionSelector);
}
}