Merge flutter/plugins (#3233)
Merges everything from the packages/ directory of flutter/plugins (which
is essentially the entire remaining repository) into this repository,
completing the core of the repository merge.
Part of https://github.com/flutter/flutter/issues/113764
diff --git a/.cirrus.yml b/.cirrus.yml
index 4f6cf53..ad467b7 100644
--- a/.cirrus.yml
+++ b/.cirrus.yml
@@ -236,6 +236,9 @@
- name: dart_unit_tests
env:
matrix:
+ PACKAGE_SHARDING: "--shardIndex 0 --shardCount 2"
+ PACKAGE_SHARDING: "--shardIndex 1 --shardCount 2"
+ matrix:
CHANNEL: "master"
CHANNEL: "stable"
unit_test_script:
@@ -315,7 +318,13 @@
build_script:
- ./script/tool_runner.sh build-examples --web
drive_script:
- - ./script/tool_runner.sh drive-examples --web --exclude=script/configs/exclude_integration_web.yaml
+ # TODO(stuartmorgan): Figure out why url_launcher_web is failing on stable and re-enable it:
+ # https://github.com/flutter/flutter/issues/121161
+ - if [[ "$CHANNEL" == "master" ]]; then
+ - ./script/tool_runner.sh drive-examples --web --exclude=script/configs/exclude_integration_web.yaml
+ - else
+ - ./script/tool_runner.sh drive-examples --web --exclude=script/configs/exclude_integration_web.yaml,url_launcher_web
+ - fi
- name: web_benchmarks_test
env:
matrix:
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 396a3c0..a365609 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -33,6 +33,78 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/camera/camera_android/android"
+ commit-message:
+ prefix: "[camera]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/camera/camera_android/example/android/app"
+ commit-message:
+ prefix: "[camera]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/camera/camera_android_camerax/android"
+ commit-message:
+ prefix: "[camera]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/camera/camera_android_camerax/example/android/app"
+ commit-message:
+ prefix: "[camera]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/camera/camera/example/android/app"
+ commit-message:
+ prefix: "[camera]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/dynamic_layouts/example/android/app"
commit-message:
@@ -43,6 +115,37 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/espresso/android"
+ commit-message:
+ prefix: "[espresso]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/espresso/example/android/app"
+ commit-message:
+ prefix: "[espresso]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/extension_google_sign_in_as_googleapis_auth/example/android/app"
commit-message:
@@ -53,6 +156,7 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/flutter_adaptive_scaffold/example/android/app"
commit-message:
@@ -63,6 +167,7 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/flutter_markdown/example/android/app"
commit-message:
@@ -73,6 +178,37 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/flutter_plugin_android_lifecycle/android"
+ commit-message:
+ prefix: "[lifecycle]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/flutter_plugin_android_lifecycle/example/android/app"
+ commit-message:
+ prefix: "[lifecycle]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/go_router/example/android/app"
commit-message:
@@ -83,6 +219,212 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/google_maps_flutter/google_maps_flutter/example/android/app"
+ commit-message:
+ prefix: "[google_maps]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/google_maps_flutter/google_maps_flutter_android/android"
+ commit-message:
+ prefix: "[google_maps]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/google_maps_flutter/google_maps_flutter_android/example/android/app"
+ commit-message:
+ prefix: "[google_maps]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/google_sign_in/google_sign_in/example/android/app"
+ commit-message:
+ prefix: "[sign_in]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/google_sign_in/google_sign_in_android/android"
+ commit-message:
+ prefix: "[sign_in]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/google_sign_in/google_sign_in_android/example/android/app"
+ commit-message:
+ prefix: "[sign_in]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/in_app_purchase/in_app_purchase_android/android"
+ commit-message:
+ prefix: "[in_app_pur]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/in_app_purchase/in_app_purchase_android/example/android/app"
+ commit-message:
+ prefix: "[in_app_pur]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/in_app_purchase/in_app_purchase/example/android/app"
+ commit-message:
+ prefix: "[in_app_pur]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/image_picker/image_picker/example/android/app"
+ commit-message:
+ prefix: "[image_picker]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/image_picker/image_picker_android/android"
+ commit-message:
+ prefix: "[image_picker]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/image_picker/image_picker_android/example/android/app"
+ commit-message:
+ prefix: "[image_picker]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/local_auth/local_auth_android/android"
+ commit-message:
+ prefix: "[local_auth]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/local_auth/local_auth_android/example/android/app"
+ commit-message:
+ prefix: "[local_auth]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/local_auth/local_auth/example/android/app"
+ commit-message:
+ prefix: "[local_auth]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/palette_generator/example/android/app"
commit-message:
@@ -93,6 +435,48 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/path_provider/path_provider/example/android/app"
+ commit-message:
+ prefix: "[path_provider]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/path_provider/path_provider_android/android"
+ commit-message:
+ prefix: "[path_provider]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/path_provider/path_provider_android/example/android/app"
+ commit-message:
+ prefix: "[path_provider]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/pigeon/platform_tests/test_plugin/android"
commit-message:
@@ -111,6 +495,7 @@
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
- dependency-name: "org.robolectric:*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/pigeon/platform_tests/test_plugin/example/android/app"
commit-message:
@@ -121,6 +506,7 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/pigeon/platform_tests/alternate_language_test_plugin/android"
commit-message:
@@ -139,6 +525,7 @@
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
- dependency-name: "org.robolectric:*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/pigeon/platform_tests/alternate_language_test_plugin/example/android/app"
commit-message:
@@ -149,6 +536,48 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/quick_actions/quick_actions_android/android"
+ commit-message:
+ prefix: "[quick_actions]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/quick_actions/quick_actions_android/example/android/app"
+ commit-message:
+ prefix: "[quick_actions]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/quick_actions/quick_actions/example/android/app"
+ commit-message:
+ prefix: "[quick_actions]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/rfw/example/hello/android/app"
commit-message:
@@ -159,6 +588,7 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/rfw/example/local/android/app"
commit-message:
@@ -169,6 +599,7 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
- package-ecosystem: "gradle"
directory: "/packages/rfw/example/remote/android/app"
commit-message:
@@ -179,3 +610,167 @@
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/shared_preferences/shared_preferences/example/android/app"
+ commit-message:
+ prefix: "[shared_pref]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/shared_preferences/shared_preferences_android/android"
+ commit-message:
+ prefix: "[shared_pref]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/shared_preferences/shared_preferences_android/example/android/app"
+ commit-message:
+ prefix: "[shared_pref]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/url_launcher/url_launcher_android/android"
+ commit-message:
+ prefix: "[url_launcher]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/url_launcher/url_launcher_android/example/android/app"
+ commit-message:
+ prefix: "[url_launcher]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/url_launcher/url_launcher/example/android/app"
+ commit-message:
+ prefix: "[url_launcher]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/video_player/video_player/example/android/app"
+ commit-message:
+ prefix: "[video_player]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/video_player/video_player_android/android"
+ commit-message:
+ prefix: "[video_player]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/video_player/video_player_android/example/android/app"
+ commit-message:
+ prefix: "[video_player]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/webview_flutter/webview_flutter/example/android/app"
+ commit-message:
+ prefix: "[webview]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/webview_flutter/webview_flutter_android/android"
+ commit-message:
+ prefix: "[webview]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "com.android.tools.build:gradle"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "junit:junit"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.mockito:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "androidx.test:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+ - dependency-name: "org.robolectric:*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+
+ - package-ecosystem: "gradle"
+ directory: "/packages/webview_flutter/webview_flutter_android/example/android/app"
+ commit-message:
+ prefix: "[webview]"
+ schedule:
+ interval: "weekly"
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-minor", "version-update:semver-patch"]
diff --git a/.github/labeler.yml b/.github/labeler.yml
index 2104027..7bbdbd1 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -1,18 +1,30 @@
'p: animations':
- packages/animations/**/*
+'p: camera':
+ - packages/camera/**/*
+
'p: cross_file':
- packages/cross_file/**/*
'p: css_colors':
- packages/css_colors/**/*
+'p: cupertino_icons':
+ - third_party/packages/cupertino_icons/**/*
+
'p: dynamic_layouts':
- packages/dynamic_layouts/**/*
+'p: espresso':
+ - packages/espresso/**/*
+
'p: extension_google_sign_in_as_googleapis_auth':
- packages/extension_google_sign_in_as_googleapis_auth/**/*
+'p: file_selector':
+ - packages/file_selector/**/*
+
'p: flutter_adaptive_scaffold':
- packages/flutter_adaptive_scaffold/**/*
@@ -28,6 +40,9 @@
'p: flutter_migrate':
- packages/flutter_migrate/**/*
+'p: flutter_plugin_android_lifecycle':
+ - packages/flutter_plugin_android_lifecycle/**/*
+
'p: flutter_template_images':
- packages/flutter_template_images/**/*
@@ -40,6 +55,24 @@
'p: google_identity_services':
- packages/google_indentity_services_web/**
+'p: google_maps_flutter':
+ - packages/google_maps_flutter/**/*
+
+'p: google_sign_in':
+ - packages/google_sign_in/**/*
+
+'p: image_picker':
+ - packages/image_picker/**/*
+
+'p: in_app_purchase':
+ - packages/in_app_purchase/**/*
+
+'p: ios_platform_images':
+ - packages/ios_platform_images/**/*
+
+'p: local_auth':
+ - packages/local_auth/**/*
+
'p: metrics_center':
- packages/metrics_center/**/*
@@ -49,23 +82,68 @@
'p: palette_generator':
- packages/palette_generator/**/*
+'p: path_provider':
+ - packages/path_provider/**/*
+
'p: pigeon':
- packages/pigeon/**/*
+'p: plugin_platform_interface':
+ - packages/plugin_platform_interface/**/*
+
'p: pointer_interceptor':
- packages/pointer_interceptor/**/*
+'p: quick_actions':
+ - packages/quick_actions/**/*
+
'p: rfw':
- packages/rfw/**/*
+'p: shared_preferences':
+ - packages/shared_preferences/**/*
+
'p: standard_message_codec':
- packages/standard_message_codec/**/*
+'p: url_launcher':
+ - packages/url_launcher/**/*
+
+'p: video_player':
+ - packages/video_player/**/*
+
'p: web_benchmarks':
- packages/web_benchmarks/**/*
+'p: webview_flutter':
+ - packages/webview_flutter/**/*
+
'p: xdg_directories':
- packages/xdg_directories/**/*
-'p: cupertino_icons':
- - third_party/packages/cupertino_icons/**/*
+'platform-android':
+ - packages/*/*_android/**/*
+ - packages/**/android/**/*
+
+'platform-ios':
+ - packages/*/*_ios/**/*
+ - packages/*/*_storekit/**/*
+ - packages/*/*_wkwebview/**/*
+ - packages/**/ios/**/*
+
+'platform-linux':
+ - packages/*/*_linux/**/*
+ - packages/**/linux/**/*
+
+'platform-macos':
+ - packages/*/*_macos/**/*
+ - packages/**/macos/**/*
+
+'platform-web':
+ - packages/*/*_web/**/*
+ - packages/**/web/**/*
+
+'platform-windows':
+ - packages/*/*_windows/**/*
+ - packages/**/windows/**/*
+
diff --git a/AUTHORS b/AUTHORS
index 7b9785e..23670fb 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -7,3 +7,66 @@
The Chromium Authors
Britannio Jarrett <britanniojarrett@gmail.com>
Sarbagya Dhaubanjar <mail@sarbagyastha.com.np>
+German Saprykin <saprykin.h@gmail.com>
+Benjamin Sauer <sauer.benjamin@gmail.com>
+larsenthomasj@gmail.com
+Ali Bitek <alibitek@protonmail.ch>
+Pol Batlló <pol.batllo@gmail.com>
+Anatoly Pulyaevskiy
+Hayden Flinner <haydenflinner@gmail.com>
+Stefano Rodriguez <hlsroddy@gmail.com>
+Salvatore Giordano <salvatoregiordanoo@gmail.com>
+Brian Armstrong <brian@flutter.institute>
+Paul DeMarco <paulmdemarco@gmail.com>
+Fabricio Nogueira <feufeu@gmail.com>
+Simon Lightfoot <simon@devangels.london>
+Ashton Thomas <ashton@acrinta.com>
+Thomas Danner <thmsdnnr@gmail.com>
+Diego Velásquez <diego.velasquez.lopez@gmail.com>
+Hajime Nakamura <nkmrhj@gmail.com>
+Tuyển Vũ Xuân <netsoft1985@gmail.com>
+Miguel Ruivo <miguel@miguelruivo.com>
+Sarthak Verma <sarthak@artiosys.com>
+Mike Diarmid <mike@invertase.io>
+Invertase <oss@invertase.io>
+Elliot Hesp <elliot@invertase.io>
+Vince Varga <vince.varga@smaho.com>
+Aawaz Gyawali <awazgyawali@gmail.com>
+EUI Limited <ian.evans3@admiralgroup.co.uk>
+Katarina Sheremet <katarina@sheremet.ch>
+Thomas Stockx <thomas@stockxit.com>
+Sarbagya Dhaubanjar <sarbagyastha@gmail.com>
+Ozkan Eksi <ozeksi@gmail.com>
+Rishab Nayak <rishab@bu.edu>
+ko2ic <ko2ic.dev@gmail.com>
+Jonathan Younger <jonathan@daikini.com>
+Jose Sanchez <josesm82@gmail.com>
+Debkanchan Samadder <debu.samadder@gmail.com>
+Audrius Karosevicius <audrius.karosevicius@gmail.com>
+Lukasz Piliszczuk <lukasz@intheloup.io>
+SoundReply Solutions GmbH <ch@soundreply.com>
+Rafal Wachol <rwachol@gmail.com>
+Pau Picas <pau.picas@gmail.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Alexandru Tuca <salexandru.tuca@outlook.com>
+Rhodes Davis Jr. <rody.davis.jr@gmail.com>
+Luigi Agosti <luigi@tengio.com>
+Quentin Le Guennec <quentin@tengio.com>
+Koushik Ravikumar <koushik@tengio.com>
+Nissim Dsilva <nissim@tengio.com>
+Giancarlo Rocha <giancarloiff@gmail.com>
+Ryo Miyake <ryo@miyake.id>
+Théo Champion <contact.theochampion@gmail.com>
+Kazuki Yamaguchi <y.kazuki0614n@gmail.com>
+Eitan Schwartz <eshvartz@gmail.com>
+Chris Rutkowski <chrisrutkowski89@gmail.com>
+Juan Alvarez <juan.alvarez@resideo.com>
+Aleksandr Yurkovskiy <sanekyy@gmail.com>
+Anton Borries <mail@antonborri.es>
+Alex Li <google@alexv525.com>
+Rahul Raj <64.rahulraj@gmail.com>
+Daniel Roek <daniel.roek@gmail.com>
+TheOneWithTheBraid <the-one@with-the-braid.cf>
+Rulong Chen(陈汝龙) <rulong.crl@alibaba-inc.com>
+Hwanseok Kang <tttkhs96@gmail.com>
+Twin Sun, LLC <google-contrib@twinsunsolutions.com>
diff --git a/CODEOWNERS b/CODEOWNERS
index d160af2..538c826 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -4,27 +4,98 @@
# These names are just suggestions. It is fine to have your changes
# reviewed by someone else.
-packages/animations/** @goderbauer
-packages/cross_file/** @ditman
-packages/css_colors/** @stuartmorgan
-packages/dynamic_layouts/** @Piinks
+packages/animations/** @goderbauer
+packages/camera/** @bparrishMines
+packages/cross_file/** @ditman
+packages/css_colors/** @stuartmorgan
+packages/dynamic_layouts/** @Piinks
packages/extension_google_sign_in_as_googleapis_auth/** @ditman
-packages/flutter_adaptive_scaffold/** @gspencergoog
-packages/flutter_image/** @stuartmorgan
-packages/flutter_lints/** @goderbauer
-packages/flutter_markdown/** @domesticmouse
-packages/flutter_migrate/** @GaryQian
-packages/flutter_template_images/** @stuartmorgan
-packages/go_router/** @chunhtai
-packages/go_router_builder/** @chunhtai
-packages/google_identity_services_web/** @ditman
-packages/metrics_center/** @keyonghan
-packages/multicast_dns/** @dnfield
-packages/palette_generator/** @gspencergoog
-packages/pigeon/** @tarrinneal
-packages/pointer_interceptor/** @ditman
-packages/rfw/** @Hixie
-packages/standard_message_codec/** @jonahwilliams
-packages/web_benchmarks/** @yjbanov
-packages/xdg_directories/** @stuartmorgan
-third_party/packages/cupertino_icons/** @jmagman
+packages/file_selector/** @stuartmorgan
+packages/flutter_adaptive_scaffold/** @gspencergoog
+packages/flutter_image/** @stuartmorgan
+packages/flutter_lints/** @goderbauer
+packages/flutter_markdown/** @domesticmouse
+packages/flutter_migrate/** @GaryQian
+packages/flutter_template_images/** @stuartmorgan
+packages/go_router/** @chunhtai
+packages/go_router_builder/** @chunhtai
+packages/google_identity_services_web/** @ditman
+packages/google_maps_flutter/** @stuartmorgan
+packages/google_sign_in/** @stuartmorgan
+packages/image_picker/** @tarrinneal
+packages/in_app_purchase/** @bparrishMines
+packages/local_auth/** @stuartmorgan
+packages/metrics_center/** @keyonghan
+packages/multicast_dns/** @dnfield
+packages/palette_generator/** @gspencergoog
+packages/path_provider/** @stuartmorgan
+packages/pigeon/** @tarrinneal
+packages/plugin_platform_interface/** @stuartmorgan
+packages/pointer_interceptor/** @ditman
+packages/quick_actions/** @bparrishMines
+packages/rfw/** @Hixie
+packages/shared_preferences/** @tarrinneal
+packages/standard_message_codec/** @jonahwilliams
+packages/url_launcher/** @stuartmorgan
+packages/video_player/** @tarrinneal
+packages/web_benchmarks/** @yjbanov
+packages/webview_flutter/** @bparrishMines
+packages/xdg_directories/** @stuartmorgan
+third_party/packages/cupertino_icons/** @jmagman
+
+# Plugin platform implementation rules. These should stay last, since the last
+# matching entry takes precedence.
+
+# - Web
+packages/**/*_web/** @ditman
+
+# - Android
+packages/camera/camera_android/** @camsim99
+packages/camera/camera_android_camerax/** @camsim99
+packages/espresso/** @reidbaker
+packages/flutter_plugin_android_lifecycle/** @reidbaker
+packages/google_maps_flutter/google_maps_flutter_android/** @reidbaker
+packages/google_sign_in/google_sign_in_android/** @camsim99
+packages/image_picker/image_picker_android/** @gmackall
+packages/in_app_purchase/in_app_purchase_android/** @gmackall
+packages/local_auth/local_auth_android/** @camsim99
+packages/path_provider/path_provider_android/** @camsim99
+packages/quick_actions/quick_actions_android/** @camsim99
+packages/shared_preferences/shared_preferences_android/** @reidbaker
+packages/url_launcher/url_launcher_android/** @gmackall
+packages/video_player/video_player_android/** @camsim99
+
+# - iOS
+packages/camera/camera_avfoundation/** @hellohuanlin
+packages/file_selector/file_selector_ios/** @jmagman
+packages/google_maps_flutter/google_maps_flutter_ios/** @cyanglaz
+packages/google_sign_in/google_sign_in_ios/** @vashworth
+packages/image_picker/image_picker_ios/** @vashworth
+packages/in_app_purchase/in_app_purchase_storekit/** @cyanglaz
+packages/ios_platform_images/ios/** @jmagman
+packages/local_auth/local_auth_ios/** @louisehsu
+packages/path_provider/path_provider_foundation/** @jmagman
+packages/quick_actions/quick_actions_ios/** @hellohuanlin
+packages/shared_preferences/shared_preferences_foundation/** @cyanglaz
+packages/url_launcher/url_launcher_ios/** @jmagman
+packages/video_player/video_player_avfoundation/** @hellohuanlin
+packages/webview_flutter/webview_flutter_wkwebview/** @cyanglaz
+
+# - Linux
+packages/file_selector/file_selector_linux/** @cbracken
+packages/path_provider/path_provider_linux/** @cbracken
+packages/shared_preferences/shared_preferences_linux/** @cbracken
+packages/url_launcher/url_launcher_linux/** @cbracken
+
+# - macOS
+packages/file_selector/file_selector_macos/** @cbracken
+packages/url_launcher/url_launcher_macos/** @cbracken
+
+# - Windows
+packages/camera/camera_windows/** @cbracken
+packages/file_selector/file_selector_windows/** @cbracken
+packages/image_picker/image_picker_windows/** @cbracken
+packages/local_auth/local_auth_windows/** @cbracken
+packages/path_provider/path_provider_windows/** @cbracken
+packages/shared_preferences/shared_preferences_windows/** @cbracken
+packages/url_launcher/url_launcher_windows/** @cbracken
diff --git a/packages/camera/camera/AUTHORS b/packages/camera/camera/AUTHORS
new file mode 100644
index 0000000..493a0b4
--- /dev/null
+++ b/packages/camera/camera/AUTHORS
@@ -0,0 +1,66 @@
+# Below is a list of people and organizations that have contributed
+# to the Flutter project. Names should be added to the list like so:
+#
+# Name/Organization <email address>
+
+Google Inc.
+The Chromium Authors
+German Saprykin <saprykin.h@gmail.com>
+Benjamin Sauer <sauer.benjamin@gmail.com>
+larsenthomasj@gmail.com
+Ali Bitek <alibitek@protonmail.ch>
+Pol Batlló <pol.batllo@gmail.com>
+Anatoly Pulyaevskiy
+Hayden Flinner <haydenflinner@gmail.com>
+Stefano Rodriguez <hlsroddy@gmail.com>
+Salvatore Giordano <salvatoregiordanoo@gmail.com>
+Brian Armstrong <brian@flutter.institute>
+Paul DeMarco <paulmdemarco@gmail.com>
+Fabricio Nogueira <feufeu@gmail.com>
+Simon Lightfoot <simon@devangels.london>
+Ashton Thomas <ashton@acrinta.com>
+Thomas Danner <thmsdnnr@gmail.com>
+Diego Velásquez <diego.velasquez.lopez@gmail.com>
+Hajime Nakamura <nkmrhj@gmail.com>
+Tuyển Vũ Xuân <netsoft1985@gmail.com>
+Miguel Ruivo <miguel@miguelruivo.com>
+Sarthak Verma <sarthak@artiosys.com>
+Mike Diarmid <mike@invertase.io>
+Invertase <oss@invertase.io>
+Elliot Hesp <elliot@invertase.io>
+Vince Varga <vince.varga@smaho.com>
+Aawaz Gyawali <awazgyawali@gmail.com>
+EUI Limited <ian.evans3@admiralgroup.co.uk>
+Katarina Sheremet <katarina@sheremet.ch>
+Thomas Stockx <thomas@stockxit.com>
+Sarbagya Dhaubanjar <sarbagyastha@gmail.com>
+Ozkan Eksi <ozeksi@gmail.com>
+Rishab Nayak <rishab@bu.edu>
+ko2ic <ko2ic.dev@gmail.com>
+Jonathan Younger <jonathan@daikini.com>
+Jose Sanchez <josesm82@gmail.com>
+Debkanchan Samadder <debu.samadder@gmail.com>
+Audrius Karosevicius <audrius.karosevicius@gmail.com>
+Lukasz Piliszczuk <lukasz@intheloup.io>
+SoundReply Solutions GmbH <ch@soundreply.com>
+Rafal Wachol <rwachol@gmail.com>
+Pau Picas <pau.picas@gmail.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Alexandru Tuca <salexandru.tuca@outlook.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Rhodes Davis Jr. <rody.davis.jr@gmail.com>
+Luigi Agosti <luigi@tengio.com>
+Quentin Le Guennec <quentin@tengio.com>
+Koushik Ravikumar <koushik@tengio.com>
+Nissim Dsilva <nissim@tengio.com>
+Giancarlo Rocha <giancarloiff@gmail.com>
+Ryo Miyake <ryo@miyake.id>
+Théo Champion <contact.theochampion@gmail.com>
+Kazuki Yamaguchi <y.kazuki0614n@gmail.com>
+Eitan Schwartz <eshvartz@gmail.com>
+Chris Rutkowski <chrisrutkowski89@gmail.com>
+Juan Alvarez <juan.alvarez@resideo.com>
+Aleksandr Yurkovskiy <sanekyy@gmail.com>
+Anton Borries <mail@antonborri.es>
+Alex Li <google@alexv525.com>
+Rahul Raj <64.rahulraj@gmail.com>
diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
new file mode 100644
index 0000000..13c0040
--- /dev/null
+++ b/packages/camera/camera/CHANGELOG.md
@@ -0,0 +1,719 @@
+## 0.10.3
+
+* Adds back use of Optional type.
+
+## 0.10.2+1
+
+* Updates code for stricter lint checks.
+
+## 0.10.2
+
+* Implements option to also stream when recording a video.
+
+## 0.10.1
+
+* Remove usage of deprecated quiver Optional type.
+
+## 0.10.0+5
+
+* Updates code for stricter lint checks.
+
+## 0.10.0+4
+
+* Removes usage of `_ambiguate` method in example.
+* Updates minimum Flutter version to 3.0.
+
+## 0.10.0+3
+
+* Updates code for `no_leading_underscores_for_local_identifiers` lint.
+
+## 0.10.0+2
+
+* Updates imports for `prefer_relative_imports`.
+* Updates minimum Flutter version to 2.10.
+
+## 0.10.0+1
+
+* Fixes avoid_redundant_argument_values lint warnings and minor typos.
+
+## 0.10.0
+
+* **Breaking Change** Bumps default camera_web package version, which updates permission exception code from `cameraPermission` to `CameraAccessDenied`.
+* **Breaking Change** Bumps default camera_android package version, which updates permission exception code from `cameraPermission` to
+ `CameraAccessDenied` and `AudioAccessDenied`.
+* Ignores unnecessary import warnings in preparation for [upcoming Flutter changes](https://github.com/flutter/flutter/pull/106316).
+
+## 0.9.8+1
+
+* Ignores deprecation warnings for upcoming styleFrom button API changes.
+
+## 0.9.8
+
+* Moves Android and iOS implementations to federated packages.
+* Ignores unnecessary import warnings in preparation for [upcoming Flutter changes](https://github.com/flutter/flutter/pull/104231).
+
+## 0.9.7+1
+
+* Moves streaming implementation to the platform interface package.
+
+## 0.9.7
+
+* Returns all the available cameras on iOS.
+
+## 0.9.6
+
+* Adds audio access permission handling logic on iOS to fix an issue with `prepareForVideoRecording` not awaiting for the audio permission request result.
+
+## 0.9.5+1
+
+* Suppresses warnings for pre-iOS-11 codepaths.
+
+## 0.9.5
+
+* Adds camera access permission handling logic on iOS to fix a related crash when using the camera for the first time.
+
+## 0.9.4+24
+
+* Fixes preview orientation when pausing preview with locked orientation.
+
+## 0.9.4+23
+
+* Minor fixes for new analysis options.
+
+## 0.9.4+22
+
+* Removes unnecessary imports.
+* Fixes library_private_types_in_public_api, sort_child_properties_last and use_key_in_widget_constructors
+ lint warnings.
+
+## 0.9.4+21
+
+* Fixes README code samples.
+
+## 0.9.4+20
+
+* Fixes an issue with the orientation of videos recorded in landscape on Android.
+
+## 0.9.4+19
+
+* Migrate deprecated Scaffold SnackBar methods to ScaffoldMessenger.
+
+## 0.9.4+18
+
+* Fixes a crash in iOS when streaming on low-performance devices.
+
+## 0.9.4+17
+
+* Removes obsolete information from README, and adds OS support table.
+
+## 0.9.4+16
+
+* Fixes a bug resulting in a `CameraAccessException` that prevents image
+ capture on some Android devices.
+
+## 0.9.4+15
+
+* Uses dispatch queue for pixel buffer synchronization on iOS.
+* Minor iOS internal code cleanup related to queue helper functions.
+
+## 0.9.4+14
+
+* Restores compatibility with Flutter 2.5 and 2.8.
+
+## 0.9.4+13
+
+* Updates iOS camera's photo capture delegate reference on a background queue to prevent potential race conditions, and some related internal code cleanup.
+
+## 0.9.4+12
+
+* Skips unnecessary AppDelegate setup for unit tests on iOS.
+* Internal code cleanup for stricter analysis options.
+
+## 0.9.4+11
+
+* Manages iOS camera's orientation-related states on a background queue to prevent potential race conditions.
+
+## 0.9.4+10
+
+* iOS performance improvement by moving file writing from the main queue to a background IO queue.
+
+## 0.9.4+9
+
+* iOS performance improvement by moving sample buffer handling from the main queue to a background session queue.
+* Minor iOS internal code cleanup related to camera class and its delegate.
+* Minor iOS internal code cleanup related to resolution preset, video format, focus mode, exposure mode and device orientation.
+* Minor iOS internal code cleanup related to flash mode.
+
+## 0.9.4+8
+
+* Fixes a bug where ImageFormatGroup was ignored in `startImageStream` on iOS.
+
+## 0.9.4+7
+
+* Fixes a crash in iOS when passing null queue pointer into AVFoundation API due to race condition.
+* Minor iOS internal code cleanup related to dispatch queue.
+
+## 0.9.4+6
+
+* Fixes a crash in iOS when using image stream due to calling Flutter engine API on non-main thread.
+
+## 0.9.4+5
+
+* Fixes bug where calling a method after the camera was closed resulted in a Java `IllegalStateException` exception.
+* Fixes integration tests.
+
+## 0.9.4+4
+
+* Change Android compileSdkVersion to 31.
+* Remove usages of deprecated Android API `CamcorderProfile`.
+* Update gradle version to 7.0.2 on Android.
+
+## 0.9.4+3
+
+* Fix registerTexture and result being called on background thread on iOS.
+
+## 0.9.4+2
+
+* Updated package description.
+* Refactor unit test on iOS to make it compatible with new restrictions in Xcode 13 which only supports the use of the `XCUIDevice` in Xcode UI tests.
+
+## 0.9.4+1
+
+* Fixed Android implementation throwing IllegalStateException when switching to a different activity.
+
+## 0.9.4
+
+* Add web support by endorsing `package:camera_web`.
+
+## 0.9.3+1
+
+* Remove iOS 9 availability check around ultra high capture sessions.
+
+## 0.9.3
+
+* Update minimum Flutter SDK to 2.5 and iOS deployment target to 9.0.
+
+## 0.9.2+2
+
+* Ensure that setting the exposure offset returns the new offset value on Android.
+
+## 0.9.2+1
+
+* Fixed camera controller throwing an exception when being replaced in the preview widget.
+
+## 0.9.2
+
+* Added functions to pause and resume the camera preview.
+
+## 0.9.1+1
+
+* Replace `device_info` reference with `device_info_plus` in the [README.md](README.md)
+
+## 0.9.1
+
+* Added `lensAperture`, `sensorExposureTime` and `sensorSensitivity` properties to the `CameraImage` dto.
+
+## 0.9.0
+
+* Complete rewrite of Android plugin to fix many capture, focus, flash, orientation and exposure issues.
+* Fixed crash when opening front-facing cameras on some legacy android devices like Sony XZ.
+* Android Flash mode works with full precapture sequence.
+* Updated Android lint settings.
+
+## 0.8.1+7
+
+* Fix device orientation sometimes not affecting the camera preview orientation.
+
+## 0.8.1+6
+
+* Remove references to the Android V1 embedding.
+
+## 0.8.1+5
+
+* Make sure the `setFocusPoint` and `setExposurePoint` coordinates work correctly in all orientations on iOS (instead of only in portrait mode).
+
+## 0.8.1+4
+
+* Silenced warnings that may occur during build when using a very
+ recent version of Flutter relating to null safety.
+
+## 0.8.1+3
+
+* Do not change camera orientation when iOS device is flat.
+
+## 0.8.1+2
+
+* Fix iOS crash when selecting an unsupported FocusMode.
+
+## 0.8.1+1
+
+* Migrate maven repository from jcenter to mavenCentral.
+
+## 0.8.1
+
+* Solved a rotation issue on iOS which caused the default preview to be displayed as landscape right instead of portrait.
+
+## 0.8.0
+
+* Stable null safety release.
+* Solved delay when using the zoom feature on iOS.
+* Added a timeout to the pre-capture sequence on Android to prevent crashes when the camera cannot get a focus.
+* Updates the example code listed in the [README.md](README.md), so it runs without errors when you simply copy/ paste it into a Flutter App.
+
+## 0.7.0+4
+
+* Fix crash when taking picture with orientation lock
+
+## 0.7.0+3
+
+* Clockwise rotation of focus point in android
+
+## 0.7.0+2
+
+* Fix example reference in README.
+* Revert compileSdkVersion back to 29 (from 30) as this is causing problems with add-to-app configurations.
+
+## 0.7.0+1
+
+* Ensure communication from JAVA to Dart is done on the main UI thread.
+
+## 0.7.0
+
+* BREAKING CHANGE: `CameraValue.aspectRatio` now returns `width / height` rather than `height / width`. [(commit)](https://github.com/flutter/plugins/commit/100c7470d4066b1d0f8f7e4ec6d7c943e736f970)
+ * Added support for capture orientation locking on Android and iOS.
+ * Fixed camera preview not rotating correctly on Android and iOS.
+ * Fixed camera preview sometimes appearing stretched on Android and iOS.
+ * Fixed videos & photos saving with the incorrect rotation on iOS.
+* New Features:
+  * Adds auto focus support for Android and iOS implementations. [(commit)](https://github.com/flutter/plugins/commit/71a831790220f898bf8120c8a23840ac6e742db5)
+ * Adds ImageFormat selection for ImageStream and Video(iOS only). [(commit)](https://github.com/flutter/plugins/commit/da1b4638b750a5ff832d7be86a42831c42c6d6c0)
+* Bug Fixes:
+ * Fixes crash when taking a picture on iOS devices without flash. [(commit)](https://github.com/flutter/plugins/commit/831344490984b1feec007afc9c8595d80b6c13f4)
+ * Make sure the configured zoom scale is copied over to the final capture builder on Android. Fixes the issue where the preview is zoomed but the final picture is not. [(commit)](https://github.com/flutter/plugins/commit/5916f55664e1772a4c3f0c02c5c71fc11e491b76)
+ * Fixes crash with using inner camera on some Android devices. [(commit)](https://github.com/flutter/plugins/commit/980b674cb4020c1927917426211a87e275346d5e)
+ * Improved error feedback by differentiating between uninitialized and disposed camera controllers. [(commit)](https://github.com/flutter/plugins/commit/d0b7109f6b00a0eda03506fed2c74cc123ffc6f3)
+ * Fixes picture captures causing a crash on some Huawei devices. [(commit)](https://github.com/flutter/plugins/commit/6d18db83f00f4861ffe485aba2d1f8aa08845ce6)
+
+## 0.6.4+5
+
+* Update the example app: remove the deprecated `RaisedButton` and `FlatButton` widgets.
+
+## 0.6.4+4
+
+* Set camera auto focus enabled by default.
+
+## 0.6.4+3
+
+* Detect if selected camera supports auto focus and act accordingly on Android. This solves a problem where front facing cameras are not capturing the picture because auto focus is not supported.
+
+## 0.6.4+2
+
+* Set ImageStreamReader listener to null to prevent stale images when streaming images.
+
+## 0.6.4+1
+
+* Added closeCaptureSession() to stopVideoRecording in Camera.java to fix an Android 6 crash.
+
+## 0.6.4
+
+* Adds auto exposure support for Android and iOS implementations.
+
+## 0.6.3+4
+
+* Revert previous dependency update: Changed dependency on camera_platform_interface to >=1.0.4 <1.1.0.
+
+## 0.6.3+3
+
+* Updated dependency on camera_platform_interface to ^1.2.0.
+
+## 0.6.3+2
+
+* Fixes crash on Android which occurs after video recording has stopped just before taking a picture.
+
+## 0.6.3+1
+
+* Fixes flash & torch modes not working on some Android devices.
+
+## 0.6.3
+
+* Adds torch mode as a flash mode for Android and iOS implementations.
+
+## 0.6.2+1
+
+* Fix the API documentation for the `CameraController.takePicture` method.
+
+## 0.6.2
+
+* Add zoom support for Android and iOS implementations.
+
+## 0.6.1+1
+
+* Added implementation of the `didFinishProcessingPhoto` on iOS which allows saving image metadata (EXIF) on iOS 11 and up.
+
+## 0.6.1
+
+* Add flash support for Android and iOS implementations.
+
+## 0.6.0+2
+
+* Fix outdated links across a number of markdown files ([#3276](https://github.com/flutter/plugins/pull/3276))
+
+## 0.6.0+1
+
+Updated README to inform users that iOS 10.0+ is needed for use
+
+## 0.6.0
+
+As part of implementing federated architecture and making the interface compatible with the web this version contains the following **breaking changes**:
+
+Method changes in `CameraController`:
+- The `takePicture` method no longer accepts the `path` parameter, but instead returns the captured image as an instance of the `XFile` class;
+- The `startVideoRecording` method no longer accepts the `filePath`. Instead the recorded video is now returned as a `XFile` instance when the `stopVideoRecording` method completes;
+- The `stopVideoRecording` method now returns the captured video when it completes;
+- Added the `buildPreview` method which is now used to implement the CameraPreview widget.
+
+## 0.5.8+19
+
+* Update Flutter SDK constraint.
+
+## 0.5.8+18
+
+* Suppress unchecked warning in Android tests which prevented the tests to compile.
+
+## 0.5.8+17
+
+* Added Android 30 support.
+
+## 0.5.8+16
+
+* Moved package to camera/camera subdir, to allow for federated implementations.
+
+## 0.5.8+15
+
+* Added the `debugCheckIsDisposed` method which can be used in debug mode to validate if the `CameraController` class has been disposed.
+
+## 0.5.8+14
+
+* Changed the order of the setters for `mediaRecorder` in `MediaRecorderBuilder.java` to make it more readable.
+
+## 0.5.8+13
+
+* Added Dartdocs for all public APIs.
+
+## 0.5.8+12
+
+* Added information of video not working correctly on Android emulators to `README.md`.
+
+## 0.5.8+11
+
+* Fix rare nullptr exception on Android.
+* Updated README.md with information about handling App lifecycle changes.
+
+## 0.5.8+10
+
+* Suppress the `deprecated_member_use` warning in the example app for `ScaffoldMessenger.showSnackBar`.
+
+## 0.5.8+9
+
+* Update android compileSdkVersion to 29.
+
+## 0.5.8+8
+
+* Fixed garbled audio (in video) by setting audio encoding bitrate.
+
+## 0.5.8+7
+
+* Keep handling deprecated Android v1 classes for backward compatibility.
+
+## 0.5.8+6
+
+* Avoiding uses or overrides a deprecated API in CameraPlugin.java.
+
+## 0.5.8+5
+
+* Fix compilation/availability issues on iOS.
+
+## 0.5.8+4
+
+* Fixed bug caused by casting a `CameraAccessException` on Android.
+
+## 0.5.8+3
+
+* Fix bug in usage example in README.md
+
+## 0.5.8+2
+
+* Post-v2 embedding cleanups.
+
+## 0.5.8+1
+
+* Update lower bound of dart dependency to 2.1.0.
+
+## 0.5.8
+
+* Remove Android dependencies fallback.
+* Require Flutter SDK 1.12.13+hotfix.5 or greater.
+
+## 0.5.7+5
+
+* Replace deprecated `getFlutterEngine` call on Android.
+
+## 0.5.7+4
+
+* Add `pedantic` to dev_dependency.
+
+## 0.5.7+3
+
+* Fix an Android crash when permissions are requested multiple times.
+
+## 0.5.7+2
+
+* Remove the deprecated `author:` field from pubspec.yaml
+* Migrate the plugin to the pubspec platforms manifest.
+* Require Flutter SDK 1.10.0 or greater.
+
+## 0.5.7+1
+
+* Fix example null exception.
+
+## 0.5.7
+
+* Fix unawaited futures.
+
+## 0.5.6+4
+
+* Android: Use CameraDevice.TEMPLATE_RECORD to improve image streaming.
+
+## 0.5.6+3
+
+* Remove AndroidX warning.
+
+## 0.5.6+2
+
+* Include lifecycle dependency as a compileOnly one on Android to resolve
+ potential version conflicts with other transitive libraries.
+
+## 0.5.6+1
+
+* Android: Use android.arch.lifecycle instead of androidx.lifecycle:lifecycle in `build.gradle` to support apps that has not been migrated to AndroidX.
+
+## 0.5.6
+
+* Add support for the v2 Android embedding. This shouldn't affect existing
+ functionality.
+
+## 0.5.5+1
+
+* Fix event type check
+
+## 0.5.5
+
+* Define clang modules for iOS.
+
+## 0.5.4+3
+
+* Update and migrate iOS example project.
+
+## 0.5.4+2
+
+* Fix Android NullPointerException on devices with only front-facing camera.
+
+## 0.5.4+1
+
+* Fix Android pause and resume video crash when executing in APIs below 24.
+
+## 0.5.4
+
+* Add feature to pause and resume video recording.
+
+## 0.5.3+1
+
+* Fix too large request code for FragmentActivity users.
+
+## 0.5.3
+
+* Added new quality presets.
+* Now all quality presets can be used to control image capture quality.
+
+## 0.5.2+2
+
+* Fix memory leak related to not unregistering stream handler in FlutterEventChannel when disposing camera.
+
+## 0.5.2+1
+
+* Fix bug that prevented video recording with audio.
+
+## 0.5.2
+
+* Added capability to disable audio for the `CameraController`. (e.g. `CameraController(_, _,
+ enableAudio: false);`)
+
+## 0.5.1
+
+* Can now be compiled with earlier Android sdks below 21 when
+`<uses-sdk tools:overrideLibrary="io.flutter.plugins.camera"/>` has been added to the project
+`AndroidManifest.xml`. For sdks below 21, the plugin won't be registered and calls to it will throw
+a `MissingPluginException`.
+
+## 0.5.0
+
+* **Breaking Change** This plugin no longer handles closing and opening the camera on Android
+ lifecycle changes. Please use `WidgetsBindingObserver` to control camera resources on lifecycle
+ changes. See example project for example using `WidgetsBindingObserver`.
+
+## 0.4.3+2
+
+* Bump the minimum Flutter version to 1.2.0.
+* Add template type parameter to `invokeMethod` calls.
+
+## 0.4.3+1
+
+* Catch additional `Exception`s from Android and throw as `CameraException`s.
+
+## 0.4.3
+
+* Add capability to prepare the capture session for video recording on iOS.
+
+## 0.4.2
+
+* Add sensor orientation value to `CameraDescription`.
+
+## 0.4.1
+
+* Camera methods are run in a background thread on iOS.
+
+## 0.4.0+3
+
+* Fixed a crash when the plugin is registered by a background FlutterView.
+
+## 0.4.0+2
+
+* Fix orientation of captured photos when camera is used for the first time on Android.
+
+## 0.4.0+1
+
+* Remove categories.
+
+## 0.4.0
+
+* **Breaking Change** Change iOS image stream format to `ImageFormatGroup.bgra8888` from
+ `ImageFormatGroup.yuv420`.
+
+## 0.3.0+4
+
+* Fixed bug causing black screen on some Android devices.
+
+## 0.3.0+3
+
+* Log a more detailed warning at build time about the previous AndroidX
+ migration.
+
+## 0.3.0+2
+
+* Fix issue with calculating iOS image orientation in certain edge cases.
+
+## 0.3.0+1
+
+* Remove initial method call invocation from static camera method.
+
+## 0.3.0
+
+* **Breaking change**. Migrate from the deprecated original Android Support
+ Library to AndroidX. This shouldn't result in any functional changes, but it
+ requires any Android apps using this plugin to [also
+ migrate](https://developer.android.com/jetpack/androidx/migrate) if they're
+ using the original support library.
+
+## 0.2.9+1
+
+* Fix a crash when failing to start preview.
+
+## 0.2.9
+
+* Save photo orientation data on iOS.
+
+## 0.2.8
+
+* Add access to the image stream from Dart.
+* Use `cameraController.startImageStream(listener)` to process the images.
+
+## 0.2.7
+
+* Fix issue with crash when the physical device's orientation is unknown.
+
+## 0.2.6
+
+* Update the camera to use the physical device's orientation instead of the UI
+ orientation on Android.
+
+## 0.2.5
+
+* Fix preview and video size with satisfying conditions of multiple outputs.
+
+## 0.2.4
+
+* Unregister the activity lifecycle callbacks when disposing the camera.
+
+## 0.2.3
+
+* Added path_provider and video_player as dev dependencies because the example uses them.
+* Updated example path_provider version to get Dart 2 support.
+
+## 0.2.2
+
+* iOS image capture is done in high quality (full camera size)
+
+## 0.2.1
+
+* Updated Gradle tooling to match Android Studio 3.1.2.
+
+## 0.2.0
+
+* Added support for video recording.
+* Changed the example app to add video recording.
+
+A lot of **breaking changes** in this version:
+
+Getter changes:
+ - Removed `isStarted`
+ - Renamed `initialized` to `isInitialized`
+ - Added `isRecordingVideo`
+
+Method changes:
+ - Renamed `capture` to `takePicture`
+ - Removed `start` (the preview starts automatically when `initialize` is called)
+ - Added `startVideoRecording(String filePath)`
+ - Removed `stop` (the preview stops automatically when `dispose` is called)
+ - Added `stopVideoRecording`
+
+## 0.1.2
+
+* Fix Dart 2 runtime errors.
+
+## 0.1.1
+
+* Fix Dart 2 runtime error.
+
+## 0.1.0
+
+* **Breaking change**. Set SDK constraints to match the Flutter beta release.
+
+## 0.0.4
+
+* Revert regression of `CameraController.capture()` introduced in v. 0.0.3.
+
+## 0.0.3
+
+* Improved resource cleanup on Android. Avoids crash on Activity restart.
+* Made the Future returned by `CameraController.dispose()` and `CameraController.capture()` actually complete on
+ Android.
+
+## 0.0.2
+
+* Simplified and upgraded Android project template to Android SDK 27.
+* Moved Android package to io.flutter.plugins.
+* Fixed warnings from the Dart 2.0 analyzer.
+
+## 0.0.1
+
+* Initial release
diff --git a/packages/camera/camera/LICENSE b/packages/camera/camera/LICENSE
new file mode 100644
index 0000000..c6823b8
--- /dev/null
+++ b/packages/camera/camera/LICENSE
@@ -0,0 +1,25 @@
+Copyright 2013 The Flutter Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/packages/camera/camera/README.md b/packages/camera/camera/README.md
new file mode 100644
index 0000000..86b0355
--- /dev/null
+++ b/packages/camera/camera/README.md
@@ -0,0 +1,174 @@
+# Camera Plugin
+
+<?code-excerpt path-base="excerpts/packages/camera_example"?>
+
+[](https://pub.dev/packages/camera)
+
+A Flutter plugin for iOS, Android and Web allowing access to the device cameras.
+
+| | Android | iOS | Web |
+|----------------|---------|----------|------------------------|
+| **Support** | SDK 21+ | iOS 10+* | [See `camera_web `][1] |
+
+## Features
+
+* Display live camera preview in a widget.
+* Snapshots can be captured and saved to a file.
+* Record video.
+* Add access to the image stream from Dart.
+
+## Installation
+
+First, add `camera` as a [dependency in your pubspec.yaml file](https://flutter.dev/using-packages/).
+
+### iOS
+
+\* The camera plugin compiles for any version of iOS, but its functionality
+requires iOS 10 or higher. If compiling for iOS 9, make sure to programmatically
+check the version of iOS running on the device before using any camera plugin features.
+The [device_info_plus](https://pub.dev/packages/device_info_plus) plugin, for example, can be used to check the iOS version.
+
+Add two rows to the `ios/Runner/Info.plist`:
+
+* one with the key `Privacy - Camera Usage Description` and a usage description.
+* and one with the key `Privacy - Microphone Usage Description` and a usage description.
+
+If editing `Info.plist` as text, add:
+
+```xml
+<key>NSCameraUsageDescription</key>
+<string>your usage description here</string>
+<key>NSMicrophoneUsageDescription</key>
+<string>your usage description here</string>
+```
+
+### Android
+
+Change the minimum Android sdk version to 21 (or higher) in your `android/app/build.gradle` file.
+
+```groovy
+minSdkVersion 21
+```
+
+It's important to note that the `MediaRecorder` class is not working properly on emulators, as stated in the documentation: https://developer.android.com/reference/android/media/MediaRecorder. Specifically, when recording a video with sound enabled and trying to play it back, the duration won't be correct and you will only see the first frame.
+
+### Web integration
+
+For web integration details, see the
+[`camera_web` package](https://pub.dev/packages/camera_web).
+
+### Handling Lifecycle states
+
+As of version [0.5.0](https://github.com/flutter/plugins/blob/main/packages/camera/CHANGELOG.md#050) of the camera plugin, lifecycle changes are no longer handled by the plugin. This means developers are now responsible to control camera resources when the lifecycle state is updated. Failure to do so might lead to unexpected behavior (for example as described in issue [#39109](https://github.com/flutter/flutter/issues/39109)). Handling lifecycle changes can be done by overriding the `didChangeAppLifecycleState` method like so:
+
+<?code-excerpt "main.dart (AppLifecycle)"?>
+```dart
+@override
+void didChangeAppLifecycleState(AppLifecycleState state) {
+ final CameraController? cameraController = controller;
+
+ // App state changed before we got the chance to initialize.
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ return;
+ }
+
+ if (state == AppLifecycleState.inactive) {
+ cameraController.dispose();
+ } else if (state == AppLifecycleState.resumed) {
+ onNewCameraSelected(cameraController.description);
+ }
+}
+```
+
+### Handling camera access permissions
+
+Permission errors may be thrown when initializing the camera controller, and you are expected to handle them properly.
+
+Here is a list of all permission error codes that can be thrown:
+
+- `CameraAccessDenied`: Thrown when user denies the camera access permission.
+
+- `CameraAccessDeniedWithoutPrompt`: iOS only for now. Thrown when user has previously denied the permission. iOS does not allow prompting alert dialog a second time. Users will have to go to Settings > Privacy > Camera in order to enable camera access.
+
+- `CameraAccessRestricted`: iOS only for now. Thrown when camera access is restricted and users cannot grant permission (parental control).
+
+- `AudioAccessDenied`: Thrown when user denies the audio access permission.
+
+- `AudioAccessDeniedWithoutPrompt`: iOS only for now. Thrown when user has previously denied the permission. iOS does not allow prompting alert dialog a second time. Users will have to go to Settings > Privacy > Microphone in order to enable audio access.
+
+- `AudioAccessRestricted`: iOS only for now. Thrown when audio access is restricted and users cannot grant permission (parental control).
+
+### Example
+
+Here is a small example flutter app displaying a full screen camera preview.
+
+<?code-excerpt "readme_full_example.dart (FullAppExample)"?>
+```dart
+import 'package:camera/camera.dart';
+import 'package:flutter/material.dart';
+
+late List<CameraDescription> _cameras;
+
+Future<void> main() async {
+ WidgetsFlutterBinding.ensureInitialized();
+
+ _cameras = await availableCameras();
+ runApp(const CameraApp());
+}
+
+/// CameraApp is the Main Application.
+class CameraApp extends StatefulWidget {
+ /// Default Constructor
+ const CameraApp({Key? key}) : super(key: key);
+
+ @override
+ State<CameraApp> createState() => _CameraAppState();
+}
+
+class _CameraAppState extends State<CameraApp> {
+ late CameraController controller;
+
+ @override
+ void initState() {
+ super.initState();
+ controller = CameraController(_cameras[0], ResolutionPreset.max);
+ controller.initialize().then((_) {
+ if (!mounted) {
+ return;
+ }
+ setState(() {});
+ }).catchError((Object e) {
+ if (e is CameraException) {
+ switch (e.code) {
+ case 'CameraAccessDenied':
+ // Handle access errors here.
+ break;
+ default:
+ // Handle other errors here.
+ break;
+ }
+ }
+ });
+ }
+
+ @override
+ void dispose() {
+ controller.dispose();
+ super.dispose();
+ }
+
+ @override
+ Widget build(BuildContext context) {
+ if (!controller.value.isInitialized) {
+ return Container();
+ }
+ return MaterialApp(
+ home: CameraPreview(controller),
+ );
+ }
+}
+```
+
+For a more elaborate usage example see [here](https://github.com/flutter/plugins/tree/main/packages/camera/camera/example).
+
+[1]: https://pub.dev/packages/camera_web#limitations-on-the-web-platform
diff --git a/packages/camera/camera/example/android/app/build.gradle b/packages/camera/camera/example/android/app/build.gradle
new file mode 100644
index 0000000..5d6af58
--- /dev/null
+++ b/packages/camera/camera/example/android/app/build.gradle
@@ -0,0 +1,64 @@
+def localProperties = new Properties()
+def localPropertiesFile = rootProject.file('local.properties')
+if (localPropertiesFile.exists()) {
+ localPropertiesFile.withReader('UTF-8') { reader ->
+ localProperties.load(reader)
+ }
+}
+
+def flutterRoot = localProperties.getProperty('flutter.sdk')
+if (flutterRoot == null) {
+ throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
+}
+
+def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
+if (flutterVersionCode == null) {
+ flutterVersionCode = '1'
+}
+
+def flutterVersionName = localProperties.getProperty('flutter.versionName')
+if (flutterVersionName == null) {
+ flutterVersionName = '1.0'
+}
+
+apply plugin: 'com.android.application'
+apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
+
+android {
+ compileSdkVersion 31
+
+ lintOptions {
+ disable 'InvalidPackage'
+ }
+
+ defaultConfig {
+ applicationId "io.flutter.plugins.cameraexample"
+ minSdkVersion 21
+ targetSdkVersion 28
+ versionCode flutterVersionCode.toInteger()
+ versionName flutterVersionName
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ }
+
+ buildTypes {
+ release {
+ // TODO: Add your own signing config for the release build.
+ // Signing with the debug keys for now, so `flutter run --release` works.
+ signingConfig signingConfigs.debug
+ }
+ profile {
+ matchingFallbacks = ['debug', 'release']
+ }
+ }
+}
+
+flutter {
+ source '../..'
+}
+
+dependencies {
+ testImplementation 'junit:junit:4.13.2'
+ androidTestImplementation 'androidx.test:runner:1.2.0'
+ androidTestImplementation 'androidx.test:rules:1.2.0'
+ androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
+}
diff --git a/packages/camera/camera/example/android/app/gradle/wrapper/gradle-wrapper.properties b/packages/camera/camera/example/android/app/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..29e4134
--- /dev/null
+++ b/packages/camera/camera/example/android/app/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/packages/camera/camera/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java b/packages/camera/camera/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java
new file mode 100644
index 0000000..0f4298d
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface DartIntegrationTest {}
diff --git a/packages/camera/camera/example/android/app/src/androidTest/java/io/flutter/plugins/cameraexample/FlutterActivityTest.java b/packages/camera/camera/example/android/app/src/androidTest/java/io/flutter/plugins/cameraexample/FlutterActivityTest.java
new file mode 100644
index 0000000..39cae48
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/androidTest/java/io/flutter/plugins/cameraexample/FlutterActivityTest.java
@@ -0,0 +1,19 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.cameraexample;
+
+import androidx.test.rule.ActivityTestRule;
+import dev.flutter.plugins.integration_test.FlutterTestRunner;
+import io.flutter.embedding.android.FlutterActivity;
+import io.flutter.plugins.DartIntegrationTest;
+import org.junit.Rule;
+import org.junit.runner.RunWith;
+
+@DartIntegrationTest
+@RunWith(FlutterTestRunner.class)
+public class FlutterActivityTest {
+ @Rule
+ public ActivityTestRule<FlutterActivity> rule = new ActivityTestRule<>(FlutterActivity.class);
+}
diff --git a/packages/camera/camera/example/android/app/src/main/AndroidManifest.xml b/packages/camera/camera/example/android/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..cef2316
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/main/AndroidManifest.xml
@@ -0,0 +1,28 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="io.flutter.plugins.cameraexample">
+
+ <application
+ android:icon="@mipmap/ic_launcher"
+ android:label="camera_example">
+ <activity
+ android:configChanges="orientation|keyboardHidden|keyboard|screenSize|locale|layoutDirection"
+ android:hardwareAccelerated="true"
+ android:launchMode="singleTop"
+ android:name="io.flutter.embedding.android.FlutterActivity"
+ android:theme="@style/LaunchTheme"
+ android:windowSoftInputMode="adjustResize">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+ </activity>
+ <meta-data android:name="flutterEmbedding" android:value="2"/>
+ </application>
+
+ <uses-feature
+ android:name="android.hardware.camera"
+ android:required="true"/>
+
+ <uses-permission android:name="android.permission.INTERNET"/>
+ <uses-permission android:name="android.permission.FLASHLIGHT"/>
+</manifest>
diff --git a/packages/camera/camera/example/android/app/src/main/res/drawable/launch_background.xml b/packages/camera/camera/example/android/app/src/main/res/drawable/launch_background.xml
new file mode 100644
index 0000000..304732f
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/main/res/drawable/launch_background.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Modify this file to customize your launch splash screen -->
+<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
+ <item android:drawable="@android:color/white" />
+
+ <!-- You can insert your own image assets here -->
+ <!-- <item>
+ <bitmap
+ android:gravity="center"
+ android:src="@mipmap/launch_image" />
+ </item> -->
+</layer-list>
diff --git a/packages/camera/camera/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/packages/camera/camera/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..db77bb4
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/packages/camera/camera/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..17987b7
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/packages/camera/camera/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..09d4391
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/packages/camera/camera/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..d5f1c8d
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/packages/camera/camera/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..4d6372e
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera/example/android/app/src/main/res/values/styles.xml b/packages/camera/camera/example/android/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..00fa441
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/main/res/values/styles.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
+ <!-- Show a splash screen on the activity. Automatically removed when
+ Flutter draws its first frame -->
+ <item name="android:windowBackground">@drawable/launch_background</item>
+ </style>
+</resources>
diff --git a/packages/camera/camera/example/android/build.gradle b/packages/camera/camera/example/android/build.gradle
new file mode 100644
index 0000000..c21bff8
--- /dev/null
+++ b/packages/camera/camera/example/android/build.gradle
@@ -0,0 +1,29 @@
+buildscript {
+ repositories {
+ google()
+ mavenCentral()
+ }
+
+ dependencies {
+ classpath 'com.android.tools.build:gradle:7.0.1'
+ }
+}
+
+allprojects {
+ repositories {
+ google()
+ mavenCentral()
+ }
+}
+
+rootProject.buildDir = '../build'
+subprojects {
+ project.buildDir = "${rootProject.buildDir}/${project.name}"
+}
+subprojects {
+ project.evaluationDependsOn(':app')
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/packages/camera/camera/example/android/gradle.properties b/packages/camera/camera/example/android/gradle.properties
new file mode 100644
index 0000000..d0448f1
--- /dev/null
+++ b/packages/camera/camera/example/android/gradle.properties
@@ -0,0 +1,4 @@
+org.gradle.jvmargs=-Xmx4G
+android.useAndroidX=true
+android.enableJetifier=false
+android.enableR8=true
diff --git a/packages/camera/camera/example/android/gradle/wrapper/gradle-wrapper.properties b/packages/camera/camera/example/android/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..297f2fe
--- /dev/null
+++ b/packages/camera/camera/example/android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip
diff --git a/packages/camera/camera/example/android/settings.gradle b/packages/camera/camera/example/android/settings.gradle
new file mode 100644
index 0000000..115da6c
--- /dev/null
+++ b/packages/camera/camera/example/android/settings.gradle
@@ -0,0 +1,15 @@
+include ':app'
+
+def flutterProjectRoot = rootProject.projectDir.parentFile.toPath()
+
+def plugins = new Properties()
+def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins')
+if (pluginsFile.exists()) {
+ pluginsFile.withInputStream { stream -> plugins.load(stream) }
+}
+
+plugins.each { name, path ->
+ def pluginDirectory = flutterProjectRoot.resolve(path).resolve('android').toFile()
+ include ":$name"
+ project(":$name").projectDir = pluginDirectory
+}
diff --git a/packages/camera/camera/example/build.excerpt.yaml b/packages/camera/camera/example/build.excerpt.yaml
new file mode 100644
index 0000000..e317efa
--- /dev/null
+++ b/packages/camera/camera/example/build.excerpt.yaml
@@ -0,0 +1,15 @@
+targets:
+ $default:
+ sources:
+ include:
+ - lib/**
+ # Some default includes that aren't really used here but will prevent
+ # false-negative warnings:
+ - $package$
+ - lib/$lib$
+ exclude:
+ - '**/.*/**'
+ - '**/build/**'
+ builders:
+ code_excerpter|code_excerpter:
+ enabled: true
diff --git a/packages/camera/camera/example/integration_test/camera_test.dart b/packages/camera/camera/example/integration_test/camera_test.dart
new file mode 100644
index 0000000..f0cc67f
--- /dev/null
+++ b/packages/camera/camera/example/integration_test/camera_test.dart
@@ -0,0 +1,293 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:io';
+import 'dart:ui';
+
+import 'package:camera/camera.dart';
+import 'package:flutter/painting.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:path_provider/path_provider.dart';
+import 'package:video_player/video_player.dart';
+
+void main() {
+ late Directory testDir;
+
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ setUpAll(() async {
+ final Directory extDir = await getTemporaryDirectory();
+ testDir = await Directory('${extDir.path}/test').create(recursive: true);
+ });
+
+ tearDownAll(() async {
+ await testDir.delete(recursive: true);
+ });
+
+ final Map<ResolutionPreset, Size> presetExpectedSizes =
+ <ResolutionPreset, Size>{
+ ResolutionPreset.low:
+ Platform.isAndroid ? const Size(240, 320) : const Size(288, 352),
+ ResolutionPreset.medium:
+ Platform.isAndroid ? const Size(480, 720) : const Size(480, 640),
+ ResolutionPreset.high: const Size(720, 1280),
+ ResolutionPreset.veryHigh: const Size(1080, 1920),
+ ResolutionPreset.ultraHigh: const Size(2160, 3840),
+ // Don't bother checking for max here since it could be anything.
+ };
+
+ /// Verify that [actual] has dimensions that are at least as large as
+ /// [expectedSize]. Allows for a mismatch in portrait vs landscape. Returns
+ /// whether the dimensions exactly match.
+ bool assertExpectedDimensions(Size expectedSize, Size actual) {
+ expect(actual.shortestSide, lessThanOrEqualTo(expectedSize.shortestSide));
+ expect(actual.longestSide, lessThanOrEqualTo(expectedSize.longestSide));
+ return actual.shortestSide == expectedSize.shortestSide &&
+ actual.longestSide == expectedSize.longestSide;
+ }
+
+ // This tests that the capture is no bigger than the preset, since we have
+ // automatic code to fall back to smaller sizes when we need to. Returns
+ // whether the image is exactly the desired resolution.
+ Future<bool> testCaptureImageResolution(
+ CameraController controller, ResolutionPreset preset) async {
+ final Size expectedSize = presetExpectedSizes[preset]!;
+
+ // Take Picture
+ final XFile file = await controller.takePicture();
+
+ // Load picture
+ final File fileImage = File(file.path);
+ final Image image = await decodeImageFromList(fileImage.readAsBytesSync());
+
+ // Verify image dimensions are as expected
+ expect(image, isNotNull);
+ return assertExpectedDimensions(
+ expectedSize, Size(image.height.toDouble(), image.width.toDouble()));
+ }
+
+ testWidgets(
+ 'Capture specific image resolutions',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras = await availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+ for (final CameraDescription cameraDescription in cameras) {
+ bool previousPresetExactlySupported = true;
+ for (final MapEntry<ResolutionPreset, Size> preset
+ in presetExpectedSizes.entries) {
+ final CameraController controller =
+ CameraController(cameraDescription, preset.key);
+ await controller.initialize();
+ final bool presetExactlySupported =
+ await testCaptureImageResolution(controller, preset.key);
+ assert(!(!previousPresetExactlySupported && presetExactlySupported),
+ 'The camera took higher resolution pictures at a lower resolution.');
+ previousPresetExactlySupported = presetExactlySupported;
+ await controller.dispose();
+ }
+ }
+ },
+ // TODO(egarciad): Fix https://github.com/flutter/flutter/issues/93686.
+ skip: true,
+ );
+
+ // This tests that the capture is no bigger than the preset, since we have
+ // automatic code to fall back to smaller sizes when we need to. Returns
+ // whether the image is exactly the desired resolution.
+ Future<bool> testCaptureVideoResolution(
+ CameraController controller, ResolutionPreset preset) async {
+ final Size expectedSize = presetExpectedSizes[preset]!;
+
+ // Take Video
+ await controller.startVideoRecording();
+ sleep(const Duration(milliseconds: 300));
+ final XFile file = await controller.stopVideoRecording();
+
+ // Load video metadata
+ final File videoFile = File(file.path);
+ final VideoPlayerController videoController =
+ VideoPlayerController.file(videoFile);
+ await videoController.initialize();
+ final Size video = videoController.value.size;
+
+ // Verify image dimensions are as expected
+ expect(video, isNotNull);
+ return assertExpectedDimensions(
+ expectedSize, Size(video.height, video.width));
+ }
+
+ testWidgets(
+ 'Capture specific video resolutions',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras = await availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+ for (final CameraDescription cameraDescription in cameras) {
+ bool previousPresetExactlySupported = true;
+ for (final MapEntry<ResolutionPreset, Size> preset
+ in presetExpectedSizes.entries) {
+ final CameraController controller =
+ CameraController(cameraDescription, preset.key);
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+ final bool presetExactlySupported =
+ await testCaptureVideoResolution(controller, preset.key);
+ assert(!(!previousPresetExactlySupported && presetExactlySupported),
+ 'The camera took higher resolution pictures at a lower resolution.');
+ previousPresetExactlySupported = presetExactlySupported;
+ await controller.dispose();
+ }
+ }
+ },
+ // TODO(egarciad): Fix https://github.com/flutter/flutter/issues/93686.
+ skip: true,
+ );
+
+ testWidgets('Pause and resume video recording', (WidgetTester tester) async {
+ final List<CameraDescription> cameras = await availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+
+ int startPause;
+ int timePaused = 0;
+
+ await controller.startVideoRecording();
+ final int recordingStart = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+
+ await controller.pauseVideoRecording();
+ startPause = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+ await controller.resumeVideoRecording();
+ timePaused += DateTime.now().millisecondsSinceEpoch - startPause;
+
+ sleep(const Duration(milliseconds: 500));
+
+ await controller.pauseVideoRecording();
+ startPause = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+ await controller.resumeVideoRecording();
+ timePaused += DateTime.now().millisecondsSinceEpoch - startPause;
+
+ sleep(const Duration(milliseconds: 500));
+
+ final XFile file = await controller.stopVideoRecording();
+ final int recordingTime =
+ DateTime.now().millisecondsSinceEpoch - recordingStart;
+
+ final File videoFile = File(file.path);
+ final VideoPlayerController videoController = VideoPlayerController.file(
+ videoFile,
+ );
+ await videoController.initialize();
+ final int duration = videoController.value.duration.inMilliseconds;
+ await videoController.dispose();
+
+ expect(duration, lessThan(recordingTime - timePaused));
+ }, skip: !Platform.isAndroid);
+
+ testWidgets(
+ 'Android image streaming',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras = await availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ bool isDetecting = false;
+
+ await controller.startImageStream((CameraImage image) {
+ if (isDetecting) {
+ return;
+ }
+
+ isDetecting = true;
+
+ expectLater(image, isNotNull).whenComplete(() => isDetecting = false);
+ });
+
+ expect(controller.value.isStreamingImages, true);
+
+ sleep(const Duration(milliseconds: 500));
+
+ await controller.stopImageStream();
+ await controller.dispose();
+ },
+ skip: !Platform.isAndroid,
+ );
+
+ /// Start streaming with specifying the ImageFormatGroup.
+ Future<CameraImage> startStreaming(List<CameraDescription> cameras,
+ ImageFormatGroup? imageFormatGroup) async {
+ final CameraController controller = CameraController(
+ cameras.first,
+ ResolutionPreset.low,
+ enableAudio: false,
+ imageFormatGroup: imageFormatGroup,
+ );
+
+ await controller.initialize();
+ final Completer<CameraImage> completer = Completer<CameraImage>();
+
+ await controller.startImageStream((CameraImage image) {
+ if (!completer.isCompleted) {
+ Future<void>(() async {
+ await controller.stopImageStream();
+ await controller.dispose();
+ }).then((Object? value) {
+ completer.complete(image);
+ });
+ }
+ });
+ return completer.future;
+ }
+
+ testWidgets(
+ 'iOS image streaming with imageFormatGroup',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras = await availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ CameraImage image = await startStreaming(cameras, null);
+ expect(image, isNotNull);
+ expect(image.format.group, ImageFormatGroup.bgra8888);
+ expect(image.planes.length, 1);
+
+ image = await startStreaming(cameras, ImageFormatGroup.yuv420);
+ expect(image, isNotNull);
+ expect(image.format.group, ImageFormatGroup.yuv420);
+ expect(image.planes.length, 2);
+
+ image = await startStreaming(cameras, ImageFormatGroup.bgra8888);
+ expect(image, isNotNull);
+ expect(image.format.group, ImageFormatGroup.bgra8888);
+ expect(image.planes.length, 1);
+ },
+ skip: !Platform.isIOS,
+ );
+}
diff --git a/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist b/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist
new file mode 100644
index 0000000..3a9c234
--- /dev/null
+++ b/packages/camera/camera/example/ios/Flutter/AppFrameworkInfo.plist
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>App</string>
+ <key>CFBundleIdentifier</key>
+ <string>io.flutter.flutter.app</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>App</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>arm64</string>
+ </array>
+ <key>MinimumOSVersion</key>
+ <string>9.0</string>
+</dict>
+</plist>
diff --git a/packages/camera/camera/example/ios/Flutter/Debug.xcconfig b/packages/camera/camera/example/ios/Flutter/Debug.xcconfig
new file mode 100644
index 0000000..b2f5fae
--- /dev/null
+++ b/packages/camera/camera/example/ios/Flutter/Debug.xcconfig
@@ -0,0 +1,3 @@
+#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
+#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
+#include "Generated.xcconfig"
diff --git a/packages/camera/camera/example/ios/Flutter/Release.xcconfig b/packages/camera/camera/example/ios/Flutter/Release.xcconfig
new file mode 100644
index 0000000..88c2914
--- /dev/null
+++ b/packages/camera/camera/example/ios/Flutter/Release.xcconfig
@@ -0,0 +1,3 @@
+#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
+#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
+#include "Generated.xcconfig"
diff --git a/packages/camera/camera/example/ios/Podfile b/packages/camera/camera/example/ios/Podfile
new file mode 100644
index 0000000..f7d6a5e
--- /dev/null
+++ b/packages/camera/camera/example/ios/Podfile
@@ -0,0 +1,38 @@
+# Uncomment this line to define a global platform for your project
+# platform :ios, '9.0'
+
+# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
+ENV['COCOAPODS_DISABLE_STATS'] = 'true'
+
+project 'Runner', {
+ 'Debug' => :debug,
+ 'Profile' => :release,
+ 'Release' => :release,
+}
+
+def flutter_root
+ generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__)
+ unless File.exist?(generated_xcode_build_settings_path)
+ raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first"
+ end
+
+ File.foreach(generated_xcode_build_settings_path) do |line|
+ matches = line.match(/FLUTTER_ROOT\=(.*)/)
+ return matches[1].strip if matches
+ end
+ raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get"
+end
+
+require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root)
+
+flutter_ios_podfile_setup
+
+target 'Runner' do
+ flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
+end
+
+post_install do |installer|
+ installer.pods_project.targets.each do |target|
+ flutter_additional_ios_build_settings(target)
+ end
+end
diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
new file mode 100644
index 0000000..99433b0
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
@@ -0,0 +1,472 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
+ 236906D1621AE863A5B2E770 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 89D82918721FABF772705DB0 /* libPods-Runner.a */; };
+ 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
+ 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; };
+ 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; };
+ 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
+ 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
+ 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXCopyFilesBuildPhase section */
+ 9705A1C41CF9048500538489 /* Embed Frameworks */ = {
+ isa = PBXCopyFilesBuildPhase;
+ buildActionMask = 2147483647;
+ dstPath = "";
+ dstSubfolderSpec = 10;
+ files = (
+ );
+ name = "Embed Frameworks";
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXCopyFilesBuildPhase section */
+
+/* Begin PBXFileReference section */
+ 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
+ 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
+ 14AE82C910C2A12F2ECB2094 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = "<group>"; };
+ 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+ 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
+ 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
+ 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
+ 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
+ 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
+ 89D82918721FABF772705DB0 /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+ 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
+ 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
+ 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ 97C146F21CF9000F007C117D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
+ 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
+ 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
+ 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
+ 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+ 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = "<group>"; };
+ A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 97C146EB1CF9000F007C117D /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 236906D1621AE863A5B2E770 /* libPods-Runner.a in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 3242FD2B467C15C62200632F /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ 89D82918721FABF772705DB0 /* libPods-Runner.a */,
+ 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */,
+ );
+ name = Frameworks;
+ sourceTree = "<group>";
+ };
+ 9740EEB11CF90186004384FC /* Flutter */ = {
+ isa = PBXGroup;
+ children = (
+ 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */,
+ 9740EEB21CF90195004384FC /* Debug.xcconfig */,
+ 7AFA3C8E1D35360C0083082E /* Release.xcconfig */,
+ 9740EEB31CF90195004384FC /* Generated.xcconfig */,
+ );
+ name = Flutter;
+ sourceTree = "<group>";
+ };
+ 97C146E51CF9000F007C117D = {
+ isa = PBXGroup;
+ children = (
+ 9740EEB11CF90186004384FC /* Flutter */,
+ 97C146F01CF9000F007C117D /* Runner */,
+ 97C146EF1CF9000F007C117D /* Products */,
+ FD386F00E98D73419C929072 /* Pods */,
+ 3242FD2B467C15C62200632F /* Frameworks */,
+ );
+ sourceTree = "<group>";
+ };
+ 97C146EF1CF9000F007C117D /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 97C146EE1CF9000F007C117D /* Runner.app */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 97C146F01CF9000F007C117D /* Runner */ = {
+ isa = PBXGroup;
+ children = (
+ 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */,
+ 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */,
+ 97C146FA1CF9000F007C117D /* Main.storyboard */,
+ 97C146FD1CF9000F007C117D /* Assets.xcassets */,
+ 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
+ 97C147021CF9000F007C117D /* Info.plist */,
+ 97C146F11CF9000F007C117D /* Supporting Files */,
+ 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
+ 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
+ );
+ path = Runner;
+ sourceTree = "<group>";
+ };
+ 97C146F11CF9000F007C117D /* Supporting Files */ = {
+ isa = PBXGroup;
+ children = (
+ 97C146F21CF9000F007C117D /* main.m */,
+ );
+ name = "Supporting Files";
+ sourceTree = "<group>";
+ };
+ FD386F00E98D73419C929072 /* Pods */ = {
+ isa = PBXGroup;
+ children = (
+ 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */,
+ 14AE82C910C2A12F2ECB2094 /* Pods-Runner.release.xcconfig */,
+ 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */,
+ A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */,
+ );
+ path = Pods;
+ sourceTree = "<group>";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ 97C146ED1CF9000F007C117D /* Runner */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
+ buildPhases = (
+ 9872F2A25E8A171A111468CD /* [CP] Check Pods Manifest.lock */,
+ 9740EEB61CF901F6004384FC /* Run Script */,
+ 97C146EA1CF9000F007C117D /* Sources */,
+ 97C146EB1CF9000F007C117D /* Frameworks */,
+ 97C146EC1CF9000F007C117D /* Resources */,
+ 9705A1C41CF9048500538489 /* Embed Frameworks */,
+ 3B06AD1E1E4923F5004D2608 /* Thin Binary */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = Runner;
+ productName = Runner;
+ productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
+ productType = "com.apple.product-type.application";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 97C146E61CF9000F007C117D /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastUpgradeCheck = 1300;
+ ORGANIZATIONNAME = "The Flutter Authors";
+ TargetAttributes = {
+ 97C146ED1CF9000F007C117D = {
+ CreatedOnToolsVersion = 7.3.1;
+ };
+ };
+ };
+ buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = en;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ Base,
+ );
+ mainGroup = 97C146E51CF9000F007C117D;
+ productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 97C146ED1CF9000F007C117D /* Runner */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ 97C146EC1CF9000F007C117D /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
+ 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
+ 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
+ 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXShellScriptBuildPhase section */
+ 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "Thin Binary";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
+ };
+ 9740EEB61CF901F6004384FC /* Run Script */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "Run Script";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
+ };
+ 9872F2A25E8A171A111468CD /* [CP] Check Pods Manifest.lock */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputFileListPaths = (
+ );
+ inputPaths = (
+ "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
+ "${PODS_ROOT}/Manifest.lock",
+ );
+ name = "[CP] Check Pods Manifest.lock";
+ outputFileListPaths = (
+ );
+ outputPaths = (
+ "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
+ showEnvVarsInLog = 0;
+ };
+/* End PBXShellScriptBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 97C146EA1CF9000F007C117D /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */,
+ 97C146F31CF9000F007C117D /* main.m in Sources */,
+ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin PBXVariantGroup section */
+ 97C146FA1CF9000F007C117D /* Main.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ 97C146FB1CF9000F007C117D /* Base */,
+ );
+ name = Main.storyboard;
+ sourceTree = "<group>";
+ };
+ 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ 97C147001CF9000F007C117D /* Base */,
+ );
+ name = LaunchScreen.storyboard;
+ sourceTree = "<group>";
+ };
+/* End PBXVariantGroup section */
+
+/* Begin XCBuildConfiguration section */
+ 97C147031CF9000F007C117D /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_COMMA = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+ CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+ CLANG_WARN_STRICT_PROTOTYPES = YES;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_TESTABILITY = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Debug;
+ };
+ 97C147041CF9000F007C117D /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_COMMA = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+ CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+ CLANG_WARN_STRICT_PROTOTYPES = YES;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_NS_ASSERTIONS = NO;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ VALIDATE_PRODUCT = YES;
+ };
+ name = Release;
+ };
+ 97C147061CF9000F007C117D /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ DEVELOPMENT_TEAM = "";
+ ENABLE_BITCODE = NO;
+ FRAMEWORK_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Flutter",
+ );
+ INFOPLIST_FILE = Runner/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LIBRARY_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Flutter",
+ );
+ PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.cameraExample;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Debug;
+ };
+ 97C147071CF9000F007C117D /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ DEVELOPMENT_TEAM = "";
+ ENABLE_BITCODE = NO;
+ FRAMEWORK_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Flutter",
+ );
+ INFOPLIST_FILE = Runner/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LIBRARY_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Flutter",
+ );
+ PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.cameraExample;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 97C147031CF9000F007C117D /* Debug */,
+ 97C147041CF9000F007C117D /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 97C147061CF9000F007C117D /* Debug */,
+ 97C147071CF9000F007C117D /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 97C146E61CF9000F007C117D /* Project object */;
+}
diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/packages/camera/camera/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata
new file mode 100644
index 0000000..919434a
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+ version = "1.0">
+ <FileRef
+ location = "self:">
+ </FileRef>
+</Workspace>
diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/packages/camera/camera/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
new file mode 100644
index 0000000..f4b3c10
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
@@ -0,0 +1,104 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Scheme
+ LastUpgradeVersion = "1300"
+ version = "1.3">
+ <BuildAction
+ parallelizeBuildables = "YES"
+ buildImplicitDependencies = "YES">
+ <BuildActionEntries>
+ <BuildActionEntry
+ buildForTesting = "YES"
+ buildForRunning = "YES"
+ buildForProfiling = "YES"
+ buildForArchiving = "YES"
+ buildForAnalyzing = "YES">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "97C146ED1CF9000F007C117D"
+ BuildableName = "Runner.app"
+ BlueprintName = "Runner"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </BuildActionEntry>
+ </BuildActionEntries>
+ </BuildAction>
+ <TestAction
+ buildConfiguration = "Debug"
+ selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+ selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+ shouldUseLaunchSchemeArgsEnv = "YES">
+ <MacroExpansion>
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "97C146ED1CF9000F007C117D"
+ BuildableName = "Runner.app"
+ BlueprintName = "Runner"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </MacroExpansion>
+ <Testables>
+ <TestableReference
+ skipped = "NO">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "03BB76672665316900CE5A93"
+ BuildableName = "RunnerTests.xctest"
+ BlueprintName = "RunnerTests"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </TestableReference>
+ </Testables>
+ </TestAction>
+ <LaunchAction
+ buildConfiguration = "Debug"
+ selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+ selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+ launchStyle = "0"
+ useCustomWorkingDirectory = "NO"
+ ignoresPersistentStateOnLaunch = "NO"
+ debugDocumentVersioning = "YES"
+ debugServiceExtension = "internal"
+ allowLocationSimulation = "YES">
+ <BuildableProductRunnable
+ runnableDebuggingMode = "0">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "97C146ED1CF9000F007C117D"
+ BuildableName = "Runner.app"
+ BlueprintName = "Runner"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </BuildableProductRunnable>
+ <AdditionalOptions>
+ <AdditionalOption
+ key = "NSZombieEnabled"
+ value = "YES"
+ isEnabled = "YES">
+ </AdditionalOption>
+ </AdditionalOptions>
+ </LaunchAction>
+ <ProfileAction
+ buildConfiguration = "Release"
+ shouldUseLaunchSchemeArgsEnv = "YES"
+ savedToolIdentifier = ""
+ useCustomWorkingDirectory = "NO"
+ debugDocumentVersioning = "YES">
+ <BuildableProductRunnable
+ runnableDebuggingMode = "0">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "97C146ED1CF9000F007C117D"
+ BuildableName = "Runner.app"
+ BlueprintName = "Runner"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </BuildableProductRunnable>
+ </ProfileAction>
+ <AnalyzeAction
+ buildConfiguration = "Debug">
+ </AnalyzeAction>
+ <ArchiveAction
+ buildConfiguration = "Release"
+ revealArchiveInOrganizer = "YES">
+ </ArchiveAction>
+</Scheme>
diff --git a/packages/camera/camera/example/ios/Runner.xcworkspace/contents.xcworkspacedata b/packages/camera/camera/example/ios/Runner.xcworkspace/contents.xcworkspacedata
new file mode 100644
index 0000000..21a3cc1
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner.xcworkspace/contents.xcworkspacedata
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+ version = "1.0">
+ <FileRef
+ location = "group:Runner.xcodeproj">
+ </FileRef>
+ <FileRef
+ location = "group:Pods/Pods.xcodeproj">
+ </FileRef>
+</Workspace>
diff --git a/packages/camera/camera/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/packages/camera/camera/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
new file mode 100644
index 0000000..18d9810
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>IDEDidComputeMac32BitWarning</key>
+ <true/>
+</dict>
+</plist>
diff --git a/packages/camera/camera/example/ios/Runner/AppDelegate.h b/packages/camera/camera/example/ios/Runner/AppDelegate.h
new file mode 100644
index 0000000..0681d28
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/AppDelegate.h
@@ -0,0 +1,10 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : FlutterAppDelegate
+
+@end
diff --git a/packages/camera/camera/example/ios/Runner/AppDelegate.m b/packages/camera/camera/example/ios/Runner/AppDelegate.m
new file mode 100644
index 0000000..30b8796
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/AppDelegate.m
@@ -0,0 +1,17 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "AppDelegate.h"
+#include "GeneratedPluginRegistrant.h"
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ [GeneratedPluginRegistrant registerWithRegistry:self];
+ // Override point for customization after application launch.
+ return [super application:application didFinishLaunchingWithOptions:launchOptions];
+}
+
+@end
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 0000000..d225b3c
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,121 @@
+{
+ "images" : [
+ {
+ "size" : "20x20",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-20x20@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "20x20",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-20x20@3x.png",
+ "scale" : "3x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-29x29@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-29x29@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-29x29@3x.png",
+ "scale" : "3x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-40x40@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-40x40@3x.png",
+ "scale" : "3x"
+ },
+ {
+ "size" : "60x60",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-60x60@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "60x60",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-60x60@3x.png",
+ "scale" : "3x"
+ },
+ {
+ "size" : "20x20",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-20x20@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "20x20",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-20x20@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-29x29@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-29x29@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-40x40@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-40x40@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "76x76",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-76x76@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "76x76",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-76x76@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "83.5x83.5",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-83.5x83.5@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ios-marketing",
+ "size" : "1024x1024",
+ "scale" : "1x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
\ No newline at end of file
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png
new file mode 100644
index 0000000..28c6bf0
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png
new file mode 100644
index 0000000..2ccbfd9
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png
new file mode 100644
index 0000000..f091b6b
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png
new file mode 100644
index 0000000..4cde121
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png
new file mode 100644
index 0000000..d0ef06e
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png
new file mode 100644
index 0000000..dcdc230
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png
new file mode 100644
index 0000000..2ccbfd9
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png
new file mode 100644
index 0000000..c8f9ed8
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png
new file mode 100644
index 0000000..a6d6b86
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png
new file mode 100644
index 0000000..a6d6b86
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png
new file mode 100644
index 0000000..75b2d16
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png
new file mode 100644
index 0000000..c4df70d
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png
new file mode 100644
index 0000000..6a84f41
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png
new file mode 100644
index 0000000..d0e1f58
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json
new file mode 100644
index 0000000..0bedcf2
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json
@@ -0,0 +1,23 @@
+{
+ "images" : [
+ {
+ "idiom" : "universal",
+ "filename" : "LaunchImage.png",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "universal",
+ "filename" : "LaunchImage@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "universal",
+ "filename" : "LaunchImage@3x.png",
+ "scale" : "3x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png
new file mode 100644
index 0000000..9da19ea
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png
new file mode 100644
index 0000000..9da19ea
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png
new file mode 100644
index 0000000..9da19ea
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png
Binary files differ
diff --git a/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md
new file mode 100644
index 0000000..89c2725
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md
@@ -0,0 +1,5 @@
+# Launch Screen Assets
+
+You can customize the launch screen with your own desired assets by replacing the image files in this directory.
+
+You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images.
\ No newline at end of file
diff --git a/packages/camera/camera/example/ios/Runner/Base.lproj/LaunchScreen.storyboard b/packages/camera/camera/example/ios/Runner/Base.lproj/LaunchScreen.storyboard
new file mode 100644
index 0000000..f2e259c
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Base.lproj/LaunchScreen.storyboard
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
+ <dependencies>
+ <deployment identifier="iOS"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
+ </dependencies>
+ <scenes>
+ <!--View Controller-->
+ <scene sceneID="EHf-IW-A2E">
+ <objects>
+ <viewController id="01J-lp-oVM" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="Ydg-fD-yQy"/>
+ <viewControllerLayoutGuide type="bottom" id="xbc-2k-c8Z"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <subviews>
+ <imageView opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" image="LaunchImage" translatesAutoresizingMaskIntoConstraints="NO" id="YRO-k0-Ey4">
+ </imageView>
+ </subviews>
+ <color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
+ <constraints>
+ <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerX" secondItem="Ze5-6b-2t3" secondAttribute="centerX" id="1a2-6s-vTC"/>
+ <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerY" secondItem="Ze5-6b-2t3" secondAttribute="centerY" id="4X2-HB-R7a"/>
+ </constraints>
+ </view>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
+ </objects>
+ <point key="canvasLocation" x="53" y="375"/>
+ </scene>
+ </scenes>
+ <resources>
+ <image name="LaunchImage" width="168" height="185"/>
+ </resources>
+</document>
diff --git a/packages/camera/camera/example/ios/Runner/Base.lproj/Main.storyboard b/packages/camera/camera/example/ios/Runner/Base.lproj/Main.storyboard
new file mode 100644
index 0000000..f3c2851
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Base.lproj/Main.storyboard
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
+ <dependencies>
+ <deployment identifier="iOS"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
+ </dependencies>
+ <scenes>
+ <!--Flutter View Controller-->
+ <scene sceneID="tne-QT-ifu">
+ <objects>
+ <viewController id="BYZ-38-t0r" customClass="FlutterViewController" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
+ <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
+ <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ </view>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
+ </objects>
+ </scene>
+ </scenes>
+</document>
diff --git a/packages/camera/camera/example/ios/Runner/Info.plist b/packages/camera/camera/example/ios/Runner/Info.plist
new file mode 100644
index 0000000..ff2e341
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/Info.plist
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>camera_example</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSApplicationCategoryType</key>
+ <string></string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>NSCameraUsageDescription</key>
+ <string>Can I use the camera please? Only for demo purpose of the app</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Only for demo purpose of the app</string>
+ <key>UILaunchStoryboardName</key>
+ <string>LaunchScreen</string>
+ <key>UIMainStoryboardFile</key>
+ <string>Main</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>arm64</string>
+ </array>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UIViewControllerBasedStatusBarAppearance</key>
+ <false/>
+</dict>
+</plist>
diff --git a/packages/camera/camera/example/ios/Runner/main.m b/packages/camera/camera/example/ios/Runner/main.m
new file mode 100644
index 0000000..d1224fe
--- /dev/null
+++ b/packages/camera/camera/example/ios/Runner/main.m
@@ -0,0 +1,19 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char *argv[]) {
+ @autoreleasepool {
+ // The setup logic in `AppDelegate::didFinishLaunchingWithOptions:` eventually sends camera
+ // operations on the background queue, which would run concurrently with the test cases during
+ // unit tests, making the debugging process confusing. This setup is actually not necessary for
+ // the unit tests, so it is better to skip the AppDelegate when running unit tests.
+ BOOL isTesting = NSClassFromString(@"XCTestCase") != nil;
+ return UIApplicationMain(argc, argv, nil,
+ isTesting ? nil : NSStringFromClass([AppDelegate class]));
+ }
+}
diff --git a/packages/camera/camera/example/lib/main.dart b/packages/camera/camera/example/lib/main.dart
new file mode 100644
index 0000000..b343b6d
--- /dev/null
+++ b/packages/camera/camera/example/lib/main.dart
@@ -0,0 +1,1080 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:io';
+
+import 'package:camera/camera.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/scheduler.dart';
+import 'package:video_player/video_player.dart';
+
+/// Camera example home widget.
+class CameraExampleHome extends StatefulWidget {
+ /// Default Constructor
+ const CameraExampleHome({Key? key}) : super(key: key);
+
+ @override
+ State<CameraExampleHome> createState() {
+ return _CameraExampleHomeState();
+ }
+}
+
+/// Returns a suitable camera icon for [direction].
+IconData getCameraLensIcon(CameraLensDirection direction) {
+ switch (direction) {
+ case CameraLensDirection.back:
+ return Icons.camera_rear;
+ case CameraLensDirection.front:
+ return Icons.camera_front;
+ case CameraLensDirection.external:
+ return Icons.camera;
+ }
+ // This enum is from a different package, so a new value could be added at
+ // any time. The example should keep working if that happens.
+ // ignore: dead_code
+ return Icons.camera;
+}
+
+void _logError(String code, String? message) {
+ // ignore: avoid_print
+ print('Error: $code${message == null ? '' : '\nError Message: $message'}');
+}
+
+class _CameraExampleHomeState extends State<CameraExampleHome>
+ with WidgetsBindingObserver, TickerProviderStateMixin {
+ CameraController? controller;
+ XFile? imageFile;
+ XFile? videoFile;
+ VideoPlayerController? videoController;
+ VoidCallback? videoPlayerListener;
+ bool enableAudio = true;
+ double _minAvailableExposureOffset = 0.0;
+ double _maxAvailableExposureOffset = 0.0;
+ double _currentExposureOffset = 0.0;
+ late AnimationController _flashModeControlRowAnimationController;
+ late Animation<double> _flashModeControlRowAnimation;
+ late AnimationController _exposureModeControlRowAnimationController;
+ late Animation<double> _exposureModeControlRowAnimation;
+ late AnimationController _focusModeControlRowAnimationController;
+ late Animation<double> _focusModeControlRowAnimation;
+ double _minAvailableZoom = 1.0;
+ double _maxAvailableZoom = 1.0;
+ double _currentScale = 1.0;
+ double _baseScale = 1.0;
+
+ // Counting pointers (number of user fingers on screen)
+ int _pointers = 0;
+
+ @override
+ void initState() {
+ super.initState();
+ WidgetsBinding.instance.addObserver(this);
+
+ _flashModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _flashModeControlRowAnimation = CurvedAnimation(
+ parent: _flashModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ _exposureModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _exposureModeControlRowAnimation = CurvedAnimation(
+ parent: _exposureModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ _focusModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _focusModeControlRowAnimation = CurvedAnimation(
+ parent: _focusModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ }
+
+ @override
+ void dispose() {
+ WidgetsBinding.instance.removeObserver(this);
+ _flashModeControlRowAnimationController.dispose();
+ _exposureModeControlRowAnimationController.dispose();
+ super.dispose();
+ }
+
+ // #docregion AppLifecycle
+ @override
+ void didChangeAppLifecycleState(AppLifecycleState state) {
+ final CameraController? cameraController = controller;
+
+ // App state changed before we got the chance to initialize.
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ return;
+ }
+
+ if (state == AppLifecycleState.inactive) {
+ cameraController.dispose();
+ } else if (state == AppLifecycleState.resumed) {
+ onNewCameraSelected(cameraController.description);
+ }
+ }
+ // #enddocregion AppLifecycle
+
+ @override
+ Widget build(BuildContext context) {
+ return Scaffold(
+ appBar: AppBar(
+ title: const Text('Camera example'),
+ ),
+ body: Column(
+ children: <Widget>[
+ Expanded(
+ child: Container(
+ decoration: BoxDecoration(
+ color: Colors.black,
+ border: Border.all(
+ color:
+ controller != null && controller!.value.isRecordingVideo
+ ? Colors.redAccent
+ : Colors.grey,
+ width: 3.0,
+ ),
+ ),
+ child: Padding(
+ padding: const EdgeInsets.all(1.0),
+ child: Center(
+ child: _cameraPreviewWidget(),
+ ),
+ ),
+ ),
+ ),
+ _captureControlRowWidget(),
+ _modeControlRowWidget(),
+ Padding(
+ padding: const EdgeInsets.all(5.0),
+ child: Row(
+ children: <Widget>[
+ _cameraTogglesRowWidget(),
+ _thumbnailWidget(),
+ ],
+ ),
+ ),
+ ],
+ ),
+ );
+ }
+
+ /// Display the preview from the camera (or a message if the preview is not available).
+ Widget _cameraPreviewWidget() {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ return const Text(
+ 'Tap a camera',
+ style: TextStyle(
+ color: Colors.white,
+ fontSize: 24.0,
+ fontWeight: FontWeight.w900,
+ ),
+ );
+ } else {
+ return Listener(
+ onPointerDown: (_) => _pointers++,
+ onPointerUp: (_) => _pointers--,
+ child: CameraPreview(
+ controller!,
+ child: LayoutBuilder(
+ builder: (BuildContext context, BoxConstraints constraints) {
+ return GestureDetector(
+ behavior: HitTestBehavior.opaque,
+ onScaleStart: _handleScaleStart,
+ onScaleUpdate: _handleScaleUpdate,
+ onTapDown: (TapDownDetails details) =>
+ onViewFinderTap(details, constraints),
+ );
+ }),
+ ),
+ );
+ }
+ }
+
+ void _handleScaleStart(ScaleStartDetails details) {
+ _baseScale = _currentScale;
+ }
+
+ Future<void> _handleScaleUpdate(ScaleUpdateDetails details) async {
+ // When there are not exactly two fingers on screen don't scale
+ if (controller == null || _pointers != 2) {
+ return;
+ }
+
+ _currentScale = (_baseScale * details.scale)
+ .clamp(_minAvailableZoom, _maxAvailableZoom);
+
+ await controller!.setZoomLevel(_currentScale);
+ }
+
+ /// Display the thumbnail of the captured image or video.
+ Widget _thumbnailWidget() {
+ final VideoPlayerController? localVideoController = videoController;
+
+ return Expanded(
+ child: Align(
+ alignment: Alignment.centerRight,
+ child: Row(
+ mainAxisSize: MainAxisSize.min,
+ children: <Widget>[
+ if (localVideoController == null && imageFile == null)
+ Container()
+ else
+ SizedBox(
+ width: 64.0,
+ height: 64.0,
+ child: (localVideoController == null)
+ ? (
+ // The captured image on the web contains a network-accessible URL
+ // pointing to a location within the browser. It may be displayed
+ // either with Image.network or Image.memory after loading the image
+ // bytes to memory.
+ kIsWeb
+ ? Image.network(imageFile!.path)
+ : Image.file(File(imageFile!.path)))
+ : Container(
+ decoration: BoxDecoration(
+ border: Border.all(color: Colors.pink)),
+ child: Center(
+ child: AspectRatio(
+ aspectRatio:
+ localVideoController.value.size != null
+ ? localVideoController.value.aspectRatio
+ : 1.0,
+ child: VideoPlayer(localVideoController)),
+ ),
+ ),
+ ),
+ ],
+ ),
+ ),
+ );
+ }
+
+ /// Display a bar with buttons to change the flash and exposure modes
+ Widget _modeControlRowWidget() {
+ return Column(
+ children: <Widget>[
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.flash_on),
+ color: Colors.blue,
+ onPressed: controller != null ? onFlashModeButtonPressed : null,
+ ),
+ // The exposure and focus mode are currently not supported on the web.
+ ...!kIsWeb
+ ? <Widget>[
+ IconButton(
+ icon: const Icon(Icons.exposure),
+ color: Colors.blue,
+ onPressed: controller != null
+ ? onExposureModeButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.filter_center_focus),
+ color: Colors.blue,
+ onPressed:
+ controller != null ? onFocusModeButtonPressed : null,
+ )
+ ]
+ : <Widget>[],
+ IconButton(
+ icon: Icon(enableAudio ? Icons.volume_up : Icons.volume_mute),
+ color: Colors.blue,
+ onPressed: controller != null ? onAudioModeButtonPressed : null,
+ ),
+ IconButton(
+ icon: Icon(controller?.value.isCaptureOrientationLocked ?? false
+ ? Icons.screen_lock_rotation
+ : Icons.screen_rotation),
+ color: Colors.blue,
+ onPressed: controller != null
+ ? onCaptureOrientationLockButtonPressed
+ : null,
+ ),
+ ],
+ ),
+ _flashModeControlRowWidget(),
+ _exposureModeControlRowWidget(),
+ _focusModeControlRowWidget(),
+ ],
+ );
+ }
+
+ Widget _flashModeControlRowWidget() {
+ return SizeTransition(
+ sizeFactor: _flashModeControlRowAnimation,
+ child: ClipRect(
+ child: Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.flash_off),
+ color: controller?.value.flashMode == FlashMode.off
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: controller != null
+ ? () => onSetFlashModeButtonPressed(FlashMode.off)
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.flash_auto),
+ color: controller?.value.flashMode == FlashMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: controller != null
+ ? () => onSetFlashModeButtonPressed(FlashMode.auto)
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.flash_on),
+ color: controller?.value.flashMode == FlashMode.always
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: controller != null
+ ? () => onSetFlashModeButtonPressed(FlashMode.always)
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.highlight),
+ color: controller?.value.flashMode == FlashMode.torch
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: controller != null
+ ? () => onSetFlashModeButtonPressed(FlashMode.torch)
+ : null,
+ ),
+ ],
+ ),
+ ),
+ );
+ }
+
+ Widget _exposureModeControlRowWidget() {
+ final ButtonStyle styleAuto = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.exposureMode == ExposureMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ );
+ final ButtonStyle styleLocked = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.exposureMode == ExposureMode.locked
+ ? Colors.orange
+ : Colors.blue,
+ );
+
+ return SizeTransition(
+ sizeFactor: _exposureModeControlRowAnimation,
+ child: ClipRect(
+ child: Container(
+ color: Colors.grey.shade50,
+ child: Column(
+ children: <Widget>[
+ const Center(
+ child: Text('Exposure Mode'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ TextButton(
+ style: styleAuto,
+ onPressed: controller != null
+ ? () =>
+ onSetExposureModeButtonPressed(ExposureMode.auto)
+ : null,
+ onLongPress: () {
+ if (controller != null) {
+ controller!.setExposurePoint(null);
+ showInSnackBar('Resetting exposure point');
+ }
+ },
+ child: const Text('AUTO'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed: controller != null
+ ? () =>
+ onSetExposureModeButtonPressed(ExposureMode.locked)
+ : null,
+ child: const Text('LOCKED'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed: controller != null
+ ? () => controller!.setExposureOffset(0.0)
+ : null,
+ child: const Text('RESET OFFSET'),
+ ),
+ ],
+ ),
+ const Center(
+ child: Text('Exposure Offset'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ Text(_minAvailableExposureOffset.toString()),
+ Slider(
+ value: _currentExposureOffset,
+ min: _minAvailableExposureOffset,
+ max: _maxAvailableExposureOffset,
+ label: _currentExposureOffset.toString(),
+ onChanged: _minAvailableExposureOffset ==
+ _maxAvailableExposureOffset
+ ? null
+ : setExposureOffset,
+ ),
+ Text(_maxAvailableExposureOffset.toString()),
+ ],
+ ),
+ ],
+ ),
+ ),
+ ),
+ );
+ }
+
+ Widget _focusModeControlRowWidget() {
+ final ButtonStyle styleAuto = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.focusMode == FocusMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ );
+ final ButtonStyle styleLocked = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.focusMode == FocusMode.locked
+ ? Colors.orange
+ : Colors.blue,
+ );
+
+ return SizeTransition(
+ sizeFactor: _focusModeControlRowAnimation,
+ child: ClipRect(
+ child: Container(
+ color: Colors.grey.shade50,
+ child: Column(
+ children: <Widget>[
+ const Center(
+ child: Text('Focus Mode'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ TextButton(
+ style: styleAuto,
+ onPressed: controller != null
+ ? () => onSetFocusModeButtonPressed(FocusMode.auto)
+ : null,
+ onLongPress: () {
+ if (controller != null) {
+ controller!.setFocusPoint(null);
+ }
+ showInSnackBar('Resetting focus point');
+ },
+ child: const Text('AUTO'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed: controller != null
+ ? () => onSetFocusModeButtonPressed(FocusMode.locked)
+ : null,
+ child: const Text('LOCKED'),
+ ),
+ ],
+ ),
+ ],
+ ),
+ ),
+ ),
+ );
+ }
+
+ /// Display the control bar with buttons to take pictures and record videos.
+ Widget _captureControlRowWidget() {
+ final CameraController? cameraController = controller;
+
+ return Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.camera_alt),
+ color: Colors.blue,
+ onPressed: cameraController != null &&
+ cameraController.value.isInitialized &&
+ !cameraController.value.isRecordingVideo
+ ? onTakePictureButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.videocam),
+ color: Colors.blue,
+ onPressed: cameraController != null &&
+ cameraController.value.isInitialized &&
+ !cameraController.value.isRecordingVideo
+ ? onVideoRecordButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: cameraController != null &&
+ cameraController.value.isRecordingPaused
+ ? const Icon(Icons.play_arrow)
+ : const Icon(Icons.pause),
+ color: Colors.blue,
+ onPressed: cameraController != null &&
+ cameraController.value.isInitialized &&
+ cameraController.value.isRecordingVideo
+ ? (cameraController.value.isRecordingPaused)
+ ? onResumeButtonPressed
+ : onPauseButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.stop),
+ color: Colors.red,
+ onPressed: cameraController != null &&
+ cameraController.value.isInitialized &&
+ cameraController.value.isRecordingVideo
+ ? onStopButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.pause_presentation),
+ color:
+ cameraController != null && cameraController.value.isPreviewPaused
+ ? Colors.red
+ : Colors.blue,
+ onPressed:
+ cameraController == null ? null : onPausePreviewButtonPressed,
+ ),
+ ],
+ );
+ }
+
+ /// Display a row of toggle to select the camera (or a message if no camera is available).
+ Widget _cameraTogglesRowWidget() {
+ final List<Widget> toggles = <Widget>[];
+
+ void onChanged(CameraDescription? description) {
+ if (description == null) {
+ return;
+ }
+
+ onNewCameraSelected(description);
+ }
+
+ if (_cameras.isEmpty) {
+ SchedulerBinding.instance.addPostFrameCallback((_) async {
+ showInSnackBar('No camera found.');
+ });
+ return const Text('None');
+ } else {
+ for (final CameraDescription cameraDescription in _cameras) {
+ toggles.add(
+ SizedBox(
+ width: 90.0,
+ child: RadioListTile<CameraDescription>(
+ title: Icon(getCameraLensIcon(cameraDescription.lensDirection)),
+ groupValue: controller?.description,
+ value: cameraDescription,
+ onChanged:
+ controller != null && controller!.value.isRecordingVideo
+ ? null
+ : onChanged,
+ ),
+ ),
+ );
+ }
+ }
+
+ return Row(children: toggles);
+ }
+
+ String timestamp() => DateTime.now().millisecondsSinceEpoch.toString();
+
+ void showInSnackBar(String message) {
+ ScaffoldMessenger.of(context)
+ .showSnackBar(SnackBar(content: Text(message)));
+ }
+
+ void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) {
+ if (controller == null) {
+ return;
+ }
+
+ final CameraController cameraController = controller!;
+
+ final Offset offset = Offset(
+ details.localPosition.dx / constraints.maxWidth,
+ details.localPosition.dy / constraints.maxHeight,
+ );
+ cameraController.setExposurePoint(offset);
+ cameraController.setFocusPoint(offset);
+ }
+
+ Future<void> onNewCameraSelected(CameraDescription cameraDescription) async {
+ final CameraController? oldController = controller;
+ if (oldController != null) {
+ // `controller` needs to be set to null before getting disposed,
+ // to avoid a race condition when we use the controller that is being
+ // disposed. This happens when camera permission dialog shows up,
+ // which triggers `didChangeAppLifecycleState`, which disposes and
+ // re-creates the controller.
+ controller = null;
+ await oldController.dispose();
+ }
+
+ final CameraController cameraController = CameraController(
+ cameraDescription,
+ kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium,
+ enableAudio: enableAudio,
+ imageFormatGroup: ImageFormatGroup.jpeg,
+ );
+
+ controller = cameraController;
+
+ // If the controller is updated then update the UI.
+ cameraController.addListener(() {
+ if (mounted) {
+ setState(() {});
+ }
+ if (cameraController.value.hasError) {
+ showInSnackBar(
+ 'Camera error ${cameraController.value.errorDescription}');
+ }
+ });
+
+ try {
+ await cameraController.initialize();
+ await Future.wait(<Future<Object?>>[
+ // The exposure mode is currently not supported on the web.
+ ...!kIsWeb
+ ? <Future<Object?>>[
+ cameraController.getMinExposureOffset().then(
+ (double value) => _minAvailableExposureOffset = value),
+ cameraController
+ .getMaxExposureOffset()
+ .then((double value) => _maxAvailableExposureOffset = value)
+ ]
+ : <Future<Object?>>[],
+ cameraController
+ .getMaxZoomLevel()
+ .then((double value) => _maxAvailableZoom = value),
+ cameraController
+ .getMinZoomLevel()
+ .then((double value) => _minAvailableZoom = value),
+ ]);
+ } on CameraException catch (e) {
+ switch (e.code) {
+ case 'CameraAccessDenied':
+ showInSnackBar('You have denied camera access.');
+ break;
+ case 'CameraAccessDeniedWithoutPrompt':
+ // iOS only
+ showInSnackBar('Please go to Settings app to enable camera access.');
+ break;
+ case 'CameraAccessRestricted':
+ // iOS only
+ showInSnackBar('Camera access is restricted.');
+ break;
+ case 'AudioAccessDenied':
+ showInSnackBar('You have denied audio access.');
+ break;
+ case 'AudioAccessDeniedWithoutPrompt':
+ // iOS only
+ showInSnackBar('Please go to Settings app to enable audio access.');
+ break;
+ case 'AudioAccessRestricted':
+ // iOS only
+ showInSnackBar('Audio access is restricted.');
+ break;
+ default:
+ _showCameraException(e);
+ break;
+ }
+ }
+
+ if (mounted) {
+ setState(() {});
+ }
+ }
+
+ void onTakePictureButtonPressed() {
+ takePicture().then((XFile? file) {
+ if (mounted) {
+ setState(() {
+ imageFile = file;
+ videoController?.dispose();
+ videoController = null;
+ });
+ if (file != null) {
+ showInSnackBar('Picture saved to ${file.path}');
+ }
+ }
+ });
+ }
+
+ void onFlashModeButtonPressed() {
+ if (_flashModeControlRowAnimationController.value == 1) {
+ _flashModeControlRowAnimationController.reverse();
+ } else {
+ _flashModeControlRowAnimationController.forward();
+ _exposureModeControlRowAnimationController.reverse();
+ _focusModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onExposureModeButtonPressed() {
+ if (_exposureModeControlRowAnimationController.value == 1) {
+ _exposureModeControlRowAnimationController.reverse();
+ } else {
+ _exposureModeControlRowAnimationController.forward();
+ _flashModeControlRowAnimationController.reverse();
+ _focusModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onFocusModeButtonPressed() {
+ if (_focusModeControlRowAnimationController.value == 1) {
+ _focusModeControlRowAnimationController.reverse();
+ } else {
+ _focusModeControlRowAnimationController.forward();
+ _flashModeControlRowAnimationController.reverse();
+ _exposureModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onAudioModeButtonPressed() {
+ enableAudio = !enableAudio;
+ if (controller != null) {
+ onNewCameraSelected(controller!.description);
+ }
+ }
+
+ Future<void> onCaptureOrientationLockButtonPressed() async {
+ try {
+ if (controller != null) {
+ final CameraController cameraController = controller!;
+ if (cameraController.value.isCaptureOrientationLocked) {
+ await cameraController.unlockCaptureOrientation();
+ showInSnackBar('Capture orientation unlocked');
+ } else {
+ await cameraController.lockCaptureOrientation();
+ showInSnackBar(
+ 'Capture orientation locked to ${cameraController.value.lockedCaptureOrientation.toString().split('.').last}');
+ }
+ }
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ }
+ }
+
+ void onSetFlashModeButtonPressed(FlashMode mode) {
+ setFlashMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Flash mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onSetExposureModeButtonPressed(ExposureMode mode) {
+ setExposureMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Exposure mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onSetFocusModeButtonPressed(FocusMode mode) {
+ setFocusMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Focus mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onVideoRecordButtonPressed() {
+ startVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ });
+ }
+
+ void onStopButtonPressed() {
+ stopVideoRecording().then((XFile? file) {
+ if (mounted) {
+ setState(() {});
+ }
+ if (file != null) {
+ showInSnackBar('Video recorded to ${file.path}');
+ videoFile = file;
+ _startVideoPlayer();
+ }
+ });
+ }
+
+ Future<void> onPausePreviewButtonPressed() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return;
+ }
+
+ if (cameraController.value.isPreviewPaused) {
+ await cameraController.resumePreview();
+ } else {
+ await cameraController.pausePreview();
+ }
+
+ if (mounted) {
+ setState(() {});
+ }
+ }
+
+ void onPauseButtonPressed() {
+ pauseVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Video recording paused');
+ });
+ }
+
+ void onResumeButtonPressed() {
+ resumeVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Video recording resumed');
+ });
+ }
+
+ Future<void> startVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return;
+ }
+
+ if (cameraController.value.isRecordingVideo) {
+ // A recording is already started, do nothing.
+ return;
+ }
+
+ try {
+ await cameraController.startVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return;
+ }
+ }
+
+ Future<XFile?> stopVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return null;
+ }
+
+ try {
+ return cameraController.stopVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return null;
+ }
+ }
+
+ Future<void> pauseVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return;
+ }
+
+ try {
+ await cameraController.pauseVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> resumeVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return;
+ }
+
+ try {
+ await cameraController.resumeVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setFlashMode(FlashMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setFlashMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setExposureMode(ExposureMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setExposureMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setExposureOffset(double offset) async {
+ if (controller == null) {
+ return;
+ }
+
+ setState(() {
+ _currentExposureOffset = offset;
+ });
+ try {
+ offset = await controller!.setExposureOffset(offset);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setFocusMode(FocusMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setFocusMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> _startVideoPlayer() async {
+ if (videoFile == null) {
+ return;
+ }
+
+ final VideoPlayerController vController = kIsWeb
+ ? VideoPlayerController.network(videoFile!.path)
+ : VideoPlayerController.file(File(videoFile!.path));
+
+ videoPlayerListener = () {
+ if (videoController != null && videoController!.value.size != null) {
+ // Refreshing the state to update video player with the correct ratio.
+ if (mounted) {
+ setState(() {});
+ }
+ videoController!.removeListener(videoPlayerListener!);
+ }
+ };
+ vController.addListener(videoPlayerListener!);
+ await vController.setLooping(true);
+ await vController.initialize();
+ await videoController?.dispose();
+ if (mounted) {
+ setState(() {
+ imageFile = null;
+ videoController = vController;
+ });
+ }
+ await vController.play();
+ }
+
+ Future<XFile?> takePicture() async {
+ final CameraController? cameraController = controller;
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return null;
+ }
+
+ if (cameraController.value.isTakingPicture) {
+ // A capture is already pending, do nothing.
+ return null;
+ }
+
+ try {
+ final XFile file = await cameraController.takePicture();
+ return file;
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return null;
+ }
+ }
+
+ void _showCameraException(CameraException e) {
+ _logError(e.code, e.description);
+ showInSnackBar('Error: ${e.code}\n${e.description}');
+ }
+}
+
+/// CameraApp is the Main Application.
+class CameraApp extends StatelessWidget {
+ /// Default Constructor
+ const CameraApp({Key? key}) : super(key: key);
+
+ @override
+ Widget build(BuildContext context) {
+ return const MaterialApp(
+ home: CameraExampleHome(),
+ );
+ }
+}
+
+List<CameraDescription> _cameras = <CameraDescription>[];
+
+Future<void> main() async {
+ // Fetch the available cameras before initializing the app.
+ try {
+ WidgetsFlutterBinding.ensureInitialized();
+ _cameras = await availableCameras();
+ } on CameraException catch (e) {
+ _logError(e.code, e.description);
+ }
+ runApp(const CameraApp());
+}
diff --git a/packages/camera/camera/example/lib/readme_full_example.dart b/packages/camera/camera/example/lib/readme_full_example.dart
new file mode 100644
index 0000000..20bfe78
--- /dev/null
+++ b/packages/camera/camera/example/lib/readme_full_example.dart
@@ -0,0 +1,69 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// #docregion FullAppExample
+import 'package:camera/camera.dart';
+import 'package:flutter/material.dart';
+
+late List<CameraDescription> _cameras;
+
+Future<void> main() async {
+ WidgetsFlutterBinding.ensureInitialized();
+
+ _cameras = await availableCameras();
+ runApp(const CameraApp());
+}
+
+/// CameraApp is the Main Application.
+class CameraApp extends StatefulWidget {
+ /// Default Constructor
+ const CameraApp({Key? key}) : super(key: key);
+
+ @override
+ State<CameraApp> createState() => _CameraAppState();
+}
+
+class _CameraAppState extends State<CameraApp> {
+ late CameraController controller;
+
+ @override
+ void initState() {
+ super.initState();
+ controller = CameraController(_cameras[0], ResolutionPreset.max);
+ controller.initialize().then((_) {
+ if (!mounted) {
+ return;
+ }
+ setState(() {});
+ }).catchError((Object e) {
+ if (e is CameraException) {
+ switch (e.code) {
+ case 'CameraAccessDenied':
+ // Handle access errors here.
+ break;
+ default:
+ // Handle other errors here.
+ break;
+ }
+ }
+ });
+ }
+
+ @override
+ void dispose() {
+ controller.dispose();
+ super.dispose();
+ }
+
+ @override
+ Widget build(BuildContext context) {
+ if (!controller.value.isInitialized) {
+ return Container();
+ }
+ return MaterialApp(
+ home: CameraPreview(controller),
+ );
+ }
+}
+// #enddocregion FullAppExample
diff --git a/packages/camera/camera/example/pubspec.yaml b/packages/camera/camera/example/pubspec.yaml
new file mode 100644
index 0000000..e630240
--- /dev/null
+++ b/packages/camera/camera/example/pubspec.yaml
@@ -0,0 +1,32 @@
+name: camera_example
+description: Demonstrates how to use the camera plugin.
+publish_to: none
+
+environment:
+ sdk: ">=2.14.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+dependencies:
+ camera:
+ # When depending on this package from a real application you should use:
+ # camera: ^x.y.z
+ # See https://dart.dev/tools/pub/dependencies#version-constraints
+ # The example app is bundled with the plugin so we use a path dependency on
+ # the parent directory to use the current plugin's version.
+ path: ../
+ flutter:
+ sdk: flutter
+ path_provider: ^2.0.0
+ video_player: ^2.1.4
+
+dev_dependencies:
+ build_runner: ^2.1.10
+ flutter_driver:
+ sdk: flutter
+ flutter_test:
+ sdk: flutter
+ integration_test:
+ sdk: flutter
+
+flutter:
+ uses-material-design: true
diff --git a/packages/camera/camera/example/test/main_test.dart b/packages/camera/camera/example/test/main_test.dart
new file mode 100644
index 0000000..6e909ef
--- /dev/null
+++ b/packages/camera/camera/example/test/main_test.dart
@@ -0,0 +1,16 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_example/main.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ testWidgets('Test snackbar', (WidgetTester tester) async {
+ WidgetsFlutterBinding.ensureInitialized();
+ await tester.pumpWidget(const CameraApp());
+ await tester.pumpAndSettle();
+ expect(find.byType(SnackBar), findsOneWidget);
+ });
+}
diff --git a/packages/camera/camera/example/test_driver/integration_test.dart b/packages/camera/camera/example/test_driver/integration_test.dart
new file mode 100644
index 0000000..aa57599
--- /dev/null
+++ b/packages/camera/camera/example/test_driver/integration_test.dart
@@ -0,0 +1,66 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// ignore_for_file: avoid_print
+
+import 'dart:async';
+import 'dart:convert';
+import 'dart:io';
+
+import 'package:flutter_driver/flutter_driver.dart';
+
+const String _examplePackage = 'io.flutter.plugins.cameraexample';
+
+Future<void> main() async {
+ if (!(Platform.isLinux || Platform.isMacOS)) {
+ print('This test must be run on a POSIX host. Skipping...');
+ exit(0);
+ }
+ final bool adbExists =
+ Process.runSync('which', <String>['adb']).exitCode == 0;
+ if (!adbExists) {
+ print(r'This test needs ADB to exist on the $PATH. Skipping...');
+ exit(0);
+ }
+ print('Granting camera permissions...');
+ Process.runSync('adb', <String>[
+ 'shell',
+ 'pm',
+ 'grant',
+ _examplePackage,
+ 'android.permission.CAMERA'
+ ]);
+ Process.runSync('adb', <String>[
+ 'shell',
+ 'pm',
+ 'grant',
+ _examplePackage,
+ 'android.permission.RECORD_AUDIO'
+ ]);
+ print('Starting test.');
+ final FlutterDriver driver = await FlutterDriver.connect();
+ final String data = await driver.requestData(
+ null,
+ timeout: const Duration(minutes: 1),
+ );
+ await driver.close();
+ print('Test finished. Revoking camera permissions...');
+ Process.runSync('adb', <String>[
+ 'shell',
+ 'pm',
+ 'revoke',
+ _examplePackage,
+ 'android.permission.CAMERA'
+ ]);
+ Process.runSync('adb', <String>[
+ 'shell',
+ 'pm',
+ 'revoke',
+ _examplePackage,
+ 'android.permission.RECORD_AUDIO'
+ ]);
+
+ final Map<String, dynamic> result = jsonDecode(data) as Map<String, dynamic>;
+ exit(result['result'] == 'true' ? 0 : 1);
+}
diff --git a/packages/camera/camera/example/web/favicon.png b/packages/camera/camera/example/web/favicon.png
new file mode 100644
index 0000000..8aaa46a
--- /dev/null
+++ b/packages/camera/camera/example/web/favicon.png
Binary files differ
diff --git a/packages/camera/camera/example/web/icons/Icon-192.png b/packages/camera/camera/example/web/icons/Icon-192.png
new file mode 100644
index 0000000..b749bfe
--- /dev/null
+++ b/packages/camera/camera/example/web/icons/Icon-192.png
Binary files differ
diff --git a/packages/camera/camera/example/web/icons/Icon-512.png b/packages/camera/camera/example/web/icons/Icon-512.png
new file mode 100644
index 0000000..88cfd48
--- /dev/null
+++ b/packages/camera/camera/example/web/icons/Icon-512.png
Binary files differ
diff --git a/packages/camera/camera/example/web/index.html b/packages/camera/camera/example/web/index.html
new file mode 100644
index 0000000..2a3117d
--- /dev/null
+++ b/packages/camera/camera/example/web/index.html
@@ -0,0 +1,39 @@
+<!DOCTYPE html>
+<!-- Copyright 2013 The Flutter Authors. All rights reserved.
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file. -->
+<html>
+
+<head>
+ <meta charset="UTF-8">
+ <meta content="IE=Edge" http-equiv="X-UA-Compatible">
+ <meta name="description" content="An example of the camera on the web.">
+
+ <!-- iOS meta tags & icons -->
+ <meta name="apple-mobile-web-app-capable" content="yes">
+ <meta name="apple-mobile-web-app-status-bar-style" content="black">
+ <meta name="apple-mobile-web-app-title" content="example">
+ <link rel="apple-touch-icon" href="icons/Icon-192.png">
+
+ <!-- Favicon -->
+ <link rel="shortcut icon" type="image/png" href="favicon.png" />
+
+ <title>Camera Web Example</title>
+ <link rel="manifest" href="manifest.json">
+</head>
+
+<body>
+ <!-- This script installs service_worker.js to provide PWA functionality to
+ application. For more information, see:
+ https://developers.google.com/web/fundamentals/primers/service-workers -->
+ <script>
+ if ('serviceWorker' in navigator) {
+ window.addEventListener('load', function () {
+ navigator.serviceWorker.register('flutter_service_worker.js');
+ });
+ }
+ </script>
+ <script src="main.dart.js" type="application/javascript"></script>
+</body>
+
+</html>
\ No newline at end of file
diff --git a/packages/camera/camera/example/web/manifest.json b/packages/camera/camera/example/web/manifest.json
new file mode 100644
index 0000000..5fe0e04
--- /dev/null
+++ b/packages/camera/camera/example/web/manifest.json
@@ -0,0 +1,23 @@
+{
+ "name": "camera example",
+ "short_name": "camera",
+ "start_url": ".",
+ "display": "standalone",
+ "background_color": "#0175C2",
+ "theme_color": "#0175C2",
+ "description": "An example of the camera on the web.",
+ "orientation": "portrait-primary",
+ "prefer_related_applications": false,
+ "icons": [
+ {
+ "src": "icons/Icon-192.png",
+ "sizes": "192x192",
+ "type": "image/png"
+ },
+ {
+ "src": "icons/Icon-512.png",
+ "sizes": "512x512",
+ "type": "image/png"
+ }
+ ]
+}
diff --git a/packages/camera/camera/lib/camera.dart b/packages/camera/camera/lib/camera.dart
new file mode 100644
index 0000000..900c263
--- /dev/null
+++ b/packages/camera/camera/lib/camera.dart
@@ -0,0 +1,19 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'package:camera_platform_interface/camera_platform_interface.dart'
+ show
+ CameraDescription,
+ CameraException,
+ CameraLensDirection,
+ FlashMode,
+ ExposureMode,
+ FocusMode,
+ ResolutionPreset,
+ XFile,
+ ImageFormatGroup;
+
+export 'src/camera_controller.dart';
+export 'src/camera_image.dart';
+export 'src/camera_preview.dart';
diff --git a/packages/camera/camera/lib/src/camera_controller.dart b/packages/camera/camera/lib/src/camera_controller.dart
new file mode 100644
index 0000000..7a396c1
--- /dev/null
+++ b/packages/camera/camera/lib/src/camera_controller.dart
@@ -0,0 +1,957 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:collection';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+import '../camera.dart';
+
+/// Signature for a callback receiving the a camera image.
+///
+/// This is used by [CameraController.startImageStream].
+// TODO(stuartmorgan): Fix this naming the next time there's a breaking change
+// to this package.
+// ignore: camel_case_types
+typedef onLatestImageAvailable = Function(CameraImage image);
+
+/// Completes with a list of available cameras.
+///
+/// May throw a [CameraException].
+Future<List<CameraDescription>> availableCameras() async {
+ return CameraPlatform.instance.availableCameras();
+}
+
+// TODO(stuartmorgan): Remove this once the package requires 2.10, where the
+// dart:async `unawaited` accepts a nullable future.
+void _unawaited(Future<void>? future) {}
+
+/// The state of a [CameraController].
+class CameraValue {
+ /// Creates a new camera controller state.
+ const CameraValue({
+ required this.isInitialized,
+ this.errorDescription,
+ this.previewSize,
+ required this.isRecordingVideo,
+ required this.isTakingPicture,
+ required this.isStreamingImages,
+ required bool isRecordingPaused,
+ required this.flashMode,
+ required this.exposureMode,
+ required this.focusMode,
+ required this.exposurePointSupported,
+ required this.focusPointSupported,
+ required this.deviceOrientation,
+ this.lockedCaptureOrientation,
+ this.recordingOrientation,
+ this.isPreviewPaused = false,
+ this.previewPauseOrientation,
+ }) : _isRecordingPaused = isRecordingPaused;
+
+ /// Creates a new camera controller state for an uninitialized controller.
+ const CameraValue.uninitialized()
+ : this(
+ isInitialized: false,
+ isRecordingVideo: false,
+ isTakingPicture: false,
+ isStreamingImages: false,
+ isRecordingPaused: false,
+ flashMode: FlashMode.auto,
+ exposureMode: ExposureMode.auto,
+ exposurePointSupported: false,
+ focusMode: FocusMode.auto,
+ focusPointSupported: false,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ isPreviewPaused: false,
+ );
+
+ /// True after [CameraController.initialize] has completed successfully.
+ final bool isInitialized;
+
+ /// True when a picture capture request has been sent but as not yet returned.
+ final bool isTakingPicture;
+
+ /// True when the camera is recording (not the same as previewing).
+ final bool isRecordingVideo;
+
+ /// True when images from the camera are being streamed.
+ final bool isStreamingImages;
+
+ final bool _isRecordingPaused;
+
+ /// True when the preview widget has been paused manually.
+ final bool isPreviewPaused;
+
+ /// Set to the orientation the preview was paused in, if it is currently paused.
+ final DeviceOrientation? previewPauseOrientation;
+
+ /// True when camera [isRecordingVideo] and recording is paused.
+ bool get isRecordingPaused => isRecordingVideo && _isRecordingPaused;
+
+ /// Description of an error state.
+ ///
+ /// This is null while the controller is not in an error state.
+ /// When [hasError] is true this contains the error description.
+ final String? errorDescription;
+
+ /// The size of the preview in pixels.
+ ///
+ /// Is `null` until [isInitialized] is `true`.
+ final Size? previewSize;
+
+ /// Convenience getter for `previewSize.width / previewSize.height`.
+ ///
+ /// Can only be called when [initialize] is done.
+ double get aspectRatio => previewSize!.width / previewSize!.height;
+
+ /// Whether the controller is in an error state.
+ ///
+ /// When true [errorDescription] describes the error.
+ bool get hasError => errorDescription != null;
+
+ /// The flash mode the camera is currently set to.
+ final FlashMode flashMode;
+
+ /// The exposure mode the camera is currently set to.
+ final ExposureMode exposureMode;
+
+ /// The focus mode the camera is currently set to.
+ final FocusMode focusMode;
+
+ /// Whether setting the exposure point is supported.
+ final bool exposurePointSupported;
+
+ /// Whether setting the focus point is supported.
+ final bool focusPointSupported;
+
+ /// The current device UI orientation.
+ final DeviceOrientation deviceOrientation;
+
+ /// The currently locked capture orientation.
+ final DeviceOrientation? lockedCaptureOrientation;
+
+ /// Whether the capture orientation is currently locked.
+ bool get isCaptureOrientationLocked => lockedCaptureOrientation != null;
+
+ /// The orientation of the currently running video recording.
+ final DeviceOrientation? recordingOrientation;
+
+ /// Creates a modified copy of the object.
+ ///
+ /// Explicitly specified fields get the specified value, all other fields get
+ /// the same value of the current object.
+ CameraValue copyWith({
+ bool? isInitialized,
+ bool? isRecordingVideo,
+ bool? isTakingPicture,
+ bool? isStreamingImages,
+ String? errorDescription,
+ Size? previewSize,
+ bool? isRecordingPaused,
+ FlashMode? flashMode,
+ ExposureMode? exposureMode,
+ FocusMode? focusMode,
+ bool? exposurePointSupported,
+ bool? focusPointSupported,
+ DeviceOrientation? deviceOrientation,
+ Optional<DeviceOrientation>? lockedCaptureOrientation,
+ Optional<DeviceOrientation>? recordingOrientation,
+ bool? isPreviewPaused,
+ Optional<DeviceOrientation>? previewPauseOrientation,
+ }) {
+ return CameraValue(
+ isInitialized: isInitialized ?? this.isInitialized,
+ errorDescription: errorDescription,
+ previewSize: previewSize ?? this.previewSize,
+ isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo,
+ isTakingPicture: isTakingPicture ?? this.isTakingPicture,
+ isStreamingImages: isStreamingImages ?? this.isStreamingImages,
+ isRecordingPaused: isRecordingPaused ?? _isRecordingPaused,
+ flashMode: flashMode ?? this.flashMode,
+ exposureMode: exposureMode ?? this.exposureMode,
+ focusMode: focusMode ?? this.focusMode,
+ exposurePointSupported:
+ exposurePointSupported ?? this.exposurePointSupported,
+ focusPointSupported: focusPointSupported ?? this.focusPointSupported,
+ deviceOrientation: deviceOrientation ?? this.deviceOrientation,
+ lockedCaptureOrientation: lockedCaptureOrientation == null
+ ? this.lockedCaptureOrientation
+ : lockedCaptureOrientation.orNull,
+ recordingOrientation: recordingOrientation == null
+ ? this.recordingOrientation
+ : recordingOrientation.orNull,
+ isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused,
+ previewPauseOrientation: previewPauseOrientation == null
+ ? this.previewPauseOrientation
+ : previewPauseOrientation.orNull,
+ );
+ }
+
+ @override
+ String toString() {
+ return '${objectRuntimeType(this, 'CameraValue')}('
+ 'isRecordingVideo: $isRecordingVideo, '
+ 'isInitialized: $isInitialized, '
+ 'errorDescription: $errorDescription, '
+ 'previewSize: $previewSize, '
+ 'isStreamingImages: $isStreamingImages, '
+ 'flashMode: $flashMode, '
+ 'exposureMode: $exposureMode, '
+ 'focusMode: $focusMode, '
+ 'exposurePointSupported: $exposurePointSupported, '
+ 'focusPointSupported: $focusPointSupported, '
+ 'deviceOrientation: $deviceOrientation, '
+ 'lockedCaptureOrientation: $lockedCaptureOrientation, '
+ 'recordingOrientation: $recordingOrientation, '
+ 'isPreviewPaused: $isPreviewPaused, '
+ 'previewPausedOrientation: $previewPauseOrientation)';
+ }
+}
+
+/// Controls a device camera.
+///
+/// Use [availableCameras] to get a list of available cameras.
+///
+/// Before using a [CameraController] a call to [initialize] must complete.
+///
+/// To show the camera preview on the screen use a [CameraPreview] widget.
+class CameraController extends ValueNotifier<CameraValue> {
+ /// Creates a new camera controller in an uninitialized state.
+ CameraController(
+ this.description,
+ this.resolutionPreset, {
+ this.enableAudio = true,
+ this.imageFormatGroup,
+ }) : super(const CameraValue.uninitialized());
+
+ /// The properties of the camera device controlled by this controller.
+ final CameraDescription description;
+
+ /// The resolution this controller is targeting.
+ ///
+ /// This resolution preset is not guaranteed to be available on the device,
+ /// if unavailable a lower resolution will be used.
+ ///
+ /// See also: [ResolutionPreset].
+ final ResolutionPreset resolutionPreset;
+
+ /// Whether to include audio when recording a video.
+ final bool enableAudio;
+
+ /// The [ImageFormatGroup] describes the output of the raw image format.
+ ///
+ /// When null the imageFormat will fallback to the platforms default.
+ final ImageFormatGroup? imageFormatGroup;
+
+ /// The id of a camera that hasn't been initialized.
+ @visibleForTesting
+ static const int kUninitializedCameraId = -1;
+ int _cameraId = kUninitializedCameraId;
+
+ bool _isDisposed = false;
+ StreamSubscription<CameraImageData>? _imageStreamSubscription;
+ FutureOr<bool>? _initCalled;
+ StreamSubscription<DeviceOrientationChangedEvent>?
+ _deviceOrientationSubscription;
+
+ /// Checks whether [CameraController.dispose] has completed successfully.
+ ///
+ /// This is a no-op when asserts are disabled.
+ void debugCheckIsDisposed() {
+ assert(_isDisposed);
+ }
+
+ /// The camera identifier with which the controller is associated.
+ int get cameraId => _cameraId;
+
+ /// Initializes the camera on the device.
+ ///
+ /// Throws a [CameraException] if the initialization fails.
+ Future<void> initialize() async {
+ if (_isDisposed) {
+ throw CameraException(
+ 'Disposed CameraController',
+ 'initialize was called on a disposed CameraController',
+ );
+ }
+ try {
+ final Completer<CameraInitializedEvent> initializeCompleter =
+ Completer<CameraInitializedEvent>();
+
+ _deviceOrientationSubscription = CameraPlatform.instance
+ .onDeviceOrientationChanged()
+ .listen((DeviceOrientationChangedEvent event) {
+ value = value.copyWith(
+ deviceOrientation: event.orientation,
+ );
+ });
+
+ _cameraId = await CameraPlatform.instance.createCamera(
+ description,
+ resolutionPreset,
+ enableAudio: enableAudio,
+ );
+
+ _unawaited(CameraPlatform.instance
+ .onCameraInitialized(_cameraId)
+ .first
+ .then((CameraInitializedEvent event) {
+ initializeCompleter.complete(event);
+ }));
+
+ await CameraPlatform.instance.initializeCamera(
+ _cameraId,
+ imageFormatGroup: imageFormatGroup ?? ImageFormatGroup.unknown,
+ );
+
+ value = value.copyWith(
+ isInitialized: true,
+ previewSize: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => Size(
+ event.previewWidth,
+ event.previewHeight,
+ )),
+ exposureMode: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.exposureMode),
+ focusMode: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.focusMode),
+ exposurePointSupported: await initializeCompleter.future.then(
+ (CameraInitializedEvent event) => event.exposurePointSupported),
+ focusPointSupported: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.focusPointSupported),
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+
+ _initCalled = true;
+ }
+
+ /// Prepare the capture session for video recording.
+ ///
+ /// Use of this method is optional, but it may be called for performance
+ /// reasons on iOS.
+ ///
+ /// Preparing audio can cause a minor delay in the CameraPreview view on iOS.
+ /// If video recording is intended, calling this early eliminates this delay
+ /// that would otherwise be experienced when video recording is started.
+ /// This operation is a no-op on Android and Web.
+ ///
+ /// Throws a [CameraException] if the prepare fails.
+ Future<void> prepareForVideoRecording() async {
+ await CameraPlatform.instance.prepareForVideoRecording();
+ }
+
+ /// Pauses the current camera preview
+ Future<void> pausePreview() async {
+ if (value.isPreviewPaused) {
+ return;
+ }
+ try {
+ await CameraPlatform.instance.pausePreview(_cameraId);
+ value = value.copyWith(
+ isPreviewPaused: true,
+ previewPauseOrientation: Optional<DeviceOrientation>.of(
+ value.lockedCaptureOrientation ?? value.deviceOrientation));
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Resumes the current camera preview
+ Future<void> resumePreview() async {
+ if (!value.isPreviewPaused) {
+ return;
+ }
+ try {
+ await CameraPlatform.instance.resumePreview(_cameraId);
+ value = value.copyWith(
+ isPreviewPaused: false,
+ previewPauseOrientation: const Optional<DeviceOrientation>.absent());
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Captures an image and returns the file where it was saved.
+ ///
+ /// Throws a [CameraException] if the capture fails.
+ Future<XFile> takePicture() async {
+ _throwIfNotInitialized('takePicture');
+ if (value.isTakingPicture) {
+ throw CameraException(
+ 'Previous capture has not returned yet.',
+ 'takePicture was called before the previous capture returned.',
+ );
+ }
+ try {
+ value = value.copyWith(isTakingPicture: true);
+ final XFile file = await CameraPlatform.instance.takePicture(_cameraId);
+ value = value.copyWith(isTakingPicture: false);
+ return file;
+ } on PlatformException catch (e) {
+ value = value.copyWith(isTakingPicture: false);
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Start streaming images from platform camera.
+ ///
+ /// Settings for capturing images on iOS and Android is set to always use the
+ /// latest image available from the camera and will drop all other images.
+ ///
+ /// When running continuously with [CameraPreview] widget, this function runs
+ /// best with [ResolutionPreset.low]. Running on [ResolutionPreset.high] can
+ /// have significant frame rate drops for [CameraPreview] on lower end
+ /// devices.
+ ///
+ /// Throws a [CameraException] if image streaming or video recording has
+ /// already started.
+ ///
+ /// The `startImageStream` method is only available on Android and iOS (other
+ /// platforms won't be supported in current setup).
+ ///
+ // TODO(bmparr): Add settings for resolution and fps.
+ Future<void> startImageStream(onLatestImageAvailable onAvailable) async {
+ assert(defaultTargetPlatform == TargetPlatform.android ||
+ defaultTargetPlatform == TargetPlatform.iOS);
+ _throwIfNotInitialized('startImageStream');
+ if (value.isRecordingVideo) {
+ throw CameraException(
+ 'A video recording is already started.',
+ 'startImageStream was called while a video is being recorded.',
+ );
+ }
+ if (value.isStreamingImages) {
+ throw CameraException(
+ 'A camera has started streaming images.',
+ 'startImageStream was called while a camera was streaming images.',
+ );
+ }
+
+ try {
+ _imageStreamSubscription = CameraPlatform.instance
+ .onStreamedFrameAvailable(_cameraId)
+ .listen((CameraImageData imageData) {
+ onAvailable(CameraImage.fromPlatformInterface(imageData));
+ });
+ value = value.copyWith(isStreamingImages: true);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Stop streaming images from platform camera.
+ ///
+ /// Throws a [CameraException] if image streaming was not started or video
+ /// recording was started.
+ ///
+ /// The `stopImageStream` method is only available on Android and iOS (other
+ /// platforms won't be supported in current setup).
+ Future<void> stopImageStream() async {
+ assert(defaultTargetPlatform == TargetPlatform.android ||
+ defaultTargetPlatform == TargetPlatform.iOS);
+ _throwIfNotInitialized('stopImageStream');
+ if (!value.isStreamingImages) {
+ throw CameraException(
+ 'No camera is streaming images',
+ 'stopImageStream was called when no camera is streaming images.',
+ );
+ }
+
+ try {
+ value = value.copyWith(isStreamingImages: false);
+ await _imageStreamSubscription?.cancel();
+ _imageStreamSubscription = null;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+  /// Start a video recording.
+  ///
+  /// You may optionally pass an [onAvailable] callback to also have the
+  /// video frames streamed to this callback.
+  ///
+  /// The video is returned as a [XFile] after calling [stopVideoRecording].
+  /// Throws a [CameraException] if the capture fails.
+  Future<void> startVideoRecording(
+      {onLatestImageAvailable? onAvailable}) async {
+    _throwIfNotInitialized('startVideoRecording');
+    if (value.isRecordingVideo) {
+      throw CameraException(
+        'A video recording is already started.',
+        'startVideoRecording was called when a recording is already started.',
+      );
+    }
+
+    // Adapt the app-facing CameraImage callback to the platform-interface
+    // CameraImageData callback expected by startVideoCapturing.
+    Function(CameraImageData image)? streamCallback;
+    if (onAvailable != null) {
+      streamCallback = (CameraImageData imageData) {
+        onAvailable(CameraImage.fromPlatformInterface(imageData));
+      };
+    }
+
+    try {
+      await CameraPlatform.instance.startVideoCapturing(
+          VideoCaptureOptions(_cameraId, streamCallback: streamCallback));
+      // Capture the orientation at recording start; it stays fixed for the
+      // duration of the recording (see _getApplicableOrientation in the
+      // preview widget).
+      value = value.copyWith(
+          isRecordingVideo: true,
+          isRecordingPaused: false,
+          recordingOrientation: Optional<DeviceOrientation>.of(
+              value.lockedCaptureOrientation ?? value.deviceOrientation),
+          isStreamingImages: onAvailable != null);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Stops the video recording and returns the file where it was saved.
+  ///
+  /// Also stops image streaming if it was started together with the
+  /// recording.
+  ///
+  /// Throws a [CameraException] if the capture failed.
+  Future<XFile> stopVideoRecording() async {
+    _throwIfNotInitialized('stopVideoRecording');
+    if (!value.isRecordingVideo) {
+      throw CameraException(
+        'No video is recording',
+        'stopVideoRecording was called when no video is recording.',
+      );
+    }
+
+    if (value.isStreamingImages) {
+      // Await the teardown. Previously this future was dropped, so a failure
+      // to cancel the image stream was silently ignored and the platform call
+      // below could race with the cancellation.
+      await stopImageStream();
+    }
+
+    try {
+      final XFile file =
+          await CameraPlatform.instance.stopVideoRecording(_cameraId);
+      value = value.copyWith(
+        isRecordingVideo: false,
+        recordingOrientation: const Optional<DeviceOrientation>.absent(),
+      );
+      return file;
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Pause video recording.
+  ///
+  /// This feature is only available on iOS and Android sdk 24+.
+  ///
+  /// Throws a [CameraException] if no recording is in progress or the
+  /// platform call fails.
+  Future<void> pauseVideoRecording() async {
+    _throwIfNotInitialized('pauseVideoRecording');
+    if (!value.isRecordingVideo) {
+      throw CameraException(
+        'No video is recording',
+        'pauseVideoRecording was called when no video is recording.',
+      );
+    }
+    try {
+      await CameraPlatform.instance.pauseVideoRecording(_cameraId);
+      // Only mark paused after the platform confirms the pause.
+      value = value.copyWith(isRecordingPaused: true);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Resume video recording after pausing.
+  ///
+  /// This feature is only available on iOS and Android sdk 24+.
+  ///
+  /// Throws a [CameraException] if no recording is in progress or the
+  /// platform call fails.
+  Future<void> resumeVideoRecording() async {
+    _throwIfNotInitialized('resumeVideoRecording');
+    if (!value.isRecordingVideo) {
+      throw CameraException(
+        'No video is recording',
+        'resumeVideoRecording was called when no video is recording.',
+      );
+    }
+    try {
+      await CameraPlatform.instance.resumeVideoRecording(_cameraId);
+      // Clear the paused flag only after the platform confirms the resume.
+      value = value.copyWith(isRecordingPaused: false);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Returns a widget showing a live camera preview.
+  ///
+  /// Throws a [CameraException] (wrapping the platform error) if the
+  /// controller is uninitialized/disposed or the preview cannot be built.
+  Widget buildPreview() {
+    _throwIfNotInitialized('buildPreview');
+    try {
+      return CameraPlatform.instance.buildPreview(_cameraId);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Gets the maximum supported zoom level for the selected camera.
+  ///
+  /// Throws a [CameraException] if the platform call fails.
+  Future<double> getMaxZoomLevel() async {
+    _throwIfNotInitialized('getMaxZoomLevel');
+    try {
+      // Await so that an asynchronous PlatformException is converted to a
+      // CameraException; without async/await this function was synchronous
+      // and the catch below could only see synchronous throws.
+      return await CameraPlatform.instance.getMaxZoomLevel(_cameraId);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Gets the minimum supported zoom level for the selected camera.
+  ///
+  /// Throws a [CameraException] if the platform call fails.
+  Future<double> getMinZoomLevel() async {
+    _throwIfNotInitialized('getMinZoomLevel');
+    try {
+      // Await so that an asynchronous PlatformException is converted to a
+      // CameraException; previously the function was synchronous and the
+      // catch below could only see synchronous throws.
+      return await CameraPlatform.instance.getMinZoomLevel(_cameraId);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Set the zoom level for the selected camera.
+  ///
+  /// The supplied [zoom] value should be between 1.0 and the maximum supported
+  /// zoom level returned by the `getMaxZoomLevel`. Throws an `CameraException`
+  /// when an illegal zoom level is supplied.
+  Future<void> setZoomLevel(double zoom) async {
+    _throwIfNotInitialized('setZoomLevel');
+    try {
+      // Await so that an asynchronous PlatformException is converted to a
+      // CameraException; previously the function was synchronous and the
+      // catch below could only see synchronous throws.
+      await CameraPlatform.instance.setZoomLevel(_cameraId, zoom);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Sets the flash mode for taking pictures.
+  ///
+  /// Throws a [CameraException] if the mode could not be set.
+  Future<void> setFlashMode(FlashMode mode) async {
+    try {
+      await CameraPlatform.instance.setFlashMode(_cameraId, mode);
+      // Cache the new mode so value.flashMode reflects the platform state.
+      value = value.copyWith(flashMode: mode);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Sets the exposure mode for taking pictures.
+  ///
+  /// Throws a [CameraException] if the mode could not be set.
+  Future<void> setExposureMode(ExposureMode mode) async {
+    try {
+      await CameraPlatform.instance.setExposureMode(_cameraId, mode);
+      // Cache the new mode so value.exposureMode reflects the platform state.
+      value = value.copyWith(exposureMode: mode);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Sets the exposure point for automatically determining the exposure value.
+  ///
+  /// [point] uses normalized coordinates: both `dx` and `dy` must lie in
+  /// [0, 1], otherwise an [ArgumentError] is thrown.
+  ///
+  /// Supplying a `null` value will reset the exposure point to its default
+  /// value.
+  Future<void> setExposurePoint(Offset? point) async {
+    if (point != null &&
+        (point.dx < 0 || point.dx > 1 || point.dy < 0 || point.dy > 1)) {
+      throw ArgumentError(
+          'The values of point should be anywhere between (0,0) and (1,1).');
+    }
+
+    try {
+      // Convert the UI Offset to the platform-interface Point type.
+      await CameraPlatform.instance.setExposurePoint(
+        _cameraId,
+        point == null
+            ? null
+            : Point<double>(
+                point.dx,
+                point.dy,
+              ),
+      );
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Gets the minimum supported exposure offset for the selected camera in EV units.
+  ///
+  /// Throws a [CameraException] if the platform call fails.
+  Future<double> getMinExposureOffset() async {
+    _throwIfNotInitialized('getMinExposureOffset');
+    try {
+      // In an async function `return future` implicitly awaits, so platform
+      // errors are caught and rethrown as CameraException below.
+      return CameraPlatform.instance.getMinExposureOffset(_cameraId);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Gets the maximum supported exposure offset for the selected camera in EV units.
+  ///
+  /// Throws a [CameraException] if the platform call fails.
+  Future<double> getMaxExposureOffset() async {
+    _throwIfNotInitialized('getMaxExposureOffset');
+    try {
+      // In an async function `return future` implicitly awaits, so platform
+      // errors are caught and rethrown as CameraException below.
+      return CameraPlatform.instance.getMaxExposureOffset(_cameraId);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Gets the supported step size for exposure offset for the selected camera in EV units.
+  ///
+  /// Returns 0 when the camera supports using a free value without stepping.
+  ///
+  /// Throws a [CameraException] if the platform call fails.
+  Future<double> getExposureOffsetStepSize() async {
+    _throwIfNotInitialized('getExposureOffsetStepSize');
+    try {
+      // In an async function `return future` implicitly awaits, so platform
+      // errors are caught and rethrown as CameraException below.
+      return CameraPlatform.instance.getExposureOffsetStepSize(_cameraId);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Sets the exposure offset for the selected camera.
+  ///
+  /// The supplied [offset] value should be in EV units. 1 EV unit represents a
+  /// doubling in brightness. It should be between the minimum and maximum offsets
+  /// obtained through `getMinExposureOffset` and `getMaxExposureOffset` respectively.
+  /// Throws a `CameraException` when an illegal offset is supplied.
+  ///
+  /// When the supplied [offset] value does not align with the step size obtained
+  /// through `getExposureStepSize`, it will automatically be rounded to the nearest step.
+  ///
+  /// Returns the (rounded) offset value that was set.
+  Future<double> setExposureOffset(double offset) async {
+    _throwIfNotInitialized('setExposureOffset');
+    // Check if offset is in range
+    final List<double> range = await Future.wait(
+        <Future<double>>[getMinExposureOffset(), getMaxExposureOffset()]);
+    if (offset < range[0] || offset > range[1]) {
+      throw CameraException(
+        'exposureOffsetOutOfBounds',
+        'The provided exposure offset was outside the supported range for this device.',
+      );
+    }
+
+    // Round to the closest step if needed. A step size of 0 means the device
+    // accepts any free value, so no rounding is performed.
+    final double stepSize = await getExposureOffsetStepSize();
+    if (stepSize > 0) {
+      final double inv = 1.0 / stepSize;
+      double roundedOffset = (offset * inv).roundToDouble() / inv;
+      // If rounding pushed the value outside the supported range, round
+      // toward the inside of the range instead.
+      if (roundedOffset > range[1]) {
+        roundedOffset = (offset * inv).floorToDouble() / inv;
+      } else if (roundedOffset < range[0]) {
+        roundedOffset = (offset * inv).ceilToDouble() / inv;
+      }
+      offset = roundedOffset;
+    }
+
+    try {
+      return CameraPlatform.instance.setExposureOffset(_cameraId, offset);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Locks the capture orientation.
+  ///
+  /// If [orientation] is omitted, the current device orientation is used.
+  ///
+  /// Throws a [CameraException] if the platform call fails.
+  Future<void> lockCaptureOrientation([DeviceOrientation? orientation]) async {
+    try {
+      await CameraPlatform.instance.lockCaptureOrientation(
+          _cameraId, orientation ?? value.deviceOrientation);
+      // Cache the locked orientation so the preview widget can apply it.
+      value = value.copyWith(
+          lockedCaptureOrientation: Optional<DeviceOrientation>.of(
+              orientation ?? value.deviceOrientation));
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Sets the focus mode for taking pictures.
+  ///
+  /// Throws a [CameraException] if the mode could not be set.
+  Future<void> setFocusMode(FocusMode mode) async {
+    try {
+      await CameraPlatform.instance.setFocusMode(_cameraId, mode);
+      // Cache the new mode so value.focusMode reflects the platform state.
+      value = value.copyWith(focusMode: mode);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Unlocks the capture orientation.
+  ///
+  /// Throws a [CameraException] if the platform call fails.
+  Future<void> unlockCaptureOrientation() async {
+    try {
+      await CameraPlatform.instance.unlockCaptureOrientation(_cameraId);
+      // Clear the cached lock so orientation follows the device again.
+      value = value.copyWith(
+          lockedCaptureOrientation: const Optional<DeviceOrientation>.absent());
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Sets the focus point for automatically determining the focus value.
+  ///
+  /// [point] uses normalized coordinates: both `dx` and `dy` must lie in
+  /// [0, 1], otherwise an [ArgumentError] is thrown.
+  ///
+  /// Supplying a `null` value will reset the focus point to its default
+  /// value.
+  Future<void> setFocusPoint(Offset? point) async {
+    if (point != null &&
+        (point.dx < 0 || point.dx > 1 || point.dy < 0 || point.dy > 1)) {
+      throw ArgumentError(
+          'The values of point should be anywhere between (0,0) and (1,1).');
+    }
+    try {
+      // Convert the UI Offset to the platform-interface Point type.
+      await CameraPlatform.instance.setFocusPoint(
+        _cameraId,
+        point == null
+            ? null
+            : Point<double>(
+                point.dx,
+                point.dy,
+              ),
+      );
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  /// Releases the resources of this camera.
+  ///
+  /// Safe to call multiple times; subsequent calls are no-ops.
+  @override
+  Future<void> dispose() async {
+    if (_isDisposed) {
+      return;
+    }
+    // Fire-and-forget: failures while cancelling the orientation stream are
+    // deliberately ignored during teardown.
+    _unawaited(_deviceOrientationSubscription?.cancel());
+    _isDisposed = true;
+    super.dispose();
+    // Only dispose the platform camera if initialization was ever started,
+    // and wait for it to complete first to avoid racing create/dispose.
+    // NOTE(review): _imageStreamSubscription is not cancelled here —
+    // presumably the platform dispose tears it down; verify.
+    if (_initCalled != null) {
+      await _initCalled;
+      await CameraPlatform.instance.dispose(_cameraId);
+    }
+  }
+
+  /// Throws a [CameraException] if the controller has not been initialized
+  /// yet, or has already been disposed; [functionName] is interpolated into
+  /// the error description for the caller's benefit.
+  void _throwIfNotInitialized(String functionName) {
+    if (!value.isInitialized) {
+      throw CameraException(
+        'Uninitialized CameraController',
+        '$functionName() was called on an uninitialized CameraController.',
+      );
+    }
+    if (_isDisposed) {
+      throw CameraException(
+        'Disposed CameraController',
+        '$functionName() was called on a disposed CameraController.',
+      );
+    }
+  }
+
+  /// Removes the given [listener]; a no-op once the controller is disposed.
+  @override
+  void removeListener(VoidCallback listener) {
+    // Prevent ValueListenableBuilder in CameraPreview widget from causing an
+    // exception to be thrown by attempting to remove its own listener after
+    // the controller has already been disposed.
+    if (!_isDisposed) {
+      super.removeListener(listener);
+    }
+  }
+}
+
+/// A value that might be absent.
+///
+/// Used to represent [DeviceOrientation]s that are optional but also able
+/// to be cleared.
+@immutable
+class Optional<T> extends IterableBase<T> {
+  /// Constructs an empty Optional.
+  const Optional.absent() : _value = null;
+
+  /// Constructs an Optional of the given [value].
+  ///
+  /// Throws [ArgumentError] if [value] is null.
+  Optional.of(T value) : _value = value {
+    // TODO(cbracken): Delete and make this ctor const once mixed-mode
+    // execution is no longer around.
+    ArgumentError.checkNotNull(value);
+  }
+
+  /// Constructs an Optional of the given [value].
+  ///
+  /// If [value] is null, returns [absent()].
+  const Optional.fromNullable(T? value) : _value = value;
+
+  // The wrapped value; null represents the absent state.
+  final T? _value;
+
+  /// True when this optional contains a value.
+  bool get isPresent => _value != null;
+
+  /// True when this optional contains no value.
+  bool get isNotPresent => _value == null;
+
+  /// Gets the Optional value.
+  ///
+  /// Throws [StateError] if [value] is null.
+  T get value {
+    if (_value == null) {
+      throw StateError('value called on absent Optional.');
+    }
+    return _value!;
+  }
+
+  /// Executes a function if the Optional value is present.
+  void ifPresent(void Function(T value) ifPresent) {
+    if (isPresent) {
+      ifPresent(_value as T);
+    }
+  }
+
+  /// Executes a function if the Optional value is absent.
+  void ifAbsent(void Function() ifAbsent) {
+    if (!isPresent) {
+      ifAbsent();
+    }
+  }
+
+  /// Gets the Optional value with a default.
+  ///
+  /// The default is returned if the Optional is [absent()].
+  ///
+  /// Throws [ArgumentError] if [defaultValue] is null.
+  T or(T defaultValue) {
+    return _value ?? defaultValue;
+  }
+
+  /// Gets the Optional value, or `null` if there is none.
+  T? get orNull => _value;
+
+  /// Transforms the Optional value.
+  ///
+  /// If the Optional is [absent()], returns [absent()] without applying the transformer.
+  ///
+  /// The transformer must not return `null`. If it does, an [ArgumentError] is thrown.
+  Optional<S> transform<S>(S Function(T value) transformer) {
+    return _value == null
+        ? Optional<S>.absent()
+        : Optional<S>.of(transformer(_value as T));
+  }
+
+  /// Transforms the Optional value.
+  ///
+  /// If the Optional is [absent()], returns [absent()] without applying the transformer.
+  ///
+  /// Returns [absent()] if the transformer returns `null`.
+  Optional<S> transformNullable<S>(S? Function(T value) transformer) {
+    return _value == null
+        ? Optional<S>.absent()
+        : Optional<S>.fromNullable(transformer(_value as T));
+  }
+
+  // An Optional iterates over zero elements (absent) or one (present).
+  @override
+  Iterator<T> get iterator =>
+      isPresent ? <T>[_value as T].iterator : Iterable<T>.empty().iterator;
+
+  /// Delegates to the underlying [value] hashCode.
+  @override
+  int get hashCode => _value.hashCode;
+
+  /// Delegates to the underlying [value] operator==.
+  @override
+  bool operator ==(Object o) => o is Optional<T> && o._value == _value;
+
+  @override
+  String toString() {
+    return _value == null
+        ? 'Optional { absent }'
+        : 'Optional { value: $_value }';
+  }
+}
diff --git a/packages/camera/camera/lib/src/camera_image.dart b/packages/camera/camera/lib/src/camera_image.dart
new file mode 100644
index 0000000..bfcad66
--- /dev/null
+++ b/packages/camera/camera/lib/src/camera_image.dart
@@ -0,0 +1,177 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+
+// TODO(stuartmorgan): Remove all of these classes in a breaking change, and
+// vend the platform interface versions directly. See
+// https://github.com/flutter/flutter/issues/104188
+
+/// A single color plane of image data.
+///
+/// The number and meaning of the planes in an image are determined by the
+/// format of the Image.
+class Plane {
+  Plane._fromPlatformInterface(CameraImagePlane plane)
+      : bytes = plane.bytes,
+        bytesPerPixel = plane.bytesPerPixel,
+        bytesPerRow = plane.bytesPerRow,
+        height = plane.height,
+        width = plane.width;
+
+  // Only used by the deprecated codepath that's kept to avoid breaking changes.
+  // Never called by the plugin itself.
+  // The `as` casts throw if the method-channel map is missing an entry or
+  // carries an unexpected type.
+  Plane._fromPlatformData(Map<dynamic, dynamic> data)
+      : bytes = data['bytes'] as Uint8List,
+        bytesPerPixel = data['bytesPerPixel'] as int?,
+        bytesPerRow = data['bytesPerRow'] as int,
+        height = data['height'] as int?,
+        width = data['width'] as int?;
+
+  /// Bytes representing this plane.
+  final Uint8List bytes;
+
+  /// The distance between adjacent pixel samples on Android, in bytes.
+  ///
+  /// Will be `null` on iOS.
+  final int? bytesPerPixel;
+
+  /// The row stride for this color plane, in bytes.
+  final int bytesPerRow;
+
+  /// Height of the pixel buffer on iOS.
+  ///
+  /// Will be `null` on Android
+  final int? height;
+
+  /// Width of the pixel buffer on iOS.
+  ///
+  /// Will be `null` on Android.
+  final int? width;
+}
+
+/// Describes how pixels are represented in an image.
+class ImageFormat {
+  ImageFormat._fromPlatformInterface(CameraImageFormat format)
+      : group = format.group,
+        raw = format.raw;
+
+  // Only used by the deprecated codepath that's kept to avoid breaking changes.
+  // Never called by the plugin itself.
+  // Derives [group] by mapping the raw platform constant.
+  ImageFormat._fromPlatformData(this.raw) : group = _asImageFormatGroup(raw);
+
+  /// Describes the format group the raw image format falls into.
+  final ImageFormatGroup group;
+
+  /// Raw version of the format from the Android or iOS platform.
+  ///
+  /// On Android, this is an `int` from class `android.graphics.ImageFormat`. See
+  /// https://developer.android.com/reference/android/graphics/ImageFormat
+  ///
+  /// On iOS, this is a `FourCharCode` constant from Pixel Format Identifiers.
+  /// See https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers?language=objc
+  final dynamic raw;
+}
+
+// Only used by the deprecated codepath that's kept to avoid breaking changes.
+// Never called by the plugin itself.
+//
+// Maps a raw platform pixel-format constant to an [ImageFormatGroup].
+// Returns [ImageFormatGroup.unknown] for any constant or platform not
+// matched below.
+ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) {
+  if (defaultTargetPlatform == TargetPlatform.android) {
+    switch (rawFormat) {
+      // android.graphics.ImageFormat.YUV_420_888
+      case 35:
+        return ImageFormatGroup.yuv420;
+      // android.graphics.ImageFormat.JPEG
+      case 256:
+        return ImageFormatGroup.jpeg;
+    }
+  }
+
+  if (defaultTargetPlatform == TargetPlatform.iOS) {
+    switch (rawFormat) {
+      // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+      case 875704438:
+        return ImageFormatGroup.yuv420;
+      // kCVPixelFormatType_32BGRA
+      case 1111970369:
+        return ImageFormatGroup.bgra8888;
+    }
+  }
+
+  return ImageFormatGroup.unknown;
+}
+
+/// A single complete image buffer from the platform camera.
+///
+/// This class allows for direct application access to the pixel data of an
+/// Image through one or more [Uint8List]. Each buffer is encapsulated in a
+/// [Plane] that describes the layout of the pixel data in that plane. The
+/// [CameraImage] is not directly usable as a UI resource.
+///
+/// Although not all image formats are planar on iOS, we treat 1-dimensional
+/// images as single planar images.
+class CameraImage {
+  /// Creates a [CameraImage] from the platform interface version.
+  CameraImage.fromPlatformInterface(CameraImageData data)
+      : format = ImageFormat._fromPlatformInterface(data.format),
+        height = data.height,
+        width = data.width,
+        // Planes are exposed as an unmodifiable list so callers cannot
+        // mutate the translated data.
+        planes = List<Plane>.unmodifiable(data.planes.map<Plane>(
+            (CameraImagePlane plane) => Plane._fromPlatformInterface(plane))),
+        lensAperture = data.lensAperture,
+        sensorExposureTime = data.sensorExposureTime,
+        sensorSensitivity = data.sensorSensitivity;
+
+  /// Creates a [CameraImage] from method channel data.
+  @Deprecated('Use fromPlatformInterface instead')
+  CameraImage.fromPlatformData(Map<dynamic, dynamic> data)
+      : format = ImageFormat._fromPlatformData(data['format']),
+        height = data['height'] as int,
+        width = data['width'] as int,
+        lensAperture = data['lensAperture'] as double?,
+        sensorExposureTime = data['sensorExposureTime'] as int?,
+        sensorSensitivity = data['sensorSensitivity'] as double?,
+        planes = List<Plane>.unmodifiable((data['planes'] as List<dynamic>)
+            .map<Plane>((dynamic planeData) =>
+                Plane._fromPlatformData(planeData as Map<dynamic, dynamic>)));
+
+  /// Format of the image provided.
+  ///
+  /// Determines the number of planes needed to represent the image, and
+  /// the general layout of the pixel data in each [Uint8List].
+  final ImageFormat format;
+
+  /// Height of the image in pixels.
+  ///
+  /// For formats where some color channels are subsampled, this is the height
+  /// of the largest-resolution plane.
+  final int height;
+
+  /// Width of the image in pixels.
+  ///
+  /// For formats where some color channels are subsampled, this is the width
+  /// of the largest-resolution plane.
+  final int width;
+
+  /// The pixels planes for this image.
+  ///
+  /// The number of planes is determined by the format of the image.
+  final List<Plane> planes;
+
+  /// The aperture settings for this image.
+  ///
+  /// Represented as an f-stop value.
+  final double? lensAperture;
+
+  /// The sensor exposure time for this image in nanoseconds.
+  final int? sensorExposureTime;
+
+  /// The sensor sensitivity in standard ISO arithmetic units.
+  final double? sensorSensitivity;
+}
diff --git a/packages/camera/camera/lib/src/camera_preview.dart b/packages/camera/camera/lib/src/camera_preview.dart
new file mode 100644
index 0000000..d8eadd8
--- /dev/null
+++ b/packages/camera/camera/lib/src/camera_preview.dart
@@ -0,0 +1,82 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+import '../camera.dart';
+
+/// A widget showing a live camera preview.
+class CameraPreview extends StatelessWidget {
+  /// Creates a preview widget for the given camera controller.
+  const CameraPreview(this.controller, {Key? key, this.child})
+      : super(key: key);
+
+  /// The controller for the camera that the preview is shown for.
+  final CameraController controller;
+
+  /// A widget to overlay on top of the camera preview
+  final Widget? child;
+
+  @override
+  Widget build(BuildContext context) {
+    // Shows an empty Container until the controller reports initialized.
+    return controller.value.isInitialized
+        ? ValueListenableBuilder<CameraValue>(
+            // Rebuild whenever the controller notifies (orientation,
+            // recording state, etc.).
+            valueListenable: controller,
+            builder: (BuildContext context, Object? value, Widget? child) {
+              return AspectRatio(
+                // In portrait the ratio is inverted — presumably because the
+                // controller reports a landscape-oriented ratio; confirm
+                // against CameraValue.aspectRatio.
+                aspectRatio: _isLandscape()
+                    ? controller.value.aspectRatio
+                    : (1 / controller.value.aspectRatio),
+                child: Stack(
+                  fit: StackFit.expand,
+                  children: <Widget>[
+                    _wrapInRotatedBox(child: controller.buildPreview()),
+                    child ?? Container(),
+                  ],
+                ),
+              );
+            },
+            child: child,
+          )
+        : Container();
+  }
+
+  // Only the Android preview is rotated here; on web and all other platforms
+  // the child is returned unrotated.
+  Widget _wrapInRotatedBox({required Widget child}) {
+    if (kIsWeb || defaultTargetPlatform != TargetPlatform.android) {
+      return child;
+    }
+
+    return RotatedBox(
+      quarterTurns: _getQuarterTurns(),
+      child: child,
+    );
+  }
+
+  bool _isLandscape() {
+    return <DeviceOrientation>[
+      DeviceOrientation.landscapeLeft,
+      DeviceOrientation.landscapeRight
+    ].contains(_getApplicableOrientation());
+  }
+
+  // Quarter turns needed to upright the preview for the applicable
+  // orientation.
+  int _getQuarterTurns() {
+    final Map<DeviceOrientation, int> turns = <DeviceOrientation, int>{
+      DeviceOrientation.portraitUp: 0,
+      DeviceOrientation.landscapeRight: 1,
+      DeviceOrientation.portraitDown: 2,
+      DeviceOrientation.landscapeLeft: 3,
+    };
+    return turns[_getApplicableOrientation()]!;
+  }
+
+  // While recording, the orientation captured at recording start wins;
+  // otherwise pause/locked orientations take precedence over the live
+  // device orientation.
+  DeviceOrientation _getApplicableOrientation() {
+    return controller.value.isRecordingVideo
+        ? controller.value.recordingOrientation!
+        : (controller.value.previewPauseOrientation ??
+            controller.value.lockedCaptureOrientation ??
+            controller.value.deviceOrientation);
+  }
+}
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
new file mode 100644
index 0000000..1b902ab
--- /dev/null
+++ b/packages/camera/camera/pubspec.yaml
@@ -0,0 +1,40 @@
+name: camera
+description: A Flutter plugin for controlling the camera. Supports previewing
+ the camera feed, capturing images and video, and streaming image buffers to
+ Dart.
+repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera
+issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
+version: 0.10.3
+
+environment:
+ sdk: ">=2.14.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+flutter:
+ plugin:
+ platforms:
+ android:
+ default_package: camera_android
+ ios:
+ default_package: camera_avfoundation
+ web:
+ default_package: camera_web
+
+dependencies:
+ camera_android: ^0.10.1
+ camera_avfoundation: ^0.9.9
+ camera_platform_interface: ^2.3.2
+ camera_web: ^0.3.1
+ flutter:
+ sdk: flutter
+ flutter_plugin_android_lifecycle: ^2.0.2
+ quiver: ^3.0.0
+
+dev_dependencies:
+ flutter_driver:
+ sdk: flutter
+ flutter_test:
+ sdk: flutter
+ mockito: ^5.0.0
+ plugin_platform_interface: ^2.0.0
+ video_player: ^2.0.0
diff --git a/packages/camera/camera/test/camera_image_stream_test.dart b/packages/camera/camera/test/camera_image_stream_test.dart
new file mode 100644
index 0000000..29b5cce
--- /dev/null
+++ b/packages/camera/camera/test/camera_image_stream_test.dart
@@ -0,0 +1,243 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:camera/camera.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'camera_test.dart';
+
+void main() {
+  TestWidgetsFlutterBinding.ensureInitialized();
+  late MockStreamingCameraPlatform mockPlatform;
+
+  setUp(() {
+    // Install a mock platform that records streaming-related calls so each
+    // test can assert on the exact call sequence.
+    mockPlatform = MockStreamingCameraPlatform();
+    CameraPlatform.instance = mockPlatform;
+  });
+
+  test('startImageStream() throws $CameraException when uninitialized', () {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+
+    expect(
+      () => cameraController.startImageStream((CameraImage image) => null),
+      throwsA(
+        isA<CameraException>()
+            .having(
+              (CameraException error) => error.code,
+              'code',
+              'Uninitialized CameraController',
+            )
+            .having(
+              (CameraException error) => error.description,
+              'description',
+              'startImageStream() was called on an uninitialized CameraController.',
+            ),
+      ),
+    );
+  });
+
+  test('startImageStream() throws $CameraException when recording videos',
+      () async {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+
+    await cameraController.initialize();
+
+    // Simulate an in-progress recording without touching the platform.
+    cameraController.value =
+        cameraController.value.copyWith(isRecordingVideo: true);
+
+    // NOTE(review): `having`'s second argument is a description label, not an
+    // expected value; here (and in the matchers below) the expected code
+    // string is passed as the label. It still matches, but verify intent.
+    expect(
+        () => cameraController.startImageStream((CameraImage image) => null),
+        throwsA(isA<CameraException>().having(
+          (CameraException error) => error.description,
+          'A video recording is already started.',
+          'startImageStream was called while a video is being recorded.',
+        )));
+  });
+  test(
+      'startImageStream() throws $CameraException when already streaming images',
+      () async {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+    await cameraController.initialize();
+
+    cameraController.value =
+        cameraController.value.copyWith(isStreamingImages: true);
+    expect(
+        () => cameraController.startImageStream((CameraImage image) => null),
+        throwsA(isA<CameraException>().having(
+          (CameraException error) => error.description,
+          'A camera has started streaming images.',
+          'startImageStream was called while a camera was streaming images.',
+        )));
+  });
+
+  test('startImageStream() calls CameraPlatform', () async {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+    await cameraController.initialize();
+
+    await cameraController.startImageStream((CameraImage image) => null);
+
+    // The platform stream is created first, then listened to.
+    expect(mockPlatform.streamCallLog,
+        <String>['onStreamedFrameAvailable', 'listen']);
+  });
+
+  test('stopImageStream() throws $CameraException when uninitialized', () {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+
+    expect(
+      cameraController.stopImageStream,
+      throwsA(
+        isA<CameraException>()
+            .having(
+              (CameraException error) => error.code,
+              'code',
+              'Uninitialized CameraController',
+            )
+            .having(
+              (CameraException error) => error.description,
+              'description',
+              'stopImageStream() was called on an uninitialized CameraController.',
+            ),
+      ),
+    );
+  });
+
+  test('stopImageStream() throws $CameraException when not streaming images',
+      () async {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+    await cameraController.initialize();
+
+    expect(
+        cameraController.stopImageStream,
+        throwsA(isA<CameraException>().having(
+          (CameraException error) => error.description,
+          'No camera is streaming images',
+          'stopImageStream was called when no camera is streaming images.',
+        )));
+  });
+
+  test('stopImageStream() intended behaviour', () async {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+    await cameraController.initialize();
+    await cameraController.startImageStream((CameraImage image) => null);
+    await cameraController.stopImageStream();
+
+    // Full lifecycle: stream created, listened to, then cancelled.
+    expect(mockPlatform.streamCallLog,
+        <String>['onStreamedFrameAvailable', 'listen', 'cancel']);
+  });
+
+  test('startVideoRecording() can stream images', () async {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+
+    await cameraController.initialize();
+
+    // Not awaited — the test only asserts that the platform call was issued.
+    cameraController.startVideoRecording(
+        onAvailable: (CameraImage image) => null);
+
+    expect(
+        mockPlatform.streamCallLog.contains('startVideoCapturing with stream'),
+        isTrue);
+  });
+
+  test('startVideoRecording() by default does not stream', () async {
+    final CameraController cameraController = CameraController(
+        const CameraDescription(
+            name: 'cam',
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 90),
+        ResolutionPreset.max);
+
+    await cameraController.initialize();
+
+    // Not awaited — the test only asserts that the platform call was issued.
+    cameraController.startVideoRecording();
+
+    expect(mockPlatform.streamCallLog.contains('startVideoCapturing'), isTrue);
+  });
+}
+
+/// A [MockCameraPlatform] that records stream- and recording-related calls
+/// in [streamCallLog] so tests can assert on the call sequence.
+class MockStreamingCameraPlatform extends MockCameraPlatform {
+  // Ordered log of platform calls observed during a test.
+  List<String> streamCallLog = <String>[];
+
+  // Backs the frame stream; created on request and reset to null on cancel.
+  StreamController<CameraImageData>? _streamController;
+
+  @override
+  Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
+      {CameraImageStreamOptions? options}) {
+    streamCallLog.add('onStreamedFrameAvailable');
+    _streamController = StreamController<CameraImageData>(
+      onListen: _onFrameStreamListen,
+      onCancel: _onFrameStreamCancel,
+    );
+    return _streamController!.stream;
+  }
+
+  @override
+  Future<XFile> startVideoRecording(int cameraId,
+      {Duration? maxVideoDuration}) {
+    streamCallLog.add('startVideoRecording');
+    return super
+        .startVideoRecording(cameraId, maxVideoDuration: maxVideoDuration);
+  }
+
+  @override
+  Future<void> startVideoCapturing(VideoCaptureOptions options) {
+    // Record whether the capture request included a frame-stream callback.
+    if (options.streamCallback == null) {
+      streamCallLog.add('startVideoCapturing');
+    } else {
+      streamCallLog.add('startVideoCapturing with stream');
+    }
+    return super.startVideoCapturing(options);
+  }
+
+  void _onFrameStreamListen() {
+    streamCallLog.add('listen');
+  }
+
+  FutureOr<void> _onFrameStreamCancel() async {
+    streamCallLog.add('cancel');
+    _streamController = null;
+  }
+}
diff --git a/packages/camera/camera/test/camera_image_test.dart b/packages/camera/camera/test/camera_image_test.dart
new file mode 100644
index 0000000..ecf4b50
--- /dev/null
+++ b/packages/camera/camera/test/camera_image_test.dart
@@ -0,0 +1,187 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera/camera.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('translates correctly from platform interface classes', () {
+ final CameraImageData originalImage = CameraImageData(
+ format: const CameraImageFormat(ImageFormatGroup.jpeg, raw: 1234),
+ planes: <CameraImagePlane>[
+ CameraImagePlane(
+ bytes: Uint8List.fromList(<int>[1, 2, 3, 4]),
+ bytesPerRow: 20,
+ bytesPerPixel: 3,
+ width: 200,
+ height: 100,
+ ),
+ CameraImagePlane(
+ bytes: Uint8List.fromList(<int>[5, 6, 7, 8]),
+ bytesPerRow: 18,
+ bytesPerPixel: 4,
+ width: 220,
+ height: 110,
+ ),
+ ],
+ width: 640,
+ height: 480,
+ lensAperture: 2.5,
+ sensorExposureTime: 5,
+ sensorSensitivity: 1.3,
+ );
+
+ final CameraImage image = CameraImage.fromPlatformInterface(originalImage);
+ // Simple values.
+ expect(image.width, 640);
+ expect(image.height, 480);
+ expect(image.lensAperture, 2.5);
+ expect(image.sensorExposureTime, 5);
+ expect(image.sensorSensitivity, 1.3);
+ // Format.
+ expect(image.format.group, ImageFormatGroup.jpeg);
+ expect(image.format.raw, 1234);
+ // Planes.
+ expect(image.planes.length, originalImage.planes.length);
+ for (int i = 0; i < image.planes.length; i++) {
+ expect(
+ image.planes[i].bytes.length, originalImage.planes[i].bytes.length);
+ for (int j = 0; j < image.planes[i].bytes.length; j++) {
+ expect(image.planes[i].bytes[j], originalImage.planes[i].bytes[j]);
+ }
+ expect(
+ image.planes[i].bytesPerPixel, originalImage.planes[i].bytesPerPixel);
+ expect(image.planes[i].bytesPerRow, originalImage.planes[i].bytesPerRow);
+ expect(image.planes[i].width, originalImage.planes[i].width);
+ expect(image.planes[i].height, originalImage.planes[i].height);
+ }
+ });
+
+ group('legacy constructors', () {
+ test('$CameraImage can be created', () {
+ debugDefaultTargetPlatformOverride = TargetPlatform.android;
+ final CameraImage cameraImage =
+ CameraImage.fromPlatformData(<dynamic, dynamic>{
+ 'format': 35,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.height, 1);
+ expect(cameraImage.width, 4);
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ expect(cameraImage.planes.length, 1);
+ });
+
+ test('$CameraImage has ImageFormatGroup.yuv420 for iOS', () {
+ debugDefaultTargetPlatformOverride = TargetPlatform.iOS;
+
+ final CameraImage cameraImage =
+ CameraImage.fromPlatformData(<dynamic, dynamic>{
+ 'format': 875704438,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ });
+
+ test('$CameraImage has ImageFormatGroup.yuv420 for Android', () {
+ debugDefaultTargetPlatformOverride = TargetPlatform.android;
+
+ final CameraImage cameraImage =
+ CameraImage.fromPlatformData(<dynamic, dynamic>{
+ 'format': 35,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ });
+
+ test('$CameraImage has ImageFormatGroup.bgra8888 for iOS', () {
+ debugDefaultTargetPlatformOverride = TargetPlatform.iOS;
+
+ final CameraImage cameraImage =
+ CameraImage.fromPlatformData(<dynamic, dynamic>{
+ 'format': 1111970369,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.bgra8888);
+ });
+ test('$CameraImage has ImageFormatGroup.unknown', () {
+ final CameraImage cameraImage =
+ CameraImage.fromPlatformData(<dynamic, dynamic>{
+ 'format': null,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.unknown);
+ });
+ });
+}
diff --git a/packages/camera/camera/test/camera_preview_test.dart b/packages/camera/camera/test/camera_preview_test.dart
new file mode 100644
index 0000000..6677fcf
--- /dev/null
+++ b/packages/camera/camera/test/camera_preview_test.dart
@@ -0,0 +1,244 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera/camera.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+class FakeController extends ValueNotifier<CameraValue>
+ implements CameraController {
+ FakeController() : super(const CameraValue.uninitialized());
+
+ @override
+ Future<void> dispose() async {
+ super.dispose();
+ }
+
+ @override
+ Widget buildPreview() {
+ return const Texture(textureId: CameraController.kUninitializedCameraId);
+ }
+
+ @override
+ int get cameraId => CameraController.kUninitializedCameraId;
+
+ @override
+ void debugCheckIsDisposed() {}
+
+ @override
+ CameraDescription get description => const CameraDescription(
+ name: '', lensDirection: CameraLensDirection.back, sensorOrientation: 0);
+
+ @override
+ bool get enableAudio => false;
+
+ @override
+ Future<double> getExposureOffsetStepSize() async => 1.0;
+
+ @override
+ Future<double> getMaxExposureOffset() async => 1.0;
+
+ @override
+ Future<double> getMaxZoomLevel() async => 1.0;
+
+ @override
+ Future<double> getMinExposureOffset() async => 1.0;
+
+ @override
+ Future<double> getMinZoomLevel() async => 1.0;
+
+ @override
+ ImageFormatGroup? get imageFormatGroup => null;
+
+ @override
+ Future<void> initialize() async {}
+
+ @override
+ Future<void> lockCaptureOrientation([DeviceOrientation? orientation]) async {}
+
+ @override
+ Future<void> pauseVideoRecording() async {}
+
+ @override
+ Future<void> prepareForVideoRecording() async {}
+
+ @override
+ ResolutionPreset get resolutionPreset => ResolutionPreset.low;
+
+ @override
+ Future<void> resumeVideoRecording() async {}
+
+ @override
+ Future<void> setExposureMode(ExposureMode mode) async {}
+
+ @override
+ Future<double> setExposureOffset(double offset) async => offset;
+
+ @override
+ Future<void> setExposurePoint(Offset? point) async {}
+
+ @override
+ Future<void> setFlashMode(FlashMode mode) async {}
+
+ @override
+ Future<void> setFocusMode(FocusMode mode) async {}
+
+ @override
+ Future<void> setFocusPoint(Offset? point) async {}
+
+ @override
+ Future<void> setZoomLevel(double zoom) async {}
+
+ @override
+ Future<void> startImageStream(onLatestImageAvailable onAvailable) async {}
+
+ @override
+ Future<void> startVideoRecording(
+ {onLatestImageAvailable? onAvailable}) async {}
+
+ @override
+ Future<void> stopImageStream() async {}
+
+ @override
+ Future<XFile> stopVideoRecording() async => XFile('');
+
+ @override
+ Future<XFile> takePicture() async => XFile('');
+
+ @override
+ Future<void> unlockCaptureOrientation() async {}
+
+ @override
+ Future<void> pausePreview() async {}
+
+ @override
+ Future<void> resumePreview() async {}
+}
+
+void main() {
+ group('RotatedBox (Android only)', () {
+ testWidgets(
+ 'when recording rotatedBox should turn according to recording orientation',
+ (
+ WidgetTester tester,
+ ) async {
+ debugDefaultTargetPlatformOverride = TargetPlatform.android;
+
+ final FakeController controller = FakeController();
+ controller.value = controller.value.copyWith(
+ isInitialized: true,
+ isRecordingVideo: true,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ lockedCaptureOrientation:
+ const Optional<DeviceOrientation>.fromNullable(
+ DeviceOrientation.landscapeRight),
+ recordingOrientation: const Optional<DeviceOrientation>.fromNullable(
+ DeviceOrientation.landscapeLeft),
+ previewSize: const Size(480, 640),
+ );
+
+ await tester.pumpWidget(
+ Directionality(
+ textDirection: TextDirection.ltr,
+ child: CameraPreview(controller),
+ ),
+ );
+ expect(find.byType(RotatedBox), findsOneWidget);
+
+ final RotatedBox rotatedBox =
+ tester.widget<RotatedBox>(find.byType(RotatedBox));
+ expect(rotatedBox.quarterTurns, 3);
+
+ debugDefaultTargetPlatformOverride = null;
+ });
+
+ testWidgets(
+ 'when orientation locked rotatedBox should turn according to locked orientation',
+ (
+ WidgetTester tester,
+ ) async {
+ debugDefaultTargetPlatformOverride = TargetPlatform.android;
+
+ final FakeController controller = FakeController();
+ controller.value = controller.value.copyWith(
+ isInitialized: true,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ lockedCaptureOrientation:
+ const Optional<DeviceOrientation>.fromNullable(
+ DeviceOrientation.landscapeRight),
+ recordingOrientation: const Optional<DeviceOrientation>.fromNullable(
+ DeviceOrientation.landscapeLeft),
+ previewSize: const Size(480, 640),
+ );
+
+ await tester.pumpWidget(
+ Directionality(
+ textDirection: TextDirection.ltr,
+ child: CameraPreview(controller),
+ ),
+ );
+ expect(find.byType(RotatedBox), findsOneWidget);
+
+ final RotatedBox rotatedBox =
+ tester.widget<RotatedBox>(find.byType(RotatedBox));
+ expect(rotatedBox.quarterTurns, 1);
+
+ debugDefaultTargetPlatformOverride = null;
+ });
+
+ testWidgets(
+ 'when not locked and not recording rotatedBox should turn according to device orientation',
+ (
+ WidgetTester tester,
+ ) async {
+ debugDefaultTargetPlatformOverride = TargetPlatform.android;
+
+ final FakeController controller = FakeController();
+ controller.value = controller.value.copyWith(
+ isInitialized: true,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ recordingOrientation: const Optional<DeviceOrientation>.fromNullable(
+ DeviceOrientation.landscapeLeft),
+ previewSize: const Size(480, 640),
+ );
+
+ await tester.pumpWidget(
+ Directionality(
+ textDirection: TextDirection.ltr,
+ child: CameraPreview(controller),
+ ),
+ );
+ expect(find.byType(RotatedBox), findsOneWidget);
+
+ final RotatedBox rotatedBox =
+ tester.widget<RotatedBox>(find.byType(RotatedBox));
+ expect(rotatedBox.quarterTurns, 0);
+
+ debugDefaultTargetPlatformOverride = null;
+ });
+ }, skip: kIsWeb);
+
+ testWidgets('when not on Android there should not be a rotated box',
+ (WidgetTester tester) async {
+ debugDefaultTargetPlatformOverride = TargetPlatform.iOS;
+ final FakeController controller = FakeController();
+ controller.value = controller.value.copyWith(
+ isInitialized: true,
+ previewSize: const Size(480, 640),
+ );
+
+ await tester.pumpWidget(
+ Directionality(
+ textDirection: TextDirection.ltr,
+ child: CameraPreview(controller),
+ ),
+ );
+ expect(find.byType(RotatedBox), findsNothing);
+ expect(find.byType(Texture), findsOneWidget);
+ debugDefaultTargetPlatformOverride = null;
+ });
+}
diff --git a/packages/camera/camera/test/camera_test.dart b/packages/camera/camera/test/camera_test.dart
new file mode 100644
index 0000000..ab8354f
--- /dev/null
+++ b/packages/camera/camera/test/camera_test.dart
@@ -0,0 +1,1537 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:camera/camera.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:mockito/mockito.dart';
+import 'package:plugin_platform_interface/plugin_platform_interface.dart';
+
+List<CameraDescription> get mockAvailableCameras => <CameraDescription>[
+ const CameraDescription(
+ name: 'camBack',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ const CameraDescription(
+ name: 'camFront',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 180),
+ ];
+
+int get mockInitializeCamera => 13;
+
+CameraInitializedEvent get mockOnCameraInitializedEvent =>
+ const CameraInitializedEvent(
+ 13,
+ 75,
+ 75,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ );
+
+DeviceOrientationChangedEvent get mockOnDeviceOrientationChangedEvent =>
+ const DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+
+CameraClosingEvent get mockOnCameraClosingEvent => const CameraClosingEvent(13);
+
+CameraErrorEvent get mockOnCameraErrorEvent =>
+ const CameraErrorEvent(13, 'closing');
+
+XFile mockTakePicture = XFile('foo/bar.png');
+
+XFile mockVideoRecordingXFile = XFile('foo/bar.mpeg');
+
+bool mockPlatformException = false;
+
+void main() {
+ WidgetsFlutterBinding.ensureInitialized();
+
+ group('camera', () {
+ test('debugCheckIsDisposed should not throw assertion error when disposed',
+ () {
+ const MockCameraDescription description = MockCameraDescription();
+ final CameraController controller = CameraController(
+ description,
+ ResolutionPreset.low,
+ );
+
+ controller.dispose();
+
+ expect(controller.debugCheckIsDisposed, returnsNormally);
+ });
+
+ test('debugCheckIsDisposed should throw assertion error when not disposed',
+ () {
+ const MockCameraDescription description = MockCameraDescription();
+ final CameraController controller = CameraController(
+ description,
+ ResolutionPreset.low,
+ );
+
+ expect(
+ () => controller.debugCheckIsDisposed(),
+ throwsAssertionError,
+ );
+ });
+
+ test('availableCameras() has camera', () async {
+ CameraPlatform.instance = MockCameraPlatform();
+
+ final List<CameraDescription> camList = await availableCameras();
+
+ expect(camList, equals(mockAvailableCameras));
+ });
+ });
+
+ group('$CameraController', () {
+ setUpAll(() {
+ CameraPlatform.instance = MockCameraPlatform();
+ });
+
+ test('Can be initialized', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ expect(cameraController.value.aspectRatio, 1);
+ expect(cameraController.value.previewSize, const Size(75, 75));
+ expect(cameraController.value.isInitialized, isTrue);
+ });
+
+ test('can be disposed', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ expect(cameraController.value.aspectRatio, 1);
+ expect(cameraController.value.previewSize, const Size(75, 75));
+ expect(cameraController.value.isInitialized, isTrue);
+
+ await cameraController.dispose();
+
+ verify(CameraPlatform.instance.dispose(13)).called(1);
+ });
+
+ test('initialize() throws CameraException when disposed', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ expect(cameraController.value.aspectRatio, 1);
+ expect(cameraController.value.previewSize, const Size(75, 75));
+ expect(cameraController.value.isInitialized, isTrue);
+
+ await cameraController.dispose();
+
+ verify(CameraPlatform.instance.dispose(13)).called(1);
+
+ expect(
+ cameraController.initialize,
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'Error description',
+ 'initialize was called on a disposed CameraController',
+ )));
+ });
+
+ test('initialize() throws $CameraException on $PlatformException ',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ mockPlatformException = true;
+
+ expect(
+ cameraController.initialize,
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'foo',
+ 'bar',
+ )));
+ mockPlatformException = false;
+ });
+
+ test('initialize() sets imageFormat', () async {
+ debugDefaultTargetPlatformOverride = TargetPlatform.android;
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max,
+ imageFormatGroup: ImageFormatGroup.yuv420,
+ );
+ await cameraController.initialize();
+ verify(CameraPlatform.instance
+ .initializeCamera(13, imageFormatGroup: ImageFormatGroup.yuv420))
+ .called(1);
+ });
+
+ test('prepareForVideoRecording() calls $CameraPlatform ', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ await cameraController.prepareForVideoRecording();
+
+ verify(CameraPlatform.instance.prepareForVideoRecording()).called(1);
+ });
+
+ test('takePicture() throws $CameraException when uninitialized ', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ expect(
+ cameraController.takePicture(),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException error) => error.code,
+ 'code',
+ 'Uninitialized CameraController',
+ )
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'takePicture() was called on an uninitialized CameraController.',
+ ),
+ ),
+ );
+ });
+
+ test('takePicture() throws $CameraException when takePicture is true',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ cameraController.value =
+ cameraController.value.copyWith(isTakingPicture: true);
+ expect(
+ cameraController.takePicture(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'Previous capture has not returned yet.',
+ 'takePicture was called before the previous capture returned.',
+ )));
+ });
+
+ test('takePicture() returns $XFile', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ final XFile xFile = await cameraController.takePicture();
+
+ expect(xFile.path, mockTakePicture.path);
+ });
+
+ test('takePicture() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ mockPlatformException = true;
+ expect(
+ cameraController.takePicture(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'foo',
+ 'bar',
+ )));
+ mockPlatformException = false;
+ });
+
+ test('startVideoRecording() throws $CameraException when uninitialized',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ expect(
+ cameraController.startVideoRecording(),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException error) => error.code,
+ 'code',
+ 'Uninitialized CameraController',
+ )
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'startVideoRecording() was called on an uninitialized CameraController.',
+ ),
+ ),
+ );
+ });
+ test('startVideoRecording() throws $CameraException when recording videos',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+
+ cameraController.value =
+ cameraController.value.copyWith(isRecordingVideo: true);
+
+ expect(
+ cameraController.startVideoRecording(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'A video recording is already started.',
+ 'startVideoRecording was called when a recording is already started.',
+ )));
+ });
+
+ test('getMaxZoomLevel() throws $CameraException when uninitialized',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ expect(
+ cameraController.getMaxZoomLevel,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException error) => error.code,
+ 'code',
+ 'Uninitialized CameraController',
+ )
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'getMaxZoomLevel() was called on an uninitialized CameraController.',
+ ),
+ ),
+ );
+ });
+
+ test('getMaxZoomLevel() throws $CameraException when disposed', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ await cameraController.dispose();
+
+ expect(
+ cameraController.getMaxZoomLevel,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException error) => error.code,
+ 'code',
+ 'Disposed CameraController',
+ )
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'getMaxZoomLevel() was called on a disposed CameraController.',
+ ),
+ ),
+ );
+ });
+
+ test(
+ 'getMaxZoomLevel() throws $CameraException when a platform exception occurred.',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ when(CameraPlatform.instance.getMaxZoomLevel(mockInitializeCamera))
+ .thenThrow(CameraException(
+ 'TEST_ERROR',
+ 'This is a test error message',
+ ));
+
+ expect(
+ cameraController.getMaxZoomLevel,
+ throwsA(isA<CameraException>()
+ .having(
+ (CameraException error) => error.code, 'code', 'TEST_ERROR')
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'This is a test error message',
+ )));
+ });
+
+ test('getMaxZoomLevel() returns max zoom level.', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ when(CameraPlatform.instance.getMaxZoomLevel(mockInitializeCamera))
+ .thenAnswer((_) => Future<double>.value(42.0));
+
+ final double maxZoomLevel = await cameraController.getMaxZoomLevel();
+ expect(maxZoomLevel, 42.0);
+ });
+
+ test('getMinZoomLevel() throws $CameraException when uninitialized',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ expect(
+ cameraController.getMinZoomLevel,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException error) => error.code,
+ 'code',
+ 'Uninitialized CameraController',
+ )
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'getMinZoomLevel() was called on an uninitialized CameraController.',
+ ),
+ ),
+ );
+ });
+
+ test('getMinZoomLevel() throws $CameraException when disposed', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ await cameraController.dispose();
+
+ expect(
+ cameraController.getMinZoomLevel,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException error) => error.code,
+ 'code',
+ 'Disposed CameraController',
+ )
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'getMinZoomLevel() was called on a disposed CameraController.',
+ ),
+ ),
+ );
+ });
+
+ test(
+ 'getMinZoomLevel() throws $CameraException when a platform exception occurred.',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ when(CameraPlatform.instance.getMinZoomLevel(mockInitializeCamera))
+ .thenThrow(CameraException(
+ 'TEST_ERROR',
+ 'This is a test error message',
+ ));
+
+ expect(
+ cameraController.getMinZoomLevel,
+ throwsA(isA<CameraException>()
+ .having(
+ (CameraException error) => error.code, 'code', 'TEST_ERROR')
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'This is a test error message',
+ )));
+ });
+
+ test('getMinZoomLevel() returns min zoom level.', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ when(CameraPlatform.instance.getMinZoomLevel(mockInitializeCamera))
+ .thenAnswer((_) => Future<double>.value(42.0));
+
+ final double minZoomLevel = await cameraController.getMinZoomLevel();
+ expect(minZoomLevel, 42.0);
+ });
+
+ test('setZoomLevel() throws $CameraException when uninitialized', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ expect(
+ () => cameraController.setZoomLevel(42.0),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException error) => error.code,
+ 'code',
+ 'Uninitialized CameraController',
+ )
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'setZoomLevel() was called on an uninitialized CameraController.',
+ ),
+ ),
+ );
+ });
+
+ test('setZoomLevel() throws $CameraException when disposed', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ await cameraController.dispose();
+
+ expect(
+ () => cameraController.setZoomLevel(42.0),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException error) => error.code,
+ 'code',
+ 'Disposed CameraController',
+ )
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'setZoomLevel() was called on a disposed CameraController.',
+ ),
+ ),
+ );
+ });
+
+ test(
+ 'setZoomLevel() throws $CameraException when a platform exception occurred.',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ when(CameraPlatform.instance.setZoomLevel(mockInitializeCamera, 42.0))
+ .thenThrow(CameraException(
+ 'TEST_ERROR',
+ 'This is a test error message',
+ ));
+
+ expect(
+ () => cameraController.setZoomLevel(42),
+ throwsA(isA<CameraException>()
+ .having(
+ (CameraException error) => error.code, 'code', 'TEST_ERROR')
+ .having(
+ (CameraException error) => error.description,
+ 'description',
+ 'This is a test error message',
+ )));
+
+ reset(CameraPlatform.instance);
+ });
+
+ test(
+ 'setZoomLevel() completes and calls method channel with correct value.',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+
+ await cameraController.initialize();
+ await cameraController.setZoomLevel(42.0);
+
+ verify(CameraPlatform.instance.setZoomLevel(mockInitializeCamera, 42.0))
+ .called(1);
+ });
+
+ test('setFlashMode() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ await cameraController.setFlashMode(FlashMode.always);
+
+ verify(CameraPlatform.instance
+ .setFlashMode(cameraController.cameraId, FlashMode.always))
+ .called(1);
+ });
+
+ test('setFlashMode() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance
+ .setFlashMode(cameraController.cameraId, FlashMode.always))
+ .thenThrow(
+ PlatformException(
+ code: 'TEST_ERROR',
+ message: 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.setFlashMode(FlashMode.always),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('setExposureMode() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ await cameraController.setExposureMode(ExposureMode.auto);
+
+ verify(CameraPlatform.instance
+ .setExposureMode(cameraController.cameraId, ExposureMode.auto))
+ .called(1);
+ });
+
+ test('setExposureMode() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance
+ .setExposureMode(cameraController.cameraId, ExposureMode.auto))
+ .thenThrow(
+ PlatformException(
+ code: 'TEST_ERROR',
+ message: 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.setExposureMode(ExposureMode.auto),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('setExposurePoint() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ await cameraController.setExposurePoint(const Offset(0.5, 0.5));
+
+ verify(CameraPlatform.instance.setExposurePoint(
+ cameraController.cameraId, const Point<double>(0.5, 0.5)))
+ .called(1);
+ });
+
+ test('setExposurePoint() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance.setExposurePoint(
+ cameraController.cameraId, const Point<double>(0.5, 0.5)))
+ .thenThrow(
+ PlatformException(
+ code: 'TEST_ERROR',
+ message: 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.setExposurePoint(const Offset(0.5, 0.5)),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('getMinExposureOffset() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance
+ .getMinExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) => Future<double>.value(0.0));
+
+ await cameraController.getMinExposureOffset();
+
+ verify(CameraPlatform.instance
+ .getMinExposureOffset(cameraController.cameraId))
+ .called(1);
+ });
+
+ test('getMinExposureOffset() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance
+ .getMinExposureOffset(cameraController.cameraId))
+ .thenThrow(
+ CameraException(
+ 'TEST_ERROR',
+ 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.getMinExposureOffset(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('getMaxExposureOffset() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance
+ .getMaxExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) => Future<double>.value(1.0));
+
+ await cameraController.getMaxExposureOffset();
+
+ verify(CameraPlatform.instance
+ .getMaxExposureOffset(cameraController.cameraId))
+ .called(1);
+ });
+
+ test('getMaxExposureOffset() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance
+ .getMaxExposureOffset(cameraController.cameraId))
+ .thenThrow(
+ CameraException(
+ 'TEST_ERROR',
+ 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.getMaxExposureOffset(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('getExposureOffsetStepSize() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance
+ .getExposureOffsetStepSize(cameraController.cameraId))
+ .thenAnswer((_) => Future<double>.value(0.0));
+
+ await cameraController.getExposureOffsetStepSize();
+
+ verify(CameraPlatform.instance
+ .getExposureOffsetStepSize(cameraController.cameraId))
+ .called(1);
+ });
+
+ test(
+ 'getExposureOffsetStepSize() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ when(CameraPlatform.instance
+ .getExposureOffsetStepSize(cameraController.cameraId))
+ .thenThrow(
+ CameraException(
+ 'TEST_ERROR',
+ 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.getExposureOffsetStepSize(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('setExposureOffset() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ when(CameraPlatform.instance
+ .getMinExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) async => -1.0);
+ when(CameraPlatform.instance
+ .getMaxExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) async => 2.0);
+ when(CameraPlatform.instance
+ .getExposureOffsetStepSize(cameraController.cameraId))
+ .thenAnswer((_) async => 1.0);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 1.0))
+ .thenAnswer((_) async => 1.0);
+
+ await cameraController.setExposureOffset(1.0);
+
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 1.0))
+ .called(1);
+ });
+
+ test('setExposureOffset() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ when(CameraPlatform.instance
+ .getMinExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) async => -1.0);
+ when(CameraPlatform.instance
+ .getMaxExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) async => 2.0);
+ when(CameraPlatform.instance
+ .getExposureOffsetStepSize(cameraController.cameraId))
+ .thenAnswer((_) async => 1.0);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 1.0))
+ .thenThrow(
+ CameraException(
+ 'TEST_ERROR',
+ 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.setExposureOffset(1.0),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test(
+ 'setExposureOffset() throws $CameraException when offset is out of bounds',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ when(CameraPlatform.instance
+ .getMinExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) async => -1.0);
+ when(CameraPlatform.instance
+ .getMaxExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) async => 2.0);
+ when(CameraPlatform.instance
+ .getExposureOffsetStepSize(cameraController.cameraId))
+ .thenAnswer((_) async => 1.0);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 0.0))
+ .thenAnswer((_) async => 0.0);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, -1.0))
+ .thenAnswer((_) async => 0.0);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 2.0))
+ .thenAnswer((_) async => 0.0);
+
+ expect(
+ cameraController.setExposureOffset(3.0),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'exposureOffsetOutOfBounds',
+ 'The provided exposure offset was outside the supported range for this device.',
+ )));
+ expect(
+ cameraController.setExposureOffset(-2.0),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'exposureOffsetOutOfBounds',
+ 'The provided exposure offset was outside the supported range for this device.',
+ )));
+
+ await cameraController.setExposureOffset(0.0);
+ await cameraController.setExposureOffset(-1.0);
+ await cameraController.setExposureOffset(2.0);
+
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 0.0))
+ .called(1);
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, -1.0))
+ .called(1);
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 2.0))
+ .called(1);
+ });
+
+ test('setExposureOffset() rounds offset to nearest step', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ when(CameraPlatform.instance
+ .getMinExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) async => -1.2);
+ when(CameraPlatform.instance
+ .getMaxExposureOffset(cameraController.cameraId))
+ .thenAnswer((_) async => 1.2);
+ when(CameraPlatform.instance
+ .getExposureOffsetStepSize(cameraController.cameraId))
+ .thenAnswer((_) async => 0.4);
+
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, -1.2))
+ .thenAnswer((_) async => -1.2);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, -0.8))
+ .thenAnswer((_) async => -0.8);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, -0.4))
+ .thenAnswer((_) async => -0.4);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 0.0))
+ .thenAnswer((_) async => 0.0);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 0.4))
+ .thenAnswer((_) async => 0.4);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 0.8))
+ .thenAnswer((_) async => 0.8);
+ when(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 1.2))
+ .thenAnswer((_) async => 1.2);
+
+ await cameraController.setExposureOffset(1.2);
+ await cameraController.setExposureOffset(-1.2);
+ await cameraController.setExposureOffset(0.1);
+ await cameraController.setExposureOffset(0.2);
+ await cameraController.setExposureOffset(0.3);
+ await cameraController.setExposureOffset(0.4);
+ await cameraController.setExposureOffset(0.5);
+ await cameraController.setExposureOffset(0.6);
+ await cameraController.setExposureOffset(0.7);
+ await cameraController.setExposureOffset(-0.1);
+ await cameraController.setExposureOffset(-0.2);
+ await cameraController.setExposureOffset(-0.3);
+ await cameraController.setExposureOffset(-0.4);
+ await cameraController.setExposureOffset(-0.5);
+ await cameraController.setExposureOffset(-0.6);
+ await cameraController.setExposureOffset(-0.7);
+
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 0.8))
+ .called(2);
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, -0.8))
+ .called(2);
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 0.0))
+ .called(2);
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, 0.4))
+ .called(4);
+ verify(CameraPlatform.instance
+ .setExposureOffset(cameraController.cameraId, -0.4))
+ .called(4);
+ });
+
+ test('pausePreview() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ cameraController.value = cameraController.value
+ .copyWith(deviceOrientation: DeviceOrientation.portraitUp);
+
+ await cameraController.pausePreview();
+
+ verify(CameraPlatform.instance.pausePreview(cameraController.cameraId))
+ .called(1);
+ expect(cameraController.value.isPreviewPaused, equals(true));
+ expect(cameraController.value.previewPauseOrientation,
+ DeviceOrientation.portraitUp);
+ });
+
+ test('pausePreview() does not call $CameraPlatform when already paused',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ cameraController.value =
+ cameraController.value.copyWith(isPreviewPaused: true);
+
+ await cameraController.pausePreview();
+
+ verifyNever(
+ CameraPlatform.instance.pausePreview(cameraController.cameraId));
+ expect(cameraController.value.isPreviewPaused, equals(true));
+ });
+
+ test(
+ 'pausePreview() sets previewPauseOrientation according to locked orientation',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ cameraController.value = cameraController.value.copyWith(
+ isPreviewPaused: false,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ lockedCaptureOrientation:
+ Optional<DeviceOrientation>.of(DeviceOrientation.landscapeRight));
+
+ await cameraController.pausePreview();
+
+ expect(cameraController.value.deviceOrientation,
+ equals(DeviceOrientation.portraitUp));
+ expect(cameraController.value.previewPauseOrientation,
+ equals(DeviceOrientation.landscapeRight));
+ });
+
+ test('pausePreview() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ when(CameraPlatform.instance.pausePreview(cameraController.cameraId))
+ .thenThrow(
+ PlatformException(
+ code: 'TEST_ERROR',
+ message: 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.pausePreview(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('resumePreview() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ cameraController.value =
+ cameraController.value.copyWith(isPreviewPaused: true);
+
+ await cameraController.resumePreview();
+
+ verify(CameraPlatform.instance.resumePreview(cameraController.cameraId))
+ .called(1);
+ expect(cameraController.value.isPreviewPaused, equals(false));
+ });
+
+ test('resumePreview() does not call $CameraPlatform when not paused',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ cameraController.value =
+ cameraController.value.copyWith(isPreviewPaused: false);
+
+ await cameraController.resumePreview();
+
+ verifyNever(
+ CameraPlatform.instance.resumePreview(cameraController.cameraId));
+ expect(cameraController.value.isPreviewPaused, equals(false));
+ });
+
+ test('resumePreview() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ cameraController.value =
+ cameraController.value.copyWith(isPreviewPaused: true);
+ when(CameraPlatform.instance.resumePreview(cameraController.cameraId))
+ .thenThrow(
+ PlatformException(
+ code: 'TEST_ERROR',
+ message: 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.resumePreview(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('lockCaptureOrientation() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ await cameraController.lockCaptureOrientation();
+ expect(cameraController.value.lockedCaptureOrientation,
+ equals(DeviceOrientation.portraitUp));
+ await cameraController
+ .lockCaptureOrientation(DeviceOrientation.landscapeRight);
+ expect(cameraController.value.lockedCaptureOrientation,
+ equals(DeviceOrientation.landscapeRight));
+
+ verify(CameraPlatform.instance.lockCaptureOrientation(
+ cameraController.cameraId, DeviceOrientation.portraitUp))
+ .called(1);
+ verify(CameraPlatform.instance.lockCaptureOrientation(
+ cameraController.cameraId, DeviceOrientation.landscapeRight))
+ .called(1);
+ });
+
+ test(
+ 'lockCaptureOrientation() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ when(CameraPlatform.instance.lockCaptureOrientation(
+ cameraController.cameraId, DeviceOrientation.portraitUp))
+ .thenThrow(
+ PlatformException(
+ code: 'TEST_ERROR',
+ message: 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.lockCaptureOrientation(DeviceOrientation.portraitUp),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+
+ test('unlockCaptureOrientation() calls $CameraPlatform', () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+
+ await cameraController.unlockCaptureOrientation();
+ expect(cameraController.value.lockedCaptureOrientation, equals(null));
+
+ verify(CameraPlatform.instance
+ .unlockCaptureOrientation(cameraController.cameraId))
+ .called(1);
+ });
+
+ test(
+ 'unlockCaptureOrientation() throws $CameraException on $PlatformException',
+ () async {
+ final CameraController cameraController = CameraController(
+ const CameraDescription(
+ name: 'cam',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90),
+ ResolutionPreset.max);
+ await cameraController.initialize();
+ when(CameraPlatform.instance
+ .unlockCaptureOrientation(cameraController.cameraId))
+ .thenThrow(
+ PlatformException(
+ code: 'TEST_ERROR',
+ message: 'This is a test error message',
+ ),
+ );
+
+ expect(
+ cameraController.unlockCaptureOrientation(),
+ throwsA(isA<CameraException>().having(
+ (CameraException error) => error.description,
+ 'TEST_ERROR',
+ 'This is a test error message',
+ )));
+ });
+ });
+}
+
+class MockCameraPlatform extends Mock
+ with MockPlatformInterfaceMixin
+ implements CameraPlatform {
+ @override
+ Future<void> initializeCamera(
+ int? cameraId, {
+ ImageFormatGroup? imageFormatGroup = ImageFormatGroup.unknown,
+ }) async =>
+ super.noSuchMethod(Invocation.method(
+ #initializeCamera,
+ <Object?>[cameraId],
+ <Symbol, dynamic>{
+ #imageFormatGroup: imageFormatGroup,
+ },
+ ));
+
+ @override
+ Future<void> dispose(int? cameraId) async {
+ return super.noSuchMethod(Invocation.method(#dispose, <Object?>[cameraId]));
+ }
+
+ @override
+ Future<List<CameraDescription>> availableCameras() =>
+ Future<List<CameraDescription>>.value(mockAvailableCameras);
+
+ @override
+ Future<int> createCamera(
+ CameraDescription description,
+ ResolutionPreset? resolutionPreset, {
+ bool enableAudio = false,
+ }) =>
+ mockPlatformException
+ ? throw PlatformException(code: 'foo', message: 'bar')
+ : Future<int>.value(mockInitializeCamera);
+
+ @override
+ Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) =>
+ Stream<CameraInitializedEvent>.value(mockOnCameraInitializedEvent);
+
+ @override
+ Stream<CameraClosingEvent> onCameraClosing(int cameraId) =>
+ Stream<CameraClosingEvent>.value(mockOnCameraClosingEvent);
+
+ @override
+ Stream<CameraErrorEvent> onCameraError(int cameraId) =>
+ Stream<CameraErrorEvent>.value(mockOnCameraErrorEvent);
+
+ @override
+ Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() =>
+ Stream<DeviceOrientationChangedEvent>.value(
+ mockOnDeviceOrientationChangedEvent);
+
+ @override
+ Future<XFile> takePicture(int cameraId) => mockPlatformException
+ ? throw PlatformException(code: 'foo', message: 'bar')
+ : Future<XFile>.value(mockTakePicture);
+
+ @override
+ Future<void> prepareForVideoRecording() async =>
+ super.noSuchMethod(Invocation.method(#prepareForVideoRecording, null));
+
+ @override
+ Future<XFile> startVideoRecording(int cameraId,
+ {Duration? maxVideoDuration}) =>
+ Future<XFile>.value(mockVideoRecordingXFile);
+
+ @override
+ Future<void> startVideoCapturing(VideoCaptureOptions options) {
+ return startVideoRecording(options.cameraId,
+ maxVideoDuration: options.maxDuration);
+ }
+
+ @override
+ Future<void> lockCaptureOrientation(
+ int? cameraId, DeviceOrientation? orientation) async =>
+ super.noSuchMethod(Invocation.method(
+ #lockCaptureOrientation, <Object?>[cameraId, orientation]));
+
+ @override
+ Future<void> unlockCaptureOrientation(int? cameraId) async =>
+ super.noSuchMethod(
+ Invocation.method(#unlockCaptureOrientation, <Object?>[cameraId]));
+
+ @override
+ Future<void> pausePreview(int? cameraId) async =>
+ super.noSuchMethod(Invocation.method(#pausePreview, <Object?>[cameraId]));
+
+ @override
+ Future<void> resumePreview(int? cameraId) async => super
+ .noSuchMethod(Invocation.method(#resumePreview, <Object?>[cameraId]));
+
+ @override
+ Future<double> getMaxZoomLevel(int? cameraId) async => super.noSuchMethod(
+ Invocation.method(#getMaxZoomLevel, <Object?>[cameraId]),
+ returnValue: Future<double>.value(1.0),
+ ) as Future<double>;
+
+ @override
+ Future<double> getMinZoomLevel(int? cameraId) async => super.noSuchMethod(
+ Invocation.method(#getMinZoomLevel, <Object?>[cameraId]),
+ returnValue: Future<double>.value(0.0),
+ ) as Future<double>;
+
+ @override
+ Future<void> setZoomLevel(int? cameraId, double? zoom) async =>
+ super.noSuchMethod(
+ Invocation.method(#setZoomLevel, <Object?>[cameraId, zoom]));
+
+ @override
+ Future<void> setFlashMode(int? cameraId, FlashMode? mode) async =>
+ super.noSuchMethod(
+ Invocation.method(#setFlashMode, <Object?>[cameraId, mode]));
+
+ @override
+ Future<void> setExposureMode(int? cameraId, ExposureMode? mode) async =>
+ super.noSuchMethod(
+ Invocation.method(#setExposureMode, <Object?>[cameraId, mode]));
+
+ @override
+ Future<void> setExposurePoint(int? cameraId, Point<double>? point) async =>
+ super.noSuchMethod(
+ Invocation.method(#setExposurePoint, <Object?>[cameraId, point]));
+
+ @override
+ Future<double> getMinExposureOffset(int? cameraId) async =>
+ super.noSuchMethod(
+ Invocation.method(#getMinExposureOffset, <Object?>[cameraId]),
+ returnValue: Future<double>.value(0.0),
+ ) as Future<double>;
+
+ @override
+ Future<double> getMaxExposureOffset(int? cameraId) async =>
+ super.noSuchMethod(
+ Invocation.method(#getMaxExposureOffset, <Object?>[cameraId]),
+ returnValue: Future<double>.value(1.0),
+ ) as Future<double>;
+
+ @override
+ Future<double> getExposureOffsetStepSize(int? cameraId) async =>
+ super.noSuchMethod(
+ Invocation.method(#getExposureOffsetStepSize, <Object?>[cameraId]),
+ returnValue: Future<double>.value(1.0),
+ ) as Future<double>;
+
+ @override
+ Future<double> setExposureOffset(int? cameraId, double? offset) async =>
+ super.noSuchMethod(
+ Invocation.method(#setExposureOffset, <Object?>[cameraId, offset]),
+ returnValue: Future<double>.value(1.0),
+ ) as Future<double>;
+}
+
+class MockCameraDescription extends CameraDescription {
+ /// Creates a new camera description with the given properties.
+ const MockCameraDescription()
+ : super(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ );
+
+ @override
+ CameraLensDirection get lensDirection => CameraLensDirection.back;
+
+ @override
+ String get name => 'back';
+}
diff --git a/packages/camera/camera/test/camera_value_test.dart b/packages/camera/camera/test/camera_value_test.dart
new file mode 100644
index 0000000..37168db
--- /dev/null
+++ b/packages/camera/camera/test/camera_value_test.dart
@@ -0,0 +1,150 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#106316)
+// ignore: unnecessary_import
+import 'dart:ui';
+
+import 'package:camera/camera.dart';
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#106316)
+// ignore: unnecessary_import
+import 'package:flutter/cupertino.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ group('camera_value', () {
+ test('Can be created', () {
+ const CameraValue cameraValue = CameraValue(
+ isInitialized: false,
+ previewSize: Size(10, 10),
+ isRecordingPaused: false,
+ isRecordingVideo: false,
+ isTakingPicture: false,
+ isStreamingImages: false,
+ flashMode: FlashMode.auto,
+ exposureMode: ExposureMode.auto,
+ exposurePointSupported: true,
+ focusMode: FocusMode.auto,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ lockedCaptureOrientation: DeviceOrientation.portraitUp,
+ recordingOrientation: DeviceOrientation.portraitUp,
+ focusPointSupported: true,
+ previewPauseOrientation: DeviceOrientation.portraitUp,
+ );
+
+ expect(cameraValue, isA<CameraValue>());
+ expect(cameraValue.isInitialized, isFalse);
+ expect(cameraValue.errorDescription, null);
+ expect(cameraValue.previewSize, const Size(10, 10));
+ expect(cameraValue.isRecordingPaused, isFalse);
+ expect(cameraValue.isRecordingVideo, isFalse);
+ expect(cameraValue.isTakingPicture, isFalse);
+ expect(cameraValue.isStreamingImages, isFalse);
+ expect(cameraValue.flashMode, FlashMode.auto);
+ expect(cameraValue.exposureMode, ExposureMode.auto);
+ expect(cameraValue.exposurePointSupported, true);
+ expect(cameraValue.deviceOrientation, DeviceOrientation.portraitUp);
+ expect(
+ cameraValue.lockedCaptureOrientation, DeviceOrientation.portraitUp);
+ expect(cameraValue.recordingOrientation, DeviceOrientation.portraitUp);
+ expect(cameraValue.isPreviewPaused, false);
+ expect(cameraValue.previewPauseOrientation, DeviceOrientation.portraitUp);
+ });
+
+ test('Can be created as uninitialized', () {
+ const CameraValue cameraValue = CameraValue.uninitialized();
+
+ expect(cameraValue, isA<CameraValue>());
+ expect(cameraValue.isInitialized, isFalse);
+ expect(cameraValue.errorDescription, null);
+ expect(cameraValue.previewSize, null);
+ expect(cameraValue.isRecordingPaused, isFalse);
+ expect(cameraValue.isRecordingVideo, isFalse);
+ expect(cameraValue.isTakingPicture, isFalse);
+ expect(cameraValue.isStreamingImages, isFalse);
+ expect(cameraValue.flashMode, FlashMode.auto);
+ expect(cameraValue.exposureMode, ExposureMode.auto);
+ expect(cameraValue.exposurePointSupported, false);
+ expect(cameraValue.focusMode, FocusMode.auto);
+ expect(cameraValue.deviceOrientation, DeviceOrientation.portraitUp);
+ expect(cameraValue.lockedCaptureOrientation, null);
+ expect(cameraValue.recordingOrientation, null);
+ expect(cameraValue.isPreviewPaused, isFalse);
+ expect(cameraValue.previewPauseOrientation, null);
+ });
+
+ test('Can be copied with isInitialized', () {
+ const CameraValue cv = CameraValue.uninitialized();
+ final CameraValue cameraValue = cv.copyWith(isInitialized: true);
+
+ expect(cameraValue, isA<CameraValue>());
+ expect(cameraValue.isInitialized, isTrue);
+ expect(cameraValue.errorDescription, null);
+ expect(cameraValue.previewSize, null);
+ expect(cameraValue.isRecordingPaused, isFalse);
+ expect(cameraValue.isRecordingVideo, isFalse);
+ expect(cameraValue.isTakingPicture, isFalse);
+ expect(cameraValue.isStreamingImages, isFalse);
+ expect(cameraValue.flashMode, FlashMode.auto);
+ expect(cameraValue.focusMode, FocusMode.auto);
+ expect(cameraValue.exposureMode, ExposureMode.auto);
+ expect(cameraValue.exposurePointSupported, false);
+ expect(cameraValue.deviceOrientation, DeviceOrientation.portraitUp);
+ expect(cameraValue.lockedCaptureOrientation, null);
+ expect(cameraValue.recordingOrientation, null);
+ expect(cameraValue.isPreviewPaused, isFalse);
+ expect(cameraValue.previewPauseOrientation, null);
+ });
+
+ test('Has aspectRatio after setting size', () {
+ const CameraValue cv = CameraValue.uninitialized();
+ final CameraValue cameraValue =
+ cv.copyWith(isInitialized: true, previewSize: const Size(20, 10));
+
+ expect(cameraValue.aspectRatio, 2.0);
+ });
+
+ test('hasError is true after setting errorDescription', () {
+ const CameraValue cv = CameraValue.uninitialized();
+ final CameraValue cameraValue = cv.copyWith(errorDescription: 'error');
+
+ expect(cameraValue.hasError, isTrue);
+ expect(cameraValue.errorDescription, 'error');
+ });
+
+ test('Recording paused is false when not recording', () {
+ const CameraValue cv = CameraValue.uninitialized();
+ final CameraValue cameraValue = cv.copyWith(
+ isInitialized: true,
+ isRecordingVideo: false,
+ isRecordingPaused: true);
+
+ expect(cameraValue.isRecordingPaused, isFalse);
+ });
+
+ test('toString() works as expected', () {
+ const CameraValue cameraValue = CameraValue(
+ isInitialized: false,
+ previewSize: Size(10, 10),
+ isRecordingPaused: false,
+ isRecordingVideo: false,
+ isTakingPicture: false,
+ isStreamingImages: false,
+ flashMode: FlashMode.auto,
+ exposureMode: ExposureMode.auto,
+ focusMode: FocusMode.auto,
+ exposurePointSupported: true,
+ focusPointSupported: true,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ lockedCaptureOrientation: DeviceOrientation.portraitUp,
+ recordingOrientation: DeviceOrientation.portraitUp,
+ isPreviewPaused: true,
+ previewPauseOrientation: DeviceOrientation.portraitUp);
+
+ expect(cameraValue.toString(),
+ 'CameraValue(isRecordingVideo: false, isInitialized: false, errorDescription: null, previewSize: Size(10.0, 10.0), isStreamingImages: false, flashMode: FlashMode.auto, exposureMode: ExposureMode.auto, focusMode: FocusMode.auto, exposurePointSupported: true, focusPointSupported: true, deviceOrientation: DeviceOrientation.portraitUp, lockedCaptureOrientation: DeviceOrientation.portraitUp, recordingOrientation: DeviceOrientation.portraitUp, isPreviewPaused: true, previewPausedOrientation: DeviceOrientation.portraitUp)');
+ });
+ });
+}
diff --git a/packages/camera/camera_android/AUTHORS b/packages/camera/camera_android/AUTHORS
new file mode 100644
index 0000000..493a0b4
--- /dev/null
+++ b/packages/camera/camera_android/AUTHORS
@@ -0,0 +1,66 @@
+# Below is a list of people and organizations that have contributed
+# to the Flutter project. Names should be added to the list like so:
+#
+# Name/Organization <email address>
+
+Google Inc.
+The Chromium Authors
+German Saprykin <saprykin.h@gmail.com>
+Benjamin Sauer <sauer.benjamin@gmail.com>
+larsenthomasj@gmail.com
+Ali Bitek <alibitek@protonmail.ch>
+Pol Batlló <pol.batllo@gmail.com>
+Anatoly Pulyaevskiy
+Hayden Flinner <haydenflinner@gmail.com>
+Stefano Rodriguez <hlsroddy@gmail.com>
+Salvatore Giordano <salvatoregiordanoo@gmail.com>
+Brian Armstrong <brian@flutter.institute>
+Paul DeMarco <paulmdemarco@gmail.com>
+Fabricio Nogueira <feufeu@gmail.com>
+Simon Lightfoot <simon@devangels.london>
+Ashton Thomas <ashton@acrinta.com>
+Thomas Danner <thmsdnnr@gmail.com>
+Diego Velásquez <diego.velasquez.lopez@gmail.com>
+Hajime Nakamura <nkmrhj@gmail.com>
+Tuyển Vũ Xuân <netsoft1985@gmail.com>
+Miguel Ruivo <miguel@miguelruivo.com>
+Sarthak Verma <sarthak@artiosys.com>
+Mike Diarmid <mike@invertase.io>
+Invertase <oss@invertase.io>
+Elliot Hesp <elliot@invertase.io>
+Vince Varga <vince.varga@smaho.com>
+Aawaz Gyawali <awazgyawali@gmail.com>
+EUI Limited <ian.evans3@admiralgroup.co.uk>
+Katarina Sheremet <katarina@sheremet.ch>
+Thomas Stockx <thomas@stockxit.com>
+Sarbagya Dhaubanjar <sarbagyastha@gmail.com>
+Ozkan Eksi <ozeksi@gmail.com>
+Rishab Nayak <rishab@bu.edu>
+ko2ic <ko2ic.dev@gmail.com>
+Jonathan Younger <jonathan@daikini.com>
+Jose Sanchez <josesm82@gmail.com>
+Debkanchan Samadder <debu.samadder@gmail.com>
+Audrius Karosevicius <audrius.karosevicius@gmail.com>
+Lukasz Piliszczuk <lukasz@intheloup.io>
+SoundReply Solutions GmbH <ch@soundreply.com>
+Rafal Wachol <rwachol@gmail.com>
+Pau Picas <pau.picas@gmail.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Alexandru Tuca <salexandru.tuca@outlook.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Rhodes Davis Jr. <rody.davis.jr@gmail.com>
+Luigi Agosti <luigi@tengio.com>
+Quentin Le Guennec <quentin@tengio.com>
+Koushik Ravikumar <koushik@tengio.com>
+Nissim Dsilva <nissim@tengio.com>
+Giancarlo Rocha <giancarloiff@gmail.com>
+Ryo Miyake <ryo@miyake.id>
+Théo Champion <contact.theochampion@gmail.com>
+Kazuki Yamaguchi <y.kazuki0614n@gmail.com>
+Eitan Schwartz <eshvartz@gmail.com>
+Chris Rutkowski <chrisrutkowski89@gmail.com>
+Juan Alvarez <juan.alvarez@resideo.com>
+Aleksandr Yurkovskiy <sanekyy@gmail.com>
+Anton Borries <mail@antonborri.es>
+Alex Li <google@alexv525.com>
+Rahul Raj <64.rahulraj@gmail.com>
diff --git a/packages/camera/camera_android/CHANGELOG.md b/packages/camera/camera_android/CHANGELOG.md
new file mode 100644
index 0000000..4609b40
--- /dev/null
+++ b/packages/camera/camera_android/CHANGELOG.md
@@ -0,0 +1,75 @@
+## 0.10.4
+
+* Temporarily fixes issue with requested video profiles being null by falling back to deprecated behavior in that case.
+
+## 0.10.3
+
+* Adds back use of Optional type.
+* Updates minimum Flutter version to 3.0.
+
+## 0.10.2+3
+
+* Updates code for stricter lint checks.
+
+## 0.10.2+2
+
+* Fixes zoom computation for virtual cameras hiding physical cameras in Android 11+.
+* Removes the unused CameraZoom class from the codebase.
+
+## 0.10.2+1
+
+* Updates code for stricter lint checks.
+
+## 0.10.2
+
+* Removes usage of deprecated quiver Optional type.
+
+## 0.10.1
+
+* Implements an option to also stream when recording a video.
+
+## 0.10.0+5
+
+* Fixes `ArrayIndexOutOfBoundsException` when the permission request is interrupted.
+
+## 0.10.0+4
+
+* Upgrades `androidx.annotation` version to 1.5.0.
+
+## 0.10.0+3
+
+* Updates code for `no_leading_underscores_for_local_identifiers` lint.
+
+## 0.10.0+2
+
+* Removes call to `join` on the camera's background `HandlerThread`.
+* Updates minimum Flutter version to 2.10.
+
+## 0.10.0+1
+
+* Fixes avoid_redundant_argument_values lint warnings and minor typos.
+
+## 0.10.0
+
+* **Breaking Change** Updates Android camera access permission error codes to be consistent with other platforms. If your app still handles the legacy `cameraPermission` exception, please update it to handle the new permission exception codes that are noted in the README.
+* Ignores missing return warnings in preparation for [upcoming analysis changes](https://github.com/flutter/flutter/issues/105750).
+
+## 0.9.8+3
+
+* Skips duplicate calls to stop background thread and removes unnecessary closings of camera capture sessions on Android.
+
+## 0.9.8+2
+
+* Fixes exception in registerWith caused by the switch to an in-package method channel.
+
+## 0.9.8+1
+
+* Ignores deprecation warnings for upcoming styleFrom button API changes.
+
+## 0.9.8
+
+* Switches to internal method channel implementation.
+
+## 0.9.7+1
+
+* Splits from `camera` as a federated implementation.
diff --git a/packages/camera/camera_android/LICENSE b/packages/camera/camera_android/LICENSE
new file mode 100644
index 0000000..c6823b8
--- /dev/null
+++ b/packages/camera/camera_android/LICENSE
@@ -0,0 +1,25 @@
+Copyright 2013 The Flutter Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/packages/camera/camera_android/README.md b/packages/camera/camera_android/README.md
new file mode 100644
index 0000000..de8897c
--- /dev/null
+++ b/packages/camera/camera_android/README.md
@@ -0,0 +1,11 @@
+# camera\_android
+
+The Android implementation of [`camera`][1].
+
+## Usage
+
+This package is [endorsed][2], which means you can simply use `camera`
+normally. This package will be automatically included in your app when you do.
+
+[1]: https://pub.dev/packages/camera
+[2]: https://flutter.dev/docs/development/packages-and-plugins/developing-packages#endorsed-federated-plugin
diff --git a/packages/camera/camera_android/android/build.gradle b/packages/camera/camera_android/android/build.gradle
new file mode 100644
index 0000000..9c403e0
--- /dev/null
+++ b/packages/camera/camera_android/android/build.gradle
@@ -0,0 +1,66 @@
+group 'io.flutter.plugins.camera'
+version '1.0-SNAPSHOT'
+def args = ["-Xlint:deprecation","-Xlint:unchecked"]
+
+buildscript {
+ repositories {
+ google()
+ mavenCentral()
+ }
+
+ dependencies {
+ classpath 'com.android.tools.build:gradle:7.0.2'
+ }
+}
+
+rootProject.allprojects {
+ repositories {
+ google()
+ mavenCentral()
+ }
+}
+
+project.getTasks().withType(JavaCompile){
+ options.compilerArgs.addAll(args)
+}
+
+apply plugin: 'com.android.library'
+
+android {
+ compileSdkVersion 31
+
+ defaultConfig {
+ targetSdkVersion 31
+ minSdkVersion 21
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ }
+ lintOptions {
+ disable 'AndroidGradlePluginVersion', 'InvalidPackage', 'GradleDependency'
+ baseline file("lint-baseline.xml")
+ }
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+
+
+ testOptions {
+ unitTests.includeAndroidResources = true
+ unitTests.returnDefaultValues = true
+ unitTests.all {
+ testLogging {
+ events "passed", "skipped", "failed", "standardOut", "standardError"
+ outputs.upToDateWhen {false}
+ showStandardStreams = true
+ }
+ }
+ }
+}
+
+dependencies {
+ implementation 'androidx.annotation:annotation:1.5.0'
+ testImplementation 'junit:junit:4.13.2'
+ testImplementation 'org.mockito:mockito-inline:5.0.0'
+ testImplementation 'androidx.test:core:1.4.0'
+ testImplementation 'org.robolectric:robolectric:4.5'
+}
diff --git a/packages/camera/camera_android/android/lint-baseline.xml b/packages/camera/camera_android/android/lint-baseline.xml
new file mode 100644
index 0000000..4ddaafa
--- /dev/null
+++ b/packages/camera/camera_android/android/lint-baseline.xml
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<issues format="5" by="lint 3.5.0" client="gradle" variant="debug" version="3.5.0">
+
+ <issue
+ id="Assert"
+ message="Assertions are unreliable in Dalvik and unimplemented in ART. Use `BuildConfig.DEBUG` conditional checks instead."
+ errorLine1=" assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0);"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java"
+ line="73"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="Assert"
+ message="Assertions are unreliable in Dalvik and unimplemented in ART. Use `BuildConfig.DEBUG` conditional checks instead."
+ errorLine1=" assert (x >= 0 && x <= 1);"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java"
+ line="74"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="Assert"
+ message="Assertions are unreliable in Dalvik and unimplemented in ART. Use `BuildConfig.DEBUG` conditional checks instead."
+ errorLine1=" assert (y >= 0 && y <= 1);"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java"
+ line="75"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="Assert"
+ message="Assertions are unreliable in Dalvik and unimplemented in ART. Use `BuildConfig.DEBUG` conditional checks instead."
+ errorLine1=" assert (maxBoundaries == null || maxBoundaries.getWidth() > 0);"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/CameraRegions.java"
+ line="16"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="Assert"
+ message="Assertions are unreliable in Dalvik and unimplemented in ART. Use `BuildConfig.DEBUG` conditional checks instead."
+ errorLine1=" assert (maxBoundaries == null || maxBoundaries.getHeight() > 0);"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/CameraRegions.java"
+ line="17"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="Assert"
+ message="Assertions are unreliable in Dalvik and unimplemented in ART. Use `BuildConfig.DEBUG` conditional checks instead."
+ errorLine1=" assert (x >= 0 && x <= 1);"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/CameraRegions.java"
+ line="50"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="Assert"
+ message="Assertions are unreliable in Dalvik and unimplemented in ART. Use `BuildConfig.DEBUG` conditional checks instead."
+ errorLine1=" assert (y >= 0 && y <= 1);"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/CameraRegions.java"
+ line="51"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="SwitchIntDef"
+ message="Switch statement on an `int` with known associated constant missing case `Configuration.ORIENTATION_SQUARE`, `Configuration.ORIENTATION_UNDEFINED`"
+ errorLine1=" switch (orientation) {"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/DeviceOrientationManager.java"
+ line="143"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="SwitchIntDef"
+ message="Switch statement on an `int` with known associated constant missing case `Configuration.ORIENTATION_SQUARE`, `Configuration.ORIENTATION_UNDEFINED`"
+ errorLine1=" switch (orientation) {"
+ errorLine2=" ~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManager.java"
+ line="264"
+ column="5"/>
+ </issue>
+
+ <issue
+ id="ObsoleteSdkInt"
+ message="Unnecessary; SDK_INT is never < 21"
+ errorLine1=" if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {"
+ errorLine2=" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~">
+ <location
+ file="src/main/java/io/flutter/plugins/camera/CameraPlugin.java"
+ line="102"
+ column="9"/>
+ </issue>
+
+</issues>
diff --git a/packages/camera/camera_android/android/settings.gradle b/packages/camera/camera_android/android/settings.gradle
new file mode 100644
index 0000000..94a1bae
--- /dev/null
+++ b/packages/camera/camera_android/android/settings.gradle
@@ -0,0 +1 @@
+rootProject.name = 'camera_android'
diff --git a/packages/camera/camera_android/android/src/main/AndroidManifest.xml b/packages/camera/camera_android/android/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..d80d364
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/AndroidManifest.xml
@@ -0,0 +1,5 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="io.flutter.plugins.camera">
+ <uses-permission android:name="android.permission.CAMERA"/>
+ <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+</manifest>
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
new file mode 100644
index 0000000..b02d686
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
@@ -0,0 +1,1273 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.OutputConfiguration;
+import android.hardware.camera2.params.SessionConfiguration;
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import android.media.Image;
+import android.media.ImageReader;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.os.Build.VERSION;
+import android.os.Build.VERSION_CODES;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.util.Log;
+import android.util.Size;
+import android.view.Display;
+import android.view.Surface;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugin.common.EventChannel;
+import io.flutter.plugin.common.MethodChannel;
+import io.flutter.plugin.common.MethodChannel.Result;
+import io.flutter.plugins.camera.features.CameraFeature;
+import io.flutter.plugins.camera.features.CameraFeatureFactory;
+import io.flutter.plugins.camera.features.CameraFeatures;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.autofocus.FocusMode;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureMode;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.flash.FlashMode;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.DeviceOrientationManager;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+import io.flutter.plugins.camera.media.MediaRecorderBuilder;
+import io.flutter.plugins.camera.types.CameraCaptureProperties;
+import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper;
+import io.flutter.view.TextureRegistry.SurfaceTextureEntry;
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.Executors;
+
+@FunctionalInterface
+interface ErrorCallback {
+ void onError(String errorCode, String errorMessage);
+}
+
+/** A mockable wrapper for CameraDevice calls. */
+interface CameraDeviceWrapper {
+ @NonNull
+ CaptureRequest.Builder createCaptureRequest(int templateType) throws CameraAccessException;
+
+ @TargetApi(VERSION_CODES.P)
+ void createCaptureSession(SessionConfiguration config) throws CameraAccessException;
+
+ @TargetApi(VERSION_CODES.LOLLIPOP)
+ void createCaptureSession(
+ @NonNull List<Surface> outputs,
+ @NonNull CameraCaptureSession.StateCallback callback,
+ @Nullable Handler handler)
+ throws CameraAccessException;
+
+ void close();
+}
+
+class Camera
+ implements CameraCaptureCallback.CameraCaptureStateListener,
+ ImageReader.OnImageAvailableListener {
+ private static final String TAG = "Camera";
+
+ private static final HashMap<String, Integer> supportedImageFormats;
+
+ // Current supported outputs.
+ static {
+ supportedImageFormats = new HashMap<>();
+ supportedImageFormats.put("yuv420", ImageFormat.YUV_420_888);
+ supportedImageFormats.put("jpeg", ImageFormat.JPEG);
+ }
+
+ /**
+ * Holds all of the camera features/settings and will be used to update the request builder when
+ * one changes.
+ */
+ private final CameraFeatures cameraFeatures;
+
+ private final SurfaceTextureEntry flutterTexture;
+ private final boolean enableAudio;
+ private final Context applicationContext;
+ private final DartMessenger dartMessenger;
+ private final CameraProperties cameraProperties;
+ private final CameraFeatureFactory cameraFeatureFactory;
+ private final Activity activity;
+ /** A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture. */
+ private final CameraCaptureCallback cameraCaptureCallback;
+ /** A {@link Handler} for running tasks in the background. */
+ private Handler backgroundHandler;
+
+ /** An additional thread for running tasks that shouldn't block the UI. */
+ private HandlerThread backgroundHandlerThread;
+
+ private CameraDeviceWrapper cameraDevice;
+ private CameraCaptureSession captureSession;
+ private ImageReader pictureImageReader;
+ private ImageReader imageStreamReader;
+ /** {@link CaptureRequest.Builder} for the camera preview */
+ private CaptureRequest.Builder previewRequestBuilder;
+
+ private MediaRecorder mediaRecorder;
+ /** True when recording video. */
+ private boolean recordingVideo;
+ /** True when the preview is paused. */
+ private boolean pausedPreview;
+
+ private File captureFile;
+
+ /** Holds the current capture timeouts */
+ private CaptureTimeoutsWrapper captureTimeouts;
+ /** Holds the last known capture properties */
+ private CameraCaptureProperties captureProps;
+
+ private MethodChannel.Result flutterResult;
+
+ /** A CameraDeviceWrapper implementation that forwards calls to a CameraDevice. */
+ private class DefaultCameraDeviceWrapper implements CameraDeviceWrapper {
+ private final CameraDevice cameraDevice;
+
+ private DefaultCameraDeviceWrapper(CameraDevice cameraDevice) {
+ this.cameraDevice = cameraDevice;
+ }
+
+ @NonNull
+ @Override
+ public CaptureRequest.Builder createCaptureRequest(int templateType)
+ throws CameraAccessException {
+ return cameraDevice.createCaptureRequest(templateType);
+ }
+
+ @TargetApi(VERSION_CODES.P)
+ @Override
+ public void createCaptureSession(SessionConfiguration config) throws CameraAccessException {
+ cameraDevice.createCaptureSession(config);
+ }
+
+ @TargetApi(VERSION_CODES.LOLLIPOP)
+ @SuppressWarnings("deprecation")
+ @Override
+ public void createCaptureSession(
+ @NonNull List<Surface> outputs,
+ @NonNull CameraCaptureSession.StateCallback callback,
+ @Nullable Handler handler)
+ throws CameraAccessException {
+ cameraDevice.createCaptureSession(outputs, callback, backgroundHandler);
+ }
+
+ @Override
+ public void close() {
+ cameraDevice.close();
+ }
+ }
+
+ public Camera(
+ final Activity activity,
+ final SurfaceTextureEntry flutterTexture,
+ final CameraFeatureFactory cameraFeatureFactory,
+ final DartMessenger dartMessenger,
+ final CameraProperties cameraProperties,
+ final ResolutionPreset resolutionPreset,
+ final boolean enableAudio) {
+
+ if (activity == null) {
+ throw new IllegalStateException("No activity available!");
+ }
+ this.activity = activity;
+ this.enableAudio = enableAudio;
+ this.flutterTexture = flutterTexture;
+ this.dartMessenger = dartMessenger;
+ this.applicationContext = activity.getApplicationContext();
+ this.cameraProperties = cameraProperties;
+ this.cameraFeatureFactory = cameraFeatureFactory;
+ this.cameraFeatures =
+ CameraFeatures.init(
+ cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset);
+
+ // Create capture callback.
+ captureTimeouts = new CaptureTimeoutsWrapper(3000, 3000);
+ captureProps = new CameraCaptureProperties();
+ cameraCaptureCallback = CameraCaptureCallback.create(this, captureTimeouts, captureProps);
+
+ startBackgroundThread();
+ }
+
+ @Override
+ public void onConverged() {
+ takePictureAfterPrecapture();
+ }
+
+ @Override
+ public void onPrecapture() {
+ runPrecaptureSequence();
+ }
+
+ /**
+ * Updates the builder settings with all of the available features.
+ *
+ * @param requestBuilder request builder to update.
+ */
+ private void updateBuilderSettings(CaptureRequest.Builder requestBuilder) {
+ for (CameraFeature feature : cameraFeatures.getAllFeatures()) {
+ Log.d(TAG, "Updating builder with feature: " + feature.getDebugName());
+ feature.updateBuilder(requestBuilder);
+ }
+ }
+
+ private void prepareMediaRecorder(String outputFilePath) throws IOException {
+ Log.i(TAG, "prepareMediaRecorder");
+
+ if (mediaRecorder != null) {
+ mediaRecorder.release();
+ }
+
+ final PlatformChannel.DeviceOrientation lockedOrientation =
+ ((SensorOrientationFeature) cameraFeatures.getSensorOrientation())
+ .getLockedCaptureOrientation();
+
+ MediaRecorderBuilder mediaRecorderBuilder;
+
+ // TODO(camsim99): Revert changes that allow legacy code to be used when recordingProfile is null
+ // once this has largely been fixed on the Android side. https://github.com/flutter/flutter/issues/119668
+ EncoderProfiles recordingProfile = getRecordingProfile();
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && recordingProfile != null) {
+ mediaRecorderBuilder = new MediaRecorderBuilder(recordingProfile, outputFilePath);
+ } else {
+ mediaRecorderBuilder = new MediaRecorderBuilder(getRecordingProfileLegacy(), outputFilePath);
+ }
+
+ mediaRecorder =
+ mediaRecorderBuilder
+ .setEnableAudio(enableAudio)
+ .setMediaOrientation(
+ lockedOrientation == null
+ ? getDeviceOrientationManager().getVideoOrientation()
+ : getDeviceOrientationManager().getVideoOrientation(lockedOrientation))
+ .build();
+ }
+
+ @SuppressLint("MissingPermission")
+ public void open(String imageFormatGroup) throws CameraAccessException {
+ final ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
+
+ if (!resolutionFeature.checkIsSupported()) {
+ // Tell the user that the camera they are trying to open is not supported,
+ // as its {@link android.media.CamcorderProfile} cannot be fetched due to the name
+ // not being a valid parsable integer.
+ dartMessenger.sendCameraErrorEvent(
+ "Camera with name \""
+ + cameraProperties.getCameraName()
+ + "\" is not supported by this plugin.");
+ return;
+ }
+
+ // Always capture using JPEG format.
+ pictureImageReader =
+ ImageReader.newInstance(
+ resolutionFeature.getCaptureSize().getWidth(),
+ resolutionFeature.getCaptureSize().getHeight(),
+ ImageFormat.JPEG,
+ 1);
+
+ // For image streaming, use the provided image format or fall back to YUV420.
+ Integer imageFormat = supportedImageFormats.get(imageFormatGroup);
+ if (imageFormat == null) {
+ Log.w(TAG, "The selected imageFormatGroup is not supported by Android. Defaulting to yuv420");
+ imageFormat = ImageFormat.YUV_420_888;
+ }
+ imageStreamReader =
+ ImageReader.newInstance(
+ resolutionFeature.getPreviewSize().getWidth(),
+ resolutionFeature.getPreviewSize().getHeight(),
+ imageFormat,
+ 1);
+
+ // Open the camera.
+ CameraManager cameraManager = CameraUtils.getCameraManager(activity);
+ cameraManager.openCamera(
+ cameraProperties.getCameraName(),
+ new CameraDevice.StateCallback() {
+ @Override
+ public void onOpened(@NonNull CameraDevice device) {
+ cameraDevice = new DefaultCameraDeviceWrapper(device);
+ try {
+ startPreview();
+ dartMessenger.sendCameraInitializedEvent(
+ resolutionFeature.getPreviewSize().getWidth(),
+ resolutionFeature.getPreviewSize().getHeight(),
+ cameraFeatures.getExposureLock().getValue(),
+ cameraFeatures.getAutoFocus().getValue(),
+ cameraFeatures.getExposurePoint().checkIsSupported(),
+ cameraFeatures.getFocusPoint().checkIsSupported());
+ } catch (CameraAccessException e) {
+ dartMessenger.sendCameraErrorEvent(e.getMessage());
+ close();
+ }
+ }
+
+ @Override
+ public void onClosed(@NonNull CameraDevice camera) {
+ Log.i(TAG, "open | onClosed");
+
+ // Prevents calls to methods that would otherwise result in IllegalStateException exceptions.
+ cameraDevice = null;
+ closeCaptureSession();
+ dartMessenger.sendCameraClosingEvent();
+ }
+
+ @Override
+ public void onDisconnected(@NonNull CameraDevice cameraDevice) {
+ Log.i(TAG, "open | onDisconnected");
+
+ close();
+ dartMessenger.sendCameraErrorEvent("The camera was disconnected.");
+ }
+
+ @Override
+ public void onError(@NonNull CameraDevice cameraDevice, int errorCode) {
+ Log.i(TAG, "open | onError");
+
+ close();
+ String errorDescription;
+ switch (errorCode) {
+ case ERROR_CAMERA_IN_USE:
+ errorDescription = "The camera device is in use already.";
+ break;
+ case ERROR_MAX_CAMERAS_IN_USE:
+ errorDescription = "Max cameras in use";
+ break;
+ case ERROR_CAMERA_DISABLED:
+ errorDescription = "The camera device could not be opened due to a device policy.";
+ break;
+ case ERROR_CAMERA_DEVICE:
+ errorDescription = "The camera device has encountered a fatal error";
+ break;
+ case ERROR_CAMERA_SERVICE:
+ errorDescription = "The camera service has encountered a fatal error.";
+ break;
+ default:
+ errorDescription = "Unknown camera error";
+ }
+ dartMessenger.sendCameraErrorEvent(errorDescription);
+ }
+ },
+ backgroundHandler);
+ }
+
+ @VisibleForTesting
+ void createCaptureSession(int templateType, Surface... surfaces) throws CameraAccessException {
+ createCaptureSession(templateType, null, surfaces);
+ }
+
+ private void createCaptureSession(
+ int templateType, Runnable onSuccessCallback, Surface... surfaces)
+ throws CameraAccessException {
+ // Close any existing capture session.
+ captureSession = null;
+
+ // Create a new capture builder.
+ previewRequestBuilder = cameraDevice.createCaptureRequest(templateType);
+
+ // Build Flutter surface to render to.
+ ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
+ SurfaceTexture surfaceTexture = flutterTexture.surfaceTexture();
+ surfaceTexture.setDefaultBufferSize(
+ resolutionFeature.getPreviewSize().getWidth(),
+ resolutionFeature.getPreviewSize().getHeight());
+ Surface flutterSurface = new Surface(surfaceTexture);
+ previewRequestBuilder.addTarget(flutterSurface);
+
+ List<Surface> remainingSurfaces = Arrays.asList(surfaces);
+ if (templateType != CameraDevice.TEMPLATE_PREVIEW) {
+ // If it is not preview mode, add all surfaces as targets.
+ for (Surface surface : remainingSurfaces) {
+ previewRequestBuilder.addTarget(surface);
+ }
+ }
+
+ // Update camera regions.
+ Size cameraBoundaries =
+ CameraRegionUtils.getCameraBoundaries(cameraProperties, previewRequestBuilder);
+ cameraFeatures.getExposurePoint().setCameraBoundaries(cameraBoundaries);
+ cameraFeatures.getFocusPoint().setCameraBoundaries(cameraBoundaries);
+
+ // Prepare the callback.
+ CameraCaptureSession.StateCallback callback =
+ new CameraCaptureSession.StateCallback() {
+ boolean captureSessionClosed = false;
+
+ @Override
+ public void onConfigured(@NonNull CameraCaptureSession session) {
+ Log.i(TAG, "CameraCaptureSession onConfigured");
+ // Camera was already closed.
+ if (cameraDevice == null || captureSessionClosed) {
+ dartMessenger.sendCameraErrorEvent("The camera was closed during configuration.");
+ return;
+ }
+ captureSession = session;
+
+ Log.i(TAG, "Updating builder settings");
+ updateBuilderSettings(previewRequestBuilder);
+
+ refreshPreviewCaptureSession(
+ onSuccessCallback, (code, message) -> dartMessenger.sendCameraErrorEvent(message));
+ }
+
+ @Override
+ public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
+ Log.i(TAG, "CameraCaptureSession onConfigureFailed");
+ dartMessenger.sendCameraErrorEvent("Failed to configure camera session.");
+ }
+
+ @Override
+ public void onClosed(@NonNull CameraCaptureSession session) {
+ Log.i(TAG, "CameraCaptureSession onClosed");
+ captureSessionClosed = true;
+ }
+ };
+
+ // Start the session.
+ if (VERSION.SDK_INT >= VERSION_CODES.P) {
+ // Collect all surfaces to render to.
+ List<OutputConfiguration> configs = new ArrayList<>();
+ configs.add(new OutputConfiguration(flutterSurface));
+ for (Surface surface : remainingSurfaces) {
+ configs.add(new OutputConfiguration(surface));
+ }
+ createCaptureSessionWithSessionConfig(configs, callback);
+ } else {
+ // Collect all surfaces to render to.
+ List<Surface> surfaceList = new ArrayList<>();
+ surfaceList.add(flutterSurface);
+ surfaceList.addAll(remainingSurfaces);
+ createCaptureSession(surfaceList, callback);
+ }
+ }
+
+ // Creates a capture session via the SessionConfiguration API (available from API 28 / P).
+ // State callbacks are delivered on a dedicated single-thread executor.
+ @TargetApi(VERSION_CODES.P)
+ private void createCaptureSessionWithSessionConfig(
+ List<OutputConfiguration> outputConfigs, CameraCaptureSession.StateCallback callback)
+ throws CameraAccessException {
+ cameraDevice.createCaptureSession(
+ new SessionConfiguration(
+ SessionConfiguration.SESSION_REGULAR,
+ outputConfigs,
+ Executors.newSingleThreadExecutor(),
+ callback));
+ }
+
+ // Creates a capture session via the pre-API-28 (deprecated) overload; state callbacks are
+ // delivered on the background handler instead of an executor.
+ @TargetApi(VERSION_CODES.LOLLIPOP)
+ @SuppressWarnings("deprecation")
+ private void createCaptureSession(
+ List<Surface> surfaces, CameraCaptureSession.StateCallback callback)
+ throws CameraAccessException {
+ cameraDevice.createCaptureSession(surfaces, callback, backgroundHandler);
+ }
+
+ // Send a repeating request to refresh capture session.
+ //
+ // No-ops (with a log message) when the capture session has not been created yet. The repeating
+ // request is skipped while the preview is paused, but onSuccessCallback still runs as long as
+ // no exception was thrown. Errors are reported through onErrorCallback with code "cameraAccess".
+ private void refreshPreviewCaptureSession(
+ @Nullable Runnable onSuccessCallback, @NonNull ErrorCallback onErrorCallback) {
+ Log.i(TAG, "refreshPreviewCaptureSession");
+
+ if (captureSession == null) {
+ Log.i(
+ TAG,
+ "refreshPreviewCaptureSession: captureSession not yet initialized, "
+ + "skipping preview capture session refresh.");
+ return;
+ }
+
+ try {
+ if (!pausedPreview) {
+ captureSession.setRepeatingRequest(
+ previewRequestBuilder.build(), cameraCaptureCallback, backgroundHandler);
+ }
+
+ if (onSuccessCallback != null) {
+ onSuccessCallback.run();
+ }
+
+ } catch (IllegalStateException e) {
+ onErrorCallback.onError("cameraAccess", "Camera is closed: " + e.getMessage());
+ } catch (CameraAccessException e) {
+ onErrorCallback.onError("cameraAccess", e.getMessage());
+ }
+ }
+
+ // (Re)creates the capture session for recording and/or image streaming, adding the media
+ // recorder and/or image-stream surfaces as extra render targets. When recording, the recorder
+ // is started only after the session is successfully configured (via successCallback).
+ private void startCapture(boolean record, boolean stream) throws CameraAccessException {
+ List<Surface> surfaces = new ArrayList<>();
+ Runnable successCallback = null;
+ if (record) {
+ surfaces.add(mediaRecorder.getSurface());
+ successCallback = () -> mediaRecorder.start();
+ }
+ if (stream) {
+ surfaces.add(imageStreamReader.getSurface());
+ }
+
+ createCaptureSession(
+ CameraDevice.TEMPLATE_RECORD, successCallback, surfaces.toArray(new Surface[0]));
+ }
+
+ /**
+ * Takes a single still picture, replying to {@code result} once the image has been saved (see
+ * {@link #onImageAvailable}) or with an error. Rejects concurrent captures: if the camera is
+ * not in the preview state, replies immediately with "captureAlreadyActive".
+ *
+ * @param result Flutter result completed when the capture finishes or fails.
+ */
+ public void takePicture(@NonNull final Result result) {
+ // Only take one picture at a time.
+ if (cameraCaptureCallback.getCameraState() != CameraState.STATE_PREVIEW) {
+ result.error("captureAlreadyActive", "Picture is currently already being captured", null);
+ return;
+ }
+
+ flutterResult = result;
+
+ // Create temporary file.
+ final File outputDir = applicationContext.getCacheDir();
+ try {
+ captureFile = File.createTempFile("CAP", ".jpg", outputDir);
+ captureTimeouts.reset();
+ } catch (IOException | SecurityException e) {
+ dartMessenger.error(flutterResult, "cannotCreateFile", e.getMessage(), null);
+ return;
+ }
+
+ // Listen for picture being taken.
+ pictureImageReader.setOnImageAvailableListener(this, backgroundHandler);
+
+ final AutoFocusFeature autoFocusFeature = cameraFeatures.getAutoFocus();
+ final boolean isAutoFocusSupported = autoFocusFeature.checkIsSupported();
+ if (isAutoFocusSupported && autoFocusFeature.getValue() == FocusMode.auto) {
+ runPictureAutoFocus();
+ } else {
+ runPrecaptureSequence();
+ }
+ }
+
+ /**
+ * Run the precapture sequence for capturing a still image. This method should be called when a
+ * response is received in {@link #cameraCaptureCallback} from lockFocus().
+ */
+ private void runPrecaptureSequence() {
+ Log.i(TAG, "runPrecaptureSequence");
+ try {
+ // First set precapture state to idle or else it can hang in STATE_WAITING_PRECAPTURE_START.
+ previewRequestBuilder.set(
+ CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+ CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
+ captureSession.capture(
+ previewRequestBuilder.build(), cameraCaptureCallback, backgroundHandler);
+
+ // Repeating request to refresh preview session.
+ refreshPreviewCaptureSession(
+ null,
+ (code, message) -> dartMessenger.error(flutterResult, "cameraAccess", message, null));
+
+ // Start precapture.
+ cameraCaptureCallback.setCameraState(CameraState.STATE_WAITING_PRECAPTURE_START);
+
+ previewRequestBuilder.set(
+ CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+ CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+
+ // Trigger one capture to start AE sequence.
+ captureSession.capture(
+ previewRequestBuilder.build(), cameraCaptureCallback, backgroundHandler);
+
+ } catch (CameraAccessException e) {
+ // NOTE(review): the exception is only printed here, so Dart never learns the capture
+ // failed on this path — confirm whether an error reply should be sent instead.
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Capture a still picture. This method should be called when a response is received in {@link
+ * #cameraCaptureCallback} from lockFocus().
+ */
+ private void takePictureAfterPrecapture() {
+ // Note: log string does not match the current method name.
+ Log.i(TAG, "captureStillPicture");
+ cameraCaptureCallback.setCameraState(CameraState.STATE_CAPTURING);
+
+ if (cameraDevice == null) {
+ return;
+ }
+ // This is the CaptureRequest.Builder that is used to take a picture.
+ CaptureRequest.Builder stillBuilder;
+ try {
+ stillBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+ } catch (CameraAccessException e) {
+ dartMessenger.error(flutterResult, "cameraAccess", e.getMessage(), null);
+ return;
+ }
+ stillBuilder.addTarget(pictureImageReader.getSurface());
+
+ // Zoom: carry the preview's crop region over to the still request.
+ stillBuilder.set(
+ CaptureRequest.SCALER_CROP_REGION,
+ previewRequestBuilder.get(CaptureRequest.SCALER_CROP_REGION));
+
+ // Have all features update the builder.
+ updateBuilderSettings(stillBuilder);
+
+ // Orientation: a locked capture orientation takes precedence over the device orientation.
+ final PlatformChannel.DeviceOrientation lockedOrientation =
+ ((SensorOrientationFeature) cameraFeatures.getSensorOrientation())
+ .getLockedCaptureOrientation();
+ stillBuilder.set(
+ CaptureRequest.JPEG_ORIENTATION,
+ lockedOrientation == null
+ ? getDeviceOrientationManager().getPhotoOrientation()
+ : getDeviceOrientationManager().getPhotoOrientation(lockedOrientation));
+
+ // Once the still capture completes, release the AF lock so the preview refocuses normally.
+ CameraCaptureSession.CaptureCallback captureCallback =
+ new CameraCaptureSession.CaptureCallback() {
+ @Override
+ public void onCaptureCompleted(
+ @NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request,
+ @NonNull TotalCaptureResult result) {
+ unlockAutoFocus();
+ }
+ };
+
+ try {
+ captureSession.stopRepeating();
+ Log.i(TAG, "sending capture request");
+ captureSession.capture(stillBuilder.build(), captureCallback, backgroundHandler);
+ } catch (CameraAccessException e) {
+ dartMessenger.error(flutterResult, "cameraAccess", e.getMessage(), null);
+ }
+ }
+
+ // Uses the deprecated getDefaultDisplay(); suppression is deliberate.
+ @SuppressWarnings("deprecation")
+ private Display getDefaultDisplay() {
+ return activity.getWindowManager().getDefaultDisplay();
+ }
+
+ /** Starts a background thread and its {@link Handler}. Idempotent: no-op if already running. */
+ public void startBackgroundThread() {
+ if (backgroundHandlerThread != null) {
+ return;
+ }
+
+ backgroundHandlerThread = HandlerThreadFactory.create("CameraBackground");
+ try {
+ backgroundHandlerThread.start();
+ } catch (IllegalThreadStateException e) {
+ // Ignore exception in case the thread has already started.
+ }
+ backgroundHandler = HandlerFactory.create(backgroundHandlerThread.getLooper());
+ }
+
+ /** Stops the background thread and its {@link Handler}, clearing both references. */
+ public void stopBackgroundThread() {
+ final HandlerThread thread = backgroundHandlerThread;
+ backgroundHandlerThread = null;
+ backgroundHandler = null;
+ if (thread != null) {
+ thread.quitSafely();
+ }
+ }
+
+ /** Start capturing a picture, doing autofocus first. */
+ private void runPictureAutoFocus() {
+ Log.i(TAG, "runPictureAutoFocus");
+
+ // The capture callback advances from STATE_WAITING_FOCUS once AF converges (or times out).
+ cameraCaptureCallback.setCameraState(CameraState.STATE_WAITING_FOCUS);
+ lockAutoFocus();
+ }
+
+ /** Triggers a one-shot autofocus lock by issuing CONTROL_AF_TRIGGER_START. */
+ private void lockAutoFocus() {
+ Log.i(TAG, "lockAutoFocus");
+ if (captureSession == null) {
+ // Fixed: this guard previously logged "[unlockAutoFocus]", a copy/paste error from
+ // unlockAutoFocus() that misattributed the log line to the wrong method.
+ Log.i(TAG, "[lockAutoFocus] captureSession null, returning");
+ return;
+ }
+
+ // Trigger AF to start.
+ previewRequestBuilder.set(
+ CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
+
+ try {
+ captureSession.capture(previewRequestBuilder.build(), null, backgroundHandler);
+ } catch (CameraAccessException e) {
+ dartMessenger.sendCameraErrorEvent(e.getMessage());
+ }
+ }
+
+ /** Cancel and reset auto focus state and refresh the preview session. */
+ private void unlockAutoFocus() {
+ Log.i(TAG, "unlockAutoFocus");
+ if (captureSession == null) {
+ Log.i(TAG, "[unlockAutoFocus] captureSession null, returning");
+ return;
+ }
+ try {
+ // Cancel existing AF state.
+ previewRequestBuilder.set(
+ CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+ captureSession.capture(previewRequestBuilder.build(), null, backgroundHandler);
+
+ // Set AF state to idle again.
+ previewRequestBuilder.set(
+ CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+
+ captureSession.capture(previewRequestBuilder.build(), null, backgroundHandler);
+ } catch (CameraAccessException e) {
+ // On failure, report the error and skip the preview refresh below.
+ dartMessenger.sendCameraErrorEvent(e.getMessage());
+ return;
+ }
+
+ refreshPreviewCaptureSession(
+ null,
+ (errorCode, errorMessage) ->
+ dartMessenger.error(flutterResult, errorCode, errorMessage, null));
+ }
+
+ /**
+ * Starts video recording (optionally with a concurrent image stream), replying to {@code result}
+ * with success or a "videoRecordingFailed" error.
+ */
+ public void startVideoRecording(
+ @NonNull Result result, @Nullable EventChannel imageStreamChannel) {
+ // NOTE(review): if prepareRecording() already replied with an error, execution continues
+ // here and result may be completed a second time — confirm and guard if so.
+ prepareRecording(result);
+
+ if (imageStreamChannel != null) {
+ setStreamHandler(imageStreamChannel);
+ }
+
+ recordingVideo = true;
+ try {
+ startCapture(true, imageStreamChannel != null);
+ result.success(null);
+ } catch (CameraAccessException e) {
+ recordingVideo = false;
+ captureFile = null;
+ result.error("videoRecordingFailed", e.getMessage(), null);
+ }
+ }
+
+ /**
+ * Stops video recording, replying with the recorded file's absolute path, and restarts the
+ * preview. No-op (successful) when not currently recording.
+ */
+ public void stopVideoRecording(@NonNull final Result result) {
+ if (!recordingVideo) {
+ result.success(null);
+ return;
+ }
+ // Re-create autofocus feature so it's using continuous capture focus mode now.
+ cameraFeatures.setAutoFocus(
+ cameraFeatureFactory.createAutoFocusFeature(cameraProperties, false));
+ recordingVideo = false;
+ try {
+ captureSession.abortCaptures();
+ mediaRecorder.stop();
+ } catch (CameraAccessException | IllegalStateException e) {
+ // Ignore exceptions and try to continue (chances are camera session already aborted capture).
+ }
+ mediaRecorder.reset();
+ try {
+ startPreview();
+ } catch (CameraAccessException | IllegalStateException e) {
+ result.error("videoRecordingFailed", e.getMessage(), null);
+ return;
+ }
+ result.success(captureFile.getAbsolutePath());
+ captureFile = null;
+ }
+
+ /** Pauses an active recording. Requires API 24+; otherwise replies with an error. */
+ public void pauseVideoRecording(@NonNull final Result result) {
+ if (!recordingVideo) {
+ result.success(null);
+ return;
+ }
+
+ try {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ mediaRecorder.pause();
+ } else {
+ result.error("videoRecordingFailed", "pauseVideoRecording requires Android API +24.", null);
+ return;
+ }
+ } catch (IllegalStateException e) {
+ result.error("videoRecordingFailed", e.getMessage(), null);
+ return;
+ }
+
+ result.success(null);
+ }
+
+ /** Resumes a paused recording. Requires API 24+; otherwise replies with an error. */
+ public void resumeVideoRecording(@NonNull final Result result) {
+ if (!recordingVideo) {
+ result.success(null);
+ return;
+ }
+
+ try {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ mediaRecorder.resume();
+ } else {
+ result.error(
+ "videoRecordingFailed", "resumeVideoRecording requires Android API +24.", null);
+ return;
+ }
+ } catch (IllegalStateException e) {
+ result.error("videoRecordingFailed", e.getMessage(), null);
+ return;
+ }
+
+ result.success(null);
+ }
+
+ /**
+ * Method handler for setting new flash modes.
+ *
+ * @param result Flutter result.
+ * @param newMode new mode.
+ */
+ public void setFlashMode(@NonNull final Result result, @NonNull FlashMode newMode) {
+ // Save the new flash mode setting, apply it to the preview request, and refresh the session.
+ final FlashFeature flashFeature = cameraFeatures.getFlash();
+ flashFeature.setValue(newMode);
+ flashFeature.updateBuilder(previewRequestBuilder);
+
+ refreshPreviewCaptureSession(
+ () -> result.success(null),
+ (code, message) -> result.error("setFlashModeFailed", "Could not set flash mode.", null));
+ }
+
+ /**
+ * Method handler for setting new exposure modes.
+ *
+ * @param result Flutter result.
+ * @param newMode new mode.
+ */
+ public void setExposureMode(@NonNull final Result result, @NonNull ExposureMode newMode) {
+ final ExposureLockFeature exposureLockFeature = cameraFeatures.getExposureLock();
+ exposureLockFeature.setValue(newMode);
+ exposureLockFeature.updateBuilder(previewRequestBuilder);
+
+ refreshPreviewCaptureSession(
+ () -> result.success(null),
+ (code, message) ->
+ result.error("setExposureModeFailed", "Could not set exposure mode.", null));
+ }
+
+ /**
+ * Sets new exposure point from dart.
+ *
+ * @param result Flutter result.
+ * @param point The exposure point; null clears any previously set point.
+ */
+ public void setExposurePoint(@NonNull final Result result, @Nullable Point point) {
+ final ExposurePointFeature exposurePointFeature = cameraFeatures.getExposurePoint();
+ exposurePointFeature.setValue(point);
+ exposurePointFeature.updateBuilder(previewRequestBuilder);
+
+ refreshPreviewCaptureSession(
+ () -> result.success(null),
+ (code, message) ->
+ result.error("setExposurePointFailed", "Could not set exposure point.", null));
+ }
+
+ /** Return the max exposure offset value supported by the camera to dart. */
+ public double getMaxExposureOffset() {
+ return cameraFeatures.getExposureOffset().getMaxExposureOffset();
+ }
+
+ /** Return the min exposure offset value supported by the camera to dart. */
+ public double getMinExposureOffset() {
+ return cameraFeatures.getExposureOffset().getMinExposureOffset();
+ }
+
+ /** Return the exposure offset step size to dart. */
+ public double getExposureOffsetStepSize() {
+ return cameraFeatures.getExposureOffset().getExposureOffsetStepSize();
+ }
+
+ /**
+ * Sets new focus mode from dart.
+ *
+ * @param result Flutter result.
+ * @param newMode New mode.
+ */
+ public void setFocusMode(final Result result, @NonNull FocusMode newMode) {
+ final AutoFocusFeature autoFocusFeature = cameraFeatures.getAutoFocus();
+ autoFocusFeature.setValue(newMode);
+ autoFocusFeature.updateBuilder(previewRequestBuilder);
+
+ /*
+ * For focus mode an extra step of actually locking/unlocking the
+ * focus has to be done, in order to ensure it goes into the correct state.
+ */
+ if (!pausedPreview) {
+ switch (newMode) {
+ case locked:
+ // Perform a single focus trigger.
+ if (captureSession == null) {
+ Log.i(TAG, "[unlockAutoFocus] captureSession null, returning");
+ return;
+ }
+ lockAutoFocus();
+
+ // Set AF state to idle again.
+ previewRequestBuilder.set(
+ CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+
+ try {
+ captureSession.setRepeatingRequest(
+ previewRequestBuilder.build(), null, backgroundHandler);
+ } catch (CameraAccessException e) {
+ if (result != null) {
+ result.error(
+ "setFocusModeFailed", "Error setting focus mode: " + e.getMessage(), null);
+ }
+ return;
+ }
+ break;
+ case auto:
+ // Cancel current AF trigger and set AF to idle again.
+ unlockAutoFocus();
+ break;
+ }
+ }
+
+ if (result != null) {
+ result.success(null);
+ }
+ }
+
+ /**
+ * Sets new focus point from dart.
+ *
+ * @param result Flutter result.
+ * @param point the new coordinates; null clears any previously set point.
+ */
+ public void setFocusPoint(@NonNull final Result result, @Nullable Point point) {
+ final FocusPointFeature focusPointFeature = cameraFeatures.getFocusPoint();
+ focusPointFeature.setValue(point);
+ focusPointFeature.updateBuilder(previewRequestBuilder);
+
+ refreshPreviewCaptureSession(
+ () -> result.success(null),
+ (code, message) -> result.error("setFocusPointFailed", "Could not set focus point.", null));
+
+ // Re-apply the current focus mode so the new point takes effect; null result means
+ // setFocusMode sends no (second) reply.
+ this.setFocusMode(null, cameraFeatures.getAutoFocus().getValue());
+ }
+
+ /**
+ * Sets a new exposure offset from dart. From dart the offset comes as a double, like +1.3 or
+ * -1.3.
+ *
+ * @param result flutter result.
+ * @param offset new value.
+ */
+ public void setExposureOffset(@NonNull final Result result, double offset) {
+ final ExposureOffsetFeature exposureOffsetFeature = cameraFeatures.getExposureOffset();
+ exposureOffsetFeature.setValue(offset);
+ exposureOffsetFeature.updateBuilder(previewRequestBuilder);
+
+ refreshPreviewCaptureSession(
+ () -> result.success(exposureOffsetFeature.getValue()),
+ (code, message) ->
+ result.error("setExposureOffsetFailed", "Could not set exposure offset.", null));
+ }
+
+ /** Returns the maximum zoom level supported by the camera. */
+ public float getMaxZoomLevel() {
+ return cameraFeatures.getZoomLevel().getMaximumZoomLevel();
+ }
+
+ /** Returns the minimum zoom level supported by the camera. */
+ public float getMinZoomLevel() {
+ return cameraFeatures.getZoomLevel().getMinimumZoomLevel();
+ }
+
+ /** Shortcut to get current recording profile. Legacy method provides support for SDK < 31. */
+ CamcorderProfile getRecordingProfileLegacy() {
+ return cameraFeatures.getResolution().getRecordingProfileLegacy();
+ }
+
+ /** Shortcut to get the current (SDK 31+) recording profile. */
+ EncoderProfiles getRecordingProfile() {
+ return cameraFeatures.getResolution().getRecordingProfile();
+ }
+
+ /** Shortcut to get deviceOrientationListener. */
+ DeviceOrientationManager getDeviceOrientationManager() {
+ return cameraFeatures.getSensorOrientation().getDeviceOrientationManager();
+ }
+
+ /**
+ * Sets zoom level from dart. Replies with "ZOOM_ERROR" when the requested level is outside the
+ * supported [min, max] range.
+ *
+ * @param result Flutter result.
+ * @param zoom new value.
+ */
+ public void setZoomLevel(@NonNull final Result result, float zoom) throws CameraAccessException {
+ final ZoomLevelFeature zoomLevel = cameraFeatures.getZoomLevel();
+ float maxZoom = zoomLevel.getMaximumZoomLevel();
+ float minZoom = zoomLevel.getMinimumZoomLevel();
+
+ if (zoom > maxZoom || zoom < minZoom) {
+ String errorMessage =
+ String.format(
+ Locale.ENGLISH,
+ "Zoom level out of bounds (zoom level should be between %f and %f).",
+ minZoom,
+ maxZoom);
+ result.error("ZOOM_ERROR", errorMessage, null);
+ return;
+ }
+
+ zoomLevel.setValue(zoom);
+ zoomLevel.updateBuilder(previewRequestBuilder);
+
+ refreshPreviewCaptureSession(
+ () -> result.success(null),
+ (code, message) -> result.error("setZoomLevelFailed", "Could not set zoom level.", null));
+ }
+
+ /**
+ * Lock capture orientation from dart.
+ *
+ * @param orientation new orientation.
+ */
+ public void lockCaptureOrientation(PlatformChannel.DeviceOrientation orientation) {
+ cameraFeatures.getSensorOrientation().lockCaptureOrientation(orientation);
+ }
+
+ /** Unlock capture orientation from dart. */
+ public void unlockCaptureOrientation() {
+ cameraFeatures.getSensorOrientation().unlockCaptureOrientation();
+ }
+
+ /** Pause the preview from dart by stopping the repeating preview request. */
+ public void pausePreview() throws CameraAccessException {
+ this.pausedPreview = true;
+ this.captureSession.stopRepeating();
+ }
+
+ /** Resume the preview from dart by restarting the repeating preview request. */
+ public void resumePreview() {
+ this.pausedPreview = false;
+ this.refreshPreviewCaptureSession(
+ null, (code, message) -> dartMessenger.sendCameraErrorEvent(message));
+ }
+
+ /** Starts the preview session, rendering to the still-picture reader's surface as well. */
+ public void startPreview() throws CameraAccessException {
+ if (pictureImageReader == null || pictureImageReader.getSurface() == null) return;
+ Log.i(TAG, "startPreview");
+
+ createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface());
+ }
+
+ /** Starts the preview together with an image stream delivered over the given event channel. */
+ public void startPreviewWithImageStream(EventChannel imageStreamChannel)
+ throws CameraAccessException {
+ setStreamHandler(imageStreamChannel);
+
+ startCapture(false, true);
+ Log.i(TAG, "startPreviewWithImageStream");
+ }
+
+ /**
+ * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
+ * still image is ready to be saved.
+ */
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ Log.i(TAG, "onImageAvailable");
+
+ // Save the image on the background thread, then complete the pending takePicture result
+ // with the saved file's absolute path (or an error).
+ backgroundHandler.post(
+ new ImageSaver(
+ // Use acquireNextImage since image reader is only for one image.
+ reader.acquireNextImage(),
+ captureFile,
+ new ImageSaver.Callback() {
+ @Override
+ public void onComplete(String absolutePath) {
+ dartMessenger.finish(flutterResult, absolutePath);
+ }
+
+ @Override
+ public void onError(String errorCode, String errorMessage) {
+ dartMessenger.error(flutterResult, errorCode, errorMessage, null);
+ }
+ }));
+ cameraCaptureCallback.setCameraState(CameraState.STATE_PREVIEW);
+ }
+
+ /**
+ * Creates the temporary .mp4 output file and configures the media recorder; on failure the
+ * error is reported through {@code result} and recording state is reset.
+ */
+ private void prepareRecording(@NonNull Result result) {
+ final File outputDir = applicationContext.getCacheDir();
+ try {
+ captureFile = File.createTempFile("REC", ".mp4", outputDir);
+ } catch (IOException | SecurityException e) {
+ result.error("cannotCreateFile", e.getMessage(), null);
+ return;
+ }
+ try {
+ prepareMediaRecorder(captureFile.getAbsolutePath());
+ } catch (IOException e) {
+ recordingVideo = false;
+ captureFile = null;
+ result.error("videoRecordingFailed", e.getMessage(), null);
+ return;
+ }
+ // Re-create autofocus feature so it's using video focus mode now.
+ cameraFeatures.setAutoFocus(
+ cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));
+ }
+
+ /** Wires the Dart image-stream event channel to the image stream reader. */
+ private void setStreamHandler(EventChannel imageStreamChannel) {
+ imageStreamChannel.setStreamHandler(
+ new EventChannel.StreamHandler() {
+ @Override
+ public void onListen(Object o, EventChannel.EventSink imageStreamSink) {
+ setImageStreamImageAvailableListener(imageStreamSink);
+ }
+
+ @Override
+ public void onCancel(Object o) {
+ // Stop forwarding frames when Dart cancels the stream.
+ imageStreamReader.setOnImageAvailableListener(null, backgroundHandler);
+ }
+ });
+ }
+
+ // Forwards each frame from the image stream reader to Dart as a map of plane buffers plus
+ // capture metadata (lens aperture, exposure time, sensor sensitivity). Plane bytes are copied
+ // out before the Image is closed; delivery to the sink happens on the main looper.
+ private void setImageStreamImageAvailableListener(final EventChannel.EventSink imageStreamSink) {
+ imageStreamReader.setOnImageAvailableListener(
+ reader -> {
+ Image img = reader.acquireNextImage();
+ // Use acquireNextImage since image reader is only for one image.
+ if (img == null) return;
+
+ List<Map<String, Object>> planes = new ArrayList<>();
+ for (Image.Plane plane : img.getPlanes()) {
+ ByteBuffer buffer = plane.getBuffer();
+
+ byte[] bytes = new byte[buffer.remaining()];
+ buffer.get(bytes, 0, bytes.length);
+
+ Map<String, Object> planeBuffer = new HashMap<>();
+ planeBuffer.put("bytesPerRow", plane.getRowStride());
+ planeBuffer.put("bytesPerPixel", plane.getPixelStride());
+ planeBuffer.put("bytes", bytes);
+
+ planes.add(planeBuffer);
+ }
+
+ Map<String, Object> imageBuffer = new HashMap<>();
+ imageBuffer.put("width", img.getWidth());
+ imageBuffer.put("height", img.getHeight());
+ imageBuffer.put("format", img.getFormat());
+ imageBuffer.put("planes", planes);
+ imageBuffer.put("lensAperture", this.captureProps.getLastLensAperture());
+ imageBuffer.put("sensorExposureTime", this.captureProps.getLastSensorExposureTime());
+ Integer sensorSensitivity = this.captureProps.getLastSensorSensitivity();
+ // Boxed as Double (or null) so the platform channel codec handles it uniformly.
+ imageBuffer.put(
+ "sensorSensitivity", sensorSensitivity == null ? null : (double) sensorSensitivity);
+
+ final Handler handler = new Handler(Looper.getMainLooper());
+ handler.post(() -> imageStreamSink.success(imageBuffer));
+ img.close();
+ },
+ backgroundHandler);
+ }
+
+ /** Closes the active capture session, if any, and clears the reference. */
+ private void closeCaptureSession() {
+ if (captureSession == null) {
+ return;
+ }
+ Log.i(TAG, "closeCaptureSession");
+ captureSession.close();
+ captureSession = null;
+ }
+
+ /** Releases the camera device, readers, media recorder, and background thread. */
+ public void close() {
+ Log.i(TAG, "close");
+
+ if (cameraDevice != null) {
+ cameraDevice.close();
+ cameraDevice = null;
+
+ // Closing the CameraDevice without closing the CameraCaptureSession is recommended
+ // for quickly closing the camera:
+ // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close()
+ captureSession = null;
+ } else {
+ closeCaptureSession();
+ }
+
+ if (pictureImageReader != null) {
+ pictureImageReader.close();
+ pictureImageReader = null;
+ }
+ if (imageStreamReader != null) {
+ imageStreamReader.close();
+ imageStreamReader = null;
+ }
+ if (mediaRecorder != null) {
+ mediaRecorder.reset();
+ mediaRecorder.release();
+ mediaRecorder = null;
+ }
+
+ stopBackgroundThread();
+ }
+
+ /** Fully disposes the camera: closes resources, releases the texture, stops orientation updates. */
+ public void dispose() {
+ Log.i(TAG, "dispose");
+
+ close();
+ flutterTexture.release();
+ getDeviceOrientationManager().stop();
+ }
+
+ /** Factory class that assists in creating a {@link HandlerThread} instance. */
+ static class HandlerThreadFactory {
+ /**
+ * Creates a new instance of the {@link HandlerThread} class.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this *
+ * class.
+ *
+ * @param name to give to the HandlerThread.
+ * @return new instance of the {@link HandlerThread} class.
+ */
+ @VisibleForTesting
+ public static HandlerThread create(String name) {
+ return new HandlerThread(name);
+ }
+ }
+
+ /** Factory class that assists in creating a {@link Handler} instance. */
+ static class HandlerFactory {
+ /**
+ * Creates a new instance of the {@link Handler} class.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this *
+ * class.
+ *
+ * @param looper to give to the Handler.
+ * @return new instance of the {@link Handler} class.
+ */
+ @VisibleForTesting
+ public static Handler create(Looper looper) {
+ return new Handler(looper);
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraCaptureCallback.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraCaptureCallback.java
new file mode 100644
index 0000000..805f182
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraCaptureCallback.java
@@ -0,0 +1,183 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.util.Log;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.types.CameraCaptureProperties;
+import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper;
+
+/**
+ * A callback object for tracking the progress of a {@link android.hardware.camera2.CaptureRequest}
+ * submitted to the camera device.
+ */
+class CameraCaptureCallback extends CaptureCallback {
+ private static final String TAG = "CameraCaptureCallback";
+ private final CameraCaptureStateListener cameraStateListener;
+ // Current position in the still-capture state machine; starts in preview.
+ private CameraState cameraState;
+ private final CaptureTimeoutsWrapper captureTimeouts;
+ private final CameraCaptureProperties captureProps;
+
+ private CameraCaptureCallback(
+ @NonNull CameraCaptureStateListener cameraStateListener,
+ @NonNull CaptureTimeoutsWrapper captureTimeouts,
+ @NonNull CameraCaptureProperties captureProps) {
+ cameraState = CameraState.STATE_PREVIEW;
+ this.cameraStateListener = cameraStateListener;
+ this.captureTimeouts = captureTimeouts;
+ this.captureProps = captureProps;
+ }
+
+ /**
+ * Creates a new instance of the {@link CameraCaptureCallback} class.
+ *
+ * @param cameraStateListener instance which will be called when the camera state changes.
+ * @param captureTimeouts specifying the different timeout counters that should be taken into
+ * account.
+ * @return a configured instance of the {@link CameraCaptureCallback} class.
+ */
+ public static CameraCaptureCallback create(
+ @NonNull CameraCaptureStateListener cameraStateListener,
+ @NonNull CaptureTimeoutsWrapper captureTimeouts,
+ @NonNull CameraCaptureProperties captureProps) {
+ return new CameraCaptureCallback(cameraStateListener, captureTimeouts, captureProps);
+ }
+
+ /**
+ * Gets the current {@link CameraState}.
+ *
+ * @return the current {@link CameraState}.
+ */
+ public CameraState getCameraState() {
+ return cameraState;
+ }
+
+ /**
+ * Sets the {@link CameraState}.
+ *
+ * @param state the camera is currently in.
+ */
+ public void setCameraState(@NonNull CameraState state) {
+ cameraState = state;
+ }
+
+ // Drives the still-capture state machine: records the latest capture properties, then advances
+ // the state based on the AF/AE result values and the configured timeout counters.
+ private void process(CaptureResult result) {
+ Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+ Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
+
+ // Update capture properties
+ if (result instanceof TotalCaptureResult) {
+ Float lensAperture = result.get(CaptureResult.LENS_APERTURE);
+ Long sensorExposureTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
+ Integer sensorSensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
+ this.captureProps.setLastLensAperture(lensAperture);
+ this.captureProps.setLastSensorExposureTime(sensorExposureTime);
+ this.captureProps.setLastSensorSensitivity(sensorSensitivity);
+ }
+
+ if (cameraState != CameraState.STATE_PREVIEW) {
+ Log.d(
+ TAG,
+ "CameraCaptureCallback | state: "
+ + cameraState
+ + " | afState: "
+ + afState
+ + " | aeState: "
+ + aeState);
+ }
+
+ switch (cameraState) {
+ case STATE_PREVIEW:
+ {
+ // We have nothing to do when the camera preview is working normally.
+ break;
+ }
+ case STATE_WAITING_FOCUS:
+ {
+ if (afState == null) {
+ return;
+ } else if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
+ || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
+ handleWaitingFocusState(aeState);
+ } else if (captureTimeouts.getPreCaptureFocusing().getIsExpired()) {
+ Log.w(TAG, "Focus timeout, moving on with capture");
+ handleWaitingFocusState(aeState);
+ }
+
+ break;
+ }
+ case STATE_WAITING_PRECAPTURE_START:
+ {
+ // CONTROL_AE_STATE can be null on some devices
+ if (aeState == null
+ || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
+ || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
+ || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
+ setCameraState(CameraState.STATE_WAITING_PRECAPTURE_DONE);
+ } else if (captureTimeouts.getPreCaptureMetering().getIsExpired()) {
+ Log.w(TAG, "Metering timeout waiting for pre-capture to start, moving on with capture");
+
+ setCameraState(CameraState.STATE_WAITING_PRECAPTURE_DONE);
+ }
+ break;
+ }
+ case STATE_WAITING_PRECAPTURE_DONE:
+ {
+ // CONTROL_AE_STATE can be null on some devices
+ if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
+ cameraStateListener.onConverged();
+ } else if (captureTimeouts.getPreCaptureMetering().getIsExpired()) {
+ Log.w(
+ TAG, "Metering timeout waiting for pre-capture to finish, moving on with capture");
+ cameraStateListener.onConverged();
+ }
+
+ break;
+ }
+ }
+ }
+
+ // Decides whether to proceed straight to capture (AE converged) or run the precapture
+ // metering sequence first.
+ private void handleWaitingFocusState(Integer aeState) {
+ // CONTROL_AE_STATE can be null on some devices
+ if (aeState == null || aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED) {
+ cameraStateListener.onConverged();
+ } else {
+ cameraStateListener.onPrecapture();
+ }
+ }
+
+ @Override
+ public void onCaptureProgressed(
+ @NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request,
+ @NonNull CaptureResult partialResult) {
+ process(partialResult);
+ }
+
+ @Override
+ public void onCaptureCompleted(
+ @NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request,
+ @NonNull TotalCaptureResult result) {
+ process(result);
+ }
+
+ /** An interface that describes the different state changes implementers can be informed about. */
+ interface CameraCaptureStateListener {
+
+ /** Called when the {@link android.hardware.camera2.CaptureRequest} has been converged. */
+ void onConverged();
+
+ /**
+ * Called when the {@link android.hardware.camera2.CaptureRequest} enters the pre-capture state.
+ */
+ void onPrecapture();
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraPermissions.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraPermissions.java
new file mode 100644
index 0000000..ee8fa5a
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraPermissions.java
@@ -0,0 +1,120 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.Manifest;
+import android.Manifest.permission;
+import android.app.Activity;
+import android.content.pm.PackageManager;
+import androidx.annotation.VisibleForTesting;
+import androidx.core.app.ActivityCompat;
+import androidx.core.content.ContextCompat;
+
+final class CameraPermissions {
+ interface PermissionsRegistry {
+ @SuppressWarnings("deprecation")
+ void addListener(
+ io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener handler);
+ }
+
+ interface ResultCallback {
+ void onResult(String errorCode, String errorDescription);
+ }
+
+ /**
+ * Camera access permission errors handled when camera is created. See {@code MethodChannelCamera}
+ * in {@code camera/camera_platform_interface} for details.
+ */
+ private static final String CAMERA_PERMISSIONS_REQUEST_ONGOING =
+ "CameraPermissionsRequestOngoing";
+
+ private static final String CAMERA_PERMISSIONS_REQUEST_ONGOING_MESSAGE =
+ "Another request is ongoing and multiple requests cannot be handled at once.";
+ private static final String CAMERA_ACCESS_DENIED = "CameraAccessDenied";
+ private static final String CAMERA_ACCESS_DENIED_MESSAGE = "Camera access permission was denied.";
+ private static final String AUDIO_ACCESS_DENIED = "AudioAccessDenied";
+ private static final String AUDIO_ACCESS_DENIED_MESSAGE = "Audio access permission was denied.";
+
+ private static final int CAMERA_REQUEST_ID = 9796;
+ @VisibleForTesting boolean ongoing = false;
+
+ void requestPermissions(
+ Activity activity,
+ PermissionsRegistry permissionsRegistry,
+ boolean enableAudio,
+ ResultCallback callback) {
+ if (ongoing) {
+ callback.onResult(
+ CAMERA_PERMISSIONS_REQUEST_ONGOING, CAMERA_PERMISSIONS_REQUEST_ONGOING_MESSAGE);
+ return;
+ }
+ if (!hasCameraPermission(activity) || (enableAudio && !hasAudioPermission(activity))) {
+ permissionsRegistry.addListener(
+ new CameraRequestPermissionsListener(
+ (String errorCode, String errorDescription) -> {
+ ongoing = false;
+ callback.onResult(errorCode, errorDescription);
+ }));
+ ongoing = true;
+ ActivityCompat.requestPermissions(
+ activity,
+ enableAudio
+ ? new String[] {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}
+ : new String[] {Manifest.permission.CAMERA},
+ CAMERA_REQUEST_ID);
+ } else {
+ // Permissions already exist. Call the callback with success.
+ callback.onResult(null, null);
+ }
+ }
+
+ private boolean hasCameraPermission(Activity activity) {
+ return ContextCompat.checkSelfPermission(activity, permission.CAMERA)
+ == PackageManager.PERMISSION_GRANTED;
+ }
+
+ private boolean hasAudioPermission(Activity activity) {
+ return ContextCompat.checkSelfPermission(activity, permission.RECORD_AUDIO)
+ == PackageManager.PERMISSION_GRANTED;
+ }
+
+ @VisibleForTesting
+ @SuppressWarnings("deprecation")
+ static final class CameraRequestPermissionsListener
+ implements io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener {
+
+ // There's no way to unregister permission listeners in the v1 embedding, so we'll be called
+ // duplicate times in cases where the user denies and then grants a permission. Keep track of if
+ // we've responded before and bail out of handling the callback manually if this is a repeat
+ // call.
+ boolean alreadyCalled = false;
+
+ final ResultCallback callback;
+
+ @VisibleForTesting
+ CameraRequestPermissionsListener(ResultCallback callback) {
+ this.callback = callback;
+ }
+
+ @Override
+ public boolean onRequestPermissionsResult(int id, String[] permissions, int[] grantResults) {
+ if (alreadyCalled || id != CAMERA_REQUEST_ID) {
+ return false;
+ }
+
+ alreadyCalled = true;
+ // grantResults could be empty if the permissions request with the user is interrupted
+ // https://developer.android.com/reference/android/app/Activity#onRequestPermissionsResult(int,%20java.lang.String[],%20int[])
+ if (grantResults.length == 0 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
+ callback.onResult(CAMERA_ACCESS_DENIED, CAMERA_ACCESS_DENIED_MESSAGE);
+ } else if (grantResults.length > 1 && grantResults[1] != PackageManager.PERMISSION_GRANTED) {
+ callback.onResult(AUDIO_ACCESS_DENIED, AUDIO_ACCESS_DENIED_MESSAGE);
+ } else {
+ callback.onResult(null, null);
+ }
+ return true;
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java
new file mode 100644
index 0000000..067ed02
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java
@@ -0,0 +1,109 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.app.Activity;
+import android.os.Build;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import io.flutter.embedding.engine.plugins.FlutterPlugin;
+import io.flutter.embedding.engine.plugins.activity.ActivityAware;
+import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camera.CameraPermissions.PermissionsRegistry;
+import io.flutter.view.TextureRegistry;
+
+/**
+ * Platform implementation of the camera_plugin.
+ *
+ * <p>Instantiate this in an add to app scenario to gracefully handle activity and context changes.
+ * See {@code io.flutter.plugins.camera.MainActivity} for an example.
+ *
+ * <p>Call {@link #registerWith(io.flutter.plugin.common.PluginRegistry.Registrar)} to register an
+ * implementation of this that uses the stable {@code io.flutter.plugin.common} package.
+ */
+public final class CameraPlugin implements FlutterPlugin, ActivityAware {
+
+ private static final String TAG = "CameraPlugin";
+ private @Nullable FlutterPluginBinding flutterPluginBinding;
+ private @Nullable MethodCallHandlerImpl methodCallHandler;
+
+ /**
+ * Initialize this within the {@code #configureFlutterEngine} of a Flutter activity or fragment.
+ *
+ * <p>See {@code io.flutter.plugins.camera.MainActivity} for an example.
+ */
+ public CameraPlugin() {}
+
+ /**
+ * Registers a plugin implementation that uses the stable {@code io.flutter.plugin.common}
+ * package.
+ *
+ * <p>Calling this automatically initializes the plugin. However, plugins initialized this way
+ * won't react to changes in activity or context, unlike {@link CameraPlugin}.
+ */
+ @SuppressWarnings("deprecation")
+ public static void registerWith(io.flutter.plugin.common.PluginRegistry.Registrar registrar) {
+ CameraPlugin plugin = new CameraPlugin();
+ plugin.maybeStartListening(
+ registrar.activity(),
+ registrar.messenger(),
+ registrar::addRequestPermissionsResultListener,
+ registrar.view());
+ }
+
+ @Override
+ public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) {
+ this.flutterPluginBinding = binding;
+ }
+
+ @Override
+ public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
+ this.flutterPluginBinding = null;
+ }
+
+ @Override
+ public void onAttachedToActivity(@NonNull ActivityPluginBinding binding) {
+ maybeStartListening(
+ binding.getActivity(),
+ flutterPluginBinding.getBinaryMessenger(),
+ binding::addRequestPermissionsResultListener,
+ flutterPluginBinding.getTextureRegistry());
+ }
+
+ @Override
+ public void onDetachedFromActivity() {
+ // Could be on too low of an SDK to have started listening originally.
+ if (methodCallHandler != null) {
+ methodCallHandler.stopListening();
+ methodCallHandler = null;
+ }
+ }
+
+ @Override
+ public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBinding binding) {
+ onAttachedToActivity(binding);
+ }
+
+ @Override
+ public void onDetachedFromActivityForConfigChanges() {
+ onDetachedFromActivity();
+ }
+
+ private void maybeStartListening(
+ Activity activity,
+ BinaryMessenger messenger,
+ PermissionsRegistry permissionsRegistry,
+ TextureRegistry textureRegistry) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
+ // If the sdk is less than 21 (min sdk for Camera2) we don't register the plugin.
+ return;
+ }
+
+ methodCallHandler =
+ new MethodCallHandlerImpl(
+ activity, messenger, new CameraPermissions(), permissionsRegistry, textureRegistry);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraProperties.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraProperties.java
new file mode 100644
index 0000000..a69bae4
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraProperties.java
@@ -0,0 +1,386 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.graphics.Rect;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.os.Build.VERSION_CODES;
+import android.util.Range;
+import android.util.Rational;
+import android.util.Size;
+import androidx.annotation.RequiresApi;
+
+/** An interface allowing access to the different characteristics of the device's camera. */
+public interface CameraProperties {
+
+ /**
+ * Returns the name (or identifier) of the camera device.
+ *
+ * @return String The name of the camera device.
+ */
+ String getCameraName();
+
+ /**
+ * Returns the list of frame rate ranges for @see android.control.aeTargetFpsRange supported by
+ * this camera device.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#CONTROL_AE_TARGET_FPS_RANGE key.
+ *
+ * @return android.util.Range<Integer>[] List of frame rate ranges supported by this camera
+ * device.
+ */
+ Range<Integer>[] getControlAutoExposureAvailableTargetFpsRanges();
+
+ /**
+ * Returns the maximum and minimum exposure compensation values for @see
+ * android.control.aeExposureCompensation, in counts of @see android.control.aeCompensationStep,
+ * that are supported by this camera device.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE key.
+ *
+ * @return android.util.Range<Integer> Maximum and minimum exposure compensation supported by this
+ * camera device.
+ */
+ Range<Integer> getControlAutoExposureCompensationRange();
+
+ /**
+ * Returns the smallest step by which the exposure compensation can be changed.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP key.
+ *
+ * @return double Smallest step by which the exposure compensation can be changed.
+ */
+ double getControlAutoExposureCompensationStep();
+
+ /**
+ * Returns a list of auto-focus modes for @see android.control.afMode that are supported by this
+ * camera device.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES key.
+ *
+ * @return int[] List of auto-focus modes supported by this camera device.
+ */
+ int[] getControlAutoFocusAvailableModes();
+
+ /**
+ * Returns the maximum number of metering regions that can be used by the auto-exposure routine.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#CONTROL_MAX_REGIONS_AE key.
+ *
+ * @return Integer Maximum number of metering regions that can be used by the auto-exposure
+ * routine.
+ */
+ Integer getControlMaxRegionsAutoExposure();
+
+ /**
+ * Returns the maximum number of metering regions that can be used by the auto-focus routine.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#CONTROL_MAX_REGIONS_AF key.
+ *
+ * @return Integer Maximum number of metering regions that can be used by the auto-focus routine.
+ */
+ Integer getControlMaxRegionsAutoFocus();
+
+ /**
+ * Returns a list of distortion correction modes for @see android.distortionCorrection.mode that
+ * are supported by this camera device.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#DISTORTION_CORRECTION_AVAILABLE_MODES key.
+ *
+ * @return int[] List of distortion correction modes supported by this camera device.
+ */
+ @RequiresApi(api = VERSION_CODES.P)
+ int[] getDistortionCorrectionAvailableModes();
+
+ /**
+ * Returns whether this camera device has a flash unit.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#FLASH_INFO_AVAILABLE key.
+ *
+ * @return Boolean Whether this camera device has a flash unit.
+ */
+ Boolean getFlashInfoAvailable();
+
+ /**
+ * Returns the direction the camera faces relative to device screen.
+ *
+ * <p><strong>Possible values:</strong>
+ *
+ * <ul>
+ * <li>@see android.hardware.camera2.CameraMetadata.LENS_FACING_FRONT
+ * <li>@see android.hardware.camera2.CameraMetadata.LENS_FACING_BACK
+ * <li>@see android.hardware.camera2.CameraMetadata.LENS_FACING_EXTERNAL
+ * </ul>
+ *
+ * <p>By default maps to the @see android.hardware.camera2.CameraCharacteristics.LENS_FACING key.
+ *
+ * @return int Direction the camera faces relative to device screen.
+ */
+ int getLensFacing();
+
+ /**
+ * Returns the shortest distance from front most surface of the lens that can be brought into
+ * sharp focus.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE key.
+ *
+ * @return Float Shortest distance from front most surface of the lens that can be brought into
+ * sharp focus.
+ */
+ Float getLensInfoMinimumFocusDistance();
+
+ /**
+ * Returns the maximum ratio between both active area width and crop region width, and active area
+ * height and crop region height, for @see android.scaler.cropRegion.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM key.
+ *
+ * @return Float Maximum ratio between both active area width and crop region width, and active
+ * area height and crop region height.
+ */
+ Float getScalerAvailableMaxDigitalZoom();
+
+ /**
+ * Returns the minimum ratio between the default camera zoom setting and all of the available
+ * zoom.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#CONTROL_ZOOM_RATIO_RANGE key's lower value.
+ *
+ * @return Float Minimum ratio between the default zoom ratio and the minimum possible zoom.
+ */
+ @RequiresApi(api = VERSION_CODES.R)
+ Float getScalerMinZoomRatio();
+
+ /**
+ * Returns the maximum ratio between the default camera zoom setting and all of the available
+ * zoom.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#CONTROL_ZOOM_RATIO_RANGE key's upper value.
+ *
+ * @return Float Maximum ratio between the default zoom ratio and the maximum possible zoom.
+ */
+ @RequiresApi(api = VERSION_CODES.R)
+ Float getScalerMaxZoomRatio();
+
+ /**
+ * Returns the area of the image sensor which corresponds to active pixels after any geometric
+ * distortion correction has been applied.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE key.
+ *
+ * @return android.graphics.Rect area of the image sensor which corresponds to active pixels after
+ * any geometric distortion correction has been applied.
+ */
+ Rect getSensorInfoActiveArraySize();
+
+ /**
+ * Returns the dimensions of the full pixel array, possibly including black calibration pixels.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE key.
+ *
+ * @return android.util.Size Dimensions of the full pixel array, possibly including black
+ * calibration pixels.
+ */
+ Size getSensorInfoPixelArraySize();
+
+ /**
+ * Returns the area of the image sensor which corresponds to active pixels prior to the
+ * application of any geometric distortion correction.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ * key.
+ *
+ * @return android.graphics.Rect Area of the image sensor which corresponds to active pixels prior
+ * to the application of any geometric distortion correction.
+ */
+ @RequiresApi(api = VERSION_CODES.M)
+ Rect getSensorInfoPreCorrectionActiveArraySize();
+
+ /**
+ * Returns the clockwise angle through which the output image needs to be rotated to be upright on
+ * the device screen in its native orientation.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#SENSOR_ORIENTATION key.
+ *
+ * @return int Clockwise angle through which the output image needs to be rotated to be upright on
+ * the device screen in its native orientation.
+ */
+ int getSensorOrientation();
+
+ /**
+ * Returns a level which generally classifies the overall set of the camera device functionality.
+ *
+ * <p><strong>Possible values:</strong>
+ *
+ * <ul>
+ * <li>@see android.hardware.camera2.CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
+ * <li>@see android.hardware.camera2.CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED
+ * <li>@see android.hardware.camera2.CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL
+ * <li>@see android.hardware.camera2.CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEVEL_3
+ * <li>@see android.hardware.camera2.CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL
+ * </ul>
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL key.
+ *
+ * @return int Level which generally classifies the overall set of the camera device
+ * functionality.
+ */
+ int getHardwareLevel();
+
+ /**
+ * Returns a list of noise reduction modes for @see android.noiseReduction.mode that are supported
+ * by this camera device.
+ *
+ * <p>By default maps to the @see
+ * android.hardware.camera2.CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+ * key.
+ *
+ * @return int[] List of noise reduction modes that are supported by this camera device.
+ */
+ int[] getAvailableNoiseReductionModes();
+}
+
+/**
+ * Implementation of the @see CameraProperties interface using the @see
+ * android.hardware.camera2.CameraCharacteristics class to access the different characteristics.
+ */
+class CameraPropertiesImpl implements CameraProperties {
+ private final CameraCharacteristics cameraCharacteristics;
+ private final String cameraName;
+
+ public CameraPropertiesImpl(String cameraName, CameraManager cameraManager)
+ throws CameraAccessException {
+ this.cameraName = cameraName;
+ this.cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraName);
+ }
+
+ @Override
+ public String getCameraName() {
+ return cameraName;
+ }
+
+ @Override
+ public Range<Integer>[] getControlAutoExposureAvailableTargetFpsRanges() {
+ return cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ }
+
+ @Override
+ public Range<Integer> getControlAutoExposureCompensationRange() {
+ return cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
+ }
+
+ @Override
+ public double getControlAutoExposureCompensationStep() {
+ Rational rational =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
+
+ return rational == null ? 0.0 : rational.doubleValue();
+ }
+
+ @Override
+ public int[] getControlAutoFocusAvailableModes() {
+ return cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+ }
+
+ @Override
+ public Integer getControlMaxRegionsAutoExposure() {
+ return cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+ }
+
+ @Override
+ public Integer getControlMaxRegionsAutoFocus() {
+ return cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+ }
+
+ @RequiresApi(api = VERSION_CODES.P)
+ @Override
+ public int[] getDistortionCorrectionAvailableModes() {
+ return cameraCharacteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES);
+ }
+
+ @Override
+ public Boolean getFlashInfoAvailable() {
+ return cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
+ }
+
+ @Override
+ public int getLensFacing() {
+ return cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
+ }
+
+ @Override
+ public Float getLensInfoMinimumFocusDistance() {
+ return cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+ }
+
+ @Override
+ public Float getScalerAvailableMaxDigitalZoom() {
+ return cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+ }
+
+ @RequiresApi(api = VERSION_CODES.R)
+ @Override
+ public Float getScalerMaxZoomRatio() {
+ return cameraCharacteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE).getUpper();
+ }
+
+ @RequiresApi(api = VERSION_CODES.R)
+ @Override
+ public Float getScalerMinZoomRatio() {
+ return cameraCharacteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE).getLower();
+ }
+
+ @Override
+ public Rect getSensorInfoActiveArraySize() {
+ return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ }
+
+ @Override
+ public Size getSensorInfoPixelArraySize() {
+ return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
+ }
+
+ @RequiresApi(api = VERSION_CODES.M)
+ @Override
+ public Rect getSensorInfoPreCorrectionActiveArraySize() {
+ return cameraCharacteristics.get(
+ CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
+ }
+
+ @Override
+ public int getSensorOrientation() {
+ return cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ }
+
+ @Override
+ public int getHardwareLevel() {
+ return cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+ }
+
+ @Override
+ public int[] getAvailableNoiseReductionModes() {
+ return cameraCharacteristics.get(
+ CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java
new file mode 100644
index 0000000..951a279
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java
@@ -0,0 +1,182 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.annotation.TargetApi;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.os.Build;
+import android.util.Size;
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import java.util.Arrays;
+
+/**
+ * Utility class offering functions to calculate values regarding the camera boundaries.
+ *
+ * <p>The functions are used to calculate focus and exposure settings.
+ */
+public final class CameraRegionUtils {
+
+ /**
+ * Obtains the boundaries for the currently active camera, that can be used for calculating
+ * MeteringRectangle instances required for setting focus or exposure settings.
+ *
+ * @param cameraProperties - Collection of the characteristics for the current camera device.
+ * @param requestBuilder - The request builder for the current capture request.
+ * @return The boundaries for the current camera device.
+ */
+ public static Size getCameraBoundaries(
+ @NonNull CameraProperties cameraProperties, @NonNull CaptureRequest.Builder requestBuilder) {
+ if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.P
+ && supportsDistortionCorrection(cameraProperties)) {
+ // Get the current distortion correction mode.
+ Integer distortionCorrectionMode =
+ requestBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE);
+
+ // Return the correct boundaries depending on the mode.
+ android.graphics.Rect rect;
+ if (distortionCorrectionMode == null
+ || distortionCorrectionMode == CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) {
+ rect = cameraProperties.getSensorInfoPreCorrectionActiveArraySize();
+ } else {
+ rect = cameraProperties.getSensorInfoActiveArraySize();
+ }
+
+ return SizeFactory.create(rect.width(), rect.height());
+ } else {
+ // No distortion correction support.
+ return cameraProperties.getSensorInfoPixelArraySize();
+ }
+ }
+
+ /**
+ * Converts a point into a {@link MeteringRectangle} with the supplied coordinates as the center
+ * point.
+ *
+ * <p>Since the Camera API (due to cross-platform constraints) only accepts a point when
+ * configuring a specific focus or exposure area and Android requires a rectangle to configure
+ * these settings there is a need to convert the point into a rectangle. This method will create
+ * the required rectangle with an arbitrarily size that is a 10th of the current viewport and the
+ * coordinates as the center point.
+ *
+ * @param boundaries - The camera boundaries to calculate the metering rectangle for.
+ * @param x horizontal coordinate; must satisfy 0 &lt;= x &lt;= 1.
+ * @param y vertical coordinate; must satisfy 0 &lt;= y &lt;= 1.
+ * @return The dimensions of the metering rectangle based on the supplied coordinates and
+ * boundaries.
+ */
+ public static MeteringRectangle convertPointToMeteringRectangle(
+ @NonNull Size boundaries,
+ double x,
+ double y,
+ @NonNull PlatformChannel.DeviceOrientation orientation) {
+ assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0);
+ assert (x >= 0 && x <= 1);
+ assert (y >= 0 && y <= 1);
+ // Rotate the coordinates to match the device orientation.
+ double oldX = x, oldY = y;
+ switch (orientation) {
+ case PORTRAIT_UP: // 90 ccw.
+ y = 1 - oldX;
+ x = oldY;
+ break;
+ case PORTRAIT_DOWN: // 90 cw.
+ x = 1 - oldY;
+ y = oldX;
+ break;
+ case LANDSCAPE_LEFT:
+ // No rotation required.
+ break;
+ case LANDSCAPE_RIGHT: // 180.
+ x = 1 - x;
+ y = 1 - y;
+ break;
+ }
+ // Interpolate the target coordinate.
+ int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1)));
+ int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1)));
+ // Determine the dimensions of the metering rectangle (10th of the viewport).
+ int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d);
+ int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d);
+ // Adjust target coordinate to represent top-left corner of metering rectangle.
+ targetX -= targetWidth / 2;
+ targetY -= targetHeight / 2;
+ // Adjust target coordinate as to not fall out of bounds.
+ if (targetX < 0) {
+ targetX = 0;
+ }
+ if (targetY < 0) {
+ targetY = 0;
+ }
+ int maxTargetX = boundaries.getWidth() - 1 - targetWidth;
+ int maxTargetY = boundaries.getHeight() - 1 - targetHeight;
+ if (targetX > maxTargetX) {
+ targetX = maxTargetX;
+ }
+ if (targetY > maxTargetY) {
+ targetY = maxTargetY;
+ }
+ // Build the metering rectangle.
+ return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1);
+ }
+
+ @TargetApi(Build.VERSION_CODES.P)
+ private static boolean supportsDistortionCorrection(CameraProperties cameraProperties) {
+ int[] availableDistortionCorrectionModes =
+ cameraProperties.getDistortionCorrectionAvailableModes();
+ if (availableDistortionCorrectionModes == null) {
+ availableDistortionCorrectionModes = new int[0];
+ }
+ long nonOffModesSupported =
+ Arrays.stream(availableDistortionCorrectionModes)
+ .filter((value) -> value != CaptureRequest.DISTORTION_CORRECTION_MODE_OFF)
+ .count();
+ return nonOffModesSupported > 0;
+ }
+
+ /** Factory class that assists in creating a {@link MeteringRectangle} instance. */
+ static class MeteringRectangleFactory {
+ /**
+ * Creates a new instance of the {@link MeteringRectangle} class.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param x coordinate >= 0.
+ * @param y coordinate >= 0.
+ * @param width width >= 0.
+ * @param height height >= 0.
+ * @param meteringWeight weight between {@value MeteringRectangle#METERING_WEIGHT_MIN} and
+ * {@value MeteringRectangle#METERING_WEIGHT_MAX} inclusively.
+ * @return new instance of the {@link MeteringRectangle} class.
+ * @throws IllegalArgumentException if any of the parameters were negative.
+ */
+ @VisibleForTesting
+ public static MeteringRectangle create(
+ int x, int y, int width, int height, int meteringWeight) {
+ return new MeteringRectangle(x, y, width, height, meteringWeight);
+ }
+ }
+
+ /** Factory class that assists in creating a {@link Size} instance. */
+ static class SizeFactory {
+ /**
+ * Creates a new instance of the {@link Size} class.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param width width >= 0.
+ * @param height height >= 0.
+ * @return new instance of the {@link Size} class.
+ */
+ @VisibleForTesting
+ public static Size create(int width, int height) {
+ return new Size(width, height);
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraState.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraState.java
new file mode 100644
index 0000000..ac48caf
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraState.java
@@ -0,0 +1,27 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+/**
+ * These are the states that the camera can be in. The camera can only take one photo at a time so
+ * this state describes the state of the camera itself. The camera works like a pipeline where we
+ * feed it requests through. It can only process one task at a time.
+ */
+public enum CameraState {
+ /** Idle, showing preview and not capturing anything. */
+ STATE_PREVIEW,
+
+ /** Starting and waiting for autofocus to complete. */
+ STATE_WAITING_FOCUS,
+
+ /** Start performing autoexposure. */
+ STATE_WAITING_PRECAPTURE_START,
+
+ /** Waiting for autoexposure to complete. */
+ STATE_WAITING_PRECAPTURE_DONE,
+
+ /** Capturing an image. */
+ STATE_CAPTURING,
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraUtils.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraUtils.java
new file mode 100644
index 0000000..11b6eea
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/CameraUtils.java
@@ -0,0 +1,132 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.app.Activity;
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/** Provides various utilities for camera. */
+public final class CameraUtils {
+
+ private CameraUtils() {}
+
+ /**
+ * Gets the {@link CameraManager} singleton.
+ *
+ * @param context The context to get the {@link CameraManager} singleton from.
+ * @return The {@link CameraManager} singleton.
+ */
+ static CameraManager getCameraManager(Context context) {
+ return (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ /**
+ * Serializes the {@link PlatformChannel.DeviceOrientation} to a string value.
+ *
+ * @param orientation The orientation to serialize.
+ * @return The serialized orientation.
+   * @throws UnsupportedOperationException when the provided orientation does not have a corresponding
+ * string value.
+ */
+ static String serializeDeviceOrientation(PlatformChannel.DeviceOrientation orientation) {
+ if (orientation == null)
+ throw new UnsupportedOperationException("Could not serialize null device orientation.");
+ switch (orientation) {
+ case PORTRAIT_UP:
+ return "portraitUp";
+ case PORTRAIT_DOWN:
+ return "portraitDown";
+ case LANDSCAPE_LEFT:
+ return "landscapeLeft";
+ case LANDSCAPE_RIGHT:
+ return "landscapeRight";
+ default:
+ throw new UnsupportedOperationException(
+ "Could not serialize device orientation: " + orientation.toString());
+ }
+ }
+
+ /**
+ * Deserializes a string value to its corresponding {@link PlatformChannel.DeviceOrientation}
+ * value.
+ *
+ * @param orientation The string value to deserialize.
+ * @return The deserialized orientation.
+ * @throws UnsupportedOperationException when the provided string value does not have a
+ * corresponding {@link PlatformChannel.DeviceOrientation}.
+ */
+ static PlatformChannel.DeviceOrientation deserializeDeviceOrientation(String orientation) {
+ if (orientation == null)
+ throw new UnsupportedOperationException("Could not deserialize null device orientation.");
+ switch (orientation) {
+ case "portraitUp":
+ return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+ case "portraitDown":
+ return PlatformChannel.DeviceOrientation.PORTRAIT_DOWN;
+ case "landscapeLeft":
+ return PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT;
+ case "landscapeRight":
+ return PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT;
+ default:
+ throw new UnsupportedOperationException(
+ "Could not deserialize device orientation: " + orientation);
+ }
+ }
+
+ /**
+ * Gets all the available cameras for the device.
+ *
+ * @param activity The current Android activity.
+ * @return A map of all the available cameras, with their name as their key.
+ * @throws CameraAccessException when the camera could not be accessed.
+ */
+ public static List<Map<String, Object>> getAvailableCameras(Activity activity)
+ throws CameraAccessException {
+ CameraManager cameraManager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
+ String[] cameraNames = cameraManager.getCameraIdList();
+ List<Map<String, Object>> cameras = new ArrayList<>();
+ for (String cameraName : cameraNames) {
+ int cameraId;
+ try {
+ cameraId = Integer.parseInt(cameraName, 10);
+ } catch (NumberFormatException e) {
+ cameraId = -1;
+ }
+ if (cameraId < 0) {
+ continue;
+ }
+
+ HashMap<String, Object> details = new HashMap<>();
+ CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraName);
+ details.put("name", cameraName);
+ int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ details.put("sensorOrientation", sensorOrientation);
+
+ int lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING);
+ switch (lensFacing) {
+ case CameraMetadata.LENS_FACING_FRONT:
+ details.put("lensFacing", "front");
+ break;
+ case CameraMetadata.LENS_FACING_BACK:
+ details.put("lensFacing", "back");
+ break;
+ case CameraMetadata.LENS_FACING_EXTERNAL:
+ details.put("lensFacing", "external");
+ break;
+ }
+ cameras.add(details);
+ }
+ return cameras;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/DartMessenger.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/DartMessenger.java
new file mode 100644
index 0000000..e15078e
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/DartMessenger.java
@@ -0,0 +1,206 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.os.Handler;
+import android.text.TextUtils;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugin.common.MethodChannel;
+import io.flutter.plugins.camera.features.autofocus.FocusMode;
+import io.flutter.plugins.camera.features.exposurelock.ExposureMode;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Utility class that facilitates communication to the Flutter client. */
+public class DartMessenger {
+ @NonNull private final Handler handler;
+ @Nullable private MethodChannel cameraChannel;
+ @Nullable private MethodChannel deviceChannel;
+
+ /** Specifies the different device related message types. */
+ enum DeviceEventType {
+ /** Indicates the device's orientation has changed. */
+ ORIENTATION_CHANGED("orientation_changed");
+ private final String method;
+
+ DeviceEventType(String method) {
+ this.method = method;
+ }
+ }
+
+ /** Specifies the different camera related message types. */
+ enum CameraEventType {
+ /** Indicates that an error occurred while interacting with the camera. */
+ ERROR("error"),
+ /** Indicates that the camera is closing. */
+ CLOSING("camera_closing"),
+ /** Indicates that the camera is initialized. */
+ INITIALIZED("initialized");
+
+ private final String method;
+
+ /**
+ * Converts the supplied method name to the matching {@link CameraEventType}.
+ *
+ * @param method name to be converted into a {@link CameraEventType}.
+ */
+ CameraEventType(String method) {
+ this.method = method;
+ }
+ }
+
+ /**
+ * Creates a new instance of the {@link DartMessenger} class.
+ *
+ * @param messenger is the {@link BinaryMessenger} that is used to communicate with Flutter.
+ * @param cameraId identifies the camera which is the source of the communication.
+ * @param handler the handler used to manage the thread's message queue. This should always be a
+ * handler managing the main thread since communication with Flutter should always happen on
+   *     the main thread. The handler is mainly supplied so it will be easier to test this class.
+ */
+ DartMessenger(BinaryMessenger messenger, long cameraId, @NonNull Handler handler) {
+ cameraChannel =
+ new MethodChannel(messenger, "plugins.flutter.io/camera_android/camera" + cameraId);
+ deviceChannel = new MethodChannel(messenger, "plugins.flutter.io/camera_android/fromPlatform");
+ this.handler = handler;
+ }
+
+ /**
+ * Sends a message to the Flutter client informing the orientation of the device has been changed.
+ *
+ * @param orientation specifies the new orientation of the device.
+ */
+ public void sendDeviceOrientationChangeEvent(PlatformChannel.DeviceOrientation orientation) {
+ assert (orientation != null);
+ this.send(
+ DeviceEventType.ORIENTATION_CHANGED,
+ new HashMap<String, Object>() {
+ {
+ put("orientation", CameraUtils.serializeDeviceOrientation(orientation));
+ }
+ });
+ }
+
+ /**
+ * Sends a message to the Flutter client informing that the camera has been initialized.
+ *
+ * @param previewWidth describes the preview width that is supported by the camera.
+ * @param previewHeight describes the preview height that is supported by the camera.
+ * @param exposureMode describes the current exposure mode that is set on the camera.
+ * @param focusMode describes the current focus mode that is set on the camera.
+ * @param exposurePointSupported indicates if the camera supports setting an exposure point.
+ * @param focusPointSupported indicates if the camera supports setting a focus point.
+ */
+ void sendCameraInitializedEvent(
+ Integer previewWidth,
+ Integer previewHeight,
+ ExposureMode exposureMode,
+ FocusMode focusMode,
+ Boolean exposurePointSupported,
+ Boolean focusPointSupported) {
+ assert (previewWidth != null);
+ assert (previewHeight != null);
+ assert (exposureMode != null);
+ assert (focusMode != null);
+ assert (exposurePointSupported != null);
+ assert (focusPointSupported != null);
+ this.send(
+ CameraEventType.INITIALIZED,
+ new HashMap<String, Object>() {
+ {
+ put("previewWidth", previewWidth.doubleValue());
+ put("previewHeight", previewHeight.doubleValue());
+ put("exposureMode", exposureMode.toString());
+ put("focusMode", focusMode.toString());
+ put("exposurePointSupported", exposurePointSupported);
+ put("focusPointSupported", focusPointSupported);
+ }
+ });
+ }
+
+ /** Sends a message to the Flutter client informing that the camera is closing. */
+ void sendCameraClosingEvent() {
+ send(CameraEventType.CLOSING);
+ }
+
+ /**
+ * Sends a message to the Flutter client informing that an error occurred while interacting with
+ * the camera.
+ *
+ * @param description contains details regarding the error that occurred.
+ */
+ void sendCameraErrorEvent(@Nullable String description) {
+ this.send(
+ CameraEventType.ERROR,
+ new HashMap<String, Object>() {
+ {
+ if (!TextUtils.isEmpty(description)) put("description", description);
+ }
+ });
+ }
+
+ private void send(CameraEventType eventType) {
+ send(eventType, new HashMap<>());
+ }
+
+ private void send(CameraEventType eventType, Map<String, Object> args) {
+ if (cameraChannel == null) {
+ return;
+ }
+
+ handler.post(
+ new Runnable() {
+ @Override
+ public void run() {
+ cameraChannel.invokeMethod(eventType.method, args);
+ }
+ });
+ }
+
+ private void send(DeviceEventType eventType) {
+ send(eventType, new HashMap<>());
+ }
+
+ private void send(DeviceEventType eventType, Map<String, Object> args) {
+ if (deviceChannel == null) {
+ return;
+ }
+
+ handler.post(
+ new Runnable() {
+ @Override
+ public void run() {
+ deviceChannel.invokeMethod(eventType.method, args);
+ }
+ });
+ }
+
+ /**
+ * Send a success payload to a {@link MethodChannel.Result} on the main thread.
+ *
+ * @param payload The payload to send.
+ */
+ public void finish(MethodChannel.Result result, Object payload) {
+ handler.post(() -> result.success(payload));
+ }
+
+ /**
+ * Send an error payload to a {@link MethodChannel.Result} on the main thread.
+ *
+ * @param errorCode error code.
+ * @param errorMessage error message.
+ * @param errorDetails error details.
+ */
+ public void error(
+ MethodChannel.Result result,
+ String errorCode,
+ @Nullable String errorMessage,
+ @Nullable Object errorDetails) {
+ handler.post(() -> result.error(errorCode, errorMessage, errorDetails));
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/ImageSaver.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/ImageSaver.java
new file mode 100644
index 0000000..821c9a5
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/ImageSaver.java
@@ -0,0 +1,105 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.media.Image;
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/** Saves a JPEG {@link Image} into the specified {@link File}. */
+public class ImageSaver implements Runnable {
+
+ /** The JPEG image */
+ private final Image image;
+
+ /** The file we save the image into. */
+ private final File file;
+
+ /** Used to report the status of the save action. */
+ private final Callback callback;
+
+ /**
+ * Creates an instance of the ImageSaver runnable
+ *
+ * @param image - The image to save
+ * @param file - The file to save the image to
+ * @param callback - The callback that is run on completion, or when an error is encountered.
+ */
+ ImageSaver(@NonNull Image image, @NonNull File file, @NonNull Callback callback) {
+ this.image = image;
+ this.file = file;
+ this.callback = callback;
+ }
+
+ @Override
+ public void run() {
+ ByteBuffer buffer = image.getPlanes()[0].getBuffer();
+ byte[] bytes = new byte[buffer.remaining()];
+ buffer.get(bytes);
+ FileOutputStream output = null;
+ try {
+ output = FileOutputStreamFactory.create(file);
+ output.write(bytes);
+
+ callback.onComplete(file.getAbsolutePath());
+
+ } catch (IOException e) {
+ callback.onError("IOError", "Failed saving image");
+ } finally {
+ image.close();
+ if (null != output) {
+ try {
+ output.close();
+ } catch (IOException e) {
+ callback.onError("cameraAccess", e.getMessage());
+ }
+ }
+ }
+ }
+
+ /**
+ * The interface for the callback that is passed to ImageSaver, for detecting completion or
+ * failure of the image saving task.
+ */
+ public interface Callback {
+ /**
+ * Called when the image file has been saved successfully.
+ *
+ * @param absolutePath - The absolute path of the file that was saved.
+ */
+ void onComplete(String absolutePath);
+
+ /**
+ * Called when an error is encountered while saving the image file.
+ *
+ * @param errorCode - The error code.
+ * @param errorMessage - The human readable error message.
+ */
+ void onError(String errorCode, String errorMessage);
+ }
+
+ /** Factory class that assists in creating a {@link FileOutputStream} instance. */
+ static class FileOutputStreamFactory {
+ /**
+ * Creates a new instance of the {@link FileOutputStream} class.
+ *
+     * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param file - The file to create the output stream for
+ * @return new instance of the {@link FileOutputStream} class.
+ * @throws FileNotFoundException when the supplied file could not be found.
+ */
+ @VisibleForTesting
+ public static FileOutputStream create(File file) throws FileNotFoundException {
+ return new FileOutputStream(file);
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
new file mode 100644
index 0000000..432344a
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
@@ -0,0 +1,417 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraAccessException;
+import android.os.Handler;
+import android.os.Looper;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugin.common.EventChannel;
+import io.flutter.plugin.common.MethodCall;
+import io.flutter.plugin.common.MethodChannel;
+import io.flutter.plugin.common.MethodChannel.Result;
+import io.flutter.plugins.camera.CameraPermissions.PermissionsRegistry;
+import io.flutter.plugins.camera.features.CameraFeatureFactoryImpl;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.autofocus.FocusMode;
+import io.flutter.plugins.camera.features.exposurelock.ExposureMode;
+import io.flutter.plugins.camera.features.flash.FlashMode;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.view.TextureRegistry;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+final class MethodCallHandlerImpl implements MethodChannel.MethodCallHandler {
+ private final Activity activity;
+ private final BinaryMessenger messenger;
+ private final CameraPermissions cameraPermissions;
+ private final PermissionsRegistry permissionsRegistry;
+ private final TextureRegistry textureRegistry;
+ private final MethodChannel methodChannel;
+ private final EventChannel imageStreamChannel;
+ private @Nullable Camera camera;
+
+ MethodCallHandlerImpl(
+ Activity activity,
+ BinaryMessenger messenger,
+ CameraPermissions cameraPermissions,
+ PermissionsRegistry permissionsAdder,
+ TextureRegistry textureRegistry) {
+ this.activity = activity;
+ this.messenger = messenger;
+ this.cameraPermissions = cameraPermissions;
+ this.permissionsRegistry = permissionsAdder;
+ this.textureRegistry = textureRegistry;
+
+ methodChannel = new MethodChannel(messenger, "plugins.flutter.io/camera_android");
+ imageStreamChannel =
+ new EventChannel(messenger, "plugins.flutter.io/camera_android/imageStream");
+ methodChannel.setMethodCallHandler(this);
+ }
+
+ @Override
+ public void onMethodCall(@NonNull MethodCall call, @NonNull final Result result) {
+ switch (call.method) {
+ case "availableCameras":
+ try {
+ result.success(CameraUtils.getAvailableCameras(activity));
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ case "create":
+ {
+ if (camera != null) {
+ camera.close();
+ }
+
+ cameraPermissions.requestPermissions(
+ activity,
+ permissionsRegistry,
+ call.argument("enableAudio"),
+ (String errCode, String errDesc) -> {
+ if (errCode == null) {
+ try {
+ instantiateCamera(call, result);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ } else {
+ result.error(errCode, errDesc, null);
+ }
+ });
+ break;
+ }
+ case "initialize":
+ {
+ if (camera != null) {
+ try {
+ camera.open(call.argument("imageFormatGroup"));
+ result.success(null);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ } else {
+ result.error(
+ "cameraNotFound",
+ "Camera not found. Please call the 'create' method before calling 'initialize'.",
+ null);
+ }
+ break;
+ }
+ case "takePicture":
+ {
+ camera.takePicture(result);
+ break;
+ }
+ case "prepareForVideoRecording":
+ {
+ // This optimization is not required for Android.
+ result.success(null);
+ break;
+ }
+ case "startVideoRecording":
+ {
+ camera.startVideoRecording(
+ result,
+ Objects.equals(call.argument("enableStream"), true) ? imageStreamChannel : null);
+ break;
+ }
+ case "stopVideoRecording":
+ {
+ camera.stopVideoRecording(result);
+ break;
+ }
+ case "pauseVideoRecording":
+ {
+ camera.pauseVideoRecording(result);
+ break;
+ }
+ case "resumeVideoRecording":
+ {
+ camera.resumeVideoRecording(result);
+ break;
+ }
+ case "setFlashMode":
+ {
+ String modeStr = call.argument("mode");
+ FlashMode mode = FlashMode.getValueForString(modeStr);
+ if (mode == null) {
+ result.error("setFlashModeFailed", "Unknown flash mode " + modeStr, null);
+ return;
+ }
+ try {
+ camera.setFlashMode(result, mode);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "setExposureMode":
+ {
+ String modeStr = call.argument("mode");
+ ExposureMode mode = ExposureMode.getValueForString(modeStr);
+ if (mode == null) {
+ result.error("setExposureModeFailed", "Unknown exposure mode " + modeStr, null);
+ return;
+ }
+ try {
+ camera.setExposureMode(result, mode);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "setExposurePoint":
+ {
+ Boolean reset = call.argument("reset");
+ Double x = null;
+ Double y = null;
+ if (reset == null || !reset) {
+ x = call.argument("x");
+ y = call.argument("y");
+ }
+ try {
+ camera.setExposurePoint(result, new Point(x, y));
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "getMinExposureOffset":
+ {
+ try {
+ result.success(camera.getMinExposureOffset());
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "getMaxExposureOffset":
+ {
+ try {
+ result.success(camera.getMaxExposureOffset());
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "getExposureOffsetStepSize":
+ {
+ try {
+ result.success(camera.getExposureOffsetStepSize());
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "setExposureOffset":
+ {
+ try {
+ camera.setExposureOffset(result, call.argument("offset"));
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "setFocusMode":
+ {
+ String modeStr = call.argument("mode");
+ FocusMode mode = FocusMode.getValueForString(modeStr);
+ if (mode == null) {
+ result.error("setFocusModeFailed", "Unknown focus mode " + modeStr, null);
+ return;
+ }
+ try {
+ camera.setFocusMode(result, mode);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "setFocusPoint":
+ {
+ Boolean reset = call.argument("reset");
+ Double x = null;
+ Double y = null;
+ if (reset == null || !reset) {
+ x = call.argument("x");
+ y = call.argument("y");
+ }
+ try {
+ camera.setFocusPoint(result, new Point(x, y));
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "startImageStream":
+ {
+ try {
+ camera.startPreviewWithImageStream(imageStreamChannel);
+ result.success(null);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "stopImageStream":
+ {
+ try {
+ camera.startPreview();
+ result.success(null);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "getMaxZoomLevel":
+ {
+ assert camera != null;
+
+ try {
+ float maxZoomLevel = camera.getMaxZoomLevel();
+ result.success(maxZoomLevel);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "getMinZoomLevel":
+ {
+ assert camera != null;
+
+ try {
+ float minZoomLevel = camera.getMinZoomLevel();
+ result.success(minZoomLevel);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "setZoomLevel":
+ {
+ assert camera != null;
+
+ Double zoom = call.argument("zoom");
+
+ if (zoom == null) {
+ result.error(
+ "ZOOM_ERROR", "setZoomLevel is called without specifying a zoom level.", null);
+ return;
+ }
+
+ try {
+ camera.setZoomLevel(result, zoom.floatValue());
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "lockCaptureOrientation":
+ {
+ PlatformChannel.DeviceOrientation orientation =
+ CameraUtils.deserializeDeviceOrientation(call.argument("orientation"));
+
+ try {
+ camera.lockCaptureOrientation(orientation);
+ result.success(null);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "unlockCaptureOrientation":
+ {
+ try {
+ camera.unlockCaptureOrientation();
+ result.success(null);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "pausePreview":
+ {
+ try {
+ camera.pausePreview();
+ result.success(null);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
+ case "resumePreview":
+ {
+ camera.resumePreview();
+ result.success(null);
+ break;
+ }
+ case "dispose":
+ {
+ if (camera != null) {
+ camera.dispose();
+ }
+ result.success(null);
+ break;
+ }
+ default:
+ result.notImplemented();
+ break;
+ }
+ }
+
+ void stopListening() {
+ methodChannel.setMethodCallHandler(null);
+ }
+
+ private void instantiateCamera(MethodCall call, Result result) throws CameraAccessException {
+ String cameraName = call.argument("cameraName");
+ String preset = call.argument("resolutionPreset");
+ boolean enableAudio = call.argument("enableAudio");
+
+ TextureRegistry.SurfaceTextureEntry flutterSurfaceTexture =
+ textureRegistry.createSurfaceTexture();
+ DartMessenger dartMessenger =
+ new DartMessenger(
+ messenger, flutterSurfaceTexture.id(), new Handler(Looper.getMainLooper()));
+ CameraProperties cameraProperties =
+ new CameraPropertiesImpl(cameraName, CameraUtils.getCameraManager(activity));
+ ResolutionPreset resolutionPreset = ResolutionPreset.valueOf(preset);
+
+ camera =
+ new Camera(
+ activity,
+ flutterSurfaceTexture,
+ new CameraFeatureFactoryImpl(),
+ dartMessenger,
+ cameraProperties,
+ resolutionPreset,
+ enableAudio);
+
+ Map<String, Object> reply = new HashMap<>();
+ reply.put("cameraId", flutterSurfaceTexture.id());
+ result.success(reply);
+ }
+
+ // We move catching CameraAccessException out of onMethodCall because it causes a crash
+ // on plugin registration for sdks incompatible with Camera2 (< 21). We want this plugin to
+  // be able to compile with <21 sdks for apps that want the camera and support earlier versions.
+ @SuppressWarnings("ConstantConditions")
+ private void handleException(Exception exception, Result result) {
+ if (exception instanceof CameraAccessException) {
+ result.error("CameraAccess", exception.getMessage(), null);
+ return;
+ }
+
+ // CameraAccessException can not be cast to a RuntimeException.
+ throw (RuntimeException) exception;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeature.java
new file mode 100644
index 0000000..92cfd54
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeature.java
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features;
+
+import android.hardware.camera2.CaptureRequest;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.CameraProperties;
+
+/**
+ * An interface describing a feature in the camera. This holds a setting value of type T and must
+ * implement a means to check if this setting is supported by the current camera properties. It also
+ * must implement a builder update method which will update a given capture request builder for this
+ * feature's current setting value.
+ *
+ * @param <T> The type of the setting value managed by this feature.
+ */
+public abstract class CameraFeature<T> {
+
+ protected final CameraProperties cameraProperties;
+
+ protected CameraFeature(@NonNull CameraProperties cameraProperties) {
+ this.cameraProperties = cameraProperties;
+ }
+
+ /** Debug name for this feature. */
+ public abstract String getDebugName();
+
+ /**
+ * Gets the current value of this feature's setting.
+ *
+ * @return <T> Current value of this feature's setting.
+ */
+ public abstract T getValue();
+
+ /**
+ * Sets a new value for this feature's setting.
+ *
+ * @param value New value for this feature's setting.
+ */
+ public abstract void setValue(T value);
+
+ /**
+ * Returns whether or not this feature is supported.
+ *
+ * <p>When the feature is not supported any {@see #value} is simply ignored by the camera plugin.
+ *
+ * @return boolean Whether or not this feature is supported.
+ */
+ public abstract boolean checkIsSupported();
+
+ /**
+ * Updates the setting in a provided {@see android.hardware.camera2.CaptureRequest.Builder}.
+ *
+ * @param requestBuilder A {@see android.hardware.camera2.CaptureRequest.Builder} instance used to
+ * configure the settings and outputs needed to capture a single image from the camera device.
+ */
+ public abstract void updateBuilder(CaptureRequest.Builder requestBuilder);
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactory.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactory.java
new file mode 100644
index 0000000..b91f9a1
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactory.java
@@ -0,0 +1,149 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features;
+
+import android.app.Activity;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+
+/**
+ * Factory for creating the supported feature implementation controlling different aspects of the
+ * {@link android.hardware.camera2.CaptureRequest}.
+ */
+public interface CameraFeatureFactory {
+
+ /**
+ * Creates a new instance of the auto focus feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @param recordingVideo indicates if the camera is currently recording.
+ * @return newly created instance of the AutoFocusFeature class.
+ */
+ AutoFocusFeature createAutoFocusFeature(
+ @NonNull CameraProperties cameraProperties, boolean recordingVideo);
+
+ /**
+ * Creates a new instance of the exposure lock feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @return newly created instance of the ExposureLockFeature class.
+ */
+ ExposureLockFeature createExposureLockFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the exposure offset feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @return newly created instance of the ExposureOffsetFeature class.
+ */
+ ExposureOffsetFeature createExposureOffsetFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the flash feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @return newly created instance of the FlashFeature class.
+ */
+ FlashFeature createFlashFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the resolution feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @param initialSetting initial resolution preset.
+ * @param cameraName the name of the camera which can be used to identify the camera device.
+ * @return newly created instance of the ResolutionFeature class.
+ */
+ ResolutionFeature createResolutionFeature(
+ @NonNull CameraProperties cameraProperties,
+ ResolutionPreset initialSetting,
+ String cameraName);
+
+ /**
+ * Creates a new instance of the focus point feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @param sensorOrientationFeature instance of the SensorOrientationFeature class containing
+ * information about the sensor and device orientation.
+ * @return newly created instance of the FocusPointFeature class.
+ */
+ FocusPointFeature createFocusPointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature);
+
+ /**
+ * Creates a new instance of the FPS range feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @return newly created instance of the FpsRangeFeature class.
+ */
+ FpsRangeFeature createFpsRangeFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the sensor orientation feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @param activity current activity associated with the camera plugin.
+ * @param dartMessenger instance of the DartMessenger class, used to send state updates back to
+ * Dart.
+ * @return newly created instance of the SensorOrientationFeature class.
+ */
+ SensorOrientationFeature createSensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger);
+
+ /**
+ * Creates a new instance of the zoom level feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @return newly created instance of the ZoomLevelFeature class.
+ */
+ ZoomLevelFeature createZoomLevelFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the exposure point feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @param sensorOrientationFeature instance of the SensorOrientationFeature class containing
+ * information about the sensor and device orientation.
+ * @return newly created instance of the ExposurePointFeature class.
+ */
+ ExposurePointFeature createExposurePointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature);
+
+ /**
+ * Creates a new instance of the noise reduction feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * cameras features.
+ * @return newly created instance of the NoiseReductionFeature class.
+ */
+ NoiseReductionFeature createNoiseReductionFeature(@NonNull CameraProperties cameraProperties);
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactoryImpl.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactoryImpl.java
new file mode 100644
index 0000000..95a8c06
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactoryImpl.java
@@ -0,0 +1,98 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features;
+
+import android.app.Activity;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+
+/**
+ * Implementation of the {@link CameraFeatureFactory} interface creating the supported feature
+ * implementation controlling different aspects of the {@link
+ * android.hardware.camera2.CaptureRequest}.
+ */
+public class CameraFeatureFactoryImpl implements CameraFeatureFactory {
+
+ @Override
+ public AutoFocusFeature createAutoFocusFeature(
+ @NonNull CameraProperties cameraProperties, boolean recordingVideo) {
+ return new AutoFocusFeature(cameraProperties, recordingVideo);
+ }
+
+ @Override
+ public ExposureLockFeature createExposureLockFeature(@NonNull CameraProperties cameraProperties) {
+ return new ExposureLockFeature(cameraProperties);
+ }
+
+ @Override
+ public ExposureOffsetFeature createExposureOffsetFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return new ExposureOffsetFeature(cameraProperties);
+ }
+
+ @Override
+ public FlashFeature createFlashFeature(@NonNull CameraProperties cameraProperties) {
+ return new FlashFeature(cameraProperties);
+ }
+
+ @Override
+ public ResolutionFeature createResolutionFeature(
+ @NonNull CameraProperties cameraProperties,
+ ResolutionPreset initialSetting,
+ String cameraName) {
+ return new ResolutionFeature(cameraProperties, initialSetting, cameraName);
+ }
+
+ @Override
+ public FocusPointFeature createFocusPointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return new FocusPointFeature(cameraProperties, sensorOrientationFeature);
+ }
+
+ @Override
+ public FpsRangeFeature createFpsRangeFeature(@NonNull CameraProperties cameraProperties) {
+ return new FpsRangeFeature(cameraProperties);
+ }
+
+ @Override
+ public SensorOrientationFeature createSensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger) {
+ return new SensorOrientationFeature(cameraProperties, activity, dartMessenger);
+ }
+
+ @Override
+ public ZoomLevelFeature createZoomLevelFeature(@NonNull CameraProperties cameraProperties) {
+ return new ZoomLevelFeature(cameraProperties);
+ }
+
+ @Override
+ public ExposurePointFeature createExposurePointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return new ExposurePointFeature(cameraProperties, sensorOrientationFeature);
+ }
+
+ @Override
+ public NoiseReductionFeature createNoiseReductionFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return new NoiseReductionFeature(cameraProperties);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatures.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatures.java
new file mode 100644
index 0000000..659fd15
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatures.java
@@ -0,0 +1,285 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features;
+
+import android.app.Activity;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * These are all of our available features in the camera. Used in the Camera to access all features
+ * in a simpler way.
+ */
+public class CameraFeatures {
+ private static final String AUTO_FOCUS = "AUTO_FOCUS";
+ private static final String EXPOSURE_LOCK = "EXPOSURE_LOCK";
+ private static final String EXPOSURE_OFFSET = "EXPOSURE_OFFSET";
+ private static final String EXPOSURE_POINT = "EXPOSURE_POINT";
+ private static final String FLASH = "FLASH";
+ private static final String FOCUS_POINT = "FOCUS_POINT";
+ private static final String FPS_RANGE = "FPS_RANGE";
+ private static final String NOISE_REDUCTION = "NOISE_REDUCTION";
+ private static final String REGION_BOUNDARIES = "REGION_BOUNDARIES";
+ private static final String RESOLUTION = "RESOLUTION";
+ private static final String SENSOR_ORIENTATION = "SENSOR_ORIENTATION";
+ private static final String ZOOM_LEVEL = "ZOOM_LEVEL";
+
+ public static CameraFeatures init(
+ CameraFeatureFactory cameraFeatureFactory,
+ CameraProperties cameraProperties,
+ Activity activity,
+ DartMessenger dartMessenger,
+ ResolutionPreset resolutionPreset) {
+ CameraFeatures cameraFeatures = new CameraFeatures();
+ cameraFeatures.setAutoFocus(
+ cameraFeatureFactory.createAutoFocusFeature(cameraProperties, false));
+ cameraFeatures.setExposureLock(
+ cameraFeatureFactory.createExposureLockFeature(cameraProperties));
+ cameraFeatures.setExposureOffset(
+ cameraFeatureFactory.createExposureOffsetFeature(cameraProperties));
+ SensorOrientationFeature sensorOrientationFeature =
+ cameraFeatureFactory.createSensorOrientationFeature(
+ cameraProperties, activity, dartMessenger);
+ cameraFeatures.setSensorOrientation(sensorOrientationFeature);
+ cameraFeatures.setExposurePoint(
+ cameraFeatureFactory.createExposurePointFeature(
+ cameraProperties, sensorOrientationFeature));
+ cameraFeatures.setFlash(cameraFeatureFactory.createFlashFeature(cameraProperties));
+ cameraFeatures.setFocusPoint(
+ cameraFeatureFactory.createFocusPointFeature(cameraProperties, sensorOrientationFeature));
+ cameraFeatures.setFpsRange(cameraFeatureFactory.createFpsRangeFeature(cameraProperties));
+ cameraFeatures.setNoiseReduction(
+ cameraFeatureFactory.createNoiseReductionFeature(cameraProperties));
+ cameraFeatures.setResolution(
+ cameraFeatureFactory.createResolutionFeature(
+ cameraProperties, resolutionPreset, cameraProperties.getCameraName()));
+ cameraFeatures.setZoomLevel(cameraFeatureFactory.createZoomLevelFeature(cameraProperties));
+ return cameraFeatures;
+ }
+
+ private Map<String, CameraFeature> featureMap = new HashMap<>();
+
+ /**
+ * Gets a collection of all features that have been set.
+ *
+ * @return A collection of all features that have been set.
+ */
+ public Collection<CameraFeature> getAllFeatures() {
+ return this.featureMap.values();
+ }
+
+ /**
+ * Gets the auto focus feature if it has been set.
+ *
+ * @return the auto focus feature.
+ */
+ public AutoFocusFeature getAutoFocus() {
+ return (AutoFocusFeature) featureMap.get(AUTO_FOCUS);
+ }
+
+ /**
+ * Sets the instance of the auto focus feature.
+ *
+ * @param autoFocus the {@link AutoFocusFeature} instance to set.
+ */
+ public void setAutoFocus(AutoFocusFeature autoFocus) {
+ this.featureMap.put(AUTO_FOCUS, autoFocus);
+ }
+
+ /**
+ * Gets the exposure lock feature if it has been set.
+ *
+ * @return the exposure lock feature.
+ */
+ public ExposureLockFeature getExposureLock() {
+ return (ExposureLockFeature) featureMap.get(EXPOSURE_LOCK);
+ }
+
+ /**
+ * Sets the instance of the exposure lock feature.
+ *
+ * @param exposureLock the {@link ExposureLockFeature} instance to set.
+ */
+ public void setExposureLock(ExposureLockFeature exposureLock) {
+ this.featureMap.put(EXPOSURE_LOCK, exposureLock);
+ }
+
+ /**
+ * Gets the exposure offset feature if it has been set.
+ *
+ * @return the exposure offset feature.
+ */
+ public ExposureOffsetFeature getExposureOffset() {
+ return (ExposureOffsetFeature) featureMap.get(EXPOSURE_OFFSET);
+ }
+
+ /**
+ * Sets the instance of the exposure offset feature.
+ *
+ * @param exposureOffset the {@link ExposureOffsetFeature} instance to set.
+ */
+ public void setExposureOffset(ExposureOffsetFeature exposureOffset) {
+ this.featureMap.put(EXPOSURE_OFFSET, exposureOffset);
+ }
+
+ /**
+ * Gets the exposure point feature if it has been set.
+ *
+ * @return the exposure point feature.
+ */
+ public ExposurePointFeature getExposurePoint() {
+ return (ExposurePointFeature) featureMap.get(EXPOSURE_POINT);
+ }
+
+ /**
+ * Sets the instance of the exposure point feature.
+ *
+ * @param exposurePoint the {@link ExposurePointFeature} instance to set.
+ */
+ public void setExposurePoint(ExposurePointFeature exposurePoint) {
+ this.featureMap.put(EXPOSURE_POINT, exposurePoint);
+ }
+
+ /**
+ * Gets the flash feature if it has been set.
+ *
+ * @return the flash feature.
+ */
+ public FlashFeature getFlash() {
+ return (FlashFeature) featureMap.get(FLASH);
+ }
+
+ /**
+ * Sets the instance of the flash feature.
+ *
+ * @param flash the {@link FlashFeature} instance to set.
+ */
+ public void setFlash(FlashFeature flash) {
+ this.featureMap.put(FLASH, flash);
+ }
+
+ /**
+ * Gets the focus point feature if it has been set.
+ *
+ * @return the focus point feature.
+ */
+ public FocusPointFeature getFocusPoint() {
+ return (FocusPointFeature) featureMap.get(FOCUS_POINT);
+ }
+
+ /**
+ * Sets the instance of the focus point feature.
+ *
+ * @param focusPoint the {@link FocusPointFeature} instance to set.
+ */
+ public void setFocusPoint(FocusPointFeature focusPoint) {
+ this.featureMap.put(FOCUS_POINT, focusPoint);
+ }
+
+ /**
+ * Gets the fps range feature if it has been set.
+ *
+ * @return the fps range feature.
+ */
+ public FpsRangeFeature getFpsRange() {
+ return (FpsRangeFeature) featureMap.get(FPS_RANGE);
+ }
+
+ /**
+ * Sets the instance of the fps range feature.
+ *
+ * @param fpsRange the {@link FpsRangeFeature} instance to set.
+ */
+ public void setFpsRange(FpsRangeFeature fpsRange) {
+ this.featureMap.put(FPS_RANGE, fpsRange);
+ }
+
+ /**
+ * Gets the noise reduction feature if it has been set.
+ *
+ * @return the noise reduction feature.
+ */
+ public NoiseReductionFeature getNoiseReduction() {
+ return (NoiseReductionFeature) featureMap.get(NOISE_REDUCTION);
+ }
+
+ /**
+ * Sets the instance of the noise reduction feature.
+ *
+ * @param noiseReduction the {@link NoiseReductionFeature} instance to set.
+ */
+ public void setNoiseReduction(NoiseReductionFeature noiseReduction) {
+ this.featureMap.put(NOISE_REDUCTION, noiseReduction);
+ }
+
+ /**
+ * Gets the resolution feature if it has been set.
+ *
+ * @return the resolution feature.
+ */
+ public ResolutionFeature getResolution() {
+ return (ResolutionFeature) featureMap.get(RESOLUTION);
+ }
+
+ /**
+ * Sets the instance of the resolution feature.
+ *
+ * @param resolution the {@link ResolutionFeature} instance to set.
+ */
+ public void setResolution(ResolutionFeature resolution) {
+ this.featureMap.put(RESOLUTION, resolution);
+ }
+
+ /**
+ * Gets the sensor orientation feature if it has been set.
+ *
+ * @return the sensor orientation feature.
+ */
+ public SensorOrientationFeature getSensorOrientation() {
+ return (SensorOrientationFeature) featureMap.get(SENSOR_ORIENTATION);
+ }
+
+ /**
+ * Sets the instance of the sensor orientation feature.
+ *
+ * @param sensorOrientation the {@link SensorOrientationFeature} instance to set.
+ */
+ public void setSensorOrientation(SensorOrientationFeature sensorOrientation) {
+ this.featureMap.put(SENSOR_ORIENTATION, sensorOrientation);
+ }
+
+ /**
+ * Gets the zoom level feature if it has been set.
+ *
+ * @return the zoom level feature.
+ */
+ public ZoomLevelFeature getZoomLevel() {
+ return (ZoomLevelFeature) featureMap.get(ZOOM_LEVEL);
+ }
+
+ /**
+ * Sets the instance of the zoom level feature.
+ *
+ * @param zoomLevel the {@link ZoomLevelFeature} instance to set.
+ */
+ public void setZoomLevel(ZoomLevelFeature zoomLevel) {
+ this.featureMap.put(ZOOM_LEVEL, zoomLevel);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/Point.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/Point.java
new file mode 100644
index 0000000..b6b64f9
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/Point.java
@@ -0,0 +1,16 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features;
+
+/** Represents a point on an x/y axis. */
+public class Point {
+ public final Double x;
+ public final Double y;
+
+ public Point(Double x, Double y) {
+ this.x = x;
+ this.y = y;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/autofocus/AutoFocusFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/autofocus/AutoFocusFeature.java
new file mode 100644
index 0000000..1789a96
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/autofocus/AutoFocusFeature.java
@@ -0,0 +1,83 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.autofocus;
+
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+
+/** Controls the auto focus configuration on the {@link android.hardware.camera2} API. */
+public class AutoFocusFeature extends CameraFeature<FocusMode> {
+ private FocusMode currentSetting = FocusMode.auto;
+
+ // When switching recording modes this feature is re-created with the appropriate setting here.
+ private final boolean recordingVideo;
+
+ /**
+ * Creates a new instance of the {@see AutoFocusFeature}.
+ *
+ * @param cameraProperties Collection of the characteristics for the current camera device.
+ * @param recordingVideo Indicates whether the camera is currently recording video.
+ */
+ public AutoFocusFeature(CameraProperties cameraProperties, boolean recordingVideo) {
+ super(cameraProperties);
+ this.recordingVideo = recordingVideo;
+ }
+
+ @Override
+ public String getDebugName() {
+ return "AutoFocusFeature";
+ }
+
+ @Override
+ public FocusMode getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(FocusMode value) {
+ this.currentSetting = value;
+ }
+
+ @Override
+ public boolean checkIsSupported() {
+ int[] modes = cameraProperties.getControlAutoFocusAvailableModes();
+
+ final Float minFocus = cameraProperties.getLensInfoMinimumFocusDistance();
+
+ // Check if the focal length of the lens is fixed. If the minimum focus distance == 0, then the
+ // focal length is fixed. The minimum focus distance can be null on some devices: https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ boolean isFixedLength = minFocus == null || minFocus == 0;
+
+ return !isFixedLength
+ && !(modes.length == 0
+ || (modes.length == 1 && modes[0] == CameraCharacteristics.CONTROL_AF_MODE_OFF));
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ switch (currentSetting) {
+ case locked:
+ // When locking the auto-focus the camera device should do a one-time focus and afterwards
+ // set the auto-focus to idle. This is accomplished by setting the CONTROL_AF_MODE to
+ // CONTROL_AF_MODE_AUTO.
+ requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
+ break;
+ case auto:
+ requestBuilder.set(
+ CaptureRequest.CONTROL_AF_MODE,
+ recordingVideo
+ ? CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO
+ : CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+ default:
+ break;
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/autofocus/FocusMode.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/autofocus/FocusMode.java
new file mode 100644
index 0000000..56331b4
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/autofocus/FocusMode.java
@@ -0,0 +1,31 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.autofocus;
+
+// Mirrors focus_mode.dart
+public enum FocusMode {
+ auto("auto"),
+ locked("locked");
+
+ private final String strValue;
+
+ FocusMode(String strValue) {
+ this.strValue = strValue;
+ }
+
+ public static FocusMode getValueForString(String modeStr) {
+ for (FocusMode value : values()) {
+ if (value.strValue.equals(modeStr)) {
+ return value;
+ }
+ }
+ return null;
+ }
+
+ @Override
+ public String toString() {
+ return strValue;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurelock/ExposureLockFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurelock/ExposureLockFeature.java
new file mode 100644
index 0000000..df08cd9
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurelock/ExposureLockFeature.java
@@ -0,0 +1,54 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.exposurelock;
+
+import android.hardware.camera2.CaptureRequest;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+
+/** Controls whether or not the exposure mode is currently locked or automatically metering. */
+public class ExposureLockFeature extends CameraFeature<ExposureMode> {
+
+ private ExposureMode currentSetting = ExposureMode.auto;
+
+ /**
+ * Creates a new instance of the {@see ExposureLockFeature}.
+ *
+ * @param cameraProperties Collection of the characteristics for the current camera device.
+ */
+ public ExposureLockFeature(CameraProperties cameraProperties) {
+ super(cameraProperties);
+ }
+
+ @Override
+ public String getDebugName() {
+ return "ExposureLockFeature";
+ }
+
+ @Override
+ public ExposureMode getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(ExposureMode value) {
+ this.currentSetting = value;
+ }
+
+ // Available on all devices.
+ @Override
+ public boolean checkIsSupported() {
+ return true;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, currentSetting == ExposureMode.locked);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurelock/ExposureMode.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurelock/ExposureMode.java
new file mode 100644
index 0000000..2971fb2
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurelock/ExposureMode.java
@@ -0,0 +1,40 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.exposurelock;
+
+// Mirrors exposure_mode.dart
+public enum ExposureMode {
+ auto("auto"),
+ locked("locked");
+
+ private final String strValue;
+
+ ExposureMode(String strValue) {
+ this.strValue = strValue;
+ }
+
+ /**
+ * Tries to convert the supplied string into an {@see ExposureMode} enum value.
+ *
+ * <p>When the supplied string doesn't match a valid {@see ExposureMode} enum value, null is
+ * returned.
+ *
+ * @param modeStr String value to convert into an {@see ExposureMode} enum value.
+ * @return Matching {@see ExposureMode} enum value, or null if no match is found.
+ */
+ public static ExposureMode getValueForString(String modeStr) {
+ for (ExposureMode value : values()) {
+ if (value.strValue.equals(modeStr)) {
+ return value;
+ }
+ }
+ return null;
+ }
+
+ @Override
+ public String toString() {
+ return strValue;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposureoffset/ExposureOffsetFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposureoffset/ExposureOffsetFeature.java
new file mode 100644
index 0000000..d5a9fcd
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposureoffset/ExposureOffsetFeature.java
@@ -0,0 +1,94 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.exposureoffset;
+
+import android.hardware.camera2.CaptureRequest;
+import android.util.Range;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+
+/** Controls the exposure offset making the resulting image brighter or darker. */
+public class ExposureOffsetFeature extends CameraFeature<Double> {
+
+ private double currentSetting = 0;
+
+ /**
+ * Creates a new instance of the {@link ExposureOffsetFeature}.
+ *
+ * @param cameraProperties Collection of the characteristics for the current camera device.
+ */
+ public ExposureOffsetFeature(CameraProperties cameraProperties) {
+ super(cameraProperties);
+ }
+
+ @Override
+ public String getDebugName() {
+ return "ExposureOffsetFeature";
+ }
+
+ @Override
+ public Double getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(@NonNull Double value) {
+ double stepSize = getExposureOffsetStepSize();
+ this.currentSetting = value / stepSize;
+ }
+
+ // Available on all devices.
+ @Override
+ public boolean checkIsSupported() {
+ return true;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ requestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, (int) currentSetting);
+ }
+
+ /**
+ * Returns the minimum exposure offset.
+ *
+ * @return double Minimum exposure offset.
+ */
+ public double getMinExposureOffset() {
+ Range<Integer> range = cameraProperties.getControlAutoExposureCompensationRange();
+ double minStepped = range == null ? 0 : range.getLower();
+ double stepSize = getExposureOffsetStepSize();
+ return minStepped * stepSize;
+ }
+
+ /**
+ * Returns the maximum exposure offset.
+ *
+ * @return double Maximum exposure offset.
+ */
+ public double getMaxExposureOffset() {
+ Range<Integer> range = cameraProperties.getControlAutoExposureCompensationRange();
+ double maxStepped = range == null ? 0 : range.getUpper();
+ double stepSize = getExposureOffsetStepSize();
+ return maxStepped * stepSize;
+ }
+
+ /**
+ * Returns the smallest step by which the exposure compensation can be changed.
+ *
+ * <p>Example: if this has a value of 0.5, then an aeExposureCompensation setting of -2 means that
+ * the actual AE offset is -1. More details can be found in the official Android documentation:
+ * https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#CONTROL_AE_COMPENSATION_STEP
+ *
+ * @return double Smallest step by which the exposure compensation can be changed.
+ */
+ public double getExposureOffsetStepSize() {
+ return cameraProperties.getControlAutoExposureCompensationStep();
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurepoint/ExposurePointFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurepoint/ExposurePointFeature.java
new file mode 100644
index 0000000..336e756
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/exposurepoint/ExposurePointFeature.java
@@ -0,0 +1,99 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.exposurepoint;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.util.Size;
+import androidx.annotation.NonNull;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.CameraRegionUtils;
+import io.flutter.plugins.camera.features.CameraFeature;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+
+/** Exposure point controls where in the frame exposure metering will come from. */
+public class ExposurePointFeature extends CameraFeature<Point> {
+
+ private Size cameraBoundaries;
+ private Point exposurePoint;
+ private MeteringRectangle exposureRectangle;
+ private final SensorOrientationFeature sensorOrientationFeature;
+
+ /**
+ * Creates a new instance of the {@link ExposurePointFeature}.
+ *
+ * @param cameraProperties Collection of the characteristics for the current camera device.
+ */
+ public ExposurePointFeature(
+ CameraProperties cameraProperties, SensorOrientationFeature sensorOrientationFeature) {
+ super(cameraProperties);
+ this.sensorOrientationFeature = sensorOrientationFeature;
+ }
+
+ /**
+ * Sets the camera boundaries that are required for the exposure point feature to function.
+ *
+ * @param cameraBoundaries - The camera boundaries to set.
+ */
+ public void setCameraBoundaries(@NonNull Size cameraBoundaries) {
+ this.cameraBoundaries = cameraBoundaries;
+ this.buildExposureRectangle();
+ }
+
+ @Override
+ public String getDebugName() {
+ return "ExposurePointFeature";
+ }
+
+ @Override
+ public Point getValue() {
+ return exposurePoint;
+ }
+
+ @Override
+ public void setValue(Point value) {
+ this.exposurePoint = (value == null || value.x == null || value.y == null) ? null : value;
+ this.buildExposureRectangle();
+ }
+
+ // Whether or not this camera can set the exposure point.
+ @Override
+ public boolean checkIsSupported() {
+ Integer supportedRegions = cameraProperties.getControlMaxRegionsAutoExposure();
+ return supportedRegions != null && supportedRegions > 0;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+ requestBuilder.set(
+ CaptureRequest.CONTROL_AE_REGIONS,
+ exposureRectangle == null ? null : new MeteringRectangle[] {exposureRectangle});
+ }
+
+ private void buildExposureRectangle() {
+ if (this.cameraBoundaries == null) {
+ throw new AssertionError(
+ "The cameraBoundaries should be set (using `ExposurePointFeature.setCameraBoundaries(Size)`) before updating the exposure point.");
+ }
+ if (this.exposurePoint == null) {
+ this.exposureRectangle = null;
+ } else {
+ PlatformChannel.DeviceOrientation orientation =
+ this.sensorOrientationFeature.getLockedCaptureOrientation();
+ if (orientation == null) {
+ orientation =
+ this.sensorOrientationFeature.getDeviceOrientationManager().getLastUIOrientation();
+ }
+ this.exposureRectangle =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.cameraBoundaries, this.exposurePoint.x, this.exposurePoint.y, orientation);
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/flash/FlashFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/flash/FlashFeature.java
new file mode 100644
index 0000000..054c81f
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/flash/FlashFeature.java
@@ -0,0 +1,75 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.flash;
+
+import android.hardware.camera2.CaptureRequest;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+
+/** Controls the flash configuration on the {@link android.hardware.camera2} API. */
+public class FlashFeature extends CameraFeature<FlashMode> {
+ private FlashMode currentSetting = FlashMode.auto;
+
+ /**
+ * Creates a new instance of the {@link FlashFeature}.
+ *
+ * @param cameraProperties Collection of characteristics for the current camera device.
+ */
+ public FlashFeature(CameraProperties cameraProperties) {
+ super(cameraProperties);
+ }
+
+ @Override
+ public String getDebugName() {
+ return "FlashFeature";
+ }
+
+ @Override
+ public FlashMode getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(FlashMode value) {
+ this.currentSetting = value;
+ }
+
+ @Override
+ public boolean checkIsSupported() {
+ Boolean available = cameraProperties.getFlashInfoAvailable();
+ return available != null && available;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ switch (currentSetting) {
+ case off:
+ requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+ requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ break;
+
+ case always:
+ requestBuilder.set(
+ CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
+ requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ break;
+
+ case torch:
+ requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+ requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
+ break;
+
+ case auto:
+ requestBuilder.set(
+ CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
+ requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ break;
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/flash/FlashMode.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/flash/FlashMode.java
new file mode 100644
index 0000000..788c768
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/flash/FlashMode.java
@@ -0,0 +1,40 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.flash;
+
+// Mirrors flash_mode.dart
+public enum FlashMode {
+ off("off"),
+ auto("auto"),
+ always("always"),
+ torch("torch");
+
+ private final String strValue;
+
+ FlashMode(String strValue) {
+ this.strValue = strValue;
+ }
+
+ /**
+ * Tries to convert the supplied string into a {@see FlashMode} enum value.
+ *
+ * <p>When the supplied string doesn't match a valid {@see FlashMode} enum value, null is
+ * returned.
+ *
+ * @param modeStr String value to convert into an {@see FlashMode} enum value.
+ * @return Matching {@see FlashMode} enum value, or null if no match is found.
+ */
+ public static FlashMode getValueForString(String modeStr) {
+ for (FlashMode value : values()) {
+ if (value.strValue.equals(modeStr)) return value;
+ }
+ return null;
+ }
+
+ @Override
+ public String toString() {
+ return strValue;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeature.java
new file mode 100644
index 0000000..a3a0172
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeature.java
@@ -0,0 +1,99 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.focuspoint;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.util.Size;
+import androidx.annotation.NonNull;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.CameraRegionUtils;
+import io.flutter.plugins.camera.features.CameraFeature;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+
+/** Focus point controls where in the frame focus will come from. */
+public class FocusPointFeature extends CameraFeature<Point> {
+
+ // Camera boundaries used to convert the relative focus point into an absolute
+ // metering rectangle; must be set via setCameraBoundaries before a point is applied.
+ private Size cameraBoundaries;
+ // Requested focus point; null when no explicit focus point is set (or it was cleared).
+ private Point focusPoint;
+ // Metering rectangle derived from focusPoint; null clears the AF regions.
+ private MeteringRectangle focusRectangle;
+ private final SensorOrientationFeature sensorOrientationFeature;
+
+ /**
+ * Creates a new instance of the {@link FocusPointFeature}.
+ *
+ * @param cameraProperties Collection of the characteristics for the current camera device.
+ * @param sensorOrientationFeature Supplies the orientation used when converting the focus point
+ * into a metering rectangle.
+ */
+ public FocusPointFeature(
+ CameraProperties cameraProperties, SensorOrientationFeature sensorOrientationFeature) {
+ super(cameraProperties);
+ this.sensorOrientationFeature = sensorOrientationFeature;
+ }
+
+ /**
+ * Sets the camera boundaries that are required for the focus point feature to function.
+ *
+ * <p>Also rebuilds the focus rectangle so an already-set focus point picks up the new bounds.
+ *
+ * @param cameraBoundaries - The camera boundaries to set.
+ */
+ public void setCameraBoundaries(@NonNull Size cameraBoundaries) {
+ this.cameraBoundaries = cameraBoundaries;
+ this.buildFocusRectangle();
+ }
+
+ @Override
+ public String getDebugName() {
+ return "FocusPointFeature";
+ }
+
+ @Override
+ public Point getValue() {
+ return focusPoint;
+ }
+
+ @Override
+ public void setValue(Point value) {
+ // A point with either coordinate missing is treated as "no focus point".
+ this.focusPoint = value == null || value.x == null || value.y == null ? null : value;
+ // NOTE: throws an AssertionError if setCameraBoundaries has not been called yet.
+ this.buildFocusRectangle();
+ }
+
+ // Whether or not this camera can set the focus point.
+ @Override
+ public boolean checkIsSupported() {
+ Integer supportedRegions = cameraProperties.getControlMaxRegionsAutoFocus();
+ return supportedRegions != null && supportedRegions > 0;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+ // A null value clears any previously configured auto-focus regions.
+ requestBuilder.set(
+ CaptureRequest.CONTROL_AF_REGIONS,
+ focusRectangle == null ? null : new MeteringRectangle[] {focusRectangle});
+ }
+
+ // Recomputes focusRectangle from the current focusPoint, boundaries and orientation.
+ private void buildFocusRectangle() {
+ if (this.cameraBoundaries == null) {
+ throw new AssertionError(
+ "The cameraBoundaries should be set (using `FocusPointFeature.setCameraBoundaries(Size)`) before updating the focus point.");
+ }
+ if (this.focusPoint == null) {
+ this.focusRectangle = null;
+ } else {
+ // Prefer an explicitly locked capture orientation; otherwise fall back to the last
+ // UI orientation reported by the device orientation manager.
+ PlatformChannel.DeviceOrientation orientation =
+ this.sensorOrientationFeature.getLockedCaptureOrientation();
+ if (orientation == null) {
+ orientation =
+ this.sensorOrientationFeature.getDeviceOrientationManager().getLastUIOrientation();
+ }
+ this.focusRectangle =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.cameraBoundaries, this.focusPoint.x, this.focusPoint.y, orientation);
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeature.java
new file mode 100644
index 0000000..500f2aa
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeature.java
@@ -0,0 +1,87 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.fpsrange;
+
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import android.util.Range;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+
+/**
+ * Controls the frames per seconds (FPS) range configuration on the {@link android.hardware.camera2}
+ * API.
+ */
+public class FpsRangeFeature extends CameraFeature<Range<Integer>> {
+ // Hard cap applied on the Pixel 4a; see the constructor for the rationale.
+ private static final Range<Integer> MAX_PIXEL4A_RANGE = new Range<>(30, 30);
+ // Selected FPS range; may stay null when the device reports no usable ranges.
+ private Range<Integer> currentSetting;
+
+ /**
+ * Creates a new instance of the {@link FpsRangeFeature}.
+ *
+ * <p>Selects the supported range with the highest upper bound, skipping ranges whose upper
+ * bound is below 10 (presumably to avoid very low frame-rate modes — confirm rationale).
+ *
+ * @param cameraProperties Collection of characteristics for the current camera device.
+ */
+ public FpsRangeFeature(CameraProperties cameraProperties) {
+ super(cameraProperties);
+
+ if (isPixel4A()) {
+ // HACK: There is a bug in the Pixel 4A where it cannot support 60fps modes
+ // even though they are reported as supported by
+ // `getControlAutoExposureAvailableTargetFpsRanges`.
+ // For max device compatibility we will keep FPS under 60 even if they report they are
+ // capable of achieving 60 fps. Highest working FPS is 30.
+ // https://issuetracker.google.com/issues/189237151
+ currentSetting = MAX_PIXEL4A_RANGE;
+ } else {
+ Range<Integer>[] ranges = cameraProperties.getControlAutoExposureAvailableTargetFpsRanges();
+
+ if (ranges != null) {
+ for (Range<Integer> range : ranges) {
+ int upper = range.getUpper();
+
+ if (upper >= 10) {
+ if (currentSetting == null || upper > currentSetting.getUpper()) {
+ currentSetting = range;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Identifies the Pixel 4a by brand and model string.
+ private boolean isPixel4A() {
+ return Build.BRAND.equals("google") && Build.MODEL.equals("Pixel 4a");
+ }
+
+ @Override
+ public String getDebugName() {
+ return "FpsRangeFeature";
+ }
+
+ @Override
+ public Range<Integer> getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(Range<Integer> value) {
+ this.currentSetting = value;
+ }
+
+ // Always supported
+ @Override
+ public boolean checkIsSupported() {
+ return true;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ // currentSetting may be null here if no usable range was found; the builder then
+ // receives null, leaving the target FPS range unset.
+ requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, currentSetting);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeature.java
new file mode 100644
index 0000000..408575b
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeature.java
@@ -0,0 +1,91 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.noisereduction;
+
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build.VERSION;
+import android.os.Build.VERSION_CODES;
+import android.util.Log;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+import java.util.HashMap;
+
+/**
+ * This can either be enabled or disabled. Only full capability devices can set this to off. Legacy
+ * and full support the fast mode.
+ * https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+ */
+public class NoiseReductionFeature extends CameraFeature<NoiseReductionMode> {
+ // Defaults to fast, the mode supported by both legacy- and full-capability devices
+ // (per the class-level link above).
+ private NoiseReductionMode currentSetting = NoiseReductionMode.fast;
+
+ // Maps platform-agnostic NoiseReductionMode values to camera2 NOISE_REDUCTION_MODE_*
+ // constants. minimal/zeroShutterLag are only registered on SDK M+, so looking them up
+ // below M yields null.
+ private final HashMap<NoiseReductionMode, Integer> NOISE_REDUCTION_MODES = new HashMap<>();
+
+ /**
+ * Creates a new instance of the {@link NoiseReductionFeature}.
+ *
+ * @param cameraProperties Collection of the characteristics for the current camera device.
+ */
+ public NoiseReductionFeature(CameraProperties cameraProperties) {
+ super(cameraProperties);
+ NOISE_REDUCTION_MODES.put(NoiseReductionMode.off, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
+ NOISE_REDUCTION_MODES.put(NoiseReductionMode.fast, CaptureRequest.NOISE_REDUCTION_MODE_FAST);
+ NOISE_REDUCTION_MODES.put(
+ NoiseReductionMode.highQuality, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
+ if (VERSION.SDK_INT >= VERSION_CODES.M) {
+ NOISE_REDUCTION_MODES.put(
+ NoiseReductionMode.minimal, CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL);
+ NOISE_REDUCTION_MODES.put(
+ NoiseReductionMode.zeroShutterLag, CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG);
+ }
+ }
+
+ @Override
+ public String getDebugName() {
+ return "NoiseReductionFeature";
+ }
+
+ @Override
+ public NoiseReductionMode getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(NoiseReductionMode value) {
+ this.currentSetting = value;
+ }
+
+ @Override
+ public boolean checkIsSupported() {
+ /*
+ * Available settings: public static final int NOISE_REDUCTION_MODE_FAST = 1; public static
+ * final int NOISE_REDUCTION_MODE_HIGH_QUALITY = 2; public static final int
+ * NOISE_REDUCTION_MODE_MINIMAL = 3; public static final int NOISE_REDUCTION_MODE_OFF = 0;
+ * public static final int NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG = 4;
+ *
+ * <p>Full-capability camera devices will always support OFF and FAST. Camera devices that
+ * support YUV_REPROCESSING or PRIVATE_REPROCESSING will support ZERO_SHUTTER_LAG.
+ * Legacy-capability camera devices will only support FAST mode.
+ */
+
+ // Can be null on some devices.
+ int[] modes = cameraProperties.getAvailableNoiseReductionModes();
+
+ /// If there's at least one mode available then we are supported.
+ return modes != null && modes.length > 0;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ Log.i("Camera", "updateNoiseReduction | currentSetting: " + currentSetting);
+
+ // Apply the camera2 constant mapped from the current mode. The mapped value can be
+ // null for minimal/zeroShutterLag on SDK < M, in which case null is written.
+ requestBuilder.set(
+ CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODES.get(currentSetting));
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionMode.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionMode.java
new file mode 100644
index 0000000..425a458
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionMode.java
@@ -0,0 +1,41 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.noisereduction;
+
+/** Only supports fast mode for now. */
+public enum NoiseReductionMode {
+ off("off"),
+ fast("fast"),
+ highQuality("highQuality"),
+ minimal("minimal"),
+ zeroShutterLag("zeroShutterLag");
+
+ private final String strValue;
+
+ NoiseReductionMode(String strValue) {
+ this.strValue = strValue;
+ }
+
+ /**
+ * Tries to convert the supplied string into a {@see NoiseReductionMode} enum value.
+ *
+ * <p>When the supplied string doesn't match a valid {@see NoiseReductionMode} enum value, null is
+ * returned.
+ *
+ * @param modeStr String value to convert into an {@see NoiseReductionMode} enum value.
+ * @return Matching {@see NoiseReductionMode} enum value, or null if no match is found.
+ */
+ public static NoiseReductionMode getValueForString(String modeStr) {
+ for (NoiseReductionMode value : values()) {
+ if (value.strValue.equals(modeStr)) return value;
+ }
+ return null;
+ }
+
+ @Override
+ public String toString() {
+ return strValue;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionFeature.java
new file mode 100644
index 0000000..0ec2fbe
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionFeature.java
@@ -0,0 +1,269 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.resolution;
+
+import android.annotation.TargetApi;
+import android.hardware.camera2.CaptureRequest;
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import android.os.Build;
+import android.util.Size;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+import java.util.List;
+
+/**
+ * Controls the resolutions configuration on the {@link android.hardware.camera2} API.
+ *
+ * <p>The {@link ResolutionFeature} is responsible for converting the platform independent {@link
+ * ResolutionPreset} into a {@link android.media.CamcorderProfile} (SDK < 31) or {@link
+ * android.media.EncoderProfiles} (SDK >= 31) which contains all the properties required to
+ * configure the resolution using the {@link android.hardware.camera2} API.
+ */
+public class ResolutionFeature extends CameraFeature<ResolutionPreset> {
+  private Size captureSize;
+  private Size previewSize;
+  // Only one of recordingProfileLegacy / recordingProfile is non-null at a time, depending
+  // on whether the legacy CamcorderProfile path or the SDK 31+ EncoderProfiles path is used.
+  private CamcorderProfile recordingProfileLegacy;
+  private EncoderProfiles recordingProfile;
+  private ResolutionPreset currentSetting;
+  // Numeric camera id, or -1 when the supplied camera name was not numeric (unsupported).
+  private int cameraId;
+
+  /**
+   * Creates a new instance of the {@link ResolutionFeature}.
+   *
+   * @param cameraProperties Collection of characteristics for the current camera device.
+   * @param resolutionPreset Platform agnostic enum containing resolution information.
+   * @param cameraName Camera identifier of the camera for which to configure the resolution.
+   */
+  public ResolutionFeature(
+      CameraProperties cameraProperties, ResolutionPreset resolutionPreset, String cameraName) {
+    super(cameraProperties);
+    this.currentSetting = resolutionPreset;
+    try {
+      this.cameraId = Integer.parseInt(cameraName, 10);
+    } catch (NumberFormatException e) {
+      // The CamcorderProfile APIs require a numeric camera id; a non-numeric name marks
+      // the feature as unsupported instead of throwing.
+      this.cameraId = -1;
+      return;
+    }
+    configureResolution(resolutionPreset, cameraId);
+  }
+
+  /**
+   * Gets the {@link android.media.CamcorderProfile} containing the information to configure the
+   * resolution using the {@link android.hardware.camera2} API. Only set when the legacy
+   * profile path was used (SDK < 31, or as fallback).
+   *
+   * @return Resolution information to configure the {@link android.hardware.camera2} API.
+   */
+  public CamcorderProfile getRecordingProfileLegacy() {
+    return this.recordingProfileLegacy;
+  }
+
+  /**
+   * Gets the {@link android.media.EncoderProfiles} containing the information to configure the
+   * resolution using the {@link android.hardware.camera2} API. Only set on SDK 31+ when the
+   * device reports usable video profiles.
+   *
+   * @return Resolution information to configure the {@link android.hardware.camera2} API.
+   */
+  public EncoderProfiles getRecordingProfile() {
+    return this.recordingProfile;
+  }
+
+  /**
+   * Gets the optimal preview size based on the configured resolution.
+   *
+   * @return The optimal preview size.
+   */
+  public Size getPreviewSize() {
+    return this.previewSize;
+  }
+
+  /**
+   * Gets the optimal capture size based on the configured resolution.
+   *
+   * @return The optimal capture size.
+   */
+  public Size getCaptureSize() {
+    return this.captureSize;
+  }
+
+  @Override
+  public String getDebugName() {
+    return "ResolutionFeature";
+  }
+
+  @Override
+  public ResolutionPreset getValue() {
+    return currentSetting;
+  }
+
+  @Override
+  public void setValue(ResolutionPreset value) {
+    this.currentSetting = value;
+    // Recompute the capture/preview sizes and recording profiles for the new preset.
+    configureResolution(currentSetting, cameraId);
+  }
+
+  @Override
+  public boolean checkIsSupported() {
+    // Only supported when the camera name parsed to a valid (>= 0) numeric id.
+    return cameraId >= 0;
+  }
+
+  @Override
+  public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+    // No-op: when setting a resolution there is no need to update the request builder.
+  }
+
+  /**
+   * Computes the best preview size for the given camera and preset, capping the preview
+   * quality at 720p ({@link ResolutionPreset#high}).
+   */
+  @VisibleForTesting
+  static Size computeBestPreviewSize(int cameraId, ResolutionPreset preset)
+      throws IndexOutOfBoundsException {
+    // Preview is capped at the high (720p) preset; higher presets reuse that preview size.
+    if (preset.ordinal() > ResolutionPreset.high.ordinal()) {
+      preset = ResolutionPreset.high;
+    }
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
+      EncoderProfiles profile =
+          getBestAvailableCamcorderProfileForResolutionPreset(cameraId, preset);
+      List<EncoderProfiles.VideoProfile> videoProfiles = profile.getVideoProfiles();
+      // Some devices return an empty video profile list; indexing it unguarded would throw
+      // an IndexOutOfBoundsException. Guard so we fall back to the legacy profile instead.
+      if (!videoProfiles.isEmpty()) {
+        EncoderProfiles.VideoProfile defaultVideoProfile = videoProfiles.get(0);
+        if (defaultVideoProfile != null) {
+          return new Size(defaultVideoProfile.getWidth(), defaultVideoProfile.getHeight());
+        }
+      }
+    }
+
+    @SuppressWarnings("deprecation")
+    // TODO(camsim99): Suppression is currently safe because legacy code is used as a fallback for SDK >= S.
+    // This should be removed when reverting that fallback behavior: https://github.com/flutter/flutter/issues/119668.
+    CamcorderProfile profile =
+        getBestAvailableCamcorderProfileForResolutionPresetLegacy(cameraId, preset);
+    return new Size(profile.videoFrameWidth, profile.videoFrameHeight);
+  }
+
+  /**
+   * Gets the best possible {@link android.media.CamcorderProfile} for the supplied {@link
+   * ResolutionPreset}. Supports SDK < 31.
+   *
+   * @param cameraId Camera identifier which indicates the device's camera for which to select a
+   *     {@link android.media.CamcorderProfile}.
+   * @param preset The {@link ResolutionPreset} for which is to be translated to a {@link
+   *     android.media.CamcorderProfile}.
+   * @return The best possible {@link android.media.CamcorderProfile} that matches the supplied
+   *     {@link ResolutionPreset}.
+   */
+  public static CamcorderProfile getBestAvailableCamcorderProfileForResolutionPresetLegacy(
+      int cameraId, ResolutionPreset preset) {
+    if (cameraId < 0) {
+      throw new AssertionError(
+          "getBestAvailableCamcorderProfileForResolutionPreset can only be used with valid (>=0) camera identifiers.");
+    }
+
+    switch (preset) {
+        // All of these cases deliberately fall through to get the best available profile.
+      case max:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_HIGH)) {
+          return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH);
+        }
+      case ultraHigh:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_2160P)) {
+          return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_2160P);
+        }
+      case veryHigh:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_1080P)) {
+          return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_1080P);
+        }
+      case high:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_720P)) {
+          return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_720P);
+        }
+      case medium:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_480P)) {
+          return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_480P);
+        }
+      case low:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_QVGA)) {
+          return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_QVGA);
+        }
+      default:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_LOW)) {
+          return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_LOW);
+        } else {
+          throw new IllegalArgumentException(
+              "No capture session available for current capture session.");
+        }
+    }
+  }
+
+  /**
+   * Gets the best possible {@link android.media.EncoderProfiles} for the supplied {@link
+   * ResolutionPreset}. Supports SDK 31+.
+   *
+   * @param cameraId Camera identifier which indicates the device's camera for which to select a
+   *     {@link android.media.EncoderProfiles}.
+   * @param preset The {@link ResolutionPreset} for which is to be translated to a {@link
+   *     android.media.EncoderProfiles}.
+   * @return The best possible {@link android.media.EncoderProfiles} that matches the supplied
+   *     {@link ResolutionPreset}.
+   */
+  @TargetApi(Build.VERSION_CODES.S)
+  public static EncoderProfiles getBestAvailableCamcorderProfileForResolutionPreset(
+      int cameraId, ResolutionPreset preset) {
+    if (cameraId < 0) {
+      throw new AssertionError(
+          "getBestAvailableCamcorderProfileForResolutionPreset can only be used with valid (>=0) camera identifiers.");
+    }
+
+    String cameraIdString = Integer.toString(cameraId);
+
+    switch (preset) {
+        // All of these cases deliberately fall through to get the best available profile.
+      case max:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_HIGH)) {
+          return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_HIGH);
+        }
+      case ultraHigh:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_2160P)) {
+          return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_2160P);
+        }
+      case veryHigh:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_1080P)) {
+          return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_1080P);
+        }
+      case high:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_720P)) {
+          return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_720P);
+        }
+      case medium:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_480P)) {
+          return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_480P);
+        }
+      case low:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_QVGA)) {
+          return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_QVGA);
+        }
+      default:
+        if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_LOW)) {
+          return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_LOW);
+        }
+
+        throw new IllegalArgumentException(
+            "No capture session available for current capture session.");
+    }
+  }
+
+  // Resolves the capture size, preview size and recording profile for the given preset.
+  private void configureResolution(ResolutionPreset resolutionPreset, int cameraId)
+      throws IndexOutOfBoundsException {
+    if (!checkIsSupported()) {
+      return;
+    }
+    boolean captureSizeCalculated = false;
+
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
+      recordingProfileLegacy = null;
+      recordingProfile =
+          getBestAvailableCamcorderProfileForResolutionPreset(cameraId, resolutionPreset);
+      List<EncoderProfiles.VideoProfile> videoProfiles = recordingProfile.getVideoProfiles();
+      // Guard against devices that report an empty video profile list; fall back to the
+      // legacy CamcorderProfile path below instead of throwing.
+      if (!videoProfiles.isEmpty()) {
+        EncoderProfiles.VideoProfile defaultVideoProfile = videoProfiles.get(0);
+        if (defaultVideoProfile != null) {
+          captureSizeCalculated = true;
+          captureSize = new Size(defaultVideoProfile.getWidth(), defaultVideoProfile.getHeight());
+        }
+      }
+    }
+
+    if (!captureSizeCalculated) {
+      recordingProfile = null;
+      @SuppressWarnings("deprecation")
+      CamcorderProfile camcorderProfile =
+          getBestAvailableCamcorderProfileForResolutionPresetLegacy(cameraId, resolutionPreset);
+      recordingProfileLegacy = camcorderProfile;
+      captureSize =
+          new Size(recordingProfileLegacy.videoFrameWidth, recordingProfileLegacy.videoFrameHeight);
+    }
+
+    previewSize = computeBestPreviewSize(cameraId, resolutionPreset);
+  }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionPreset.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionPreset.java
new file mode 100644
index 0000000..3593003
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionPreset.java
@@ -0,0 +1,15 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.resolution;
+
+// Mirrors camera.dart
+public enum ResolutionPreset {
+ // NOTE: declaration order matters — ordinal values are compared to cap preview quality
+ // (see ResolutionFeature.computeBestPreviewSize), so keep these sorted from lowest to
+ // highest resolution.
+ low,
+ medium,
+ high,
+ veryHigh,
+ ultraHigh,
+ max,
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManager.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManager.java
new file mode 100644
index 0000000..ec6fa13
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManager.java
@@ -0,0 +1,335 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.sensororientation;
+
+import android.app.Activity;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.res.Configuration;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugins.camera.DartMessenger;
+
+/**
+ * Support class to help to determine the media orientation based on the orientation of the device.
+ */
+public class DeviceOrientationManager {
+
+ // Configuration changes (which include orientation changes) trigger the receiver below.
+ private static final IntentFilter orientationIntentFilter =
+ new IntentFilter(Intent.ACTION_CONFIGURATION_CHANGED);
+
+ private final Activity activity;
+ private final DartMessenger messenger;
+ private final boolean isFrontFacing;
+ // Camera sensor orientation in degrees, as supplied by the caller (typically
+ // CameraCharacteristics.SENSOR_ORIENTATION — confirm at the call site).
+ private final int sensorOrientation;
+ // Last UI orientation processed by handleUIOrientationChange; null until start() runs.
+ private PlatformChannel.DeviceOrientation lastOrientation;
+ // Non-null only while listening (between start() and stop()).
+ private BroadcastReceiver broadcastReceiver;
+
+ /**
+ * Factory method to create a device orientation manager.
+ *
+ * @param activity Activity used to register the broadcast receiver and read display state.
+ * @param messenger Messenger used to report orientation changes to the Dart side.
+ * @param isFrontFacing Whether the camera is front facing.
+ * @param sensorOrientation The camera sensor orientation in degrees.
+ */
+ public static DeviceOrientationManager create(
+ @NonNull Activity activity,
+ @NonNull DartMessenger messenger,
+ boolean isFrontFacing,
+ int sensorOrientation) {
+ return new DeviceOrientationManager(activity, messenger, isFrontFacing, sensorOrientation);
+ }
+
+ // Private: instantiation goes through the create(...) factory method above.
+ private DeviceOrientationManager(
+ @NonNull Activity activity,
+ @NonNull DartMessenger messenger,
+ boolean isFrontFacing,
+ int sensorOrientation) {
+ this.activity = activity;
+ this.messenger = messenger;
+ this.isFrontFacing = isFrontFacing;
+ this.sensorOrientation = sensorOrientation;
+ }
+
+ /**
+ * Starts listening to the device's sensors or UI for orientation updates.
+ *
+ * <p>When orientation information is updated the new orientation is send to the client using the
+ * {@link DartMessenger}. This latest value can also be retrieved through the {@link
+ * #getVideoOrientation()} accessor.
+ *
+ * <p>If the device's ACCELEROMETER_ROTATION setting is enabled the {@link
+ * DeviceOrientationManager} will report orientation updates based on the sensor information. If
+ * the ACCELEROMETER_ROTATION is disabled the {@link DeviceOrientationManager} will fallback to
+ * the deliver orientation updates based on the UI orientation.
+ */
+ public void start() {
+ // Idempotent: calling start() while already listening is a no-op.
+ if (broadcastReceiver != null) {
+ return;
+ }
+ broadcastReceiver =
+ new BroadcastReceiver() {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ handleUIOrientationChange();
+ }
+ };
+ activity.registerReceiver(broadcastReceiver, orientationIntentFilter);
+ // Deliver one synthetic event immediately so lastOrientation is initialized without
+ // waiting for the first real configuration change.
+ broadcastReceiver.onReceive(activity, null);
+ }
+
+ /** Stops listening for orientation updates; a no-op when not currently listening. */
+ public void stop() {
+   if (broadcastReceiver != null) {
+     activity.unregisterReceiver(broadcastReceiver);
+     broadcastReceiver = null;
+   }
+ }
+
+ /**
+ * Returns the device's photo orientation in degrees based on the sensor orientation and the last
+ * known UI orientation.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * <p>When no orientation update has been received yet (lastOrientation is null), the overload
+ * falls back to the current UI orientation.
+ *
+ * @return The device's photo orientation in degrees.
+ */
+ public int getPhotoOrientation() {
+ return this.getPhotoOrientation(this.lastOrientation);
+ }
+
+ /**
+ * Returns the device's photo orientation in degrees based on the sensor orientation and the
+ * supplied {@link PlatformChannel.DeviceOrientation} value.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * @param orientation The {@link PlatformChannel.DeviceOrientation} value that is to be converted
+ * into degrees. May be null, in which case the current UI orientation is used instead.
+ * @return The device's photo orientation in degrees.
+ */
+ public int getPhotoOrientation(PlatformChannel.DeviceOrientation orientation) {
+ int angle = 0;
+ // Fallback to device orientation when the orientation value is null.
+ if (orientation == null) {
+ orientation = getUIOrientation();
+ }
+
+ switch (orientation) {
+ case PORTRAIT_UP:
+ angle = 90;
+ break;
+ case PORTRAIT_DOWN:
+ angle = 270;
+ break;
+ case LANDSCAPE_LEFT:
+ // The landscape mapping differs for front-facing cameras.
+ angle = isFrontFacing ? 180 : 0;
+ break;
+ case LANDSCAPE_RIGHT:
+ angle = isFrontFacing ? 0 : 180;
+ break;
+ }
+
+ // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X).
+ // This has to be taken into account so the JPEG is rotated properly.
+ // For devices with orientation of 90, this simply returns the mapping from ORIENTATIONS.
+ // For devices with orientation of 270, the JPEG is rotated 180 degrees instead.
+ return (angle + sensorOrientation + 270) % 360;
+ }
+
+ /**
+ * Returns the device's video orientation in clockwise degrees based on the sensor orientation and
+ * the last known UI orientation.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * <p>When no orientation update has been received yet (lastOrientation is null), the overload
+ * falls back to the current UI orientation.
+ *
+ * @return The device's video orientation in clockwise degrees.
+ */
+ public int getVideoOrientation() {
+ return this.getVideoOrientation(this.lastOrientation);
+ }
+
+ /**
+ * Returns the device's video orientation in clockwise degrees based on the sensor orientation and
+ * the supplied {@link PlatformChannel.DeviceOrientation} value.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * <p>More details can be found in the official Android documentation:
+ * https://developer.android.com/reference/android/media/MediaRecorder#setOrientationHint(int)
+ *
+ * <p>See also:
+ * https://developer.android.com/training/camera2/camera-preview-large-screens#orientation_calculation
+ *
+ * @param orientation The {@link PlatformChannel.DeviceOrientation} value that is to be converted
+ * into degrees. May be null, in which case the current UI orientation is used instead.
+ * @return The device's video orientation in clockwise degrees.
+ */
+ public int getVideoOrientation(PlatformChannel.DeviceOrientation orientation) {
+ int angle = 0;
+
+ // Fallback to device orientation when the orientation value is null.
+ if (orientation == null) {
+ orientation = getUIOrientation();
+ }
+
+ switch (orientation) {
+ case PORTRAIT_UP:
+ angle = 0;
+ break;
+ case PORTRAIT_DOWN:
+ angle = 180;
+ break;
+ case LANDSCAPE_LEFT:
+ angle = 270;
+ break;
+ case LANDSCAPE_RIGHT:
+ angle = 90;
+ break;
+ }
+
+ // Invert the rotation for front-facing cameras before combining with the sensor angle
+ // (see the orientation-calculation docs linked above).
+ if (isFrontFacing) {
+ angle *= -1;
+ }
+
+ return (angle + sensorOrientation + 360) % 360;
+ }
+
+ /**
+ * Returns the last UI orientation processed by {@link #handleUIOrientationChange()}, or null if
+ * no orientation update has been handled yet.
+ */
+ public PlatformChannel.DeviceOrientation getLastUIOrientation() {
+ return this.lastOrientation;
+ }
+
+ /**
+ * Handles orientation changes based on change events triggered by the OrientationIntentFilter.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ */
+ @VisibleForTesting
+ void handleUIOrientationChange() {
+ PlatformChannel.DeviceOrientation orientation = getUIOrientation();
+ // Notify first (an event only fires when the value actually changed), then record the
+ // new value as the latest known orientation.
+ handleOrientationChange(orientation, lastOrientation, messenger);
+ lastOrientation = orientation;
+ }
+
+ /**
+ * Handles orientation changes coming from either the device's sensors or the
+ * OrientationIntentFilter.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ */
+ @VisibleForTesting
+ static void handleOrientationChange(
+ DeviceOrientation newOrientation,
+ DeviceOrientation previousOrientation,
+ DartMessenger messenger) {
+ if (!newOrientation.equals(previousOrientation)) {
+ messenger.sendDeviceOrientationChangeEvent(newOrientation);
+ }
+ }
+
+ /**
+ * Gets the current user interface orientation.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @return The current user interface orientation.
+ */
+ @VisibleForTesting
+ PlatformChannel.DeviceOrientation getUIOrientation() {
+ final int rotation = getDisplay().getRotation();
+ final int orientation = activity.getResources().getConfiguration().orientation;
+
+ switch (orientation) {
+ case Configuration.ORIENTATION_PORTRAIT:
+ if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) {
+ return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+ } else {
+ return PlatformChannel.DeviceOrientation.PORTRAIT_DOWN;
+ }
+ case Configuration.ORIENTATION_LANDSCAPE:
+ if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) {
+ return PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT;
+ } else {
+ return PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT;
+ }
+ default:
+ return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+ }
+ }
+
+ /**
+ * Calculates the sensor orientation based on the supplied angle.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param angle Orientation angle.
+ * @return The sensor orientation based on the supplied angle.
+ */
+ @VisibleForTesting
+ PlatformChannel.DeviceOrientation calculateSensorOrientation(int angle) {
+ final int tolerance = 45;
+ angle += tolerance;
+
+ // Orientation is 0 in the default orientation mode. This is portrait-mode for phones
+ // and landscape for tablets. We have to compensate for this by calculating the default
+ // orientation, and apply an offset accordingly.
+ int defaultDeviceOrientation = getDeviceDefaultOrientation();
+ if (defaultDeviceOrientation == Configuration.ORIENTATION_LANDSCAPE) {
+ angle += 90;
+ }
+ // Determine the orientation
+ angle = angle % 360;
+ return new PlatformChannel.DeviceOrientation[] {
+ PlatformChannel.DeviceOrientation.PORTRAIT_UP,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT,
+ PlatformChannel.DeviceOrientation.PORTRAIT_DOWN,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT,
+ }
+ [angle / 90];
+ }
+
+ /**
+ * Gets the default orientation of the device.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @return The default orientation of the device.
+ */
+ @VisibleForTesting
+ int getDeviceDefaultOrientation() {
+ Configuration config = activity.getResources().getConfiguration();
+ int rotation = getDisplay().getRotation();
+ if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180)
+ && config.orientation == Configuration.ORIENTATION_LANDSCAPE)
+ || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270)
+ && config.orientation == Configuration.ORIENTATION_PORTRAIT)) {
+ return Configuration.ORIENTATION_LANDSCAPE;
+ } else {
+ return Configuration.ORIENTATION_PORTRAIT;
+ }
+ }
+
  /**
   * Gets an instance of the Android {@link android.view.Display}.
   *
   * <p>This method is visible for testing purposes only and should never be used outside this
   * class.
   *
   * @return An instance of the Android {@link android.view.Display}.
   */
  @SuppressWarnings("deprecation")
  @VisibleForTesting
  Display getDisplay() {
    // getDefaultDisplay() is deprecated on newer API levels; the warning is suppressed until a
    // replacement (e.g. Context#getDisplay()) can be adopted. TODO confirm migration plan.
    return ((WindowManager) activity.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
  }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeature.java
new file mode 100644
index 0000000..9e316f7
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeature.java
@@ -0,0 +1,105 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.sensororientation;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import androidx.annotation.NonNull;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import io.flutter.plugins.camera.features.CameraFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+
+/** Provides access to the sensor orientation of the camera devices. */
+public class SensorOrientationFeature extends CameraFeature<Integer> {
+ private Integer currentSetting = 0;
+ private final DeviceOrientationManager deviceOrientationListener;
+ private PlatformChannel.DeviceOrientation lockedCaptureOrientation;
+
+ /**
+ * Creates a new instance of the {@link ResolutionFeature}.
+ *
+ * @param cameraProperties Collection of characteristics for the current camera device.
+ * @param activity Current Android {@link android.app.Activity}, used to detect UI orientation
+ * changes.
+ * @param dartMessenger Instance of a {@link DartMessenger} used to communicate orientation
+ * updates back to the client.
+ */
+ public SensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger) {
+ super(cameraProperties);
+ setValue(cameraProperties.getSensorOrientation());
+
+ boolean isFrontFacing = cameraProperties.getLensFacing() == CameraMetadata.LENS_FACING_FRONT;
+ deviceOrientationListener =
+ DeviceOrientationManager.create(activity, dartMessenger, isFrontFacing, currentSetting);
+ deviceOrientationListener.start();
+ }
+
+ @Override
+ public String getDebugName() {
+ return "SensorOrientationFeature";
+ }
+
+ @Override
+ public Integer getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(Integer value) {
+ this.currentSetting = value;
+ }
+
+ @Override
+ public boolean checkIsSupported() {
+ return true;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ // Noop: when setting the sensor orientation there is no need to update the request builder.
+ }
+
+ /**
+ * Gets the instance of the {@link DeviceOrientationManager} used to detect orientation changes.
+ *
+ * @return The instance of the {@link DeviceOrientationManager}.
+ */
+ public DeviceOrientationManager getDeviceOrientationManager() {
+ return this.deviceOrientationListener;
+ }
+
+ /**
+ * Lock the capture orientation, indicating that the device orientation should not influence the
+ * capture orientation.
+ *
+ * @param orientation The orientation in which to lock the capture orientation.
+ */
+ public void lockCaptureOrientation(PlatformChannel.DeviceOrientation orientation) {
+ this.lockedCaptureOrientation = orientation;
+ }
+
+ /**
+ * Unlock the capture orientation, indicating that the device orientation should be used to
+ * configure the capture orientation.
+ */
+ public void unlockCaptureOrientation() {
+ this.lockedCaptureOrientation = null;
+ }
+
+ /**
+ * Gets the configured locked capture orientation.
+ *
+ * @return The configured locked capture orientation.
+ */
+ public PlatformChannel.DeviceOrientation getLockedCaptureOrientation() {
+ return this.lockedCaptureOrientation;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/zoomlevel/ZoomLevelFeature.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/zoomlevel/ZoomLevelFeature.java
new file mode 100644
index 0000000..2ac7082
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/zoomlevel/ZoomLevelFeature.java
@@ -0,0 +1,110 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.zoomlevel;
+
+import android.graphics.Rect;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+
/** Controls the zoom configuration on the {@link android.hardware.camera2} API. */
public class ZoomLevelFeature extends CameraFeature<Float> {
  private static final Float DEFAULT_ZOOM_LEVEL = 1.0f;
  // Whether this camera reports a zoom range wider than a single value.
  private final boolean hasSupport;
  // Active array size of the image sensor; used to compute crop regions pre-Android 11.
  private final Rect sensorArraySize;
  private Float currentSetting = DEFAULT_ZOOM_LEVEL;
  private Float minimumZoomLevel = currentSetting;
  private Float maximumZoomLevel;

  /**
   * Creates a new instance of the {@link ZoomLevelFeature}.
   *
   * @param cameraProperties Collection of characteristics for the current camera device.
   */
  public ZoomLevelFeature(CameraProperties cameraProperties) {
    super(cameraProperties);

    sensorArraySize = cameraProperties.getSensorInfoActiveArraySize();

    // Without a sensor array size no crop region can be computed, so zoom is unsupported.
    if (sensorArraySize == null) {
      maximumZoomLevel = minimumZoomLevel;
      hasSupport = false;
      return;
    }
    // On Android 11+ CONTROL_ZOOM_RATIO_RANGE should be use to get the zoom ratio directly as minimum zoom does not have to be 1.0f.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
      minimumZoomLevel = cameraProperties.getScalerMinZoomRatio();
      maximumZoomLevel = cameraProperties.getScalerMaxZoomRatio();
    } else {
      minimumZoomLevel = DEFAULT_ZOOM_LEVEL;
      // Guard against a missing or nonsensical max digital zoom by falling back to the minimum.
      Float maxDigitalZoom = cameraProperties.getScalerAvailableMaxDigitalZoom();
      maximumZoomLevel =
          ((maxDigitalZoom == null) || (maxDigitalZoom < minimumZoomLevel))
              ? minimumZoomLevel
              : maxDigitalZoom;
    }

    // Zoom is only supported when the range spans more than a single value.
    hasSupport = (Float.compare(maximumZoomLevel, minimumZoomLevel) > 0);
  }

  @Override
  public String getDebugName() {
    return "ZoomLevelFeature";
  }

  /** @return The currently configured zoom level. */
  @Override
  public Float getValue() {
    return currentSetting;
  }

  /** @param value The zoom level to apply on the next {@link #updateBuilder} call. */
  @Override
  public void setValue(Float value) {
    currentSetting = value;
  }

  @Override
  public boolean checkIsSupported() {
    return hasSupport;
  }

  @Override
  public void updateBuilder(CaptureRequest.Builder requestBuilder) {
    if (!checkIsSupported()) {
      return;
    }
    // On Android 11+ CONTROL_ZOOM_RATIO can be set to a zoom ratio and the camera feed will compute
    // how to zoom on its own accounting for multiple logical cameras.
    // Prior the image cropping window must be calculated and set manually.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
      requestBuilder.set(
          CaptureRequest.CONTROL_ZOOM_RATIO,
          ZoomUtils.computeZoomRatio(currentSetting, minimumZoomLevel, maximumZoomLevel));
    } else {
      final Rect computedZoom =
          ZoomUtils.computeZoomRect(
              currentSetting, sensorArraySize, minimumZoomLevel, maximumZoomLevel);
      requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, computedZoom);
    }
  }

  /**
   * Gets the minimum supported zoom level.
   *
   * @return The minimum zoom level.
   */
  public float getMinimumZoomLevel() {
    return minimumZoomLevel;
  }

  /**
   * Gets the maximum supported zoom level.
   *
   * @return The maximum zoom level.
   */
  public float getMaximumZoomLevel() {
    return maximumZoomLevel;
  }
}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/zoomlevel/ZoomUtils.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/zoomlevel/ZoomUtils.java
new file mode 100644
index 0000000..af9e48f
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/zoomlevel/ZoomUtils.java
@@ -0,0 +1,45 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.zoomlevel;
+
+import android.graphics.Rect;
+import androidx.annotation.NonNull;
+import androidx.core.math.MathUtils;
+
/**
 * Utility class containing methods that assist with zoom features in the {@link
 * android.hardware.camera2} API.
 */
final class ZoomUtils {

  /**
   * Computes an image sensor area based on the supplied zoom settings.
   *
   * <p>The returned image sensor area can be applied to the {@link android.hardware.camera2} API in
   * order to control zoom levels. This method of zoom should only be used on Android versions below
   * 11 (API 30); on newer versions the {@link #computeZoomRatio(float, float, float)} method can be
   * used with {@code CaptureRequest.CONTROL_ZOOM_RATIO} instead.
   *
   * @param zoom The desired zoom level.
   * @param sensorArraySize The current area of the image sensor.
   * @param minimumZoomLevel The minimum supported zoom level.
   * @param maximumZoomLevel The maximum supported zoom level.
   * @return An image sensor area based on the supplied zoom settings
   */
  static Rect computeZoomRect(
      float zoom, @NonNull Rect sensorArraySize, float minimumZoomLevel, float maximumZoomLevel) {
    final float newZoom = computeZoomRatio(zoom, minimumZoomLevel, maximumZoomLevel);

    // Crop a region centered on the sensor, shrinking each dimension by the zoom factor.
    final int centerX = sensorArraySize.width() / 2;
    final int centerY = sensorArraySize.height() / 2;
    final int deltaX = (int) ((0.5f * sensorArraySize.width()) / newZoom);
    final int deltaY = (int) ((0.5f * sensorArraySize.height()) / newZoom);

    return new Rect(centerX - deltaX, centerY - deltaY, centerX + deltaX, centerY + deltaY);
  }

  /**
   * Clamps the supplied zoom level to the supported range.
   *
   * @param zoom The desired zoom level.
   * @param minimumZoomLevel The minimum supported zoom level.
   * @param maximumZoomLevel The maximum supported zoom level.
   * @return The zoom level clamped to {@code [minimumZoomLevel, maximumZoomLevel]}.
   */
  static Float computeZoomRatio(float zoom, float minimumZoomLevel, float maximumZoomLevel) {
    return MathUtils.clamp(zoom, minimumZoomLevel, maximumZoomLevel);
  }
}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/media/MediaRecorderBuilder.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/media/MediaRecorderBuilder.java
new file mode 100644
index 0000000..1f9f620
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/media/MediaRecorderBuilder.java
@@ -0,0 +1,114 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.media;
+
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import android.media.MediaRecorder;
+import android.os.Build;
+import androidx.annotation.NonNull;
+import java.io.IOException;
+
+public class MediaRecorderBuilder {
+ @SuppressWarnings("deprecation")
+ static class MediaRecorderFactory {
+ MediaRecorder makeMediaRecorder() {
+ return new MediaRecorder();
+ }
+ }
+
+ private final String outputFilePath;
+ private final CamcorderProfile camcorderProfile;
+ private final EncoderProfiles encoderProfiles;
+ private final MediaRecorderFactory recorderFactory;
+
+ private boolean enableAudio;
+ private int mediaOrientation;
+
+ public MediaRecorderBuilder(
+ @NonNull CamcorderProfile camcorderProfile, @NonNull String outputFilePath) {
+ this(camcorderProfile, outputFilePath, new MediaRecorderFactory());
+ }
+
+ public MediaRecorderBuilder(
+ @NonNull EncoderProfiles encoderProfiles, @NonNull String outputFilePath) {
+ this(encoderProfiles, outputFilePath, new MediaRecorderFactory());
+ }
+
+ MediaRecorderBuilder(
+ @NonNull CamcorderProfile camcorderProfile,
+ @NonNull String outputFilePath,
+ MediaRecorderFactory helper) {
+ this.outputFilePath = outputFilePath;
+ this.camcorderProfile = camcorderProfile;
+ this.encoderProfiles = null;
+ this.recorderFactory = helper;
+ }
+
+ MediaRecorderBuilder(
+ @NonNull EncoderProfiles encoderProfiles,
+ @NonNull String outputFilePath,
+ MediaRecorderFactory helper) {
+ this.outputFilePath = outputFilePath;
+ this.encoderProfiles = encoderProfiles;
+ this.camcorderProfile = null;
+ this.recorderFactory = helper;
+ }
+
+ public MediaRecorderBuilder setEnableAudio(boolean enableAudio) {
+ this.enableAudio = enableAudio;
+ return this;
+ }
+
+ public MediaRecorderBuilder setMediaOrientation(int orientation) {
+ this.mediaOrientation = orientation;
+ return this;
+ }
+
+ public MediaRecorder build() throws IOException, NullPointerException, IndexOutOfBoundsException {
+ MediaRecorder mediaRecorder = recorderFactory.makeMediaRecorder();
+
+ // There's a fixed order that mediaRecorder expects. Only change these functions accordingly.
+ // You can find the specifics here: https://developer.android.com/reference/android/media/MediaRecorder.
+ if (enableAudio) mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+ mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && encoderProfiles != null) {
+ EncoderProfiles.VideoProfile videoProfile = encoderProfiles.getVideoProfiles().get(0);
+ EncoderProfiles.AudioProfile audioProfile = encoderProfiles.getAudioProfiles().get(0);
+
+ mediaRecorder.setOutputFormat(encoderProfiles.getRecommendedFileFormat());
+ if (enableAudio) {
+ mediaRecorder.setAudioEncoder(audioProfile.getCodec());
+ mediaRecorder.setAudioEncodingBitRate(audioProfile.getBitrate());
+ mediaRecorder.setAudioSamplingRate(audioProfile.getSampleRate());
+ }
+ mediaRecorder.setVideoEncoder(videoProfile.getCodec());
+ mediaRecorder.setVideoEncodingBitRate(videoProfile.getBitrate());
+ mediaRecorder.setVideoFrameRate(videoProfile.getFrameRate());
+ mediaRecorder.setVideoSize(videoProfile.getWidth(), videoProfile.getHeight());
+ mediaRecorder.setVideoSize(videoProfile.getWidth(), videoProfile.getHeight());
+ } else {
+ mediaRecorder.setOutputFormat(camcorderProfile.fileFormat);
+ if (enableAudio) {
+ mediaRecorder.setAudioEncoder(camcorderProfile.audioCodec);
+ mediaRecorder.setAudioEncodingBitRate(camcorderProfile.audioBitRate);
+ mediaRecorder.setAudioSamplingRate(camcorderProfile.audioSampleRate);
+ }
+ mediaRecorder.setVideoEncoder(camcorderProfile.videoCodec);
+ mediaRecorder.setVideoEncodingBitRate(camcorderProfile.videoBitRate);
+ mediaRecorder.setVideoFrameRate(camcorderProfile.videoFrameRate);
+ mediaRecorder.setVideoSize(
+ camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight);
+ }
+
+ mediaRecorder.setOutputFile(outputFilePath);
+ mediaRecorder.setOrientationHint(this.mediaOrientation);
+
+ mediaRecorder.prepare();
+
+ return mediaRecorder;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/CameraCaptureProperties.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/CameraCaptureProperties.java
new file mode 100644
index 0000000..68177f4
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/CameraCaptureProperties.java
@@ -0,0 +1,67 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
/**
 * Holds the capture metadata most recently observed for the active camera session: lens aperture,
 * sensor exposure time and sensor sensitivity. All values are nullable until first set.
 */
public class CameraCaptureProperties {

  private Float lastLensAperture;
  private Long lastSensorExposureTime;
  private Integer lastSensorSensitivity;

  /**
   * Gets the last known lens aperture. (As f-stop value)
   *
   * @return the last known lens aperture. (As f-stop value)
   */
  public Float getLastLensAperture() {
    return this.lastLensAperture;
  }

  /**
   * Sets the last known lens aperture. (As f-stop value)
   *
   * @param lastLensAperture - The last known lens aperture to set. (As f-stop value)
   */
  public void setLastLensAperture(Float lastLensAperture) {
    this.lastLensAperture = lastLensAperture;
  }

  /**
   * Gets the last known sensor exposure time in nanoseconds.
   *
   * @return the last known sensor exposure time in nanoseconds.
   */
  public Long getLastSensorExposureTime() {
    return this.lastSensorExposureTime;
  }

  /**
   * Sets the last known sensor exposure time in nanoseconds.
   *
   * @param lastSensorExposureTime - The last known sensor exposure time to set, in nanoseconds.
   */
  public void setLastSensorExposureTime(Long lastSensorExposureTime) {
    this.lastSensorExposureTime = lastSensorExposureTime;
  }

  /**
   * Gets the last known sensor sensitivity in ISO arithmetic units.
   *
   * @return the last known sensor sensitivity in ISO arithmetic units.
   */
  public Integer getLastSensorSensitivity() {
    return this.lastSensorSensitivity;
  }

  /**
   * Sets the last known sensor sensitivity in ISO arithmetic units.
   *
   * @param lastSensorSensitivity - The last known sensor sensitivity to set, in ISO arithmetic
   *     units.
   */
  public void setLastSensorSensitivity(Integer lastSensorSensitivity) {
    this.lastSensorSensitivity = lastSensorSensitivity;
  }
}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/CaptureTimeoutsWrapper.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/CaptureTimeoutsWrapper.java
new file mode 100644
index 0000000..ad59bd0
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/CaptureTimeoutsWrapper.java
@@ -0,0 +1,52 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
+/**
+ * Wrapper class that provides a container for all {@link Timeout} instances that are required for
+ * the capture flow.
+ */
+public class CaptureTimeoutsWrapper {
+ private Timeout preCaptureFocusing;
+ private Timeout preCaptureMetering;
+ private final long preCaptureFocusingTimeoutMs;
+ private final long preCaptureMeteringTimeoutMs;
+
+ /**
+ * Create a new wrapper instance with the specified timeout values.
+ *
+ * @param preCaptureFocusingTimeoutMs focusing timeout milliseconds.
+ * @param preCaptureMeteringTimeoutMs metering timeout milliseconds.
+ */
+ public CaptureTimeoutsWrapper(
+ long preCaptureFocusingTimeoutMs, long preCaptureMeteringTimeoutMs) {
+ this.preCaptureFocusingTimeoutMs = preCaptureFocusingTimeoutMs;
+ this.preCaptureMeteringTimeoutMs = preCaptureMeteringTimeoutMs;
+ }
+
+ /** Reset all timeouts to the current timestamp. */
+ public void reset() {
+ this.preCaptureFocusing = Timeout.create(preCaptureFocusingTimeoutMs);
+ this.preCaptureMetering = Timeout.create(preCaptureMeteringTimeoutMs);
+ }
+
+ /**
+ * Returns the timeout instance related to precapture focusing.
+ *
+ * @return - The timeout object
+ */
+ public Timeout getPreCaptureFocusing() {
+ return preCaptureFocusing;
+ }
+
+ /**
+ * Returns the timeout instance related to precapture metering.
+ *
+ * @return - The timeout object
+ */
+ public Timeout getPreCaptureMetering() {
+ return preCaptureMetering;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/ExposureMode.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/ExposureMode.java
new file mode 100644
index 0000000..0bd2394
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/ExposureMode.java
@@ -0,0 +1,29 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
// Mirrors exposure_mode.dart
public enum ExposureMode {
  auto("auto"),
  locked("locked");

  private final String strValue;

  ExposureMode(String strValue) {
    this.strValue = strValue;
  }

  /**
   * Converts the supplied string into its matching {@code ExposureMode}.
   *
   * @param modeStr The string representation of the mode.
   * @return The matching mode, or {@code null} when no mode matches.
   */
  public static ExposureMode getValueForString(String modeStr) {
    for (ExposureMode mode : ExposureMode.values()) {
      if (mode.strValue.equals(modeStr)) {
        return mode;
      }
    }
    return null;
  }

  @Override
  public String toString() {
    return strValue;
  }
}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/FlashMode.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/FlashMode.java
new file mode 100644
index 0000000..d7b6613
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/FlashMode.java
@@ -0,0 +1,31 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
// Mirrors flash_mode.dart
public enum FlashMode {
  off("off"),
  auto("auto"),
  always("always"),
  torch("torch");

  private final String strValue;

  FlashMode(String strValue) {
    this.strValue = strValue;
  }

  /**
   * Converts the supplied string into its matching {@code FlashMode}.
   *
   * @param modeStr The string representation of the mode.
   * @return The matching mode, or {@code null} when no mode matches.
   */
  public static FlashMode getValueForString(String modeStr) {
    for (FlashMode mode : FlashMode.values()) {
      if (mode.strValue.equals(modeStr)) {
        return mode;
      }
    }
    return null;
  }

  @Override
  public String toString() {
    return strValue;
  }
}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/FocusMode.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/FocusMode.java
new file mode 100644
index 0000000..c879593
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/FocusMode.java
@@ -0,0 +1,29 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
// Mirrors focus_mode.dart
public enum FocusMode {
  auto("auto"),
  locked("locked");

  private final String strValue;

  FocusMode(String strValue) {
    this.strValue = strValue;
  }

  /**
   * Converts the supplied string into its matching {@code FocusMode}.
   *
   * @param modeStr The string representation of the mode.
   * @return The matching mode, or {@code null} when no mode matches.
   */
  public static FocusMode getValueForString(String modeStr) {
    for (FocusMode mode : FocusMode.values()) {
      if (mode.strValue.equals(modeStr)) {
        return mode;
      }
    }
    return null;
  }

  @Override
  public String toString() {
    return strValue;
  }
}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/ResolutionPreset.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/ResolutionPreset.java
new file mode 100644
index 0000000..a70d856
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/ResolutionPreset.java
@@ -0,0 +1,15 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
// Mirrors camera.dart
/** Resolution presets, ordered from lowest to highest quality (mirrors camera.dart). */
public enum ResolutionPreset {
  low,
  medium,
  high,
  veryHigh,
  ultraHigh,
  // Highest resolution the device supports.
  max,
}
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/Timeout.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/Timeout.java
new file mode 100644
index 0000000..67e0549
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/types/Timeout.java
@@ -0,0 +1,51 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
+import android.os.SystemClock;
+
+/**
+ * This is a simple class for managing a timeout. In the camera we generally keep two timeouts: one
+ * for focusing and one for pre-capture metering.
+ *
+ * <p>We use timeouts to ensure a picture is always captured within a reasonable amount of time even
+ * if the settings don't converge and focus can't be locked.
+ *
+ * <p>You generally check the status of the timeout in the CameraCaptureCallback during the capture
+ * sequence and use it to move to the next state if the timeout has passed.
+ */
+public class Timeout {
+
+ /** The timeout time in milliseconds */
+ private final long timeoutMs;
+
+ /** When this timeout was started. Will be used later to check if the timeout has expired yet. */
+ private final long timeStarted;
+
+ /**
+ * Factory method to create a new Timeout.
+ *
+ * @param timeoutMs timeout to use.
+ * @return returns a new Timeout.
+ */
+ public static Timeout create(long timeoutMs) {
+ return new Timeout(timeoutMs);
+ }
+
+ /**
+ * Create a new timeout.
+ *
+ * @param timeoutMs the time in milliseconds for this timeout to lapse.
+ */
+ private Timeout(long timeoutMs) {
+ this.timeoutMs = timeoutMs;
+ this.timeStarted = SystemClock.elapsedRealtime();
+ }
+
+ /** Will return true when the timeout period has lapsed. */
+ public boolean getIsExpired() {
+ return (SystemClock.elapsedRealtime() - timeStarted) > timeoutMs;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackStatesTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackStatesTest.java
new file mode 100644
index 0000000..934aff8
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackStatesTest.java
@@ -0,0 +1,381 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.CaptureResult.Key;
+import android.hardware.camera2.TotalCaptureResult;
+import io.flutter.plugins.camera.CameraCaptureCallback.CameraCaptureStateListener;
+import io.flutter.plugins.camera.types.CameraCaptureProperties;
+import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper;
+import io.flutter.plugins.camera.types.Timeout;
+import io.flutter.plugins.camera.utils.TestUtils;
+import java.util.HashMap;
+import java.util.Map;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+import org.mockito.MockedStatic;
+
+public class CameraCaptureCallbackStatesTest extends TestCase {
+ // Parameters for one generated case: the AF/AE states the mocked capture result reports, the
+ // capture state-machine state under test, and whether the relevant Timeout reports expiry.
+ private final Integer aeState;
+ private final Integer afState;
+ private final CameraState cameraState;
+ private final boolean isTimedOut;
+
+ // Assertion body for this case; assigned by the setUp*Tests factory methods below and executed
+ // at the end of runTest().
+ private Runnable validate;
+
+ private CameraCaptureCallback cameraCaptureCallback;
+ private CameraCaptureStateListener mockCaptureStateListener;
+ private CameraCaptureSession mockCameraCaptureSession;
+ private CaptureRequest mockCaptureRequest;
+ private CaptureResult mockPartialCaptureResult;
+ private CaptureTimeoutsWrapper mockCaptureTimeouts;
+ private CameraCaptureProperties mockCaptureProps;
+ private TotalCaptureResult mockTotalCaptureResult;
+ private MockedStatic<Timeout> mockedStaticTimeout;
+ private Timeout mockTimeout;
+
+ /**
+  * JUnit 3 suite entry point: builds one parameterized test instance per (camera state,
+  * AF state, AE state, timed-out) combination.
+  *
+  * @return the populated suite.
+  */
+ public static TestSuite suite() {
+ TestSuite suite = new TestSuite();
+
+ setUpPreviewStateTest(suite);
+ setUpWaitingFocusTests(suite);
+ setUpWaitingPreCaptureStartTests(suite);
+ setUpWaitingPreCaptureDoneTests(suite);
+
+ return suite;
+ }
+
+ /** Convenience constructor for cases in which the timeout has not expired. */
+ protected CameraCaptureCallbackStatesTest(
+ String name, CameraState cameraState, Integer afState, Integer aeState) {
+ this(name, cameraState, afState, aeState, false);
+ }
+
+ /**
+  * Creates one parameterized case.
+  *
+  * @param name test name reported by JUnit.
+  * @param cameraState state-machine state to seed the callback with before processing.
+  * @param afState auto-focus state the mocked capture results report (may be null).
+  * @param aeState auto-exposure state the mocked capture results report (may be null).
+  * @param isTimedOut whether the mocked Timeout should report expiry.
+  */
+ protected CameraCaptureCallbackStatesTest(
+ String name, CameraState cameraState, Integer afState, Integer aeState, boolean isTimedOut) {
+ super(name);
+
+ this.aeState = aeState;
+ this.afState = afState;
+ this.cameraState = cameraState;
+ this.isTimedOut = isTimedOut;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ // Static mock of the Timeout factory; must be closed again in tearDown().
+ mockedStaticTimeout = mockStatic(Timeout.class);
+ mockCaptureStateListener = mock(CameraCaptureStateListener.class);
+ mockCameraCaptureSession = mock(CameraCaptureSession.class);
+ mockCaptureRequest = mock(CaptureRequest.class);
+ mockPartialCaptureResult = mock(CaptureResult.class);
+ mockTotalCaptureResult = mock(TotalCaptureResult.class);
+ mockTimeout = mock(Timeout.class);
+ mockCaptureTimeouts = mock(CaptureTimeoutsWrapper.class);
+ mockCaptureProps = mock(CameraCaptureProperties.class);
+ // Both focusing and metering timeouts resolve to the same mock so a single
+ // getIsExpired() stub covers either path.
+ when(mockCaptureTimeouts.getPreCaptureFocusing()).thenReturn(mockTimeout);
+ when(mockCaptureTimeouts.getPreCaptureMetering()).thenReturn(mockTimeout);
+
+ Key<Integer> mockAeStateKey = mock(Key.class);
+ Key<Integer> mockAfStateKey = mock(Key.class);
+
+ // Swap the static CaptureResult keys for mocks via reflection so result.get(...) can be
+ // stubbed without an Android runtime; undone in tearDown().
+ TestUtils.setFinalStatic(CaptureResult.class, "CONTROL_AE_STATE", mockAeStateKey);
+ TestUtils.setFinalStatic(CaptureResult.class, "CONTROL_AF_STATE", mockAfStateKey);
+
+ // NOTE(review): assumes the callback obtains its timeouts via Timeout.create(1000) —
+ // confirm against CameraCaptureCallback's implementation.
+ mockedStaticTimeout.when(() -> Timeout.create(1000)).thenReturn(mockTimeout);
+
+ cameraCaptureCallback =
+ CameraCaptureCallback.create(
+ mockCaptureStateListener, mockCaptureTimeouts, mockCaptureProps);
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+
+ mockedStaticTimeout.close();
+
+ // The static keys are reset to null rather than restored to a saved original value.
+ TestUtils.setFinalStatic(CaptureResult.class, "CONTROL_AE_STATE", null);
+ TestUtils.setFinalStatic(CaptureResult.class, "CONTROL_AF_STATE", null);
+ }
+
+ // Runs one parameterized case: stubs the AF/AE states on both mocked results, seeds the
+ // camera state, then feeds the callback a completed result (timed-out path) or a partial
+ // result (normal path), and finally executes this case's validate assertions.
+ @Override
+ protected void runTest() throws Throwable {
+ when(mockPartialCaptureResult.get(CaptureResult.CONTROL_AF_STATE)).thenReturn(afState);
+ when(mockPartialCaptureResult.get(CaptureResult.CONTROL_AE_STATE)).thenReturn(aeState);
+ when(mockTotalCaptureResult.get(CaptureResult.CONTROL_AF_STATE)).thenReturn(afState);
+ when(mockTotalCaptureResult.get(CaptureResult.CONTROL_AE_STATE)).thenReturn(aeState);
+
+ cameraCaptureCallback.setCameraState(cameraState);
+ if (isTimedOut) {
+ when(mockTimeout.getIsExpired()).thenReturn(true);
+ cameraCaptureCallback.onCaptureCompleted(
+ mockCameraCaptureSession, mockCaptureRequest, mockTotalCaptureResult);
+ } else {
+ cameraCaptureCallback.onCaptureProgressed(
+ mockCameraCaptureSession, mockCaptureRequest, mockPartialCaptureResult);
+ }
+
+ validate.run();
+ }
+
+  /**
+   * In STATE_PREVIEW the callback must neither converge nor start pre-capture, and the camera
+   * state must remain STATE_PREVIEW.
+   */
+  private static void setUpPreviewStateTest(TestSuite suite) {
+    CameraCaptureCallbackStatesTest previewStateTest =
+        new CameraCaptureCallbackStatesTest(
+            "process_should_not_converge_or_pre_capture_when_state_is_preview",
+            CameraState.STATE_PREVIEW,
+            null,
+            null);
+    previewStateTest.validate =
+        () -> {
+          verify(previewStateTest.mockCaptureStateListener, never()).onConverged();
+          // Fixed copy/paste bug: the second verification previously repeated onConverged()
+          // instead of asserting that onPrecapture() was never invoked.
+          verify(previewStateTest.mockCaptureStateListener, never()).onPrecapture();
+          assertEquals(
+              CameraState.STATE_PREVIEW, previewStateTest.cameraCaptureCallback.getCameraState());
+        };
+    suite.addTest(previewStateTest);
+  }
+
+  /**
+   * Adds the STATE_WAITING_FOCUS cases.
+   *
+   * <p>With focus locked (actionable AF states) the callback must either converge or start the
+   * pre-capture sequence depending on the AE state; with non-actionable AF states it must do
+   * nothing; and once the focus timeout has expired it must act regardless of the AF state.
+   */
+  private static void setUpWaitingFocusTests(TestSuite suite) {
+    Integer[] actionableAfStates =
+        new Integer[] {
+          CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED,
+          CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
+        };
+
+    Integer[] nonActionableAfStates =
+        new Integer[] {
+          CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN,
+          CaptureResult.CONTROL_AF_STATE_INACTIVE,
+          CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED,
+          CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN,
+          CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED
+        };
+
+    // Maps each AE state to whether the callback is expected to converge (true) or to run the
+    // pre-capture sequence (false) once focus is locked.
+    Map<Integer, Boolean> aeStatesConvergeMap =
+        new HashMap<Integer, Boolean>() {
+          {
+            put(null, true);
+            put(CaptureResult.CONTROL_AE_STATE_CONVERGED, true);
+            put(CaptureResult.CONTROL_AE_STATE_PRECAPTURE, false);
+            put(CaptureResult.CONTROL_AE_STATE_LOCKED, false);
+            put(CaptureResult.CONTROL_AE_STATE_SEARCHING, false);
+            put(CaptureResult.CONTROL_AE_STATE_INACTIVE, false);
+            put(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED, false);
+          }
+        };
+
+    CameraCaptureCallbackStatesTest nullStateTest =
+        new CameraCaptureCallbackStatesTest(
+            "process_should_not_converge_or_pre_capture_when_afstate_is_null",
+            CameraState.STATE_WAITING_FOCUS,
+            null,
+            null);
+    nullStateTest.validate =
+        () -> {
+          verify(nullStateTest.mockCaptureStateListener, never()).onConverged();
+          // Fixed copy/paste bug: the second verification previously repeated onConverged()
+          // instead of asserting that onPrecapture() was never invoked.
+          verify(nullStateTest.mockCaptureStateListener, never()).onPrecapture();
+          assertEquals(
+              CameraState.STATE_WAITING_FOCUS,
+              nullStateTest.cameraCaptureCallback.getCameraState());
+        };
+    suite.addTest(nullStateTest);
+
+    for (Integer afState : actionableAfStates) {
+      aeStatesConvergeMap.forEach(
+          (aeState, shouldConverge) -> {
+            CameraCaptureCallbackStatesTest focusLockedTest =
+                new CameraCaptureCallbackStatesTest(
+                    "process_should_converge_when_af_state_is_"
+                        + afState
+                        + "_and_ae_state_is_"
+                        + aeState,
+                    CameraState.STATE_WAITING_FOCUS,
+                    afState,
+                    aeState);
+            focusLockedTest.validate =
+                () -> {
+                  if (shouldConverge) {
+                    verify(focusLockedTest.mockCaptureStateListener, times(1)).onConverged();
+                    verify(focusLockedTest.mockCaptureStateListener, never()).onPrecapture();
+                  } else {
+                    verify(focusLockedTest.mockCaptureStateListener, times(1)).onPrecapture();
+                    verify(focusLockedTest.mockCaptureStateListener, never()).onConverged();
+                  }
+                  assertEquals(
+                      CameraState.STATE_WAITING_FOCUS,
+                      focusLockedTest.cameraCaptureCallback.getCameraState());
+                };
+            suite.addTest(focusLockedTest);
+          });
+    }
+
+    for (Integer afState : nonActionableAfStates) {
+      CameraCaptureCallbackStatesTest focusLockedTest =
+          new CameraCaptureCallbackStatesTest(
+              "process_should_do_nothing_when_af_state_is_" + afState,
+              CameraState.STATE_WAITING_FOCUS,
+              afState,
+              null);
+      focusLockedTest.validate =
+          () -> {
+            verify(focusLockedTest.mockCaptureStateListener, never()).onConverged();
+            verify(focusLockedTest.mockCaptureStateListener, never()).onPrecapture();
+            assertEquals(
+                CameraState.STATE_WAITING_FOCUS,
+                focusLockedTest.cameraCaptureCallback.getCameraState());
+          };
+      suite.addTest(focusLockedTest);
+    }
+
+    // Timed-out variants: once the focus timeout has lapsed, the AE-state expectations apply
+    // even for AF states that are otherwise non-actionable.
+    for (Integer afState : nonActionableAfStates) {
+      aeStatesConvergeMap.forEach(
+          (aeState, shouldConverge) -> {
+            CameraCaptureCallbackStatesTest focusLockedTest =
+                new CameraCaptureCallbackStatesTest(
+                    // "_and_is_timed_out" keeps these names distinct from the non-timed cases.
+                    "process_should_converge_when_af_state_is_"
+                        + afState
+                        + "_and_ae_state_is_"
+                        + aeState
+                        + "_and_is_timed_out",
+                    CameraState.STATE_WAITING_FOCUS,
+                    afState,
+                    aeState,
+                    true);
+            focusLockedTest.validate =
+                () -> {
+                  if (shouldConverge) {
+                    verify(focusLockedTest.mockCaptureStateListener, times(1)).onConverged();
+                    verify(focusLockedTest.mockCaptureStateListener, never()).onPrecapture();
+                  } else {
+                    verify(focusLockedTest.mockCaptureStateListener, times(1)).onPrecapture();
+                    verify(focusLockedTest.mockCaptureStateListener, never()).onConverged();
+                  }
+                  assertEquals(
+                      CameraState.STATE_WAITING_FOCUS,
+                      focusLockedTest.cameraCaptureCallback.getCameraState());
+                };
+            suite.addTest(focusLockedTest);
+          });
+    }
+  }
+
+  /**
+   * Adds the STATE_WAITING_PRECAPTURE_START cases: some AE states advance the state machine to
+   * STATE_WAITING_PRECAPTURE_DONE while others keep it waiting — unless the metering timeout has
+   * expired, in which case the state always advances.
+   */
+  private static void setUpWaitingPreCaptureStartTests(TestSuite suite) {
+    Map<Integer, CameraState> cameraStateMap =
+        new HashMap<Integer, CameraState>() {
+          {
+            put(null, CameraState.STATE_WAITING_PRECAPTURE_DONE);
+            put(
+                CaptureResult.CONTROL_AE_STATE_INACTIVE,
+                CameraState.STATE_WAITING_PRECAPTURE_START);
+            put(
+                CaptureResult.CONTROL_AE_STATE_SEARCHING,
+                CameraState.STATE_WAITING_PRECAPTURE_START);
+            put(
+                CaptureResult.CONTROL_AE_STATE_CONVERGED,
+                CameraState.STATE_WAITING_PRECAPTURE_DONE);
+            put(CaptureResult.CONTROL_AE_STATE_LOCKED, CameraState.STATE_WAITING_PRECAPTURE_START);
+            put(
+                CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED,
+                CameraState.STATE_WAITING_PRECAPTURE_DONE);
+            put(
+                CaptureResult.CONTROL_AE_STATE_PRECAPTURE,
+                CameraState.STATE_WAITING_PRECAPTURE_DONE);
+          }
+        };
+
+    cameraStateMap.forEach(
+        (aeState, cameraState) -> {
+          CameraCaptureCallbackStatesTest testCase =
+              new CameraCaptureCallbackStatesTest(
+                  // Fixed misleading name: the expected state comes from the map and is not
+                  // always STATE_WAITING_PRECAPTURE_DONE.
+                  "process_should_update_camera_state_when_ae_state_is_" + aeState,
+                  CameraState.STATE_WAITING_PRECAPTURE_START,
+                  null,
+                  aeState);
+          testCase.validate =
+              () -> assertEquals(cameraState, testCase.cameraCaptureCallback.getCameraState());
+          suite.addTest(testCase);
+        });
+
+    cameraStateMap.forEach(
+        (aeState, cameraState) -> {
+          // Only AE states that would otherwise keep the state machine waiting are interesting
+          // for the timed-out path.
+          if (cameraState == CameraState.STATE_WAITING_PRECAPTURE_DONE) {
+            return;
+          }
+
+          CameraCaptureCallbackStatesTest testCase =
+              new CameraCaptureCallbackStatesTest(
+                  // "_and_is_timed_out" keeps these names distinct from the cases added above.
+                  "process_should_update_camera_state_to_waiting_pre_capture_done_when_ae_state_is_"
+                      + aeState
+                      + "_and_is_timed_out",
+                  CameraState.STATE_WAITING_PRECAPTURE_START,
+                  null,
+                  aeState,
+                  true);
+          testCase.validate =
+              () ->
+                  assertEquals(
+                      CameraState.STATE_WAITING_PRECAPTURE_DONE,
+                      testCase.cameraCaptureCallback.getCameraState());
+          suite.addTest(testCase);
+        });
+  }
+
+  /**
+   * Adds the STATE_WAITING_PRECAPTURE_DONE cases: every AE state except PRECAPTURE must trigger
+   * convergence, and PRECAPTURE must trigger it only once the timeout has expired.
+   */
+  private static void setUpWaitingPreCaptureDoneTests(TestSuite suite) {
+    Integer[] onConvergeStates =
+        new Integer[] {
+          null,
+          CaptureResult.CONTROL_AE_STATE_CONVERGED,
+          CaptureResult.CONTROL_AE_STATE_LOCKED,
+          CaptureResult.CONTROL_AE_STATE_SEARCHING,
+          CaptureResult.CONTROL_AE_STATE_INACTIVE,
+          CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED,
+        };
+
+    for (Integer aeState : onConvergeStates) {
+      CameraCaptureCallbackStatesTest shouldConvergeTest =
+          new CameraCaptureCallbackStatesTest(
+              "process_should_converge_when_ae_state_is_" + aeState,
+              CameraState.STATE_WAITING_PRECAPTURE_DONE,
+              null,
+              null);
+      shouldConvergeTest.validate =
+          () -> verify(shouldConvergeTest.mockCaptureStateListener, times(1)).onConverged();
+      suite.addTest(shouldConvergeTest);
+    }
+
+    CameraCaptureCallbackStatesTest shouldNotConvergeTest =
+        new CameraCaptureCallbackStatesTest(
+            "process_should_not_converge_when_ae_state_is_pre_capture",
+            CameraState.STATE_WAITING_PRECAPTURE_DONE,
+            null,
+            CaptureResult.CONTROL_AE_STATE_PRECAPTURE);
+    shouldNotConvergeTest.validate =
+        () -> verify(shouldNotConvergeTest.mockCaptureStateListener, never()).onConverged();
+    suite.addTest(shouldNotConvergeTest);
+
+    CameraCaptureCallbackStatesTest shouldConvergeWhenTimedOutTest =
+        new CameraCaptureCallbackStatesTest(
+            // Fixed copy/pasted name: this case asserts convergence once timed out; the original
+            // reused the "should_not_converge" name of the test above.
+            "process_should_converge_when_ae_state_is_pre_capture_and_is_timed_out",
+            CameraState.STATE_WAITING_PRECAPTURE_DONE,
+            null,
+            CaptureResult.CONTROL_AE_STATE_PRECAPTURE,
+            true);
+    shouldConvergeWhenTimedOutTest.validate =
+        () ->
+            verify(shouldConvergeWhenTimedOutTest.mockCaptureStateListener, times(1)).onConverged();
+    suite.addTest(shouldConvergeWhenTimedOutTest);
+  }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackTest.java
new file mode 100644
index 0000000..75a5b25
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackTest.java
@@ -0,0 +1,72 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.mockito.ArgumentMatchers.anyFloat;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import io.flutter.plugins.camera.types.CameraCaptureProperties;
+import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.RobolectricTestRunner;
+
+@RunWith(RobolectricTestRunner.class)
+public class CameraCaptureCallbackTest {
+
+  private CameraCaptureCallback cameraCaptureCallback;
+  private CameraCaptureProperties mockCaptureProps;
+
+  /** Builds a callback wired to a mocked listener, mocked timeouts, and capture properties. */
+  @Before
+  public void setUp() {
+    mockCaptureProps = mock(CameraCaptureProperties.class);
+    CaptureTimeoutsWrapper mockTimeouts = mock(CaptureTimeoutsWrapper.class);
+    CameraCaptureCallback.CameraCaptureStateListener mockListener =
+        mock(CameraCaptureCallback.CameraCaptureStateListener.class);
+    cameraCaptureCallback =
+        CameraCaptureCallback.create(mockListener, mockTimeouts, mockCaptureProps);
+  }
+
+  /** A partial result must not be written into the capture properties. */
+  @Test
+  public void onCaptureProgressed_doesNotUpdateCameraCaptureProperties() {
+    CaptureResult partialResult = mock(CaptureResult.class);
+
+    cameraCaptureCallback.onCaptureProgressed(
+        mock(CameraCaptureSession.class), mock(CaptureRequest.class), partialResult);
+
+    verify(mockCaptureProps, never()).setLastLensAperture(anyFloat());
+    verify(mockCaptureProps, never()).setLastSensorExposureTime(anyLong());
+    verify(mockCaptureProps, never()).setLastSensorSensitivity(anyInt());
+  }
+
+  /** A completed result must propagate aperture, exposure time, and sensitivity. */
+  @Test
+  public void onCaptureCompleted_updatesCameraCaptureProperties() {
+    TotalCaptureResult completedResult = mock(TotalCaptureResult.class);
+    when(completedResult.get(CaptureResult.LENS_APERTURE)).thenReturn(1.0f);
+    when(completedResult.get(CaptureResult.SENSOR_EXPOSURE_TIME)).thenReturn(2L);
+    when(completedResult.get(CaptureResult.SENSOR_SENSITIVITY)).thenReturn(3);
+
+    cameraCaptureCallback.onCaptureCompleted(
+        mock(CameraCaptureSession.class), mock(CaptureRequest.class), completedResult);
+
+    verify(mockCaptureProps, times(1)).setLastLensAperture(1.0f);
+    verify(mockCaptureProps, times(1)).setLastSensorExposureTime(2L);
+    verify(mockCaptureProps, times(1)).setLastSensorSensitivity(3);
+  }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraPermissionsTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraPermissionsTest.java
new file mode 100644
index 0000000..575ec8c
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraPermissionsTest.java
@@ -0,0 +1,89 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static junit.framework.TestCase.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+
+import android.content.pm.PackageManager;
+import io.flutter.plugins.camera.CameraPermissions.CameraRequestPermissionsListener;
+import io.flutter.plugins.camera.CameraPermissions.ResultCallback;
+import org.junit.Test;
+
+public class CameraPermissionsTest {
+ @Test
+ public void listener_respondsOnce() {
+ final int[] calledCounter = {0};
+ CameraRequestPermissionsListener permissionsListener =
+ new CameraRequestPermissionsListener((String code, String desc) -> calledCounter[0]++);
+
+ permissionsListener.onRequestPermissionsResult(
+ 9796, null, new int[] {PackageManager.PERMISSION_DENIED});
+ permissionsListener.onRequestPermissionsResult(
+ 9796, null, new int[] {PackageManager.PERMISSION_GRANTED});
+
+ assertEquals(1, calledCounter[0]);
+ }
+
+ @Test
+ public void callback_respondsWithCameraAccessDenied() {
+ ResultCallback fakeResultCallback = mock(ResultCallback.class);
+ CameraRequestPermissionsListener permissionsListener =
+ new CameraRequestPermissionsListener(fakeResultCallback);
+
+ permissionsListener.onRequestPermissionsResult(
+ 9796, null, new int[] {PackageManager.PERMISSION_DENIED});
+
+ verify(fakeResultCallback)
+ .onResult("CameraAccessDenied", "Camera access permission was denied.");
+ }
+
+ @Test
+ public void callback_respondsWithAudioAccessDenied() {
+ ResultCallback fakeResultCallback = mock(ResultCallback.class);
+ CameraRequestPermissionsListener permissionsListener =
+ new CameraRequestPermissionsListener(fakeResultCallback);
+
+ permissionsListener.onRequestPermissionsResult(
+ 9796,
+ null,
+ new int[] {PackageManager.PERMISSION_GRANTED, PackageManager.PERMISSION_DENIED});
+
+ verify(fakeResultCallback).onResult("AudioAccessDenied", "Audio access permission was denied.");
+ }
+
+ @Test
+ public void callback_doesNotRespond() {
+ ResultCallback fakeResultCallback = mock(ResultCallback.class);
+ CameraRequestPermissionsListener permissionsListener =
+ new CameraRequestPermissionsListener(fakeResultCallback);
+
+ permissionsListener.onRequestPermissionsResult(
+ 9796,
+ null,
+ new int[] {PackageManager.PERMISSION_GRANTED, PackageManager.PERMISSION_GRANTED});
+
+ verify(fakeResultCallback, never())
+ .onResult("CameraAccessDenied", "Camera access permission was denied.");
+ verify(fakeResultCallback, never())
+ .onResult("AudioAccessDenied", "Audio access permission was denied.");
+ }
+
+ @Test
+ public void callback_respondsWithCameraAccessDeniedWhenEmptyResult() {
+ // Handles the case where the grantResults array is empty
+
+ ResultCallback fakeResultCallback = mock(ResultCallback.class);
+ CameraRequestPermissionsListener permissionsListener =
+ new CameraRequestPermissionsListener(fakeResultCallback);
+
+ permissionsListener.onRequestPermissionsResult(9796, null, new int[] {});
+
+ verify(fakeResultCallback)
+ .onResult("CameraAccessDenied", "Camera access permission was denied.");
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraPropertiesImplTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraPropertiesImplTest.java
new file mode 100644
index 0000000..c61be04
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraPropertiesImplTest.java
@@ -0,0 +1,303 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.graphics.Rect;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.util.Range;
+import android.util.Rational;
+import android.util.Size;
+import org.junit.Before;
+import org.junit.Test;
+
+public class CameraPropertiesImplTest {
+ private static final String CAMERA_NAME = "test_camera";
+ private final CameraCharacteristics mockCharacteristics = mock(CameraCharacteristics.class);
+ private final CameraManager mockCameraManager = mock(CameraManager.class);
+
+ private CameraPropertiesImpl cameraProperties;
+
+ @Before
+ public void before() {
+ try {
+ when(mockCameraManager.getCameraCharacteristics(CAMERA_NAME)).thenReturn(mockCharacteristics);
+ cameraProperties = new CameraPropertiesImpl(CAMERA_NAME, mockCameraManager);
+ } catch (CameraAccessException e) {
+ fail();
+ }
+ }
+
+ @Test
+ public void ctor_shouldReturnValidInstance() throws CameraAccessException {
+ verify(mockCameraManager, times(1)).getCameraCharacteristics(CAMERA_NAME);
+ assertNotNull(cameraProperties);
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void getControlAutoExposureAvailableTargetFpsRangesTest() {
+ Range<Integer> mockRange = mock(Range.class);
+ Range<Integer>[] mockRanges = new Range[] {mockRange};
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES))
+ .thenReturn(mockRanges);
+
+ Range<Integer>[] actualRanges =
+ cameraProperties.getControlAutoExposureAvailableTargetFpsRanges();
+
+ verify(mockCharacteristics, times(1))
+ .get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ assertArrayEquals(actualRanges, mockRanges);
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void getControlAutoExposureCompensationRangeTest() {
+ Range<Integer> mockRange = mock(Range.class);
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE))
+ .thenReturn(mockRange);
+
+ Range<Integer> actualRange = cameraProperties.getControlAutoExposureCompensationRange();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
+ assertEquals(actualRange, mockRange);
+ }
+
+ @Test
+ public void getControlAutoExposureCompensationStep_shouldReturnDoubleWhenRationalIsNotNull() {
+ double expectedStep = 3.1415926535;
+ Rational mockRational = mock(Rational.class);
+
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP))
+ .thenReturn(mockRational);
+ when(mockRational.doubleValue()).thenReturn(expectedStep);
+
+ double actualSteps = cameraProperties.getControlAutoExposureCompensationStep();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
+ assertEquals(actualSteps, expectedStep, 0);
+ }
+
+ @Test
+ public void getControlAutoExposureCompensationStep_shouldReturnZeroWhenRationalIsNull() {
+ double expectedStep = 0.0;
+
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP))
+ .thenReturn(null);
+
+ double actualSteps = cameraProperties.getControlAutoExposureCompensationStep();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
+ assertEquals(actualSteps, expectedStep, 0);
+ }
+
+ @Test
+ public void getControlAutoFocusAvailableModesTest() {
+ int[] expectedAutoFocusModes = new int[] {0, 1, 2};
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES))
+ .thenReturn(expectedAutoFocusModes);
+
+ int[] actualAutoFocusModes = cameraProperties.getControlAutoFocusAvailableModes();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+ assertEquals(actualAutoFocusModes, expectedAutoFocusModes);
+ }
+
+ @Test
+ public void getControlMaxRegionsAutoExposureTest() {
+ int expectedRegions = 42;
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE))
+ .thenReturn(expectedRegions);
+
+ int actualRegions = cameraProperties.getControlMaxRegionsAutoExposure();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+ assertEquals(actualRegions, expectedRegions);
+ }
+
+ @Test
+ public void getControlMaxRegionsAutoFocusTest() {
+ int expectedRegions = 42;
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF))
+ .thenReturn(expectedRegions);
+
+ int actualRegions = cameraProperties.getControlMaxRegionsAutoFocus();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+ assertEquals(actualRegions, expectedRegions);
+ }
+
+ @Test
+ public void getDistortionCorrectionAvailableModesTest() {
+ int[] expectedCorrectionModes = new int[] {0, 1, 2};
+ when(mockCharacteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES))
+ .thenReturn(expectedCorrectionModes);
+
+ int[] actualCorrectionModes = cameraProperties.getDistortionCorrectionAvailableModes();
+
+ verify(mockCharacteristics, times(1))
+ .get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES);
+ assertEquals(actualCorrectionModes, expectedCorrectionModes);
+ }
+
+ @Test
+ public void getFlashInfoAvailableTest() {
+ boolean expectedAvailability = true;
+ when(mockCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE))
+ .thenReturn(expectedAvailability);
+
+ boolean actualAvailability = cameraProperties.getFlashInfoAvailable();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
+ assertEquals(actualAvailability, expectedAvailability);
+ }
+
+ @Test
+ public void getLensFacingTest() {
+ int expectedFacing = 42;
+ when(mockCharacteristics.get(CameraCharacteristics.LENS_FACING)).thenReturn(expectedFacing);
+
+ int actualFacing = cameraProperties.getLensFacing();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.LENS_FACING);
+ assertEquals(actualFacing, expectedFacing);
+ }
+
+ @Test
+ public void getLensInfoMinimumFocusDistanceTest() {
+ Float expectedFocusDistance = new Float(3.14);
+ when(mockCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE))
+ .thenReturn(expectedFocusDistance);
+
+ Float actualFocusDistance = cameraProperties.getLensInfoMinimumFocusDistance();
+
+ verify(mockCharacteristics, times(1))
+ .get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+ assertEquals(actualFocusDistance, expectedFocusDistance);
+ }
+
+ @Test
+ public void getScalerAvailableMaxDigitalZoomTest() {
+ Float expectedDigitalZoom = new Float(3.14);
+ when(mockCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM))
+ .thenReturn(expectedDigitalZoom);
+
+ Float actualDigitalZoom = cameraProperties.getScalerAvailableMaxDigitalZoom();
+
+ verify(mockCharacteristics, times(1))
+ .get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+ assertEquals(actualDigitalZoom, expectedDigitalZoom);
+ }
+
+ @Test
+ public void getScalerGetScalerMinZoomRatioTest() {
+ Range zoomRange = mock(Range.class);
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE))
+ .thenReturn(zoomRange);
+
+ Float minZoom = cameraProperties.getScalerMinZoomRatio();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
+ assertEquals(zoomRange.getLower(), minZoom);
+ }
+
+ @Test
+ public void getScalerGetScalerMaxZoomRatioTest() {
+ Range zoomRange = mock(Range.class);
+ when(mockCharacteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE))
+ .thenReturn(zoomRange);
+
+ Float maxZoom = cameraProperties.getScalerMaxZoomRatio();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
+ assertEquals(zoomRange.getUpper(), maxZoom);
+ }
+
+ @Test
+ public void getSensorInfoActiveArraySizeTest() {
+ Rect expectedArraySize = mock(Rect.class);
+ when(mockCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE))
+ .thenReturn(expectedArraySize);
+
+ Rect actualArraySize = cameraProperties.getSensorInfoActiveArraySize();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ assertEquals(actualArraySize, expectedArraySize);
+ }
+
+ @Test
+ public void getSensorInfoPixelArraySizeTest() {
+ Size expectedArraySize = mock(Size.class);
+ when(mockCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE))
+ .thenReturn(expectedArraySize);
+
+ Size actualArraySize = cameraProperties.getSensorInfoPixelArraySize();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
+ assertEquals(actualArraySize, expectedArraySize);
+ }
+
+ @Test
+ public void getSensorInfoPreCorrectionActiveArraySize() {
+ Rect expectedArraySize = mock(Rect.class);
+ when(mockCharacteristics.get(
+ CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE))
+ .thenReturn(expectedArraySize);
+
+ Rect actualArraySize = cameraProperties.getSensorInfoPreCorrectionActiveArraySize();
+
+ verify(mockCharacteristics, times(1))
+ .get(CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
+ assertEquals(actualArraySize, expectedArraySize);
+ }
+
+ @Test
+ public void getSensorOrientationTest() {
+ int expectedOrientation = 42;
+ when(mockCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION))
+ .thenReturn(expectedOrientation);
+
+ int actualOrientation = cameraProperties.getSensorOrientation();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.SENSOR_ORIENTATION);
+ assertEquals(actualOrientation, expectedOrientation);
+ }
+
+ @Test
+ public void getHardwareLevelTest() {
+ int expectedLevel = 42;
+ when(mockCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL))
+ .thenReturn(expectedLevel);
+
+ int actualLevel = cameraProperties.getHardwareLevel();
+
+ verify(mockCharacteristics, times(1)).get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+ assertEquals(actualLevel, expectedLevel);
+ }
+
+ @Test
+ public void getAvailableNoiseReductionModesTest() {
+ int[] expectedReductionModes = new int[] {0, 1, 2};
+ when(mockCharacteristics.get(
+ CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES))
+ .thenReturn(expectedReductionModes);
+
+ int[] actualReductionModes = cameraProperties.getAvailableNoiseReductionModes();
+
+ verify(mockCharacteristics, times(1))
+ .get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
+ assertEquals(actualReductionModes, expectedReductionModes);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_convertPointToMeteringRectangleTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_convertPointToMeteringRectangleTest.java
new file mode 100644
index 0000000..2c6d9d9
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_convertPointToMeteringRectangleTest.java
@@ -0,0 +1,197 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.params.MeteringRectangle;
+import android.util.Size;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+public class CameraRegionUtils_convertPointToMeteringRectangleTest {
+ private MockedStatic<CameraRegionUtils.MeteringRectangleFactory> mockedMeteringRectangleFactory;
+ private Size mockCameraBoundaries;
+
+ @Before
+ public void setUp() {
+ this.mockCameraBoundaries = mock(Size.class); // 100x100 sensor boundary shared by most tests below
+ when(this.mockCameraBoundaries.getWidth()).thenReturn(100);
+ when(this.mockCameraBoundaries.getHeight()).thenReturn(100);
+ mockedMeteringRectangleFactory = mockStatic(CameraRegionUtils.MeteringRectangleFactory.class); // static mock; released in tearDown
+
+ mockedMeteringRectangleFactory
+ .when(
+ () ->
+ CameraRegionUtils.MeteringRectangleFactory.create(
+ anyInt(), anyInt(), anyInt(), anyInt(), anyInt()))
+ .thenAnswer(
+ new Answer<MeteringRectangle>() {
+ @Override
+ public MeteringRectangle answer(InvocationOnMock createInvocation) throws Throwable {
+ MeteringRectangle mockMeteringRectangle = mock(MeteringRectangle.class); // echoes factory args back through the getters
+ when(mockMeteringRectangle.getX()).thenReturn(createInvocation.getArgument(0));
+ when(mockMeteringRectangle.getY()).thenReturn(createInvocation.getArgument(1));
+ when(mockMeteringRectangle.getWidth()).thenReturn(createInvocation.getArgument(2));
+ when(mockMeteringRectangle.getHeight()).thenReturn(createInvocation.getArgument(3));
+ when(mockMeteringRectangle.getMeteringWeight())
+ .thenReturn(createInvocation.getArgument(4));
+ when(mockMeteringRectangle.equals(any())) // value-based equals so tests can compare two factory-created mocks
+ .thenAnswer(
+ new Answer<Boolean>() {
+ @Override
+ public Boolean answer(InvocationOnMock equalsInvocation)
+ throws Throwable {
+ MeteringRectangle otherMockMeteringRectangle =
+ equalsInvocation.getArgument(0);
+ return mockMeteringRectangle.getX() == otherMockMeteringRectangle.getX()
+ && mockMeteringRectangle.getY() == otherMockMeteringRectangle.getY()
+ && mockMeteringRectangle.getWidth()
+ == otherMockMeteringRectangle.getWidth()
+ && mockMeteringRectangle.getHeight()
+ == otherMockMeteringRectangle.getHeight()
+ && mockMeteringRectangle.getMeteringWeight()
+ == otherMockMeteringRectangle.getMeteringWeight();
+ }
+ });
+ return mockMeteringRectangle;
+ }
+ });
+ }
+
+ @After
+ public void tearDown() {
+ mockedMeteringRectangleFactory.close(); // release the static mock so later tests can re-register it
+ }
+
+ @Test
+ public void convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForCenterCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0.5, 0.5, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(45, 45, 10, 10, 1).equals(r)); // 10x10 rect centered in the 100x100 boundary
+ }
+
+ @Test
+ public void convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForTopLeftCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0, 0, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(0, 0, 10, 10, 1).equals(r)); // clamped to the top-left origin
+ }
+
+ @Test
+ public void convertPointToMeteringRectangle_ShouldReturnValidMeteringRectangleForTopRightCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 0, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(89, 0, 10, 10, 1).equals(r)); // x capped so the 10-wide rect stays inside the boundary
+ }
+
+ @Test
+ public void
+ convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForBottomLeftCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0, 1, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(0, 89, 10, 10, 1).equals(r)); // y capped so the 10-tall rect stays inside the boundary
+ }
+
+ @Test
+ public void
+ convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForBottomRightCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(89, 89, 10, 10, 1).equals(r)); // both axes capped at the far corner
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowForXUpperBound() {
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1.5, 0, PlatformChannel.DeviceOrientation.PORTRAIT_UP); // x > 1 must be rejected
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowForXLowerBound() {
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, -0.5, 0, PlatformChannel.DeviceOrientation.PORTRAIT_UP); // x < 0 must be rejected
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowForYUpperBound() {
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0, 1.5, PlatformChannel.DeviceOrientation.PORTRAIT_UP); // y > 1 must be rejected
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowForYLowerBound() {
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0, -0.5, PlatformChannel.DeviceOrientation.PORTRAIT_UP); // y < 0 must be rejected
+ }
+
+ @Test()
+ public void
+ convertPointToMeteringRectangle_shouldRotateMeteringRectangleAccordingToUiOrientationForPortraitUp() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(89, 0, 10, 10, 1).equals(r)); // same (1,1) input, rotated into sensor coordinates
+ }
+
+ @Test()
+ public void
+ convertPointToMeteringRectangle_shouldRotateMeteringRectangleAccordingToUiOrientationForPortraitDown() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.PORTRAIT_DOWN);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(0, 89, 10, 10, 1).equals(r)); // opposite portrait orientation mirrors the result
+ }
+
+ @Test()
+ public void
+ convertPointToMeteringRectangle_shouldRotateMeteringRectangleAccordingToUiOrientationForLandscapeLeft() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(89, 89, 10, 10, 1).equals(r)); // landscape-left leaves the point in the far corner
+ }
+
+ @Test()
+ public void
+ convertPointToMeteringRectangle_shouldRotateMeteringRectangleAccordingToUiOrientationForLandscapeRight() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(0, 0, 10, 10, 1).equals(r)); // landscape-right maps it to the origin
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowFor0WidthBoundary() {
+ Size mockCameraBoundaries = mock(Size.class); // local zero-width boundary; intentionally shadows the valid 100x100 field
+ when(mockCameraBoundaries.getWidth()).thenReturn(0);
+ when(mockCameraBoundaries.getHeight()).thenReturn(50);
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockCameraBoundaries, 0, -0.5, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowFor0HeightBoundary() {
+ Size mockCameraBoundaries = mock(Size.class); // local zero-height boundary; intentionally shadows the valid 100x100 field
+ when(mockCameraBoundaries.getWidth()).thenReturn(50);
+ when(mockCameraBoundaries.getHeight()).thenReturn(0);
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockCameraBoundaries, 0, -0.5, PlatformChannel.DeviceOrientation.PORTRAIT_UP); // was this.mockCameraBoundaries: the 0-height mock was never exercised
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_getCameraBoundariesTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_getCameraBoundariesTest.java
new file mode 100644
index 0000000..4c01649
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_getCameraBoundariesTest.java
@@ -0,0 +1,247 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.graphics.Rect;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import android.util.Size;
+import io.flutter.plugins.camera.utils.TestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.stubbing.Answer;
+
+public class CameraRegionUtils_getCameraBoundariesTest {
+
+ Size mockCameraBoundaries;
+
+ @Before
+ public void setUp() {
+ this.mockCameraBoundaries = mock(Size.class); // 100x100 pixel-array size returned by several tests below
+ when(this.mockCameraBoundaries.getWidth()).thenReturn(100);
+ when(this.mockCameraBoundaries.getHeight()).thenReturn(100);
+ }
+
+ @Test
+ public void getCameraBoundaries_shouldReturnSensorInfoPixelArraySizeWhenRunningPreAndroidP() {
+ updateSdkVersion(Build.VERSION_CODES.O_MR1); // API 27: distortion correction does not exist pre-P
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ when(mockCameraProperties.getSensorInfoPixelArraySize()).thenReturn(mockCameraBoundaries);
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(mockCameraBoundaries, result);
+ verify(mockCameraProperties, never()).getSensorInfoPreCorrectionActiveArraySize(); // the P-only branches must not be consulted
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ } finally {
+ updateSdkVersion(0); // always restore the faked SDK_INT
+ }
+ }
+
+ @Test
+ public void
+ getCameraBoundaries_shouldReturnSensorInfoPixelArraySizeWhenDistortionCorrectionIsNull() {
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes()).thenReturn(null); // device reports no correction support at all
+ when(mockCameraProperties.getSensorInfoPixelArraySize()).thenReturn(mockCameraBoundaries);
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(mockCameraBoundaries, result);
+ verify(mockCameraProperties, never()).getSensorInfoPreCorrectionActiveArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ } finally {
+ updateSdkVersion(0); // always restore the faked SDK_INT
+ }
+ }
+
+ @Test
+ public void
+ getCameraBoundaries_shouldReturnSensorInfoPixelArraySizeWhenDistortionCorrectionIsOff() {
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes())
+ .thenReturn(new int[] {CaptureRequest.DISTORTION_CORRECTION_MODE_OFF}); // OFF is the only available mode
+ when(mockCameraProperties.getSensorInfoPixelArraySize()).thenReturn(mockCameraBoundaries);
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(mockCameraBoundaries, result);
+ verify(mockCameraProperties, never()).getSensorInfoPreCorrectionActiveArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ } finally {
+ updateSdkVersion(0); // always restore the faked SDK_INT
+ }
+ }
+
+ @Test
+ public void
+ getCameraBoundaries_shouldReturnInfoPreCorrectionActiveArraySizeWhenDistortionCorrectionModeIsSetToNull() {
+ updateSdkVersion(Build.VERSION_CODES.P); // API 28: distortion-correction branch is reachable
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ Rect mockSensorInfoPreCorrectionActiveArraySize = mock(Rect.class);
+ when(mockSensorInfoPreCorrectionActiveArraySize.width()).thenReturn(100);
+ when(mockSensorInfoPreCorrectionActiveArraySize.height()).thenReturn(100);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes())
+ .thenReturn(
+ new int[] {
+ CaptureRequest.DISTORTION_CORRECTION_MODE_OFF,
+ CaptureRequest.DISTORTION_CORRECTION_MODE_FAST
+ }); // correction is supported, but no mode has been set on the request
+ when(mockBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE)).thenReturn(null);
+ when(mockCameraProperties.getSensorInfoPreCorrectionActiveArraySize())
+ .thenReturn(mockSensorInfoPreCorrectionActiveArraySize);
+
+ try (MockedStatic<CameraRegionUtils.SizeFactory> mockedSizeFactory =
+ mockStatic(CameraRegionUtils.SizeFactory.class)) { // stub Size creation: android.util.Size is not available in plain JVM tests
+ mockedSizeFactory
+ .when(() -> CameraRegionUtils.SizeFactory.create(anyInt(), anyInt()))
+ .thenAnswer(
+ (Answer<Size>)
+ invocation -> {
+ Size mockSize = mock(Size.class);
+ when(mockSize.getWidth()).thenReturn(invocation.getArgument(0));
+ when(mockSize.getHeight()).thenReturn(invocation.getArgument(1));
+ return mockSize;
+ });
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(100, result.getWidth());
+ assertEquals(100, result.getHeight());
+ verify(mockCameraProperties, never()).getSensorInfoPixelArraySize(); // the other two sources must stay untouched
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ }
+ } finally {
+ updateSdkVersion(0); // always restore the faked SDK_INT
+ }
+ }
+
+ @Test
+ public void
+ getCameraBoundaries_shouldReturnInfoPreCorrectionActiveArraySizeWhenDistortionCorrectionModeIsSetToOff() {
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ Rect mockSensorInfoPreCorrectionActiveArraySize = mock(Rect.class);
+ when(mockSensorInfoPreCorrectionActiveArraySize.width()).thenReturn(100);
+ when(mockSensorInfoPreCorrectionActiveArraySize.height()).thenReturn(100);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes())
+ .thenReturn(
+ new int[] {
+ CaptureRequest.DISTORTION_CORRECTION_MODE_OFF,
+ CaptureRequest.DISTORTION_CORRECTION_MODE_FAST
+ });
+
+ when(mockBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE))
+ .thenReturn(CaptureRequest.DISTORTION_CORRECTION_MODE_OFF); // correction explicitly disabled on the request
+ when(mockCameraProperties.getSensorInfoPreCorrectionActiveArraySize())
+ .thenReturn(mockSensorInfoPreCorrectionActiveArraySize);
+
+ try (MockedStatic<CameraRegionUtils.SizeFactory> mockedSizeFactory =
+ mockStatic(CameraRegionUtils.SizeFactory.class)) {
+ mockedSizeFactory
+ .when(() -> CameraRegionUtils.SizeFactory.create(anyInt(), anyInt()))
+ .thenAnswer(
+ (Answer<Size>)
+ invocation -> {
+ Size mockSize = mock(Size.class);
+ when(mockSize.getWidth()).thenReturn(invocation.getArgument(0));
+ when(mockSize.getHeight()).thenReturn(invocation.getArgument(1));
+ return mockSize;
+ });
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(100, result.getWidth());
+ assertEquals(100, result.getHeight());
+ verify(mockCameraProperties, never()).getSensorInfoPixelArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ }
+ } finally {
+ updateSdkVersion(0); // always restore the faked SDK_INT
+ }
+ }
+
+ @Test
+ public void
+ getCameraBoundaries_shouldReturnSensorInfoActiveArraySizeWhenDistortionCorrectionModeIsSet() {
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ Rect mockSensorInfoActiveArraySize = mock(Rect.class);
+ when(mockSensorInfoActiveArraySize.width()).thenReturn(100);
+ when(mockSensorInfoActiveArraySize.height()).thenReturn(100);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes())
+ .thenReturn(
+ new int[] {
+ CaptureRequest.DISTORTION_CORRECTION_MODE_OFF,
+ CaptureRequest.DISTORTION_CORRECTION_MODE_FAST
+ });
+
+ when(mockBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE))
+ .thenReturn(CaptureRequest.DISTORTION_CORRECTION_MODE_FAST); // correction actively enabled on the request
+ when(mockCameraProperties.getSensorInfoActiveArraySize())
+ .thenReturn(mockSensorInfoActiveArraySize);
+
+ try (MockedStatic<CameraRegionUtils.SizeFactory> mockedSizeFactory =
+ mockStatic(CameraRegionUtils.SizeFactory.class)) {
+ mockedSizeFactory
+ .when(() -> CameraRegionUtils.SizeFactory.create(anyInt(), anyInt()))
+ .thenAnswer(
+ (Answer<Size>)
+ invocation -> {
+ Size mockSize = mock(Size.class);
+ when(mockSize.getWidth()).thenReturn(invocation.getArgument(0));
+ when(mockSize.getHeight()).thenReturn(invocation.getArgument(1));
+ return mockSize;
+ });
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(100, result.getWidth());
+ assertEquals(100, result.getHeight());
+ verify(mockCameraProperties, never()).getSensorInfoPixelArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoPreCorrectionActiveArraySize();
+ }
+ } finally {
+ updateSdkVersion(0); // always restore the faked SDK_INT
+ }
+ }
+
+ private static void updateSdkVersion(int version) { // helper: fake the runtime API level for branch testing
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", version); // reflection hack to override the final Build.VERSION.SDK_INT field
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
new file mode 100644
index 0000000..9a67901
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
@@ -0,0 +1,1014 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.SessionConfiguration;
+import android.media.ImageReader;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Size;
+import android.view.Surface;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.lifecycle.LifecycleObserver;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugin.common.MethodChannel;
+import io.flutter.plugins.camera.features.CameraFeatureFactory;
+import io.flutter.plugins.camera.features.CameraFeatures;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.autofocus.FocusMode;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureMode;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.flash.FlashMode;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.DeviceOrientationManager;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+import io.flutter.plugins.camera.utils.TestUtils;
+import io.flutter.view.TextureRegistry;
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+
+class FakeCameraDeviceWrapper implements CameraDeviceWrapper { // test double: hands out pre-queued request builders, no real camera
+ final List<CaptureRequest.Builder> captureRequests;
+
+ FakeCameraDeviceWrapper(List<CaptureRequest.Builder> captureRequests) {
+ this.captureRequests = captureRequests; // callers pre-load the builders each test expects to consume
+ }
+
+ @NonNull
+ @Override
+ public CaptureRequest.Builder createCaptureRequest(int var1) {
+ return captureRequests.remove(0); // FIFO: each call consumes the next queued builder
+ }
+
+ @Override
+ public void createCaptureSession(SessionConfiguration config) {} // intentional no-op in the fake
+
+ @Override
+ public void createCaptureSession(
+ @NonNull List<Surface> outputs,
+ @NonNull CameraCaptureSession.StateCallback callback,
+ @Nullable Handler handler) {} // intentional no-op in the fake
+
+ @Override
+ public void close() {} // intentional no-op in the fake
+}
+
+public class CameraTest {
+ private CameraProperties mockCameraProperties;
+ private CameraFeatureFactory mockCameraFeatureFactory;
+ private DartMessenger mockDartMessenger;
+ private Camera camera;
+ private CameraCaptureSession mockCaptureSession;
+ private CaptureRequest.Builder mockPreviewRequestBuilder;
+ private MockedStatic<Camera.HandlerThreadFactory> mockHandlerThreadFactory;
+ private HandlerThread mockHandlerThread;
+ private MockedStatic<Camera.HandlerFactory> mockHandlerFactory;
+ private Handler mockHandler;
+
+ @Before
+ public void before() {
+ mockCameraProperties = mock(CameraProperties.class);
+ mockCameraFeatureFactory = new TestCameraFeatureFactory(); // real test factory (not a Mockito mock) so feature getters work
+ mockDartMessenger = mock(DartMessenger.class);
+ mockCaptureSession = mock(CameraCaptureSession.class);
+ mockPreviewRequestBuilder = mock(CaptureRequest.Builder.class);
+ mockHandlerThreadFactory = mockStatic(Camera.HandlerThreadFactory.class); // static mocks; closed in after()
+ mockHandlerThread = mock(HandlerThread.class);
+ mockHandlerFactory = mockStatic(Camera.HandlerFactory.class);
+ mockHandler = mock(Handler.class);
+
+ final Activity mockActivity = mock(Activity.class);
+ final TextureRegistry.SurfaceTextureEntry mockFlutterTexture =
+ mock(TextureRegistry.SurfaceTextureEntry.class);
+ final String cameraName = "1";
+ final ResolutionPreset resolutionPreset = ResolutionPreset.high;
+ final boolean enableAudio = false;
+
+ when(mockCameraProperties.getCameraName()).thenReturn(cameraName);
+ mockHandlerFactory.when(() -> Camera.HandlerFactory.create(any())).thenReturn(mockHandler); // keep Camera off real Android handlers/threads
+ mockHandlerThreadFactory
+ .when(() -> Camera.HandlerThreadFactory.create(any()))
+ .thenReturn(mockHandlerThread);
+
+ camera =
+ new Camera(
+ mockActivity,
+ mockFlutterTexture,
+ mockCameraFeatureFactory,
+ mockDartMessenger,
+ mockCameraProperties,
+ resolutionPreset,
+ enableAudio);
+
+ TestUtils.setPrivateField(camera, "captureSession", mockCaptureSession); // inject session/builder the camera normally creates itself
+ TestUtils.setPrivateField(camera, "previewRequestBuilder", mockPreviewRequestBuilder);
+ }
+
+ @After
+ public void after() {
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 0); // undo any SDK_INT faking a test performed
+ mockHandlerThreadFactory.close(); // release static mocks registered in before()
+ mockHandlerFactory.close();
+ }
+
+ @Test
+ public void shouldNotImplementLifecycleObserverInterface() {
+ Class<Camera> cameraClass = Camera.class;
+
+ assertFalse(LifecycleObserver.class.isAssignableFrom(cameraClass)); // guards against reintroducing a lifecycle dependency
+ }
+
+ @Test
+ public void shouldCreateCameraPluginAndSetAllFeatures() {
+ final Activity mockActivity = mock(Activity.class);
+ final TextureRegistry.SurfaceTextureEntry mockFlutterTexture =
+ mock(TextureRegistry.SurfaceTextureEntry.class);
+ final CameraFeatureFactory mockCameraFeatureFactory = mock(CameraFeatureFactory.class); // local Mockito factory shadows the field's TestCameraFeatureFactory
+ final String cameraName = "1";
+ final ResolutionPreset resolutionPreset = ResolutionPreset.high;
+ final boolean enableAudio = false;
+
+ when(mockCameraProperties.getCameraName()).thenReturn(cameraName);
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ when(mockCameraFeatureFactory.createSensorOrientationFeature(any(), any(), any()))
+ .thenReturn(mockSensorOrientationFeature);
+
+ Camera camera =
+ new Camera(
+ mockActivity,
+ mockFlutterTexture,
+ mockCameraFeatureFactory,
+ mockDartMessenger,
+ mockCameraProperties,
+ resolutionPreset,
+ enableAudio);
+
+ verify(mockCameraFeatureFactory, times(1))
+ .createSensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger); // the constructor must wire up every feature exactly once
+ verify(mockCameraFeatureFactory, times(1)).createAutoFocusFeature(mockCameraProperties, false);
+ verify(mockCameraFeatureFactory, times(1)).createExposureLockFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1))
+ .createExposurePointFeature(eq(mockCameraProperties), eq(mockSensorOrientationFeature));
+ verify(mockCameraFeatureFactory, times(1)).createExposureOffsetFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1)).createFlashFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1))
+ .createFocusPointFeature(eq(mockCameraProperties), eq(mockSensorOrientationFeature));
+ verify(mockCameraFeatureFactory, times(1)).createFpsRangeFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1)).createNoiseReductionFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1))
+ .createResolutionFeature(mockCameraProperties, resolutionPreset, cameraName);
+ verify(mockCameraFeatureFactory, times(1)).createZoomLevelFeature(mockCameraProperties);
+ assertNotNull("should create a camera", camera);
+ }
+
+ @Test
+ public void getDeviceOrientationManager() {
+ SensorOrientationFeature mockSensorOrientationFeature =
+ mockCameraFeatureFactory.createSensorOrientationFeature(mockCameraProperties, null, null); // TestCameraFeatureFactory returns the same mock the camera holds
+ DeviceOrientationManager mockDeviceOrientationManager = mock(DeviceOrientationManager.class);
+
+ when(mockSensorOrientationFeature.getDeviceOrientationManager())
+ .thenReturn(mockDeviceOrientationManager);
+
+ DeviceOrientationManager actualDeviceOrientationManager = camera.getDeviceOrientationManager();
+
+ verify(mockSensorOrientationFeature, times(1)).getDeviceOrientationManager(); // camera delegates straight to the feature
+ assertEquals(mockDeviceOrientationManager, actualDeviceOrientationManager);
+ }
+
+ @Test
+ public void getExposureOffsetStepSize() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ double stepSize = 2.3;
+
+ when(mockExposureOffsetFeature.getExposureOffsetStepSize()).thenReturn(stepSize);
+
+ double actualSize = camera.getExposureOffsetStepSize();
+
+ verify(mockExposureOffsetFeature, times(1)).getExposureOffsetStepSize();
+ assertEquals(stepSize, actualSize, 0); // delta 0: the value must pass through exactly
+ }
+
+ @Test
+ public void getMaxExposureOffset() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ double expectedMaxOffset = 42.0;
+
+ when(mockExposureOffsetFeature.getMaxExposureOffset()).thenReturn(expectedMaxOffset);
+
+ double actualMaxOffset = camera.getMaxExposureOffset();
+
+ verify(mockExposureOffsetFeature, times(1)).getMaxExposureOffset();
+ assertEquals(expectedMaxOffset, actualMaxOffset, 0);
+ }
+
+ @Test
+ public void getMinExposureOffset() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ double expectedMinOffset = 21.5;
+
+ when(mockExposureOffsetFeature.getMinExposureOffset()).thenReturn(expectedMinOffset); // was a duplicated 21.5 literal; use the variable like every sibling test
+
+ double actualMinOffset = camera.getMinExposureOffset();
+
+ verify(mockExposureOffsetFeature, times(1)).getMinExposureOffset();
+ assertEquals(expectedMinOffset, actualMinOffset, 0);
+ }
+
+ @Test
+ public void getMaxZoomLevel() {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties); // same mock instance the camera was constructed with
+ float expectedMaxZoomLevel = 4.2f;
+
+ when(mockZoomLevelFeature.getMaximumZoomLevel()).thenReturn(expectedMaxZoomLevel);
+
+ float actualMaxZoomLevel = camera.getMaxZoomLevel();
+
+ verify(mockZoomLevelFeature, times(1)).getMaximumZoomLevel();
+ assertEquals(expectedMaxZoomLevel, actualMaxZoomLevel, 0); // delta 0: the value must pass through exactly
+ }
+
+ @Test
+ public void getMinZoomLevel() {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ float expectedMinZoomLevel = 4.2f;
+
+ when(mockZoomLevelFeature.getMinimumZoomLevel()).thenReturn(expectedMinZoomLevel);
+
+ float actualMinZoomLevel = camera.getMinZoomLevel();
+
+ verify(mockZoomLevelFeature, times(1)).getMinimumZoomLevel();
+ assertEquals(expectedMinZoomLevel, actualMinZoomLevel, 0);
+ }
+
+ @Test
+ public void setExposureMode_shouldUpdateExposureLockFeature() {
+ ExposureLockFeature mockExposureLockFeature =
+ mockCameraFeatureFactory.createExposureLockFeature(mockCameraProperties); // same mock instance the camera was constructed with
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ ExposureMode exposureMode = ExposureMode.locked;
+
+ camera.setExposureMode(mockResult, exposureMode);
+
+ verify(mockExposureLockFeature, times(1)).setValue(exposureMode);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null); // success with no payload on the happy path
+ }
+
+ @Test
+ public void setExposureMode_shouldUpdateBuilder() {
+ ExposureLockFeature mockExposureLockFeature =
+ mockCameraFeatureFactory.createExposureLockFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ ExposureMode exposureMode = ExposureMode.locked;
+
+ camera.setExposureMode(mockResult, exposureMode);
+
+ verify(mockExposureLockFeature, times(1)).updateBuilder(any()); // the new mode must be applied to the capture request
+ }
+
+ @Test
+ public void setExposureMode_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ ExposureMode exposureMode = ExposureMode.locked;
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, "")); // simulate the camera session failing mid-update
+
+ camera.setExposureMode(mockResult, exposureMode);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1))
+ .error("setExposureModeFailed", "Could not set exposure mode.", null);
+ }
+
+ @Test
+ public void setExposurePoint_shouldUpdateExposurePointFeature() {
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ ExposurePointFeature mockExposurePointFeature =
+ mockCameraFeatureFactory.createExposurePointFeature(
+ mockCameraProperties, mockSensorOrientationFeature);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+
+ camera.setExposurePoint(mockResult, point);
+
+ verify(mockExposurePointFeature, times(1)).setValue(point);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void setExposurePoint_shouldUpdateBuilder() {
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ ExposurePointFeature mockExposurePointFeature =
+ mockCameraFeatureFactory.createExposurePointFeature(
+ mockCameraProperties, mockSensorOrientationFeature);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+
+ camera.setExposurePoint(mockResult, point);
+
+ verify(mockExposurePointFeature, times(1)).updateBuilder(any()); // the new point must be applied to the capture request
+ }
+
+ @Test
+ public void setExposurePoint_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, "")); // simulate the camera session failing mid-update
+
+ camera.setExposurePoint(mockResult, point);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1))
+ .error("setExposurePointFailed", "Could not set exposure point.", null);
+ }
+
+ // Verifies setFlashMode() stores the mode on the flash feature and reports success exactly once.
+ @Test
+ public void setFlashMode_shouldUpdateFlashFeature() {
+ FlashFeature mockFlashFeature =
+ mockCameraFeatureFactory.createFlashFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ FlashMode flashMode = FlashMode.always;
+
+ camera.setFlashMode(mockResult, flashMode);
+
+ verify(mockFlashFeature, times(1)).setValue(flashMode);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ // Verifies setFlashMode() pushes the new mode into the capture request builder.
+ @Test
+ public void setFlashMode_shouldUpdateBuilder() {
+ FlashFeature mockFlashFeature =
+ mockCameraFeatureFactory.createFlashFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ FlashMode flashMode = FlashMode.always;
+
+ camera.setFlashMode(mockResult, flashMode);
+
+ verify(mockFlashFeature, times(1)).updateBuilder(any());
+ }
+
+ // Verifies setFlashMode() reports an error (and no success) when the
+ // repeating request cannot be applied due to CameraAccessException.
+ @Test
+ public void setFlashMode_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ FlashMode flashMode = FlashMode.always;
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setFlashMode(mockResult, flashMode);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1)).error("setFlashModeFailed", "Could not set flash mode.", null);
+ }
+
+ // Verifies setFocusPoint() stores the point on the focus-point feature and reports success.
+ // The auto-focus feature is stubbed to FocusMode.auto because setFocusPoint re-applies
+ // the current focus mode after moving the focus region.
+ @Test
+ public void setFocusPoint_shouldUpdateFocusPointFeature() {
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ FocusPointFeature mockFocusPointFeature =
+ mockCameraFeatureFactory.createFocusPointFeature(
+ mockCameraProperties, mockSensorOrientationFeature);
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+ when(mockAutoFocusFeature.getValue()).thenReturn(FocusMode.auto);
+
+ camera.setFocusPoint(mockResult, point);
+
+ verify(mockFocusPointFeature, times(1)).setValue(point);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ // Verifies setFocusPoint() pushes the new point into the capture request builder.
+ @Test
+ public void setFocusPoint_shouldUpdateBuilder() {
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ FocusPointFeature mockFocusPointFeature =
+ mockCameraFeatureFactory.createFocusPointFeature(
+ mockCameraProperties, mockSensorOrientationFeature);
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+ when(mockAutoFocusFeature.getValue()).thenReturn(FocusMode.auto);
+
+ camera.setFocusPoint(mockResult, point);
+
+ verify(mockFocusPointFeature, times(1)).updateBuilder(any());
+ }
+
+ // Verifies setFocusPoint() reports an error (and no success) when the
+ // repeating request cannot be applied due to CameraAccessException.
+ @Test
+ public void setFocusPoint_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+ when(mockAutoFocusFeature.getValue()).thenReturn(FocusMode.auto);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setFocusPoint(mockResult, point);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1)).error("setFocusPointFailed", "Could not set focus point.", null);
+ }
+
+ // Verifies setZoomLevel() stores the level on the zoom feature and reports success.
+ // Min/max zoom are stubbed so the requested level (1.0) is inside the valid range.
+ @Test
+ public void setZoomLevel_shouldUpdateZoomLevelFeature() throws CameraAccessException {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ float zoomLevel = 1.0f;
+
+ when(mockZoomLevelFeature.getValue()).thenReturn(zoomLevel);
+ when(mockZoomLevelFeature.getMinimumZoomLevel()).thenReturn(0f);
+ when(mockZoomLevelFeature.getMaximumZoomLevel()).thenReturn(2f);
+
+ camera.setZoomLevel(mockResult, zoomLevel);
+
+ verify(mockZoomLevelFeature, times(1)).setValue(zoomLevel);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ // Verifies setZoomLevel() pushes the new level into the capture request builder.
+ @Test
+ public void setZoomLevel_shouldUpdateBuilder() throws CameraAccessException {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ float zoomLevel = 1.0f;
+
+ when(mockZoomLevelFeature.getValue()).thenReturn(zoomLevel);
+ when(mockZoomLevelFeature.getMinimumZoomLevel()).thenReturn(0f);
+ when(mockZoomLevelFeature.getMaximumZoomLevel()).thenReturn(2f);
+
+ camera.setZoomLevel(mockResult, zoomLevel);
+
+ verify(mockZoomLevelFeature, times(1)).updateBuilder(any());
+ }
+
+ // Verifies setZoomLevel() reports an error (and no success) when the
+ // repeating request cannot be applied due to CameraAccessException.
+ @Test
+ public void setZoomLevel_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ float zoomLevel = 1.0f;
+
+ when(mockZoomLevelFeature.getValue()).thenReturn(zoomLevel);
+ when(mockZoomLevelFeature.getMinimumZoomLevel()).thenReturn(0f);
+ when(mockZoomLevelFeature.getMaximumZoomLevel()).thenReturn(2f);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setZoomLevel(mockResult, zoomLevel);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1)).error("setZoomLevelFailed", "Could not set zoom level.", null);
+ }
+
+ // Verifies pauseVideoRecording() is a silent no-op (success, no error) when no
+ // recording is in progress. Private state is injected via TestUtils reflection helpers.
+ @Test
+ public void pauseVideoRecording_shouldSendNullResultWhenNotRecording() {
+ TestUtils.setPrivateField(camera, "recordingVideo", false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.pauseVideoRecording(mockResult);
+
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ // Verifies pauseVideoRecording() delegates to MediaRecorder.pause() when recording
+ // and the (reflection-forced) SDK level is 24 (Android N, where pause() exists).
+ @Test
+ public void pauseVideoRecording_shouldCallPauseWhenRecordingAndOnAPIN() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 24);
+
+ camera.pauseVideoRecording(mockResult);
+
+ verify(mockMediaRecorder, times(1)).pause();
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ // Verifies pauseVideoRecording() fails with videoRecordingFailed below SDK 24.
+ @Test
+ public void pauseVideoRecording_shouldSendVideoRecordingFailedErrorWhenVersionCodeSmallerThenN() {
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 23);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.pauseVideoRecording(mockResult);
+
+ verify(mockResult, times(1))
+ .error("videoRecordingFailed", "pauseVideoRecording requires Android API +24.", null);
+ verify(mockResult, never()).success(any());
+ }
+
+ // Verifies an IllegalStateException thrown by MediaRecorder.pause() is surfaced
+ // to the result as a videoRecordingFailed error carrying the exception message.
+ @Test
+ public void
+ pauseVideoRecording_shouldSendVideoRecordingFailedErrorWhenMediaRecorderPauseThrowsIllegalStateException() {
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 24);
+
+ IllegalStateException expectedException = new IllegalStateException("Test error message");
+
+ doThrow(expectedException).when(mockMediaRecorder).pause();
+
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.pauseVideoRecording(mockResult);
+
+ verify(mockResult, times(1)).error("videoRecordingFailed", "Test error message", null);
+ verify(mockResult, never()).success(any());
+ }
+
+ // Verifies resumeVideoRecording() is a silent no-op (success, no error) when no
+ // recording is in progress.
+ @Test
+ public void resumeVideoRecording_shouldSendNullResultWhenNotRecording() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ TestUtils.setPrivateField(camera, "recordingVideo", false);
+
+ camera.resumeVideoRecording(mockResult);
+
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ // Verifies resumeVideoRecording() delegates to MediaRecorder.resume() when recording
+ // and the (reflection-forced) SDK level is 24 (Android N, where resume() exists).
+ @Test
+ public void resumeVideoRecording_shouldCallPauseWhenRecordingAndOnAPIN() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 24);
+
+ camera.resumeVideoRecording(mockResult);
+
+ verify(mockMediaRecorder, times(1)).resume();
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ // Verifies resumeVideoRecording() fails with videoRecordingFailed below SDK 24.
+ @Test
+ public void
+ resumeVideoRecording_shouldSendVideoRecordingFailedErrorWhenVersionCodeSmallerThanN() {
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 23);
+
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.resumeVideoRecording(mockResult);
+
+ verify(mockResult, times(1))
+ .error("videoRecordingFailed", "resumeVideoRecording requires Android API +24.", null);
+ verify(mockResult, never()).success(any());
+ }
+
+ // Verifies an IllegalStateException thrown by MediaRecorder.resume() is surfaced
+ // to the result as a videoRecordingFailed error carrying the exception message.
+ @Test
+ public void
+ resumeVideoRecording_shouldSendVideoRecordingFailedErrorWhenMediaRecorderPauseThrowsIllegalStateException() {
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 24);
+
+ IllegalStateException expectedException = new IllegalStateException("Test error message");
+
+ doThrow(expectedException).when(mockMediaRecorder).resume();
+
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.resumeVideoRecording(mockResult);
+
+ verify(mockResult, times(1)).error("videoRecordingFailed", "Test error message", null);
+ verify(mockResult, never()).success(any());
+ }
+
+ // Verifies setFocusMode() stores the mode on the auto-focus feature and reports success.
+ @Test
+ public void setFocusMode_shouldUpdateAutoFocusFeature() {
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.setFocusMode(mockResult, FocusMode.auto);
+
+ verify(mockAutoFocusFeature, times(1)).setValue(FocusMode.auto);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ // Verifies setFocusMode() pushes the new mode into the capture request builder.
+ @Test
+ public void setFocusMode_shouldUpdateBuilder() {
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.setFocusMode(mockResult, FocusMode.auto);
+
+ verify(mockAutoFocusFeature, times(1)).updateBuilder(any());
+ }
+
+ // Switching to auto focus must unlock any previous AF lock: the builder should see
+ // an AF-trigger CANCEL followed by a reset to IDLE.
+ @Test
+ public void setFocusMode_shouldUnlockAutoFocusForAutoMode() {
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.auto);
+ verify(mockPreviewRequestBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+ verify(mockPreviewRequestBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+ }
+
+ // With no capture session the AF-unlock sequence must be skipped entirely.
+ @Test
+ public void setFocusMode_shouldSkipUnlockAutoFocusWhenNullCaptureSession() {
+ TestUtils.setPrivateField(camera, "captureSession", null);
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.auto);
+ verify(mockPreviewRequestBuilder, never())
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+ verify(mockPreviewRequestBuilder, never())
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+ }
+
+ // A CameraAccessException during the AF-unlock capture is reported as a camera
+ // error event on the Dart messenger.
+ @Test
+ public void setFocusMode_shouldSendErrorEventOnUnlockAutoFocusCameraAccessException()
+ throws CameraAccessException {
+ when(mockCaptureSession.capture(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.auto);
+ verify(mockDartMessenger, times(1)).sendCameraErrorEvent(any());
+ }
+
+ // Switching to locked focus must trigger an AF START, issue a one-shot capture,
+ // and restart the repeating preview request.
+ @Test
+ public void setFocusMode_shouldLockAutoFocusForLockedMode() throws CameraAccessException {
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.locked);
+ verify(mockPreviewRequestBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
+ verify(mockCaptureSession, times(1)).capture(any(), any(), any());
+ verify(mockCaptureSession, times(1)).setRepeatingRequest(any(), any(), any());
+ }
+
+ // With no capture session the AF-lock sequence must be skipped entirely.
+ @Test
+ public void setFocusMode_shouldSkipLockAutoFocusWhenNullCaptureSession() {
+ TestUtils.setPrivateField(camera, "captureSession", null);
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.locked);
+ verify(mockPreviewRequestBuilder, never())
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
+ }
+
+ // A CameraAccessException during the AF-lock capture is reported as a camera
+ // error event on the Dart messenger.
+ @Test
+ public void setFocusMode_shouldSendErrorEventOnLockAutoFocusCameraAccessException()
+ throws CameraAccessException {
+ when(mockCaptureSession.capture(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.locked);
+ verify(mockDartMessenger, times(1)).sendCameraErrorEvent(any());
+ }
+
+ // Verifies setFocusMode() reports an error (and no success) when re-applying the
+ // repeating request throws; "null" in the message is the exception's null message.
+ @Test
+ public void setFocusMode_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setFocusMode(mockResult, FocusMode.locked);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1))
+ .error("setFocusModeFailed", "Error setting focus mode: null", null);
+ }
+
+ // Verifies setExposureOffset() stores the offset and, unlike the other setters,
+ // reports the feature's resulting value (1.0) back through success().
+ @Test
+ public void setExposureOffset_shouldUpdateExposureOffsetFeature() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ when(mockExposureOffsetFeature.getValue()).thenReturn(1.0);
+
+ camera.setExposureOffset(mockResult, 1.0);
+
+ verify(mockExposureOffsetFeature, times(1)).setValue(1.0);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(1.0);
+ }
+
+ // Verifies setExposureOffset() pushes the new offset into the capture request builder.
+ @Test
+ public void setExposureOffset_shouldAndUpdateBuilder() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.setExposureOffset(mockResult, 1.0);
+
+ verify(mockExposureOffsetFeature, times(1)).updateBuilder(any());
+ }
+
+ // Verifies setExposureOffset() reports an error (and no success) when the
+ // repeating request cannot be applied due to CameraAccessException.
+ @Test
+ public void setExposureOffset_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setExposureOffset(mockResult, 1.0);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1))
+ .error("setExposureOffsetFailed", "Could not set exposure offset.", null);
+ }
+
+ // Verifies lockCaptureOrientation() forwards the requested orientation to the
+ // sensor-orientation feature.
+ @Test
+ public void lockCaptureOrientation_shouldLockCaptureOrientation() {
+ final Activity mockActivity = mock(Activity.class);
+ SensorOrientationFeature mockSensorOrientationFeature =
+ mockCameraFeatureFactory.createSensorOrientationFeature(
+ mockCameraProperties, mockActivity, mockDartMessenger);
+
+ camera.lockCaptureOrientation(PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+
+ verify(mockSensorOrientationFeature, times(1))
+ .lockCaptureOrientation(PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+
+ // Verifies unlockCaptureOrientation() delegates to the sensor-orientation feature.
+ @Test
+ public void unlockCaptureOrientation_shouldUnlockCaptureOrientation() {
+ final Activity mockActivity = mock(Activity.class);
+ SensorOrientationFeature mockSensorOrientationFeature =
+ mockCameraFeatureFactory.createSensorOrientationFeature(
+ mockCameraProperties, mockActivity, mockDartMessenger);
+
+ camera.unlockCaptureOrientation();
+
+ verify(mockSensorOrientationFeature, times(1)).unlockCaptureOrientation();
+ }
+
+ // Verifies pausePreview() flips the private pausedPreview flag and stops the
+ // session's repeating request.
+ @Test
+ public void pausePreview_shouldPausePreview() throws CameraAccessException {
+ camera.pausePreview();
+
+ assertEquals(TestUtils.getPrivateField(camera, "pausedPreview"), true);
+ verify(mockCaptureSession, times(1)).stopRepeating();
+ }
+
+ // Verifies resumePreview() clears the pausedPreview flag and restarts the
+ // repeating preview request.
+ @Test
+ public void resumePreview_shouldResumePreview() throws CameraAccessException {
+ camera.resumePreview();
+
+ assertEquals(TestUtils.getPrivateField(camera, "pausedPreview"), false);
+ verify(mockCaptureSession, times(1)).setRepeatingRequest(any(), any(), any());
+ }
+
+ // A CameraAccessException while restarting the preview is reported as a camera
+ // error event rather than thrown.
+ @Test
+ public void resumePreview_shouldSendErrorEventOnCameraAccessException()
+ throws CameraAccessException {
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0));
+
+ camera.resumePreview();
+
+ verify(mockDartMessenger, times(1)).sendCameraErrorEvent(any());
+ }
+
+ // Verifies startBackgroundThread() starts the (factory-provided) HandlerThread and
+ // installs its handler into the camera's private backgroundHandler field.
+ @Test
+ public void startBackgroundThread_shouldStartNewThread() {
+ camera.startBackgroundThread();
+
+ verify(mockHandlerThread, times(1)).start();
+ assertEquals(mockHandler, TestUtils.getPrivateField(camera, "backgroundHandler"));
+ }
+
+ // Calling startBackgroundThread() twice must not start a second thread.
+ @Test
+ public void startBackgroundThread_shouldNotStartNewThreadWhenAlreadyCreated() {
+ camera.startBackgroundThread();
+ camera.startBackgroundThread();
+
+ verify(mockHandlerThread, times(1)).start();
+ }
+
+ // Verifies stopBackgroundThread() shuts the thread down with quitSafely() and
+ // does not block on join().
+ @Test
+ public void stopBackgroundThread_quitsSafely() throws InterruptedException {
+ camera.startBackgroundThread();
+ camera.stopBackgroundThread();
+
+ verify(mockHandlerThread).quitSafely();
+ verify(mockHandlerThread, never()).join();
+ }
+
+ // Verifies the precapture-converged path takes the still picture without calling
+ // abortCaptures(), which is known to break or delay capture on some devices.
+ @Test
+ public void onConverge_shouldTakePictureWithoutAbortingSession() throws CameraAccessException {
+ ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
+ mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
+ CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
+ // Stub out other features used by the flow.
+ TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
+ TestUtils.setPrivateField(camera, "pictureImageReader", mock(ImageReader.class));
+ SensorOrientationFeature mockSensorOrientationFeature =
+ mockCameraFeatureFactory.createSensorOrientationFeature(mockCameraProperties, null, null);
+ DeviceOrientationManager mockDeviceOrientationManager = mock(DeviceOrientationManager.class);
+ when(mockSensorOrientationFeature.getDeviceOrientationManager())
+ .thenReturn(mockDeviceOrientationManager);
+
+ // Simulate a post-precapture flow.
+ camera.onConverged();
+ // A picture should be taken.
+ verify(mockCaptureSession, times(1)).capture(any(), any(), any());
+ // The session should not be aborted as part of this flow, as this breaks capture on some
+ // devices, and causes delays on others.
+ verify(mockCaptureSession, never()).abortCaptures();
+ }
+
+ // Verifies creating a new capture session does not close the existing session;
+ // private camera state is wired up via reflection so the flow can run headless.
+ @Test
+ public void createCaptureSession_doesNotCloseCaptureSession() throws CameraAccessException {
+ Surface mockSurface = mock(Surface.class);
+ SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
+ ResolutionFeature mockResolutionFeature = mock(ResolutionFeature.class);
+ Size mockSize = mock(Size.class);
+ ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
+ mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
+ CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
+ TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
+
+ TextureRegistry.SurfaceTextureEntry cameraFlutterTexture =
+ (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture");
+ CameraFeatures cameraFeatures =
+ (CameraFeatures) TestUtils.getPrivateField(camera, "cameraFeatures");
+ ResolutionFeature resolutionFeature =
+ (ResolutionFeature)
+ TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature");
+
+ when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture);
+ when(resolutionFeature.getPreviewSize()).thenReturn(mockSize);
+
+ camera.createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, mockSurface);
+
+ verify(mockCaptureSession, never()).close();
+ }
+
+ // With no camera device set, close() must close the capture session itself.
+ @Test
+ public void close_doesCloseCaptureSessionWhenCameraDeviceNull() {
+ camera.close();
+
+ verify(mockCaptureSession).close();
+ }
+
+ // With a live camera device, close() must not close the capture session directly
+ // (the device teardown is expected to handle it).
+ @Test
+ public void close_doesNotCloseCaptureSessionWhenCameraDeviceNonNull() {
+ ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
+ mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
+ CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
+ TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
+
+ camera.close();
+
+ verify(mockCaptureSession, never()).close();
+ }
+
+ // Test double for CameraFeatureFactory: every create* method hands back the same
+ // pre-built Mockito mock, so tests can retrieve a feature via the factory and then
+ // verify how the Camera under test interacted with it.
+ private static class TestCameraFeatureFactory implements CameraFeatureFactory {
+ private final AutoFocusFeature mockAutoFocusFeature;
+ private final ExposureLockFeature mockExposureLockFeature;
+ private final ExposureOffsetFeature mockExposureOffsetFeature;
+ private final ExposurePointFeature mockExposurePointFeature;
+ private final FlashFeature mockFlashFeature;
+ private final FocusPointFeature mockFocusPointFeature;
+ private final FpsRangeFeature mockFpsRangeFeature;
+ private final NoiseReductionFeature mockNoiseReductionFeature;
+ private final ResolutionFeature mockResolutionFeature;
+ private final SensorOrientationFeature mockSensorOrientationFeature;
+ private final ZoomLevelFeature mockZoomLevelFeature;
+
+ public TestCameraFeatureFactory() {
+ this.mockAutoFocusFeature = mock(AutoFocusFeature.class);
+ this.mockExposureLockFeature = mock(ExposureLockFeature.class);
+ this.mockExposureOffsetFeature = mock(ExposureOffsetFeature.class);
+ this.mockExposurePointFeature = mock(ExposurePointFeature.class);
+ this.mockFlashFeature = mock(FlashFeature.class);
+ this.mockFocusPointFeature = mock(FocusPointFeature.class);
+ this.mockFpsRangeFeature = mock(FpsRangeFeature.class);
+ this.mockNoiseReductionFeature = mock(NoiseReductionFeature.class);
+ this.mockResolutionFeature = mock(ResolutionFeature.class);
+ this.mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ this.mockZoomLevelFeature = mock(ZoomLevelFeature.class);
+ }
+
+ @Override
+ public AutoFocusFeature createAutoFocusFeature(
+ @NonNull CameraProperties cameraProperties, boolean recordingVideo) {
+ return mockAutoFocusFeature;
+ }
+
+ @Override
+ public ExposureLockFeature createExposureLockFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockExposureLockFeature;
+ }
+
+ @Override
+ public ExposureOffsetFeature createExposureOffsetFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockExposureOffsetFeature;
+ }
+
+ @Override
+ public FlashFeature createFlashFeature(@NonNull CameraProperties cameraProperties) {
+ return mockFlashFeature;
+ }
+
+ @Override
+ public ResolutionFeature createResolutionFeature(
+ @NonNull CameraProperties cameraProperties,
+ ResolutionPreset initialSetting,
+ String cameraName) {
+ return mockResolutionFeature;
+ }
+
+ // NOTE(review): "sensorOrienttionFeature" is misspelled; likely inherited from the
+ // CameraFeatureFactory interface declaration — confirm and fix there, not here.
+ @Override
+ public FocusPointFeature createFocusPointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrienttionFeature) {
+ return mockFocusPointFeature;
+ }
+
+ @Override
+ public FpsRangeFeature createFpsRangeFeature(@NonNull CameraProperties cameraProperties) {
+ return mockFpsRangeFeature;
+ }
+
+ @Override
+ public SensorOrientationFeature createSensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger) {
+ return mockSensorOrientationFeature;
+ }
+
+ @Override
+ public ZoomLevelFeature createZoomLevelFeature(@NonNull CameraProperties cameraProperties) {
+ return mockZoomLevelFeature;
+ }
+
+ @Override
+ public ExposurePointFeature createExposurePointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return mockExposurePointFeature;
+ }
+
+ @Override
+ public NoiseReductionFeature createNoiseReductionFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockNoiseReductionFeature;
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest_getRecordingProfileTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest_getRecordingProfileTest.java
new file mode 100644
index 0000000..04bab14
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest_getRecordingProfileTest.java
@@ -0,0 +1,205 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CaptureRequest;
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import android.os.Handler;
+import android.os.HandlerThread;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.features.CameraFeatureFactory;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+import io.flutter.view.TextureRegistry;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.MockedStatic;
+import org.robolectric.RobolectricTestRunner;
+import org.robolectric.annotation.Config;
+
+// Robolectric tests for Camera's recording-profile getters, split by SDK level:
+// the legacy CamcorderProfile path (SDK <= 30) and the EncoderProfiles path (SDK >= 31).
+@RunWith(RobolectricTestRunner.class)
+public class CameraTest_getRecordingProfileTest {
+
+ private CameraProperties mockCameraProperties;
+ private CameraFeatureFactory mockCameraFeatureFactory;
+ private DartMessenger mockDartMessenger;
+ private Camera camera;
+ // NOTE(review): the session/builder/handler fields below are declared but never
+ // assigned or used in this test class — confirm whether they can be removed.
+ private CameraCaptureSession mockCaptureSession;
+ private CaptureRequest.Builder mockPreviewRequestBuilder;
+ private MockedStatic<Camera.HandlerThreadFactory> mockHandlerThreadFactory;
+ private HandlerThread mockHandlerThread;
+ private MockedStatic<Camera.HandlerFactory> mockHandlerFactory;
+ private Handler mockHandler;
+
+ // Builds a Camera wired to mock properties, messenger, and the mock feature factory.
+ @Before
+ public void before() {
+ mockCameraProperties = mock(CameraProperties.class);
+ mockCameraFeatureFactory = new TestCameraFeatureFactory();
+ mockDartMessenger = mock(DartMessenger.class);
+
+ final Activity mockActivity = mock(Activity.class);
+ final TextureRegistry.SurfaceTextureEntry mockFlutterTexture =
+ mock(TextureRegistry.SurfaceTextureEntry.class);
+ final ResolutionPreset resolutionPreset = ResolutionPreset.high;
+ final boolean enableAudio = false;
+
+ camera =
+ new Camera(
+ mockActivity,
+ mockFlutterTexture,
+ mockCameraFeatureFactory,
+ mockDartMessenger,
+ mockCameraProperties,
+ resolutionPreset,
+ enableAudio);
+ }
+
+ // On SDK <= 30 the camera must delegate to the resolution feature's legacy
+ // CamcorderProfile getter and return its value unchanged.
+ @Config(maxSdk = 30)
+ @Test
+ public void getRecordingProfileLegacy() {
+ ResolutionFeature mockResolutionFeature =
+ mockCameraFeatureFactory.createResolutionFeature(mockCameraProperties, null, null);
+ CamcorderProfile mockCamcorderProfile = mock(CamcorderProfile.class);
+
+ when(mockResolutionFeature.getRecordingProfileLegacy()).thenReturn(mockCamcorderProfile);
+
+ CamcorderProfile actualRecordingProfile = camera.getRecordingProfileLegacy();
+
+ verify(mockResolutionFeature, times(1)).getRecordingProfileLegacy();
+ assertEquals(mockCamcorderProfile, actualRecordingProfile);
+ }
+
+ // On SDK >= 31 the camera must delegate to the resolution feature's
+ // EncoderProfiles getter and return its value unchanged.
+ @Config(minSdk = 31)
+ @Test
+ public void getRecordingProfile() {
+ ResolutionFeature mockResolutionFeature =
+ mockCameraFeatureFactory.createResolutionFeature(mockCameraProperties, null, null);
+ EncoderProfiles mockRecordingProfile = mock(EncoderProfiles.class);
+
+ when(mockResolutionFeature.getRecordingProfile()).thenReturn(mockRecordingProfile);
+
+ EncoderProfiles actualRecordingProfile = camera.getRecordingProfile();
+
+ verify(mockResolutionFeature, times(1)).getRecordingProfile();
+ assertEquals(mockRecordingProfile, actualRecordingProfile);
+ }
+
+ // Test double for CameraFeatureFactory: each create* method returns the same
+ // pre-built Mockito mock so tests can verify interactions through the factory.
+ private static class TestCameraFeatureFactory implements CameraFeatureFactory {
+ private final AutoFocusFeature mockAutoFocusFeature;
+ private final ExposureLockFeature mockExposureLockFeature;
+ private final ExposureOffsetFeature mockExposureOffsetFeature;
+ private final ExposurePointFeature mockExposurePointFeature;
+ private final FlashFeature mockFlashFeature;
+ private final FocusPointFeature mockFocusPointFeature;
+ private final FpsRangeFeature mockFpsRangeFeature;
+ private final NoiseReductionFeature mockNoiseReductionFeature;
+ private final ResolutionFeature mockResolutionFeature;
+ private final SensorOrientationFeature mockSensorOrientationFeature;
+ private final ZoomLevelFeature mockZoomLevelFeature;
+
+ public TestCameraFeatureFactory() {
+ this.mockAutoFocusFeature = mock(AutoFocusFeature.class);
+ this.mockExposureLockFeature = mock(ExposureLockFeature.class);
+ this.mockExposureOffsetFeature = mock(ExposureOffsetFeature.class);
+ this.mockExposurePointFeature = mock(ExposurePointFeature.class);
+ this.mockFlashFeature = mock(FlashFeature.class);
+ this.mockFocusPointFeature = mock(FocusPointFeature.class);
+ this.mockFpsRangeFeature = mock(FpsRangeFeature.class);
+ this.mockNoiseReductionFeature = mock(NoiseReductionFeature.class);
+ this.mockResolutionFeature = mock(ResolutionFeature.class);
+ this.mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ this.mockZoomLevelFeature = mock(ZoomLevelFeature.class);
+ }
+
+ @Override
+ public AutoFocusFeature createAutoFocusFeature(
+ @NonNull CameraProperties cameraProperties, boolean recordingVideo) {
+ return mockAutoFocusFeature;
+ }
+
+ @Override
+ public ExposureLockFeature createExposureLockFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockExposureLockFeature;
+ }
+
+ @Override
+ public ExposureOffsetFeature createExposureOffsetFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockExposureOffsetFeature;
+ }
+
+ @Override
+ public FlashFeature createFlashFeature(@NonNull CameraProperties cameraProperties) {
+ return mockFlashFeature;
+ }
+
+ @Override
+ public ResolutionFeature createResolutionFeature(
+ @NonNull CameraProperties cameraProperties,
+ ResolutionPreset initialSetting,
+ String cameraName) {
+ return mockResolutionFeature;
+ }
+
+ @Override
+ public FocusPointFeature createFocusPointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrienttionFeature) {
+ return mockFocusPointFeature;
+ }
+
+ @Override
+ public FpsRangeFeature createFpsRangeFeature(@NonNull CameraProperties cameraProperties) {
+ return mockFpsRangeFeature;
+ }
+
+ @Override
+ public SensorOrientationFeature createSensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger) {
+ return mockSensorOrientationFeature;
+ }
+
+ @Override
+ public ZoomLevelFeature createZoomLevelFeature(@NonNull CameraProperties cameraProperties) {
+ return mockZoomLevelFeature;
+ }
+
+ @Override
+ public ExposurePointFeature createExposurePointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return mockExposurePointFeature;
+ }
+
+ @Override
+ public NoiseReductionFeature createNoiseReductionFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockNoiseReductionFeature;
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraUtilsTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraUtilsTest.java
new file mode 100644
index 0000000..e59b05b
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraUtilsTest.java
@@ -0,0 +1,100 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import java.util.List;
+import java.util.Map;
+import org.junit.Test;
+
+public class CameraUtilsTest {
+
+ @Test
+ public void serializeDeviceOrientation_serializesCorrectly() {
+ assertEquals(
+ "portraitUp",
+ CameraUtils.serializeDeviceOrientation(PlatformChannel.DeviceOrientation.PORTRAIT_UP));
+ assertEquals(
+ "portraitDown",
+ CameraUtils.serializeDeviceOrientation(PlatformChannel.DeviceOrientation.PORTRAIT_DOWN));
+ assertEquals(
+ "landscapeLeft",
+ CameraUtils.serializeDeviceOrientation(PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT));
+ assertEquals(
+ "landscapeRight",
+ CameraUtils.serializeDeviceOrientation(PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT));
+ }
+
+ @Test(expected = UnsupportedOperationException.class)
+  public void serializeDeviceOrientation_throwsForNull() {
+ CameraUtils.serializeDeviceOrientation(null);
+ }
+
+ @Test
+ public void deserializeDeviceOrientation_deserializesCorrectly() {
+ assertEquals(
+ PlatformChannel.DeviceOrientation.PORTRAIT_UP,
+ CameraUtils.deserializeDeviceOrientation("portraitUp"));
+ assertEquals(
+ PlatformChannel.DeviceOrientation.PORTRAIT_DOWN,
+ CameraUtils.deserializeDeviceOrientation("portraitDown"));
+ assertEquals(
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT,
+ CameraUtils.deserializeDeviceOrientation("landscapeLeft"));
+ assertEquals(
+ PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT,
+ CameraUtils.deserializeDeviceOrientation("landscapeRight"));
+ }
+
+ @Test(expected = UnsupportedOperationException.class)
+ public void deserializeDeviceOrientation_throwsForNull() {
+ CameraUtils.deserializeDeviceOrientation(null);
+ }
+
+ @Test
+ public void getAvailableCameras_retrievesValidCameras()
+ throws CameraAccessException, NumberFormatException {
+ final Activity mockActivity = mock(Activity.class);
+ final CameraManager mockCameraManager = mock(CameraManager.class);
+ final CameraCharacteristics mockCameraCharacteristics = mock(CameraCharacteristics.class);
+ final String[] mockCameraIds = {"1394902", "-192930", "0283835", "foobar"};
+ final int mockSensorOrientation0 = 90;
+ final int mockSensorOrientation2 = 270;
+ final int mockLensFacing0 = CameraMetadata.LENS_FACING_FRONT;
+ final int mockLensFacing2 = CameraMetadata.LENS_FACING_EXTERNAL;
+
+ when(mockActivity.getSystemService(Context.CAMERA_SERVICE)).thenReturn(mockCameraManager);
+ when(mockCameraManager.getCameraIdList()).thenReturn(mockCameraIds);
+ when(mockCameraManager.getCameraCharacteristics(anyString()))
+ .thenReturn(mockCameraCharacteristics);
+ when(mockCameraCharacteristics.get(any()))
+ .thenReturn(mockSensorOrientation0)
+ .thenReturn(mockLensFacing0)
+ .thenReturn(mockSensorOrientation2)
+ .thenReturn(mockLensFacing2);
+
+ List<Map<String, Object>> availableCameras = CameraUtils.getAvailableCameras(mockActivity);
+
+    assertEquals(2, availableCameras.size());
+    assertEquals("1394902", availableCameras.get(0).get("name"));
+    assertEquals(mockSensorOrientation0, availableCameras.get(0).get("sensorOrientation"));
+    assertEquals("front", availableCameras.get(0).get("lensFacing"));
+    assertEquals("0283835", availableCameras.get(1).get("name"));
+    assertEquals(mockSensorOrientation2, availableCameras.get(1).get("sensorOrientation"));
+    assertEquals("external", availableCameras.get(1).get("lensFacing"));
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/DartMessengerTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/DartMessengerTest.java
new file mode 100644
index 0000000..0a2fc43
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/DartMessengerTest.java
@@ -0,0 +1,135 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static junit.framework.TestCase.assertNull;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+
+import android.os.Handler;
+import androidx.annotation.NonNull;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugin.common.MethodCall;
+import io.flutter.plugin.common.StandardMethodCodec;
+import io.flutter.plugins.camera.features.autofocus.FocusMode;
+import io.flutter.plugins.camera.features.exposurelock.ExposureMode;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+public class DartMessengerTest {
+ /** A {@link BinaryMessenger} implementation that does nothing but save its messages. */
+ private static class FakeBinaryMessenger implements BinaryMessenger {
+ private final List<ByteBuffer> sentMessages = new ArrayList<>();
+
+ @Override
+ public void send(@NonNull String channel, ByteBuffer message) {
+ sentMessages.add(message);
+ }
+
+ @Override
+ public void send(@NonNull String channel, ByteBuffer message, BinaryReply callback) {
+ send(channel, message);
+ }
+
+ @Override
+ public void setMessageHandler(@NonNull String channel, BinaryMessageHandler handler) {}
+
+ List<ByteBuffer> getMessages() {
+ return new ArrayList<>(sentMessages);
+ }
+ }
+
+ private Handler mockHandler;
+ private DartMessenger dartMessenger;
+ private FakeBinaryMessenger fakeBinaryMessenger;
+
+ @Before
+ public void setUp() {
+ mockHandler = mock(Handler.class);
+ fakeBinaryMessenger = new FakeBinaryMessenger();
+ dartMessenger = new DartMessenger(fakeBinaryMessenger, 0, mockHandler);
+ }
+
+ @Test
+ public void sendCameraErrorEvent_includesErrorDescriptions() {
+ doAnswer(createPostHandlerAnswer()).when(mockHandler).post(any(Runnable.class));
+
+ dartMessenger.sendCameraErrorEvent("error description");
+ List<ByteBuffer> sentMessages = fakeBinaryMessenger.getMessages();
+
+ assertEquals(1, sentMessages.size());
+ MethodCall call = decodeSentMessage(sentMessages.get(0));
+ assertEquals("error", call.method);
+ assertEquals("error description", call.argument("description"));
+ }
+
+ @Test
+ public void sendCameraInitializedEvent_includesPreviewSize() {
+ doAnswer(createPostHandlerAnswer()).when(mockHandler).post(any(Runnable.class));
+ dartMessenger.sendCameraInitializedEvent(0, 0, ExposureMode.auto, FocusMode.auto, true, true);
+
+ List<ByteBuffer> sentMessages = fakeBinaryMessenger.getMessages();
+ assertEquals(1, sentMessages.size());
+ MethodCall call = decodeSentMessage(sentMessages.get(0));
+ assertEquals("initialized", call.method);
+ assertEquals(0, (double) call.argument("previewWidth"), 0);
+ assertEquals(0, (double) call.argument("previewHeight"), 0);
+ assertEquals("ExposureMode auto", call.argument("exposureMode"), "auto");
+    assertEquals("FocusMode auto", call.argument("focusMode"), "auto");
+ assertEquals("exposurePointSupported", call.argument("exposurePointSupported"), true);
+ assertEquals("focusPointSupported", call.argument("focusPointSupported"), true);
+ }
+
+ @Test
+ public void sendCameraClosingEvent() {
+ doAnswer(createPostHandlerAnswer()).when(mockHandler).post(any(Runnable.class));
+ dartMessenger.sendCameraClosingEvent();
+
+ List<ByteBuffer> sentMessages = fakeBinaryMessenger.getMessages();
+ assertEquals(1, sentMessages.size());
+ MethodCall call = decodeSentMessage(sentMessages.get(0));
+ assertEquals("camera_closing", call.method);
+ assertNull(call.argument("description"));
+ }
+
+ @Test
+ public void sendDeviceOrientationChangedEvent() {
+ doAnswer(createPostHandlerAnswer()).when(mockHandler).post(any(Runnable.class));
+ dartMessenger.sendDeviceOrientationChangeEvent(PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+
+ List<ByteBuffer> sentMessages = fakeBinaryMessenger.getMessages();
+ assertEquals(1, sentMessages.size());
+ MethodCall call = decodeSentMessage(sentMessages.get(0));
+ assertEquals("orientation_changed", call.method);
+ assertEquals(call.argument("orientation"), "portraitUp");
+ }
+
+ private static Answer<Boolean> createPostHandlerAnswer() {
+ return new Answer<Boolean>() {
+ @Override
+ public Boolean answer(InvocationOnMock invocation) throws Throwable {
+ Runnable runnable = invocation.getArgument(0, Runnable.class);
+ if (runnable != null) {
+ runnable.run();
+ }
+ return true;
+ }
+ };
+ }
+
+ private MethodCall decodeSentMessage(ByteBuffer sentMessage) {
+ sentMessage.position(0);
+
+ return StandardMethodCodec.INSTANCE.decodeMethodCall(sentMessage);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/ImageSaverTests.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/ImageSaverTests.java
new file mode 100644
index 0000000..0358ce6
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/ImageSaverTests.java
@@ -0,0 +1,105 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.media.Image;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+public class ImageSaverTests {
+
+ Image mockImage;
+ File mockFile;
+ ImageSaver.Callback mockCallback;
+ ImageSaver imageSaver;
+ Image.Plane mockPlane;
+ ByteBuffer mockBuffer;
+ MockedStatic<ImageSaver.FileOutputStreamFactory> mockFileOutputStreamFactory;
+ FileOutputStream mockFileOutputStream;
+
+ @Before
+ public void setup() {
+ // Set up mocked file dependency
+ mockFile = mock(File.class);
+ when(mockFile.getAbsolutePath()).thenReturn("absolute/path");
+ mockPlane = mock(Image.Plane.class);
+ mockBuffer = mock(ByteBuffer.class);
+ when(mockBuffer.remaining()).thenReturn(3);
+ when(mockBuffer.get(any()))
+ .thenAnswer(
+ new Answer<Object>() {
+ @Override
+ public Object answer(InvocationOnMock invocation) throws Throwable {
+ byte[] bytes = invocation.getArgument(0);
+ bytes[0] = 0x42;
+ bytes[1] = 0x00;
+ bytes[2] = 0x13;
+ return mockBuffer;
+ }
+ });
+
+ // Set up mocked image dependency
+ mockImage = mock(Image.class);
+ when(mockPlane.getBuffer()).thenReturn(mockBuffer);
+ when(mockImage.getPlanes()).thenReturn(new Image.Plane[] {mockPlane});
+
+ // Set up mocked FileOutputStream
+ mockFileOutputStreamFactory = mockStatic(ImageSaver.FileOutputStreamFactory.class);
+ mockFileOutputStream = mock(FileOutputStream.class);
+ mockFileOutputStreamFactory
+ .when(() -> ImageSaver.FileOutputStreamFactory.create(any()))
+ .thenReturn(mockFileOutputStream);
+
+ // Set up testable ImageSaver instance
+ mockCallback = mock(ImageSaver.Callback.class);
+ imageSaver = new ImageSaver(mockImage, mockFile, mockCallback);
+ }
+
+ @After
+ public void teardown() {
+ mockFileOutputStreamFactory.close();
+ }
+
+ @Test
+ public void runWritesBytesToFileAndFinishesWithPath() throws IOException {
+ imageSaver.run();
+
+ verify(mockFileOutputStream, times(1)).write(new byte[] {0x42, 0x00, 0x13});
+ verify(mockCallback, times(1)).onComplete("absolute/path");
+ verify(mockCallback, never()).onError(any(), any());
+ }
+
+ @Test
+ public void runCallsErrorOnWriteIoexception() throws IOException {
+ doThrow(new IOException()).when(mockFileOutputStream).write(any());
+ imageSaver.run();
+ verify(mockCallback, times(1)).onError("IOError", "Failed saving image");
+ verify(mockCallback, never()).onComplete(any());
+ }
+
+ @Test
+ public void runCallsErrorOnCloseIoexception() throws IOException {
+ doThrow(new IOException("message")).when(mockFileOutputStream).close();
+ imageSaver.run();
+ verify(mockCallback, times(1)).onError("cameraAccess", "message");
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/MethodCallHandlerImplTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/MethodCallHandlerImplTest.java
new file mode 100644
index 0000000..868e2e9
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/MethodCallHandlerImplTest.java
@@ -0,0 +1,77 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertFalse;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraAccessException;
+import androidx.lifecycle.LifecycleObserver;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugin.common.MethodCall;
+import io.flutter.plugin.common.MethodChannel;
+import io.flutter.plugins.camera.utils.TestUtils;
+import io.flutter.view.TextureRegistry;
+import org.junit.Before;
+import org.junit.Test;
+
+public class MethodCallHandlerImplTest {
+
+ MethodChannel.MethodCallHandler handler;
+ MethodChannel.Result mockResult;
+ Camera mockCamera;
+
+ @Before
+ public void setUp() {
+ handler =
+ new MethodCallHandlerImpl(
+ mock(Activity.class),
+ mock(BinaryMessenger.class),
+ mock(CameraPermissions.class),
+ mock(CameraPermissions.PermissionsRegistry.class),
+ mock(TextureRegistry.class));
+ mockResult = mock(MethodChannel.Result.class);
+ mockCamera = mock(Camera.class);
+ TestUtils.setPrivateField(handler, "camera", mockCamera);
+ }
+
+ @Test
+ public void shouldNotImplementLifecycleObserverInterface() {
+ Class<MethodCallHandlerImpl> methodCallHandlerClass = MethodCallHandlerImpl.class;
+
+ assertFalse(LifecycleObserver.class.isAssignableFrom(methodCallHandlerClass));
+ }
+
+ @Test
+ public void onMethodCall_pausePreview_shouldPausePreviewAndSendSuccessResult()
+ throws CameraAccessException {
+ handler.onMethodCall(new MethodCall("pausePreview", null), mockResult);
+
+ verify(mockCamera, times(1)).pausePreview();
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void onMethodCall_pausePreview_shouldSendErrorResultOnCameraAccessException()
+ throws CameraAccessException {
+ doThrow(new CameraAccessException(0)).when(mockCamera).pausePreview();
+
+ handler.onMethodCall(new MethodCall("pausePreview", null), mockResult);
+
+ verify(mockResult, times(1)).error("CameraAccess", null, null);
+ }
+
+ @Test
+ public void onMethodCall_resumePreview_shouldResumePreviewAndSendSuccessResult() {
+ handler.onMethodCall(new MethodCall("resumePreview", null), mockResult);
+
+ verify(mockCamera, times(1)).resumePreview();
+ verify(mockResult, times(1)).success(null);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/autofocus/AutoFocusFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/autofocus/AutoFocusFeatureTest.java
new file mode 100644
index 0000000..fd8ef7c
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/autofocus/AutoFocusFeatureTest.java
@@ -0,0 +1,176 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.autofocus;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import io.flutter.plugins.camera.CameraProperties;
+import org.junit.Test;
+
+public class AutoFocusFeatureTest {
+ private static final int[] FOCUS_MODES_ONLY_OFF =
+ new int[] {CameraCharacteristics.CONTROL_AF_MODE_OFF};
+ private static final int[] FOCUS_MODES =
+ new int[] {
+ CameraCharacteristics.CONTROL_AF_MODE_OFF, CameraCharacteristics.CONTROL_AF_MODE_AUTO
+ };
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ assertEquals("AutoFocusFeature", autoFocusFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnAutoIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ assertEquals(FocusMode.auto, autoFocusFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+ FocusMode expectedValue = FocusMode.locked;
+
+ autoFocusFeature.setValue(expectedValue);
+ FocusMode actualValue = autoFocusFeature.getValue();
+
+ assertEquals(expectedValue, actualValue);
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenMinimumFocusDistanceIsZero() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(FOCUS_MODES);
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(0.0F);
+
+ assertFalse(autoFocusFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenMinimumFocusDistanceIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(FOCUS_MODES);
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(null);
+
+ assertFalse(autoFocusFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupport_shouldReturnFalseWhenNoFocusModesAreAvailable() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(new int[] {});
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(1.0F);
+
+ assertFalse(autoFocusFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupport_shouldReturnFalseWhenOnlyFocusOffIsAvailable() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(FOCUS_MODES_ONLY_OFF);
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(1.0F);
+
+ assertFalse(autoFocusFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupport_shouldReturnTrueWhenOnlyMultipleFocusModesAreAvailable() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(FOCUS_MODES);
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(1.0F);
+
+ assertTrue(autoFocusFeature.checkIsSupported());
+ }
+
+ @Test
+  public void updateBuilder_shouldReturnWhenCheckIsSupportedIsFalse() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(FOCUS_MODES);
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(0.0F);
+
+ autoFocusFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, never()).set(any(), any());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetControlModeToAutoWhenFocusIsLocked() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(FOCUS_MODES);
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(1.0F);
+
+ autoFocusFeature.setValue(FocusMode.locked);
+ autoFocusFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
+ }
+
+ @Test
+ public void
+ updateBuilder_shouldSetControlModeToContinuousVideoWhenFocusIsAutoAndRecordingVideo() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, true);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(FOCUS_MODES);
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(1.0F);
+
+ autoFocusFeature.setValue(FocusMode.auto);
+ autoFocusFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
+ }
+
+ @Test
+ public void
+      updateBuilder_shouldSetControlModeToContinuousPictureWhenFocusIsAutoAndNotRecordingVideo() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ AutoFocusFeature autoFocusFeature = new AutoFocusFeature(mockCameraProperties, false);
+
+ when(mockCameraProperties.getControlAutoFocusAvailableModes()).thenReturn(FOCUS_MODES);
+ when(mockCameraProperties.getLensInfoMinimumFocusDistance()).thenReturn(1.0F);
+
+ autoFocusFeature.setValue(FocusMode.auto);
+ autoFocusFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/autofocus/FocusModeTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/autofocus/FocusModeTest.java
new file mode 100644
index 0000000..f68ae71
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/autofocus/FocusModeTest.java
@@ -0,0 +1,34 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.autofocus;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class FocusModeTest {
+
+ @Test
+ public void getValueForString_returnsCorrectValues() {
+ assertEquals(
+ "Returns FocusMode.auto for 'auto'", FocusMode.getValueForString("auto"), FocusMode.auto);
+ assertEquals(
+ "Returns FocusMode.locked for 'locked'",
+ FocusMode.getValueForString("locked"),
+ FocusMode.locked);
+ }
+
+ @Test
+  public void getValueForString_returnsNullForNonexistentValue() {
+    assertEquals(
+        "Returns null for 'nonexistent'", FocusMode.getValueForString("nonexistent"), null);
+ }
+
+ @Test
+ public void toString_returnsCorrectValue() {
+ assertEquals("Returns 'auto' for FocusMode.auto", FocusMode.auto.toString(), "auto");
+ assertEquals("Returns 'locked' for FocusMode.locked", FocusMode.locked.toString(), "locked");
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurelock/ExposureLockFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurelock/ExposureLockFeatureTest.java
new file mode 100644
index 0000000..1cda0a8
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurelock/ExposureLockFeatureTest.java
@@ -0,0 +1,77 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.exposurelock;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import android.hardware.camera2.CaptureRequest;
+import io.flutter.plugins.camera.CameraProperties;
+import org.junit.Test;
+
+public class ExposureLockFeatureTest {
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureLockFeature exposureLockFeature = new ExposureLockFeature(mockCameraProperties);
+
+ assertEquals("ExposureLockFeature", exposureLockFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnAutoIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureLockFeature exposureLockFeature = new ExposureLockFeature(mockCameraProperties);
+
+ assertEquals(ExposureMode.auto, exposureLockFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureLockFeature exposureLockFeature = new ExposureLockFeature(mockCameraProperties);
+ ExposureMode expectedValue = ExposureMode.locked;
+
+ exposureLockFeature.setValue(expectedValue);
+ ExposureMode actualValue = exposureLockFeature.getValue();
+
+ assertEquals(expectedValue, actualValue);
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnTrue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureLockFeature exposureLockFeature = new ExposureLockFeature(mockCameraProperties);
+
+ assertTrue(exposureLockFeature.checkIsSupported());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetControlAeLockToFalseWhenAutoExposureIsSetToAuto() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ ExposureLockFeature exposureLockFeature = new ExposureLockFeature(mockCameraProperties);
+
+ exposureLockFeature.setValue(ExposureMode.auto);
+ exposureLockFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1)).set(CaptureRequest.CONTROL_AE_LOCK, false);
+ }
+
+ @Test
+  public void updateBuilder_shouldSetControlAeLockToTrueWhenAutoExposureIsSetToLocked() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ ExposureLockFeature exposureLockFeature = new ExposureLockFeature(mockCameraProperties);
+
+ exposureLockFeature.setValue(ExposureMode.locked);
+ exposureLockFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1)).set(CaptureRequest.CONTROL_AE_LOCK, true);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurelock/ExposureModeTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurelock/ExposureModeTest.java
new file mode 100644
index 0000000..d5d4769
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurelock/ExposureModeTest.java
@@ -0,0 +1,37 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.exposurelock;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class ExposureModeTest {
+
+ @Test
+ public void getValueForString_returnsCorrectValues() {
+ assertEquals(
+ "Returns ExposureMode.auto for 'auto'",
+ ExposureMode.getValueForString("auto"),
+ ExposureMode.auto);
+ assertEquals(
+ "Returns ExposureMode.locked for 'locked'",
+ ExposureMode.getValueForString("locked"),
+ ExposureMode.locked);
+ }
+
+ @Test
+  public void getValueForString_returnsNullForNonexistentValue() {
+    assertEquals(
+        "Returns null for 'nonexistent'", ExposureMode.getValueForString("nonexistent"), null);
+ }
+
+ @Test
+ public void toString_returnsCorrectValue() {
+ assertEquals("Returns 'auto' for ExposureMode.auto", ExposureMode.auto.toString(), "auto");
+ assertEquals(
+ "Returns 'locked' for ExposureMode.locked", ExposureMode.locked.toString(), "locked");
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposureoffset/ExposureOffsetFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposureoffset/ExposureOffsetFeatureTest.java
new file mode 100644
index 0000000..ee428f3
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposureoffset/ExposureOffsetFeatureTest.java
@@ -0,0 +1,82 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.exposureoffset;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import io.flutter.plugins.camera.CameraProperties;
+import org.junit.Test;
+
+public class ExposureOffsetFeatureTest {
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureOffsetFeature exposureOffsetFeature = new ExposureOffsetFeature(mockCameraProperties);
+
+ assertEquals("ExposureOffsetFeature", exposureOffsetFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnZeroIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureOffsetFeature exposureOffsetFeature = new ExposureOffsetFeature(mockCameraProperties);
+
+ final double actualValue = exposureOffsetFeature.getValue();
+
+ assertEquals(0.0, actualValue, 0);
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureOffsetFeature exposureOffsetFeature = new ExposureOffsetFeature(mockCameraProperties);
+ double expectedValue = 4.0;
+
+ when(mockCameraProperties.getControlAutoExposureCompensationStep()).thenReturn(0.5);
+
+ exposureOffsetFeature.setValue(2.0);
+ double actualValue = exposureOffsetFeature.getValue();
+
+ assertEquals(expectedValue, actualValue, 0);
+ }
+
+ @Test
+ public void getExposureOffsetStepSize_shouldReturnTheControlExposureCompensationStepValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureOffsetFeature exposureOffsetFeature = new ExposureOffsetFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getControlAutoExposureCompensationStep()).thenReturn(0.5);
+
+ assertEquals(0.5, exposureOffsetFeature.getExposureOffsetStepSize(), 0);
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnTrue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposureOffsetFeature exposureOffsetFeature = new ExposureOffsetFeature(mockCameraProperties);
+
+ assertTrue(exposureOffsetFeature.checkIsSupported());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetControlAeExposureCompensationToOffset() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ ExposureOffsetFeature exposureOffsetFeature = new ExposureOffsetFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getControlAutoExposureCompensationStep()).thenReturn(0.5);
+
+ exposureOffsetFeature.setValue(2.0);
+ exposureOffsetFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1)).set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 4);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurepoint/ExposurePointFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurepoint/ExposurePointFeatureTest.java
new file mode 100644
index 0000000..b34a04f
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/exposurepoint/ExposurePointFeatureTest.java
@@ -0,0 +1,316 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.exposurepoint;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.isNull;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.util.Size;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.CameraRegionUtils;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.sensororientation.DeviceOrientationManager;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+
+public class ExposurePointFeatureTest {
+
+ Size mockCameraBoundaries;
+ SensorOrientationFeature mockSensorOrientationFeature;
+ DeviceOrientationManager mockDeviceOrientationManager;
+
+ @Before
+ public void setUp() {
+ this.mockCameraBoundaries = mock(Size.class);
+ when(this.mockCameraBoundaries.getWidth()).thenReturn(100);
+ when(this.mockCameraBoundaries.getHeight()).thenReturn(100);
+ mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ mockDeviceOrientationManager = mock(DeviceOrientationManager.class);
+ when(mockSensorOrientationFeature.getDeviceOrientationManager())
+ .thenReturn(mockDeviceOrientationManager);
+ when(mockDeviceOrientationManager.getLastUIOrientation())
+ .thenReturn(PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ assertEquals("ExposurePointFeature", exposurePointFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnNullIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ assertNull(exposurePointFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ Point expectedPoint = new Point(0.0, 0.0);
+
+ exposurePointFeature.setValue(expectedPoint);
+ Point actualPoint = exposurePointFeature.getValue();
+
+ assertEquals(expectedPoint, actualPoint);
+ }
+
+ @Test
+ public void setValue_shouldResetPointWhenXCoordIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ exposurePointFeature.setValue(new Point(null, 0.0));
+
+ assertNull(exposurePointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldResetPointWhenYCoordIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ exposurePointFeature.setValue(new Point(0.0, null));
+
+ assertNull(exposurePointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldSetPointWhenValidCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ Point point = new Point(0.0, 0.0);
+
+ exposurePointFeature.setValue(point);
+
+ assertEquals(point, exposurePointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldDetermineMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(1);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ exposurePointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ exposurePointFeature.setValue(new Point(0.5, 0.5));
+
+ mockedCameraRegionUtils.verify(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT),
+ times(1));
+ }
+ }
+
+ @Test(expected = AssertionError.class)
+ public void setValue_shouldThrowAssertionErrorWhenNoValidBoundariesAreSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(1);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+ exposurePointFeature.setValue(new Point(0.5, 0.5));
+ }
+ }
+
+ @Test
+ public void setValue_shouldNotDetermineMeteringRectangleWhenNullCoordsAreSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(1);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ exposurePointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ exposurePointFeature.setValue(null);
+ exposurePointFeature.setValue(new Point(null, 0.5));
+ exposurePointFeature.setValue(new Point(0.5, null));
+
+ mockedCameraRegionUtils.verifyNoInteractions();
+ }
+ }
+
+ @Test
+ public void
+ setCameraBoundaries_shouldDetermineMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(1);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ exposurePointFeature.setValue(new Point(0.5, 0.5));
+ Size mockedCameraBoundaries = mock(Size.class);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ exposurePointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
+ mockedCameraRegionUtils.verify(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT),
+ times(1));
+ }
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenMaxRegionsIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(null);
+
+ assertFalse(exposurePointFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenMaxRegionsIsZero() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(0);
+
+ assertFalse(exposurePointFeature.checkIsSupported());
+ }
+
+ @Test
+  public void checkIsSupported_shouldReturnTrueWhenMaxRegionsIsBiggerThanZero() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(1);
+
+ assertTrue(exposurePointFeature.checkIsSupported());
+ }
+
+ @Test
+ public void updateBuilder_shouldReturnWhenCheckIsSupportedIsFalse() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(0);
+
+ exposurePointFeature.updateBuilder(mockCaptureRequestBuilder);
+
+ verify(mockCaptureRequestBuilder, never()).set(any(), any());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ MeteringRectangle mockedMeteringRectangle = mock(MeteringRectangle.class);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+ mockedCameraRegionUtils
+ .when(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT))
+ .thenReturn(mockedMeteringRectangle);
+ exposurePointFeature.setCameraBoundaries(mockedCameraBoundaries);
+ exposurePointFeature.setValue(new Point(0.5, 0.5));
+
+ exposurePointFeature.updateBuilder(mockCaptureRequestBuilder);
+ }
+
+ verify(mockCaptureRequestBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {mockedMeteringRectangle});
+ }
+
+ @Test
+ public void updateBuilder_shouldNotSetMeteringRectangleWhenNoValidBoundariesAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ exposurePointFeature.updateBuilder(mockCaptureRequestBuilder);
+
+ verify(mockCaptureRequestBuilder, times(1)).set(any(), isNull());
+ }
+
+ @Test
+ public void updateBuilder_shouldNotSetMeteringRectangleWhenNoValidCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoExposure()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ ExposurePointFeature exposurePointFeature =
+ new ExposurePointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ exposurePointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ exposurePointFeature.setValue(null);
+ exposurePointFeature.updateBuilder(mockCaptureRequestBuilder);
+ exposurePointFeature.setValue(new Point(0d, null));
+ exposurePointFeature.updateBuilder(mockCaptureRequestBuilder);
+ exposurePointFeature.setValue(new Point(null, 0d));
+ exposurePointFeature.updateBuilder(mockCaptureRequestBuilder);
+ verify(mockCaptureRequestBuilder, times(3)).set(any(), isNull());
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/flash/FlashFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/flash/FlashFeatureTest.java
new file mode 100644
index 0000000..f2b4ffc
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/flash/FlashFeatureTest.java
@@ -0,0 +1,156 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.flash;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import io.flutter.plugins.camera.CameraProperties;
+import org.junit.Test;
+
+public class FlashFeatureTest {
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ assertEquals("FlashFeature", flashFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnAutoIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ assertEquals(FlashMode.auto, flashFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+ FlashMode expectedValue = FlashMode.torch;
+
+ flashFeature.setValue(expectedValue);
+ FlashMode actualValue = flashFeature.getValue();
+
+ assertEquals(expectedValue, actualValue);
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenFlashInfoAvailableIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getFlashInfoAvailable()).thenReturn(null);
+
+ assertFalse(flashFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenFlashInfoAvailableIsFalse() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getFlashInfoAvailable()).thenReturn(false);
+
+ assertFalse(flashFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnTrueWhenFlashInfoAvailableIsTrue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getFlashInfoAvailable()).thenReturn(true);
+
+ assertTrue(flashFeature.checkIsSupported());
+ }
+
+ @Test
+ public void updateBuilder_shouldReturnWhenCheckIsSupportedIsFalse() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getFlashInfoAvailable()).thenReturn(false);
+
+ flashFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, never()).set(any(), any());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetAeModeAndFlashModeWhenFlashModeIsOff() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getFlashInfoAvailable()).thenReturn(true);
+
+ flashFeature.setValue(FlashMode.off);
+ flashFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+ verify(mockBuilder, times(1)).set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetAeModeAndFlashModeWhenFlashModeIsAlways() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getFlashInfoAvailable()).thenReturn(true);
+
+ flashFeature.setValue(FlashMode.always);
+ flashFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
+ verify(mockBuilder, times(1)).set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetAeModeAndFlashModeWhenFlashModeIsTorch() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getFlashInfoAvailable()).thenReturn(true);
+
+ flashFeature.setValue(FlashMode.torch);
+ flashFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+ verify(mockBuilder, times(1)).set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetAeModeAndFlashModeWhenFlashModeIsAuto() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ FlashFeature flashFeature = new FlashFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getFlashInfoAvailable()).thenReturn(true);
+
+ flashFeature.setValue(FlashMode.auto);
+ flashFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
+ verify(mockBuilder, times(1)).set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeatureTest.java
new file mode 100644
index 0000000..f03dc9f
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeatureTest.java
@@ -0,0 +1,318 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.focuspoint;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.isNull;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.util.Size;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.CameraRegionUtils;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.sensororientation.DeviceOrientationManager;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+
+public class FocusPointFeatureTest {
+
+ Size mockCameraBoundaries;
+ SensorOrientationFeature mockSensorOrientationFeature;
+ DeviceOrientationManager mockDeviceOrientationManager;
+
+ @Before
+ public void setUp() {
+ this.mockCameraBoundaries = mock(Size.class);
+ when(this.mockCameraBoundaries.getWidth()).thenReturn(100);
+ when(this.mockCameraBoundaries.getHeight()).thenReturn(100);
+ mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ mockDeviceOrientationManager = mock(DeviceOrientationManager.class);
+ when(mockSensorOrientationFeature.getDeviceOrientationManager())
+ .thenReturn(mockDeviceOrientationManager);
+ when(mockDeviceOrientationManager.getLastUIOrientation())
+ .thenReturn(PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ assertEquals("FocusPointFeature", focusPointFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnNullIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+    Point actualPoint = focusPointFeature.getValue();
+    assertNull(actualPoint);
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ Point expectedPoint = new Point(0.0, 0.0);
+
+ focusPointFeature.setValue(expectedPoint);
+ Point actualPoint = focusPointFeature.getValue();
+
+ assertEquals(expectedPoint, actualPoint);
+ }
+
+ @Test
+ public void setValue_shouldResetPointWhenXCoordIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ focusPointFeature.setValue(new Point(null, 0.0));
+
+ assertNull(focusPointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldResetPointWhenYCoordIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ focusPointFeature.setValue(new Point(0.0, null));
+
+ assertNull(focusPointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldSetPointWhenValidCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ Point point = new Point(0.0, 0.0);
+
+ focusPointFeature.setValue(point);
+
+ assertEquals(point, focusPointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldDetermineMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ focusPointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ focusPointFeature.setValue(new Point(0.5, 0.5));
+
+ mockedCameraRegionUtils.verify(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT),
+ times(1));
+ }
+ }
+
+ @Test(expected = AssertionError.class)
+ public void setValue_shouldThrowAssertionErrorWhenNoValidBoundariesAreSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+ focusPointFeature.setValue(new Point(0.5, 0.5));
+ }
+ }
+
+ @Test
+ public void setValue_shouldNotDetermineMeteringRectangleWhenNullCoordsAreSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ focusPointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ focusPointFeature.setValue(null);
+ focusPointFeature.setValue(new Point(null, 0.5));
+ focusPointFeature.setValue(new Point(0.5, null));
+
+ mockedCameraRegionUtils.verifyNoInteractions();
+ }
+ }
+
+ @Test
+ public void
+ setCameraBoundaries_shouldDetermineMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ focusPointFeature.setValue(new Point(0.5, 0.5));
+ Size mockedCameraBoundaries = mock(Size.class);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ focusPointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
+ mockedCameraRegionUtils.verify(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT),
+ times(1));
+ }
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenMaxRegionsIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(null);
+
+ assertFalse(focusPointFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenMaxRegionsIsZero() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(0);
+
+ assertFalse(focusPointFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnTrueWhenMaxRegionsIsBiggerThenZero() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+
+ assertTrue(focusPointFeature.checkIsSupported());
+ }
+
+ @Test
+ public void updateBuilder_shouldReturnWhenCheckIsSupportedIsFalse() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(0);
+
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+
+ verify(mockCaptureRequestBuilder, never()).set(any(), any());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ MeteringRectangle mockedMeteringRectangle = mock(MeteringRectangle.class);
+
+ try (MockedStatic<CameraRegionUtils> mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+ mockedCameraRegionUtils
+ .when(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT))
+ .thenReturn(mockedMeteringRectangle);
+ focusPointFeature.setCameraBoundaries(mockedCameraBoundaries);
+ focusPointFeature.setValue(new Point(0.5, 0.5));
+
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+ }
+
+ verify(mockCaptureRequestBuilder, times(1))
+        .set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] {mockedMeteringRectangle});
+ }
+
+ @Test
+ public void updateBuilder_shouldNotSetMeteringRectangleWhenNoValidBoundariesAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ MeteringRectangle mockedMeteringRectangle = mock(MeteringRectangle.class);
+
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+
+ verify(mockCaptureRequestBuilder, times(1)).set(any(), isNull());
+ }
+
+ @Test
+ public void updateBuilder_shouldNotSetMeteringRectangleWhenNoValidCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ focusPointFeature.setValue(null);
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+ focusPointFeature.setValue(new Point(0d, null));
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+ focusPointFeature.setValue(new Point(null, 0d));
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+ verify(mockCaptureRequestBuilder, times(3)).set(any(), isNull());
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeaturePixel4aTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeaturePixel4aTest.java
new file mode 100644
index 0000000..93cfe55
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeaturePixel4aTest.java
@@ -0,0 +1,30 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.fpsrange;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
+import android.os.Build;
+import android.util.Range;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.utils.TestUtils;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.RobolectricTestRunner;
+
+@RunWith(RobolectricTestRunner.class)
+public class FpsRangeFeaturePixel4aTest {
+ @Test
+ public void ctor_shouldInitializeFpsRangeWith30WhenDeviceIsPixel4a() {
+ TestUtils.setFinalStatic(Build.class, "BRAND", "google");
+ TestUtils.setFinalStatic(Build.class, "MODEL", "Pixel 4a");
+
+ FpsRangeFeature fpsRangeFeature = new FpsRangeFeature(mock(CameraProperties.class));
+ Range<Integer> range = fpsRangeFeature.getValue();
+ assertEquals(30, (int) range.getLower());
+ assertEquals(30, (int) range.getUpper());
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeatureTest.java
new file mode 100644
index 0000000..2bb4d84
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeatureTest.java
@@ -0,0 +1,108 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.fpsrange;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import android.util.Range;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.utils.TestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class FpsRangeFeatureTest {
+ @Before
+ public void before() {
+ TestUtils.setFinalStatic(Build.class, "BRAND", "Test Brand");
+ TestUtils.setFinalStatic(Build.class, "MODEL", "Test Model");
+ }
+
+ @After
+ public void after() {
+ TestUtils.setFinalStatic(Build.class, "BRAND", null);
+ TestUtils.setFinalStatic(Build.class, "MODEL", null);
+ }
+
+ @Test
+ public void ctor_shouldInitializeFpsRangeWithHighestUpperValueFromRangeArray() {
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+ assertEquals(13, (int) fpsRangeFeature.getValue().getUpper());
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+ assertEquals("FpsRangeFeature", fpsRangeFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnHighestUpperRangeIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+
+ assertEquals(13, (int) fpsRangeFeature.getValue().getUpper());
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FpsRangeFeature fpsRangeFeature = new FpsRangeFeature(mockCameraProperties);
+ @SuppressWarnings("unchecked")
+ Range<Integer> expectedValue = mock(Range.class);
+
+ fpsRangeFeature.setValue(expectedValue);
+ Range<Integer> actualValue = fpsRangeFeature.getValue();
+
+ assertEquals(expectedValue, actualValue);
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnTrue() {
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+ assertTrue(fpsRangeFeature.checkIsSupported());
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void updateBuilder_shouldSetAeTargetFpsRange() {
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+
+ fpsRangeFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder).set(eq(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE), any(Range.class));
+ }
+
+ private static FpsRangeFeature createTestInstance() {
+ @SuppressWarnings("unchecked")
+ Range<Integer> rangeOne = mock(Range.class);
+ @SuppressWarnings("unchecked")
+ Range<Integer> rangeTwo = mock(Range.class);
+ @SuppressWarnings("unchecked")
+ Range<Integer> rangeThree = mock(Range.class);
+
+ when(rangeOne.getUpper()).thenReturn(11);
+ when(rangeTwo.getUpper()).thenReturn(12);
+ when(rangeThree.getUpper()).thenReturn(13);
+
+ @SuppressWarnings("unchecked")
+ Range<Integer>[] ranges = new Range[] {rangeOne, rangeTwo, rangeThree};
+
+ CameraProperties cameraProperties = mock(CameraProperties.class);
+
+ when(cameraProperties.getControlAutoExposureAvailableTargetFpsRanges()).thenReturn(ranges);
+
+ return new FpsRangeFeature(cameraProperties);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeatureTest.java
new file mode 100644
index 0000000..b89aad0
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeatureTest.java
@@ -0,0 +1,150 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.noisereduction;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build.VERSION;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.utils.TestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class NoiseReductionFeatureTest {
+ @Before
+ public void before() {
+ // Make sure the VERSION.SDK_INT field returns 23, to allow using all available
+ // noise reduction modes in tests.
+ TestUtils.setFinalStatic(VERSION.class, "SDK_INT", 23);
+ }
+
+ @After
+ public void after() {
+    // Make sure we reset the VERSION.SDK_INT field to its original value.
+ TestUtils.setFinalStatic(VERSION.class, "SDK_INT", 0);
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ assertEquals("NoiseReductionFeature", noiseReductionFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnFastIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ assertEquals(NoiseReductionMode.fast, noiseReductionFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+ NoiseReductionMode expectedValue = NoiseReductionMode.fast;
+
+ noiseReductionFeature.setValue(expectedValue);
+ NoiseReductionMode actualValue = noiseReductionFeature.getValue();
+
+ assertEquals(expectedValue, actualValue);
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenAvailableNoiseReductionModesIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(null);
+
+ assertFalse(noiseReductionFeature.checkIsSupported());
+ }
+
+ @Test
+ public void
+ checkIsSupported_shouldReturnFalseWhenAvailableNoiseReductionModesReturnsAnEmptyArray() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(new int[] {});
+
+ assertFalse(noiseReductionFeature.checkIsSupported());
+ }
+
+ @Test
+ public void
+ checkIsSupported_shouldReturnTrueWhenAvailableNoiseReductionModesReturnsAtLeastOneItem() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(new int[] {1});
+
+ assertTrue(noiseReductionFeature.checkIsSupported());
+ }
+
+ @Test
+ public void updateBuilder_shouldReturnWhenCheckIsSupportedIsFalse() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(new int[] {});
+
+ noiseReductionFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, never()).set(any(), any());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeOffWhenOff() {
+ testUpdateBuilderWith(NoiseReductionMode.off, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeFastWhenFast() {
+ testUpdateBuilderWith(NoiseReductionMode.fast, CaptureRequest.NOISE_REDUCTION_MODE_FAST);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeHighQualityWhenHighQuality() {
+ testUpdateBuilderWith(
+ NoiseReductionMode.highQuality, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeMinimalWhenMinimal() {
+ testUpdateBuilderWith(NoiseReductionMode.minimal, CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeZeroShutterLagWhenZeroShutterLag() {
+ testUpdateBuilderWith(
+ NoiseReductionMode.zeroShutterLag, CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG);
+ }
+
+ private static void testUpdateBuilderWith(NoiseReductionMode mode, int expectedResult) {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(new int[] {1});
+
+ noiseReductionFeature.setValue(mode);
+ noiseReductionFeature.updateBuilder(mockBuilder);
+ verify(mockBuilder, times(1)).set(CaptureRequest.NOISE_REDUCTION_MODE, expectedResult);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/resolution/ResolutionFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/resolution/ResolutionFeatureTest.java
new file mode 100644
index 0000000..dbc352d
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/resolution/ResolutionFeatureTest.java
@@ -0,0 +1,430 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.resolution;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyInt;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.when;
+
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import android.util.Size;
+import io.flutter.plugins.camera.CameraProperties;
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.MockedStatic;
+import org.mockito.stubbing.Answer;
+import org.robolectric.RobolectricTestRunner;
+import org.robolectric.annotation.Config;
+
+@RunWith(RobolectricTestRunner.class)
+public class ResolutionFeatureTest {
+ private static final String cameraName = "1";
+ private CamcorderProfile mockProfileLowLegacy;
+ private EncoderProfiles mockProfileLow;
+ private MockedStatic<CamcorderProfile> mockedStaticProfile;
+
+ @Before
+ @SuppressWarnings("deprecation")
+ public void beforeLegacy() {
+ mockedStaticProfile = mockStatic(CamcorderProfile.class);
+ mockProfileLowLegacy = mock(CamcorderProfile.class);
+ CamcorderProfile mockProfileLegacy = mock(CamcorderProfile.class);
+
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_2160P))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_1080P))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_720P))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_480P))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_LOW))
+ .thenReturn(true);
+
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_2160P))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_1080P))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_480P))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_LOW))
+ .thenReturn(mockProfileLowLegacy);
+ }
+
+ public void before() {
+ mockProfileLow = mock(EncoderProfiles.class);
+ EncoderProfiles mockProfile = mock(EncoderProfiles.class);
+ EncoderProfiles.VideoProfile mockVideoProfile = mock(EncoderProfiles.VideoProfile.class);
+ List<EncoderProfiles.VideoProfile> mockVideoProfilesList = List.of(mockVideoProfile);
+
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_2160P))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_1080P))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_480P))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_LOW))
+ .thenReturn(mockProfileLow);
+
+ when(mockProfile.getVideoProfiles()).thenReturn(mockVideoProfilesList);
+ when(mockVideoProfile.getHeight()).thenReturn(100);
+ when(mockVideoProfile.getWidth()).thenReturn(100);
+ }
+
+ @After
+ public void after() {
+ mockedStaticProfile.reset();
+ mockedStaticProfile.close();
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ assertEquals("ResolutionFeature", resolutionFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnInitialValueWhenNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ assertEquals(ResolutionPreset.max, resolutionFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ resolutionFeature.setValue(ResolutionPreset.high);
+
+ assertEquals(ResolutionPreset.high, resolutionFeature.getValue());
+ }
+
+ @Test
+ public void checkIsSupport_returnsTrue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ assertTrue(resolutionFeature.checkIsSupported());
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void getBestAvailableCamcorderProfileForResolutionPreset_shouldFallThroughLegacy() {
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_2160P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_1080P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_720P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_480P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_LOW))
+ .thenReturn(true);
+
+ assertEquals(
+ mockProfileLowLegacy,
+ ResolutionFeature.getBestAvailableCamcorderProfileForResolutionPresetLegacy(
+ 1, ResolutionPreset.max));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void getBestAvailableCamcorderProfileForResolutionPreset_shouldFallThrough() {
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_2160P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_1080P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_720P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_480P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_LOW))
+ .thenReturn(true);
+
+ assertEquals(
+ mockProfileLow,
+ ResolutionFeature.getBestAvailableCamcorderProfileForResolutionPreset(
+ 1, ResolutionPreset.max));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetMaxLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.max);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetMax() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.max);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetUltraHighLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.ultraHigh);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetUltraHigh() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.ultraHigh);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetVeryHighLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.veryHigh);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(minSdk = 31)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetVeryHigh() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.veryHigh);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetHighLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.high);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetHigh() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.high);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse480PWhenResolutionPresetMediumLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.medium);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_480P));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUse480PWhenResolutionPresetMedium() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.medium);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_480P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUseQVGAWhenResolutionPresetLowLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.low);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_QVGA));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUseQVGAWhenResolutionPresetLow() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.low);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_QVGA));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUseLegacyBehaviorWhenEncoderProfilesNull() {
+ try (MockedStatic<ResolutionFeature> mockedResolutionFeature =
+ mockStatic(ResolutionFeature.class)) {
+ mockedResolutionFeature
+ .when(
+ () ->
+ ResolutionFeature.getBestAvailableCamcorderProfileForResolutionPreset(
+ anyInt(), any(ResolutionPreset.class)))
+ .thenAnswer(
+ (Answer<EncoderProfiles>)
+ invocation -> {
+ EncoderProfiles mockEncoderProfiles = mock(EncoderProfiles.class);
+ List<EncoderProfiles.VideoProfile> videoProfiles =
+ new ArrayList<EncoderProfiles.VideoProfile>() {
+ {
+ add(null);
+ }
+ };
+ when(mockEncoderProfiles.getVideoProfiles()).thenReturn(videoProfiles);
+ return mockEncoderProfiles;
+ });
+ mockedResolutionFeature
+ .when(
+ () ->
+ ResolutionFeature.getBestAvailableCamcorderProfileForResolutionPresetLegacy(
+ anyInt(), any(ResolutionPreset.class)))
+ .thenAnswer(
+ (Answer<CamcorderProfile>)
+ invocation -> {
+ CamcorderProfile mockCamcorderProfile = mock(CamcorderProfile.class);
+ mockCamcorderProfile.videoFrameWidth = 10;
+ mockCamcorderProfile.videoFrameHeight = 50;
+ return mockCamcorderProfile;
+ });
+ mockedResolutionFeature
+ .when(() -> ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.max))
+ .thenCallRealMethod();
+
+ Size testPreviewSize = ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.max);
+ assertEquals(testPreviewSize.getWidth(), 10);
+ assertEquals(testPreviewSize.getHeight(), 50);
+ }
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void resolutionFeatureShouldUseLegacyBehaviorWhenEncoderProfilesNull() {
+ beforeLegacy();
+ try (MockedStatic<ResolutionFeature> mockedResolutionFeature =
+ mockStatic(ResolutionFeature.class)) {
+ mockedResolutionFeature
+ .when(
+ () ->
+ ResolutionFeature.getBestAvailableCamcorderProfileForResolutionPreset(
+ anyInt(), any(ResolutionPreset.class)))
+ .thenAnswer(
+ (Answer<EncoderProfiles>)
+ invocation -> {
+ EncoderProfiles mockEncoderProfiles = mock(EncoderProfiles.class);
+ List<EncoderProfiles.VideoProfile> videoProfiles =
+ new ArrayList<EncoderProfiles.VideoProfile>() {
+ {
+ add(null);
+ }
+ };
+ when(mockEncoderProfiles.getVideoProfiles()).thenReturn(videoProfiles);
+ return mockEncoderProfiles;
+ });
+ mockedResolutionFeature
+ .when(
+ () ->
+ ResolutionFeature.getBestAvailableCamcorderProfileForResolutionPresetLegacy(
+ anyInt(), any(ResolutionPreset.class)))
+ .thenAnswer(
+ (Answer<CamcorderProfile>)
+ invocation -> {
+ CamcorderProfile mockCamcorderProfile = mock(CamcorderProfile.class);
+ return mockCamcorderProfile;
+ });
+
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ assertNotNull(resolutionFeature.getRecordingProfileLegacy());
+ assertNull(resolutionFeature.getRecordingProfile());
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManagerTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManagerTest.java
new file mode 100644
index 0000000..3762006
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManagerTest.java
@@ -0,0 +1,313 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.sensororientation;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.res.Configuration;
+import android.content.res.Resources;
+import android.provider.Settings;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugins.camera.DartMessenger;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+
+// Unit tests for DeviceOrientationManager: verifies the photo/video rotation
+// math for both portrait-natural and landscape-natural devices, the
+// UI-orientation lookup table, and orientation-change event dispatch to Dart.
+public class DeviceOrientationManagerTest {
+ private Activity mockActivity;
+ private DartMessenger mockDartMessenger;
+ private WindowManager mockWindowManager;
+ private Display mockDisplay;
+ private DeviceOrientationManager deviceOrientationManager;
+
+ // getDefaultDisplay() is deprecated on newer SDKs but is still what the
+ // implementation under test calls, hence the suppression.
+ @Before
+ @SuppressWarnings("deprecation")
+ public void before() {
+ mockActivity = mock(Activity.class);
+ mockDartMessenger = mock(DartMessenger.class);
+ mockDisplay = mock(Display.class);
+ mockWindowManager = mock(WindowManager.class);
+
+ when(mockActivity.getSystemService(Context.WINDOW_SERVICE)).thenReturn(mockWindowManager);
+ when(mockWindowManager.getDefaultDisplay()).thenReturn(mockDisplay);
+
+ // Default manager under test: back-facing sensor with 0° sensor orientation.
+ deviceOrientationManager =
+ DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 0);
+ }
+
+ @Test
+ public void getVideoOrientation_whenNaturalScreenOrientationEqualsPortraitUp() {
+ int degreesPortraitUp =
+ deviceOrientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_UP);
+ int degreesPortraitDown =
+ deviceOrientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+ int degreesLandscapeLeft =
+ deviceOrientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+ int degreesLandscapeRight =
+ deviceOrientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+ assertEquals(0, degreesPortraitUp);
+ assertEquals(270, degreesLandscapeLeft);
+ assertEquals(180, degreesPortraitDown);
+ assertEquals(90, degreesRight(degreesLandscapeRight));
+ }
+
+ // Same matrix as above but for a device whose sensor is mounted at 90°
+ // (landscape-natural); all results are shifted by 90° accordingly.
+ @Test
+ public void getVideoOrientation_whenNaturalScreenOrientationEqualsLandscapeLeft() {
+ DeviceOrientationManager orientationManager =
+ DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 90);
+
+ int degreesPortraitUp = orientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_UP);
+ int degreesPortraitDown =
+ orientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+ int degreesLandscapeLeft =
+ orientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+ int degreesLandscapeRight =
+ orientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+ assertEquals(90, degreesPortraitUp);
+ assertEquals(0, degreesLandscapeLeft);
+ assertEquals(270, degreesPortraitDown);
+ assertEquals(180, degreesLandscapeRight);
+ }
+
+ // A null orientation argument must fall back to the current UI orientation
+ // (portrait here), producing the sensor-relative rotation for that state.
+ @Test
+ public void getVideoOrientation_fallbackToPortraitSensorOrientationWhenOrientationIsNull() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+
+ int degrees = deviceOrientationManager.getVideoOrientation(null);
+
+ assertEquals(0, degrees);
+ }
+
+ // Null fallback for a landscape UI on a 90°-sensor device.
+ @Test
+ public void getVideoOrientation_fallbackToLandscapeSensorOrientationWhenOrientationIsNull() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+ DeviceOrientationManager orientationManager =
+ DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 90);
+
+ int degrees = orientationManager.getVideoOrientation(null);
+
+ assertEquals(0, degrees);
+ }
+
+ @Test
+ public void getPhotoOrientation_whenNaturalScreenOrientationEqualsPortraitUp() {
+ int degreesPortraitUp =
+ deviceOrientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_UP);
+ int degreesPortraitDown =
+ deviceOrientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+ int degreesLandscapeLeft =
+ deviceOrientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+ int degreesLandscapeRight =
+ deviceOrientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+ assertEquals(0, degreesPortraitUp);
+ assertEquals(90, degreesLandscapeRight);
+ assertEquals(180, degreesPortraitDown);
+ assertEquals(270, degreesLandscapeLeft);
+ }
+
+ @Test
+ public void getPhotoOrientation_whenNaturalScreenOrientationEqualsLandscapeLeft() {
+ DeviceOrientationManager orientationManager =
+ DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 90);
+
+ int degreesPortraitUp = orientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_UP);
+ int degreesPortraitDown =
+ orientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+ int degreesLandscapeLeft =
+ orientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+ int degreesLandscapeRight =
+ orientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+ assertEquals(90, degreesPortraitUp);
+ assertEquals(180, degreesLandscapeRight);
+ assertEquals(270, degreesPortraitDown);
+ assertEquals(0, degreesLandscapeLeft);
+ }
+
+ // Null orientation falls back to the mocked current UI orientation
+ // (landscape-left), which maps to 270° for a photo on this device config.
+ @Test
+ public void getPhotoOrientation_shouldFallbackToCurrentOrientationWhenOrientationIsNull() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+ int degrees = deviceOrientationManager.getPhotoOrientation(null);
+
+ assertEquals(270, degrees);
+ }
+
+ // Settings.System.getInt is static, so ACCELEROMETER_ROTATION must be
+ // stubbed via MockedStatic; a value of 0 means auto-rotate is allowed here.
+ @Test
+ public void handleUIOrientationChange_shouldSendMessageWhenSensorAccessIsAllowed() {
+ try (MockedStatic<Settings.System> mockedSystem = mockStatic(Settings.System.class)) {
+ mockedSystem
+ .when(
+ () ->
+ Settings.System.getInt(any(), eq(Settings.System.ACCELEROMETER_ROTATION), eq(0)))
+ .thenReturn(0);
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+ deviceOrientationManager.handleUIOrientationChange();
+ }
+
+ verify(mockDartMessenger, times(1))
+ .sendDeviceOrientationChangeEvent(DeviceOrientation.LANDSCAPE_LEFT);
+ }
+
+ @Test
+ public void handleOrientationChange_shouldSendMessageWhenOrientationIsUpdated() {
+ DeviceOrientation previousOrientation = DeviceOrientation.PORTRAIT_UP;
+ DeviceOrientation newOrientation = DeviceOrientation.LANDSCAPE_LEFT;
+
+ DeviceOrientationManager.handleOrientationChange(
+ newOrientation, previousOrientation, mockDartMessenger);
+
+ verify(mockDartMessenger, times(1)).sendDeviceOrientationChangeEvent(newOrientation);
+ }
+
+ // No event should be emitted when the orientation did not actually change.
+ @Test
+ public void handleOrientationChange_shouldNotSendMessageWhenOrientationIsNotUpdated() {
+ DeviceOrientation previousOrientation = DeviceOrientation.PORTRAIT_UP;
+ DeviceOrientation newOrientation = DeviceOrientation.PORTRAIT_UP;
+
+ DeviceOrientationManager.handleOrientationChange(
+ newOrientation, previousOrientation, mockDartMessenger);
+
+ verify(mockDartMessenger, never()).sendDeviceOrientationChangeEvent(any());
+ }
+
+ // Exhaustive mapping of (Configuration orientation, Surface rotation) pairs
+ // to the DeviceOrientation enum.
+ @Test
+ public void getUIOrientation() {
+ // Orientation portrait and rotation of 0 should translate to "PORTRAIT_UP".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ DeviceOrientation uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+
+ // Orientation portrait and rotation of 90 should translate to "PORTRAIT_UP".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_90);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+
+ // Orientation portrait and rotation of 180 should translate to "PORTRAIT_DOWN".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_180);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_DOWN, uiOrientation);
+
+ // Orientation portrait and rotation of 270 should translate to "PORTRAIT_DOWN".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_270);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_DOWN, uiOrientation);
+
+ // Orientation landscape and rotation of 0 should translate to "LANDSCAPE_LEFT".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.LANDSCAPE_LEFT, uiOrientation);
+
+ // Orientation landscape and rotation of 90 should translate to "LANDSCAPE_LEFT".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_90);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.LANDSCAPE_LEFT, uiOrientation);
+
+ // Orientation landscape and rotation of 180 should translate to "LANDSCAPE_RIGHT".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_180);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, uiOrientation);
+
+ // Orientation landscape and rotation of 270 should translate to "LANDSCAPE_RIGHT".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_270);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, uiOrientation);
+
+ // Orientation undefined should default to "PORTRAIT_UP".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_UNDEFINED, Surface.ROTATION_0);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+ }
+
+ // The device's "default" (natural) orientation is derived from the current
+ // configuration orientation combined with the display rotation: a 90°/270°
+ // rotation flips the reported orientation back to the natural one.
+ @Test
+ public void getDeviceDefaultOrientation() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ int orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_180);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_90);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_270);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_180);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_90);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_270);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+ }
+
+ // Sensor angles 0/90/180/270 map to PORTRAIT_UP / LANDSCAPE_LEFT /
+ // PORTRAIT_DOWN / LANDSCAPE_RIGHT on a portrait-natural device.
+ @Test
+ public void calculateSensorOrientation() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ DeviceOrientation orientation = deviceOrientationManager.calculateSensorOrientation(0);
+ assertEquals(DeviceOrientation.PORTRAIT_UP, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ orientation = deviceOrientationManager.calculateSensorOrientation(90);
+ assertEquals(DeviceOrientation.LANDSCAPE_LEFT, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ orientation = deviceOrientationManager.calculateSensorOrientation(180);
+ assertEquals(DeviceOrientation.PORTRAIT_DOWN, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ orientation = deviceOrientationManager.calculateSensorOrientation(270);
+ assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, orientation);
+ }
+
+ // Stubs the activity's resources/configuration and the display rotation so
+ // the manager observes the given (orientation, rotation) pair.
+ private void setUpUIOrientationMocks(int orientation, int rotation) {
+ Resources mockResources = mock(Resources.class);
+ Configuration mockConfiguration = mock(Configuration.class);
+
+ when(mockDisplay.getRotation()).thenReturn(rotation);
+
+ mockConfiguration.orientation = orientation;
+ when(mockActivity.getResources()).thenReturn(mockResources);
+ when(mockResources.getConfiguration()).thenReturn(mockConfiguration);
+ }
+
+ @Test
+ public void getDisplayTest() {
+ Display display = deviceOrientationManager.getDisplay();
+
+ assertEquals(mockDisplay, display);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeatureTest.java
new file mode 100644
index 0000000..2c3a5ab
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeatureTest.java
@@ -0,0 +1,125 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.sensororientation;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraMetadata;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+
+// Unit tests for SensorOrientationFeature. DeviceOrientationManager.create is
+// stubbed with a MockedStatic so construction of the feature yields a mock
+// manager; the static mock is opened in before() and MUST be closed in
+// after() to avoid leaking into other test classes.
+public class SensorOrientationFeatureTest {
+ private MockedStatic<DeviceOrientationManager> mockedStaticDeviceOrientationManager;
+ private Activity mockActivity;
+ private CameraProperties mockCameraProperties;
+ private DartMessenger mockDartMessenger;
+ private DeviceOrientationManager mockDeviceOrientationManager;
+
+ @Before
+ public void before() {
+ mockedStaticDeviceOrientationManager = mockStatic(DeviceOrientationManager.class);
+ mockActivity = mock(Activity.class);
+ mockCameraProperties = mock(CameraProperties.class);
+ mockDartMessenger = mock(DartMessenger.class);
+ mockDeviceOrientationManager = mock(DeviceOrientationManager.class);
+
+ // Sensor orientation 0 / back-facing lens; matches the create() stub below.
+ when(mockCameraProperties.getSensorOrientation()).thenReturn(0);
+ when(mockCameraProperties.getLensFacing()).thenReturn(CameraMetadata.LENS_FACING_BACK);
+
+ mockedStaticDeviceOrientationManager
+ .when(() -> DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 0))
+ .thenReturn(mockDeviceOrientationManager);
+ }
+
+ @After
+ public void after() {
+ mockedStaticDeviceOrientationManager.close();
+ }
+
+ // Constructing the feature should start the orientation manager as a side
+ // effect; the local variable exists only to trigger the constructor.
+ @Test
+ public void ctor_shouldStartDeviceOrientationManager() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ verify(mockDeviceOrientationManager, times(1)).start();
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ assertEquals("SensorOrientationFeature", sensorOrientationFeature.getDebugName());
+ }
+
+ // NOTE(review): the name says "ReturnNull" but the assertion expects the
+ // default value 0 (seeded from getSensorOrientation() above) — the test name
+ // appears stale; confirm intended default-value semantics.
+ @Test
+ public void getValue_shouldReturnNullIfNotSet() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ assertEquals(0, (int) sensorOrientationFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoSetValue() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ sensorOrientationFeature.setValue(90);
+
+ assertEquals(90, (int) sensorOrientationFeature.getValue());
+ }
+
+ @Test
+ public void checkIsSupport_returnsTrue() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ assertTrue(sensorOrientationFeature.checkIsSupported());
+ }
+
+ @Test
+ public void getDeviceOrientationManager_shouldReturnInitializedDartOrientationManagerInstance() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ assertEquals(
+ mockDeviceOrientationManager, sensorOrientationFeature.getDeviceOrientationManager());
+ }
+
+ @Test
+ public void lockCaptureOrientation_shouldLockToSpecifiedOrientation() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ sensorOrientationFeature.lockCaptureOrientation(DeviceOrientation.PORTRAIT_DOWN);
+
+ assertEquals(
+ DeviceOrientation.PORTRAIT_DOWN, sensorOrientationFeature.getLockedCaptureOrientation());
+ }
+
+ @Test
+ public void unlockCaptureOrientation_shouldSetLockToNull() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ sensorOrientationFeature.unlockCaptureOrientation();
+
+ assertNull(sensorOrientationFeature.getLockedCaptureOrientation());
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/zoomlevel/ZoomLevelFeatureTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/zoomlevel/ZoomLevelFeatureTest.java
new file mode 100644
index 0000000..4d58269
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/zoomlevel/ZoomLevelFeatureTest.java
@@ -0,0 +1,219 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.zoomlevel;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyFloat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.graphics.Rect;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import io.flutter.plugins.camera.CameraProperties;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+
+// Unit tests for ZoomLevelFeature. ZoomUtils.computeZoomRect is statically
+// stubbed to a fixed Rect so the tests can verify which camera properties the
+// feature reads and what it writes into the CaptureRequest builder, without
+// depending on the real zoom math (covered separately in ZoomUtilsTest).
+public class ZoomLevelFeatureTest {
+ private MockedStatic<ZoomUtils> mockedStaticCameraZoom;
+ private CameraProperties mockCameraProperties;
+ private ZoomUtils mockCameraZoom;
+ private Rect mockZoomArea;
+ private Rect mockSensorArray;
+
+ @Before
+ public void before() {
+ mockedStaticCameraZoom = mockStatic(ZoomUtils.class);
+ mockCameraProperties = mock(CameraProperties.class);
+ mockCameraZoom = mock(ZoomUtils.class);
+ mockZoomArea = mock(Rect.class);
+ mockSensorArray = mock(Rect.class);
+
+ mockedStaticCameraZoom
+ .when(() -> ZoomUtils.computeZoomRect(anyFloat(), any(), anyFloat(), anyFloat()))
+ .thenReturn(mockZoomArea);
+ }
+
+ // Static mocks leak across tests if not closed.
+ @After
+ public void after() {
+ mockedStaticCameraZoom.close();
+ }
+
+ @Test
+ public void ctor_whenParametersAreValid() {
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(mockSensorArray);
+ when(mockCameraProperties.getScalerAvailableMaxDigitalZoom()).thenReturn(42f);
+
+ final ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ verify(mockCameraProperties, times(1)).getSensorInfoActiveArraySize();
+ verify(mockCameraProperties, times(1)).getScalerAvailableMaxDigitalZoom();
+ assertNotNull(zoomLevelFeature);
+ assertEquals(42f, zoomLevelFeature.getMaximumZoomLevel(), 0);
+ }
+
+ // A null sensor array short-circuits construction: max digital zoom is
+ // never queried and the feature reports itself unsupported.
+ @Test
+ public void ctor_whenSensorSizeIsNull() {
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(null);
+ when(mockCameraProperties.getScalerAvailableMaxDigitalZoom()).thenReturn(42f);
+
+ final ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ verify(mockCameraProperties, times(1)).getSensorInfoActiveArraySize();
+ verify(mockCameraProperties, never()).getScalerAvailableMaxDigitalZoom();
+ assertNotNull(zoomLevelFeature);
+ assertFalse(zoomLevelFeature.checkIsSupported());
+ assertEquals(zoomLevelFeature.getMaximumZoomLevel(), 1.0f, 0);
+ }
+
+ @Test
+ public void ctor_whenMaxZoomIsNull() {
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(mockSensorArray);
+ when(mockCameraProperties.getScalerAvailableMaxDigitalZoom()).thenReturn(null);
+
+ final ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ verify(mockCameraProperties, times(1)).getSensorInfoActiveArraySize();
+ verify(mockCameraProperties, times(1)).getScalerAvailableMaxDigitalZoom();
+ assertNotNull(zoomLevelFeature);
+ assertFalse(zoomLevelFeature.checkIsSupported());
+ assertEquals(zoomLevelFeature.getMaximumZoomLevel(), 1.0f, 0);
+ }
+
+ // A reported max zoom below the 1.0 default is treated as "no zoom support".
+ @Test
+ public void ctor_whenMaxZoomIsSmallerThenDefaultZoomFactor() {
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(mockSensorArray);
+ when(mockCameraProperties.getScalerAvailableMaxDigitalZoom()).thenReturn(0.5f);
+
+ final ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ verify(mockCameraProperties, times(1)).getSensorInfoActiveArraySize();
+ verify(mockCameraProperties, times(1)).getScalerAvailableMaxDigitalZoom();
+ assertNotNull(zoomLevelFeature);
+ assertFalse(zoomLevelFeature.checkIsSupported());
+ assertEquals(zoomLevelFeature.getMaximumZoomLevel(), 1.0f, 0);
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ assertEquals("ZoomLevelFeature", zoomLevelFeature.getDebugName());
+ }
+
+ // NOTE(review): the name says "ReturnNull" but the assertion expects the
+ // default zoom level 1.0 — the test name appears stale; confirm intent.
+ @Test
+ public void getValue_shouldReturnNullIfNotSet() {
+ ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ assertEquals(1.0, (float) zoomLevelFeature.getValue(), 0);
+ }
+
+ @Test
+ public void getValue_shouldEchoSetValue() {
+ ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ zoomLevelFeature.setValue(2.3f);
+
+ assertEquals(2.3f, (float) zoomLevelFeature.getValue(), 0);
+ }
+
+ @Test
+ public void checkIsSupport_returnsFalseByDefault() {
+ ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ assertFalse(zoomLevelFeature.checkIsSupported());
+ }
+
+ // When supported, the feature writes the (stubbed) crop region into the
+ // request builder via SCALER_CROP_REGION.
+ @Test
+ public void updateBuilder_shouldSetScalarCropRegionWhenCheckIsSupportIsTrue() {
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(mockSensorArray);
+ when(mockCameraProperties.getScalerAvailableMaxDigitalZoom()).thenReturn(42f);
+
+ ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+
+ zoomLevelFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1)).set(CaptureRequest.SCALER_CROP_REGION, mockZoomArea);
+ }
+
+ // On Android R+ the feature uses CONTROL_ZOOM_RATIO instead of a crop rect.
+ // The 0.0f here is whatever the (unstubbed) ratio computation yields under
+ // these mocks — presumably the mocked-static default; confirm if changed.
+ @Test
+ public void updateBuilder_shouldControlZoomRatioWhenCheckIsSupportIsTrue() throws Exception {
+ setSdkVersion(Build.VERSION_CODES.R);
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(mockSensorArray);
+ when(mockCameraProperties.getScalerMaxZoomRatio()).thenReturn(42f);
+ when(mockCameraProperties.getScalerMinZoomRatio()).thenReturn(1.0f);
+
+ ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+
+ zoomLevelFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, times(1)).set(CaptureRequest.CONTROL_ZOOM_RATIO, 0.0f);
+ }
+
+ @Test
+ public void getMinimumZoomLevel() {
+ ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ assertEquals(1.0f, zoomLevelFeature.getMinimumZoomLevel(), 0);
+ }
+
+ @Test
+ public void getMaximumZoomLevel() {
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(mockSensorArray);
+ when(mockCameraProperties.getScalerAvailableMaxDigitalZoom()).thenReturn(42f);
+
+ ZoomLevelFeature zoomLevelFeature = new ZoomLevelFeature(mockCameraProperties);
+
+ assertEquals(42f, zoomLevelFeature.getMaximumZoomLevel(), 0);
+ }
+
+ // Pre-R devices must use the legacy digital-zoom query, not the R+ ratio APIs.
+ @Test
+ public void checkZoomLevelFeature_callsMaxDigitalZoomOnAndroidQ() throws Exception {
+ setSdkVersion(Build.VERSION_CODES.Q);
+
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(mockSensorArray);
+
+ new ZoomLevelFeature(mockCameraProperties);
+
+ verify(mockCameraProperties, times(0)).getScalerMaxZoomRatio();
+ verify(mockCameraProperties, times(0)).getScalerMinZoomRatio();
+ verify(mockCameraProperties, times(1)).getScalerAvailableMaxDigitalZoom();
+ }
+
+ @Test
+ public void checkZoomLevelFeature_callsScalarMaxZoomRatioOnAndroidR() throws Exception {
+ setSdkVersion(Build.VERSION_CODES.R);
+ when(mockCameraProperties.getSensorInfoActiveArraySize()).thenReturn(mockSensorArray);
+
+ new ZoomLevelFeature(mockCameraProperties);
+
+ verify(mockCameraProperties, times(1)).getScalerMaxZoomRatio();
+ verify(mockCameraProperties, times(1)).getScalerMinZoomRatio();
+ verify(mockCameraProperties, times(0)).getScalerAvailableMaxDigitalZoom();
+ }
+
+ // Overwrites the final Build.VERSION.SDK_INT via reflection so a single test
+ // can simulate different OS versions.
+ // NOTE(review): the Field.class "modifiers" field is inaccessible on JDK 12+
+ // (throws NoSuchFieldException/InaccessibleObjectException) — confirm the
+ // test JDK, or prefer Robolectric's @Config(sdk = ...) as used elsewhere.
+ static void setSdkVersion(int sdkVersion) throws Exception {
+ Field sdkInt = Build.VERSION.class.getField("SDK_INT");
+ sdkInt.setAccessible(true);
+ Field modifiersField = Field.class.getDeclaredField("modifiers");
+ modifiersField.setAccessible(true);
+ modifiersField.setInt(sdkInt, sdkInt.getModifiers() & ~Modifier.FINAL);
+ sdkInt.set(null, sdkVersion);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/zoomlevel/ZoomUtilsTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/zoomlevel/ZoomUtilsTest.java
new file mode 100644
index 0000000..2f61608
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/features/zoomlevel/ZoomUtilsTest.java
@@ -0,0 +1,85 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.zoomlevel;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import android.graphics.Rect;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.RobolectricTestRunner;
+
+@RunWith(RobolectricTestRunner.class)
+// Unit tests for the pure zoom math in ZoomUtils: crop-rect computation for
+// the legacy SCALER_CROP_REGION path and ratio clamping for the R+
+// CONTROL_ZOOM_RATIO path. Runs under Robolectric because Rect is an Android
+// class with real (non-stubbed) behavior.
+public class ZoomUtilsTest {
+ @Test
+ public void setZoomRect_whenSensorSizeEqualsZeroShouldReturnCropRegionOfZero() {
+ final Rect sensorSize = new Rect(0, 0, 0, 0);
+ final Rect computedZoom = ZoomUtils.computeZoomRect(18f, sensorSize, 1f, 20f);
+
+ assertNotNull(computedZoom);
+ assertEquals(computedZoom.left, 0);
+ assertEquals(computedZoom.top, 0);
+ assertEquals(computedZoom.right, 0);
+ assertEquals(computedZoom.bottom, 0);
+ }
+
+ // 18x zoom on a 100x100 sensor yields a small crop centered on (50, 50).
+ @Test
+ public void setZoomRect_whenSensorSizeIsValidShouldReturnCropRegion() {
+ final Rect sensorSize = new Rect(0, 0, 100, 100);
+ final Rect computedZoom = ZoomUtils.computeZoomRect(18f, sensorSize, 1f, 20f);
+
+ assertNotNull(computedZoom);
+ assertEquals(computedZoom.left, 48);
+ assertEquals(computedZoom.top, 48);
+ assertEquals(computedZoom.right, 52);
+ assertEquals(computedZoom.bottom, 52);
+ }
+
+ // Requested 25x exceeds max 10x; result matches the crop rect for 10x.
+ @Test
+ public void setZoomRect_whenZoomIsGreaterThenMaxZoomClampToMaxZoom() {
+ final Rect sensorSize = new Rect(0, 0, 100, 100);
+ final Rect computedZoom = ZoomUtils.computeZoomRect(25f, sensorSize, 1f, 10f);
+
+ assertNotNull(computedZoom);
+ assertEquals(computedZoom.left, 45);
+ assertEquals(computedZoom.top, 45);
+ assertEquals(computedZoom.right, 55);
+ assertEquals(computedZoom.bottom, 55);
+ }
+
+ // Requested 0.5x is below min 1x; result is the full, uncropped sensor.
+ @Test
+ public void setZoomRect_whenZoomIsSmallerThenMinZoomClampToMinZoom() {
+ final Rect sensorSize = new Rect(0, 0, 100, 100);
+ final Rect computedZoom = ZoomUtils.computeZoomRect(0.5f, sensorSize, 1f, 10f);
+
+ assertNotNull(computedZoom);
+ assertEquals(computedZoom.left, 0);
+ assertEquals(computedZoom.top, 0);
+ assertEquals(computedZoom.right, 100);
+ assertEquals(computedZoom.bottom, 100);
+ }
+
+ @Test
+ public void setZoomRatio_whenNewZoomGreaterThanMaxZoomClampToMaxZoom() {
+ final Float computedZoom = ZoomUtils.computeZoomRatio(21f, 1f, 20f);
+ assertNotNull(computedZoom);
+ assertEquals(computedZoom, 20f, 0.0f);
+ }
+
+ @Test
+ public void setZoomRatio_whenNewZoomLesserThanMinZoomClampToMinZoom() {
+ final Float computedZoom = ZoomUtils.computeZoomRatio(0.7f, 1f, 20f);
+ assertNotNull(computedZoom);
+ assertEquals(computedZoom, 1f, 0.0f);
+ }
+
+ // In-range values pass through unchanged.
+ @Test
+ public void setZoomRatio_whenNewZoomValidReturnNewZoom() {
+ final Float computedZoom = ZoomUtils.computeZoomRatio(2.0f, 1f, 20f);
+ assertNotNull(computedZoom);
+ assertEquals(computedZoom, 2.0f, 0.0f);
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/media/MediaRecorderBuilderTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/media/MediaRecorderBuilderTest.java
new file mode 100644
index 0000000..6cc58ee
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/media/MediaRecorderBuilderTest.java
@@ -0,0 +1,227 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.media;
+
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.*;
+
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import android.media.MediaRecorder;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.util.List;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InOrder;
+import org.robolectric.RobolectricTestRunner;
+import org.robolectric.annotation.Config;
+
+@RunWith(RobolectricTestRunner.class)
+public class MediaRecorderBuilderTest {
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void ctor_testLegacy() {
+ MediaRecorderBuilder builder =
+ new MediaRecorderBuilder(CamcorderProfile.get(CamcorderProfile.QUALITY_1080P), "");
+
+ assertNotNull(builder);
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void ctor_test() {
+ MediaRecorderBuilder builder =
+ new MediaRecorderBuilder(CamcorderProfile.getAll("0", CamcorderProfile.QUALITY_1080P), "");
+
+ assertNotNull(builder);
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void build_shouldSetValuesInCorrectOrderWhenAudioIsDisabledLegacy() throws IOException {
+ CamcorderProfile recorderProfile = getEmptyCamcorderProfile();
+ MediaRecorderBuilder.MediaRecorderFactory mockFactory =
+ mock(MediaRecorderBuilder.MediaRecorderFactory.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ String outputFilePath = "mock_video_file_path";
+ int mediaOrientation = 1;
+ MediaRecorderBuilder builder =
+ new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
+ .setEnableAudio(false)
+ .setMediaOrientation(mediaOrientation);
+
+ when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
+
+ MediaRecorder recorder = builder.build();
+
+ InOrder inOrder = inOrder(recorder);
+ inOrder.verify(recorder).setVideoSource(MediaRecorder.VideoSource.SURFACE);
+ inOrder.verify(recorder).setOutputFormat(recorderProfile.fileFormat);
+ inOrder.verify(recorder).setVideoEncoder(recorderProfile.videoCodec);
+ inOrder.verify(recorder).setVideoEncodingBitRate(recorderProfile.videoBitRate);
+ inOrder.verify(recorder).setVideoFrameRate(recorderProfile.videoFrameRate);
+ inOrder
+ .verify(recorder)
+ .setVideoSize(recorderProfile.videoFrameWidth, recorderProfile.videoFrameHeight);
+ inOrder.verify(recorder).setOutputFile(outputFilePath);
+ inOrder.verify(recorder).setOrientationHint(mediaOrientation);
+ inOrder.verify(recorder).prepare();
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void build_shouldSetValuesInCorrectOrderWhenAudioIsDisabled() throws IOException {
+ EncoderProfiles recorderProfile = mock(EncoderProfiles.class);
+ List<EncoderProfiles.VideoProfile> mockVideoProfiles =
+ List.of(mock(EncoderProfiles.VideoProfile.class));
+ List<EncoderProfiles.AudioProfile> mockAudioProfiles =
+ List.of(mock(EncoderProfiles.AudioProfile.class));
+ MediaRecorderBuilder.MediaRecorderFactory mockFactory =
+ mock(MediaRecorderBuilder.MediaRecorderFactory.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ String outputFilePath = "mock_video_file_path";
+ int mediaOrientation = 1;
+ MediaRecorderBuilder builder =
+ new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
+ .setEnableAudio(false)
+ .setMediaOrientation(mediaOrientation);
+
+ when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
+ when(recorderProfile.getVideoProfiles()).thenReturn(mockVideoProfiles);
+ when(recorderProfile.getAudioProfiles()).thenReturn(mockAudioProfiles);
+
+ MediaRecorder recorder = builder.build();
+
+ EncoderProfiles.VideoProfile videoProfile = mockVideoProfiles.get(0);
+
+ InOrder inOrder = inOrder(recorder);
+ inOrder.verify(recorder).setVideoSource(MediaRecorder.VideoSource.SURFACE);
+ inOrder.verify(recorder).setOutputFormat(recorderProfile.getRecommendedFileFormat());
+ inOrder.verify(recorder).setVideoEncoder(videoProfile.getCodec());
+ inOrder.verify(recorder).setVideoEncodingBitRate(videoProfile.getBitrate());
+ inOrder.verify(recorder).setVideoFrameRate(videoProfile.getFrameRate());
+ inOrder.verify(recorder).setVideoSize(videoProfile.getWidth(), videoProfile.getHeight());
+ inOrder.verify(recorder).setOutputFile(outputFilePath);
+ inOrder.verify(recorder).setOrientationHint(mediaOrientation);
+ inOrder.verify(recorder).prepare();
+ }
+
+ @Config(minSdk = 31)
+ @Test(expected = IndexOutOfBoundsException.class)
+ public void build_shouldThrowExceptionWithoutVideoOrAudioProfiles() throws IOException {
+ EncoderProfiles recorderProfile = mock(EncoderProfiles.class);
+ MediaRecorderBuilder.MediaRecorderFactory mockFactory =
+ mock(MediaRecorderBuilder.MediaRecorderFactory.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ String outputFilePath = "mock_video_file_path";
+ int mediaOrientation = 1;
+ MediaRecorderBuilder builder =
+ new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
+ .setEnableAudio(false)
+ .setMediaOrientation(mediaOrientation);
+
+ when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
+
+ MediaRecorder recorder = builder.build();
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void build_shouldSetValuesInCorrectOrderWhenAudioIsEnabledLegacy() throws IOException {
+ CamcorderProfile recorderProfile = getEmptyCamcorderProfile();
+ MediaRecorderBuilder.MediaRecorderFactory mockFactory =
+ mock(MediaRecorderBuilder.MediaRecorderFactory.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ String outputFilePath = "mock_video_file_path";
+ int mediaOrientation = 1;
+ MediaRecorderBuilder builder =
+ new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
+ .setEnableAudio(true)
+ .setMediaOrientation(mediaOrientation);
+
+ when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
+
+ MediaRecorder recorder = builder.build();
+
+ InOrder inOrder = inOrder(recorder);
+ inOrder.verify(recorder).setAudioSource(MediaRecorder.AudioSource.MIC);
+ inOrder.verify(recorder).setVideoSource(MediaRecorder.VideoSource.SURFACE);
+ inOrder.verify(recorder).setOutputFormat(recorderProfile.fileFormat);
+ inOrder.verify(recorder).setAudioEncoder(recorderProfile.audioCodec);
+ inOrder.verify(recorder).setAudioEncodingBitRate(recorderProfile.audioBitRate);
+ inOrder.verify(recorder).setAudioSamplingRate(recorderProfile.audioSampleRate);
+ inOrder.verify(recorder).setVideoEncoder(recorderProfile.videoCodec);
+ inOrder.verify(recorder).setVideoEncodingBitRate(recorderProfile.videoBitRate);
+ inOrder.verify(recorder).setVideoFrameRate(recorderProfile.videoFrameRate);
+ inOrder
+ .verify(recorder)
+ .setVideoSize(recorderProfile.videoFrameWidth, recorderProfile.videoFrameHeight);
+ inOrder.verify(recorder).setOutputFile(outputFilePath);
+ inOrder.verify(recorder).setOrientationHint(mediaOrientation);
+ inOrder.verify(recorder).prepare();
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void build_shouldSetValuesInCorrectOrderWhenAudioIsEnabled() throws IOException {
+ EncoderProfiles recorderProfile = mock(EncoderProfiles.class);
+ List<EncoderProfiles.VideoProfile> mockVideoProfiles =
+ List.of(mock(EncoderProfiles.VideoProfile.class));
+ List<EncoderProfiles.AudioProfile> mockAudioProfiles =
+ List.of(mock(EncoderProfiles.AudioProfile.class));
+ MediaRecorderBuilder.MediaRecorderFactory mockFactory =
+ mock(MediaRecorderBuilder.MediaRecorderFactory.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ String outputFilePath = "mock_video_file_path";
+ int mediaOrientation = 1;
+ MediaRecorderBuilder builder =
+ new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
+ .setEnableAudio(true)
+ .setMediaOrientation(mediaOrientation);
+
+ when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
+ when(recorderProfile.getVideoProfiles()).thenReturn(mockVideoProfiles);
+ when(recorderProfile.getAudioProfiles()).thenReturn(mockAudioProfiles);
+
+ MediaRecorder recorder = builder.build();
+
+ EncoderProfiles.VideoProfile videoProfile = mockVideoProfiles.get(0);
+ EncoderProfiles.AudioProfile audioProfile = mockAudioProfiles.get(0);
+
+ InOrder inOrder = inOrder(recorder);
+ inOrder.verify(recorder).setAudioSource(MediaRecorder.AudioSource.MIC);
+ inOrder.verify(recorder).setVideoSource(MediaRecorder.VideoSource.SURFACE);
+ inOrder.verify(recorder).setOutputFormat(recorderProfile.getRecommendedFileFormat());
+ inOrder.verify(recorder).setAudioEncoder(audioProfile.getCodec());
+ inOrder.verify(recorder).setAudioEncodingBitRate(audioProfile.getBitrate());
+ inOrder.verify(recorder).setAudioSamplingRate(audioProfile.getSampleRate());
+ inOrder.verify(recorder).setVideoEncoder(videoProfile.getCodec());
+ inOrder.verify(recorder).setVideoEncodingBitRate(videoProfile.getBitrate());
+ inOrder.verify(recorder).setVideoFrameRate(videoProfile.getFrameRate());
+ inOrder.verify(recorder).setVideoSize(videoProfile.getWidth(), videoProfile.getHeight());
+ inOrder.verify(recorder).setOutputFile(outputFilePath);
+ inOrder.verify(recorder).setOrientationHint(mediaOrientation);
+ inOrder.verify(recorder).prepare();
+ }
+
+ private CamcorderProfile getEmptyCamcorderProfile() {
+ try {
+ Constructor<CamcorderProfile> constructor =
+ CamcorderProfile.class.getDeclaredConstructor(
+ int.class, int.class, int.class, int.class, int.class, int.class, int.class,
+ int.class, int.class, int.class, int.class, int.class);
+
+ constructor.setAccessible(true);
+ return constructor.newInstance(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
+ } catch (Exception ignored) {
+ }
+
+ return null;
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/ExposureModeTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/ExposureModeTest.java
new file mode 100644
index 0000000..dbef851
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/ExposureModeTest.java
@@ -0,0 +1,37 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class ExposureModeTest {
+
+ @Test
+ public void getValueForString_returnsCorrectValues() {
+ assertEquals(
+ "Returns ExposureMode.auto for 'auto'",
+ ExposureMode.getValueForString("auto"),
+ ExposureMode.auto);
+ assertEquals(
+ "Returns ExposureMode.locked for 'locked'",
+ ExposureMode.getValueForString("locked"),
+ ExposureMode.locked);
+ }
+
+ @Test
+ public void getValueForString_returnsNullForNonexistentValue() {
+ assertEquals(
+ "Returns null for 'nonexistent'", ExposureMode.getValueForString("nonexistent"), null);
+ }
+
+ @Test
+ public void toString_returnsCorrectValue() {
+ assertEquals("Returns 'auto' for ExposureMode.auto", ExposureMode.auto.toString(), "auto");
+ assertEquals(
+ "Returns 'locked' for ExposureMode.locked", ExposureMode.locked.toString(), "locked");
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/FlashModeTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/FlashModeTest.java
new file mode 100644
index 0000000..7ae175e
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/FlashModeTest.java
@@ -0,0 +1,42 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class FlashModeTest {
+
+ @Test
+ public void getValueForString_returnsCorrectValues() {
+ assertEquals(
+ "Returns FlashMode.off for 'off'", FlashMode.getValueForString("off"), FlashMode.off);
+ assertEquals(
+ "Returns FlashMode.auto for 'auto'", FlashMode.getValueForString("auto"), FlashMode.auto);
+ assertEquals(
+ "Returns FlashMode.always for 'always'",
+ FlashMode.getValueForString("always"),
+ FlashMode.always);
+ assertEquals(
+ "Returns FlashMode.torch for 'torch'",
+ FlashMode.getValueForString("torch"),
+ FlashMode.torch);
+ }
+
+ @Test
+ public void getValueForString_returnsNullForNonexistentValue() {
+ assertEquals(
+ "Returns null for 'nonexistent'", FlashMode.getValueForString("nonexistent"), null);
+ }
+
+ @Test
+ public void toString_returnsCorrectValue() {
+ assertEquals("Returns 'off' for FlashMode.off", FlashMode.off.toString(), "off");
+ assertEquals("Returns 'auto' for FlashMode.auto", FlashMode.auto.toString(), "auto");
+ assertEquals("Returns 'always' for FlashMode.always", FlashMode.always.toString(), "always");
+ assertEquals("Returns 'torch' for FlashMode.torch", FlashMode.torch.toString(), "torch");
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/FocusModeTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/FocusModeTest.java
new file mode 100644
index 0000000..1d7b95c
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/types/FocusModeTest.java
@@ -0,0 +1,34 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class FocusModeTest {
+
+ @Test
+ public void getValueForString_returnsCorrectValues() {
+ assertEquals(
+ "Returns FocusMode.auto for 'auto'", FocusMode.getValueForString("auto"), FocusMode.auto);
+ assertEquals(
+ "Returns FocusMode.locked for 'locked'",
+ FocusMode.getValueForString("locked"),
+ FocusMode.locked);
+ }
+
+ @Test
+ public void getValueForString_returnsNullForNonexistentValue() {
+ assertEquals(
+ "Returns null for 'nonexistent'", FocusMode.getValueForString("nonexistent"), null);
+ }
+
+ @Test
+ public void toString_returnsCorrectValue() {
+ assertEquals("Returns 'auto' for FocusMode.auto", FocusMode.auto.toString(), "auto");
+ assertEquals("Returns 'locked' for FocusMode.locked", FocusMode.locked.toString(), "locked");
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/utils/TestUtils.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/utils/TestUtils.java
new file mode 100644
index 0000000..fce99b5
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/utils/TestUtils.java
@@ -0,0 +1,47 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.utils;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import org.junit.Assert;
+
+public class TestUtils {
+ public static <T> void setFinalStatic(Class<T> classToModify, String fieldName, Object newValue) {
+ try {
+ Field field = classToModify.getField(fieldName);
+ field.setAccessible(true);
+
+ Field modifiersField = Field.class.getDeclaredField("modifiers");
+ modifiersField.setAccessible(true);
+ modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
+
+ field.set(null, newValue);
+ } catch (Exception e) {
+ Assert.fail("Unable to mock static field: " + fieldName);
+ }
+ }
+
+ public static <T> void setPrivateField(T instance, String fieldName, Object newValue) {
+ try {
+ Field field = instance.getClass().getDeclaredField(fieldName);
+ field.setAccessible(true);
+ field.set(instance, newValue);
+ } catch (Exception e) {
+ Assert.fail("Unable to mock private field: " + fieldName);
+ }
+ }
+
+ public static <T> Object getPrivateField(T instance, String fieldName) {
+ try {
+ Field field = instance.getClass().getDeclaredField(fieldName);
+ field.setAccessible(true);
+ return field.get(instance);
+ } catch (Exception e) {
+ Assert.fail("Unable to mock private field: " + fieldName);
+ return null;
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/resources/robolectric.properties b/packages/camera/camera_android/android/src/test/resources/robolectric.properties
new file mode 100644
index 0000000..90fbd74
--- /dev/null
+++ b/packages/camera/camera_android/android/src/test/resources/robolectric.properties
@@ -0,0 +1 @@
+sdk=30
\ No newline at end of file
diff --git a/packages/camera/camera_android/example/android/app/build.gradle b/packages/camera/camera_android/example/android/app/build.gradle
new file mode 100644
index 0000000..5d6af58
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/build.gradle
@@ -0,0 +1,64 @@
+def localProperties = new Properties()
+def localPropertiesFile = rootProject.file('local.properties')
+if (localPropertiesFile.exists()) {
+ localPropertiesFile.withReader('UTF-8') { reader ->
+ localProperties.load(reader)
+ }
+}
+
+def flutterRoot = localProperties.getProperty('flutter.sdk')
+if (flutterRoot == null) {
+ throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
+}
+
+def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
+if (flutterVersionCode == null) {
+ flutterVersionCode = '1'
+}
+
+def flutterVersionName = localProperties.getProperty('flutter.versionName')
+if (flutterVersionName == null) {
+ flutterVersionName = '1.0'
+}
+
+apply plugin: 'com.android.application'
+apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
+
+android {
+ compileSdkVersion 31
+
+ lintOptions {
+ disable 'InvalidPackage'
+ }
+
+ defaultConfig {
+ applicationId "io.flutter.plugins.cameraexample"
+ minSdkVersion 21
+ targetSdkVersion 28
+ versionCode flutterVersionCode.toInteger()
+ versionName flutterVersionName
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ }
+
+ buildTypes {
+ release {
+ // TODO: Add your own signing config for the release build.
+ // Signing with the debug keys for now, so `flutter run --release` works.
+ signingConfig signingConfigs.debug
+ }
+ profile {
+ matchingFallbacks = ['debug', 'release']
+ }
+ }
+}
+
+flutter {
+ source '../..'
+}
+
+dependencies {
+ testImplementation 'junit:junit:4.13.2'
+ androidTestImplementation 'androidx.test:runner:1.2.0'
+ androidTestImplementation 'androidx.test:rules:1.2.0'
+ androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
+}
diff --git a/packages/camera/camera_android/example/android/app/gradle/wrapper/gradle-wrapper.properties b/packages/camera/camera_android/example/android/app/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..29e4134
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/packages/camera/camera_android/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java b/packages/camera/camera_android/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java
new file mode 100644
index 0000000..0f4298d
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface DartIntegrationTest {}
diff --git a/packages/camera/camera_android/example/android/app/src/androidTest/java/io/flutter/plugins/cameraexample/FlutterActivityTest.java b/packages/camera/camera_android/example/android/app/src/androidTest/java/io/flutter/plugins/cameraexample/FlutterActivityTest.java
new file mode 100644
index 0000000..39cae48
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/androidTest/java/io/flutter/plugins/cameraexample/FlutterActivityTest.java
@@ -0,0 +1,19 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.cameraexample;
+
+import androidx.test.rule.ActivityTestRule;
+import dev.flutter.plugins.integration_test.FlutterTestRunner;
+import io.flutter.embedding.android.FlutterActivity;
+import io.flutter.plugins.DartIntegrationTest;
+import org.junit.Rule;
+import org.junit.runner.RunWith;
+
+@DartIntegrationTest
+@RunWith(FlutterTestRunner.class)
+public class FlutterActivityTest {
+ @Rule
+ public ActivityTestRule<FlutterActivity> rule = new ActivityTestRule<>(FlutterActivity.class);
+}
diff --git a/packages/camera/camera_android/example/android/app/src/main/AndroidManifest.xml b/packages/camera/camera_android/example/android/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..cef2316
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/main/AndroidManifest.xml
@@ -0,0 +1,28 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="io.flutter.plugins.cameraexample">
+
+ <application
+ android:icon="@mipmap/ic_launcher"
+ android:label="camera_example">
+ <activity
+ android:configChanges="orientation|keyboardHidden|keyboard|screenSize|locale|layoutDirection"
+ android:hardwareAccelerated="true"
+ android:launchMode="singleTop"
+ android:name="io.flutter.embedding.android.FlutterActivity"
+ android:theme="@style/LaunchTheme"
+ android:windowSoftInputMode="adjustResize">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+ </activity>
+ <meta-data android:name="flutterEmbedding" android:value="2"/>
+ </application>
+
+ <uses-feature
+ android:name="android.hardware.camera"
+ android:required="true"/>
+
+ <uses-permission android:name="android.permission.INTERNET"/>
+ <uses-permission android:name="android.permission.FLASHLIGHT"/>
+</manifest>
diff --git a/packages/camera/camera_android/example/android/app/src/main/res/drawable/launch_background.xml b/packages/camera/camera_android/example/android/app/src/main/res/drawable/launch_background.xml
new file mode 100644
index 0000000..304732f
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/main/res/drawable/launch_background.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Modify this file to customize your launch splash screen -->
+<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
+ <item android:drawable="@android:color/white" />
+
+ <!-- You can insert your own image assets here -->
+ <!-- <item>
+ <bitmap
+ android:gravity="center"
+ android:src="@mipmap/launch_image" />
+ </item> -->
+</layer-list>
diff --git a/packages/camera/camera_android/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..db77bb4
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..17987b7
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..09d4391
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..d5f1c8d
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..4d6372e
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android/example/android/app/src/main/res/values/styles.xml b/packages/camera/camera_android/example/android/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..00fa441
--- /dev/null
+++ b/packages/camera/camera_android/example/android/app/src/main/res/values/styles.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
+ <!-- Show a splash screen on the activity. Automatically removed when
+ Flutter draws its first frame -->
+ <item name="android:windowBackground">@drawable/launch_background</item>
+ </style>
+</resources>
diff --git a/packages/camera/camera_android/example/android/build.gradle b/packages/camera/camera_android/example/android/build.gradle
new file mode 100644
index 0000000..c21bff8
--- /dev/null
+++ b/packages/camera/camera_android/example/android/build.gradle
@@ -0,0 +1,29 @@
+buildscript {
+ repositories {
+ google()
+ mavenCentral()
+ }
+
+ dependencies {
+ classpath 'com.android.tools.build:gradle:7.0.1'
+ }
+}
+
+allprojects {
+ repositories {
+ google()
+ mavenCentral()
+ }
+}
+
+rootProject.buildDir = '../build'
+subprojects {
+ project.buildDir = "${rootProject.buildDir}/${project.name}"
+}
+subprojects {
+ project.evaluationDependsOn(':app')
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/packages/camera/camera_android/example/android/gradle.properties b/packages/camera/camera_android/example/android/gradle.properties
new file mode 100644
index 0000000..d0448f1
--- /dev/null
+++ b/packages/camera/camera_android/example/android/gradle.properties
@@ -0,0 +1,4 @@
+org.gradle.jvmargs=-Xmx4G
+android.useAndroidX=true
+android.enableJetifier=false
+android.enableR8=true
diff --git a/packages/camera/camera_android/example/android/gradle/wrapper/gradle-wrapper.properties b/packages/camera/camera_android/example/android/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..297f2fe
--- /dev/null
+++ b/packages/camera/camera_android/example/android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip
diff --git a/packages/camera/camera_android/example/android/settings.gradle b/packages/camera/camera_android/example/android/settings.gradle
new file mode 100644
index 0000000..115da6c
--- /dev/null
+++ b/packages/camera/camera_android/example/android/settings.gradle
@@ -0,0 +1,15 @@
+include ':app'
+
+def flutterProjectRoot = rootProject.projectDir.parentFile.toPath()
+
+def plugins = new Properties()
+def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins')
+if (pluginsFile.exists()) {
+ pluginsFile.withInputStream { stream -> plugins.load(stream) }
+}
+
+plugins.each { name, path ->
+ def pluginDirectory = flutterProjectRoot.resolve(path).resolve('android').toFile()
+ include ":$name"
+ project(":$name").projectDir = pluginDirectory
+}
diff --git a/packages/camera/camera_android/example/integration_test/camera_test.dart b/packages/camera/camera_android/example/integration_test/camera_test.dart
new file mode 100644
index 0000000..e499872
--- /dev/null
+++ b/packages/camera/camera_android/example/integration_test/camera_test.dart
@@ -0,0 +1,287 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:io';
+import 'dart:ui';
+
+import 'package:camera_android/camera_android.dart';
+import 'package:camera_example/camera_controller.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/painting.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:path_provider/path_provider.dart';
+import 'package:video_player/video_player.dart';
+
+void main() {
+ late Directory testDir;
+
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ setUpAll(() async {
+ CameraPlatform.instance = AndroidCamera();
+ final Directory extDir = await getTemporaryDirectory();
+ testDir = await Directory('${extDir.path}/test').create(recursive: true);
+ });
+
+ tearDownAll(() async {
+ await testDir.delete(recursive: true);
+ });
+
+ final Map<ResolutionPreset, Size> presetExpectedSizes =
+ <ResolutionPreset, Size>{
+ ResolutionPreset.low: const Size(240, 320),
+ ResolutionPreset.medium: const Size(480, 720),
+ ResolutionPreset.high: const Size(720, 1280),
+ ResolutionPreset.veryHigh: const Size(1080, 1920),
+ ResolutionPreset.ultraHigh: const Size(2160, 3840),
+ // Don't bother checking for max here since it could be anything.
+ };
+
+  /// Verify that [actual] has dimensions that are no larger than
+  /// [expectedSize]. Allows for a mismatch in portrait vs landscape. Returns
+ /// whether the dimensions exactly match.
+ bool assertExpectedDimensions(Size expectedSize, Size actual) {
+ expect(actual.shortestSide, lessThanOrEqualTo(expectedSize.shortestSide));
+ expect(actual.longestSide, lessThanOrEqualTo(expectedSize.longestSide));
+ return actual.shortestSide == expectedSize.shortestSide &&
+ actual.longestSide == expectedSize.longestSide;
+ }
+
+ // This tests that the capture is no bigger than the preset, since we have
+ // automatic code to fall back to smaller sizes when we need to. Returns
+ // whether the image is exactly the desired resolution.
+ Future<bool> testCaptureImageResolution(
+ CameraController controller, ResolutionPreset preset) async {
+ final Size expectedSize = presetExpectedSizes[preset]!;
+
+ // Take Picture
+ final XFile file = await controller.takePicture();
+
+ // Load picture
+ final File fileImage = File(file.path);
+ final Image image = await decodeImageFromList(fileImage.readAsBytesSync());
+
+ // Verify image dimensions are as expected
+ expect(image, isNotNull);
+ return assertExpectedDimensions(
+ expectedSize, Size(image.height.toDouble(), image.width.toDouble()));
+ }
+
+ testWidgets(
+ 'Capture specific image resolutions',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+ for (final CameraDescription cameraDescription in cameras) {
+ bool previousPresetExactlySupported = true;
+ for (final MapEntry<ResolutionPreset, Size> preset
+ in presetExpectedSizes.entries) {
+ final CameraController controller =
+ CameraController(cameraDescription, preset.key);
+ await controller.initialize();
+ final bool presetExactlySupported =
+ await testCaptureImageResolution(controller, preset.key);
+ assert(!(!previousPresetExactlySupported && presetExactlySupported),
+ 'The camera took higher resolution pictures at a lower resolution.');
+ previousPresetExactlySupported = presetExactlySupported;
+ await controller.dispose();
+ }
+ }
+ },
+ // TODO(egarciad): Fix https://github.com/flutter/flutter/issues/93686.
+ skip: true,
+ );
+
+ // This tests that the capture is no bigger than the preset, since we have
+ // automatic code to fall back to smaller sizes when we need to. Returns
+  // whether the video is exactly the desired resolution.
+ Future<bool> testCaptureVideoResolution(
+ CameraController controller, ResolutionPreset preset) async {
+ final Size expectedSize = presetExpectedSizes[preset]!;
+
+ // Take Video
+ await controller.startVideoRecording();
+ sleep(const Duration(milliseconds: 300));
+ final XFile file = await controller.stopVideoRecording();
+
+ // Load video metadata
+ final File videoFile = File(file.path);
+ final VideoPlayerController videoController =
+ VideoPlayerController.file(videoFile);
+ await videoController.initialize();
+ final Size video = videoController.value.size;
+
+  // Verify video dimensions are as expected
+ expect(video, isNotNull);
+ return assertExpectedDimensions(
+ expectedSize, Size(video.height, video.width));
+ }
+
+ testWidgets(
+ 'Capture specific video resolutions',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+ for (final CameraDescription cameraDescription in cameras) {
+ bool previousPresetExactlySupported = true;
+ for (final MapEntry<ResolutionPreset, Size> preset
+ in presetExpectedSizes.entries) {
+ final CameraController controller =
+ CameraController(cameraDescription, preset.key);
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+ final bool presetExactlySupported =
+ await testCaptureVideoResolution(controller, preset.key);
+ assert(!(!previousPresetExactlySupported && presetExactlySupported),
+ 'The camera took higher resolution pictures at a lower resolution.');
+ previousPresetExactlySupported = presetExactlySupported;
+ await controller.dispose();
+ }
+ }
+ },
+ // TODO(egarciad): Fix https://github.com/flutter/flutter/issues/93686.
+ skip: true,
+ );
+
+ testWidgets('Pause and resume video recording', (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+
+ int startPause;
+ int timePaused = 0;
+
+ await controller.startVideoRecording();
+ final int recordingStart = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+
+ await controller.pauseVideoRecording();
+ startPause = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+ await controller.resumeVideoRecording();
+ timePaused += DateTime.now().millisecondsSinceEpoch - startPause;
+
+ sleep(const Duration(milliseconds: 500));
+
+ await controller.pauseVideoRecording();
+ startPause = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+ await controller.resumeVideoRecording();
+ timePaused += DateTime.now().millisecondsSinceEpoch - startPause;
+
+ sleep(const Duration(milliseconds: 500));
+
+ final XFile file = await controller.stopVideoRecording();
+ final int recordingTime =
+ DateTime.now().millisecondsSinceEpoch - recordingStart;
+
+ final File videoFile = File(file.path);
+ final VideoPlayerController videoController = VideoPlayerController.file(
+ videoFile,
+ );
+ await videoController.initialize();
+ final int duration = videoController.value.duration.inMilliseconds;
+ await videoController.dispose();
+
+ expect(duration, lessThan(recordingTime - timePaused));
+ });
+
+ testWidgets(
+ 'image streaming',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ bool isDetecting = false;
+
+ await controller.startImageStream((CameraImageData image) {
+ if (isDetecting) {
+ return;
+ }
+
+ isDetecting = true;
+
+ expectLater(image, isNotNull).whenComplete(() => isDetecting = false);
+ });
+
+ expect(controller.value.isStreamingImages, true);
+
+ sleep(const Duration(milliseconds: 500));
+
+ await controller.stopImageStream();
+ await controller.dispose();
+ },
+ );
+
+ testWidgets(
+ 'recording with image stream',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ bool isDetecting = false;
+
+ await controller.startVideoRecording(
+ streamCallback: (CameraImageData image) {
+ if (isDetecting) {
+ return;
+ }
+
+ isDetecting = true;
+
+ expectLater(image, isNotNull);
+ });
+
+ expect(controller.value.isStreamingImages, true);
+
+ // Stopping recording before anything is recorded will throw, per
+ // https://developer.android.com/reference/android/media/MediaRecorder.html#stop()
+ // so delay long enough to ensure that some data is recorded.
+ await Future<void>.delayed(const Duration(seconds: 2));
+
+ await controller.stopVideoRecording();
+ await controller.dispose();
+
+ expect(controller.value.isStreamingImages, false);
+ },
+ );
+}
diff --git a/packages/camera/camera_android/example/lib/camera_controller.dart b/packages/camera/camera_android/example/lib/camera_controller.dart
new file mode 100644
index 0000000..8139dcd
--- /dev/null
+++ b/packages/camera/camera_android/example/lib/camera_controller.dart
@@ -0,0 +1,554 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:collection';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+/// The state of a [CameraController].
+class CameraValue {
+ /// Creates a new camera controller state.
+ const CameraValue({
+ required this.isInitialized,
+ this.previewSize,
+ required this.isRecordingVideo,
+ required this.isTakingPicture,
+ required this.isStreamingImages,
+ required this.isRecordingPaused,
+ required this.flashMode,
+ required this.exposureMode,
+ required this.focusMode,
+ required this.deviceOrientation,
+ this.lockedCaptureOrientation,
+ this.recordingOrientation,
+ this.isPreviewPaused = false,
+ this.previewPauseOrientation,
+ });
+
+ /// Creates a new camera controller state for an uninitialized controller.
+ const CameraValue.uninitialized()
+ : this(
+ isInitialized: false,
+ isRecordingVideo: false,
+ isTakingPicture: false,
+ isStreamingImages: false,
+ isRecordingPaused: false,
+ flashMode: FlashMode.auto,
+ exposureMode: ExposureMode.auto,
+ focusMode: FocusMode.auto,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ isPreviewPaused: false,
+ );
+
+ /// True after [CameraController.initialize] has completed successfully.
+ final bool isInitialized;
+
+  /// True when a picture capture request has been sent but has not yet returned.
+ final bool isTakingPicture;
+
+ /// True when the camera is recording (not the same as previewing).
+ final bool isRecordingVideo;
+
+ /// True when images from the camera are being streamed.
+ final bool isStreamingImages;
+
+ /// True when video recording is paused.
+ final bool isRecordingPaused;
+
+ /// True when the preview widget has been paused manually.
+ final bool isPreviewPaused;
+
+ /// Set to the orientation the preview was paused in, if it is currently paused.
+ final DeviceOrientation? previewPauseOrientation;
+
+ /// The size of the preview in pixels.
+ ///
+ /// Is `null` until [isInitialized] is `true`.
+ final Size? previewSize;
+
+ /// The flash mode the camera is currently set to.
+ final FlashMode flashMode;
+
+ /// The exposure mode the camera is currently set to.
+ final ExposureMode exposureMode;
+
+ /// The focus mode the camera is currently set to.
+ final FocusMode focusMode;
+
+ /// The current device UI orientation.
+ final DeviceOrientation deviceOrientation;
+
+ /// The currently locked capture orientation.
+ final DeviceOrientation? lockedCaptureOrientation;
+
+ /// Whether the capture orientation is currently locked.
+ bool get isCaptureOrientationLocked => lockedCaptureOrientation != null;
+
+ /// The orientation of the currently running video recording.
+ final DeviceOrientation? recordingOrientation;
+
+ /// Creates a modified copy of the object.
+ ///
+ /// Explicitly specified fields get the specified value, all other fields get
+ /// the same value of the current object.
+ CameraValue copyWith({
+ bool? isInitialized,
+ bool? isRecordingVideo,
+ bool? isTakingPicture,
+ bool? isStreamingImages,
+ Size? previewSize,
+ bool? isRecordingPaused,
+ FlashMode? flashMode,
+ ExposureMode? exposureMode,
+ FocusMode? focusMode,
+ bool? exposurePointSupported,
+ bool? focusPointSupported,
+ DeviceOrientation? deviceOrientation,
+ Optional<DeviceOrientation>? lockedCaptureOrientation,
+ Optional<DeviceOrientation>? recordingOrientation,
+ bool? isPreviewPaused,
+ Optional<DeviceOrientation>? previewPauseOrientation,
+ }) {
+ return CameraValue(
+ isInitialized: isInitialized ?? this.isInitialized,
+ previewSize: previewSize ?? this.previewSize,
+ isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo,
+ isTakingPicture: isTakingPicture ?? this.isTakingPicture,
+ isStreamingImages: isStreamingImages ?? this.isStreamingImages,
+ isRecordingPaused: isRecordingPaused ?? this.isRecordingPaused,
+ flashMode: flashMode ?? this.flashMode,
+ exposureMode: exposureMode ?? this.exposureMode,
+ focusMode: focusMode ?? this.focusMode,
+ deviceOrientation: deviceOrientation ?? this.deviceOrientation,
+ lockedCaptureOrientation: lockedCaptureOrientation == null
+ ? this.lockedCaptureOrientation
+ : lockedCaptureOrientation.orNull,
+ recordingOrientation: recordingOrientation == null
+ ? this.recordingOrientation
+ : recordingOrientation.orNull,
+ isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused,
+ previewPauseOrientation: previewPauseOrientation == null
+ ? this.previewPauseOrientation
+ : previewPauseOrientation.orNull,
+ );
+ }
+
+ @override
+ String toString() {
+ return '${objectRuntimeType(this, 'CameraValue')}('
+ 'isRecordingVideo: $isRecordingVideo, '
+ 'isInitialized: $isInitialized, '
+ 'previewSize: $previewSize, '
+ 'isStreamingImages: $isStreamingImages, '
+ 'flashMode: $flashMode, '
+ 'exposureMode: $exposureMode, '
+ 'focusMode: $focusMode, '
+ 'deviceOrientation: $deviceOrientation, '
+ 'lockedCaptureOrientation: $lockedCaptureOrientation, '
+ 'recordingOrientation: $recordingOrientation, '
+ 'isPreviewPaused: $isPreviewPaused, '
+ 'previewPausedOrientation: $previewPauseOrientation)';
+ }
+}
+
+/// Controls a device camera.
+///
+/// This is a stripped-down version of the app-facing controller to serve as a
+/// utility for the example and integration tests. It wraps only the calls that
+/// have state associated with them, to consolidate tracking of camera state
+/// outside of the overall example code.
+class CameraController extends ValueNotifier<CameraValue> {
+ /// Creates a new camera controller in an uninitialized state.
+ CameraController(
+ this.description,
+ this.resolutionPreset, {
+ this.enableAudio = true,
+ this.imageFormatGroup,
+ }) : super(const CameraValue.uninitialized());
+
+ /// The properties of the camera device controlled by this controller.
+ final CameraDescription description;
+
+ /// The resolution this controller is targeting.
+ ///
+ /// This resolution preset is not guaranteed to be available on the device,
+ /// if unavailable a lower resolution will be used.
+ ///
+ /// See also: [ResolutionPreset].
+ final ResolutionPreset resolutionPreset;
+
+ /// Whether to include audio when recording a video.
+ final bool enableAudio;
+
+ /// The [ImageFormatGroup] describes the output of the raw image format.
+ ///
+ /// When null the imageFormat will fallback to the platforms default.
+ final ImageFormatGroup? imageFormatGroup;
+
+ late int _cameraId;
+
+ bool _isDisposed = false;
+ StreamSubscription<CameraImageData>? _imageStreamSubscription;
+ FutureOr<bool>? _initCalled;
+ StreamSubscription<DeviceOrientationChangedEvent>?
+ _deviceOrientationSubscription;
+
+ /// The camera identifier with which the controller is associated.
+ int get cameraId => _cameraId;
+
+ /// Initializes the camera on the device.
+ Future<void> initialize() async {
+ final Completer<CameraInitializedEvent> initializeCompleter =
+ Completer<CameraInitializedEvent>();
+
+ _deviceOrientationSubscription = CameraPlatform.instance
+ .onDeviceOrientationChanged()
+ .listen((DeviceOrientationChangedEvent event) {
+ value = value.copyWith(
+ deviceOrientation: event.orientation,
+ );
+ });
+
+ _cameraId = await CameraPlatform.instance.createCamera(
+ description,
+ resolutionPreset,
+ enableAudio: enableAudio,
+ );
+
+ CameraPlatform.instance
+ .onCameraInitialized(_cameraId)
+ .first
+ .then((CameraInitializedEvent event) {
+ initializeCompleter.complete(event);
+ });
+
+ await CameraPlatform.instance.initializeCamera(
+ _cameraId,
+ imageFormatGroup: imageFormatGroup ?? ImageFormatGroup.unknown,
+ );
+
+ value = value.copyWith(
+ isInitialized: true,
+ previewSize: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => Size(
+ event.previewWidth,
+ event.previewHeight,
+ )),
+ exposureMode: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.exposureMode),
+ focusMode: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.focusMode),
+ exposurePointSupported: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.exposurePointSupported),
+ focusPointSupported: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.focusPointSupported),
+ );
+
+ _initCalled = true;
+ }
+
+ /// Prepare the capture session for video recording.
+ Future<void> prepareForVideoRecording() async {
+ await CameraPlatform.instance.prepareForVideoRecording();
+ }
+
+ /// Pauses the current camera preview
+ Future<void> pausePreview() async {
+ await CameraPlatform.instance.pausePreview(_cameraId);
+ value = value.copyWith(
+ isPreviewPaused: true,
+ previewPauseOrientation: Optional<DeviceOrientation>.of(
+ value.lockedCaptureOrientation ?? value.deviceOrientation));
+ }
+
+ /// Resumes the current camera preview
+ Future<void> resumePreview() async {
+ await CameraPlatform.instance.resumePreview(_cameraId);
+ value = value.copyWith(
+ isPreviewPaused: false,
+ previewPauseOrientation: const Optional<DeviceOrientation>.absent());
+ }
+
+ /// Captures an image and returns the file where it was saved.
+ ///
+ /// Throws a [CameraException] if the capture fails.
+ Future<XFile> takePicture() async {
+ value = value.copyWith(isTakingPicture: true);
+ final XFile file = await CameraPlatform.instance.takePicture(_cameraId);
+ value = value.copyWith(isTakingPicture: false);
+ return file;
+ }
+
+ /// Start streaming images from platform camera.
+ Future<void> startImageStream(
+ Function(CameraImageData image) onAvailable) async {
+ _imageStreamSubscription = CameraPlatform.instance
+ .onStreamedFrameAvailable(_cameraId)
+ .listen((CameraImageData imageData) {
+ onAvailable(imageData);
+ });
+ value = value.copyWith(isStreamingImages: true);
+ }
+
+ /// Stop streaming images from platform camera.
+ Future<void> stopImageStream() async {
+ value = value.copyWith(isStreamingImages: false);
+ await _imageStreamSubscription?.cancel();
+ _imageStreamSubscription = null;
+ }
+
+ /// Start a video recording.
+ ///
+ /// The video is returned as a [XFile] after calling [stopVideoRecording].
+ /// Throws a [CameraException] if the capture fails.
+ Future<void> startVideoRecording(
+ {Function(CameraImageData image)? streamCallback}) async {
+ await CameraPlatform.instance.startVideoCapturing(
+ VideoCaptureOptions(_cameraId, streamCallback: streamCallback));
+ value = value.copyWith(
+ isRecordingVideo: true,
+ isRecordingPaused: false,
+ isStreamingImages: streamCallback != null,
+ recordingOrientation: Optional<DeviceOrientation>.of(
+ value.lockedCaptureOrientation ?? value.deviceOrientation));
+ }
+
+ /// Stops the video recording and returns the file where it was saved.
+ ///
+  /// Throws a [CameraException] if the capture fails.
+ Future<XFile> stopVideoRecording() async {
+ if (value.isStreamingImages) {
+ await stopImageStream();
+ }
+
+ final XFile file =
+ await CameraPlatform.instance.stopVideoRecording(_cameraId);
+ value = value.copyWith(
+ isRecordingVideo: false,
+ isRecordingPaused: false,
+ recordingOrientation: const Optional<DeviceOrientation>.absent(),
+ );
+ return file;
+ }
+
+ /// Pause video recording.
+ ///
+ /// This feature is only available on iOS and Android sdk 24+.
+ Future<void> pauseVideoRecording() async {
+ await CameraPlatform.instance.pauseVideoRecording(_cameraId);
+ value = value.copyWith(isRecordingPaused: true);
+ }
+
+ /// Resume video recording after pausing.
+ ///
+ /// This feature is only available on iOS and Android sdk 24+.
+ Future<void> resumeVideoRecording() async {
+ await CameraPlatform.instance.resumeVideoRecording(_cameraId);
+ value = value.copyWith(isRecordingPaused: false);
+ }
+
+ /// Returns a widget showing a live camera preview.
+ Widget buildPreview() {
+ return CameraPlatform.instance.buildPreview(_cameraId);
+ }
+
+ /// Sets the flash mode for taking pictures.
+ Future<void> setFlashMode(FlashMode mode) async {
+ await CameraPlatform.instance.setFlashMode(_cameraId, mode);
+ value = value.copyWith(flashMode: mode);
+ }
+
+ /// Sets the exposure mode for taking pictures.
+ Future<void> setExposureMode(ExposureMode mode) async {
+ await CameraPlatform.instance.setExposureMode(_cameraId, mode);
+ value = value.copyWith(exposureMode: mode);
+ }
+
+ /// Sets the exposure offset for the selected camera.
+ Future<double> setExposureOffset(double offset) async {
+ // Check if offset is in range
+ final List<double> range = await Future.wait(<Future<double>>[
+ CameraPlatform.instance.getMinExposureOffset(_cameraId),
+ CameraPlatform.instance.getMaxExposureOffset(_cameraId)
+ ]);
+
+ // Round to the closest step if needed
+ final double stepSize =
+ await CameraPlatform.instance.getExposureOffsetStepSize(_cameraId);
+ if (stepSize > 0) {
+ final double inv = 1.0 / stepSize;
+ double roundedOffset = (offset * inv).roundToDouble() / inv;
+ if (roundedOffset > range[1]) {
+ roundedOffset = (offset * inv).floorToDouble() / inv;
+ } else if (roundedOffset < range[0]) {
+ roundedOffset = (offset * inv).ceilToDouble() / inv;
+ }
+ offset = roundedOffset;
+ }
+
+ return CameraPlatform.instance.setExposureOffset(_cameraId, offset);
+ }
+
+ /// Locks the capture orientation.
+ ///
+  /// The current device orientation is used as the locked orientation.
+ Future<void> lockCaptureOrientation() async {
+ await CameraPlatform.instance
+ .lockCaptureOrientation(_cameraId, value.deviceOrientation);
+ value = value.copyWith(
+ lockedCaptureOrientation:
+ Optional<DeviceOrientation>.of(value.deviceOrientation));
+ }
+
+ /// Unlocks the capture orientation.
+ Future<void> unlockCaptureOrientation() async {
+ await CameraPlatform.instance.unlockCaptureOrientation(_cameraId);
+ value = value.copyWith(
+ lockedCaptureOrientation: const Optional<DeviceOrientation>.absent());
+ }
+
+ /// Sets the focus mode for taking pictures.
+ Future<void> setFocusMode(FocusMode mode) async {
+ await CameraPlatform.instance.setFocusMode(_cameraId, mode);
+ value = value.copyWith(focusMode: mode);
+ }
+
+ /// Releases the resources of this camera.
+ @override
+ Future<void> dispose() async {
+ if (_isDisposed) {
+ return;
+ }
+ _deviceOrientationSubscription?.cancel();
+ _isDisposed = true;
+ super.dispose();
+ if (_initCalled != null) {
+ await _initCalled;
+ await CameraPlatform.instance.dispose(_cameraId);
+ }
+ }
+
+ @override
+ void removeListener(VoidCallback listener) {
+ // Prevent ValueListenableBuilder in CameraPreview widget from causing an
+ // exception to be thrown by attempting to remove its own listener after
+ // the controller has already been disposed.
+ if (!_isDisposed) {
+ super.removeListener(listener);
+ }
+ }
+}
+
+/// A value that might be absent.
+///
+/// Used to represent [DeviceOrientation]s that are optional but also able
+/// to be cleared.
+@immutable
+class Optional<T> extends IterableBase<T> {
+ /// Constructs an empty Optional.
+ const Optional.absent() : _value = null;
+
+ /// Constructs an Optional of the given [value].
+ ///
+ /// Throws [ArgumentError] if [value] is null.
+ Optional.of(T value) : _value = value {
+ // TODO(cbracken): Delete and make this ctor const once mixed-mode
+ // execution is no longer around.
+ ArgumentError.checkNotNull(value);
+ }
+
+ /// Constructs an Optional of the given [value].
+ ///
+ /// If [value] is null, returns [absent()].
+ const Optional.fromNullable(T? value) : _value = value;
+
+ final T? _value;
+
+ /// True when this optional contains a value.
+ bool get isPresent => _value != null;
+
+ /// True when this optional contains no value.
+ bool get isNotPresent => _value == null;
+
+ /// Gets the Optional value.
+ ///
+ /// Throws [StateError] if [value] is null.
+ T get value {
+ if (_value == null) {
+ throw StateError('value called on absent Optional.');
+ }
+ return _value!;
+ }
+
+ /// Executes a function if the Optional value is present.
+ void ifPresent(void Function(T value) ifPresent) {
+ if (isPresent) {
+ ifPresent(_value as T);
+ }
+ }
+
+  /// Executes a function if the Optional value is absent.
+ void ifAbsent(void Function() ifAbsent) {
+ if (!isPresent) {
+ ifAbsent();
+ }
+ }
+
+ /// Gets the Optional value with a default.
+ ///
+ /// The default is returned if the Optional is [absent()].
+ ///
+ /// Throws [ArgumentError] if [defaultValue] is null.
+ T or(T defaultValue) {
+ return _value ?? defaultValue;
+ }
+
+ /// Gets the Optional value, or `null` if there is none.
+ T? get orNull => _value;
+
+ /// Transforms the Optional value.
+ ///
+ /// If the Optional is [absent()], returns [absent()] without applying the transformer.
+ ///
+ /// The transformer must not return `null`. If it does, an [ArgumentError] is thrown.
+ Optional<S> transform<S>(S Function(T value) transformer) {
+ return _value == null
+ ? Optional<S>.absent()
+ : Optional<S>.of(transformer(_value as T));
+ }
+
+ /// Transforms the Optional value.
+ ///
+ /// If the Optional is [absent()], returns [absent()] without applying the transformer.
+ ///
+ /// Returns [absent()] if the transformer returns `null`.
+ Optional<S> transformNullable<S>(S? Function(T value) transformer) {
+ return _value == null
+ ? Optional<S>.absent()
+ : Optional<S>.fromNullable(transformer(_value as T));
+ }
+
+ @override
+ Iterator<T> get iterator =>
+ isPresent ? <T>[_value as T].iterator : Iterable<T>.empty().iterator;
+
+ /// Delegates to the underlying [value] hashCode.
+ @override
+ int get hashCode => _value.hashCode;
+
+ /// Delegates to the underlying [value] operator==.
+ @override
+ bool operator ==(Object o) => o is Optional<T> && o._value == _value;
+
+ @override
+ String toString() {
+ return _value == null
+ ? 'Optional { absent }'
+ : 'Optional { value: $_value }';
+ }
+}
diff --git a/packages/camera/camera_android/example/lib/camera_preview.dart b/packages/camera/camera_android/example/lib/camera_preview.dart
new file mode 100644
index 0000000..5e8f64c
--- /dev/null
+++ b/packages/camera/camera_android/example/lib/camera_preview.dart
@@ -0,0 +1,85 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+import 'camera_controller.dart';
+
+/// A widget showing a live camera preview.
+class CameraPreview extends StatelessWidget {
+ /// Creates a preview widget for the given camera controller.
+ const CameraPreview(this.controller, {Key? key, this.child})
+ : super(key: key);
+
+ /// The controller for the camera that the preview is shown for.
+ final CameraController controller;
+
+ /// A widget to overlay on top of the camera preview
+ final Widget? child;
+
+ @override
+ Widget build(BuildContext context) {
+ return controller.value.isInitialized
+ ? ValueListenableBuilder<CameraValue>(
+ valueListenable: controller,
+ builder: (BuildContext context, Object? value, Widget? child) {
+ final double cameraAspectRatio =
+ controller.value.previewSize!.width /
+ controller.value.previewSize!.height;
+ return AspectRatio(
+ aspectRatio: _isLandscape()
+ ? cameraAspectRatio
+ : (1 / cameraAspectRatio),
+ child: Stack(
+ fit: StackFit.expand,
+ children: <Widget>[
+ _wrapInRotatedBox(child: controller.buildPreview()),
+ child ?? Container(),
+ ],
+ ),
+ );
+ },
+ child: child,
+ )
+ : Container();
+ }
+
+ Widget _wrapInRotatedBox({required Widget child}) {
+ if (kIsWeb || defaultTargetPlatform != TargetPlatform.android) {
+ return child;
+ }
+
+ return RotatedBox(
+ quarterTurns: _getQuarterTurns(),
+ child: child,
+ );
+ }
+
+ bool _isLandscape() {
+ return <DeviceOrientation>[
+ DeviceOrientation.landscapeLeft,
+ DeviceOrientation.landscapeRight
+ ].contains(_getApplicableOrientation());
+ }
+
+ int _getQuarterTurns() {
+ final Map<DeviceOrientation, int> turns = <DeviceOrientation, int>{
+ DeviceOrientation.portraitUp: 0,
+ DeviceOrientation.landscapeRight: 1,
+ DeviceOrientation.portraitDown: 2,
+ DeviceOrientation.landscapeLeft: 3,
+ };
+ return turns[_getApplicableOrientation()]!;
+ }
+
+ DeviceOrientation _getApplicableOrientation() {
+ return controller.value.isRecordingVideo
+ ? controller.value.recordingOrientation!
+ : (controller.value.previewPauseOrientation ??
+ controller.value.lockedCaptureOrientation ??
+ controller.value.deviceOrientation);
+ }
+}
diff --git a/packages/camera/camera_android/example/lib/main.dart b/packages/camera/camera_android/example/lib/main.dart
new file mode 100644
index 0000000..4d98aed
--- /dev/null
+++ b/packages/camera/camera_android/example/lib/main.dart
@@ -0,0 +1,1094 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:io';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/scheduler.dart';
+import 'package:video_player/video_player.dart';
+
+import 'camera_controller.dart';
+import 'camera_preview.dart';
+
+/// Camera example home widget.
+class CameraExampleHome extends StatefulWidget {
+ /// Default Constructor
+ const CameraExampleHome({Key? key}) : super(key: key);
+
+ @override
+ State<CameraExampleHome> createState() {
+ return _CameraExampleHomeState();
+ }
+}
+
+/// Returns a suitable camera icon for [direction].
+IconData getCameraLensIcon(CameraLensDirection direction) {
+ switch (direction) {
+ case CameraLensDirection.back:
+ return Icons.camera_rear;
+ case CameraLensDirection.front:
+ return Icons.camera_front;
+ case CameraLensDirection.external:
+ return Icons.camera;
+ }
+ // This enum is from a different package, so a new value could be added at
+ // any time. The example should keep working if that happens.
+ // ignore: dead_code
+ return Icons.camera;
+}
+
+/// Prints an error [code] plus an optional [message] to the console.
+void _logError(String code, String? message) {
+  final String details =
+      message == null ? '' : '\nError Message: $message';
+  // ignore: avoid_print
+  print('Error: $code$details');
+}
+
+class _CameraExampleHomeState extends State<CameraExampleHome>
+ with WidgetsBindingObserver, TickerProviderStateMixin {
+ CameraController? controller;
+ XFile? imageFile;
+ XFile? videoFile;
+ VideoPlayerController? videoController;
+ VoidCallback? videoPlayerListener;
+ bool enableAudio = true;
+ double _minAvailableExposureOffset = 0.0;
+ double _maxAvailableExposureOffset = 0.0;
+ double _currentExposureOffset = 0.0;
+ late AnimationController _flashModeControlRowAnimationController;
+ late Animation<double> _flashModeControlRowAnimation;
+ late AnimationController _exposureModeControlRowAnimationController;
+ late Animation<double> _exposureModeControlRowAnimation;
+ late AnimationController _focusModeControlRowAnimationController;
+ late Animation<double> _focusModeControlRowAnimation;
+ double _minAvailableZoom = 1.0;
+ double _maxAvailableZoom = 1.0;
+ double _currentScale = 1.0;
+ double _baseScale = 1.0;
+
+ // Counting pointers (number of user fingers on screen)
+ int _pointers = 0;
+
+  @override
+  void initState() {
+    super.initState();
+    _ambiguate(WidgetsBinding.instance)?.addObserver(this);
+
+    // One AnimationController + CurvedAnimation per expandable control row;
+    // each drives the SizeTransition that shows/hides that row.
+    _flashModeControlRowAnimationController = AnimationController(
+      duration: const Duration(milliseconds: 300),
+      vsync: this,
+    );
+    _flashModeControlRowAnimation = CurvedAnimation(
+      parent: _flashModeControlRowAnimationController,
+      curve: Curves.easeInCubic,
+    );
+    _exposureModeControlRowAnimationController = AnimationController(
+      duration: const Duration(milliseconds: 300),
+      vsync: this,
+    );
+    _exposureModeControlRowAnimation = CurvedAnimation(
+      parent: _exposureModeControlRowAnimationController,
+      curve: Curves.easeInCubic,
+    );
+    _focusModeControlRowAnimationController = AnimationController(
+      duration: const Duration(milliseconds: 300),
+      vsync: this,
+    );
+    _focusModeControlRowAnimation = CurvedAnimation(
+      parent: _focusModeControlRowAnimationController,
+      curve: Curves.easeInCubic,
+    );
+  }
+
+  @override
+  void dispose() {
+    // Stop observing app lifecycle events before tearing down controllers.
+    _ambiguate(WidgetsBinding.instance)?.removeObserver(this);
+    _flashModeControlRowAnimationController.dispose();
+    _exposureModeControlRowAnimationController.dispose();
+    // Fix: this controller is created in initState() alongside the other
+    // two but was never disposed, leaking its Ticker/AnimationController.
+    _focusModeControlRowAnimationController.dispose();
+    super.dispose();
+  }
+
+  @override
+  void didChangeAppLifecycleState(AppLifecycleState state) {
+    final CameraController? cameraController = controller;
+
+    // App state changed before we got the chance to initialize.
+    if (cameraController == null || !cameraController.value.isInitialized) {
+      return;
+    }
+
+    if (state == AppLifecycleState.inactive) {
+      // Release the camera while backgrounded so other apps can use it.
+      cameraController.dispose();
+    } else if (state == AppLifecycleState.resumed) {
+      // Re-create the controller for the camera that was previously in use.
+      onNewCameraSelected(cameraController.description);
+    }
+  }
+
+  @override
+  Widget build(BuildContext context) {
+    return Scaffold(
+      appBar: AppBar(
+        title: const Text('Camera example'),
+      ),
+      body: Column(
+        children: <Widget>[
+          Expanded(
+            child: Container(
+              decoration: BoxDecoration(
+                color: Colors.black,
+                // A red border signals that a video recording is in progress.
+                border: Border.all(
+                  color:
+                      controller != null && controller!.value.isRecordingVideo
+                          ? Colors.redAccent
+                          : Colors.grey,
+                  width: 3.0,
+                ),
+              ),
+              child: Padding(
+                padding: const EdgeInsets.all(1.0),
+                child: Center(
+                  child: _cameraPreviewWidget(),
+                ),
+              ),
+            ),
+          ),
+          _captureControlRowWidget(),
+          _modeControlRowWidget(),
+          Padding(
+            padding: const EdgeInsets.all(5.0),
+            child: Row(
+              children: <Widget>[
+                _cameraTogglesRowWidget(),
+                _thumbnailWidget(),
+              ],
+            ),
+          ),
+        ],
+      ),
+    );
+  }
+
+  /// Display the preview from the camera (or a message if the preview is not available).
+  Widget _cameraPreviewWidget() {
+    final CameraController? cameraController = controller;
+
+    if (cameraController == null || !cameraController.value.isInitialized) {
+      return const Text(
+        'Tap a camera',
+        style: TextStyle(
+          color: Colors.white,
+          fontSize: 24.0,
+          fontWeight: FontWeight.w900,
+        ),
+      );
+    } else {
+      return Listener(
+        // Track the number of fingers on screen so pinch-to-zoom can be
+        // restricted to exactly two pointers (see _handleScaleUpdate).
+        onPointerDown: (_) => _pointers++,
+        onPointerUp: (_) => _pointers--,
+        child: CameraPreview(
+          controller!,
+          child: LayoutBuilder(
+              builder: (BuildContext context, BoxConstraints constraints) {
+            return GestureDetector(
+              behavior: HitTestBehavior.opaque,
+              onScaleStart: _handleScaleStart,
+              onScaleUpdate: _handleScaleUpdate,
+              // Tapping the viewfinder sets focus/exposure to that point.
+              onTapDown: (TapDownDetails details) =>
+                  onViewFinderTap(details, constraints),
+            );
+          }),
+        ),
+      );
+    }
+  }
+
+  /// Remembers the zoom level at the start of a pinch gesture.
+  void _handleScaleStart(ScaleStartDetails details) {
+    _baseScale = _currentScale;
+  }
+
+  /// Applies pinch-to-zoom, clamped to the camera's supported zoom range.
+  Future<void> _handleScaleUpdate(ScaleUpdateDetails details) async {
+    // When there are not exactly two fingers on screen don't scale
+    if (controller == null || _pointers != 2) {
+      return;
+    }
+
+    _currentScale = (_baseScale * details.scale)
+        .clamp(_minAvailableZoom, _maxAvailableZoom);
+
+    await CameraPlatform.instance
+        .setZoomLevel(controller!.cameraId, _currentScale);
+  }
+
+  /// Display the thumbnail of the captured image or video.
+  Widget _thumbnailWidget() {
+    final VideoPlayerController? localVideoController = videoController;
+
+    return Expanded(
+      child: Align(
+        alignment: Alignment.centerRight,
+        child: Row(
+          mainAxisSize: MainAxisSize.min,
+          children: <Widget>[
+            if (localVideoController == null && imageFile == null)
+              Container()
+            else
+              SizedBox(
+                width: 64.0,
+                height: 64.0,
+                child: (localVideoController == null)
+                    ? (
+                        // The captured image on the web contains a network-accessible URL
+                        // pointing to a location within the browser. It may be displayed
+                        // either with Image.network or Image.memory after loading the image
+                        // bytes to memory.
+                        kIsWeb
+                            ? Image.network(imageFile!.path)
+                            : Image.file(File(imageFile!.path)))
+                    : Container(
+                        decoration: BoxDecoration(
+                            border: Border.all(color: Colors.pink)),
+                        child: Center(
+                          child: AspectRatio(
+                              // NOTE(review): `size` appears non-nullable on
+                              // current video_player, which would make this
+                              // check always true — presumably kept for older
+                              // SDKs; confirm before removing.
+                              aspectRatio:
+                                  localVideoController.value.size != null
+                                      ? localVideoController.value.aspectRatio
+                                      : 1.0,
+                              child: VideoPlayer(localVideoController)),
+                        ),
+                      ),
+              ),
+          ],
+        ),
+      ),
+    );
+  }
+
+  /// Display a bar with buttons to change the flash and exposure modes
+  Widget _modeControlRowWidget() {
+    return Column(
+      children: <Widget>[
+        Row(
+          mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+          children: <Widget>[
+            // All buttons are disabled (onPressed: null) until a camera
+            // controller exists.
+            IconButton(
+              icon: const Icon(Icons.flash_on),
+              color: Colors.blue,
+              onPressed: controller != null ? onFlashModeButtonPressed : null,
+            ),
+            // The exposure and focus mode are currently not supported on the web.
+            ...!kIsWeb
+                ? <Widget>[
+                    IconButton(
+                      icon: const Icon(Icons.exposure),
+                      color: Colors.blue,
+                      onPressed: controller != null
+                          ? onExposureModeButtonPressed
+                          : null,
+                    ),
+                    IconButton(
+                      icon: const Icon(Icons.filter_center_focus),
+                      color: Colors.blue,
+                      onPressed:
+                          controller != null ? onFocusModeButtonPressed : null,
+                    )
+                  ]
+                : <Widget>[],
+            IconButton(
+              icon: Icon(enableAudio ? Icons.volume_up : Icons.volume_mute),
+              color: Colors.blue,
+              onPressed: controller != null ? onAudioModeButtonPressed : null,
+            ),
+            IconButton(
+              icon: Icon(controller?.value.isCaptureOrientationLocked ?? false
+                  ? Icons.screen_lock_rotation
+                  : Icons.screen_rotation),
+              color: Colors.blue,
+              onPressed: controller != null
+                  ? onCaptureOrientationLockButtonPressed
+                  : null,
+            ),
+          ],
+        ),
+        // Collapsible detail rows, animated open/closed by the button
+        // handlers above.
+        _flashModeControlRowWidget(),
+        _exposureModeControlRowWidget(),
+        _focusModeControlRowWidget(),
+      ],
+    );
+  }
+
+  /// Collapsible row of flash-mode buttons; the active mode is highlighted
+  /// in orange.
+  Widget _flashModeControlRowWidget() {
+    return SizeTransition(
+      sizeFactor: _flashModeControlRowAnimation,
+      child: ClipRect(
+        child: Row(
+          mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+          children: <Widget>[
+            IconButton(
+              icon: const Icon(Icons.flash_off),
+              color: controller?.value.flashMode == FlashMode.off
+                  ? Colors.orange
+                  : Colors.blue,
+              onPressed: controller != null
+                  ? () => onSetFlashModeButtonPressed(FlashMode.off)
+                  : null,
+            ),
+            IconButton(
+              icon: const Icon(Icons.flash_auto),
+              color: controller?.value.flashMode == FlashMode.auto
+                  ? Colors.orange
+                  : Colors.blue,
+              onPressed: controller != null
+                  ? () => onSetFlashModeButtonPressed(FlashMode.auto)
+                  : null,
+            ),
+            IconButton(
+              icon: const Icon(Icons.flash_on),
+              color: controller?.value.flashMode == FlashMode.always
+                  ? Colors.orange
+                  : Colors.blue,
+              onPressed: controller != null
+                  ? () => onSetFlashModeButtonPressed(FlashMode.always)
+                  : null,
+            ),
+            IconButton(
+              icon: const Icon(Icons.highlight),
+              color: controller?.value.flashMode == FlashMode.torch
+                  ? Colors.orange
+                  : Colors.blue,
+              onPressed: controller != null
+                  ? () => onSetFlashModeButtonPressed(FlashMode.torch)
+                  : null,
+            ),
+          ],
+        ),
+      ),
+    );
+  }
+
+  /// Collapsible panel with exposure-mode buttons and an exposure-offset
+  /// slider; the active mode is highlighted in orange.
+  Widget _exposureModeControlRowWidget() {
+    final ButtonStyle styleAuto = TextButton.styleFrom(
+      // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+      // ignore: deprecated_member_use
+      primary: controller?.value.exposureMode == ExposureMode.auto
+          ? Colors.orange
+          : Colors.blue,
+    );
+    final ButtonStyle styleLocked = TextButton.styleFrom(
+      // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+      // ignore: deprecated_member_use
+      primary: controller?.value.exposureMode == ExposureMode.locked
+          ? Colors.orange
+          : Colors.blue,
+    );
+
+    return SizeTransition(
+      sizeFactor: _exposureModeControlRowAnimation,
+      child: ClipRect(
+        child: Container(
+          color: Colors.grey.shade50,
+          child: Column(
+            children: <Widget>[
+              const Center(
+                child: Text('Exposure Mode'),
+              ),
+              Row(
+                mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+                children: <Widget>[
+                  TextButton(
+                    style: styleAuto,
+                    onPressed: controller != null
+                        ? () =>
+                            onSetExposureModeButtonPressed(ExposureMode.auto)
+                        : null,
+                    // Long-pressing AUTO clears any custom exposure point.
+                    onLongPress: () {
+                      if (controller != null) {
+                        CameraPlatform.instance
+                            .setExposurePoint(controller!.cameraId, null);
+                        showInSnackBar('Resetting exposure point');
+                      }
+                    },
+                    child: const Text('AUTO'),
+                  ),
+                  TextButton(
+                    style: styleLocked,
+                    onPressed: controller != null
+                        ? () =>
+                            onSetExposureModeButtonPressed(ExposureMode.locked)
+                        : null,
+                    child: const Text('LOCKED'),
+                  ),
+                  TextButton(
+                    style: styleLocked,
+                    onPressed: controller != null
+                        ? () => controller!.setExposureOffset(0.0)
+                        : null,
+                    child: const Text('RESET OFFSET'),
+                  ),
+                ],
+              ),
+              const Center(
+                child: Text('Exposure Offset'),
+              ),
+              Row(
+                mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+                children: <Widget>[
+                  Text(_minAvailableExposureOffset.toString()),
+                  Slider(
+                    value: _currentExposureOffset,
+                    min: _minAvailableExposureOffset,
+                    max: _maxAvailableExposureOffset,
+                    label: _currentExposureOffset.toString(),
+                    // Disable the slider when the camera reports no
+                    // adjustable range (min == max).
+                    onChanged: _minAvailableExposureOffset ==
+                            _maxAvailableExposureOffset
+                        ? null
+                        : setExposureOffset,
+                  ),
+                  Text(_maxAvailableExposureOffset.toString()),
+                ],
+              ),
+            ],
+          ),
+        ),
+      ),
+    );
+  }
+
+  /// Collapsible panel with focus-mode buttons; the active mode is
+  /// highlighted in orange.
+  Widget _focusModeControlRowWidget() {
+    final ButtonStyle styleAuto = TextButton.styleFrom(
+      // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+      // ignore: deprecated_member_use
+      primary: controller?.value.focusMode == FocusMode.auto
+          ? Colors.orange
+          : Colors.blue,
+    );
+    final ButtonStyle styleLocked = TextButton.styleFrom(
+      // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+      // ignore: deprecated_member_use
+      primary: controller?.value.focusMode == FocusMode.locked
+          ? Colors.orange
+          : Colors.blue,
+    );
+
+    return SizeTransition(
+      sizeFactor: _focusModeControlRowAnimation,
+      child: ClipRect(
+        child: Container(
+          color: Colors.grey.shade50,
+          child: Column(
+            children: <Widget>[
+              const Center(
+                child: Text('Focus Mode'),
+              ),
+              Row(
+                mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+                children: <Widget>[
+                  TextButton(
+                    style: styleAuto,
+                    onPressed: controller != null
+                        ? () => onSetFocusModeButtonPressed(FocusMode.auto)
+                        : null,
+                    // Long-pressing AUTO clears any custom focus point.
+                    onLongPress: () {
+                      if (controller != null) {
+                        CameraPlatform.instance
+                            .setFocusPoint(controller!.cameraId, null);
+                        // Fix: only report the reset when it actually
+                        // happened. Previously the snack bar was shown even
+                        // with no controller, unlike the matching
+                        // exposure-point handler.
+                        showInSnackBar('Resetting focus point');
+                      }
+                    },
+                    child: const Text('AUTO'),
+                  ),
+                  TextButton(
+                    style: styleLocked,
+                    onPressed: controller != null
+                        ? () => onSetFocusModeButtonPressed(FocusMode.locked)
+                        : null,
+                    child: const Text('LOCKED'),
+                  ),
+                ],
+              ),
+            ],
+          ),
+        ),
+      ),
+    );
+  }
+
+  /// Display the control bar with buttons to take pictures and record videos.
+  Widget _captureControlRowWidget() {
+    final CameraController? cameraController = controller;
+
+    return Row(
+      mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+      children: <Widget>[
+        // Take picture: enabled only when initialized and not recording.
+        IconButton(
+          icon: const Icon(Icons.camera_alt),
+          color: Colors.blue,
+          onPressed: cameraController != null &&
+                  cameraController.value.isInitialized &&
+                  !cameraController.value.isRecordingVideo
+              ? onTakePictureButtonPressed
+              : null,
+        ),
+        // Start video recording.
+        IconButton(
+          icon: const Icon(Icons.videocam),
+          color: Colors.blue,
+          onPressed: cameraController != null &&
+                  cameraController.value.isInitialized &&
+                  !cameraController.value.isRecordingVideo
+              ? onVideoRecordButtonPressed
+              : null,
+        ),
+        // Pause/resume recording; icon reflects the next action.
+        IconButton(
+          icon: cameraController != null &&
+                  (!cameraController.value.isRecordingVideo ||
+                      cameraController.value.isRecordingPaused)
+              ? const Icon(Icons.play_arrow)
+              : const Icon(Icons.pause),
+          color: Colors.blue,
+          onPressed: cameraController != null &&
+                  cameraController.value.isInitialized &&
+                  cameraController.value.isRecordingVideo
+              ? (cameraController.value.isRecordingPaused)
+                  ? onResumeButtonPressed
+                  : onPauseButtonPressed
+              : null,
+        ),
+        // Stop recording.
+        IconButton(
+          icon: const Icon(Icons.stop),
+          color: Colors.red,
+          onPressed: cameraController != null &&
+                  cameraController.value.isInitialized &&
+                  cameraController.value.isRecordingVideo
+              ? onStopButtonPressed
+              : null,
+        ),
+        // Pause/resume the live preview.
+        IconButton(
+          icon: const Icon(Icons.pause_presentation),
+          color:
+              cameraController != null && cameraController.value.isPreviewPaused
+                  ? Colors.red
+                  : Colors.blue,
+          onPressed:
+              cameraController == null ? null : onPausePreviewButtonPressed,
+        ),
+      ],
+    );
+  }
+
+  /// Display a row of toggles to select the camera (or a message if no camera is available).
+  Widget _cameraTogglesRowWidget() {
+    final List<Widget> toggles = <Widget>[];
+
+    void onChanged(CameraDescription? description) {
+      if (description == null) {
+        return;
+      }
+
+      onNewCameraSelected(description);
+    }
+
+    if (_cameras.isEmpty) {
+      // Can't show a snack bar during build; defer it to after this frame.
+      _ambiguate(SchedulerBinding.instance)?.addPostFrameCallback((_) async {
+        showInSnackBar('No camera found.');
+      });
+      return const Text('None');
+    } else {
+      for (final CameraDescription cameraDescription in _cameras) {
+        toggles.add(
+          SizedBox(
+            width: 90.0,
+            child: RadioListTile<CameraDescription>(
+              title: Icon(getCameraLensIcon(cameraDescription.lensDirection)),
+              groupValue: controller?.description,
+              value: cameraDescription,
+              // Switching cameras is disabled while recording video.
+              onChanged:
+                  controller != null && controller!.value.isRecordingVideo
+                      ? null
+                      : onChanged,
+            ),
+          ),
+        );
+      }
+    }
+
+    return Row(children: toggles);
+  }
+
+  /// Returns the current time as milliseconds-since-epoch, as a string.
+  String timestamp() => DateTime.now().millisecondsSinceEpoch.toString();
+
+  /// Shows [message] to the user in a snack bar.
+  void showInSnackBar(String message) {
+    ScaffoldMessenger.of(context)
+        .showSnackBar(SnackBar(content: Text(message)));
+  }
+
+  /// Sets the exposure and focus points to the tapped viewfinder position,
+  /// expressed as fractions (0-1) of the preview's width and height.
+  void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) {
+    if (controller == null) {
+      return;
+    }
+
+    final CameraController cameraController = controller!;
+
+    final Point<double> point = Point<double>(
+      details.localPosition.dx / constraints.maxWidth,
+      details.localPosition.dy / constraints.maxHeight,
+    );
+    CameraPlatform.instance.setExposurePoint(cameraController.cameraId, point);
+    CameraPlatform.instance.setFocusPoint(cameraController.cameraId, point);
+  }
+
+  /// Disposes the current controller (if any), creates a new one for
+  /// [cameraDescription], initializes it, and queries the camera's exposure
+  /// and zoom ranges.
+  Future<void> onNewCameraSelected(CameraDescription cameraDescription) async {
+    final CameraController? oldController = controller;
+    if (oldController != null) {
+      // `controller` needs to be set to null before getting disposed,
+      // to avoid a race condition when we use the controller that is being
+      // disposed. This happens when camera permission dialog shows up,
+      // which triggers `didChangeAppLifecycleState`, which disposes and
+      // re-creates the controller.
+      controller = null;
+      await oldController.dispose();
+    }
+
+    final CameraController cameraController = CameraController(
+      cameraDescription,
+      kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium,
+      enableAudio: enableAudio,
+      imageFormatGroup: ImageFormatGroup.jpeg,
+    );
+
+    controller = cameraController;
+
+    // If the controller is updated then update the UI.
+    cameraController.addListener(() {
+      if (mounted) {
+        setState(() {});
+      }
+    });
+
+    try {
+      await cameraController.initialize();
+      // Query all supported ranges concurrently; each future stores its
+      // result directly into the corresponding state field.
+      await Future.wait(<Future<Object?>>[
+        // The exposure mode is currently not supported on the web.
+        ...!kIsWeb
+            ? <Future<Object?>>[
+                CameraPlatform.instance
+                    .getMinExposureOffset(cameraController.cameraId)
+                    .then(
+                        (double value) => _minAvailableExposureOffset = value),
+                CameraPlatform.instance
+                    .getMaxExposureOffset(cameraController.cameraId)
+                    .then((double value) => _maxAvailableExposureOffset = value)
+              ]
+            : <Future<Object?>>[],
+        CameraPlatform.instance
+            .getMaxZoomLevel(cameraController.cameraId)
+            .then((double value) => _maxAvailableZoom = value),
+        CameraPlatform.instance
+            .getMinZoomLevel(cameraController.cameraId)
+            .then((double value) => _minAvailableZoom = value),
+      ]);
+    } on CameraException catch (e) {
+      // Map known permission-related error codes to user-friendly messages.
+      switch (e.code) {
+        case 'CameraAccessDenied':
+          showInSnackBar('You have denied camera access.');
+          break;
+        case 'CameraAccessDeniedWithoutPrompt':
+          // iOS only
+          showInSnackBar('Please go to Settings app to enable camera access.');
+          break;
+        case 'CameraAccessRestricted':
+          // iOS only
+          showInSnackBar('Camera access is restricted.');
+          break;
+        case 'AudioAccessDenied':
+          showInSnackBar('You have denied audio access.');
+          break;
+        case 'AudioAccessDeniedWithoutPrompt':
+          // iOS only
+          showInSnackBar('Please go to Settings app to enable audio access.');
+          break;
+        case 'AudioAccessRestricted':
+          // iOS only
+          showInSnackBar('Audio access is restricted.');
+          break;
+        case 'cameraPermission':
+          // Android & web only
+          showInSnackBar('Unknown permission error.');
+          break;
+        default:
+          _showCameraException(e);
+          break;
+      }
+    }
+
+    if (mounted) {
+      setState(() {});
+    }
+  }
+
+  /// Takes a picture, shows it as the thumbnail, and reports the saved path.
+  void onTakePictureButtonPressed() {
+    takePicture().then((XFile? file) {
+      if (!mounted) {
+        return;
+      }
+      setState(() {
+        imageFile = file;
+        // A new picture replaces any video thumbnail.
+        videoController?.dispose();
+        videoController = null;
+      });
+      if (file != null) {
+        showInSnackBar('Picture saved to ${file.path}');
+      }
+    });
+  }
+
+  /// Toggles the flash-mode row; opening it collapses the other two rows so
+  /// at most one detail row is visible at a time.
+  void onFlashModeButtonPressed() {
+    if (_flashModeControlRowAnimationController.value == 1) {
+      _flashModeControlRowAnimationController.reverse();
+    } else {
+      _flashModeControlRowAnimationController.forward();
+      _exposureModeControlRowAnimationController.reverse();
+      _focusModeControlRowAnimationController.reverse();
+    }
+  }
+
+  /// Toggles the exposure-mode row, collapsing the other rows.
+  void onExposureModeButtonPressed() {
+    if (_exposureModeControlRowAnimationController.value == 1) {
+      _exposureModeControlRowAnimationController.reverse();
+    } else {
+      _exposureModeControlRowAnimationController.forward();
+      _flashModeControlRowAnimationController.reverse();
+      _focusModeControlRowAnimationController.reverse();
+    }
+  }
+
+  /// Toggles the focus-mode row, collapsing the other rows.
+  void onFocusModeButtonPressed() {
+    if (_focusModeControlRowAnimationController.value == 1) {
+      _focusModeControlRowAnimationController.reverse();
+    } else {
+      _focusModeControlRowAnimationController.forward();
+      _flashModeControlRowAnimationController.reverse();
+      _exposureModeControlRowAnimationController.reverse();
+    }
+  }
+
+  /// Flips audio recording on/off and re-creates the camera controller so
+  /// the new setting takes effect.
+  void onAudioModeButtonPressed() {
+    enableAudio = !enableAudio;
+    final CameraController? cameraController = controller;
+    if (cameraController != null) {
+      onNewCameraSelected(cameraController.description);
+    }
+  }
+
+  /// Toggles the capture orientation lock, reporting the new state in a
+  /// snack bar.
+  Future<void> onCaptureOrientationLockButtonPressed() async {
+    try {
+      if (controller != null) {
+        final CameraController cameraController = controller!;
+        if (cameraController.value.isCaptureOrientationLocked) {
+          await cameraController.unlockCaptureOrientation();
+          showInSnackBar('Capture orientation unlocked');
+        } else {
+          await cameraController.lockCaptureOrientation();
+          showInSnackBar(
+              'Capture orientation locked to ${cameraController.value.lockedCaptureOrientation.toString().split('.').last}');
+        }
+      }
+    } on CameraException catch (e) {
+      _showCameraException(e);
+    }
+  }
+
+  /// Applies [mode] as the flash mode, then refreshes the UI and confirms.
+  void onSetFlashModeButtonPressed(FlashMode mode) {
+    setFlashMode(mode).then((_) {
+      if (mounted) {
+        setState(() {});
+      }
+      showInSnackBar('Flash mode set to ${mode.toString().split('.').last}');
+    });
+  }
+
+  /// Applies [mode] as the exposure mode, then refreshes the UI and confirms.
+  void onSetExposureModeButtonPressed(ExposureMode mode) {
+    setExposureMode(mode).then((_) {
+      if (mounted) {
+        setState(() {});
+      }
+      showInSnackBar('Exposure mode set to ${mode.toString().split('.').last}');
+    });
+  }
+
+  /// Applies [mode] as the focus mode, then refreshes the UI and confirms.
+  void onSetFocusModeButtonPressed(FocusMode mode) {
+    setFocusMode(mode).then((_) {
+      if (mounted) {
+        setState(() {});
+      }
+      showInSnackBar('Focus mode set to ${mode.toString().split('.').last}');
+    });
+  }
+
+  /// Starts video recording and refreshes the UI when it begins.
+  void onVideoRecordButtonPressed() {
+    startVideoRecording().then((_) {
+      if (mounted) {
+        setState(() {});
+      }
+    });
+  }
+
+  /// Stops the current recording and, if a file was produced, plays it back
+  /// in the thumbnail area.
+  void onStopButtonPressed() {
+    stopVideoRecording().then((XFile? file) {
+      if (mounted) {
+        setState(() {});
+      }
+      if (file != null) {
+        showInSnackBar('Video recorded to ${file.path}');
+        videoFile = file;
+        _startVideoPlayer();
+      }
+    });
+  }
+
+  /// Toggles the live preview between paused and resumed.
+  Future<void> onPausePreviewButtonPressed() async {
+    final CameraController? cameraController = controller;
+
+    if (cameraController == null || !cameraController.value.isInitialized) {
+      showInSnackBar('Error: select a camera first.');
+      return;
+    }
+
+    if (cameraController.value.isPreviewPaused) {
+      await cameraController.resumePreview();
+    } else {
+      await cameraController.pausePreview();
+    }
+
+    if (mounted) {
+      setState(() {});
+    }
+  }
+
+  /// Pauses the in-progress video recording and confirms via snack bar.
+  void onPauseButtonPressed() {
+    pauseVideoRecording().then((_) {
+      if (mounted) {
+        setState(() {});
+      }
+      showInSnackBar('Video recording paused');
+    });
+  }
+
+  /// Resumes a paused video recording and confirms via snack bar.
+  void onResumeButtonPressed() {
+    resumeVideoRecording().then((_) {
+      if (mounted) {
+        setState(() {});
+      }
+      showInSnackBar('Video recording resumed');
+    });
+  }
+
+  /// Starts a video recording on the selected camera.
+  ///
+  /// No-ops (with a snack bar) if no camera is ready, and silently no-ops
+  /// if a recording is already in progress.
+  Future<void> startVideoRecording() async {
+    final CameraController? cameraController = controller;
+
+    if (cameraController == null || !cameraController.value.isInitialized) {
+      showInSnackBar('Error: select a camera first.');
+      return;
+    }
+
+    if (cameraController.value.isRecordingVideo) {
+      // A recording is already started, do nothing.
+      return;
+    }
+
+    try {
+      await cameraController.startVideoRecording();
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      return;
+    }
+  }
+
+  /// Stops the in-progress recording and returns the recorded file, or
+  /// null if nothing was recording or stopping failed.
+  Future<XFile?> stopVideoRecording() async {
+    final CameraController? cameraController = controller;
+
+    if (cameraController == null || !cameraController.value.isRecordingVideo) {
+      return null;
+    }
+
+    try {
+      // Fix: without `await`, a CameraException completing the returned
+      // future would bypass this try/catch and escape to the caller
+      // unhandled instead of being routed through _showCameraException.
+      return await cameraController.stopVideoRecording();
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      return null;
+    }
+  }
+
+  /// Pauses the in-progress recording; no-op when nothing is recording.
+  /// Rethrows [CameraException] after surfacing it to the user.
+  Future<void> pauseVideoRecording() async {
+    final CameraController? cameraController = controller;
+
+    if (cameraController == null || !cameraController.value.isRecordingVideo) {
+      return;
+    }
+
+    try {
+      await cameraController.pauseVideoRecording();
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      rethrow;
+    }
+  }
+
+  /// Resumes a paused recording; no-op when nothing is recording.
+  /// Rethrows [CameraException] after surfacing it to the user.
+  Future<void> resumeVideoRecording() async {
+    final CameraController? cameraController = controller;
+
+    if (cameraController == null || !cameraController.value.isRecordingVideo) {
+      return;
+    }
+
+    try {
+      await cameraController.resumeVideoRecording();
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      rethrow;
+    }
+  }
+
+  /// Applies [mode] to the camera; no-op when no controller exists.
+  /// Rethrows [CameraException] after surfacing it to the user.
+  Future<void> setFlashMode(FlashMode mode) async {
+    if (controller == null) {
+      return;
+    }
+
+    try {
+      await controller!.setFlashMode(mode);
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      rethrow;
+    }
+  }
+
+  /// Applies [mode] to the camera; no-op when no controller exists.
+  /// Rethrows [CameraException] after surfacing it to the user.
+  Future<void> setExposureMode(ExposureMode mode) async {
+    if (controller == null) {
+      return;
+    }
+
+    try {
+      await controller!.setExposureMode(mode);
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      rethrow;
+    }
+  }
+
+  /// Updates the exposure-offset slider state and applies [offset].
+  Future<void> setExposureOffset(double offset) async {
+    if (controller == null) {
+      return;
+    }
+
+    // Update the slider immediately for responsive UI feedback.
+    setState(() {
+      _currentExposureOffset = offset;
+    });
+    try {
+      // The camera may clamp the offset; the returned (actual) value is
+      // assigned back but not otherwise used here.
+      offset = await controller!.setExposureOffset(offset);
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      rethrow;
+    }
+  }
+
+  /// Applies [mode] to the camera; no-op when no controller exists.
+  /// Rethrows [CameraException] after surfacing it to the user.
+  Future<void> setFocusMode(FocusMode mode) async {
+    if (controller == null) {
+      return;
+    }
+
+    try {
+      await controller!.setFocusMode(mode);
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      rethrow;
+    }
+  }
+
+  /// Plays back [videoFile] in the thumbnail area, replacing any previous
+  /// image or video.
+  Future<void> _startVideoPlayer() async {
+    if (videoFile == null) {
+      return;
+    }
+
+    // On the web the captured file is a browser-internal URL; elsewhere it
+    // is a real file on disk.
+    final VideoPlayerController vController = kIsWeb
+        ? VideoPlayerController.network(videoFile!.path)
+        : VideoPlayerController.file(File(videoFile!.path));
+
+    videoPlayerListener = () {
+      // NOTE(review): `size` appears non-nullable on current video_player,
+      // making this check always true — presumably kept for older SDKs.
+      if (videoController != null && videoController!.value.size != null) {
+        // Refreshing the state to update video player with the correct ratio.
+        if (mounted) {
+          setState(() {});
+        }
+        // One-shot listener: detach once the size is known.
+        videoController!.removeListener(videoPlayerListener!);
+      }
+    };
+    vController.addListener(videoPlayerListener!);
+    await vController.setLooping(true);
+    await vController.initialize();
+    // Dispose the previous player only after the new one is ready.
+    await videoController?.dispose();
+    if (mounted) {
+      setState(() {
+        imageFile = null;
+        videoController = vController;
+      });
+    }
+    await vController.play();
+  }
+
+  /// Captures a still image and returns it, or null when no camera is
+  /// ready, a capture is already pending, or the capture failed.
+  Future<XFile?> takePicture() async {
+    final CameraController? cameraController = controller;
+    if (cameraController == null || !cameraController.value.isInitialized) {
+      showInSnackBar('Error: select a camera first.');
+      return null;
+    }
+
+    if (cameraController.value.isTakingPicture) {
+      // A capture is already pending, do nothing.
+      return null;
+    }
+
+    try {
+      final XFile file = await cameraController.takePicture();
+      return file;
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      return null;
+    }
+  }
+
+  /// Logs [e] to the console and surfaces it to the user in a snack bar.
+  void _showCameraException(CameraException e) {
+    _logError(e.code, e.description);
+    showInSnackBar('Error: ${e.code}\n${e.description}');
+  }
+}
+
+/// CameraApp is the Main Application.
+class CameraApp extends StatelessWidget {
+  /// Default Constructor
+  const CameraApp({Key? key}) : super(key: key);
+
+  @override
+  Widget build(BuildContext context) => const MaterialApp(
+        home: CameraExampleHome(),
+      );
+}
+
+// The cameras reported by the platform, fetched once at startup.
+List<CameraDescription> _cameras = <CameraDescription>[];
+
+Future<void> main() async {
+  // Fetch the available cameras before initializing the app.
+  try {
+    WidgetsFlutterBinding.ensureInitialized();
+    _cameras = await CameraPlatform.instance.availableCameras();
+  } on CameraException catch (e) {
+    // Start the app anyway; the UI will report "No camera found.".
+    _logError(e.code, e.description);
+  }
+  runApp(const CameraApp());
+}
+
+/// This allows a value of type T or T? to be treated as a value of type T?.
+///
+/// We use this so that APIs that have become non-nullable can still be used
+/// with `!` and `?` on the stable branch.
+T? _ambiguate<T>(T? value) => value;
diff --git a/packages/camera/camera_android/example/pubspec.yaml b/packages/camera/camera_android/example/pubspec.yaml
new file mode 100644
index 0000000..e23e31a
--- /dev/null
+++ b/packages/camera/camera_android/example/pubspec.yaml
@@ -0,0 +1,34 @@
+name: camera_example
+description: Demonstrates how to use the camera plugin.
+publish_to: none
+
+environment:
+ sdk: ">=2.14.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+dependencies:
+ camera_android:
+ # When depending on this package from a real application you should use:
+ # camera_android: ^x.y.z
+ # See https://dart.dev/tools/pub/dependencies#version-constraints
+ # The example app is bundled with the plugin so we use a path dependency on
+ # the parent directory to use the current plugin's version.
+ path: ../
+ camera_platform_interface: ^2.3.1
+ flutter:
+ sdk: flutter
+ path_provider: ^2.0.0
+ quiver: ^3.0.0
+ video_player: ^2.1.4
+
+dev_dependencies:
+ build_runner: ^2.1.10
+ flutter_driver:
+ sdk: flutter
+ flutter_test:
+ sdk: flutter
+ integration_test:
+ sdk: flutter
+
+flutter:
+ uses-material-design: true
diff --git a/packages/camera/camera_android/example/test_driver/integration_test.dart b/packages/camera/camera_android/example/test_driver/integration_test.dart
new file mode 100644
index 0000000..aa57599
--- /dev/null
+++ b/packages/camera/camera_android/example/test_driver/integration_test.dart
@@ -0,0 +1,66 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// ignore_for_file: avoid_print
+
+import 'dart:async';
+import 'dart:convert';
+import 'dart:io';
+
+import 'package:flutter_driver/flutter_driver.dart';
+
+const String _examplePackage = 'io.flutter.plugins.cameraexample';
+
+Future<void> main() async {
+  // The test drives `adb`, which is only expected on POSIX hosts.
+  if (!(Platform.isLinux || Platform.isMacOS)) {
+    print('This test must be run on a POSIX host. Skipping...');
+    exit(0);
+  }
+  final bool adbExists =
+      Process.runSync('which', <String>['adb']).exitCode == 0;
+  if (!adbExists) {
+    print(r'This test needs ADB to exist on the $PATH. Skipping...');
+    exit(0);
+  }
+  // Grant camera/microphone permissions up front so no permission dialog
+  // blocks the driver during the run.
+  print('Granting camera permissions...');
+  Process.runSync('adb', <String>[
+    'shell',
+    'pm',
+    'grant',
+    _examplePackage,
+    'android.permission.CAMERA'
+  ]);
+  Process.runSync('adb', <String>[
+    'shell',
+    'pm',
+    'grant',
+    _examplePackage,
+    'android.permission.RECORD_AUDIO'
+  ]);
+  print('Starting test.');
+  final FlutterDriver driver = await FlutterDriver.connect();
+  // The app-side test reports its outcome as a JSON string via requestData.
+  final String data = await driver.requestData(
+    null,
+    timeout: const Duration(minutes: 1),
+  );
+  await driver.close();
+  // Revoke the permissions again so later runs start from a clean state.
+  print('Test finished. Revoking camera permissions...');
+  Process.runSync('adb', <String>[
+    'shell',
+    'pm',
+    'revoke',
+    _examplePackage,
+    'android.permission.CAMERA'
+  ]);
+  Process.runSync('adb', <String>[
+    'shell',
+    'pm',
+    'revoke',
+    _examplePackage,
+    'android.permission.RECORD_AUDIO'
+  ]);
+
+  final Map<String, dynamic> result = jsonDecode(data) as Map<String, dynamic>;
+  // Exit code mirrors the reported result so CI can detect failure.
+  exit(result['result'] == 'true' ? 0 : 1);
+}
diff --git a/packages/camera/camera_android/lib/camera_android.dart b/packages/camera/camera_android/lib/camera_android.dart
new file mode 100644
index 0000000..93e3e17
--- /dev/null
+++ b/packages/camera/camera_android/lib/camera_android.dart
@@ -0,0 +1,5 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'src/android_camera.dart';
diff --git a/packages/camera/camera_android/lib/src/android_camera.dart b/packages/camera/camera_android/lib/src/android_camera.dart
new file mode 100644
index 0000000..9ab9b57
--- /dev/null
+++ b/packages/camera/camera_android/lib/src/android_camera.dart
@@ -0,0 +1,633 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:stream_transform/stream_transform.dart';
+
+import 'type_conversion.dart';
+import 'utils.dart';
+
+const MethodChannel _channel =
+ MethodChannel('plugins.flutter.io/camera_android');
+
+/// The Android implementation of [CameraPlatform] that uses method channels.
+class AndroidCamera extends CameraPlatform {
+ /// Registers this class as the default instance of [CameraPlatform].
+ static void registerWith() {
+ CameraPlatform.instance = AndroidCamera();
+ }
+
+ final Map<int, MethodChannel> _channels = <int, MethodChannel>{};
+
+ /// The name of the channel that device events from the platform side are
+ /// sent on.
+ @visibleForTesting
+ static const String deviceEventChannelName =
+ 'plugins.flutter.io/camera_android/fromPlatform';
+
+ /// The controller we need to broadcast the different events coming
+ /// from handleMethodCall, specific to camera events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ final StreamController<CameraEvent> cameraEventStreamController =
+ StreamController<CameraEvent>.broadcast();
+
+ /// The controller we need to broadcast the different events coming
+ /// from handleMethodCall, specific to general device events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ late final StreamController<DeviceEvent> _deviceEventStreamController =
+ _createDeviceEventStreamController();
+
+ StreamController<DeviceEvent> _createDeviceEventStreamController() {
+ // Set up the method handler lazily.
+ const MethodChannel channel = MethodChannel(deviceEventChannelName);
+ channel.setMethodCallHandler(_handleDeviceMethodCall);
+ return StreamController<DeviceEvent>.broadcast();
+ }
+
+ // The stream to receive frames from the native code.
+ StreamSubscription<dynamic>? _platformImageStreamSubscription;
+
+ // The stream for vending frames to platform interface clients.
+ StreamController<CameraImageData>? _frameStreamController;
+
+ Stream<CameraEvent> _cameraEvents(int cameraId) =>
+ cameraEventStreamController.stream
+ .where((CameraEvent event) => event.cameraId == cameraId);
+
+ @override
+ Future<List<CameraDescription>> availableCameras() async {
+ try {
+ final List<Map<dynamic, dynamic>>? cameras = await _channel
+ .invokeListMethod<Map<dynamic, dynamic>>('availableCameras');
+
+ if (cameras == null) {
+ return <CameraDescription>[];
+ }
+
+ return cameras.map((Map<dynamic, dynamic> camera) {
+ return CameraDescription(
+ name: camera['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(camera['lensFacing']! as String),
+ sensorOrientation: camera['sensorOrientation']! as int,
+ );
+ }).toList();
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<int> createCamera(
+ CameraDescription cameraDescription,
+ ResolutionPreset? resolutionPreset, {
+ bool enableAudio = false,
+ }) async {
+ try {
+ final Map<String, dynamic>? reply = await _channel
+ .invokeMapMethod<String, dynamic>('create', <String, dynamic>{
+ 'cameraName': cameraDescription.name,
+ 'resolutionPreset': resolutionPreset != null
+ ? _serializeResolutionPreset(resolutionPreset)
+ : null,
+ 'enableAudio': enableAudio,
+ });
+
+ return reply!['cameraId']! as int;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> initializeCamera(
+ int cameraId, {
+ ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+ }) {
+ _channels.putIfAbsent(cameraId, () {
+ final MethodChannel channel =
+ MethodChannel('plugins.flutter.io/camera_android/camera$cameraId');
+ channel.setMethodCallHandler(
+ (MethodCall call) => handleCameraMethodCall(call, cameraId));
+ return channel;
+ });
+
+ final Completer<void> completer = Completer<void>();
+
+ onCameraInitialized(cameraId).first.then((CameraInitializedEvent value) {
+ completer.complete();
+ });
+
+ _channel.invokeMapMethod<String, dynamic>(
+ 'initialize',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'imageFormatGroup': imageFormatGroup.name(),
+ },
+ ).catchError(
+ // TODO(srawlins): This should return a value of the future's type. This
+ // will fail upcoming analysis checks with
+ // https://github.com/flutter/flutter/issues/105750.
+ // ignore: body_might_complete_normally_catch_error
+ (Object error, StackTrace stackTrace) {
+ if (error is! PlatformException) {
+ // ignore: only_throw_errors
+ throw error;
+ }
+ completer.completeError(
+ CameraException(error.code, error.message),
+ stackTrace,
+ );
+ },
+ );
+
+ return completer.future;
+ }
+
+ @override
+ Future<void> dispose(int cameraId) async {
+ if (_channels.containsKey(cameraId)) {
+ final MethodChannel? cameraChannel = _channels[cameraId];
+ cameraChannel?.setMethodCallHandler(null);
+ _channels.remove(cameraId);
+ }
+
+ await _channel.invokeMethod<void>(
+ 'dispose',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
+ }
+
+ @override
+ Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraResolutionChangedEvent>();
+ }
+
+ @override
+ Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
+ }
+
+ @override
+ Stream<CameraErrorEvent> onCameraError(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraErrorEvent>();
+ }
+
+ @override
+ Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
+ return _cameraEvents(cameraId).whereType<VideoRecordedEvent>();
+ }
+
+ @override
+ Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+ return _deviceEventStreamController.stream
+ .whereType<DeviceOrientationChangedEvent>();
+ }
+
+ @override
+ Future<void> lockCaptureOrientation(
+ int cameraId,
+ DeviceOrientation orientation,
+ ) async {
+ await _channel.invokeMethod<String>(
+ 'lockCaptureOrientation',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'orientation': serializeDeviceOrientation(orientation)
+ },
+ );
+ }
+
+ @override
+ Future<void> unlockCaptureOrientation(int cameraId) async {
+ await _channel.invokeMethod<String>(
+ 'unlockCaptureOrientation',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<XFile> takePicture(int cameraId) async {
+ final String? path = await _channel.invokeMethod<String>(
+ 'takePicture',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ if (path == null) {
+ throw CameraException(
+ 'INVALID_PATH',
+ 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+ );
+ }
+
+ return XFile(path);
+ }
+
+ @override
+ Future<void> prepareForVideoRecording() =>
+ _channel.invokeMethod<void>('prepareForVideoRecording');
+
+ @override
+ Future<void> startVideoRecording(int cameraId,
+ {Duration? maxVideoDuration}) async {
+ return startVideoCapturing(
+ VideoCaptureOptions(cameraId, maxDuration: maxVideoDuration));
+ }
+
+ @override
+ Future<void> startVideoCapturing(VideoCaptureOptions options) async {
+ await _channel.invokeMethod<void>(
+ 'startVideoRecording',
+ <String, dynamic>{
+ 'cameraId': options.cameraId,
+ 'maxVideoDuration': options.maxDuration?.inMilliseconds,
+ 'enableStream': options.streamCallback != null,
+ },
+ );
+
+ if (options.streamCallback != null) {
+ _installStreamController().stream.listen(options.streamCallback);
+ _startStreamListener();
+ }
+ }
+
+ @override
+ Future<XFile> stopVideoRecording(int cameraId) async {
+ final String? path = await _channel.invokeMethod<String>(
+ 'stopVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ if (path == null) {
+ throw CameraException(
+ 'INVALID_PATH',
+ 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+ );
+ }
+
+ return XFile(path);
+ }
+
+ @override
+ Future<void> pauseVideoRecording(int cameraId) => _channel.invokeMethod<void>(
+ 'pauseVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ @override
+ Future<void> resumeVideoRecording(int cameraId) =>
+ _channel.invokeMethod<void>(
+ 'resumeVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ @override
+ Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
+ {CameraImageStreamOptions? options}) {
+ _installStreamController(onListen: _onFrameStreamListen);
+ return _frameStreamController!.stream;
+ }
+
+ StreamController<CameraImageData> _installStreamController(
+ {Function()? onListen}) {
+ _frameStreamController = StreamController<CameraImageData>(
+ onListen: onListen ?? () {},
+ onPause: _onFrameStreamPauseResume,
+ onResume: _onFrameStreamPauseResume,
+ onCancel: _onFrameStreamCancel,
+ );
+ return _frameStreamController!;
+ }
+
+ void _onFrameStreamListen() {
+ _startPlatformStream();
+ }
+
+ Future<void> _startPlatformStream() async {
+ await _channel.invokeMethod<void>('startImageStream');
+ _startStreamListener();
+ }
+
+ void _startStreamListener() {
+ const EventChannel cameraEventChannel =
+ EventChannel('plugins.flutter.io/camera_android/imageStream');
+ _platformImageStreamSubscription =
+ cameraEventChannel.receiveBroadcastStream().listen((dynamic imageData) {
+ _frameStreamController!
+ .add(cameraImageFromPlatformData(imageData as Map<dynamic, dynamic>));
+ });
+ }
+
+ FutureOr<void> _onFrameStreamCancel() async {
+ await _channel.invokeMethod<void>('stopImageStream');
+ await _platformImageStreamSubscription?.cancel();
+ _platformImageStreamSubscription = null;
+ _frameStreamController = null;
+ }
+
+ void _onFrameStreamPauseResume() {
+ throw CameraException('InvalidCall',
+ 'Pause and resume are not supported for onStreamedFrameAvailable');
+ }
+
+ @override
+ Future<void> setFlashMode(int cameraId, FlashMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setFlashMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': _serializeFlashMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setExposureMode(int cameraId, ExposureMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setExposureMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': serializeExposureMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setExposurePoint(int cameraId, Point<double>? point) {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ return _channel.invokeMethod<void>(
+ 'setExposurePoint',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'reset': point == null,
+ 'x': point?.x,
+ 'y': point?.y,
+ },
+ );
+ }
+
+ @override
+ Future<double> getMinExposureOffset(int cameraId) async {
+ final double? minExposureOffset = await _channel.invokeMethod<double>(
+ 'getMinExposureOffset',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return minExposureOffset!;
+ }
+
+ @override
+ Future<double> getMaxExposureOffset(int cameraId) async {
+ final double? maxExposureOffset = await _channel.invokeMethod<double>(
+ 'getMaxExposureOffset',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return maxExposureOffset!;
+ }
+
+ @override
+ Future<double> getExposureOffsetStepSize(int cameraId) async {
+ final double? stepSize = await _channel.invokeMethod<double>(
+ 'getExposureOffsetStepSize',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return stepSize!;
+ }
+
+ @override
+ Future<double> setExposureOffset(int cameraId, double offset) async {
+ final double? appliedOffset = await _channel.invokeMethod<double>(
+ 'setExposureOffset',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'offset': offset,
+ },
+ );
+
+ return appliedOffset!;
+ }
+
+ @override
+ Future<void> setFocusMode(int cameraId, FocusMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setFocusMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': serializeFocusMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setFocusPoint(int cameraId, Point<double>? point) {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ return _channel.invokeMethod<void>(
+ 'setFocusPoint',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'reset': point == null,
+ 'x': point?.x,
+ 'y': point?.y,
+ },
+ );
+ }
+
+ @override
+ Future<double> getMaxZoomLevel(int cameraId) async {
+ final double? maxZoomLevel = await _channel.invokeMethod<double>(
+ 'getMaxZoomLevel',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return maxZoomLevel!;
+ }
+
+ @override
+ Future<double> getMinZoomLevel(int cameraId) async {
+ final double? minZoomLevel = await _channel.invokeMethod<double>(
+ 'getMinZoomLevel',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return minZoomLevel!;
+ }
+
+ @override
+ Future<void> setZoomLevel(int cameraId, double zoom) async {
+ try {
+ await _channel.invokeMethod<double>(
+ 'setZoomLevel',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'zoom': zoom,
+ },
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> pausePreview(int cameraId) async {
+ await _channel.invokeMethod<double>(
+ 'pausePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<void> resumePreview(int cameraId) async {
+ await _channel.invokeMethod<double>(
+ 'resumePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Widget buildPreview(int cameraId) {
+ return Texture(textureId: cameraId);
+ }
+
+ /// Returns the flash mode as a String.
+ String _serializeFlashMode(FlashMode flashMode) {
+ switch (flashMode) {
+ case FlashMode.off:
+ return 'off';
+ case FlashMode.auto:
+ return 'auto';
+ case FlashMode.always:
+ return 'always';
+ case FlashMode.torch:
+ return 'torch';
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // ignore: dead_code
+ return 'off';
+ }
+
+ /// Returns the resolution preset as a String.
+ String _serializeResolutionPreset(ResolutionPreset resolutionPreset) {
+ switch (resolutionPreset) {
+ case ResolutionPreset.max:
+ return 'max';
+ case ResolutionPreset.ultraHigh:
+ return 'ultraHigh';
+ case ResolutionPreset.veryHigh:
+ return 'veryHigh';
+ case ResolutionPreset.high:
+ return 'high';
+ case ResolutionPreset.medium:
+ return 'medium';
+ case ResolutionPreset.low:
+ return 'low';
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // ignore: dead_code
+ return 'max';
+ }
+
+ /// Converts messages received from the native platform into device events.
+ Future<dynamic> _handleDeviceMethodCall(MethodCall call) async {
+ switch (call.method) {
+ case 'orientation_changed':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ _deviceEventStreamController.add(DeviceOrientationChangedEvent(
+ deserializeDeviceOrientation(arguments['orientation']! as String)));
+ break;
+ default:
+ throw MissingPluginException();
+ }
+ }
+
+ /// Converts messages received from the native platform into camera events.
+ ///
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ Future<dynamic> handleCameraMethodCall(MethodCall call, int cameraId) async {
+ switch (call.method) {
+ case 'initialized':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ arguments['previewWidth']! as double,
+ arguments['previewHeight']! as double,
+ deserializeExposureMode(arguments['exposureMode']! as String),
+ arguments['exposurePointSupported']! as bool,
+ deserializeFocusMode(arguments['focusMode']! as String),
+ arguments['focusPointSupported']! as bool,
+ ));
+ break;
+ case 'resolution_changed':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ cameraEventStreamController.add(CameraResolutionChangedEvent(
+ cameraId,
+ arguments['captureWidth']! as double,
+ arguments['captureHeight']! as double,
+ ));
+ break;
+ case 'camera_closing':
+ cameraEventStreamController.add(CameraClosingEvent(
+ cameraId,
+ ));
+ break;
+ case 'video_recorded':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ cameraEventStreamController.add(VideoRecordedEvent(
+ cameraId,
+ XFile(arguments['path']! as String),
+ arguments['maxVideoDuration'] != null
+ ? Duration(milliseconds: arguments['maxVideoDuration']! as int)
+ : null,
+ ));
+ break;
+ case 'error':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ cameraEventStreamController.add(CameraErrorEvent(
+ cameraId,
+ arguments['description']! as String,
+ ));
+ break;
+ default:
+ throw MissingPluginException();
+ }
+ }
+
+  /// Returns the arguments of [call] as a typed string-keyed Map.
+ ///
+ /// This does not do any type validation, so is only safe to call if the
+ /// arguments are known to be a map.
+ Map<String, Object?> _getArgumentDictionary(MethodCall call) {
+ return (call.arguments as Map<Object?, Object?>).cast<String, Object?>();
+ }
+}
diff --git a/packages/camera/camera_android/lib/src/type_conversion.dart b/packages/camera/camera_android/lib/src/type_conversion.dart
new file mode 100644
index 0000000..754a5a0
--- /dev/null
+++ b/packages/camera/camera_android/lib/src/type_conversion.dart
@@ -0,0 +1,49 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+
+/// Converts method channel call [data] for `receivedImageStreamData` to a
+/// [CameraImageData].
+CameraImageData cameraImageFromPlatformData(Map<dynamic, dynamic> data) {
+ return CameraImageData(
+ format: _cameraImageFormatFromPlatformData(data['format']),
+ height: data['height'] as int,
+ width: data['width'] as int,
+ lensAperture: data['lensAperture'] as double?,
+ sensorExposureTime: data['sensorExposureTime'] as int?,
+ sensorSensitivity: data['sensorSensitivity'] as double?,
+ planes: List<CameraImagePlane>.unmodifiable(
+ (data['planes'] as List<dynamic>).map<CameraImagePlane>(
+ (dynamic planeData) => _cameraImagePlaneFromPlatformData(
+ planeData as Map<dynamic, dynamic>))));
+}
+
+CameraImageFormat _cameraImageFormatFromPlatformData(dynamic data) {
+ return CameraImageFormat(_imageFormatGroupFromPlatformData(data), raw: data);
+}
+
+ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
+ switch (data) {
+ case 35: // android.graphics.ImageFormat.YUV_420_888
+ return ImageFormatGroup.yuv420;
+ case 256: // android.graphics.ImageFormat.JPEG
+ return ImageFormatGroup.jpeg;
+ }
+
+ return ImageFormatGroup.unknown;
+}
+
+CameraImagePlane _cameraImagePlaneFromPlatformData(Map<dynamic, dynamic> data) {
+ return CameraImagePlane(
+ bytes: data['bytes'] as Uint8List,
+ bytesPerPixel: data['bytesPerPixel'] as int?,
+ bytesPerRow: data['bytesPerRow'] as int,
+ height: data['height'] as int?,
+ width: data['width'] as int?);
+}
diff --git a/packages/camera/camera_android/lib/src/utils.dart b/packages/camera/camera_android/lib/src/utils.dart
new file mode 100644
index 0000000..8d58f7f
--- /dev/null
+++ b/packages/camera/camera_android/lib/src/utils.dart
@@ -0,0 +1,56 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+
+/// Parses a string into a corresponding CameraLensDirection.
+CameraLensDirection parseCameraLensDirection(String string) {
+ switch (string) {
+ case 'front':
+ return CameraLensDirection.front;
+ case 'back':
+ return CameraLensDirection.back;
+ case 'external':
+ return CameraLensDirection.external;
+ }
+ throw ArgumentError('Unknown CameraLensDirection value');
+}
+
+/// Returns the device orientation as a String.
+String serializeDeviceOrientation(DeviceOrientation orientation) {
+ switch (orientation) {
+ case DeviceOrientation.portraitUp:
+ return 'portraitUp';
+ case DeviceOrientation.portraitDown:
+ return 'portraitDown';
+ case DeviceOrientation.landscapeRight:
+ return 'landscapeRight';
+ case DeviceOrientation.landscapeLeft:
+ return 'landscapeLeft';
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // ignore: dead_code
+ return 'portraitUp';
+}
+
+/// Returns the device orientation for a given String.
+DeviceOrientation deserializeDeviceOrientation(String str) {
+ switch (str) {
+ case 'portraitUp':
+ return DeviceOrientation.portraitUp;
+ case 'portraitDown':
+ return DeviceOrientation.portraitDown;
+ case 'landscapeRight':
+ return DeviceOrientation.landscapeRight;
+ case 'landscapeLeft':
+ return DeviceOrientation.landscapeLeft;
+ default:
+ throw ArgumentError('"$str" is not a valid DeviceOrientation value');
+ }
+}
diff --git a/packages/camera/camera_android/pubspec.yaml b/packages/camera/camera_android/pubspec.yaml
new file mode 100644
index 0000000..fb33719
--- /dev/null
+++ b/packages/camera/camera_android/pubspec.yaml
@@ -0,0 +1,32 @@
+name: camera_android
+description: Android implementation of the camera plugin.
+repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_android
+issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
+version: 0.10.4
+
+environment:
+ sdk: ">=2.14.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+flutter:
+ plugin:
+ implements: camera
+ platforms:
+ android:
+ package: io.flutter.plugins.camera
+ pluginClass: CameraPlugin
+ dartPluginClass: AndroidCamera
+
+dependencies:
+ camera_platform_interface: ^2.3.1
+ flutter:
+ sdk: flutter
+ flutter_plugin_android_lifecycle: ^2.0.2
+ stream_transform: ^2.0.0
+
+dev_dependencies:
+ async: ^2.5.0
+ flutter_driver:
+ sdk: flutter
+ flutter_test:
+ sdk: flutter
diff --git a/packages/camera/camera_android/test/android_camera_test.dart b/packages/camera/camera_android/test/android_camera_test.dart
new file mode 100644
index 0000000..d80bd9c
--- /dev/null
+++ b/packages/camera/camera_android/test/android_camera_test.dart
@@ -0,0 +1,1131 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:async/async.dart';
+import 'package:camera_android/src/android_camera.dart';
+import 'package:camera_android/src/utils.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'method_channel_mock.dart';
+
+const String _channelName = 'plugins.flutter.io/camera_android';
+
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ test('registers instance', () async {
+ AndroidCamera.registerWith();
+ expect(CameraPlatform.instance, isA<AndroidCamera>());
+ });
+
+ test('registration does not set message handlers', () async {
+ AndroidCamera.registerWith();
+
+ // Setting up a handler requires bindings to be initialized, and since
+ // registerWith is called very early in initialization the bindings won't
+    // have been initialized. While registerWith could initialize them, that
+ // could slow down startup, so instead the handler should be set up lazily.
+ final ByteData? response =
+ await _ambiguate(TestDefaultBinaryMessengerBinding.instance)!
+ .defaultBinaryMessenger
+ .handlePlatformMessage(
+ AndroidCamera.deviceEventChannelName,
+ const StandardMethodCodec().encodeMethodCall(const MethodCall(
+ 'orientation_changed',
+ <String, Object>{'orientation': 'portraitDown'})),
+ (ByteData? data) {});
+ expect(response, null);
+ });
+
+ group('Creation, Initialization & Disposal Tests', () {
+ test('Should send creation data and receive back a camera id', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ }
+ });
+ final AndroidCamera camera = AndroidCamera();
+
+ // Act
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0),
+ ResolutionPreset.high,
+ );
+
+ // Assert
+ expect(cameraMockChannel.log, <Matcher>[
+ isMethodCall(
+ 'create',
+ arguments: <String, Object?>{
+ 'cameraName': 'Test',
+ 'resolutionPreset': 'high',
+ 'enableAudio': false
+ },
+ ),
+ ]);
+ expect(cameraId, 1);
+ });
+
+ test('Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final AndroidCamera camera = AndroidCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final AndroidCamera camera = AndroidCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test(
+ 'Should throw CameraException when initialize throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'initialize': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ },
+ );
+ final AndroidCamera camera = AndroidCamera();
+
+ // Act
+ expect(
+ () => camera.initializeCamera(0),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having(
+ (CameraException e) => e.description,
+ 'description',
+ 'Mock error message used during testing.',
+ ),
+ ),
+ );
+ },
+ );
+
+ test('Should send initialization data', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ 'initialize': null
+ });
+ final AndroidCamera camera = AndroidCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+
+ // Act
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ isMethodCall(
+ 'initialize',
+ arguments: <String, Object?>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ ),
+ ]);
+ });
+
+ test('Should send a disposal call on dispose', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null,
+ 'dispose': <String, dynamic>{'cameraId': 1}
+ });
+
+ final AndroidCamera camera = AndroidCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Act
+ await camera.dispose(cameraId);
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ anything,
+ isMethodCall(
+ 'dispose',
+ arguments: <String, Object?>{'cameraId': 1},
+ ),
+ ]);
+ });
+ });
+
+ group('Event Tests', () {
+ late AndroidCamera camera;
+ late int cameraId;
+ setUp(() async {
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = AndroidCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+ });
+
+ test('Should receive initialized event', () async {
+ // Act
+ final Stream<CameraInitializedEvent> eventStream =
+ camera.onCameraInitialized(cameraId);
+ final StreamQueue<CameraInitializedEvent> streamQueue =
+ StreamQueue<CameraInitializedEvent>(eventStream);
+
+ // Emit test events
+ final CameraInitializedEvent event = CameraInitializedEvent(
+ cameraId,
+ 3840,
+ 2160,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ );
+ await camera.handleCameraMethodCall(
+ MethodCall('initialized', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive resolution changes', () async {
+ // Act
+ final Stream<CameraResolutionChangedEvent> resolutionStream =
+ camera.onCameraResolutionChanged(cameraId);
+ final StreamQueue<CameraResolutionChangedEvent> streamQueue =
+ StreamQueue<CameraResolutionChangedEvent>(resolutionStream);
+
+ // Emit test events
+ final CameraResolutionChangedEvent fhdEvent =
+ CameraResolutionChangedEvent(cameraId, 1920, 1080);
+ final CameraResolutionChangedEvent uhdEvent =
+ CameraResolutionChangedEvent(cameraId, 3840, 2160);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera closing events', () async {
+ // Act
+ final Stream<CameraClosingEvent> eventStream =
+ camera.onCameraClosing(cameraId);
+ final StreamQueue<CameraClosingEvent> streamQueue =
+ StreamQueue<CameraClosingEvent>(eventStream);
+
+ // Emit test events
+ final CameraClosingEvent event = CameraClosingEvent(cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera error events', () async {
+ // Act
+ final Stream<CameraErrorEvent> errorStream =
+ camera.onCameraError(cameraId);
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(errorStream);
+
+ // Emit test events
+ final CameraErrorEvent event =
+ CameraErrorEvent(cameraId, 'Error Description');
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive device orientation change events', () async {
+ // Act
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ camera.onDeviceOrientationChanged();
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+
+ // Emit test events
+ const DeviceOrientationChangedEvent event =
+ DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+ for (int i = 0; i < 3; i++) {
+ await _ambiguate(TestDefaultBinaryMessengerBinding.instance)!
+ .defaultBinaryMessenger
+ .handlePlatformMessage(
+ AndroidCamera.deviceEventChannelName,
+ const StandardMethodCodec().encodeMethodCall(
+ MethodCall('orientation_changed', event.toJson())),
+ null);
+ }
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+ });
+
+ group('Function Tests', () {
+ late AndroidCamera camera;
+ late int cameraId;
+
+ setUp(() async {
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = AndroidCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(
+ CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ),
+ );
+ await initializeFuture;
+ });
+
+ test('Should fetch CameraDescription instances for available cameras',
+ () async {
+ // Arrange
+ final List<dynamic> returnData = <dynamic>[
+ <String, dynamic>{
+ 'name': 'Test 1',
+ 'lensFacing': 'front',
+ 'sensorOrientation': 1
+ },
+ <String, dynamic>{
+ 'name': 'Test 2',
+ 'lensFacing': 'back',
+ 'sensorOrientation': 2
+ }
+ ];
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'availableCameras': returnData},
+ );
+
+ // Act
+ final List<CameraDescription> cameras = await camera.availableCameras();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('availableCameras', arguments: null),
+ ]);
+ expect(cameras.length, returnData.length);
+ for (int i = 0; i < returnData.length; i++) {
+ final Map<String, Object?> typedData =
+ (returnData[i] as Map<dynamic, dynamic>).cast<String, Object?>();
+ final CameraDescription cameraDescription = CameraDescription(
+ name: typedData['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(typedData['lensFacing']! as String),
+ sensorOrientation: typedData['sensorOrientation']! as int,
+ );
+ expect(cameras[i], cameraDescription);
+ }
+ });
+
+ test(
+ 'Should throw CameraException when availableCameras throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'availableCameras': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+
+ // Act
+ expect(
+ camera.availableCameras,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should take a picture and return an XFile instance', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'takePicture': '/test/path.jpg'});
+
+ // Act
+ final XFile file = await camera.takePicture(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('takePicture', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.jpg');
+ });
+
+ test('Should prepare for video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'prepareForVideoRecording': null},
+ );
+
+ // Act
+ await camera.prepareForVideoRecording();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('prepareForVideoRecording', arguments: null),
+ ]);
+ });
+
+ test('Should start recording a video', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': null,
+ 'enableStream': false,
+ }),
+ ]);
+ });
+
+ test('Should pass maxVideoDuration when starting recording a video',
+ () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(
+ cameraId,
+ maxVideoDuration: const Duration(seconds: 10),
+ );
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': 10000,
+ 'enableStream': false,
+ }),
+ ]);
+ });
+
+ test(
+ 'Should pass enableStream if callback is passed when starting recording a video',
+ () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoCapturing(
+ VideoCaptureOptions(cameraId,
+ streamCallback: (CameraImageData imageData) {}),
+ );
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': null,
+ 'enableStream': true,
+ }),
+ ]);
+ });
+
+ test('Should stop a video recording and return the file', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'stopVideoRecording': '/test/path.mp4'},
+ );
+
+ // Act
+ final XFile file = await camera.stopVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('stopVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.mp4');
+ });
+
+ test('Should pause a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'pauseVideoRecording': null},
+ );
+
+ // Act
+ await camera.pauseVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pauseVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should resume a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'resumeVideoRecording': null},
+ );
+
+ // Act
+ await camera.resumeVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumeVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the flash mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFlashMode': null},
+ );
+
+ // Act
+ await camera.setFlashMode(cameraId, FlashMode.torch);
+ await camera.setFlashMode(cameraId, FlashMode.always);
+ await camera.setFlashMode(cameraId, FlashMode.auto);
+ await camera.setFlashMode(cameraId, FlashMode.off);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'torch'
+ }),
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'always'
+ }),
+ isMethodCall('setFlashMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setFlashMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'off'}),
+ ]);
+ });
+
+ test('Should set the exposure mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposureMode': null},
+ );
+
+ // Act
+ await camera.setExposureMode(cameraId, ExposureMode.auto);
+ await camera.setExposureMode(cameraId, ExposureMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setExposureMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+ test('Should set the exposure point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposurePoint': null},
+ );
+
+ // Act
+ await camera.setExposurePoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setExposurePoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should get the min exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMinExposureOffset': 2.0},
+ );
+
+ // Act
+ final double minExposureOffset =
+ await camera.getMinExposureOffset(cameraId);
+
+ // Assert
+ expect(minExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the max exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMaxExposureOffset': 2.0},
+ );
+
+ // Act
+ final double maxExposureOffset =
+ await camera.getMaxExposureOffset(cameraId);
+
+ // Assert
+ expect(maxExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the exposure offset step size', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getExposureOffsetStepSize': 0.25},
+ );
+
+ // Act
+ final double stepSize = await camera.getExposureOffsetStepSize(cameraId);
+
+ // Assert
+ expect(stepSize, 0.25);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getExposureOffsetStepSize', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposureOffset': 0.6},
+ );
+
+ // Act
+ final double actualOffset = await camera.setExposureOffset(cameraId, 0.5);
+
+ // Assert
+ expect(actualOffset, 0.6);
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'offset': 0.5,
+ }),
+ ]);
+ });
+
+ test('Should set the focus mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFocusMode': null},
+ );
+
+ // Act
+ await camera.setFocusMode(cameraId, FocusMode.auto);
+ await camera.setFocusMode(cameraId, FocusMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setFocusMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+    test('Should set the focus point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFocusPoint': null},
+ );
+
+ // Act
+ await camera.setFocusPoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setFocusPoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should build a texture widget as preview widget', () async {
+ // Act
+ final Widget widget = camera.buildPreview(cameraId);
+
+      // Assert
+ expect(widget is Texture, isTrue);
+ expect((widget as Texture).textureId, cameraId);
+ });
+
+ test('Should throw MissingPluginException when handling unknown method',
+ () {
+ final AndroidCamera camera = AndroidCamera();
+
+ expect(
+ () => camera.handleCameraMethodCall(
+ const MethodCall('unknown_method'), 1),
+ throwsA(isA<MissingPluginException>()));
+ });
+
+ test('Should get the max zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMaxZoomLevel': 10.0},
+ );
+
+ // Act
+ final double maxZoomLevel = await camera.getMaxZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 10.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the min zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMinZoomLevel': 1.0},
+ );
+
+ // Act
+      final double minZoomLevel = await camera.getMinZoomLevel(cameraId);
+
+      // Assert
+      expect(minZoomLevel, 1.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setZoomLevel': null},
+ );
+
+ // Act
+ await camera.setZoomLevel(cameraId, 2.0);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setZoomLevel',
+ arguments: <String, Object?>{'cameraId': cameraId, 'zoom': 2.0}),
+ ]);
+ });
+
+ test('Should throw CameraException when illegal zoom level is supplied',
+ () async {
+ // Arrange
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'setZoomLevel': PlatformException(
+ code: 'ZOOM_ERROR',
+ message: 'Illegal zoom error',
+ )
+ },
+ );
+
+ // Act & assert
+ expect(
+ () => camera.setZoomLevel(cameraId, -1.0),
+ throwsA(isA<CameraException>()
+ .having((CameraException e) => e.code, 'code', 'ZOOM_ERROR')
+ .having((CameraException e) => e.description, 'description',
+ 'Illegal zoom error')));
+ });
+
+ test('Should lock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'lockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.lockCaptureOrientation(
+ cameraId, DeviceOrientation.portraitUp);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('lockCaptureOrientation', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'orientation': 'portraitUp'
+ }),
+ ]);
+ });
+
+ test('Should unlock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'unlockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.unlockCaptureOrientation(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('unlockCaptureOrientation',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should pause the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'pausePreview': null},
+ );
+
+ // Act
+ await camera.pausePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pausePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should resume the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'resumePreview': null},
+ );
+
+ // Act
+ await camera.resumePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should start streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ ]);
+
+ subscription.cancel();
+ });
+
+ test('Should stop streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+ subscription.cancel();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ isMethodCall('stopImageStream', arguments: null),
+ ]);
+ });
+ });
+}
+
+/// This allows a value of type T or T? to be treated as a value of type T?.
+///
+/// We use this so that APIs that have become non-nullable can still be used
+/// with `!` and `?` on the stable branch.
+T? _ambiguate<T>(T? value) => value;
diff --git a/packages/camera/camera_android/test/method_channel_mock.dart b/packages/camera/camera_android/test/method_channel_mock.dart
new file mode 100644
index 0000000..f26d12a
--- /dev/null
+++ b/packages/camera/camera_android/test/method_channel_mock.dart
@@ -0,0 +1,47 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+class MethodChannelMock {
+ MethodChannelMock({
+ required String channelName,
+ this.delay,
+ required this.methods,
+ }) : methodChannel = MethodChannel(channelName) {
+ _ambiguate(TestDefaultBinaryMessengerBinding.instance)!
+ .defaultBinaryMessenger
+ .setMockMethodCallHandler(methodChannel, _handler);
+ }
+
+ final Duration? delay;
+ final MethodChannel methodChannel;
+ final Map<String, dynamic> methods;
+ final List<MethodCall> log = <MethodCall>[];
+
+ Future<dynamic> _handler(MethodCall methodCall) async {
+ log.add(methodCall);
+
+ if (!methods.containsKey(methodCall.method)) {
+ throw MissingPluginException('No implementation found for method '
+ '${methodCall.method} on channel ${methodChannel.name}');
+ }
+
+ return Future<dynamic>.delayed(delay ?? Duration.zero, () {
+ final dynamic result = methods[methodCall.method];
+ if (result is Exception) {
+ throw result;
+ }
+
+ return Future<dynamic>.value(result);
+ });
+ }
+}
+
+/// This allows a value of type T or T? to be treated as a value of type T?.
+///
+/// We use this so that APIs that have become non-nullable can still be used
+/// with `!` and `?` on the stable branch.
+T? _ambiguate<T>(T? value) => value;
diff --git a/packages/camera/camera_android/test/type_conversion_test.dart b/packages/camera/camera_android/test/type_conversion_test.dart
new file mode 100644
index 0000000..b07466d
--- /dev/null
+++ b/packages/camera/camera_android/test/type_conversion_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_android/src/type_conversion.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('CameraImageData can be created', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 1,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.height, 1);
+ expect(cameraImage.width, 4);
+ expect(cameraImage.format.group, ImageFormatGroup.unknown);
+ expect(cameraImage.planes.length, 1);
+ });
+
+ test('CameraImageData has ImageFormatGroup.yuv420', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 35,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ });
+}
diff --git a/packages/camera/camera_android/test/utils_test.dart b/packages/camera/camera_android/test/utils_test.dart
new file mode 100644
index 0000000..6f426bc
--- /dev/null
+++ b/packages/camera/camera_android/test/utils_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android/src/utils.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ group('Utility methods', () {
+ test(
+ 'Should return CameraLensDirection when valid value is supplied when parsing camera lens direction',
+ () {
+ expect(
+ parseCameraLensDirection('back'),
+ CameraLensDirection.back,
+ );
+ expect(
+ parseCameraLensDirection('front'),
+ CameraLensDirection.front,
+ );
+ expect(
+ parseCameraLensDirection('external'),
+ CameraLensDirection.external,
+ );
+ });
+
+ test(
+ 'Should throw ArgumentException when invalid value is supplied when parsing camera lens direction',
+ () {
+ expect(
+ () => parseCameraLensDirection('test'),
+ throwsA(isArgumentError),
+ );
+ });
+
+ test('serializeDeviceOrientation() should serialize correctly', () {
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitUp),
+ 'portraitUp');
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitDown),
+ 'portraitDown');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeRight),
+ 'landscapeRight');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeLeft),
+ 'landscapeLeft');
+ });
+
+ test('deserializeDeviceOrientation() should deserialize correctly', () {
+ expect(deserializeDeviceOrientation('portraitUp'),
+ DeviceOrientation.portraitUp);
+ expect(deserializeDeviceOrientation('portraitDown'),
+ DeviceOrientation.portraitDown);
+ expect(deserializeDeviceOrientation('landscapeRight'),
+ DeviceOrientation.landscapeRight);
+ expect(deserializeDeviceOrientation('landscapeLeft'),
+ DeviceOrientation.landscapeLeft);
+ });
+ });
+}
diff --git a/packages/camera/camera_android_camerax/.metadata b/packages/camera/camera_android_camerax/.metadata
new file mode 100644
index 0000000..1667b93
--- /dev/null
+++ b/packages/camera/camera_android_camerax/.metadata
@@ -0,0 +1,30 @@
+# This file tracks properties of this Flutter project.
+# Used by Flutter tool to assess capabilities and perform upgrades etc.
+#
+# This file should be version controlled.
+
+version:
+ revision: 6b04999e4aaa9dfafdcb5ca09e812df7379d9ee5
+ channel: spellcheck_1_1
+
+project_type: plugin
+
+# Tracks metadata for the flutter migrate command
+migration:
+ platforms:
+ - platform: root
+ create_revision: 6b04999e4aaa9dfafdcb5ca09e812df7379d9ee5
+ base_revision: 6b04999e4aaa9dfafdcb5ca09e812df7379d9ee5
+ - platform: android
+ create_revision: 6b04999e4aaa9dfafdcb5ca09e812df7379d9ee5
+ base_revision: 6b04999e4aaa9dfafdcb5ca09e812df7379d9ee5
+
+ # User provided section
+
+ # List of Local paths (relative to this file) that should be
+ # ignored by the migrate tool.
+ #
+ # Files that are not part of the templates will be ignored by default.
+ unmanaged_files:
+ - 'lib/main.dart'
+ - 'ios/Runner.xcodeproj/project.pbxproj'
diff --git a/packages/camera/camera_android_camerax/AUTHORS b/packages/camera/camera_android_camerax/AUTHORS
new file mode 100644
index 0000000..557dff9
--- /dev/null
+++ b/packages/camera/camera_android_camerax/AUTHORS
@@ -0,0 +1,6 @@
+# Below is a list of people and organizations that have contributed
+# to the Flutter project. Names should be added to the list like so:
+#
+# Name/Organization <email address>
+
+Google Inc.
diff --git a/packages/camera/camera_android_camerax/CHANGELOG.md b/packages/camera/camera_android_camerax/CHANGELOG.md
new file mode 100644
index 0000000..9e6c5a9
--- /dev/null
+++ b/packages/camera/camera_android_camerax/CHANGELOG.md
@@ -0,0 +1,14 @@
+## NEXT
+
+* Creates camera_android_camerax plugin for development.
+* Adds CameraInfo class and removes unnecessary code from plugin.
+* Adds CameraSelector class.
+* Adds ProcessCameraProvider class.
+* Bump CameraX version to 1.3.0-alpha02.
+* Adds Camera and UseCase classes, along with methods for binding UseCases to a lifecycle with the ProcessCameraProvider.
+* Bump CameraX version to 1.3.0-alpha03 and Kotlin version to 1.8.0.
+* Changes instance manager to allow the separate creation of identical objects.
+* Adds Preview and Surface classes, along with other methods needed to implement camera preview.
+* Adds implementation of availableCameras().
+* Implements camera preview, createCamera, initializeCamera, onCameraError, onDeviceOrientationChanged, and onCameraInitialized.
+* Adds integration test to plugin.
diff --git a/packages/camera/camera_android_camerax/LICENSE b/packages/camera/camera_android_camerax/LICENSE
new file mode 100644
index 0000000..c6823b8
--- /dev/null
+++ b/packages/camera/camera_android_camerax/LICENSE
@@ -0,0 +1,25 @@
+Copyright 2013 The Flutter Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/packages/camera/camera_android_camerax/README.md b/packages/camera/camera_android_camerax/README.md
new file mode 100644
index 0000000..06d837a
--- /dev/null
+++ b/packages/camera/camera_android_camerax/README.md
@@ -0,0 +1,3 @@
+# camera_android_camerax
+
+An implementation of the camera plugin on Android using CameraX.
diff --git a/packages/camera/camera_android_camerax/android/build.gradle b/packages/camera/camera_android_camerax/android/build.gradle
new file mode 100644
index 0000000..822c3f6
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/build.gradle
@@ -0,0 +1,68 @@
+group 'io.flutter.plugins.camerax'
+version '1.0'
+
+buildscript {
+ repositories {
+ google()
+ mavenCentral()
+ }
+
+ dependencies {
+ classpath 'com.android.tools.build:gradle:7.3.0'
+ }
+}
+
+rootProject.allprojects {
+ repositories {
+ google()
+ mavenCentral()
+ }
+}
+
+apply plugin: 'com.android.library'
+
+android {
+ // CameraX dependencies require compilation against version 33 or later.
+ compileSdkVersion 33
+
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+
+ defaultConfig {
+ // Many of the CameraX APIs require API 21.
+ minSdkVersion 21
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ }
+
+ testOptions {
+ unitTests.includeAndroidResources = true
+ unitTests.returnDefaultValues = true
+ unitTests.all {
+ testLogging {
+ events "passed", "skipped", "failed", "standardOut", "standardError"
+ outputs.upToDateWhen {false}
+ showStandardStreams = true
+ }
+ }
+ }
+
+ lintOptions {
+ disable 'AndroidGradlePluginVersion'
+ disable 'GradleDependency'
+ }
+}
+
+dependencies {
+ // The CameraX core library and the camera2 implementation must use the same version number.
+ def camerax_version = "1.3.0-alpha03"
+ implementation "androidx.camera:camera-core:${camerax_version}"
+ implementation "androidx.camera:camera-camera2:${camerax_version}"
+ implementation "androidx.camera:camera-lifecycle:${camerax_version}"
+ implementation 'com.google.guava:guava:31.1-android'
+ testImplementation 'junit:junit:4.13.2'
+ testImplementation 'org.mockito:mockito-inline:5.0.0'
+ testImplementation 'androidx.test:core:1.4.0'
+ testImplementation 'org.robolectric:robolectric:4.8'
+}
diff --git a/packages/camera/camera_android_camerax/android/settings.gradle b/packages/camera/camera_android_camerax/android/settings.gradle
new file mode 100644
index 0000000..613f994
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/settings.gradle
@@ -0,0 +1 @@
+rootProject.name = 'camera_android_camerax'
diff --git a/packages/camera/camera_android_camerax/android/src/main/AndroidManifest.xml b/packages/camera/camera_android_camerax/android/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..52012aa
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/AndroidManifest.xml
@@ -0,0 +1,8 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="io.flutter.plugins.camerax">
+ <uses-feature android:name="android.hardware.camera.any" />
+ <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"
+ android:maxSdkVersion="28" />
+</manifest>
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraAndroidCameraxPlugin.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraAndroidCameraxPlugin.java
new file mode 100644
index 0000000..b61e7ac
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraAndroidCameraxPlugin.java
@@ -0,0 +1,109 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import android.content.Context;
+import androidx.annotation.NonNull;
+import androidx.lifecycle.LifecycleOwner;
+import io.flutter.embedding.engine.plugins.FlutterPlugin;
+import io.flutter.embedding.engine.plugins.activity.ActivityAware;
+import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.view.TextureRegistry;
+
+/** Platform implementation of the camera plugin using the CameraX library. */
+public final class CameraAndroidCameraxPlugin implements FlutterPlugin, ActivityAware {
+ private InstanceManager instanceManager;
+ private FlutterPluginBinding pluginBinding;
+ private ProcessCameraProviderHostApiImpl processCameraProviderHostApi;
+ public SystemServicesHostApiImpl systemServicesHostApi;
+
+ /**
+ * Initialize this within the {@code #configureFlutterEngine} of a Flutter activity or fragment.
+ *
+ * <p>See {@code io.flutter.plugins.camera.MainActivity} for an example.
+ */
+ public CameraAndroidCameraxPlugin() {}
+
+ void setUp(BinaryMessenger binaryMessenger, Context context, TextureRegistry textureRegistry) {
+ // Set up instance manager.
+ instanceManager =
+ InstanceManager.open(
+ identifier -> {
+ new GeneratedCameraXLibrary.JavaObjectFlutterApi(binaryMessenger)
+ .dispose(identifier, reply -> {});
+ });
+
+ // Set up Host APIs.
+ GeneratedCameraXLibrary.CameraInfoHostApi.setup(
+ binaryMessenger, new CameraInfoHostApiImpl(instanceManager));
+ GeneratedCameraXLibrary.CameraSelectorHostApi.setup(
+ binaryMessenger, new CameraSelectorHostApiImpl(binaryMessenger, instanceManager));
+ GeneratedCameraXLibrary.JavaObjectHostApi.setup(
+ binaryMessenger, new JavaObjectHostApiImpl(instanceManager));
+ processCameraProviderHostApi =
+ new ProcessCameraProviderHostApiImpl(binaryMessenger, instanceManager, context);
+ GeneratedCameraXLibrary.ProcessCameraProviderHostApi.setup(
+ binaryMessenger, processCameraProviderHostApi);
+ systemServicesHostApi = new SystemServicesHostApiImpl(binaryMessenger, instanceManager);
+ GeneratedCameraXLibrary.SystemServicesHostApi.setup(binaryMessenger, systemServicesHostApi);
+ GeneratedCameraXLibrary.PreviewHostApi.setup(
+ binaryMessenger, new PreviewHostApiImpl(binaryMessenger, instanceManager, textureRegistry));
+ }
+
+ @Override
+ public void onAttachedToEngine(@NonNull FlutterPluginBinding flutterPluginBinding) {
+ pluginBinding = flutterPluginBinding;
+ }
+
+ @Override
+ public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
+ if (instanceManager != null) {
+ instanceManager.close();
+ }
+ }
+
+ // Activity Lifecycle methods:
+
+ @Override
+ public void onAttachedToActivity(@NonNull ActivityPluginBinding activityPluginBinding) {
+ setUp(
+ pluginBinding.getBinaryMessenger(),
+ pluginBinding.getApplicationContext(),
+ pluginBinding.getTextureRegistry());
+ updateContext(pluginBinding.getApplicationContext());
+ processCameraProviderHostApi.setLifecycleOwner(
+ (LifecycleOwner) activityPluginBinding.getActivity());
+ systemServicesHostApi.setActivity(activityPluginBinding.getActivity());
+ systemServicesHostApi.setPermissionsRegistry(
+ activityPluginBinding::addRequestPermissionsResultListener);
+ }
+
+ @Override
+ public void onDetachedFromActivityForConfigChanges() {
+ updateContext(pluginBinding.getApplicationContext());
+ }
+
+ @Override
+ public void onReattachedToActivityForConfigChanges(
+ @NonNull ActivityPluginBinding activityPluginBinding) {
+ updateContext(activityPluginBinding.getActivity());
+ }
+
+ @Override
+ public void onDetachedFromActivity() {
+ updateContext(pluginBinding.getApplicationContext());
+ }
+
+ /**
+ * Updates context that is used to fetch the corresponding instance of a {@code
+ * ProcessCameraProvider}.
+ */
+ public void updateContext(Context context) {
+ if (processCameraProviderHostApi != null) {
+ processCameraProviderHostApi.setContext(context);
+ }
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraFlutterApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraFlutterApiImpl.java
new file mode 100644
index 0000000..a035483
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraFlutterApiImpl.java
@@ -0,0 +1,22 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import androidx.camera.core.Camera;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CameraFlutterApi;
+
+public class CameraFlutterApiImpl extends CameraFlutterApi {
+ private final InstanceManager instanceManager;
+
+ public CameraFlutterApiImpl(BinaryMessenger binaryMessenger, InstanceManager instanceManager) {
+ super(binaryMessenger);
+ this.instanceManager = instanceManager;
+ }
+
+ void create(Camera camera, Reply<Void> reply) {
+ create(instanceManager.addHostCreatedInstance(camera), reply);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraInfoFlutterApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraInfoFlutterApiImpl.java
new file mode 100644
index 0000000..c538e42
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraInfoFlutterApiImpl.java
@@ -0,0 +1,23 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import androidx.camera.core.CameraInfo;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CameraInfoFlutterApi;
+
+public class CameraInfoFlutterApiImpl extends CameraInfoFlutterApi {
+ private final InstanceManager instanceManager;
+
+ public CameraInfoFlutterApiImpl(
+ BinaryMessenger binaryMessenger, InstanceManager instanceManager) {
+ super(binaryMessenger);
+ this.instanceManager = instanceManager;
+ }
+
+ void create(CameraInfo cameraInfo, Reply<Void> reply) {
+ create(instanceManager.addHostCreatedInstance(cameraInfo), reply);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraInfoHostApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraInfoHostApiImpl.java
new file mode 100644
index 0000000..d960b7f
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraInfoHostApiImpl.java
@@ -0,0 +1,25 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import androidx.annotation.NonNull;
+import androidx.camera.core.CameraInfo;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CameraInfoHostApi;
+import java.util.Objects;
+
+public class CameraInfoHostApiImpl implements CameraInfoHostApi {
+ private final InstanceManager instanceManager;
+
+ public CameraInfoHostApiImpl(InstanceManager instanceManager) {
+ this.instanceManager = instanceManager;
+ }
+
+ @Override
+ public Long getSensorRotationDegrees(@NonNull Long identifier) {
+ CameraInfo cameraInfo =
+ (CameraInfo) Objects.requireNonNull(instanceManager.getInstance(identifier));
+ return Long.valueOf(cameraInfo.getSensorRotationDegrees());
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraPermissionsManager.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraPermissionsManager.java
new file mode 100644
index 0000000..19b1ee5
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraPermissionsManager.java
@@ -0,0 +1,120 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import android.Manifest;
+import android.Manifest.permission;
+import android.app.Activity;
+import android.content.pm.PackageManager;
+import androidx.annotation.VisibleForTesting;
+import androidx.core.app.ActivityCompat;
+import androidx.core.content.ContextCompat;
+
+final class CameraPermissionsManager {
+ interface PermissionsRegistry {
+ @SuppressWarnings("deprecation")
+ void addListener(
+ io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener handler);
+ }
+
+ interface ResultCallback {
+ void onResult(String errorCode, String errorDescription);
+ }
+
+ /**
+ * Camera access permission errors handled when camera is created. See {@code MethodChannelCamera}
+ * in {@code camera/camera_platform_interface} for details.
+ */
+ private static final String CAMERA_PERMISSIONS_REQUEST_ONGOING =
+ "CameraPermissionsRequestOngoing";
+
+ private static final String CAMERA_PERMISSIONS_REQUEST_ONGOING_MESSAGE =
+ "Another request is ongoing and multiple requests cannot be handled at once.";
+ private static final String CAMERA_ACCESS_DENIED = "CameraAccessDenied";
+ private static final String CAMERA_ACCESS_DENIED_MESSAGE = "Camera access permission was denied.";
+ private static final String AUDIO_ACCESS_DENIED = "AudioAccessDenied";
+ private static final String AUDIO_ACCESS_DENIED_MESSAGE = "Audio access permission was denied.";
+
+ private static final int CAMERA_REQUEST_ID = 9796;
+ @VisibleForTesting boolean ongoing = false;
+
+ void requestPermissions(
+ Activity activity,
+ PermissionsRegistry permissionsRegistry,
+ boolean enableAudio,
+ ResultCallback callback) {
+ if (ongoing) {
+ callback.onResult(
+ CAMERA_PERMISSIONS_REQUEST_ONGOING, CAMERA_PERMISSIONS_REQUEST_ONGOING_MESSAGE);
+ return;
+ }
+ if (!hasCameraPermission(activity) || (enableAudio && !hasAudioPermission(activity))) {
+ permissionsRegistry.addListener(
+ new CameraRequestPermissionsListener(
+ (String errorCode, String errorDescription) -> {
+ ongoing = false;
+ callback.onResult(errorCode, errorDescription);
+ }));
+ ongoing = true;
+ ActivityCompat.requestPermissions(
+ activity,
+ enableAudio
+ ? new String[] {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}
+ : new String[] {Manifest.permission.CAMERA},
+ CAMERA_REQUEST_ID);
+ } else {
+ // Permissions already exist. Call the callback with success.
+ callback.onResult(null, null);
+ }
+ }
+
+ private boolean hasCameraPermission(Activity activity) {
+ return ContextCompat.checkSelfPermission(activity, permission.CAMERA)
+ == PackageManager.PERMISSION_GRANTED;
+ }
+
+ private boolean hasAudioPermission(Activity activity) {
+ return ContextCompat.checkSelfPermission(activity, permission.RECORD_AUDIO)
+ == PackageManager.PERMISSION_GRANTED;
+ }
+
+ @VisibleForTesting
+ @SuppressWarnings("deprecation")
+ static final class CameraRequestPermissionsListener
+ implements io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener {
+
+ // There's no way to unregister permission listeners in the v1 embedding, so we'll be called
+ // duplicate times in cases where the user denies and then grants a permission. Keep track of if
+ // we've responded before and bail out of handling the callback manually if this is a repeat
+ // call.
+ boolean alreadyCalled = false;
+
+ final ResultCallback callback;
+
+ @VisibleForTesting
+ CameraRequestPermissionsListener(ResultCallback callback) {
+ this.callback = callback;
+ }
+
+ @Override
+ public boolean onRequestPermissionsResult(int id, String[] permissions, int[] grantResults) {
+ if (alreadyCalled || id != CAMERA_REQUEST_ID) {
+ return false;
+ }
+
+ alreadyCalled = true;
+ // grantResults could be empty if the permissions request with the user is interrupted
+ // https://developer.android.com/reference/android/app/Activity#onRequestPermissionsResult(int,%20java.lang.String[],%20int[])
+ if (grantResults.length == 0 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
+ callback.onResult(CAMERA_ACCESS_DENIED, CAMERA_ACCESS_DENIED_MESSAGE);
+ } else if (grantResults.length > 1 && grantResults[1] != PackageManager.PERMISSION_GRANTED) {
+ callback.onResult(AUDIO_ACCESS_DENIED, AUDIO_ACCESS_DENIED_MESSAGE);
+ } else {
+ callback.onResult(null, null);
+ }
+ return true;
+ }
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraSelectorFlutterApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraSelectorFlutterApiImpl.java
new file mode 100644
index 0000000..6ca3782
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraSelectorFlutterApiImpl.java
@@ -0,0 +1,23 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import androidx.camera.core.CameraSelector;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CameraSelectorFlutterApi;
+
+public class CameraSelectorFlutterApiImpl extends CameraSelectorFlutterApi {
+ private final InstanceManager instanceManager;
+
+ public CameraSelectorFlutterApiImpl(
+ BinaryMessenger binaryMessenger, InstanceManager instanceManager) {
+ super(binaryMessenger);
+ this.instanceManager = instanceManager;
+ }
+
+ void create(CameraSelector cameraSelector, Long lensFacing, Reply<Void> reply) {
+ create(instanceManager.addHostCreatedInstance(cameraSelector), lensFacing, reply);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraSelectorHostApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraSelectorHostApiImpl.java
new file mode 100644
index 0000000..603f7cf
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraSelectorHostApiImpl.java
@@ -0,0 +1,67 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import androidx.camera.core.CameraInfo;
+import androidx.camera.core.CameraSelector;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CameraSelectorHostApi;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+public class CameraSelectorHostApiImpl implements CameraSelectorHostApi {
+ private final BinaryMessenger binaryMessenger;
+ private final InstanceManager instanceManager;
+
+ @VisibleForTesting public CameraXProxy cameraXProxy = new CameraXProxy();
+
+ public CameraSelectorHostApiImpl(
+ BinaryMessenger binaryMessenger, InstanceManager instanceManager) {
+ this.binaryMessenger = binaryMessenger;
+ this.instanceManager = instanceManager;
+ }
+
+ @Override
+ public void create(@NonNull Long identifier, Long lensFacing) {
+ CameraSelector.Builder cameraSelectorBuilder = cameraXProxy.createCameraSelectorBuilder();
+ CameraSelector cameraSelector;
+
+ if (lensFacing != null) {
+ cameraSelector = cameraSelectorBuilder.requireLensFacing(Math.toIntExact(lensFacing)).build();
+ } else {
+ cameraSelector = cameraSelectorBuilder.build();
+ }
+
+ instanceManager.addDartCreatedInstance(cameraSelector, identifier);
+ }
+
+ @Override
+ public List<Long> filter(@NonNull Long identifier, @NonNull List<Long> cameraInfoIds) {
+ CameraSelector cameraSelector =
+ (CameraSelector) Objects.requireNonNull(instanceManager.getInstance(identifier));
+ List<CameraInfo> cameraInfosForFilter = new ArrayList<CameraInfo>();
+
+ for (Number cameraInfoAsNumber : cameraInfoIds) {
+ Long cameraInfoId = cameraInfoAsNumber.longValue();
+
+ CameraInfo cameraInfo =
+ (CameraInfo) Objects.requireNonNull(instanceManager.getInstance(cameraInfoId));
+ cameraInfosForFilter.add(cameraInfo);
+ }
+
+ List<CameraInfo> filteredCameraInfos = cameraSelector.filter(cameraInfosForFilter);
+ List<Long> filteredCameraInfosIds = new ArrayList<Long>();
+
+ for (CameraInfo cameraInfo : filteredCameraInfos) {
+ Long filteredCameraInfoId = instanceManager.getIdentifierForStrongReference(cameraInfo);
+ filteredCameraInfosIds.add(filteredCameraInfoId);
+ }
+
+ return filteredCameraInfosIds;
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraXProxy.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraXProxy.java
new file mode 100644
index 0000000..4a3d277
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CameraXProxy.java
@@ -0,0 +1,51 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+import androidx.annotation.NonNull;
+import androidx.camera.core.CameraSelector;
+import androidx.camera.core.Preview;
+import io.flutter.plugin.common.BinaryMessenger;
+
+/** Utility class used to create CameraX-related objects primarily for testing purposes. */
+public class CameraXProxy {
+ public CameraSelector.Builder createCameraSelectorBuilder() {
+ return new CameraSelector.Builder();
+ }
+
+ public CameraPermissionsManager createCameraPermissionsManager() {
+ return new CameraPermissionsManager();
+ }
+
+ public DeviceOrientationManager createDeviceOrientationManager(
+ @NonNull Activity activity,
+ @NonNull Boolean isFrontFacing,
+ @NonNull int sensorOrientation,
+ @NonNull DeviceOrientationManager.DeviceOrientationChangeCallback callback) {
+ return new DeviceOrientationManager(activity, isFrontFacing, sensorOrientation, callback);
+ }
+
+ public Preview.Builder createPreviewBuilder() {
+ return new Preview.Builder();
+ }
+
+ public Surface createSurface(@NonNull SurfaceTexture surfaceTexture) {
+ return new Surface(surfaceTexture);
+ }
+
+ /**
+ * Creates an instance of the {@code SystemServicesFlutterApiImpl}.
+ *
+ * <p>Included in this class to utilize the callback methods it provides, e.g. {@code
+ * onCameraError(String)}.
+ */
+ public SystemServicesFlutterApiImpl createSystemServicesFlutterApiImpl(
+ @NonNull BinaryMessenger binaryMessenger) {
+ return new SystemServicesFlutterApiImpl(binaryMessenger);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/DeviceOrientationManager.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/DeviceOrientationManager.java
new file mode 100644
index 0000000..ebcb864
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/DeviceOrientationManager.java
@@ -0,0 +1,329 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import android.app.Activity;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.res.Configuration;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+
+/**
+ * Support class to help to determine the media orientation based on the orientation of the device.
+ */
+public class DeviceOrientationManager {
+
+ interface DeviceOrientationChangeCallback {
+ void onChange(DeviceOrientation newOrientation);
+ }
+
+ private static final IntentFilter orientationIntentFilter =
+ new IntentFilter(Intent.ACTION_CONFIGURATION_CHANGED);
+
+ private final Activity activity;
+ private final boolean isFrontFacing;
+ private final int sensorOrientation;
+ private final DeviceOrientationChangeCallback deviceOrientationChangeCallback;
+ private PlatformChannel.DeviceOrientation lastOrientation;
+ private BroadcastReceiver broadcastReceiver;
+
+ DeviceOrientationManager(
+ @NonNull Activity activity,
+ boolean isFrontFacing,
+ int sensorOrientation,
+ DeviceOrientationChangeCallback callback) {
+ this.activity = activity;
+ this.isFrontFacing = isFrontFacing;
+ this.sensorOrientation = sensorOrientation;
+ this.deviceOrientationChangeCallback = callback;
+ }
+
+ /**
+ * Starts listening to the device's sensors or UI for orientation updates.
+ *
+ * <p>When orientation information is updated, the callback method of the {@link
+ * DeviceOrientationChangeCallback} is called with the new orientation. This latest value can also
+ * be retrieved through the {@link #getVideoOrientation()} accessor.
+ *
+ * <p>If the device's ACCELEROMETER_ROTATION setting is enabled the {@link
+ * DeviceOrientationManager} will report orientation updates based on the sensor information. If
+ * the ACCELEROMETER_ROTATION is disabled the {@link DeviceOrientationManager} will fall back to
+ * delivering orientation updates based on the UI orientation.
+ */
+ public void start() {
+ if (broadcastReceiver != null) {
+ return;
+ }
+ broadcastReceiver =
+ new BroadcastReceiver() {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ handleUIOrientationChange();
+ }
+ };
+ activity.registerReceiver(broadcastReceiver, orientationIntentFilter);
+ broadcastReceiver.onReceive(activity, null);
+ }
+
+ /** Stops listening for orientation updates. */
+ public void stop() {
+ if (broadcastReceiver == null) {
+ return;
+ }
+ activity.unregisterReceiver(broadcastReceiver);
+ broadcastReceiver = null;
+ }
+
+ /**
+ * Returns the device's photo orientation in degrees based on the sensor orientation and the last
+ * known UI orientation.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * @return The device's photo orientation in degrees.
+ */
+ public int getPhotoOrientation() {
+ return this.getPhotoOrientation(this.lastOrientation);
+ }
+
+ /**
+ * Returns the device's photo orientation in degrees based on the sensor orientation and the
+ * supplied {@link PlatformChannel.DeviceOrientation} value.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * @param orientation The {@link PlatformChannel.DeviceOrientation} value that is to be converted
+ * into degrees.
+ * @return The device's photo orientation in degrees.
+ */
+ public int getPhotoOrientation(PlatformChannel.DeviceOrientation orientation) {
+ int angle = 0;
+ // Fallback to device orientation when the orientation value is null.
+ if (orientation == null) {
+ orientation = getUIOrientation();
+ }
+
+ switch (orientation) {
+ case PORTRAIT_UP:
+ angle = 90;
+ break;
+ case PORTRAIT_DOWN:
+ angle = 270;
+ break;
+ case LANDSCAPE_LEFT:
+ angle = isFrontFacing ? 180 : 0;
+ break;
+ case LANDSCAPE_RIGHT:
+ angle = isFrontFacing ? 0 : 180;
+ break;
+ }
+
+ // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X).
+ // This has to be taken into account so the JPEG is rotated properly.
+ // For devices with a sensor orientation of 90, this simply returns the mapped angle.
+ // For devices with orientation of 270, the JPEG is rotated 180 degrees instead.
+ return (angle + sensorOrientation + 270) % 360;
+ }
+
+ /**
+ * Returns the device's video orientation in clockwise degrees based on the sensor orientation and
+ * the last known UI orientation.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * @return The device's video orientation in clockwise degrees.
+ */
+ public int getVideoOrientation() {
+ return this.getVideoOrientation(this.lastOrientation);
+ }
+
+ /**
+ * Returns the device's video orientation in clockwise degrees based on the sensor orientation and
+ * the supplied {@link PlatformChannel.DeviceOrientation} value.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * <p>More details can be found in the official Android documentation:
+ * https://developer.android.com/reference/android/media/MediaRecorder#setOrientationHint(int)
+ *
+ * <p>See also:
+ * https://developer.android.com/training/camera2/camera-preview-large-screens#orientation_calculation
+ *
+ * @param orientation The {@link PlatformChannel.DeviceOrientation} value that is to be converted
+ * into degrees.
+ * @return The device's video orientation in clockwise degrees.
+ */
+ public int getVideoOrientation(PlatformChannel.DeviceOrientation orientation) {
+ int angle = 0;
+
+ // Fallback to device orientation when the orientation value is null.
+ if (orientation == null) {
+ orientation = getUIOrientation();
+ }
+
+ switch (orientation) {
+ case PORTRAIT_UP:
+ angle = 0;
+ break;
+ case PORTRAIT_DOWN:
+ angle = 180;
+ break;
+ case LANDSCAPE_LEFT:
+ angle = 270;
+ break;
+ case LANDSCAPE_RIGHT:
+ angle = 90;
+ break;
+ }
+
+ if (isFrontFacing) {
+ angle *= -1;
+ }
+
+ return (angle + sensorOrientation + 360) % 360;
+ }
+
+ /** @return the last received UI orientation. */
+ public PlatformChannel.DeviceOrientation getLastUIOrientation() {
+ return this.lastOrientation;
+ }
+
+ /**
+ * Handles orientation changes based on change events triggered by the OrientationIntentFilter.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ */
+ @VisibleForTesting
+ void handleUIOrientationChange() {
+ PlatformChannel.DeviceOrientation orientation = getUIOrientation();
+ handleOrientationChange(orientation, lastOrientation, deviceOrientationChangeCallback);
+ lastOrientation = orientation;
+ }
+
+ /**
+ * Handles orientation changes coming from either the device's sensors or the
+ * OrientationIntentFilter.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ */
+ @VisibleForTesting
+ static void handleOrientationChange(
+ DeviceOrientation newOrientation,
+ DeviceOrientation previousOrientation,
+ DeviceOrientationChangeCallback callback) {
+ if (!newOrientation.equals(previousOrientation)) {
+ callback.onChange(newOrientation);
+ }
+ }
+
+ /**
+ * Gets the current user interface orientation.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @return The current user interface orientation.
+ */
+ @VisibleForTesting
+ PlatformChannel.DeviceOrientation getUIOrientation() {
+ final int rotation = getDisplay().getRotation();
+ final int orientation = activity.getResources().getConfiguration().orientation;
+
+ switch (orientation) {
+ case Configuration.ORIENTATION_PORTRAIT:
+ if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) {
+ return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+ } else {
+ return PlatformChannel.DeviceOrientation.PORTRAIT_DOWN;
+ }
+ case Configuration.ORIENTATION_LANDSCAPE:
+ if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) {
+ return PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT;
+ } else {
+ return PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT;
+ }
+ default:
+ return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+ }
+ }
+
+ /**
+ * Calculates the sensor orientation based on the supplied angle.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param angle Orientation angle.
+ * @return The sensor orientation based on the supplied angle.
+ */
+ @VisibleForTesting
+ PlatformChannel.DeviceOrientation calculateSensorOrientation(int angle) {
+ final int tolerance = 45;
+ angle += tolerance;
+
+ // Orientation is 0 in the default orientation mode. This is portrait-mode for phones
+ // and landscape for tablets. We have to compensate for this by calculating the default
+ // orientation, and apply an offset accordingly.
+ int defaultDeviceOrientation = getDeviceDefaultOrientation();
+ if (defaultDeviceOrientation == Configuration.ORIENTATION_LANDSCAPE) {
+ angle += 90;
+ }
+ // Determine the orientation
+ angle = angle % 360;
+ return new PlatformChannel.DeviceOrientation[] {
+ PlatformChannel.DeviceOrientation.PORTRAIT_UP,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT,
+ PlatformChannel.DeviceOrientation.PORTRAIT_DOWN,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT,
+ }
+ [angle / 90];
+ }
+
+ /**
+ * Gets the default orientation of the device.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @return The default orientation of the device.
+ */
+ @VisibleForTesting
+ int getDeviceDefaultOrientation() {
+ Configuration config = activity.getResources().getConfiguration();
+ int rotation = getDisplay().getRotation();
+ if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180)
+ && config.orientation == Configuration.ORIENTATION_LANDSCAPE)
+ || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270)
+ && config.orientation == Configuration.ORIENTATION_PORTRAIT)) {
+ return Configuration.ORIENTATION_LANDSCAPE;
+ } else {
+ return Configuration.ORIENTATION_PORTRAIT;
+ }
+ }
+
+ /**
+ * Gets an instance of the Android {@link android.view.Display}.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @return An instance of the Android {@link android.view.Display}.
+ */
+ @SuppressWarnings("deprecation")
+ @VisibleForTesting
+ Display getDisplay() {
+ return ((WindowManager) activity.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/GeneratedCameraXLibrary.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/GeneratedCameraXLibrary.java
new file mode 100644
index 0000000..1e61ea6
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/GeneratedCameraXLibrary.java
@@ -0,0 +1,1112 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+// Autogenerated from Pigeon (v3.2.9), do not edit directly.
+// See also: https://pub.dev/packages/pigeon
+
+package io.flutter.plugins.camerax;
+
+import android.util.Log;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import io.flutter.plugin.common.BasicMessageChannel;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugin.common.MessageCodec;
+import io.flutter.plugin.common.StandardMessageCodec;
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/** Generated class from Pigeon. */
+@SuppressWarnings({"unused", "unchecked", "CodeBlock2Expr", "RedundantSuppression"})
+public class GeneratedCameraXLibrary {
+
+  /** Generated class from Pigeon that represents data sent in messages. */
+  public static class ResolutionInfo {
+    private @NonNull Long width;
+
+    public @NonNull Long getWidth() {
+      return width;
+    }
+
+    public void setWidth(@NonNull Long setterArg) {
+      if (setterArg == null) {
+        throw new IllegalStateException("Nonnull field \"width\" is null.");
+      }
+      this.width = setterArg;
+    }
+
+    private @NonNull Long height;
+
+    public @NonNull Long getHeight() {
+      return height;
+    }
+
+    public void setHeight(@NonNull Long setterArg) {
+      if (setterArg == null) {
+        throw new IllegalStateException("Nonnull field \"height\" is null.");
+      }
+      this.height = setterArg;
+    }
+
+    /** Constructor is private to enforce null safety; use Builder. */
+    private ResolutionInfo() {}
+
+    public static final class Builder {
+      private @Nullable Long width;
+
+      public @NonNull Builder setWidth(@NonNull Long setterArg) {
+        this.width = setterArg;
+        return this;
+      }
+
+      private @Nullable Long height;
+
+      public @NonNull Builder setHeight(@NonNull Long setterArg) {
+        this.height = setterArg;
+        return this;
+      }
+
+      public @NonNull ResolutionInfo build() {
+        ResolutionInfo pigeonReturn = new ResolutionInfo();
+        pigeonReturn.setWidth(width);
+        pigeonReturn.setHeight(height);
+        return pigeonReturn;
+      }
+    }
+
+    // Serializes this object to the map form sent over the message channel.
+    @NonNull
+    Map<String, Object> toMap() {
+      Map<String, Object> toMapResult = new HashMap<>();
+      toMapResult.put("width", width);
+      toMapResult.put("height", height);
+      return toMapResult;
+    }
+
+    static @NonNull ResolutionInfo fromMap(@NonNull Map<String, Object> map) {
+      ResolutionInfo pigeonResult = new ResolutionInfo();
+      Object width = map.get("width");
+      // The standard codec decodes an integral value as Integer when it fits in 32
+      // bits and as Long otherwise; normalize either form to Long here.
+      pigeonResult.setWidth(
+          (width == null) ? null : ((width instanceof Integer) ? (Integer) width : (Long) width));
+      Object height = map.get("height");
+      pigeonResult.setHeight(
+          (height == null)
+              ? null
+              : ((height instanceof Integer) ? (Integer) height : (Long) height));
+      return pigeonResult;
+    }
+  }
+
+  /** Generated class from Pigeon that represents data sent in messages. */
+  public static class CameraPermissionsErrorData {
+    private @NonNull String errorCode;
+
+    public @NonNull String getErrorCode() {
+      return errorCode;
+    }
+
+    public void setErrorCode(@NonNull String setterArg) {
+      if (setterArg == null) {
+        throw new IllegalStateException("Nonnull field \"errorCode\" is null.");
+      }
+      this.errorCode = setterArg;
+    }
+
+    private @NonNull String description;
+
+    public @NonNull String getDescription() {
+      return description;
+    }
+
+    public void setDescription(@NonNull String setterArg) {
+      if (setterArg == null) {
+        throw new IllegalStateException("Nonnull field \"description\" is null.");
+      }
+      this.description = setterArg;
+    }
+
+    /** Constructor is private to enforce null safety; use Builder. */
+    private CameraPermissionsErrorData() {}
+
+    public static final class Builder {
+      private @Nullable String errorCode;
+
+      public @NonNull Builder setErrorCode(@NonNull String setterArg) {
+        this.errorCode = setterArg;
+        return this;
+      }
+
+      private @Nullable String description;
+
+      public @NonNull Builder setDescription(@NonNull String setterArg) {
+        this.description = setterArg;
+        return this;
+      }
+
+      public @NonNull CameraPermissionsErrorData build() {
+        CameraPermissionsErrorData pigeonReturn = new CameraPermissionsErrorData();
+        pigeonReturn.setErrorCode(errorCode);
+        pigeonReturn.setDescription(description);
+        return pigeonReturn;
+      }
+    }
+
+    // Serializes this object to the map form sent over the message channel.
+    @NonNull
+    Map<String, Object> toMap() {
+      Map<String, Object> toMapResult = new HashMap<>();
+      toMapResult.put("errorCode", errorCode);
+      toMapResult.put("description", description);
+      return toMapResult;
+    }
+
+    static @NonNull CameraPermissionsErrorData fromMap(@NonNull Map<String, Object> map) {
+      CameraPermissionsErrorData pigeonResult = new CameraPermissionsErrorData();
+      Object errorCode = map.get("errorCode");
+      pigeonResult.setErrorCode((String) errorCode);
+      Object description = map.get("description");
+      pigeonResult.setDescription((String) description);
+      return pigeonResult;
+    }
+  }
+
+  /** Asynchronous result callback used by host API methods that complete asynchronously. */
+  public interface Result<T> {
+    void success(T result);
+
+    void error(Throwable error);
+  }
+
+  private static class JavaObjectHostApiCodec extends StandardMessageCodec {
+    public static final JavaObjectHostApiCodec INSTANCE = new JavaObjectHostApiCodec();
+
+    private JavaObjectHostApiCodec() {}
+  }
+
+  /** Generated interface from Pigeon that represents a handler of messages from Flutter. */
+  public interface JavaObjectHostApi {
+    void dispose(@NonNull Long identifier);
+
+    /** The codec used by JavaObjectHostApi. */
+    static MessageCodec<Object> getCodec() {
+      return JavaObjectHostApiCodec.INSTANCE;
+    }
+
+    /**
+     * Sets up an instance of `JavaObjectHostApi` to handle messages through the `binaryMessenger`.
+     */
+    static void setup(BinaryMessenger binaryMessenger, JavaObjectHostApi api) {
+      // Each anonymous block below registers (or, for a null api, unregisters) the
+      // handler for one named message channel.
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger, "dev.flutter.pigeon.JavaObjectHostApi.dispose", getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  api.dispose((identifierArg == null) ? null : identifierArg.longValue());
+                  wrapped.put("result", null);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+    }
+  }
+
+  private static class JavaObjectFlutterApiCodec extends StandardMessageCodec {
+    public static final JavaObjectFlutterApiCodec INSTANCE = new JavaObjectFlutterApiCodec();
+
+    private JavaObjectFlutterApiCodec() {}
+  }
+
+  /** Generated class from Pigeon that represents Flutter messages that can be called from Java. */
+  public static class JavaObjectFlutterApi {
+    private final BinaryMessenger binaryMessenger;
+
+    public JavaObjectFlutterApi(BinaryMessenger argBinaryMessenger) {
+      this.binaryMessenger = argBinaryMessenger;
+    }
+
+    public interface Reply<T> {
+      void reply(T reply);
+    }
+
+    static MessageCodec<Object> getCodec() {
+      return JavaObjectFlutterApiCodec.INSTANCE;
+    }
+
+    public void dispose(@NonNull Long identifierArg, Reply<Void> callback) {
+      BasicMessageChannel<Object> channel =
+          new BasicMessageChannel<>(
+              binaryMessenger, "dev.flutter.pigeon.JavaObjectFlutterApi.dispose", getCodec());
+      channel.send(
+          new ArrayList<Object>(Arrays.asList(identifierArg)),
+          channelReply -> {
+            callback.reply(null);
+          });
+    }
+  }
+
+  private static class CameraInfoHostApiCodec extends StandardMessageCodec {
+    public static final CameraInfoHostApiCodec INSTANCE = new CameraInfoHostApiCodec();
+
+    private CameraInfoHostApiCodec() {}
+  }
+
+  /** Generated interface from Pigeon that represents a handler of messages from Flutter. */
+  public interface CameraInfoHostApi {
+    @NonNull
+    Long getSensorRotationDegrees(@NonNull Long identifier);
+
+    /** The codec used by CameraInfoHostApi. */
+    static MessageCodec<Object> getCodec() {
+      return CameraInfoHostApiCodec.INSTANCE;
+    }
+
+    /**
+     * Sets up an instance of `CameraInfoHostApi` to handle messages through the `binaryMessenger`.
+     */
+    static void setup(BinaryMessenger binaryMessenger, CameraInfoHostApi api) {
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.CameraInfoHostApi.getSensorRotationDegrees",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  Long output =
+                      api.getSensorRotationDegrees(
+                          (identifierArg == null) ? null : identifierArg.longValue());
+                  wrapped.put("result", output);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+    }
+  }
+
+  private static class CameraInfoFlutterApiCodec extends StandardMessageCodec {
+    public static final CameraInfoFlutterApiCodec INSTANCE = new CameraInfoFlutterApiCodec();
+
+    private CameraInfoFlutterApiCodec() {}
+  }
+
+  /** Generated class from Pigeon that represents Flutter messages that can be called from Java. */
+  public static class CameraInfoFlutterApi {
+    private final BinaryMessenger binaryMessenger;
+
+    public CameraInfoFlutterApi(BinaryMessenger argBinaryMessenger) {
+      this.binaryMessenger = argBinaryMessenger;
+    }
+
+    public interface Reply<T> {
+      void reply(T reply);
+    }
+
+    static MessageCodec<Object> getCodec() {
+      return CameraInfoFlutterApiCodec.INSTANCE;
+    }
+
+    public void create(@NonNull Long identifierArg, Reply<Void> callback) {
+      BasicMessageChannel<Object> channel =
+          new BasicMessageChannel<>(
+              binaryMessenger, "dev.flutter.pigeon.CameraInfoFlutterApi.create", getCodec());
+      channel.send(
+          new ArrayList<Object>(Arrays.asList(identifierArg)),
+          channelReply -> {
+            callback.reply(null);
+          });
+    }
+  }
+
+  private static class CameraSelectorHostApiCodec extends StandardMessageCodec {
+    public static final CameraSelectorHostApiCodec INSTANCE = new CameraSelectorHostApiCodec();
+
+    private CameraSelectorHostApiCodec() {}
+  }
+
+  /** Generated interface from Pigeon that represents a handler of messages from Flutter. */
+  public interface CameraSelectorHostApi {
+    void create(@NonNull Long identifier, @Nullable Long lensFacing);
+
+    @NonNull
+    List<Long> filter(@NonNull Long identifier, @NonNull List<Long> cameraInfoIds);
+
+    /** The codec used by CameraSelectorHostApi. */
+    static MessageCodec<Object> getCodec() {
+      return CameraSelectorHostApiCodec.INSTANCE;
+    }
+
+    /**
+     * Sets up an instance of `CameraSelectorHostApi` to handle messages through the
+     * `binaryMessenger`.
+     */
+    static void setup(BinaryMessenger binaryMessenger, CameraSelectorHostApi api) {
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger, "dev.flutter.pigeon.CameraSelectorHostApi.create", getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  // lensFacing is nullable, so no null check here.
+                  Number lensFacingArg = (Number) args.get(1);
+                  api.create(
+                      (identifierArg == null) ? null : identifierArg.longValue(),
+                      (lensFacingArg == null) ? null : lensFacingArg.longValue());
+                  wrapped.put("result", null);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger, "dev.flutter.pigeon.CameraSelectorHostApi.filter", getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  List<Long> cameraInfoIdsArg = (List<Long>) args.get(1);
+                  if (cameraInfoIdsArg == null) {
+                    throw new NullPointerException("cameraInfoIdsArg unexpectedly null.");
+                  }
+                  List<Long> output =
+                      api.filter(
+                          (identifierArg == null) ? null : identifierArg.longValue(),
+                          cameraInfoIdsArg);
+                  wrapped.put("result", output);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+    }
+  }
+
+  private static class CameraSelectorFlutterApiCodec extends StandardMessageCodec {
+    public static final CameraSelectorFlutterApiCodec INSTANCE =
+        new CameraSelectorFlutterApiCodec();
+
+    private CameraSelectorFlutterApiCodec() {}
+  }
+
+  /** Generated class from Pigeon that represents Flutter messages that can be called from Java. */
+  public static class CameraSelectorFlutterApi {
+    private final BinaryMessenger binaryMessenger;
+
+    public CameraSelectorFlutterApi(BinaryMessenger argBinaryMessenger) {
+      this.binaryMessenger = argBinaryMessenger;
+    }
+
+    public interface Reply<T> {
+      void reply(T reply);
+    }
+
+    static MessageCodec<Object> getCodec() {
+      return CameraSelectorFlutterApiCodec.INSTANCE;
+    }
+
+    public void create(
+        @NonNull Long identifierArg, @Nullable Long lensFacingArg, Reply<Void> callback) {
+      BasicMessageChannel<Object> channel =
+          new BasicMessageChannel<>(
+              binaryMessenger, "dev.flutter.pigeon.CameraSelectorFlutterApi.create", getCodec());
+      channel.send(
+          new ArrayList<Object>(Arrays.asList(identifierArg, lensFacingArg)),
+          channelReply -> {
+            callback.reply(null);
+          });
+    }
+  }
+
+  private static class ProcessCameraProviderHostApiCodec extends StandardMessageCodec {
+    public static final ProcessCameraProviderHostApiCodec INSTANCE =
+        new ProcessCameraProviderHostApiCodec();
+
+    private ProcessCameraProviderHostApiCodec() {}
+  }
+
+  /** Generated interface from Pigeon that represents a handler of messages from Flutter. */
+  public interface ProcessCameraProviderHostApi {
+    void getInstance(Result<Long> result);
+
+    @NonNull
+    List<Long> getAvailableCameraInfos(@NonNull Long identifier);
+
+    @NonNull
+    Long bindToLifecycle(
+        @NonNull Long identifier,
+        @NonNull Long cameraSelectorIdentifier,
+        @NonNull List<Long> useCaseIds);
+
+    void unbind(@NonNull Long identifier, @NonNull List<Long> useCaseIds);
+
+    void unbindAll(@NonNull Long identifier);
+
+    /** The codec used by ProcessCameraProviderHostApi. */
+    static MessageCodec<Object> getCodec() {
+      return ProcessCameraProviderHostApiCodec.INSTANCE;
+    }
+
+    /**
+     * Sets up an instance of `ProcessCameraProviderHostApi` to handle messages through the
+     * `binaryMessenger`.
+     */
+    static void setup(BinaryMessenger binaryMessenger, ProcessCameraProviderHostApi api) {
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.ProcessCameraProviderHostApi.getInstance",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  // Asynchronous host method: the reply is sent from the Result
+                  // callback rather than after the try block.
+                  Result<Long> resultCallback =
+                      new Result<Long>() {
+                        public void success(Long result) {
+                          wrapped.put("result", result);
+                          reply.reply(wrapped);
+                        }
+
+                        public void error(Throwable error) {
+                          wrapped.put("error", wrapError(error));
+                          reply.reply(wrapped);
+                        }
+                      };
+
+                  api.getInstance(resultCallback);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                  reply.reply(wrapped);
+                }
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.ProcessCameraProviderHostApi.getAvailableCameraInfos",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  List<Long> output =
+                      api.getAvailableCameraInfos(
+                          (identifierArg == null) ? null : identifierArg.longValue());
+                  wrapped.put("result", output);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.ProcessCameraProviderHostApi.bindToLifecycle",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  Number cameraSelectorIdentifierArg = (Number) args.get(1);
+                  if (cameraSelectorIdentifierArg == null) {
+                    throw new NullPointerException(
+                        "cameraSelectorIdentifierArg unexpectedly null.");
+                  }
+                  List<Long> useCaseIdsArg = (List<Long>) args.get(2);
+                  if (useCaseIdsArg == null) {
+                    throw new NullPointerException("useCaseIdsArg unexpectedly null.");
+                  }
+                  Long output =
+                      api.bindToLifecycle(
+                          (identifierArg == null) ? null : identifierArg.longValue(),
+                          (cameraSelectorIdentifierArg == null)
+                              ? null
+                              : cameraSelectorIdentifierArg.longValue(),
+                          useCaseIdsArg);
+                  wrapped.put("result", output);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.ProcessCameraProviderHostApi.unbind",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  List<Long> useCaseIdsArg = (List<Long>) args.get(1);
+                  if (useCaseIdsArg == null) {
+                    throw new NullPointerException("useCaseIdsArg unexpectedly null.");
+                  }
+                  api.unbind(
+                      (identifierArg == null) ? null : identifierArg.longValue(), useCaseIdsArg);
+                  wrapped.put("result", null);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.ProcessCameraProviderHostApi.unbindAll",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  api.unbindAll((identifierArg == null) ? null : identifierArg.longValue());
+                  wrapped.put("result", null);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+    }
+  }
+
+  private static class ProcessCameraProviderFlutterApiCodec extends StandardMessageCodec {
+    public static final ProcessCameraProviderFlutterApiCodec INSTANCE =
+        new ProcessCameraProviderFlutterApiCodec();
+
+    private ProcessCameraProviderFlutterApiCodec() {}
+  }
+
+  /** Generated class from Pigeon that represents Flutter messages that can be called from Java. */
+  public static class ProcessCameraProviderFlutterApi {
+    private final BinaryMessenger binaryMessenger;
+
+    public ProcessCameraProviderFlutterApi(BinaryMessenger argBinaryMessenger) {
+      this.binaryMessenger = argBinaryMessenger;
+    }
+
+    public interface Reply<T> {
+      void reply(T reply);
+    }
+
+    static MessageCodec<Object> getCodec() {
+      return ProcessCameraProviderFlutterApiCodec.INSTANCE;
+    }
+
+    public void create(@NonNull Long identifierArg, Reply<Void> callback) {
+      BasicMessageChannel<Object> channel =
+          new BasicMessageChannel<>(
+              binaryMessenger,
+              "dev.flutter.pigeon.ProcessCameraProviderFlutterApi.create",
+              getCodec());
+      channel.send(
+          new ArrayList<Object>(Arrays.asList(identifierArg)),
+          channelReply -> {
+            callback.reply(null);
+          });
+    }
+  }
+
+  private static class CameraFlutterApiCodec extends StandardMessageCodec {
+    public static final CameraFlutterApiCodec INSTANCE = new CameraFlutterApiCodec();
+
+    private CameraFlutterApiCodec() {}
+  }
+
+  /** Generated class from Pigeon that represents Flutter messages that can be called from Java. */
+  public static class CameraFlutterApi {
+    private final BinaryMessenger binaryMessenger;
+
+    public CameraFlutterApi(BinaryMessenger argBinaryMessenger) {
+      this.binaryMessenger = argBinaryMessenger;
+    }
+
+    public interface Reply<T> {
+      void reply(T reply);
+    }
+
+    static MessageCodec<Object> getCodec() {
+      return CameraFlutterApiCodec.INSTANCE;
+    }
+
+    public void create(@NonNull Long identifierArg, Reply<Void> callback) {
+      BasicMessageChannel<Object> channel =
+          new BasicMessageChannel<>(
+              binaryMessenger, "dev.flutter.pigeon.CameraFlutterApi.create", getCodec());
+      channel.send(
+          new ArrayList<Object>(Arrays.asList(identifierArg)),
+          channelReply -> {
+            callback.reply(null);
+          });
+    }
+  }
+
+  private static class SystemServicesHostApiCodec extends StandardMessageCodec {
+    public static final SystemServicesHostApiCodec INSTANCE = new SystemServicesHostApiCodec();
+
+    private SystemServicesHostApiCodec() {}
+
+    @Override
+    protected Object readValueOfType(byte type, ByteBuffer buffer) {
+      switch (type) {
+          // Type tag 128 marks a CameraPermissionsErrorData in the byte stream.
+        case (byte) 128:
+          return CameraPermissionsErrorData.fromMap((Map<String, Object>) readValue(buffer));
+
+        default:
+          return super.readValueOfType(type, buffer);
+      }
+    }
+
+    @Override
+    protected void writeValue(ByteArrayOutputStream stream, Object value) {
+      if (value instanceof CameraPermissionsErrorData) {
+        stream.write(128);
+        writeValue(stream, ((CameraPermissionsErrorData) value).toMap());
+      } else {
+        super.writeValue(stream, value);
+      }
+    }
+  }
+
+  /** Generated interface from Pigeon that represents a handler of messages from Flutter. */
+  public interface SystemServicesHostApi {
+    void requestCameraPermissions(
+        @NonNull Boolean enableAudio, Result<CameraPermissionsErrorData> result);
+
+    void startListeningForDeviceOrientationChange(
+        @NonNull Boolean isFrontFacing, @NonNull Long sensorOrientation);
+
+    void stopListeningForDeviceOrientationChange();
+
+    /** The codec used by SystemServicesHostApi. */
+    static MessageCodec<Object> getCodec() {
+      return SystemServicesHostApiCodec.INSTANCE;
+    }
+
+    /**
+     * Sets up an instance of `SystemServicesHostApi` to handle messages through the
+     * `binaryMessenger`.
+     */
+    static void setup(BinaryMessenger binaryMessenger, SystemServicesHostApi api) {
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.SystemServicesHostApi.requestCameraPermissions",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Boolean enableAudioArg = (Boolean) args.get(0);
+                  if (enableAudioArg == null) {
+                    throw new NullPointerException("enableAudioArg unexpectedly null.");
+                  }
+                  // Asynchronous host method: the reply is sent from the Result
+                  // callback rather than after the try block.
+                  Result<CameraPermissionsErrorData> resultCallback =
+                      new Result<CameraPermissionsErrorData>() {
+                        public void success(CameraPermissionsErrorData result) {
+                          wrapped.put("result", result);
+                          reply.reply(wrapped);
+                        }
+
+                        public void error(Throwable error) {
+                          wrapped.put("error", wrapError(error));
+                          reply.reply(wrapped);
+                        }
+                      };
+
+                  api.requestCameraPermissions(enableAudioArg, resultCallback);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                  reply.reply(wrapped);
+                }
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.SystemServicesHostApi.startListeningForDeviceOrientationChange",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Boolean isFrontFacingArg = (Boolean) args.get(0);
+                  if (isFrontFacingArg == null) {
+                    throw new NullPointerException("isFrontFacingArg unexpectedly null.");
+                  }
+                  Number sensorOrientationArg = (Number) args.get(1);
+                  if (sensorOrientationArg == null) {
+                    throw new NullPointerException("sensorOrientationArg unexpectedly null.");
+                  }
+                  api.startListeningForDeviceOrientationChange(
+                      isFrontFacingArg,
+                      (sensorOrientationArg == null) ? null : sensorOrientationArg.longValue());
+                  wrapped.put("result", null);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.SystemServicesHostApi.stopListeningForDeviceOrientationChange",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  api.stopListeningForDeviceOrientationChange();
+                  wrapped.put("result", null);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+    }
+  }
+
+  private static class SystemServicesFlutterApiCodec extends StandardMessageCodec {
+    public static final SystemServicesFlutterApiCodec INSTANCE =
+        new SystemServicesFlutterApiCodec();
+
+    private SystemServicesFlutterApiCodec() {}
+  }
+
+  /** Generated class from Pigeon that represents Flutter messages that can be called from Java. */
+  public static class SystemServicesFlutterApi {
+    private final BinaryMessenger binaryMessenger;
+
+    public SystemServicesFlutterApi(BinaryMessenger argBinaryMessenger) {
+      this.binaryMessenger = argBinaryMessenger;
+    }
+
+    public interface Reply<T> {
+      void reply(T reply);
+    }
+
+    static MessageCodec<Object> getCodec() {
+      return SystemServicesFlutterApiCodec.INSTANCE;
+    }
+
+    public void onDeviceOrientationChanged(@NonNull String orientationArg, Reply<Void> callback) {
+      BasicMessageChannel<Object> channel =
+          new BasicMessageChannel<>(
+              binaryMessenger,
+              "dev.flutter.pigeon.SystemServicesFlutterApi.onDeviceOrientationChanged",
+              getCodec());
+      channel.send(
+          new ArrayList<Object>(Arrays.asList(orientationArg)),
+          channelReply -> {
+            callback.reply(null);
+          });
+    }
+
+    public void onCameraError(@NonNull String errorDescriptionArg, Reply<Void> callback) {
+      BasicMessageChannel<Object> channel =
+          new BasicMessageChannel<>(
+              binaryMessenger,
+              "dev.flutter.pigeon.SystemServicesFlutterApi.onCameraError",
+              getCodec());
+      channel.send(
+          new ArrayList<Object>(Arrays.asList(errorDescriptionArg)),
+          channelReply -> {
+            callback.reply(null);
+          });
+    }
+  }
+
+  private static class PreviewHostApiCodec extends StandardMessageCodec {
+    public static final PreviewHostApiCodec INSTANCE = new PreviewHostApiCodec();
+
+    private PreviewHostApiCodec() {}
+
+    @Override
+    protected Object readValueOfType(byte type, ByteBuffer buffer) {
+      switch (type) {
+          // Tags 128 and 129 both decode as ResolutionInfo; the generator emits one
+          // tag per use of the type in the API surface.
+        case (byte) 128:
+          return ResolutionInfo.fromMap((Map<String, Object>) readValue(buffer));
+
+        case (byte) 129:
+          return ResolutionInfo.fromMap((Map<String, Object>) readValue(buffer));
+
+        default:
+          return super.readValueOfType(type, buffer);
+      }
+    }
+
+    @Override
+    protected void writeValue(ByteArrayOutputStream stream, Object value) {
+      if (value instanceof ResolutionInfo) {
+        stream.write(128);
+        writeValue(stream, ((ResolutionInfo) value).toMap());
+        // NOTE(review): the branch below repeats the same instanceof check, so it is
+        // unreachable and tag 129 is never written. Presumably a Pigeon v3.2.9
+        // generation artifact; do not hand-edit — regenerate from the pigeon
+        // definitions if this needs to change.
+      } else if (value instanceof ResolutionInfo) {
+        stream.write(129);
+        writeValue(stream, ((ResolutionInfo) value).toMap());
+      } else {
+        super.writeValue(stream, value);
+      }
+    }
+  }
+
+  /** Generated interface from Pigeon that represents a handler of messages from Flutter. */
+  public interface PreviewHostApi {
+    void create(
+        @NonNull Long identifier,
+        @Nullable Long rotation,
+        @Nullable ResolutionInfo targetResolution);
+
+    @NonNull
+    Long setSurfaceProvider(@NonNull Long identifier);
+
+    void releaseFlutterSurfaceTexture();
+
+    @NonNull
+    ResolutionInfo getResolutionInfo(@NonNull Long identifier);
+
+    /** The codec used by PreviewHostApi. */
+    static MessageCodec<Object> getCodec() {
+      return PreviewHostApiCodec.INSTANCE;
+    }
+
+    /** Sets up an instance of `PreviewHostApi` to handle messages through the `binaryMessenger`. */
+    static void setup(BinaryMessenger binaryMessenger, PreviewHostApi api) {
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger, "dev.flutter.pigeon.PreviewHostApi.create", getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  // rotation and targetResolution are nullable, so no null checks here.
+                  Number rotationArg = (Number) args.get(1);
+                  ResolutionInfo targetResolutionArg = (ResolutionInfo) args.get(2);
+                  api.create(
+                      (identifierArg == null) ? null : identifierArg.longValue(),
+                      (rotationArg == null) ? null : rotationArg.longValue(),
+                      targetResolutionArg);
+                  wrapped.put("result", null);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.PreviewHostApi.setSurfaceProvider",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  Long output =
+                      api.setSurfaceProvider(
+                          (identifierArg == null) ? null : identifierArg.longValue());
+                  wrapped.put("result", output);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger,
+                "dev.flutter.pigeon.PreviewHostApi.releaseFlutterSurfaceTexture",
+                getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  api.releaseFlutterSurfaceTexture();
+                  wrapped.put("result", null);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+      {
+        BasicMessageChannel<Object> channel =
+            new BasicMessageChannel<>(
+                binaryMessenger, "dev.flutter.pigeon.PreviewHostApi.getResolutionInfo", getCodec());
+        if (api != null) {
+          channel.setMessageHandler(
+              (message, reply) -> {
+                Map<String, Object> wrapped = new HashMap<>();
+                try {
+                  ArrayList<Object> args = (ArrayList<Object>) message;
+                  Number identifierArg = (Number) args.get(0);
+                  if (identifierArg == null) {
+                    throw new NullPointerException("identifierArg unexpectedly null.");
+                  }
+                  ResolutionInfo output =
+                      api.getResolutionInfo(
+                          (identifierArg == null) ? null : identifierArg.longValue());
+                  wrapped.put("result", output);
+                } catch (Error | RuntimeException exception) {
+                  wrapped.put("error", wrapError(exception));
+                }
+                reply.reply(wrapped);
+              });
+        } else {
+          channel.setMessageHandler(null);
+        }
+      }
+    }
+  }
+
+  // Encodes a Throwable as the {code, message, details} map shape the Dart side of
+  // the Pigeon channel expects for the "error" reply entry.
+  private static Map<String, Object> wrapError(Throwable exception) {
+    Map<String, Object> errorMap = new HashMap<>();
+    errorMap.put("message", exception.toString());
+    errorMap.put("code", exception.getClass().getSimpleName());
+    errorMap.put(
+        "details",
+        "Cause: " + exception.getCause() + ", Stacktrace: " + Log.getStackTraceString(exception));
+    return errorMap;
+  }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/InstanceManager.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/InstanceManager.java
new file mode 100644
index 0000000..8212d12
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/InstanceManager.java
@@ -0,0 +1,209 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import android.os.Handler;
+import android.os.Looper;
+import androidx.annotation.Nullable;
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.WeakReference;
+import java.util.HashMap;
+import java.util.WeakHashMap;
+
+/**
+ * Maintains instances used to communicate with the corresponding objects in Dart.
+ *
+ * <p>When an instance is added with an identifier, either can be used to retrieve the other.
+ *
+ * <p>Added instances are added as a weak reference and a strong reference. When the strong
+ * reference is removed with {@link #remove(long)} and the weak reference is deallocated, the
+ * `finalizationListener` is notified with the instance's identifier. However, if the strong reference
+ * is removed and then the identifier is retrieved with the intention to pass the identifier to Dart
+ * (e.g. calling {@link #getIdentifierForStrongReference(Object)}), the strong reference to the
+ * instance is recreated. The strong reference will then need to be removed manually again.
+ */
+@SuppressWarnings("unchecked")
+public class InstanceManager {
+ // Identifiers are locked to a specific range to avoid collisions with objects
+ // created simultaneously from Dart.
+ // Host uses identifiers >= 2^16 and Dart is expected to use values n where,
+ // 0 <= n < 2^16.
+ private static final long MIN_HOST_CREATED_IDENTIFIER = 65536;
+ private static final long CLEAR_FINALIZED_WEAK_REFERENCES_INTERVAL = 30000;
+
+ /** Interface for listening when a weak reference of an instance is removed from the manager. */
+ public interface FinalizationListener {
+ void onFinalize(long identifier);
+ }
+
+ private final WeakHashMap<Object, Long> identifiers = new WeakHashMap<>();
+ private final HashMap<Long, WeakReference<Object>> weakInstances = new HashMap<>();
+ private final HashMap<Long, Object> strongInstances = new HashMap<>();
+
+ private final ReferenceQueue<Object> referenceQueue = new ReferenceQueue<>();
+ private final HashMap<WeakReference<Object>, Long> weakReferencesToIdentifiers = new HashMap<>();
+
+ private final Handler handler = new Handler(Looper.getMainLooper());
+
+ private final FinalizationListener finalizationListener;
+
+ private long nextIdentifier = MIN_HOST_CREATED_IDENTIFIER;
+ private boolean isClosed = false;
+
+ /**
+ * Instantiate a new manager.
+ *
+ * <p>When the manager is no longer needed, {@link #close()} must be called.
+ *
+ * @param finalizationListener the listener for garbage collected weak references.
+ * @return a new `InstanceManager`.
+ */
+ public static InstanceManager open(FinalizationListener finalizationListener) {
+ return new InstanceManager(finalizationListener);
+ }
+
+ private InstanceManager(FinalizationListener finalizationListener) {
+ this.finalizationListener = finalizationListener;
+ handler.postDelayed(
+ this::releaseAllFinalizedInstances, CLEAR_FINALIZED_WEAK_REFERENCES_INTERVAL);
+ }
+
+ /**
+ * Removes `identifier` and its associated strongly referenced instance, if present, from the
+ * manager.
+ *
+ * @param identifier the identifier paired to an instance.
+ * @param <T> the expected return type.
+ * @return the removed instance if the manager contains the given identifier, otherwise null.
+ */
+ @Nullable
+ public <T> T remove(long identifier) {
+ assertManagerIsNotClosed();
+ return (T) strongInstances.remove(identifier);
+ }
+
+ /**
+ * Retrieves the identifier paired with an instance.
+ *
+ * <p>If the manager contains `instance`, as a strong or weak reference, the strong reference to
+ * `instance` will be recreated and will need to be removed again with {@link #remove(long)}.
+ *
+ * @param instance an instance that may be stored in the manager.
+ * @return the identifier associated with `instance` if the manager contains the value, otherwise
+ * null.
+ */
+ @Nullable
+ public Long getIdentifierForStrongReference(Object instance) {
+ assertManagerIsNotClosed();
+ final Long identifier = identifiers.get(instance);
+ if (identifier != null) {
+ strongInstances.put(identifier, instance);
+ }
+ return identifier;
+ }
+
+ /**
+ * Adds a new instance that was instantiated from Dart.
+ *
+ * <p>If an instance or identifier has already been added, it will be replaced by the new values.
+ * The Dart InstanceManager is considered the source of truth and has the capability to overwrite
+ * stored pairs in response to hot restarts.
+ *
+ * @param instance the instance to be stored.
+ * @param identifier the identifier to be paired with instance. This value must be >= 0.
+ */
+ public void addDartCreatedInstance(Object instance, long identifier) {
+ assertManagerIsNotClosed();
+ addInstance(instance, identifier);
+ }
+
+ /**
+ * Adds a new instance that was instantiated from the host platform.
+ *
+   * <p>If an instance has already been added, this will replace it. {@link #containsInstance} can
+ * be used to check if the object has already been added to avoid this.
+ *
+ * @param instance the instance to be stored.
+ * @return the unique identifier stored with instance.
+ */
+ public long addHostCreatedInstance(Object instance) {
+ assertManagerIsNotClosed();
+
+ final long identifier = nextIdentifier++;
+ addInstance(instance, identifier);
+ return identifier;
+ }
+
+ /**
+ * Retrieves the instance associated with identifier.
+ *
+ * @param identifier the identifier paired to an instance.
+ * @param <T> the expected return type.
+ * @return the instance associated with `identifier` if the manager contains the value, otherwise
+ * null.
+ */
+ @Nullable
+ public <T> T getInstance(long identifier) {
+ assertManagerIsNotClosed();
+ final WeakReference<T> instance = (WeakReference<T>) weakInstances.get(identifier);
+ if (instance != null) {
+ return instance.get();
+ }
+ return (T) strongInstances.get(identifier);
+ }
+
+ /**
+ * Returns whether this manager contains the given `instance`.
+ *
+ * @param instance the instance whose presence in this manager is to be tested.
+ * @return whether this manager contains the given `instance`.
+ */
+ public boolean containsInstance(Object instance) {
+ assertManagerIsNotClosed();
+ return identifiers.containsKey(instance);
+ }
+
+ /**
+ * Closes the manager and releases resources.
+ *
+   * <p>Calling any other method after calling this one will throw an {@link AssertionError}; this
+   * method is excluded.
+ */
+ public void close() {
+ handler.removeCallbacks(this::releaseAllFinalizedInstances);
+ isClosed = true;
+ }
+
+ private void releaseAllFinalizedInstances() {
+ WeakReference<Object> reference;
+ while ((reference = (WeakReference<Object>) referenceQueue.poll()) != null) {
+ final Long identifier = weakReferencesToIdentifiers.remove(reference);
+ if (identifier != null) {
+ weakInstances.remove(identifier);
+ strongInstances.remove(identifier);
+ finalizationListener.onFinalize(identifier);
+ }
+ }
+ handler.postDelayed(
+ this::releaseAllFinalizedInstances, CLEAR_FINALIZED_WEAK_REFERENCES_INTERVAL);
+ }
+
+ private void addInstance(Object instance, long identifier) {
+ if (identifier < 0) {
+ throw new IllegalArgumentException("Identifier must be >= 0.");
+ }
+ final WeakReference<Object> weakReference = new WeakReference<>(instance, referenceQueue);
+ identifiers.put(instance, identifier);
+ weakInstances.put(identifier, weakReference);
+ weakReferencesToIdentifiers.put(weakReference, identifier);
+ strongInstances.put(identifier, instance);
+ }
+
+ private void assertManagerIsNotClosed() {
+ if (isClosed) {
+ throw new AssertionError("Manager has already been closed.");
+ }
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/JavaObjectHostApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/JavaObjectHostApiImpl.java
new file mode 100644
index 0000000..5dc0ba7
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/JavaObjectHostApiImpl.java
@@ -0,0 +1,33 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.JavaObjectHostApi;
+
+/**
+ * A pigeon Host API implementation that handles creating {@link Object}s and invoking its static
+ * and instance methods.
+ *
+ * <p>{@link Object} instances created by {@link JavaObjectHostApiImpl} are used to intercommunicate
+ * with a paired Dart object.
+ */
+public class JavaObjectHostApiImpl implements JavaObjectHostApi {
+ private final InstanceManager instanceManager;
+
+ /**
+ * Constructs a {@link JavaObjectHostApiImpl}.
+ *
+ * @param instanceManager maintains instances stored to communicate with Dart objects
+ */
+ public JavaObjectHostApiImpl(InstanceManager instanceManager) {
+ this.instanceManager = instanceManager;
+ }
+
+ @Override
+ public void dispose(@NonNull Long identifier) {
+ instanceManager.remove(identifier);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/PreviewHostApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/PreviewHostApiImpl.java
new file mode 100644
index 0000000..838f0b3
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/PreviewHostApiImpl.java
@@ -0,0 +1,149 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import android.graphics.SurfaceTexture;
+import android.util.Size;
+import android.view.Surface;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+import androidx.camera.core.Preview;
+import androidx.camera.core.SurfaceRequest;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.PreviewHostApi;
+import io.flutter.view.TextureRegistry;
+import java.util.Objects;
+import java.util.concurrent.Executors;
+
+public class PreviewHostApiImpl implements PreviewHostApi {
+ private final BinaryMessenger binaryMessenger;
+ private final InstanceManager instanceManager;
+ private final TextureRegistry textureRegistry;
+
+ @VisibleForTesting public CameraXProxy cameraXProxy = new CameraXProxy();
+ @VisibleForTesting public TextureRegistry.SurfaceTextureEntry flutterSurfaceTexture;
+
+ public PreviewHostApiImpl(
+ @NonNull BinaryMessenger binaryMessenger,
+ @NonNull InstanceManager instanceManager,
+ @NonNull TextureRegistry textureRegistry) {
+ this.binaryMessenger = binaryMessenger;
+ this.instanceManager = instanceManager;
+ this.textureRegistry = textureRegistry;
+ }
+
+ /** Creates a {@link Preview} with the target rotation and resolution if specified. */
+ @Override
+ public void create(
+ @NonNull Long identifier,
+ @Nullable Long rotation,
+ @Nullable GeneratedCameraXLibrary.ResolutionInfo targetResolution) {
+ Preview.Builder previewBuilder = cameraXProxy.createPreviewBuilder();
+ if (rotation != null) {
+ previewBuilder.setTargetRotation(rotation.intValue());
+ }
+ if (targetResolution != null) {
+ previewBuilder.setTargetResolution(
+ new Size(
+ targetResolution.getWidth().intValue(), targetResolution.getHeight().intValue()));
+ }
+ Preview preview = previewBuilder.build();
+ instanceManager.addDartCreatedInstance(preview, identifier);
+ }
+
+ /**
+ * Sets the {@link Preview.SurfaceProvider} that will be used to provide a {@code Surface} backed
+ * by a Flutter {@link TextureRegistry.SurfaceTextureEntry} used to build the {@link Preview}.
+ */
+ @Override
+ public Long setSurfaceProvider(@NonNull Long identifier) {
+ Preview preview = (Preview) Objects.requireNonNull(instanceManager.getInstance(identifier));
+ flutterSurfaceTexture = textureRegistry.createSurfaceTexture();
+ SurfaceTexture surfaceTexture = flutterSurfaceTexture.surfaceTexture();
+ Preview.SurfaceProvider surfaceProvider = createSurfaceProvider(surfaceTexture);
+ preview.setSurfaceProvider(surfaceProvider);
+
+ return flutterSurfaceTexture.id();
+ }
+
+ /**
+ * Creates a {@link Preview.SurfaceProvider} that specifies how to provide a {@link Surface} to a
+ * {@code Preview} that is backed by a Flutter {@link TextureRegistry.SurfaceTextureEntry}.
+ */
+ @VisibleForTesting
+ public Preview.SurfaceProvider createSurfaceProvider(@NonNull SurfaceTexture surfaceTexture) {
+ return new Preview.SurfaceProvider() {
+ @Override
+ public void onSurfaceRequested(SurfaceRequest request) {
+ surfaceTexture.setDefaultBufferSize(
+ request.getResolution().getWidth(), request.getResolution().getHeight());
+ Surface flutterSurface = cameraXProxy.createSurface(surfaceTexture);
+ request.provideSurface(
+ flutterSurface,
+ Executors.newSingleThreadExecutor(),
+ (result) -> {
+ // See https://developer.android.com/reference/androidx/camera/core/SurfaceRequest.Result for documentation.
+ // Always attempt a release.
+ flutterSurface.release();
+ int resultCode = result.getResultCode();
+ switch (resultCode) {
+ case SurfaceRequest.Result.RESULT_REQUEST_CANCELLED:
+ case SurfaceRequest.Result.RESULT_WILL_NOT_PROVIDE_SURFACE:
+ case SurfaceRequest.Result.RESULT_SURFACE_ALREADY_PROVIDED:
+ case SurfaceRequest.Result.RESULT_SURFACE_USED_SUCCESSFULLY:
+ // Only need to release, do nothing.
+ break;
+ case SurfaceRequest.Result.RESULT_INVALID_SURFACE: // Intentional fall through.
+ default:
+ // Release and send error.
+ SystemServicesFlutterApiImpl systemServicesFlutterApi =
+ cameraXProxy.createSystemServicesFlutterApiImpl(binaryMessenger);
+ systemServicesFlutterApi.sendCameraError(
+ getProvideSurfaceErrorDescription(resultCode), reply -> {});
+ break;
+ }
+ });
+ };
+ };
+ }
+
+ /**
+ * Returns an error description for each {@link SurfaceRequest.Result} that represents an error
+ * with providing a surface.
+ */
+ private String getProvideSurfaceErrorDescription(@Nullable int resultCode) {
+ switch (resultCode) {
+ case SurfaceRequest.Result.RESULT_INVALID_SURFACE:
+ return resultCode + ": Provided surface could not be used by the camera.";
+ default:
+ return resultCode + ": Attempt to provide a surface resulted with unrecognizable code.";
+ }
+ }
+
+ /**
+ * Releases the Flutter {@link TextureRegistry.SurfaceTextureEntry} if used to provide a surface
+ * for a {@link Preview}.
+ */
+ @Override
+ public void releaseFlutterSurfaceTexture() {
+ if (flutterSurfaceTexture != null) {
+ flutterSurfaceTexture.release();
+ }
+ }
+
+ /** Returns the resolution information for the specified {@link Preview}. */
+ @Override
+ public GeneratedCameraXLibrary.ResolutionInfo getResolutionInfo(@NonNull Long identifier) {
+ Preview preview = (Preview) Objects.requireNonNull(instanceManager.getInstance(identifier));
+ Size resolution = preview.getResolutionInfo().getResolution();
+
+ GeneratedCameraXLibrary.ResolutionInfo.Builder resolutionInfo =
+ new GeneratedCameraXLibrary.ResolutionInfo.Builder()
+ .setWidth(Long.valueOf(resolution.getWidth()))
+ .setHeight(Long.valueOf(resolution.getHeight()));
+ return resolutionInfo.build();
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/ProcessCameraProviderFlutterApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/ProcessCameraProviderFlutterApiImpl.java
new file mode 100644
index 0000000..90c94d0
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/ProcessCameraProviderFlutterApiImpl.java
@@ -0,0 +1,23 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import androidx.camera.lifecycle.ProcessCameraProvider;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.ProcessCameraProviderFlutterApi;
+
+public class ProcessCameraProviderFlutterApiImpl extends ProcessCameraProviderFlutterApi {
+ public ProcessCameraProviderFlutterApiImpl(
+ BinaryMessenger binaryMessenger, InstanceManager instanceManager) {
+ super(binaryMessenger);
+ this.instanceManager = instanceManager;
+ }
+
+ private final InstanceManager instanceManager;
+
+ void create(ProcessCameraProvider processCameraProvider, Reply<Void> reply) {
+ create(instanceManager.addHostCreatedInstance(processCameraProvider), reply);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/ProcessCameraProviderHostApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/ProcessCameraProviderHostApiImpl.java
new file mode 100644
index 0000000..e7036e7
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/ProcessCameraProviderHostApiImpl.java
@@ -0,0 +1,156 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import android.content.Context;
+import androidx.annotation.NonNull;
+import androidx.camera.core.Camera;
+import androidx.camera.core.CameraInfo;
+import androidx.camera.core.CameraSelector;
+import androidx.camera.core.UseCase;
+import androidx.camera.lifecycle.ProcessCameraProvider;
+import androidx.core.content.ContextCompat;
+import androidx.lifecycle.LifecycleOwner;
+import com.google.common.util.concurrent.ListenableFuture;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.ProcessCameraProviderHostApi;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+public class ProcessCameraProviderHostApiImpl implements ProcessCameraProviderHostApi {
+ private final BinaryMessenger binaryMessenger;
+ private final InstanceManager instanceManager;
+
+ private Context context;
+ private LifecycleOwner lifecycleOwner;
+
+ public ProcessCameraProviderHostApiImpl(
+ BinaryMessenger binaryMessenger, InstanceManager instanceManager, Context context) {
+ this.binaryMessenger = binaryMessenger;
+ this.instanceManager = instanceManager;
+ this.context = context;
+ }
+
+ public void setLifecycleOwner(LifecycleOwner lifecycleOwner) {
+ this.lifecycleOwner = lifecycleOwner;
+ }
+
+ /**
+ * Sets the context that the {@code ProcessCameraProvider} will use to attach the lifecycle of the
+ * camera to.
+ *
+ * <p>If using the camera plugin in an add-to-app context, ensure that a new instance of the
+ * {@code ProcessCameraProvider} is fetched via {@code #getInstance} anytime the context changes.
+ */
+ public void setContext(Context context) {
+ this.context = context;
+ }
+
+ /**
+ * Returns the instance of the {@code ProcessCameraProvider} to manage the lifecycle of the camera
+ * for the current {@code Context}.
+ */
+ @Override
+ public void getInstance(GeneratedCameraXLibrary.Result<Long> result) {
+ ListenableFuture<ProcessCameraProvider> processCameraProviderFuture =
+ ProcessCameraProvider.getInstance(context);
+
+ processCameraProviderFuture.addListener(
+ () -> {
+ try {
+ // Camera provider is now guaranteed to be available.
+ ProcessCameraProvider processCameraProvider = processCameraProviderFuture.get();
+
+ final ProcessCameraProviderFlutterApiImpl flutterApi =
+ new ProcessCameraProviderFlutterApiImpl(binaryMessenger, instanceManager);
+ if (!instanceManager.containsInstance(processCameraProvider)) {
+ flutterApi.create(processCameraProvider, reply -> {});
+ }
+ result.success(instanceManager.getIdentifierForStrongReference(processCameraProvider));
+ } catch (Exception e) {
+ result.error(e);
+ }
+ },
+ ContextCompat.getMainExecutor(context));
+ }
+
+ /** Returns cameras available to the {@code ProcessCameraProvider}. */
+ @Override
+ public List<Long> getAvailableCameraInfos(@NonNull Long identifier) {
+ ProcessCameraProvider processCameraProvider =
+ (ProcessCameraProvider) Objects.requireNonNull(instanceManager.getInstance(identifier));
+
+ List<CameraInfo> availableCameras = processCameraProvider.getAvailableCameraInfos();
+ List<Long> availableCamerasIds = new ArrayList<Long>();
+ final CameraInfoFlutterApiImpl cameraInfoFlutterApi =
+ new CameraInfoFlutterApiImpl(binaryMessenger, instanceManager);
+
+ for (CameraInfo cameraInfo : availableCameras) {
+ if (!instanceManager.containsInstance(cameraInfo)) {
+ cameraInfoFlutterApi.create(cameraInfo, result -> {});
+ }
+ availableCamerasIds.add(instanceManager.getIdentifierForStrongReference(cameraInfo));
+ }
+ return availableCamerasIds;
+ }
+
+ /**
+ * Binds specified {@code UseCase}s to the lifecycle of the {@code LifecycleOwner} that
+ * corresponds to this instance and returns the instance of the {@code Camera} whose lifecycle
+ * that {@code LifecycleOwner} reflects.
+ */
+ @Override
+ public Long bindToLifecycle(
+ @NonNull Long identifier,
+ @NonNull Long cameraSelectorIdentifier,
+ @NonNull List<Long> useCaseIds) {
+ ProcessCameraProvider processCameraProvider =
+ (ProcessCameraProvider) Objects.requireNonNull(instanceManager.getInstance(identifier));
+ CameraSelector cameraSelector =
+ (CameraSelector)
+ Objects.requireNonNull(instanceManager.getInstance(cameraSelectorIdentifier));
+ UseCase[] useCases = new UseCase[useCaseIds.size()];
+ for (int i = 0; i < useCaseIds.size(); i++) {
+ useCases[i] =
+ (UseCase)
+ Objects.requireNonNull(
+ instanceManager.getInstance(((Number) useCaseIds.get(i)).longValue()));
+ }
+
+ Camera camera =
+ processCameraProvider.bindToLifecycle(
+ (LifecycleOwner) lifecycleOwner, cameraSelector, useCases);
+
+ final CameraFlutterApiImpl cameraFlutterApi =
+ new CameraFlutterApiImpl(binaryMessenger, instanceManager);
+ if (!instanceManager.containsInstance(camera)) {
+ cameraFlutterApi.create(camera, result -> {});
+ }
+
+ return instanceManager.getIdentifierForStrongReference(camera);
+ }
+
+ @Override
+ public void unbind(@NonNull Long identifier, @NonNull List<Long> useCaseIds) {
+ ProcessCameraProvider processCameraProvider =
+ (ProcessCameraProvider) Objects.requireNonNull(instanceManager.getInstance(identifier));
+ UseCase[] useCases = new UseCase[useCaseIds.size()];
+ for (int i = 0; i < useCaseIds.size(); i++) {
+ useCases[i] =
+ (UseCase)
+ Objects.requireNonNull(
+ instanceManager.getInstance(((Number) useCaseIds.get(i)).longValue()));
+ }
+ processCameraProvider.unbind(useCases);
+ }
+
+ @Override
+ public void unbindAll(@NonNull Long identifier) {
+ ProcessCameraProvider processCameraProvider =
+ (ProcessCameraProvider) Objects.requireNonNull(instanceManager.getInstance(identifier));
+ processCameraProvider.unbindAll();
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/SystemServicesFlutterApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/SystemServicesFlutterApiImpl.java
new file mode 100644
index 0000000..6315897
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/SystemServicesFlutterApiImpl.java
@@ -0,0 +1,24 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import androidx.annotation.NonNull;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.SystemServicesFlutterApi;
+
+public class SystemServicesFlutterApiImpl extends SystemServicesFlutterApi {
+ public SystemServicesFlutterApiImpl(@NonNull BinaryMessenger binaryMessenger) {
+ super(binaryMessenger);
+ }
+
+ public void sendDeviceOrientationChangedEvent(
+ @NonNull String orientation, @NonNull Reply<Void> reply) {
+ super.onDeviceOrientationChanged(orientation, reply);
+ }
+
+ public void sendCameraError(@NonNull String errorDescription, @NonNull Reply<Void> reply) {
+ super.onCameraError(errorDescription, reply);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/SystemServicesHostApiImpl.java b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/SystemServicesHostApiImpl.java
new file mode 100644
index 0000000..a698581
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/SystemServicesHostApiImpl.java
@@ -0,0 +1,111 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import android.app.Activity;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.CameraPermissionsManager.PermissionsRegistry;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CameraPermissionsErrorData;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.Result;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.SystemServicesHostApi;
+
+public class SystemServicesHostApiImpl implements SystemServicesHostApi {
+ private final BinaryMessenger binaryMessenger;
+ private final InstanceManager instanceManager;
+
+ @VisibleForTesting public CameraXProxy cameraXProxy = new CameraXProxy();
+ @VisibleForTesting public DeviceOrientationManager deviceOrientationManager;
+ @VisibleForTesting public SystemServicesFlutterApiImpl systemServicesFlutterApi;
+
+ private Activity activity;
+ private PermissionsRegistry permissionsRegistry;
+
+ public SystemServicesHostApiImpl(
+ BinaryMessenger binaryMessenger, InstanceManager instanceManager) {
+ this.binaryMessenger = binaryMessenger;
+ this.instanceManager = instanceManager;
+ this.systemServicesFlutterApi = new SystemServicesFlutterApiImpl(binaryMessenger);
+ }
+
+ public void setActivity(Activity activity) {
+ this.activity = activity;
+ }
+
+ public void setPermissionsRegistry(PermissionsRegistry permissionsRegistry) {
+ this.permissionsRegistry = permissionsRegistry;
+ }
+
+ /**
+ * Requests camera permissions using an instance of a {@link CameraPermissionsManager}.
+ *
+ * <p>Will result with {@code null} if permissions were approved or there were no errors;
+ * otherwise, it will result with the error data explaining what went wrong.
+ */
+ @Override
+ public void requestCameraPermissions(
+ Boolean enableAudio, Result<CameraPermissionsErrorData> result) {
+ CameraPermissionsManager cameraPermissionsManager =
+ cameraXProxy.createCameraPermissionsManager();
+ cameraPermissionsManager.requestPermissions(
+ activity,
+ permissionsRegistry,
+ enableAudio,
+ (String errorCode, String description) -> {
+ if (errorCode == null) {
+ result.success(null);
+ } else {
+ // If permissions are ongoing or denied, error data will be sent to be handled.
+ CameraPermissionsErrorData errorData =
+ new CameraPermissionsErrorData.Builder()
+ .setErrorCode(errorCode)
+ .setDescription(description)
+ .build();
+ result.success(errorData);
+ }
+ });
+ }
+
+ /**
+   * Starts listening for device orientation changes using an instance of a {@link
+ * DeviceOrientationManager}.
+ *
+ * <p>Whenever a change in device orientation is detected by the {@code DeviceOrientationManager},
+ * the {@link SystemServicesFlutterApi} will be used to notify the Dart side.
+ */
+ @Override
+ public void startListeningForDeviceOrientationChange(
+ Boolean isFrontFacing, Long sensorOrientation) {
+ deviceOrientationManager =
+ cameraXProxy.createDeviceOrientationManager(
+ activity,
+ isFrontFacing,
+ sensorOrientation.intValue(),
+ (DeviceOrientation newOrientation) -> {
+ systemServicesFlutterApi.sendDeviceOrientationChangedEvent(
+ serializeDeviceOrientation(newOrientation), reply -> {});
+ });
+ deviceOrientationManager.start();
+ }
+
+ /** Serializes {@code DeviceOrientation} into a String that the Dart side is able to recognize. */
+ String serializeDeviceOrientation(DeviceOrientation orientation) {
+ return orientation.toString();
+ }
+
+ /**
+ * Tells the {@code deviceOrientationManager} to stop listening for orientation updates.
+ *
+ * <p>Has no effect if the {@code deviceOrientationManager} was never created to listen for device
+ * orientation updates.
+ */
+ @Override
+ public void stopListeningForDeviceOrientationChange() {
+ if (deviceOrientationManager != null) {
+ deviceOrientationManager.stop();
+ }
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraInfoTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraInfoTest.java
new file mode 100644
index 0000000..663d0e2
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraInfoTest.java
@@ -0,0 +1,66 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import androidx.camera.core.CameraInfo;
+import io.flutter.plugin.common.BinaryMessenger;
+import java.util.Objects;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+
+public class CameraInfoTest {
+ @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
+
+ @Mock public CameraInfo mockCameraInfo;
+ @Mock public BinaryMessenger mockBinaryMessenger;
+
+ InstanceManager testInstanceManager;
+
+ @Before
+ public void setUp() {
+ testInstanceManager = InstanceManager.open(identifier -> {});
+ }
+
+ @After
+ public void tearDown() {
+ testInstanceManager.close();
+ }
+
+ @Test
+ public void getSensorRotationDegreesTest() {
+ final CameraInfoHostApiImpl cameraInfoHostApi = new CameraInfoHostApiImpl(testInstanceManager);
+
+ testInstanceManager.addDartCreatedInstance(mockCameraInfo, 1);
+
+ when(mockCameraInfo.getSensorRotationDegrees()).thenReturn(90);
+
+ assertEquals((long) cameraInfoHostApi.getSensorRotationDegrees(1L), 90L);
+ verify(mockCameraInfo).getSensorRotationDegrees();
+ }
+
+ @Test
+ public void flutterApiCreateTest() {
+ final CameraInfoFlutterApiImpl spyFlutterApi =
+ spy(new CameraInfoFlutterApiImpl(mockBinaryMessenger, testInstanceManager));
+
+ spyFlutterApi.create(mockCameraInfo, reply -> {});
+
+ final long identifier =
+ Objects.requireNonNull(testInstanceManager.getIdentifierForStrongReference(mockCameraInfo));
+ verify(spyFlutterApi).create(eq(identifier), any());
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraPermissionsManagerTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraPermissionsManagerTest.java
new file mode 100644
index 0000000..d90bde9
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraPermissionsManagerTest.java
@@ -0,0 +1,89 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static junit.framework.TestCase.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+
+import android.content.pm.PackageManager;
+import io.flutter.plugins.camerax.CameraPermissionsManager.CameraRequestPermissionsListener;
+import io.flutter.plugins.camerax.CameraPermissionsManager.ResultCallback;
+import org.junit.Test;
+
+// Unit tests for CameraRequestPermissionsListener: each test feeds it a fake
+// onRequestPermissionsResult invocation and checks the callback it fires.
+public class CameraPermissionsManagerTest {
+  @Test
+  public void listener_respondsOnce() {
+    final int[] callCount = {0};
+    final CameraRequestPermissionsListener listener =
+        new CameraRequestPermissionsListener(
+            (String errorCode, String description) -> callCount[0]++);
+
+    // The first (denied) result triggers the callback; the second must be
+    // ignored because the listener only ever responds once.
+    listener.onRequestPermissionsResult(
+        9796, null, new int[] {PackageManager.PERMISSION_DENIED});
+    listener.onRequestPermissionsResult(
+        9796, null, new int[] {PackageManager.PERMISSION_GRANTED});
+
+    assertEquals(1, callCount[0]);
+  }
+
+  @Test
+  public void callback_respondsWithCameraAccessDenied() {
+    final ResultCallback callback = mock(ResultCallback.class);
+    final CameraRequestPermissionsListener listener =
+        new CameraRequestPermissionsListener(callback);
+
+    listener.onRequestPermissionsResult(
+        9796, null, new int[] {PackageManager.PERMISSION_DENIED});
+
+    verify(callback).onResult("CameraAccessDenied", "Camera access permission was denied.");
+  }
+
+  @Test
+  public void callback_respondsWithAudioAccessDenied() {
+    final ResultCallback callback = mock(ResultCallback.class);
+    final CameraRequestPermissionsListener listener =
+        new CameraRequestPermissionsListener(callback);
+
+    // Camera granted but audio denied should surface an audio-specific error.
+    listener.onRequestPermissionsResult(
+        9796,
+        null,
+        new int[] {PackageManager.PERMISSION_GRANTED, PackageManager.PERMISSION_DENIED});
+
+    verify(callback).onResult("AudioAccessDenied", "Audio access permission was denied.");
+  }
+
+  @Test
+  public void callback_doesNotRespond() {
+    final ResultCallback callback = mock(ResultCallback.class);
+    final CameraRequestPermissionsListener listener =
+        new CameraRequestPermissionsListener(callback);
+
+    // Both permissions granted: no error callback of either kind is expected.
+    listener.onRequestPermissionsResult(
+        9796,
+        null,
+        new int[] {PackageManager.PERMISSION_GRANTED, PackageManager.PERMISSION_GRANTED});
+
+    verify(callback, never())
+        .onResult("CameraAccessDenied", "Camera access permission was denied.");
+    verify(callback, never())
+        .onResult("AudioAccessDenied", "Audio access permission was denied.");
+  }
+
+  @Test
+  public void callback_respondsWithCameraAccessDeniedWhenEmptyResult() {
+    // An empty grantResults array is treated as a camera permission denial.
+    final ResultCallback callback = mock(ResultCallback.class);
+    final CameraRequestPermissionsListener listener =
+        new CameraRequestPermissionsListener(callback);
+
+    listener.onRequestPermissionsResult(9796, null, new int[] {});
+
+    verify(callback).onResult("CameraAccessDenied", "Camera access permission was denied.");
+  }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraSelectorTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraSelectorTest.java
new file mode 100644
index 0000000..2b27e08
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraSelectorTest.java
@@ -0,0 +1,97 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import androidx.camera.core.CameraInfo;
+import androidx.camera.core.CameraSelector;
+import io.flutter.plugin.common.BinaryMessenger;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+
+// Unit tests for the CameraSelector host and Flutter API wrappers.
+public class CameraSelectorTest {
+  @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
+
+  @Mock public CameraSelector mockCameraSelector;
+  @Mock public BinaryMessenger mockBinaryMessenger;
+
+  InstanceManager testInstanceManager;
+
+  @Before
+  public void setUp() {
+    testInstanceManager = InstanceManager.open(identifier -> {});
+  }
+
+  @After
+  public void tearDown() {
+    testInstanceManager.close();
+  }
+
+  @Test
+  public void createTest() {
+    final CameraSelectorHostApiImpl cameraSelectorHostApi =
+        new CameraSelectorHostApiImpl(mockBinaryMessenger, testInstanceManager);
+    final CameraXProxy mockCameraXProxy = mock(CameraXProxy.class);
+    final CameraSelector.Builder mockCameraSelectorBuilder = mock(CameraSelector.Builder.class);
+
+    cameraSelectorHostApi.cameraXProxy = mockCameraXProxy;
+    when(mockCameraXProxy.createCameraSelectorBuilder()).thenReturn(mockCameraSelectorBuilder);
+
+    when(mockCameraSelectorBuilder.requireLensFacing(1)).thenReturn(mockCameraSelectorBuilder);
+    when(mockCameraSelectorBuilder.build()).thenReturn(mockCameraSelector);
+
+    cameraSelectorHostApi.create(0L, 1L);
+
+    verify(mockCameraSelectorBuilder).requireLensFacing(CameraSelector.LENS_FACING_BACK);
+    // JUnit convention: expected value first, actual second, so failure
+    // messages read correctly.
+    assertEquals(mockCameraSelector, testInstanceManager.getInstance(0L));
+  }
+
+  @Test
+  public void filterTest() {
+    final CameraSelectorHostApiImpl cameraSelectorHostApi =
+        new CameraSelectorHostApiImpl(mockBinaryMessenger, testInstanceManager);
+    final CameraInfo cameraInfo = mock(CameraInfo.class);
+    final List<CameraInfo> cameraInfosForFilter = Arrays.asList(cameraInfo);
+    final List<Long> cameraInfosIds = Arrays.asList(1L);
+
+    testInstanceManager.addDartCreatedInstance(mockCameraSelector, 0);
+    testInstanceManager.addDartCreatedInstance(cameraInfo, 1);
+
+    when(mockCameraSelector.filter(cameraInfosForFilter)).thenReturn(cameraInfosForFilter);
+
+    // JUnit convention: expected value first, actual second.
+    assertEquals(
+        Arrays.asList(testInstanceManager.getIdentifierForStrongReference(cameraInfo)),
+        cameraSelectorHostApi.filter(0L, cameraInfosIds));
+    verify(mockCameraSelector).filter(cameraInfosForFilter);
+  }
+
+  @Test
+  public void flutterApiCreateTest() {
+    final CameraSelectorFlutterApiImpl spyFlutterApi =
+        spy(new CameraSelectorFlutterApiImpl(mockBinaryMessenger, testInstanceManager));
+
+    spyFlutterApi.create(mockCameraSelector, 0L, reply -> {});
+
+    // create() should register the instance and forward its identifier.
+    final long identifier =
+        Objects.requireNonNull(
+            testInstanceManager.getIdentifierForStrongReference(mockCameraSelector));
+    verify(spyFlutterApi).create(eq(identifier), eq(0L), any());
+  }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraTest.java
new file mode 100644
index 0000000..e2135b3
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraTest.java
@@ -0,0 +1,52 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+
+import androidx.camera.core.Camera;
+import io.flutter.plugin.common.BinaryMessenger;
+import java.util.Objects;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+
+// Unit test for the Camera Flutter API wrapper.
+public class CameraTest {
+  @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
+
+  @Mock public BinaryMessenger mockBinaryMessenger;
+  @Mock public Camera camera;
+
+  InstanceManager testInstanceManager;
+
+  @Before
+  public void setUp() {
+    testInstanceManager = InstanceManager.open(identifier -> {});
+  }
+
+  @After
+  public void tearDown() {
+    testInstanceManager.close();
+  }
+
+  @Test
+  public void flutterApiCreateTest() {
+    // Spy on the Flutter API so the create() call it makes can be verified.
+    final CameraFlutterApiImpl flutterApi =
+        spy(new CameraFlutterApiImpl(mockBinaryMessenger, testInstanceManager));
+
+    flutterApi.create(camera, reply -> {});
+
+    // Creating the camera registers it with the instance manager; verify
+    // create() was forwarded with that identifier.
+    final long expectedIdentifier =
+        Objects.requireNonNull(testInstanceManager.getIdentifierForStrongReference(camera));
+    verify(flutterApi).create(eq(expectedIdentifier), any());
+  }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/DeviceOrientationManagerTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/DeviceOrientationManagerTest.java
new file mode 100644
index 0000000..1e2bfba
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/DeviceOrientationManagerTest.java
@@ -0,0 +1,313 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.res.Configuration;
+import android.content.res.Resources;
+import android.provider.Settings;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugins.camerax.DeviceOrientationManager.DeviceOrientationChangeCallback;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+
+// Unit tests for DeviceOrientationManager: verifies the mapping between
+// UI/device orientation, sensor rotation, and photo/video rotation degrees
+// using a mocked Activity, WindowManager, and Display.
+public class DeviceOrientationManagerTest {
+  private Activity mockActivity;
+  private DeviceOrientationChangeCallback mockDeviceOrientationChangeCallback;
+  private WindowManager mockWindowManager;
+  private Display mockDisplay;
+  // Manager under test; recreated fresh for every test in before().
+  private DeviceOrientationManager deviceOrientationManager;
+
+  @Before
+  @SuppressWarnings("deprecation") // WindowManager.getDefaultDisplay() is deprecated.
+  public void before() {
+    mockActivity = mock(Activity.class);
+    mockDisplay = mock(Display.class);
+    mockWindowManager = mock(WindowManager.class);
+    mockDeviceOrientationChangeCallback = mock(DeviceOrientationChangeCallback.class);
+
+    when(mockActivity.getSystemService(Context.WINDOW_SERVICE)).thenReturn(mockWindowManager);
+    when(mockWindowManager.getDefaultDisplay()).thenReturn(mockDisplay);
+
+    // NOTE(review): the constructor arguments appear to be (activity,
+    // isFrontFacing, sensorOrientation, callback), based on how tests below
+    // vary the third argument — confirm against DeviceOrientationManager.
+    deviceOrientationManager =
+        new DeviceOrientationManager(mockActivity, false, 0, mockDeviceOrientationChangeCallback);
+  }
+
+  @Test
+  public void getVideoOrientation_whenNaturalScreenOrientationEqualsPortraitUp() {
+    int degreesPortraitUp =
+        deviceOrientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_UP);
+    int degreesPortraitDown =
+        deviceOrientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+    int degreesLandscapeLeft =
+        deviceOrientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+    int degreesLandscapeRight =
+        deviceOrientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+    assertEquals(0, degreesPortraitUp);
+    assertEquals(270, degreesLandscapeLeft);
+    assertEquals(180, degreesPortraitDown);
+    assertEquals(90, degreesLandscapeRight);
+  }
+
+  @Test
+  public void getVideoOrientation_whenNaturalScreenOrientationEqualsLandscapeLeft() {
+    // A 90-degree value (third constructor argument) shifts every video
+    // orientation by 90 degrees relative to the portrait-natural case above.
+    DeviceOrientationManager orientationManager =
+        new DeviceOrientationManager(mockActivity, false, 90, mockDeviceOrientationChangeCallback);
+
+    int degreesPortraitUp = orientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_UP);
+    int degreesPortraitDown =
+        orientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+    int degreesLandscapeLeft =
+        orientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+    int degreesLandscapeRight =
+        orientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+    assertEquals(90, degreesPortraitUp);
+    assertEquals(0, degreesLandscapeLeft);
+    assertEquals(270, degreesPortraitDown);
+    assertEquals(180, degreesLandscapeRight);
+  }
+
+  @Test
+  public void getVideoOrientation_fallbackToPortraitSensorOrientationWhenOrientationIsNull() {
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+
+    int degrees = deviceOrientationManager.getVideoOrientation(null);
+
+    assertEquals(0, degrees);
+  }
+
+  @Test
+  public void getVideoOrientation_fallbackToLandscapeSensorOrientationWhenOrientationIsNull() {
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+    DeviceOrientationManager orientationManager =
+        new DeviceOrientationManager(mockActivity, false, 90, mockDeviceOrientationChangeCallback);
+
+    int degrees = orientationManager.getVideoOrientation(null);
+
+    assertEquals(0, degrees);
+  }
+
+  @Test
+  public void getPhotoOrientation_whenNaturalScreenOrientationEqualsPortraitUp() {
+    int degreesPortraitUp =
+        deviceOrientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_UP);
+    int degreesPortraitDown =
+        deviceOrientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+    int degreesLandscapeLeft =
+        deviceOrientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+    int degreesLandscapeRight =
+        deviceOrientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+    assertEquals(0, degreesPortraitUp);
+    assertEquals(90, degreesLandscapeRight);
+    assertEquals(180, degreesPortraitDown);
+    assertEquals(270, degreesLandscapeLeft);
+  }
+
+  @Test
+  public void getPhotoOrientation_whenNaturalScreenOrientationEqualsLandscapeLeft() {
+    DeviceOrientationManager orientationManager =
+        new DeviceOrientationManager(mockActivity, false, 90, mockDeviceOrientationChangeCallback);
+
+    int degreesPortraitUp = orientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_UP);
+    int degreesPortraitDown =
+        orientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+    int degreesLandscapeLeft =
+        orientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+    int degreesLandscapeRight =
+        orientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+    assertEquals(90, degreesPortraitUp);
+    assertEquals(180, degreesLandscapeRight);
+    assertEquals(270, degreesPortraitDown);
+    assertEquals(0, degreesLandscapeLeft);
+  }
+
+  @Test
+  public void getPhotoOrientation_shouldFallbackToCurrentOrientationWhenOrientationIsNull() {
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+    int degrees = deviceOrientationManager.getPhotoOrientation(null);
+
+    assertEquals(270, degrees);
+  }
+
+  @Test
+  public void handleUIOrientationChange_shouldSendMessageWhenSensorAccessIsAllowed() {
+    // Settings.System.getInt is static, so mock it statically for the duration
+    // of the call; ACCELEROMETER_ROTATION == 0 means sensor access allowed here.
+    try (MockedStatic<Settings.System> mockedSystem = mockStatic(Settings.System.class)) {
+      mockedSystem
+          .when(
+              () ->
+                  Settings.System.getInt(any(), eq(Settings.System.ACCELEROMETER_ROTATION), eq(0)))
+          .thenReturn(0);
+      setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+      deviceOrientationManager.handleUIOrientationChange();
+    }
+
+    verify(mockDeviceOrientationChangeCallback, times(1))
+        .onChange(DeviceOrientation.LANDSCAPE_LEFT);
+  }
+
+  @Test
+  public void handleOrientationChange_shouldSendMessageWhenOrientationIsUpdated() {
+    DeviceOrientation previousOrientation = DeviceOrientation.PORTRAIT_UP;
+    DeviceOrientation newOrientation = DeviceOrientation.LANDSCAPE_LEFT;
+
+    DeviceOrientationManager.handleOrientationChange(
+        newOrientation, previousOrientation, mockDeviceOrientationChangeCallback);
+
+    verify(mockDeviceOrientationChangeCallback, times(1)).onChange(newOrientation);
+  }
+
+  @Test
+  public void handleOrientationChange_shouldNotSendMessageWhenOrientationIsNotUpdated() {
+    DeviceOrientation previousOrientation = DeviceOrientation.PORTRAIT_UP;
+    DeviceOrientation newOrientation = DeviceOrientation.PORTRAIT_UP;
+
+    DeviceOrientationManager.handleOrientationChange(
+        newOrientation, previousOrientation, mockDeviceOrientationChangeCallback);
+
+    verify(mockDeviceOrientationChangeCallback, never()).onChange(any());
+  }
+
+  @Test
+  public void getUIOrientation() {
+    // Orientation portrait and rotation of 0 should translate to "PORTRAIT_UP".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+    DeviceOrientation uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+
+    // Orientation portrait and rotation of 90 should translate to "PORTRAIT_UP".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_90);
+    uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+
+    // Orientation portrait and rotation of 180 should translate to "PORTRAIT_DOWN".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_180);
+    uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.PORTRAIT_DOWN, uiOrientation);
+
+    // Orientation portrait and rotation of 270 should translate to "PORTRAIT_DOWN".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_270);
+    uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.PORTRAIT_DOWN, uiOrientation);
+
+    // Orientation landscape and rotation of 0 should translate to "LANDSCAPE_LEFT".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+    uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.LANDSCAPE_LEFT, uiOrientation);
+
+    // Orientation landscape and rotation of 90 should translate to "LANDSCAPE_LEFT".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_90);
+    uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.LANDSCAPE_LEFT, uiOrientation);
+
+    // Orientation landscape and rotation of 180 should translate to "LANDSCAPE_RIGHT".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_180);
+    uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, uiOrientation);
+
+    // Orientation landscape and rotation of 270 should translate to "LANDSCAPE_RIGHT".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_270);
+    uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, uiOrientation);
+
+    // Orientation undefined should default to "PORTRAIT_UP".
+    setUpUIOrientationMocks(Configuration.ORIENTATION_UNDEFINED, Surface.ROTATION_0);
+    uiOrientation = deviceOrientationManager.getUIOrientation();
+    assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+  }
+
+  @Test
+  public void getDeviceDefaultOrientation() {
+    // A 90/270-degree display rotation flips the reported default orientation
+    // relative to the current configuration orientation.
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+    int orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+    assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_180);
+    orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+    assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_90);
+    orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+    assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_270);
+    orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+    assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+    orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+    assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_180);
+    orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+    assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_90);
+    orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+    assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_270);
+    orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+    assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+  }
+
+  @Test
+  public void calculateSensorOrientation() {
+    // With a portrait-natural device at rotation 0, sensor angles map
+    // directly onto DeviceOrientation values in 90-degree steps.
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+    DeviceOrientation orientation = deviceOrientationManager.calculateSensorOrientation(0);
+    assertEquals(DeviceOrientation.PORTRAIT_UP, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+    orientation = deviceOrientationManager.calculateSensorOrientation(90);
+    assertEquals(DeviceOrientation.LANDSCAPE_LEFT, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+    orientation = deviceOrientationManager.calculateSensorOrientation(180);
+    assertEquals(DeviceOrientation.PORTRAIT_DOWN, orientation);
+
+    setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+    orientation = deviceOrientationManager.calculateSensorOrientation(270);
+    assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, orientation);
+  }
+
+  // Stubs the activity's Resources/Configuration and the display rotation so
+  // the manager's UI-orientation queries observe the given values.
+  private void setUpUIOrientationMocks(int orientation, int rotation) {
+    Resources mockResources = mock(Resources.class);
+    Configuration mockConfiguration = mock(Configuration.class);
+
+    when(mockDisplay.getRotation()).thenReturn(rotation);
+
+    mockConfiguration.orientation = orientation;
+    when(mockActivity.getResources()).thenReturn(mockResources);
+    when(mockResources.getConfiguration()).thenReturn(mockConfiguration);
+  }
+
+  @Test
+  public void getDisplayTest() {
+    // getDisplay() should return the default display stubbed in before().
+    Display display = deviceOrientationManager.getDisplay();
+
+    assertEquals(mockDisplay, display);
+  }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/InstanceManagerTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/InstanceManagerTest.java
new file mode 100644
index 0000000..e2e012d
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/InstanceManagerTest.java
@@ -0,0 +1,77 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+
+// Unit tests for InstanceManager's instance/identifier bookkeeping.
+public class InstanceManagerTest {
+  @Test
+  public void addDartCreatedInstance() {
+    final InstanceManager instanceManager = InstanceManager.open(identifier -> {});
+
+    final Object instance = new Object();
+    instanceManager.addDartCreatedInstance(instance, 0);
+
+    // Lookup must work in both directions: identifier -> instance and
+    // instance -> identifier.
+    assertEquals(instance, instanceManager.getInstance(0));
+    assertEquals((Long) 0L, instanceManager.getIdentifierForStrongReference(instance));
+    assertTrue(instanceManager.containsInstance(instance));
+
+    instanceManager.close();
+  }
+
+  @Test
+  public void addHostCreatedInstance() {
+    final InstanceManager instanceManager = InstanceManager.open(identifier -> {});
+
+    final Object instance = new Object();
+    final long identifier = instanceManager.addHostCreatedInstance(instance);
+
+    // The generated identifier must resolve back to the stored instance.
+    assertNotNull(instanceManager.getInstance(identifier));
+    assertEquals(instance, instanceManager.getInstance(identifier));
+    assertTrue(instanceManager.containsInstance(instance));
+
+    instanceManager.close();
+  }
+
+  @Test
+  public void addHostCreatedInstance_createsSameInstanceTwice() {
+    final InstanceManager instanceManager = InstanceManager.open(identifier -> {});
+
+    final Object instance = new Object();
+    final long firstIdentifier = instanceManager.addHostCreatedInstance(instance);
+    final long secondIdentifier = instanceManager.addHostCreatedInstance(instance);
+
+    // Registering the same instance twice must yield distinct identifiers.
+    assertNotEquals(firstIdentifier, secondIdentifier);
+    assertTrue(instanceManager.containsInstance(instance));
+
+    instanceManager.close();
+  }
+
+  @Test
+  public void remove() {
+    final InstanceManager instanceManager = InstanceManager.open(identifier -> {});
+
+    Object instance = new Object();
+    instanceManager.addDartCreatedInstance(instance, 0);
+
+    assertEquals(instance, instanceManager.remove(0));
+
+    // Drop the local strong reference so the removed object can be garbage
+    // collected.
+    //noinspection UnusedAssignment
+    instance = null;
+
+    Runtime.getRuntime().gc();
+
+    assertNull(instanceManager.getInstance(0));
+
+    instanceManager.close();
+  }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/JavaObjectHostApiTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/JavaObjectHostApiTest.java
new file mode 100644
index 0000000..cce3341
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/JavaObjectHostApiTest.java
@@ -0,0 +1,32 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.junit.Assert.assertNull;
+
+import org.junit.Test;
+
+// Unit test for JavaObjectHostApiImpl's dispose behavior.
+public class JavaObjectHostApiTest {
+  @Test
+  public void dispose() {
+    final InstanceManager instanceManager = InstanceManager.open(identifier -> {});
+    final JavaObjectHostApiImpl hostApi = new JavaObjectHostApiImpl(instanceManager);
+
+    Object instance = new Object();
+    instanceManager.addDartCreatedInstance(instance, 0);
+
+    // Clear the local strong reference so only the manager holds the object.
+    //noinspection UnusedAssignment
+    instance = null;
+
+    hostApi.dispose(0L);
+    Runtime.getRuntime().gc();
+
+    // Disposing should have released the instance, letting it be collected.
+    assertNull(instanceManager.getInstance(0));
+
+    instanceManager.close();
+  }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/PreviewTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/PreviewTest.java
new file mode 100644
index 0000000..9cb4e91
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/PreviewTest.java
@@ -0,0 +1,221 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.graphics.SurfaceTexture;
+import android.util.Size;
+import android.view.Surface;
+import androidx.camera.core.Preview;
+import androidx.camera.core.SurfaceRequest;
+import androidx.core.util.Consumer;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.ResolutionInfo;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.SystemServicesFlutterApi.Reply;
+import io.flutter.view.TextureRegistry;
+import java.util.concurrent.Executor;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+import org.robolectric.RobolectricTestRunner;
+
+@RunWith(RobolectricTestRunner.class)
+public class PreviewTest {
+ @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
+
+ @Mock public Preview mockPreview;
+ @Mock public BinaryMessenger mockBinaryMessenger;
+ @Mock public TextureRegistry mockTextureRegistry;
+ @Mock public CameraXProxy mockCameraXProxy;
+
+ InstanceManager testInstanceManager;
+
+ @Before
+ public void setUp() {
+ testInstanceManager = spy(InstanceManager.open(identifier -> {}));
+ }
+
+ @After
+ public void tearDown() {
+ testInstanceManager.close();
+ }
+
+ @Test
+ public void create_createsPreviewWithCorrectConfiguration() {
+ final PreviewHostApiImpl previewHostApi =
+ new PreviewHostApiImpl(mockBinaryMessenger, testInstanceManager, mockTextureRegistry);
+ final Preview.Builder mockPreviewBuilder = mock(Preview.Builder.class);
+ final int targetRotation = 90;
+ final int targetResolutionWidth = 10;
+ final int targetResolutionHeight = 50;
+ final Long previewIdentifier = 3L;
+ final GeneratedCameraXLibrary.ResolutionInfo resolutionInfo =
+ new GeneratedCameraXLibrary.ResolutionInfo.Builder()
+ .setWidth(Long.valueOf(targetResolutionWidth))
+ .setHeight(Long.valueOf(targetResolutionHeight))
+ .build();
+
+ previewHostApi.cameraXProxy = mockCameraXProxy;
+ when(mockCameraXProxy.createPreviewBuilder()).thenReturn(mockPreviewBuilder);
+ when(mockPreviewBuilder.build()).thenReturn(mockPreview);
+
+ final ArgumentCaptor<Size> sizeCaptor = ArgumentCaptor.forClass(Size.class);
+
+ previewHostApi.create(previewIdentifier, Long.valueOf(targetRotation), resolutionInfo);
+
+ verify(mockPreviewBuilder).setTargetRotation(targetRotation);
+ verify(mockPreviewBuilder).setTargetResolution(sizeCaptor.capture());
+ assertEquals(sizeCaptor.getValue().getWidth(), targetResolutionWidth);
+ assertEquals(sizeCaptor.getValue().getHeight(), targetResolutionHeight);
+ verify(mockPreviewBuilder).build();
+ verify(testInstanceManager).addDartCreatedInstance(mockPreview, previewIdentifier);
+ }
+
+ @Test
+ public void setSurfaceProviderTest_createsSurfaceProviderAndReturnsTextureEntryId() {
+ final PreviewHostApiImpl previewHostApi =
+ spy(new PreviewHostApiImpl(mockBinaryMessenger, testInstanceManager, mockTextureRegistry));
+ final TextureRegistry.SurfaceTextureEntry mockSurfaceTextureEntry =
+ mock(TextureRegistry.SurfaceTextureEntry.class);
+ final SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
+ final Long previewIdentifier = 5L;
+ final Long surfaceTextureEntryId = 120L;
+
+ previewHostApi.cameraXProxy = mockCameraXProxy;
+ testInstanceManager.addDartCreatedInstance(mockPreview, previewIdentifier);
+
+ when(mockTextureRegistry.createSurfaceTexture()).thenReturn(mockSurfaceTextureEntry);
+ when(mockSurfaceTextureEntry.surfaceTexture()).thenReturn(mockSurfaceTexture);
+ when(mockSurfaceTextureEntry.id()).thenReturn(surfaceTextureEntryId);
+
+ final ArgumentCaptor<Preview.SurfaceProvider> surfaceProviderCaptor =
+ ArgumentCaptor.forClass(Preview.SurfaceProvider.class);
+ final ArgumentCaptor<Surface> surfaceCaptor = ArgumentCaptor.forClass(Surface.class);
+ final ArgumentCaptor<Consumer> consumerCaptor = ArgumentCaptor.forClass(Consumer.class);
+
+ // Test that surface provider was set and the surface texture ID was returned.
+ assertEquals(previewHostApi.setSurfaceProvider(previewIdentifier), surfaceTextureEntryId);
+ verify(mockPreview).setSurfaceProvider(surfaceProviderCaptor.capture());
+ verify(previewHostApi).createSurfaceProvider(mockSurfaceTexture);
+ }
+
+ @Test
+ public void createSurfaceProvider_createsExpectedPreviewSurfaceProvider() {
+ final PreviewHostApiImpl previewHostApi =
+ new PreviewHostApiImpl(mockBinaryMessenger, testInstanceManager, mockTextureRegistry);
+ final SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
+ final Surface mockSurface = mock(Surface.class);
+ final SurfaceRequest mockSurfaceRequest = mock(SurfaceRequest.class);
+ final SurfaceRequest.Result mockSurfaceRequestResult = mock(SurfaceRequest.Result.class);
+ final SystemServicesFlutterApiImpl mockSystemServicesFlutterApi =
+ mock(SystemServicesFlutterApiImpl.class);
+ final int resolutionWidth = 200;
+ final int resolutionHeight = 500;
+
+ previewHostApi.cameraXProxy = mockCameraXProxy;
+ when(mockCameraXProxy.createSurface(mockSurfaceTexture)).thenReturn(mockSurface);
+ when(mockSurfaceRequest.getResolution())
+ .thenReturn(new Size(resolutionWidth, resolutionHeight));
+ when(mockCameraXProxy.createSystemServicesFlutterApiImpl(mockBinaryMessenger))
+ .thenReturn(mockSystemServicesFlutterApi);
+
+ final ArgumentCaptor<Surface> surfaceCaptor = ArgumentCaptor.forClass(Surface.class);
+ final ArgumentCaptor<Consumer> consumerCaptor = ArgumentCaptor.forClass(Consumer.class);
+
+ Preview.SurfaceProvider previewSurfaceProvider =
+ previewHostApi.createSurfaceProvider(mockSurfaceTexture);
+ previewSurfaceProvider.onSurfaceRequested(mockSurfaceRequest);
+
+ verify(mockSurfaceTexture).setDefaultBufferSize(resolutionWidth, resolutionHeight);
+ verify(mockSurfaceRequest)
+ .provideSurface(surfaceCaptor.capture(), any(Executor.class), consumerCaptor.capture());
+
+ // Test that the surface derived from the surface texture entry will be provided to the surface request.
+ assertEquals(surfaceCaptor.getValue(), mockSurface);
+
+ // Test that the Consumer used to handle surface request result releases Flutter surface texture appropriately
+ // and sends camera errors appropriately.
+ Consumer<SurfaceRequest.Result> capturedConsumer = consumerCaptor.getValue();
+
+ // Case where Surface should be released.
+ when(mockSurfaceRequestResult.getResultCode())
+ .thenReturn(SurfaceRequest.Result.RESULT_REQUEST_CANCELLED);
+ capturedConsumer.accept(mockSurfaceRequestResult);
+ verify(mockSurface).release();
+ reset(mockSurface);
+
+    when(mockSurfaceRequestResult.getResultCode())
+        .thenReturn(SurfaceRequest.Result.RESULT_SURFACE_ALREADY_PROVIDED);
+    capturedConsumer.accept(mockSurfaceRequestResult);
+    verify(mockSurface).release();
+    reset(mockSurface);
+
+ when(mockSurfaceRequestResult.getResultCode())
+ .thenReturn(SurfaceRequest.Result.RESULT_WILL_NOT_PROVIDE_SURFACE);
+ capturedConsumer.accept(mockSurfaceRequestResult);
+ verify(mockSurface).release();
+ reset(mockSurface);
+
+ when(mockSurfaceRequestResult.getResultCode())
+ .thenReturn(SurfaceRequest.Result.RESULT_SURFACE_USED_SUCCESSFULLY);
+ capturedConsumer.accept(mockSurfaceRequestResult);
+ verify(mockSurface).release();
+ reset(mockSurface);
+
+ // Case where error must be sent.
+ when(mockSurfaceRequestResult.getResultCode())
+ .thenReturn(SurfaceRequest.Result.RESULT_INVALID_SURFACE);
+ capturedConsumer.accept(mockSurfaceRequestResult);
+ verify(mockSurface).release();
+ verify(mockSystemServicesFlutterApi).sendCameraError(anyString(), any(Reply.class));
+ }
+
+ @Test
+ public void releaseFlutterSurfaceTexture_makesCallToReleaseFlutterSurfaceTexture() {
+ final PreviewHostApiImpl previewHostApi =
+ new PreviewHostApiImpl(mockBinaryMessenger, testInstanceManager, mockTextureRegistry);
+ final TextureRegistry.SurfaceTextureEntry mockSurfaceTextureEntry =
+ mock(TextureRegistry.SurfaceTextureEntry.class);
+
+ previewHostApi.flutterSurfaceTexture = mockSurfaceTextureEntry;
+
+ previewHostApi.releaseFlutterSurfaceTexture();
+ verify(mockSurfaceTextureEntry).release();
+ }
+
+ @Test
+ public void getResolutionInfo_makesCallToRetrievePreviewResolutionInfo() {
+ final PreviewHostApiImpl previewHostApi =
+ new PreviewHostApiImpl(mockBinaryMessenger, testInstanceManager, mockTextureRegistry);
+ final androidx.camera.core.ResolutionInfo mockResolutionInfo =
+ mock(androidx.camera.core.ResolutionInfo.class);
+ final Long previewIdentifier = 23L;
+ final int resolutionWidth = 500;
+ final int resolutionHeight = 200;
+
+ testInstanceManager.addDartCreatedInstance(mockPreview, previewIdentifier);
+ when(mockPreview.getResolutionInfo()).thenReturn(mockResolutionInfo);
+ when(mockResolutionInfo.getResolution())
+ .thenReturn(new Size(resolutionWidth, resolutionHeight));
+
+ ResolutionInfo resolutionInfo = previewHostApi.getResolutionInfo(previewIdentifier);
+ assertEquals(resolutionInfo.getWidth(), Long.valueOf(resolutionWidth));
+ assertEquals(resolutionInfo.getHeight(), Long.valueOf(resolutionHeight));
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/ProcessCameraProviderTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/ProcessCameraProviderTest.java
new file mode 100644
index 0000000..47b4ed6
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/ProcessCameraProviderTest.java
@@ -0,0 +1,170 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.content.Context;
+import androidx.camera.core.Camera;
+import androidx.camera.core.CameraInfo;
+import androidx.camera.core.CameraSelector;
+import androidx.camera.core.UseCase;
+import androidx.camera.lifecycle.ProcessCameraProvider;
+import androidx.lifecycle.LifecycleOwner;
+import androidx.test.core.app.ApplicationProvider;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import io.flutter.plugin.common.BinaryMessenger;
+import java.util.Arrays;
+import java.util.Objects;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+import org.mockito.stubbing.Answer;
+import org.robolectric.RobolectricTestRunner;
+
+@RunWith(RobolectricTestRunner.class)
+public class ProcessCameraProviderTest {
+ @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
+
+ @Mock public ProcessCameraProvider processCameraProvider;
+ @Mock public BinaryMessenger mockBinaryMessenger;
+
+ InstanceManager testInstanceManager;
+ private Context context;
+
+ @Before
+ public void setUp() {
+ testInstanceManager = InstanceManager.open(identifier -> {});
+ context = ApplicationProvider.getApplicationContext();
+ }
+
+ @After
+ public void tearDown() {
+ testInstanceManager.close();
+ }
+
+ @Test
+ public void getInstanceTest() {
+ final ProcessCameraProviderHostApiImpl processCameraProviderHostApi =
+ new ProcessCameraProviderHostApiImpl(mockBinaryMessenger, testInstanceManager, context);
+ final ListenableFuture<ProcessCameraProvider> processCameraProviderFuture =
+ spy(Futures.immediateFuture(processCameraProvider));
+ final GeneratedCameraXLibrary.Result<Long> mockResult =
+ mock(GeneratedCameraXLibrary.Result.class);
+
+ testInstanceManager.addDartCreatedInstance(processCameraProvider, 0);
+
+ try (MockedStatic<ProcessCameraProvider> mockedProcessCameraProvider =
+ Mockito.mockStatic(ProcessCameraProvider.class)) {
+ mockedProcessCameraProvider
+ .when(() -> ProcessCameraProvider.getInstance(context))
+ .thenAnswer(
+ (Answer<ListenableFuture<ProcessCameraProvider>>)
+ invocation -> processCameraProviderFuture);
+
+ final ArgumentCaptor<Runnable> runnableCaptor = ArgumentCaptor.forClass(Runnable.class);
+
+ processCameraProviderHostApi.getInstance(mockResult);
+ verify(processCameraProviderFuture).addListener(runnableCaptor.capture(), any());
+ runnableCaptor.getValue().run();
+ verify(mockResult).success(0L);
+ }
+ }
+
+ @Test
+ public void getAvailableCameraInfosTest() {
+ final ProcessCameraProviderHostApiImpl processCameraProviderHostApi =
+ new ProcessCameraProviderHostApiImpl(mockBinaryMessenger, testInstanceManager, context);
+ final CameraInfo mockCameraInfo = mock(CameraInfo.class);
+
+ testInstanceManager.addDartCreatedInstance(processCameraProvider, 0);
+ testInstanceManager.addDartCreatedInstance(mockCameraInfo, 1);
+
+ when(processCameraProvider.getAvailableCameraInfos()).thenReturn(Arrays.asList(mockCameraInfo));
+
+ assertEquals(processCameraProviderHostApi.getAvailableCameraInfos(0L), Arrays.asList(1L));
+ verify(processCameraProvider).getAvailableCameraInfos();
+ }
+
+ @Test
+ public void bindToLifecycleTest() {
+ final ProcessCameraProviderHostApiImpl processCameraProviderHostApi =
+ new ProcessCameraProviderHostApiImpl(mockBinaryMessenger, testInstanceManager, context);
+ final Camera mockCamera = mock(Camera.class);
+ final CameraSelector mockCameraSelector = mock(CameraSelector.class);
+ final UseCase mockUseCase = mock(UseCase.class);
+ UseCase[] mockUseCases = new UseCase[] {mockUseCase};
+
+ LifecycleOwner mockLifecycleOwner = mock(LifecycleOwner.class);
+ processCameraProviderHostApi.setLifecycleOwner(mockLifecycleOwner);
+
+ testInstanceManager.addDartCreatedInstance(processCameraProvider, 0);
+ testInstanceManager.addDartCreatedInstance(mockCameraSelector, 1);
+ testInstanceManager.addDartCreatedInstance(mockUseCase, 2);
+ testInstanceManager.addDartCreatedInstance(mockCamera, 3);
+
+ when(processCameraProvider.bindToLifecycle(
+ mockLifecycleOwner, mockCameraSelector, mockUseCases))
+ .thenReturn(mockCamera);
+
+ assertEquals(
+ processCameraProviderHostApi.bindToLifecycle(0L, 1L, Arrays.asList(2L)), Long.valueOf(3));
+ verify(processCameraProvider)
+ .bindToLifecycle(mockLifecycleOwner, mockCameraSelector, mockUseCases);
+ }
+
+ @Test
+ public void unbindTest() {
+ final ProcessCameraProviderHostApiImpl processCameraProviderHostApi =
+ new ProcessCameraProviderHostApiImpl(mockBinaryMessenger, testInstanceManager, context);
+ final UseCase mockUseCase = mock(UseCase.class);
+ UseCase[] mockUseCases = new UseCase[] {mockUseCase};
+
+ testInstanceManager.addDartCreatedInstance(processCameraProvider, 0);
+ testInstanceManager.addDartCreatedInstance(mockUseCase, 1);
+
+ processCameraProviderHostApi.unbind(0L, Arrays.asList(1L));
+ verify(processCameraProvider).unbind(mockUseCases);
+ }
+
+ @Test
+ public void unbindAllTest() {
+ final ProcessCameraProviderHostApiImpl processCameraProviderHostApi =
+ new ProcessCameraProviderHostApiImpl(mockBinaryMessenger, testInstanceManager, context);
+
+ testInstanceManager.addDartCreatedInstance(processCameraProvider, 0);
+
+ processCameraProviderHostApi.unbindAll(0L);
+ verify(processCameraProvider).unbindAll();
+ }
+
+ @Test
+ public void flutterApiCreateTest() {
+ final ProcessCameraProviderFlutterApiImpl spyFlutterApi =
+ spy(new ProcessCameraProviderFlutterApiImpl(mockBinaryMessenger, testInstanceManager));
+
+ spyFlutterApi.create(processCameraProvider, reply -> {});
+
+ final long identifier =
+ Objects.requireNonNull(
+ testInstanceManager.getIdentifierForStrongReference(processCameraProvider));
+ verify(spyFlutterApi).create(eq(identifier), any());
+ }
+}
diff --git a/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/SystemServicesTest.java b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/SystemServicesTest.java
new file mode 100644
index 0000000..eb36c45
--- /dev/null
+++ b/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/SystemServicesTest.java
@@ -0,0 +1,138 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camerax;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugins.camerax.CameraPermissionsManager.PermissionsRegistry;
+import io.flutter.plugins.camerax.CameraPermissionsManager.ResultCallback;
+import io.flutter.plugins.camerax.DeviceOrientationManager.DeviceOrientationChangeCallback;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CameraPermissionsErrorData;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.Result;
+import io.flutter.plugins.camerax.GeneratedCameraXLibrary.SystemServicesFlutterApi.Reply;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+
+public class SystemServicesTest {
+ @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
+
+ @Mock public BinaryMessenger mockBinaryMessenger;
+ @Mock public InstanceManager mockInstanceManager;
+
+ @Test
+ public void requestCameraPermissionsTest() {
+ final SystemServicesHostApiImpl systemServicesHostApi =
+ new SystemServicesHostApiImpl(mockBinaryMessenger, mockInstanceManager);
+ final CameraXProxy mockCameraXProxy = mock(CameraXProxy.class);
+ final CameraPermissionsManager mockCameraPermissionsManager =
+ mock(CameraPermissionsManager.class);
+ final Activity mockActivity = mock(Activity.class);
+ final PermissionsRegistry mockPermissionsRegistry = mock(PermissionsRegistry.class);
+ final Result<CameraPermissionsErrorData> mockResult = mock(Result.class);
+ final Boolean enableAudio = false;
+
+ systemServicesHostApi.cameraXProxy = mockCameraXProxy;
+ systemServicesHostApi.setActivity(mockActivity);
+ systemServicesHostApi.setPermissionsRegistry(mockPermissionsRegistry);
+ when(mockCameraXProxy.createCameraPermissionsManager())
+ .thenReturn(mockCameraPermissionsManager);
+
+ final ArgumentCaptor<ResultCallback> resultCallbackCaptor =
+ ArgumentCaptor.forClass(ResultCallback.class);
+
+ systemServicesHostApi.requestCameraPermissions(enableAudio, mockResult);
+
+ // Test camera permissions are requested.
+ verify(mockCameraPermissionsManager)
+ .requestPermissions(
+ eq(mockActivity),
+ eq(mockPermissionsRegistry),
+ eq(enableAudio),
+ resultCallbackCaptor.capture());
+
+ ResultCallback resultCallback = (ResultCallback) resultCallbackCaptor.getValue();
+
+ // Test no error data is sent upon permissions request success.
+ resultCallback.onResult(null, null);
+ verify(mockResult).success(null);
+
+ // Test expected error data is sent upon permissions request failure.
+ final String testErrorCode = "TestErrorCode";
+ final String testErrorDescription = "Test error description.";
+
+ final ArgumentCaptor<CameraPermissionsErrorData> cameraPermissionsErrorDataCaptor =
+ ArgumentCaptor.forClass(CameraPermissionsErrorData.class);
+
+ resultCallback.onResult(testErrorCode, testErrorDescription);
+ verify(mockResult, times(2)).success(cameraPermissionsErrorDataCaptor.capture());
+
+ CameraPermissionsErrorData cameraPermissionsErrorData =
+ cameraPermissionsErrorDataCaptor.getValue();
+ assertEquals(cameraPermissionsErrorData.getErrorCode(), testErrorCode);
+ assertEquals(cameraPermissionsErrorData.getDescription(), testErrorDescription);
+ }
+
+ @Test
+ public void deviceOrientationChangeTest() {
+ final SystemServicesHostApiImpl systemServicesHostApi =
+ new SystemServicesHostApiImpl(mockBinaryMessenger, mockInstanceManager);
+ final CameraXProxy mockCameraXProxy = mock(CameraXProxy.class);
+ final Activity mockActivity = mock(Activity.class);
+ final DeviceOrientationManager mockDeviceOrientationManager =
+ mock(DeviceOrientationManager.class);
+ final Boolean isFrontFacing = true;
+ final int sensorOrientation = 90;
+
+ SystemServicesFlutterApiImpl systemServicesFlutterApi =
+ mock(SystemServicesFlutterApiImpl.class);
+ systemServicesHostApi.systemServicesFlutterApi = systemServicesFlutterApi;
+
+ systemServicesHostApi.cameraXProxy = mockCameraXProxy;
+ systemServicesHostApi.setActivity(mockActivity);
+ when(mockCameraXProxy.createDeviceOrientationManager(
+ eq(mockActivity),
+ eq(isFrontFacing),
+ eq(sensorOrientation),
+ any(DeviceOrientationChangeCallback.class)))
+ .thenReturn(mockDeviceOrientationManager);
+
+ final ArgumentCaptor<DeviceOrientationChangeCallback> deviceOrientationChangeCallbackCaptor =
+ ArgumentCaptor.forClass(DeviceOrientationChangeCallback.class);
+
+ systemServicesHostApi.startListeningForDeviceOrientationChange(
+ isFrontFacing, Long.valueOf(sensorOrientation));
+
+ // Test callback method defined in Flutter API is called when device orientation changes.
+ verify(mockCameraXProxy)
+ .createDeviceOrientationManager(
+ eq(mockActivity),
+ eq(isFrontFacing),
+ eq(sensorOrientation),
+ deviceOrientationChangeCallbackCaptor.capture());
+ DeviceOrientationChangeCallback deviceOrientationChangeCallback =
+ deviceOrientationChangeCallbackCaptor.getValue();
+
+ deviceOrientationChangeCallback.onChange(DeviceOrientation.PORTRAIT_DOWN);
+ verify(systemServicesFlutterApi)
+ .sendDeviceOrientationChangedEvent(
+ eq(DeviceOrientation.PORTRAIT_DOWN.toString()), any(Reply.class));
+
+ // Test that the DeviceOrientationManager starts listening for device orientation changes.
+ verify(mockDeviceOrientationManager).start();
+ }
+}
diff --git a/packages/camera/camera_android_camerax/example/README.md b/packages/camera/camera_android_camerax/example/README.md
new file mode 100644
index 0000000..96b8bb1
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/README.md
@@ -0,0 +1,9 @@
+# Platform Implementation Test App
+
+This is a test app for manual testing and automated integration testing
+of this platform implementation. It is not intended to demonstrate actual use of
+this package, since the intent is that plugin clients use the app-facing
+package.
+
+Unless you are making changes to this implementation package, this example is
+very unlikely to be relevant.
diff --git a/packages/camera/camera_android_camerax/example/android/app/build.gradle b/packages/camera/camera_android_camerax/example/android/app/build.gradle
new file mode 100644
index 0000000..0c0cbcd
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/build.gradle
@@ -0,0 +1,66 @@
+def localProperties = new Properties()
+def localPropertiesFile = rootProject.file('local.properties')
+if (localPropertiesFile.exists()) {
+ localPropertiesFile.withReader('UTF-8') { reader ->
+ localProperties.load(reader)
+ }
+}
+
+def flutterRoot = localProperties.getProperty('flutter.sdk')
+if (flutterRoot == null) {
+ throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
+}
+
+def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
+if (flutterVersionCode == null) {
+ flutterVersionCode = '1'
+}
+
+def flutterVersionName = localProperties.getProperty('flutter.versionName')
+if (flutterVersionName == null) {
+ flutterVersionName = '1.0'
+}
+
+apply plugin: 'com.android.application'
+apply plugin: 'kotlin-android'
+apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
+
+android {
+ compileSdkVersion 33
+ ndkVersion flutter.ndkVersion
+
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+
+ defaultConfig {
+ // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
+ applicationId "io.flutter.plugins.cameraxexample"
+ // You can update the following values to match your application needs.
+ // For more information, see: https://docs.flutter.dev/deployment/android#reviewing-the-build-configuration.
+ minSdkVersion 21
+ targetSdkVersion 30
+ versionCode flutterVersionCode.toInteger()
+ versionName flutterVersionName
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ }
+
+ buildTypes {
+ release {
+ // TODO: Add your own signing config for the release build.
+ // Signing with the debug keys for now, so `flutter run --release` works.
+ signingConfig signingConfigs.debug
+ }
+ }
+}
+
+flutter {
+ source '../..'
+}
+
+dependencies {
+ testImplementation 'junit:junit:4.13.2'
+ androidTestImplementation 'androidx.test:runner:1.2.0'
+ api 'androidx.test:core:1.2.0'
+}
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java b/packages/camera/camera_android_camerax/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java
new file mode 100644
index 0000000..0f4298d
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface DartIntegrationTest {}
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/androidTest/java/io/flutter/plugins/cameraxexample/MainActivityTest.java b/packages/camera/camera_android_camerax/example/android/app/src/androidTest/java/io/flutter/plugins/cameraxexample/MainActivityTest.java
new file mode 100644
index 0000000..8bcb398
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/androidTest/java/io/flutter/plugins/cameraxexample/MainActivityTest.java
@@ -0,0 +1,17 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.cameraxexample;
+
+import androidx.test.rule.ActivityTestRule;
+import dev.flutter.plugins.integration_test.FlutterTestRunner;
+import io.flutter.plugins.DartIntegrationTest;
+import org.junit.Rule;
+import org.junit.runner.RunWith;
+
+@DartIntegrationTest
+@RunWith(FlutterTestRunner.class)
+public class MainActivityTest {
+ @Rule public ActivityTestRule<MainActivity> rule = new ActivityTestRule<>(MainActivity.class);
+}
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/debug/AndroidManifest.xml b/packages/camera/camera_android_camerax/example/android/app/src/debug/AndroidManifest.xml
new file mode 100644
index 0000000..093e904
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/debug/AndroidManifest.xml
@@ -0,0 +1,8 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="io.flutter.plugins.cameraxexample">
+ <!-- The INTERNET permission is required for development. Specifically,
+ the Flutter tool needs it to communicate with the running application
+ to allow setting breakpoints, to provide hot reload, etc.
+ -->
+ <uses-permission android:name="android.permission.INTERNET"/>
+</manifest>
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/AndroidManifest.xml b/packages/camera/camera_android_camerax/example/android/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..82b92e2
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/AndroidManifest.xml
@@ -0,0 +1,33 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="io.flutter.plugins.cameraxexample">
+ <application
+ android:label="camera_android_camerax_example"
+ android:name="${applicationName}"
+ android:icon="@mipmap/ic_launcher">
+ <activity
+ android:name=".MainActivity"
+ android:launchMode="singleTop"
+ android:theme="@style/LaunchTheme"
+ android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
+ android:hardwareAccelerated="true"
+ android:windowSoftInputMode="adjustResize">
+ <!-- Specifies an Android theme to apply to this Activity as soon as
+ the Android process has started. This theme is visible to the user
+ while the Flutter UI initializes. After that, this theme continues
+ to determine the Window background behind the Flutter UI. -->
+ <meta-data
+ android:name="io.flutter.embedding.android.NormalTheme"
+ android:resource="@style/NormalTheme"
+ />
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+ </activity>
+ <!-- Don't delete the meta-data below.
+ This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
+ <meta-data
+ android:name="flutterEmbedding"
+ android:value="2" />
+ </application>
+</manifest>
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/java/io/flutter/plugins/cameraxexample/MainActivity.java b/packages/camera/camera_android_camerax/example/android/app/src/main/java/io/flutter/plugins/cameraxexample/MainActivity.java
new file mode 100644
index 0000000..5e2a10f
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/java/io/flutter/plugins/cameraxexample/MainActivity.java
@@ -0,0 +1,9 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.cameraxexample;
+
+import io.flutter.embedding.android.FlutterActivity;
+
+public class MainActivity extends FlutterActivity {}
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/drawable-v21/launch_background.xml b/packages/camera/camera_android_camerax/example/android/app/src/main/res/drawable-v21/launch_background.xml
new file mode 100644
index 0000000..f74085f
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/drawable-v21/launch_background.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Modify this file to customize your launch splash screen -->
+<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
+ <item android:drawable="?android:colorBackground" />
+
+ <!-- You can insert your own image assets here -->
+ <!-- <item>
+ <bitmap
+ android:gravity="center"
+ android:src="@mipmap/launch_image" />
+ </item> -->
+</layer-list>
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/drawable/launch_background.xml b/packages/camera/camera_android_camerax/example/android/app/src/main/res/drawable/launch_background.xml
new file mode 100644
index 0000000..304732f
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/drawable/launch_background.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Modify this file to customize your launch splash screen -->
+<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
+ <item android:drawable="@android:color/white" />
+
+ <!-- You can insert your own image assets here -->
+ <!-- <item>
+ <bitmap
+ android:gravity="center"
+ android:src="@mipmap/launch_image" />
+ </item> -->
+</layer-list>
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..db77bb4
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..17987b7
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..09d4391
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..d5f1c8d
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..4d6372e
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
Binary files differ
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/values-night/styles.xml b/packages/camera/camera_android_camerax/example/android/app/src/main/res/values-night/styles.xml
new file mode 100644
index 0000000..06952be
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/values-night/styles.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is on -->
+ <style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
+ <!-- Show a splash screen on the activity. Automatically removed when
+ the Flutter engine draws its first frame -->
+ <item name="android:windowBackground">@drawable/launch_background</item>
+ </style>
+ <!-- Theme applied to the Android Window as soon as the process has started.
+ This theme determines the color of the Android Window while your
+ Flutter UI initializes, as well as behind your Flutter UI while its
+ running.
+
+ This Theme is only used starting with V2 of Flutter's Android embedding. -->
+ <style name="NormalTheme" parent="@android:style/Theme.Black.NoTitleBar">
+ <item name="android:windowBackground">?android:colorBackground</item>
+ </style>
+</resources>
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/main/res/values/styles.xml b/packages/camera/camera_android_camerax/example/android/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..cb1ef88
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/main/res/values/styles.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is off -->
+ <style name="LaunchTheme" parent="@android:style/Theme.Light.NoTitleBar">
+ <!-- Show a splash screen on the activity. Automatically removed when
+ the Flutter engine draws its first frame -->
+ <item name="android:windowBackground">@drawable/launch_background</item>
+ </style>
+ <!-- Theme applied to the Android Window as soon as the process has started.
+ This theme determines the color of the Android Window while your
+ Flutter UI initializes, as well as behind your Flutter UI while its
+ running.
+
+ This Theme is only used starting with V2 of Flutter's Android embedding. -->
+ <style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
+ <item name="android:windowBackground">?android:colorBackground</item>
+ </style>
+</resources>
diff --git a/packages/camera/camera_android_camerax/example/android/app/src/profile/AndroidManifest.xml b/packages/camera/camera_android_camerax/example/android/app/src/profile/AndroidManifest.xml
new file mode 100644
index 0000000..093e904
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/app/src/profile/AndroidManifest.xml
@@ -0,0 +1,8 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="io.flutter.plugins.cameraxexample">
+ <!-- The INTERNET permission is required for development. Specifically,
+ the Flutter tool needs it to communicate with the running application
+ to allow setting breakpoints, to provide hot reload, etc.
+ -->
+ <uses-permission android:name="android.permission.INTERNET"/>
+</manifest>
diff --git a/packages/camera/camera_android_camerax/example/android/build.gradle b/packages/camera/camera_android_camerax/example/android/build.gradle
new file mode 100644
index 0000000..8640e4d
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/build.gradle
@@ -0,0 +1,31 @@
+buildscript {
+ ext.kotlin_version = '1.8.0'
+ repositories {
+ google()
+ mavenCentral()
+ }
+
+ dependencies {
+ classpath 'com.android.tools.build:gradle:7.2.2'
+ classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
+ }
+}
+
+allprojects {
+ repositories {
+ google()
+ mavenCentral()
+ }
+}
+
+rootProject.buildDir = '../build'
+subprojects {
+ project.buildDir = "${rootProject.buildDir}/${project.name}"
+}
+subprojects {
+ project.evaluationDependsOn(':app')
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/packages/camera/camera_android_camerax/example/android/gradle.properties b/packages/camera/camera_android_camerax/example/android/gradle.properties
new file mode 100644
index 0000000..598d13f
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/gradle.properties
@@ -0,0 +1,3 @@
+org.gradle.jvmargs=-Xmx4G
+android.useAndroidX=true
+android.enableJetifier=true
diff --git a/packages/camera/camera_android_camerax/example/android/gradle/wrapper/gradle-wrapper.properties b/packages/camera/camera_android_camerax/example/android/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..3c472b9
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-all.zip
diff --git a/packages/camera/camera_android_camerax/example/android/settings.gradle b/packages/camera/camera_android_camerax/example/android/settings.gradle
new file mode 100644
index 0000000..44e62bc
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/android/settings.gradle
@@ -0,0 +1,11 @@
+include ':app'
+
+def localPropertiesFile = new File(rootProject.projectDir, "local.properties")
+def properties = new Properties()
+
+assert localPropertiesFile.exists()
+localPropertiesFile.withReader("UTF-8") { reader -> properties.load(reader) }
+
+def flutterSdkPath = properties.getProperty("flutter.sdk")
+assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
+apply from: "$flutterSdkPath/packages/flutter_tools/gradle/app_plugin_loader.gradle"
diff --git a/packages/camera/camera_android_camerax/example/integration_test/integration_test.dart b/packages/camera/camera_android_camerax/example/integration_test/integration_test.dart
new file mode 100644
index 0000000..b05d14a
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/integration_test/integration_test.dart
@@ -0,0 +1,28 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android_camerax/camera_android_camerax.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ setUpAll(() async {
+ CameraPlatform.instance = AndroidCameraCameraX();
+ });
+
+ testWidgets('availableCameras only supports valid back or front cameras',
+ (WidgetTester tester) async {
+ final List<CameraDescription> availableCameras =
+ await CameraPlatform.instance.availableCameras();
+
+ for (final CameraDescription cameraDescription in availableCameras) {
+ expect(
+ cameraDescription.lensDirection, isNot(CameraLensDirection.external));
+ expect(cameraDescription.sensorOrientation, anyOf(0, 90, 180, 270));
+ }
+ });
+}
diff --git a/packages/camera/camera_android_camerax/example/lib/camera_controller.dart b/packages/camera/camera_android_camerax/example/lib/camera_controller.dart
new file mode 100644
index 0000000..b1b5e9d
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/lib/camera_controller.dart
@@ -0,0 +1,957 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:collection';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+import 'camera_image.dart';
+
+/// Signature for a callback receiving the a camera image.
+///
+/// This is used by [CameraController.startImageStream].
+// TODO(stuartmorgan): Fix this naming the next time there's a breaking change
+// to this package.
+// ignore: camel_case_types
+typedef onLatestImageAvailable = Function(CameraImage image);
+
+/// Completes with a list of available cameras.
+///
+/// May throw a [CameraException].
+Future<List<CameraDescription>> availableCameras() async {
+ return CameraPlatform.instance.availableCameras();
+}
+
+// TODO(stuartmorgan): Remove this once the package requires 2.10, where the
+// dart:async `unawaited` accepts a nullable future.
+void _unawaited(Future<void>? future) {}
+
+/// The state of a [CameraController].
+class CameraValue {
+ /// Creates a new camera controller state.
+ const CameraValue({
+ required this.isInitialized,
+ this.errorDescription,
+ this.previewSize,
+ required this.isRecordingVideo,
+ required this.isTakingPicture,
+ required this.isStreamingImages,
+ required bool isRecordingPaused,
+ required this.flashMode,
+ required this.exposureMode,
+ required this.focusMode,
+ required this.exposurePointSupported,
+ required this.focusPointSupported,
+ required this.deviceOrientation,
+ this.lockedCaptureOrientation,
+ this.recordingOrientation,
+ this.isPreviewPaused = false,
+ this.previewPauseOrientation,
+ }) : _isRecordingPaused = isRecordingPaused;
+
+ /// Creates a new camera controller state for an uninitialized controller.
+ const CameraValue.uninitialized()
+ : this(
+ isInitialized: false,
+ isRecordingVideo: false,
+ isTakingPicture: false,
+ isStreamingImages: false,
+ isRecordingPaused: false,
+ flashMode: FlashMode.auto,
+ exposureMode: ExposureMode.auto,
+ exposurePointSupported: false,
+ focusMode: FocusMode.auto,
+ focusPointSupported: false,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ isPreviewPaused: false,
+ );
+
+ /// True after [CameraController.initialize] has completed successfully.
+ final bool isInitialized;
+
+ /// True when a picture capture request has been sent but as not yet returned.
+ final bool isTakingPicture;
+
+ /// True when the camera is recording (not the same as previewing).
+ final bool isRecordingVideo;
+
+ /// True when images from the camera are being streamed.
+ final bool isStreamingImages;
+
+ final bool _isRecordingPaused;
+
+ /// True when the preview widget has been paused manually.
+ final bool isPreviewPaused;
+
+ /// Set to the orientation the preview was paused in, if it is currently paused.
+ final DeviceOrientation? previewPauseOrientation;
+
+ /// True when camera [isRecordingVideo] and recording is paused.
+ bool get isRecordingPaused => isRecordingVideo && _isRecordingPaused;
+
+ /// Description of an error state.
+ ///
+ /// This is null while the controller is not in an error state.
+ /// When [hasError] is true this contains the error description.
+ final String? errorDescription;
+
+ /// The size of the preview in pixels.
+ ///
+ /// Is `null` until [isInitialized] is `true`.
+ final Size? previewSize;
+
+ /// Convenience getter for `previewSize.width / previewSize.height`.
+ ///
+ /// Can only be called when [initialize] is done.
+ double get aspectRatio => previewSize!.width / previewSize!.height;
+
+ /// Whether the controller is in an error state.
+ ///
+ /// When true [errorDescription] describes the error.
+ bool get hasError => errorDescription != null;
+
+ /// The flash mode the camera is currently set to.
+ final FlashMode flashMode;
+
+ /// The exposure mode the camera is currently set to.
+ final ExposureMode exposureMode;
+
+ /// The focus mode the camera is currently set to.
+ final FocusMode focusMode;
+
+ /// Whether setting the exposure point is supported.
+ final bool exposurePointSupported;
+
+ /// Whether setting the focus point is supported.
+ final bool focusPointSupported;
+
+ /// The current device UI orientation.
+ final DeviceOrientation deviceOrientation;
+
+ /// The currently locked capture orientation.
+ final DeviceOrientation? lockedCaptureOrientation;
+
+ /// Whether the capture orientation is currently locked.
+ bool get isCaptureOrientationLocked => lockedCaptureOrientation != null;
+
+ /// The orientation of the currently running video recording.
+ final DeviceOrientation? recordingOrientation;
+
+ /// Creates a modified copy of the object.
+ ///
+ /// Explicitly specified fields get the specified value, all other fields get
+ /// the same value of the current object.
+ CameraValue copyWith({
+ bool? isInitialized,
+ bool? isRecordingVideo,
+ bool? isTakingPicture,
+ bool? isStreamingImages,
+ String? errorDescription,
+ Size? previewSize,
+ bool? isRecordingPaused,
+ FlashMode? flashMode,
+ ExposureMode? exposureMode,
+ FocusMode? focusMode,
+ bool? exposurePointSupported,
+ bool? focusPointSupported,
+ DeviceOrientation? deviceOrientation,
+ Optional<DeviceOrientation>? lockedCaptureOrientation,
+ Optional<DeviceOrientation>? recordingOrientation,
+ bool? isPreviewPaused,
+ Optional<DeviceOrientation>? previewPauseOrientation,
+ }) {
+ return CameraValue(
+ isInitialized: isInitialized ?? this.isInitialized,
+ errorDescription: errorDescription,
+ previewSize: previewSize ?? this.previewSize,
+ isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo,
+ isTakingPicture: isTakingPicture ?? this.isTakingPicture,
+ isStreamingImages: isStreamingImages ?? this.isStreamingImages,
+ isRecordingPaused: isRecordingPaused ?? _isRecordingPaused,
+ flashMode: flashMode ?? this.flashMode,
+ exposureMode: exposureMode ?? this.exposureMode,
+ focusMode: focusMode ?? this.focusMode,
+ exposurePointSupported:
+ exposurePointSupported ?? this.exposurePointSupported,
+ focusPointSupported: focusPointSupported ?? this.focusPointSupported,
+ deviceOrientation: deviceOrientation ?? this.deviceOrientation,
+ lockedCaptureOrientation: lockedCaptureOrientation == null
+ ? this.lockedCaptureOrientation
+ : lockedCaptureOrientation.orNull,
+ recordingOrientation: recordingOrientation == null
+ ? this.recordingOrientation
+ : recordingOrientation.orNull,
+ isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused,
+ previewPauseOrientation: previewPauseOrientation == null
+ ? this.previewPauseOrientation
+ : previewPauseOrientation.orNull,
+ );
+ }
+
+ @override
+ String toString() {
+ return '${objectRuntimeType(this, 'CameraValue')}('
+ 'isRecordingVideo: $isRecordingVideo, '
+ 'isInitialized: $isInitialized, '
+ 'errorDescription: $errorDescription, '
+ 'previewSize: $previewSize, '
+ 'isStreamingImages: $isStreamingImages, '
+ 'flashMode: $flashMode, '
+ 'exposureMode: $exposureMode, '
+ 'focusMode: $focusMode, '
+ 'exposurePointSupported: $exposurePointSupported, '
+ 'focusPointSupported: $focusPointSupported, '
+ 'deviceOrientation: $deviceOrientation, '
+ 'lockedCaptureOrientation: $lockedCaptureOrientation, '
+ 'recordingOrientation: $recordingOrientation, '
+ 'isPreviewPaused: $isPreviewPaused, '
+ 'previewPausedOrientation: $previewPauseOrientation)';
+ }
+}
+
+/// Controls a device camera.
+///
+/// Use [availableCameras] to get a list of available cameras.
+///
+/// Before using a [CameraController] a call to [initialize] must complete.
+///
+/// To show the camera preview on the screen use a [CameraPreview] widget.
+class CameraController extends ValueNotifier<CameraValue> {
+ /// Creates a new camera controller in an uninitialized state.
+ CameraController(
+ this.description,
+ this.resolutionPreset, {
+ this.enableAudio = true,
+ this.imageFormatGroup,
+ }) : super(const CameraValue.uninitialized());
+
+ /// The properties of the camera device controlled by this controller.
+ final CameraDescription description;
+
+ /// The resolution this controller is targeting.
+ ///
+ /// This resolution preset is not guaranteed to be available on the device,
+ /// if unavailable a lower resolution will be used.
+ ///
+ /// See also: [ResolutionPreset].
+ final ResolutionPreset resolutionPreset;
+
+ /// Whether to include audio when recording a video.
+ final bool enableAudio;
+
+ /// The [ImageFormatGroup] describes the output of the raw image format.
+ ///
+ /// When null the imageFormat will fallback to the platforms default.
+ final ImageFormatGroup? imageFormatGroup;
+
+ /// The id of a camera that hasn't been initialized.
+ @visibleForTesting
+ static const int kUninitializedCameraId = -1;
+ int _cameraId = kUninitializedCameraId;
+
+ bool _isDisposed = false;
+ StreamSubscription<CameraImageData>? _imageStreamSubscription;
+ FutureOr<bool>? _initCalled;
+ StreamSubscription<DeviceOrientationChangedEvent>?
+ _deviceOrientationSubscription;
+
+ /// Checks whether [CameraController.dispose] has completed successfully.
+ ///
+ /// This is a no-op when asserts are disabled.
+ void debugCheckIsDisposed() {
+ assert(_isDisposed);
+ }
+
+ /// The camera identifier with which the controller is associated.
+ int get cameraId => _cameraId;
+
+ /// Initializes the camera on the device.
+ ///
+ /// Throws a [CameraException] if the initialization fails.
+ Future<void> initialize() async {
+ if (_isDisposed) {
+ throw CameraException(
+ 'Disposed CameraController',
+ 'initialize was called on a disposed CameraController',
+ );
+ }
+ try {
+ final Completer<CameraInitializedEvent> initializeCompleter =
+ Completer<CameraInitializedEvent>();
+
+ _deviceOrientationSubscription = CameraPlatform.instance
+ .onDeviceOrientationChanged()
+ .listen((DeviceOrientationChangedEvent event) {
+ value = value.copyWith(
+ deviceOrientation: event.orientation,
+ );
+ });
+
+ _cameraId = await CameraPlatform.instance.createCamera(
+ description,
+ resolutionPreset,
+ enableAudio: enableAudio,
+ );
+
+ _unawaited(CameraPlatform.instance
+ .onCameraInitialized(_cameraId)
+ .first
+ .then((CameraInitializedEvent event) {
+ initializeCompleter.complete(event);
+ }));
+
+ await CameraPlatform.instance.initializeCamera(
+ _cameraId,
+ imageFormatGroup: imageFormatGroup ?? ImageFormatGroup.unknown,
+ );
+
+ value = value.copyWith(
+ isInitialized: true,
+ previewSize: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => Size(
+ event.previewWidth,
+ event.previewHeight,
+ )),
+ exposureMode: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.exposureMode),
+ focusMode: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.focusMode),
+ exposurePointSupported: await initializeCompleter.future.then(
+ (CameraInitializedEvent event) => event.exposurePointSupported),
+ focusPointSupported: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.focusPointSupported),
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+
+ _initCalled = true;
+ }
+
+ /// Prepare the capture session for video recording.
+ ///
+ /// Use of this method is optional, but it may be called for performance
+ /// reasons on iOS.
+ ///
+ /// Preparing audio can cause a minor delay in the CameraPreview view on iOS.
+ /// If video recording is intended, calling this early eliminates this delay
+ /// that would otherwise be experienced when video recording is started.
+ /// This operation is a no-op on Android and Web.
+ ///
+ /// Throws a [CameraException] if the prepare fails.
+ Future<void> prepareForVideoRecording() async {
+ await CameraPlatform.instance.prepareForVideoRecording();
+ }
+
+ /// Pauses the current camera preview
+ Future<void> pausePreview() async {
+ if (value.isPreviewPaused) {
+ return;
+ }
+ try {
+ await CameraPlatform.instance.pausePreview(_cameraId);
+ value = value.copyWith(
+ isPreviewPaused: true,
+ previewPauseOrientation: Optional<DeviceOrientation>.of(
+ value.lockedCaptureOrientation ?? value.deviceOrientation));
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Resumes the current camera preview
+ Future<void> resumePreview() async {
+ if (!value.isPreviewPaused) {
+ return;
+ }
+ try {
+ await CameraPlatform.instance.resumePreview(_cameraId);
+ value = value.copyWith(
+ isPreviewPaused: false,
+ previewPauseOrientation: const Optional<DeviceOrientation>.absent());
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Captures an image and returns the file where it was saved.
+ ///
+ /// Throws a [CameraException] if the capture fails.
+ Future<XFile> takePicture() async {
+ _throwIfNotInitialized('takePicture');
+ if (value.isTakingPicture) {
+ throw CameraException(
+ 'Previous capture has not returned yet.',
+ 'takePicture was called before the previous capture returned.',
+ );
+ }
+ try {
+ value = value.copyWith(isTakingPicture: true);
+ final XFile file = await CameraPlatform.instance.takePicture(_cameraId);
+ value = value.copyWith(isTakingPicture: false);
+ return file;
+ } on PlatformException catch (e) {
+ value = value.copyWith(isTakingPicture: false);
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Start streaming images from platform camera.
+ ///
+ /// Settings for capturing images on iOS and Android is set to always use the
+ /// latest image available from the camera and will drop all other images.
+ ///
+ /// When running continuously with [CameraPreview] widget, this function runs
+ /// best with [ResolutionPreset.low]. Running on [ResolutionPreset.high] can
+ /// have significant frame rate drops for [CameraPreview] on lower end
+ /// devices.
+ ///
+ /// Throws a [CameraException] if image streaming or video recording has
+ /// already started.
+ ///
+ /// The `startImageStream` method is only available on Android and iOS (other
+ /// platforms won't be supported in current setup).
+ ///
+ // TODO(bmparr): Add settings for resolution and fps.
+ Future<void> startImageStream(onLatestImageAvailable onAvailable) async {
+ assert(defaultTargetPlatform == TargetPlatform.android ||
+ defaultTargetPlatform == TargetPlatform.iOS);
+ _throwIfNotInitialized('startImageStream');
+ if (value.isRecordingVideo) {
+ throw CameraException(
+ 'A video recording is already started.',
+ 'startImageStream was called while a video is being recorded.',
+ );
+ }
+ if (value.isStreamingImages) {
+ throw CameraException(
+ 'A camera has started streaming images.',
+ 'startImageStream was called while a camera was streaming images.',
+ );
+ }
+
+ try {
+ _imageStreamSubscription = CameraPlatform.instance
+ .onStreamedFrameAvailable(_cameraId)
+ .listen((CameraImageData imageData) {
+ onAvailable(CameraImage.fromPlatformInterface(imageData));
+ });
+ value = value.copyWith(isStreamingImages: true);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Stop streaming images from platform camera.
+ ///
+ /// Throws a [CameraException] if image streaming was not started or video
+ /// recording was started.
+ ///
+ /// The `stopImageStream` method is only available on Android and iOS (other
+ /// platforms won't be supported in current setup).
+ Future<void> stopImageStream() async {
+ assert(defaultTargetPlatform == TargetPlatform.android ||
+ defaultTargetPlatform == TargetPlatform.iOS);
+ _throwIfNotInitialized('stopImageStream');
+ if (!value.isStreamingImages) {
+ throw CameraException(
+ 'No camera is streaming images',
+ 'stopImageStream was called when no camera is streaming images.',
+ );
+ }
+
+ try {
+ value = value.copyWith(isStreamingImages: false);
+ await _imageStreamSubscription?.cancel();
+ _imageStreamSubscription = null;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Start a video recording.
+ ///
+ /// You may optionally pass an [onAvailable] callback to also have the
+ /// video frames streamed to this callback.
+ ///
+ /// The video is returned as a [XFile] after calling [stopVideoRecording].
+ /// Throws a [CameraException] if the capture fails.
+ Future<void> startVideoRecording(
+ {onLatestImageAvailable? onAvailable}) async {
+ _throwIfNotInitialized('startVideoRecording');
+ if (value.isRecordingVideo) {
+ throw CameraException(
+ 'A video recording is already started.',
+ 'startVideoRecording was called when a recording is already started.',
+ );
+ }
+
+ Function(CameraImageData image)? streamCallback;
+ if (onAvailable != null) {
+ streamCallback = (CameraImageData imageData) {
+ onAvailable(CameraImage.fromPlatformInterface(imageData));
+ };
+ }
+
+ try {
+ await CameraPlatform.instance.startVideoCapturing(
+ VideoCaptureOptions(_cameraId, streamCallback: streamCallback));
+ value = value.copyWith(
+ isRecordingVideo: true,
+ isRecordingPaused: false,
+ recordingOrientation: Optional<DeviceOrientation>.of(
+ value.lockedCaptureOrientation ?? value.deviceOrientation),
+ isStreamingImages: onAvailable != null);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Stops the video recording and returns the file where it was saved.
+ ///
+ /// Throws a [CameraException] if the capture failed.
+ Future<XFile> stopVideoRecording() async {
+ _throwIfNotInitialized('stopVideoRecording');
+ if (!value.isRecordingVideo) {
+ throw CameraException(
+ 'No video is recording',
+ 'stopVideoRecording was called when no video is recording.',
+ );
+ }
+
+ if (value.isStreamingImages) {
+ stopImageStream();
+ }
+
+ try {
+ final XFile file =
+ await CameraPlatform.instance.stopVideoRecording(_cameraId);
+ value = value.copyWith(
+ isRecordingVideo: false,
+ recordingOrientation: const Optional<DeviceOrientation>.absent(),
+ );
+ return file;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Pause video recording.
+ ///
+ /// This feature is only available on iOS and Android sdk 24+.
+ Future<void> pauseVideoRecording() async {
+ _throwIfNotInitialized('pauseVideoRecording');
+ if (!value.isRecordingVideo) {
+ throw CameraException(
+ 'No video is recording',
+ 'pauseVideoRecording was called when no video is recording.',
+ );
+ }
+ try {
+ await CameraPlatform.instance.pauseVideoRecording(_cameraId);
+ value = value.copyWith(isRecordingPaused: true);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Resume video recording after pausing.
+ ///
+ /// This feature is only available on iOS and Android sdk 24+.
+ Future<void> resumeVideoRecording() async {
+ _throwIfNotInitialized('resumeVideoRecording');
+ if (!value.isRecordingVideo) {
+ throw CameraException(
+ 'No video is recording',
+ 'resumeVideoRecording was called when no video is recording.',
+ );
+ }
+ try {
+ await CameraPlatform.instance.resumeVideoRecording(_cameraId);
+ value = value.copyWith(isRecordingPaused: false);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Returns a widget showing a live camera preview.
+ Widget buildPreview() {
+ _throwIfNotInitialized('buildPreview');
+ try {
+ return CameraPlatform.instance.buildPreview(_cameraId);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Gets the maximum supported zoom level for the selected camera.
+ Future<double> getMaxZoomLevel() {
+ _throwIfNotInitialized('getMaxZoomLevel');
+ try {
+ return CameraPlatform.instance.getMaxZoomLevel(_cameraId);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Gets the minimum supported zoom level for the selected camera.
+ Future<double> getMinZoomLevel() {
+ _throwIfNotInitialized('getMinZoomLevel');
+ try {
+ return CameraPlatform.instance.getMinZoomLevel(_cameraId);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Set the zoom level for the selected camera.
+ ///
+ /// The supplied [zoom] value should be between 1.0 and the maximum supported
+ /// zoom level returned by the `getMaxZoomLevel`. Throws an `CameraException`
+ /// when an illegal zoom level is suplied.
+ Future<void> setZoomLevel(double zoom) {
+ _throwIfNotInitialized('setZoomLevel');
+ try {
+ return CameraPlatform.instance.setZoomLevel(_cameraId, zoom);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Sets the flash mode for taking pictures.
+ Future<void> setFlashMode(FlashMode mode) async {
+ try {
+ await CameraPlatform.instance.setFlashMode(_cameraId, mode);
+ value = value.copyWith(flashMode: mode);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Sets the exposure mode for taking pictures.
+ Future<void> setExposureMode(ExposureMode mode) async {
+ try {
+ await CameraPlatform.instance.setExposureMode(_cameraId, mode);
+ value = value.copyWith(exposureMode: mode);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Sets the exposure point for automatically determining the exposure value.
+ ///
+ /// Supplying a `null` value will reset the exposure point to its default
+ /// value.
+ Future<void> setExposurePoint(Offset? point) async {
+ if (point != null &&
+ (point.dx < 0 || point.dx > 1 || point.dy < 0 || point.dy > 1)) {
+ throw ArgumentError(
+ 'The values of point should be anywhere between (0,0) and (1,1).');
+ }
+
+ try {
+ await CameraPlatform.instance.setExposurePoint(
+ _cameraId,
+ point == null
+ ? null
+ : Point<double>(
+ point.dx,
+ point.dy,
+ ),
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Gets the minimum supported exposure offset for the selected camera in EV units.
+ Future<double> getMinExposureOffset() async {
+ _throwIfNotInitialized('getMinExposureOffset');
+ try {
+ return CameraPlatform.instance.getMinExposureOffset(_cameraId);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Gets the maximum supported exposure offset for the selected camera in EV units.
+ Future<double> getMaxExposureOffset() async {
+ _throwIfNotInitialized('getMaxExposureOffset');
+ try {
+ return CameraPlatform.instance.getMaxExposureOffset(_cameraId);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Gets the supported step size for exposure offset for the selected camera in EV units.
+ ///
+ /// Returns 0 when the camera supports using a free value without stepping.
+ Future<double> getExposureOffsetStepSize() async {
+ _throwIfNotInitialized('getExposureOffsetStepSize');
+ try {
+ return CameraPlatform.instance.getExposureOffsetStepSize(_cameraId);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Sets the exposure offset for the selected camera.
+ ///
+ /// The supplied [offset] value should be in EV units. 1 EV unit represents a
+ /// doubling in brightness. It should be between the minimum and maximum offsets
+ /// obtained through `getMinExposureOffset` and `getMaxExposureOffset` respectively.
+ /// Throws a `CameraException` when an illegal offset is supplied.
+ ///
+ /// When the supplied [offset] value does not align with the step size obtained
+ /// through `getExposureOffsetStepSize`, it will automatically be rounded to the nearest step.
+ ///
+ /// Returns the (rounded) offset value that was set.
+ Future<double> setExposureOffset(double offset) async {
+ _throwIfNotInitialized('setExposureOffset');
+ // Check if offset is in range
+ final List<double> range = await Future.wait(
+ <Future<double>>[getMinExposureOffset(), getMaxExposureOffset()]);
+ if (offset < range[0] || offset > range[1]) {
+ throw CameraException(
+ 'exposureOffsetOutOfBounds',
+ 'The provided exposure offset was outside the supported range for this device.',
+ );
+ }
+
+ // Round to the closest step if needed
+ final double stepSize = await getExposureOffsetStepSize();
+ if (stepSize > 0) {
+ final double inv = 1.0 / stepSize;
+ double roundedOffset = (offset * inv).roundToDouble() / inv;
+ if (roundedOffset > range[1]) {
+ roundedOffset = (offset * inv).floorToDouble() / inv;
+ } else if (roundedOffset < range[0]) {
+ roundedOffset = (offset * inv).ceilToDouble() / inv;
+ }
+ offset = roundedOffset;
+ }
+
+ try {
+ return CameraPlatform.instance.setExposureOffset(_cameraId, offset);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Locks the capture orientation.
+ ///
+ /// If [orientation] is omitted, the current device orientation is used.
+ Future<void> lockCaptureOrientation([DeviceOrientation? orientation]) async {
+ try {
+ await CameraPlatform.instance.lockCaptureOrientation(
+ _cameraId, orientation ?? value.deviceOrientation);
+ value = value.copyWith(
+ lockedCaptureOrientation: Optional<DeviceOrientation>.of(
+ orientation ?? value.deviceOrientation));
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Sets the focus mode for taking pictures.
+ Future<void> setFocusMode(FocusMode mode) async {
+ try {
+ await CameraPlatform.instance.setFocusMode(_cameraId, mode);
+ value = value.copyWith(focusMode: mode);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Unlocks the capture orientation.
+ Future<void> unlockCaptureOrientation() async {
+ try {
+ await CameraPlatform.instance.unlockCaptureOrientation(_cameraId);
+ value = value.copyWith(
+ lockedCaptureOrientation: const Optional<DeviceOrientation>.absent());
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Sets the focus point for automatically determining the focus value.
+ ///
+ /// Supplying a `null` value will reset the focus point to its default
+ /// value.
+ Future<void> setFocusPoint(Offset? point) async {
+ if (point != null &&
+ (point.dx < 0 || point.dx > 1 || point.dy < 0 || point.dy > 1)) {
+ throw ArgumentError(
+ 'The values of point should be anywhere between (0,0) and (1,1).');
+ }
+ try {
+ await CameraPlatform.instance.setFocusPoint(
+ _cameraId,
+ point == null
+ ? null
+ : Point<double>(
+ point.dx,
+ point.dy,
+ ),
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ /// Releases the resources of this camera.
+ @override
+ Future<void> dispose() async {
+ if (_isDisposed) {
+ return;
+ }
+ _unawaited(_deviceOrientationSubscription?.cancel());
+ _isDisposed = true;
+ super.dispose();
+ if (_initCalled != null) {
+ await _initCalled;
+ await CameraPlatform.instance.dispose(_cameraId);
+ }
+ }
+
+ void _throwIfNotInitialized(String functionName) {
+ if (!value.isInitialized) {
+ throw CameraException(
+ 'Uninitialized CameraController',
+ '$functionName() was called on an uninitialized CameraController.',
+ );
+ }
+ if (_isDisposed) {
+ throw CameraException(
+ 'Disposed CameraController',
+ '$functionName() was called on a disposed CameraController.',
+ );
+ }
+ }
+
+ @override
+ void removeListener(VoidCallback listener) {
+ // Prevent ValueListenableBuilder in CameraPreview widget from causing an
+ // exception to be thrown by attempting to remove its own listener after
+ // the controller has already been disposed.
+ if (!_isDisposed) {
+ super.removeListener(listener);
+ }
+ }
+}
+
+/// A value that might be absent.
+///
+/// Used to represent [DeviceOrientation]s that are optional but also able
+/// to be cleared.
+@immutable
+class Optional<T> extends IterableBase<T> {
+ /// Constructs an empty Optional.
+ const Optional.absent() : _value = null;
+
+ /// Constructs an Optional of the given [value].
+ ///
+ /// Throws [ArgumentError] if [value] is null.
+ Optional.of(T value) : _value = value {
+ // TODO(cbracken): Delete and make this ctor const once mixed-mode
+ // execution is no longer around.
+ ArgumentError.checkNotNull(value);
+ }
+
+ /// Constructs an Optional of the given [value].
+ ///
+ /// If [value] is null, returns [absent()].
+ const Optional.fromNullable(T? value) : _value = value;
+
+ final T? _value;
+
+ /// True when this optional contains a value.
+ bool get isPresent => _value != null;
+
+ /// True when this optional contains no value.
+ bool get isNotPresent => _value == null;
+
+ /// Gets the Optional value.
+ ///
+ /// Throws [StateError] if [value] is null.
+ T get value {
+ if (_value == null) {
+ throw StateError('value called on absent Optional.');
+ }
+ return _value!;
+ }
+
+ /// Executes a function if the Optional value is present.
+ void ifPresent(void Function(T value) ifPresent) {
+ if (isPresent) {
+ ifPresent(_value as T);
+ }
+ }
+
+ /// Executes a function if the Optional value is absent.
+ void ifAbsent(void Function() ifAbsent) {
+ if (!isPresent) {
+ ifAbsent();
+ }
+ }
+
+ /// Gets the Optional value with a default.
+ ///
+ /// The default is returned if the Optional is [absent()].
+ ///
+ /// Throws [ArgumentError] if [defaultValue] is null.
+ T or(T defaultValue) {
+ return _value ?? defaultValue;
+ }
+
+ /// Gets the Optional value, or `null` if there is none.
+ T? get orNull => _value;
+
+ /// Transforms the Optional value.
+ ///
+ /// If the Optional is [absent()], returns [absent()] without applying the transformer.
+ ///
+ /// The transformer must not return `null`. If it does, an [ArgumentError] is thrown.
+ Optional<S> transform<S>(S Function(T value) transformer) {
+ return _value == null
+ ? Optional<S>.absent()
+ : Optional<S>.of(transformer(_value as T));
+ }
+
+ /// Transforms the Optional value.
+ ///
+ /// If the Optional is [absent()], returns [absent()] without applying the transformer.
+ ///
+ /// Returns [absent()] if the transformer returns `null`.
+ Optional<S> transformNullable<S>(S? Function(T value) transformer) {
+ return _value == null
+ ? Optional<S>.absent()
+ : Optional<S>.fromNullable(transformer(_value as T));
+ }
+
+ @override
+ Iterator<T> get iterator =>
+ isPresent ? <T>[_value as T].iterator : Iterable<T>.empty().iterator;
+
+ /// Delegates to the underlying [value] hashCode.
+ @override
+ int get hashCode => _value.hashCode;
+
+ /// Delegates to the underlying [value] operator==.
+ @override
+ bool operator ==(Object o) => o is Optional<T> && o._value == _value;
+
+ @override
+ String toString() {
+ return _value == null
+ ? 'Optional { absent }'
+ : 'Optional { value: $_value }';
+ }
+}
diff --git a/packages/camera/camera_android_camerax/example/lib/camera_image.dart b/packages/camera/camera_android_camerax/example/lib/camera_image.dart
new file mode 100644
index 0000000..bfcad66
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/lib/camera_image.dart
@@ -0,0 +1,177 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+
+// TODO(stuartmorgan): Remove all of these classes in a breaking change, and
+// vend the platform interface versions directly. See
+// https://github.com/flutter/flutter/issues/104188
+
+/// A single color plane of image data.
+///
+/// The number and meaning of the planes in an image are determined by the
+/// format of the Image.
+class Plane {
+ Plane._fromPlatformInterface(CameraImagePlane plane)
+ : bytes = plane.bytes,
+ bytesPerPixel = plane.bytesPerPixel,
+ bytesPerRow = plane.bytesPerRow,
+ height = plane.height,
+ width = plane.width;
+
+ // Only used by the deprecated codepath that's kept to avoid breaking changes.
+ // Never called by the plugin itself.
+ Plane._fromPlatformData(Map<dynamic, dynamic> data)
+ : bytes = data['bytes'] as Uint8List,
+ bytesPerPixel = data['bytesPerPixel'] as int?,
+ bytesPerRow = data['bytesPerRow'] as int,
+ height = data['height'] as int?,
+ width = data['width'] as int?;
+
+ /// Bytes representing this plane.
+ final Uint8List bytes;
+
+ /// The distance between adjacent pixel samples on Android, in bytes.
+ ///
+ /// Will be `null` on iOS.
+ final int? bytesPerPixel;
+
+ /// The row stride for this color plane, in bytes.
+ final int bytesPerRow;
+
+ /// Height of the pixel buffer on iOS.
+ ///
+ /// Will be `null` on Android.
+ final int? height;
+
+ /// Width of the pixel buffer on iOS.
+ ///
+ /// Will be `null` on Android.
+ final int? width;
+}
+
+/// Describes how pixels are represented in an image.
+class ImageFormat {
+ ImageFormat._fromPlatformInterface(CameraImageFormat format)
+ : group = format.group,
+ raw = format.raw;
+
+ // Only used by the deprecated codepath that's kept to avoid breaking changes.
+ // Never called by the plugin itself.
+ ImageFormat._fromPlatformData(this.raw) : group = _asImageFormatGroup(raw);
+
+ /// Describes the format group the raw image format falls into.
+ final ImageFormatGroup group;
+
+ /// Raw version of the format from the Android or iOS platform.
+ ///
+ /// On Android, this is an `int` from class `android.graphics.ImageFormat`. See
+ /// https://developer.android.com/reference/android/graphics/ImageFormat
+ ///
+ /// On iOS, this is a `FourCharCode` constant from Pixel Format Identifiers.
+ /// See https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers?language=objc
+ final dynamic raw;
+}
+
+// Only used by the deprecated codepath that's kept to avoid breaking changes.
+// Never called by the plugin itself.
+ImageFormatGroup _asImageFormatGroup(dynamic rawFormat) {
+ if (defaultTargetPlatform == TargetPlatform.android) {
+ switch (rawFormat) {
+ // android.graphics.ImageFormat.YUV_420_888
+ case 35:
+ return ImageFormatGroup.yuv420;
+ // android.graphics.ImageFormat.JPEG
+ case 256:
+ return ImageFormatGroup.jpeg;
+ }
+ }
+
+ if (defaultTargetPlatform == TargetPlatform.iOS) {
+ switch (rawFormat) {
+ // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ case 875704438:
+ return ImageFormatGroup.yuv420;
+ // kCVPixelFormatType_32BGRA
+ case 1111970369:
+ return ImageFormatGroup.bgra8888;
+ }
+ }
+
+ return ImageFormatGroup.unknown;
+}
+
+/// A single complete image buffer from the platform camera.
+///
+/// This class allows for direct application access to the pixel data of an
+/// Image through one or more [Uint8List]. Each buffer is encapsulated in a
+/// [Plane] that describes the layout of the pixel data in that plane. The
+/// [CameraImage] is not directly usable as a UI resource.
+///
+/// Although not all image formats are planar on iOS, we treat 1-dimensional
+/// images as single planar images.
+class CameraImage {
+ /// Creates a [CameraImage] from the platform interface version.
+ CameraImage.fromPlatformInterface(CameraImageData data)
+ : format = ImageFormat._fromPlatformInterface(data.format),
+ height = data.height,
+ width = data.width,
+ planes = List<Plane>.unmodifiable(data.planes.map<Plane>(
+ (CameraImagePlane plane) => Plane._fromPlatformInterface(plane))),
+ lensAperture = data.lensAperture,
+ sensorExposureTime = data.sensorExposureTime,
+ sensorSensitivity = data.sensorSensitivity;
+
+ /// Creates a [CameraImage] from method channel data.
+ @Deprecated('Use fromPlatformInterface instead')
+ CameraImage.fromPlatformData(Map<dynamic, dynamic> data)
+ : format = ImageFormat._fromPlatformData(data['format']),
+ height = data['height'] as int,
+ width = data['width'] as int,
+ lensAperture = data['lensAperture'] as double?,
+ sensorExposureTime = data['sensorExposureTime'] as int?,
+ sensorSensitivity = data['sensorSensitivity'] as double?,
+ planes = List<Plane>.unmodifiable((data['planes'] as List<dynamic>)
+ .map<Plane>((dynamic planeData) =>
+ Plane._fromPlatformData(planeData as Map<dynamic, dynamic>)));
+
+ /// Format of the image provided.
+ ///
+ /// Determines the number of planes needed to represent the image, and
+ /// the general layout of the pixel data in each [Uint8List].
+ final ImageFormat format;
+
+ /// Height of the image in pixels.
+ ///
+ /// For formats where some color channels are subsampled, this is the height
+ /// of the largest-resolution plane.
+ final int height;
+
+ /// Width of the image in pixels.
+ ///
+ /// For formats where some color channels are subsampled, this is the width
+ /// of the largest-resolution plane.
+ final int width;
+
+ /// The pixel planes for this image.
+ ///
+ /// The number of planes is determined by the format of the image.
+ final List<Plane> planes;
+
+ /// The aperture settings for this image.
+ ///
+ /// Represented as an f-stop value.
+ final double? lensAperture;
+
+ /// The sensor exposure time for this image in nanoseconds.
+ final int? sensorExposureTime;
+
+ /// The sensor sensitivity in standard ISO arithmetic units.
+ final double? sensorSensitivity;
+}
diff --git a/packages/camera/camera_android_camerax/example/lib/camera_preview.dart b/packages/camera/camera_android_camerax/example/lib/camera_preview.dart
new file mode 100644
index 0000000..3baaaf8
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/lib/camera_preview.dart
@@ -0,0 +1,81 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+import 'camera_controller.dart';
+
+/// A widget showing a live camera preview.
+class CameraPreview extends StatelessWidget {
+ /// Creates a preview widget for the given camera controller.
+ const CameraPreview(this.controller, {super.key, this.child});
+
+ /// The controller for the camera that the preview is shown for.
+ final CameraController controller;
+
+ /// A widget to overlay on top of the camera preview
+ final Widget? child;
+
+ @override
+ Widget build(BuildContext context) {
+ return controller.value.isInitialized
+ ? ValueListenableBuilder<CameraValue>(
+ valueListenable: controller,
+ builder: (BuildContext context, Object? value, Widget? child) {
+ return AspectRatio(
+ aspectRatio: _isLandscape()
+ ? controller.value.aspectRatio
+ : (1 / controller.value.aspectRatio),
+ child: Stack(
+ fit: StackFit.expand,
+ children: <Widget>[
+ _wrapInRotatedBox(child: controller.buildPreview()),
+ child ?? Container(),
+ ],
+ ),
+ );
+ },
+ child: child,
+ )
+ : Container();
+ }
+
+ Widget _wrapInRotatedBox({required Widget child}) {
+ if (kIsWeb || defaultTargetPlatform != TargetPlatform.android) {
+ return child;
+ }
+
+ return RotatedBox(
+ quarterTurns: _getQuarterTurns(),
+ child: child,
+ );
+ }
+
+ bool _isLandscape() {
+ return <DeviceOrientation>[
+ DeviceOrientation.landscapeLeft,
+ DeviceOrientation.landscapeRight
+ ].contains(_getApplicableOrientation());
+ }
+
+ int _getQuarterTurns() {
+ final Map<DeviceOrientation, int> turns = <DeviceOrientation, int>{
+ DeviceOrientation.portraitUp: 0,
+ DeviceOrientation.landscapeRight: 1,
+ DeviceOrientation.portraitDown: 2,
+ DeviceOrientation.landscapeLeft: 3,
+ };
+ return turns[_getApplicableOrientation()]!;
+ }
+
+ DeviceOrientation _getApplicableOrientation() {
+ return controller.value.isRecordingVideo
+ ? controller.value.recordingOrientation!
+ : (controller.value.previewPauseOrientation ??
+ controller.value.lockedCaptureOrientation ??
+ controller.value.deviceOrientation);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/example/lib/main.dart b/packages/camera/camera_android_camerax/example/lib/main.dart
new file mode 100644
index 0000000..4fd9652
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/lib/main.dart
@@ -0,0 +1,1047 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:io';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/scheduler.dart';
+import 'package:video_player/video_player.dart';
+
+import 'camera_controller.dart';
+import 'camera_preview.dart';
+
+/// Camera example home widget.
+class CameraExampleHome extends StatefulWidget {
+ /// Default Constructor
+ const CameraExampleHome({super.key});
+
+ @override
+ State<CameraExampleHome> createState() {
+ return _CameraExampleHomeState();
+ }
+}
+
+/// Returns a suitable camera icon for [direction].
+IconData getCameraLensIcon(CameraLensDirection direction) {
+ switch (direction) {
+ case CameraLensDirection.back:
+ return Icons.camera_rear;
+ case CameraLensDirection.front:
+ return Icons.camera_front;
+ case CameraLensDirection.external:
+ return Icons.camera;
+ }
+ // This enum is from a different package, so a new value could be added at
+ // any time. The example should keep working if that happens.
+ // ignore: dead_code
+ return Icons.camera;
+}
+
+void _logError(String code, String? message) {
+ // ignore: avoid_print
+ print('Error: $code${message == null ? '' : '\nError Message: $message'}');
+}
+
+class _CameraExampleHomeState extends State<CameraExampleHome>
+ with WidgetsBindingObserver, TickerProviderStateMixin {
+ CameraController? controller;
+ XFile? imageFile;
+ XFile? videoFile;
+ VideoPlayerController? videoController;
+ VoidCallback? videoPlayerListener;
+ bool enableAudio = true;
+ double _minAvailableExposureOffset = 0.0;
+ double _maxAvailableExposureOffset = 0.0;
+ double _currentExposureOffset = 0.0;
+ late AnimationController _flashModeControlRowAnimationController;
+ late Animation<double> _flashModeControlRowAnimation;
+ late AnimationController _exposureModeControlRowAnimationController;
+ late Animation<double> _exposureModeControlRowAnimation;
+ late AnimationController _focusModeControlRowAnimationController;
+ late Animation<double> _focusModeControlRowAnimation;
+ double _minAvailableZoom = 1.0;
+ double _maxAvailableZoom = 1.0;
+ double _currentScale = 1.0;
+ double _baseScale = 1.0;
+
+ // Counting pointers (number of user fingers on screen)
+ int _pointers = 0;
+
+ @override
+ void initState() {
+ super.initState();
+ WidgetsBinding.instance.addObserver(this);
+
+ _flashModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _flashModeControlRowAnimation = CurvedAnimation(
+ parent: _flashModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ _exposureModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _exposureModeControlRowAnimation = CurvedAnimation(
+ parent: _exposureModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ _focusModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _focusModeControlRowAnimation = CurvedAnimation(
+ parent: _focusModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ }
+
+ @override
+ void dispose() {
+ WidgetsBinding.instance.removeObserver(this);
+ _flashModeControlRowAnimationController.dispose();
+ _exposureModeControlRowAnimationController.dispose();
+ super.dispose();
+ }
+
+ // #docregion AppLifecycle
+ @override
+ void didChangeAppLifecycleState(AppLifecycleState state) {
+ final CameraController? cameraController = controller;
+
+ // App state changed before we got the chance to initialize.
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ return;
+ }
+
+ if (state == AppLifecycleState.inactive) {
+ cameraController.dispose();
+ } else if (state == AppLifecycleState.resumed) {
+ onNewCameraSelected(cameraController.description);
+ }
+ }
+ // #enddocregion AppLifecycle
+
+ @override
+ Widget build(BuildContext context) {
+ return Scaffold(
+ appBar: AppBar(
+ title: const Text('Camera example'),
+ ),
+ body: Column(
+ children: <Widget>[
+ Expanded(
+ child: Container(
+ decoration: BoxDecoration(
+ color: Colors.black,
+ border: Border.all(
+ color:
+ controller != null && controller!.value.isRecordingVideo
+ ? Colors.redAccent
+ : Colors.grey,
+ width: 3.0,
+ ),
+ ),
+ child: Padding(
+ padding: const EdgeInsets.all(1.0),
+ child: Center(
+ child: _cameraPreviewWidget(),
+ ),
+ ),
+ ),
+ ),
+ _captureControlRowWidget(),
+ _modeControlRowWidget(),
+ Padding(
+ padding: const EdgeInsets.all(5.0),
+ child: Row(
+ children: <Widget>[
+ _cameraTogglesRowWidget(),
+ _thumbnailWidget(),
+ ],
+ ),
+ ),
+ ],
+ ),
+ );
+ }
+
+ /// Display the preview from the camera (or a message if the preview is not available).
+ Widget _cameraPreviewWidget() {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ return const Text(
+ 'Tap a camera',
+ style: TextStyle(
+ color: Colors.white,
+ fontSize: 24.0,
+ fontWeight: FontWeight.w900,
+ ),
+ );
+ } else {
+ return Listener(
+ onPointerDown: (_) => _pointers++,
+ onPointerUp: (_) => _pointers--,
+ child: CameraPreview(
+ controller!,
+ child: LayoutBuilder(
+ builder: (BuildContext context, BoxConstraints constraints) {
+ return GestureDetector(
+ behavior: HitTestBehavior.opaque,
+ onScaleStart: _handleScaleStart,
+ onScaleUpdate: _handleScaleUpdate,
+ onTapDown: (TapDownDetails details) =>
+ onViewFinderTap(details, constraints),
+ );
+ }),
+ ),
+ );
+ }
+ }
+
+ void _handleScaleStart(ScaleStartDetails details) {
+ _baseScale = _currentScale;
+ }
+
+ Future<void> _handleScaleUpdate(ScaleUpdateDetails details) async {
+ // When there are not exactly two fingers on screen don't scale
+ if (controller == null || _pointers != 2) {
+ return;
+ }
+
+ _currentScale = (_baseScale * details.scale)
+ .clamp(_minAvailableZoom, _maxAvailableZoom);
+
+ await controller!.setZoomLevel(_currentScale);
+ }
+
+ /// Display the thumbnail of the captured image or video.
+ Widget _thumbnailWidget() {
+ final VideoPlayerController? localVideoController = videoController;
+
+ return Expanded(
+ child: Align(
+ alignment: Alignment.centerRight,
+ child: Row(
+ mainAxisSize: MainAxisSize.min,
+ children: <Widget>[
+ if (localVideoController == null && imageFile == null)
+ Container()
+ else
+ SizedBox(
+ width: 64.0,
+ height: 64.0,
+ child: (localVideoController == null)
+ ? (
+ // The captured image on the web contains a network-accessible URL
+ // pointing to a location within the browser. It may be displayed
+ // either with Image.network or Image.memory after loading the image
+ // bytes to memory.
+ kIsWeb
+ ? Image.network(imageFile!.path)
+ : Image.file(File(imageFile!.path)))
+ : Container(
+ decoration: BoxDecoration(
+ border: Border.all(color: Colors.pink)),
+ child: Center(
+ child: AspectRatio(
+ aspectRatio:
+ localVideoController.value.size != null
+ ? localVideoController.value.aspectRatio
+ : 1.0,
+ child: VideoPlayer(localVideoController)),
+ ),
+ ),
+ ),
+ ],
+ ),
+ ),
+ );
+ }
+
+ /// Display a bar with buttons to change the flash and exposure modes
+ Widget _modeControlRowWidget() {
+ return Column(
+ children: <Widget>[
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.flash_on),
+ color: Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ // The exposure and focus mode are currently not supported on the web.
+ ...!kIsWeb
+ ? <Widget>[
+ IconButton(
+ icon: const Icon(Icons.exposure),
+ color: Colors.blue,
+ onPressed:
+ () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: const Icon(Icons.filter_center_focus),
+ color: Colors.blue,
+ onPressed:
+ () {}, // TODO(camsim99): Add functionality back here.
+ )
+ ]
+ : <Widget>[],
+ IconButton(
+ icon: Icon(enableAudio ? Icons.volume_up : Icons.volume_mute),
+ color: Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: Icon(controller?.value.isCaptureOrientationLocked ?? false
+ ? Icons.screen_lock_rotation
+ : Icons.screen_rotation),
+ color: Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ ],
+ ),
+ _flashModeControlRowWidget(),
+ _exposureModeControlRowWidget(),
+ _focusModeControlRowWidget(),
+ ],
+ );
+ }
+
+ Widget _flashModeControlRowWidget() {
+ return SizeTransition(
+ sizeFactor: _flashModeControlRowAnimation,
+ child: ClipRect(
+ child: Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.flash_off),
+ color: controller?.value.flashMode == FlashMode.off
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: const Icon(Icons.flash_auto),
+ color: controller?.value.flashMode == FlashMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: const Icon(Icons.flash_on),
+ color: controller?.value.flashMode == FlashMode.always
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: const Icon(Icons.highlight),
+ color: controller?.value.flashMode == FlashMode.torch
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ ],
+ ),
+ ),
+ );
+ }
+
+ Widget _exposureModeControlRowWidget() {
+ final ButtonStyle styleAuto = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.exposureMode == ExposureMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ );
+ final ButtonStyle styleLocked = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.exposureMode == ExposureMode.locked
+ ? Colors.orange
+ : Colors.blue,
+ );
+
+ return SizeTransition(
+ sizeFactor: _exposureModeControlRowAnimation,
+ child: ClipRect(
+ child: Container(
+ color: Colors.grey.shade50,
+ child: Column(
+ children: <Widget>[
+ const Center(
+ child: Text('Exposure Mode'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ TextButton(
+ style: styleAuto,
+ onPressed:
+ () {}, // TODO(camsim99): Add functionality back here.
+ onLongPress: () {
+ if (controller != null) {
+ controller!.setExposurePoint(null);
+ showInSnackBar('Resetting exposure point');
+ }
+ },
+ child: const Text('AUTO'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed:
+ () {}, // TODO(camsim99): Add functionality back here.
+ child: const Text('LOCKED'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed:
+ () {}, // TODO(camsim99): Add functionality back here.
+ child: const Text('RESET OFFSET'),
+ ),
+ ],
+ ),
+ const Center(
+ child: Text('Exposure Offset'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ Text(_minAvailableExposureOffset.toString()),
+ Slider(
+ value: _currentExposureOffset,
+ min: _minAvailableExposureOffset,
+ max: _maxAvailableExposureOffset,
+ label: _currentExposureOffset.toString(),
+ onChanged: _minAvailableExposureOffset ==
+ _maxAvailableExposureOffset
+ ? null
+ : setExposureOffset,
+ ),
+ Text(_maxAvailableExposureOffset.toString()),
+ ],
+ ),
+ ],
+ ),
+ ),
+ ),
+ );
+ }
+
+ Widget _focusModeControlRowWidget() {
+ final ButtonStyle styleAuto = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.focusMode == FocusMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ );
+ final ButtonStyle styleLocked = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.focusMode == FocusMode.locked
+ ? Colors.orange
+ : Colors.blue,
+ );
+
+ return SizeTransition(
+ sizeFactor: _focusModeControlRowAnimation,
+ child: ClipRect(
+ child: Container(
+ color: Colors.grey.shade50,
+ child: Column(
+ children: <Widget>[
+ const Center(
+ child: Text('Focus Mode'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ TextButton(
+ style: styleAuto,
+ onPressed:
+ () {}, // TODO(camsim99): Add functionality back here.
+ onLongPress: () {
+ if (controller != null) {
+ controller!.setFocusPoint(null);
+ }
+ showInSnackBar('Resetting focus point');
+ },
+ child: const Text('AUTO'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed:
+ () {}, // TODO(camsim99): Add functionality back here.
+ child: const Text('LOCKED'),
+ ),
+ ],
+ ),
+ ],
+ ),
+ ),
+ ),
+ );
+ }
+
+ /// Display the control bar with buttons to take pictures and record videos.
+ Widget _captureControlRowWidget() {
+ final CameraController? cameraController = controller;
+
+ return Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.camera_alt),
+ color: Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: const Icon(Icons.videocam),
+ color: Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: cameraController != null &&
+ cameraController.value.isRecordingPaused
+ ? const Icon(Icons.play_arrow)
+ : const Icon(Icons.pause),
+ color: Colors.blue,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: const Icon(Icons.stop),
+ color: Colors.red,
+ onPressed: () {}, // TODO(camsim99): Add functionality back here.
+ ),
+ IconButton(
+ icon: const Icon(Icons.pause_presentation),
+ color:
+ cameraController != null && cameraController.value.isPreviewPaused
+ ? Colors.red
+ : Colors.blue,
+ onPressed:
+ cameraController == null ? null : onPausePreviewButtonPressed,
+ ),
+ ],
+ );
+ }
+
+ /// Display a row of toggles to select the camera (or a message if no camera is available).
+ Widget _cameraTogglesRowWidget() {
+ final List<Widget> toggles = <Widget>[];
+
+ void onChanged(CameraDescription? description) {
+ if (description == null) {
+ return;
+ }
+
+ onNewCameraSelected(description);
+ }
+
+ if (_cameras.isEmpty) {
+ SchedulerBinding.instance.addPostFrameCallback((_) async {
+ showInSnackBar('No camera found.');
+ });
+ return const Text('None');
+ } else {
+ for (final CameraDescription cameraDescription in _cameras) {
+ toggles.add(
+ SizedBox(
+ width: 90.0,
+ child: RadioListTile<CameraDescription>(
+ title: Icon(getCameraLensIcon(cameraDescription.lensDirection)),
+ groupValue: controller?.description,
+ value: cameraDescription,
+ onChanged:
+ controller != null && controller!.value.isRecordingVideo
+ ? null
+ : onChanged,
+ ),
+ ),
+ );
+ }
+ }
+
+ return Row(children: toggles);
+ }
+
+ String timestamp() => DateTime.now().millisecondsSinceEpoch.toString();
+
+ void showInSnackBar(String message) {
+ ScaffoldMessenger.of(context)
+ .showSnackBar(SnackBar(content: Text(message)));
+ }
+
+ void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) {
+ if (controller == null) {
+ return;
+ }
+
+ final CameraController cameraController = controller!;
+
+ final Offset offset = Offset(
+ details.localPosition.dx / constraints.maxWidth,
+ details.localPosition.dy / constraints.maxHeight,
+ );
+ cameraController.setExposurePoint(offset);
+ cameraController.setFocusPoint(offset);
+ }
+
+ Future<void> onNewCameraSelected(CameraDescription cameraDescription) async {
+ final CameraController? oldController = controller;
+ if (oldController != null) {
+ // `controller` needs to be set to null before getting disposed,
+ // to avoid a race condition when we use the controller that is being
+ // disposed. This happens when camera permission dialog shows up,
+ // which triggers `didChangeAppLifecycleState`, which disposes and
+ // re-creates the controller.
+ controller = null;
+ await oldController.dispose();
+ }
+
+ final CameraController cameraController = CameraController(
+ cameraDescription,
+ kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium,
+ enableAudio: enableAudio,
+ imageFormatGroup: ImageFormatGroup.jpeg,
+ );
+
+ controller = cameraController;
+
+ // If the controller is updated then update the UI.
+ cameraController.addListener(() {
+ if (mounted) {
+ setState(() {});
+ }
+ if (cameraController.value.hasError) {
+ showInSnackBar(
+ 'Camera error ${cameraController.value.errorDescription}');
+ }
+ });
+
+ try {
+ await cameraController.initialize();
+ await Future.wait(<Future<Object?>>[
+ // The exposure mode is currently not supported on the web.
+ ...!kIsWeb
+ ? <Future<Object?>>[
+ cameraController.getMinExposureOffset().then(
+ (double value) => _minAvailableExposureOffset = value),
+ cameraController
+ .getMaxExposureOffset()
+ .then((double value) => _maxAvailableExposureOffset = value)
+ ]
+ : <Future<Object?>>[],
+ cameraController
+ .getMaxZoomLevel()
+ .then((double value) => _maxAvailableZoom = value),
+ cameraController
+ .getMinZoomLevel()
+ .then((double value) => _minAvailableZoom = value),
+ ]);
+ } on CameraException catch (e) {
+ switch (e.code) {
+ case 'CameraAccessDenied':
+ showInSnackBar('You have denied camera access.');
+ break;
+ case 'CameraAccessDeniedWithoutPrompt':
+ // iOS only
+ showInSnackBar('Please go to Settings app to enable camera access.');
+ break;
+ case 'CameraAccessRestricted':
+ // iOS only
+ showInSnackBar('Camera access is restricted.');
+ break;
+ case 'AudioAccessDenied':
+ showInSnackBar('You have denied audio access.');
+ break;
+ case 'AudioAccessDeniedWithoutPrompt':
+ // iOS only
+ showInSnackBar('Please go to Settings app to enable audio access.');
+ break;
+ case 'AudioAccessRestricted':
+ // iOS only
+ showInSnackBar('Audio access is restricted.');
+ break;
+ default:
+ _showCameraException(e);
+ break;
+ }
+ }
+
+ if (mounted) {
+ setState(() {});
+ }
+ }
+
+ void onTakePictureButtonPressed() {
+ takePicture().then((XFile? file) {
+ if (mounted) {
+ setState(() {
+ imageFile = file;
+ videoController?.dispose();
+ videoController = null;
+ });
+ if (file != null) {
+ showInSnackBar('Picture saved to ${file.path}');
+ }
+ }
+ });
+ }
+
+ void onFlashModeButtonPressed() {
+ if (_flashModeControlRowAnimationController.value == 1) {
+ _flashModeControlRowAnimationController.reverse();
+ } else {
+ _flashModeControlRowAnimationController.forward();
+ _exposureModeControlRowAnimationController.reverse();
+ _focusModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onExposureModeButtonPressed() {
+ if (_exposureModeControlRowAnimationController.value == 1) {
+ _exposureModeControlRowAnimationController.reverse();
+ } else {
+ _exposureModeControlRowAnimationController.forward();
+ _flashModeControlRowAnimationController.reverse();
+ _focusModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onFocusModeButtonPressed() {
+ if (_focusModeControlRowAnimationController.value == 1) {
+ _focusModeControlRowAnimationController.reverse();
+ } else {
+ _focusModeControlRowAnimationController.forward();
+ _flashModeControlRowAnimationController.reverse();
+ _exposureModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onAudioModeButtonPressed() {
+ enableAudio = !enableAudio;
+ if (controller != null) {
+ onNewCameraSelected(controller!.description);
+ }
+ }
+
+ Future<void> onCaptureOrientationLockButtonPressed() async {
+ try {
+ if (controller != null) {
+ final CameraController cameraController = controller!;
+ if (cameraController.value.isCaptureOrientationLocked) {
+ await cameraController.unlockCaptureOrientation();
+ showInSnackBar('Capture orientation unlocked');
+ } else {
+ await cameraController.lockCaptureOrientation();
+ showInSnackBar(
+ 'Capture orientation locked to ${cameraController.value.lockedCaptureOrientation.toString().split('.').last}');
+ }
+ }
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ }
+ }
+
+ void onSetFlashModeButtonPressed(FlashMode mode) {
+ setFlashMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Flash mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onSetExposureModeButtonPressed(ExposureMode mode) {
+ setExposureMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Exposure mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onSetFocusModeButtonPressed(FocusMode mode) {
+ setFocusMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Focus mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onVideoRecordButtonPressed() {
+ startVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ });
+ }
+
+ void onStopButtonPressed() {
+ stopVideoRecording().then((XFile? file) {
+ if (mounted) {
+ setState(() {});
+ }
+ if (file != null) {
+ showInSnackBar('Video recorded to ${file.path}');
+ videoFile = file;
+ _startVideoPlayer();
+ }
+ });
+ }
+
+ Future<void> onPausePreviewButtonPressed() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return;
+ }
+
+ if (cameraController.value.isPreviewPaused) {
+ await cameraController.resumePreview();
+ } else {
+ await cameraController.pausePreview();
+ }
+
+ if (mounted) {
+ setState(() {});
+ }
+ }
+
+ void onPauseButtonPressed() {
+ pauseVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Video recording paused');
+ });
+ }
+
+ void onResumeButtonPressed() {
+ resumeVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Video recording resumed');
+ });
+ }
+
+ Future<void> startVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return;
+ }
+
+ if (cameraController.value.isRecordingVideo) {
+ // A recording is already started, do nothing.
+ return;
+ }
+
+ try {
+ await cameraController.startVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return;
+ }
+ }
+
+ Future<XFile?> stopVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return null;
+ }
+
+ try {
+ return cameraController.stopVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return null;
+ }
+ }
+
+ Future<void> pauseVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return;
+ }
+
+ try {
+ await cameraController.pauseVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> resumeVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return;
+ }
+
+ try {
+ await cameraController.resumeVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setFlashMode(FlashMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setFlashMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setExposureMode(ExposureMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setExposureMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setExposureOffset(double offset) async {
+ if (controller == null) {
+ return;
+ }
+
+ setState(() {
+ _currentExposureOffset = offset;
+ });
+ try {
+ offset = await controller!.setExposureOffset(offset);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setFocusMode(FocusMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setFocusMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> _startVideoPlayer() async {
+ if (videoFile == null) {
+ return;
+ }
+
+ final VideoPlayerController vController = kIsWeb
+ ? VideoPlayerController.network(videoFile!.path)
+ : VideoPlayerController.file(File(videoFile!.path));
+
+ videoPlayerListener = () {
+ if (videoController != null && videoController!.value.size != null) {
+ // Refreshing the state to update video player with the correct ratio.
+ if (mounted) {
+ setState(() {});
+ }
+ videoController!.removeListener(videoPlayerListener!);
+ }
+ };
+ vController.addListener(videoPlayerListener!);
+ await vController.setLooping(true);
+ await vController.initialize();
+ await videoController?.dispose();
+ if (mounted) {
+ setState(() {
+ imageFile = null;
+ videoController = vController;
+ });
+ }
+ await vController.play();
+ }
+
+ Future<XFile?> takePicture() async {
+ final CameraController? cameraController = controller;
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return null;
+ }
+
+ if (cameraController.value.isTakingPicture) {
+ // A capture is already pending, do nothing.
+ return null;
+ }
+
+ try {
+ final XFile file = await cameraController.takePicture();
+ return file;
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return null;
+ }
+ }
+
+ void _showCameraException(CameraException e) {
+ _logError(e.code, e.description);
+ showInSnackBar('Error: ${e.code}\n${e.description}');
+ }
+}
+
+/// CameraApp is the Main Application.
+class CameraApp extends StatelessWidget {
+ /// Default Constructor
+ const CameraApp({super.key});
+
+ @override
+ Widget build(BuildContext context) {
+ return const MaterialApp(
+ home: CameraExampleHome(),
+ );
+ }
+}
+
+List<CameraDescription> _cameras = <CameraDescription>[];
+
+Future<void> main() async {
+ // Fetch the available cameras before initializing the app.
+ try {
+ WidgetsFlutterBinding.ensureInitialized();
+ _cameras = await availableCameras();
+ } on CameraException catch (e) {
+ _logError(e.code, e.description);
+ }
+ runApp(const CameraApp());
+}
diff --git a/packages/camera/camera_android_camerax/example/pubspec.yaml b/packages/camera/camera_android_camerax/example/pubspec.yaml
new file mode 100644
index 0000000..49a29b8
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/pubspec.yaml
@@ -0,0 +1,29 @@
+name: camera_android_camerax_example
+description: Demonstrates how to use the camera_android_camerax plugin.
+publish_to: 'none'
+
+environment:
+ sdk: '>=2.17.0 <3.0.0'
+ flutter: ">=3.0.0"
+
+dependencies:
+ camera_android_camerax:
+ # When depending on this package from a real application you should use:
+ # camera_android_camerax: ^x.y.z
+ # See https://dart.dev/tools/pub/dependencies#version-constraints
+ # The example app is bundled with the plugin so we use a path dependency on
+ # the parent directory to use the current plugin's version.
+ path: ../
+ camera_platform_interface: ^2.2.0
+ flutter:
+ sdk: flutter
+ video_player: ^2.4.10
+
+dev_dependencies:
+ flutter_test:
+ sdk: flutter
+ integration_test:
+ sdk: flutter
+
+flutter:
+ uses-material-design: true
diff --git a/packages/camera/camera_android_camerax/example/test/widget_test.dart b/packages/camera/camera_android_camerax/example/test/widget_test.dart
new file mode 100644
index 0000000..bfe91af
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/test/widget_test.dart
@@ -0,0 +1,18 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This is a basic Flutter widget test.
+//
+// To perform an interaction with a widget in your test, use the WidgetTester
+// utility in the flutter_test package. For example, you can send tap and scroll
+// gestures. You can also use WidgetTester to find child widgets in the widget
+// tree, read text, and verify that the values of widget properties are correct.
+
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ testWidgets('Fake test', (WidgetTester tester) async {
+ expect(true, isTrue);
+ });
+}
diff --git a/packages/camera/camera_android_camerax/example/test_driver/integration_test.dart b/packages/camera/camera_android_camerax/example/test_driver/integration_test.dart
new file mode 100644
index 0000000..4f10f2a
--- /dev/null
+++ b/packages/camera/camera_android_camerax/example/test_driver/integration_test.dart
@@ -0,0 +1,7 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:integration_test/integration_test_driver.dart';
+
+Future<void> main() => integrationDriver();
diff --git a/packages/camera/camera_android_camerax/lib/camera_android_camerax.dart b/packages/camera/camera_android_camerax/lib/camera_android_camerax.dart
new file mode 100644
index 0000000..4ddecd7
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/camera_android_camerax.dart
@@ -0,0 +1,5 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'src/android_camera_camerax.dart';
diff --git a/packages/camera/camera_android_camerax/lib/src/android_camera_camerax.dart b/packages/camera/camera_android_camerax/lib/src/android_camera_camerax.dart
new file mode 100644
index 0000000..18debf6
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/android_camera_camerax.dart
@@ -0,0 +1,382 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/widgets.dart';
+import 'package:stream_transform/stream_transform.dart';
+
+import 'camera.dart';
+import 'camera_info.dart';
+import 'camera_selector.dart';
+import 'camerax_library.g.dart';
+import 'preview.dart';
+import 'process_camera_provider.dart';
+import 'surface.dart';
+import 'system_services.dart';
+import 'use_case.dart';
+
+/// The Android implementation of [CameraPlatform] that uses the CameraX library.
+class AndroidCameraCameraX extends CameraPlatform {
+ /// Registers this class as the default instance of [CameraPlatform].
+ static void registerWith() {
+ CameraPlatform.instance = AndroidCameraCameraX();
+ }
+
+ /// The [ProcessCameraProvider] instance used to access camera functionality.
+ @visibleForTesting
+ ProcessCameraProvider? processCameraProvider;
+
+ /// The [Camera] instance returned by the [processCameraProvider] when a [UseCase] is
+ /// bound to the lifecycle of the camera it manages.
+ @visibleForTesting
+ Camera? camera;
+
+ /// The [Preview] instance that can be configured to present a live camera preview.
+ @visibleForTesting
+ Preview? preview;
+
+ /// Whether or not the [preview] is currently bound to the lifecycle that the
+ /// [processCameraProvider] tracks.
+ @visibleForTesting
+ bool previewIsBound = false;
+
+ bool _previewIsPaused = false;
+
+ /// The [CameraSelector] used to configure the [processCameraProvider] to use
+ /// the desired camera.
+ @visibleForTesting
+ CameraSelector? cameraSelector;
+
+ /// The controller we need to broadcast the different camera events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ final StreamController<CameraEvent> cameraEventStreamController =
+ StreamController<CameraEvent>.broadcast();
+
+ /// The stream of camera events.
+ Stream<CameraEvent> _cameraEvents(int cameraId) =>
+ cameraEventStreamController.stream
+ .where((CameraEvent event) => event.cameraId == cameraId);
+
+ /// Returns list of all available cameras and their descriptions.
+ @override
+ Future<List<CameraDescription>> availableCameras() async {
+ final List<CameraDescription> cameraDescriptions = <CameraDescription>[];
+
+ processCameraProvider ??= await ProcessCameraProvider.getInstance();
+ final List<CameraInfo> cameraInfos =
+ await processCameraProvider!.getAvailableCameraInfos();
+
+ CameraLensDirection? cameraLensDirection;
+ int cameraCount = 0;
+ int? cameraSensorOrientation;
+ String? cameraName;
+
+ for (final CameraInfo cameraInfo in cameraInfos) {
+ // Determine the lens direction by filtering the CameraInfo
+ // TODO(gmackall): replace this with call to CameraInfo.getLensFacing when changes containing that method are available
+ if ((await createCameraSelector(CameraSelector.lensFacingBack)
+ .filter(<CameraInfo>[cameraInfo]))
+ .isNotEmpty) {
+ cameraLensDirection = CameraLensDirection.back;
+ } else if ((await createCameraSelector(CameraSelector.lensFacingFront)
+ .filter(<CameraInfo>[cameraInfo]))
+ .isNotEmpty) {
+ cameraLensDirection = CameraLensDirection.front;
+ } else {
+ // Skip this CameraInfo as its lens direction is unknown.
+ continue;
+ }
+
+ cameraSensorOrientation = await cameraInfo.getSensorRotationDegrees();
+ cameraName = 'Camera $cameraCount';
+ cameraCount++;
+
+ cameraDescriptions.add(CameraDescription(
+ name: cameraName,
+ lensDirection: cameraLensDirection,
+ sensorOrientation: cameraSensorOrientation));
+ }
+
+ return cameraDescriptions;
+ }
+
+ /// Creates an uninitialized camera instance and returns the camera ID.
+ ///
+ /// In the CameraX library, cameras are accessed by combining [UseCase]s
+ /// to an instance of a [ProcessCameraProvider]. Thus, to create an
+ /// uninitialized camera instance, this method retrieves a
+ /// [ProcessCameraProvider] instance.
+ ///
+ /// To return the camera ID, which is equivalent to the ID of the surface texture
+ /// that a camera preview can be drawn to, a [Preview] instance is configured
+ /// and bound to the [ProcessCameraProvider] instance.
+ @override
+ Future<int> createCamera(
+ CameraDescription cameraDescription,
+ ResolutionPreset? resolutionPreset, {
+ bool enableAudio = false,
+ }) async {
+ // Must obtain proper permissions before attempting to access a camera.
+ await requestCameraPermissions(enableAudio);
+
+ // Save CameraSelector that matches cameraDescription.
+ final int cameraSelectorLensDirection =
+ _getCameraSelectorLensDirection(cameraDescription.lensDirection);
+ final bool cameraIsFrontFacing =
+ cameraSelectorLensDirection == CameraSelector.lensFacingFront;
+ cameraSelector = createCameraSelector(cameraSelectorLensDirection);
+ // Start listening for device orientation changes preceding camera creation.
+ startListeningForDeviceOrientationChange(
+ cameraIsFrontFacing, cameraDescription.sensorOrientation);
+
+ // Retrieve a ProcessCameraProvider instance.
+ processCameraProvider ??= await ProcessCameraProvider.getInstance();
+
+ // Configure Preview instance and bind to ProcessCameraProvider.
+ final int targetRotation =
+ _getTargetRotation(cameraDescription.sensorOrientation);
+ final ResolutionInfo? targetResolution =
+ _getTargetResolutionForPreview(resolutionPreset);
+ preview = createPreview(targetRotation, targetResolution);
+ previewIsBound = false;
+ _previewIsPaused = false;
+ final int flutterSurfaceTextureId = await preview!.setSurfaceProvider();
+
+ return flutterSurfaceTextureId;
+ }
+
+ /// Initializes the camera on the device.
+ ///
+ /// Since initialization of a camera does not directly map as an operation to
+ /// the CameraX library, this method just retrieves information about the
+ /// camera and sends a [CameraInitializedEvent].
+ ///
+ /// [imageFormatGroup] is used to specify the image formatting used.
+ /// On Android this defaults to ImageFormat.YUV_420_888 and applies only to
+ /// the image stream.
+ @override
+ Future<void> initializeCamera(
+ int cameraId, {
+ ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+ }) async {
+ // TODO(camsim99): Use imageFormatGroup to configure ImageAnalysis use case
+ // for image streaming.
+ // https://github.com/flutter/flutter/issues/120463
+
+ // Configure CameraInitializedEvent to send as representation of a
+ // configured camera:
+ // Retrieve preview resolution.
+ assert(
+ preview != null,
+ 'Preview instance not found. Please call the "createCamera" method before calling "initializeCamera"',
+ );
+ await _bindPreviewToLifecycle();
+ final ResolutionInfo previewResolutionInfo =
+ await preview!.getResolutionInfo();
+ _unbindPreviewFromLifecycle();
+
+ // Retrieve exposure and focus mode configurations:
+ // TODO(camsim99): Implement support for retrieving exposure mode configuration.
+ // https://github.com/flutter/flutter/issues/120468
+ const ExposureMode exposureMode = ExposureMode.auto;
+ const bool exposurePointSupported = false;
+
+ // TODO(camsim99): Implement support for retrieving focus mode configuration.
+ // https://github.com/flutter/flutter/issues/120467
+ const FocusMode focusMode = FocusMode.auto;
+ const bool focusPointSupported = false;
+
+ cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ previewResolutionInfo.width.toDouble(),
+ previewResolutionInfo.height.toDouble(),
+ exposureMode,
+ exposurePointSupported,
+ focusMode,
+ focusPointSupported));
+ }
+
+ /// Releases the resources of the accessed camera.
+ ///
+ /// [cameraId] not used.
+ @override
+ Future<void> dispose(int cameraId) async {
+ preview?.releaseFlutterSurfaceTexture();
+ processCameraProvider?.unbindAll();
+ }
+
+ /// The camera has been initialized.
+ @override
+ Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
+ }
+
+ /// The camera experienced an error.
+ @override
+ Stream<CameraErrorEvent> onCameraError(int cameraId) {
+ return SystemServices.cameraErrorStreamController.stream
+ .map<CameraErrorEvent>((String errorDescription) {
+ return CameraErrorEvent(cameraId, errorDescription);
+ });
+ }
+
+ /// The ui orientation changed.
+ @override
+ Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+ return SystemServices.deviceOrientationChangedStreamController.stream;
+ }
+
+ /// Pause the active preview on the current frame for the selected camera.
+ ///
+ /// [cameraId] not used.
+ @override
+ Future<void> pausePreview(int cameraId) async {
+ _unbindPreviewFromLifecycle();
+ _previewIsPaused = true;
+ }
+
+ /// Resume the paused preview for the selected camera.
+ ///
+ /// [cameraId] not used.
+ @override
+ Future<void> resumePreview(int cameraId) async {
+ await _bindPreviewToLifecycle();
+ _previewIsPaused = false;
+ }
+
+ /// Returns a widget showing a live camera preview.
+ @override
+ Widget buildPreview(int cameraId) {
+ return FutureBuilder<void>(
+ future: _bindPreviewToLifecycle(),
+ builder: (BuildContext context, AsyncSnapshot<void> snapshot) {
+ switch (snapshot.connectionState) {
+ case ConnectionState.none:
+ case ConnectionState.waiting:
+ case ConnectionState.active:
+ // Do nothing while waiting for preview to be bound to lifecycle.
+ return const SizedBox.shrink();
+ case ConnectionState.done:
+ return Texture(textureId: cameraId);
+ }
+ });
+ }
+
+ // Methods for binding UseCases to the lifecycle of the camera controlled
+ // by a ProcessCameraProvider instance:
+
+ /// Binds [preview] instance to the camera lifecycle controlled by the
+ /// [processCameraProvider].
+ Future<void> _bindPreviewToLifecycle() async {
+ assert(processCameraProvider != null);
+ assert(cameraSelector != null);
+
+ if (previewIsBound || _previewIsPaused) {
+ // Only bind if preview is not already bound or intentionally paused.
+ return;
+ }
+
+ camera = await processCameraProvider!
+ .bindToLifecycle(cameraSelector!, <UseCase>[preview!]);
+ previewIsBound = true;
+ }
+
+ /// Unbinds [preview] instance from the camera lifecycle controlled by the
+ /// [processCameraProvider].
+ void _unbindPreviewFromLifecycle() {
+ if (preview == null || !previewIsBound) {
+ return;
+ }
+
+ assert(processCameraProvider != null);
+
+ processCameraProvider!.unbind(<UseCase>[preview!]);
+ previewIsBound = false;
+ }
+
+ // Methods for mapping Flutter camera constants to CameraX constants:
+
+ /// Returns [CameraSelector] lens direction that maps to specified
+ /// [CameraLensDirection].
+ int _getCameraSelectorLensDirection(CameraLensDirection lensDirection) {
+ switch (lensDirection) {
+ case CameraLensDirection.front:
+ return CameraSelector.lensFacingFront;
+ case CameraLensDirection.back:
+ return CameraSelector.lensFacingBack;
+ case CameraLensDirection.external:
+ return CameraSelector.lensFacingExternal;
+ }
+ }
+
+ /// Returns [Surface] target rotation constant that maps to specified sensor
+ /// orientation.
+ int _getTargetRotation(int sensorOrientation) {
+ switch (sensorOrientation) {
+ case 90:
+ return Surface.ROTATION_90;
+ case 180:
+ return Surface.ROTATION_180;
+ case 270:
+ return Surface.ROTATION_270;
+ case 0:
+ return Surface.ROTATION_0;
+ default:
+ throw ArgumentError(
+ '"$sensorOrientation" is not a valid sensor orientation value');
+ }
+ }
+
+ /// Returns [ResolutionInfo] that maps to the specified resolution preset for
+ /// a camera preview.
+ ResolutionInfo? _getTargetResolutionForPreview(ResolutionPreset? resolution) {
+ // TODO(camsim99): Implement resolution configuration.
+ // https://github.com/flutter/flutter/issues/120462
+ return null;
+ }
+
+ // Methods for calls that need to be tested:
+
+ /// Requests camera permissions.
+ @visibleForTesting
+ Future<void> requestCameraPermissions(bool enableAudio) async {
+ await SystemServices.requestCameraPermissions(enableAudio);
+ }
+
+ /// Subscribes the plugin as a listener to changes in device orientation.
+ @visibleForTesting
+ void startListeningForDeviceOrientationChange(
+ bool cameraIsFrontFacing, int sensorOrientation) {
+ SystemServices.startListeningForDeviceOrientationChange(
+ cameraIsFrontFacing, sensorOrientation);
+ }
+
+ /// Returns a [CameraSelector] based on the specified camera lens direction.
+ @visibleForTesting
+ CameraSelector createCameraSelector(int cameraSelectorLensDirection) {
+ switch (cameraSelectorLensDirection) {
+ case CameraSelector.lensFacingFront:
+ return CameraSelector.getDefaultFrontCamera();
+ case CameraSelector.lensFacingBack:
+ return CameraSelector.getDefaultBackCamera();
+ default:
+ return CameraSelector(lensFacing: cameraSelectorLensDirection);
+ }
+ }
+
+ /// Returns a [Preview] configured with the specified target rotation and
+ /// resolution.
+ @visibleForTesting
+ Preview createPreview(int targetRotation, ResolutionInfo? targetResolution) {
+ return Preview(
+ targetRotation: targetRotation, targetResolution: targetResolution);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/android_camera_camerax_flutter_api_impls.dart b/packages/camera/camera_android_camerax/lib/src/android_camera_camerax_flutter_api_impls.dart
new file mode 100644
index 0000000..0a1b3ce
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/android_camera_camerax_flutter_api_impls.dart
@@ -0,0 +1,76 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'camera.dart';
+import 'camera_info.dart';
+import 'camera_selector.dart';
+import 'camerax_library.g.dart';
+import 'java_object.dart';
+import 'process_camera_provider.dart';
+import 'system_services.dart';
+
+/// Handles initialization of Flutter APIs for the Android CameraX library.
+class AndroidCameraXCameraFlutterApis {
+ /// Creates an [AndroidCameraXCameraFlutterApis].
+ AndroidCameraXCameraFlutterApis({
+ JavaObjectFlutterApiImpl? javaObjectFlutterApi,
+ CameraFlutterApiImpl? cameraFlutterApi,
+ CameraInfoFlutterApiImpl? cameraInfoFlutterApi,
+ CameraSelectorFlutterApiImpl? cameraSelectorFlutterApi,
+ ProcessCameraProviderFlutterApiImpl? processCameraProviderFlutterApi,
+ SystemServicesFlutterApiImpl? systemServicesFlutterApi,
+ }) {
+ this.javaObjectFlutterApi =
+ javaObjectFlutterApi ?? JavaObjectFlutterApiImpl();
+ this.cameraInfoFlutterApi =
+ cameraInfoFlutterApi ?? CameraInfoFlutterApiImpl();
+ this.cameraSelectorFlutterApi =
+ cameraSelectorFlutterApi ?? CameraSelectorFlutterApiImpl();
+ this.processCameraProviderFlutterApi = processCameraProviderFlutterApi ??
+ ProcessCameraProviderFlutterApiImpl();
+ this.cameraFlutterApi = cameraFlutterApi ?? CameraFlutterApiImpl();
+ this.systemServicesFlutterApi =
+ systemServicesFlutterApi ?? SystemServicesFlutterApiImpl();
+ }
+
+ static bool _haveBeenSetUp = false;
+
+ /// Mutable instance containing all Flutter Apis for Android CameraX Camera.
+ ///
+ /// This should only be changed for testing purposes.
+ static AndroidCameraXCameraFlutterApis instance =
+ AndroidCameraXCameraFlutterApis();
+
+ /// Handles callback methods for the native Java Object class.
+ late final JavaObjectFlutterApi javaObjectFlutterApi;
+
+ /// Flutter Api for [CameraInfo].
+ late final CameraInfoFlutterApiImpl cameraInfoFlutterApi;
+
+ /// Flutter Api for [CameraSelector].
+ late final CameraSelectorFlutterApiImpl cameraSelectorFlutterApi;
+
+ /// Flutter Api for [ProcessCameraProvider].
+ late final ProcessCameraProviderFlutterApiImpl
+ processCameraProviderFlutterApi;
+
+ /// Flutter Api for [Camera].
+ late final CameraFlutterApiImpl cameraFlutterApi;
+
+ /// Flutter Api for [SystemServices].
+ late final SystemServicesFlutterApiImpl systemServicesFlutterApi;
+
+ /// Ensures all the Flutter APIs have been set up to receive calls from native code.
+ void ensureSetUp() {
+ if (!_haveBeenSetUp) {
+ JavaObjectFlutterApi.setup(javaObjectFlutterApi);
+ CameraInfoFlutterApi.setup(cameraInfoFlutterApi);
+ CameraSelectorFlutterApi.setup(cameraSelectorFlutterApi);
+ ProcessCameraProviderFlutterApi.setup(processCameraProviderFlutterApi);
+ CameraFlutterApi.setup(cameraFlutterApi);
+ SystemServicesFlutterApi.setup(systemServicesFlutterApi);
+ _haveBeenSetUp = true;
+ }
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/camera.dart b/packages/camera/camera_android_camerax/lib/src/camera.dart
new file mode 100644
index 0000000..24ff305
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/camera.dart
@@ -0,0 +1,53 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart' show BinaryMessenger;
+
+import 'android_camera_camerax_flutter_api_impls.dart';
+import 'camerax_library.g.dart';
+import 'instance_manager.dart';
+import 'java_object.dart';
+
+/// The interface used to control the flow of data of use cases, control the
+/// camera, and publish the state of the camera.
+///
+/// See https://developer.android.com/reference/androidx/camera/core/Camera.
+class Camera extends JavaObject {
+ /// Constructs a [Camera] that is not automatically attached to a native object.
+ Camera.detached({super.binaryMessenger, super.instanceManager})
+ : super.detached() {
+ AndroidCameraXCameraFlutterApis.instance.ensureSetUp();
+ }
+}
+
+/// Flutter API implementation of [Camera].
+class CameraFlutterApiImpl implements CameraFlutterApi {
+ /// Constructs a [CameraFlutterApiImpl].
+ CameraFlutterApiImpl({
+ this.binaryMessenger,
+ InstanceManager? instanceManager,
+ }) : instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Maintains instances stored to communicate with native language objects.
+ final InstanceManager instanceManager;
+
+ @override
+ void create(int identifier) {
+ instanceManager.addHostCreatedInstance(
+ Camera.detached(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager),
+ identifier,
+ onCopy: (Camera original) {
+ return Camera.detached(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ },
+ );
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/camera_info.dart b/packages/camera/camera_android_camerax/lib/src/camera_info.dart
new file mode 100644
index 0000000..8c2c7bc
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/camera_info.dart
@@ -0,0 +1,84 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart' show BinaryMessenger;
+
+import 'android_camera_camerax_flutter_api_impls.dart';
+import 'camerax_library.g.dart';
+import 'instance_manager.dart';
+import 'java_object.dart';
+
+/// Represents the metadata of a camera.
+///
+/// See https://developer.android.com/reference/androidx/camera/core/CameraInfo.
+class CameraInfo extends JavaObject {
+ /// Constructs a [CameraInfo] that is not automatically attached to a native object.
+ CameraInfo.detached(
+ {BinaryMessenger? binaryMessenger, InstanceManager? instanceManager})
+ : super.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager) {
+ _api = CameraInfoHostApiImpl(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ AndroidCameraXCameraFlutterApis.instance.ensureSetUp();
+ }
+
+ late final CameraInfoHostApiImpl _api;
+
+ /// Gets sensor orientation degrees of camera.
+ Future<int> getSensorRotationDegrees() =>
+ _api.getSensorRotationDegreesFromInstance(this);
+}
+
+/// Host API implementation of [CameraInfo].
+class CameraInfoHostApiImpl extends CameraInfoHostApi {
+ /// Constructs a [CameraInfoHostApiImpl].
+ CameraInfoHostApiImpl(
+ {super.binaryMessenger, InstanceManager? instanceManager}) {
+ this.instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+ }
+
+ /// Maintains instances stored to communicate with native language objects.
+ late final InstanceManager instanceManager;
+
+ /// Gets sensor orientation degrees of [CameraInfo].
+ Future<int> getSensorRotationDegreesFromInstance(
+ CameraInfo instance,
+ ) async {
+ final int sensorRotationDegrees = await getSensorRotationDegrees(
+ instanceManager.getIdentifier(instance)!);
+ return sensorRotationDegrees;
+ }
+}
+
+/// Flutter API implementation of [CameraInfo].
+class CameraInfoFlutterApiImpl extends CameraInfoFlutterApi {
+ /// Constructs a [CameraInfoFlutterApiImpl].
+ CameraInfoFlutterApiImpl({
+ this.binaryMessenger,
+ InstanceManager? instanceManager,
+ }) : instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Maintains instances stored to communicate with native language objects.
+ final InstanceManager instanceManager;
+
+ @override
+ void create(int identifier) {
+ instanceManager.addHostCreatedInstance(
+ CameraInfo.detached(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager),
+ identifier,
+ onCopy: (CameraInfo original) {
+ return CameraInfo.detached(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ },
+ );
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/camera_selector.dart b/packages/camera/camera_android_camerax/lib/src/camera_selector.dart
new file mode 100644
index 0000000..f1d3c5f
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/camera_selector.dart
@@ -0,0 +1,193 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+
+import 'android_camera_camerax_flutter_api_impls.dart';
+import 'camera_info.dart';
+import 'camerax_library.g.dart';
+import 'instance_manager.dart';
+import 'java_object.dart';
+
+/// Selects a camera for use.
+///
+/// See https://developer.android.com/reference/androidx/camera/core/CameraSelector.
+class CameraSelector extends JavaObject {
+ /// Creates a [CameraSelector].
+ CameraSelector(
+ {BinaryMessenger? binaryMessenger,
+ InstanceManager? instanceManager,
+ this.lensFacing})
+ : super.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager) {
+ _api = CameraSelectorHostApiImpl(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ AndroidCameraXCameraFlutterApis.instance.ensureSetUp();
+ _api.createFromInstance(this, lensFacing);
+ }
+
+ /// Creates a detached [CameraSelector].
+ CameraSelector.detached(
+ {BinaryMessenger? binaryMessenger,
+ InstanceManager? instanceManager,
+ this.lensFacing})
+ : super.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager) {
+ _api = CameraSelectorHostApiImpl(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ AndroidCameraXCameraFlutterApis.instance.ensureSetUp();
+ }
+
+ late final CameraSelectorHostApiImpl _api;
+
+ /// ID for front facing lens.
+ ///
+ /// See https://developer.android.com/reference/androidx/camera/core/CameraSelector#LENS_FACING_FRONT().
+ static const int lensFacingFront = 0;
+
+ /// ID for back facing lens.
+ ///
+ /// See https://developer.android.com/reference/androidx/camera/core/CameraSelector#LENS_FACING_BACK().
+ static const int lensFacingBack = 1;
+
+ /// ID for external lens.
+ ///
+ /// See https://developer.android.com/reference/androidx/camera/core/CameraSelector#LENS_FACING_EXTERNAL().
+ static const int lensFacingExternal = 2;
+
+ /// ID for unknown lens.
+ ///
+ /// See https://developer.android.com/reference/androidx/camera/core/CameraSelector#LENS_FACING_UNKNOWN().
+ static const int lensFacingUnknown = -1;
+
+ /// Selector for default front facing camera.
+ static CameraSelector getDefaultFrontCamera({
+ BinaryMessenger? binaryMessenger,
+ InstanceManager? instanceManager,
+ }) {
+ return CameraSelector(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager,
+ lensFacing: lensFacingFront,
+ );
+ }
+
+ /// Selector for default back facing camera.
+ static CameraSelector getDefaultBackCamera({
+ BinaryMessenger? binaryMessenger,
+ InstanceManager? instanceManager,
+ }) {
+ return CameraSelector(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager,
+ lensFacing: lensFacingBack,
+ );
+ }
+
+ /// Lens direction of this selector.
+ final int? lensFacing;
+
+ /// Filters available cameras based on provided [CameraInfo]s.
+ Future<List<CameraInfo>> filter(List<CameraInfo> cameraInfos) {
+ return _api.filterFromInstance(this, cameraInfos);
+ }
+}
+
+/// Host API implementation of [CameraSelector].
+class CameraSelectorHostApiImpl extends CameraSelectorHostApi {
+ /// Constructs a [CameraSelectorHostApiImpl].
+ CameraSelectorHostApiImpl(
+ {this.binaryMessenger, InstanceManager? instanceManager})
+ : super(binaryMessenger: binaryMessenger) {
+ this.instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+ }
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Maintains instances stored to communicate with native language objects.
+ late final InstanceManager instanceManager;
+
+ /// Creates a [CameraSelector] with the lens direction provided if specified.
+ void createFromInstance(CameraSelector instance, int? lensFacing) {
+ int? identifier = instanceManager.getIdentifier(instance);
+ identifier ??= instanceManager.addDartCreatedInstance(instance,
+ onCopy: (CameraSelector original) {
+ return CameraSelector.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager,
+ lensFacing: original.lensFacing);
+ });
+
+ create(identifier, lensFacing);
+ }
+
+ /// Filters a list of [CameraInfo]s based on the [CameraSelector].
+ Future<List<CameraInfo>> filterFromInstance(
+ CameraSelector instance,
+ List<CameraInfo> cameraInfos,
+ ) async {
+ int? identifier = instanceManager.getIdentifier(instance);
+ identifier ??= instanceManager.addDartCreatedInstance(instance,
+ onCopy: (CameraSelector original) {
+ return CameraSelector.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager,
+ lensFacing: original.lensFacing);
+ });
+
+ final List<int> cameraInfoIds = cameraInfos
+ .map<int>((CameraInfo info) => instanceManager.getIdentifier(info)!)
+ .toList();
+ final List<int?> filteredCameraInfoIds =
+ await filter(identifier, cameraInfoIds);
+ if (filteredCameraInfoIds.isEmpty) {
+ return <CameraInfo>[];
+ }
+ return filteredCameraInfoIds
+ .map<CameraInfo>((int? id) =>
+ instanceManager.getInstanceWithWeakReference(id!)! as CameraInfo)
+ .toList();
+ }
+}
+
+/// Flutter API implementation of [CameraSelector].
+class CameraSelectorFlutterApiImpl implements CameraSelectorFlutterApi {
+ /// Constructs a [CameraSelectorFlutterApiImpl].
+ CameraSelectorFlutterApiImpl({
+ this.binaryMessenger,
+ InstanceManager? instanceManager,
+ }) : instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Maintains instances stored to communicate with native language objects.
+ final InstanceManager instanceManager;
+
+ @override
+ void create(int identifier, int? lensFacing) {
+ instanceManager.addHostCreatedInstance(
+ CameraSelector.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager,
+ lensFacing: lensFacing),
+ identifier,
+ onCopy: (CameraSelector original) {
+ return CameraSelector.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager,
+ lensFacing: original.lensFacing);
+ },
+ );
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/camerax_library.g.dart b/packages/camera/camera_android_camerax/lib/src/camerax_library.g.dart
new file mode 100644
index 0000000..1d315e5
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/camerax_library.g.dart
@@ -0,0 +1,855 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+// Autogenerated from Pigeon (v3.2.9), do not edit directly.
+// See also: https://pub.dev/packages/pigeon
+// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, prefer_null_aware_operators, omit_local_variable_types, unused_shown_name, unnecessary_import
+import 'dart:async';
+import 'dart:typed_data' show Uint8List, Int32List, Int64List, Float64List;
+
+import 'package:flutter/foundation.dart' show WriteBuffer, ReadBuffer;
+import 'package:flutter/services.dart';
+
+class ResolutionInfo {
+ ResolutionInfo({
+ required this.width,
+ required this.height,
+ });
+
+ int width;
+ int height;
+
+ Object encode() {
+ final Map<Object?, Object?> pigeonMap = <Object?, Object?>{};
+ pigeonMap['width'] = width;
+ pigeonMap['height'] = height;
+ return pigeonMap;
+ }
+
+ static ResolutionInfo decode(Object message) {
+ final Map<Object?, Object?> pigeonMap = message as Map<Object?, Object?>;
+ return ResolutionInfo(
+ width: pigeonMap['width']! as int,
+ height: pigeonMap['height']! as int,
+ );
+ }
+}
+
+class CameraPermissionsErrorData {
+ CameraPermissionsErrorData({
+ required this.errorCode,
+ required this.description,
+ });
+
+ String errorCode;
+ String description;
+
+ Object encode() {
+ final Map<Object?, Object?> pigeonMap = <Object?, Object?>{};
+ pigeonMap['errorCode'] = errorCode;
+ pigeonMap['description'] = description;
+ return pigeonMap;
+ }
+
+ static CameraPermissionsErrorData decode(Object message) {
+ final Map<Object?, Object?> pigeonMap = message as Map<Object?, Object?>;
+ return CameraPermissionsErrorData(
+ errorCode: pigeonMap['errorCode']! as String,
+ description: pigeonMap['description']! as String,
+ );
+ }
+}
+
+class _JavaObjectHostApiCodec extends StandardMessageCodec {
+ const _JavaObjectHostApiCodec();
+}
+
+class JavaObjectHostApi {
+ /// Constructor for [JavaObjectHostApi]. The [binaryMessenger] named argument is
+ /// available for dependency injection. If it is left null, the default
+ /// BinaryMessenger will be used which routes to the host platform.
+ JavaObjectHostApi({BinaryMessenger? binaryMessenger})
+ : _binaryMessenger = binaryMessenger;
+
+ final BinaryMessenger? _binaryMessenger;
+
+ static const MessageCodec<Object?> codec = _JavaObjectHostApiCodec();
+
+ Future<void> dispose(int arg_identifier) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.JavaObjectHostApi.dispose', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier]) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return;
+ }
+ }
+}
+
+class _JavaObjectFlutterApiCodec extends StandardMessageCodec {
+ const _JavaObjectFlutterApiCodec();
+}
+
+abstract class JavaObjectFlutterApi {
+ static const MessageCodec<Object?> codec = _JavaObjectFlutterApiCodec();
+
+ void dispose(int identifier);
+ static void setup(JavaObjectFlutterApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.JavaObjectFlutterApi.dispose', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMessageHandler(null);
+ } else {
+ channel.setMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.JavaObjectFlutterApi.dispose was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.JavaObjectFlutterApi.dispose was null, expected non-null int.');
+ api.dispose(arg_identifier!);
+ return;
+ });
+ }
+ }
+ }
+}
+
+class _CameraInfoHostApiCodec extends StandardMessageCodec {
+ const _CameraInfoHostApiCodec();
+}
+
+class CameraInfoHostApi {
+ /// Constructor for [CameraInfoHostApi]. The [binaryMessenger] named argument is
+ /// available for dependency injection. If it is left null, the default
+ /// BinaryMessenger will be used which routes to the host platform.
+ CameraInfoHostApi({BinaryMessenger? binaryMessenger})
+ : _binaryMessenger = binaryMessenger;
+
+ final BinaryMessenger? _binaryMessenger;
+
+ static const MessageCodec<Object?> codec = _CameraInfoHostApiCodec();
+
+ Future<int> getSensorRotationDegrees(int arg_identifier) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraInfoHostApi.getSensorRotationDegrees', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier]) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else if (replyMap['result'] == null) {
+ throw PlatformException(
+ code: 'null-error',
+ message: 'Host platform returned null value for non-null return value.',
+ );
+ } else {
+ return (replyMap['result'] as int?)!;
+ }
+ }
+}
+
+class _CameraInfoFlutterApiCodec extends StandardMessageCodec {
+ const _CameraInfoFlutterApiCodec();
+}
+
+abstract class CameraInfoFlutterApi {
+ static const MessageCodec<Object?> codec = _CameraInfoFlutterApiCodec();
+
+ void create(int identifier);
+ static void setup(CameraInfoFlutterApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraInfoFlutterApi.create', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMessageHandler(null);
+ } else {
+ channel.setMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.CameraInfoFlutterApi.create was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.CameraInfoFlutterApi.create was null, expected non-null int.');
+ api.create(arg_identifier!);
+ return;
+ });
+ }
+ }
+ }
+}
+
+class _CameraSelectorHostApiCodec extends StandardMessageCodec {
+ const _CameraSelectorHostApiCodec();
+}
+
+class CameraSelectorHostApi {
+ /// Constructor for [CameraSelectorHostApi]. The [binaryMessenger] named argument is
+ /// available for dependency injection. If it is left null, the default
+ /// BinaryMessenger will be used which routes to the host platform.
+ CameraSelectorHostApi({BinaryMessenger? binaryMessenger})
+ : _binaryMessenger = binaryMessenger;
+
+ final BinaryMessenger? _binaryMessenger;
+
+ static const MessageCodec<Object?> codec = _CameraSelectorHostApiCodec();
+
+ Future<void> create(int arg_identifier, int? arg_lensFacing) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraSelectorHostApi.create', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier, arg_lensFacing])
+ as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return;
+ }
+ }
+
+ Future<List<int?>> filter(
+ int arg_identifier, List<int?> arg_cameraInfoIds) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraSelectorHostApi.filter', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier, arg_cameraInfoIds])
+ as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else if (replyMap['result'] == null) {
+ throw PlatformException(
+ code: 'null-error',
+ message: 'Host platform returned null value for non-null return value.',
+ );
+ } else {
+ return (replyMap['result'] as List<Object?>?)!.cast<int?>();
+ }
+ }
+}
+
+class _CameraSelectorFlutterApiCodec extends StandardMessageCodec {
+ const _CameraSelectorFlutterApiCodec();
+}
+
+abstract class CameraSelectorFlutterApi {
+ static const MessageCodec<Object?> codec = _CameraSelectorFlutterApiCodec();
+
+ void create(int identifier, int? lensFacing);
+ static void setup(CameraSelectorFlutterApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraSelectorFlutterApi.create', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMessageHandler(null);
+ } else {
+ channel.setMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.CameraSelectorFlutterApi.create was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.CameraSelectorFlutterApi.create was null, expected non-null int.');
+ final int? arg_lensFacing = (args[1] as int?);
+ api.create(arg_identifier!, arg_lensFacing);
+ return;
+ });
+ }
+ }
+ }
+}
+
+class _ProcessCameraProviderHostApiCodec extends StandardMessageCodec {
+ const _ProcessCameraProviderHostApiCodec();
+}
+
+class ProcessCameraProviderHostApi {
+ /// Constructor for [ProcessCameraProviderHostApi]. The [binaryMessenger] named argument is
+ /// available for dependency injection. If it is left null, the default
+ /// BinaryMessenger will be used which routes to the host platform.
+ ProcessCameraProviderHostApi({BinaryMessenger? binaryMessenger})
+ : _binaryMessenger = binaryMessenger;
+
+ final BinaryMessenger? _binaryMessenger;
+
+ static const MessageCodec<Object?> codec =
+ _ProcessCameraProviderHostApiCodec();
+
+ Future<int> getInstance() async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.getInstance', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(null) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else if (replyMap['result'] == null) {
+ throw PlatformException(
+ code: 'null-error',
+ message: 'Host platform returned null value for non-null return value.',
+ );
+ } else {
+ return (replyMap['result'] as int?)!;
+ }
+ }
+
+ Future<List<int?>> getAvailableCameraInfos(int arg_identifier) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.getAvailableCameraInfos',
+ codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier]) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else if (replyMap['result'] == null) {
+ throw PlatformException(
+ code: 'null-error',
+ message: 'Host platform returned null value for non-null return value.',
+ );
+ } else {
+ return (replyMap['result'] as List<Object?>?)!.cast<int?>();
+ }
+ }
+
+ Future<int> bindToLifecycle(int arg_identifier,
+ int arg_cameraSelectorIdentifier, List<int?> arg_useCaseIds) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.bindToLifecycle',
+ codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap = await channel.send(<Object?>[
+ arg_identifier,
+ arg_cameraSelectorIdentifier,
+ arg_useCaseIds
+ ]) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else if (replyMap['result'] == null) {
+ throw PlatformException(
+ code: 'null-error',
+ message: 'Host platform returned null value for non-null return value.',
+ );
+ } else {
+ return (replyMap['result'] as int?)!;
+ }
+ }
+
+ Future<void> unbind(int arg_identifier, List<int?> arg_useCaseIds) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.unbind', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier, arg_useCaseIds])
+ as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return;
+ }
+ }
+
+ Future<void> unbindAll(int arg_identifier) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.unbindAll', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier]) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return;
+ }
+ }
+}
+
+class _ProcessCameraProviderFlutterApiCodec extends StandardMessageCodec {
+ const _ProcessCameraProviderFlutterApiCodec();
+}
+
+abstract class ProcessCameraProviderFlutterApi {
+ static const MessageCodec<Object?> codec =
+ _ProcessCameraProviderFlutterApiCodec();
+
+ void create(int identifier);
+ static void setup(ProcessCameraProviderFlutterApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderFlutterApi.create', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMessageHandler(null);
+ } else {
+ channel.setMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderFlutterApi.create was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderFlutterApi.create was null, expected non-null int.');
+ api.create(arg_identifier!);
+ return;
+ });
+ }
+ }
+ }
+}
+
+class _CameraFlutterApiCodec extends StandardMessageCodec {
+ const _CameraFlutterApiCodec();
+}
+
+abstract class CameraFlutterApi {
+ static const MessageCodec<Object?> codec = _CameraFlutterApiCodec();
+
+ void create(int identifier);
+ static void setup(CameraFlutterApi? api, {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraFlutterApi.create', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMessageHandler(null);
+ } else {
+ channel.setMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.CameraFlutterApi.create was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.CameraFlutterApi.create was null, expected non-null int.');
+ api.create(arg_identifier!);
+ return;
+ });
+ }
+ }
+ }
+}
+
+class _SystemServicesHostApiCodec extends StandardMessageCodec {
+ const _SystemServicesHostApiCodec();
+ @override
+ void writeValue(WriteBuffer buffer, Object? value) {
+ if (value is CameraPermissionsErrorData) {
+ buffer.putUint8(128);
+ writeValue(buffer, value.encode());
+ } else {
+ super.writeValue(buffer, value);
+ }
+ }
+
+ @override
+ Object? readValueOfType(int type, ReadBuffer buffer) {
+ switch (type) {
+ case 128:
+ return CameraPermissionsErrorData.decode(readValue(buffer)!);
+
+ default:
+ return super.readValueOfType(type, buffer);
+ }
+ }
+}
+
+class SystemServicesHostApi {
+ /// Constructor for [SystemServicesHostApi]. The [binaryMessenger] named argument is
+ /// available for dependency injection. If it is left null, the default
+ /// BinaryMessenger will be used which routes to the host platform.
+ SystemServicesHostApi({BinaryMessenger? binaryMessenger})
+ : _binaryMessenger = binaryMessenger;
+
+ final BinaryMessenger? _binaryMessenger;
+
+ static const MessageCodec<Object?> codec = _SystemServicesHostApiCodec();
+
+ Future<CameraPermissionsErrorData?> requestCameraPermissions(
+ bool arg_enableAudio) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.SystemServicesHostApi.requestCameraPermissions',
+ codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap = await channel
+ .send(<Object?>[arg_enableAudio]) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return (replyMap['result'] as CameraPermissionsErrorData?);
+ }
+ }
+
+ Future<void> startListeningForDeviceOrientationChange(
+ bool arg_isFrontFacing, int arg_sensorOrientation) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.SystemServicesHostApi.startListeningForDeviceOrientationChange',
+ codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_isFrontFacing, arg_sensorOrientation])
+ as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return;
+ }
+ }
+
+ Future<void> stopListeningForDeviceOrientationChange() async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.SystemServicesHostApi.stopListeningForDeviceOrientationChange',
+ codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(null) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return;
+ }
+ }
+}
+
+class _SystemServicesFlutterApiCodec extends StandardMessageCodec {
+ const _SystemServicesFlutterApiCodec();
+}
+
+abstract class SystemServicesFlutterApi {
+ static const MessageCodec<Object?> codec = _SystemServicesFlutterApiCodec();
+
+ void onDeviceOrientationChanged(String orientation);
+ void onCameraError(String errorDescription);
+ static void setup(SystemServicesFlutterApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.SystemServicesFlutterApi.onDeviceOrientationChanged',
+ codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMessageHandler(null);
+ } else {
+ channel.setMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesFlutterApi.onDeviceOrientationChanged was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final String? arg_orientation = (args[0] as String?);
+ assert(arg_orientation != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesFlutterApi.onDeviceOrientationChanged was null, expected non-null String.');
+ api.onDeviceOrientationChanged(arg_orientation!);
+ return;
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.SystemServicesFlutterApi.onCameraError', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMessageHandler(null);
+ } else {
+ channel.setMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesFlutterApi.onCameraError was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final String? arg_errorDescription = (args[0] as String?);
+ assert(arg_errorDescription != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesFlutterApi.onCameraError was null, expected non-null String.');
+ api.onCameraError(arg_errorDescription!);
+ return;
+ });
+ }
+ }
+ }
+}
+
+class _PreviewHostApiCodec extends StandardMessageCodec {
+ const _PreviewHostApiCodec();
+ @override
+ void writeValue(WriteBuffer buffer, Object? value) {
+ if (value is ResolutionInfo) {
+ buffer.putUint8(128);
+ writeValue(buffer, value.encode());
+ } else if (value is ResolutionInfo) {
+ buffer.putUint8(129);
+ writeValue(buffer, value.encode());
+ } else {
+ super.writeValue(buffer, value);
+ }
+ }
+
+ @override
+ Object? readValueOfType(int type, ReadBuffer buffer) {
+ switch (type) {
+ case 128:
+ return ResolutionInfo.decode(readValue(buffer)!);
+
+ case 129:
+ return ResolutionInfo.decode(readValue(buffer)!);
+
+ default:
+ return super.readValueOfType(type, buffer);
+ }
+ }
+}
+
+class PreviewHostApi {
+ /// Constructor for [PreviewHostApi]. The [binaryMessenger] named argument is
+ /// available for dependency injection. If it is left null, the default
+ /// BinaryMessenger will be used which routes to the host platform.
+ PreviewHostApi({BinaryMessenger? binaryMessenger})
+ : _binaryMessenger = binaryMessenger;
+
+ final BinaryMessenger? _binaryMessenger;
+
+ static const MessageCodec<Object?> codec = _PreviewHostApiCodec();
+
+ Future<void> create(int arg_identifier, int? arg_rotation,
+ ResolutionInfo? arg_targetResolution) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.PreviewHostApi.create', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap = await channel
+ .send(<Object?>[arg_identifier, arg_rotation, arg_targetResolution])
+ as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return;
+ }
+ }
+
+ Future<int> setSurfaceProvider(int arg_identifier) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.PreviewHostApi.setSurfaceProvider', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier]) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else if (replyMap['result'] == null) {
+ throw PlatformException(
+ code: 'null-error',
+ message: 'Host platform returned null value for non-null return value.',
+ );
+ } else {
+ return (replyMap['result'] as int?)!;
+ }
+ }
+
+ Future<void> releaseFlutterSurfaceTexture() async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.PreviewHostApi.releaseFlutterSurfaceTexture', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(null) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else {
+ return;
+ }
+ }
+
+ Future<ResolutionInfo> getResolutionInfo(int arg_identifier) async {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.PreviewHostApi.getResolutionInfo', codec,
+ binaryMessenger: _binaryMessenger);
+ final Map<Object?, Object?>? replyMap =
+ await channel.send(<Object?>[arg_identifier]) as Map<Object?, Object?>?;
+ if (replyMap == null) {
+ throw PlatformException(
+ code: 'channel-error',
+ message: 'Unable to establish connection on channel.',
+ );
+ } else if (replyMap['error'] != null) {
+ final Map<Object?, Object?> error =
+ (replyMap['error'] as Map<Object?, Object?>?)!;
+ throw PlatformException(
+ code: (error['code'] as String?)!,
+ message: error['message'] as String?,
+ details: error['details'],
+ );
+ } else if (replyMap['result'] == null) {
+ throw PlatformException(
+ code: 'null-error',
+ message: 'Host platform returned null value for non-null return value.',
+ );
+ } else {
+ return (replyMap['result'] as ResolutionInfo?)!;
+ }
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/instance_manager.dart b/packages/camera/camera_android_camerax/lib/src/instance_manager.dart
new file mode 100644
index 0000000..8c6081c
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/instance_manager.dart
@@ -0,0 +1,203 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+
+/// Maintains instances used to communicate with the native objects they
+/// represent.
+///
+/// Added instances are stored as weak references and their copies are stored
+/// as strong references to maintain access to their variables and callback
+/// methods. Both are stored with the same identifier.
+///
+/// When a weak referenced instance becomes inaccessible,
+/// [onWeakReferenceRemoved] is called with its associated identifier.
+///
+/// If an instance is retrieved and has the possibility to be used,
+/// (e.g. calling [getInstanceWithWeakReference]) a copy of the strong reference
+/// is added as a weak reference with the same identifier. This prevents a
+/// scenario where the weak referenced instance was released and then later
+/// returned by the host platform.
+class InstanceManager {
+ /// Constructs an [InstanceManager].
+ InstanceManager({required void Function(int) onWeakReferenceRemoved}) {
+ this.onWeakReferenceRemoved = (int identifier) {
+ debugPrint('Releasing weak reference with identifier: $identifier');
+ _weakInstances.remove(identifier);
+ onWeakReferenceRemoved(identifier);
+ };
+ _finalizer = Finalizer<int>(this.onWeakReferenceRemoved);
+ }
+
+ // Identifiers are locked to a specific range to avoid collisions with objects
+ // created simultaneously by the host platform.
+ // Host uses identifiers >= 2^16 and Dart is expected to use values n where,
+ // 0 <= n < 2^16.
+ static const int _maxDartCreatedIdentifier = 65536;
+
+ // Expando is used because it doesn't prevent its keys from becoming
+ // inaccessible. This allows the manager to efficiently retrieve an identifier
+ // of an instance without holding a strong reference to that instance.
+ //
+ // It also doesn't use `==` to search for identifiers, which would lead to an
+ // infinite loop when comparing an object to its copy. (i.e. which was caused
+ // by calling instanceManager.getIdentifier() inside of `==` while this was a
+ // HashMap).
+ final Expando<int> _identifiers = Expando<int>();
+ final Map<int, WeakReference<Object>> _weakInstances =
+ <int, WeakReference<Object>>{};
+ final Map<int, Object> _strongInstances = <int, Object>{};
+ final Map<int, Function> _copyCallbacks = <int, Function>{};
+ late final Finalizer<int> _finalizer;
+ int _nextIdentifier = 0;
+
+ /// Called when a weak referenced instance is removed by [removeWeakReference]
+ /// or becomes inaccessible.
+ late final void Function(int) onWeakReferenceRemoved;
+
+ /// Adds a new instance that was instantiated by Dart.
+ ///
+ /// In other words, Dart wants to add a new instance that will represent
+ /// an object that will be instantiated on the host platform.
+ ///
+ /// Throws assertion error if the instance has already been added.
+ ///
+ /// Returns the randomly generated id of the [instance] added.
+ int addDartCreatedInstance<T extends Object>(
+ T instance, {
+ required T Function(T original) onCopy,
+ }) {
+ assert(getIdentifier(instance) == null);
+
+ final int identifier = _nextUniqueIdentifier();
+ _addInstanceWithIdentifier(instance, identifier, onCopy: onCopy);
+ return identifier;
+ }
+
+ /// Removes the instance, if present, and calls [onWeakReferenceRemoved] with
+ /// its identifier.
+ ///
+ /// Returns the identifier associated with the removed instance. Otherwise,
+ /// `null` if the instance was not found in this manager.
+ ///
+ /// This does not remove the strongly referenced instance associated with
+ /// [instance]. This can be done with [remove].
+ int? removeWeakReference(Object instance) {
+ final int? identifier = getIdentifier(instance);
+ if (identifier == null) {
+ return null;
+ }
+
+ _identifiers[instance] = null;
+ _finalizer.detach(instance);
+ onWeakReferenceRemoved(identifier);
+
+ return identifier;
+ }
+
+ /// Removes [identifier] and its associated strongly referenced instance, if
+ /// present, from the manager.
+ ///
+ /// Returns the strong referenced instance associated with [identifier] before
+ /// it was removed. Returns `null` if [identifier] was not associated with
+ /// any strong reference.
+ ///
+ /// This does not remove the weakly referenced instance associated with
+ /// [identifier]. This can be done with [removeWeakReference].
+ T? remove<T extends Object>(int identifier) {
+ debugPrint('Releasing strong reference with identifier: $identifier');
+ _copyCallbacks.remove(identifier);
+ return _strongInstances.remove(identifier) as T?;
+ }
+
+ /// Retrieves the instance associated with identifier.
+ ///
+ /// The value returned is chosen from the following order:
+ ///
+ /// 1. A weakly referenced instance associated with identifier.
+ /// 2. If the only instance associated with identifier is a strongly
+ /// referenced instance, a copy of the instance is added as a weak reference
+ /// with the same identifier. Returning the newly created copy.
+ /// 3. If no instance is associated with identifier, returns null.
+ ///
+ /// This method also expects the host `InstanceManager` to have a strong
+ /// reference to the instance the identifier is associated with.
+ T? getInstanceWithWeakReference<T extends Object>(int identifier) {
+ final T? weakInstance = _weakInstances[identifier]?.target as T?;
+
+ if (weakInstance == null) {
+ final T? strongInstance = _strongInstances[identifier] as T?;
+ if (strongInstance != null) {
+ // This cast is safe since it matches the argument type for
+ // _addInstanceWithIdentifier, which is the only place _copyCallbacks
+ // is populated.
+ final T Function(T) copyCallback =
+ _copyCallbacks[identifier]! as T Function(T);
+ final T copy = copyCallback(strongInstance);
+ _identifiers[copy] = identifier;
+ _weakInstances[identifier] = WeakReference<T>(copy);
+ _finalizer.attach(copy, identifier, detach: copy);
+ return copy;
+ }
+ return strongInstance;
+ }
+
+ return weakInstance;
+ }
+
+ /// Retrieves the identifier associated with instance.
+ int? getIdentifier(Object instance) {
+ return _identifiers[instance];
+ }
+
+ /// Adds a new instance that was instantiated by the host platform.
+ ///
+ /// In other words, the host platform wants to add a new instance that
+ /// represents an object on the host platform. Stored with [identifier].
+ ///
+ /// Throws assertion error if the instance or its identifier has already been
+ /// added.
+ ///
+ /// Returns unique identifier of the [instance] added.
+ void addHostCreatedInstance<T extends Object>(
+ T instance,
+ int identifier, {
+ required T Function(T original) onCopy,
+ }) {
+ assert(!containsIdentifier(identifier));
+ assert(getIdentifier(instance) == null);
+ assert(identifier >= 0);
+ _addInstanceWithIdentifier(instance, identifier, onCopy: onCopy);
+ }
+
+ void _addInstanceWithIdentifier<T extends Object>(
+ T instance,
+ int identifier, {
+ required T Function(T original) onCopy,
+ }) {
+ _identifiers[instance] = identifier;
+ _weakInstances[identifier] = WeakReference<Object>(instance);
+ _finalizer.attach(instance, identifier, detach: instance);
+
+ final Object copy = onCopy(instance);
+ _identifiers[copy] = identifier;
+ _strongInstances[identifier] = copy;
+ _copyCallbacks[identifier] = onCopy;
+ }
+
+ /// Whether this manager contains the given [identifier].
+ bool containsIdentifier(int identifier) {
+ return _weakInstances.containsKey(identifier) ||
+ _strongInstances.containsKey(identifier);
+ }
+
+ int _nextUniqueIdentifier() {
+ late int identifier;
+ do {
+ identifier = _nextIdentifier;
+ _nextIdentifier = (_nextIdentifier + 1) % _maxDartCreatedIdentifier;
+ } while (containsIdentifier(identifier));
+ return identifier;
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/java_object.dart b/packages/camera/camera_android_camerax/lib/src/java_object.dart
new file mode 100644
index 0000000..f6127d4
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/java_object.dart
@@ -0,0 +1,76 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart' show immutable;
+import 'package:flutter/services.dart';
+
+import 'camerax_library.g.dart';
+import 'instance_manager.dart';
+
+/// Root of the Java class hierarchy.
+///
+/// See https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html.
+@immutable
+class JavaObject {
+ /// Constructs a [JavaObject] without creating the associated Java object.
+ ///
+ /// This should only be used by subclasses created by this library or to
+ /// create copies.
+ JavaObject.detached({
+ BinaryMessenger? binaryMessenger,
+ InstanceManager? instanceManager,
+ }) : _api = JavaObjectHostApiImpl(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager,
+ );
+
+ /// Global instance of [InstanceManager].
+ static final InstanceManager globalInstanceManager = InstanceManager(
+ onWeakReferenceRemoved: (int identifier) {
+ JavaObjectHostApiImpl().dispose(identifier);
+ },
+ );
+
+ /// Pigeon Host Api implementation for [JavaObject].
+ final JavaObjectHostApiImpl _api;
+
+ /// Release the reference to a native Java instance.
+ static void dispose(JavaObject instance) {
+ instance._api.instanceManager.removeWeakReference(instance);
+ }
+}
+
+/// Handles methods calls to the native Java Object class.
+class JavaObjectHostApiImpl extends JavaObjectHostApi {
+ /// Constructs a [JavaObjectHostApiImpl].
+ JavaObjectHostApiImpl({
+ this.binaryMessenger,
+ InstanceManager? instanceManager,
+ }) : instanceManager = instanceManager ?? JavaObject.globalInstanceManager,
+ super(binaryMessenger: binaryMessenger);
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Maintains instances stored to communicate with native language objects.
+ final InstanceManager instanceManager;
+}
+
+/// Handles callbacks methods for the native Java Object class.
+class JavaObjectFlutterApiImpl implements JavaObjectFlutterApi {
+ /// Constructs a [JavaObjectFlutterApiImpl].
+ JavaObjectFlutterApiImpl({InstanceManager? instanceManager})
+ : instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+
+ /// Maintains instances stored to communicate with native language objects.
+ final InstanceManager instanceManager;
+
+ @override
+ void dispose(int identifier) {
+ instanceManager.remove(identifier);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/preview.dart b/packages/camera/camera_android_camerax/lib/src/preview.dart
new file mode 100644
index 0000000..602bcb3
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/preview.dart
@@ -0,0 +1,126 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart' show BinaryMessenger;
+
+import 'camerax_library.g.dart';
+import 'instance_manager.dart';
+import 'java_object.dart';
+import 'use_case.dart';
+
+/// Use case that provides a camera preview stream for display.
+///
+/// See https://developer.android.com/reference/androidx/camera/core/Preview.
+class Preview extends UseCase {
+ /// Creates a [Preview].
+ Preview(
+ {BinaryMessenger? binaryMessenger,
+ InstanceManager? instanceManager,
+ this.targetRotation,
+ this.targetResolution})
+ : super.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager) {
+ _api = PreviewHostApiImpl(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ _api.createFromInstance(this, targetRotation, targetResolution);
+ }
+
+ /// Constructs a [Preview] that is not automatically attached to a native object.
+ Preview.detached(
+ {BinaryMessenger? binaryMessenger,
+ InstanceManager? instanceManager,
+ this.targetRotation,
+ this.targetResolution})
+ : super.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager) {
+ _api = PreviewHostApiImpl(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ }
+
+ late final PreviewHostApiImpl _api;
+
+ /// Target rotation of the camera used for the preview stream.
+ final int? targetRotation;
+
+ /// Target resolution of the camera preview stream.
+ final ResolutionInfo? targetResolution;
+
+ /// Sets the surface provider for the preview stream.
+ ///
+ /// Returns the ID of the FlutterSurfaceTextureEntry used on the native end
+ /// used to display the preview stream on a [Texture] of the same ID.
+ Future<int> setSurfaceProvider() {
+ return _api.setSurfaceProviderFromInstance(this);
+ }
+
+ /// Releases Flutter surface texture used to provide a surface for the preview
+ /// stream.
+ void releaseFlutterSurfaceTexture() {
+ _api.releaseFlutterSurfaceTextureFromInstance();
+ }
+
+ /// Retrieves the selected resolution information of this [Preview].
+ Future<ResolutionInfo> getResolutionInfo() {
+ return _api.getResolutionInfoFromInstance(this);
+ }
+}
+
+/// Host API implementation of [Preview].
+class PreviewHostApiImpl extends PreviewHostApi {
+ /// Constructs a [PreviewHostApiImpl].
+ PreviewHostApiImpl({this.binaryMessenger, InstanceManager? instanceManager}) {
+ this.instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+ }
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Maintains instances stored to communicate with native language objects.
+ late final InstanceManager instanceManager;
+
+ /// Creates a [Preview] with the target rotation provided if specified.
+ void createFromInstance(
+ Preview instance, int? targetRotation, ResolutionInfo? targetResolution) {
+ final int identifier = instanceManager.addDartCreatedInstance(instance,
+ onCopy: (Preview original) {
+ return Preview.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager,
+ targetRotation: original.targetRotation);
+ });
+ create(identifier, targetRotation, targetResolution);
+ }
+
+ /// Sets the surface provider of the specified [Preview] instance and returns
+ /// the ID corresponding to the surface it will provide.
+ Future<int> setSurfaceProviderFromInstance(Preview instance) async {
+ final int? identifier = instanceManager.getIdentifier(instance);
+ assert(identifier != null,
+ 'No Preview has the identifer of that requested to set the surface provider on.');
+
+ final int surfaceTextureEntryId = await setSurfaceProvider(identifier!);
+ return surfaceTextureEntryId;
+ }
+
+ /// Releases Flutter surface texture used to provide a surface for the preview
+ /// stream if a surface provider was set for a [Preview] instance.
+ void releaseFlutterSurfaceTextureFromInstance() {
+ releaseFlutterSurfaceTexture();
+ }
+
+ /// Gets the resolution information of the specified [Preview] instance.
+ Future<ResolutionInfo> getResolutionInfoFromInstance(Preview instance) async {
+ final int? identifier = instanceManager.getIdentifier(instance);
+ assert(identifier != null,
+ 'No Preview has the identifer of that requested to get the resolution information for.');
+
+ final ResolutionInfo resolutionInfo = await getResolutionInfo(identifier!);
+ return resolutionInfo;
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/process_camera_provider.dart b/packages/camera/camera_android_camerax/lib/src/process_camera_provider.dart
new file mode 100644
index 0000000..ed9e820
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/process_camera_provider.dart
@@ -0,0 +1,191 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+
+import 'android_camera_camerax_flutter_api_impls.dart';
+import 'camera.dart';
+import 'camera_info.dart';
+import 'camera_selector.dart';
+import 'camerax_library.g.dart';
+import 'instance_manager.dart';
+import 'java_object.dart';
+import 'use_case.dart';
+
+/// Provides an object to manage the camera.
+///
+/// See https://developer.android.com/reference/androidx/camera/lifecycle/ProcessCameraProvider.
+class ProcessCameraProvider extends JavaObject {
+ /// Creates a detached [ProcessCameraProvider].
+ ProcessCameraProvider.detached(
+ {BinaryMessenger? binaryMessenger, InstanceManager? instanceManager})
+ : super.detached(
+ binaryMessenger: binaryMessenger,
+ instanceManager: instanceManager) {
+ _api = ProcessCameraProviderHostApiImpl(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ AndroidCameraXCameraFlutterApis.instance.ensureSetUp();
+ }
+
+ late final ProcessCameraProviderHostApiImpl _api;
+
+ /// Gets an instance of [ProcessCameraProvider].
+ static Future<ProcessCameraProvider> getInstance(
+ {BinaryMessenger? binaryMessenger, InstanceManager? instanceManager}) {
+ AndroidCameraXCameraFlutterApis.instance.ensureSetUp();
+ final ProcessCameraProviderHostApiImpl api =
+ ProcessCameraProviderHostApiImpl(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+
+ return api.getInstancefromInstances();
+ }
+
+ /// Retrieves the cameras available to the device.
+ Future<List<CameraInfo>> getAvailableCameraInfos() {
+ return _api.getAvailableCameraInfosFromInstances(this);
+ }
+
+ /// Binds the specified [UseCase]s to the lifecycle of the camera that it
+ /// returns.
+ Future<Camera> bindToLifecycle(
+ CameraSelector cameraSelector, List<UseCase> useCases) {
+ return _api.bindToLifecycleFromInstances(this, cameraSelector, useCases);
+ }
+
+ /// Unbinds specified [UseCase]s from the lifecycle of the camera that this
+ /// instance tracks.
+ void unbind(List<UseCase> useCases) {
+ _api.unbindFromInstances(this, useCases);
+ }
+
+ /// Unbinds all previously bound [UseCase]s from the lifecycle of the camera
+ /// that this tracks.
+ void unbindAll() {
+ _api.unbindAllFromInstances(this);
+ }
+}
+
+/// Host API implementation of [ProcessCameraProvider].
+class ProcessCameraProviderHostApiImpl extends ProcessCameraProviderHostApi {
+ /// Creates a [ProcessCameraProviderHostApiImpl].
+ ProcessCameraProviderHostApiImpl(
+ {this.binaryMessenger, InstanceManager? instanceManager})
+ : super(binaryMessenger: binaryMessenger) {
+ this.instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+ }
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Maintains instances stored to communicate with native language objects.
+ late final InstanceManager instanceManager;
+
+ /// Retrieves an instance of a ProcessCameraProvider from the context of
+ /// the FlutterActivity.
+ Future<ProcessCameraProvider> getInstancefromInstances() async {
+ return instanceManager.getInstanceWithWeakReference(await getInstance())!
+ as ProcessCameraProvider;
+ }
+
+ /// Gets identifier that the [instanceManager] has set for
+ /// the [ProcessCameraProvider] instance.
+ int getProcessCameraProviderIdentifier(ProcessCameraProvider instance) {
+ final int? identifier = instanceManager.getIdentifier(instance);
+
+ assert(identifier != null,
+ 'No ProcessCameraProvider has the identifer of that which was requested.');
+ return identifier!;
+ }
+
+ /// Retrieves the list of CameraInfos corresponding to the available cameras.
+ Future<List<CameraInfo>> getAvailableCameraInfosFromInstances(
+ ProcessCameraProvider instance) async {
+ final int identifier = getProcessCameraProviderIdentifier(instance);
+ final List<int?> cameraInfos = await getAvailableCameraInfos(identifier);
+ return cameraInfos
+ .map<CameraInfo>((int? id) =>
+ instanceManager.getInstanceWithWeakReference(id!)! as CameraInfo)
+ .toList();
+ }
+
+ /// Binds the specified [UseCase]s to the lifecycle of the camera which
+ /// the provided [ProcessCameraProvider] instance tracks.
+ ///
+ /// The instance of the camera whose lifecycle the [UseCase]s are bound to
+ /// is returned.
+ Future<Camera> bindToLifecycleFromInstances(
+ ProcessCameraProvider instance,
+ CameraSelector cameraSelector,
+ List<UseCase> useCases,
+ ) async {
+ final int identifier = getProcessCameraProviderIdentifier(instance);
+ final List<int> useCaseIds = useCases
+ .map<int>((UseCase useCase) => instanceManager.getIdentifier(useCase)!)
+ .toList();
+
+ final int cameraIdentifier = await bindToLifecycle(
+ identifier,
+ instanceManager.getIdentifier(cameraSelector)!,
+ useCaseIds,
+ );
+ return instanceManager.getInstanceWithWeakReference(cameraIdentifier)!
+ as Camera;
+ }
+
+ /// Unbinds specified [UseCase]s from the lifecycle of the camera which the
+ /// provided [ProcessCameraProvider] instance tracks.
+ void unbindFromInstances(
+ ProcessCameraProvider instance,
+ List<UseCase> useCases,
+ ) {
+ final int identifier = getProcessCameraProviderIdentifier(instance);
+ final List<int> useCaseIds = useCases
+ .map<int>((UseCase useCase) => instanceManager.getIdentifier(useCase)!)
+ .toList();
+
+ unbind(identifier, useCaseIds);
+ }
+
+ /// Unbinds all previously bound [UseCase]s from the lifecycle of the camera
+ /// which the provided [ProcessCameraProvider] instance tracks.
+ void unbindAllFromInstances(ProcessCameraProvider instance) {
+ final int identifier = getProcessCameraProviderIdentifier(instance);
+ unbindAll(identifier);
+ }
+}
+
+/// Flutter API Implementation of [ProcessCameraProvider].
+class ProcessCameraProviderFlutterApiImpl
+ implements ProcessCameraProviderFlutterApi {
+ /// Constructs a [ProcessCameraProviderFlutterApiImpl].
+ ProcessCameraProviderFlutterApiImpl({
+ this.binaryMessenger,
+ InstanceManager? instanceManager,
+ }) : instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Maintains instances stored to communicate with native language objects.
+ final InstanceManager instanceManager;
+
+ @override
+ void create(int identifier) {
+ instanceManager.addHostCreatedInstance(
+ ProcessCameraProvider.detached(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager),
+ identifier,
+ onCopy: (ProcessCameraProvider original) {
+ return ProcessCameraProvider.detached(
+ binaryMessenger: binaryMessenger, instanceManager: instanceManager);
+ },
+ );
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/surface.dart b/packages/camera/camera_android_camerax/lib/src/surface.dart
new file mode 100644
index 0000000..ea8cf8c
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/surface.dart
@@ -0,0 +1,34 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'java_object.dart';
+
+/// Handle onto the raw buffer managed by screen compositor.
+///
+/// See https://developer.android.com/reference/android/view/Surface.html.
+class Surface extends JavaObject {
+ /// Creates a detached [Surface].
+ Surface.detached({super.binaryMessenger, super.instanceManager})
+ : super.detached();
+
+ /// Rotation constant to signify the natural orientation.
+ ///
+ /// See https://developer.android.com/reference/android/view/Surface.html#ROTATION_0.
+ static const int ROTATION_0 = 0;
+
+ /// Rotation constant to signify a 90 degrees rotation.
+ ///
+ /// See https://developer.android.com/reference/android/view/Surface.html#ROTATION_90.
+ static const int ROTATION_90 = 1;
+
+ /// Rotation constant to signify a 180 degrees rotation.
+ ///
+ /// See https://developer.android.com/reference/android/view/Surface.html#ROTATION_180.
+ static const int ROTATION_180 = 2;
+
+ /// Rotation constant to signify a 270 degrees rotation.
+ ///
+ /// See https://developer.android.com/reference/android/view/Surface.html#ROTATION_270.
+ static const int ROTATION_270 = 3;
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/system_services.dart b/packages/camera/camera_android_camerax/lib/src/system_services.dart
new file mode 100644
index 0000000..e108b61
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/system_services.dart
@@ -0,0 +1,147 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart'
+ show CameraException, DeviceOrientationChangedEvent;
+import 'package:flutter/services.dart';
+
+import 'android_camera_camerax_flutter_api_impls.dart';
+import 'camerax_library.g.dart';
+
+// Ignoring lint indicating this class only contains static members
+// as this class is a wrapper for various Android system services.
+// ignore_for_file: avoid_classes_with_only_static_members
+
+/// Utility class that offers access to Android system services needed for
+/// camera usage and other informational streams.
+class SystemServices {
+ /// Stream that emits the device orientation whenever it is changed.
+ ///
+ /// Values may start being added to the stream once
+ /// `startListeningForDeviceOrientationChange(...)` is called.
+ static final StreamController<DeviceOrientationChangedEvent>
+ deviceOrientationChangedStreamController =
+ StreamController<DeviceOrientationChangedEvent>.broadcast();
+
+ /// Stream that emits the errors caused by camera usage on the native side.
+ static final StreamController<String> cameraErrorStreamController =
+ StreamController<String>.broadcast();
+
+ /// Requests permission to access the camera and audio if specified.
+ static Future<void> requestCameraPermissions(bool enableAudio,
+ {BinaryMessenger? binaryMessenger}) {
+ final SystemServicesHostApiImpl api =
+ SystemServicesHostApiImpl(binaryMessenger: binaryMessenger);
+
+ return api.sendCameraPermissionsRequest(enableAudio);
+ }
+
+ /// Requests that [deviceOrientationChangedStreamController] start
+ /// emitting values for any change in device orientation.
+ static void startListeningForDeviceOrientationChange(
+ bool isFrontFacing, int sensorOrientation,
+ {BinaryMessenger? binaryMessenger}) {
+ AndroidCameraXCameraFlutterApis.instance.ensureSetUp();
+ final SystemServicesHostApi api =
+ SystemServicesHostApi(binaryMessenger: binaryMessenger);
+
+ api.startListeningForDeviceOrientationChange(
+ isFrontFacing, sensorOrientation);
+ }
+
+ /// Stops the [deviceOrientationChangedStreamController] from emitting values
+ /// for changes in device orientation.
+ static void stopListeningForDeviceOrientationChange(
+ {BinaryMessenger? binaryMessenger}) {
+ final SystemServicesHostApi api =
+ SystemServicesHostApi(binaryMessenger: binaryMessenger);
+
+ api.stopListeningForDeviceOrientationChange();
+ }
+}
+
+/// Host API implementation of [SystemServices].
+class SystemServicesHostApiImpl extends SystemServicesHostApi {
+ /// Creates a [SystemServicesHostApiImpl].
+ SystemServicesHostApiImpl({this.binaryMessenger})
+ : super(binaryMessenger: binaryMessenger);
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Requests permission to access the camera and audio if specified.
+ ///
+ /// Will complete normally if permissions are successfully granted; otherwise,
+ /// will throw a [CameraException].
+ Future<void> sendCameraPermissionsRequest(bool enableAudio) async {
+ final CameraPermissionsErrorData? error =
+ await requestCameraPermissions(enableAudio);
+
+ if (error != null) {
+ throw CameraException(
+ error.errorCode,
+ error.description,
+ );
+ }
+ }
+}
+
+/// Flutter API implementation of [SystemServices].
+class SystemServicesFlutterApiImpl implements SystemServicesFlutterApi {
+ /// Constructs a [SystemServicesFlutterApiImpl].
+ SystemServicesFlutterApiImpl({
+ this.binaryMessenger,
+ });
+
+ /// Receives binary data across the Flutter platform barrier.
+ ///
+ /// If it is null, the default BinaryMessenger will be used which routes to
+ /// the host platform.
+ final BinaryMessenger? binaryMessenger;
+
+ /// Callback method for any changes in device orientation.
+ ///
+ /// Will only be called if
+ /// `SystemServices.startListeningForDeviceOrientationChange(...)` was called
+ /// to start listening for device orientation updates.
+ @override
+ void onDeviceOrientationChanged(String orientation) {
+ final DeviceOrientation deviceOrientation =
+ deserializeDeviceOrientation(orientation);
+ if (deviceOrientation == null) {
+ return;
+ }
+ SystemServices.deviceOrientationChangedStreamController
+ .add(DeviceOrientationChangedEvent(deviceOrientation));
+ }
+
+ /// Deserializes device orientation in [String] format into a
+ /// [DeviceOrientation].
+ DeviceOrientation deserializeDeviceOrientation(String orientation) {
+ switch (orientation) {
+ case 'LANDSCAPE_LEFT':
+ return DeviceOrientation.landscapeLeft;
+ case 'LANDSCAPE_RIGHT':
+ return DeviceOrientation.landscapeRight;
+ case 'PORTRAIT_DOWN':
+ return DeviceOrientation.portraitDown;
+ case 'PORTRAIT_UP':
+ return DeviceOrientation.portraitUp;
+ default:
+ throw ArgumentError(
+ '"$orientation" is not a valid DeviceOrientation value');
+ }
+ }
+
+ /// Callback method for any errors caused by camera usage on the Java side.
+ @override
+ void onCameraError(String errorDescription) {
+ SystemServices.cameraErrorStreamController.add(errorDescription);
+ }
+}
diff --git a/packages/camera/camera_android_camerax/lib/src/use_case.dart b/packages/camera/camera_android_camerax/lib/src/use_case.dart
new file mode 100644
index 0000000..f8910d9
--- /dev/null
+++ b/packages/camera/camera_android_camerax/lib/src/use_case.dart
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'java_object.dart';
+
+/// An object representing the different functionalities of the camera.
+///
+/// See https://developer.android.com/reference/androidx/camera/core/UseCase.
+class UseCase extends JavaObject {
+ /// Creates a detached [UseCase].
+ UseCase.detached({super.binaryMessenger, super.instanceManager})
+ : super.detached();
+}
diff --git a/packages/camera/camera_android_camerax/pigeons/camerax_library.dart b/packages/camera/camera_android_camerax/pigeons/camerax_library.dart
new file mode 100644
index 0000000..4172cd7
--- /dev/null
+++ b/packages/camera/camera_android_camerax/pigeons/camerax_library.dart
@@ -0,0 +1,133 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:pigeon/pigeon.dart';
+
+@ConfigurePigeon(
+ PigeonOptions(
+ dartOut: 'lib/src/camerax_library.g.dart',
+ dartTestOut: 'test/test_camerax_library.g.dart',
+ dartOptions: DartOptions(copyrightHeader: <String>[
+ 'Copyright 2013 The Flutter Authors. All rights reserved.',
+ 'Use of this source code is governed by a BSD-style license that can be',
+ 'found in the LICENSE file.',
+ ]),
+ javaOut:
+ 'android/src/main/java/io/flutter/plugins/camerax/GeneratedCameraXLibrary.java',
+ javaOptions: JavaOptions(
+ package: 'io.flutter.plugins.camerax',
+ className: 'GeneratedCameraXLibrary',
+ copyrightHeader: <String>[
+ 'Copyright 2013 The Flutter Authors. All rights reserved.',
+ 'Use of this source code is governed by a BSD-style license that can be',
+ 'found in the LICENSE file.',
+ ],
+ ),
+ ),
+)
+class ResolutionInfo {
+ ResolutionInfo({
+ required this.width,
+ required this.height,
+ });
+
+ int width;
+ int height;
+}
+
+class CameraPermissionsErrorData {
+ CameraPermissionsErrorData({
+ required this.errorCode,
+ required this.description,
+ });
+
+ String errorCode;
+ String description;
+}
+
+@HostApi(dartHostTestHandler: 'TestJavaObjectHostApi')
+abstract class JavaObjectHostApi {
+ void dispose(int identifier);
+}
+
+@FlutterApi()
+abstract class JavaObjectFlutterApi {
+ void dispose(int identifier);
+}
+
+@HostApi(dartHostTestHandler: 'TestCameraInfoHostApi')
+abstract class CameraInfoHostApi {
+ int getSensorRotationDegrees(int identifier);
+}
+
+@FlutterApi()
+abstract class CameraInfoFlutterApi {
+ void create(int identifier);
+}
+
+@HostApi(dartHostTestHandler: 'TestCameraSelectorHostApi')
+abstract class CameraSelectorHostApi {
+ void create(int identifier, int? lensFacing);
+
+ List<int> filter(int identifier, List<int> cameraInfoIds);
+}
+
+@FlutterApi()
+abstract class CameraSelectorFlutterApi {
+ void create(int identifier, int? lensFacing);
+}
+
+@HostApi(dartHostTestHandler: 'TestProcessCameraProviderHostApi')
+abstract class ProcessCameraProviderHostApi {
+ @async
+ int getInstance();
+
+ List<int> getAvailableCameraInfos(int identifier);
+
+ int bindToLifecycle(
+ int identifier, int cameraSelectorIdentifier, List<int> useCaseIds);
+
+ void unbind(int identifier, List<int> useCaseIds);
+
+ void unbindAll(int identifier);
+}
+
+@FlutterApi()
+abstract class ProcessCameraProviderFlutterApi {
+ void create(int identifier);
+}
+
+@FlutterApi()
+abstract class CameraFlutterApi {
+ void create(int identifier);
+}
+
+@HostApi(dartHostTestHandler: 'TestSystemServicesHostApi')
+abstract class SystemServicesHostApi {
+ @async
+ CameraPermissionsErrorData? requestCameraPermissions(bool enableAudio);
+
+ void startListeningForDeviceOrientationChange(
+ bool isFrontFacing, int sensorOrientation);
+
+ void stopListeningForDeviceOrientationChange();
+}
+
+@FlutterApi()
+abstract class SystemServicesFlutterApi {
+ void onDeviceOrientationChanged(String orientation);
+
+ void onCameraError(String errorDescription);
+}
+
+@HostApi(dartHostTestHandler: 'TestPreviewHostApi')
+abstract class PreviewHostApi {
+ void create(int identifier, int? rotation, ResolutionInfo? targetResolution);
+
+ int setSurfaceProvider(int identifier);
+
+ void releaseFlutterSurfaceTexture();
+
+ ResolutionInfo getResolutionInfo(int identifier);
+}
diff --git a/packages/camera/camera_android_camerax/pubspec.yaml b/packages/camera/camera_android_camerax/pubspec.yaml
new file mode 100644
index 0000000..f1496c6
--- /dev/null
+++ b/packages/camera/camera_android_camerax/pubspec.yaml
@@ -0,0 +1,34 @@
+name: camera_android_camerax
+description: Android implementation of the camera plugin using the CameraX library.
+repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_android_camerax
+issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
+publish_to: 'none'
+
+environment:
+ sdk: '>=2.17.0 <3.0.0'
+ flutter: ">=3.0.0"
+
+flutter:
+ plugin:
+ implements: camera
+ platforms:
+ android:
+ package: io.flutter.plugins.camerax
+ pluginClass: CameraAndroidCameraxPlugin
+ dartPluginClass: AndroidCameraCameraX
+
+dependencies:
+ camera_platform_interface: ^2.2.0
+ flutter:
+ sdk: flutter
+ integration_test:
+ sdk: flutter
+ stream_transform: ^2.1.0
+
+dev_dependencies:
+ async: ^2.5.0
+ build_runner: ^2.1.4
+ flutter_test:
+ sdk: flutter
+ mockito: ^5.3.2
+ pigeon: ^3.2.6
diff --git a/packages/camera/camera_android_camerax/test/android_camera_camerax_test.dart b/packages/camera/camera_android_camerax/test/android_camera_camerax_test.dart
new file mode 100644
index 0000000..acfaf16
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/android_camera_camerax_test.dart
@@ -0,0 +1,405 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:async/async.dart';
+import 'package:camera_android_camerax/camera_android_camerax.dart';
+import 'package:camera_android_camerax/src/camera.dart';
+import 'package:camera_android_camerax/src/camera_info.dart';
+import 'package:camera_android_camerax/src/camera_selector.dart';
+import 'package:camera_android_camerax/src/camerax_library.g.dart';
+import 'package:camera_android_camerax/src/preview.dart';
+import 'package:camera_android_camerax/src/process_camera_provider.dart';
+import 'package:camera_android_camerax/src/system_services.dart';
+import 'package:camera_android_camerax/src/use_case.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart' show DeviceOrientation;
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:mockito/annotations.dart';
+import 'package:mockito/mockito.dart';
+
+import 'android_camera_camerax_test.mocks.dart';
+
+@GenerateNiceMocks(<MockSpec<Object>>[
+ MockSpec<Camera>(),
+ MockSpec<CameraInfo>(),
+ MockSpec<CameraSelector>(),
+ MockSpec<Preview>(),
+ MockSpec<ProcessCameraProvider>(),
+])
+@GenerateMocks(<Type>[BuildContext])
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ test('Should fetch CameraDescription instances for available cameras',
+ () async {
+ // Arrange
+ final MockAndroidCameraCamerax camera = MockAndroidCameraCamerax();
+ camera.processCameraProvider = MockProcessCameraProvider();
+ final List<dynamic> returnData = <dynamic>[
+ <String, dynamic>{
+ 'name': 'Camera 0',
+ 'lensFacing': 'back',
+ 'sensorOrientation': 0
+ },
+ <String, dynamic>{
+ 'name': 'Camera 1',
+ 'lensFacing': 'front',
+ 'sensorOrientation': 90
+ }
+ ];
+
+ // Create mocks to use
+ final MockCameraInfo mockFrontCameraInfo = MockCameraInfo();
+ final MockCameraInfo mockBackCameraInfo = MockCameraInfo();
+
+ // Mock calls to native platform
+ when(camera.processCameraProvider!.getAvailableCameraInfos()).thenAnswer(
+ (_) async => <MockCameraInfo>[mockBackCameraInfo, mockFrontCameraInfo]);
+ when(camera.mockBackCameraSelector
+ .filter(<MockCameraInfo>[mockFrontCameraInfo]))
+ .thenAnswer((_) async => <MockCameraInfo>[]);
+ when(camera.mockBackCameraSelector
+ .filter(<MockCameraInfo>[mockBackCameraInfo]))
+ .thenAnswer((_) async => <MockCameraInfo>[mockBackCameraInfo]);
+ when(camera.mockFrontCameraSelector
+ .filter(<MockCameraInfo>[mockBackCameraInfo]))
+ .thenAnswer((_) async => <MockCameraInfo>[]);
+ when(camera.mockFrontCameraSelector
+ .filter(<MockCameraInfo>[mockFrontCameraInfo]))
+ .thenAnswer((_) async => <MockCameraInfo>[mockFrontCameraInfo]);
+ when(mockBackCameraInfo.getSensorRotationDegrees())
+ .thenAnswer((_) async => 0);
+ when(mockFrontCameraInfo.getSensorRotationDegrees())
+ .thenAnswer((_) async => 90);
+
+ final List<CameraDescription> cameraDescriptions =
+ await camera.availableCameras();
+
+ expect(cameraDescriptions.length, returnData.length);
+ for (int i = 0; i < returnData.length; i++) {
+ final Map<String, Object?> typedData =
+ (returnData[i] as Map<dynamic, dynamic>).cast<String, Object?>();
+ final CameraDescription cameraDescription = CameraDescription(
+ name: typedData['name']! as String,
+ lensDirection: (typedData['lensFacing']! as String) == 'front'
+ ? CameraLensDirection.front
+ : CameraLensDirection.back,
+ sensorOrientation: typedData['sensorOrientation']! as int,
+ );
+ expect(cameraDescriptions[i], cameraDescription);
+ }
+ });
+
+ test(
+ 'createCamera requests permissions, starts listening for device orientation changes, and returns flutter surface texture ID',
+ () async {
+ final MockAndroidCameraCamerax camera = MockAndroidCameraCamerax();
+ camera.processCameraProvider = MockProcessCameraProvider();
+ const CameraLensDirection testLensDirection = CameraLensDirection.back;
+ const int testSensorOrientation = 90;
+ const CameraDescription testCameraDescription = CameraDescription(
+ name: 'cameraName',
+ lensDirection: testLensDirection,
+ sensorOrientation: testSensorOrientation);
+ const ResolutionPreset testResolutionPreset = ResolutionPreset.veryHigh;
+ const bool enableAudio = true;
+ const int testSurfaceTextureId = 6;
+
+ when(camera.testPreview.setSurfaceProvider())
+ .thenAnswer((_) async => testSurfaceTextureId);
+
+ expect(
+ await camera.createCamera(testCameraDescription, testResolutionPreset,
+ enableAudio: enableAudio),
+ equals(testSurfaceTextureId));
+
+ // Verify permissions are requested and the camera starts listening for device orientation changes.
+ expect(camera.cameraPermissionsRequested, isTrue);
+ expect(camera.startedListeningForDeviceOrientationChanges, isTrue);
+
+ // Verify CameraSelector is set with appropriate lens direction.
+ expect(camera.cameraSelector, equals(camera.mockBackCameraSelector));
+
+ // Verify the camera's Preview instance is instantiated properly.
+ expect(camera.preview, equals(camera.testPreview));
+
+ // Verify the camera's Preview instance has its surface provider set.
+ verify(camera.preview!.setSurfaceProvider());
+ });
+
+ test(
+ 'initializeCamera throws AssertionError when createCamera has not been called before initializedCamera',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+ expect(() => camera.initializeCamera(3), throwsAssertionError);
+ });
+
+ test('initializeCamera sends expected CameraInitializedEvent', () async {
+ final MockAndroidCameraCamerax camera = MockAndroidCameraCamerax();
+ camera.processCameraProvider = MockProcessCameraProvider();
+ const int cameraId = 10;
+ const CameraLensDirection testLensDirection = CameraLensDirection.back;
+ const int testSensorOrientation = 90;
+ const CameraDescription testCameraDescription = CameraDescription(
+ name: 'cameraName',
+ lensDirection: testLensDirection,
+ sensorOrientation: testSensorOrientation);
+ const ResolutionPreset testResolutionPreset = ResolutionPreset.veryHigh;
+ const bool enableAudio = true;
+ const int resolutionWidth = 350;
+ const int resolutionHeight = 750;
+ final Camera mockCamera = MockCamera();
+ final ResolutionInfo testResolutionInfo =
+ ResolutionInfo(width: resolutionWidth, height: resolutionHeight);
+
+ // TODO(camsim99): Modify this when camera configuration is supported and
+ // default values no longer being used.
+ // https://github.com/flutter/flutter/issues/120468
+ // https://github.com/flutter/flutter/issues/120467
+ final CameraInitializedEvent testCameraInitializedEvent =
+ CameraInitializedEvent(
+ cameraId,
+ resolutionWidth.toDouble(),
+ resolutionHeight.toDouble(),
+ ExposureMode.auto,
+ false,
+ FocusMode.auto,
+ false);
+
+ // Call createCamera.
+ when(camera.testPreview.setSurfaceProvider())
+ .thenAnswer((_) async => cameraId);
+ await camera.createCamera(testCameraDescription, testResolutionPreset,
+ enableAudio: enableAudio);
+
+ when(camera.processCameraProvider!.bindToLifecycle(
+ camera.cameraSelector!, <UseCase>[camera.testPreview]))
+ .thenAnswer((_) async => mockCamera);
+ when(camera.testPreview.getResolutionInfo())
+ .thenAnswer((_) async => testResolutionInfo);
+
+ // Start listening to camera events stream to verify the proper CameraInitializedEvent is sent.
+ camera.cameraEventStreamController.stream.listen((CameraEvent event) {
+ expect(event, const TypeMatcher<CameraInitializedEvent>());
+ expect(event, equals(testCameraInitializedEvent));
+ });
+
+ await camera.initializeCamera(cameraId);
+
+ // Verify preview was bound and unbound to get preview resolution information.
+ verify(camera.processCameraProvider!.bindToLifecycle(
+ camera.cameraSelector!, <UseCase>[camera.testPreview]));
+ verify(camera.processCameraProvider!.unbind(<UseCase>[camera.testPreview]));
+
+ // Check camera instance was received, but preview is no longer bound.
+ expect(camera.camera, equals(mockCamera));
+ expect(camera.previewIsBound, isFalse);
+ });
+
+ test('dispose releases Flutter surface texture and unbinds all use cases',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+
+ camera.preview = MockPreview();
+ camera.processCameraProvider = MockProcessCameraProvider();
+
+ camera.dispose(3);
+
+ verify(camera.preview!.releaseFlutterSurfaceTexture());
+ verify(camera.processCameraProvider!.unbindAll());
+ });
+
+ test('onCameraInitialized stream emits CameraInitializedEvents', () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+ const int cameraId = 16;
+ final Stream<CameraInitializedEvent> eventStream =
+ camera.onCameraInitialized(cameraId);
+ final StreamQueue<CameraInitializedEvent> streamQueue =
+ StreamQueue<CameraInitializedEvent>(eventStream);
+ const CameraInitializedEvent testEvent = CameraInitializedEvent(
+ cameraId, 320, 80, ExposureMode.auto, false, FocusMode.auto, false);
+
+ camera.cameraEventStreamController.add(testEvent);
+
+ expect(await streamQueue.next, testEvent);
+ await streamQueue.cancel();
+ });
+
+ test('onCameraError stream emits errors caught by system services', () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+ const int cameraId = 27;
+ const String testErrorDescription = 'Test error description!';
+ final Stream<CameraErrorEvent> eventStream = camera.onCameraError(cameraId);
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ SystemServices.cameraErrorStreamController.add(testErrorDescription);
+
+ expect(await streamQueue.next,
+ equals(const CameraErrorEvent(cameraId, testErrorDescription)));
+ await streamQueue.cancel();
+ });
+
+ test(
+ 'onDeviceOrientationChanged stream emits changes in device oreintation detected by system services',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ camera.onDeviceOrientationChanged();
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+ const DeviceOrientationChangedEvent testEvent =
+ DeviceOrientationChangedEvent(DeviceOrientation.portraitDown);
+
+ SystemServices.deviceOrientationChangedStreamController.add(testEvent);
+
+ expect(await streamQueue.next, testEvent);
+ await streamQueue.cancel();
+ });
+
+ test(
+ 'pausePreview unbinds preview from lifecycle when preview is nonnull and has been bound to lifecycle',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+
+ camera.processCameraProvider = MockProcessCameraProvider();
+ camera.preview = MockPreview();
+ camera.previewIsBound = true;
+
+ await camera.pausePreview(579);
+
+ verify(camera.processCameraProvider!.unbind(<UseCase>[camera.preview!]));
+ expect(camera.previewIsBound, isFalse);
+ });
+
+ test(
+ 'pausePreview does not unbind preview from lifecycle when preview has not been bound to lifecycle',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+
+ camera.processCameraProvider = MockProcessCameraProvider();
+ camera.preview = MockPreview();
+
+ await camera.pausePreview(632);
+
+ verifyNever(
+ camera.processCameraProvider!.unbind(<UseCase>[camera.preview!]));
+ });
+
+ test('resumePreview does not bind preview to lifecycle if already bound',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+
+ camera.processCameraProvider = MockProcessCameraProvider();
+ camera.cameraSelector = MockCameraSelector();
+ camera.preview = MockPreview();
+ camera.previewIsBound = true;
+
+ await camera.resumePreview(78);
+
+ verifyNever(camera.processCameraProvider!
+ .bindToLifecycle(camera.cameraSelector!, <UseCase>[camera.preview!]));
+ });
+
+ test('resumePreview binds preview to lifecycle if not already bound',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+
+ camera.processCameraProvider = MockProcessCameraProvider();
+ camera.cameraSelector = MockCameraSelector();
+ camera.preview = MockPreview();
+
+ await camera.resumePreview(78);
+
+ verify(camera.processCameraProvider!
+ .bindToLifecycle(camera.cameraSelector!, <UseCase>[camera.preview!]));
+ });
+
+ test(
+ 'buildPreview returns a FutureBuilder that does not return a Texture until the preview is bound to the lifecycle',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+ const int textureId = 75;
+
+ camera.processCameraProvider = MockProcessCameraProvider();
+ camera.cameraSelector = MockCameraSelector();
+ camera.preview = MockPreview();
+
+ final FutureBuilder<void> previewWidget =
+ camera.buildPreview(textureId) as FutureBuilder<void>;
+
+ expect(
+ previewWidget.builder(
+ MockBuildContext(), const AsyncSnapshot<void>.nothing()),
+ isA<SizedBox>());
+ expect(
+ previewWidget.builder(
+ MockBuildContext(), const AsyncSnapshot<void>.waiting()),
+ isA<SizedBox>());
+ expect(
+ previewWidget.builder(MockBuildContext(),
+ const AsyncSnapshot<void>.withData(ConnectionState.active, null)),
+ isA<SizedBox>());
+ });
+
+ test(
+ 'buildPreview returns a FutureBuilder that returns a Texture once the preview is bound to the lifecycle',
+ () async {
+ final AndroidCameraCameraX camera = AndroidCameraCameraX();
+ const int textureId = 75;
+
+ camera.processCameraProvider = MockProcessCameraProvider();
+ camera.cameraSelector = MockCameraSelector();
+ camera.preview = MockPreview();
+
+ final FutureBuilder<void> previewWidget =
+ camera.buildPreview(textureId) as FutureBuilder<void>;
+
+ final Texture previewTexture = previewWidget.builder(MockBuildContext(),
+ const AsyncSnapshot<void>.withData(ConnectionState.done, null))
+ as Texture;
+ expect(previewTexture.textureId, equals(textureId));
+ });
+}
+
+/// Mock of [AndroidCameraCameraX] that stubs behavior of some methods for
+/// testing.
+class MockAndroidCameraCamerax extends AndroidCameraCameraX {
+ bool cameraPermissionsRequested = false;
+ bool startedListeningForDeviceOrientationChanges = false;
+ final MockPreview testPreview = MockPreview();
+ final MockCameraSelector mockBackCameraSelector = MockCameraSelector();
+ final MockCameraSelector mockFrontCameraSelector = MockCameraSelector();
+
+ @override
+ Future<void> requestCameraPermissions(bool enableAudio) async {
+ cameraPermissionsRequested = true;
+ }
+
+ @override
+ void startListeningForDeviceOrientationChange(
+ bool cameraIsFrontFacing, int sensorOrientation) {
+ startedListeningForDeviceOrientationChanges = true;
+ return;
+ }
+
+ @override
+ CameraSelector createCameraSelector(int cameraSelectorLensDirection) {
+ switch (cameraSelectorLensDirection) {
+ case CameraSelector.lensFacingFront:
+ return mockFrontCameraSelector;
+ case CameraSelector.lensFacingBack:
+ default:
+ return mockBackCameraSelector;
+ }
+ }
+
+ @override
+ Preview createPreview(int targetRotation, ResolutionInfo? targetResolution) {
+ return testPreview;
+ }
+}
diff --git a/packages/camera/camera_android_camerax/test/android_camera_camerax_test.mocks.dart b/packages/camera/camera_android_camerax/test/android_camera_camerax_test.mocks.dart
new file mode 100644
index 0000000..af225a1
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/android_camera_camerax_test.mocks.dart
@@ -0,0 +1,389 @@
+// Mocks generated by Mockito 5.3.2 from annotations
+// in camera_android_camerax/test/android_camera_camerax_test.dart.
+// Do not manually edit this file.
+
+// ignore_for_file: no_leading_underscores_for_library_prefixes
+import 'dart:async' as _i8;
+
+import 'package:camera_android_camerax/src/camera.dart' as _i3;
+import 'package:camera_android_camerax/src/camera_info.dart' as _i7;
+import 'package:camera_android_camerax/src/camera_selector.dart' as _i9;
+import 'package:camera_android_camerax/src/camerax_library.g.dart' as _i2;
+import 'package:camera_android_camerax/src/preview.dart' as _i10;
+import 'package:camera_android_camerax/src/process_camera_provider.dart'
+ as _i11;
+import 'package:camera_android_camerax/src/use_case.dart' as _i12;
+import 'package:flutter/foundation.dart' as _i6;
+import 'package:flutter/services.dart' as _i5;
+import 'package:flutter/src/widgets/framework.dart' as _i4;
+import 'package:flutter/src/widgets/notification_listener.dart' as _i13;
+import 'package:mockito/mockito.dart' as _i1;
+
+// ignore_for_file: type=lint
+// ignore_for_file: avoid_redundant_argument_values
+// ignore_for_file: avoid_setters_without_getters
+// ignore_for_file: comment_references
+// ignore_for_file: implementation_imports
+// ignore_for_file: invalid_use_of_visible_for_testing_member
+// ignore_for_file: prefer_const_constructors
+// ignore_for_file: unnecessary_parenthesis
+// ignore_for_file: camel_case_types
+// ignore_for_file: subtype_of_sealed_class
+
+class _FakeResolutionInfo_0 extends _i1.SmartFake
+ implements _i2.ResolutionInfo {
+ _FakeResolutionInfo_0(
+ Object parent,
+ Invocation parentInvocation,
+ ) : super(
+ parent,
+ parentInvocation,
+ );
+}
+
+class _FakeCamera_1 extends _i1.SmartFake implements _i3.Camera {
+ _FakeCamera_1(
+ Object parent,
+ Invocation parentInvocation,
+ ) : super(
+ parent,
+ parentInvocation,
+ );
+}
+
+class _FakeWidget_2 extends _i1.SmartFake implements _i4.Widget {
+ _FakeWidget_2(
+ Object parent,
+ Invocation parentInvocation,
+ ) : super(
+ parent,
+ parentInvocation,
+ );
+
+ @override
+ String toString({_i5.DiagnosticLevel? minLevel = _i5.DiagnosticLevel.info}) =>
+ super.toString();
+}
+
+class _FakeInheritedWidget_3 extends _i1.SmartFake
+ implements _i4.InheritedWidget {
+ _FakeInheritedWidget_3(
+ Object parent,
+ Invocation parentInvocation,
+ ) : super(
+ parent,
+ parentInvocation,
+ );
+
+ @override
+ String toString({_i5.DiagnosticLevel? minLevel = _i5.DiagnosticLevel.info}) =>
+ super.toString();
+}
+
+class _FakeDiagnosticsNode_4 extends _i1.SmartFake
+ implements _i6.DiagnosticsNode {
+ _FakeDiagnosticsNode_4(
+ Object parent,
+ Invocation parentInvocation,
+ ) : super(
+ parent,
+ parentInvocation,
+ );
+
+ @override
+ String toString({
+ _i6.TextTreeConfiguration? parentConfiguration,
+ _i5.DiagnosticLevel? minLevel = _i5.DiagnosticLevel.info,
+ }) =>
+ super.toString();
+}
+
+/// A class which mocks [Camera].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockCamera extends _i1.Mock implements _i3.Camera {}
+
+/// A class which mocks [CameraInfo].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockCameraInfo extends _i1.Mock implements _i7.CameraInfo {
+ @override
+ _i8.Future<int> getSensorRotationDegrees() => (super.noSuchMethod(
+ Invocation.method(
+ #getSensorRotationDegrees,
+ [],
+ ),
+ returnValue: _i8.Future<int>.value(0),
+ returnValueForMissingStub: _i8.Future<int>.value(0),
+ ) as _i8.Future<int>);
+}
+
+/// A class which mocks [CameraSelector].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockCameraSelector extends _i1.Mock implements _i9.CameraSelector {
+ @override
+ _i8.Future<List<_i7.CameraInfo>> filter(List<_i7.CameraInfo>? cameraInfos) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #filter,
+ [cameraInfos],
+ ),
+ returnValue: _i8.Future<List<_i7.CameraInfo>>.value(<_i7.CameraInfo>[]),
+ returnValueForMissingStub:
+ _i8.Future<List<_i7.CameraInfo>>.value(<_i7.CameraInfo>[]),
+ ) as _i8.Future<List<_i7.CameraInfo>>);
+}
+
+/// A class which mocks [Preview].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockPreview extends _i1.Mock implements _i10.Preview {
+ @override
+ _i8.Future<int> setSurfaceProvider() => (super.noSuchMethod(
+ Invocation.method(
+ #setSurfaceProvider,
+ [],
+ ),
+ returnValue: _i8.Future<int>.value(0),
+ returnValueForMissingStub: _i8.Future<int>.value(0),
+ ) as _i8.Future<int>);
+ @override
+ void releaseFlutterSurfaceTexture() => super.noSuchMethod(
+ Invocation.method(
+ #releaseFlutterSurfaceTexture,
+ [],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ _i8.Future<_i2.ResolutionInfo> getResolutionInfo() => (super.noSuchMethod(
+ Invocation.method(
+ #getResolutionInfo,
+ [],
+ ),
+ returnValue: _i8.Future<_i2.ResolutionInfo>.value(_FakeResolutionInfo_0(
+ this,
+ Invocation.method(
+ #getResolutionInfo,
+ [],
+ ),
+ )),
+ returnValueForMissingStub:
+ _i8.Future<_i2.ResolutionInfo>.value(_FakeResolutionInfo_0(
+ this,
+ Invocation.method(
+ #getResolutionInfo,
+ [],
+ ),
+ )),
+ ) as _i8.Future<_i2.ResolutionInfo>);
+}
+
+/// A class which mocks [ProcessCameraProvider].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockProcessCameraProvider extends _i1.Mock
+ implements _i11.ProcessCameraProvider {
+ @override
+ _i8.Future<List<_i7.CameraInfo>> getAvailableCameraInfos() =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #getAvailableCameraInfos,
+ [],
+ ),
+ returnValue: _i8.Future<List<_i7.CameraInfo>>.value(<_i7.CameraInfo>[]),
+ returnValueForMissingStub:
+ _i8.Future<List<_i7.CameraInfo>>.value(<_i7.CameraInfo>[]),
+ ) as _i8.Future<List<_i7.CameraInfo>>);
+ @override
+ _i8.Future<_i3.Camera> bindToLifecycle(
+ _i9.CameraSelector? cameraSelector,
+ List<_i12.UseCase>? useCases,
+ ) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #bindToLifecycle,
+ [
+ cameraSelector,
+ useCases,
+ ],
+ ),
+ returnValue: _i8.Future<_i3.Camera>.value(_FakeCamera_1(
+ this,
+ Invocation.method(
+ #bindToLifecycle,
+ [
+ cameraSelector,
+ useCases,
+ ],
+ ),
+ )),
+ returnValueForMissingStub: _i8.Future<_i3.Camera>.value(_FakeCamera_1(
+ this,
+ Invocation.method(
+ #bindToLifecycle,
+ [
+ cameraSelector,
+ useCases,
+ ],
+ ),
+ )),
+ ) as _i8.Future<_i3.Camera>);
+ @override
+ void unbind(List<_i12.UseCase>? useCases) => super.noSuchMethod(
+ Invocation.method(
+ #unbind,
+ [useCases],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ void unbindAll() => super.noSuchMethod(
+ Invocation.method(
+ #unbindAll,
+ [],
+ ),
+ returnValueForMissingStub: null,
+ );
+}
+
+/// A class which mocks [BuildContext].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockBuildContext extends _i1.Mock implements _i4.BuildContext {
+ MockBuildContext() {
+ _i1.throwOnMissingStub(this);
+ }
+
+ @override
+ _i4.Widget get widget => (super.noSuchMethod(
+ Invocation.getter(#widget),
+ returnValue: _FakeWidget_2(
+ this,
+ Invocation.getter(#widget),
+ ),
+ ) as _i4.Widget);
+ @override
+ bool get mounted => (super.noSuchMethod(
+ Invocation.getter(#mounted),
+ returnValue: false,
+ ) as bool);
+ @override
+ bool get debugDoingBuild => (super.noSuchMethod(
+ Invocation.getter(#debugDoingBuild),
+ returnValue: false,
+ ) as bool);
+ @override
+ _i4.InheritedWidget dependOnInheritedElement(
+ _i4.InheritedElement? ancestor, {
+ Object? aspect,
+ }) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #dependOnInheritedElement,
+ [ancestor],
+ {#aspect: aspect},
+ ),
+ returnValue: _FakeInheritedWidget_3(
+ this,
+ Invocation.method(
+ #dependOnInheritedElement,
+ [ancestor],
+ {#aspect: aspect},
+ ),
+ ),
+ ) as _i4.InheritedWidget);
+ @override
+ void visitAncestorElements(bool Function(_i4.Element)? visitor) =>
+ super.noSuchMethod(
+ Invocation.method(
+ #visitAncestorElements,
+ [visitor],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ void visitChildElements(_i4.ElementVisitor? visitor) => super.noSuchMethod(
+ Invocation.method(
+ #visitChildElements,
+ [visitor],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ void dispatchNotification(_i13.Notification? notification) =>
+ super.noSuchMethod(
+ Invocation.method(
+ #dispatchNotification,
+ [notification],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ _i6.DiagnosticsNode describeElement(
+ String? name, {
+ _i6.DiagnosticsTreeStyle? style = _i6.DiagnosticsTreeStyle.errorProperty,
+ }) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #describeElement,
+ [name],
+ {#style: style},
+ ),
+ returnValue: _FakeDiagnosticsNode_4(
+ this,
+ Invocation.method(
+ #describeElement,
+ [name],
+ {#style: style},
+ ),
+ ),
+ ) as _i6.DiagnosticsNode);
+ @override
+ _i6.DiagnosticsNode describeWidget(
+ String? name, {
+ _i6.DiagnosticsTreeStyle? style = _i6.DiagnosticsTreeStyle.errorProperty,
+ }) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #describeWidget,
+ [name],
+ {#style: style},
+ ),
+ returnValue: _FakeDiagnosticsNode_4(
+ this,
+ Invocation.method(
+ #describeWidget,
+ [name],
+ {#style: style},
+ ),
+ ),
+ ) as _i6.DiagnosticsNode);
+ @override
+ List<_i6.DiagnosticsNode> describeMissingAncestor(
+ {required Type? expectedAncestorType}) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #describeMissingAncestor,
+ [],
+ {#expectedAncestorType: expectedAncestorType},
+ ),
+ returnValue: <_i6.DiagnosticsNode>[],
+ ) as List<_i6.DiagnosticsNode>);
+ @override
+ _i6.DiagnosticsNode describeOwnershipChain(String? name) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #describeOwnershipChain,
+ [name],
+ ),
+ returnValue: _FakeDiagnosticsNode_4(
+ this,
+ Invocation.method(
+ #describeOwnershipChain,
+ [name],
+ ),
+ ),
+ ) as _i6.DiagnosticsNode);
+}
diff --git a/packages/camera/camera_android_camerax/test/camera_info_test.dart b/packages/camera/camera_android_camerax/test/camera_info_test.dart
new file mode 100644
index 0000000..852c799
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/camera_info_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android_camerax/src/camera_info.dart';
+import 'package:camera_android_camerax/src/camerax_library.g.dart';
+import 'package:camera_android_camerax/src/instance_manager.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:mockito/annotations.dart';
+import 'package:mockito/mockito.dart';
+
+import 'camera_info_test.mocks.dart';
+import 'test_camerax_library.g.dart';
+
+@GenerateMocks(<Type>[TestCameraInfoHostApi])
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraInfo', () {
+ tearDown(() => TestCameraInfoHostApi.setup(null));
+
+ test('getSensorRotationDegreesTest', () async {
+ final MockTestCameraInfoHostApi mockApi = MockTestCameraInfoHostApi();
+ TestCameraInfoHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final CameraInfo cameraInfo = CameraInfo.detached(
+ instanceManager: instanceManager,
+ );
+ instanceManager.addHostCreatedInstance(
+ cameraInfo,
+ 0,
+ onCopy: (_) => CameraInfo.detached(),
+ );
+
+ when(mockApi.getSensorRotationDegrees(
+ instanceManager.getIdentifier(cameraInfo)))
+ .thenReturn(90);
+ expect(await cameraInfo.getSensorRotationDegrees(), equals(90));
+
+ verify(mockApi.getSensorRotationDegrees(0));
+ });
+
+ test('flutterApiCreateTest', () {
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final CameraInfoFlutterApi flutterApi = CameraInfoFlutterApiImpl(
+ instanceManager: instanceManager,
+ );
+
+ flutterApi.create(0);
+
+ expect(
+ instanceManager.getInstanceWithWeakReference(0), isA<CameraInfo>());
+ });
+ });
+}
diff --git a/packages/camera/camera_android_camerax/test/camera_info_test.mocks.dart b/packages/camera/camera_android_camerax/test/camera_info_test.mocks.dart
new file mode 100644
index 0000000..5e558a8
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/camera_info_test.mocks.dart
@@ -0,0 +1,38 @@
+// Mocks generated by Mockito 5.3.2 from annotations
+// in camera_android_camerax/test/camera_info_test.dart.
+// Do not manually edit this file.
+
+// ignore_for_file: no_leading_underscores_for_library_prefixes
+import 'package:mockito/mockito.dart' as _i1;
+
+import 'test_camerax_library.g.dart' as _i2;
+
+// ignore_for_file: type=lint
+// ignore_for_file: avoid_redundant_argument_values
+// ignore_for_file: avoid_setters_without_getters
+// ignore_for_file: comment_references
+// ignore_for_file: implementation_imports
+// ignore_for_file: invalid_use_of_visible_for_testing_member
+// ignore_for_file: prefer_const_constructors
+// ignore_for_file: unnecessary_parenthesis
+// ignore_for_file: camel_case_types
+// ignore_for_file: subtype_of_sealed_class
+
+/// A class which mocks [TestCameraInfoHostApi].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockTestCameraInfoHostApi extends _i1.Mock
+ implements _i2.TestCameraInfoHostApi {
+ MockTestCameraInfoHostApi() {
+ _i1.throwOnMissingStub(this);
+ }
+
+ @override
+ int getSensorRotationDegrees(int? identifier) => (super.noSuchMethod(
+ Invocation.method(
+ #getSensorRotationDegrees,
+ [identifier],
+ ),
+ returnValue: 0,
+ ) as int);
+}
diff --git a/packages/camera/camera_android_camerax/test/camera_selector_test.dart b/packages/camera/camera_android_camerax/test/camera_selector_test.dart
new file mode 100644
index 0000000..52f9a18
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/camera_selector_test.dart
@@ -0,0 +1,121 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android_camerax/src/camera_info.dart';
+import 'package:camera_android_camerax/src/camera_selector.dart';
+import 'package:camera_android_camerax/src/camerax_library.g.dart';
+import 'package:camera_android_camerax/src/instance_manager.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:mockito/annotations.dart';
+import 'package:mockito/mockito.dart';
+
+import 'camera_selector_test.mocks.dart';
+import 'test_camerax_library.g.dart';
+
+@GenerateMocks(<Type>[TestCameraSelectorHostApi])
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraSelector', () {
+ tearDown(() => TestCameraSelectorHostApi.setup(null));
+
+ test('detachedCreateTest', () async {
+ final MockTestCameraSelectorHostApi mockApi =
+ MockTestCameraSelectorHostApi();
+ TestCameraSelectorHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ CameraSelector.detached(
+ instanceManager: instanceManager,
+ );
+
+ verifyNever(mockApi.create(argThat(isA<int>()), null));
+ });
+
+ test('createTestWithoutLensSpecified', () async {
+ final MockTestCameraSelectorHostApi mockApi =
+ MockTestCameraSelectorHostApi();
+ TestCameraSelectorHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ CameraSelector(
+ instanceManager: instanceManager,
+ );
+
+ verify(mockApi.create(argThat(isA<int>()), null));
+ });
+
+ test('createTestWithLensSpecified', () async {
+ final MockTestCameraSelectorHostApi mockApi =
+ MockTestCameraSelectorHostApi();
+ TestCameraSelectorHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ CameraSelector(
+ instanceManager: instanceManager,
+ lensFacing: CameraSelector.lensFacingBack);
+
+ verify(
+ mockApi.create(argThat(isA<int>()), CameraSelector.lensFacingBack));
+ });
+
+ test('filterTest', () async {
+ final MockTestCameraSelectorHostApi mockApi =
+ MockTestCameraSelectorHostApi();
+ TestCameraSelectorHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final CameraSelector cameraSelector = CameraSelector.detached(
+ instanceManager: instanceManager,
+ );
+ const int cameraInfoId = 3;
+ final CameraInfo cameraInfo =
+ CameraInfo.detached(instanceManager: instanceManager);
+
+ instanceManager.addHostCreatedInstance(
+ cameraSelector,
+ 0,
+ onCopy: (_) => CameraSelector.detached(),
+ );
+ instanceManager.addHostCreatedInstance(
+ cameraInfo,
+ cameraInfoId,
+ onCopy: (_) => CameraInfo.detached(),
+ );
+
+ when(mockApi.filter(instanceManager.getIdentifier(cameraSelector),
+ <int>[cameraInfoId])).thenReturn(<int>[cameraInfoId]);
+ expect(await cameraSelector.filter(<CameraInfo>[cameraInfo]),
+ equals(<CameraInfo>[cameraInfo]));
+
+ verify(mockApi.filter(0, <int>[cameraInfoId]));
+ });
+
+ test('flutterApiCreateTest', () {
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final CameraSelectorFlutterApi flutterApi = CameraSelectorFlutterApiImpl(
+ instanceManager: instanceManager,
+ );
+
+ flutterApi.create(0, CameraSelector.lensFacingBack);
+
+ expect(instanceManager.getInstanceWithWeakReference(0),
+ isA<CameraSelector>());
+ expect(
+ (instanceManager.getInstanceWithWeakReference(0)! as CameraSelector)
+ .lensFacing,
+ equals(CameraSelector.lensFacingBack));
+ });
+ });
+}
diff --git a/packages/camera/camera_android_camerax/test/camera_selector_test.mocks.dart b/packages/camera/camera_android_camerax/test/camera_selector_test.mocks.dart
new file mode 100644
index 0000000..31dce51
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/camera_selector_test.mocks.dart
@@ -0,0 +1,60 @@
+// Mocks generated by Mockito 5.3.2 from annotations
+// in camera_android_camerax/test/camera_selector_test.dart.
+// Do not manually edit this file.
+
+// ignore_for_file: no_leading_underscores_for_library_prefixes
+import 'package:mockito/mockito.dart' as _i1;
+
+import 'test_camerax_library.g.dart' as _i2;
+
+// ignore_for_file: type=lint
+// ignore_for_file: avoid_redundant_argument_values
+// ignore_for_file: avoid_setters_without_getters
+// ignore_for_file: comment_references
+// ignore_for_file: implementation_imports
+// ignore_for_file: invalid_use_of_visible_for_testing_member
+// ignore_for_file: prefer_const_constructors
+// ignore_for_file: unnecessary_parenthesis
+// ignore_for_file: camel_case_types
+// ignore_for_file: subtype_of_sealed_class
+
+/// A class which mocks [TestCameraSelectorHostApi].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockTestCameraSelectorHostApi extends _i1.Mock
+ implements _i2.TestCameraSelectorHostApi {
+ MockTestCameraSelectorHostApi() {
+ _i1.throwOnMissingStub(this);
+ }
+
+ @override
+ void create(
+ int? identifier,
+ int? lensFacing,
+ ) =>
+ super.noSuchMethod(
+ Invocation.method(
+ #create,
+ [
+ identifier,
+ lensFacing,
+ ],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ List<int?> filter(
+ int? identifier,
+ List<int?>? cameraInfoIds,
+ ) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #filter,
+ [
+ identifier,
+ cameraInfoIds,
+ ],
+ ),
+ returnValue: <int?>[],
+ ) as List<int?>);
+}
diff --git a/packages/camera/camera_android_camerax/test/camera_test.dart b/packages/camera/camera_android_camerax/test/camera_test.dart
new file mode 100644
index 0000000..c294828
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/camera_test.dart
@@ -0,0 +1,26 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android_camerax/src/camera.dart';
+import 'package:camera_android_camerax/src/instance_manager.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('Camera', () {
+ test('flutterApiCreateTest', () {
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final CameraFlutterApiImpl flutterApi = CameraFlutterApiImpl(
+ instanceManager: instanceManager,
+ );
+
+ flutterApi.create(0);
+
+ expect(instanceManager.getInstanceWithWeakReference(0), isA<Camera>());
+ });
+ });
+}
diff --git a/packages/camera/camera_android_camerax/test/instance_manager_test.dart b/packages/camera/camera_android_camerax/test/instance_manager_test.dart
new file mode 100644
index 0000000..9562c41
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/instance_manager_test.dart
@@ -0,0 +1,174 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android_camerax/src/instance_manager.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ group('InstanceManager', () {
+ test('addHostCreatedInstance', () {
+ final Object object = Object();
+
+ final InstanceManager instanceManager =
+ InstanceManager(onWeakReferenceRemoved: (_) {});
+
+ instanceManager.addHostCreatedInstance(
+ object,
+ 0,
+ onCopy: (_) => Object(),
+ );
+
+ expect(instanceManager.getIdentifier(object), 0);
+ expect(
+ instanceManager.getInstanceWithWeakReference(0),
+ object,
+ );
+ });
+
+ test('addHostCreatedInstance prevents already used objects and ids', () {
+ final Object object = Object();
+
+ final InstanceManager instanceManager =
+ InstanceManager(onWeakReferenceRemoved: (_) {});
+
+ instanceManager.addHostCreatedInstance(
+ object,
+ 0,
+ onCopy: (_) => Object(),
+ );
+
+ expect(
+ () => instanceManager.addHostCreatedInstance(
+ object,
+ 0,
+ onCopy: (_) => Object(),
+ ),
+ throwsAssertionError,
+ );
+
+ expect(
+ () => instanceManager.addHostCreatedInstance(
+ Object(),
+ 0,
+ onCopy: (_) => Object(),
+ ),
+ throwsAssertionError,
+ );
+ });
+
+ test('addDartCreatedInstance', () {
+ final Object object = Object();
+
+ final InstanceManager instanceManager =
+ InstanceManager(onWeakReferenceRemoved: (_) {});
+
+ instanceManager.addDartCreatedInstance(
+ object,
+ onCopy: (_) => Object(),
+ );
+
+ final int? instanceId = instanceManager.getIdentifier(object);
+ expect(instanceId, isNotNull);
+ expect(
+ instanceManager.getInstanceWithWeakReference(instanceId!),
+ object,
+ );
+ });
+
+ test('removeWeakReference', () {
+ final Object object = Object();
+
+ int? weakInstanceId;
+ final InstanceManager instanceManager =
+ InstanceManager(onWeakReferenceRemoved: (int instanceId) {
+ weakInstanceId = instanceId;
+ });
+
+ instanceManager.addHostCreatedInstance(
+ object,
+ 0,
+ onCopy: (_) => Object(),
+ );
+
+ expect(instanceManager.removeWeakReference(object), 0);
+ expect(
+ instanceManager.getInstanceWithWeakReference(0),
+ isA<Object>(),
+ );
+ expect(weakInstanceId, 0);
+ });
+
+ test('removeWeakReference removes only weak reference', () {
+ final Object object = Object();
+
+ final InstanceManager instanceManager =
+ InstanceManager(onWeakReferenceRemoved: (_) {});
+
+ instanceManager.addHostCreatedInstance(
+ object,
+ 0,
+ onCopy: (_) => Object(),
+ );
+
+ expect(instanceManager.removeWeakReference(object), 0);
+ final Object copy = instanceManager.getInstanceWithWeakReference(
+ 0,
+ )!;
+ expect(identical(object, copy), isFalse);
+ });
+
+ test('removeStrongReference', () {
+ final Object object = Object();
+
+ final InstanceManager instanceManager =
+ InstanceManager(onWeakReferenceRemoved: (_) {});
+
+ instanceManager.addHostCreatedInstance(
+ object,
+ 0,
+ onCopy: (_) => Object(),
+ );
+ instanceManager.removeWeakReference(object);
+ expect(instanceManager.remove(0), isA<Object>());
+ expect(instanceManager.containsIdentifier(0), isFalse);
+ });
+
+ test('removeStrongReference removes only strong reference', () {
+ final Object object = Object();
+
+ final InstanceManager instanceManager =
+ InstanceManager(onWeakReferenceRemoved: (_) {});
+
+ instanceManager.addHostCreatedInstance(
+ object,
+ 0,
+ onCopy: (_) => Object(),
+ );
+ expect(instanceManager.remove(0), isA<Object>());
+ expect(
+ instanceManager.getInstanceWithWeakReference(0),
+ object,
+ );
+ });
+
+ test('getInstance can add a new weak reference', () {
+ final Object object = Object();
+
+ final InstanceManager instanceManager =
+ InstanceManager(onWeakReferenceRemoved: (_) {});
+
+ instanceManager.addHostCreatedInstance(
+ object,
+ 0,
+ onCopy: (_) => Object(),
+ );
+ instanceManager.removeWeakReference(object);
+
+ final Object newWeakCopy = instanceManager.getInstanceWithWeakReference(
+ 0,
+ )!;
+ expect(identical(object, newWeakCopy), isFalse);
+ });
+ });
+}
diff --git a/packages/camera/camera_android_camerax/test/preview_test.dart b/packages/camera/camera_android_camerax/test/preview_test.dart
new file mode 100644
index 0000000..36b56f0
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/preview_test.dart
@@ -0,0 +1,138 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android_camerax/src/camerax_library.g.dart';
+import 'package:camera_android_camerax/src/instance_manager.dart';
+import 'package:camera_android_camerax/src/preview.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:mockito/annotations.dart';
+import 'package:mockito/mockito.dart';
+
+import 'preview_test.mocks.dart';
+import 'test_camerax_library.g.dart';
+
+@GenerateMocks(<Type>[TestPreviewHostApi])
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('Preview', () {
+ tearDown(() => TestPreviewHostApi.setup(null));
+
+ test('detached create does not call create on the Java side', () async {
+ final MockTestPreviewHostApi mockApi = MockTestPreviewHostApi();
+ TestPreviewHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ Preview.detached(
+ instanceManager: instanceManager,
+ targetRotation: 90,
+ targetResolution: ResolutionInfo(width: 50, height: 10),
+ );
+
+ verifyNever(mockApi.create(argThat(isA<int>()), argThat(isA<int>()),
+ argThat(isA<ResolutionInfo>())));
+ });
+
+ test('create calls create on the Java side', () async {
+ final MockTestPreviewHostApi mockApi = MockTestPreviewHostApi();
+ TestPreviewHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ const int targetRotation = 90;
+ const int targetResolutionWidth = 10;
+ const int targetResolutionHeight = 50;
+ Preview(
+ instanceManager: instanceManager,
+ targetRotation: targetRotation,
+ targetResolution: ResolutionInfo(
+ width: targetResolutionWidth, height: targetResolutionHeight),
+ );
+
+ final VerificationResult createVerification = verify(mockApi.create(
+ argThat(isA<int>()), argThat(equals(targetRotation)), captureAny));
+ final ResolutionInfo capturedResolutionInfo =
+ createVerification.captured.single as ResolutionInfo;
+ expect(capturedResolutionInfo.width, equals(targetResolutionWidth));
+ expect(capturedResolutionInfo.height, equals(targetResolutionHeight));
+ });
+
+ test(
+ 'setSurfaceProvider makes call to set surface provider for preview instance',
+ () async {
+ final MockTestPreviewHostApi mockApi = MockTestPreviewHostApi();
+ TestPreviewHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ const int textureId = 8;
+ final Preview preview = Preview.detached(
+ instanceManager: instanceManager,
+ );
+ instanceManager.addHostCreatedInstance(
+ preview,
+ 0,
+ onCopy: (_) => Preview.detached(),
+ );
+
+ when(mockApi.setSurfaceProvider(instanceManager.getIdentifier(preview)))
+ .thenReturn(textureId);
+ expect(await preview.setSurfaceProvider(), equals(textureId));
+
+ verify(
+ mockApi.setSurfaceProvider(instanceManager.getIdentifier(preview)));
+ });
+
+ test(
+        'releaseFlutterSurfaceTexture makes call to release flutter surface texture entry',
+ () async {
+ final MockTestPreviewHostApi mockApi = MockTestPreviewHostApi();
+ TestPreviewHostApi.setup(mockApi);
+
+ final Preview preview = Preview.detached();
+
+ preview.releaseFlutterSurfaceTexture();
+
+ verify(mockApi.releaseFlutterSurfaceTexture());
+ });
+
+ test(
+ 'getResolutionInfo makes call to get resolution information for preview instance',
+ () async {
+ final MockTestPreviewHostApi mockApi = MockTestPreviewHostApi();
+ TestPreviewHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final Preview preview = Preview.detached(
+ instanceManager: instanceManager,
+ );
+ const int resolutionWidth = 10;
+ const int resolutionHeight = 60;
+ final ResolutionInfo testResolutionInfo =
+ ResolutionInfo(width: resolutionWidth, height: resolutionHeight);
+
+ instanceManager.addHostCreatedInstance(
+ preview,
+ 0,
+ onCopy: (_) => Preview.detached(),
+ );
+
+ when(mockApi.getResolutionInfo(instanceManager.getIdentifier(preview)))
+ .thenReturn(testResolutionInfo);
+
+ final ResolutionInfo previewResolutionInfo =
+ await preview.getResolutionInfo();
+ expect(previewResolutionInfo.width, equals(resolutionWidth));
+ expect(previewResolutionInfo.height, equals(resolutionHeight));
+
+ verify(mockApi.getResolutionInfo(instanceManager.getIdentifier(preview)));
+ });
+ });
+}
diff --git a/packages/camera/camera_android_camerax/test/preview_test.mocks.dart b/packages/camera/camera_android_camerax/test/preview_test.mocks.dart
new file mode 100644
index 0000000..60fa152
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/preview_test.mocks.dart
@@ -0,0 +1,89 @@
+// Mocks generated by Mockito 5.3.2 from annotations
+// in camera_android_camerax/test/preview_test.dart.
+// Do not manually edit this file.
+
+// ignore_for_file: no_leading_underscores_for_library_prefixes
+import 'package:camera_android_camerax/src/camerax_library.g.dart' as _i2;
+import 'package:mockito/mockito.dart' as _i1;
+
+import 'test_camerax_library.g.dart' as _i3;
+
+// ignore_for_file: type=lint
+// ignore_for_file: avoid_redundant_argument_values
+// ignore_for_file: avoid_setters_without_getters
+// ignore_for_file: comment_references
+// ignore_for_file: implementation_imports
+// ignore_for_file: invalid_use_of_visible_for_testing_member
+// ignore_for_file: prefer_const_constructors
+// ignore_for_file: unnecessary_parenthesis
+// ignore_for_file: camel_case_types
+// ignore_for_file: subtype_of_sealed_class
+
+class _FakeResolutionInfo_0 extends _i1.SmartFake
+ implements _i2.ResolutionInfo {
+ _FakeResolutionInfo_0(
+ Object parent,
+ Invocation parentInvocation,
+ ) : super(
+ parent,
+ parentInvocation,
+ );
+}
+
+/// A class which mocks [TestPreviewHostApi].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockTestPreviewHostApi extends _i1.Mock
+ implements _i3.TestPreviewHostApi {
+ MockTestPreviewHostApi() {
+ _i1.throwOnMissingStub(this);
+ }
+
+ @override
+ void create(
+ int? identifier,
+ int? rotation,
+ _i2.ResolutionInfo? targetResolution,
+ ) =>
+ super.noSuchMethod(
+ Invocation.method(
+ #create,
+ [
+ identifier,
+ rotation,
+ targetResolution,
+ ],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ int setSurfaceProvider(int? identifier) => (super.noSuchMethod(
+ Invocation.method(
+ #setSurfaceProvider,
+ [identifier],
+ ),
+ returnValue: 0,
+ ) as int);
+ @override
+ void releaseFlutterSurfaceTexture() => super.noSuchMethod(
+ Invocation.method(
+ #releaseFlutterSurfaceTexture,
+ [],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ _i2.ResolutionInfo getResolutionInfo(int? identifier) => (super.noSuchMethod(
+ Invocation.method(
+ #getResolutionInfo,
+ [identifier],
+ ),
+ returnValue: _FakeResolutionInfo_0(
+ this,
+ Invocation.method(
+ #getResolutionInfo,
+ [identifier],
+ ),
+ ),
+ ) as _i2.ResolutionInfo);
+}
diff --git a/packages/camera/camera_android_camerax/test/process_camera_provider_test.dart b/packages/camera/camera_android_camerax/test/process_camera_provider_test.dart
new file mode 100644
index 0000000..548ac3e
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/process_camera_provider_test.dart
@@ -0,0 +1,207 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android_camerax/src/camera.dart';
+import 'package:camera_android_camerax/src/camera_info.dart';
+import 'package:camera_android_camerax/src/camera_selector.dart';
+import 'package:camera_android_camerax/src/instance_manager.dart';
+import 'package:camera_android_camerax/src/process_camera_provider.dart';
+import 'package:camera_android_camerax/src/use_case.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:mockito/annotations.dart';
+import 'package:mockito/mockito.dart';
+
+import 'process_camera_provider_test.mocks.dart';
+import 'test_camerax_library.g.dart';
+
+@GenerateMocks(<Type>[TestProcessCameraProviderHostApi])
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('ProcessCameraProvider', () {
+ tearDown(() => TestProcessCameraProviderHostApi.setup(null));
+
+ test('getInstanceTest', () async {
+ final MockTestProcessCameraProviderHostApi mockApi =
+ MockTestProcessCameraProviderHostApi();
+ TestProcessCameraProviderHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final ProcessCameraProvider processCameraProvider =
+ ProcessCameraProvider.detached(
+ instanceManager: instanceManager,
+ );
+
+ instanceManager.addHostCreatedInstance(
+ processCameraProvider,
+ 0,
+ onCopy: (_) => ProcessCameraProvider.detached(),
+ );
+
+ when(mockApi.getInstance()).thenAnswer((_) async => 0);
+ expect(
+ await ProcessCameraProvider.getInstance(
+ instanceManager: instanceManager),
+ equals(processCameraProvider));
+ verify(mockApi.getInstance());
+ });
+
+ test('getAvailableCameraInfosTest', () async {
+ final MockTestProcessCameraProviderHostApi mockApi =
+ MockTestProcessCameraProviderHostApi();
+ TestProcessCameraProviderHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final ProcessCameraProvider processCameraProvider =
+ ProcessCameraProvider.detached(
+ instanceManager: instanceManager,
+ );
+
+ instanceManager.addHostCreatedInstance(
+ processCameraProvider,
+ 0,
+ onCopy: (_) => ProcessCameraProvider.detached(),
+ );
+ final CameraInfo fakeAvailableCameraInfo =
+ CameraInfo.detached(instanceManager: instanceManager);
+ instanceManager.addHostCreatedInstance(
+ fakeAvailableCameraInfo,
+ 1,
+ onCopy: (_) => CameraInfo.detached(),
+ );
+
+ when(mockApi.getAvailableCameraInfos(0)).thenReturn(<int>[1]);
+ expect(await processCameraProvider.getAvailableCameraInfos(),
+ equals(<CameraInfo>[fakeAvailableCameraInfo]));
+ verify(mockApi.getAvailableCameraInfos(0));
+ });
+
+ test('bindToLifecycleTest', () async {
+ final MockTestProcessCameraProviderHostApi mockApi =
+ MockTestProcessCameraProviderHostApi();
+ TestProcessCameraProviderHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final ProcessCameraProvider processCameraProvider =
+ ProcessCameraProvider.detached(
+ instanceManager: instanceManager,
+ );
+ final CameraSelector fakeCameraSelector =
+ CameraSelector.detached(instanceManager: instanceManager);
+ final UseCase fakeUseCase =
+ UseCase.detached(instanceManager: instanceManager);
+ final Camera fakeCamera =
+ Camera.detached(instanceManager: instanceManager);
+
+ instanceManager.addHostCreatedInstance(
+ processCameraProvider,
+ 0,
+ onCopy: (_) => ProcessCameraProvider.detached(),
+ );
+ instanceManager.addHostCreatedInstance(
+ fakeCameraSelector,
+ 1,
+ onCopy: (_) => CameraSelector.detached(),
+ );
+ instanceManager.addHostCreatedInstance(
+ fakeUseCase,
+ 2,
+ onCopy: (_) => UseCase.detached(),
+ );
+ instanceManager.addHostCreatedInstance(
+ fakeCamera,
+ 3,
+ onCopy: (_) => Camera.detached(),
+ );
+
+ when(mockApi.bindToLifecycle(0, 1, <int>[2])).thenReturn(3);
+ expect(
+ await processCameraProvider
+ .bindToLifecycle(fakeCameraSelector, <UseCase>[fakeUseCase]),
+ equals(fakeCamera));
+ verify(mockApi.bindToLifecycle(0, 1, <int>[2]));
+ });
+
+ test('unbindTest', () async {
+ final MockTestProcessCameraProviderHostApi mockApi =
+ MockTestProcessCameraProviderHostApi();
+ TestProcessCameraProviderHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final ProcessCameraProvider processCameraProvider =
+ ProcessCameraProvider.detached(
+ instanceManager: instanceManager,
+ );
+ final UseCase fakeUseCase =
+ UseCase.detached(instanceManager: instanceManager);
+
+ instanceManager.addHostCreatedInstance(
+ processCameraProvider,
+ 0,
+ onCopy: (_) => ProcessCameraProvider.detached(),
+ );
+ instanceManager.addHostCreatedInstance(
+ fakeUseCase,
+ 1,
+ onCopy: (_) => UseCase.detached(),
+ );
+
+ processCameraProvider.unbind(<UseCase>[fakeUseCase]);
+ verify(mockApi.unbind(0, <int>[1]));
+ });
+
+ test('unbindAllTest', () async {
+ final MockTestProcessCameraProviderHostApi mockApi =
+ MockTestProcessCameraProviderHostApi();
+ TestProcessCameraProviderHostApi.setup(mockApi);
+
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final ProcessCameraProvider processCameraProvider =
+ ProcessCameraProvider.detached(
+ instanceManager: instanceManager,
+ );
+ final UseCase fakeUseCase =
+ UseCase.detached(instanceManager: instanceManager);
+
+ instanceManager.addHostCreatedInstance(
+ processCameraProvider,
+ 0,
+ onCopy: (_) => ProcessCameraProvider.detached(),
+ );
+ instanceManager.addHostCreatedInstance(
+ fakeUseCase,
+ 1,
+ onCopy: (_) => UseCase.detached(),
+ );
+
+ processCameraProvider.unbindAll();
+ verify(mockApi.unbindAll(0));
+ });
+
+ test('flutterApiCreateTest', () {
+ final InstanceManager instanceManager = InstanceManager(
+ onWeakReferenceRemoved: (_) {},
+ );
+ final ProcessCameraProviderFlutterApiImpl flutterApi =
+ ProcessCameraProviderFlutterApiImpl(
+ instanceManager: instanceManager,
+ );
+
+ flutterApi.create(0);
+
+ expect(instanceManager.getInstanceWithWeakReference(0),
+ isA<ProcessCameraProvider>());
+ });
+ });
+}
diff --git a/packages/camera/camera_android_camerax/test/process_camera_provider_test.mocks.dart b/packages/camera/camera_android_camerax/test/process_camera_provider_test.mocks.dart
new file mode 100644
index 0000000..2ce4ab7
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/process_camera_provider_test.mocks.dart
@@ -0,0 +1,88 @@
+// Mocks generated by Mockito 5.3.2 from annotations
+// in camera_android_camerax/test/process_camera_provider_test.dart.
+// Do not manually edit this file.
+
+// ignore_for_file: no_leading_underscores_for_library_prefixes
+import 'dart:async' as _i3;
+
+import 'package:mockito/mockito.dart' as _i1;
+
+import 'test_camerax_library.g.dart' as _i2;
+
+// ignore_for_file: type=lint
+// ignore_for_file: avoid_redundant_argument_values
+// ignore_for_file: avoid_setters_without_getters
+// ignore_for_file: comment_references
+// ignore_for_file: implementation_imports
+// ignore_for_file: invalid_use_of_visible_for_testing_member
+// ignore_for_file: prefer_const_constructors
+// ignore_for_file: unnecessary_parenthesis
+// ignore_for_file: camel_case_types
+// ignore_for_file: subtype_of_sealed_class
+
+/// A class which mocks [TestProcessCameraProviderHostApi].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockTestProcessCameraProviderHostApi extends _i1.Mock
+ implements _i2.TestProcessCameraProviderHostApi {
+ MockTestProcessCameraProviderHostApi() {
+ _i1.throwOnMissingStub(this);
+ }
+
+ @override
+ _i3.Future<int> getInstance() => (super.noSuchMethod(
+ Invocation.method(
+ #getInstance,
+ [],
+ ),
+ returnValue: _i3.Future<int>.value(0),
+ ) as _i3.Future<int>);
+ @override
+ List<int?> getAvailableCameraInfos(int? identifier) => (super.noSuchMethod(
+ Invocation.method(
+ #getAvailableCameraInfos,
+ [identifier],
+ ),
+ returnValue: <int?>[],
+ ) as List<int?>);
+ @override
+ int bindToLifecycle(
+ int? identifier,
+ int? cameraSelectorIdentifier,
+ List<int?>? useCaseIds,
+ ) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #bindToLifecycle,
+ [
+ identifier,
+ cameraSelectorIdentifier,
+ useCaseIds,
+ ],
+ ),
+ returnValue: 0,
+ ) as int);
+ @override
+ void unbind(
+ int? identifier,
+ List<int?>? useCaseIds,
+ ) =>
+ super.noSuchMethod(
+ Invocation.method(
+ #unbind,
+ [
+ identifier,
+ useCaseIds,
+ ],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ void unbindAll(int? identifier) => super.noSuchMethod(
+ Invocation.method(
+ #unbindAll,
+ [identifier],
+ ),
+ returnValueForMissingStub: null,
+ );
+}
diff --git a/packages/camera/camera_android_camerax/test/system_services_test.dart b/packages/camera/camera_android_camerax/test/system_services_test.dart
new file mode 100644
index 0000000..38037ea
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/system_services_test.dart
@@ -0,0 +1,110 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_android_camerax/src/camerax_library.g.dart'
+ show CameraPermissionsErrorData;
+import 'package:camera_android_camerax/src/system_services.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart'
+ show CameraException, DeviceOrientationChangedEvent;
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:mockito/annotations.dart';
+import 'package:mockito/mockito.dart';
+
+import 'system_services_test.mocks.dart';
+import 'test_camerax_library.g.dart';
+
+@GenerateMocks(<Type>[TestSystemServicesHostApi])
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('SystemServices', () {
+ tearDown(() => TestSystemServicesHostApi.setup(null));
+
+ test(
+ 'requestCameraPermissionsFromInstance completes normally without errors test',
+ () async {
+ final MockTestSystemServicesHostApi mockApi =
+ MockTestSystemServicesHostApi();
+ TestSystemServicesHostApi.setup(mockApi);
+
+ when(mockApi.requestCameraPermissions(true))
+ .thenAnswer((_) async => null);
+
+ await SystemServices.requestCameraPermissions(true);
+ verify(mockApi.requestCameraPermissions(true));
+ });
+
+ test(
+ 'requestCameraPermissionsFromInstance throws CameraException if there was a request error',
+ () {
+ final MockTestSystemServicesHostApi mockApi =
+ MockTestSystemServicesHostApi();
+ TestSystemServicesHostApi.setup(mockApi);
+ final CameraPermissionsErrorData error = CameraPermissionsErrorData(
+ errorCode: 'Test error code',
+ description: 'Test error description',
+ );
+
+ when(mockApi.requestCameraPermissions(true))
+ .thenAnswer((_) async => error);
+
+ expect(
+ () async => SystemServices.requestCameraPermissions(true),
+ throwsA(isA<CameraException>()
+ .having((CameraException e) => e.code, 'code', 'Test error code')
+ .having((CameraException e) => e.description, 'description',
+ 'Test error description')));
+ verify(mockApi.requestCameraPermissions(true));
+ });
+
+ test('startListeningForDeviceOrientationChangeTest', () async {
+ final MockTestSystemServicesHostApi mockApi =
+ MockTestSystemServicesHostApi();
+ TestSystemServicesHostApi.setup(mockApi);
+
+ SystemServices.startListeningForDeviceOrientationChange(true, 90);
+ verify(mockApi.startListeningForDeviceOrientationChange(true, 90));
+ });
+
+ test('stopListeningForDeviceOrientationChangeTest', () async {
+ final MockTestSystemServicesHostApi mockApi =
+ MockTestSystemServicesHostApi();
+ TestSystemServicesHostApi.setup(mockApi);
+
+ SystemServices.stopListeningForDeviceOrientationChange();
+ verify(mockApi.stopListeningForDeviceOrientationChange());
+ });
+
+ test('onDeviceOrientationChanged adds new orientation to stream', () {
+ SystemServices.deviceOrientationChangedStreamController.stream
+ .listen((DeviceOrientationChangedEvent event) {
+ expect(event.orientation, equals(DeviceOrientation.landscapeLeft));
+ });
+ SystemServicesFlutterApiImpl()
+ .onDeviceOrientationChanged('LANDSCAPE_LEFT');
+ });
+
+ test(
+ 'onDeviceOrientationChanged throws error if new orientation is invalid',
+ () {
+ expect(
+ () => SystemServicesFlutterApiImpl()
+ .onDeviceOrientationChanged('FAKE_ORIENTATION'),
+ throwsA(isA<ArgumentError>().having(
+ (ArgumentError e) => e.message,
+ 'message',
+ '"FAKE_ORIENTATION" is not a valid DeviceOrientation value')));
+ });
+
+ test('onCameraError adds new error to stream', () {
+ const String testErrorDescription = 'Test error description!';
+ SystemServices.cameraErrorStreamController.stream
+ .listen((String errorDescription) {
+ expect(errorDescription, equals(testErrorDescription));
+ });
+ SystemServicesFlutterApiImpl().onCameraError(testErrorDescription);
+ });
+ });
+}
diff --git a/packages/camera/camera_android_camerax/test/system_services_test.mocks.dart b/packages/camera/camera_android_camerax/test/system_services_test.mocks.dart
new file mode 100644
index 0000000..0963ffb
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/system_services_test.mocks.dart
@@ -0,0 +1,66 @@
+// Mocks generated by Mockito 5.3.2 from annotations
+// in camera_android_camerax/test/system_services_test.dart.
+// Do not manually edit this file.
+
+// ignore_for_file: no_leading_underscores_for_library_prefixes
+import 'dart:async' as _i3;
+
+import 'package:camera_android_camerax/src/camerax_library.g.dart' as _i4;
+import 'package:mockito/mockito.dart' as _i1;
+
+import 'test_camerax_library.g.dart' as _i2;
+
+// ignore_for_file: type=lint
+// ignore_for_file: avoid_redundant_argument_values
+// ignore_for_file: avoid_setters_without_getters
+// ignore_for_file: comment_references
+// ignore_for_file: implementation_imports
+// ignore_for_file: invalid_use_of_visible_for_testing_member
+// ignore_for_file: prefer_const_constructors
+// ignore_for_file: unnecessary_parenthesis
+// ignore_for_file: camel_case_types
+// ignore_for_file: subtype_of_sealed_class
+
+/// A class which mocks [TestSystemServicesHostApi].
+///
+/// See the documentation for Mockito's code generation for more information.
+class MockTestSystemServicesHostApi extends _i1.Mock
+ implements _i2.TestSystemServicesHostApi {
+ MockTestSystemServicesHostApi() {
+ _i1.throwOnMissingStub(this);
+ }
+
+ @override
+ _i3.Future<_i4.CameraPermissionsErrorData?> requestCameraPermissions(
+ bool? enableAudio) =>
+ (super.noSuchMethod(
+ Invocation.method(
+ #requestCameraPermissions,
+ [enableAudio],
+ ),
+ returnValue: _i3.Future<_i4.CameraPermissionsErrorData?>.value(),
+ ) as _i3.Future<_i4.CameraPermissionsErrorData?>);
+ @override
+ void startListeningForDeviceOrientationChange(
+ bool? isFrontFacing,
+ int? sensorOrientation,
+ ) =>
+ super.noSuchMethod(
+ Invocation.method(
+ #startListeningForDeviceOrientationChange,
+ [
+ isFrontFacing,
+ sensorOrientation,
+ ],
+ ),
+ returnValueForMissingStub: null,
+ );
+ @override
+ void stopListeningForDeviceOrientationChange() => super.noSuchMethod(
+ Invocation.method(
+ #stopListeningForDeviceOrientationChange,
+ [],
+ ),
+ returnValueForMissingStub: null,
+ );
+}
diff --git a/packages/camera/camera_android_camerax/test/test_camerax_library.g.dart b/packages/camera/camera_android_camerax/test/test_camerax_library.g.dart
new file mode 100644
index 0000000..3f0e9c2
--- /dev/null
+++ b/packages/camera/camera_android_camerax/test/test_camerax_library.g.dart
@@ -0,0 +1,475 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+// Autogenerated from Pigeon (v3.2.9), do not edit directly.
+// See also: https://pub.dev/packages/pigeon
+// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, unnecessary_import
+// ignore_for_file: avoid_relative_lib_imports
+import 'dart:async';
+import 'dart:typed_data' show Uint8List, Int32List, Int64List, Float64List;
+import 'package:flutter/foundation.dart' show WriteBuffer, ReadBuffer;
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'package:camera_android_camerax/src/camerax_library.g.dart';
+
+class _TestJavaObjectHostApiCodec extends StandardMessageCodec {
+ const _TestJavaObjectHostApiCodec();
+}
+
+abstract class TestJavaObjectHostApi {
+ static const MessageCodec<Object?> codec = _TestJavaObjectHostApiCodec();
+
+ void dispose(int identifier);
+ static void setup(TestJavaObjectHostApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.JavaObjectHostApi.dispose', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.JavaObjectHostApi.dispose was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.JavaObjectHostApi.dispose was null, expected non-null int.');
+ api.dispose(arg_identifier!);
+ return <Object?, Object?>{};
+ });
+ }
+ }
+ }
+}
+
+class _TestCameraInfoHostApiCodec extends StandardMessageCodec {
+ const _TestCameraInfoHostApiCodec();
+}
+
+abstract class TestCameraInfoHostApi {
+ static const MessageCodec<Object?> codec = _TestCameraInfoHostApiCodec();
+
+ int getSensorRotationDegrees(int identifier);
+ static void setup(TestCameraInfoHostApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraInfoHostApi.getSensorRotationDegrees',
+ codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.CameraInfoHostApi.getSensorRotationDegrees was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.CameraInfoHostApi.getSensorRotationDegrees was null, expected non-null int.');
+ final int output = api.getSensorRotationDegrees(arg_identifier!);
+ return <Object?, Object?>{'result': output};
+ });
+ }
+ }
+ }
+}
+
+class _TestCameraSelectorHostApiCodec extends StandardMessageCodec {
+ const _TestCameraSelectorHostApiCodec();
+}
+
+abstract class TestCameraSelectorHostApi {
+ static const MessageCodec<Object?> codec = _TestCameraSelectorHostApiCodec();
+
+ void create(int identifier, int? lensFacing);
+ List<int?> filter(int identifier, List<int?> cameraInfoIds);
+ static void setup(TestCameraSelectorHostApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraSelectorHostApi.create', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.CameraSelectorHostApi.create was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.CameraSelectorHostApi.create was null, expected non-null int.');
+ final int? arg_lensFacing = (args[1] as int?);
+ api.create(arg_identifier!, arg_lensFacing);
+ return <Object?, Object?>{};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.CameraSelectorHostApi.filter', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.CameraSelectorHostApi.filter was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.CameraSelectorHostApi.filter was null, expected non-null int.');
+ final List<int?>? arg_cameraInfoIds =
+ (args[1] as List<Object?>?)?.cast<int?>();
+ assert(arg_cameraInfoIds != null,
+ 'Argument for dev.flutter.pigeon.CameraSelectorHostApi.filter was null, expected non-null List<int?>.');
+ final List<int?> output =
+ api.filter(arg_identifier!, arg_cameraInfoIds!);
+ return <Object?, Object?>{'result': output};
+ });
+ }
+ }
+ }
+}
+
+class _TestProcessCameraProviderHostApiCodec extends StandardMessageCodec {
+ const _TestProcessCameraProviderHostApiCodec();
+}
+
+abstract class TestProcessCameraProviderHostApi {
+ static const MessageCodec<Object?> codec =
+ _TestProcessCameraProviderHostApiCodec();
+
+ Future<int> getInstance();
+ List<int?> getAvailableCameraInfos(int identifier);
+ int bindToLifecycle(
+ int identifier, int cameraSelectorIdentifier, List<int?> useCaseIds);
+ void unbind(int identifier, List<int?> useCaseIds);
+ void unbindAll(int identifier);
+ static void setup(TestProcessCameraProviderHostApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.getInstance', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ // ignore message
+ final int output = await api.getInstance();
+ return <Object?, Object?>{'result': output};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.getAvailableCameraInfos',
+ codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.getAvailableCameraInfos was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.getAvailableCameraInfos was null, expected non-null int.');
+ final List<int?> output =
+ api.getAvailableCameraInfos(arg_identifier!);
+ return <Object?, Object?>{'result': output};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.bindToLifecycle',
+ codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.bindToLifecycle was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.bindToLifecycle was null, expected non-null int.');
+ final int? arg_cameraSelectorIdentifier = (args[1] as int?);
+ assert(arg_cameraSelectorIdentifier != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.bindToLifecycle was null, expected non-null int.');
+ final List<int?>? arg_useCaseIds =
+ (args[2] as List<Object?>?)?.cast<int?>();
+ assert(arg_useCaseIds != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.bindToLifecycle was null, expected non-null List<int?>.');
+ final int output = api.bindToLifecycle(
+ arg_identifier!, arg_cameraSelectorIdentifier!, arg_useCaseIds!);
+ return <Object?, Object?>{'result': output};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.unbind', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.unbind was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.unbind was null, expected non-null int.');
+ final List<int?>? arg_useCaseIds =
+ (args[1] as List<Object?>?)?.cast<int?>();
+ assert(arg_useCaseIds != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.unbind was null, expected non-null List<int?>.');
+ api.unbind(arg_identifier!, arg_useCaseIds!);
+ return <Object?, Object?>{};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.ProcessCameraProviderHostApi.unbindAll', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.unbindAll was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.ProcessCameraProviderHostApi.unbindAll was null, expected non-null int.');
+ api.unbindAll(arg_identifier!);
+ return <Object?, Object?>{};
+ });
+ }
+ }
+ }
+}
+
+class _TestSystemServicesHostApiCodec extends StandardMessageCodec {
+ const _TestSystemServicesHostApiCodec();
+ @override
+ void writeValue(WriteBuffer buffer, Object? value) {
+ if (value is CameraPermissionsErrorData) {
+ buffer.putUint8(128);
+ writeValue(buffer, value.encode());
+ } else {
+ super.writeValue(buffer, value);
+ }
+ }
+
+ @override
+ Object? readValueOfType(int type, ReadBuffer buffer) {
+ switch (type) {
+ case 128:
+ return CameraPermissionsErrorData.decode(readValue(buffer)!);
+
+ default:
+ return super.readValueOfType(type, buffer);
+ }
+ }
+}
+
+abstract class TestSystemServicesHostApi {
+ static const MessageCodec<Object?> codec = _TestSystemServicesHostApiCodec();
+
+ Future<CameraPermissionsErrorData?> requestCameraPermissions(
+ bool enableAudio);
+ void startListeningForDeviceOrientationChange(
+ bool isFrontFacing, int sensorOrientation);
+ void stopListeningForDeviceOrientationChange();
+ static void setup(TestSystemServicesHostApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.SystemServicesHostApi.requestCameraPermissions',
+ codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesHostApi.requestCameraPermissions was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final bool? arg_enableAudio = (args[0] as bool?);
+ assert(arg_enableAudio != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesHostApi.requestCameraPermissions was null, expected non-null bool.');
+ final CameraPermissionsErrorData? output =
+ await api.requestCameraPermissions(arg_enableAudio!);
+ return <Object?, Object?>{'result': output};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.SystemServicesHostApi.startListeningForDeviceOrientationChange',
+ codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesHostApi.startListeningForDeviceOrientationChange was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final bool? arg_isFrontFacing = (args[0] as bool?);
+ assert(arg_isFrontFacing != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesHostApi.startListeningForDeviceOrientationChange was null, expected non-null bool.');
+ final int? arg_sensorOrientation = (args[1] as int?);
+ assert(arg_sensorOrientation != null,
+ 'Argument for dev.flutter.pigeon.SystemServicesHostApi.startListeningForDeviceOrientationChange was null, expected non-null int.');
+ api.startListeningForDeviceOrientationChange(
+ arg_isFrontFacing!, arg_sensorOrientation!);
+ return <Object?, Object?>{};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.SystemServicesHostApi.stopListeningForDeviceOrientationChange',
+ codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ // ignore message
+ api.stopListeningForDeviceOrientationChange();
+ return <Object?, Object?>{};
+ });
+ }
+ }
+ }
+}
+
+class _TestPreviewHostApiCodec extends StandardMessageCodec {
+ const _TestPreviewHostApiCodec();
+ @override
+ void writeValue(WriteBuffer buffer, Object? value) {
+ if (value is ResolutionInfo) {
+ buffer.putUint8(128);
+ writeValue(buffer, value.encode());
+ } else if (value is ResolutionInfo) {
+ buffer.putUint8(129);
+ writeValue(buffer, value.encode());
+ } else {
+ super.writeValue(buffer, value);
+ }
+ }
+
+ @override
+ Object? readValueOfType(int type, ReadBuffer buffer) {
+ switch (type) {
+ case 128:
+ return ResolutionInfo.decode(readValue(buffer)!);
+
+ case 129:
+ return ResolutionInfo.decode(readValue(buffer)!);
+
+ default:
+ return super.readValueOfType(type, buffer);
+ }
+ }
+}
+
+abstract class TestPreviewHostApi {
+ static const MessageCodec<Object?> codec = _TestPreviewHostApiCodec();
+
+ void create(int identifier, int? rotation, ResolutionInfo? targetResolution);
+ int setSurfaceProvider(int identifier);
+ void releaseFlutterSurfaceTexture();
+ ResolutionInfo getResolutionInfo(int identifier);
+ static void setup(TestPreviewHostApi? api,
+ {BinaryMessenger? binaryMessenger}) {
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.PreviewHostApi.create', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.PreviewHostApi.create was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.PreviewHostApi.create was null, expected non-null int.');
+ final int? arg_rotation = (args[1] as int?);
+ final ResolutionInfo? arg_targetResolution =
+ (args[2] as ResolutionInfo?);
+ api.create(arg_identifier!, arg_rotation, arg_targetResolution);
+ return <Object?, Object?>{};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.PreviewHostApi.setSurfaceProvider', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.PreviewHostApi.setSurfaceProvider was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.PreviewHostApi.setSurfaceProvider was null, expected non-null int.');
+ final int output = api.setSurfaceProvider(arg_identifier!);
+ return <Object?, Object?>{'result': output};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.PreviewHostApi.releaseFlutterSurfaceTexture',
+ codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ // ignore message
+ api.releaseFlutterSurfaceTexture();
+ return <Object?, Object?>{};
+ });
+ }
+ }
+ {
+ final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
+ 'dev.flutter.pigeon.PreviewHostApi.getResolutionInfo', codec,
+ binaryMessenger: binaryMessenger);
+ if (api == null) {
+ channel.setMockMessageHandler(null);
+ } else {
+ channel.setMockMessageHandler((Object? message) async {
+ assert(message != null,
+ 'Argument for dev.flutter.pigeon.PreviewHostApi.getResolutionInfo was null.');
+ final List<Object?> args = (message as List<Object?>?)!;
+ final int? arg_identifier = (args[0] as int?);
+ assert(arg_identifier != null,
+ 'Argument for dev.flutter.pigeon.PreviewHostApi.getResolutionInfo was null, expected non-null int.');
+ final ResolutionInfo output = api.getResolutionInfo(arg_identifier!);
+ return <Object?, Object?>{'result': output};
+ });
+ }
+ }
+ }
+}
diff --git a/packages/camera/camera_avfoundation/AUTHORS b/packages/camera/camera_avfoundation/AUTHORS
new file mode 100644
index 0000000..493a0b4
--- /dev/null
+++ b/packages/camera/camera_avfoundation/AUTHORS
@@ -0,0 +1,66 @@
+# Below is a list of people and organizations that have contributed
+# to the Flutter project. Names should be added to the list like so:
+#
+# Name/Organization <email address>
+
+Google Inc.
+The Chromium Authors
+German Saprykin <saprykin.h@gmail.com>
+Benjamin Sauer <sauer.benjamin@gmail.com>
+larsenthomasj@gmail.com
+Ali Bitek <alibitek@protonmail.ch>
+Pol Batlló <pol.batllo@gmail.com>
+Anatoly Pulyaevskiy
+Hayden Flinner <haydenflinner@gmail.com>
+Stefano Rodriguez <hlsroddy@gmail.com>
+Salvatore Giordano <salvatoregiordanoo@gmail.com>
+Brian Armstrong <brian@flutter.institute>
+Paul DeMarco <paulmdemarco@gmail.com>
+Fabricio Nogueira <feufeu@gmail.com>
+Simon Lightfoot <simon@devangels.london>
+Ashton Thomas <ashton@acrinta.com>
+Thomas Danner <thmsdnnr@gmail.com>
+Diego Velásquez <diego.velasquez.lopez@gmail.com>
+Hajime Nakamura <nkmrhj@gmail.com>
+Tuyển Vũ Xuân <netsoft1985@gmail.com>
+Miguel Ruivo <miguel@miguelruivo.com>
+Sarthak Verma <sarthak@artiosys.com>
+Mike Diarmid <mike@invertase.io>
+Invertase <oss@invertase.io>
+Elliot Hesp <elliot@invertase.io>
+Vince Varga <vince.varga@smaho.com>
+Aawaz Gyawali <awazgyawali@gmail.com>
+EUI Limited <ian.evans3@admiralgroup.co.uk>
+Katarina Sheremet <katarina@sheremet.ch>
+Thomas Stockx <thomas@stockxit.com>
+Sarbagya Dhaubanjar <sarbagyastha@gmail.com>
+Ozkan Eksi <ozeksi@gmail.com>
+Rishab Nayak <rishab@bu.edu>
+ko2ic <ko2ic.dev@gmail.com>
+Jonathan Younger <jonathan@daikini.com>
+Jose Sanchez <josesm82@gmail.com>
+Debkanchan Samadder <debu.samadder@gmail.com>
+Audrius Karosevicius <audrius.karosevicius@gmail.com>
+Lukasz Piliszczuk <lukasz@intheloup.io>
+SoundReply Solutions GmbH <ch@soundreply.com>
+Rafal Wachol <rwachol@gmail.com>
+Pau Picas <pau.picas@gmail.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Alexandru Tuca <salexandru.tuca@outlook.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Rhodes Davis Jr. <rody.davis.jr@gmail.com>
+Luigi Agosti <luigi@tengio.com>
+Quentin Le Guennec <quentin@tengio.com>
+Koushik Ravikumar <koushik@tengio.com>
+Nissim Dsilva <nissim@tengio.com>
+Giancarlo Rocha <giancarloiff@gmail.com>
+Ryo Miyake <ryo@miyake.id>
+Théo Champion <contact.theochampion@gmail.com>
+Kazuki Yamaguchi <y.kazuki0614n@gmail.com>
+Eitan Schwartz <eshvartz@gmail.com>
+Chris Rutkowski <chrisrutkowski89@gmail.com>
+Juan Alvarez <juan.alvarez@resideo.com>
+Aleksandr Yurkovskiy <sanekyy@gmail.com>
+Anton Borries <mail@antonborri.es>
+Alex Li <google@alexv525.com>
+Rahul Raj <64.rahulraj@gmail.com>
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
new file mode 100644
index 0000000..f0605b7
--- /dev/null
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -0,0 +1,54 @@
+## 0.9.11
+
+* Adds back use of Optional type.
+* Updates minimum Flutter version to 3.0.
+
+## 0.9.10+2
+
+* Updates code for stricter lint checks.
+
+## 0.9.10+1
+
+* Updates code for stricter lint checks.
+
+## 0.9.10
+
+* Removes usage of the deprecated quiver Optional type.
+
+## 0.9.9
+
+* Implements option to also stream when recording a video.
+
+## 0.9.8+6
+
+* Updates code for `no_leading_underscores_for_local_identifiers` lint.
+* Updates minimum Flutter version to 2.10.
+
+## 0.9.8+5
+
+* Fixes a regression introduced in 0.9.8+4 where the stream handler is not set.
+
+## 0.9.8+4
+
+* Fixes a crash due to sending orientation change events when the engine is torn down.
+
+## 0.9.8+3
+
+* Fixes avoid_redundant_argument_values lint warnings and minor typos.
+* Ignores missing return warnings in preparation for [upcoming analysis changes](https://github.com/flutter/flutter/issues/105750).
+
+## 0.9.8+2
+
+* Fixes exception in registerWith caused by the switch to an in-package method channel.
+
+## 0.9.8+1
+
+* Ignores deprecation warnings for upcoming styleFrom button API changes.
+
+## 0.9.8
+
+* Switches to internal method channel implementation.
+
+## 0.9.7+1
+
+* Splits from `camera` as a federated implementation.
diff --git a/packages/camera/camera_avfoundation/LICENSE b/packages/camera/camera_avfoundation/LICENSE
new file mode 100644
index 0000000..c6823b8
--- /dev/null
+++ b/packages/camera/camera_avfoundation/LICENSE
@@ -0,0 +1,25 @@
+Copyright 2013 The Flutter Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/packages/camera/camera_avfoundation/README.md b/packages/camera/camera_avfoundation/README.md
new file mode 100644
index 0000000..a063492
--- /dev/null
+++ b/packages/camera/camera_avfoundation/README.md
@@ -0,0 +1,11 @@
+# camera\_avfoundation
+
+The iOS implementation of [`camera`][1].
+
+## Usage
+
+This package is [endorsed][2], which means you can simply use `camera`
+normally. This package will be automatically included in your app when you do.
+
+[1]: https://pub.dev/packages/camera
+[2]: https://flutter.dev/docs/development/packages-and-plugins/developing-packages#endorsed-federated-plugin
diff --git a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart
new file mode 100644
index 0000000..34d460d
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart
@@ -0,0 +1,281 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:io';
+import 'dart:ui';
+
+import 'package:camera_avfoundation/camera_avfoundation.dart';
+import 'package:camera_example/camera_controller.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/painting.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:path_provider/path_provider.dart';
+import 'package:video_player/video_player.dart';
+
+void main() {
+ late Directory testDir;
+
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ setUpAll(() async {
+ CameraPlatform.instance = AVFoundationCamera();
+ final Directory extDir = await getTemporaryDirectory();
+ testDir = await Directory('${extDir.path}/test').create(recursive: true);
+ });
+
+ tearDownAll(() async {
+ await testDir.delete(recursive: true);
+ });
+
+ final Map<ResolutionPreset, Size> presetExpectedSizes =
+ <ResolutionPreset, Size>{
+ ResolutionPreset.low: const Size(288, 352),
+ ResolutionPreset.medium: const Size(480, 640),
+ ResolutionPreset.high: const Size(720, 1280),
+ ResolutionPreset.veryHigh: const Size(1080, 1920),
+ ResolutionPreset.ultraHigh: const Size(2160, 3840),
+ // Don't bother checking for max here since it could be anything.
+ };
+
+ /// Verify that [actual] has dimensions that are at least as large as
+ /// [expectedSize]. Allows for a mismatch in portrait vs landscape. Returns
+ /// whether the dimensions exactly match.
+ bool assertExpectedDimensions(Size expectedSize, Size actual) {
+ expect(actual.shortestSide, lessThanOrEqualTo(expectedSize.shortestSide));
+ expect(actual.longestSide, lessThanOrEqualTo(expectedSize.longestSide));
+ return actual.shortestSide == expectedSize.shortestSide &&
+ actual.longestSide == expectedSize.longestSide;
+ }
+
+ // This tests that the capture is no bigger than the preset, since we have
+ // automatic code to fall back to smaller sizes when we need to. Returns
+ // whether the image is exactly the desired resolution.
+ Future<bool> testCaptureImageResolution(
+ CameraController controller, ResolutionPreset preset) async {
+ final Size expectedSize = presetExpectedSizes[preset]!;
+
+ // Take Picture
+ final XFile file = await controller.takePicture();
+
+ // Load picture
+ final File fileImage = File(file.path);
+ final Image image = await decodeImageFromList(fileImage.readAsBytesSync());
+
+ // Verify image dimensions are as expected
+ expect(image, isNotNull);
+ return assertExpectedDimensions(
+ expectedSize, Size(image.height.toDouble(), image.width.toDouble()));
+ }
+
+ testWidgets('Capture specific image resolutions',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+ for (final CameraDescription cameraDescription in cameras) {
+ bool previousPresetExactlySupported = true;
+ for (final MapEntry<ResolutionPreset, Size> preset
+ in presetExpectedSizes.entries) {
+ final CameraController controller =
+ CameraController(cameraDescription, preset.key);
+ await controller.initialize();
+ final bool presetExactlySupported =
+ await testCaptureImageResolution(controller, preset.key);
+ assert(!(!previousPresetExactlySupported && presetExactlySupported),
+ 'The camera took higher resolution pictures at a lower resolution.');
+ previousPresetExactlySupported = presetExactlySupported;
+ await controller.dispose();
+ }
+ }
+ });
+
+  // This tests that the capture is no bigger than the preset, since we have
+  // automatic code to fall back to smaller sizes when we need to. Returns
+  // whether the video is exactly the desired resolution.
+ Future<bool> testCaptureVideoResolution(
+ CameraController controller, ResolutionPreset preset) async {
+ final Size expectedSize = presetExpectedSizes[preset]!;
+
+ // Take Video
+ await controller.startVideoRecording();
+ sleep(const Duration(milliseconds: 300));
+ final XFile file = await controller.stopVideoRecording();
+
+ // Load video metadata
+ final File videoFile = File(file.path);
+ final VideoPlayerController videoController =
+ VideoPlayerController.file(videoFile);
+ await videoController.initialize();
+ final Size video = videoController.value.size;
+
+    // Verify video dimensions are as expected
+ expect(video, isNotNull);
+ return assertExpectedDimensions(
+ expectedSize, Size(video.height, video.width));
+ }
+
+ testWidgets('Capture specific video resolutions',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+ for (final CameraDescription cameraDescription in cameras) {
+ bool previousPresetExactlySupported = true;
+ for (final MapEntry<ResolutionPreset, Size> preset
+ in presetExpectedSizes.entries) {
+ final CameraController controller =
+ CameraController(cameraDescription, preset.key);
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+ final bool presetExactlySupported =
+ await testCaptureVideoResolution(controller, preset.key);
+ assert(!(!previousPresetExactlySupported && presetExactlySupported),
+ 'The camera took higher resolution pictures at a lower resolution.');
+ previousPresetExactlySupported = presetExactlySupported;
+ await controller.dispose();
+ }
+ }
+ });
+
+ testWidgets('Pause and resume video recording', (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+
+ int startPause;
+ int timePaused = 0;
+
+ await controller.startVideoRecording();
+ final int recordingStart = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+
+ await controller.pauseVideoRecording();
+ startPause = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+ await controller.resumeVideoRecording();
+ timePaused += DateTime.now().millisecondsSinceEpoch - startPause;
+
+ sleep(const Duration(milliseconds: 500));
+
+ await controller.pauseVideoRecording();
+ startPause = DateTime.now().millisecondsSinceEpoch;
+ sleep(const Duration(milliseconds: 500));
+ await controller.resumeVideoRecording();
+ timePaused += DateTime.now().millisecondsSinceEpoch - startPause;
+
+ sleep(const Duration(milliseconds: 500));
+
+ final XFile file = await controller.stopVideoRecording();
+ final int recordingTime =
+ DateTime.now().millisecondsSinceEpoch - recordingStart;
+
+ final File videoFile = File(file.path);
+ final VideoPlayerController videoController = VideoPlayerController.file(
+ videoFile,
+ );
+ await videoController.initialize();
+ final int duration = videoController.value.duration.inMilliseconds;
+ await videoController.dispose();
+
+ expect(duration, lessThan(recordingTime - timePaused));
+ });
+
+ /// Start streaming with specifying the ImageFormatGroup.
+ Future<CameraImageData> startStreaming(List<CameraDescription> cameras,
+ ImageFormatGroup? imageFormatGroup) async {
+ final CameraController controller = CameraController(
+ cameras.first,
+ ResolutionPreset.low,
+ enableAudio: false,
+ imageFormatGroup: imageFormatGroup,
+ );
+
+ await controller.initialize();
+ final Completer<CameraImageData> completer = Completer<CameraImageData>();
+
+ await controller.startImageStream((CameraImageData image) {
+ if (!completer.isCompleted) {
+ Future<void>(() async {
+ await controller.stopImageStream();
+ await controller.dispose();
+ }).then((Object? value) {
+ completer.complete(image);
+ });
+ }
+ });
+ return completer.future;
+ }
+
+ testWidgets(
+ 'image streaming with imageFormatGroup',
+ (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ CameraImageData image = await startStreaming(cameras, null);
+ expect(image, isNotNull);
+ expect(image.format.group, ImageFormatGroup.bgra8888);
+ expect(image.planes.length, 1);
+
+ image = await startStreaming(cameras, ImageFormatGroup.yuv420);
+ expect(image, isNotNull);
+ expect(image.format.group, ImageFormatGroup.yuv420);
+ expect(image.planes.length, 2);
+
+ image = await startStreaming(cameras, ImageFormatGroup.bgra8888);
+ expect(image, isNotNull);
+ expect(image.format.group, ImageFormatGroup.bgra8888);
+ expect(image.planes.length, 1);
+ },
+ );
+
+ testWidgets('Recording with video streaming', (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+ final Completer<CameraImageData> completer = Completer<CameraImageData>();
+ await controller.startVideoRecording(
+ streamCallback: (CameraImageData image) {
+ if (!completer.isCompleted) {
+ completer.complete(image);
+ }
+ });
+ sleep(const Duration(milliseconds: 500));
+ await controller.stopVideoRecording();
+ await controller.dispose();
+
+ expect(await completer.future, isNotNull);
+ });
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/Flutter/AppFrameworkInfo.plist b/packages/camera/camera_avfoundation/example/ios/Flutter/AppFrameworkInfo.plist
new file mode 100644
index 0000000..3a9c234
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Flutter/AppFrameworkInfo.plist
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>App</string>
+ <key>CFBundleIdentifier</key>
+ <string>io.flutter.flutter.app</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>App</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>arm64</string>
+ </array>
+ <key>MinimumOSVersion</key>
+ <string>9.0</string>
+</dict>
+</plist>
diff --git a/packages/camera/camera_avfoundation/example/ios/Flutter/Debug.xcconfig b/packages/camera/camera_avfoundation/example/ios/Flutter/Debug.xcconfig
new file mode 100644
index 0000000..b2f5fae
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Flutter/Debug.xcconfig
@@ -0,0 +1,3 @@
+#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
+#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
+#include "Generated.xcconfig"
diff --git a/packages/camera/camera_avfoundation/example/ios/Flutter/Release.xcconfig b/packages/camera/camera_avfoundation/example/ios/Flutter/Release.xcconfig
new file mode 100644
index 0000000..88c2914
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Flutter/Release.xcconfig
@@ -0,0 +1,3 @@
+#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
+#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
+#include "Generated.xcconfig"
diff --git a/packages/camera/camera_avfoundation/example/ios/Podfile b/packages/camera/camera_avfoundation/example/ios/Podfile
new file mode 100644
index 0000000..5bc7b7e
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Podfile
@@ -0,0 +1,45 @@
+# Uncomment this line to define a global platform for your project
+# platform :ios, '9.0'
+
+# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
+ENV['COCOAPODS_DISABLE_STATS'] = 'true'
+
+project 'Runner', {
+ 'Debug' => :debug,
+ 'Profile' => :release,
+ 'Release' => :release,
+}
+
+def flutter_root
+ generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__)
+ unless File.exist?(generated_xcode_build_settings_path)
+ raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first"
+ end
+
+ File.foreach(generated_xcode_build_settings_path) do |line|
+ matches = line.match(/FLUTTER_ROOT\=(.*)/)
+ return matches[1].strip if matches
+ end
+ raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get"
+end
+
+require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root)
+
+flutter_ios_podfile_setup
+
+target 'Runner' do
+ flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
+
+ target 'RunnerTests' do
+ platform :ios, '9.0'
+ inherit! :search_paths
+ # Pods for testing
+ pod 'OCMock', '~> 3.8.1'
+ end
+end
+
+post_install do |installer|
+ installer.pods_project.targets.each do |target|
+ flutter_additional_ios_build_settings(target)
+ end
+end
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
new file mode 100644
index 0000000..03c80d7
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
@@ -0,0 +1,712 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 033B94BE269C40A200B4DF97 /* CameraMethodChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */; };
+ 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB766A2665316900CE5A93 /* CameraFocusTests.m */; };
+ 03F6F8B226CBB4670024B8D3 /* ThreadSafeFlutterResultTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03F6F8B126CBB4670024B8D3 /* ThreadSafeFlutterResultTests.m */; };
+ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
+ 236906D1621AE863A5B2E770 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 89D82918721FABF772705DB0 /* libPods-Runner.a */; };
+ 25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; };
+ 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; };
+ 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
+ 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */; };
+ 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; };
+ 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; };
+ 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; };
+ 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
+ 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
+ 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
+ E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */; };
+ E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */; };
+ E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */; };
+ E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */; };
+ E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */; };
+ E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */; };
+ E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FD2770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m */; };
+ E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */; };
+ E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */; };
+ E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */; };
+ E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */; };
+ E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */; };
+ F6EE622F2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m in Sources */ = {isa = PBXBuildFile; fileRef = F6EE622E2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXContainerItemProxy section */
+ 03BB766D2665316900CE5A93 /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 97C146E61CF9000F007C117D /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 97C146ED1CF9000F007C117D;
+ remoteInfo = Runner;
+ };
+/* End PBXContainerItemProxy section */
+
+/* Begin PBXCopyFilesBuildPhase section */
+ 9705A1C41CF9048500538489 /* Embed Frameworks */ = {
+ isa = PBXCopyFilesBuildPhase;
+ buildActionMask = 2147483647;
+ dstPath = "";
+ dstSubfolderSpec = 10;
+ files = (
+ );
+ name = "Embed Frameworks";
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXCopyFilesBuildPhase section */
+
+/* Begin PBXFileReference section */
+ 033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraMethodChannelTests.m; sourceTree = "<group>"; };
+ 03BB76682665316900CE5A93 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
+ 03BB766A2665316900CE5A93 /* CameraFocusTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraFocusTests.m; sourceTree = "<group>"; };
+ 03BB766C2665316900CE5A93 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+ 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraOrientationTests.m; sourceTree = "<group>"; };
+ 03F6F8B126CBB4670024B8D3 /* ThreadSafeFlutterResultTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeFlutterResultTests.m; sourceTree = "<group>"; };
+ 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
+ 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
+ 14AE82C910C2A12F2ECB2094 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = "<group>"; };
+ 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+ 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
+ 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AvailableCamerasTest.m; sourceTree = "<group>"; };
+ 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
+ 788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = "<group>"; };
+ 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
+ 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
+ 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
+ 89D82918721FABF772705DB0 /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+ 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
+ 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
+ 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ 97C146F21CF9000F007C117D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
+ 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
+ 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
+ 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
+ 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+ 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = "<group>"; };
+ A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = "<group>"; };
+ E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = QueueUtilsTests.m; sourceTree = "<group>"; };
+ E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraCaptureSessionQueueRaceConditionTests.m; sourceTree = "<group>"; };
+ E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTSavePhotoDelegateTests.m; sourceTree = "<group>"; };
+ E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTCamPhotoCaptureTests.m; sourceTree = "<group>"; };
+ E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FLTCamSampleBufferTests.m; sourceTree = "<group>"; };
+ E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPermissionTests.m; sourceTree = "<group>"; };
+ E0C6E1FD2770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeMethodChannelTests.m; sourceTree = "<group>"; };
+ E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeTextureRegistryTests.m; sourceTree = "<group>"; };
+ E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeEventChannelTests.m; sourceTree = "<group>"; };
+ E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraTestUtils.h; sourceTree = "<group>"; };
+ E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraTestUtils.m; sourceTree = "<group>"; };
+ E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPropertiesTests.m; sourceTree = "<group>"; };
+ E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPreviewPauseTests.m; sourceTree = "<group>"; };
+ F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockFLTThreadSafeFlutterResult.h; sourceTree = "<group>"; };
+ F6EE622E2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockFLTThreadSafeFlutterResult.m; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 03BB76652665316900CE5A93 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 97C146EB1CF9000F007C117D /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 236906D1621AE863A5B2E770 /* libPods-Runner.a in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 03BB76692665316900CE5A93 /* RunnerTests */ = {
+ isa = PBXGroup;
+ children = (
+ 03BB766A2665316900CE5A93 /* CameraFocusTests.m */,
+ 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */,
+ 03BB766C2665316900CE5A93 /* Info.plist */,
+ 033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */,
+ 03F6F8B126CBB4670024B8D3 /* ThreadSafeFlutterResultTests.m */,
+ E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */,
+ E0C6E1FD2770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m */,
+ E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */,
+ E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */,
+ E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */,
+ E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */,
+ E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */,
+ E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */,
+ E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */,
+ E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */,
+ E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */,
+ F6EE622E2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m */,
+ F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */,
+ E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */,
+ E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */,
+ 788A065927B0E02900533D74 /* StreamingTest.m */,
+ 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */,
+ );
+ path = RunnerTests;
+ sourceTree = "<group>";
+ };
+ 3242FD2B467C15C62200632F /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ 89D82918721FABF772705DB0 /* libPods-Runner.a */,
+ 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */,
+ );
+ name = Frameworks;
+ sourceTree = "<group>";
+ };
+ 9740EEB11CF90186004384FC /* Flutter */ = {
+ isa = PBXGroup;
+ children = (
+ 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */,
+ 9740EEB21CF90195004384FC /* Debug.xcconfig */,
+ 7AFA3C8E1D35360C0083082E /* Release.xcconfig */,
+ 9740EEB31CF90195004384FC /* Generated.xcconfig */,
+ );
+ name = Flutter;
+ sourceTree = "<group>";
+ };
+ 97C146E51CF9000F007C117D = {
+ isa = PBXGroup;
+ children = (
+ 9740EEB11CF90186004384FC /* Flutter */,
+ 97C146F01CF9000F007C117D /* Runner */,
+ 03BB76692665316900CE5A93 /* RunnerTests */,
+ 97C146EF1CF9000F007C117D /* Products */,
+ FD386F00E98D73419C929072 /* Pods */,
+ 3242FD2B467C15C62200632F /* Frameworks */,
+ );
+ sourceTree = "<group>";
+ };
+ 97C146EF1CF9000F007C117D /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 97C146EE1CF9000F007C117D /* Runner.app */,
+ 03BB76682665316900CE5A93 /* RunnerTests.xctest */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 97C146F01CF9000F007C117D /* Runner */ = {
+ isa = PBXGroup;
+ children = (
+ 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */,
+ 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */,
+ 97C146FA1CF9000F007C117D /* Main.storyboard */,
+ 97C146FD1CF9000F007C117D /* Assets.xcassets */,
+ 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
+ 97C147021CF9000F007C117D /* Info.plist */,
+ 97C146F11CF9000F007C117D /* Supporting Files */,
+ 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
+ 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
+ );
+ path = Runner;
+ sourceTree = "<group>";
+ };
+ 97C146F11CF9000F007C117D /* Supporting Files */ = {
+ isa = PBXGroup;
+ children = (
+ 97C146F21CF9000F007C117D /* main.m */,
+ );
+ name = "Supporting Files";
+ sourceTree = "<group>";
+ };
+ FD386F00E98D73419C929072 /* Pods */ = {
+ isa = PBXGroup;
+ children = (
+ 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */,
+ 14AE82C910C2A12F2ECB2094 /* Pods-Runner.release.xcconfig */,
+ 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */,
+ A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */,
+ );
+ path = Pods;
+ sourceTree = "<group>";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ 03BB76672665316900CE5A93 /* RunnerTests */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 03BB76712665316900CE5A93 /* Build configuration list for PBXNativeTarget "RunnerTests" */;
+ buildPhases = (
+ 422786A96136AA9087A2041B /* [CP] Check Pods Manifest.lock */,
+ 03BB76642665316900CE5A93 /* Sources */,
+ 03BB76652665316900CE5A93 /* Frameworks */,
+ 03BB76662665316900CE5A93 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ 03BB766E2665316900CE5A93 /* PBXTargetDependency */,
+ );
+ name = RunnerTests;
+ productName = camera_exampleTests;
+ productReference = 03BB76682665316900CE5A93 /* RunnerTests.xctest */;
+ productType = "com.apple.product-type.bundle.unit-test";
+ };
+ 97C146ED1CF9000F007C117D /* Runner */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
+ buildPhases = (
+ 9872F2A25E8A171A111468CD /* [CP] Check Pods Manifest.lock */,
+ 9740EEB61CF901F6004384FC /* Run Script */,
+ 97C146EA1CF9000F007C117D /* Sources */,
+ 97C146EB1CF9000F007C117D /* Frameworks */,
+ 97C146EC1CF9000F007C117D /* Resources */,
+ 9705A1C41CF9048500538489 /* Embed Frameworks */,
+ 3B06AD1E1E4923F5004D2608 /* Thin Binary */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = Runner;
+ productName = Runner;
+ productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
+ productType = "com.apple.product-type.application";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 97C146E61CF9000F007C117D /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastUpgradeCheck = 1300;
+ ORGANIZATIONNAME = "The Flutter Authors";
+ TargetAttributes = {
+ 03BB76672665316900CE5A93 = {
+ CreatedOnToolsVersion = 12.5;
+ ProvisioningStyle = Automatic;
+ TestTargetID = 97C146ED1CF9000F007C117D;
+ };
+ 97C146ED1CF9000F007C117D = {
+ CreatedOnToolsVersion = 7.3.1;
+ };
+ };
+ };
+ buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = en;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ Base,
+ );
+ mainGroup = 97C146E51CF9000F007C117D;
+ productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 97C146ED1CF9000F007C117D /* Runner */,
+ 03BB76672665316900CE5A93 /* RunnerTests */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ 03BB76662665316900CE5A93 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 97C146EC1CF9000F007C117D /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
+ 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
+ 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
+ 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXShellScriptBuildPhase section */
+ 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "Thin Binary";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
+ };
+ 422786A96136AA9087A2041B /* [CP] Check Pods Manifest.lock */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputFileListPaths = (
+ );
+ inputPaths = (
+ "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
+ "${PODS_ROOT}/Manifest.lock",
+ );
+ name = "[CP] Check Pods Manifest.lock";
+ outputFileListPaths = (
+ );
+ outputPaths = (
+ "$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt",
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
+ showEnvVarsInLog = 0;
+ };
+ 9740EEB61CF901F6004384FC /* Run Script */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "Run Script";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
+ };
+ 9872F2A25E8A171A111468CD /* [CP] Check Pods Manifest.lock */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputFileListPaths = (
+ );
+ inputPaths = (
+ "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
+ "${PODS_ROOT}/Manifest.lock",
+ );
+ name = "[CP] Check Pods Manifest.lock";
+ outputFileListPaths = (
+ );
+ outputPaths = (
+ "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
+ showEnvVarsInLog = 0;
+ };
+/* End PBXShellScriptBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 03BB76642665316900CE5A93 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 03F6F8B226CBB4670024B8D3 /* ThreadSafeFlutterResultTests.m in Sources */,
+ 033B94BE269C40A200B4DF97 /* CameraMethodChannelTests.m in Sources */,
+ E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */,
+ E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */,
+ 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */,
+ E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */,
+ E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */,
+ E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */,
+ 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */,
+ F6EE622F2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m in Sources */,
+ E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */,
+ 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */,
+ E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
+ 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */,
+ E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,
+ E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */,
+ E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */,
+ E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */,
+ E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 97C146EA1CF9000F007C117D /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */,
+ 97C146F31CF9000F007C117D /* main.m in Sources */,
+ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin PBXTargetDependency section */
+ 03BB766E2665316900CE5A93 /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ target = 97C146ED1CF9000F007C117D /* Runner */;
+ targetProxy = 03BB766D2665316900CE5A93 /* PBXContainerItemProxy */;
+ };
+/* End PBXTargetDependency section */
+
+/* Begin PBXVariantGroup section */
+ 97C146FA1CF9000F007C117D /* Main.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ 97C146FB1CF9000F007C117D /* Base */,
+ );
+ name = Main.storyboard;
+ sourceTree = "<group>";
+ };
+ 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ 97C147001CF9000F007C117D /* Base */,
+ );
+ name = LaunchScreen.storyboard;
+ sourceTree = "<group>";
+ };
+/* End PBXVariantGroup section */
+
+/* Begin XCBuildConfiguration section */
+ 03BB766F2665316900CE5A93 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */;
+ buildSettings = {
+ BUNDLE_LOADER = "$(TEST_HOST)";
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ DEVELOPMENT_TEAM = "";
+ GCC_C_LANGUAGE_STANDARD = gnu11;
+ INFOPLIST_FILE = RunnerTests/Info.plist;
+ IPHONEOS_DEPLOYMENT_TARGET = 11.0;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+ MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "dev.flutter.plugins.cameraExample.camera-exampleTests";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ TARGETED_DEVICE_FAMILY = "1,2";
+ TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/Runner";
+ };
+ name = Debug;
+ };
+ 03BB76702665316900CE5A93 /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */;
+ buildSettings = {
+ BUNDLE_LOADER = "$(TEST_HOST)";
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ DEVELOPMENT_TEAM = "";
+ GCC_C_LANGUAGE_STANDARD = gnu11;
+ INFOPLIST_FILE = RunnerTests/Info.plist;
+ IPHONEOS_DEPLOYMENT_TARGET = 11.0;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = "dev.flutter.plugins.cameraExample.camera-exampleTests";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ TARGETED_DEVICE_FAMILY = "1,2";
+ TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/Runner";
+ };
+ name = Release;
+ };
+ 97C147031CF9000F007C117D /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_COMMA = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+ CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+ CLANG_WARN_STRICT_PROTOTYPES = YES;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_TESTABILITY = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Debug;
+ };
+ 97C147041CF9000F007C117D /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_COMMA = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+ CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+ CLANG_WARN_STRICT_PROTOTYPES = YES;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_NS_ASSERTIONS = NO;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ VALIDATE_PRODUCT = YES;
+ };
+ name = Release;
+ };
+ 97C147061CF9000F007C117D /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ DEVELOPMENT_TEAM = "";
+ ENABLE_BITCODE = NO;
+ FRAMEWORK_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Flutter",
+ );
+ INFOPLIST_FILE = Runner/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LIBRARY_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Flutter",
+ );
+ PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.cameraExample;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Debug;
+ };
+ 97C147071CF9000F007C117D /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ DEVELOPMENT_TEAM = "";
+ ENABLE_BITCODE = NO;
+ FRAMEWORK_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Flutter",
+ );
+ INFOPLIST_FILE = Runner/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LIBRARY_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Flutter",
+ );
+ PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.cameraExample;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 03BB76712665316900CE5A93 /* Build configuration list for PBXNativeTarget "RunnerTests" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 03BB766F2665316900CE5A93 /* Debug */,
+ 03BB76702665316900CE5A93 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 97C147031CF9000F007C117D /* Debug */,
+ 97C147041CF9000F007C117D /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 97C147061CF9000F007C117D /* Debug */,
+ 97C147071CF9000F007C117D /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 97C146E61CF9000F007C117D /* Project object */;
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata
new file mode 100644
index 0000000..919434a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+ version = "1.0">
+ <FileRef
+ location = "self:">
+ </FileRef>
+</Workspace>
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
new file mode 100644
index 0000000..f4b3c10
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
@@ -0,0 +1,104 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Scheme
+ LastUpgradeVersion = "1300"
+ version = "1.3">
+ <BuildAction
+ parallelizeBuildables = "YES"
+ buildImplicitDependencies = "YES">
+ <BuildActionEntries>
+ <BuildActionEntry
+ buildForTesting = "YES"
+ buildForRunning = "YES"
+ buildForProfiling = "YES"
+ buildForArchiving = "YES"
+ buildForAnalyzing = "YES">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "97C146ED1CF9000F007C117D"
+ BuildableName = "Runner.app"
+ BlueprintName = "Runner"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </BuildActionEntry>
+ </BuildActionEntries>
+ </BuildAction>
+ <TestAction
+ buildConfiguration = "Debug"
+ selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+ selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+ shouldUseLaunchSchemeArgsEnv = "YES">
+ <MacroExpansion>
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "97C146ED1CF9000F007C117D"
+ BuildableName = "Runner.app"
+ BlueprintName = "Runner"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </MacroExpansion>
+ <Testables>
+ <TestableReference
+ skipped = "NO">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "03BB76672665316900CE5A93"
+ BuildableName = "RunnerTests.xctest"
+ BlueprintName = "RunnerTests"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </TestableReference>
+ </Testables>
+ </TestAction>
+ <LaunchAction
+ buildConfiguration = "Debug"
+ selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+ selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+ launchStyle = "0"
+ useCustomWorkingDirectory = "NO"
+ ignoresPersistentStateOnLaunch = "NO"
+ debugDocumentVersioning = "YES"
+ debugServiceExtension = "internal"
+ allowLocationSimulation = "YES">
+ <BuildableProductRunnable
+ runnableDebuggingMode = "0">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "97C146ED1CF9000F007C117D"
+ BuildableName = "Runner.app"
+ BlueprintName = "Runner"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </BuildableProductRunnable>
+ <AdditionalOptions>
+ <AdditionalOption
+ key = "NSZombieEnabled"
+ value = "YES"
+ isEnabled = "YES">
+ </AdditionalOption>
+ </AdditionalOptions>
+ </LaunchAction>
+ <ProfileAction
+ buildConfiguration = "Release"
+ shouldUseLaunchSchemeArgsEnv = "YES"
+ savedToolIdentifier = ""
+ useCustomWorkingDirectory = "NO"
+ debugDocumentVersioning = "YES">
+ <BuildableProductRunnable
+ runnableDebuggingMode = "0">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "97C146ED1CF9000F007C117D"
+ BuildableName = "Runner.app"
+ BlueprintName = "Runner"
+ ReferencedContainer = "container:Runner.xcodeproj">
+ </BuildableReference>
+ </BuildableProductRunnable>
+ </ProfileAction>
+ <AnalyzeAction
+ buildConfiguration = "Debug">
+ </AnalyzeAction>
+ <ArchiveAction
+ buildConfiguration = "Release"
+ revealArchiveInOrganizer = "YES">
+ </ArchiveAction>
+</Scheme>
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/contents.xcworkspacedata b/packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/contents.xcworkspacedata
new file mode 100644
index 0000000..21a3cc1
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/contents.xcworkspacedata
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+ version = "1.0">
+ <FileRef
+ location = "group:Runner.xcodeproj">
+ </FileRef>
+ <FileRef
+ location = "group:Pods/Pods.xcodeproj">
+ </FileRef>
+</Workspace>
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
new file mode 100644
index 0000000..18d9810
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>IDEDidComputeMac32BitWarning</key>
+ <true/>
+</dict>
+</plist>
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/AppDelegate.h b/packages/camera/camera_avfoundation/example/ios/Runner/AppDelegate.h
new file mode 100644
index 0000000..0681d28
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/AppDelegate.h
@@ -0,0 +1,10 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : FlutterAppDelegate
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/AppDelegate.m b/packages/camera/camera_avfoundation/example/ios/Runner/AppDelegate.m
new file mode 100644
index 0000000..30b8796
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/AppDelegate.m
@@ -0,0 +1,17 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "AppDelegate.h"
+#include "GeneratedPluginRegistrant.h"
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ [GeneratedPluginRegistrant registerWithRegistry:self];
+ // Override point for customization after application launch.
+ return [super application:application didFinishLaunchingWithOptions:launchOptions];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 0000000..d225b3c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,121 @@
+{
+ "images" : [
+ {
+ "size" : "20x20",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-20x20@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "20x20",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-20x20@3x.png",
+ "scale" : "3x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-29x29@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-29x29@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-29x29@3x.png",
+ "scale" : "3x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-40x40@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-40x40@3x.png",
+ "scale" : "3x"
+ },
+ {
+ "size" : "60x60",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-60x60@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "60x60",
+ "idiom" : "iphone",
+ "filename" : "Icon-App-60x60@3x.png",
+ "scale" : "3x"
+ },
+ {
+ "size" : "20x20",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-20x20@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "20x20",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-20x20@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-29x29@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-29x29@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-40x40@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-40x40@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "76x76",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-76x76@1x.png",
+ "scale" : "1x"
+ },
+ {
+ "size" : "76x76",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-76x76@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "size" : "83.5x83.5",
+ "idiom" : "ipad",
+ "filename" : "Icon-App-83.5x83.5@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ios-marketing",
+ "size" : "1024x1024",
+ "scale" : "1x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
\ No newline at end of file
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png
new file mode 100644
index 0000000..28c6bf0
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png
new file mode 100644
index 0000000..2ccbfd9
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png
new file mode 100644
index 0000000..f091b6b
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png
new file mode 100644
index 0000000..4cde121
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png
new file mode 100644
index 0000000..d0ef06e
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png
new file mode 100644
index 0000000..dcdc230
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png
new file mode 100644
index 0000000..2ccbfd9
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png
new file mode 100644
index 0000000..c8f9ed8
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png
new file mode 100644
index 0000000..a6d6b86
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png
new file mode 100644
index 0000000..a6d6b86
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png
new file mode 100644
index 0000000..75b2d16
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png
new file mode 100644
index 0000000..c4df70d
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png
new file mode 100644
index 0000000..6a84f41
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png
new file mode 100644
index 0000000..d0e1f58
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json
new file mode 100644
index 0000000..0bedcf2
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json
@@ -0,0 +1,23 @@
+{
+ "images" : [
+ {
+ "idiom" : "universal",
+ "filename" : "LaunchImage.png",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "universal",
+ "filename" : "LaunchImage@2x.png",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "universal",
+ "filename" : "LaunchImage@3x.png",
+ "scale" : "3x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png
new file mode 100644
index 0000000..9da19ea
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png
new file mode 100644
index 0000000..9da19ea
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png
new file mode 100644
index 0000000..9da19ea
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png
Binary files differ
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md
new file mode 100644
index 0000000..89c2725
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md
@@ -0,0 +1,5 @@
+# Launch Screen Assets
+
+You can customize the launch screen with your own desired assets by replacing the image files in this directory.
+
+You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images.
\ No newline at end of file
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Base.lproj/LaunchScreen.storyboard b/packages/camera/camera_avfoundation/example/ios/Runner/Base.lproj/LaunchScreen.storyboard
new file mode 100644
index 0000000..f2e259c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Base.lproj/LaunchScreen.storyboard
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
+ <dependencies>
+ <deployment identifier="iOS"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
+ </dependencies>
+ <scenes>
+ <!--View Controller-->
+ <scene sceneID="EHf-IW-A2E">
+ <objects>
+ <viewController id="01J-lp-oVM" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="Ydg-fD-yQy"/>
+ <viewControllerLayoutGuide type="bottom" id="xbc-2k-c8Z"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <subviews>
+ <imageView opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" image="LaunchImage" translatesAutoresizingMaskIntoConstraints="NO" id="YRO-k0-Ey4">
+ </imageView>
+ </subviews>
+ <color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
+ <constraints>
+ <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerX" secondItem="Ze5-6b-2t3" secondAttribute="centerX" id="1a2-6s-vTC"/>
+ <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerY" secondItem="Ze5-6b-2t3" secondAttribute="centerY" id="4X2-HB-R7a"/>
+ </constraints>
+ </view>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
+ </objects>
+ <point key="canvasLocation" x="53" y="375"/>
+ </scene>
+ </scenes>
+ <resources>
+ <image name="LaunchImage" width="168" height="185"/>
+ </resources>
+</document>
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Base.lproj/Main.storyboard b/packages/camera/camera_avfoundation/example/ios/Runner/Base.lproj/Main.storyboard
new file mode 100644
index 0000000..f3c2851
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Base.lproj/Main.storyboard
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
+ <dependencies>
+ <deployment identifier="iOS"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
+ </dependencies>
+ <scenes>
+ <!--Flutter View Controller-->
+ <scene sceneID="tne-QT-ifu">
+ <objects>
+ <viewController id="BYZ-38-t0r" customClass="FlutterViewController" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
+ <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
+ <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ </view>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
+ </objects>
+ </scene>
+ </scenes>
+</document>
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist
new file mode 100644
index 0000000..ff2e341
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>camera_example</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSApplicationCategoryType</key>
+ <string></string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>NSCameraUsageDescription</key>
+ <string>Can I use the camera please? Only for demo purpose of the app</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Only for demo purpose of the app</string>
+ <key>UILaunchStoryboardName</key>
+ <string>LaunchScreen</string>
+ <key>UIMainStoryboardFile</key>
+ <string>Main</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>arm64</string>
+ </array>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UIViewControllerBasedStatusBarAppearance</key>
+ <false/>
+</dict>
+</plist>
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/main.m b/packages/camera/camera_avfoundation/example/ios/Runner/main.m
new file mode 100644
index 0000000..d1224fe
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/main.m
@@ -0,0 +1,19 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char *argv[]) {
+ @autoreleasepool {
+ // The setup logic in `AppDelegate::didFinishLaunchingWithOptions:` eventually sends camera
+ // operations on the background queue, which would run concurrently with the test cases during
+ // unit tests, making the debugging process confusing. This setup is actually not necessary for
+ // the unit tests, so it is better to skip the AppDelegate when running unit tests.
+ BOOL isTesting = NSClassFromString(@"XCTestCase") != nil;
+ return UIApplicationMain(argc, argv, nil,
+ isTesting ? nil : NSStringFromClass([AppDelegate class]));
+ }
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m
new file mode 100644
index 0000000..6074b87
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m
@@ -0,0 +1,121 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+#import "MockFLTThreadSafeFlutterResult.h"
+
+@interface AvailableCamerasTest : XCTestCase
+@end
+
+@implementation AvailableCamerasTest
+
+- (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone {
+ CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
+ XCTestExpectation *expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ // iPhone 13 Cameras:
+ AVCaptureDevice *wideAngleCamera = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([wideAngleCamera uniqueID]).andReturn(@"0");
+ OCMStub([wideAngleCamera position]).andReturn(AVCaptureDevicePositionBack);
+
+ AVCaptureDevice *frontFacingCamera = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([frontFacingCamera uniqueID]).andReturn(@"1");
+ OCMStub([frontFacingCamera position]).andReturn(AVCaptureDevicePositionFront);
+
+ AVCaptureDevice *ultraWideCamera = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([ultraWideCamera uniqueID]).andReturn(@"2");
+ OCMStub([ultraWideCamera position]).andReturn(AVCaptureDevicePositionBack);
+
+ AVCaptureDevice *telephotoCamera = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([telephotoCamera uniqueID]).andReturn(@"3");
+ OCMStub([telephotoCamera position]).andReturn(AVCaptureDevicePositionBack);
+
+ NSMutableArray *requiredTypes =
+ [@[ AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera ]
+ mutableCopy];
+ if (@available(iOS 13.0, *)) {
+ [requiredTypes addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera];
+ }
+
+ id discoverySessionMock = OCMClassMock([AVCaptureDeviceDiscoverySession class]);
+ OCMStub([discoverySessionMock discoverySessionWithDeviceTypes:requiredTypes
+ mediaType:AVMediaTypeVideo
+ position:AVCaptureDevicePositionUnspecified])
+ .andReturn(discoverySessionMock);
+
+ NSMutableArray *cameras = [NSMutableArray array];
+ [cameras addObjectsFromArray:@[ wideAngleCamera, frontFacingCamera, telephotoCamera ]];
+ if (@available(iOS 13.0, *)) {
+ [cameras addObject:ultraWideCamera];
+ }
+ OCMStub([discoverySessionMock devices]).andReturn([NSArray arrayWithArray:cameras]);
+
+ MockFLTThreadSafeFlutterResult *resultObject =
+ [[MockFLTThreadSafeFlutterResult alloc] initWithExpectation:expectation];
+
+ // Set up method call
+ FlutterMethodCall *call = [FlutterMethodCall methodCallWithMethodName:@"availableCameras"
+ arguments:nil];
+
+ [camera handleMethodCallAsync:call result:resultObject];
+
+ // Verify the result
+ NSDictionary *dictionaryResult = (NSDictionary *)resultObject.receivedResult;
+ if (@available(iOS 13.0, *)) {
+ XCTAssertTrue([dictionaryResult count] == 4);
+ } else {
+ XCTAssertTrue([dictionaryResult count] == 3);
+ }
+}
+- (void)testAvailableCamerasShouldReturnOneCameraOnSingleCameraIPhone {
+ CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
+ XCTestExpectation *expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ // iPhone 8 Cameras:
+ AVCaptureDevice *wideAngleCamera = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([wideAngleCamera uniqueID]).andReturn(@"0");
+ OCMStub([wideAngleCamera position]).andReturn(AVCaptureDevicePositionBack);
+
+ AVCaptureDevice *frontFacingCamera = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([frontFacingCamera uniqueID]).andReturn(@"1");
+ OCMStub([frontFacingCamera position]).andReturn(AVCaptureDevicePositionFront);
+
+ NSMutableArray *requiredTypes =
+ [@[ AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera ]
+ mutableCopy];
+ if (@available(iOS 13.0, *)) {
+ [requiredTypes addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera];
+ }
+
+ id discoverySessionMock = OCMClassMock([AVCaptureDeviceDiscoverySession class]);
+ OCMStub([discoverySessionMock discoverySessionWithDeviceTypes:requiredTypes
+ mediaType:AVMediaTypeVideo
+ position:AVCaptureDevicePositionUnspecified])
+ .andReturn(discoverySessionMock);
+
+ NSMutableArray *cameras = [NSMutableArray array];
+ [cameras addObjectsFromArray:@[ wideAngleCamera, frontFacingCamera ]];
+ OCMStub([discoverySessionMock devices]).andReturn([NSArray arrayWithArray:cameras]);
+
+ MockFLTThreadSafeFlutterResult *resultObject =
+ [[MockFLTThreadSafeFlutterResult alloc] initWithExpectation:expectation];
+
+ // Set up method call
+ FlutterMethodCall *call = [FlutterMethodCall methodCallWithMethodName:@"availableCameras"
+ arguments:nil];
+
+ [camera handleMethodCallAsync:call result:resultObject];
+
+ // Verify the result
+ NSDictionary *dictionaryResult = (NSDictionary *)resultObject.receivedResult;
+ XCTAssertTrue([dictionaryResult count] == 2);
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m
new file mode 100644
index 0000000..89f4030
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m
@@ -0,0 +1,45 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import XCTest;
+
+@interface CameraCaptureSessionQueueRaceConditionTests : XCTestCase
+@end
+
+@implementation CameraCaptureSessionQueueRaceConditionTests
+
+- (void)testFixForCaptureSessionQueueNullPointerCrashDueToRaceCondition {
+ CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
+
+ XCTestExpectation *disposeExpectation =
+ [self expectationWithDescription:@"dispose's result block must be called"];
+ XCTestExpectation *createExpectation =
+ [self expectationWithDescription:@"create's result block must be called"];
+ FlutterMethodCall *disposeCall = [FlutterMethodCall methodCallWithMethodName:@"dispose"
+ arguments:nil];
+ FlutterMethodCall *createCall = [FlutterMethodCall
+ methodCallWithMethodName:@"create"
+ arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
+ // Mimic a dispose call followed by a create call, which can be triggered by slightly dragging the
+ // home bar, causing the app to be inactive, and immediately regain active.
+ [camera handleMethodCall:disposeCall
+ result:^(id _Nullable result) {
+ [disposeExpectation fulfill];
+ }];
+ [camera createCameraOnSessionQueueWithCreateMethodCall:createCall
+ result:[[FLTThreadSafeFlutterResult alloc]
+ initWithResult:^(id _Nullable result) {
+ [createExpectation fulfill];
+ }]];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+ // `captureSessionQueue` must not be nil after `create` call. Otherwise a nil
+ // `captureSessionQueue` passed into `AVCaptureVideoDataOutput::setSampleBufferDelegate:queue:`
+ // API will cause a crash.
+ XCTAssertNotNil(camera.captureSessionQueue,
+ @"captureSessionQueue must not be nil after create method. ");
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m
new file mode 100644
index 0000000..7b641a5
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m
@@ -0,0 +1,55 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+
+@interface FLTCam : NSObject <FlutterTexture,
+ AVCaptureVideoDataOutputSampleBufferDelegate,
+ AVCaptureAudioDataOutputSampleBufferDelegate>
+
+- (void)setExposurePointWithResult:(FlutterResult)result x:(double)x y:(double)y;
+@end
+
+@interface CameraExposureTests : XCTestCase
+@property(readonly, nonatomic) FLTCam *camera;
+@property(readonly, nonatomic) id mockDevice;
+@property(readonly, nonatomic) id mockUIDevice;
+@end
+
+@implementation CameraExposureTests
+
+- (void)setUp {
+ _camera = [[FLTCam alloc] init];
+ _mockDevice = OCMClassMock([AVCaptureDevice class]);
+ _mockUIDevice = OCMPartialMock([UIDevice currentDevice]);
+}
+
+- (void)tearDown {
+ [_mockDevice stopMocking];
+ [_mockUIDevice stopMocking];
+}
+
+- (void)testSetExposurePointWithResult_SetsExposurePointOfInterest {
+ // UI is currently in landscape left orientation
+ OCMStub([(UIDevice *)_mockUIDevice orientation]).andReturn(UIDeviceOrientationLandscapeLeft);
+ // Exposure point of interest is supported
+ OCMStub([_mockDevice isExposurePointOfInterestSupported]).andReturn(true);
+ // Set mock device as the current capture device
+ [_camera setValue:_mockDevice forKey:@"captureDevice"];
+
+ // Run test
+ [_camera
+ setExposurePointWithResult:^void(id _Nullable result) {
+ }
+ x:1
+ y:1];
+
+ // Verify the focus point of interest has been set
+ OCMVerify([_mockDevice setExposurePointOfInterest:CGPointMake(1, 1)]);
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m
new file mode 100644
index 0000000..1b6ada5
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m
@@ -0,0 +1,127 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+
+@interface CameraFocusTests : XCTestCase
+@property(readonly, nonatomic) FLTCam *camera;
+@property(readonly, nonatomic) id mockDevice;
+@property(readonly, nonatomic) id mockUIDevice;
+@end
+
+@implementation CameraFocusTests
+
+- (void)setUp {
+ _camera = [[FLTCam alloc] init];
+ _mockDevice = OCMClassMock([AVCaptureDevice class]);
+ _mockUIDevice = OCMPartialMock([UIDevice currentDevice]);
+}
+
+- (void)tearDown {
+ [_mockDevice stopMocking];
+ [_mockUIDevice stopMocking];
+}
+
+- (void)testAutoFocusWithContinuousModeSupported_ShouldSetContinuousAutoFocus {
+ // AVCaptureFocusModeContinuousAutoFocus is supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true);
+ // AVCaptureFocusModeAutoFocus is supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true);
+
+ // Don't expect setFocusMode:AVCaptureFocusModeAutoFocus
+ [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus];
+
+ // Run test
+ [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice];
+
+ // Expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus
+ OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]);
+}
+
+- (void)testAutoFocusWithContinuousModeNotSupported_ShouldSetAutoFocus {
+ // AVCaptureFocusModeContinuousAutoFocus is not supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
+ .andReturn(false);
+ // AVCaptureFocusModeAutoFocus is supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true);
+
+ // Don't expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus
+ [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+
+ // Run test
+ [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice];
+
+ // Expect setFocusMode:AVCaptureFocusModeAutoFocus
+ OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]);
+}
+
+- (void)testAutoFocusWithNoModeSupported_ShouldSetNothing {
+ // AVCaptureFocusModeContinuousAutoFocus is not supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus])
+ .andReturn(false);
+ // AVCaptureFocusModeAutoFocus is not supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(false);
+
+ // Don't expect any setFocus
+ [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+ [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus];
+
+ // Run test
+ [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice];
+}
+
+- (void)testLockedFocusWithModeSupported_ShouldSetModeAutoFocus {
+ // AVCaptureFocusModeContinuousAutoFocus is supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true);
+ // AVCaptureFocusModeAutoFocus is supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true);
+
+ // Don't expect any setFocus
+ [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+
+ // Run test
+ [_camera applyFocusMode:FLTFocusModeLocked onDevice:_mockDevice];
+
+ // Expect setFocusMode:AVCaptureFocusModeAutoFocus
+ OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]);
+}
+
+- (void)testLockedFocusWithModeNotSupported_ShouldSetNothing {
+ // AVCaptureFocusModeContinuousAutoFocus is supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true);
+ // AVCaptureFocusModeAutoFocus is not supported
+ OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(false);
+
+ // Don't expect any setFocus
+ [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+ [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus];
+
+ // Run test
+ [_camera applyFocusMode:FLTFocusModeLocked onDevice:_mockDevice];
+}
+
+- (void)testSetFocusPointWithResult_SetsFocusPointOfInterest {
+ // UI is currently in landscape left orientation
+ OCMStub([(UIDevice *)_mockUIDevice orientation]).andReturn(UIDeviceOrientationLandscapeLeft);
+ // Focus point of interest is supported
+ OCMStub([_mockDevice isFocusPointOfInterestSupported]).andReturn(true);
+ // Set mock device as the current capture device
+ [_camera setValue:_mockDevice forKey:@"captureDevice"];
+
+ // Run test
+ [_camera setFocusPointWithResult:[[FLTThreadSafeFlutterResult alloc]
+ initWithResult:^(id _Nullable result){
+ }]
+ x:1
+ y:1];
+
+ // Verify the focus point of interest has been set
+ OCMVerify([_mockDevice setFocusPointOfInterest:CGPointMake(1, 1)]);
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m
new file mode 100644
index 0000000..bd20134
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m
@@ -0,0 +1,48 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+#import "MockFLTThreadSafeFlutterResult.h"
+
+@interface CameraMethodChannelTests : XCTestCase
+@end
+
+@implementation CameraMethodChannelTests
+
+- (void)testCreate_ShouldCallResultOnMainThread {
+ CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
+
+ XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
+
+ // Set up mocks for initWithCameraName method
+ id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]])
+ .andReturn([AVCaptureInput alloc]);
+
+ id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]);
+ OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock);
+ OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
+
+ MockFLTThreadSafeFlutterResult *resultObject =
+ [[MockFLTThreadSafeFlutterResult alloc] initWithExpectation:expectation];
+
+ // Set up method call
+ FlutterMethodCall *call = [FlutterMethodCall
+ methodCallWithMethodName:@"create"
+ arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
+
+ [camera createCameraOnSessionQueueWithCreateMethodCall:call result:resultObject];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+
+ // Verify the result
+ NSDictionary *dictionaryResult = (NSDictionary *)resultObject.receivedResult;
+ XCTAssertNotNil(dictionaryResult);
+ XCTAssert([[dictionaryResult allKeys] containsObject:@"cameraId"]);
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m
new file mode 100644
index 0000000..60e88ff
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m
@@ -0,0 +1,135 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import XCTest;
+@import Flutter;
+
+#import <OCMock/OCMock.h>
+
+@interface CameraOrientationTests : XCTestCase
+@end
+
+@implementation CameraOrientationTests
+
+- (void)testOrientationNotifications {
+ id mockMessenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
+ CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:mockMessenger];
+
+ [mockMessenger setExpectationOrderMatters:YES];
+
+ [self rotate:UIDeviceOrientationPortraitUpsideDown
+ expectedChannelOrientation:@"portraitDown"
+ cameraPlugin:cameraPlugin
+ messenger:mockMessenger];
+ [self rotate:UIDeviceOrientationPortrait
+ expectedChannelOrientation:@"portraitUp"
+ cameraPlugin:cameraPlugin
+ messenger:mockMessenger];
+ [self rotate:UIDeviceOrientationLandscapeRight
+ expectedChannelOrientation:@"landscapeLeft"
+ cameraPlugin:cameraPlugin
+ messenger:mockMessenger];
+ [self rotate:UIDeviceOrientationLandscapeLeft
+ expectedChannelOrientation:@"landscapeRight"
+ cameraPlugin:cameraPlugin
+ messenger:mockMessenger];
+
+ OCMReject([mockMessenger sendOnChannel:[OCMArg any] message:[OCMArg any]]);
+
+ // No notification when flat.
+ [cameraPlugin
+ orientationChanged:[self createMockNotificationForOrientation:UIDeviceOrientationFaceUp]];
+ // No notification when facedown.
+ [cameraPlugin
+ orientationChanged:[self createMockNotificationForOrientation:UIDeviceOrientationFaceDown]];
+
+ OCMVerifyAll(mockMessenger);
+}
+
+- (void)testOrientationUpdateMustBeOnCaptureSessionQueue {
+ XCTestExpectation *queueExpectation = [self
+ expectationWithDescription:@"Orientation update must happen on the capture session queue"];
+
+ CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
+ const char *captureSessionQueueSpecific = "capture_session_queue";
+ dispatch_queue_set_specific(camera.captureSessionQueue, captureSessionQueueSpecific,
+ (void *)captureSessionQueueSpecific, NULL);
+ FLTCam *mockCam = OCMClassMock([FLTCam class]);
+ camera.camera = mockCam;
+ OCMStub([mockCam setDeviceOrientation:UIDeviceOrientationLandscapeLeft])
+ .andDo(^(NSInvocation *invocation) {
+ if (dispatch_get_specific(captureSessionQueueSpecific)) {
+ [queueExpectation fulfill];
+ }
+ });
+
+ [camera orientationChanged:
+ [self createMockNotificationForOrientation:UIDeviceOrientationLandscapeLeft]];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)rotate:(UIDeviceOrientation)deviceOrientation
+ expectedChannelOrientation:(NSString *)channelOrientation
+ cameraPlugin:(CameraPlugin *)cameraPlugin
+ messenger:(NSObject<FlutterBinaryMessenger> *)messenger {
+ XCTestExpectation *orientationExpectation = [self expectationWithDescription:channelOrientation];
+
+ OCMExpect([messenger
+ sendOnChannel:[OCMArg any]
+ message:[OCMArg checkWithBlock:^BOOL(NSData *data) {
+ NSObject<FlutterMethodCodec> *codec = [FlutterStandardMethodCodec sharedInstance];
+ FlutterMethodCall *methodCall = [codec decodeMethodCall:data];
+ [orientationExpectation fulfill];
+ return
+ [methodCall.method isEqualToString:@"orientation_changed"] &&
+ [methodCall.arguments isEqualToDictionary:@{@"orientation" : channelOrientation}];
+ }]]);
+
+ [cameraPlugin orientationChanged:[self createMockNotificationForOrientation:deviceOrientation]];
+ [self waitForExpectationsWithTimeout:30.0 handler:nil];
+}
+
+- (void)testOrientationChanged_noRetainCycle {
+ dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL);
+ FLTCam *mockCam = OCMClassMock([FLTCam class]);
+ FLTThreadSafeMethodChannel *mockChannel = OCMClassMock([FLTThreadSafeMethodChannel class]);
+
+ __weak CameraPlugin *weakCamera;
+
+ @autoreleasepool {
+ CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
+ weakCamera = camera;
+ camera.captureSessionQueue = captureSessionQueue;
+ camera.camera = mockCam;
+ camera.deviceEventMethodChannel = mockChannel;
+
+ [camera orientationChanged:
+ [self createMockNotificationForOrientation:UIDeviceOrientationLandscapeLeft]];
+ }
+
+ // Sanity check
+ XCTAssertNil(weakCamera, @"Camera must have been deallocated.");
+
+ // Must check in captureSessionQueue since orientationChanged dispatches to this queue.
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:@"Dispatched to capture session queue"];
+ dispatch_async(captureSessionQueue, ^{
+ OCMVerify(never(), [mockCam setDeviceOrientation:UIDeviceOrientationLandscapeLeft]);
+ OCMVerify(never(), [mockChannel invokeMethod:@"orientation_changed" arguments:OCMOCK_ANY]);
+ [expectation fulfill];
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (NSNotification *)createMockNotificationForOrientation:(UIDeviceOrientation)deviceOrientation {
+ UIDevice *mockDevice = OCMClassMock([UIDevice class]);
+ OCMStub([mockDevice orientation]).andReturn(deviceOrientation);
+
+ return [NSNotification notificationWithName:@"orientation_test" object:mockDevice];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m
new file mode 100644
index 0000000..24ca5b6
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m
@@ -0,0 +1,231 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import AVFoundation;
+@import XCTest;
+#import <OCMock/OCMock.h>
+#import "CameraTestUtils.h"
+
+@interface CameraPermissionTests : XCTestCase
+
+@end
+
+@implementation CameraPermissionTests
+
+#pragma mark - camera permissions
+
+- (void)testRequestCameraPermission_completeWithoutErrorIfPreviouslyAuthorized {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:
+ @"Must complete without error if camera access was previously authorized."];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo])
+ .andReturn(AVAuthorizationStatusAuthorized);
+
+ FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+ if (error == nil) {
+ [expectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+- (void)testRequestCameraPermission_completeWithErrorIfPreviouslyDenied {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:
+ @"Must complete with error if camera access was previously denied."];
+ FlutterError *expectedError =
+ [FlutterError errorWithCode:@"CameraAccessDeniedWithoutPrompt"
+ message:@"User has previously denied the camera access request. Go to "
+ @"Settings to enable camera access."
+ details:nil];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo])
+ .andReturn(AVAuthorizationStatusDenied);
+ FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+ if ([error isEqual:expectedError]) {
+ [expectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testRequestCameraPermission_completeWithErrorIfRestricted {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:@"Must complete with error if camera access is restricted."];
+ FlutterError *expectedError = [FlutterError errorWithCode:@"CameraAccessRestricted"
+ message:@"Camera access is restricted. "
+ details:nil];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo])
+ .andReturn(AVAuthorizationStatusRestricted);
+
+ FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+ if ([error isEqual:expectedError]) {
+ [expectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testRequestCameraPermission_completeWithoutErrorIfUserGrantAccess {
+ XCTestExpectation *grantedExpectation = [self
+ expectationWithDescription:@"Must complete without error if user choose to grant access"];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo])
+ .andReturn(AVAuthorizationStatusNotDetermined);
+ // Mimic user choosing "allow" in permission dialog.
+ OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeVideo
+ completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) {
+ block(YES);
+ return YES;
+ }]]);
+
+ FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+ if (error == nil) {
+ [grantedExpectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testRequestCameraPermission_completeWithErrorIfUserDenyAccess {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:@"Must complete with error if user choose to deny access"];
+ FlutterError *expectedError =
+ [FlutterError errorWithCode:@"CameraAccessDenied"
+ message:@"User denied the camera access request."
+ details:nil];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo])
+ .andReturn(AVAuthorizationStatusNotDetermined);
+
+ // Mimic user choosing "deny" in permission dialog.
+ OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeVideo
+ completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) {
+ block(NO);
+ return YES;
+ }]]);
+ FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+ if ([error isEqual:expectedError]) {
+ [expectation fulfill];
+ }
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+#pragma mark - audio permissions
+
+- (void)testRequestAudioPermission_completeWithoutErrorIfPreviouslyAuthorized {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:
+ @"Must complete without error if audio access was previously authorized."];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio])
+ .andReturn(AVAuthorizationStatusAuthorized);
+
+ FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
+ if (error == nil) {
+ [expectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+- (void)testRequestAudioPermission_completeWithErrorIfPreviouslyDenied {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:
+ @"Must complete with error if audio access was previously denied."];
+ FlutterError *expectedError =
+ [FlutterError errorWithCode:@"AudioAccessDeniedWithoutPrompt"
+ message:@"User has previously denied the audio access request. Go to "
+ @"Settings to enable audio access."
+ details:nil];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio])
+ .andReturn(AVAuthorizationStatusDenied);
+ FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
+ if ([error isEqual:expectedError]) {
+ [expectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testRequestAudioPermission_completeWithErrorIfRestricted {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:@"Must complete with error if audio access is restricted."];
+ FlutterError *expectedError = [FlutterError errorWithCode:@"AudioAccessRestricted"
+ message:@"Audio access is restricted. "
+ details:nil];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio])
+ .andReturn(AVAuthorizationStatusRestricted);
+
+ FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
+ if ([error isEqual:expectedError]) {
+ [expectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testRequestAudioPermission_completeWithoutErrorIfUserGrantAccess {
+ XCTestExpectation *grantedExpectation = [self
+ expectationWithDescription:@"Must complete without error if user choose to grant access"];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio])
+ .andReturn(AVAuthorizationStatusNotDetermined);
+ // Mimic user choosing "allow" in permission dialog.
+ OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeAudio
+ completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) {
+ block(YES);
+ return YES;
+ }]]);
+
+ FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
+ if (error == nil) {
+ [grantedExpectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testRequestAudioPermission_completeWithErrorIfUserDenyAccess {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:@"Must complete with error if user choose to deny access"];
+ FlutterError *expectedError = [FlutterError errorWithCode:@"AudioAccessDenied"
+ message:@"User denied the audio access request."
+ details:nil];
+
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio])
+ .andReturn(AVAuthorizationStatusNotDetermined);
+
+ // Mimic user choosing "deny" in permission dialog.
+ OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeAudio
+ completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) {
+ block(NO);
+ return YES;
+ }]]);
+ FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
+ if ([error isEqual:expectedError]) {
+ [expectation fulfill];
+ }
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m
new file mode 100644
index 0000000..1dfc90b
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m
@@ -0,0 +1,33 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+#import "MockFLTThreadSafeFlutterResult.h"
+
+@interface CameraPreviewPauseTests : XCTestCase
+@end
+
+@implementation CameraPreviewPauseTests
+
+- (void)testPausePreviewWithResult_shouldPausePreview {
+ FLTCam *camera = [[FLTCam alloc] init];
+ MockFLTThreadSafeFlutterResult *resultObject = [[MockFLTThreadSafeFlutterResult alloc] init];
+
+ [camera pausePreviewWithResult:resultObject];
+ XCTAssertTrue(camera.isPreviewPaused);
+}
+
+- (void)testResumePreviewWithResult_shouldResumePreview {
+ FLTCam *camera = [[FLTCam alloc] init];
+ MockFLTThreadSafeFlutterResult *resultObject = [[MockFLTThreadSafeFlutterResult alloc] init];
+
+ [camera resumePreviewWithResult:resultObject];
+ XCTAssertFalse(camera.isPreviewPaused);
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m
new file mode 100644
index 0000000..18c01e5
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m
@@ -0,0 +1,107 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation.Test;
+@import AVFoundation;
+@import XCTest;
+
+@interface CameraPropertiesTests : XCTestCase
+
+@end
+
+@implementation CameraPropertiesTests
+
+#pragma mark - flash mode tests
+
+- (void)testFLTGetFLTFlashModeForString {
+ XCTAssertEqual(FLTFlashModeOff, FLTGetFLTFlashModeForString(@"off"));
+ XCTAssertEqual(FLTFlashModeAuto, FLTGetFLTFlashModeForString(@"auto"));
+ XCTAssertEqual(FLTFlashModeAlways, FLTGetFLTFlashModeForString(@"always"));
+ XCTAssertEqual(FLTFlashModeTorch, FLTGetFLTFlashModeForString(@"torch"));
+ XCTAssertThrows(FLTGetFLTFlashModeForString(@"unknown"));
+}
+
+- (void)testFLTGetAVCaptureFlashModeForFLTFlashMode {
+ XCTAssertEqual(AVCaptureFlashModeOff, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeOff));
+ XCTAssertEqual(AVCaptureFlashModeAuto, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeAuto));
+ XCTAssertEqual(AVCaptureFlashModeOn, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeAlways));
+ XCTAssertEqual(-1, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeTorch));
+}
+
+#pragma mark - exposure mode tests
+
+- (void)testFLTGetStringForFLTExposureMode {
+ XCTAssertEqualObjects(@"auto", FLTGetStringForFLTExposureMode(FLTExposureModeAuto));
+ XCTAssertEqualObjects(@"locked", FLTGetStringForFLTExposureMode(FLTExposureModeLocked));
+ XCTAssertThrows(FLTGetStringForFLTExposureMode(-1));
+}
+
+- (void)testFLTGetFLTExposureModeForString {
+ XCTAssertEqual(FLTExposureModeAuto, FLTGetFLTExposureModeForString(@"auto"));
+ XCTAssertEqual(FLTExposureModeLocked, FLTGetFLTExposureModeForString(@"locked"));
+ XCTAssertThrows(FLTGetFLTExposureModeForString(@"unknown"));
+}
+
+#pragma mark - focus mode tests
+
+- (void)testFLTGetStringForFLTFocusMode {
+ XCTAssertEqualObjects(@"auto", FLTGetStringForFLTFocusMode(FLTFocusModeAuto));
+ XCTAssertEqualObjects(@"locked", FLTGetStringForFLTFocusMode(FLTFocusModeLocked));
+ XCTAssertThrows(FLTGetStringForFLTFocusMode(-1));
+}
+
+- (void)testFLTGetFLTFocusModeForString {
+ XCTAssertEqual(FLTFocusModeAuto, FLTGetFLTFocusModeForString(@"auto"));
+ XCTAssertEqual(FLTFocusModeLocked, FLTGetFLTFocusModeForString(@"locked"));
+ XCTAssertThrows(FLTGetFLTFocusModeForString(@"unknown"));
+}
+
+#pragma mark - resolution preset tests
+
+- (void)testFLTGetFLTResolutionPresetForString {
+ XCTAssertEqual(FLTResolutionPresetVeryLow, FLTGetFLTResolutionPresetForString(@"veryLow"));
+ XCTAssertEqual(FLTResolutionPresetLow, FLTGetFLTResolutionPresetForString(@"low"));
+ XCTAssertEqual(FLTResolutionPresetMedium, FLTGetFLTResolutionPresetForString(@"medium"));
+ XCTAssertEqual(FLTResolutionPresetHigh, FLTGetFLTResolutionPresetForString(@"high"));
+ XCTAssertEqual(FLTResolutionPresetVeryHigh, FLTGetFLTResolutionPresetForString(@"veryHigh"));
+ XCTAssertEqual(FLTResolutionPresetUltraHigh, FLTGetFLTResolutionPresetForString(@"ultraHigh"));
+ XCTAssertEqual(FLTResolutionPresetMax, FLTGetFLTResolutionPresetForString(@"max"));
+ XCTAssertThrows(FLTGetFLTResolutionPresetForString(@"unknown"));
+}
+
+#pragma mark - video format tests
+
+- (void)testFLTGetVideoFormatFromString {
+ XCTAssertEqual(kCVPixelFormatType_32BGRA, FLTGetVideoFormatFromString(@"bgra8888"));
+ XCTAssertEqual(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ FLTGetVideoFormatFromString(@"yuv420"));
+ XCTAssertEqual(kCVPixelFormatType_32BGRA, FLTGetVideoFormatFromString(@"unknown"));
+}
+
+#pragma mark - device orientation tests
+
+- (void)testFLTGetUIDeviceOrientationForString {
+ XCTAssertEqual(UIDeviceOrientationPortraitUpsideDown,
+ FLTGetUIDeviceOrientationForString(@"portraitDown"));
+ XCTAssertEqual(UIDeviceOrientationLandscapeRight,
+ FLTGetUIDeviceOrientationForString(@"landscapeLeft"));
+ XCTAssertEqual(UIDeviceOrientationLandscapeLeft,
+ FLTGetUIDeviceOrientationForString(@"landscapeRight"));
+ XCTAssertEqual(UIDeviceOrientationPortrait, FLTGetUIDeviceOrientationForString(@"portraitUp"));
+ XCTAssertThrows(FLTGetUIDeviceOrientationForString(@"unknown"));
+}
+
+- (void)testFLTGetStringForUIDeviceOrientation {
+ XCTAssertEqualObjects(@"portraitDown",
+ FLTGetStringForUIDeviceOrientation(UIDeviceOrientationPortraitUpsideDown));
+ XCTAssertEqualObjects(@"landscapeLeft",
+ FLTGetStringForUIDeviceOrientation(UIDeviceOrientationLandscapeRight));
+ XCTAssertEqualObjects(@"landscapeRight",
+ FLTGetStringForUIDeviceOrientation(UIDeviceOrientationLandscapeLeft));
+ XCTAssertEqualObjects(@"portraitUp",
+ FLTGetStringForUIDeviceOrientation(UIDeviceOrientationPortrait));
+ XCTAssertEqualObjects(@"portraitUp", FLTGetStringForUIDeviceOrientation(-1));
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
new file mode 100644
index 0000000..f2d4611
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
@@ -0,0 +1,18 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// Creates an `FLTCam` that runs its capture session operations on a given queue.
+/// @param captureSessionQueue the capture session queue
+/// @return an FLTCam object.
+extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue);
+
+/// Creates a test sample buffer.
+/// @return a test sample buffer.
+extern CMSampleBufferRef FLTCreateTestSampleBuffer(void);
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
new file mode 100644
index 0000000..0ae4887
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
@@ -0,0 +1,44 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "CameraTestUtils.h"
+#import <OCMock/OCMock.h>
+@import AVFoundation;
+
+FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue) {
+ id inputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
+ .andReturn(inputMock);
+
+ id sessionMock = OCMClassMock([AVCaptureSession class]);
+ OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
+ OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
+
+ return [[FLTCam alloc] initWithCameraName:@"camera"
+ resolutionPreset:@"medium"
+ enableAudio:true
+ orientation:UIDeviceOrientationPortrait
+ captureSession:sessionMock
+ captureSessionQueue:captureSessionQueue
+ error:nil];
+}
+
+CMSampleBufferRef FLTCreateTestSampleBuffer(void) {
+ CVPixelBufferRef pixelBuffer;
+ CVPixelBufferCreate(kCFAllocatorDefault, 100, 100, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
+
+ CMFormatDescriptionRef formatDescription;
+ CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer,
+ &formatDescription);
+
+ CMSampleTimingInfo timingInfo = {CMTimeMake(1, 44100), kCMTimeZero, kCMTimeInvalid};
+
+ CMSampleBufferRef sampleBuffer;
+ CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDescription,
+ &timingInfo, &sampleBuffer);
+
+ CFRelease(pixelBuffer);
+ CFRelease(formatDescription);
+ return sampleBuffer;
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m
new file mode 100644
index 0000000..d1a835c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m
@@ -0,0 +1,49 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+
+@interface FLTCam : NSObject <FlutterTexture,
+ AVCaptureVideoDataOutputSampleBufferDelegate,
+ AVCaptureAudioDataOutputSampleBufferDelegate>
+
+- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
+ x:(double)x
+ y:(double)y;
+
+@end
+
+@interface CameraUtilTests : XCTestCase
+@property(readonly, nonatomic) FLTCam *camera;
+
+@end
+
+@implementation CameraUtilTests
+
+- (void)setUp {
+ _camera = [[FLTCam alloc] init];
+}
+
+- (void)testGetCGPointForCoordsWithOrientation_ShouldRotateCoords {
+ CGPoint point;
+ point = [_camera getCGPointForCoordsWithOrientation:UIDeviceOrientationLandscapeLeft x:1 y:1];
+ XCTAssertTrue(CGPointEqualToPoint(point, CGPointMake(1, 1)),
+ @"Resulting coordinates are invalid.");
+ point = [_camera getCGPointForCoordsWithOrientation:UIDeviceOrientationPortrait x:0 y:1];
+ XCTAssertTrue(CGPointEqualToPoint(point, CGPointMake(1, 1)),
+ @"Resulting coordinates are invalid.");
+ point = [_camera getCGPointForCoordsWithOrientation:UIDeviceOrientationLandscapeRight x:0 y:0];
+ XCTAssertTrue(CGPointEqualToPoint(point, CGPointMake(1, 1)),
+ @"Resulting coordinates are invalid.");
+ point = [_camera getCGPointForCoordsWithOrientation:UIDeviceOrientationPortraitUpsideDown
+ x:1
+ y:0];
+ XCTAssertTrue(CGPointEqualToPoint(point, CGPointMake(1, 1)),
+ @"Resulting coordinates are invalid.");
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m
new file mode 100644
index 0000000..8a7c34c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m
@@ -0,0 +1,97 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import AVFoundation;
+@import XCTest;
+#import <OCMock/OCMock.h>
+#import "CameraTestUtils.h"
+
+/// Includes test cases related to photo capture operations for FLTCam class.
+@interface FLTCamPhotoCaptureTests : XCTestCase
+
+@end
+
+@implementation FLTCamPhotoCaptureTests
+
+- (void)testCaptureToFile_mustReportErrorToResultIfSavePhotoDelegateCompletionsWithError {
+ XCTestExpectation *errorExpectation =
+ [self expectationWithDescription:
+ @"Must send error to result if save photo delegate completes with error."];
+
+ dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL);
+ dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific,
+ (void *)FLTCaptureSessionQueueSpecific, NULL);
+ FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
+ AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+ id mockSettings = OCMClassMock([AVCapturePhotoSettings class]);
+ OCMStub([mockSettings photoSettings]).andReturn(settings);
+
+ NSError *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil];
+ id mockResult = OCMClassMock([FLTThreadSafeFlutterResult class]);
+ OCMStub([mockResult sendError:error]).andDo(^(NSInvocation *invocation) {
+ [errorExpectation fulfill];
+ });
+
+ id mockOutput = OCMClassMock([AVCapturePhotoOutput class]);
+ OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY])
+ .andDo(^(NSInvocation *invocation) {
+ FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)];
+ // Completion runs on IO queue.
+ dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL);
+ dispatch_async(ioQueue, ^{
+ delegate.completionHandler(nil, error);
+ });
+ });
+ cam.capturePhotoOutput = mockOutput;
+
+ // `FLTCam::captureToFile` runs on capture session queue.
+ dispatch_async(captureSessionQueue, ^{
+ [cam captureToFile:mockResult];
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testCaptureToFile_mustReportPathToResultIfSavePhotoDelegateCompletionsWithPath {
+ XCTestExpectation *pathExpectation =
+ [self expectationWithDescription:
+ @"Must send file path to result if save photo delegate completes with file path."];
+
+ dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL);
+ dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific,
+ (void *)FLTCaptureSessionQueueSpecific, NULL);
+ FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
+
+ AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+ id mockSettings = OCMClassMock([AVCapturePhotoSettings class]);
+ OCMStub([mockSettings photoSettings]).andReturn(settings);
+
+ NSString *filePath = @"test";
+ id mockResult = OCMClassMock([FLTThreadSafeFlutterResult class]);
+ OCMStub([mockResult sendSuccessWithData:filePath]).andDo(^(NSInvocation *invocation) {
+ [pathExpectation fulfill];
+ });
+
+ id mockOutput = OCMClassMock([AVCapturePhotoOutput class]);
+ OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY])
+ .andDo(^(NSInvocation *invocation) {
+ FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)];
+ // Completion runs on IO queue.
+ dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL);
+ dispatch_async(ioQueue, ^{
+ delegate.completionHandler(filePath, nil);
+ });
+ });
+ cam.capturePhotoOutput = mockOutput;
+
+ // `FLTCam::captureToFile` runs on capture session queue.
+ dispatch_async(captureSessionQueue, ^{
+ [cam captureToFile:mockResult];
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
new file mode 100644
index 0000000..94426ab
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
@@ -0,0 +1,41 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import AVFoundation;
+@import XCTest;
+#import <OCMock/OCMock.h>
+#import "CameraTestUtils.h"
+
+/// Includes test cases related to sample buffer handling for FLTCam class.
+@interface FLTCamSampleBufferTests : XCTestCase
+
+@end
+
+@implementation FLTCamSampleBufferTests
+
+- (void)testSampleBufferCallbackQueueMustBeCaptureSessionQueue {
+ dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL);
+ FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
+ XCTAssertEqual(captureSessionQueue, cam.captureVideoOutput.sampleBufferCallbackQueue,
+ @"Sample buffer callback queue must be the capture session queue.");
+}
+
+- (void)testCopyPixelBuffer {
+ FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("test", NULL));
+ CMSampleBufferRef capturedSampleBuffer = FLTCreateTestSampleBuffer();
+ CVPixelBufferRef capturedPixelBuffer = CMSampleBufferGetImageBuffer(capturedSampleBuffer);
+  // Mimic the sample buffer callback that fires when a new video sample is captured.
+ [cam captureOutput:cam.captureVideoOutput
+ didOutputSampleBuffer:capturedSampleBuffer
+ fromConnection:OCMClassMock([AVCaptureConnection class])];
+ CVPixelBufferRef deliveriedPixelBuffer = [cam copyPixelBuffer];
+ XCTAssertEqual(deliveriedPixelBuffer, capturedPixelBuffer,
+ @"FLTCam must deliver the latest captured pixel buffer to copyPixelBuffer API.");
+ CFRelease(capturedSampleBuffer);
+ CFRelease(deliveriedPixelBuffer);
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m
new file mode 100644
index 0000000..f763359
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m
@@ -0,0 +1,140 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import AVFoundation;
+@import XCTest;
+#import <OCMock/OCMock.h>
+
+@interface FLTSavePhotoDelegateTests : XCTestCase
+
+@end
+
+@implementation FLTSavePhotoDelegateTests
+
+- (void)testHandlePhotoCaptureResult_mustCompleteWithErrorIfFailedToCapture {
+ XCTestExpectation *completionExpectation =
+ [self expectationWithDescription:@"Must complete with error if failed to capture photo."];
+
+ NSError *captureError = [NSError errorWithDomain:@"test" code:0 userInfo:nil];
+ dispatch_queue_t ioQueue = dispatch_queue_create("test", NULL);
+ FLTSavePhotoDelegate *delegate = [[FLTSavePhotoDelegate alloc]
+ initWithPath:@"test"
+ ioQueue:ioQueue
+ completionHandler:^(NSString *_Nullable path, NSError *_Nullable error) {
+ XCTAssertEqualObjects(captureError, error);
+ XCTAssertNil(path);
+ [completionExpectation fulfill];
+ }];
+
+ [delegate handlePhotoCaptureResultWithError:captureError
+ photoDataProvider:^NSData * {
+ return nil;
+ }];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testHandlePhotoCaptureResult_mustCompleteWithErrorIfFailedToWrite {
+ XCTestExpectation *completionExpectation =
+ [self expectationWithDescription:@"Must complete with error if failed to write file."];
+ dispatch_queue_t ioQueue = dispatch_queue_create("test", NULL);
+
+ NSError *ioError = [NSError errorWithDomain:@"IOError"
+ code:0
+ userInfo:@{NSLocalizedDescriptionKey : @"Localized IO Error"}];
+ FLTSavePhotoDelegate *delegate = [[FLTSavePhotoDelegate alloc]
+ initWithPath:@"test"
+ ioQueue:ioQueue
+ completionHandler:^(NSString *_Nullable path, NSError *_Nullable error) {
+ XCTAssertEqualObjects(ioError, error);
+ XCTAssertNil(path);
+ [completionExpectation fulfill];
+ }];
+
+  // Do not use OCMClassMock for NSData because some XCTest APIs use NSData (e.g.
+ // `XCTRunnerIDESession::logDebugMessage:`) on a private queue.
+ id mockData = OCMPartialMock([NSData data]);
+ OCMStub([mockData writeToFile:OCMOCK_ANY
+ options:NSDataWritingAtomic
+ error:[OCMArg setTo:ioError]])
+ .andReturn(NO);
+ [delegate handlePhotoCaptureResultWithError:nil
+ photoDataProvider:^NSData * {
+ return mockData;
+ }];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testHandlePhotoCaptureResult_mustCompleteWithFilePathIfSuccessToWrite {
+ XCTestExpectation *completionExpectation =
+ [self expectationWithDescription:@"Must complete with file path if success to write file."];
+
+ dispatch_queue_t ioQueue = dispatch_queue_create("test", NULL);
+ NSString *filePath = @"test";
+ FLTSavePhotoDelegate *delegate = [[FLTSavePhotoDelegate alloc]
+ initWithPath:filePath
+ ioQueue:ioQueue
+ completionHandler:^(NSString *_Nullable path, NSError *_Nullable error) {
+ XCTAssertNil(error);
+ XCTAssertEqualObjects(filePath, path);
+ [completionExpectation fulfill];
+ }];
+
+  // Do not use OCMClassMock for NSData because some XCTest APIs use NSData (e.g.
+ // `XCTRunnerIDESession::logDebugMessage:`) on a private queue.
+ id mockData = OCMPartialMock([NSData data]);
+ OCMStub([mockData writeToFile:filePath options:NSDataWritingAtomic error:[OCMArg setTo:nil]])
+ .andReturn(YES);
+
+ [delegate handlePhotoCaptureResultWithError:nil
+ photoDataProvider:^NSData * {
+ return mockData;
+ }];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testHandlePhotoCaptureResult_bothProvideDataAndSaveFileMustRunOnIOQueue {
+ XCTestExpectation *dataProviderQueueExpectation =
+ [self expectationWithDescription:@"Data provider must run on io queue."];
+ XCTestExpectation *writeFileQueueExpectation =
+ [self expectationWithDescription:@"File writing must run on io queue"];
+ XCTestExpectation *completionExpectation =
+ [self expectationWithDescription:@"Must complete with file path if success to write file."];
+
+ dispatch_queue_t ioQueue = dispatch_queue_create("test", NULL);
+ const char *ioQueueSpecific = "io_queue_specific";
+ dispatch_queue_set_specific(ioQueue, ioQueueSpecific, (void *)ioQueueSpecific, NULL);
+
+  // Do not use OCMClassMock for NSData because some XCTest APIs use NSData (e.g.
+ // `XCTRunnerIDESession::logDebugMessage:`) on a private queue.
+ id mockData = OCMPartialMock([NSData data]);
+ OCMStub([mockData writeToFile:OCMOCK_ANY options:NSDataWritingAtomic error:[OCMArg setTo:nil]])
+ .andDo(^(NSInvocation *invocation) {
+ if (dispatch_get_specific(ioQueueSpecific)) {
+ [writeFileQueueExpectation fulfill];
+ }
+ })
+ .andReturn(YES);
+
+ NSString *filePath = @"test";
+ FLTSavePhotoDelegate *delegate = [[FLTSavePhotoDelegate alloc]
+ initWithPath:filePath
+ ioQueue:ioQueue
+ completionHandler:^(NSString *_Nullable path, NSError *_Nullable error) {
+ [completionExpectation fulfill];
+ }];
+
+ [delegate handlePhotoCaptureResultWithError:nil
+ photoDataProvider:^NSData * {
+ if (dispatch_get_specific(ioQueueSpecific)) {
+ [dataProviderQueueExpectation fulfill];
+ }
+ return mockData;
+ }];
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Info.plist b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Info.plist
new file mode 100644
index 0000000..64d65ca
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Info.plist
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>$(DEVELOPMENT_LANGUAGE)</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>$(PRODUCT_NAME)</string>
+ <key>CFBundlePackageType</key>
+ <string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+</dict>
+</plist>
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.h
new file mode 100644
index 0000000..8685f3f
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.h
@@ -0,0 +1,25 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MockFLTThreadSafeFlutterResult_h
+#define MockFLTThreadSafeFlutterResult_h
+
+/**
+ * Extends FLTThreadSafeFlutterResult to give tests the ability to wait on the result and
+ * read the received result.
+ */
+@interface MockFLTThreadSafeFlutterResult : FLTThreadSafeFlutterResult
+@property(readonly, nonatomic, nonnull) XCTestExpectation *expectation;
+@property(nonatomic, nullable) id receivedResult;
+
+/**
+ * Initializes the MockFLTThreadSafeFlutterResult with an expectation.
+ *
+ * The expectation is fulfilled when a result is sent, allowing tests to await the result in an
+ * asynchronous manner.
+ */
+- (nonnull instancetype)initWithExpectation:(nonnull XCTestExpectation *)expectation;
+@end
+
+#endif /* MockFLTThreadSafeFlutterResult_h */
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.m
new file mode 100644
index 0000000..d3d7b6a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.m
@@ -0,0 +1,27 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import XCTest;
+
+#import "MockFLTThreadSafeFlutterResult.h"
+
+@implementation MockFLTThreadSafeFlutterResult
+
+- (instancetype)initWithExpectation:(XCTestExpectation *)expectation {
+ self = [super init];
+ _expectation = expectation;
+ return self;
+}
+
+- (void)sendSuccessWithData:(id)data {
+ self.receivedResult = data;
+ [self.expectation fulfill];
+}
+
+- (void)sendSuccess {
+ self.receivedResult = nil;
+ [self.expectation fulfill];
+}
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/QueueUtilsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/QueueUtilsTests.m
new file mode 100644
index 0000000..a9fc739
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/QueueUtilsTests.m
@@ -0,0 +1,38 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import XCTest;
+
+@interface QueueUtilsTests : XCTestCase
+
+@end
+
+@implementation QueueUtilsTests
+
+- (void)testShouldStayOnMainQueueIfCalledFromMainQueue {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:@"Block must be run on the main queue."];
+ FLTEnsureToRunOnMainQueue(^{
+ if (NSThread.isMainThread) {
+ [expectation fulfill];
+ }
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testShouldDispatchToMainQueueIfCalledFromBackgroundQueue {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:@"Block must be run on the main queue."];
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ FLTEnsureToRunOnMainQueue(^{
+ if (NSThread.isMainThread) {
+ [expectation fulfill];
+ }
+ });
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m
new file mode 100644
index 0000000..14a6118
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m
@@ -0,0 +1,85 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import camera_avfoundation.Test;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+#import "CameraTestUtils.h"
+
+@interface StreamingTests : XCTestCase
+@property(readonly, nonatomic) FLTCam *camera;
+@property(readonly, nonatomic) CMSampleBufferRef sampleBuffer;
+@end
+
+@implementation StreamingTests
+
+- (void)setUp {
+ dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL);
+ _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
+ _sampleBuffer = FLTCreateTestSampleBuffer();
+}
+
+- (void)tearDown {
+ CFRelease(_sampleBuffer);
+}
+
+- (void)testExceedMaxStreamingPendingFramesCount {
+ XCTestExpectation *streamingExpectation = [self
+ expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"];
+
+ id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
+ OCMStub([handlerMock eventSink]).andReturn(^(id event) {
+ [streamingExpectation fulfill];
+ });
+
+ id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
+ [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];
+
+ XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
+ object:_camera
+ expectedValue:@YES];
+ XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
+ XCTAssertEqual(result, XCTWaiterResultCompleted);
+
+ streamingExpectation.expectedFulfillmentCount = 4;
+ for (int i = 0; i < 10; i++) {
+ [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+ }
+
+ [self waitForExpectationsWithTimeout:1.0 handler:nil];
+}
+
+- (void)testReceivedImageStreamData {
+ XCTestExpectation *streamingExpectation =
+ [self expectationWithDescription:
+ @"Must be able to call the handler again when receivedImageStreamData is called"];
+
+ id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
+ OCMStub([handlerMock eventSink]).andReturn(^(id event) {
+ [streamingExpectation fulfill];
+ });
+
+ id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
+ [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];
+
+ XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
+ object:_camera
+ expectedValue:@YES];
+ XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
+ XCTAssertEqual(result, XCTWaiterResultCompleted);
+
+ streamingExpectation.expectedFulfillmentCount = 5;
+ for (int i = 0; i < 10; i++) {
+ [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+ }
+
+ [_camera receivedImageStreamData];
+ [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+
+ [self waitForExpectationsWithTimeout:1.0 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m
new file mode 100644
index 0000000..2aad7e3
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m
@@ -0,0 +1,82 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import XCTest;
+#import <OCMock/OCMock.h>
+
+@interface ThreadSafeEventChannelTests : XCTestCase
+@end
+
+@implementation ThreadSafeEventChannelTests
+
+- (void)testSetStreamHandler_shouldStayOnMainThreadIfCalledFromMainThread {
+ FlutterEventChannel *mockEventChannel = OCMClassMock([FlutterEventChannel class]);
+ FLTThreadSafeEventChannel *threadSafeEventChannel =
+ [[FLTThreadSafeEventChannel alloc] initWithEventChannel:mockEventChannel];
+
+ XCTestExpectation *mainThreadExpectation =
+ [self expectationWithDescription:@"setStreamHandler must be called on the main thread"];
+ XCTestExpectation *mainThreadCompletionExpectation =
+ [self expectationWithDescription:
+ @"setStreamHandler's completion block must be called on the main thread"];
+ OCMStub([mockEventChannel setStreamHandler:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [mainThreadExpectation fulfill];
+ }
+ });
+
+ [threadSafeEventChannel setStreamHandler:nil
+ completion:^{
+ if (NSThread.isMainThread) {
+ [mainThreadCompletionExpectation fulfill];
+ }
+ }];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testSetStreamHandler_shouldDispatchToMainThreadIfCalledFromBackgroundThread {
+ FlutterEventChannel *mockEventChannel = OCMClassMock([FlutterEventChannel class]);
+ FLTThreadSafeEventChannel *threadSafeEventChannel =
+ [[FLTThreadSafeEventChannel alloc] initWithEventChannel:mockEventChannel];
+
+ XCTestExpectation *mainThreadExpectation =
+ [self expectationWithDescription:@"setStreamHandler must be called on the main thread"];
+ XCTestExpectation *mainThreadCompletionExpectation =
+ [self expectationWithDescription:
+ @"setStreamHandler's completion block must be called on the main thread"];
+ OCMStub([mockEventChannel setStreamHandler:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [mainThreadExpectation fulfill];
+ }
+ });
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ [threadSafeEventChannel setStreamHandler:nil
+ completion:^{
+ if (NSThread.isMainThread) {
+ [mainThreadCompletionExpectation fulfill];
+ }
+ }];
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testEventChannel_shouldBeKeptAliveWhenDispatchingBackToMainThread {
+ XCTestExpectation *expectation =
+ [self expectationWithDescription:@"Completion should be called."];
+ dispatch_async(dispatch_queue_create("test", NULL), ^{
+ FLTThreadSafeEventChannel *channel = [[FLTThreadSafeEventChannel alloc]
+ initWithEventChannel:OCMClassMock([FlutterEventChannel class])];
+
+ [channel setStreamHandler:OCMOCK_ANY
+ completion:^{
+ [expectation fulfill];
+ }];
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeFlutterResultTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeFlutterResultTests.m
new file mode 100644
index 0000000..b8de19c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeFlutterResultTests.m
@@ -0,0 +1,116 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import XCTest;
+
+@interface ThreadSafeFlutterResultTests : XCTestCase
+@end
+
+@implementation ThreadSafeFlutterResultTests
+- (void)testAsyncSendSuccess_ShouldCallResultOnMainThread {
+ XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult *threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert(NSThread.isMainThread);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendSuccess];
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testSyncSendSuccess_ShouldCallResultOnMainThread {
+ XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult *threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert(NSThread.isMainThread);
+ [expectation fulfill];
+ }];
+ [threadSafeFlutterResult sendSuccess];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testSendNotImplemented_ShouldSendNotImplementedToFlutterResult {
+ XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult *threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert([result isKindOfClass:FlutterMethodNotImplemented.class]);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendNotImplemented];
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testSendErrorDetails_ShouldSendErrorToFlutterResult {
+ NSString *errorCode = @"errorCode";
+ NSString *errorMessage = @"message";
+ NSString *errorDetails = @"error details";
+ XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult *threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert([result isKindOfClass:FlutterError.class]);
+ FlutterError *error = (FlutterError *)result;
+ XCTAssertEqualObjects(error.code, errorCode);
+ XCTAssertEqualObjects(error.message, errorMessage);
+ XCTAssertEqualObjects(error.details, errorDetails);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendErrorWithCode:errorCode message:errorMessage details:errorDetails];
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testSendNSError_ShouldSendErrorToFlutterResult {
+ NSError *originalError = [[NSError alloc] initWithDomain:NSURLErrorDomain code:404 userInfo:nil];
+ XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult *threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert([result isKindOfClass:FlutterError.class]);
+ FlutterError *error = (FlutterError *)result;
+ NSString *constructedErrorCode =
+ [NSString stringWithFormat:@"Error %d", (int)originalError.code];
+ XCTAssertEqualObjects(error.code, constructedErrorCode);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendError:originalError];
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testSendResult_ShouldSendResultToFlutterResult {
+ NSString *resultData = @"resultData";
+ XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult *threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssertEqualObjects(result, resultData);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendSuccessWithData:resultData];
+ });
+
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeMethodChannelTests.m
new file mode 100644
index 0000000..ce1b641
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeMethodChannelTests.m
@@ -0,0 +1,54 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import XCTest;
+#import <OCMock/OCMock.h>
+
+@interface ThreadSafeMethodChannelTests : XCTestCase
+@end
+
+@implementation ThreadSafeMethodChannelTests
+
+- (void)testInvokeMethod_shouldStayOnMainThreadIfCalledFromMainThread {
+ FlutterMethodChannel *mockMethodChannel = OCMClassMock([FlutterMethodChannel class]);
+ FLTThreadSafeMethodChannel *threadSafeMethodChannel =
+ [[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:mockMethodChannel];
+
+ XCTestExpectation *mainThreadExpectation =
+ [self expectationWithDescription:@"invokeMethod must be called on the main thread"];
+
+ OCMStub([mockMethodChannel invokeMethod:[OCMArg any] arguments:[OCMArg any]])
+ .andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [mainThreadExpectation fulfill];
+ }
+ });
+
+ [threadSafeMethodChannel invokeMethod:@"foo" arguments:nil];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testInvokeMethod__shouldDispatchToMainThreadIfCalledFromBackgroundThread {
+ FlutterMethodChannel *mockMethodChannel = OCMClassMock([FlutterMethodChannel class]);
+ FLTThreadSafeMethodChannel *threadSafeMethodChannel =
+ [[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:mockMethodChannel];
+
+ XCTestExpectation *mainThreadExpectation =
+ [self expectationWithDescription:@"invokeMethod must be called on the main thread"];
+
+ OCMStub([mockMethodChannel invokeMethod:[OCMArg any] arguments:[OCMArg any]])
+ .andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [mainThreadExpectation fulfill];
+ }
+ });
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ [threadSafeMethodChannel invokeMethod:@"foo" arguments:nil];
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m
new file mode 100644
index 0000000..31f196f
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m
@@ -0,0 +1,108 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera_avfoundation;
+@import XCTest;
+#import <OCMock/OCMock.h>
+
+@interface ThreadSafeTextureRegistryTests : XCTestCase
+@end
+
+@implementation ThreadSafeTextureRegistryTests
+
+- (void)testShouldStayOnMainThreadIfCalledFromMainThread {
+ NSObject<FlutterTextureRegistry> *mockTextureRegistry =
+ OCMProtocolMock(@protocol(FlutterTextureRegistry));
+ FLTThreadSafeTextureRegistry *threadSafeTextureRegistry =
+ [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:mockTextureRegistry];
+
+ XCTestExpectation *registerTextureExpectation =
+ [self expectationWithDescription:@"registerTexture must be called on the main thread"];
+ XCTestExpectation *unregisterTextureExpectation =
+ [self expectationWithDescription:@"unregisterTexture must be called on the main thread"];
+ XCTestExpectation *textureFrameAvailableExpectation =
+ [self expectationWithDescription:@"textureFrameAvailable must be called on the main thread"];
+ XCTestExpectation *registerTextureCompletionExpectation =
+ [self expectationWithDescription:
+ @"registerTexture's completion block must be called on the main thread"];
+
+ OCMStub([mockTextureRegistry registerTexture:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [registerTextureExpectation fulfill];
+ }
+ });
+
+ OCMStub([mockTextureRegistry unregisterTexture:0]).andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [unregisterTextureExpectation fulfill];
+ }
+ });
+
+ OCMStub([mockTextureRegistry textureFrameAvailable:0]).andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [textureFrameAvailableExpectation fulfill];
+ }
+ });
+
+ NSObject<FlutterTexture> *anyTexture = OCMProtocolMock(@protocol(FlutterTexture));
+ [threadSafeTextureRegistry registerTexture:anyTexture
+ completion:^(int64_t textureId) {
+ if (NSThread.isMainThread) {
+ [registerTextureCompletionExpectation fulfill];
+ }
+ }];
+ [threadSafeTextureRegistry textureFrameAvailable:0];
+ [threadSafeTextureRegistry unregisterTexture:0];
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+- (void)testShouldDispatchToMainThreadIfCalledFromBackgroundThread {
+ NSObject<FlutterTextureRegistry> *mockTextureRegistry =
+ OCMProtocolMock(@protocol(FlutterTextureRegistry));
+ FLTThreadSafeTextureRegistry *threadSafeTextureRegistry =
+ [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:mockTextureRegistry];
+
+ XCTestExpectation *registerTextureExpectation =
+ [self expectationWithDescription:@"registerTexture must be called on the main thread"];
+ XCTestExpectation *unregisterTextureExpectation =
+ [self expectationWithDescription:@"unregisterTexture must be called on the main thread"];
+ XCTestExpectation *textureFrameAvailableExpectation =
+ [self expectationWithDescription:@"textureFrameAvailable must be called on the main thread"];
+ XCTestExpectation *registerTextureCompletionExpectation =
+ [self expectationWithDescription:
+ @"registerTexture's completion block must be called on the main thread"];
+
+ OCMStub([mockTextureRegistry registerTexture:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [registerTextureExpectation fulfill];
+ }
+ });
+
+ OCMStub([mockTextureRegistry unregisterTexture:0]).andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [unregisterTextureExpectation fulfill];
+ }
+ });
+
+ OCMStub([mockTextureRegistry textureFrameAvailable:0]).andDo(^(NSInvocation *invocation) {
+ if (NSThread.isMainThread) {
+ [textureFrameAvailableExpectation fulfill];
+ }
+ });
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ NSObject<FlutterTexture> *anyTexture = OCMProtocolMock(@protocol(FlutterTexture));
+ [threadSafeTextureRegistry registerTexture:anyTexture
+ completion:^(int64_t textureId) {
+ if (NSThread.isMainThread) {
+ [registerTextureCompletionExpectation fulfill];
+ }
+ }];
+ [threadSafeTextureRegistry textureFrameAvailable:0];
+ [threadSafeTextureRegistry unregisterTexture:0];
+ });
+ [self waitForExpectationsWithTimeout:1 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/example/lib/camera_controller.dart b/packages/camera/camera_avfoundation/example/lib/camera_controller.dart
new file mode 100644
index 0000000..5241868
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/lib/camera_controller.dart
@@ -0,0 +1,553 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:collection';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+/// The state of a [CameraController].
+class CameraValue {
+ /// Creates a new camera controller state.
+ const CameraValue({
+ required this.isInitialized,
+ this.previewSize,
+ required this.isRecordingVideo,
+ required this.isTakingPicture,
+ required this.isStreamingImages,
+ required this.isRecordingPaused,
+ required this.flashMode,
+ required this.exposureMode,
+ required this.focusMode,
+ required this.deviceOrientation,
+ this.lockedCaptureOrientation,
+ this.recordingOrientation,
+ this.isPreviewPaused = false,
+ this.previewPauseOrientation,
+ });
+
+ /// Creates a new camera controller state for an uninitialized controller.
+ const CameraValue.uninitialized()
+ : this(
+ isInitialized: false,
+ isRecordingVideo: false,
+ isTakingPicture: false,
+ isStreamingImages: false,
+ isRecordingPaused: false,
+ flashMode: FlashMode.auto,
+ exposureMode: ExposureMode.auto,
+ focusMode: FocusMode.auto,
+ deviceOrientation: DeviceOrientation.portraitUp,
+ isPreviewPaused: false,
+ );
+
+ /// True after [CameraController.initialize] has completed successfully.
+ final bool isInitialized;
+
+  /// True when a picture capture request has been sent but has not yet returned.
+ final bool isTakingPicture;
+
+ /// True when the camera is recording (not the same as previewing).
+ final bool isRecordingVideo;
+
+ /// True when images from the camera are being streamed.
+ final bool isStreamingImages;
+
+ /// True when video recording is paused.
+ final bool isRecordingPaused;
+
+ /// True when the preview widget has been paused manually.
+ final bool isPreviewPaused;
+
+ /// Set to the orientation the preview was paused in, if it is currently paused.
+ final DeviceOrientation? previewPauseOrientation;
+
+ /// The size of the preview in pixels.
+ ///
+ /// Is `null` until [isInitialized] is `true`.
+ final Size? previewSize;
+
+ /// The flash mode the camera is currently set to.
+ final FlashMode flashMode;
+
+ /// The exposure mode the camera is currently set to.
+ final ExposureMode exposureMode;
+
+ /// The focus mode the camera is currently set to.
+ final FocusMode focusMode;
+
+ /// The current device UI orientation.
+ final DeviceOrientation deviceOrientation;
+
+ /// The currently locked capture orientation.
+ final DeviceOrientation? lockedCaptureOrientation;
+
+ /// Whether the capture orientation is currently locked.
+ bool get isCaptureOrientationLocked => lockedCaptureOrientation != null;
+
+ /// The orientation of the currently running video recording.
+ final DeviceOrientation? recordingOrientation;
+
+ /// Creates a modified copy of the object.
+ ///
+ /// Explicitly specified fields get the specified value, all other fields get
+ /// the same value of the current object.
+ CameraValue copyWith({
+ bool? isInitialized,
+ bool? isRecordingVideo,
+ bool? isTakingPicture,
+ bool? isStreamingImages,
+ Size? previewSize,
+ bool? isRecordingPaused,
+ FlashMode? flashMode,
+ ExposureMode? exposureMode,
+ FocusMode? focusMode,
+ bool? exposurePointSupported,
+ bool? focusPointSupported,
+ DeviceOrientation? deviceOrientation,
+ Optional<DeviceOrientation>? lockedCaptureOrientation,
+ Optional<DeviceOrientation>? recordingOrientation,
+ bool? isPreviewPaused,
+ Optional<DeviceOrientation>? previewPauseOrientation,
+ }) {
+ return CameraValue(
+ isInitialized: isInitialized ?? this.isInitialized,
+ previewSize: previewSize ?? this.previewSize,
+ isRecordingVideo: isRecordingVideo ?? this.isRecordingVideo,
+ isTakingPicture: isTakingPicture ?? this.isTakingPicture,
+ isStreamingImages: isStreamingImages ?? this.isStreamingImages,
+ isRecordingPaused: isRecordingPaused ?? this.isRecordingPaused,
+ flashMode: flashMode ?? this.flashMode,
+ exposureMode: exposureMode ?? this.exposureMode,
+ focusMode: focusMode ?? this.focusMode,
+ deviceOrientation: deviceOrientation ?? this.deviceOrientation,
+ lockedCaptureOrientation: lockedCaptureOrientation == null
+ ? this.lockedCaptureOrientation
+ : lockedCaptureOrientation.orNull,
+ recordingOrientation: recordingOrientation == null
+ ? this.recordingOrientation
+ : recordingOrientation.orNull,
+ isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused,
+ previewPauseOrientation: previewPauseOrientation == null
+ ? this.previewPauseOrientation
+ : previewPauseOrientation.orNull,
+ );
+ }
+
+ @override
+ String toString() {
+ return '${objectRuntimeType(this, 'CameraValue')}('
+ 'isRecordingVideo: $isRecordingVideo, '
+ 'isInitialized: $isInitialized, '
+ 'previewSize: $previewSize, '
+ 'isStreamingImages: $isStreamingImages, '
+ 'flashMode: $flashMode, '
+ 'exposureMode: $exposureMode, '
+ 'focusMode: $focusMode, '
+ 'deviceOrientation: $deviceOrientation, '
+ 'lockedCaptureOrientation: $lockedCaptureOrientation, '
+ 'recordingOrientation: $recordingOrientation, '
+ 'isPreviewPaused: $isPreviewPaused, '
+ 'previewPausedOrientation: $previewPauseOrientation)';
+ }
+}
+
+/// Controls a device camera.
+///
+/// This is a stripped-down version of the app-facing controller to serve as a
+/// utility for the example and integration tests. It wraps only the calls that
+/// have state associated with them, to consolidate tracking of camera state
+/// outside of the overall example code.
+class CameraController extends ValueNotifier<CameraValue> {
+ /// Creates a new camera controller in an uninitialized state.
+ CameraController(
+ this.description,
+ this.resolutionPreset, {
+ this.enableAudio = true,
+ this.imageFormatGroup,
+ }) : super(const CameraValue.uninitialized());
+
+ /// The properties of the camera device controlled by this controller.
+ final CameraDescription description;
+
+ /// The resolution this controller is targeting.
+ ///
+ /// This resolution preset is not guaranteed to be available on the device,
+ /// if unavailable a lower resolution will be used.
+ ///
+ /// See also: [ResolutionPreset].
+ final ResolutionPreset resolutionPreset;
+
+ /// Whether to include audio when recording a video.
+ final bool enableAudio;
+
+ /// The [ImageFormatGroup] describes the output of the raw image format.
+ ///
+  /// When null, the image format will fall back to the platform's default.
+ final ImageFormatGroup? imageFormatGroup;
+
+ late int _cameraId;
+
+ bool _isDisposed = false;
+ StreamSubscription<CameraImageData>? _imageStreamSubscription;
+ FutureOr<bool>? _initCalled;
+ StreamSubscription<DeviceOrientationChangedEvent>?
+ _deviceOrientationSubscription;
+
+ /// The camera identifier with which the controller is associated.
+ int get cameraId => _cameraId;
+
+ /// Initializes the camera on the device.
+ Future<void> initialize() async {
+ final Completer<CameraInitializedEvent> initializeCompleter =
+ Completer<CameraInitializedEvent>();
+
+ _deviceOrientationSubscription = CameraPlatform.instance
+ .onDeviceOrientationChanged()
+ .listen((DeviceOrientationChangedEvent event) {
+ value = value.copyWith(
+ deviceOrientation: event.orientation,
+ );
+ });
+
+ _cameraId = await CameraPlatform.instance.createCamera(
+ description,
+ resolutionPreset,
+ enableAudio: enableAudio,
+ );
+
+ CameraPlatform.instance
+ .onCameraInitialized(_cameraId)
+ .first
+ .then((CameraInitializedEvent event) {
+ initializeCompleter.complete(event);
+ });
+
+ await CameraPlatform.instance.initializeCamera(
+ _cameraId,
+ imageFormatGroup: imageFormatGroup ?? ImageFormatGroup.unknown,
+ );
+
+ value = value.copyWith(
+ isInitialized: true,
+ previewSize: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => Size(
+ event.previewWidth,
+ event.previewHeight,
+ )),
+ exposureMode: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.exposureMode),
+ focusMode: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.focusMode),
+ exposurePointSupported: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.exposurePointSupported),
+ focusPointSupported: await initializeCompleter.future
+ .then((CameraInitializedEvent event) => event.focusPointSupported),
+ );
+
+ _initCalled = true;
+ }
+
+ /// Prepare the capture session for video recording.
+ Future<void> prepareForVideoRecording() async {
+ await CameraPlatform.instance.prepareForVideoRecording();
+ }
+
+  /// Pauses the current camera preview.
+ Future<void> pausePreview() async {
+ await CameraPlatform.instance.pausePreview(_cameraId);
+ value = value.copyWith(
+ isPreviewPaused: true,
+ previewPauseOrientation: Optional<DeviceOrientation>.of(
+ value.lockedCaptureOrientation ?? value.deviceOrientation));
+ }
+
+  /// Resumes the current camera preview.
+ Future<void> resumePreview() async {
+ await CameraPlatform.instance.resumePreview(_cameraId);
+ value = value.copyWith(
+ isPreviewPaused: false,
+ previewPauseOrientation: const Optional<DeviceOrientation>.absent());
+ }
+
+ /// Captures an image and returns the file where it was saved.
+ ///
+ /// Throws a [CameraException] if the capture fails.
+ Future<XFile> takePicture() async {
+ value = value.copyWith(isTakingPicture: true);
+ final XFile file = await CameraPlatform.instance.takePicture(_cameraId);
+ value = value.copyWith(isTakingPicture: false);
+ return file;
+ }
+
+ /// Start streaming images from platform camera.
+ Future<void> startImageStream(
+ Function(CameraImageData image) onAvailable) async {
+ _imageStreamSubscription = CameraPlatform.instance
+ .onStreamedFrameAvailable(_cameraId)
+ .listen((CameraImageData imageData) {
+ onAvailable(imageData);
+ });
+ value = value.copyWith(isStreamingImages: true);
+ }
+
+ /// Stop streaming images from platform camera.
+ Future<void> stopImageStream() async {
+ value = value.copyWith(isStreamingImages: false);
+ await _imageStreamSubscription?.cancel();
+ _imageStreamSubscription = null;
+ }
+
+ /// Start a video recording.
+ ///
+  /// The video is returned as an [XFile] after calling [stopVideoRecording].
+ /// Throws a [CameraException] if the capture fails.
+ Future<void> startVideoRecording(
+ {Function(CameraImageData image)? streamCallback}) async {
+ await CameraPlatform.instance.startVideoCapturing(
+ VideoCaptureOptions(_cameraId, streamCallback: streamCallback));
+ value = value.copyWith(
+ isRecordingVideo: true,
+ isRecordingPaused: false,
+ isStreamingImages: streamCallback != null,
+ recordingOrientation: Optional<DeviceOrientation>.of(
+ value.lockedCaptureOrientation ?? value.deviceOrientation));
+ }
+
+ /// Stops the video recording and returns the file where it was saved.
+ ///
+ /// Throws a [CameraException] if the capture failed.
+ Future<XFile> stopVideoRecording() async {
+ if (value.isStreamingImages) {
+ await stopImageStream();
+ }
+
+ final XFile file =
+ await CameraPlatform.instance.stopVideoRecording(_cameraId);
+ value = value.copyWith(
+ isRecordingVideo: false,
+ recordingOrientation: const Optional<DeviceOrientation>.absent(),
+ );
+ return file;
+ }
+
+ /// Pause video recording.
+ ///
+  /// This feature is only available on iOS and Android SDK 24+.
+ Future<void> pauseVideoRecording() async {
+ await CameraPlatform.instance.pauseVideoRecording(_cameraId);
+ value = value.copyWith(isRecordingPaused: true);
+ }
+
+ /// Resume video recording after pausing.
+ ///
+  /// This feature is only available on iOS and Android SDK 24+.
+ Future<void> resumeVideoRecording() async {
+ await CameraPlatform.instance.resumeVideoRecording(_cameraId);
+ value = value.copyWith(isRecordingPaused: false);
+ }
+
+ /// Returns a widget showing a live camera preview.
+ Widget buildPreview() {
+ return CameraPlatform.instance.buildPreview(_cameraId);
+ }
+
+ /// Sets the flash mode for taking pictures.
+ Future<void> setFlashMode(FlashMode mode) async {
+ await CameraPlatform.instance.setFlashMode(_cameraId, mode);
+ value = value.copyWith(flashMode: mode);
+ }
+
+ /// Sets the exposure mode for taking pictures.
+ Future<void> setExposureMode(ExposureMode mode) async {
+ await CameraPlatform.instance.setExposureMode(_cameraId, mode);
+ value = value.copyWith(exposureMode: mode);
+ }
+
+ /// Sets the exposure offset for the selected camera.
+ Future<double> setExposureOffset(double offset) async {
+ // Check if offset is in range
+ final List<double> range = await Future.wait(<Future<double>>[
+ CameraPlatform.instance.getMinExposureOffset(_cameraId),
+ CameraPlatform.instance.getMaxExposureOffset(_cameraId)
+ ]);
+
+ // Round to the closest step if needed
+ final double stepSize =
+ await CameraPlatform.instance.getExposureOffsetStepSize(_cameraId);
+ if (stepSize > 0) {
+ final double inv = 1.0 / stepSize;
+ double roundedOffset = (offset * inv).roundToDouble() / inv;
+ if (roundedOffset > range[1]) {
+ roundedOffset = (offset * inv).floorToDouble() / inv;
+ } else if (roundedOffset < range[0]) {
+ roundedOffset = (offset * inv).ceilToDouble() / inv;
+ }
+ offset = roundedOffset;
+ }
+
+ return CameraPlatform.instance.setExposureOffset(_cameraId, offset);
+ }
+
+ /// Locks the capture orientation.
+ ///
+  /// The device orientation at the time of the call is used as the locked orientation.
+ Future<void> lockCaptureOrientation() async {
+ await CameraPlatform.instance
+ .lockCaptureOrientation(_cameraId, value.deviceOrientation);
+ value = value.copyWith(
+ lockedCaptureOrientation:
+ Optional<DeviceOrientation>.of(value.deviceOrientation));
+ }
+
+ /// Unlocks the capture orientation.
+ Future<void> unlockCaptureOrientation() async {
+ await CameraPlatform.instance.unlockCaptureOrientation(_cameraId);
+ value = value.copyWith(
+ lockedCaptureOrientation: const Optional<DeviceOrientation>.absent());
+ }
+
+ /// Sets the focus mode for taking pictures.
+ Future<void> setFocusMode(FocusMode mode) async {
+ await CameraPlatform.instance.setFocusMode(_cameraId, mode);
+ value = value.copyWith(focusMode: mode);
+ }
+
+ /// Releases the resources of this camera.
+ @override
+ Future<void> dispose() async {
+ if (_isDisposed) {
+ return;
+ }
+ _deviceOrientationSubscription?.cancel();
+ _isDisposed = true;
+ super.dispose();
+ if (_initCalled != null) {
+ await _initCalled;
+ await CameraPlatform.instance.dispose(_cameraId);
+ }
+ }
+
+ @override
+ void removeListener(VoidCallback listener) {
+ // Prevent ValueListenableBuilder in CameraPreview widget from causing an
+ // exception to be thrown by attempting to remove its own listener after
+ // the controller has already been disposed.
+ if (!_isDisposed) {
+ super.removeListener(listener);
+ }
+ }
+}
+
+/// A value that might be absent.
+///
+/// Used to represent [DeviceOrientation]s that are optional but also able
+/// to be cleared.
+@immutable
+class Optional<T> extends IterableBase<T> {
+ /// Constructs an empty Optional.
+ const Optional.absent() : _value = null;
+
+ /// Constructs an Optional of the given [value].
+ ///
+ /// Throws [ArgumentError] if [value] is null.
+ Optional.of(T value) : _value = value {
+ // TODO(cbracken): Delete and make this ctor const once mixed-mode
+ // execution is no longer around.
+ ArgumentError.checkNotNull(value);
+ }
+
+ /// Constructs an Optional of the given [value].
+ ///
+ /// If [value] is null, returns [absent()].
+ const Optional.fromNullable(T? value) : _value = value;
+
+ final T? _value;
+
+ /// True when this optional contains a value.
+ bool get isPresent => _value != null;
+
+ /// True when this optional contains no value.
+ bool get isNotPresent => _value == null;
+
+ /// Gets the Optional value.
+ ///
+ /// Throws [StateError] if [value] is null.
+ T get value {
+ if (_value == null) {
+ throw StateError('value called on absent Optional.');
+ }
+ return _value!;
+ }
+
+ /// Executes a function if the Optional value is present.
+ void ifPresent(void Function(T value) ifPresent) {
+ if (isPresent) {
+ ifPresent(_value as T);
+ }
+ }
+
+  /// Executes a function if the Optional value is absent.
+ void ifAbsent(void Function() ifAbsent) {
+ if (!isPresent) {
+ ifAbsent();
+ }
+ }
+
+ /// Gets the Optional value with a default.
+ ///
+ /// The default is returned if the Optional is [absent()].
+ ///
+ /// Throws [ArgumentError] if [defaultValue] is null.
+ T or(T defaultValue) {
+ return _value ?? defaultValue;
+ }
+
+ /// Gets the Optional value, or `null` if there is none.
+ T? get orNull => _value;
+
+ /// Transforms the Optional value.
+ ///
+ /// If the Optional is [absent()], returns [absent()] without applying the transformer.
+ ///
+ /// The transformer must not return `null`. If it does, an [ArgumentError] is thrown.
+ Optional<S> transform<S>(S Function(T value) transformer) {
+ return _value == null
+ ? Optional<S>.absent()
+ : Optional<S>.of(transformer(_value as T));
+ }
+
+ /// Transforms the Optional value.
+ ///
+ /// If the Optional is [absent()], returns [absent()] without applying the transformer.
+ ///
+ /// Returns [absent()] if the transformer returns `null`.
+ Optional<S> transformNullable<S>(S? Function(T value) transformer) {
+ return _value == null
+ ? Optional<S>.absent()
+ : Optional<S>.fromNullable(transformer(_value as T));
+ }
+
+ @override
+ Iterator<T> get iterator =>
+ isPresent ? <T>[_value as T].iterator : Iterable<T>.empty().iterator;
+
+ /// Delegates to the underlying [value] hashCode.
+ @override
+ int get hashCode => _value.hashCode;
+
+ /// Delegates to the underlying [value] operator==.
+ @override
+ bool operator ==(Object o) => o is Optional<T> && o._value == _value;
+
+ @override
+ String toString() {
+ return _value == null
+ ? 'Optional { absent }'
+ : 'Optional { value: $_value }';
+ }
+}
diff --git a/packages/camera/camera_avfoundation/example/lib/camera_preview.dart b/packages/camera/camera_avfoundation/example/lib/camera_preview.dart
new file mode 100644
index 0000000..5e8f64c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/lib/camera_preview.dart
@@ -0,0 +1,85 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+import 'camera_controller.dart';
+
+/// A widget showing a live camera preview.
+class CameraPreview extends StatelessWidget {
+ /// Creates a preview widget for the given camera controller.
+ const CameraPreview(this.controller, {Key? key, this.child})
+ : super(key: key);
+
+ /// The controller for the camera that the preview is shown for.
+ final CameraController controller;
+
+  /// A widget to overlay on top of the camera preview.
+ final Widget? child;
+
+ @override
+ Widget build(BuildContext context) {
+ return controller.value.isInitialized
+ ? ValueListenableBuilder<CameraValue>(
+ valueListenable: controller,
+ builder: (BuildContext context, Object? value, Widget? child) {
+ final double cameraAspectRatio =
+ controller.value.previewSize!.width /
+ controller.value.previewSize!.height;
+ return AspectRatio(
+ aspectRatio: _isLandscape()
+ ? cameraAspectRatio
+ : (1 / cameraAspectRatio),
+ child: Stack(
+ fit: StackFit.expand,
+ children: <Widget>[
+ _wrapInRotatedBox(child: controller.buildPreview()),
+ child ?? Container(),
+ ],
+ ),
+ );
+ },
+ child: child,
+ )
+ : Container();
+ }
+
+ Widget _wrapInRotatedBox({required Widget child}) {
+ if (kIsWeb || defaultTargetPlatform != TargetPlatform.android) {
+ return child;
+ }
+
+ return RotatedBox(
+ quarterTurns: _getQuarterTurns(),
+ child: child,
+ );
+ }
+
+ bool _isLandscape() {
+ return <DeviceOrientation>[
+ DeviceOrientation.landscapeLeft,
+ DeviceOrientation.landscapeRight
+ ].contains(_getApplicableOrientation());
+ }
+
+ int _getQuarterTurns() {
+ final Map<DeviceOrientation, int> turns = <DeviceOrientation, int>{
+ DeviceOrientation.portraitUp: 0,
+ DeviceOrientation.landscapeRight: 1,
+ DeviceOrientation.portraitDown: 2,
+ DeviceOrientation.landscapeLeft: 3,
+ };
+ return turns[_getApplicableOrientation()]!;
+ }
+
+ DeviceOrientation _getApplicableOrientation() {
+ return controller.value.isRecordingVideo
+ ? controller.value.recordingOrientation!
+ : (controller.value.previewPauseOrientation ??
+ controller.value.lockedCaptureOrientation ??
+ controller.value.deviceOrientation);
+ }
+}
diff --git a/packages/camera/camera_avfoundation/example/lib/main.dart b/packages/camera/camera_avfoundation/example/lib/main.dart
new file mode 100644
index 0000000..4d98aed
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/lib/main.dart
@@ -0,0 +1,1094 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:io';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/scheduler.dart';
+import 'package:video_player/video_player.dart';
+
+import 'camera_controller.dart';
+import 'camera_preview.dart';
+
+/// Camera example home widget.
+class CameraExampleHome extends StatefulWidget {
+ /// Default Constructor
+ const CameraExampleHome({Key? key}) : super(key: key);
+
+ @override
+ State<CameraExampleHome> createState() {
+ return _CameraExampleHomeState();
+ }
+}
+
+/// Returns a suitable camera icon for [direction].
+IconData getCameraLensIcon(CameraLensDirection direction) {
+ switch (direction) {
+ case CameraLensDirection.back:
+ return Icons.camera_rear;
+ case CameraLensDirection.front:
+ return Icons.camera_front;
+ case CameraLensDirection.external:
+ return Icons.camera;
+ }
+ // This enum is from a different package, so a new value could be added at
+ // any time. The example should keep working if that happens.
+ // ignore: dead_code
+ return Icons.camera;
+}
+
+void _logError(String code, String? message) {
+ // ignore: avoid_print
+ print('Error: $code${message == null ? '' : '\nError Message: $message'}');
+}
+
+class _CameraExampleHomeState extends State<CameraExampleHome>
+ with WidgetsBindingObserver, TickerProviderStateMixin {
+ CameraController? controller;
+ XFile? imageFile;
+ XFile? videoFile;
+ VideoPlayerController? videoController;
+ VoidCallback? videoPlayerListener;
+ bool enableAudio = true;
+ double _minAvailableExposureOffset = 0.0;
+ double _maxAvailableExposureOffset = 0.0;
+ double _currentExposureOffset = 0.0;
+ late AnimationController _flashModeControlRowAnimationController;
+ late Animation<double> _flashModeControlRowAnimation;
+ late AnimationController _exposureModeControlRowAnimationController;
+ late Animation<double> _exposureModeControlRowAnimation;
+ late AnimationController _focusModeControlRowAnimationController;
+ late Animation<double> _focusModeControlRowAnimation;
+ double _minAvailableZoom = 1.0;
+ double _maxAvailableZoom = 1.0;
+ double _currentScale = 1.0;
+ double _baseScale = 1.0;
+
+ // Counting pointers (number of user fingers on screen)
+ int _pointers = 0;
+
+ @override
+ void initState() {
+ super.initState();
+ _ambiguate(WidgetsBinding.instance)?.addObserver(this);
+
+ _flashModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _flashModeControlRowAnimation = CurvedAnimation(
+ parent: _flashModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ _exposureModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _exposureModeControlRowAnimation = CurvedAnimation(
+ parent: _exposureModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ _focusModeControlRowAnimationController = AnimationController(
+ duration: const Duration(milliseconds: 300),
+ vsync: this,
+ );
+ _focusModeControlRowAnimation = CurvedAnimation(
+ parent: _focusModeControlRowAnimationController,
+ curve: Curves.easeInCubic,
+ );
+ }
+
+ @override
+ void dispose() {
+ _ambiguate(WidgetsBinding.instance)?.removeObserver(this);
+ _flashModeControlRowAnimationController.dispose();
+ _exposureModeControlRowAnimationController.dispose();
+ super.dispose();
+ }
+
+ @override
+ void didChangeAppLifecycleState(AppLifecycleState state) {
+ final CameraController? cameraController = controller;
+
+ // App state changed before we got the chance to initialize.
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ return;
+ }
+
+ if (state == AppLifecycleState.inactive) {
+ cameraController.dispose();
+ } else if (state == AppLifecycleState.resumed) {
+ onNewCameraSelected(cameraController.description);
+ }
+ }
+
+ @override
+ Widget build(BuildContext context) {
+ return Scaffold(
+ appBar: AppBar(
+ title: const Text('Camera example'),
+ ),
+ body: Column(
+ children: <Widget>[
+ Expanded(
+ child: Container(
+ decoration: BoxDecoration(
+ color: Colors.black,
+ border: Border.all(
+ color:
+ controller != null && controller!.value.isRecordingVideo
+ ? Colors.redAccent
+ : Colors.grey,
+ width: 3.0,
+ ),
+ ),
+ child: Padding(
+ padding: const EdgeInsets.all(1.0),
+ child: Center(
+ child: _cameraPreviewWidget(),
+ ),
+ ),
+ ),
+ ),
+ _captureControlRowWidget(),
+ _modeControlRowWidget(),
+ Padding(
+ padding: const EdgeInsets.all(5.0),
+ child: Row(
+ children: <Widget>[
+ _cameraTogglesRowWidget(),
+ _thumbnailWidget(),
+ ],
+ ),
+ ),
+ ],
+ ),
+ );
+ }
+
+ /// Display the preview from the camera (or a message if the preview is not available).
+ Widget _cameraPreviewWidget() {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ return const Text(
+ 'Tap a camera',
+ style: TextStyle(
+ color: Colors.white,
+ fontSize: 24.0,
+ fontWeight: FontWeight.w900,
+ ),
+ );
+ } else {
+ return Listener(
+ onPointerDown: (_) => _pointers++,
+ onPointerUp: (_) => _pointers--,
+ child: CameraPreview(
+ controller!,
+ child: LayoutBuilder(
+ builder: (BuildContext context, BoxConstraints constraints) {
+ return GestureDetector(
+ behavior: HitTestBehavior.opaque,
+ onScaleStart: _handleScaleStart,
+ onScaleUpdate: _handleScaleUpdate,
+ onTapDown: (TapDownDetails details) =>
+ onViewFinderTap(details, constraints),
+ );
+ }),
+ ),
+ );
+ }
+ }
+
+ void _handleScaleStart(ScaleStartDetails details) {
+ _baseScale = _currentScale;
+ }
+
+ Future<void> _handleScaleUpdate(ScaleUpdateDetails details) async {
+ // When there are not exactly two fingers on the screen, don't scale.
+ if (controller == null || _pointers != 2) {
+ return;
+ }
+
+ _currentScale = (_baseScale * details.scale)
+ .clamp(_minAvailableZoom, _maxAvailableZoom);
+
+ await CameraPlatform.instance
+ .setZoomLevel(controller!.cameraId, _currentScale);
+ }
+
+ /// Display the thumbnail of the captured image or video.
+ Widget _thumbnailWidget() {
+ final VideoPlayerController? localVideoController = videoController;
+
+ return Expanded(
+ child: Align(
+ alignment: Alignment.centerRight,
+ child: Row(
+ mainAxisSize: MainAxisSize.min,
+ children: <Widget>[
+ if (localVideoController == null && imageFile == null)
+ Container()
+ else
+ SizedBox(
+ width: 64.0,
+ height: 64.0,
+ child: (localVideoController == null)
+ ? (
+ // The captured image on the web contains a network-accessible URL
+ // pointing to a location within the browser. It may be displayed
+ // either with Image.network or Image.memory after loading the image
+ // bytes to memory.
+ kIsWeb
+ ? Image.network(imageFile!.path)
+ : Image.file(File(imageFile!.path)))
+ : Container(
+ decoration: BoxDecoration(
+ border: Border.all(color: Colors.pink)),
+ child: Center(
+ child: AspectRatio(
+ aspectRatio:
+ localVideoController.value.size != null
+ ? localVideoController.value.aspectRatio
+ : 1.0,
+ child: VideoPlayer(localVideoController)),
+ ),
+ ),
+ ),
+ ],
+ ),
+ ),
+ );
+ }
+
+ /// Display a bar with buttons to change the flash and exposure modes
+ Widget _modeControlRowWidget() {
+ return Column(
+ children: <Widget>[
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.flash_on),
+ color: Colors.blue,
+ onPressed: controller != null ? onFlashModeButtonPressed : null,
+ ),
+ // The exposure and focus mode are currently not supported on the web.
+ ...!kIsWeb
+ ? <Widget>[
+ IconButton(
+ icon: const Icon(Icons.exposure),
+ color: Colors.blue,
+ onPressed: controller != null
+ ? onExposureModeButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.filter_center_focus),
+ color: Colors.blue,
+ onPressed:
+ controller != null ? onFocusModeButtonPressed : null,
+ )
+ ]
+ : <Widget>[],
+ IconButton(
+ icon: Icon(enableAudio ? Icons.volume_up : Icons.volume_mute),
+ color: Colors.blue,
+ onPressed: controller != null ? onAudioModeButtonPressed : null,
+ ),
+ IconButton(
+ icon: Icon(controller?.value.isCaptureOrientationLocked ?? false
+ ? Icons.screen_lock_rotation
+ : Icons.screen_rotation),
+ color: Colors.blue,
+ onPressed: controller != null
+ ? onCaptureOrientationLockButtonPressed
+ : null,
+ ),
+ ],
+ ),
+ _flashModeControlRowWidget(),
+ _exposureModeControlRowWidget(),
+ _focusModeControlRowWidget(),
+ ],
+ );
+ }
+
+ Widget _flashModeControlRowWidget() {
+ return SizeTransition(
+ sizeFactor: _flashModeControlRowAnimation,
+ child: ClipRect(
+ child: Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.flash_off),
+ color: controller?.value.flashMode == FlashMode.off
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: controller != null
+ ? () => onSetFlashModeButtonPressed(FlashMode.off)
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.flash_auto),
+ color: controller?.value.flashMode == FlashMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: controller != null
+ ? () => onSetFlashModeButtonPressed(FlashMode.auto)
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.flash_on),
+ color: controller?.value.flashMode == FlashMode.always
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: controller != null
+ ? () => onSetFlashModeButtonPressed(FlashMode.always)
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.highlight),
+ color: controller?.value.flashMode == FlashMode.torch
+ ? Colors.orange
+ : Colors.blue,
+ onPressed: controller != null
+ ? () => onSetFlashModeButtonPressed(FlashMode.torch)
+ : null,
+ ),
+ ],
+ ),
+ ),
+ );
+ }
+
+ Widget _exposureModeControlRowWidget() {
+ final ButtonStyle styleAuto = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.exposureMode == ExposureMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ );
+ final ButtonStyle styleLocked = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.exposureMode == ExposureMode.locked
+ ? Colors.orange
+ : Colors.blue,
+ );
+
+ return SizeTransition(
+ sizeFactor: _exposureModeControlRowAnimation,
+ child: ClipRect(
+ child: Container(
+ color: Colors.grey.shade50,
+ child: Column(
+ children: <Widget>[
+ const Center(
+ child: Text('Exposure Mode'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ TextButton(
+ style: styleAuto,
+ onPressed: controller != null
+ ? () =>
+ onSetExposureModeButtonPressed(ExposureMode.auto)
+ : null,
+ onLongPress: () {
+ if (controller != null) {
+ CameraPlatform.instance
+ .setExposurePoint(controller!.cameraId, null);
+ showInSnackBar('Resetting exposure point');
+ }
+ },
+ child: const Text('AUTO'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed: controller != null
+ ? () =>
+ onSetExposureModeButtonPressed(ExposureMode.locked)
+ : null,
+ child: const Text('LOCKED'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed: controller != null
+ ? () => controller!.setExposureOffset(0.0)
+ : null,
+ child: const Text('RESET OFFSET'),
+ ),
+ ],
+ ),
+ const Center(
+ child: Text('Exposure Offset'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ Text(_minAvailableExposureOffset.toString()),
+ Slider(
+ value: _currentExposureOffset,
+ min: _minAvailableExposureOffset,
+ max: _maxAvailableExposureOffset,
+ label: _currentExposureOffset.toString(),
+ onChanged: _minAvailableExposureOffset ==
+ _maxAvailableExposureOffset
+ ? null
+ : setExposureOffset,
+ ),
+ Text(_maxAvailableExposureOffset.toString()),
+ ],
+ ),
+ ],
+ ),
+ ),
+ ),
+ );
+ }
+
+ Widget _focusModeControlRowWidget() {
+ final ButtonStyle styleAuto = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.focusMode == FocusMode.auto
+ ? Colors.orange
+ : Colors.blue,
+ );
+ final ButtonStyle styleLocked = TextButton.styleFrom(
+ // TODO(darrenaustin): Migrate to new API once it lands in stable: https://github.com/flutter/flutter/issues/105724
+ // ignore: deprecated_member_use
+ primary: controller?.value.focusMode == FocusMode.locked
+ ? Colors.orange
+ : Colors.blue,
+ );
+
+ return SizeTransition(
+ sizeFactor: _focusModeControlRowAnimation,
+ child: ClipRect(
+ child: Container(
+ color: Colors.grey.shade50,
+ child: Column(
+ children: <Widget>[
+ const Center(
+ child: Text('Focus Mode'),
+ ),
+ Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ TextButton(
+ style: styleAuto,
+ onPressed: controller != null
+ ? () => onSetFocusModeButtonPressed(FocusMode.auto)
+ : null,
+ onLongPress: () {
+ if (controller != null) {
+ CameraPlatform.instance
+ .setFocusPoint(controller!.cameraId, null);
+ }
+ showInSnackBar('Resetting focus point');
+ },
+ child: const Text('AUTO'),
+ ),
+ TextButton(
+ style: styleLocked,
+ onPressed: controller != null
+ ? () => onSetFocusModeButtonPressed(FocusMode.locked)
+ : null,
+ child: const Text('LOCKED'),
+ ),
+ ],
+ ),
+ ],
+ ),
+ ),
+ ),
+ );
+ }
+
+ /// Display the control bar with buttons to take pictures and record videos.
+ Widget _captureControlRowWidget() {
+ final CameraController? cameraController = controller;
+
+ return Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: <Widget>[
+ IconButton(
+ icon: const Icon(Icons.camera_alt),
+ color: Colors.blue,
+ onPressed: cameraController != null &&
+ cameraController.value.isInitialized &&
+ !cameraController.value.isRecordingVideo
+ ? onTakePictureButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.videocam),
+ color: Colors.blue,
+ onPressed: cameraController != null &&
+ cameraController.value.isInitialized &&
+ !cameraController.value.isRecordingVideo
+ ? onVideoRecordButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: cameraController != null &&
+ (!cameraController.value.isRecordingVideo ||
+ cameraController.value.isRecordingPaused)
+ ? const Icon(Icons.play_arrow)
+ : const Icon(Icons.pause),
+ color: Colors.blue,
+ onPressed: cameraController != null &&
+ cameraController.value.isInitialized &&
+ cameraController.value.isRecordingVideo
+ ? (cameraController.value.isRecordingPaused)
+ ? onResumeButtonPressed
+ : onPauseButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.stop),
+ color: Colors.red,
+ onPressed: cameraController != null &&
+ cameraController.value.isInitialized &&
+ cameraController.value.isRecordingVideo
+ ? onStopButtonPressed
+ : null,
+ ),
+ IconButton(
+ icon: const Icon(Icons.pause_presentation),
+ color:
+ cameraController != null && cameraController.value.isPreviewPaused
+ ? Colors.red
+ : Colors.blue,
+ onPressed:
+ cameraController == null ? null : onPausePreviewButtonPressed,
+ ),
+ ],
+ );
+ }
+
+ /// Display a row of toggles to select the camera (or a message if no camera is available).
+ Widget _cameraTogglesRowWidget() {
+ final List<Widget> toggles = <Widget>[];
+
+ void onChanged(CameraDescription? description) {
+ if (description == null) {
+ return;
+ }
+
+ onNewCameraSelected(description);
+ }
+
+ if (_cameras.isEmpty) {
+ _ambiguate(SchedulerBinding.instance)?.addPostFrameCallback((_) async {
+ showInSnackBar('No camera found.');
+ });
+ return const Text('None');
+ } else {
+ for (final CameraDescription cameraDescription in _cameras) {
+ toggles.add(
+ SizedBox(
+ width: 90.0,
+ child: RadioListTile<CameraDescription>(
+ title: Icon(getCameraLensIcon(cameraDescription.lensDirection)),
+ groupValue: controller?.description,
+ value: cameraDescription,
+ onChanged:
+ controller != null && controller!.value.isRecordingVideo
+ ? null
+ : onChanged,
+ ),
+ ),
+ );
+ }
+ }
+
+ return Row(children: toggles);
+ }
+
+ String timestamp() => DateTime.now().millisecondsSinceEpoch.toString();
+
+ void showInSnackBar(String message) {
+ ScaffoldMessenger.of(context)
+ .showSnackBar(SnackBar(content: Text(message)));
+ }
+
+ void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) {
+ if (controller == null) {
+ return;
+ }
+
+ final CameraController cameraController = controller!;
+
+ final Point<double> point = Point<double>(
+ details.localPosition.dx / constraints.maxWidth,
+ details.localPosition.dy / constraints.maxHeight,
+ );
+ CameraPlatform.instance.setExposurePoint(cameraController.cameraId, point);
+ CameraPlatform.instance.setFocusPoint(cameraController.cameraId, point);
+ }
+
+ Future<void> onNewCameraSelected(CameraDescription cameraDescription) async {
+ final CameraController? oldController = controller;
+ if (oldController != null) {
+ // `controller` needs to be set to null before getting disposed,
+ // to avoid a race condition when we use the controller that is being
+ // disposed. This happens when camera permission dialog shows up,
+ // which triggers `didChangeAppLifecycleState`, which disposes and
+ // re-creates the controller.
+ controller = null;
+ await oldController.dispose();
+ }
+
+ final CameraController cameraController = CameraController(
+ cameraDescription,
+ kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium,
+ enableAudio: enableAudio,
+ imageFormatGroup: ImageFormatGroup.jpeg,
+ );
+
+ controller = cameraController;
+
+ // If the controller is updated then update the UI.
+ cameraController.addListener(() {
+ if (mounted) {
+ setState(() {});
+ }
+ });
+
+ try {
+ await cameraController.initialize();
+ await Future.wait(<Future<Object?>>[
+ // The exposure mode is currently not supported on the web.
+ ...!kIsWeb
+ ? <Future<Object?>>[
+ CameraPlatform.instance
+ .getMinExposureOffset(cameraController.cameraId)
+ .then(
+ (double value) => _minAvailableExposureOffset = value),
+ CameraPlatform.instance
+ .getMaxExposureOffset(cameraController.cameraId)
+ .then((double value) => _maxAvailableExposureOffset = value)
+ ]
+ : <Future<Object?>>[],
+ CameraPlatform.instance
+ .getMaxZoomLevel(cameraController.cameraId)
+ .then((double value) => _maxAvailableZoom = value),
+ CameraPlatform.instance
+ .getMinZoomLevel(cameraController.cameraId)
+ .then((double value) => _minAvailableZoom = value),
+ ]);
+ } on CameraException catch (e) {
+ switch (e.code) {
+ case 'CameraAccessDenied':
+ showInSnackBar('You have denied camera access.');
+ break;
+ case 'CameraAccessDeniedWithoutPrompt':
+ // iOS only
+ showInSnackBar('Please go to Settings app to enable camera access.');
+ break;
+ case 'CameraAccessRestricted':
+ // iOS only
+ showInSnackBar('Camera access is restricted.');
+ break;
+ case 'AudioAccessDenied':
+ showInSnackBar('You have denied audio access.');
+ break;
+ case 'AudioAccessDeniedWithoutPrompt':
+ // iOS only
+ showInSnackBar('Please go to Settings app to enable audio access.');
+ break;
+ case 'AudioAccessRestricted':
+ // iOS only
+ showInSnackBar('Audio access is restricted.');
+ break;
+ case 'cameraPermission':
+ // Android & web only
+ showInSnackBar('Unknown permission error.');
+ break;
+ default:
+ _showCameraException(e);
+ break;
+ }
+ }
+
+ if (mounted) {
+ setState(() {});
+ }
+ }
+
+ void onTakePictureButtonPressed() {
+ takePicture().then((XFile? file) {
+ if (mounted) {
+ setState(() {
+ imageFile = file;
+ videoController?.dispose();
+ videoController = null;
+ });
+ if (file != null) {
+ showInSnackBar('Picture saved to ${file.path}');
+ }
+ }
+ });
+ }
+
+ void onFlashModeButtonPressed() {
+ if (_flashModeControlRowAnimationController.value == 1) {
+ _flashModeControlRowAnimationController.reverse();
+ } else {
+ _flashModeControlRowAnimationController.forward();
+ _exposureModeControlRowAnimationController.reverse();
+ _focusModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onExposureModeButtonPressed() {
+ if (_exposureModeControlRowAnimationController.value == 1) {
+ _exposureModeControlRowAnimationController.reverse();
+ } else {
+ _exposureModeControlRowAnimationController.forward();
+ _flashModeControlRowAnimationController.reverse();
+ _focusModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onFocusModeButtonPressed() {
+ if (_focusModeControlRowAnimationController.value == 1) {
+ _focusModeControlRowAnimationController.reverse();
+ } else {
+ _focusModeControlRowAnimationController.forward();
+ _flashModeControlRowAnimationController.reverse();
+ _exposureModeControlRowAnimationController.reverse();
+ }
+ }
+
+ void onAudioModeButtonPressed() {
+ enableAudio = !enableAudio;
+ if (controller != null) {
+ onNewCameraSelected(controller!.description);
+ }
+ }
+
+ Future<void> onCaptureOrientationLockButtonPressed() async {
+ try {
+ if (controller != null) {
+ final CameraController cameraController = controller!;
+ if (cameraController.value.isCaptureOrientationLocked) {
+ await cameraController.unlockCaptureOrientation();
+ showInSnackBar('Capture orientation unlocked');
+ } else {
+ await cameraController.lockCaptureOrientation();
+ showInSnackBar(
+ 'Capture orientation locked to ${cameraController.value.lockedCaptureOrientation.toString().split('.').last}');
+ }
+ }
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ }
+ }
+
+ void onSetFlashModeButtonPressed(FlashMode mode) {
+ setFlashMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Flash mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onSetExposureModeButtonPressed(ExposureMode mode) {
+ setExposureMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Exposure mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onSetFocusModeButtonPressed(FocusMode mode) {
+ setFocusMode(mode).then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Focus mode set to ${mode.toString().split('.').last}');
+ });
+ }
+
+ void onVideoRecordButtonPressed() {
+ startVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ });
+ }
+
+ void onStopButtonPressed() {
+ stopVideoRecording().then((XFile? file) {
+ if (mounted) {
+ setState(() {});
+ }
+ if (file != null) {
+ showInSnackBar('Video recorded to ${file.path}');
+ videoFile = file;
+ _startVideoPlayer();
+ }
+ });
+ }
+
+ Future<void> onPausePreviewButtonPressed() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return;
+ }
+
+ if (cameraController.value.isPreviewPaused) {
+ await cameraController.resumePreview();
+ } else {
+ await cameraController.pausePreview();
+ }
+
+ if (mounted) {
+ setState(() {});
+ }
+ }
+
+ void onPauseButtonPressed() {
+ pauseVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Video recording paused');
+ });
+ }
+
+ void onResumeButtonPressed() {
+ resumeVideoRecording().then((_) {
+ if (mounted) {
+ setState(() {});
+ }
+ showInSnackBar('Video recording resumed');
+ });
+ }
+
+ Future<void> startVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return;
+ }
+
+ if (cameraController.value.isRecordingVideo) {
+ // A recording is already started, do nothing.
+ return;
+ }
+
+ try {
+ await cameraController.startVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return;
+ }
+ }
+
+ Future<XFile?> stopVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return null;
+ }
+
+ try {
+ return cameraController.stopVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return null;
+ }
+ }
+
+ Future<void> pauseVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return;
+ }
+
+ try {
+ await cameraController.pauseVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> resumeVideoRecording() async {
+ final CameraController? cameraController = controller;
+
+ if (cameraController == null || !cameraController.value.isRecordingVideo) {
+ return;
+ }
+
+ try {
+ await cameraController.resumeVideoRecording();
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setFlashMode(FlashMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setFlashMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setExposureMode(ExposureMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setExposureMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setExposureOffset(double offset) async {
+ if (controller == null) {
+ return;
+ }
+
+ setState(() {
+ _currentExposureOffset = offset;
+ });
+ try {
+ offset = await controller!.setExposureOffset(offset);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> setFocusMode(FocusMode mode) async {
+ if (controller == null) {
+ return;
+ }
+
+ try {
+ await controller!.setFocusMode(mode);
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ rethrow;
+ }
+ }
+
+ Future<void> _startVideoPlayer() async {
+ if (videoFile == null) {
+ return;
+ }
+
+ final VideoPlayerController vController = kIsWeb
+ ? VideoPlayerController.network(videoFile!.path)
+ : VideoPlayerController.file(File(videoFile!.path));
+
+ videoPlayerListener = () {
+ if (videoController != null && videoController!.value.size != null) {
+ // Refreshing the state to update video player with the correct ratio.
+ if (mounted) {
+ setState(() {});
+ }
+ videoController!.removeListener(videoPlayerListener!);
+ }
+ };
+ vController.addListener(videoPlayerListener!);
+ await vController.setLooping(true);
+ await vController.initialize();
+ await videoController?.dispose();
+ if (mounted) {
+ setState(() {
+ imageFile = null;
+ videoController = vController;
+ });
+ }
+ await vController.play();
+ }
+
+ Future<XFile?> takePicture() async {
+ final CameraController? cameraController = controller;
+ if (cameraController == null || !cameraController.value.isInitialized) {
+ showInSnackBar('Error: select a camera first.');
+ return null;
+ }
+
+ if (cameraController.value.isTakingPicture) {
+ // A capture is already pending, do nothing.
+ return null;
+ }
+
+ try {
+ final XFile file = await cameraController.takePicture();
+ return file;
+ } on CameraException catch (e) {
+ _showCameraException(e);
+ return null;
+ }
+ }
+
+ void _showCameraException(CameraException e) {
+ _logError(e.code, e.description);
+ showInSnackBar('Error: ${e.code}\n${e.description}');
+ }
+}
+
+/// CameraApp is the Main Application.
+class CameraApp extends StatelessWidget {
+ /// Default Constructor
+ const CameraApp({Key? key}) : super(key: key);
+
+ @override
+ Widget build(BuildContext context) {
+ return const MaterialApp(
+ home: CameraExampleHome(),
+ );
+ }
+}
+
+List<CameraDescription> _cameras = <CameraDescription>[];
+
+Future<void> main() async {
+ // Fetch the available cameras before initializing the app.
+ try {
+ WidgetsFlutterBinding.ensureInitialized();
+ _cameras = await CameraPlatform.instance.availableCameras();
+ } on CameraException catch (e) {
+ _logError(e.code, e.description);
+ }
+ runApp(const CameraApp());
+}
+
+/// This allows a value of type T or T? to be treated as a value of type T?.
+///
+/// We use this so that APIs that have become non-nullable can still be used
+/// with `!` and `?` on the stable branch.
+T? _ambiguate<T>(T? value) => value;
diff --git a/packages/camera/camera_avfoundation/example/pubspec.yaml b/packages/camera/camera_avfoundation/example/pubspec.yaml
new file mode 100644
index 0000000..7c85ba8
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/pubspec.yaml
@@ -0,0 +1,34 @@
+name: camera_example
+description: Demonstrates how to use the camera plugin.
+publish_to: none
+
+environment:
+ sdk: ">=2.14.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+dependencies:
+ camera_avfoundation:
+ # When depending on this package from a real application you should use:
+ # camera_avfoundation: ^x.y.z
+ # See https://dart.dev/tools/pub/dependencies#version-constraints
+ # The example app is bundled with the plugin so we use a path dependency on
+ # the parent directory to use the current plugin's version.
+ path: ../
+ camera_platform_interface: ^2.2.0
+ flutter:
+ sdk: flutter
+ path_provider: ^2.0.0
+ quiver: ^3.0.0
+ video_player: ^2.1.4
+
+dev_dependencies:
+ build_runner: ^2.1.10
+ flutter_driver:
+ sdk: flutter
+ flutter_test:
+ sdk: flutter
+ integration_test:
+ sdk: flutter
+
+flutter:
+ uses-material-design: true
diff --git a/packages/camera/camera_avfoundation/example/test_driver/integration_test.dart b/packages/camera/camera_avfoundation/example/test_driver/integration_test.dart
new file mode 100644
index 0000000..4f10f2a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/test_driver/integration_test.dart
@@ -0,0 +1,7 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:integration_test/integration_test_driver.dart';
+
+Future<void> main() => integrationDriver();
diff --git a/packages/camera/camera_avfoundation/ios/Assets/.gitkeep b/packages/camera/camera_avfoundation/ios/Assets/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Assets/.gitkeep
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPermissionUtils.h b/packages/camera/camera_avfoundation/ios/Classes/CameraPermissionUtils.h
new file mode 100644
index 0000000..5cbbab0
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPermissionUtils.h
@@ -0,0 +1,32 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import Foundation;
+#import <Flutter/Flutter.h>
+
+typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *);
+
+/// Requests camera access permission.
+///
+/// If it is the first time requesting camera access, a permission dialog will show up on the
+/// screen. Otherwise AVFoundation simply returns the user's previous choice, and in this case the
+/// user will have to update the choice in Settings app.
+///
+/// @param handler if access permission is (or was previously) granted, completion handler will be
+/// called without error; Otherwise completion handler will be called with error. Handler can be
+/// called on an arbitrary dispatch queue.
+extern void FLTRequestCameraPermissionWithCompletionHandler(
+ FLTCameraPermissionRequestCompletionHandler handler);
+
+/// Requests audio access permission.
+///
+/// If it is the first time requesting audio access, a permission dialog will show up on the
+/// screen. Otherwise AVFoundation simply returns the user's previous choice, and in this case the
+/// user will have to update the choice in the Settings app.
+///
+/// @param handler if access permission is (or was previously) granted, completion handler will be
+/// called without error; Otherwise completion handler will be called with error. Handler can be
+/// called on an arbitrary dispatch queue.
+extern void FLTRequestAudioPermissionWithCompletionHandler(
+ FLTCameraPermissionRequestCompletionHandler handler);
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPermissionUtils.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPermissionUtils.m
new file mode 100644
index 0000000..098265a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPermissionUtils.m
@@ -0,0 +1,87 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import AVFoundation;
+#import "CameraPermissionUtils.h"
+
+void FLTRequestPermission(BOOL forAudio, FLTCameraPermissionRequestCompletionHandler handler) {
+ AVMediaType mediaType;
+ if (forAudio) {
+ mediaType = AVMediaTypeAudio;
+ } else {
+ mediaType = AVMediaTypeVideo;
+ }
+
+ switch ([AVCaptureDevice authorizationStatusForMediaType:mediaType]) {
+ case AVAuthorizationStatusAuthorized:
+ handler(nil);
+ break;
+ case AVAuthorizationStatusDenied: {
+ FlutterError *flutterError;
+ if (forAudio) {
+ flutterError =
+ [FlutterError errorWithCode:@"AudioAccessDeniedWithoutPrompt"
+ message:@"User has previously denied the audio access request. "
+ @"Go to Settings to enable audio access."
+ details:nil];
+ } else {
+ flutterError =
+ [FlutterError errorWithCode:@"CameraAccessDeniedWithoutPrompt"
+ message:@"User has previously denied the camera access request. "
+ @"Go to Settings to enable camera access."
+ details:nil];
+ }
+ handler(flutterError);
+ break;
+ }
+ case AVAuthorizationStatusRestricted: {
+ FlutterError *flutterError;
+ if (forAudio) {
+ flutterError = [FlutterError errorWithCode:@"AudioAccessRestricted"
+ message:@"Audio access is restricted. "
+ details:nil];
+ } else {
+ flutterError = [FlutterError errorWithCode:@"CameraAccessRestricted"
+ message:@"Camera access is restricted. "
+ details:nil];
+ }
+ handler(flutterError);
+ break;
+ }
+ case AVAuthorizationStatusNotDetermined: {
+ [AVCaptureDevice requestAccessForMediaType:mediaType
+ completionHandler:^(BOOL granted) {
+ // handler can be invoked on an arbitrary dispatch queue.
+ if (granted) {
+ handler(nil);
+ } else {
+ FlutterError *flutterError;
+ if (forAudio) {
+ flutterError = [FlutterError
+ errorWithCode:@"AudioAccessDenied"
+ message:@"User denied the audio access request."
+ details:nil];
+ } else {
+ flutterError = [FlutterError
+ errorWithCode:@"CameraAccessDenied"
+ message:@"User denied the camera access request."
+ details:nil];
+ }
+ handler(flutterError);
+ }
+ }];
+ break;
+ }
+ }
+}
+
+void FLTRequestCameraPermissionWithCompletionHandler(
+ FLTCameraPermissionRequestCompletionHandler handler) {
+ FLTRequestPermission(/*forAudio*/ NO, handler);
+}
+
+void FLTRequestAudioPermissionWithCompletionHandler(
+ FLTCameraPermissionRequestCompletionHandler handler) {
+ FLTRequestPermission(/*forAudio*/ YES, handler);
+}
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.h b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.h
new file mode 100644
index 0000000..f13d810
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.h
@@ -0,0 +1,8 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+
+@interface CameraPlugin : NSObject <FlutterPlugin>
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
new file mode 100644
index 0000000..b85f68d
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
@@ -0,0 +1,339 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "CameraPlugin.h"
+#import "CameraPlugin_Test.h"
+
+@import AVFoundation;
+
+#import "CameraPermissionUtils.h"
+#import "CameraProperties.h"
+#import "FLTCam.h"
+#import "FLTThreadSafeEventChannel.h"
+#import "FLTThreadSafeFlutterResult.h"
+#import "FLTThreadSafeMethodChannel.h"
+#import "FLTThreadSafeTextureRegistry.h"
+#import "QueueUtils.h"
+
+@interface CameraPlugin ()
+@property(readonly, nonatomic) FLTThreadSafeTextureRegistry *registry;
+@property(readonly, nonatomic) NSObject<FlutterBinaryMessenger> *messenger;
+@end
+
+@implementation CameraPlugin
+
++ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
+ FlutterMethodChannel *channel =
+ [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/camera_avfoundation"
+ binaryMessenger:[registrar messenger]];
+ CameraPlugin *instance = [[CameraPlugin alloc] initWithRegistry:[registrar textures]
+ messenger:[registrar messenger]];
+ [registrar addMethodCallDelegate:instance channel:channel];
+}
+
+- (instancetype)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry
+ messenger:(NSObject<FlutterBinaryMessenger> *)messenger {
+ self = [super init];
+ NSAssert(self, @"super init cannot be nil");
+ _registry = [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:registry];
+ _messenger = messenger;
+ _captureSessionQueue = dispatch_queue_create("io.flutter.camera.captureSessionQueue", NULL);
+ dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific,
+ (void *)FLTCaptureSessionQueueSpecific, NULL);
+
+ [self initDeviceEventMethodChannel];
+ [self startOrientationListener];
+ return self;
+}
+
+- (void)initDeviceEventMethodChannel {
+ FlutterMethodChannel *methodChannel = [FlutterMethodChannel
+ methodChannelWithName:@"plugins.flutter.io/camera_avfoundation/fromPlatform"
+ binaryMessenger:_messenger];
+ _deviceEventMethodChannel =
+ [[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:methodChannel];
+}
+
+- (void)detachFromEngineForRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
+ [UIDevice.currentDevice endGeneratingDeviceOrientationNotifications];
+}
+
+- (void)startOrientationListener {
+ [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(orientationChanged:)
+ name:UIDeviceOrientationDidChangeNotification
+ object:[UIDevice currentDevice]];
+}
+
+- (void)orientationChanged:(NSNotification *)note {
+ UIDevice *device = note.object;
+ UIDeviceOrientation orientation = device.orientation;
+
+ if (orientation == UIDeviceOrientationFaceUp || orientation == UIDeviceOrientationFaceDown) {
+ // Do not change when oriented flat.
+ return;
+ }
+
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ // `FLTCam::setDeviceOrientation` must be called on capture session queue.
+ [weakSelf.camera setDeviceOrientation:orientation];
+ // `CameraPlugin::sendDeviceOrientation` can be called on any queue.
+ [weakSelf sendDeviceOrientation:orientation];
+ });
+}
+
+- (void)sendDeviceOrientation:(UIDeviceOrientation)orientation {
+ [_deviceEventMethodChannel
+ invokeMethod:@"orientation_changed"
+ arguments:@{@"orientation" : FLTGetStringForUIDeviceOrientation(orientation)}];
+}
+
+- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
+ // Invoke the plugin on another dispatch queue to avoid blocking the UI.
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ FLTThreadSafeFlutterResult *threadSafeResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:result];
+ [weakSelf handleMethodCallAsync:call result:threadSafeResult];
+ });
+}
+
+- (void)handleMethodCallAsync:(FlutterMethodCall *)call
+ result:(FLTThreadSafeFlutterResult *)result {
+ if ([@"availableCameras" isEqualToString:call.method]) {
+ if (@available(iOS 10.0, *)) {
+ NSMutableArray *discoveryDevices =
+ [@[ AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera ]
+ mutableCopy];
+ if (@available(iOS 13.0, *)) {
+ [discoveryDevices addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera];
+ }
+ AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
+ discoverySessionWithDeviceTypes:discoveryDevices
+ mediaType:AVMediaTypeVideo
+ position:AVCaptureDevicePositionUnspecified];
+ NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
+ NSMutableArray<NSDictionary<NSString *, NSObject *> *> *reply =
+ [[NSMutableArray alloc] initWithCapacity:devices.count];
+ for (AVCaptureDevice *device in devices) {
+ NSString *lensFacing;
+ switch ([device position]) {
+ case AVCaptureDevicePositionBack:
+ lensFacing = @"back";
+ break;
+ case AVCaptureDevicePositionFront:
+ lensFacing = @"front";
+ break;
+ case AVCaptureDevicePositionUnspecified:
+ lensFacing = @"external";
+ break;
+ }
+ [reply addObject:@{
+ @"name" : [device uniqueID],
+ @"lensFacing" : lensFacing,
+ @"sensorOrientation" : @90,
+ }];
+ }
+ [result sendSuccessWithData:reply];
+ } else {
+ [result sendNotImplemented];
+ }
+ } else if ([@"create" isEqualToString:call.method]) {
+ [self handleCreateMethodCall:call result:result];
+ } else if ([@"startImageStream" isEqualToString:call.method]) {
+ [_camera startImageStreamWithMessenger:_messenger];
+ [result sendSuccess];
+ } else if ([@"stopImageStream" isEqualToString:call.method]) {
+ [_camera stopImageStream];
+ [result sendSuccess];
+ } else if ([@"receivedImageStreamData" isEqualToString:call.method]) {
+ [_camera receivedImageStreamData];
+ [result sendSuccess];
+ } else {
+ NSDictionary *argsMap = call.arguments;
+ NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue;
+ if ([@"initialize" isEqualToString:call.method]) {
+ NSString *videoFormatValue = ((NSString *)argsMap[@"imageFormatGroup"]);
+ [_camera setVideoFormat:FLTGetVideoFormatFromString(videoFormatValue)];
+
+ __weak CameraPlugin *weakSelf = self;
+ _camera.onFrameAvailable = ^{
+ if (![weakSelf.camera isPreviewPaused]) {
+ [weakSelf.registry textureFrameAvailable:cameraId];
+ }
+ };
+ FlutterMethodChannel *methodChannel = [FlutterMethodChannel
+ methodChannelWithName:
+ [NSString stringWithFormat:@"plugins.flutter.io/camera_avfoundation/camera%lu",
+ (unsigned long)cameraId]
+ binaryMessenger:_messenger];
+ FLTThreadSafeMethodChannel *threadSafeMethodChannel =
+ [[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:methodChannel];
+ _camera.methodChannel = threadSafeMethodChannel;
+ [threadSafeMethodChannel
+ invokeMethod:@"initialized"
+ arguments:@{
+ @"previewWidth" : @(_camera.previewSize.width),
+ @"previewHeight" : @(_camera.previewSize.height),
+ @"exposureMode" : FLTGetStringForFLTExposureMode([_camera exposureMode]),
+ @"focusMode" : FLTGetStringForFLTFocusMode([_camera focusMode]),
+ @"exposurePointSupported" :
+ @([_camera.captureDevice isExposurePointOfInterestSupported]),
+ @"focusPointSupported" : @([_camera.captureDevice isFocusPointOfInterestSupported]),
+ }];
+ [self sendDeviceOrientation:[UIDevice currentDevice].orientation];
+ [_camera start];
+ [result sendSuccess];
+ } else if ([@"takePicture" isEqualToString:call.method]) {
+ if (@available(iOS 10.0, *)) {
+ [_camera captureToFile:result];
+ } else {
+ [result sendNotImplemented];
+ }
+ } else if ([@"dispose" isEqualToString:call.method]) {
+ [_registry unregisterTexture:cameraId];
+ [_camera close];
+ [result sendSuccess];
+ } else if ([@"prepareForVideoRecording" isEqualToString:call.method]) {
+ [self.camera setUpCaptureSessionForAudio];
+ [result sendSuccess];
+ } else if ([@"startVideoRecording" isEqualToString:call.method]) {
+ BOOL enableStream = [call.arguments[@"enableStream"] boolValue];
+ if (enableStream) {
+ [_camera startVideoRecordingWithResult:result messengerForStreaming:_messenger];
+ } else {
+ [_camera startVideoRecordingWithResult:result];
+ }
+ } else if ([@"stopVideoRecording" isEqualToString:call.method]) {
+ [_camera stopVideoRecordingWithResult:result];
+ } else if ([@"pauseVideoRecording" isEqualToString:call.method]) {
+ [_camera pauseVideoRecordingWithResult:result];
+ } else if ([@"resumeVideoRecording" isEqualToString:call.method]) {
+ [_camera resumeVideoRecordingWithResult:result];
+ } else if ([@"getMaxZoomLevel" isEqualToString:call.method]) {
+ [_camera getMaxZoomLevelWithResult:result];
+ } else if ([@"getMinZoomLevel" isEqualToString:call.method]) {
+ [_camera getMinZoomLevelWithResult:result];
+ } else if ([@"setZoomLevel" isEqualToString:call.method]) {
+ CGFloat zoom = ((NSNumber *)argsMap[@"zoom"]).floatValue;
+ [_camera setZoomLevel:zoom Result:result];
+ } else if ([@"setFlashMode" isEqualToString:call.method]) {
+ [_camera setFlashModeWithResult:result mode:call.arguments[@"mode"]];
+ } else if ([@"setExposureMode" isEqualToString:call.method]) {
+ [_camera setExposureModeWithResult:result mode:call.arguments[@"mode"]];
+ } else if ([@"setExposurePoint" isEqualToString:call.method]) {
+ BOOL reset = ((NSNumber *)call.arguments[@"reset"]).boolValue;
+ double x = 0.5;
+ double y = 0.5;
+ if (!reset) {
+ x = ((NSNumber *)call.arguments[@"x"]).doubleValue;
+ y = ((NSNumber *)call.arguments[@"y"]).doubleValue;
+ }
+ [_camera setExposurePointWithResult:result x:x y:y];
+ } else if ([@"getMinExposureOffset" isEqualToString:call.method]) {
+ [result sendSuccessWithData:@(_camera.captureDevice.minExposureTargetBias)];
+ } else if ([@"getMaxExposureOffset" isEqualToString:call.method]) {
+ [result sendSuccessWithData:@(_camera.captureDevice.maxExposureTargetBias)];
+ } else if ([@"getExposureOffsetStepSize" isEqualToString:call.method]) {
+ [result sendSuccessWithData:@(0.0)];
+ } else if ([@"setExposureOffset" isEqualToString:call.method]) {
+ [_camera setExposureOffsetWithResult:result
+ offset:((NSNumber *)call.arguments[@"offset"]).doubleValue];
+ } else if ([@"lockCaptureOrientation" isEqualToString:call.method]) {
+ [_camera lockCaptureOrientationWithResult:result orientation:call.arguments[@"orientation"]];
+ } else if ([@"unlockCaptureOrientation" isEqualToString:call.method]) {
+ [_camera unlockCaptureOrientationWithResult:result];
+ } else if ([@"setFocusMode" isEqualToString:call.method]) {
+ [_camera setFocusModeWithResult:result mode:call.arguments[@"mode"]];
+ } else if ([@"setFocusPoint" isEqualToString:call.method]) {
+ BOOL reset = ((NSNumber *)call.arguments[@"reset"]).boolValue;
+ double x = 0.5;
+ double y = 0.5;
+ if (!reset) {
+ x = ((NSNumber *)call.arguments[@"x"]).doubleValue;
+ y = ((NSNumber *)call.arguments[@"y"]).doubleValue;
+ }
+ [_camera setFocusPointWithResult:result x:x y:y];
+ } else if ([@"pausePreview" isEqualToString:call.method]) {
+ [_camera pausePreviewWithResult:result];
+ } else if ([@"resumePreview" isEqualToString:call.method]) {
+ [_camera resumePreviewWithResult:result];
+ } else {
+ [result sendNotImplemented];
+ }
+ }
+}
+
+- (void)handleCreateMethodCall:(FlutterMethodCall *)call
+ result:(FLTThreadSafeFlutterResult *)result {
+ // Create FLTCam only if granted camera access (and audio access if audio is enabled)
+ __weak typeof(self) weakSelf = self;
+ FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+
+ if (error) {
+ [result sendFlutterError:error];
+ } else {
+ // Request audio permission on `create` call with `enableAudio` argument instead of the
+ // `prepareForVideoRecording` call. This is because `prepareForVideoRecording` call is
+ // optional, and used as a workaround to fix a missing frame issue on iOS.
+ BOOL audioEnabled = [call.arguments[@"enableAudio"] boolValue];
+ if (audioEnabled) {
+ // Setup audio capture session only if granted audio access.
+ FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
+      // cannot use the outer `strongSelf`
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+ if (error) {
+ [result sendFlutterError:error];
+ } else {
+ [strongSelf createCameraOnSessionQueueWithCreateMethodCall:call result:result];
+ }
+ });
+ } else {
+ [strongSelf createCameraOnSessionQueueWithCreateMethodCall:call result:result];
+ }
+ }
+ });
+}
+
+- (void)createCameraOnSessionQueueWithCreateMethodCall:(FlutterMethodCall *)createMethodCall
+ result:(FLTThreadSafeFlutterResult *)result {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+
+ NSString *cameraName = createMethodCall.arguments[@"cameraName"];
+ NSString *resolutionPreset = createMethodCall.arguments[@"resolutionPreset"];
+ NSNumber *enableAudio = createMethodCall.arguments[@"enableAudio"];
+ NSError *error;
+ FLTCam *cam = [[FLTCam alloc] initWithCameraName:cameraName
+ resolutionPreset:resolutionPreset
+ enableAudio:[enableAudio boolValue]
+ orientation:[[UIDevice currentDevice] orientation]
+ captureSessionQueue:strongSelf.captureSessionQueue
+ error:&error];
+
+ if (error) {
+ [result sendError:error];
+ } else {
+ if (strongSelf.camera) {
+ [strongSelf.camera close];
+ }
+ strongSelf.camera = cam;
+ [strongSelf.registry registerTexture:cam
+ completion:^(int64_t textureId) {
+ [result sendSuccessWithData:@{
+ @"cameraId" : @(textureId),
+ }];
+ }];
+ }
+ });
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap
new file mode 100644
index 0000000..abdad1a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap
@@ -0,0 +1,20 @@
+framework module camera_avfoundation {
+ umbrella header "camera_avfoundation-umbrella.h"
+
+ export *
+ module * { export * }
+
+ explicit module Test {
+ header "CameraPlugin_Test.h"
+ header "CameraPermissionUtils.h"
+ header "CameraProperties.h"
+ header "FLTCam.h"
+ header "FLTCam_Test.h"
+ header "FLTSavePhotoDelegate_Test.h"
+ header "FLTThreadSafeEventChannel.h"
+ header "FLTThreadSafeFlutterResult.h"
+ header "FLTThreadSafeMethodChannel.h"
+ header "FLTThreadSafeTextureRegistry.h"
+ header "QueueUtils.h"
+ }
+}
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h
new file mode 100644
index 0000000..f6c97da
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h
@@ -0,0 +1,51 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This header is available in the Test module. Import via "@import camera_avfoundation.Test;"
+
+#import "CameraPlugin.h"
+#import "FLTCam.h"
+#import "FLTThreadSafeFlutterResult.h"
+
+/// APIs exposed for unit testing.
+@interface CameraPlugin ()
+
+/// All FLTCam's state access and capture session related operations should be run on this queue.
+@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
+
+/// An internal camera object that manages camera's state and performs camera operations.
+@property(nonatomic, strong) FLTCam *camera;
+
+/// A thread safe wrapper of the method channel used to send device events such as orientation
+/// changes.
+@property(nonatomic, strong) FLTThreadSafeMethodChannel *deviceEventMethodChannel;
+
+/// Inject @p FlutterTextureRegistry and @p FlutterBinaryMessenger for unit testing.
+- (instancetype)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry
+ messenger:(NSObject<FlutterBinaryMessenger> *)messenger
+ NS_DESIGNATED_INITIALIZER;
+
+/// Hide the default public constructor.
+- (instancetype)init NS_UNAVAILABLE;
+
+/// Handles `FlutterMethodCall`s and ensures the result is sent on the main dispatch queue.
+///
+/// @param call The method call command object.
+/// @param result A wrapper around the `FlutterResult` callback which ensures the callback is called
+/// on the main dispatch queue.
+- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FLTThreadSafeFlutterResult *)result;
+
+/// Called by @c NSNotificationCenter each time the device's orientation is changed.
+///
+/// @param notification @c NSNotification instance containing a reference to the `UIDevice` object
+/// that triggered the orientation change.
+- (void)orientationChanged:(NSNotification *)notification;
+
+/// Creates FLTCam on session queue and reports the creation result.
+/// @param createMethodCall the create method call
+/// @param result a thread safe flutter result wrapper object to report creation result.
+- (void)createCameraOnSessionQueueWithCreateMethodCall:(FlutterMethodCall *)createMethodCall
+ result:(FLTThreadSafeFlutterResult *)result;
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h
new file mode 100644
index 0000000..aee4d64
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h
@@ -0,0 +1,118 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import AVFoundation;
+@import Foundation;
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark - flash mode
+
+/**
+ * Represents camera's flash mode. Mirrors `FlashMode` enum in flash_mode.dart.
+ */
+typedef NS_ENUM(NSInteger, FLTFlashMode) {
+ FLTFlashModeOff,
+ FLTFlashModeAuto,
+ FLTFlashModeAlways,
+ FLTFlashModeTorch,
+};
+
+/**
+ * Gets FLTFlashMode from its string representation.
+ * @param mode a string representation of the FLTFlashMode.
+ */
+extern FLTFlashMode FLTGetFLTFlashModeForString(NSString *mode);
+
+/**
+ * Gets AVCaptureFlashMode from FLTFlashMode.
+ * @param mode flash mode.
+ */
+extern AVCaptureFlashMode FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashMode mode);
+
+#pragma mark - exposure mode
+
+/**
+ * Represents camera's exposure mode. Mirrors ExposureMode in camera.dart.
+ */
+typedef NS_ENUM(NSInteger, FLTExposureMode) {
+ FLTExposureModeAuto,
+ FLTExposureModeLocked,
+};
+
+/**
+ * Gets a string representation of exposure mode.
+ * @param mode exposure mode
+ */
+extern NSString *FLTGetStringForFLTExposureMode(FLTExposureMode mode);
+
+/**
+ * Gets FLTExposureMode from its string representation.
+ * @param mode a string representation of the FLTExposureMode.
+ */
+extern FLTExposureMode FLTGetFLTExposureModeForString(NSString *mode);
+
+#pragma mark - focus mode
+
+/**
+ * Represents camera's focus mode. Mirrors FocusMode in camera.dart.
+ */
+typedef NS_ENUM(NSInteger, FLTFocusMode) {
+ FLTFocusModeAuto,
+ FLTFocusModeLocked,
+};
+
+/**
+ * Gets a string representation from FLTFocusMode.
+ * @param mode focus mode
+ */
+extern NSString *FLTGetStringForFLTFocusMode(FLTFocusMode mode);
+
+/**
+ * Gets FLTFocusMode from its string representation.
+ * @param mode a string representation of focus mode.
+ */
+extern FLTFocusMode FLTGetFLTFocusModeForString(NSString *mode);
+
+#pragma mark - device orientation
+
+/**
+ * Gets UIDeviceOrientation from its string representation.
+ */
+extern UIDeviceOrientation FLTGetUIDeviceOrientationForString(NSString *orientation);
+
+/**
+ * Gets a string representation of UIDeviceOrientation.
+ */
+extern NSString *FLTGetStringForUIDeviceOrientation(UIDeviceOrientation orientation);
+
+#pragma mark - resolution preset
+
+/**
+ * Represents camera's resolution preset. Mirrors ResolutionPreset in camera.dart.
+ */
+typedef NS_ENUM(NSInteger, FLTResolutionPreset) {
+ FLTResolutionPresetVeryLow,
+ FLTResolutionPresetLow,
+ FLTResolutionPresetMedium,
+ FLTResolutionPresetHigh,
+ FLTResolutionPresetVeryHigh,
+ FLTResolutionPresetUltraHigh,
+ FLTResolutionPresetMax,
+};
+
+/**
+ * Gets FLTResolutionPreset from its string representation.
+ * @param preset a string representation of FLTResolutionPreset.
+ */
+extern FLTResolutionPreset FLTGetFLTResolutionPresetForString(NSString *preset);
+
+#pragma mark - video format
+
+/**
+ * Gets VideoFormat from its string representation.
+ */
+extern OSType FLTGetVideoFormatFromString(NSString *videoFormatString);
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m
new file mode 100644
index 0000000..e36f98a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m
@@ -0,0 +1,187 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "CameraProperties.h"
+
+#pragma mark - flash mode
+
+FLTFlashMode FLTGetFLTFlashModeForString(NSString *mode) {
+ if ([mode isEqualToString:@"off"]) {
+ return FLTFlashModeOff;
+ } else if ([mode isEqualToString:@"auto"]) {
+ return FLTFlashModeAuto;
+ } else if ([mode isEqualToString:@"always"]) {
+ return FLTFlashModeAlways;
+ } else if ([mode isEqualToString:@"torch"]) {
+ return FLTFlashModeTorch;
+ } else {
+ NSError *error = [NSError errorWithDomain:NSCocoaErrorDomain
+ code:NSURLErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey : [NSString
+ stringWithFormat:@"Unknown flash mode %@", mode]
+ }];
+ @throw error;
+ }
+}
+
+AVCaptureFlashMode FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashMode mode) {
+ switch (mode) {
+ case FLTFlashModeOff:
+ return AVCaptureFlashModeOff;
+ case FLTFlashModeAuto:
+ return AVCaptureFlashModeAuto;
+ case FLTFlashModeAlways:
+ return AVCaptureFlashModeOn;
+ case FLTFlashModeTorch:
+ default:
+ return -1;
+ }
+}
+
+#pragma mark - exposure mode
+
+NSString *FLTGetStringForFLTExposureMode(FLTExposureMode mode) {
+ switch (mode) {
+ case FLTExposureModeAuto:
+ return @"auto";
+ case FLTExposureModeLocked:
+ return @"locked";
+ }
+ NSError *error = [NSError errorWithDomain:NSCocoaErrorDomain
+ code:NSURLErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey : [NSString
+ stringWithFormat:@"Unknown string for exposure mode"]
+ }];
+ @throw error;
+}
+
+FLTExposureMode FLTGetFLTExposureModeForString(NSString *mode) {
+ if ([mode isEqualToString:@"auto"]) {
+ return FLTExposureModeAuto;
+ } else if ([mode isEqualToString:@"locked"]) {
+ return FLTExposureModeLocked;
+ } else {
+ NSError *error = [NSError errorWithDomain:NSCocoaErrorDomain
+ code:NSURLErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey : [NSString
+ stringWithFormat:@"Unknown exposure mode %@", mode]
+ }];
+ @throw error;
+ }
+}
+
+#pragma mark - focus mode
+
+NSString *FLTGetStringForFLTFocusMode(FLTFocusMode mode) {
+ switch (mode) {
+ case FLTFocusModeAuto:
+ return @"auto";
+ case FLTFocusModeLocked:
+ return @"locked";
+ }
+ NSError *error = [NSError errorWithDomain:NSCocoaErrorDomain
+ code:NSURLErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey : [NSString
+ stringWithFormat:@"Unknown string for focus mode"]
+ }];
+ @throw error;
+}
+
+FLTFocusMode FLTGetFLTFocusModeForString(NSString *mode) {
+ if ([mode isEqualToString:@"auto"]) {
+ return FLTFocusModeAuto;
+ } else if ([mode isEqualToString:@"locked"]) {
+ return FLTFocusModeLocked;
+ } else {
+ NSError *error = [NSError errorWithDomain:NSCocoaErrorDomain
+ code:NSURLErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey : [NSString
+ stringWithFormat:@"Unknown focus mode %@", mode]
+ }];
+ @throw error;
+ }
+}
+
+#pragma mark - device orientation
+
+UIDeviceOrientation FLTGetUIDeviceOrientationForString(NSString *orientation) {
+ if ([orientation isEqualToString:@"portraitDown"]) {
+ return UIDeviceOrientationPortraitUpsideDown;
+ } else if ([orientation isEqualToString:@"landscapeLeft"]) {
+ return UIDeviceOrientationLandscapeRight;
+ } else if ([orientation isEqualToString:@"landscapeRight"]) {
+ return UIDeviceOrientationLandscapeLeft;
+ } else if ([orientation isEqualToString:@"portraitUp"]) {
+ return UIDeviceOrientationPortrait;
+ } else {
+ NSError *error = [NSError
+ errorWithDomain:NSCocoaErrorDomain
+ code:NSURLErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey :
+ [NSString stringWithFormat:@"Unknown device orientation %@", orientation]
+ }];
+ @throw error;
+ }
+}
+
+NSString *FLTGetStringForUIDeviceOrientation(UIDeviceOrientation orientation) {
+ switch (orientation) {
+ case UIDeviceOrientationPortraitUpsideDown:
+ return @"portraitDown";
+ case UIDeviceOrientationLandscapeRight:
+ return @"landscapeLeft";
+ case UIDeviceOrientationLandscapeLeft:
+ return @"landscapeRight";
+ case UIDeviceOrientationPortrait:
+ default:
+ return @"portraitUp";
+ };
+}
+
+#pragma mark - resolution preset
+
+FLTResolutionPreset FLTGetFLTResolutionPresetForString(NSString *preset) {
+ if ([preset isEqualToString:@"veryLow"]) {
+ return FLTResolutionPresetVeryLow;
+ } else if ([preset isEqualToString:@"low"]) {
+ return FLTResolutionPresetLow;
+ } else if ([preset isEqualToString:@"medium"]) {
+ return FLTResolutionPresetMedium;
+ } else if ([preset isEqualToString:@"high"]) {
+ return FLTResolutionPresetHigh;
+ } else if ([preset isEqualToString:@"veryHigh"]) {
+ return FLTResolutionPresetVeryHigh;
+ } else if ([preset isEqualToString:@"ultraHigh"]) {
+ return FLTResolutionPresetUltraHigh;
+ } else if ([preset isEqualToString:@"max"]) {
+ return FLTResolutionPresetMax;
+ } else {
+ NSError *error = [NSError errorWithDomain:NSCocoaErrorDomain
+ code:NSURLErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey : [NSString
+ stringWithFormat:@"Unknown resolution preset %@", preset]
+ }];
+ @throw error;
+ }
+}
+
+#pragma mark - video format
+
+OSType FLTGetVideoFormatFromString(NSString *videoFormatString) {
+  if ([videoFormatString isEqualToString:@"bgra8888"]) {
+    return kCVPixelFormatType_32BGRA;
+  } else if ([videoFormatString isEqualToString:@"yuv420"]) {
+    return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+  } else {
+    NSLog(@"The selected imageFormatGroup is not supported by iOS. Defaulting to bgra8888");
+    return kCVPixelFormatType_32BGRA;
+  }
+}
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
new file mode 100644
index 0000000..85b8e2a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
@@ -0,0 +1,110 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import AVFoundation;
+@import Foundation;
+@import Flutter;
+
+#import "CameraProperties.h"
+#import "FLTThreadSafeEventChannel.h"
+#import "FLTThreadSafeFlutterResult.h"
+#import "FLTThreadSafeMethodChannel.h"
+#import "FLTThreadSafeTextureRegistry.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A class that manages camera's state and performs camera operations.
+ */
+@interface FLTCam : NSObject <FlutterTexture>
+
+/// The underlying AVFoundation device this camera wraps.
+@property(readonly, nonatomic) AVCaptureDevice *captureDevice;
+/// Dimensions of the preview, determined by the selected resolution preset.
+@property(readonly, nonatomic) CGSize previewSize;
+@property(assign, nonatomic) BOOL isPreviewPaused;
+/// Invoked whenever a new preview frame becomes available.
+@property(nonatomic, copy) void (^onFrameAvailable)(void);
+/// Channel used to report camera events and errors back to Dart.
+@property(nonatomic) FLTThreadSafeMethodChannel *methodChannel;
+@property(assign, nonatomic) FLTResolutionPreset resolutionPreset;
+@property(assign, nonatomic) FLTExposureMode exposureMode;
+@property(assign, nonatomic) FLTFocusMode focusMode;
+@property(assign, nonatomic) FLTFlashMode flashMode;
+// Format used for video and image streaming.
+@property(assign, nonatomic) FourCharCode videoFormat;
+
+/// Initializes an `FLTCam` instance.
+/// @param cameraName a name used to uniquely identify the camera.
+/// @param resolutionPreset the resolution preset
+/// @param enableAudio YES if audio should be enabled for video capturing; NO otherwise.
+/// @param orientation the orientation of camera
+/// @param captureSessionQueue the queue on which camera's capture session operations happen.
+/// @param error report to the caller if any error happened creating the camera.
+- (instancetype)initWithCameraName:(NSString *)cameraName
+                  resolutionPreset:(NSString *)resolutionPreset
+                       enableAudio:(BOOL)enableAudio
+                       orientation:(UIDeviceOrientation)orientation
+               captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+                             error:(NSError **)error;
+/// Starts the underlying capture session.
+- (void)start;
+/// Stops the underlying capture session.
+- (void)stop;
+/// Updates capture connections to match a new physical device orientation.
+- (void)setDeviceOrientation:(UIDeviceOrientation)orientation;
+/// Captures a still photo; `result` receives the saved file's path.
+- (void)captureToFile:(FLTThreadSafeFlutterResult *)result API_AVAILABLE(ios(10));
+/// Tears down the capture session, removing all of its inputs and outputs.
+- (void)close;
+- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result;
+/**
+ * Starts recording a video with an optional streaming messenger.
+ * If the messenger is non-null then it will be called for each
+ * captured frame, allowing streaming concurrently with recording.
+ *
+ * @param messenger Nullable messenger for capturing each frame.
+ */
+- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result
+                messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger;
+- (void)stopVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)pauseVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)resumeVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result;
+/// Locks capture to `orientationStr` until unlockCaptureOrientationWithResult:.
+- (void)lockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result
+                             orientation:(NSString *)orientationStr;
+- (void)unlockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)setFlashModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
+- (void)setExposureModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
+- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
+- (void)applyFocusMode;
+
+/**
+ * Acknowledges the receipt of one image stream frame.
+ *
+ * This should be called each time a frame is received. Failing to call it may
+ * cause later frames to be dropped instead of streamed.
+ */
+- (void)receivedImageStreamData;
+
+/**
+ * Applies FocusMode on the AVCaptureDevice.
+ *
+ * If the @c focusMode is set to FocusModeAuto the AVCaptureDevice is configured to use
+ * AVCaptureFocusModeContinuousModeAutoFocus when supported, otherwise it is set to
+ * AVCaptureFocusModeAutoFocus. If neither AVCaptureFocusModeContinuousModeAutoFocus nor
+ * AVCaptureFocusModeAutoFocus are supported focus mode will not be set.
+ * If @c focusMode is set to FocusModeLocked the AVCaptureDevice is configured to use
+ * AVCaptureFocusModeAutoFocus. If AVCaptureFocusModeAutoFocus is not supported focus mode will not
+ * be set.
+ *
+ * @param focusMode The focus mode that should be applied to the @captureDevice instance.
+ * @param captureDevice The AVCaptureDevice to which the @focusMode will be applied.
+ */
+- (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice;
+- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y;
+- (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y;
+- (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset;
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger;
+- (void)stopImageStream;
+- (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)getMinZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)setZoomLevel:(CGFloat)zoom Result:(FLTThreadSafeFlutterResult *)result;
+/// Adds and configures the audio input/output pair on the capture session.
+- (void)setUpCaptureSessionForAudio;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
new file mode 100644
index 0000000..a7d6cd2
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
@@ -0,0 +1,1116 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTCam.h"
+#import "FLTCam_Test.h"
+#import "FLTSavePhotoDelegate.h"
+#import "QueueUtils.h"
+
+@import CoreMotion;
+#import <libkern/OSAtomic.h>
+
+@implementation FLTImageStreamHandler
+
+// Designated initializer; stores the queue that `eventSink` must be
+// mutated on.
+- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
+  self = [super init];
+  NSAssert(self, @"super init cannot be nil");
+  _captureSessionQueue = captureSessionQueue;
+  return self;
+}
+
+- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
+  // `eventSink` is read on the capture session queue (the sample buffer
+  // delegate in FLTCam runs there), so clear it on that same queue to avoid
+  // a data race.
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    weakSelf.eventSink = nil;
+  });
+  return nil;
+}
+
+- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
+                                       eventSink:(nonnull FlutterEventSink)events {
+  // Mirror of onCancel: assign the sink on the capture session queue.
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    weakSelf.eventSink = events;
+  });
+  return nil;
+}
+@end
+
+@interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
+                      AVCaptureAudioDataOutputSampleBufferDelegate>
+
+// Texture identifier for this camera; registered with the Flutter engine.
+@property(readonly, nonatomic) int64_t textureId;
+// Whether audio capture was requested at construction time.
+@property BOOL enableAudio;
+// Stream handler delivering image-stream frames to Dart.
+@property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
+@property(readonly, nonatomic) AVCaptureSession *captureSession;
+
+@property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
+/// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback.
+/// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API.
+@property(readwrite, nonatomic) CVPixelBufferRef latestPixelBuffer;
+@property(readonly, nonatomic) CGSize captureSize;
+// AVAssetWriter pipeline used while recording video (and optionally audio).
+@property(strong, nonatomic) AVAssetWriter *videoWriter;
+@property(strong, nonatomic) AVAssetWriterInput *videoWriterInput;
+@property(strong, nonatomic) AVAssetWriterInput *audioWriterInput;
+@property(strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor;
+@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
+@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
+// Destination path of the recording in progress; nil when not recording.
+@property(strong, nonatomic) NSString *videoRecordingPath;
+@property(assign, nonatomic) BOOL isRecording;
+@property(assign, nonatomic) BOOL isRecordingPaused;
+// Disconnection flags set while paused; used to compute time offsets when
+// samples resume so the pause gap is spliced out of the recording.
+@property(assign, nonatomic) BOOL videoIsDisconnected;
+@property(assign, nonatomic) BOOL audioIsDisconnected;
+@property(assign, nonatomic) BOOL isAudioSetup;
+
+/// Number of frames currently pending processing.
+@property(assign, nonatomic) int streamingPendingFramesCount;
+
+/// Maximum number of frames pending processing.
+@property(assign, nonatomic) int maxStreamingPendingFramesCount;
+
+// UIDeviceOrientationUnknown means "not locked".
+@property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation;
+@property(assign, nonatomic) CMTime lastVideoSampleTime;
+@property(assign, nonatomic) CMTime lastAudioSampleTime;
+@property(assign, nonatomic) CMTime videoTimeOffset;
+@property(assign, nonatomic) CMTime audioTimeOffset;
+// Used for accelerometer-based orientation queries elsewhere in the class.
+@property(nonatomic) CMMotionManager *motionManager;
+@property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
+/// All FLTCam's state access and capture session related operations should be on run on this queue.
+@property(strong, nonatomic) dispatch_queue_t captureSessionQueue;
+/// The queue on which `latestPixelBuffer` property is accessed.
+/// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`.
+@property(strong, nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;
+/// The queue on which captured photos (not videos) are written to disk.
+/// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation.
+@property(strong, nonatomic) dispatch_queue_t photoIOQueue;
+@property(assign, nonatomic) UIDeviceOrientation deviceOrientation;
+@end
+
+@implementation FLTCam
+
+NSString *const errorMethod = @"error";
+
+// Convenience initializer: forwards to the designated initializer with a
+// freshly created AVCaptureSession (the injectable session exists for tests).
+- (instancetype)initWithCameraName:(NSString *)cameraName
+                  resolutionPreset:(NSString *)resolutionPreset
+                       enableAudio:(BOOL)enableAudio
+                       orientation:(UIDeviceOrientation)orientation
+               captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+                             error:(NSError **)error {
+  return [self initWithCameraName:cameraName
+                 resolutionPreset:resolutionPreset
+                      enableAudio:enableAudio
+                      orientation:orientation
+                   captureSession:[[AVCaptureSession alloc] init]
+              captureSessionQueue:captureSessionQueue
+                            error:error];
+}
+
+// Designated initializer. Configures the capture session with video (and
+// later, on demand, audio) I/O, sets up the photo output, and starts
+// accelerometer updates used for orientation handling.
+// Reports failures through the `error` out-parameter and returns nil on
+// unrecoverable device-input errors.
+- (instancetype)initWithCameraName:(NSString *)cameraName
+                  resolutionPreset:(NSString *)resolutionPreset
+                       enableAudio:(BOOL)enableAudio
+                       orientation:(UIDeviceOrientation)orientation
+                    captureSession:(AVCaptureSession *)captureSession
+               captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+                             error:(NSError **)error {
+  self = [super init];
+  NSAssert(self, @"super init cannot be nil");
+  @try {
+    _resolutionPreset = FLTGetFLTResolutionPresetForString(resolutionPreset);
+  } @catch (NSError *e) {
+    // Fix: guard against callers passing NULL for the out-error before
+    // dereferencing it.
+    if (error) {
+      *error = e;
+    }
+    // NOTE(review): initialization continues after an invalid preset; the
+    // caller is expected to inspect `error`. Confirm this is intentional.
+  }
+  _enableAudio = enableAudio;
+  _captureSessionQueue = captureSessionQueue;
+  _pixelBufferSynchronizationQueue =
+      dispatch_queue_create("io.flutter.camera.pixelBufferSynchronizationQueue", NULL);
+  _photoIOQueue = dispatch_queue_create("io.flutter.camera.photoIOQueue", NULL);
+  _captureSession = captureSession;
+  _captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
+  _flashMode = _captureDevice.hasFlash ? FLTFlashModeAuto : FLTFlashModeOff;
+  _exposureMode = FLTExposureModeAuto;
+  _focusMode = FLTFocusModeAuto;
+  _lockedCaptureOrientation = UIDeviceOrientationUnknown;
+  _deviceOrientation = orientation;
+  _videoFormat = kCVPixelFormatType_32BGRA;
+  _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary];
+
+  // To limit memory consumption, limit the number of frames pending processing.
+  // After some testing, 4 was determined to be the best maximum value.
+  // https://github.com/flutter/plugins/pull/4520#discussion_r766335637
+  _maxStreamingPendingFramesCount = 4;
+
+  NSError *localError = nil;
+  _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice
+                                                             error:&localError];
+
+  if (localError) {
+    // Fix: null-guard the out-error here as well.
+    if (error) {
+      *error = localError;
+    }
+    return nil;
+  }
+
+  _captureVideoOutput = [AVCaptureVideoDataOutput new];
+  _captureVideoOutput.videoSettings =
+      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)};
+  [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
+  // Sample buffer callbacks are delivered on the capture session queue.
+  [_captureVideoOutput setSampleBufferDelegate:self queue:captureSessionQueue];
+
+  AVCaptureConnection *connection =
+      [AVCaptureConnection connectionWithInputPorts:_captureVideoInput.ports
+                                             output:_captureVideoOutput];
+
+  // Mirror the front camera so the preview matches user expectations.
+  if ([_captureDevice position] == AVCaptureDevicePositionFront) {
+    connection.videoMirrored = YES;
+  }
+
+  [_captureSession addInputWithNoConnections:_captureVideoInput];
+  [_captureSession addOutputWithNoConnections:_captureVideoOutput];
+  [_captureSession addConnection:connection];
+
+  if (@available(iOS 10.0, *)) {
+    _capturePhotoOutput = [AVCapturePhotoOutput new];
+    [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
+    [_captureSession addOutput:_capturePhotoOutput];
+  }
+  _motionManager = [[CMMotionManager alloc] init];
+  [_motionManager startAccelerometerUpdates];
+
+  [self setCaptureSessionPreset:_resolutionPreset];
+  [self updateOrientation];
+
+  return self;
+}
+
+// Begins the flow of data through the capture session.
+- (void)start {
+  [_captureSession startRunning];
+}
+
+// Halts the flow of data through the capture session.
+- (void)stop {
+  [_captureSession stopRunning];
+}
+
+// Custom setter: keeps the video output's pixel format in sync with the
+// `videoFormat` property.
+- (void)setVideoFormat:(OSType)videoFormat {
+  _videoFormat = videoFormat;
+  _captureVideoOutput.videoSettings =
+      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
+}
+
+// Records a new physical device orientation and refreshes the capture
+// connections; no-op when the orientation is unchanged.
+- (void)setDeviceOrientation:(UIDeviceOrientation)orientation {
+  if (_deviceOrientation == orientation) {
+    return;
+  }
+
+  _deviceOrientation = orientation;
+  [self updateOrientation];
+}
+
+// Pushes the effective orientation to the photo and video outputs.
+- (void)updateOrientation {
+  // Skipped while recording — presumably so the in-progress video keeps a
+  // single orientation. TODO(review): confirm.
+  if (_isRecording) {
+    return;
+  }
+
+  // A locked capture orientation takes precedence over the device's physical
+  // orientation.
+  UIDeviceOrientation orientation = (_lockedCaptureOrientation != UIDeviceOrientationUnknown)
+                                        ? _lockedCaptureOrientation
+                                        : _deviceOrientation;
+
+  [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput];
+  [self updateOrientation:orientation forCaptureOutput:_captureVideoOutput];
+}
+
+// Applies `orientation` to the video connection of a single capture output,
+// if the output exists and the connection supports orientation changes.
+- (void)updateOrientation:(UIDeviceOrientation)orientation
+         forCaptureOutput:(AVCaptureOutput *)captureOutput {
+  if (!captureOutput) {
+    return;
+  }
+
+  AVCaptureConnection *connection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
+  if (connection && connection.isVideoOrientationSupported) {
+    connection.videoOrientation = [self getVideoOrientationForDeviceOrientation:orientation];
+  }
+}
+
+// Captures a still photo and completes `result` with the saved JPEG's path.
+// Must be called on the capture session queue (asserted below).
+- (void)captureToFile:(FLTThreadSafeFlutterResult *)result API_AVAILABLE(ios(10)) {
+  AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+  if (_resolutionPreset == FLTResolutionPresetMax) {
+    [settings setHighResolutionPhotoEnabled:YES];
+  }
+
+  AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(_flashMode);
+  if (avFlashMode != -1) {
+    [settings setFlashMode:avFlashMode];
+  }
+  // Fix: `error` was previously left uninitialized. Because
+  // getTemporaryFilePathWithExtension: takes the error BY VALUE it can never
+  // write back to this variable, so the `if (error)` check below read an
+  // uninitialized pointer (undefined behavior). Initializing to nil makes
+  // the check well-defined. NOTE(review): helper failures still surface only
+  // as a nil path because the error cannot propagate; consider changing the
+  // helper to take NSError ** (requires updating all call sites together).
+  NSError *error = nil;
+  NSString *path = [self getTemporaryFilePathWithExtension:@"jpg"
+                                                 subfolder:@"pictures"
+                                                    prefix:@"CAP_"
+                                                     error:error];
+  if (error) {
+    [result sendError:error];
+    return;
+  }
+
+  __weak typeof(self) weakSelf = self;
+  FLTSavePhotoDelegate *savePhotoDelegate = [[FLTSavePhotoDelegate alloc]
+       initWithPath:path
+            ioQueue:self.photoIOQueue
+  completionHandler:^(NSString *_Nullable path, NSError *_Nullable error) {
+    typeof(self) strongSelf = weakSelf;
+    if (!strongSelf) return;
+    dispatch_async(strongSelf.captureSessionQueue, ^{
+      // cannot use the outter `strongSelf`
+      typeof(self) strongSelf = weakSelf;
+      if (!strongSelf) return;
+      // Release the delegate now that the capture for this settings object
+      // has finished.
+      [strongSelf.inProgressSavePhotoDelegates removeObjectForKey:@(settings.uniqueID)];
+    });
+
+    if (error) {
+      [result sendError:error];
+    } else {
+      NSAssert(path, @"Path must not be nil if no error.");
+      [result sendSuccessWithData:path];
+    }
+  }];
+
+  NSAssert(dispatch_get_specific(FLTCaptureSessionQueueSpecific),
+           @"save photo delegate references must be updated on the capture session queue");
+  // Keep the delegate alive until its completion handler runs.
+  self.inProgressSavePhotoDelegates[@(settings.uniqueID)] = savePhotoDelegate;
+  [self.capturePhotoOutput capturePhotoWithSettings:settings delegate:savePhotoDelegate];
+}
+
+// Translates a device orientation into the AVFoundation video orientation.
+// Note: device orientation is flipped compared to video orientation, so the
+// two landscape cases map to their opposites; anything unrecognized falls
+// back to portrait.
+- (AVCaptureVideoOrientation)getVideoOrientationForDeviceOrientation:
+    (UIDeviceOrientation)deviceOrientation {
+  switch (deviceOrientation) {
+    case UIDeviceOrientationPortraitUpsideDown:
+      return AVCaptureVideoOrientationPortraitUpsideDown;
+    case UIDeviceOrientationLandscapeLeft:
+      return AVCaptureVideoOrientationLandscapeRight;
+    case UIDeviceOrientationLandscapeRight:
+      return AVCaptureVideoOrientationLandscapeLeft;
+    case UIDeviceOrientationPortrait:
+    default:
+      return AVCaptureVideoOrientationPortrait;
+  }
+}
+
+// Builds <Documents>/camera/<subfolder>/<prefix><UUID>.<extension>, creating
+// the directory if needed. Returns nil when directory creation fails.
+//
+// NOTE(review): `error` is taken BY VALUE (NSError *), not by reference
+// (NSError **), so the error produced by createDirectoryAtPath: below can
+// never propagate to the caller — callers' `if (error)` checks after
+// invoking this method are ineffective. A nil return is currently the only
+// failure signal. Fixing this requires changing the signature and every
+// call site together.
+- (NSString *)getTemporaryFilePathWithExtension:(NSString *)extension
+                                      subfolder:(NSString *)subfolder
+                                         prefix:(NSString *)prefix
+                                          error:(NSError *)error {
+  NSString *docDir =
+      NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
+  NSString *fileDir =
+      [[docDir stringByAppendingPathComponent:@"camera"] stringByAppendingPathComponent:subfolder];
+  NSString *fileName = [prefix stringByAppendingString:[[NSUUID UUID] UUIDString]];
+  NSString *file =
+      [[fileDir stringByAppendingPathComponent:fileName] stringByAppendingPathExtension:extension];
+
+  NSFileManager *fm = [NSFileManager defaultManager];
+  if (![fm fileExistsAtPath:fileDir]) {
+    [[NSFileManager defaultManager] createDirectoryAtPath:fileDir
+                              withIntermediateDirectories:true
+                                               attributes:nil
+                                                    error:&error];
+    if (error) {
+      return nil;
+    }
+  }
+
+  return file;
+}
+
+// Applies the best available session preset for the requested resolution and
+// records the resulting preview size. The switch intentionally falls through:
+// when the preferred preset for a level is unsupported, the next lower level
+// is tried, down to AVCaptureSessionPresetLow. Throws an NSError if even the
+// lowest preset is unavailable.
+- (void)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset {
+  switch (resolutionPreset) {
+    case FLTResolutionPresetMax:
+    case FLTResolutionPresetUltraHigh:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
+        _previewSize = CGSizeMake(3840, 2160);
+        break;
+      }
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
+        _captureSession.sessionPreset = AVCaptureSessionPresetHigh;
+        _previewSize =
+            CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
+                       _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
+        break;
+      }
+      // Intentional fall-through to the next lower preset.
+    case FLTResolutionPresetVeryHigh:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
+        _previewSize = CGSizeMake(1920, 1080);
+        break;
+      }
+      // Intentional fall-through to the next lower preset.
+    case FLTResolutionPresetHigh:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
+        _previewSize = CGSizeMake(1280, 720);
+        break;
+      }
+      // Intentional fall-through to the next lower preset.
+    case FLTResolutionPresetMedium:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
+        _previewSize = CGSizeMake(640, 480);
+        break;
+      }
+      // Intentional fall-through to the next lower preset.
+    case FLTResolutionPresetLow:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
+        _captureSession.sessionPreset = AVCaptureSessionPreset352x288;
+        _previewSize = CGSizeMake(352, 288);
+        break;
+      }
+      // Intentional fall-through to the lowest preset.
+    default:
+      if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
+        _captureSession.sessionPreset = AVCaptureSessionPresetLow;
+        _previewSize = CGSizeMake(352, 288);
+      } else {
+        NSError *error =
+            [NSError errorWithDomain:NSCocoaErrorDomain
+                                code:NSURLErrorUnknown
+                            userInfo:@{
+                              NSLocalizedDescriptionKey :
+                                  @"No capture session available for current capture session."
+                            }];
+        @throw error;
+      }
+  }
+}
+
+// AVCaptureVideoDataOutput / AVCaptureAudioDataOutput delegate callback.
+// Runs on `captureSessionQueue` (the queue passed to
+// setSampleBufferDelegate:queue:). Handles three concerns per sample:
+// 1) preview delivery via `latestPixelBuffer`, 2) image streaming to Dart,
+// and 3) appending samples to the video/audio writers while recording.
+- (void)captureOutput:(AVCaptureOutput *)output
+    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+           fromConnection:(AVCaptureConnection *)connection {
+  if (output == _captureVideoOutput) {
+    CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+    CFRetain(newBuffer);
+
+    __block CVPixelBufferRef previousPixelBuffer = nil;
+    // Use `dispatch_sync` to avoid unnecessary context switch under common non-contest scenarios;
+    // Under rare contest scenarios, it will not block for too long since the critical section is
+    // quite lightweight.
+    dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
+      // No need weak self because it's dispatch_sync.
+      previousPixelBuffer = self.latestPixelBuffer;
+      self.latestPixelBuffer = newBuffer;
+    });
+    // Release the buffer the engine never picked up, if any.
+    if (previousPixelBuffer) {
+      CFRelease(previousPixelBuffer);
+    }
+    if (_onFrameAvailable) {
+      _onFrameAvailable();
+    }
+  }
+  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+    [_methodChannel invokeMethod:errorMethod
+                       arguments:@"sample buffer is not ready. Skipping sample"];
+    return;
+  }
+  if (_isStreamingImages) {
+    FlutterEventSink eventSink = _imageStreamHandler.eventSink;
+    // Drop frames when the Dart side has not yet acknowledged enough frames
+    // (see receivedImageStreamData) to cap memory usage.
+    if (eventSink && (self.streamingPendingFramesCount < self.maxStreamingPendingFramesCount)) {
+      self.streamingPendingFramesCount++;
+      CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+      // Must lock base address before accessing the pixel data
+      CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+      size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
+      size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);
+
+      NSMutableArray *planes = [NSMutableArray array];
+
+      const Boolean isPlanar = CVPixelBufferIsPlanar(pixelBuffer);
+      size_t planeCount;
+      if (isPlanar) {
+        planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
+      } else {
+        planeCount = 1;
+      }
+
+      // Copy each plane's bytes into stdlib containers for the platform
+      // channel message.
+      for (int i = 0; i < planeCount; i++) {
+        void *planeAddress;
+        size_t bytesPerRow;
+        size_t height;
+        size_t width;
+
+        if (isPlanar) {
+          planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i);
+          bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i);
+          height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i);
+          width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i);
+        } else {
+          planeAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
+          bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
+          height = CVPixelBufferGetHeight(pixelBuffer);
+          width = CVPixelBufferGetWidth(pixelBuffer);
+        }
+
+        NSNumber *length = @(bytesPerRow * height);
+        NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue];
+
+        NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary];
+        planeBuffer[@"bytesPerRow"] = @(bytesPerRow);
+        planeBuffer[@"width"] = @(width);
+        planeBuffer[@"height"] = @(height);
+        planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes];
+
+        [planes addObject:planeBuffer];
+      }
+      // Lock the base address before accessing pixel data, and unlock it afterwards.
+      // Done accessing the `pixelBuffer` at this point.
+      CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+      NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary];
+      imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth];
+      imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight];
+      imageBuffer[@"format"] = @(_videoFormat);
+      imageBuffer[@"planes"] = planes;
+      imageBuffer[@"lensAperture"] = [NSNumber numberWithFloat:[_captureDevice lensAperture]];
+      Float64 exposureDuration = CMTimeGetSeconds([_captureDevice exposureDuration]);
+      Float64 nsExposureDuration = 1000000000 * exposureDuration;
+      // NOTE(review): Float64 nanoseconds truncated into an int; overflows
+      // for exposures longer than ~2.1s — confirm acceptable range.
+      imageBuffer[@"sensorExposureTime"] = [NSNumber numberWithInt:nsExposureDuration];
+      imageBuffer[@"sensorSensitivity"] = [NSNumber numberWithFloat:[_captureDevice ISO]];
+
+      // Event sinks must be invoked on the main (platform) thread.
+      dispatch_async(dispatch_get_main_queue(), ^{
+        eventSink(imageBuffer);
+      });
+    }
+  }
+  if (_isRecording && !_isRecordingPaused) {
+    if (_videoWriter.status == AVAssetWriterStatusFailed) {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
+      return;
+    }
+
+    CFRetain(sampleBuffer);
+    CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
+    // Lazily start the writer session at the first sample's timestamp.
+    if (_videoWriter.status != AVAssetWriterStatusWriting) {
+      [_videoWriter startWriting];
+      [_videoWriter startSessionAtSourceTime:currentSampleTime];
+    }
+
+    if (output == _captureVideoOutput) {
+      if (_videoIsDisconnected) {
+        _videoIsDisconnected = NO;
+
+        // Accumulate the pause gap so it is spliced out of the recording.
+        if (_videoTimeOffset.value == 0) {
+          _videoTimeOffset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
+        } else {
+          CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
+          _videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset);
+        }
+
+        return;
+      }
+
+      _lastVideoSampleTime = currentSampleTime;
+
+      CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+      CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset);
+      [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime];
+    } else {
+      CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
+
+      if (dur.value > 0) {
+        currentSampleTime = CMTimeAdd(currentSampleTime, dur);
+      }
+
+      if (_audioIsDisconnected) {
+        _audioIsDisconnected = NO;
+
+        // Accumulate the pause gap for the audio track as well.
+        if (_audioTimeOffset.value == 0) {
+          _audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
+        } else {
+          CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
+          _audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset);
+        }
+
+        return;
+      }
+
+      _lastAudioSampleTime = currentSampleTime;
+
+      if (_audioTimeOffset.value != 0) {
+        // NOTE(review): the retained buffer is released BEFORE adjustTime:
+        // reads it; this relies on the framework still owning the buffer for
+        // the duration of this callback. adjustTime: returns a new retained
+        // buffer (CF_RETURNS_RETAINED) released at the end of this method.
+        CFRelease(sampleBuffer);
+        sampleBuffer = [self adjustTime:sampleBuffer by:_audioTimeOffset];
+      }
+
+      [self newAudioSample:sampleBuffer];
+    }
+
+    CFRelease(sampleBuffer);
+  }
+}
+
+// Returns a copy of `sample` with all timing info shifted earlier by
+// `offset`; used to splice pause gaps out of the recording. The returned
+// buffer is retained (CF_RETURNS_RETAINED); the caller must release it.
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset CF_RETURNS_RETAINED {
+  CMItemCount count;
+  // First call queries the number of timing entries; second call fills them.
+  CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
+  CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
+  CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
+  for (CMItemCount i = 0; i < count; i++) {
+    pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
+    pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
+  }
+  CMSampleBufferRef sout;
+  CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
+  free(pInfo);
+  return sout;
+}
+
+// Appends a video sample to the writer input, reporting writer failures and
+// append failures over the method channel.
+- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
+  if (_videoWriter.status != AVAssetWriterStatusWriting) {
+    if (_videoWriter.status == AVAssetWriterStatusFailed) {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
+    }
+    return;
+  }
+  // Samples arriving while the input is busy are dropped silently.
+  if (_videoWriterInput.readyForMoreMediaData) {
+    if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) {
+      [_methodChannel
+          invokeMethod:errorMethod
+             arguments:[NSString stringWithFormat:@"%@", @"Unable to write to video input"]];
+    }
+  }
+}
+
+// Audio counterpart of newVideoSample:, appending to the audio writer input.
+- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer {
+  if (_videoWriter.status != AVAssetWriterStatusWriting) {
+    if (_videoWriter.status == AVAssetWriterStatusFailed) {
+      [_methodChannel invokeMethod:errorMethod
+                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
+    }
+    return;
+  }
+  // Samples arriving while the input is busy are dropped silently.
+  if (_audioWriterInput.readyForMoreMediaData) {
+    if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
+      [_methodChannel
+          invokeMethod:errorMethod
+             arguments:[NSString stringWithFormat:@"%@", @"Unable to write to audio input"]];
+    }
+  }
+}
+
+// Stops the session and detaches every input and output, fully releasing the
+// camera hardware.
+- (void)close {
+  [_captureSession stopRunning];
+  for (AVCaptureInput *input in [_captureSession inputs]) {
+    [_captureSession removeInput:input];
+  }
+  for (AVCaptureOutput *output in [_captureSession outputs]) {
+    [_captureSession removeOutput:output];
+  }
+}
+
+- (void)dealloc {
+  // Release the last preview frame the engine never consumed.
+  if (_latestPixelBuffer) {
+    CFRelease(_latestPixelBuffer);
+  }
+  [_motionManager stopAccelerometerUpdates];
+}
+
+// FlutterTexture protocol: hands the most recent frame to the engine.
+// Ownership of the returned (retained) buffer transfers to the caller, so
+// `latestPixelBuffer` is cleared without releasing it here.
+- (CVPixelBufferRef)copyPixelBuffer {
+  __block CVPixelBufferRef pixelBuffer = nil;
+  // Use `dispatch_sync` because `copyPixelBuffer` API requires synchronous return.
+  dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
+    // No need weak self because it's dispatch_sync.
+    pixelBuffer = self.latestPixelBuffer;
+    self.latestPixelBuffer = nil;
+  });
+  return pixelBuffer;
+}
+
+// Convenience overload: records without concurrent image streaming.
+- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
+  [self startVideoRecordingWithResult:result messengerForStreaming:nil];
+}
+
+// Starts a new MP4 recording, optionally starting an image stream on
+// `messenger` so frames can be streamed while recording. Fails with an error
+// result if a recording is already in progress or the writer cannot be set up.
+- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result
+                messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger {
+  if (!_isRecording) {
+    if (messenger != nil) {
+      [self startImageStreamWithMessenger:messenger];
+    }
+
+    // Fix: `error` was previously left uninitialized. The path helper takes
+    // the error BY VALUE and can never write back to this variable, so the
+    // `if (error)` check below read an uninitialized pointer (undefined
+    // behavior). Initializing to nil makes the check well-defined; helper
+    // failures are still caught by the setupWriterForPath: nil-path check.
+    NSError *error = nil;
+    _videoRecordingPath = [self getTemporaryFilePathWithExtension:@"mp4"
+                                                        subfolder:@"videos"
+                                                           prefix:@"REC_"
+                                                            error:error];
+    if (error) {
+      [result sendError:error];
+      return;
+    }
+    if (![self setupWriterForPath:_videoRecordingPath]) {
+      [result sendErrorWithCode:@"IOError" message:@"Setup Writer Failed" details:nil];
+      return;
+    }
+    // Reset pause/splice bookkeeping for the new recording.
+    _isRecording = YES;
+    _isRecordingPaused = NO;
+    _videoTimeOffset = CMTimeMake(0, 1);
+    _audioTimeOffset = CMTimeMake(0, 1);
+    _videoIsDisconnected = NO;
+    _audioIsDisconnected = NO;
+    [result sendSuccess];
+  } else {
+    [result sendErrorWithCode:@"Error" message:@"Video is already recording" details:nil];
+  }
+}
+
+// Finalizes the in-progress recording and completes `result` with the file
+// path on success.
+//
+// NOTE(review): when the writer never started (status is
+// AVAssetWriterStatusUnknown) no result is sent at all — confirm the Dart
+// side tolerates a missing reply in that case.
+- (void)stopVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
+  if (_isRecording) {
+    _isRecording = NO;
+
+    if (_videoWriter.status != AVAssetWriterStatusUnknown) {
+      [_videoWriter finishWritingWithCompletionHandler:^{
+        if (self->_videoWriter.status == AVAssetWriterStatusCompleted) {
+          // Re-apply orientation that was frozen while recording.
+          [self updateOrientation];
+          [result sendSuccessWithData:self->_videoRecordingPath];
+          self->_videoRecordingPath = nil;
+        } else {
+          [result sendErrorWithCode:@"IOError"
+                            message:@"AVAssetWriter could not finish writing!"
+                            details:nil];
+        }
+      }];
+    }
+  } else {
+    NSError *error =
+        [NSError errorWithDomain:NSCocoaErrorDomain
+                            code:NSURLErrorResourceUnavailable
+                        userInfo:@{NSLocalizedDescriptionKey : @"Video is not recording!"}];
+    [result sendError:error];
+  }
+}
+
+// Pauses recording. The disconnect flags make the sample buffer delegate
+// accumulate the pause gap as a time offset when samples resume.
+- (void)pauseVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
+  _isRecordingPaused = YES;
+  _videoIsDisconnected = YES;
+  _audioIsDisconnected = YES;
+  [result sendSuccess];
+}
+
+// Resumes a paused recording; sample appending restarts on the next frame.
+- (void)resumeVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
+  _isRecordingPaused = NO;
+  [result sendSuccess];
+}
+
+// Locks capture to the given orientation string; fails the result if the
+// string does not name a known orientation.
+- (void)lockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result
+                             orientation:(NSString *)orientationStr {
+  UIDeviceOrientation orientation;
+  @try {
+    orientation = FLTGetUIDeviceOrientationForString(orientationStr);
+  } @catch (NSError *e) {
+    [result sendError:e];
+    return;
+  }
+
+  if (_lockedCaptureOrientation != orientation) {
+    _lockedCaptureOrientation = orientation;
+    [self updateOrientation];
+  }
+
+  [result sendSuccess];
+}
+
+// Clears the orientation lock (UIDeviceOrientationUnknown means "unlocked")
+// and re-applies the physical device orientation.
+- (void)unlockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result {
+  _lockedCaptureOrientation = UIDeviceOrientationUnknown;
+  [self updateOrientation];
+  [result sendSuccess];
+}
+
+// Sets the flash mode. Torch is handled by toggling the device's torch
+// directly; all other modes validate against the photo output's supported
+// flash modes and ensure the torch is off.
+- (void)setFlashModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
+  FLTFlashMode mode;
+  @try {
+    mode = FLTGetFLTFlashModeForString(modeStr);
+  } @catch (NSError *e) {
+    [result sendError:e];
+    return;
+  }
+  if (mode == FLTFlashModeTorch) {
+    if (!_captureDevice.hasTorch) {
+      [result sendErrorWithCode:@"setFlashModeFailed"
+                        message:@"Device does not support torch mode"
+                        details:nil];
+      return;
+    }
+    if (!_captureDevice.isTorchAvailable) {
+      [result sendErrorWithCode:@"setFlashModeFailed"
+                        message:@"Torch mode is currently not available"
+                        details:nil];
+      return;
+    }
+    if (_captureDevice.torchMode != AVCaptureTorchModeOn) {
+      [_captureDevice lockForConfiguration:nil];
+      [_captureDevice setTorchMode:AVCaptureTorchModeOn];
+      [_captureDevice unlockForConfiguration];
+    }
+  } else {
+    if (!_captureDevice.hasFlash) {
+      [result sendErrorWithCode:@"setFlashModeFailed"
+                        message:@"Device does not have flash capabilities"
+                        details:nil];
+      return;
+    }
+    AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(mode);
+    if (![_capturePhotoOutput.supportedFlashModes
+            containsObject:[NSNumber numberWithInt:((int)avFlashMode)]]) {
+      [result sendErrorWithCode:@"setFlashModeFailed"
+                        message:@"Device does not support this specific flash mode"
+                        details:nil];
+      return;
+    }
+    // Leaving torch mode: make sure the torch is switched off.
+    if (_captureDevice.torchMode != AVCaptureTorchModeOff) {
+      [_captureDevice lockForConfiguration:nil];
+      [_captureDevice setTorchMode:AVCaptureTorchModeOff];
+      [_captureDevice unlockForConfiguration];
+    }
+  }
+  _flashMode = mode;
+  [result sendSuccess];
+}
+
+/// Parses and stores the requested exposure mode, then applies it to the
+/// capture device. Replies with an error for unknown mode strings.
+- (void)setExposureModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
+ FLTExposureMode mode;
+ @try {
+ mode = FLTGetFLTExposureModeForString(modeStr);
+ } @catch (NSError *e) {
+ [result sendError:e];
+ return;
+ }
+ _exposureMode = mode;
+ [self applyExposureMode];
+ [result sendSuccess];
+}
+
+/// Pushes the stored `_exposureMode` to the capture device.
+- (void)applyExposureMode {
+ [_captureDevice lockForConfiguration:nil];
+ switch (_exposureMode) {
+ case FLTExposureModeLocked:
+ // AVCaptureExposureModeAutoExpose performs a single exposure pass and
+ // then leaves exposure fixed, which approximates "locked".
+ // NOTE(review): support is not checked here, unlike the Auto branch.
+ [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
+ break;
+ case FLTExposureModeAuto:
+ if ([_captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
+ [_captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
+ } else {
+ [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
+ }
+ break;
+ }
+ [_captureDevice unlockForConfiguration];
+}
+
+/// Parses and stores the requested focus mode, then applies it to the capture
+/// device. Replies with an error for unknown mode strings.
+- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
+ FLTFocusMode mode;
+ @try {
+ mode = FLTGetFLTFocusModeForString(modeStr);
+ } @catch (NSError *e) {
+ [result sendError:e];
+ return;
+ }
+ _focusMode = mode;
+ [self applyFocusMode];
+ [result sendSuccess];
+}
+
+/// Applies the stored focus mode to the current capture device.
+- (void)applyFocusMode {
+ [self applyFocusMode:_focusMode onDevice:_captureDevice];
+}
+
+/// Applies `focusMode` to `captureDevice`, degrading gracefully when the
+/// preferred AVFoundation mode is unsupported (Auto falls back from
+/// continuous auto-focus to one-shot auto-focus; Locked is a no-op if even
+/// one-shot auto-focus is unsupported).
+- (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice {
+ [captureDevice lockForConfiguration:nil];
+ switch (focusMode) {
+ case FLTFocusModeLocked:
+ if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+ [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+ }
+ break;
+ case FLTFocusModeAuto:
+ if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
+ [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+ } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+ [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+ }
+ break;
+ }
+ [captureDevice unlockForConfiguration];
+}
+
+/// Pauses delivery of preview frames to Flutter.
+/// NOTE(review): uses C `true` where the rest of the file uses `YES`.
+- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result {
+ _isPreviewPaused = true;
+ [result sendSuccess];
+}
+
+/// Resumes delivery of preview frames to Flutter.
+- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result {
+ _isPreviewPaused = false;
+ [result sendSuccess];
+}
+
+/// Rotates a normalized (0..1) point-of-interest coordinate into the given
+/// device orientation. The identity case is landscape-left, which suggests the
+/// incoming coordinates are expressed in the sensor's native landscape space —
+/// TODO(review): confirm against the Dart-side caller.
+- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
+ x:(double)x
+ y:(double)y {
+ double oldX = x, oldY = y;
+ switch (orientation) {
+ case UIDeviceOrientationPortrait: // 90 ccw
+ y = 1 - oldX;
+ x = oldY;
+ break;
+ case UIDeviceOrientationPortraitUpsideDown: // 90 cw
+ x = 1 - oldY;
+ y = oldX;
+ break;
+ case UIDeviceOrientationLandscapeRight: // 180
+ x = 1 - x;
+ y = 1 - y;
+ break;
+ case UIDeviceOrientationLandscapeLeft:
+ default:
+ // No rotation required
+ break;
+ }
+ return CGPointMake(x, y);
+}
+
+/// Sets the exposure point of interest from normalized (x, y) coordinates,
+/// rotated for the current device orientation, then re-applies the exposure
+/// mode so the device re-meters at the new point.
+- (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y {
+ if (!_captureDevice.isExposurePointOfInterestSupported) {
+ [result sendErrorWithCode:@"setExposurePointFailed"
+ message:@"Device does not have exposure point capabilities"
+ details:nil];
+ return;
+ }
+ UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
+ [_captureDevice lockForConfiguration:nil];
+ [_captureDevice setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
+ x:x
+ y:y]];
+ [_captureDevice unlockForConfiguration];
+ // Retrigger auto exposure
+ [self applyExposureMode];
+ [result sendSuccess];
+}
+
+/// Sets the focus point of interest from normalized (x, y) coordinates,
+/// rotated for the current device orientation, then re-applies the focus mode
+/// so the device refocuses at the new point.
+- (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y {
+ if (!_captureDevice.isFocusPointOfInterestSupported) {
+ [result sendErrorWithCode:@"setFocusPointFailed"
+ message:@"Device does not have focus point capabilities"
+ details:nil];
+ return;
+ }
+ UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
+ [_captureDevice lockForConfiguration:nil];
+
+ [_captureDevice setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
+ x:x
+ y:y]];
+ [_captureDevice unlockForConfiguration];
+ // Retrigger auto focus
+ [self applyFocusMode];
+ [result sendSuccess];
+}
+
+/// Sets the exposure target bias (in EV) and echoes the requested offset back.
+/// The success reply is sent immediately rather than from the bias-change
+/// completion handler, which is passed as nil.
+- (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset {
+ [_captureDevice lockForConfiguration:nil];
+ [_captureDevice setExposureTargetBias:offset completionHandler:nil];
+ [_captureDevice unlockForConfiguration];
+ [result sendSuccessWithData:@(offset)];
+}
+
+/// Starts streaming camera frames to Dart using a freshly created default
+/// stream handler bound to the capture session queue.
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
+ [self startImageStreamWithMessenger:messenger
+ imageStreamHandler:[[FLTImageStreamHandler alloc]
+ initWithCaptureSessionQueue:_captureSessionQueue]];
+}
+
+/// Starts streaming camera frames to Dart over the image-stream event channel.
+/// The handler injection point exists for unit testing (see FLTCam_Test.h).
+/// Reports an error over the method channel if a stream is already active.
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
+ imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler {
+ if (!_isStreamingImages) {
+ FlutterEventChannel *eventChannel = [FlutterEventChannel
+ eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream"
+ binaryMessenger:messenger];
+ FLTThreadSafeEventChannel *threadSafeEventChannel =
+ [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];
+
+ _imageStreamHandler = imageStreamHandler;
+ __weak typeof(self) weakSelf = self;
+ [threadSafeEventChannel setStreamHandler:_imageStreamHandler
+ completion:^{
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+
+ dispatch_async(strongSelf.captureSessionQueue, ^{
+ // Cannot reuse the outer strongSelf: the weak reference
+ // must be re-resolved on the session queue.
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+
+ strongSelf.isStreamingImages = YES;
+ strongSelf.streamingPendingFramesCount = 0;
+ });
+ }];
+ } else {
+ [_methodChannel invokeMethod:errorMethod
+ arguments:@"Images from camera are already streaming!"];
+ }
+}
+
+/// Stops streaming camera frames to Dart; reports an error over the method
+/// channel if no stream is active.
+- (void)stopImageStream {
+ if (_isStreamingImages) {
+ _isStreamingImages = NO;
+ _imageStreamHandler = nil;
+ } else {
+ [_methodChannel invokeMethod:errorMethod arguments:@"Images from camera are not streaming!"];
+ }
+}
+
+/// Decrements the count of frames in flight; presumably invoked when the Dart
+/// side acknowledges receipt of a streamed frame (back-pressure) — confirm at
+/// the method-channel call site.
+- (void)receivedImageStreamData {
+ self.streamingPendingFramesCount--;
+}
+
+/// Replies with the maximum available video zoom factor.
+- (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
+ CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];
+
+ [result sendSuccessWithData:[NSNumber numberWithFloat:maxZoomFactor]];
+}
+
+/// Replies with the minimum available video zoom factor.
+- (void)getMinZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
+ CGFloat minZoomFactor = [self getMinAvailableZoomFactor];
+ [result sendSuccessWithData:[NSNumber numberWithFloat:minZoomFactor]];
+}
+
+/// Sets the device's video zoom factor after validating it against the
+/// available range. Unlike the other configuration methods in this class,
+/// this one checks the lockForConfiguration: error and surfaces it.
+- (void)setZoomLevel:(CGFloat)zoom Result:(FLTThreadSafeFlutterResult *)result {
+ CGFloat maxAvailableZoomFactor = [self getMaxAvailableZoomFactor];
+ CGFloat minAvailableZoomFactor = [self getMinAvailableZoomFactor];
+
+ if (maxAvailableZoomFactor < zoom || minAvailableZoomFactor > zoom) {
+ NSString *errorMessage = [NSString
+ stringWithFormat:@"Zoom level out of bounds (zoom level should be between %f and %f).",
+ minAvailableZoomFactor, maxAvailableZoomFactor];
+
+ [result sendErrorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil];
+ return;
+ }
+
+ NSError *error = nil;
+ if (![_captureDevice lockForConfiguration:&error]) {
+ [result sendError:error];
+ return;
+ }
+ _captureDevice.videoZoomFactor = zoom;
+ [_captureDevice unlockForConfiguration];
+
+ [result sendSuccess];
+}
+
+/// Minimum zoom factor supported by the device.
+/// minAvailableVideoZoomFactor is iOS 11+; earlier versions fall back to the
+/// documented AVFoundation minimum of 1.0.
+- (CGFloat)getMinAvailableZoomFactor {
+ if (@available(iOS 11.0, *)) {
+ return _captureDevice.minAvailableVideoZoomFactor;
+ } else {
+ return 1.0;
+ }
+}
+
+/// Maximum zoom factor supported by the device.
+/// maxAvailableVideoZoomFactor is iOS 11+; earlier versions fall back to the
+/// active format's videoMaxZoomFactor.
+- (CGFloat)getMaxAvailableZoomFactor {
+ if (@available(iOS 11.0, *)) {
+ return _captureDevice.maxAvailableVideoZoomFactor;
+ } else {
+ return _captureDevice.activeFormat.videoMaxZoomFactor;
+ }
+}
+
+/// Creates and configures the AVAssetWriter (MPEG-4) plus its video, pixel
+/// buffer adaptor, and optional audio inputs for recording to `path`.
+/// Returns NO on a nil path or writer-creation failure (failure is also
+/// reported over the method channel). Re-applies torch mode because
+/// reconfiguration can reset it.
+- (BOOL)setupWriterForPath:(NSString *)path {
+ NSError *error = nil;
+ NSURL *outputURL;
+ if (path != nil) {
+ outputURL = [NSURL fileURLWithPath:path];
+ } else {
+ return NO;
+ }
+ // Audio is set up lazily on first recording with audio enabled.
+ if (_enableAudio && !_isAudioSetup) {
+ [self setUpCaptureSessionForAudio];
+ }
+
+ _videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL
+ fileType:AVFileTypeMPEG4
+ error:&error];
+ NSParameterAssert(_videoWriter);
+ if (error) {
+ [_methodChannel invokeMethod:errorMethod arguments:error.description];
+ return NO;
+ }
+
+ NSDictionary *videoSettings = [_captureVideoOutput
+ recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeMPEG4];
+ _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
+ outputSettings:videoSettings];
+
+ _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
+ assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput
+ sourcePixelBufferAttributes:@{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)
+ }];
+
+ NSParameterAssert(_videoWriterInput);
+
+ _videoWriterInput.expectsMediaDataInRealTime = YES;
+
+ // Add the audio input
+ if (_enableAudio) {
+ AudioChannelLayout acl;
+ bzero(&acl, sizeof(acl));
+ acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
+ NSDictionary *audioOutputSettings = nil;
+ // NOTE(review): the original comment claimed other audio settings caused
+ // corrupted output files; keep AAC / 44.1 kHz / mono unless re-verified.
+ audioOutputSettings = @{
+ AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatMPEG4AAC],
+ AVSampleRateKey : [NSNumber numberWithFloat:44100.0],
+ AVNumberOfChannelsKey : [NSNumber numberWithInt:1],
+ AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(acl)],
+ };
+ _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
+ outputSettings:audioOutputSettings];
+ _audioWriterInput.expectsMediaDataInRealTime = YES;
+
+ [_videoWriter addInput:_audioWriterInput];
+ [_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
+ }
+
+ // Reconfiguration can reset the torch; restore it if torch mode is active.
+ if (_flashMode == FLTFlashModeTorch) {
+ [self.captureDevice lockForConfiguration:nil];
+ [self.captureDevice setTorchMode:AVCaptureTorchModeOn];
+ [self.captureDevice unlockForConfiguration];
+ }
+
+ [_videoWriter addInput:_videoWriterInput];
+
+ [_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
+
+ return YES;
+}
+
+/// Adds the default microphone input and an audio data output to the capture
+/// session, setting `_isAudioSetup` on success.
+/// NOTE(review): if canAddInput: returns NO, no error is reported and
+/// `_isAudioSetup` is left unchanged — only the output failure path reports.
+- (void)setUpCaptureSessionForAudio {
+ NSError *error = nil;
+ // Create a device input with the device and add it to the session.
+ // Setup the audio input.
+ AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
+ AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice
+ error:&error];
+ if (error) {
+ [_methodChannel invokeMethod:errorMethod arguments:error.description];
+ }
+ // Setup the audio output.
+ _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
+
+ if ([_captureSession canAddInput:audioInput]) {
+ [_captureSession addInput:audioInput];
+
+ if ([_captureSession canAddOutput:_audioOutput]) {
+ [_captureSession addOutput:_audioOutput];
+ _isAudioSetup = YES;
+ } else {
+ [_methodChannel invokeMethod:errorMethod
+ arguments:@"Unable to add Audio input/output to session capture"];
+ _isAudioSetup = NO;
+ }
+ }
+}
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h
new file mode 100644
index 0000000..19e2842
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h
@@ -0,0 +1,61 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTCam.h"
+#import "FLTSavePhotoDelegate.h"
+
+/// Stream handler that forwards camera frames to Dart over the image-stream
+/// event channel. Exposed in this test header so unit tests can inject it.
+@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
+
+/// The queue on which `eventSink` property should be accessed.
+@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
+
+/// The event sink to stream camera events to Dart.
+///
+/// The property should only be accessed on `captureSessionQueue`.
+/// The block itself should be invoked on the main queue.
+@property FlutterEventSink eventSink;
+
+@end
+
+// APIs exposed for unit testing.
+@interface FLTCam ()
+
+/// The output for video capturing.
+@property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput;
+
+/// The output for photo capturing. Exposed setter for unit tests.
+@property(strong, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
+
+/// True when images from the camera are being streamed.
+@property(assign, nonatomic) BOOL isStreamingImages;
+
+/// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the
+/// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the
+/// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo
+/// capture operations may overlap, so FLTCam has to keep track of multiple delegates in progress,
+/// instead of just a single delegate reference.
+@property(readonly, nonatomic)
+ NSMutableDictionary<NSNumber *, FLTSavePhotoDelegate *> *inProgressSavePhotoDelegates;
+
+/// Delegate callback when receiving a new video or audio sample.
+/// Exposed for unit tests.
+- (void)captureOutput:(AVCaptureOutput *)output
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection;
+
+/// Initializes a camera instance.
+/// Allows for injecting dependencies that are usually internal.
+- (instancetype)initWithCameraName:(NSString *)cameraName
+ resolutionPreset:(NSString *)resolutionPreset
+ enableAudio:(BOOL)enableAudio
+ orientation:(UIDeviceOrientation)orientation
+ captureSession:(AVCaptureSession *)captureSession
+ captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+ error:(NSError **)error;
+
+/// Start streaming images.
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
+ imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler;
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate.h b/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate.h
new file mode 100644
index 0000000..40e4562
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate.h
@@ -0,0 +1,38 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import AVFoundation;
+@import Foundation;
+
+#import "FLTThreadSafeFlutterResult.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// The completion handler block for save photo operations.
+/// Can be called from either main queue or IO queue.
+/// On success, `path` will be present and `error` will be nil. Otherwise, `error` will be
+/// present and `path` will be nil.
+/// @param path the path for successfully saved photo file.
+/// @param error photo capture error or IO error.
+typedef void (^FLTSavePhotoDelegateCompletionHandler)(NSString *_Nullable path,
+ NSError *_Nullable error);
+
+/**
+ Delegate object that handles photo capture results.
+ */
+@interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
+
+/**
+ * Initialize a photo capture delegate.
+ * @param path the path for captured photo file.
+ * @param ioQueue the queue on which captured photos are written to disk.
+ * @param completionHandler The completion handler block for save photo operations. Can
+ * be called from either main queue or IO queue.
+ */
+- (instancetype)initWithPath:(NSString *)path
+ ioQueue:(dispatch_queue_t)ioQueue
+ completionHandler:(FLTSavePhotoDelegateCompletionHandler)completionHandler;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate.m b/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate.m
new file mode 100644
index 0000000..617890c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate.m
@@ -0,0 +1,77 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTSavePhotoDelegate.h"
+#import "FLTSavePhotoDelegate_Test.h"
+
+@interface FLTSavePhotoDelegate ()
+/// The file path for the captured photo.
+@property(readonly, nonatomic) NSString *path;
+/// The queue on which captured photos are written to disk.
+@property(readonly, nonatomic) dispatch_queue_t ioQueue;
+@end
+
+@implementation FLTSavePhotoDelegate
+
+/// Initializes the delegate with the destination file path, the IO queue used
+/// for disk writes, and the completion handler invoked with either the saved
+/// path or an error.
+- (instancetype)initWithPath:(NSString *)path
+ ioQueue:(dispatch_queue_t)ioQueue
+ completionHandler:(FLTSavePhotoDelegateCompletionHandler)completionHandler {
+ self = [super init];
+ NSAssert(self, @"super init cannot be nil");
+ _path = path;
+ _ioQueue = ioQueue;
+ _completionHandler = completionHandler;
+ return self;
+}
+
+/// Shared result handler for both the deprecated (iOS 10) and modern (iOS 11+)
+/// capture callbacks. On a capture error the completion handler fires
+/// immediately; otherwise the photo data is produced and written atomically to
+/// `path` on the IO queue.
+- (void)handlePhotoCaptureResultWithError:(NSError *)error
+ photoDataProvider:(NSData * (^)(void))photoDataProvider {
+ if (error) {
+ self.completionHandler(nil, error);
+ return;
+ }
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.ioQueue, ^{
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+
+ NSData *data = photoDataProvider();
+ NSError *ioError;
+ if ([data writeToFile:strongSelf.path options:NSDataWritingAtomic error:&ioError]) {
+ // Must use strongSelf here: referencing `self` inside this block would
+ // capture self strongly, defeating the weak/strong dance above.
+ strongSelf.completionHandler(strongSelf.path, nil);
+ } else {
+ strongSelf.completionHandler(nil, ioError);
+ }
+ });
+}
+
+// Pre-iOS 11 capture callback, kept for older deployment targets; JPEG data is
+// extracted from the sample buffers via the deprecated representation API.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+#pragma clang diagnostic ignored "-Wdeprecated-implementations"
+- (void)captureOutput:(AVCapturePhotoOutput *)output
+ didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
+ previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
+ resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
+ bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
+ error:(NSError *)error API_AVAILABLE(ios(10)) {
+ [self handlePhotoCaptureResultWithError:error
+ photoDataProvider:^NSData * {
+ return [AVCapturePhotoOutput
+ JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
+ previewPhotoSampleBuffer:
+ previewPhotoSampleBuffer];
+ }];
+}
+#pragma clang diagnostic pop
+
+// iOS 11+ capture callback.
+- (void)captureOutput:(AVCapturePhotoOutput *)output
+ didFinishProcessingPhoto:(AVCapturePhoto *)photo
+ error:(NSError *)error API_AVAILABLE(ios(11.0)) {
+ [self handlePhotoCaptureResultWithError:error
+ photoDataProvider:^NSData * {
+ return [photo fileDataRepresentation];
+ }];
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate_Test.h b/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate_Test.h
new file mode 100644
index 0000000..2d0d4f9
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTSavePhotoDelegate_Test.h
@@ -0,0 +1,22 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTSavePhotoDelegate.h"
+
+/**
+ API exposed for unit tests.
+ */
+@interface FLTSavePhotoDelegate ()
+
+/// The completion handler block for capture and save photo operations.
+/// Can be called from either main queue or IO queue.
+/// Exposed for unit tests to manually trigger the completion.
+@property(readonly, nonatomic) FLTSavePhotoDelegateCompletionHandler completionHandler;
+
+/// Handler to write captured photo data into a file.
+/// @param error the capture error.
+/// @param photoDataProvider a closure that provides photo data.
+- (void)handlePhotoCaptureResultWithError:(NSError *)error
+ photoDataProvider:(NSData * (^)(void))photoDataProvider;
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeEventChannel.h b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeEventChannel.h
new file mode 100644
index 0000000..ddfa754
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeEventChannel.h
@@ -0,0 +1,30 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A thread safe wrapper for FlutterEventChannel that can be called from any thread, by dispatching
+ * its underlying engine calls to the main thread.
+ */
+@interface FLTThreadSafeEventChannel : NSObject
+
+/**
+ * Creates a FLTThreadSafeEventChannel by wrapping a FlutterEventChannel object.
+ * @param channel The FlutterEventChannel object to be wrapped.
+ */
+- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel;
+
+/**
+ * Registers a handler on the main thread for stream setup requests from the Flutter side.
+ * The completion block runs on the main thread.
+ */
+- (void)setStreamHandler:(nullable NSObject<FlutterStreamHandler> *)handler
+ completion:(void (^)(void))completion;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeEventChannel.m b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeEventChannel.m
new file mode 100644
index 0000000..57d154c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeEventChannel.m
@@ -0,0 +1,35 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTThreadSafeEventChannel.h"
+#import "QueueUtils.h"
+
+@interface FLTThreadSafeEventChannel ()
+@property(nonatomic, strong) FlutterEventChannel *channel;
+@end
+
+@implementation FLTThreadSafeEventChannel
+
+/// Wraps `channel` so its stream handler can be set from any thread.
+- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel {
+ self = [super init];
+ if (self) {
+ _channel = channel;
+ }
+ return self;
+}
+
+/// Sets the stream handler on the main queue and invokes `completion` (also on
+/// the main queue) once done.
+- (void)setStreamHandler:(NSObject<FlutterStreamHandler> *)handler
+ completion:(void (^)(void))completion {
+ // WARNING: Should not use weak self, because FLTThreadSafeEventChannel is a local variable
+ // (retained within call stack, but not in the heap). FLTEnsureToRunOnMainQueue may trigger a
+ // context switch (when calling from background thread), in which case using weak self will always
+ // result in a nil self. Alternative to using strong self, we can also create a local strong
+ // variable to be captured by this block.
+ FLTEnsureToRunOnMainQueue(^{
+ [self.channel setStreamHandler:handler];
+ completion();
+ });
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeFlutterResult.h b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeFlutterResult.h
new file mode 100644
index 0000000..6677505
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeFlutterResult.h
@@ -0,0 +1,62 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A thread safe wrapper for FlutterResult that can be called from any thread, by dispatching its
+ * underlying engine calls to the main thread.
+ */
+@interface FLTThreadSafeFlutterResult : NSObject
+
+/**
+ * Gets the original FlutterResult object wrapped by this FLTThreadSafeFlutterResult instance.
+ */
+@property(readonly, nonatomic) FlutterResult flutterResult;
+
+/**
+ * Initializes with a FlutterResult object.
+ * @param result The FlutterResult object that the result will be given to.
+ */
+- (instancetype)initWithResult:(FlutterResult)result;
+
+/**
+ * Sends a successful result on the main thread without any data.
+ */
+- (void)sendSuccess;
+
+/**
+ * Sends a successful result on the main thread with data.
+ * @param data Result data that is sent to the Flutter Dart side.
+ */
+- (void)sendSuccessWithData:(id)data;
+
+/**
+ * Sends an NSError as result on the main thread.
+ * @param error Error that will be sent as FlutterError.
+ */
+- (void)sendError:(NSError *)error;
+
+/**
+ * Sends a FlutterError as result on the main thread.
+ * @param flutterError FlutterError that will be sent to the Flutter Dart side.
+ */
+- (void)sendFlutterError:(FlutterError *)flutterError;
+
+/**
+ * Sends a FlutterError as result on the main thread.
+ */
+- (void)sendErrorWithCode:(NSString *)code
+ message:(nullable NSString *)message
+ details:(nullable id)details;
+
+/**
+ * Sends FlutterMethodNotImplemented as result on the main thread.
+ */
+- (void)sendNotImplemented;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeFlutterResult.m b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeFlutterResult.m
new file mode 100644
index 0000000..283a0d6
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeFlutterResult.m
@@ -0,0 +1,64 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTThreadSafeFlutterResult.h"
+#import <Foundation/Foundation.h>
+#import "QueueUtils.h"
+
+@implementation FLTThreadSafeFlutterResult
+
+/// Wraps `result` so it can be completed safely from any thread.
+/// Declared as returning `instancetype` to match the header declaration
+/// (was `id`, which weakened compile-time type checking at call sites).
+- (instancetype)initWithResult:(FlutterResult)result {
+ self = [super init];
+ if (!self) {
+ return nil;
+ }
+ _flutterResult = result;
+ return self;
+}
+
+/// Sends a success reply with no payload on the main thread.
+- (void)sendSuccess {
+ [self send:nil];
+}
+
+/// Sends a success reply carrying `data` on the main thread.
+- (void)sendSuccessWithData:(id)data {
+ [self send:data];
+}
+
+/// Converts an NSError into a FlutterError ("Error <code>" / localized
+/// description / domain) and sends it on the main thread.
+- (void)sendError:(NSError *)error {
+ [self sendErrorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code]
+ message:error.localizedDescription
+ details:error.domain];
+}
+
+/// Builds and sends a FlutterError with the given code/message/details.
+- (void)sendErrorWithCode:(NSString *)code
+ message:(NSString *_Nullable)message
+ details:(id _Nullable)details {
+ FlutterError *flutterError = [FlutterError errorWithCode:code message:message details:details];
+ [self send:flutterError];
+}
+
+/// Sends a pre-built FlutterError on the main thread.
+- (void)sendFlutterError:(FlutterError *)flutterError {
+ [self send:flutterError];
+}
+
+/// Replies with FlutterMethodNotImplemented on the main thread.
+- (void)sendNotImplemented {
+ [self send:FlutterMethodNotImplemented];
+}
+
+/**
+ * Sends result to flutterResult on the main thread.
+ */
+- (void)send:(id _Nullable)result {
+ FLTEnsureToRunOnMainQueue(^{
+ // WARNING: Should not use weak self, because `FlutterResult`s are passed as arguments
+ // (retained within call stack, but not in the heap). FLTEnsureToRunOnMainQueue may trigger a
+ // context switch (when calling from background thread), in which case using weak self will
+ // always result in a nil self. Alternative to using strong self, we can also create a local
+ // strong variable to be captured by this block.
+ self.flutterResult(result);
+ });
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.h b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.h
new file mode 100644
index 0000000..0f6611d
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.h
@@ -0,0 +1,28 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A thread safe wrapper for FlutterMethodChannel that can be called from any thread, by dispatching
+ * its underlying engine calls to the main thread.
+ */
+@interface FLTThreadSafeMethodChannel : NSObject
+
+/**
+ * Creates a FLTThreadSafeMethodChannel by wrapping a FlutterMethodChannel object.
+ * @param channel The FlutterMethodChannel object to be wrapped.
+ */
+- (instancetype)initWithMethodChannel:(FlutterMethodChannel *)channel;
+
+/**
+ * Invokes the specified flutter method on the main thread with the specified arguments.
+ */
+- (void)invokeMethod:(NSString *)method arguments:(nullable id)arguments;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.m b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.m
new file mode 100644
index 0000000..df7c169
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.m
@@ -0,0 +1,29 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTThreadSafeMethodChannel.h"
+#import "QueueUtils.h"
+
+@interface FLTThreadSafeMethodChannel ()
+@property(nonatomic, strong) FlutterMethodChannel *channel;
+@end
+
+@implementation FLTThreadSafeMethodChannel
+
+/// Wraps `channel` so methods can be invoked from any thread.
+- (instancetype)initWithMethodChannel:(FlutterMethodChannel *)channel {
+ self = [super init];
+ if (self) {
+ _channel = channel;
+ }
+ return self;
+}
+
+/// Invokes `method` on the main queue. Captures self weakly: if the wrapper
+/// is deallocated before the main-queue hop, the invocation becomes a no-op
+/// message to nil and is silently dropped.
+- (void)invokeMethod:(NSString *)method arguments:(id)arguments {
+ __weak typeof(self) weakSelf = self;
+ FLTEnsureToRunOnMainQueue(^{
+ [weakSelf.channel invokeMethod:method arguments:arguments];
+ });
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h
new file mode 100644
index 0000000..030e2db
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h
@@ -0,0 +1,46 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A thread safe wrapper for FlutterTextureRegistry that can be called from any thread, by
+ * dispatching its underlying engine calls to the main thread.
+ */
+@interface FLTThreadSafeTextureRegistry : NSObject
+
+/**
+ * Creates a FLTThreadSafeTextureRegistry by wrapping an object conforming to
+ * FlutterTextureRegistry.
+ * @param registry The FlutterTextureRegistry object to be wrapped.
+ */
+- (instancetype)initWithTextureRegistry:(NSObject<FlutterTextureRegistry> *)registry;
+
+/**
+ * Registers a `FlutterTexture` on the main thread for usage in Flutter and returns an id that can
+ * be used to reference that texture when calling into Flutter with channels.
+ *
+ * On success the completion block completes with the id of the registered texture, else with
+ * 0. The completion block runs on the main thread.
+ */
+- (void)registerTexture:(NSObject<FlutterTexture> *)texture
+ completion:(void (^)(int64_t))completion;
+
+/**
+ * Notifies the Flutter engine on the main thread that the given texture has been updated.
+ */
+- (void)textureFrameAvailable:(int64_t)textureId;
+
+/**
+ * Notifies the Flutter engine on the main thread to unregister a `FlutterTexture` that has been
+ * previously registered with `registerTexture:`.
+ * @param textureId The result that was previously returned from `registerTexture:`.
+ */
+- (void)unregisterTexture:(int64_t)textureId;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m
new file mode 100644
index 0000000..b82d566
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m
@@ -0,0 +1,46 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTThreadSafeTextureRegistry.h"
+#import "QueueUtils.h"
+
+@interface FLTThreadSafeTextureRegistry ()
+@property(nonatomic, strong) NSObject<FlutterTextureRegistry> *registry;
+@end
+
+@implementation FLTThreadSafeTextureRegistry
+
+- (instancetype)initWithTextureRegistry:(NSObject<FlutterTextureRegistry> *)registry {
+ self = [super init];
+ if (self) {
+ _registry = registry;
+ }
+ return self;
+}
+
+- (void)registerTexture:(NSObject<FlutterTexture> *)texture
+ completion:(void (^)(int64_t))completion {
+ __weak typeof(self) weakSelf = self;
+ FLTEnsureToRunOnMainQueue(^{
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+ completion([strongSelf.registry registerTexture:texture]);
+ });
+}
+
+- (void)textureFrameAvailable:(int64_t)textureId {
+ __weak typeof(self) weakSelf = self;
+ FLTEnsureToRunOnMainQueue(^{
+ [weakSelf.registry textureFrameAvailable:textureId];
+ });
+}
+
+- (void)unregisterTexture:(int64_t)textureId {
+ __weak typeof(self) weakSelf = self;
+ FLTEnsureToRunOnMainQueue(^{
+ [weakSelf.registry unregisterTexture:textureId];
+ });
+}
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/QueueUtils.h b/packages/camera/camera_avfoundation/ios/Classes/QueueUtils.h
new file mode 100644
index 0000000..a7e22da
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/QueueUtils.h
@@ -0,0 +1,19 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// Queue-specific context data to be associated with the capture session queue.
+extern const char* FLTCaptureSessionQueueSpecific;
+
+/// Ensures the given block runs on the main queue.
+/// If caller site is already on the main queue, the block will be run
+/// synchronously. Otherwise, the block will be dispatched asynchronously to the
+/// main queue.
+/// @param block the block to be run on the main queue.
+extern void FLTEnsureToRunOnMainQueue(dispatch_block_t block);
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/QueueUtils.m b/packages/camera/camera_avfoundation/ios/Classes/QueueUtils.m
new file mode 100644
index 0000000..1fd54cd
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/QueueUtils.m
@@ -0,0 +1,15 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "QueueUtils.h"
+
+const char *FLTCaptureSessionQueueSpecific = "capture_session_queue";
+
+void FLTEnsureToRunOnMainQueue(dispatch_block_t block) {
+ if (!NSThread.isMainThread) {
+ dispatch_async(dispatch_get_main_queue(), block);
+ } else {
+ block();
+ }
+}
diff --git a/packages/camera/camera_avfoundation/ios/Classes/camera_avfoundation-umbrella.h b/packages/camera/camera_avfoundation/ios/Classes/camera_avfoundation-umbrella.h
new file mode 100644
index 0000000..f8464aa
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/Classes/camera_avfoundation-umbrella.h
@@ -0,0 +1,9 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+#import <camera_avfoundation/CameraPlugin.h>
+
+FOUNDATION_EXPORT double cameraVersionNumber;
+FOUNDATION_EXPORT const unsigned char cameraVersionString[];
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation.podspec b/packages/camera/camera_avfoundation/ios/camera_avfoundation.podspec
new file mode 100644
index 0000000..27f569c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation.podspec
@@ -0,0 +1,23 @@
+#
+# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
+#
+Pod::Spec.new do |s|
+ s.name = 'camera_avfoundation'
+ s.version = '0.0.1'
+ s.summary = 'Flutter Camera'
+ s.description = <<-DESC
+A Flutter plugin to use the camera from your Flutter app.
+ DESC
+ s.homepage = 'https://github.com/flutter/plugins'
+ s.license = { :type => 'BSD', :file => '../LICENSE' }
+ s.author = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' }
+  s.source           = { :http => 'https://github.com/flutter/plugins/tree/main/packages/camera/camera_avfoundation' }
+ s.documentation_url = 'https://pub.dev/packages/camera_avfoundation'
+ s.source_files = 'Classes/**/*.{h,m}'
+ s.public_header_files = 'Classes/**/*.h'
+ s.module_map = 'Classes/CameraPlugin.modulemap'
+ s.dependency 'Flutter'
+
+ s.platform = :ios, '9.0'
+ s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' }
+end
diff --git a/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart
new file mode 100644
index 0000000..e07a440
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/camera_avfoundation.dart
@@ -0,0 +1,5 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'src/avfoundation_camera.dart';
diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
new file mode 100644
index 0000000..5080c57
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
@@ -0,0 +1,639 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:stream_transform/stream_transform.dart';
+
+import 'type_conversion.dart';
+import 'utils.dart';
+
+const MethodChannel _channel =
+ MethodChannel('plugins.flutter.io/camera_avfoundation');
+
+/// An iOS implementation of [CameraPlatform] based on AVFoundation.
+class AVFoundationCamera extends CameraPlatform {
+ /// Registers this class as the default instance of [CameraPlatform].
+ static void registerWith() {
+ CameraPlatform.instance = AVFoundationCamera();
+ }
+
+ final Map<int, MethodChannel> _channels = <int, MethodChannel>{};
+
+ /// The name of the channel that device events from the platform side are
+ /// sent on.
+ @visibleForTesting
+ static const String deviceEventChannelName =
+ 'plugins.flutter.io/camera_avfoundation/fromPlatform';
+
+ /// The controller we need to broadcast the different events coming
+ /// from handleMethodCall, specific to camera events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ final StreamController<CameraEvent> cameraEventStreamController =
+ StreamController<CameraEvent>.broadcast();
+
+ /// The controller we need to broadcast the different events coming
+ /// from handleMethodCall, specific to general device events.
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ late final StreamController<DeviceEvent> _deviceEventStreamController =
+ _createDeviceEventStreamController();
+
+ StreamController<DeviceEvent> _createDeviceEventStreamController() {
+ // Set up the method handler lazily.
+ const MethodChannel channel = MethodChannel(deviceEventChannelName);
+ channel.setMethodCallHandler(_handleDeviceMethodCall);
+ return StreamController<DeviceEvent>.broadcast();
+ }
+
+ // The stream to receive frames from the native code.
+ StreamSubscription<dynamic>? _platformImageStreamSubscription;
+
+ // The stream for vending frames to platform interface clients.
+ StreamController<CameraImageData>? _frameStreamController;
+
+ Stream<CameraEvent> _cameraEvents(int cameraId) =>
+ cameraEventStreamController.stream
+ .where((CameraEvent event) => event.cameraId == cameraId);
+
+ @override
+ Future<List<CameraDescription>> availableCameras() async {
+ try {
+ final List<Map<dynamic, dynamic>>? cameras = await _channel
+ .invokeListMethod<Map<dynamic, dynamic>>('availableCameras');
+
+ if (cameras == null) {
+ return <CameraDescription>[];
+ }
+
+ return cameras.map((Map<dynamic, dynamic> camera) {
+ return CameraDescription(
+ name: camera['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(camera['lensFacing']! as String),
+ sensorOrientation: camera['sensorOrientation']! as int,
+ );
+ }).toList();
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<int> createCamera(
+ CameraDescription cameraDescription,
+ ResolutionPreset? resolutionPreset, {
+ bool enableAudio = false,
+ }) async {
+ try {
+ final Map<String, dynamic>? reply = await _channel
+ .invokeMapMethod<String, dynamic>('create', <String, dynamic>{
+ 'cameraName': cameraDescription.name,
+ 'resolutionPreset': resolutionPreset != null
+ ? _serializeResolutionPreset(resolutionPreset)
+ : null,
+ 'enableAudio': enableAudio,
+ });
+
+ return reply!['cameraId']! as int;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> initializeCamera(
+ int cameraId, {
+ ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+ }) {
+ _channels.putIfAbsent(cameraId, () {
+ final MethodChannel channel = MethodChannel(
+ 'plugins.flutter.io/camera_avfoundation/camera$cameraId');
+ channel.setMethodCallHandler(
+ (MethodCall call) => handleCameraMethodCall(call, cameraId));
+ return channel;
+ });
+
+ final Completer<void> completer = Completer<void>();
+
+ onCameraInitialized(cameraId).first.then((CameraInitializedEvent value) {
+ completer.complete();
+ });
+
+ _channel.invokeMapMethod<String, dynamic>(
+ 'initialize',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'imageFormatGroup': imageFormatGroup.name(),
+ },
+ ).catchError(
+ // TODO(srawlins): This should return a value of the future's type. This
+ // will fail upcoming analysis checks with
+ // https://github.com/flutter/flutter/issues/105750.
+ // ignore: body_might_complete_normally_catch_error
+ (Object error, StackTrace stackTrace) {
+ if (error is! PlatformException) {
+ // ignore: only_throw_errors
+ throw error;
+ }
+ completer.completeError(
+ CameraException(error.code, error.message),
+ stackTrace,
+ );
+ },
+ );
+
+ return completer.future;
+ }
+
+ @override
+ Future<void> dispose(int cameraId) async {
+ if (_channels.containsKey(cameraId)) {
+ final MethodChannel? cameraChannel = _channels[cameraId];
+ cameraChannel?.setMethodCallHandler(null);
+ _channels.remove(cameraId);
+ }
+
+ await _channel.invokeMethod<void>(
+ 'dispose',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
+ }
+
+ @override
+ Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraResolutionChangedEvent>();
+ }
+
+ @override
+ Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
+ }
+
+ @override
+ Stream<CameraErrorEvent> onCameraError(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraErrorEvent>();
+ }
+
+ @override
+ Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
+ return _cameraEvents(cameraId).whereType<VideoRecordedEvent>();
+ }
+
+ @override
+ Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+ return _deviceEventStreamController.stream
+ .whereType<DeviceOrientationChangedEvent>();
+ }
+
+ @override
+ Future<void> lockCaptureOrientation(
+ int cameraId,
+ DeviceOrientation orientation,
+ ) async {
+ await _channel.invokeMethod<String>(
+ 'lockCaptureOrientation',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'orientation': serializeDeviceOrientation(orientation)
+ },
+ );
+ }
+
+ @override
+ Future<void> unlockCaptureOrientation(int cameraId) async {
+ await _channel.invokeMethod<String>(
+ 'unlockCaptureOrientation',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<XFile> takePicture(int cameraId) async {
+ final String? path = await _channel.invokeMethod<String>(
+ 'takePicture',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ if (path == null) {
+ throw CameraException(
+ 'INVALID_PATH',
+ 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+ );
+ }
+
+ return XFile(path);
+ }
+
+ @override
+ Future<void> prepareForVideoRecording() =>
+ _channel.invokeMethod<void>('prepareForVideoRecording');
+
+ @override
+ Future<void> startVideoRecording(int cameraId,
+ {Duration? maxVideoDuration}) async {
+ return startVideoCapturing(
+ VideoCaptureOptions(cameraId, maxDuration: maxVideoDuration));
+ }
+
+ @override
+ Future<void> startVideoCapturing(VideoCaptureOptions options) async {
+ await _channel.invokeMethod<void>(
+ 'startVideoRecording',
+ <String, dynamic>{
+ 'cameraId': options.cameraId,
+ 'maxVideoDuration': options.maxDuration?.inMilliseconds,
+ 'enableStream': options.streamCallback != null,
+ },
+ );
+
+ if (options.streamCallback != null) {
+ _frameStreamController = _createStreamController();
+ _frameStreamController!.stream.listen(options.streamCallback);
+ _startStreamListener();
+ }
+ }
+
+ @override
+ Future<XFile> stopVideoRecording(int cameraId) async {
+ final String? path = await _channel.invokeMethod<String>(
+ 'stopVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ if (path == null) {
+ throw CameraException(
+ 'INVALID_PATH',
+ 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+ );
+ }
+
+ return XFile(path);
+ }
+
+ @override
+ Future<void> pauseVideoRecording(int cameraId) => _channel.invokeMethod<void>(
+ 'pauseVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ @override
+ Future<void> resumeVideoRecording(int cameraId) =>
+ _channel.invokeMethod<void>(
+ 'resumeVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ @override
+ Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
+ {CameraImageStreamOptions? options}) {
+ _frameStreamController =
+ _createStreamController(onListen: _onFrameStreamListen);
+ return _frameStreamController!.stream;
+ }
+
+ StreamController<CameraImageData> _createStreamController(
+ {Function()? onListen}) {
+ return StreamController<CameraImageData>(
+ onListen: onListen ?? () {},
+ onPause: _onFrameStreamPauseResume,
+ onResume: _onFrameStreamPauseResume,
+ onCancel: _onFrameStreamCancel,
+ );
+ }
+
+ void _onFrameStreamListen() {
+ _startPlatformStream();
+ }
+
+ Future<void> _startPlatformStream() async {
+ await _channel.invokeMethod<void>('startImageStream');
+ _startStreamListener();
+ }
+
+ void _startStreamListener() {
+ const EventChannel cameraEventChannel =
+ EventChannel('plugins.flutter.io/camera_avfoundation/imageStream');
+ _platformImageStreamSubscription =
+ cameraEventChannel.receiveBroadcastStream().listen((dynamic imageData) {
+ try {
+ _channel.invokeMethod<void>('receivedImageStreamData');
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ _frameStreamController!
+ .add(cameraImageFromPlatformData(imageData as Map<dynamic, dynamic>));
+ });
+ }
+
+ FutureOr<void> _onFrameStreamCancel() async {
+ await _channel.invokeMethod<void>('stopImageStream');
+ await _platformImageStreamSubscription?.cancel();
+ _platformImageStreamSubscription = null;
+ _frameStreamController = null;
+ }
+
+ void _onFrameStreamPauseResume() {
+ throw CameraException('InvalidCall',
+ 'Pause and resume are not supported for onStreamedFrameAvailable');
+ }
+
+ @override
+ Future<void> setFlashMode(int cameraId, FlashMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setFlashMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': _serializeFlashMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setExposureMode(int cameraId, ExposureMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setExposureMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': serializeExposureMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setExposurePoint(int cameraId, Point<double>? point) {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ return _channel.invokeMethod<void>(
+ 'setExposurePoint',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'reset': point == null,
+ 'x': point?.x,
+ 'y': point?.y,
+ },
+ );
+ }
+
+ @override
+ Future<double> getMinExposureOffset(int cameraId) async {
+ final double? minExposureOffset = await _channel.invokeMethod<double>(
+ 'getMinExposureOffset',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return minExposureOffset!;
+ }
+
+ @override
+ Future<double> getMaxExposureOffset(int cameraId) async {
+ final double? maxExposureOffset = await _channel.invokeMethod<double>(
+ 'getMaxExposureOffset',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return maxExposureOffset!;
+ }
+
+ @override
+ Future<double> getExposureOffsetStepSize(int cameraId) async {
+ final double? stepSize = await _channel.invokeMethod<double>(
+ 'getExposureOffsetStepSize',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return stepSize!;
+ }
+
+ @override
+ Future<double> setExposureOffset(int cameraId, double offset) async {
+ final double? appliedOffset = await _channel.invokeMethod<double>(
+ 'setExposureOffset',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'offset': offset,
+ },
+ );
+
+ return appliedOffset!;
+ }
+
+ @override
+ Future<void> setFocusMode(int cameraId, FocusMode mode) =>
+ _channel.invokeMethod<void>(
+ 'setFocusMode',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'mode': serializeFocusMode(mode),
+ },
+ );
+
+ @override
+ Future<void> setFocusPoint(int cameraId, Point<double>? point) {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ return _channel.invokeMethod<void>(
+ 'setFocusPoint',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'reset': point == null,
+ 'x': point?.x,
+ 'y': point?.y,
+ },
+ );
+ }
+
+ @override
+ Future<double> getMaxZoomLevel(int cameraId) async {
+ final double? maxZoomLevel = await _channel.invokeMethod<double>(
+ 'getMaxZoomLevel',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return maxZoomLevel!;
+ }
+
+ @override
+ Future<double> getMinZoomLevel(int cameraId) async {
+ final double? minZoomLevel = await _channel.invokeMethod<double>(
+ 'getMinZoomLevel',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return minZoomLevel!;
+ }
+
+ @override
+ Future<void> setZoomLevel(int cameraId, double zoom) async {
+ try {
+ await _channel.invokeMethod<double>(
+ 'setZoomLevel',
+ <String, dynamic>{
+ 'cameraId': cameraId,
+ 'zoom': zoom,
+ },
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> pausePreview(int cameraId) async {
+ await _channel.invokeMethod<double>(
+ 'pausePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<void> resumePreview(int cameraId) async {
+ await _channel.invokeMethod<double>(
+ 'resumePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Widget buildPreview(int cameraId) {
+ return Texture(textureId: cameraId);
+ }
+
+ /// Returns the flash mode as a String.
+ String _serializeFlashMode(FlashMode flashMode) {
+ switch (flashMode) {
+ case FlashMode.off:
+ return 'off';
+ case FlashMode.auto:
+ return 'auto';
+ case FlashMode.always:
+ return 'always';
+ case FlashMode.torch:
+ return 'torch';
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // ignore: dead_code
+ return 'off';
+ }
+
+ /// Returns the resolution preset as a String.
+ String _serializeResolutionPreset(ResolutionPreset resolutionPreset) {
+ switch (resolutionPreset) {
+ case ResolutionPreset.max:
+ return 'max';
+ case ResolutionPreset.ultraHigh:
+ return 'ultraHigh';
+ case ResolutionPreset.veryHigh:
+ return 'veryHigh';
+ case ResolutionPreset.high:
+ return 'high';
+ case ResolutionPreset.medium:
+ return 'medium';
+ case ResolutionPreset.low:
+ return 'low';
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // ignore: dead_code
+ return 'max';
+ }
+
+ /// Converts messages received from the native platform into device events.
+ Future<dynamic> _handleDeviceMethodCall(MethodCall call) async {
+ switch (call.method) {
+ case 'orientation_changed':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ _deviceEventStreamController.add(DeviceOrientationChangedEvent(
+ deserializeDeviceOrientation(arguments['orientation']! as String)));
+ break;
+ default:
+ throw MissingPluginException();
+ }
+ }
+
+ /// Converts messages received from the native platform into camera events.
+ ///
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ Future<dynamic> handleCameraMethodCall(MethodCall call, int cameraId) async {
+ switch (call.method) {
+ case 'initialized':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ arguments['previewWidth']! as double,
+ arguments['previewHeight']! as double,
+ deserializeExposureMode(arguments['exposureMode']! as String),
+ arguments['exposurePointSupported']! as bool,
+ deserializeFocusMode(arguments['focusMode']! as String),
+ arguments['focusPointSupported']! as bool,
+ ));
+ break;
+ case 'resolution_changed':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ cameraEventStreamController.add(CameraResolutionChangedEvent(
+ cameraId,
+ arguments['captureWidth']! as double,
+ arguments['captureHeight']! as double,
+ ));
+ break;
+ case 'camera_closing':
+ cameraEventStreamController.add(CameraClosingEvent(
+ cameraId,
+ ));
+ break;
+ case 'video_recorded':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ cameraEventStreamController.add(VideoRecordedEvent(
+ cameraId,
+ XFile(arguments['path']! as String),
+ arguments['maxVideoDuration'] != null
+ ? Duration(milliseconds: arguments['maxVideoDuration']! as int)
+ : null,
+ ));
+ break;
+ case 'error':
+ final Map<String, Object?> arguments = _getArgumentDictionary(call);
+ cameraEventStreamController.add(CameraErrorEvent(
+ cameraId,
+ arguments['description']! as String,
+ ));
+ break;
+ default:
+ throw MissingPluginException();
+ }
+ }
+
+ /// Returns the arguments of [call] as typed string-keyed Map.
+ ///
+ /// This does not do any type validation, so is only safe to call if the
+ /// arguments are known to be a map.
+ Map<String, Object?> _getArgumentDictionary(MethodCall call) {
+ return (call.arguments as Map<Object?, Object?>).cast<String, Object?>();
+ }
+}
diff --git a/packages/camera/camera_avfoundation/lib/src/type_conversion.dart b/packages/camera/camera_avfoundation/lib/src/type_conversion.dart
new file mode 100644
index 0000000..c2a539a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/src/type_conversion.dart
@@ -0,0 +1,50 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+
+/// Converts method channel call [data] for `receivedImageStreamData` to a
+/// [CameraImageData].
+CameraImageData cameraImageFromPlatformData(Map<dynamic, dynamic> data) {
+ return CameraImageData(
+ format: _cameraImageFormatFromPlatformData(data['format']),
+ height: data['height'] as int,
+ width: data['width'] as int,
+ lensAperture: data['lensAperture'] as double?,
+ sensorExposureTime: data['sensorExposureTime'] as int?,
+ sensorSensitivity: data['sensorSensitivity'] as double?,
+ planes: List<CameraImagePlane>.unmodifiable(
+ (data['planes'] as List<dynamic>).map<CameraImagePlane>(
+ (dynamic planeData) => _cameraImagePlaneFromPlatformData(
+ planeData as Map<dynamic, dynamic>))));
+}
+
+CameraImageFormat _cameraImageFormatFromPlatformData(dynamic data) {
+ return CameraImageFormat(_imageFormatGroupFromPlatformData(data), raw: data);
+}
+
+ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
+ switch (data) {
+ case 875704438: // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ return ImageFormatGroup.yuv420;
+
+ case 1111970369: // kCVPixelFormatType_32BGRA
+ return ImageFormatGroup.bgra8888;
+ }
+
+ return ImageFormatGroup.unknown;
+}
+
+CameraImagePlane _cameraImagePlaneFromPlatformData(Map<dynamic, dynamic> data) {
+ return CameraImagePlane(
+ bytes: data['bytes'] as Uint8List,
+ bytesPerPixel: data['bytesPerPixel'] as int?,
+ bytesPerRow: data['bytesPerRow'] as int,
+ height: data['height'] as int?,
+ width: data['width'] as int?);
+}
diff --git a/packages/camera/camera_avfoundation/lib/src/utils.dart b/packages/camera/camera_avfoundation/lib/src/utils.dart
new file mode 100644
index 0000000..8d58f7f
--- /dev/null
+++ b/packages/camera/camera_avfoundation/lib/src/utils.dart
@@ -0,0 +1,56 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+
+/// Parses a string into a corresponding CameraLensDirection.
+CameraLensDirection parseCameraLensDirection(String string) {
+ switch (string) {
+ case 'front':
+ return CameraLensDirection.front;
+ case 'back':
+ return CameraLensDirection.back;
+ case 'external':
+ return CameraLensDirection.external;
+ }
+ throw ArgumentError('Unknown CameraLensDirection value');
+}
+
+/// Returns the device orientation as a String.
+String serializeDeviceOrientation(DeviceOrientation orientation) {
+ switch (orientation) {
+ case DeviceOrientation.portraitUp:
+ return 'portraitUp';
+ case DeviceOrientation.portraitDown:
+ return 'portraitDown';
+ case DeviceOrientation.landscapeRight:
+ return 'landscapeRight';
+ case DeviceOrientation.landscapeLeft:
+ return 'landscapeLeft';
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // ignore: dead_code
+ return 'portraitUp';
+}
+
+/// Returns the device orientation for a given String.
+DeviceOrientation deserializeDeviceOrientation(String str) {
+ switch (str) {
+ case 'portraitUp':
+ return DeviceOrientation.portraitUp;
+ case 'portraitDown':
+ return DeviceOrientation.portraitDown;
+ case 'landscapeRight':
+ return DeviceOrientation.landscapeRight;
+ case 'landscapeLeft':
+ return DeviceOrientation.landscapeLeft;
+ default:
+ throw ArgumentError('"$str" is not a valid DeviceOrientation value');
+ }
+}
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
new file mode 100644
index 0000000..b272a4c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -0,0 +1,30 @@
+name: camera_avfoundation
+description: iOS implementation of the camera plugin.
+repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_avfoundation
+issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
+version: 0.9.11
+
+environment:
+ sdk: ">=2.14.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+flutter:
+ plugin:
+ implements: camera
+ platforms:
+ ios:
+ pluginClass: CameraPlugin
+ dartPluginClass: AVFoundationCamera
+
+dependencies:
+ camera_platform_interface: ^2.3.1
+ flutter:
+ sdk: flutter
+ stream_transform: ^2.0.0
+
+dev_dependencies:
+ async: ^2.5.0
+ flutter_driver:
+ sdk: flutter
+ flutter_test:
+ sdk: flutter
diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
new file mode 100644
index 0000000..5d0b74c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
@@ -0,0 +1,1132 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:async/async.dart';
+import 'package:camera_avfoundation/src/avfoundation_camera.dart';
+import 'package:camera_avfoundation/src/utils.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'method_channel_mock.dart';
+
+const String _channelName = 'plugins.flutter.io/camera_avfoundation';
+
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ test('registers instance', () async {
+ AVFoundationCamera.registerWith();
+ expect(CameraPlatform.instance, isA<AVFoundationCamera>());
+ });
+
+ test('registration does not set message handlers', () async {
+ AVFoundationCamera.registerWith();
+
+ // Setting up a handler requires bindings to be initialized, and since
+ // registerWith is called very early in initialization the bindings won't
+ // have been initialized. While registerWith could initialize them, that
+ // could slow down startup, so instead the handler should be set up lazily.
+ final ByteData? response =
+ await _ambiguate(TestDefaultBinaryMessengerBinding.instance)!
+ .defaultBinaryMessenger
+ .handlePlatformMessage(
+ AVFoundationCamera.deviceEventChannelName,
+ const StandardMethodCodec().encodeMethodCall(const MethodCall(
+ 'orientation_changed',
+ <String, Object>{'orientation': 'portraitDown'})),
+ (ByteData? data) {});
+ expect(response, null);
+ });
+
+ group('Creation, Initialization & Disposal Tests', () {
+ test('Should send creation data and receive back a camera id', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ }
+ });
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ // Act
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0),
+ ResolutionPreset.high,
+ );
+
+ // Assert
+ expect(cameraMockChannel.log, <Matcher>[
+ isMethodCall(
+ 'create',
+ arguments: <String, Object?>{
+ 'cameraName': 'Test',
+ 'resolutionPreset': 'high',
+ 'enableAudio': false
+ },
+ ),
+ ]);
+ expect(cameraId, 1);
+ });
+
+ test('Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test(
+ 'Should throw CameraException when initialize throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'initialize': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ },
+ );
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ // Act
+ expect(
+ () => camera.initializeCamera(0),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having(
+ (CameraException e) => e.description,
+ 'description',
+ 'Mock error message used during testing.',
+ ),
+ ),
+ );
+ },
+ );
+
+ test('Should send initialization data', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ 'initialize': null
+ });
+ final AVFoundationCamera camera = AVFoundationCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+
+ // Act
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ isMethodCall(
+ 'initialize',
+ arguments: <String, Object?>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ ),
+ ]);
+ });
+
+ test('Should send a disposal call on dispose', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null,
+ 'dispose': <String, dynamic>{'cameraId': 1}
+ });
+
+ final AVFoundationCamera camera = AVFoundationCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Act
+ await camera.dispose(cameraId);
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ anything,
+ isMethodCall(
+ 'dispose',
+ arguments: <String, Object?>{'cameraId': 1},
+ ),
+ ]);
+ });
+ });
+
+ group('Event Tests', () {
+ late AVFoundationCamera camera;
+ late int cameraId;
+ setUp(() async {
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = AVFoundationCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+ });
+
+ test('Should receive initialized event', () async {
+ // Act
+ final Stream<CameraInitializedEvent> eventStream =
+ camera.onCameraInitialized(cameraId);
+ final StreamQueue<CameraInitializedEvent> streamQueue =
+ StreamQueue<CameraInitializedEvent>(eventStream);
+
+ // Emit test events
+ final CameraInitializedEvent event = CameraInitializedEvent(
+ cameraId,
+ 3840,
+ 2160,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ );
+ await camera.handleCameraMethodCall(
+ MethodCall('initialized', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive resolution changes', () async {
+ // Act
+ final Stream<CameraResolutionChangedEvent> resolutionStream =
+ camera.onCameraResolutionChanged(cameraId);
+ final StreamQueue<CameraResolutionChangedEvent> streamQueue =
+ StreamQueue<CameraResolutionChangedEvent>(resolutionStream);
+
+ // Emit test events
+ final CameraResolutionChangedEvent fhdEvent =
+ CameraResolutionChangedEvent(cameraId, 1920, 1080);
+ final CameraResolutionChangedEvent uhdEvent =
+ CameraResolutionChangedEvent(cameraId, 3840, 2160);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera closing events', () async {
+ // Act
+ final Stream<CameraClosingEvent> eventStream =
+ camera.onCameraClosing(cameraId);
+ final StreamQueue<CameraClosingEvent> streamQueue =
+ StreamQueue<CameraClosingEvent>(eventStream);
+
+ // Emit test events
+ final CameraClosingEvent event = CameraClosingEvent(cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera error events', () async {
+ // Act
+ final Stream<CameraErrorEvent> errorStream =
+ camera.onCameraError(cameraId);
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(errorStream);
+
+ // Emit test events
+ final CameraErrorEvent event =
+ CameraErrorEvent(cameraId, 'Error Description');
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive device orientation change events', () async {
+ // Act
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ camera.onDeviceOrientationChanged();
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+
+ // Emit test events
+ const DeviceOrientationChangedEvent event =
+ DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+ for (int i = 0; i < 3; i++) {
+ await _ambiguate(TestDefaultBinaryMessengerBinding.instance)!
+ .defaultBinaryMessenger
+ .handlePlatformMessage(
+ AVFoundationCamera.deviceEventChannelName,
+ const StandardMethodCodec().encodeMethodCall(
+ MethodCall('orientation_changed', event.toJson())),
+ null);
+ }
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+ });
+
+ group('Function Tests', () {
+ late AVFoundationCamera camera;
+ late int cameraId;
+
+ setUp(() async {
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = AVFoundationCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(
+ CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ),
+ );
+ await initializeFuture;
+ });
+
+ test('Should fetch CameraDescription instances for available cameras',
+ () async {
+ // Arrange
+ // This deliberately uses 'dynamic' since that's what actual platform
+ // channel results will be, so using typed mock data could mask type
+ // handling bugs in the code under test.
+ final List<dynamic> returnData = <dynamic>[
+ <String, dynamic>{
+ 'name': 'Test 1',
+ 'lensFacing': 'front',
+ 'sensorOrientation': 1
+ },
+ <String, dynamic>{
+ 'name': 'Test 2',
+ 'lensFacing': 'back',
+ 'sensorOrientation': 2
+ }
+ ];
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'availableCameras': returnData},
+ );
+
+ // Act
+ final List<CameraDescription> cameras = await camera.availableCameras();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('availableCameras', arguments: null),
+ ]);
+ expect(cameras.length, returnData.length);
+ for (int i = 0; i < returnData.length; i++) {
+ final Map<String, Object?> typedData =
+ (returnData[i] as Map<dynamic, dynamic>).cast<String, Object?>();
+ final CameraDescription cameraDescription = CameraDescription(
+ name: typedData['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(typedData['lensFacing']! as String),
+ sensorOrientation: typedData['sensorOrientation']! as int,
+ );
+ expect(cameras[i], cameraDescription);
+ }
+ });
+
+ test(
+ 'Should throw CameraException when availableCameras throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(channelName: _channelName, methods: <String, dynamic>{
+ 'availableCameras': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+
+ // Act
+ expect(
+ camera.availableCameras,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should take a picture and return an XFile instance', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'takePicture': '/test/path.jpg'});
+
+ // Act
+ final XFile file = await camera.takePicture(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('takePicture', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.jpg');
+ });
+
+ test('Should prepare for video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'prepareForVideoRecording': null},
+ );
+
+ // Act
+ await camera.prepareForVideoRecording();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('prepareForVideoRecording', arguments: null),
+ ]);
+ });
+
+ test('Should start recording a video', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': null,
+ 'enableStream': false,
+ }),
+ ]);
+ });
+
+ test('Should pass maxVideoDuration when starting recording a video',
+ () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(
+ cameraId,
+ maxVideoDuration: const Duration(seconds: 10),
+ );
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': 10000,
+ 'enableStream': false,
+ }),
+ ]);
+ });
+
+ test(
+ 'Should pass enableStream if callback is passed when starting recording a video',
+ () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoCapturing(VideoCaptureOptions(cameraId,
+ streamCallback: (CameraImageData imageData) {}));
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': null,
+ 'enableStream': true,
+ }),
+ ]);
+ });
+
+ test('Should stop a video recording and return the file', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'stopVideoRecording': '/test/path.mp4'},
+ );
+
+ // Act
+ final XFile file = await camera.stopVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('stopVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.mp4');
+ });
+
+ test('Should pause a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'pauseVideoRecording': null},
+ );
+
+ // Act
+ await camera.pauseVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pauseVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should resume a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'resumeVideoRecording': null},
+ );
+
+ // Act
+ await camera.resumeVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumeVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the flash mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFlashMode': null},
+ );
+
+ // Act
+ await camera.setFlashMode(cameraId, FlashMode.torch);
+ await camera.setFlashMode(cameraId, FlashMode.always);
+ await camera.setFlashMode(cameraId, FlashMode.auto);
+ await camera.setFlashMode(cameraId, FlashMode.off);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'torch'
+ }),
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'always'
+ }),
+ isMethodCall('setFlashMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setFlashMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'off'}),
+ ]);
+ });
+
+ test('Should set the exposure mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposureMode': null},
+ );
+
+ // Act
+ await camera.setExposureMode(cameraId, ExposureMode.auto);
+ await camera.setExposureMode(cameraId, ExposureMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setExposureMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+ test('Should set the exposure point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposurePoint': null},
+ );
+
+ // Act
+ await camera.setExposurePoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setExposurePoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should get the min exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMinExposureOffset': 2.0},
+ );
+
+ // Act
+ final double minExposureOffset =
+ await camera.getMinExposureOffset(cameraId);
+
+ // Assert
+ expect(minExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the max exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMaxExposureOffset': 2.0},
+ );
+
+ // Act
+ final double maxExposureOffset =
+ await camera.getMaxExposureOffset(cameraId);
+
+ // Assert
+ expect(maxExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the exposure offset step size', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getExposureOffsetStepSize': 0.25},
+ );
+
+ // Act
+ final double stepSize = await camera.getExposureOffsetStepSize(cameraId);
+
+ // Assert
+ expect(stepSize, 0.25);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getExposureOffsetStepSize', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setExposureOffset': 0.6},
+ );
+
+ // Act
+ final double actualOffset = await camera.setExposureOffset(cameraId, 0.5);
+
+ // Assert
+ expect(actualOffset, 0.6);
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'offset': 0.5,
+ }),
+ ]);
+ });
+
+ test('Should set the focus mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFocusMode': null},
+ );
+
+ // Act
+ await camera.setFocusMode(cameraId, FocusMode.auto);
+ await camera.setFocusMode(cameraId, FocusMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusMode',
+ arguments: <String, Object?>{'cameraId': cameraId, 'mode': 'auto'}),
+ isMethodCall('setFocusMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+ test('Should set the focus point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setFocusPoint': null},
+ );
+
+ // Act
+ await camera.setFocusPoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setFocusPoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should build a texture widget as preview widget', () async {
+ // Act
+ final Widget widget = camera.buildPreview(cameraId);
+
+ // Assert
+ expect(widget is Texture, isTrue);
+ expect((widget as Texture).textureId, cameraId);
+ });
+
+ test('Should throw MissingPluginException when handling unknown method',
+ () {
+ final AVFoundationCamera camera = AVFoundationCamera();
+
+ expect(
+ () => camera.handleCameraMethodCall(
+ const MethodCall('unknown_method'), 1),
+ throwsA(isA<MissingPluginException>()));
+ });
+
+ test('Should get the max zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMaxZoomLevel': 10.0},
+ );
+
+ // Act
+ final double maxZoomLevel = await camera.getMaxZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 10.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the min zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'getMinZoomLevel': 1.0},
+ );
+
+ // Act
+ final double maxZoomLevel = await camera.getMinZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 1.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setZoomLevel': null},
+ );
+
+ // Act
+ await camera.setZoomLevel(cameraId, 2.0);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setZoomLevel',
+ arguments: <String, Object?>{'cameraId': cameraId, 'zoom': 2.0}),
+ ]);
+ });
+
+ test('Should throw CameraException when illegal zoom level is supplied',
+ () async {
+ // Arrange
+ MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'setZoomLevel': PlatformException(
+ code: 'ZOOM_ERROR',
+ message: 'Illegal zoom error',
+ )
+ },
+ );
+
+ // Act & assert
+ expect(
+ () => camera.setZoomLevel(cameraId, -1.0),
+ throwsA(isA<CameraException>()
+ .having((CameraException e) => e.code, 'code', 'ZOOM_ERROR')
+ .having((CameraException e) => e.description, 'description',
+ 'Illegal zoom error')));
+ });
+
+ test('Should lock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'lockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.lockCaptureOrientation(
+ cameraId, DeviceOrientation.portraitUp);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('lockCaptureOrientation', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'orientation': 'portraitUp'
+ }),
+ ]);
+ });
+
+ test('Should unlock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'unlockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.unlockCaptureOrientation(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('unlockCaptureOrientation',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should pause the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'pausePreview': null},
+ );
+
+ // Act
+ await camera.pausePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pausePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should resume the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'resumePreview': null},
+ );
+
+ // Act
+ await camera.resumePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should start streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ ]);
+
+ subscription.cancel();
+ });
+
+ test('Should stop streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+ subscription.cancel();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ isMethodCall('stopImageStream', arguments: null),
+ ]);
+ });
+ });
+}
+
+/// This allows a value of type T or T? to be treated as a value of type T?.
+///
+/// We use this so that APIs that have become non-nullable can still be used
+/// with `!` and `?` on the stable branch.
+T? _ambiguate<T>(T? value) => value;
diff --git a/packages/camera/camera_avfoundation/test/method_channel_mock.dart b/packages/camera/camera_avfoundation/test/method_channel_mock.dart
new file mode 100644
index 0000000..f26d12a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/test/method_channel_mock.dart
@@ -0,0 +1,47 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+class MethodChannelMock {
+ MethodChannelMock({
+ required String channelName,
+ this.delay,
+ required this.methods,
+ }) : methodChannel = MethodChannel(channelName) {
+ _ambiguate(TestDefaultBinaryMessengerBinding.instance)!
+ .defaultBinaryMessenger
+ .setMockMethodCallHandler(methodChannel, _handler);
+ }
+
+ final Duration? delay;
+ final MethodChannel methodChannel;
+ final Map<String, dynamic> methods;
+ final List<MethodCall> log = <MethodCall>[];
+
+ Future<dynamic> _handler(MethodCall methodCall) async {
+ log.add(methodCall);
+
+ if (!methods.containsKey(methodCall.method)) {
+ throw MissingPluginException('No implementation found for method '
+ '${methodCall.method} on channel ${methodChannel.name}');
+ }
+
+ return Future<dynamic>.delayed(delay ?? Duration.zero, () {
+ final dynamic result = methods[methodCall.method];
+ if (result is Exception) {
+ throw result;
+ }
+
+ return Future<dynamic>.value(result);
+ });
+ }
+}
+
+/// This allows a value of type T or T? to be treated as a value of type T?.
+///
+/// We use this so that APIs that have become non-nullable can still be used
+/// with `!` and `?` on the stable branch.
+T? _ambiguate<T>(T? value) => value;
diff --git a/packages/camera/camera_avfoundation/test/type_conversion_test.dart b/packages/camera/camera_avfoundation/test/type_conversion_test.dart
new file mode 100644
index 0000000..282f4ae
--- /dev/null
+++ b/packages/camera/camera_avfoundation/test/type_conversion_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_avfoundation/src/type_conversion.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('CameraImageData can be created', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 1,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.height, 1);
+ expect(cameraImage.width, 4);
+ expect(cameraImage.format.group, ImageFormatGroup.unknown);
+ expect(cameraImage.planes.length, 1);
+ });
+
+ test('CameraImageData has ImageFormatGroup.yuv420', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 875704438,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ });
+}
diff --git a/packages/camera/camera_avfoundation/test/utils_test.dart b/packages/camera/camera_avfoundation/test/utils_test.dart
new file mode 100644
index 0000000..bd28abb
--- /dev/null
+++ b/packages/camera/camera_avfoundation/test/utils_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_avfoundation/src/utils.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ group('Utility methods', () {
+ test(
+ 'Should return CameraLensDirection when valid value is supplied when parsing camera lens direction',
+ () {
+ expect(
+ parseCameraLensDirection('back'),
+ CameraLensDirection.back,
+ );
+ expect(
+ parseCameraLensDirection('front'),
+ CameraLensDirection.front,
+ );
+ expect(
+ parseCameraLensDirection('external'),
+ CameraLensDirection.external,
+ );
+ });
+
+ test(
+ 'Should throw ArgumentException when invalid value is supplied when parsing camera lens direction',
+ () {
+ expect(
+ () => parseCameraLensDirection('test'),
+ throwsA(isArgumentError),
+ );
+ });
+
+ test('serializeDeviceOrientation() should serialize correctly', () {
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitUp),
+ 'portraitUp');
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitDown),
+ 'portraitDown');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeRight),
+ 'landscapeRight');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeLeft),
+ 'landscapeLeft');
+ });
+
+ test('deserializeDeviceOrientation() should deserialize correctly', () {
+ expect(deserializeDeviceOrientation('portraitUp'),
+ DeviceOrientation.portraitUp);
+ expect(deserializeDeviceOrientation('portraitDown'),
+ DeviceOrientation.portraitDown);
+ expect(deserializeDeviceOrientation('landscapeRight'),
+ DeviceOrientation.landscapeRight);
+ expect(deserializeDeviceOrientation('landscapeLeft'),
+ DeviceOrientation.landscapeLeft);
+ });
+ });
+}
diff --git a/packages/camera/camera_platform_interface/AUTHORS b/packages/camera/camera_platform_interface/AUTHORS
new file mode 100644
index 0000000..493a0b4
--- /dev/null
+++ b/packages/camera/camera_platform_interface/AUTHORS
@@ -0,0 +1,66 @@
+# Below is a list of people and organizations that have contributed
+# to the Flutter project. Names should be added to the list like so:
+#
+# Name/Organization <email address>
+
+Google Inc.
+The Chromium Authors
+German Saprykin <saprykin.h@gmail.com>
+Benjamin Sauer <sauer.benjamin@gmail.com>
+larsenthomasj@gmail.com
+Ali Bitek <alibitek@protonmail.ch>
+Pol Batlló <pol.batllo@gmail.com>
+Anatoly Pulyaevskiy
+Hayden Flinner <haydenflinner@gmail.com>
+Stefano Rodriguez <hlsroddy@gmail.com>
+Salvatore Giordano <salvatoregiordanoo@gmail.com>
+Brian Armstrong <brian@flutter.institute>
+Paul DeMarco <paulmdemarco@gmail.com>
+Fabricio Nogueira <feufeu@gmail.com>
+Simon Lightfoot <simon@devangels.london>
+Ashton Thomas <ashton@acrinta.com>
+Thomas Danner <thmsdnnr@gmail.com>
+Diego Velásquez <diego.velasquez.lopez@gmail.com>
+Hajime Nakamura <nkmrhj@gmail.com>
+Tuyển Vũ Xuân <netsoft1985@gmail.com>
+Miguel Ruivo <miguel@miguelruivo.com>
+Sarthak Verma <sarthak@artiosys.com>
+Mike Diarmid <mike@invertase.io>
+Invertase <oss@invertase.io>
+Elliot Hesp <elliot@invertase.io>
+Vince Varga <vince.varga@smaho.com>
+Aawaz Gyawali <awazgyawali@gmail.com>
+EUI Limited <ian.evans3@admiralgroup.co.uk>
+Katarina Sheremet <katarina@sheremet.ch>
+Thomas Stockx <thomas@stockxit.com>
+Sarbagya Dhaubanjar <sarbagyastha@gmail.com>
+Ozkan Eksi <ozeksi@gmail.com>
+Rishab Nayak <rishab@bu.edu>
+ko2ic <ko2ic.dev@gmail.com>
+Jonathan Younger <jonathan@daikini.com>
+Jose Sanchez <josesm82@gmail.com>
+Debkanchan Samadder <debu.samadder@gmail.com>
+Audrius Karosevicius <audrius.karosevicius@gmail.com>
+Lukasz Piliszczuk <lukasz@intheloup.io>
+SoundReply Solutions GmbH <ch@soundreply.com>
+Rafal Wachol <rwachol@gmail.com>
+Pau Picas <pau.picas@gmail.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Alexandru Tuca <salexandru.tuca@outlook.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Rhodes Davis Jr. <rody.davis.jr@gmail.com>
+Luigi Agosti <luigi@tengio.com>
+Quentin Le Guennec <quentin@tengio.com>
+Koushik Ravikumar <koushik@tengio.com>
+Nissim Dsilva <nissim@tengio.com>
+Giancarlo Rocha <giancarloiff@gmail.com>
+Ryo Miyake <ryo@miyake.id>
+Théo Champion <contact.theochampion@gmail.com>
+Kazuki Yamaguchi <y.kazuki0614n@gmail.com>
+Eitan Schwartz <eshvartz@gmail.com>
+Chris Rutkowski <chrisrutkowski89@gmail.com>
+Juan Alvarez <juan.alvarez@resideo.com>
+Aleksandr Yurkovskiy <sanekyy@gmail.com>
+Anton Borries <mail@antonborri.es>
+Alex Li <google@alexv525.com>
+Rahul Raj <64.rahulraj@gmail.com>
diff --git a/packages/camera/camera_platform_interface/CHANGELOG.md b/packages/camera/camera_platform_interface/CHANGELOG.md
new file mode 100644
index 0000000..b51eb9c
--- /dev/null
+++ b/packages/camera/camera_platform_interface/CHANGELOG.md
@@ -0,0 +1,123 @@
+## 2.4.0
+
+* Allows camera to be switched while video recording.
+* Updates minimum Flutter version to 3.0.
+
+## 2.3.4
+
+* Updates code for stricter lint checks.
+
+## 2.3.3
+
+* Updates code for stricter lint checks.
+
+## 2.3.2
+
+* Updates MethodChannelCamera to have startVideoRecording call the newer startVideoCapturing.
+
+## 2.3.1
+
+* Exports VideoCaptureOptions to allow dependencies to implement concurrent stream and record.
+
+## 2.3.0
+
+* Adds new capture method for a camera to allow concurrent streaming and recording.
+
+## 2.2.2
+
+* Updates code for `no_leading_underscores_for_local_identifiers` lint.
+
+## 2.2.1
+
+* Updates imports for `prefer_relative_imports`.
+* Updates minimum Flutter version to 2.10.
+* Fixes avoid_redundant_argument_values lint warnings and minor typos.
+* Ignores unnecessary import warnings in preparation for [upcoming Flutter changes](https://github.com/flutter/flutter/pull/104231).
+* Ignores missing return warnings in preparation for [upcoming analysis changes](https://github.com/flutter/flutter/issues/105750).
+
+## 2.2.0
+
+* Adds image streaming to the platform interface.
+* Removes unnecessary imports.
+
+## 2.1.6
+
+* Adopts `Object.hash`.
+* Removes obsolete dependency on `pedantic`.
+
+## 2.1.5
+
+* Fixes asynchronous exceptions handling of the `initializeCamera` method.
+
+## 2.1.4
+
+* Removes dependency on `meta`.
+
+## 2.1.3
+
+* Update to use the `verify` method introduced in platform_plugin_interface 2.1.0.
+
+## 2.1.2
+
+* Adopts new analysis options and fixes all violations.
+
+## 2.1.1
+
+* Add web-relevant docs to platform interface code.
+
+## 2.1.0
+
+* Introduces interface methods for pausing and resuming the camera preview.
+
+## 2.0.1
+
+* Update platform_plugin_interface version requirement.
+
+## 2.0.0
+
+- Stable null safety release.
+
+## 1.6.0
+
+- Added VideoRecordedEvent to support ending a video recording in the native implementation.
+
+## 1.5.0
+
+- Introduces interface methods for locking and unlocking the capture orientation.
+- Introduces interface method for listening to the device orientation.
+
+## 1.4.0
+
+- Added interface methods to support auto focus.
+
+## 1.3.0
+
+- Introduces an option to set the image format when initializing.
+
+## 1.2.0
+
+- Added interface to support automatic exposure.
+
+## 1.1.0
+
+- Added an optional `maxVideoDuration` parameter to the `startVideoRecording` method, which allows implementations to limit the duration of a video recording.
+
+## 1.0.4
+
+- Added the torch option to the FlashMode enum, which when implemented indicates the flash light should be turned on continuously.
+
+## 1.0.3
+
+- Update Flutter SDK constraint.
+
+## 1.0.2
+
+- Added interface methods to support zoom features.
+
+## 1.0.1
+
+- Added interface methods for setting flash mode.
+
+## 1.0.0
+
+- Initial open-source release
diff --git a/packages/camera/camera_platform_interface/LICENSE b/packages/camera/camera_platform_interface/LICENSE
new file mode 100644
index 0000000..c6823b8
--- /dev/null
+++ b/packages/camera/camera_platform_interface/LICENSE
@@ -0,0 +1,25 @@
+Copyright 2013 The Flutter Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/packages/camera/camera_platform_interface/README.md b/packages/camera/camera_platform_interface/README.md
new file mode 100644
index 0000000..43be651
--- /dev/null
+++ b/packages/camera/camera_platform_interface/README.md
@@ -0,0 +1,26 @@
+# camera_platform_interface
+
+A common platform interface for the [`camera`][1] plugin.
+
+This interface allows platform-specific implementations of the `camera`
+plugin, as well as the plugin itself, to ensure they are supporting the
+same interface.
+
+# Usage
+
+To implement a new platform-specific implementation of `camera`, extend
+[`CameraPlatform`][2] with an implementation that performs the
+platform-specific behavior, and when you register your plugin, set the default
+`CameraPlatform` by calling
+`CameraPlatform.instance = MyPlatformCamera()`.
+
+# Note on breaking changes
+
+Strongly prefer non-breaking changes (such as adding a method to the interface)
+over breaking changes for this package.
+
+See https://flutter.dev/go/platform-interface-breaking-changes for a discussion
+on why a less-clean interface is preferable to a breaking change.
+
+[1]: ../camera
+[2]: lib/camera_platform_interface.dart
diff --git a/packages/camera/camera_platform_interface/lib/camera_platform_interface.dart b/packages/camera/camera_platform_interface/lib/camera_platform_interface.dart
new file mode 100644
index 0000000..6fab99b
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/camera_platform_interface.dart
@@ -0,0 +1,11 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// Expose XFile
+export 'package:cross_file/cross_file.dart';
+
+export 'src/events/camera_event.dart';
+export 'src/events/device_event.dart';
+export 'src/platform_interface/camera_platform.dart';
+export 'src/types/types.dart';
diff --git a/packages/camera/camera_platform_interface/lib/src/events/camera_event.dart b/packages/camera/camera_platform_interface/lib/src/events/camera_event.dart
new file mode 100644
index 0000000..a6ace8f
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/events/camera_event.dart
@@ -0,0 +1,287 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart' show immutable;
+
+import '../../camera_platform_interface.dart';
+
+/// Generic Event coming from the native side of Camera,
+/// related to a specific camera module.
+///
+/// All [CameraEvent]s contain the `cameraId` that originated the event. This
+/// should never be `null`.
+///
+/// This class is used as a base class for all the events that might be
+/// triggered from a Camera, but it is never used directly as an event type.
+///
+/// Do NOT instantiate new events like `CameraEvent(cameraId)` directly,
+/// use a specific class instead:
+///
+/// Do `class NewEvent extends CameraEvent` when creating your own events.
+/// See below for examples: `CameraClosingEvent`, `CameraErrorEvent`...
+/// These events are more semantic and more pleasant to use than raw generics.
+/// They can be (and in fact, are) filtered by the `is` operator.
+@immutable
+abstract class CameraEvent {
+ /// Build a Camera Event, that relates a `cameraId`.
+ ///
+ /// The `cameraId` is the ID of the camera that triggered the event.
+ const CameraEvent(this.cameraId) : assert(cameraId != null);
+
+ /// The ID of the Camera this event is associated to.
+ final int cameraId;
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ other is CameraEvent &&
+ runtimeType == other.runtimeType &&
+ cameraId == other.cameraId;
+
+ @override
+ int get hashCode => cameraId.hashCode;
+}
+
+/// An event fired when the camera has finished initializing.
+class CameraInitializedEvent extends CameraEvent {
+ /// Build a CameraInitialized event triggered from the camera represented by
+ /// `cameraId`.
+ ///
+ /// The `previewWidth` represents the width of the generated preview in pixels.
+ /// The `previewHeight` represents the height of the generated preview in pixels.
+ const CameraInitializedEvent(
+ int cameraId,
+ this.previewWidth,
+ this.previewHeight,
+ this.exposureMode,
+ this.exposurePointSupported,
+ this.focusMode,
+ this.focusPointSupported,
+ ) : super(cameraId);
+
+ /// Converts the supplied [Map] to an instance of the [CameraInitializedEvent]
+ /// class.
+ CameraInitializedEvent.fromJson(Map<String, dynamic> json)
+ : previewWidth = json['previewWidth']! as double,
+ previewHeight = json['previewHeight']! as double,
+ exposureMode = deserializeExposureMode(json['exposureMode']! as String),
+ exposurePointSupported =
+ (json['exposurePointSupported'] as bool?) ?? false,
+ focusMode = deserializeFocusMode(json['focusMode']! as String),
+ focusPointSupported = (json['focusPointSupported'] as bool?) ?? false,
+ super(json['cameraId']! as int);
+
+ /// The width of the preview in pixels.
+ final double previewWidth;
+
+ /// The height of the preview in pixels.
+ final double previewHeight;
+
+ /// The default exposure mode
+ final ExposureMode exposureMode;
+
+ /// The default focus mode
+ final FocusMode focusMode;
+
+ /// Whether setting exposure points is supported.
+ final bool exposurePointSupported;
+
+ /// Whether setting focus points is supported.
+ final bool focusPointSupported;
+
+ /// Converts the [CameraInitializedEvent] instance into a [Map] instance that
+ /// can be serialized to JSON.
+ Map<String, dynamic> toJson() => <String, Object>{
+ 'cameraId': cameraId,
+ 'previewWidth': previewWidth,
+ 'previewHeight': previewHeight,
+ 'exposureMode': serializeExposureMode(exposureMode),
+ 'exposurePointSupported': exposurePointSupported,
+ 'focusMode': serializeFocusMode(focusMode),
+ 'focusPointSupported': focusPointSupported,
+ };
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ super == other &&
+ other is CameraInitializedEvent &&
+ runtimeType == other.runtimeType &&
+ previewWidth == other.previewWidth &&
+ previewHeight == other.previewHeight &&
+ exposureMode == other.exposureMode &&
+ exposurePointSupported == other.exposurePointSupported &&
+ focusMode == other.focusMode &&
+ focusPointSupported == other.focusPointSupported;
+
+ @override
+ int get hashCode => Object.hash(
+ super.hashCode,
+ previewWidth,
+ previewHeight,
+ exposureMode,
+ exposurePointSupported,
+ focusMode,
+ focusPointSupported,
+ );
+}
+
+/// An event fired when the resolution preset of the camera has changed.
+class CameraResolutionChangedEvent extends CameraEvent {
+ /// Build a CameraResolutionChanged event triggered from the camera
+ /// represented by `cameraId`.
+ ///
+ /// The `captureWidth` represents the width of the resulting image in pixels.
+ /// The `captureHeight` represents the height of the resulting image in pixels.
+ const CameraResolutionChangedEvent(
+ int cameraId,
+ this.captureWidth,
+ this.captureHeight,
+ ) : super(cameraId);
+
+ /// Converts the supplied [Map] to an instance of the
+ /// [CameraResolutionChangedEvent] class.
+ CameraResolutionChangedEvent.fromJson(Map<String, dynamic> json)
+ : captureWidth = json['captureWidth']! as double,
+ captureHeight = json['captureHeight']! as double,
+ super(json['cameraId']! as int);
+
+ /// The capture width in pixels.
+ final double captureWidth;
+
+ /// The capture height in pixels.
+ final double captureHeight;
+
+ /// Converts the [CameraResolutionChangedEvent] instance into a [Map] instance
+ /// that can be serialized to JSON.
+ Map<String, dynamic> toJson() => <String, Object>{
+ 'cameraId': cameraId,
+ 'captureWidth': captureWidth,
+ 'captureHeight': captureHeight,
+ };
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ other is CameraResolutionChangedEvent &&
+ super == other &&
+ runtimeType == other.runtimeType &&
+ captureWidth == other.captureWidth &&
+ captureHeight == other.captureHeight;
+
+ @override
+ int get hashCode => Object.hash(super.hashCode, captureWidth, captureHeight);
+}
+
+/// An event fired when the camera is going to close.
+class CameraClosingEvent extends CameraEvent {
+ /// Build a CameraClosing event triggered from the camera represented by
+ /// `cameraId`.
+ const CameraClosingEvent(int cameraId) : super(cameraId);
+
+ /// Converts the supplied [Map] to an instance of the [CameraClosingEvent]
+ /// class.
+ CameraClosingEvent.fromJson(Map<String, dynamic> json)
+ : super(json['cameraId']! as int);
+
+ /// Converts the [CameraClosingEvent] instance into a [Map] instance that can
+ /// be serialized to JSON.
+ Map<String, dynamic> toJson() => <String, Object>{
+ 'cameraId': cameraId,
+ };
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ super == other &&
+ other is CameraClosingEvent &&
+ runtimeType == other.runtimeType;
+
+ @override
+ // This is here even though it just calls super to make it less likely that
+ // operator== would be changed without changing `hashCode`.
+ // ignore: unnecessary_overrides
+ int get hashCode => super.hashCode;
+}
+
+/// An event fired when an error occurred while operating the camera.
+class CameraErrorEvent extends CameraEvent {
+ /// Build a CameraError event triggered from the camera represented by
+ /// `cameraId`.
+ ///
+ /// The `description` represents the error that occurred on the camera.
+ const CameraErrorEvent(int cameraId, this.description) : super(cameraId);
+
+ /// Converts the supplied [Map] to an instance of the [CameraErrorEvent]
+ /// class.
+ CameraErrorEvent.fromJson(Map<String, dynamic> json)
+ : description = json['description']! as String,
+ super(json['cameraId']! as int);
+
+ /// Description of the error.
+ final String description;
+
+ /// Converts the [CameraErrorEvent] instance into a [Map] instance that can be
+ /// serialized to JSON.
+ Map<String, dynamic> toJson() => <String, Object>{
+ 'cameraId': cameraId,
+ 'description': description,
+ };
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ super == other &&
+ other is CameraErrorEvent &&
+ runtimeType == other.runtimeType &&
+ description == other.description;
+
+ @override
+ int get hashCode => Object.hash(super.hashCode, description);
+}
+
+/// An event fired when a video has finished recording.
+class VideoRecordedEvent extends CameraEvent {
+ /// Build a VideoRecordedEvent triggered from the camera with the `cameraId`.
+ ///
+ /// The `file` represents the file of the video.
+ /// The `maxVideoDuration` shows whether a maximum video duration was
+ /// set.
+ const VideoRecordedEvent(int cameraId, this.file, this.maxVideoDuration)
+ : super(cameraId);
+
+ /// Converts the supplied [Map] to an instance of the [VideoRecordedEvent]
+ /// class.
+ VideoRecordedEvent.fromJson(Map<String, dynamic> json)
+ : file = XFile(json['path']! as String),
+ maxVideoDuration = json['maxVideoDuration'] != null
+ ? Duration(milliseconds: json['maxVideoDuration'] as int)
+ : null,
+ super(json['cameraId']! as int);
+
+ /// XFile of the recorded video.
+ final XFile file;
+
+ /// Maximum duration of the recorded video.
+ final Duration? maxVideoDuration;
+
+ /// Converts the [VideoRecordedEvent] instance into a [Map] instance that can be
+ /// serialized to JSON.
+ Map<String, dynamic> toJson() => <String, Object?>{
+ 'cameraId': cameraId,
+ 'path': file.path,
+ 'maxVideoDuration': maxVideoDuration?.inMilliseconds
+ };
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ super == other &&
+ other is VideoRecordedEvent &&
+ runtimeType == other.runtimeType &&
+ maxVideoDuration == other.maxVideoDuration;
+
+ @override
+ int get hashCode => Object.hash(super.hashCode, file, maxVideoDuration);
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/events/device_event.dart b/packages/camera/camera_platform_interface/lib/src/events/device_event.dart
new file mode 100644
index 0000000..65a378f
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/events/device_event.dart
@@ -0,0 +1,58 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart' show immutable;
+import 'package:flutter/services.dart';
+
+import '../utils/utils.dart';
+
+/// Generic Event coming from the native side of Camera,
+/// not related to a specific camera module.
+///
+/// This class is used as a base class for all the events that might be
+/// triggered from a device, but it is never used directly as an event type.
+///
+/// Do NOT instantiate new events like `DeviceEvent()` directly,
+/// use a specific class instead:
+///
+/// Do `class NewEvent extends DeviceEvent` when creating your own events.
+/// See below for examples: `DeviceOrientationChangedEvent`...
+/// These events are more semantic and more pleasant to use than raw generics.
+/// They can be (and in fact, are) filtered by the `is` operator.
+@immutable
+abstract class DeviceEvent {
+ /// Creates a new device event.
+ const DeviceEvent();
+}
+
+/// The [DeviceOrientationChangedEvent] is fired every time the orientation of the device UI changes.
+class DeviceOrientationChangedEvent extends DeviceEvent {
+ /// Build a new orientation changed event.
+ const DeviceOrientationChangedEvent(this.orientation);
+
+ /// Converts the supplied [Map] to an instance of the [DeviceOrientationChangedEvent]
+ /// class.
+ DeviceOrientationChangedEvent.fromJson(Map<String, dynamic> json)
+ : orientation =
+ deserializeDeviceOrientation(json['orientation']! as String);
+
+ /// The new orientation of the device
+ final DeviceOrientation orientation;
+
+ /// Converts the [DeviceOrientationChangedEvent] instance into a [Map] instance that
+ /// can be serialized to JSON.
+ Map<String, dynamic> toJson() => <String, Object>{
+ 'orientation': serializeDeviceOrientation(orientation),
+ };
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ other is DeviceOrientationChangedEvent &&
+ runtimeType == other.runtimeType &&
+ orientation == other.orientation;
+
+ @override
+ int get hashCode => orientation.hashCode;
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/method_channel/method_channel_camera.dart b/packages/camera/camera_platform_interface/lib/src/method_channel/method_channel_camera.dart
new file mode 100644
index 0000000..14d20fc
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/method_channel/method_channel_camera.dart
@@ -0,0 +1,632 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:stream_transform/stream_transform.dart';
+
+import '../../camera_platform_interface.dart';
+import '../utils/utils.dart';
+import 'type_conversion.dart';
+
+/// The shared method channel used for all top-level camera plugin calls.
+const MethodChannel _channel = MethodChannel('plugins.flutter.io/camera');
+
+/// An implementation of [CameraPlatform] that uses method channels.
+class MethodChannelCamera extends CameraPlatform {
+  /// Construct a new method channel camera instance.
+  MethodChannelCamera() {
+    const MethodChannel channel =
+        MethodChannel('flutter.io/cameraPlugin/device');
+    channel.setMethodCallHandler(
+        (MethodCall call) => handleDeviceMethodCall(call));
+  }
+
+  /// Per-camera method channels, keyed by camera id.
+  ///
+  /// Entries are created in [initializeCamera] and removed in [dispose].
+  final Map<int, MethodChannel> _channels = <int, MethodChannel>{};
+
+  /// The controller we need to broadcast the different events coming
+  /// from handleMethodCall, specific to camera events.
+  ///
+  /// It is a `broadcast` because multiple controllers will connect to
+  /// different stream views of this Controller.
+  /// This is only exposed for test purposes. It shouldn't be used by clients of
+  /// the plugin as it may break or change at any time.
+  @visibleForTesting
+  final StreamController<CameraEvent> cameraEventStreamController =
+      StreamController<CameraEvent>.broadcast();
+
+  /// The controller we need to broadcast the different events coming
+  /// from handleMethodCall, specific to general device events.
+  ///
+  /// It is a `broadcast` because multiple controllers will connect to
+  /// different stream views of this Controller.
+  /// This is only exposed for test purposes. It shouldn't be used by clients of
+  /// the plugin as it may break or change at any time.
+  @visibleForTesting
+  final StreamController<DeviceEvent> deviceEventStreamController =
+      StreamController<DeviceEvent>.broadcast();
+
+  // The stream to receive frames from the native code.
+  StreamSubscription<dynamic>? _platformImageStreamSubscription;
+
+  // The stream for vending frames to platform interface clients.
+  StreamController<CameraImageData>? _frameStreamController;
+
+  // Filters the camera event stream down to the events for [cameraId].
+  Stream<CameraEvent> _cameraEvents(int cameraId) =>
+      cameraEventStreamController.stream
+          .where((CameraEvent event) => event.cameraId == cameraId);
+
+  @override
+  Future<List<CameraDescription>> availableCameras() async {
+    try {
+      final List<Map<dynamic, dynamic>>? cameras = await _channel
+          .invokeListMethod<Map<dynamic, dynamic>>('availableCameras');
+
+      if (cameras == null) {
+        return <CameraDescription>[];
+      }
+
+      return cameras.map((Map<dynamic, dynamic> camera) {
+        return CameraDescription(
+          name: camera['name']! as String,
+          lensDirection:
+              parseCameraLensDirection(camera['lensFacing']! as String),
+          sensorOrientation: camera['sensorOrientation']! as int,
+        );
+      }).toList();
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  @override
+  Future<int> createCamera(
+    CameraDescription cameraDescription,
+    ResolutionPreset? resolutionPreset, {
+    bool enableAudio = false,
+  }) async {
+    try {
+      final Map<String, dynamic>? reply = await _channel
+          .invokeMapMethod<String, dynamic>('create', <String, dynamic>{
+        'cameraName': cameraDescription.name,
+        'resolutionPreset': resolutionPreset != null
+            ? _serializeResolutionPreset(resolutionPreset)
+            : null,
+        'enableAudio': enableAudio,
+      });
+
+      return reply!['cameraId']! as int;
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  @override
+  Future<void> initializeCamera(
+    int cameraId, {
+    ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+  }) {
+    // Lazily create the per-camera channel so camera events can be routed
+    // to [handleCameraMethodCall] with the right camera id.
+    _channels.putIfAbsent(cameraId, () {
+      final MethodChannel channel =
+          MethodChannel('flutter.io/cameraPlugin/camera$cameraId');
+      channel.setMethodCallHandler(
+          (MethodCall call) => handleCameraMethodCall(call, cameraId));
+      return channel;
+    });
+
+    final Completer<void> completer = Completer<void>();
+
+    // The future completes when the native side reports 'initialized'.
+    onCameraInitialized(cameraId).first.then((CameraInitializedEvent value) {
+      completer.complete();
+    });
+
+    _channel.invokeMapMethod<String, dynamic>(
+      'initialize',
+      <String, dynamic>{
+        'cameraId': cameraId,
+        'imageFormatGroup': imageFormatGroup.name(),
+      },
+    ).catchError(
+      // TODO(srawlins): This should return a value of the future's type. This
+      // will fail upcoming analysis checks with
+      // https://github.com/flutter/flutter/issues/105750.
+      // ignore: body_might_complete_normally_catch_error
+      (Object error, StackTrace stackTrace) {
+        if (error is! PlatformException) {
+          // ignore: only_throw_errors
+          throw error;
+        }
+        completer.completeError(
+          CameraException(error.code, error.message),
+          stackTrace,
+        );
+      },
+    );
+
+    return completer.future;
+  }
+
+  @override
+  Future<void> dispose(int cameraId) async {
+    // Remove and tear down the per-camera channel, if one was created.
+    final MethodChannel? cameraChannel = _channels.remove(cameraId);
+    cameraChannel?.setMethodCallHandler(null);
+
+    await _channel.invokeMethod<void>(
+      'dispose',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+  }
+
+  @override
+  Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+    return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
+  }
+
+  @override
+  Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
+    return _cameraEvents(cameraId).whereType<CameraResolutionChangedEvent>();
+  }
+
+  @override
+  Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
+    return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
+  }
+
+  @override
+  Stream<CameraErrorEvent> onCameraError(int cameraId) {
+    return _cameraEvents(cameraId).whereType<CameraErrorEvent>();
+  }
+
+  @override
+  Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
+    return _cameraEvents(cameraId).whereType<VideoRecordedEvent>();
+  }
+
+  @override
+  Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+    return deviceEventStreamController.stream
+        .whereType<DeviceOrientationChangedEvent>();
+  }
+
+  @override
+  Future<void> lockCaptureOrientation(
+    int cameraId,
+    DeviceOrientation orientation,
+  ) async {
+    await _channel.invokeMethod<String>(
+      'lockCaptureOrientation',
+      <String, dynamic>{
+        'cameraId': cameraId,
+        'orientation': serializeDeviceOrientation(orientation)
+      },
+    );
+  }
+
+  @override
+  Future<void> unlockCaptureOrientation(int cameraId) async {
+    await _channel.invokeMethod<String>(
+      'unlockCaptureOrientation',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+  }
+
+  @override
+  Future<XFile> takePicture(int cameraId) async {
+    final String? path = await _channel.invokeMethod<String>(
+      'takePicture',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+
+    if (path == null) {
+      throw CameraException(
+        'INVALID_PATH',
+        'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+      );
+    }
+
+    return XFile(path);
+  }
+
+  @override
+  Future<void> prepareForVideoRecording() =>
+      _channel.invokeMethod<void>('prepareForVideoRecording');
+
+  @override
+  Future<void> startVideoRecording(int cameraId,
+      {Duration? maxVideoDuration}) async {
+    return startVideoCapturing(
+        VideoCaptureOptions(cameraId, maxDuration: maxVideoDuration));
+  }
+
+  @override
+  Future<void> startVideoCapturing(VideoCaptureOptions options) async {
+    await _channel.invokeMethod<void>(
+      'startVideoRecording',
+      <String, dynamic>{
+        'cameraId': options.cameraId,
+        'maxVideoDuration': options.maxDuration?.inMilliseconds,
+        'enableStream': options.streamCallback != null,
+      },
+    );
+
+    if (options.streamCallback != null) {
+      _installStreamController().stream.listen(options.streamCallback);
+      _startStreamListener();
+    }
+  }
+
+  @override
+  Future<XFile> stopVideoRecording(int cameraId) async {
+    final String? path = await _channel.invokeMethod<String>(
+      'stopVideoRecording',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+
+    if (path == null) {
+      throw CameraException(
+        'INVALID_PATH',
+        'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
+      );
+    }
+
+    return XFile(path);
+  }
+
+  @override
+  Future<void> pauseVideoRecording(int cameraId) => _channel.invokeMethod<void>(
+        'pauseVideoRecording',
+        <String, dynamic>{'cameraId': cameraId},
+      );
+
+  @override
+  Future<void> resumeVideoRecording(int cameraId) =>
+      _channel.invokeMethod<void>(
+        'resumeVideoRecording',
+        <String, dynamic>{'cameraId': cameraId},
+      );
+
+  @override
+  Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
+      {CameraImageStreamOptions? options}) {
+    _installStreamController(onListen: _onFrameStreamListen);
+    return _frameStreamController!.stream;
+  }
+
+  // Creates (and replaces) the frame stream controller used to vend
+  // [CameraImageData] to clients.
+  StreamController<CameraImageData> _installStreamController(
+      {Function()? onListen}) {
+    _frameStreamController = StreamController<CameraImageData>(
+      onListen: onListen ?? () {},
+      onPause: _onFrameStreamPauseResume,
+      onResume: _onFrameStreamPauseResume,
+      onCancel: _onFrameStreamCancel,
+    );
+    return _frameStreamController!;
+  }
+
+  void _onFrameStreamListen() {
+    _startPlatformStream();
+  }
+
+  // Asks the native side to start streaming images, then begins listening.
+  Future<void> _startPlatformStream() async {
+    await _channel.invokeMethod<void>('startImageStream');
+    _startStreamListener();
+  }
+
+  // Subscribes to the native image stream event channel and forwards frames
+  // into [_frameStreamController].
+  void _startStreamListener() {
+    const EventChannel cameraEventChannel =
+        EventChannel('plugins.flutter.io/camera/imageStream');
+    _platformImageStreamSubscription =
+        cameraEventChannel.receiveBroadcastStream().listen((dynamic imageData) {
+      if (defaultTargetPlatform == TargetPlatform.iOS) {
+        // Acknowledges receipt of the frame to the native side; presumably an
+        // iOS-specific flow-control handshake — confirm against the native
+        // implementation.
+        try {
+          _channel.invokeMethod<void>('receivedImageStreamData');
+        } on PlatformException catch (e) {
+          throw CameraException(e.code, e.message);
+        }
+      }
+      _frameStreamController!
+          .add(cameraImageFromPlatformData(imageData as Map<dynamic, dynamic>));
+    });
+  }
+
+  // Stops the native image stream and tears down the Dart-side subscription
+  // and controller.
+  FutureOr<void> _onFrameStreamCancel() async {
+    await _channel.invokeMethod<void>('stopImageStream');
+    await _platformImageStreamSubscription?.cancel();
+    _platformImageStreamSubscription = null;
+    _frameStreamController = null;
+  }
+
+  void _onFrameStreamPauseResume() {
+    throw CameraException('InvalidCall',
+        'Pause and resume are not supported for onStreamedFrameAvailable');
+  }
+
+  @override
+  Future<void> setFlashMode(int cameraId, FlashMode mode) =>
+      _channel.invokeMethod<void>(
+        'setFlashMode',
+        <String, dynamic>{
+          'cameraId': cameraId,
+          'mode': _serializeFlashMode(mode),
+        },
+      );
+
+  @override
+  Future<void> setExposureMode(int cameraId, ExposureMode mode) =>
+      _channel.invokeMethod<void>(
+        'setExposureMode',
+        <String, dynamic>{
+          'cameraId': cameraId,
+          'mode': serializeExposureMode(mode),
+        },
+      );
+
+  @override
+  Future<void> setExposurePoint(int cameraId, Point<double>? point) {
+    assert(point == null || point.x >= 0 && point.x <= 1);
+    assert(point == null || point.y >= 0 && point.y <= 1);
+
+    return _channel.invokeMethod<void>(
+      'setExposurePoint',
+      <String, dynamic>{
+        'cameraId': cameraId,
+        'reset': point == null,
+        'x': point?.x,
+        'y': point?.y,
+      },
+    );
+  }
+
+  @override
+  Future<double> getMinExposureOffset(int cameraId) async {
+    final double? minExposureOffset = await _channel.invokeMethod<double>(
+      'getMinExposureOffset',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+
+    return minExposureOffset!;
+  }
+
+  @override
+  Future<double> getMaxExposureOffset(int cameraId) async {
+    final double? maxExposureOffset = await _channel.invokeMethod<double>(
+      'getMaxExposureOffset',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+
+    return maxExposureOffset!;
+  }
+
+  @override
+  Future<double> getExposureOffsetStepSize(int cameraId) async {
+    final double? stepSize = await _channel.invokeMethod<double>(
+      'getExposureOffsetStepSize',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+
+    return stepSize!;
+  }
+
+  @override
+  Future<double> setExposureOffset(int cameraId, double offset) async {
+    final double? appliedOffset = await _channel.invokeMethod<double>(
+      'setExposureOffset',
+      <String, dynamic>{
+        'cameraId': cameraId,
+        'offset': offset,
+      },
+    );
+
+    return appliedOffset!;
+  }
+
+  @override
+  Future<void> setFocusMode(int cameraId, FocusMode mode) =>
+      _channel.invokeMethod<void>(
+        'setFocusMode',
+        <String, dynamic>{
+          'cameraId': cameraId,
+          'mode': serializeFocusMode(mode),
+        },
+      );
+
+  @override
+  Future<void> setFocusPoint(int cameraId, Point<double>? point) {
+    assert(point == null || point.x >= 0 && point.x <= 1);
+    assert(point == null || point.y >= 0 && point.y <= 1);
+
+    return _channel.invokeMethod<void>(
+      'setFocusPoint',
+      <String, dynamic>{
+        'cameraId': cameraId,
+        'reset': point == null,
+        'x': point?.x,
+        'y': point?.y,
+      },
+    );
+  }
+
+  @override
+  Future<double> getMaxZoomLevel(int cameraId) async {
+    final double? maxZoomLevel = await _channel.invokeMethod<double>(
+      'getMaxZoomLevel',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+
+    return maxZoomLevel!;
+  }
+
+  @override
+  Future<double> getMinZoomLevel(int cameraId) async {
+    final double? minZoomLevel = await _channel.invokeMethod<double>(
+      'getMinZoomLevel',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+
+    return minZoomLevel!;
+  }
+
+  @override
+  Future<void> setZoomLevel(int cameraId, double zoom) async {
+    try {
+      await _channel.invokeMethod<double>(
+        'setZoomLevel',
+        <String, dynamic>{
+          'cameraId': cameraId,
+          'zoom': zoom,
+        },
+      );
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    }
+  }
+
+  @override
+  Future<void> pausePreview(int cameraId) async {
+    await _channel.invokeMethod<double>(
+      'pausePreview',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+  }
+
+  @override
+  Future<void> resumePreview(int cameraId) async {
+    await _channel.invokeMethod<double>(
+      'resumePreview',
+      <String, dynamic>{'cameraId': cameraId},
+    );
+  }
+
+  @override
+  Future<void> setDescriptionWhileRecording(
+      CameraDescription description) async {
+    await _channel.invokeMethod<double>(
+      'setDescriptionWhileRecording',
+      <String, dynamic>{
+        'cameraName': description.name,
+      },
+    );
+  }
+
+  @override
+  Widget buildPreview(int cameraId) {
+    return Texture(textureId: cameraId);
+  }
+
+  /// Returns the flash mode as a String.
+  String _serializeFlashMode(FlashMode flashMode) {
+    switch (flashMode) {
+      case FlashMode.off:
+        return 'off';
+      case FlashMode.auto:
+        return 'auto';
+      case FlashMode.always:
+        return 'always';
+      case FlashMode.torch:
+        return 'torch';
+    }
+  }
+
+  /// Returns the resolution preset as a String.
+  String _serializeResolutionPreset(ResolutionPreset resolutionPreset) {
+    switch (resolutionPreset) {
+      case ResolutionPreset.max:
+        return 'max';
+      case ResolutionPreset.ultraHigh:
+        return 'ultraHigh';
+      case ResolutionPreset.veryHigh:
+        return 'veryHigh';
+      case ResolutionPreset.high:
+        return 'high';
+      case ResolutionPreset.medium:
+        return 'medium';
+      case ResolutionPreset.low:
+        return 'low';
+    }
+  }
+
+  /// Converts messages received from the native platform into device events.
+  ///
+  /// This is only exposed for test purposes. It shouldn't be used by clients of
+  /// the plugin as it may break or change at any time.
+  @visibleForTesting
+  Future<dynamic> handleDeviceMethodCall(MethodCall call) async {
+    switch (call.method) {
+      case 'orientation_changed':
+        final Map<String, Object?> arguments = _getArgumentDictionary(call);
+        deviceEventStreamController.add(DeviceOrientationChangedEvent(
+            deserializeDeviceOrientation(arguments['orientation']! as String)));
+        break;
+      default:
+        throw MissingPluginException();
+    }
+  }
+
+  /// Converts messages received from the native platform into camera events.
+  ///
+  /// This is only exposed for test purposes. It shouldn't be used by clients of
+  /// the plugin as it may break or change at any time.
+  @visibleForTesting
+  Future<dynamic> handleCameraMethodCall(MethodCall call, int cameraId) async {
+    switch (call.method) {
+      case 'initialized':
+        final Map<String, Object?> arguments = _getArgumentDictionary(call);
+        cameraEventStreamController.add(CameraInitializedEvent(
+          cameraId,
+          arguments['previewWidth']! as double,
+          arguments['previewHeight']! as double,
+          deserializeExposureMode(arguments['exposureMode']! as String),
+          arguments['exposurePointSupported']! as bool,
+          deserializeFocusMode(arguments['focusMode']! as String),
+          arguments['focusPointSupported']! as bool,
+        ));
+        break;
+      case 'resolution_changed':
+        final Map<String, Object?> arguments = _getArgumentDictionary(call);
+        cameraEventStreamController.add(CameraResolutionChangedEvent(
+          cameraId,
+          arguments['captureWidth']! as double,
+          arguments['captureHeight']! as double,
+        ));
+        break;
+      case 'camera_closing':
+        cameraEventStreamController.add(CameraClosingEvent(
+          cameraId,
+        ));
+        break;
+      case 'video_recorded':
+        final Map<String, Object?> arguments = _getArgumentDictionary(call);
+        cameraEventStreamController.add(VideoRecordedEvent(
+          cameraId,
+          XFile(arguments['path']! as String),
+          arguments['maxVideoDuration'] != null
+              ? Duration(milliseconds: arguments['maxVideoDuration']! as int)
+              : null,
+        ));
+        break;
+      case 'error':
+        final Map<String, Object?> arguments = _getArgumentDictionary(call);
+        cameraEventStreamController.add(CameraErrorEvent(
+          cameraId,
+          arguments['description']! as String,
+        ));
+        break;
+      default:
+        throw MissingPluginException();
+    }
+  }
+
+  /// Returns the arguments of [call] as typed string-keyed Map.
+  ///
+  /// This does not do any type validation, so is only safe to call if the
+  /// arguments are known to be a map.
+  Map<String, Object?> _getArgumentDictionary(MethodCall call) {
+    return (call.arguments as Map<Object?, Object?>).cast<String, Object?>();
+  }
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/method_channel/type_conversion.dart b/packages/camera/camera_platform_interface/lib/src/method_channel/type_conversion.dart
new file mode 100644
index 0000000..8b36007
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/method_channel/type_conversion.dart
@@ -0,0 +1,63 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:flutter/foundation.dart';
+
+import '../types/types.dart';
+
+/// Converts method channel call [data] for `receivedImageStreamData` to a
+/// [CameraImageData].
+CameraImageData cameraImageFromPlatformData(Map<dynamic, dynamic> data) {
+  // Convert each raw plane map into a typed plane before assembling the image.
+  final List<dynamic> rawPlanes = data['planes'] as List<dynamic>;
+  final List<CameraImagePlane> planes = List<CameraImagePlane>.unmodifiable(
+      rawPlanes.map<CameraImagePlane>((dynamic planeData) =>
+          _cameraImagePlaneFromPlatformData(
+              planeData as Map<dynamic, dynamic>)));
+
+  return CameraImageData(
+      format: _cameraImageFormatFromPlatformData(data['format']),
+      height: data['height'] as int,
+      width: data['width'] as int,
+      lensAperture: data['lensAperture'] as double?,
+      sensorExposureTime: data['sensorExposureTime'] as int?,
+      sensorSensitivity: data['sensorSensitivity'] as double?,
+      planes: planes);
+}
+
+// Wraps the raw platform-specific format value [data] together with its
+// cross-platform [ImageFormatGroup] interpretation.
+CameraImageFormat _cameraImageFormatFromPlatformData(dynamic data) {
+  return CameraImageFormat(_imageFormatGroupFromPlatformData(data), raw: data);
+}
+
+// Maps a raw platform pixel-format constant to an [ImageFormatGroup].
+//
+// Values that are not recognized for the current platform (and all other
+// platforms) fall through to [ImageFormatGroup.unknown].
+ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
+  switch (defaultTargetPlatform) {
+    case TargetPlatform.android:
+      if (data == 35) {
+        // android.graphics.ImageFormat.YUV_420_888
+        return ImageFormatGroup.yuv420;
+      }
+      if (data == 256) {
+        // android.graphics.ImageFormat.JPEG
+        return ImageFormatGroup.jpeg;
+      }
+      break;
+    case TargetPlatform.iOS:
+      if (data == 875704438) {
+        // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+        return ImageFormatGroup.yuv420;
+      }
+      if (data == 1111970369) {
+        // kCVPixelFormatType_32BGRA
+        return ImageFormatGroup.bgra8888;
+      }
+      break;
+    default:
+      break;
+  }
+
+  return ImageFormatGroup.unknown;
+}
+
+// Converts a single plane entry of the platform [data] map into a
+// [CameraImagePlane]. Only `bytes` and `bytesPerRow` are required by the
+// platform contract; the remaining fields are nullable.
+CameraImagePlane _cameraImagePlaneFromPlatformData(Map<dynamic, dynamic> data) {
+  return CameraImagePlane(
+      bytes: data['bytes'] as Uint8List,
+      bytesPerPixel: data['bytesPerPixel'] as int?,
+      bytesPerRow: data['bytesPerRow'] as int,
+      height: data['height'] as int?,
+      width: data['width'] as int?);
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/platform_interface/camera_platform.dart b/packages/camera/camera_platform_interface/lib/src/platform_interface/camera_platform.dart
new file mode 100644
index 0000000..b43629d
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/platform_interface/camera_platform.dart
@@ -0,0 +1,287 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:plugin_platform_interface/plugin_platform_interface.dart';
+
+import '../../camera_platform_interface.dart';
+import '../method_channel/method_channel_camera.dart';
+
+/// The interface that implementations of camera must implement.
+///
+/// Platform implementations should extend this class rather than implement it as `camera`
+/// does not consider newly added methods to be breaking changes. Extending this class
+/// (using `extends`) ensures that the subclass will get the default implementation, while
+/// platform implementations that `implements` this interface will be broken by newly added
+/// [CameraPlatform] methods.
+abstract class CameraPlatform extends PlatformInterface {
+  /// Constructs a CameraPlatform.
+  CameraPlatform() : super(token: _token);
+
+  static final Object _token = Object();
+
+  static CameraPlatform _instance = MethodChannelCamera();
+
+  /// The default instance of [CameraPlatform] to use.
+  ///
+  /// Defaults to [MethodChannelCamera].
+  static CameraPlatform get instance => _instance;
+
+  /// Platform-specific plugins should set this with their own platform-specific
+  /// class that extends [CameraPlatform] when they register themselves.
+  static set instance(CameraPlatform instance) {
+    PlatformInterface.verify(instance, _token);
+    _instance = instance;
+  }
+
+  /// Completes with a list of available cameras.
+  ///
+  /// This method returns an empty list when no cameras are available.
+  Future<List<CameraDescription>> availableCameras() {
+    throw UnimplementedError('availableCameras() is not implemented.');
+  }
+
+  /// Creates an uninitialized camera instance and returns the cameraId.
+  Future<int> createCamera(
+    CameraDescription cameraDescription,
+    ResolutionPreset? resolutionPreset, {
+    bool enableAudio = false,
+  }) {
+    throw UnimplementedError('createCamera() is not implemented.');
+  }
+
+  /// Initializes the camera on the device.
+  ///
+  /// [imageFormatGroup] is used to specify the image formatting used.
+  /// On Android this defaults to ImageFormat.YUV_420_888 and applies only to the imageStream.
+  /// On iOS this defaults to kCVPixelFormatType_32BGRA.
+  /// On Web this parameter is currently not supported.
+  Future<void> initializeCamera(
+    int cameraId, {
+    ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+  }) {
+    throw UnimplementedError('initializeCamera() is not implemented.');
+  }
+
+  /// The camera has been initialized.
+  Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+    throw UnimplementedError('onCameraInitialized() is not implemented.');
+  }
+
+  /// The camera's resolution has changed.
+  /// On Web this returns an empty stream.
+  Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
+    throw UnimplementedError('onCameraResolutionChanged() is not implemented.');
+  }
+
+  /// The camera started to close.
+  Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
+    throw UnimplementedError('onCameraClosing() is not implemented.');
+  }
+
+  /// The camera experienced an error.
+  Stream<CameraErrorEvent> onCameraError(int cameraId) {
+    throw UnimplementedError('onCameraError() is not implemented.');
+  }
+
+  /// The camera finished recording a video.
+  Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
+    throw UnimplementedError('onVideoRecordedEvent() is not implemented.');
+  }
+
+  /// The ui orientation changed.
+  ///
+  /// Implementations for this:
+  /// - Should support all 4 orientations.
+  Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+    throw UnimplementedError(
+        'onDeviceOrientationChanged() is not implemented.');
+  }
+
+  /// Locks the capture orientation.
+  Future<void> lockCaptureOrientation(
+      int cameraId, DeviceOrientation orientation) {
+    throw UnimplementedError('lockCaptureOrientation() is not implemented.');
+  }
+
+  /// Unlocks the capture orientation.
+  Future<void> unlockCaptureOrientation(int cameraId) {
+    throw UnimplementedError('unlockCaptureOrientation() is not implemented.');
+  }
+
+  /// Captures an image and returns the file where it was saved.
+  Future<XFile> takePicture(int cameraId) {
+    throw UnimplementedError('takePicture() is not implemented.');
+  }
+
+  /// Prepare the capture session for video recording.
+  Future<void> prepareForVideoRecording() {
+    throw UnimplementedError('prepareForVideoRecording() is not implemented.');
+  }
+
+  /// Starts a video recording.
+  ///
+  /// The length of the recording can be limited by specifying the [maxVideoDuration].
+  /// By default no maximum duration is specified,
+  /// meaning the recording will continue until manually stopped.
+  /// With [maxVideoDuration] set the video is returned in a [VideoRecordedEvent]
+  /// through the [onVideoRecordedEvent] stream when the set duration is reached.
+  ///
+  /// This method is deprecated in favour of [startVideoCapturing].
+  Future<void> startVideoRecording(int cameraId, {Duration? maxVideoDuration}) {
+    throw UnimplementedError('startVideoRecording() is not implemented.');
+  }
+
+  /// Starts a video recording and/or streaming session.
+  ///
+  /// Please see [VideoCaptureOptions] for documentation on the
+  /// configuration options.
+  Future<void> startVideoCapturing(VideoCaptureOptions options) {
+    return startVideoRecording(options.cameraId,
+        maxVideoDuration: options.maxDuration);
+  }
+
+  /// Stops the video recording and returns the file where it was saved.
+  Future<XFile> stopVideoRecording(int cameraId) {
+    throw UnimplementedError('stopVideoRecording() is not implemented.');
+  }
+
+  /// Pause video recording.
+  Future<void> pauseVideoRecording(int cameraId) {
+    throw UnimplementedError('pauseVideoRecording() is not implemented.');
+  }
+
+  /// Resume video recording after pausing.
+  Future<void> resumeVideoRecording(int cameraId) {
+    throw UnimplementedError('resumeVideoRecording() is not implemented.');
+  }
+
+  /// A new streamed frame is available.
+  ///
+  /// Listening to this stream will start streaming, and canceling will stop.
+  /// Pausing will throw a [CameraException], as pausing the stream would cause
+  /// very high memory usage; to temporarily stop receiving frames, cancel, then
+  /// listen again later.
+  ///
+  ///
+  // TODO(bmparr): Add options to control streaming settings (e.g.,
+  // resolution and FPS).
+  Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
+      {CameraImageStreamOptions? options}) {
+    throw UnimplementedError('onStreamedFrameAvailable() is not implemented.');
+  }
+
+  /// Sets the flash mode for the selected camera.
+  /// On Web [FlashMode.auto] corresponds to [FlashMode.always].
+  Future<void> setFlashMode(int cameraId, FlashMode mode) {
+    throw UnimplementedError('setFlashMode() is not implemented.');
+  }
+
+  /// Sets the exposure mode for taking pictures.
+  Future<void> setExposureMode(int cameraId, ExposureMode mode) {
+    throw UnimplementedError('setExposureMode() is not implemented.');
+  }
+
+  /// Sets the exposure point for automatically determining the exposure values.
+  ///
+  /// Supplying `null` for the [point] argument will result in resetting to the
+  /// original exposure point value.
+  Future<void> setExposurePoint(int cameraId, Point<double>? point) {
+    throw UnimplementedError('setExposurePoint() is not implemented.');
+  }
+
+  /// Gets the minimum supported exposure offset for the selected camera in EV units.
+  Future<double> getMinExposureOffset(int cameraId) {
+    throw UnimplementedError('getMinExposureOffset() is not implemented.');
+  }
+
+  /// Gets the maximum supported exposure offset for the selected camera in EV units.
+  Future<double> getMaxExposureOffset(int cameraId) {
+    throw UnimplementedError('getMaxExposureOffset() is not implemented.');
+  }
+
+  /// Gets the supported step size for exposure offset for the selected camera in EV units.
+  ///
+  /// Returns 0 when the camera supports using a free value without stepping.
+  Future<double> getExposureOffsetStepSize(int cameraId) {
+    throw UnimplementedError('getExposureOffsetStepSize() is not implemented.');
+  }
+
+  /// Sets the exposure offset for the selected camera.
+  ///
+  /// The supplied [offset] value should be in EV units. 1 EV unit represents a
+  /// doubling in brightness. It should be between the minimum and maximum offsets
+  /// obtained through `getMinExposureOffset` and `getMaxExposureOffset` respectively.
+  /// Throws a `CameraException` when an illegal offset is supplied.
+  ///
+  /// When the supplied [offset] value does not align with the step size obtained
+  /// through `getExposureStepSize`, it will automatically be rounded to the nearest step.
+  ///
+  /// Returns the (rounded) offset value that was set.
+  Future<double> setExposureOffset(int cameraId, double offset) {
+    throw UnimplementedError('setExposureOffset() is not implemented.');
+  }
+
+  /// Sets the focus mode for taking pictures.
+  Future<void> setFocusMode(int cameraId, FocusMode mode) {
+    throw UnimplementedError('setFocusMode() is not implemented.');
+  }
+
+  /// Sets the focus point for automatically determining the focus values.
+  ///
+  /// Supplying `null` for the [point] argument will result in resetting to the
+  /// original focus point value.
+  Future<void> setFocusPoint(int cameraId, Point<double>? point) {
+    throw UnimplementedError('setFocusPoint() is not implemented.');
+  }
+
+  /// Gets the maximum supported zoom level for the selected camera.
+  Future<double> getMaxZoomLevel(int cameraId) {
+    throw UnimplementedError('getMaxZoomLevel() is not implemented.');
+  }
+
+  /// Gets the minimum supported zoom level for the selected camera.
+  Future<double> getMinZoomLevel(int cameraId) {
+    throw UnimplementedError('getMinZoomLevel() is not implemented.');
+  }
+
+  /// Set the zoom level for the selected camera.
+  ///
+  /// The supplied [zoom] value should be between the minimum and the maximum supported
+  /// zoom level returned by `getMinZoomLevel` and `getMaxZoomLevel`. Throws a `CameraException`
+  /// when an illegal zoom level is supplied.
+  Future<void> setZoomLevel(int cameraId, double zoom) {
+    throw UnimplementedError('setZoomLevel() is not implemented.');
+  }
+
+  /// Pause the active preview on the current frame for the selected camera.
+  Future<void> pausePreview(int cameraId) {
+    throw UnimplementedError('pausePreview() is not implemented.');
+  }
+
+  /// Resume the paused preview for the selected camera.
+  Future<void> resumePreview(int cameraId) {
+    throw UnimplementedError('resumePreview() is not implemented.');
+  }
+
+  /// Sets the active camera while recording.
+  Future<void> setDescriptionWhileRecording(CameraDescription description) {
+    throw UnimplementedError(
+        'setDescriptionWhileRecording() is not implemented.');
+  }
+
+  /// Returns a widget showing a live camera preview.
+  Widget buildPreview(int cameraId) {
+    throw UnimplementedError('buildPreview() has not been implemented.');
+  }
+
+  /// Releases the resources of this camera.
+  Future<void> dispose(int cameraId) {
+    throw UnimplementedError('dispose() is not implemented.');
+  }
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/camera_description.dart b/packages/camera/camera_platform_interface/lib/src/types/camera_description.dart
new file mode 100644
index 0000000..0167cf9
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/camera_description.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+
+/// The direction the camera is facing.
+enum CameraLensDirection {
+ /// Front facing camera (a user looking at the screen is seen by the camera).
+ front,
+
+ /// Back facing camera (a user looking at the screen is not seen by the camera).
+ back,
+
+ /// External camera which may not be mounted to the device.
+ external,
+}
+
+/// Properties of a camera device. Equality and [hashCode] are based on [name] and [lensDirection] only; [sensorOrientation] is deliberately excluded.
+@immutable
+class CameraDescription {
+ /// Creates a new camera description with the given properties.
+ const CameraDescription({
+ required this.name,
+ required this.lensDirection,
+ required this.sensorOrientation,
+ });
+
+ /// The name of the camera device.
+ final String name;
+
+ /// The direction the camera is facing.
+ final CameraLensDirection lensDirection;
+
+ /// Clockwise angle through which the output image needs to be rotated to be upright on the device screen in its native orientation.
+ ///
+ /// **Range of valid values:**
+ /// 0, 90, 180, 270
+ ///
+ /// On Android, also defines the direction of rolling shutter readout, which
+ /// is from top to bottom in the sensor's coordinate system.
+ final int sensorOrientation;
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ other is CameraDescription &&
+ runtimeType == other.runtimeType &&
+ name == other.name &&
+ lensDirection == other.lensDirection;
+
+ @override
+ int get hashCode => Object.hash(name, lensDirection);
+
+ @override
+ String toString() {
+ return '${objectRuntimeType(this, 'CameraDescription')}('
+ '$name, $lensDirection, $sensorOrientation)';
+ }
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/camera_exception.dart b/packages/camera/camera_platform_interface/lib/src/types/camera_exception.dart
new file mode 100644
index 0000000..d112f9f
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/camera_exception.dart
@@ -0,0 +1,20 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// Exception thrown when the camera plugin reports an error.
+class CameraException implements Exception {
+ /// Creates a new camera exception with the given error code and description.
+ CameraException(this.code, this.description);
+
+ /// Error code.
+ // TODO(bparrishMines): Document possible error codes.
+ // https://github.com/flutter/flutter/issues/69298
+ String code;
+
+ /// Textual description of the error.
+ String? description;
+
+ @override
+ String toString() => 'CameraException($code, $description)';
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/camera_image_data.dart b/packages/camera/camera_platform_interface/lib/src/types/camera_image_data.dart
new file mode 100644
index 0000000..4bafe27
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/camera_image_data.dart
@@ -0,0 +1,128 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:flutter/foundation.dart';
+
+import '../../camera_platform_interface.dart';
+
+/// Options for configuring how the camera streams images.
+///
+/// Currently unused; this exists for future-proofing of the platform interface
+/// API.
+@immutable
+class CameraImageStreamOptions {}
+
+/// A single color plane of image data.
+///
+/// The number and meaning of the planes in an image are determined by its
+/// format.
+@immutable
+class CameraImagePlane {
+ /// Creates a new instance with the given bytes and optional metadata.
+ const CameraImagePlane({
+ required this.bytes,
+ required this.bytesPerRow,
+ this.bytesPerPixel,
+ this.height,
+ this.width,
+ });
+
+ /// The raw bytes of pixel data for this plane.
+ final Uint8List bytes;
+
+ /// The row stride for this color plane, in bytes.
+ final int bytesPerRow;
+
+ /// The distance between adjacent pixel samples in bytes, when available.
+ final int? bytesPerPixel;
+
+ /// Height of the pixel buffer, when available.
+ final int? height;
+
+ /// Width of the pixel buffer, when available.
+ final int? width;
+}
+
+/// Describes how pixels are represented in an image.
+@immutable
+class CameraImageFormat {
+ /// Creates a new format with the given cross-platform group and raw underlying
+ /// platform identifier.
+ const CameraImageFormat(this.group, {required this.raw});
+
+ /// Describes the format group the raw image format falls into.
+ final ImageFormatGroup group;
+
+ /// Raw version of the format from the underlying platform.
+ ///
+ /// On Android, this should be an `int` from class
+ /// `android.graphics.ImageFormat`. See
+ /// https://developer.android.com/reference/android/graphics/ImageFormat
+ ///
+ /// On iOS, this should be a `FourCharCode` constant from Pixel Format
+ /// Identifiers. See
+ /// https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers
+ final dynamic raw;
+}
+
+/// A single complete image buffer from the platform camera.
+///
+/// This class allows for direct application access to the pixel data of an
+/// Image through one or more [Uint8List]. Each buffer is encapsulated in a
+/// [CameraImagePlane] that describes the layout of the pixel data in that
+/// plane. [CameraImageData] is not directly usable as a UI resource.
+///
+/// Although not all image formats are planar on all platforms, this class
+/// treats 1-dimensional images as single planar images.
+@immutable
+class CameraImageData {
+ /// Creates a new instance with the given format, planes, and metadata.
+ const CameraImageData({
+ required this.format,
+ required this.planes,
+ required this.height,
+ required this.width,
+ this.lensAperture,
+ this.sensorExposureTime,
+ this.sensorSensitivity,
+ });
+
+ /// Format of the image provided.
+ ///
+ /// Determines the number of planes needed to represent the image, and
+ /// the general layout of the pixel data in each [Uint8List].
+ final CameraImageFormat format;
+
+ /// Height of the image in pixels.
+ ///
+ /// For formats where some color channels are subsampled, this is the height
+ /// of the largest-resolution plane.
+ final int height;
+
+ /// Width of the image in pixels.
+ ///
+ /// For formats where some color channels are subsampled, this is the width
+ /// of the largest-resolution plane.
+ final int width;
+
+ /// The pixel planes for this image.
+ ///
+ /// The number of planes is determined by the format of the image.
+ final List<CameraImagePlane> planes;
+
+ /// The aperture settings for this image.
+ ///
+ /// Represented as an f-stop value.
+ final double? lensAperture;
+
+ /// The sensor exposure time for this image in nanoseconds.
+ final int? sensorExposureTime;
+
+ /// The sensor sensitivity in standard ISO arithmetic units.
+ final double? sensorSensitivity;
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/exposure_mode.dart b/packages/camera/camera_platform_interface/lib/src/types/exposure_mode.dart
new file mode 100644
index 0000000..6da44c9
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/exposure_mode.dart
@@ -0,0 +1,34 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// The possible exposure modes that can be set for a camera; convert to and from strings with [serializeExposureMode] and [deserializeExposureMode].
+enum ExposureMode {
+ /// Automatically determine exposure settings.
+ auto,
+
+ /// Lock the currently determined exposure settings.
+ locked,
+}
+
+/// Serializes [exposureMode] into its string representation ('auto' or 'locked').
+String serializeExposureMode(ExposureMode exposureMode) {
+ switch (exposureMode) {
+ case ExposureMode.locked:
+ return 'locked';
+ case ExposureMode.auto:
+ return 'auto';
+ }
+}
+
+/// Deserializes [str] into an [ExposureMode]; throws [ArgumentError] for unrecognized values.
+ExposureMode deserializeExposureMode(String str) {
+ switch (str) {
+ case 'locked':
+ return ExposureMode.locked;
+ case 'auto':
+ return ExposureMode.auto;
+ default:
+ throw ArgumentError('"$str" is not a valid ExposureMode value');
+ }
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/flash_mode.dart b/packages/camera/camera_platform_interface/lib/src/types/flash_mode.dart
new file mode 100644
index 0000000..b9f146d
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/flash_mode.dart
@@ -0,0 +1,18 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// The possible flash modes that can be set for a camera.
+enum FlashMode {
+ /// Do not use the flash when taking a picture.
+ off,
+
+ /// Let the device decide whether to flash the camera when taking a picture.
+ auto,
+
+ /// Always use the flash when taking a picture.
+ always,
+
+ /// Turns on the flash light and keeps it on until switched off.
+ torch,
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/focus_mode.dart b/packages/camera/camera_platform_interface/lib/src/types/focus_mode.dart
new file mode 100644
index 0000000..1f9cbef
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/focus_mode.dart
@@ -0,0 +1,34 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// The possible focus modes that can be set for a camera; convert to and from strings with [serializeFocusMode] and [deserializeFocusMode].
+enum FocusMode {
+ /// Automatically determine focus settings.
+ auto,
+
+ /// Lock the currently determined focus settings.
+ locked,
+}
+
+/// Serializes [focusMode] into its string representation ('auto' or 'locked').
+String serializeFocusMode(FocusMode focusMode) {
+ switch (focusMode) {
+ case FocusMode.locked:
+ return 'locked';
+ case FocusMode.auto:
+ return 'auto';
+ }
+}
+
+/// Deserializes [str] into a [FocusMode]; throws [ArgumentError] for unrecognized values.
+FocusMode deserializeFocusMode(String str) {
+ switch (str) {
+ case 'locked':
+ return FocusMode.locked;
+ case 'auto':
+ return FocusMode.auto;
+ default:
+ throw ArgumentError('"$str" is not a valid FocusMode value');
+ }
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/image_format_group.dart b/packages/camera/camera_platform_interface/lib/src/types/image_format_group.dart
new file mode 100644
index 0000000..8dc69e0
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/image_format_group.dart
@@ -0,0 +1,53 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// Group of image formats that are comparable across Android and iOS platforms.
+enum ImageFormatGroup {
+ /// The image format does not fit into any specific group.
+ unknown,
+
+ /// Multi-plane YUV 420 format.
+ ///
+ /// This format is a generic YCbCr format, capable of describing any 4:2:0
+ /// chroma-subsampled planar or semiplanar buffer (but not fully interleaved),
+ /// with 8 bits per color sample.
+ ///
+ /// On Android, this is `android.graphics.ImageFormat.YUV_420_888`. See
+ /// https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888
+ ///
+ /// On iOS, this is `kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`. See
+ /// https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers/kcvpixelformattype_420ypcbcr8biplanarvideorange?language=objc
+ yuv420,
+
+ /// 32-bit BGRA.
+ ///
+ /// On iOS, this is `kCVPixelFormatType_32BGRA`. See
+ /// https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers/kcvpixelformattype_32bgra?language=objc
+ bgra8888,
+
+ /// 32-bit RGB image encoded into JPEG bytes.
+ ///
+ /// On Android, this is `android.graphics.ImageFormat.JPEG`. See
+ /// https://developer.android.com/reference/android/graphics/ImageFormat#JPEG
+ jpeg,
+}
+
+/// Extension on [ImageFormatGroup] to stringify the enum.
+extension ImageFormatGroupName on ImageFormatGroup {
+ /// Returns the String name for this [ImageFormatGroup].
+ /// Returns 'unknown' for [ImageFormatGroup.unknown], which is used when the
+ /// format is not supported on, or not mapped for, the current platform.
+ String name() {
+ switch (this) {
+ case ImageFormatGroup.bgra8888:
+ return 'bgra8888';
+ case ImageFormatGroup.yuv420:
+ return 'yuv420';
+ case ImageFormatGroup.jpeg:
+ return 'jpeg';
+ case ImageFormatGroup.unknown:
+ return 'unknown';
+ }
+ }
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/resolution_preset.dart b/packages/camera/camera_platform_interface/lib/src/types/resolution_preset.dart
new file mode 100644
index 0000000..fcb6b83
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/resolution_preset.dart
@@ -0,0 +1,26 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// Affects the quality of video recording and image capture:
+///
+/// If a preset is not available on the camera being used a preset of lower quality will be selected automatically.
+enum ResolutionPreset {
+ /// 352x288 on iOS, 240p (320x240) on Android and Web
+ low,
+
+ /// 480p (640x480 on iOS, 720x480 on Android and Web)
+ medium,
+
+ /// 720p (1280x720)
+ high,
+
+ /// 1080p (1920x1080)
+ veryHigh,
+
+ /// 2160p (3840x2160 on Android and iOS, 4096x2160 on Web)
+ ultraHigh,
+
+ /// The highest resolution available.
+ max,
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/types/types.dart b/packages/camera/camera_platform_interface/lib/src/types/types.dart
new file mode 100644
index 0000000..a8a4f8c
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/types.dart
@@ -0,0 +1,13 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'camera_description.dart';
+export 'camera_exception.dart';
+export 'camera_image_data.dart';
+export 'exposure_mode.dart';
+export 'flash_mode.dart';
+export 'focus_mode.dart';
+export 'image_format_group.dart';
+export 'resolution_preset.dart';
+export 'video_capture_options.dart';
diff --git a/packages/camera/camera_platform_interface/lib/src/types/video_capture_options.dart b/packages/camera/camera_platform_interface/lib/src/types/video_capture_options.dart
new file mode 100644
index 0000000..9fcb7fa
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/types/video_capture_options.dart
@@ -0,0 +1,55 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+
+import 'camera_image_data.dart';
+
+/// Options wrapper for [CameraPlatform.startVideoCapturing] parameters.
+@immutable
+class VideoCaptureOptions {
+ /// Constructs a new instance.
+ const VideoCaptureOptions(
+ this.cameraId, {
+ this.maxDuration,
+ this.streamCallback,
+ this.streamOptions,
+ }) : assert(
+ streamOptions == null || streamCallback != null,
+ 'Must specify streamCallback if providing streamOptions.',
+ );
+
+ /// The ID of the camera to use for capturing.
+ final int cameraId;
+
+ /// The maximum time to perform capturing for.
+ ///
+ /// By default there is no maximum on the capture time.
+ final Duration? maxDuration;
+
+ /// An optional callback to enable streaming.
+ ///
+ /// If set, then each image captured by the camera will be
+ /// passed to this callback.
+ final Function(CameraImageData image)? streamCallback;
+
+ /// Configuration options for streaming.
+ ///
+ /// May only be set when [streamCallback] is also provided; enforced by an assert.
+ final CameraImageStreamOptions? streamOptions;
+
+ @override
+ bool operator ==(Object other) =>
+ identical(this, other) ||
+ other is VideoCaptureOptions &&
+ runtimeType == other.runtimeType &&
+ cameraId == other.cameraId &&
+ maxDuration == other.maxDuration &&
+ streamCallback == other.streamCallback &&
+ streamOptions == other.streamOptions;
+
+ @override
+ int get hashCode =>
+ Object.hash(cameraId, maxDuration, streamCallback, streamOptions);
+}
diff --git a/packages/camera/camera_platform_interface/lib/src/utils/utils.dart b/packages/camera/camera_platform_interface/lib/src/utils/utils.dart
new file mode 100644
index 0000000..771a94b
--- /dev/null
+++ b/packages/camera/camera_platform_interface/lib/src/utils/utils.dart
@@ -0,0 +1,50 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+
+import '../../camera_platform_interface.dart';
+
+/// Parses a string into a corresponding CameraLensDirection.
+CameraLensDirection parseCameraLensDirection(String string) {
+ switch (string) {
+ case 'front':
+ return CameraLensDirection.front;
+ case 'back':
+ return CameraLensDirection.back;
+ case 'external':
+ return CameraLensDirection.external;
+ }
+ throw ArgumentError('"$string" is not a valid CameraLensDirection value');
+}
+
+/// Serializes [orientation] into its string representation.
+String serializeDeviceOrientation(DeviceOrientation orientation) {
+ switch (orientation) {
+ case DeviceOrientation.portraitUp:
+ return 'portraitUp';
+ case DeviceOrientation.portraitDown:
+ return 'portraitDown';
+ case DeviceOrientation.landscapeRight:
+ return 'landscapeRight';
+ case DeviceOrientation.landscapeLeft:
+ return 'landscapeLeft';
+ }
+}
+
+/// Deserializes [str] into a [DeviceOrientation]; throws [ArgumentError] for unrecognized values.
+DeviceOrientation deserializeDeviceOrientation(String str) {
+ switch (str) {
+ case 'portraitUp':
+ return DeviceOrientation.portraitUp;
+ case 'portraitDown':
+ return DeviceOrientation.portraitDown;
+ case 'landscapeRight':
+ return DeviceOrientation.landscapeRight;
+ case 'landscapeLeft':
+ return DeviceOrientation.landscapeLeft;
+ default:
+ throw ArgumentError('"$str" is not a valid DeviceOrientation value');
+ }
+}
diff --git a/packages/camera/camera_platform_interface/pubspec.yaml b/packages/camera/camera_platform_interface/pubspec.yaml
new file mode 100644
index 0000000..4cdb285
--- /dev/null
+++ b/packages/camera/camera_platform_interface/pubspec.yaml
@@ -0,0 +1,23 @@
+name: camera_platform_interface
+description: A common platform interface for the camera plugin.
+repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_platform_interface
+issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
+# NOTE: We strongly prefer non-breaking changes, even at the expense of a
+# less-clean API. See https://flutter.dev/go/platform-interface-breaking-changes
+version: 2.4.0
+
+environment:
+ sdk: '>=2.12.0 <3.0.0'
+ flutter: ">=3.0.0"
+
+dependencies:
+ cross_file: ^0.3.1
+ flutter:
+ sdk: flutter
+ plugin_platform_interface: ^2.1.0
+ stream_transform: ^2.0.0
+
+dev_dependencies:
+ async: ^2.5.0
+ flutter_test:
+ sdk: flutter
diff --git a/packages/camera/camera_platform_interface/test/camera_platform_interface_test.dart b/packages/camera/camera_platform_interface/test/camera_platform_interface_test.dart
new file mode 100644
index 0000000..e3b6858
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/camera_platform_interface_test.dart
@@ -0,0 +1,498 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_platform_interface/src/method_channel/method_channel_camera.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('$CameraPlatform', () {
+ test('$MethodChannelCamera is the default instance', () {
+ expect(CameraPlatform.instance, isA<MethodChannelCamera>());
+ });
+
+ test('Cannot be implemented with `implements`', () {
+ expect(() {
+ CameraPlatform.instance = ImplementsCameraPlatform();
+ // In versions of `package:plugin_platform_interface` prior to fixing
+ // https://github.com/flutter/flutter/issues/109339, an attempt to
+ // implement a platform interface using `implements` would sometimes
+ // throw a `NoSuchMethodError` and other times throw an
+ // `AssertionError`. After the issue is fixed, an `AssertionError` will
+ // always be thrown. For the purpose of this test, we don't really care
+ // what exception is thrown, so just allow any exception.
+ }, throwsA(anything));
+ });
+
+ test('Can be extended', () {
+ CameraPlatform.instance = ExtendsCameraPlatform();
+ });
+
+ test(
+ 'Default implementation of availableCameras() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.availableCameras(),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of onCameraInitialized() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.onCameraInitialized(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of onResolutionChanged() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.onCameraResolutionChanged(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of onCameraClosing() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.onCameraClosing(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of onCameraError() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.onCameraError(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of onDeviceOrientationChanged() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.onDeviceOrientationChanged(),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of lockCaptureOrientation() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.lockCaptureOrientation(
+ 1, DeviceOrientation.portraitUp),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of unlockCaptureOrientation() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.unlockCaptureOrientation(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test('Default implementation of dispose() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.dispose(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of createCamera() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.createCamera(
+ const CameraDescription(
+ name: 'back',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of initializeCamera() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.initializeCamera(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of pauseVideoRecording() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.pauseVideoRecording(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of prepareForVideoRecording() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.prepareForVideoRecording(),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of resumeVideoRecording() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.resumeVideoRecording(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of setFlashMode() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.setFlashMode(1, FlashMode.auto),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of setExposureMode() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.setExposureMode(1, ExposureMode.auto),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of setExposurePoint() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.setExposurePoint(1, null),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of getMinExposureOffset() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.getMinExposureOffset(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of getMaxExposureOffset() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.getMaxExposureOffset(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of getExposureOffsetStepSize() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.getExposureOffsetStepSize(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of setExposureOffset() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.setExposureOffset(1, 2.0),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of setFocusMode() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.setFocusMode(1, FocusMode.auto),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of setFocusPoint() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.setFocusPoint(1, null),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of startVideoRecording() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.startVideoRecording(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of stopVideoRecording() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.stopVideoRecording(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of takePicture() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.takePicture(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of getMaxZoomLevel() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.getMaxZoomLevel(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of getMinZoomLevel() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.getMinZoomLevel(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of setZoomLevel() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.setZoomLevel(1, 1.0),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of pausePreview() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.pausePreview(1),
+ throwsUnimplementedError,
+ );
+ });
+
+ test(
+ 'Default implementation of resumePreview() should throw unimplemented error',
+ () {
+ // Arrange
+ final ExtendsCameraPlatform cameraPlatform = ExtendsCameraPlatform();
+
+ // Act & Assert
+ expect(
+ () => cameraPlatform.resumePreview(1),
+ throwsUnimplementedError,
+ );
+ });
+ });
+
+ group('exports', () {
+ test('CameraDescription is exported', () {
+ const CameraDescription(
+ name: 'abc-123',
+ sensorOrientation: 1,
+ lensDirection: CameraLensDirection.external);
+ });
+
+ test('CameraException is exported', () {
+ CameraException('1', 'error');
+ });
+
+ test('CameraImageData is exported', () {
+ const CameraImageData(
+ width: 1,
+ height: 1,
+ format: CameraImageFormat(ImageFormatGroup.bgra8888, raw: 1),
+ planes: <CameraImagePlane>[],
+ );
+ });
+
+ test('ExposureMode is exported', () {
+ // ignore: unnecessary_statements
+ ExposureMode.auto;
+ });
+
+ test('FlashMode is exported', () {
+ // ignore: unnecessary_statements
+ FlashMode.auto;
+ });
+
+ test('FocusMode is exported', () {
+ // ignore: unnecessary_statements
+ FocusMode.auto;
+ });
+
+ test('ResolutionPreset is exported', () {
+ // ignore: unnecessary_statements
+ ResolutionPreset.high;
+ });
+
+ test('VideoCaptureOptions is exported', () {
+ const VideoCaptureOptions(123);
+ });
+ });
+}
+
+class ImplementsCameraPlatform implements CameraPlatform {
+ @override
+ dynamic noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation);
+}
+
+class ExtendsCameraPlatform extends CameraPlatform {}
diff --git a/packages/camera/camera_platform_interface/test/events/camera_event_test.dart b/packages/camera/camera_platform_interface/test/events/camera_event_test.dart
new file mode 100644
index 0000000..074f203
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/events/camera_event_test.dart
@@ -0,0 +1,337 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+// Verifies construction, JSON round-tripping, equality, and hashCode for each
+// camera event type defined by the platform interface.
+void main() {
+  TestWidgetsFlutterBinding.ensureInitialized();
+
+  group('CameraInitializedEvent tests', () {
+    test('Constructor should initialize all properties', () {
+      const CameraInitializedEvent event = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+
+      expect(event.cameraId, 1);
+      expect(event.previewWidth, 1024);
+      expect(event.previewHeight, 640);
+      expect(event.exposureMode, ExposureMode.auto);
+      expect(event.focusMode, FocusMode.auto);
+      expect(event.exposurePointSupported, true);
+      expect(event.focusPointSupported, true);
+    });
+
+    test('fromJson should initialize all properties', () {
+      // Dimensions are supplied as doubles (1024.0/640.0), while the
+      // assertions below compare against ints.
+      final CameraInitializedEvent event =
+          CameraInitializedEvent.fromJson(const <String, dynamic>{
+        'cameraId': 1,
+        'previewWidth': 1024.0,
+        'previewHeight': 640.0,
+        'exposureMode': 'auto',
+        'exposurePointSupported': true,
+        'focusMode': 'auto',
+        'focusPointSupported': true
+      });
+
+      expect(event.cameraId, 1);
+      expect(event.previewWidth, 1024);
+      expect(event.previewHeight, 640);
+      expect(event.exposureMode, ExposureMode.auto);
+      expect(event.exposurePointSupported, true);
+      expect(event.focusMode, FocusMode.auto);
+      expect(event.focusPointSupported, true);
+    });
+
+    test('toJson should return a map with all fields', () {
+      const CameraInitializedEvent event = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+
+      final Map<String, dynamic> jsonMap = event.toJson();
+
+      // One entry per constructor property; enums serialize as strings.
+      expect(jsonMap.length, 7);
+      expect(jsonMap['cameraId'], 1);
+      expect(jsonMap['previewWidth'], 1024);
+      expect(jsonMap['previewHeight'], 640);
+      expect(jsonMap['exposureMode'], 'auto');
+      expect(jsonMap['exposurePointSupported'], true);
+      expect(jsonMap['focusMode'], 'auto');
+      expect(jsonMap['focusPointSupported'], true);
+    });
+
+    test('equals should return true if objects are the same', () {
+      const CameraInitializedEvent firstEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      const CameraInitializedEvent secondEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+
+      expect(firstEvent == secondEvent, true);
+    });
+
+    // Each test below varies exactly one property to show it participates in
+    // the equality comparison.
+    test('equals should return false if cameraId is different', () {
+      const CameraInitializedEvent firstEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      const CameraInitializedEvent secondEvent = CameraInitializedEvent(
+          2, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if previewWidth is different', () {
+      const CameraInitializedEvent firstEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      const CameraInitializedEvent secondEvent = CameraInitializedEvent(
+          1, 2048, 640, ExposureMode.auto, true, FocusMode.auto, true);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if previewHeight is different', () {
+      const CameraInitializedEvent firstEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      const CameraInitializedEvent secondEvent = CameraInitializedEvent(
+          1, 1024, 980, ExposureMode.auto, true, FocusMode.auto, true);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if exposureMode is different', () {
+      const CameraInitializedEvent firstEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      const CameraInitializedEvent secondEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.locked, true, FocusMode.auto, true);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if exposurePointSupported is different',
+        () {
+      const CameraInitializedEvent firstEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      const CameraInitializedEvent secondEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, false, FocusMode.auto, true);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if focusMode is different', () {
+      const CameraInitializedEvent firstEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      const CameraInitializedEvent secondEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.locked, true);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if focusPointSupported is different', () {
+      const CameraInitializedEvent firstEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      const CameraInitializedEvent secondEvent = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, false);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('hashCode should match hashCode of all properties', () {
+      const CameraInitializedEvent event = CameraInitializedEvent(
+          1, 1024, 640, ExposureMode.auto, true, FocusMode.auto, true);
+      // NOTE(review): the first argument is `cameraId.hashCode`, not
+      // `cameraId`; this presumably mirrors the implementation's hashCode —
+      // confirm that is intentional in the event class itself.
+      final int expectedHashCode = Object.hash(
+          event.cameraId.hashCode,
+          event.previewWidth,
+          event.previewHeight,
+          event.exposureMode,
+          event.exposurePointSupported,
+          event.focusMode,
+          event.focusPointSupported);
+
+      expect(event.hashCode, expectedHashCode);
+    });
+  });
+
+  // NOTE(review): group name says "Changes" but the class under test is
+  // CameraResolutionChangedEvent.
+  group('CameraResolutionChangesEvent tests', () {
+    test('Constructor should initialize all properties', () {
+      const CameraResolutionChangedEvent event =
+          CameraResolutionChangedEvent(1, 1024, 640);
+
+      expect(event.cameraId, 1);
+      expect(event.captureWidth, 1024);
+      expect(event.captureHeight, 640);
+    });
+
+    test('fromJson should initialize all properties', () {
+      final CameraResolutionChangedEvent event =
+          CameraResolutionChangedEvent.fromJson(const <String, dynamic>{
+        'cameraId': 1,
+        'captureWidth': 1024.0,
+        'captureHeight': 640.0,
+      });
+
+      expect(event.cameraId, 1);
+      expect(event.captureWidth, 1024);
+      expect(event.captureHeight, 640);
+    });
+
+    test('toJson should return a map with all fields', () {
+      const CameraResolutionChangedEvent event =
+          CameraResolutionChangedEvent(1, 1024, 640);
+
+      final Map<String, dynamic> jsonMap = event.toJson();
+
+      expect(jsonMap.length, 3);
+      expect(jsonMap['cameraId'], 1);
+      expect(jsonMap['captureWidth'], 1024);
+      expect(jsonMap['captureHeight'], 640);
+    });
+
+    test('equals should return true if objects are the same', () {
+      const CameraResolutionChangedEvent firstEvent =
+          CameraResolutionChangedEvent(1, 1024, 640);
+      const CameraResolutionChangedEvent secondEvent =
+          CameraResolutionChangedEvent(1, 1024, 640);
+
+      expect(firstEvent == secondEvent, true);
+    });
+
+    test('equals should return false if cameraId is different', () {
+      const CameraResolutionChangedEvent firstEvent =
+          CameraResolutionChangedEvent(1, 1024, 640);
+      const CameraResolutionChangedEvent secondEvent =
+          CameraResolutionChangedEvent(2, 1024, 640);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if captureWidth is different', () {
+      const CameraResolutionChangedEvent firstEvent =
+          CameraResolutionChangedEvent(1, 1024, 640);
+      const CameraResolutionChangedEvent secondEvent =
+          CameraResolutionChangedEvent(1, 2048, 640);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if captureHeight is different', () {
+      const CameraResolutionChangedEvent firstEvent =
+          CameraResolutionChangedEvent(1, 1024, 640);
+      const CameraResolutionChangedEvent secondEvent =
+          CameraResolutionChangedEvent(1, 1024, 980);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('hashCode should match hashCode of all properties', () {
+      const CameraResolutionChangedEvent event =
+          CameraResolutionChangedEvent(1, 1024, 640);
+      // See the NOTE in the CameraInitializedEvent hashCode test regarding
+      // `cameraId.hashCode`.
+      final int expectedHashCode = Object.hash(
+        event.cameraId.hashCode,
+        event.captureWidth,
+        event.captureHeight,
+      );
+
+      expect(event.hashCode, expectedHashCode);
+    });
+  });
+
+  group('CameraClosingEvent tests', () {
+    test('Constructor should initialize all properties', () {
+      const CameraClosingEvent event = CameraClosingEvent(1);
+
+      expect(event.cameraId, 1);
+    });
+
+    test('fromJson should initialize all properties', () {
+      final CameraClosingEvent event =
+          CameraClosingEvent.fromJson(const <String, dynamic>{
+        'cameraId': 1,
+      });
+
+      expect(event.cameraId, 1);
+    });
+
+    test('toJson should return a map with all fields', () {
+      const CameraClosingEvent event = CameraClosingEvent(1);
+
+      final Map<String, dynamic> jsonMap = event.toJson();
+
+      expect(jsonMap.length, 1);
+      expect(jsonMap['cameraId'], 1);
+    });
+
+    test('equals should return true if objects are the same', () {
+      const CameraClosingEvent firstEvent = CameraClosingEvent(1);
+      const CameraClosingEvent secondEvent = CameraClosingEvent(1);
+
+      expect(firstEvent == secondEvent, true);
+    });
+
+    test('equals should return false if cameraId is different', () {
+      const CameraClosingEvent firstEvent = CameraClosingEvent(1);
+      const CameraClosingEvent secondEvent = CameraClosingEvent(2);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('hashCode should match hashCode of all properties', () {
+      const CameraClosingEvent event = CameraClosingEvent(1);
+      // Single property, so the event's hashCode is just the id's.
+      final int expectedHashCode = event.cameraId.hashCode;
+
+      expect(event.hashCode, expectedHashCode);
+    });
+  });
+
+  group('CameraErrorEvent tests', () {
+    test('Constructor should initialize all properties', () {
+      const CameraErrorEvent event = CameraErrorEvent(1, 'Error');
+
+      expect(event.cameraId, 1);
+      expect(event.description, 'Error');
+    });
+
+    test('fromJson should initialize all properties', () {
+      final CameraErrorEvent event = CameraErrorEvent.fromJson(
+          const <String, dynamic>{'cameraId': 1, 'description': 'Error'});
+
+      expect(event.cameraId, 1);
+      expect(event.description, 'Error');
+    });
+
+    test('toJson should return a map with all fields', () {
+      const CameraErrorEvent event = CameraErrorEvent(1, 'Error');
+
+      final Map<String, dynamic> jsonMap = event.toJson();
+
+      expect(jsonMap.length, 2);
+      expect(jsonMap['cameraId'], 1);
+      expect(jsonMap['description'], 'Error');
+    });
+
+    test('equals should return true if objects are the same', () {
+      const CameraErrorEvent firstEvent = CameraErrorEvent(1, 'Error');
+      const CameraErrorEvent secondEvent = CameraErrorEvent(1, 'Error');
+
+      expect(firstEvent == secondEvent, true);
+    });
+
+    test('equals should return false if cameraId is different', () {
+      const CameraErrorEvent firstEvent = CameraErrorEvent(1, 'Error');
+      const CameraErrorEvent secondEvent = CameraErrorEvent(2, 'Error');
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('equals should return false if description is different', () {
+      const CameraErrorEvent firstEvent = CameraErrorEvent(1, 'Error');
+      const CameraErrorEvent secondEvent = CameraErrorEvent(1, 'Ooops');
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('hashCode should match hashCode of all properties', () {
+      const CameraErrorEvent event = CameraErrorEvent(1, 'Error');
+      // See the NOTE in the CameraInitializedEvent hashCode test regarding
+      // `cameraId.hashCode`.
+      final int expectedHashCode =
+          Object.hash(event.cameraId.hashCode, event.description);
+
+      expect(event.hashCode, expectedHashCode);
+    });
+  });
+}
diff --git a/packages/camera/camera_platform_interface/test/events/device_event_test.dart b/packages/camera/camera_platform_interface/test/events/device_event_test.dart
new file mode 100644
index 0000000..11f786c
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/events/device_event_test.dart
@@ -0,0 +1,65 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+// Verifies construction, JSON round-tripping, equality, and hashCode for
+// DeviceOrientationChangedEvent.
+void main() {
+  TestWidgetsFlutterBinding.ensureInitialized();
+
+  group('DeviceOrientationChangedEvent tests', () {
+    test('Constructor should initialize all properties', () {
+      const DeviceOrientationChangedEvent event =
+          DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+
+      expect(event.orientation, DeviceOrientation.portraitUp);
+    });
+
+    test('fromJson should initialize all properties', () {
+      // The orientation is serialized as the enum value's string name.
+      final DeviceOrientationChangedEvent event =
+          DeviceOrientationChangedEvent.fromJson(const <String, dynamic>{
+        'orientation': 'portraitUp',
+      });
+
+      expect(event.orientation, DeviceOrientation.portraitUp);
+    });
+
+    test('toJson should return a map with all fields', () {
+      const DeviceOrientationChangedEvent event =
+          DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+
+      final Map<String, dynamic> jsonMap = event.toJson();
+
+      expect(jsonMap.length, 1);
+      expect(jsonMap['orientation'], 'portraitUp');
+    });
+
+    test('equals should return true if objects are the same', () {
+      const DeviceOrientationChangedEvent firstEvent =
+          DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+      const DeviceOrientationChangedEvent secondEvent =
+          DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+
+      expect(firstEvent == secondEvent, true);
+    });
+
+    test('equals should return false if orientation is different', () {
+      const DeviceOrientationChangedEvent firstEvent =
+          DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+      const DeviceOrientationChangedEvent secondEvent =
+          DeviceOrientationChangedEvent(DeviceOrientation.landscapeLeft);
+
+      expect(firstEvent == secondEvent, false);
+    });
+
+    test('hashCode should match hashCode of all properties', () {
+      const DeviceOrientationChangedEvent event =
+          DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+      // Single property, so the event's hashCode is just the orientation's.
+      final int expectedHashCode = event.orientation.hashCode;
+
+      expect(event.hashCode, expectedHashCode);
+    });
+  });
+}
diff --git a/packages/camera/camera_platform_interface/test/method_channel/method_channel_camera_test.dart b/packages/camera/camera_platform_interface/test/method_channel/method_channel_camera_test.dart
new file mode 100644
index 0000000..b01123d
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/method_channel/method_channel_camera_test.dart
@@ -0,0 +1,1115 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:async/async.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_platform_interface/src/method_channel/method_channel_camera.dart';
+import 'package:camera_platform_interface/src/utils/utils.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import '../utils/method_channel_mock.dart';
+
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('$MethodChannelCamera', () {
+ group('Creation, Initialization & Disposal Tests', () {
+ test('Should send creation data and receive back a camera id', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ }
+ });
+ final MethodChannelCamera camera = MethodChannelCamera();
+
+ // Act
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0),
+ ResolutionPreset.high,
+ );
+
+ // Assert
+ expect(cameraMockChannel.log, <Matcher>[
+ isMethodCall(
+ 'create',
+ arguments: <String, Object?>{
+ 'cameraName': 'Test',
+ 'resolutionPreset': 'high',
+ 'enableAudio': false
+ },
+ ),
+ ]);
+ expect(cameraId, 1);
+ });
+
+ test(
+ 'Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final MethodChannelCamera camera = MethodChannelCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test(
+ 'Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final MethodChannelCamera camera = MethodChannelCamera();
+
+ // Act
+ expect(
+ () => camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test(
+ 'Should throw CameraException when initialize throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'initialize': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ },
+ );
+ final MethodChannelCamera camera = MethodChannelCamera();
+
+ // Act
+ expect(
+ () => camera.initializeCamera(0),
+ throwsA(
+ isA<CameraException>()
+ .having((CameraException e) => e.code, 'code',
+ 'TESTING_ERROR_CODE')
+ .having(
+ (CameraException e) => e.description,
+ 'description',
+ 'Mock error message used during testing.',
+ ),
+ ),
+ );
+ },
+ );
+
+ test('Should send initialization data', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ 'initialize': null
+ });
+ final MethodChannelCamera camera = MethodChannelCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+
+ // Act
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ isMethodCall(
+ 'initialize',
+ arguments: <String, Object?>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ ),
+ ]);
+ });
+
+ test('Should send a disposal call on dispose', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null,
+ 'dispose': <String, dynamic>{'cameraId': 1}
+ });
+
+ final MethodChannelCamera camera = MethodChannelCamera();
+ final int cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+
+ // Act
+ await camera.dispose(cameraId);
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ anything,
+ isMethodCall(
+ 'dispose',
+ arguments: <String, Object?>{'cameraId': 1},
+ ),
+ ]);
+ });
+ });
+
+ group('Event Tests', () {
+ late MethodChannelCamera camera;
+ late int cameraId;
+ setUp(() async {
+ MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = MethodChannelCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ));
+ await initializeFuture;
+ });
+
+ test('Should receive initialized event', () async {
+ // Act
+ final Stream<CameraInitializedEvent> eventStream =
+ camera.onCameraInitialized(cameraId);
+ final StreamQueue<CameraInitializedEvent> streamQueue =
+ StreamQueue<CameraInitializedEvent>(eventStream);
+
+ // Emit test events
+ final CameraInitializedEvent event = CameraInitializedEvent(
+ cameraId,
+ 3840,
+ 2160,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ );
+ await camera.handleCameraMethodCall(
+ MethodCall('initialized', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive resolution changes', () async {
+ // Act
+ final Stream<CameraResolutionChangedEvent> resolutionStream =
+ camera.onCameraResolutionChanged(cameraId);
+ final StreamQueue<CameraResolutionChangedEvent> streamQueue =
+ StreamQueue<CameraResolutionChangedEvent>(resolutionStream);
+
+ // Emit test events
+ final CameraResolutionChangedEvent fhdEvent =
+ CameraResolutionChangedEvent(cameraId, 1920, 1080);
+ final CameraResolutionChangedEvent uhdEvent =
+ CameraResolutionChangedEvent(cameraId, 3840, 2160);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', fhdEvent.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('resolution_changed', uhdEvent.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+ expect(await streamQueue.next, fhdEvent);
+ expect(await streamQueue.next, uhdEvent);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera closing events', () async {
+ // Act
+ final Stream<CameraClosingEvent> eventStream =
+ camera.onCameraClosing(cameraId);
+ final StreamQueue<CameraClosingEvent> streamQueue =
+ StreamQueue<CameraClosingEvent>(eventStream);
+
+ // Emit test events
+ final CameraClosingEvent event = CameraClosingEvent(cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera error events', () async {
+ // Act
+ final Stream<CameraErrorEvent> errorStream =
+ camera.onCameraError(cameraId);
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(errorStream);
+
+ // Emit test events
+ final CameraErrorEvent event =
+ CameraErrorEvent(cameraId, 'Error Description');
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await camera.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive device orientation change events', () async {
+ // Act
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ camera.onDeviceOrientationChanged();
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+
+ // Emit test events
+ const DeviceOrientationChangedEvent event =
+ DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+ await camera.handleDeviceMethodCall(
+ MethodCall('orientation_changed', event.toJson()));
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+ });
+
+ group('Function Tests', () {
+ late MethodChannelCamera camera;
+ late int cameraId;
+
+ setUp(() async {
+ MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': null
+ },
+ );
+ camera = MethodChannelCamera();
+ cameraId = await camera.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ final Future<void> initializeFuture = camera.initializeCamera(cameraId);
+ camera.cameraEventStreamController.add(
+ CameraInitializedEvent(
+ cameraId,
+ 1920,
+ 1080,
+ ExposureMode.auto,
+ true,
+ FocusMode.auto,
+ true,
+ ),
+ );
+ await initializeFuture;
+ });
+
+ test('Should fetch CameraDescription instances for available cameras',
+ () async {
+ // Arrange
+ final List<dynamic> returnData = <dynamic>[
+ <String, dynamic>{
+ 'name': 'Test 1',
+ 'lensFacing': 'front',
+ 'sensorOrientation': 1
+ },
+ <String, dynamic>{
+ 'name': 'Test 2',
+ 'lensFacing': 'back',
+ 'sensorOrientation': 2
+ }
+ ];
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'availableCameras': returnData},
+ );
+
+ // Act
+ final List<CameraDescription> cameras = await camera.availableCameras();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('availableCameras', arguments: null),
+ ]);
+ expect(cameras.length, returnData.length);
+ for (int i = 0; i < returnData.length; i++) {
+ final Map<String, Object?> typedData =
+ (returnData[i] as Map<dynamic, dynamic>).cast<String, Object?>();
+ final CameraDescription cameraDescription = CameraDescription(
+ name: typedData['name']! as String,
+ lensDirection:
+ parseCameraLensDirection(typedData['lensFacing']! as String),
+ sensorOrientation: typedData['sensorOrientation']! as int,
+ );
+ expect(cameras[i], cameraDescription);
+ }
+ });
+
+ test(
+ 'Should throw CameraException when availableCameras throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'availableCameras': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+
+ // Act
+ expect(
+ camera.availableCameras,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should take a picture and return an XFile instance', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'takePicture': '/test/path.jpg'});
+
+ // Act
+ final XFile file = await camera.takePicture(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('takePicture', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.jpg');
+ });
+
+ test('Should prepare for video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'prepareForVideoRecording': null},
+ );
+
+ // Act
+ await camera.prepareForVideoRecording();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('prepareForVideoRecording', arguments: null),
+ ]);
+ });
+
+ test('Should start recording a video', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': null,
+ 'enableStream': false,
+ }),
+ ]);
+ });
+
+ test('Should set description while recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'setDescriptionWhileRecording': null},
+ );
+
+ // Act
+ const CameraDescription cameraDescription = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0);
+ await camera.setDescriptionWhileRecording(cameraDescription);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setDescriptionWhileRecording',
+ arguments: <String, Object?>{
+ 'cameraName': cameraDescription.name
+ }),
+ ]);
+ });
+
+ test('Should pass maxVideoDuration when starting recording a video',
+ () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await camera.startVideoRecording(
+ cameraId,
+ maxVideoDuration: const Duration(seconds: 10),
+ );
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': 10000,
+ 'enableStream': false,
+ }),
+ ]);
+ });
+
+ test('Should stop a video recording and return the file', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'stopVideoRecording': '/test/path.mp4'},
+ );
+
+ // Act
+ final XFile file = await camera.stopVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('stopVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.mp4');
+ });
+
+ test('Should pause a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'pauseVideoRecording': null},
+ );
+
+ // Act
+ await camera.pauseVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pauseVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should resume a video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'resumeVideoRecording': null},
+ );
+
+ // Act
+ await camera.resumeVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumeVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the flash mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'setFlashMode': null},
+ );
+
+ // Act
+ await camera.setFlashMode(cameraId, FlashMode.torch);
+ await camera.setFlashMode(cameraId, FlashMode.always);
+ await camera.setFlashMode(cameraId, FlashMode.auto);
+ await camera.setFlashMode(cameraId, FlashMode.off);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'torch'
+ }),
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'always'
+ }),
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'auto'
+ }),
+ isMethodCall('setFlashMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'off'
+ }),
+ ]);
+ });
+
+ test('Should set the exposure mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'setExposureMode': null},
+ );
+
+ // Act
+ await camera.setExposureMode(cameraId, ExposureMode.auto);
+ await camera.setExposureMode(cameraId, ExposureMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'auto'
+ }),
+ isMethodCall('setExposureMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+ test('Should set the exposure point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'setExposurePoint': null},
+ );
+
+ // Act
+ await camera.setExposurePoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setExposurePoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setExposurePoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should get the min exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'getMinExposureOffset': 2.0},
+ );
+
+ // Act
+ final double minExposureOffset =
+ await camera.getMinExposureOffset(cameraId);
+
+ // Assert
+ expect(minExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the max exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'getMaxExposureOffset': 2.0},
+ );
+
+ // Act
+ final double maxExposureOffset =
+ await camera.getMaxExposureOffset(cameraId);
+
+ // Assert
+ expect(maxExposureOffset, 2.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the exposure offset step size', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'getExposureOffsetStepSize': 0.25},
+ );
+
+ // Act
+ final double stepSize =
+ await camera.getExposureOffsetStepSize(cameraId);
+
+ // Assert
+ expect(stepSize, 0.25);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getExposureOffsetStepSize',
+ arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the exposure offset', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'setExposureOffset': 0.6},
+ );
+
+ // Act
+ final double actualOffset =
+ await camera.setExposureOffset(cameraId, 0.5);
+
+ // Assert
+ expect(actualOffset, 0.6);
+ expect(channel.log, <Matcher>[
+ isMethodCall('setExposureOffset', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'offset': 0.5,
+ }),
+ ]);
+ });
+
+ test('Should set the focus mode', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'setFocusMode': null},
+ );
+
+ // Act
+ await camera.setFocusMode(cameraId, FocusMode.auto);
+ await camera.setFocusMode(cameraId, FocusMode.locked);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'auto'
+ }),
+ isMethodCall('setFocusMode', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'mode': 'locked'
+ }),
+ ]);
+ });
+
+ test('Should set the focus point', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'setFocusPoint': null},
+ );
+
+ // Act
+ await camera.setFocusPoint(cameraId, const Point<double>(0.5, 0.5));
+ await camera.setFocusPoint(cameraId, null);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': 0.5,
+ 'y': 0.5,
+ 'reset': false
+ }),
+ isMethodCall('setFocusPoint', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'x': null,
+ 'y': null,
+ 'reset': true
+ }),
+ ]);
+ });
+
+ test('Should build a texture widget as preview widget', () async {
+ // Act
+ final Widget widget = camera.buildPreview(cameraId);
+
+ // Assert
+ expect(widget is Texture, isTrue);
+ expect((widget as Texture).textureId, cameraId);
+ });
+
+ test('Should throw MissingPluginException when handling unknown method',
+ () {
+ final MethodChannelCamera camera = MethodChannelCamera();
+
+ expect(
+ () => camera.handleCameraMethodCall(
+ const MethodCall('unknown_method'), 1),
+ throwsA(isA<MissingPluginException>()));
+ });
+
+ test('Should get the max zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'getMaxZoomLevel': 10.0},
+ );
+
+ // Act
+ final double maxZoomLevel = await camera.getMaxZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 10.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMaxZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should get the min zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'getMinZoomLevel': 1.0},
+ );
+
+ // Act
+ final double maxZoomLevel = await camera.getMinZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 1.0);
+ expect(channel.log, <Matcher>[
+ isMethodCall('getMinZoomLevel', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ });
+
+ test('Should set the zoom level', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'setZoomLevel': null},
+ );
+
+ // Act
+ await camera.setZoomLevel(cameraId, 2.0);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('setZoomLevel',
+ arguments: <String, Object?>{'cameraId': cameraId, 'zoom': 2.0}),
+ ]);
+ });
+
+ test('Should throw CameraException when illegal zoom level is supplied',
+ () async {
+ // Arrange
+ MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'setZoomLevel': PlatformException(
+ code: 'ZOOM_ERROR',
+ message: 'Illegal zoom error',
+ )
+ },
+ );
+
+ // Act & assert
+ expect(
+ () => camera.setZoomLevel(cameraId, -1.0),
+ throwsA(isA<CameraException>()
+ .having((CameraException e) => e.code, 'code', 'ZOOM_ERROR')
+ .having((CameraException e) => e.description, 'description',
+ 'Illegal zoom error')));
+ });
+
+ test('Should lock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'lockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.lockCaptureOrientation(
+ cameraId, DeviceOrientation.portraitUp);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('lockCaptureOrientation', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'orientation': 'portraitUp'
+ }),
+ ]);
+ });
+
+ test('Should unlock the capture orientation', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'unlockCaptureOrientation': null},
+ );
+
+ // Act
+ await camera.unlockCaptureOrientation(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('unlockCaptureOrientation',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should pause the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'pausePreview': null},
+ );
+
+ // Act
+ await camera.pausePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pausePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should resume the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{'resumePreview': null},
+ );
+
+ // Act
+ await camera.resumePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should start streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ ]);
+
+ subscription.cancel();
+ });
+
+ test('Should stop streaming', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: 'plugins.flutter.io/camera',
+ methods: <String, dynamic>{
+ 'startImageStream': null,
+ 'stopImageStream': null,
+ },
+ );
+
+ // Act
+ final StreamSubscription<CameraImageData> subscription = camera
+ .onStreamedFrameAvailable(cameraId)
+ .listen((CameraImageData imageData) {});
+ subscription.cancel();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startImageStream', arguments: null),
+ isMethodCall('stopImageStream', arguments: null),
+ ]);
+ });
+ });
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/method_channel/type_conversion_test.dart b/packages/camera/camera_platform_interface/test/method_channel/type_conversion_test.dart
new file mode 100644
index 0000000..4818074
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/method_channel/type_conversion_test.dart
@@ -0,0 +1,87 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_platform_interface/src/method_channel/type_conversion.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('CameraImageData can be created', () {
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 35,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.height, 1);
+ expect(cameraImage.width, 4);
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ expect(cameraImage.planes.length, 1);
+ });
+
+ test('CameraImageData has ImageFormatGroup.yuv420 for iOS', () {
+ debugDefaultTargetPlatformOverride = TargetPlatform.iOS;
+
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 875704438,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ });
+
+ test('CameraImageData has ImageFormatGroup.yuv420 for Android', () {
+ debugDefaultTargetPlatformOverride = TargetPlatform.android;
+
+ final CameraImageData cameraImage =
+ cameraImageFromPlatformData(<dynamic, dynamic>{
+ 'format': 35,
+ 'height': 1,
+ 'width': 4,
+ 'lensAperture': 1.8,
+ 'sensorExposureTime': 9991324,
+ 'sensorSensitivity': 92.0,
+ 'planes': <dynamic>[
+ <dynamic, dynamic>{
+ 'bytes': Uint8List.fromList(<int>[1, 2, 3, 4]),
+ 'bytesPerPixel': 1,
+ 'bytesPerRow': 4,
+ 'height': 1,
+ 'width': 4
+ }
+ ]
+ });
+ expect(cameraImage.format.group, ImageFormatGroup.yuv420);
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/types/camera_description_test.dart b/packages/camera/camera_platform_interface/test/types/camera_description_test.dart
new file mode 100644
index 0000000..a86df03
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/types/camera_description_test.dart
@@ -0,0 +1,113 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraLensDirection tests', () {
+ test('CameraLensDirection should contain 3 options', () {
+ const List<CameraLensDirection> values = CameraLensDirection.values;
+
+ expect(values.length, 3);
+ });
+
+ test('CameraLensDirection enum should have items in correct index', () {
+ const List<CameraLensDirection> values = CameraLensDirection.values;
+
+ expect(values[0], CameraLensDirection.front);
+ expect(values[1], CameraLensDirection.back);
+ expect(values[2], CameraLensDirection.external);
+ });
+ });
+
+ group('CameraDescription tests', () {
+ test('Constructor should initialize all properties', () {
+ const CameraDescription description = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 90,
+ );
+
+ expect(description.name, 'Test');
+ expect(description.lensDirection, CameraLensDirection.front);
+ expect(description.sensorOrientation, 90);
+ });
+
+ test('equals should return true if objects are the same', () {
+ const CameraDescription firstDescription = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 90,
+ );
+ const CameraDescription secondDescription = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 90,
+ );
+
+ expect(firstDescription == secondDescription, true);
+ });
+
+ test('equals should return false if name is different', () {
+ const CameraDescription firstDescription = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 90,
+ );
+ const CameraDescription secondDescription = CameraDescription(
+ name: 'Testing',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 90,
+ );
+
+ expect(firstDescription == secondDescription, false);
+ });
+
+ test('equals should return false if lens direction is different', () {
+ const CameraDescription firstDescription = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 90,
+ );
+ const CameraDescription secondDescription = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 90,
+ );
+
+ expect(firstDescription == secondDescription, false);
+ });
+
+ test('equals should return true if sensor orientation is different', () {
+ const CameraDescription firstDescription = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 0,
+ );
+ const CameraDescription secondDescription = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 90,
+ );
+
+ expect(firstDescription == secondDescription, true);
+ });
+
+ test('hashCode should match hashCode of all equality-tested properties',
+ () {
+ const CameraDescription description = CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 0,
+ );
+ final int expectedHashCode =
+ Object.hash(description.name, description.lensDirection);
+
+ expect(description.hashCode, expectedHashCode);
+ });
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/types/camera_exception_test.dart b/packages/camera/camera_platform_interface/test/types/camera_exception_test.dart
new file mode 100644
index 0000000..27baa9c
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/types/camera_exception_test.dart
@@ -0,0 +1,28 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('constructor should initialize properties', () {
+ const String code = 'TEST_ERROR';
+ const String description = 'This is a test error';
+ final CameraException exception = CameraException(code, description);
+
+ expect(exception.code, code);
+ expect(exception.description, description);
+ });
+
+ test('toString: Should return a description of the exception', () {
+ const String code = 'TEST_ERROR';
+ const String description = 'This is a test error';
+ const String expected = 'CameraException($code, $description)';
+ final CameraException exception = CameraException(code, description);
+
+ final String actual = exception.toString();
+
+ expect(actual, expected);
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/types/camera_image_data_test.dart b/packages/camera/camera_platform_interface/test/types/camera_image_data_test.dart
new file mode 100644
index 0000000..d8c582d
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/types/camera_image_data_test.dart
@@ -0,0 +1,40 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#104231)
+// ignore: unnecessary_import
+import 'dart:typed_data';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('CameraImageData can be created', () {
+ debugDefaultTargetPlatformOverride = TargetPlatform.android;
+ final CameraImageData cameraImage = CameraImageData(
+ format: const CameraImageFormat(ImageFormatGroup.jpeg, raw: 42),
+ height: 100,
+ width: 200,
+ lensAperture: 1.8,
+ sensorExposureTime: 11,
+ sensorSensitivity: 92.0,
+ planes: <CameraImagePlane>[
+ CameraImagePlane(
+ bytes: Uint8List.fromList(<int>[1, 2, 3, 4]),
+ bytesPerRow: 4,
+ bytesPerPixel: 2,
+ height: 100,
+ width: 200)
+ ],
+ );
+ expect(cameraImage.format.group, ImageFormatGroup.jpeg);
+ expect(cameraImage.lensAperture, 1.8);
+ expect(cameraImage.sensorExposureTime, 11);
+ expect(cameraImage.sensorSensitivity, 92.0);
+ expect(cameraImage.height, 100);
+ expect(cameraImage.width, 200);
+ expect(cameraImage.planes.length, 1);
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/types/exposure_mode_test.dart b/packages/camera/camera_platform_interface/test/types/exposure_mode_test.dart
new file mode 100644
index 0000000..7dd3824
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/types/exposure_mode_test.dart
@@ -0,0 +1,32 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_platform_interface/src/types/exposure_mode.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('ExposureMode should contain 2 options', () {
+ const List<ExposureMode> values = ExposureMode.values;
+
+ expect(values.length, 2);
+ });
+
+ test('ExposureMode enum should have items in correct index', () {
+ const List<ExposureMode> values = ExposureMode.values;
+
+ expect(values[0], ExposureMode.auto);
+ expect(values[1], ExposureMode.locked);
+ });
+
+ test('serializeExposureMode() should serialize correctly', () {
+ expect(serializeExposureMode(ExposureMode.auto), 'auto');
+ expect(serializeExposureMode(ExposureMode.locked), 'locked');
+ });
+
+ test('deserializeExposureMode() should deserialize correctly', () {
+ expect(deserializeExposureMode('auto'), ExposureMode.auto);
+ expect(deserializeExposureMode('locked'), ExposureMode.locked);
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/types/flash_mode_test.dart b/packages/camera/camera_platform_interface/test/types/flash_mode_test.dart
new file mode 100644
index 0000000..bfc38a0
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/types/flash_mode_test.dart
@@ -0,0 +1,23 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('FlashMode should contain 4 options', () {
+ const List<FlashMode> values = FlashMode.values;
+
+ expect(values.length, 4);
+ });
+
+ test('FlashMode enum should have items in correct index', () {
+ const List<FlashMode> values = FlashMode.values;
+
+ expect(values[0], FlashMode.off);
+ expect(values[1], FlashMode.auto);
+ expect(values[2], FlashMode.always);
+ expect(values[3], FlashMode.torch);
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/types/focus_mode_test.dart b/packages/camera/camera_platform_interface/test/types/focus_mode_test.dart
new file mode 100644
index 0000000..b7e5abf
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/types/focus_mode_test.dart
@@ -0,0 +1,31 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/src/types/focus_mode.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('FocusMode should contain 2 options', () {
+ const List<FocusMode> values = FocusMode.values;
+
+ expect(values.length, 2);
+ });
+
+ test('FocusMode enum should have items in correct index', () {
+ const List<FocusMode> values = FocusMode.values;
+
+ expect(values[0], FocusMode.auto);
+ expect(values[1], FocusMode.locked);
+ });
+
+ test('serializeFocusMode() should serialize correctly', () {
+ expect(serializeFocusMode(FocusMode.auto), 'auto');
+ expect(serializeFocusMode(FocusMode.locked), 'locked');
+ });
+
+ test('deserializeFocusMode() should deserialize correctly', () {
+ expect(deserializeFocusMode('auto'), FocusMode.auto);
+ expect(deserializeFocusMode('locked'), FocusMode.locked);
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/types/image_group_test.dart b/packages/camera/camera_platform_interface/test/types/image_group_test.dart
new file mode 100644
index 0000000..89585cc
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/types/image_group_test.dart
@@ -0,0 +1,17 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ group('$ImageFormatGroup tests', () {
+ test('ImageFormatGroupName extension returns correct values', () {
+ expect(ImageFormatGroup.bgra8888.name(), 'bgra8888');
+ expect(ImageFormatGroup.yuv420.name(), 'yuv420');
+ expect(ImageFormatGroup.jpeg.name(), 'jpeg');
+ expect(ImageFormatGroup.unknown.name(), 'unknown');
+ });
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/types/resolution_preset_test.dart b/packages/camera/camera_platform_interface/test/types/resolution_preset_test.dart
new file mode 100644
index 0000000..abc3397
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/types/resolution_preset_test.dart
@@ -0,0 +1,25 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('ResolutionPreset should contain 6 options', () {
+ const List<ResolutionPreset> values = ResolutionPreset.values;
+
+ expect(values.length, 6);
+ });
+
+ test('ResolutionPreset enum should have items in correct index', () {
+ const List<ResolutionPreset> values = ResolutionPreset.values;
+
+ expect(values[0], ResolutionPreset.low);
+ expect(values[1], ResolutionPreset.medium);
+ expect(values[2], ResolutionPreset.high);
+ expect(values[3], ResolutionPreset.veryHigh);
+ expect(values[4], ResolutionPreset.ultraHigh);
+ expect(values[5], ResolutionPreset.max);
+ });
+}
diff --git a/packages/camera/camera_platform_interface/test/utils/method_channel_mock.dart b/packages/camera/camera_platform_interface/test/utils/method_channel_mock.dart
new file mode 100644
index 0000000..f26d12a
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/utils/method_channel_mock.dart
@@ -0,0 +1,47 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+class MethodChannelMock {
+ MethodChannelMock({
+ required String channelName,
+ this.delay,
+ required this.methods,
+ }) : methodChannel = MethodChannel(channelName) {
+ _ambiguate(TestDefaultBinaryMessengerBinding.instance)!
+ .defaultBinaryMessenger
+ .setMockMethodCallHandler(methodChannel, _handler);
+ }
+
+ final Duration? delay;
+ final MethodChannel methodChannel;
+ final Map<String, dynamic> methods;
+ final List<MethodCall> log = <MethodCall>[];
+
+ Future<dynamic> _handler(MethodCall methodCall) async {
+ log.add(methodCall);
+
+ if (!methods.containsKey(methodCall.method)) {
+ throw MissingPluginException('No implementation found for method '
+ '${methodCall.method} on channel ${methodChannel.name}');
+ }
+
+ return Future<dynamic>.delayed(delay ?? Duration.zero, () {
+ final dynamic result = methods[methodCall.method];
+ if (result is Exception) {
+ throw result;
+ }
+
+ return Future<dynamic>.value(result);
+ });
+ }
+}
+
+/// This allows a value of type T or T? to be treated as a value of type T?.
+///
+/// We use this so that APIs that have become non-nullable can still be used
+/// with `!` and `?` on the stable branch.
+T? _ambiguate<T>(T? value) => value;
diff --git a/packages/camera/camera_platform_interface/test/utils/utils_test.dart b/packages/camera/camera_platform_interface/test/utils/utils_test.dart
new file mode 100644
index 0000000..0e4171d
--- /dev/null
+++ b/packages/camera/camera_platform_interface/test/utils/utils_test.dart
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_platform_interface/src/utils/utils.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ group('Utility methods', () {
+ test(
+ 'Should return CameraLensDirection when valid value is supplied when parsing camera lens direction',
+ () {
+ expect(
+ parseCameraLensDirection('back'),
+ CameraLensDirection.back,
+ );
+ expect(
+ parseCameraLensDirection('front'),
+ CameraLensDirection.front,
+ );
+ expect(
+ parseCameraLensDirection('external'),
+ CameraLensDirection.external,
+ );
+ });
+
+ test(
+ 'Should throw ArgumentException when invalid value is supplied when parsing camera lens direction',
+ () {
+ expect(
+ () => parseCameraLensDirection('test'),
+ throwsA(isArgumentError),
+ );
+ });
+
+ test('serializeDeviceOrientation() should serialize correctly', () {
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitUp),
+ 'portraitUp');
+ expect(serializeDeviceOrientation(DeviceOrientation.portraitDown),
+ 'portraitDown');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeRight),
+ 'landscapeRight');
+ expect(serializeDeviceOrientation(DeviceOrientation.landscapeLeft),
+ 'landscapeLeft');
+ });
+
+ test('deserializeDeviceOrientation() should deserialize correctly', () {
+ expect(deserializeDeviceOrientation('portraitUp'),
+ DeviceOrientation.portraitUp);
+ expect(deserializeDeviceOrientation('portraitDown'),
+ DeviceOrientation.portraitDown);
+ expect(deserializeDeviceOrientation('landscapeRight'),
+ DeviceOrientation.landscapeRight);
+ expect(deserializeDeviceOrientation('landscapeLeft'),
+ DeviceOrientation.landscapeLeft);
+ });
+ });
+}
diff --git a/packages/camera/camera_web/AUTHORS b/packages/camera/camera_web/AUTHORS
new file mode 100644
index 0000000..493a0b4
--- /dev/null
+++ b/packages/camera/camera_web/AUTHORS
@@ -0,0 +1,66 @@
+# Below is a list of people and organizations that have contributed
+# to the Flutter project. Names should be added to the list like so:
+#
+# Name/Organization <email address>
+
+Google Inc.
+The Chromium Authors
+German Saprykin <saprykin.h@gmail.com>
+Benjamin Sauer <sauer.benjamin@gmail.com>
+larsenthomasj@gmail.com
+Ali Bitek <alibitek@protonmail.ch>
+Pol Batlló <pol.batllo@gmail.com>
+Anatoly Pulyaevskiy
+Hayden Flinner <haydenflinner@gmail.com>
+Stefano Rodriguez <hlsroddy@gmail.com>
+Salvatore Giordano <salvatoregiordanoo@gmail.com>
+Brian Armstrong <brian@flutter.institute>
+Paul DeMarco <paulmdemarco@gmail.com>
+Fabricio Nogueira <feufeu@gmail.com>
+Simon Lightfoot <simon@devangels.london>
+Ashton Thomas <ashton@acrinta.com>
+Thomas Danner <thmsdnnr@gmail.com>
+Diego Velásquez <diego.velasquez.lopez@gmail.com>
+Hajime Nakamura <nkmrhj@gmail.com>
+Tuyển Vũ Xuân <netsoft1985@gmail.com>
+Miguel Ruivo <miguel@miguelruivo.com>
+Sarthak Verma <sarthak@artiosys.com>
+Mike Diarmid <mike@invertase.io>
+Invertase <oss@invertase.io>
+Elliot Hesp <elliot@invertase.io>
+Vince Varga <vince.varga@smaho.com>
+Aawaz Gyawali <awazgyawali@gmail.com>
+EUI Limited <ian.evans3@admiralgroup.co.uk>
+Katarina Sheremet <katarina@sheremet.ch>
+Thomas Stockx <thomas@stockxit.com>
+Sarbagya Dhaubanjar <sarbagyastha@gmail.com>
+Ozkan Eksi <ozeksi@gmail.com>
+Rishab Nayak <rishab@bu.edu>
+ko2ic <ko2ic.dev@gmail.com>
+Jonathan Younger <jonathan@daikini.com>
+Jose Sanchez <josesm82@gmail.com>
+Debkanchan Samadder <debu.samadder@gmail.com>
+Audrius Karosevicius <audrius.karosevicius@gmail.com>
+Lukasz Piliszczuk <lukasz@intheloup.io>
+SoundReply Solutions GmbH <ch@soundreply.com>
+Rafal Wachol <rwachol@gmail.com>
+Pau Picas <pau.picas@gmail.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Alexandru Tuca <salexandru.tuca@outlook.com>
+Christian Weder <chrstian.weder@yapeal.ch>
+Rhodes Davis Jr. <rody.davis.jr@gmail.com>
+Luigi Agosti <luigi@tengio.com>
+Quentin Le Guennec <quentin@tengio.com>
+Koushik Ravikumar <koushik@tengio.com>
+Nissim Dsilva <nissim@tengio.com>
+Giancarlo Rocha <giancarloiff@gmail.com>
+Ryo Miyake <ryo@miyake.id>
+Théo Champion <contact.theochampion@gmail.com>
+Kazuki Yamaguchi <y.kazuki0614n@gmail.com>
+Eitan Schwartz <eshvartz@gmail.com>
+Chris Rutkowski <chrisrutkowski89@gmail.com>
+Juan Alvarez <juan.alvarez@resideo.com>
+Aleksandr Yurkovskiy <sanekyy@gmail.com>
+Anton Borries <mail@antonborri.es>
+Alex Li <google@alexv525.com>
+Rahul Raj <64.rahulraj@gmail.com>
diff --git a/packages/camera/camera_web/CHANGELOG.md b/packages/camera/camera_web/CHANGELOG.md
new file mode 100644
index 0000000..2a8d43b
--- /dev/null
+++ b/packages/camera/camera_web/CHANGELOG.md
@@ -0,0 +1,58 @@
+## NEXT
+
+* Updates minimum Flutter version to 3.0.
+
+## 0.3.1+1
+
+* Updates code for stricter lint checks.
+
+## 0.3.1
+
+* Updates to latest camera platform interface, and fails if user attempts to use streaming with recording (since streaming is currently unsupported on web).
+
+## 0.3.0+1
+
+* Updates imports for `prefer_relative_imports`.
+* Updates minimum Flutter version to 2.10.
+* Fixes avoid_redundant_argument_values lint warnings and minor typos.
+* Ignores unnecessary import warnings in preparation for [upcoming Flutter changes](https://github.com/flutter/flutter/pull/106316).
+
+## 0.3.0
+
+* **BREAKING CHANGE**: Renames error code `cameraPermission` to `CameraAccessDenied` to be consistent with other platforms.
+
+## 0.2.1+6
+
+* Minor fixes for new analysis options.
+
+## 0.2.1+5
+
+* Fixes library_private_types_in_public_api, sort_child_properties_last and use_key_in_widget_constructors
+ lint warnings.
+
+## 0.2.1+4
+
+* Migrates from `ui.hash*` to `Object.hash*`.
+* Updates minimum Flutter version for changes in 0.2.1+3.
+
+## 0.2.1+3
+
+* Internal code cleanup for stricter analysis options.
+
+## 0.2.1+2
+
+* Fixes cameraNotReadable error that prevented access to the camera on some Android devices when initializing a camera.
+* Implemented support for new Dart SDKs with an async requestFullscreen API.
+
+## 0.2.1+1
+
+* Update usage documentation.
+
+## 0.2.1
+
+* Add video recording functionality.
+* Fix cameraNotReadable error that prevented access to the camera on some Android devices.
+
+## 0.2.0
+
+* Initial release, adapted from the Flutter [I/O Photobooth](https://photobooth.flutter.dev/) project.
diff --git a/packages/camera/camera_web/LICENSE b/packages/camera/camera_web/LICENSE
new file mode 100644
index 0000000..c6823b8
--- /dev/null
+++ b/packages/camera/camera_web/LICENSE
@@ -0,0 +1,25 @@
+Copyright 2013 The Flutter Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/packages/camera/camera_web/README.md b/packages/camera/camera_web/README.md
new file mode 100644
index 0000000..04bf665
--- /dev/null
+++ b/packages/camera/camera_web/README.md
@@ -0,0 +1,112 @@
+# Camera Web Plugin
+
+The web implementation of [`camera`][camera].
+
+*Note*: This plugin is under development. See [missing implementation](#missing-implementation).
+
+## Usage
+
+### Depend on the package
+
+This package is [endorsed](https://flutter.dev/docs/development/packages-and-plugins/developing-packages#endorsed-federated-plugin),
+which means you can simply use `camera`
+normally. This package will be automatically included in your app when you do.
+
+## Example
+
+Find the example in the [`camera` package](https://pub.dev/packages/camera#example).
+
+## Limitations on the web platform
+
+### Camera devices
+
+The camera devices are accessed with [Stream Web API](https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API)
+with the following [browser support](https://caniuse.com/stream):
+
+
+
+Accessing camera devices requires a [secure browsing context](https://developer.mozilla.org/en-US/docs/Web/Security/Secure_Contexts).
+Broadly speaking, this means that you need to serve your web application over HTTPS
+(or `localhost` for local development). For insecure contexts
+`CameraPlatform.availableCameras` might throw a `CameraException` with the
+`permissionDenied` error code.
+
+### Device orientation
+
+The device orientation implementation is backed by [`Screen Orientation Web API`](https://www.w3.org/TR/screen-orientation/)
+with the following [browser support](https://caniuse.com/screen-orientation):
+
+
+
+For the browsers that do not support the device orientation:
+
+- `CameraPlatform.onDeviceOrientationChanged` returns an empty stream.
+- `CameraPlatform.lockCaptureOrientation` and `CameraPlatform.unlockCaptureOrientation`
+throw a `PlatformException` with the `orientationNotSupported` error code.
+
+### Flash mode and zoom level
+
+The flash mode and zoom level implementation is backed by [Image Capture Web API](https://w3c.github.io/mediacapture-image/)
+with the following [browser support](https://caniuse.com/mdn-api_imagecapture):
+
+
+
+For the browsers that do not support the flash mode:
+
+- `CameraPlatform.setFlashMode` throws a `PlatformException` with the
+`torchModeNotSupported` error code.
+
+For the browsers that do not support the zoom level:
+
+- `CameraPlatform.getMaxZoomLevel`, `CameraPlatform.getMinZoomLevel` and
+`CameraPlatform.setZoomLevel` throw a `PlatformException` with the
+`zoomLevelNotSupported` error code.
+
+### Taking a picture
+
+The image capturing implementation is backed by [`URL.createObjectUrl` Web API](https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL)
+with the following [browser support](https://caniuse.com/bloburls):
+
+
+
+The web platform does not support `dart:io`. Attempts to display a captured image
+using `Image.file` will throw an error. The captured image contains a network-accessible
+URL pointing to a location within the browser (blob) and can be displayed using
+`Image.network` or `Image.memory` after loading the image bytes to memory.
+
+See the example below:
+
+```dart
+if (kIsWeb) {
+ Image.network(capturedImage.path);
+} else {
+ Image.file(File(capturedImage.path));
+}
+```
+
+### Video recording
+
+The video recording implementation is backed by [MediaRecorder Web API](https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder) with the following [browser support](https://caniuse.com/mdn-api_mediarecorder):
+
+.
+
+A video is recorded in one of the following video MIME types:
+- video/webm (e.g. on Chrome or Firefox)
+- video/mp4 (e.g. on Safari)
+
+Pausing, resuming or stopping the video recording throws a `PlatformException` with the `videoRecordingNotStarted` error code if the video recording was not started.
+
+For the browsers that do not support the video recording:
+- `CameraPlatform.startVideoRecording` throws a `PlatformException` with the `notSupported` error code.
+
+## Missing implementation
+
+The web implementation of [`camera`][camera] is missing the following features:
+- Exposure mode, point and offset
+- Focus mode and point
+- Sensor orientation
+- Image format group
+- Streaming of frames
+
+<!-- Links -->
+[camera]: https://pub.dev/packages/camera
diff --git a/packages/camera/camera_web/example/README.md b/packages/camera/camera_web/example/README.md
new file mode 100644
index 0000000..0e51ae5
--- /dev/null
+++ b/packages/camera/camera_web/example/README.md
@@ -0,0 +1,19 @@
+# Platform Implementation Test App
+
+This is a test app for manual testing and automated integration testing
+of this platform implementation. It is not intended to demonstrate actual use of
+this package, since the intent is that plugin clients use the app-facing
+package.
+
+Unless you are making changes to this implementation package, this example is
+very unlikely to be relevant.
+
+## Testing
+
+This package uses `package:integration_test` to run its tests in a web browser.
+
+See [Plugin Tests > Web Tests](https://github.com/flutter/flutter/wiki/Plugin-Tests#web-tests)
+in the Flutter wiki for instructions to set up and run the tests in this package.
+
+Check [flutter.dev > Integration testing](https://flutter.dev/docs/testing/integration-tests)
+for more info.
diff --git a/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart b/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
new file mode 100644
index 0000000..e89018f
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
@@ -0,0 +1,179 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html';
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraErrorCode', () {
+ group('toString returns a correct type for', () {
+ testWidgets('notSupported', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.notSupported.toString(),
+ equals('cameraNotSupported'),
+ );
+ });
+
+ testWidgets('notFound', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.notFound.toString(),
+ equals('cameraNotFound'),
+ );
+ });
+
+ testWidgets('notReadable', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.notReadable.toString(),
+ equals('cameraNotReadable'),
+ );
+ });
+
+ testWidgets('overconstrained', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.overconstrained.toString(),
+ equals('cameraOverconstrained'),
+ );
+ });
+
+ testWidgets('permissionDenied', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.permissionDenied.toString(),
+ equals('CameraAccessDenied'),
+ );
+ });
+
+ testWidgets('type', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.type.toString(),
+ equals('cameraType'),
+ );
+ });
+
+ testWidgets('abort', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.abort.toString(),
+ equals('cameraAbort'),
+ );
+ });
+
+ testWidgets('security', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.security.toString(),
+ equals('cameraSecurity'),
+ );
+ });
+
+ testWidgets('missingMetadata', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.missingMetadata.toString(),
+ equals('cameraMissingMetadata'),
+ );
+ });
+
+ testWidgets('orientationNotSupported', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.orientationNotSupported.toString(),
+ equals('orientationNotSupported'),
+ );
+ });
+
+ testWidgets('torchModeNotSupported', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.torchModeNotSupported.toString(),
+ equals('torchModeNotSupported'),
+ );
+ });
+
+ testWidgets('zoomLevelNotSupported', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.zoomLevelNotSupported.toString(),
+ equals('zoomLevelNotSupported'),
+ );
+ });
+
+ testWidgets('zoomLevelInvalid', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.zoomLevelInvalid.toString(),
+ equals('zoomLevelInvalid'),
+ );
+ });
+
+ testWidgets('notStarted', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.notStarted.toString(),
+ equals('cameraNotStarted'),
+ );
+ });
+
+ testWidgets('videoRecordingNotStarted', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.videoRecordingNotStarted.toString(),
+ equals('videoRecordingNotStarted'),
+ );
+ });
+
+ testWidgets('unknown', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.unknown.toString(),
+ equals('cameraUnknown'),
+ );
+ });
+
+ group('fromMediaError', () {
+ testWidgets('with aborted error code', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(MediaError.MEDIA_ERR_ABORTED),
+ ).toString(),
+ equals('mediaErrorAborted'),
+ );
+ });
+
+ testWidgets('with network error code', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(MediaError.MEDIA_ERR_NETWORK),
+ ).toString(),
+ equals('mediaErrorNetwork'),
+ );
+ });
+
+ testWidgets('with decode error code', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(MediaError.MEDIA_ERR_DECODE),
+ ).toString(),
+ equals('mediaErrorDecode'),
+ );
+ });
+
+ testWidgets('with source not supported error code',
+ (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED),
+ ).toString(),
+ equals('mediaErrorSourceNotSupported'),
+ );
+ });
+
+ testWidgets('with unknown error code', (WidgetTester tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(5),
+ ).toString(),
+ equals('mediaErrorUnknown'),
+ );
+ });
+ });
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_metadata_test.dart b/packages/camera/camera_web/example/integration_test/camera_metadata_test.dart
new file mode 100644
index 0000000..07252be
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_metadata_test.dart
@@ -0,0 +1,28 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraMetadata', () {
+ testWidgets('supports value equality', (WidgetTester tester) async {
+ expect(
+ const CameraMetadata(
+ deviceId: 'deviceId',
+ facingMode: 'environment',
+ ),
+ equals(
+ const CameraMetadata(
+ deviceId: 'deviceId',
+ facingMode: 'environment',
+ ),
+ ),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_options_test.dart b/packages/camera/camera_web/example/integration_test/camera_options_test.dart
new file mode 100644
index 0000000..6619ff4
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_options_test.dart
@@ -0,0 +1,211 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraOptions', () {
+ testWidgets('serializes correctly', (WidgetTester tester) async {
+ final CameraOptions cameraOptions = CameraOptions(
+ audio: const AudioConstraints(enabled: true),
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.user),
+ ),
+ );
+
+ expect(
+ cameraOptions.toJson(),
+ equals(<String, Object>{
+ 'audio': cameraOptions.audio.toJson(),
+ 'video': cameraOptions.video.toJson(),
+ }),
+ );
+ });
+
+ testWidgets('supports value equality', (WidgetTester tester) async {
+ expect(
+ CameraOptions(
+ audio: const AudioConstraints(),
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint(CameraType.environment),
+ width:
+ const VideoSizeConstraint(minimum: 10, ideal: 15, maximum: 20),
+ height:
+ const VideoSizeConstraint(minimum: 15, ideal: 20, maximum: 25),
+ deviceId: 'deviceId',
+ ),
+ ),
+ equals(
+ CameraOptions(
+ audio: const AudioConstraints(),
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint(CameraType.environment),
+ width: const VideoSizeConstraint(
+ minimum: 10, ideal: 15, maximum: 20),
+ height: const VideoSizeConstraint(
+ minimum: 15, ideal: 20, maximum: 25),
+ deviceId: 'deviceId',
+ ),
+ ),
+ ),
+ );
+ });
+ });
+
+ group('AudioConstraints', () {
+ testWidgets('serializes correctly', (WidgetTester tester) async {
+ expect(
+ const AudioConstraints(enabled: true).toJson(),
+ equals(true),
+ );
+ });
+
+ testWidgets('supports value equality', (WidgetTester tester) async {
+ expect(
+ const AudioConstraints(enabled: true),
+ equals(const AudioConstraints(enabled: true)),
+ );
+ });
+ });
+
+ group('VideoConstraints', () {
+ testWidgets('serializes correctly', (WidgetTester tester) async {
+ final VideoConstraints videoConstraints = VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.user),
+ width: const VideoSizeConstraint(ideal: 100, maximum: 100),
+ height: const VideoSizeConstraint(ideal: 50, maximum: 50),
+ deviceId: 'deviceId',
+ );
+
+ expect(
+ videoConstraints.toJson(),
+ equals(<String, Object>{
+ 'facingMode': videoConstraints.facingMode!.toJson(),
+ 'width': videoConstraints.width!.toJson(),
+ 'height': videoConstraints.height!.toJson(),
+ 'deviceId': <String, Object>{
+ 'exact': 'deviceId',
+ }
+ }),
+ );
+ });
+
+ testWidgets('supports value equality', (WidgetTester tester) async {
+ expect(
+ VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.environment),
+ width:
+ const VideoSizeConstraint(minimum: 90, ideal: 100, maximum: 100),
+ height:
+ const VideoSizeConstraint(minimum: 40, ideal: 50, maximum: 50),
+ deviceId: 'deviceId',
+ ),
+ equals(
+ VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.environment),
+ width: const VideoSizeConstraint(
+ minimum: 90, ideal: 100, maximum: 100),
+ height:
+ const VideoSizeConstraint(minimum: 40, ideal: 50, maximum: 50),
+ deviceId: 'deviceId',
+ ),
+ ),
+ );
+ });
+ });
+
+ group('FacingModeConstraint', () {
+ group('ideal', () {
+ testWidgets(
+ 'serializes correctly '
+ 'for environment camera type', (WidgetTester tester) async {
+ expect(
+ FacingModeConstraint(CameraType.environment).toJson(),
+ equals(<String, Object>{'ideal': 'environment'}),
+ );
+ });
+
+ testWidgets(
+ 'serializes correctly '
+ 'for user camera type', (WidgetTester tester) async {
+ expect(
+ FacingModeConstraint(CameraType.user).toJson(),
+ equals(<String, Object>{'ideal': 'user'}),
+ );
+ });
+
+ testWidgets('supports value equality', (WidgetTester tester) async {
+ expect(
+ FacingModeConstraint(CameraType.user),
+ equals(FacingModeConstraint(CameraType.user)),
+ );
+ });
+ });
+
+ group('exact', () {
+ testWidgets(
+ 'serializes correctly '
+ 'for environment camera type', (WidgetTester tester) async {
+ expect(
+ FacingModeConstraint.exact(CameraType.environment).toJson(),
+ equals(<String, Object>{'exact': 'environment'}),
+ );
+ });
+
+ testWidgets(
+ 'serializes correctly '
+ 'for user camera type', (WidgetTester tester) async {
+ expect(
+ FacingModeConstraint.exact(CameraType.user).toJson(),
+ equals(<String, Object>{'exact': 'user'}),
+ );
+ });
+
+ testWidgets('supports value equality', (WidgetTester tester) async {
+ expect(
+ FacingModeConstraint.exact(CameraType.environment),
+ equals(FacingModeConstraint.exact(CameraType.environment)),
+ );
+ });
+ });
+ });
+
+ group('VideoSizeConstraint ', () {
+ testWidgets('serializes correctly', (WidgetTester tester) async {
+ expect(
+ const VideoSizeConstraint(
+ minimum: 200,
+ ideal: 400,
+ maximum: 400,
+ ).toJson(),
+ equals(<String, Object>{
+ 'min': 200,
+ 'ideal': 400,
+ 'max': 400,
+ }),
+ );
+ });
+
+ testWidgets('supports value equality', (WidgetTester tester) async {
+ expect(
+ const VideoSizeConstraint(
+ minimum: 100,
+ ideal: 200,
+ maximum: 300,
+ ),
+ equals(
+ const VideoSizeConstraint(
+ minimum: 100,
+ ideal: 200,
+ maximum: 300,
+ ),
+ ),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_service_test.dart b/packages/camera/camera_web/example/integration_test/camera_service_test.dart
new file mode 100644
index 0000000..2719932
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_service_test.dart
@@ -0,0 +1,920 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html';
+import 'dart:js_util' as js_util;
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#106316)
+// ignore: unnecessary_import
+import 'dart:ui';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/shims/dart_js_util.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:mocktail/mocktail.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraService', () {
+ const int cameraId = 1;
+
+ late Window window;
+ late Navigator navigator;
+ late MediaDevices mediaDevices;
+ late CameraService cameraService;
+ late JsUtil jsUtil;
+
+ setUp(() async {
+ window = MockWindow();
+ navigator = MockNavigator();
+ mediaDevices = MockMediaDevices();
+ jsUtil = MockJsUtil();
+
+ when(() => window.navigator).thenReturn(navigator);
+ when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+
+ // Mock JsUtil to return the real getProperty from dart:js_util.
+ when<dynamic>(() => jsUtil.getProperty(any(), any())).thenAnswer(
+ (Invocation invocation) => js_util.getProperty<dynamic>(
+ invocation.positionalArguments[0] as Object,
+ invocation.positionalArguments[1] as Object,
+ ),
+ );
+
+ cameraService = CameraService()..window = window;
+ });
+
+ group('getMediaStreamForOptions', () {
+ testWidgets(
+ 'calls MediaDevices.getUserMedia '
+ 'with provided options', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenAnswer((_) async => FakeMediaStream(<MediaStreamTrack>[]));
+
+ final CameraOptions options = CameraOptions(
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.user),
+ width: const VideoSizeConstraint(ideal: 200),
+ ),
+ );
+
+ await cameraService.getMediaStreamForOptions(options);
+
+ verify(
+ () => mediaDevices.getUserMedia(options.toJson()),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'throws PlatformException '
+ 'with notSupported error '
+ 'when there are no media devices', (WidgetTester tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(const CameraOptions()),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ group('throws CameraWebException', () {
+ testWidgets(
+ 'with notFound error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with NotFoundError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('NotFoundError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.notFound),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notFound error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with DevicesNotFoundError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('DevicesNotFoundError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.notFound),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notReadable error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with NotReadableError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('NotReadableError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.notReadable),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notReadable error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with TrackStartError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('TrackStartError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.notReadable),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with overconstrained error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with OverconstrainedError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('OverconstrainedError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.overconstrained),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with overconstrained error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with ConstraintNotSatisfiedError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('ConstraintNotSatisfiedError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.overconstrained),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with permissionDenied error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with NotAllowedError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('NotAllowedError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.permissionDenied),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with permissionDenied error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with PermissionDeniedError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('PermissionDeniedError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.permissionDenied),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with type error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with TypeError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('TypeError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.type),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with abort error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with AbortError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('AbortError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.abort),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with security error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with SecurityError', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('SecurityError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.security),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with unknown error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with an unknown error', (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('Unknown'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.unknown),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with unknown error '
+ 'when MediaDevices.getUserMedia throws an unknown exception',
+ (WidgetTester tester) async {
+ when(() => mediaDevices.getUserMedia(any())).thenThrow(Exception());
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((CameraWebException e) => e.cameraId, 'cameraId',
+ cameraId)
+ .having((CameraWebException e) => e.code, 'code',
+ CameraErrorCode.unknown),
+ ),
+ );
+ });
+ });
+ });
+
+ group('getZoomLevelCapabilityForCamera', () {
+ late Camera camera;
+ late List<MediaStreamTrack> videoTracks;
+
+ setUp(() {
+ camera = MockCamera();
+ videoTracks = <MediaStreamTrack>[
+ MockMediaStreamTrack(),
+ MockMediaStreamTrack()
+ ];
+
+ when(() => camera.textureId).thenReturn(0);
+ when(() => camera.stream).thenReturn(FakeMediaStream(videoTracks));
+
+ cameraService.jsUtil = jsUtil;
+ });
+
+ testWidgets(
+ 'returns the zoom level capability '
+ 'based on the first video track', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'zoom': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'zoom': js_util.jsify(<dynamic, dynamic>{
+ 'min': 100,
+ 'max': 400,
+ 'step': 2,
+ }),
+ });
+
+ final ZoomLevelCapability zoomLevelCapability =
+ cameraService.getZoomLevelCapabilityForCamera(camera);
+
+ expect(zoomLevelCapability.minimum, equals(100.0));
+ expect(zoomLevelCapability.maximum, equals(400.0));
+ expect(zoomLevelCapability.videoTrack, equals(videoTracks.first));
+ });
+
+ group('throws CameraWebException', () {
+ testWidgets(
+ 'with zoomLevelNotSupported error '
+ 'when there are no media devices', (WidgetTester tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ expect(
+ () => cameraService.getZoomLevelCapabilityForCamera(camera),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ camera.textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with zoomLevelNotSupported error '
+ 'when the zoom level is not supported '
+ 'in the browser', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'zoom': false,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'zoom': <dynamic, dynamic>{
+ 'min': 100,
+ 'max': 400,
+ 'step': 2,
+ },
+ });
+
+ expect(
+ () => cameraService.getZoomLevelCapabilityForCamera(camera),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ camera.textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with zoomLevelNotSupported error '
+ 'when the zoom level is not supported '
+ 'by the camera', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'zoom': true,
+ });
+
+ when(videoTracks.first.getCapabilities)
+ .thenReturn(<dynamic, dynamic>{});
+
+ expect(
+ () => cameraService.getZoomLevelCapabilityForCamera(camera),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ camera.textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notStarted error '
+ 'when the camera stream has not been initialized',
+ (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'zoom': true,
+ });
+
+ // Create a camera stream with no video tracks.
+ when(() => camera.stream)
+ .thenReturn(FakeMediaStream(<MediaStreamTrack>[]));
+
+ expect(
+ () => cameraService.getZoomLevelCapabilityForCamera(camera),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ camera.textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.notStarted,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('getFacingModeForVideoTrack', () {
+ setUp(() {
+ cameraService.jsUtil = jsUtil;
+ });
+
+ testWidgets(
+ 'throws PlatformException '
+ 'with notSupported error '
+ 'when there are no media devices', (WidgetTester tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ expect(
+ () =>
+ cameraService.getFacingModeForVideoTrack(MockMediaStreamTrack()),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'returns null '
+ 'when the facing mode is not supported', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'facingMode': false,
+ });
+
+ final String? facingMode =
+ cameraService.getFacingModeForVideoTrack(MockMediaStreamTrack());
+
+ expect(facingMode, isNull);
+ });
+
+ group('when the facing mode is supported', () {
+ late MediaStreamTrack videoTrack;
+
+ setUp(() {
+ videoTrack = MockMediaStreamTrack();
+
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ .thenReturn(true);
+
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'facingMode': true,
+ });
+ });
+
+ testWidgets(
+ 'returns an appropriate facing mode '
+ 'based on the video track settings', (WidgetTester tester) async {
+ when(videoTrack.getSettings)
+ .thenReturn(<dynamic, dynamic>{'facingMode': 'user'});
+
+ final String? facingMode =
+ cameraService.getFacingModeForVideoTrack(videoTrack);
+
+ expect(facingMode, equals('user'));
+ });
+
+ testWidgets(
+ 'returns an appropriate facing mode '
+ 'based on the video track capabilities '
+ 'when the facing mode setting is empty',
+ (WidgetTester tester) async {
+ when(videoTrack.getSettings).thenReturn(<dynamic, dynamic>{});
+ when(videoTrack.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'facingMode': <dynamic>['environment', 'left']
+ });
+
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ .thenReturn(true);
+
+ final String? facingMode =
+ cameraService.getFacingModeForVideoTrack(videoTrack);
+
+ expect(facingMode, equals('environment'));
+ });
+
+ testWidgets(
+ 'returns null '
+ 'when the facing mode setting '
+ 'and capabilities are empty', (WidgetTester tester) async {
+ when(videoTrack.getSettings).thenReturn(<dynamic, dynamic>{});
+ when(videoTrack.getCapabilities)
+ .thenReturn(<dynamic, dynamic>{'facingMode': <dynamic>[]});
+
+ final String? facingMode =
+ cameraService.getFacingModeForVideoTrack(videoTrack);
+
+ expect(facingMode, isNull);
+ });
+
+ testWidgets(
+ 'returns null '
+ 'when the facing mode setting is empty and '
+ 'the video track capabilities are not supported',
+ (WidgetTester tester) async {
+ when(videoTrack.getSettings).thenReturn(<dynamic, dynamic>{});
+
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ .thenReturn(false);
+
+ final String? facingMode =
+ cameraService.getFacingModeForVideoTrack(videoTrack);
+
+ expect(facingMode, isNull);
+ });
+ });
+ });
+
+ group('mapFacingModeToLensDirection', () {
+ testWidgets(
+ 'returns front '
+ 'when the facing mode is user', (WidgetTester tester) async {
+ expect(
+ cameraService.mapFacingModeToLensDirection('user'),
+ equals(CameraLensDirection.front),
+ );
+ });
+
+ testWidgets(
+ 'returns back '
+ 'when the facing mode is environment', (WidgetTester tester) async {
+ expect(
+ cameraService.mapFacingModeToLensDirection('environment'),
+ equals(CameraLensDirection.back),
+ );
+ });
+
+ testWidgets(
+ 'returns external '
+ 'when the facing mode is left', (WidgetTester tester) async {
+ expect(
+ cameraService.mapFacingModeToLensDirection('left'),
+ equals(CameraLensDirection.external),
+ );
+ });
+
+ testWidgets(
+ 'returns external '
+ 'when the facing mode is right', (WidgetTester tester) async {
+ expect(
+ cameraService.mapFacingModeToLensDirection('right'),
+ equals(CameraLensDirection.external),
+ );
+ });
+ });
+
+ group('mapFacingModeToCameraType', () {
+ testWidgets(
+ 'returns user '
+ 'when the facing mode is user', (WidgetTester tester) async {
+ expect(
+ cameraService.mapFacingModeToCameraType('user'),
+ equals(CameraType.user),
+ );
+ });
+
+ testWidgets(
+ 'returns environment '
+ 'when the facing mode is environment', (WidgetTester tester) async {
+ expect(
+ cameraService.mapFacingModeToCameraType('environment'),
+ equals(CameraType.environment),
+ );
+ });
+
+ testWidgets(
+ 'returns user '
+ 'when the facing mode is left', (WidgetTester tester) async {
+ expect(
+ cameraService.mapFacingModeToCameraType('left'),
+ equals(CameraType.user),
+ );
+ });
+
+ testWidgets(
+ 'returns user '
+ 'when the facing mode is right', (WidgetTester tester) async {
+ expect(
+ cameraService.mapFacingModeToCameraType('right'),
+ equals(CameraType.user),
+ );
+ });
+ });
+
+ group('mapResolutionPresetToSize', () {
+ testWidgets(
+ 'returns 4096x2160 '
+ 'when the resolution preset is max', (WidgetTester tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.max),
+ equals(const Size(4096, 2160)),
+ );
+ });
+
+ testWidgets(
+ 'returns 4096x2160 '
+ 'when the resolution preset is ultraHigh',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.ultraHigh),
+ equals(const Size(4096, 2160)),
+ );
+ });
+
+ testWidgets(
+ 'returns 1920x1080 '
+ 'when the resolution preset is veryHigh',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.veryHigh),
+ equals(const Size(1920, 1080)),
+ );
+ });
+
+ testWidgets(
+ 'returns 1280x720 '
+ 'when the resolution preset is high', (WidgetTester tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.high),
+ equals(const Size(1280, 720)),
+ );
+ });
+
+ testWidgets(
+ 'returns 720x480 '
+ 'when the resolution preset is medium', (WidgetTester tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.medium),
+ equals(const Size(720, 480)),
+ );
+ });
+
+ testWidgets(
+ 'returns 320x240 '
+ 'when the resolution preset is low', (WidgetTester tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.low),
+ equals(const Size(320, 240)),
+ );
+ });
+ });
+
+ group('mapDeviceOrientationToOrientationType', () {
+ testWidgets(
+ 'returns portraitPrimary '
+ 'when the device orientation is portraitUp',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.portraitUp,
+ ),
+ equals(OrientationType.portraitPrimary),
+ );
+ });
+
+ testWidgets(
+ 'returns landscapePrimary '
+ 'when the device orientation is landscapeLeft',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.landscapeLeft,
+ ),
+ equals(OrientationType.landscapePrimary),
+ );
+ });
+
+ testWidgets(
+ 'returns portraitSecondary '
+ 'when the device orientation is portraitDown',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.portraitDown,
+ ),
+ equals(OrientationType.portraitSecondary),
+ );
+ });
+
+ testWidgets(
+ 'returns landscapeSecondary '
+ 'when the device orientation is landscapeRight',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.landscapeRight,
+ ),
+ equals(OrientationType.landscapeSecondary),
+ );
+ });
+ });
+
+ group('mapOrientationTypeToDeviceOrientation', () {
+ testWidgets(
+ 'returns portraitUp '
+ 'when the orientation type is portraitPrimary',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitPrimary,
+ ),
+ equals(DeviceOrientation.portraitUp),
+ );
+ });
+
+ testWidgets(
+ 'returns landscapeLeft '
+ 'when the orientation type is landscapePrimary',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.landscapePrimary,
+ ),
+ equals(DeviceOrientation.landscapeLeft),
+ );
+ });
+
+ testWidgets(
+ 'returns portraitDown '
+ 'when the orientation type is portraitSecondary',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitSecondary,
+ ),
+ equals(DeviceOrientation.portraitDown),
+ );
+ });
+
+ testWidgets( // NOTE(review): exact duplicate of the preceding portraitSecondary test — remove one copy when editing the file directly (left in place here to preserve the patch hunk's line count).
+ 'returns portraitDown '
+ 'when the orientation type is portraitSecondary',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitSecondary,
+ ),
+ equals(DeviceOrientation.portraitDown),
+ );
+ });
+
+ testWidgets(
+ 'returns landscapeRight '
+ 'when the orientation type is landscapeSecondary',
+ (WidgetTester tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.landscapeSecondary,
+ ),
+ equals(DeviceOrientation.landscapeRight),
+ );
+ });
+
+ testWidgets(
+ 'returns portraitUp '
+ 'for an unknown orientation type', (WidgetTester tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ 'unknown',
+ ),
+ equals(DeviceOrientation.portraitUp),
+ );
+ });
+ });
+ });
+}
+
+class JSNoSuchMethodError implements Exception {} // Test stand-in for a JS NoSuchMethodError; presumably thrown by fakes in these tests — confirm at call sites.
diff --git a/packages/camera/camera_web/example/integration_test/camera_test.dart b/packages/camera/camera_web/example/integration_test/camera_test.dart
new file mode 100644
index 0000000..705d775
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_test.dart
@@ -0,0 +1,1723 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html';
+import 'dart:ui';
+
+import 'package:async/async.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:mocktail/mocktail.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('Camera', () {
+ const int textureId = 1;
+
+ late Window window;
+ late Navigator navigator;
+ late MediaDevices mediaDevices;
+
+ late MediaStream mediaStream;
+ late CameraService cameraService;
+
+ setUp(() {
+ window = MockWindow();
+ navigator = MockNavigator();
+ mediaDevices = MockMediaDevices();
+
+ when(() => window.navigator).thenReturn(navigator);
+ when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+
+ cameraService = MockCameraService();
+
+ final VideoElement videoElement =
+ getVideoElementWithBlankStream(const Size(10, 10));
+ mediaStream = videoElement.captureStream();
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ any(),
+ cameraId: any(named: 'cameraId'),
+ ),
+ ).thenAnswer((_) => Future<MediaStream>.value(mediaStream));
+ });
+
+ setUpAll(() {
+ registerFallbackValue(MockCameraOptions());
+ });
+
+ group('initialize', () {
+ testWidgets(
+ 'calls CameraService.getMediaStreamForOptions '
+ 'with provided options', (WidgetTester tester) async {
+ final CameraOptions options = CameraOptions(
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.user),
+ width: const VideoSizeConstraint(ideal: 200),
+ ),
+ );
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ options: options,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ verify(
+ () => cameraService.getMediaStreamForOptions(
+ options,
+ cameraId: textureId,
+ ),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'creates a video element '
+ 'with correct properties', (WidgetTester tester) async {
+ const AudioConstraints audioConstraints =
+ AudioConstraints(enabled: true);
+ final VideoConstraints videoConstraints = VideoConstraints(
+ facingMode: FacingModeConstraint(
+ CameraType.user,
+ ),
+ );
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ options: CameraOptions(
+ audio: audioConstraints,
+ video: videoConstraints,
+ ),
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(camera.videoElement, isNotNull);
+ expect(camera.videoElement.autoplay, isFalse);
+ expect(camera.videoElement.muted, isTrue);
+ expect(camera.videoElement.srcObject, mediaStream);
+ expect(camera.videoElement.attributes.keys, contains('playsinline'));
+
+ expect(
+ camera.videoElement.style.transformOrigin, equals('center center'));
+ expect(camera.videoElement.style.pointerEvents, equals('none'));
+ expect(camera.videoElement.style.width, equals('100%'));
+ expect(camera.videoElement.style.height, equals('100%'));
+ expect(camera.videoElement.style.objectFit, equals('cover'));
+ });
+
+ testWidgets(
+ 'flips the video element horizontally '
+ 'for a back camera', (WidgetTester tester) async {
+ final VideoConstraints videoConstraints = VideoConstraints(
+ facingMode: FacingModeConstraint(
+ CameraType.environment,
+ ),
+ );
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ options: CameraOptions(
+ video: videoConstraints,
+ ),
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(camera.videoElement.style.transform, equals('scaleX(-1)'));
+ });
+
+ testWidgets(
+ 'creates a wrapping div element '
+ 'with correct properties', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(camera.divElement, isNotNull);
+ expect(camera.divElement.style.objectFit, equals('cover'));
+ expect(camera.divElement.children, contains(camera.videoElement));
+ });
+
+ testWidgets('initializes the camera stream', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(camera.stream, mediaStream);
+ });
+
+ testWidgets(
+ 'throws an exception '
+ 'when CameraService.getMediaStreamForOptions throws',
+ (WidgetTester tester) async {
+ final Exception exception =
+ Exception('A media stream exception occured.');
+
+ when(() => cameraService.getMediaStreamForOptions(any(),
+ cameraId: any(named: 'cameraId'))).thenThrow(exception);
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ expect(
+ camera.initialize,
+ throwsA(exception),
+ );
+ });
+ });
+
+ group('play', () {
+ testWidgets('starts playing the video element',
+ (WidgetTester tester) async {
+ bool startedPlaying = false;
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ final StreamSubscription<Event> cameraPlaySubscription = camera
+ .videoElement.onPlay
+ .listen((Event event) => startedPlaying = true);
+
+ await camera.play();
+
+ expect(startedPlaying, isTrue);
+
+ await cameraPlaySubscription.cancel();
+ });
+
+ testWidgets(
+ 'initializes the camera stream '
+ 'from CameraService.getMediaStreamForOptions '
+ 'if it does not exist', (WidgetTester tester) async {
+ const CameraOptions options = CameraOptions(
+ video: VideoConstraints(
+ width: VideoSizeConstraint(ideal: 100),
+ ),
+ );
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ options: options,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ /// Remove the video element's source
+ /// by stopping the camera.
+ camera.stop();
+
+ await camera.play();
+
+ // Should be called twice: for initialize and play.
+ verify(
+ () => cameraService.getMediaStreamForOptions(
+ options,
+ cameraId: textureId,
+ ),
+ ).called(2);
+
+ expect(camera.videoElement.srcObject, mediaStream);
+ expect(camera.stream, mediaStream);
+ });
+ });
+
+ group('pause', () {
+ testWidgets('pauses the camera stream', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.play();
+
+ expect(camera.videoElement.paused, isFalse);
+
+ camera.pause();
+
+ expect(camera.videoElement.paused, isTrue);
+ });
+ });
+
+ group('stop', () {
+ testWidgets('resets the camera stream', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.play();
+
+ camera.stop();
+
+ expect(camera.videoElement.srcObject, isNull);
+ expect(camera.stream, isNull);
+ });
+ });
+
+ group('takePicture', () {
+ testWidgets('returns a captured picture', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.play();
+
+ final XFile pictureFile = await camera.takePicture();
+
+ expect(pictureFile, isNotNull);
+ });
+
+ group(
+ 'enables the torch mode '
+ 'when taking a picture', () {
+ late List<MediaStreamTrack> videoTracks;
+ late MediaStream videoStream;
+ late VideoElement videoElement;
+
+ setUp(() {
+ videoTracks = <MediaStreamTrack>[
+ MockMediaStreamTrack(),
+ MockMediaStreamTrack()
+ ];
+ videoStream = FakeMediaStream(videoTracks);
+
+ videoElement = getVideoElementWithBlankStream(const Size(100, 100))
+ ..muted = true;
+
+ when(() => videoTracks.first.applyConstraints(any()))
+ .thenAnswer((_) async => <dynamic, dynamic>{});
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+ });
+
+ testWidgets('if the flash mode is auto', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream
+ ..videoElement = videoElement
+ ..flashMode = FlashMode.auto;
+
+ await camera.play();
+
+ final XFile _ = await camera.takePicture();
+
+ verify(
+ () => videoTracks.first.applyConstraints(<dynamic, dynamic>{
+ 'advanced': <dynamic>[
+ <dynamic, dynamic>{
+ 'torch': true,
+ }
+ ]
+ }),
+ ).called(1);
+
+ verify(
+ () => videoTracks.first.applyConstraints(<dynamic, dynamic>{
+ 'advanced': <dynamic>[
+ <dynamic, dynamic>{
+ 'torch': false,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+
+ testWidgets('if the flash mode is always', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream
+ ..videoElement = videoElement
+ ..flashMode = FlashMode.always;
+
+ await camera.play();
+
+ final XFile _ = await camera.takePicture();
+
+ verify(
+ () => videoTracks.first.applyConstraints(<dynamic, dynamic>{
+ 'advanced': <dynamic>[
+ <dynamic, dynamic>{
+ 'torch': true,
+ }
+ ]
+ }),
+ ).called(1);
+
+ verify(
+ () => videoTracks.first.applyConstraints(<dynamic, dynamic>{
+ 'advanced': <dynamic>[
+ <dynamic, dynamic>{
+ 'torch': false,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+ });
+ });
+
+ group('getVideoSize', () {
+ testWidgets(
+ 'returns a size '
+ 'based on the first video track settings',
+ (WidgetTester tester) async {
+ const Size videoSize = Size(1280, 720);
+
+ final VideoElement videoElement =
+ getVideoElementWithBlankStream(videoSize);
+ mediaStream = videoElement.captureStream();
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(
+ camera.getVideoSize(),
+ equals(videoSize),
+ );
+ });
+
+ testWidgets(
+ 'returns Size.zero '
+ 'if the camera is missing video tracks', (WidgetTester tester) async {
+ // Create a video stream with no video tracks.
+ final VideoElement videoElement = VideoElement();
+ mediaStream = videoElement.captureStream();
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(
+ camera.getVideoSize(),
+ equals(Size.zero),
+ );
+ });
+ });
+
+ group('setFlashMode', () {
+ late List<MediaStreamTrack> videoTracks;
+ late MediaStream videoStream;
+
+ setUp(() {
+ videoTracks = <MediaStreamTrack>[
+ MockMediaStreamTrack(),
+ MockMediaStreamTrack()
+ ];
+ videoStream = FakeMediaStream(videoTracks);
+
+ when(() => videoTracks.first.applyConstraints(any()))
+ .thenAnswer((_) async => <dynamic, dynamic>{});
+
+ when(videoTracks.first.getCapabilities)
+ .thenReturn(<dynamic, dynamic>{});
+ });
+
+ testWidgets('sets the camera flash mode', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ const FlashMode flashMode = FlashMode.always;
+
+ camera.setFlashMode(flashMode);
+
+ expect(
+ camera.flashMode,
+ equals(flashMode),
+ );
+ });
+
+ testWidgets(
+ 'enables the torch mode '
+ 'if the flash mode is torch', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ camera.setFlashMode(FlashMode.torch);
+
+ verify(
+ () => videoTracks.first.applyConstraints(<dynamic, dynamic>{
+ 'advanced': <dynamic>[
+ <dynamic, dynamic>{
+ 'torch': true,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'disables the torch mode '
+ 'if the flash mode is not torch', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ camera.setFlashMode(FlashMode.auto);
+
+ verify(
+ () => videoTracks.first.applyConstraints(<dynamic, dynamic>{
+ 'advanced': <dynamic>[
+ <dynamic, dynamic>{
+ 'torch': false,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+
+ group('throws a CameraWebException', () {
+ testWidgets(
+ 'with torchModeNotSupported error '
+ 'when there are no media devices', (WidgetTester tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ expect(
+ () => camera.setFlashMode(FlashMode.always),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.torchModeNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with torchModeNotSupported error '
+ 'when the torch mode is not supported '
+ 'in the browser', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'torch': false,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ expect(
+ () => camera.setFlashMode(FlashMode.always),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.torchModeNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with torchModeNotSupported error '
+ 'when the torch mode is not supported '
+ 'by the camera', (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'torch': false,
+ });
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ expect(
+ () => camera.setFlashMode(FlashMode.always),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.torchModeNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notStarted error '
+ 'when the camera stream has not been initialized',
+ (WidgetTester tester) async {
+ when(mediaDevices.getSupportedConstraints)
+ .thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn(<dynamic, dynamic>{
+ 'torch': true,
+ });
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )..window = window;
+
+ expect(
+ () => camera.setFlashMode(FlashMode.always),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.notStarted,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('zoomLevel', () {
+ group('getMaxZoomLevel', () {
+ testWidgets(
+ 'returns maximum '
+ 'from CameraService.getZoomLevelCapabilityForCamera',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: MockMediaStreamTrack(),
+ );
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ final double maximumZoomLevel = camera.getMaxZoomLevel();
+
+ verify(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .called(1);
+
+ expect(
+ maximumZoomLevel,
+ equals(zoomLevelCapability.maximum),
+ );
+ });
+ });
+
+ group('getMinZoomLevel', () {
+ testWidgets(
+ 'returns minimum '
+ 'from CameraService.getZoomLevelCapabilityForCamera',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: MockMediaStreamTrack(),
+ );
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ final double minimumZoomLevel = camera.getMinZoomLevel();
+
+ verify(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .called(1);
+
+ expect(
+ minimumZoomLevel,
+ equals(zoomLevelCapability.minimum),
+ );
+ });
+ });
+
+ group('setZoomLevel', () {
+ testWidgets(
+ 'applies zoom on the video track '
+ 'from CameraService.getZoomLevelCapabilityForCamera',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final MockMediaStreamTrack videoTrack = MockMediaStreamTrack();
+
+ final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: videoTrack,
+ );
+
+ when(() => videoTrack.applyConstraints(any()))
+ .thenAnswer((_) async {});
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ const double zoom = 75.0;
+
+ camera.setZoomLevel(zoom);
+
+ verify(
+ () => videoTrack.applyConstraints(<dynamic, dynamic>{
+ 'advanced': <dynamic>[
+ <dynamic, dynamic>{
+ ZoomLevelCapability.constraintName: zoom,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+
+ group('throws a CameraWebException', () {
+ testWidgets(
+ 'with zoomLevelInvalid error '
+ 'when the provided zoom level is below minimum',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: MockMediaStreamTrack(),
+ );
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ expect(
+ () => camera.setZoomLevel(45.0),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelInvalid,
+ ),
+ ));
+ });
+
+ testWidgets(
+ 'with zoomLevelInvalid error '
+ 'when the provided zoom level is below minimum',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final ZoomLevelCapability zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: MockMediaStreamTrack(),
+ );
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ expect(
+ () => camera.setZoomLevel(105.0),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelInvalid,
+ ),
+ ),
+ );
+ });
+ });
+ });
+ });
+
+ group('getLensDirection', () {
+ testWidgets(
+ 'returns a lens direction '
+ 'based on the first video track settings',
+ (WidgetTester tester) async {
+ final MockVideoElement videoElement = MockVideoElement();
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )..videoElement = videoElement;
+
+ final MockMediaStreamTrack firstVideoTrack = MockMediaStreamTrack();
+
+ when(() => videoElement.srcObject).thenReturn(
+ FakeMediaStream(<MediaStreamTrack>[
+ firstVideoTrack,
+ MockMediaStreamTrack(),
+ ]),
+ );
+
+ when(firstVideoTrack.getSettings)
+ .thenReturn(<dynamic, dynamic>{'facingMode': 'environment'});
+
+ when(() => cameraService.mapFacingModeToLensDirection('environment'))
+ .thenReturn(CameraLensDirection.external);
+
+ expect(
+ camera.getLensDirection(),
+ equals(CameraLensDirection.external),
+ );
+ });
+
+ testWidgets(
+ 'returns null '
+ 'if the first video track is missing the facing mode',
+ (WidgetTester tester) async {
+ final MockVideoElement videoElement = MockVideoElement();
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )..videoElement = videoElement;
+
+ final MockMediaStreamTrack firstVideoTrack = MockMediaStreamTrack();
+
+ when(() => videoElement.srcObject).thenReturn(
+ FakeMediaStream(<MediaStreamTrack>[
+ firstVideoTrack,
+ MockMediaStreamTrack(),
+ ]),
+ );
+
+ when(firstVideoTrack.getSettings).thenReturn(<dynamic, dynamic>{});
+
+ expect(
+ camera.getLensDirection(),
+ isNull,
+ );
+ });
+
+ testWidgets(
+ 'returns null '
+ 'if the camera is missing video tracks', (WidgetTester tester) async {
+ // Create a video stream with no video tracks.
+ final VideoElement videoElement = VideoElement();
+ mediaStream = videoElement.captureStream();
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(
+ camera.getLensDirection(),
+ isNull,
+ );
+ });
+ });
+
+ group('getViewType', () {
+ testWidgets('returns a correct view type', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(
+ camera.getViewType(),
+ equals('plugins.flutter.io/camera_$textureId'),
+ );
+ });
+ });
+
+ group('video recording', () {
+ const String supportedVideoType = 'video/webm';
+
+ late MediaRecorder mediaRecorder;
+
+ bool isVideoTypeSupported(String type) => type == supportedVideoType;
+
+ setUp(() {
+ mediaRecorder = MockMediaRecorder();
+
+ when(() => mediaRecorder.onError)
+ .thenAnswer((_) => const Stream<Event>.empty());
+ });
+
+ group('startVideoRecording', () {
+ testWidgets(
+ 'creates a media recorder '
+ 'with appropriate options', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ expect(
+ camera.mediaRecorder!.stream,
+ equals(camera.stream),
+ );
+
+ expect(
+ camera.mediaRecorder!.mimeType,
+ equals(supportedVideoType),
+ );
+
+ expect(
+ camera.mediaRecorder!.state,
+ equals('recording'),
+ );
+ });
+
+ testWidgets('listens to the media recorder data events',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ verify(
+ () => mediaRecorder.addEventListener('dataavailable', any()),
+ ).called(1);
+ });
+
+ testWidgets('listens to the media recorder stop events',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ verify(
+ () => mediaRecorder.addEventListener('stop', any()),
+ ).called(1);
+ });
+
+ testWidgets('starts a video recording', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ verify(mediaRecorder.start).called(1);
+ });
+
+ testWidgets(
+ 'starts a video recording '
+ 'with maxVideoDuration', (WidgetTester tester) async {
+ const Duration maxVideoDuration = Duration(hours: 1);
+
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording(maxVideoDuration: maxVideoDuration);
+
+ verify(() => mediaRecorder.start(maxVideoDuration.inMilliseconds))
+ .called(1);
+ });
+
+ group('throws a CameraWebException', () {
+ testWidgets(
+ 'with notSupported error '
+ 'when maxVideoDuration is 0 milliseconds or less',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ expect(
+ () => camera.startVideoRecording(maxVideoDuration: Duration.zero),
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notSupported error '
+ 'when no video types are supported', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )..isVideoTypeSupported = (String type) => false;
+
+ await camera.initialize();
+ await camera.play();
+
+ expect(
+ camera.startVideoRecording,
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('pauseVideoRecording', () {
+ testWidgets('pauses a video recording', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )..mediaRecorder = mediaRecorder;
+
+ await camera.pauseVideoRecording();
+
+ verify(mediaRecorder.pause).called(1);
+ });
+
+ testWidgets(
+ 'throws a CameraWebException '
+ 'with videoRecordingNotStarted error '
+ 'if the video recording was not started',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ );
+
+ expect(
+ camera.pauseVideoRecording,
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.videoRecordingNotStarted,
+ ),
+ ),
+ );
+ });
+ });
+
+ group('resumeVideoRecording', () {
+ testWidgets('resumes a video recording', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )..mediaRecorder = mediaRecorder;
+
+ await camera.resumeVideoRecording();
+
+ verify(mediaRecorder.resume).called(1);
+ });
+
+ testWidgets(
+ 'throws a CameraWebException '
+ 'with videoRecordingNotStarted error '
+ 'if the video recording was not started',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ );
+
+ expect(
+ camera.resumeVideoRecording,
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.videoRecordingNotStarted,
+ ),
+ ),
+ );
+ });
+ });
+
+ group('stopVideoRecording', () {
+ testWidgets(
+ 'stops a video recording and '
+ 'returns the captured file '
+ 'based on all video data parts', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ late void Function(Event) videoDataAvailableListener;
+ late void Function(Event) videoRecordingStoppedListener;
+
+ when(
+ () => mediaRecorder.addEventListener('dataavailable', any()),
+ ).thenAnswer((Invocation invocation) {
+ videoDataAvailableListener =
+ invocation.positionalArguments[1] as void Function(Event);
+ });
+
+ when(
+ () => mediaRecorder.addEventListener('stop', any()),
+ ).thenAnswer((Invocation invocation) {
+ videoRecordingStoppedListener =
+ invocation.positionalArguments[1] as void Function(Event);
+ });
+
+ Blob? finalVideo;
+ List<Blob>? videoParts;
+ camera.blobBuilder = (List<Blob> blobs, String videoType) {
+ videoParts = <Blob>[...blobs];
+ finalVideo = Blob(blobs, videoType);
+ return finalVideo!;
+ };
+
+ await camera.startVideoRecording();
+ final Future<XFile> videoFileFuture = camera.stopVideoRecording();
+
+ final Blob capturedVideoPartOne = Blob(<Object>[]);
+ final Blob capturedVideoPartTwo = Blob(<Object>[]);
+
+ final List<Blob> capturedVideoParts = <Blob>[
+ capturedVideoPartOne,
+ capturedVideoPartTwo,
+ ];
+
+ videoDataAvailableListener(FakeBlobEvent(capturedVideoPartOne));
+ videoDataAvailableListener(FakeBlobEvent(capturedVideoPartTwo));
+
+ videoRecordingStoppedListener(Event('stop'));
+
+ final XFile videoFile = await videoFileFuture;
+
+ verify(mediaRecorder.stop).called(1);
+
+ expect(
+ videoFile,
+ isNotNull,
+ );
+
+ expect(
+ videoFile.mimeType,
+ equals(supportedVideoType),
+ );
+
+ expect(
+ videoFile.name,
+ equals(finalVideo.hashCode.toString()),
+ );
+
+ expect(
+ videoParts,
+ equals(capturedVideoParts),
+ );
+ });
+
+ testWidgets(
+ 'throws a CameraWebException '
+ 'with videoRecordingNotStarted error '
+ 'if the video recording was not started',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ );
+
+ expect(
+ camera.stopVideoRecording,
+ throwsA(
+ isA<CameraWebException>()
+ .having(
+ (CameraWebException e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (CameraWebException e) => e.code,
+ 'code',
+ CameraErrorCode.videoRecordingNotStarted,
+ ),
+ ),
+ );
+ });
+ });
+
+ group('on video data available', () {
+ late void Function(Event) videoDataAvailableListener;
+
+ setUp(() {
+ when(
+ () => mediaRecorder.addEventListener('dataavailable', any()),
+ ).thenAnswer((Invocation invocation) {
+ videoDataAvailableListener =
+ invocation.positionalArguments[1] as void Function(Event);
+ });
+ });
+
+ testWidgets(
+ 'stops a video recording '
+ 'if maxVideoDuration is given and '
+ 'the recording was not stopped manually',
+ (WidgetTester tester) async {
+ const Duration maxVideoDuration = Duration(hours: 1);
+
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+ await camera.startVideoRecording(maxVideoDuration: maxVideoDuration);
+
+ when(() => mediaRecorder.state).thenReturn('recording');
+
+ videoDataAvailableListener(FakeBlobEvent(Blob(<Object>[])));
+
+ await Future<void>.microtask(() {});
+
+ verify(mediaRecorder.stop).called(1);
+ });
+ });
+
+ group('on video recording stopped', () {
+ late void Function(Event) videoRecordingStoppedListener;
+
+ setUp(() {
+ when(
+ () => mediaRecorder.addEventListener('stop', any()),
+ ).thenAnswer((Invocation invocation) {
+ videoRecordingStoppedListener =
+ invocation.positionalArguments[1] as void Function(Event);
+ });
+ });
+
+ testWidgets('stops listening to the media recorder data events',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ videoRecordingStoppedListener(Event('stop'));
+
+ await Future<void>.microtask(() {});
+
+ verify(
+ () => mediaRecorder.removeEventListener('dataavailable', any()),
+ ).called(1);
+ });
+
+ testWidgets('stops listening to the media recorder stop events',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ videoRecordingStoppedListener(Event('stop'));
+
+ await Future<void>.microtask(() {});
+
+ verify(
+ () => mediaRecorder.removeEventListener('stop', any()),
+ ).called(1);
+ });
+
+ testWidgets('stops listening to the media recorder errors',
+ (WidgetTester tester) async {
+ final StreamController<ErrorEvent> onErrorStreamController =
+ StreamController<ErrorEvent>();
+
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ when(() => mediaRecorder.onError)
+ .thenAnswer((_) => onErrorStreamController.stream);
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ videoRecordingStoppedListener(Event('stop'));
+
+ await Future<void>.microtask(() {});
+
+ expect(
+ onErrorStreamController.hasListener,
+ isFalse,
+ );
+ });
+ });
+ });
+
+ group('dispose', () {
+ testWidgets("resets the video element's source",
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.dispose();
+
+ expect(camera.videoElement.srcObject, isNull);
+ });
+
+ testWidgets('closes the onEnded stream', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.dispose();
+
+ expect(
+ camera.onEndedController.isClosed,
+ isTrue,
+ );
+ });
+
+ testWidgets('closes the onVideoRecordedEvent stream',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.dispose();
+
+ expect(
+ camera.videoRecorderController.isClosed,
+ isTrue,
+ );
+ });
+
+ testWidgets('closes the onVideoRecordingError stream',
+ (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.dispose();
+
+ expect(
+ camera.videoRecordingErrorController.isClosed,
+ isTrue,
+ );
+ });
+ });
+
+ group('events', () {
+ group('onVideoRecordedEvent', () {
+ testWidgets(
+ 'emits a VideoRecordedEvent '
+ 'when a video recording is created', (WidgetTester tester) async {
+ const Duration maxVideoDuration = Duration(hours: 1);
+ const String supportedVideoType = 'video/webm';
+
+ final MockMediaRecorder mediaRecorder = MockMediaRecorder();
+ when(() => mediaRecorder.onError)
+ .thenAnswer((_) => const Stream<Event>.empty());
+
+ final Camera camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = (String type) => type == 'video/webm';
+
+ await camera.initialize();
+ await camera.play();
+
+ late void Function(Event) videoDataAvailableListener;
+ late void Function(Event) videoRecordingStoppedListener;
+
+ when(
+ () => mediaRecorder.addEventListener('dataavailable', any()),
+ ).thenAnswer((Invocation invocation) {
+ videoDataAvailableListener =
+ invocation.positionalArguments[1] as void Function(Event);
+ });
+
+ when(
+ () => mediaRecorder.addEventListener('stop', any()),
+ ).thenAnswer((Invocation invocation) {
+ videoRecordingStoppedListener =
+ invocation.positionalArguments[1] as void Function(Event);
+ });
+
+ final StreamQueue<VideoRecordedEvent> streamQueue =
+ StreamQueue<VideoRecordedEvent>(camera.onVideoRecordedEvent);
+
+ await camera.startVideoRecording(maxVideoDuration: maxVideoDuration);
+
+ Blob? finalVideo;
+ camera.blobBuilder = (List<Blob> blobs, String videoType) {
+ finalVideo = Blob(blobs, videoType);
+ return finalVideo!;
+ };
+
+ videoDataAvailableListener(FakeBlobEvent(Blob(<Object>[])));
+ videoRecordingStoppedListener(Event('stop'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ isA<VideoRecordedEvent>()
+ .having(
+ (VideoRecordedEvent e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (VideoRecordedEvent e) => e.file,
+ 'file',
+ isA<XFile>()
+ .having(
+ (XFile f) => f.mimeType,
+ 'mimeType',
+ supportedVideoType,
+ )
+ .having(
+ (XFile f) => f.name,
+ 'name',
+ finalVideo.hashCode.toString(),
+ ),
+ )
+ .having(
+ (VideoRecordedEvent e) => e.maxVideoDuration,
+ 'maxVideoDuration',
+ maxVideoDuration,
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+
+ group('onEnded', () {
+ testWidgets(
+ 'emits the default video track '
+ 'when it emits an ended event', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final StreamQueue<MediaStreamTrack> streamQueue =
+ StreamQueue<MediaStreamTrack>(camera.onEnded);
+
+ await camera.initialize();
+
+ final List<MediaStreamTrack> videoTracks =
+ camera.stream!.getVideoTracks();
+ final MediaStreamTrack defaultVideoTrack = videoTracks.first;
+
+ defaultVideoTrack.dispatchEvent(Event('ended'));
+
+ expect(
+ await streamQueue.next,
+ equals(defaultVideoTrack),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits the default video track '
+ 'when the camera is stopped', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final StreamQueue<MediaStreamTrack> streamQueue =
+ StreamQueue<MediaStreamTrack>(camera.onEnded);
+
+ await camera.initialize();
+
+ final List<MediaStreamTrack> videoTracks =
+ camera.stream!.getVideoTracks();
+ final MediaStreamTrack defaultVideoTrack = videoTracks.first;
+
+ camera.stop();
+
+ expect(
+ await streamQueue.next,
+ equals(defaultVideoTrack),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+
+ group('onVideoRecordingError', () {
+ testWidgets(
+ 'emits an ErrorEvent '
+ 'when the media recorder fails '
+ 'when recording a video', (WidgetTester tester) async {
+ final MockMediaRecorder mediaRecorder = MockMediaRecorder();
+ final StreamController<ErrorEvent> errorController =
+ StreamController<ErrorEvent>();
+
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )..mediaRecorder = mediaRecorder;
+
+ when(() => mediaRecorder.onError)
+ .thenAnswer((_) => errorController.stream);
+
+ final StreamQueue<ErrorEvent> streamQueue =
+ StreamQueue<ErrorEvent>(camera.onVideoRecordingError);
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ final ErrorEvent errorEvent = ErrorEvent('type');
+ errorController.add(errorEvent);
+
+ expect(
+ await streamQueue.next,
+ equals(errorEvent),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_web_exception_test.dart b/packages/camera/camera_web/example/integration_test/camera_web_exception_test.dart
new file mode 100644
index 0000000..fcb54da
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_web_exception_test.dart
@@ -0,0 +1,41 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraWebException', () {
+ testWidgets('sets all properties', (WidgetTester tester) async {
+ const int cameraId = 1;
+ const CameraErrorCode code = CameraErrorCode.notFound;
+ const String description = 'The camera is not found.';
+
+ final CameraWebException exception =
+ CameraWebException(cameraId, code, description);
+
+ expect(exception.cameraId, equals(cameraId));
+ expect(exception.code, equals(code));
+ expect(exception.description, equals(description));
+ });
+
+ testWidgets('toString includes all properties',
+ (WidgetTester tester) async {
+ const int cameraId = 2;
+ const CameraErrorCode code = CameraErrorCode.notReadable;
+ const String description = 'The camera is not readable.';
+
+ final CameraWebException exception =
+ CameraWebException(cameraId, code, description);
+
+ expect(
+ exception.toString(),
+ equals('CameraWebException($cameraId, $code, $description)'),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_web_test.dart b/packages/camera/camera_web/example/integration_test/camera_web_test.dart
new file mode 100644
index 0000000..820a84b
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_web_test.dart
@@ -0,0 +1,3102 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html';
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#106316)
+// ignore: unnecessary_import
+import 'dart:ui';
+
+import 'package:async/async.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_web/camera_web.dart';
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart' as widgets;
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:mocktail/mocktail.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraPlugin', () {
+ const int cameraId = 1;
+
+ late Window window;
+ late Navigator navigator;
+ late MediaDevices mediaDevices;
+ late VideoElement videoElement;
+ late Screen screen;
+ late ScreenOrientation screenOrientation;
+ late Document document;
+ late Element documentElement;
+
+ late CameraService cameraService;
+
+ setUp(() async {
+ window = MockWindow();
+ navigator = MockNavigator();
+ mediaDevices = MockMediaDevices();
+
+ videoElement = getVideoElementWithBlankStream(const Size(10, 10));
+
+ when(() => window.navigator).thenReturn(navigator);
+ when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+
+ screen = MockScreen();
+ screenOrientation = MockScreenOrientation();
+
+ when(() => screen.orientation).thenReturn(screenOrientation);
+ when(() => window.screen).thenReturn(screen);
+
+ document = MockDocument();
+ documentElement = MockElement();
+
+ when(() => document.documentElement).thenReturn(documentElement);
+ when(() => window.document).thenReturn(document);
+
+ cameraService = MockCameraService();
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ any(),
+ cameraId: any(named: 'cameraId'),
+ ),
+ ).thenAnswer(
+ (_) async => videoElement.captureStream(),
+ );
+
+ CameraPlatform.instance = CameraPlugin(
+ cameraService: cameraService,
+ )..window = window;
+ });
+
+ setUpAll(() {
+ registerFallbackValue(MockMediaStreamTrack());
+ registerFallbackValue(MockCameraOptions());
+ registerFallbackValue(FlashMode.off);
+ });
+
+ testWidgets('CameraPlugin is the live instance',
+ (WidgetTester tester) async {
+ expect(CameraPlatform.instance, isA<CameraPlugin>());
+ });
+
+ group('availableCameras', () {
+ setUp(() {
+ when(
+ () => cameraService.getFacingModeForVideoTrack(
+ any(),
+ ),
+ ).thenReturn(null);
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) async => <dynamic>[],
+ );
+ });
+
+ testWidgets('requests video and audio permissions',
+ (WidgetTester tester) async {
+ final List<CameraDescription> _ =
+ await CameraPlatform.instance.availableCameras();
+
+ verify(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(
+ audio: AudioConstraints(enabled: true),
+ ),
+ ),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'releases the camera stream '
+ 'used to request video and audio permissions',
+ (WidgetTester tester) async {
+ final MockMediaStreamTrack videoTrack = MockMediaStreamTrack();
+
+ bool videoTrackStopped = false;
+ when(videoTrack.stop).thenAnswer((Invocation _) {
+ videoTrackStopped = true;
+ });
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ const CameraOptions(
+ audio: AudioConstraints(enabled: true),
+ ),
+ ),
+ ).thenAnswer(
+ (_) => Future<MediaStream>.value(
+ FakeMediaStream(<MediaStreamTrack>[videoTrack]),
+ ),
+ );
+
+ final List<CameraDescription> _ =
+ await CameraPlatform.instance.availableCameras();
+
+ expect(videoTrackStopped, isTrue);
+ });
+
+ testWidgets(
+ 'gets a video stream '
+ 'for a video input device', (WidgetTester tester) async {
+ final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo(
+ '1',
+ 'Camera 1',
+ MediaDeviceKind.videoInput,
+ );
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) => Future<List<dynamic>>.value(<Object>[videoDevice]),
+ );
+
+ final List<CameraDescription> _ =
+ await CameraPlatform.instance.availableCameras();
+
+ verify(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(
+ deviceId: videoDevice.deviceId,
+ ),
+ ),
+ ),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'does not get a video stream '
+ 'for the video input device '
+ 'with an empty device id', (WidgetTester tester) async {
+ final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo(
+ '',
+ 'Camera 1',
+ MediaDeviceKind.videoInput,
+ );
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) => Future<List<dynamic>>.value(<Object>[videoDevice]),
+ );
+
+ final List<CameraDescription> _ =
+ await CameraPlatform.instance.availableCameras();
+
+ verifyNever(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(
+ deviceId: videoDevice.deviceId,
+ ),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'gets the facing mode '
+ 'from the first available video track '
+ 'of the video input device', (WidgetTester tester) async {
+ final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo(
+ '1',
+ 'Camera 1',
+ MediaDeviceKind.videoInput,
+ );
+
+ final FakeMediaStream videoStream = FakeMediaStream(
+ <MediaStreamTrack>[MockMediaStreamTrack(), MockMediaStreamTrack()]);
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(deviceId: videoDevice.deviceId),
+ ),
+ ),
+ ).thenAnswer((Invocation _) => Future<MediaStream>.value(videoStream));
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) => Future<List<dynamic>>.value(<Object>[videoDevice]),
+ );
+
+ final List<CameraDescription> _ =
+ await CameraPlatform.instance.availableCameras();
+
+ verify(
+ () => cameraService.getFacingModeForVideoTrack(
+ videoStream.getVideoTracks().first,
+ ),
+ ).called(1);
+ });
+
+    testWidgets(
+        'returns appropriate camera descriptions '
+        'for multiple video devices '
+        'based on video streams', (WidgetTester tester) async {
+      // availableCameras must yield one CameraDescription per video input
+      // device (audio devices are ignored), with the lens direction mapped
+      // from the facing mode of each device's video stream.
+      final FakeMediaDeviceInfo firstVideoDevice = FakeMediaDeviceInfo(
+        '1',
+        'Camera 1',
+        MediaDeviceKind.videoInput,
+      );
+
+      final FakeMediaDeviceInfo secondVideoDevice = FakeMediaDeviceInfo(
+        '4',
+        'Camera 4',
+        MediaDeviceKind.videoInput,
+      );
+
+      // Create a video stream for the first video device.
+      final FakeMediaStream firstVideoStream = FakeMediaStream(
+          <MediaStreamTrack>[MockMediaStreamTrack(), MockMediaStreamTrack()]);
+
+      // Create a video stream for the second video device.
+      final FakeMediaStream secondVideoStream =
+          FakeMediaStream(<MediaStreamTrack>[MockMediaStreamTrack()]);
+
+      // Mock media devices to return two video input devices
+      // and two audio devices.
+      when(mediaDevices.enumerateDevices).thenAnswer(
+        (_) => Future<List<dynamic>>.value(<Object>[
+          firstVideoDevice,
+          FakeMediaDeviceInfo(
+            '2',
+            'Audio Input 2',
+            MediaDeviceKind.audioInput,
+          ),
+          FakeMediaDeviceInfo(
+            '3',
+            'Audio Output 3',
+            MediaDeviceKind.audioOutput,
+          ),
+          secondVideoDevice,
+        ]),
+      );
+
+      // Mock camera service to return the first video stream
+      // for the first video device.
+      when(
+        () => cameraService.getMediaStreamForOptions(
+          CameraOptions(
+            video: VideoConstraints(deviceId: firstVideoDevice.deviceId),
+          ),
+        ),
+      ).thenAnswer(
+          (Invocation _) => Future<MediaStream>.value(firstVideoStream));
+
+      // Mock camera service to return the second video stream
+      // for the second video device.
+      when(
+        () => cameraService.getMediaStreamForOptions(
+          CameraOptions(
+            video: VideoConstraints(deviceId: secondVideoDevice.deviceId),
+          ),
+        ),
+      ).thenAnswer(
+          (Invocation _) => Future<MediaStream>.value(secondVideoStream));
+
+      // Mock camera service to return a user facing mode
+      // for the first video stream.
+      when(
+        () => cameraService.getFacingModeForVideoTrack(
+          firstVideoStream.getVideoTracks().first,
+        ),
+      ).thenReturn('user');
+
+      when(() => cameraService.mapFacingModeToLensDirection('user'))
+          .thenReturn(CameraLensDirection.front);
+
+      // Mock camera service to return an environment facing mode
+      // for the second video stream.
+      when(
+        () => cameraService.getFacingModeForVideoTrack(
+          secondVideoStream.getVideoTracks().first,
+        ),
+      ).thenReturn('environment');
+
+      when(() => cameraService.mapFacingModeToLensDirection('environment'))
+          .thenReturn(CameraLensDirection.back);
+
+      final List<CameraDescription> cameras =
+          await CameraPlatform.instance.availableCameras();
+
+      // Expect two cameras and ignore two audio devices.
+      expect(
+        cameras,
+        equals(<CameraDescription>[
+          CameraDescription(
+            name: firstVideoDevice.label!,
+            lensDirection: CameraLensDirection.front,
+            sensorOrientation: 0,
+          ),
+          CameraDescription(
+            name: secondVideoDevice.label!,
+            lensDirection: CameraLensDirection.back,
+            sensorOrientation: 0,
+          )
+        ]),
+      );
+    });
+
+    testWidgets(
+        'sets camera metadata '
+        'for the camera description', (WidgetTester tester) async {
+      // availableCameras must also record CameraMetadata (device id and
+      // facing mode) for each camera description it returns.
+      final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo(
+        '1',
+        'Camera 1',
+        MediaDeviceKind.videoInput,
+      );
+
+      final FakeMediaStream videoStream = FakeMediaStream(
+          <MediaStreamTrack>[MockMediaStreamTrack(), MockMediaStreamTrack()]);
+
+      when(mediaDevices.enumerateDevices).thenAnswer(
+        (_) => Future<List<dynamic>>.value(<Object>[videoDevice]),
+      );
+
+      when(
+        () => cameraService.getMediaStreamForOptions(
+          CameraOptions(
+            video: VideoConstraints(deviceId: videoDevice.deviceId),
+          ),
+        ),
+      ).thenAnswer((Invocation _) => Future<MediaStream>.value(videoStream));
+
+      when(
+        () => cameraService.getFacingModeForVideoTrack(
+          videoStream.getVideoTracks().first,
+        ),
+      ).thenReturn('left')
+
+      when(() => cameraService.mapFacingModeToLensDirection('left'))
+          .thenReturn(CameraLensDirection.external);
+
+      final CameraDescription camera =
+          (await CameraPlatform.instance.availableCameras()).first;
+
+      expect(
+        (CameraPlatform.instance as CameraPlugin).camerasMetadata,
+        equals(<CameraDescription, CameraMetadata>{
+          camera: CameraMetadata(
+            deviceId: videoDevice.deviceId!,
+            facingMode: 'left',
+          )
+        }),
+      );
+    });
+
+    testWidgets(
+        'releases the video stream '
+        'of a video input device', (WidgetTester tester) async {
+      // After availableCameras completes, every track of the stream obtained
+      // for the device must have been stopped.
+      final FakeMediaDeviceInfo videoDevice = FakeMediaDeviceInfo(
+        '1',
+        'Camera 1',
+        MediaDeviceKind.videoInput,
+      );
+
+      final FakeMediaStream videoStream = FakeMediaStream(
+          <MediaStreamTrack>[MockMediaStreamTrack(), MockMediaStreamTrack()]);
+
+      when(mediaDevices.enumerateDevices).thenAnswer(
+        (_) => Future<List<dynamic>>.value(<Object>[videoDevice]),
+      );
+
+      when(
+        () => cameraService.getMediaStreamForOptions(
+          CameraOptions(
+            video: VideoConstraints(deviceId: videoDevice.deviceId),
+          ),
+        ),
+      ).thenAnswer((Invocation _) => Future<MediaStream>.value(videoStream));
+
+      // The returned camera list is ignored; only track cleanup is verified.
+      final List<CameraDescription> _ =
+          await CameraPlatform.instance.availableCameras();
+
+      for (final MediaStreamTrack videoTrack
+          in videoStream.getVideoTracks()) {
+        verify(videoTrack.stop).called(1);
+      }
+    });
+
+    group('throws CameraException', () {
+      // Each case checks the CameraException.code reported by
+      // availableCameras for a specific failure mode.
+      testWidgets(
+          'with notSupported error '
+          'when there are no media devices', (WidgetTester tester) async {
+        when(() => navigator.mediaDevices).thenReturn(null);
+
+        expect(
+          () => CameraPlatform.instance.availableCameras(),
+          throwsA(
+            isA<CameraException>().having(
+              (CameraException e) => e.code,
+              'code',
+              CameraErrorCode.notSupported.toString(),
+            ),
+          ),
+        );
+      });
+
+      testWidgets('when MediaDevices.enumerateDevices throws DomException',
+          (WidgetTester tester) async {
+        final FakeDomException exception =
+            FakeDomException(DomException.UNKNOWN);
+
+        when(mediaDevices.enumerateDevices).thenThrow(exception);
+
+        expect(
+          () => CameraPlatform.instance.availableCameras(),
+          throwsA(
+            isA<CameraException>().having(
+              (CameraException e) => e.code,
+              'code',
+              exception.name,
+            ),
+          ),
+        );
+      });
+
+      testWidgets(
+          'when CameraService.getMediaStreamForOptions '
+          'throws CameraWebException', (WidgetTester tester) async {
+        final CameraWebException exception = CameraWebException(
+          cameraId,
+          CameraErrorCode.security,
+          'description',
+        );
+
+        when(() => cameraService.getMediaStreamForOptions(any()))
+            .thenThrow(exception);
+
+        expect(
+          () => CameraPlatform.instance.availableCameras(),
+          throwsA(
+            isA<CameraException>().having(
+              (CameraException e) => e.code,
+              'code',
+              exception.code.toString(),
+            ),
+          ),
+        );
+      });
+
+      testWidgets(
+          'when CameraService.getMediaStreamForOptions '
+          'throws PlatformException', (WidgetTester tester) async {
+        final PlatformException exception = PlatformException(
+          code: CameraErrorCode.notSupported.toString(),
+          message: 'message',
+        );
+
+        when(() => cameraService.getMediaStreamForOptions(any()))
+            .thenThrow(exception);
+
+        expect(
+          () => CameraPlatform.instance.availableCameras(),
+          throwsA(
+            isA<CameraException>().having(
+              (CameraException e) => e.code,
+              'code',
+              exception.code,
+            ),
+          ),
+        );
+      });
+    });
+ });
+
+    group('createCamera', () {
+      // Verifies the CameraOptions with which a new Camera is constructed,
+      // and the missingMetadata error for unknown camera descriptions.
+      group('creates a camera', () {
+        // Both resolution presets used below resolve to the same 4K size.
+        const Size ultraHighResolutionSize = Size(3840, 2160);
+        const Size maxResolutionSize = Size(3840, 2160);
+
+        const CameraDescription cameraDescription = CameraDescription(
+          name: 'name',
+          lensDirection: CameraLensDirection.front,
+          sensorOrientation: 0,
+        );
+
+        const CameraMetadata cameraMetadata = CameraMetadata(
+          deviceId: 'deviceId',
+          facingMode: 'user',
+        );
+
+        setUp(() {
+          // Add metadata for the camera description.
+          (CameraPlatform.instance as CameraPlugin)
+              .camerasMetadata[cameraDescription] = cameraMetadata;
+
+          when(
+            () => cameraService.mapFacingModeToCameraType('user'),
+          ).thenReturn(CameraType.user);
+        });
+
+        testWidgets('with appropriate options', (WidgetTester tester) async {
+          when(
+            () => cameraService
+                .mapResolutionPresetToSize(ResolutionPreset.ultraHigh),
+          ).thenReturn(ultraHighResolutionSize);
+
+          final int cameraId = await CameraPlatform.instance.createCamera(
+            cameraDescription,
+            ResolutionPreset.ultraHigh,
+            enableAudio: true,
+          );
+
+          expect(
+            (CameraPlatform.instance as CameraPlugin).cameras[cameraId],
+            isA<Camera>()
+                .having(
+                  (Camera camera) => camera.textureId,
+                  'textureId',
+                  cameraId,
+                )
+                .having(
+                  (Camera camera) => camera.options,
+                  'options',
+                  CameraOptions(
+                    audio: const AudioConstraints(enabled: true),
+                    video: VideoConstraints(
+                      facingMode: FacingModeConstraint(CameraType.user),
+                      width: VideoSizeConstraint(
+                        ideal: ultraHighResolutionSize.width.toInt(),
+                      ),
+                      height: VideoSizeConstraint(
+                        ideal: ultraHighResolutionSize.height.toInt(),
+                      ),
+                      deviceId: cameraMetadata.deviceId,
+                    ),
+                  ),
+                ),
+          );
+        });
+
+        testWidgets(
+            'with a max resolution preset '
+            'and enabled audio set to false '
+            'when no options are specified', (WidgetTester tester) async {
+          when(
+            () => cameraService.mapResolutionPresetToSize(ResolutionPreset.max),
+          ).thenReturn(maxResolutionSize);
+
+          final int cameraId = await CameraPlatform.instance.createCamera(
+            cameraDescription,
+            null,
+          );
+
+          expect(
+            (CameraPlatform.instance as CameraPlugin).cameras[cameraId],
+            isA<Camera>().having(
+              (Camera camera) => camera.options,
+              'options',
+              CameraOptions(
+                audio: const AudioConstraints(),
+                video: VideoConstraints(
+                  facingMode: FacingModeConstraint(CameraType.user),
+                  width: VideoSizeConstraint(
+                    ideal: maxResolutionSize.width.toInt(),
+                  ),
+                  height: VideoSizeConstraint(
+                    ideal: maxResolutionSize.height.toInt(),
+                  ),
+                  deviceId: cameraMetadata.deviceId,
+                ),
+              ),
+            ),
+          );
+        });
+      });
+
+      testWidgets(
+          'throws CameraException '
+          'with missingMetadata error '
+          'if there is no metadata '
+          'for the given camera description', (WidgetTester tester) async {
+        expect(
+          () => CameraPlatform.instance.createCamera(
+            const CameraDescription(
+              name: 'name',
+              lensDirection: CameraLensDirection.back,
+              sensorOrientation: 0,
+            ),
+            ResolutionPreset.ultraHigh,
+          ),
+          throwsA(
+            isA<CameraException>().having(
+              (CameraException e) => e.code,
+              'code',
+              CameraErrorCode.missingMetadata.toString(),
+            ),
+          ),
+        );
+      });
+    });
+
+    group('initializeCamera', () {
+      late Camera camera;
+      late VideoElement videoElement;
+
+      late StreamController<Event> errorStreamController, abortStreamController;
+      late StreamController<MediaStreamTrack> endedStreamController;
+
+      setUp(() {
+        camera = MockCamera();
+        videoElement = MockVideoElement();
+
+        errorStreamController = StreamController<Event>();
+        abortStreamController = StreamController<Event>();
+        endedStreamController = StreamController<MediaStreamTrack>();
+
+        // Stub a camera that initializes and plays successfully by default;
+        // individual tests below override these stubs to simulate failures.
+        when(camera.getVideoSize).thenReturn(const Size(10, 10));
+        when(camera.initialize)
+            .thenAnswer((Invocation _) => Future<void>.value());
+        when(camera.play).thenAnswer((Invocation _) => Future<void>.value());
+
+        when(() => camera.videoElement).thenReturn(videoElement);
+        when(() => videoElement.onError).thenAnswer((Invocation _) =>
+            FakeElementStream<Event>(errorStreamController.stream));
+        when(() => videoElement.onAbort).thenAnswer((Invocation _) =>
+            FakeElementStream<Event>(abortStreamController.stream));
+
+        when(() => camera.onEnded)
+            .thenAnswer((Invocation _) => endedStreamController.stream);
+      });
+
+      testWidgets('initializes and plays the camera',
+          (WidgetTester tester) async {
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        await CameraPlatform.instance.initializeCamera(cameraId);
+
+        verify(camera.initialize).called(1);
+        verify(camera.play).called(1);
+      });
+
+      testWidgets('starts listening to the camera video error and abort events',
+          (WidgetTester tester) async {
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        expect(errorStreamController.hasListener, isFalse);
+        expect(abortStreamController.hasListener, isFalse);
+
+        await CameraPlatform.instance.initializeCamera(cameraId);
+
+        expect(errorStreamController.hasListener, isTrue);
+        expect(abortStreamController.hasListener, isTrue);
+      });
+
+      testWidgets('starts listening to the camera ended events',
+          (WidgetTester tester) async {
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        expect(endedStreamController.hasListener, isFalse);
+
+        await CameraPlatform.instance.initializeCamera(cameraId);
+
+        expect(endedStreamController.hasListener, isTrue);
+      });
+
+      group('throws PlatformException', () {
+        testWidgets(
+            'with notFound error '
+            'if the camera does not exist', (WidgetTester tester) async {
+          expect(
+            () => CameraPlatform.instance.initializeCamera(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.notFound.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when camera throws CameraWebException',
+            (WidgetTester tester) async {
+          final CameraWebException exception = CameraWebException(
+            cameraId,
+            CameraErrorCode.permissionDenied,
+            'description',
+          );
+
+          when(camera.initialize).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.initializeCamera(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.code.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when camera throws DomException',
+            (WidgetTester tester) async {
+          final FakeDomException exception =
+              FakeDomException(DomException.NOT_ALLOWED);
+
+          // initialize succeeds here; the failure is injected into play.
+          when(camera.initialize)
+              .thenAnswer((Invocation _) => Future<void>.value());
+          when(camera.play).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.initializeCamera(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+    group('lockCaptureOrientation', () {
+      setUp(() {
+        // Default device-orientation mapping; tests override it where needed.
+        when(
+          () => cameraService.mapDeviceOrientationToOrientationType(any()),
+        ).thenReturn(OrientationType.portraitPrimary);
+      });
+
+      testWidgets(
+          'requests full-screen mode '
+          'on documentElement', (WidgetTester tester) async {
+        await CameraPlatform.instance.lockCaptureOrientation(
+          cameraId,
+          DeviceOrientation.portraitUp,
+        );
+
+        verify(documentElement.requestFullscreen).called(1);
+      });
+
+      testWidgets(
+          'locks the capture orientation '
+          'based on the given device orientation', (WidgetTester tester) async {
+        when(
+          () => cameraService.mapDeviceOrientationToOrientationType(
+            DeviceOrientation.landscapeRight,
+          ),
+        ).thenReturn(OrientationType.landscapeSecondary);
+
+        await CameraPlatform.instance.lockCaptureOrientation(
+          cameraId,
+          DeviceOrientation.landscapeRight,
+        );
+
+        verify(
+          () => cameraService.mapDeviceOrientationToOrientationType(
+            DeviceOrientation.landscapeRight,
+          ),
+        ).called(1);
+
+        verify(
+          () => screenOrientation.lock(
+            OrientationType.landscapeSecondary,
+          ),
+        ).called(1);
+      });
+
+      group('throws PlatformException', () {
+        // Each missing browser capability must map to the
+        // orientationNotSupported error code.
+        testWidgets(
+            'with orientationNotSupported error '
+            'when screen is not supported', (WidgetTester tester) async {
+          when(() => window.screen).thenReturn(null);
+
+          expect(
+            () => CameraPlatform.instance.lockCaptureOrientation(
+              cameraId,
+              DeviceOrientation.portraitUp,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.orientationNotSupported.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets(
+            'with orientationNotSupported error '
+            'when screen orientation is not supported',
+            (WidgetTester tester) async {
+          when(() => screen.orientation).thenReturn(null);
+
+          expect(
+            () => CameraPlatform.instance.lockCaptureOrientation(
+              cameraId,
+              DeviceOrientation.portraitUp,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.orientationNotSupported.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets(
+            'with orientationNotSupported error '
+            'when documentElement is not available',
+            (WidgetTester tester) async {
+          when(() => document.documentElement).thenReturn(null);
+
+          expect(
+            () => CameraPlatform.instance.lockCaptureOrientation(
+              cameraId,
+              DeviceOrientation.portraitUp,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.orientationNotSupported.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when lock throws DomException',
+            (WidgetTester tester) async {
+          final FakeDomException exception =
+              FakeDomException(DomException.NOT_ALLOWED);
+
+          when(() => screenOrientation.lock(any())).thenThrow(exception);
+
+          expect(
+            () => CameraPlatform.instance.lockCaptureOrientation(
+              cameraId,
+              DeviceOrientation.portraitDown,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+    group('unlockCaptureOrientation', () {
+      setUp(() {
+        // Default device-orientation mapping; tests override stubs as needed.
+        when(
+          () => cameraService.mapDeviceOrientationToOrientationType(any()),
+        ).thenReturn(OrientationType.portraitPrimary);
+      });
+
+      testWidgets('unlocks the capture orientation',
+          (WidgetTester tester) async {
+        await CameraPlatform.instance.unlockCaptureOrientation(
+          cameraId,
+        );
+
+        verify(screenOrientation.unlock).called(1);
+      });
+
+      group('throws PlatformException', () {
+        // Each missing browser capability must map to the
+        // orientationNotSupported error code.
+        testWidgets(
+            'with orientationNotSupported error '
+            'when screen is not supported', (WidgetTester tester) async {
+          when(() => window.screen).thenReturn(null);
+
+          expect(
+            () => CameraPlatform.instance.unlockCaptureOrientation(
+              cameraId,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.orientationNotSupported.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets(
+            'with orientationNotSupported error '
+            'when screen orientation is not supported',
+            (WidgetTester tester) async {
+          when(() => screen.orientation).thenReturn(null);
+
+          expect(
+            () => CameraPlatform.instance.unlockCaptureOrientation(
+              cameraId,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.orientationNotSupported.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets(
+            'with orientationNotSupported error '
+            'when documentElement is not available',
+            (WidgetTester tester) async {
+          when(() => document.documentElement).thenReturn(null);
+
+          expect(
+            () => CameraPlatform.instance.unlockCaptureOrientation(
+              cameraId,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.orientationNotSupported.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when unlock throws DomException',
+            (WidgetTester tester) async {
+          final FakeDomException exception =
+              FakeDomException(DomException.NOT_ALLOWED);
+
+          when(screenOrientation.unlock).thenThrow(exception);
+
+          expect(
+            () => CameraPlatform.instance.unlockCaptureOrientation(
+              cameraId,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+    group('takePicture', () {
+      // takePicture delegates to Camera.takePicture; failures surface as
+      // PlatformExceptions carrying the underlying error code or name.
+      testWidgets('captures a picture', (WidgetTester tester) async {
+        final MockCamera camera = MockCamera();
+        final MockXFile capturedPicture = MockXFile();
+
+        when(camera.takePicture)
+            .thenAnswer((Invocation _) => Future<XFile>.value(capturedPicture));
+
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        final XFile picture =
+            await CameraPlatform.instance.takePicture(cameraId);
+
+        verify(camera.takePicture).called(1);
+
+        expect(picture, equals(capturedPicture));
+      });
+
+      group('throws PlatformException', () {
+        testWidgets(
+            'with notFound error '
+            'if the camera does not exist', (WidgetTester tester) async {
+          expect(
+            () => CameraPlatform.instance.takePicture(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.notFound.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when takePicture throws DomException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final FakeDomException exception =
+              FakeDomException(DomException.NOT_SUPPORTED);
+
+          when(camera.takePicture).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.takePicture(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when takePicture throws CameraWebException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final CameraWebException exception = CameraWebException(
+            cameraId,
+            CameraErrorCode.notStarted,
+            'description',
+          );
+
+          when(camera.takePicture).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.takePicture(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.code.toString(),
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+    group('startVideoRecording', () {
+      late Camera camera;
+
+      setUp(() {
+        camera = MockCamera();
+
+        // Stub a camera whose recording starts successfully and which emits
+        // no recording errors by default.
+        when(camera.startVideoRecording).thenAnswer((Invocation _) async {});
+
+        when(() => camera.onVideoRecordingError)
+            .thenAnswer((Invocation _) => const Stream<ErrorEvent>.empty());
+      });
+
+      testWidgets('starts a video recording', (WidgetTester tester) async {
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        await CameraPlatform.instance.startVideoRecording(cameraId);
+
+        verify(camera.startVideoRecording).called(1);
+      });
+
+      testWidgets('listens to the onVideoRecordingError stream',
+          (WidgetTester tester) async {
+        final StreamController<ErrorEvent> videoRecordingErrorController =
+            StreamController<ErrorEvent>();
+
+        when(() => camera.onVideoRecordingError)
+            .thenAnswer((Invocation _) => videoRecordingErrorController.stream);
+
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        await CameraPlatform.instance.startVideoRecording(cameraId);
+
+        expect(
+          videoRecordingErrorController.hasListener,
+          isTrue,
+        );
+      });
+
+      group('throws PlatformException', () {
+        testWidgets(
+            'with notFound error '
+            'if the camera does not exist', (WidgetTester tester) async {
+          expect(
+            () => CameraPlatform.instance.startVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.notFound.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when startVideoRecording throws DomException',
+            (WidgetTester tester) async {
+          final FakeDomException exception =
+              FakeDomException(DomException.INVALID_STATE);
+
+          when(camera.startVideoRecording).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.startVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when startVideoRecording throws CameraWebException',
+            (WidgetTester tester) async {
+          final CameraWebException exception = CameraWebException(
+            cameraId,
+            CameraErrorCode.notStarted,
+            'description',
+          );
+
+          when(camera.startVideoRecording).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.startVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.code.toString(),
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+    group('startVideoCapturing', () {
+      late Camera camera;
+
+      setUp(() {
+        camera = MockCamera();
+
+        when(camera.startVideoRecording).thenAnswer((Invocation _) async {});
+
+        when(() => camera.onVideoRecordingError)
+            .thenAnswer((Invocation _) => const Stream<ErrorEvent>.empty());
+      });
+
+      testWidgets('fails if trying to stream', (WidgetTester tester) async {
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        // Image streaming is not implemented on the web, so passing a
+        // streamCallback must throw an UnimplementedError. Uses
+        // throwsUnimplementedError for consistency with the other
+        // UnimplementedError expectations in this file.
+        expect(
+          () => CameraPlatform.instance.startVideoCapturing(VideoCaptureOptions(
+              cameraId,
+              streamCallback: (CameraImageData imageData) {})),
+          throwsUnimplementedError,
+        );
+      });
+    });
+
+    group('stopVideoRecording', () {
+      testWidgets('stops a video recording', (WidgetTester tester) async {
+        final MockCamera camera = MockCamera();
+        final MockXFile capturedVideo = MockXFile();
+
+        when(camera.stopVideoRecording)
+            .thenAnswer((Invocation _) => Future<XFile>.value(capturedVideo));
+
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        final XFile video =
+            await CameraPlatform.instance.stopVideoRecording(cameraId);
+
+        verify(camera.stopVideoRecording).called(1);
+
+        expect(video, capturedVideo);
+      });
+
+      testWidgets('stops listening to the onVideoRecordingError stream',
+          (WidgetTester tester) async {
+        final MockCamera camera = MockCamera();
+        final StreamController<ErrorEvent> videoRecordingErrorController =
+            StreamController<ErrorEvent>();
+
+        when(camera.startVideoRecording).thenAnswer((Invocation _) async {});
+
+        when(camera.stopVideoRecording)
+            .thenAnswer((Invocation _) => Future<XFile>.value(MockXFile()));
+
+        when(() => camera.onVideoRecordingError)
+            .thenAnswer((Invocation _) => videoRecordingErrorController.stream);
+
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        // Start first so that the error-stream subscription exists; stopping
+        // must then cancel it (hasListener becomes false below).
+        await CameraPlatform.instance.startVideoRecording(cameraId);
+        final XFile _ =
+            await CameraPlatform.instance.stopVideoRecording(cameraId);
+
+        expect(
+          videoRecordingErrorController.hasListener,
+          isFalse,
+        );
+      });
+
+      group('throws PlatformException', () {
+        testWidgets(
+            'with notFound error '
+            'if the camera does not exist', (WidgetTester tester) async {
+          expect(
+            () => CameraPlatform.instance.stopVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.notFound.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when stopVideoRecording throws DomException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final FakeDomException exception =
+              FakeDomException(DomException.INVALID_STATE);
+
+          when(camera.stopVideoRecording).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.stopVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when stopVideoRecording throws CameraWebException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final CameraWebException exception = CameraWebException(
+            cameraId,
+            CameraErrorCode.notStarted,
+            'description',
+          );
+
+          when(camera.stopVideoRecording).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.stopVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.code.toString(),
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+    group('pauseVideoRecording', () {
+      // pauseVideoRecording delegates to Camera.pauseVideoRecording; failures
+      // surface as PlatformExceptions.
+      testWidgets('pauses a video recording', (WidgetTester tester) async {
+        final MockCamera camera = MockCamera();
+
+        when(camera.pauseVideoRecording).thenAnswer((Invocation _) async {});
+
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        await CameraPlatform.instance.pauseVideoRecording(cameraId);
+
+        verify(camera.pauseVideoRecording).called(1);
+      });
+
+      group('throws PlatformException', () {
+        testWidgets(
+            'with notFound error '
+            'if the camera does not exist', (WidgetTester tester) async {
+          expect(
+            () => CameraPlatform.instance.pauseVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.notFound.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when pauseVideoRecording throws DomException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final FakeDomException exception =
+              FakeDomException(DomException.INVALID_STATE);
+
+          when(camera.pauseVideoRecording).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.pauseVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when pauseVideoRecording throws CameraWebException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final CameraWebException exception = CameraWebException(
+            cameraId,
+            CameraErrorCode.notStarted,
+            'description',
+          );
+
+          when(camera.pauseVideoRecording).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.pauseVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.code.toString(),
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+    group('resumeVideoRecording', () {
+      // resumeVideoRecording delegates to Camera.resumeVideoRecording;
+      // failures surface as PlatformExceptions.
+      testWidgets('resumes a video recording', (WidgetTester tester) async {
+        final MockCamera camera = MockCamera();
+
+        when(camera.resumeVideoRecording).thenAnswer((Invocation _) async {});
+
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        await CameraPlatform.instance.resumeVideoRecording(cameraId);
+
+        verify(camera.resumeVideoRecording).called(1);
+      });
+
+      group('throws PlatformException', () {
+        testWidgets(
+            'with notFound error '
+            'if the camera does not exist', (WidgetTester tester) async {
+          expect(
+            () => CameraPlatform.instance.resumeVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.notFound.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when resumeVideoRecording throws DomException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final FakeDomException exception =
+              FakeDomException(DomException.INVALID_STATE);
+
+          when(camera.resumeVideoRecording).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.resumeVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when resumeVideoRecording throws CameraWebException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final CameraWebException exception = CameraWebException(
+            cameraId,
+            CameraErrorCode.notStarted,
+            'description',
+          );
+
+          when(camera.resumeVideoRecording).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.resumeVideoRecording(cameraId),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.code.toString(),
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+    group('setFlashMode', () {
+      // setFlashMode forwards the requested mode to Camera.setFlashMode;
+      // failures surface as PlatformExceptions.
+      testWidgets('calls setFlashMode on the camera',
+          (WidgetTester tester) async {
+        final MockCamera camera = MockCamera();
+        const FlashMode flashMode = FlashMode.always;
+
+        // Save the camera in the camera plugin.
+        (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+        await CameraPlatform.instance.setFlashMode(
+          cameraId,
+          flashMode,
+        );
+
+        verify(() => camera.setFlashMode(flashMode)).called(1);
+      });
+
+      group('throws PlatformException', () {
+        testWidgets(
+            'with notFound error '
+            'if the camera does not exist', (WidgetTester tester) async {
+          expect(
+            () => CameraPlatform.instance.setFlashMode(
+              cameraId,
+              FlashMode.always,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                CameraErrorCode.notFound.toString(),
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when setFlashMode throws DomException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final FakeDomException exception =
+              FakeDomException(DomException.NOT_SUPPORTED);
+
+          when(() => camera.setFlashMode(any())).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.setFlashMode(
+              cameraId,
+              FlashMode.always,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.name,
+              ),
+            ),
+          );
+        });
+
+        testWidgets('when setFlashMode throws CameraWebException',
+            (WidgetTester tester) async {
+          final MockCamera camera = MockCamera();
+          final CameraWebException exception = CameraWebException(
+            cameraId,
+            CameraErrorCode.notStarted,
+            'description',
+          );
+
+          when(() => camera.setFlashMode(any())).thenThrow(exception);
+
+          // Save the camera in the camera plugin.
+          (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+          expect(
+            () => CameraPlatform.instance.setFlashMode(
+              cameraId,
+              FlashMode.torch,
+            ),
+            throwsA(
+              isA<PlatformException>().having(
+                (PlatformException e) => e.code,
+                'code',
+                exception.code.toString(),
+              ),
+            ),
+          );
+        });
+      });
+    });
+
+ testWidgets('setExposureMode throws UnimplementedError', // Exposure/focus APIs below are not implemented by the web plugin; each must throw UnimplementedError.
+ (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.setExposureMode(
+ cameraId,
+ ExposureMode.auto,
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('setExposurePoint throws UnimplementedError',
+ (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.setExposurePoint(
+ cameraId,
+ const Point<double>(0, 0),
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('getMinExposureOffset throws UnimplementedError',
+ (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.getMinExposureOffset(cameraId),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('getMaxExposureOffset throws UnimplementedError',
+ (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.getMaxExposureOffset(cameraId),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('getExposureOffsetStepSize throws UnimplementedError',
+ (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.getExposureOffsetStepSize(cameraId),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('setExposureOffset throws UnimplementedError',
+ (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.setExposureOffset(
+ cameraId,
+ 0,
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('setFocusMode throws UnimplementedError',
+ (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.setFocusMode(
+ cameraId,
+ FocusMode.auto,
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('setFocusPoint throws UnimplementedError',
+ (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.setFocusPoint(
+ cameraId,
+ const Point<double>(0, 0),
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ group('getMaxZoomLevel', () { // Verifies getMaxZoomLevel delegates to the Camera and maps failures to PlatformException.
+ testWidgets('calls getMaxZoomLevel on the camera',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ const double maximumZoomLevel = 100.0;
+
+ when(camera.getMaxZoomLevel).thenReturn(maximumZoomLevel);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ await CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ equals(maximumZoomLevel), // Plugin must return the camera's value unchanged.
+ );
+
+ verify(camera.getMaxZoomLevel).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (WidgetTester tester) async {
+ expect(
+ () async => CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(), // No camera registered for cameraId.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when getMaxZoomLevel throws DomException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final FakeDomException exception =
+ FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.getMaxZoomLevel).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ exception.name, // DomException name becomes the PlatformException code.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when getMaxZoomLevel throws CameraWebException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.getMaxZoomLevel).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ exception.code.toString(), // CameraWebException's code becomes the PlatformException code.
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('getMinZoomLevel', () { // Mirror of the getMaxZoomLevel group for the minimum-zoom query.
+ testWidgets('calls getMinZoomLevel on the camera',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ const double minimumZoomLevel = 100.0;
+
+ when(camera.getMinZoomLevel).thenReturn(minimumZoomLevel);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ await CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ equals(minimumZoomLevel), // Plugin must return the camera's value unchanged.
+ );
+
+ verify(camera.getMinZoomLevel).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (WidgetTester tester) async {
+ expect(
+ () async => CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(), // No camera registered for cameraId.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when getMinZoomLevel throws DomException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final FakeDomException exception =
+ FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.getMinZoomLevel).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ exception.name, // DomException name becomes the PlatformException code.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when getMinZoomLevel throws CameraWebException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.getMinZoomLevel).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ exception.code.toString(), // CameraWebException's code becomes the PlatformException code.
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('setZoomLevel', () { // Note: unlike the other operations, setZoomLevel surfaces errors as CameraException, not PlatformException.
+ testWidgets('calls setZoomLevel on the camera',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ const double zoom = 100.0;
+
+ await CameraPlatform.instance.setZoomLevel(cameraId, zoom);
+
+ verify(() => camera.setZoomLevel(zoom)).called(1); // The plugin must forward the exact zoom value once.
+ });
+
+ group('throws CameraException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (WidgetTester tester) async {
+ expect(
+ () async => CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (CameraException e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(), // No camera registered for cameraId.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when setZoomLevel throws DomException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final FakeDomException exception =
+ FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(() => camera.setZoomLevel(any())).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (CameraException e) => e.code,
+ 'code',
+ exception.name, // DomException name becomes the CameraException code.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when setZoomLevel throws PlatformException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final PlatformException exception = PlatformException(
+ code: CameraErrorCode.notSupported.toString(),
+ message: 'message',
+ );
+
+ when(() => camera.setZoomLevel(any())).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (CameraException e) => e.code,
+ 'code',
+ exception.code, // PlatformException code is carried through verbatim.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when setZoomLevel throws CameraWebException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(() => camera.setZoomLevel(any())).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (CameraException e) => e.code,
+ 'code',
+ exception.code.toString(), // CameraWebException's code becomes the CameraException code.
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('pausePreview', () { // pausePreview maps onto Camera.pause; errors become PlatformExceptions.
+ testWidgets('calls pause on the camera', (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.pausePreview(cameraId);
+
+ verify(camera.pause).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (WidgetTester tester) async {
+ expect(
+ () async => CameraPlatform.instance.pausePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(), // No camera registered for cameraId.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when pause throws DomException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final FakeDomException exception =
+ FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.pause).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.pausePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ exception.name, // DomException name becomes the PlatformException code.
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('resumePreview', () { // resumePreview maps onto Camera.play; errors become PlatformExceptions.
+ testWidgets('calls play on the camera', (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+
+ when(camera.play).thenAnswer((Invocation _) async {}); // play is async, so the stub must return a Future.
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.resumePreview(cameraId);
+
+ verify(camera.play).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (WidgetTester tester) async {
+ expect(
+ () async => CameraPlatform.instance.resumePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(), // No camera registered for cameraId.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when play throws DomException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final FakeDomException exception =
+ FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.play).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.resumePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ exception.name, // DomException name becomes the PlatformException code.
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when play throws CameraWebException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.unknown,
+ 'description',
+ );
+
+ when(camera.play).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => CameraPlatform.instance.resumePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ exception.code.toString(), // CameraWebException's code becomes the PlatformException code.
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ testWidgets( // The web preview is an HtmlElementView keyed by the camera's view type string.
+ 'buildPreview returns an HtmlElementView '
+ 'with an appropriate view type', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: cameraId,
+ cameraService: cameraService,
+ );
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ CameraPlatform.instance.buildPreview(cameraId),
+ isA<widgets.HtmlElementView>().having(
+ (widgets.HtmlElementView view) => view.viewType,
+ 'viewType',
+ camera.getViewType(), // Must match the view type the real Camera registered.
+ ),
+ );
+ });
+
+ group('dispose', () { // Verifies dispose tears down the right camera and cancels all of its event subscriptions.
+ late Camera camera;
+ late VideoElement videoElement;
+
+ late StreamController<Event> errorStreamController, abortStreamController;
+ late StreamController<MediaStreamTrack> endedStreamController;
+ late StreamController<ErrorEvent> videoRecordingErrorController;
+
+ setUp(() {
+ camera = MockCamera();
+ videoElement = MockVideoElement();
+
+ errorStreamController = StreamController<Event>();
+ abortStreamController = StreamController<Event>();
+ endedStreamController = StreamController<MediaStreamTrack>();
+ videoRecordingErrorController = StreamController<ErrorEvent>();
+
+ when(camera.getVideoSize).thenReturn(const Size(10, 10));
+ when(camera.initialize)
+ .thenAnswer((Invocation _) => Future<void>.value());
+ when(camera.play).thenAnswer((Invocation _) => Future<void>.value());
+ when(camera.dispose).thenAnswer((Invocation _) => Future<void>.value());
+
+ when(() => camera.videoElement).thenReturn(videoElement);
+ when(() => videoElement.onError).thenAnswer((Invocation _) =>
+ FakeElementStream<Event>(errorStreamController.stream)); // Controllers let tests observe listener lifetimes.
+ when(() => videoElement.onAbort).thenAnswer((Invocation _) =>
+ FakeElementStream<Event>(abortStreamController.stream));
+
+ when(() => camera.onEnded)
+ .thenAnswer((Invocation _) => endedStreamController.stream);
+
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((Invocation _) => videoRecordingErrorController.stream);
+
+ when(camera.startVideoRecording).thenAnswer((Invocation _) async {});
+ });
+
+ testWidgets('disposes the correct camera', (WidgetTester tester) async {
+ const int firstCameraId = 0;
+ const int secondCameraId = 1;
+
+ final MockCamera firstCamera = MockCamera();
+ final MockCamera secondCamera = MockCamera();
+
+ when(firstCamera.dispose)
+ .thenAnswer((Invocation _) => Future<void>.value());
+ when(secondCamera.dispose)
+ .thenAnswer((Invocation _) => Future<void>.value());
+
+ // Save cameras in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras.addAll(<int, Camera>{
+ firstCameraId: firstCamera,
+ secondCameraId: secondCamera,
+ });
+
+ // Dispose the first camera.
+ await CameraPlatform.instance.dispose(firstCameraId);
+
+ // The first camera should be disposed.
+ verify(firstCamera.dispose).called(1);
+ verifyNever(secondCamera.dispose); // Other cameras must remain untouched.
+
+ // The first camera should be removed from the camera plugin.
+ expect(
+ (CameraPlatform.instance as CameraPlugin).cameras,
+ equals(<int, Camera>{
+ secondCameraId: secondCamera,
+ }),
+ );
+ });
+
+ testWidgets('cancels the camera video error and abort subscriptions',
+ (WidgetTester tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+ await CameraPlatform.instance.dispose(cameraId);
+
+ expect(errorStreamController.hasListener, isFalse); // Dispose must cancel the subscriptions initializeCamera set up.
+ expect(abortStreamController.hasListener, isFalse);
+ });
+
+ testWidgets('cancels the camera ended subscriptions',
+ (WidgetTester tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+ await CameraPlatform.instance.dispose(cameraId);
+
+ expect(endedStreamController.hasListener, isFalse);
+ });
+
+ testWidgets('cancels the camera video recording error subscriptions',
+ (WidgetTester tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+ await CameraPlatform.instance.startVideoRecording(cameraId); // Recording subscription only exists once recording started.
+ await CameraPlatform.instance.dispose(cameraId);
+
+ expect(videoRecordingErrorController.hasListener, isFalse);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (WidgetTester tester) async {
+ expect(
+ () => CameraPlatform.instance.dispose(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when dispose throws DomException',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final FakeDomException exception =
+ FakeDomException(DomException.INVALID_ACCESS);
+
+ when(camera.dispose).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.dispose(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ exception.name, // DomException name becomes the PlatformException code.
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('getCamera', () { // Verifies the plugin's internal camera lookup by id.
+ testWidgets('returns the correct camera', (WidgetTester tester) async {
+ final Camera camera = Camera(
+ textureId: cameraId,
+ cameraService: cameraService,
+ );
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ (CameraPlatform.instance as CameraPlugin).getCamera(cameraId),
+ equals(camera), // Lookup must return the exact stored instance.
+ );
+ });
+
+ testWidgets(
+ 'throws PlatformException '
+ 'with notFound error '
+ 'if the camera does not exist', (WidgetTester tester) async {
+ expect(
+ () => (CameraPlatform.instance as CameraPlugin).getCamera(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (PlatformException e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+ });
+
+ group('events', () {
+ late Camera camera;
+ late VideoElement videoElement;
+
+ late StreamController<Event> errorStreamController, abortStreamController;
+ late StreamController<MediaStreamTrack> endedStreamController;
+ late StreamController<ErrorEvent> videoRecordingErrorController;
+
+ setUp(() { // Shared fixture for the events group: a mocked camera whose streams the tests can drive.
+ camera = MockCamera();
+ videoElement = MockVideoElement();
+
+ errorStreamController = StreamController<Event>();
+ abortStreamController = StreamController<Event>();
+ endedStreamController = StreamController<MediaStreamTrack>();
+ videoRecordingErrorController = StreamController<ErrorEvent>();
+
+ when(camera.getVideoSize).thenReturn(const Size(10, 10));
+ when(camera.initialize)
+ .thenAnswer((Invocation _) => Future<void>.value());
+ when(camera.play).thenAnswer((Invocation _) => Future<void>.value());
+
+ when(() => camera.videoElement).thenReturn(videoElement);
+ when(() => videoElement.onError).thenAnswer((Invocation _) =>
+ FakeElementStream<Event>(errorStreamController.stream)); // Tests add events to these controllers to simulate DOM events.
+ when(() => videoElement.onAbort).thenAnswer((Invocation _) =>
+ FakeElementStream<Event>(abortStreamController.stream));
+
+ when(() => camera.onEnded)
+ .thenAnswer((Invocation _) => endedStreamController.stream);
+
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((Invocation _) => videoRecordingErrorController.stream);
+
+ when(() => camera.startVideoRecording())
+ .thenAnswer((Invocation _) async {});
+ });
+
+ testWidgets( // Initializing a real Camera against a blank stream should surface one CameraInitializedEvent.
+ 'onCameraInitialized emits a CameraInitializedEvent '
+ 'on initializeCamera', (WidgetTester tester) async {
+ // Mock the camera to use a blank video stream of size 1280x720.
+ const Size videoSize = Size(1280, 720);
+
+ videoElement = getVideoElementWithBlankStream(videoSize);
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ any(),
+ cameraId: cameraId,
+ ),
+ ).thenAnswer((Invocation _) async => videoElement.captureStream());
+
+ final Camera camera = Camera(
+ textureId: cameraId,
+ cameraService: cameraService,
+ );
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ final Stream<CameraInitializedEvent> eventStream =
+ CameraPlatform.instance.onCameraInitialized(cameraId);
+
+ final StreamQueue<CameraInitializedEvent> streamQueue =
+ StreamQueue<CameraInitializedEvent>(eventStream); // Subscribe before triggering so the event is not missed.
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraInitializedEvent(
+ cameraId,
+ videoSize.width,
+ videoSize.height,
+ ExposureMode.auto,
+ false,
+ FocusMode.auto,
+ false,
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets('onCameraResolutionChanged emits an empty stream',
+ (WidgetTester tester) async {
+ expect(
+ CameraPlatform.instance.onCameraResolutionChanged(cameraId),
+ emits(isEmpty), // Resolution changes are not reported on the web.
+ );
+ });
+
+ testWidgets(
+ 'onCameraClosing emits a CameraClosingEvent '
+ 'on the camera ended event', (WidgetTester tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ final Stream<CameraClosingEvent> eventStream =
+ CameraPlatform.instance.onCameraClosing(cameraId);
+
+ final StreamQueue<CameraClosingEvent> streamQueue =
+ StreamQueue<CameraClosingEvent>(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ endedStreamController.add(MockMediaStreamTrack()); // Simulate the media track ending.
+
+ expect(
+ await streamQueue.next,
+ equals(
+ const CameraClosingEvent(cameraId),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ group('onCameraError', () {
+ setUp(() { // Every onCameraError test needs the mocked camera registered up front.
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+ });
+
+ testWidgets( // A video-element error with a message should surface as a CameraErrorEvent containing code + message.
+ 'emits a CameraErrorEvent '
+ 'on the camera video error event '
+ 'with a message', (WidgetTester tester) async {
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream); // Subscribe before triggering so the event is not missed.
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ final FakeMediaError error = FakeMediaError(
+ MediaError.MEDIA_ERR_NETWORK,
+ 'A network error occurred.', // Fixed typo ("occured"); expectation below interpolates error.message, so it stays in sync.
+ );
+
+ final CameraErrorCode errorCode =
+ CameraErrorCode.fromMediaError(error);
+
+ when(() => videoElement.error).thenReturn(error);
+
+ errorStreamController.add(Event('error')); // Simulate the DOM 'error' event.
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: $errorCode, error message: ${error.message}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets( // A video-element error with no message should use the plugin's fallback message.
+ 'emits a CameraErrorEvent '
+ 'on the camera video error event '
+ 'with no message', (WidgetTester tester) async {
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ final FakeMediaError error =
+ FakeMediaError(MediaError.MEDIA_ERR_NETWORK);
+ final CameraErrorCode errorCode =
+ CameraErrorCode.fromMediaError(error);
+
+ when(() => videoElement.error).thenReturn(error);
+
+ errorStreamController.add(Event('error')); // Simulate the DOM 'error' event.
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: $errorCode, error message: No further diagnostic information can be determined or provided.',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on the camera video abort event', (WidgetTester tester) async {
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ abortStreamController.add(Event('abort')); // Simulate the DOM 'abort' event.
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ "Error code: ${CameraErrorCode.abort}, error message: The video element's source has not fully loaded.",
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets( // Each test below checks that an operation failure is both rethrown AND mirrored on the onCameraError stream.
+ 'emits a CameraErrorEvent '
+ 'on takePicture error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.takePicture).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.takePicture(cameraId),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on setFlashMode error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(() => camera.setFlashMode(any())).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.setFlashMode(
+ cameraId,
+ FlashMode.always,
+ ),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on getMaxZoomLevel error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.zoomLevelNotSupported,
+ 'description',
+ );
+
+ when(camera.getMaxZoomLevel).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on getMinZoomLevel error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.zoomLevelNotSupported,
+ 'description',
+ );
+
+ when(camera.getMinZoomLevel).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on setZoomLevel error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.zoomLevelNotSupported,
+ 'description',
+ );
+
+ when(() => camera.setZoomLevel(any())).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>(), // setZoomLevel rethrows as CameraException, unlike the other operations.
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets( // Continuation of the operation-failure tests: preview and video-recording operations.
+ 'emits a CameraErrorEvent '
+ 'on resumePreview error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.unknown,
+ 'description',
+ );
+
+ when(camera.play).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.resumePreview(cameraId),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on startVideoRecording error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((Invocation _) => const Stream<ErrorEvent>.empty()); // Override fixture: only the thrown exception should be reported.
+
+ when(
+ () => camera.startVideoRecording(
+ maxVideoDuration: any(named: 'maxVideoDuration'),
+ ),
+ ).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.startVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on the camera video recording error event',
+ (WidgetTester tester) async {
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+ await CameraPlatform.instance.startVideoRecording(cameraId); // Recording must be active for its error stream to be watched.
+
+ final FakeErrorEvent errorEvent = FakeErrorEvent('type', 'message');
+
+ videoRecordingErrorController.add(errorEvent);
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${errorEvent.type}, error message: ${errorEvent.message}.',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on stopVideoRecording error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.stopVideoRecording).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.stopVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on pauseVideoRecording error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.pauseVideoRecording).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.pauseVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on resumeVideoRecording error', (WidgetTester tester) async {
+ final CameraWebException exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.resumeVideoRecording).thenThrow(exception);
+
+ final Stream<CameraErrorEvent> eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(eventStream);
+
+ expect(
+ () async => CameraPlatform.instance.resumeVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+
+ testWidgets('onVideoRecordedEvent emits a VideoRecordedEvent',
+ (WidgetTester tester) async {
+ final MockCamera camera = MockCamera();
+ final MockXFile capturedVideo = MockXFile();
+ final Stream<VideoRecordedEvent> stream =
+ Stream<VideoRecordedEvent>.value(
+ VideoRecordedEvent(cameraId, capturedVideo, Duration.zero));
+ when(() => camera.onVideoRecordedEvent)
+ .thenAnswer((Invocation _) => stream);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ final StreamQueue<VideoRecordedEvent> streamQueue =
+ StreamQueue<VideoRecordedEvent>(
+ CameraPlatform.instance.onVideoRecordedEvent(cameraId));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ VideoRecordedEvent(cameraId, capturedVideo, Duration.zero),
+ ),
+ );
+ });
+
+ group('onDeviceOrientationChanged', () {
+ group('emits an empty stream', () {
+ testWidgets('when screen is not supported',
+ (WidgetTester tester) async {
+ when(() => window.screen).thenReturn(null);
+
+ expect(
+ CameraPlatform.instance.onDeviceOrientationChanged(),
+ emits(isEmpty),
+ );
+ });
+
+ testWidgets('when screen orientation is not supported',
+ (WidgetTester tester) async {
+ when(() => screen.orientation).thenReturn(null);
+
+ expect(
+ CameraPlatform.instance.onDeviceOrientationChanged(),
+ emits(isEmpty),
+ );
+ });
+ });
+
+ testWidgets('emits the initial DeviceOrientationChangedEvent',
+ (WidgetTester tester) async {
+ when(
+ () => cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitPrimary,
+ ),
+ ).thenReturn(DeviceOrientation.portraitUp);
+
+ // Set the initial screen orientation to portraitPrimary.
+ when(() => screenOrientation.type)
+ .thenReturn(OrientationType.portraitPrimary);
+
+ final StreamController<Event> eventStreamController =
+ StreamController<Event>();
+
+ when(() => screenOrientation.onChange)
+ .thenAnswer((Invocation _) => eventStreamController.stream);
+
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ CameraPlatform.instance.onDeviceOrientationChanged();
+
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+
+ expect(
+ await streamQueue.next,
+ equals(
+ const DeviceOrientationChangedEvent(
+ DeviceOrientation.portraitUp,
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a DeviceOrientationChangedEvent '
+ 'when the screen orientation is changed',
+ (WidgetTester tester) async {
+ when(
+ () => cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.landscapePrimary,
+ ),
+ ).thenReturn(DeviceOrientation.landscapeLeft);
+
+ when(
+ () => cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitSecondary,
+ ),
+ ).thenReturn(DeviceOrientation.portraitDown);
+
+ final StreamController<Event> eventStreamController =
+ StreamController<Event>();
+
+ when(() => screenOrientation.onChange)
+ .thenAnswer((Invocation _) => eventStreamController.stream);
+
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ CameraPlatform.instance.onDeviceOrientationChanged();
+
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+
+ // Change the screen orientation to landscapePrimary and
+ // emit an event on the screenOrientation.onChange stream.
+ when(() => screenOrientation.type)
+ .thenReturn(OrientationType.landscapePrimary);
+
+ eventStreamController.add(Event('change'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ const DeviceOrientationChangedEvent(
+ DeviceOrientation.landscapeLeft,
+ ),
+ ),
+ );
+
+ // Change the screen orientation to portraitSecondary and
+ // emit an event on the screenOrientation.onChange stream.
+ when(() => screenOrientation.type)
+ .thenReturn(OrientationType.portraitSecondary);
+
+ eventStreamController.add(Event('change'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ const DeviceOrientationChangedEvent(
+ DeviceOrientation.portraitDown,
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/helpers/helpers.dart b/packages/camera/camera_web/example/integration_test/helpers/helpers.dart
new file mode 100644
index 0000000..7094f55
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/helpers/helpers.dart
@@ -0,0 +1,5 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'mocks.dart';
diff --git a/packages/camera/camera_web/example/integration_test/helpers/mocks.dart b/packages/camera/camera_web/example/integration_test/helpers/mocks.dart
new file mode 100644
index 0000000..855ef2b
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/helpers/mocks.dart
@@ -0,0 +1,174 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// ignore_for_file: avoid_implementing_value_types
+
+import 'dart:async';
+import 'dart:html';
+import 'dart:ui';
+
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/shims/dart_js_util.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:cross_file/cross_file.dart';
+import 'package:mocktail/mocktail.dart';
+
+class MockWindow extends Mock implements Window {}
+
+class MockScreen extends Mock implements Screen {}
+
+class MockScreenOrientation extends Mock implements ScreenOrientation {}
+
+class MockDocument extends Mock implements Document {}
+
+class MockElement extends Mock implements Element {}
+
+class MockNavigator extends Mock implements Navigator {}
+
+class MockMediaDevices extends Mock implements MediaDevices {}
+
+class MockCameraService extends Mock implements CameraService {}
+
+class MockMediaStreamTrack extends Mock implements MediaStreamTrack {}
+
+class MockCamera extends Mock implements Camera {}
+
+class MockCameraOptions extends Mock implements CameraOptions {}
+
+class MockVideoElement extends Mock implements VideoElement {}
+
+class MockXFile extends Mock implements XFile {}
+
+class MockJsUtil extends Mock implements JsUtil {}
+
+class MockMediaRecorder extends Mock implements MediaRecorder {}
+
+/// A fake [MediaStream] that returns the provided [_videoTracks].
+class FakeMediaStream extends Fake implements MediaStream {
+ FakeMediaStream(this._videoTracks);
+
+ final List<MediaStreamTrack> _videoTracks;
+
+ @override
+ List<MediaStreamTrack> getVideoTracks() => _videoTracks;
+}
+
+/// A fake [MediaDeviceInfo] that returns the provided [_deviceId], [_label] and [_kind].
+class FakeMediaDeviceInfo extends Fake implements MediaDeviceInfo {
+ FakeMediaDeviceInfo(this._deviceId, this._label, this._kind);
+
+ final String _deviceId;
+ final String _label;
+ final String _kind;
+
+ @override
+ String? get deviceId => _deviceId;
+
+ @override
+ String? get label => _label;
+
+ @override
+ String? get kind => _kind;
+}
+
+/// A fake [MediaError] that returns the provided error [_code] and [_message].
+class FakeMediaError extends Fake implements MediaError {
+ FakeMediaError(
+ this._code, [
+ String message = '',
+ ]) : _message = message;
+
+ final int _code;
+ final String _message;
+
+ @override
+ int get code => _code;
+
+ @override
+ String? get message => _message;
+}
+
+/// A fake [DomException] that returns the provided error [_name] and [_message].
+class FakeDomException extends Fake implements DomException {
+ FakeDomException(
+ this._name, [
+ String? message,
+ ]) : _message = message;
+
+ final String _name;
+ final String? _message;
+
+ @override
+ String get name => _name;
+
+ @override
+ String? get message => _message;
+}
+
+/// A fake [ElementStream] that listens to the provided [_stream] on [listen].
+class FakeElementStream<T extends Event> extends Fake
+ implements ElementStream<T> {
+ FakeElementStream(this._stream);
+
+ final Stream<T> _stream;
+
+ @override
+ StreamSubscription<T> listen(void Function(T event)? onData,
+ {Function? onError, void Function()? onDone, bool? cancelOnError}) {
+ return _stream.listen(
+ onData,
+ onError: onError,
+ onDone: onDone,
+ cancelOnError: cancelOnError,
+ );
+ }
+}
+
+/// A fake [BlobEvent] that returns the provided blob [data].
+class FakeBlobEvent extends Fake implements BlobEvent {
+ FakeBlobEvent(this._blob);
+
+ final Blob? _blob;
+
+ @override
+ Blob? get data => _blob;
+}
+
+/// A fake [ErrorEvent] that returns the provided error [_type] and [_message].
+class FakeErrorEvent extends Fake implements ErrorEvent {
+ FakeErrorEvent(
+ String type, [
+ String? message,
+ ]) : _type = type,
+ _message = message;
+
+ final String _type;
+ final String? _message;
+
+ @override
+ String get type => _type;
+
+ @override
+ String? get message => _message;
+}
+
+/// Returns a video element with a blank stream of size [videoSize].
+///
+/// Can be used to mock a video stream:
+/// ```dart
+/// final videoElement = getVideoElementWithBlankStream(Size(100, 100));
+/// final videoStream = videoElement.captureStream();
+/// ```
+VideoElement getVideoElementWithBlankStream(Size videoSize) {
+ final CanvasElement canvasElement = CanvasElement(
+ width: videoSize.width.toInt(),
+ height: videoSize.height.toInt(),
+ )..context2D.fillRect(0, 0, videoSize.width, videoSize.height);
+
+ final VideoElement videoElement = VideoElement()
+ ..srcObject = canvasElement.captureStream();
+
+ return videoElement;
+}
diff --git a/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart b/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart
new file mode 100644
index 0000000..8614cd9
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart
@@ -0,0 +1,50 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('ZoomLevelCapability', () {
+ testWidgets('sets all properties', (WidgetTester tester) async {
+ const double minimum = 100.0;
+ const double maximum = 400.0;
+ final MockMediaStreamTrack videoTrack = MockMediaStreamTrack();
+
+ final ZoomLevelCapability capability = ZoomLevelCapability(
+ minimum: minimum,
+ maximum: maximum,
+ videoTrack: videoTrack,
+ );
+
+ expect(capability.minimum, equals(minimum));
+ expect(capability.maximum, equals(maximum));
+ expect(capability.videoTrack, equals(videoTrack));
+ });
+
+ testWidgets('supports value equality', (WidgetTester tester) async {
+ final MockMediaStreamTrack videoTrack = MockMediaStreamTrack();
+
+ expect(
+ ZoomLevelCapability(
+ minimum: 0.0,
+ maximum: 100.0,
+ videoTrack: videoTrack,
+ ),
+ equals(
+ ZoomLevelCapability(
+ minimum: 0.0,
+ maximum: 100.0,
+ videoTrack: videoTrack,
+ ),
+ ),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/lib/main.dart b/packages/camera/camera_web/example/lib/main.dart
new file mode 100644
index 0000000..670891f
--- /dev/null
+++ b/packages/camera/camera_web/example/lib/main.dart
@@ -0,0 +1,21 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/material.dart';
+
+void main() => runApp(const MyApp());
+
+/// App for testing
+class MyApp extends StatelessWidget {
+ /// Default Constructor
+ const MyApp({Key? key}) : super(key: key);
+
+ @override
+ Widget build(BuildContext context) {
+ return const Directionality(
+ textDirection: TextDirection.ltr,
+ child: Text('Testing... Look at the console output for results!'),
+ );
+ }
+}
diff --git a/packages/camera/camera_web/example/pubspec.yaml b/packages/camera/camera_web/example/pubspec.yaml
new file mode 100644
index 0000000..ee66870
--- /dev/null
+++ b/packages/camera/camera_web/example/pubspec.yaml
@@ -0,0 +1,24 @@
+name: camera_web_integration_tests
+publish_to: none
+
+environment:
+ sdk: ">=2.12.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+dependencies:
+ flutter:
+ sdk: flutter
+
+dev_dependencies:
+ async: ^2.5.0
+ camera_platform_interface: ^2.1.0
+ camera_web:
+ path: ../
+ cross_file: ^0.3.1
+ flutter_driver:
+ sdk: flutter
+ flutter_test:
+ sdk: flutter
+ integration_test:
+ sdk: flutter
+ mocktail: ^0.3.0
diff --git a/packages/camera/camera_web/example/run_test.sh b/packages/camera/camera_web/example/run_test.sh
new file mode 100755
index 0000000..00482fa
--- /dev/null
+++ b/packages/camera/camera_web/example/run_test.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if pgrep -lf chromedriver > /dev/null; then
+ echo "chromedriver is running."
+
+ if [ $# -eq 0 ]; then
+ echo "No target specified, running all tests..."
+ find integration_test/ -iname *_test.dart | xargs -n1 -I{} -t flutter drive -d web-server --web-port=7357 --browser-name=chrome --driver=test_driver/integration_test.dart --target='{}'
+ else
+ echo "Running test target: $1..."
+ set -x
+ flutter drive -d web-server --web-port=7357 --browser-name=chrome --driver=test_driver/integration_test.dart --target=$1
+ fi
+
+ else
+ echo "chromedriver is not running."
+ echo "Please, check the README.md for instructions on how to use run_test.sh"
+fi
+
diff --git a/packages/camera/camera_web/example/test_driver/integration_test.dart b/packages/camera/camera_web/example/test_driver/integration_test.dart
new file mode 100644
index 0000000..4f10f2a
--- /dev/null
+++ b/packages/camera/camera_web/example/test_driver/integration_test.dart
@@ -0,0 +1,7 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:integration_test/integration_test_driver.dart';
+
+Future<void> main() => integrationDriver();
diff --git a/packages/camera/camera_web/example/web/index.html b/packages/camera/camera_web/example/web/index.html
new file mode 100644
index 0000000..f3c6a5e
--- /dev/null
+++ b/packages/camera/camera_web/example/web/index.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<!-- Copyright 2013 The Flutter Authors. All rights reserved.
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file. -->
+<html>
+ <head>
+ <title>Browser Tests</title>
+ </head>
+ <body>
+ <script src="main.dart.js"></script>
+ </body>
+</html>
diff --git a/packages/camera/camera_web/lib/camera_web.dart b/packages/camera/camera_web/lib/camera_web.dart
new file mode 100644
index 0000000..dcefc92
--- /dev/null
+++ b/packages/camera/camera_web/lib/camera_web.dart
@@ -0,0 +1,7 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+library camera_web;
+
+export 'src/camera_web.dart';
diff --git a/packages/camera/camera_web/lib/src/camera.dart b/packages/camera/camera_web/lib/src/camera.dart
new file mode 100644
index 0000000..13ef21b
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/camera.dart
@@ -0,0 +1,649 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html' as html;
+import 'dart:ui';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+
+import 'camera_service.dart';
+import 'shims/dart_ui.dart' as ui;
+import 'types/types.dart';
+
+String _getViewType(int cameraId) => 'plugins.flutter.io/camera_$cameraId';
+
+/// A camera initialized from the media devices in the current window.
+/// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices
+///
+/// The obtained camera stream is constrained by [options] and fetched
+/// with [CameraService.getMediaStreamForOptions].
+///
+/// The camera stream is displayed in the [videoElement] wrapped in the
+/// [divElement] to avoid overriding the custom styles applied to
+/// the video element in [_applyDefaultVideoStyles].
+/// See: https://github.com/flutter/flutter/issues/79519
+///
+/// The camera stream can be played/stopped by calling [play]/[stop],
+/// may capture a picture by calling [takePicture] or capture a video
+/// by calling [startVideoRecording], [pauseVideoRecording],
+/// [resumeVideoRecording] or [stopVideoRecording].
+///
+/// The camera zoom may be adjusted with [setZoomLevel]. The provided
+/// zoom level must be a value in the range of [getMinZoomLevel] to
+/// [getMaxZoomLevel].
+///
+/// The [textureId] is used to register a camera view with the id
+/// defined by [_getViewType].
+class Camera {
+ /// Creates a new instance of [Camera]
+ /// with the given [textureId] and
+ /// optional [options].
+ Camera({
+ required this.textureId,
+ required CameraService cameraService,
+ this.options = const CameraOptions(),
+ }) : _cameraService = cameraService;
+
+ // A torch mode constraint name.
+ // See: https://w3c.github.io/mediacapture-image/#dom-mediatracksupportedconstraints-torch
+ static const String _torchModeKey = 'torch';
+
+ /// The texture id used to register the camera view.
+ final int textureId;
+
+ /// The camera options used to initialize a camera, empty by default.
+ final CameraOptions options;
+
+ /// The video element that displays the camera stream.
+ /// Initialized in [initialize].
+ late final html.VideoElement videoElement;
+
+ /// The wrapping element for the [videoElement] to avoid overriding
+ /// the custom styles applied in [_applyDefaultVideoStyles].
+ /// Initialized in [initialize].
+ late final html.DivElement divElement;
+
+ /// The camera stream displayed in the [videoElement].
+ /// Initialized in [initialize] and [play], reset in [stop].
+ html.MediaStream? stream;
+
+ /// The stream of the camera video tracks that have ended playing.
+ ///
+ /// This occurs when there is no more camera stream data, e.g.
+ /// the user has stopped the stream by changing the camera device,
+ /// revoked the camera permissions or ejected the camera device.
+ ///
+ /// MediaStreamTrack.onended:
+ /// https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/onended
+ Stream<html.MediaStreamTrack> get onEnded => onEndedController.stream;
+
+ /// The stream controller for the [onEnded] stream.
+ @visibleForTesting
+ final StreamController<html.MediaStreamTrack> onEndedController =
+ StreamController<html.MediaStreamTrack>.broadcast();
+
+ StreamSubscription<html.Event>? _onEndedSubscription;
+
+ /// The stream of the camera video recording errors.
+ ///
+ /// This occurs when the video recording is not allowed or an unsupported
+ /// codec is used.
+ ///
+ /// MediaRecorder.error:
+ /// https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/error_event
+ Stream<html.ErrorEvent> get onVideoRecordingError =>
+ videoRecordingErrorController.stream;
+
+ /// The stream controller for the [onVideoRecordingError] stream.
+ @visibleForTesting
+ final StreamController<html.ErrorEvent> videoRecordingErrorController =
+ StreamController<html.ErrorEvent>.broadcast();
+
+ StreamSubscription<html.Event>? _onVideoRecordingErrorSubscription;
+
+ /// The camera flash mode.
+ @visibleForTesting
+ FlashMode? flashMode;
+
+ /// The camera service used to get the media stream for the camera.
+ final CameraService _cameraService;
+
+ /// The current browser window used to access media devices.
+ @visibleForTesting
+ html.Window? window = html.window;
+
+ /// The recorder used to record a video from the camera.
+ @visibleForTesting
+ html.MediaRecorder? mediaRecorder;
+
+ /// Whether the video of the given type is supported.
+ @visibleForTesting
+ bool Function(String) isVideoTypeSupported =
+ html.MediaRecorder.isTypeSupported;
+
+ /// The list of consecutive video data files recorded with [mediaRecorder].
+ final List<html.Blob> _videoData = <html.Blob>[];
+
+ /// Completes when the video recording is stopped/finished.
+ Completer<XFile>? _videoAvailableCompleter;
+
+ /// A data listener fired when a new part of video data is available.
+ void Function(html.Event)? _videoDataAvailableListener;
+
+ /// A listener fired when a video recording is stopped.
+ void Function(html.Event)? _videoRecordingStoppedListener;
+
+ /// A builder to merge a list of blobs into a single blob.
+ @visibleForTesting
+ // TODO(stuartmorgan): Remove this 'ignore' once we don't analyze using 2.10
+ // any more. It's a false positive that is fixed in later versions.
+ // ignore: prefer_function_declarations_over_variables
+ html.Blob Function(List<html.Blob> blobs, String type) blobBuilder =
+ (List<html.Blob> blobs, String type) => html.Blob(blobs, type);
+
+ /// The stream that emits a [VideoRecordedEvent] when a video recording is created.
+ Stream<VideoRecordedEvent> get onVideoRecordedEvent =>
+ videoRecorderController.stream;
+
+ /// The stream controller for the [onVideoRecordedEvent] stream.
+ @visibleForTesting
+ final StreamController<VideoRecordedEvent> videoRecorderController =
+ StreamController<VideoRecordedEvent>.broadcast();
+
+ /// Initializes the camera stream displayed in the [videoElement].
+ /// Registers the camera view with [textureId] under [_getViewType] type.
+ /// Emits the camera default video track on the [onEnded] stream when it ends.
+ Future<void> initialize() async {
+ stream = await _cameraService.getMediaStreamForOptions(
+ options,
+ cameraId: textureId,
+ );
+
+ videoElement = html.VideoElement();
+
+ divElement = html.DivElement()
+ ..style.setProperty('object-fit', 'cover')
+ ..append(videoElement);
+
+ ui.platformViewRegistry.registerViewFactory(
+ _getViewType(textureId),
+ (_) => divElement,
+ );
+
+ videoElement
+ ..autoplay = false
+ ..muted = true
+ ..srcObject = stream
+ ..setAttribute('playsinline', '');
+
+ _applyDefaultVideoStyles(videoElement);
+
+ final List<html.MediaStreamTrack> videoTracks = stream!.getVideoTracks();
+
+ if (videoTracks.isNotEmpty) {
+ final html.MediaStreamTrack defaultVideoTrack = videoTracks.first;
+
+ _onEndedSubscription = defaultVideoTrack.onEnded.listen((html.Event _) {
+ onEndedController.add(defaultVideoTrack);
+ });
+ }
+ }
+
+ /// Starts the camera stream.
+ ///
+ /// Initializes the camera source if the camera was previously stopped.
+ Future<void> play() async {
+ if (videoElement.srcObject == null) {
+ stream = await _cameraService.getMediaStreamForOptions(
+ options,
+ cameraId: textureId,
+ );
+ videoElement.srcObject = stream;
+ }
+ await videoElement.play();
+ }
+
+ /// Pauses the camera stream on the current frame.
+ void pause() {
+ videoElement.pause();
+ }
+
+ /// Stops the camera stream and resets the camera source.
+ void stop() {
+ final List<html.MediaStreamTrack> videoTracks = stream!.getVideoTracks();
+ if (videoTracks.isNotEmpty) {
+ onEndedController.add(videoTracks.first);
+ }
+
+ final List<html.MediaStreamTrack>? tracks = stream?.getTracks();
+ if (tracks != null) {
+ for (final html.MediaStreamTrack track in tracks) {
+ track.stop();
+ }
+ }
+ videoElement.srcObject = null;
+ stream = null;
+ }
+
+ /// Captures a picture and returns the saved file in a JPEG format.
+ ///
+ /// Enables the camera flash (torch mode) for a period of taking a picture
+ /// if the flash mode is either [FlashMode.auto] or [FlashMode.always].
+ Future<XFile> takePicture() async {
+ final bool shouldEnableTorchMode =
+ flashMode == FlashMode.auto || flashMode == FlashMode.always;
+
+ if (shouldEnableTorchMode) {
+ _setTorchMode(enabled: true);
+ }
+
+ final int videoWidth = videoElement.videoWidth;
+ final int videoHeight = videoElement.videoHeight;
+ final html.CanvasElement canvas =
+ html.CanvasElement(width: videoWidth, height: videoHeight);
+ final bool isBackCamera = getLensDirection() == CameraLensDirection.back;
+
+ // Flip the picture horizontally if it is not taken from a back camera.
+ if (!isBackCamera) {
+ canvas.context2D
+ ..translate(videoWidth, 0)
+ ..scale(-1, 1);
+ }
+
+ canvas.context2D
+ .drawImageScaled(videoElement, 0, 0, videoWidth, videoHeight);
+
+ final html.Blob blob = await canvas.toBlob('image/jpeg');
+
+ if (shouldEnableTorchMode) {
+ _setTorchMode(enabled: false);
+ }
+
+ return XFile(html.Url.createObjectUrl(blob));
+ }
+
+ /// Returns a size of the camera video based on its first video track size.
+ ///
+ /// Returns [Size.zero] if the camera is missing a video track or
+ /// the video track does not include the width or height setting.
+ Size getVideoSize() {
+ final List<html.MediaStreamTrack> videoTracks =
+ videoElement.srcObject?.getVideoTracks() ?? <html.MediaStreamTrack>[];
+
+ if (videoTracks.isEmpty) {
+ return Size.zero;
+ }
+
+ final html.MediaStreamTrack defaultVideoTrack = videoTracks.first;
+ final Map<dynamic, dynamic> defaultVideoTrackSettings =
+ defaultVideoTrack.getSettings();
+
+ final double? width = defaultVideoTrackSettings['width'] as double?;
+ final double? height = defaultVideoTrackSettings['height'] as double?;
+
+ if (width != null && height != null) {
+ return Size(width, height);
+ } else {
+ return Size.zero;
+ }
+ }
+
+ /// Sets the camera flash mode to [mode] by modifying the camera
+ /// torch mode constraint.
+ ///
+ /// The torch mode is enabled for [FlashMode.torch] and
+ /// disabled for [FlashMode.off].
+ ///
+ /// For [FlashMode.auto] and [FlashMode.always] the torch mode is enabled
+ /// only for a period of taking a picture in [takePicture].
+ ///
+ /// Throws a [CameraWebException] if the torch mode is not supported
+ /// or the camera has not been initialized or started.
+ void setFlashMode(FlashMode mode) {
+ final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices;
+ final Map<dynamic, dynamic>? supportedConstraints =
+ mediaDevices?.getSupportedConstraints();
+ final bool torchModeSupported =
+ supportedConstraints?[_torchModeKey] as bool? ?? false;
+
+ if (!torchModeSupported) {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.torchModeNotSupported,
+ 'The torch mode is not supported in the current browser.',
+ );
+ }
+
+ // Save the updated flash mode to be used later when taking a picture.
+ flashMode = mode;
+
+ // Enable the torch mode only if the flash mode is torch.
+ _setTorchMode(enabled: mode == FlashMode.torch);
+ }
+
+ /// Sets the camera torch mode constraint to [enabled].
+ ///
+ /// Throws a [CameraWebException] if the torch mode is not supported
+ /// or the camera has not been initialized or started.
+ void _setTorchMode({required bool enabled}) {
+ final List<html.MediaStreamTrack> videoTracks =
+ stream?.getVideoTracks() ?? <html.MediaStreamTrack>[];
+
+ if (videoTracks.isNotEmpty) {
+ final html.MediaStreamTrack defaultVideoTrack = videoTracks.first;
+
+ final bool canEnableTorchMode =
+ defaultVideoTrack.getCapabilities()[_torchModeKey] as bool? ?? false;
+
+ if (canEnableTorchMode) {
+ defaultVideoTrack.applyConstraints(<String, Object>{
+ 'advanced': <Object>[
+ <String, Object>{
+ _torchModeKey: enabled,
+ }
+ ]
+ });
+ } else {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.torchModeNotSupported,
+ 'The torch mode is not supported by the current camera.',
+ );
+ }
+ } else {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.notStarted,
+ 'The camera has not been initialized or started.',
+ );
+ }
+ }
+
+ /// Returns the camera maximum zoom level.
+ ///
+ /// Throws a [CameraWebException] if the zoom level is not supported
+ /// or the camera has not been initialized or started.
+ double getMaxZoomLevel() =>
+ _cameraService.getZoomLevelCapabilityForCamera(this).maximum;
+
+ /// Returns the camera minimum zoom level.
+ ///
+ /// Throws a [CameraWebException] if the zoom level is not supported
+ /// or the camera has not been initialized or started.
+ double getMinZoomLevel() =>
+ _cameraService.getZoomLevelCapabilityForCamera(this).minimum;
+
+ /// Sets the camera zoom level to [zoom].
+ ///
+ /// Throws a [CameraWebException] if the zoom level is invalid,
+ /// not supported or the camera has not been initialized or started.
+ void setZoomLevel(double zoom) {
+ final ZoomLevelCapability zoomLevelCapability =
+ _cameraService.getZoomLevelCapabilityForCamera(this);
+
+ if (zoom < zoomLevelCapability.minimum ||
+ zoom > zoomLevelCapability.maximum) {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.zoomLevelInvalid,
+ 'The provided zoom level must be in the range of ${zoomLevelCapability.minimum} to ${zoomLevelCapability.maximum}.',
+ );
+ }
+
+ zoomLevelCapability.videoTrack.applyConstraints(<String, Object>{
+ 'advanced': <Object>[
+ <String, Object>{
+ ZoomLevelCapability.constraintName: zoom,
+ }
+ ]
+ });
+ }
+
+ /// Returns a lens direction of this camera.
+ ///
+ /// Returns null if the camera is missing a video track or
+ /// the video track does not include the facing mode setting.
+ CameraLensDirection? getLensDirection() {
+ final List<html.MediaStreamTrack> videoTracks =
+ videoElement.srcObject?.getVideoTracks() ?? <html.MediaStreamTrack>[];
+
+ if (videoTracks.isEmpty) {
+ return null;
+ }
+
+ final html.MediaStreamTrack defaultVideoTrack = videoTracks.first;
+ final Map<dynamic, dynamic> defaultVideoTrackSettings =
+ defaultVideoTrack.getSettings();
+
+ final String? facingMode =
+ defaultVideoTrackSettings['facingMode'] as String?;
+
+ if (facingMode != null) {
+ return _cameraService.mapFacingModeToLensDirection(facingMode);
+ } else {
+ return null;
+ }
+ }
+
+ /// Returns the registered view type of the camera.
+ String getViewType() => _getViewType(textureId);
+
+ /// Starts a new video recording using [html.MediaRecorder].
+ ///
+ /// Throws a [CameraWebException] if the provided maximum video duration is invalid
+ /// or the browser does not support any of the available video mime types
+ /// from [_videoMimeType].
+ Future<void> startVideoRecording({Duration? maxVideoDuration}) async {
+ if (maxVideoDuration != null && maxVideoDuration.inMilliseconds <= 0) {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.notSupported,
+ 'The maximum video duration must be greater than 0 milliseconds.',
+ );
+ }
+
+ mediaRecorder ??=
+ html.MediaRecorder(videoElement.srcObject!, <String, Object>{
+ 'mimeType': _videoMimeType,
+ });
+
+ _videoAvailableCompleter = Completer<XFile>();
+
+ _videoDataAvailableListener =
+ (html.Event event) => _onVideoDataAvailable(event, maxVideoDuration);
+
+ _videoRecordingStoppedListener =
+ (html.Event event) => _onVideoRecordingStopped(event, maxVideoDuration);
+
+ mediaRecorder!.addEventListener(
+ 'dataavailable',
+ _videoDataAvailableListener,
+ );
+
+ mediaRecorder!.addEventListener(
+ 'stop',
+ _videoRecordingStoppedListener,
+ );
+
+ _onVideoRecordingErrorSubscription =
+ mediaRecorder!.onError.listen((html.Event event) {
+ final html.ErrorEvent error = event as html.ErrorEvent;
+ if (error != null) {
+ videoRecordingErrorController.add(error);
+ }
+ });
+
+ if (maxVideoDuration != null) {
+ mediaRecorder!.start(maxVideoDuration.inMilliseconds);
+ } else {
+ // Don't pass the null duration as that will fire a `dataavailable` event directly.
+ mediaRecorder!.start();
+ }
+ }
+
+ void _onVideoDataAvailable(
+ html.Event event, [
+ Duration? maxVideoDuration,
+ ]) {
+ final html.Blob? blob = (event as html.BlobEvent).data;
+
+ // Append the recorded part of the video to the list of all video data files.
+ if (blob != null) {
+ _videoData.add(blob);
+ }
+
+ // Stop the recorder if the video has a maxVideoDuration
+ // and the recording was not stopped manually.
+ if (maxVideoDuration != null && mediaRecorder!.state == 'recording') {
+ mediaRecorder!.stop();
+ }
+ }
+
+  Future<void> _onVideoRecordingStopped(
+    html.Event event, [
+    Duration? maxVideoDuration,
+  ]) async {
+    if (_videoData.isNotEmpty) {
+      // Concatenate all video data files into a single blob.
+      final String videoType = _videoData.first.type;
+      final html.Blob videoBlob = blobBuilder(_videoData, videoType);
+
+      // Create a file containing the video blob.
+      final XFile file = XFile(
+        html.Url.createObjectUrl(videoBlob),
+        mimeType: _videoMimeType,
+        name: videoBlob.hashCode.toString(),
+      );
+
+      // Emit an event containing the recorded video file.
+      videoRecorderController.add(
+        VideoRecordedEvent(textureId, file, maxVideoDuration),
+      );
+
+      _videoAvailableCompleter?.complete(file);
+    }
+
+    // Clean up the media recorder with its event listeners and video data.
+    mediaRecorder!.removeEventListener(
+      'dataavailable',
+      _videoDataAvailableListener,
+    );
+
+    mediaRecorder!.removeEventListener(
+      'stop',
+      _videoRecordingStoppedListener, // Must match the listener added in `startVideoRecording`.
+    );
+
+    await _onVideoRecordingErrorSubscription?.cancel();
+
+    mediaRecorder = null;
+    _videoDataAvailableListener = null;
+    _videoRecordingStoppedListener = null;
+    _videoData.clear();
+  }
+
+ /// Pauses the current video recording.
+ ///
+ /// Throws a [CameraWebException] if the video recorder is uninitialized.
+ Future<void> pauseVideoRecording() async {
+ if (mediaRecorder == null) {
+ throw _videoRecordingNotStartedException;
+ }
+ mediaRecorder!.pause();
+ }
+
+ /// Resumes the current video recording.
+ ///
+ /// Throws a [CameraWebException] if the video recorder is uninitialized.
+ Future<void> resumeVideoRecording() async {
+ if (mediaRecorder == null) {
+ throw _videoRecordingNotStartedException;
+ }
+ mediaRecorder!.resume();
+ }
+
+ /// Stops the video recording and returns the captured video file.
+ ///
+ /// Throws a [CameraWebException] if the video recorder is uninitialized.
+ Future<XFile> stopVideoRecording() async {
+ if (mediaRecorder == null || _videoAvailableCompleter == null) {
+ throw _videoRecordingNotStartedException;
+ }
+
+ mediaRecorder!.stop();
+
+ return _videoAvailableCompleter!.future;
+ }
+
+ /// Disposes the camera by stopping the camera stream,
+ /// the video recording and reloading the camera source.
+ Future<void> dispose() async {
+ // Stop the camera stream.
+ stop();
+
+ await videoRecorderController.close();
+ mediaRecorder = null;
+ _videoDataAvailableListener = null;
+
+ // Reset the [videoElement] to its initial state.
+ videoElement
+ ..srcObject = null
+ ..load();
+
+ await _onEndedSubscription?.cancel();
+ _onEndedSubscription = null;
+ await onEndedController.close();
+
+ await _onVideoRecordingErrorSubscription?.cancel();
+ _onVideoRecordingErrorSubscription = null;
+ await videoRecordingErrorController.close();
+ }
+
+ /// Returns the first supported video mime type (amongst mp4 and webm)
+ /// to use when recording a video.
+ ///
+ /// Throws a [CameraWebException] if the browser does not support
+ /// any of the available video mime types.
+ String get _videoMimeType {
+ const List<String> types = <String>[
+ 'video/mp4',
+ 'video/webm',
+ ];
+
+ return types.firstWhere(
+ (String type) => isVideoTypeSupported(type),
+ orElse: () => throw CameraWebException(
+ textureId,
+ CameraErrorCode.notSupported,
+ 'The browser does not support any of the following video types: ${types.join(',')}.',
+ ),
+ );
+ }
+
+ CameraWebException get _videoRecordingNotStartedException =>
+ CameraWebException(
+ textureId,
+ CameraErrorCode.videoRecordingNotStarted,
+ 'The video recorder is uninitialized. The recording might not have been started. Make sure to call `startVideoRecording` first.',
+ );
+
+ /// Applies default styles to the video [element].
+ void _applyDefaultVideoStyles(html.VideoElement element) {
+ final bool isBackCamera = getLensDirection() == CameraLensDirection.back;
+
+ // Flip the video horizontally if it is not taken from a back camera.
+ if (!isBackCamera) {
+ element.style.transform = 'scaleX(-1)';
+ }
+
+ element.style
+ ..transformOrigin = 'center'
+ ..pointerEvents = 'none'
+ ..width = '100%'
+ ..height = '100%'
+ ..objectFit = 'cover';
+ }
+}
diff --git a/packages/camera/camera_web/lib/src/camera_service.dart b/packages/camera/camera_web/lib/src/camera_service.dart
new file mode 100644
index 0000000..451278c
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/camera_service.dart
@@ -0,0 +1,346 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html' as html;
+// TODO(a14n): remove this import once Flutter 3.1 or later reaches stable (including flutter/flutter#106316)
+// ignore: unnecessary_import
+import 'dart:ui';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+
+import 'camera.dart';
+import 'shims/dart_js_util.dart';
+import 'types/types.dart';
+
+/// A service to fetch, map camera settings and
+/// obtain the camera stream.
+class CameraService {
+ // A facing mode constraint name.
+ static const String _facingModeKey = 'facingMode';
+
+ /// The current browser window used to access media devices.
+ @visibleForTesting
+ html.Window? window = html.window;
+
+ /// The utility to manipulate JavaScript interop objects.
+ @visibleForTesting
+ JsUtil jsUtil = JsUtil();
+
+  /// Returns a media stream associated with the camera device
+  /// with [cameraId] and constrained by [options].
+  Future<html.MediaStream> getMediaStreamForOptions(
+    CameraOptions options, {
+    int cameraId = 0,
+  }) async {
+    final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices;
+
+    // Throw a not supported exception if the current browser window
+    // does not support any media devices.
+    if (mediaDevices == null) {
+      throw PlatformException(
+        code: CameraErrorCode.notSupported.toString(),
+        message: 'The camera is not supported on this device.',
+      );
+    }
+
+    try {
+      final Map<String, dynamic> constraints = options.toJson();
+      return await mediaDevices.getUserMedia(constraints);
+    } on html.DomException catch (e) {
+      switch (e.name) {
+        case 'NotFoundError':
+        case 'DevicesNotFoundError':
+          throw CameraWebException(
+            cameraId,
+            CameraErrorCode.notFound,
+            'No camera found for the given camera options.',
+          );
+        case 'NotReadableError':
+        case 'TrackStartError':
+          throw CameraWebException(
+            cameraId,
+            CameraErrorCode.notReadable,
+            'The camera is not readable due to a hardware error '
+            'that prevented access to the device.',
+          );
+        case 'OverconstrainedError':
+        case 'ConstraintNotSatisfiedError':
+          throw CameraWebException(
+            cameraId,
+            CameraErrorCode.overconstrained,
+            'The camera options are impossible to satisfy.',
+          );
+        case 'NotAllowedError':
+        case 'PermissionDeniedError':
+          throw CameraWebException(
+            cameraId,
+            CameraErrorCode.permissionDenied,
+            'The camera cannot be used or the permission '
+            'to access the camera is not granted.',
+          );
+        case 'TypeError':
+          throw CameraWebException(
+            cameraId,
+            CameraErrorCode.type,
+            'The camera options are incorrect or attempted '
+            'to access the media input from an insecure context.',
+          );
+        case 'AbortError':
+          throw CameraWebException(
+            cameraId,
+            CameraErrorCode.abort,
+            'Some problem occurred that prevented the camera from being used.',
+          );
+        case 'SecurityError':
+          throw CameraWebException(
+            cameraId,
+            CameraErrorCode.security,
+            'The user media support is disabled in the current browser.',
+          );
+        default:
+          throw CameraWebException(
+            cameraId,
+            CameraErrorCode.unknown,
+            'An unknown error occurred when fetching the camera stream.',
+          );
+      }
+    } catch (_) {
+      throw CameraWebException(
+        cameraId,
+        CameraErrorCode.unknown,
+        'An unknown error occurred when fetching the camera stream.',
+      );
+    }
+  }
+
+ /// Returns the zoom level capability for the given [camera].
+ ///
+ /// Throws a [CameraWebException] if the zoom level is not supported
+ /// or the camera has not been initialized or started.
+ ZoomLevelCapability getZoomLevelCapabilityForCamera(
+ Camera camera,
+ ) {
+ final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices;
+ final Map<dynamic, dynamic>? supportedConstraints =
+ mediaDevices?.getSupportedConstraints();
+ final bool zoomLevelSupported =
+ supportedConstraints?[ZoomLevelCapability.constraintName] as bool? ??
+ false;
+
+ if (!zoomLevelSupported) {
+ throw CameraWebException(
+ camera.textureId,
+ CameraErrorCode.zoomLevelNotSupported,
+ 'The zoom level is not supported in the current browser.',
+ );
+ }
+
+ final List<html.MediaStreamTrack> videoTracks =
+ camera.stream?.getVideoTracks() ?? <html.MediaStreamTrack>[];
+
+ if (videoTracks.isNotEmpty) {
+ final html.MediaStreamTrack defaultVideoTrack = videoTracks.first;
+
+ /// The zoom level capability is represented by MediaSettingsRange.
+ /// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaSettingsRange
+ final Object zoomLevelCapability = defaultVideoTrack
+ .getCapabilities()[ZoomLevelCapability.constraintName]
+ as Object? ??
+ <dynamic, dynamic>{};
+
+ // The zoom level capability is a nested JS object, therefore
+ // we need to access its properties with the js_util library.
+ // See: https://api.dart.dev/stable/2.13.4/dart-js_util/getProperty.html
+ final num? minimumZoomLevel =
+ jsUtil.getProperty(zoomLevelCapability, 'min') as num?;
+ final num? maximumZoomLevel =
+ jsUtil.getProperty(zoomLevelCapability, 'max') as num?;
+
+ if (minimumZoomLevel != null && maximumZoomLevel != null) {
+ return ZoomLevelCapability(
+ minimum: minimumZoomLevel.toDouble(),
+ maximum: maximumZoomLevel.toDouble(),
+ videoTrack: defaultVideoTrack,
+ );
+ } else {
+ throw CameraWebException(
+ camera.textureId,
+ CameraErrorCode.zoomLevelNotSupported,
+ 'The zoom level is not supported by the current camera.',
+ );
+ }
+ } else {
+ throw CameraWebException(
+ camera.textureId,
+ CameraErrorCode.notStarted,
+ 'The camera has not been initialized or started.',
+ );
+ }
+ }
+
+ /// Returns a facing mode of the [videoTrack]
+ /// (null if the facing mode is not available).
+ String? getFacingModeForVideoTrack(html.MediaStreamTrack videoTrack) {
+ final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices;
+
+ // Throw a not supported exception if the current browser window
+ // does not support any media devices.
+ if (mediaDevices == null) {
+ throw PlatformException(
+ code: CameraErrorCode.notSupported.toString(),
+ message: 'The camera is not supported on this device.',
+ );
+ }
+
+ // Check if the camera facing mode is supported by the current browser.
+ final Map<dynamic, dynamic> supportedConstraints =
+ mediaDevices.getSupportedConstraints();
+ final bool facingModeSupported =
+ supportedConstraints[_facingModeKey] as bool? ?? false;
+
+ // Return null if the facing mode is not supported.
+ if (!facingModeSupported) {
+ return null;
+ }
+
+ // Extract the facing mode from the video track settings.
+ // The property may not be available if it's not supported
+ // by the browser or not available due to context.
+ //
+ // MediaTrackSettings:
+ // https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings
+ final Map<dynamic, dynamic> videoTrackSettings = videoTrack.getSettings();
+ final String? facingMode = videoTrackSettings[_facingModeKey] as String?;
+
+ if (facingMode == null) {
+ // If the facing mode does not exist in the video track settings,
+ // check for the facing mode in the video track capabilities.
+ //
+ // MediaTrackCapabilities:
+ // https://www.w3.org/TR/mediacapture-streams/#dom-mediatrackcapabilities
+
+ // Check if getting the video track capabilities is supported.
+ //
+ // The method may not be supported on Firefox.
+ // See: https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/getCapabilities#browser_compatibility
+ if (!jsUtil.hasProperty(videoTrack, 'getCapabilities')) {
+      // Return null if the video track capabilities are not supported.
+ return null;
+ }
+
+ final Map<dynamic, dynamic> videoTrackCapabilities =
+ videoTrack.getCapabilities();
+
+ // A list of facing mode capabilities as
+ // the camera may support multiple facing modes.
+ final List<String> facingModeCapabilities = List<String>.from(
+ (videoTrackCapabilities[_facingModeKey] as List<dynamic>?)
+ ?.cast<String>() ??
+ <String>[]);
+
+ if (facingModeCapabilities.isNotEmpty) {
+ final String facingModeCapability = facingModeCapabilities.first;
+ return facingModeCapability;
+ } else {
+ // Return null if there are no facing mode capabilities.
+ return null;
+ }
+ }
+
+ return facingMode;
+ }
+
+ /// Maps the given [facingMode] to [CameraLensDirection].
+ ///
+ /// The following values for the facing mode are supported:
+ /// https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings/facingMode
+ CameraLensDirection mapFacingModeToLensDirection(String facingMode) {
+ switch (facingMode) {
+ case 'user':
+ return CameraLensDirection.front;
+ case 'environment':
+ return CameraLensDirection.back;
+ case 'left':
+ case 'right':
+ default:
+ return CameraLensDirection.external;
+ }
+ }
+
+ /// Maps the given [facingMode] to [CameraType].
+ ///
+ /// See [CameraMetadata.facingMode] for more details.
+ CameraType mapFacingModeToCameraType(String facingMode) {
+ switch (facingMode) {
+ case 'user':
+ return CameraType.user;
+ case 'environment':
+ return CameraType.environment;
+ case 'left':
+ case 'right':
+ default:
+ return CameraType.user;
+ }
+ }
+
+ /// Maps the given [resolutionPreset] to [Size].
+ Size mapResolutionPresetToSize(ResolutionPreset resolutionPreset) {
+ switch (resolutionPreset) {
+ case ResolutionPreset.max:
+ case ResolutionPreset.ultraHigh:
+ return const Size(4096, 2160);
+ case ResolutionPreset.veryHigh:
+ return const Size(1920, 1080);
+ case ResolutionPreset.high:
+ return const Size(1280, 720);
+ case ResolutionPreset.medium:
+ return const Size(720, 480);
+ case ResolutionPreset.low:
+ return const Size(320, 240);
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // ignore: dead_code
+ return const Size(320, 240);
+ }
+
+ /// Maps the given [deviceOrientation] to [OrientationType].
+ String mapDeviceOrientationToOrientationType(
+ DeviceOrientation deviceOrientation,
+ ) {
+ switch (deviceOrientation) {
+ case DeviceOrientation.portraitUp:
+ return OrientationType.portraitPrimary;
+ case DeviceOrientation.landscapeLeft:
+ return OrientationType.landscapePrimary;
+ case DeviceOrientation.portraitDown:
+ return OrientationType.portraitSecondary;
+ case DeviceOrientation.landscapeRight:
+ return OrientationType.landscapeSecondary;
+ }
+ }
+
+ /// Maps the given [orientationType] to [DeviceOrientation].
+ DeviceOrientation mapOrientationTypeToDeviceOrientation(
+ String orientationType,
+ ) {
+ switch (orientationType) {
+ case OrientationType.portraitPrimary:
+ return DeviceOrientation.portraitUp;
+ case OrientationType.landscapePrimary:
+ return DeviceOrientation.landscapeLeft;
+ case OrientationType.portraitSecondary:
+ return DeviceOrientation.portraitDown;
+ case OrientationType.landscapeSecondary:
+ return DeviceOrientation.landscapeRight;
+ default:
+ return DeviceOrientation.portraitUp;
+ }
+ }
+}
diff --git a/packages/camera/camera_web/lib/src/camera_web.dart b/packages/camera/camera_web/lib/src/camera_web.dart
new file mode 100644
index 0000000..52fdc1c
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/camera_web.dart
@@ -0,0 +1,703 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html' as html;
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_web_plugins/flutter_web_plugins.dart';
+import 'package:stream_transform/stream_transform.dart';
+
+import 'camera.dart';
+import 'camera_service.dart';
+import 'types/types.dart';
+
+// The default error message, when the error is an empty string.
+// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/message
+const String _kDefaultErrorMessage =
+ 'No further diagnostic information can be determined or provided.';
+
+/// The web implementation of [CameraPlatform].
+///
+/// This class implements the `package:camera` functionality for the web.
+class CameraPlugin extends CameraPlatform {
+ /// Creates a new instance of [CameraPlugin]
+ /// with the given [cameraService].
+ CameraPlugin({required CameraService cameraService})
+ : _cameraService = cameraService;
+
+ /// Registers this class as the default instance of [CameraPlatform].
+ static void registerWith(Registrar registrar) {
+ CameraPlatform.instance = CameraPlugin(
+ cameraService: CameraService(),
+ );
+ }
+
+ final CameraService _cameraService;
+
+ /// The cameras managed by the [CameraPlugin].
+ @visibleForTesting
+ final Map<int, Camera> cameras = <int, Camera>{};
+ int _textureCounter = 1;
+
+ /// Metadata associated with each camera description.
+ /// Populated in [availableCameras].
+ @visibleForTesting
+ final Map<CameraDescription, CameraMetadata> camerasMetadata =
+ <CameraDescription, CameraMetadata>{};
+
+ /// The controller used to broadcast different camera events.
+ ///
+ /// It is `broadcast` as multiple controllers may subscribe
+ /// to different stream views of this controller.
+ @visibleForTesting
+ final StreamController<CameraEvent> cameraEventStreamController =
+ StreamController<CameraEvent>.broadcast();
+
+ final Map<int, StreamSubscription<html.Event>>
+ _cameraVideoErrorSubscriptions = <int, StreamSubscription<html.Event>>{};
+
+ final Map<int, StreamSubscription<html.Event>>
+ _cameraVideoAbortSubscriptions = <int, StreamSubscription<html.Event>>{};
+
+ final Map<int, StreamSubscription<html.MediaStreamTrack>>
+ _cameraEndedSubscriptions =
+ <int, StreamSubscription<html.MediaStreamTrack>>{};
+
+ final Map<int, StreamSubscription<html.ErrorEvent>>
+ _cameraVideoRecordingErrorSubscriptions =
+ <int, StreamSubscription<html.ErrorEvent>>{};
+
+ /// Returns a stream of camera events for the given [cameraId].
+ Stream<CameraEvent> _cameraEvents(int cameraId) =>
+ cameraEventStreamController.stream
+ .where((CameraEvent event) => event.cameraId == cameraId);
+
+ /// The current browser window used to access media devices.
+ @visibleForTesting
+ html.Window? window = html.window;
+
+ @override
+ Future<List<CameraDescription>> availableCameras() async {
+ try {
+ final html.MediaDevices? mediaDevices = window?.navigator.mediaDevices;
+ final List<CameraDescription> cameras = <CameraDescription>[];
+
+ // Throw a not supported exception if the current browser window
+ // does not support any media devices.
+ if (mediaDevices == null) {
+ throw PlatformException(
+ code: CameraErrorCode.notSupported.toString(),
+ message: 'The camera is not supported on this device.',
+ );
+ }
+
+ // Request video and audio permissions.
+ final html.MediaStream cameraStream =
+ await _cameraService.getMediaStreamForOptions(
+ const CameraOptions(
+ audio: AudioConstraints(enabled: true),
+ ),
+ );
+
+ // Release the camera stream used to request video and audio permissions.
+ cameraStream
+ .getVideoTracks()
+ .forEach((html.MediaStreamTrack videoTrack) => videoTrack.stop());
+
+ // Request available media devices.
+ final List<dynamic> devices = await mediaDevices.enumerateDevices();
+
+ // Filter video input devices.
+ final Iterable<html.MediaDeviceInfo> videoInputDevices = devices
+ .whereType<html.MediaDeviceInfo>()
+ .where((html.MediaDeviceInfo device) =>
+ device.kind == MediaDeviceKind.videoInput)
+
+ /// The device id property is currently not supported on Internet Explorer:
+ /// https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/deviceId#browser_compatibility
+ .where(
+ (html.MediaDeviceInfo device) =>
+ device.deviceId != null && device.deviceId!.isNotEmpty,
+ );
+
+ // Map video input devices to camera descriptions.
+ for (final html.MediaDeviceInfo videoInputDevice in videoInputDevices) {
+ // Get the video stream for the current video input device
+ // to later use for the available video tracks.
+ final html.MediaStream videoStream = await _getVideoStreamForDevice(
+ videoInputDevice.deviceId!,
+ );
+
+ // Get all video tracks in the video stream
+ // to later extract the lens direction from the first track.
+ final List<html.MediaStreamTrack> videoTracks =
+ videoStream.getVideoTracks();
+
+ if (videoTracks.isNotEmpty) {
+ // Get the facing mode from the first available video track.
+ final String? facingMode =
+ _cameraService.getFacingModeForVideoTrack(videoTracks.first);
+
+ // Get the lens direction based on the facing mode.
+ // Fallback to the external lens direction
+ // if the facing mode is not available.
+ final CameraLensDirection lensDirection = facingMode != null
+ ? _cameraService.mapFacingModeToLensDirection(facingMode)
+ : CameraLensDirection.external;
+
+ // Create a camera description.
+ //
+ // The name is a camera label which might be empty
+ // if no permissions to media devices have been granted.
+ //
+ // MediaDeviceInfo.label:
+ // https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/label
+ //
+ // Sensor orientation is currently not supported.
+ final String cameraLabel = videoInputDevice.label ?? '';
+ final CameraDescription camera = CameraDescription(
+ name: cameraLabel,
+ lensDirection: lensDirection,
+ sensorOrientation: 0,
+ );
+
+ final CameraMetadata cameraMetadata = CameraMetadata(
+ deviceId: videoInputDevice.deviceId!,
+ facingMode: facingMode,
+ );
+
+ cameras.add(camera);
+
+ camerasMetadata[camera] = cameraMetadata;
+
+ // Release the camera stream of the current video input device.
+ for (final html.MediaStreamTrack videoTrack in videoTracks) {
+ videoTrack.stop();
+ }
+ } else {
+ // Ignore as no video tracks exist in the current video input device.
+ continue;
+ }
+ }
+
+ return cameras;
+ } on html.DomException catch (e) {
+ throw CameraException(e.name, e.message);
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ } on CameraWebException catch (e) {
+ _addCameraErrorEvent(e);
+ throw CameraException(e.code.toString(), e.description);
+ }
+ }
+
+ @override
+ Future<int> createCamera(
+ CameraDescription cameraDescription,
+ ResolutionPreset? resolutionPreset, {
+ bool enableAudio = false,
+ }) async {
+ try {
+ if (!camerasMetadata.containsKey(cameraDescription)) {
+ throw PlatformException(
+ code: CameraErrorCode.missingMetadata.toString(),
+ message:
+ 'Missing camera metadata. Make sure to call `availableCameras` before creating a camera.',
+ );
+ }
+
+ final int textureId = _textureCounter++;
+
+ final CameraMetadata cameraMetadata = camerasMetadata[cameraDescription]!;
+
+ final CameraType? cameraType = cameraMetadata.facingMode != null
+ ? _cameraService.mapFacingModeToCameraType(cameraMetadata.facingMode!)
+ : null;
+
+ // Use the highest resolution possible
+ // if the resolution preset is not specified.
+ final Size videoSize = _cameraService
+ .mapResolutionPresetToSize(resolutionPreset ?? ResolutionPreset.max);
+
+ // Create a camera with the given audio and video constraints.
+ // Sensor orientation is currently not supported.
+ final Camera camera = Camera(
+ textureId: textureId,
+ cameraService: _cameraService,
+ options: CameraOptions(
+ audio: AudioConstraints(enabled: enableAudio),
+ video: VideoConstraints(
+ facingMode:
+ cameraType != null ? FacingModeConstraint(cameraType) : null,
+ width: VideoSizeConstraint(
+ ideal: videoSize.width.toInt(),
+ ),
+ height: VideoSizeConstraint(
+ ideal: videoSize.height.toInt(),
+ ),
+ deviceId: cameraMetadata.deviceId,
+ ),
+ ),
+ );
+
+ cameras[textureId] = camera;
+
+ return textureId;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> initializeCamera(
+ int cameraId, {
+ // The image format group is currently not supported.
+ ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+ }) async {
+ try {
+ final Camera camera = getCamera(cameraId);
+
+ await camera.initialize();
+
+ // Add camera's video error events to the camera events stream.
+ // The error event fires when the video element's source has failed to load, or can't be used.
+ _cameraVideoErrorSubscriptions[cameraId] =
+ camera.videoElement.onError.listen((html.Event _) {
+ // The Event itself (_) doesn't contain information about the actual error.
+ // We need to look at the HTMLMediaElement.error.
+ // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error
+ final html.MediaError error = camera.videoElement.error!;
+ final CameraErrorCode errorCode = CameraErrorCode.fromMediaError(error);
+ final String? errorMessage =
+ error.message != '' ? error.message : _kDefaultErrorMessage;
+
+ cameraEventStreamController.add(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: $errorCode, error message: $errorMessage',
+ ),
+ );
+ });
+
+ // Add camera's video abort events to the camera events stream.
+ // The abort event fires when the video element's source has not fully loaded.
+ _cameraVideoAbortSubscriptions[cameraId] =
+ camera.videoElement.onAbort.listen((html.Event _) {
+ cameraEventStreamController.add(
+ CameraErrorEvent(
+ cameraId,
+ "Error code: ${CameraErrorCode.abort}, error message: The video element's source has not fully loaded.",
+ ),
+ );
+ });
+
+ await camera.play();
+
+ // Add camera's closing events to the camera events stream.
+ // The onEnded stream fires when there is no more camera stream data.
+ _cameraEndedSubscriptions[cameraId] =
+ camera.onEnded.listen((html.MediaStreamTrack _) {
+ cameraEventStreamController.add(
+ CameraClosingEvent(cameraId),
+ );
+ });
+
+ final Size cameraSize = camera.getVideoSize();
+
+ cameraEventStreamController.add(
+ CameraInitializedEvent(
+ cameraId,
+ cameraSize.width,
+ cameraSize.height,
+ // TODO(bselwe): Add support for exposure mode and point (https://github.com/flutter/flutter/issues/86857).
+ ExposureMode.auto,
+ false,
+ // TODO(bselwe): Add support for focus mode and point (https://github.com/flutter/flutter/issues/86858).
+ FocusMode.auto,
+ false,
+ ),
+ );
+ } on html.DomException catch (e) {
+ throw PlatformException(code: e.name, message: e.message);
+ } on CameraWebException catch (e) {
+ _addCameraErrorEvent(e);
+ throw PlatformException(code: e.code.toString(), message: e.description);
+ }
+ }
+
+ @override
+ Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
+ }
+
+ /// Emits an empty stream as there is no event corresponding to a change
+ /// in the camera resolution on the web.
+ ///
+ /// In order to change the camera resolution a new camera with appropriate
+ /// [CameraOptions.video] constraints has to be created and initialized.
+ @override
+ Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
+ return const Stream<CameraResolutionChangedEvent>.empty();
+ }
+
+ @override
+ Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
+ }
+
+ @override
+ Stream<CameraErrorEvent> onCameraError(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraErrorEvent>();
+ }
+
+ @override
+ Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
+ return getCamera(cameraId).onVideoRecordedEvent;
+ }
+
+ @override
+ Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+ final html.ScreenOrientation? orientation = window?.screen?.orientation;
+
+ if (orientation != null) {
+ // Create an initial orientation event that emits the device orientation
+ // as soon as subscribed to this stream.
+ final html.Event initialOrientationEvent = html.Event('change');
+
+ return orientation.onChange.startWith(initialOrientationEvent).map(
+ (html.Event _) {
+ final DeviceOrientation deviceOrientation = _cameraService
+ .mapOrientationTypeToDeviceOrientation(orientation.type!);
+ return DeviceOrientationChangedEvent(deviceOrientation);
+ },
+ );
+ } else {
+ return const Stream<DeviceOrientationChangedEvent>.empty();
+ }
+ }
+
+  @override
+  Future<void> lockCaptureOrientation(
+    int cameraId,
+    DeviceOrientation orientation,
+  ) async {
+    try {
+      final html.ScreenOrientation? screenOrientation =
+          window?.screen?.orientation;
+      final html.Element? documentElement = window?.document.documentElement;
+
+      if (screenOrientation != null && documentElement != null) {
+        final String orientationType =
+            _cameraService.mapDeviceOrientationToOrientationType(orientation);
+
+        // Full-screen mode may be required to modify the device orientation.
+        // See: https://w3c.github.io/screen-orientation/#interaction-with-fullscreen-api
+        // Recent versions of Dart changed requestFullscreen to return a Future instead of void.
+        // This wrapper allows use of both the old and new APIs.
+        dynamic fullScreen() => documentElement.requestFullscreen();
+        await fullScreen();
+        await screenOrientation.lock(orientationType);
+      } else {
+        // The Screen Orientation API is not exposed by this browser.
+        throw PlatformException(
+          code: CameraErrorCode.orientationNotSupported.toString(),
+          message: 'Orientation is not supported in the current browser.',
+        );
+      }
+    } on html.DomException catch (e) {
+      // Convert browser errors (e.g. permission failures) to the
+      // platform-interface exception type.
+      throw PlatformException(code: e.name, message: e.message);
+    }
+  }
+
+  @override
+  Future<void> unlockCaptureOrientation(int cameraId) async {
+    try {
+      final html.ScreenOrientation? orientation = window?.screen?.orientation;
+      // documentElement is checked for parity with lockCaptureOrientation
+      // even though unlock itself does not use it — presumably so both
+      // methods fail on the same browsers; verify if intentional.
+      final html.Element? documentElement = window?.document.documentElement;
+
+      if (orientation != null && documentElement != null) {
+        orientation.unlock();
+      } else {
+        throw PlatformException(
+          code: CameraErrorCode.orientationNotSupported.toString(),
+          message: 'Orientation is not supported in the current browser.',
+        );
+      }
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    }
+  }
+
+  @override
+  Future<XFile> takePicture(int cameraId) async {
+    try {
+      // Await inside the try block so that errors completing the returned
+      // Future asynchronously are also converted below; previously only
+      // synchronously thrown errors were caught, letting raw DomException /
+      // CameraWebException escape to callers.
+      return await getCamera(cameraId).takePicture();
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      // Report the error on the camera events stream before rethrowing.
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  @override
+  Future<void> prepareForVideoRecording() async {
+    // This is a no-op as it is not required for the web.
+  }
+
+  @override
+  Future<void> startVideoRecording(int cameraId, {Duration? maxVideoDuration}) {
+    // Thin wrapper over startVideoCapturing, kept for platform-interface
+    // compatibility.
+    return startVideoCapturing(
+        VideoCaptureOptions(cameraId, maxDuration: maxVideoDuration));
+  }
+
+  @override
+  Future<void> startVideoCapturing(VideoCaptureOptions options) {
+    // Image streaming alongside recording is not supported on web.
+    if (options.streamCallback != null || options.streamOptions != null) {
+      throw UnimplementedError('Streaming is not currently supported on web');
+    }
+
+    try {
+      final Camera camera = getCamera(options.cameraId);
+
+      // Add camera's video recording errors to the camera events stream.
+      // The error event fires when the video recording is not allowed or an unsupported
+      // codec is used.
+      // NOTE(review): any existing subscription for this camera id is
+      // overwritten without being cancelled — verify recording cannot be
+      // started twice without an intervening stop/dispose.
+      _cameraVideoRecordingErrorSubscriptions[options.cameraId] =
+          camera.onVideoRecordingError.listen((html.ErrorEvent errorEvent) {
+        cameraEventStreamController.add(
+          CameraErrorEvent(
+            options.cameraId,
+            'Error code: ${errorEvent.type}, error message: ${errorEvent.message}.',
+          ),
+        );
+      });
+
+      return camera.startVideoRecording(maxVideoDuration: options.maxDuration);
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  @override
+  Future<XFile> stopVideoRecording(int cameraId) async {
+    try {
+      final XFile videoRecording =
+          await getCamera(cameraId).stopVideoRecording();
+      // Stop forwarding recording errors once the recording has ended.
+      await _cameraVideoRecordingErrorSubscriptions[cameraId]?.cancel();
+      return videoRecording;
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  @override
+  Future<void> pauseVideoRecording(int cameraId) async {
+    try {
+      // Await so errors completing the Future asynchronously are converted
+      // below as well; previously only synchronously thrown errors were
+      // caught.
+      await getCamera(cameraId).pauseVideoRecording();
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  @override
+  Future<void> resumeVideoRecording(int cameraId) async {
+    try {
+      // Await for the same reason as in pauseVideoRecording.
+      await getCamera(cameraId).resumeVideoRecording();
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  @override
+  Future<void> setFlashMode(int cameraId, FlashMode mode) async {
+    try {
+      // setFlashMode on the underlying camera is called without await,
+      // so only synchronously thrown errors reach the handlers below.
+      getCamera(cameraId).setFlashMode(mode);
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  // Exposure and focus control are not implemented for web; all of the
+  // methods below throw UnimplementedError.
+  @override
+  Future<void> setExposureMode(int cameraId, ExposureMode mode) {
+    throw UnimplementedError('setExposureMode() is not implemented.');
+  }
+
+  @override
+  Future<void> setExposurePoint(int cameraId, Point<double>? point) {
+    throw UnimplementedError('setExposurePoint() is not implemented.');
+  }
+
+  @override
+  Future<double> getMinExposureOffset(int cameraId) {
+    throw UnimplementedError('getMinExposureOffset() is not implemented.');
+  }
+
+  @override
+  Future<double> getMaxExposureOffset(int cameraId) {
+    throw UnimplementedError('getMaxExposureOffset() is not implemented.');
+  }
+
+  @override
+  Future<double> getExposureOffsetStepSize(int cameraId) {
+    throw UnimplementedError('getExposureOffsetStepSize() is not implemented.');
+  }
+
+  @override
+  Future<double> setExposureOffset(int cameraId, double offset) {
+    throw UnimplementedError('setExposureOffset() is not implemented.');
+  }
+
+  @override
+  Future<void> setFocusMode(int cameraId, FocusMode mode) {
+    throw UnimplementedError('setFocusMode() is not implemented.');
+  }
+
+  @override
+  Future<void> setFocusPoint(int cameraId, Point<double>? point) {
+    throw UnimplementedError('setFocusPoint() is not implemented.');
+  }
+
+  @override
+  Future<double> getMaxZoomLevel(int cameraId) async {
+    try {
+      return getCamera(cameraId).getMaxZoomLevel();
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  @override
+  Future<double> getMinZoomLevel(int cameraId) async {
+    try {
+      return getCamera(cameraId).getMinZoomLevel();
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  @override
+  Future<void> setZoomLevel(int cameraId, double zoom) async {
+    try {
+      getCamera(cameraId).setZoomLevel(zoom);
+    } on html.DomException catch (e) {
+      // NOTE(review): unlike every other method in this class, setZoomLevel
+      // throws CameraException instead of PlatformException (and re-maps the
+      // PlatformException from getCamera). Changing this would alter the
+      // exception type callers may catch, so it is documented, not changed.
+      throw CameraException(e.name, e.message);
+    } on PlatformException catch (e) {
+      throw CameraException(e.code, e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw CameraException(e.code.toString(), e.description);
+    }
+  }
+
+  @override
+  Future<void> pausePreview(int cameraId) async {
+    try {
+      // Pausing is synchronous on the underlying camera.
+      getCamera(cameraId).pause();
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    }
+  }
+
+  @override
+  Future<void> resumePreview(int cameraId) async {
+    try {
+      await getCamera(cameraId).play();
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    } on CameraWebException catch (e) {
+      _addCameraErrorEvent(e);
+      throw PlatformException(code: e.code.toString(), message: e.description);
+    }
+  }
+
+  @override
+  Widget buildPreview(int cameraId) {
+    // Embed the camera's HTML element by its registered view type.
+    return HtmlElementView(
+      viewType: getCamera(cameraId).getViewType(),
+    );
+  }
+
+  @override
+  Future<void> dispose(int cameraId) async {
+    try {
+      await getCamera(cameraId).dispose();
+      // Cancel every event subscription associated with this camera.
+      await _cameraVideoErrorSubscriptions[cameraId]?.cancel();
+      await _cameraVideoAbortSubscriptions[cameraId]?.cancel();
+      await _cameraEndedSubscriptions[cameraId]?.cancel();
+      await _cameraVideoRecordingErrorSubscriptions[cameraId]?.cancel();
+
+      cameras.remove(cameraId);
+      _cameraVideoErrorSubscriptions.remove(cameraId);
+      _cameraVideoAbortSubscriptions.remove(cameraId);
+      _cameraEndedSubscriptions.remove(cameraId);
+      // Also drop the recording-error subscription entry; previously it was
+      // cancelled above but left in the map, leaking one entry per disposed
+      // camera.
+      _cameraVideoRecordingErrorSubscriptions.remove(cameraId);
+    } on html.DomException catch (e) {
+      throw PlatformException(code: e.name, message: e.message);
+    }
+  }
+
+  /// Returns a media video stream for the device with the given [deviceId].
+  Future<html.MediaStream> _getVideoStreamForDevice(
+    String deviceId,
+  ) {
+    // Create camera options with the desired device id.
+    final CameraOptions cameraOptions = CameraOptions(
+      video: VideoConstraints(deviceId: deviceId),
+    );
+
+    return _cameraService.getMediaStreamForOptions(cameraOptions);
+  }
+
+  /// Returns a camera for the given [cameraId].
+  ///
+  /// Throws a [PlatformException] with [CameraErrorCode.notFound]
+  /// if the camera does not exist.
+  @visibleForTesting
+  Camera getCamera(int cameraId) {
+    final Camera? camera = cameras[cameraId];
+
+    if (camera == null) {
+      throw PlatformException(
+        code: CameraErrorCode.notFound.toString(),
+        message: 'No camera found for the given camera id $cameraId.',
+      );
+    }
+
+    return camera;
+  }
+
+  /// Adds a [CameraErrorEvent], associated with the [exception],
+  /// to the stream of camera events.
+  void _addCameraErrorEvent(CameraWebException exception) {
+    cameraEventStreamController.add(
+      CameraErrorEvent(
+        exception.cameraId,
+        'Error code: ${exception.code}, error message: ${exception.description}',
+      ),
+    );
+  }
+}
diff --git a/packages/camera/camera_web/lib/src/shims/dart_js_util.dart b/packages/camera/camera_web/lib/src/shims/dart_js_util.dart
new file mode 100644
index 0000000..7d766e8
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/shims/dart_js_util.dart
@@ -0,0 +1,15 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:js_util' as js_util;
+
+/// A utility that shims dart:js_util to manipulate JavaScript interop objects.
+///
+/// Wrapping the top-level dart:js_util functions as instance methods —
+/// presumably so they can be substituted in tests; confirm with callers.
+class JsUtil {
+  /// Returns true if the object [o] has the property [name].
+  bool hasProperty(Object o, Object name) => js_util.hasProperty(o, name);
+
+  /// Returns the value of the property [name] in the object [o].
+  dynamic getProperty(Object o, Object name) =>
+      js_util.getProperty<dynamic>(o, name);
+}
diff --git a/packages/camera/camera_web/lib/src/shims/dart_ui.dart b/packages/camera/camera_web/lib/src/shims/dart_ui.dart
new file mode 100644
index 0000000..3a32721
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/shims/dart_ui.dart
@@ -0,0 +1,10 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// This file shims dart:ui in web-only scenarios, getting rid of the need to
+/// suppress analyzer warnings.
+
+// TODO(ditman): Remove this file once web-only dart:ui APIs are exposed from
+// a dedicated place. https://github.com/flutter/flutter/issues/55000
+export 'dart_ui_fake.dart' if (dart.library.html) 'dart_ui_real.dart';
diff --git a/packages/camera/camera_web/lib/src/shims/dart_ui_fake.dart b/packages/camera/camera_web/lib/src/shims/dart_ui_fake.dart
new file mode 100644
index 0000000..40d8f19
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/shims/dart_ui_fake.dart
@@ -0,0 +1,33 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html' as html;
+
+// Fake interface for the logic that this package needs from (web-only) dart:ui.
+// This is conditionally exported so the analyzer sees these methods as available.
+
+// ignore_for_file: avoid_classes_with_only_static_members
+// ignore_for_file: camel_case_types
+
+/// Shim for web_ui engine.PlatformViewRegistry
+/// https://github.com/flutter/engine/blob/main/lib/web_ui/lib/ui.dart#L62
+class platformViewRegistry {
+  /// Shim for registerViewFactory
+  /// https://github.com/flutter/engine/blob/main/lib/web_ui/lib/ui.dart#L72
+  static bool registerViewFactory(
+      String viewTypeId, html.Element Function(int viewId) viewFactory) {
+    // No-op stub: this file is the non-web branch of a conditional export,
+    // so nothing is actually registered.
+    return false;
+  }
+}
+
+/// Shim for web_ui engine.AssetManager.
+/// https://github.com/flutter/engine/blob/main/lib/web_ui/lib/src/engine/assets.dart#L12
+class webOnlyAssetManager {
+  /// Shim for getAssetUrl.
+  /// https://github.com/flutter/engine/blob/main/lib/web_ui/lib/src/engine/assets.dart#L45
+  static String getAssetUrl(String asset) => '';
+}
+
+/// Signature of callbacks that have no arguments and return no data.
+typedef VoidCallback = void Function();
diff --git a/packages/camera/camera_web/lib/src/shims/dart_ui_real.dart b/packages/camera/camera_web/lib/src/shims/dart_ui_real.dart
new file mode 100644
index 0000000..276b768
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/shims/dart_ui_real.dart
@@ -0,0 +1,5 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'dart:ui';
diff --git a/packages/camera/camera_web/lib/src/types/camera_error_code.dart b/packages/camera/camera_web/lib/src/types/camera_error_code.dart
new file mode 100644
index 0000000..8f1831f
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/types/camera_error_code.dart
@@ -0,0 +1,95 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html' as html;
+
+/// Error codes that may occur during the camera initialization,
+/// configuration or video streaming.
+class CameraErrorCode {
+  // Private constructor: the set of codes is closed apart from the values
+  // produced by [fromMediaError].
+  const CameraErrorCode._(this._type);
+
+  // The string representation reported to the platform interface.
+  final String _type;
+
+  @override
+  String toString() => _type;
+
+  /// The camera is not supported.
+  static const CameraErrorCode notSupported =
+      CameraErrorCode._('cameraNotSupported');
+
+  /// The camera is not found.
+  static const CameraErrorCode notFound = CameraErrorCode._('cameraNotFound');
+
+  /// The camera is not readable.
+  static const CameraErrorCode notReadable =
+      CameraErrorCode._('cameraNotReadable');
+
+  /// The camera options are impossible to satisfy.
+  static const CameraErrorCode overconstrained =
+      CameraErrorCode._('cameraOverconstrained');
+
+  /// The camera cannot be used or the permission
+  /// to access the camera is not granted.
+  static const CameraErrorCode permissionDenied =
+      CameraErrorCode._('CameraAccessDenied');
+
+  /// The camera options are incorrect or attempted
+  /// to access the media input from an insecure context.
+  static const CameraErrorCode type = CameraErrorCode._('cameraType');
+
+  /// Some problem occurred that prevented the camera from being used.
+  static const CameraErrorCode abort = CameraErrorCode._('cameraAbort');
+
+  /// The user media support is disabled in the current browser.
+  static const CameraErrorCode security = CameraErrorCode._('cameraSecurity');
+
+  /// The camera metadata is missing.
+  static const CameraErrorCode missingMetadata =
+      CameraErrorCode._('cameraMissingMetadata');
+
+  /// The camera orientation is not supported.
+  static const CameraErrorCode orientationNotSupported =
+      CameraErrorCode._('orientationNotSupported');
+
+  /// The camera torch mode is not supported.
+  static const CameraErrorCode torchModeNotSupported =
+      CameraErrorCode._('torchModeNotSupported');
+
+  /// The camera zoom level is not supported.
+  static const CameraErrorCode zoomLevelNotSupported =
+      CameraErrorCode._('zoomLevelNotSupported');
+
+  /// The camera zoom level is invalid.
+  static const CameraErrorCode zoomLevelInvalid =
+      CameraErrorCode._('zoomLevelInvalid');
+
+  /// The camera has not been initialized or started.
+  static const CameraErrorCode notStarted =
+      CameraErrorCode._('cameraNotStarted');
+
+  /// The video recording was not started.
+  static const CameraErrorCode videoRecordingNotStarted =
+      CameraErrorCode._('videoRecordingNotStarted');
+
+  /// An unknown camera error.
+  static const CameraErrorCode unknown = CameraErrorCode._('cameraUnknown');
+
+  /// Returns a camera error code based on the media error.
+  ///
+  /// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
+  static CameraErrorCode fromMediaError(html.MediaError error) {
+    switch (error.code) {
+      case html.MediaError.MEDIA_ERR_ABORTED:
+        return const CameraErrorCode._('mediaErrorAborted');
+      case html.MediaError.MEDIA_ERR_NETWORK:
+        return const CameraErrorCode._('mediaErrorNetwork');
+      case html.MediaError.MEDIA_ERR_DECODE:
+        return const CameraErrorCode._('mediaErrorDecode');
+      case html.MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED:
+        return const CameraErrorCode._('mediaErrorSourceNotSupported');
+      default:
+        // Unrecognized or future MediaError codes map to a generic value.
+        return const CameraErrorCode._('mediaErrorUnknown');
+    }
+  }
+}
diff --git a/packages/camera/camera_web/lib/src/types/camera_metadata.dart b/packages/camera/camera_web/lib/src/types/camera_metadata.dart
new file mode 100644
index 0000000..e5c6b38
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/types/camera_metadata.dart
@@ -0,0 +1,40 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+
+/// Metadata used alongside the camera description
+/// to store additional web-specific camera details.
+@immutable
+class CameraMetadata {
+  /// Creates a new instance of [CameraMetadata]
+  /// with the given [deviceId] and [facingMode].
+  const CameraMetadata({required this.deviceId, required this.facingMode});
+
+  /// Uniquely identifies the camera device.
+  ///
+  /// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/deviceId
+  final String deviceId;
+
+  /// Describes the direction the camera is facing towards.
+  /// May be `user`, `environment`, `left`, `right`
+  /// or null if the facing mode is not available.
+  ///
+  /// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings/facingMode
+  final String? facingMode;
+
+  @override
+  bool operator ==(Object other) {
+    if (identical(this, other)) {
+      return true;
+    }
+
+    return other is CameraMetadata &&
+        other.deviceId == deviceId &&
+        other.facingMode == facingMode;
+  }
+
+  @override
+  // Hash the field values directly; the previous code passed the fields'
+  // own hashCodes to Object.hash, redundantly hashing twice.
+  int get hashCode => Object.hash(deviceId, facingMode);
+}
diff --git a/packages/camera/camera_web/lib/src/types/camera_options.dart b/packages/camera/camera_web/lib/src/types/camera_options.dart
new file mode 100644
index 0000000..08491b5
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/types/camera_options.dart
@@ -0,0 +1,274 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/foundation.dart';
+
+/// Options used to create a camera with the given
+/// [audio] and [video] media constraints.
+///
+/// These options represent web `MediaStreamConstraints`
+/// and can be used to request the browser for media streams
+/// with audio and video tracks containing the requested types of media.
+///
+/// https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamConstraints
+@immutable
+class CameraOptions {
+  /// Creates a new instance of [CameraOptions]
+  /// with the given [audio] and [video] constraints.
+  ///
+  /// Omitted constraints default to disabled audio and
+  /// unconstrained video.
+  const CameraOptions({
+    AudioConstraints? audio,
+    VideoConstraints? video,
+  })  : audio = audio ?? const AudioConstraints(),
+        video = video ?? const VideoConstraints();
+
+  /// The audio constraints for the camera.
+  final AudioConstraints audio;
+
+  /// The video constraints for the camera.
+  final VideoConstraints video;
+
+  /// Converts the current instance to a Map.
+  ///
+  /// The shape mirrors `MediaStreamConstraints`: an `audio` and a
+  /// `video` entry, each serialized by the respective constraint class.
+  Map<String, dynamic> toJson() {
+    return <String, Object>{
+      'audio': audio.toJson(),
+      'video': video.toJson(),
+    };
+  }
+
+  @override
+  bool operator ==(Object other) {
+    if (identical(this, other)) {
+      return true;
+    }
+
+    return other is CameraOptions &&
+        other.audio == audio &&
+        other.video == video;
+  }
+
+  @override
+  int get hashCode => Object.hash(audio, video);
+}
+
+/// Indicates whether the audio track is requested.
+///
+/// By default, the audio track is not requested.
+@immutable
+class AudioConstraints {
+  /// Creates a new instance of [AudioConstraints]
+  /// with the given [enabled] constraint.
+  const AudioConstraints({this.enabled = false});
+
+  /// Whether the audio track should be enabled.
+  final bool enabled;
+
+  /// Converts the current instance to a Map.
+  ///
+  /// Serialized as a bare boolean rather than a map of constraints.
+  Object toJson() => enabled;
+
+  @override
+  bool operator ==(Object other) {
+    if (identical(this, other)) {
+      return true;
+    }
+
+    return other is AudioConstraints && other.enabled == enabled;
+  }
+
+  @override
+  int get hashCode => enabled.hashCode;
+}
+
+/// Defines constraints that the video track must have
+/// to be considered acceptable.
+@immutable
+class VideoConstraints {
+  /// Creates a new instance of [VideoConstraints]
+  /// with the given constraints.
+  ///
+  /// All constraints are optional; omitted ones are not serialized.
+  const VideoConstraints({
+    this.facingMode,
+    this.width,
+    this.height,
+    this.deviceId,
+  });
+
+  /// The facing mode of the video track.
+  final FacingModeConstraint? facingMode;
+
+  /// The width of the video track.
+  final VideoSizeConstraint? width;
+
+  /// The height of the video track.
+  final VideoSizeConstraint? height;
+
+  /// The device id of the video track.
+  final String? deviceId;
+
+  /// Converts the current instance to a Map.
+  ///
+  /// Only non-null constraints are included; an empty map results
+  /// when nothing is constrained.
+  Object toJson() {
+    final Map<String, dynamic> json = <String, dynamic>{};
+
+    if (width != null) {
+      json['width'] = width!.toJson();
+    }
+    if (height != null) {
+      json['height'] = height!.toJson();
+    }
+    if (facingMode != null) {
+      json['facingMode'] = facingMode!.toJson();
+    }
+    if (deviceId != null) {
+      // The device id is serialized as an exact (mandatory) constraint.
+      json['deviceId'] = <String, Object>{'exact': deviceId!};
+    }
+
+    return json;
+  }
+
+  @override
+  bool operator ==(Object other) {
+    if (identical(this, other)) {
+      return true;
+    }
+
+    return other is VideoConstraints &&
+        other.facingMode == facingMode &&
+        other.width == width &&
+        other.height == height &&
+        other.deviceId == deviceId;
+  }
+
+  @override
+  int get hashCode => Object.hash(facingMode, width, height, deviceId);
+}
+
+/// The camera type used in [FacingModeConstraint].
+///
+/// Specifies whether the requested camera should be facing away
+/// or toward the user.
+class CameraType {
+  // Private constructor: the set of types is limited to the constants below.
+  const CameraType._(this._type);
+
+  final String _type;
+
+  @override
+  String toString() => _type;
+
+  /// The camera is facing away from the user, viewing their environment.
+  /// This includes the back camera on a smartphone.
+  static const CameraType environment = CameraType._('environment');
+
+  /// The camera is facing toward the user.
+  /// This includes the front camera on a smartphone.
+  static const CameraType user = CameraType._('user');
+}
+
+/// Indicates the direction in which the desired camera should be pointing.
+@immutable
+class FacingModeConstraint {
+  /// Creates a new instance of [FacingModeConstraint]
+  /// with [ideal] constraint set to [type].
+  factory FacingModeConstraint(CameraType type) =>
+      FacingModeConstraint._(ideal: type);
+
+  /// Creates a new instance of [FacingModeConstraint]
+  /// with the given [ideal] and [exact] constraints.
+  const FacingModeConstraint._({this.ideal, this.exact});
+
+  /// Creates a new instance of [FacingModeConstraint]
+  /// with [exact] constraint set to [type].
+  factory FacingModeConstraint.exact(CameraType type) =>
+      FacingModeConstraint._(exact: type);
+
+  /// The ideal facing mode constraint.
+  ///
+  /// If this constraint is used, then the camera would ideally have
+  /// the desired facing [type] but it may be considered optional.
+  final CameraType? ideal;
+
+  /// The exact facing mode constraint.
+  ///
+  /// If this constraint is used, then the camera must have
+  /// the desired facing [type] to be considered acceptable.
+  final CameraType? exact;
+
+  /// Converts the current instance to a Map.
+  ///
+  /// Uses [CameraType.toString] for the serialized values; only non-null
+  /// constraints are included.
+  Object toJson() {
+    return <String, Object>{
+      if (ideal != null) 'ideal': ideal.toString(),
+      if (exact != null) 'exact': exact.toString(),
+    };
+  }
+
+  @override
+  bool operator ==(Object other) {
+    if (identical(this, other)) {
+      return true;
+    }
+
+    return other is FacingModeConstraint &&
+        other.ideal == ideal &&
+        other.exact == exact;
+  }
+
+  @override
+  int get hashCode => Object.hash(ideal, exact);
+}
+
+/// The size of the requested video track used in
+/// [VideoConstraints.width] and [VideoConstraints.height].
+///
+/// The obtained video track will have a size between [minimum] and [maximum]
+/// with ideally a size of [ideal]. The size is determined by
+/// the capabilities of the hardware and the other specified constraints.
+@immutable
+class VideoSizeConstraint {
+  /// Creates a new instance of [VideoSizeConstraint] with the given
+  /// [minimum], [ideal] and [maximum] constraints.
+  const VideoSizeConstraint({this.minimum, this.ideal, this.maximum});
+
+  /// The minimum video size.
+  final int? minimum;
+
+  /// The ideal video size.
+  ///
+  /// The video would ideally have the [ideal] size
+  /// but it may be considered optional. If not possible
+  /// to satisfy, the size will be as close as possible
+  /// to [ideal].
+  final int? ideal;
+
+  /// The maximum video size.
+  final int? maximum;
+
+  /// Converts the current instance to a Map.
+  ///
+  /// [minimum] and [maximum] serialize to the `min`/`max` keys used by
+  /// media constraints; only non-null values are included.
+  Object toJson() {
+    final Map<String, dynamic> json = <String, dynamic>{};
+
+    if (ideal != null) {
+      json['ideal'] = ideal;
+    }
+    if (minimum != null) {
+      json['min'] = minimum;
+    }
+    if (maximum != null) {
+      json['max'] = maximum;
+    }
+
+    return json;
+  }
+
+  @override
+  bool operator ==(Object other) {
+    if (identical(this, other)) {
+      return true;
+    }
+
+    return other is VideoSizeConstraint &&
+        other.minimum == minimum &&
+        other.ideal == ideal &&
+        other.maximum == maximum;
+  }
+
+  @override
+  int get hashCode => Object.hash(minimum, ideal, maximum);
+}
diff --git a/packages/camera/camera_web/lib/src/types/camera_web_exception.dart b/packages/camera/camera_web/lib/src/types/camera_web_exception.dart
new file mode 100644
index 0000000..e6c6d7a
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/types/camera_web_exception.dart
@@ -0,0 +1,29 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'types.dart';
+
+/// An exception thrown when the camera with id [cameraId] reports
+/// an initialization, configuration or video streaming error,
+/// or enters into an unexpected state.
+///
+/// This error should be emitted on the `onCameraError` stream
+/// of the camera platform.
+class CameraWebException implements Exception {
+  /// Creates a new instance of [CameraWebException]
+  /// with the given error [cameraId], [code] and [description].
+  CameraWebException(this.cameraId, this.code, this.description);
+
+  /// The id of the camera this exception is associated with.
+  // NOTE(review): these fields are mutable; consider making them `final`
+  // unless a caller relies on reassigning them.
+  int cameraId;
+
+  /// The error code of this exception.
+  CameraErrorCode code;
+
+  /// The description of this exception.
+  String description;
+
+  @override
+  String toString() => 'CameraWebException($cameraId, $code, $description)';
+}
diff --git a/packages/camera/camera_web/lib/src/types/media_device_kind.dart b/packages/camera/camera_web/lib/src/types/media_device_kind.dart
new file mode 100644
index 0000000..3607bb2
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/types/media_device_kind.dart
@@ -0,0 +1,17 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// A kind of a media device.
+///
+/// The constant values match the `kind` strings reported by the browser.
+///
+/// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/kind
+abstract class MediaDeviceKind {
+  /// A video input media device kind.
+  static const String videoInput = 'videoinput';
+
+  /// An audio input media device kind.
+  static const String audioInput = 'audioinput';
+
+  /// An audio output media device kind.
+  static const String audioOutput = 'audiooutput';
+}
diff --git a/packages/camera/camera_web/lib/src/types/orientation_type.dart b/packages/camera/camera_web/lib/src/types/orientation_type.dart
new file mode 100644
index 0000000..717f5f3
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/types/orientation_type.dart
@@ -0,0 +1,26 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+
+/// A screen orientation type.
+///
+/// The constant values match the `type` strings reported by the browser's
+/// Screen Orientation API.
+///
+/// See: https://developer.mozilla.org/en-US/docs/Web/API/ScreenOrientation/type
+abstract class OrientationType {
+  /// The primary portrait mode orientation.
+  /// Corresponds to [DeviceOrientation.portraitUp].
+  static const String portraitPrimary = 'portrait-primary';
+
+  /// The secondary portrait mode orientation.
+  /// Corresponds to [DeviceOrientation.portraitSecondary].
+  static const String portraitSecondary = 'portrait-secondary';
+
+  /// The primary landscape mode orientation.
+  /// Corresponds to [DeviceOrientation.landscapeLeft].
+  static const String landscapePrimary = 'landscape-primary';
+
+  /// The secondary landscape mode orientation.
+  /// Corresponds to [DeviceOrientation.landscapeRight].
+  static const String landscapeSecondary = 'landscape-secondary';
+}
diff --git a/packages/camera/camera_web/lib/src/types/types.dart b/packages/camera/camera_web/lib/src/types/types.dart
new file mode 100644
index 0000000..72d7fb8
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/types/types.dart
@@ -0,0 +1,10 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+export 'camera_error_code.dart';
+export 'camera_metadata.dart';
+export 'camera_options.dart';
+export 'camera_web_exception.dart';
+export 'media_device_kind.dart';
+export 'orientation_type.dart';
+export 'zoom_level_capability.dart';
diff --git a/packages/camera/camera_web/lib/src/types/zoom_level_capability.dart b/packages/camera/camera_web/lib/src/types/zoom_level_capability.dart
new file mode 100644
index 0000000..d20bd25
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/types/zoom_level_capability.dart
@@ -0,0 +1,49 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html' as html;
+
+import 'package:flutter/foundation.dart';
+
+/// The possible range of values for the zoom level configurable
+/// on the camera video track.
+@immutable
+class ZoomLevelCapability {
+  /// Creates a new instance of [ZoomLevelCapability] with the given
+  /// zoom level range of [minimum] to [maximum] configurable
+  /// on the [videoTrack].
+  const ZoomLevelCapability({
+    required this.minimum,
+    required this.maximum,
+    required this.videoTrack,
+  });
+
+  /// The zoom level constraint name.
+  /// See: https://w3c.github.io/mediacapture-image/#dom-mediatracksupportedconstraints-zoom
+  static const String constraintName = 'zoom';
+
+  /// The minimum zoom level.
+  final double minimum;
+
+  /// The maximum zoom level.
+  final double maximum;
+
+  /// The video track capable of configuring the zoom level.
+  final html.MediaStreamTrack videoTrack;
+
+  @override
+  bool operator ==(Object other) {
+    if (identical(this, other)) {
+      return true;
+    }
+
+    // NOTE(review): videoTrack is compared with `==`, which for browser
+    // objects is presumably identity-based — two capabilities compare equal
+    // only when they wrap the same track instance; confirm this is intended.
+    return other is ZoomLevelCapability &&
+        other.minimum == minimum &&
+        other.maximum == maximum &&
+        other.videoTrack == videoTrack;
+  }
+
+  @override
+  int get hashCode => Object.hash(minimum, maximum, videoTrack);
+}
diff --git a/packages/camera/camera_web/pubspec.yaml b/packages/camera/camera_web/pubspec.yaml
new file mode 100644
index 0000000..101444b
--- /dev/null
+++ b/packages/camera/camera_web/pubspec.yaml
@@ -0,0 +1,29 @@
+name: camera_web
+description: A Flutter plugin for getting information about and controlling the camera on Web.
+repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_web
+issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
+version: 0.3.1+1
+
+environment:
+ sdk: ">=2.12.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+flutter:
+ plugin:
+ implements: camera
+ platforms:
+ web:
+ pluginClass: CameraPlugin
+ fileName: camera_web.dart
+
+dependencies:
+ camera_platform_interface: ^2.3.1
+ flutter:
+ sdk: flutter
+ flutter_web_plugins:
+ sdk: flutter
+ stream_transform: ^2.0.0
+
+dev_dependencies:
+ flutter_test:
+ sdk: flutter
diff --git a/packages/camera/camera_web/test/README.md b/packages/camera/camera_web/test/README.md
new file mode 100644
index 0000000..7c5b4ad
--- /dev/null
+++ b/packages/camera/camera_web/test/README.md
@@ -0,0 +1,5 @@
+## test
+
+This package uses integration tests for testing.
+
+See `example/README.md` for more info.
diff --git a/packages/camera/camera_web/test/more_tests_exist_elsewhere_test.dart b/packages/camera/camera_web/test/more_tests_exist_elsewhere_test.dart
new file mode 100644
index 0000000..32f037e
--- /dev/null
+++ b/packages/camera/camera_web/test/more_tests_exist_elsewhere_test.dart
@@ -0,0 +1,16 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// ignore_for_file: avoid_print
+
+import 'package:flutter_test/flutter_test.dart';
+
+void main() {
+ test('Tell the user where to find more tests', () {
+ print('---');
+ print('This package also uses integration_test for its tests.');
+ print('See `example/README.md` for more info.');
+ print('---');
+ });
+}
diff --git a/packages/camera/camera_windows/.gitignore b/packages/camera/camera_windows/.gitignore
new file mode 100644
index 0000000..e9dc58d
--- /dev/null
+++ b/packages/camera/camera_windows/.gitignore
@@ -0,0 +1,7 @@
+.DS_Store
+.dart_tool/
+
+.packages
+.pub/
+
+build/
diff --git a/packages/camera/camera_windows/.metadata b/packages/camera/camera_windows/.metadata
new file mode 100644
index 0000000..5bed526
--- /dev/null
+++ b/packages/camera/camera_windows/.metadata
@@ -0,0 +1,10 @@
+# This file tracks properties of this Flutter project.
+# Used by Flutter tool to assess capabilities and perform upgrades etc.
+#
+# This file should be version controlled and should not be manually edited.
+
+version:
+ revision: 18116933e77adc82f80866c928266a5b4f1ed645
+ channel: stable
+
+project_type: plugin
diff --git a/packages/camera/camera_windows/AUTHORS b/packages/camera/camera_windows/AUTHORS
new file mode 100644
index 0000000..b2178a5
--- /dev/null
+++ b/packages/camera/camera_windows/AUTHORS
@@ -0,0 +1,8 @@
+# Below is a list of people and organizations that have contributed
+# to the Flutter project. Names should be added to the list like so:
+#
+# Name/Organization <email address>
+
+Google Inc.
+Joonas Kerttula <joonas.kerttula@codemate.com>
+Codemate Ltd.
diff --git a/packages/camera/camera_windows/CHANGELOG.md b/packages/camera/camera_windows/CHANGELOG.md
new file mode 100644
index 0000000..34ee668
--- /dev/null
+++ b/packages/camera/camera_windows/CHANGELOG.md
@@ -0,0 +1,56 @@
+## NEXT
+
+* Updates minimum Flutter version to 3.0.
+
+## 0.2.1+4
+
+* Updates code for stricter lint checks.
+
+## 0.2.1+3
+
+* Updates to latest camera platform interface but fails if user attempts to use streaming with recording (since streaming is currently unsupported on Windows).
+
+## 0.2.1+2
+
+* Updates code for `no_leading_underscores_for_local_identifiers` lint.
+* Updates minimum Flutter version to 2.10.
+
+## 0.2.1+1
+
+* Fixes avoid_redundant_argument_values lint warnings and minor typos.
+
+## 0.2.1
+
+* Adds a check for string size before Win32 MultiByte <-> WideChar conversions
+
+## 0.2.0
+
+**BREAKING CHANGES**:
+ * `CameraException.code` now has value `"CameraAccessDenied"` if camera access permission was denied.
+ * `CameraException.code` now has value `"camera_error"` if error occurs during capture.
+
+## 0.1.0+5
+
+* Fixes bugs in error handling.
+
+## 0.1.0+4
+
+* Allows retrying camera initialization after error.
+
+## 0.1.0+3
+
+* Updates the README to better explain how to use the unendorsed package.
+
+## 0.1.0+2
+
+* Updates references to the obsolete master branch.
+
+## 0.1.0+1
+
+* Removes unnecessary imports.
+* Fixes library_private_types_in_public_api, sort_child_properties_last and use_key_in_widget_constructors
+ lint warnings.
+
+## 0.1.0
+
+* Initial release
diff --git a/packages/camera/camera_windows/LICENSE b/packages/camera/camera_windows/LICENSE
new file mode 100644
index 0000000..c6823b8
--- /dev/null
+++ b/packages/camera/camera_windows/LICENSE
@@ -0,0 +1,25 @@
+Copyright 2013 The Flutter Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/packages/camera/camera_windows/README.md b/packages/camera/camera_windows/README.md
new file mode 100644
index 0000000..4b66ad3
--- /dev/null
+++ b/packages/camera/camera_windows/README.md
@@ -0,0 +1,68 @@
+# Camera Windows Plugin
+
+The Windows implementation of [`camera`][camera].
+
+*Note*: This plugin is under development.
+See [missing implementations and limitations](#missing-features-on-the-windows-platform).
+
+## Usage
+
+### Depend on the package
+
+This package is not an [endorsed][endorsed-federated-plugin]
+implementation of the [`camera`][camera] plugin, so in addition to depending
+on [`camera`][camera] you'll need to
+[add `camera_windows` to your pubspec.yaml explicitly][install].
+Once you do, you can use the [`camera`][camera] APIs as you normally would.
+
+## Missing features on the Windows platform
+
+### Device orientation
+
+Device orientation detection
+is not yet implemented: [issue #97540][device-orientation-issue].
+
+### Pause and Resume video recording
+
+Pausing and resuming the video recording
+is not supported due to Windows API limitations.
+
+### Exposure mode, point and offset
+
+Support for exposure mode and offset
+is not yet implemented: [issue #97537][camera-control-issue].
+
+Exposure points are not supported due to
+limitations of the Windows API.
+
+### Focus mode and point
+
+Support for focus mode and point
+is not yet implemented: [issue #97537][camera-control-issue].
+
+### Flash mode
+
+Support for flash mode is not yet implemented: [issue #97537][camera-control-issue].
+
+Focus points are not supported due to
+current limitations of the Windows API.
+
+### Streaming of frames
+
+Support for image streaming is not yet implemented: [issue #97542][image-streams-issue].
+
+## Error handling
+
+Camera errors can be listened to using the platform's `onCameraError` method.
+
+Listening to errors is important, and in certain situations,
+disposing of the camera is the only way to reset the situation.
+
+<!-- Links -->
+
+[camera]: https://pub.dev/packages/camera
+[endorsed-federated-plugin]: https://flutter.dev/docs/development/packages-and-plugins/developing-packages#endorsed-federated-plugin
+[install]: https://pub.dev/packages/camera_windows/install
+[camera-control-issue]: https://github.com/flutter/flutter/issues/97537
+[device-orientation-issue]: https://github.com/flutter/flutter/issues/97540
+[image-streams-issue]: https://github.com/flutter/flutter/issues/97542
diff --git a/packages/camera/camera_windows/example/.gitignore b/packages/camera/camera_windows/example/.gitignore
new file mode 100644
index 0000000..0fa6b67
--- /dev/null
+++ b/packages/camera/camera_windows/example/.gitignore
@@ -0,0 +1,46 @@
+# Miscellaneous
+*.class
+*.log
+*.pyc
+*.swp
+.DS_Store
+.atom/
+.buildlog/
+.history
+.svn/
+
+# IntelliJ related
+*.iml
+*.ipr
+*.iws
+.idea/
+
+# The .vscode folder contains launch configuration and tasks you configure in
+# VS Code which you may wish to be included in version control, so this line
+# is commented out by default.
+#.vscode/
+
+# Flutter/Dart/Pub related
+**/doc/api/
+**/ios/Flutter/.last_build_id
+.dart_tool/
+.flutter-plugins
+.flutter-plugins-dependencies
+.packages
+.pub-cache/
+.pub/
+/build/
+
+# Web related
+lib/generated_plugin_registrant.dart
+
+# Symbolication related
+app.*.symbols
+
+# Obfuscation related
+app.*.map.json
+
+# Android Studio will place build artifacts here
+/android/app/debug
+/android/app/profile
+/android/app/release
diff --git a/packages/camera/camera_windows/example/.metadata b/packages/camera/camera_windows/example/.metadata
new file mode 100644
index 0000000..a5584fc
--- /dev/null
+++ b/packages/camera/camera_windows/example/.metadata
@@ -0,0 +1,10 @@
+# This file tracks properties of this Flutter project.
+# Used by Flutter tool to assess capabilities and perform upgrades etc.
+#
+# This file should be version controlled and should not be manually edited.
+
+version:
+ revision: 18116933e77adc82f80866c928266a5b4f1ed645
+ channel: stable
+
+project_type: app
diff --git a/packages/camera/camera_windows/example/README.md b/packages/camera/camera_windows/example/README.md
new file mode 100644
index 0000000..96b8bb1
--- /dev/null
+++ b/packages/camera/camera_windows/example/README.md
@@ -0,0 +1,9 @@
+# Platform Implementation Test App
+
+This is a test app for manual testing and automated integration testing
+of this platform implementation. It is not intended to demonstrate actual use of
+this package, since the intent is that plugin clients use the app-facing
+package.
+
+Unless you are making changes to this implementation package, this example is
+very unlikely to be relevant.
diff --git a/packages/camera/camera_windows/example/integration_test/camera_test.dart b/packages/camera/camera_windows/example/integration_test/camera_test.dart
new file mode 100644
index 0000000..01db9e2
--- /dev/null
+++ b/packages/camera/camera_windows/example/integration_test/camera_test.dart
@@ -0,0 +1,100 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'package:async/async.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+// Note that these integration tests do not currently cover
+// most features and code paths, as they can only be tested if
+// one or more cameras are available in the test environment.
+// Native unit tests with better coverage are available at
+// the native part of the plugin implementation.
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('initializeCamera', () {
+ testWidgets('throws exception if camera is not created',
+ (WidgetTester _) async {
+ final CameraPlatform camera = CameraPlatform.instance;
+
+ expect(() async => camera.initializeCamera(1234),
+ throwsA(isA<CameraException>()));
+ });
+ });
+
+ group('takePicture', () {
+ testWidgets('throws exception if camera is not created',
+ (WidgetTester _) async {
+ final CameraPlatform camera = CameraPlatform.instance;
+
+ expect(() async => camera.takePicture(1234),
+ throwsA(isA<PlatformException>()));
+ });
+ });
+
+ group('startVideoRecording', () {
+ testWidgets('throws exception if camera is not created',
+ (WidgetTester _) async {
+ final CameraPlatform camera = CameraPlatform.instance;
+
+ expect(() async => camera.startVideoRecording(1234),
+ throwsA(isA<PlatformException>()));
+ });
+ });
+
+ group('stopVideoRecording', () {
+ testWidgets('throws exception if camera is not created',
+ (WidgetTester _) async {
+ final CameraPlatform camera = CameraPlatform.instance;
+
+ expect(() async => camera.stopVideoRecording(1234),
+ throwsA(isA<PlatformException>()));
+ });
+ });
+
+ group('pausePreview', () {
+ testWidgets('throws exception if camera is not created',
+ (WidgetTester _) async {
+ final CameraPlatform camera = CameraPlatform.instance;
+
+ expect(() async => camera.pausePreview(1234),
+ throwsA(isA<PlatformException>()));
+ });
+ });
+
+ group('resumePreview', () {
+ testWidgets('throws exception if camera is not created',
+ (WidgetTester _) async {
+ final CameraPlatform camera = CameraPlatform.instance;
+
+ expect(() async => camera.resumePreview(1234),
+ throwsA(isA<PlatformException>()));
+ });
+ });
+
+ group('onDeviceOrientationChanged', () {
+ testWidgets('emits the initial DeviceOrientationChangedEvent',
+ (WidgetTester _) async {
+ final Stream<DeviceOrientationChangedEvent> eventStream =
+ CameraPlatform.instance.onDeviceOrientationChanged();
+
+ final StreamQueue<DeviceOrientationChangedEvent> streamQueue =
+ StreamQueue<DeviceOrientationChangedEvent>(eventStream);
+
+ expect(
+ await streamQueue.next,
+ equals(
+ const DeviceOrientationChangedEvent(
+ DeviceOrientation.landscapeRight,
+ ),
+ ),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_windows/example/lib/main.dart b/packages/camera/camera_windows/example/lib/main.dart
new file mode 100644
index 0000000..d27edb8
--- /dev/null
+++ b/packages/camera/camera_windows/example/lib/main.dart
@@ -0,0 +1,453 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+
+void main() {
+ runApp(const MyApp());
+}
+
+/// Example app for Camera Windows plugin.
+class MyApp extends StatefulWidget {
+ /// Default Constructor
+ const MyApp({Key? key}) : super(key: key);
+
+ @override
+ State<MyApp> createState() => _MyAppState();
+}
+
+class _MyAppState extends State<MyApp> {
+ String _cameraInfo = 'Unknown';
+ List<CameraDescription> _cameras = <CameraDescription>[];
+ int _cameraIndex = 0;
+ int _cameraId = -1;
+ bool _initialized = false;
+ bool _recording = false;
+ bool _recordingTimed = false;
+ bool _recordAudio = true;
+ bool _previewPaused = false;
+ Size? _previewSize;
+ ResolutionPreset _resolutionPreset = ResolutionPreset.veryHigh;
+ StreamSubscription<CameraErrorEvent>? _errorStreamSubscription;
+ StreamSubscription<CameraClosingEvent>? _cameraClosingStreamSubscription;
+
+ @override
+ void initState() {
+ super.initState();
+ WidgetsFlutterBinding.ensureInitialized();
+ _fetchCameras();
+ }
+
+ @override
+ void dispose() {
+ _disposeCurrentCamera();
+ _errorStreamSubscription?.cancel();
+ _errorStreamSubscription = null;
+ _cameraClosingStreamSubscription?.cancel();
+ _cameraClosingStreamSubscription = null;
+ super.dispose();
+ }
+
+ /// Fetches list of available cameras from camera_windows plugin.
+ Future<void> _fetchCameras() async {
+ String cameraInfo;
+ List<CameraDescription> cameras = <CameraDescription>[];
+
+ int cameraIndex = 0;
+ try {
+ cameras = await CameraPlatform.instance.availableCameras();
+ if (cameras.isEmpty) {
+ cameraInfo = 'No available cameras';
+ } else {
+ cameraIndex = _cameraIndex % cameras.length;
+ cameraInfo = 'Found camera: ${cameras[cameraIndex].name}';
+ }
+ } on PlatformException catch (e) {
+ cameraInfo = 'Failed to get cameras: ${e.code}: ${e.message}';
+ }
+
+ if (mounted) {
+ setState(() {
+ _cameraIndex = cameraIndex;
+ _cameras = cameras;
+ _cameraInfo = cameraInfo;
+ });
+ }
+ }
+
+ /// Initializes the camera on the device.
+ Future<void> _initializeCamera() async {
+ assert(!_initialized);
+
+ if (_cameras.isEmpty) {
+ return;
+ }
+
+ int cameraId = -1;
+ try {
+ final int cameraIndex = _cameraIndex % _cameras.length;
+ final CameraDescription camera = _cameras[cameraIndex];
+
+ cameraId = await CameraPlatform.instance.createCamera(
+ camera,
+ _resolutionPreset,
+ enableAudio: _recordAudio,
+ );
+
+ _errorStreamSubscription?.cancel();
+ _errorStreamSubscription = CameraPlatform.instance
+ .onCameraError(cameraId)
+ .listen(_onCameraError);
+
+ _cameraClosingStreamSubscription?.cancel();
+ _cameraClosingStreamSubscription = CameraPlatform.instance
+ .onCameraClosing(cameraId)
+ .listen(_onCameraClosing);
+
+ final Future<CameraInitializedEvent> initialized =
+ CameraPlatform.instance.onCameraInitialized(cameraId).first;
+
+ await CameraPlatform.instance.initializeCamera(
+ cameraId,
+ );
+
+ final CameraInitializedEvent event = await initialized;
+ _previewSize = Size(
+ event.previewWidth,
+ event.previewHeight,
+ );
+
+ if (mounted) {
+ setState(() {
+ _initialized = true;
+ _cameraId = cameraId;
+ _cameraIndex = cameraIndex;
+ _cameraInfo = 'Capturing camera: ${camera.name}';
+ });
+ }
+ } on CameraException catch (e) {
+ try {
+ if (cameraId >= 0) {
+ await CameraPlatform.instance.dispose(cameraId);
+ }
+ } on CameraException catch (e) {
+ debugPrint('Failed to dispose camera: ${e.code}: ${e.description}');
+ }
+
+ // Reset state.
+ if (mounted) {
+ setState(() {
+ _initialized = false;
+ _cameraId = -1;
+ _cameraIndex = 0;
+ _previewSize = null;
+ _recording = false;
+ _recordingTimed = false;
+ _cameraInfo =
+ 'Failed to initialize camera: ${e.code}: ${e.description}';
+ });
+ }
+ }
+ }
+
+ Future<void> _disposeCurrentCamera() async {
+ if (_cameraId >= 0 && _initialized) {
+ try {
+ await CameraPlatform.instance.dispose(_cameraId);
+
+ if (mounted) {
+ setState(() {
+ _initialized = false;
+ _cameraId = -1;
+ _previewSize = null;
+ _recording = false;
+ _recordingTimed = false;
+ _previewPaused = false;
+ _cameraInfo = 'Camera disposed';
+ });
+ }
+ } on CameraException catch (e) {
+ if (mounted) {
+ setState(() {
+ _cameraInfo =
+ 'Failed to dispose camera: ${e.code}: ${e.description}';
+ });
+ }
+ }
+ }
+ }
+
+ Widget _buildPreview() {
+ return CameraPlatform.instance.buildPreview(_cameraId);
+ }
+
+ Future<void> _takePicture() async {
+ final XFile file = await CameraPlatform.instance.takePicture(_cameraId);
+ _showInSnackBar('Picture captured to: ${file.path}');
+ }
+
+ Future<void> _recordTimed(int seconds) async {
+ if (_initialized && _cameraId > 0 && !_recordingTimed) {
+ CameraPlatform.instance
+ .onVideoRecordedEvent(_cameraId)
+ .first
+ .then((VideoRecordedEvent event) async {
+ if (mounted) {
+ setState(() {
+ _recordingTimed = false;
+ });
+
+ _showInSnackBar('Video captured to: ${event.file.path}');
+ }
+ });
+
+ await CameraPlatform.instance.startVideoRecording(
+ _cameraId,
+ maxVideoDuration: Duration(seconds: seconds),
+ );
+
+ if (mounted) {
+ setState(() {
+ _recordingTimed = true;
+ });
+ }
+ }
+ }
+
+ Future<void> _toggleRecord() async {
+ if (_initialized && _cameraId > 0) {
+ if (_recordingTimed) {
+ /// Requests to cut the timed recording short.
+ await CameraPlatform.instance.stopVideoRecording(_cameraId);
+ } else {
+ if (!_recording) {
+ await CameraPlatform.instance.startVideoRecording(_cameraId);
+ } else {
+ final XFile file =
+ await CameraPlatform.instance.stopVideoRecording(_cameraId);
+
+ _showInSnackBar('Video captured to: ${file.path}');
+ }
+
+ if (mounted) {
+ setState(() {
+ _recording = !_recording;
+ });
+ }
+ }
+ }
+ }
+
+ Future<void> _togglePreview() async {
+ if (_initialized && _cameraId >= 0) {
+ if (!_previewPaused) {
+ await CameraPlatform.instance.pausePreview(_cameraId);
+ } else {
+ await CameraPlatform.instance.resumePreview(_cameraId);
+ }
+ if (mounted) {
+ setState(() {
+ _previewPaused = !_previewPaused;
+ });
+ }
+ }
+ }
+
+ Future<void> _switchCamera() async {
+ if (_cameras.isNotEmpty) {
+ // Select the next camera index.
+ _cameraIndex = (_cameraIndex + 1) % _cameras.length;
+ if (_initialized && _cameraId >= 0) {
+ await _disposeCurrentCamera();
+ await _fetchCameras();
+ if (_cameras.isNotEmpty) {
+ await _initializeCamera();
+ }
+ } else {
+ await _fetchCameras();
+ }
+ }
+ }
+
+ Future<void> _onResolutionChange(ResolutionPreset newValue) async {
+ setState(() {
+ _resolutionPreset = newValue;
+ });
+ if (_initialized && _cameraId >= 0) {
+ // Re-inits camera with new resolution preset.
+ await _disposeCurrentCamera();
+ await _initializeCamera();
+ }
+ }
+
+ Future<void> _onAudioChange(bool recordAudio) async {
+ setState(() {
+ _recordAudio = recordAudio;
+ });
+ if (_initialized && _cameraId >= 0) {
+ // Re-inits camera with new record audio setting.
+ await _disposeCurrentCamera();
+ await _initializeCamera();
+ }
+ }
+
+ void _onCameraError(CameraErrorEvent event) {
+ if (mounted) {
+ _scaffoldMessengerKey.currentState?.showSnackBar(
+ SnackBar(content: Text('Error: ${event.description}')));
+
+ // Dispose camera on camera error as it can not be used anymore.
+ _disposeCurrentCamera();
+ _fetchCameras();
+ }
+ }
+
+ void _onCameraClosing(CameraClosingEvent event) {
+ if (mounted) {
+ _showInSnackBar('Camera is closing');
+ }
+ }
+
+ void _showInSnackBar(String message) {
+ _scaffoldMessengerKey.currentState?.showSnackBar(SnackBar(
+ content: Text(message),
+ duration: const Duration(seconds: 1),
+ ));
+ }
+
+ final GlobalKey<ScaffoldMessengerState> _scaffoldMessengerKey =
+ GlobalKey<ScaffoldMessengerState>();
+
+ @override
+ Widget build(BuildContext context) {
+ final List<DropdownMenuItem<ResolutionPreset>> resolutionItems =
+ ResolutionPreset.values
+ .map<DropdownMenuItem<ResolutionPreset>>((ResolutionPreset value) {
+ return DropdownMenuItem<ResolutionPreset>(
+ value: value,
+ child: Text(value.toString()),
+ );
+ }).toList();
+
+ return MaterialApp(
+ scaffoldMessengerKey: _scaffoldMessengerKey,
+ home: Scaffold(
+ appBar: AppBar(
+ title: const Text('Plugin example app'),
+ ),
+ body: ListView(
+ children: <Widget>[
+ Padding(
+ padding: const EdgeInsets.symmetric(
+ vertical: 5,
+ horizontal: 10,
+ ),
+ child: Text(_cameraInfo),
+ ),
+ if (_cameras.isEmpty)
+ ElevatedButton(
+ onPressed: _fetchCameras,
+ child: const Text('Re-check available cameras'),
+ ),
+ if (_cameras.isNotEmpty)
+ Row(
+ mainAxisAlignment: MainAxisAlignment.center,
+ children: <Widget>[
+ DropdownButton<ResolutionPreset>(
+ value: _resolutionPreset,
+ onChanged: (ResolutionPreset? value) {
+ if (value != null) {
+ _onResolutionChange(value);
+ }
+ },
+ items: resolutionItems,
+ ),
+ const SizedBox(width: 20),
+ const Text('Audio:'),
+ Switch(
+ value: _recordAudio,
+ onChanged: (bool state) => _onAudioChange(state)),
+ const SizedBox(width: 20),
+ ElevatedButton(
+ onPressed: _initialized
+ ? _disposeCurrentCamera
+ : _initializeCamera,
+ child:
+ Text(_initialized ? 'Dispose camera' : 'Create camera'),
+ ),
+ const SizedBox(width: 5),
+ ElevatedButton(
+ onPressed: _initialized ? _takePicture : null,
+ child: const Text('Take picture'),
+ ),
+ const SizedBox(width: 5),
+ ElevatedButton(
+ onPressed: _initialized ? _togglePreview : null,
+ child: Text(
+ _previewPaused ? 'Resume preview' : 'Pause preview',
+ ),
+ ),
+ const SizedBox(width: 5),
+ ElevatedButton(
+ onPressed: _initialized ? _toggleRecord : null,
+ child: Text(
+ (_recording || _recordingTimed)
+ ? 'Stop recording'
+ : 'Record Video',
+ ),
+ ),
+ const SizedBox(width: 5),
+ ElevatedButton(
+ onPressed: (_initialized && !_recording && !_recordingTimed)
+ ? () => _recordTimed(5)
+ : null,
+ child: const Text(
+ 'Record 5 seconds',
+ ),
+ ),
+ if (_cameras.length > 1) ...<Widget>[
+ const SizedBox(width: 5),
+ ElevatedButton(
+ onPressed: _switchCamera,
+ child: const Text(
+ 'Switch camera',
+ ),
+ ),
+ ]
+ ],
+ ),
+ const SizedBox(height: 5),
+ if (_initialized && _cameraId > 0 && _previewSize != null)
+ Padding(
+ padding: const EdgeInsets.symmetric(
+ vertical: 10,
+ ),
+ child: Align(
+ child: Container(
+ constraints: const BoxConstraints(
+ maxHeight: 500,
+ ),
+ child: AspectRatio(
+ aspectRatio: _previewSize!.width / _previewSize!.height,
+ child: _buildPreview(),
+ ),
+ ),
+ ),
+ ),
+ if (_previewSize != null)
+ Center(
+ child: Text(
+ 'Preview size: ${_previewSize!.width.toStringAsFixed(0)}x${_previewSize!.height.toStringAsFixed(0)}',
+ ),
+ ),
+ ],
+ ),
+ ),
+ );
+ }
+}
diff --git a/packages/camera/camera_windows/example/pubspec.yaml b/packages/camera/camera_windows/example/pubspec.yaml
new file mode 100644
index 0000000..69ce1c3
--- /dev/null
+++ b/packages/camera/camera_windows/example/pubspec.yaml
@@ -0,0 +1,29 @@
+name: camera_windows_example
+description: Demonstrates how to use the camera_windows plugin.
+publish_to: 'none'
+
+environment:
+ sdk: ">=2.12.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+dependencies:
+ camera_platform_interface: ^2.1.2
+ camera_windows:
+ # When depending on this package from a real application you should use:
+ # camera_windows: ^x.y.z
+ # See https://dart.dev/tools/pub/dependencies#version-constraints
+ # The example app is bundled with the plugin so we use a path dependency on
+ # the parent directory to use the current plugin's version.
+ path: ../
+ flutter:
+ sdk: flutter
+
+dev_dependencies:
+ async: ^2.5.0
+ flutter_test:
+ sdk: flutter
+ integration_test:
+ sdk: flutter
+
+flutter:
+ uses-material-design: true
diff --git a/packages/camera/camera_windows/example/test_driver/integration_test.dart b/packages/camera/camera_windows/example/test_driver/integration_test.dart
new file mode 100644
index 0000000..4f10f2a
--- /dev/null
+++ b/packages/camera/camera_windows/example/test_driver/integration_test.dart
@@ -0,0 +1,7 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:integration_test/integration_test_driver.dart';
+
+Future<void> main() => integrationDriver();
diff --git a/packages/camera/camera_windows/example/windows/.gitignore b/packages/camera/camera_windows/example/windows/.gitignore
new file mode 100644
index 0000000..d492d0d
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/.gitignore
@@ -0,0 +1,17 @@
+flutter/ephemeral/
+
+# Visual Studio user-specific files.
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# Visual Studio build-related files.
+x64/
+x86/
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!*.[Cc]ache/
diff --git a/packages/camera/camera_windows/example/windows/CMakeLists.txt b/packages/camera/camera_windows/example/windows/CMakeLists.txt
new file mode 100644
index 0000000..28757c7
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/CMakeLists.txt
@@ -0,0 +1,100 @@
+cmake_minimum_required(VERSION 3.14)
+project(camera_windows_example LANGUAGES CXX)
+
+set(BINARY_NAME "camera_windows_example")
+
+cmake_policy(SET CMP0063 NEW)
+
+set(CMAKE_INSTALL_RPATH "$ORIGIN/lib")
+
+# Configure build options.
+get_property(IS_MULTICONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
+if(IS_MULTICONFIG)
+ set(CMAKE_CONFIGURATION_TYPES "Debug;Profile;Release"
+ CACHE STRING "" FORCE)
+else()
+ if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
+ set(CMAKE_BUILD_TYPE "Debug" CACHE
+ STRING "Flutter build mode" FORCE)
+ set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS
+ "Debug" "Profile" "Release")
+ endif()
+endif()
+
+set(CMAKE_EXE_LINKER_FLAGS_PROFILE "${CMAKE_EXE_LINKER_FLAGS_RELEASE}")
+set(CMAKE_SHARED_LINKER_FLAGS_PROFILE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE}")
+set(CMAKE_C_FLAGS_PROFILE "${CMAKE_C_FLAGS_RELEASE}")
+set(CMAKE_CXX_FLAGS_PROFILE "${CMAKE_CXX_FLAGS_RELEASE}")
+
+# Use Unicode for all projects.
+add_definitions(-DUNICODE -D_UNICODE)
+
+# Compilation settings that should be applied to most targets.
+function(APPLY_STANDARD_SETTINGS TARGET)
+ target_compile_features(${TARGET} PUBLIC cxx_std_17)
+ target_compile_options(${TARGET} PRIVATE /W4 /WX /wd"4100")
+ target_compile_options(${TARGET} PRIVATE /EHsc)
+ target_compile_definitions(${TARGET} PRIVATE "_HAS_EXCEPTIONS=0")
+ target_compile_definitions(${TARGET} PRIVATE "$<$<CONFIG:Debug>:_DEBUG>")
+endfunction()
+
+set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter")
+
+# Flutter library and tool build rules.
+add_subdirectory(${FLUTTER_MANAGED_DIR})
+
+# Application build
+add_subdirectory("runner")
+
+# Enable the test target.
+set(include_camera_windows_tests TRUE)
+# Provide an alias for the test target using the name expected by repo tooling.
+add_custom_target(unit_tests DEPENDS camera_windows_test)
+
+# Generated plugin build rules, which manage building the plugins and adding
+# them to the application.
+include(flutter/generated_plugins.cmake)
+
+
+# === Installation ===
+# Support files are copied into place next to the executable, so that it can
+# run in place. This is done instead of making a separate bundle (as on Linux)
+# so that building and running from within Visual Studio will work.
+set(BUILD_BUNDLE_DIR "$<TARGET_FILE_DIR:${BINARY_NAME}>")
+# Make the "install" step default, as it's required to run.
+set(CMAKE_VS_INCLUDE_INSTALL_TO_DEFAULT_BUILD 1)
+if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+ set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE)
+endif()
+
+set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data")
+set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}")
+
+install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}"
+ COMPONENT Runtime)
+
+install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}"
+ COMPONENT Runtime)
+
+install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}"
+ COMPONENT Runtime)
+
+if(PLUGIN_BUNDLED_LIBRARIES)
+ install(FILES "${PLUGIN_BUNDLED_LIBRARIES}"
+ DESTINATION "${INSTALL_BUNDLE_LIB_DIR}"
+ COMPONENT Runtime)
+endif()
+
+# Fully re-copy the assets directory on each build to avoid having stale files
+# from a previous install.
+set(FLUTTER_ASSET_DIR_NAME "flutter_assets")
+install(CODE "
+ file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\")
+ " COMPONENT Runtime)
+install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}"
+ DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime)
+
+# Install the AOT library on non-Debug builds only.
+install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}"
+ CONFIGURATIONS Profile;Release
+ COMPONENT Runtime)
diff --git a/packages/camera/camera_windows/example/windows/flutter/CMakeLists.txt b/packages/camera/camera_windows/example/windows/flutter/CMakeLists.txt
new file mode 100644
index 0000000..b2e4bd8
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/flutter/CMakeLists.txt
@@ -0,0 +1,103 @@
+cmake_minimum_required(VERSION 3.14)
+
+set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral")
+
+# Configuration provided via flutter tool.
+include(${EPHEMERAL_DIR}/generated_config.cmake)
+
+# TODO: Move the rest of this into files in ephemeral. See
+# https://github.com/flutter/flutter/issues/57146.
+set(WRAPPER_ROOT "${EPHEMERAL_DIR}/cpp_client_wrapper")
+
+# === Flutter Library ===
+set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/flutter_windows.dll")
+
+# Published to parent scope for install step.
+set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE)
+set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE)
+set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE)
+set(AOT_LIBRARY "${PROJECT_DIR}/build/windows/app.so" PARENT_SCOPE)
+
+list(APPEND FLUTTER_LIBRARY_HEADERS
+ "flutter_export.h"
+ "flutter_windows.h"
+ "flutter_messenger.h"
+ "flutter_plugin_registrar.h"
+ "flutter_texture_registrar.h"
+)
+list(TRANSFORM FLUTTER_LIBRARY_HEADERS PREPEND "${EPHEMERAL_DIR}/")
+add_library(flutter INTERFACE)
+target_include_directories(flutter INTERFACE
+ "${EPHEMERAL_DIR}"
+)
+target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}.lib")
+add_dependencies(flutter flutter_assemble)
+
+# === Wrapper ===
+list(APPEND CPP_WRAPPER_SOURCES_CORE
+ "core_implementations.cc"
+ "standard_codec.cc"
+)
+list(TRANSFORM CPP_WRAPPER_SOURCES_CORE PREPEND "${WRAPPER_ROOT}/")
+list(APPEND CPP_WRAPPER_SOURCES_PLUGIN
+ "plugin_registrar.cc"
+)
+list(TRANSFORM CPP_WRAPPER_SOURCES_PLUGIN PREPEND "${WRAPPER_ROOT}/")
+list(APPEND CPP_WRAPPER_SOURCES_APP
+ "flutter_engine.cc"
+ "flutter_view_controller.cc"
+)
+list(TRANSFORM CPP_WRAPPER_SOURCES_APP PREPEND "${WRAPPER_ROOT}/")
+
+# Wrapper sources needed for a plugin.
+add_library(flutter_wrapper_plugin STATIC
+ ${CPP_WRAPPER_SOURCES_CORE}
+ ${CPP_WRAPPER_SOURCES_PLUGIN}
+)
+apply_standard_settings(flutter_wrapper_plugin)
+set_target_properties(flutter_wrapper_plugin PROPERTIES
+ POSITION_INDEPENDENT_CODE ON)
+set_target_properties(flutter_wrapper_plugin PROPERTIES
+ CXX_VISIBILITY_PRESET hidden)
+target_link_libraries(flutter_wrapper_plugin PUBLIC flutter)
+target_include_directories(flutter_wrapper_plugin PUBLIC
+ "${WRAPPER_ROOT}/include"
+)
+add_dependencies(flutter_wrapper_plugin flutter_assemble)
+
+# Wrapper sources needed for the runner.
+add_library(flutter_wrapper_app STATIC
+ ${CPP_WRAPPER_SOURCES_CORE}
+ ${CPP_WRAPPER_SOURCES_APP}
+)
+apply_standard_settings(flutter_wrapper_app)
+target_link_libraries(flutter_wrapper_app PUBLIC flutter)
+target_include_directories(flutter_wrapper_app PUBLIC
+ "${WRAPPER_ROOT}/include"
+)
+add_dependencies(flutter_wrapper_app flutter_assemble)
+
+# === Flutter tool backend ===
+# _phony_ is a non-existent file to force this command to run every time,
+# since currently there's no way to get a full input/output list from the
+# flutter tool.
+set(PHONY_OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/_phony_")
+set_source_files_properties("${PHONY_OUTPUT}" PROPERTIES SYMBOLIC TRUE)
+add_custom_command(
+ OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS}
+ ${CPP_WRAPPER_SOURCES_CORE} ${CPP_WRAPPER_SOURCES_PLUGIN}
+ ${CPP_WRAPPER_SOURCES_APP}
+ ${PHONY_OUTPUT}
+ COMMAND ${CMAKE_COMMAND} -E env
+ ${FLUTTER_TOOL_ENVIRONMENT}
+ "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.bat"
+ windows-x64 $<CONFIG>
+ VERBATIM
+)
+add_custom_target(flutter_assemble DEPENDS
+ "${FLUTTER_LIBRARY}"
+ ${FLUTTER_LIBRARY_HEADERS}
+ ${CPP_WRAPPER_SOURCES_CORE}
+ ${CPP_WRAPPER_SOURCES_PLUGIN}
+ ${CPP_WRAPPER_SOURCES_APP}
+)
diff --git a/packages/camera/camera_windows/example/windows/flutter/generated_plugins.cmake b/packages/camera/camera_windows/example/windows/flutter/generated_plugins.cmake
new file mode 100644
index 0000000..458d22d
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/flutter/generated_plugins.cmake
@@ -0,0 +1,24 @@
+#
+# Generated file, do not edit.
+#
+
+list(APPEND FLUTTER_PLUGIN_LIST
+ camera_windows
+)
+
+list(APPEND FLUTTER_FFI_PLUGIN_LIST
+)
+
+set(PLUGIN_BUNDLED_LIBRARIES)
+
+foreach(plugin ${FLUTTER_PLUGIN_LIST})
+ add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/windows plugins/${plugin})
+ target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin)
+ list(APPEND PLUGIN_BUNDLED_LIBRARIES $<TARGET_FILE:${plugin}_plugin>)
+ list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries})
+endforeach(plugin)
+
+foreach(ffi_plugin ${FLUTTER_FFI_PLUGIN_LIST})
+ add_subdirectory(flutter/ephemeral/.plugin_symlinks/${ffi_plugin}/windows plugins/${ffi_plugin})
+ list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${ffi_plugin}_bundled_libraries})
+endforeach(ffi_plugin)
diff --git a/packages/camera/camera_windows/example/windows/runner/CMakeLists.txt b/packages/camera/camera_windows/example/windows/runner/CMakeLists.txt
new file mode 100644
index 0000000..adb2052
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/CMakeLists.txt
@@ -0,0 +1,18 @@
+cmake_minimum_required(VERSION 3.14)
+project(runner LANGUAGES CXX)
+
+add_executable(${BINARY_NAME} WIN32
+ "flutter_window.cpp"
+ "main.cpp"
+ "utils.cpp"
+ "win32_window.cpp"
+ "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc"
+ "Runner.rc"
+ "runner.exe.manifest"
+)
+
+apply_standard_settings(${BINARY_NAME})
+target_compile_definitions(${BINARY_NAME} PRIVATE "NOMINMAX")
+target_link_libraries(${BINARY_NAME} PRIVATE flutter flutter_wrapper_app)
+target_include_directories(${BINARY_NAME} PRIVATE "${CMAKE_SOURCE_DIR}")
+add_dependencies(${BINARY_NAME} flutter_assemble)
diff --git a/packages/camera/camera_windows/example/windows/runner/Runner.rc b/packages/camera/camera_windows/example/windows/runner/Runner.rc
new file mode 100644
index 0000000..f1cfa43
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/Runner.rc
@@ -0,0 +1,121 @@
+// Microsoft Visual C++ generated resource script.
+//
+#pragma code_page(65001)
+#include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "winres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (United States) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""winres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Icon
+//
+
+// Icon with lowest ID value placed first to ensure application icon
+// remains consistent on all systems.
+IDI_APP_ICON ICON "resources\\app_icon.ico"
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+#ifdef FLUTTER_BUILD_NUMBER
+#define VERSION_AS_NUMBER FLUTTER_BUILD_NUMBER
+#else
+#define VERSION_AS_NUMBER 1,0,0
+#endif
+
+#ifdef FLUTTER_BUILD_NAME
+#define VERSION_AS_STRING #FLUTTER_BUILD_NAME
+#else
+#define VERSION_AS_STRING "1.0.0"
+#endif
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION VERSION_AS_NUMBER
+ PRODUCTVERSION VERSION_AS_NUMBER
+ FILEFLAGSMASK VS_FFI_FILEFLAGSMASK
+#ifdef _DEBUG
+ FILEFLAGS VS_FF_DEBUG
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS VOS__WINDOWS32
+ FILETYPE VFT_APP
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904e4"
+ BEGIN
+ VALUE "CompanyName", "com.example" "\0"
+ VALUE "FileDescription", "Demonstrates how to use the camera_windows plugin." "\0"
+ VALUE "FileVersion", VERSION_AS_STRING "\0"
+ VALUE "InternalName", "camera_windows_example" "\0"
+ VALUE "LegalCopyright", "Copyright (C) 2021 com.example. All rights reserved." "\0"
+ VALUE "OriginalFilename", "camera_windows_example.exe" "\0"
+ VALUE "ProductName", "camera_windows_example" "\0"
+ VALUE "ProductVersion", VERSION_AS_STRING "\0"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1252
+ END
+END
+
+#endif // English (United States) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
diff --git a/packages/camera/camera_windows/example/windows/runner/flutter_window.cpp b/packages/camera/camera_windows/example/windows/runner/flutter_window.cpp
new file mode 100644
index 0000000..8254bd9
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/flutter_window.cpp
@@ -0,0 +1,65 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "flutter_window.h"
+
+#include <optional>
+
+#include "flutter/generated_plugin_registrant.h"
+
+FlutterWindow::FlutterWindow(const flutter::DartProject& project)
+ : project_(project) {}
+
+FlutterWindow::~FlutterWindow() {}
+
+bool FlutterWindow::OnCreate() {
+ if (!Win32Window::OnCreate()) {
+ return false;
+ }
+
+ RECT frame = GetClientArea();
+
+ // The size here must match the window dimensions to avoid unnecessary surface
+ // creation / destruction in the startup path.
+ flutter_controller_ = std::make_unique<flutter::FlutterViewController>(
+ frame.right - frame.left, frame.bottom - frame.top, project_);
+ // Ensure that basic setup of the controller was successful.
+ if (!flutter_controller_->engine() || !flutter_controller_->view()) {
+ return false;
+ }
+ RegisterPlugins(flutter_controller_->engine());
+ SetChildContent(flutter_controller_->view()->GetNativeWindow());
+ return true;
+}
+
+void FlutterWindow::OnDestroy() {
+ if (flutter_controller_) {
+ flutter_controller_ = nullptr;
+ }
+
+ Win32Window::OnDestroy();
+}
+
+LRESULT
+FlutterWindow::MessageHandler(HWND hwnd, UINT const message,
+                              WPARAM const wparam,
+                              LPARAM const lparam) noexcept {
+  // Give Flutter, including plugins, an opportunity to handle window messages.
+  if (flutter_controller_) {
+    std::optional<LRESULT> result =
+        flutter_controller_->HandleTopLevelWindowProc(hwnd, message, wparam,
+                                                      lparam);
+    if (result) {
+      return *result;
+    }
+    // Dereference the controller only inside this null check; a
+    // WM_FONTCHANGE delivered after OnDestroy would otherwise crash.
+    switch (message) {
+      case WM_FONTCHANGE:
+        flutter_controller_->engine()->ReloadSystemFonts();
+        break;
+    }
+  }
+
+  return Win32Window::MessageHandler(hwnd, message, wparam, lparam);
+}
diff --git a/packages/camera/camera_windows/example/windows/runner/flutter_window.h b/packages/camera/camera_windows/example/windows/runner/flutter_window.h
new file mode 100644
index 0000000..f1fc669
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/flutter_window.h
@@ -0,0 +1,37 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef RUNNER_FLUTTER_WINDOW_H_
+#define RUNNER_FLUTTER_WINDOW_H_
+
+#include <flutter/dart_project.h>
+#include <flutter/flutter_view_controller.h>
+
+#include <memory>
+
+#include "win32_window.h"
+
+// A window that does nothing but host a Flutter view.
+class FlutterWindow : public Win32Window {
+ public:
+ // Creates a new FlutterWindow hosting a Flutter view running |project|.
+ explicit FlutterWindow(const flutter::DartProject& project);
+ virtual ~FlutterWindow();
+
+ protected:
+ // Win32Window:
+ bool OnCreate() override;
+ void OnDestroy() override;
+ LRESULT MessageHandler(HWND window, UINT const message, WPARAM const wparam,
+ LPARAM const lparam) noexcept override;
+
+ private:
+ // The project to run.
+ flutter::DartProject project_;
+
+ // The Flutter instance hosted by this window.
+ std::unique_ptr<flutter::FlutterViewController> flutter_controller_;
+};
+
+#endif // RUNNER_FLUTTER_WINDOW_H_
diff --git a/packages/camera/camera_windows/example/windows/runner/main.cpp b/packages/camera/camera_windows/example/windows/runner/main.cpp
new file mode 100644
index 0000000..755a90b
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/main.cpp
@@ -0,0 +1,46 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <flutter/dart_project.h>
+#include <flutter/flutter_view_controller.h>
+#include <windows.h>
+
+#include "flutter_window.h"
+#include "utils.h"
+
+int APIENTRY wWinMain(_In_ HINSTANCE instance, _In_opt_ HINSTANCE prev,
+ _In_ wchar_t* command_line, _In_ int show_command) {
+ // Attach to console when present (e.g., 'flutter run') or create a
+ // new console when running with a debugger.
+ if (!::AttachConsole(ATTACH_PARENT_PROCESS) && ::IsDebuggerPresent()) {
+ CreateAndAttachConsole();
+ }
+
+ // Initialize COM, so that it is available for use in the library and/or
+ // plugins.
+ ::CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED);
+
+ flutter::DartProject project(L"data");
+
+ std::vector<std::string> command_line_arguments = GetCommandLineArguments();
+
+ project.set_dart_entrypoint_arguments(std::move(command_line_arguments));
+
+ FlutterWindow window(project);
+ Win32Window::Point origin(10, 10);
+ Win32Window::Size size(1280, 720);
+ if (!window.CreateAndShow(L"camera_windows_example", origin, size)) {
+ return EXIT_FAILURE;
+ }
+ window.SetQuitOnClose(true);
+
+ ::MSG msg;
+ while (::GetMessage(&msg, nullptr, 0, 0)) {
+ ::TranslateMessage(&msg);
+ ::DispatchMessage(&msg);
+ }
+
+ ::CoUninitialize();
+ return EXIT_SUCCESS;
+}
diff --git a/packages/camera/camera_windows/example/windows/runner/resource.h b/packages/camera/camera_windows/example/windows/runner/resource.h
new file mode 100644
index 0000000..d5d958d
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/resource.h
@@ -0,0 +1,16 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by Runner.rc
+//
+#define IDI_APP_ICON 101
+
+// Next default values for new objects
+//
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE 102
+#define _APS_NEXT_COMMAND_VALUE 40001
+#define _APS_NEXT_CONTROL_VALUE 1001
+#define _APS_NEXT_SYMED_VALUE 101
+#endif
+#endif
diff --git a/packages/camera/camera_windows/example/windows/runner/resources/app_icon.ico b/packages/camera/camera_windows/example/windows/runner/resources/app_icon.ico
new file mode 100644
index 0000000..c04e20c
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/resources/app_icon.ico
Binary files differ
diff --git a/packages/camera/camera_windows/example/windows/runner/runner.exe.manifest b/packages/camera/camera_windows/example/windows/runner/runner.exe.manifest
new file mode 100644
index 0000000..c977c4a
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/runner.exe.manifest
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <application xmlns="urn:schemas-microsoft-com:asm.v3">
+ <windowsSettings>
+ <dpiAwareness xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">PerMonitorV2</dpiAwareness>
+ </windowsSettings>
+ </application>
+ <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+ <application>
+ <!-- Windows 10 -->
+ <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+ <!-- Windows 8.1 -->
+ <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+ <!-- Windows 8 -->
+ <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+ <!-- Windows 7 -->
+ <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+ </application>
+ </compatibility>
+</assembly>
diff --git a/packages/camera/camera_windows/example/windows/runner/utils.cpp b/packages/camera/camera_windows/example/windows/runner/utils.cpp
new file mode 100644
index 0000000..fb7e945
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/utils.cpp
@@ -0,0 +1,67 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "utils.h"
+
+#include <flutter_windows.h>
+#include <io.h>
+#include <stdio.h>
+#include <windows.h>
+
+#include <iostream>
+
+void CreateAndAttachConsole() {
+  if (::AllocConsole()) {  // Only redirect if a new console was allocated.
+    FILE* unused;
+    if (freopen_s(&unused, "CONOUT$", "w", stdout)) {  // NOTE(review): freopen_s returns nonzero on FAILURE, so _dup2 runs as a fallback — confirm this inversion is intentional.
+      _dup2(_fileno(stdout), 1);
+    }
+    if (freopen_s(&unused, "CONOUT$", "w", stderr)) {  // Same fallback pattern for stderr.
+      _dup2(_fileno(stdout), 2);
+    }
+    std::ios::sync_with_stdio();  // Keep C++ streams in sync with the redirected C stdio.
+    FlutterDesktopResyncOutputStreams();  // Presumably re-binds the engine's output handles — see flutter_windows.h.
+  }
+}
+
+std::vector<std::string> GetCommandLineArguments() {
+ // Convert the UTF-16 command line arguments to UTF-8 for the Engine to use.
+ int argc;
+ wchar_t** argv = ::CommandLineToArgvW(::GetCommandLineW(), &argc);
+ if (argv == nullptr) {
+ return std::vector<std::string>();
+ }
+
+ std::vector<std::string> command_line_arguments;
+
+ // Skip the first argument as it's the binary name.
+ for (int i = 1; i < argc; i++) {
+ command_line_arguments.push_back(Utf8FromUtf16(argv[i]));
+ }
+
+ ::LocalFree(argv);
+
+ return command_line_arguments;
+}
+
+std::string Utf8FromUtf16(const wchar_t* utf16_string) {
+  if (utf16_string == nullptr) {
+    return std::string();
+  }
+  // Use an explicit input length: passing -1 also converts the terminator,
+  // which would leave an embedded trailing NUL in the returned std::string.
+  int input_length = static_cast<int>(wcslen(utf16_string));
+  int target_length = ::WideCharToMultiByte(CP_UTF8, WC_ERR_INVALID_CHARS,
+      utf16_string, input_length, nullptr, 0, nullptr, nullptr);
+  if (target_length <= 0) {
+    return std::string();
+  }
+  std::string utf8_string;
+  utf8_string.resize(target_length);
+  int converted_length = ::WideCharToMultiByte(
+      CP_UTF8, WC_ERR_INVALID_CHARS, utf16_string, input_length,
+      utf8_string.data(), target_length, nullptr, nullptr);
+  if (converted_length <= 0) return std::string();
+  return utf8_string;
+}
diff --git a/packages/camera/camera_windows/example/windows/runner/utils.h b/packages/camera/camera_windows/example/windows/runner/utils.h
new file mode 100644
index 0000000..bd81e1e
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/utils.h
@@ -0,0 +1,23 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef RUNNER_UTILS_H_
+#define RUNNER_UTILS_H_
+
+#include <string>
+#include <vector>
+
+// Creates a console for the process, and redirects stdout and stderr to
+// it for both the runner and the Flutter library.
+void CreateAndAttachConsole();
+
+// Takes a null-terminated wchar_t* encoded in UTF-16 and returns a std::string
+// encoded in UTF-8. Returns an empty std::string on failure.
+std::string Utf8FromUtf16(const wchar_t* utf16_string);
+
+// Gets the command line arguments passed in as a std::vector<std::string>,
+// encoded in UTF-8. Returns an empty std::vector<std::string> on failure.
+std::vector<std::string> GetCommandLineArguments();
+
+#endif // RUNNER_UTILS_H_
diff --git a/packages/camera/camera_windows/example/windows/runner/win32_window.cpp b/packages/camera/camera_windows/example/windows/runner/win32_window.cpp
new file mode 100644
index 0000000..85aa361
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/win32_window.cpp
@@ -0,0 +1,241 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "win32_window.h"
+
+#include <flutter_windows.h>
+
+#include "resource.h"
+
+namespace {
+
+constexpr const wchar_t kWindowClassName[] = L"FLUTTER_RUNNER_WIN32_WINDOW";
+
+// The number of Win32Window objects that currently exist.
+static int g_active_window_count = 0;
+
+using EnableNonClientDpiScaling = BOOL __stdcall(HWND hwnd);
+
+// Scale helper to convert logical scaler values to physical using passed in
+// scale factor
+int Scale(int source, double scale_factor) {
+ return static_cast<int>(source * scale_factor);
+}
+
+// Dynamically loads the |EnableNonClientDpiScaling| from the User32 module.
+// This API is only needed for PerMonitor V1 awareness mode.
+void EnableFullDpiSupportIfAvailable(HWND hwnd) {
+  HMODULE user32_module = LoadLibraryA("User32.dll");
+  if (!user32_module) {
+    return;
+  }
+  auto enable_non_client_dpi_scaling =
+      reinterpret_cast<EnableNonClientDpiScaling*>(
+          GetProcAddress(user32_module, "EnableNonClientDpiScaling"));
+  if (enable_non_client_dpi_scaling != nullptr) {
+    enable_non_client_dpi_scaling(hwnd);
+  }
+  FreeLibrary(user32_module);  // Balance LoadLibraryA even if lookup failed.
+}
+
+} // namespace
+
+// Manages the Win32Window's window class registration.
+class WindowClassRegistrar {
+ public:
+ ~WindowClassRegistrar() = default;
+
+  // Returns the singleton registrar instance.
+ static WindowClassRegistrar* GetInstance() {
+ if (!instance_) {
+ instance_ = new WindowClassRegistrar();
+ }
+ return instance_;
+ }
+
+ // Returns the name of the window class, registering the class if it hasn't
+ // previously been registered.
+ const wchar_t* GetWindowClass();
+
+ // Unregisters the window class. Should only be called if there are no
+ // instances of the window.
+ void UnregisterWindowClass();
+
+ private:
+ WindowClassRegistrar() = default;
+
+ static WindowClassRegistrar* instance_;
+
+ bool class_registered_ = false;
+};
+
+WindowClassRegistrar* WindowClassRegistrar::instance_ = nullptr;
+
+const wchar_t* WindowClassRegistrar::GetWindowClass() {
+ if (!class_registered_) {
+ WNDCLASS window_class{};
+ window_class.hCursor = LoadCursor(nullptr, IDC_ARROW);
+ window_class.lpszClassName = kWindowClassName;
+ window_class.style = CS_HREDRAW | CS_VREDRAW;
+ window_class.cbClsExtra = 0;
+ window_class.cbWndExtra = 0;
+ window_class.hInstance = GetModuleHandle(nullptr);
+ window_class.hIcon =
+ LoadIcon(window_class.hInstance, MAKEINTRESOURCE(IDI_APP_ICON));
+ window_class.hbrBackground = 0;
+ window_class.lpszMenuName = nullptr;
+ window_class.lpfnWndProc = Win32Window::WndProc;
+ RegisterClass(&window_class);
+ class_registered_ = true;
+ }
+ return kWindowClassName;
+}
+
+void WindowClassRegistrar::UnregisterWindowClass() {
+ UnregisterClass(kWindowClassName, nullptr);
+ class_registered_ = false;
+}
+
+Win32Window::Win32Window() { ++g_active_window_count; }
+
+Win32Window::~Win32Window() {
+ --g_active_window_count;
+ Destroy();
+}
+
+bool Win32Window::CreateAndShow(const std::wstring& title, const Point& origin,
+ const Size& size) {
+ Destroy();
+
+ const wchar_t* window_class =
+ WindowClassRegistrar::GetInstance()->GetWindowClass();
+
+ const POINT target_point = {static_cast<LONG>(origin.x),
+ static_cast<LONG>(origin.y)};
+ HMONITOR monitor = MonitorFromPoint(target_point, MONITOR_DEFAULTTONEAREST);
+ UINT dpi = FlutterDesktopGetDpiForMonitor(monitor);
+ double scale_factor = dpi / 96.0;
+
+ HWND window = CreateWindow(
+ window_class, title.c_str(), WS_OVERLAPPEDWINDOW | WS_VISIBLE,
+ Scale(origin.x, scale_factor), Scale(origin.y, scale_factor),
+ Scale(size.width, scale_factor), Scale(size.height, scale_factor),
+ nullptr, nullptr, GetModuleHandle(nullptr), this);
+
+ if (!window) {
+ return false;
+ }
+
+ return OnCreate();
+}
+
+// static
+LRESULT CALLBACK Win32Window::WndProc(HWND const window, UINT const message,
+ WPARAM const wparam,
+ LPARAM const lparam) noexcept {
+ if (message == WM_NCCREATE) {
+ auto window_struct = reinterpret_cast<CREATESTRUCT*>(lparam);
+ SetWindowLongPtr(window, GWLP_USERDATA,
+ reinterpret_cast<LONG_PTR>(window_struct->lpCreateParams));
+
+ auto that = static_cast<Win32Window*>(window_struct->lpCreateParams);
+ EnableFullDpiSupportIfAvailable(window);
+ that->window_handle_ = window;
+ } else if (Win32Window* that = GetThisFromHandle(window)) {
+ return that->MessageHandler(window, message, wparam, lparam);
+ }
+
+ return DefWindowProc(window, message, wparam, lparam);
+}
+
+LRESULT
+Win32Window::MessageHandler(HWND hwnd, UINT const message, WPARAM const wparam,
+ LPARAM const lparam) noexcept {
+ switch (message) {
+ case WM_DESTROY:
+ window_handle_ = nullptr;
+ Destroy();
+ if (quit_on_close_) {
+ PostQuitMessage(0);
+ }
+ return 0;
+
+ case WM_DPICHANGED: {
+ auto newRectSize = reinterpret_cast<RECT*>(lparam);
+ LONG newWidth = newRectSize->right - newRectSize->left;
+ LONG newHeight = newRectSize->bottom - newRectSize->top;
+
+ SetWindowPos(hwnd, nullptr, newRectSize->left, newRectSize->top, newWidth,
+ newHeight, SWP_NOZORDER | SWP_NOACTIVATE);
+
+ return 0;
+ }
+ case WM_SIZE: {
+ RECT rect = GetClientArea();
+ if (child_content_ != nullptr) {
+ // Size and position the child window.
+ MoveWindow(child_content_, rect.left, rect.top, rect.right - rect.left,
+ rect.bottom - rect.top, TRUE);
+ }
+ return 0;
+ }
+
+ case WM_ACTIVATE:
+ if (child_content_ != nullptr) {
+ SetFocus(child_content_);
+ }
+ return 0;
+ }
+
+ return DefWindowProc(window_handle_, message, wparam, lparam);
+}
+
+void Win32Window::Destroy() {
+  OnDestroy();  // Let subclasses release window-related resources first.
+
+  if (window_handle_) {
+    DestroyWindow(window_handle_);
+    window_handle_ = nullptr;
+  }
+  if (g_active_window_count == 0) {  // Unregister the class once no Win32Window instances remain (count is decremented in ~Win32Window).
+    WindowClassRegistrar::GetInstance()->UnregisterWindowClass();
+  }
+}
+
+Win32Window* Win32Window::GetThisFromHandle(HWND const window) noexcept {
+ return reinterpret_cast<Win32Window*>(
+ GetWindowLongPtr(window, GWLP_USERDATA));
+}
+
+void Win32Window::SetChildContent(HWND content) {
+ child_content_ = content;
+ SetParent(content, window_handle_);
+ RECT frame = GetClientArea();
+
+ MoveWindow(content, frame.left, frame.top, frame.right - frame.left,
+ frame.bottom - frame.top, true);
+
+ SetFocus(child_content_);
+}
+
+RECT Win32Window::GetClientArea() {
+ RECT frame;
+ GetClientRect(window_handle_, &frame);
+ return frame;
+}
+
+HWND Win32Window::GetHandle() { return window_handle_; }
+
+void Win32Window::SetQuitOnClose(bool quit_on_close) {
+ quit_on_close_ = quit_on_close;
+}
+
+bool Win32Window::OnCreate() {
+ // No-op; provided for subclasses.
+ return true;
+}
+
+void Win32Window::OnDestroy() {
+ // No-op; provided for subclasses.
+}
diff --git a/packages/camera/camera_windows/example/windows/runner/win32_window.h b/packages/camera/camera_windows/example/windows/runner/win32_window.h
new file mode 100644
index 0000000..d2a7300
--- /dev/null
+++ b/packages/camera/camera_windows/example/windows/runner/win32_window.h
@@ -0,0 +1,99 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef RUNNER_WIN32_WINDOW_H_
+#define RUNNER_WIN32_WINDOW_H_
+
+#include <windows.h>
+
+#include <functional>
+#include <memory>
+#include <string>
+
+// A class abstraction for a high DPI-aware Win32 Window. Intended to be
+// inherited from by classes that wish to specialize with custom
+// rendering and input handling
+class Win32Window {
+ public:
+ struct Point {
+ unsigned int x;
+ unsigned int y;
+ Point(unsigned int x, unsigned int y) : x(x), y(y) {}
+ };
+
+ struct Size {
+ unsigned int width;
+ unsigned int height;
+ Size(unsigned int width, unsigned int height)
+ : width(width), height(height) {}
+ };
+
+ Win32Window();
+ virtual ~Win32Window();
+
+ // Creates and shows a win32 window with |title| and position and size using
+ // |origin| and |size|. New windows are created on the default monitor. Window
+  // sizes are specified to the OS in physical pixels, hence to ensure a
+  // consistent size this function treats the width and height passed in as
+  // logical pixels and scales them appropriately for the default monitor.
+  // Returns true if the window was created successfully.
+ bool CreateAndShow(const std::wstring& title, const Point& origin,
+ const Size& size);
+
+ // Release OS resources associated with window.
+ void Destroy();
+
+ // Inserts |content| into the window tree.
+ void SetChildContent(HWND content);
+
+ // Returns the backing Window handle to enable clients to set icon and other
+ // window properties. Returns nullptr if the window has been destroyed.
+ HWND GetHandle();
+
+ // If true, closing this window will quit the application.
+ void SetQuitOnClose(bool quit_on_close);
+
+ // Return a RECT representing the bounds of the current client area.
+ RECT GetClientArea();
+
+ protected:
+  // Processes and routes salient window messages for mouse handling,
+ // size change and DPI. Delegates handling of these to member overloads that
+ // inheriting classes can handle.
+ virtual LRESULT MessageHandler(HWND window, UINT const message,
+ WPARAM const wparam,
+ LPARAM const lparam) noexcept;
+
+ // Called when CreateAndShow is called, allowing subclass window-related
+ // setup. Subclasses should return false if setup fails.
+ virtual bool OnCreate();
+
+ // Called when Destroy is called.
+ virtual void OnDestroy();
+
+ private:
+ friend class WindowClassRegistrar;
+
+ // OS callback called by message pump. Handles the WM_NCCREATE message which
+ // is passed when the non-client area is being created and enables automatic
+ // non-client DPI scaling so that the non-client area automatically
+  // responds to changes in DPI. All other messages are handled by
+ // MessageHandler.
+ static LRESULT CALLBACK WndProc(HWND const window, UINT const message,
+ WPARAM const wparam,
+ LPARAM const lparam) noexcept;
+
+ // Retrieves a class instance pointer for |window|
+ static Win32Window* GetThisFromHandle(HWND const window) noexcept;
+
+ bool quit_on_close_ = false;
+
+ // window handle for top level window.
+ HWND window_handle_ = nullptr;
+
+ // window handle for hosted content.
+ HWND child_content_ = nullptr;
+};
+
+#endif // RUNNER_WIN32_WINDOW_H_
diff --git a/packages/camera/camera_windows/lib/camera_windows.dart b/packages/camera/camera_windows/lib/camera_windows.dart
new file mode 100644
index 0000000..4b0c158
--- /dev/null
+++ b/packages/camera/camera_windows/lib/camera_windows.dart
@@ -0,0 +1,442 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:math';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:stream_transform/stream_transform.dart';
+
+/// An implementation of [CameraPlatform] for Windows.
+class CameraWindows extends CameraPlatform {
+ /// Registers the Windows implementation of CameraPlatform.
+ static void registerWith() {
+ CameraPlatform.instance = CameraWindows();
+ }
+
+ /// The method channel used to interact with the native platform.
+ @visibleForTesting
+ final MethodChannel pluginChannel =
+ const MethodChannel('plugins.flutter.io/camera_windows');
+
+ /// Camera specific method channels to allow communicating with specific cameras.
+ final Map<int, MethodChannel> _cameraChannels = <int, MethodChannel>{};
+
+ /// The controller that broadcasts events coming from handleCameraMethodCall
+ ///
+ /// It is a `broadcast` because multiple controllers will connect to
+ /// different stream views of this Controller.
+ /// This is only exposed for test purposes. It shouldn't be used by clients of
+ /// the plugin as it may break or change at any time.
+ @visibleForTesting
+ final StreamController<CameraEvent> cameraEventStreamController =
+ StreamController<CameraEvent>.broadcast();
+
+ /// Returns a stream of camera events for the given [cameraId].
+ Stream<CameraEvent> _cameraEvents(int cameraId) =>
+ cameraEventStreamController.stream
+ .where((CameraEvent event) => event.cameraId == cameraId);
+
+ @override
+ Future<List<CameraDescription>> availableCameras() async {
+ try {
+ final List<Map<dynamic, dynamic>>? cameras = await pluginChannel
+ .invokeListMethod<Map<dynamic, dynamic>>('availableCameras');
+
+ if (cameras == null) {
+ return <CameraDescription>[];
+ }
+
+ return cameras.map((Map<dynamic, dynamic> camera) {
+ return CameraDescription(
+ name: camera['name'] as String,
+ lensDirection:
+ parseCameraLensDirection(camera['lensFacing'] as String),
+ sensorOrientation: camera['sensorOrientation'] as int,
+ );
+ }).toList();
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<int> createCamera(
+ CameraDescription cameraDescription,
+ ResolutionPreset? resolutionPreset, {
+ bool enableAudio = false,
+ }) async {
+ try {
+ // If resolutionPreset is not specified, plugin selects the highest resolution possible.
+ final Map<String, dynamic>? reply = await pluginChannel
+ .invokeMapMethod<String, dynamic>('create', <String, dynamic>{
+ 'cameraName': cameraDescription.name,
+ 'resolutionPreset': _serializeResolutionPreset(resolutionPreset),
+ 'enableAudio': enableAudio,
+ });
+
+ if (reply == null) {
+ throw CameraException('System', 'Cannot create camera');
+ }
+
+ return reply['cameraId']! as int;
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+ }
+
+ @override
+ Future<void> initializeCamera(
+ int cameraId, {
+ ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
+ }) async {
+ final int requestedCameraId = cameraId;
+
+ /// Creates channel for camera events.
+ _cameraChannels.putIfAbsent(requestedCameraId, () {
+ final MethodChannel channel = MethodChannel(
+ 'plugins.flutter.io/camera_windows/camera$requestedCameraId');
+ channel.setMethodCallHandler(
+ (MethodCall call) => handleCameraMethodCall(call, requestedCameraId),
+ );
+ return channel;
+ });
+
+ final Map<String, double>? reply;
+ try {
+ reply = await pluginChannel.invokeMapMethod<String, double>(
+ 'initialize',
+ <String, dynamic>{
+ 'cameraId': requestedCameraId,
+ },
+ );
+ } on PlatformException catch (e) {
+ throw CameraException(e.code, e.message);
+ }
+
+ cameraEventStreamController.add(
+ CameraInitializedEvent(
+ requestedCameraId,
+ reply!['previewWidth']!,
+ reply['previewHeight']!,
+ ExposureMode.auto,
+ false,
+ FocusMode.auto,
+ false,
+ ),
+ );
+ }
+
+ @override
+ Future<void> dispose(int cameraId) async {
+ await pluginChannel.invokeMethod<void>(
+ 'dispose',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ // Destroy method channel after camera is disposed to be able to handle last messages.
+ if (_cameraChannels.containsKey(cameraId)) {
+ final MethodChannel? cameraChannel = _cameraChannels[cameraId];
+ cameraChannel?.setMethodCallHandler(null);
+ _cameraChannels.remove(cameraId);
+ }
+ }
+
+ @override
+ Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
+ }
+
+ @override
+ Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
+ /// Windows API does not automatically change the camera's resolution
+ /// during capture so these events are never sent from the platform.
+ /// Support for changing resolution should be implemented, if support for
+ /// requesting resolution change is added to camera platform interface.
+ return const Stream<CameraResolutionChangedEvent>.empty();
+ }
+
+ @override
+ Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
+ }
+
+ @override
+ Stream<CameraErrorEvent> onCameraError(int cameraId) {
+ return _cameraEvents(cameraId).whereType<CameraErrorEvent>();
+ }
+
+ @override
+ Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
+ return _cameraEvents(cameraId).whereType<VideoRecordedEvent>();
+ }
+
+ @override
+ Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
+ // TODO(jokerttu): Implement device orientation detection, https://github.com/flutter/flutter/issues/97540.
+ // Force device orientation to landscape as by default camera plugin uses portraitUp orientation.
+ return Stream<DeviceOrientationChangedEvent>.value(
+ const DeviceOrientationChangedEvent(DeviceOrientation.landscapeRight),
+ );
+ }
+
+ @override
+ Future<void> lockCaptureOrientation(
+ int cameraId,
+ DeviceOrientation orientation,
+ ) async {
+ // TODO(jokerttu): Implement lock capture orientation feature, https://github.com/flutter/flutter/issues/97540.
+ throw UnimplementedError('lockCaptureOrientation() is not implemented.');
+ }
+
+ @override
+ Future<void> unlockCaptureOrientation(int cameraId) async {
+ // TODO(jokerttu): Implement unlock capture orientation feature, https://github.com/flutter/flutter/issues/97540.
+ throw UnimplementedError('unlockCaptureOrientation() is not implemented.');
+ }
+
+ @override
+ Future<XFile> takePicture(int cameraId) async {
+ final String? path;
+ path = await pluginChannel.invokeMethod<String>(
+ 'takePicture',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return XFile(path!);
+ }
+
+ @override
+ Future<void> prepareForVideoRecording() =>
+ pluginChannel.invokeMethod<void>('prepareForVideoRecording');
+
+ @override
+ Future<void> startVideoRecording(int cameraId,
+ {Duration? maxVideoDuration}) async {
+ return startVideoCapturing(
+ VideoCaptureOptions(cameraId, maxDuration: maxVideoDuration));
+ }
+
+ @override
+ Future<void> startVideoCapturing(VideoCaptureOptions options) async {
+ if (options.streamCallback != null || options.streamOptions != null) {
+ throw UnimplementedError(
+ 'Streaming is not currently supported on Windows');
+ }
+
+ await pluginChannel.invokeMethod<void>(
+ 'startVideoRecording',
+ <String, dynamic>{
+ 'cameraId': options.cameraId,
+ 'maxVideoDuration': options.maxDuration?.inMilliseconds,
+ },
+ );
+ }
+
+ @override
+ Future<XFile> stopVideoRecording(int cameraId) async {
+ final String? path;
+
+ path = await pluginChannel.invokeMethod<String>(
+ 'stopVideoRecording',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+
+ return XFile(path!);
+ }
+
+ @override
+ Future<void> pauseVideoRecording(int cameraId) async {
+ throw UnsupportedError(
+ 'pauseVideoRecording() is not supported due to Win32 API limitations.');
+ }
+
+ @override
+ Future<void> resumeVideoRecording(int cameraId) async {
+ throw UnsupportedError(
+ 'resumeVideoRecording() is not supported due to Win32 API limitations.');
+ }
+
+ @override
+ Future<void> setFlashMode(int cameraId, FlashMode mode) async {
+ // TODO(jokerttu): Implement flash mode support, https://github.com/flutter/flutter/issues/97537.
+ throw UnimplementedError('setFlashMode() is not implemented.');
+ }
+
+ @override
+ Future<void> setExposureMode(int cameraId, ExposureMode mode) async {
+ // TODO(jokerttu): Implement exposure mode support, https://github.com/flutter/flutter/issues/97537.
+ throw UnimplementedError('setExposureMode() is not implemented.');
+ }
+
+ @override
+ Future<void> setExposurePoint(int cameraId, Point<double>? point) async {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ throw UnsupportedError(
+ 'setExposurePoint() is not supported due to Win32 API limitations.');
+ }
+
+ @override
+ Future<double> getMinExposureOffset(int cameraId) async {
+ // TODO(jokerttu): Implement exposure control support, https://github.com/flutter/flutter/issues/97537.
+ // Value is returned to support existing implementations.
+ return 0.0;
+ }
+
+ @override
+ Future<double> getMaxExposureOffset(int cameraId) async {
+ // TODO(jokerttu): Implement exposure control support, https://github.com/flutter/flutter/issues/97537.
+ // Value is returned to support existing implementations.
+ return 0.0;
+ }
+
+ @override
+ Future<double> getExposureOffsetStepSize(int cameraId) async {
+ // TODO(jokerttu): Implement exposure control support, https://github.com/flutter/flutter/issues/97537.
+ // Value is returned to support existing implementations.
+ return 1.0;
+ }
+
+ @override
+ Future<double> setExposureOffset(int cameraId, double offset) async {
+ // TODO(jokerttu): Implement exposure control support, https://github.com/flutter/flutter/issues/97537.
+ throw UnimplementedError('setExposureOffset() is not implemented.');
+ }
+
+ @override
+ Future<void> setFocusMode(int cameraId, FocusMode mode) async {
+ // TODO(jokerttu): Implement focus mode support, https://github.com/flutter/flutter/issues/97537.
+ throw UnimplementedError('setFocusMode() is not implemented.');
+ }
+
+ @override
+ Future<void> setFocusPoint(int cameraId, Point<double>? point) async {
+ assert(point == null || point.x >= 0 && point.x <= 1);
+ assert(point == null || point.y >= 0 && point.y <= 1);
+
+ throw UnsupportedError(
+ 'setFocusPoint() is not supported due to Win32 API limitations.');
+ }
+
+ @override
+ Future<double> getMinZoomLevel(int cameraId) async {
+ // TODO(jokerttu): Implement zoom level support, https://github.com/flutter/flutter/issues/97537.
+ // Value is returned to support existing implementations.
+ return 1.0;
+ }
+
+ @override
+ Future<double> getMaxZoomLevel(int cameraId) async {
+ // TODO(jokerttu): Implement zoom level support, https://github.com/flutter/flutter/issues/97537.
+ // Value is returned to support existing implementations.
+ return 1.0;
+ }
+
+ @override
+ Future<void> setZoomLevel(int cameraId, double zoom) async {
+ // TODO(jokerttu): Implement zoom level support, https://github.com/flutter/flutter/issues/97537.
+ throw UnimplementedError('setZoomLevel() is not implemented.');
+ }
+
+ @override
+ Future<void> pausePreview(int cameraId) async {
+ await pluginChannel.invokeMethod<double>(
+ 'pausePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Future<void> resumePreview(int cameraId) async {
+ await pluginChannel.invokeMethod<double>(
+ 'resumePreview',
+ <String, dynamic>{'cameraId': cameraId},
+ );
+ }
+
+ @override
+ Widget buildPreview(int cameraId) {
+ return Texture(textureId: cameraId);
+ }
+
+ /// Returns the resolution preset as a nullable String.
+ String? _serializeResolutionPreset(ResolutionPreset? resolutionPreset) {
+ switch (resolutionPreset) {
+ case null:
+ return null;
+ case ResolutionPreset.max:
+ return 'max';
+ case ResolutionPreset.ultraHigh:
+ return 'ultraHigh';
+ case ResolutionPreset.veryHigh:
+ return 'veryHigh';
+ case ResolutionPreset.high:
+ return 'high';
+ case ResolutionPreset.medium:
+ return 'medium';
+ case ResolutionPreset.low:
+ return 'low';
+ }
+ }
+
+ /// Converts messages received from the native platform into camera events.
+ ///
+ /// This is only exposed for test purposes. It shouldn't be used by clients
+ /// of the plugin as it may break or change at any time.
+ @visibleForTesting
+ Future<dynamic> handleCameraMethodCall(MethodCall call, int cameraId) async {
+ switch (call.method) {
+ case 'camera_closing':
+ cameraEventStreamController.add(
+ CameraClosingEvent(
+ cameraId,
+ ),
+ );
+ break;
+ case 'video_recorded':
+ final Map<String, Object?> arguments =
+ (call.arguments as Map<Object?, Object?>).cast<String, Object?>();
+ final int? maxDuration = arguments['maxVideoDuration'] as int?;
+ // This is called if maxVideoDuration was given on record start.
+ cameraEventStreamController.add(
+ VideoRecordedEvent(
+ cameraId,
+ XFile(arguments['path']! as String),
+ maxDuration != null ? Duration(milliseconds: maxDuration) : null,
+ ),
+ );
+ break;
+ case 'error':
+ final Map<String, Object?> arguments =
+ (call.arguments as Map<Object?, Object?>).cast<String, Object?>();
+ cameraEventStreamController.add(
+ CameraErrorEvent(
+ cameraId,
+ arguments['description']! as String,
+ ),
+ );
+ break;
+ default:
+ throw UnimplementedError();
+ }
+ }
+
+ /// Parses the string representation of the camera lens direction and returns the enum value.
+ @visibleForTesting
+ CameraLensDirection parseCameraLensDirection(String string) {
+ switch (string) {
+ case 'front':
+ return CameraLensDirection.front;
+ case 'back':
+ return CameraLensDirection.back;
+ case 'external':
+ return CameraLensDirection.external;
+ }
+ throw ArgumentError('Unknown CameraLensDirection value');
+ }
+}
diff --git a/packages/camera/camera_windows/pubspec.yaml b/packages/camera/camera_windows/pubspec.yaml
new file mode 100644
index 0000000..e028559
--- /dev/null
+++ b/packages/camera/camera_windows/pubspec.yaml
@@ -0,0 +1,29 @@
+name: camera_windows
+description: A Flutter plugin for getting information about and controlling the camera on Windows.
+repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_windows
+issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
+version: 0.2.1+4
+
+environment:
+ sdk: ">=2.12.0 <3.0.0"
+ flutter: ">=3.0.0"
+
+flutter:
+ plugin:
+ implements: camera
+ platforms:
+ windows:
+ pluginClass: CameraWindows
+ dartPluginClass: CameraWindows
+
+dependencies:
+ camera_platform_interface: ^2.3.1
+ cross_file: ^0.3.1
+ flutter:
+ sdk: flutter
+ stream_transform: ^2.0.0
+
+dev_dependencies:
+ async: ^2.5.0
+ flutter_test:
+ sdk: flutter
diff --git a/packages/camera/camera_windows/test/camera_windows_test.dart b/packages/camera/camera_windows/test/camera_windows_test.dart
new file mode 100644
index 0000000..8d7b5d3
--- /dev/null
+++ b/packages/camera/camera_windows/test/camera_windows_test.dart
@@ -0,0 +1,675 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:async/async.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_windows/camera_windows.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart';
+import 'package:flutter_test/flutter_test.dart';
+import './utils/method_channel_mock.dart';
+
+void main() {
+ const String pluginChannelName = 'plugins.flutter.io/camera_windows';
+ TestWidgetsFlutterBinding.ensureInitialized();
+
+ group('$CameraWindows()', () {
+ test('registered instance', () {
+ CameraWindows.registerWith();
+ expect(CameraPlatform.instance, isA<CameraWindows>());
+ });
+
+ group('Creation, Initialization & Disposal Tests', () {
+ test('Should send creation data and receive back a camera id', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ }
+ });
+ final CameraWindows plugin = CameraWindows();
+
+ // Act
+ final int cameraId = await plugin.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 0),
+ ResolutionPreset.high,
+ );
+
+ // Assert
+ expect(cameraMockChannel.log, <Matcher>[
+ isMethodCall(
+ 'create',
+ arguments: <String, Object?>{
+ 'cameraName': 'Test',
+ 'resolutionPreset': 'high',
+ 'enableAudio': false
+ },
+ ),
+ ]);
+ expect(cameraId, 1);
+ });
+
+ test(
+ 'Should throw CameraException when create throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{
+ 'create': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+ final CameraWindows plugin = CameraWindows();
+
+ // Act
+ expect(
+ () => plugin.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ ),
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test(
+ 'Should throw CameraException when initialize throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{
+ 'initialize': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ },
+ );
+ final CameraWindows plugin = CameraWindows();
+
+ // Act
+ expect(
+ () => plugin.initializeCamera(0),
+ throwsA(
+ isA<CameraException>()
+ .having((CameraException e) => e.code, 'code',
+ 'TESTING_ERROR_CODE')
+ .having(
+ (CameraException e) => e.description,
+ 'description',
+ 'Mock error message used during testing.',
+ ),
+ ),
+ );
+ },
+ );
+
+ test('Should send initialization data', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{
+ 'cameraId': 1,
+ 'imageFormatGroup': 'unknown',
+ },
+ 'initialize': <String, dynamic>{
+ 'previewWidth': 1920.toDouble(),
+ 'previewHeight': 1080.toDouble()
+ },
+ });
+ final CameraWindows plugin = CameraWindows();
+ final int cameraId = await plugin.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+
+ // Act
+ await plugin.initializeCamera(cameraId);
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ isMethodCall(
+ 'initialize',
+ arguments: <String, Object?>{'cameraId': 1},
+ ),
+ ]);
+ });
+
+ test('Should send a disposal call on dispose', () async {
+ // Arrange
+ final MethodChannelMock cameraMockChannel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': <String, dynamic>{
+ 'previewWidth': 1920.toDouble(),
+ 'previewHeight': 1080.toDouble()
+ },
+ 'dispose': <String, dynamic>{'cameraId': 1}
+ });
+
+ final CameraWindows plugin = CameraWindows();
+ final int cameraId = await plugin.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ await plugin.initializeCamera(cameraId);
+
+ // Act
+ await plugin.dispose(cameraId);
+
+ // Assert
+ expect(cameraId, 1);
+ expect(cameraMockChannel.log, <Matcher>[
+ anything,
+ anything,
+ isMethodCall(
+ 'dispose',
+ arguments: <String, Object?>{'cameraId': 1},
+ ),
+ ]);
+ });
+ });
+
+ group('Event Tests', () {
+ late CameraWindows plugin;
+ late int cameraId;
+ setUp(() async {
+ MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': <String, dynamic>{
+ 'previewWidth': 1920.toDouble(),
+ 'previewHeight': 1080.toDouble()
+ },
+ },
+ );
+
+ plugin = CameraWindows();
+ cameraId = await plugin.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ await plugin.initializeCamera(cameraId);
+ });
+
+ test('Should receive camera closing events', () async {
+ // Act
+ final Stream<CameraClosingEvent> eventStream =
+ plugin.onCameraClosing(cameraId);
+ final StreamQueue<CameraClosingEvent> streamQueue =
+ StreamQueue<CameraClosingEvent>(eventStream);
+
+ // Emit test events
+ final CameraClosingEvent event = CameraClosingEvent(cameraId);
+ await plugin.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await plugin.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+ await plugin.handleCameraMethodCall(
+ MethodCall('camera_closing', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+
+ test('Should receive camera error events', () async {
+ // Act
+ final Stream<CameraErrorEvent> errorStream =
+ plugin.onCameraError(cameraId);
+ final StreamQueue<CameraErrorEvent> streamQueue =
+ StreamQueue<CameraErrorEvent>(errorStream);
+
+ // Emit test events
+ final CameraErrorEvent event =
+ CameraErrorEvent(cameraId, 'Error Description');
+ await plugin.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await plugin.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+ await plugin.handleCameraMethodCall(
+ MethodCall('error', event.toJson()), cameraId);
+
+ // Assert
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+ expect(await streamQueue.next, event);
+
+ // Clean up
+ await streamQueue.cancel();
+ });
+ });
+
+ group('Function Tests', () {
+ late CameraWindows plugin;
+ late int cameraId;
+
+ setUp(() async {
+ MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{
+ 'create': <String, dynamic>{'cameraId': 1},
+ 'initialize': <String, dynamic>{
+ 'previewWidth': 1920.toDouble(),
+ 'previewHeight': 1080.toDouble()
+ },
+ },
+ );
+ plugin = CameraWindows();
+ cameraId = await plugin.createCamera(
+ const CameraDescription(
+ name: 'Test',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.high,
+ );
+ await plugin.initializeCamera(cameraId);
+ });
+
+ test('Should fetch CameraDescription instances for available cameras',
+ () async {
+ // Arrange
+ final List<dynamic> returnData = <dynamic>[
+ <String, dynamic>{
+ 'name': 'Test 1',
+ 'lensFacing': 'front',
+ 'sensorOrientation': 1
+ },
+ <String, dynamic>{
+ 'name': 'Test 2',
+ 'lensFacing': 'back',
+ 'sensorOrientation': 2
+ }
+ ];
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{'availableCameras': returnData},
+ );
+
+ // Act
+ final List<CameraDescription> cameras = await plugin.availableCameras();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('availableCameras', arguments: null),
+ ]);
+ expect(cameras.length, returnData.length);
+ for (int i = 0; i < returnData.length; i++) {
+ final Map<String, Object?> typedData =
+ (returnData[i] as Map<dynamic, dynamic>).cast<String, Object?>();
+ final CameraDescription cameraDescription = CameraDescription(
+ name: typedData['name']! as String,
+ lensDirection: plugin
+ .parseCameraLensDirection(typedData['lensFacing']! as String),
+ sensorOrientation: typedData['sensorOrientation']! as int,
+ );
+ expect(cameras[i], cameraDescription);
+ }
+ });
+
+ test(
+ 'Should throw CameraException when availableCameras throws a PlatformException',
+ () {
+ // Arrange
+ MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{
+ 'availableCameras': PlatformException(
+ code: 'TESTING_ERROR_CODE',
+ message: 'Mock error message used during testing.',
+ )
+ });
+
+ // Act
+ expect(
+ plugin.availableCameras,
+ throwsA(
+ isA<CameraException>()
+ .having(
+ (CameraException e) => e.code, 'code', 'TESTING_ERROR_CODE')
+ .having((CameraException e) => e.description, 'description',
+ 'Mock error message used during testing.'),
+ ),
+ );
+ });
+
+ test('Should take a picture and return an XFile instance', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{'takePicture': '/test/path.jpg'});
+
+ // Act
+ final XFile file = await plugin.takePicture(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('takePicture', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.jpg');
+ });
+
+ test('Should prepare for video recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{'prepareForVideoRecording': null},
+ );
+
+ // Act
+ await plugin.prepareForVideoRecording();
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('prepareForVideoRecording', arguments: null),
+ ]);
+ });
+
+ test('Should start recording a video', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await plugin.startVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': null,
+ }),
+ ]);
+ });
+
+ test('Should pass maxVideoDuration when starting recording a video',
+ () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{'startVideoRecording': null},
+ );
+
+ // Act
+ await plugin.startVideoRecording(
+ cameraId,
+ maxVideoDuration: const Duration(seconds: 10),
+ );
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('startVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ 'maxVideoDuration': 10000
+ }),
+ ]);
+ });
+
+ test('capturing fails if trying to stream', () async {
+ // Act and Assert
+ expect(
+ () => plugin.startVideoCapturing(VideoCaptureOptions(cameraId,
+ streamCallback: (CameraImageData imageData) {})),
+ throwsA(isA<UnimplementedError>()),
+ );
+ });
+
+ test('Should stop a video recording and return the file', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{'stopVideoRecording': '/test/path.mp4'},
+ );
+
+ // Act
+ final XFile file = await plugin.stopVideoRecording(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('stopVideoRecording', arguments: <String, Object?>{
+ 'cameraId': cameraId,
+ }),
+ ]);
+ expect(file.path, '/test/path.mp4');
+ });
+
+ test('Should throw UnsupportedError when pause video recording is called',
+ () async {
+ // Act
+ expect(
+ () => plugin.pauseVideoRecording(cameraId),
+ throwsA(isA<UnsupportedError>()),
+ );
+ });
+
+ test(
+ 'Should throw UnsupportedError when resume video recording is called',
+ () async {
+ // Act
+ expect(
+ () => plugin.resumeVideoRecording(cameraId),
+ throwsA(isA<UnsupportedError>()),
+ );
+ });
+
+ test('Should throw UnimplementedError when flash mode is set', () async {
+ // Act
+ expect(
+ () => plugin.setFlashMode(cameraId, FlashMode.torch),
+ throwsA(isA<UnimplementedError>()),
+ );
+ });
+
+ test('Should throw UnimplementedError when exposure mode is set',
+ () async {
+ // Act
+ expect(
+ () => plugin.setExposureMode(cameraId, ExposureMode.auto),
+ throwsA(isA<UnimplementedError>()),
+ );
+ });
+
+ test('Should throw UnsupportedError when exposure point is set',
+ () async {
+ // Act
+ expect(
+ () => plugin.setExposurePoint(cameraId, null),
+ throwsA(isA<UnsupportedError>()),
+ );
+ });
+
+ test('Should get the min exposure offset', () async {
+ // Act
+ final double minExposureOffset =
+ await plugin.getMinExposureOffset(cameraId);
+
+ // Assert
+ expect(minExposureOffset, 0.0);
+ });
+
+ test('Should get the max exposure offset', () async {
+ // Act
+ final double maxExposureOffset =
+ await plugin.getMaxExposureOffset(cameraId);
+
+ // Assert
+ expect(maxExposureOffset, 0.0);
+ });
+
+ test('Should get the exposure offset step size', () async {
+ // Act
+ final double stepSize =
+ await plugin.getExposureOffsetStepSize(cameraId);
+
+ // Assert
+ expect(stepSize, 1.0);
+ });
+
+ test('Should throw UnimplementedError when exposure offset is set',
+ () async {
+ // Act
+ expect(
+ () => plugin.setExposureOffset(cameraId, 0.5),
+ throwsA(isA<UnimplementedError>()),
+ );
+ });
+
+ test('Should throw UnimplementedError when focus mode is set', () async {
+ // Act
+ expect(
+ () => plugin.setFocusMode(cameraId, FocusMode.auto),
+ throwsA(isA<UnimplementedError>()),
+ );
+ });
+
+ test('Should throw UnsupportedError when exposure point is set',
+ () async {
+ // Act
+ expect(
+ () => plugin.setFocusMode(cameraId, FocusMode.auto),
+ throwsA(isA<UnsupportedError>()),
+ );
+ });
+
+ test('Should build a texture widget as preview widget', () async {
+ // Act
+ final Widget widget = plugin.buildPreview(cameraId);
+
+ // Act
+ expect(widget is Texture, isTrue);
+ expect((widget as Texture).textureId, cameraId);
+ });
+
+ test('Should throw UnimplementedError when handling unknown method', () {
+ final CameraWindows plugin = CameraWindows();
+
+ expect(
+ () => plugin.handleCameraMethodCall(
+ const MethodCall('unknown_method'), 1),
+ throwsA(isA<UnimplementedError>()));
+ });
+
+ test('Should get the max zoom level', () async {
+ // Act
+ final double maxZoomLevel = await plugin.getMaxZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 1.0);
+ });
+
+ test('Should get the min zoom level', () async {
+ // Act
+ final double maxZoomLevel = await plugin.getMinZoomLevel(cameraId);
+
+ // Assert
+ expect(maxZoomLevel, 1.0);
+ });
+
+ test('Should throw UnimplementedError when zoom level is set', () async {
+ // Act
+ expect(
+ () => plugin.setZoomLevel(cameraId, 2.0),
+ throwsA(isA<UnimplementedError>()),
+ );
+ });
+
+ test(
+ 'Should throw UnimplementedError when lock capture orientation is called',
+ () async {
+ // Act
+ expect(
+ () => plugin.setZoomLevel(cameraId, 2.0),
+ throwsA(isA<UnimplementedError>()),
+ );
+ });
+
+ test(
+ 'Should throw UnimplementedError when unlock capture orientation is called',
+ () async {
+ // Act
+ expect(
+ () => plugin.unlockCaptureOrientation(cameraId),
+ throwsA(isA<UnimplementedError>()),
+ );
+ });
+
+ test('Should pause the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{'pausePreview': null},
+ );
+
+ // Act
+ await plugin.pausePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('pausePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+
+ test('Should resume the camera preview', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: pluginChannelName,
+ methods: <String, dynamic>{'resumePreview': null},
+ );
+
+ // Act
+ await plugin.resumePreview(cameraId);
+
+ // Assert
+ expect(channel.log, <Matcher>[
+ isMethodCall('resumePreview',
+ arguments: <String, Object?>{'cameraId': cameraId}),
+ ]);
+ });
+ });
+ });
+}
diff --git a/packages/camera/camera_windows/test/utils/method_channel_mock.dart b/packages/camera/camera_windows/test/utils/method_channel_mock.dart
new file mode 100644
index 0000000..559f606
--- /dev/null
+++ b/packages/camera/camera_windows/test/utils/method_channel_mock.dart
@@ -0,0 +1,53 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+/// A mock [MethodChannel] implementation for use in tests.
+class MethodChannelMock {
+  /// Creates a new instance with the specified channel name.
+  ///
+  /// This method channel will handle all method invocations specified by
+  /// returning the value mapped to the method name key. If a delay is
+  /// specified, results are returned after the delay has elapsed.
+  MethodChannelMock({
+    required String channelName,
+    this.delay,
+    required this.methods,
+  }) : methodChannel = MethodChannel(channelName) {
+    _ambiguate(TestDefaultBinaryMessengerBinding.instance)!
+        .defaultBinaryMessenger
+        .setMockMethodCallHandler(methodChannel, _handler);
+  }
+
+  /// Optional delay applied before each mocked result is delivered.
+  final Duration? delay;
+  /// The channel this mock handles calls for.
+  final MethodChannel methodChannel;
+  /// Maps method names to the results they return; an Exception value is
+  /// thrown instead of returned.
+  final Map<String, dynamic> methods;
+  /// Every method call received by this mock, in invocation order.
+  final List<MethodCall> log = <MethodCall>[];
+
+  // Records the call, then resolves it from [methods]; unknown methods throw
+  // [MissingPluginException], mirroring a real unregistered channel.
+  Future<dynamic> _handler(MethodCall methodCall) async {
+    log.add(methodCall);
+
+    if (!methods.containsKey(methodCall.method)) {
+      throw MissingPluginException('No TEST implementation found for method '
+          '${methodCall.method} on channel ${methodChannel.name}');
+    }
+
+    return Future<dynamic>.delayed(delay ?? Duration.zero, () {
+      final dynamic result = methods[methodCall.method];
+      if (result is Exception) {
+        throw result;
+      }
+
+      return Future<dynamic>.value(result);
+    });
+  }
+}
+
+/// Treats a value of type `T` or `T?` as a value of type `T?`.
+///
+/// This keeps call sites compatible with both nullable and non-nullable
+/// versions of an API, so `!` and `?` keep working on the stable branch.
+T? _ambiguate<T>(T? value) {
+  return value;
+}
diff --git a/packages/camera/camera_windows/windows/.gitignore b/packages/camera/camera_windows/windows/.gitignore
new file mode 100644
index 0000000..b3eb2be
--- /dev/null
+++ b/packages/camera/camera_windows/windows/.gitignore
@@ -0,0 +1,17 @@
+flutter/
+
+# Visual Studio user-specific files.
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# Visual Studio build-related files.
+x64/
+x86/
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!*.[Cc]ache/
diff --git a/packages/camera/camera_windows/windows/CMakeLists.txt b/packages/camera/camera_windows/windows/CMakeLists.txt
new file mode 100644
index 0000000..caeb109
--- /dev/null
+++ b/packages/camera/camera_windows/windows/CMakeLists.txt
@@ -0,0 +1,99 @@
+cmake_minimum_required(VERSION 3.14)
+set(PROJECT_NAME "camera_windows")
+project(${PROJECT_NAME} LANGUAGES CXX)
+
+# This value is used when generating builds using this plugin, so it must
+# not be changed.
+set(PLUGIN_NAME "${PROJECT_NAME}_plugin")
+
+# Sources shared by the plugin library and (below) the unit test runner.
+list(APPEND PLUGIN_SOURCES
+  "camera_plugin.h"
+  "camera_plugin.cpp"
+  "camera.h"
+  "camera.cpp"
+  "capture_controller.h"
+  "capture_controller.cpp"
+  "capture_controller_listener.h"
+  "capture_engine_listener.h"
+  "capture_engine_listener.cpp"
+  "string_utils.h"
+  "string_utils.cpp"
+  "capture_device_info.h"
+  "capture_device_info.cpp"
+  "preview_handler.h"
+  "preview_handler.cpp"
+  "record_handler.h"
+  "record_handler.cpp"
+  "photo_handler.h"
+  "photo_handler.cpp"
+  "texture_handler.h"
+  "texture_handler.cpp"
+  "com_heap_ptr.h"
+)
+
+add_library(${PLUGIN_NAME} SHARED
+  "camera_windows.cpp"
+  "include/camera_windows/camera_windows.h"
+  ${PLUGIN_SOURCES}
+)
+
+apply_standard_settings(${PLUGIN_NAME})
+set_target_properties(${PLUGIN_NAME} PROPERTIES
+  CXX_VISIBILITY_PRESET hidden)
+target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL)
+target_include_directories(${PLUGIN_NAME} INTERFACE
+  "${CMAKE_CURRENT_SOURCE_DIR}/include")
+target_link_libraries(${PLUGIN_NAME} PRIVATE flutter flutter_wrapper_plugin)
+# Windows system libraries used by the capture code (mf* are presumably the
+# Media Foundation libraries; d3d11 is Direct3D 11).
+target_link_libraries(${PLUGIN_NAME} PRIVATE mf mfplat mfuuid d3d11)
+
+# List of absolute paths to libraries that should be bundled with the plugin
+# (none for this plugin).
+set(camera_windows_bundled_libraries
+  ""
+  PARENT_SCOPE
+)
+
+
+# === Tests ===
+
+# Tests are only built when the tooling opts in via this variable.
+if (${include_${PROJECT_NAME}_tests})
+set(TEST_RUNNER "${PROJECT_NAME}_test")
+enable_testing()
+# TODO(stuartmorgan): Consider using a single shared, pre-checked-in googletest
+# instance rather than downloading for each plugin. This approach makes sense
+# for a template, but not for a monorepo with many plugins.
+include(FetchContent)
+FetchContent_Declare(
+  googletest
+  URL https://github.com/google/googletest/archive/release-1.11.0.zip
+)
+# Prevent overriding the parent project's compiler/linker settings
+set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
+# Disable install commands for gtest so it doesn't end up in the bundle.
+set(INSTALL_GTEST OFF CACHE BOOL "Disable installation of googletest" FORCE)
+
+FetchContent_MakeAvailable(googletest)
+
+# The plugin's C API is not very useful for unit testing, so build the sources
+# directly into the test binary rather than using the DLL.
+add_executable(${TEST_RUNNER}
+  test/mocks.h
+  test/camera_plugin_test.cpp
+  test/camera_test.cpp
+  test/capture_controller_test.cpp
+  ${PLUGIN_SOURCES}
+)
+apply_standard_settings(${TEST_RUNNER})
+target_include_directories(${TEST_RUNNER} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}")
+target_link_libraries(${TEST_RUNNER} PRIVATE flutter_wrapper_plugin)
+target_link_libraries(${TEST_RUNNER} PRIVATE mf mfplat mfuuid d3d11)
+target_link_libraries(${TEST_RUNNER} PRIVATE gtest_main gmock)
+
+# flutter_wrapper_plugin has link dependencies on the Flutter DLL; copy it next
+# to the test binary so the runner can load it.
+add_custom_command(TARGET ${TEST_RUNNER} POST_BUILD
+  COMMAND ${CMAKE_COMMAND} -E copy_if_different
+  "${FLUTTER_LIBRARY}" $<TARGET_FILE_DIR:${TEST_RUNNER}>
+)
+
+include(GoogleTest)
+gtest_discover_tests(${TEST_RUNNER})
+endif()
diff --git a/packages/camera/camera_windows/windows/camera.cpp b/packages/camera/camera_windows/windows/camera.cpp
new file mode 100644
index 0000000..6a09447
--- /dev/null
+++ b/packages/camera/camera_windows/windows/camera.cpp
@@ -0,0 +1,299 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "camera.h"
+
+namespace camera_windows {
+using flutter::EncodableList;
+using flutter::EncodableMap;
+using flutter::EncodableValue;
+
+// Camera channel events.
+// The camera/texture id is appended to the base name to form each camera's
+// dedicated method channel (see GetMethodChannel).
+constexpr char kCameraMethodChannelBaseName[] =
+    "plugins.flutter.io/camera_windows/camera";
+constexpr char kVideoRecordedEvent[] = "video_recorded";
+constexpr char kCameraClosingEvent[] = "camera_closing";
+constexpr char kErrorEvent[] = "error";
+
+// Camera error codes reported back to the Dart side.
+constexpr char kCameraAccessDenied[] = "CameraAccessDenied";
+constexpr char kCameraError[] = "camera_error";
+constexpr char kPluginDisposed[] = "plugin_disposed";
+
+std::string GetErrorCode(CameraResult result) {
+ assert(result != CameraResult::kSuccess);
+
+ switch (result) {
+ case CameraResult::kAccessDenied:
+ return kCameraAccessDenied;
+
+ case CameraResult::kSuccess:
+ case CameraResult::kError:
+ default:
+ return kCameraError;
+ }
+}
+
+// Base-class initializer listed first to match the actual initialization
+// order (bases are constructed before members); the original ordering drew
+// -Wreorder-style warnings.
+CameraImpl::CameraImpl(const std::string& device_id)
+    : Camera(device_id), device_id_(device_id) {}
+
+CameraImpl::~CameraImpl() {
+  // Sends camera closing event.
+  OnCameraClosing();
+
+  // NOTE(review): the controller is released before pending results are
+  // failed — presumably to stop further listener callbacks arriving during
+  // teardown; confirm the ordering is intentional.
+  capture_controller_ = nullptr;
+  SendErrorForPendingResults(kPluginDisposed,
+                             "Plugin disposed before request was handled");
+}
+
+bool CameraImpl::InitCamera(flutter::TextureRegistrar* texture_registrar,
+                            flutter::BinaryMessenger* messenger,
+                            bool record_audio,
+                            ResolutionPreset resolution_preset) {
+  // Delegates to the testable overload, supplying the production factory.
+  return InitCamera(std::make_unique<CaptureControllerFactoryImpl>(),
+                    texture_registrar, messenger, record_audio,
+                    resolution_preset);
+}
+
+bool CameraImpl::InitCamera(
+    std::unique_ptr<CaptureControllerFactory> capture_controller_factory,
+    flutter::TextureRegistrar* texture_registrar,
+    flutter::BinaryMessenger* messenger, bool record_audio,
+    ResolutionPreset resolution_preset) {
+  assert(!device_id_.empty());
+  messenger_ = messenger;
+  // The controller reports results back to this object through the
+  // CaptureControllerListener overrides below.
+  capture_controller_ =
+      capture_controller_factory->CreateCaptureController(this);
+  return capture_controller_->InitCaptureDevice(
+      texture_registrar, device_id_, record_audio, resolution_preset);
+}
+
+// Registers |result| as the pending result for |type|.
+//
+// Returns false (after responding with an error through |result|) if a
+// request of this type is already in flight; the existing pending result is
+// left untouched.
+bool CameraImpl::AddPendingResult(
+    PendingResultType type, std::unique_ptr<flutter::MethodResult<>> result) {
+  assert(result);
+
+  // Single-lookup insert. Per the standard, try_emplace does not move from
+  // its arguments when the key already exists, so |result| is still valid on
+  // the failure path below.
+  bool added = pending_results_.try_emplace(type, std::move(result)).second;
+  if (!added) {
+    result->Error("Duplicate request", "Method handler already called");
+    return false;
+  }
+  return true;
+}
+
+std::unique_ptr<flutter::MethodResult<>> CameraImpl::GetPendingResultByType(
+    PendingResultType type) {
+  // Detach the map node (if present) and hand its result to the caller.
+  auto node = pending_results_.extract(type);
+  if (node.empty()) {
+    return nullptr;
+  }
+  return std::move(node.mapped());
+}
+
+bool CameraImpl::HasPendingResultByType(PendingResultType type) const {
+  // Present and non-null.
+  const auto it = pending_results_.find(type);
+  return it != pending_results_.end() && it->second != nullptr;
+}
+
+void CameraImpl::SendErrorForPendingResults(const std::string& error_code,
+                                            const std::string& description) {
+  // Fail every outstanding request with the same error, then forget them all.
+  for (const auto& [type, pending_result] : pending_results_) {
+    pending_result->Error(error_code, description);
+  }
+  pending_results_.clear();
+}
+
+// Lazily creates (and caches) the per-camera method channel.
+MethodChannel<>* CameraImpl::GetMethodChannel() {
+  assert(messenger_);
+  // camera_id_ stays at its -1 sentinel until OnCreateCaptureEngineSucceeded
+  // runs; the original assert(camera_id_) accepted -1, so check the sign
+  // explicitly.
+  assert(camera_id_ >= 0);
+
+  // Use existing channel if initialized
+  if (camera_channel_) {
+    return camera_channel_.get();
+  }
+
+  auto channel_name =
+      std::string(kCameraMethodChannelBaseName) + std::to_string(camera_id_);
+
+  camera_channel_ = std::make_unique<flutter::MethodChannel<>>(
+      messenger_, channel_name, &flutter::StandardMethodCodec::GetInstance());
+
+  return camera_channel_.get();
+}
+
+void CameraImpl::OnCreateCaptureEngineSucceeded(int64_t texture_id) {
+  // Use texture id as camera id; it is later used to build the per-camera
+  // method channel name (see GetMethodChannel).
+  camera_id_ = texture_id;
+  // Resolve the pending create-camera request with the new id.
+  auto pending_result =
+      GetPendingResultByType(PendingResultType::kCreateCamera);
+  if (pending_result) {
+    pending_result->Success(EncodableMap(
+        {{EncodableValue("cameraId"), EncodableValue(texture_id)}}));
+  }
+}
+
+void CameraImpl::OnCreateCaptureEngineFailed(CameraResult result,
+                                             const std::string& error) {
+  // Fail the pending create-camera request, if one is still waiting.
+  if (auto pending_result =
+          GetPendingResultByType(PendingResultType::kCreateCamera)) {
+    pending_result->Error(GetErrorCode(result), error);
+  }
+}
+
+// Resolves the pending initialize request with the preview dimensions.
+// (Also drops the stray ';' that followed the original function body.)
+void CameraImpl::OnStartPreviewSucceeded(int32_t width, int32_t height) {
+  auto pending_result = GetPendingResultByType(PendingResultType::kInitialize);
+  if (pending_result) {
+    pending_result->Success(EncodableValue(EncodableMap({
+        {EncodableValue("previewWidth"),
+         EncodableValue(static_cast<float>(width))},
+        {EncodableValue("previewHeight"),
+         EncodableValue(static_cast<float>(height))},
+    })));
+  }
+}
+
+// Fails the pending initialize request. (Drops the stray trailing ';'.)
+void CameraImpl::OnStartPreviewFailed(CameraResult result,
+                                      const std::string& error) {
+  auto pending_result = GetPendingResultByType(PendingResultType::kInitialize);
+  if (pending_result) {
+    std::string error_code = GetErrorCode(result);
+    pending_result->Error(error_code, error);
+  }
+}
+
+void CameraImpl::OnResumePreviewSucceeded() {
+  // Complete the pending resumePreview request, if any.
+  if (auto pending_result =
+          GetPendingResultByType(PendingResultType::kResumePreview)) {
+    pending_result->Success();
+  }
+}
+
+void CameraImpl::OnResumePreviewFailed(CameraResult result,
+                                       const std::string& error) {
+  // Fail the pending resumePreview request, if any.
+  if (auto pending_result =
+          GetPendingResultByType(PendingResultType::kResumePreview)) {
+    pending_result->Error(GetErrorCode(result), error);
+  }
+}
+
+void CameraImpl::OnPausePreviewSucceeded() {
+  // Complete the pending pausePreview request, if any.
+  if (auto pending_result =
+          GetPendingResultByType(PendingResultType::kPausePreview)) {
+    pending_result->Success();
+  }
+}
+
+void CameraImpl::OnPausePreviewFailed(CameraResult result,
+                                      const std::string& error) {
+  // Fail the pending pausePreview request, if any.
+  if (auto pending_result =
+          GetPendingResultByType(PendingResultType::kPausePreview)) {
+    pending_result->Error(GetErrorCode(result), error);
+  }
+}
+
+// Completes the pending startVideoRecording request. (Drops stray ';'.)
+void CameraImpl::OnStartRecordSucceeded() {
+  auto pending_result = GetPendingResultByType(PendingResultType::kStartRecord);
+  if (pending_result) {
+    pending_result->Success();
+  }
+}
+
+// Fails the pending startVideoRecording request. (Drops stray ';'.)
+void CameraImpl::OnStartRecordFailed(CameraResult result,
+                                     const std::string& error) {
+  auto pending_result = GetPendingResultByType(PendingResultType::kStartRecord);
+  if (pending_result) {
+    std::string error_code = GetErrorCode(result);
+    pending_result->Error(error_code, error);
+  }
+}
+
+// Completes the pending stopVideoRecording request with the recording's
+// file path. (Drops stray ';'.)
+void CameraImpl::OnStopRecordSucceeded(const std::string& file_path) {
+  auto pending_result = GetPendingResultByType(PendingResultType::kStopRecord);
+  if (pending_result) {
+    pending_result->Success(EncodableValue(file_path));
+  }
+}
+
+// Fails the pending stopVideoRecording request. (Drops stray ';'.)
+void CameraImpl::OnStopRecordFailed(CameraResult result,
+                                    const std::string& error) {
+  auto pending_result = GetPendingResultByType(PendingResultType::kStopRecord);
+  if (pending_result) {
+    std::string error_code = GetErrorCode(result);
+    pending_result->Error(error_code, error);
+  }
+}
+
+// Completes the pending takePicture request with the photo's file path.
+// (Drops stray ';'.)
+void CameraImpl::OnTakePictureSucceeded(const std::string& file_path) {
+  auto pending_result = GetPendingResultByType(PendingResultType::kTakePicture);
+  if (pending_result) {
+    pending_result->Success(EncodableValue(file_path));
+  }
+}
+
+// Fails the pending takePicture request. (Drops stray ';'; local renamed to
+// match the sibling handlers.)
+void CameraImpl::OnTakePictureFailed(CameraResult result,
+                                     const std::string& error) {
+  auto pending_result =
+      GetPendingResultByType(PendingResultType::kTakePicture);
+  if (pending_result) {
+    std::string error_code = GetErrorCode(result);
+    pending_result->Error(error_code, error);
+  }
+}
+
+void CameraImpl::OnVideoRecordSucceeded(const std::string& file_path,
+                                        int64_t video_duration_ms) {
+  // Only report the event once the camera has an id and a messenger.
+  if (!messenger_ || camera_id_ < 0) {
+    return;
+  }
+
+  auto channel = GetMethodChannel();
+  auto message_data = std::make_unique<EncodableValue>(
+      EncodableMap({{EncodableValue("path"), EncodableValue(file_path)},
+                    {EncodableValue("maxVideoDuration"),
+                     EncodableValue(video_duration_ms)}}));
+  channel->InvokeMethod(kVideoRecordedEvent, std::move(message_data));
+}
+
+// NOTE(review): intentionally empty in the original; kept as a no-op.
+// (Drops the stray ';' that followed the body.)
+void CameraImpl::OnVideoRecordFailed(CameraResult result,
+                                     const std::string& error) {}
+
+void CameraImpl::OnCaptureError(CameraResult result, const std::string& error) {
+  // Broadcast the error event if the camera is registered (id assigned).
+  if (messenger_ && camera_id_ >= 0) {
+    auto message_data = std::make_unique<EncodableValue>(
+        EncodableMap({{EncodableValue("description"), EncodableValue(error)}}));
+    GetMethodChannel()->InvokeMethod(kErrorEvent, std::move(message_data));
+  }
+
+  // Every outstanding request fails with the same error.
+  SendErrorForPendingResults(GetErrorCode(result), error);
+}
+
+// Notifies the Dart side that the camera is closing, if it was registered.
+// make_unique already yields an rvalue, so the std::move the original
+// wrapped around it was redundant (clang-tidy: performance-move-const-arg).
+void CameraImpl::OnCameraClosing() {
+  if (messenger_ && camera_id_ >= 0) {
+    auto channel = GetMethodChannel();
+    channel->InvokeMethod(kCameraClosingEvent,
+                          std::make_unique<EncodableValue>());
+  }
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/camera.h b/packages/camera/camera_windows/windows/camera.h
new file mode 100644
index 0000000..8508da1
--- /dev/null
+++ b/packages/camera/camera_windows/windows/camera.h
@@ -0,0 +1,206 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAMERA_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAMERA_H_
+
+#include <flutter/method_channel.h>
+#include <flutter/standard_method_codec.h>
+
+#include <functional>
+
+#include "capture_controller.h"
+
+namespace camera_windows {
+
+using flutter::EncodableMap;
+using flutter::MethodChannel;
+using flutter::MethodResult;
+
+// A set of result types that are stored
+// for processing asynchronous commands.
+//
+// Each value keys at most one in-flight Dart MethodResult per camera (see
+// Camera::AddPendingResult).
+enum class PendingResultType {
+  kCreateCamera,
+  kInitialize,
+  kTakePicture,
+  kStartRecord,
+  kStopRecord,
+  kPausePreview,
+  kResumePreview,
+};
+
+// Interface implemented by cameras.
+//
+// Access is provided to an associated |CaptureController|, which can be used
+// to capture video or photo from the camera.
+class Camera : public CaptureControllerListener {
+ public:
+  // NOTE(review): the base constructor ignores |device_id|; concrete
+  // implementations (see CameraImpl) store it themselves.
+  explicit Camera(const std::string& device_id) {}
+  virtual ~Camera() = default;
+
+  // Disallow copy and move.
+  Camera(const Camera&) = delete;
+  Camera& operator=(const Camera&) = delete;
+
+  // Tests if this camera has the specified device ID.
+  virtual bool HasDeviceId(std::string& device_id) const = 0;
+
+  // Tests if this camera has the specified camera ID.
+  virtual bool HasCameraId(int64_t camera_id) const = 0;
+
+  // Adds a pending result.
+  //
+  // Returns an error result if the result has already been added.
+  virtual bool AddPendingResult(PendingResultType type,
+                                std::unique_ptr<MethodResult<>> result) = 0;
+
+  // Checks if a pending result of the specified type already exists.
+  virtual bool HasPendingResultByType(PendingResultType type) const = 0;
+
+  // Returns a |CaptureController| that allows capturing video or still photos
+  // from this camera.
+  virtual camera_windows::CaptureController* GetCaptureController() = 0;
+
+  // Initializes this camera and its associated capture controller.
+  //
+  // Returns false if initialization fails.
+  virtual bool InitCamera(flutter::TextureRegistrar* texture_registrar,
+                          flutter::BinaryMessenger* messenger,
+                          bool record_audio,
+                          ResolutionPreset resolution_preset) = 0;
+};
+
+// Concrete implementation of the |Camera| interface.
+//
+// This implementation is responsible for initializing the capture controller,
+// listening for camera events, processing pending results, and notifying
+// application code of processed events via the method channel.
+class CameraImpl : public Camera {
+ public:
+  explicit CameraImpl(const std::string& device_id);
+  virtual ~CameraImpl();
+
+  // Disallow copy and move.
+  CameraImpl(const CameraImpl&) = delete;
+  CameraImpl& operator=(const CameraImpl&) = delete;
+
+  // CaptureControllerListener
+  void OnCreateCaptureEngineSucceeded(int64_t texture_id) override;
+  void OnCreateCaptureEngineFailed(CameraResult result,
+                                   const std::string& error) override;
+  void OnStartPreviewSucceeded(int32_t width, int32_t height) override;
+  void OnStartPreviewFailed(CameraResult result,
+                            const std::string& error) override;
+  void OnPausePreviewSucceeded() override;
+  void OnPausePreviewFailed(CameraResult result,
+                            const std::string& error) override;
+  void OnResumePreviewSucceeded() override;
+  void OnResumePreviewFailed(CameraResult result,
+                             const std::string& error) override;
+  void OnStartRecordSucceeded() override;
+  void OnStartRecordFailed(CameraResult result,
+                           const std::string& error) override;
+  void OnStopRecordSucceeded(const std::string& file_path) override;
+  void OnStopRecordFailed(CameraResult result,
+                          const std::string& error) override;
+  void OnTakePictureSucceeded(const std::string& file_path) override;
+  void OnTakePictureFailed(CameraResult result,
+                           const std::string& error) override;
+  void OnVideoRecordSucceeded(const std::string& file_path,
+                              int64_t video_duration) override;
+  void OnVideoRecordFailed(CameraResult result,
+                           const std::string& error) override;
+  void OnCaptureError(CameraResult result, const std::string& error) override;
+
+  // Camera
+  bool HasDeviceId(std::string& device_id) const override {
+    return device_id_ == device_id;
+  }
+  bool HasCameraId(int64_t camera_id) const override {
+    return camera_id_ == camera_id;
+  }
+  bool AddPendingResult(PendingResultType type,
+                        std::unique_ptr<MethodResult<>> result) override;
+  bool HasPendingResultByType(PendingResultType type) const override;
+  camera_windows::CaptureController* GetCaptureController() override {
+    return capture_controller_.get();
+  }
+  bool InitCamera(flutter::TextureRegistrar* texture_registrar,
+                  flutter::BinaryMessenger* messenger, bool record_audio,
+                  ResolutionPreset resolution_preset) override;
+
+  // Initializes the camera and its associated capture controller.
+  //
+  // This is a convenience method called by |InitCamera| but also used in
+  // tests.
+  //
+  // Returns false if initialization fails.
+  bool InitCamera(
+      std::unique_ptr<CaptureControllerFactory> capture_controller_factory,
+      flutter::TextureRegistrar* texture_registrar,
+      flutter::BinaryMessenger* messenger, bool record_audio,
+      ResolutionPreset resolution_preset);
+
+ private:
+  // Loops through all pending results and calls their error handler with given
+  // error ID and description. Pending results are cleared in the process.
+  //
+  // error_code: A string error code describing the error.
+  // description: A user-readable error message (optional).
+  void SendErrorForPendingResults(const std::string& error_code,
+                                  const std::string& description);
+
+  // Called when camera is disposed.
+  // Sends camera closing message to the cameras method channel.
+  void OnCameraClosing();
+
+  // Initializes method channel instance and returns a pointer to it.
+  MethodChannel<>* GetMethodChannel();
+
+  // Finds pending result by type.
+  // Returns nullptr if type is not present.
+  std::unique_ptr<MethodResult<>> GetPendingResultByType(
+      PendingResultType type);
+
+  // At most one in-flight request per PendingResultType.
+  std::map<PendingResultType, std::unique_ptr<MethodResult<>>> pending_results_;
+  std::unique_ptr<CaptureController> capture_controller_;
+  // Lazily created per-camera channel; see GetMethodChannel().
+  std::unique_ptr<MethodChannel<>> camera_channel_;
+  flutter::BinaryMessenger* messenger_ = nullptr;
+  // -1 sentinel until OnCreateCaptureEngineSucceeded assigns the texture id.
+  int64_t camera_id_ = -1;
+  std::string device_id_;
+};
+
+// Factory class for creating |Camera| instances from a specified device ID.
+class CameraFactory {
+ public:
+  // = default over an empty body (clang-tidy: modernize-use-equals-default).
+  CameraFactory() = default;
+  virtual ~CameraFactory() = default;
+
+  // Disallow copy and move.
+  CameraFactory(const CameraFactory&) = delete;
+  CameraFactory& operator=(const CameraFactory&) = delete;
+
+  // Creates camera for given device id.
+  virtual std::unique_ptr<Camera> CreateCamera(
+      const std::string& device_id) = 0;
+};
+
+// Concrete implementation of |CameraFactory|.
+class CameraFactoryImpl : public CameraFactory {
+ public:
+  // = default over an empty body; destructor marked override instead of
+  // restating virtual (modernize-use-equals-default / use-override).
+  CameraFactoryImpl() = default;
+  ~CameraFactoryImpl() override = default;
+
+  // Disallow copy and move.
+  CameraFactoryImpl(const CameraFactoryImpl&) = delete;
+  CameraFactoryImpl& operator=(const CameraFactoryImpl&) = delete;
+
+  // Returns a production CameraImpl for |device_id|.
+  std::unique_ptr<Camera> CreateCamera(const std::string& device_id) override {
+    return std::make_unique<CameraImpl>(device_id);
+  }
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAMERA_H_
diff --git a/packages/camera/camera_windows/windows/camera_plugin.cpp b/packages/camera/camera_windows/windows/camera_plugin.cpp
new file mode 100644
index 0000000..5503d17
--- /dev/null
+++ b/packages/camera/camera_windows/windows/camera_plugin.cpp
@@ -0,0 +1,596 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "camera_plugin.h"
+
+#include <flutter/flutter_view.h>
+#include <flutter/method_channel.h>
+#include <flutter/plugin_registrar_windows.h>
+#include <flutter/standard_method_codec.h>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <shlobj.h>
+#include <shobjidl.h>
+#include <windows.h>
+
+#include <algorithm>
+#include <cassert>
+#include <chrono>
+#include <memory>
+#include <optional>
+#include <string>
+
+#include "capture_device_info.h"
+#include "com_heap_ptr.h"
+#include "string_utils.h"
+
+namespace camera_windows {
+using flutter::EncodableList;
+using flutter::EncodableMap;
+using flutter::EncodableValue;
+
+namespace {
+
+// Channel events
+constexpr char kChannelName[] = "plugins.flutter.io/camera_windows";
+
+// Method names accepted on the plugin channel.
+constexpr char kAvailableCamerasMethod[] = "availableCameras";
+constexpr char kCreateMethod[] = "create";
+constexpr char kInitializeMethod[] = "initialize";
+constexpr char kTakePictureMethod[] = "takePicture";
+constexpr char kStartVideoRecordingMethod[] = "startVideoRecording";
+constexpr char kStopVideoRecordingMethod[] = "stopVideoRecording";
+constexpr char kPausePreview[] = "pausePreview";
+constexpr char kResumePreview[] = "resumePreview";
+constexpr char kDisposeMethod[] = "dispose";
+
+// Method argument keys.
+constexpr char kCameraNameKey[] = "cameraName";
+constexpr char kResolutionPresetKey[] = "resolutionPreset";
+constexpr char kEnableAudioKey[] = "enableAudio";
+
+constexpr char kCameraIdKey[] = "cameraId";
+constexpr char kMaxVideoDurationKey[] = "maxVideoDuration";
+
+// Accepted values for kResolutionPresetKey.
+constexpr char kResolutionPresetValueLow[] = "low";
+constexpr char kResolutionPresetValueMedium[] = "medium";
+constexpr char kResolutionPresetValueHigh[] = "high";
+constexpr char kResolutionPresetValueVeryHigh[] = "veryHigh";
+constexpr char kResolutionPresetValueUltraHigh[] = "ultraHigh";
+constexpr char kResolutionPresetValueMax[] = "max";
+
+// File extensions for captured media. constexpr char arrays avoid the
+// dynamic initialization required by global std::string objects; string
+// concatenation at the use sites is unaffected.
+constexpr char kPictureCaptureExtension[] = "jpeg";
+constexpr char kVideoCaptureExtension[] = "mp4";
+
+// Looks for |key| in |map|, returning the associated value if it is present,
+// or a nullptr if not.
+const EncodableValue* ValueOrNull(const EncodableMap& map, const char* key) {
+  const auto it = map.find(EncodableValue(key));
+  return it == map.end() ? nullptr : &it->second;
+}
+
+// Looks for |key| in |map|, returning the associated int64 value if it is
+// present (widening an int32 if necessary), or std::nullopt if not.
+std::optional<int64_t> GetInt64ValueOrNull(const EncodableMap& map,
+                                           const char* key) {
+  const EncodableValue* value = ValueOrNull(map, key);
+  if (!value) {
+    return std::nullopt;
+  }
+
+  if (const auto* as_int32 = std::get_if<int32_t>(value)) {
+    return static_cast<int64_t>(*as_int32);
+  }
+  if (const auto* as_int64 = std::get_if<int64_t>(value)) {
+    return *as_int64;
+  }
+  return std::nullopt;
+}
+
+// Parses resolution preset argument to enum value.
+ResolutionPreset ParseResolutionPreset(const std::string& resolution_preset) {
+  if (resolution_preset == kResolutionPresetValueLow) {
+    return ResolutionPreset::kLow;
+  }
+  if (resolution_preset == kResolutionPresetValueMedium) {
+    return ResolutionPreset::kMedium;
+  }
+  if (resolution_preset == kResolutionPresetValueHigh) {
+    return ResolutionPreset::kHigh;
+  }
+  if (resolution_preset == kResolutionPresetValueVeryHigh) {
+    return ResolutionPreset::kVeryHigh;
+  }
+  if (resolution_preset == kResolutionPresetValueUltraHigh) {
+    return ResolutionPreset::kUltraHigh;
+  }
+  if (resolution_preset == kResolutionPresetValueMax) {
+    return ResolutionPreset::kMax;
+  }
+  // Unrecognized strings fall back to auto.
+  return ResolutionPreset::kAuto;
+}
+
+// Builds CaptureDeviceInfo object from given device holding device name and id.
+//
+// If either string cannot be read, the object is returned as-is (partially
+// or fully unpopulated); callers receive no error signal.
+std::unique_ptr<CaptureDeviceInfo> GetDeviceInfo(IMFActivate* device) {
+  assert(device);
+  auto device_info = std::make_unique<CaptureDeviceInfo>();
+  // NOTE(review): ComHeapPtr presumably frees the COM-allocated strings on
+  // scope exit — confirm against com_heap_ptr.h.
+  ComHeapPtr<wchar_t> name;
+  UINT32 name_size;
+
+  HRESULT hr = device->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
+                                          &name, &name_size);
+  if (FAILED(hr)) {
+    return device_info;
+  }
+
+  ComHeapPtr<wchar_t> id;
+  UINT32 id_size;
+  hr = device->GetAllocatedString(
+      MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &id, &id_size);
+
+  if (FAILED(hr)) {
+    return device_info;
+  }
+
+  device_info->SetDisplayName(Utf8FromUtf16(std::wstring(name, name_size)));
+  device_info->SetDeviceID(Utf8FromUtf16(std::wstring(id, id_size)));
+  return device_info;
+}
+
+// Builds datetime string from current time.
+// Used as part of the filenames for captured pictures and videos.
+std::string GetCurrentTimeString() {
+  std::chrono::system_clock::duration now =
+      std::chrono::system_clock::now().time_since_epoch();
+
+  auto s = std::chrono::duration_cast<std::chrono::seconds>(now).count();
+  auto ms =
+      std::chrono::duration_cast<std::chrono::milliseconds>(now).count() % 1000;
+
+  struct tm newtime;
+  localtime_s(&newtime, &s);
+
+  // Format into a fixed buffer. On strftime failure the prefix is simply
+  // empty; the original left 80 embedded NUL characters in the returned
+  // string in that case.
+  char buffer[80];
+  size_t len =
+      strftime(buffer, sizeof(buffer), "%Y_%m%d_%H%M%S_", &newtime);
+  std::string time_start(buffer, len);
+
+  // Add milliseconds to make sure the filename is unique
+  return time_start + std::to_string(ms);
+}
+
+// Returns the UTF-8 path of the given known Windows folder, or std::nullopt
+// if the folder cannot be resolved. Shared by the picture/video path
+// builders below, which previously duplicated this lookup.
+std::optional<std::string> GetKnownFolderPathString(REFKNOWNFOLDERID folder_id) {
+  ComHeapPtr<wchar_t> known_folder_path;
+  HRESULT hr = SHGetKnownFolderPath(folder_id, KF_FLAG_CREATE, nullptr,
+                                    &known_folder_path);
+  if (FAILED(hr)) {
+    return std::nullopt;
+  }
+  return Utf8FromUtf16(std::wstring(known_folder_path));
+}
+
+// Builds file path for picture capture.
+std::optional<std::string> GetFilePathForPicture() {
+  std::optional<std::string> path = GetKnownFolderPathString(FOLDERID_Pictures);
+  if (!path) {
+    return std::nullopt;
+  }
+  return *path + "\\" + "PhotoCapture_" + GetCurrentTimeString() + "." +
+         kPictureCaptureExtension;
+}
+
+// Builds file path for video capture.
+std::optional<std::string> GetFilePathForVideo() {
+  std::optional<std::string> path = GetKnownFolderPathString(FOLDERID_Videos);
+  if (!path) {
+    return std::nullopt;
+  }
+  return *path + "\\" + "VideoCapture_" + GetCurrentTimeString() + "." +
+         kVideoCaptureExtension;
+}
+} // namespace
+
+// static
+void CameraPlugin::RegisterWithRegistrar(
+    flutter::PluginRegistrarWindows* registrar) {
+  auto plugin = std::make_unique<CameraPlugin>(registrar->texture_registrar(),
+                                               registrar->messenger());
+
+  auto channel = std::make_unique<flutter::MethodChannel<>>(
+      registrar->messenger(), kChannelName,
+      &flutter::StandardMethodCodec::GetInstance());
+
+  // The registrar takes ownership of the plugin below; the handler only
+  // borrows a raw pointer.
+  channel->SetMethodCallHandler(
+      [plugin_pointer = plugin.get()](const auto& call, auto result) {
+        plugin_pointer->HandleMethodCall(call, std::move(result));
+      });
+
+  registrar->AddPlugin(std::move(plugin));
+}
+
+// Production constructor; delegates to the injectable overload with the
+// real factory (removes the duplicated member-init list).
+CameraPlugin::CameraPlugin(flutter::TextureRegistrar* texture_registrar,
+                           flutter::BinaryMessenger* messenger)
+    : CameraPlugin(texture_registrar, messenger,
+                   std::make_unique<CameraFactoryImpl>()) {}
+
+// Constructor allowing a custom camera factory to be injected (e.g. for
+// tests).
+CameraPlugin::CameraPlugin(flutter::TextureRegistrar* texture_registrar,
+                           flutter::BinaryMessenger* messenger,
+                           std::unique_ptr<CameraFactory> camera_factory)
+    : texture_registrar_(texture_registrar),
+      messenger_(messenger),
+      camera_factory_(std::move(camera_factory)) {}
+
+CameraPlugin::~CameraPlugin() = default;
+
+// Dispatches an incoming platform-channel call to the matching handler.
+// Argument extraction, previously copy-pasted into every branch, is hoisted;
+// each branch keeps the original per-method assert.
+void CameraPlugin::HandleMethodCall(
+    const flutter::MethodCall<>& method_call,
+    std::unique_ptr<flutter::MethodResult<>> result) {
+  const std::string& method_name = method_call.method_name();
+
+  // availableCameras is the only supported method without arguments.
+  if (method_name.compare(kAvailableCamerasMethod) == 0) {
+    return AvailableCamerasMethodHandler(std::move(result));
+  }
+
+  // Every other supported method carries an argument map. std::get_if
+  // tolerates a null arguments pointer, so this is safe for argument-less
+  // unknown methods too.
+  const auto* arguments =
+      std::get_if<flutter::EncodableMap>(method_call.arguments());
+
+  if (method_name.compare(kCreateMethod) == 0) {
+    assert(arguments);
+    return CreateMethodHandler(*arguments, std::move(result));
+  } else if (method_name.compare(kInitializeMethod) == 0) {
+    assert(arguments);
+    return InitializeMethodHandler(*arguments, std::move(result));
+  } else if (method_name.compare(kTakePictureMethod) == 0) {
+    assert(arguments);
+    return TakePictureMethodHandler(*arguments, std::move(result));
+  } else if (method_name.compare(kStartVideoRecordingMethod) == 0) {
+    assert(arguments);
+    return StartVideoRecordingMethodHandler(*arguments, std::move(result));
+  } else if (method_name.compare(kStopVideoRecordingMethod) == 0) {
+    assert(arguments);
+    return StopVideoRecordingMethodHandler(*arguments, std::move(result));
+  } else if (method_name.compare(kPausePreview) == 0) {
+    assert(arguments);
+    return PausePreviewMethodHandler(*arguments, std::move(result));
+  } else if (method_name.compare(kResumePreview) == 0) {
+    assert(arguments);
+    return ResumePreviewMethodHandler(*arguments, std::move(result));
+  } else if (method_name.compare(kDisposeMethod) == 0) {
+    assert(arguments);
+    return DisposeMethodHandler(*arguments, std::move(result));
+  } else {
+    result->NotImplemented();
+  }
+}
+
+// Returns the camera whose device id matches, or nullptr if none does.
+Camera* CameraPlugin::GetCameraByDeviceId(std::string& device_id) {
+  for (auto& camera : cameras_) {
+    if (camera->HasDeviceId(device_id)) {
+      return camera.get();
+    }
+  }
+  return nullptr;
+}
+
+// Returns the camera whose camera id matches, or nullptr if none does.
+Camera* CameraPlugin::GetCameraByCameraId(int64_t camera_id) {
+  for (auto& camera : cameras_) {
+    if (camera->HasCameraId(camera_id)) {
+      return camera.get();
+    }
+  }
+  return nullptr;
+}
+
+// Destroys the camera with the given camera id, if one exists.
+// Erasing the unique_ptr releases the camera and its capture resources.
+void CameraPlugin::DisposeCameraByCameraId(int64_t camera_id) {
+  auto it = cameras_.begin();
+  while (it != cameras_.end()) {
+    if ((*it)->HasCameraId(camera_id)) {
+      cameras_.erase(it);
+      return;
+    }
+    ++it;
+  }
+}
+
+// Handles availableCameras: enumerates video capture devices and returns
+// them as a list of {name, lensFacing, sensorOrientation} maps.
+// lensFacing/sensorOrientation are fixed placeholders on Windows.
+void CameraPlugin::AvailableCamerasMethodHandler(
+    std::unique_ptr<flutter::MethodResult<>> result) {
+  // Enumerate devices.
+  ComHeapPtr<IMFActivate*> devices;
+  UINT32 count = 0;
+  if (!this->EnumerateVideoCaptureDeviceSources(&devices, &count)) {
+    result->Error("System error", "Failed to get available cameras");
+    // No need to free the devices array here, because the allocation failed.
+    return;
+  }
+
+  if (count == 0) {
+    result->Success(EncodableValue(EncodableList()));
+    return;
+  }
+
+  // Format found devices to the response.
+  EncodableList devices_list;
+  for (UINT32 i = 0; i < count; ++i) {
+    auto device_info = GetDeviceInfo(devices[i]);
+    auto device_name = device_info->GetUniqueDeviceName();
+
+    devices_list.push_back(EncodableMap({
+        {EncodableValue("name"), EncodableValue(device_name)},
+        {EncodableValue("lensFacing"), EncodableValue("front")},
+        {EncodableValue("sensorOrientation"), EncodableValue(0)},
+    }));
+  }
+
+  // Move the list into the value instead of the original
+  // std::move(EncodableValue(devices_list)), which was a pessimizing move of
+  // a temporary that also copied the whole list into it.
+  result->Success(EncodableValue(std::move(devices_list)));
+}
+
+// Thin virtual wrapper over the static enumeration helper so tests can
+// override device enumeration (VideoCaptureDeviceEnumerator interface).
+bool CameraPlugin::EnumerateVideoCaptureDeviceSources(IMFActivate*** devices,
+                                                      UINT32* count) {
+  return CaptureControllerImpl::EnumerateVideoCaptureDeviceSources(devices,
+                                                                   count);
+}
+
+// Handles a "create" call: validates arguments, constructs a Camera for the
+// requested device, and starts asynchronous initialization. The method
+// result is stored on the camera as a pending result and completed once the
+// capture engine reports creation success or failure.
+void CameraPlugin::CreateMethodHandler(
+    const EncodableMap& args, std::unique_ptr<flutter::MethodResult<>> result) {
+  // Parse enableAudio argument.
+  const auto* record_audio =
+      std::get_if<bool>(ValueOrNull(args, kEnableAudioKey));
+  if (!record_audio) {
+    return result->Error("argument_error",
+                         std::string(kEnableAudioKey) + " argument missing");
+  }
+
+  // Parse cameraName argument.
+  const auto* camera_name =
+      std::get_if<std::string>(ValueOrNull(args, kCameraNameKey));
+  if (!camera_name) {
+    return result->Error("argument_error",
+                         std::string(kCameraNameKey) + " argument missing");
+  }
+
+  // The camera name encodes the underlying device id; extract it.
+  auto device_info = std::make_unique<CaptureDeviceInfo>();
+  if (!device_info->ParseDeviceInfoFromCameraName(*camera_name)) {
+    return result->Error(
+        "camera_error", "Cannot parse argument " + std::string(kCameraNameKey));
+  }
+
+  // Only one Camera per physical device is allowed at a time.
+  auto device_id = device_info->GetDeviceId();
+  if (GetCameraByDeviceId(device_id)) {
+    return result->Error("camera_error",
+                         "Camera with given device id already exists. Existing "
+                         "camera must be disposed before creating it again.");
+  }
+
+  std::unique_ptr<camera_windows::Camera> camera =
+      camera_factory_->CreateCamera(device_id);
+
+  if (camera->HasPendingResultByType(PendingResultType::kCreateCamera)) {
+    return result->Error("camera_error",
+                         "Pending camera creation request exists");
+  }
+
+  // Ownership of |result| moves to the camera; it is completed later via
+  // the camera's pending-result mechanism.
+  if (camera->AddPendingResult(PendingResultType::kCreateCamera,
+                               std::move(result))) {
+    // Parse resolution preset argument.
+    const auto* resolution_preset_argument =
+        std::get_if<std::string>(ValueOrNull(args, kResolutionPresetKey));
+    ResolutionPreset resolution_preset;
+    if (resolution_preset_argument) {
+      resolution_preset = ParseResolutionPreset(*resolution_preset_argument);
+    } else {
+      resolution_preset = ResolutionPreset::kAuto;
+    }
+
+    // On failure the camera is destroyed here without being stored;
+    // presumably InitCamera reports the error through the pending result's
+    // listener callbacks — TODO confirm against the Camera implementation.
+    bool initialized = camera->InitCamera(texture_registrar_, messenger_,
+                                          *record_audio, resolution_preset);
+    if (initialized) {
+      cameras_.push_back(std::move(camera));
+    }
+  }
+}
+
+// Handles initialize: asks the camera's capture controller to start the
+// preview stream. The stored pending result is completed once the preview
+// has actually started.
+void CameraPlugin::InitializeMethodHandler(
+    const EncodableMap& args, std::unique_ptr<flutter::MethodResult<>> result) {
+  auto requested_camera_id = GetInt64ValueOrNull(args, kCameraIdKey);
+  if (!requested_camera_id) {
+    return result->Error("argument_error",
+                         std::string(kCameraIdKey) + " missing");
+  }
+
+  Camera* camera = GetCameraByCameraId(*requested_camera_id);
+  if (!camera) {
+    return result->Error("camera_error", "Camera not created");
+  }
+
+  if (camera->HasPendingResultByType(PendingResultType::kInitialize)) {
+    return result->Error("camera_error",
+                         "Pending initialization request exists");
+  }
+
+  if (!camera->AddPendingResult(PendingResultType::kInitialize,
+                                std::move(result))) {
+    return;
+  }
+
+  auto capture_controller = camera->GetCaptureController();
+  assert(capture_controller);
+  capture_controller->StartPreview();
+}
+
+// Handles pausePreview: asks the camera's capture controller to pause the
+// preview stream. The stored pending result is completed asynchronously.
+void CameraPlugin::PausePreviewMethodHandler(
+    const EncodableMap& args, std::unique_ptr<flutter::MethodResult<>> result) {
+  auto requested_camera_id = GetInt64ValueOrNull(args, kCameraIdKey);
+  if (!requested_camera_id) {
+    return result->Error("argument_error",
+                         std::string(kCameraIdKey) + " missing");
+  }
+
+  Camera* camera = GetCameraByCameraId(*requested_camera_id);
+  if (!camera) {
+    return result->Error("camera_error", "Camera not created");
+  }
+
+  if (camera->HasPendingResultByType(PendingResultType::kPausePreview)) {
+    return result->Error("camera_error",
+                         "Pending pause preview request exists");
+  }
+
+  if (!camera->AddPendingResult(PendingResultType::kPausePreview,
+                                std::move(result))) {
+    return;
+  }
+
+  auto capture_controller = camera->GetCaptureController();
+  assert(capture_controller);
+  capture_controller->PausePreview();
+}
+
+// Handles resumePreview: asks the camera's capture controller to resume a
+// previously paused preview. The stored pending result is completed
+// asynchronously.
+void CameraPlugin::ResumePreviewMethodHandler(
+    const EncodableMap& args, std::unique_ptr<flutter::MethodResult<>> result) {
+  auto requested_camera_id = GetInt64ValueOrNull(args, kCameraIdKey);
+  if (!requested_camera_id) {
+    return result->Error("argument_error",
+                         std::string(kCameraIdKey) + " missing");
+  }
+
+  Camera* camera = GetCameraByCameraId(*requested_camera_id);
+  if (!camera) {
+    return result->Error("camera_error", "Camera not created");
+  }
+
+  if (camera->HasPendingResultByType(PendingResultType::kResumePreview)) {
+    return result->Error("camera_error",
+                         "Pending resume preview request exists");
+  }
+
+  if (!camera->AddPendingResult(PendingResultType::kResumePreview,
+                                std::move(result))) {
+    return;
+  }
+
+  auto capture_controller = camera->GetCaptureController();
+  assert(capture_controller);
+  capture_controller->ResumePreview();
+}
+
+// Handles startVideoRecording: resolves the target file path and asks the
+// capture controller to start recording. The stored pending result is
+// completed once recording has started.
+void CameraPlugin::StartVideoRecordingMethodHandler(
+    const EncodableMap& args, std::unique_ptr<flutter::MethodResult<>> result) {
+  auto camera_id = GetInt64ValueOrNull(args, kCameraIdKey);
+  if (!camera_id) {
+    return result->Error("argument_error",
+                         std::string(kCameraIdKey) + " missing");
+  }
+
+  auto camera = GetCameraByCameraId(*camera_id);
+  if (!camera) {
+    return result->Error("camera_error", "Camera not created");
+  }
+
+  if (camera->HasPendingResultByType(PendingResultType::kStartRecord)) {
+    return result->Error("camera_error",
+                         "Pending start recording request exists");
+  }
+
+  // Negative duration means the recording is not limited in time.
+  int64_t max_video_duration_ms = -1;
+  // Use the int64 helper (as for camera id) instead of the original
+  // std::get_if<std::int32_t>: the standard codec encodes integers as int32
+  // or int64 depending on magnitude, so durations that do not fit in an
+  // int32 were silently ignored.
+  auto requested_max_video_duration_ms =
+      GetInt64ValueOrNull(args, kMaxVideoDurationKey);
+  if (requested_max_video_duration_ms) {
+    max_video_duration_ms = *requested_max_video_duration_ms;
+  }
+
+  std::optional<std::string> path = GetFilePathForVideo();
+  if (!path) {
+    return result->Error("system_error",
+                         "Failed to get path for video capture");
+  }
+
+  if (camera->AddPendingResult(PendingResultType::kStartRecord,
+                               std::move(result))) {
+    auto cc = camera->GetCaptureController();
+    assert(cc);
+    cc->StartRecord(*path, max_video_duration_ms);
+  }
+}
+
+// Handles stopVideoRecording: asks the camera's capture controller to stop
+// an in-progress recording. The stored pending result is completed
+// asynchronously.
+void CameraPlugin::StopVideoRecordingMethodHandler(
+    const EncodableMap& args, std::unique_ptr<flutter::MethodResult<>> result) {
+  auto requested_camera_id = GetInt64ValueOrNull(args, kCameraIdKey);
+  if (!requested_camera_id) {
+    return result->Error("argument_error",
+                         std::string(kCameraIdKey) + " missing");
+  }
+
+  Camera* camera = GetCameraByCameraId(*requested_camera_id);
+  if (!camera) {
+    return result->Error("camera_error", "Camera not created");
+  }
+
+  if (camera->HasPendingResultByType(PendingResultType::kStopRecord)) {
+    return result->Error("camera_error",
+                         "Pending stop recording request exists");
+  }
+
+  if (!camera->AddPendingResult(PendingResultType::kStopRecord,
+                                std::move(result))) {
+    return;
+  }
+
+  auto capture_controller = camera->GetCaptureController();
+  assert(capture_controller);
+  capture_controller->StopRecord();
+}
+
+// Handles takePicture: resolves a capture file path and asks the capture
+// controller to take a photo. The stored pending result is completed once
+// the photo has been written.
+void CameraPlugin::TakePictureMethodHandler(
+    const EncodableMap& args, std::unique_ptr<flutter::MethodResult<>> result) {
+  auto requested_camera_id = GetInt64ValueOrNull(args, kCameraIdKey);
+  if (!requested_camera_id) {
+    return result->Error("argument_error",
+                         std::string(kCameraIdKey) + " missing");
+  }
+
+  Camera* camera = GetCameraByCameraId(*requested_camera_id);
+  if (!camera) {
+    return result->Error("camera_error", "Camera not created");
+  }
+
+  if (camera->HasPendingResultByType(PendingResultType::kTakePicture)) {
+    return result->Error("camera_error", "Pending take picture request exists");
+  }
+
+  std::optional<std::string> path = GetFilePathForPicture();
+  if (!path) {
+    return result->Error("system_error",
+                         "Failed to get capture path for picture");
+  }
+
+  if (camera->AddPendingResult(PendingResultType::kTakePicture,
+                               std::move(result))) {
+    auto capture_controller = camera->GetCaptureController();
+    assert(capture_controller);
+    capture_controller->TakePicture(*path);
+  }
+}
+
+// Handles dispose: destroys the camera matching the given camera id (if
+// any) and reports success. Disposing a non-existent camera is not an
+// error.
+void CameraPlugin::DisposeMethodHandler(
+    const EncodableMap& args, std::unique_ptr<flutter::MethodResult<>> result) {
+  auto requested_camera_id = GetInt64ValueOrNull(args, kCameraIdKey);
+  if (!requested_camera_id) {
+    return result->Error("argument_error",
+                         std::string(kCameraIdKey) + " missing");
+  }
+
+  DisposeCameraByCameraId(*requested_camera_id);
+  result->Success();
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/camera_plugin.h b/packages/camera/camera_windows/windows/camera_plugin.h
new file mode 100644
index 0000000..1baa247
--- /dev/null
+++ b/packages/camera/camera_windows/windows/camera_plugin.h
@@ -0,0 +1,132 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAMERA_PLUGIN_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAMERA_PLUGIN_H_
+
+#include <flutter/flutter_view.h>
+#include <flutter/method_channel.h>
+#include <flutter/plugin_registrar_windows.h>
+#include <flutter/standard_method_codec.h>
+
+#include <functional>
+
+#include "camera.h"
+#include "capture_controller.h"
+#include "capture_controller_listener.h"
+
+namespace camera_windows {
+using flutter::MethodResult;
+
+namespace test {
+namespace {
+// Forward declaration of test class.
+class MockCameraPlugin;
+} // namespace
+} // namespace test
+
+// Windows implementation of the camera plugin.
+//
+// Owns the set of active cameras and routes platform-channel method calls
+// to the matching camera. Implements VideoCaptureDeviceEnumerator so device
+// enumeration can be overridden in tests.
+class CameraPlugin : public flutter::Plugin,
+                     public VideoCaptureDeviceEnumerator {
+ public:
+  static void RegisterWithRegistrar(flutter::PluginRegistrarWindows* registrar);
+
+  CameraPlugin(flutter::TextureRegistrar* texture_registrar,
+               flutter::BinaryMessenger* messenger);
+
+  // Creates a plugin instance with the given CameraFactory instance.
+  // Exists for unit testing with mock implementations.
+  CameraPlugin(flutter::TextureRegistrar* texture_registrar,
+               flutter::BinaryMessenger* messenger,
+               std::unique_ptr<CameraFactory> camera_factory);
+
+  virtual ~CameraPlugin();
+
+  // Disallow copy and move.
+  CameraPlugin(const CameraPlugin&) = delete;
+  CameraPlugin& operator=(const CameraPlugin&) = delete;
+
+  // Called when a method is called on plugin channel.
+  void HandleMethodCall(const flutter::MethodCall<>& method_call,
+                        std::unique_ptr<MethodResult<>> result);
+
+ private:
+  // Loops through cameras and returns camera
+  // with matching device_id or nullptr.
+  Camera* GetCameraByDeviceId(std::string& device_id);
+
+  // Loops through cameras and returns camera
+  // with matching camera_id or nullptr.
+  Camera* GetCameraByCameraId(int64_t camera_id);
+
+  // Disposes camera by camera id.
+  void DisposeCameraByCameraId(int64_t camera_id);
+
+  // Enumerates video capture devices.
+  bool EnumerateVideoCaptureDeviceSources(IMFActivate*** devices,
+                                          UINT32* count) override;
+
+  // Handles availableCameras method calls.
+  // Enumerates video capture devices and
+  // returns list of available camera devices.
+  void AvailableCamerasMethodHandler(
+      std::unique_ptr<flutter::MethodResult<>> result);
+
+  // Handles create method calls.
+  // Creates camera and initializes capture controller for requested device.
+  // Stores result object to be handled after request is processed.
+  void CreateMethodHandler(const EncodableMap& args,
+                           std::unique_ptr<MethodResult<>> result);
+
+  // Handles initialize method calls.
+  // Requests existing camera controller to start preview.
+  // Stores result object to be handled after request is processed.
+  void InitializeMethodHandler(const EncodableMap& args,
+                               std::unique_ptr<MethodResult<>> result);
+
+  // Handles takePicture method calls.
+  // Requests existing camera controller to take photo.
+  // Stores result object to be handled after request is processed.
+  void TakePictureMethodHandler(const EncodableMap& args,
+                                std::unique_ptr<MethodResult<>> result);
+
+  // Handles startVideoRecording method calls.
+  // Requests existing camera controller to start recording.
+  // Stores result object to be handled after request is processed.
+  void StartVideoRecordingMethodHandler(const EncodableMap& args,
+                                        std::unique_ptr<MethodResult<>> result);
+
+  // Handles stopVideoRecording method calls.
+  // Requests existing camera controller to stop recording.
+  // Stores result object to be handled after request is processed.
+  void StopVideoRecordingMethodHandler(const EncodableMap& args,
+                                       std::unique_ptr<MethodResult<>> result);
+
+  // Handles pausePreview method calls.
+  // Requests existing camera controller to pause preview.
+  // Stores result object to be handled after request is processed.
+  void PausePreviewMethodHandler(const EncodableMap& args,
+                                 std::unique_ptr<MethodResult<>> result);
+
+  // Handles resumePreview method calls.
+  // Requests existing camera controller to resume preview.
+  // Stores result object to be handled after request is processed.
+  void ResumePreviewMethodHandler(const EncodableMap& args,
+                                  std::unique_ptr<MethodResult<>> result);
+
+  // Handles dispose method calls.
+  // Disposes camera if exists.
+  void DisposeMethodHandler(const EncodableMap& args,
+                            std::unique_ptr<MethodResult<>> result);
+
+  std::unique_ptr<CameraFactory> camera_factory_;
+  flutter::TextureRegistrar* texture_registrar_;
+  flutter::BinaryMessenger* messenger_;
+  std::vector<std::unique_ptr<Camera>> cameras_;
+
+  friend class camera_windows::test::MockCameraPlugin;
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAMERA_PLUGIN_H_
diff --git a/packages/camera/camera_windows/windows/camera_windows.cpp b/packages/camera/camera_windows/windows/camera_windows.cpp
new file mode 100644
index 0000000..2d6b781
--- /dev/null
+++ b/packages/camera/camera_windows/windows/camera_windows.cpp
@@ -0,0 +1,16 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "include/camera_windows/camera_windows.h"
+
+#include <flutter/plugin_registrar_windows.h>
+
+#include "camera_plugin.h"
+
+// C entry point used by the Flutter tooling-generated plugin registrant.
+void CameraWindowsRegisterWithRegistrar(
+    FlutterDesktopPluginRegistrarRef registrar) {
+  // Resolve the C++ registrar wrapper for the C registrar handle, then hand
+  // it to the plugin's registration entry point.
+  flutter::PluginRegistrarWindows* windows_registrar =
+      flutter::PluginRegistrarManager::GetInstance()
+          ->GetRegistrar<flutter::PluginRegistrarWindows>(registrar);
+  camera_windows::CameraPlugin::RegisterWithRegistrar(windows_registrar);
+}
diff --git a/packages/camera/camera_windows/windows/capture_controller.cpp b/packages/camera/camera_windows/windows/capture_controller.cpp
new file mode 100644
index 0000000..384c86a
--- /dev/null
+++ b/packages/camera/camera_windows/windows/capture_controller.cpp
@@ -0,0 +1,908 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "capture_controller.h"
+
+#include <comdef.h>
+#include <wincodec.h>
+#include <wrl/client.h>
+
+#include <cassert>
+#include <chrono>
+
+#include "com_heap_ptr.h"
+#include "photo_handler.h"
+#include "preview_handler.h"
+#include "record_handler.h"
+#include "string_utils.h"
+#include "texture_handler.h"
+
+namespace camera_windows {
+
+using Microsoft::WRL::ComPtr;
+
+// Maps an HRESULT to the plugin's CameraResult. Success codes map to
+// kSuccess; E_ACCESSDENIED is surfaced separately so callers can report a
+// permission problem; everything else is a generic error.
+CameraResult GetCameraResult(HRESULT hr) {
+  if (SUCCEEDED(hr)) {
+    return CameraResult::kSuccess;
+  }
+  if (hr == E_ACCESSDENIED) {
+    return CameraResult::kAccessDenied;
+  }
+  return CameraResult::kError;
+}
+
+// Creates a controller that reports capture events to |listener|.
+// The base class is listed first in the initializer list (bases are always
+// initialized before members; the original order triggered -Wreorder), and
+// the stray semicolon after the body has been removed.
+CaptureControllerImpl::CaptureControllerImpl(
+    CaptureControllerListener* listener)
+    : CaptureController(), capture_controller_listener_(listener) {}
+
+// Tears down all capture state (recording, preview, Media Foundation and
+// DX resources) before the controller is destroyed. The stray semicolon
+// after the function body has been removed.
+CaptureControllerImpl::~CaptureControllerImpl() {
+  ResetCaptureController();
+  capture_controller_listener_ = nullptr;
+}
+
+// static
+// Enumerates video capture device sources via Media Foundation.
+// On success |devices| receives a CoTaskMem-allocated array of |count|
+// activation objects that the caller must release.
+bool CaptureControllerImpl::EnumerateVideoCaptureDeviceSources(
+    IMFActivate*** devices, UINT32* count) {
+  ComPtr<IMFAttributes> attributes;
+  HRESULT hr = MFCreateAttributes(&attributes, 1);
+
+  if (SUCCEEDED(hr)) {
+    hr = attributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+                             MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+  }
+
+  if (SUCCEEDED(hr)) {
+    hr = MFEnumDeviceSources(attributes.Get(), devices, count);
+  }
+
+  return SUCCEEDED(hr);
+}
+
+// Creates an audio capture source from the first available audio capture
+// device. NOTE(review): if enumeration succeeds but no audio devices exist
+// (count == 0), this returns the last HRESULT (S_OK) while leaving
+// audio_source_ null — callers appear to treat that as success; confirm.
+HRESULT CaptureControllerImpl::CreateDefaultAudioCaptureSource() {
+  audio_source_ = nullptr;
+  ComHeapPtr<IMFActivate*> devices;
+  UINT32 count = 0;
+
+  ComPtr<IMFAttributes> attributes;
+  HRESULT hr = MFCreateAttributes(&attributes, 1);
+
+  if (SUCCEEDED(hr)) {
+    hr = attributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+                             MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
+  }
+
+  if (SUCCEEDED(hr)) {
+    hr = MFEnumDeviceSources(attributes.Get(), &devices, &count);
+  }
+
+  if (SUCCEEDED(hr) && count > 0) {
+    ComHeapPtr<wchar_t> audio_device_id;
+    UINT32 audio_device_id_size;
+
+    // Use first audio device.
+    hr = devices[0]->GetAllocatedString(
+        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_ENDPOINT_ID, &audio_device_id,
+        &audio_device_id_size);
+
+    if (SUCCEEDED(hr)) {
+      // Build the attribute store used to create a device source for the
+      // selected audio endpoint.
+      ComPtr<IMFAttributes> audio_capture_source_attributes;
+      hr = MFCreateAttributes(&audio_capture_source_attributes, 2);
+
+      if (SUCCEEDED(hr)) {
+        hr = audio_capture_source_attributes->SetGUID(
+            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
+      }
+
+      if (SUCCEEDED(hr)) {
+        hr = audio_capture_source_attributes->SetString(
+            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_ENDPOINT_ID,
+            audio_device_id);
+      }
+
+      if (SUCCEEDED(hr)) {
+        hr = MFCreateDeviceSource(audio_capture_source_attributes.Get(),
+                                  audio_source_.GetAddressOf());
+      }
+    }
+  }
+
+  return hr;
+}
+
+// Creates a video capture source for the device identified by its symbolic
+// link (|video_device_id|). video_source_ holds the result on success.
+HRESULT CaptureControllerImpl::CreateVideoCaptureSourceForDevice(
+    const std::string& video_device_id) {
+  video_source_ = nullptr;
+
+  ComPtr<IMFAttributes> video_capture_source_attributes;
+  HRESULT hr = MFCreateAttributes(&video_capture_source_attributes, 2);
+
+  if (SUCCEEDED(hr)) {
+    hr = video_capture_source_attributes->SetGUID(
+        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+  }
+
+  if (SUCCEEDED(hr)) {
+    hr = video_capture_source_attributes->SetString(
+        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK,
+        Utf16FromUtf8(video_device_id).c_str());
+  }
+
+  if (SUCCEEDED(hr)) {
+    hr = MFCreateDeviceSource(video_capture_source_attributes.Get(),
+                              video_source_.GetAddressOf());
+  }
+
+  return hr;
+}
+
+// Creates a DX11 device and a DXGI device manager, then binds the device to
+// the manager so the capture engine can use it for hardware video work.
+HRESULT CaptureControllerImpl::CreateD3DManagerWithDX11Device() {
+  // TODO: Use existing ANGLE device
+
+  HRESULT hr = S_OK;
+  hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
+                         D3D11_CREATE_DEVICE_VIDEO_SUPPORT, nullptr, 0,
+                         D3D11_SDK_VERSION, &dx11_device_, nullptr, nullptr);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Enable multithread protection, since the device is accessed from the
+  // Media Foundation pipeline as well.
+  ComPtr<ID3D10Multithread> multi_thread;
+  hr = dx11_device_.As(&multi_thread);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  multi_thread->SetMultithreadProtected(TRUE);
+
+  hr = MFCreateDXGIDeviceManager(&dx_device_reset_token_,
+                                 dxgi_device_manager_.GetAddressOf());
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // The reset token pairs the DX11 device with this manager instance; it is
+  // reused during teardown in ResetCaptureController.
+  hr = dxgi_device_manager_->ResetDevice(dx11_device_.Get(),
+                                         dx_device_reset_token_);
+  return hr;
+}
+
+// Creates and initializes the Media Foundation capture engine along with
+// its D3D manager, video source, audio source (when recording audio) and
+// event callback handler. Initialization completes asynchronously: success
+// or failure is reported via the MF_CAPTURE_ENGINE_INITIALIZED event.
+// Components already injected by the test framework are left untouched.
+HRESULT CaptureControllerImpl::CreateCaptureEngine() {
+  assert(!video_device_id_.empty());
+
+  HRESULT hr = S_OK;
+  ComPtr<IMFAttributes> attributes;
+
+  // Creates capture engine only if not already initialized by test framework
+  if (!capture_engine_) {
+    ComPtr<IMFCaptureEngineClassFactory> capture_engine_factory;
+
+    hr = CoCreateInstance(CLSID_MFCaptureEngineClassFactory, nullptr,
+                          CLSCTX_INPROC_SERVER,
+                          IID_PPV_ARGS(&capture_engine_factory));
+    if (FAILED(hr)) {
+      return hr;
+    }
+
+    // Creates CaptureEngine.
+    hr = capture_engine_factory->CreateInstance(CLSID_MFCaptureEngine,
+                                                IID_PPV_ARGS(&capture_engine_));
+    if (FAILED(hr)) {
+      return hr;
+    }
+  }
+
+  hr = CreateD3DManagerWithDX11Device();
+
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Creates video source only if not already initialized by test framework
+  if (!video_source_) {
+    hr = CreateVideoCaptureSourceForDevice(video_device_id_);
+    if (FAILED(hr)) {
+      return hr;
+    }
+  }
+
+  // Creates audio source only if not already initialized by test framework
+  if (record_audio_ && !audio_source_) {
+    hr = CreateDefaultAudioCaptureSource();
+    if (FAILED(hr)) {
+      return hr;
+    }
+  }
+
+  if (!capture_engine_callback_handler_) {
+    capture_engine_callback_handler_ =
+        ComPtr<CaptureEngineListener>(new CaptureEngineListener(this));
+  }
+
+  hr = MFCreateAttributes(&attributes, 2);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = attributes->SetUnknown(MF_CAPTURE_ENGINE_D3D_MANAGER,
+                              dxgi_device_manager_.Get());
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // When not recording audio, tell the engine to use the video device only.
+  hr = attributes->SetUINT32(MF_CAPTURE_ENGINE_USE_VIDEO_DEVICE_ONLY,
+                             !record_audio_);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Check MF_CAPTURE_ENGINE_INITIALIZED event handling
+  // for response process.
+  hr = capture_engine_->Initialize(capture_engine_callback_handler_.Get(),
+                                   attributes.Get(), audio_source_.Get(),
+                                   video_source_.Get());
+  return hr;
+}
+
+// Stops any active recording and preview, shuts down Media Foundation and
+// releases all engine, source, media-type, DX and handler state. Teardown
+// order matters: recording and preview are stopped before MFShutdown, and
+// the DXGI manager's device binding is reset before the device is released.
+void CaptureControllerImpl::ResetCaptureController() {
+  if (record_handler_ && record_handler_->CanStop()) {
+    if (record_handler_->IsContinuousRecording()) {
+      StopRecord();
+    } else if (record_handler_->IsTimedRecording()) {
+      StopTimedRecord();
+    }
+  }
+
+  if (preview_handler_) {
+    StopPreview();
+  }
+
+  // Shuts down the media foundation platform object.
+  // Releases all resources including threads.
+  // Application should call MFShutdown the same number of times as MFStartup
+  if (media_foundation_started_) {
+    MFShutdown();
+  }
+
+  // States
+  media_foundation_started_ = false;
+  capture_engine_state_ = CaptureEngineState::kNotInitialized;
+  preview_frame_width_ = 0;
+  preview_frame_height_ = 0;
+  capture_engine_callback_handler_ = nullptr;
+  capture_engine_ = nullptr;
+  audio_source_ = nullptr;
+  video_source_ = nullptr;
+  base_preview_media_type_ = nullptr;
+  base_capture_media_type_ = nullptr;
+
+  // Unbind the DX11 device from the manager using the token acquired when
+  // it was bound in CreateD3DManagerWithDX11Device.
+  if (dxgi_device_manager_) {
+    dxgi_device_manager_->ResetDevice(dx11_device_.Get(),
+                                      dx_device_reset_token_);
+  }
+  dxgi_device_manager_ = nullptr;
+  dx11_device_ = nullptr;
+
+  record_handler_ = nullptr;
+  preview_handler_ = nullptr;
+  photo_handler_ = nullptr;
+  texture_handler_ = nullptr;
+}
+
+// Initializes capture for the given video device. Returns false and reports
+// the failure through capture_controller_listener_ if the controller is
+// already initialized/initializing or engine creation fails; on success the
+// engine continues initializing asynchronously (completion is signaled via
+// the MF_CAPTURE_ENGINE_INITIALIZED event).
+bool CaptureControllerImpl::InitCaptureDevice(
+    flutter::TextureRegistrar* texture_registrar, const std::string& device_id,
+    bool record_audio, ResolutionPreset resolution_preset) {
+  assert(capture_controller_listener_);
+
+  if (IsInitialized()) {
+    capture_controller_listener_->OnCreateCaptureEngineFailed(
+        CameraResult::kError, "Capture device already initialized");
+    return false;
+  } else if (capture_engine_state_ == CaptureEngineState::kInitializing) {
+    capture_controller_listener_->OnCreateCaptureEngineFailed(
+        CameraResult::kError, "Capture device already initializing");
+    return false;
+  }
+
+  capture_engine_state_ = CaptureEngineState::kInitializing;
+  resolution_preset_ = resolution_preset;
+  record_audio_ = record_audio;
+  texture_registrar_ = texture_registrar;
+  video_device_id_ = device_id;
+
+  // MFStartup must be called before using Media Foundation.
+  if (!media_foundation_started_) {
+    HRESULT hr = MFStartup(MF_VERSION);
+
+    if (FAILED(hr)) {
+      capture_controller_listener_->OnCreateCaptureEngineFailed(
+          GetCameraResult(hr), "Failed to create camera");
+      ResetCaptureController();
+      return false;
+    }
+
+    media_foundation_started_ = true;
+  }
+
+  HRESULT hr = CreateCaptureEngine();
+  if (FAILED(hr)) {
+    capture_controller_listener_->OnCreateCaptureEngineFailed(
+        GetCameraResult(hr), "Failed to create camera");
+    ResetCaptureController();
+    return false;
+  }
+
+  return true;
+}
+
+// Requests a photo capture to |file_path|. Errors are reported through
+// OnPicture; on success the result is delivered asynchronously when the
+// MF_CAPTURE_ENGINE_PHOTO_TAKEN event is handled.
+void CaptureControllerImpl::TakePicture(const std::string& file_path) {
+  assert(capture_engine_callback_handler_);
+  assert(capture_engine_);
+
+  if (!IsInitialized()) {
+    return OnPicture(CameraResult::kError, "Not initialized");
+  }
+
+  HRESULT hr = S_OK;
+
+  if (!base_capture_media_type_) {
+    // Enumerates mediatypes and finds media type for video capture.
+    hr = FindBaseMediaTypes();
+    if (FAILED(hr)) {
+      return OnPicture(GetCameraResult(hr),
+                       "Failed to initialize photo capture");
+    }
+  }
+
+  // The photo handler is created lazily; an existing handler still busy
+  // with a previous photo rejects the request.
+  if (!photo_handler_) {
+    photo_handler_ = std::make_unique<PhotoHandler>();
+  } else if (photo_handler_->IsTakingPhoto()) {
+    return OnPicture(CameraResult::kError, "Photo already requested");
+  }
+
+  // Check MF_CAPTURE_ENGINE_PHOTO_TAKEN event handling
+  // for response process.
+  hr = photo_handler_->TakePhoto(file_path, capture_engine_.Get(),
+                                 base_capture_media_type_.Get());
+  if (FAILED(hr)) {
+    // Destroy photo handler on error cases to make sure state is reset.
+    photo_handler_ = nullptr;
+    return OnPicture(GetCameraResult(hr), "Failed to take photo");
+  }
+}
+
+// Returns the maximum preview frame height (in pixels) allowed by the
+// currently configured resolution preset; 0xffffffff means no limit.
+// The unreachable `break` statements after each `return` were removed.
+uint32_t CaptureControllerImpl::GetMaxPreviewHeight() const {
+  switch (resolution_preset_) {
+    case ResolutionPreset::kLow:
+      return 240;
+    case ResolutionPreset::kMedium:
+      return 480;
+    case ResolutionPreset::kHigh:
+      return 720;
+    case ResolutionPreset::kVeryHigh:
+      return 1080;
+    case ResolutionPreset::kUltraHigh:
+      return 2160;
+    case ResolutionPreset::kMax:
+    case ResolutionPreset::kAuto:
+    default:
+      // No limit.
+      return 0xffffffff;
+  }
+}
+
+// Finds best media type for given source stream index and max height;
+// Returns true when a suitable type was found; the winner is written to
+// |target_media_type| (caller must pass a non-null, initially-null slot —
+// it is dereferenced unconditionally at the end) and its dimensions to the
+// optional |target_frame_width|/|target_frame_height| outputs.
+// NOTE(review): the "better" test is an OR across width/height/framerate,
+// so a candidate that improves one dimension can replace one that was
+// better in another — presumably intentional "prefer bigger" heuristic;
+// confirm before tightening.
+bool FindBestMediaType(DWORD source_stream_index, IMFCaptureSource* source,
+                       IMFMediaType** target_media_type, uint32_t max_height,
+                       uint32_t* target_frame_width,
+                       uint32_t* target_frame_height,
+                       float minimum_accepted_framerate = 15.f) {
+  assert(source);
+  ComPtr<IMFMediaType> media_type;
+
+  uint32_t best_width = 0;
+  uint32_t best_height = 0;
+  float best_framerate = 0.f;
+
+  // Loop native media types until enumeration runs out of indices.
+  for (int i = 0;; i++) {
+    if (FAILED(source->GetAvailableDeviceMediaType(
+            source_stream_index, i, media_type.GetAddressOf()))) {
+      break;
+    }
+
+    // Skip types without a valid frame rate.
+    uint32_t frame_rate_numerator, frame_rate_denominator;
+    if (FAILED(MFGetAttributeRatio(media_type.Get(), MF_MT_FRAME_RATE,
+                                   &frame_rate_numerator,
+                                   &frame_rate_denominator)) ||
+        !frame_rate_denominator) {
+      continue;
+    }
+
+    float frame_rate =
+        static_cast<float>(frame_rate_numerator) / frame_rate_denominator;
+    if (frame_rate < minimum_accepted_framerate) {
+      continue;
+    }
+
+    uint32_t frame_width;
+    uint32_t frame_height;
+    if (SUCCEEDED(MFGetAttributeSize(media_type.Get(), MF_MT_FRAME_SIZE,
+                                     &frame_width, &frame_height))) {
+      // Update target mediatype
+      if (frame_height <= max_height &&
+          (best_width < frame_width || best_height < frame_height ||
+           best_framerate < frame_rate)) {
+        media_type.CopyTo(target_media_type);
+        best_width = frame_width;
+        best_height = frame_height;
+        best_framerate = frame_rate;
+      }
+    }
+  }
+
+  // Both outputs must be requested together to receive the dimensions.
+  if (target_frame_width && target_frame_height) {
+    *target_frame_width = best_width;
+    *target_frame_height = best_height;
+  }
+
+  return *target_media_type != nullptr;
+}
+
+// Determines the base media types used for preview and for record/photo
+// capture by enumerating the capture source's native media types.
+// Returns E_FAIL when the controller is not initialized or no suitable
+// media type exists. C-style (DWORD) casts replaced with static_cast.
+HRESULT CaptureControllerImpl::FindBaseMediaTypes() {
+  if (!IsInitialized()) {
+    return E_FAIL;
+  }
+
+  ComPtr<IMFCaptureSource> source;
+  HRESULT hr = capture_engine_->GetSource(&source);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Find base media type for previewing, capped to the preset's max height.
+  if (!FindBestMediaType(
+          static_cast<DWORD>(
+              MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_PREVIEW),
+          source.Get(), base_preview_media_type_.GetAddressOf(),
+          GetMaxPreviewHeight(), &preview_frame_width_,
+          &preview_frame_height_)) {
+    return E_FAIL;
+  }
+
+  // Find base media type for record and photo capture (no height limit).
+  if (!FindBestMediaType(
+          static_cast<DWORD>(
+              MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_RECORD),
+          source.Get(), base_capture_media_type_.GetAddressOf(), 0xffffffff,
+          nullptr, nullptr)) {
+    return E_FAIL;
+  }
+
+  return S_OK;
+}
+
+// Starts video recording to |file_path|; a non-negative
+// |max_video_duration_ms| makes this a timed recording that stops itself.
+// Errors are reported through OnRecordStarted; success is delivered
+// asynchronously via the MF_CAPTURE_ENGINE_RECORD_STARTED event.
+void CaptureControllerImpl::StartRecord(const std::string& file_path,
+                                        int64_t max_video_duration_ms) {
+  assert(capture_engine_);
+
+  if (!IsInitialized()) {
+    return OnRecordStarted(CameraResult::kError,
+                           "Camera not initialized. Camera should be "
+                           "disposed and reinitialized.");
+  }
+
+  HRESULT hr = S_OK;
+
+  if (!base_capture_media_type_) {
+    // Enumerates mediatypes and finds media type for video capture.
+    hr = FindBaseMediaTypes();
+    if (FAILED(hr)) {
+      return OnRecordStarted(GetCameraResult(hr),
+                             "Failed to initialize video recording");
+    }
+  }
+
+  // The record handler is created lazily; an existing handler that is not
+  // ready to start means a previous recording is still in progress.
+  if (!record_handler_) {
+    record_handler_ = std::make_unique<RecordHandler>(record_audio_);
+  } else if (!record_handler_->CanStart()) {
+    return OnRecordStarted(
+        CameraResult::kError,
+        "Recording cannot be started. Previous recording must be stopped "
+        "first.");
+  }
+
+  // Check MF_CAPTURE_ENGINE_RECORD_STARTED event handling for response
+  // process.
+  hr = record_handler_->StartRecord(file_path, max_video_duration_ms,
+                                    capture_engine_.Get(),
+                                    base_capture_media_type_.Get());
+  if (FAILED(hr)) {
+    // Destroy record handler on error cases to make sure state is reset.
+    record_handler_ = nullptr;
+    return OnRecordStarted(GetCameraResult(hr),
+                           "Failed to start video recording");
+  }
+}
+
+// Stops a continuous recording. Errors are reported through
+// OnRecordStopped; success is delivered asynchronously via the
+// MF_CAPTURE_ENGINE_RECORD_STOPPED event.
+void CaptureControllerImpl::StopRecord() {
+  assert(capture_controller_listener_);
+
+  if (!IsInitialized()) {
+    return OnRecordStopped(CameraResult::kError,
+                           "Camera not initialized. Camera should be "
+                           "disposed and reinitialized.");
+  }
+
+  // Fixed: the original guard used `&&`, which dereferenced a null
+  // record_handler_ when no recording existed and never rejected an
+  // existing handler that could not be stopped. Reject the request when
+  // there is no handler OR it cannot stop.
+  if (!record_handler_ || !record_handler_->CanStop()) {
+    return OnRecordStopped(CameraResult::kError,
+                           "Recording cannot be stopped.");
+  }
+
+  // Check MF_CAPTURE_ENGINE_RECORD_STOPPED event handling for response
+  // process.
+  HRESULT hr = record_handler_->StopRecord(capture_engine_.Get());
+  if (FAILED(hr)) {
+    return OnRecordStopped(GetCameraResult(hr),
+                           "Failed to stop video recording");
+  }
+}
+
+// Stops timed recording. Called internally when requested time is passed.
+// Check MF_CAPTURE_ENGINE_RECORD_STOPPED event handling for response process.
+// Silently returns when there is no active timed recording, since this may
+// be invoked from teardown paths.
+void CaptureControllerImpl::StopTimedRecord() {
+  assert(capture_controller_listener_);
+  if (!record_handler_ || !record_handler_->IsTimedRecording()) {
+    return;
+  }
+
+  HRESULT hr = record_handler_->StopRecord(capture_engine_.Get());
+  if (FAILED(hr)) {
+    // Destroy record handler on error cases to make sure state is reset.
+    record_handler_ = nullptr;
+    return capture_controller_listener_->OnVideoRecordFailed(
+        GetCameraResult(hr), "Failed to record video");
+  }
+}
+
+// Starts capturing preview frames using preview handler
+// After first frame is captured, OnPreviewStarted is called
+void CaptureControllerImpl::StartPreview() {
+  assert(capture_engine_callback_handler_);
+  assert(capture_engine_);
+  assert(texture_handler_);
+
+  if (!IsInitialized() || !texture_handler_) {
+    return OnPreviewStarted(CameraResult::kError,
+                            "Camera not initialized. Camera should be "
+                            "disposed and reinitialized.");
+  }
+
+  HRESULT hr = S_OK;
+
+  if (!base_preview_media_type_) {
+    // Enumerates mediatypes and finds media type for video capture.
+    hr = FindBaseMediaTypes();
+    if (FAILED(hr)) {
+      return OnPreviewStarted(GetCameraResult(hr),
+                              "Failed to initialize video preview");
+    }
+  }
+
+  // Resize the Flutter texture to the preview resolution selected above
+  // (preview_frame_width_/height_; presumably set by FindBaseMediaTypes).
+  texture_handler_->UpdateTextureSize(preview_frame_width_,
+                                      preview_frame_height_);
+
+  // TODO(loic-sharma): This does not handle duplicate calls properly.
+  // See: https://github.com/flutter/flutter/issues/108404
+  if (!preview_handler_) {
+    preview_handler_ = std::make_unique<PreviewHandler>();
+  } else if (preview_handler_->IsInitialized()) {
+    return OnPreviewStarted(CameraResult::kSuccess, "");
+  } else {
+    return OnPreviewStarted(CameraResult::kError, "Preview already exists");
+  }
+
+  // Check MF_CAPTURE_ENGINE_PREVIEW_STARTED event handling for response
+  // process.
+  hr = preview_handler_->StartPreview(capture_engine_.Get(),
+                                      base_preview_media_type_.Get(),
+                                      capture_engine_callback_handler_.Get());
+
+  if (FAILED(hr)) {
+    // Destroy preview handler on error cases to make sure state is resetted.
+    preview_handler_ = nullptr;
+    return OnPreviewStarted(GetCameraResult(hr),
+                            "Failed to start video preview");
+  }
+}
+
+// Stops preview. Called by destructor.
+// Use the PausePreview and ResumePreview methods for
+// pausing and resuming the preview.
+// Check MF_CAPTURE_ENGINE_PREVIEW_STOPPED event handling for response
+// process.
+HRESULT CaptureControllerImpl::StopPreview() {
+  assert(capture_engine_);
+
+  // Nothing to stop; report success.
+  if (!IsInitialized() || !preview_handler_) {
+    return S_OK;
+  }
+
+  // Requests to stop preview.
+  return preview_handler_->StopPreview(capture_engine_.Get());
+}
+
+// Marks preview as paused.
+// When preview is paused, captured frames are not processed for preview
+// and flutter texture is not updated
+void CaptureControllerImpl::PausePreview() {
+  assert(capture_controller_listener_);
+
+  if (!preview_handler_ || !preview_handler_->IsInitialized()) {
+    return capture_controller_listener_->OnPausePreviewFailed(
+        CameraResult::kError, "Preview not started");
+  }
+
+  // PausePreview() returns whether the handler accepted the state change.
+  if (preview_handler_->PausePreview()) {
+    capture_controller_listener_->OnPausePreviewSucceeded();
+  } else {
+    capture_controller_listener_->OnPausePreviewFailed(
+        CameraResult::kError, "Failed to pause preview");
+  }
+}
+
+// Marks preview as not paused.
+// When preview is not paused, captured frames are processed for preview
+// and flutter texture is updated.
+void CaptureControllerImpl::ResumePreview() {
+  assert(capture_controller_listener_);
+
+  if (!preview_handler_ || !preview_handler_->IsInitialized()) {
+    return capture_controller_listener_->OnResumePreviewFailed(
+        CameraResult::kError, "Preview not started");
+  }
+
+  if (preview_handler_->ResumePreview()) {
+    capture_controller_listener_->OnResumePreviewSucceeded();
+  } else {
+    // Error message fixed: this path is a failed resume, not a failed pause
+    // (was a copy/paste of PausePreview's message).
+    capture_controller_listener_->OnResumePreviewFailed(
+        CameraResult::kError, "Failed to resume preview");
+  }
+}
+
+// Handles capture engine events.
+// Called via IMFCaptureEngineOnEventCallback implementation.
+// Implements CaptureEngineObserver::OnEvent.
+void CaptureControllerImpl::OnEvent(IMFMediaEvent* event) {
+  // Events are only processed while initialized or initializing; anything
+  // arriving after disposal is dropped.
+  if (!IsInitialized() &&
+      capture_engine_state_ != CaptureEngineState::kInitializing) {
+    return;
+  }
+
+  GUID extended_type_guid;
+  if (SUCCEEDED(event->GetExtendedType(&extended_type_guid))) {
+    std::string error;
+
+    // Status of the operation the event reports on; events whose status
+    // cannot be read are silently dropped.
+    HRESULT event_hr;
+    if (FAILED(event->GetStatus(&event_hr))) {
+      return;
+    }
+
+    if (FAILED(event_hr)) {
+      // Reads system error
+      _com_error err(event_hr);
+      error = Utf8FromUtf16(err.ErrorMessage());
+    }
+
+    CameraResult event_result = GetCameraResult(event_hr);
+    if (extended_type_guid == MF_CAPTURE_ENGINE_ERROR) {
+      OnCaptureEngineError(event_result, error);
+    } else if (extended_type_guid == MF_CAPTURE_ENGINE_INITIALIZED) {
+      OnCaptureEngineInitialized(event_result, error);
+    } else if (extended_type_guid == MF_CAPTURE_ENGINE_PREVIEW_STARTED) {
+      // Preview is marked as started after first frame is captured.
+      // This is because, CaptureEngine might inform that preview is started
+      // even if error is thrown right after.
+    } else if (extended_type_guid == MF_CAPTURE_ENGINE_PREVIEW_STOPPED) {
+      OnPreviewStopped(event_result, error);
+    } else if (extended_type_guid == MF_CAPTURE_ENGINE_RECORD_STARTED) {
+      OnRecordStarted(event_result, error);
+    } else if (extended_type_guid == MF_CAPTURE_ENGINE_RECORD_STOPPED) {
+      OnRecordStopped(event_result, error);
+    } else if (extended_type_guid == MF_CAPTURE_ENGINE_PHOTO_TAKEN) {
+      OnPicture(event_result, error);
+    } else if (extended_type_guid == MF_CAPTURE_ENGINE_CAMERA_STREAM_BLOCKED) {
+      // TODO: Inform capture state to flutter.
+    } else if (extended_type_guid ==
+               MF_CAPTURE_ENGINE_CAMERA_STREAM_UNBLOCKED) {
+      // TODO: Inform capture state to flutter.
+    }
+  }
+}
+
+// Handles Picture event and informs CaptureControllerListener.
+void CaptureControllerImpl::OnPicture(CameraResult result,
+                                      const std::string& error) {
+  if (result == CameraResult::kSuccess && photo_handler_) {
+    if (capture_controller_listener_) {
+      // Reports the filesystem path of the captured photo to the listener.
+      std::string path = photo_handler_->GetPhotoPath();
+      capture_controller_listener_->OnTakePictureSucceeded(path);
+    }
+    photo_handler_->OnPhotoTaken();
+  } else {
+    if (capture_controller_listener_) {
+      capture_controller_listener_->OnTakePictureFailed(result, error);
+    }
+    // Destroy photo handler on error cases to make sure state is resetted.
+    photo_handler_ = nullptr;
+  }
+}
+
+// Handles CaptureEngineInitialized event and informs
+// CaptureControllerListener.
+void CaptureControllerImpl::OnCaptureEngineInitialized(
+    CameraResult result, const std::string& error) {
+  // Without a listener there is nobody to report to; nothing is done.
+  if (capture_controller_listener_) {
+    if (result != CameraResult::kSuccess) {
+      capture_controller_listener_->OnCreateCaptureEngineFailed(
+          result, "Failed to initialize capture engine");
+      ResetCaptureController();
+      return;
+    }
+
+    // Create texture handler and register new texture.
+    texture_handler_ = std::make_unique<TextureHandler>(texture_registrar_);
+
+    // Negative id indicates registration failure.
+    int64_t texture_id = texture_handler_->RegisterTexture();
+    if (texture_id >= 0) {
+      capture_controller_listener_->OnCreateCaptureEngineSucceeded(texture_id);
+      capture_engine_state_ = CaptureEngineState::kInitialized;
+    } else {
+      capture_controller_listener_->OnCreateCaptureEngineFailed(
+          CameraResult::kError, "Failed to create texture_id");
+      // Reset state
+      ResetCaptureController();
+    }
+  }
+}
+
+// Handles CaptureEngineError event and informs CaptureControllerListener.
+// The controller state is left unchanged; only the listener is notified.
+void CaptureControllerImpl::OnCaptureEngineError(CameraResult result,
+                                                 const std::string& error) {
+  if (capture_controller_listener_) {
+    capture_controller_listener_->OnCaptureError(result, error);
+  }
+
+  // TODO: If MF_CAPTURE_ENGINE_ERROR is returned,
+  // should capture controller be reinitialized automatically?
+}
+
+// Handles PreviewStarted event and informs CaptureControllerListener.
+// This should be called only after first frame has been received or
+// in error cases.
+void CaptureControllerImpl::OnPreviewStarted(CameraResult result,
+                                             const std::string& error) {
+  if (preview_handler_ && result == CameraResult::kSuccess) {
+    preview_handler_->OnPreviewStarted();
+  } else {
+    // Destroy preview handler on error cases to make sure state is resetted.
+    preview_handler_ = nullptr;
+  }
+
+  if (capture_controller_listener_) {
+    // Success is only reported when a valid preview size is known.
+    if (result == CameraResult::kSuccess && preview_frame_width_ > 0 &&
+        preview_frame_height_ > 0) {
+      capture_controller_listener_->OnStartPreviewSucceeded(
+          preview_frame_width_, preview_frame_height_);
+    } else {
+      capture_controller_listener_->OnStartPreviewFailed(result, error);
+    }
+  }
+}
+
+// Handles PreviewStopped event.
+void CaptureControllerImpl::OnPreviewStopped(CameraResult result,
+                                             const std::string& error) {
+  // Preview handler is destroyed if preview is stopped as it
+  // does not have any use anymore.
+  preview_handler_ = nullptr;
+}
+
+// Handles RecordStarted event and informs CaptureControllerListener.
+void CaptureControllerImpl::OnRecordStarted(CameraResult result,
+                                            const std::string& error) {
+  if (result == CameraResult::kSuccess && record_handler_) {
+    record_handler_->OnRecordStarted();
+    if (capture_controller_listener_) {
+      capture_controller_listener_->OnStartRecordSucceeded();
+    }
+  } else {
+    if (capture_controller_listener_) {
+      capture_controller_listener_->OnStartRecordFailed(result, error);
+    }
+
+    // Destroy record handler on error cases to make sure state is resetted.
+    record_handler_ = nullptr;
+  }
+}
+
+// Handles RecordStopped event and informs CaptureControllerListener.
+void CaptureControllerImpl::OnRecordStopped(CameraResult result,
+                                            const std::string& error) {
+  if (capture_controller_listener_ && record_handler_) {
+    // Always calls OnStopRecord listener methods
+    // to handle separate stop record request for timed records.
+
+    if (result == CameraResult::kSuccess) {
+      std::string path = record_handler_->GetRecordPath();
+      capture_controller_listener_->OnStopRecordSucceeded(path);
+      // Timed recordings additionally report the video-record result;
+      // duration is converted from microseconds to milliseconds.
+      if (record_handler_->IsTimedRecording()) {
+        capture_controller_listener_->OnVideoRecordSucceeded(
+            path, (record_handler_->GetRecordedDuration() / 1000));
+      }
+    } else {
+      capture_controller_listener_->OnStopRecordFailed(result, error);
+      if (record_handler_->IsTimedRecording()) {
+        capture_controller_listener_->OnVideoRecordFailed(result, error);
+      }
+    }
+  }
+
+  if (result == CameraResult::kSuccess && record_handler_) {
+    record_handler_->OnRecordStopped();
+  } else {
+    // Destroy record handler on error cases to make sure state is resetted.
+    record_handler_ = nullptr;
+  }
+}
+
+// Updates texture handlers buffer with given data.
+// Called via IMFCaptureEngineOnSampleCallback implementation.
+// Implements CaptureEngineObserver::UpdateBuffer.
+bool CaptureControllerImpl::UpdateBuffer(uint8_t* buffer,
+                                         uint32_t data_length) {
+  // No texture to receive the frame; report failure.
+  if (!texture_handler_) {
+    return false;
+  }
+  return texture_handler_->UpdateBuffer(buffer, data_length);
+}
+
+// Handles capture time update from each processed frame.
+// Stops timed recordings if requested recording duration has passed.
+// Called via IMFCaptureEngineOnSampleCallback implementation.
+// Implements CaptureEngineObserver::UpdateCaptureTime.
+//
+// capture_time_us: Sample presentation time in microseconds.
+void CaptureControllerImpl::UpdateCaptureTime(uint64_t capture_time_us) {
+  if (!IsInitialized()) {
+    return;
+  }
+
+  if (preview_handler_ && preview_handler_->IsStarting()) {
+    // Informs that first frame is captured successfully and preview has
+    // started.
+    OnPreviewStarted(CameraResult::kSuccess, "");
+  }
+
+  // Checks if max_video_duration_ms is passed.
+  if (record_handler_) {
+    record_handler_->UpdateRecordingTime(capture_time_us);
+    if (record_handler_->ShouldStopTimedRecording()) {
+      StopTimedRecord();
+    }
+  }
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/capture_controller.h b/packages/camera/camera_windows/windows/capture_controller.h
new file mode 100644
index 0000000..9536be7
--- /dev/null
+++ b/packages/camera/camera_windows/windows/capture_controller.h
@@ -0,0 +1,296 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_CONTROLLER_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_CONTROLLER_H_
+
+#include <d3d11.h>
+#include <flutter/texture_registrar.h>
+#include <mfapi.h>
+#include <mfcaptureengine.h>
+#include <mferror.h>
+#include <mfidl.h>
+#include <windows.h>
+#include <wrl/client.h>
+
+#include <memory>
+#include <string>
+
+#include "capture_controller_listener.h"
+#include "capture_engine_listener.h"
+#include "photo_handler.h"
+#include "preview_handler.h"
+#include "record_handler.h"
+#include "texture_handler.h"
+
+namespace camera_windows {
+using flutter::TextureRegistrar;
+using Microsoft::WRL::ComPtr;
+
+// Camera resolution presets. Used to request a capture resolution.
+// The preset is interpreted as a maximum capture resolution height
+// (see |CaptureController::InitCaptureDevice|).
+enum class ResolutionPreset {
+  // Automatic resolution, uses the highest resolution available.
+  kAuto,
+  // 240p (320x240)
+  kLow,
+  // 480p (720x480)
+  kMedium,
+  // 720p (1280x720)
+  kHigh,
+  // 1080p (1920x1080)
+  kVeryHigh,
+  // 2160p (4096x2160)
+  kUltraHigh,
+  // The highest resolution available.
+  kMax,
+};
+
+// Camera capture engine state.
+//
+// On creation, |CaptureControllers| start in state |kNotInitialized|.
+// On initialization, the capture controller transitions to the |kInitializing|
+// and then |kInitialized| state. |kInitializing| covers the time between the
+// engine-creation request and the MF_CAPTURE_ENGINE_INITIALIZED event.
+enum class CaptureEngineState { kNotInitialized, kInitializing, kInitialized };
+
+// Interface for a class that enumerates video capture device sources.
+class VideoCaptureDeviceEnumerator {
+ public:
+  // Virtual destructor so implementations can be destroyed through this
+  // interface without undefined behavior.
+  virtual ~VideoCaptureDeviceEnumerator() = default;
+
+ private:
+  // Fills |devices| with activation objects for the available video capture
+  // devices and |count| with their number. Return value presumably indicates
+  // success — TODO confirm against implementations.
+  virtual bool EnumerateVideoCaptureDeviceSources(IMFActivate*** devices,
+                                                  UINT32* count) = 0;
+};
+
+// Interface implemented by capture controllers.
+//
+// Capture controllers are used to capture video streams or still photos from
+// their associated |Camera|.
+class CaptureController {
+ public:
+  CaptureController() {}
+  virtual ~CaptureController() = default;
+
+  // Disallow copy and move.
+  CaptureController(const CaptureController&) = delete;
+  CaptureController& operator=(const CaptureController&) = delete;
+
+  // Initializes the capture controller with the specified device id.
+  //
+  // Returns false if the capture controller could not be initialized
+  // or is already initialized.
+  //
+  // texture_registrar: Pointer to Flutter TextureRegistrar instance. Used to
+  //                    register texture for capture preview.
+  // device_id:         A string that holds information of camera device id to
+  //                    be captured.
+  // record_audio:      A boolean value telling if audio should be captured on
+  //                    video recording.
+  // resolution_preset: Maximum capture resolution height.
+  virtual bool InitCaptureDevice(TextureRegistrar* texture_registrar,
+                                 const std::string& device_id,
+                                 bool record_audio,
+                                 ResolutionPreset resolution_preset) = 0;
+
+  // Returns preview frame width
+  virtual uint32_t GetPreviewWidth() const = 0;
+
+  // Returns preview frame height
+  virtual uint32_t GetPreviewHeight() const = 0;
+
+  // Starts the preview.
+  virtual void StartPreview() = 0;
+
+  // Pauses the preview.
+  virtual void PausePreview() = 0;
+
+  // Resumes the preview.
+  virtual void ResumePreview() = 0;
+
+  // Starts recording video.
+  //
+  // file_path:             Filesystem path the recorded video is written to.
+  // max_video_duration_ms: Maximum recording duration in milliseconds, for
+  //                        timed recordings.
+  virtual void StartRecord(const std::string& file_path,
+                           int64_t max_video_duration_ms) = 0;
+
+  // Stops the current video recording.
+  virtual void StopRecord() = 0;
+
+  // Captures a still photo.
+  //
+  // file_path: Filesystem path the captured image is written to.
+  virtual void TakePicture(const std::string& file_path) = 0;
+};
+
+// Concrete implementation of the |CaptureController| interface.
+//
+// Handles the video preview stream via a |PreviewHandler| instance, video
+// capture via a |RecordHandler| instance, and still photo capture via a
+// |PhotoHandler| instance.
+class CaptureControllerImpl : public CaptureController,
+                              public CaptureEngineObserver {
+ public:
+  static bool EnumerateVideoCaptureDeviceSources(IMFActivate*** devices,
+                                                 UINT32* count);
+
+  explicit CaptureControllerImpl(CaptureControllerListener* listener);
+  virtual ~CaptureControllerImpl();
+
+  // Disallow copy and move.
+  CaptureControllerImpl(const CaptureControllerImpl&) = delete;
+  CaptureControllerImpl& operator=(const CaptureControllerImpl&) = delete;
+
+  // CaptureController
+  bool InitCaptureDevice(TextureRegistrar* texture_registrar,
+                         const std::string& device_id, bool record_audio,
+                         ResolutionPreset resolution_preset) override;
+  uint32_t GetPreviewWidth() const override { return preview_frame_width_; }
+  uint32_t GetPreviewHeight() const override { return preview_frame_height_; }
+  void StartPreview() override;
+  void PausePreview() override;
+  void ResumePreview() override;
+  void StartRecord(const std::string& file_path,
+                   int64_t max_video_duration_ms) override;
+  void StopRecord() override;
+  void TakePicture(const std::string& file_path) override;
+
+  // CaptureEngineObserver
+  void OnEvent(IMFMediaEvent* event) override;
+  // Samples are only processed while initialized and the preview is running.
+  bool IsReadyForSample() const override {
+    return capture_engine_state_ == CaptureEngineState::kInitialized &&
+           preview_handler_ && preview_handler_->IsRunning();
+  }
+  bool UpdateBuffer(uint8_t* data, uint32_t data_length) override;
+  void UpdateCaptureTime(uint64_t capture_time) override;
+
+  // Sets capture engine, for testing purposes.
+  void SetCaptureEngine(IMFCaptureEngine* capture_engine) {
+    capture_engine_ = capture_engine;
+  }
+
+  // Sets video source, for testing purposes.
+  void SetVideoSource(IMFMediaSource* video_source) {
+    video_source_ = video_source;
+  }
+
+  // Sets audio source, for testing purposes.
+  void SetAudioSource(IMFMediaSource* audio_source) {
+    audio_source_ = audio_source;
+  }
+
+ private:
+  // Helper function to return initialized state as boolean.
+  bool IsInitialized() const {
+    return capture_engine_state_ == CaptureEngineState::kInitialized;
+  }
+
+  // Resets capture controller state.
+  // This is called if capture engine creation fails or is disposed.
+  void ResetCaptureController();
+
+  // Returns max preview height calculated from resolution preset.
+  uint32_t GetMaxPreviewHeight() const;
+
+  // Uses first audio source to capture audio.
+  // Note: Enumerating audio sources via platform interface is not supported.
+  HRESULT CreateDefaultAudioCaptureSource();
+
+  // Initializes video capture source from camera device.
+  HRESULT CreateVideoCaptureSourceForDevice(const std::string& video_device_id);
+
+  // Creates DX11 Device and D3D Manager.
+  HRESULT CreateD3DManagerWithDX11Device();
+
+  // Initializes capture engine object.
+  HRESULT CreateCaptureEngine();
+
+  // Enumerates video_sources media types and finds out best resolution
+  // for preview and video capture.
+  HRESULT FindBaseMediaTypes();
+
+  // Stops timed video record. Called internally by the record handler when
+  // max recording time is exceeded.
+  void StopTimedRecord();
+
+  // Stops preview. Called internally on camera reset and dispose.
+  HRESULT StopPreview();
+
+  // Handles capture engine initialization event.
+  void OnCaptureEngineInitialized(CameraResult result,
+                                  const std::string& error);
+
+  // Handles capture engine errors.
+  void OnCaptureEngineError(CameraResult result, const std::string& error);
+
+  // Handles picture events.
+  void OnPicture(CameraResult result, const std::string& error);
+
+  // Handles preview started events.
+  void OnPreviewStarted(CameraResult result, const std::string& error);
+
+  // Handles preview stopped events.
+  void OnPreviewStopped(CameraResult result, const std::string& error);
+
+  // Handles record started events.
+  void OnRecordStarted(CameraResult result, const std::string& error);
+
+  // Handles record stopped events.
+  void OnRecordStopped(CameraResult result, const std::string& error);
+
+  bool media_foundation_started_ = false;
+  bool record_audio_ = false;
+  uint32_t preview_frame_width_ = 0;
+  uint32_t preview_frame_height_ = 0;
+  UINT dx_device_reset_token_ = 0;
+  std::unique_ptr<RecordHandler> record_handler_;
+  std::unique_ptr<PreviewHandler> preview_handler_;
+  std::unique_ptr<PhotoHandler> photo_handler_;
+  std::unique_ptr<TextureHandler> texture_handler_;
+  // Not owned; receives all success/failure callbacks.
+  CaptureControllerListener* capture_controller_listener_;
+
+  std::string video_device_id_;
+  CaptureEngineState capture_engine_state_ =
+      CaptureEngineState::kNotInitialized;
+  ResolutionPreset resolution_preset_ = ResolutionPreset::kMedium;
+  ComPtr<IMFCaptureEngine> capture_engine_;
+  ComPtr<CaptureEngineListener> capture_engine_callback_handler_;
+  ComPtr<IMFDXGIDeviceManager> dxgi_device_manager_;
+  ComPtr<ID3D11Device> dx11_device_;
+  ComPtr<IMFMediaType> base_capture_media_type_;
+  ComPtr<IMFMediaType> base_preview_media_type_;
+  ComPtr<IMFMediaSource> video_source_;
+  ComPtr<IMFMediaSource> audio_source_;
+
+  // Not owned; used to register the preview texture.
+  TextureRegistrar* texture_registrar_ = nullptr;
+};
+
+// Interface for factory classes that create |CaptureController| instances.
+class CaptureControllerFactory {
+ public:
+  CaptureControllerFactory() {}
+  virtual ~CaptureControllerFactory() = default;
+
+  // Disallow copy and move.
+  CaptureControllerFactory(const CaptureControllerFactory&) = delete;
+  CaptureControllerFactory& operator=(const CaptureControllerFactory&) = delete;
+
+  // Create and return a |CaptureController| that makes callbacks on the
+  // specified |CaptureControllerListener|, which must not be null.
+  virtual std::unique_ptr<CaptureController> CreateCaptureController(
+      CaptureControllerListener* listener) = 0;
+};
+
+// Concrete implementation of |CaptureControllerFactory|.
+class CaptureControllerFactoryImpl : public CaptureControllerFactory {
+ public:
+  CaptureControllerFactoryImpl() {}
+  virtual ~CaptureControllerFactoryImpl() = default;
+
+  // Disallow copy and move.
+  CaptureControllerFactoryImpl(const CaptureControllerFactoryImpl&) = delete;
+  CaptureControllerFactoryImpl& operator=(const CaptureControllerFactoryImpl&) =
+      delete;
+
+  // Creates a |CaptureControllerImpl| that reports to |listener|.
+  std::unique_ptr<CaptureController> CreateCaptureController(
+      CaptureControllerListener* listener) override {
+    return std::make_unique<CaptureControllerImpl>(listener);
+  }
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_CONTROLLER_H_
diff --git a/packages/camera/camera_windows/windows/capture_controller_listener.h b/packages/camera/camera_windows/windows/capture_controller_listener.h
new file mode 100644
index 0000000..bc7a173
--- /dev/null
+++ b/packages/camera/camera_windows/windows/capture_controller_listener.h
@@ -0,0 +1,134 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_CONTROLLER_LISTENER_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_CONTROLLER_LISTENER_H_
+
+#include <cstdint>
+#include <functional>
+#include <string>
+
+namespace camera_windows {
+
+// Results that can occur when interacting with the camera.
+// Reported via |CaptureControllerListener| callbacks to describe the
+// outcome of an operation.
+enum class CameraResult {
+  // Camera operation succeeded.
+  kSuccess,
+
+  // Camera operation failed.
+  kError,
+
+  // Camera access permission is denied.
+  kAccessDenied,
+};
+
+// Interface for classes that receives callbacks on events from the associated
+// |CaptureController|.
+class CaptureControllerListener {
+ public:
+  virtual ~CaptureControllerListener() = default;
+
+  // Called by CaptureController on successful capture engine initialization.
+  //
+  // texture_id: A 64bit integer id registered by TextureRegistrar
+  virtual void OnCreateCaptureEngineSucceeded(int64_t texture_id) = 0;
+
+  // Called by CaptureController if initializing the capture engine fails.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnCreateCaptureEngineFailed(CameraResult result,
+                                           const std::string& error) = 0;
+
+  // Called by CaptureController on successfully started preview.
+  //
+  // width: Preview frame width.
+  // height: Preview frame height.
+  virtual void OnStartPreviewSucceeded(int32_t width, int32_t height) = 0;
+
+  // Called by CaptureController if starting the preview fails.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnStartPreviewFailed(CameraResult result,
+                                    const std::string& error) = 0;
+
+  // Called by CaptureController on successfully paused preview.
+  virtual void OnPausePreviewSucceeded() = 0;
+
+  // Called by CaptureController if pausing the preview fails.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnPausePreviewFailed(CameraResult result,
+                                    const std::string& error) = 0;
+
+  // Called by CaptureController on successfully resumed preview.
+  virtual void OnResumePreviewSucceeded() = 0;
+
+  // Called by CaptureController if resuming the preview fails.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnResumePreviewFailed(CameraResult result,
+                                     const std::string& error) = 0;
+
+  // Called by CaptureController on successfully started recording.
+  virtual void OnStartRecordSucceeded() = 0;
+
+  // Called by CaptureController if starting the recording fails.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnStartRecordFailed(CameraResult result,
+                                   const std::string& error) = 0;
+
+  // Called by CaptureController on successfully stopped recording.
+  //
+  // file_path: Filesystem path of the recorded video file.
+  virtual void OnStopRecordSucceeded(const std::string& file_path) = 0;
+
+  // Called by CaptureController if stopping the recording fails.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnStopRecordFailed(CameraResult result,
+                                  const std::string& error) = 0;
+
+  // Called by CaptureController on successfully captured picture.
+  //
+  // file_path: Filesystem path of the captured image.
+  virtual void OnTakePictureSucceeded(const std::string& file_path) = 0;
+
+  // Called by CaptureController if taking picture fails.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnTakePictureFailed(CameraResult result,
+                                   const std::string& error) = 0;
+
+  // Called by CaptureController when timed recording is successfully recorded.
+  //
+  // file_path: Filesystem path of the recorded video file.
+  // video_duration_ms: Duration of recorded video in milliseconds.
+  virtual void OnVideoRecordSucceeded(const std::string& file_path,
+                                      int64_t video_duration_ms) = 0;
+
+  // Called by CaptureController if timed recording fails.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnVideoRecordFailed(CameraResult result,
+                                   const std::string& error) = 0;
+
+  // Called by CaptureController if capture engine returns error.
+  // For example when camera is disconnected while on use.
+  //
+  // result: The kind of result.
+  // error: A string describing the error.
+  virtual void OnCaptureError(CameraResult result,
+                              const std::string& error) = 0;
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_CONTROLLER_LISTENER_H_
diff --git a/packages/camera/camera_windows/windows/capture_device_info.cpp b/packages/camera/camera_windows/windows/capture_device_info.cpp
new file mode 100644
index 0000000..446056a
--- /dev/null
+++ b/packages/camera/camera_windows/windows/capture_device_info.cpp
@@ -0,0 +1,29 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "capture_device_info.h"
+
+#include <memory>
+#include <string>
+
+namespace camera_windows {
+// Builds the unique name in the format "display_name <device_id>".
+std::string CaptureDeviceInfo::GetUniqueDeviceName() const {
+  std::string unique_name(display_name_);
+  unique_name.append(" <").append(device_id_).append(">");
+  return unique_name;
+}
+
+// Parses "display_name <device_id>" into display name and device id.
+// Returns false if |camera_name| contains no space separator.
+bool CaptureDeviceInfo::ParseDeviceInfoFromCameraName(
+    const std::string& camera_name) {
+  // The device id follows the last space, wrapped in angle brackets.
+  size_t delimiter_index = camera_name.rfind(' ', camera_name.length());
+  if (delimiter_index == std::string::npos) {
+    return false;
+  }
+  display_name_ = camera_name.substr(0, delimiter_index);
+  // Skip the " <" separator and drop the trailing ">".
+  // (Also removed an unused CaptureDeviceInfo allocation that was created
+  // and discarded on every call.)
+  device_id_ = camera_name.substr(delimiter_index + 2,
+                                  camera_name.length() - delimiter_index - 3);
+  return true;
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/capture_device_info.h b/packages/camera/camera_windows/windows/capture_device_info.h
new file mode 100644
index 0000000..63ffa85
--- /dev/null
+++ b/packages/camera/camera_windows/windows/capture_device_info.h
@@ -0,0 +1,49 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_DEVICE_INFO_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_DEVICE_INFO_H_
+
+#include <string>
+
+namespace camera_windows {
+
+// Name and device ID information for a capture device.
+class CaptureDeviceInfo {
+ public:
+  CaptureDeviceInfo() {}
+  virtual ~CaptureDeviceInfo() = default;
+
+  // Disallow copy and move.
+  CaptureDeviceInfo(const CaptureDeviceInfo&) = delete;
+  CaptureDeviceInfo& operator=(const CaptureDeviceInfo&) = delete;
+
+  // Build unique device name from display name and device id.
+  // Format: "display_name <device_id>".
+  std::string GetUniqueDeviceName() const;
+
+  // Parses display name and device id from unique device name format.
+  // Format: "display_name <device_id>". Returns false on parse failure.
+  // Note: declaration must be unqualified inside the class; the previous
+  // "CaptureDeviceInfo::" qualification is ill-formed (MSVC-only extension).
+  bool ParseDeviceInfoFromCameraName(const std::string& camera_name);
+
+  // Updates display name.
+  void SetDisplayName(const std::string& display_name) {
+    display_name_ = display_name;
+  }
+
+  // Updates device id.
+  void SetDeviceID(const std::string& device_id) { device_id_ = device_id; }
+
+  // Returns device id.
+  std::string GetDeviceId() const { return device_id_; }
+
+ private:
+  std::string display_name_;
+  std::string device_id_;
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_DEVICE_INFO_H_
diff --git a/packages/camera/camera_windows/windows/capture_engine_listener.cpp b/packages/camera/camera_windows/windows/capture_engine_listener.cpp
new file mode 100644
index 0000000..5425b38
--- /dev/null
+++ b/packages/camera/camera_windows/windows/capture_engine_listener.cpp
@@ -0,0 +1,90 @@
+
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "capture_engine_listener.h"
+
+#include <mfcaptureengine.h>
+#include <wrl/client.h>
+
+namespace camera_windows {
+
+using Microsoft::WRL::ComPtr;
+
+// IUnknown
+// Atomically increments the COM reference count and returns the new value.
+STDMETHODIMP_(ULONG) CaptureEngineListener::AddRef() {
+  return InterlockedIncrement(&ref_);
+}
+
+// IUnknown
+// Atomically decrements the COM reference count; self-deletes when it
+// reaches zero (standard COM ownership pattern).
+STDMETHODIMP_(ULONG)
+CaptureEngineListener::Release() {
+  LONG ref = InterlockedDecrement(&ref_);
+  if (ref == 0) {
+    delete this;
+  }
+  return ref;
+}
+
+// IUnknown
+// Returns the requested interface pointer with an added reference.
+// Now also answers IID_IUnknown, which the COM QueryInterface contract
+// requires; C-style casts replaced with static_cast / direct AddRef().
+STDMETHODIMP_(HRESULT)
+CaptureEngineListener::QueryInterface(const IID& riid, void** ppv) {
+  *ppv = nullptr;
+
+  if (riid == IID_IUnknown || riid == IID_IMFCaptureEngineOnEventCallback) {
+    // For IID_IUnknown, pick one base explicitly to avoid ambiguity.
+    *ppv = static_cast<IMFCaptureEngineOnEventCallback*>(this);
+    AddRef();
+    return S_OK;
+  } else if (riid == IID_IMFCaptureEngineOnSampleCallback) {
+    *ppv = static_cast<IMFCaptureEngineOnSampleCallback*>(this);
+    AddRef();
+    return S_OK;
+  }
+
+  return E_NOINTERFACE;
+}
+
+// IMFCaptureEngineOnEventCallback
+// Forwards capture engine events to the observer, if one is set.
+STDMETHODIMP CaptureEngineListener::OnEvent(IMFMediaEvent* event) {
+  if (observer_) {
+    observer_->OnEvent(event);
+  }
+  return S_OK;
+}
+
+// IMFCaptureEngineOnSampleCallback
+// Reports the sample timestamp to the observer and, when the observer is
+// ready for frames, copies the sample's pixel data into its buffer.
+HRESULT CaptureEngineListener::OnSample(IMFSample* sample) {
+  HRESULT hr = S_OK;
+
+  if (this->observer_ && sample) {
+    LONGLONG raw_time_stamp = 0;
+    // Receives the presentation time, in 100-nanosecond units.
+    sample->GetSampleTime(&raw_time_stamp);
+
+    // Report time in microseconds.
+    this->observer_->UpdateCaptureTime(
+        static_cast<uint64_t>(raw_time_stamp / 10));
+
+    if (!this->observer_->IsReadyForSample()) {
+      // No texture target available or not previewing, just return status.
+      return hr;
+    }
+
+    ComPtr<IMFMediaBuffer> buffer;
+    hr = sample->ConvertToContiguousBuffer(&buffer);
+
+    // Draw the frame.
+    if (SUCCEEDED(hr) && buffer) {
+      DWORD max_length = 0;
+      DWORD current_length = 0;
+      uint8_t* data;
+      // Fixed corrupted "&current_length" argument (was mojibake
+      // "¤t_length", which does not compile).
+      if (SUCCEEDED(buffer->Lock(&data, &max_length, &current_length))) {
+        this->observer_->UpdateBuffer(data, current_length);
+      }
+      hr = buffer->Unlock();
+    }
+  }
+  return hr;
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/capture_engine_listener.h b/packages/camera/camera_windows/windows/capture_engine_listener.h
new file mode 100644
index 0000000..081e3ea
--- /dev/null
+++ b/packages/camera/camera_windows/windows/capture_engine_listener.h
@@ -0,0 +1,69 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_ENGINE_LISTENER_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_ENGINE_LISTENER_H_
+
+#include <mfcaptureengine.h>
+
+#include <cassert>
+#include <functional>
+
+namespace camera_windows {
+
+// A class that implements callbacks for events from a |CaptureEngineListener|.
+class CaptureEngineObserver {
+ public:
+  virtual ~CaptureEngineObserver() = default;
+
+  // Returns true if the observer can process a new sample
+  // (e.g. a texture target is available and previewing is active).
+  virtual bool IsReadyForSample() const = 0;
+
+  // Handles Capture Engine media events.
+  virtual void OnEvent(IMFMediaEvent* event) = 0;
+
+  // Updates texture buffer with |new_length| bytes from |data|.
+  // Presumably returns true on success — TODO confirm with implementations.
+  virtual bool UpdateBuffer(uint8_t* data, uint32_t new_length) = 0;
+
+  // Handles capture timestamp updates; |capture_time| is in microseconds.
+  // Used to stop timed recordings when recorded time is exceeded.
+  virtual void UpdateCaptureTime(uint64_t capture_time) = 0;
+};
+
+// Listener for Windows Media Foundation capture engine events and samples.
+//
+// Events are redirected to observers for processing. Samples are preprocessed
+// and sent to the associated observer if it is ready to process samples.
+class CaptureEngineListener : public IMFCaptureEngineOnSampleCallback,
+                              public IMFCaptureEngineOnEventCallback {
+ public:
+  // |observer| must be non-null and must outlive this listener.
+  // Marked explicit to prevent accidental implicit conversion from a
+  // raw observer pointer.
+  explicit CaptureEngineListener(CaptureEngineObserver* observer)
+      : observer_(observer) {
+    assert(observer);
+  }
+
+  ~CaptureEngineListener() {}
+
+  // Disallow copy and move.
+  CaptureEngineListener(const CaptureEngineListener&) = delete;
+  CaptureEngineListener& operator=(const CaptureEngineListener&) = delete;
+
+  // IUnknown
+  STDMETHODIMP_(ULONG) AddRef();
+  STDMETHODIMP_(ULONG) Release();
+  STDMETHODIMP_(HRESULT) QueryInterface(const IID& riid, void** ppv);
+
+  // IMFCaptureEngineOnEventCallback
+  STDMETHODIMP OnEvent(IMFMediaEvent* pEvent);
+
+  // IMFCaptureEngineOnSampleCallback
+  STDMETHODIMP_(HRESULT) OnSample(IMFSample* pSample);
+
+ private:
+  // Not owned; receives events and preprocessed samples.
+  CaptureEngineObserver* observer_;
+  // COM reference count; starts at zero, and the object deletes itself
+  // when the count returns to zero in Release().
+  volatile ULONG ref_ = 0;
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_ENGINE_LISTENER_H_
diff --git a/packages/camera/camera_windows/windows/com_heap_ptr.h b/packages/camera/camera_windows/windows/com_heap_ptr.h
new file mode 100644
index 0000000..a314ed3
--- /dev/null
+++ b/packages/camera/camera_windows/windows/com_heap_ptr.h
@@ -0,0 +1,66 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_COMHEAPPTR_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_COMHEAPPTR_H_
+
+#include <windows.h>
+
+#include <cassert>
+
+namespace camera_windows {
+// Wrapper for a COM heap allocation providing automatic memory release.
+// The destructor uses CoTaskMemFree to release the allocation.
+template <typename T>
+class ComHeapPtr {
+ public:
+  ComHeapPtr() : p_obj_(nullptr) {}
+  // Takes ownership of |p_obj|; it will be released with CoTaskMemFree.
+  // Marked explicit so a raw pointer cannot silently transfer ownership.
+  explicit ComHeapPtr(T* p_obj) : p_obj_(p_obj) {}
+
+  // Frees memory on destruction.
+  ~ComHeapPtr() { Free(); }
+
+  // Prevent copying / ownership transfer as not currently needed.
+  ComHeapPtr(ComHeapPtr const&) = delete;
+  ComHeapPtr& operator=(ComHeapPtr const&) = delete;
+
+  // Returns the pointer to the memory.
+  operator T*() { return p_obj_; }
+
+  // Returns the pointer to the memory; must not be null.
+  T* operator->() {
+    assert(p_obj_ != nullptr);
+    return p_obj_;
+  }
+
+  // Returns the pointer to the memory; must not be null.
+  const T* operator->() const {
+    assert(p_obj_ != nullptr);
+    return p_obj_;
+  }
+
+  // Returns the address of the wrapped pointer, for use as an out-parameter.
+  T** operator&() {
+    // Wrapped object must be nullptr to avoid memory leaks.
+    // Object can be released with Free().  (The original comment referenced
+    // a nonexistent Reset(nullptr) method.)
+    assert(p_obj_ == nullptr);
+    return &p_obj_;
+  }
+
+  // Frees the memory pointed to, and sets the pointer to nullptr.
+  void Free() {
+    if (p_obj_) {
+      CoTaskMemFree(p_obj_);
+    }
+    p_obj_ = nullptr;
+  }
+
+ private:
+  // Pointer to memory.
+  T* p_obj_;
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_COMHEAPPTR_H_
diff --git a/packages/camera/camera_windows/windows/include/camera_windows/camera_windows.h b/packages/camera/camera_windows/windows/include/camera_windows/camera_windows.h
new file mode 100644
index 0000000..b1e28b8
--- /dev/null
+++ b/packages/camera/camera_windows/windows/include/camera_windows/camera_windows.h
@@ -0,0 +1,27 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_INCLUDE_CAMERA_WINDOWS_CAMERA_WINDOWS_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_INCLUDE_CAMERA_WINDOWS_CAMERA_WINDOWS_H_
+
+#include <flutter_plugin_registrar.h>
+
+// FLUTTER_PLUGIN_EXPORT marks the registration symbol dllexport when building
+// the plugin itself (FLUTTER_PLUGIN_IMPL defined) and dllimport for consumers.
+#ifdef FLUTTER_PLUGIN_IMPL
+#define FLUTTER_PLUGIN_EXPORT __declspec(dllexport)
+#else
+#define FLUTTER_PLUGIN_EXPORT __declspec(dllimport)
+#endif
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+// Registers the Windows camera plugin with the given plugin registrar.
+FLUTTER_PLUGIN_EXPORT void CameraWindowsRegisterWithRegistrar(
+    FlutterDesktopPluginRegistrarRef registrar);
+
+#if defined(__cplusplus)
+}  // extern "C"
+#endif
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_INCLUDE_CAMERA_WINDOWS_CAMERA_WINDOWS_H_
diff --git a/packages/camera/camera_windows/windows/photo_handler.cpp b/packages/camera/camera_windows/windows/photo_handler.cpp
new file mode 100644
index 0000000..479f0d3
--- /dev/null
+++ b/packages/camera/camera_windows/windows/photo_handler.cpp
@@ -0,0 +1,143 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "photo_handler.h"
+
+#include <mfapi.h>
+#include <mfcaptureengine.h>
+#include <wincodec.h>
+
+#include <cassert>
+
+#include "capture_engine_listener.h"
+#include "string_utils.h"
+
+namespace camera_windows {
+
+using Microsoft::WRL::ComPtr;
+
+// Initializes media type for photo capture for jpeg images.
+//
+// The returned type is a clone of |src_media_type| with the major type set to
+// image and the subtype set to |image_format| (e.g. GUID_ContainerFormatJpeg).
+HRESULT BuildMediaTypeForPhotoCapture(IMFMediaType* src_media_type,
+                                      IMFMediaType** photo_media_type,
+                                      GUID image_format) {
+  assert(src_media_type);
+  ComPtr<IMFMediaType> new_media_type;
+
+  HRESULT hr = MFCreateMediaType(&new_media_type);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Clones everything from original media type.
+  hr = src_media_type->CopyAllItems(new_media_type.Get());
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = new_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Image);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = new_media_type->SetGUID(MF_MT_SUBTYPE, image_format);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Propagate CopyTo's HRESULT instead of ignoring it (CopyTo can fail).
+  return new_media_type.CopyTo(photo_media_type);
+}
+
+// Initializes the photo sink for JPEG image capture, or only updates the
+// output filename when the sink already exists.  On any failure the sink is
+// dropped so that the next call rebuilds it from scratch.
+HRESULT PhotoHandler::InitPhotoSink(IMFCaptureEngine* capture_engine,
+                                    IMFMediaType* base_media_type) {
+  assert(capture_engine);
+  assert(base_media_type);
+
+  HRESULT hr = S_OK;
+
+  if (photo_sink_) {
+    // If photo sink already exists, only update output filename.
+    hr = photo_sink_->SetOutputFileName(Utf16FromUtf8(file_path_).c_str());
+
+    if (FAILED(hr)) {
+      // Drop the sink so the next call rebuilds it.
+      photo_sink_ = nullptr;
+    }
+
+    return hr;
+  }
+
+  ComPtr<IMFMediaType> photo_media_type;
+  ComPtr<IMFCaptureSink> capture_sink;
+
+  // Get sink with photo type.
+  hr =
+      capture_engine->GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PHOTO, &capture_sink);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = capture_sink.As(&photo_sink_);
+  if (FAILED(hr)) {
+    photo_sink_ = nullptr;
+    return hr;
+  }
+
+  // Remove any streams left over from a previous configuration.
+  hr = photo_sink_->RemoveAllStreams();
+  if (FAILED(hr)) {
+    photo_sink_ = nullptr;
+    return hr;
+  }
+
+  // Photos are captured as JPEG images.
+  hr = BuildMediaTypeForPhotoCapture(base_media_type,
+                                     photo_media_type.GetAddressOf(),
+                                     GUID_ContainerFormatJpeg);
+
+  if (FAILED(hr)) {
+    photo_sink_ = nullptr;
+    return hr;
+  }
+
+  DWORD photo_sink_stream_index;
+  hr = photo_sink_->AddStream(
+      (DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_PHOTO,
+      photo_media_type.Get(), nullptr, &photo_sink_stream_index);
+  if (FAILED(hr)) {
+    photo_sink_ = nullptr;
+    return hr;
+  }
+
+  hr = photo_sink_->SetOutputFileName(Utf16FromUtf8(file_path_).c_str());
+  if (FAILED(hr)) {
+    photo_sink_ = nullptr;
+    return hr;
+  }
+
+  return hr;
+}
+
+// Initializes the photo sink (if needed) for |file_path| and asks the capture
+// engine to take a photo.  Moves the handler into the kTakingPhoto state.
+HRESULT PhotoHandler::TakePhoto(const std::string& file_path,
+                                IMFCaptureEngine* capture_engine,
+                                IMFMediaType* base_media_type) {
+  assert(!file_path.empty());
+  assert(capture_engine);
+  assert(base_media_type);
+
+  // Store the destination first; InitPhotoSink reads it via file_path_.
+  file_path_ = file_path;
+
+  HRESULT init_result = InitPhotoSink(capture_engine, base_media_type);
+  if (FAILED(init_result)) {
+    return init_result;
+  }
+
+  photo_state_ = PhotoState::kTakingPhoto;
+  return capture_engine->TakePhoto();
+}
+
+// Called when the capture engine reports that the photo was taken; returns
+// the handler to the idle (ready for next capture) state.
+void PhotoHandler::OnPhotoTaken() {
+  assert(photo_state_ == PhotoState::kTakingPhoto);
+  photo_state_ = PhotoState::kIdle;
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/photo_handler.h b/packages/camera/camera_windows/windows/photo_handler.h
new file mode 100644
index 0000000..4d6ddf1
--- /dev/null
+++ b/packages/camera/camera_windows/windows/photo_handler.h
@@ -0,0 +1,80 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_PHOTO_HANDLER_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_PHOTO_HANDLER_H_
+
+#include <mfapi.h>
+#include <mfcaptureengine.h>
+#include <wrl/client.h>
+
+#include <memory>
+#include <string>
+
+#include "capture_engine_listener.h"
+
+namespace camera_windows {
+using Microsoft::WRL::ComPtr;
+
+// Various states that the photo handler can be in.
+//
+// When created, the handler is in |kNotStarted| state and transitions in
+// sequential order through the states.
+enum class PhotoState {
+  // No photo sink has been initialized yet.
+  kNotStarted,
+  // Photo sink is initialized and ready for a capture.
+  kIdle,
+  // A photo capture is in progress.
+  kTakingPhoto,
+};
+
+// Handles photo sink initialization and tracks photo capture states.
+class PhotoHandler {
+ public:
+  PhotoHandler() {}
+  virtual ~PhotoHandler() = default;
+
+  // Prevent copying.
+  PhotoHandler(PhotoHandler const&) = delete;
+  PhotoHandler& operator=(PhotoHandler const&) = delete;
+
+  // Initializes photo sink if not initialized and requests the capture engine
+  // to take photo.
+  //
+  // Sets photo state to: kTakingPhoto.
+  //
+  // file_path:       A string that holds the destination file path for
+  //                  the photo capture.
+  // capture_engine:  A pointer to capture engine instance.
+  //                  Called to take the photo.
+  // base_media_type: A pointer to base media type used as a base
+  //                  for the actual photo capture media type.
+  HRESULT TakePhoto(const std::string& file_path,
+                    IMFCaptureEngine* capture_engine,
+                    IMFMediaType* base_media_type);
+
+  // Sets the photo handler state to: kIdle.
+  void OnPhotoTaken();
+
+  // Returns true if photo state is kIdle.
+  bool IsInitialized() const { return photo_state_ == PhotoState::kIdle; }
+
+  // Returns true if photo state is kTakingPhoto.
+  bool IsTakingPhoto() const {
+    return photo_state_ == PhotoState::kTakingPhoto;
+  }
+
+  // Returns the filesystem path of the captured photo.
+  std::string GetPhotoPath() const { return file_path_; }
+
+ private:
+  // Initializes the photo sink for image capture.
+  HRESULT InitPhotoSink(IMFCaptureEngine* capture_engine,
+                        IMFMediaType* base_media_type);
+
+  std::string file_path_;
+  PhotoState photo_state_ = PhotoState::kNotStarted;
+  ComPtr<IMFCapturePhotoSink> photo_sink_;
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_PHOTO_HANDLER_H_
diff --git a/packages/camera/camera_windows/windows/preview_handler.cpp b/packages/camera/camera_windows/windows/preview_handler.cpp
new file mode 100644
index 0000000..538754c
--- /dev/null
+++ b/packages/camera/camera_windows/windows/preview_handler.cpp
@@ -0,0 +1,166 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "preview_handler.h"
+
+#include <mfapi.h>
+#include <mfcaptureengine.h>
+
+#include <cassert>
+
+#include "capture_engine_listener.h"
+#include "string_utils.h"
+
+namespace camera_windows {
+
+using Microsoft::WRL::ComPtr;
+
+// Initializes media type for video preview.
+//
+// The returned type is a clone of |src_media_type| with the subtype converted
+// to RGB32 and all samples marked independent.
+HRESULT BuildMediaTypeForVideoPreview(IMFMediaType* src_media_type,
+                                      IMFMediaType** preview_media_type) {
+  assert(src_media_type);
+  ComPtr<IMFMediaType> new_media_type;
+
+  HRESULT hr = MFCreateMediaType(&new_media_type);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Clones everything from original media type.
+  hr = src_media_type->CopyAllItems(new_media_type.Get());
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Changes subtype to MFVideoFormat_RGB32.
+  hr = new_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = new_media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Propagate CopyTo's HRESULT instead of ignoring it (CopyTo can fail).
+  return new_media_type.CopyTo(preview_media_type);
+}
+
+// Initializes the preview sink and registers |sample_callback| to receive
+// preview frames.  No-op when the sink already exists.  On any failure the
+// sink is reset so that a later call can retry from scratch.
+HRESULT PreviewHandler::InitPreviewSink(
+    IMFCaptureEngine* capture_engine, IMFMediaType* base_media_type,
+    CaptureEngineListener* sample_callback) {
+  assert(capture_engine);
+  assert(base_media_type);
+  assert(sample_callback);
+
+  HRESULT hr = S_OK;
+
+  if (preview_sink_) {
+    // Preview sink already initialized.
+    return hr;
+  }
+
+  ComPtr<IMFMediaType> preview_media_type;
+  ComPtr<IMFCaptureSink> capture_sink;
+
+  // Get sink with preview type.
+  hr = capture_engine->GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW,
+                               &capture_sink);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = capture_sink.As(&preview_sink_);
+  if (FAILED(hr)) {
+    preview_sink_ = nullptr;
+    return hr;
+  }
+
+  hr = preview_sink_->RemoveAllStreams();
+  if (FAILED(hr)) {
+    preview_sink_ = nullptr;
+    return hr;
+  }
+
+  hr = BuildMediaTypeForVideoPreview(base_media_type,
+                                     preview_media_type.GetAddressOf());
+
+  if (FAILED(hr)) {
+    preview_sink_ = nullptr;
+    return hr;
+  }
+
+  DWORD preview_sink_stream_index;
+  hr = preview_sink_->AddStream(
+      (DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_PREVIEW,
+      preview_media_type.Get(), nullptr, &preview_sink_stream_index);
+
+  if (FAILED(hr)) {
+    // Reset the partially-initialized sink; this failure path previously
+    // left preview_sink_ set, unlike every other failure path here.
+    preview_sink_ = nullptr;
+    return hr;
+  }
+
+  hr = preview_sink_->SetSampleCallback(preview_sink_stream_index,
+                                        sample_callback);
+
+  if (FAILED(hr)) {
+    preview_sink_ = nullptr;
+    return hr;
+  }
+
+  return hr;
+}
+
+// Ensures the preview sink exists, then asks the capture engine to begin
+// previewing.  Moves the handler into the kStarting state on success path.
+HRESULT PreviewHandler::StartPreview(IMFCaptureEngine* capture_engine,
+                                     IMFMediaType* base_media_type,
+                                     CaptureEngineListener* sample_callback) {
+  assert(capture_engine);
+  assert(base_media_type);
+
+  HRESULT sink_result =
+      InitPreviewSink(capture_engine, base_media_type, sample_callback);
+  if (FAILED(sink_result)) {
+    return sink_result;
+  }
+
+  preview_state_ = PreviewState::kStarting;
+  return capture_engine->StartPreview();
+}
+
+// Requests the capture engine to stop previewing.  Valid only while the
+// preview is starting, running, or paused; otherwise returns E_FAIL.
+HRESULT PreviewHandler::StopPreview(IMFCaptureEngine* capture_engine) {
+  switch (preview_state_) {
+    case PreviewState::kStarting:
+    case PreviewState::kRunning:
+    case PreviewState::kPaused:
+      preview_state_ = PreviewState::kStopping;
+      return capture_engine->StopPreview();
+    default:
+      return E_FAIL;
+  }
+}
+
+// Pauses a running preview.  Returns false if the preview is not running.
+bool PreviewHandler::PausePreview() {
+  if (preview_state_ == PreviewState::kRunning) {
+    preview_state_ = PreviewState::kPaused;
+    return true;
+  }
+  return false;
+}
+
+// Resumes a paused preview.  Returns false if the preview is not paused.
+bool PreviewHandler::ResumePreview() {
+  if (preview_state_ == PreviewState::kPaused) {
+    preview_state_ = PreviewState::kRunning;
+    return true;
+  }
+  return false;
+}
+
+// Called when the capture engine reports that the preview has started;
+// advances the state machine from kStarting to kRunning.
+void PreviewHandler::OnPreviewStarted() {
+  assert(preview_state_ == PreviewState::kStarting);
+  if (preview_state_ != PreviewState::kStarting) {
+    return;
+  }
+  preview_state_ = PreviewState::kRunning;
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/preview_handler.h b/packages/camera/camera_windows/windows/preview_handler.h
new file mode 100644
index 0000000..311cf5a
--- /dev/null
+++ b/packages/camera/camera_windows/windows/preview_handler.h
@@ -0,0 +1,101 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_PREVIEW_HANDLER_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_PREVIEW_HANDLER_H_
+
+#include <mfapi.h>
+#include <mfcaptureengine.h>
+#include <wrl/client.h>
+
+#include <memory>
+#include <string>
+
+#include "capture_engine_listener.h"
+
+namespace camera_windows {
+using Microsoft::WRL::ComPtr;
+
+// States the preview handler can be in.
+//
+// When created, the handler starts in |kNotStarted| state and mostly
+// transitions in sequential order of the states. When the preview is running,
+// it can be set to the |kPaused| state and later resumed to |kRunning| state.
+enum class PreviewState {
+  // Preview has not been started yet.
+  kNotStarted,
+  // StartPreview was called; waiting for the engine to report the start.
+  kStarting,
+  // Preview is active.
+  kRunning,
+  // Preview is temporarily suspended.
+  kPaused,
+  // StopPreview was called; waiting for the engine to stop.
+  kStopping
+};
+
+// Handler for a camera's video preview.
+//
+// Handles preview sink initialization and manages the state of the video
+// preview.
+class PreviewHandler {
+ public:
+  PreviewHandler() {}
+  virtual ~PreviewHandler() = default;
+
+  // Prevent copying.
+  PreviewHandler(PreviewHandler const&) = delete;
+  PreviewHandler& operator=(PreviewHandler const&) = delete;
+
+  // Initializes preview sink and requests capture engine to start previewing.
+  // Sets preview state to: kStarting.
+  //
+  // capture_engine:  A pointer to capture engine instance. Used to start
+  //                  the preview.
+  // base_media_type: A pointer to base media type used as a base
+  //                  for the actual video preview media type.
+  // sample_callback: A pointer to capture engine listener.
+  //                  This is set as sample callback for preview sink.
+  HRESULT StartPreview(IMFCaptureEngine* capture_engine,
+                       IMFMediaType* base_media_type,
+                       CaptureEngineListener* sample_callback);
+
+  // Stops an existing preview.
+  //
+  // capture_engine: A pointer to capture engine instance. Used to stop
+  //                 the ongoing preview.
+  HRESULT StopPreview(IMFCaptureEngine* capture_engine);
+
+  // Sets the preview state to: kPaused.
+  bool PausePreview();
+
+  // Sets the preview state back to: kRunning.
+  bool ResumePreview();
+
+  // Sets the preview state to kRunning once the capture engine reports
+  // that the preview has started.
+  void OnPreviewStarted();
+
+  // Returns true if preview state is running or paused.
+  bool IsInitialized() const {
+    return preview_state_ == PreviewState::kRunning ||
+           preview_state_ == PreviewState::kPaused;
+  }
+
+  // Returns true if preview state is running.
+  bool IsRunning() const { return preview_state_ == PreviewState::kRunning; }
+
+  // Returns true if preview state is paused.
+  bool IsPaused() const { return preview_state_ == PreviewState::kPaused; }
+
+  // Returns true if preview state is starting.
+  bool IsStarting() const { return preview_state_ == PreviewState::kStarting; }
+
+ private:
+  // Initializes the preview sink used to deliver preview frames.
+  HRESULT InitPreviewSink(IMFCaptureEngine* capture_engine,
+                          IMFMediaType* base_media_type,
+                          CaptureEngineListener* sample_callback);
+
+  PreviewState preview_state_ = PreviewState::kNotStarted;
+  ComPtr<IMFCapturePreviewSink> preview_sink_;
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_PREVIEW_HANDLER_H_
diff --git a/packages/camera/camera_windows/windows/record_handler.cpp b/packages/camera/camera_windows/windows/record_handler.cpp
new file mode 100644
index 0000000..0f71925
--- /dev/null
+++ b/packages/camera/camera_windows/windows/record_handler.cpp
@@ -0,0 +1,259 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "record_handler.h"
+
+#include <mfapi.h>
+#include <mfcaptureengine.h>
+
+#include <cassert>
+
+#include "string_utils.h"
+
+namespace camera_windows {
+
+using Microsoft::WRL::ComPtr;
+
+// Initializes media type for video capture.
+//
+// The returned type is a clone of |src_media_type| with the subtype replaced
+// by |capture_format| (e.g. MFVideoFormat_H264).
+HRESULT BuildMediaTypeForVideoCapture(IMFMediaType* src_media_type,
+                                      IMFMediaType** video_record_media_type,
+                                      GUID capture_format) {
+  assert(src_media_type);
+  ComPtr<IMFMediaType> new_media_type;
+
+  HRESULT hr = MFCreateMediaType(&new_media_type);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Clones everything from original media type.
+  hr = src_media_type->CopyAllItems(new_media_type.Get());
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = new_media_type->SetGUID(MF_MT_SUBTYPE, capture_format);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Propagate CopyTo's HRESULT instead of returning an unconditional S_OK.
+  return new_media_type.CopyTo(video_record_media_type);
+}
+
+// Queries an interface object of type |Q| from element |index| of
+// |pCollection|.
+template <class Q>
+HRESULT GetCollectionObject(IMFCollection* pCollection, DWORD index,
+                            Q** ppObj) {
+  ComPtr<IUnknown> element;
+  HRESULT hr = pCollection->GetElement(index, element.GetAddressOf());
+  if (SUCCEEDED(hr)) {
+    hr = element->QueryInterface(IID_PPV_ARGS(ppObj));
+  }
+  return hr;
+}
+
+// Initializes media type for audio capture.
+//
+// Selects the first available low-latency AAC audio output type and clones it
+// into |audio_record_media_type|.
+HRESULT BuildMediaTypeForAudioCapture(IMFMediaType** audio_record_media_type) {
+  ComPtr<IMFAttributes> audio_output_attributes;
+  ComPtr<IMFMediaType> src_media_type;
+  ComPtr<IMFMediaType> new_media_type;
+  ComPtr<IMFCollection> available_output_types;
+  DWORD mt_count = 0;
+
+  HRESULT hr = MFCreateAttributes(&audio_output_attributes, 1);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Enumerates only low latency audio outputs.
+  hr = audio_output_attributes->SetUINT32(MF_LOW_LATENCY, TRUE);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  DWORD mft_flags = (MFT_ENUM_FLAG_ALL & (~MFT_ENUM_FLAG_FIELDOFUSE)) |
+                    MFT_ENUM_FLAG_SORTANDFILTER;
+
+  hr = MFTranscodeGetAudioOutputAvailableTypes(
+      MFAudioFormat_AAC, mft_flags, audio_output_attributes.Get(),
+      available_output_types.GetAddressOf());
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Verify the collection is non-empty before fetching an element from it.
+  // (Previously element 0 was fetched before the count was checked.)
+  hr = available_output_types->GetElementCount(&mt_count);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  if (mt_count == 0) {
+    // No sources found, mark process as failure.
+    return E_FAIL;
+  }
+
+  hr = GetCollectionObject(available_output_types.Get(), 0,
+                           src_media_type.GetAddressOf());
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Create new media type to copy original media type to.
+  hr = MFCreateMediaType(&new_media_type);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = src_media_type->CopyAllItems(new_media_type.Get());
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Propagate CopyTo's HRESULT instead of ignoring it.
+  return new_media_type.CopyTo(audio_record_media_type);
+}
+
+// Initializes the record sink for video (and optionally audio) file capture,
+// or only updates the output filename when the sink already exists.
+HRESULT RecordHandler::InitRecordSink(IMFCaptureEngine* capture_engine,
+                                      IMFMediaType* base_media_type) {
+  assert(!file_path_.empty());
+  assert(capture_engine);
+  assert(base_media_type);
+
+  HRESULT hr = S_OK;
+  if (record_sink_) {
+    // If record sink already exists, only update output filename.
+    hr = record_sink_->SetOutputFileName(Utf16FromUtf8(file_path_).c_str());
+
+    if (FAILED(hr)) {
+      // Drop the sink so the next call rebuilds it.
+      record_sink_ = nullptr;
+    }
+    return hr;
+  }
+
+  ComPtr<IMFMediaType> video_record_media_type;
+  ComPtr<IMFCaptureSink> capture_sink;
+
+  // Gets sink from capture engine with record type.
+
+  hr = capture_engine->GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_RECORD,
+                               &capture_sink);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  hr = capture_sink.As(&record_sink_);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Removes existing streams if available.
+  hr = record_sink_->RemoveAllStreams();
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  // Video is recorded as H.264.
+  hr = BuildMediaTypeForVideoCapture(base_media_type,
+                                     video_record_media_type.GetAddressOf(),
+                                     MFVideoFormat_H264);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  DWORD video_record_sink_stream_index;
+  hr = record_sink_->AddStream(
+      (DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_RECORD,
+      video_record_media_type.Get(), nullptr, &video_record_sink_stream_index);
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  if (record_audio_) {
+    ComPtr<IMFMediaType> audio_record_media_type;
+    HRESULT audio_capture_hr = S_OK;
+    audio_capture_hr =
+        BuildMediaTypeForAudioCapture(audio_record_media_type.GetAddressOf());
+
+    // NOTE(review): if building the audio media type fails, the audio stream
+    // is silently skipped and recording proceeds video-only — confirm this
+    // best-effort behavior is intended.
+    if (SUCCEEDED(audio_capture_hr)) {
+      DWORD audio_record_sink_stream_index;
+      hr = record_sink_->AddStream(
+          (DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_AUDIO,
+          audio_record_media_type.Get(), nullptr,
+          &audio_record_sink_stream_index);
+    }
+
+    if (FAILED(hr)) {
+      return hr;
+    }
+  }
+
+  hr = record_sink_->SetOutputFileName(Utf16FromUtf8(file_path_).c_str());
+
+  return hr;
+}
+
+// Resets per-recording state, initializes the record sink for |file_path|,
+// and asks the capture engine to start recording.
+HRESULT RecordHandler::StartRecord(const std::string& file_path,
+                                   int64_t max_duration,
+                                   IMFCaptureEngine* capture_engine,
+                                   IMFMediaType* base_media_type) {
+  assert(!file_path.empty());
+  assert(capture_engine);
+  assert(base_media_type);
+
+  // A negative duration means the recording runs until explicitly stopped.
+  if (max_duration < 0) {
+    type_ = RecordingType::kContinuous;
+  } else {
+    type_ = RecordingType::kTimed;
+  }
+  max_video_duration_ms_ = max_duration;
+  file_path_ = file_path;
+  recording_start_timestamp_us_ = -1;
+  recording_duration_us_ = 0;
+
+  HRESULT sink_result = InitRecordSink(capture_engine, base_media_type);
+  if (FAILED(sink_result)) {
+    return sink_result;
+  }
+
+  recording_state_ = RecordState::kStarting;
+  return capture_engine->StartRecord();
+}
+
+// Requests the capture engine to stop recording.  Valid only while a
+// recording is running; otherwise returns E_FAIL.
+HRESULT RecordHandler::StopRecord(IMFCaptureEngine* capture_engine) {
+  if (recording_state_ != RecordState::kRunning) {
+    return E_FAIL;
+  }
+  recording_state_ = RecordState::kStopping;
+  // StopRecord(finalize=TRUE, flushUnprocessedSamples=FALSE) — see
+  // IMFCaptureEngine::StopRecord documentation for the parameter semantics.
+  return capture_engine->StopRecord(true, false);
+}
+
+// Called when the capture engine reports that recording has started;
+// advances the state machine from kStarting to kRunning.
+void RecordHandler::OnRecordStarted() {
+  if (recording_state_ != RecordState::kStarting) {
+    return;
+  }
+  recording_state_ = RecordState::kRunning;
+}
+
+// Called when the capture engine reports that recording has stopped;
+// resets all per-recording state back to defaults.
+void RecordHandler::OnRecordStopped() {
+  if (recording_state_ != RecordState::kStopping) {
+    return;
+  }
+  file_path_ = "";
+  recording_start_timestamp_us_ = -1;
+  recording_duration_us_ = 0;
+  max_video_duration_ms_ = -1;
+  recording_state_ = RecordState::kNotStarted;
+  type_ = RecordingType::kNone;
+}
+
+// Updates the elapsed recording duration from a capture timestamp
+// (microseconds).  The first timestamp seen becomes the recording origin.
+void RecordHandler::UpdateRecordingTime(uint64_t timestamp) {
+  if (recording_start_timestamp_us_ < 0) {
+    recording_start_timestamp_us_ = timestamp;
+  }
+
+  recording_duration_us_ = timestamp - recording_start_timestamp_us_;
+}
+
+// Returns true when a running, timed recording has reached its maximum
+// duration (max duration is in milliseconds, elapsed time in microseconds).
+bool RecordHandler::ShouldStopTimedRecording() const {
+  if (type_ != RecordingType::kTimed ||
+      recording_state_ != RecordState::kRunning ||
+      max_video_duration_ms_ <= 0) {
+    return false;
+  }
+  return recording_duration_us_ >=
+         static_cast<uint64_t>(max_video_duration_ms_) * 1000;
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/record_handler.h b/packages/camera/camera_windows/windows/record_handler.h
new file mode 100644
index 0000000..0c87bf9
--- /dev/null
+++ b/packages/camera/camera_windows/windows/record_handler.h
@@ -0,0 +1,118 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_RECORD_HANDLER_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_RECORD_HANDLER_H_
+
+#include <mfapi.h>
+#include <mfcaptureengine.h>
+#include <wrl/client.h>
+
+#include <memory>
+#include <string>
+
+namespace camera_windows {
+using Microsoft::WRL::ComPtr;
+
+// The kind of video recording in progress, if any.
+enum class RecordingType {
+  // Camera is not recording.
+  kNone,
+  // Recording continues until it is stopped with a separate stop command.
+  kContinuous,
+  // Recording stops automatically after requested record time is passed.
+  kTimed
+};
+
+// States that the record handler can be in.
+//
+// When created, the handler starts in |kNotStarted| state and transitions in
+// sequential order through the states.
+enum class RecordState { kNotStarted, kStarting, kRunning, kStopping };
+
+// Handler for video recording via the camera.
+//
+// Handles record sink initialization and manages the state of video recording.
+class RecordHandler {
+ public:
+  // |record_audio| selects whether an audio stream is recorded in addition
+  // to video.
+  RecordHandler(bool record_audio) : record_audio_(record_audio) {}
+  virtual ~RecordHandler() = default;
+
+  // Prevent copying.
+  RecordHandler(RecordHandler const&) = delete;
+  RecordHandler& operator=(RecordHandler const&) = delete;
+
+  // Initializes record sink and requests capture engine to start recording.
+  //
+  // Sets record state to: kStarting.
+  //
+  // file_path:       A string that holds the file path for video capture.
+  // max_duration:    An int64 value of the maximum recording duration in
+  //                  milliseconds.
+  //                  If the value is -1, the recording is considered
+  //                  a continuous recording.
+  // capture_engine:  A pointer to capture engine instance. Used to start
+  //                  the actual recording.
+  // base_media_type: A pointer to base media type used as a base
+  //                  for the actual video capture media type.
+  HRESULT StartRecord(const std::string& file_path, int64_t max_duration,
+                      IMFCaptureEngine* capture_engine,
+                      IMFMediaType* base_media_type);
+
+  // Stops an existing recording.
+  //
+  // capture_engine: A pointer to capture engine instance. Used to stop
+  //                 the ongoing recording.
+  HRESULT StopRecord(IMFCaptureEngine* capture_engine);
+
+  // Sets the record handler recording state to: kRunning.
+  void OnRecordStarted();
+
+  // Resets the record handler state and
+  // sets recording state to: kNotStarted.
+  void OnRecordStopped();
+
+  // Returns true if recording type is continuous recording.
+  bool IsContinuousRecording() const {
+    return type_ == RecordingType::kContinuous;
+  }
+
+  // Returns true if recording type is timed recording.
+  bool IsTimedRecording() const { return type_ == RecordingType::kTimed; }
+
+  // Returns true if new recording can be started.
+  bool CanStart() const { return recording_state_ == RecordState::kNotStarted; }
+
+  // Returns true if recording can be stopped.
+  bool CanStop() const { return recording_state_ == RecordState::kRunning; }
+
+  // Returns the filesystem path of the video recording.
+  std::string GetRecordPath() const { return file_path_; }
+
+  // Returns the duration of the video recording in microseconds.
+  uint64_t GetRecordedDuration() const { return recording_duration_us_; }
+
+  // Calculates new recording time from the capture timestamp (microseconds).
+  void UpdateRecordingTime(uint64_t timestamp);
+
+  // Returns true if recording time has exceeded the maximum duration for timed
+  // recordings.
+  bool ShouldStopTimedRecording() const;
+
+ private:
+  // Initializes record sink for video file capture.
+  HRESULT InitRecordSink(IMFCaptureEngine* capture_engine,
+                         IMFMediaType* base_media_type);
+
+  // Whether an audio stream is recorded alongside video.
+  bool record_audio_ = false;
+  // Maximum duration for timed recordings, in milliseconds; -1 if continuous.
+  int64_t max_video_duration_ms_ = -1;
+  // Timestamp of the first captured sample; -1 until one arrives.
+  int64_t recording_start_timestamp_us_ = -1;
+  // Elapsed recording time in microseconds.
+  uint64_t recording_duration_us_ = 0;
+  std::string file_path_;
+  RecordState recording_state_ = RecordState::kNotStarted;
+  RecordingType type_ = RecordingType::kNone;
+  ComPtr<IMFCaptureRecordSink> record_sink_;
+};
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_RECORD_HANDLER_H_
diff --git a/packages/camera/camera_windows/windows/string_utils.cpp b/packages/camera/camera_windows/windows/string_utils.cpp
new file mode 100644
index 0000000..34b1336
--- /dev/null
+++ b/packages/camera/camera_windows/windows/string_utils.cpp
@@ -0,0 +1,60 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "string_utils.h"
+
+#include <shobjidl.h>
+#include <windows.h>
+
+#include <string>
+
+namespace camera_windows {
+
+// Converts the given UTF-16 string to UTF-8.
+//
+// Returns an empty string if the input is empty or conversion fails
+// (WC_ERR_INVALID_CHARS makes the conversion fail on invalid sequences).
+std::string Utf8FromUtf16(const std::wstring& utf16_string) {
+  if (utf16_string.empty()) {
+    return std::string();
+  }
+  // First pass: compute the required UTF-8 length without writing output.
+  int target_length = ::WideCharToMultiByte(
+      CP_UTF8, WC_ERR_INVALID_CHARS, utf16_string.data(),
+      static_cast<int>(utf16_string.length()), nullptr, 0, nullptr, nullptr);
+  std::string utf8_string;
+  if (target_length == 0 || target_length > utf8_string.max_size()) {
+    return utf8_string;
+  }
+  utf8_string.resize(target_length);
+  // Second pass: perform the actual conversion into the resized buffer.
+  int converted_length = ::WideCharToMultiByte(
+      CP_UTF8, WC_ERR_INVALID_CHARS, utf16_string.data(),
+      static_cast<int>(utf16_string.length()), utf8_string.data(),
+      target_length, nullptr, nullptr);
+  if (converted_length == 0) {
+    return std::string();
+  }
+  return utf8_string;
+}
+
+// Converts the given UTF-8 string to UTF-16.
+//
+// Returns an empty string if the input is empty or conversion fails
+// (MB_ERR_INVALID_CHARS makes the conversion fail on invalid sequences).
+std::wstring Utf16FromUtf8(const std::string& utf8_string) {
+  if (utf8_string.empty()) {
+    return std::wstring();
+  }
+  // First pass: compute the required UTF-16 length without writing output.
+  int target_length =
+      ::MultiByteToWideChar(CP_UTF8, MB_ERR_INVALID_CHARS, utf8_string.data(),
+                            static_cast<int>(utf8_string.length()), nullptr, 0);
+  std::wstring utf16_string;
+  if (target_length == 0 || target_length > utf16_string.max_size()) {
+    return utf16_string;
+  }
+  utf16_string.resize(target_length);
+  // Second pass: perform the actual conversion into the resized buffer.
+  int converted_length =
+      ::MultiByteToWideChar(CP_UTF8, MB_ERR_INVALID_CHARS, utf8_string.data(),
+                            static_cast<int>(utf8_string.length()),
+                            utf16_string.data(), target_length);
+  if (converted_length == 0) {
+    return std::wstring();
+  }
+  return utf16_string;
+}
+
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/string_utils.h b/packages/camera/camera_windows/windows/string_utils.h
new file mode 100644
index 0000000..562c46a
--- /dev/null
+++ b/packages/camera/camera_windows/windows/string_utils.h
@@ -0,0 +1,22 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_STRING_UTILS_H_
+#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_STRING_UTILS_H_
+
+#include <shobjidl.h>
+
+#include <string>
+
+namespace camera_windows {
+
+// Converts the given UTF-16 string to UTF-8.
+std::string Utf8FromUtf16(const std::wstring& utf16_string);
+
+// Converts the given UTF-8 string to UTF-16.
+std::wstring Utf16FromUtf8(const std::string& utf8_string);
+
+} // namespace camera_windows
+
+#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_STRING_UTILS_H_
diff --git a/packages/camera/camera_windows/windows/test/camera_plugin_test.cpp b/packages/camera/camera_windows/windows/test/camera_plugin_test.cpp
new file mode 100644
index 0000000..9cab069
--- /dev/null
+++ b/packages/camera/camera_windows/windows/test/camera_plugin_test.cpp
@@ -0,0 +1,1055 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "camera_plugin.h"
+
+#include <flutter/method_call.h>
+#include <flutter/method_result_functions.h>
+#include <flutter/standard_method_codec.h>
+#include <flutter/texture_registrar.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <windows.h>
+
+#include <functional>
+#include <memory>
+#include <string>
+
+#include "mocks.h"
+
+namespace camera_windows {
+namespace test {
+
+using flutter::EncodableMap;
+using flutter::EncodableValue;
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::EndsWith;
+using ::testing::Eq;
+using ::testing::Pointee;
+using ::testing::Return;
+
+void MockInitCamera(MockCamera* camera, bool success) {
+ EXPECT_CALL(*camera,
+ HasPendingResultByType(Eq(PendingResultType::kCreateCamera)))
+ .Times(1)
+ .WillOnce(Return(false));
+
+ EXPECT_CALL(*camera,
+ AddPendingResult(Eq(PendingResultType::kCreateCamera), _))
+ .Times(1)
+ .WillOnce([camera](PendingResultType type,
+ std::unique_ptr<MethodResult<>> result) {
+ camera->pending_result_ = std::move(result);
+ return true;
+ });
+
+ EXPECT_CALL(*camera, HasDeviceId(Eq(camera->device_id_)))
+ .WillRepeatedly(Return(true));
+
+ EXPECT_CALL(*camera, InitCamera)
+ .Times(1)
+ .WillOnce([camera, success](flutter::TextureRegistrar* texture_registrar,
+ flutter::BinaryMessenger* messenger,
+ bool record_audio,
+ ResolutionPreset resolution_preset) {
+ assert(camera->pending_result_);
+ if (success) {
+ camera->pending_result_->Success(EncodableValue(1));
+ return true;
+ } else {
+ camera->pending_result_->Error("camera_error", "InitCamera failed.");
+ return false;
+ }
+ });
+}
+
+TEST(CameraPlugin, AvailableCamerasHandlerSuccessIfNoCameras) {
+ std::unique_ptr<MockTextureRegistrar> texture_registrar_ =
+ std::make_unique<MockTextureRegistrar>();
+ std::unique_ptr<MockBinaryMessenger> messenger_ =
+ std::make_unique<MockBinaryMessenger>();
+ std::unique_ptr<MockCameraFactory> camera_factory_ =
+ std::make_unique<MockCameraFactory>();
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ MockCameraPlugin plugin(texture_registrar_.get(), messenger_.get(),
+ std::move(camera_factory_));
+
+ EXPECT_CALL(plugin, EnumerateVideoCaptureDeviceSources)
+ .Times(1)
+ .WillOnce([](IMFActivate*** devices, UINT32* count) {
+ *count = 0U;
+ *devices = static_cast<IMFActivate**>(
+ CoTaskMemAlloc(sizeof(IMFActivate*) * (*count)));
+ return true;
+ });
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(*result, SuccessInternal).Times(1);
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("availableCameras",
+ std::make_unique<EncodableValue>()),
+ std::move(result));
+}
+
+TEST(CameraPlugin, AvailableCamerasHandlerErrorIfFailsToEnumerateDevices) {
+ std::unique_ptr<MockTextureRegistrar> texture_registrar_ =
+ std::make_unique<MockTextureRegistrar>();
+ std::unique_ptr<MockBinaryMessenger> messenger_ =
+ std::make_unique<MockBinaryMessenger>();
+ std::unique_ptr<MockCameraFactory> camera_factory_ =
+ std::make_unique<MockCameraFactory>();
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ MockCameraPlugin plugin(texture_registrar_.get(), messenger_.get(),
+ std::move(camera_factory_));
+
+ EXPECT_CALL(plugin, EnumerateVideoCaptureDeviceSources)
+ .Times(1)
+ .WillOnce([](IMFActivate*** devices, UINT32* count) { return false; });
+
+ EXPECT_CALL(*result, ErrorInternal).Times(1);
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("availableCameras",
+ std::make_unique<EncodableValue>()),
+ std::move(result));
+}
+
+TEST(CameraPlugin, CreateHandlerCallsInitCamera) {
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+ std::unique_ptr<MockTextureRegistrar> texture_registrar_ =
+ std::make_unique<MockTextureRegistrar>();
+ std::unique_ptr<MockBinaryMessenger> messenger_ =
+ std::make_unique<MockBinaryMessenger>();
+ std::unique_ptr<MockCameraFactory> camera_factory_ =
+ std::make_unique<MockCameraFactory>();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ MockInitCamera(camera.get(), true);
+
+ // Move mocked camera to the factory to be passed
+ // for plugin with CreateCamera function.
+ camera_factory_->pending_camera_ = std::move(camera);
+
+ EXPECT_CALL(*camera_factory_, CreateCamera(MOCK_DEVICE_ID));
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(*result, SuccessInternal(Pointee(EncodableValue(1))));
+
+ CameraPlugin plugin(texture_registrar_.get(), messenger_.get(),
+ std::move(camera_factory_));
+ EncodableMap args = {
+ {EncodableValue("cameraName"), EncodableValue(MOCK_CAMERA_NAME)},
+ {EncodableValue("resolutionPreset"), EncodableValue(nullptr)},
+ {EncodableValue("enableAudio"), EncodableValue(true)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("create",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(result));
+}
+
+TEST(CameraPlugin, CreateHandlerErrorOnInvalidDeviceId) {
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+ std::unique_ptr<MockTextureRegistrar> texture_registrar_ =
+ std::make_unique<MockTextureRegistrar>();
+ std::unique_ptr<MockBinaryMessenger> messenger_ =
+ std::make_unique<MockBinaryMessenger>();
+ std::unique_ptr<MockCameraFactory> camera_factory_ =
+ std::make_unique<MockCameraFactory>();
+
+ CameraPlugin plugin(texture_registrar_.get(), messenger_.get(),
+ std::move(camera_factory_));
+ EncodableMap args = {
+ {EncodableValue("cameraName"), EncodableValue(MOCK_INVALID_CAMERA_NAME)},
+ {EncodableValue("resolutionPreset"), EncodableValue(nullptr)},
+ {EncodableValue("enableAudio"), EncodableValue(true)},
+ };
+
+ EXPECT_CALL(*result, ErrorInternal).Times(1);
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("create",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(result));
+}
+
+TEST(CameraPlugin, CreateHandlerErrorOnExistingDeviceId) {
+ std::unique_ptr<MockMethodResult> first_create_result =
+ std::make_unique<MockMethodResult>();
+ std::unique_ptr<MockMethodResult> second_create_result =
+ std::make_unique<MockMethodResult>();
+ std::unique_ptr<MockTextureRegistrar> texture_registrar_ =
+ std::make_unique<MockTextureRegistrar>();
+ std::unique_ptr<MockBinaryMessenger> messenger_ =
+ std::make_unique<MockBinaryMessenger>();
+ std::unique_ptr<MockCameraFactory> camera_factory_ =
+ std::make_unique<MockCameraFactory>();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ MockInitCamera(camera.get(), true);
+
+ // Move mocked camera to the factory to be passed
+ // for plugin with CreateCamera function.
+ camera_factory_->pending_camera_ = std::move(camera);
+
+ EXPECT_CALL(*camera_factory_, CreateCamera(MOCK_DEVICE_ID));
+
+ EXPECT_CALL(*first_create_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*first_create_result,
+ SuccessInternal(Pointee(EncodableValue(1))));
+
+ CameraPlugin plugin(texture_registrar_.get(), messenger_.get(),
+ std::move(camera_factory_));
+ EncodableMap args = {
+ {EncodableValue("cameraName"), EncodableValue(MOCK_CAMERA_NAME)},
+ {EncodableValue("resolutionPreset"), EncodableValue(nullptr)},
+ {EncodableValue("enableAudio"), EncodableValue(true)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("create",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(first_create_result));
+
+ EXPECT_CALL(*second_create_result, ErrorInternal).Times(1);
+ EXPECT_CALL(*second_create_result, SuccessInternal).Times(0);
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("create",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(second_create_result));
+}
+
+TEST(CameraPlugin, CreateHandlerAllowsRetry) {
+ std::unique_ptr<MockMethodResult> first_create_result =
+ std::make_unique<MockMethodResult>();
+ std::unique_ptr<MockMethodResult> second_create_result =
+ std::make_unique<MockMethodResult>();
+ std::unique_ptr<MockTextureRegistrar> texture_registrar_ =
+ std::make_unique<MockTextureRegistrar>();
+ std::unique_ptr<MockBinaryMessenger> messenger_ =
+ std::make_unique<MockBinaryMessenger>();
+ std::unique_ptr<MockCameraFactory> camera_factory_ =
+ std::make_unique<MockCameraFactory>();
+
+ // The camera will fail initialization once and then succeed.
+ EXPECT_CALL(*camera_factory_, CreateCamera(MOCK_DEVICE_ID))
+ .Times(2)
+ .WillOnce([](const std::string& device_id) {
+ std::unique_ptr<MockCamera> first_camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ MockInitCamera(first_camera.get(), false);
+
+ return first_camera;
+ })
+ .WillOnce([](const std::string& device_id) {
+ std::unique_ptr<MockCamera> second_camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ MockInitCamera(second_camera.get(), true);
+
+ return second_camera;
+ });
+
+ EXPECT_CALL(*first_create_result, ErrorInternal).Times(1);
+ EXPECT_CALL(*first_create_result, SuccessInternal).Times(0);
+
+ CameraPlugin plugin(texture_registrar_.get(), messenger_.get(),
+ std::move(camera_factory_));
+ EncodableMap args = {
+ {EncodableValue("cameraName"), EncodableValue(MOCK_CAMERA_NAME)},
+ {EncodableValue("resolutionPreset"), EncodableValue(nullptr)},
+ {EncodableValue("enableAudio"), EncodableValue(true)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("create",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(first_create_result));
+
+ EXPECT_CALL(*second_create_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*second_create_result,
+ SuccessInternal(Pointee(EncodableValue(1))));
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("create",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(second_create_result));
+}
+
+TEST(CameraPlugin, InitializeHandlerCallStartPreview) {
+ int64_t mock_camera_id = 1234;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId(Eq(mock_camera_id)))
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera,
+ HasPendingResultByType(Eq(PendingResultType::kInitialize)))
+ .Times(1)
+ .WillOnce(Return(false));
+
+ EXPECT_CALL(*camera, AddPendingResult(Eq(PendingResultType::kInitialize), _))
+ .Times(1)
+ .WillOnce([cam = camera.get()](PendingResultType type,
+ std::unique_ptr<MethodResult<>> result) {
+ cam->pending_result_ = std::move(result);
+ return true;
+ });
+
+ EXPECT_CALL(*camera, GetCaptureController)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->capture_controller_.get();
+ });
+
+ EXPECT_CALL(*capture_controller, StartPreview())
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->pending_result_->Success();
+ });
+
+ camera->camera_id_ = mock_camera_id;
+ camera->capture_controller_ = std::move(capture_controller);
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(1);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(mock_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("initialize",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, InitializeHandlerErrorOnInvalidCameraId) {
+ int64_t mock_camera_id = 1234;
+ int64_t missing_camera_id = 5678;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId)
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera, HasPendingResultByType).Times(0);
+ EXPECT_CALL(*camera, AddPendingResult).Times(0);
+ EXPECT_CALL(*camera, GetCaptureController).Times(0);
+ EXPECT_CALL(*capture_controller, StartPreview).Times(0);
+
+ camera->camera_id_ = mock_camera_id;
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(1);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(0);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(missing_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("initialize",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, TakePictureHandlerCallsTakePictureWithPath) {
+ int64_t mock_camera_id = 1234;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId(Eq(mock_camera_id)))
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera,
+ HasPendingResultByType(Eq(PendingResultType::kTakePicture)))
+ .Times(1)
+ .WillOnce(Return(false));
+
+ EXPECT_CALL(*camera, AddPendingResult(Eq(PendingResultType::kTakePicture), _))
+ .Times(1)
+ .WillOnce([cam = camera.get()](PendingResultType type,
+ std::unique_ptr<MethodResult<>> result) {
+ cam->pending_result_ = std::move(result);
+ return true;
+ });
+
+ EXPECT_CALL(*camera, GetCaptureController)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->capture_controller_.get();
+ });
+
+ EXPECT_CALL(*capture_controller, TakePicture(EndsWith(".jpeg")))
+ .Times(1)
+ .WillOnce([cam = camera.get()](const std::string& file_path) {
+ assert(cam->pending_result_);
+ return cam->pending_result_->Success();
+ });
+
+ camera->camera_id_ = mock_camera_id;
+ camera->capture_controller_ = std::move(capture_controller);
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(1);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(mock_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("takePicture",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, TakePictureHandlerErrorOnInvalidCameraId) {
+ int64_t mock_camera_id = 1234;
+ int64_t missing_camera_id = 5678;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId)
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera, HasPendingResultByType).Times(0);
+ EXPECT_CALL(*camera, AddPendingResult).Times(0);
+ EXPECT_CALL(*camera, GetCaptureController).Times(0);
+ EXPECT_CALL(*capture_controller, TakePicture).Times(0);
+
+ camera->camera_id_ = mock_camera_id;
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(1);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(0);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(missing_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("takePicture",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, StartVideoRecordingHandlerCallsStartRecordWithPath) {
+ int64_t mock_camera_id = 1234;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId(Eq(mock_camera_id)))
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera,
+ HasPendingResultByType(Eq(PendingResultType::kStartRecord)))
+ .Times(1)
+ .WillOnce(Return(false));
+
+ EXPECT_CALL(*camera, AddPendingResult(Eq(PendingResultType::kStartRecord), _))
+ .Times(1)
+ .WillOnce([cam = camera.get()](PendingResultType type,
+ std::unique_ptr<MethodResult<>> result) {
+ cam->pending_result_ = std::move(result);
+ return true;
+ });
+
+ EXPECT_CALL(*camera, GetCaptureController)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->capture_controller_.get();
+ });
+
+ EXPECT_CALL(*capture_controller, StartRecord(EndsWith(".mp4"), -1))
+ .Times(1)
+ .WillOnce([cam = camera.get()](const std::string& file_path,
+ int64_t max_video_duration_ms) {
+ assert(cam->pending_result_);
+ return cam->pending_result_->Success();
+ });
+
+ camera->camera_id_ = mock_camera_id;
+ camera->capture_controller_ = std::move(capture_controller);
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(1);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(mock_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("startVideoRecording",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin,
+ StartVideoRecordingHandlerCallsStartRecordWithPathAndCaptureDuration) {
+ int64_t mock_camera_id = 1234;
+ int32_t mock_video_duration = 100000;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId(Eq(mock_camera_id)))
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera,
+ HasPendingResultByType(Eq(PendingResultType::kStartRecord)))
+ .Times(1)
+ .WillOnce(Return(false));
+
+ EXPECT_CALL(*camera, AddPendingResult(Eq(PendingResultType::kStartRecord), _))
+ .Times(1)
+ .WillOnce([cam = camera.get()](PendingResultType type,
+ std::unique_ptr<MethodResult<>> result) {
+ cam->pending_result_ = std::move(result);
+ return true;
+ });
+
+ EXPECT_CALL(*camera, GetCaptureController)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->capture_controller_.get();
+ });
+
+ EXPECT_CALL(*capture_controller,
+ StartRecord(EndsWith(".mp4"), Eq(mock_video_duration)))
+ .Times(1)
+ .WillOnce([cam = camera.get()](const std::string& file_path,
+ int64_t max_video_duration_ms) {
+ assert(cam->pending_result_);
+ return cam->pending_result_->Success();
+ });
+
+ camera->camera_id_ = mock_camera_id;
+ camera->capture_controller_ = std::move(capture_controller);
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(1);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(mock_camera_id)},
+ {EncodableValue("maxVideoDuration"), EncodableValue(mock_video_duration)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("startVideoRecording",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, StartVideoRecordingHandlerErrorOnInvalidCameraId) {
+ int64_t mock_camera_id = 1234;
+ int64_t missing_camera_id = 5678;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId)
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera, HasPendingResultByType).Times(0);
+ EXPECT_CALL(*camera, AddPendingResult).Times(0);
+ EXPECT_CALL(*camera, GetCaptureController).Times(0);
+ EXPECT_CALL(*capture_controller, StartRecord(_, -1)).Times(0);
+
+ camera->camera_id_ = mock_camera_id;
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(1);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(0);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(missing_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("startVideoRecording",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, StopVideoRecordingHandlerCallsStopRecord) {
+ int64_t mock_camera_id = 1234;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId(Eq(mock_camera_id)))
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera,
+ HasPendingResultByType(Eq(PendingResultType::kStopRecord)))
+ .Times(1)
+ .WillOnce(Return(false));
+
+ EXPECT_CALL(*camera, AddPendingResult(Eq(PendingResultType::kStopRecord), _))
+ .Times(1)
+ .WillOnce([cam = camera.get()](PendingResultType type,
+ std::unique_ptr<MethodResult<>> result) {
+ cam->pending_result_ = std::move(result);
+ return true;
+ });
+
+ EXPECT_CALL(*camera, GetCaptureController)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->capture_controller_.get();
+ });
+
+ EXPECT_CALL(*capture_controller, StopRecord)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->pending_result_->Success();
+ });
+
+ camera->camera_id_ = mock_camera_id;
+ camera->capture_controller_ = std::move(capture_controller);
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(1);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(mock_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("stopVideoRecording",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, StopVideoRecordingHandlerErrorOnInvalidCameraId) {
+ int64_t mock_camera_id = 1234;
+ int64_t missing_camera_id = 5678;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId)
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera, HasPendingResultByType).Times(0);
+ EXPECT_CALL(*camera, AddPendingResult).Times(0);
+ EXPECT_CALL(*camera, GetCaptureController).Times(0);
+ EXPECT_CALL(*capture_controller, StopRecord).Times(0);
+
+ camera->camera_id_ = mock_camera_id;
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(1);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(0);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(missing_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("stopVideoRecording",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, ResumePreviewHandlerCallsResumePreview) {
+ int64_t mock_camera_id = 1234;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId(Eq(mock_camera_id)))
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera,
+ HasPendingResultByType(Eq(PendingResultType::kResumePreview)))
+ .Times(1)
+ .WillOnce(Return(false));
+
+ EXPECT_CALL(*camera,
+ AddPendingResult(Eq(PendingResultType::kResumePreview), _))
+ .Times(1)
+ .WillOnce([cam = camera.get()](PendingResultType type,
+ std::unique_ptr<MethodResult<>> result) {
+ cam->pending_result_ = std::move(result);
+ return true;
+ });
+
+ EXPECT_CALL(*camera, GetCaptureController)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->capture_controller_.get();
+ });
+
+ EXPECT_CALL(*capture_controller, ResumePreview)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->pending_result_->Success();
+ });
+
+ camera->camera_id_ = mock_camera_id;
+ camera->capture_controller_ = std::move(capture_controller);
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(1);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(mock_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("resumePreview",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, ResumePreviewHandlerErrorOnInvalidCameraId) {
+ int64_t mock_camera_id = 1234;
+ int64_t missing_camera_id = 5678;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId)
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera, HasPendingResultByType).Times(0);
+ EXPECT_CALL(*camera, AddPendingResult).Times(0);
+ EXPECT_CALL(*camera, GetCaptureController).Times(0);
+ EXPECT_CALL(*capture_controller, ResumePreview).Times(0);
+
+ camera->camera_id_ = mock_camera_id;
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(1);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(0);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(missing_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("resumePreview",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, PausePreviewHandlerCallsPausePreview) {
+ int64_t mock_camera_id = 1234;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId(Eq(mock_camera_id)))
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera,
+ HasPendingResultByType(Eq(PendingResultType::kPausePreview)))
+ .Times(1)
+ .WillOnce(Return(false));
+
+ EXPECT_CALL(*camera,
+ AddPendingResult(Eq(PendingResultType::kPausePreview), _))
+ .Times(1)
+ .WillOnce([cam = camera.get()](PendingResultType type,
+ std::unique_ptr<MethodResult<>> result) {
+ cam->pending_result_ = std::move(result);
+ return true;
+ });
+
+ EXPECT_CALL(*camera, GetCaptureController)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->capture_controller_.get();
+ });
+
+ EXPECT_CALL(*capture_controller, PausePreview)
+ .Times(1)
+ .WillOnce([cam = camera.get()]() {
+ assert(cam->pending_result_);
+ return cam->pending_result_->Success();
+ });
+
+ camera->camera_id_ = mock_camera_id;
+ camera->capture_controller_ = std::move(capture_controller);
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(1);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(mock_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("pausePreview",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+TEST(CameraPlugin, PausePreviewHandlerErrorOnInvalidCameraId) {
+ int64_t mock_camera_id = 1234;
+ int64_t missing_camera_id = 5678;
+
+ std::unique_ptr<MockMethodResult> initialize_result =
+ std::make_unique<MockMethodResult>();
+
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+
+ std::unique_ptr<MockCaptureController> capture_controller =
+ std::make_unique<MockCaptureController>();
+
+ EXPECT_CALL(*camera, HasCameraId)
+ .Times(1)
+ .WillOnce([cam = camera.get()](int64_t camera_id) {
+ return cam->camera_id_ == camera_id;
+ });
+
+ EXPECT_CALL(*camera, HasPendingResultByType).Times(0);
+ EXPECT_CALL(*camera, AddPendingResult).Times(0);
+ EXPECT_CALL(*camera, GetCaptureController).Times(0);
+ EXPECT_CALL(*capture_controller, PausePreview).Times(0);
+
+ camera->camera_id_ = mock_camera_id;
+
+ MockCameraPlugin plugin(std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(),
+ std::make_unique<MockCameraFactory>());
+
+ // Add mocked camera to plugins camera list.
+ plugin.AddCamera(std::move(camera));
+
+ EXPECT_CALL(*initialize_result, ErrorInternal).Times(1);
+ EXPECT_CALL(*initialize_result, SuccessInternal).Times(0);
+
+ EncodableMap args = {
+ {EncodableValue("cameraId"), EncodableValue(missing_camera_id)},
+ };
+
+ plugin.HandleMethodCall(
+ flutter::MethodCall("pausePreview",
+ std::make_unique<EncodableValue>(EncodableMap(args))),
+ std::move(initialize_result));
+}
+
+} // namespace test
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/test/camera_test.cpp b/packages/camera/camera_windows/windows/test/camera_test.cpp
new file mode 100644
index 0000000..158a2c2
--- /dev/null
+++ b/packages/camera/camera_windows/windows/test/camera_test.cpp
@@ -0,0 +1,505 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "camera.h"
+
+#include <flutter/method_call.h>
+#include <flutter/method_result_functions.h>
+#include <flutter/standard_method_codec.h>
+#include <flutter/texture_registrar.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <windows.h>
+
+#include <functional>
+#include <memory>
+#include <string>
+
+#include "mocks.h"
+
+namespace camera_windows {
+using ::testing::_;
+using ::testing::Eq;
+using ::testing::NiceMock;
+using ::testing::Pointee;
+using ::testing::Return;
+
+namespace test {
+
+TEST(Camera, InitCameraCreatesCaptureController) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockCaptureControllerFactory> capture_controller_factory =
+ std::make_unique<MockCaptureControllerFactory>();
+
+ EXPECT_CALL(*capture_controller_factory, CreateCaptureController)
+ .Times(1)
+ .WillOnce([]() {
+ std::unique_ptr<NiceMock<MockCaptureController>> capture_controller =
+ std::make_unique<NiceMock<MockCaptureController>>();
+
+ EXPECT_CALL(*capture_controller, InitCaptureDevice)
+ .Times(1)
+ .WillOnce(Return(true));
+
+ return capture_controller;
+ });
+
+ EXPECT_TRUE(camera->GetCaptureController() == nullptr);
+
+ // Init camera with mock capture controller factory
+ bool result =
+ camera->InitCamera(std::move(capture_controller_factory),
+ std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(), false,
+ ResolutionPreset::kAuto);
+ EXPECT_TRUE(result);
+ EXPECT_TRUE(camera->GetCaptureController() != nullptr);
+}
+
+TEST(Camera, InitCameraReportsFailure) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockCaptureControllerFactory> capture_controller_factory =
+ std::make_unique<MockCaptureControllerFactory>();
+
+ EXPECT_CALL(*capture_controller_factory, CreateCaptureController)
+ .Times(1)
+ .WillOnce([]() {
+ std::unique_ptr<NiceMock<MockCaptureController>> capture_controller =
+ std::make_unique<NiceMock<MockCaptureController>>();
+
+ EXPECT_CALL(*capture_controller, InitCaptureDevice)
+ .Times(1)
+ .WillOnce(Return(false));
+
+ return capture_controller;
+ });
+
+ EXPECT_TRUE(camera->GetCaptureController() == nullptr);
+
+ // Init camera with mock capture controller factory
+ bool result =
+ camera->InitCamera(std::move(capture_controller_factory),
+ std::make_unique<MockTextureRegistrar>().get(),
+ std::make_unique<MockBinaryMessenger>().get(), false,
+ ResolutionPreset::kAuto);
+ EXPECT_FALSE(result);
+ EXPECT_TRUE(camera->GetCaptureController() != nullptr);
+}
+
+TEST(Camera, AddPendingResultReturnsErrorForDuplicates) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> first_pending_result =
+ std::make_unique<MockMethodResult>();
+ std::unique_ptr<MockMethodResult> second_pending_result =
+ std::make_unique<MockMethodResult>();
+
+ EXPECT_CALL(*first_pending_result, ErrorInternal).Times(0);
+ EXPECT_CALL(*first_pending_result, SuccessInternal);
+ EXPECT_CALL(*second_pending_result, ErrorInternal).Times(1);
+
+ camera->AddPendingResult(PendingResultType::kCreateCamera,
+ std::move(first_pending_result));
+
+ // This should fail
+ camera->AddPendingResult(PendingResultType::kCreateCamera,
+ std::move(second_pending_result));
+
+ // Mark pending result as succeeded
+ camera->OnCreateCaptureEngineSucceeded(0);
+}
+
+TEST(Camera, OnCreateCaptureEngineSucceededReturnsCameraId) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const int64_t texture_id = 12345;
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(
+ *result,
+ SuccessInternal(Pointee(EncodableValue(EncodableMap(
+ {{EncodableValue("cameraId"), EncodableValue(texture_id)}})))));
+
+ camera->AddPendingResult(PendingResultType::kCreateCamera, std::move(result));
+
+ camera->OnCreateCaptureEngineSucceeded(texture_id);
+}
+
+TEST(Camera, CreateCaptureEngineReportsError) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result, ErrorInternal(Eq("camera_error"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kCreateCamera, std::move(result));
+
+ camera->OnCreateCaptureEngineFailed(CameraResult::kError, error_text);
+}
+
+TEST(Camera, CreateCaptureEngineReportsAccessDenied) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result,
+ ErrorInternal(Eq("CameraAccessDenied"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kCreateCamera, std::move(result));
+
+ camera->OnCreateCaptureEngineFailed(CameraResult::kAccessDenied, error_text);
+}
+
+TEST(Camera, OnStartPreviewSucceededReturnsFrameSize) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const int32_t width = 123;
+ const int32_t height = 456;
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(
+ *result,
+ SuccessInternal(Pointee(EncodableValue(EncodableMap({
+ {EncodableValue("previewWidth"), EncodableValue((float)width)},
+ {EncodableValue("previewHeight"), EncodableValue((float)height)},
+ })))));
+
+ camera->AddPendingResult(PendingResultType::kInitialize, std::move(result));
+
+ camera->OnStartPreviewSucceeded(width, height);
+}
+
+TEST(Camera, StartPreviewReportsError) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result, ErrorInternal(Eq("camera_error"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kInitialize, std::move(result));
+
+ camera->OnStartPreviewFailed(CameraResult::kError, error_text);
+}
+
+TEST(Camera, StartPreviewReportsAccessDenied) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result,
+ ErrorInternal(Eq("CameraAccessDenied"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kInitialize, std::move(result));
+
+ camera->OnStartPreviewFailed(CameraResult::kAccessDenied, error_text);
+}
+
+TEST(Camera, OnPausePreviewSucceededReturnsSuccess) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(*result, SuccessInternal(nullptr));
+
+ camera->AddPendingResult(PendingResultType::kPausePreview, std::move(result));
+
+ camera->OnPausePreviewSucceeded();
+}
+
+TEST(Camera, PausePreviewReportsError) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result, ErrorInternal(Eq("camera_error"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kPausePreview, std::move(result));
+
+ camera->OnPausePreviewFailed(CameraResult::kError, error_text);
+}
+
+TEST(Camera, PausePreviewReportsAccessDenied) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result,
+ ErrorInternal(Eq("CameraAccessDenied"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kPausePreview, std::move(result));
+
+ camera->OnPausePreviewFailed(CameraResult::kAccessDenied, error_text);
+}
+
+TEST(Camera, OnResumePreviewSucceededReturnsSuccess) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(*result, SuccessInternal(nullptr));
+
+ camera->AddPendingResult(PendingResultType::kResumePreview,
+ std::move(result));
+
+ camera->OnResumePreviewSucceeded();
+}
+
+TEST(Camera, ResumePreviewReportsError) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result, ErrorInternal(Eq("camera_error"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kResumePreview,
+ std::move(result));
+
+ camera->OnResumePreviewFailed(CameraResult::kError, error_text);
+}
+
+TEST(Camera, OnResumePreviewPermissionFailureReturnsError) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result,
+ ErrorInternal(Eq("CameraAccessDenied"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kResumePreview,
+ std::move(result));
+
+ camera->OnResumePreviewFailed(CameraResult::kAccessDenied, error_text);
+}
+
+TEST(Camera, OnStartRecordSucceededReturnsSuccess) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(*result, SuccessInternal(nullptr));
+
+ camera->AddPendingResult(PendingResultType::kStartRecord, std::move(result));
+
+ camera->OnStartRecordSucceeded();
+}
+
+TEST(Camera, StartRecordReportsError) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result, ErrorInternal(Eq("camera_error"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kStartRecord, std::move(result));
+
+ camera->OnStartRecordFailed(CameraResult::kError, error_text);
+}
+
+TEST(Camera, StartRecordReportsAccessDenied) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result,
+ ErrorInternal(Eq("CameraAccessDenied"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kStartRecord, std::move(result));
+
+ camera->OnStartRecordFailed(CameraResult::kAccessDenied, error_text);
+}
+
+TEST(Camera, OnStopRecordSucceededReturnsSuccess) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string file_path = "C:\\temp\\filename.mp4";
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(*result, SuccessInternal(Pointee(EncodableValue(file_path))));
+
+ camera->AddPendingResult(PendingResultType::kStopRecord, std::move(result));
+
+ camera->OnStopRecordSucceeded(file_path);
+}
+
+TEST(Camera, StopRecordReportsError) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result, ErrorInternal(Eq("camera_error"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kStopRecord, std::move(result));
+
+ camera->OnStopRecordFailed(CameraResult::kError, error_text);
+}
+
+TEST(Camera, StopRecordReportsAccessDenied) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result,
+ ErrorInternal(Eq("CameraAccessDenied"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kStopRecord, std::move(result));
+
+ camera->OnStopRecordFailed(CameraResult::kAccessDenied, error_text);
+}
+
+TEST(Camera, OnTakePictureSucceededReturnsSuccess) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string file_path = "C:\\temp\\filename.jpeg";
+
+ EXPECT_CALL(*result, ErrorInternal).Times(0);
+ EXPECT_CALL(*result, SuccessInternal(Pointee(EncodableValue(file_path))));
+
+ camera->AddPendingResult(PendingResultType::kTakePicture, std::move(result));
+
+ camera->OnTakePictureSucceeded(file_path);
+}
+
+TEST(Camera, TakePictureReportsError) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result, ErrorInternal(Eq("camera_error"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kTakePicture, std::move(result));
+
+ camera->OnTakePictureFailed(CameraResult::kError, error_text);
+}
+
+TEST(Camera, TakePictureReportsAccessDenied) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockMethodResult> result =
+ std::make_unique<MockMethodResult>();
+
+ const std::string error_text = "error_text";
+
+ EXPECT_CALL(*result, SuccessInternal).Times(0);
+ EXPECT_CALL(*result,
+ ErrorInternal(Eq("CameraAccessDenied"), Eq(error_text), _));
+
+ camera->AddPendingResult(PendingResultType::kTakePicture, std::move(result));
+
+ camera->OnTakePictureFailed(CameraResult::kAccessDenied, error_text);
+}
+
+TEST(Camera, OnVideoRecordSucceededInvokesCameraChannelEvent) {
+ std::unique_ptr<CameraImpl> camera =
+ std::make_unique<CameraImpl>(MOCK_DEVICE_ID);
+ std::unique_ptr<MockCaptureControllerFactory> capture_controller_factory =
+ std::make_unique<MockCaptureControllerFactory>();
+
+ std::unique_ptr<MockBinaryMessenger> binary_messenger =
+ std::make_unique<MockBinaryMessenger>();
+
+ const std::string file_path = "C:\\temp\\filename.mp4";
+ const int64_t camera_id = 12345;
+ std::string camera_channel =
+ std::string("plugins.flutter.io/camera_windows/camera") +
+ std::to_string(camera_id);
+ const int64_t video_duration = 1000000;
+
+ EXPECT_CALL(*capture_controller_factory, CreateCaptureController)
+ .Times(1)
+ .WillOnce(
+ []() { return std::make_unique<NiceMock<MockCaptureController>>(); });
+
+ // TODO: test binary content.
+ // First time is video record success message,
+ // and second is camera closing message.
+ EXPECT_CALL(*binary_messenger, Send(Eq(camera_channel), _, _, _)).Times(2);
+
+ // Init camera with mock capture controller factory
+ camera->InitCamera(std::move(capture_controller_factory),
+ std::make_unique<MockTextureRegistrar>().get(),
+ binary_messenger.get(), false, ResolutionPreset::kAuto);
+
+ // Pass camera id for camera
+ camera->OnCreateCaptureEngineSucceeded(camera_id);
+
+ camera->OnVideoRecordSucceeded(file_path, video_duration);
+
+ // Dispose camera before message channel.
+ camera = nullptr;
+}
+
+} // namespace test
+} // namespace camera_windows
diff --git a/packages/camera/camera_windows/windows/test/capture_controller_test.cpp b/packages/camera/camera_windows/windows/test/capture_controller_test.cpp
new file mode 100644
index 0000000..8d6632c
--- /dev/null
+++ b/packages/camera/camera_windows/windows/test/capture_controller_test.cpp
@@ -0,0 +1,1438 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "capture_controller.h"
+
+#include <flutter/method_call.h>
+#include <flutter/method_result_functions.h>
+#include <flutter/standard_method_codec.h>
+#include <flutter/texture_registrar.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <windows.h>
+#include <wrl/client.h>
+
+#include <functional>
+#include <memory>
+#include <string>
+
+#include "mocks.h"
+#include "string_utils.h"
+
+namespace camera_windows {
+
+namespace test {
+
+using Microsoft::WRL::ComPtr;
+using ::testing::_;
+using ::testing::Eq;
+using ::testing::Return;
+
+void MockInitCaptureController(CaptureControllerImpl* capture_controller,
+ MockTextureRegistrar* texture_registrar,
+ MockCaptureEngine* engine, MockCamera* camera,
+ int64_t mock_texture_id) {
+ ComPtr<MockMediaSource> video_source = new MockMediaSource();
+ ComPtr<MockMediaSource> audio_source = new MockMediaSource();
+
+ capture_controller->SetCaptureEngine(
+ reinterpret_cast<IMFCaptureEngine*>(engine));
+ capture_controller->SetVideoSource(
+ reinterpret_cast<IMFMediaSource*>(video_source.Get()));
+ capture_controller->SetAudioSource(
+ reinterpret_cast<IMFMediaSource*>(audio_source.Get()));
+
+ EXPECT_CALL(*texture_registrar, RegisterTexture)
+ .Times(1)
+ .WillOnce([reg = texture_registrar,
+ mock_texture_id](flutter::TextureVariant* texture) -> int64_t {
+ EXPECT_TRUE(texture);
+ reg->texture_ = texture;
+ reg->texture_id_ = mock_texture_id;
+ return reg->texture_id_;
+ });
+ EXPECT_CALL(*texture_registrar, UnregisterTexture(Eq(mock_texture_id)))
+ .Times(1);
+ EXPECT_CALL(*camera, OnCreateCaptureEngineFailed).Times(0);
+ EXPECT_CALL(*camera, OnCreateCaptureEngineSucceeded(Eq(mock_texture_id)))
+ .Times(1);
+ EXPECT_CALL(*engine, Initialize).Times(1);
+
+ bool result = capture_controller->InitCaptureDevice(
+ texture_registrar, MOCK_DEVICE_ID, true, ResolutionPreset::kAuto);
+
+ EXPECT_TRUE(result);
+
+ // MockCaptureEngine::Initialize is called
+ EXPECT_TRUE(engine->initialized_);
+
+ engine->CreateFakeEvent(S_OK, MF_CAPTURE_ENGINE_INITIALIZED);
+}
+
+void MockAvailableMediaTypes(MockCaptureEngine* engine,
+ MockCaptureSource* capture_source,
+ uint32_t mock_preview_width,
+ uint32_t mock_preview_height) {
+ EXPECT_CALL(*engine, GetSource)
+ .Times(1)
+ .WillOnce(
+ [src_source = capture_source](IMFCaptureSource** target_source) {
+ *target_source = src_source;
+ src_source->AddRef();
+ return S_OK;
+ });
+
+ EXPECT_CALL(
+ *capture_source,
+ GetAvailableDeviceMediaType(
+ Eq((DWORD)
+ MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_PREVIEW),
+ _, _))
+ .WillRepeatedly([mock_preview_width, mock_preview_height](
+ DWORD stream_index, DWORD media_type_index,
+ IMFMediaType** media_type) {
+ // We give only one media type to loop through
+ if (media_type_index != 0) return MF_E_NO_MORE_TYPES;
+ *media_type =
+ new FakeMediaType(MFMediaType_Video, MFVideoFormat_RGB32,
+ mock_preview_width, mock_preview_height);
+ (*media_type)->AddRef();
+ return S_OK;
+ });
+
+ EXPECT_CALL(
+ *capture_source,
+ GetAvailableDeviceMediaType(
+ Eq((DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_RECORD),
+ _, _))
+ .WillRepeatedly([mock_preview_width, mock_preview_height](
+ DWORD stream_index, DWORD media_type_index,
+ IMFMediaType** media_type) {
+ // We give only one media type to loop through
+ if (media_type_index != 0) return MF_E_NO_MORE_TYPES;
+ *media_type =
+ new FakeMediaType(MFMediaType_Video, MFVideoFormat_RGB32,
+ mock_preview_width, mock_preview_height);
+ (*media_type)->AddRef();
+ return S_OK;
+ });
+}
+
+void MockStartPreview(CaptureControllerImpl* capture_controller,
+ MockCapturePreviewSink* preview_sink,
+ MockTextureRegistrar* texture_registrar,
+ MockCaptureEngine* engine, MockCamera* camera,
+ std::unique_ptr<uint8_t[]> mock_source_buffer,
+ uint32_t mock_source_buffer_size,
+ uint32_t mock_preview_width, uint32_t mock_preview_height,
+ int64_t mock_texture_id) {
+ EXPECT_CALL(*engine, GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW, _))
+ .Times(1)
+ .WillOnce([src_sink = preview_sink](MF_CAPTURE_ENGINE_SINK_TYPE sink_type,
+ IMFCaptureSink** target_sink) {
+ *target_sink = src_sink;
+ src_sink->AddRef();
+ return S_OK;
+ });
+
+ EXPECT_CALL(*preview_sink, RemoveAllStreams).Times(1).WillOnce(Return(S_OK));
+ EXPECT_CALL(*preview_sink, AddStream).Times(1).WillOnce(Return(S_OK));
+ EXPECT_CALL(*preview_sink, SetSampleCallback)
+ .Times(1)
+ .WillOnce([sink = preview_sink](
+ DWORD dwStreamSinkIndex,
+ IMFCaptureEngineOnSampleCallback* pCallback) -> HRESULT {
+ sink->sample_callback_ = pCallback;
+ return S_OK;
+ });
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+ MockAvailableMediaTypes(engine, capture_source.Get(), mock_preview_width,
+ mock_preview_height);
+
+ EXPECT_CALL(*engine, StartPreview()).Times(1).WillOnce(Return(S_OK));
+
+ // Called by destructor
+ EXPECT_CALL(*engine, StopPreview()).Times(1).WillOnce(Return(S_OK));
+
+ // Called after first processed sample
+ EXPECT_CALL(*camera,
+ OnStartPreviewSucceeded(mock_preview_width, mock_preview_height))
+ .Times(1);
+ EXPECT_CALL(*camera, OnStartPreviewFailed).Times(0);
+ EXPECT_CALL(*texture_registrar, MarkTextureFrameAvailable(mock_texture_id))
+ .Times(1);
+
+ capture_controller->StartPreview();
+
+ EXPECT_EQ(capture_controller->GetPreviewHeight(), mock_preview_height);
+ EXPECT_EQ(capture_controller->GetPreviewWidth(), mock_preview_width);
+
+ // Capture engine is now started and will first send event of started preview
+ engine->CreateFakeEvent(S_OK, MF_CAPTURE_ENGINE_PREVIEW_STARTED);
+
+ // SendFake sample
+ preview_sink->SendFakeSample(mock_source_buffer.get(),
+ mock_source_buffer_size);
+}
+
+void MockPhotoSink(MockCaptureEngine* engine,
+ MockCapturePhotoSink* photo_sink) {
+ EXPECT_CALL(*engine, GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PHOTO, _))
+ .Times(1)
+ .WillOnce([src_sink = photo_sink](MF_CAPTURE_ENGINE_SINK_TYPE sink_type,
+ IMFCaptureSink** target_sink) {
+ *target_sink = src_sink;
+ src_sink->AddRef();
+ return S_OK;
+ });
+ EXPECT_CALL(*photo_sink, RemoveAllStreams).Times(1).WillOnce(Return(S_OK));
+ EXPECT_CALL(*photo_sink, AddStream).Times(1).WillOnce(Return(S_OK));
+ EXPECT_CALL(*photo_sink, SetOutputFileName).Times(1).WillOnce(Return(S_OK));
+}
+
+void MockRecordStart(CaptureControllerImpl* capture_controller,
+ MockCaptureEngine* engine,
+ MockCaptureRecordSink* record_sink, MockCamera* camera,
+ const std::string& mock_path_to_video) {
+ EXPECT_CALL(*engine, StartRecord()).Times(1).WillOnce(Return(S_OK));
+
+ EXPECT_CALL(*engine, GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_RECORD, _))
+ .Times(1)
+ .WillOnce([src_sink = record_sink](MF_CAPTURE_ENGINE_SINK_TYPE sink_type,
+ IMFCaptureSink** target_sink) {
+ *target_sink = src_sink;
+ src_sink->AddRef();
+ return S_OK;
+ });
+
+ EXPECT_CALL(*record_sink, RemoveAllStreams).Times(1).WillOnce(Return(S_OK));
+ EXPECT_CALL(*record_sink, AddStream).Times(2).WillRepeatedly(Return(S_OK));
+ EXPECT_CALL(*record_sink, SetOutputFileName).Times(1).WillOnce(Return(S_OK));
+
+ capture_controller->StartRecord(mock_path_to_video, -1);
+
+ EXPECT_CALL(*camera, OnStartRecordSucceeded()).Times(1);
+ engine->CreateFakeEvent(S_OK, MF_CAPTURE_ENGINE_RECORD_STARTED);
+}
+
+TEST(CaptureController,
+ InitCaptureEngineCallsOnCreateCaptureEngineSucceededWithTextureId) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Init capture controller with mocks and tests
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+TEST(CaptureController, InitCaptureEngineCanOnlyBeCalledOnce) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Init capture controller once with mocks and tests
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ // Init capture controller a second time.
+ EXPECT_CALL(*camera, OnCreateCaptureEngineFailed).Times(1);
+
+ bool result = capture_controller->InitCaptureDevice(
+ texture_registrar.get(), MOCK_DEVICE_ID, true, ResolutionPreset::kAuto);
+
+ EXPECT_FALSE(result);
+
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+TEST(CaptureController, InitCaptureEngineReportsFailure) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ ComPtr<MockMediaSource> video_source = new MockMediaSource();
+ ComPtr<MockMediaSource> audio_source = new MockMediaSource();
+
+ capture_controller->SetCaptureEngine(
+ reinterpret_cast<IMFCaptureEngine*>(engine.Get()));
+ capture_controller->SetVideoSource(
+ reinterpret_cast<IMFMediaSource*>(video_source.Get()));
+ capture_controller->SetAudioSource(
+ reinterpret_cast<IMFMediaSource*>(audio_source.Get()));
+
+ // Cause initialization to fail
+ EXPECT_CALL(*engine.Get(), Initialize).Times(1).WillOnce(Return(E_FAIL));
+
+ EXPECT_CALL(*texture_registrar, RegisterTexture).Times(0);
+ EXPECT_CALL(*texture_registrar, UnregisterTexture(_)).Times(0);
+ EXPECT_CALL(*camera, OnCreateCaptureEngineSucceeded).Times(0);
+ EXPECT_CALL(*camera,
+ OnCreateCaptureEngineFailed(Eq(CameraResult::kError),
+ Eq("Failed to create camera")))
+ .Times(1);
+
+ bool result = capture_controller->InitCaptureDevice(
+ texture_registrar.get(), MOCK_DEVICE_ID, true, ResolutionPreset::kAuto);
+
+ EXPECT_FALSE(result);
+ EXPECT_FALSE(engine->initialized_);
+
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+TEST(CaptureController, InitCaptureEngineReportsAccessDenied) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ ComPtr<MockMediaSource> video_source = new MockMediaSource();
+ ComPtr<MockMediaSource> audio_source = new MockMediaSource();
+
+ capture_controller->SetCaptureEngine(
+ reinterpret_cast<IMFCaptureEngine*>(engine.Get()));
+ capture_controller->SetVideoSource(
+ reinterpret_cast<IMFMediaSource*>(video_source.Get()));
+ capture_controller->SetAudioSource(
+ reinterpret_cast<IMFMediaSource*>(audio_source.Get()));
+
+ // Cause initialization to fail
+ EXPECT_CALL(*engine.Get(), Initialize)
+ .Times(1)
+ .WillOnce(Return(E_ACCESSDENIED));
+
+ EXPECT_CALL(*texture_registrar, RegisterTexture).Times(0);
+ EXPECT_CALL(*texture_registrar, UnregisterTexture(_)).Times(0);
+ EXPECT_CALL(*camera, OnCreateCaptureEngineSucceeded).Times(0);
+ EXPECT_CALL(*camera,
+ OnCreateCaptureEngineFailed(Eq(CameraResult::kAccessDenied),
+ Eq("Failed to create camera")))
+ .Times(1);
+
+ bool result = capture_controller->InitCaptureDevice(
+ texture_registrar.get(), MOCK_DEVICE_ID, true, ResolutionPreset::kAuto);
+
+ EXPECT_FALSE(result);
+ EXPECT_FALSE(engine->initialized_);
+
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+TEST(CaptureController, ReportsInitializedErrorEvent) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ EXPECT_CALL(*camera, OnCreateCaptureEngineFailed(
+ Eq(CameraResult::kError),
+ Eq("Failed to initialize capture engine")))
+ .Times(1);
+ EXPECT_CALL(*camera, OnCreateCaptureEngineSucceeded).Times(0);
+
+ // Send initialization failed event
+ engine->CreateFakeEvent(E_FAIL, MF_CAPTURE_ENGINE_INITIALIZED);
+
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+TEST(CaptureController, ReportsInitializedAccessDeniedEvent) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ EXPECT_CALL(*camera, OnCreateCaptureEngineFailed(
+ Eq(CameraResult::kAccessDenied),
+ Eq("Failed to initialize capture engine")))
+ .Times(1);
+ EXPECT_CALL(*camera, OnCreateCaptureEngineSucceeded).Times(0);
+
+ // Send initialization failed event
+ engine->CreateFakeEvent(E_ACCESSDENIED, MF_CAPTURE_ENGINE_INITIALIZED);
+
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+// Verifies that a generic failure (E_FAIL) delivered via the
+// MF_CAPTURE_ENGINE_ERROR event is forwarded to the camera as
+// OnCaptureError with CameraResult::kError and the HRESULT's
+// system message text.
+TEST(CaptureController, ReportsCaptureEngineErrorEvent) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ // "Unspecified error" is the system message for E_FAIL — presumably the
+ // controller formats the HRESULT into text; confirm against the impl.
+ EXPECT_CALL(*(camera.get()),
+ OnCaptureError(Eq(CameraResult::kError), Eq("Unspecified error")))
+ .Times(1);
+
+ // Send error event.
+ engine->CreateFakeEvent(E_FAIL, MF_CAPTURE_ENGINE_ERROR);
+
+ // Explicit teardown order: controller first, engine last.
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+// Verifies that E_ACCESSDENIED delivered via MF_CAPTURE_ENGINE_ERROR is
+// mapped to CameraResult::kAccessDenied with the matching system message,
+// distinguishing permission failures from generic capture errors.
+TEST(CaptureController, ReportsCaptureEngineAccessDeniedEvent) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ // "Access is denied." is the system message text for E_ACCESSDENIED.
+ EXPECT_CALL(*(camera.get()), OnCaptureError(Eq(CameraResult::kAccessDenied),
+ Eq("Access is denied.")))
+ .Times(1);
+
+ // Send error event.
+ engine->CreateFakeEvent(E_ACCESSDENIED, MF_CAPTURE_ENGINE_ERROR);
+
+ // Explicit teardown order: controller first, engine last.
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+// End-to-end preview pipeline test: feeds a tiny mock RGB32 frame through
+// StartPreview and checks that the texture registered with the Flutter
+// texture registrar yields a PixelBufferTexture whose converted pixels
+// match the mock source, pixel for pixel.
+TEST(CaptureController, StartPreviewStartsProcessingSamples) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCapturePreviewSink> preview_sink = new MockCapturePreviewSink();
+
+ // Let's keep these small for mock texture data. Two pixels should be
+ // enough.
+ uint32_t mock_preview_width = 2;
+ uint32_t mock_preview_height = 1;
+ uint32_t pixels_total = mock_preview_width * mock_preview_height;
+ uint32_t pixel_size = 4;
+
+ // Build mock texture
+ uint32_t mock_texture_data_size = pixels_total * pixel_size;
+
+ std::unique_ptr<uint8_t[]> mock_source_buffer =
+ std::make_unique<uint8_t[]>(mock_texture_data_size);
+
+ // Fill every pixel with the same distinctive R/G/B values so any
+ // channel swap or ordering bug in the conversion is detectable below.
+ uint8_t mock_red_pixel = 0x11;
+ uint8_t mock_green_pixel = 0x22;
+ uint8_t mock_blue_pixel = 0x33;
+ MFVideoFormatRGB32Pixel* mock_source_buffer_data =
+ (MFVideoFormatRGB32Pixel*)mock_source_buffer.get();
+
+ for (uint32_t i = 0; i < pixels_total; i++) {
+ mock_source_buffer_data[i].r = mock_red_pixel;
+ mock_source_buffer_data[i].g = mock_green_pixel;
+ mock_source_buffer_data[i].b = mock_blue_pixel;
+ }
+
+ // Start preview and run preview tests
+ MockStartPreview(capture_controller.get(), preview_sink.Get(),
+ texture_registrar.get(), engine.Get(), camera.get(),
+ std::move(mock_source_buffer), mock_texture_data_size,
+ mock_preview_width, mock_preview_height, mock_texture_id);
+
+ // Test texture processing
+ EXPECT_TRUE(texture_registrar->texture_);
+ if (texture_registrar->texture_) {
+ auto pixel_buffer_texture =
+ std::get_if<flutter::PixelBufferTexture>(texture_registrar->texture_);
+ EXPECT_TRUE(pixel_buffer_texture);
+
+ if (pixel_buffer_texture) {
+ // Requested size (100x100) differs from the mock frame on purpose:
+ // the returned buffer is expected to keep the source dimensions,
+ // i.e. the request presumably acts only as a hint — TODO confirm.
+ auto converted_buffer =
+ pixel_buffer_texture->CopyPixelBuffer((size_t)100, (size_t)100);
+
+ EXPECT_TRUE(converted_buffer);
+ if (converted_buffer) {
+ EXPECT_EQ(converted_buffer->height, mock_preview_height);
+ EXPECT_EQ(converted_buffer->width, mock_preview_width);
+
+ FlutterDesktopPixel* converted_buffer_data =
+ (FlutterDesktopPixel*)(converted_buffer->buffer);
+
+ for (uint32_t i = 0; i < pixels_total; i++) {
+ EXPECT_EQ(converted_buffer_data[i].r, mock_red_pixel);
+ EXPECT_EQ(converted_buffer_data[i].g, mock_green_pixel);
+ EXPECT_EQ(converted_buffer_data[i].b, mock_blue_pixel);
+ }
+
+ // Call release callback to get mutex lock unlocked.
+ converted_buffer->release_callback(converted_buffer->release_context);
+ }
+ converted_buffer = nullptr;
+ }
+ pixel_buffer_texture = nullptr;
+ }
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+}
+
+// Verifies that when acquiring the preview sink fails synchronously
+// (GetSink returns E_FAIL), StartPreview reports kError through
+// OnStartPreviewFailed and never attempts to start or stop the preview.
+TEST(CaptureController, ReportsStartPreviewError) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+ MockAvailableMediaTypes(engine.Get(), capture_source.Get(), 1, 1);
+
+ // Cause start preview to fail
+ EXPECT_CALL(*engine.Get(), GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW, _))
+ .Times(1)
+ .WillOnce(Return(E_FAIL));
+
+ // The failure must short-circuit: no preview start/stop, no success.
+ EXPECT_CALL(*engine.Get(), StartPreview).Times(0);
+ EXPECT_CALL(*engine.Get(), StopPreview).Times(0);
+ EXPECT_CALL(*camera, OnStartPreviewSucceeded).Times(0);
+ EXPECT_CALL(*camera,
+ OnStartPreviewFailed(Eq(CameraResult::kError),
+ Eq("Failed to start video preview")))
+ .Times(1);
+
+ capture_controller->StartPreview();
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+}
+
+// TODO(loic-sharma): Test duplicate calls to start preview.
+
+// Verifies that an MF_CAPTURE_ENGINE_PREVIEW_STARTED failure event arriving
+// when no preview start is pending is ignored: neither the preview-failed
+// nor the engine-created callback may fire.
+TEST(CaptureController, IgnoresStartPreviewErrorEvent) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ // Note: StartPreview() is deliberately NOT called before the event.
+ EXPECT_CALL(*camera, OnStartPreviewFailed).Times(0);
+ EXPECT_CALL(*camera, OnCreateCaptureEngineSucceeded).Times(0);
+
+ // Send a start preview error event
+ engine->CreateFakeEvent(E_FAIL, MF_CAPTURE_ENGINE_PREVIEW_STARTED);
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+}
+
+// Access-denied variant of ReportsStartPreviewError: a synchronous
+// E_ACCESSDENIED from GetSink must surface as CameraResult::kAccessDenied
+// via OnStartPreviewFailed, with no preview start/stop attempted.
+TEST(CaptureController, ReportsStartPreviewAccessDenied) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+ MockAvailableMediaTypes(engine.Get(), capture_source.Get(), 1, 1);
+
+ // Cause start preview to fail
+ EXPECT_CALL(*engine.Get(), GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW, _))
+ .Times(1)
+ .WillOnce(Return(E_ACCESSDENIED));
+
+ EXPECT_CALL(*engine.Get(), StartPreview).Times(0);
+ EXPECT_CALL(*engine.Get(), StopPreview).Times(0);
+ EXPECT_CALL(*camera, OnStartPreviewSucceeded).Times(0);
+ EXPECT_CALL(*camera,
+ OnStartPreviewFailed(Eq(CameraResult::kAccessDenied),
+ Eq("Failed to start video preview")))
+ .Times(1);
+
+ capture_controller->StartPreview();
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+ texture_registrar = nullptr;
+}
+
+// Verifies the happy path for starting a recording, and that destroying the
+// controller while a recording is active stops it via StopRecord(true, false).
+TEST(CaptureController, StartRecordSuccess) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+
+ // Prepare fake media types
+ MockAvailableMediaTypes(engine.Get(), capture_source.Get(), 1, 1);
+
+ // Start record
+ ComPtr<MockCaptureRecordSink> record_sink = new MockCaptureRecordSink();
+ std::string mock_path_to_video = "mock_path_to_video";
+ MockRecordStart(capture_controller.get(), engine.Get(), record_sink.Get(),
+ camera.get(), mock_path_to_video);
+
+ // Called by destructor
+ EXPECT_CALL(*(engine.Get()), StopRecord(true, false))
+ .Times(1)
+ .WillOnce(Return(S_OK));
+
+ // Destroying the controller here is what triggers the StopRecord above.
+ capture_controller = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+ record_sink = nullptr;
+}
+
+// Verifies that a synchronous E_FAIL from GetSink for the record sink makes
+// StartRecord report kError via OnStartRecordFailed without ever starting
+// (or later stopping) a recording.
+TEST(CaptureController, ReportsStartRecordError) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+
+ // Prepare fake media types
+ MockAvailableMediaTypes(engine.Get(), capture_source.Get(), 1, 1);
+
+ // Cause start record to fail
+ EXPECT_CALL(*engine.Get(), GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_RECORD, _))
+ .Times(1)
+ .WillOnce(Return(E_FAIL));
+
+ EXPECT_CALL(*engine.Get(), StartRecord).Times(0);
+ EXPECT_CALL(*engine.Get(), StopRecord).Times(0);
+ EXPECT_CALL(*camera, OnStartRecordSucceeded).Times(0);
+ EXPECT_CALL(*camera,
+ OnStartRecordFailed(Eq(CameraResult::kError),
+ Eq("Failed to start video recording")))
+ .Times(1);
+
+ // -1 presumably means "no maximum duration" — TODO confirm in the impl.
+ capture_controller->StartRecord("mock_path", -1);
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+}
+
+// Access-denied variant of ReportsStartRecordError: E_ACCESSDENIED from
+// GetSink must be surfaced as CameraResult::kAccessDenied, with no record
+// start/stop attempted.
+TEST(CaptureController, ReportsStartRecordAccessDenied) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+
+ // Prepare fake media types
+ MockAvailableMediaTypes(engine.Get(), capture_source.Get(), 1, 1);
+
+ // Cause start record to fail
+ EXPECT_CALL(*engine.Get(), GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_RECORD, _))
+ .Times(1)
+ .WillOnce(Return(E_ACCESSDENIED))
+
+ EXPECT_CALL(*engine.Get(), StartRecord).Times(0);
+ EXPECT_CALL(*engine.Get(), StopRecord).Times(0);
+ EXPECT_CALL(*camera, OnStartRecordSucceeded).Times(0);
+ EXPECT_CALL(*camera,
+ OnStartRecordFailed(Eq(CameraResult::kAccessDenied),
+ Eq("Failed to start video recording")))
+ .Times(1);
+
+ capture_controller->StartRecord("mock_path", -1);
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+}
+
+// Verifies the asynchronous start-record failure path: sink setup and
+// StartRecord() succeed, but the MF_CAPTURE_ENGINE_RECORD_STARTED event
+// arrives with E_FAIL. The camera must get OnStartRecordFailed(kError,
+// "Unspecified error") and the destructor must NOT try to stop a recording
+// that never started.
+TEST(CaptureController, ReportsStartRecordErrorEvent) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+
+ // Prepare fake media types
+ MockAvailableMediaTypes(engine.Get(), capture_source.Get(), 1, 1);
+
+ // Start record
+ ComPtr<MockCaptureRecordSink> record_sink = new MockCaptureRecordSink();
+ std::string mock_path_to_video = "mock_path_to_video";
+
+ EXPECT_CALL(*engine.Get(), StartRecord()).Times(1).WillOnce(Return(S_OK));
+
+ // Hand out the mock record sink through the COM out-parameter; AddRef
+ // because the caller receives an owning reference.
+ EXPECT_CALL(*engine.Get(), GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_RECORD, _))
+ .Times(1)
+ .WillOnce([src_sink = record_sink](MF_CAPTURE_ENGINE_SINK_TYPE sink_type,
+ IMFCaptureSink** target_sink) {
+ *target_sink = src_sink.Get();
+ src_sink->AddRef();
+ return S_OK;
+ });
+
+ // Sink configuration: streams reset, audio + video streams added,
+ // output file name set — all succeed.
+ EXPECT_CALL(*record_sink.Get(), RemoveAllStreams)
+ .Times(1)
+ .WillOnce(Return(S_OK));
+ EXPECT_CALL(*record_sink.Get(), AddStream)
+ .Times(2)
+ .WillRepeatedly(Return(S_OK));
+ EXPECT_CALL(*record_sink.Get(), SetOutputFileName)
+ .Times(1)
+ .WillOnce(Return(S_OK));
+
+ capture_controller->StartRecord(mock_path_to_video, -1);
+
+ // Send a start record failed event
+ EXPECT_CALL(*camera, OnStartRecordSucceeded).Times(0);
+ EXPECT_CALL(*camera, OnStartRecordFailed(Eq(CameraResult::kError),
+ Eq("Unspecified error")))
+ .Times(1);
+
+ engine->CreateFakeEvent(E_FAIL, MF_CAPTURE_ENGINE_RECORD_STARTED);
+
+ // Destructor shouldn't attempt to stop the recording that failed to start.
+ EXPECT_CALL(*engine.Get(), StopRecord).Times(0);
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+ record_sink = nullptr;
+}
+
+// Access-denied variant of ReportsStartRecordErrorEvent: the
+// MF_CAPTURE_ENGINE_RECORD_STARTED event carries E_ACCESSDENIED, which must
+// map to OnStartRecordFailed(kAccessDenied, "Access is denied.") and leave
+// the destructor with nothing to stop.
+TEST(CaptureController, ReportsStartRecordAccessDeniedEvent) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+
+ // Prepare fake media types
+ MockAvailableMediaTypes(engine.Get(), capture_source.Get(), 1, 1);
+
+ // Start record
+ ComPtr<MockCaptureRecordSink> record_sink = new MockCaptureRecordSink();
+ std::string mock_path_to_video = "mock_path_to_video";
+
+ EXPECT_CALL(*engine.Get(), StartRecord()).Times(1).WillOnce(Return(S_OK));
+
+ // Hand out the mock record sink through the COM out-parameter; AddRef
+ // because the caller receives an owning reference.
+ EXPECT_CALL(*engine.Get(), GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_RECORD, _))
+ .Times(1)
+ .WillOnce([src_sink = record_sink](MF_CAPTURE_ENGINE_SINK_TYPE sink_type,
+ IMFCaptureSink** target_sink) {
+ *target_sink = src_sink.Get();
+ src_sink->AddRef();
+ return S_OK;
+ });
+
+ EXPECT_CALL(*record_sink.Get(), RemoveAllStreams)
+ .Times(1)
+ .WillOnce(Return(S_OK));
+ EXPECT_CALL(*record_sink.Get(), AddStream)
+ .Times(2)
+ .WillRepeatedly(Return(S_OK));
+ EXPECT_CALL(*record_sink.Get(), SetOutputFileName)
+ .Times(1)
+ .WillOnce(Return(S_OK));
+
+ // Send a start record failed event
+ capture_controller->StartRecord(mock_path_to_video, -1);
+
+ EXPECT_CALL(*camera, OnStartRecordSucceeded).Times(0);
+ EXPECT_CALL(*camera, OnStartRecordFailed(Eq(CameraResult::kAccessDenied),
+ Eq("Access is denied.")))
+ .Times(1);
+
+ engine->CreateFakeEvent(E_ACCESSDENIED, MF_CAPTURE_ENGINE_RECORD_STARTED);
+
+ // Destructor shouldn't attempt to stop the recording that failed to start.
+ EXPECT_CALL(*engine.Get(), StopRecord).Times(0);
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+ record_sink = nullptr;
+}
+
+// Verifies the happy path for stopping a recording: StopRecord() forwards to
+// the engine, and when the MF_CAPTURE_ENGINE_RECORD_STOPPED event arrives
+// with S_OK the camera receives OnStopRecordSucceeded with the path that the
+// recording was started with.
+TEST(CaptureController, StopRecordSuccess) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<MockTextureRegistrar>();
+
+ int64_t mock_texture_id = 1234;
+
+ // Initialize capture controller to be able to start preview
+ MockInitCaptureController(capture_controller.get(), texture_registrar.get(),
+ engine.Get(), camera.get(), mock_texture_id);
+
+ ComPtr<MockCaptureSource> capture_source = new MockCaptureSource();
+
+ // Prepare fake media types
+ MockAvailableMediaTypes(engine.Get(), capture_source.Get(), 1, 1);
+
+ // Start record
+ ComPtr<MockCaptureRecordSink> record_sink = new MockCaptureRecordSink();
+ std::string mock_path_to_video = "mock_path_to_video";
+ MockRecordStart(capture_controller.get(), engine.Get(), record_sink.Get(),
+ camera.get(), mock_path_to_video);
+
+ // Request to stop record
+ EXPECT_CALL(*(engine.Get()), StopRecord(true, false))
+ .Times(1)
+ .WillOnce(Return(S_OK));
+ capture_controller->StopRecord();
+
+ // OnStopRecordSucceeded should be called with mocked file path
+ EXPECT_CALL(*camera, OnStopRecordSucceeded(Eq(mock_path_to_video))).Times(1);
+ EXPECT_CALL(*camera, OnStopRecordFailed).Times(0);
+
+ // Completion is asynchronous: the success callback fires on the event.
+ engine->CreateFakeEvent(S_OK, MF_CAPTURE_ENGINE_RECORD_STOPPED);
+
+ // Explicit teardown order before the test ends.
+ capture_controller = nullptr;
+ texture_registrar = nullptr;
+ engine = nullptr;
+ camera = nullptr;
+ record_sink = nullptr;
+}
+
+TEST(CaptureController, ReportsStopRecordError) {
+ ComPtr<MockCaptureEngine> engine = new MockCaptureEngine();
+ std::unique_ptr<MockCamera> camera =
+ std::make_unique<MockCamera>(MOCK_DEVICE_ID);
+ std::unique_ptr<CaptureControllerImpl> capture_controller =
+ std::make_unique<CaptureControllerImpl>(camera.get());
+ std::unique_ptr<MockTextureRegistrar> texture_registrar =
+ std::make_unique<