diff --git a/.ci/scripts/build_all_plugins.sh b/.ci/scripts/build_all_plugins.sh
new file mode 100644
index 000000000000..008dea7c5e13
--- /dev/null
+++ b/.ci/scripts/build_all_plugins.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
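+# Requires the all_plugins app generated by the create_all_plugins_app.sh step (see .ci/targets/build_all_plugins.yaml).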
+cd all_plugins
+flutter build windows --debug
+flutter build windows --release
diff --git a/.ci/scripts/build_examples_uwp.sh b/.ci/scripts/build_examples_uwp.sh
new file mode 100644
index 000000000000..639cb054e4b7
--- /dev/null
+++ b/.ci/scripts/build_examples_uwp.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+dart ./script/tool/bin/flutter_plugin_tools.dart build-examples --winuwp \
+ --packages-for-branch
diff --git a/.ci/scripts/build_examples_win32.sh b/.ci/scripts/build_examples_win32.sh
new file mode 100644
index 000000000000..8c090f4b78d2
--- /dev/null
+++ b/.ci/scripts/build_examples_win32.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+dart ./script/tool/bin/flutter_plugin_tools.dart build-examples --windows \
+ --packages-for-branch
diff --git a/.ci/scripts/create_all_plugins_app.sh b/.ci/scripts/create_all_plugins_app.sh
new file mode 100644
index 000000000000..196fef9b06c9
--- /dev/null
+++ b/.ci/scripts/create_all_plugins_app.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+dart ./script/tool/bin/flutter_plugin_tools.dart all-plugins-app \
+ --output-dir=. --exclude script/configs/exclude_all_plugins_app.yaml
diff --git a/.ci/scripts/drive_examples_win32.sh b/.ci/scripts/drive_examples_win32.sh
new file mode 100644
index 000000000000..63abc06bec5a
--- /dev/null
+++ b/.ci/scripts/drive_examples_win32.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+dart ./script/tool/bin/flutter_plugin_tools.dart drive-examples --windows \
+ --packages-for-branch
diff --git a/.ci/scripts/native_test_win32.sh b/.ci/scripts/native_test_win32.sh
new file mode 100644
index 000000000000..938515784412
--- /dev/null
+++ b/.ci/scripts/native_test_win32.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+dart ./script/tool/bin/flutter_plugin_tools.dart native-test --windows \
+ --no-integration --packages-for-branch
diff --git a/.ci/scripts/plugin_tools_tests.sh b/.ci/scripts/plugin_tools_tests.sh
new file mode 100644
index 000000000000..96eec4349f08
--- /dev/null
+++ b/.ci/scripts/plugin_tools_tests.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+cd script/tool
+dart pub run test
diff --git a/packages/google_maps_flutter/google_maps_flutter_web/example/regen_mocks.sh b/.ci/scripts/prepare_tool.sh
old mode 100755
new mode 100644
similarity index 56%
rename from packages/google_maps_flutter/google_maps_flutter_web/example/regen_mocks.sh
rename to .ci/scripts/prepare_tool.sh
index 78bcdc0f9e28..1095e2189a36
--- a/packages/google_maps_flutter/google_maps_flutter_web/example/regen_mocks.sh
+++ b/.ci/scripts/prepare_tool.sh
@@ -1,10 +1,10 @@
-#!/usr/bin/bash
+#!/bin/bash
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-flutter pub get
+# Set FETCH_HEAD so that "git merge-base" works.
+git fetch origin master
-echo "(Re)generating mocks."
-
-flutter pub run build_runner build --delete-conflicting-outputs
+cd script/tool
+dart pub get
diff --git a/.ci/targets/build_all_plugins.yaml b/.ci/targets/build_all_plugins.yaml
new file mode 100644
index 000000000000..b51a5b18dfd9
--- /dev/null
+++ b/.ci/targets/build_all_plugins.yaml
@@ -0,0 +1,7 @@
+tasks:
+ - name: prepare tool
+ script: .ci/scripts/prepare_tool.sh
+ - name: create all_plugins app
+ script: .ci/scripts/create_all_plugins_app.sh
+ - name: build all_plugins
+ script: .ci/scripts/build_all_plugins.sh
diff --git a/.ci/targets/plugin_tools_tests.yaml b/.ci/targets/plugin_tools_tests.yaml
new file mode 100644
index 000000000000..265e74bdd06b
--- /dev/null
+++ b/.ci/targets/plugin_tools_tests.yaml
@@ -0,0 +1,5 @@
+tasks:
+ - name: prepare tool
+ script: .ci/scripts/prepare_tool.sh
+ - name: tool unit tests
+ script: .ci/scripts/plugin_tools_tests.sh
diff --git a/.ci/targets/uwp_build_and_platform_tests.yaml b/.ci/targets/uwp_build_and_platform_tests.yaml
new file mode 100644
index 000000000000..a7f070776ff1
--- /dev/null
+++ b/.ci/targets/uwp_build_and_platform_tests.yaml
@@ -0,0 +1,5 @@
+tasks:
+ - name: prepare tool
+ script: .ci/scripts/prepare_tool.sh
+ - name: build examples (UWP)
+ script: .ci/scripts/build_examples_uwp.sh
diff --git a/.ci/targets/windows_build_and_platform_tests.yaml b/.ci/targets/windows_build_and_platform_tests.yaml
new file mode 100644
index 000000000000..cda3e57f75d2
--- /dev/null
+++ b/.ci/targets/windows_build_and_platform_tests.yaml
@@ -0,0 +1,9 @@
+tasks:
+ - name: prepare tool
+ script: .ci/scripts/prepare_tool.sh
+ - name: build examples (Win32)
+ script: .ci/scripts/build_examples_win32.sh
+ - name: native unit tests (Win32)
+ script: .ci/scripts/native_test_win32.sh
+ - name: drive examples (Win32)
+ script: .ci/scripts/drive_examples_win32.sh
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 000000000000..a64acf7692f9
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,54 @@
+name: release
+on:
+ push:
+ branches:
+ - master
+
+jobs:
+ release:
+ if: github.repository_owner == 'flutter'
+ name: release
+ permissions:
+ # Release needs to push a tag back to the repo.
+ contents: write
+ runs-on: ubuntu-latest
+ steps:
+ - name: "Install Flutter"
+ # GitHub Actions doesn't support templates, so it is hard to share this snippet with another action.
+ # If we eventually need this in more workflows, we could create a shell script that contains this
+ # snippet.
+ run: |
+ cd $HOME
+ git clone https://github.com/flutter/flutter.git --depth 1 -b stable _flutter
+ echo "$HOME/_flutter/bin" >> $GITHUB_PATH
+ cd $GITHUB_WORKSPACE
+ # Checks out a copy of the repo.
+ - name: Check out code
+ uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ with:
+ fetch-depth: 0 # Fetch all history so the tool can get all the tags to determine version.
+ - name: Set up tools
+ run: dart pub get
+ working-directory: ${{ github.workspace }}/script/tool
+
+ # This workflow should be the last to run, so wait for all the other tests to succeed.
+ - name: Wait on all tests
+ uses: lewagon/wait-on-check-action@5e937358caba2c7876a2ee06e4a48d0664fe4967
+ with:
+ ref: ${{ github.sha }}
+ running-workflow-name: 'release'
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+ wait-interval: 180 # seconds
+ allowed-conclusions: success,neutral
+ # verbose: true produces too many logs, which can hang the GitHub Actions web UI.
+ verbose: false
+
+ - name: run release
+ run: |
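+ # Publish all plugins that changed relative to the parent commit (HEAD~) and push their release tags.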
+ git config --global user.name ${{ secrets.USER_NAME }}
+ git config --global user.email ${{ secrets.USER_EMAIL }}
+ dart ./script/tool/lib/src/main.dart publish-plugin --all-changed --base-sha=HEAD~ --skip-confirmation --remote=origin
+ env: {PUB_CREDENTIALS: "${{ secrets.PUB_CREDENTIALS }}"}
+
+ env:
+ DEFAULT_BRANCH: master
diff --git a/packages/android_alarm_manager/android/lint-baseline.xml b/packages/android_alarm_manager/android/lint-baseline.xml
new file mode 100644
index 000000000000..de588614fdb2
--- /dev/null
+++ b/packages/android_alarm_manager/android/lint-baseline.xml
@@ -0,0 +1,59 @@
[59 lines of XML lint-baseline issue entries not shown]
diff --git a/packages/android_alarm_manager/example/android/app/src/androidTest/java/io/plugins/DartIntegrationTest.java b/packages/android_alarm_manager/example/android/app/src/androidTest/java/io/plugins/DartIntegrationTest.java
new file mode 100644
index 000000000000..0f4298dca155
--- /dev/null
+++ b/packages/android_alarm_manager/example/android/app/src/androidTest/java/io/plugins/DartIntegrationTest.java
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface DartIntegrationTest {}
diff --git a/packages/android_intent/example/android/app/src/androidTestDebug/java/io/flutter/plugins/DartIntegrationTest.java b/packages/android_intent/example/android/app/src/androidTestDebug/java/io/flutter/plugins/DartIntegrationTest.java
new file mode 100644
index 000000000000..0f4298dca155
--- /dev/null
+++ b/packages/android_intent/example/android/app/src/androidTestDebug/java/io/flutter/plugins/DartIntegrationTest.java
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface DartIntegrationTest {}
diff --git a/packages/battery/battery/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java b/packages/battery/battery/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java
new file mode 100644
index 000000000000..0f4298dca155
--- /dev/null
+++ b/packages/battery/battery/example/android/app/src/androidTest/java/io/flutter/plugins/DartIntegrationTest.java
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface DartIntegrationTest {}
diff --git a/packages/camera/camera/android/lint-baseline.xml b/packages/camera/camera/android/lint-baseline.xml
new file mode 100644
index 000000000000..4ddaafa87988
--- /dev/null
+++ b/packages/camera/camera/android/lint-baseline.xml
@@ -0,0 +1,114 @@
[114 lines of XML lint-baseline issue entries not shown]
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraCaptureCallback.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraCaptureCallback.java
new file mode 100644
index 000000000000..805f18298958
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraCaptureCallback.java
@@ -0,0 +1,183 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.util.Log;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.types.CameraCaptureProperties;
+import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper;
+
+/**
+ * A callback object for tracking the progress of a {@link android.hardware.camera2.CaptureRequest}
+ * submitted to the camera device.
+ */
+class CameraCaptureCallback extends CaptureCallback {
+ private static final String TAG = "CameraCaptureCallback";
+ private final CameraCaptureStateListener cameraStateListener;
+ private CameraState cameraState;
+ private final CaptureTimeoutsWrapper captureTimeouts;
+ private final CameraCaptureProperties captureProps;
+
+ private CameraCaptureCallback(
+ @NonNull CameraCaptureStateListener cameraStateListener,
+ @NonNull CaptureTimeoutsWrapper captureTimeouts,
+ @NonNull CameraCaptureProperties captureProps) {
+ cameraState = CameraState.STATE_PREVIEW;
+ this.cameraStateListener = cameraStateListener;
+ this.captureTimeouts = captureTimeouts;
+ this.captureProps = captureProps;
+ }
+
+ /**
+ * Creates a new instance of the {@link CameraCaptureCallback} class.
+ *
+ * @param cameraStateListener instance which will be called when the camera state changes.
+ * @param captureTimeouts specifying the different timeout counters that should be taken into
+ * account.
+ * @return a configured instance of the {@link CameraCaptureCallback} class.
+ */
+ public static CameraCaptureCallback create(
+ @NonNull CameraCaptureStateListener cameraStateListener,
+ @NonNull CaptureTimeoutsWrapper captureTimeouts,
+ @NonNull CameraCaptureProperties captureProps) {
+ return new CameraCaptureCallback(cameraStateListener, captureTimeouts, captureProps);
+ }
+
+ /**
+ * Gets the current {@link CameraState}.
+ *
+ * @return the current {@link CameraState}.
+ */
+ public CameraState getCameraState() {
+ return cameraState;
+ }
+
+ /**
+ * Sets the {@link CameraState}.
+ *
+ * @param state the camera is currently in.
+ */
+ public void setCameraState(@NonNull CameraState state) {
+ cameraState = state;
+ }
+
+ private void process(CaptureResult result) {
+ Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+ Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
+
+ // Update capture properties
+ if (result instanceof TotalCaptureResult) {
+ Float lensAperture = result.get(CaptureResult.LENS_APERTURE);
+ Long sensorExposureTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
+ Integer sensorSensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
+ this.captureProps.setLastLensAperture(lensAperture);
+ this.captureProps.setLastSensorExposureTime(sensorExposureTime);
+ this.captureProps.setLastSensorSensitivity(sensorSensitivity);
+ }
+
+ if (cameraState != CameraState.STATE_PREVIEW) {
+ Log.d(
+ TAG,
+ "CameraCaptureCallback | state: "
+ + cameraState
+ + " | afState: "
+ + afState
+ + " | aeState: "
+ + aeState);
+ }
+
+ switch (cameraState) {
+ case STATE_PREVIEW:
+ {
+ // We have nothing to do when the camera preview is working normally.
+ break;
+ }
+ case STATE_WAITING_FOCUS:
+ {
+ if (afState == null) {
+ return;
+ } else if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
+ || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
+ handleWaitingFocusState(aeState);
+ } else if (captureTimeouts.getPreCaptureFocusing().getIsExpired()) {
+ Log.w(TAG, "Focus timeout, moving on with capture");
+ handleWaitingFocusState(aeState);
+ }
+
+ break;
+ }
+ case STATE_WAITING_PRECAPTURE_START:
+ {
+ // CONTROL_AE_STATE can be null on some devices
+ if (aeState == null
+ || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
+ || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
+ || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
+ setCameraState(CameraState.STATE_WAITING_PRECAPTURE_DONE);
+ } else if (captureTimeouts.getPreCaptureMetering().getIsExpired()) {
+ Log.w(TAG, "Metering timeout waiting for pre-capture to start, moving on with capture");
+
+ setCameraState(CameraState.STATE_WAITING_PRECAPTURE_DONE);
+ }
+ break;
+ }
+ case STATE_WAITING_PRECAPTURE_DONE:
+ {
+ // CONTROL_AE_STATE can be null on some devices
+ if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
+ cameraStateListener.onConverged();
+ } else if (captureTimeouts.getPreCaptureMetering().getIsExpired()) {
+ Log.w(
+ TAG, "Metering timeout waiting for pre-capture to finish, moving on with capture");
+ cameraStateListener.onConverged();
+ }
+
+ break;
+ }
+ }
+ }
+
+ private void handleWaitingFocusState(Integer aeState) {
+ // CONTROL_AE_STATE can be null on some devices
+ if (aeState == null || aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED) {
+ cameraStateListener.onConverged();
+ } else {
+ cameraStateListener.onPrecapture();
+ }
+ }
+
+ @Override
+ public void onCaptureProgressed(
+ @NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request,
+ @NonNull CaptureResult partialResult) {
+ process(partialResult);
+ }
+
+ @Override
+ public void onCaptureCompleted(
+ @NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request,
+ @NonNull TotalCaptureResult result) {
+ process(result);
+ }
+
+ /** An interface that describes the different state changes implementers can be informed about. */
+ interface CameraCaptureStateListener {
+
+ /** Called when the {@link android.hardware.camera2.CaptureRequest} has converged. */
+ void onConverged();
+
+ /**
+ * Called when the {@link android.hardware.camera2.CaptureRequest} enters the pre-capture state.
+ */
+ void onPrecapture();
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java
new file mode 100644
index 000000000000..951a2797d68f
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraRegionUtils.java
@@ -0,0 +1,182 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.annotation.TargetApi;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.os.Build;
+import android.util.Size;
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import java.util.Arrays;
+
+/**
+ * Utility class offering functions to calculate values regarding the camera boundaries.
+ *
+ * <p>The functions are used to calculate focus and exposure settings.
+ */
+public final class CameraRegionUtils {
+
+ /**
+ * Obtains the boundaries for the currently active camera, which can be used for calculating
+ * MeteringRectangle instances required for setting focus or exposure settings.
+ *
+ * @param cameraProperties - Collection of the characteristics for the current camera device.
+ * @param requestBuilder - The request builder for the current capture request.
+ * @return The boundaries for the current camera device.
+ */
+ public static Size getCameraBoundaries(
+ @NonNull CameraProperties cameraProperties, @NonNull CaptureRequest.Builder requestBuilder) {
+ if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.P
+ && supportsDistortionCorrection(cameraProperties)) {
+ // Get the current distortion correction mode.
+ Integer distortionCorrectionMode =
+ requestBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE);
+
+ // Return the correct boundaries depending on the mode.
+ android.graphics.Rect rect;
+ if (distortionCorrectionMode == null
+ || distortionCorrectionMode == CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) {
+ rect = cameraProperties.getSensorInfoPreCorrectionActiveArraySize();
+ } else {
+ rect = cameraProperties.getSensorInfoActiveArraySize();
+ }
+
+ return SizeFactory.create(rect.width(), rect.height());
+ } else {
+ // No distortion correction support.
+ return cameraProperties.getSensorInfoPixelArraySize();
+ }
+ }
+
+ /**
+ * Converts a point into a {@link MeteringRectangle} with the supplied coordinates as the center
+ * point.
+ *
+ * <p>Since the Camera API (due to cross-platform constraints) only accepts a point when
+ * configuring a specific focus or exposure area and Android requires a rectangle to configure
+ * these settings there is a need to convert the point into a rectangle. This method will create
+ * the required rectangle with an arbitrary size that is a 10th of the current viewport and the
+ * coordinates as the center point.
+ *
+ * @param boundaries - The camera boundaries to calculate the metering rectangle for.
+ * @param x the horizontal coordinate, where 0 <= x <= 1.
+ * @param y the vertical coordinate, where 0 <= y <= 1.
+ * @return The dimensions of the metering rectangle based on the supplied coordinates and
+ * boundaries.
+ */
+ public static MeteringRectangle convertPointToMeteringRectangle(
+ @NonNull Size boundaries,
+ double x,
+ double y,
+ @NonNull PlatformChannel.DeviceOrientation orientation) {
+ assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0);
+ assert (x >= 0 && x <= 1);
+ assert (y >= 0 && y <= 1);
+ // Rotate the coordinates to match the device orientation.
+ double oldX = x, oldY = y;
+ switch (orientation) {
+ case PORTRAIT_UP: // 90 ccw.
+ y = 1 - oldX;
+ x = oldY;
+ break;
+ case PORTRAIT_DOWN: // 90 cw.
+ x = 1 - oldY;
+ y = oldX;
+ break;
+ case LANDSCAPE_LEFT:
+ // No rotation required.
+ break;
+ case LANDSCAPE_RIGHT: // 180.
+ x = 1 - x;
+ y = 1 - y;
+ break;
+ }
+ // Interpolate the target coordinate.
+ int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1)));
+ int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1)));
+ // Determine the dimensions of the metering rectangle (10th of the viewport).
+ int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d);
+ int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d);
+ // Adjust target coordinate to represent top-left corner of metering rectangle.
+ targetX -= targetWidth / 2;
+ targetY -= targetHeight / 2;
+ // Adjust target coordinate as to not fall out of bounds.
+ if (targetX < 0) {
+ targetX = 0;
+ }
+ if (targetY < 0) {
+ targetY = 0;
+ }
+ int maxTargetX = boundaries.getWidth() - 1 - targetWidth;
+ int maxTargetY = boundaries.getHeight() - 1 - targetHeight;
+ if (targetX > maxTargetX) {
+ targetX = maxTargetX;
+ }
+ if (targetY > maxTargetY) {
+ targetY = maxTargetY;
+ }
+ // Build the metering rectangle.
+ return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1);
+ }
+
+ @TargetApi(Build.VERSION_CODES.P)
+ private static boolean supportsDistortionCorrection(CameraProperties cameraProperties) {
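+ // Distortion correction counts as supported when at least one mode other than OFF is reported.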
+ int[] availableDistortionCorrectionModes =
+ cameraProperties.getDistortionCorrectionAvailableModes();
+ if (availableDistortionCorrectionModes == null) {
+ availableDistortionCorrectionModes = new int[0];
+ }
+ long nonOffModesSupported =
+ Arrays.stream(availableDistortionCorrectionModes)
+ .filter((value) -> value != CaptureRequest.DISTORTION_CORRECTION_MODE_OFF)
+ .count();
+ return nonOffModesSupported > 0;
+ }
+
+ /** Factory class that assists in creating a {@link MeteringRectangle} instance. */
+ static class MeteringRectangleFactory {
+ /**
+ * Creates a new instance of the {@link MeteringRectangle} class.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param x coordinate >= 0.
+ * @param y coordinate >= 0.
+ * @param width width >= 0.
+ * @param height height >= 0.
+ * @param meteringWeight weight between {@value MeteringRectangle#METERING_WEIGHT_MIN} and
+ * {@value MeteringRectangle#METERING_WEIGHT_MAX} inclusively.
+ * @return new instance of the {@link MeteringRectangle} class.
+ * @throws IllegalArgumentException if any of the parameters were negative.
+ */
+ @VisibleForTesting
+ public static MeteringRectangle create(
+ int x, int y, int width, int height, int meteringWeight) {
+ return new MeteringRectangle(x, y, width, height, meteringWeight);
+ }
+ }
+
+ /** Factory class that assists in creating a {@link Size} instance. */
+ static class SizeFactory {
+ /**
+ * Creates a new instance of the {@link Size} class.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param width width >= 0.
+ * @param height height >= 0.
+ * @return new instance of the {@link Size} class.
+ */
+ @VisibleForTesting
+ public static Size create(int width, int height) {
+ return new Size(width, height);
+ }
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraState.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraState.java
new file mode 100644
index 000000000000..ac48caf18ac6
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/CameraState.java
@@ -0,0 +1,27 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+/**
+ * These are the states that the camera can be in. The camera can only take one photo at a time so
+ * this state describes the state of the camera itself. The camera works like a pipeline where we
+ * feed it requests through. It can only process one tasks at a time.
+ */
+public enum CameraState {
+ /** Idle, showing preview and not capturing anything. */
+ STATE_PREVIEW,
+
+ /** Starting and waiting for autofocus to complete. */
+ STATE_WAITING_FOCUS,
+
+ /** Start performing autoexposure. */
+ STATE_WAITING_PRECAPTURE_START,
+
+ /** waiting for autoexposure to complete. */
+ STATE_WAITING_PRECAPTURE_DONE,
+
+ /** Capturing an image. */
+ STATE_CAPTURING,
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/ImageSaver.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/ImageSaver.java
new file mode 100644
index 000000000000..821c9a50c13f
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/ImageSaver.java
@@ -0,0 +1,105 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import android.media.Image;
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/** Saves a JPEG {@link Image} into the specified {@link File}. */
+public class ImageSaver implements Runnable {
+
+ /** The JPEG image */
+ private final Image image;
+
+ /** The file we save the image into. */
+ private final File file;
+
+ /** Used to report the status of the save action. */
+ private final Callback callback;
+
+ /**
+ * Creates an instance of the ImageSaver runnable
+ *
+ * @param image - The image to save
+ * @param file - The file to save the image to
+ * @param callback - The callback that is run on completion, or when an error is encountered.
+ */
+ ImageSaver(@NonNull Image image, @NonNull File file, @NonNull Callback callback) {
+ this.image = image;
+ this.file = file;
+ this.callback = callback;
+ }
+
+ @Override
+ public void run() {
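+ // Copy the JPEG bytes out of the image's single plane before writing them to the target file.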
+ ByteBuffer buffer = image.getPlanes()[0].getBuffer();
+ byte[] bytes = new byte[buffer.remaining()];
+ buffer.get(bytes);
+ FileOutputStream output = null;
+ try {
+ output = FileOutputStreamFactory.create(file);
+ output.write(bytes);
+
+ callback.onComplete(file.getAbsolutePath());
+
+ } catch (IOException e) {
+ callback.onError("IOError", "Failed saving image");
+ } finally {
+ image.close();
+ if (null != output) {
+ try {
+ output.close();
+ } catch (IOException e) {
+ callback.onError("cameraAccess", e.getMessage());
+ }
+ }
+ }
+ }
+
+ /**
+ * The interface for the callback that is passed to ImageSaver, for detecting completion or
+ * failure of the image saving task.
+ */
+ public interface Callback {
+ /**
+ * Called when the image file has been saved successfully.
+ *
+ * @param absolutePath - The absolute path of the file that was saved.
+ */
+ void onComplete(String absolutePath);
+
+ /**
+ * Called when an error is encountered while saving the image file.
+ *
+ * @param errorCode - The error code.
+ * @param errorMessage - The human readable error message.
+ */
+ void onError(String errorCode, String errorMessage);
+ }
+
+ /** Factory class that assists in creating a {@link FileOutputStream} instance. */
+ static class FileOutputStreamFactory {
+ /**
+ * Creates a new instance of the {@link FileOutputStream} class.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param file - The file to create the output stream for
+ * @return new instance of the {@link FileOutputStream} class.
+ * @throws FileNotFoundException when the supplied file could not be found.
+ */
+ @VisibleForTesting
+ public static FileOutputStream create(File file) throws FileNotFoundException {
+ return new FileOutputStream(file);
+ }
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactory.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactory.java
new file mode 100644
index 000000000000..b91f9a1c03f7
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactory.java
@@ -0,0 +1,149 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features;
+
+import android.app.Activity;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+
+/**
+ * Factory for creating the supported feature implementation controlling different aspects of the
+ * {@link android.hardware.camera2.CaptureRequest}.
+ */
+public interface CameraFeatureFactory {
+
+ /**
+ * Creates a new instance of the auto focus feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @param recordingVideo indicates if the camera is currently recording.
+ * @return newly created instance of the AutoFocusFeature class.
+ */
+ AutoFocusFeature createAutoFocusFeature(
+ @NonNull CameraProperties cameraProperties, boolean recordingVideo);
+
+ /**
+ * Creates a new instance of the exposure lock feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @return newly created instance of the ExposureLockFeature class.
+ */
+ ExposureLockFeature createExposureLockFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the exposure offset feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @return newly created instance of the ExposureOffsetFeature class.
+ */
+ ExposureOffsetFeature createExposureOffsetFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the flash feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @return newly created instance of the FlashFeature class.
+ */
+ FlashFeature createFlashFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the resolution feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @param initialSetting initial resolution preset.
+ * @param cameraName the name of the camera which can be used to identify the camera device.
+ * @return newly created instance of the ResolutionFeature class.
+ */
+ ResolutionFeature createResolutionFeature(
+ @NonNull CameraProperties cameraProperties,
+ ResolutionPreset initialSetting,
+ String cameraName);
+
+ /**
+ * Creates a new instance of the focus point feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @param sensorOrientationFeature instance of the SensorOrientationFeature class containing
+ * information about the sensor and device orientation.
+ * @return newly created instance of the FocusPointFeature class.
+ */
+ FocusPointFeature createFocusPointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature);
+
+ /**
+ * Creates a new instance of the FPS range feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @return newly created instance of the FpsRangeFeature class.
+ */
+ FpsRangeFeature createFpsRangeFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the sensor orientation feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @param activity current activity associated with the camera plugin.
+ * @param dartMessenger instance of the DartMessenger class, used to send state updates back to
+ * Dart.
+ * @return newly created instance of the SensorOrientationFeature class.
+ */
+ SensorOrientationFeature createSensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger);
+
+ /**
+ * Creates a new instance of the zoom level feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @return newly created instance of the ZoomLevelFeature class.
+ */
+ ZoomLevelFeature createZoomLevelFeature(@NonNull CameraProperties cameraProperties);
+
+ /**
+ * Creates a new instance of the exposure point feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @param sensorOrientationFeature instance of the SensorOrientationFeature class containing
+ * information about the sensor and device orientation.
+ * @return newly created instance of the ExposurePointFeature class.
+ */
+ ExposurePointFeature createExposurePointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature);
+
+ /**
+ * Creates a new instance of the noise reduction feature.
+ *
+ * @param cameraProperties instance of the CameraProperties class containing information about the
+ * camera's features.
+ * @return newly created instance of the NoiseReductionFeature class.
+ */
+ NoiseReductionFeature createNoiseReductionFeature(@NonNull CameraProperties cameraProperties);
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactoryImpl.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactoryImpl.java
new file mode 100644
index 000000000000..95a8c06caa0a
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatureFactoryImpl.java
@@ -0,0 +1,98 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features;
+
+import android.app.Activity;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+
+/**
+ * Implementation of the {@link CameraFeatureFactory} interface creating the supported feature
+ * implementation controlling different aspects of the {@link
+ * android.hardware.camera2.CaptureRequest}.
+ */
+public class CameraFeatureFactoryImpl implements CameraFeatureFactory {
+
+ @Override
+ public AutoFocusFeature createAutoFocusFeature(
+ @NonNull CameraProperties cameraProperties, boolean recordingVideo) {
+ return new AutoFocusFeature(cameraProperties, recordingVideo);
+ }
+
+ @Override
+ public ExposureLockFeature createExposureLockFeature(@NonNull CameraProperties cameraProperties) {
+ return new ExposureLockFeature(cameraProperties);
+ }
+
+ @Override
+ public ExposureOffsetFeature createExposureOffsetFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return new ExposureOffsetFeature(cameraProperties);
+ }
+
+ @Override
+ public FlashFeature createFlashFeature(@NonNull CameraProperties cameraProperties) {
+ return new FlashFeature(cameraProperties);
+ }
+
+ @Override
+ public ResolutionFeature createResolutionFeature(
+ @NonNull CameraProperties cameraProperties,
+ ResolutionPreset initialSetting,
+ String cameraName) {
+ return new ResolutionFeature(cameraProperties, initialSetting, cameraName);
+ }
+
+ @Override
+ public FocusPointFeature createFocusPointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return new FocusPointFeature(cameraProperties, sensorOrientationFeature);
+ }
+
+ @Override
+ public FpsRangeFeature createFpsRangeFeature(@NonNull CameraProperties cameraProperties) {
+ return new FpsRangeFeature(cameraProperties);
+ }
+
+ @Override
+ public SensorOrientationFeature createSensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger) {
+ return new SensorOrientationFeature(cameraProperties, activity, dartMessenger);
+ }
+
+ @Override
+ public ZoomLevelFeature createZoomLevelFeature(@NonNull CameraProperties cameraProperties) {
+ return new ZoomLevelFeature(cameraProperties);
+ }
+
+ @Override
+ public ExposurePointFeature createExposurePointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return new ExposurePointFeature(cameraProperties, sensorOrientationFeature);
+ }
+
+ @Override
+ public NoiseReductionFeature createNoiseReductionFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return new NoiseReductionFeature(cameraProperties);
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatures.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatures.java
new file mode 100644
index 000000000000..659fd15963e9
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/CameraFeatures.java
@@ -0,0 +1,285 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features;
+
+import android.app.Activity;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * These are all of our available features in the camera. Used in the Camera to access all features
+ * in a simpler way.
+ */
+public class CameraFeatures {
+ private static final String AUTO_FOCUS = "AUTO_FOCUS";
+ private static final String EXPOSURE_LOCK = "EXPOSURE_LOCK";
+ private static final String EXPOSURE_OFFSET = "EXPOSURE_OFFSET";
+ private static final String EXPOSURE_POINT = "EXPOSURE_POINT";
+ private static final String FLASH = "FLASH";
+ private static final String FOCUS_POINT = "FOCUS_POINT";
+ private static final String FPS_RANGE = "FPS_RANGE";
+ private static final String NOISE_REDUCTION = "NOISE_REDUCTION";
+ private static final String REGION_BOUNDARIES = "REGION_BOUNDARIES";
+ private static final String RESOLUTION = "RESOLUTION";
+ private static final String SENSOR_ORIENTATION = "SENSOR_ORIENTATION";
+ private static final String ZOOM_LEVEL = "ZOOM_LEVEL";
+
+ public static CameraFeatures init(
+ CameraFeatureFactory cameraFeatureFactory,
+ CameraProperties cameraProperties,
+ Activity activity,
+ DartMessenger dartMessenger,
+ ResolutionPreset resolutionPreset) {
+ CameraFeatures cameraFeatures = new CameraFeatures();
+ cameraFeatures.setAutoFocus(
+ cameraFeatureFactory.createAutoFocusFeature(cameraProperties, false));
+ cameraFeatures.setExposureLock(
+ cameraFeatureFactory.createExposureLockFeature(cameraProperties));
+ cameraFeatures.setExposureOffset(
+ cameraFeatureFactory.createExposureOffsetFeature(cameraProperties));
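+ // The sensor orientation feature is created before the exposure point and focus point features, which depend on it.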
+ SensorOrientationFeature sensorOrientationFeature =
+ cameraFeatureFactory.createSensorOrientationFeature(
+ cameraProperties, activity, dartMessenger);
+ cameraFeatures.setSensorOrientation(sensorOrientationFeature);
+ cameraFeatures.setExposurePoint(
+ cameraFeatureFactory.createExposurePointFeature(
+ cameraProperties, sensorOrientationFeature));
+ cameraFeatures.setFlash(cameraFeatureFactory.createFlashFeature(cameraProperties));
+ cameraFeatures.setFocusPoint(
+ cameraFeatureFactory.createFocusPointFeature(cameraProperties, sensorOrientationFeature));
+ cameraFeatures.setFpsRange(cameraFeatureFactory.createFpsRangeFeature(cameraProperties));
+ cameraFeatures.setNoiseReduction(
+ cameraFeatureFactory.createNoiseReductionFeature(cameraProperties));
+ cameraFeatures.setResolution(
+ cameraFeatureFactory.createResolutionFeature(
+ cameraProperties, resolutionPreset, cameraProperties.getCameraName()));
+ cameraFeatures.setZoomLevel(cameraFeatureFactory.createZoomLevelFeature(cameraProperties));
+ return cameraFeatures;
+ }
+
+ private Map<String, CameraFeature> featureMap = new HashMap<>();
+
+ /**
+ * Gets a collection of all features that have been set.
+ *
+ * @return A collection of all features that have been set.
+ */
+ public Collection<CameraFeature> getAllFeatures() {
+ return this.featureMap.values();
+ }
+
+ /**
+ * Gets the auto focus feature if it has been set.
+ *
+ * @return the auto focus feature.
+ */
+ public AutoFocusFeature getAutoFocus() {
+ return (AutoFocusFeature) featureMap.get(AUTO_FOCUS);
+ }
+
+ /**
+ * Sets the instance of the auto focus feature.
+ *
+ * @param autoFocus the {@link AutoFocusFeature} instance to set.
+ */
+ public void setAutoFocus(AutoFocusFeature autoFocus) {
+ this.featureMap.put(AUTO_FOCUS, autoFocus);
+ }
+
+ /**
+ * Gets the exposure lock feature if it has been set.
+ *
+ * @return the exposure lock feature.
+ */
+ public ExposureLockFeature getExposureLock() {
+ return (ExposureLockFeature) featureMap.get(EXPOSURE_LOCK);
+ }
+
+ /**
+ * Sets the instance of the exposure lock feature.
+ *
+ * @param exposureLock the {@link ExposureLockFeature} instance to set.
+ */
+ public void setExposureLock(ExposureLockFeature exposureLock) {
+ this.featureMap.put(EXPOSURE_LOCK, exposureLock);
+ }
+
+ /**
+ * Gets the exposure offset feature if it has been set.
+ *
+ * @return the exposure offset feature.
+ */
+ public ExposureOffsetFeature getExposureOffset() {
+ return (ExposureOffsetFeature) featureMap.get(EXPOSURE_OFFSET);
+ }
+
+ /**
+ * Sets the instance of the exposure offset feature.
+ *
+ * @param exposureOffset the {@link ExposureOffsetFeature} instance to set.
+ */
+ public void setExposureOffset(ExposureOffsetFeature exposureOffset) {
+ this.featureMap.put(EXPOSURE_OFFSET, exposureOffset);
+ }
+
+ /**
+ * Gets the exposure point feature if it has been set.
+ *
+ * @return the exposure point feature.
+ */
+ public ExposurePointFeature getExposurePoint() {
+ return (ExposurePointFeature) featureMap.get(EXPOSURE_POINT);
+ }
+
+ /**
+ * Sets the instance of the exposure point feature.
+ *
+ * @param exposurePoint the {@link ExposurePointFeature} instance to set.
+ */
+ public void setExposurePoint(ExposurePointFeature exposurePoint) {
+ this.featureMap.put(EXPOSURE_POINT, exposurePoint);
+ }
+
+ /**
+ * Gets the flash feature if it has been set.
+ *
+ * @return the flash feature.
+ */
+ public FlashFeature getFlash() {
+ return (FlashFeature) featureMap.get(FLASH);
+ }
+
+ /**
+ * Sets the instance of the flash feature.
+ *
+ * @param flash the {@link FlashFeature} instance to set.
+ */
+ public void setFlash(FlashFeature flash) {
+ this.featureMap.put(FLASH, flash);
+ }
+
+ /**
+ * Gets the focus point feature if it has been set.
+ *
+ * @return the focus point feature.
+ */
+ public FocusPointFeature getFocusPoint() {
+ return (FocusPointFeature) featureMap.get(FOCUS_POINT);
+ }
+
+ /**
+ * Sets the instance of the focus point feature.
+ *
+ * @param focusPoint the {@link FocusPointFeature} instance to set.
+ */
+ public void setFocusPoint(FocusPointFeature focusPoint) {
+ this.featureMap.put(FOCUS_POINT, focusPoint);
+ }
+
+ /**
+ * Gets the fps range feature if it has been set.
+ *
+ * @return the fps range feature.
+ */
+ public FpsRangeFeature getFpsRange() {
+ return (FpsRangeFeature) featureMap.get(FPS_RANGE);
+ }
+
+ /**
+ * Sets the instance of the fps range feature.
+ *
+ * @param fpsRange the {@link FpsRangeFeature} instance to set.
+ */
+ public void setFpsRange(FpsRangeFeature fpsRange) {
+ this.featureMap.put(FPS_RANGE, fpsRange);
+ }
+
+ /**
+ * Gets the noise reduction feature if it has been set.
+ *
+ * @return the noise reduction feature.
+ */
+ public NoiseReductionFeature getNoiseReduction() {
+ return (NoiseReductionFeature) featureMap.get(NOISE_REDUCTION);
+ }
+
+ /**
+ * Sets the instance of the noise reduction feature.
+ *
+ * @param noiseReduction the {@link NoiseReductionFeature} instance to set.
+ */
+ public void setNoiseReduction(NoiseReductionFeature noiseReduction) {
+ this.featureMap.put(NOISE_REDUCTION, noiseReduction);
+ }
+
+ /**
+ * Gets the resolution feature if it has been set.
+ *
+ * @return the resolution feature.
+ */
+ public ResolutionFeature getResolution() {
+ return (ResolutionFeature) featureMap.get(RESOLUTION);
+ }
+
+ /**
+ * Sets the instance of the resolution feature.
+ *
+ * @param resolution the {@link ResolutionFeature} instance to set.
+ */
+ public void setResolution(ResolutionFeature resolution) {
+ this.featureMap.put(RESOLUTION, resolution);
+ }
+
+ /**
+ * Gets the sensor orientation feature if it has been set.
+ *
+ * @return the sensor orientation feature.
+ */
+ public SensorOrientationFeature getSensorOrientation() {
+ return (SensorOrientationFeature) featureMap.get(SENSOR_ORIENTATION);
+ }
+
+ /**
+ * Sets the instance of the sensor orientation feature.
+ *
+ * @param sensorOrientation the {@link SensorOrientationFeature} instance to set.
+ */
+ public void setSensorOrientation(SensorOrientationFeature sensorOrientation) {
+ this.featureMap.put(SENSOR_ORIENTATION, sensorOrientation);
+ }
+
+ /**
+ * Gets the zoom level feature if it has been set.
+ *
+ * @return the zoom level feature.
+ */
+ public ZoomLevelFeature getZoomLevel() {
+ return (ZoomLevelFeature) featureMap.get(ZOOM_LEVEL);
+ }
+
+ /**
+ * Sets the instance of the zoom level feature.
+ *
+ * @param zoomLevel the {@link ZoomLevelFeature} instance to set.
+ */
+ public void setZoomLevel(ZoomLevelFeature zoomLevel) {
+ this.featureMap.put(ZOOM_LEVEL, zoomLevel);
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeature.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeature.java
new file mode 100644
index 000000000000..a3a0172d3c37
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeature.java
@@ -0,0 +1,99 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.focuspoint;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.util.Size;
+import androidx.annotation.NonNull;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.CameraRegionUtils;
+import io.flutter.plugins.camera.features.CameraFeature;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+
+/** Focus point controls where in the frame focus will come from. */
+public class FocusPointFeature extends CameraFeature<Point> {
+
+ private Size cameraBoundaries;
+ private Point focusPoint;
+ private MeteringRectangle focusRectangle;
+ private final SensorOrientationFeature sensorOrientationFeature;
+
+ /**
+ * Creates a new instance of the {@link FocusPointFeature}.
+ *
+ * @param cameraProperties Collection of the characteristics for the current camera device.
+ */
+ public FocusPointFeature(
+ CameraProperties cameraProperties, SensorOrientationFeature sensorOrientationFeature) {
+ super(cameraProperties);
+ this.sensorOrientationFeature = sensorOrientationFeature;
+ }
+
+ /**
+ * Sets the camera boundaries that are required for the focus point feature to function.
+ *
+ * @param cameraBoundaries - The camera boundaries to set.
+ */
+ public void setCameraBoundaries(@NonNull Size cameraBoundaries) {
+ this.cameraBoundaries = cameraBoundaries;
+ this.buildFocusRectangle();
+ }
+
+ @Override
+ public String getDebugName() {
+ return "FocusPointFeature";
+ }
+
+ @Override
+ public Point getValue() {
+ return focusPoint;
+ }
+
+ @Override
+ public void setValue(Point value) {
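+ // A null point, or a point missing either coordinate, clears the focus point.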
+ this.focusPoint = value == null || value.x == null || value.y == null ? null : value;
+ this.buildFocusRectangle();
+ }
+
+ // Whether or not this camera can set the focus point.
+ @Override
+ public boolean checkIsSupported() {
+ Integer supportedRegions = cameraProperties.getControlMaxRegionsAutoFocus();
+ return supportedRegions != null && supportedRegions > 0;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+ requestBuilder.set(
+ CaptureRequest.CONTROL_AF_REGIONS,
+ focusRectangle == null ? null : new MeteringRectangle[] {focusRectangle});
+ }
+
+ private void buildFocusRectangle() {
+ if (this.cameraBoundaries == null) {
+ throw new AssertionError(
+ "The cameraBoundaries should be set (using `FocusPointFeature.setCameraBoundaries(Size)`) before updating the focus point.");
+ }
+ if (this.focusPoint == null) {
+ this.focusRectangle = null;
+ } else {
+ PlatformChannel.DeviceOrientation orientation =
+ this.sensorOrientationFeature.getLockedCaptureOrientation();
+ if (orientation == null) {
+ orientation =
+ this.sensorOrientationFeature.getDeviceOrientationManager().getLastUIOrientation();
+ }
+ this.focusRectangle =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.cameraBoundaries, this.focusPoint.x, this.focusPoint.y, orientation);
+ }
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeature.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeature.java
new file mode 100644
index 000000000000..500f2aa28dc2
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeature.java
@@ -0,0 +1,87 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.fpsrange;
+
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import android.util.Range;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+
+/**
+ * Controls the frames per second (FPS) range configuration on the {@link android.hardware.camera2}
+ * API.
+ */
+public class FpsRangeFeature extends CameraFeature<Range<Integer>> {
+ private static final Range<Integer> MAX_PIXEL4A_RANGE = new Range<>(30, 30);
+ private Range<Integer> currentSetting;
+
+ /**
+ * Creates a new instance of the {@link FpsRangeFeature}.
+ *
+ * @param cameraProperties Collection of characteristics for the current camera device.
+ */
+ public FpsRangeFeature(CameraProperties cameraProperties) {
+ super(cameraProperties);
+
+ if (isPixel4A()) {
+ // HACK: There is a bug in the Pixel 4A where it cannot support 60fps modes
+ // even though they are reported as supported by
+ // `getControlAutoExposureAvailableTargetFpsRanges`.
+ // For max device compatibility we will keep FPS under 60 even if they report they are
+ // capable of achieving 60 fps. Highest working FPS is 30.
+ // https://issuetracker.google.com/issues/189237151
+ currentSetting = MAX_PIXEL4A_RANGE;
+ } else {
+ Range<Integer>[] ranges = cameraProperties.getControlAutoExposureAvailableTargetFpsRanges();
+
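+ // Pick the supported range with the highest upper bound, skipping ranges that top out below 10 fps.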
+ if (ranges != null) {
+ for (Range<Integer> range : ranges) {
+ int upper = range.getUpper();
+
+ if (upper >= 10) {
+ if (currentSetting == null || upper > currentSetting.getUpper()) {
+ currentSetting = range;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private boolean isPixel4A() {
+ return Build.BRAND.equals("google") && Build.MODEL.equals("Pixel 4a");
+ }
+
+ @Override
+ public String getDebugName() {
+ return "FpsRangeFeature";
+ }
+
+ @Override
+ public Range<Integer> getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(Range<Integer> value) {
+ this.currentSetting = value;
+ }
+
+ // Always supported
+ @Override
+ public boolean checkIsSupported() {
+ return true;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, currentSetting);
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeature.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeature.java
new file mode 100644
index 000000000000..408575b375e6
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeature.java
@@ -0,0 +1,91 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.noisereduction;
+
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build.VERSION;
+import android.os.Build.VERSION_CODES;
+import android.util.Log;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+import java.util.HashMap;
+
+/**
+ * Controls noise reduction on the camera device. Noise reduction can either be enabled or
+ * disabled; only full-capability devices can set it to off, while both legacy and full devices
+ * support the fast mode.
+ * https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+ */
+public class NoiseReductionFeature extends CameraFeature<NoiseReductionMode> {
+ private NoiseReductionMode currentSetting = NoiseReductionMode.fast;
+
+ private final HashMap<NoiseReductionMode, Integer> NOISE_REDUCTION_MODES = new HashMap<>();
+
+ /**
+ * Creates a new instance of the {@link NoiseReductionFeature}.
+ *
+ * @param cameraProperties Collection of the characteristics for the current camera device.
+ */
+ public NoiseReductionFeature(CameraProperties cameraProperties) {
+ super(cameraProperties);
+ NOISE_REDUCTION_MODES.put(NoiseReductionMode.off, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
+ NOISE_REDUCTION_MODES.put(NoiseReductionMode.fast, CaptureRequest.NOISE_REDUCTION_MODE_FAST);
+ NOISE_REDUCTION_MODES.put(
+ NoiseReductionMode.highQuality, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
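+ // The minimal and zeroShutterLag modes were only added in API level 23 (Android M).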
+ if (VERSION.SDK_INT >= VERSION_CODES.M) {
+ NOISE_REDUCTION_MODES.put(
+ NoiseReductionMode.minimal, CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL);
+ NOISE_REDUCTION_MODES.put(
+ NoiseReductionMode.zeroShutterLag, CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG);
+ }
+ }
+
+ @Override
+ public String getDebugName() {
+ return "NoiseReductionFeature";
+ }
+
+ @Override
+ public NoiseReductionMode getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(NoiseReductionMode value) {
+ this.currentSetting = value;
+ }
+
+ @Override
+ public boolean checkIsSupported() {
+ /*
+ * Available settings: public static final int NOISE_REDUCTION_MODE_FAST = 1; public static
+ * final int NOISE_REDUCTION_MODE_HIGH_QUALITY = 2; public static final int
+ * NOISE_REDUCTION_MODE_MINIMAL = 3; public static final int NOISE_REDUCTION_MODE_OFF = 0;
+ * public static final int NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG = 4;
+ *
+ * Full-capability camera devices will always support OFF and FAST. Camera devices that
+ * support YUV_REPROCESSING or PRIVATE_REPROCESSING will support ZERO_SHUTTER_LAG.
+ * Legacy-capability camera devices will only support FAST mode.
+ */
+
+ // Can be null on some devices.
+ int[] modes = cameraProperties.getAvailableNoiseReductionModes();
+
+ // If there's at least one mode available then the feature is supported.
+ return modes != null && modes.length > 0;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ Log.i("Camera", "updateNoiseReduction | currentSetting: " + currentSetting);
+
+ // Apply the currently configured noise reduction mode to the capture request.
+ requestBuilder.set(
+ CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODES.get(currentSetting));
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionMode.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionMode.java
new file mode 100644
index 000000000000..425a458e2a2b
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionMode.java
@@ -0,0 +1,41 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.noisereduction;
+
+/** Only supports fast mode for now. */
+public enum NoiseReductionMode {
+ off("off"),
+ fast("fast"),
+ highQuality("highQuality"),
+ minimal("minimal"),
+ zeroShutterLag("zeroShutterLag");
+
+ private final String strValue;
+
+ NoiseReductionMode(String strValue) {
+ this.strValue = strValue;
+ }
+
+ /**
+ * Tries to convert the supplied string into a {@see NoiseReductionMode} enum value.
+ *
+ * <p>When the supplied string doesn't match a valid {@see NoiseReductionMode} enum value, null is
+ * returned.
+ *
+ * @param modeStr String value to convert into an {@see NoiseReductionMode} enum value.
+ * @return Matching {@see NoiseReductionMode} enum value, or null if no match is found.
+ */
+ public static NoiseReductionMode getValueForString(String modeStr) {
+ for (NoiseReductionMode value : values()) {
+ if (value.strValue.equals(modeStr)) return value;
+ }
+ return null;
+ }
+
+ @Override
+ public String toString() {
+ return strValue;
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionFeature.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionFeature.java
new file mode 100644
index 000000000000..afbd7c3758a6
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionFeature.java
@@ -0,0 +1,256 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.resolution;
+
+import android.annotation.TargetApi;
+import android.hardware.camera2.CaptureRequest;
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import android.os.Build;
+import android.util.Size;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.features.CameraFeature;
+import java.util.List;
+
+/**
+ * Controls the resolutions configuration on the {@link android.hardware.camera2} API.
+ *
+ * <p>The {@link ResolutionFeature} is responsible for converting the platform independent {@link
+ * ResolutionPreset} into a {@link android.media.CamcorderProfile} which contains all the properties
+ * required to configure the resolution using the {@link android.hardware.camera2} API.
+ */
+public class ResolutionFeature extends CameraFeature<ResolutionPreset> {
+ private Size captureSize;
+ private Size previewSize;
+ private CamcorderProfile recordingProfileLegacy;
+ private EncoderProfiles recordingProfile;
+ private ResolutionPreset currentSetting;
+ private int cameraId;
+
+ /**
+ * Creates a new instance of the {@link ResolutionFeature}.
+ *
+ * @param cameraProperties Collection of characteristics for the current camera device.
+ * @param resolutionPreset Platform agnostic enum containing resolution information.
+ * @param cameraName Camera identifier of the camera for which to configure the resolution.
+ */
+ public ResolutionFeature(
+ CameraProperties cameraProperties, ResolutionPreset resolutionPreset, String cameraName) {
+ super(cameraProperties);
+ this.currentSetting = resolutionPreset;
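+ // Camera2 camera names are numeric strings; if parsing fails the camera id is set to -1,
+ // which marks the feature as unsupported (see checkIsSupported).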
+ try {
+ this.cameraId = Integer.parseInt(cameraName, 10);
+ } catch (NumberFormatException e) {
+ this.cameraId = -1;
+ return;
+ }
+ configureResolution(resolutionPreset, cameraId);
+ }
+
+ /**
+ * Gets the {@link android.media.CamcorderProfile} containing the information to configure the
+ * resolution using the {@link android.hardware.camera2} API.
+ *
+ * @return Resolution information to configure the {@link android.hardware.camera2} API.
+ */
+ public CamcorderProfile getRecordingProfileLegacy() {
+ return this.recordingProfileLegacy;
+ }
+
+ public EncoderProfiles getRecordingProfile() {
+ return this.recordingProfile;
+ }
+
+ /**
+ * Gets the optimal preview size based on the configured resolution.
+ *
+ * @return The optimal preview size.
+ */
+ public Size getPreviewSize() {
+ return this.previewSize;
+ }
+
+ /**
+ * Gets the optimal capture size based on the configured resolution.
+ *
+ * @return The optimal capture size.
+ */
+ public Size getCaptureSize() {
+ return this.captureSize;
+ }
+
+ @Override
+ public String getDebugName() {
+ return "ResolutionFeature";
+ }
+
+ @Override
+ public ResolutionPreset getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(ResolutionPreset value) {
+ this.currentSetting = value;
+ configureResolution(currentSetting, cameraId);
+ }
+
+ @Override
+ public boolean checkIsSupported() {
+ return cameraId >= 0;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ // No-op: when setting a resolution there is no need to update the request builder.
+ }
+
+ @VisibleForTesting
+ static Size computeBestPreviewSize(int cameraId, ResolutionPreset preset)
+ throws IndexOutOfBoundsException {
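+ // The preview stream is capped at the 'high' preset; the full requested preset is still used
+ // for the capture and recording sizes (see configureResolution).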
+ if (preset.ordinal() > ResolutionPreset.high.ordinal()) {
+ preset = ResolutionPreset.high;
+ }
+ if (Build.VERSION.SDK_INT >= 31) {
+ EncoderProfiles profile =
+ getBestAvailableCamcorderProfileForResolutionPreset(cameraId, preset);
+ List<EncoderProfiles.VideoProfile> videoProfiles = profile.getVideoProfiles();
+ EncoderProfiles.VideoProfile defaultVideoProfile = videoProfiles.get(0);
+
+ return new Size(defaultVideoProfile.getWidth(), defaultVideoProfile.getHeight());
+ } else {
+ @SuppressWarnings("deprecation")
+ CamcorderProfile profile =
+ getBestAvailableCamcorderProfileForResolutionPresetLegacy(cameraId, preset);
+ return new Size(profile.videoFrameWidth, profile.videoFrameHeight);
+ }
+ }
+
+ /**
+ * Gets the best possible {@link android.media.CamcorderProfile} for the supplied {@link
+ * ResolutionPreset}. Supports SDK < 31.
+ *
+ * @param cameraId Camera identifier which indicates the device's camera for which to select a
+ * {@link android.media.CamcorderProfile}.
+ * @param preset The {@link ResolutionPreset} which is to be translated to a {@link
+ * android.media.CamcorderProfile}.
+ * @return The best possible {@link android.media.CamcorderProfile} that matches the supplied
+ * {@link ResolutionPreset}.
+ */
+ public static CamcorderProfile getBestAvailableCamcorderProfileForResolutionPresetLegacy(
+ int cameraId, ResolutionPreset preset) {
+ if (cameraId < 0) {
+ throw new AssertionError(
+ "getBestAvailableCamcorderProfileForResolutionPreset can only be used with valid (>=0) camera identifiers.");
+ }
+
+ switch (preset) {
+ // All of these cases deliberately fall through to get the best available profile.
+ case max:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_HIGH)) {
+ return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH);
+ }
+ case ultraHigh:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_2160P)) {
+ return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_2160P);
+ }
+ case veryHigh:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_1080P)) {
+ return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_1080P);
+ }
+ case high:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_720P)) {
+ return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_720P);
+ }
+ case medium:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_480P)) {
+ return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_480P);
+ }
+ case low:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_QVGA)) {
+ return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_QVGA);
+ }
+ default:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_LOW)) {
+ return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_LOW);
+ } else {
+ throw new IllegalArgumentException(
+ "No capture session available for current capture session.");
+ }
+ }
+ }
+
+ @TargetApi(Build.VERSION_CODES.S)
+ public static EncoderProfiles getBestAvailableCamcorderProfileForResolutionPreset(
+ int cameraId, ResolutionPreset preset) {
+ if (cameraId < 0) {
+ throw new AssertionError(
+ "getBestAvailableCamcorderProfileForResolutionPreset can only be used with valid (>=0) camera identifiers.");
+ }
+
+ String cameraIdString = Integer.toString(cameraId);
+
+ switch (preset) {
+ // All of these cases deliberately fall through to get the best available profile.
+ case max:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_HIGH)) {
+ return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_HIGH);
+ }
+ case ultraHigh:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_2160P)) {
+ return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_2160P);
+ }
+ case veryHigh:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_1080P)) {
+ return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_1080P);
+ }
+ case high:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_720P)) {
+ return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_720P);
+ }
+ case medium:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_480P)) {
+ return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_480P);
+ }
+ case low:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_QVGA)) {
+ return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_QVGA);
+ }
+ default:
+ if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_LOW)) {
+ return CamcorderProfile.getAll(cameraIdString, CamcorderProfile.QUALITY_LOW);
+ }
+
+ throw new IllegalArgumentException(
+ "No capture session available for current capture session.");
+ }
+ }
+
+ private void configureResolution(ResolutionPreset resolutionPreset, int cameraId)
+ throws IndexOutOfBoundsException {
+ if (!checkIsSupported()) {
+ return;
+ }
+
+ if (Build.VERSION.SDK_INT >= 31) {
+ recordingProfile =
+ getBestAvailableCamcorderProfileForResolutionPreset(cameraId, resolutionPreset);
+ List<EncoderProfiles.VideoProfile> videoProfiles = recordingProfile.getVideoProfiles();
+
+ EncoderProfiles.VideoProfile defaultVideoProfile = videoProfiles.get(0);
+ captureSize = new Size(defaultVideoProfile.getWidth(), defaultVideoProfile.getHeight());
+ } else {
+ @SuppressWarnings("deprecation")
+ CamcorderProfile camcorderProfile =
+ getBestAvailableCamcorderProfileForResolutionPresetLegacy(cameraId, resolutionPreset);
+ recordingProfileLegacy = camcorderProfile;
+ captureSize =
+ new Size(recordingProfileLegacy.videoFrameWidth, recordingProfileLegacy.videoFrameHeight);
+ }
+
+ previewSize = computeBestPreviewSize(cameraId, resolutionPreset);
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionPreset.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionPreset.java
new file mode 100644
index 000000000000..359300305d40
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/resolution/ResolutionPreset.java
@@ -0,0 +1,15 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.resolution;
+
+// Mirrors camera.dart
+public enum ResolutionPreset {
+ low,
+ medium,
+ high,
+ veryHigh,
+ ultraHigh,
+ max,
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManager.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManager.java
new file mode 100644
index 000000000000..dd1e489e6225
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManager.java
@@ -0,0 +1,329 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.sensororientation;
+
+import android.app.Activity;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.res.Configuration;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugins.camera.DartMessenger;
+
+/**
+ * Support class to help to determine the media orientation based on the orientation of the device.
+ */
+public class DeviceOrientationManager {
+
+ private static final IntentFilter orientationIntentFilter =
+ new IntentFilter(Intent.ACTION_CONFIGURATION_CHANGED);
+
+ private final Activity activity;
+ private final DartMessenger messenger;
+ private final boolean isFrontFacing;
+ private final int sensorOrientation;
+ private PlatformChannel.DeviceOrientation lastOrientation;
+ private BroadcastReceiver broadcastReceiver;
+
+ /** Factory method to create a device orientation manager. */
+ public static DeviceOrientationManager create(
+ @NonNull Activity activity,
+ @NonNull DartMessenger messenger,
+ boolean isFrontFacing,
+ int sensorOrientation) {
+ return new DeviceOrientationManager(activity, messenger, isFrontFacing, sensorOrientation);
+ }
+
+ private DeviceOrientationManager(
+ @NonNull Activity activity,
+ @NonNull DartMessenger messenger,
+ boolean isFrontFacing,
+ int sensorOrientation) {
+ this.activity = activity;
+ this.messenger = messenger;
+ this.isFrontFacing = isFrontFacing;
+ this.sensorOrientation = sensorOrientation;
+ }
+
+ /**
+ * Starts listening to the device's sensors or UI for orientation updates.
+ *
+ * When orientation information is updated the new orientation is sent to the client using the
+ * {@link DartMessenger}. This latest value can also be retrieved through the {@link
+ * #getVideoOrientation()} accessor.
+ *
+ * <p>If the device's ACCELEROMETER_ROTATION setting is enabled the {@link
+ * DeviceOrientationManager} will report orientation updates based on the sensor information. If
+ * the ACCELEROMETER_ROTATION is disabled the {@link DeviceOrientationManager} will fall back to
+ * delivering orientation updates based on the UI orientation.
+ */
+ public void start() {
+ if (broadcastReceiver != null) {
+ return;
+ }
+ broadcastReceiver =
+ new BroadcastReceiver() {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ handleUIOrientationChange();
+ }
+ };
+ activity.registerReceiver(broadcastReceiver, orientationIntentFilter);
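+ // Trigger the receiver once manually so the current orientation is reported immediately.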
+ broadcastReceiver.onReceive(activity, null);
+ }
+
+ /** Stops listening for orientation updates. */
+ public void stop() {
+ if (broadcastReceiver == null) {
+ return;
+ }
+ activity.unregisterReceiver(broadcastReceiver);
+ broadcastReceiver = null;
+ }
+
+ /**
+ * Returns the device's photo orientation in degrees based on the sensor orientation and the last
+ * known UI orientation.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * @return The device's photo orientation in degrees.
+ */
+ public int getPhotoOrientation() {
+ return this.getPhotoOrientation(this.lastOrientation);
+ }
+
+ /**
+ * Returns the device's photo orientation in degrees based on the sensor orientation and the
+ * supplied {@link PlatformChannel.DeviceOrientation} value.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * @param orientation The {@link PlatformChannel.DeviceOrientation} value that is to be converted
+ * into degrees.
+ * @return The device's photo orientation in degrees.
+ */
+ public int getPhotoOrientation(PlatformChannel.DeviceOrientation orientation) {
+ int angle = 0;
+ // Fallback to device orientation when the orientation value is null.
+ if (orientation == null) {
+ orientation = getUIOrientation();
+ }
+
+ switch (orientation) {
+ case PORTRAIT_UP:
+ angle = 90;
+ break;
+ case PORTRAIT_DOWN:
+ angle = 270;
+ break;
+ case LANDSCAPE_LEFT:
+ angle = isFrontFacing ? 180 : 0;
+ break;
+ case LANDSCAPE_RIGHT:
+ angle = isFrontFacing ? 0 : 180;
+ break;
+ }
+
+ // Sensor orientation is 90 for most devices, or 270 for some devices (e.g. Nexus 5X).
+ // This has to be taken into account so the JPEG is rotated properly.
+ // For devices with orientation of 90, this simply returns the angle calculated above.
+ // For devices with orientation of 270, the JPEG is rotated 180 degrees instead.
+ return (angle + sensorOrientation + 270) % 360;
+ }
+
+ /**
+ * Returns the device's video orientation in degrees based on the sensor orientation and the last
+ * known UI orientation.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * @return The device's video orientation in degrees.
+ */
+ public int getVideoOrientation() {
+ return this.getVideoOrientation(this.lastOrientation);
+ }
+
+ /**
+ * Returns the device's video orientation in degrees based on the sensor orientation and the
+ * supplied {@link PlatformChannel.DeviceOrientation} value.
+ *
+ * <p>Returns one of 0, 90, 180 or 270.
+ *
+ * @param orientation The {@link PlatformChannel.DeviceOrientation} value that is to be converted
+ * into degrees.
+ * @return The device's video orientation in degrees.
+ */
+ public int getVideoOrientation(PlatformChannel.DeviceOrientation orientation) {
+ int angle = 0;
+
+ // Fallback to device orientation when the orientation value is null.
+ if (orientation == null) {
+ orientation = getUIOrientation();
+ }
+
+ switch (orientation) {
+ case PORTRAIT_UP:
+ angle = 0;
+ break;
+ case PORTRAIT_DOWN:
+ angle = 180;
+ break;
+ case LANDSCAPE_LEFT:
+ angle = 90;
+ break;
+ case LANDSCAPE_RIGHT:
+ angle = 270;
+ break;
+ }
+
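+ // Front-facing cameras mirror the image, so the rotation direction is inverted.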
+ if (isFrontFacing) {
+ angle *= -1;
+ }
+
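+ // Adding 360 keeps the sum non-negative before taking the modulo.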
+ return (angle + sensorOrientation + 360) % 360;
+ }
+
+ /** @return the last received UI orientation. */
+ public PlatformChannel.DeviceOrientation getLastUIOrientation() {
+ return this.lastOrientation;
+ }
+
+ /**
+ * Handles orientation changes based on change events triggered by the OrientationIntentFilter.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ */
+ @VisibleForTesting
+ void handleUIOrientationChange() {
+ PlatformChannel.DeviceOrientation orientation = getUIOrientation();
+ handleOrientationChange(orientation, lastOrientation, messenger);
+ lastOrientation = orientation;
+ }
+
+ /**
+ * Handles orientation changes coming from either the device's sensors or the
+ * OrientationIntentFilter.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ */
+ @VisibleForTesting
+ static void handleOrientationChange(
+ DeviceOrientation newOrientation,
+ DeviceOrientation previousOrientation,
+ DartMessenger messenger) {
+ if (!newOrientation.equals(previousOrientation)) {
+ messenger.sendDeviceOrientationChangeEvent(newOrientation);
+ }
+ }
+
+ /**
+ * Gets the current user interface orientation.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @return The current user interface orientation.
+ */
+ @VisibleForTesting
+ PlatformChannel.DeviceOrientation getUIOrientation() {
+ final int rotation = getDisplay().getRotation();
+ final int orientation = activity.getResources().getConfiguration().orientation;
+
+ switch (orientation) {
+ case Configuration.ORIENTATION_PORTRAIT:
+ if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) {
+ return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+ } else {
+ return PlatformChannel.DeviceOrientation.PORTRAIT_DOWN;
+ }
+ case Configuration.ORIENTATION_LANDSCAPE:
+ if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) {
+ return PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT;
+ } else {
+ return PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT;
+ }
+ default:
+ return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+ }
+ }
+
+ /**
+ * Calculates the sensor orientation based on the supplied angle.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @param angle Orientation angle.
+ * @return The sensor orientation based on the supplied angle.
+ */
+ @VisibleForTesting
+ PlatformChannel.DeviceOrientation calculateSensorOrientation(int angle) {
+ final int tolerance = 45;
+ angle += tolerance;
+
+ // Orientation is 0 in the default orientation mode. This is portrait-mode for phones
+ // and landscape for tablets. We have to compensate for this by calculating the default
+ // orientation, and apply an offset accordingly.
+ int defaultDeviceOrientation = getDeviceDefaultOrientation();
+ if (defaultDeviceOrientation == Configuration.ORIENTATION_LANDSCAPE) {
+ angle += 90;
+ }
+ // Determine the orientation
+ angle = angle % 360;
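+ // With the 45 degree tolerance applied, each 90 degree quadrant maps to one of the four
+ // orientations below.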
+ return new PlatformChannel.DeviceOrientation[] {
+ PlatformChannel.DeviceOrientation.PORTRAIT_UP,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT,
+ PlatformChannel.DeviceOrientation.PORTRAIT_DOWN,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT,
+ }
+ [angle / 90];
+ }
+
+ /**
+ * Gets the default orientation of the device.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @return The default orientation of the device.
+ */
+ @VisibleForTesting
+ int getDeviceDefaultOrientation() {
+ Configuration config = activity.getResources().getConfiguration();
+ int rotation = getDisplay().getRotation();
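+ // Devices whose natural orientation is landscape (e.g. many tablets) report a landscape
+ // configuration at rotation 0/180 and a portrait configuration at rotation 90/270.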
+ if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180)
+ && config.orientation == Configuration.ORIENTATION_LANDSCAPE)
+ || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270)
+ && config.orientation == Configuration.ORIENTATION_PORTRAIT)) {
+ return Configuration.ORIENTATION_LANDSCAPE;
+ } else {
+ return Configuration.ORIENTATION_PORTRAIT;
+ }
+ }
+
+ /**
+ * Gets an instance of the Android {@link android.view.Display}.
+ *
+ * <p>This method is visible for testing purposes only and should never be used outside this
+ * class.
+ *
+ * @return An instance of the Android {@link android.view.Display}.
+ */
+ @SuppressWarnings("deprecation")
+ @VisibleForTesting
+ Display getDisplay() {
+ return ((WindowManager) activity.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeature.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeature.java
new file mode 100644
index 000000000000..9e316f741805
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeature.java
@@ -0,0 +1,105 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.sensororientation;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import androidx.annotation.NonNull;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import io.flutter.plugins.camera.features.CameraFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+
+/** Provides access to the sensor orientation of the camera devices. */
+public class SensorOrientationFeature extends CameraFeature<Integer> {
+ private Integer currentSetting = 0;
+ private final DeviceOrientationManager deviceOrientationListener;
+ private PlatformChannel.DeviceOrientation lockedCaptureOrientation;
+
+ /**
+ * Creates a new instance of the {@link SensorOrientationFeature}.
+ *
+ * @param cameraProperties Collection of characteristics for the current camera device.
+ * @param activity Current Android {@link android.app.Activity}, used to detect UI orientation
+ * changes.
+ * @param dartMessenger Instance of a {@link DartMessenger} used to communicate orientation
+ * updates back to the client.
+ */
+ public SensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger) {
+ super(cameraProperties);
+ setValue(cameraProperties.getSensorOrientation());
+
+ boolean isFrontFacing = cameraProperties.getLensFacing() == CameraMetadata.LENS_FACING_FRONT;
+ deviceOrientationListener =
+ DeviceOrientationManager.create(activity, dartMessenger, isFrontFacing, currentSetting);
+ deviceOrientationListener.start();
+ }
+
+ @Override
+ public String getDebugName() {
+ return "SensorOrientationFeature";
+ }
+
+ @Override
+ public Integer getValue() {
+ return currentSetting;
+ }
+
+ @Override
+ public void setValue(Integer value) {
+ this.currentSetting = value;
+ }
+
+ @Override
+ public boolean checkIsSupported() {
+ return true;
+ }
+
+ @Override
+ public void updateBuilder(CaptureRequest.Builder requestBuilder) {
+ // Noop: when setting the sensor orientation there is no need to update the request builder.
+ }
+
+ /**
+ * Gets the instance of the {@link DeviceOrientationManager} used to detect orientation changes.
+ *
+ * @return The instance of the {@link DeviceOrientationManager}.
+ */
+ public DeviceOrientationManager getDeviceOrientationManager() {
+ return this.deviceOrientationListener;
+ }
+
+ /**
+ * Lock the capture orientation, indicating that the device orientation should not influence the
+ * capture orientation.
+ *
+ * @param orientation The orientation in which to lock the capture orientation.
+ */
+ public void lockCaptureOrientation(PlatformChannel.DeviceOrientation orientation) {
+ this.lockedCaptureOrientation = orientation;
+ }
+
+ /**
+ * Unlock the capture orientation, indicating that the device orientation should be used to
+ * configure the capture orientation.
+ */
+ public void unlockCaptureOrientation() {
+ this.lockedCaptureOrientation = null;
+ }
+
+ /**
+ * Gets the configured locked capture orientation.
+ *
+ * @return The configured locked capture orientation.
+ */
+ public PlatformChannel.DeviceOrientation getLockedCaptureOrientation() {
+ return this.lockedCaptureOrientation;
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/CameraCaptureProperties.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/CameraCaptureProperties.java
new file mode 100644
index 000000000000..68177f4ecfd6
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/CameraCaptureProperties.java
@@ -0,0 +1,67 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
+public class CameraCaptureProperties {
+
+ private Float lastLensAperture;
+ private Long lastSensorExposureTime;
+ private Integer lastSensorSensitivity;
+
+ /**
+ * Gets the last known lens aperture. (As f-stop value)
+ *
+ * @return the last known lens aperture. (As f-stop value)
+ */
+ public Float getLastLensAperture() {
+ return lastLensAperture;
+ }
+
+ /**
+ * Sets the last known lens aperture. (As f-stop value)
+ *
+ * @param lastLensAperture - The last known lens aperture to set. (As f-stop value)
+ */
+ public void setLastLensAperture(Float lastLensAperture) {
+ this.lastLensAperture = lastLensAperture;
+ }
+
+ /**
+ * Gets the last known sensor exposure time in nanoseconds.
+ *
+ * @return the last known sensor exposure time in nanoseconds.
+ */
+ public Long getLastSensorExposureTime() {
+ return lastSensorExposureTime;
+ }
+
+ /**
+ * Sets the last known sensor exposure time in nanoseconds.
+ *
+ * @param lastSensorExposureTime - The last known sensor exposure time to set, in nanoseconds.
+ */
+ public void setLastSensorExposureTime(Long lastSensorExposureTime) {
+ this.lastSensorExposureTime = lastSensorExposureTime;
+ }
+
+ /**
+ * Gets the last known sensor sensitivity in ISO arithmetic units.
+ *
+ * @return the last known sensor sensitivity in ISO arithmetic units.
+ */
+ public Integer getLastSensorSensitivity() {
+ return lastSensorSensitivity;
+ }
+
+ /**
+ * Sets the last known sensor sensitivity in ISO arithmetic units.
+ *
+ * @param lastSensorSensitivity - The last known sensor sensitivity to set, in ISO arithmetic
+ * units.
+ */
+ public void setLastSensorSensitivity(Integer lastSensorSensitivity) {
+ this.lastSensorSensitivity = lastSensorSensitivity;
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/CaptureTimeoutsWrapper.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/CaptureTimeoutsWrapper.java
new file mode 100644
index 000000000000..ad59bd09c754
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/CaptureTimeoutsWrapper.java
@@ -0,0 +1,52 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
+/**
+ * Wrapper class that provides a container for all {@link Timeout} instances that are required for
+ * the capture flow.
+ */
+public class CaptureTimeoutsWrapper {
+ private Timeout preCaptureFocusing;
+ private Timeout preCaptureMetering;
+ private final long preCaptureFocusingTimeoutMs;
+ private final long preCaptureMeteringTimeoutMs;
+
+ /**
+ * Create a new wrapper instance with the specified timeout values.
+ *
+ * @param preCaptureFocusingTimeoutMs focusing timeout milliseconds.
+ * @param preCaptureMeteringTimeoutMs metering timeout milliseconds.
+ */
+ public CaptureTimeoutsWrapper(
+ long preCaptureFocusingTimeoutMs, long preCaptureMeteringTimeoutMs) {
+ this.preCaptureFocusingTimeoutMs = preCaptureFocusingTimeoutMs;
+ this.preCaptureMeteringTimeoutMs = preCaptureMeteringTimeoutMs;
+ }
+
+ /** Reset all timeouts to the current timestamp. */
+ public void reset() {
+ this.preCaptureFocusing = Timeout.create(preCaptureFocusingTimeoutMs);
+ this.preCaptureMetering = Timeout.create(preCaptureMeteringTimeoutMs);
+ }
+
+ /**
+ * Returns the timeout instance related to precapture focusing.
+ *
+ * @return - The timeout object
+ */
+ public Timeout getPreCaptureFocusing() {
+ return preCaptureFocusing;
+ }
+
+ /**
+ * Returns the timeout instance related to precapture metering.
+ *
+ * @return - The timeout object
+ */
+ public Timeout getPreCaptureMetering() {
+ return preCaptureMetering;
+ }
+}
diff --git a/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/Timeout.java b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/Timeout.java
new file mode 100644
index 000000000000..67e05499d47a
--- /dev/null
+++ b/packages/camera/camera/android/src/main/java/io/flutter/plugins/camera/types/Timeout.java
@@ -0,0 +1,51 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.types;
+
+import android.os.SystemClock;
+
+/**
+ * This is a simple class for managing a timeout. In the camera we generally keep two timeouts: one
+ * for focusing and one for pre-capture metering.
+ *
+ * We use timeouts to ensure a picture is always captured within a reasonable amount of time even
+ * if the settings don't converge and focus can't be locked.
+ *
+ * <p>You generally check the status of the timeout in the CameraCaptureCallback during the capture
+ * sequence and use it to move to the next state if the timeout has passed.
+ */
+public class Timeout {
+
+ /** The timeout time in milliseconds */
+ private final long timeoutMs;
+
+ /** When this timeout was started. Will be used later to check if the timeout has expired yet. */
+ private final long timeStarted;
+
+ /**
+ * Factory method to create a new Timeout.
+ *
+ * @param timeoutMs timeout to use.
+ * @return returns a new Timeout.
+ */
+ public static Timeout create(long timeoutMs) {
+ return new Timeout(timeoutMs);
+ }
+
+ /**
+ * Create a new timeout.
+ *
+ * @param timeoutMs the time in milliseconds for this timeout to lapse.
+ */
+ private Timeout(long timeoutMs) {
+ this.timeoutMs = timeoutMs;
+ this.timeStarted = SystemClock.elapsedRealtime();
+ }
+
+ /** Will return true when the timeout period has lapsed. */
+ public boolean getIsExpired() {
+ return (SystemClock.elapsedRealtime() - timeStarted) > timeoutMs;
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackStatesTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackStatesTest.java
new file mode 100644
index 000000000000..934aff857ec7
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackStatesTest.java
@@ -0,0 +1,381 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.CaptureResult.Key;
+import android.hardware.camera2.TotalCaptureResult;
+import io.flutter.plugins.camera.CameraCaptureCallback.CameraCaptureStateListener;
+import io.flutter.plugins.camera.types.CameraCaptureProperties;
+import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper;
+import io.flutter.plugins.camera.types.Timeout;
+import io.flutter.plugins.camera.utils.TestUtils;
+import java.util.HashMap;
+import java.util.Map;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+import org.mockito.MockedStatic;
+
+public class CameraCaptureCallbackStatesTest extends TestCase {
+ private final Integer aeState;
+ private final Integer afState;
+ private final CameraState cameraState;
+ private final boolean isTimedOut;
+
+ private Runnable validate;
+
+ private CameraCaptureCallback cameraCaptureCallback;
+ private CameraCaptureStateListener mockCaptureStateListener;
+ private CameraCaptureSession mockCameraCaptureSession;
+ private CaptureRequest mockCaptureRequest;
+ private CaptureResult mockPartialCaptureResult;
+ private CaptureTimeoutsWrapper mockCaptureTimeouts;
+ private CameraCaptureProperties mockCaptureProps;
+ private TotalCaptureResult mockTotalCaptureResult;
+ private MockedStatic<Timeout> mockedStaticTimeout;
+ private Timeout mockTimeout;
+
+ public static TestSuite suite() {
+ TestSuite suite = new TestSuite();
+
+ setUpPreviewStateTest(suite);
+ setUpWaitingFocusTests(suite);
+ setUpWaitingPreCaptureStartTests(suite);
+ setUpWaitingPreCaptureDoneTests(suite);
+
+ return suite;
+ }
+
+ protected CameraCaptureCallbackStatesTest(
+ String name, CameraState cameraState, Integer afState, Integer aeState) {
+ this(name, cameraState, afState, aeState, false);
+ }
+
+ protected CameraCaptureCallbackStatesTest(
+ String name, CameraState cameraState, Integer afState, Integer aeState, boolean isTimedOut) {
+ super(name);
+
+ this.aeState = aeState;
+ this.afState = afState;
+ this.cameraState = cameraState;
+ this.isTimedOut = isTimedOut;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ mockedStaticTimeout = mockStatic(Timeout.class);
+ mockCaptureStateListener = mock(CameraCaptureStateListener.class);
+ mockCameraCaptureSession = mock(CameraCaptureSession.class);
+ mockCaptureRequest = mock(CaptureRequest.class);
+ mockPartialCaptureResult = mock(CaptureResult.class);
+ mockTotalCaptureResult = mock(TotalCaptureResult.class);
+ mockTimeout = mock(Timeout.class);
+ mockCaptureTimeouts = mock(CaptureTimeoutsWrapper.class);
+ mockCaptureProps = mock(CameraCaptureProperties.class);
+ when(mockCaptureTimeouts.getPreCaptureFocusing()).thenReturn(mockTimeout);
+ when(mockCaptureTimeouts.getPreCaptureMetering()).thenReturn(mockTimeout);
+
+ Key<Integer> mockAeStateKey = mock(Key.class);
+ Key<Integer> mockAfStateKey = mock(Key.class);
+
+ TestUtils.setFinalStatic(CaptureResult.class, "CONTROL_AE_STATE", mockAeStateKey);
+ TestUtils.setFinalStatic(CaptureResult.class, "CONTROL_AF_STATE", mockAfStateKey);
+
+ mockedStaticTimeout.when(() -> Timeout.create(1000)).thenReturn(mockTimeout);
+
+ cameraCaptureCallback =
+ CameraCaptureCallback.create(
+ mockCaptureStateListener, mockCaptureTimeouts, mockCaptureProps);
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+
+ mockedStaticTimeout.close();
+
+ TestUtils.setFinalStatic(CaptureResult.class, "CONTROL_AE_STATE", null);
+ TestUtils.setFinalStatic(CaptureResult.class, "CONTROL_AF_STATE", null);
+ }
+
+ @Override
+ protected void runTest() throws Throwable {
+ when(mockPartialCaptureResult.get(CaptureResult.CONTROL_AF_STATE)).thenReturn(afState);
+ when(mockPartialCaptureResult.get(CaptureResult.CONTROL_AE_STATE)).thenReturn(aeState);
+ when(mockTotalCaptureResult.get(CaptureResult.CONTROL_AF_STATE)).thenReturn(afState);
+ when(mockTotalCaptureResult.get(CaptureResult.CONTROL_AE_STATE)).thenReturn(aeState);
+
+ cameraCaptureCallback.setCameraState(cameraState);
+ if (isTimedOut) {
+ when(mockTimeout.getIsExpired()).thenReturn(true);
+ cameraCaptureCallback.onCaptureCompleted(
+ mockCameraCaptureSession, mockCaptureRequest, mockTotalCaptureResult);
+ } else {
+ cameraCaptureCallback.onCaptureProgressed(
+ mockCameraCaptureSession, mockCaptureRequest, mockPartialCaptureResult);
+ }
+
+ validate.run();
+ }
+
+ private static void setUpPreviewStateTest(TestSuite suite) {
+ CameraCaptureCallbackStatesTest previewStateTest =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_not_converge_or_pre_capture_when_state_is_preview",
+ CameraState.STATE_PREVIEW,
+ null,
+ null);
+ previewStateTest.validate =
+ () -> {
+ verify(previewStateTest.mockCaptureStateListener, never()).onConverged();
+ verify(previewStateTest.mockCaptureStateListener, never()).onPrecapture();
+ assertEquals(
+ CameraState.STATE_PREVIEW, previewStateTest.cameraCaptureCallback.getCameraState());
+ };
+ suite.addTest(previewStateTest);
+ }
+
+ private static void setUpWaitingFocusTests(TestSuite suite) {
+ Integer[] actionableAfStates =
+ new Integer[] {
+ CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED,
+ CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
+ };
+
+ Integer[] nonActionableAfStates =
+ new Integer[] {
+ CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN,
+ CaptureResult.CONTROL_AF_STATE_INACTIVE,
+ CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED,
+ CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN,
+ CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED
+ };
+
+ Map<Integer, Boolean> aeStatesConvergeMap =
+ new HashMap<Integer, Boolean>() {
+ {
+ put(null, true);
+ put(CaptureResult.CONTROL_AE_STATE_CONVERGED, true);
+ put(CaptureResult.CONTROL_AE_STATE_PRECAPTURE, false);
+ put(CaptureResult.CONTROL_AE_STATE_LOCKED, false);
+ put(CaptureResult.CONTROL_AE_STATE_SEARCHING, false);
+ put(CaptureResult.CONTROL_AE_STATE_INACTIVE, false);
+ put(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED, false);
+ }
+ };
+
+ CameraCaptureCallbackStatesTest nullStateTest =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_not_converge_or_pre_capture_when_afstate_is_null",
+ CameraState.STATE_WAITING_FOCUS,
+ null,
+ null);
+ nullStateTest.validate =
+ () -> {
+ verify(nullStateTest.mockCaptureStateListener, never()).onConverged();
+ verify(nullStateTest.mockCaptureStateListener, never()).onPrecapture();
+ assertEquals(
+ CameraState.STATE_WAITING_FOCUS,
+ nullStateTest.cameraCaptureCallback.getCameraState());
+ };
+ suite.addTest(nullStateTest);
+
+ for (Integer afState : actionableAfStates) {
+ aeStatesConvergeMap.forEach(
+ (aeState, shouldConverge) -> {
+ CameraCaptureCallbackStatesTest focusLockedTest =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_converge_when_af_state_is_"
+ + afState
+ + "_and_ae_state_is_"
+ + aeState,
+ CameraState.STATE_WAITING_FOCUS,
+ afState,
+ aeState);
+ focusLockedTest.validate =
+ () -> {
+ if (shouldConverge) {
+ verify(focusLockedTest.mockCaptureStateListener, times(1)).onConverged();
+ verify(focusLockedTest.mockCaptureStateListener, never()).onPrecapture();
+ } else {
+ verify(focusLockedTest.mockCaptureStateListener, times(1)).onPrecapture();
+ verify(focusLockedTest.mockCaptureStateListener, never()).onConverged();
+ }
+ assertEquals(
+ CameraState.STATE_WAITING_FOCUS,
+ focusLockedTest.cameraCaptureCallback.getCameraState());
+ };
+ suite.addTest(focusLockedTest);
+ });
+ }
+
+ for (Integer afState : nonActionableAfStates) {
+ CameraCaptureCallbackStatesTest focusLockedTest =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_do_nothing_when_af_state_is_" + afState,
+ CameraState.STATE_WAITING_FOCUS,
+ afState,
+ null);
+ focusLockedTest.validate =
+ () -> {
+ verify(focusLockedTest.mockCaptureStateListener, never()).onConverged();
+ verify(focusLockedTest.mockCaptureStateListener, never()).onPrecapture();
+ assertEquals(
+ CameraState.STATE_WAITING_FOCUS,
+ focusLockedTest.cameraCaptureCallback.getCameraState());
+ };
+ suite.addTest(focusLockedTest);
+ }
+
+ for (Integer afState : nonActionableAfStates) {
+ aeStatesConvergeMap.forEach(
+ (aeState, shouldConverge) -> {
+ CameraCaptureCallbackStatesTest focusLockedTest =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_converge_when_af_state_is_"
+ + afState
+ + "_and_ae_state_is_"
+ + aeState,
+ CameraState.STATE_WAITING_FOCUS,
+ afState,
+ aeState,
+ true);
+ focusLockedTest.validate =
+ () -> {
+ if (shouldConverge) {
+ verify(focusLockedTest.mockCaptureStateListener, times(1)).onConverged();
+ verify(focusLockedTest.mockCaptureStateListener, never()).onPrecapture();
+ } else {
+ verify(focusLockedTest.mockCaptureStateListener, times(1)).onPrecapture();
+ verify(focusLockedTest.mockCaptureStateListener, never()).onConverged();
+ }
+ assertEquals(
+ CameraState.STATE_WAITING_FOCUS,
+ focusLockedTest.cameraCaptureCallback.getCameraState());
+ };
+ suite.addTest(focusLockedTest);
+ });
+ }
+ }
+
+ private static void setUpWaitingPreCaptureStartTests(TestSuite suite) {
+ Map<Integer, CameraState> cameraStateMap =
+ new HashMap<Integer, CameraState>() {
+ {
+ put(null, CameraState.STATE_WAITING_PRECAPTURE_DONE);
+ put(
+ CaptureResult.CONTROL_AE_STATE_INACTIVE,
+ CameraState.STATE_WAITING_PRECAPTURE_START);
+ put(
+ CaptureResult.CONTROL_AE_STATE_SEARCHING,
+ CameraState.STATE_WAITING_PRECAPTURE_START);
+ put(
+ CaptureResult.CONTROL_AE_STATE_CONVERGED,
+ CameraState.STATE_WAITING_PRECAPTURE_DONE);
+ put(CaptureResult.CONTROL_AE_STATE_LOCKED, CameraState.STATE_WAITING_PRECAPTURE_START);
+ put(
+ CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED,
+ CameraState.STATE_WAITING_PRECAPTURE_DONE);
+ put(
+ CaptureResult.CONTROL_AE_STATE_PRECAPTURE,
+ CameraState.STATE_WAITING_PRECAPTURE_DONE);
+ }
+ };
+
+ cameraStateMap.forEach(
+ (aeState, cameraState) -> {
+ CameraCaptureCallbackStatesTest testCase =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_update_camera_state_to_waiting_pre_capture_done_when_ae_state_is_"
+ + aeState,
+ CameraState.STATE_WAITING_PRECAPTURE_START,
+ null,
+ aeState);
+ testCase.validate =
+ () -> assertEquals(cameraState, testCase.cameraCaptureCallback.getCameraState());
+ suite.addTest(testCase);
+ });
+
+ cameraStateMap.forEach(
+ (aeState, cameraState) -> {
+ if (cameraState == CameraState.STATE_WAITING_PRECAPTURE_DONE) {
+ return;
+ }
+
+ CameraCaptureCallbackStatesTest testCase =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_update_camera_state_to_waiting_pre_capture_done_when_ae_state_is_"
+ + aeState,
+ CameraState.STATE_WAITING_PRECAPTURE_START,
+ null,
+ aeState,
+ true);
+ testCase.validate =
+ () ->
+ assertEquals(
+ CameraState.STATE_WAITING_PRECAPTURE_DONE,
+ testCase.cameraCaptureCallback.getCameraState());
+ suite.addTest(testCase);
+ });
+ }
+
+ private static void setUpWaitingPreCaptureDoneTests(TestSuite suite) {
+ Integer[] onConvergeStates =
+ new Integer[] {
+ null,
+ CaptureResult.CONTROL_AE_STATE_CONVERGED,
+ CaptureResult.CONTROL_AE_STATE_LOCKED,
+ CaptureResult.CONTROL_AE_STATE_SEARCHING,
+ CaptureResult.CONTROL_AE_STATE_INACTIVE,
+ CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED,
+ };
+
+ for (Integer aeState : onConvergeStates) {
+ CameraCaptureCallbackStatesTest shouldConvergeTest =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_converge_when_ae_state_is_" + aeState,
+ CameraState.STATE_WAITING_PRECAPTURE_DONE,
+ null,
+ null);
+ shouldConvergeTest.validate =
+ () -> verify(shouldConvergeTest.mockCaptureStateListener, times(1)).onConverged();
+ suite.addTest(shouldConvergeTest);
+ }
+
+ CameraCaptureCallbackStatesTest shouldNotConvergeTest =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_not_converge_when_ae_state_is_pre_capture",
+ CameraState.STATE_WAITING_PRECAPTURE_DONE,
+ null,
+ CaptureResult.CONTROL_AE_STATE_PRECAPTURE);
+ shouldNotConvergeTest.validate =
+ () -> verify(shouldNotConvergeTest.mockCaptureStateListener, never()).onConverged();
+ suite.addTest(shouldNotConvergeTest);
+
+ CameraCaptureCallbackStatesTest shouldConvergeWhenTimedOutTest =
+ new CameraCaptureCallbackStatesTest(
+ "process_should_not_converge_when_ae_state_is_pre_capture",
+ CameraState.STATE_WAITING_PRECAPTURE_DONE,
+ null,
+ CaptureResult.CONTROL_AE_STATE_PRECAPTURE,
+ true);
+ shouldConvergeWhenTimedOutTest.validate =
+ () ->
+ verify(shouldConvergeWhenTimedOutTest.mockCaptureStateListener, times(1)).onConverged();
+ suite.addTest(shouldConvergeWhenTimedOutTest);
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackTest.java
new file mode 100644
index 000000000000..75a5b25995e2
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraCaptureCallbackTest.java
@@ -0,0 +1,72 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.mockito.ArgumentMatchers.anyFloat;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import io.flutter.plugins.camera.types.CameraCaptureProperties;
+import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.RobolectricTestRunner;
+
+@RunWith(RobolectricTestRunner.class)
+public class CameraCaptureCallbackTest {
+
+ private CameraCaptureCallback cameraCaptureCallback;
+ private CameraCaptureProperties mockCaptureProps;
+
+ @Before
+ public void setUp() {
+ CameraCaptureCallback.CameraCaptureStateListener mockCaptureStateListener =
+ mock(CameraCaptureCallback.CameraCaptureStateListener.class);
+ CaptureTimeoutsWrapper mockCaptureTimeouts = mock(CaptureTimeoutsWrapper.class);
+ mockCaptureProps = mock(CameraCaptureProperties.class);
+ cameraCaptureCallback =
+ CameraCaptureCallback.create(
+ mockCaptureStateListener, mockCaptureTimeouts, mockCaptureProps);
+ }
+
+ @Test
+ public void onCaptureProgressed_doesNotUpdateCameraCaptureProperties() {
+ CameraCaptureSession mockSession = mock(CameraCaptureSession.class);
+ CaptureRequest mockRequest = mock(CaptureRequest.class);
+ CaptureResult mockResult = mock(CaptureResult.class);
+
+ cameraCaptureCallback.onCaptureProgressed(mockSession, mockRequest, mockResult);
+
+ verify(mockCaptureProps, never()).setLastLensAperture(anyFloat());
+ verify(mockCaptureProps, never()).setLastSensorExposureTime(anyLong());
+ verify(mockCaptureProps, never()).setLastSensorSensitivity(anyInt());
+ }
+
+ @Test
+ public void onCaptureCompleted_updatesCameraCaptureProperties() {
+ CameraCaptureSession mockSession = mock(CameraCaptureSession.class);
+ CaptureRequest mockRequest = mock(CaptureRequest.class);
+ TotalCaptureResult mockResult = mock(TotalCaptureResult.class);
+ when(mockResult.get(CaptureResult.LENS_APERTURE)).thenReturn(1.0f);
+ when(mockResult.get(CaptureResult.SENSOR_EXPOSURE_TIME)).thenReturn(2L);
+ when(mockResult.get(CaptureResult.SENSOR_SENSITIVITY)).thenReturn(3);
+
+ cameraCaptureCallback.onCaptureCompleted(mockSession, mockRequest, mockResult);
+
+ verify(mockCaptureProps, times(1)).setLastLensAperture(1.0f);
+ verify(mockCaptureProps, times(1)).setLastSensorExposureTime(2L);
+ verify(mockCaptureProps, times(1)).setLastSensorSensitivity(3);
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_convertPointToMeteringRectangleTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_convertPointToMeteringRectangleTest.java
new file mode 100644
index 000000000000..2c6d9d9177e9
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_convertPointToMeteringRectangleTest.java
@@ -0,0 +1,197 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.params.MeteringRectangle;
+import android.util.Size;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+public class CameraRegionUtils_convertPointToMeteringRectangleTest {
+ private MockedStatic<CameraRegionUtils.MeteringRectangleFactory> mockedMeteringRectangleFactory;
+ private Size mockCameraBoundaries;
+
+ @Before
+ public void setUp() {
+ this.mockCameraBoundaries = mock(Size.class);
+ when(this.mockCameraBoundaries.getWidth()).thenReturn(100);
+ when(this.mockCameraBoundaries.getHeight()).thenReturn(100);
+ mockedMeteringRectangleFactory = mockStatic(CameraRegionUtils.MeteringRectangleFactory.class);
+
+ mockedMeteringRectangleFactory
+ .when(
+ () ->
+ CameraRegionUtils.MeteringRectangleFactory.create(
+ anyInt(), anyInt(), anyInt(), anyInt(), anyInt()))
+ .thenAnswer(
+ new Answer<MeteringRectangle>() {
+ @Override
+ public MeteringRectangle answer(InvocationOnMock createInvocation) throws Throwable {
+ MeteringRectangle mockMeteringRectangle = mock(MeteringRectangle.class);
+ when(mockMeteringRectangle.getX()).thenReturn(createInvocation.getArgument(0));
+ when(mockMeteringRectangle.getY()).thenReturn(createInvocation.getArgument(1));
+ when(mockMeteringRectangle.getWidth()).thenReturn(createInvocation.getArgument(2));
+ when(mockMeteringRectangle.getHeight()).thenReturn(createInvocation.getArgument(3));
+ when(mockMeteringRectangle.getMeteringWeight())
+ .thenReturn(createInvocation.getArgument(4));
+ when(mockMeteringRectangle.equals(any()))
+ .thenAnswer(
+ new Answer() {
+ @Override
+ public Boolean answer(InvocationOnMock equalsInvocation)
+ throws Throwable {
+ MeteringRectangle otherMockMeteringRectangle =
+ equalsInvocation.getArgument(0);
+ return mockMeteringRectangle.getX() == otherMockMeteringRectangle.getX()
+ && mockMeteringRectangle.getY() == otherMockMeteringRectangle.getY()
+ && mockMeteringRectangle.getWidth()
+ == otherMockMeteringRectangle.getWidth()
+ && mockMeteringRectangle.getHeight()
+ == otherMockMeteringRectangle.getHeight()
+ && mockMeteringRectangle.getMeteringWeight()
+ == otherMockMeteringRectangle.getMeteringWeight();
+ }
+ });
+ return mockMeteringRectangle;
+ }
+ });
+ }
+
+ @After
+ public void tearDown() {
+ mockedMeteringRectangleFactory.close();
+ }
+
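+  // With the 100x100 mock boundaries configured in setUp, a normalized (x, y) point is
+  // expected to map to a 10x10 metering rectangle that stays inside the sensor area and
+  // is rotated to match the supplied device orientation.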
+ @Test
+ public void convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForCenterCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0.5, 0.5, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(45, 45, 10, 10, 1).equals(r));
+ }
+
+ @Test
+ public void convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForTopLeftCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0, 0, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(0, 0, 10, 10, 1).equals(r));
+ }
+
+ @Test
+  public void convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForTopRightCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 0, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(89, 0, 10, 10, 1).equals(r));
+ }
+
+ @Test
+ public void
+ convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForBottomLeftCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0, 1, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(0, 89, 10, 10, 1).equals(r));
+ }
+
+ @Test
+ public void
+ convertPointToMeteringRectangle_shouldReturnValidMeteringRectangleForBottomRightCoord() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(89, 89, 10, 10, 1).equals(r));
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowForXUpperBound() {
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1.5, 0, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowForXLowerBound() {
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, -0.5, 0, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowForYUpperBound() {
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0, 1.5, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowForYLowerBound() {
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 0, -0.5, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+
+  @Test
+ public void
+ convertPointToMeteringRectangle_shouldRotateMeteringRectangleAccordingToUiOrientationForPortraitUp() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(89, 0, 10, 10, 1).equals(r));
+ }
+
+  @Test
+ public void
+ convertPointToMeteringRectangle_shouldRotateMeteringRectangleAccordingToUiOrientationForPortraitDown() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.PORTRAIT_DOWN);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(0, 89, 10, 10, 1).equals(r));
+ }
+
+  @Test
+ public void
+ convertPointToMeteringRectangle_shouldRotateMeteringRectangleAccordingToUiOrientationForLandscapeLeft() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(89, 89, 10, 10, 1).equals(r));
+ }
+
+  @Test
+ public void
+ convertPointToMeteringRectangle_shouldRotateMeteringRectangleAccordingToUiOrientationForLandscapeRight() {
+ MeteringRectangle r =
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ this.mockCameraBoundaries, 1, 1, PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT);
+ assertTrue(CameraRegionUtils.MeteringRectangleFactory.create(0, 0, 10, 10, 1).equals(r));
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowFor0WidthBoundary() {
+ Size mockCameraBoundaries = mock(Size.class);
+ when(mockCameraBoundaries.getWidth()).thenReturn(0);
+ when(mockCameraBoundaries.getHeight()).thenReturn(50);
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockCameraBoundaries, 0, -0.5, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+
+ @Test(expected = AssertionError.class)
+ public void convertPointToMeteringRectangle_shouldThrowFor0HeightBoundary() {
+ Size mockCameraBoundaries = mock(Size.class);
+ when(mockCameraBoundaries.getWidth()).thenReturn(50);
+ when(mockCameraBoundaries.getHeight()).thenReturn(0);
+ CameraRegionUtils.convertPointToMeteringRectangle(
+        mockCameraBoundaries, 0, -0.5, PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_getCameraBoundariesTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_getCameraBoundariesTest.java
new file mode 100644
index 000000000000..4c0164981b74
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraRegionUtils_getCameraBoundariesTest.java
@@ -0,0 +1,247 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.graphics.Rect;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import android.util.Size;
+import io.flutter.plugins.camera.utils.TestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.stubbing.Answer;
+
+public class CameraRegionUtils_getCameraBoundariesTest {
+
+ Size mockCameraBoundaries;
+
+ @Before
+ public void setUp() {
+ this.mockCameraBoundaries = mock(Size.class);
+ when(this.mockCameraBoundaries.getWidth()).thenReturn(100);
+ when(this.mockCameraBoundaries.getHeight()).thenReturn(100);
+ }
+
+ @Test
+ public void getCameraBoundaries_shouldReturnSensorInfoPixelArraySizeWhenRunningPreAndroidP() {
+ updateSdkVersion(Build.VERSION_CODES.O_MR1);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ when(mockCameraProperties.getSensorInfoPixelArraySize()).thenReturn(mockCameraBoundaries);
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(mockCameraBoundaries, result);
+ verify(mockCameraProperties, never()).getSensorInfoPreCorrectionActiveArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ } finally {
+ updateSdkVersion(0);
+ }
+ }
+
+ @Test
+ public void
+ getCameraBoundaries_shouldReturnSensorInfoPixelArraySizeWhenDistortionCorrectionIsNull() {
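+  // Feature factory that always hands back the same set of Mockito mocks, so each test
+  // can stub and verify a single camera feature in isolation.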
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes()).thenReturn(null);
+ when(mockCameraProperties.getSensorInfoPixelArraySize()).thenReturn(mockCameraBoundaries);
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(mockCameraBoundaries, result);
+ verify(mockCameraProperties, never()).getSensorInfoPreCorrectionActiveArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ } finally {
+ updateSdkVersion(0);
+ }
+ }
+
+ @Test
+ public void
+ getCameraBoundaries_shouldReturnSensorInfoPixelArraySizeWhenDistortionCorrectionIsOff() {
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes())
+ .thenReturn(new int[] {CaptureRequest.DISTORTION_CORRECTION_MODE_OFF});
+ when(mockCameraProperties.getSensorInfoPixelArraySize()).thenReturn(mockCameraBoundaries);
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(mockCameraBoundaries, result);
+ verify(mockCameraProperties, never()).getSensorInfoPreCorrectionActiveArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ } finally {
+ updateSdkVersion(0);
+ }
+ }
+
+ @Test
+ public void
+      getCameraBoundaries_shouldReturnSensorInfoPreCorrectionActiveArraySizeWhenDistortionCorrectionModeIsSetToNull() {
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ Rect mockSensorInfoPreCorrectionActiveArraySize = mock(Rect.class);
+ when(mockSensorInfoPreCorrectionActiveArraySize.width()).thenReturn(100);
+ when(mockSensorInfoPreCorrectionActiveArraySize.height()).thenReturn(100);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes())
+ .thenReturn(
+ new int[] {
+ CaptureRequest.DISTORTION_CORRECTION_MODE_OFF,
+ CaptureRequest.DISTORTION_CORRECTION_MODE_FAST
+ });
+ when(mockBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE)).thenReturn(null);
+ when(mockCameraProperties.getSensorInfoPreCorrectionActiveArraySize())
+ .thenReturn(mockSensorInfoPreCorrectionActiveArraySize);
+
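+      // SizeFactory is statically stubbed so the Size returned by getCameraBoundaries can
+      // be inspected; each created mock simply echoes the requested width and height.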
+ try (MockedStatic mockedSizeFactory =
+ mockStatic(CameraRegionUtils.SizeFactory.class)) {
+ mockedSizeFactory
+ .when(() -> CameraRegionUtils.SizeFactory.create(anyInt(), anyInt()))
+ .thenAnswer(
+ (Answer)
+ invocation -> {
+ Size mockSize = mock(Size.class);
+ when(mockSize.getWidth()).thenReturn(invocation.getArgument(0));
+ when(mockSize.getHeight()).thenReturn(invocation.getArgument(1));
+ return mockSize;
+ });
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(100, result.getWidth());
+ assertEquals(100, result.getHeight());
+ verify(mockCameraProperties, never()).getSensorInfoPixelArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ }
+ } finally {
+ updateSdkVersion(0);
+ }
+ }
+
+ @Test
+ public void
+      getCameraBoundaries_shouldReturnSensorInfoPreCorrectionActiveArraySizeWhenDistortionCorrectionModeIsSetToOff() {
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ Rect mockSensorInfoPreCorrectionActiveArraySize = mock(Rect.class);
+ when(mockSensorInfoPreCorrectionActiveArraySize.width()).thenReturn(100);
+ when(mockSensorInfoPreCorrectionActiveArraySize.height()).thenReturn(100);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes())
+ .thenReturn(
+ new int[] {
+ CaptureRequest.DISTORTION_CORRECTION_MODE_OFF,
+ CaptureRequest.DISTORTION_CORRECTION_MODE_FAST
+ });
+
+ when(mockBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE))
+ .thenReturn(CaptureRequest.DISTORTION_CORRECTION_MODE_OFF);
+ when(mockCameraProperties.getSensorInfoPreCorrectionActiveArraySize())
+ .thenReturn(mockSensorInfoPreCorrectionActiveArraySize);
+
+ try (MockedStatic mockedSizeFactory =
+ mockStatic(CameraRegionUtils.SizeFactory.class)) {
+ mockedSizeFactory
+ .when(() -> CameraRegionUtils.SizeFactory.create(anyInt(), anyInt()))
+ .thenAnswer(
+ (Answer)
+ invocation -> {
+ Size mockSize = mock(Size.class);
+ when(mockSize.getWidth()).thenReturn(invocation.getArgument(0));
+ when(mockSize.getHeight()).thenReturn(invocation.getArgument(1));
+ return mockSize;
+ });
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(100, result.getWidth());
+ assertEquals(100, result.getHeight());
+ verify(mockCameraProperties, never()).getSensorInfoPixelArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoActiveArraySize();
+ }
+ } finally {
+ updateSdkVersion(0);
+ }
+ }
+
+ @Test
+ public void
+ getCameraBoundaries_shouldReturnSensorInfoActiveArraySizeWhenDistortionCorrectionModeIsSet() {
+ updateSdkVersion(Build.VERSION_CODES.P);
+
+ try {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ Rect mockSensorInfoActiveArraySize = mock(Rect.class);
+ when(mockSensorInfoActiveArraySize.width()).thenReturn(100);
+ when(mockSensorInfoActiveArraySize.height()).thenReturn(100);
+
+ when(mockCameraProperties.getDistortionCorrectionAvailableModes())
+ .thenReturn(
+ new int[] {
+ CaptureRequest.DISTORTION_CORRECTION_MODE_OFF,
+ CaptureRequest.DISTORTION_CORRECTION_MODE_FAST
+ });
+
+ when(mockBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE))
+ .thenReturn(CaptureRequest.DISTORTION_CORRECTION_MODE_FAST);
+ when(mockCameraProperties.getSensorInfoActiveArraySize())
+ .thenReturn(mockSensorInfoActiveArraySize);
+
+ try (MockedStatic mockedSizeFactory =
+ mockStatic(CameraRegionUtils.SizeFactory.class)) {
+ mockedSizeFactory
+ .when(() -> CameraRegionUtils.SizeFactory.create(anyInt(), anyInt()))
+ .thenAnswer(
+ (Answer)
+ invocation -> {
+ Size mockSize = mock(Size.class);
+ when(mockSize.getWidth()).thenReturn(invocation.getArgument(0));
+ when(mockSize.getHeight()).thenReturn(invocation.getArgument(1));
+ return mockSize;
+ });
+
+ Size result = CameraRegionUtils.getCameraBoundaries(mockCameraProperties, mockBuilder);
+
+ assertEquals(100, result.getWidth());
+ assertEquals(100, result.getHeight());
+ verify(mockCameraProperties, never()).getSensorInfoPixelArraySize();
+ verify(mockCameraProperties, never()).getSensorInfoPreCorrectionActiveArraySize();
+ }
+ } finally {
+ updateSdkVersion(0);
+ }
+ }
+
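+  // Reflectively overrides the final Build.VERSION.SDK_INT field so both the pre-P and
+  // P+ branches of getCameraBoundaries can be exercised; each test restores it to 0 in
+  // its finally block.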
+ private static void updateSdkVersion(int version) {
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", version);
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
new file mode 100644
index 000000000000..1ed2e4c11d7b
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
@@ -0,0 +1,900 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import androidx.annotation.NonNull;
+import androidx.lifecycle.LifecycleObserver;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugin.common.MethodChannel;
+import io.flutter.plugins.camera.features.CameraFeatureFactory;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.autofocus.FocusMode;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureMode;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.flash.FlashMode;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.DeviceOrientationManager;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+import io.flutter.plugins.camera.utils.TestUtils;
+import io.flutter.view.TextureRegistry;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+
+public class CameraTest {
+ private CameraProperties mockCameraProperties;
+ private CameraFeatureFactory mockCameraFeatureFactory;
+ private DartMessenger mockDartMessenger;
+ private Camera camera;
+ private CameraCaptureSession mockCaptureSession;
+ private CaptureRequest.Builder mockPreviewRequestBuilder;
+ private MockedStatic mockHandlerThreadFactory;
+ private HandlerThread mockHandlerThread;
+ private MockedStatic mockHandlerFactory;
+ private Handler mockHandler;
+
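+  // Camera obtains its background HandlerThread and Handler through static factories;
+  // both are mocked here so no real thread is started and the startBackgroundThread
+  // tests can verify interactions against the returned mocks.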
+ @Before
+ public void before() {
+ mockCameraProperties = mock(CameraProperties.class);
+ mockCameraFeatureFactory = new TestCameraFeatureFactory();
+ mockDartMessenger = mock(DartMessenger.class);
+ mockCaptureSession = mock(CameraCaptureSession.class);
+ mockPreviewRequestBuilder = mock(CaptureRequest.Builder.class);
+ mockHandlerThreadFactory = mockStatic(Camera.HandlerThreadFactory.class);
+ mockHandlerThread = mock(HandlerThread.class);
+ mockHandlerFactory = mockStatic(Camera.HandlerFactory.class);
+ mockHandler = mock(Handler.class);
+
+ final Activity mockActivity = mock(Activity.class);
+ final TextureRegistry.SurfaceTextureEntry mockFlutterTexture =
+ mock(TextureRegistry.SurfaceTextureEntry.class);
+ final String cameraName = "1";
+ final ResolutionPreset resolutionPreset = ResolutionPreset.high;
+ final boolean enableAudio = false;
+
+ when(mockCameraProperties.getCameraName()).thenReturn(cameraName);
+ mockHandlerFactory.when(() -> Camera.HandlerFactory.create(any())).thenReturn(mockHandler);
+ mockHandlerThreadFactory
+ .when(() -> Camera.HandlerThreadFactory.create(any()))
+ .thenReturn(mockHandlerThread);
+
+ camera =
+ new Camera(
+ mockActivity,
+ mockFlutterTexture,
+ mockCameraFeatureFactory,
+ mockDartMessenger,
+ mockCameraProperties,
+ resolutionPreset,
+ enableAudio);
+
+ TestUtils.setPrivateField(camera, "captureSession", mockCaptureSession);
+ TestUtils.setPrivateField(camera, "previewRequestBuilder", mockPreviewRequestBuilder);
+ }
+
+ @After
+ public void after() {
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 0);
+ mockHandlerThreadFactory.close();
+ mockHandlerFactory.close();
+ }
+
+ @Test
+ public void shouldNotImplementLifecycleObserverInterface() {
+ Class cameraClass = Camera.class;
+
+ assertFalse(LifecycleObserver.class.isAssignableFrom(cameraClass));
+ }
+
+ @Test
+ public void shouldCreateCameraPluginAndSetAllFeatures() {
+ final Activity mockActivity = mock(Activity.class);
+ final TextureRegistry.SurfaceTextureEntry mockFlutterTexture =
+ mock(TextureRegistry.SurfaceTextureEntry.class);
+ final CameraFeatureFactory mockCameraFeatureFactory = mock(CameraFeatureFactory.class);
+ final String cameraName = "1";
+ final ResolutionPreset resolutionPreset = ResolutionPreset.high;
+ final boolean enableAudio = false;
+
+ when(mockCameraProperties.getCameraName()).thenReturn(cameraName);
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ when(mockCameraFeatureFactory.createSensorOrientationFeature(any(), any(), any()))
+ .thenReturn(mockSensorOrientationFeature);
+
+ Camera camera =
+ new Camera(
+ mockActivity,
+ mockFlutterTexture,
+ mockCameraFeatureFactory,
+ mockDartMessenger,
+ mockCameraProperties,
+ resolutionPreset,
+ enableAudio);
+
+ verify(mockCameraFeatureFactory, times(1))
+ .createSensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+ verify(mockCameraFeatureFactory, times(1)).createAutoFocusFeature(mockCameraProperties, false);
+ verify(mockCameraFeatureFactory, times(1)).createExposureLockFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1))
+ .createExposurePointFeature(eq(mockCameraProperties), eq(mockSensorOrientationFeature));
+ verify(mockCameraFeatureFactory, times(1)).createExposureOffsetFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1)).createFlashFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1))
+ .createFocusPointFeature(eq(mockCameraProperties), eq(mockSensorOrientationFeature));
+ verify(mockCameraFeatureFactory, times(1)).createFpsRangeFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1)).createNoiseReductionFeature(mockCameraProperties);
+ verify(mockCameraFeatureFactory, times(1))
+ .createResolutionFeature(mockCameraProperties, resolutionPreset, cameraName);
+ verify(mockCameraFeatureFactory, times(1)).createZoomLevelFeature(mockCameraProperties);
+ assertNotNull("should create a camera", camera);
+ }
+
+ @Test
+ public void getDeviceOrientationManager() {
+ SensorOrientationFeature mockSensorOrientationFeature =
+ mockCameraFeatureFactory.createSensorOrientationFeature(mockCameraProperties, null, null);
+ DeviceOrientationManager mockDeviceOrientationManager = mock(DeviceOrientationManager.class);
+
+ when(mockSensorOrientationFeature.getDeviceOrientationManager())
+ .thenReturn(mockDeviceOrientationManager);
+
+ DeviceOrientationManager actualDeviceOrientationManager = camera.getDeviceOrientationManager();
+
+ verify(mockSensorOrientationFeature, times(1)).getDeviceOrientationManager();
+ assertEquals(mockDeviceOrientationManager, actualDeviceOrientationManager);
+ }
+
+ @Test
+ public void getExposureOffsetStepSize() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ double stepSize = 2.3;
+
+ when(mockExposureOffsetFeature.getExposureOffsetStepSize()).thenReturn(stepSize);
+
+ double actualSize = camera.getExposureOffsetStepSize();
+
+ verify(mockExposureOffsetFeature, times(1)).getExposureOffsetStepSize();
+ assertEquals(stepSize, actualSize, 0);
+ }
+
+ @Test
+ public void getMaxExposureOffset() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ double expectedMaxOffset = 42.0;
+
+ when(mockExposureOffsetFeature.getMaxExposureOffset()).thenReturn(expectedMaxOffset);
+
+ double actualMaxOffset = camera.getMaxExposureOffset();
+
+ verify(mockExposureOffsetFeature, times(1)).getMaxExposureOffset();
+ assertEquals(expectedMaxOffset, actualMaxOffset, 0);
+ }
+
+ @Test
+ public void getMinExposureOffset() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ double expectedMinOffset = 21.5;
+
+    when(mockExposureOffsetFeature.getMinExposureOffset()).thenReturn(expectedMinOffset);
+
+ double actualMinOffset = camera.getMinExposureOffset();
+
+ verify(mockExposureOffsetFeature, times(1)).getMinExposureOffset();
+ assertEquals(expectedMinOffset, actualMinOffset, 0);
+ }
+
+ @Test
+ public void getMaxZoomLevel() {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ float expectedMaxZoomLevel = 4.2f;
+
+ when(mockZoomLevelFeature.getMaximumZoomLevel()).thenReturn(expectedMaxZoomLevel);
+
+ float actualMaxZoomLevel = camera.getMaxZoomLevel();
+
+ verify(mockZoomLevelFeature, times(1)).getMaximumZoomLevel();
+ assertEquals(expectedMaxZoomLevel, actualMaxZoomLevel, 0);
+ }
+
+ @Test
+ public void getMinZoomLevel() {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ float expectedMinZoomLevel = 4.2f;
+
+ when(mockZoomLevelFeature.getMinimumZoomLevel()).thenReturn(expectedMinZoomLevel);
+
+ float actualMinZoomLevel = camera.getMinZoomLevel();
+
+ verify(mockZoomLevelFeature, times(1)).getMinimumZoomLevel();
+ assertEquals(expectedMinZoomLevel, actualMinZoomLevel, 0);
+ }
+
+ @Test
+ public void setExposureMode_shouldUpdateExposureLockFeature() {
+ ExposureLockFeature mockExposureLockFeature =
+ mockCameraFeatureFactory.createExposureLockFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ ExposureMode exposureMode = ExposureMode.locked;
+
+ camera.setExposureMode(mockResult, exposureMode);
+
+ verify(mockExposureLockFeature, times(1)).setValue(exposureMode);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void setExposureMode_shouldUpdateBuilder() {
+ ExposureLockFeature mockExposureLockFeature =
+ mockCameraFeatureFactory.createExposureLockFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ ExposureMode exposureMode = ExposureMode.locked;
+
+ camera.setExposureMode(mockResult, exposureMode);
+
+ verify(mockExposureLockFeature, times(1)).updateBuilder(any());
+ }
+
+ @Test
+ public void setExposureMode_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ ExposureMode exposureMode = ExposureMode.locked;
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setExposureMode(mockResult, exposureMode);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1))
+ .error("setExposureModeFailed", "Could not set exposure mode.", null);
+ }
+
+ @Test
+ public void setExposurePoint_shouldUpdateExposurePointFeature() {
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ ExposurePointFeature mockExposurePointFeature =
+ mockCameraFeatureFactory.createExposurePointFeature(
+ mockCameraProperties, mockSensorOrientationFeature);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+
+ camera.setExposurePoint(mockResult, point);
+
+ verify(mockExposurePointFeature, times(1)).setValue(point);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void setExposurePoint_shouldUpdateBuilder() {
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ ExposurePointFeature mockExposurePointFeature =
+ mockCameraFeatureFactory.createExposurePointFeature(
+ mockCameraProperties, mockSensorOrientationFeature);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+
+ camera.setExposurePoint(mockResult, point);
+
+ verify(mockExposurePointFeature, times(1)).updateBuilder(any());
+ }
+
+ @Test
+ public void setExposurePoint_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setExposurePoint(mockResult, point);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1))
+ .error("setExposurePointFailed", "Could not set exposure point.", null);
+ }
+
+ @Test
+ public void setFlashMode_shouldUpdateFlashFeature() {
+ FlashFeature mockFlashFeature =
+ mockCameraFeatureFactory.createFlashFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ FlashMode flashMode = FlashMode.always;
+
+ camera.setFlashMode(mockResult, flashMode);
+
+ verify(mockFlashFeature, times(1)).setValue(flashMode);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void setFlashMode_shouldUpdateBuilder() {
+ FlashFeature mockFlashFeature =
+ mockCameraFeatureFactory.createFlashFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ FlashMode flashMode = FlashMode.always;
+
+ camera.setFlashMode(mockResult, flashMode);
+
+ verify(mockFlashFeature, times(1)).updateBuilder(any());
+ }
+
+ @Test
+ public void setFlashMode_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ FlashMode flashMode = FlashMode.always;
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setFlashMode(mockResult, flashMode);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1)).error("setFlashModeFailed", "Could not set flash mode.", null);
+ }
+
+ @Test
+ public void setFocusPoint_shouldUpdateFocusPointFeature() {
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ FocusPointFeature mockFocusPointFeature =
+ mockCameraFeatureFactory.createFocusPointFeature(
+ mockCameraProperties, mockSensorOrientationFeature);
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+ when(mockAutoFocusFeature.getValue()).thenReturn(FocusMode.auto);
+
+ camera.setFocusPoint(mockResult, point);
+
+ verify(mockFocusPointFeature, times(1)).setValue(point);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void setFocusPoint_shouldUpdateBuilder() {
+ SensorOrientationFeature mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ FocusPointFeature mockFocusPointFeature =
+ mockCameraFeatureFactory.createFocusPointFeature(
+ mockCameraProperties, mockSensorOrientationFeature);
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+ when(mockAutoFocusFeature.getValue()).thenReturn(FocusMode.auto);
+
+ camera.setFocusPoint(mockResult, point);
+
+ verify(mockFocusPointFeature, times(1)).updateBuilder(any());
+ }
+
+ @Test
+ public void setFocusPoint_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ Point point = new Point(42d, 42d);
+ when(mockAutoFocusFeature.getValue()).thenReturn(FocusMode.auto);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setFocusPoint(mockResult, point);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1)).error("setFocusPointFailed", "Could not set focus point.", null);
+ }
+
+ @Test
+ public void setZoomLevel_shouldUpdateZoomLevelFeature() throws CameraAccessException {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ float zoomLevel = 1.0f;
+
+ when(mockZoomLevelFeature.getValue()).thenReturn(zoomLevel);
+ when(mockZoomLevelFeature.getMinimumZoomLevel()).thenReturn(0f);
+ when(mockZoomLevelFeature.getMaximumZoomLevel()).thenReturn(2f);
+
+ camera.setZoomLevel(mockResult, zoomLevel);
+
+ verify(mockZoomLevelFeature, times(1)).setValue(zoomLevel);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void setZoomLevel_shouldUpdateBuilder() throws CameraAccessException {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ float zoomLevel = 1.0f;
+
+ when(mockZoomLevelFeature.getValue()).thenReturn(zoomLevel);
+ when(mockZoomLevelFeature.getMinimumZoomLevel()).thenReturn(0f);
+ when(mockZoomLevelFeature.getMaximumZoomLevel()).thenReturn(2f);
+
+ camera.setZoomLevel(mockResult, zoomLevel);
+
+ verify(mockZoomLevelFeature, times(1)).updateBuilder(any());
+ }
+
+ @Test
+ public void setZoomLevel_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ ZoomLevelFeature mockZoomLevelFeature =
+ mockCameraFeatureFactory.createZoomLevelFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ float zoomLevel = 1.0f;
+
+ when(mockZoomLevelFeature.getValue()).thenReturn(zoomLevel);
+ when(mockZoomLevelFeature.getMinimumZoomLevel()).thenReturn(0f);
+ when(mockZoomLevelFeature.getMaximumZoomLevel()).thenReturn(2f);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setZoomLevel(mockResult, zoomLevel);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1)).error("setZoomLevelFailed", "Could not set zoom level.", null);
+ }
+
+ @Test
+ public void pauseVideoRecording_shouldSendNullResultWhenNotRecording() {
+ TestUtils.setPrivateField(camera, "recordingVideo", false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.pauseVideoRecording(mockResult);
+
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ @Test
+ public void pauseVideoRecording_shouldCallPauseWhenRecordingAndOnAPIN() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 24);
+
+ camera.pauseVideoRecording(mockResult);
+
+ verify(mockMediaRecorder, times(1)).pause();
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ @Test
+  public void pauseVideoRecording_shouldSendVideoRecordingFailedErrorWhenVersionCodeSmallerThanN() {
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 23);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.pauseVideoRecording(mockResult);
+
+ verify(mockResult, times(1))
+ .error("videoRecordingFailed", "pauseVideoRecording requires Android API +24.", null);
+ verify(mockResult, never()).success(any());
+ }
+
+ @Test
+ public void
+ pauseVideoRecording_shouldSendVideoRecordingFailedErrorWhenMediaRecorderPauseThrowsIllegalStateException() {
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 24);
+
+ IllegalStateException expectedException = new IllegalStateException("Test error message");
+
+ doThrow(expectedException).when(mockMediaRecorder).pause();
+
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.pauseVideoRecording(mockResult);
+
+ verify(mockResult, times(1)).error("videoRecordingFailed", "Test error message", null);
+ verify(mockResult, never()).success(any());
+ }
+
+ @Test
+ public void resumeVideoRecording_shouldSendNullResultWhenNotRecording() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ TestUtils.setPrivateField(camera, "recordingVideo", false);
+
+ camera.resumeVideoRecording(mockResult);
+
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ @Test
+  public void resumeVideoRecording_shouldCallResumeWhenRecordingAndOnAPIN() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 24);
+
+ camera.resumeVideoRecording(mockResult);
+
+ verify(mockMediaRecorder, times(1)).resume();
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ @Test
+ public void
+ resumeVideoRecording_shouldSendVideoRecordingFailedErrorWhenVersionCodeSmallerThanN() {
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 23);
+
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.resumeVideoRecording(mockResult);
+
+ verify(mockResult, times(1))
+ .error("videoRecordingFailed", "resumeVideoRecording requires Android API +24.", null);
+ verify(mockResult, never()).success(any());
+ }
+
+ @Test
+ public void
+      resumeVideoRecording_shouldSendVideoRecordingFailedErrorWhenMediaRecorderResumeThrowsIllegalStateException() {
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setFinalStatic(Build.VERSION.class, "SDK_INT", 24);
+
+ IllegalStateException expectedException = new IllegalStateException("Test error message");
+
+ doThrow(expectedException).when(mockMediaRecorder).resume();
+
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.resumeVideoRecording(mockResult);
+
+ verify(mockResult, times(1)).error("videoRecordingFailed", "Test error message", null);
+ verify(mockResult, never()).success(any());
+ }
+
+ @Test
+ public void setFocusMode_shouldUpdateAutoFocusFeature() {
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.setFocusMode(mockResult, FocusMode.auto);
+
+ verify(mockAutoFocusFeature, times(1)).setValue(FocusMode.auto);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void setFocusMode_shouldUpdateBuilder() {
+ AutoFocusFeature mockAutoFocusFeature =
+ mockCameraFeatureFactory.createAutoFocusFeature(mockCameraProperties, false);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.setFocusMode(mockResult, FocusMode.auto);
+
+ verify(mockAutoFocusFeature, times(1)).updateBuilder(any());
+ }
+
+ @Test
+ public void setFocusMode_shouldUnlockAutoFocusForAutoMode() {
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.auto);
+ verify(mockPreviewRequestBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+ verify(mockPreviewRequestBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+ }
+
+ @Test
+ public void setFocusMode_shouldSkipUnlockAutoFocusWhenNullCaptureSession() {
+ TestUtils.setPrivateField(camera, "captureSession", null);
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.auto);
+ verify(mockPreviewRequestBuilder, never())
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+ verify(mockPreviewRequestBuilder, never())
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+ }
+
+ @Test
+ public void setFocusMode_shouldSendErrorEventOnUnlockAutoFocusCameraAccessException()
+ throws CameraAccessException {
+ when(mockCaptureSession.capture(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.auto);
+ verify(mockDartMessenger, times(1)).sendCameraErrorEvent(any());
+ }
+
+ @Test
+ public void setFocusMode_shouldLockAutoFocusForLockedMode() throws CameraAccessException {
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.locked);
+ verify(mockPreviewRequestBuilder, times(1))
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
+ verify(mockCaptureSession, times(1)).capture(any(), any(), any());
+ verify(mockCaptureSession, times(1)).setRepeatingRequest(any(), any(), any());
+ }
+
+ @Test
+ public void setFocusMode_shouldSkipLockAutoFocusWhenNullCaptureSession() {
+ TestUtils.setPrivateField(camera, "captureSession", null);
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.locked);
+ verify(mockPreviewRequestBuilder, never())
+ .set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
+ }
+
+ @Test
+ public void setFocusMode_shouldSendErrorEventOnLockAutoFocusCameraAccessException()
+ throws CameraAccessException {
+ when(mockCaptureSession.capture(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+ camera.setFocusMode(mock(MethodChannel.Result.class), FocusMode.locked);
+ verify(mockDartMessenger, times(1)).sendCameraErrorEvent(any());
+ }
+
+ @Test
+ public void setFocusMode_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setFocusMode(mockResult, FocusMode.locked);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1))
+ .error("setFocusModeFailed", "Error setting focus mode: null", null);
+ }
+
+ @Test
+ public void setExposureOffset_shouldUpdateExposureOffsetFeature() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ when(mockExposureOffsetFeature.getValue()).thenReturn(1.0);
+
+ camera.setExposureOffset(mockResult, 1.0);
+
+ verify(mockExposureOffsetFeature, times(1)).setValue(1.0);
+ verify(mockResult, never()).error(any(), any(), any());
+ verify(mockResult, times(1)).success(1.0);
+ }
+
+ @Test
+  public void setExposureOffset_shouldUpdateBuilder() {
+ ExposureOffsetFeature mockExposureOffsetFeature =
+ mockCameraFeatureFactory.createExposureOffsetFeature(mockCameraProperties);
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+
+ camera.setExposureOffset(mockResult, 1.0);
+
+ verify(mockExposureOffsetFeature, times(1)).updateBuilder(any());
+ }
+
+ @Test
+ public void setExposureOffset_shouldCallErrorOnResultOnCameraAccessException()
+ throws CameraAccessException {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0, ""));
+
+ camera.setExposureOffset(mockResult, 1.0);
+
+ verify(mockResult, never()).success(any());
+ verify(mockResult, times(1))
+ .error("setExposureOffsetFailed", "Could not set exposure offset.", null);
+ }
+
+ @Test
+ public void lockCaptureOrientation_shouldLockCaptureOrientation() {
+ final Activity mockActivity = mock(Activity.class);
+ SensorOrientationFeature mockSensorOrientationFeature =
+ mockCameraFeatureFactory.createSensorOrientationFeature(
+ mockCameraProperties, mockActivity, mockDartMessenger);
+
+ camera.lockCaptureOrientation(PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+
+ verify(mockSensorOrientationFeature, times(1))
+ .lockCaptureOrientation(PlatformChannel.DeviceOrientation.PORTRAIT_UP);
+ }
+
+ @Test
+ public void unlockCaptureOrientation_shouldUnlockCaptureOrientation() {
+ final Activity mockActivity = mock(Activity.class);
+ SensorOrientationFeature mockSensorOrientationFeature =
+ mockCameraFeatureFactory.createSensorOrientationFeature(
+ mockCameraProperties, mockActivity, mockDartMessenger);
+
+ camera.unlockCaptureOrientation();
+
+ verify(mockSensorOrientationFeature, times(1)).unlockCaptureOrientation();
+ }
+
+ @Test
+ public void pausePreview_shouldPausePreview() throws CameraAccessException {
+ camera.pausePreview();
+
+    assertEquals(true, TestUtils.getPrivateField(camera, "pausedPreview"));
+ verify(mockCaptureSession, times(1)).stopRepeating();
+ }
+
+ @Test
+ public void resumePreview_shouldResumePreview() throws CameraAccessException {
+ camera.resumePreview();
+
+    assertEquals(false, TestUtils.getPrivateField(camera, "pausedPreview"));
+ verify(mockCaptureSession, times(1)).setRepeatingRequest(any(), any(), any());
+ }
+
+ @Test
+ public void resumePreview_shouldSendErrorEventOnCameraAccessException()
+ throws CameraAccessException {
+ when(mockCaptureSession.setRepeatingRequest(any(), any(), any()))
+ .thenThrow(new CameraAccessException(0));
+
+ camera.resumePreview();
+
+ verify(mockDartMessenger, times(1)).sendCameraErrorEvent(any());
+ }
+
+ @Test
+ public void startBackgroundThread_shouldStartNewThread() {
+ camera.startBackgroundThread();
+
+ verify(mockHandlerThread, times(1)).start();
+ assertEquals(mockHandler, TestUtils.getPrivateField(camera, "backgroundHandler"));
+ }
+
+ @Test
+ public void startBackgroundThread_shouldNotStartNewThreadWhenAlreadyCreated() {
+ camera.startBackgroundThread();
+ camera.startBackgroundThread();
+
+ verify(mockHandlerThread, times(1)).start();
+ }
+
+ private static class TestCameraFeatureFactory implements CameraFeatureFactory {
+ private final AutoFocusFeature mockAutoFocusFeature;
+ private final ExposureLockFeature mockExposureLockFeature;
+ private final ExposureOffsetFeature mockExposureOffsetFeature;
+ private final ExposurePointFeature mockExposurePointFeature;
+ private final FlashFeature mockFlashFeature;
+ private final FocusPointFeature mockFocusPointFeature;
+ private final FpsRangeFeature mockFpsRangeFeature;
+ private final NoiseReductionFeature mockNoiseReductionFeature;
+ private final ResolutionFeature mockResolutionFeature;
+ private final SensorOrientationFeature mockSensorOrientationFeature;
+ private final ZoomLevelFeature mockZoomLevelFeature;
+
+ public TestCameraFeatureFactory() {
+ this.mockAutoFocusFeature = mock(AutoFocusFeature.class);
+ this.mockExposureLockFeature = mock(ExposureLockFeature.class);
+ this.mockExposureOffsetFeature = mock(ExposureOffsetFeature.class);
+ this.mockExposurePointFeature = mock(ExposurePointFeature.class);
+ this.mockFlashFeature = mock(FlashFeature.class);
+ this.mockFocusPointFeature = mock(FocusPointFeature.class);
+ this.mockFpsRangeFeature = mock(FpsRangeFeature.class);
+ this.mockNoiseReductionFeature = mock(NoiseReductionFeature.class);
+ this.mockResolutionFeature = mock(ResolutionFeature.class);
+ this.mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ this.mockZoomLevelFeature = mock(ZoomLevelFeature.class);
+ }
+
+ @Override
+ public AutoFocusFeature createAutoFocusFeature(
+ @NonNull CameraProperties cameraProperties, boolean recordingVideo) {
+ return mockAutoFocusFeature;
+ }
+
+ @Override
+ public ExposureLockFeature createExposureLockFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockExposureLockFeature;
+ }
+
+ @Override
+ public ExposureOffsetFeature createExposureOffsetFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockExposureOffsetFeature;
+ }
+
+ @Override
+ public FlashFeature createFlashFeature(@NonNull CameraProperties cameraProperties) {
+ return mockFlashFeature;
+ }
+
+ @Override
+ public ResolutionFeature createResolutionFeature(
+ @NonNull CameraProperties cameraProperties,
+ ResolutionPreset initialSetting,
+ String cameraName) {
+ return mockResolutionFeature;
+ }
+
+ @Override
+ public FocusPointFeature createFocusPointFeature(
+ @NonNull CameraProperties cameraProperties,
+        @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return mockFocusPointFeature;
+ }
+
+ @Override
+ public FpsRangeFeature createFpsRangeFeature(@NonNull CameraProperties cameraProperties) {
+ return mockFpsRangeFeature;
+ }
+
+ @Override
+ public SensorOrientationFeature createSensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger) {
+ return mockSensorOrientationFeature;
+ }
+
+ @Override
+ public ZoomLevelFeature createZoomLevelFeature(@NonNull CameraProperties cameraProperties) {
+ return mockZoomLevelFeature;
+ }
+
+ @Override
+ public ExposurePointFeature createExposurePointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return mockExposurePointFeature;
+ }
+
+ @Override
+ public NoiseReductionFeature createNoiseReductionFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockNoiseReductionFeature;
+ }
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraTest_getRecordingProfileTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraTest_getRecordingProfileTest.java
new file mode 100644
index 000000000000..04bab14f26ac
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/CameraTest_getRecordingProfileTest.java
@@ -0,0 +1,205 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CaptureRequest;
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import android.os.Handler;
+import android.os.HandlerThread;
+import androidx.annotation.NonNull;
+import io.flutter.plugins.camera.features.CameraFeatureFactory;
+import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature;
+import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature;
+import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature;
+import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature;
+import io.flutter.plugins.camera.features.flash.FlashFeature;
+import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature;
+import io.flutter.plugins.camera.features.fpsrange.FpsRangeFeature;
+import io.flutter.plugins.camera.features.noisereduction.NoiseReductionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionFeature;
+import io.flutter.plugins.camera.features.resolution.ResolutionPreset;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature;
+import io.flutter.view.TextureRegistry;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.MockedStatic;
+import org.robolectric.RobolectricTestRunner;
+import org.robolectric.annotation.Config;
+
+@RunWith(RobolectricTestRunner.class)
+public class CameraTest_getRecordingProfileTest {
+
+ private CameraProperties mockCameraProperties;
+ private CameraFeatureFactory mockCameraFeatureFactory;
+ private DartMessenger mockDartMessenger;
+ private Camera camera;
+ private CameraCaptureSession mockCaptureSession;
+ private CaptureRequest.Builder mockPreviewRequestBuilder;
+ private MockedStatic mockHandlerThreadFactory;
+ private HandlerThread mockHandlerThread;
+ private MockedStatic mockHandlerFactory;
+ private Handler mockHandler;
+
+ @Before
+ public void before() {
+ mockCameraProperties = mock(CameraProperties.class);
+ mockCameraFeatureFactory = new TestCameraFeatureFactory();
+ mockDartMessenger = mock(DartMessenger.class);
+
+ final Activity mockActivity = mock(Activity.class);
+ final TextureRegistry.SurfaceTextureEntry mockFlutterTexture =
+ mock(TextureRegistry.SurfaceTextureEntry.class);
+ final ResolutionPreset resolutionPreset = ResolutionPreset.high;
+ final boolean enableAudio = false;
+
+ camera =
+ new Camera(
+ mockActivity,
+ mockFlutterTexture,
+ mockCameraFeatureFactory,
+ mockDartMessenger,
+ mockCameraProperties,
+ resolutionPreset,
+ enableAudio);
+ }
+
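+  // Robolectric's @Config pins the simulated SDK level so the legacy CamcorderProfile
+  // path (API 30 and below) and the EncoderProfiles path (API 31+) are both covered.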
+ @Config(maxSdk = 30)
+ @Test
+ public void getRecordingProfileLegacy() {
+ ResolutionFeature mockResolutionFeature =
+ mockCameraFeatureFactory.createResolutionFeature(mockCameraProperties, null, null);
+ CamcorderProfile mockCamcorderProfile = mock(CamcorderProfile.class);
+
+ when(mockResolutionFeature.getRecordingProfileLegacy()).thenReturn(mockCamcorderProfile);
+
+ CamcorderProfile actualRecordingProfile = camera.getRecordingProfileLegacy();
+
+ verify(mockResolutionFeature, times(1)).getRecordingProfileLegacy();
+ assertEquals(mockCamcorderProfile, actualRecordingProfile);
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void getRecordingProfile() {
+ ResolutionFeature mockResolutionFeature =
+ mockCameraFeatureFactory.createResolutionFeature(mockCameraProperties, null, null);
+ EncoderProfiles mockRecordingProfile = mock(EncoderProfiles.class);
+
+ when(mockResolutionFeature.getRecordingProfile()).thenReturn(mockRecordingProfile);
+
+ EncoderProfiles actualRecordingProfile = camera.getRecordingProfile();
+
+ verify(mockResolutionFeature, times(1)).getRecordingProfile();
+ assertEquals(mockRecordingProfile, actualRecordingProfile);
+ }
+
+ private static class TestCameraFeatureFactory implements CameraFeatureFactory {
+ private final AutoFocusFeature mockAutoFocusFeature;
+ private final ExposureLockFeature mockExposureLockFeature;
+ private final ExposureOffsetFeature mockExposureOffsetFeature;
+ private final ExposurePointFeature mockExposurePointFeature;
+ private final FlashFeature mockFlashFeature;
+ private final FocusPointFeature mockFocusPointFeature;
+ private final FpsRangeFeature mockFpsRangeFeature;
+ private final NoiseReductionFeature mockNoiseReductionFeature;
+ private final ResolutionFeature mockResolutionFeature;
+ private final SensorOrientationFeature mockSensorOrientationFeature;
+ private final ZoomLevelFeature mockZoomLevelFeature;
+
+ public TestCameraFeatureFactory() {
+ this.mockAutoFocusFeature = mock(AutoFocusFeature.class);
+ this.mockExposureLockFeature = mock(ExposureLockFeature.class);
+ this.mockExposureOffsetFeature = mock(ExposureOffsetFeature.class);
+ this.mockExposurePointFeature = mock(ExposurePointFeature.class);
+ this.mockFlashFeature = mock(FlashFeature.class);
+ this.mockFocusPointFeature = mock(FocusPointFeature.class);
+ this.mockFpsRangeFeature = mock(FpsRangeFeature.class);
+ this.mockNoiseReductionFeature = mock(NoiseReductionFeature.class);
+ this.mockResolutionFeature = mock(ResolutionFeature.class);
+ this.mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ this.mockZoomLevelFeature = mock(ZoomLevelFeature.class);
+ }
+
+ @Override
+ public AutoFocusFeature createAutoFocusFeature(
+ @NonNull CameraProperties cameraProperties, boolean recordingVideo) {
+ return mockAutoFocusFeature;
+ }
+
+ @Override
+ public ExposureLockFeature createExposureLockFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockExposureLockFeature;
+ }
+
+ @Override
+ public ExposureOffsetFeature createExposureOffsetFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockExposureOffsetFeature;
+ }
+
+ @Override
+ public FlashFeature createFlashFeature(@NonNull CameraProperties cameraProperties) {
+ return mockFlashFeature;
+ }
+
+ @Override
+ public ResolutionFeature createResolutionFeature(
+ @NonNull CameraProperties cameraProperties,
+ ResolutionPreset initialSetting,
+ String cameraName) {
+ return mockResolutionFeature;
+ }
+
+ @Override
+ public FocusPointFeature createFocusPointFeature(
+ @NonNull CameraProperties cameraProperties,
+        @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return mockFocusPointFeature;
+ }
+
+ @Override
+ public FpsRangeFeature createFpsRangeFeature(@NonNull CameraProperties cameraProperties) {
+ return mockFpsRangeFeature;
+ }
+
+ @Override
+ public SensorOrientationFeature createSensorOrientationFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull Activity activity,
+ @NonNull DartMessenger dartMessenger) {
+ return mockSensorOrientationFeature;
+ }
+
+ @Override
+ public ZoomLevelFeature createZoomLevelFeature(@NonNull CameraProperties cameraProperties) {
+ return mockZoomLevelFeature;
+ }
+
+ @Override
+ public ExposurePointFeature createExposurePointFeature(
+ @NonNull CameraProperties cameraProperties,
+ @NonNull SensorOrientationFeature sensorOrientationFeature) {
+ return mockExposurePointFeature;
+ }
+
+ @Override
+ public NoiseReductionFeature createNoiseReductionFeature(
+ @NonNull CameraProperties cameraProperties) {
+ return mockNoiseReductionFeature;
+ }
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/ImageSaverTests.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/ImageSaverTests.java
new file mode 100644
index 000000000000..0358ce6cb785
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/ImageSaverTests.java
@@ -0,0 +1,105 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.media.Image;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+public class ImageSaverTests {
+
+ Image mockImage;
+ File mockFile;
+ ImageSaver.Callback mockCallback;
+ ImageSaver imageSaver;
+ Image.Plane mockPlane;
+ ByteBuffer mockBuffer;
+ MockedStatic mockFileOutputStreamFactory;
+ FileOutputStream mockFileOutputStream;
+
+ @Before
+ public void setup() {
+ // Set up mocked file dependency
+ mockFile = mock(File.class);
+ when(mockFile.getAbsolutePath()).thenReturn("absolute/path");
+ mockPlane = mock(Image.Plane.class);
+ mockBuffer = mock(ByteBuffer.class);
+ when(mockBuffer.remaining()).thenReturn(3);
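+    // Stub ByteBuffer#get(byte[]) to fill the caller's array with three known bytes, so the test
+    // can verify exactly which bytes are written to the output stream.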
+ when(mockBuffer.get(any()))
+ .thenAnswer(
+ new Answer() {
+ @Override
+ public Object answer(InvocationOnMock invocation) throws Throwable {
+ byte[] bytes = invocation.getArgument(0);
+ bytes[0] = 0x42;
+ bytes[1] = 0x00;
+ bytes[2] = 0x13;
+ return mockBuffer;
+ }
+ });
+
+ // Set up mocked image dependency
+ mockImage = mock(Image.class);
+ when(mockPlane.getBuffer()).thenReturn(mockBuffer);
+ when(mockImage.getPlanes()).thenReturn(new Image.Plane[] {mockPlane});
+
+ // Set up mocked FileOutputStream
+ mockFileOutputStreamFactory = mockStatic(ImageSaver.FileOutputStreamFactory.class);
+ mockFileOutputStream = mock(FileOutputStream.class);
+ mockFileOutputStreamFactory
+ .when(() -> ImageSaver.FileOutputStreamFactory.create(any()))
+ .thenReturn(mockFileOutputStream);
+
+ // Set up testable ImageSaver instance
+ mockCallback = mock(ImageSaver.Callback.class);
+ imageSaver = new ImageSaver(mockImage, mockFile, mockCallback);
+ }
+
+ @After
+ public void teardown() {
+ mockFileOutputStreamFactory.close();
+ }
+
+ @Test
+ public void runWritesBytesToFileAndFinishesWithPath() throws IOException {
+ imageSaver.run();
+
+ verify(mockFileOutputStream, times(1)).write(new byte[] {0x42, 0x00, 0x13});
+ verify(mockCallback, times(1)).onComplete("absolute/path");
+ verify(mockCallback, never()).onError(any(), any());
+ }
+
+ @Test
+ public void runCallsErrorOnWriteIoexception() throws IOException {
+ doThrow(new IOException()).when(mockFileOutputStream).write(any());
+ imageSaver.run();
+ verify(mockCallback, times(1)).onError("IOError", "Failed saving image");
+ verify(mockCallback, never()).onComplete(any());
+ }
+
+ @Test
+ public void runCallsErrorOnCloseIoexception() throws IOException {
+ doThrow(new IOException("message")).when(mockFileOutputStream).close();
+ imageSaver.run();
+ verify(mockCallback, times(1)).onError("cameraAccess", "message");
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/MethodCallHandlerImplTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/MethodCallHandlerImplTest.java
new file mode 100644
index 000000000000..868e2e9e6d57
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/MethodCallHandlerImplTest.java
@@ -0,0 +1,77 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static org.junit.Assert.assertFalse;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraAccessException;
+import androidx.lifecycle.LifecycleObserver;
+import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugin.common.MethodCall;
+import io.flutter.plugin.common.MethodChannel;
+import io.flutter.plugins.camera.utils.TestUtils;
+import io.flutter.view.TextureRegistry;
+import org.junit.Before;
+import org.junit.Test;
+
+public class MethodCallHandlerImplTest {
+
+ MethodChannel.MethodCallHandler handler;
+ MethodChannel.Result mockResult;
+ Camera mockCamera;
+
+ @Before
+ public void setUp() {
+ handler =
+ new MethodCallHandlerImpl(
+ mock(Activity.class),
+ mock(BinaryMessenger.class),
+ mock(CameraPermissions.class),
+ mock(CameraPermissions.PermissionsRegistry.class),
+ mock(TextureRegistry.class));
+ mockResult = mock(MethodChannel.Result.class);
+ mockCamera = mock(Camera.class);
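+    // Inject the mock camera into the handler's private "camera" field (via the TestUtils
+    // reflection helper) so onMethodCall is exercised against a controllable Camera instance.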
+ TestUtils.setPrivateField(handler, "camera", mockCamera);
+ }
+
+ @Test
+ public void shouldNotImplementLifecycleObserverInterface() {
+ Class methodCallHandlerClass = MethodCallHandlerImpl.class;
+
+ assertFalse(LifecycleObserver.class.isAssignableFrom(methodCallHandlerClass));
+ }
+
+ @Test
+ public void onMethodCall_pausePreview_shouldPausePreviewAndSendSuccessResult()
+ throws CameraAccessException {
+ handler.onMethodCall(new MethodCall("pausePreview", null), mockResult);
+
+ verify(mockCamera, times(1)).pausePreview();
+ verify(mockResult, times(1)).success(null);
+ }
+
+ @Test
+ public void onMethodCall_pausePreview_shouldSendErrorResultOnCameraAccessException()
+ throws CameraAccessException {
+ doThrow(new CameraAccessException(0)).when(mockCamera).pausePreview();
+
+ handler.onMethodCall(new MethodCall("pausePreview", null), mockResult);
+
+ verify(mockResult, times(1)).error("CameraAccess", null, null);
+ }
+
+ @Test
+ public void onMethodCall_resumePreview_shouldResumePreviewAndSendSuccessResult() {
+ handler.onMethodCall(new MethodCall("resumePreview", null), mockResult);
+
+ verify(mockCamera, times(1)).resumePreview();
+ verify(mockResult, times(1)).success(null);
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeatureTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeatureTest.java
new file mode 100644
index 000000000000..f03dc9f62e87
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/focuspoint/FocusPointFeatureTest.java
@@ -0,0 +1,318 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.focuspoint;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.isNull;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.util.Size;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.CameraRegionUtils;
+import io.flutter.plugins.camera.features.Point;
+import io.flutter.plugins.camera.features.sensororientation.DeviceOrientationManager;
+import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+
+public class FocusPointFeatureTest {
+
+ Size mockCameraBoundaries;
+ SensorOrientationFeature mockSensorOrientationFeature;
+ DeviceOrientationManager mockDeviceOrientationManager;
+
+ @Before
+ public void setUp() {
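+    // Shared fixture: a 100x100 camera boundary and a device reporting a landscape-left UI orientation.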
+ this.mockCameraBoundaries = mock(Size.class);
+ when(this.mockCameraBoundaries.getWidth()).thenReturn(100);
+ when(this.mockCameraBoundaries.getHeight()).thenReturn(100);
+ mockSensorOrientationFeature = mock(SensorOrientationFeature.class);
+ mockDeviceOrientationManager = mock(DeviceOrientationManager.class);
+ when(mockSensorOrientationFeature.getDeviceOrientationManager())
+ .thenReturn(mockDeviceOrientationManager);
+ when(mockDeviceOrientationManager.getLastUIOrientation())
+ .thenReturn(PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT);
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ assertEquals("FocusPointFeature", focusPointFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnNullIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Point actualPoint = focusPointFeature.getValue();
+    assertNull(actualPoint);
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ Point expectedPoint = new Point(0.0, 0.0);
+
+ focusPointFeature.setValue(expectedPoint);
+ Point actualPoint = focusPointFeature.getValue();
+
+ assertEquals(expectedPoint, actualPoint);
+ }
+
+ @Test
+ public void setValue_shouldResetPointWhenXCoordIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ focusPointFeature.setValue(new Point(null, 0.0));
+
+ assertNull(focusPointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldResetPointWhenYCoordIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ focusPointFeature.setValue(new Point(0.0, null));
+
+ assertNull(focusPointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldSetPointWhenValidCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ Point point = new Point(0.0, 0.0);
+
+ focusPointFeature.setValue(point);
+
+ assertEquals(point, focusPointFeature.getValue());
+ }
+
+ @Test
+ public void setValue_shouldDetermineMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ focusPointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
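+    // CameraRegionUtils is mocked statically so the point-to-MeteringRectangle conversion can be
+    // verified without touching real Camera2 classes.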
+ try (MockedStatic mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ focusPointFeature.setValue(new Point(0.5, 0.5));
+
+ mockedCameraRegionUtils.verify(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT),
+ times(1));
+ }
+ }
+
+ @Test(expected = AssertionError.class)
+ public void setValue_shouldThrowAssertionErrorWhenNoValidBoundariesAreSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ try (MockedStatic mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+ focusPointFeature.setValue(new Point(0.5, 0.5));
+ }
+ }
+
+ @Test
+ public void setValue_shouldNotDetermineMeteringRectangleWhenNullCoordsAreSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ focusPointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
+ try (MockedStatic mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ focusPointFeature.setValue(null);
+ focusPointFeature.setValue(new Point(null, 0.5));
+ focusPointFeature.setValue(new Point(0.5, null));
+
+ mockedCameraRegionUtils.verifyNoInteractions();
+ }
+ }
+
+ @Test
+ public void
+ setCameraBoundaries_shouldDetermineMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+ focusPointFeature.setValue(new Point(0.5, 0.5));
+ Size mockedCameraBoundaries = mock(Size.class);
+
+ try (MockedStatic mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+
+ focusPointFeature.setCameraBoundaries(mockedCameraBoundaries);
+
+ mockedCameraRegionUtils.verify(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT),
+ times(1));
+ }
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenMaxRegionsIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(null);
+
+ assertFalse(focusPointFeature.checkIsSupported());
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenMaxRegionsIsZero() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(0);
+
+ assertFalse(focusPointFeature.checkIsSupported());
+ }
+
+ @Test
+  public void checkIsSupported_shouldReturnTrueWhenMaxRegionsIsBiggerThanZero() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(new Size(100, 100));
+
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+
+ assertTrue(focusPointFeature.checkIsSupported());
+ }
+
+ @Test
+ public void updateBuilder_shouldReturnWhenCheckIsSupportedIsFalse() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(0);
+
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+
+ verify(mockCaptureRequestBuilder, never()).set(any(), any());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetMeteringRectangleWhenValidBoundariesAndCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ Size mockedCameraBoundaries = mock(Size.class);
+ MeteringRectangle mockedMeteringRectangle = mock(MeteringRectangle.class);
+
+ try (MockedStatic mockedCameraRegionUtils =
+ Mockito.mockStatic(CameraRegionUtils.class)) {
+ mockedCameraRegionUtils
+ .when(
+ () ->
+ CameraRegionUtils.convertPointToMeteringRectangle(
+ mockedCameraBoundaries,
+ 0.5,
+ 0.5,
+ PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT))
+ .thenReturn(mockedMeteringRectangle);
+ focusPointFeature.setCameraBoundaries(mockedCameraBoundaries);
+ focusPointFeature.setValue(new Point(0.5, 0.5));
+
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+ }
+
+ verify(mockCaptureRequestBuilder, times(1))
+        .set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] {mockedMeteringRectangle});
+ }
+
+ @Test
+ public void updateBuilder_shouldNotSetMeteringRectangleWhenNoValidBoundariesAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ MeteringRectangle mockedMeteringRectangle = mock(MeteringRectangle.class);
+
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+
+ verify(mockCaptureRequestBuilder, times(1)).set(any(), isNull());
+ }
+
+ @Test
+ public void updateBuilder_shouldNotSetMeteringRectangleWhenNoValidCoordsAreSupplied() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ when(mockCameraProperties.getControlMaxRegionsAutoFocus()).thenReturn(1);
+ CaptureRequest.Builder mockCaptureRequestBuilder = mock(CaptureRequest.Builder.class);
+ FocusPointFeature focusPointFeature =
+ new FocusPointFeature(mockCameraProperties, mockSensorOrientationFeature);
+ focusPointFeature.setCameraBoundaries(this.mockCameraBoundaries);
+
+ focusPointFeature.setValue(null);
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+ focusPointFeature.setValue(new Point(0d, null));
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+ focusPointFeature.setValue(new Point(null, 0d));
+ focusPointFeature.updateBuilder(mockCaptureRequestBuilder);
+ verify(mockCaptureRequestBuilder, times(3)).set(any(), isNull());
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeaturePixel4aTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeaturePixel4aTest.java
new file mode 100644
index 000000000000..93cfe5523df3
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeaturePixel4aTest.java
@@ -0,0 +1,30 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.fpsrange;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
+import android.os.Build;
+import android.util.Range;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.utils.TestUtils;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.RobolectricTestRunner;
+
+@RunWith(RobolectricTestRunner.class)
+public class FpsRangeFeaturePixel4aTest {
+ @Test
+ public void ctor_shouldInitializeFpsRangeWith30WhenDeviceIsPixel4a() {
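+    // Simulate a Pixel 4a by overwriting the final Build.BRAND/MODEL fields via reflection; the
+    // constructor is expected to pin the FPS range to 30 on this device.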
+ TestUtils.setFinalStatic(Build.class, "BRAND", "google");
+ TestUtils.setFinalStatic(Build.class, "MODEL", "Pixel 4a");
+
+ FpsRangeFeature fpsRangeFeature = new FpsRangeFeature(mock(CameraProperties.class));
+ Range range = fpsRangeFeature.getValue();
+ assertEquals(30, (int) range.getLower());
+ assertEquals(30, (int) range.getUpper());
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeatureTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeatureTest.java
new file mode 100644
index 000000000000..2bb4d849a277
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/fpsrange/FpsRangeFeatureTest.java
@@ -0,0 +1,108 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.fpsrange;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build;
+import android.util.Range;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.utils.TestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class FpsRangeFeatureTest {
+ @Before
+ public void before() {
+ TestUtils.setFinalStatic(Build.class, "BRAND", "Test Brand");
+ TestUtils.setFinalStatic(Build.class, "MODEL", "Test Model");
+ }
+
+ @After
+ public void after() {
+ TestUtils.setFinalStatic(Build.class, "BRAND", null);
+ TestUtils.setFinalStatic(Build.class, "MODEL", null);
+ }
+
+ @Test
+ public void ctor_shouldInitializeFpsRangeWithHighestUpperValueFromRangeArray() {
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+ assertEquals(13, (int) fpsRangeFeature.getValue().getUpper());
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+ assertEquals("FpsRangeFeature", fpsRangeFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnHighestUpperRangeIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+
+ assertEquals(13, (int) fpsRangeFeature.getValue().getUpper());
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ FpsRangeFeature fpsRangeFeature = new FpsRangeFeature(mockCameraProperties);
+ @SuppressWarnings("unchecked")
+ Range expectedValue = mock(Range.class);
+
+ fpsRangeFeature.setValue(expectedValue);
+ Range actualValue = fpsRangeFeature.getValue();
+
+ assertEquals(expectedValue, actualValue);
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnTrue() {
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+ assertTrue(fpsRangeFeature.checkIsSupported());
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void updateBuilder_shouldSetAeTargetFpsRange() {
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ FpsRangeFeature fpsRangeFeature = createTestInstance();
+
+ fpsRangeFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder).set(eq(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE), any(Range.class));
+ }
+
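+  // Builds an FpsRangeFeature whose camera properties expose three ranges with upper bounds 11, 12
+  // and 13; the constructor is expected to pick the range with the highest upper bound (13).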
+ private static FpsRangeFeature createTestInstance() {
+ @SuppressWarnings("unchecked")
+ Range rangeOne = mock(Range.class);
+ @SuppressWarnings("unchecked")
+ Range rangeTwo = mock(Range.class);
+ @SuppressWarnings("unchecked")
+ Range rangeThree = mock(Range.class);
+
+ when(rangeOne.getUpper()).thenReturn(11);
+ when(rangeTwo.getUpper()).thenReturn(12);
+ when(rangeThree.getUpper()).thenReturn(13);
+
+ @SuppressWarnings("unchecked")
+ Range[] ranges = new Range[] {rangeOne, rangeTwo, rangeThree};
+
+ CameraProperties cameraProperties = mock(CameraProperties.class);
+
+ when(cameraProperties.getControlAutoExposureAvailableTargetFpsRanges()).thenReturn(ranges);
+
+ return new FpsRangeFeature(cameraProperties);
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeatureTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeatureTest.java
new file mode 100644
index 000000000000..b89aad0f6773
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeatureTest.java
@@ -0,0 +1,150 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.noisereduction;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.hardware.camera2.CaptureRequest;
+import android.os.Build.VERSION;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.utils.TestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class NoiseReductionFeatureTest {
+ @Before
+ public void before() {
+ // Make sure the VERSION.SDK_INT field returns 23, to allow using all available
+ // noise reduction modes in tests.
+ TestUtils.setFinalStatic(VERSION.class, "SDK_INT", 23);
+ }
+
+ @After
+ public void after() {
+    // Make sure we reset the VERSION.SDK_INT field to its original value.
+ TestUtils.setFinalStatic(VERSION.class, "SDK_INT", 0);
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ assertEquals("NoiseReductionFeature", noiseReductionFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnFastIfNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ assertEquals(NoiseReductionMode.fast, noiseReductionFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoTheSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+ NoiseReductionMode expectedValue = NoiseReductionMode.fast;
+
+ noiseReductionFeature.setValue(expectedValue);
+ NoiseReductionMode actualValue = noiseReductionFeature.getValue();
+
+ assertEquals(expectedValue, actualValue);
+ }
+
+ @Test
+ public void checkIsSupported_shouldReturnFalseWhenAvailableNoiseReductionModesIsNull() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(null);
+
+ assertFalse(noiseReductionFeature.checkIsSupported());
+ }
+
+ @Test
+ public void
+ checkIsSupported_shouldReturnFalseWhenAvailableNoiseReductionModesReturnsAnEmptyArray() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(new int[] {});
+
+ assertFalse(noiseReductionFeature.checkIsSupported());
+ }
+
+ @Test
+ public void
+ checkIsSupported_shouldReturnTrueWhenAvailableNoiseReductionModesReturnsAtLeastOneItem() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(new int[] {1});
+
+ assertTrue(noiseReductionFeature.checkIsSupported());
+ }
+
+ @Test
+ public void updateBuilder_shouldReturnWhenCheckIsSupportedIsFalse() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(new int[] {});
+
+ noiseReductionFeature.updateBuilder(mockBuilder);
+
+ verify(mockBuilder, never()).set(any(), any());
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeOffWhenOff() {
+ testUpdateBuilderWith(NoiseReductionMode.off, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeFastWhenFast() {
+ testUpdateBuilderWith(NoiseReductionMode.fast, CaptureRequest.NOISE_REDUCTION_MODE_FAST);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeHighQualityWhenHighQuality() {
+ testUpdateBuilderWith(
+ NoiseReductionMode.highQuality, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeMinimalWhenMinimal() {
+ testUpdateBuilderWith(NoiseReductionMode.minimal, CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL);
+ }
+
+ @Test
+ public void updateBuilder_shouldSetNoiseReductionModeZeroShutterLagWhenZeroShutterLag() {
+ testUpdateBuilderWith(
+ NoiseReductionMode.zeroShutterLag, CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG);
+ }
+
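+  // Shared helper: marks noise reduction as supported, applies the given mode, and verifies that
+  // the matching CaptureRequest.NOISE_REDUCTION_MODE_* constant is set on the builder.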
+ private static void testUpdateBuilderWith(NoiseReductionMode mode, int expectedResult) {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ CaptureRequest.Builder mockBuilder = mock(CaptureRequest.Builder.class);
+ NoiseReductionFeature noiseReductionFeature = new NoiseReductionFeature(mockCameraProperties);
+
+ when(mockCameraProperties.getAvailableNoiseReductionModes()).thenReturn(new int[] {1});
+
+ noiseReductionFeature.setValue(mode);
+ noiseReductionFeature.updateBuilder(mockBuilder);
+ verify(mockBuilder, times(1)).set(CaptureRequest.NOISE_REDUCTION_MODE, expectedResult);
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/resolution/ResolutionFeatureTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/resolution/ResolutionFeatureTest.java
new file mode 100644
index 000000000000..957b57a66435
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/resolution/ResolutionFeatureTest.java
@@ -0,0 +1,332 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.resolution;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.when;
+
+import android.media.CamcorderProfile;
+import android.media.EncoderProfiles;
+import io.flutter.plugins.camera.CameraProperties;
+import java.util.List;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.MockedStatic;
+import org.robolectric.RobolectricTestRunner;
+import org.robolectric.annotation.Config;
+
+@RunWith(RobolectricTestRunner.class)
+public class ResolutionFeatureTest {
+ private static final String cameraName = "1";
+ private CamcorderProfile mockProfileLowLegacy;
+ private EncoderProfiles mockProfileLow;
+ private MockedStatic mockedStaticProfile;
+
+ @Before
+ @SuppressWarnings("deprecation")
+ public void beforeLegacy() {
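+    // Mock the static CamcorderProfile API: every legacy quality level reports as available and
+    // resolves to a shared mock profile, while QUALITY_LOW gets its own mock for the fall-through test.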
+ mockedStaticProfile = mockStatic(CamcorderProfile.class);
+ mockProfileLowLegacy = mock(CamcorderProfile.class);
+ CamcorderProfile mockProfileLegacy = mock(CamcorderProfile.class);
+
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_2160P))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_1080P))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_720P))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_480P))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(true);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_LOW))
+ .thenReturn(true);
+
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_2160P))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_1080P))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_480P))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(mockProfileLegacy);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_LOW))
+ .thenReturn(mockProfileLowLegacy);
+ }
+
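+  // Intentionally not annotated with @Before: the EncoderProfiles-based stubs are only valid on
+  // SDK 31+, so the minSdk = 31 tests call this helper explicitly.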
+ public void before() {
+ mockProfileLow = mock(EncoderProfiles.class);
+ EncoderProfiles mockProfile = mock(EncoderProfiles.class);
+ EncoderProfiles.VideoProfile mockVideoProfile = mock(EncoderProfiles.VideoProfile.class);
+ List mockVideoProfilesList = List.of(mockVideoProfile);
+
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_2160P))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_1080P))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_480P))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(mockProfile);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_LOW))
+ .thenReturn(mockProfileLow);
+
+ when(mockProfile.getVideoProfiles()).thenReturn(mockVideoProfilesList);
+ when(mockVideoProfile.getHeight()).thenReturn(100);
+ when(mockVideoProfile.getWidth()).thenReturn(100);
+ }
+
+ @After
+ public void after() {
+ mockedStaticProfile.reset();
+ mockedStaticProfile.close();
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ assertEquals("ResolutionFeature", resolutionFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnInitialValueWhenNotSet() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ assertEquals(ResolutionPreset.max, resolutionFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoSetValue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ resolutionFeature.setValue(ResolutionPreset.high);
+
+ assertEquals(ResolutionPreset.high, resolutionFeature.getValue());
+ }
+
+ @Test
+  public void checkIsSupported_returnsTrue() {
+ CameraProperties mockCameraProperties = mock(CameraProperties.class);
+ ResolutionFeature resolutionFeature =
+ new ResolutionFeature(mockCameraProperties, ResolutionPreset.max, cameraName);
+
+ assertTrue(resolutionFeature.checkIsSupported());
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void getBestAvailableCamcorderProfileForResolutionPreset_shouldFallThroughLegacy() {
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_2160P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_1080P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_720P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_480P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_LOW))
+ .thenReturn(true);
+
+ assertEquals(
+ mockProfileLowLegacy,
+ ResolutionFeature.getBestAvailableCamcorderProfileForResolutionPresetLegacy(
+ 1, ResolutionPreset.max));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+  public void getBestAvailableCamcorderProfileForResolutionPreset_shouldFallThrough() {
+    before();
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_HIGH))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_2160P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_1080P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_720P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_480P))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_QVGA))
+ .thenReturn(false);
+ mockedStaticProfile
+ .when(() -> CamcorderProfile.hasProfile(1, CamcorderProfile.QUALITY_LOW))
+ .thenReturn(true);
+
+ assertEquals(
+ mockProfileLow,
+ ResolutionFeature.getBestAvailableCamcorderProfileForResolutionPreset(
+ 1, ResolutionPreset.max));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetMaxLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.max);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetMax() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.max);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetUltraHighLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.ultraHigh);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetUltraHigh() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.ultraHigh);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetVeryHighLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.veryHigh);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(minSdk = 31)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetVeryHigh() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.veryHigh);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetHighLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.high);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUse720PWhenResolutionPresetHigh() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.high);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_720P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUse480PWhenResolutionPresetMediumLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.medium);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_480P));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUse480PWhenResolutionPresetMedium() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.medium);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_480P));
+ }
+
+ @Config(maxSdk = 30)
+ @SuppressWarnings("deprecation")
+ @Test
+ public void computeBestPreviewSize_shouldUseQVGAWhenResolutionPresetLowLegacy() {
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.low);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.get(1, CamcorderProfile.QUALITY_QVGA));
+ }
+
+ @Config(minSdk = 31)
+ @Test
+ public void computeBestPreviewSize_shouldUseQVGAWhenResolutionPresetLow() {
+ before();
+ ResolutionFeature.computeBestPreviewSize(1, ResolutionPreset.low);
+
+ mockedStaticProfile.verify(() -> CamcorderProfile.getAll("1", CamcorderProfile.QUALITY_QVGA));
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManagerTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManagerTest.java
new file mode 100644
index 000000000000..82449a10188a
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/DeviceOrientationManagerTest.java
@@ -0,0 +1,301 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.sensororientation;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.res.Configuration;
+import android.content.res.Resources;
+import android.provider.Settings;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugins.camera.DartMessenger;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+
+public class DeviceOrientationManagerTest {
+ private Activity mockActivity;
+ private DartMessenger mockDartMessenger;
+ private WindowManager mockWindowManager;
+ private Display mockDisplay;
+ private DeviceOrientationManager deviceOrientationManager;
+
+ @Before
+ @SuppressWarnings("deprecation")
+ public void before() {
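+    // WindowManager#getDefaultDisplay() is deprecated (hence the @SuppressWarnings above); the
+    // stubbed Display supplies the rotation values used by the orientation tests.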
+ mockActivity = mock(Activity.class);
+ mockDartMessenger = mock(DartMessenger.class);
+ mockDisplay = mock(Display.class);
+ mockWindowManager = mock(WindowManager.class);
+
+ when(mockActivity.getSystemService(Context.WINDOW_SERVICE)).thenReturn(mockWindowManager);
+ when(mockWindowManager.getDefaultDisplay()).thenReturn(mockDisplay);
+
+ deviceOrientationManager =
+ DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 0);
+ }
+
+ @Test
+ public void getVideoOrientation_whenNaturalScreenOrientationEqualsPortraitUp() {
+ int degreesPortraitUp =
+ deviceOrientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_UP);
+ int degreesPortraitDown =
+ deviceOrientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+ int degreesLandscapeLeft =
+ deviceOrientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+ int degreesLandscapeRight =
+ deviceOrientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+ assertEquals(0, degreesPortraitUp);
+ assertEquals(90, degreesLandscapeLeft);
+ assertEquals(180, degreesPortraitDown);
+ assertEquals(270, degreesLandscapeRight);
+ }
+
+ @Test
+ public void getVideoOrientation_whenNaturalScreenOrientationEqualsLandscapeLeft() {
+ DeviceOrientationManager orientationManager =
+ DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 90);
+
+ int degreesPortraitUp = orientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_UP);
+ int degreesPortraitDown =
+ orientationManager.getVideoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+ int degreesLandscapeLeft =
+ orientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+ int degreesLandscapeRight =
+ orientationManager.getVideoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+ assertEquals(90, degreesPortraitUp);
+ assertEquals(180, degreesLandscapeLeft);
+ assertEquals(270, degreesPortraitDown);
+ assertEquals(0, degreesLandscapeRight);
+ }
+
+ @Test
+ public void getVideoOrientation_shouldFallbackToSensorOrientationWhenOrientationIsNull() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+ int degrees = deviceOrientationManager.getVideoOrientation(null);
+
+ assertEquals(90, degrees);
+ }
+
+ @Test
+ public void getPhotoOrientation_whenNaturalScreenOrientationEqualsPortraitUp() {
+ int degreesPortraitUp =
+ deviceOrientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_UP);
+ int degreesPortraitDown =
+ deviceOrientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+ int degreesLandscapeLeft =
+ deviceOrientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+ int degreesLandscapeRight =
+ deviceOrientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+ assertEquals(0, degreesPortraitUp);
+ assertEquals(90, degreesLandscapeRight);
+ assertEquals(180, degreesPortraitDown);
+ assertEquals(270, degreesLandscapeLeft);
+ }
+
+ @Test
+ public void getPhotoOrientation_whenNaturalScreenOrientationEqualsLandscapeLeft() {
+ DeviceOrientationManager orientationManager =
+ DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 90);
+
+ int degreesPortraitUp = orientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_UP);
+ int degreesPortraitDown =
+ orientationManager.getPhotoOrientation(DeviceOrientation.PORTRAIT_DOWN);
+ int degreesLandscapeLeft =
+ orientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_LEFT);
+ int degreesLandscapeRight =
+ orientationManager.getPhotoOrientation(DeviceOrientation.LANDSCAPE_RIGHT);
+
+ assertEquals(90, degreesPortraitUp);
+ assertEquals(180, degreesLandscapeRight);
+ assertEquals(270, degreesPortraitDown);
+ assertEquals(0, degreesLandscapeLeft);
+ }
+
+ @Test
+ public void getPhotoOrientation_shouldFallbackToCurrentOrientationWhenOrientationIsNull() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+ int degrees = deviceOrientationManager.getPhotoOrientation(null);
+
+ assertEquals(270, degrees);
+ }
+
+ @Test
+ public void handleUIOrientationChange_shouldSendMessageWhenSensorAccessIsAllowed() {
+ try (MockedStatic mockedSystem = mockStatic(Settings.System.class)) {
+ mockedSystem
+ .when(
+ () ->
+ Settings.System.getInt(any(), eq(Settings.System.ACCELEROMETER_ROTATION), eq(0)))
+ .thenReturn(0);
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+
+ deviceOrientationManager.handleUIOrientationChange();
+ }
+
+ verify(mockDartMessenger, times(1))
+ .sendDeviceOrientationChangeEvent(DeviceOrientation.LANDSCAPE_LEFT);
+ }
+
+ @Test
+ public void handleOrientationChange_shouldSendMessageWhenOrientationIsUpdated() {
+ DeviceOrientation previousOrientation = DeviceOrientation.PORTRAIT_UP;
+ DeviceOrientation newOrientation = DeviceOrientation.LANDSCAPE_LEFT;
+
+ DeviceOrientationManager.handleOrientationChange(
+ newOrientation, previousOrientation, mockDartMessenger);
+
+ verify(mockDartMessenger, times(1)).sendDeviceOrientationChangeEvent(newOrientation);
+ }
+
+ @Test
+ public void handleOrientationChange_shouldNotSendMessageWhenOrientationIsNotUpdated() {
+ DeviceOrientation previousOrientation = DeviceOrientation.PORTRAIT_UP;
+ DeviceOrientation newOrientation = DeviceOrientation.PORTRAIT_UP;
+
+ DeviceOrientationManager.handleOrientationChange(
+ newOrientation, previousOrientation, mockDartMessenger);
+
+ verify(mockDartMessenger, never()).sendDeviceOrientationChangeEvent(any());
+ }
+
+ @Test
+ public void getUIOrientation() {
+ // Orientation portrait and rotation of 0 should translate to "PORTRAIT_UP".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ DeviceOrientation uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+
+ // Orientation portrait and rotation of 90 should translate to "PORTRAIT_UP".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_90);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+
+ // Orientation portrait and rotation of 180 should translate to "PORTRAIT_DOWN".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_180);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_DOWN, uiOrientation);
+
+ // Orientation portrait and rotation of 270 should translate to "PORTRAIT_DOWN".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_270);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_DOWN, uiOrientation);
+
+ // Orientation landscape and rotation of 0 should translate to "LANDSCAPE_LEFT".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.LANDSCAPE_LEFT, uiOrientation);
+
+ // Orientation landscape and rotation of 90 should translate to "LANDSCAPE_LEFT".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_90);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.LANDSCAPE_LEFT, uiOrientation);
+
+ // Orientation landscape and rotation of 180 should translate to "LANDSCAPE_RIGHT".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_180);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, uiOrientation);
+
+ // Orientation landscape and rotation of 270 should translate to "LANDSCAPE_RIGHT".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_270);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, uiOrientation);
+
+ // Orientation undefined should default to "PORTRAIT_UP".
+ setUpUIOrientationMocks(Configuration.ORIENTATION_UNDEFINED, Surface.ROTATION_0);
+ uiOrientation = deviceOrientationManager.getUIOrientation();
+ assertEquals(DeviceOrientation.PORTRAIT_UP, uiOrientation);
+ }
+
+ @Test
+ public void getDeviceDefaultOrientation() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ int orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_180);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_90);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_270);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_0);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_180);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_LANDSCAPE, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_90);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_LANDSCAPE, Surface.ROTATION_270);
+ orientation = deviceOrientationManager.getDeviceDefaultOrientation();
+ assertEquals(Configuration.ORIENTATION_PORTRAIT, orientation);
+ }
+
+ @Test
+ public void calculateSensorOrientation() {
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ DeviceOrientation orientation = deviceOrientationManager.calculateSensorOrientation(0);
+ assertEquals(DeviceOrientation.PORTRAIT_UP, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ orientation = deviceOrientationManager.calculateSensorOrientation(90);
+ assertEquals(DeviceOrientation.LANDSCAPE_LEFT, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ orientation = deviceOrientationManager.calculateSensorOrientation(180);
+ assertEquals(DeviceOrientation.PORTRAIT_DOWN, orientation);
+
+ setUpUIOrientationMocks(Configuration.ORIENTATION_PORTRAIT, Surface.ROTATION_0);
+ orientation = deviceOrientationManager.calculateSensorOrientation(270);
+ assertEquals(DeviceOrientation.LANDSCAPE_RIGHT, orientation);
+ }
+
+ private void setUpUIOrientationMocks(int orientation, int rotation) {
+ Resources mockResources = mock(Resources.class);
+ Configuration mockConfiguration = mock(Configuration.class);
+
+ when(mockDisplay.getRotation()).thenReturn(rotation);
+
+ mockConfiguration.orientation = orientation;
+ when(mockActivity.getResources()).thenReturn(mockResources);
+ when(mockResources.getConfiguration()).thenReturn(mockConfiguration);
+ }
+
+ @Test
+ public void getDisplayTest() {
+ Display display = deviceOrientationManager.getDisplay();
+
+ assertEquals(mockDisplay, display);
+ }
+}
diff --git a/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeatureTest.java b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeatureTest.java
new file mode 100644
index 000000000000..2c3a5ab46634
--- /dev/null
+++ b/packages/camera/camera/android/src/test/java/io/flutter/plugins/camera/features/sensororientation/SensorOrientationFeatureTest.java
@@ -0,0 +1,125 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera.features.sensororientation;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import android.app.Activity;
+import android.hardware.camera2.CameraMetadata;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+import io.flutter.plugins.camera.CameraProperties;
+import io.flutter.plugins.camera.DartMessenger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.MockedStatic;
+
+public class SensorOrientationFeatureTest {
+ private MockedStatic<DeviceOrientationManager> mockedStaticDeviceOrientationManager;
+ private Activity mockActivity;
+ private CameraProperties mockCameraProperties;
+ private DartMessenger mockDartMessenger;
+ private DeviceOrientationManager mockDeviceOrientationManager;
+
+ @Before
+ public void before() {
+ mockedStaticDeviceOrientationManager = mockStatic(DeviceOrientationManager.class);
+ mockActivity = mock(Activity.class);
+ mockCameraProperties = mock(CameraProperties.class);
+ mockDartMessenger = mock(DartMessenger.class);
+ mockDeviceOrientationManager = mock(DeviceOrientationManager.class);
+
+ when(mockCameraProperties.getSensorOrientation()).thenReturn(0);
+ when(mockCameraProperties.getLensFacing()).thenReturn(CameraMetadata.LENS_FACING_BACK);
+
+ mockedStaticDeviceOrientationManager
+ .when(() -> DeviceOrientationManager.create(mockActivity, mockDartMessenger, false, 0))
+ .thenReturn(mockDeviceOrientationManager);
+ }
+
+ @After
+ public void after() {
+ mockedStaticDeviceOrientationManager.close();
+ }
+
+ @Test
+ public void ctor_shouldStartDeviceOrientationManager() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ verify(mockDeviceOrientationManager, times(1)).start();
+ }
+
+ @Test
+ public void getDebugName_shouldReturnTheNameOfTheFeature() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ assertEquals("SensorOrientationFeature", sensorOrientationFeature.getDebugName());
+ }
+
+ @Test
+ public void getValue_shouldReturnNullIfNotSet() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ assertEquals(0, (int) sensorOrientationFeature.getValue());
+ }
+
+ @Test
+ public void getValue_shouldEchoSetValue() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ sensorOrientationFeature.setValue(90);
+
+ assertEquals(90, (int) sensorOrientationFeature.getValue());
+ }
+
+ @Test
+ public void checkIsSupported_returnsTrue() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ assertTrue(sensorOrientationFeature.checkIsSupported());
+ }
+
+ @Test
+ public void getDeviceOrientationManager_shouldReturnInitializedDeviceOrientationManagerInstance() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ assertEquals(
+ mockDeviceOrientationManager, sensorOrientationFeature.getDeviceOrientationManager());
+ }
+
+ @Test
+ public void lockCaptureOrientation_shouldLockToSpecifiedOrientation() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ sensorOrientationFeature.lockCaptureOrientation(DeviceOrientation.PORTRAIT_DOWN);
+
+ assertEquals(
+ DeviceOrientation.PORTRAIT_DOWN, sensorOrientationFeature.getLockedCaptureOrientation());
+ }
+
+ @Test
+ public void unlockCaptureOrientation_shouldSetLockToNull() {
+ SensorOrientationFeature sensorOrientationFeature =
+ new SensorOrientationFeature(mockCameraProperties, mockActivity, mockDartMessenger);
+
+ sensorOrientationFeature.unlockCaptureOrientation();
+
+ assertNull(sensorOrientationFeature.getLockedCaptureOrientation());
+ }
+}
diff --git a/packages/camera/camera/android/src/test/resources/robolectric.properties b/packages/camera/camera/android/src/test/resources/robolectric.properties
new file mode 100644
index 000000000000..90fbd74370a7
--- /dev/null
+++ b/packages/camera/camera/android/src/test/resources/robolectric.properties
@@ -0,0 +1 @@
+sdk=30
\ No newline at end of file
diff --git a/packages/camera/camera/example/android/app/src/androidTestDebug/java/io/flutter/plugins/DartIntegrationTest.java b/packages/camera/camera/example/android/app/src/androidTestDebug/java/io/flutter/plugins/DartIntegrationTest.java
new file mode 100644
index 000000000000..0f4298dca155
--- /dev/null
+++ b/packages/camera/camera/example/android/app/src/androidTestDebug/java/io/flutter/plugins/DartIntegrationTest.java
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface DartIntegrationTest {}
diff --git a/packages/camera/camera/example/ios/RunnerTests/CameraExposureTests.m b/packages/camera/camera/example/ios/RunnerTests/CameraExposureTests.m
new file mode 100644
index 000000000000..ee43d3f155f4
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/CameraExposureTests.m
@@ -0,0 +1,55 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+
+@interface FLTCam : NSObject
+
+- (void)setExposurePointWithResult:(FlutterResult)result x:(double)x y:(double)y;
+@end
+
+@interface CameraExposureTests : XCTestCase
+@property(readonly, nonatomic) FLTCam *camera;
+@property(readonly, nonatomic) id mockDevice;
+@property(readonly, nonatomic) id mockUIDevice;
+@end
+
+@implementation CameraExposureTests
+
+- (void)setUp {
+ _camera = [[FLTCam alloc] init];
+ _mockDevice = OCMClassMock([AVCaptureDevice class]);
+ _mockUIDevice = OCMPartialMock([UIDevice currentDevice]);
+}
+
+- (void)tearDown {
+ [_mockDevice stopMocking];
+ [_mockUIDevice stopMocking];
+}
+
+- (void)testSetExposurePointWithResult_SetsExposurePointOfInterest {
+ // UI is currently in landscape left orientation
+ OCMStub([(UIDevice *)_mockUIDevice orientation]).andReturn(UIDeviceOrientationLandscapeLeft);
+ // Exposure point of interest is supported
+ OCMStub([_mockDevice isExposurePointOfInterestSupported]).andReturn(true);
+ // Set mock device as the current capture device
+ [_camera setValue:_mockDevice forKey:@"captureDevice"];
+
+ // Run test
+ [_camera
+ setExposurePointWithResult:^void(id _Nullable result) {
+ }
+ x:1
+ y:1];
+
+ // Verify the exposure point of interest has been set
+ OCMVerify([_mockDevice setExposurePointOfInterest:CGPointMake(1, 1)]);
+}
+
+@end
diff --git a/packages/camera/camera/example/ios/RunnerTests/CameraMethodChannelTests.m b/packages/camera/camera/example/ios/RunnerTests/CameraMethodChannelTests.m
new file mode 100644
index 000000000000..254a33c7ee4e
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/CameraMethodChannelTests.m
@@ -0,0 +1,48 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera;
+@import camera.Test;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+#import "MockFLTThreadSafeFlutterResult.h"
+
+@interface CameraMethodChannelTests : XCTestCase
+@end
+
+@implementation CameraMethodChannelTests
+
+- (void)testCreate_ShouldCallResultOnMainThread {
+ CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
+
+ XCTestExpectation *expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ // Set up mocks for initWithCameraName method
+ id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]])
+ .andReturn([AVCaptureInput alloc]);
+
+ id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]);
+ OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock);
+ OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
+
+ MockFLTThreadSafeFlutterResult *resultObject =
+ [[MockFLTThreadSafeFlutterResult alloc] initWithExpectation:expectation];
+
+ // Set up method call
+ FlutterMethodCall *call = [FlutterMethodCall
+ methodCallWithMethodName:@"create"
+ arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
+
+ [camera handleMethodCallAsync:call result:resultObject];
+
+ // Verify the result
+ NSDictionary *dictionaryResult = (NSDictionary *)resultObject.receivedResult;
+ XCTAssertNotNil(dictionaryResult);
+ XCTAssert([[dictionaryResult allKeys] containsObject:@"cameraId"]);
+}
+
+@end
diff --git a/packages/camera/camera/example/ios/RunnerTests/CameraPreviewPauseTests.m b/packages/camera/camera/example/ios/RunnerTests/CameraPreviewPauseTests.m
new file mode 100644
index 000000000000..eb6c0079322c
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/CameraPreviewPauseTests.m
@@ -0,0 +1,42 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera;
+@import XCTest;
+@import AVFoundation;
+#import
+#import "MockFLTThreadSafeFlutterResult.h"
+
+@interface FLTCam : NSObject
+@property(assign, nonatomic) BOOL isPreviewPaused;
+
+- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
+
+- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
+@end
+
+@interface CameraPreviewPauseTests : XCTestCase
+@end
+
+@implementation CameraPreviewPauseTests
+
+- (void)testPausePreviewWithResult_shouldPausePreview {
+ FLTCam *camera = [[FLTCam alloc] init];
+ MockFLTThreadSafeFlutterResult *resultObject = [[MockFLTThreadSafeFlutterResult alloc] init];
+
+ [camera pausePreviewWithResult:resultObject];
+ XCTAssertTrue(camera.isPreviewPaused);
+}
+
+- (void)testResumePreviewWithResult_shouldResumePreview {
+ FLTCam *camera = [[FLTCam alloc] init];
+ MockFLTThreadSafeFlutterResult *resultObject = [[MockFLTThreadSafeFlutterResult alloc] init];
+
+ [camera resumePreviewWithResult:resultObject];
+ XCTAssertFalse(camera.isPreviewPaused);
+}
+
+@end
diff --git a/packages/camera/camera/example/ios/RunnerTests/CameraUtilTests.m b/packages/camera/camera/example/ios/RunnerTests/CameraUtilTests.m
new file mode 100644
index 000000000000..380f6e93de58
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/CameraUtilTests.m
@@ -0,0 +1,49 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera;
+@import XCTest;
+@import AVFoundation;
+#import
+
+@interface FLTCam : NSObject
+
+- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
+ x:(double)x
+ y:(double)y;
+
+@end
+
+@interface CameraUtilTests : XCTestCase
+@property(readonly, nonatomic) FLTCam *camera;
+
+@end
+
+@implementation CameraUtilTests
+
+- (void)setUp {
+ _camera = [[FLTCam alloc] init];
+}
+
+- (void)testGetCGPointForCoordsWithOrientation_ShouldRotateCoords {
+ CGPoint point;
+ point = [_camera getCGPointForCoordsWithOrientation:UIDeviceOrientationLandscapeLeft x:1 y:1];
+ XCTAssertTrue(CGPointEqualToPoint(point, CGPointMake(1, 1)),
+ @"Resulting coordinates are invalid.");
+ point = [_camera getCGPointForCoordsWithOrientation:UIDeviceOrientationPortrait x:0 y:1];
+ XCTAssertTrue(CGPointEqualToPoint(point, CGPointMake(1, 1)),
+ @"Resulting coordinates are invalid.");
+ point = [_camera getCGPointForCoordsWithOrientation:UIDeviceOrientationLandscapeRight x:0 y:0];
+ XCTAssertTrue(CGPointEqualToPoint(point, CGPointMake(1, 1)),
+ @"Resulting coordinates are invalid.");
+ point = [_camera getCGPointForCoordsWithOrientation:UIDeviceOrientationPortraitUpsideDown
+ x:1
+ y:0];
+ XCTAssertTrue(CGPointEqualToPoint(point, CGPointMake(1, 1)),
+ @"Resulting coordinates are invalid.");
+}
+
+@end
diff --git a/packages/camera/camera/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.h b/packages/camera/camera/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.h
new file mode 100644
index 000000000000..8685f3fd610b
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.h
@@ -0,0 +1,25 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MockFLTThreadSafeFlutterResult_h
+#define MockFLTThreadSafeFlutterResult_h
+
+/**
+ * Extends FLTThreadSafeFlutterResult to give tests the ability to wait on the result and
+ * read the received result.
+ */
+@interface MockFLTThreadSafeFlutterResult : FLTThreadSafeFlutterResult
+@property(readonly, nonatomic, nonnull) XCTestExpectation *expectation;
+@property(nonatomic, nullable) id receivedResult;
+
+/**
+ * Initializes the MockFLTThreadSafeFlutterResult with an expectation.
+ *
+ * The expectation is fulfilled when a result is sent, allowing tests to await the result
+ * asynchronously.
+ */
+- (nonnull instancetype)initWithExpectation:(nonnull XCTestExpectation *)expectation;
+@end
+
+#endif /* MockFLTThreadSafeFlutterResult_h */
diff --git a/packages/camera/camera/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.m b/packages/camera/camera/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.m
new file mode 100644
index 000000000000..da2fc2d936ba
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/MockFLTThreadSafeFlutterResult.m
@@ -0,0 +1,27 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera;
+@import XCTest;
+
+#import "MockFLTThreadSafeFlutterResult.h"
+
+@implementation MockFLTThreadSafeFlutterResult
+
+- (instancetype)initWithExpectation:(XCTestExpectation *)expectation {
+ self = [super init];
+ _expectation = expectation;
+ return self;
+}
+
+- (void)sendSuccessWithData:(id)data {
+ self.receivedResult = data;
+ [self.expectation fulfill];
+}
+
+- (void)sendSuccess {
+ self.receivedResult = nil;
+ [self.expectation fulfill];
+}
+@end
diff --git a/packages/camera/camera/example/ios/RunnerTests/ThreadSafeFlutterResultTests.m b/packages/camera/camera/example/ios/RunnerTests/ThreadSafeFlutterResultTests.m
new file mode 100644
index 000000000000..8cd4b8bc8c2a
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/ThreadSafeFlutterResultTests.m
@@ -0,0 +1,122 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera;
+@import XCTest;
+
+@interface ThreadSafeFlutterResultTests : XCTestCase
+@end
+
+@implementation ThreadSafeFlutterResultTests
+- (void)testAsyncSendSuccess_ShouldCallResultOnMainThread {
+ XCTestExpectation* expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult* threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert(NSThread.isMainThread);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendSuccess];
+ });
+
+ [self waitForExpectations:[NSArray arrayWithObject:expectation] timeout:1];
+}
+
+- (void)testSyncSendSuccess_ShouldCallResultOnMainThread {
+ XCTestExpectation* expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult* threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert(NSThread.isMainThread);
+ [expectation fulfill];
+ }];
+ [threadSafeFlutterResult sendSuccess];
+ [self waitForExpectations:[NSArray arrayWithObject:expectation] timeout:1];
+}
+
+- (void)testSendNotImplemented_ShouldSendNotImplementedToFlutterResult {
+ XCTestExpectation* expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult* threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert([result isKindOfClass:FlutterMethodNotImplemented.class]);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendNotImplemented];
+ });
+
+ [self waitForExpectations:[NSArray arrayWithObject:expectation] timeout:1];
+}
+
+- (void)testSendErrorDetails_ShouldSendErrorToFlutterResult {
+ NSString* errorCode = @"errorCode";
+ NSString* errorMessage = @"message";
+ NSString* errorDetails = @"error details";
+ XCTestExpectation* expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult* threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert([result isKindOfClass:FlutterError.class]);
+ FlutterError* error = (FlutterError*)result;
+ XCTAssertEqualObjects(error.code, errorCode);
+ XCTAssertEqualObjects(error.message, errorMessage);
+ XCTAssertEqualObjects(error.details, errorDetails);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendErrorWithCode:errorCode message:errorMessage details:errorDetails];
+ });
+
+ [self waitForExpectations:[NSArray arrayWithObject:expectation] timeout:1];
+}
+
+- (void)testSendNSError_ShouldSendErrorToFlutterResult {
+ NSError* originalError = [[NSError alloc] initWithDomain:NSURLErrorDomain code:404 userInfo:nil];
+ XCTestExpectation* expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult* threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssert([result isKindOfClass:FlutterError.class]);
+ FlutterError* error = (FlutterError*)result;
+ NSString* constructedErrorCode =
+ [NSString stringWithFormat:@"Error %d", (int)originalError.code];
+ XCTAssertEqualObjects(error.code, constructedErrorCode);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendError:originalError];
+ });
+
+ [self waitForExpectations:[NSArray arrayWithObject:expectation] timeout:1];
+}
+
+- (void)testSendResult_ShouldSendResultToFlutterResult {
+ NSString* resultData = @"resultData";
+ XCTestExpectation* expectation =
+ [[XCTestExpectation alloc] initWithDescription:@"Result finished"];
+
+ FLTThreadSafeFlutterResult* threadSafeFlutterResult =
+ [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id _Nullable result) {
+ XCTAssertEqualObjects(result, resultData);
+ [expectation fulfill];
+ }];
+ dispatch_queue_t dispatchQueue = dispatch_queue_create("test dispatchqueue", NULL);
+ dispatch_async(dispatchQueue, ^{
+ [threadSafeFlutterResult sendSuccessWithData:resultData];
+ });
+
+ [self waitForExpectations:[NSArray arrayWithObject:expectation] timeout:1];
+}
+@end
diff --git a/packages/camera/camera/example/web/favicon.png b/packages/camera/camera/example/web/favicon.png
new file mode 100644
index 000000000000..8aaa46ac1ae2
Binary files /dev/null and b/packages/camera/camera/example/web/favicon.png differ
diff --git a/packages/camera/camera/example/web/icons/Icon-192.png b/packages/camera/camera/example/web/icons/Icon-192.png
new file mode 100644
index 000000000000..b749bfef0747
Binary files /dev/null and b/packages/camera/camera/example/web/icons/Icon-192.png differ
diff --git a/packages/camera/camera/example/web/icons/Icon-512.png b/packages/camera/camera/example/web/icons/Icon-512.png
new file mode 100644
index 000000000000..88cfd48dff11
Binary files /dev/null and b/packages/camera/camera/example/web/icons/Icon-512.png differ
diff --git a/packages/camera/camera/example/web/index.html b/packages/camera/camera/example/web/index.html
new file mode 100644
index 000000000000..2a3117d29362
--- /dev/null
+++ b/packages/camera/camera/example/web/index.html
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Camera Web Example
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/packages/camera/camera/example/web/manifest.json b/packages/camera/camera/example/web/manifest.json
new file mode 100644
index 000000000000..5fe0e048afe6
--- /dev/null
+++ b/packages/camera/camera/example/web/manifest.json
@@ -0,0 +1,23 @@
+{
+ "name": "camera example",
+ "short_name": "camera",
+ "start_url": ".",
+ "display": "standalone",
+ "background_color": "#0175C2",
+ "theme_color": "#0175C2",
+ "description": "An example of the camera on the web.",
+ "orientation": "portrait-primary",
+ "prefer_related_applications": false,
+ "icons": [
+ {
+ "src": "icons/Icon-192.png",
+ "sizes": "192x192",
+ "type": "image/png"
+ },
+ {
+ "src": "icons/Icon-512.png",
+ "sizes": "512x512",
+ "type": "image/png"
+ }
+ ]
+}
diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.modulemap b/packages/camera/camera/ios/Classes/CameraPlugin.modulemap
new file mode 100644
index 000000000000..30afa91bdda2
--- /dev/null
+++ b/packages/camera/camera/ios/Classes/CameraPlugin.modulemap
@@ -0,0 +1,10 @@
+framework module camera {
+ umbrella header "camera-umbrella.h"
+
+ export *
+ module * { export * }
+
+ explicit module Test {
+ header "CameraPlugin_Test.h"
+ }
+}
diff --git a/packages/camera/camera/ios/Classes/CameraPlugin_Test.h b/packages/camera/camera/ios/Classes/CameraPlugin_Test.h
new file mode 100644
index 000000000000..afbf6864a1f8
--- /dev/null
+++ b/packages/camera/camera/ios/Classes/CameraPlugin_Test.h
@@ -0,0 +1,34 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This header is available in the Test module. Import via "@import camera.Test;"
+
+#import "CameraPlugin.h"
+#import "FLTThreadSafeFlutterResult.h"
+
+/// Methods exposed for unit testing.
+@interface CameraPlugin ()
+
+/// Inject @p FlutterTextureRegistry and @p FlutterBinaryMessenger for unit testing.
+- (instancetype)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry
+ messenger:(NSObject<FlutterBinaryMessenger> *)messenger
+ NS_DESIGNATED_INITIALIZER;
+
+/// Hide the default public constructor.
+- (instancetype)init NS_UNAVAILABLE;
+
+/// Handles `FlutterMethodCall`s and ensures the result is sent on the main dispatch queue.
+///
+/// @param call The method call command object.
+/// @param result A wrapper around the `FlutterResult` callback which ensures the callback is called
+/// on the main dispatch queue.
+- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FLTThreadSafeFlutterResult *)result;
+
+/// Called by the @c NSNotificationCenter each time the device's orientation changes.
+///
+/// @param notification @c NSNotification instance containing a reference to the `UIDevice` object
+/// that triggered the orientation change.
+- (void)orientationChanged:(NSNotification *)notification;
+
+@end
diff --git a/packages/camera/camera/ios/Classes/FLTThreadSafeFlutterResult.h b/packages/camera/camera/ios/Classes/FLTThreadSafeFlutterResult.h
new file mode 100644
index 000000000000..f290ca0fcd05
--- /dev/null
+++ b/packages/camera/camera/ios/Classes/FLTThreadSafeFlutterResult.h
@@ -0,0 +1,51 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Flutter/Flutter.h>
+
+/**
+ * Wrapper for FlutterResult that always delivers the result on the main thread.
+ */
+@interface FLTThreadSafeFlutterResult : NSObject
+
+/**
+ * Gets the original FlutterResult object wrapped by this FLTThreadSafeFlutterResult instance.
+ */
+@property(readonly, nonatomic, nonnull) FlutterResult flutterResult;
+
+/**
+ * Initializes with a FlutterResult object.
+ * @param result The FlutterResult object that the result will be given to.
+ */
+- (nonnull instancetype)initWithResult:(nonnull FlutterResult)result;
+
+/**
+ * Sends a successful result without any data.
+ */
+- (void)sendSuccess;
+
+/**
+ * Sends a successful result with data.
+ * @param data Result data that is sent to the Flutter Dart side.
+ */
+- (void)sendSuccessWithData:(nonnull id)data;
+
+/**
+ * Sends an NSError as the result.
+ * @param error Error that will be sent as a FlutterError.
+ */
+- (void)sendError:(nonnull NSError*)error;
+
+/**
+ * Sends a FlutterError as result.
+ */
+- (void)sendErrorWithCode:(nonnull NSString*)code
+ message:(nullable NSString*)message
+ details:(nullable id)details;
+
+/**
+ * Sends FlutterMethodNotImplemented as result.
+ */
+- (void)sendNotImplemented;
+@end
diff --git a/packages/camera/camera/ios/Classes/FLTThreadSafeFlutterResult.m b/packages/camera/camera/ios/Classes/FLTThreadSafeFlutterResult.m
new file mode 100644
index 000000000000..caa4788d8dc8
--- /dev/null
+++ b/packages/camera/camera/ios/Classes/FLTThreadSafeFlutterResult.m
@@ -0,0 +1,58 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTThreadSafeFlutterResult.h"
+#import <Foundation/Foundation.h>
+
+@implementation FLTThreadSafeFlutterResult {
+}
+
+- (id)initWithResult:(FlutterResult)result {
+ self = [super init];
+ if (!self) {
+ return nil;
+ }
+ _flutterResult = result;
+ return self;
+}
+
+- (void)sendSuccess {
+ [self send:nil];
+}
+
+- (void)sendSuccessWithData:(id)data {
+ [self send:data];
+}
+
+- (void)sendError:(NSError*)error {
+ [self sendErrorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code]
+ message:error.localizedDescription
+ details:error.domain];
+}
+
+- (void)sendErrorWithCode:(NSString*)code
+ message:(NSString* _Nullable)message
+ details:(id _Nullable)details {
+ FlutterError* flutterError = [FlutterError errorWithCode:code message:message details:details];
+ [self send:flutterError];
+}
+
+- (void)sendNotImplemented {
+ [self send:FlutterMethodNotImplemented];
+}
+
+/**
+ * Sends result to flutterResult on the main thread.
+ */
+- (void)send:(id _Nullable)result {
+ if (!NSThread.isMainThread) {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ self->_flutterResult(result);
+ });
+ } else {
+ _flutterResult(result);
+ }
+}
+
+@end
diff --git a/packages/camera/camera/ios/Classes/camera-umbrella.h b/packages/camera/camera/ios/Classes/camera-umbrella.h
new file mode 100644
index 000000000000..b0fd493b24df
--- /dev/null
+++ b/packages/camera/camera/ios/Classes/camera-umbrella.h
@@ -0,0 +1,10 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import
+#import
+#import
+
+FOUNDATION_EXPORT double cameraVersionNumber;
+FOUNDATION_EXPORT const unsigned char cameraVersionString[];
diff --git a/packages/google_maps_flutter/google_maps_flutter/AUTHORS b/packages/camera/camera_web/AUTHORS
similarity index 100%
rename from packages/google_maps_flutter/google_maps_flutter/AUTHORS
rename to packages/camera/camera_web/AUTHORS
diff --git a/packages/camera/camera_web/CHANGELOG.md b/packages/camera/camera_web/CHANGELOG.md
new file mode 100644
index 000000000000..dd9225f48ff4
--- /dev/null
+++ b/packages/camera/camera_web/CHANGELOG.md
@@ -0,0 +1,12 @@
+## 0.2.1+1
+
+* Update usage documentation.
+
+## 0.2.1
+
+* Add video recording functionality.
+* Fix cameraNotReadable error that prevented access to the camera on some Android devices.
+
+## 0.2.0
+
+* Initial release, adapted from the Flutter [I/O Photobooth](https://photobooth.flutter.dev/) project.
diff --git a/packages/google_maps_flutter/google_maps_flutter_web/LICENSE b/packages/camera/camera_web/LICENSE
similarity index 100%
rename from packages/google_maps_flutter/google_maps_flutter_web/LICENSE
rename to packages/camera/camera_web/LICENSE
diff --git a/packages/camera/camera_web/README.md b/packages/camera/camera_web/README.md
new file mode 100644
index 000000000000..04bf665c1039
--- /dev/null
+++ b/packages/camera/camera_web/README.md
@@ -0,0 +1,112 @@
+# Camera Web Plugin
+
+The web implementation of [`camera`][camera].
+
+*Note*: This plugin is under development. See [missing implementation](#missing-implementation).
+
+## Usage
+
+### Depend on the package
+
+This package is [endorsed](https://flutter.dev/docs/development/packages-and-plugins/developing-packages#endorsed-federated-plugin),
+which means you can simply use `camera`
+normally. This package will be automatically included in your app when you do.
+
+## Example
+
+Find the example in the [`camera` package](https://pub.dev/packages/camera#example).
+
+## Limitations on the web platform
+
+### Camera devices
+
+The camera devices are accessed with [Stream Web API](https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API)
+with the following [browser support](https://caniuse.com/stream):
+
+
+
+Accessing camera devices requires a [secure browsing context](https://developer.mozilla.org/en-US/docs/Web/Security/Secure_Contexts).
+Broadly speaking, this means that you need to serve your web application over HTTPS
+(or `localhost` for local development). For insecure contexts
+`CameraPlatform.availableCameras` might throw a `CameraException` with the
+`permissionDenied` error code.
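+
+For example, app code can guard the call like this (a minimal sketch using the
+app-facing `camera` API; the exact error code surfaced on the web may vary):
+
+```dart
+import 'package:camera/camera.dart';
+import 'package:flutter/foundation.dart';
+
+Future<List<CameraDescription>> loadCameras() async {
+  try {
+    return await availableCameras();
+  } on CameraException catch (e) {
+    // In an insecure context the web implementation may fail with a
+    // permission-related error instead of returning the camera list.
+    debugPrint('Could not list cameras: ${e.code} ${e.description}');
+    return <CameraDescription>[];
+  }
+}
+```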
+
+### Device orientation
+
+The device orientation implementation is backed by [`Screen Orientation Web API`](https://www.w3.org/TR/screen-orientation/)
+with the following [browser support](https://caniuse.com/screen-orientation):
+
+
+
+For the browsers that do not support the device orientation:
+
+- `CameraPlatform.onDeviceOrientationChanged` returns an empty stream.
+- `CameraPlatform.lockCaptureOrientation` and `CameraPlatform.unlockCaptureOrientation`
+throw a `PlatformException` with the `orientationNotSupported` error code.
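+
+These calls can be guarded in app code; a minimal sketch, assuming an
+already-initialized `CameraController`:
+
+```dart
+import 'package:camera/camera.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+
+Future<void> lockToPortrait(CameraController controller) async {
+  try {
+    await controller.lockCaptureOrientation(DeviceOrientation.portraitUp);
+  } on CameraException catch (e) {
+    // Browsers without the Screen Orientation API report the lock as
+    // unsupported; the preview keeps working, just without the lock.
+    debugPrint('Orientation lock unavailable: ${e.code}');
+  }
+}
+```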
+
+### Flash mode and zoom level
+
+The flash mode and zoom level implementation is backed by [Image Capture Web API](https://w3c.github.io/mediacapture-image/)
+with the following [browser support](https://caniuse.com/mdn-api_imagecapture):
+
+
+
+For the browsers that do not support the flash mode:
+
+- `CameraPlatform.setFlashMode` throws a `PlatformException` with the
+`torchModeNotSupported` error code.
+
+For the browsers that do not support the zoom level:
+
+- `CameraPlatform.getMaxZoomLevel`, `CameraPlatform.getMinZoomLevel` and
+`CameraPlatform.setZoomLevel` throw a `PlatformException` with the
+`zoomLevelNotSupported` error code.
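+
+Zoom calls can be wrapped the same way; a minimal sketch, assuming an
+initialized `CameraController`:
+
+```dart
+import 'package:camera/camera.dart';
+import 'package:flutter/foundation.dart';
+
+Future<void> setZoomIfSupported(CameraController controller, double zoom) async {
+  try {
+    final double min = await controller.getMinZoomLevel();
+    final double max = await controller.getMaxZoomLevel();
+    await controller.setZoomLevel(zoom.clamp(min, max).toDouble());
+  } on CameraException catch (e) {
+    // Browsers without Image Capture zoom support report the zoom level as
+    // unsupported; keep the default zoom level in that case.
+    debugPrint('Zoom not supported: ${e.code}');
+  }
+}
+```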
+
+### Taking a picture
+
+The image capturing implementation is backed by [`URL.createObjectUrl` Web API](https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL)
+with the following [browser support](https://caniuse.com/bloburls):
+
+
+
+The web platform does not support `dart:io`. Attempts to display a captured image
+using `Image.file` will throw an error. The captured image contains a network-accessible
+URL pointing to a location within the browser (a blob) and can be displayed using
+`Image.network` or `Image.memory` after loading the image bytes into memory.
+
+See the example below:
+
+```dart
+if (kIsWeb) {
+ Image.network(capturedImage.path);
+} else {
+ Image.file(File(capturedImage.path));
+}
+```
+
+### Video recording
+
+The video recording implementation is backed by [MediaRecorder Web API](https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder) with the following [browser support](https://caniuse.com/mdn-api_mediarecorder):
+
+
+
+A video is recorded in one of the following video MIME types:
+- video/webm (e.g. on Chrome or Firefox)
+- video/mp4 (e.g. on Safari)
+
+Pausing, resuming or stopping the video recording throws a `PlatformException` with the `videoRecordingNotStarted` error code if the video recording was not started.
+
+For the browsers that do not support the video recording:
+- `CameraPlatform.startVideoRecording` throws a `PlatformException` with the `notSupported` error code.
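+
+A guarded recording flow might look like this (a sketch, assuming an
+initialized `CameraController`; the clip length and error handling are illustrative):
+
+```dart
+import 'package:camera/camera.dart';
+import 'package:flutter/foundation.dart';
+
+Future<XFile?> recordShortClip(CameraController controller) async {
+  try {
+    await controller.startVideoRecording();
+  } on CameraException catch (e) {
+    // Browsers without MediaRecorder support fail when starting the
+    // recording, so there is nothing to stop afterwards.
+    debugPrint('Video recording unavailable: ${e.code}');
+    return null;
+  }
+  await Future<void>.delayed(const Duration(seconds: 5));
+  // On the web, the returned file is backed by a blob URL.
+  return controller.stopVideoRecording();
+}
+```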
+
+## Missing implementation
+
+The web implementation of [`camera`][camera] is missing the following features:
+- Exposure mode, point and offset
+- Focus mode and point
+- Sensor orientation
+- Image format group
+- Streaming of frames
+
+
+[camera]: https://pub.dev/packages/camera
diff --git a/packages/camera/camera_web/example/README.md b/packages/camera/camera_web/example/README.md
new file mode 100644
index 000000000000..8a6e74b107ea
--- /dev/null
+++ b/packages/camera/camera_web/example/README.md
@@ -0,0 +1,9 @@
+# Testing
+
+This package uses `package:integration_test` to run its tests in a web browser.
+
+See [Plugin Tests > Web Tests](https://github.com/flutter/flutter/wiki/Plugin-Tests#web-tests)
+in the Flutter wiki for instructions on how to set up and run the tests in this package.
+
+Check [flutter.dev > Integration testing](https://flutter.dev/docs/testing/integration-tests)
+for more info.
\ No newline at end of file
diff --git a/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart b/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
new file mode 100644
index 000000000000..a298b57dfd7f
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_error_code_test.dart
@@ -0,0 +1,178 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html';
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraErrorCode', () {
+ group('toString returns a correct type for', () {
+ testWidgets('notSupported', (tester) async {
+ expect(
+ CameraErrorCode.notSupported.toString(),
+ equals('cameraNotSupported'),
+ );
+ });
+
+ testWidgets('notFound', (tester) async {
+ expect(
+ CameraErrorCode.notFound.toString(),
+ equals('cameraNotFound'),
+ );
+ });
+
+ testWidgets('notReadable', (tester) async {
+ expect(
+ CameraErrorCode.notReadable.toString(),
+ equals('cameraNotReadable'),
+ );
+ });
+
+ testWidgets('overconstrained', (tester) async {
+ expect(
+ CameraErrorCode.overconstrained.toString(),
+ equals('cameraOverconstrained'),
+ );
+ });
+
+ testWidgets('permissionDenied', (tester) async {
+ expect(
+ CameraErrorCode.permissionDenied.toString(),
+ equals('cameraPermission'),
+ );
+ });
+
+ testWidgets('type', (tester) async {
+ expect(
+ CameraErrorCode.type.toString(),
+ equals('cameraType'),
+ );
+ });
+
+ testWidgets('abort', (tester) async {
+ expect(
+ CameraErrorCode.abort.toString(),
+ equals('cameraAbort'),
+ );
+ });
+
+ testWidgets('security', (tester) async {
+ expect(
+ CameraErrorCode.security.toString(),
+ equals('cameraSecurity'),
+ );
+ });
+
+ testWidgets('missingMetadata', (tester) async {
+ expect(
+ CameraErrorCode.missingMetadata.toString(),
+ equals('cameraMissingMetadata'),
+ );
+ });
+
+ testWidgets('orientationNotSupported', (tester) async {
+ expect(
+ CameraErrorCode.orientationNotSupported.toString(),
+ equals('orientationNotSupported'),
+ );
+ });
+
+ testWidgets('torchModeNotSupported', (tester) async {
+ expect(
+ CameraErrorCode.torchModeNotSupported.toString(),
+ equals('torchModeNotSupported'),
+ );
+ });
+
+ testWidgets('zoomLevelNotSupported', (tester) async {
+ expect(
+ CameraErrorCode.zoomLevelNotSupported.toString(),
+ equals('zoomLevelNotSupported'),
+ );
+ });
+
+ testWidgets('zoomLevelInvalid', (tester) async {
+ expect(
+ CameraErrorCode.zoomLevelInvalid.toString(),
+ equals('zoomLevelInvalid'),
+ );
+ });
+
+ testWidgets('notStarted', (tester) async {
+ expect(
+ CameraErrorCode.notStarted.toString(),
+ equals('cameraNotStarted'),
+ );
+ });
+
+ testWidgets('videoRecordingNotStarted', (tester) async {
+ expect(
+ CameraErrorCode.videoRecordingNotStarted.toString(),
+ equals('videoRecordingNotStarted'),
+ );
+ });
+
+ testWidgets('unknown', (tester) async {
+ expect(
+ CameraErrorCode.unknown.toString(),
+ equals('cameraUnknown'),
+ );
+ });
+
+ group('fromMediaError', () {
+ testWidgets('with aborted error code', (tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(MediaError.MEDIA_ERR_ABORTED),
+ ).toString(),
+ equals('mediaErrorAborted'),
+ );
+ });
+
+ testWidgets('with network error code', (tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(MediaError.MEDIA_ERR_NETWORK),
+ ).toString(),
+ equals('mediaErrorNetwork'),
+ );
+ });
+
+ testWidgets('with decode error code', (tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(MediaError.MEDIA_ERR_DECODE),
+ ).toString(),
+ equals('mediaErrorDecode'),
+ );
+ });
+
+ testWidgets('with source not supported error code', (tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED),
+ ).toString(),
+ equals('mediaErrorSourceNotSupported'),
+ );
+ });
+
+ testWidgets('with unknown error code', (tester) async {
+ expect(
+ CameraErrorCode.fromMediaError(
+ FakeMediaError(5),
+ ).toString(),
+ equals('mediaErrorUnknown'),
+ );
+ });
+ });
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_metadata_test.dart b/packages/camera/camera_web/example/integration_test/camera_metadata_test.dart
new file mode 100644
index 000000000000..36ecb3e47f31
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_metadata_test.dart
@@ -0,0 +1,28 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraMetadata', () {
+ testWidgets('supports value equality', (tester) async {
+ expect(
+ CameraMetadata(
+ deviceId: 'deviceId',
+ facingMode: 'environment',
+ ),
+ equals(
+ CameraMetadata(
+ deviceId: 'deviceId',
+ facingMode: 'environment',
+ ),
+ ),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_options_test.dart b/packages/camera/camera_web/example/integration_test/camera_options_test.dart
new file mode 100644
index 000000000000..a74ba3088394
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_options_test.dart
@@ -0,0 +1,203 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraOptions', () {
+ testWidgets('serializes correctly', (tester) async {
+ final cameraOptions = CameraOptions(
+ audio: AudioConstraints(enabled: true),
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.user),
+ ),
+ );
+
+ expect(
+ cameraOptions.toJson(),
+ equals({
+ 'audio': cameraOptions.audio.toJson(),
+ 'video': cameraOptions.video.toJson(),
+ }),
+ );
+ });
+
+ testWidgets('supports value equality', (tester) async {
+ expect(
+ CameraOptions(
+ audio: AudioConstraints(enabled: false),
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint(CameraType.environment),
+ width: VideoSizeConstraint(minimum: 10, ideal: 15, maximum: 20),
+ height: VideoSizeConstraint(minimum: 15, ideal: 20, maximum: 25),
+ deviceId: 'deviceId',
+ ),
+ ),
+ equals(
+ CameraOptions(
+ audio: AudioConstraints(enabled: false),
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint(CameraType.environment),
+ width: VideoSizeConstraint(minimum: 10, ideal: 15, maximum: 20),
+ height: VideoSizeConstraint(minimum: 15, ideal: 20, maximum: 25),
+ deviceId: 'deviceId',
+ ),
+ ),
+ ),
+ );
+ });
+ });
+
+ group('AudioConstraints', () {
+ testWidgets('serializes correctly', (tester) async {
+ expect(
+ AudioConstraints(enabled: true).toJson(),
+ equals(true),
+ );
+ });
+
+ testWidgets('supports value equality', (tester) async {
+ expect(
+ AudioConstraints(enabled: true),
+ equals(AudioConstraints(enabled: true)),
+ );
+ });
+ });
+
+ group('VideoConstraints', () {
+ testWidgets('serializes correctly', (tester) async {
+ final videoConstraints = VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.user),
+ width: VideoSizeConstraint(ideal: 100, maximum: 100),
+ height: VideoSizeConstraint(ideal: 50, maximum: 50),
+ deviceId: 'deviceId',
+ );
+
+ expect(
+ videoConstraints.toJson(),
+ equals({
+ 'facingMode': videoConstraints.facingMode!.toJson(),
+ 'width': videoConstraints.width!.toJson(),
+ 'height': videoConstraints.height!.toJson(),
+ 'deviceId': {
+ 'exact': 'deviceId',
+ }
+ }),
+ );
+ });
+
+ testWidgets('supports value equality', (tester) async {
+ expect(
+ VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.environment),
+ width: VideoSizeConstraint(minimum: 90, ideal: 100, maximum: 100),
+ height: VideoSizeConstraint(minimum: 40, ideal: 50, maximum: 50),
+ deviceId: 'deviceId',
+ ),
+ equals(
+ VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.environment),
+ width: VideoSizeConstraint(minimum: 90, ideal: 100, maximum: 100),
+ height: VideoSizeConstraint(minimum: 40, ideal: 50, maximum: 50),
+ deviceId: 'deviceId',
+ ),
+ ),
+ );
+ });
+ });
+
+ group('FacingModeConstraint', () {
+ group('ideal', () {
+ testWidgets(
+ 'serializes correctly '
+ 'for environment camera type', (tester) async {
+ expect(
+ FacingModeConstraint(CameraType.environment).toJson(),
+ equals({'ideal': 'environment'}),
+ );
+ });
+
+ testWidgets(
+ 'serializes correctly '
+ 'for user camera type', (tester) async {
+ expect(
+ FacingModeConstraint(CameraType.user).toJson(),
+ equals({'ideal': 'user'}),
+ );
+ });
+
+ testWidgets('supports value equality', (tester) async {
+ expect(
+ FacingModeConstraint(CameraType.user),
+ equals(FacingModeConstraint(CameraType.user)),
+ );
+ });
+ });
+
+ group('exact', () {
+ testWidgets(
+ 'serializes correctly '
+ 'for environment camera type', (tester) async {
+ expect(
+ FacingModeConstraint.exact(CameraType.environment).toJson(),
+ equals({'exact': 'environment'}),
+ );
+ });
+
+ testWidgets(
+ 'serializes correctly '
+ 'for user camera type', (tester) async {
+ expect(
+ FacingModeConstraint.exact(CameraType.user).toJson(),
+ equals({'exact': 'user'}),
+ );
+ });
+
+ testWidgets('supports value equality', (tester) async {
+ expect(
+ FacingModeConstraint.exact(CameraType.environment),
+ equals(FacingModeConstraint.exact(CameraType.environment)),
+ );
+ });
+ });
+ });
+
+ group('VideoSizeConstraint ', () {
+ testWidgets('serializes correctly', (tester) async {
+ expect(
+ VideoSizeConstraint(
+ minimum: 200,
+ ideal: 400,
+ maximum: 400,
+ ).toJson(),
+ equals({
+ 'min': 200,
+ 'ideal': 400,
+ 'max': 400,
+ }),
+ );
+ });
+
+ testWidgets('supports value equality', (tester) async {
+ expect(
+ VideoSizeConstraint(
+ minimum: 100,
+ ideal: 200,
+ maximum: 300,
+ ),
+ equals(
+ VideoSizeConstraint(
+ minimum: 100,
+ ideal: 200,
+ maximum: 300,
+ ),
+ ),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_service_test.dart b/packages/camera/camera_web/example/integration_test/camera_service_test.dart
new file mode 100644
index 000000000000..346ab26237ea
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_service_test.dart
@@ -0,0 +1,869 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html';
+import 'dart:ui';
+import 'dart:js_util' as js_util;
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/shims/dart_js_util.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:mocktail/mocktail.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraService', () {
+ const cameraId = 0;
+
+ late Window window;
+ late Navigator navigator;
+ late MediaDevices mediaDevices;
+ late CameraService cameraService;
+ late JsUtil jsUtil;
+
+ setUp(() async {
+ window = MockWindow();
+ navigator = MockNavigator();
+ mediaDevices = MockMediaDevices();
+ jsUtil = MockJsUtil();
+
+ when(() => window.navigator).thenReturn(navigator);
+ when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+
+ // Mock JsUtil to return the real getProperty from dart:js_util.
+ when(() => jsUtil.getProperty(any(), any())).thenAnswer(
+ (invocation) => js_util.getProperty(
+ invocation.positionalArguments[0],
+ invocation.positionalArguments[1],
+ ),
+ );
+
+ cameraService = CameraService()..window = window;
+ });
+
+ group('getMediaStreamForOptions', () {
+ testWidgets(
+ 'calls MediaDevices.getUserMedia '
+ 'with provided options', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenAnswer((_) async => FakeMediaStream([]));
+
+ final options = CameraOptions(
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.user),
+ width: VideoSizeConstraint(ideal: 200),
+ ),
+ );
+
+ await cameraService.getMediaStreamForOptions(options);
+
+ verify(
+ () => mediaDevices.getUserMedia(options.toJson()),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'throws PlatformException '
+ 'with notSupported error '
+ 'when there are no media devices', (tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(CameraOptions()),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ group('throws CameraWebException', () {
+ testWidgets(
+ 'with notFound error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with NotFoundError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('NotFoundError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.notFound),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notFound error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with DevicesNotFoundError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('DevicesNotFoundError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.notFound),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notReadable error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with NotReadableError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('NotReadableError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.notReadable),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notReadable error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with TrackStartError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('TrackStartError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.notReadable),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with overconstrained error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with OverconstrainedError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('OverconstrainedError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having(
+ (e) => e.code, 'code', CameraErrorCode.overconstrained),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with overconstrained error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with ConstraintNotSatisfiedError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('ConstraintNotSatisfiedError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having(
+ (e) => e.code, 'code', CameraErrorCode.overconstrained),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with permissionDenied error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with NotAllowedError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('NotAllowedError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having(
+ (e) => e.code, 'code', CameraErrorCode.permissionDenied),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with permissionDenied error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with PermissionDeniedError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('PermissionDeniedError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having(
+ (e) => e.code, 'code', CameraErrorCode.permissionDenied),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with type error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with TypeError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('TypeError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.type),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with abort error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with AbortError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('AbortError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+ isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.abort),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with security error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with SecurityError', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('SecurityError'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+            isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.security),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with unknown error '
+ 'when MediaDevices.getUserMedia throws DomException '
+ 'with an unknown error', (tester) async {
+ when(() => mediaDevices.getUserMedia(any()))
+ .thenThrow(FakeDomException('Unknown'));
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+            isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.unknown),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with unknown error '
+ 'when MediaDevices.getUserMedia throws an unknown exception',
+ (tester) async {
+ when(() => mediaDevices.getUserMedia(any())).thenThrow(Exception());
+
+ expect(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(),
+ cameraId: cameraId,
+ ),
+ throwsA(
+            isA<CameraWebException>()
+ .having((e) => e.cameraId, 'cameraId', cameraId)
+ .having((e) => e.code, 'code', CameraErrorCode.unknown),
+ ),
+ );
+ });
+ });
+ });
+
+ group('getZoomLevelCapabilityForCamera', () {
+ late Camera camera;
+      late List<MediaStreamTrack> videoTracks;
+
+ setUp(() {
+ camera = MockCamera();
+ videoTracks = [MockMediaStreamTrack(), MockMediaStreamTrack()];
+
+ when(() => camera.textureId).thenReturn(0);
+ when(() => camera.stream).thenReturn(FakeMediaStream(videoTracks));
+
+ cameraService.jsUtil = jsUtil;
+ });
+
+ testWidgets(
+ 'returns the zoom level capability '
+ 'based on the first video track', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'zoom': true,
+ });
+
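+        // The browser exposes zoom capabilities as a JS object, so the stub wraps it with jsify.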
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'zoom': js_util.jsify({
+ 'min': 100,
+ 'max': 400,
+ 'step': 2,
+ }),
+ });
+
+ final zoomLevelCapability =
+ cameraService.getZoomLevelCapabilityForCamera(camera);
+
+ expect(zoomLevelCapability.minimum, equals(100.0));
+ expect(zoomLevelCapability.maximum, equals(400.0));
+ expect(zoomLevelCapability.videoTrack, equals(videoTracks.first));
+ });
+
+ group('throws CameraWebException', () {
+ testWidgets(
+ 'with zoomLevelNotSupported error '
+ 'when there are no media devices', (tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ expect(
+ () => cameraService.getZoomLevelCapabilityForCamera(camera),
+ throwsA(
+            isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ camera.textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with zoomLevelNotSupported error '
+ 'when the zoom level is not supported '
+ 'in the browser', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'zoom': false,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'zoom': {
+ 'min': 100,
+ 'max': 400,
+ 'step': 2,
+ },
+ });
+
+ expect(
+ () => cameraService.getZoomLevelCapabilityForCamera(camera),
+ throwsA(
+            isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ camera.textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with zoomLevelNotSupported error '
+ 'when the zoom level is not supported '
+ 'by the camera', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'zoom': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn({});
+
+ expect(
+ () => cameraService.getZoomLevelCapabilityForCamera(camera),
+ throwsA(
+            isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ camera.textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notStarted error '
+ 'when the camera stream has not been initialized', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'zoom': true,
+ });
+
+ // Create a camera stream with no video tracks.
+ when(() => camera.stream).thenReturn(FakeMediaStream([]));
+
+ expect(
+ () => cameraService.getZoomLevelCapabilityForCamera(camera),
+ throwsA(
+            isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ camera.textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notStarted,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('getFacingModeForVideoTrack', () {
+ setUp(() {
+ cameraService.jsUtil = jsUtil;
+ });
+
+ testWidgets(
+ 'throws PlatformException '
+ 'with notSupported error '
+ 'when there are no media devices', (tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ expect(
+ () =>
+ cameraService.getFacingModeForVideoTrack(MockMediaStreamTrack()),
+ throwsA(
+          isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'returns null '
+ 'when the facing mode is not supported', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'facingMode': false,
+ });
+
+ final facingMode =
+ cameraService.getFacingModeForVideoTrack(MockMediaStreamTrack());
+
+ expect(facingMode, isNull);
+ });
+
+ group('when the facing mode is supported', () {
+ late MediaStreamTrack videoTrack;
+
+ setUp(() {
+ videoTrack = MockMediaStreamTrack();
+
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ .thenReturn(true);
+
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'facingMode': true,
+ });
+ });
+
+ testWidgets(
+ 'returns an appropriate facing mode '
+ 'based on the video track settings', (tester) async {
+ when(videoTrack.getSettings).thenReturn({'facingMode': 'user'});
+
+ final facingMode =
+ cameraService.getFacingModeForVideoTrack(videoTrack);
+
+ expect(facingMode, equals('user'));
+ });
+
+ testWidgets(
+ 'returns an appropriate facing mode '
+ 'based on the video track capabilities '
+ 'when the facing mode setting is empty', (tester) async {
+ when(videoTrack.getSettings).thenReturn({});
+ when(videoTrack.getCapabilities).thenReturn({
+ 'facingMode': ['environment', 'left']
+ });
+
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ .thenReturn(true);
+
+ final facingMode =
+ cameraService.getFacingModeForVideoTrack(videoTrack);
+
+ expect(facingMode, equals('environment'));
+ });
+
+ testWidgets(
+ 'returns null '
+ 'when the facing mode setting '
+ 'and capabilities are empty', (tester) async {
+ when(videoTrack.getSettings).thenReturn({});
+ when(videoTrack.getCapabilities).thenReturn({'facingMode': []});
+
+ final facingMode =
+ cameraService.getFacingModeForVideoTrack(videoTrack);
+
+ expect(facingMode, isNull);
+ });
+
+ testWidgets(
+ 'returns null '
+ 'when the facing mode setting is empty and '
+ 'the video track capabilities are not supported', (tester) async {
+ when(videoTrack.getSettings).thenReturn({});
+
+ when(() => jsUtil.hasProperty(videoTrack, 'getCapabilities'))
+ .thenReturn(false);
+
+ final facingMode =
+ cameraService.getFacingModeForVideoTrack(videoTrack);
+
+ expect(facingMode, isNull);
+ });
+ });
+ });
+
+ group('mapFacingModeToLensDirection', () {
+ testWidgets(
+ 'returns front '
+ 'when the facing mode is user', (tester) async {
+ expect(
+ cameraService.mapFacingModeToLensDirection('user'),
+ equals(CameraLensDirection.front),
+ );
+ });
+
+ testWidgets(
+ 'returns back '
+ 'when the facing mode is environment', (tester) async {
+ expect(
+ cameraService.mapFacingModeToLensDirection('environment'),
+ equals(CameraLensDirection.back),
+ );
+ });
+
+ testWidgets(
+ 'returns external '
+ 'when the facing mode is left', (tester) async {
+ expect(
+ cameraService.mapFacingModeToLensDirection('left'),
+ equals(CameraLensDirection.external),
+ );
+ });
+
+ testWidgets(
+ 'returns external '
+ 'when the facing mode is right', (tester) async {
+ expect(
+ cameraService.mapFacingModeToLensDirection('right'),
+ equals(CameraLensDirection.external),
+ );
+ });
+ });
+
+ group('mapFacingModeToCameraType', () {
+ testWidgets(
+ 'returns user '
+ 'when the facing mode is user', (tester) async {
+ expect(
+ cameraService.mapFacingModeToCameraType('user'),
+ equals(CameraType.user),
+ );
+ });
+
+ testWidgets(
+ 'returns environment '
+ 'when the facing mode is environment', (tester) async {
+ expect(
+ cameraService.mapFacingModeToCameraType('environment'),
+ equals(CameraType.environment),
+ );
+ });
+
+ testWidgets(
+ 'returns user '
+ 'when the facing mode is left', (tester) async {
+ expect(
+ cameraService.mapFacingModeToCameraType('left'),
+ equals(CameraType.user),
+ );
+ });
+
+ testWidgets(
+ 'returns user '
+ 'when the facing mode is right', (tester) async {
+ expect(
+ cameraService.mapFacingModeToCameraType('right'),
+ equals(CameraType.user),
+ );
+ });
+ });
+
+ group('mapResolutionPresetToSize', () {
+ testWidgets(
+ 'returns 4096x2160 '
+ 'when the resolution preset is max', (tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.max),
+ equals(Size(4096, 2160)),
+ );
+ });
+
+ testWidgets(
+ 'returns 4096x2160 '
+ 'when the resolution preset is ultraHigh', (tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.ultraHigh),
+ equals(Size(4096, 2160)),
+ );
+ });
+
+ testWidgets(
+ 'returns 1920x1080 '
+ 'when the resolution preset is veryHigh', (tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.veryHigh),
+ equals(Size(1920, 1080)),
+ );
+ });
+
+ testWidgets(
+ 'returns 1280x720 '
+ 'when the resolution preset is high', (tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.high),
+ equals(Size(1280, 720)),
+ );
+ });
+
+ testWidgets(
+ 'returns 720x480 '
+ 'when the resolution preset is medium', (tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.medium),
+ equals(Size(720, 480)),
+ );
+ });
+
+ testWidgets(
+ 'returns 320x240 '
+ 'when the resolution preset is low', (tester) async {
+ expect(
+ cameraService.mapResolutionPresetToSize(ResolutionPreset.low),
+ equals(Size(320, 240)),
+ );
+ });
+ });
+
+ group('mapDeviceOrientationToOrientationType', () {
+ testWidgets(
+ 'returns portraitPrimary '
+ 'when the device orientation is portraitUp', (tester) async {
+ expect(
+ cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.portraitUp,
+ ),
+ equals(OrientationType.portraitPrimary),
+ );
+ });
+
+ testWidgets(
+ 'returns landscapePrimary '
+ 'when the device orientation is landscapeLeft', (tester) async {
+ expect(
+ cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.landscapeLeft,
+ ),
+ equals(OrientationType.landscapePrimary),
+ );
+ });
+
+ testWidgets(
+ 'returns portraitSecondary '
+ 'when the device orientation is portraitDown', (tester) async {
+ expect(
+ cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.portraitDown,
+ ),
+ equals(OrientationType.portraitSecondary),
+ );
+ });
+
+ testWidgets(
+ 'returns landscapeSecondary '
+ 'when the device orientation is landscapeRight', (tester) async {
+ expect(
+ cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.landscapeRight,
+ ),
+ equals(OrientationType.landscapeSecondary),
+ );
+ });
+ });
+
+ group('mapOrientationTypeToDeviceOrientation', () {
+ testWidgets(
+ 'returns portraitUp '
+ 'when the orientation type is portraitPrimary', (tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitPrimary,
+ ),
+ equals(DeviceOrientation.portraitUp),
+ );
+ });
+
+ testWidgets(
+ 'returns landscapeLeft '
+ 'when the orientation type is landscapePrimary', (tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.landscapePrimary,
+ ),
+ equals(DeviceOrientation.landscapeLeft),
+ );
+ });
+
+ testWidgets(
+ 'returns portraitDown '
+ 'when the orientation type is portraitSecondary', (tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitSecondary,
+ ),
+ equals(DeviceOrientation.portraitDown),
+ );
+ });
+
+ testWidgets(
+ 'returns landscapeRight '
+ 'when the orientation type is landscapeSecondary', (tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.landscapeSecondary,
+ ),
+ equals(DeviceOrientation.landscapeRight),
+ );
+ });
+
+ testWidgets(
+ 'returns portraitUp '
+ 'for an unknown orientation type', (tester) async {
+ expect(
+ cameraService.mapOrientationTypeToDeviceOrientation(
+ 'unknown',
+ ),
+ equals(DeviceOrientation.portraitUp),
+ );
+ });
+ });
+ });
+}
+
+class JSNoSuchMethodError implements Exception {}
diff --git a/packages/camera/camera_web/example/integration_test/camera_test.dart b/packages/camera/camera_web/example/integration_test/camera_test.dart
new file mode 100644
index 000000000000..3a25e33c5398
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_test.dart
@@ -0,0 +1,1678 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html';
+import 'dart:ui';
+
+import 'package:async/async.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:mocktail/mocktail.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('Camera', () {
+ const textureId = 1;
+
+ late Window window;
+ late Navigator navigator;
+ late MediaDevices mediaDevices;
+
+ late MediaStream mediaStream;
+ late CameraService cameraService;
+
+ setUp(() {
+ window = MockWindow();
+ navigator = MockNavigator();
+ mediaDevices = MockMediaDevices();
+
+ when(() => window.navigator).thenReturn(navigator);
+ when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+
+ cameraService = MockCameraService();
+
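+      // Back the mocked camera service with a blank 10x10 video stream by default.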
+ final videoElement = getVideoElementWithBlankStream(Size(10, 10));
+ mediaStream = videoElement.captureStream();
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ any(),
+ cameraId: any(named: 'cameraId'),
+ ),
+ ).thenAnswer((_) => Future.value(mediaStream));
+ });
+
+ setUpAll(() {
+ registerFallbackValue(MockCameraOptions());
+ });
+
+ group('initialize', () {
+ testWidgets(
+ 'calls CameraService.getMediaStreamForOptions '
+ 'with provided options', (tester) async {
+ final options = CameraOptions(
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint.exact(CameraType.user),
+ width: VideoSizeConstraint(ideal: 200),
+ ),
+ );
+
+ final camera = Camera(
+ textureId: textureId,
+ options: options,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ verify(
+ () => cameraService.getMediaStreamForOptions(
+ options,
+ cameraId: textureId,
+ ),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'creates a video element '
+ 'with correct properties', (tester) async {
+ const audioConstraints = AudioConstraints(enabled: true);
+ final videoConstraints = VideoConstraints(
+ facingMode: FacingModeConstraint(
+ CameraType.user,
+ ),
+ );
+
+ final camera = Camera(
+ textureId: textureId,
+ options: CameraOptions(
+ audio: audioConstraints,
+ video: videoConstraints,
+ ),
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(camera.videoElement, isNotNull);
+ expect(camera.videoElement.autoplay, isFalse);
+ expect(camera.videoElement.muted, isTrue);
+ expect(camera.videoElement.srcObject, mediaStream);
+ expect(camera.videoElement.attributes.keys, contains('playsinline'));
+
+ expect(
+ camera.videoElement.style.transformOrigin, equals('center center'));
+ expect(camera.videoElement.style.pointerEvents, equals('none'));
+ expect(camera.videoElement.style.width, equals('100%'));
+ expect(camera.videoElement.style.height, equals('100%'));
+ expect(camera.videoElement.style.objectFit, equals('cover'));
+ });
+
+ testWidgets(
+ 'flips the video element horizontally '
+ 'for a back camera', (tester) async {
+ final videoConstraints = VideoConstraints(
+ facingMode: FacingModeConstraint(
+ CameraType.environment,
+ ),
+ );
+
+ final camera = Camera(
+ textureId: textureId,
+ options: CameraOptions(
+ video: videoConstraints,
+ ),
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(camera.videoElement.style.transform, equals('scaleX(-1)'));
+ });
+
+ testWidgets(
+ 'creates a wrapping div element '
+ 'with correct properties', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(camera.divElement, isNotNull);
+ expect(camera.divElement.style.objectFit, equals('cover'));
+ expect(camera.divElement.children, contains(camera.videoElement));
+ });
+
+ testWidgets('initializes the camera stream', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(camera.stream, mediaStream);
+ });
+
+ testWidgets(
+ 'throws an exception '
+ 'when CameraService.getMediaStreamForOptions throws', (tester) async {
+        final exception = Exception('A media stream exception occurred.');
+
+ when(() => cameraService.getMediaStreamForOptions(any(),
+ cameraId: any(named: 'cameraId'))).thenThrow(exception);
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ expect(
+ camera.initialize,
+ throwsA(exception),
+ );
+ });
+ });
+
+ group('play', () {
+ testWidgets('starts playing the video element', (tester) async {
+ var startedPlaying = false;
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ final cameraPlaySubscription =
+ camera.videoElement.onPlay.listen((event) => startedPlaying = true);
+
+ await camera.play();
+
+ expect(startedPlaying, isTrue);
+
+ await cameraPlaySubscription.cancel();
+ });
+
+ testWidgets(
+ 'initializes the camera stream '
+ 'from CameraService.getMediaStreamForOptions '
+ 'if it does not exist', (tester) async {
+ final options = CameraOptions(
+ video: VideoConstraints(
+ width: VideoSizeConstraint(ideal: 100),
+ ),
+ );
+
+ final camera = Camera(
+ textureId: textureId,
+ options: options,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ /// Remove the video element's source
+ /// by stopping the camera.
+ camera.stop();
+
+ await camera.play();
+
+ // Should be called twice: for initialize and play.
+ verify(
+ () => cameraService.getMediaStreamForOptions(
+ options,
+ cameraId: textureId,
+ ),
+ ).called(2);
+
+ expect(camera.videoElement.srcObject, mediaStream);
+ expect(camera.stream, mediaStream);
+ });
+ });
+
+ group('pause', () {
+ testWidgets('pauses the camera stream', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.play();
+
+ expect(camera.videoElement.paused, isFalse);
+
+ camera.pause();
+
+ expect(camera.videoElement.paused, isTrue);
+ });
+ });
+
+ group('stop', () {
+ testWidgets('resets the camera stream', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.play();
+
+ camera.stop();
+
+ expect(camera.videoElement.srcObject, isNull);
+ expect(camera.stream, isNull);
+ });
+ });
+
+ group('takePicture', () {
+ testWidgets('returns a captured picture', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.play();
+
+ final pictureFile = await camera.takePicture();
+
+ expect(pictureFile, isNotNull);
+ });
+
+ group(
+ 'enables the torch mode '
+ 'when taking a picture', () {
+        late List<MediaStreamTrack> videoTracks;
+ late MediaStream videoStream;
+ late VideoElement videoElement;
+
+ setUp(() {
+ videoTracks = [MockMediaStreamTrack(), MockMediaStreamTrack()];
+ videoStream = FakeMediaStream(videoTracks);
+
+ videoElement = getVideoElementWithBlankStream(Size(100, 100))
+ ..muted = true;
+
+ when(() => videoTracks.first.applyConstraints(any()))
+ .thenAnswer((_) async => {});
+
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'torch': true,
+ });
+ });
+
+ testWidgets('if the flash mode is auto', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream
+ ..videoElement = videoElement
+ ..flashMode = FlashMode.auto;
+
+ await camera.play();
+
+ final _ = await camera.takePicture();
+
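+          // Taking the picture should turn the torch on for the capture and off again afterwards.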
+ verify(
+ () => videoTracks.first.applyConstraints({
+ "advanced": [
+ {
+ "torch": true,
+ }
+ ]
+ }),
+ ).called(1);
+
+ verify(
+ () => videoTracks.first.applyConstraints({
+ "advanced": [
+ {
+ "torch": false,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+
+ testWidgets('if the flash mode is always', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream
+ ..videoElement = videoElement
+ ..flashMode = FlashMode.always;
+
+ await camera.play();
+
+ final _ = await camera.takePicture();
+
+ verify(
+ () => videoTracks.first.applyConstraints({
+ "advanced": [
+ {
+ "torch": true,
+ }
+ ]
+ }),
+ ).called(1);
+
+ verify(
+ () => videoTracks.first.applyConstraints({
+ "advanced": [
+ {
+ "torch": false,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+ });
+ });
+
+ group('getVideoSize', () {
+ testWidgets(
+ 'returns a size '
+ 'based on the first video track settings', (tester) async {
+ const videoSize = Size(1280, 720);
+
+ final videoElement = getVideoElementWithBlankStream(videoSize);
+ mediaStream = videoElement.captureStream();
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(
+ camera.getVideoSize(),
+ equals(videoSize),
+ );
+ });
+
+ testWidgets(
+ 'returns Size.zero '
+ 'if the camera is missing video tracks', (tester) async {
+ // Create a video stream with no video tracks.
+ final videoElement = VideoElement();
+ mediaStream = videoElement.captureStream();
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(
+ camera.getVideoSize(),
+ equals(Size.zero),
+ );
+ });
+ });
+
+ group('setFlashMode', () {
+      late List<MediaStreamTrack> videoTracks;
+ late MediaStream videoStream;
+
+ setUp(() {
+ videoTracks = [MockMediaStreamTrack(), MockMediaStreamTrack()];
+ videoStream = FakeMediaStream(videoTracks);
+
+ when(() => videoTracks.first.applyConstraints(any()))
+ .thenAnswer((_) async => {});
+
+ when(videoTracks.first.getCapabilities).thenReturn({});
+ });
+
+ testWidgets('sets the camera flash mode', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'torch': true,
+ });
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ const flashMode = FlashMode.always;
+
+ camera.setFlashMode(flashMode);
+
+ expect(
+ camera.flashMode,
+ equals(flashMode),
+ );
+ });
+
+ testWidgets(
+ 'enables the torch mode '
+ 'if the flash mode is torch', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'torch': true,
+ });
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ camera.setFlashMode(FlashMode.torch);
+
+ verify(
+ () => videoTracks.first.applyConstraints({
+ "advanced": [
+ {
+ "torch": true,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'disables the torch mode '
+ 'if the flash mode is not torch', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'torch': true,
+ });
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ camera.setFlashMode(FlashMode.auto);
+
+ verify(
+ () => videoTracks.first.applyConstraints({
+ "advanced": [
+ {
+ "torch": false,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+
+ group('throws a CameraWebException', () {
+ testWidgets(
+ 'with torchModeNotSupported error '
+ 'when there are no media devices', (tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ expect(
+ () => camera.setFlashMode(FlashMode.always),
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.torchModeNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with torchModeNotSupported error '
+ 'when the torch mode is not supported '
+ 'in the browser', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'torch': false,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'torch': true,
+ });
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ expect(
+ () => camera.setFlashMode(FlashMode.always),
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.torchModeNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with torchModeNotSupported error '
+ 'when the torch mode is not supported '
+ 'by the camera', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'torch': false,
+ });
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )
+ ..window = window
+ ..stream = videoStream;
+
+ expect(
+ () => camera.setFlashMode(FlashMode.always),
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.torchModeNotSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notStarted error '
+ 'when the camera stream has not been initialized', (tester) async {
+ when(mediaDevices.getSupportedConstraints).thenReturn({
+ 'torch': true,
+ });
+
+ when(videoTracks.first.getCapabilities).thenReturn({
+ 'torch': true,
+ });
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )..window = window;
+
+ expect(
+ () => camera.setFlashMode(FlashMode.always),
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notStarted,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('zoomLevel', () {
+ group('getMaxZoomLevel', () {
+ testWidgets(
+ 'returns maximum '
+ 'from CameraService.getZoomLevelCapabilityForCamera',
+ (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: MockMediaStreamTrack(),
+ );
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ final maximumZoomLevel = camera.getMaxZoomLevel();
+
+ verify(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .called(1);
+
+ expect(
+ maximumZoomLevel,
+ equals(zoomLevelCapability.maximum),
+ );
+ });
+ });
+
+ group('getMinZoomLevel', () {
+ testWidgets(
+ 'returns minimum '
+ 'from CameraService.getZoomLevelCapabilityForCamera',
+ (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: MockMediaStreamTrack(),
+ );
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ final minimumZoomLevel = camera.getMinZoomLevel();
+
+ verify(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .called(1);
+
+ expect(
+ minimumZoomLevel,
+ equals(zoomLevelCapability.minimum),
+ );
+ });
+ });
+
+ group('setZoomLevel', () {
+ testWidgets(
+ 'applies zoom on the video track '
+ 'from CameraService.getZoomLevelCapabilityForCamera',
+ (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final videoTrack = MockMediaStreamTrack();
+
+ final zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: videoTrack,
+ );
+
+ when(() => videoTrack.applyConstraints(any()))
+ .thenAnswer((_) async {});
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ const zoom = 75.0;
+
+ camera.setZoomLevel(zoom);
+
+ verify(
+ () => videoTrack.applyConstraints({
+ "advanced": [
+ {
+ ZoomLevelCapability.constraintName: zoom,
+ }
+ ]
+ }),
+ ).called(1);
+ });
+
+ group('throws a CameraWebException', () {
+ testWidgets(
+ 'with zoomLevelInvalid error '
+ 'when the provided zoom level is below minimum', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: MockMediaStreamTrack(),
+ );
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ expect(
+ () => camera.setZoomLevel(45.0),
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelInvalid,
+ ),
+ ));
+ });
+
+ testWidgets(
+ 'with zoomLevelInvalid error '
+            'when the provided zoom level is above maximum', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final zoomLevelCapability = ZoomLevelCapability(
+ minimum: 50.0,
+ maximum: 100.0,
+ videoTrack: MockMediaStreamTrack(),
+ );
+
+ when(() => cameraService.getZoomLevelCapabilityForCamera(camera))
+ .thenReturn(zoomLevelCapability);
+
+ expect(
+ () => camera.setZoomLevel(105.0),
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.zoomLevelInvalid,
+ ),
+ ),
+ );
+ });
+ });
+ });
+ });
+
+ group('getLensDirection', () {
+ testWidgets(
+ 'returns a lens direction '
+ 'based on the first video track settings', (tester) async {
+ final videoElement = MockVideoElement();
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )..videoElement = videoElement;
+
+ final firstVideoTrack = MockMediaStreamTrack();
+
+ when(() => videoElement.srcObject).thenReturn(
+ FakeMediaStream([
+ firstVideoTrack,
+ MockMediaStreamTrack(),
+ ]),
+ );
+
+ when(firstVideoTrack.getSettings)
+ .thenReturn({'facingMode': 'environment'});
+
+ when(() => cameraService.mapFacingModeToLensDirection('environment'))
+ .thenReturn(CameraLensDirection.external);
+
+ expect(
+ camera.getLensDirection(),
+ equals(CameraLensDirection.external),
+ );
+ });
+
+ testWidgets(
+ 'returns null '
+ 'if the first video track is missing the facing mode',
+ (tester) async {
+ final videoElement = MockVideoElement();
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )..videoElement = videoElement;
+
+ final firstVideoTrack = MockMediaStreamTrack();
+
+ when(() => videoElement.srcObject).thenReturn(
+ FakeMediaStream([
+ firstVideoTrack,
+ MockMediaStreamTrack(),
+ ]),
+ );
+
+ when(firstVideoTrack.getSettings).thenReturn({});
+
+ expect(
+ camera.getLensDirection(),
+ isNull,
+ );
+ });
+
+ testWidgets(
+ 'returns null '
+ 'if the camera is missing video tracks', (tester) async {
+ // Create a video stream with no video tracks.
+ final videoElement = VideoElement();
+ mediaStream = videoElement.captureStream();
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(
+ camera.getLensDirection(),
+ isNull,
+ );
+ });
+ });
+
+ group('getViewType', () {
+ testWidgets('returns a correct view type', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+
+ expect(
+ camera.getViewType(),
+ equals('plugins.flutter.io/camera_$textureId'),
+ );
+ });
+ });
+
+ group('video recording', () {
+ const supportedVideoType = 'video/webm';
+
+ late MediaRecorder mediaRecorder;
+
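+      // Report only 'video/webm' as a supported recording type.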
+ bool isVideoTypeSupported(String type) => type == supportedVideoType;
+
+ setUp(() {
+ mediaRecorder = MockMediaRecorder();
+
+ when(() => mediaRecorder.onError)
+ .thenAnswer((_) => const Stream.empty());
+ });
+
+ group('startVideoRecording', () {
+ testWidgets(
+ 'creates a media recorder '
+ 'with appropriate options', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ expect(
+ camera.mediaRecorder!.stream,
+ equals(camera.stream),
+ );
+
+ expect(
+ camera.mediaRecorder!.mimeType,
+ equals(supportedVideoType),
+ );
+
+ expect(
+ camera.mediaRecorder!.state,
+ equals('recording'),
+ );
+ });
+
+ testWidgets('listens to the media recorder data events',
+ (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ verify(
+ () => mediaRecorder.addEventListener('dataavailable', any()),
+ ).called(1);
+ });
+
+ testWidgets('listens to the media recorder stop events',
+ (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ verify(
+ () => mediaRecorder.addEventListener('stop', any()),
+ ).called(1);
+ });
+
+ testWidgets('starts a video recording', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ verify(mediaRecorder.start).called(1);
+ });
+
+ testWidgets(
+ 'starts a video recording '
+ 'with maxVideoDuration', (tester) async {
+ const maxVideoDuration = Duration(hours: 1);
+
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording(maxVideoDuration: maxVideoDuration);
+
+ verify(() => mediaRecorder.start(maxVideoDuration.inMilliseconds))
+ .called(1);
+ });
+
+ group('throws a CameraWebException', () {
+ testWidgets(
+ 'with notSupported error '
+ 'when maxVideoDuration is 0 milliseconds or less',
+ (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ expect(
+ () => camera.startVideoRecording(maxVideoDuration: Duration.zero),
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with notSupported error '
+ 'when no video types are supported', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )..isVideoTypeSupported = (type) => false;
+
+ await camera.initialize();
+ await camera.play();
+
+ expect(
+ camera.startVideoRecording,
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('pauseVideoRecording', () {
+ testWidgets('pauses a video recording', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )..mediaRecorder = mediaRecorder;
+
+ await camera.pauseVideoRecording();
+
+ verify(mediaRecorder.pause).called(1);
+ });
+
+ testWidgets(
+ 'throws a CameraWebException '
+ 'with videoRecordingNotStarted error '
+ 'if the video recording was not started', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ );
+
+ expect(
+ camera.pauseVideoRecording,
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.videoRecordingNotStarted,
+ ),
+ ),
+ );
+ });
+ });
+
+ group('resumeVideoRecording', () {
+ testWidgets('resumes a video recording', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )..mediaRecorder = mediaRecorder;
+
+ await camera.resumeVideoRecording();
+
+ verify(mediaRecorder.resume).called(1);
+ });
+
+ testWidgets(
+ 'throws a CameraWebException '
+ 'with videoRecordingNotStarted error '
+ 'if the video recording was not started', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ );
+
+ expect(
+ camera.resumeVideoRecording,
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.videoRecordingNotStarted,
+ ),
+ ),
+ );
+ });
+ });
+
+ group('stopVideoRecording', () {
+ testWidgets(
+ 'stops a video recording and '
+ 'returns the captured file '
+ 'based on all video data parts', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ late void Function(Event) videoDataAvailableListener;
+ late void Function(Event) videoRecordingStoppedListener;
+
+ when(
+ () => mediaRecorder.addEventListener('dataavailable', any()),
+ ).thenAnswer((invocation) {
+ videoDataAvailableListener = invocation.positionalArguments[1];
+ });
+
+ when(
+ () => mediaRecorder.addEventListener('stop', any()),
+ ).thenAnswer((invocation) {
+ videoRecordingStoppedListener = invocation.positionalArguments[1];
+ });
+
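+          // Intercept blob creation to capture the recorded video parts.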
+ Blob? finalVideo;
+          List<Blob>? videoParts;
+ camera.blobBuilder = (blobs, videoType) {
+ videoParts = [...blobs];
+ finalVideo = Blob(blobs, videoType);
+ return finalVideo!;
+ };
+
+ await camera.startVideoRecording();
+ final videoFileFuture = camera.stopVideoRecording();
+
+ final capturedVideoPartOne = Blob([]);
+ final capturedVideoPartTwo = Blob([]);
+
+ final capturedVideoParts = [
+ capturedVideoPartOne,
+ capturedVideoPartTwo,
+ ];
+
+ videoDataAvailableListener
+ ..call(FakeBlobEvent(capturedVideoPartOne))
+ ..call(FakeBlobEvent(capturedVideoPartTwo));
+
+ videoRecordingStoppedListener.call(Event('stop'));
+
+ final videoFile = await videoFileFuture;
+
+ verify(mediaRecorder.stop).called(1);
+
+ expect(
+ videoFile,
+ isNotNull,
+ );
+
+ expect(
+ videoFile.mimeType,
+ equals(supportedVideoType),
+ );
+
+ expect(
+ videoFile.name,
+ equals(finalVideo.hashCode.toString()),
+ );
+
+ expect(
+ videoParts,
+ equals(capturedVideoParts),
+ );
+ });
+
+ testWidgets(
+ 'throws a CameraWebException '
+ 'with videoRecordingNotStarted error '
+ 'if the video recording was not started', (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ );
+
+ expect(
+ camera.stopVideoRecording,
+ throwsA(
+              isA<CameraWebException>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.videoRecordingNotStarted,
+ ),
+ ),
+ );
+ });
+ });
+
+ group('on video data available', () {
+ late void Function(Event) videoDataAvailableListener;
+
+ setUp(() {
+ when(
+ () => mediaRecorder.addEventListener('dataavailable', any()),
+ ).thenAnswer((invocation) {
+ videoDataAvailableListener = invocation.positionalArguments[1];
+ });
+ });
+
+ testWidgets(
+ 'stops a video recording '
+ 'if maxVideoDuration is given and '
+ 'the recording was not stopped manually', (tester) async {
+ const maxVideoDuration = Duration(hours: 1);
+
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+ await camera.startVideoRecording(maxVideoDuration: maxVideoDuration);
+
+ when(() => mediaRecorder.state).thenReturn('recording');
+
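+          // Simulate an incoming data chunk; with maxVideoDuration set, the recorder should be stopped automatically.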
+ videoDataAvailableListener.call(FakeBlobEvent(Blob([])));
+
+ await Future.microtask(() {});
+
+ verify(mediaRecorder.stop).called(1);
+ });
+ });
+
+ group('on video recording stopped', () {
+ late void Function(Event) videoRecordingStoppedListener;
+
+ setUp(() {
+ when(
+ () => mediaRecorder.addEventListener('stop', any()),
+ ).thenAnswer((invocation) {
+ videoRecordingStoppedListener = invocation.positionalArguments[1];
+ });
+ });
+
+ testWidgets('stops listening to the media recorder data events',
+ (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ videoRecordingStoppedListener.call(Event('stop'));
+
+ await Future.microtask(() {});
+
+ verify(
+ () => mediaRecorder.removeEventListener('dataavailable', any()),
+ ).called(1);
+ });
+
+ testWidgets('stops listening to the media recorder stop events',
+ (tester) async {
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ videoRecordingStoppedListener.call(Event('stop'));
+
+ await Future.microtask(() {});
+
+ verify(
+ () => mediaRecorder.removeEventListener('stop', any()),
+ ).called(1);
+ });
+
+ testWidgets('stops listening to the media recorder errors',
+ (tester) async {
+          final onErrorStreamController = StreamController<ErrorEvent>();
+
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = isVideoTypeSupported;
+
+ when(() => mediaRecorder.onError)
+ .thenAnswer((_) => onErrorStreamController.stream);
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ videoRecordingStoppedListener.call(Event('stop'));
+
+ await Future.microtask(() {});
+
+ expect(
+ onErrorStreamController.hasListener,
+ isFalse,
+ );
+ });
+ });
+ });
+
+ group('dispose', () {
+ testWidgets('resets the video element\'s source', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.dispose();
+
+ expect(camera.videoElement.srcObject, isNull);
+ });
+
+ testWidgets('closes the onEnded stream', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.dispose();
+
+ expect(
+ camera.onEndedController.isClosed,
+ isTrue,
+ );
+ });
+
+ testWidgets('closes the onVideoRecordedEvent stream', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.dispose();
+
+ expect(
+ camera.videoRecorderController.isClosed,
+ isTrue,
+ );
+ });
+
+ testWidgets('closes the onVideoRecordingError stream', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ await camera.initialize();
+ await camera.dispose();
+
+ expect(
+ camera.videoRecordingErrorController.isClosed,
+ isTrue,
+ );
+ });
+ });
+
+ group('events', () {
+ group('onVideoRecordedEvent', () {
+ testWidgets(
+ 'emits a VideoRecordedEvent '
+ 'when a video recording is created', (tester) async {
+ const maxVideoDuration = Duration(hours: 1);
+ const supportedVideoType = 'video/webm';
+
+ final mediaRecorder = MockMediaRecorder();
+ when(() => mediaRecorder.onError)
+ .thenAnswer((_) => const Stream.empty());
+
+ final camera = Camera(
+ textureId: 1,
+ cameraService: cameraService,
+ )
+ ..mediaRecorder = mediaRecorder
+ ..isVideoTypeSupported = (type) => type == 'video/webm';
+
+ await camera.initialize();
+ await camera.play();
+
+ late void Function(Event) videoDataAvailableListener;
+ late void Function(Event) videoRecordingStoppedListener;
+
+ when(
+ () => mediaRecorder.addEventListener('dataavailable', any()),
+ ).thenAnswer((invocation) {
+ videoDataAvailableListener = invocation.positionalArguments[1];
+ });
+
+ when(
+ () => mediaRecorder.addEventListener('stop', any()),
+ ).thenAnswer((invocation) {
+ videoRecordingStoppedListener = invocation.positionalArguments[1];
+ });
+
+ final streamQueue = StreamQueue(camera.onVideoRecordedEvent);
+
+ await camera.startVideoRecording(maxVideoDuration: maxVideoDuration);
+
+ Blob? finalVideo;
+ camera.blobBuilder = (blobs, videoType) {
+ finalVideo = Blob(blobs, videoType);
+ return finalVideo!;
+ };
+
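+          // Simulate a recorded data chunk followed by the recorder stopping.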
+ videoDataAvailableListener.call(FakeBlobEvent(Blob([])));
+ videoRecordingStoppedListener.call(Event('stop'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+              isA<VideoRecordedEvent>()
+ .having(
+ (e) => e.cameraId,
+ 'cameraId',
+ textureId,
+ )
+ .having(
+ (e) => e.file,
+ 'file',
+                    isA<XFile>()
+ .having(
+ (f) => f.mimeType,
+ 'mimeType',
+ supportedVideoType,
+ )
+ .having(
+ (f) => f.name,
+ 'name',
+ finalVideo.hashCode.toString(),
+ ),
+ )
+ .having(
+ (e) => e.maxVideoDuration,
+ 'maxVideoDuration',
+ maxVideoDuration,
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+
+ group('onEnded', () {
+ testWidgets(
+ 'emits the default video track '
+ 'when it emits an ended event', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final streamQueue = StreamQueue(camera.onEnded);
+
+ await camera.initialize();
+
+ final videoTracks = camera.stream!.getVideoTracks();
+ final defaultVideoTrack = videoTracks.first;
+
+ defaultVideoTrack.dispatchEvent(Event('ended'));
+
+ expect(
+ await streamQueue.next,
+ equals(defaultVideoTrack),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits the default video track '
+ 'when the camera is stopped', (tester) async {
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ );
+
+ final streamQueue = StreamQueue(camera.onEnded);
+
+ await camera.initialize();
+
+ final videoTracks = camera.stream!.getVideoTracks();
+ final defaultVideoTrack = videoTracks.first;
+
+ camera.stop();
+
+ expect(
+ await streamQueue.next,
+ equals(defaultVideoTrack),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+
+ group('onVideoRecordingError', () {
+ testWidgets(
+ 'emits an ErrorEvent '
+ 'when the media recorder fails '
+ 'when recording a video', (tester) async {
+ final mediaRecorder = MockMediaRecorder();
+          final errorController = StreamController<ErrorEvent>();
+
+ final camera = Camera(
+ textureId: textureId,
+ cameraService: cameraService,
+ )..mediaRecorder = mediaRecorder;
+
+ when(() => mediaRecorder.onError)
+ .thenAnswer((_) => errorController.stream);
+
+ final streamQueue = StreamQueue(camera.onVideoRecordingError);
+
+ await camera.initialize();
+ await camera.play();
+
+ await camera.startVideoRecording();
+
+ final errorEvent = ErrorEvent('type');
+ errorController.add(errorEvent);
+
+ expect(
+ await streamQueue.next,
+ equals(errorEvent),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_web_exception_test.dart b/packages/camera/camera_web/example/integration_test/camera_web_exception_test.dart
new file mode 100644
index 000000000000..6f8531b6f4af
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_web_exception_test.dart
@@ -0,0 +1,38 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraWebException', () {
+ testWidgets('sets all properties', (tester) async {
+ final cameraId = 1;
+ final code = CameraErrorCode.notFound;
+ final description = 'The camera is not found.';
+
+ final exception = CameraWebException(cameraId, code, description);
+
+ expect(exception.cameraId, equals(cameraId));
+ expect(exception.code, equals(code));
+ expect(exception.description, equals(description));
+ });
+
+ testWidgets('toString includes all properties', (tester) async {
+ final cameraId = 2;
+ final code = CameraErrorCode.notReadable;
+ final description = 'The camera is not readable.';
+
+ final exception = CameraWebException(cameraId, code, description);
+
+ expect(
+ exception.toString(),
+ equals('CameraWebException($cameraId, $code, $description)'),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/camera_web_test.dart b/packages/camera/camera_web/example/integration_test/camera_web_test.dart
new file mode 100644
index 000000000000..9749559ed8c6
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/camera_web_test.dart
@@ -0,0 +1,2946 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html';
+import 'dart:ui';
+
+import 'package:async/async.dart';
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_web/camera_web.dart';
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter/widgets.dart' as widgets;
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+import 'package:mocktail/mocktail.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('CameraPlugin', () {
+ const cameraId = 0;
+
+ late Window window;
+ late Navigator navigator;
+ late MediaDevices mediaDevices;
+ late VideoElement videoElement;
+ late Screen screen;
+ late ScreenOrientation screenOrientation;
+ late Document document;
+ late Element documentElement;
+
+ late CameraService cameraService;
+
+ setUp(() async {
+ window = MockWindow();
+ navigator = MockNavigator();
+ mediaDevices = MockMediaDevices();
+
+ videoElement = getVideoElementWithBlankStream(Size(10, 10));
+
+ when(() => window.navigator).thenReturn(navigator);
+ when(() => navigator.mediaDevices).thenReturn(mediaDevices);
+
+ screen = MockScreen();
+ screenOrientation = MockScreenOrientation();
+
+ when(() => screen.orientation).thenReturn(screenOrientation);
+ when(() => window.screen).thenReturn(screen);
+
+ document = MockDocument();
+ documentElement = MockElement();
+
+ when(() => document.documentElement).thenReturn(documentElement);
+ when(() => window.document).thenReturn(document);
+
+ cameraService = MockCameraService();
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ any(),
+ cameraId: any(named: 'cameraId'),
+ ),
+ ).thenAnswer(
+ (_) async => videoElement.captureStream(),
+ );
+
+ CameraPlatform.instance = CameraPlugin(
+ cameraService: cameraService,
+ )..window = window;
+ });
+
+ setUpAll(() {
+ registerFallbackValue(MockMediaStreamTrack());
+ registerFallbackValue(MockCameraOptions());
+ registerFallbackValue(FlashMode.off);
+ });
+
+ testWidgets('CameraPlugin is the live instance', (tester) async {
+      expect(CameraPlatform.instance, isA<CameraPlugin>());
+ });
+
+ group('availableCameras', () {
+ setUp(() {
+ when(
+ () => cameraService.getFacingModeForVideoTrack(
+ any(),
+ ),
+ ).thenReturn(null);
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) async => [],
+ );
+ });
+
+ testWidgets('requests video and audio permissions', (tester) async {
+ final _ = await CameraPlatform.instance.availableCameras();
+
+ verify(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ audio: AudioConstraints(enabled: true),
+ ),
+ ),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'releases the camera stream '
+ 'used to request video and audio permissions', (tester) async {
+ final videoTrack = MockMediaStreamTrack();
+
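+        // Track whether the stream used for the permission request gets stopped.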
+ var videoTrackStopped = false;
+ when(videoTrack.stop).thenAnswer((_) {
+ videoTrackStopped = true;
+ });
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ audio: AudioConstraints(enabled: true),
+ ),
+ ),
+ ).thenAnswer(
+ (_) => Future.value(
+ FakeMediaStream([videoTrack]),
+ ),
+ );
+
+ final _ = await CameraPlatform.instance.availableCameras();
+
+ expect(videoTrackStopped, isTrue);
+ });
+
+ testWidgets(
+ 'gets a video stream '
+ 'for a video input device', (tester) async {
+ final videoDevice = FakeMediaDeviceInfo(
+ '1',
+ 'Camera 1',
+ MediaDeviceKind.videoInput,
+ );
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) => Future.value([videoDevice]),
+ );
+
+ final _ = await CameraPlatform.instance.availableCameras();
+
+ verify(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(
+ deviceId: videoDevice.deviceId,
+ ),
+ ),
+ ),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'does not get a video stream '
+ 'for the video input device '
+ 'with an empty device id', (tester) async {
+ final videoDevice = FakeMediaDeviceInfo(
+ '',
+ 'Camera 1',
+ MediaDeviceKind.videoInput,
+ );
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) => Future.value([videoDevice]),
+ );
+
+ final _ = await CameraPlatform.instance.availableCameras();
+
+ verifyNever(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(
+ deviceId: videoDevice.deviceId,
+ ),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'gets the facing mode '
+ 'from the first available video track '
+ 'of the video input device', (tester) async {
+ final videoDevice = FakeMediaDeviceInfo(
+ '1',
+ 'Camera 1',
+ MediaDeviceKind.videoInput,
+ );
+
+ final videoStream =
+ FakeMediaStream([MockMediaStreamTrack(), MockMediaStreamTrack()]);
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(deviceId: videoDevice.deviceId),
+ ),
+ ),
+ ).thenAnswer((_) => Future.value(videoStream));
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) => Future.value([videoDevice]),
+ );
+
+ final _ = await CameraPlatform.instance.availableCameras();
+
+ verify(
+ () => cameraService.getFacingModeForVideoTrack(
+ videoStream.getVideoTracks().first,
+ ),
+ ).called(1);
+ });
+
+ testWidgets(
+ 'returns appropriate camera descriptions '
+ 'for multiple video devices '
+ 'based on video streams', (tester) async {
+ final firstVideoDevice = FakeMediaDeviceInfo(
+ '1',
+ 'Camera 1',
+ MediaDeviceKind.videoInput,
+ );
+
+ final secondVideoDevice = FakeMediaDeviceInfo(
+ '4',
+ 'Camera 4',
+ MediaDeviceKind.videoInput,
+ );
+
+ // Create a video stream for the first video device.
+ final firstVideoStream =
+ FakeMediaStream([MockMediaStreamTrack(), MockMediaStreamTrack()]);
+
+ // Create a video stream for the second video device.
+ final secondVideoStream = FakeMediaStream([MockMediaStreamTrack()]);
+
+ // Mock media devices to return two video input devices
+ // and two audio devices.
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) => Future.value([
+ firstVideoDevice,
+ FakeMediaDeviceInfo(
+ '2',
+ 'Audio Input 2',
+ MediaDeviceKind.audioInput,
+ ),
+ FakeMediaDeviceInfo(
+ '3',
+ 'Audio Output 3',
+ MediaDeviceKind.audioOutput,
+ ),
+ secondVideoDevice,
+ ]),
+ );
+
+ // Mock camera service to return the first video stream
+ // for the first video device.
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(deviceId: firstVideoDevice.deviceId),
+ ),
+ ),
+ ).thenAnswer((_) => Future.value(firstVideoStream));
+
+ // Mock camera service to return the second video stream
+ // for the second video device.
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(deviceId: secondVideoDevice.deviceId),
+ ),
+ ),
+ ).thenAnswer((_) => Future.value(secondVideoStream));
+
+ // Mock camera service to return a user facing mode
+ // for the first video stream.
+ when(
+ () => cameraService.getFacingModeForVideoTrack(
+ firstVideoStream.getVideoTracks().first,
+ ),
+ ).thenReturn('user');
+
+ when(() => cameraService.mapFacingModeToLensDirection('user'))
+ .thenReturn(CameraLensDirection.front);
+
+ // Mock camera service to return an environment facing mode
+ // for the second video stream.
+ when(
+ () => cameraService.getFacingModeForVideoTrack(
+ secondVideoStream.getVideoTracks().first,
+ ),
+ ).thenReturn('environment');
+
+ when(() => cameraService.mapFacingModeToLensDirection('environment'))
+ .thenReturn(CameraLensDirection.back);
+
+ final cameras = await CameraPlatform.instance.availableCameras();
+
+ // Expect two cameras and ignore two audio devices.
+ expect(
+ cameras,
+ equals([
+ CameraDescription(
+ name: firstVideoDevice.label!,
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 0,
+ ),
+ CameraDescription(
+ name: secondVideoDevice.label!,
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ )
+ ]),
+ );
+ });
+
+ testWidgets(
+ 'sets camera metadata '
+ 'for the camera description', (tester) async {
+ final videoDevice = FakeMediaDeviceInfo(
+ '1',
+ 'Camera 1',
+ MediaDeviceKind.videoInput,
+ );
+
+ final videoStream =
+ FakeMediaStream([MockMediaStreamTrack(), MockMediaStreamTrack()]);
+
+ when(mediaDevices.enumerateDevices).thenAnswer(
+ (_) => Future.value([videoDevice]),
+ );
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ CameraOptions(
+ video: VideoConstraints(deviceId: videoDevice.deviceId),
+ ),
+ ),
+ ).thenAnswer((_) => Future.value(videoStream));
+
+ when(
+ () => cameraService.getFacingModeForVideoTrack(
+ videoStream.getVideoTracks().first,
+ ),
+ ).thenReturn('left');
+
+ when(() => cameraService.mapFacingModeToLensDirection('left'))
+ .thenReturn(CameraLensDirection.external);
+
+ final camera = (await CameraPlatform.instance.availableCameras()).first;
+
+ expect(
+ (CameraPlatform.instance as CameraPlugin).camerasMetadata,
+ equals({
+ camera: CameraMetadata(
+ deviceId: videoDevice.deviceId!,
+ facingMode: 'left',
+ )
+ }),
+ );
+ });
+
+ group('throws CameraException', () {
+ testWidgets(
+ 'with notSupported error '
+ 'when there are no media devices', (tester) async {
+ when(() => navigator.mediaDevices).thenReturn(null);
+
+ expect(
+ () => CameraPlatform.instance.availableCameras(),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when MediaDevices.enumerateDevices throws DomException',
+ (tester) async {
+ final exception = FakeDomException(DomException.UNKNOWN);
+
+ when(mediaDevices.enumerateDevices).thenThrow(exception);
+
+ expect(
+ () => CameraPlatform.instance.availableCameras(),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'when CameraService.getMediaStreamForOptions '
+ 'throws CameraWebException', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.security,
+ 'description',
+ );
+
+ when(() => cameraService.getMediaStreamForOptions(any()))
+ .thenThrow(exception);
+
+ expect(
+ () => CameraPlatform.instance.availableCameras(),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'when CameraService.getMediaStreamForOptions '
+ 'throws PlatformException', (tester) async {
+ final exception = PlatformException(
+ code: CameraErrorCode.notSupported.toString(),
+ message: 'message',
+ );
+
+ when(() => cameraService.getMediaStreamForOptions(any()))
+ .thenThrow(exception);
+
+ expect(
+ () => CameraPlatform.instance.availableCameras(),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('createCamera', () {
+ group('creates a camera', () {
+ const ultraHighResolutionSize = Size(3840, 2160);
+ const maxResolutionSize = Size(3840, 2160);
+
+ final cameraDescription = CameraDescription(
+ name: 'name',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 0,
+ );
+
+ final cameraMetadata = CameraMetadata(
+ deviceId: 'deviceId',
+ facingMode: 'user',
+ );
+
+ setUp(() {
+ // Add metadata for the camera description.
+ (CameraPlatform.instance as CameraPlugin)
+ .camerasMetadata[cameraDescription] = cameraMetadata;
+
+ when(
+ () => cameraService.mapFacingModeToCameraType('user'),
+ ).thenReturn(CameraType.user);
+ });
+
+ testWidgets('with appropriate options', (tester) async {
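+ // Map the ultra high resolution preset to a 3840x2160 size.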
+ when(
+ () => cameraService
+ .mapResolutionPresetToSize(ResolutionPreset.ultraHigh),
+ ).thenReturn(ultraHighResolutionSize);
+
+ final cameraId = await CameraPlatform.instance.createCamera(
+ cameraDescription,
+ ResolutionPreset.ultraHigh,
+ enableAudio: true,
+ );
+
+ expect(
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId],
+ isA<Camera>()
+ .having(
+ (camera) => camera.textureId,
+ 'textureId',
+ cameraId,
+ )
+ .having(
+ (camera) => camera.options,
+ 'options',
+ CameraOptions(
+ audio: AudioConstraints(enabled: true),
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint(CameraType.user),
+ width: VideoSizeConstraint(
+ ideal: ultraHighResolutionSize.width.toInt(),
+ ),
+ height: VideoSizeConstraint(
+ ideal: ultraHighResolutionSize.height.toInt(),
+ ),
+ deviceId: cameraMetadata.deviceId,
+ ),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with a max resolution preset '
+ 'and enabled audio set to false '
+ 'when no options are specified', (tester) async {
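+ // Map the max resolution preset to a 3840x2160 size.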
+ when(
+ () => cameraService.mapResolutionPresetToSize(ResolutionPreset.max),
+ ).thenReturn(maxResolutionSize);
+
+ final cameraId = await CameraPlatform.instance.createCamera(
+ cameraDescription,
+ null,
+ );
+
+ expect(
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId],
+ isA<Camera>().having(
+ (camera) => camera.options,
+ 'options',
+ CameraOptions(
+ audio: AudioConstraints(enabled: false),
+ video: VideoConstraints(
+ facingMode: FacingModeConstraint(CameraType.user),
+ width: VideoSizeConstraint(
+ ideal: maxResolutionSize.width.toInt(),
+ ),
+ height: VideoSizeConstraint(
+ ideal: maxResolutionSize.height.toInt(),
+ ),
+ deviceId: cameraMetadata.deviceId,
+ ),
+ ),
+ ),
+ );
+ });
+ });
+
+ testWidgets(
+ 'throws CameraException '
+ 'with missingMetadata error '
+ 'if there is no metadata '
+ 'for the given camera description', (tester) async {
+ expect(
+ () => CameraPlatform.instance.createCamera(
+ CameraDescription(
+ name: 'name',
+ lensDirection: CameraLensDirection.back,
+ sensorOrientation: 0,
+ ),
+ ResolutionPreset.ultraHigh,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.missingMetadata.toString(),
+ ),
+ ),
+ );
+ });
+ });
+
+ group('initializeCamera', () {
+ late Camera camera;
+ late VideoElement videoElement;
+
+ late StreamController errorStreamController, abortStreamController;
+ late StreamController endedStreamController;
+
+ setUp(() {
+ camera = MockCamera();
+ videoElement = MockVideoElement();
+
+ errorStreamController = StreamController();
+ abortStreamController = StreamController();
+ endedStreamController = StreamController();
+
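+ // Stub the camera so that initialization and playback succeed.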
+ when(camera.getVideoSize).thenReturn(Size(10, 10));
+ when(camera.initialize).thenAnswer((_) => Future.value());
+ when(camera.play).thenAnswer((_) => Future.value());
+
+ when(() => camera.videoElement).thenReturn(videoElement);
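+ // Expose the error and abort events of the underlying video element.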
+ when(() => videoElement.onError)
+ .thenAnswer((_) => FakeElementStream(errorStreamController.stream));
+ when(() => videoElement.onAbort)
+ .thenAnswer((_) => FakeElementStream(abortStreamController.stream));
+
+ when(() => camera.onEnded)
+ .thenAnswer((_) => endedStreamController.stream);
+ });
+
+ testWidgets('initializes and plays the camera', (tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ verify(camera.initialize).called(1);
+ verify(camera.play).called(1);
+ });
+
+ testWidgets('starts listening to the camera video error and abort events',
+ (tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(errorStreamController.hasListener, isFalse);
+ expect(abortStreamController.hasListener, isFalse);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ expect(errorStreamController.hasListener, isTrue);
+ expect(abortStreamController.hasListener, isTrue);
+ });
+
+ testWidgets('starts listening to the camera ended events',
+ (tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(endedStreamController.hasListener, isFalse);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ expect(endedStreamController.hasListener, isTrue);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => CameraPlatform.instance.initializeCamera(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when camera throws CameraWebException', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.permissionDenied,
+ 'description',
+ );
+
+ when(camera.initialize).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.initializeCamera(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when camera throws DomException', (tester) async {
+ final exception = FakeDomException(DomException.NOT_ALLOWED);
+
+ when(camera.initialize).thenAnswer((_) => Future.value());
+ when(camera.play).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.initializeCamera(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('lockCaptureOrientation', () {
+ setUp(() {
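+ // Map any device orientation to portrait-primary by default.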
+ when(
+ () => cameraService.mapDeviceOrientationToOrientationType(any()),
+ ).thenReturn(OrientationType.portraitPrimary);
+ });
+
+ testWidgets(
+ 'requests full-screen mode '
+ 'on documentElement', (tester) async {
+ await CameraPlatform.instance.lockCaptureOrientation(
+ cameraId,
+ DeviceOrientation.portraitUp,
+ );
+
+ verify(documentElement.requestFullscreen).called(1);
+ });
+
+ testWidgets(
+ 'locks the capture orientation '
+ 'based on the given device orientation', (tester) async {
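+ // Map landscapeRight to the landscape-secondary orientation type.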
+ when(
+ () => cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.landscapeRight,
+ ),
+ ).thenReturn(OrientationType.landscapeSecondary);
+
+ await CameraPlatform.instance.lockCaptureOrientation(
+ cameraId,
+ DeviceOrientation.landscapeRight,
+ );
+
+ verify(
+ () => cameraService.mapDeviceOrientationToOrientationType(
+ DeviceOrientation.landscapeRight,
+ ),
+ ).called(1);
+
+ verify(
+ () => screenOrientation.lock(
+ OrientationType.landscapeSecondary,
+ ),
+ ).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with orientationNotSupported error '
+ 'when screen is not supported', (tester) async {
+ when(() => window.screen).thenReturn(null);
+
+ expect(
+ () => CameraPlatform.instance.lockCaptureOrientation(
+ cameraId,
+ DeviceOrientation.portraitUp,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.orientationNotSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with orientationNotSupported error '
+ 'when screen orientation is not supported', (tester) async {
+ when(() => screen.orientation).thenReturn(null);
+
+ expect(
+ () => CameraPlatform.instance.lockCaptureOrientation(
+ cameraId,
+ DeviceOrientation.portraitUp,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.orientationNotSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with orientationNotSupported error '
+ 'when documentElement is not available', (tester) async {
+ when(() => document.documentElement).thenReturn(null);
+
+ expect(
+ () => CameraPlatform.instance.lockCaptureOrientation(
+ cameraId,
+ DeviceOrientation.portraitUp,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.orientationNotSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when lock throws DomException', (tester) async {
+ final exception = FakeDomException(DomException.NOT_ALLOWED);
+
+ when(() => screenOrientation.lock(any())).thenThrow(exception);
+
+ expect(
+ () => CameraPlatform.instance.lockCaptureOrientation(
+ cameraId,
+ DeviceOrientation.portraitDown,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('unlockCaptureOrientation', () {
+ setUp(() {
+ when(
+ () => cameraService.mapDeviceOrientationToOrientationType(any()),
+ ).thenReturn(OrientationType.portraitPrimary);
+ });
+
+ testWidgets('unlocks the capture orientation', (tester) async {
+ await CameraPlatform.instance.unlockCaptureOrientation(
+ cameraId,
+ );
+
+ verify(screenOrientation.unlock).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with orientationNotSupported error '
+ 'when screen is not supported', (tester) async {
+ when(() => window.screen).thenReturn(null);
+
+ expect(
+ () => CameraPlatform.instance.unlockCaptureOrientation(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.orientationNotSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with orientationNotSupported error '
+ 'when screen orientation is not supported', (tester) async {
+ when(() => screen.orientation).thenReturn(null);
+
+ expect(
+ () => CameraPlatform.instance.unlockCaptureOrientation(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.orientationNotSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets(
+ 'with orientationNotSupported error '
+ 'when documentElement is not available', (tester) async {
+ when(() => document.documentElement).thenReturn(null);
+
+ expect(
+ () => CameraPlatform.instance.unlockCaptureOrientation(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.orientationNotSupported.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when unlock throws DomException', (tester) async {
+ final exception = FakeDomException(DomException.NOT_ALLOWED);
+
+ when(screenOrientation.unlock).thenThrow(exception);
+
+ expect(
+ () => CameraPlatform.instance.unlockCaptureOrientation(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('takePicture', () {
+ testWidgets('captures a picture', (tester) async {
+ final camera = MockCamera();
+ final capturedPicture = MockXFile();
+
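+ // Return a mocked file when a picture is taken.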
+ when(camera.takePicture)
+ .thenAnswer((_) => Future.value(capturedPicture));
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ final picture = await CameraPlatform.instance.takePicture(cameraId);
+
+ verify(camera.takePicture).called(1);
+
+ expect(picture, equals(capturedPicture));
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => CameraPlatform.instance.takePicture(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when takePicture throws DomException', (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.takePicture).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.takePicture(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when takePicture throws CameraWebException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.takePicture).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.takePicture(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('startVideoRecording', () {
+ late Camera camera;
+
+ setUp(() {
+ camera = MockCamera();
+
+ when(camera.startVideoRecording).thenAnswer((_) async {});
+
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((_) => const Stream.empty());
+ });
+
+ testWidgets('starts a video recording', (tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.startVideoRecording(cameraId);
+
+ verify(camera.startVideoRecording).called(1);
+ });
+
+ testWidgets('listens to the onVideoRecordingError stream',
+ (tester) async {
+ final videoRecordingErrorController = StreamController();
+
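+ // Expose a controllable video recording error stream.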
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((_) => videoRecordingErrorController.stream);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.startVideoRecording(cameraId);
+
+ expect(
+ videoRecordingErrorController.hasListener,
+ isTrue,
+ );
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => CameraPlatform.instance.startVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when startVideoRecording throws DomException',
+ (tester) async {
+ final exception = FakeDomException(DomException.INVALID_STATE);
+
+ when(camera.startVideoRecording).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.startVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when startVideoRecording throws CameraWebException',
+ (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.startVideoRecording).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.startVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('stopVideoRecording', () {
+ testWidgets('stops a video recording', (tester) async {
+ final camera = MockCamera();
+ final capturedVideo = MockXFile();
+
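+ // Return a mocked video file when the recording is stopped.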
+ when(camera.stopVideoRecording)
+ .thenAnswer((_) => Future.value(capturedVideo));
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ final video =
+ await CameraPlatform.instance.stopVideoRecording(cameraId);
+
+ verify(camera.stopVideoRecording).called(1);
+
+ expect(video, capturedVideo);
+ });
+
+ testWidgets('stops listening to the onVideoRecordingError stream',
+ (tester) async {
+ final camera = MockCamera();
+ final videoRecordingErrorController = StreamController();
+
+ when(camera.startVideoRecording).thenAnswer((_) async {});
+
+ when(camera.stopVideoRecording)
+ .thenAnswer((_) => Future.value(MockXFile()));
+
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((_) => videoRecordingErrorController.stream);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.startVideoRecording(cameraId);
+ final _ = await CameraPlatform.instance.stopVideoRecording(cameraId);
+
+ expect(
+ videoRecordingErrorController.hasListener,
+ isFalse,
+ );
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => CameraPlatform.instance.stopVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when stopVideoRecording throws DomException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.INVALID_STATE);
+
+ when(camera.stopVideoRecording).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.stopVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when stopVideoRecording throws CameraWebException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.stopVideoRecording).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.stopVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('pauseVideoRecording', () {
+ testWidgets('pauses a video recording', (tester) async {
+ final camera = MockCamera();
+
+ when(camera.pauseVideoRecording).thenAnswer((_) async {});
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.pauseVideoRecording(cameraId);
+
+ verify(camera.pauseVideoRecording).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => CameraPlatform.instance.pauseVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when pauseVideoRecording throws DomException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.INVALID_STATE);
+
+ when(camera.pauseVideoRecording).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.pauseVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when pauseVideoRecording throws CameraWebException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.pauseVideoRecording).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.pauseVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('resumeVideoRecording', () {
+ testWidgets('resumes a video recording', (tester) async {
+ final camera = MockCamera();
+
+ when(camera.resumeVideoRecording).thenAnswer((_) async {});
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.resumeVideoRecording(cameraId);
+
+ verify(camera.resumeVideoRecording).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => CameraPlatform.instance.resumeVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when resumeVideoRecording throws DomException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.INVALID_STATE);
+
+ when(camera.resumeVideoRecording).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.resumeVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when resumeVideoRecording throws CameraWebException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.resumeVideoRecording).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.resumeVideoRecording(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('setFlashMode', () {
+ testWidgets('calls setFlashMode on the camera', (tester) async {
+ final camera = MockCamera();
+ const flashMode = FlashMode.always;
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.setFlashMode(
+ cameraId,
+ flashMode,
+ );
+
+ verify(() => camera.setFlashMode(flashMode)).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => CameraPlatform.instance.setFlashMode(
+ cameraId,
+ FlashMode.always,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when setFlashMode throws DomException', (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(() => camera.setFlashMode(any())).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.setFlashMode(
+ cameraId,
+ FlashMode.always,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when setFlashMode throws CameraWebException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(() => camera.setFlashMode(any())).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.setFlashMode(
+ cameraId,
+ FlashMode.torch,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ testWidgets('setExposureMode throws UnimplementedError', (tester) async {
+ expect(
+ () => CameraPlatform.instance.setExposureMode(
+ cameraId,
+ ExposureMode.auto,
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('setExposurePoint throws UnimplementedError', (tester) async {
+ expect(
+ () => CameraPlatform.instance.setExposurePoint(
+ cameraId,
+ const Point(0, 0),
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('getMinExposureOffset throws UnimplementedError',
+ (tester) async {
+ expect(
+ () => CameraPlatform.instance.getMinExposureOffset(cameraId),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('getMaxExposureOffset throws UnimplementedError',
+ (tester) async {
+ expect(
+ () => CameraPlatform.instance.getMaxExposureOffset(cameraId),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('getExposureOffsetStepSize throws UnimplementedError',
+ (tester) async {
+ expect(
+ () => CameraPlatform.instance.getExposureOffsetStepSize(cameraId),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('setExposureOffset throws UnimplementedError', (tester) async {
+ expect(
+ () => CameraPlatform.instance.setExposureOffset(
+ cameraId,
+ 0,
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('setFocusMode throws UnimplementedError', (tester) async {
+ expect(
+ () => CameraPlatform.instance.setFocusMode(
+ cameraId,
+ FocusMode.auto,
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ testWidgets('setFocusPoint throws UnimplementedError', (tester) async {
+ expect(
+ () => CameraPlatform.instance.setFocusPoint(
+ cameraId,
+ const Point(0, 0),
+ ),
+ throwsUnimplementedError,
+ );
+ });
+
+ group('getMaxZoomLevel', () {
+ testWidgets('calls getMaxZoomLevel on the camera', (tester) async {
+ final camera = MockCamera();
+ const maximumZoomLevel = 100.0;
+
+ when(camera.getMaxZoomLevel).thenReturn(maximumZoomLevel);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ await CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ equals(maximumZoomLevel),
+ );
+
+ verify(camera.getMaxZoomLevel).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () async => await CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when getMaxZoomLevel throws DomException', (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.getMaxZoomLevel).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when getMaxZoomLevel throws CameraWebException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.getMaxZoomLevel).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('getMinZoomLevel', () {
+ testWidgets('calls getMinZoomLevel on the camera', (tester) async {
+ final camera = MockCamera();
+ const minimumZoomLevel = 100.0;
+
+ when(camera.getMinZoomLevel).thenReturn(minimumZoomLevel);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ await CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ equals(minimumZoomLevel),
+ );
+
+ verify(camera.getMinZoomLevel).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () async => await CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when getMinZoomLevel throws DomException', (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.getMinZoomLevel).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when getMinZoomLevel throws CameraWebException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.getMinZoomLevel).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('setZoomLevel', () {
+ testWidgets('calls setZoomLevel on the camera', (tester) async {
+ final camera = MockCamera();
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ const zoom = 100.0;
+
+ await CameraPlatform.instance.setZoomLevel(cameraId, zoom);
+
+ verify(() => camera.setZoomLevel(zoom)).called(1);
+ });
+
+ group('throws CameraException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () async => await CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when setZoomLevel throws DomException', (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(() => camera.setZoomLevel(any())).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when setZoomLevel throws PlatformException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = PlatformException(
+ code: CameraErrorCode.notSupported.toString(),
+ message: 'message',
+ );
+
+ when(() => camera.setZoomLevel(any())).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when setZoomLevel throws CameraWebException',
+ (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(() => camera.setZoomLevel(any())).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA<CameraException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('pausePreview', () {
+ testWidgets('calls pause on the camera', (tester) async {
+ final camera = MockCamera();
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.pausePreview(cameraId);
+
+ verify(camera.pause).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () async => await CameraPlatform.instance.pausePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when pause throws DomException', (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.pause).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.pausePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('resumePreview', () {
+ testWidgets('calls play on the camera', (tester) async {
+ final camera = MockCamera();
+
+ when(camera.play).thenAnswer((_) async {});
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.resumePreview(cameraId);
+
+ verify(camera.play).called(1);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () async => await CameraPlatform.instance.resumePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when play throws DomException', (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.NOT_SUPPORTED);
+
+ when(camera.play).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.resumePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when play throws CameraWebException', (tester) async {
+ final camera = MockCamera();
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.unknown,
+ 'description',
+ );
+
+ when(camera.play).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () async => await CameraPlatform.instance.resumePreview(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.code.toString(),
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ testWidgets(
+ 'buildPreview returns an HtmlElementView '
+ 'with an appropriate view type', (tester) async {
+ final camera = Camera(
+ textureId: cameraId,
+ cameraService: cameraService,
+ );
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ CameraPlatform.instance.buildPreview(cameraId),
+ isA<HtmlElementView>().having(
+ (view) => view.viewType,
+ 'viewType',
+ camera.getViewType(),
+ ),
+ );
+ });
+
+ group('dispose', () {
+ late Camera camera;
+ late VideoElement videoElement;
+
+ late StreamController errorStreamController, abortStreamController;
+ late StreamController endedStreamController;
+ late StreamController videoRecordingErrorController;
+
+ setUp(() {
+ camera = MockCamera();
+ videoElement = MockVideoElement();
+
+ errorStreamController = StreamController();
+ abortStreamController = StreamController();
+ endedStreamController = StreamController();
+ videoRecordingErrorController = StreamController();
+
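+ // Stub the camera so that initialization, playback and disposal succeed.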
+ when(camera.getVideoSize).thenReturn(Size(10, 10));
+ when(camera.initialize).thenAnswer((_) => Future.value());
+ when(camera.play).thenAnswer((_) => Future.value());
+ when(camera.dispose).thenAnswer((_) => Future.value());
+
+ when(() => camera.videoElement).thenReturn(videoElement);
+ when(() => videoElement.onError)
+ .thenAnswer((_) => FakeElementStream(errorStreamController.stream));
+ when(() => videoElement.onAbort)
+ .thenAnswer((_) => FakeElementStream(abortStreamController.stream));
+
+ when(() => camera.onEnded)
+ .thenAnswer((_) => endedStreamController.stream);
+
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((_) => videoRecordingErrorController.stream);
+
+ when(camera.startVideoRecording).thenAnswer((_) async {});
+ });
+
+ testWidgets('disposes the correct camera', (tester) async {
+ const firstCameraId = 0;
+ const secondCameraId = 1;
+
+ final firstCamera = MockCamera();
+ final secondCamera = MockCamera();
+
+ when(firstCamera.dispose).thenAnswer((_) => Future.value());
+ when(secondCamera.dispose).thenAnswer((_) => Future.value());
+
+ // Save cameras in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras.addAll({
+ firstCameraId: firstCamera,
+ secondCameraId: secondCamera,
+ });
+
+ // Dispose the first camera.
+ await CameraPlatform.instance.dispose(firstCameraId);
+
+ // The first camera should be disposed.
+ verify(firstCamera.dispose).called(1);
+ verifyNever(secondCamera.dispose);
+
+ // The first camera should be removed from the camera plugin.
+ expect(
+ (CameraPlatform.instance as CameraPlugin).cameras,
+ equals({
+ secondCameraId: secondCamera,
+ }),
+ );
+ });
+
+ testWidgets('cancels the camera video error and abort subscriptions',
+ (tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+ await CameraPlatform.instance.dispose(cameraId);
+
+ expect(errorStreamController.hasListener, isFalse);
+ expect(abortStreamController.hasListener, isFalse);
+ });
+
+ testWidgets('cancels the camera ended subscriptions', (tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+ await CameraPlatform.instance.dispose(cameraId);
+
+ expect(endedStreamController.hasListener, isFalse);
+ });
+
+ testWidgets('cancels the camera video recording error subscriptions',
+ (tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+ await CameraPlatform.instance.startVideoRecording(cameraId);
+ await CameraPlatform.instance.dispose(cameraId);
+
+ expect(videoRecordingErrorController.hasListener, isFalse);
+ });
+
+ group('throws PlatformException', () {
+ testWidgets(
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => CameraPlatform.instance.dispose(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+
+ testWidgets('when dispose throws DomException', (tester) async {
+ final camera = MockCamera();
+ final exception = FakeDomException(DomException.INVALID_ACCESS);
+
+ when(camera.dispose).thenThrow(exception);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ () => CameraPlatform.instance.dispose(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ exception.name,
+ ),
+ ),
+ );
+ });
+ });
+ });
+
+ group('getCamera', () {
+ testWidgets('returns the correct camera', (tester) async {
+ final camera = Camera(
+ textureId: cameraId,
+ cameraService: cameraService,
+ );
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ expect(
+ (CameraPlatform.instance as CameraPlugin).getCamera(cameraId),
+ equals(camera),
+ );
+ });
+
+ testWidgets(
+ 'throws PlatformException '
+ 'with notFound error '
+ 'if the camera does not exist', (tester) async {
+ expect(
+ () => (CameraPlatform.instance as CameraPlugin).getCamera(cameraId),
+ throwsA(
+ isA<PlatformException>().having(
+ (e) => e.code,
+ 'code',
+ CameraErrorCode.notFound.toString(),
+ ),
+ ),
+ );
+ });
+ });
+
+ group('events', () {
+ late Camera camera;
+ late VideoElement videoElement;
+
+ late StreamController errorStreamController, abortStreamController;
+ late StreamController endedStreamController;
+ late StreamController videoRecordingErrorController;
+
+ setUp(() {
+ camera = MockCamera();
+ videoElement = MockVideoElement();
+
+ errorStreamController = StreamController();
+ abortStreamController = StreamController();
+ endedStreamController = StreamController();
+ videoRecordingErrorController = StreamController();
+
+ when(camera.getVideoSize).thenReturn(Size(10, 10));
+ when(camera.initialize).thenAnswer((_) => Future.value());
+ when(camera.play).thenAnswer((_) => Future.value());
+
+ when(() => camera.videoElement).thenReturn(videoElement);
+ when(() => videoElement.onError)
+ .thenAnswer((_) => FakeElementStream(errorStreamController.stream));
+ when(() => videoElement.onAbort)
+ .thenAnswer((_) => FakeElementStream(abortStreamController.stream));
+
+ when(() => camera.onEnded)
+ .thenAnswer((_) => endedStreamController.stream);
+
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((_) => videoRecordingErrorController.stream);
+
+ when(() => camera.startVideoRecording()).thenAnswer((_) async {});
+ });
+
+ testWidgets(
+ 'onCameraInitialized emits a CameraInitializedEvent '
+ 'on initializeCamera', (tester) async {
+ // Mock the camera to use a blank video stream of size 1280x720.
+ const videoSize = Size(1280, 720);
+
+ videoElement = getVideoElementWithBlankStream(videoSize);
+
+ when(
+ () => cameraService.getMediaStreamForOptions(
+ any(),
+ cameraId: cameraId,
+ ),
+ ).thenAnswer((_) async => videoElement.captureStream());
+
+ final camera = Camera(
+ textureId: cameraId,
+ cameraService: cameraService,
+ );
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraInitialized(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraInitializedEvent(
+ cameraId,
+ videoSize.width,
+ videoSize.height,
+ ExposureMode.auto,
+ false,
+ FocusMode.auto,
+ false,
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets('onCameraResolutionChanged emits an empty stream',
+ (tester) async {
+ expect(
+ CameraPlatform.instance.onCameraResolutionChanged(cameraId),
+ emits(isEmpty),
+ );
+ });
+
+ testWidgets(
+ 'onCameraClosing emits a CameraClosingEvent '
+ 'on the camera ended event', (tester) async {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraClosing(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ endedStreamController.add(MockMediaStreamTrack());
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraClosingEvent(cameraId),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ group('onCameraError', () {
+ setUp(() {
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on the camera video error event '
+ 'with a message', (tester) async {
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ final error = FakeMediaError(
+ MediaError.MEDIA_ERR_NETWORK,
+ 'A network error occurred.',
+ );
+
+ final errorCode = CameraErrorCode.fromMediaError(error);
+
+ when(() => videoElement.error).thenReturn(error);
+
+ errorStreamController.add(Event('error'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${errorCode}, error message: ${error.message}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on the camera video error event '
+ 'with no message', (tester) async {
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ final error = FakeMediaError(MediaError.MEDIA_ERR_NETWORK);
+ final errorCode = CameraErrorCode.fromMediaError(error);
+
+ when(() => videoElement.error).thenReturn(error);
+
+ errorStreamController.add(Event('error'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${errorCode}, error message: No further diagnostic information can be determined or provided.',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on the camera video abort event', (tester) async {
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+
+ abortStreamController.add(Event('abort'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${CameraErrorCode.abort}, error message: The video element\'s source has not fully loaded.',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on takePicture error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.takePicture).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async => await CameraPlatform.instance.takePicture(cameraId),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on setFlashMode error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(() => camera.setFlashMode(any())).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async => await CameraPlatform.instance.setFlashMode(
+ cameraId,
+ FlashMode.always,
+ ),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on getMaxZoomLevel error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.zoomLevelNotSupported,
+ 'description',
+ );
+
+ when(camera.getMaxZoomLevel).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async => await CameraPlatform.instance.getMaxZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on getMinZoomLevel error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.zoomLevelNotSupported,
+ 'description',
+ );
+
+ when(camera.getMinZoomLevel).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async => await CameraPlatform.instance.getMinZoomLevel(
+ cameraId,
+ ),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on setZoomLevel error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.zoomLevelNotSupported,
+ 'description',
+ );
+
+ when(() => camera.setZoomLevel(any())).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async => await CameraPlatform.instance.setZoomLevel(
+ cameraId,
+ 100.0,
+ ),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on resumePreview error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.unknown,
+ 'description',
+ );
+
+ when(camera.play).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async => await CameraPlatform.instance.resumePreview(cameraId),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on startVideoRecording error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(() => camera.onVideoRecordingError)
+ .thenAnswer((_) => const Stream.empty());
+
+ when(
+ () => camera.startVideoRecording(
+ maxVideoDuration: any(named: 'maxVideoDuration'),
+ ),
+ ).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async =>
+ await CameraPlatform.instance.startVideoRecording(cameraId),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on the camera video recording error event', (tester) async {
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ await CameraPlatform.instance.initializeCamera(cameraId);
+ await CameraPlatform.instance.startVideoRecording(cameraId);
+
+ final errorEvent = FakeErrorEvent('type', 'message');
+
+ videoRecordingErrorController.add(errorEvent);
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${errorEvent.type}, error message: ${errorEvent.message}.',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on stopVideoRecording error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.stopVideoRecording).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async =>
+ await CameraPlatform.instance.stopVideoRecording(cameraId),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on pauseVideoRecording error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.pauseVideoRecording).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async =>
+ await CameraPlatform.instance.pauseVideoRecording(cameraId),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a CameraErrorEvent '
+ 'on resumeVideoRecording error', (tester) async {
+ final exception = CameraWebException(
+ cameraId,
+ CameraErrorCode.notStarted,
+ 'description',
+ );
+
+ when(camera.resumeVideoRecording).thenThrow(exception);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onCameraError(cameraId);
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ () async =>
+ await CameraPlatform.instance.resumeVideoRecording(cameraId),
+ throwsA(
+ isA(),
+ ),
+ );
+
+ expect(
+ await streamQueue.next,
+ equals(
+ CameraErrorEvent(
+ cameraId,
+ 'Error code: ${exception.code}, error message: ${exception.description}',
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+
+ testWidgets('onVideoRecordedEvent emits a VideoRecordedEvent',
+ (tester) async {
+ final camera = MockCamera();
+ final capturedVideo = MockXFile();
+ final stream = Stream.value(
+ VideoRecordedEvent(cameraId, capturedVideo, Duration.zero));
+ when(() => camera.onVideoRecordedEvent).thenAnswer((_) => stream);
+
+ // Save the camera in the camera plugin.
+ (CameraPlatform.instance as CameraPlugin).cameras[cameraId] = camera;
+
+ final streamQueue =
+ StreamQueue(CameraPlatform.instance.onVideoRecordedEvent(cameraId));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ VideoRecordedEvent(cameraId, capturedVideo, Duration.zero),
+ ),
+ );
+ });
+
+ group('onDeviceOrientationChanged', () {
+ group('emits an empty stream', () {
+ testWidgets('when screen is not supported', (tester) async {
+ when(() => window.screen).thenReturn(null);
+
+ expect(
+ CameraPlatform.instance.onDeviceOrientationChanged(),
+ emits(isEmpty),
+ );
+ });
+
+ testWidgets('when screen orientation is not supported',
+ (tester) async {
+ when(() => screen.orientation).thenReturn(null);
+
+ expect(
+ CameraPlatform.instance.onDeviceOrientationChanged(),
+ emits(isEmpty),
+ );
+ });
+ });
+
+ testWidgets('emits the initial DeviceOrientationChangedEvent',
+ (tester) async {
+ when(
+ () => cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitPrimary,
+ ),
+ ).thenReturn(DeviceOrientation.portraitUp);
+
+ // Set the initial screen orientation to portraitPrimary.
+ when(() => screenOrientation.type)
+ .thenReturn(OrientationType.portraitPrimary);
+
+ final eventStreamController = StreamController();
+
+ when(() => screenOrientation.onChange)
+ .thenAnswer((_) => eventStreamController.stream);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onDeviceOrientationChanged();
+
+ final streamQueue = StreamQueue(eventStream);
+
+ expect(
+ await streamQueue.next,
+ equals(
+ DeviceOrientationChangedEvent(
+ DeviceOrientation.portraitUp,
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+
+ testWidgets(
+ 'emits a DeviceOrientationChangedEvent '
+ 'when the screen orientation is changed', (tester) async {
+ when(
+ () => cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.landscapePrimary,
+ ),
+ ).thenReturn(DeviceOrientation.landscapeLeft);
+
+ when(
+ () => cameraService.mapOrientationTypeToDeviceOrientation(
+ OrientationType.portraitSecondary,
+ ),
+ ).thenReturn(DeviceOrientation.portraitDown);
+
+ final eventStreamController = StreamController();
+
+ when(() => screenOrientation.onChange)
+ .thenAnswer((_) => eventStreamController.stream);
+
+ final Stream eventStream =
+ CameraPlatform.instance.onDeviceOrientationChanged();
+
+ final streamQueue = StreamQueue(eventStream);
+
+ // Change the screen orientation to landscapePrimary and
+ // emit an event on the screenOrientation.onChange stream.
+ when(() => screenOrientation.type)
+ .thenReturn(OrientationType.landscapePrimary);
+
+ eventStreamController.add(Event('change'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ DeviceOrientationChangedEvent(
+ DeviceOrientation.landscapeLeft,
+ ),
+ ),
+ );
+
+ // Change the screen orientation to portraitSecondary and
+ // emit an event on the screenOrientation.onChange stream.
+ when(() => screenOrientation.type)
+ .thenReturn(OrientationType.portraitSecondary);
+
+ eventStreamController.add(Event('change'));
+
+ expect(
+ await streamQueue.next,
+ equals(
+ DeviceOrientationChangedEvent(
+ DeviceOrientation.portraitDown,
+ ),
+ ),
+ );
+
+ await streamQueue.cancel();
+ });
+ });
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/integration_test/helpers/helpers.dart b/packages/camera/camera_web/example/integration_test/helpers/helpers.dart
new file mode 100644
index 000000000000..7094f55bb62e
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/helpers/helpers.dart
@@ -0,0 +1,5 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+export 'mocks.dart';
diff --git a/packages/camera/camera_web/example/integration_test/helpers/mocks.dart b/packages/camera/camera_web/example/integration_test/helpers/mocks.dart
new file mode 100644
index 000000000000..77e9077356f7
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/helpers/mocks.dart
@@ -0,0 +1,172 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html';
+import 'dart:ui';
+
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/shims/dart_js_util.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:cross_file/cross_file.dart';
+import 'package:mocktail/mocktail.dart';
+
+class MockWindow extends Mock implements Window {}
+
+class MockScreen extends Mock implements Screen {}
+
+class MockScreenOrientation extends Mock implements ScreenOrientation {}
+
+class MockDocument extends Mock implements Document {}
+
+class MockElement extends Mock implements Element {}
+
+class MockNavigator extends Mock implements Navigator {}
+
+class MockMediaDevices extends Mock implements MediaDevices {}
+
+class MockCameraService extends Mock implements CameraService {}
+
+class MockMediaStreamTrack extends Mock implements MediaStreamTrack {}
+
+class MockCamera extends Mock implements Camera {}
+
+class MockCameraOptions extends Mock implements CameraOptions {}
+
+class MockVideoElement extends Mock implements VideoElement {}
+
+class MockXFile extends Mock implements XFile {}
+
+class MockJsUtil extends Mock implements JsUtil {}
+
+class MockMediaRecorder extends Mock implements MediaRecorder {}
+
+/// A fake [MediaStream] that returns the provided [_videoTracks].
+class FakeMediaStream extends Fake implements MediaStream {
+ FakeMediaStream(this._videoTracks);
+
+ final List<MediaStreamTrack> _videoTracks;
+
+ @override
+ List<MediaStreamTrack> getVideoTracks() => _videoTracks;
+}
+
+/// A fake [MediaDeviceInfo] that returns the provided [_deviceId], [_label] and [_kind].
+class FakeMediaDeviceInfo extends Fake implements MediaDeviceInfo {
+ FakeMediaDeviceInfo(this._deviceId, this._label, this._kind);
+
+ final String _deviceId;
+ final String _label;
+ final String _kind;
+
+ @override
+ String? get deviceId => _deviceId;
+
+ @override
+ String? get label => _label;
+
+ @override
+ String? get kind => _kind;
+}
+
+/// A fake [MediaError] that returns the provided error [_code] and [_message].
+class FakeMediaError extends Fake implements MediaError {
+ FakeMediaError(
+ this._code, [
+ String message = '',
+ ]) : _message = message;
+
+ final int _code;
+ final String _message;
+
+ @override
+ int get code => _code;
+
+ @override
+ String? get message => _message;
+}
+
+/// A fake [DomException] that returns the provided error [_name] and [_message].
+class FakeDomException extends Fake implements DomException {
+ FakeDomException(
+ this._name, [
+ String? message,
+ ]) : _message = message;
+
+ final String _name;
+ final String? _message;
+
+ @override
+ String get name => _name;
+
+ @override
+ String? get message => _message;
+}
+
+/// A fake [ElementStream] that listens to the provided [_stream] on [listen].
+class FakeElementStream<T extends Event> extends Fake
+ implements ElementStream<T> {
+ FakeElementStream(this._stream);
+
+ final Stream<T> _stream;
+
+ @override
+ StreamSubscription<T> listen(void onData(T event)?,
+ {Function? onError, void onDone()?, bool? cancelOnError}) {
+ return _stream.listen(
+ onData,
+ onError: onError,
+ onDone: onDone,
+ cancelOnError: cancelOnError,
+ );
+ }
+}
+
+/// A fake [BlobEvent] that returns the provided blob [data].
+class FakeBlobEvent extends Fake implements BlobEvent {
+ FakeBlobEvent(this._blob);
+
+ final Blob? _blob;
+
+ @override
+ Blob? get data => _blob;
+}
+
+/// A fake [ErrorEvent] that returns the provided error [_type] and [_message].
+class FakeErrorEvent extends Fake implements ErrorEvent {
+ FakeErrorEvent(
+ String type, [
+ String? message,
+ ]) : _type = type,
+ _message = message;
+
+ final String _type;
+ final String? _message;
+
+ @override
+ String get type => _type;
+
+ @override
+ String? get message => _message;
+}
+
+/// Returns a video element with a blank stream of size [videoSize].
+///
+/// Can be used to mock a video stream:
+/// ```dart
+/// final videoElement = getVideoElementWithBlankStream(Size(100, 100));
+/// final videoStream = videoElement.captureStream();
+/// ```
+VideoElement getVideoElementWithBlankStream(Size videoSize) {
+ final canvasElement = CanvasElement(
+ width: videoSize.width.toInt(),
+ height: videoSize.height.toInt(),
+ )..context2D.fillRect(0, 0, videoSize.width, videoSize.height);
+
+ final videoElement = VideoElement()
+ ..srcObject = canvasElement.captureStream();
+
+ return videoElement;
+}
diff --git a/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart b/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart
new file mode 100644
index 000000000000..09de03100871
--- /dev/null
+++ b/packages/camera/camera_web/example/integration_test/zoom_level_capability_test.dart
@@ -0,0 +1,50 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:integration_test/integration_test.dart';
+
+import 'helpers/helpers.dart';
+
+void main() {
+ IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+
+ group('ZoomLevelCapability', () {
+ testWidgets('sets all properties', (tester) async {
+ const minimum = 100.0;
+ const maximum = 400.0;
+ final videoTrack = MockMediaStreamTrack();
+
+ final capability = ZoomLevelCapability(
+ minimum: minimum,
+ maximum: maximum,
+ videoTrack: videoTrack,
+ );
+
+ expect(capability.minimum, equals(minimum));
+ expect(capability.maximum, equals(maximum));
+ expect(capability.videoTrack, equals(videoTrack));
+ });
+
+ testWidgets('supports value equality', (tester) async {
+ final videoTrack = MockMediaStreamTrack();
+
+ expect(
+ ZoomLevelCapability(
+ minimum: 0.0,
+ maximum: 100.0,
+ videoTrack: videoTrack,
+ ),
+ equals(
+ ZoomLevelCapability(
+ minimum: 0.0,
+ maximum: 100.0,
+ videoTrack: videoTrack,
+ ),
+ ),
+ );
+ });
+ });
+}
diff --git a/packages/camera/camera_web/example/lib/main.dart b/packages/camera/camera_web/example/lib/main.dart
new file mode 100644
index 000000000000..6e8f85e74f40
--- /dev/null
+++ b/packages/camera/camera_web/example/lib/main.dart
@@ -0,0 +1,18 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'package:flutter/material.dart';
+
+void main() => runApp(MyApp());
+
+/// App for testing
+class MyApp extends StatelessWidget {
+ @override
+ Widget build(BuildContext context) {
+ return Directionality(
+ textDirection: TextDirection.ltr,
+ child: Text('Testing... Look at the console output for results!'),
+ );
+ }
+}
diff --git a/packages/google_maps_flutter/google_maps_flutter_web/example/pubspec.yaml b/packages/camera/camera_web/example/pubspec.yaml
similarity index 51%
rename from packages/google_maps_flutter/google_maps_flutter_web/example/pubspec.yaml
rename to packages/camera/camera_web/example/pubspec.yaml
index b0ac9910afc9..1e075712325e 100644
--- a/packages/google_maps_flutter/google_maps_flutter_web/example/pubspec.yaml
+++ b/packages/camera/camera_web/example/pubspec.yaml
@@ -1,22 +1,18 @@
-name: google_maps_flutter_web_integration_tests
+name: camera_web_integration_tests
publish_to: none
-# Tests require flutter beta or greater to run.
environment:
sdk: ">=2.12.0 <3.0.0"
- flutter: ">=2.1.0"
+ flutter: ">=2.0.0"
dependencies:
- google_maps_flutter_web:
- path: ../
flutter:
sdk: flutter
dev_dependencies:
- build_runner: ^1.11.0
- google_maps: ^5.1.0
- http: ^0.13.0
- mockito: ^5.0.0
+ mocktail: ^0.1.4
+ camera_web:
+ path: ../
flutter_driver:
sdk: flutter
flutter_test:
diff --git a/packages/camera/camera_web/example/run_test.sh b/packages/camera/camera_web/example/run_test.sh
new file mode 100755
index 000000000000..00482faa53df
--- /dev/null
+++ b/packages/camera/camera_web/example/run_test.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+# Copyright 2013 The Flutter Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
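+# Example usage (assumes chromedriver is already running):
+#   ./run_test.sh                                                     # run all integration tests
+#   ./run_test.sh integration_test/zoom_level_capability_test.dart    # run a single test target
+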
+if pgrep -lf chromedriver > /dev/null; then
+ echo "chromedriver is running."
+
+ if [ $# -eq 0 ]; then
+ echo "No target specified, running all tests..."
+ find integration_test/ -iname '*_test.dart' | xargs -n1 -I{} -t flutter drive -d web-server --web-port=7357 --browser-name=chrome --driver=test_driver/integration_test.dart --target='{}'
+ else
+ echo "Running test target: $1..."
+ set -x
+ flutter drive -d web-server --web-port=7357 --browser-name=chrome --driver=test_driver/integration_test.dart --target="$1"
+ fi
+
+ else
+ echo "chromedriver is not running."
+ echo "Please, check the README.md for instructions on how to use run_test.sh"
+fi
+
diff --git a/packages/google_maps_flutter/google_maps_flutter/example/test_driver/integration_test.dart b/packages/camera/camera_web/example/test_driver/integration_test.dart
similarity index 100%
rename from packages/google_maps_flutter/google_maps_flutter/example/test_driver/integration_test.dart
rename to packages/camera/camera_web/example/test_driver/integration_test.dart
diff --git a/packages/google_maps_flutter/google_maps_flutter_web/example/web/index.html b/packages/camera/camera_web/example/web/index.html
similarity index 54%
rename from packages/google_maps_flutter/google_maps_flutter_web/example/web/index.html
rename to packages/camera/camera_web/example/web/index.html
index 3121d189b913..f3c6a5e8a8e3 100644
--- a/packages/google_maps_flutter/google_maps_flutter_web/example/web/index.html
+++ b/packages/camera/camera_web/example/web/index.html
@@ -1,12 +1,10 @@
-
+
Browser Tests
-
-
diff --git a/packages/camera/camera_web/lib/camera_web.dart b/packages/camera/camera_web/lib/camera_web.dart
new file mode 100644
index 000000000000..dcefc9293b88
--- /dev/null
+++ b/packages/camera/camera_web/lib/camera_web.dart
@@ -0,0 +1,7 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+library camera_web;
+
+export 'src/camera_web.dart';
diff --git a/packages/camera/camera_web/lib/src/camera.dart b/packages/camera/camera_web/lib/src/camera.dart
new file mode 100644
index 000000000000..cf0187057188
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/camera.dart
@@ -0,0 +1,635 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:html' as html;
+import 'dart:ui';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_web/src/camera_service.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter/foundation.dart';
+
+import 'shims/dart_ui.dart' as ui;
+
+String _getViewType(int cameraId) => 'plugins.flutter.io/camera_$cameraId';
+
+/// A camera initialized from the media devices in the current window.
+/// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices
+///
+/// The obtained camera stream is constrained by [options] and fetched
+/// with [CameraService.getMediaStreamForOptions].
+///
+/// The camera stream is displayed in the [videoElement] wrapped in the
+/// [divElement] to avoid overriding the custom styles applied to
+/// the video element in [_applyDefaultVideoStyles].
+/// See: https://github.com/flutter/flutter/issues/79519
+///
+/// The camera stream can be started and stopped by calling [play] and [stop].
+/// A picture can be captured with [takePicture], and a video can be recorded
+/// with [startVideoRecording], [pauseVideoRecording], [resumeVideoRecording]
+/// and [stopVideoRecording].
+///
+/// The camera zoom may be adjusted with [setZoomLevel]. The provided
+/// zoom level must be a value in the range of [getMinZoomLevel] to
+/// [getMaxZoomLevel].
+///
+/// The [textureId] is used to register a camera view with the id
+/// defined by [_getViewType].
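+///
+/// A minimal usage sketch (assumes a [CameraService] instance is available
+/// as `cameraService`):
+///
+/// ```dart
+/// final camera = Camera(
+///   textureId: 1,
+///   cameraService: cameraService,
+/// );
+/// await camera.initialize();
+/// await camera.play();
+/// final picture = await camera.takePicture();
+/// camera.stop();
+/// ```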
+class Camera {
+ /// Creates a new instance of [Camera]
+ /// with the given [textureId], [cameraService]
+ /// and optional [options].
+ Camera({
+ required this.textureId,
+ required CameraService cameraService,
+ this.options = const CameraOptions(),
+ }) : _cameraService = cameraService;
+
+ // A torch mode constraint name.
+ // See: https://w3c.github.io/mediacapture-image/#dom-mediatracksupportedconstraints-torch
+ static const _torchModeKey = "torch";
+
+ /// The texture id used to register the camera view.
+ final int textureId;
+
+ /// The camera options used to initialize a camera, empty by default.
+ final CameraOptions options;
+
+ /// The video element that displays the camera stream.
+ /// Initialized in [initialize].
+ late final html.VideoElement videoElement;
+
+ /// The wrapping element for the [videoElement] to avoid overriding
+ /// the custom styles applied in [_applyDefaultVideoStyles].
+ /// Initialized in [initialize].
+ late final html.DivElement divElement;
+
+ /// The camera stream displayed in the [videoElement].
+ /// Initialized in [initialize] and [play], reset in [stop].
+ html.MediaStream? stream;
+
+ /// The stream of the camera video tracks that have ended playing.
+ ///
+ /// This occurs when there is no more camera stream data, e.g.
+ /// the user has stopped the stream by changing the camera device,
+ /// revoked the camera permissions or ejected the camera device.
+ ///
+ /// MediaStreamTrack.onended:
+ /// https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/onended
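+ ///
+ /// For example (a sketch; `camera` is an initialized [Camera]):
+ /// ```dart
+ /// camera.onEnded.listen((track) {
+ ///   // The camera stream is no longer available, e.g. the device was
+ ///   // ejected or the camera permissions were revoked.
+ /// });
+ /// ```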
+ Stream<html.MediaStreamTrack> get onEnded => onEndedController.stream;
+
+ /// The stream controller for the [onEnded] stream.
+ @visibleForTesting
+ final onEndedController = StreamController<html.MediaStreamTrack>.broadcast();
+
+ StreamSubscription<html.Event>? _onEndedSubscription;
+
+ /// The stream of the camera video recording errors.
+ ///
+ /// This occurs when the video recording is not allowed or an unsupported
+ /// codec is used.
+ ///
+ /// MediaRecorder.error:
+ /// https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/error_event
+ Stream<html.ErrorEvent> get onVideoRecordingError =>
+ videoRecordingErrorController.stream;
+
+ /// The stream controller for the [onVideoRecordingError] stream.
+ @visibleForTesting
+ final videoRecordingErrorController =
+ StreamController<html.ErrorEvent>.broadcast();
+
+ StreamSubscription<html.Event>? _onVideoRecordingErrorSubscription;
+
+ /// The camera flash mode.
+ @visibleForTesting
+ FlashMode? flashMode;
+
+ /// The camera service used to get the media stream for the camera.
+ final CameraService _cameraService;
+
+ /// The current browser window used to access media devices.
+ @visibleForTesting
+ html.Window? window = html.window;
+
+ /// The recorder used to record a video from the camera.
+ @visibleForTesting
+ html.MediaRecorder? mediaRecorder;
+
+ /// Whether the video of the given type is supported.
+ @visibleForTesting
+ bool Function(String) isVideoTypeSupported =
+ html.MediaRecorder.isTypeSupported;
+
+ /// The list of consecutive video data files recorded with [mediaRecorder].
+ List<html.Blob> _videoData = [];
+
+ /// Completes when the video recording is stopped/finished.
+ Completer<XFile>? _videoAvailableCompleter;
+
+ /// A data listener fired when a new part of video data is available.
+ void Function(html.Event)? _videoDataAvailableListener;
+
+ /// A listener fired when a video recording is stopped.
+ void Function(html.Event)? _videoRecordingStoppedListener;
+
+ /// A builder to merge a list of blobs into a single blob.
+ @visibleForTesting
+ html.Blob Function(List<html.Blob> blobs, String type) blobBuilder =
+ (blobs, type) => html.Blob(blobs, type);
+
+ /// The stream that emits a [VideoRecordedEvent] when a video recording is created.
+ Stream<VideoRecordedEvent> get onVideoRecordedEvent =>
+ videoRecorderController.stream;
+
+ /// The stream controller for the [onVideoRecordedEvent] stream.
+ @visibleForTesting
+ final StreamController<VideoRecordedEvent> videoRecorderController =
+ StreamController<VideoRecordedEvent>.broadcast();
+
+ /// Initializes the camera stream displayed in the [videoElement].
+ /// Registers the camera view with [textureId] under the view type
+ /// returned by [_getViewType].
+ /// Emits the camera's default video track on the [onEnded] stream when it ends.
+ Future<void> initialize() async {
+ stream = await _cameraService.getMediaStreamForOptions(
+ options,
+ cameraId: textureId,
+ );
+
+ videoElement = html.VideoElement();
+
+ divElement = html.DivElement()
+ ..style.setProperty('object-fit', 'cover')
+ ..append(videoElement);
+
+ ui.platformViewRegistry.registerViewFactory(
+ _getViewType(textureId),
+ (_) => divElement,
+ );
+
+ videoElement
+ ..autoplay = false
+ ..muted = true
+ ..srcObject = stream
+ ..setAttribute('playsinline', '');
+
+ _applyDefaultVideoStyles(videoElement);
+
+ final videoTracks = stream!.getVideoTracks();
+
+ if (videoTracks.isNotEmpty) {
+ final defaultVideoTrack = videoTracks.first;
+
+ _onEndedSubscription = defaultVideoTrack.onEnded.listen((html.Event _) {
+ onEndedController.add(defaultVideoTrack);
+ });
+ }
+ }
+
+ /// Starts the camera stream.
+ ///
+ /// Initializes the camera source if the camera was previously stopped.
+ Future<void> play() async {
+ if (videoElement.srcObject == null) {
+ stream = await _cameraService.getMediaStreamForOptions(
+ options,
+ cameraId: textureId,
+ );
+ videoElement.srcObject = stream;
+ }
+ await videoElement.play();
+ }
+
+ /// Pauses the camera stream on the current frame.
+ void pause() {
+ videoElement.pause();
+ }
+
+ /// Stops the camera stream and resets the camera source.
+ void stop() {
+ final videoTracks = stream!.getVideoTracks();
+ if (videoTracks.isNotEmpty) {
+ onEndedController.add(videoTracks.first);
+ }
+
+ final tracks = stream?.getTracks();
+ if (tracks != null) {
+ for (final track in tracks) {
+ track.stop();
+ }
+ }
+ videoElement.srcObject = null;
+ stream = null;
+ }
+
+ /// Captures a picture and returns the saved file in JPEG format.
+ ///
+ /// Enables the camera flash (torch mode) while the picture is being taken
+ /// if the flash mode is either [FlashMode.auto] or [FlashMode.always].
+ Future<XFile> takePicture() async {
+ final shouldEnableTorchMode =
+ flashMode == FlashMode.auto || flashMode == FlashMode.always;
+
+ if (shouldEnableTorchMode) {
+ _setTorchMode(enabled: true);
+ }
+
+ final videoWidth = videoElement.videoWidth;
+ final videoHeight = videoElement.videoHeight;
+ final canvas = html.CanvasElement(width: videoWidth, height: videoHeight);
+ final isBackCamera = getLensDirection() == CameraLensDirection.back;
+
+ // Flip the picture horizontally if it is not taken from a back camera.
+ if (!isBackCamera) {
+ canvas.context2D
+ ..translate(videoWidth, 0)
+ ..scale(-1, 1);
+ }
+
+ canvas.context2D
+ .drawImageScaled(videoElement, 0, 0, videoWidth, videoHeight);
+
+ final blob = await canvas.toBlob('image/jpeg');
+
+ if (shouldEnableTorchMode) {
+ _setTorchMode(enabled: false);
+ }
+
+ return XFile(html.Url.createObjectUrl(blob));
+ }
+
+ /// Returns the size of the camera video based on the size of its first video track.
+ ///
+ /// Returns [Size.zero] if the camera is missing a video track or
+ /// the video track does not include the width or height setting.
+ Size getVideoSize() {
+ final videoTracks = videoElement.srcObject?.getVideoTracks() ?? [];
+
+ if (videoTracks.isEmpty) {
+ return Size.zero;
+ }
+
+ final defaultVideoTrack = videoTracks.first;
+ final defaultVideoTrackSettings = defaultVideoTrack.getSettings();
+
+ final width = defaultVideoTrackSettings['width'];
+ final height = defaultVideoTrackSettings['height'];
+
+ if (width != null && height != null) {
+ return Size(width, height);
+ } else {
+ return Size.zero;
+ }
+ }
+
+ /// Sets the camera flash mode to [mode] by modifying the camera
+ /// torch mode constraint.
+ ///
+ /// The torch mode is enabled for [FlashMode.torch] and
+ /// disabled for [FlashMode.off].
+ ///
+ /// For [FlashMode.auto] and [FlashMode.always] the torch mode is enabled
+ /// only while a picture is being taken in [takePicture].
+ ///
+ /// Throws a [CameraWebException] if the torch mode is not supported
+ /// or the camera has not been initialized or started.
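+ ///
+ /// For example (a sketch; `camera` is an initialized [Camera]):
+ /// ```dart
+ /// try {
+ ///   camera.setFlashMode(FlashMode.torch);
+ /// } on CameraWebException {
+ ///   // Torch mode is not supported by this browser or camera.
+ /// }
+ /// ```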
+ void setFlashMode(FlashMode mode) {
+ final mediaDevices = window?.navigator.mediaDevices;
+ final supportedConstraints = mediaDevices?.getSupportedConstraints();
+ final torchModeSupported = supportedConstraints?[_torchModeKey] ?? false;
+
+ if (!torchModeSupported) {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.torchModeNotSupported,
+ 'The torch mode is not supported in the current browser.',
+ );
+ }
+
+ // Save the updated flash mode to be used later when taking a picture.
+ flashMode = mode;
+
+ // Enable the torch mode only if the flash mode is torch.
+ _setTorchMode(enabled: mode == FlashMode.torch);
+ }
+
+ /// Sets the camera torch mode constraint to [enabled].
+ ///
+ /// Throws a [CameraWebException] if the torch mode is not supported
+ /// or the camera has not been initialized or started.
+ void _setTorchMode({required bool enabled}) {
+ final videoTracks = stream?.getVideoTracks() ?? [];
+
+ if (videoTracks.isNotEmpty) {
+ final defaultVideoTrack = videoTracks.first;
+
+ final bool canEnableTorchMode =
+ defaultVideoTrack.getCapabilities()[_torchModeKey] ?? false;
+
+ if (canEnableTorchMode) {
+ defaultVideoTrack.applyConstraints({
+ "advanced": [
+ {
+ _torchModeKey: enabled,
+ }
+ ]
+ });
+ } else {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.torchModeNotSupported,
+ 'The torch mode is not supported by the current camera.',
+ );
+ }
+ } else {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.notStarted,
+ 'The camera has not been initialized or started.',
+ );
+ }
+ }
+
+ /// Returns the camera maximum zoom level.
+ ///
+ /// Throws a [CameraWebException] if the zoom level is not supported
+ /// or the camera has not been initialized or started.
+ double getMaxZoomLevel() =>
+ _cameraService.getZoomLevelCapabilityForCamera(this).maximum;
+
+ /// Returns the camera minimum zoom level.
+ ///
+ /// Throws a [CameraWebException] if the zoom level is not supported
+ /// or the camera has not been initialized or started.
+ double getMinZoomLevel() =>
+ _cameraService.getZoomLevelCapabilityForCamera(this).minimum;
+
+ /// Sets the camera zoom level to [zoom].
+ ///
+ /// Throws a [CameraWebException] if the zoom level is invalid,
+ /// not supported or the camera has not been initialized or started.
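+ ///
+ /// For example (a sketch; `requestedZoom` is an arbitrary value chosen
+ /// by the caller):
+ /// ```dart
+ /// final zoom = requestedZoom
+ ///     .clamp(camera.getMinZoomLevel(), camera.getMaxZoomLevel())
+ ///     .toDouble();
+ /// camera.setZoomLevel(zoom);
+ /// ```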
+ void setZoomLevel(double zoom) {
+ final zoomLevelCapability =
+ _cameraService.getZoomLevelCapabilityForCamera(this);
+
+ if (zoom < zoomLevelCapability.minimum ||
+ zoom > zoomLevelCapability.maximum) {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.zoomLevelInvalid,
+ 'The provided zoom level must be in the range of ${zoomLevelCapability.minimum} to ${zoomLevelCapability.maximum}.',
+ );
+ }
+
+ zoomLevelCapability.videoTrack.applyConstraints({
+ "advanced": [
+ {
+ ZoomLevelCapability.constraintName: zoom,
+ }
+ ]
+ });
+ }
+
+ /// Returns the lens direction of this camera.
+ ///
+ /// Returns null if the camera is missing a video track or
+ /// the video track does not include the facing mode setting.
+ CameraLensDirection? getLensDirection() {
+ final videoTracks = videoElement.srcObject?.getVideoTracks() ?? [];
+
+ if (videoTracks.isEmpty) {
+ return null;
+ }
+
+ final defaultVideoTrack = videoTracks.first;
+ final defaultVideoTrackSettings = defaultVideoTrack.getSettings();
+
+ final facingMode = defaultVideoTrackSettings['facingMode'];
+
+ if (facingMode != null) {
+ return _cameraService.mapFacingModeToLensDirection(facingMode);
+ } else {
+ return null;
+ }
+ }
+
+ /// Returns the registered view type of the camera.
+ String getViewType() => _getViewType(textureId);
+
+ /// Starts a new video recording using [html.MediaRecorder].
+ ///
+ /// Throws a [CameraWebException] if the provided maximum video duration is invalid
+ /// or the browser does not support any of the available video mime types
+ /// from [_videoMimeType].
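+ ///
+ /// For example (a sketch; `camera` is an initialized, playing [Camera]):
+ /// ```dart
+ /// await camera.startVideoRecording();
+ /// // ... record for a while ...
+ /// final video = await camera.stopVideoRecording();
+ /// ```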
+ Future<void> startVideoRecording({Duration? maxVideoDuration}) async {
+ if (maxVideoDuration != null && maxVideoDuration.inMilliseconds <= 0) {
+ throw CameraWebException(
+ textureId,
+ CameraErrorCode.notSupported,
+ 'The maximum video duration must be greater than 0 milliseconds.',
+ );
+ }
+
+ mediaRecorder ??= html.MediaRecorder(videoElement.srcObject!, {
+ 'mimeType': _videoMimeType,
+ });
+
+ _videoAvailableCompleter = Completer();
+
+ _videoDataAvailableListener =
+ (event) => _onVideoDataAvailable(event, maxVideoDuration);
+
+ _videoRecordingStoppedListener =
+ (event) => _onVideoRecordingStopped(event, maxVideoDuration);
+
+ mediaRecorder!.addEventListener(
+ 'dataavailable',
+ _videoDataAvailableListener,
+ );
+
+ mediaRecorder!.addEventListener(
+ 'stop',
+ _videoRecordingStoppedListener,
+ );
+
+ _onVideoRecordingErrorSubscription =
+ mediaRecorder!.onError.listen((html.Event event) {
+ final error = event as html.ErrorEvent;
+ if (error != null) {
+ videoRecordingErrorController.add(error);
+ }
+ });
+
+ if (maxVideoDuration != null) {
+ mediaRecorder!.start(maxVideoDuration.inMilliseconds);
+ } else {
+ // Don't pass a null duration, as that would fire a `dataavailable` event immediately.
+ mediaRecorder!.start();
+ }
+ }
+
+ void _onVideoDataAvailable(
+ html.Event event, [
+ Duration? maxVideoDuration,
+ ]) {
+ final blob = (event as html.BlobEvent).data;
+
+ // Append the recorded part of the video to the list of all video data files.
+ if (blob != null) {
+ _videoData.add(blob);
+ }
+
+ // Stop the recorder if the video has a maxVideoDuration
+ // and the recording was not stopped manually.
+ if (maxVideoDuration != null && mediaRecorder!.state == 'recording') {
+ mediaRecorder!.stop();
+ }
+ }
+
+ Future<void> _onVideoRecordingStopped(
+ html.Event event, [
+ Duration? maxVideoDuration,
+ ]) async {
+ if (_videoData.isNotEmpty) {
+ // Concatenate all video data files into a single blob.
+ final videoType = _videoData.first.type;
+ final videoBlob = blobBuilder(_videoData, videoType);
+
+ // Create a file containing the video blob.
+ final file = XFile(
+ html.Url.createObjectUrl(videoBlob),
+ mimeType: _videoMimeType,
+ name: videoBlob.hashCode.toString(),
+ );
+
+ // Emit an event containing the recorded video file.
+ videoRecorderController.add(
+ VideoRecordedEvent(this.textureId, file, maxVideoDuration),
+ );
+
+ _videoAvailableCompleter?.complete(file);
+ }
+
+ // Clean up the media recorder with its event listeners and video data.
+ mediaRecorder!.removeEventListener(
+ 'dataavailable',
+ _videoDataAvailableListener,
+ );
+
+ mediaRecorder!.removeEventListener(
+ 'stop',
+ _videoRecordingStoppedListener,
+ );
+
+ await _onVideoRecordingErrorSubscription?.cancel();
+
+ mediaRecorder = null;
+ _videoDataAvailableListener = null;
+ _videoRecordingStoppedListener = null;
+ _videoData.clear();
+ }
+
+ /// Pauses the current video recording.
+ ///
+ /// Throws a [CameraWebException] if the video recorder is uninitialized.
+ Future<void> pauseVideoRecording() async {
+ if (mediaRecorder == null) {
+ throw _videoRecordingNotStartedException;
+ }
+ mediaRecorder!.pause();
+ }
+
+ /// Resumes the current video recording.
+ ///
+ /// Throws a [CameraWebException] if the video recorder is uninitialized.
+ Future<void> resumeVideoRecording() async {
+ if (mediaRecorder == null) {
+ throw _videoRecordingNotStartedException;
+ }
+ mediaRecorder!.resume();
+ }
+
+ /// Stops the video recording and returns the captured video file.
+ ///
+ /// Throws a [CameraWebException] if the video recorder is uninitialized.
+ Future<XFile> stopVideoRecording() async {
+ if (mediaRecorder == null || _videoAvailableCompleter == null) {
+ throw _videoRecordingNotStartedException;
+ }
+
+ mediaRecorder!.stop();
+
+ return _videoAvailableCompleter!.future;
+ }
+
+ /// Disposes the camera by stopping the camera stream and the video
+ /// recording, and by reloading the camera source.
+ Future<void> dispose() async {
+ // Stop the camera stream.
+ stop();
+
+ await videoRecorderController.close();
+ mediaRecorder = null;
+ _videoDataAvailableListener = null;
+
+ // Reset the [videoElement] to its initial state.
+ videoElement
+ ..srcObject = null
+ ..load();
+
+ await _onEndedSubscription?.cancel();
+ _onEndedSubscription = null;
+ await onEndedController.close();
+
+ await _onVideoRecordingErrorSubscription?.cancel();
+ _onVideoRecordingErrorSubscription = null;
+ await videoRecordingErrorController.close();
+ }
+
+ /// Returns the first supported video mime type (amongst mp4 and webm)
+ /// to use when recording a video.
+ ///
+ /// Throws a [CameraWebException] if the browser does not support
+ /// any of the available video mime types.
+ String get _videoMimeType {
+ const types = [
+ 'video/mp4',
+ 'video/webm',
+ ];
+
+ return types.firstWhere(
+ (type) => isVideoTypeSupported(type),
+ orElse: () => throw CameraWebException(
+ textureId,
+ CameraErrorCode.notSupported,
+ 'The browser does not support any of the following video types: ${types.join(',')}.',
+ ),
+ );
+ }
+
+ CameraWebException get _videoRecordingNotStartedException =>
+ CameraWebException(
+ textureId,
+ CameraErrorCode.videoRecordingNotStarted,
+ 'The video recorder is uninitialized. The recording might not have been started. Make sure to call `startVideoRecording` first.',
+ );
+
+ /// Applies default styles to the video [element].
+ void _applyDefaultVideoStyles(html.VideoElement element) {
+ final isBackCamera = getLensDirection() == CameraLensDirection.back;
+
+ // Flip the video horizontally if it is not taken from a back camera.
+ if (!isBackCamera) {
+ element.style.transform = 'scaleX(-1)';
+ }
+
+ element.style
+ ..transformOrigin = 'center'
+ ..pointerEvents = 'none'
+ ..width = '100%'
+ ..height = '100%'
+ ..objectFit = 'cover';
+ }
+}
diff --git a/packages/camera/camera_web/lib/src/camera_service.dart b/packages/camera/camera_web/lib/src/camera_service.dart
new file mode 100644
index 000000000000..5ba5c80395cc
--- /dev/null
+++ b/packages/camera/camera_web/lib/src/camera_service.dart
@@ -0,0 +1,326 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import 'dart:html' as html;
+import 'dart:ui';
+
+import 'package:camera_platform_interface/camera_platform_interface.dart';
+import 'package:camera_web/src/camera.dart';
+import 'package:camera_web/src/shims/dart_js_util.dart';
+import 'package:camera_web/src/types/types.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter/services.dart';
+
+/// A service to fetch and map camera settings and
+/// to obtain the camera stream.
+class CameraService {
+ // A facing mode constraint name.
+ static const _facingModeKey = "facingMode";
+
+ /// The current browser window used to access media devices.
+ @visibleForTesting
+ html.Window? window = html.window;
+
+ /// The utility to manipulate JavaScript interop objects.
+ @visibleForTesting
+ JsUtil jsUtil = JsUtil();
+
+ /// Returns a media stream associated with the camera device
+ /// with [cameraId] and constrained by [options].
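+ ///
+ /// For example (a sketch; the named `cameraId` parameter mirrors how
+ /// [Camera] calls this service):
+ /// ```dart
+ /// final stream = await cameraService.getMediaStreamForOptions(
+ ///   const CameraOptions(),
+ ///   cameraId: 1,
+ /// );
+ /// ```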
+ Future