diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index e06a658c617..fa4e97db269 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,5 +1,10 @@ -## NEXT +## 0.9.19+1 +* Adds `audioCaptureDeviceFactory` to `FLTCamConfiguration`. +* Renames the `lockCaptureOrientation` method of Objective-C type `FLTCam` when exported to Swift. +* Renames arguments of the `captureOutput` method of Objective-C type `FLTCam` when exported to Swift. +* Changes the `connection` argument type of the `captureOutput` method of the `FLTCam` class to `AVCaptureConnection`. +* Makes `minimum/maximumAvailableZoomFactor` and `minimum/maximumExposureOffset` fields of `FLTCam` readonly. * Updates minimum supported SDK version to Flutter 3.27/Dart 3.6. ## 0.9.19 diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 7d6ca0e5068..161c8ebda27 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -60,7 +60,7 @@ E1ABED6F2D943B2500AED9CC /* MockCaptureDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = E15BC7ED2D86D85500F66474 /* MockCaptureDevice.swift */; }; E1ABED722D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1ABED702D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift */; }; E1ABED732D943DC700AED9CC /* MockCaptureInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1ABED712D943DC700AED9CC /* MockCaptureInput.swift */; }; - E1FFEAAD2D6C8DD700B14107 /* MockFLTCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAAC2D6C8DD700B14107 /* MockFLTCam.swift */; }; + E1FFEAAD2D6C8DD700B14107 /* MockCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAAC2D6C8DD700B14107 /* MockCamera.swift */; }; E1FFEAAF2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAAE2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift */; }; E1FFEAB12D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAB02D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift */; }; /* End PBXBuildFile section */ @@ -154,7 +154,7 @@ E1A5F4E22D80259C0005BA64 /* FLTCamSetFlashModeTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FLTCamSetFlashModeTests.swift; sourceTree = "<group>"; }; E1ABED702D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MockCaptureDeviceInputFactory.swift; sourceTree = "<group>"; }; E1ABED712D943DC700AED9CC /* MockCaptureInput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MockCaptureInput.swift; sourceTree = "<group>"; }; - E1FFEAAC2D6C8DD700B14107 /* MockFLTCam.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockFLTCam.swift; sourceTree = "<group>"; }; + E1FFEAAC2D6C8DD700B14107 /* MockCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockCamera.swift; sourceTree = "<group>"; }; E1FFEAAE2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path =
CameraPluginCreateCameraTests.swift; sourceTree = "<group>"; }; E1FFEAB02D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPluginInitializeCameraTests.swift; sourceTree = "<group>"; }; E67C6DBF6478BE708993169F /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = "<group>"; }; @@ -245,7 +245,7 @@ E142F1392D85940600824824 /* MockCapturePhotoOutput.swift */, E142F1372D85919700824824 /* MockDeviceOrientationProvider.swift */, E142F1352D8587F900824824 /* MockCameraDeviceDiscoverer.swift */, - E1FFEAAC2D6C8DD700B14107 /* MockFLTCam.swift */, + E1FFEAAC2D6C8DD700B14107 /* MockCamera.swift */, 970ADABF2D6764CC00EFDCD9 /* MockEventChannel.swift */, E12C4FF72D68E85500515E70 /* MockFLTCameraPermissionManager.swift */, 970ADABD2D6740A900EFDCD9 /* MockWritableData.swift */, @@ -572,7 +572,7 @@ E1ABED722D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift in Sources */, 977A25202D5A439300931E34 /* AvailableCamerasTests.swift in Sources */, E142681F2D8566230046CBBC /* CameraTestUtils.swift in Sources */, - E1FFEAAD2D6C8DD700B14107 /* MockFLTCam.swift in Sources */, + E1FFEAAD2D6C8DD700B14107 /* MockCamera.swift in Sources */, E16602952D8471C0003CFE12 /* FLTCamZoomTests.swift in Sources */, 97BD4A102D5CE13500F857D5 /* CameraSessionPresetsTests.swift in Sources */, 979B3E022D5BA48F009BDE1A /* CameraOrientationTests.swift in Sources */, diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTests.swift index f371640e89b..e52bc49081f 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class AvailableCamerasTest: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.swift index e2a74652411..d2fd50a1ce3 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.swift @@ -8,7 +8,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used.
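Note: the one-line import change above repeats mechanically across every test file in this PR. Dropping `@testable` means the RunnerTests target now compiles against only the public interface of the Objective-C module, which works whether the plugin is built as a single module (CocoaPods) or split into Swift and Objective-C modules (SwiftPM). A minimal sketch of the resulting header shared by the test files:

```swift
import XCTest

// Only the public interface of the Objective-C module is needed now,
// so a plain import replaces the previous `@testable` import.
#if canImport(camera_avfoundation_objc)
  import camera_avfoundation_objc
#endif
```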
#if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class CameraCaptureSessionQueueRaceConditionTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.swift index d621a4077c4..127b3066d79 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class CameraMethodChannelTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.swift index 920f9f69820..61d9b1daaec 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.swift @@ -10,7 +10,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif private final class MockUIDevice: UIDevice { @@ -24,14 +24,14 @@ private final class MockUIDevice: UIDevice { final class CameraOrientationTests: XCTestCase { private func createCameraPlugin() -> ( cameraPlugin: CameraPlugin, - mockCamera: MockFLTCam, + mockCamera: MockCamera, mockEventAPI: MockGlobalEventApi, mockDevice: MockCaptureDevice, mockDeviceDiscoverer: MockCameraDeviceDiscoverer, captureSessionQueue: DispatchQueue ) { let mockDevice = MockCaptureDevice() - let mockCamera = MockFLTCam() + let mockCamera = MockCamera() let mockEventAPI = MockGlobalEventApi() let mockDeviceDiscoverer = MockCameraDeviceDiscoverer() let captureSessionQueue = DispatchQueue(label: "io.flutter.camera.captureSessionQueue") diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.swift index 5a963fbf4bc..3820696ad91 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif private final class MockPermissionService: NSObject, FLTPermissionServicing { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginCreateCameraTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginCreateCameraTests.swift index cce002fe2ab..c6e3b5344d9 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginCreateCameraTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginCreateCameraTests.swift @@ -8,7 +8,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. 
#if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class CameraPluginCreateCameraTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift index 0bf445a131b..b860b357bcf 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift @@ -8,13 +8,13 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif /// Tests of `CameraPlugin` methods delegating to `FLTCam` instance final class CameraPluginDelegatingMethodTests: XCTestCase { - private func createCameraPlugin() -> (CameraPlugin, MockFLTCam) { - let mockCamera = MockFLTCam() + private func createCameraPlugin() -> (CameraPlugin, MockCamera) { + let mockCamera = MockCamera() let cameraPlugin = CameraPlugin( registry: MockFlutterTextureRegistry(), @@ -39,7 +39,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let targetOrientation = FCPPlatformDeviceOrientation.landscapeLeft var lockCaptureCalled = false - mockCamera.lockCaptureStub = { orientation in + mockCamera.lockCaptureOrientationStub = { orientation in XCTAssertEqual(orientation, targetOrientation) lockCaptureCalled = true } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift index 6a8a2906147..97ea3f76a84 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift @@ -8,14 +8,14 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class CameraPluginInitializeCameraTests: XCTestCase { private func createCameraPlugin() -> ( - CameraPlugin, MockFLTCam, MockGlobalEventApi, DispatchQueue + CameraPlugin, MockCamera, MockGlobalEventApi, DispatchQueue ) { - let mockCamera = MockFLTCam() + let mockCamera = MockCamera() let mockGlobalEventApi = MockGlobalEventApi() let captureSessionQueue = DispatchQueue(label: "io.flutter.camera.captureSessionQueue") diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.swift index 306b299e0ed..37cd99ce019 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. 
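Note: `MockFLTCam` is renamed to `MockCamera` throughout, and its stubs track the renamed Swift-facing API (for example `lockCaptureOrientationStub`, exercised by `CameraPluginDelegatingMethodTests` above). The pattern, condensed here as a sketch rather than the full mock that appears later in this diff: each overridable member gets an optional closure property, and the override forwards to it so a test can both observe arguments and control behavior.

```swift
// Condensed sketch of the stub pattern used by MockCamera (full class below).
final class MockCameraSketch: FLTCam {
  var lockCaptureOrientationStub: ((FCPPlatformDeviceOrientation) -> Void)?

  // Forward to the stub so a test can assert on the received orientation.
  override func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) {
    lockCaptureOrientationStub?(orientation)
  }
}
```

A test then installs `mockCamera.lockCaptureOrientationStub = { orientation in ... }` and drives the plugin, exactly as the delegating-method test above does.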
#if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class CameraPreviewPauseTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.swift index e3df38ab8f0..4b9cda2d93e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.swift @@ -10,7 +10,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class CameraPropertiesTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift index bc1dd5b55a0..4b19116629e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif /// Includes test cases related to resolution presets setting operations for FLTCam class. diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift index 6fd4e0da767..55cd0ba815c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif private let testResolutionPreset = FCPPlatformResolutionPreset.medium diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift index b624c8d5692..22d6b9485ae 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift @@ -61,6 +61,7 @@ enum CameraTestUtils { resolutionPreset: FCPPlatformResolutionPreset.medium), mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper(), captureDeviceFactory: { _ in captureDeviceMock }, + audioCaptureDeviceFactory: { MockCaptureDevice() }, captureSessionFactory: { videoSessionMock }, captureSessionQueue: captureSessionQueue, captureDeviceInputFactory: MockCaptureDeviceInputFactory(), @@ -173,6 +174,14 @@ enum CameraTestUtils { return sampleBuffer! 
} + + static func createTestAudioOutput() -> AVCaptureOutput { + return AVCaptureAudioDataOutput() + } + + static func createTestConnection(_ output: AVCaptureOutput) -> AVCaptureConnection { + return AVCaptureConnection(inputPorts: [], output: output) + } } extension XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift index 670ae621cb6..f60d4a43f2c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift @@ -8,7 +8,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class FLTCamExposureTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift index efd7869cbee..db82db82446 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class FLTCamSetFocusModeTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift index b0baa8ccc83..762152db317 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class FLTCamSetDeviceOrientationTests: XCTestCase { @@ -73,7 +73,7 @@ final class FLTCamSetDeviceOrientationTests: XCTestCase { videoSetVideoOrientationCalled = true } - camera.lockCapture(FCPPlatformDeviceOrientation.portraitDown) + camera.lockCaptureOrientation(FCPPlatformDeviceOrientation.portraitDown) camera.setDeviceOrientation(.landscapeLeft) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift index dd06ca592d2..a6e51dd9b7e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. 
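Note: two themes from the `CameraTestUtils` changes above recur through the remaining tests. First, the test configuration now supplies `audioCaptureDeviceFactory: { MockCaptureDevice() }`, mirroring the production `audioCaptureDeviceFactory` that `CameraPlugin` wires up later in this diff, so `setUpCaptureSessionForAudioIfNeeded` never resolves a real microphone under test. Second, because `captureOutput` now takes a real `AVCaptureConnection` instead of a mock, tests build one from empty input ports via `createTestConnection`. A hypothetical test body (not a test from this PR) using the new helpers:

```swift
// `camera` is assumed to be an FLTCam built via CameraTestUtils.createTestCamera.
let audioOutput = CameraTestUtils.createTestAudioOutput()  // an AVCaptureAudioDataOutput
let audioConnection = CameraTestUtils.createTestConnection(audioOutput)
let audioSample = CameraTestUtils.createTestAudioSampleBuffer()

// FLTCam distinguishes audio from video by comparing the output argument
// against captureVideoOutput.avOutput, so audio samples need their own output.
camera.captureOutput(audioOutput, didOutput: audioSample, from: audioConnection)
```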
#if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class FLTCamSetFlashModeTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift index 3e854037e0c..8cc507d9dbb 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class FLTCamZoomTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift similarity index 75% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift index c4ca7ad9f9f..14e30643e3a 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCam.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift @@ -9,110 +9,89 @@ import camera_avfoundation import camera_avfoundation_objc #endif -final class MockFLTCam: FLTCam { - var setOnFrameAvailableStub: ((() -> Void) -> Void)? - var setDartApiStub: ((FCPCameraEventApi) -> Void)? - var setFocusModeStub: ((FCPPlatformFocusMode) -> Void)? - var getMinimumAvailableZoomFactorStub: (() -> CGFloat)? - var getMaximumAvailableZoomFactorStub: (() -> CGFloat)? +final class MockCamera: FLTCam { + var setDartApiStub: ((FCPCameraEventApi?) -> Void)? + var setOnFrameAvailableStub: (((() -> Void)?) -> Void)? var getMinimumExposureOffsetStub: (() -> CGFloat)? var getMaximumExposureOffsetStub: (() -> CGFloat)? - + var getMinimumAvailableZoomFactorStub: (() -> CGFloat)? + var getMaximumAvailableZoomFactorStub: (() -> CGFloat)? + var setUpCaptureSessionForAudioIfNeededStub: (() -> Void)? + var receivedImageStreamDataStub: (() -> Void)? var startStub: (() -> Void)? - var setDeviceOrientationStub: ((UIDeviceOrientation) -> Void)? - var captureToFileStub: ((((String?, FlutterError?) -> Void)?) -> Void)? - var setImageFileFormatStub: ((FCPPlatformImageFileFormat) -> Void)? var startVideoRecordingStub: ((@escaping (FlutterError?) -> Void, FlutterBinaryMessenger?) -> Void)? - var stopVideoRecordingStub: ((((String?, FlutterError?) -> Void)?) -> Void)? var pauseVideoRecordingStub: (() -> Void)? var resumeVideoRecordingStub: (() -> Void)? - var lockCaptureStub: ((FCPPlatformDeviceOrientation) -> Void)? + var stopVideoRecordingStub: ((((String?, FlutterError?) -> Void)?) -> Void)? + var captureToFileStub: ((((String?, FlutterError?) -> Void)?) -> Void)? + var setDeviceOrientationStub: ((UIDeviceOrientation) -> Void)? + var lockCaptureOrientationStub: ((FCPPlatformDeviceOrientation) -> Void)? var unlockCaptureOrientationStub: (() -> Void)? - var setFlashModeStub: ((FCPPlatformFlashMode, ((FlutterError?) -> Void)?) -> Void)? + var setImageFileFormatStub: ((FCPPlatformImageFileFormat) -> Void)? var setExposureModeStub: ((FCPPlatformExposureMode) -> Void)? - var receivedImageStreamDataStub: (() -> Void)? + var setExposureOffsetStub: ((Double) -> Void)? 
+ var setExposurePointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? + var setFocusModeStub: ((FCPPlatformFocusMode) -> Void)? + var setFocusPointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? + var setZoomLevelStub: ((CGFloat, ((FlutterError?) -> Void)?) -> Void)? + var setFlashModeStub: ((FCPPlatformFlashMode, ((FlutterError?) -> Void)?) -> Void)? var pausePreviewStub: (() -> Void)? var resumePreviewStub: (() -> Void)? var setDescriptionWhileRecordingStub: ((String, ((FlutterError?) -> Void)?) -> Void)? - var setExposurePointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? - var setFocusPointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? - var setExposureOffsetStub: ((Double) -> Void)? var startImageStreamStub: ((FlutterBinaryMessenger) -> Void)? var stopImageStreamStub: (() -> Void)? - var setZoomLevelStub: ((CGFloat, ((FlutterError?) -> Void)?) -> Void)? - var setUpCaptureSessionForAudioIfNeededStub: (() -> Void)? - - override var onFrameAvailable: (() -> Void) { - get { - return super.onFrameAvailable - } - set { - setOnFrameAvailableStub?(newValue) - } - } override var dartAPI: FCPCameraEventApi { get { - return super.dartAPI + preconditionFailure("Attempted to access unimplemented property: dartAPI") } set { setDartApiStub?(newValue) } } - override var minimumAvailableZoomFactor: CGFloat { - get { - return getMinimumAvailableZoomFactorStub?() ?? super.minimumAvailableZoomFactor - } - set { - super.minimumAvailableZoomFactor = newValue - } - } - - override var maximumAvailableZoomFactor: CGFloat { + override var onFrameAvailable: (() -> Void) { get { - return getMaximumAvailableZoomFactorStub?() ?? super.maximumAvailableZoomFactor + preconditionFailure("Attempted to access unimplemented property: onFrameAvailable") } set { - super.maximumAvailableZoomFactor = newValue + setOnFrameAvailableStub?(newValue) } } override var minimumExposureOffset: CGFloat { - get { - return getMinimumExposureOffsetStub?() ?? super.minimumExposureOffset - } - set { - super.minimumExposureOffset = newValue - } + return getMinimumExposureOffsetStub?() ?? 0 } override var maximumExposureOffset: CGFloat { - get { - return getMaximumExposureOffsetStub?() ?? super.maximumExposureOffset - } - set { - super.maximumExposureOffset = newValue - } + return getMaximumExposureOffsetStub?() ?? 0 } - override func start() { - startStub?() + override var minimumAvailableZoomFactor: CGFloat { + return getMinimumAvailableZoomFactorStub?() ?? 0 } - override func setDeviceOrientation(_ orientation: UIDeviceOrientation) { - setDeviceOrientationStub?(orientation) + override var maximumAvailableZoomFactor: CGFloat { + return getMaximumAvailableZoomFactorStub?() ?? 0 } - override func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { - captureToFileStub?(completion) + override func setUpCaptureSessionForAudioIfNeeded() { + setUpCaptureSessionForAudioIfNeededStub?() } - override func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { - setImageFileFormatStub?(fileFormat) + override func reportInitializationState() {} + + override func receivedImageStreamData() { + receivedImageStreamDataStub?() + } + + override func start() { + startStub?() } + override func stop() {} + override func startVideoRecording( completion: @escaping (FlutterError?) -> Void, messengerForStreaming messenger: FlutterBinaryMessenger? 
@@ -120,10 +99,6 @@ final class MockFLTCam: FLTCam { startVideoRecordingStub?(completion, messenger) } - override func stopVideoRecording(completion: ((String?, FlutterError?) -> Void)?) { - stopVideoRecordingStub?(completion) - } - override func pauseVideoRecording() { pauseVideoRecordingStub?() } @@ -132,30 +107,66 @@ final class MockFLTCam: FLTCam { resumeVideoRecordingStub?() } - override func lockCapture(_ orientation: FCPPlatformDeviceOrientation) { - lockCaptureStub?(orientation) + override func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { + stopVideoRecordingStub?(completion) + } + + override func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { + captureToFileStub?(completion) + } + + override func setDeviceOrientation(_ orientation: UIDeviceOrientation) { + setDeviceOrientationStub?(orientation) + } + + override func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) { + lockCaptureOrientationStub?(orientation) } override func unlockCaptureOrientation() { unlockCaptureOrientationStub?() } + override func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { + setImageFileFormatStub?(fileFormat) + } + override func setExposureMode(_ mode: FCPPlatformExposureMode) { setExposureModeStub?(mode) } + override func setExposureOffset(_ offset: Double) { + setExposureOffsetStub?(offset) + } + + override func setExposurePoint( + _ point: FCPPlatformPoint?, withCompletion: @escaping (FlutterError?) -> Void + ) { + setExposurePointStub?(point, withCompletion) + } + override func setFocusMode(_ mode: FCPPlatformFocusMode) { setFocusModeStub?(mode) } - override func setFlashMode( - _ mode: FCPPlatformFlashMode, withCompletion completion: @escaping (FlutterError?) -> Void + override func setFocusPoint( + _ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void ) { - setFlashModeStub?(mode, completion) + setFocusPointStub?(point, completion) } - override func receivedImageStreamData() { - receivedImageStreamDataStub?() + override func setZoomLevel( + _ zoom: CGFloat, + withCompletion completion: @escaping (FlutterError?) -> Void + ) { + setZoomLevelStub?(zoom, completion) + } + + override func setFlashMode( + _ mode: FCPPlatformFlashMode, + withCompletion completion: @escaping (FlutterError?) -> Void + ) { + setFlashModeStub?(mode, completion) } override func pausePreview() { @@ -167,27 +178,12 @@ final class MockFLTCam: FLTCam { } override func setDescriptionWhileRecording( - _ cameraName: String, withCompletion completion: @escaping (FlutterError?) -> Void + _ cameraName: String, + withCompletion completion: @escaping (FlutterError?) -> Void ) { setDescriptionWhileRecordingStub?(cameraName, completion) } - override func setExposurePoint( - _ point: FCPPlatformPoint?, withCompletion completion: ((FlutterError?) -> Void)? - ) { - setExposurePointStub?(point, completion) - } - - override func setFocusPoint( - _ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void - ) { - setFocusPointStub?(point, completion) - } - - override func setExposureOffset(_ offset: Double) { - setExposureOffsetStub?(offset) - } - override func startImageStream(with messenger: FlutterBinaryMessenger) { startImageStreamStub?(messenger) } @@ -196,13 +192,15 @@ final class MockFLTCam: FLTCam { stopImageStreamStub?() } - override func setZoomLevel( - _ zoom: CGFloat, withCompletion completion: @escaping (FlutterError?) 
-> Void - ) { - setZoomLevelStub?(zoom, completion) - } + override func captureOutput( + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, + from connection: AVCaptureConnection + ) {} - override func setUpCaptureSessionForAudioIfNeeded() { - setUpCaptureSessionForAudioIfNeededStub?() + override func close() {} + + override func copyPixelBuffer() -> Unmanaged? { + return nil } } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift index e7986fcce72..93915529f74 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif /// Includes test cases related to photo capture operations for FLTCam class. diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/QueueUtilsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/QueueUtilsTests.swift index 00384a3c2a1..47f26d9e633 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/QueueUtilsTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/QueueUtilsTests.swift @@ -8,7 +8,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class QueueUtilsTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift index 5b412d07395..bea41564eb0 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif private class FakeMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { @@ -72,8 +72,7 @@ final class CameraSampleBufferTests: XCTestCase { FLTCam, MockAssetWriter, MockAssetWriterInputPixelBufferAdaptor, - MockAssetWriterInput, - MockCaptureConnection + MockAssetWriterInput ) { let assetWriter = MockAssetWriter() let adaptor = MockAssetWriterInputPixelBufferAdaptor() @@ -96,8 +95,10 @@ final class CameraSampleBufferTests: XCTestCase { } return ( - FLTCam(configuration: configuration, error: nil), assetWriter, adaptor, input, - MockCaptureConnection() + CameraTestUtils.createTestCamera(configuration), + assetWriter, + adaptor, + input ) } @@ -110,13 +111,16 @@ final class CameraSampleBufferTests: XCTestCase { } func testCopyPixelBuffer() { - let (camera, _, _, _, connectionMock) = createCamera() + let (camera, _, _, _) = createCamera() let capturedSampleBuffer = CameraTestUtils.createTestSampleBuffer() let capturedPixelBuffer = CMSampleBufferGetImageBuffer(capturedSampleBuffer)! + let testConnection = CameraTestUtils.createTestConnection(camera.captureVideoOutput.avOutput) + // Mimic sample buffer callback when captured a new video sample. 
camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: capturedSampleBuffer, - from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: capturedSampleBuffer, + from: testConnection) let deliveredPixelBuffer = camera.copyPixelBuffer()?.takeRetainedValue() XCTAssertEqual( deliveredPixelBuffer, capturedPixelBuffer, @@ -124,8 +128,9 @@ final class CameraSampleBufferTests: XCTestCase { } func testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPauseResumeRecording() { - let (camera, _, _, _, connectionMock) = createCamera() + let (camera, _, _, _) = createCamera() let sampleBuffer = CameraTestUtils.createTestSampleBuffer() + let testConnection = CameraTestUtils.createTestConnection(camera.captureVideoOutput.avOutput) let initialRetainCount = CFGetRetainCount(sampleBuffer) @@ -135,7 +140,9 @@ final class CameraSampleBufferTests: XCTestCase { camera.resumeVideoRecording() camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: sampleBuffer, from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: sampleBuffer, + from: testConnection) let finalRetainCount = CFGetRetainCount(sampleBuffer) XCTAssertEqual( @@ -145,7 +152,7 @@ final class CameraSampleBufferTests: XCTestCase { } func testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples() { - let (camera, writerMock, adaptorMock, inputMock, connectionMock) = createCamera() + let (camera, writerMock, adaptorMock, inputMock) = createCamera() var status = AVAssetWriter.Status.unknown writerMock.startWritingStub = { status = .writing @@ -156,7 +163,12 @@ final class CameraSampleBufferTests: XCTestCase { } let videoSample = CameraTestUtils.createTestSampleBuffer() + let testVideoConnection = CameraTestUtils.createTestConnection( + camera.captureVideoOutput.avOutput) + let audioSample = CameraTestUtils.createTestAudioSampleBuffer() + let testAudioOutput = CameraTestUtils.createTestAudioOutput() + let testAudioConnection = CameraTestUtils.createTestConnection(testAudioOutput) var writtenSamples: [String] = [] adaptorMock.appendStub = { buffer, time in @@ -170,21 +182,28 @@ final class CameraSampleBufferTests: XCTestCase { } camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) let expectedSamples = ["video", "audio"] XCTAssertEqual(writtenSamples, expectedSamples, "First appended sample must be video.") } func testDidOutputSampleBufferSampleTimesMustBeNumericAfterPauseResume() { - let (camera, writerMock, adaptorMock, inputMock, connectionMock) = createCamera() + let (camera, writerMock, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() + let testVideoConnection = CameraTestUtils.createTestConnection( + camera.captureVideoOutput.avOutput) + let 
audioSample = CameraTestUtils.createTestAudioSampleBuffer() + let testAudioOutput = CameraTestUtils.createTestAudioOutput() + let testAudioConnection = CameraTestUtils.createTestConnection(testAudioOutput) var status = AVAssetWriter.Status.unknown writerMock.startWritingStub = { @@ -215,19 +234,25 @@ final class CameraSampleBufferTests: XCTestCase { camera.pauseVideoRecording() camera.resumeVideoRecording() camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) - camera.captureOutput(nil, didOutputSampleBuffer: audioSample, from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) + camera.captureOutput(testAudioOutput, didOutput: audioSample, from: testAudioConnection) XCTAssert(videoAppended && audioAppended, "Video or audio was not appended.") } func testDidOutputSampleBufferMustNotAppendSampleWhenReadyForMoreMediaDataIsFalse() { - let (camera, _, adaptorMock, inputMock, connectionMock) = createCamera() + let (camera, _, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() + let testVideoConnection = CameraTestUtils.createTestConnection( + camera.captureVideoOutput.avOutput) var sampleAppended = false adaptorMock.appendStub = { buffer, time in @@ -240,18 +265,22 @@ final class CameraSampleBufferTests: XCTestCase { inputMock.readyForMoreMediaData = true sampleAppended = false camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) XCTAssertTrue(sampleAppended, "Sample was not appended.") inputMock.readyForMoreMediaData = false sampleAppended = false camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) XCTAssertFalse(sampleAppended, "Sample cannot be appended when readyForMoreMediaData is NO.") } func testStopVideoRecordingWithCompletionMustCallCompletion() { - let (camera, writerMock, _, _, _) = createCamera() + let (camera, writerMock, _, _) = createCamera() var status = AVAssetWriter.Status.unknown writerMock.startWritingStub = { @@ -279,9 +308,11 @@ final class CameraSampleBufferTests: XCTestCase { } func testStartWritingShouldNotBeCalledBetweenSampleCreationAndAppending() { - let (camera, writerMock, adaptorMock, inputMock, connectionMock) = createCamera() + let (camera, writerMock, adaptorMock, inputMock) = createCamera() let videoSample = CameraTestUtils.createTestSampleBuffer() + let testVideoConnection = CameraTestUtils.createTestConnection( + camera.captureVideoOutput.avOutput) var startWritingCalled = false writerMock.startWritingStub = { @@ -302,13 +333,17 @@ final class CameraSampleBufferTests: XCTestCase { let startWritingCalledBefore = startWritingCalled camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: 
videoSample, + from: testVideoConnection) XCTAssert( (startWritingCalledBefore && videoAppended) || (startWritingCalled && !videoAppended), "The startWriting was called between sample creation and appending.") camera.captureOutput( - camera.captureVideoOutput.avOutput, didOutputSampleBuffer: videoSample, from: connectionMock) + camera.captureVideoOutput.avOutput, + didOutput: videoSample, + from: testVideoConnection) XCTAssert(videoAppended, "Video was not appended.") } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SavePhotoDelegateTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SavePhotoDelegateTests.swift index 30d19830406..4bea59b6be3 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SavePhotoDelegateTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SavePhotoDelegateTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class SavePhotoDelegateTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift index 3909b5be9c9..129638c5356 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift @@ -9,7 +9,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif private class MockImageStreamHandler: FLTImageStreamHandler { @@ -32,19 +32,26 @@ private class MockImageStreamHandler: FLTImageStreamHandler { } final class StreamingTests: XCTestCase { - private func createCamera() -> (FLTCam, CMSampleBuffer) { + private func createCamera() -> ( + FLTCam, + AVCaptureOutput, + CMSampleBuffer, + AVCaptureConnection + ) { let captureSessionQueue = DispatchQueue(label: "testing") let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue let camera = CameraTestUtils.createTestCamera(configuration) + let testAudioOutput = CameraTestUtils.createTestAudioOutput() let sampleBuffer = CameraTestUtils.createTestSampleBuffer() + let testAudioConnection = CameraTestUtils.createTestConnection(testAudioOutput) - return (camera, sampleBuffer) + return (camera, testAudioOutput, sampleBuffer, testAudioConnection) } func testExceedMaxStreamingPendingFramesCount() { - let (camera, sampleBuffer) = createCamera() + let (camera, testAudioOutput, sampleBuffer, testAudioConnection) = createCamera() let streamingExpectation = expectation( description: "Must not call handler over maxStreamingPendingFramesCount") let handlerMock = MockImageStreamHandler() @@ -59,14 +66,14 @@ final class StreamingTests: XCTestCase { streamingExpectation.expectedFulfillmentCount = 4 for _ in 0..<10 { - camera.captureOutput(nil, didOutputSampleBuffer: sampleBuffer, from: nil) + camera.captureOutput(testAudioOutput, didOutput: sampleBuffer, from: testAudioConnection) } waitForExpectations(timeout: 30, handler: nil) } func testReceivedImageStreamData() { - let (camera, sampleBuffer) = createCamera() + let (camera, testAudioOutput, sampleBuffer, testAudioConnection) = createCamera() let 
streamingExpectation = expectation( description: "Must be able to call the handler again when receivedImageStreamData is called") let handlerMock = MockImageStreamHandler() @@ -81,11 +88,11 @@ final class StreamingTests: XCTestCase { streamingExpectation.expectedFulfillmentCount = 5 for _ in 0..<10 { - camera.captureOutput(nil, didOutputSampleBuffer: sampleBuffer, from: nil) + camera.captureOutput(testAudioOutput, didOutput: sampleBuffer, from: testAudioConnection) } camera.receivedImageStreamData() - camera.captureOutput(nil, didOutputSampleBuffer: sampleBuffer, from: nil) + camera.captureOutput(testAudioOutput, didOutput: sampleBuffer, from: testAudioConnection) waitForExpectations(timeout: 30, handler: nil) } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.swift index 5b588ef4f2b..b2eed05f364 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.swift @@ -8,7 +8,7 @@ import XCTest // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) - @testable import camera_avfoundation_objc + import camera_avfoundation_objc #endif final class ThreadSafeEventChannelTests: XCTestCase { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index a058afb50e4..570cd22095e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -238,6 +238,9 @@ extension CameraPlugin: FCPCameraApi { mediaSettings: settings, mediaSettingsWrapper: mediaSettingsAVWrapper, captureDeviceFactory: captureDeviceFactory, + audioCaptureDeviceFactory: { + FLTDefaultCaptureDevice(device: AVCaptureDevice.default(for: .audio)!) + }, captureSessionFactory: captureSessionFactory, captureSessionQueue: captureSessionQueue, captureDeviceInputFactory: captureDeviceInputFactory, @@ -342,7 +345,7 @@ extension CameraPlugin: FCPCameraApi { completion: @escaping (FlutterError?) 
-> Void ) { captureSessionQueue.async { [weak self] in - self?.camera?.lockCapture(orientation) + self?.camera?.lockCaptureOrientation(orientation) completion(nil) } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m index 974c3a92f78..1aa6a4598ea 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m @@ -13,6 +13,8 @@ #import "./include/camera_avfoundation/FLTCaptureDevice.h" #import "./include/camera_avfoundation/FLTDeviceOrientationProviding.h" #import "./include/camera_avfoundation/FLTEventChannel.h" +#import "./include/camera_avfoundation/FLTFormatUtils.h" +#import "./include/camera_avfoundation/FLTImageStreamHandler.h" #import "./include/camera_avfoundation/FLTSavePhotoDelegate.h" #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" #import "./include/camera_avfoundation/QueueUtils.h" @@ -24,33 +26,6 @@ details:error.domain]; } -@implementation FLTImageStreamHandler - -- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _captureSessionQueue = captureSessionQueue; - return self; -} - -- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments { - __weak typeof(self) weakSelf = self; - dispatch_async(self.captureSessionQueue, ^{ - weakSelf.eventSink = nil; - }); - return nil; -} - -- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)events { - __weak typeof(self) weakSelf = self; - dispatch_async(self.captureSessionQueue, ^{ - weakSelf.eventSink = events; - }); - return nil; -} -@end - @interface FLTCam () @@ -108,6 +83,7 @@ @interface FLTCam () *captureDeviceInputFactory; @property(assign, nonatomic) FCPPlatformExposureMode exposureMode; @property(assign, nonatomic) FCPPlatformFocusMode focusMode; @@ -125,60 +101,6 @@ @implementation FLTCam NSString *const errorMethod = @"error"; -// Returns frame rate supported by format closest to targetFrameRate. -static double bestFrameRateForFormat(NSObject *format, - double targetFrameRate) { - double bestFrameRate = 0; - double minDistance = DBL_MAX; - for (NSObject *range in format.videoSupportedFrameRateRanges) { - double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); - double distance = fabs(frameRate - targetFrameRate); - if (distance < minDistance) { - bestFrameRate = frameRate; - minDistance = distance; - } - } - return bestFrameRate; -} - -// Finds format with same resolution as current activeFormat in captureDevice for which -// bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond. -// Preferred are formats with the same subtype as current activeFormat. Sets this format -// as activeFormat and also updates mediaSettings.framesPerSecond to value which -// bestFrameRateForFormat returned for that format. 
-static void selectBestFormatForRequestedFrameRate( - NSObject *captureDevice, FCPPlatformMediaSettings *mediaSettings, - VideoDimensionsForFormat videoDimensionsForFormat) { - CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); - double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; - FourCharCode preferredSubType = - CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); - NSObject *bestFormat = captureDevice.activeFormat; - double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate); - double minDistance = fabs(bestFrameRate - targetFrameRate); - BOOL isBestSubTypePreferred = YES; - for (NSObject *format in captureDevice.formats) { - CMVideoDimensions resolution = videoDimensionsForFormat(format); - if (resolution.width != targetResolution.width || - resolution.height != targetResolution.height) { - continue; - } - double frameRate = bestFrameRateForFormat(format, targetFrameRate); - double distance = fabs(frameRate - targetFrameRate); - FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - BOOL isSubTypePreferred = subType == preferredSubType; - if (distance < minDistance || - (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred)) { - bestFormat = format; - bestFrameRate = frameRate; - minDistance = distance; - isBestSubTypePreferred = isSubTypePreferred; - } - } - captureDevice.activeFormat = bestFormat; - mediaSettings.framesPerSecond = @(bestFrameRate); -} - - (instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configuration error:(NSError **)error { self = [super init]; @@ -193,6 +115,7 @@ - (instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configurati _videoCaptureSession = configuration.videoCaptureSession; _audioCaptureSession = configuration.audioCaptureSession; _captureDeviceFactory = configuration.captureDeviceFactory; + _audioCaptureDeviceFactory = configuration.audioCaptureDeviceFactory; _captureDevice = _captureDeviceFactory(configuration.initialCameraName); _captureDeviceInputFactory = configuration.captureDeviceInputFactory; _videoDimensionsForFormat = configuration.videoDimensionsForFormat; @@ -253,8 +176,8 @@ - (instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configurati return nil; } - selectBestFormatForRequestedFrameRate(_captureDevice, _mediaSettings, - _videoDimensionsForFormat); + FLTSelectBestFormatForRequestedFrameRate(_captureDevice, _mediaSettings, + _videoDimensionsForFormat); // Set frame rate with 1/10 precision allowing not integral values. int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0); @@ -590,7 +513,7 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(NSObject *)connection { + fromConnection:(AVCaptureConnection *)connection { if (output == _captureVideoOutput.avOutput) { CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CFRetain(newBuffer); @@ -1391,8 +1314,7 @@ - (void)setUpCaptureSessionForAudioIfNeeded { NSError *error = nil; // Create a device input with the device and add it to the session. // Setup the audio input. 
- NSObject *audioDevice = [[FLTDefaultCaptureDevice alloc] - initWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]]; + NSObject *audioDevice = self.audioCaptureDeviceFactory(); NSObject *audioInput = [_captureDeviceInputFactory deviceInputWithDevice:audioDevice error:&error]; if (error) { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCamConfiguration.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCamConfiguration.m index 88b16d0cee5..0c934d5d483 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCamConfiguration.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCamConfiguration.m @@ -9,6 +9,7 @@ @implementation FLTCamConfiguration - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings mediaSettingsWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsWrapper captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory + audioCaptureDeviceFactory:(AudioCaptureDeviceFactory)audioCaptureDeviceFactory captureSessionFactory:(CaptureSessionFactory)captureSessionFactory captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceInputFactory: @@ -22,6 +23,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _videoCaptureSession = captureSessionFactory(); _audioCaptureSession = captureSessionFactory(); _captureDeviceFactory = captureDeviceFactory; + _audioCaptureDeviceFactory = audioCaptureDeviceFactory; _orientation = [[UIDevice currentDevice] orientation]; _deviceOrientationProvider = [[FLTDefaultDeviceOrientationProvider alloc] init]; _videoDimensionsForFormat = ^CMVideoDimensions(NSObject *format) { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTFormatUtils.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTFormatUtils.m new file mode 100644 index 00000000000..4f582bece5b --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTFormatUtils.m @@ -0,0 +1,60 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import Foundation; +@import AVFoundation; + +#import "./include/camera_avfoundation/FLTFormatUtils.h" + +NS_ASSUME_NONNULL_BEGIN + +// Returns frame rate supported by format closest to targetFrameRate. 
+double FLTBestFrameRateForFormat(NSObject *format, double targetFrameRate) { + double bestFrameRate = 0; + double minDistance = DBL_MAX; + for (NSObject *range in format.videoSupportedFrameRateRanges) { + double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); + double distance = fabs(frameRate - targetFrameRate); + if (distance < minDistance) { + bestFrameRate = frameRate; + minDistance = distance; + } + } + return bestFrameRate; +} + +void FLTSelectBestFormatForRequestedFrameRate(NSObject *captureDevice, + FCPPlatformMediaSettings *mediaSettings, + VideoDimensionsForFormat videoDimensionsForFormat) { + CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); + double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; + FourCharCode preferredSubType = + CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); + NSObject *bestFormat = captureDevice.activeFormat; + double bestFrameRate = FLTBestFrameRateForFormat(bestFormat, targetFrameRate); + double minDistance = fabs(bestFrameRate - targetFrameRate); + BOOL isBestSubTypePreferred = YES; + for (NSObject *format in captureDevice.formats) { + CMVideoDimensions resolution = videoDimensionsForFormat(format); + if (resolution.width != targetResolution.width || + resolution.height != targetResolution.height) { + continue; + } + double frameRate = FLTBestFrameRateForFormat(format, targetFrameRate); + double distance = fabs(frameRate - targetFrameRate); + FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); + BOOL isSubTypePreferred = subType == preferredSubType; + if (distance < minDistance || + (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred)) { + bestFormat = format; + bestFrameRate = frameRate; + minDistance = distance; + isBestSubTypePreferred = isSubTypePreferred; + } + } + captureDevice.activeFormat = bestFormat; + mediaSettings.framesPerSecond = @(bestFrameRate); +} + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTImageStreamHandler.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTImageStreamHandler.m new file mode 100644 index 00000000000..453aeb5bc76 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTImageStreamHandler.m @@ -0,0 +1,34 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTImageStreamHandler.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTImageStreamHandler.m
new file mode 100644
index 00000000000..453aeb5bc76
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTImageStreamHandler.m
@@ -0,0 +1,34 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import Flutter;
+
+#import "./include/camera_avfoundation/FLTImageStreamHandler.h"
+
+@implementation FLTImageStreamHandler
+
+- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
+  self = [super init];
+  NSAssert(self, @"super init cannot be nil");
+  _captureSessionQueue = captureSessionQueue;
+  return self;
+}
+
+- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    weakSelf.eventSink = nil;
+  });
+  return nil;
+}
+
+- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
+                                       eventSink:(nonnull FlutterEventSink)events {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(self.captureSessionQueue, ^{
+    weakSelf.eventSink = events;
+  });
+  return nil;
+}
+@end
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h
index 197bf63bcc2..0283d79d8c2 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h
@@ -28,10 +28,10 @@ NS_ASSUME_NONNULL_BEGIN
 // Format used for video and image streaming.
 @property(assign, nonatomic) FourCharCode videoFormat;
 @property(assign, nonatomic) FCPPlatformImageFileFormat fileFormat;
-@property(assign, nonatomic) CGFloat minimumAvailableZoomFactor;
-@property(assign, nonatomic) CGFloat maximumAvailableZoomFactor;
-@property(assign, nonatomic) CGFloat minimumExposureOffset;
-@property(assign, nonatomic) CGFloat maximumExposureOffset;
+@property(readonly, nonatomic) CGFloat minimumAvailableZoomFactor;
+@property(readonly, nonatomic) CGFloat maximumAvailableZoomFactor;
+@property(readonly, nonatomic) CGFloat minimumExposureOffset;
+@property(readonly, nonatomic) CGFloat maximumExposureOffset;
 
 /// Initializes an `FLTCam` instance with the given configuration.
 /// @param error report to the caller if any error happened creating the camera.
@@ -57,7 +57,8 @@ NS_ASSUME_NONNULL_BEGIN
                               FlutterError *_Nullable))completion;
 - (void)pauseVideoRecording;
 - (void)resumeVideoRecording;
-- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation;
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation
+    NS_SWIFT_NAME(lockCaptureOrientation(_:));
 - (void)unlockCaptureOrientation;
 - (void)setFlashMode:(FCPPlatformFlashMode)mode
       withCompletion:(void (^)(FlutterError *_Nullable))completion;
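Note: `NS_SWIFT_NAME` pins the imported selector so the Swift importer's renaming heuristics cannot reshape call sites, and the zoom/exposure bounds now surface as get-only. A sketch of the resulting Swift call sites, assuming `camera` is an imported `FLTCam` and that the Pigeon-generated `FCPPlatformDeviceOrientation` exposes a `.portraitUp` case:

```swift
func demonstrate(camera: FLTCam) {
  // Pinned by NS_SWIFT_NAME(lockCaptureOrientation(_:)):
  camera.lockCaptureOrientation(.portraitUp)
  camera.unlockCaptureOrientation()

  // Now get-only from Swift; assignment no longer compiles.
  let zoomRange = camera.minimumAvailableZoomFactor...camera.maximumAvailableZoomFactor
  // camera.minimumAvailableZoomFactor = 1.0  // error: read-only property
  _ = zoomRange
}
```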
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h
index 8426f129e89..36b8ac17784 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCamConfiguration.h
@@ -19,6 +19,8 @@ NS_ASSUME_NONNULL_BEGIN
 /// Used in tests to inject a device into FLTCam.
 typedef NSObject<FLTCaptureDevice> *_Nonnull (^CaptureDeviceFactory)(NSString *);
 
+typedef NSObject<FLTCaptureDevice> *_Nonnull (^AudioCaptureDeviceFactory)(void);
+
 typedef NSObject<FLTCaptureSession> *_Nonnull (^CaptureSessionFactory)(void);
 
 typedef NSObject<FLTAssetWriter> *_Nonnull (^AssetWriterFactory)(NSURL *, AVFileType,
@@ -38,6 +40,7 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(NSObject<FLTCaptureDeviceFormat> *);
 @property(nonatomic, copy) CaptureDeviceFactory captureDeviceFactory;
+@property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory;
 @property(nonatomic, strong) NSObject<FLTCaptureSession> *videoCaptureSession;
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h
index 3e3a44922dd..0eed426d3c8 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h
@@ -8,21 +8,9 @@
 #import "FLTCapturePhotoOutput.h"
 #import "FLTCaptureVideoDataOutput.h"
 #import "FLTDeviceOrientationProviding.h"
+#import "FLTImageStreamHandler.h"
 #import "FLTSavePhotoDelegate.h"
 
-@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
-
-/// The queue on which `eventSink` property should be accessed.
-@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
-
-/// The event sink to stream camera events to Dart.
-///
-/// The property should only be accessed on `captureSessionQueue`.
-/// The block itself should be invoked on the main queue.
-@property FlutterEventSink eventSink;
-
-@end
-
 // APIs exposed for unit testing.
 @interface FLTCam ()
@@ -47,7 +35,8 @@
 /// Exposed for unit tests.
 - (void)captureOutput:(AVCaptureOutput *)output
     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-           fromConnection:(NSObject<FLTCaptureConnection> *)connection;
+           fromConnection:(AVCaptureConnection *)connection
+    NS_SWIFT_NAME(captureOutput(_:didOutput:from:));
 
 /// Start streaming images.
 - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
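Note: with the connection argument typed as `AVCaptureConnection` and the `NS_SWIFT_NAME` applied, the test-visible method now imports into Swift with the same shape as the standard `AVCaptureVideoDataOutputSampleBufferDelegate` callback. A sketch of what that buys a Swift test double; the forwarding line is illustrative only:

```swift
import AVFoundation

// A delegate double whose signature matches FLTCam's Swift-facing method
// one-to-one, so sample buffers can be forwarded without bridging shims.
final class ForwardingDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
  func captureOutput(
    _ output: AVCaptureOutput,
    didOutput sampleBuffer: CMSampleBuffer,
    from connection: AVCaptureConnection
  ) {
    // Illustrative forwarding to a camera under test:
    // camera.captureOutput(output, didOutput: sampleBuffer, from: connection)
  }
}
```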
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTFormatUtils.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTFormatUtils.h
new file mode 100644
index 00000000000..5cd5e5ba03c
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTFormatUtils.h
@@ -0,0 +1,20 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "FLTCamConfiguration.h"
+#import "FLTCaptureDevice.h"
+#import "FLTCaptureDeviceFormat.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+// Finds format with same resolution as current activeFormat in captureDevice for which
+// bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond.
+// Preferred are formats with the same subtype as current activeFormat. Sets this format
+// as activeFormat and also updates mediaSettings.framesPerSecond to value which
+// bestFrameRateForFormat returned for that format.
+extern void FLTSelectBestFormatForRequestedFrameRate(
+    NSObject<FLTCaptureDevice> *captureDevice, FCPPlatformMediaSettings *mediaSettings,
+    VideoDimensionsForFormat videoDimensionsForFormat);
+
+NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTImageStreamHandler.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTImageStreamHandler.h
new file mode 100644
index 00000000000..a5297baa8f9
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTImageStreamHandler.h
@@ -0,0 +1,18 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
+
+- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue;
+
+/// The queue on which `eventSink` property should be accessed.
+@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
+
+/// The event sink to stream camera events to Dart.
+///
+/// The property should only be accessed on `captureSessionQueue`.
+/// The block itself should be invoked on the main queue.
+@property FlutterEventSink eventSink;
+
+@end
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index 51cd20fb911..b7bbb4e5510 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_avfoundation
 description: iOS implementation of the camera plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.19
+version: 0.9.19+1
 
 environment:
   sdk: ^3.6.0
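Closing note: the extracted `FLTImageStreamHandler` keeps its queue contract, hopping onto `captureSessionQueue` before touching `eventSink`. A sketch of exercising that from a Swift test; the queue label is hypothetical, and `initWithCaptureSessionQueue:` is assumed to import as `init(captureSessionQueue:)`:

```swift
import XCTest

func exerciseStreamHandler() {
  let queue = DispatchQueue(label: "io.flutter.camera.captureSessionQueue")  // hypothetical label
  let handler = FLTImageStreamHandler(captureSessionQueue: queue)

  // onListen defers the sink assignment onto captureSessionQueue...
  _ = handler.onListen(withArguments: nil) { _ in
    // Events are delivered through this sink; the block itself is
    // invoked on the main queue, per the header's contract.
  }

  // ...so a sync hop onto the same serial queue observes the assignment.
  queue.sync {
    XCTAssertNotNil(handler.eventSink)
  }
}
```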