Skip to content
This repository was archived by the owner on Feb 22, 2023. It is now read-only.

Commit 972e942

Browse files
authored
[camera] Fixed a crash when streaming on iOS (#4520)
1 parent 794fa82 commit 972e942

File tree

9 files changed

+159
-13
lines changed

9 files changed

+159
-13
lines changed

packages/camera/camera/CHANGELOG.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
## 0.9.4+18
2+
3+
* Fixes a crash on iOS when streaming on low-performance devices.
4+
15
## 0.9.4+17
26

37
* Removes obsolete information from README, and adds OS support table.

packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; };
1616
334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; };
1717
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
18+
788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; };
1819
978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; };
1920
97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; };
2021
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
@@ -70,6 +71,7 @@
7071
1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; };
7172
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
7273
59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
74+
788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = "<group>"; };
7375
7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
7476
7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
7577
7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
@@ -142,6 +144,7 @@
142144
F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */,
143145
E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */,
144146
E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */,
147+
788A065927B0E02900533D74 /* StreamingTest.m */,
145148
);
146149
path = RunnerTests;
147150
sourceTree = "<group>";
@@ -416,6 +419,7 @@
416419
E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */,
417420
334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */,
418421
E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
422+
788A065A27B0E02900533D74 /* StreamingTest.m in Sources */,
419423
E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,
420424
E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */,
421425
E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */,
Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

@import camera;
@import camera.Test;
@import XCTest;
@import AVFoundation;
#import <OCMock/OCMock.h>
#import "CameraTestUtils.h"

/// Tests FLTCam's image-streaming back-pressure: frames arriving beyond
/// `maxStreamingPendingFramesCount` must be dropped until the receiver
/// acknowledges a frame via `receivedImageStreamData`.
@interface StreamingTests : XCTestCase

/// The camera under test, created on a private capture-session queue.
@property(readonly, nonatomic) FLTCam *camera;

/// A single sample buffer fed repeatedly into the capture-output callback.
@property(readonly, nonatomic) CMSampleBufferRef sampleBuffer;

@end

@implementation StreamingTests

- (void)setUp {
  [super setUp];
  dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL);
  _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
  _sampleBuffer = FLTCreateTestSampleBuffer();
}

- (void)tearDown {
  // FLTCreateTestSampleBuffer returns a CF object we own; release it here.
  CFRelease(_sampleBuffer);
  [super tearDown];
}

/// Blocks (via KVO on `isStreamingImages`) until the camera reports that
/// streaming has actually started, and fails the test if it never does.
/// Shared by both tests so each stays focused on frame accounting.
- (void)waitForStreamingToStart {
  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
                                                                       object:_camera
                                                                expectedValue:@YES];
  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
  XCTAssertEqual(result, XCTWaiterResultCompleted);
}

- (void)testExceedMaxStreamingPendingFramesCount {
  XCTestExpectation *streamingExpectation = [self
      expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"];

  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
    [streamingExpectation fulfill];
  });

  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];
  [self waitForStreamingToStart];

  // Ten frames arrive, but with no acknowledgments only the pending-frame
  // budget (4 — see FLTCam's maxStreamingPendingFramesCount) may be forwarded.
  streamingExpectation.expectedFulfillmentCount = 4;
  for (int i = 0; i < 10; i++) {
    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
  }

  [self waitForExpectationsWithTimeout:1.0 handler:nil];
}

- (void)testReceivedImageStreamData {
  XCTestExpectation *streamingExpectation =
      [self expectationWithDescription:
                @"Must be able to call the handler again when receivedImageStreamData is called"];

  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
    [streamingExpectation fulfill];
  });

  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];
  [self waitForStreamingToStart];

  // Four frames fill the pending budget; one acknowledgment then frees a
  // slot, so the trailing frame is also delivered — five in total.
  streamingExpectation.expectedFulfillmentCount = 5;
  for (int i = 0; i < 10; i++) {
    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
  }

  [_camera receivedImageStreamData];
  [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];

  [self waitForExpectationsWithTimeout:1.0 handler:nil];
}

@end

packages/camera/camera/ios/Classes/CameraPlugin.m

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -162,6 +162,9 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call
162162
} else if ([@"stopImageStream" isEqualToString:call.method]) {
163163
[_camera stopImageStream];
164164
[result sendSuccess];
165+
} else if ([@"receivedImageStreamData" isEqualToString:call.method]) {
166+
[_camera receivedImageStreamData];
167+
[result sendSuccess];
165168
} else {
166169
NSDictionary *argsMap = call.arguments;
167170
NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue;

packages/camera/camera/ios/Classes/FLTCam.h

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,14 @@ NS_ASSUME_NONNULL_BEGIN
6161
- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
6262
- (void)applyFocusMode;
6363

64+
/**
65+
* Acknowledges the receipt of one image stream frame.
66+
*
67+
* This should be called each time a frame is received. Failing to call it may
68+
* cause later frames to be dropped instead of streamed.
69+
*/
70+
- (void)receivedImageStreamData;
71+
6472
/**
6573
* Applies FocusMode on the AVCaptureDevice.
6674
*

packages/camera/camera/ios/Classes/FLTCam.m

Lines changed: 27 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -10,14 +10,6 @@
1010
@import CoreMotion;
1111
#import <libkern/OSAtomic.h>
1212

13-
@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
14-
// The queue on which `eventSink` property should be accessed
15-
@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
16-
// `eventSink` property should be accessed on `captureSessionQueue`.
17-
// The block itself should be invoked on the main queue.
18-
@property FlutterEventSink eventSink;
19-
@end
20-
2113
@implementation FLTImageStreamHandler
2214

2315
- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
@@ -68,7 +60,13 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
6860
@property(assign, nonatomic) BOOL videoIsDisconnected;
6961
@property(assign, nonatomic) BOOL audioIsDisconnected;
7062
@property(assign, nonatomic) BOOL isAudioSetup;
71-
@property(assign, nonatomic) BOOL isStreamingImages;
63+
64+
/// Number of frames currently pending processing.
65+
@property(assign, nonatomic) int streamingPendingFramesCount;
66+
67+
/// Maximum number of frames pending processing.
68+
@property(assign, nonatomic) int maxStreamingPendingFramesCount;
69+
7270
@property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation;
7371
@property(assign, nonatomic) CMTime lastVideoSampleTime;
7472
@property(assign, nonatomic) CMTime lastAudioSampleTime;
@@ -135,6 +133,11 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
135133
_videoFormat = kCVPixelFormatType_32BGRA;
136134
_inProgressSavePhotoDelegates = [NSMutableDictionary dictionary];
137135

136+
// To limit memory consumption, limit the number of frames pending processing.
137+
// After some testing, 4 was determined to be the best maximum value.
138+
// https://github.com/flutter/plugins/pull/4520#discussion_r766335637
139+
_maxStreamingPendingFramesCount = 4;
140+
138141
NSError *localError = nil;
139142
_captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice
140143
error:&localError];
@@ -401,7 +404,8 @@ - (void)captureOutput:(AVCaptureOutput *)output
401404
}
402405
if (_isStreamingImages) {
403406
FlutterEventSink eventSink = _imageStreamHandler.eventSink;
404-
if (eventSink) {
407+
if (eventSink && (self.streamingPendingFramesCount < self.maxStreamingPendingFramesCount)) {
408+
self.streamingPendingFramesCount++;
405409
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
406410
// Must lock base address before accessing the pixel data
407411
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
@@ -898,19 +902,26 @@ - (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:
898902
}
899903

900904
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
905+
[self startImageStreamWithMessenger:messenger
906+
imageStreamHandler:[[FLTImageStreamHandler alloc]
907+
initWithCaptureSessionQueue:_captureSessionQueue]];
908+
}
909+
910+
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
911+
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler {
901912
if (!_isStreamingImages) {
902913
FlutterEventChannel *eventChannel =
903914
[FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream"
904915
binaryMessenger:messenger];
905916
FLTThreadSafeEventChannel *threadSafeEventChannel =
906917
[[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];
907918

908-
_imageStreamHandler =
909-
[[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue];
919+
_imageStreamHandler = imageStreamHandler;
910920
[threadSafeEventChannel setStreamHandler:_imageStreamHandler
911921
completion:^{
912922
dispatch_async(self->_captureSessionQueue, ^{
913923
self.isStreamingImages = YES;
924+
self.streamingPendingFramesCount = 0;
914925
});
915926
}];
916927
} else {
@@ -928,6 +939,10 @@ - (void)stopImageStream {
928939
}
929940
}
930941

942+
- (void)receivedImageStreamData {
943+
self.streamingPendingFramesCount--;
944+
}
945+
931946
- (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
932947
CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];
933948

packages/camera/camera/ios/Classes/FLTCam_Test.h

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,19 @@
55
#import "FLTCam.h"
66
#import "FLTSavePhotoDelegate.h"
77

8+
@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
9+
10+
/// The queue on which `eventSink` property should be accessed.
11+
@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
12+
13+
/// The event sink to stream camera events to Dart.
14+
///
15+
/// The property should only be accessed on `captureSessionQueue`.
16+
/// The block itself should be invoked on the main queue.
17+
@property FlutterEventSink eventSink;
18+
19+
@end
20+
821
// APIs exposed for unit testing.
922
@interface FLTCam ()
1023

@@ -14,6 +27,9 @@
1427
/// The output for photo capturing. Exposed setter for unit tests.
1528
@property(strong, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
1629

30+
/// True when images from the camera are being streamed.
31+
@property(assign, nonatomic) BOOL isStreamingImages;
32+
1733
/// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the
1834
/// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the
1935
/// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo
@@ -38,4 +54,8 @@
3854
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
3955
error:(NSError **)error;
4056

57+
/// Start streaming images.
58+
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
59+
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler;
60+
4161
@end

packages/camera/camera/lib/src/camera_controller.dart

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -448,6 +448,13 @@ class CameraController extends ValueNotifier<CameraValue> {
448448
_imageStreamSubscription =
449449
cameraEventChannel.receiveBroadcastStream().listen(
450450
(dynamic imageData) {
451+
if (defaultTargetPlatform == TargetPlatform.iOS) {
452+
try {
453+
_channel.invokeMethod<void>('receivedImageStreamData');
454+
} on PlatformException catch (e) {
455+
throw CameraException(e.code, e.message);
456+
}
457+
}
451458
onAvailable(
452459
CameraImage.fromPlatformData(imageData as Map<dynamic, dynamic>));
453460
},

packages/camera/camera/pubspec.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing
44
Dart.
55
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera
66
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
7-
version: 0.9.4+17
7+
version: 0.9.4+18
88

99
environment:
1010
sdk: ">=2.14.0 <3.0.0"

0 commit comments

Comments
 (0)