Skip to content

[camera_avfoundation] Implementation swift migration - part 11 #9690

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions packages/camera/camera_avfoundation/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
## 0.9.21+1

* Migrates `startImageStream` and `setUpCaptureSessionForAudioIfNeeded` methods to Swift.
* Removes Objective-C implementation of `reportErrorMessage` method.

## 0.9.21

* Fixes crash when streaming is enabled during recording.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,92 @@ final class DefaultCamera: FLTCam, Camera {
return (captureVideoInput, captureVideoOutput, connection)
}

func setUpCaptureSessionForAudioIfNeeded() {
  // Only configure audio when it was requested, and never twice — re-adding the
  // input/output would break audio that is already flowing.
  // NOTE: the previous guard (`!mediaSettings.enableAudio || !isAudioSetup`) was an
  // incorrect negation of the ObjC original and would set up audio even when
  // `enableAudio` was false.
  guard mediaSettings.enableAudio, !isAudioSetup else { return }

  let audioDevice = audioCaptureDeviceFactory()
  do {
    // Create a device input with the device and add it to the session.
    // Setup the audio input.
    let audioInput = try captureDeviceInputFactory.deviceInput(with: audioDevice)

    // Setup the audio output.
    let audioOutput = AVCaptureAudioDataOutput()

    let block = {
      // Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other
      // plugins like video_player.
      DefaultCamera.upgradeAudioSessionCategory(
        requestedCategory: .playAndRecord,
        options: [.defaultToSpeaker, .allowBluetoothA2DP, .allowAirPlay]
      )
    }

    // The app-wide audio session must be mutated on the main thread (it is shared
    // with other plugins); hop there synchronously when called from elsewhere.
    if !Thread.isMainThread {
      DispatchQueue.main.sync(execute: block)
    } else {
      block()
    }

    if audioCaptureSession.canAddInput(audioInput) {
      audioCaptureSession.addInput(audioInput)

      if audioCaptureSession.canAddOutput(audioOutput) {
        audioCaptureSession.addOutput(audioOutput)
        audioOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)
        isAudioSetup = true
      } else {
        reportErrorMessage("Unable to add Audio input/output to session capture")
        isAudioSetup = false
      }
    }
  } catch let error as NSError {
    // Device-input creation failed; surface the error to Dart and leave audio unset.
    reportErrorMessage(error.description)
  }
}

// This function, although slightly modified, is also in video_player_avfoundation (in ObjC).
// Both need to do the same thing and run on the same thread (for example main thread).
//
// Configures the application-wide audio session manually to prevent the capture
// session from overwriting the MixWithOthers flag. The category is only ever
// "upgraded": the result can gain the ability to play in silent mode or to record
// audio, but never loses an ability — other plugins depend on this global state.
// The session is left untouched when nothing would change, to avoid unnecessary
// lag and brief silences.
private static func upgradeAudioSessionCategory(
  requestedCategory: AVAudioSession.Category,
  options: AVAudioSession.CategoryOptions
) {
  let session = AVAudioSession.sharedInstance()

  let playbackCapable: Set<AVAudioSession.Category> = [.playback, .playAndRecord]
  let recordCapable: Set<AVAudioSession.Category> = [.record, .playAndRecord]
  let involved: Set<AVAudioSession.Category> = [requestedCategory, session.category]

  let needsPlayback = !involved.isDisjoint(with: playbackCapable)
  let needsRecording = !involved.isDisjoint(with: recordCapable)

  // Merge the requested category with the current one so no capability is lost.
  let mergedCategory: AVAudioSession.Category
  switch (needsPlayback, needsRecording) {
  case (true, true):
    mergedCategory = .playAndRecord
  case (true, false):
    mergedCategory = .playback
  case (false, true):
    mergedCategory = .record
  case (false, false):
    mergedCategory = requestedCategory
  }

  // Options are strictly additive as well.
  let mergedOptions = session.categoryOptions.union(options)

  // Skip the (briefly silencing) setCategory call when nothing would change.
  guard mergedCategory != session.category || mergedOptions != session.categoryOptions else {
    return
  }

  // Errors are deliberately ignored, matching the ObjC implementation's `error:nil`.
  try? session.setCategory(mergedCategory, options: mergedOptions)
}

func reportInitializationState() {
// Get all the state on the current thread, not the main thread.
let state = FCPPlatformCameraState.make(
Expand Down Expand Up @@ -257,7 +343,6 @@ final class DefaultCamera: FLTCam, Camera {
newAudioWriterInput.expectsMediaDataInRealTime = true
mediaSettingsAVWrapper.addInput(newAudioWriterInput, to: videoWriter)
self.audioWriterInput = newAudioWriterInput
audioOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)
}

if flashMode == .torch {
Expand Down Expand Up @@ -728,6 +813,53 @@ final class DefaultCamera: FLTCam, Camera {
completion(nil)
}

func startImageStream(
  with messenger: any FlutterBinaryMessenger, completion: @escaping (FlutterError?) -> Void
) {
  // Convenience overload: builds a default stream handler bound to the
  // capture-session queue and delegates to the designated variant.
  let defaultHandler = FLTImageStreamHandler(captureSessionQueue: captureSessionQueue)
  startImageStream(
    with: messenger,
    imageStreamHandler: defaultHandler,
    completion: completion
  )
}

func startImageStream(
  with messenger: FlutterBinaryMessenger,
  imageStreamHandler: FLTImageStreamHandler,
  completion: @escaping (FlutterError?) -> Void
) {
  // Starting twice is a caller error; report it and finish without changing state.
  guard !isStreamingImages else {
    reportErrorMessage("Images from camera are already streaming!")
    completion(nil)
    return
  }

  let eventChannel = FlutterEventChannel(
    name: "plugins.flutter.io/camera_avfoundation/imageStream",
    binaryMessenger: messenger
  )
  let threadSafeEventChannel = FLTThreadSafeEventChannel(eventChannel: eventChannel)

  self.imageStreamHandler = imageStreamHandler
  threadSafeEventChannel.setStreamHandler(imageStreamHandler) { [weak self] in
    guard let self = self else {
      completion(nil)
      return
    }

    // Flip the streaming flags on the capture-session queue; capture weakly again
    // because the camera may be disposed before this async hop runs.
    self.captureSessionQueue.async { [weak self] in
      guard let self = self else {
        completion(nil)
        return
      }

      self.isStreamingImages = true
      self.streamingPendingFramesCount = 0
      completion(nil)
    }
  }
}

func stopImageStream() {
if isStreamingImages {
isStreamingImages = false
Expand Down Expand Up @@ -989,6 +1121,9 @@ final class DefaultCamera: FLTCam, Camera {
}
}

/// Reports the given error message to the Dart side of the plugin.
///
/// Can be called from any thread.
private func reportErrorMessage(_ errorMessage: String) {
FLTEnsureToRunOnMainQueue { [weak self] in
self?.dartAPI?.reportError(errorMessage) { _ in
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,17 +28,11 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
@property(strong, nonatomic)
NSObject<FLTAssetWriterInputPixelBufferAdaptor> *assetWriterPixelBufferAdaptor;
@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property(assign, nonatomic) BOOL isAudioSetup;

/// A wrapper for CMVideoFormatDescriptionGetDimensions.
/// Allows for alternate implementations in tests.
@property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat;
/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests.
@property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory;
/// Reports the given error message to the Dart side of the plugin.
///
/// Can be called from any thread.
- (void)reportErrorMessage:(NSString *)errorMessage;

@end

@implementation FLTCam
Expand Down Expand Up @@ -308,139 +302,4 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
return bestFormat;
}

/// Starts streaming camera frames to Dart using a freshly-created
/// `FLTImageStreamHandler` bound to the capture-session queue, then delegates to
/// the designated variant below.
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
completion:(void (^)(FlutterError *))completion {
[self startImageStreamWithMessenger:messenger
imageStreamHandler:[[FLTImageStreamHandler alloc]
initWithCaptureSessionQueue:_captureSessionQueue]
completion:completion];
}

/// Starts streaming camera frames over the
/// `plugins.flutter.io/camera_avfoundation/imageStream` event channel.
///
/// If streaming is already active, an error message is reported to Dart and
/// `completion` is invoked with nil without changing any state.
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler
completion:(void (^)(FlutterError *))completion {
if (!_isStreamingImages) {
id<FLTEventChannel> eventChannel = [FlutterEventChannel
eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream"
binaryMessenger:messenger];
// Wrap the channel so the handler can be installed from any thread.
FLTThreadSafeEventChannel *threadSafeEventChannel =
[[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];

_imageStreamHandler = imageStreamHandler;
__weak typeof(self) weakSelf = self;
[threadSafeEventChannel setStreamHandler:_imageStreamHandler
completion:^{
typeof(self) strongSelf = weakSelf;
if (!strongSelf) {
completion(nil);
return;
}

// Flip the streaming flags on the capture-session queue.
dispatch_async(strongSelf.captureSessionQueue, ^{
// cannot use the outer strongSelf; the camera may be deallocated
// between the two async hops, so re-resolve the weak reference here.
typeof(self) strongSelf = weakSelf;
if (!strongSelf) {
completion(nil);
return;
}

strongSelf.isStreamingImages = YES;
strongSelf.streamingPendingFramesCount = 0;
completion(nil);
});
}];
} else {
[self reportErrorMessage:@"Images from camera are already streaming!"];
completion(nil);
}
}

// This function, although slightly modified, is also in video_player_avfoundation.
// Both need to do the same thing and run on the same thread (for example main thread).
// Configure application wide audio session manually to prevent overwriting flag
// MixWithOthers by capture session.
// Only change category if it is considered an upgrade which means it can only enable
// ability to play in silent mode or ability to record audio but never disables it,
// that could affect other plugins which depend on this global state. Only change
// category or options if there is change to prevent unnecessary lags and silence.
static void upgradeAudioSessionCategory(AVAudioSessionCategory requestedCategory,
AVAudioSessionCategoryOptions options) {
NSSet *playCategories = [NSSet
setWithObjects:AVAudioSessionCategoryPlayback, AVAudioSessionCategoryPlayAndRecord, nil];
NSSet *recordCategories =
[NSSet setWithObjects:AVAudioSessionCategoryRecord, AVAudioSessionCategoryPlayAndRecord, nil];
NSSet *requiredCategories =
[NSSet setWithObjects:requestedCategory, AVAudioSession.sharedInstance.category, nil];
BOOL requiresPlay = [requiredCategories intersectsSet:playCategories];
BOOL requiresRecord = [requiredCategories intersectsSet:recordCategories];
// Merge requested and current categories so no capability is ever lost.
if (requiresPlay && requiresRecord) {
requestedCategory = AVAudioSessionCategoryPlayAndRecord;
} else if (requiresPlay) {
requestedCategory = AVAudioSessionCategoryPlayback;
} else if (requiresRecord) {
requestedCategory = AVAudioSessionCategoryRecord;
}
// Options are strictly additive.
options = AVAudioSession.sharedInstance.categoryOptions | options;
// Skip the (briefly silencing) setCategory call when nothing would change.
if ([requestedCategory isEqualToString:AVAudioSession.sharedInstance.category] &&
options == AVAudioSession.sharedInstance.categoryOptions) {
return;
}
[AVAudioSession.sharedInstance setCategory:requestedCategory withOptions:options error:nil];
}

/// Adds an audio input and output to the audio capture session, once, when audio
/// is enabled in the media settings. Safe to call repeatedly.
- (void)setUpCaptureSessionForAudioIfNeeded {
// Don't set up audio twice or we will lose the audio.
if (!_mediaSettings.enableAudio || _isAudioSetup) {
return;
}

NSError *error = nil;
// Create a device input with the device and add it to the session.
// Setup the audio input.
NSObject<FLTCaptureDevice> *audioDevice = self.audioCaptureDeviceFactory();
NSObject<FLTCaptureInput> *audioInput =
[_captureDeviceInputFactory deviceInputWithDevice:audioDevice error:&error];
if (error) {
[self reportErrorMessage:error.description];
}
// Setup the audio output.
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];

dispatch_block_t block = ^{
// Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other
// plugins like video_player.
upgradeAudioSessionCategory(AVAudioSessionCategoryPlayAndRecord,
AVAudioSessionCategoryOptionDefaultToSpeaker |
AVAudioSessionCategoryOptionAllowBluetoothA2DP |
AVAudioSessionCategoryOptionAllowAirPlay);
};
// The app-wide audio session must be mutated on the main thread; hop there
// synchronously when called from elsewhere.
if (!NSThread.isMainThread) {
dispatch_sync(dispatch_get_main_queue(), block);
} else {
block();
}

if ([_audioCaptureSession canAddInput:audioInput]) {
[_audioCaptureSession addInput:audioInput];

if ([_audioCaptureSession canAddOutput:_audioOutput]) {
[_audioCaptureSession addOutput:_audioOutput];
_isAudioSetup = YES;
} else {
[self reportErrorMessage:@"Unable to add Audio input/output to session capture"];
_isAudioSetup = NO;
}
}
}

/// Reports the given error message to the Dart side of the plugin.
///
/// Can be called from any thread; the Dart call is marshalled to the main queue.
- (void)reportErrorMessage:(NSString *)errorMessage {
__weak typeof(self) weakSelf = self;
FLTEnsureToRunOnMainQueue(^{
[weakSelf.dartAPI reportError:errorMessage
completion:^(FlutterError *error){
// Ignore any errors, as this is just an event broadcast.
}];
});
}

@end
Original file line number Diff line number Diff line change
Expand Up @@ -64,16 +64,14 @@ NS_ASSUME_NONNULL_BEGIN
@property(readonly, nonatomic) FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper;
@property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings;
@property(nonatomic, copy) InputPixelBufferAdaptorFactory inputPixelBufferAdaptorFactory;
@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
@property(assign, nonatomic) BOOL isAudioSetup;
/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests.
@property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory;

/// Initializes an `FLTCam` instance with the given configuration.
/// @param error report to the caller if any error happened creating the camera.
- (instancetype)initWithConfiguration:(FLTCamConfiguration *)configuration error:(NSError **)error;

- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
completion:(nonnull void (^)(FlutterError *_Nullable))completion;
- (void)setUpCaptureSessionForAudioIfNeeded;

// Methods exposed for the Swift DefaultCamera subclass
- (void)updateOrientation;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,4 @@
@property(readonly, nonatomic)
NSMutableDictionary<NSNumber *, FLTSavePhotoDelegate *> *inProgressSavePhotoDelegates;

/// Start streaming images.
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler
completion:(void (^)(FlutterError *))completion;

@end
2 changes: 1 addition & 1 deletion packages/camera/camera_avfoundation/pubspec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name: camera_avfoundation
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.9.21
version: 0.9.21+1

environment:
sdk: ^3.6.0
Expand Down