diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index cdb2bb789e4..c98b493d332 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.9.22+5 + +* Migrates `FLTCaptureDevice`, `FLTCaptureSession`, and `FLTFormatUtils` classes to Swift. + ## 0.9.22+4 * Migrates `FLTCameraDeviceDiscovering` and `FLTDeviceOrientationProviding` classes to Swift. diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift index e83cecfa5fb..5456652e5eb 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift @@ -28,8 +28,8 @@ final class CameraSessionPresetsTests: XCTestCase { } let captureFormatMock = MockCaptureDeviceFormat() let captureDeviceMock = MockCaptureDevice() - captureDeviceMock.formats = [captureFormatMock] - captureDeviceMock.activeFormat = captureFormatMock + captureDeviceMock.fltFormats = [captureFormatMock] + captureDeviceMock.fltActiveFormat = captureFormatMock captureDeviceMock.lockForConfigurationStub = { lockForConfigurationExpectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift index bd5422f7747..5e57706b1a9 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift @@ -39,23 +39,23 @@ private final class TestMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { videoSettingsExpectation = test.expectation(description: "videoSettingsExpectation") } - override 
func lockDevice(_ captureDevice: FLTCaptureDevice) throws { + override func lockDevice(_ captureDevice: CaptureDevice) throws { lockExpectation.fulfill() } - override func unlockDevice(_ captureDevice: FLTCaptureDevice) { + override func unlockDevice(_ captureDevice: CaptureDevice) { unlockExpectation.fulfill() } - override func beginConfiguration(for videoCaptureSession: FLTCaptureSession) { + override func beginConfiguration(for videoCaptureSession: CaptureSession) { beginConfigurationExpectation.fulfill() } - override func commitConfiguration(for videoCaptureSession: FLTCaptureSession) { + override func commitConfiguration(for videoCaptureSession: CaptureSession) { commitConfigurationExpectation.fulfill() } - override func setMinFrameDuration(_ duration: CMTime, on captureDevice: FLTCaptureDevice) { + override func setMinFrameDuration(_ duration: CMTime, on captureDevice: CaptureDevice) { // FLTCam allows to set frame rate with 1/10 precision. let expectedDuration = CMTimeMake(value: 10, timescale: Int32(testFramesPerSecond * 10)) if duration == expectedDuration { @@ -63,7 +63,7 @@ private final class TestMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { } } - override func setMaxFrameDuration(_ duration: CMTime, on captureDevice: FLTCaptureDevice) { + override func setMaxFrameDuration(_ duration: CMTime, on captureDevice: CaptureDevice) { // FLTCam allows to set frame rate with 1/10 precision. 
let expectedDuration = CMTimeMake(value: 10, timescale: Int32(testFramesPerSecond * 10)) if duration == expectedDuration { @@ -203,7 +203,7 @@ final class CameraSettingsTests: XCTestCase { configuration.mediaSettings = settings let camera = CameraTestUtils.createTestCamera(configuration) - let range = camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0] + let range = camera.captureDevice.fltActiveFormat.videoSupportedFrameRateRanges[0] XCTAssertLessThanOrEqual(range.minFrameRate, 60) XCTAssertGreaterThanOrEqual(range.maxFrameRate, 60) } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift index a716aade6f5..68583d3daae 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift @@ -47,7 +47,7 @@ enum CameraTestUtils { captureDeviceFormatMock2.videoSupportedFrameRateRanges = [frameRateRangeMock2] let captureDeviceMock = MockCaptureDevice() - captureDeviceMock.formats = [captureDeviceFormatMock1, captureDeviceFormatMock2] + captureDeviceMock.fltFormats = [captureDeviceFormatMock1, captureDeviceFormatMock2] var currentFormat: FLTCaptureDeviceFormat = captureDeviceFormatMock1 diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscoverer.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscoverer.swift index e52ed1df5c7..2a50f6889c9 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscoverer.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscoverer.swift @@ -18,7 +18,7 @@ final class MockCameraDeviceDiscoverer: NSObject, CameraDeviceDiscoverer { _ deviceTypes: [AVCaptureDevice.DeviceType], _ mediaType: AVMediaType, _ position: 
AVCaptureDevice.Position - ) -> [NSObject & FLTCaptureDevice]? + ) -> [NSObject & CaptureDevice]? )? /// A stub that replaces the default implementation of @@ -26,7 +26,7 @@ final class MockCameraDeviceDiscoverer: NSObject, CameraDeviceDiscoverer { func discoverySession( withDeviceTypes deviceTypes: [AVCaptureDevice.DeviceType], mediaType: AVMediaType, position: AVCaptureDevice.Position - ) -> [FLTCaptureDevice] { + ) -> [CaptureDevice] { return discoverySessionStub?(deviceTypes, mediaType, position) ?? [] } } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.swift index 64ab36e43e0..87e550685f5 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.swift @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import camera_avfoundation +@testable import camera_avfoundation // Import Objective-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) @@ -11,7 +11,7 @@ import camera_avfoundation /// A mock implementation of `FLTCaptureDevice` that allows mocking the class /// properties. -class MockCaptureDevice: NSObject, FLTCaptureDevice { +class MockCaptureDevice: NSObject, CaptureDevice { var activeFormatStub: (() -> FLTCaptureDeviceFormat)? var setActiveFormatStub: ((FLTCaptureDeviceFormat) -> Void)? var getTorchModeStub: (() -> AVCaptureDevice.TorchMode)? @@ -26,7 +26,7 @@ class MockCaptureDevice: NSObject, FLTCaptureDevice { var setVideoZoomFactorStub: ((CGFloat) -> Void)? var lockForConfigurationStub: (() throws -> Void)? 
- var device: AVCaptureDevice { + var avDevice: AVCaptureDevice { preconditionFailure("Attempted to access unimplemented property: device") } @@ -34,7 +34,7 @@ class MockCaptureDevice: NSObject, FLTCaptureDevice { var position = AVCaptureDevice.Position.unspecified var deviceType = AVCaptureDevice.DeviceType.builtInWideAngleCamera - var activeFormat: FLTCaptureDeviceFormat { + var fltActiveFormat: FLTCaptureDeviceFormat { get { activeFormatStub?() ?? MockCaptureDeviceFormat() } @@ -43,7 +43,7 @@ class MockCaptureDevice: NSObject, FLTCaptureDevice { } } - var formats: [FLTCaptureDeviceFormat] = [] + var fltFormats: [FLTCaptureDeviceFormat] = [] var hasFlash = false var hasTorch = false var isTorchAvailable = false @@ -78,20 +78,28 @@ class MockCaptureDevice: NSObject, FLTCaptureDevice { return isFocusModeSupportedStub?(mode) ?? false } + var focusMode: AVCaptureDevice.FocusMode { + get { .autoFocus } + set { setFocusModeStub?(newValue) } + } + func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode) { setFocusModeStub?(focusMode) } - func setFocusPointOfInterest(_ point: CGPoint) { - setFocusPointOfInterestStub?(point) + var focusPointOfInterest: CGPoint { + get { CGPoint.zero } + set { setFocusPointOfInterestStub?(newValue) } } - func setExposureMode(_ exposureMode: AVCaptureDevice.ExposureMode) { - setExposureModeStub?(exposureMode) + var exposureMode: AVCaptureDevice.ExposureMode { + get { .autoExpose } + set { setExposureModeStub?(newValue) } } - func setExposurePointOfInterest(_ point: CGPoint) { - setExposurePointOfInterestStub?(point) + var exposurePointOfInterest: CGPoint { + get { CGPoint.zero } + set { setExposurePointOfInterestStub?(newValue) } } func setExposureTargetBias(_ bias: Float, completionHandler handler: ((CMTime) -> Void)? = nil) { @@ -102,17 +110,11 @@ class MockCaptureDevice: NSObject, FLTCaptureDevice { return isExposureModeSupportedStub?(mode) ?? 
false } - func lensAperture() -> Float { - return 0 - } + var lensAperture: Float { 0 } - func exposureDuration() -> CMTime { - return CMTime(value: 1, timescale: 1) - } + var exposureDuration: CMTime { CMTime(value: 1, timescale: 1) } - func iso() -> Float { - return 0 - } + var iso: Float { 0 } func lockForConfiguration() throws { try lockForConfigurationStub?() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceInputFactory.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceInputFactory.swift index bec6a651087..abf66e49106 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceInputFactory.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceInputFactory.swift @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import camera_avfoundation +@testable import camera_avfoundation // Import Objective-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) @@ -11,8 +11,8 @@ import camera_avfoundation ///// A mocked implementation of FLTCaptureDeviceInputFactory which allows injecting a custom ///// implementation. 
-final class MockCaptureDeviceInputFactory: NSObject, FLTCaptureDeviceInputFactory { - func deviceInput(with device: FLTCaptureDevice) throws -> FLTCaptureInput { +final class MockCaptureDeviceInputFactory: NSObject, CaptureDeviceInputFactory { + func deviceInput(with device: CaptureDevice) throws -> CaptureInput { return MockCaptureInput() } } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureInput.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureInput.swift index 99a179061d3..0c6a38d1bc2 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureInput.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureInput.swift @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import camera_avfoundation +@testable import camera_avfoundation // Import Objective-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) @@ -11,8 +11,8 @@ import camera_avfoundation /// A mocked implementation of FLTCaptureInput which allows injecting a custom /// implementation. 
-final class MockCaptureInput: NSObject, FLTCaptureInput { - var input: AVCaptureInput { +final class MockCaptureInput: NSObject, CaptureInput { + var avInput: AVCaptureInput { preconditionFailure("Attempted to access unimplemented property: input") } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift index b3ddcd97754..4f5a7f9e729 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.swift @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import camera_avfoundation +@testable import camera_avfoundation // Import Objective-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) @@ -11,7 +11,7 @@ import camera_avfoundation /// Mock implementation of `FLTCaptureSession` protocol which allows injecting a custom /// implementation. -final class MockCaptureSession: NSObject, FLTCaptureSession { +final class MockCaptureSession: NSObject, CaptureSession { var setSessionPresetStub: ((AVCaptureSession.Preset) -> Void)? var beginConfigurationStub: (() -> Void)? var commitConfigurationStub: (() -> Void)? @@ -56,13 +56,13 @@ final class MockCaptureSession: NSObject, FLTCaptureSession { return canSetSessionPresetStub?(preset) ?? 
true } - func addInputWithNoConnections(_ input: FLTCaptureInput) {} + func addInputWithNoConnections(_ input: CaptureInput) {} func addOutputWithNoConnections(_ output: AVCaptureOutput) {} func addConnection(_: AVCaptureConnection) {} - func addInput(_: FLTCaptureInput) {} + func addInput(_: CaptureInput) {} func addOutput(_ output: AVCaptureOutput) { @@ -71,11 +71,11 @@ final class MockCaptureSession: NSObject, FLTCaptureSession { } } - func removeInput(_: FLTCaptureInput) {} + func removeInput(_: CaptureInput) {} func removeOutput(_: AVCaptureOutput) {} - func canAddInput(_: FLTCaptureInput) -> Bool { + func canAddInput(_: CaptureInput) -> Bool { return true } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift index 8a39d55f945..13252520776 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift @@ -19,27 +19,27 @@ private class FakeMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { self.inputMock = inputMock } - override func lockDevice(_ captureDevice: FLTCaptureDevice) throws { + override func lockDevice(_ captureDevice: CaptureDevice) throws { // No-op. } - override func unlockDevice(_ captureDevice: FLTCaptureDevice) { + override func unlockDevice(_ captureDevice: CaptureDevice) { // No-op. } - override func beginConfiguration(for videoCaptureSession: FLTCaptureSession) { + override func beginConfiguration(for videoCaptureSession: CaptureSession) { // No-op. } - override func commitConfiguration(for videoCaptureSession: FLTCaptureSession) { + override func commitConfiguration(for videoCaptureSession: CaptureSession) { // No-op. 
} - override func setMinFrameDuration(_ duration: CMTime, on captureDevice: FLTCaptureDevice) { + override func setMinFrameDuration(_ duration: CMTime, on captureDevice: CaptureDevice) { // No-op. } - override func setMaxFrameDuration(_ duration: CMTime, on captureDevice: FLTCaptureDevice) { + override func setMaxFrameDuration(_ duration: CMTime, on captureDevice: CaptureDevice) { // No-op. } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraConfiguration.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraConfiguration.swift index 116a63fa42a..535c9e4b756 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraConfiguration.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraConfiguration.swift @@ -13,11 +13,11 @@ import UIKit /// Factory block returning an FLTCaptureDevice. /// Used in tests to inject a video capture device into DefaultCamera. 
-typealias VideoCaptureDeviceFactory = (_ cameraName: String) -> FLTCaptureDevice +typealias VideoCaptureDeviceFactory = (_ cameraName: String) -> CaptureDevice -typealias AudioCaptureDeviceFactory = () -> FLTCaptureDevice +typealias AudioCaptureDeviceFactory = () -> CaptureDevice -typealias CaptureSessionFactory = () -> FLTCaptureSession +typealias CaptureSessionFactory = () -> CaptureSession typealias AssetWriterFactory = (_ assetUrl: URL, _ fileType: AVFileType) throws -> FLTAssetWriter @@ -35,11 +35,11 @@ class CameraConfiguration { var mediaSettings: FCPPlatformMediaSettings var mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper var captureSessionQueue: DispatchQueue - var videoCaptureSession: FLTCaptureSession - var audioCaptureSession: FLTCaptureSession + var videoCaptureSession: CaptureSession + var audioCaptureSession: CaptureSession var videoCaptureDeviceFactory: VideoCaptureDeviceFactory let audioCaptureDeviceFactory: AudioCaptureDeviceFactory - let captureDeviceInputFactory: FLTCaptureDeviceInputFactory + let captureDeviceInputFactory: CaptureDeviceInputFactory var assetWriterFactory: AssetWriterFactory var inputPixelBufferAdaptorFactory: InputPixelBufferAdaptorFactory var videoDimensionsConverter: VideoDimensionsConverter @@ -54,7 +54,7 @@ class CameraConfiguration { audioCaptureDeviceFactory: @escaping AudioCaptureDeviceFactory, captureSessionFactory: @escaping CaptureSessionFactory, captureSessionQueue: DispatchQueue, - captureDeviceInputFactory: FLTCaptureDeviceInputFactory, + captureDeviceInputFactory: CaptureDeviceInputFactory, initialCameraName: String ) { self.mediaSettings = mediaSettings diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraDeviceDiscoverer.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraDeviceDiscoverer.swift index 6713557fbff..36fe9616408 100644 --- 
a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraDeviceDiscoverer.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraDeviceDiscoverer.swift @@ -16,7 +16,7 @@ protocol CameraDeviceDiscoverer { withDeviceTypes deviceTypes: [AVCaptureDevice.DeviceType], mediaType: AVMediaType, position: AVCaptureDevice.Position - ) -> [FLTCaptureDevice] + ) -> [CaptureDevice] } /// The default implementation of the `CameraDeviceDiscoverer` protocol. @@ -26,18 +26,11 @@ class DefaultCameraDeviceDiscoverer: NSObject, CameraDeviceDiscoverer { withDeviceTypes deviceTypes: [AVCaptureDevice.DeviceType], mediaType: AVMediaType, position: AVCaptureDevice.Position - ) -> [FLTCaptureDevice] { - let discoverySession = AVCaptureDevice.DiscoverySession( + ) -> [CaptureDevice] { + return AVCaptureDevice.DiscoverySession( deviceTypes: deviceTypes, mediaType: mediaType, position: position - ) - - let devices = discoverySession.devices - let deviceControllers = devices.map { device in - FLTDefaultCaptureDevice(device: device) as FLTCaptureDevice - } - - return deviceControllers + ).devices } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index 577c886ed57..75687209f9e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -18,7 +18,7 @@ public final class CameraPlugin: NSObject, FlutterPlugin { private let permissionManager: FLTCameraPermissionManager private let captureDeviceFactory: VideoCaptureDeviceFactory private let captureSessionFactory: CaptureSessionFactory - private let captureDeviceInputFactory: FLTCaptureDeviceInputFactory + 
private let captureDeviceInputFactory: CaptureDeviceInputFactory /// All FLTCam's state access and capture session related operations should be on run on this queue. private let captureSessionQueue: DispatchQueue @@ -36,10 +36,10 @@ public final class CameraPlugin: NSObject, FlutterPlugin { permissionService: FLTDefaultPermissionService()), deviceFactory: { name in // TODO(RobertOdrowaz) Implement better error handling and remove non-null assertion - FLTDefaultCaptureDevice(device: AVCaptureDevice(uniqueID: name)!) + AVCaptureDevice(uniqueID: name)! }, - captureSessionFactory: { FLTDefaultCaptureSession(captureSession: AVCaptureSession()) }, - captureDeviceInputFactory: FLTDefaultCaptureDeviceInputFactory(), + captureSessionFactory: { AVCaptureSession() }, + captureDeviceInputFactory: DefaultCaptureDeviceInputFactory(), captureSessionQueue: DispatchQueue(label: "io.flutter.camera.captureSessionQueue") ) @@ -54,7 +54,7 @@ public final class CameraPlugin: NSObject, FlutterPlugin { permissionManager: FLTCameraPermissionManager, deviceFactory: @escaping VideoCaptureDeviceFactory, captureSessionFactory: @escaping CaptureSessionFactory, - captureDeviceInputFactory: FLTCaptureDeviceInputFactory, + captureDeviceInputFactory: CaptureDeviceInputFactory, captureSessionQueue: DispatchQueue ) { self.registry = registry @@ -159,8 +159,7 @@ extension CameraPlugin: FCPCameraApi { } } - private func platformLensDirection(for device: FLTCaptureDevice) -> FCPPlatformCameraLensDirection - { + private func platformLensDirection(for device: CaptureDevice) -> FCPPlatformCameraLensDirection { switch device.position { case .back: return .back @@ -173,7 +172,7 @@ extension CameraPlugin: FCPCameraApi { } } - private func platformLensType(for device: FLTCaptureDevice) -> FCPPlatformCameraLensType { + private func platformLensType(for device: CaptureDevice) -> FCPPlatformCameraLensType { switch device.deviceType { case .builtInWideAngleCamera: return .wide @@ -255,9 +254,7 @@ extension 
CameraPlugin: FCPCameraApi { mediaSettings: settings, mediaSettingsWrapper: mediaSettingsAVWrapper, captureDeviceFactory: captureDeviceFactory, - audioCaptureDeviceFactory: { - FLTDefaultCaptureDevice(device: AVCaptureDevice.default(for: .audio)!) - }, + audioCaptureDeviceFactory: { AVCaptureDevice.default(for: .audio)! }, captureSessionFactory: captureSessionFactory, captureSessionQueue: captureSessionQueue, captureDeviceInputFactory: captureDeviceInputFactory, diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift new file mode 100644 index 00000000000..f28bd979d04 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureDevice.swift @@ -0,0 +1,114 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import AVFoundation + +// Import Objective-C part of the implementation when SwiftPM is used. +#if canImport(camera_avfoundation_objc) + import camera_avfoundation_objc +#endif + +/// A protocol which is a direct passthrough to AVCaptureDevice. +/// It exists to allow replacing AVCaptureDevice in tests. +protocol CaptureDevice: NSObjectProtocol { + /// Underlying `AVCaptureDevice` instance. This should not be used directly + in the plugin implementation code, but it exists so that other protocol default + implementations can pass the raw device to AVFoundation methods.
+ var avDevice: AVCaptureDevice { get } + + // Device identifier + var uniqueID: String { get } + + // Position/Orientation + var position: AVCaptureDevice.Position { get } + + // Lens type + var deviceType: AVCaptureDevice.DeviceType { get } + + // Format/Configuration + var fltActiveFormat: FLTCaptureDeviceFormat { get set } + var fltFormats: [FLTCaptureDeviceFormat] { get } + + // Flash/Torch + var hasFlash: Bool { get } + var hasTorch: Bool { get } + var isTorchAvailable: Bool { get } + var torchMode: AVCaptureDevice.TorchMode { get set } + func isFlashModeSupported(_ mode: AVCaptureDevice.FlashMode) -> Bool + + // Focus + var isFocusPointOfInterestSupported: Bool { get } + func isFocusModeSupported(_ mode: AVCaptureDevice.FocusMode) -> Bool + var focusMode: AVCaptureDevice.FocusMode { get set } + var focusPointOfInterest: CGPoint { get set } + + // Exposure + var isExposurePointOfInterestSupported: Bool { get } + var exposureMode: AVCaptureDevice.ExposureMode { get set } + var exposurePointOfInterest: CGPoint { get set } + var minExposureTargetBias: Float { get } + var maxExposureTargetBias: Float { get } + func setExposureTargetBias( + _ bias: Float, completionHandler handler: ((CMTime) -> Void)?) + func isExposureModeSupported(_ mode: AVCaptureDevice.ExposureMode) -> Bool + + // Zoom + var maxAvailableVideoZoomFactor: CGFloat { get } + var minAvailableVideoZoomFactor: CGFloat { get } + var videoZoomFactor: CGFloat { get set } + + // Camera Properties + var lensAperture: Float { get } + var exposureDuration: CMTime { get } + var iso: Float { get } + + // Configuration Lock + func lockForConfiguration() throws + func unlockForConfiguration() + + // Frame Duration + var activeVideoMinFrameDuration: CMTime { get set } + var activeVideoMaxFrameDuration: CMTime { get set } +} + +/// A protocol which is a direct passthrough to AVCaptureInput. +/// It exists to allow replacing AVCaptureInput in tests. 
+protocol CaptureInput: NSObjectProtocol { + /// Underlying input instance. It is exposed as raw AVCaptureInput has to be passed to some + /// AVFoundation methods. The plugin implementation code shouldn't use it though. + var avInput: AVCaptureInput { get } + + var ports: [AVCaptureInput.Port] { get } +} + +/// A protocol which wraps the creation of AVCaptureDeviceInput. +/// It exists to allow mocking instances of AVCaptureDeviceInput in tests. +protocol CaptureDeviceInputFactory: NSObjectProtocol { + func deviceInput(with device: CaptureDevice) throws -> CaptureInput +} + +extension AVCaptureDevice: CaptureDevice { + var avDevice: AVCaptureDevice { self } + + var fltActiveFormat: FLTCaptureDeviceFormat { + get { FLTDefaultCaptureDeviceFormat.init(format: activeFormat) } + set { activeFormat = newValue.format } + } + + var fltFormats: [FLTCaptureDeviceFormat] { + return self.formats.map { FLTDefaultCaptureDeviceFormat.init(format: $0) } + } +} + +extension AVCaptureInput: CaptureInput { + var avInput: AVCaptureInput { self } +} + +/// A default implementation of CaptureDeviceInputFactory protocol which +/// wraps a call to AVCaptureInput static method `deviceInputWithDevice`. +class DefaultCaptureDeviceInputFactory: NSObject, CaptureDeviceInputFactory { + func deviceInput(with device: CaptureDevice) throws -> CaptureInput { + return try AVCaptureDeviceInput(device: device.avDevice) + } +} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureSession.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureSession.swift new file mode 100644 index 00000000000..253abdabc0c --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CaptureSession.swift @@ -0,0 +1,53 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +import AVFoundation + +// Import Objective-C part of the implementation when SwiftPM is used. +#if canImport(camera_avfoundation_objc) + import camera_avfoundation_objc +#endif + +/// A protocol which is a direct passthrough to AVCaptureSession. +/// It exists to allow replacing AVCaptureSession in tests. +protocol CaptureSession: NSObjectProtocol { + var sessionPreset: AVCaptureSession.Preset { get set } + var inputs: [AVCaptureInput] { get } + var outputs: [AVCaptureOutput] { get } + var automaticallyConfiguresApplicationAudioSession: Bool { get set } + + func beginConfiguration() + func commitConfiguration() + func startRunning() + func stopRunning() + func canSetSessionPreset(_ preset: AVCaptureSession.Preset) -> Bool + func addInputWithNoConnections(_ input: CaptureInput) + func addOutputWithNoConnections(_ output: AVCaptureOutput) + func addConnection(_ connection: AVCaptureConnection) + func addInput(_ input: CaptureInput) + func addOutput(_ output: AVCaptureOutput) + func removeInput(_ input: CaptureInput) + func removeOutput(_ output: AVCaptureOutput) + func canAddInput(_ input: CaptureInput) -> Bool + func canAddOutput(_ output: AVCaptureOutput) -> Bool + func canAddConnection(_ connection: AVCaptureConnection) -> Bool +} + +extension AVCaptureSession: CaptureSession { + func addInputWithNoConnections(_ input: CaptureInput) { + addInputWithNoConnections(input.avInput) + } + + func addInput(_ input: CaptureInput) { + addInput(input.avInput) + } + + func removeInput(_ input: CaptureInput) { + removeInput(input.avInput) + } + + func canAddInput(_ input: CaptureInput) -> Bool { + canAddInput(input.avInput) + } +} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index 276fd1897d8..99de0ff3a82 100644 --- 
a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -43,13 +43,13 @@ final class DefaultCamera: NSObject, Camera { private let mediaSettings: FCPPlatformMediaSettings private let mediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper - private let videoCaptureSession: FLTCaptureSession - private let audioCaptureSession: FLTCaptureSession + private let videoCaptureSession: CaptureSession + private let audioCaptureSession: CaptureSession /// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests. private let videoCaptureDeviceFactory: VideoCaptureDeviceFactory private let audioCaptureDeviceFactory: AudioCaptureDeviceFactory - private let captureDeviceInputFactory: FLTCaptureDeviceInputFactory + private let captureDeviceInputFactory: CaptureDeviceInputFactory private let assetWriterFactory: AssetWriterFactory private let inputPixelBufferAdaptorFactory: InputPixelBufferAdaptorFactory @@ -60,12 +60,12 @@ final class DefaultCamera: NSObject, Camera { private let deviceOrientationProvider: DeviceOrientationProvider private let motionManager = CMMotionManager() - private(set) var captureDevice: FLTCaptureDevice + private(set) var captureDevice: CaptureDevice // Setter exposed for tests. var captureVideoOutput: CaptureVideoDataOutput // Setter exposed for tests. var capturePhotoOutput: CapturePhotoOutput - private var captureVideoInput: FLTCaptureInput + private var captureVideoInput: CaptureInput private var videoWriter: FLTAssetWriter? private var videoWriterInput: FLTAssetWriterInput? 
@@ -132,10 +132,10 @@ final class DefaultCamera: NSObject, Camera { } private static func createConnection( - captureDevice: FLTCaptureDevice, + captureDevice: CaptureDevice, videoFormat: FourCharCode, - captureDeviceInputFactory: FLTCaptureDeviceInputFactory - ) throws -> (FLTCaptureInput, CaptureVideoDataOutput, AVCaptureConnection) { + captureDeviceInputFactory: CaptureDeviceInputFactory + ) throws -> (CaptureInput, CaptureVideoDataOutput, AVCaptureConnection) { // Setup video capture input. let captureVideoInput = try captureDeviceInputFactory.deviceInput(with: captureDevice) @@ -211,10 +211,10 @@ final class DefaultCamera: NSObject, Camera { try setCaptureSessionPreset(mediaSettings.resolutionPreset) - FLTSelectBestFormatForRequestedFrameRate( - captureDevice, - mediaSettings, - videoDimensionsConverter) + selectBestFormat( + for: captureDevice, + mediaSettings: mediaSettings, + videoDimensionsForFormat: videoDimensionsConverter) if let framesPerSecond = mediaSettings.framesPerSecond { // Set frame rate with 1/10 precision allowing non-integral values. @@ -248,7 +248,7 @@ final class DefaultCamera: NSObject, Camera { do { try captureDevice.lockForConfiguration() // Set the best device format found and finish the device configuration. - captureDevice.activeFormat = bestFormat + captureDevice.fltActiveFormat = bestFormat captureDevice.unlockForConfiguration() break } @@ -301,23 +301,23 @@ final class DefaultCamera: NSObject, Camera { } } - let size = videoDimensionsConverter(captureDevice.activeFormat) + let size = videoDimensionsConverter(captureDevice.fltActiveFormat) previewSize = CGSize(width: CGFloat(size.width), height: CGFloat(size.height)) audioCaptureSession.sessionPreset = videoCaptureSession.sessionPreset } /// Finds the highest available resolution in terms of pixel count for the given device. /// Preferred are formats with the same subtype as current activeFormat. 
- private func highestResolutionFormat(forCaptureDevice captureDevice: FLTCaptureDevice) + private func highestResolutionFormat(forCaptureDevice captureDevice: CaptureDevice) -> FLTCaptureDeviceFormat? { let preferredSubType = CMFormatDescriptionGetMediaSubType( - captureDevice.activeFormat.formatDescription) + captureDevice.fltActiveFormat.formatDescription) var bestFormat: FLTCaptureDeviceFormat? = nil var maxPixelCount: UInt = 0 var isBestSubTypePreferred = false - for format in captureDevice.formats { + for format in captureDevice.fltFormats { let resolution = videoDimensionsConverter(format) let height = UInt(resolution.height) let width = UInt(resolution.width) @@ -800,12 +800,12 @@ final class DefaultCamera: NSObject, Camera { switch exposureMode { case .locked: // AVCaptureExposureMode.autoExpose automatically adjusts the exposure one time, and then locks exposure for the device - captureDevice.setExposureMode(.autoExpose) + captureDevice.exposureMode = .autoExpose case .auto: if captureDevice.isExposureModeSupported(.continuousAutoExposure) { - captureDevice.setExposureMode(.continuousAutoExposure) + captureDevice.exposureMode = .continuousAutoExposure } else { - captureDevice.setExposureMode(.autoExpose) + captureDevice.exposureMode = .autoExpose } @unknown default: assertionFailure("Unknown exposure mode") @@ -836,7 +836,7 @@ final class DefaultCamera: NSObject, Camera { // A nil point resets to the center. let exposurePoint = cgPoint( for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation) - captureDevice.setExposurePointOfInterest(exposurePoint) + captureDevice.exposurePointOfInterest = exposurePoint captureDevice.unlockForConfiguration() // Retrigger auto exposure applyExposureMode() @@ -861,11 +861,11 @@ final class DefaultCamera: NSObject, Camera { let orientation = deviceOrientationProvider.orientation try? captureDevice.lockForConfiguration() // A nil point resets to the center. 
- captureDevice.setFocusPointOfInterest( + captureDevice.focusPointOfInterest = cgPoint( for: point ?? .makeWith(x: 0.5, y: 0.5), withOrientation: orientation) - ) + captureDevice.unlockForConfiguration() // Retrigger auto focus applyFocusMode() @@ -877,20 +877,20 @@ final class DefaultCamera: NSObject, Camera { } private func applyFocusMode( - _ focusMode: FCPPlatformFocusMode, onDevice captureDevice: FLTCaptureDevice + _ focusMode: FCPPlatformFocusMode, onDevice captureDevice: CaptureDevice ) { try? captureDevice.lockForConfiguration() switch focusMode { case .locked: // AVCaptureFocusMode.autoFocus automatically adjusts the focus one time, and then locks focus if captureDevice.isFocusModeSupported(.autoFocus) { - captureDevice.setFocusMode(.autoFocus) + captureDevice.focusMode = .autoFocus } case .auto: if captureDevice.isFocusModeSupported(.continuousAutoFocus) { - captureDevice.setFocusMode(.continuousAutoFocus) + captureDevice.focusMode = .continuousAutoFocus } else if captureDevice.isFocusModeSupported(.autoFocus) { - captureDevice.setFocusMode(.autoFocus) + captureDevice.focusMode = .autoFocus } @unknown default: assertionFailure("Unknown focus mode") @@ -1320,9 +1320,9 @@ final class DefaultCamera: NSObject, Camera { "height": imageHeight, "format": videoFormat, "planes": planes, - "lensAperture": Double(captureDevice.lensAperture()), - "sensorExposureTime": Int(captureDevice.exposureDuration().seconds * 1_000_000_000), - "sensorSensitivity": Double(captureDevice.iso()), + "lensAperture": Double(captureDevice.lensAperture), + "sensorExposureTime": Int(captureDevice.exposureDuration.seconds * 1_000_000_000), + "sensorSensitivity": Double(captureDevice.iso), ] DispatchQueue.main.async { @@ -1378,13 +1378,13 @@ final class DefaultCamera: NSObject, Camera { func close() { stop() for input in videoCaptureSession.inputs { - videoCaptureSession.removeInput(FLTDefaultCaptureInput(input: input)) + videoCaptureSession.removeInput(input) } for output in 
videoCaptureSession.outputs { videoCaptureSession.removeOutput(output) } for input in audioCaptureSession.inputs { - audioCaptureSession.removeInput(FLTDefaultCaptureInput(input: input)) + audioCaptureSession.removeInput(input) } for output in audioCaptureSession.outputs { audioCaptureSession.removeOutput(output) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift new file mode 100644 index 00000000000..f8f734fd4b9 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift @@ -0,0 +1,77 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import AVFoundation + +// Import Objective-C part of the implementation when SwiftPM is used. +#if canImport(camera_avfoundation_objc) + import camera_avfoundation_objc +#endif + +/// Determines the video dimensions (width and height) for a given capture device format. +/// Used in tests to mock CMVideoFormatDescriptionGetDimensions. +typealias VideoDimensionsForFormat = (FLTCaptureDeviceFormat) -> CMVideoDimensions + +/// Returns frame rate supported by format closest to targetFrameRate. 
+private func bestFrameRate(for format: FLTCaptureDeviceFormat, targetFrameRate: Double) -> Double { + var bestFrameRate: Double = 0 + var minDistance: Double = Double.greatestFiniteMagnitude + + for range in format.videoSupportedFrameRateRanges { + let frameRate = min( + max(targetFrameRate, Double(range.minFrameRate)), Double(range.maxFrameRate)) + let distance = abs(frameRate - targetFrameRate) + if distance < minDistance { + bestFrameRate = frameRate + minDistance = distance + } + } + + return bestFrameRate +} + +/// Finds format with same resolution as current activeFormat in captureDevice for which +/// bestFrameRate returned frame rate closest to mediaSettings.framesPerSecond. +/// Preferred are formats with the same subtype as current activeFormat. Sets this format +/// as activeFormat and also updates mediaSettings.framesPerSecond to value which +/// bestFrameRate returned for that format. +func selectBestFormat( + for captureDevice: CaptureDevice, + mediaSettings: FCPPlatformMediaSettings, + videoDimensionsForFormat: VideoDimensionsForFormat +) { + let targetResolution = videoDimensionsForFormat(captureDevice.fltActiveFormat) + let targetFrameRate = mediaSettings.framesPerSecond?.doubleValue ?? 
0 + let preferredSubType = CMFormatDescriptionGetMediaSubType( + captureDevice.fltActiveFormat.formatDescription) + + var bestFormat = captureDevice.fltActiveFormat + var resolvedBestFrameRate = bestFrameRate(for: bestFormat, targetFrameRate: targetFrameRate) + var minDistance = abs(resolvedBestFrameRate - targetFrameRate) + var isBestSubTypePreferred = true + + for format in captureDevice.fltFormats { + let resolution = videoDimensionsForFormat(format) + if resolution.width != targetResolution.width || resolution.height != targetResolution.height { + continue + } + + let frameRate = bestFrameRate(for: format, targetFrameRate: targetFrameRate) + let distance = abs(frameRate - targetFrameRate) + let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription) + let isSubTypePreferred = subType == preferredSubType + + if distance < minDistance + || (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred) + { + bestFormat = format + resolvedBestFrameRate = frameRate + minDistance = distance + isBestSubTypePreferred = isSubTypePreferred + } + } + + captureDevice.fltActiveFormat = bestFormat + mediaSettings.framesPerSecond = NSNumber(value: resolvedBestFrameRate) +} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/MediaSettingsAVWrapper.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/MediaSettingsAVWrapper.swift index b0281ec7aa3..b74557c0af8 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/MediaSettingsAVWrapper.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/MediaSettingsAVWrapper.swift @@ -22,27 +22,27 @@ class FLTCamMediaSettingsAVWrapper { /// - Parameter captureDevice: The capture device. /// - Throws: An error if the device could not be locked for configuration. 
/// - Returns: A Bool indicating whether the device was successfully locked for configuration. - func lockDevice(_ captureDevice: FLTCaptureDevice) throws { + func lockDevice(_ captureDevice: CaptureDevice) throws { return try captureDevice.lockForConfiguration() } /// Release exclusive control over device hardware properties. /// - Parameter captureDevice: The capture device. - func unlockDevice(_ captureDevice: FLTCaptureDevice) { + func unlockDevice(_ captureDevice: CaptureDevice) { captureDevice.unlockForConfiguration() } /// When paired with commitConfiguration, allows a client to batch multiple configuration /// operations on a running session into atomic updates. /// - Parameter videoCaptureSession: The video capture session. - func beginConfiguration(for videoCaptureSession: FLTCaptureSession) { + func beginConfiguration(for videoCaptureSession: CaptureSession) { videoCaptureSession.beginConfiguration() } /// When preceded by beginConfiguration, allows a client to batch multiple configuration /// operations on a running session into atomic updates. /// - Parameter videoCaptureSession: The video capture session. - func commitConfiguration(for videoCaptureSession: FLTCaptureSession) { + func commitConfiguration(for videoCaptureSession: CaptureSession) { videoCaptureSession.commitConfiguration() } @@ -50,7 +50,7 @@ class FLTCamMediaSettingsAVWrapper { /// - Parameters: /// - duration: The frame duration. /// - captureDevice: The capture device - func setMinFrameDuration(_ duration: CMTime, on captureDevice: FLTCaptureDevice) { + func setMinFrameDuration(_ duration: CMTime, on captureDevice: CaptureDevice) { captureDevice.activeVideoMinFrameDuration = duration } @@ -58,7 +58,7 @@ class FLTCamMediaSettingsAVWrapper { /// - Parameters: /// - duration: The frame duration. 
/// - captureDevice: The capture device - func setMaxFrameDuration(_ duration: CMTime, on captureDevice: FLTCaptureDevice) { + func setMaxFrameDuration(_ duration: CMTime, on captureDevice: CaptureDevice) { captureDevice.activeVideoMaxFrameDuration = duration } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureDevice.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureDevice.m deleted file mode 100644 index 35ccd9790bb..00000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureDevice.m +++ /dev/null @@ -1,218 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -@import Flutter; - -#import "FLTCaptureDevice.h" - -@interface FLTDefaultCaptureDevice () -@property(nonatomic, strong) AVCaptureDevice *device; -@end - -@implementation FLTDefaultCaptureDevice - -- (instancetype)initWithDevice:(AVCaptureDevice *)device { - self = [super init]; - if (self) { - _device = device; - } - return self; -} - -// Device identifier -- (NSString *)uniqueID { - return self.device.uniqueID; -} - -// Position/Orientation -- (AVCaptureDevicePosition)position { - return self.device.position; -} - -- (AVCaptureDeviceType)deviceType { - return self.device.deviceType; -} - -// Format/Configuration -- (NSObject *)activeFormat { - return [[FLTDefaultCaptureDeviceFormat alloc] initWithFormat:self.device.activeFormat]; -} - -- (NSArray *> *)formats { - NSMutableArray> *wrappedFormats = - [NSMutableArray arrayWithCapacity:self.device.formats.count]; - for (AVCaptureDeviceFormat *format in self.device.formats) { - [wrappedFormats addObject:[[FLTDefaultCaptureDeviceFormat alloc] initWithFormat:format]]; - } - return wrappedFormats; -} - -- (void)setActiveFormat:(NSObject *)format { - self.device.activeFormat = 
format.format; -} - -// Flash/Torch -- (BOOL)hasFlash { - return self.device.hasFlash; -} - -- (BOOL)hasTorch { - return self.device.hasTorch; -} - -- (BOOL)isTorchAvailable { - return self.device.isTorchAvailable; -} - -- (AVCaptureTorchMode)torchMode { - return self.device.torchMode; -} - -- (void)setTorchMode:(AVCaptureTorchMode)torchMode { - self.device.torchMode = torchMode; -} - -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" -- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode { - return [self.device isFlashModeSupported:mode]; -} -#pragma clang diagnostic pop - -// Focus -- (BOOL)isFocusPointOfInterestSupported { - return self.device.isFocusPointOfInterestSupported; -} - -- (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { - return [self.device isFocusModeSupported:mode]; -} - -- (void)setFocusMode:(AVCaptureFocusMode)focusMode { - self.device.focusMode = focusMode; -} - -- (void)setFocusPointOfInterest:(CGPoint)point { - self.device.focusPointOfInterest = point; -} - -// Exposure -- (BOOL)isExposurePointOfInterestSupported { - return self.device.isExposurePointOfInterestSupported; -} - -- (void)setExposureMode:(AVCaptureExposureMode)exposureMode { - self.device.exposureMode = exposureMode; -} - -- (void)setExposurePointOfInterest:(CGPoint)point { - self.device.exposurePointOfInterest = point; -} - -- (float)minExposureTargetBias { - return self.device.minExposureTargetBias; -} - -- (float)maxExposureTargetBias { - return self.device.maxExposureTargetBias; -} - -- (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime))handler { - [self.device setExposureTargetBias:bias completionHandler:handler]; -} - -- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { - return [self.device isExposureModeSupported:mode]; -} - -// Zoom -- (CGFloat)maxAvailableVideoZoomFactor { - return self.device.maxAvailableVideoZoomFactor; -} - -- (CGFloat)minAvailableVideoZoomFactor { - return 
self.device.minAvailableVideoZoomFactor; -} - -- (CGFloat)videoZoomFactor { - return self.device.videoZoomFactor; -} - -- (void)setVideoZoomFactor:(CGFloat)factor { - self.device.videoZoomFactor = factor; -} - -// Camera Properties -- (float)lensAperture { - return self.device.lensAperture; -} - -- (CMTime)exposureDuration { - return self.device.exposureDuration; -} - -- (float)ISO { - return self.device.ISO; -} - -// Configuration Lock -- (BOOL)lockForConfiguration:(NSError **)error { - return [self.device lockForConfiguration:error]; -} - -- (void)unlockForConfiguration { - [self.device unlockForConfiguration]; -} - -- (CMTime)activeVideoMinFrameDuration { - return self.device.activeVideoMinFrameDuration; -} - -- (void)setActiveVideoMinFrameDuration:(CMTime)duration { - self.device.activeVideoMinFrameDuration = duration; -} - -- (CMTime)activeVideoMaxFrameDuration { - return self.device.activeVideoMaxFrameDuration; -} - -- (void)setActiveVideoMaxFrameDuration:(CMTime)duration { - self.device.activeVideoMaxFrameDuration = duration; -} - -@end - -@interface FLTDefaultCaptureInput () -@property(nonatomic, strong) AVCaptureInput *input; -@end - -@implementation FLTDefaultCaptureInput - -- (instancetype)initWithInput:(AVCaptureInput *)input { - self = [super init]; - if (self) { - _input = input; - } - return self; -} - -- (AVCaptureInput *)input { - return _input; -} - -- (NSArray *)ports { - return self.input.ports; -} - -@end - -@implementation FLTDefaultCaptureDeviceInputFactory - -- (NSObject *)deviceInputWithDevice:(NSObject *)device - error:(NSError **)error { - return [[FLTDefaultCaptureInput alloc] - initWithInput:[AVCaptureDeviceInput deviceInputWithDevice:device.device error:error]]; -} - -@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureSession.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureSession.m deleted file mode 100644 
index 04c19ce45e5..00000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCaptureSession.m +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#import "FLTCaptureSession.h" - -@interface FLTDefaultCaptureSession () -@property(nonatomic, strong) AVCaptureSession *captureSession; -@end - -@implementation FLTDefaultCaptureSession - -- (instancetype)initWithCaptureSession:(AVCaptureSession *)session { - self = [super init]; - if (self) { - _captureSession = session; - } - return self; -} - -- (void)beginConfiguration { - [_captureSession beginConfiguration]; -} - -- (void)commitConfiguration { - [_captureSession commitConfiguration]; -} - -- (void)startRunning { - [_captureSession startRunning]; -} - -- (void)stopRunning { - [_captureSession stopRunning]; -} - -- (BOOL)automaticallyConfiguresApplicationAudioSession { - return _captureSession.automaticallyConfiguresApplicationAudioSession; -} - -- (void)setAutomaticallyConfiguresApplicationAudioSession:(BOOL)value { - _captureSession.automaticallyConfiguresApplicationAudioSession = value; -} - -- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { - return [_captureSession canSetSessionPreset:preset]; -} - -- (void)addInputWithNoConnections:(NSObject *)input { - [_captureSession addInputWithNoConnections:input.input]; -} - -- (void)addOutputWithNoConnections:(AVCaptureOutput *)output { - [_captureSession addOutputWithNoConnections:output]; -} - -- (void)addConnection:(AVCaptureConnection *)connection { - [_captureSession addConnection:connection]; -} - -- (void)addOutput:(AVCaptureOutput *)output { - [_captureSession addOutput:output]; -} - -- (void)removeInput:(NSObject *)input { - [_captureSession removeInput:input.input]; -} - -- (void)removeOutput:(AVCaptureOutput *)output { - [_captureSession removeOutput:output]; -} - -- 
(void)setSessionPreset:(AVCaptureSessionPreset)sessionPreset { - _captureSession.sessionPreset = sessionPreset; -} - -- (AVCaptureSessionPreset)sessionPreset { - return _captureSession.sessionPreset; -} - -- (NSArray *)inputs { - return _captureSession.inputs; -} - -- (NSArray *)outputs { - return _captureSession.outputs; -} - -- (BOOL)canAddInput:(NSObject *)input { - return [_captureSession canAddInput:input.input]; -} - -- (BOOL)canAddOutput:(AVCaptureOutput *)output { - return [_captureSession canAddOutput:output]; -} - -- (BOOL)canAddConnection:(AVCaptureConnection *)connection { - return [_captureSession canAddConnection:connection]; -} - -- (void)addInput:(NSObject *)input { - [_captureSession addInput:input.input]; -} - -@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTFormatUtils.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTFormatUtils.m deleted file mode 100644 index e3136638412..00000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTFormatUtils.m +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -@import Foundation; -@import AVFoundation; - -#import "./include/camera_avfoundation/FLTFormatUtils.h" - -NS_ASSUME_NONNULL_BEGIN - -// Returns frame rate supported by format closest to targetFrameRate. 
-double FLTBestFrameRateForFormat(NSObject *format, double targetFrameRate) { - double bestFrameRate = 0; - double minDistance = DBL_MAX; - for (NSObject *range in format.videoSupportedFrameRateRanges) { - double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); - double distance = fabs(frameRate - targetFrameRate); - if (distance < minDistance) { - bestFrameRate = frameRate; - minDistance = distance; - } - } - return bestFrameRate; -} - -void FLTSelectBestFormatForRequestedFrameRate(NSObject *captureDevice, - FCPPlatformMediaSettings *mediaSettings, - VideoDimensionsForFormat videoDimensionsForFormat) { - CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); - double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; - FourCharCode preferredSubType = - CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); - NSObject *bestFormat = captureDevice.activeFormat; - double bestFrameRate = FLTBestFrameRateForFormat(bestFormat, targetFrameRate); - double minDistance = fabs(bestFrameRate - targetFrameRate); - BOOL isBestSubTypePreferred = YES; - for (NSObject *format in captureDevice.formats) { - CMVideoDimensions resolution = videoDimensionsForFormat(format); - if (resolution.width != targetResolution.width || - resolution.height != targetResolution.height) { - continue; - } - double frameRate = FLTBestFrameRateForFormat(format, targetFrameRate); - double distance = fabs(frameRate - targetFrameRate); - FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - BOOL isSubTypePreferred = subType == preferredSubType; - if (distance < minDistance || - (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred)) { - bestFormat = format; - bestFrameRate = frameRate; - minDistance = distance; - isBestSubTypePreferred = isSubTypePreferred; - } - } - captureDevice.activeFormat = bestFormat; - mediaSettings.framesPerSecond = 
@(bestFrameRate); -} - -NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureDevice.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureDevice.h deleted file mode 100644 index 0d18df9ed9c..00000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureDevice.h +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -@import AVFoundation; -@import Foundation; - -#import "FLTCaptureDeviceFormat.h" - -NS_ASSUME_NONNULL_BEGIN - -/// A protocol which is a direct passthrough to AVCaptureDevice. -/// It exists to allow replacing AVCaptureDevice in tests. -@protocol FLTCaptureDevice - -/// Underlying `AVCaptureDevice` instance. This is should not be used directly -/// in the plugin implementation code, but it exists so that other protocol default -/// implementation can pass the raw device to AVFoundation methods. 
-@property(nonatomic, readonly) AVCaptureDevice *device; - -// Device identifier -@property(nonatomic, readonly) NSString *uniqueID; - -// Position/Orientation -@property(nonatomic, readonly) AVCaptureDevicePosition position; - -// Lens type -@property(nonatomic, readonly) AVCaptureDeviceType deviceType; - -// Format/Configuration -@property(nonatomic, retain) NSObject *activeFormat; -@property(nonatomic, readonly) NSArray *> *formats; - -// Flash/Torch -@property(nonatomic, readonly) BOOL hasFlash; -@property(nonatomic, readonly) BOOL hasTorch; -@property(nonatomic, readonly, getter=isTorchAvailable) BOOL torchAvailable; -@property(nonatomic) AVCaptureTorchMode torchMode; -- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode; - -// Focus -@property(nonatomic, readonly, getter=isFocusPointOfInterestSupported) - BOOL focusPointOfInterestSupported; -- (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode; -- (void)setFocusMode:(AVCaptureFocusMode)focusMode; -- (void)setFocusPointOfInterest:(CGPoint)point; - -// Exposure -@property(nonatomic, readonly, getter=isExposurePointOfInterestSupported) - BOOL exposurePointOfInterestSupported; -- (void)setExposureMode:(AVCaptureExposureMode)exposureMode; -- (void)setExposurePointOfInterest:(CGPoint)point; -@property(nonatomic, readonly) float minExposureTargetBias; -@property(nonatomic, readonly) float maxExposureTargetBias; -- (void)setExposureTargetBias:(float)bias completionHandler:(void (^_Nullable)(CMTime))handler; -- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode; - -// Zoom -@property(nonatomic, readonly) CGFloat maxAvailableVideoZoomFactor; -@property(nonatomic, readonly) CGFloat minAvailableVideoZoomFactor; -@property(nonatomic) CGFloat videoZoomFactor; - -// Camera Properties -- (float)lensAperture; -- (CMTime)exposureDuration; -- (float)ISO; - -// Configuration Lock -- (BOOL)lockForConfiguration:(NSError *_Nullable *_Nullable)outError; -- (void)unlockForConfiguration; - -// Frame Duration 
-@property(nonatomic) CMTime activeVideoMinFrameDuration; -@property(nonatomic) CMTime activeVideoMaxFrameDuration; - -@end - -/// A protocol which is a direct passthrough to AVCaptureInput. -/// It exists to allow replacing AVCaptureInput in tests. -@protocol FLTCaptureInput - -/// Underlying input instance. It is exposed as raw AVCaptureInput has to be passed to some -/// AVFoundation methods. The plugin implementation code shouldn't use it though. -@property(nonatomic, readonly) AVCaptureInput *input; - -@property(nonatomic, readonly) NSArray *ports; -@end - -/// A protocol which wraps the creation of AVCaptureDeviceInput. -/// It exists to allow mocking instances of AVCaptureDeviceInput in tests. -@protocol FLTCaptureDeviceInputFactory -- (nullable NSObject *)deviceInputWithDevice:(NSObject *)device - error:(NSError **)error; -@end - -/// A default implementation of `FLTCaptureDevice` which is a direct passthrough to the underlying -/// `AVCaptureDevice`. -@interface FLTDefaultCaptureDevice : NSObject -- (instancetype)initWithDevice:(AVCaptureDevice *)device; -@end - -/// A default implementation of `FLTCaptureInput` which is a direct passthrough to the underlying -/// `AVCaptureInput`. -@interface FLTDefaultCaptureInput : NSObject -- (instancetype)initWithInput:(AVCaptureInput *)input; -@end - -/// A default implementation of FLTCaptureDeviceInputFactory protocol which -/// wraps a call to AVCaptureInput static method `deviceInputWithDevice`. 
-@interface FLTDefaultCaptureDeviceInputFactory : NSObject -@end - -NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureSession.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureSession.h deleted file mode 100644 index 54ce2b10ca3..00000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCaptureSession.h +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -@import AVFoundation; - -#import "FLTCaptureDevice.h" - -NS_ASSUME_NONNULL_BEGIN - -/// A protocol which is a direct passthrough to AVCaptureSession. -/// It exists to allow replacing AVCaptureSession in tests. -@protocol FLTCaptureSession - -@property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; -@property(nonatomic, readonly) NSArray *inputs; -@property(nonatomic, readonly) NSArray *outputs; -@property(nonatomic, assign) BOOL automaticallyConfiguresApplicationAudioSession; - -- (void)beginConfiguration; -- (void)commitConfiguration; -- (void)startRunning; -- (void)stopRunning; -- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset; -- (void)addInputWithNoConnections:(NSObject *)input; -- (void)addOutputWithNoConnections:(AVCaptureOutput *)output; -// Methods renamed in Swift for consistency with AVCaptureSession Swift interface. 
-- (void)addConnection:(AVCaptureConnection *)connection NS_SWIFT_NAME(addConnection(_:)); -- (void)addInput:(NSObject *)input NS_SWIFT_NAME(addInput(_:)); -- (void)addOutput:(AVCaptureOutput *)output NS_SWIFT_NAME(addOutput(_:)); -- (void)removeInput:(NSObject *)input NS_SWIFT_NAME(removeInput(_:)); -- (void)removeOutput:(AVCaptureOutput *)output NS_SWIFT_NAME(removeOutput(_:)); -- (BOOL)canAddInput:(NSObject *)input NS_SWIFT_NAME(canAddInput(_:)); -- (BOOL)canAddOutput:(AVCaptureOutput *)output NS_SWIFT_NAME(canAddOutput(_:)); -- (BOOL)canAddConnection:(AVCaptureConnection *)connection NS_SWIFT_NAME(canAddConnection(_:)); - -@end - -/// A default implementation of `FLTCaptureSession` which is a direct passthrough -/// to the underlying `AVCaptureSession`. -@interface FLTDefaultCaptureSession : NSObject -- (instancetype)initWithCaptureSession:(AVCaptureSession *)session; -@end - -NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTFormatUtils.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTFormatUtils.h deleted file mode 100644 index ab0097592d2..00000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTFormatUtils.h +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#import "FLTCaptureDevice.h" -#import "FLTCaptureDeviceFormat.h" -#import "messages.g.h" - -NS_ASSUME_NONNULL_BEGIN - -/// Determines the video dimensions (width and height) for a given capture device format. -/// Used in tests to mock CMVideoFormatDescriptionGetDimensions. 
-typedef CMVideoDimensions (^VideoDimensionsForFormat)(NSObject *); - -// Finds format with same resolution as current activeFormat in captureDevice for which -// bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond. -// Preferred are formats with the same subtype as current activeFormat. Sets this format -// as activeFormat and also updates mediaSettings.framesPerSecond to value which -// bestFrameRateForFormat returned for that format. -extern void FLTSelectBestFormatForRequestedFrameRate( - NSObject *captureDevice, FCPPlatformMediaSettings *mediaSettings, - VideoDimensionsForFormat videoDimensionsForFormat); - -NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h index 89407032e54..77fa73867ab 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h @@ -3,6 +3,5 @@ // found in the LICENSE file. #import "FLTCameraPermissionManager.h" -#import "FLTCaptureDevice.h" #import "QueueUtils.h" #import "messages.g.h" diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index 24f11c1bace..b774fe37924 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. 
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.22+4 +version: 0.9.22+5 environment: sdk: ^3.9.0