[camera_avfoundation] Test utils and mocks swift migration - part 3 (#8912)

Migrates camera test utils and mocks as part of https://github.com/flutter/flutter/issues/119109

Migrates the following mock classes to Swift (a short usage sketch follows the list):
- MockAssetWriter
- MockAssetWriterInput
- MockAssetWriterInputPixelBufferAdaptor
- MockCaptureDevice
- MockCaptureInput
- MockCaptureDeviceInputFactory
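
For context, here is a minimal sketch of how the migrated mocks are driven from tests, mirroring usages that appear in the diff below (`CameraSessionPresetsTests`, `SampleBufferTests`). The test class and method names are illustrative only, and the snippet assumes the RunnerTests target where these mock types and the plugin protocols are available:

```swift
import AVFoundation
import XCTest

// Hypothetical test showing the stub-closure pattern used by the Swift mocks.
final class MockUsageSketch: XCTestCase {
  func testStubClosures() throws {
    let device = MockCaptureDevice()
    // Properties are plain read/write vars so tests can set mocked values directly.
    device.isExposurePointOfInterestSupported = true
    device.lockForConfigurationStub = {
      // Replaces the Objective-C NSError** variant; throw here to simulate a lock failure.
    }
    try device.lockForConfiguration()

    let writer = MockAssetWriter()
    var status = AVAssetWriter.Status.unknown
    writer.startWritingStub = {
      status = .writing
      return true  // startWriting() now reports success via its return value.
    }
    writer.statusStub = { status }
    XCTAssertTrue(writer.startWriting())
    XCTAssertEqual(writer.status, .writing)
  }
}
```

Stubs that were Objective-C blocks taking an `NSError **` (for example `lockForConfigurationStub`) become throwing closures in Swift, which is why the updated call sites in the diff drop the explicit error parameter and `return true`.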

diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index f7dc035..c0dad23 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 0.9.18+13
+
+* Migrates test utils and mocks to Swift.
+* Aligns `FLTCaptureDevice` more closely with the `AVCaptureDevice` interface.
+
 ## 0.9.18+12
 
 * Migrates test utils and mocks to Swift.
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
index e3d9a2a..3ac9305 100644
--- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
@@ -17,9 +17,7 @@
 		7F8FD22C2D4D07DD001AF2C1 /* MockFlutterTextureRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F8FD22B2D4D07DD001AF2C1 /* MockFlutterTextureRegistry.m */; };
 		7F8FD22F2D4D0B88001AF2C1 /* MockFlutterBinaryMessenger.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F8FD22E2D4D0B88001AF2C1 /* MockFlutterBinaryMessenger.m */; };
 		7FCEDD352D43C2B900EA1CA8 /* MockDeviceOrientationProvider.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FCEDD342D43C2B900EA1CA8 /* MockDeviceOrientationProvider.m */; };
-		7FCEDD362D43C2B900EA1CA8 /* MockCaptureDevice.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FCEDD322D43C2B900EA1CA8 /* MockCaptureDevice.m */; };
 		7FD582202D579ECC003B1200 /* MockCapturePhotoOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FD5821F2D579ECC003B1200 /* MockCapturePhotoOutput.m */; };
-		7FD582272D57C020003B1200 /* MockAssetWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FD582262D57C020003B1200 /* MockAssetWriter.m */; };
 		7FD582352D57D97C003B1200 /* MockCaptureDeviceFormat.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FD582342D57D97C003B1200 /* MockCaptureDeviceFormat.m */; };
 		7FD83D2B2D5BA65B00F4DB7C /* MockCaptureConnection.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FD83D2A2D5BA65B00F4DB7C /* MockCaptureConnection.m */; };
 		970ADABE2D6740A900EFDCD9 /* MockWritableData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 970ADABD2D6740A900EFDCD9 /* MockWritableData.swift */; };
@@ -56,6 +54,12 @@
 		E15139182D80980900FEE47B /* FLTCamSetDeviceOrientationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E15139172D80980900FEE47B /* FLTCamSetDeviceOrientationTests.swift */; };
 		E16602952D8471C0003CFE12 /* FLTCamZoomTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E16602942D8471C0003CFE12 /* FLTCamZoomTests.swift */; };
 		E1A5F4E32D80259C0005BA64 /* FLTCamSetFlashModeTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1A5F4E22D80259C0005BA64 /* FLTCamSetFlashModeTests.swift */; };
+		E1ABED6C2D94392500AED9CC /* MockAssetWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = E15BC7E72D86D29F00F66474 /* MockAssetWriter.swift */; };
+		E1ABED6D2D94392700AED9CC /* MockAssetWriterInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = E15BC7E92D86D41F00F66474 /* MockAssetWriterInput.swift */; };
+		E1ABED6E2D94392900AED9CC /* MockAssetWriterInputPixelBufferAdaptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = E15BC7EB2D86D50200F66474 /* MockAssetWriterInputPixelBufferAdaptor.swift */; };
+		E1ABED6F2D943B2500AED9CC /* MockCaptureDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = E15BC7ED2D86D85500F66474 /* MockCaptureDevice.swift */; };
+		E1ABED722D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1ABED702D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift */; };
+		E1ABED732D943DC700AED9CC /* MockCaptureInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1ABED712D943DC700AED9CC /* MockCaptureInput.swift */; };
 		E1FFEAAD2D6C8DD700B14107 /* MockFLTCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAAC2D6C8DD700B14107 /* MockFLTCam.swift */; };
 		E1FFEAAF2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAAE2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift */; };
 		E1FFEAB12D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E1FFEAB02D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift */; };
@@ -102,14 +106,10 @@
 		7F8FD22B2D4D07DD001AF2C1 /* MockFlutterTextureRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockFlutterTextureRegistry.m; sourceTree = "<group>"; };
 		7F8FD22D2D4D0B73001AF2C1 /* MockFlutterBinaryMessenger.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockFlutterBinaryMessenger.h; sourceTree = "<group>"; };
 		7F8FD22E2D4D0B88001AF2C1 /* MockFlutterBinaryMessenger.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockFlutterBinaryMessenger.m; sourceTree = "<group>"; };
-		7FCEDD312D43C2B900EA1CA8 /* MockCaptureDevice.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureDevice.h; sourceTree = "<group>"; };
-		7FCEDD322D43C2B900EA1CA8 /* MockCaptureDevice.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureDevice.m; sourceTree = "<group>"; };
 		7FCEDD332D43C2B900EA1CA8 /* MockDeviceOrientationProvider.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockDeviceOrientationProvider.h; sourceTree = "<group>"; };
 		7FCEDD342D43C2B900EA1CA8 /* MockDeviceOrientationProvider.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockDeviceOrientationProvider.m; sourceTree = "<group>"; };
 		7FD5821F2D579ECC003B1200 /* MockCapturePhotoOutput.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoOutput.m; sourceTree = "<group>"; };
 		7FD582212D579ED9003B1200 /* MockCapturePhotoOutput.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoOutput.h; sourceTree = "<group>"; };
-		7FD582262D57C020003B1200 /* MockAssetWriter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockAssetWriter.m; sourceTree = "<group>"; };
-		7FD582282D57C02B003B1200 /* MockAssetWriter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockAssetWriter.h; sourceTree = "<group>"; };
 		7FD582342D57D97C003B1200 /* MockCaptureDeviceFormat.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureDeviceFormat.m; sourceTree = "<group>"; };
 		7FD582362D57D989003B1200 /* MockCaptureDeviceFormat.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureDeviceFormat.h; sourceTree = "<group>"; };
 		7FD83D292D5BA49100F4DB7C /* MockCaptureConnection.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureConnection.h; sourceTree = "<group>"; };
@@ -155,8 +155,14 @@
 		E142681C2D8483FD0046CBBC /* MockCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockCaptureSession.swift; sourceTree = "<group>"; };
 		E142681E2D8566230046CBBC /* CameraTestUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraTestUtils.swift; sourceTree = "<group>"; };
 		E15139172D80980900FEE47B /* FLTCamSetDeviceOrientationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FLTCamSetDeviceOrientationTests.swift; sourceTree = "<group>"; };
+		E15BC7E72D86D29F00F66474 /* MockAssetWriter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAssetWriter.swift; sourceTree = "<group>"; };
+		E15BC7E92D86D41F00F66474 /* MockAssetWriterInput.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAssetWriterInput.swift; sourceTree = "<group>"; };
+		E15BC7EB2D86D50200F66474 /* MockAssetWriterInputPixelBufferAdaptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAssetWriterInputPixelBufferAdaptor.swift; sourceTree = "<group>"; };
+		E15BC7ED2D86D85500F66474 /* MockCaptureDevice.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockCaptureDevice.swift; sourceTree = "<group>"; };
 		E16602942D8471C0003CFE12 /* FLTCamZoomTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FLTCamZoomTests.swift; sourceTree = "<group>"; };
 		E1A5F4E22D80259C0005BA64 /* FLTCamSetFlashModeTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FLTCamSetFlashModeTests.swift; sourceTree = "<group>"; };
+		E1ABED702D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MockCaptureDeviceInputFactory.swift; sourceTree = "<group>"; };
+		E1ABED712D943DC700AED9CC /* MockCaptureInput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MockCaptureInput.swift; sourceTree = "<group>"; };
 		E1FFEAAC2D6C8DD700B14107 /* MockFLTCam.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockFLTCam.swift; sourceTree = "<group>"; };
 		E1FFEAAE2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPluginCreateCameraTests.swift; sourceTree = "<group>"; };
 		E1FFEAB02D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPluginInitializeCameraTests.swift; sourceTree = "<group>"; };
@@ -235,8 +241,9 @@
 		7F29EB3F2D281C6D00740257 /* Mocks */ = {
 			isa = PBXGroup;
 			children = (
-				7FD582282D57C02B003B1200 /* MockAssetWriter.h */,
-				7FD582262D57C020003B1200 /* MockAssetWriter.m */,
+				E15BC7E72D86D29F00F66474 /* MockAssetWriter.swift */,
+				E15BC7E92D86D41F00F66474 /* MockAssetWriterInput.swift */,
+				E15BC7EB2D86D50200F66474 /* MockAssetWriterInputPixelBufferAdaptor.swift */,
 				7F8FD22D2D4D0B73001AF2C1 /* MockFlutterBinaryMessenger.h */,
 				7F8FD22E2D4D0B88001AF2C1 /* MockFlutterBinaryMessenger.m */,
 				7F8FD22A2D4D07A6001AF2C1 /* MockFlutterTextureRegistry.h */,
@@ -245,8 +252,9 @@
 				7F8FD2282D4BFABF001AF2C1 /* MockGlobalEventApi.m */,
 				7FD83D292D5BA49100F4DB7C /* MockCaptureConnection.h */,
 				7FD83D2A2D5BA65B00F4DB7C /* MockCaptureConnection.m */,
-				7FCEDD312D43C2B900EA1CA8 /* MockCaptureDevice.h */,
-				7FCEDD322D43C2B900EA1CA8 /* MockCaptureDevice.m */,
+				E15BC7ED2D86D85500F66474 /* MockCaptureDevice.swift */,
+				E1ABED702D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift */,
+				E1ABED712D943DC700AED9CC /* MockCaptureInput.swift */,
 				7FD582362D57D989003B1200 /* MockCaptureDeviceFormat.h */,
 				7FD582342D57D97C003B1200 /* MockCaptureDeviceFormat.m */,
 				7FD582212D579ED9003B1200 /* MockCapturePhotoOutput.h */,
@@ -554,8 +562,11 @@
 				972CA92D2D5A28C4004B846F /* QueueUtilsTests.swift in Sources */,
 				E1FFEAB12D6CDE5B00B14107 /* CameraPluginInitializeCameraTests.swift in Sources */,
 				979B3DFB2D5B6BC7009BDE1A /* ExceptionCatcher.m in Sources */,
+				E1ABED6E2D94392900AED9CC /* MockAssetWriterInputPixelBufferAdaptor.swift in Sources */,
+				E1ABED6C2D94392500AED9CC /* MockAssetWriter.swift in Sources */,
 				E1A5F4E32D80259C0005BA64 /* FLTCamSetFlashModeTests.swift in Sources */,
 				7FD83D2B2D5BA65B00F4DB7C /* MockCaptureConnection.m in Sources */,
+				E1ABED6D2D94392700AED9CC /* MockAssetWriterInput.swift in Sources */,
 				977A25242D5A511600931E34 /* CameraPermissionTests.swift in Sources */,
 				970ADABE2D6740A900EFDCD9 /* MockWritableData.swift in Sources */,
 				7F8FD2292D4BFABF001AF2C1 /* MockGlobalEventApi.m in Sources */,
@@ -573,14 +584,16 @@
 				979B3E002D5B9E6C009BDE1A /* CameraMethodChannelTests.swift in Sources */,
 				E1FFEAAF2D6CDA8C00B14107 /* CameraPluginCreateCameraTests.swift in Sources */,
 				97DB234D2D566D0700CEFE66 /* CameraPreviewPauseTests.swift in Sources */,
+				E1ABED732D943DC700AED9CC /* MockCaptureInput.swift in Sources */,
 				970ADAC02D6764CC00EFDCD9 /* MockEventChannel.swift in Sources */,
+				E1ABED6F2D943B2500AED9CC /* MockCaptureDevice.swift in Sources */,
+				E1ABED722D943DC700AED9CC /* MockCaptureDeviceInputFactory.swift in Sources */,
 				977A25202D5A439300931E34 /* AvailableCamerasTests.swift in Sources */,
 				E142681F2D8566230046CBBC /* CameraTestUtils.swift in Sources */,
 				E1FFEAAD2D6C8DD700B14107 /* MockFLTCam.swift in Sources */,
 				7F29EB292D26A59000740257 /* MockCameraDeviceDiscoverer.m in Sources */,
 				E16602952D8471C0003CFE12 /* FLTCamZoomTests.swift in Sources */,
 				97BD4A102D5CE13500F857D5 /* CameraSessionPresetsTests.swift in Sources */,
-				7FD582272D57C020003B1200 /* MockAssetWriter.m in Sources */,
 				979B3E022D5BA48F009BDE1A /* CameraOrientationTests.swift in Sources */,
 				E12C4FF62D68C69000515E70 /* CameraPluginDelegatingMethodTests.swift in Sources */,
 				977A25222D5A49EC00931E34 /* FLTCamFocusTests.swift in Sources */,
@@ -588,7 +601,6 @@
 				7FCEDD352D43C2B900EA1CA8 /* MockDeviceOrientationProvider.m in Sources */,
 				E11D6A8F2D81B81D0031E6C5 /* MockCaptureVideoDataOutput.swift in Sources */,
 				977CAC9F2D5E5180001E5DC3 /* ThreadSafeEventChannelTests.swift in Sources */,
-				7FCEDD362D43C2B900EA1CA8 /* MockCaptureDevice.m in Sources */,
 				7F8FD22C2D4D07DD001AF2C1 /* MockFlutterTextureRegistry.m in Sources */,
 				97C0FFAE2D5E023200A36284 /* SavePhotoDelegateTests.swift in Sources */,
 			);
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift
index a89f2cb..07fb2c8 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift
@@ -25,9 +25,8 @@
     let captureDeviceMock = MockCaptureDevice()
     captureDeviceMock.formats = [captureFormatMock]
     captureDeviceMock.activeFormat = captureFormatMock
-    captureDeviceMock.lockForConfigurationStub = { error in
+    captureDeviceMock.lockForConfigurationStub = {
       lockForConfigurationExpectation.fulfill()
-      return true
     }
 
     let configuration = CameraTestUtils.createTestCameraConfiguration()
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
index e320beb..1c978db 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift
@@ -40,7 +40,6 @@
     captureDeviceFormatMock2.videoSupportedFrameRateRanges = [frameRateRangeMock2]
 
     let captureDeviceMock = MockCaptureDevice()
-    captureDeviceMock.lockForConfigurationStub = { _ in true }
     captureDeviceMock.formats = [captureDeviceFormatMock1, captureDeviceFormatMock2]
 
     var currentFormat: FLTCaptureDeviceFormat = captureDeviceFormatMock1
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift
index d6f0845..038a5a4 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift
@@ -64,7 +64,7 @@
     // UI is currently in landscape left orientation.
     mockDeviceOrientationProvider.orientation = .landscapeLeft
     // Exposure point of interest is supported.
-    mockDevice.exposurePointOfInterestSupported = true
+    mockDevice.isExposurePointOfInterestSupported = true
 
     // Verify the focus point of interest has been set.
     var setPoint = CGPoint.zero
@@ -89,7 +89,7 @@
     // UI is currently in landscape left orientation.
     mockDeviceOrientationProvider.orientation = .landscapeLeft
     // Exposure point of interest is not supported.
-    mockDevice.exposurePointOfInterestSupported = false
+    mockDevice.isExposurePointOfInterestSupported = false
 
     let expectation = expectation(description: "Completion with error")
 
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift
index 9bb3c2a..df9ac91 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift
@@ -124,7 +124,7 @@
     // UI is currently in landscape left orientation.
     mockDeviceOrientationProvider.orientation = .landscapeLeft
     // Focus point of interest is supported.
-    mockDevice.focusPointOfInterestSupported = true
+    mockDevice.isFocusPointOfInterestSupported = true
 
     var setFocusPointOfInterestCalled = false
     mockDevice.setFocusPointOfInterestStub = { point in
@@ -145,7 +145,7 @@
     // UI is currently in landscape left orientation.
     mockDeviceOrientationProvider.orientation = .landscapeLeft
     // Focus point of interest is not supported.
-    mockDevice.focusPointOfInterestSupported = false
+    mockDevice.isFocusPointOfInterestSupported = false
 
     let expectation = self.expectation(description: "Completion with error")
 
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift
index 57c340a..b3320de 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift
@@ -28,7 +28,7 @@
 
     var setVideoZoomFactorCalled = false
     mockDevice.setVideoZoomFactorStub = { zoom in
-      XCTAssertEqual(zoom, Float(targetZoom))
+      XCTAssertEqual(zoom, targetZoom)
       setVideoZoomFactorCalled = true
     }
 
@@ -85,7 +85,7 @@
 
     let targetZoom = CGFloat(1.0)
 
-    mockDevice.maxAvailableVideoZoomFactor = Float(targetZoom)
+    mockDevice.maxAvailableVideoZoomFactor = CGFloat(targetZoom)
 
     XCTAssertEqual(camera.maximumAvailableZoomFactor, targetZoom)
   }
@@ -95,7 +95,7 @@
 
     let targetZoom = CGFloat(1.0)
 
-    mockDevice.minAvailableVideoZoomFactor = Float(targetZoom)
+    mockDevice.minAvailableVideoZoomFactor = CGFloat(targetZoom)
 
     XCTAssertEqual(camera.minimumAvailableZoomFactor, targetZoom)
   }
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.h
deleted file mode 100644
index 2c02248..0000000
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.h
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-@import camera_avfoundation;
-@import AVFoundation;
-
-/// Mock implementation of `FLTAssetWriter` protocol which allows injecting a custom
-/// implementation.
-@interface MockAssetWriter : NSObject <FLTAssetWriter>
-
-// Properties re-declared as read/write so a mocked value can be set during testing.
-@property(nonatomic, strong) NSError *error;
-
-// Stubs that are called when the corresponding public method is called.
-@property(nonatomic, copy) AVAssetWriterStatus (^statusStub)(void);
-@property(nonatomic, copy) void (^getStatusStub)(void);
-@property(nonatomic, copy) void (^startWritingStub)(void);
-@property(nonatomic, copy) void (^finishWritingStub)(void (^)(void));
-
-@end
-
-/// Mock implementation of `FLTAssetWriterInput` protocol which allows injecting a custom
-/// implementation.
-@interface MockAssetWriterInput : NSObject <FLTAssetWriterInput>
-
-// Properties re-declared as read/write so a mocked value can be set during testing.
-@property(nonatomic, strong) AVAssetWriterInput *input;
-@property(nonatomic, assign) BOOL readyForMoreMediaData;
-@property(nonatomic, assign) BOOL expectsMediaDataInRealTime;
-
-// Stub that is called when the `appendSampleBuffer` method is called.
-@property(nonatomic, copy) BOOL (^appendSampleBufferStub)(CMSampleBufferRef);
-
-@end
-
-/// Mock implementation of `FLTAssetWriterInput` protocol which allows injecting a custom
-/// implementation.
-@interface MockAssetWriterInputPixelBufferAdaptor : NSObject <FLTAssetWriterInputPixelBufferAdaptor>
-
-// Stub that is called when the `appendPixelBuffer` method is called.
-@property(nonatomic, copy) BOOL (^appendPixelBufferStub)(CVPixelBufferRef, CMTime);
-
-@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.m
deleted file mode 100644
index 866ec37..0000000
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.m
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "MockAssetWriter.h"
-
-@implementation MockAssetWriter
-
-- (BOOL)startWriting {
-  if (self.startWritingStub) {
-    self.startWritingStub();
-  }
-  return YES;
-}
-
-- (AVAssetWriterStatus)status {
-  if (_statusStub) {
-    return _statusStub();
-  }
-  return AVAssetWriterStatusUnknown;
-}
-
-- (void)finishWritingWithCompletionHandler:(void (^)(void))handler {
-  if (self.finishWritingStub) {
-    self.finishWritingStub(handler);
-  }
-}
-
-- (void)startSessionAtSourceTime:(CMTime)startTime {
-}
-
-- (void)addInput:(AVAssetWriterInput *)input {
-}
-
-@end
-
-@implementation MockAssetWriterInput
-
-- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer {
-  if (self.appendSampleBufferStub) {
-    return self.appendSampleBufferStub(sampleBuffer);
-  }
-  return YES;
-}
-
-@end
-
-@implementation MockAssetWriterInputPixelBufferAdaptor
-
-- (BOOL)appendPixelBuffer:(CVPixelBufferRef)pixelBuffer
-     withPresentationTime:(CMTime)presentationTime {
-  if (self.appendPixelBufferStub) {
-    return self.appendPixelBufferStub(pixelBuffer, presentationTime);
-  }
-  return YES;
-}
-
-@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.swift
new file mode 100644
index 0000000..f47e685
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.swift
@@ -0,0 +1,29 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// Mock implementation of `FLTAssetWriter` protocol which allows injecting a custom
+/// implementation.
+final class MockAssetWriter: NSObject, FLTAssetWriter {
+  var statusStub: (() -> AVAssetWriter.Status)?
+  var startWritingStub: (() -> Bool)?
+  var finishWritingStub: ((() -> Void) -> Void)?
+
+  var status: AVAssetWriter.Status {
+    return statusStub?() ?? .unknown
+  }
+
+  var error: Error? = nil
+
+  func startWriting() -> Bool {
+    return startWritingStub?() ?? true
+  }
+
+  func finishWriting(completionHandler handler: @escaping () -> Void) {
+    finishWritingStub?(handler)
+  }
+
+  func startSession(atSourceTime startTime: CMTime) {}
+
+  func add(_ input: AVAssetWriterInput) {}
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriterInput.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriterInput.swift
new file mode 100644
index 0000000..3b06941
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriterInput.swift
@@ -0,0 +1,21 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// Mock implementation of `FLTAssetWriterInput` protocol which allows injecting a custom
+/// implementation.
+final class MockAssetWriterInput: NSObject, FLTAssetWriterInput {
+  var appendStub: ((CMSampleBuffer) -> Bool)?
+
+  var input: AVAssetWriterInput {
+    preconditionFailure("Attempted to access unimplemented property: input")
+  }
+
+  var expectsMediaDataInRealTime = false
+
+  var readyForMoreMediaData = false
+
+  func append(_ sampleBuffer: CMSampleBuffer) -> Bool {
+    return appendStub?(sampleBuffer) ?? false
+  }
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriterInputPixelBufferAdaptor.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriterInputPixelBufferAdaptor.swift
new file mode 100644
index 0000000..20a2ade
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriterInputPixelBufferAdaptor.swift
@@ -0,0 +1,14 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// Mock implementation of `FLTAssetWriterInputPixelBufferAdaptor` protocol which allows injecting a custom
+/// implementation.
+final class MockAssetWriterInputPixelBufferAdaptor: NSObject, FLTAssetWriterInputPixelBufferAdaptor
+{
+  var appendStub: ((CVPixelBuffer, CMTime) -> Bool)?
+
+  func append(_ pixelBuffer: CVPixelBuffer, withPresentationTime presentationTime: CMTime) -> Bool {
+    appendStub?(pixelBuffer, presentationTime) ?? true
+  }
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.h
deleted file mode 100644
index 71589e1..0000000
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.h
+++ /dev/null
@@ -1,127 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-@import camera_avfoundation;
-#if __has_include(<camera_avfoundation/camera_avfoundation-umbrella.h>)
-@import camera_avfoundation.Test;
-#endif
-@import AVFoundation;
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface MockCaptureDevice : NSObject <FLTCaptureDevice>
-
-@property(nonatomic, assign) NSString *uniqueID;
-
-// Position/Orientation
-@property(nonatomic, assign) AVCaptureDevicePosition position;
-
-// Format/Configuration
-@property(nonatomic, strong) NSArray<NSObject<FLTCaptureDeviceFormat> *> *formats;
-/// Overrides the default implementation of getting the active format.
-@property(nonatomic, copy) NSObject<FLTCaptureDeviceFormat> * (^activeFormatStub)(void);
-/// Overrides the default implementation of setting active format.
-/// @param format The format being set
-@property(nonatomic, copy) void (^setActiveFormatStub)(NSObject<FLTCaptureDeviceFormat> *format);
-
-// Flash/Torch
-@property(nonatomic, assign) BOOL hasFlash;
-@property(nonatomic, assign) BOOL hasTorch;
-@property(nonatomic, assign) BOOL isTorchAvailable;
-@property(nonatomic, assign) AVCaptureTorchMode torchMode;
-/// Overrides the default implementation of setting torch mode.
-/// @param mode The torch mode being set
-@property(nonatomic, copy) void (^setTorchModeStub)(AVCaptureTorchMode mode);
-@property(nonatomic, copy) AVCaptureTorchMode (^getTorchModeStub)(void);
-@property(nonatomic, assign) BOOL flashModeSupported;
-
-// Focus
-@property(nonatomic, assign) BOOL focusPointOfInterestSupported;
-/// Overrides the default implementation of checking if focus mode is supported.
-/// @param mode The focus mode to check
-/// @return Whether the focus mode is supported
-@property(nonatomic, copy) BOOL (^isFocusModeSupportedStub)(AVCaptureFocusMode mode);
-@property(nonatomic, assign) AVCaptureFocusMode focusMode;
-/// Overrides the default implementation of setting focus mode.
-/// @param mode The focus mode being set
-@property(nonatomic, copy) void (^setFocusModeStub)(AVCaptureFocusMode mode);
-@property(nonatomic, assign) CGPoint focusPointOfInterest;
-/// Overrides the default implementation of setting focus point of interest.
-/// @param point The focus point being set
-@property(nonatomic, copy) void (^setFocusPointOfInterestStub)(CGPoint point);
-
-// Exposure
-@property(nonatomic, assign) BOOL exposurePointOfInterestSupported;
-@property(nonatomic, assign) AVCaptureExposureMode exposureMode;
-@property(nonatomic, copy) BOOL (^isExposureModeSupportedStub)(AVCaptureExposureMode mode);
-/// Overrides the default implementation of setting exposure mode.
-/// @param mode The exposure mode being set
-@property(nonatomic, copy) void (^setExposureModeStub)(AVCaptureExposureMode mode);
-@property(nonatomic, assign) CGPoint exposurePointOfInterest;
-/// Override the default implementation of setting exposure point of interest.
-/// @param point The exposure point being set
-@property(nonatomic, copy) void (^setExposurePointOfInterestStub)(CGPoint point);
-@property(nonatomic, assign) float minExposureTargetBias;
-@property(nonatomic, assign) float maxExposureTargetBias;
-/// Overrides the default implementation of setting exposure target bias.
-/// @param bias The exposure bias being set
-/// @param handler The completion handler to be called
-@property(nonatomic, copy) void (^setExposureTargetBiasStub)
-    (float bias, void (^_Nullable handler)(CMTime));
-
-// Zoom
-@property(nonatomic, assign) float maxAvailableVideoZoomFactor;
-@property(nonatomic, assign) float minAvailableVideoZoomFactor;
-@property(nonatomic, assign) float videoZoomFactor;
-/// Overrides the default implementation of setting video zoom factor.
-/// @param factor The zoom factor being set
-@property(nonatomic, copy) void (^setVideoZoomFactorStub)(float factor);
-
-// Camera Properties
-@property(nonatomic, assign) float lensAperture;
-@property(nonatomic, assign) CMTime exposureDuration;
-@property(nonatomic, assign) float ISO;
-
-// Configuration Lock
-/// Overrides the default implementation of locking device for configuration.
-/// @param error Error pointer to be set if lock fails
-@property(nonatomic, copy) BOOL (^lockForConfigurationStub)(NSError **error);
-/// Overrides the default implementation of unlocking device configuration.
-@property(nonatomic, copy) void (^unlockForConfigurationStub)(void);
-
-// Frame Duration
-@property(nonatomic, assign) CMTime activeVideoMinFrameDuration;
-@property(nonatomic, assign) CMTime activeVideoMaxFrameDuration;
-/// Overrides the default implementation of setting minimum frame duration.
-/// @param duration The minimum frame duration being set
-@property(nonatomic, copy) void (^setActiveVideoMinFrameDurationStub)(CMTime duration);
-/// Overrides the default implementation of setting maximum frame duration.
-/// @param duration The maximum frame duration being set
-@property(nonatomic, copy) void (^setActiveVideoMaxFrameDurationStub)(CMTime duration);
-
-@end
-
-/// A mocked implementation of FLTCaptureDeviceInputFactory which allows injecting a custom
-/// implementation.
-@interface MockCaptureInput : NSObject <FLTCaptureInput>
-
-/// This property is re-declared to be read/write to allow setting a mocked value for testing.
-@property(nonatomic, strong) NSArray<AVCaptureInputPort *> *ports;
-
-@end
-
-/// A mocked implementation of FLTCaptureDeviceInputFactory which allows injecting a custom
-/// implementation.
-@interface MockCaptureDeviceInputFactory : NSObject <FLTCaptureDeviceInputFactory>
-
-/// Initializes a new instance with the given mock device input. Whenever `deviceInputWithDevice` is
-/// called on this instance, it will return the mock device input.
-- (nonnull instancetype)initWithMockDeviceInput:(NSObject<FLTCaptureInput> *)mockDeviceInput;
-
-/// The mock device input to be returned by `deviceInputWithDevice`.
-@property(nonatomic, strong) NSObject<FLTCaptureInput> *mockDeviceInput;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.m
deleted file mode 100644
index b8ff1e3..0000000
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.m
+++ /dev/null
@@ -1,154 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "MockCaptureDevice.h"
-
-@import camera_avfoundation;
-#if __has_include(<camera_avfoundation/camera_avfoundation-umbrella.h>)
-@import camera_avfoundation.Test;
-#endif
-@import AVFoundation;
-
-@implementation MockCaptureDevice
-
-- (NSObject<FLTCaptureDeviceFormat> *)activeFormat {
-  if (self.activeFormatStub) {
-    return self.activeFormatStub();
-  }
-  return nil;
-}
-
-- (void)setActiveFormat:(NSObject<FLTCaptureDeviceFormat> *)format {
-  if (self.setActiveFormatStub) {
-    self.setActiveFormatStub(format);
-  }
-}
-
-- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode {
-  return self.flashModeSupported;
-}
-
-- (void)setTorchMode:(AVCaptureTorchMode)mode {
-  if (self.setTorchModeStub) {
-    self.setTorchModeStub(mode);
-  }
-}
-
-- (AVCaptureTorchMode)torchMode {
-  if (self.getTorchModeStub) {
-    return self.getTorchModeStub();
-  }
-  return AVCaptureTorchModeOff;
-}
-
-- (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode {
-  if (self.isFocusModeSupportedStub) {
-    return self.isFocusModeSupportedStub(mode);
-  }
-  return NO;
-}
-
-- (void)setFocusMode:(AVCaptureFocusMode)mode {
-  if (self.setFocusModeStub) {
-    self.setFocusModeStub(mode);
-  }
-}
-
-- (void)setFocusPointOfInterest:(CGPoint)point {
-  if (self.setFocusPointOfInterestStub) {
-    self.setFocusPointOfInterestStub(point);
-  }
-}
-
-- (void)setExposureMode:(AVCaptureExposureMode)mode {
-  if (self.setExposureModeStub) {
-    self.setExposureModeStub(mode);
-  }
-}
-
-- (void)setExposurePointOfInterest:(CGPoint)point {
-  if (self.setExposurePointOfInterestStub) {
-    self.setExposurePointOfInterestStub(point);
-  }
-}
-
-- (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime))handler {
-  if (self.setExposureTargetBiasStub) {
-    self.setExposureTargetBiasStub(bias, handler);
-  }
-}
-
-- (void)setVideoZoomFactor:(float)factor {
-  if (self.setVideoZoomFactorStub) {
-    self.setVideoZoomFactorStub(factor);
-  }
-}
-
-- (BOOL)lockForConfiguration:(NSError **)error {
-  if (self.lockForConfigurationStub) {
-    return self.lockForConfigurationStub(error);
-  }
-  return YES;
-}
-
-- (void)unlockForConfiguration {
-  if (self.unlockForConfigurationStub) {
-    self.unlockForConfigurationStub();
-  }
-}
-
-- (void)setActiveVideoMinFrameDuration:(CMTime)duration {
-  if (self.setActiveVideoMinFrameDurationStub) {
-    self.setActiveVideoMinFrameDurationStub(duration);
-  }
-}
-
-- (void)setActiveVideoMaxFrameDuration:(CMTime)duration {
-  if (self.setActiveVideoMaxFrameDurationStub) {
-    self.setActiveVideoMaxFrameDurationStub(duration);
-  }
-}
-
-- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode {
-  if (self.isExposureModeSupportedStub) {
-    return self.isExposureModeSupportedStub(mode);
-  } else {
-    return NO;
-  }
-}
-
-@synthesize device;
-
-@end
-
-@implementation MockCaptureInput
-@synthesize ports;
-@synthesize input;
-@end
-
-@implementation MockCaptureDeviceInputFactory
-
-- (nonnull instancetype)init {
-  self = [super init];
-  if (self) {
-    _mockDeviceInput = [[MockCaptureInput alloc] init];
-  }
-  return self;
-}
-
-- (nonnull instancetype)initWithMockDeviceInput:
-    (nonnull NSObject<FLTCaptureInput> *)mockDeviceInput {
-  self = [super init];
-  if (self) {
-    _mockDeviceInput = mockDeviceInput;
-  }
-  return self;
-}
-
-- (NSObject<FLTCaptureInput> *)deviceInputWithDevice:(NSObject<FLTCaptureDevice> *)device
-                                               error:(NSError **)error {
-  return _mockDeviceInput;
-}
-
-@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.swift
new file mode 100644
index 0000000..53886ad
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDevice.swift
@@ -0,0 +1,114 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// A mock implementation of `FLTCaptureDevice` that allows mocking the class
+/// properties.
+class MockCaptureDevice: NSObject, FLTCaptureDevice {
+  var activeFormatStub: (() -> FLTCaptureDeviceFormat)?
+  var setActiveFormatStub: ((FLTCaptureDeviceFormat) -> Void)?
+  var getTorchModeStub: (() -> AVCaptureDevice.TorchMode)?
+  var setTorchModeStub: ((AVCaptureDevice.TorchMode) -> Void)?
+  var isFocusModeSupportedStub: ((AVCaptureDevice.FocusMode) -> Bool)?
+  var setFocusModeStub: ((AVCaptureDevice.FocusMode) -> Void)?
+  var setFocusPointOfInterestStub: ((CGPoint) -> Void)?
+  var setExposureModeStub: ((AVCaptureDevice.ExposureMode) -> Void)?
+  var setExposurePointOfInterestStub: ((CGPoint) -> Void)?
+  var setExposureTargetBiasStub: ((Float, ((CMTime) -> Void)?) -> Void)?
+  var isExposureModeSupportedStub: ((AVCaptureDevice.ExposureMode) -> Bool)?
+  var setVideoZoomFactorStub: ((CGFloat) -> Void)?
+  var lockForConfigurationStub: (() throws -> Void)?
+
+  var device: AVCaptureDevice {
+    preconditionFailure("Attempted to access unimplemented property: device")
+  }
+
+  var uniqueID = ""
+  var position = AVCaptureDevice.Position.unspecified
+
+  var activeFormat: FLTCaptureDeviceFormat {
+    get {
+      activeFormatStub?() ?? MockCaptureDeviceFormat()
+    }
+    set {
+      setActiveFormatStub?(newValue)
+    }
+  }
+
+  var formats: [FLTCaptureDeviceFormat] = []
+  var hasFlash = false
+  var hasTorch = false
+  var isTorchAvailable = false
+  var torchMode: AVCaptureDevice.TorchMode {
+    get {
+      getTorchModeStub?() ?? .off
+    }
+    set {
+      setTorchModeStub?(newValue)
+    }
+  }
+  var isFocusPointOfInterestSupported = false
+  var maxAvailableVideoZoomFactor = CGFloat(0)
+  var minAvailableVideoZoomFactor = CGFloat(0)
+  var videoZoomFactor: CGFloat {
+    get { 0 }
+    set {
+      setVideoZoomFactorStub?(newValue)
+    }
+  }
+  var isExposurePointOfInterestSupported = false
+  var minExposureTargetBias = Float(0)
+  var maxExposureTargetBias = Float(0)
+  var activeVideoMinFrameDuration = CMTime(value: 1, timescale: 1)
+  var activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 1)
+
+  func isFlashModeSupported(_ mode: AVCaptureDevice.FlashMode) -> Bool {
+    return false
+  }
+
+  func isFocusModeSupported(_ mode: AVCaptureDevice.FocusMode) -> Bool {
+    return isFocusModeSupportedStub?(mode) ?? false
+  }
+
+  func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode) {
+    setFocusModeStub?(focusMode)
+  }
+
+  func setFocusPointOfInterest(_ point: CGPoint) {
+    setFocusPointOfInterestStub?(point)
+  }
+
+  func setExposureMode(_ exposureMode: AVCaptureDevice.ExposureMode) {
+    setExposureModeStub?(exposureMode)
+  }
+
+  func setExposurePointOfInterest(_ point: CGPoint) {
+    setExposurePointOfInterestStub?(point)
+  }
+
+  func setExposureTargetBias(_ bias: Float, completionHandler handler: ((CMTime) -> Void)? = nil) {
+    setExposureTargetBiasStub?(bias, handler)
+  }
+
+  func isExposureModeSupported(_ mode: AVCaptureDevice.ExposureMode) -> Bool {
+    return isExposureModeSupportedStub?(mode) ?? false
+  }
+
+  func lensAperture() -> Float {
+    return 0
+  }
+
+  func exposureDuration() -> CMTime {
+    return CMTime(value: 1, timescale: 1)
+  }
+
+  func iso() -> Float {
+    return 0
+  }
+
+  func lockForConfiguration() throws {
+    try lockForConfigurationStub?()
+  }
+
+  func unlockForConfiguration() {}
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceInputFactory.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceInputFactory.swift
new file mode 100644
index 0000000..bbca49a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceInputFactory.swift
@@ -0,0 +1,11 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// A mocked implementation of FLTCaptureDeviceInputFactory which allows injecting a custom
+/// implementation.
+final class MockCaptureDeviceInputFactory: NSObject, FLTCaptureDeviceInputFactory {
+  func deviceInput(with device: FLTCaptureDevice) throws -> FLTCaptureInput {
+    return MockCaptureInput()
+  }
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureInput.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureInput.swift
new file mode 100644
index 0000000..6c8151a
--- /dev/null
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureInput.swift
@@ -0,0 +1,13 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/// A mocked implementation of FLTCaptureInput which allows injecting a custom
+/// implementation.
+final class MockCaptureInput: NSObject, FLTCaptureInput {
+  var input: AVCaptureInput {
+    preconditionFailure("Attempted to access unimplemented property: input")
+  }
+
+  var ports: [AVCaptureInput.Port] = []
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/RunnerTests-Bridging-Header.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/RunnerTests-Bridging-Header.h
index 8316eb9..dabe017 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/RunnerTests-Bridging-Header.h
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/RunnerTests-Bridging-Header.h
@@ -11,10 +11,8 @@
 #import "camera_avfoundation/FLTThreadSafeEventChannel.h"
 
 // Mocks, protocols.
-#import "MockAssetWriter.h"
 #import "MockCameraDeviceDiscoverer.h"
 #import "MockCaptureConnection.h"
-#import "MockCaptureDevice.h"
 #import "MockCaptureDeviceFormat.h"
 #import "MockCapturePhotoOutput.h"
 #import "MockDeviceOrientationProvider.h"
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
index bb8e18e..932f93e 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift
@@ -144,6 +144,7 @@
     var status = AVAssetWriter.Status.unknown
     writerMock.startWritingStub = {
       status = .writing
+      return true
     }
     writerMock.statusStub = {
       return status
@@ -153,12 +154,12 @@
     let audioSample = CameraTestUtils.createTestAudioSampleBuffer()
 
     var writtenSamples: [String] = []
-    adaptorMock.appendPixelBufferStub = { buffer, time in
+    adaptorMock.appendStub = { buffer, time in
       writtenSamples.append("video")
       return true
     }
     inputMock.readyForMoreMediaData = true
-    inputMock.appendSampleBufferStub = { buffer in
+    inputMock.appendStub = { buffer in
       writtenSamples.append("audio")
       return true
     }
@@ -183,13 +184,14 @@
     var status = AVAssetWriter.Status.unknown
     writerMock.startWritingStub = {
       status = .writing
+      return true
     }
     writerMock.statusStub = {
       return status
     }
 
     var videoAppended = false
-    adaptorMock.appendPixelBufferStub = { buffer, time in
+    adaptorMock.appendStub = { buffer, time in
       XCTAssert(CMTIME_IS_NUMERIC(time))
       videoAppended = true
       return true
@@ -197,8 +199,8 @@
 
     var audioAppended = false
     inputMock.readyForMoreMediaData = true
-    inputMock.appendSampleBufferStub = { buffer in
-      let sampleTime = CMSampleBufferGetPresentationTimeStamp(buffer!)
+    inputMock.appendStub = { buffer in
+      let sampleTime = CMSampleBufferGetPresentationTimeStamp(buffer)
       XCTAssert(CMTIME_IS_NUMERIC(sampleTime))
       audioAppended = true
       return true
@@ -223,7 +225,7 @@
     let videoSample = CameraTestUtils.createTestSampleBuffer()
 
     var sampleAppended = false
-    adaptorMock.appendPixelBufferStub = { buffer, time in
+    adaptorMock.appendStub = { buffer, time in
       sampleAppended = true
       return true
     }
@@ -249,6 +251,7 @@
     var status = AVAssetWriter.Status.unknown
     writerMock.startWritingStub = {
       status = .writing
+      return true
     }
     writerMock.statusStub = {
       return status
@@ -258,7 +261,7 @@
         writerMock.status == .writing,
         "Cannot call finishWritingWithCompletionHandler when status is not AVAssetWriter.Status.writing."
       )
-      handler?()
+      handler()
     }
 
     camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil)
@@ -278,10 +281,12 @@
     var startWritingCalled = false
     writerMock.startWritingStub = {
       startWritingCalled = true
+      return true
+
     }
 
     var videoAppended = false
-    adaptorMock.appendPixelBufferStub = { buffer, time in
+    adaptorMock.appendStub = { buffer, time in
       videoAppended = true
       return true
     }
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCaptureDevice.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCaptureDevice.m
index 41e75e9..0c6b317 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCaptureDevice.m
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCaptureDevice.m
@@ -77,8 +77,8 @@
 #pragma clang diagnostic pop
 
 // Focus
-- (BOOL)focusPointOfInterestSupported {
-  return self.device.focusPointOfInterestSupported;
+- (BOOL)isFocusPointOfInterestSupported {
+  return self.device.isFocusPointOfInterestSupported;
 }
 
 - (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode {
@@ -94,8 +94,8 @@
 }
 
 // Exposure
-- (BOOL)exposurePointOfInterestSupported {
-  return self.device.exposurePointOfInterestSupported;
+- (BOOL)isExposurePointOfInterestSupported {
+  return self.device.isExposurePointOfInterestSupported;
 }
 
 - (void)setExposureMode:(AVCaptureExposureMode)exposureMode {
@@ -123,19 +123,19 @@
 }
 
 // Zoom
-- (float)maxAvailableVideoZoomFactor {
+- (CGFloat)maxAvailableVideoZoomFactor {
   return self.device.maxAvailableVideoZoomFactor;
 }
 
-- (float)minAvailableVideoZoomFactor {
+- (CGFloat)minAvailableVideoZoomFactor {
   return self.device.minAvailableVideoZoomFactor;
 }
 
-- (float)videoZoomFactor {
+- (CGFloat)videoZoomFactor {
   return self.device.videoZoomFactor;
 }
 
-- (void)setVideoZoomFactor:(float)factor {
+- (void)setVideoZoomFactor:(CGFloat)factor {
   self.device.videoZoomFactor = factor;
 }
 
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCaptureDevice.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCaptureDevice.h
index 76dc971..80735b8 100644
--- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCaptureDevice.h
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCaptureDevice.h
@@ -22,40 +22,40 @@
 @property(nonatomic, readonly) NSString *uniqueID;
 
 // Position/Orientation
-- (AVCaptureDevicePosition)position;
+@property(nonatomic, readonly) AVCaptureDevicePosition position;
 
 // Format/Configuration
 @property(nonatomic, retain) NSObject<FLTCaptureDeviceFormat> *activeFormat;
 @property(nonatomic, readonly) NSArray<NSObject<FLTCaptureDeviceFormat> *> *formats;
 
 // Flash/Torch
-- (BOOL)hasFlash;
-- (BOOL)hasTorch;
-- (BOOL)isTorchAvailable;
-- (AVCaptureTorchMode)torchMode;
-- (void)setTorchMode:(AVCaptureTorchMode)torchMode;
+@property(nonatomic, readonly) BOOL hasFlash;
+@property(nonatomic, readonly) BOOL hasTorch;
+@property(nonatomic, readonly, getter=isTorchAvailable) BOOL torchAvailable;
+@property(nonatomic) AVCaptureTorchMode torchMode;
 - (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode;
 
 // Focus
-- (BOOL)focusPointOfInterestSupported;
+@property(nonatomic, readonly, getter=isFocusPointOfInterestSupported)
+    BOOL focusPointOfInterestSupported;
 - (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode;
 - (void)setFocusMode:(AVCaptureFocusMode)focusMode;
 - (void)setFocusPointOfInterest:(CGPoint)point;
 
 // Exposure
-- (BOOL)exposurePointOfInterestSupported;
+@property(nonatomic, readonly, getter=isExposurePointOfInterestSupported)
+    BOOL exposurePointOfInterestSupported;
 - (void)setExposureMode:(AVCaptureExposureMode)exposureMode;
 - (void)setExposurePointOfInterest:(CGPoint)point;
-- (float)minExposureTargetBias;
-- (float)maxExposureTargetBias;
+@property(nonatomic, readonly) float minExposureTargetBias;
+@property(nonatomic, readonly) float maxExposureTargetBias;
 - (void)setExposureTargetBias:(float)bias completionHandler:(void (^_Nullable)(CMTime))handler;
 - (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode;
 
 // Zoom
-- (float)maxAvailableVideoZoomFactor;
-- (float)minAvailableVideoZoomFactor;
-- (float)videoZoomFactor;
-- (void)setVideoZoomFactor:(float)factor;
+@property(nonatomic, readonly) CGFloat maxAvailableVideoZoomFactor;
+@property(nonatomic, readonly) CGFloat minAvailableVideoZoomFactor;
+@property(nonatomic) CGFloat videoZoomFactor;
 
 // Camera Properties
 - (float)lensAperture;
@@ -63,14 +63,12 @@
 - (float)ISO;
 
 // Configuration Lock
-- (BOOL)lockForConfiguration:(NSError **)error;
+- (BOOL)lockForConfiguration:(NSError *_Nullable *_Nullable)outError;
 - (void)unlockForConfiguration;
 
 // Frame Duration
-- (CMTime)activeVideoMinFrameDuration;
-- (void)setActiveVideoMinFrameDuration:(CMTime)duration;
-- (CMTime)activeVideoMaxFrameDuration;
-- (void)setActiveVideoMaxFrameDuration:(CMTime)duration;
+@property(nonatomic) CMTime activeVideoMinFrameDuration;
+@property(nonatomic) CMTime activeVideoMaxFrameDuration;
 
 @end
 
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index 7605730..66dffba 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@
 description: iOS implementation of the camera plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.18+12
+version: 0.9.18+13
 
 environment:
   sdk: ^3.4.0