From 949b13de57a594e1ab8cae5a01fbf0aef4ff4527 Mon Sep 17 00:00:00 2001 From: Juma Allan Date: Fri, 13 Dec 2024 11:50:42 +0300 Subject: [PATCH 1/6] added active liveness metadata --- .../Classes/Networking/Models/v2/Metadata.swift | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift index a051148d..70b95f7c 100644 --- a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift +++ b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift @@ -11,6 +11,7 @@ public struct Metadata: Codable { Metadata(items: [ .sdk, .sdkVersion, + .activeLivenessVersion, .clientIP, .fingerprint, .deviceModel, @@ -50,10 +51,21 @@ public class Metadatum: Codable { public static let sdk = Metadatum(name: "sdk", value: "iOS") public static let sdkVersion = Metadatum(name: "sdk_version", value: SmileID.version) + public static let activeLivenessVersion = Metadatum(name: "active_liveness_version", value: "1.0.0") public static let clientIP = Metadatum(name: "client_ip", value: getIPAddress(useIPv4: true)) public static let fingerprint = Metadatum(name: "fingerprint", value: SmileID.deviceId) public static let deviceModel = Metadatum(name: "device_model", value: UIDevice.current.modelName) public static let deviceOS = Metadatum(name: "device_os", value: UIDevice.current.systemVersion) + + public class ActiveLivenessType: Metadatum { + public init(livenessType: LivenessType) { + super.init(name: "active_liveness_type", value: livenessType.rawValue) + } + + public required init(from decoder: Decoder) throws { + try super.init(from: decoder) + } + } public class SelfieImageOrigin: Metadatum { public init(cameraFacing: CameraFacingValue) { @@ -136,6 +148,11 @@ public class Metadatum: Codable { } } +public enum LivenessType: String, Codable { + case headPose = "head_pose" + case smile = "smile" +} + public enum DocumentImageOriginValue: String { case gallery case cameraAutoCapture = "camera_auto_capture" From 03a04a790f6e466fcc0b71ab16f03ee7f7ffc082 Mon Sep 17 00:00:00 2001 From: Juma Allan Date: Fri, 13 Dec 2024 12:03:30 +0300 Subject: [PATCH 2/6] added headpose metadata --- .../SelfieCapture/EnhancedSmartSelfieViewModel.swift | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift index b575a3b9..f327786d 100644 --- a/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift +++ b/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift @@ -254,6 +254,7 @@ extension EnhancedSmartSelfieViewModel { livenessImages = [] selfieCaptureState = .capturingSelfie failureReason = nil + resetSelfieCaptureMetadata() } private func handleWindowSizeChanged(to rect: CGSize, edgeInsets: EdgeInsets) { @@ -446,7 +447,7 @@ extension EnhancedSmartSelfieViewModel: LivenessCheckManagerDelegate { extension EnhancedSmartSelfieViewModel: SelfieSubmissionDelegate { public func submitJob() async throws { // Add metadata before submission - addSelfieCaptureDurationMetaData() + addSelfieCaptureMetaData() if skipApiSubmission { // Skip API submission and update processing state to success @@ -469,9 +470,15 @@ extension EnhancedSmartSelfieViewModel: SelfieSubmissionDelegate { try await submissionManager.submitJob(failureReason: self.failureReason) } - private func addSelfieCaptureDurationMetaData() { + private 
func addSelfieCaptureMetaData() {
         localMetadata.addMetadata(
             Metadatum.SelfieCaptureDuration(duration: metadataTimerStart.elapsedTime()))
+        localMetadata.addMetadata(Metadatum.ActiveLivenessType(livenessType: LivenessType.headPose))
+    }
+
+    private func resetSelfieCaptureMetadata() {
+        localMetadata.metadata.removeAllOfType(Metadatum.SelfieCaptureDuration.self)
+        localMetadata.metadata.removeAllOfType(Metadatum.ActiveLivenessType.self)
     }
 
     public func onFinished(callback: SmartSelfieResultDelegate) {

From c66de2ce74cc6e0d183bdd25d14dac854bf23ec1 Mon Sep 17 00:00:00 2001
From: Juma Allan
Date: Fri, 13 Dec 2024 13:13:18 +0300
Subject: [PATCH 3/6] updated camera name metadata

---
 CHANGELOG.md                                  |   8 +-
 .../Classes/Camera/CameraManager.swift        | 101 ++++---
 .../Networking/Models/v2/Metadata.swift       |  99 ++++---
 .../EnhancedSmartSelfieViewModel.swift        |  83 ++++--
 .../SelfieCapture/SelfieViewModel.swift       | 267 +++++++++++-------
 5 files changed, 365 insertions(+), 193 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 126fb736..a91f037f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,9 +1,15 @@
 # Release Notes
 
+## Unreleased
+
+* Fixed missing idType on Document Verification Jobs
+
 ## 10.2.17
-### Added skipApiSubmission: Whether to skip api submission to SmileID and return only captured images on SmartSelfie enrollment, SmartSelfie authentic , Document verification and Enhanced DocV
+
+* Added skipApiSubmission: Whether to skip API submission to SmileID and return only captured images on SmartSelfie enrollment, SmartSelfie authentication, Document verification and Enhanced DocV
 
 ## 10.2.16
+
 ### Fixed
 
 * Clear images on retry or start capture with the same jobId
diff --git a/Sources/SmileID/Classes/Camera/CameraManager.swift b/Sources/SmileID/Classes/Camera/CameraManager.swift
index f57cee05..37aef80b 100644
--- a/Sources/SmileID/Classes/Camera/CameraManager.swift
+++ b/Sources/SmileID/Classes/Camera/CameraManager.swift
@@ -1,5 +1,5 @@
-import Foundation
 import AVFoundation
+import Foundation
 import SwiftUI
 
 class CameraManager: NSObject, ObservableObject {
@@ -21,7 +21,9 @@ class CameraManager: NSObject, ObservableObject {
 
     @Published var sampleBuffer: CVPixelBuffer?
     @Published var capturedImage: Data?
-    var sampleBufferPublisher: Published<CVPixelBuffer?>.Publisher { $sampleBuffer }
+    var sampleBufferPublisher: Published<CVPixelBuffer?>.Publisher {
+        $sampleBuffer
+    }
     var capturedImagePublisher: Published<Data?>.Publisher { $capturedImage }
     let videoOutputQueue = DispatchQueue(
         label: "com.smileidentity.videooutput",
@@ -50,7 +52,8 @@ class CameraManager: NSObject, ObservableObject {
         self.orientation = orientation
         super.init()
         sessionQueue.async {
-            self.videoOutput.setSampleBufferDelegate(self, queue: self.videoOutputQueue)
+            self.videoOutput.setSampleBufferDelegate(
+                self, queue: self.videoOutputQueue)
         }
         checkPermissions()
     }
@@ -62,28 +65,28 @@ class CameraManager: NSObject, ObservableObject {
     }
 
     private func checkPermissions() {
-        switch AVCaptureDevice.authorizationStatus(for: .video) {
-            case .notDetermined:
-                sessionQueue.suspend()
-                AVCaptureDevice.requestAccess(for: .video) { authorized in
-                    if !authorized {
-                        self.status = .unauthorized
-                        self.set(error: .deniedAuthorization)
-                    }
-                    self.sessionQueue.resume()
+        switch AVCaptureDevice.authorizationStatus(for: .video) {
+        case .notDetermined:
+            sessionQueue.suspend()
+            AVCaptureDevice.requestAccess(for: .video) { authorized in
+                if !authorized {
+                    self.status = .unauthorized
+                    self.set(error: .deniedAuthorization)
+                }
+                self.sessionQueue.resume()
+            }
+        case .restricted:
+            status = .unauthorized
+            set(error: .restrictedAuthorization)
+        case .denied:
+            status = .unauthorized
+            set(error: .deniedAuthorization)
+        case .authorized:
+            break
+        @unknown default:
+            status = .unauthorized
+            set(error: .unknownAuthorization)
         }
-            case .restricted:
-                status = .unauthorized
-                set(error: .restrictedAuthorization)
-            case .denied:
-                status = .unauthorized
-                set(error: .deniedAuthorization)
-            case .authorized:
-                break
-            @unknown default:
-                status = .unauthorized
-                set(error: .unknownAuthorization)
-        }
     }
 
     private func addCameraInput(position: AVCaptureDevice.Position) {
@@ -92,7 +95,8 @@ class CameraManager: NSObject, ObservableObject {
             status = .failed
             return
         }
-        cameraName = camera.uniqueID
+
+        getCameraName(for: camera)
 
         do {
             let cameraInput = try AVCaptureDeviceInput(device: camera)
@@ -108,25 +112,45 @@ class CameraManager: NSObject, ObservableObject {
         }
     }
 
-    private func getCameraForPosition(_ position: AVCaptureDevice.Position) -> AVCaptureDevice? {
+    private func getCameraName(for camera: AVCaptureDevice) {
+        var manufacturer: String
+        if #available(iOS 14.0, *) {
+            manufacturer = camera.manufacturer
+        } else {
+            manufacturer = "Apple Inc."
+        }
+        cameraName =
+            "\(manufacturer) \(camera.localizedName) \(camera.deviceType.rawValue)"
+    }
+
+    private func getCameraForPosition(_ position: AVCaptureDevice.Position)
+        -> AVCaptureDevice?
+ { switch position { case .front: - return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + return AVCaptureDevice.default( + .builtInWideAngleCamera, for: .video, position: .front) case .back: - return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) + return AVCaptureDevice.default( + .builtInWideAngleCamera, for: .video, position: .back) default: - return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + return AVCaptureDevice.default( + .builtInWideAngleCamera, for: .video, position: .front) } } private func configureVideoOutput() { session.removeOutput(videoOutput) session.removeOutput(photoOutput) - if session.canAddOutput(videoOutput), session.canAddOutput(photoOutput) { + if session.canAddOutput(videoOutput), session.canAddOutput(photoOutput) + { session.addOutput(photoOutput) session.addOutput(videoOutput) videoOutput.videoSettings = - [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA] + [ + kCVPixelBufferPixelFormatTypeKey as String: + kCVPixelFormatType_32BGRA + ] if orientation == .portrait { let videoConnection = videoOutput.connection(with: .video) videoConnection?.videoOrientation = .portrait @@ -141,7 +165,9 @@ class CameraManager: NSObject, ObservableObject { checkPermissions() sessionQueue.async { [self] in if !session.isRunning { - if let currentInput = session.inputs.first as? AVCaptureDeviceInput { + if let currentInput = session.inputs.first + as? AVCaptureDeviceInput + { session.removeInput(currentInput) } addCameraInput(position: position) @@ -149,7 +175,9 @@ class CameraManager: NSObject, ObservableObject { session.startRunning() } else { session.beginConfiguration() - if let currentInput = session.inputs.first as? AVCaptureDeviceInput { + if let currentInput = session.inputs.first + as? 
AVCaptureDeviceInput + { session.removeInput(currentInput) } addCameraInput(position: position) @@ -174,7 +202,9 @@ class CameraManager: NSObject, ObservableObject { } internal func capturePhoto() { - guard let connection = photoOutput.connection(with: .video), connection.isEnabled, connection.isActive else { + guard let connection = photoOutput.connection(with: .video), + connection.isEnabled, connection.isActive + else { set(error: .cameraUnavailable) print("Camera unavailable") return @@ -191,7 +221,8 @@ extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate { didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection ) { - guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } + guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) + else { return } self.sampleBuffer = imageBuffer } } diff --git a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift index 70b95f7c..b4b40c1b 100644 --- a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift +++ b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift @@ -12,10 +12,11 @@ public struct Metadata: Codable { .sdk, .sdkVersion, .activeLivenessVersion, + .cameraName, .clientIP, .fingerprint, .deviceModel, - .deviceOS + .deviceOS, ]) } @@ -50,16 +51,26 @@ public class Metadatum: Codable { } public static let sdk = Metadatum(name: "sdk", value: "iOS") - public static let sdkVersion = Metadatum(name: "sdk_version", value: SmileID.version) - public static let activeLivenessVersion = Metadatum(name: "active_liveness_version", value: "1.0.0") - public static let clientIP = Metadatum(name: "client_ip", value: getIPAddress(useIPv4: true)) - public static let fingerprint = Metadatum(name: "fingerprint", value: SmileID.deviceId) - public static let deviceModel = Metadatum(name: "device_model", value: UIDevice.current.modelName) - public static let deviceOS = Metadatum(name: "device_os", value: UIDevice.current.systemVersion) - + public static let sdkVersion = Metadatum( + name: "sdk_version", value: SmileID.version) + public static let activeLivenessVersion = Metadatum( + name: "active_liveness_version", value: "1.0.0") + public static let cameraName = Metadatum( + name: "camera_name", + value: CameraManager.shared.cameraName ?? 
"Unknown Camera Name") + public static let clientIP = Metadatum( + name: "client_ip", value: getIPAddress(useIPv4: true)) + public static let fingerprint = Metadatum( + name: "fingerprint", value: SmileID.deviceId) + public static let deviceModel = Metadatum( + name: "device_model", value: UIDevice.current.modelName) + public static let deviceOS = Metadatum( + name: "device_os", value: UIDevice.current.systemVersion) + public class ActiveLivenessType: Metadatum { public init(livenessType: LivenessType) { - super.init(name: "active_liveness_type", value: livenessType.rawValue) + super.init( + name: "active_liveness_type", value: livenessType.rawValue) } public required init(from decoder: Decoder) throws { @@ -69,7 +80,8 @@ public class Metadatum: Codable { public class SelfieImageOrigin: Metadatum { public init(cameraFacing: CameraFacingValue) { - super.init(name: "selfie_image_origin", value: cameraFacing.rawValue) + super.init( + name: "selfie_image_origin", value: cameraFacing.rawValue) } public required init(from decoder: Decoder) throws { @@ -79,7 +91,9 @@ public class Metadatum: Codable { public class SelfieCaptureDuration: Metadatum { public init(duration: TimeInterval) { - super.init(name: "selfie_capture_duration_ms", value: String(Int(duration * 1000))) + super.init( + name: "selfie_capture_duration_ms", + value: String(Int(duration * 1000))) } public required init(from decoder: Decoder) throws { @@ -89,7 +103,8 @@ public class Metadatum: Codable { public class DocumentFrontImageOrigin: Metadatum { public init(origin: DocumentImageOriginValue) { - super.init(name: "document_front_image_origin", value: origin.rawValue) + super.init( + name: "document_front_image_origin", value: origin.rawValue) } public required init(from decoder: Decoder) throws { @@ -99,7 +114,8 @@ public class Metadatum: Codable { public class DocumentBackImageOrigin: Metadatum { public init(origin: DocumentImageOriginValue) { - super.init(name: "document_back_image_origin", value: origin.rawValue) + super.init( + name: "document_back_image_origin", value: origin.rawValue) } public required init(from decoder: Decoder) throws { @@ -109,7 +125,8 @@ public class Metadatum: Codable { public class DocumentFrontCaptureRetries: Metadatum { public init(retries: Int) { - super.init(name: "document_front_capture_retries", value: String(retries)) + super.init( + name: "document_front_capture_retries", value: String(retries)) } public required init(from decoder: Decoder) throws { @@ -119,7 +136,8 @@ public class Metadatum: Codable { public class DocumentBackCaptureRetries: Metadatum { public init(retries: Int) { - super.init(name: "document_back_capture_retries", value: String(retries)) + super.init( + name: "document_back_capture_retries", value: String(retries)) } public required init(from decoder: Decoder) throws { @@ -129,7 +147,9 @@ public class Metadatum: Codable { public class DocumentFrontCaptureDuration: Metadatum { public init(duration: TimeInterval) { - super.init(name: "document_front_capture_duration_ms", value: String(Int(duration * 1000))) + super.init( + name: "document_front_capture_duration_ms", + value: String(Int(duration * 1000))) } public required init(from decoder: Decoder) throws { @@ -139,7 +159,9 @@ public class Metadatum: Codable { public class DocumentBackCaptureDuration: Metadatum { public init(duration: TimeInterval) { - super.init(name: "document_back_capture_duration_ms", value: String(Int(duration * 1000))) + super.init( + name: "document_back_capture_duration_ms", + value: 
String(Int(duration * 1000)))
         }
 
         public required init(from decoder: Decoder) throws {
             try super.init(from: decoder)
         }
     }
 
@@ -187,18 +209,23 @@ func getIPAddress(useIPv4: Bool) -> String {
             if addrFamily == UInt8(AF_INET) || addrFamily == UInt8(AF_INET6) {
                 let name = String(cString: interface.ifa_name)
                 if name == "en0" || name == "en1" || name == "pdp_ip0"
-                    || name == "pdp_ip1" || name == "pdp_ip2" || name == "pdp_ip3" {
+                    || name == "pdp_ip1" || name == "pdp_ip2" || name == "pdp_ip3"
+                {
                     var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST))
-                    getnameinfo(interface.ifa_addr, socklen_t(interface.ifa_addr.pointee.sa_len),
-                                &hostname, socklen_t(hostname.count),
-                                nil, socklen_t(0), NI_NUMERICHOST)
+                    getnameinfo(
+                        interface.ifa_addr,
+                        socklen_t(interface.ifa_addr.pointee.sa_len),
+                        &hostname, socklen_t(hostname.count),
+                        nil, socklen_t(0), NI_NUMERICHOST)
                     address = String(cString: hostname)
 
-                    if (useIPv4 && addrFamily == UInt8(AF_INET)) ||
-                        (!useIPv4 && addrFamily == UInt8(AF_INET6)) {
+                    if (useIPv4 && addrFamily == UInt8(AF_INET))
+                        || (!useIPv4 && addrFamily == UInt8(AF_INET6))
+                    {
                         if !useIPv4 {
                             if let percentIndex = address.firstIndex(of: "%") {
-                                address = String(address[..<percentIndex])

diff --git a/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift
-                self?.motionDeviceOrientation = gravity.x > 0 ? .landscapeRight : .landscapeLeft
+                self?.motionDeviceOrientation =
+                    gravity.x > 0 ? .landscapeRight : .landscapeLeft
             } else {
-                self?.motionDeviceOrientation = gravity.y > 0 ? .portraitUpsideDown : .portrait
+                self?.motionDeviceOrientation =
+                    gravity.y > 0 ? .portraitUpsideDown : .portrait
             }
         }
     }
 
     private func handleCameraImageBuffer(_ imageBuffer: CVPixelBuffer) {
-        let currentOrientation: UIDeviceOrientation = motionManager.isDeviceMotionAvailable
-            ? motionDeviceOrientation : unlockedDeviceOrientation
+        let currentOrientation: UIDeviceOrientation =
+            motionManager.isDeviceMotionAvailable
+            ? motionDeviceOrientation : unlockedDeviceOrientation
         if currentOrientation == .portrait {
             analyzeFrame(imageBuffer: imageBuffer)
         } else {
@@ -211,7 +219,9 @@
         }
     }
 
-    private func publishUserInstruction(_ instruction: SelfieCaptureInstruction?) {
+    private func publishUserInstruction(
+        _ instruction: SelfieCaptureInstruction?
+    ) {
         if self.userInstruction != instruction {
             self.userInstruction = instruction
             self.resetGuideAnimationDelayTimer()
         }
     }
@@ -257,7 +267,9 @@ extension EnhancedSmartSelfieViewModel {
         resetSelfieCaptureMetadata()
     }
 
-    private func handleWindowSizeChanged(to rect: CGSize, edgeInsets: EdgeInsets) {
+    private func handleWindowSizeChanged(
+        to rect: CGSize, edgeInsets: EdgeInsets
+    ) {
         let topPadding: CGFloat = edgeInsets.top + 100
         faceLayoutGuideFrame = CGRect(
             x: (rect.width / 2) - faceLayoutGuideFrame.width / 2,
@@ -281,7 +293,8 @@ extension EnhancedSmartSelfieViewModel {
                 throw SmileIDError.unknown("Error resizing selfie image")
             }
             self.selfieImage = flipImageForPreview(uiImage)
-            self.selfieImageURL = try LocalStorage.createSelfieFile(jobId: jobId, selfieFile: imageData)
+            self.selfieImageURL = try LocalStorage.createSelfieFile(
+                jobId: jobId, selfieFile: imageData)
         } catch {
             handleError(error)
         }
     }
 
     private func flipImageForPreview(_ image: UIImage) -> UIImage?
{ guard let cgImage = image.cgImage else { return nil } - let contextSize = CGSize(width: image.size.width, height: image.size.height) + let contextSize = CGSize( + width: image.size.width, height: image.size.height) UIGraphicsBeginImageContextWithOptions(contextSize, false, 1.0) defer { UIGraphicsEndImageContext() @@ -309,7 +323,8 @@ extension EnhancedSmartSelfieViewModel { context.draw( cgImage, in: CGRect( - x: -image.size.width / 2, y: -image.size.height / 2, width: image.size.width, height: image.size.height) + x: -image.size.width / 2, y: -image.size.height / 2, + width: image.size.width, height: image.size.height) ) // Get the new UIImage from the context @@ -329,7 +344,8 @@ extension EnhancedSmartSelfieViewModel { else { throw SmileIDError.unknown("Error resizing liveness image") } - let imageUrl = try LocalStorage.createLivenessFile(jobId: jobId, livenessFile: imageData) + let imageUrl = try LocalStorage.createLivenessFile( + jobId: jobId, livenessFile: imageData) livenessImages.append(imageUrl) } catch { handleError(error) @@ -351,7 +367,8 @@ extension EnhancedSmartSelfieViewModel { } private func openSettings() { - guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return } + guard let settingsURL = URL(string: UIApplication.openSettingsURLString) + else { return } UIApplication.shared.open(settingsURL) } } @@ -376,7 +393,9 @@ extension EnhancedSmartSelfieViewModel: FaceDetectorResultDelegate { } } - func faceDetector(_ detector: EnhancedFaceDetector, didFailWithError error: Error) { + func faceDetector( + _ detector: EnhancedFaceDetector, didFailWithError error: Error + ) { DispatchQueue.main.async { self.publishUserInstruction(.headInFrame) } @@ -405,14 +424,16 @@ extension EnhancedSmartSelfieViewModel: LivenessCheckManagerDelegate { private func captureNextFrame(capturedFrames: Int) { let maxFrames = LivenessTask.numberOfFramesToCapture guard capturedFrames < maxFrames, - let currentFrame = currentFrameBuffer else { + let currentFrame = currentFrameBuffer + else { return } captureLivenessImage(currentFrame) let nextCapturedFrames = capturedFrames + 1 if nextCapturedFrames < maxFrames { - DispatchQueue.main.asyncAfter(deadline: .now() + 0.4) { [weak self] in + DispatchQueue.main.asyncAfter(deadline: .now() + 0.4) { + [weak self] in self?.captureNextFrame(capturedFrames: nextCapturedFrames) } } else { @@ -472,21 +493,29 @@ extension EnhancedSmartSelfieViewModel: SelfieSubmissionDelegate { private func addSelfieCaptureMetaData() { localMetadata.addMetadata( - Metadatum.SelfieCaptureDuration(duration: metadataTimerStart.elapsedTime())) - localMetadata.addMetadata(Metadatum.ActiveLivenessType(livenessType: LivenessType.headPose)) + Metadatum.SelfieCaptureDuration( + duration: metadataTimerStart.elapsedTime())) + localMetadata.addMetadata( + Metadatum.ActiveLivenessType(livenessType: LivenessType.headPose)) } - + private func resetSelfieCaptureMetadata() { - localMetadata.metadata.removeAllOfType(Metadatum.SelfieCaptureDuration.self) - localMetadata.metadata.removeAllOfType(Metadatum.ActiveLivenessType.self) + localMetadata.metadata.removeAllOfType( + Metadatum.SelfieCaptureDuration.self) + localMetadata.metadata.removeAllOfType( + Metadatum.ActiveLivenessType.self) } public func onFinished(callback: SmartSelfieResultDelegate) { if let selfieImageURL = selfieImageURL, let selfiePath = getRelativePath(from: selfieImageURL), livenessImages.count == numLivenessImages, - !livenessImages.contains(where: { getRelativePath(from: $0) == nil }) { - let 
livenessImagesPaths = livenessImages.compactMap { getRelativePath(from: $0) }
+            !livenessImages.contains(where: { getRelativePath(from: $0) == nil }
+            )
+        {
+            let livenessImagesPaths = livenessImages.compactMap {
+                getRelativePath(from: $0)
+            }
 
             callback.didSucceed(
                 selfieImage: selfiePath,
diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
index a3097ca0..18b7f0c9 100644
--- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
+++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
@@ -13,9 +13,9 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     private let minFaceAreaThreshold = 0.125
     private let maxFaceAreaThreshold = 0.25
     private let faceRotationThreshold = 0.03
-    private let faceRollThreshold = 0.025 // roll has a smaller range than yaw
+    private let faceRollThreshold = 0.025  // roll has a smaller range than yaw
     private let numLivenessImages = 7
-    private let numTotalSteps = 8 // numLivenessImages + 1 selfie image
+    private let numTotalSteps = 8  // numLivenessImages + 1 selfie image
     private let livenessImageSize = 320
     private let selfieImageSize = 640
 
@@ -35,14 +35,18 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     var previousHeadPitch = Double.infinity
     var previousHeadYaw = Double.infinity
     var isSmiling = false
-    var currentlyUsingArKit: Bool { ARFaceTrackingConfiguration.isSupported && !useBackCamera }
+    var currentlyUsingArKit: Bool {
+        ARFaceTrackingConfiguration.isSupported && !useBackCamera
+    }
 
     var selfieImage: URL?
     var livenessImages: [URL] = []
     var apiResponse: SmartSelfieResponse?
     var error: Error?
 
-    private let arKitFramePublisher = PassthroughSubject<CVPixelBuffer?, Never>()
+    private let arKitFramePublisher = PassthroughSubject<
+        CVPixelBuffer?, Never
+    >()
     private var subscribers = Set<AnyCancellable>()
 
     // UI Properties
@@ -87,7 +91,10 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
 
         cameraManager.sampleBufferPublisher
             .merge(with: arKitFramePublisher)
-            .throttle(for: 0.35, scheduler: DispatchQueue.global(qos: .userInitiated), latest: true)
+            .throttle(
+                for: 0.35, scheduler: DispatchQueue.global(qos: .userInitiated),
+                latest: true
+            )
             // Drop the first ~2 seconds to allow the user to settle in
             .dropFirst(5)
             .compactMap { $0 }
@@ -98,8 +105,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
 
         localMetadata.addMetadata(
             useBackCamera
-            ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera)
-            : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera)
+                ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera)
+                : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera)
         )
     }
 
@@ -118,21 +125,26 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
        }
 
        do {
-            try faceDetector.detect(imageBuffer: image) { [weak self] request, error in
+            try faceDetector.detect(imageBuffer: image) {
+                [weak self] request, error in
                guard let self else { return }
                if let error {
-                    print("Error analyzing image: \(error.localizedDescription)")
+                    print(
+                        "Error analyzing image: \(error.localizedDescription)")
                    self.error = error
                    return
                }
-                guard let results = request.results as? [VNFaceObservation] else {
+                guard let results = request.results as?
[VNFaceObservation] + else { print("Did not receive the expected [VNFaceObservation]") return } if results.count == 0 { - DispatchQueue.main.async { self.directive = "Instructions.UnableToDetectFace" } + DispatchQueue.main.async { + self.directive = "Instructions.UnableToDetectFace" + } // If no faces are detected for a while, reset the state if elapsedtime > noFaceResetDelay { DispatchQueue.main.async { @@ -149,7 +161,9 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { // Ensure only 1 face is in frame if results.count > 1 { - DispatchQueue.main.async { self.directive = "Instructions.MultipleFaces" } + DispatchQueue.main.async { + self.directive = "Instructions.MultipleFaces" + } return } @@ -170,31 +184,44 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { || boundingBox.maxX > maxFaceCenteredThreshold || boundingBox.maxY > maxFaceCenteredThreshold { - DispatchQueue.main.async { self.directive = "Instructions.PutFaceInOval" } + DispatchQueue.main.async { + self.directive = "Instructions.PutFaceInOval" + } return } // image's area is equal to 1. so (bbox area / image area) == bbox area let faceFillRatio = boundingBox.width * boundingBox.height if faceFillRatio < minFaceAreaThreshold { - DispatchQueue.main.async { self.directive = "Instructions.MoveCloser" } + DispatchQueue.main.async { + self.directive = "Instructions.MoveCloser" + } return } if faceFillRatio > maxFaceAreaThreshold { - DispatchQueue.main.async { self.directive = "Instructions.MoveFarther" } + DispatchQueue.main.async { + self.directive = "Instructions.MoveFarther" + } return } - if let quality = face.faceCaptureQuality, quality < faceCaptureQualityThreshold { - DispatchQueue.main.async { self.directive = "Instructions.Quality" } + if let quality = face.faceCaptureQuality, + quality < faceCaptureQualityThreshold + { + DispatchQueue.main.async { + self.directive = "Instructions.Quality" + } return } - let userNeedsToSmile = livenessImages.count > numLivenessImages / 2 + let userNeedsToSmile = + livenessImages.count > numLivenessImages / 2 DispatchQueue.main.async { - self.directive = userNeedsToSmile ? "Instructions.Smile" : "Instructions.Capturing" + self.directive = + userNeedsToSmile + ? "Instructions.Smile" : "Instructions.Capturing" } // TODO: Use mouth deformation as an alternate signal for non-ARKit capture @@ -205,36 +232,50 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { // Perform the rotation checks *after* changing directive to Capturing -- we don't // want to explicitly tell the user to move their head if !hasFaceRotatedEnough(face: face) { - print("Not enough face rotation between captures. Waiting...") + print( + "Not enough face rotation between captures. Waiting...") return } - let orientation = currentlyUsingArKit ? CGImagePropertyOrientation.right : .up + let orientation = + currentlyUsingArKit ? 
CGImagePropertyOrientation.right : .up lastAutoCaptureTime = Date() do { if livenessImages.count < numLivenessImages { - guard let imageData = ImageUtils.resizePixelBufferToHeight( - image, - height: livenessImageSize, - orientation: orientation - ) else { - throw SmileIDError.unknown("Error resizing liveness image") + guard + let imageData = + ImageUtils.resizePixelBufferToHeight( + image, + height: livenessImageSize, + orientation: orientation + ) + else { + throw SmileIDError.unknown( + "Error resizing liveness image") } - let imageUrl = try LocalStorage.createLivenessFile(jobId: jobId, livenessFile: imageData) + let imageUrl = try LocalStorage.createLivenessFile( + jobId: jobId, livenessFile: imageData) livenessImages.append(imageUrl) DispatchQueue.main.async { - self.captureProgress = Double(self.livenessImages.count) / Double(self.numTotalSteps) + self.captureProgress = + Double(self.livenessImages.count) + / Double(self.numTotalSteps) } } else { shouldAnalyzeImages = false - guard let imageData = ImageUtils.resizePixelBufferToHeight( - image, - height: selfieImageSize, - orientation: orientation - ) else { - throw SmileIDError.unknown("Error resizing selfie image") + guard + let imageData = + ImageUtils.resizePixelBufferToHeight( + image, + height: selfieImageSize, + orientation: orientation + ) + else { + throw SmileIDError.unknown( + "Error resizing selfie image") } - let selfieImage = try LocalStorage.createSelfieFile(jobId: jobId, selfieFile: imageData) + let selfieImage = try LocalStorage.createSelfieFile( + jobId: jobId, selfieFile: imageData) self.selfieImage = selfieImage DispatchQueue.main.async { self.captureProgress = 1 @@ -255,14 +296,16 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { } func hasFaceRotatedEnough(face: VNFaceObservation) -> Bool { - guard let roll = face.roll?.doubleValue, let yaw = face.yaw?.doubleValue else { + guard let roll = face.roll?.doubleValue, let yaw = face.yaw?.doubleValue + else { print("Roll and yaw unexpectedly nil") return true } var didPitchChange = false if #available(iOS 15, *) { if let pitch = face.pitch?.doubleValue { - didPitchChange = abs(pitch - previousHeadPitch) > faceRotationThreshold + didPitchChange = + abs(pitch - previousHeadPitch) > faceRotationThreshold } } let rollDelta = abs(roll - previousHeadRoll) @@ -274,7 +317,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { self.previousHeadPitch = face.pitch?.doubleValue ?? Double.infinity } - return didPitchChange || rollDelta > faceRollThreshold || yawDelta > faceRotationThreshold + return didPitchChange || rollDelta > faceRollThreshold + || yawDelta > faceRotationThreshold } func onSmiling(isSmiling: Bool) { @@ -289,7 +333,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { cameraManager.switchCamera(to: useBackCamera ? .back : .front) localMetadata.metadata.removeAllOfType(Metadatum.SelfieImageOrigin.self) localMetadata.addMetadata( - useBackCamera ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera) + useBackCamera + ? 
Metadatum.SelfieImageOrigin(cameraFacing: .backCamera) : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera)) } @@ -304,7 +349,10 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { shouldAnalyzeImages = true cleanUpSelfieCapture() localMetadata.metadata.removeAllOfType(Metadatum.SelfieImageOrigin.self) - localMetadata.metadata.removeAllOfType(Metadatum.SelfieCaptureDuration.self) + localMetadata.metadata.removeAllOfType( + Metadatum.ActiveLivenessType.self) + localMetadata.metadata.removeAllOfType( + Metadatum.SelfieCaptureDuration.self) } func cleanUpSelfieCapture() { @@ -326,7 +374,11 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { } public func submitJob() { - localMetadata.addMetadata(Metadatum.SelfieCaptureDuration(duration: metadataTimerStart.elapsedTime())) + localMetadata.addMetadata( + Metadatum.SelfieCaptureDuration( + duration: metadataTimerStart.elapsedTime())) + localMetadata.addMetadata( + Metadatum.ActiveLivenessType(livenessType: LivenessType.headPose)) if skipApiSubmission { DispatchQueue.main.async { self.processingState = .success } return @@ -334,10 +386,14 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { DispatchQueue.main.async { self.processingState = .inProgress } Task { do { - guard let selfieImage, livenessImages.count == numLivenessImages else { + guard let selfieImage, livenessImages.count == numLivenessImages + else { throw SmileIDError.unknown("Selfie capture failed") } - let jobType = isEnroll ? JobType.smartSelfieEnrollment : JobType.smartSelfieAuthentication + let jobType = + isEnroll + ? JobType.smartSelfieEnrollment + : JobType.smartSelfieAuthentication let authRequest = AuthenticationRequest( jobType: jobType, enrollment: isEnroll, @@ -355,19 +411,23 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { partnerParams: extraPartnerParams ) } - let authResponse = try await SmileID.api.authenticate(request: authRequest) + let authResponse = try await SmileID.api.authenticate( + request: authRequest) var smartSelfieLivenessImages = [MultipartBody]() var smartSelfieImage: MultipartBody? - if let selfie = try? Data(contentsOf: selfieImage), let media = MultipartBody( - withImage: selfie, - forKey: selfieImage.lastPathComponent, - forName: selfieImage.lastPathComponent - ) { + if let selfie = try? Data(contentsOf: selfieImage), + let media = MultipartBody( + withImage: selfie, + forKey: selfieImage.lastPathComponent, + forName: selfieImage.lastPathComponent + ) + { smartSelfieImage = media } if !livenessImages.isEmpty { - let livenessImageInfos = livenessImages.compactMap { liveness -> MultipartBody? in + let livenessImageInfos = livenessImages.compactMap { + liveness -> MultipartBody? in if let data = try? 
Data(contentsOf: liveness) { return MultipartBody( withImage: data, @@ -378,42 +438,44 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { return nil } - smartSelfieLivenessImages.append(contentsOf: livenessImageInfos.compactMap { $0 }) + smartSelfieLivenessImages.append( + contentsOf: livenessImageInfos.compactMap { $0 }) } guard let smartSelfieImage = smartSelfieImage, - smartSelfieLivenessImages.count == numLivenessImages + smartSelfieLivenessImages.count == numLivenessImages else { throw SmileIDError.unknown("Selfie capture failed") } - let response = if isEnroll { - try await SmileID.api.doSmartSelfieEnrollment( - signature: authResponse.signature, - timestamp: authResponse.timestamp, - selfieImage: smartSelfieImage, - livenessImages: smartSelfieLivenessImages, - userId: userId, - partnerParams: extraPartnerParams, - callbackUrl: SmileID.callbackUrl, - sandboxResult: nil, - allowNewEnroll: allowNewEnroll, - failureReason: nil, - metadata: localMetadata.metadata - ) - } else { - try await SmileID.api.doSmartSelfieAuthentication( - signature: authResponse.signature, - timestamp: authResponse.timestamp, - userId: userId, - selfieImage: smartSelfieImage, - livenessImages: smartSelfieLivenessImages, - partnerParams: extraPartnerParams, - callbackUrl: SmileID.callbackUrl, - sandboxResult: nil, - failureReason: nil, - metadata: localMetadata.metadata - ) - } + let response = + if isEnroll { + try await SmileID.api.doSmartSelfieEnrollment( + signature: authResponse.signature, + timestamp: authResponse.timestamp, + selfieImage: smartSelfieImage, + livenessImages: smartSelfieLivenessImages, + userId: userId, + partnerParams: extraPartnerParams, + callbackUrl: SmileID.callbackUrl, + sandboxResult: nil, + allowNewEnroll: allowNewEnroll, + failureReason: nil, + metadata: localMetadata.metadata + ) + } else { + try await SmileID.api.doSmartSelfieAuthentication( + signature: authResponse.signature, + timestamp: authResponse.timestamp, + userId: userId, + selfieImage: smartSelfieImage, + livenessImages: smartSelfieLivenessImages, + partnerParams: extraPartnerParams, + callbackUrl: SmileID.callbackUrl, + sandboxResult: nil, + failureReason: nil, + metadata: localMetadata.metadata + ) + } apiResponse = response do { try LocalStorage.moveToSubmittedJobs(jobId: self.jobId) @@ -422,11 +484,12 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { fileType: FileType.selfie, submitted: true ) - self.livenessImages = try LocalStorage.getFilesByType( - jobId: jobId, - fileType: FileType.liveness, - submitted: true - ) ?? [] + self.livenessImages = + try LocalStorage.getFilesByType( + jobId: jobId, + fileType: FileType.liveness, + submitted: true + ) ?? [] } catch { print("Error moving job to submitted directory: \(error)") self.error = error @@ -444,25 +507,29 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { fileType: FileType.selfie, submitted: true ) - self.livenessImages = try LocalStorage.getFilesByType( - jobId: jobId, - fileType: FileType.liveness, - submitted: true - ) ?? [] + self.livenessImages = + try LocalStorage.getFilesByType( + jobId: jobId, + fileType: FileType.liveness, + submitted: true + ) ?? 
[] } } catch { print("Error moving job to submitted directory: \(error)") self.error = error return } - if SmileID.allowOfflineMode, SmileIDError.isNetworkFailure(error: error) { + if SmileID.allowOfflineMode, + SmileIDError.isNetworkFailure(error: error) + { DispatchQueue.main.async { self.errorMessageRes = "Offline.Message" self.processingState = .success } } else { print("Error submitting job: \(error)") - let (errorMessageRes, errorMessage) = toErrorMessage(error: error) + let (errorMessageRes, errorMessage) = toErrorMessage( + error: error) self.error = error self.errorMessageRes = errorMessageRes self.errorMessage = errorMessage @@ -478,11 +545,14 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { public func onFinished(callback: SmartSelfieResultDelegate) { if let selfieImage = selfieImage, - let selfiePath = getRelativePath(from: selfieImage), - livenessImages.count == numLivenessImages, - !livenessImages.contains(where: { getRelativePath(from: $0) == nil }) + let selfiePath = getRelativePath(from: selfieImage), + livenessImages.count == numLivenessImages, + !livenessImages.contains(where: { getRelativePath(from: $0) == nil } + ) { - let livenessImagesPaths = livenessImages.compactMap { getRelativePath(from: $0) } + let livenessImagesPaths = livenessImages.compactMap { + getRelativePath(from: $0) + } callback.didSucceed( selfieImage: selfiePath, @@ -497,7 +567,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { } func openSettings() { - guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return } + guard let settingsURL = URL(string: UIApplication.openSettingsURLString) + else { return } UIApplication.shared.open(settingsURL) } } From 405d1c70b382be5888f293727abb01dc96fcb5f9 Mon Sep 17 00:00:00 2001 From: Juma Allan Date: Fri, 13 Dec 2024 13:55:37 +0300 Subject: [PATCH 4/6] updated liveness type --- Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift index 18b7f0c9..cadcbcc6 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift @@ -378,7 +378,7 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { Metadatum.SelfieCaptureDuration( duration: metadataTimerStart.elapsedTime())) localMetadata.addMetadata( - Metadatum.ActiveLivenessType(livenessType: LivenessType.headPose)) + Metadatum.ActiveLivenessType(livenessType: LivenessType.smile)) if skipApiSubmission { DispatchQueue.main.async { self.processingState = .success } return From 555fdac541d49258f0ff883a87976415455573e4 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Fri, 13 Dec 2024 15:55:29 +0100 Subject: [PATCH 5/6] fix the camera info collection. 
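
Likely motivation, inferred from the diff below: the static Metadatum.cameraName
introduced in patch 3 was computed once, on first use of the Metadata type —
generally before any capture session had been configured — so
CameraManager.shared.cameraName was still nil and the metadata always fell back
to "Unknown Camera Name". The metadatum is now built at submission time, after
addCameraInput(position:) has populated cameraName on the view model's own
camera manager. A minimal sketch of the idea (illustrative only, not code from
this patch; cameraNameMetadatum is a hypothetical helper):

    // Resolve the camera name lazily, at capture/submission time, rather
    // than in a static property whose value is computed once and cached.
    func cameraNameMetadatum(from cameraManager: CameraManager) -> Metadatum {
        Metadatum(
            name: "camera_name",
            value: cameraManager.cameraName ?? "Unknown Camera Name")
    }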
--- .../Classes/Networking/Models/v2/Metadata.swift | 6 +----- .../EnhancedSmartSelfieViewModel.swift | 15 +++++++++++---- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift index b4b40c1b..e502d385 100644 --- a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift +++ b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift @@ -12,11 +12,10 @@ public struct Metadata: Codable { .sdk, .sdkVersion, .activeLivenessVersion, - .cameraName, .clientIP, .fingerprint, .deviceModel, - .deviceOS, + .deviceOS ]) } @@ -55,9 +54,6 @@ public class Metadatum: Codable { name: "sdk_version", value: SmileID.version) public static let activeLivenessVersion = Metadatum( name: "active_liveness_version", value: "1.0.0") - public static let cameraName = Metadatum( - name: "camera_name", - value: CameraManager.shared.cameraName ?? "Unknown Camera Name") public static let clientIP = Metadatum( name: "client_ip", value: getIPAddress(useIPv4: true)) public static let fingerprint = Metadatum( diff --git a/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift index 7c5afac2..9e6dbd46 100644 --- a/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift +++ b/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift @@ -511,9 +511,17 @@ extension EnhancedSmartSelfieViewModel: SelfieSubmissionDelegate { private func addSelfieCaptureMetaData() { localMetadata.addMetadata( Metadatum.SelfieCaptureDuration( - duration: metadataTimerStart.elapsedTime())) + duration: metadataTimerStart.elapsedTime()) + ) + localMetadata.addMetadata( + Metadatum.ActiveLivenessType(livenessType: LivenessType.headPose) + ) localMetadata.addMetadata( - Metadatum.ActiveLivenessType(livenessType: LivenessType.headPose)) + Metadatum( + name: "camera_name", + value: cameraManager.cameraName ?? "Unknown Camera Name" + ) + ) } private func resetSelfieCaptureMetadata() { @@ -528,8 +536,7 @@ extension EnhancedSmartSelfieViewModel: SelfieSubmissionDelegate { let selfiePath = getRelativePath(from: selfieImageURL), livenessImages.count == numLivenessImages, !livenessImages.contains(where: { getRelativePath(from: $0) == nil } - ) - { + ) { let livenessImagesPaths = livenessImages.compactMap { getRelativePath(from: $0) } From 8c97d30271363d89ce3d1df23c25d0540f94f65d Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Fri, 13 Dec 2024 15:59:42 +0100 Subject: [PATCH 6/6] code formatting --- Sources/SmileID/Classes/Camera/CameraManager.swift | 12 ++++-------- .../Classes/Networking/Models/v2/Metadata.swift | 9 +++------ .../SelfieCapture/EnhancedSmartSelfieViewModel.swift | 6 ++---- .../Classes/SelfieCapture/SelfieViewModel.swift | 6 ++---- 4 files changed, 11 insertions(+), 22 deletions(-) diff --git a/Sources/SmileID/Classes/Camera/CameraManager.swift b/Sources/SmileID/Classes/Camera/CameraManager.swift index 82561adf..8a5407e1 100644 --- a/Sources/SmileID/Classes/Camera/CameraManager.swift +++ b/Sources/SmileID/Classes/Camera/CameraManager.swift @@ -122,8 +122,7 @@ class CameraManager: NSObject, ObservableObject { } private func getCameraForPosition(_ position: AVCaptureDevice.Position) - -> AVCaptureDevice? - { + -> AVCaptureDevice? 
{
         switch position {
         case .front:
             return AVCaptureDevice.default(
                 .builtInWideAngleCamera, for: .video, position: .front)
@@ -140,8 +139,7 @@ class CameraManager: NSObject, ObservableObject {
     private func configureVideoOutput() {
         session.removeOutput(videoOutput)
         session.removeOutput(photoOutput)
-        if session.canAddOutput(videoOutput), session.canAddOutput(photoOutput)
-        {
+        if session.canAddOutput(videoOutput), session.canAddOutput(photoOutput) {
             session.addOutput(photoOutput)
             session.addOutput(videoOutput)
             videoOutput.videoSettings =
@@ -164,8 +162,7 @@ class CameraManager: NSObject, ObservableObject {
         checkPermissions()
         sessionQueue.async { [self] in
             if !session.isRunning {
                 if let currentInput = session.inputs.first
-                    as? AVCaptureDeviceInput
-                {
+                    as? AVCaptureDeviceInput {
                     session.removeInput(currentInput)
                 }
                 addCameraInput(position: position)
@@ -174,8 +171,7 @@ class CameraManager: NSObject, ObservableObject {
             } else {
                 session.beginConfiguration()
                 if let currentInput = session.inputs.first
-                    as? AVCaptureDeviceInput
-                {
+                    as? AVCaptureDeviceInput {
                     session.removeInput(currentInput)
                 }
                 addCameraInput(position: position)
diff --git a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift
index e502d385..5ad912ca 100644
--- a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift
+++ b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift
@@ -205,8 +205,7 @@ func getIPAddress(useIPv4: Bool) -> String {
             if addrFamily == UInt8(AF_INET) || addrFamily == UInt8(AF_INET6) {
                 let name = String(cString: interface.ifa_name)
                 if name == "en0" || name == "en1" || name == "pdp_ip0"
-                    || name == "pdp_ip1" || name == "pdp_ip2" || name == "pdp_ip3"
-                {
+                    || name == "pdp_ip1" || name == "pdp_ip2" || name == "pdp_ip3" {
                     var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST))
                     getnameinfo(
                         interface.ifa_addr,
                         socklen_t(interface.ifa_addr.pointee.sa_len),
                         &hostname, socklen_t(hostname.count),
                         nil, socklen_t(0), NI_NUMERICHOST)
                     address = String(cString: hostname)
 
                     if (useIPv4 && addrFamily == UInt8(AF_INET))
-                        || (!useIPv4 && addrFamily == UInt8(AF_INET6))
-                    {
+                        || (!useIPv4 && addrFamily == UInt8(AF_INET6)) {
                         if !useIPv4 {
                             if let percentIndex = address.firstIndex(of: "%") {
                                 address = String(address[..<percentIndex])

diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
-                let livenessImageInfos = livenessImages.compactMap {
-                    liveness -> MultipartBody? in
+                let livenessImageInfos = livenessImages.compactMap { liveness -> MultipartBody? in
                     if let data = try? Data(contentsOf: liveness) {
                         return MultipartBody(
                             withImage: data,
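
A note on the metadata model these patches extend: Metadata is a Codable
wrapper around a list of Metadatum name/value pairs, so the new entries
(active_liveness_type, active_liveness_version, and camera_name) ride along
with the existing device metadata whenever a job is submitted. Patch 2 tags
the enhanced flow with the head_pose liveness type, and patch 4 corrects the
legacy SelfieViewModel to report smile, matching its smile-based liveness
check. A minimal sketch of the resulting payload, assuming the synthesized
Codable conformance for the types shown in Metadata.swift:

    import Foundation

    // Illustrative only: encode a Metadata value the way the SDK's
    // Codable models would serialize it for submission.
    let metadata = Metadata(items: [
        Metadatum(name: "active_liveness_type", value: LivenessType.smile.rawValue),
        Metadatum(name: "active_liveness_version", value: "1.0.0"),
    ])
    let json = try JSONEncoder().encode(metadata)
    // Produces JSON along the lines of:
    // {"items":[{"name":"active_liveness_type","value":"smile"}, ...]}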