From 861b7f5f076467da9e3cd89486c80e8a68a6f0ca Mon Sep 17 00:00:00 2001
From: Jubril Olambiwonnu
Date: Tue, 18 Jul 2023 18:35:52 +0100
Subject: [PATCH] Fix selfie capture fallback implementation (#47)

* Calculate face guide frame correctly
* Calculate `FaceShape` frame properly
* Calculate displayed crop rect based on orientation
* Set up AVCaptureSession in the right sequence
* Remove manual face bounding box conversion and delegate it to `AVCaptureVideoPreviewLayer`
* Set correct orientation for face detection sequence handler
* Don't scale down ARKit face bounding box
* Fix agent mode toggle animation
* Set correct orientation
* Add `allowAgentMode` and `showAttribution` flags to public interfaces
* Add agent mode flag to UI
* Enable agent mode on the sample app
* Add pre-smile check
--- Example/SmileID.xcodeproj/project.pbxproj | 4 +- Example/SmileID/EnterUserIDView.swift | 1 + Example/SmileID/HomeView.swift | 1 + Example/SmileID/HomeViewController.swift | 6 ++- .../Classes/Camera/CameraManager.swift | 45 +++++++++---------- .../Classes/Camera/CameraViewController.swift | 27 ++--------- .../Classes/FaceDetector/FaceDetector.swift | 27 +---------- .../SmileID/Classes/Helpers/ImageUtils.swift | 16 +++---- .../SelfieCapture/View/ARViewController.swift | 2 +- .../SelfieCapture/View/FaceOverlayView.swift | 38 ++++++++++++---- .../View/SelfieCaptureView.swift | 21 ++------- .../ViewModel/SelfieCaptureViewModel.swift | 44 ++++++++++++------ Sources/SmileID/Classes/SmileID.swift | 17 ++++++- 13 files changed, 123 insertions(+), 126 deletions(-) diff --git a/Example/SmileID.xcodeproj/project.pbxproj b/Example/SmileID.xcodeproj/project.pbxproj index 5fa0fc5c2..04b8d2702 100644 --- a/Example/SmileID.xcodeproj/project.pbxproj +++ b/Example/SmileID.xcodeproj/project.pbxproj @@ -611,7 +611,7 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; - CURRENT_PROJECT_VERSION = 19; + CURRENT_PROJECT_VERSION = 22; DEVELOPMENT_TEAM = 99P7YGX9Q6; "DEVELOPMENT_TEAM[sdk=iphoneos*]" = 99P7YGX9Q6; INFOPLIST_FILE = SmileID/Info.plist; @@ -638,7 +638,7 @@ CODE_SIGN_IDENTITY = "Apple Development"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Manual; - CURRENT_PROJECT_VERSION = 19; + CURRENT_PROJECT_VERSION = 22; DEVELOPMENT_TEAM = ""; "DEVELOPMENT_TEAM[sdk=iphoneos*]" = 99P7YGX9Q6; INFOPLIST_FILE = SmileID/Info.plist; diff --git a/Example/SmileID/EnterUserIDView.swift b/Example/SmileID/EnterUserIDView.swift index 5e71a066c..f00ccff75 100644 --- a/Example/SmileID/EnterUserIDView.swift +++ b/Example/SmileID/EnterUserIDView.swift @@ -23,6 +23,7 @@ struct EnterUserIDView: View { SmileTextField(field: $userId, placeholder: "User ID") .multilineTextAlignment(.center) NavigationLink(destination: SmileID.smartSelfieAuthenticationScreen(userId: userId, + allowAgentMode: true, delegate: viewModel) .navigationBarBackButtonHidden(true), isActive: $goToAuth ) { } diff --git a/Example/SmileID/HomeView.swift b/Example/SmileID/HomeView.swift index acd0c6663..1e61c4f12 100644 --- a/Example/SmileID/HomeView.swift +++ b/Example/SmileID/HomeView.swift @@ -19,6 +19,7 @@ struct HomeView: View { }) .sheet(isPresented: $viewModel.presentSmartSelfieEnrollment, content: { SmileID.smartSelfieEnrollmentScreen(userId: viewModel.generateUserID(), + allowAgentMode: true, delegate: viewModel) }) Button(action: { self.viewModel.handleSmartSelfieAuthTap() }, label: { diff --git a/Example/SmileID/HomeViewController.swift
b/Example/SmileID/HomeViewController.swift index c70262935..efbac2d01 100644 --- a/Example/SmileID/HomeViewController.swift +++ b/Example/SmileID/HomeViewController.swift @@ -30,7 +30,8 @@ class HomeViewController: UIViewController, SmartSelfieResultDelegate { userID = UUID().uuidString currentJob = .smartSelfieEnrollment let smartSelfieRegistrationScreen = SmileID.smartSelfieEnrollmentScreen(userId: userID, - delegate: self) + allowAgentMode: true, + delegate: self) cameraVC = UIHostingController(rootView: smartSelfieRegistrationScreen) cameraVC?.modalPresentationStyle = .fullScreen navigationController?.present(cameraVC!, animated: true) @@ -42,7 +43,8 @@ class HomeViewController: UIViewController, SmartSelfieResultDelegate { func smartSelfieAuthenticationScreen(userID: String) { let smartSelfieAuthenticationScreen = SmileID.smartSelfieAuthenticationScreen(userId: userID, - delegate: self) + allowAgentMode: true, + delegate: self) cameraVC = UIHostingController(rootView: smartSelfieAuthenticationScreen) cameraVC?.modalPresentationStyle = .fullScreen navigationController?.present(cameraVC!, animated: true) diff --git a/Sources/SmileID/Classes/Camera/CameraManager.swift b/Sources/SmileID/Classes/Camera/CameraManager.swift index 1b10f650a..ef0db8841 100644 --- a/Sources/SmileID/Classes/Camera/CameraManager.swift +++ b/Sources/SmileID/Classes/Camera/CameraManager.swift @@ -2,7 +2,7 @@ import Foundation import AVFoundation import SwiftUI -protocol CameraManageable: AnyObject { +protocol CameraManageable: AnyObject { func switchCamera(to position: AVCaptureDevice.Position) func pauseSession() func resumeSession() @@ -84,19 +84,6 @@ class CameraManager: NSObject, ObservableObject, CameraManageable { } } - private func configureCaptureSession() { - guard status == .unconfigured else { - return - } - session.beginConfiguration() - defer { - session.commitConfiguration() - } - - addCameraInput(position: .front) - configureVideoOutput() - } - private func addCameraInput(position: AVCaptureDevice.Position) { guard let camera = getCameraForPosition(position) else { set(error: .cameraUnavailable) @@ -130,6 +117,7 @@ class CameraManager: NSObject, ObservableObject, CameraManageable { } private func configureVideoOutput() { + session.removeOutput(videoOutput) if session.canAddOutput(videoOutput) { session.addOutput(videoOutput) videoOutput.videoSettings = @@ -137,7 +125,6 @@ class CameraManager: NSObject, ObservableObject, CameraManageable { let videoConnection = videoOutput.connection(with: .video) videoConnection?.videoOrientation = .portrait - videoConnection?.isVideoMirrored = true } else { set(error: .cannotAddOutput) status = .failed @@ -148,15 +135,21 @@ class CameraManager: NSObject, ObservableObject, CameraManageable { self.checkPermissions() sessionQueue.async { [self] in if !self.session.isRunning { - self.session.startRunning() - } - self.session.beginConfiguration() - defer { self.session.commitConfiguration() } - if let currentInput = self.session.inputs.first as? AVCaptureDeviceInput { - self.session.removeInput(currentInput) + if let currentInput = self.session.inputs.first as? AVCaptureDeviceInput { + self.session.removeInput(currentInput) + } + self.addCameraInput(position: position) + self.configureVideoOutput() + session.startRunning() + } else { + self.session.beginConfiguration() + if let currentInput = self.session.inputs.first as? 
AVCaptureDeviceInput { + self.session.removeInput(currentInput) + } + self.addCameraInput(position: position) + self.configureVideoOutput() + self.session.commitConfiguration() } - self.configureVideoOutput() - self.addCameraInput(position: position) } } @@ -179,8 +172,10 @@ extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate { func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - if let buffer = sampleBuffer.imageBuffer { - self.sampleBuffer = buffer + guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + return } + + self.sampleBuffer = imageBuffer } } diff --git a/Sources/SmileID/Classes/Camera/CameraViewController.swift b/Sources/SmileID/Classes/Camera/CameraViewController.swift index 6f9b896ca..d0181121d 100644 --- a/Sources/SmileID/Classes/Camera/CameraViewController.swift +++ b/Sources/SmileID/Classes/Camera/CameraViewController.swift @@ -17,12 +17,9 @@ class PreviewView: UIViewController { fatalError("init(coder:) has not been implemented") } - override func viewWillAppear(_ animated: Bool) { - super.viewWillAppear(animated) - if layedOutSubviews == false { - configurePreviewLayer() - layedOutSubviews = true - } + override func viewDidLoad() { + super.viewDidLoad() + configurePreviewLayer() } func configurePreviewLayer() { @@ -30,7 +27,6 @@ class PreviewView: UIViewController { previewLayer = AVCaptureVideoPreviewLayer(session: session) previewLayer?.videoGravity = .resizeAspectFill previewLayer?.frame = view.bounds - previewLayer?.connection?.videoOrientation = .portrait view.layer.addSublayer(previewLayer!) } } @@ -40,21 +36,6 @@ extension PreviewView: FaceDetectorDelegate { guard let previewLayer = previewLayer else { return .zero } - - - let normalizedRect = cameraManager?.cameraPositon == .back ? rect : CGRect(x: rect.origin.y, - y: rect.origin.x, - width: rect.height, - height: rect.width) - - let transformedRect = previewLayer.layerRectConverted(fromMetadataOutputRect: normalizedRect) - - let mirroredRect = CGRect(x: previewLayer.bounds.width - transformedRect.origin.x - transformedRect.width, - y: previewLayer.bounds.height - transformedRect.origin.y - transformedRect.height, - width: transformedRect.width, - height: transformedRect.height) - - return mirroredRect - + return previewLayer.layerRectConverted(fromMetadataOutputRect: rect) } } diff --git a/Sources/SmileID/Classes/FaceDetector/FaceDetector.swift b/Sources/SmileID/Classes/FaceDetector/FaceDetector.swift index acdc7c9e0..679989469 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceDetector.swift +++ b/Sources/SmileID/Classes/FaceDetector/FaceDetector.swift @@ -16,29 +16,7 @@ class FaceDetector: NSObject, ARSCNViewDelegate { func detectFaces(imageBuffer: CVImageBuffer) { let detectCaptureQualityRequest = VNDetectFaceCaptureQualityRequest(completionHandler: detectedFaceQualityRequest) - let detectFaceRectanglesRequest = VNDetectFaceRectanglesRequest { [self] request, _ in - guard let results = request.results as? 
[VNFaceObservation], let viewDelegate = viewDelegate else { - model?.perform(action: .noFaceDetected) - return - } - - if results.count > 1 { - model?.perform(action: .multipleFacesDetected) - return - } - guard let result = results.first, !result.boundingBox.isNaN else { - model?.perform(action: .noFaceDetected) - return - } - let convertedBoundingBox = viewDelegate.convertFromMetadataToPreviewRect(rect: result.boundingBox) - - let faceObservationModel = FaceGeometryModel( - boundingBox: convertedBoundingBox, - roll: result.roll ?? 0, - yaw: result.yaw ?? 0 - ) - model?.perform(action: .faceObservationDetected(faceObservationModel)) - } + let detectFaceRectanglesRequest = VNDetectFaceRectanglesRequest(completionHandler: detectedFaceRectangles) // Use most recent models or fallback to older versions if #available(iOS 14.0, *) { @@ -114,8 +92,7 @@ class FaceDetector: NSObject, ARSCNViewDelegate { imageBuffer: CVImageBuffer) { do { try sequenceHandler.perform(requests, - on: imageBuffer, - orientation: .upMirrored) + on: imageBuffer, orientation: .leftMirrored) } catch { print(error.localizedDescription) } diff --git a/Sources/SmileID/Classes/Helpers/ImageUtils.swift index 336aacb5b..a341738dc 100644 --- a/Sources/SmileID/Classes/Helpers/ImageUtils.swift +++ b/Sources/SmileID/Classes/Helpers/ImageUtils.swift @@ -24,24 +24,24 @@ class ImageUtils { let trueImageSize = CGSize(width: imagewidth, height: screenImageSize.height) // ratio of the true image width to displayed image width - let ycuttoffregionAgentMode: CGFloat = max(imageHeight, screenImageSize.width) / min(imageHeight, screenImageSize.width) let xcutoffregion: CGFloat = max(imagewidth, screenImageSize.width) / min(imagewidth, screenImageSize.width) - let ycutoffregion: CGFloat = max(imageHeight, screenImageSize.height) / min(imageHeight, screenImageSize.height) + var ycutoffregion: CGFloat = max(imageHeight, screenImageSize.height) / min(imageHeight, screenImageSize.height) + + // if the pixel buffer comes from an AVCaptureSession, the y cutoff is not required + if orientation == .up || orientation == .upMirrored { + ycutoffregion = 1 + } // scale down the original buffer to match the size of what's displayed on screen guard let scaledDownBuffer = resizePixelBuffer(buffer, size: trueImageSize) else { return nil } // calculate crop rect let cropL = max(faceGeometry.boundingBox.width, faceGeometry.boundingBox.height) - let cropRect = agentMode ? CGRect(x: faceGeometry.boundingBox.origin.y * ycuttoffregionAgentMode, - y: faceGeometry.boundingBox.origin.y * ycuttoffregionAgentMode, - width: cropL, - height: cropL) : CGRect(x: faceGeometry.boundingBox.origin.x * xcutoffregion, + let cropRect = CGRect(x: faceGeometry.boundingBox.origin.x * xcutoffregion, y: faceGeometry.boundingBox.origin.y * ycutoffregion, width: cropL, height: cropL) - let finalrect = agentMode ?
increaseRect(rect: cropRect, - byPercentage: 1.5) : increaseRect(rect: cropRect, byPercentage: 1) + let finalrect = increaseRect(rect: cropRect, byPercentage: 1) // crop face from the buffer returned in the above operation and return jpg return cropFace(scaledDownBuffer, diff --git a/Sources/SmileID/Classes/SelfieCapture/View/ARViewController.swift b/Sources/SmileID/Classes/SelfieCapture/View/ARViewController.swift index 6b5fc17ce..ebf769327 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/ARViewController.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/ARViewController.swift @@ -85,7 +85,7 @@ class ARViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate { let maxX = allXs.max() ?? 0 let minY = allYs.min() ?? 0 let maxY = allYs.max() ?? 0 - let boundingBox = CGRect(x: minX, y: minY, width: (maxX - minX) * 0.8, height: (maxY - minY) * 0.8) + let boundingBox = CGRect(x: minX, y: minY, width: (maxX - minX), height: (maxY - minY)) return boundingBox } diff --git a/Sources/SmileID/Classes/SelfieCapture/View/FaceOverlayView.swift b/Sources/SmileID/Classes/SelfieCapture/View/FaceOverlayView.swift index 77a500320..7deb98bf4 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/FaceOverlayView.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/FaceOverlayView.swift @@ -1,8 +1,8 @@ import Foundation +import Combine import SwiftUI struct FaceOverlayView: View { - @State var agentMode = false @ObservedObject private(set) var model: SelfieCaptureViewModel var body: some View { GeometryReader { geometry in @@ -19,7 +19,27 @@ struct FaceOverlayView: View { .blendMode(.destinationOut) .frame(width: faceWidth, height: faceHeight) + .background(GeometryReader { localGeometry in // local geometry reader + Color.clear.onReceive(Just(localGeometry.frame(in: .global))) { globalFrame in + if globalFrame.origin.x != model.faceLayoutGuideFrame.origin.x + { + let window = UIApplication + .shared + .connectedScenes + .flatMap { ($0 as? UIWindowScene)?.windows ?? [] } + .last { $0.isKeyWindow } + if let rootView = window { + // Geometry reader's .global returns the frame in the screen's coordinate system. + let safeArea = rootView.screen.bounds.height - geometry.size.height + model.faceLayoutGuideFrame = CGRect(origin: CGPoint(x: globalFrame.origin.x, + y: globalFrame.origin.y - safeArea), + size: globalFrame.size) + } + + } + } + }) ) .overlay(FaceShape() .stroke(SmileID.theme.accent.opacity(0.4), @@ -41,18 +61,20 @@ struct FaceOverlayView: View { .scaleEffect(1.2, anchor: .top) InstructionsView(model: model) .padding(.top, -((faceWidth)/2)) - HStack(spacing: 10) { - Text("Agent Mode") - .foregroundColor(agentMode ? SmileID.theme.backgroundMain : SmileID.theme.accent) - .font(SmileID.theme.header4) - Toggle("", isOn: $model.agentMode).labelsHidden() - } + if model.allowsAgentMode { + HStack(spacing: 10) { + Text("Agent Mode") + .foregroundColor(model.agentMode ? SmileID.theme.backgroundMain : SmileID.theme.accent) + .font(SmileID.theme.header4) + Toggle("", isOn: $model.agentMode).labelsHidden() + } .frame(width: 188, height: 46) - .background(agentMode ? SmileID.theme.accent : SmileID.theme.backgroundMain) + .background(model.agentMode ? 
SmileID.theme.accent : SmileID.theme.backgroundMain) .cornerRadius(23) .shadow(radius: 23) .padding(.bottom, 35) .animation(.default) + } } } } diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureView.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureView.swift index eb5012858..33c4f618b 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureView.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureView.swift @@ -27,14 +27,10 @@ public struct SelfieCaptureView: View, SelfieViewDelegate { public var body: some View { GeometryReader { geometry in - let ovalSize = ovalSize(from: geometry) ZStack { if ARFaceTrackingConfiguration.isSupported && viewModel.agentMode == false { arView .onAppear { - viewModel.faceLayoutGuideFrame = - CGRect(origin: .zero, - size: ovalSize) arView?.preview.model = viewModel viewModel.viewFinderSize = geometry.size viewModel.selfieViewDelegate = self @@ -43,13 +39,10 @@ public struct SelfieCaptureView: View, SelfieViewDelegate { camera .onAppear { viewModel.captureResultDelegate = delegate - viewModel.faceLayoutGuideFrame = - CGRect(origin: .zero, - size: ovalSize) viewModel.viewDelegate = camera!.preview viewModel.viewFinderSize = geometry.size viewModel.cameraManager.switchCamera(to: viewModel.agentMode ? .back : .front) - }.scaleEffect(1.2, anchor: .top) + } } faceOverlay switch viewModel.processingState { @@ -62,9 +55,8 @@ public struct SelfieCaptureView: View, SelfieViewDelegate { case .error: ModalPresenter { ErrorView(viewModel: viewModel) } default: - Text("") + Color.clear } - } } .edgesIgnoringSafeArea(.all) @@ -106,15 +98,10 @@ struct FaceBoundingBoxView: View { Rectangle().fill(Color.clear) case .faceFound(let faceGeometryModel): Rectangle() - .path(in: CGRect( - x: faceGeometryModel.boundingBox.origin.x, - y: faceGeometryModel.boundingBox.origin.y, - width: faceGeometryModel.boundingBox.width, - height: faceGeometryModel.boundingBox.height - )) + .path(in: faceGeometryModel.boundingBox) .stroke(Color.yellow, lineWidth: 2.0) case .errored: - Rectangle().fill(Color.clear) + Rectangle().fill(Color.yellow) } } } diff --git a/Sources/SmileID/Classes/SelfieCapture/ViewModel/SelfieCaptureViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/ViewModel/SelfieCaptureViewModel.swift index 74fd89713..53787652c 100644 --- a/Sources/SmileID/Classes/SelfieCapture/ViewModel/SelfieCaptureViewModel.swift +++ b/Sources/SmileID/Classes/SelfieCapture/ViewModel/SelfieCaptureViewModel.swift @@ -79,13 +79,14 @@ final class SelfieCaptureViewModel: ObservableObject { } } - // MARK: Public Properties + // MARK: Private Properties private var userId: String private var jobId: String private var isEnroll: Bool private (set) var showAttribution: Bool private var selfieImage: Data? private var currentExif: [String: Any]? + private (set) var allowsAgentMode: Bool private let subject = PassthroughSubject() private (set) lazy var cameraManager: CameraManageable = CameraManager() private var faceDetector = FaceDetector() @@ -149,12 +150,14 @@ final class SelfieCaptureViewModel: ObservableObject { init(userId: String, jobId: String, isEnroll: Bool, + allowsAgentMode: Bool = false, showAttribution: Bool = true, cameraManager: CameraManageable? 
= nil) { self.userId = userId self.isEnroll = isEnroll self.jobId = jobId self.showAttribution = showAttribution + self.allowsAgentMode = allowsAgentMode faceDetector.model = self if let cameraManager = cameraManager { self.cameraManager = cameraManager @@ -309,9 +312,13 @@ final class SelfieCaptureViewModel: ObservableObject { case .smileDirective: subject.send("Instructions.Smile") case .smileAction: - isSmiling = true + if livenessImages.count >= 3 { + isSmiling = true + } case .noSmile: - isSmiling = false + if livenessImages.count < 3 { + isSmiling = false + } } } @@ -330,10 +337,20 @@ final class SelfieCaptureViewModel: ObservableObject { guard case let .faceFound(faceGeometry) = faceGeometryState else { return } + var orientation: CGImagePropertyOrientation + + if (isARSupported && !agentMode) { + orientation = .right + } else if (!isARSupported && !agentMode) { + orientation = .upMirrored + } else { + orientation = .up + } + while (livenessImages.count < numberOfLivenessImages) && ((Date().millisecondsSince1970 - lastCaptureTime) > interCaptureDelay) { guard let image = ImageUtils.resizePixelBufferToWidth(currentBuffer, width: 350, exif: - currentExif) else { + currentExif, orientation: orientation) else { return } @@ -350,10 +367,10 @@ final class SelfieCaptureViewModel: ObservableObject { faceGeometry: faceGeometry, agentMode: agentMode, finalSize: selfieImageSize, - screenImageSize: viewFinderSize) else { + screenImageSize: viewFinderSize, orientation: orientation) else { return } guard let selfieImage = ImageUtils.resizePixelBufferToWidth(currentBuffer, width: 600, - exif: currentExif) else { + exif: currentExif, orientation: orientation) else { return } lastCaptureTime = Date().millisecondsSince1970 self.selfieImage = selfieImage @@ -603,18 +620,18 @@ extension SelfieCaptureViewModel { } func updateAcceptableBounds(using boundingBox: CGRect) { - if boundingBox.width > 0.80 * faceLayoutGuideFrame.width { + if boundingBox.width > (0.80 * faceLayoutGuideFrame.width) { isAcceptableBounds = .detectedFaceTooLarge subject.send("Instructions.FaceClose") - } else if boundingBox.width < faceLayoutGuideFrame.width * 0.25 { + } else if boundingBox.width < (faceLayoutGuideFrame.width * 0.25) { isAcceptableBounds = .detectedFaceTooSmall subject.send("Instructions.FaceFar") } else { - if abs(boundingBox.midX - faceLayoutGuideFrame.midX) > 100 { - isAcceptableBounds = .detectedFaceOffCentre - subject.send("Instructions.Start") - resetCapture() - } else if abs(boundingBox.midY - faceLayoutGuideFrame.midY) > 210 { + let isFaceInFrame = boundingBox.minX >= faceLayoutGuideFrame.minX && + boundingBox.maxX <= faceLayoutGuideFrame.maxX && + boundingBox.maxY <= faceLayoutGuideFrame.maxY && + boundingBox.minY >= faceLayoutGuideFrame.minY + if !isFaceInFrame { isAcceptableBounds = .detectedFaceOffCentre subject.send("Instructions.Start") resetCapture() @@ -624,6 +641,7 @@ extension SelfieCaptureViewModel { } } + //TO-DO: Fix roll and yaw func updateAcceptableRollYaw(using roll: Double, yaw: Double) { // Roll values differ because back camera feed is in landscape let maxRoll = agentMode || !isARSupported ? 
2.0 : 0.5 diff --git a/Sources/SmileID/Classes/SmileID.swift b/Sources/SmileID/Classes/SmileID.swift index 6d2f9f9d0..a39e67086 100644 --- a/Sources/SmileID/Classes/SmileID.swift +++ b/Sources/SmileID/Classes/SmileID.swift @@ -38,14 +38,21 @@ public class SmileID { public class func smartSelfieEnrollmentScreen(userId: String = "user-\(UUID().uuidString)", jobId: String = "job-\(UUID().uuidString)", + allowAgentMode: Bool = false, + showAttribution: Bool = true, delegate: SmartSelfieResultDelegate) -> SmartSelfieInstructionsView { - let viewModel = SelfieCaptureViewModel(userId: userId, jobId: jobId, isEnroll: true) + let viewModel = SelfieCaptureViewModel(userId: userId, + jobId: jobId, + isEnroll: true, + allowsAgentMode: allowAgentMode, + showAttribution: showAttribution) return SmartSelfieInstructionsView(viewModel: viewModel, delegate: delegate) } public class func documentVerificationScreen(userId _: String = "user-\(UUID().uuidString)", jobId _: String = "job-\(UUID().uuidString)", + showAttribution: Bool = true, delegate: DocumentCaptureResultDelegate) -> DocumentCaptureInstructionsView { let viewModel = DocumentCaptureViewModel() @@ -54,9 +61,15 @@ public class SmileID { public class func smartSelfieAuthenticationScreen(userId: String, jobId: String = "job-\(UUID().uuidString)", + allowAgentMode: Bool = false, + showAttribution: Bool = true, delegate: SmartSelfieResultDelegate) -> SmartSelfieInstructionsView { - let viewModel = SelfieCaptureViewModel(userId: userId, jobId: jobId, isEnroll: false) + let viewModel = SelfieCaptureViewModel(userId: userId, + jobId: jobId, + isEnroll: false, + allowsAgentMode: allowAgentMode, + showAttribution: showAttribution) return SmartSelfieInstructionsView(viewModel: viewModel, delegate: delegate) }
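Reviewer note (not part of the patch): the reworked `switchCamera(to:)` in CameraManager.swift is the core of the "Set up AVCaptureSession in the right sequence" fix. Below is a minimal, self-contained Swift sketch of that sequencing, with illustrative type and method names (not the SDK's), assuming only AVFoundation: when the session is not yet running, the input and output are wired up before `startRunning()`; when it is already running, the input swap happens inside a `beginConfiguration()`/`commitConfiguration()` pair so the session reconfigures atomically.

import AVFoundation

// Illustrative sketch only; mirrors the ordering CameraManager.switchCamera(to:)
// uses after this change. Names here are hypothetical, not SDK API.
final class SessionSwitcherSketch {
    let session = AVCaptureSession()
    private let videoOutput = AVCaptureVideoDataOutput()
    // Session work happens off the main thread, as in the patch.
    private let sessionQueue = DispatchQueue(label: "session.switcher.sketch")

    func switchCamera(to position: AVCaptureDevice.Position) {
        sessionQueue.async { [self] in
            if !session.isRunning {
                // Not running yet: attach input, then output, then start.
                swapInput(to: position)
                attachOutput()
                session.startRunning()
            } else {
                // Already running: swap the input atomically.
                session.beginConfiguration()
                swapInput(to: position)
                attachOutput()
                session.commitConfiguration()
            }
        }
    }

    private func swapInput(to position: AVCaptureDevice.Position) {
        // Remove the current camera input before adding the new one.
        if let current = session.inputs.first as? AVCaptureDeviceInput {
            session.removeInput(current)
        }
        guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera,
                                                   for: .video,
                                                   position: position),
              let input = try? AVCaptureDeviceInput(device: camera),
              session.canAddInput(input) else { return }
        session.addInput(input)
    }

    private func attachOutput() {
        // As in the patch, drop any stale output first so its video
        // connection is rebuilt against the newly added input.
        session.removeOutput(videoOutput)
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
            videoOutput.connection(with: .video)?.videoOrientation = .portrait
        }
    }
}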