diff --git a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
index 7859d774..a61605c0 100644
--- a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
+++ b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
@@ -5,8 +5,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/aws-amplify/amplify-swift",
       "state" : {
-        "revision" : "79d062d354bb190666774e8ef3c83ad52f023889",
-        "version" : "2.38.0"
+        "branch" : "feat/no-light-support",
+        "revision" : "22e02fa21399122aac1d8b4f6ab23c242c79dae6"
       }
     },
     {
@@ -50,8 +50,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/stephencelis/SQLite.swift.git",
       "state" : {
-        "revision" : "a95fc6df17d108bd99210db5e8a9bac90fe984b8",
-        "version" : "0.15.3"
+        "revision" : "5f5ad81ac0d0a0f3e56e39e646e8423c617df523",
+        "version" : "0.13.2"
       }
     },
     {
diff --git a/HostApp/HostApp/Model/LivenessResult.swift b/HostApp/HostApp/Model/LivenessResult.swift
index 226bc30f..3a36f089 100644
--- a/HostApp/HostApp/Model/LivenessResult.swift
+++ b/HostApp/HostApp/Model/LivenessResult.swift
@@ -6,11 +6,13 @@
 //
 
 import Foundation
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 
 struct LivenessResult: Codable {
     let auditImageBytes: String?
     let confidenceScore: Double
     let isLive: Bool
+    let challenge: Challenge?
 }
 
 extension LivenessResult: CustomDebugStringConvertible {
@@ -20,6 +22,8 @@ extension LivenessResult: CustomDebugStringConvertible {
         - confidenceScore: \(confidenceScore)
         - isLive: \(isLive)
         - auditImageBytes: \(auditImageBytes == nil ? "nil" : "")
+        - challengeType: \(String(describing: challenge?.type))
+        - challengeVersion: \(String(describing: challenge?.version))
         """
     }
 }
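Because the new `challenge` field is optional and `LivenessResult` relies on its synthesized `CodingKeys`, result payloads produced before this change (which carry no `challenge` key) still decode. A minimal sketch of that assumption; the JSON body is illustrative, not a captured backend response:

```swift
import Foundation

// Hedged sketch: an old-style payload without a "challenge" key.
let legacyJSON = Data("""
{"confidenceScore": 99.2, "isLive": true, "auditImageBytes": null}
""".utf8)

let result = try JSONDecoder().decode(LivenessResult.self, from: legacyJSON)
assert(result.challenge == nil) // absent key decodes to nil, so no challenge-specific tips render
```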
diff --git a/HostApp/HostApp/Views/LivenessResultContentView+Result.swift b/HostApp/HostApp/Views/LivenessResultContentView+Result.swift
index 3f57982f..0b18eaab 100644
--- a/HostApp/HostApp/Views/LivenessResultContentView+Result.swift
+++ b/HostApp/HostApp/Views/LivenessResultContentView+Result.swift
@@ -6,6 +6,7 @@
 //
 
 import SwiftUI
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 
 extension LivenessResultContentView {
     struct Result {
@@ -15,6 +16,7 @@ extension LivenessResultContentView {
         let valueBackgroundColor: Color
         let auditImage: Data?
         let isLive: Bool
+        let challenge: Challenge?
 
         init(livenessResult: LivenessResult) {
             guard livenessResult.confidenceScore > 0 else {
@@ -24,6 +26,7 @@ extension LivenessResultContentView {
                 valueBackgroundColor = .clear
                 auditImage = nil
                 isLive = false
+                challenge = nil
                 return
             }
             isLive = livenessResult.isLive
@@ -41,6 +44,7 @@ extension LivenessResultContentView {
             auditImage = livenessResult.auditImageBytes.flatMap{
                 Data(base64Encoded: $0)
             }
+            challenge = livenessResult.challenge
         }
     }
 
diff --git a/HostApp/HostApp/Views/LivenessResultContentView.swift b/HostApp/HostApp/Views/LivenessResultContentView.swift
index de2ecff7..51660f55 100644
--- a/HostApp/HostApp/Views/LivenessResultContentView.swift
+++ b/HostApp/HostApp/Views/LivenessResultContentView.swift
@@ -6,9 +6,10 @@
 //
 
 import SwiftUI
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 
 struct LivenessResultContentView: View {
-    @State var result: Result = .init(livenessResult: .init(auditImageBytes: nil, confidenceScore: -1, isLive: false))
+    @State var result: Result = .init(livenessResult: .init(auditImageBytes: nil, confidenceScore: -1, isLive: false, challenge: nil))
     let fetchResults: () async throws -> Result
 
     var body: some View {
@@ -67,26 +68,48 @@ struct LivenessResultContentView: View {
         }
     }
 
+    func step(number: Int, text: String) -> some View {
+        HStack(alignment: .top) {
+            Text("\(number).")
+            Text(text)
+        }
+    }
+
+    @ViewBuilder
     private func steps() -> some View {
-        func step(number: Int, text: String) -> some View {
-            HStack(alignment: .top) {
-                Text("\(number).")
-                Text(text)
+        switch result.challenge?.type {
+        case .faceMovementChallenge:
+            VStack(
+                alignment: .leading,
+                spacing: 8
+            ) {
+                Text("Tips to pass the video check:")
+                    .fontWeight(.semibold)
+
+                Text("Remove sunglasses, mask, hat, or anything blocking your face.")
+                    .accessibilityElement(children: .combine)
+            }
+        case .faceMovementAndLightChallenge:
+            VStack(
+                alignment: .leading,
+                spacing: 8
+            ) {
+                Text("Tips to pass the video check:")
+                    .fontWeight(.semibold)
+
+                step(number: 1, text: "Avoid very bright lighting conditions, such as direct sunlight.")
+                    .accessibilityElement(children: .combine)
+
+                step(number: 2, text: "Remove sunglasses, mask, hat, or anything blocking your face.")
+                    .accessibilityElement(children: .combine)
+            }
+        case .none:
+            VStack(
+                alignment: .leading,
+                spacing: 8
+            ) {
+                EmptyView()
             }
-        }
-
-        return VStack(
-            alignment: .leading,
-            spacing: 8
-        ) {
-            Text("Tips to pass the video check:")
-                .fontWeight(.semibold)
-
-            step(number: 1, text: "Avoid very bright lighting conditions, such as direct sunlight.")
-                .accessibilityElement(children: .combine)
-
-            step(number: 2, text: "Remove sunglasses, mask, hat, or anything blocking your face.")
-                .accessibilityElement(children: .combine)
         }
     }
 }
@@ -99,7 +122,8 @@ extension LivenessResultContentView {
             livenessResult: .init(
                 auditImageBytes: nil,
                 confidenceScore: 99.8329,
-                isLive: true
+                isLive: true,
+                challenge: nil
             )
         )
     }
diff --git a/Package.resolved b/Package.resolved
index 7859d774..13524dd4 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -5,8 +5,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/aws-amplify/amplify-swift",
       "state" : {
-        "revision" : "79d062d354bb190666774e8ef3c83ad52f023889",
-        "version" : "2.38.0"
+        "branch" : "feat/no-light-support",
+        "revision" : "614be628cb01188e519bb0e9e4d90bd83703d139"
       }
     },
     {
diff --git a/Package.swift b/Package.swift
index 4e4d49e2..446f12c0 100644
--- a/Package.swift
+++ b/Package.swift
@@ -13,7 +13,8 @@ let package = Package(
             targets: ["FaceLiveness"]),
     ],
     dependencies: [
-        .package(url: "https://github.com/aws-amplify/amplify-swift", exact: "2.38.0")
+        // TODO: Change this before merge to main
+        .package(url: "https://github.com/aws-amplify/amplify-swift", branch: "feat/no-light-support")
    ],
    targets: [
        .target(
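The `// TODO` marks the branch pin as temporary: SwiftPM only permits branch-based dependencies in root packages, so downstream consumers could not resolve this library while the pin remains. Before merging to main it would presumably return to an exact pin against whichever amplify-swift release ships the challenge APIs. A hedged sketch of that end state; the version number is a placeholder, not a confirmed release:

```swift
// Hypothetical post-release form of the dependency; "2.39.0" is illustrative only.
dependencies: [
    .package(url: "https://github.com/aws-amplify/amplify-swift", exact: "2.39.0")
],
```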
diff --git a/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift b/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift
index d6879848..1d62b263 100644
--- a/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift
+++ b/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift
@@ -6,6 +6,7 @@
 //
 
 import Foundation
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 
 struct DetectedFace {
     var boundingBox: CGRect
@@ -19,7 +20,8 @@ struct DetectedFace {
 
     let confidence: Float
 
-    func boundingBoxFromLandmarks(ovalRect: CGRect) -> CGRect {
+    func boundingBoxFromLandmarks(ovalRect: CGRect,
+                                  ovalMatchChallenge: FaceLivenessSession.OvalMatchChallenge) -> CGRect {
         let alpha = 2.0
         let gamma = 1.8
         let ow = (alpha * pupilDistance + gamma * faceHeight) / 2
@@ -34,7 +36,7 @@ struct DetectedFace {
         }
 
         let faceWidth = ow
-        let faceHeight = 1.618 * faceWidth
+        let faceHeight = ovalMatchChallenge.oval.heightWidthRatio * faceWidth
         let faceBoxBottom = boundingBox.maxY
         let faceBoxTop = faceBoxBottom - faceHeight
         let faceBoxLeft = min(cx - ow / 2, rightEar.x)
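The hard-coded golden-ratio constant becomes a server-driven value: a session configuration carrying `heightWidthRatio: 1.618` reproduces the old geometry exactly, and any other ratio now takes effect without a client release. A small sketch of the relationship, with illustrative values:

```swift
import CoreGraphics

// before: faceHeight = 1.618 * faceWidth                 (compiled in)
// after:  faceHeight = oval.heightWidthRatio * faceWidth (from session config)
let faceWidth: CGFloat = 200
let legacyHeight = 1.618 * faceWidth   // 323.6, the only possible result before
let configuredHeight = 1.7 * faceWidth // 340.0, e.g. if the service sent 1.7
```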
diff --git a/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift b/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift
index d9430720..100f0418 100644
--- a/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift
+++ b/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift
@@ -12,6 +12,7 @@ import Accelerate
 import CoreGraphics
 import CoreImage
 import VideoToolbox
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 
 enum FaceDetectorShortRange {}
 
@@ -33,11 +34,16 @@ extension FaceDetectorShortRange {
             )
         }
 
+        weak var faceDetectionSessionConfiguration: FaceDetectionSessionConfigurationWrapper?
        weak var detectionResultHandler: FaceDetectionResultHandler?
 
        func setResultHandler(detectionResultHandler: FaceDetectionResultHandler) {
            self.detectionResultHandler = detectionResultHandler
        }
+
+        func setFaceDetectionSessionConfigurationWrapper(configuration: FaceDetectionSessionConfigurationWrapper) {
+            self.faceDetectionSessionConfiguration = configuration
+        }
 
        func detectFaces(from buffer: CVPixelBuffer) {
            let faces = prediction(for: buffer)
@@ -105,10 +111,17 @@ extension FaceDetectorShortRange {
                     count: confidenceScoresCapacity
                 )
             )
+
+            let blazeFaceDetectionThreshold: Float
+            if let sessionConfiguration = faceDetectionSessionConfiguration?.sessionConfiguration {
+                blazeFaceDetectionThreshold = Float(sessionConfiguration.ovalMatchChallenge.faceDetectionThreshold)
+            } else {
+                blazeFaceDetectionThreshold = confidenceScoreThreshold
+            }
 
             var passingConfidenceScoresIndices = confidenceScores
                 .enumerated()
-                .filter { $0.element >= confidenceScoreThreshold }
+                .filter { $0.element >= blazeFaceDetectionThreshold }
                 .sorted(by: {
                     $0.element > $1.element
                 })
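The detector now prefers the service-supplied detection threshold and only falls back to the bundled `confidenceScoreThreshold` while the session configuration hasn't arrived; the wrapper is held `weak` so the detector never retains the view model. The same fallback rule, isolated as a sketch (`effectiveThreshold` is a hypothetical helper, not part of the diff):

```swift
// Hedged sketch of the threshold-fallback pattern used above.
func effectiveThreshold(
    from wrapper: FaceDetectionSessionConfigurationWrapper?,
    localDefault: Float
) -> Float {
    guard let remote = wrapper?.sessionConfiguration?.ovalMatchChallenge.faceDetectionThreshold else {
        return localDefault // configuration not received yet, or wrapper already deallocated
    }
    return Float(remote)
}
```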
diff --git a/Sources/FaceLiveness/FaceDetection/FaceDetector.swift b/Sources/FaceLiveness/FaceDetection/FaceDetector.swift
index 3801eeab..1afb90c1 100644
--- a/Sources/FaceLiveness/FaceDetection/FaceDetector.swift
+++ b/Sources/FaceLiveness/FaceDetection/FaceDetector.swift
@@ -6,6 +6,7 @@
 //
 
 import AVFoundation
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 
 protocol FaceDetector {
     func detectFaces(from buffer: CVPixelBuffer)
@@ -16,6 +17,10 @@ protocol FaceDetectionResultHandler: AnyObject {
     func process(newResult: FaceDetectionResult)
 }
 
+protocol FaceDetectionSessionConfigurationWrapper: AnyObject {
+    var sessionConfiguration: FaceLivenessSession.SessionConfiguration? { get }
+}
+
 enum FaceDetectionResult {
     case noFace
     case singleFace(DetectedFace)
diff --git a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift
index 00ecb9b7..0c52ccff 100644
--- a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift
+++ b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift
@@ -6,17 +6,21 @@
 //
 
 import SwiftUI
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 
 struct GetReadyPageView: View {
     let beginCheckButtonDisabled: Bool
     let onBegin: () -> Void
-
+    let challenge: Challenge
+
     init(
         onBegin: @escaping () -> Void,
-        beginCheckButtonDisabled: Bool = false
+        beginCheckButtonDisabled: Bool = false,
+        challenge: Challenge
     ) {
         self.onBegin = onBegin
         self.beginCheckButtonDisabled = beginCheckButtonDisabled
+        self.challenge = challenge
     }
 
     var body: some View {
@@ -30,6 +34,7 @@ struct GetReadyPageView: View {
                 popoverContent: { photosensitivityWarningPopoverContent }
             )
             .accessibilityElement(children: .combine)
+            .opacity(challenge.type == .faceMovementAndLightChallenge ? 1.0 : 0.0)
             Text(LocalizedStrings.preview_center_your_face_text)
                 .font(.title)
                 .multilineTextAlignment(.center)
@@ -72,6 +77,8 @@ struct GetReadyPageView: View {
 
 struct GetReadyPageView_Previews: PreviewProvider {
     static var previews: some View {
-        GetReadyPageView(onBegin: {})
+        GetReadyPageView(onBegin: {},
+                         challenge: .init(version: "2.0.0",
+                                          type: .faceMovementAndLightChallenge))
     }
 }
diff --git a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift
index ff02a3d6..5ed45ae7 100644
--- a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift
+++ b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift
@@ -7,6 +7,7 @@
 
 import SwiftUI
 import Combine
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 
 struct InstructionContainerView: View {
     @ObservedObject var viewModel: FaceLivenessDetectionViewModel
@@ -97,13 +98,29 @@ struct InstructionContainerView: View {
                     argument: LocalizedStrings.challenge_verifying
                 )
             }
-        case .faceMatched:
+        case .completedNoLightCheck:
             InstructionView(
-                text: LocalizedStrings.challenge_instruction_hold_still,
-                backgroundColor: .livenessPrimaryBackground,
-                textColor: .livenessPrimaryLabel,
-                font: .title
+                text: LocalizedStrings.challenge_verifying,
+                backgroundColor: .livenessBackground
             )
+            .onAppear {
+                UIAccessibility.post(
+                    notification: .announcement,
+                    argument: LocalizedStrings.challenge_verifying
+                )
+            }
+        case .faceMatched:
+            if let challenge = viewModel.challenge,
+               case .faceMovementAndLightChallenge = challenge.type {
+                InstructionView(
+                    text: LocalizedStrings.challenge_instruction_hold_still,
+                    backgroundColor: .livenessPrimaryBackground,
+                    textColor: .livenessPrimaryLabel,
+                    font: .title
+                )
+            } else {
+                EmptyView()
+            }
         default:
             EmptyView()
         }
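Two UI rules introduced here follow the same predicate: the photosensitivity warning on the get-ready page and the post-match "hold still" instruction both apply only when the light challenge was negotiated. Pulled out as a sketch (`isLightChallenge` is a hypothetical helper, not part of the diff):

```swift
// Hedged sketch of the shared predicate behind both view changes above.
func isLightChallenge(_ challenge: Challenge?) -> Bool {
    challenge?.type == .faceMovementAndLightChallenge
}

// GetReadyPageView:         .opacity(isLightChallenge(challenge) ? 1.0 : 0.0)
// InstructionContainerView: show "hold still" only when isLightChallenge(viewModel.challenge)
```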
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift
index 81eacfe9..f803a863 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift
@@ -16,7 +16,7 @@ import Amplify
 public struct FaceLivenessDetectorView: View {
     @StateObject var viewModel: FaceLivenessDetectionViewModel
     @Binding var isPresented: Bool
-    @State var displayState: DisplayState = .awaitingCameraPermission
+    @State var displayState: DisplayState = .awaitingChallengeType
     @State var displayingCameraPermissionsNeededAlert = false
 
     let disableStartView: Bool
@@ -31,7 +31,7 @@ public struct FaceLivenessDetectorView: View {
         disableStartView: Bool = false,
         isPresented: Binding<Bool>,
         onCompletion: @escaping (Result<Void, FaceLivenessDetectionError>) -> Void
-    ) {
+    ) {
         self.disableStartView = disableStartView
         self._isPresented = isPresented
         self.onCompletion = onCompletion
@@ -41,7 +41,6 @@ public struct FaceLivenessDetectorView: View {
             withID: sessionID,
             credentialsProvider: credentialsProvider,
             region: region,
-            options: .init(),
             completion: map(detectionCompletion: onCompletion)
         )
         return session
@@ -79,7 +78,8 @@ public struct FaceLivenessDetectorView: View {
                 captureSession: captureSession,
                 videoChunker: videoChunker,
                 closeButtonAction: { onCompletion(.failure(.userCancelled)) },
-                sessionID: sessionID
+                sessionID: sessionID,
+                isPreviewScreenEnabled: !disableStartView
             )
         )
     }
@@ -102,7 +102,6 @@ public struct FaceLivenessDetectorView: View {
             withID: sessionID,
             credentialsProvider: credentialsProvider,
             region: region,
-            options: .init(),
             completion: map(detectionCompletion: onCompletion)
         )
         return session
@@ -119,39 +118,69 @@ public struct FaceLivenessDetectorView: View {
             captureSession: captureSession,
             videoChunker: captureSession.outputSampleBufferCapturer!.videoChunker,
             closeButtonAction: { onCompletion(.failure(.userCancelled)) },
-            sessionID: sessionID
+            sessionID: sessionID,
+            isPreviewScreenEnabled: !disableStartView
         )
     )
 }
 
     public var body: some View {
         switch displayState {
-        case .awaitingLivenessSession:
+        case .awaitingChallengeType:
+            LoadingPageView()
+                .onAppear {
+                    Task {
+                        do {
+                            let session = try await sessionTask.value
+                            viewModel.livenessService = session
+                            viewModel.registerServiceEvents(onChallengeTypeReceived: { challenge in
+                                self.displayState = DisplayState.awaitingLivenessSession(challenge)
+                            })
+                            viewModel.initializeLivenessStream()
+                        } catch {
+                            throw FaceLivenessDetectionError.accessDenied
+                        }
+
+                        DispatchQueue.main.async {
+                            if let faceDetector = viewModel.faceDetector as? FaceDetectorShortRange.Model {
+                                faceDetector.setFaceDetectionSessionConfigurationWrapper(configuration: viewModel)
+                            }
+                        }
+                    }
+                }
+                .onReceive(viewModel.$livenessState) { output in
+                    switch output.state {
+                    case .encounteredUnrecoverableError(let error):
+                        let closeCode = error.webSocketCloseCode ?? .normalClosure
+                        viewModel.livenessService?.closeSocket(with: closeCode)
+                        isPresented = false
+                        onCompletion(.failure(mapError(error)))
+                    default:
+                        break
+                    }
+                }
+        case .awaitingLivenessSession(let challenge):
             Color.clear
                 .onAppear {
                     Task {
                         do {
                             let newState = disableStartView
                                 ? DisplayState.displayingLiveness
-                                : DisplayState.displayingGetReadyView
+                                : DisplayState.displayingGetReadyView(challenge)
                             guard self.displayState != newState else { return }
                             self.displayState = newState
-                        } catch {
-                            throw FaceLivenessDetectionError.accessDenied
                         }
                     }
                 }
-        case .displayingGetReadyView:
+        case .displayingGetReadyView(let challenge):
             GetReadyPageView(
                 onBegin: {
                     guard displayState != .displayingLiveness else { return }
                     displayState = .displayingLiveness
                 },
-                beginCheckButtonDisabled: false
+                beginCheckButtonDisabled: false,
+                challenge: challenge
             )
             .onAppear {
                 DispatchQueue.main.async {
@@ -217,7 +246,8 @@ public struct FaceLivenessDetectorView: View {
             for: .video,
             completionHandler: { accessGranted in
                 guard accessGranted == true else { return }
-                displayState = .awaitingLivenessSession
+                guard let challenge = viewModel.challenge else { return }
+                displayState = .awaitingLivenessSession(challenge)
             }
         )
 
@@ -235,18 +265,37 @@ public struct FaceLivenessDetectorView: View {
         case .restricted, .denied:
             alertCameraAccessNeeded()
         case .authorized:
-            displayState = .awaitingLivenessSession
+            guard let challenge = viewModel.challenge else { return }
+            displayState = .awaitingLivenessSession(challenge)
         @unknown default:
             break
         }
     }
 }
 
-enum DisplayState {
-    case awaitingLivenessSession
-    case displayingGetReadyView
+enum DisplayState: Equatable {
+    case awaitingChallengeType
+    case awaitingLivenessSession(Challenge)
+    case displayingGetReadyView(Challenge)
     case displayingLiveness
     case awaitingCameraPermission
+
+    static func == (lhs: DisplayState, rhs: DisplayState) -> Bool {
+        switch (lhs, rhs) {
+        case (.awaitingChallengeType, .awaitingChallengeType):
+            return true
+        case (let .awaitingLivenessSession(c1), let .awaitingLivenessSession(c2)):
+            return c1.type == c2.type && c1.version == c2.version
+        case (let .displayingGetReadyView(c1), let .displayingGetReadyView(c2)):
+            return c1.type == c2.type && c1.version == c2.version
+        case (.displayingLiveness, .displayingLiveness):
+            return true
+        case (.awaitingCameraPermission, .awaitingCameraPermission):
+            return true
+        default:
+            return false
+        }
+    }
 }
 
 enum InstructionState {
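The view now opens the websocket and waits for the server's challenge event before showing any liveness UI, so the happy-path flow becomes `.awaitingChallengeType` (LoadingPageView) → `.awaitingLivenessSession(challenge)` → `.displayingGetReadyView(challenge)` (skipped when `disableStartView` is true) → `.displayingLiveness`. The hand-written `==` exists because the SPI `Challenge` type is not declared `Equatable` here, and `type` plus `version` is the whole identity the UI cares about. A hedged alternative, not part of the diff:

```swift
// Hypothetical alternative: conform the (non-owned, SPI) Challenge type once...
extension Challenge: Equatable {
    public static func == (lhs: Challenge, rhs: Challenge) -> Bool {
        lhs.type == rhs.type && lhs.version == rhs.version
    }
}

// ...and let the compiler synthesize DisplayState's == over associated values:
// enum DisplayState: Equatable { ... }   // no hand-written == needed
```

Retroactively conforming a type you don't own is generally discouraged (the library could add its own conformance later), which may be why the diff keeps the comparison local to `DisplayState`.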
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift
index 99e92ee2..0e43de2a 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift
@@ -28,14 +28,15 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler {
             }
         case .singleFace(let face):
             var normalizedFace = normalizeFace(face)
-            normalizedFace.boundingBox = normalizedFace.boundingBoxFromLandmarks(ovalRect: ovalRect)
+            guard let sessionConfiguration = sessionConfiguration else { return }
+            normalizedFace.boundingBox = normalizedFace.boundingBoxFromLandmarks(ovalRect: ovalRect,
+                                                                                 ovalMatchChallenge: sessionConfiguration.ovalMatchChallenge)
 
             switch livenessState.state {
             case .pendingFacePreparedConfirmation:
-                if face.faceDistance <= initialFaceDistanceThreshold {
+                if face.faceDistance <= sessionConfiguration.ovalMatchChallenge.face.distanceThreshold {
                     DispatchQueue.main.async {
                         self.livenessState.awaitingRecording()
-                        self.initializeLivenessStream()
                     }
                     DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
                         self.livenessState.beginRecording()
@@ -55,7 +56,6 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler {
                 )
             })
         case .recording(ovalDisplayed: true):
-            guard let sessionConfiguration = sessionConfiguration else { return }
             let instruction = faceInOvalMatching.faceMatchState(
                 for: normalizedFace.boundingBox,
                 in: ovalRect,
@@ -64,18 +64,18 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler {
 
             handleInstruction(
                 instruction,
-                colorSequences: sessionConfiguration.colorChallenge.colors
+                colorSequences: sessionConfiguration.colorChallenge?.colors
             )
         case .awaitingFaceInOvalMatch:
-            guard let sessionConfiguration = sessionConfiguration else { return }
             let instruction = faceInOvalMatching.faceMatchState(
                 for: normalizedFace.boundingBox,
                 in: ovalRect,
                 challengeConfig: sessionConfiguration.ovalMatchChallenge
             )
+
             handleInstruction(
                 instruction,
-                colorSequences: sessionConfiguration.colorChallenge.colors
+                colorSequences: sessionConfiguration.colorChallenge?.colors
             )
         default:
             break
@@ -104,16 +104,28 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler {
 
     func handleInstruction(
         _ instruction: Instructor.Instruction,
-        colorSequences: [FaceLivenessSession.DisplayColor]
+        colorSequences: [FaceLivenessSession.DisplayColor]?
     ) {
         DispatchQueue.main.async {
             switch instruction {
             case .match:
                 self.livenessState.faceMatched()
                 self.faceMatchedTimestamp = Date().timestampMilliseconds
-                DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
-                    self.livenessViewControllerDelegate?.displayFreshness(colorSequences: colorSequences)
+
+                // next step after face match
+                switch self.challenge?.type {
+                case .faceMovementAndLightChallenge:
+                    if let colorSequences = colorSequences {
+                        DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
+                            self.livenessViewControllerDelegate?.displayFreshness(colorSequences: colorSequences)
+                        }
+                    }
+                case .faceMovementChallenge:
+                    self.livenessViewControllerDelegate?.completeNoLightCheck()
+                default:
+                    break
                 }
+
                 let generator = UINotificationFeedbackGenerator()
                 generator.notificationOccurred(.success)
                 self.noFitStartTime = nil
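`colorSequences` becomes optional because a movement-only session carries no color challenge, and the post-match step now forks on the negotiated challenge type: the light challenge flashes the freshness colors after a one-second hold, while the movement-only challenge skips straight to completion. The decision in isolation (`delegate` stands in for `livenessViewControllerDelegate`; names otherwise from the diff above):

```swift
// Hedged sketch of the post-match fork.
switch challenge?.type {
case .faceMovementAndLightChallenge:
    if let colorSequences {                 // present only for light sessions
        delegate?.displayFreshness(colorSequences: colorSequences)
    }
case .faceMovementChallenge:
    delegate?.completeNoLightCheck()        // nothing to flash; finish up
default:
    break                                   // challenge not yet known: do nothing
}
```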
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift
index c2ed2b39..d2f88343 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift
@@ -11,8 +11,8 @@ extension FaceLivenessDetectionViewModel: VideoSegmentProcessor {
     func process(initalSegment: Data, currentSeparableSegment: Data) {
         let chunk = chunk(initial: initalSegment, current: currentSeparableSegment)
         sendVideoEvent(data: chunk, videoEventTime: .zero)
-        if !hasSentFinalVideoEvent,
-           case .completedDisplayingFreshness = livenessState.state {
+        if !hasSentFinalVideoEvent &&
+            (livenessState.state == .completedDisplayingFreshness || livenessState.state == .completedNoLightCheck) {
             DispatchQueue.global(qos: .default).asyncAfter(deadline: .now() + 0.9) {
                 self.sendFinalVideoEvent()
             }
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
index 7f834cb2..42e7149a 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
@@ -12,6 +12,7 @@ import AVFoundation
 
 fileprivate let videoSize: CGSize = .init(width: 480, height: 640)
 fileprivate let defaultNoFitTimeoutInterval: TimeInterval = 7
+fileprivate let defaultAttemptCountResetInterval: TimeInterval = 300.0
 
 @MainActor
 class FaceLivenessDetectionViewModel: ObservableObject {
@@ -28,11 +29,13 @@ class FaceLivenessDetectionViewModel: ObservableObject {
     let faceDetector: FaceDetector
     let faceInOvalMatching: FaceInOvalMatching
     let challengeID: String = UUID().uuidString
+    let isPreviewScreenEnabled: Bool
     var colorSequences: [ColorSequence] = []
     var hasSentFinalVideoEvent = false
     var hasSentFirstVideo = false
     var layerRectConverted: (CGRect) -> CGRect = { $0 }
     var sessionConfiguration: FaceLivenessSession.SessionConfiguration?
+    var challenge: Challenge?
     var normalizeFace: (DetectedFace) -> DetectedFace = { $0 }
     var provideSingleFrame: ((UIImage) -> Void)?
     var cameraViewRect = CGRect.zero
@@ -42,6 +45,9 @@ class FaceLivenessDetectionViewModel: ObservableObject {
     var faceMatchedTimestamp: UInt64?
     var noFitStartTime: Date?
+
+    static var attemptCount: Int = 0
+    static var attemptIdTimeStamp: Date = Date()
 
     var noFitTimeoutInterval: TimeInterval {
         if let sessionTimeoutMilliSec = sessionConfiguration?.ovalMatchChallenge.oval.ovalFitTimeout {
             return TimeInterval(sessionTimeoutMilliSec/1_000)
@@ -57,7 +63,8 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         videoChunker: VideoChunker,
         stateMachine: LivenessStateMachine = .init(state: .initial),
         closeButtonAction: @escaping () -> Void,
-        sessionID: String
+        sessionID: String,
+        isPreviewScreenEnabled: Bool
     ) {
         self.closeButtonAction = closeButtonAction
         self.videoChunker = videoChunker
@@ -66,6 +73,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         self.captureSession = captureSession
         self.faceDetector = faceDetector
         self.faceInOvalMatching = faceInOvalMatching
+        self.isPreviewScreenEnabled = isPreviewScreenEnabled
 
         self.closeButtonAction = { [weak self] in
             guard let self else { return }
@@ -89,7 +97,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         NotificationCenter.default.removeObserver(self)
     }
 
-    func registerServiceEvents() {
+    func registerServiceEvents(onChallengeTypeReceived: @escaping (Challenge) -> Void) {
         livenessService?.register(onComplete: { [weak self] reason in
             self?.stopRecording()
 
@@ -112,6 +120,13 @@ class FaceLivenessDetectionViewModel: ObservableObject {
             },
             on: .challenge
         )
+
+        livenessService?.register(
+            listener: { [weak self] _challenge in
+                self?.challenge = _challenge
+                onChallengeTypeReceived(_challenge)
+            },
+            on: .challenge)
     }
 
     @objc func willResignActive(_ notification: Notification) {
@@ -178,9 +193,20 @@ class FaceLivenessDetectionViewModel: ObservableObject {
 
     func initializeLivenessStream() {
         do {
+            if (abs(Self.attemptIdTimeStamp.timeIntervalSinceNow) > defaultAttemptCountResetInterval) {
+                Self.attemptCount = 1
+            } else {
+                Self.attemptCount += 1
+            }
+            Self.attemptIdTimeStamp = Date()
+
             try livenessService?.initializeLivenessStream(
                 withSessionID: sessionID,
-                userAgent: UserAgentValues.standard().userAgentString
+                userAgent: UserAgentValues.standard().userAgentString,
+                challenges: FaceLivenessSession.supportedChallenges,
+                options: .init(
+                    attemptCount: Self.attemptCount,
+                    preCheckViewEnabled: isPreviewScreenEnabled)
             )
         } catch {
             DispatchQueue.main.async {
@@ -226,6 +252,8 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         videoStartTime: UInt64
     ) {
         guard initialClientEvent == nil else { return }
+        guard let challenge else { return }
+
         videoChunker.start()
 
         let initialFace = FaceDetection(
@@ -243,7 +271,9 @@ class FaceLivenessDetectionViewModel: ObservableObject {
 
         do {
             try livenessService?.send(
-                .initialFaceDetected(event: _initialClientEvent),
+                .initialFaceDetected(event: _initialClientEvent,
+                                     challenge: .init(version: challenge.version,
+                                                      type: challenge.type)),
                 eventDate: { .init() }
             )
         } catch {
@@ -261,7 +291,8 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         guard let sessionConfiguration,
               let initialClientEvent,
-              let faceMatchedTimestamp
+              let faceMatchedTimestamp,
+              let challenge
         else { return }
 
         let finalClientEvent = FinalClientEvent(
@@ -275,7 +306,9 @@ class FaceLivenessDetectionViewModel: ObservableObject {
 
         do {
             try livenessService?.send(
-                .final(event: finalClientEvent),
+                .final(event: finalClientEvent,
+                       challenge: .init(version: challenge.version,
+                                        type: challenge.type)),
                 eventDate: { .init() }
             )
 
@@ -310,6 +343,13 @@ class FaceLivenessDetectionViewModel: ObservableObject {
             self.faceGuideRect = faceGuide
         }
     }
+
+    func completeNoLightCheck(faceGuide: CGRect) {
+        DispatchQueue.main.async {
+            self.livenessState.completedNoLightCheck()
+            self.faceGuideRect = faceGuide
+        }
+    }
 
     func sendVideoEvent(data: Data, videoEventTime: UInt64) {
         guard !hasSentFinalVideoEvent else { return }
@@ -362,3 +402,5 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         return data
     }
 }
+
+extension FaceLivenessDetectionViewModel: FaceDetectionSessionConfigurationWrapper { }
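`attemptCount` and `attemptIdTimeStamp` are static, so the counter survives across view-model instances within one process and is reported to the service through `Options(attemptCount:preCheckViewEnabled:)`. Retries within the 300-second window accumulate; a longer gap starts over at 1. The same policy as a pure function, which is easier to exercise in tests than mutable static state (`nextAttemptCount` is a hypothetical helper, not part of the diff):

```swift
import Foundation

// Hedged sketch of the reset rule used in initializeLivenessStream().
func nextAttemptCount(
    current: Int,
    lastAttempt: Date,
    now: Date = Date(),
    resetInterval: TimeInterval = 300
) -> Int {
    now.timeIntervalSince(lastAttempt) > resetInterval ? 1 : current + 1
}

// nextAttemptCount(current: 4, lastAttempt: Date().addingTimeInterval(-180)) == 5
// nextAttemptCount(current: 4, lastAttempt: Date().addingTimeInterval(-305)) == 1
```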
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessViewControllerPresenter.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessViewControllerPresenter.swift
index 5786620b..8fff8b9f 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessViewControllerPresenter.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessViewControllerPresenter.swift
@@ -12,4 +12,5 @@ protocol FaceLivenessViewControllerPresenter: AnyObject {
     func drawOvalInCanvas(_ ovalRect: CGRect)
     func displayFreshness(colorSequences: [FaceLivenessSession.DisplayColor])
     func displaySingleFrame(uiImage: UIImage)
+    func completeNoLightCheck()
 }
diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift b/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift
index c59629c9..e61f8311 100644
--- a/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift
+++ b/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift
@@ -76,6 +76,10 @@ struct LivenessStateMachine {
     mutating func completedDisplayingFreshness() {
         state = .completedDisplayingFreshness
     }
+
+    mutating func completedNoLightCheck() {
+        state = .completedNoLightCheck
+    }
 
     mutating func displayingFreshness() {
         state = .displayingFreshness
@@ -95,6 +99,7 @@ struct LivenessStateMachine {
 
     enum State: Equatable {
         case initial
+        case awaitingChallengeType
         case pendingFacePreparedConfirmation(FaceNotPreparedReason)
         case recording(ovalDisplayed: Bool)
         case awaitingFaceInOvalMatch(FaceNotPreparedReason, Double)
@@ -102,6 +107,7 @@ struct LivenessStateMachine {
         case initialClientInfoEventSent
         case displayingFreshness
         case completedDisplayingFreshness
+        case completedNoLightCheck
         case completed
         case awaitingDisconnectEvent
         case disconnectEventReceived
diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
index c274bde0..79b91e44 100644
--- a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
+++ b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
@@ -173,4 +173,11 @@ extension _LivenessViewController: FaceLivenessViewControllerPresenter {
             self.ovalExists = true
         }
     }
+
+    func completeNoLightCheck() {
+        guard let faceGuideRect = self.faceGuideRect else { return }
+        self.viewModel.completeNoLightCheck(
+            faceGuide: faceGuideRect
+        )
+    }
 }
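Taken together, the presenter protocol, state machine, and view controller changes wire the movement-only completion path end to end. A sketch of the call chain, assembled from the hunks above:

```swift
// handleInstruction(.match)  — view model, challenge type == .faceMovementChallenge
//   → livenessViewControllerDelegate?.completeNoLightCheck()   // presenter protocol
//     → _LivenessViewController.completeNoLightCheck()         // guards on faceGuideRect
//       → viewModel.completeNoLightCheck(faceGuide:)           // hops to the main queue
//         → livenessState.completedNoLightCheck()              // state → .completedNoLightCheck
//           → the next processed video segment triggers sendFinalVideoEvent()
```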
diff --git a/Sources/FaceLiveness/Views/LoadingPage/LoadingPageView.swift b/Sources/FaceLiveness/Views/LoadingPage/LoadingPageView.swift
new file mode 100644
index 00000000..e02b4e79
--- /dev/null
+++ b/Sources/FaceLiveness/Views/LoadingPage/LoadingPageView.swift
@@ -0,0 +1,27 @@
+//
+// Copyright Amazon.com Inc. or its affiliates.
+// All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+
+import SwiftUI
+
+struct LoadingPageView: View {
+
+    var body: some View {
+        VStack {
+            HStack(spacing: 5) {
+                ProgressView()
+                Text(LocalizedStrings.challenge_connecting)
+            }
+
+        }
+    }
+}
+
+struct LoadingPageView_Previews: PreviewProvider {
+    static var previews: some View {
+        LoadingPageView()
+    }
+}
diff --git a/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift b/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift
index 7d69251b..3c1dabbf 100644
--- a/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift
+++ b/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift
@@ -41,7 +41,8 @@ final class CredentialsProviderTestCase: XCTestCase {
             captureSession: captureSession,
             videoChunker: videoChunker,
             closeButtonAction: {},
-            sessionID: UUID().uuidString
+            sessionID: UUID().uuidString,
+            isPreviewScreenEnabled: false
         )
 
         self.videoChunker = videoChunker
diff --git a/Tests/FaceLivenessTests/DetectedFaceTests.swift b/Tests/FaceLivenessTests/DetectedFaceTests.swift
index 4bee8292..6d538e33 100644
--- a/Tests/FaceLivenessTests/DetectedFaceTests.swift
+++ b/Tests/FaceLivenessTests/DetectedFaceTests.swift
@@ -7,7 +7,7 @@
 
 import XCTest
 @testable import FaceLiveness
-
+@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin
 final class DetectedFaceTests: XCTestCase {
     var detectedFace: DetectedFace!
 
@@ -104,7 +104,29 @@ final class DetectedFaceTests: XCTestCase {
             width: 0.6240418540649166,
             height: 0.8144985824018897
         )
-        let boundingBox = detectedFace.boundingBoxFromLandmarks(ovalRect: ovalRect)
+
+        let face = FaceLivenessSession.OvalMatchChallenge.Face(
+            distanceThreshold: 0.1,
+            distanceThresholdMax: 0.1,
+            distanceThresholdMin: 0.1,
+            iouWidthThreshold: 0.1,
+            iouHeightThreshold: 0.1
+        )
+
+        let oval = FaceLivenessSession.OvalMatchChallenge.Oval(boundingBox: .init(x: 0.1,
+                                                                                  y: 0.1,
+                                                                                  width: 0.1,
+                                                                                  height: 0.1),
+                                                               heightWidthRatio: 1.618,
+                                                               iouThreshold: 0.1,
+                                                               iouWidthThreshold: 0.1,
+                                                               iouHeightThreshold: 0.1,
+                                                               ovalFitTimeout: 1)
+
+        let boundingBox = detectedFace.boundingBoxFromLandmarks(ovalRect: ovalRect,
+                                                                ovalMatchChallenge: .init(faceDetectionThreshold: 0.7,
+                                                                                          face: face,
+                                                                                          oval: oval))
         XCTAssertEqual(boundingBox.origin.x, expectedBoundingBox.origin.x)
         XCTAssertEqual(boundingBox.origin.y, expectedBoundingBox.origin.y)
         XCTAssertEqual(boundingBox.width, expectedBoundingBox.width)
diff --git a/Tests/FaceLivenessTests/LivenessTests.swift b/Tests/FaceLivenessTests/LivenessTests.swift
index da063930..5603914a 100644
--- a/Tests/FaceLivenessTests/LivenessTests.swift
+++ b/Tests/FaceLivenessTests/LivenessTests.swift
@@ -32,7 +32,8 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
             captureSession: captureSession,
             videoChunker: videoChunker,
             closeButtonAction: {},
-            sessionID: UUID().uuidString
+            sessionID: UUID().uuidString,
+            isPreviewScreenEnabled: false
         )
 
         self.videoChunker = videoChunker
@@ -69,6 +70,7 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
     /// Then: The end state of this flow is `.faceMatched`
     func testHappyPathToMatchedFace() async throws {
         viewModel.livenessService = self.livenessService
+        viewModel.challenge = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge)
 
         viewModel.livenessState.checkIsFacePrepared()
         XCTAssertEqual(viewModel.livenessState.state, .pendingFacePreparedConfirmation(.pendingCheck))
@@ -104,15 +106,38 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
             "setResultHandler(detectionResultHandler:) (FaceLivenessDetectionViewModel)"
         ])
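DetectedFaceTests above and LivenessTests below now build near-identical `Face`/`Oval` fixtures inline. A test-only factory could keep the shared literals in one place; a hedged sketch, where `makeForTesting` is a hypothetical helper (not part of the diff) and the parameter types are assumed to be `Double`:

```swift
// Hypothetical fixture factory for the repeated OvalMatchChallenge literals.
extension FaceLivenessSession.OvalMatchChallenge {
    static func makeForTesting(
        faceDetectionThreshold: Double = 0.7,
        distanceThreshold: Double = 0.1,
        heightWidthRatio: Double = 1.618
    ) -> Self {
        .init(
            faceDetectionThreshold: faceDetectionThreshold,
            face: .init(distanceThreshold: distanceThreshold,
                        distanceThresholdMax: 0.1,
                        distanceThresholdMin: 0.1,
                        iouWidthThreshold: 0.1,
                        iouHeightThreshold: 0.1),
            oval: .init(boundingBox: .init(x: 0.1, y: 0.1, width: 0.1, height: 0.1),
                        heightWidthRatio: heightWidthRatio,
                        iouThreshold: 0.1,
                        iouWidthThreshold: 0.1,
                        iouHeightThreshold: 0.1,
                        ovalFitTimeout: 1)
        )
    }
}
```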
         XCTAssertEqual(livenessService.interactions, [
-            "initializeLivenessStream(withSessionID:userAgent:)"
+            "initializeLivenessStream(withSessionID:userAgent:challenges:options:)"
         ])
     }
 
     /// Given: A `FaceLivenessDetectionViewModel`
     /// When: The viewModel processes a single face result with a face distance less than the initial face distance
-    /// Then: The end state of this flow is `.recording(ovalDisplayed: false)` and initializeLivenessStream(withSessionID:userAgent:) is called
+    /// Then: The end state of this flow is `.recording(ovalDisplayed: false)`
     func testTransitionToRecordingState() async throws {
         viewModel.livenessService = self.livenessService
+        viewModel.challenge = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge)
+
+        let face = FaceLivenessSession.OvalMatchChallenge.Face(
+            distanceThreshold: 0.32,
+            distanceThresholdMax: 0.1,
+            distanceThresholdMin: 0.1,
+            iouWidthThreshold: 0.1,
+            iouHeightThreshold: 0.1
+        )
+
+        let oval = FaceLivenessSession.OvalMatchChallenge.Oval(boundingBox: .init(x: 0.1,
+                                                                                  y: 0.1,
+                                                                                  width: 0.1,
+                                                                                  height: 0.1),
+                                                               heightWidthRatio: 1.618,
+                                                               iouThreshold: 0.1,
+                                                               iouWidthThreshold: 0.1,
+                                                               iouHeightThreshold: 0.1,
+                                                               ovalFitTimeout: 1)
+
+        viewModel.sessionConfiguration = .init(ovalMatchChallenge: .init(faceDetectionThreshold: 0.7,
+                                                                         face: face,
+                                                                         oval: oval))
 
         viewModel.livenessState.checkIsFacePrepared()
         XCTAssertEqual(viewModel.livenessState.state, .pendingFacePreparedConfirmation(.pendingCheck))
@@ -136,9 +161,6 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
         XCTAssertEqual(faceDetector.interactions, [
             "setResultHandler(detectionResultHandler:) (FaceLivenessDetectionViewModel)"
         ])
-        XCTAssertEqual(livenessService.interactions, [
-            "initializeLivenessStream(withSessionID:userAgent:)"
-        ])
     }
 
     /// Given: A `FaceLivenessDetectionViewModel`
@@ -174,4 +196,55 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
         try await Task.sleep(seconds: 1)
         XCTAssertEqual(self.viewModel.livenessState.state, .encounteredUnrecoverableError(.timedOut))
     }
+
+    /// Given: A `FaceLivenessDetectionViewModel`
+    /// When: initializeLivenessStream() is called for the first time and then again after 3 seconds
+    /// Then: The attempt count is incremented
+    func testAttemptCountIncrementFirstTime() async throws {
+        viewModel.livenessService = self.livenessService
+        self.viewModel.initializeLivenessStream()
+        XCTAssertEqual(livenessService.interactions, [
+            "initializeLivenessStream(withSessionID:userAgent:challenges:options:)"
+        ])
+
+        XCTAssertEqual(FaceLivenessDetectionViewModel.attemptCount, 1)
+        try await Task.sleep(seconds: 3)
+
+        self.viewModel.initializeLivenessStream()
+        XCTAssertEqual(livenessService.interactions, [
+            "initializeLivenessStream(withSessionID:userAgent:challenges:options:)",
+            "initializeLivenessStream(withSessionID:userAgent:challenges:options:)"
+        ])
+        XCTAssertEqual(FaceLivenessDetectionViewModel.attemptCount, 2)
+    }
+
+    /// Given: A `FaceLivenessDetectionViewModel`
+    /// When: The attempt count is 4, the last attempt was less than 5 minutes ago, and initializeLivenessStream() is called
+    /// Then: The attempt count is incremented
+    func testAttemptCountIncrement() async throws {
+        viewModel.livenessService = self.livenessService
+        FaceLivenessDetectionViewModel.attemptCount = 4
+        FaceLivenessDetectionViewModel.attemptIdTimeStamp = Date().addingTimeInterval(-180)
+        self.viewModel.initializeLivenessStream()
+        XCTAssertEqual(livenessService.interactions, [
"initializeLivenessStream(withSessionID:userAgent:challenges:options:)" + ]) + + XCTAssertEqual(FaceLivenessDetectionViewModel.attemptCount, 5) + } + + /// Given: A `FaceLivenessDetectionViewModel` + /// When: The attempt count is 4, last attempt time was > 5 minutes and initializeLivenessStream() is called + /// Then: The attempt count is not incremented and reset to 1 + func testAttemptCountReset() async throws { + viewModel.livenessService = self.livenessService + FaceLivenessDetectionViewModel.attemptCount = 4 + FaceLivenessDetectionViewModel.attemptIdTimeStamp = Date().addingTimeInterval(-305) + self.viewModel.initializeLivenessStream() + XCTAssertEqual(livenessService.interactions, [ + "initializeLivenessStream(withSessionID:userAgent:challenges:options:)" + ]) + + XCTAssertEqual(FaceLivenessDetectionViewModel.attemptCount, 1) + } } diff --git a/Tests/FaceLivenessTests/MockLivenessService.swift b/Tests/FaceLivenessTests/MockLivenessService.swift index 2b4633d1..d3e43a8d 100644 --- a/Tests/FaceLivenessTests/MockLivenessService.swift +++ b/Tests/FaceLivenessTests/MockLivenessService.swift @@ -18,7 +18,7 @@ class MockLivenessService { var onFinalClientEvent: (LivenessEvent, Date) -> Void = { _, _ in } var onFreshnessEvent: (LivenessEvent, Date) -> Void = { _, _ in } var onVideoEvent: (LivenessEvent, Date) -> Void = { _, _ in } - var onInitializeLivenessStream: (String, String) -> Void = { _, _ in } + var onInitializeLivenessStream: (String, String,[Challenge]?,FaceLivenessSession.Options) -> Void = { _, _, _, _ in } var onServiceException: (FaceLivenessSessionError) -> Void = { _ in } var onCloseSocket: (URLSessionWebSocketTask.CloseCode) -> Void = { _ in } } @@ -44,10 +44,13 @@ extension MockLivenessService: LivenessService { } func initializeLivenessStream( - withSessionID sessionID: String, userAgent: String + withSessionID sessionID: String, + userAgent: String, + challenges: [Challenge], + options: FaceLivenessSession.Options ) throws { interactions.append(#function) - onInitializeLivenessStream(sessionID, userAgent) + onInitializeLivenessStream(sessionID, userAgent, challenges, options) } func register( @@ -62,6 +65,10 @@ extension MockLivenessService: LivenessService { ) { interactions.append(#function) } + + func register(listener: @escaping (Challenge) -> Void, on event: LivenessEventKind.Server) { + interactions.append(#function) + } func closeSocket(with code: URLSessionWebSocketTask.CloseCode) { interactions.append(#function)