diff --git a/Example/Podfile.lock b/Example/Podfile.lock index 2f4db7331..e05e36e93 100644 --- a/Example/Podfile.lock +++ b/Example/Podfile.lock @@ -51,7 +51,7 @@ SPEC CHECKSUMS: lottie-ios: fcb5e73e17ba4c983140b7d21095c834b3087418 netfox: 9d5cc727fe7576c4c7688a2504618a156b7d44b7 Sentry: f8374b5415bc38dfb5645941b3ae31230fbeae57 - SmileID: 3c6d3101c7da84fe9acc36c10d2a189192f00d13 + SmileID: 93184d185549dec6858a3cc567bd9423de79abbb SwiftLint: 3fe909719babe5537c552ee8181c0031392be933 ZIPFoundation: b8c29ea7ae353b309bc810586181fd073cb3312c diff --git a/Example/SmileID.xcodeproj/project.pbxproj b/Example/SmileID.xcodeproj/project.pbxproj index 68a4cfcec..4e4f69bbc 100644 --- a/Example/SmileID.xcodeproj/project.pbxproj +++ b/Example/SmileID.xcodeproj/project.pbxproj @@ -7,7 +7,6 @@ objects = { /* Begin PBXBuildFile section */ - 1B229387BA34905BADB7776E /* Pods_SmileID_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 718422487EFECF403E12E64E /* Pods_SmileID_Example.framework */; }; 1E59E33E2BA1E64C00D2BAD2 /* PartnerParamsTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E59E33D2BA1E64C00D2BAD2 /* PartnerParamsTests.swift */; }; 1E60ED372A29C306002695FF /* HomeViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E60ED322A29C306002695FF /* HomeViewController.swift */; }; 1E60ED382A29C306002695FF /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E60ED332A29C306002695FF /* Constants.swift */; }; @@ -28,6 +27,7 @@ 20343AEF2C206BDF003536F5 /* JobListItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20343AEE2C206BDF003536F5 /* JobListItem.swift */; }; 20343AF22C206CEC003536F5 /* JobData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20343AF12C206CEC003536F5 /* JobData.swift */; }; 20343AFA2C2086D3003536F5 /* EmptyStateView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20343AF92C2086D3003536F5 /* EmptyStateView.swift */; }; + 204C95A12CDA455600A07386 /* FaceValidatorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 204C95A02CDA455600A07386 /* FaceValidatorTests.swift */; }; 205FB4A52C29AF1500FDE64F /* Date+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 205FB4A42C29AF1500FDE64F /* Date+Extensions.swift */; }; 205FB4A72C2C32A500FDE64F /* JobItemModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 205FB4A62C2C32A500FDE64F /* JobItemModel.swift */; }; 2089720E2C2624EE00D07D68 /* DataStoreClient.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2089720D2C2624EE00D07D68 /* DataStoreClient.swift */; }; @@ -38,6 +38,7 @@ 20B6D5EA2C21CA9E0023D51C /* CoreDataManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20B6D5E92C21CA9E0023D51C /* CoreDataManager.swift */; }; 20B6D5EC2C21CE660023D51C /* DataStoreError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20B6D5EB2C21CE660023D51C /* DataStoreError.swift */; }; 20C360C82C454C130008DBDE /* RootViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20C360C72C454C130008DBDE /* RootViewModel.swift */; }; + 20DDAB3C2CE5F02C00F7F7BA /* LivenessCheckManagerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20DDAB3B2CE5F02C00F7F7BA /* LivenessCheckManagerTests.swift */; }; 20DFA0EC2C21917100AC2AE7 /* View+TextSelection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20DFA0EB2C21917100AC2AE7 /* View+TextSelection.swift */; }; 20F3D6F32C25F4D700B32751 /* (null) in Sources */ = {isa = PBXBuildFile; }; 20F3D6F62C25F5C100B32751 /* SmileID.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 20F3D6F42C25F5C100B32751 /* 
SmileID.xcdatamodeld */; }; @@ -72,7 +73,8 @@ 91CB21A52AC10C61005AEBF5 /* NavigationBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91CB21A42AC10C61005AEBF5 /* NavigationBar.swift */; }; 91D9FBC42AB481FE00A8D36B /* PollingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91D9FBC32AB481FE00A8D36B /* PollingTests.swift */; }; 91D9FBD52AB8AB4700A8D36B /* CalculateSignatureTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91D9FBD42AB8AB4700A8D36B /* CalculateSignatureTests.swift */; }; - CAF00E3C75F6ADCC1E5AE60A /* Pods_SmileID_Tests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = AF55180EE707E0648C395190 /* Pods_SmileID_Tests.framework */; }; + C449A598C4B78EB14B493293 /* Pods_SmileID_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 02FE7FAA5FCF2B4172E1B98D /* Pods_SmileID_Example.framework */; }; + D4216F3C762CB28B31699F35 /* Pods_SmileID_Tests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = F3A0FB77997B5C7D540E6873 /* Pods_SmileID_Tests.framework */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -86,7 +88,7 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ - 0C27F5BE0A9BAAD20932D47D /* Pods-SmileID_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.debug.xcconfig"; sourceTree = ""; }; + 02FE7FAA5FCF2B4172E1B98D /* Pods_SmileID_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 1E59E33D2BA1E64C00D2BAD2 /* PartnerParamsTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PartnerParamsTests.swift; sourceTree = ""; }; 1E60ED322A29C306002695FF /* HomeViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = HomeViewController.swift; sourceTree = ""; }; 1E60ED332A29C306002695FF /* Constants.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; @@ -106,6 +108,7 @@ 20343AEE2C206BDF003536F5 /* JobListItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JobListItem.swift; sourceTree = ""; }; 20343AF12C206CEC003536F5 /* JobData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JobData.swift; sourceTree = ""; }; 20343AF92C2086D3003536F5 /* EmptyStateView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmptyStateView.swift; sourceTree = ""; }; + 204C95A02CDA455600A07386 /* FaceValidatorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceValidatorTests.swift; sourceTree = ""; }; 205FB4A42C29AF1500FDE64F /* Date+Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Date+Extensions.swift"; sourceTree = ""; }; 205FB4A62C2C32A500FDE64F /* JobItemModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JobItemModel.swift; sourceTree = ""; }; 2089720D2C2624EE00D07D68 /* DataStoreClient.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DataStoreClient.swift; sourceTree = ""; }; @@ -116,10 +119,10 @@ 20B6D5E92C21CA9E0023D51C /* CoreDataManager.swift */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.swift; path = CoreDataManager.swift; sourceTree = ""; }; 20B6D5EB2C21CE660023D51C /* DataStoreError.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DataStoreError.swift; sourceTree = ""; }; 20C360C72C454C130008DBDE /* RootViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RootViewModel.swift; sourceTree = ""; }; + 20DDAB3B2CE5F02C00F7F7BA /* LivenessCheckManagerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LivenessCheckManagerTests.swift; sourceTree = ""; }; 20DFA0EB2C21917100AC2AE7 /* View+TextSelection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "View+TextSelection.swift"; sourceTree = ""; }; 20F3D6F52C25F5C100B32751 /* SmileID.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = SmileID.xcdatamodel; sourceTree = ""; }; - 3736E6936005080B51DC930F /* Pods-SmileID_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.release.xcconfig"; sourceTree = ""; }; - 38D85EC9740D09436F10E980 /* Pods-SmileID_Tests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.debug.xcconfig"; sourceTree = ""; }; + 23822FF3F5838ECB320564F5 /* Pods-SmileID_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.release.xcconfig"; sourceTree = ""; }; 5829A8BF2BC7429A001C1E7E /* PrivacyInfo.xcprivacy */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = PrivacyInfo.xcprivacy; sourceTree = ""; }; 585BE4872AC7748E0091DDD8 /* RestartableTimerTest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RestartableTimerTest.swift; sourceTree = ""; }; 58C5F1D72B05925800A6080C /* BiometricKycWithIdInputScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BiometricKycWithIdInputScreen.swift; sourceTree = ""; }; @@ -147,7 +150,9 @@ 6AC9893915EBA33F6984A6D9 /* DocumentSelectorViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DocumentSelectorViewModel.swift; sourceTree = ""; }; 6AC98BA00298258573CBCBD4 /* ScannerViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ScannerViewController.swift; sourceTree = ""; }; 6AC98BC49871655D87C7DEE3 /* SettingsViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SettingsViewModel.swift; sourceTree = ""; }; - 718422487EFECF403E12E64E /* Pods_SmileID_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 70FFBEEAB7A2E3A33EE9EA93 /* Pods-SmileID_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.release.xcconfig"; sourceTree 
= ""; }; + 784454F0D57FB1E2742E2156 /* Pods-SmileID_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.debug.xcconfig"; sourceTree = ""; }; + 821B859ACAC64E44F59427CD /* Pods-SmileID_Tests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.debug.xcconfig"; sourceTree = ""; }; 918321E02A52E36A00D6FB7F /* URLSessionRestServiceClientTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = URLSessionRestServiceClientTests.swift; sourceTree = ""; }; 918321E12A52E36A00D6FB7F /* XCTestExtension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = XCTestExtension.swift; path = ../../Tests/XCTestExtension.swift; sourceTree = ""; }; 918321E32A52E36A00D6FB7F /* DependencyContainerTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DependencyContainerTests.swift; sourceTree = ""; }; @@ -158,9 +163,8 @@ 91D9FBD42AB8AB4700A8D36B /* CalculateSignatureTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CalculateSignatureTests.swift; sourceTree = ""; }; 94E7560A47E255DD8215C183 /* README.md */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 9755B6A19CF28DE212F24C83 /* SmileID.podspec */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = SmileID.podspec; path = ../SmileID.podspec; sourceTree = ""; xcLanguageSpecificationIdentifier = xcode.lang.ruby; }; - AF55180EE707E0648C395190 /* Pods_SmileID_Tests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Tests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; C8CD2E3DB817D8C6334E9240 /* LICENSE */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = LICENSE; path = ../LICENSE; sourceTree = ""; }; - EC1A4BF74BF20D4385E0CEA1 /* Pods-SmileID_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.release.xcconfig"; sourceTree = ""; }; + F3A0FB77997B5C7D540E6873 /* Pods_SmileID_Tests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Tests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -168,7 +172,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 1B229387BA34905BADB7776E /* Pods_SmileID_Example.framework in Frameworks */, + C449A598C4B78EB14B493293 /* Pods_SmileID_Example.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -176,7 +180,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - CAF00E3C75F6ADCC1E5AE60A /* Pods_SmileID_Tests.framework in Frameworks */, + D4216F3C762CB28B31699F35 /* Pods_SmileID_Tests.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -278,6 +282,15 @@ path = Helpers; sourceTree = ""; }; + 
34F29B5AE452286D795FCD29 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 02FE7FAA5FCF2B4172E1B98D /* Pods_SmileID_Example.framework */, + F3A0FB77997B5C7D540E6873 /* Pods_SmileID_Tests.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; 58C5F1D62B05922100A6080C /* BiometricKYC */ = { isa = PBXGroup; children = ( @@ -297,7 +310,7 @@ 607FACE81AFB9204008FA782 /* Tests */, 607FACD11AFB9204008FA782 /* Products */, 828BF541E068101B2E6ED55F /* Pods */, - E88F96F2DEFAFF10CCF982E2 /* Frameworks */, + 34F29B5AE452286D795FCD29 /* Frameworks */, ); sourceTree = ""; }; @@ -359,6 +372,8 @@ 918321E12A52E36A00D6FB7F /* XCTestExtension.swift */, 607FACE91AFB9204008FA782 /* Supporting Files */, 585BE4872AC7748E0091DDD8 /* RestartableTimerTest.swift */, + 204C95A02CDA455600A07386 /* FaceValidatorTests.swift */, + 20DDAB3B2CE5F02C00F7F7BA /* LivenessCheckManagerTests.swift */, ); path = Tests; sourceTree = ""; @@ -403,10 +418,10 @@ 828BF541E068101B2E6ED55F /* Pods */ = { isa = PBXGroup; children = ( - 0C27F5BE0A9BAAD20932D47D /* Pods-SmileID_Example.debug.xcconfig */, - 3736E6936005080B51DC930F /* Pods-SmileID_Example.release.xcconfig */, - 38D85EC9740D09436F10E980 /* Pods-SmileID_Tests.debug.xcconfig */, - EC1A4BF74BF20D4385E0CEA1 /* Pods-SmileID_Tests.release.xcconfig */, + 784454F0D57FB1E2742E2156 /* Pods-SmileID_Example.debug.xcconfig */, + 70FFBEEAB7A2E3A33EE9EA93 /* Pods-SmileID_Example.release.xcconfig */, + 821B859ACAC64E44F59427CD /* Pods-SmileID_Tests.debug.xcconfig */, + 23822FF3F5838ECB320564F5 /* Pods-SmileID_Tests.release.xcconfig */, ); path = Pods; sourceTree = ""; @@ -443,15 +458,6 @@ path = ../../Tests/Mocks; sourceTree = ""; }; - E88F96F2DEFAFF10CCF982E2 /* Frameworks */ = { - isa = PBXGroup; - children = ( - 718422487EFECF403E12E64E /* Pods_SmileID_Example.framework */, - AF55180EE707E0648C395190 /* Pods_SmileID_Tests.framework */, - ); - name = Frameworks; - sourceTree = ""; - }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -459,13 +465,13 @@ isa = PBXNativeTarget; buildConfigurationList = 607FACEF1AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SmileID_Example" */; buildPhases = ( - A8844648E240C4012E2F4931 /* [CP] Check Pods Manifest.lock */, + 09CE3487C58D7803D9B1254B /* [CP] Check Pods Manifest.lock */, 607FACCC1AFB9204008FA782 /* Sources */, 917D79282AA8024400FA6624 /* SwiftLint */, 607FACCD1AFB9204008FA782 /* Frameworks */, 607FACCE1AFB9204008FA782 /* Resources */, C0BE335FFECD4DF6892309F3 /* Upload Debug Symbols to Sentry */, - E71DEE69CE1BFE014013BDFC /* [CP] Embed Pods Frameworks */, + 088EDD4D204057C2E745D8BC /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -480,7 +486,7 @@ isa = PBXNativeTarget; buildConfigurationList = 607FACF21AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SmileID_Tests" */; buildPhases = ( - E594EEEB073850BE36B39940 /* [CP] Check Pods Manifest.lock */, + FB435C9B79B1BE5E6BD677F2 /* [CP] Check Pods Manifest.lock */, 607FACE11AFB9204008FA782 /* Sources */, 607FACE21AFB9204008FA782 /* Frameworks */, 607FACE31AFB9204008FA782 /* Resources */, @@ -563,25 +569,39 @@ /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ - 917D79282AA8024400FA6624 /* SwiftLint */ = { + 088EDD4D204057C2E745D8BC /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); - inputFileListPaths = ( - ); inputPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example-frameworks.sh", + 
"${BUILT_PRODUCTS_DIR}/ArkanaKeys/ArkanaKeys.framework", + "${BUILT_PRODUCTS_DIR}/ArkanaKeysInterfaces/ArkanaKeysInterfaces.framework", + "${BUILT_PRODUCTS_DIR}/FingerprintJS/FingerprintJS.framework", + "${BUILT_PRODUCTS_DIR}/Sentry/Sentry.framework", + "${BUILT_PRODUCTS_DIR}/SmileID/SmileID.framework", + "${BUILT_PRODUCTS_DIR}/ZIPFoundation/ZIPFoundation.framework", + "${BUILT_PRODUCTS_DIR}/lottie-ios/Lottie.framework", + "${BUILT_PRODUCTS_DIR}/netfox/netfox.framework", ); - name = SwiftLint; - outputFileListPaths = ( - ); + name = "[CP] Embed Pods Frameworks"; outputPaths = ( + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ArkanaKeys.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ArkanaKeysInterfaces.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/FingerprintJS.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Sentry.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SmileID.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ZIPFoundation.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Lottie.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/netfox.framework", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "# Type a script or drag a script file from your workspace to insert its path.\ncd ../Sources\n\"${PODS_ROOT}/SwiftLint/swiftlint\"\n"; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example-frameworks.sh\"\n"; + showEnvVarsInLog = 0; }; - A8844648E240C4012E2F4931 /* [CP] Check Pods Manifest.lock */ = { + 09CE3487C58D7803D9B1254B /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -603,6 +623,24 @@ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; + 917D79282AA8024400FA6624 /* SwiftLint */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + ); + name = SwiftLint; + outputFileListPaths = ( + ); + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "# Type a script or drag a script file from your workspace to insert its path.\ncd ../Sources\n\"${PODS_ROOT}/SwiftLint/swiftlint\"\n"; + }; C0BE335FFECD4DF6892309F3 /* Upload Debug Symbols to Sentry */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; @@ -618,7 +656,7 @@ shellPath = /bin/sh; shellScript = "# This script is responsable to upload debug symbols and source context for Sentry.\nif which sentry-cli >/dev/null; then\nexport SENTRY_ORG=smile-identity\nexport SENTRY_PROJECT=ios\nERROR=$(sentry-cli debug-files upload --include-sources \"$DWARF_DSYM_FOLDER_PATH\" 2>&1 >/dev/null)\nif [ ! $? 
-eq 0 ]; then\necho \"warning: sentry-cli - $ERROR\"\nfi\nelse\necho \"warning: sentry-cli not installed, download from https://github.com/getsentry/sentry-cli/releases\"\nfi\n"; }; - E594EEEB073850BE36B39940 /* [CP] Check Pods Manifest.lock */ = { + FB435C9B79B1BE5E6BD677F2 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -640,38 +678,6 @@ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; - E71DEE69CE1BFE014013BDFC /* [CP] Embed Pods Frameworks */ = { - isa = PBXShellScriptBuildPhase; - buildActionMask = 2147483647; - files = ( - ); - inputPaths = ( - "${PODS_ROOT}/Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example-frameworks.sh", - "${BUILT_PRODUCTS_DIR}/ArkanaKeys/ArkanaKeys.framework", - "${BUILT_PRODUCTS_DIR}/ArkanaKeysInterfaces/ArkanaKeysInterfaces.framework", - "${BUILT_PRODUCTS_DIR}/FingerprintJS/FingerprintJS.framework", - "${BUILT_PRODUCTS_DIR}/Sentry/Sentry.framework", - "${BUILT_PRODUCTS_DIR}/SmileID/SmileID.framework", - "${BUILT_PRODUCTS_DIR}/ZIPFoundation/ZIPFoundation.framework", - "${BUILT_PRODUCTS_DIR}/lottie-ios/Lottie.framework", - "${BUILT_PRODUCTS_DIR}/netfox/netfox.framework", - ); - name = "[CP] Embed Pods Frameworks"; - outputPaths = ( - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ArkanaKeys.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ArkanaKeysInterfaces.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/FingerprintJS.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Sentry.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SmileID.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ZIPFoundation.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Lottie.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/netfox.framework", - ); - runOnlyForDeploymentPostprocessing = 0; - shellPath = /bin/sh; - shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example-frameworks.sh\"\n"; - showEnvVarsInLog = 0; - }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ @@ -744,7 +750,9 @@ 585BE4882AC7748E0091DDD8 /* RestartableTimerTest.swift in Sources */, 58C7118C2A69DE920062BBFB /* EnhancedKycTest.swift in Sources */, 918321EC2A52E36A00D6FB7F /* DependencyContainerTests.swift in Sources */, + 204C95A12CDA455600A07386 /* FaceValidatorTests.swift in Sources */, 918321EA2A52E36A00D6FB7F /* URLSessionRestServiceClientTests.swift in Sources */, + 20DDAB3C2CE5F02C00F7F7BA /* LivenessCheckManagerTests.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -881,7 +889,7 @@ }; 607FACF01AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 0C27F5BE0A9BAAD20932D47D /* Pods-SmileID_Example.debug.xcconfig */; + baseConfigurationReference = 784454F0D57FB1E2742E2156 /* Pods-SmileID_Example.debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_IDENTITY = "Apple Development"; @@ -914,7 +922,7 @@ }; 607FACF11AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; - 
baseConfigurationReference = 3736E6936005080B51DC930F /* Pods-SmileID_Example.release.xcconfig */; + baseConfigurationReference = 70FFBEEAB7A2E3A33EE9EA93 /* Pods-SmileID_Example.release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_IDENTITY = "Apple Distribution"; @@ -946,7 +954,7 @@ }; 607FACF31AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 38D85EC9740D09436F10E980 /* Pods-SmileID_Tests.debug.xcconfig */; + baseConfigurationReference = 821B859ACAC64E44F59427CD /* Pods-SmileID_Tests.debug.xcconfig */; buildSettings = { CLANG_ENABLE_MODULES = YES; CODE_SIGN_IDENTITY = "Apple Development"; @@ -979,7 +987,7 @@ }; 607FACF41AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = EC1A4BF74BF20D4385E0CEA1 /* Pods-SmileID_Tests.release.xcconfig */; + baseConfigurationReference = 23822FF3F5838ECB320564F5 /* Pods-SmileID_Tests.release.xcconfig */; buildSettings = { CLANG_ENABLE_MODULES = YES; CODE_SIGN_IDENTITY = "Apple Distribution"; diff --git a/Example/SmileID/Home/HomeView.swift b/Example/SmileID/Home/HomeView.swift index 7988f37f9..430291899 100644 --- a/Example/SmileID/Home/HomeView.swift +++ b/Example/SmileID/Home/HomeView.swift @@ -7,19 +7,19 @@ struct HomeView: View { @StateObject var viewModel: HomeViewModel init(config: Config) { - _viewModel = StateObject(wrappedValue: HomeViewModel(config: config)) + _viewModel = StateObject(wrappedValue: HomeViewModel(config: config)) } + let columns = [GridItem(.flexible()), GridItem(.flexible())] + var body: some View { NavigationView { VStack(spacing: 24) { Text("Test Our Products") .font(SmileID.theme.header2) .foregroundColor(.black) - - MyVerticalGrid( - maxColumns: 2, - items: [ + ScrollView(showsIndicators: false) { + LazyVGrid(columns: columns) { ProductCell( image: "smart_selfie_enroll", name: "SmartSelfie™ Enrollment", @@ -38,7 +38,7 @@ struct HomeView: View { ) ) } - ), + ) ProductCell( image: "smart_selfie_authentication", name: "SmartSelfie™ Authentication", @@ -51,7 +51,41 @@ struct HomeView: View { delegate: viewModel ) } - ), + ) + ProductCell( + image: "smart_selfie_enroll", + name: "SmartSelfie™ Enrollment (Strict Mode)", + onClick: { + viewModel.onProductClicked() + }, + content: { + SmileID.smartSelfieEnrollmentScreen( + userId: viewModel.smartSelfieEnrollmentUserId, + jobId: viewModel.newJobId, + allowAgentMode: true, + useStrictMode: true, + delegate: SmartSelfieEnrollmentDelegate( + userId: viewModel.smartSelfieEnrollmentUserId, + onEnrollmentSuccess: viewModel.onSmartSelfieEnrollment, + onError: viewModel.didError + ) + ) + } + ) + ProductCell( + image: "smart_selfie_authentication", + name: "SmartSelfie™ Authentication (Strict Mode)", + onClick: { + viewModel.onProductClicked() + }, + content: { + SmartSelfieAuthWithUserIdEntry( + initialUserId: viewModel.smartSelfieEnrollmentUserId, + useStrictMode: true, + delegate: viewModel + ) + } + ) ProductCell( image: "enhanced_kyc", name: "Enhanced KYC", @@ -67,7 +101,7 @@ struct HomeView: View { ) ) } - ), + ) ProductCell( image: "biometric", name: "Biometric KYC", @@ -83,7 +117,7 @@ struct HomeView: View { ) ) } - ), + ) ProductCell( image: "document", name: "\nDocument Verification", @@ -97,7 +131,7 @@ struct HomeView: View { delegate: viewModel ) } - ), + ) ProductCell( image: "enhanced_doc_v", name: "Enhanced Document Verification", @@ -112,10 +146,8 @@ struct HomeView: View { ) } ) - ].map { - AnyView($0) } - ) + } Text("Partner \(viewModel.partnerId) - 
Version \(version) - Build \(build)") .font(SmileID.theme.body) @@ -164,7 +196,9 @@ struct SmartSelfieEnrollmentDelegate: SmartSelfieResultDelegate { private struct SmartSelfieAuthWithUserIdEntry: View { let initialUserId: String + var useStrictMode: Bool = false let delegate: SmartSelfieResultDelegate + @State private var userId: String? var body: some View { @@ -172,6 +206,7 @@ private struct SmartSelfieAuthWithUserIdEntry: View { SmileID.smartSelfieAuthenticationScreen( userId: userId, allowAgentMode: true, + useStrictMode: useStrictMode, delegate: delegate ) } else { @@ -262,9 +297,9 @@ private struct MyVerticalGrid: View { ScrollView { VStack(alignment: .leading, spacing: 8) { let numRows = (items.count + maxColumns - 1) / maxColumns - ForEach(0 ..< numRows) { rowIndex in + ForEach(0 ..< numRows, id: \.self) { rowIndex in HStack(spacing: 16) { - ForEach(0 ..< maxColumns) { columnIndex in + ForEach(0 ..< maxColumns, id: \.self) { columnIndex in let itemIndex = rowIndex * maxColumns + columnIndex let width = geo.size.width / CGFloat(maxColumns) if itemIndex < items.count { diff --git a/Example/SmileID/Home/ProductCell.swift b/Example/SmileID/Home/ProductCell.swift index 0633a18b0..a25967ce2 100644 --- a/Example/SmileID/Home/ProductCell.swift +++ b/Example/SmileID/Home/ProductCell.swift @@ -1,18 +1,18 @@ import SmileID import SwiftUI -struct ProductCell: View { +struct ProductCell<Content: View>: View { let image: String let name: String let onClick: (() -> Void)? - @ViewBuilder let content: () -> any View + @ViewBuilder let content: () -> Content @State private var isPresented: Bool = false init( image: String, name: String, onClick: (() -> Void)? = nil, - @ViewBuilder content: @escaping () -> any View + @ViewBuilder content: @escaping () -> Content ) { self.image = image self.name = name @@ -41,8 +41,15 @@ struct ProductCell: View { .frame(maxWidth: .infinity) .background(SmileID.theme.accent) .cornerRadius(8) - .sheet(isPresented: $isPresented, content: { AnyView(content()) - }) + .fullScreenCover( + isPresented: $isPresented, + content: { + NavigationView { + content() + } + .environment(\.modalMode, $isPresented) + } + ) } ) } diff --git a/Example/Tests/FaceValidatorTests.swift b/Example/Tests/FaceValidatorTests.swift new file mode 100644 index 000000000..52af1d228 --- /dev/null +++ b/Example/Tests/FaceValidatorTests.swift @@ -0,0 +1,147 @@ +import XCTest + +@testable import SmileID + +class FaceValidatorTests: XCTestCase { + private var faceValidator: FaceValidator! + private var mockDelegate: MockFaceValidatorDelegate!
+ + override func setUp() { + super.setUp() + faceValidator = FaceValidator() + mockDelegate = MockFaceValidatorDelegate() + faceValidator.delegate = mockDelegate + let guideFrame: CGRect = .init(x: 30, y: 100, width: 250, height: 350) + faceValidator.setLayoutGuideFrame(with: guideFrame) + } + + override func tearDown() { + faceValidator = nil + mockDelegate = nil + super.tearDown() + } + + func testValidateWithValidFace() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), + brightness: 100 + ) + + XCTAssertTrue(result.faceInBounds) + XCTAssertTrue(result.hasDetectedValidFace) + XCTAssertNil(result.userInstruction) + } + + func testValidateWithFaceTooSmall() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 100, height: 100), + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), + brightness: 100 + ) + + XCTAssertFalse(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .moveCloser) + } + + func testValidateWithFaceTooLarge() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 250, height: 250), + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), + brightness: 100 + ) + + XCTAssertFalse(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .moveBack) + } + + func testValidateWithFaceOffCentre() { + let result = performValidation( + faceBoundingBox: CGRect(x: 125, y: 164, width: 190, height: 190), + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), + brightness: 100 + ) + + XCTAssertFalse(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .headInFrame) + } + + func testValidateWithPoorBrightness() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), + brightness: 70 + ) + + XCTAssertTrue(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .goodLight) + } + + func testValidateWithPoorSelfieQuality() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), + selfieQualityData: SelfieQualityData(failed: 0.6, passed: 0.4), + brightness: 70 + ) + + XCTAssertTrue(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .goodLight) + } + + func testValidateWithLivenessTask() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), + selfieQualityData: SelfieQualityData(failed: 0.3, passed: 0.7), + brightness: 100, + livenessTask: .lookLeft + ) + + XCTAssertTrue(result.faceInBounds) + XCTAssertTrue(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .lookLeft) + } +} + +// MARK: - Helpers +extension FaceValidatorTests { + func performValidation( + faceBoundingBox: CGRect, + selfieQualityData: SelfieQualityData, + brightness: Int, + livenessTask: LivenessTask?
= nil + ) -> FaceValidationResult { + let faceGeometry = FaceGeometryData( + boundingBox: faceBoundingBox, + roll: 0, + yaw: 0, + pitch: 0, + direction: .none + ) + faceValidator.validate( + faceGeometry: faceGeometry, + selfieQuality: selfieQualityData, + brightness: brightness, + currentLivenessTask: livenessTask + ) + + guard let mockValidationResult = mockDelegate.validationResult else { + XCTFail("Validation result should not be nil") + return FaceValidationResult(userInstruction: nil, hasDetectedValidFace: false, faceInBounds: false) + } + return mockValidationResult + } +} + +// MARK: - Mocks +class MockFaceValidatorDelegate: FaceValidatorDelegate { + var validationResult: FaceValidationResult? + + func updateValidationResult(_ result: FaceValidationResult) { + self.validationResult = result + } +} diff --git a/Example/Tests/LivenessCheckManagerTests.swift b/Example/Tests/LivenessCheckManagerTests.swift new file mode 100644 index 000000000..9233e1420 --- /dev/null +++ b/Example/Tests/LivenessCheckManagerTests.swift @@ -0,0 +1,134 @@ +import XCTest + +@testable import SmileID + +class LivenessCheckManagerTests: XCTestCase { + private var livenessCheckManager: LivenessCheckManager! + private var mockTimer: MockTimer! + private var dispatchQueueMock: DispatchQueueMock! + private var mockDelegate: MockLivenessCheckManagerDelegate! + private let taskTimeout: Int = 120 + + override func setUp() { + super.setUp() + mockTimer = MockTimer() + dispatchQueueMock = DispatchQueueMock() + livenessCheckManager = LivenessCheckManager( + taskTimer: mockTimer, + taskTimeoutDuration: TimeInterval(taskTimeout), + dispatchQueue: dispatchQueueMock, + livenessTaskSequence: LivenessTask.allCases + ) + mockDelegate = MockLivenessCheckManagerDelegate() + livenessCheckManager.delegate = mockDelegate + } + + override func tearDown() { + livenessCheckManager = nil + mockTimer = nil + dispatchQueueMock = nil + mockDelegate = nil + super.tearDown() + } + + func testInitializationShufflesTasks() { + let manager1 = LivenessCheckManager() + let manager2 = LivenessCheckManager() + + XCTAssertNotEqual( + manager1.livenessTaskSequence, manager2.livenessTaskSequence, + "Task sequences should be shuffled differently") + } + + func testInitiateSetsCurrentTask() { + livenessCheckManager.initiateLivenessCheck() + XCTAssertNotNil( + livenessCheckManager.currentTask, + "Current task should be set after initiating liveness check.") + } + + func testCompletesAllLivenessTasksInSequence() { + livenessCheckManager.initiateLivenessCheck() + + XCTAssertEqual(livenessCheckManager.currentTask, .lookLeft) + + // complete look left + let lookLeftFaceGeometry = FaceGeometryData( + boundingBox: .zero, + roll: 0, + yaw: -0.3, + pitch: 0, + direction: .none + ) + livenessCheckManager.processFaceGeometry(lookLeftFaceGeometry) + XCTAssertTrue(mockDelegate.didCompleteLivenessTaskCalled, "Delegate should be notified of task completed") + XCTAssertEqual( + livenessCheckManager.lookLeftProgress, 1.0, "Look left progress should be complete") + + // advance to next task + XCTAssertEqual(livenessCheckManager.currentTask, .lookRight) + + // complete look right + let lookRightFaceGeometry = FaceGeometryData( + boundingBox: .zero, + roll: 0, + yaw: 0.3, + pitch: 0, + direction: .none + ) + livenessCheckManager.processFaceGeometry(lookRightFaceGeometry) + XCTAssertTrue(mockDelegate.didCompleteLivenessTaskCalled, "Delegate should be notified of task completed") + XCTAssertEqual( + livenessCheckManager.lookRightProgress, 1.0, "Look right progress should be
complete") + + // advance to next task + XCTAssertEqual(livenessCheckManager.currentTask, .lookUp) + + // complete look up + let lookUpFaceGeometry = FaceGeometryData( + boundingBox: .zero, + roll: 0, + yaw: 0, + pitch: -0.3, + direction: .none + ) + livenessCheckManager.processFaceGeometry(lookUpFaceGeometry) + XCTAssertTrue(mockDelegate.didCompleteLivenessTaskCalled, "Delegate should be notified of task completed") + XCTAssertEqual( + livenessCheckManager.lookUpProgress, 1.0, "Look up progress should be complete") + + XCTAssertTrue(mockDelegate.didCompleteLivenessChallengeCalled) + } + + func testTaskTimeout() { + livenessCheckManager.initiateLivenessCheck() + for _ in 0..<taskTimeout { + mockTimer.fire() + } + XCTAssertTrue(mockDelegate.livenessChallengeTimeoutCalled, "Delegate should be notified of challenge timeout") + } +} + +// MARK: - Mocks +class MockLivenessCheckManagerDelegate: LivenessCheckManagerDelegate { + var didCompleteLivenessTaskCalled = false + var didCompleteLivenessChallengeCalled = false + var livenessChallengeTimeoutCalled = false + + func didCompleteLivenessTask() { + didCompleteLivenessTaskCalled = true + } + + func didCompleteLivenessChallenge() { + didCompleteLivenessChallengeCalled = true + } + + func livenessChallengeTimeout() { + livenessChallengeTimeoutCalled = true + } +} + +class MockTimer: TimerProtocol { + private var block: ((Timer) -> Void)? + + func scheduledTimer(withTimeInterval interval: TimeInterval, repeats: Bool, block: @escaping (Timer) -> Void) { + self.block = block + } + + func fire() { + block?(Timer()) + } + + func invalidate() { + block = nil + } +} + +class DispatchQueueMock: DispatchQueueType { + func async(execute work: @escaping () -> Void) { + work() + } +} diff --git a/SmileID.podspec b/SmileID.podspec index 774f949e2..76ff6f576 100644 --- a/SmileID.podspec +++ b/SmileID.podspec @@ -13,6 +13,6 @@ Pod::Spec.new do |s| s.swift_version = '5.5' s.source_files = 'Sources/SmileID/Classes/**/*' s.resource_bundles = { - 'SmileID_SmileID' => ['Sources/SmileID/Resources/**/*.{storyboard,storyboardc,xib,nib,xcassets,json,png,ttf,lproj,xcprivacy}'] + 'SmileID_SmileID' => ['Sources/SmileID/Resources/**/*.{storyboard,storyboardc,xib,nib,xcassets,json,png,ttf,lproj,xcprivacy,mlmodelc,lottie}'] } end diff --git a/Sources/SmileID/Classes/Camera/CameraManager.swift b/Sources/SmileID/Classes/Camera/CameraManager.swift index f22a21652..439f31d95 100644 --- a/Sources/SmileID/Classes/Camera/CameraManager.swift +++ b/Sources/SmileID/Classes/Camera/CameraManager.swift @@ -42,7 +42,9 @@ class CameraManager: NSObject, ObservableObject { @Published private(set) var status = Status.unconfigured private var orientation: Orientation - init(orientation: Orientation) { + static let shared: CameraManager = CameraManager(orientation: .portrait) + + private init(orientation: Orientation) { self.orientation = orientation super.init() sessionQueue.async { diff --git a/Sources/SmileID/Classes/Camera/CameraViewController.swift b/Sources/SmileID/Classes/Camera/CameraViewController.swift index f52bf3996..26f2797dd 100644 --- a/Sources/SmileID/Classes/Camera/CameraViewController.swift +++ b/Sources/SmileID/Classes/Camera/CameraViewController.swift @@ -2,7 +2,9 @@ import UIKit import Vision import AVFoundation -class PreviewView: UIViewController { +class CameraViewController: UIViewController { + var faceDetector: FaceDetectorV2? + + var previewLayer: AVCaptureVideoPreviewLayer? private weak var cameraManager: CameraManager?
@@ -17,6 +19,7 @@ class PreviewView: UIViewController { override func viewDidLoad() { super.viewDidLoad() + faceDetector?.viewDelegate = self configurePreviewLayer() } @@ -34,7 +37,16 @@ class PreviewView: UIViewController { } } -extension PreviewView: RectangleDetectionDelegate { +extension CameraViewController: FaceDetectorViewDelegate { + func convertFromMetadataToPreviewRect(rect: CGRect) -> CGRect { + guard let previewLayer = previewLayer else { + return CGRect.zero + } + return previewLayer.layerRectConverted(fromMetadataOutputRect: rect) + } +} + +extension CameraViewController: RectangleDetectionDelegate { func didDetectQuad( quad: Quadrilateral?, _ imageSize: CGSize, diff --git a/Sources/SmileID/Classes/DocumentVerification/Model/DocumentCaptureViewModel.swift b/Sources/SmileID/Classes/DocumentVerification/Model/DocumentCaptureViewModel.swift index 53691c3ac..d859d9ed5 100644 --- a/Sources/SmileID/Classes/DocumentVerification/Model/DocumentCaptureViewModel.swift +++ b/Sources/SmileID/Classes/DocumentVerification/Model/DocumentCaptureViewModel.swift @@ -15,7 +15,7 @@ class DocumentCaptureViewModel: ObservableObject { // Initializer properties private let knownAspectRatio: Double? private var localMetadata: LocalMetadata - + // Other properties private let defaultAspectRatio: Double private let textDetector = TextDetector() @@ -27,7 +27,7 @@ class DocumentCaptureViewModel: ObservableObject { private let side: DocumentCaptureSide private var retryCount: Int = 0 private(set) var documentImageOrigin: DocumentImageOriginValue? - + // UI properties @Published var unauthorizedAlert: AlertState? @Published var acknowledgedInstructions = false @@ -39,8 +39,8 @@ class DocumentCaptureViewModel: ObservableObject { @Published var documentImageToConfirm: Data? @Published var captureError: Error? @Published var isCapturing = false - @Published var cameraManager = CameraManager(orientation: .portrait) - + var cameraManager = CameraManager.shared + init( knownAspectRatio: Double? = nil, side: DocumentCaptureSide, @@ -53,26 +53,26 @@ class DocumentCaptureViewModel: ObservableObject { DispatchQueue.main.async { [self] in idAspectRatio = defaultAspectRatio } - + cameraManager.$status .receive(on: DispatchQueue.main) .filter { $0 == .unauthorized } .map { _ in AlertState.cameraUnauthorized } .sink { alert in self.unauthorizedAlert = alert } .store(in: &subscribers) - + cameraManager.capturedImagePublisher .receive(on: DispatchQueue.global()) .compactMap { $0 } .sink(receiveValue: onCaptureComplete) .store(in: &subscribers) - + cameraManager.sampleBufferPublisher .receive(on: DispatchQueue(label: "com.smileidentity.receivebuffer")) .compactMap { $0 } .sink(receiveValue: analyzeImage) .store(in: &subscribers) - + // Show Manual Capture button after 10 seconds Timer.scheduledTimer( timeInterval: 10, @@ -81,7 +81,7 @@ class DocumentCaptureViewModel: ObservableObject { userInfo: nil, repeats: false ) - + // Auto capture after 1 second of edges detected areEdgesDetectedSubscriber = $areEdgesDetected.sink(receiveValue: { areEdgesDetected in if areEdgesDetected { @@ -100,20 +100,20 @@ class DocumentCaptureViewModel: ObservableObject { } }) } - + let metadataTimerStart = MonotonicTime() - + func updateLocalMetadata(_ newMetadata: LocalMetadata) { self.localMetadata = newMetadata objectWillChange.send() } - + @objc func showManualCapture() { DispatchQueue.main.async { self.showManualCaptureButton = true } } - + /// Called when the user taps the "Take Photo" button on the instructions screen. 
This is NOT /// the same as the manual capture button. func onTakePhotoClick() { @@ -122,12 +122,12 @@ class DocumentCaptureViewModel: ObservableObject { self.acknowledgedInstructions = true } } - + /// Called when the user taps the "Select from Gallery" button on the instructions screen func onGalleryClick() { showPhotoPicker = true } - + func onPhotoSelectedFromGallery(_ image: UIImage) { guard let image = image.jpegData(compressionQuality: 1.0) else { DispatchQueue.main.async { @@ -142,7 +142,7 @@ class DocumentCaptureViewModel: ObservableObject { self.showPhotoPicker = false } } - + /// Called when auto capture determines the image quality is sufficient OR when the user taps /// the manual capture button. func captureDocument() { @@ -157,19 +157,19 @@ class DocumentCaptureViewModel: ObservableObject { documentImageOrigin = DocumentImageOriginValue.cameraManualCapture cameraManager.capturePhoto() } - + /// Called if the user declines the image in the capture confirmation dialog. func onRetry() { documentImageOrigin = nil switch side { - case .front: - localMetadata.metadata.removeAllOfType(Metadatum.DocumentFrontCaptureRetries.self) - localMetadata.metadata.removeAllOfType(Metadatum.DocumentFrontCaptureDuration.self) - localMetadata.metadata.removeAllOfType(Metadatum.DocumentFrontImageOrigin.self) - case .back: - localMetadata.metadata.removeAllOfType(Metadatum.DocumentBackCaptureRetries.self) - localMetadata.metadata.removeAllOfType(Metadatum.DocumentBackCaptureDuration.self) - localMetadata.metadata.removeAllOfType(Metadatum.DocumentBackImageOrigin.self) + case .front: + localMetadata.metadata.removeAllOfType(Metadatum.DocumentFrontCaptureRetries.self) + localMetadata.metadata.removeAllOfType(Metadatum.DocumentFrontCaptureDuration.self) + localMetadata.metadata.removeAllOfType(Metadatum.DocumentFrontImageOrigin.self) + case .back: + localMetadata.metadata.removeAllOfType(Metadatum.DocumentBackCaptureRetries.self) + localMetadata.metadata.removeAllOfType(Metadatum.DocumentBackCaptureDuration.self) + localMetadata.metadata.removeAllOfType(Metadatum.DocumentBackImageOrigin.self) } retryCount += 1 DispatchQueue.main.async { @@ -181,28 +181,32 @@ class DocumentCaptureViewModel: ObservableObject { self.areEdgesDetected = false } } - + private func onCaptureComplete(image: Data) { let croppedImage = ImageUtils.cropImageToAspectRatio( imageData: image, aspectRatio: 1 / idAspectRatio ) switch side { - case .front: - localMetadata.addMetadata(Metadatum.DocumentFrontCaptureDuration(duration: metadataTimerStart.elapsedTime())) - localMetadata.addMetadata(Metadatum.DocumentFrontCaptureRetries(retries: retryCount)) - localMetadata.addMetadata(Metadatum.DocumentFrontImageOrigin(origin: documentImageOrigin!)) - case .back: - localMetadata.addMetadata(Metadatum.DocumentBackCaptureDuration(duration: metadataTimerStart.elapsedTime())) - localMetadata.addMetadata(Metadatum.DocumentBackCaptureRetries(retries: retryCount)) - localMetadata.addMetadata(Metadatum.DocumentBackImageOrigin(origin: documentImageOrigin!)) + case .front: + localMetadata.addMetadata( + Metadatum.DocumentFrontCaptureDuration(duration: metadataTimerStart.elapsedTime())) + localMetadata.addMetadata(Metadatum.DocumentFrontCaptureRetries(retries: retryCount)) + localMetadata.addMetadata( + Metadatum.DocumentFrontImageOrigin(origin: documentImageOrigin!)) + case .back: + localMetadata.addMetadata( + Metadatum.DocumentBackCaptureDuration(duration: metadataTimerStart.elapsedTime())) + 
localMetadata.addMetadata(Metadatum.DocumentBackCaptureRetries(retries: retryCount)) + localMetadata.addMetadata( + Metadatum.DocumentBackImageOrigin(origin: documentImageOrigin!)) } DispatchQueue.main.async { [self] in documentImageToConfirm = croppedImage isCapturing = false } } - + /// Analyzes a single frame from the camera. No other frame will be processed until this one /// completes. This is to prevent the UI from flickering between different states. /// @@ -254,14 +258,14 @@ class DocumentCaptureViewModel: ObservableObject { } } } - + private func resetBoundingBox() { DispatchQueue.main.async { self.areEdgesDetected = false self.idAspectRatio = self.defaultAspectRatio } } - + private func isCorrectAspectRatio( detectedAspectRatio: Double, tolerance: Double = correctAspectRatioTolerance @@ -269,7 +273,7 @@ class DocumentCaptureViewModel: ObservableObject { let expectedAspectRatio = knownAspectRatio ?? detectedAspectRatio return abs(detectedAspectRatio - expectedAspectRatio) < tolerance } - + private func isRectCentered( detectedRect: Quadrilateral?, imageWidth: Double, @@ -277,29 +281,29 @@ class DocumentCaptureViewModel: ObservableObject { tolerance: Double = centeredTolerance ) -> Bool { guard let detectedRect = detectedRect else { return false } - + // Sometimes, the bounding box is out of frame. This cannot be considered centered // We check only left and right because the document should always fill the width but may // not fill the height if detectedRect.topLeft.x < tolerance || detectedRect.topRight.x > imageWidth - tolerance { return false } - + let imageCenterX = imageWidth / 2 let imageCenterY = imageHeight / 2 - + let rectCenterX = (detectedRect.topLeft.x + detectedRect.topRight.x) / 2 let rectCenterY = (detectedRect.topLeft.y + detectedRect.bottomLeft.y) / 2 - + let deltaX = abs(imageCenterX - rectCenterX) let deltaY = abs(imageCenterY - rectCenterY) - + let isCenteredHorizontally = deltaX < tolerance let isCenteredVertically = deltaY < tolerance - + return isCenteredHorizontally && isCenteredVertically } - + func openSettings() { guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return } UIApplication.shared.open(settingsURL) diff --git a/Sources/SmileID/Classes/FaceDetector/CGImage+CVPixelBuffer.swift b/Sources/SmileID/Classes/FaceDetector/CGImage+CVPixelBuffer.swift new file mode 100644 index 000000000..a0a729c0a --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/CGImage+CVPixelBuffer.swift @@ -0,0 +1,17 @@ +import CoreGraphics +import CoreImage +import VideoToolbox + +extension CGImage { + /** + Creates a new CGImage from a CVPixelBuffer. + + - Note: Not all CVPixelBuffer pixel formats support conversion into a + CGImage-compatible pixel format. + */ + public static func create(pixelBuffer: CVPixelBuffer) -> CGImage? { + var cgImage: CGImage? 
+ VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage) + return cgImage + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift b/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift new file mode 100644 index 000000000..8a9ba94f6 --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift @@ -0,0 +1,217 @@ +import AVFoundation +import Combine +import UIKit +import Vision + +enum FaceDetectorError: Error { + case unableToLoadSelfieModel + case invalidSelfieModelOutput + case noFaceDetected + case unableToCropImage +} + +protocol FaceDetectorViewDelegate: NSObjectProtocol { + func convertFromMetadataToPreviewRect(rect: CGRect) -> CGRect +} + +protocol FaceDetectorResultDelegate: AnyObject { + func faceDetector( + _ detector: FaceDetectorV2, + didDetectFace faceGeometry: FaceGeometryData, + withFaceQuality faceQuality: Float, + selfieQuality: SelfieQualityData, + brightness: Int + ) + func faceDetector(_ detector: FaceDetectorV2, didFailWithError error: Error) +} + +class FaceDetectorV2: NSObject { + private var selfieQualityModel: SelfieQualityDetector? + + private let cropSize = (width: 120, height: 120) + private let faceMovementThreshold: CGFloat = 0.15 + + private var sequenceHandler = VNSequenceRequestHandler() + + weak var viewDelegate: FaceDetectorViewDelegate? + weak var resultDelegate: FaceDetectorResultDelegate? + + override init() { + super.init() + selfieQualityModel = createImageClassifier() + } + + private func createImageClassifier() -> SelfieQualityDetector? { + do { + let modelConfiguration = MLModelConfiguration() + let coreMLModel = try SelfieQualityDetector(configuration: modelConfiguration) + return coreMLModel + } catch { + return nil + } + } + + /// Run Face Capture quality and Face Bounding Box and roll/pitch/yaw tracking + func processImageBuffer(_ imageBuffer: CVPixelBuffer) { + let detectFaceRectanglesRequest = VNDetectFaceRectanglesRequest() + let detectCaptureQualityRequest = VNDetectFaceCaptureQualityRequest() + + do { + try sequenceHandler.perform( + [detectFaceRectanglesRequest, detectCaptureQualityRequest], + on: imageBuffer, + orientation: .leftMirrored + ) + guard let faceDetections = detectFaceRectanglesRequest.results, + let faceQualityObservations = detectCaptureQualityRequest.results, + let faceObservation = faceDetections.first, + let faceQualityObservation = faceQualityObservations.first + else { + self.resultDelegate?.faceDetector( + self, didFailWithError: FaceDetectorError.noFaceDetected) + return + } + + let convertedBoundingBox = + self.viewDelegate?.convertFromMetadataToPreviewRect( + rect: faceObservation.boundingBox) ?? .zero + + let uiImage = UIImage(pixelBuffer: imageBuffer) + let brightness = self.calculateBrightness(uiImage) + let croppedImage = try self.cropImageToFace(uiImage) + + let selfieQualityData = try self.selfieQualityRequest(imageBuffer: croppedImage) + + if #available(iOS 15.0, *) { + let faceGeometryData = FaceGeometryData( + boundingBox: convertedBoundingBox, + roll: faceObservation.roll ?? 0.0, + yaw: faceObservation.yaw ?? 0.0, + pitch: faceObservation.pitch ?? 0.0, + direction: faceDirection(faceObservation: faceObservation) + ) + self.resultDelegate? + .faceDetector( + self, + didDetectFace: faceGeometryData, + withFaceQuality: faceQualityObservation.faceCaptureQuality ?? 
0.0, + selfieQuality: selfieQualityData, + brightness: brightness + ) + } else { + // Fallback on earlier versions + } + } catch { + self.resultDelegate?.faceDetector(self, didFailWithError: error) + } + } + + func selfieQualityRequest(imageBuffer: CVPixelBuffer) throws -> SelfieQualityData { + guard let selfieQualityModel else { + throw FaceDetectorError.unableToLoadSelfieModel + } + let input = SelfieQualityDetectorInput(conv2d_193_input: imageBuffer) + + let prediction = try selfieQualityModel.prediction(input: input) + let output = prediction.Identity + + guard output.shape.count == 2, + output.shape[0] == 1, + output.shape[1] == 2 + else { + throw FaceDetectorError.invalidSelfieModelOutput + } + + let passScore = output[0].floatValue + let failScore = output[1].floatValue + + let selfieQualityData = SelfieQualityData( + failed: failScore, + passed: passScore + ) + return selfieQualityData + } + + private func cropImageToFace( + _ image: UIImage? + ) throws -> CVPixelBuffer { + guard let image, let cgImage = image.cgImage else { + throw FaceDetectorError.unableToCropImage + } + + let request = VNDetectFaceRectanglesRequest() + let handler = VNImageRequestHandler(cgImage: cgImage, options: [:]) + + try handler.perform([request]) + + guard let results = request.results, + let face = results.first + else { + throw FaceDetectorError.noFaceDetected + } + + let boundingBox = face.boundingBox + + let size = CGSize( + width: boundingBox.width * image.size.width, + height: boundingBox.height * image.size.height + ) + let origin = CGPoint( + x: boundingBox.minX * image.size.width, + y: (1 - boundingBox.minY) * image.size.height - size.height + ) + + let faceRect = CGRect(origin: origin, size: size) + + guard let croppedCGImage = cgImage.cropping(to: faceRect) else { + throw FaceDetectorError.unableToCropImage + } + + let croppedImage = UIImage(cgImage: croppedCGImage) + guard + let resizedImage = croppedImage.pixelBuffer( + width: cropSize.width, height: cropSize.height) + else { + throw FaceDetectorError.unableToCropImage + } + + return resizedImage + } + + private func calculateBrightness(_ image: UIImage?) 
-> Int { + guard let image, let cgImage = image.cgImage, + let imageData = cgImage.dataProvider?.data, + let dataPointer = CFDataGetBytePtr(imageData) + else { + return 0 + } + + let bytesPerPixel = cgImage.bitsPerPixel / cgImage.bitsPerComponent + let dataLength = CFDataGetLength(imageData) + var result = 0.0 + for index in stride(from: 0, to: dataLength, by: bytesPerPixel) { + let red = dataPointer[index] + let green = dataPointer[index + 1] + let blue = dataPointer[index + 2] + result += 0.299 * Double(red) + 0.587 * Double(green) + 0.114 * Double(blue) + } + let pixelsCount = dataLength / bytesPerPixel + let brightness = Int(result) / pixelsCount + return brightness + } + + private func faceDirection(faceObservation: VNFaceObservation) -> FaceDirection { + guard let yaw = faceObservation.yaw?.doubleValue else { + return .none + } + let yawInRadians = CGFloat(yaw) + + if yawInRadians > faceMovementThreshold { + return .right + } else if yawInRadians < -faceMovementThreshold { + return .left + } else { + return .none + } + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift b/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift index 675261cd5..1c569250e 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift +++ b/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift @@ -1,7 +1,30 @@ import Foundation -struct FaceGeometryModel: Equatable { +struct FaceGeometryData: Equatable { let boundingBox: CGRect let roll: NSNumber let yaw: NSNumber + let pitch: NSNumber + let direction: FaceDirection +} + +enum FaceDirection { + case left + case right + case none +} + +struct FaceQualityData { + let quality: Float +} + +struct SelfieQualityData { + let failed: Float + let passed: Float +} + +extension SelfieQualityData { + static var zero: SelfieQualityData { + return SelfieQualityData(failed: 0, passed: 0) + } } diff --git a/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift new file mode 100644 index 000000000..bee6b2ece --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift @@ -0,0 +1,137 @@ +import Foundation + +protocol FaceValidatorDelegate: AnyObject { + func updateValidationResult(_ result: FaceValidationResult) +} + +struct FaceValidationResult { + let userInstruction: SelfieCaptureInstruction? + let hasDetectedValidFace: Bool + let faceInBounds: Bool +} + +final class FaceValidator { + weak var delegate: FaceValidatorDelegate? + private var faceLayoutGuideFrame: CGRect = .zero + + // MARK: Constants + private let selfieQualityThreshold: Float = 0.5 + private let luminanceThreshold: ClosedRange<Int> = 80...200 + private let faceBoundsMultiplier: CGFloat = 1.5 + private let faceBoundsThreshold: CGFloat = 50 + + init() {} + + func setLayoutGuideFrame(with frame: CGRect) { + self.faceLayoutGuideFrame = frame + } + + func validate( + faceGeometry: FaceGeometryData, + selfieQuality: SelfieQualityData, + brightness: Int, + currentLivenessTask: LivenessTask?
+ ) { + // check face bounds + let faceBoundsState = checkFaceSizeAndPosition( + using: faceGeometry.boundingBox, + shouldCheckCentering: currentLivenessTask == nil + ) + let isAcceptableBounds = faceBoundsState == .detectedFaceAppropriateSizeAndPosition + + // check brightness + let isAcceptableBrightness = luminanceThreshold.contains(brightness) + + // check selfie quality + let isAcceptableSelfieQuality = checkSelfieQuality(selfieQuality) + + // check that face is ready for capture + let hasDetectedValidFace = checkValidFace( + isAcceptableBounds, + isAcceptableBrightness, + isAcceptableSelfieQuality + ) + + // determine what instruction/animation to display to users + let userInstruction = userInstruction( + from: faceBoundsState, + detectedValidFace: hasDetectedValidFace, + isAcceptableBrightness: isAcceptableBrightness, + isAcceptableSelfieQuality: isAcceptableSelfieQuality, + livenessTask: currentLivenessTask + ) + + let validationResult = FaceValidationResult( + userInstruction: userInstruction, + hasDetectedValidFace: hasDetectedValidFace, + faceInBounds: isAcceptableBounds + ) + delegate?.updateValidationResult(validationResult) + } + + private func userInstruction( + from faceBoundsState: FaceBoundsState, + detectedValidFace: Bool, + isAcceptableBrightness: Bool, + isAcceptableSelfieQuality: Bool, + livenessTask: LivenessTask? + ) -> SelfieCaptureInstruction? { + if detectedValidFace { + if let livenessTask { + switch livenessTask { + case .lookLeft: + return .lookLeft + case .lookRight: + return .lookRight + case .lookUp: + return .lookUp + } + } + return nil + } else if faceBoundsState == .detectedFaceOffCentre { + return .headInFrame + } else if faceBoundsState == .detectedFaceTooSmall { + return .moveCloser + } else if faceBoundsState == .detectedFaceTooLarge { + return .moveBack + } else if !isAcceptableSelfieQuality || !isAcceptableBrightness { + return .goodLight + } + return nil + } + + // MARK: Validation Checks + private func checkFaceSizeAndPosition(using boundingBox: CGRect, shouldCheckCentering: Bool) -> FaceBoundsState { + let maxFaceWidth = faceLayoutGuideFrame.width - 20 + let minFaceWidth = faceLayoutGuideFrame.width / faceBoundsMultiplier + + if boundingBox.width > maxFaceWidth { + return .detectedFaceTooLarge + } else if boundingBox.width < minFaceWidth { + return .detectedFaceTooSmall + } + + if shouldCheckCentering { + let horizontalOffset = abs(boundingBox.midX - faceLayoutGuideFrame.midX) + let verticalOffset = abs(boundingBox.midY - faceLayoutGuideFrame.midY) + + if horizontalOffset > faceBoundsThreshold || verticalOffset > faceBoundsThreshold { + return .detectedFaceOffCentre + } + } + + return .detectedFaceAppropriateSizeAndPosition + } + + private func checkSelfieQuality(_ value: SelfieQualityData) -> Bool { + return value.passed >= selfieQualityThreshold + } + + private func checkValidFace( + _ isAcceptableBounds: Bool, + _ isAcceptableBrightness: Bool, + _ isAcceptableSelfieQuality: Bool + ) -> Bool { + return isAcceptableBounds && isAcceptableBrightness && isAcceptableSelfieQuality + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift new file mode 100644 index 000000000..3d95749cf --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift @@ -0,0 +1,192 @@ +import Foundation +import Vision + +/// Represents the different tasks in an active liveness check. 
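+/// The task order is not fixed: LivenessCheckManager's initializer below defaults the sequence to LivenessTask.allCases.shuffled(), so one session might run, for example, [.lookUp, .lookLeft, .lookRight] and the next a different order.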
+enum LivenessTask: CaseIterable { + case lookLeft + case lookRight + case lookUp +} + +protocol LivenessCheckManagerDelegate: AnyObject { + func didCompleteLivenessTask() + func didCompleteLivenessChallenge() + func livenessChallengeTimeout() +} + +class LivenessCheckManager: ObservableObject { + /// The sequence of liveness tasks to be performed. + private(set) var livenessTaskSequence: [LivenessTask] = [] + /// The index pointing to the current task in the sequence. + private var currentTaskIndex: Int = 0 + + weak var delegate: LivenessCheckManagerDelegate? + + // MARK: Constants + /// The minimum threshold for yaw (left-right head movement) + private let minYawAngleThreshold: CGFloat = 0.15 + /// The maximum threshold for yaw (left-right head movement) + private let maxYawAngleThreshold: CGFloat = 0.3 + /// The minimum threshold for pitch (up-down head movement) + private let minPitchAngleThreshold: CGFloat = 0.15 + /// The maximum threshold for pitch (up-down head movement) + private let maxPitchAngleThreshold: CGFloat = 0.3 + /// The timeout duration for each task in seconds. + private let taskTimeoutDuration: TimeInterval + + // MARK: Face Orientation Properties + @Published var lookLeftProgress: CGFloat = 0.0 + @Published var lookRightProgress: CGFloat = 0.0 + @Published var lookUpProgress: CGFloat = 0.0 + + /// The current liveness task. + var currentTask: LivenessTask? { + didSet { + handleTaskChange() + } + } + /// The timer used for task timeout. + private let taskTimer: TimerProtocol + private let dispatchQueue: DispatchQueueType + private var elapsedTime: TimeInterval = 0.0 + + /// Initializes the LivenessCheckManager with a shuffled set of tasks. + init( + taskTimer: TimerProtocol = RealTimer(), + taskTimeoutDuration: TimeInterval = 120, + dispatchQueue: DispatchQueueType = DispatchQueue.main, + livenessTaskSequence: [LivenessTask] = LivenessTask.allCases.shuffled() + ) { + self.taskTimer = taskTimer + self.taskTimeoutDuration = taskTimeoutDuration + self.dispatchQueue = dispatchQueue + self.livenessTaskSequence = livenessTaskSequence + } + + /// Cleans up resources when the manager is no longer needed. + deinit { + stopTaskTimer() + } + + /// Resets the task timer to the initial timeout duration. + private func resetTaskTimer() { + stopTaskTimer() + elapsedTime = 0.0 + + dispatchQueue.async { + self.taskTimer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in + self?.taskTimerFired() + } + } + } + + private func taskTimerFired() { + self.elapsedTime += 1 + if self.elapsedTime == self.taskTimeoutDuration { + self.handleTaskTimeout() + } + } + + /// Stops the current task timer. + private func stopTaskTimer() { + taskTimer.invalidate() + } + + /// Handles the timeout event for a task. + private func handleTaskTimeout() { + stopTaskTimer() + delegate?.livenessChallengeTimeout() + } + + private func handleTaskChange() { + if currentTask != nil { + resetTaskTimer() + } else { + stopTaskTimer() + } + } + + /// Advances to the next task in the sequence + /// - Returns: `true` if there is a next task, `false` if all tasks are completed. + private func advanceToNextTask() -> Bool { + guard currentTaskIndex < livenessTaskSequence.count - 1 else { + return false + } + currentTaskIndex += 1 + currentTask = livenessTaskSequence[currentTaskIndex] + return true + } + + /// Sets the initial task for the liveness check. 
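+ /// A sketch of the expected call order (illustrative; assumes the caller conforms to LivenessCheckManagerDelegate): + /// + /// livenessCheckManager.delegate = self + /// livenessCheckManager.initiateLivenessCheck() + /// // then, for each analyzed camera frame: + /// livenessCheckManager.processFaceGeometry(faceGeometry)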
+ func initiateLivenessCheck() { + currentTask = livenessTaskSequence.first + } + + /// Processes face geometry data and checks for task completion. + /// - Parameter faceGeometry: The current face geometry data. + func processFaceGeometry(_ faceGeometry: FaceGeometryData) { + let yawValue = CGFloat(faceGeometry.yaw.doubleValue) + let pitchValue = CGFloat(faceGeometry.pitch.doubleValue) + updateFaceOrientationValues(yawValue, pitchValue) + } + + /// Updates the progress of the current task from the latest head-pose angles. + /// - Parameters: + ///   - yawValue: The face's yaw (left/right rotation) in radians. + ///   - pitchValue: The face's pitch (up/down rotation) in radians. + private func updateFaceOrientationValues( + _ yawValue: CGFloat, + _ pitchValue: CGFloat + ) { + guard let currentTask = currentTask else { return } + + switch currentTask { + case .lookLeft: + if yawValue < -minYawAngleThreshold { + let progress = + yawValue + .normalized(min: -minYawAngleThreshold, max: -maxYawAngleThreshold) + lookLeftProgress = min(max(lookLeftProgress, progress), 1.0) + if lookLeftProgress == 1.0 { + completeCurrentTask() + } + } + case .lookRight: + if yawValue > minYawAngleThreshold { + let progress = + yawValue + .normalized(min: minYawAngleThreshold, max: maxYawAngleThreshold) + lookRightProgress = min(max(lookRightProgress, progress), 1.0) + if lookRightProgress == 1.0 { + completeCurrentTask() + } + } + case .lookUp: + if pitchValue < -minPitchAngleThreshold { + let progress = + pitchValue + .normalized(min: -minPitchAngleThreshold, max: -maxPitchAngleThreshold) + lookUpProgress = min(max(lookUpProgress, progress), 1.0) + if lookUpProgress == 1.0 { + completeCurrentTask() + } + } + } + } + + /// Completes the current task and moves to the next one. + /// If all tasks are completed, it signals the completion of the liveness challenge. + private func completeCurrentTask() { + delegate?.didCompleteLivenessTask() + + if !advanceToNextTask() { + // Liveness challenge complete + delegate?.didCompleteLivenessChallenge() + self.currentTask = nil + } + } +} + +extension CGFloat { + /// Linearly maps a value within [min, max] onto [0, 1]; e.g. (-0.225).normalized(min: -0.15, max: -0.3) == 0.5. + func normalized(min: CGFloat, max: CGFloat) -> CGFloat { + return (self - min) / (max - min) + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift b/Sources/SmileID/Classes/FaceDetector/Models.swift similarity index 75% rename from Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift rename to Sources/SmileID/Classes/FaceDetector/Models.swift index 83687b18c..0d0bdd2ac 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift +++ b/Sources/SmileID/Classes/FaceDetector/Models.swift @@ -1,19 +1,15 @@ import Foundation enum FaceDetectionState: Equatable { - case sceneUnstable - case finalFrame - case multipleFacesDetected case faceDetected case noFaceDetected case faceDetectionErrored - case smileFrame } -enum FaceObservation<T, E: Error>: Equatable { +enum FaceObservation<T> { case faceFound(T) case faceNotFound - case errored(E) + case errored(Error) } enum FaceBoundsState { diff --git a/Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift b/Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift new file mode 100644 index 000000000..e87c214c6 --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift @@ -0,0 +1,307 @@ +// swiftlint:disable all +// +// SelfieQualityDetector.swift +// +// This file was automatically generated and should not be edited.
+// + +import CoreML + + +/// Model Prediction Input Type +@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *) +class SelfieQualityDetectorInput : MLFeatureProvider { + + /// conv2d_193_input as color (kCVPixelFormatType_32BGRA) image buffer, 120 pixels wide by 120 pixels high + var conv2d_193_input: CVPixelBuffer + + var featureNames: Set<String> { + get { + return ["conv2d_193_input"] + } + } + + func featureValue(for featureName: String) -> MLFeatureValue? { + if (featureName == "conv2d_193_input") { + return MLFeatureValue(pixelBuffer: conv2d_193_input) + } + return nil + } + + init(conv2d_193_input: CVPixelBuffer) { + self.conv2d_193_input = conv2d_193_input + } + + convenience init(conv2d_193_inputWith conv2d_193_input: CGImage) throws { + self.init(conv2d_193_input: try MLFeatureValue(cgImage: conv2d_193_input, pixelsWide: 120, pixelsHigh: 120, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!) + } + + convenience init(conv2d_193_inputAt conv2d_193_input: URL) throws { + self.init(conv2d_193_input: try MLFeatureValue(imageAt: conv2d_193_input, pixelsWide: 120, pixelsHigh: 120, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!) + } + + func setConv2d_193_input(with conv2d_193_input: CGImage) throws { + self.conv2d_193_input = try MLFeatureValue(cgImage: conv2d_193_input, pixelsWide: 120, pixelsHigh: 120, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue! + } + + func setConv2d_193_input(with conv2d_193_input: URL) throws { + self.conv2d_193_input = try MLFeatureValue(imageAt: conv2d_193_input, pixelsWide: 120, pixelsHigh: 120, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue! + } + +} + + +/// Model Prediction Output Type +@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *) +class SelfieQualityDetectorOutput : MLFeatureProvider { + + /// Source provided by CoreML + private let provider : MLFeatureProvider + + /// Identity as multidimensional array of floats + var Identity: MLMultiArray { + return self.provider.featureValue(for: "Identity")!.multiArrayValue! + } + + /// Identity as multidimensional array of floats + @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *) + var IdentityShapedArray: MLShapedArray<Float32> { + return MLShapedArray<Float32>(self.Identity) + } + + var featureNames: Set<String> { + return self.provider.featureNames + } + + func featureValue(for featureName: String) -> MLFeatureValue? { + return self.provider.featureValue(for: featureName) + } + + init(Identity: MLMultiArray) { + self.provider = try! MLDictionaryFeatureProvider(dictionary: ["Identity" : MLFeatureValue(multiArray: Identity)]) + } + + init(features: MLFeatureProvider) { + self.provider = features + } +} + + +/// Class for model loading and prediction +@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *) +class SelfieQualityDetector { + let model: MLModel + + /// URL of model assuming it was installed in the same bundle as this class + class var urlOfModelInThisBundle : URL { + let bundle = SmileIDResourcesHelper.bundle + return bundle.url(forResource: "SelfieQualityDetector", withExtension:"mlmodelc")! + } + + /** + Construct SelfieQualityDetector instance with an existing MLModel object. + + Usually the application does not use this initializer unless it makes a subclass of SelfieQualityDetector. + Such application may want to use `MLModel(contentsOfURL:configuration:)` and `SelfieQualityDetector.urlOfModelInThisBundle` to create a MLModel object to pass-in.
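+ + For example (an illustrative call site): + + let mlModel = try MLModel(contentsOf: SelfieQualityDetector.urlOfModelInThisBundle) + let detector = SelfieQualityDetector(model: mlModel)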
+ + - parameters: + - model: MLModel object + */ + init(model: MLModel) { + self.model = model + } + + /** + Construct SelfieQualityDetector instance by automatically loading the model from the app's bundle. + */ + @available(*, deprecated, message: "Use init(configuration:) instead and handle errors appropriately.") + convenience init() { + try! self.init(contentsOf: type(of:self).urlOfModelInThisBundle) + } + + /** + Construct a model with configuration + + - parameters: + - configuration: the desired model configuration + + - throws: an NSError object that describes the problem + */ + convenience init(configuration: MLModelConfiguration) throws { + try self.init(contentsOf: type(of:self).urlOfModelInThisBundle, configuration: configuration) + } + + /** + Construct SelfieQualityDetector instance with explicit path to mlmodelc file + - parameters: + - modelURL: the file url of the model + + - throws: an NSError object that describes the problem + */ + convenience init(contentsOf modelURL: URL) throws { + try self.init(model: MLModel(contentsOf: modelURL)) + } + + /** + Construct a model with URL of the .mlmodelc directory and configuration + + - parameters: + - modelURL: the file url of the model + - configuration: the desired model configuration + + - throws: an NSError object that describes the problem + */ + convenience init(contentsOf modelURL: URL, configuration: MLModelConfiguration) throws { + try self.init(model: MLModel(contentsOf: modelURL, configuration: configuration)) + } + + /** + Construct SelfieQualityDetector instance asynchronously with optional configuration. + + Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread. + + - parameters: + - configuration: the desired model configuration + - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully + */ + @available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) + class func load(configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<SelfieQualityDetector, Error>) -> Void) { + return self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration, completionHandler: handler) + } + + /** + Construct SelfieQualityDetector instance asynchronously with optional configuration. + + Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread. + + - parameters: + - configuration: the desired model configuration + */ + @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *) + class func load(configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> SelfieQualityDetector { + return try await self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration) + } + + /** + Construct SelfieQualityDetector instance asynchronously with URL of the .mlmodelc directory with optional configuration. + + Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
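+ + An illustrative call site, where modelURL stands in for whatever compiled .mlmodelc URL the caller provides: + + SelfieQualityDetector.load(contentsOf: modelURL) { result in + switch result { + case .success(let detector): + _ = detector // retain for later predictions + case .failure(let error): + print("Failed to load model: \(error)") + } + }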
+ + - parameters: + - modelURL: the URL to the model + - configuration: the desired model configuration + - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully + */ + @available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) + class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<SelfieQualityDetector, Error>) -> Void) { + MLModel.load(contentsOf: modelURL, configuration: configuration) { result in + switch result { + case .failure(let error): + handler(.failure(error)) + case .success(let model): + handler(.success(SelfieQualityDetector(model: model))) + } + } + } + + /** + Construct SelfieQualityDetector instance asynchronously with URL of the .mlmodelc directory with optional configuration. + + Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread. + + - parameters: + - modelURL: the URL to the model + - configuration: the desired model configuration + */ + @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *) + class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> SelfieQualityDetector { + let model = try await MLModel.load(contentsOf: modelURL, configuration: configuration) + return SelfieQualityDetector(model: model) + } + + /** + Make a prediction using the structured interface + + - parameters: + - input: the input to the prediction as SelfieQualityDetectorInput + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as SelfieQualityDetectorOutput + */ + func prediction(input: SelfieQualityDetectorInput) throws -> SelfieQualityDetectorOutput { + return try self.prediction(input: input, options: MLPredictionOptions()) + } + + /** + Make a prediction using the structured interface + + - parameters: + - input: the input to the prediction as SelfieQualityDetectorInput + - options: prediction options + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as SelfieQualityDetectorOutput + */ + func prediction(input: SelfieQualityDetectorInput, options: MLPredictionOptions) throws -> SelfieQualityDetectorOutput { + let outFeatures = try model.prediction(from: input, options:options) + return SelfieQualityDetectorOutput(features: outFeatures) + } + + /** + Make an asynchronous prediction using the structured interface + + - parameters: + - input: the input to the prediction as SelfieQualityDetectorInput + - options: prediction options + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as SelfieQualityDetectorOutput + */ + @available(macOS 14.0, iOS 17.0, tvOS 17.0, watchOS 10.0, *) + func prediction(input: SelfieQualityDetectorInput, options: MLPredictionOptions = MLPredictionOptions()) async throws -> SelfieQualityDetectorOutput { + let outFeatures = try await model.prediction(from: input, options:options) + return SelfieQualityDetectorOutput(features: outFeatures) + } + + /** + Make a prediction using the convenience interface + + - parameters: + - conv2d_193_input as color (kCVPixelFormatType_32BGRA) image buffer, 120 pixels wide by 120 pixels high + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as SelfieQualityDetectorOutput + */ + func prediction(conv2d_193_input:
CVPixelBuffer) throws -> SelfieQualityDetectorOutput { + let input_ = SelfieQualityDetectorInput(conv2d_193_input: conv2d_193_input) + return try self.prediction(input: input_) + } + + /** + Make a batch prediction using the structured interface + + - parameters: + - inputs: the inputs to the prediction as [SelfieQualityDetectorInput] + - options: prediction options + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as [SelfieQualityDetectorOutput] + */ + func predictions(inputs: [SelfieQualityDetectorInput], options: MLPredictionOptions = MLPredictionOptions()) throws -> [SelfieQualityDetectorOutput] { + let batchIn = MLArrayBatchProvider(array: inputs) + let batchOut = try model.predictions(from: batchIn, options: options) + var results : [SelfieQualityDetectorOutput] = [] + results.reserveCapacity(inputs.count) + for i in 0..<batchOut.count { + let outProvider = batchOut.features(at: i) + let result = SelfieQualityDetectorOutput(features: outProvider) + results.append(result) + } + return results + } +} + +extension UIImage { + /** + Converts the image to an ARGB `CVPixelBuffer`. + */ + public func pixelBuffer(width: Int, height: Int) -> CVPixelBuffer? { + return pixelBuffer(width: width, height: height, + pixelFormatType: kCVPixelFormatType_32ARGB, + colorSpace: CGColorSpaceCreateDeviceRGB(), + alphaInfo: .noneSkipFirst) + } + + /** + Resizes the image to `width` x `height` and converts it to a `CVPixelBuffer` + with the specified pixel format, color space, and alpha channel. + */ + public func pixelBuffer(width: Int, height: Int, + pixelFormatType: OSType, + colorSpace: CGColorSpace, + alphaInfo: CGImageAlphaInfo) -> CVPixelBuffer? { + var maybePixelBuffer: CVPixelBuffer? + let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, + kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] + let status = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + pixelFormatType, + attrs as CFDictionary, + &maybePixelBuffer) + + guard status == kCVReturnSuccess, let pixelBuffer = maybePixelBuffer else { + return nil + } + + let flags = CVPixelBufferLockFlags(rawValue: 0) + guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(pixelBuffer, flags) else { + return nil + } + defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, flags) } + + guard let context = CGContext(data: CVPixelBufferGetBaseAddress(pixelBuffer), + width: width, + height: height, + bitsPerComponent: 8, + bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), + space: colorSpace, + bitmapInfo: alphaInfo.rawValue) + else { + return nil + } + + UIGraphicsPushContext(context) + context.translateBy(x: 0, y: CGFloat(height)) + context.scaleBy(x: 1, y: -1) + self.draw(in: CGRect(x: 0, y: 0, width: width, height: height)) + UIGraphicsPopContext() + + return pixelBuffer + } +} + +extension UIImage { + /** + Creates a new UIImage from a CVPixelBuffer. + + - Note: Not all CVPixelBuffer pixel formats support conversion into a + CGImage-compatible pixel format.
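+ + A round-trip sketch using the resizing helper above (dimensions illustrative): + + if let buffer = image.pixelBuffer(width: 120, height: 120), + let restored = UIImage(pixelBuffer: buffer) { + // restored is a 120x120, ARGB-backed copy of image + }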
+ */ + public convenience init?(pixelBuffer: CVPixelBuffer) { + if let cgImage = CGImage.create(pixelBuffer: pixelBuffer) { + self.init(cgImage: cgImage) + } else { + return nil + } + } +} diff --git a/Sources/SmileID/Classes/Helpers/DispatchQueueType.swift b/Sources/SmileID/Classes/Helpers/DispatchQueueType.swift new file mode 100644 index 000000000..33ddaf03d --- /dev/null +++ b/Sources/SmileID/Classes/Helpers/DispatchQueueType.swift @@ -0,0 +1,11 @@ +import Foundation + +protocol DispatchQueueType { + func async(execute work: @escaping @convention(block) () -> Void) +} + +extension DispatchQueue: DispatchQueueType { + func async(execute work: @escaping @convention(block) () -> Void) { + async(group: nil, qos: .unspecified, flags: [], execute: work) + } +} diff --git a/Sources/SmileID/Classes/Helpers/NavigationHelper.swift b/Sources/SmileID/Classes/Helpers/NavigationHelper.swift index 0936b1d9e..fb8e26e6a 100644 --- a/Sources/SmileID/Classes/Helpers/NavigationHelper.swift +++ b/Sources/SmileID/Classes/Helpers/NavigationHelper.swift @@ -16,3 +16,14 @@ extension View { } } } + +public struct ModalModeKey: EnvironmentKey { + public static let defaultValue = Binding<Bool>.constant(false) +} + +extension EnvironmentValues { + public var modalMode: Binding<Bool> { + get { self[ModalModeKey.self] } + set { self[ModalModeKey.self] = newValue } + } +} diff --git a/Sources/SmileID/Classes/Helpers/SmileIDResourcesHelper.swift b/Sources/SmileID/Classes/Helpers/SmileIDResourcesHelper.swift index ef89fb622..38a40a681 100644 --- a/Sources/SmileID/Classes/Helpers/SmileIDResourcesHelper.swift +++ b/Sources/SmileID/Classes/Helpers/SmileIDResourcesHelper.swift @@ -76,6 +76,7 @@ public class SmileIDResourcesHelper { public static var ConsentContactDetails = SmileIDResourcesHelper.image("ConsentContactDetails")! public static var ConsentDocumentInfo = SmileIDResourcesHelper.image("ConsentDocumentInfo")! public static var ConsentPersonalInfo = SmileIDResourcesHelper.image("ConsentPersonalInfo")! + public static var Loader = SmileIDResourcesHelper.image("Loader")! /// Size of font. public static let pointSize: CGFloat = 16 diff --git a/Sources/SmileID/Classes/Helpers/TimerProtocol.swift b/Sources/SmileID/Classes/Helpers/TimerProtocol.swift new file mode 100644 index 000000000..983a63120 --- /dev/null +++ b/Sources/SmileID/Classes/Helpers/TimerProtocol.swift @@ -0,0 +1,54 @@ +import Foundation + +protocol TimerProtocol { + func scheduledTimer( + withTimeInterval interval: TimeInterval, repeats: Bool, block: @escaping (TimerProtocol) -> Void) + func invalidate() +} + +class RealTimer: TimerProtocol { + private var timer: Timer? + private let lock = NSLock() + + func scheduledTimer( + withTimeInterval interval: TimeInterval, repeats: Bool, block: @escaping (any TimerProtocol) -> Void + ) { + lock.lock() + defer { lock.unlock() } + timer = Timer.scheduledTimer( + withTimeInterval: interval, repeats: repeats, + block: { [weak self] _ in + guard let self = self else { return } + block(self) + }) + } + + func invalidate() { + lock.lock() + defer { lock.unlock() } + timer?.invalidate() + timer = nil + } +} + +class MockTimer: TimerProtocol { + private var isInvalidated: Bool = false + private var interval: TimeInterval? + var repeats: Bool? + private var block: ((TimerProtocol) -> Void)?
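+ + // Illustrative test usage (hypothetical XCTest body): inject MockTimer into + // LivenessCheckManager and drive the timeout clock by hand; the manager also + // schedules through an injected DispatchQueueType, so a synchronous mock queue + // is assumed here as well: + // + // let mockTimer = MockTimer() + // let manager = LivenessCheckManager(taskTimer: mockTimer, taskTimeoutDuration: 3) + // manager.initiateLivenessCheck() + // mockTimer.fire() // runs the scheduled block once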
+ + func scheduledTimer( + withTimeInterval interval: TimeInterval, repeats: Bool, block: @escaping (any TimerProtocol) -> Void + ) { + self.interval = interval + self.repeats = repeats + self.block = block + } + + func invalidate() { + isInvalidated = true + } + + func fire() { + block?(self) + } +} diff --git a/Sources/SmileID/Classes/Networking/Models/FailureReason.swift b/Sources/SmileID/Classes/Networking/Models/FailureReason.swift new file mode 100644 index 000000000..f68bcd028 --- /dev/null +++ b/Sources/SmileID/Classes/Networking/Models/FailureReason.swift @@ -0,0 +1,17 @@ +import Foundation + +public enum FailureReason: Encodable { + case mobileActiveLivenessTimeout + + private enum CodingKeys: String, CodingKey { + case mobileActiveLivenessTimeout = "mobile_active_liveness_timed_out" + } + + public func encode(to encoder: any Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + switch self { + case .mobileActiveLivenessTimeout: + try container.encode(true, forKey: .mobileActiveLivenessTimeout) + } + } +} diff --git a/Sources/SmileID/Classes/Networking/ServiceRunnable.swift b/Sources/SmileID/Classes/Networking/ServiceRunnable.swift index 32d05ddce..567e92f7a 100644 --- a/Sources/SmileID/Classes/Networking/ServiceRunnable.swift +++ b/Sources/SmileID/Classes/Networking/ServiceRunnable.swift @@ -4,18 +4,18 @@ protocol ServiceRunnable { var serviceClient: RestServiceClient { get } associatedtype PathType: CustomStringConvertible var baseURL: URL? { get } - + /// POST service call to a particular path with a body. /// - Parameters: /// - path: Endpoint to execute the POST call. /// - body: The contents of the body of the request. func post<T: Encodable, U: Decodable>(to path: PathType, with body: T) async throws -> U - + /// Get service call to a particular path /// - Parameters: /// - path: Endpoint to execute the GET call. func get<U: Decodable>(to path: PathType) async throws -> U - + // POST service call to make a multipart request. /// - Parameters: /// - path: Endpoint to execute the POST call. @@ -31,9 +31,10 @@ protocol ServiceRunnable { callbackUrl: String?, sandboxResult: Int?, allowNewEnroll: Bool?, + failureReason: FailureReason?, metadata: Metadata ) async throws -> SmartSelfieResponse - + /// PUT service call to a particular path with a body. /// - Parameters: /// - data: Data to be uploaded @@ -53,7 +54,7 @@ extension ServiceRunnable { } return URL(string: SmileID.config.prodLambdaUrl) } - + func post<T: Encodable, U: Decodable>( to path: PathType, with body: T @@ -66,7 +67,7 @@ extension ServiceRunnable { ) return try await serviceClient.send(request: request) } - + func get<U: Decodable>(to path: PathType) async throws -> U { let request = try createRestRequest( path: path, @@ -74,7 +75,7 @@ extension ServiceRunnable { ) return try await serviceClient.send(request: request) } - + func multipart( to path: PathType, signature: String, @@ -86,6 +87,7 @@ extension ServiceRunnable { callbackUrl: String? = nil, sandboxResult: Int? = nil, allowNewEnroll: Bool? = nil, + failureReason: FailureReason?
= nil, metadata: Metadata = Metadata.default() ) async throws -> SmartSelfieResponse { let boundary = generateBoundary() @@ -108,14 +110,15 @@ extension ServiceRunnable { callbackUrl: callbackUrl?.nilIfEmpty(), sandboxResult: sandboxResult, allowNewEnroll: allowNewEnroll, + failureReason: failureReason, metadata: metadata, boundary: boundary ) ) - + return try await serviceClient.multipart(request: request) } - + private func createMultiPartRequest( url: PathType, method: RestMethod, @@ -126,15 +129,15 @@ extension ServiceRunnable { guard var baseURL = baseURL?.absoluteString else { throw URLError(.badURL) } - + if let range = baseURL.range(of: "/v1/", options: .backwards) { baseURL.removeSubrange(range) } - + guard let url = URL(string: baseURL)?.appendingPathComponent(path) else { throw URLError(.badURL) } - + let request = RestRequest( url: url, method: method, @@ -143,7 +146,7 @@ extension ServiceRunnable { ) return request } - + func upload( data: Data, to url: String, @@ -157,7 +160,7 @@ extension ServiceRunnable { ) return try await serviceClient.upload(request: uploadRequest) } - + private func createUploadRequest( url: String, method: RestMethod, @@ -176,7 +179,7 @@ extension ServiceRunnable { ) return request } - + private func createRestRequest( path: PathType, method: RestMethod, @@ -188,7 +191,7 @@ extension ServiceRunnable { guard let url = baseURL?.appendingPathComponent(path) else { throw URLError(.badURL) } - + do { let request = try RestRequest( url: url, @@ -202,7 +205,7 @@ extension ServiceRunnable { throw error } } - + private func createRestRequest( path: PathType, method: RestMethod, @@ -212,7 +215,7 @@ extension ServiceRunnable { guard let url = baseURL?.appendingPathComponent(path) else { throw URLError(.badURL) } - + let request = RestRequest( url: url, method: method, @@ -220,11 +223,11 @@ extension ServiceRunnable { ) return request } - + func generateBoundary() -> String { return UUID().uuidString } - + // swiftlint:disable line_length cyclomatic_complexity func createMultiPartRequestData( selfieImage: MultipartBody, @@ -234,67 +237,75 @@ extension ServiceRunnable { callbackUrl: String?, sandboxResult: Int?, allowNewEnroll: Bool?, + failureReason: FailureReason?, metadata: Metadata = Metadata.default(), boundary: String ) -> Data { let lineBreak = "\r\n" var body = Data() - + // Append parameters if available if let parameters = partnerParams { if let boundaryData = "--\(boundary)\(lineBreak)".data(using: .utf8), - let dispositionData = "Content-Disposition: form-data; name=\"partner_params\"\(lineBreak)".data(using: .utf8), - let contentTypeData = "Content-Type: application/json\(lineBreak + lineBreak)".data(using: .utf8), - let lineBreakData = lineBreak.data(using: .utf8) { + let dispositionData = "Content-Disposition: form-data; name=\"partner_params\"\(lineBreak)".data( + using: .utf8), + let contentTypeData = "Content-Type: application/json\(lineBreak + lineBreak)".data(using: .utf8), + let lineBreakData = lineBreak.data(using: .utf8) { body.append(boundaryData) body.append(dispositionData) body.append(contentTypeData) - + if let jsonData = try? JSONSerialization.data(withJSONObject: parameters, options: []) { body.append(jsonData) body.append(lineBreakData) } } } - + // Append userId if available if let userId = userId { if let valueData = "\(userId)\(lineBreak)".data(using: .utf8) { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"user_id\"\(lineBreak + lineBreak)".data(using: .utf8)!) 
+ body.append( + "Content-Disposition: form-data; name=\"user_id\"\(lineBreak + lineBreak)".data(using: .utf8)!) body.append(valueData) } } - + // Append callbackUrl if available if let callbackUrl = callbackUrl { if let valueData = "\(callbackUrl)\(lineBreak)".data(using: .utf8) { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"callback_url\"\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"callback_url\"\(lineBreak + lineBreak)".data(using: .utf8)!) body.append(valueData) } } - + // Append sandboxResult if available if let sandboxResult = sandboxResult { let sandboxResultString = "\(sandboxResult)" if let valueData = "\(sandboxResultString)\(lineBreak)".data(using: .utf8) { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"sandbox_result\"\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"sandbox_result\"\(lineBreak + lineBreak)".data( + using: .utf8)!) body.append(valueData) } } - + // Append allowNewEnroll if available if let allowNewEnroll = allowNewEnroll { let allowNewEnrollString = "\(allowNewEnroll)" if let valueData = "\(allowNewEnrollString)\(lineBreak)".data(using: .utf8) { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"allow_new_enroll\"\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"allow_new_enroll\"\(lineBreak + lineBreak)".data( + using: .utf8)!) body.append(valueData) } } - + // Append metadata let encoder = JSONEncoder() if let metadataData = try? encoder.encode(metadata.items) { @@ -304,23 +315,37 @@ extension ServiceRunnable { body.append(metadataData) body.append(lineBreak.data(using: .utf8)!) } - + // Append liveness media files for item in livenessImages { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"\("liveness_images")\"; filename=\"\(item.filename)\"\(lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"\("liveness_images")\"; filename=\"\(item.filename)\"\(lineBreak)" + .data(using: .utf8)!) body.append("Content-Type: \(item.mimeType)\(lineBreak + lineBreak)".data(using: .utf8)!) body.append(item.data) body.append(lineBreak.data(using: .utf8)!) } - + // Append selfie media file body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"\("selfie_image")\"; filename=\"\(selfieImage.filename)\"\(lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"\("selfie_image")\"; filename=\"\(selfieImage.filename)\"\(lineBreak)" + .data(using: .utf8)!) body.append("Content-Type: \(selfieImage.mimeType)\(lineBreak + lineBreak)".data(using: .utf8)!) body.append(selfieImage.data) body.append(lineBreak.data(using: .utf8)!) - + + // Append failure reason if available + if let failureReason, + let failureReasonData = try? encoder.encode(failureReason) { + body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) + body.append("Content-Disposition: form-data; name=\"failure_reason\"\(lineBreak)".data(using: .utf8)!) + body.append("Content-Type: application/json\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append(failureReasonData) + body.append(lineBreak.data(using: .utf8)!) 
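+ // The part appended above encodes, via FailureReason's custom Encodable + // conformance, a small JSON object, e.g. {"mobile_active_liveness_timed_out":true}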
+ } + // Append final boundary body.append("--\(boundary)--\(lineBreak)".data(using: .utf8)!) return body diff --git a/Sources/SmileID/Classes/Networking/SmileIDService.swift b/Sources/SmileID/Classes/Networking/SmileIDService.swift index ec9176774..a5661aed8 100644 --- a/Sources/SmileID/Classes/Networking/SmileIDService.swift +++ b/Sources/SmileID/Classes/Networking/SmileIDService.swift @@ -24,6 +24,7 @@ public protocol SmileIDServiceable { callbackUrl: String?, sandboxResult: Int?, allowNewEnroll: Bool?, + failureReason: FailureReason?, metadata: Metadata ) async throws -> SmartSelfieResponse @@ -38,6 +39,7 @@ public protocol SmileIDServiceable { partnerParams: [String: String]?, callbackUrl: String?, sandboxResult: Int?, + failureReason: FailureReason?, metadata: Metadata ) async throws -> SmartSelfieResponse @@ -222,6 +224,7 @@ public class SmileIDService: SmileIDServiceable, ServiceRunnable { callbackUrl: String? = SmileID.callbackUrl, sandboxResult: Int? = nil, allowNewEnroll: Bool? = nil, + failureReason: FailureReason? = nil, metadata: Metadata = Metadata.default() ) async throws -> SmartSelfieResponse { try await multipart( @@ -235,6 +238,7 @@ public class SmileIDService: SmileIDServiceable, ServiceRunnable { callbackUrl: callbackUrl, sandboxResult: sandboxResult, allowNewEnroll: allowNewEnroll, + failureReason: failureReason, metadata: metadata ) } @@ -248,6 +252,7 @@ public class SmileIDService: SmileIDServiceable, ServiceRunnable { partnerParams: [String: String]? = nil, callbackUrl: String? = SmileID.callbackUrl, sandboxResult: Int? = nil, + failureReason: FailureReason? = nil, metadata: Metadata = Metadata.default() ) async throws -> SmartSelfieResponse { try await multipart( @@ -260,6 +265,7 @@ public class SmileIDService: SmileIDServiceable, ServiceRunnable { partnerParams: partnerParams, callbackUrl: callbackUrl, sandboxResult: sandboxResult, + failureReason: failureReason, metadata: metadata ) } diff --git a/Sources/SmileID/Classes/SelfieCapture/CaptureGuideAnimation.swift b/Sources/SmileID/Classes/SelfieCapture/CaptureGuideAnimation.swift new file mode 100644 index 000000000..13d274ba0 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/CaptureGuideAnimation.swift @@ -0,0 +1,49 @@ +import Foundation + +enum CaptureGuideAnimation: Equatable { + case goodLight + case headInFrame + case moveBack + case moveCloser + case lookRight + case lookLeft + case lookUp + + var fileName: String { + switch self { + case .goodLight: + return "light_animation" + case .headInFrame: + return "positioning" + case .moveBack: + return "positioning" + case .moveCloser: + return "positioning" + case .lookRight: + return "liveness_guides" + case .lookLeft: + return "liveness_guides" + case .lookUp: + return "liveness_guides" + } + } + + var animationProgressRange: ClosedRange<CGFloat> { + switch self { + case .headInFrame: + return 0...0.28 + case .moveBack: + return 0.38...0.67 + case .moveCloser: + return 0.73...1.0 + case .lookRight: + return 0...0.4 + case .lookLeft: + return 0.4...0.64 + case .lookUp: + return 0.64...1.0 + default: + return 0...1.0 + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieCaptureInstruction.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieCaptureInstruction.swift new file mode 100644 index 000000000..f86998116 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieCaptureInstruction.swift @@ -0,0 +1,54 @@ +import Foundation + +enum SelfieCaptureInstruction { + case headInFrame + case moveBack + case moveCloser + case lookStraight + case
goodLight + case lookLeft + case lookRight + case lookUp + + var instruction: String { + switch self { + case .headInFrame: + return "Instructions.PositionHeadInView" + case .moveCloser: + return "Instructions.MoveCloser" + case .moveBack: + return "Instructions.MoveBack" + case .lookStraight: + return "Instructions.PositionHeadInView" + case .goodLight: + return "Instructions.Brightness" + case .lookLeft: + return "Instructions.TurnHeadLeft" + case .lookRight: + return "Instructions.TurnHeadRight" + case .lookUp: + return "Instructions.TurnHeadUp" + } + } + + var guideAnimation: CaptureGuideAnimation { + switch self { + case .headInFrame: + return .headInFrame + case .moveCloser: + return .moveCloser + case .moveBack: + return .moveBack + case .lookStraight: + return .headInFrame + case .goodLight: + return .goodLight + case .lookLeft: + return .lookLeft + case .lookRight: + return .lookRight + case .lookUp: + return .lookUp + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift new file mode 100644 index 000000000..e470dbfab --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift @@ -0,0 +1,231 @@ +import SwiftUI + +protocol SelfieSubmissionDelegate: AnyObject { + func submissionDidSucceed(_ apiResponse: SmartSelfieResponse) + func submissionDidFail(with error: Error, errorMessage: String?, errorMessageRes: String?) +} + +final class SelfieSubmissionManager { + // MARK: - Properties + private let userId: String + private let jobId: String + private let isEnroll: Bool + private let numLivenessImages: Int + private let allowNewEnroll: Bool + private var selfieImageUrl: URL? + private var livenessImages: [URL] + private var extraPartnerParams: [String: String] + private let localMetadata: LocalMetadata + + weak var delegate: SelfieSubmissionDelegate? + + // MARK: - Initializer + init( + userId: String, + jobId: String, + isEnroll: Bool, + numLivenessImages: Int, + allowNewEnroll: Bool, + selfieImageUrl: URL?, + livenessImages: [URL], + extraPartnerParams: [String: String], + localMetadata: LocalMetadata + ) { + self.userId = userId + self.jobId = jobId + self.isEnroll = isEnroll + self.numLivenessImages = numLivenessImages + self.allowNewEnroll = allowNewEnroll + self.selfieImageUrl = selfieImageUrl + self.livenessImages = livenessImages + self.extraPartnerParams = extraPartnerParams + self.localMetadata = localMetadata + } + + func submitJob(failureReason: FailureReason? 
= nil) async throws { + do { + // Validate that the necessary selfie data is present + try validateImages() + + // Determine the type of job (enrollment or authentication) + let jobType = determineJobType() + // Create an authentication request based on the job type + let authRequest = createAuthRequest(jobType: jobType) + + // Save the job locally if offline mode is allowed + if SmileID.allowOfflineMode { + try saveOfflineMode(jobType: jobType) + } + + // Authenticate the request with the API + let authResponse = try await SmileID.api.authenticate(request: authRequest) + + // Prepare the images for submission + let (smartSelfieImage, smartSelfieLivenessImages) = try prepareImagesForSubmission() + + // Submit the job data to the API + let response = try await submitJobRequest( + authResponse: authResponse, + smartSelfieImage: smartSelfieImage, + smartSelfieLivenessImages: smartSelfieLivenessImages, + failureReason: failureReason + ) + + // Update local storage after successful submission + try updateLocalStorageAfterSuccess() + + // Send out api response after successful submission + self.delegate?.submissionDidSucceed(response) + } catch let error as SmileIDError { + handleJobSubmissionFailure(error) + } + } + + private func validateImages() throws { + guard selfieImageUrl != nil, + livenessImages.count == numLivenessImages else { + throw SmileIDError.unknown("Selfie capture failed") + } + } + + private func determineJobType() -> JobType { + return isEnroll ? JobType.smartSelfieEnrollment : JobType.smartSelfieAuthentication + } + + private func createAuthRequest(jobType: JobType) -> AuthenticationRequest { + return AuthenticationRequest( + jobType: jobType, + enrollment: isEnroll, + jobId: jobId, + userId: userId + ) + } + + private func saveOfflineMode(jobType: JobType) throws { + try LocalStorage.saveOfflineJob( + jobId: jobId, + userId: userId, + jobType: jobType, + enrollment: isEnroll, + allowNewEnroll: allowNewEnroll, + localMetadata: localMetadata, + partnerParams: extraPartnerParams + ) + } + + private func prepareImagesForSubmission() throws -> (MultipartBody, [MultipartBody]) { + guard let smartSelfieImage = createMultipartBody(from: selfieImageUrl) else { + throw SmileIDError.fileNotFound("Could not create multipart body for file") + } + + let smartSelfieLivenessImages = livenessImages.compactMap { + createMultipartBody(from: $0) + } + guard smartSelfieLivenessImages.count == numLivenessImages else { + throw SmileIDError.unknown("Liveness image count mismatch") + } + + return (smartSelfieImage, smartSelfieLivenessImages) + } + + private func createMultipartBody(from fileURL: URL?) -> MultipartBody? { + guard let fileURL = fileURL, + let imageData = try? Data(contentsOf: fileURL) + else { + return nil + } + return MultipartBody( + withImage: imageData, + forKey: fileURL.lastPathComponent, + forName: fileURL.lastPathComponent + ) + } + + private func submitJobRequest( + authResponse: AuthenticationResponse, + smartSelfieImage: MultipartBody, + smartSelfieLivenessImages: [MultipartBody], + failureReason: FailureReason? 
+ ) async throws -> SmartSelfieResponse { + if isEnroll { + return try await SmileID.api + .doSmartSelfieEnrollment( + signature: authResponse.signature, + timestamp: authResponse.timestamp, + selfieImage: smartSelfieImage, + livenessImages: smartSelfieLivenessImages, + userId: userId, + partnerParams: extraPartnerParams, + callbackUrl: SmileID.callbackUrl, + sandboxResult: nil, + allowNewEnroll: allowNewEnroll, + failureReason: failureReason, + metadata: localMetadata.metadata + ) + } else { + return try await SmileID.api + .doSmartSelfieAuthentication( + signature: authResponse.signature, + timestamp: authResponse.timestamp, + userId: userId, + selfieImage: smartSelfieImage, + livenessImages: smartSelfieLivenessImages, + partnerParams: extraPartnerParams, + callbackUrl: SmileID.callbackUrl, + sandboxResult: nil, + failureReason: failureReason, + metadata: localMetadata.metadata + ) + } + } + + private func updateLocalStorageAfterSuccess() throws { + // Move the job to the submitted jobs directory for record-keeping + try LocalStorage.moveToSubmittedJobs(jobId: self.jobId) + + // Update the references to the submitted selfie and liveness images + self.selfieImageUrl = try LocalStorage.getFileByType( + jobId: jobId, + fileType: FileType.selfie, + submitted: true + ) + self.livenessImages = + try LocalStorage.getFilesByType( + jobId: jobId, + fileType: FileType.liveness, + submitted: true + ) ?? [] + } + + private func handleJobSubmissionFailure(_ smileIDError: SmileIDError) { + do { + let didMove = try LocalStorage.handleOfflineJobFailure(jobId: self.jobId, error: smileIDError) + if didMove { + self.selfieImageUrl = try LocalStorage.getFileByType(jobId: jobId, fileType: .selfie, submitted: true) + self.livenessImages = + try LocalStorage.getFilesByType(jobId: jobId, fileType: .liveness, submitted: true) ?? [] + } + } catch { + let (errorMessageRes, errorMessage) = toErrorMessage(error: smileIDError) + self.delegate? + .submissionDidFail( + with: error, + errorMessage: errorMessage, + errorMessageRes: errorMessageRes + ) + return + } + + if SmileID.allowOfflineMode, SmileIDError.isNetworkFailure(error: smileIDError) { + self.delegate?.submissionDidFail(with: smileIDError, errorMessage: nil, errorMessageRes: "Offline.Message") + } else { + let (errorMessageRes, errorMessage) = toErrorMessage(error: smileIDError) + self.delegate? + .submissionDidFail( + with: smileIDError, + errorMessage: errorMessage, + errorMessageRes: errorMessageRes + ) + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift index c62ef7b6e..c41e1270f 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift @@ -28,7 +28,7 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { private var localMetadata: LocalMetadata private let faceDetector = FaceDetector() - var cameraManager = CameraManager(orientation: .portrait) + var cameraManager = CameraManager.shared var shouldAnalyzeImages = true var lastAutoCaptureTime = Date() var previousHeadRoll = Double.infinity @@ -95,8 +95,10 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { .store(in: &subscribers) localMetadata.addMetadata( - useBackCamera ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera) - : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera)) + useBackCamera + ? 
Metadatum.SelfieImageOrigin(cameraFacing: .backCamera) + : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera) + ) } let metadataTimerStart = MonotonicTime() @@ -392,6 +394,7 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { callbackUrl: SmileID.callbackUrl, sandboxResult: nil, allowNewEnroll: allowNewEnroll, + failureReason: nil, metadata: localMetadata.metadata ) } else { @@ -404,6 +407,7 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { partnerParams: extraPartnerParams, callbackUrl: SmileID.callbackUrl, sandboxResult: nil, + failureReason: nil, metadata: localMetadata.metadata ) } diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift new file mode 100644 index 000000000..7b9c1c159 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift @@ -0,0 +1,15 @@ +import SwiftUI + +enum SelfieViewModelAction { + // View Setup Actions + case onViewAppear + case windowSizeDetected(CGSize, EdgeInsets) + + // Job Submission Actions + case jobProcessingDone + case retryJobSubmission + + // Others + case openApplicationSettings + case handleError(Error) +} diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift new file mode 100644 index 000000000..4f713d9c8 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -0,0 +1,430 @@ +import ARKit +import Combine +import SwiftUI + +public class SelfieViewModelV2: ObservableObject { + // MARK: Dependencies + let cameraManager = CameraManager.shared + let faceDetector = FaceDetectorV2() + private let faceValidator = FaceValidator() + var livenessCheckManager = LivenessCheckManager() + private var subscribers = Set<AnyCancellable>() + private var guideAnimationDelayTimer: Timer? + private let metadataTimerStart = MonotonicTime() + + // MARK: Private Properties + private var faceLayoutGuideFrame = CGRect(x: 0, y: 0, width: 250, height: 350) + private var elapsedGuideAnimationDelay: TimeInterval = 0 + private var currentFrameBuffer: CVPixelBuffer? + var selfieImage: UIImage? + private var selfieImageURL: URL? { + didSet { + DispatchQueue.main.async { + self.selfieCaptured = self.selfieImage != nil + } + } + } + private var livenessImages: [URL] = [] + private var hasDetectedValidFace: Bool = false + private var shouldBeginLivenessChallenge: Bool { + hasDetectedValidFace && selfieImage != nil && livenessCheckManager.currentTask != nil + } + private var shouldSubmitJob: Bool { + selfieImage != nil && livenessImages.count == numLivenessImages + } + private var submissionTask: Task<Void, Error>? + private var failureReason: FailureReason? + private var apiResponse: SmartSelfieResponse? + private var error: Error? + @Published public var errorMessageRes: String? + @Published public var errorMessage: String? + + // MARK: Constants + private let livenessImageSize = 320 + private let selfieImageSize = 640 + private let numLivenessImages = 6 + private let guideAnimationDelayTime: TimeInterval = 3 + + // MARK: UI Properties + @Published var unauthorizedAlert: AlertState? + @Published private(set) var userInstruction: SelfieCaptureInstruction?
+ @Published private(set) var faceInBounds: Bool = false + @Published private(set) var selfieCaptured: Bool = false + @Published private(set) var showGuideAnimation: Bool = false + @Published private(set) var selfieCaptureState: SelfieCaptureState = .capturingSelfie + + // MARK: Injected Properties + private let isEnroll: Bool + private let userId: String + private let jobId: String + private let allowNewEnroll: Bool + private let skipApiSubmission: Bool + private let extraPartnerParams: [String: String] + private let useStrictMode: Bool + private let onResult: SmartSelfieResultDelegate + private var localMetadata: LocalMetadata + + enum SelfieCaptureState: Equatable { + case capturingSelfie + case processing(ProcessingState) + + var title: String { + switch self { + case .capturingSelfie: + return "Instructions.Capturing" + case let .processing(processingState): + return processingState.title + } + } + } + + public init( + isEnroll: Bool, + userId: String, + jobId: String, + allowNewEnroll: Bool, + skipApiSubmission: Bool, + extraPartnerParams: [String: String], + useStrictMode: Bool, + onResult: SmartSelfieResultDelegate, + localMetadata: LocalMetadata + ) { + self.isEnroll = isEnroll + self.userId = userId + self.jobId = jobId + self.allowNewEnroll = allowNewEnroll + self.skipApiSubmission = skipApiSubmission + self.extraPartnerParams = extraPartnerParams + self.useStrictMode = useStrictMode + self.onResult = onResult + self.localMetadata = localMetadata + self.initialSetup() + } + + deinit { + stopGuideAnimationDelayTimer() + submissionTask?.cancel() + submissionTask = nil + } + + private func initialSetup() { + self.faceValidator.delegate = self + self.faceDetector.resultDelegate = self + self.livenessCheckManager.delegate = self + + self.faceValidator.setLayoutGuideFrame(with: faceLayoutGuideFrame) + self.userInstruction = .headInFrame + + livenessCheckManager.$lookLeftProgress + .merge( + with: livenessCheckManager.$lookRightProgress, + livenessCheckManager.$lookUpProgress + ) + .sink { [weak self] _ in + DispatchQueue.main.async { + self?.resetGuideAnimationDelayTimer() + } + } + .store(in: &subscribers) + + cameraManager.$status + .receive(on: DispatchQueue.main) + .filter { $0 == .unauthorized } + .map { _ in AlertState.cameraUnauthorized } + .sink { [weak self] alert in self?.unauthorizedAlert = alert } + .store(in: &subscribers) + + cameraManager.sampleBufferPublisher + .throttle( + for: 0.35, + scheduler: DispatchQueue.global(qos: .userInitiated), + latest: true + ) + // Drop the first ~2 seconds to allow the user to settle in + .dropFirst(5) + .compactMap { $0 } + .sink { [weak self] imageBuffer in + self?.analyzeFrame(imageBuffer: imageBuffer) + } + .store(in: &subscribers) + } + + private func analyzeFrame(imageBuffer: CVPixelBuffer) { + currentFrameBuffer = imageBuffer + faceDetector.processImageBuffer(imageBuffer) + if hasDetectedValidFace && selfieImage == nil { + captureSelfieImage(imageBuffer) + livenessCheckManager.initiateLivenessCheck() + } + } + + // MARK: Actions + func perform(action: SelfieViewModelAction) { + switch action { + case let .windowSizeDetected(windowRect, safeAreaInsets): + handleWindowSizeChanged(toRect: windowRect, edgeInsets: safeAreaInsets) + case .onViewAppear: + handleViewAppeared() + case .jobProcessingDone: + onFinished(callback: onResult) + case .retryJobSubmission: + handleSubmission() + case .openApplicationSettings: + openSettings() + case let .handleError(error): + handleError(error) + } + } + + private func publishUserInstruction(_ 
instruction: SelfieCaptureInstruction?) { + if self.userInstruction != instruction { + self.userInstruction = instruction + self.resetGuideAnimationDelayTimer() + } + } +} + +// MARK: Action Handlers +extension SelfieViewModelV2 { + private func resetGuideAnimationDelayTimer() { + elapsedGuideAnimationDelay = 0 + showGuideAnimation = false + guard guideAnimationDelayTimer == nil else { return } + guideAnimationDelayTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] _ in + guard let self else { return } + self.elapsedGuideAnimationDelay += 1 + if self.elapsedGuideAnimationDelay == self.guideAnimationDelayTime { + self.showGuideAnimation = true + self.stopGuideAnimationDelayTimer() + } + } + } + + private func stopGuideAnimationDelayTimer() { + guard guideAnimationDelayTimer != nil else { return } + guideAnimationDelayTimer?.invalidate() + guideAnimationDelayTimer = nil + } + + private func handleViewAppeared() { + cameraManager.switchCamera(to: .front) + resetGuideAnimationDelayTimer() + resetSelfieCaptureState() + } + + private func resetSelfieCaptureState() { + selfieImage = nil + livenessImages = [] + selfieCaptureState = .capturingSelfie + failureReason = nil + } + + private func handleWindowSizeChanged(toRect: CGSize, edgeInsets: EdgeInsets) { + let topPadding: CGFloat = edgeInsets.top + 100 + faceLayoutGuideFrame = CGRect( + x: (toRect.width / 2) - faceLayoutGuideFrame.width / 2, + y: topPadding, + width: faceLayoutGuideFrame.width, + height: faceLayoutGuideFrame.height + ) + faceValidator.setLayoutGuideFrame(with: faceLayoutGuideFrame) + } + + private func captureSelfieImage(_ pixelBuffer: CVPixelBuffer) { + do { + guard + let imageData = ImageUtils.resizePixelBufferToHeight( + pixelBuffer, + height: selfieImageSize, + orientation: .up + ) + else { + throw SmileIDError.unknown("Error resizing selfie image") + } + self.selfieImage = UIImage(data: imageData) + self.selfieImageURL = try LocalStorage.createSelfieFile(jobId: jobId, selfieFile: imageData) + } catch { + handleError(error) + } + } + + private func captureLivenessImage(_ pixelBuffer: CVPixelBuffer) { + do { + guard + let imageData = ImageUtils.resizePixelBufferToHeight( + pixelBuffer, + height: livenessImageSize, + orientation: .up + ) + else { + throw SmileIDError.unknown("Error resizing liveness image") + } + let imageUrl = try LocalStorage.createLivenessFile(jobId: jobId, livenessFile: imageData) + livenessImages.append(imageUrl) + } catch { + handleError(error) + } + } + + private func handleError(_ error: Error) { + debugPrint(error.localizedDescription) + } + + private func handleSubmission() { + DispatchQueue.main.async { + self.selfieCaptureState = .processing(.inProgress) + } + guard submissionTask == nil else { return } + submissionTask = Task { + try await submitJob() + } + } + + private func openSettings() { + guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return } + UIApplication.shared.open(settingsURL) + } +} + +// MARK: FaceDetectorResultDelegate Methods +extension SelfieViewModelV2: FaceDetectorResultDelegate { + func faceDetector( + _ detector: FaceDetectorV2, + didDetectFace faceGeometry: FaceGeometryData, + withFaceQuality faceQuality: Float, + selfieQuality: SelfieQualityData, + brightness: Int + ) { + faceValidator + .validate( + faceGeometry: faceGeometry, + selfieQuality: selfieQuality, + brightness: brightness, + currentLivenessTask: self.livenessCheckManager.currentTask + ) + if shouldBeginLivenessChallenge { + livenessCheckManager.processFaceGeometry(faceGeometry) + } + } + + func
faceDetector(_ detector: FaceDetectorV2, didFailWithError error: Error) { + DispatchQueue.main.async { + self.publishUserInstruction(.headInFrame) + } + } +} + +// MARK: FaceValidatorDelegate Methods +extension SelfieViewModelV2: FaceValidatorDelegate { + func updateValidationResult(_ result: FaceValidationResult) { + DispatchQueue.main.async { + self.faceInBounds = result.faceInBounds + self.hasDetectedValidFace = result.hasDetectedValidFace + self.publishUserInstruction(result.userInstruction) + } + } +} + +// MARK: LivenessCheckManagerDelegate Methods +extension SelfieViewModelV2: LivenessCheckManagerDelegate { + func didCompleteLivenessTask() { + // capture two liveness images for this task from the current frame + guard let imageBuffer = currentFrameBuffer else { return } + captureLivenessImage(imageBuffer) + captureLivenessImage(imageBuffer) + } + + func didCompleteLivenessChallenge() { + DispatchQueue.main.asyncAfter(deadline: .now() + 1) { + self.cameraManager.pauseSession() + self.handleSubmission() + } + } + + func livenessChallengeTimeout() { + let remainingImages = numLivenessImages - livenessImages.count + let count = remainingImages > 0 ? remainingImages : 0 + for _ in 0..<count { + if let imageBuffer = currentFrameBuffer { + captureLivenessImage(imageBuffer) + } + } + failureReason = .mobileActiveLivenessTimeout + cameraManager.pauseSession() + handleSubmission() + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/ArcProgressView.swift b/Sources/SmileID/Classes/SelfieCapture/View/ArcProgressView.swift new file mode 100644 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/ArcProgressView.swift + func path(in rect: CGRect) -> Path { + var path = Path() + + // Position-dependent values + let startAngle: CGFloat = 180 + let endAngle: CGFloat + let radius: CGFloat + let horizontalOffset: CGFloat + let verticalOffset: CGFloat + + switch position { + case .top: + endAngle = 120 + radius = rect.width / 2 + horizontalOffset = 0 + verticalOffset = 0 + case .right, .left: + endAngle = 150 + radius = rect.width + horizontalOffset = -(radius - rect.width / 2) + verticalOffset = 0 + } + + path.addArc( + center: CGPoint( + x: rect.midX - horizontalOffset, + y: rect.midY - verticalOffset + ), + radius: radius, + startAngle: Angle(degrees: startAngle), + endAngle: Angle(degrees: clockwise ? endAngle : -endAngle), + clockwise: clockwise + ) + + return path + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift b/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift index 8833a4a15..aa04685b2 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift @@ -3,16 +3,21 @@ import AVFoundation import Vision struct CameraView: UIViewControllerRepresentable { - typealias UIViewType = PreviewView - let preview: PreviewView + typealias UIViewType = CameraViewController + let cameraViewController: CameraViewController - init(cameraManager: CameraManager) { - preview = PreviewView(cameraManager: cameraManager) + init( + cameraManager: CameraManager, + selfieViewModel: SelfieViewModelV2? = nil + ) { + let controller = CameraViewController(cameraManager: cameraManager) + controller.faceDetector = selfieViewModel?.faceDetector + cameraViewController = controller + } - func makeUIViewController(context: Context) -> PreviewView { - preview + func makeUIViewController(context: Context) -> CameraViewController { + cameraViewController + } - func updateUIViewController(_ uiViewController: PreviewView, context: Context) {} + func updateUIViewController(_ uiViewController: CameraViewController, context: Context) {}
}
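For context, a host view embeds this wrapper the same way SelfieCaptureScreenV2 does further down in this diff. A minimal sketch, assuming the view model has already been constructed by the orchestrator (the host type name here is hypothetical):

import SwiftUI

struct CameraPreviewHost: View {
    let viewModel: SelfieViewModelV2

    var body: some View {
        // Passing the view model lets CameraViewController feed frames to its
        // faceDetector; omitting it (the nil default) gives a plain preview.
        CameraView(
            cameraManager: viewModel.cameraManager,
            selfieViewModel: viewModel
        )
        .cornerRadius(40)
    }
}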
diff --git a/Sources/SmileID/Classes/SelfieCapture/View/CircularProgressView.swift b/Sources/SmileID/Classes/SelfieCapture/View/CircularProgressView.swift new file mode 100644 index 000000000..863eb7aaa --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/CircularProgressView.swift @@ -0,0 +1,20 @@ +import SwiftUI + +struct CircularProgressView: View { + @State private var rotationAngle: Double = 0.0 + + var body: some View { + Image(uiImage: SmileIDResourcesHelper.Loader) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(width: 48, height: 48) + .rotationEffect(Angle(degrees: rotationAngle)) + .onAppear { + DispatchQueue.main.async { + withAnimation(.linear(duration: 1.0).repeatForever(autoreverses: false)) { + rotationAngle = 360 + } + } + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingArea.swift b/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingArea.swift new file mode 100644 index 000000000..440e255fd --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingArea.swift @@ -0,0 +1,56 @@ +import Lottie +import SwiftUI + +struct FaceBoundingArea: View { + var faceInBounds: Bool + var selfieCaptured: Bool + var showGuideAnimation: Bool + var guideAnimation: CaptureGuideAnimation? + + private let faceShape = FaceShape() + @State private var playbackMode: LottiePlaybackMode = .paused + + var body: some View { + ZStack { + // Face Bounds Indicator + faceShape + .stroke( + faceInBounds ? (selfieCaptured ? 
.clear : SmileID.theme.success) : SmileID.theme.error, + style: StrokeStyle(lineWidth: 10) + ) + .frame(width: 270, height: 370) + + if let guideAnimation = guideAnimation, + showGuideAnimation { + faceShape + .fill(.black.opacity(0.5)) + .frame(width: 270, height: 370) + .overlay( + LottieView { + try await DotLottieFile + .named( + guideAnimation.fileName, + bundle: SmileIDResourcesHelper.bundle + ) + } + .playbackMode(playbackMode) + .frame(width: 224, height: 224) + ) + .clipShape(faceShape) + .onAppear { + playbackMode = getPlaybackMode(guideAnimation) + } + } + } + } + + private func getPlaybackMode(_ animation: CaptureGuideAnimation) -> LottiePlaybackMode { + return .playing( + .fromProgress( + animation.animationProgressRange.lowerBound, + toProgress: animation.animationProgressRange.upperBound, + loopMode: .autoReverse + ) + ) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift b/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift new file mode 100644 index 000000000..dd3f33e62 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift @@ -0,0 +1,13 @@ +import SwiftUI + +struct LayoutGuideView: View { + let layoutGuideFrame: CGRect + + var body: some View { + VStack { + Ellipse() + .stroke(.blue) + .frame(width: layoutGuideFrame.width, height: layoutGuideFrame.height) + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift b/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift new file mode 100644 index 000000000..153c4a50b --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift @@ -0,0 +1,68 @@ +import Lottie +import SwiftUI + +public struct LivenessCaptureInstructionsView: View { + @Environment(\.modalMode) private var modalMode + @State private var showSelfieCaptureView: Bool = false + + private let showAttribution: Bool + private let viewModel: SelfieViewModelV2 + + public init(showAttribution: Bool, viewModel: SelfieViewModelV2) { + self.showAttribution = showAttribution + self.viewModel = viewModel + } + + public var body: some View { + VStack { + HStack { + Button { + self.modalMode.wrappedValue = false + } label: { + Text(SmileIDResourcesHelper.localizedString(for: "Action.Cancel")) + .foregroundColor(SmileID.theme.accent) + } + Spacer() + } + + ZStack { + LottieView { + try await DotLottieFile.named("instructions_no_progress", bundle: SmileIDResourcesHelper.bundle) + } + .playing(loopMode: .loop) + .frame(width: 235, height: 235) + } + .padding(.top, 100) + Spacer() + Text(SmileIDResourcesHelper.localizedString(for: "Instructions.SelfieCapture")) + .multilineTextAlignment(.center) + .font(SmileID.theme.header4) + .foregroundColor(SmileID.theme.tertiary) + + Spacer() + + VStack(spacing: 20) { + NavigationLink( + destination: SelfieCaptureScreenV2( + viewModel: viewModel, + showAttribution: showAttribution + ), + isActive: $showSelfieCaptureView + ) { EmptyView() } + + SmileButton( + title: "Action.GetStarted", + clicked: { + self.showSelfieCaptureView = true + } + ) + + if showAttribution { + Image(uiImage: SmileIDResourcesHelper.SmileEmblem) + } + } + } + .padding(.horizontal, 24) + .padding(.bottom, 40) + }
+}
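The Lottie usage above is the pattern all of these screens share: load a bundled .lottie file asynchronously, then pick a playback mode. A standalone sketch, assuming one of the animation assets added later in this diff ("positioning"); the view name is hypothetical:

import Lottie
import SwiftUI

struct PositioningHintView: View {
    var body: some View {
        LottieView {
            // Loads a .lottie asset from the SDK's resource bundle.
            try await DotLottieFile.named("positioning", bundle: SmileIDResourcesHelper.bundle)
        }
        .playing(loopMode: .loop)
        .frame(width: 235, height: 235)
    }
}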
diff --git a/Sources/SmileID/Classes/SelfieCapture/View/LivenessGuidesView.swift b/Sources/SmileID/Classes/SelfieCapture/View/LivenessGuidesView.swift new file mode 100644 index 000000000..1e7f2a307 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/LivenessGuidesView.swift @@ -0,0 +1,28 @@ +import SwiftUI + +struct LivenessGuidesView: View { + var currentLivenessTask: LivenessTask + @Binding var topArcProgress: CGFloat + @Binding var rightArcProgress: CGFloat + @Binding var leftArcProgress: CGFloat + + var body: some View { + ZStack { + ArcProgressView(position: .top, progress: topArcProgress) + .rotationEffect(Angle(degrees: 60)) + .opacity(currentLivenessTask == .lookUp ? 1.0 : 0.0) + .animation(.easeInOut(duration: 0.2), value: currentLivenessTask) + .padding(.bottom, 120) + + ArcProgressView(position: .right, progress: rightArcProgress, clockwise: true) + .rotationEffect(Angle(degrees: -155)) + .opacity(currentLivenessTask == .lookRight ? 1.0 : 0.0) + .animation(.easeInOut(duration: 0.2), value: currentLivenessTask) + + ArcProgressView(position: .left, progress: leftArcProgress) + .rotationEffect(Angle(degrees: -25)) + .opacity(currentLivenessTask == .lookLeft ? 1.0 : 0.0) + .animation(.easeInOut(duration: 0.2), value: currentLivenessTask) + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreenV2.swift b/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreenV2.swift new file mode 100644 index 000000000..d78716078 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreenV2.swift @@ -0,0 +1,58 @@ +import Foundation +import SwiftUI + +/// Orchestrates the selfie capture flow: navigating between the instructions screen, +/// permission requests, the camera view, and the processing screen +public struct OrchestratedSelfieCaptureScreenV2: View { + public let allowAgentMode: Bool + public let showAttribution: Bool + public let showInstructions: Bool + public let onResult: SmartSelfieResultDelegate + private let viewModel: SelfieViewModelV2 + + private var originalBrightness = UIScreen.main.brightness + + public init( + userId: String, + jobId: String, + isEnroll: Bool, + allowNewEnroll: Bool, + allowAgentMode: Bool, + showAttribution: Bool, + showInstructions: Bool, + useStrictMode: Bool, + extraPartnerParams: [String: String], + skipApiSubmission: Bool, + onResult: SmartSelfieResultDelegate + ) { + self.allowAgentMode = allowAgentMode + self.showAttribution = showAttribution + self.showInstructions = showInstructions + self.onResult = onResult + self.viewModel = SelfieViewModelV2( + isEnroll: isEnroll, + userId: userId, + jobId: jobId, + allowNewEnroll: allowNewEnroll, + skipApiSubmission: skipApiSubmission, + extraPartnerParams: extraPartnerParams, + useStrictMode: useStrictMode, + onResult: onResult, + localMetadata: LocalMetadata() + ) + } + + public var body: some View { + if showInstructions { + LivenessCaptureInstructionsView( + showAttribution: showAttribution, + viewModel: viewModel + ) + } else { + SelfieCaptureScreenV2( + viewModel: viewModel, + showAttribution: showAttribution + ) + } + }
+}
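Note that LivenessCaptureInstructionsView reaches the capture screen through a NavigationLink, so this orchestrator needs a navigation container above it. A minimal hosting sketch; the host type, the placeholder IDs, and the externally supplied delegate are assumptions for illustration:

import SwiftUI

struct StrictModeSelfieHost: View {
    // Assumed: the host app provides a SmartSelfieResultDelegate implementation.
    let resultDelegate: SmartSelfieResultDelegate

    var body: some View {
        NavigationView {
            OrchestratedSelfieCaptureScreenV2(
                userId: "user-id-placeholder",
                jobId: "job-id-placeholder",
                isEnroll: true,
                allowNewEnroll: false,
                allowAgentMode: false,
                showAttribution: true,
                showInstructions: true, // start on the instructions screen
                useStrictMode: true,
                extraPartnerParams: [:],
                skipApiSubmission: false,
                onResult: resultDelegate
            )
        }
    }
}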
"Action.Done") { + doneAction() + } + case .error: + SmileButton(title: "Confirmation.Retry") { + retryAction() + } + } + } + .padding(.horizontal, 65) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift new file mode 100644 index 000000000..1eb217398 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift @@ -0,0 +1,146 @@ +import SwiftUI + +public struct SelfieCaptureScreenV2: View { + @ObservedObject var viewModel: SelfieViewModelV2 + let showAttribution: Bool + + private let faceShape = FaceShape() + @Environment(\.modalMode) private var modalMode + + public var body: some View { + GeometryReader { proxy in + VStack(spacing: 10) { + switch viewModel.selfieCaptureState { + case .capturingSelfie: + ZStack { + CameraView( + cameraManager: viewModel.cameraManager, + selfieViewModel: viewModel + ) + .cornerRadius(40) + + RoundedRectangle(cornerRadius: 40) + .fill(SmileID.theme.tertiary.opacity(0.8)) + .reverseMask(alignment: .top) { + faceShape + .frame(width: 250, height: 350) + .padding(.top, 60) + } + VStack { + ZStack { + FaceBoundingArea( + faceInBounds: viewModel.faceInBounds, + selfieCaptured: viewModel.selfieCaptured, + showGuideAnimation: viewModel.showGuideAnimation, + guideAnimation: viewModel.userInstruction?.guideAnimation + ) + if let currentLivenessTask = viewModel.livenessCheckManager.currentTask { + LivenessGuidesView( + currentLivenessTask: currentLivenessTask, + topArcProgress: $viewModel.livenessCheckManager.lookUpProgress, + rightArcProgress: $viewModel.livenessCheckManager.lookRightProgress, + leftArcProgress: $viewModel.livenessCheckManager.lookLeftProgress + ) + } + } + .padding(.top, 50) + Spacer() + if let userInstruction = viewModel.userInstruction { + UserInstructionsView( + instruction: userInstruction.instruction + ) + } + } + } + .selfieCaptureFrameBackground() + if showAttribution { + Image(uiImage: SmileIDResourcesHelper.SmileEmblem) + } + case let .processing(processingState): + ZStack { + if let selfieImage = viewModel.selfieImage { + SelfiePreviewView(image: selfieImage) + } + RoundedRectangle(cornerRadius: 40) + .fill(SmileID.theme.tertiary.opacity(0.8)) + .reverseMask(alignment: .top) { + faceShape + .frame(width: 250, height: 350) + .padding(.top, 60) + } + VStack { + Spacer() + UserInstructionsView( + instruction: processingState.title, + message: processingState == .error ? 
getErrorSubtitle( + errorMessageRes: viewModel.errorMessageRes, + errorMessage: viewModel.errorMessage + ) : nil + ) + } + SubmissionStatusView(processState: processingState) + .padding(.bottom, 40) + } + .selfieCaptureFrameBackground() + if showAttribution { + Image(uiImage: SmileIDResourcesHelper.SmileEmblem) + } + + Spacer() + SelfieActionsView( + processingState: processingState, + retryAction: { viewModel.perform(action: .retryJobSubmission) }, + doneAction: { + modalMode.wrappedValue = false + viewModel.perform(action: .jobProcessingDone) + } + ) + } + + Spacer() + + Button { + modalMode.wrappedValue = false + viewModel.perform(action: .jobProcessingDone) + } label: { + Text(SmileIDResourcesHelper.localizedString(for: "Action.Cancel")) + .font(SmileID.theme.button) + .foregroundColor(SmileID.theme.error) + } + } + .navigationBarHidden(true) + .onAppear { + viewModel.perform(action: .windowSizeDetected(proxy.size, proxy.safeAreaInsets)) + viewModel.perform(action: .onViewAppear) + } + .onDisappear { + viewModel.cameraManager.pauseSession() + } + .alert(item: $viewModel.unauthorizedAlert) { alert in + Alert( + title: Text(alert.title), + message: Text(alert.message ?? ""), + primaryButton: .default( + Text( + SmileIDResourcesHelper.localizedString( + for: "Camera.Unauthorized.PrimaryAction")), + action: { + viewModel.perform(action: .openApplicationSettings) + } + ), + secondaryButton: .cancel() + ) + } + } + } +} + +extension View { + func selfieCaptureFrameBackground() -> some View { + self + .shadow(color: .black.opacity(0.25), radius: 4, x: 0, y: 4) + .frame(height: 520) + .padding(.horizontal) + .padding(.top, 40) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfiePreviewView.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfiePreviewView.swift new file mode 100644 index 000000000..7687fc9f6 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfiePreviewView.swift @@ -0,0 +1,13 @@ +import SwiftUI + +struct SelfiePreviewView: View { + var image: UIImage + + var body: some View { + Image(uiImage: image) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(height: 520) + .clipShape(.rect(cornerRadius: 40)) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SubmissionStatusView.swift b/Sources/SmileID/Classes/SelfieCapture/View/SubmissionStatusView.swift new file mode 100644 index 000000000..faab587e0 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/SubmissionStatusView.swift @@ -0,0 +1,26 @@ +import SwiftUI + +struct SubmissionStatusView: View { + var processState: ProcessingState + + var body: some View { + switch processState { + case .inProgress: + CircularProgressView() + .frame(width: 48, height: 48) + case .success: + StatusImage("checkmark.circle.fill", color: SmileID.theme.success) + case .error: + StatusImage("xmark.circle.fill", color: SmileID.theme.error) + } + } + + // swiftlint:disable identifier_name + @ViewBuilder func StatusImage(_ image: String, color: Color) -> some View { + Image(systemName: image) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(width: 48, height: 48) + .foregroundColor(color) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift b/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift new file mode 100644 index 000000000..a70b77106 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift @@ -0,0 +1,25 @@ +import SwiftUI + +struct UserInstructionsView: View { + var instruction: String + 
var message: String? + + var body: some View { + VStack { + Spacer(minLength: 0) + Text(SmileIDResourcesHelper.localizedString(for: instruction)) + .font(SmileID.theme.header2) + .foregroundColor(SmileID.theme.onDark) + .multilineTextAlignment(.center) + .lineLimit(3) + .minimumScaleFactor(0.8) + if let message = message { + Text(message) + .multilineTextAlignment(.center) + .font(SmileID.theme.header5) + .foregroundColor(SmileID.theme.onDark) + } + } + .padding(20) + } +} diff --git a/Sources/SmileID/Classes/SmileID.swift b/Sources/SmileID/Classes/SmileID.swift index b56618e00..8387ecb0b 100644 --- a/Sources/SmileID/Classes/SmileID.swift +++ b/Sources/SmileID/Classes/SmileID.swift @@ -207,7 +207,7 @@ public class SmileID { LocalStorage.getFileByType(jobId: jobId, fileType: .selfie), LocalStorage.getFileByType(jobId: jobId, fileType: .documentFront), LocalStorage.getFileByType(jobId: jobId, fileType: .documentBack), - LocalStorage.getInfoJsonFile(jobId: jobId), + LocalStorage.getInfoJsonFile(jobId: jobId) ].compactMap { $0 } allFiles = livenessFiles + additionalFiles } catch { @@ -291,28 +291,45 @@ public class SmileID { -/// - showInstructions: Whether to deactivate capture screen's instructions for SmartSelfie. +/// - showInstructions: Whether to show the instructions screen before capture for SmartSelfie. +/// - useStrictMode: Whether to use the strict (enhanced) capture flow with active liveness checks. /// - extraPartnerParams: Custom values specific to partners /// - delegate: Callback to be invoked when the SmartSelfie™ Enrollment is complete. - public class func smartSelfieEnrollmentScreen( + @ViewBuilder public class func smartSelfieEnrollmentScreen( userId: String = generateUserId(), jobId: String = generateJobId(), allowNewEnroll: Bool = false, allowAgentMode: Bool = false, showAttribution: Bool = true, showInstructions: Bool = true, + useStrictMode: Bool = false, extraPartnerParams: [String: String] = [:], delegate: SmartSelfieResultDelegate ) -> some View { - OrchestratedSelfieCaptureScreen( - userId: userId, - jobId: jobId, - isEnroll: true, - allowNewEnroll: allowNewEnroll, - allowAgentMode: allowAgentMode, - showAttribution: showAttribution, - showInstructions: showInstructions, - extraPartnerParams: extraPartnerParams, - skipApiSubmission: false, - onResult: delegate - ) + if useStrictMode { + OrchestratedSelfieCaptureScreenV2( + userId: userId, + jobId: jobId, + isEnroll: true, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + useStrictMode: useStrictMode, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: false, + onResult: delegate + ) + } else { + OrchestratedSelfieCaptureScreen( + userId: userId, + jobId: jobId, + isEnroll: true, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: false, + onResult: delegate + ) + } } /// Perform a SmartSelfie™ Authentication @@ -335,28 +352,45 @@ public class SmileID { -/// - showInstructions: Whether to deactivate capture screen's instructions for SmartSelfie. +/// - showInstructions: Whether to show the instructions screen before capture for SmartSelfie. +/// - useStrictMode: Whether to use the strict (enhanced) capture flow with active liveness checks. /// - extraPartnerParams: Custom values specific to partners /// - delegate: Callback to be invoked when the SmartSelfie™ Authentication is complete. 
- public class func smartSelfieAuthenticationScreen( + @ViewBuilder public class func smartSelfieAuthenticationScreen( userId: String, jobId: String = generateJobId(), allowNewEnroll: Bool = false, allowAgentMode: Bool = false, showAttribution: Bool = true, showInstructions: Bool = true, + useStrictMode: Bool = false, extraPartnerParams: [String: String] = [:], delegate: SmartSelfieResultDelegate ) -> some View { - OrchestratedSelfieCaptureScreen( - userId: userId, - jobId: jobId, - isEnroll: false, - allowNewEnroll: allowNewEnroll, - allowAgentMode: allowAgentMode, - showAttribution: showAttribution, - showInstructions: showInstructions, - extraPartnerParams: extraPartnerParams, - skipApiSubmission: false, - onResult: delegate - ) + if useStrictMode { + OrchestratedSelfieCaptureScreenV2( + userId: userId, + jobId: jobId, + isEnroll: false, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + useStrictMode: useStrictMode, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: false, + onResult: delegate + ) + } else { + OrchestratedSelfieCaptureScreen( + userId: userId, + jobId: jobId, + isEnroll: false, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: false, + onResult: delegate + ) + } } /// Perform a Document Verification
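From the partner's side, opting into the V2 flow is just the new flag; every other parameter keeps its default. A minimal sketch, assuming an existing delegate object (the wrapper view here is hypothetical):

import SwiftUI

struct EnrollmentEntryView: View {
    let delegate: SmartSelfieResultDelegate

    var body: some View {
        NavigationView {
            // useStrictMode: true routes to OrchestratedSelfieCaptureScreenV2;
            // the default (false) keeps the existing OrchestratedSelfieCaptureScreen.
            SmileID.smartSelfieEnrollmentScreen(
                useStrictMode: true,
                delegate: delegate
            )
        }
    }
}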
diff --git a/Sources/SmileID/Classes/Util.swift b/Sources/SmileID/Classes/Util.swift index d2d612c34..bb2c23b3a 100644 --- a/Sources/SmileID/Classes/Util.swift +++ b/Sources/SmileID/Classes/Util.swift @@ -24,6 +24,21 @@ public extension View { } } +extension View { + @inlinable func reverseMask<Mask: View>( + alignment: Alignment = .center, + @ViewBuilder _ mask: () -> Mask + ) -> some View { + self.mask( + ZStack(alignment: alignment) { + Rectangle() + mask() + .blendMode(.destinationOut) + } + ) + } +} + private struct StackedShape<Bottom: Shape, Top: Shape>: Shape { var bottom: Bottom var top: Top @@ -64,6 +79,10 @@ func toErrorMessage(error: SmileIDError) -> (String, String?) { return (error.localizedDescription, nil) case let .httpError(_, message): return ("", message) + case let .fileNotFound(message): + return (message, nil) + case let .unknown(message): + return (message, nil) default: return ("Confirmation.FailureReason", nil) }
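The reverseMask helper masks the receiver with everything except its mask content: the Rectangle fills the mask, and .blendMode(.destinationOut) punches the given shape out of it. A minimal sketch of the cut-out-scrim pattern the capture screens use (the color and sizes here are placeholders):

import SwiftUI

struct FaceCutoutScrim: View {
    var body: some View {
        // A dimming overlay with a face-shaped hole punched out of it.
        Color.black.opacity(0.8)
            .reverseMask(alignment: .top) {
                FaceShape()
                    .frame(width: 250, height: 350)
                    .padding(.top, 60)
            }
    }
}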
diff --git a/Sources/SmileID/Classes/Views/ProcessingScreen.swift b/Sources/SmileID/Classes/Views/ProcessingScreen.swift index c59b3d7ed..ae56f456b 100644 --- a/Sources/SmileID/Classes/Views/ProcessingScreen.swift +++ b/Sources/SmileID/Classes/Views/ProcessingScreen.swift @@ -1,10 +1,18 @@ import Foundation import SwiftUI -public enum ProcessingState { +public enum ProcessingState: Equatable { case inProgress case success case error + + var title: String { + switch self { + case .inProgress: return "ProcessingState.Submitting" + case .success: return "ProcessingState.Successful" + case .error: return "ProcessingState.Failed" + } + } } /// This screen represents a generic Processing state. It has 3 sub-states: In Progress, Success, and /// Error diff --git a/Sources/SmileID/Resources/Localization/en.lproj/Localizable.strings b/Sources/SmileID/Resources/Localization/en.lproj/Localizable.strings index 44456b528..a0049efc6 100644 --- a/Sources/SmileID/Resources/Localization/en.lproj/Localizable.strings +++ b/Sources/SmileID/Resources/Localization/en.lproj/Localizable.strings @@ -14,6 +14,7 @@ "Instructions.Action" = "I'm Ready"; "Instructions.UnableToDetectFace" = "Unable to detect face. Please ensure your face is in the oval"; "Instructions.MoveCloser" = "Move closer"; +"Instructions.MoveBack" = "Move back"; "Instructions.MoveFarther" = "Move farther away"; "Instructions.UnknownError" = "We cannot take your photo right now"; "Instructions.MultipleFaces" = "Ensure only one face is visible"; @@ -21,7 +22,13 @@ "Instructions.Capturing" = "Capturing…"; "Instructions.PutFaceInOval" = "Position your face in the oval"; "Instructions.Quality" = "Move to well lit area and clear face of obstructions"; +"Instructions.Brightness" = "Move to a well-lit room"; "Instructions.Start" = "Put your face inside the oval frame and wait until it turns green"; +"Instructions.SelfieCapture" = "Position your head in the camera frame. \nThen move in the direction indicated"; +"Instructions.PositionHeadInView" = "Position your head in view"; +"Instructions.TurnHeadLeft" = "Turn your head to the left"; +"Instructions.TurnHeadRight" = "Turn your head to the right"; +"Instructions.TurnHeadUp" = "Turn your head slightly up"; "Instructions.Document.Front.Header" = "Submit Front of ID"; "Instructions.Document.Front.Callout" = "We'll use it to verify your identity. Please follow the instructions below"; "Instructions.Document.GoodLightBody" = "Make sure your ID image is taken in a well-lit environment. Ensure the ID is clear and visible."; @@ -36,6 +43,9 @@ "Confirmation.ProcessingSelfie" = "Processing Selfie"; "Confirmation.Time" = "Just a few more seconds"; "Confirmation.SelfieCaptureComplete" = "Selfie Capture Complete"; +"ProcessingState.Submitting" = "Submitting"; +"ProcessingState.Successful" = "Successful"; +"ProcessingState.Failed" = "Failed"; "Confirmation.SuccessBody" = "Your selfie was successfully taken. You can now proceed."; "Confirmation.Continue" = "Continue"; "Confirmation.Failure" = "Your selfie failed to process"; @@ -45,6 +55,9 @@ "Action.TakePhoto" = "Take Photo"; "Action.UploadPhoto" = "Upload Photo"; "Action.Skip" = "Skip back of ID"; +"Action.GetStarted" = "Get Started"; +"Action.Cancel" = "Cancel"; +"Action.Done" = "Done"; "Camera.AgentMode" = "Agent Mode"; "Camera.Unauthorized.Title" = "Allow access to your camera"; diff --git a/Sources/SmileID/Resources/LottieFiles/instructions_no_progress.lottie b/Sources/SmileID/Resources/LottieFiles/instructions_no_progress.lottie new file mode 100644 index 000000000..3207aaa70 Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/instructions_no_progress.lottie differ diff --git a/Sources/SmileID/Resources/LottieFiles/light_animation.lottie b/Sources/SmileID/Resources/LottieFiles/light_animation.lottie new file mode 100644 index 000000000..2640b2d52 Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/light_animation.lottie differ diff --git a/Sources/SmileID/Resources/LottieFiles/liveness_guides.lottie b/Sources/SmileID/Resources/LottieFiles/liveness_guides.lottie new file mode 100644 index 000000000..c3ed55e24 Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/liveness_guides.lottie differ diff --git a/Sources/SmileID/Resources/LottieFiles/positioning.lottie b/Sources/SmileID/Resources/LottieFiles/positioning.lottie new file mode 100644 index 000000000..fac50832e Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/positioning.lottie differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin new file mode 100644 index 000000000..577fd6917 Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/coremldata.bin new file mode 100644 index 000000000..df8a17152 Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/coremldata.bin differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/metadata.json b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/metadata.json new file mode 100644 index 000000000..a3266749a --- /dev/null +++ b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/metadata.json @@ -0,0 +1,64 @@ +[ + { + "metadataOutputVersion" : "3.0", + "storagePrecision" : "Float32", + "outputSchema" : [ + { + "hasShapeFlexibility" : "0", + "isOptional" : "0", + "dataType" : "Float32", + "formattedType" : "MultiArray (Float32)", + "shortDescription" : "", + "shape" : "[]", + "name" : "Identity", + "type" : "MultiArray" + } + ], + "modelParameters" : [ + + ], + "specificationVersion" : 4, + "computePrecision" : "Float16", + "isUpdatable" : "0", + "availability" : { + "macOS" : "10.15", + "tvOS" : "13.0", + "visionOS" : "1.0", + "watchOS" : "6.0", + "iOS" : "13.0", + "macCatalyst" : "13.0" + }, + "neuralNetworkLayerTypeHistogram" : { + "ActivationReLU" : 5, + "ReshapeStatic" : 1, + "Transpose" : 1, + "SoftmaxND" : 1, + "Convolution" : 5, + "InnerProduct" : 2, + "PoolingMax" : 3 + }, + "modelType" : { + "name" : "MLModelType_neuralNetwork" + }, + "userDefinedMetadata" : { + "com.github.apple.coremltools.version" : "7.2", + "com.github.apple.coremltools.source" : 
"tensorflow==2.15.0" + }, + "generatedClassName" : "SelfieQualityDetector", + "inputSchema" : [ + { + "height" : "120", + "colorspace" : "RGB", + "isOptional" : "0", + "width" : "120", + "isColor" : "1", + "formattedType" : "Image (Color 120 × 120)", + "hasSizeFlexibility" : "0", + "type" : "Image", + "shortDescription" : "", + "name" : "conv2d_193_input" + } + ], + "method" : "predict" + } +] \ No newline at end of file diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.net b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.net new file mode 100644 index 000000000..736b9233a --- /dev/null +++ b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.net @@ -0,0 +1,327 @@ +{ + "layers" : [ + { + "pad_r" : 0, + "fused_relu" : 1, + "fused_tanh" : 0, + "debug_info" : "sequential_35\/conv2d_193\/BiasAdd", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/conv2d_193\/Relu", + "blob_weights" : 3, + "K" : 3, + "blob_biases" : 1, + "name" : "sequential_35\/conv2d_193\/BiasAdd", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 16, + "bottom" : "conv2d_193_input", + "weights" : { + + }, + "Nx" : 3, + "pad_mode" : 0, + "pad_value" : 0, + "Ny" : 3, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "fused_relu" : 1, + "fused_tanh" : 0, + "debug_info" : "sequential_35\/conv2d_194\/BiasAdd", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/conv2d_194\/Relu", + "blob_weights" : 7, + "K" : 16, + "blob_biases" : 5, + "name" : "sequential_35\/conv2d_194\/BiasAdd", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 32, + "bottom" : "sequential_35\/conv2d_193\/Relu", + "weights" : { + + }, + "Nx" : 3, + "pad_mode" : 0, + "pad_value" : 0, + "Ny" : 3, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "debug_info" : "max_pool_0", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "size_x" : 2, + "top" : "max_pool_0", + "top_shape_style" : 0, + "stride_x" : 2, + "avg_or_max" : 1, + "average_count_exclude_padding" : 1, + "type" : "pool", + "name" : "max_pool_0", + "pad_t" : 0, + "stride_y" : 2, + "bottom" : "sequential_35\/conv2d_194\/Relu", + "weights" : { + + }, + "pad_mode" : 2, + "size_y" : 2, + "pad_value" : 0 + }, + { + "pad_r" : 0, + "fused_relu" : 1, + "fused_tanh" : 0, + "debug_info" : "sequential_35\/batch_normalization_66\/FusedBatchNormV3_nchw", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/activation_66\/Relu", + "blob_weights" : 11, + "K" : 32, + "blob_biases" : 9, + "name" : "sequential_35\/batch_normalization_66\/FusedBatchNormV3_nchw", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 64, + "bottom" : "max_pool_0", + "weights" : { + + }, + "Nx" : 3, + "pad_mode" : 0, + "pad_value" : 0, + "Ny" : 3, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "debug_info" : "max_pool_1", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "size_x" : 2, + "top" : "max_pool_1", + "top_shape_style" : 0, + "stride_x" : 2, + "avg_or_max" : 1, + "average_count_exclude_padding" : 1, + "type" : "pool", + "name" : "max_pool_1", + "pad_t" : 0, + "stride_y" : 2, + "bottom" : "sequential_35\/activation_66\/Relu", + "weights" : { + + }, + "pad_mode" : 2, + "size_y" : 2, + "pad_value" : 0 + }, + { + "pad_r" : 0, + "fused_relu" : 0, + "fused_tanh" : 0, + "debug_info" : 
"sequential_35\/conv2d_196\/BiasAdd", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/conv2d_196\/BiasAdd", + "blob_weights" : 15, + "K" : 64, + "blob_biases" : 13, + "name" : "sequential_35\/conv2d_196\/BiasAdd", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 128, + "bottom" : "max_pool_1", + "weights" : { + + }, + "Nx" : 3, + "pad_mode" : 0, + "pad_value" : 0, + "Ny" : 3, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "fused_relu" : 1, + "fused_tanh" : 0, + "debug_info" : "sequential_35\/batch_normalization_67\/FusedBatchNormV3_nchw", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/activation_67\/Relu", + "blob_weights" : 19, + "K" : 128, + "blob_biases" : 17, + "name" : "sequential_35\/batch_normalization_67\/FusedBatchNormV3_nchw", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 32, + "bottom" : "sequential_35\/conv2d_196\/BiasAdd", + "weights" : { + + }, + "Nx" : 1, + "pad_mode" : 1, + "pad_value" : 0, + "Ny" : 1, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "debug_info" : "max_pool_2", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "size_x" : 2, + "top" : "max_pool_2", + "top_shape_style" : 0, + "stride_x" : 2, + "avg_or_max" : 1, + "average_count_exclude_padding" : 1, + "type" : "pool", + "name" : "max_pool_2", + "pad_t" : 0, + "stride_y" : 2, + "bottom" : "sequential_35\/activation_67\/Relu", + "weights" : { + + }, + "pad_mode" : 2, + "size_y" : 2, + "pad_value" : 0 + }, + { + "axis_h" : 0, + "axis_w" : 2, + "bottom" : "max_pool_2", + "axis_k" : 1, + "axis_n" : 3, + "axis_seq" : 4, + "weights" : { + + }, + "debug_info" : "transpose_17", + "top" : "transpose_17", + "type" : "transpose", + "name" : "transpose_17" + }, + { + "name" : "sequential_35\/flatten_35\/Reshape", + "weights" : { + + }, + "dst_w" : 5408, + "version" : 1, + "dst_n" : 1, + "dst_nd_rank" : 2, + "type" : "reshape", + "dst_h" : -1, + "mode" : 0, + "dynamic_shape" : false, + "bottom" : "transpose_17", + "debug_info" : "sequential_35\/flatten_35\/Reshape", + "dst_seq" : 1, + "dst_k" : 1, + "top" : "sequential_35\/flatten_35\/Reshape" + }, + { + "nB" : 5408, + "top" : "sequential_35\/dense_70\/Relu", + "has_biases" : 1, + "weights" : { + + }, + "nC" : 32, + "blob_weights" : 23, + "type" : "inner_product", + "has_relu" : 1, + "bottom" : "sequential_35\/flatten_35\/Reshape", + "blob_biases" : 21, + "has_tanh" : 0, + "debug_info" : "sequential_35\/dense_70\/BiasAdd", + "name" : "sequential_35\/dense_70\/BiasAdd", + "has_prelu" : 0 + }, + { + "nB" : 32, + "top" : "sequential_35\/dense_71\/BiasAdd", + "has_biases" : 1, + "weights" : { + + }, + "nC" : 2, + "blob_weights" : 27, + "type" : "inner_product", + "has_relu" : 0, + "bottom" : "sequential_35\/dense_70\/Relu", + "blob_biases" : 25, + "has_tanh" : 0, + "debug_info" : "sequential_35\/dense_71\/BiasAdd", + "name" : "sequential_35\/dense_71\/BiasAdd", + "has_prelu" : 0 + }, + { + "bottom" : "sequential_35\/dense_71\/BiasAdd", + "weights" : { + + }, + "nd_axis" : -1, + "debug_info" : "sequential_35\/dense_71\/Softmax", + "top" : "Identity", + "type" : "softmax_nd", + "name" : "sequential_35\/dense_71\/Softmax", + "attributes" : { + "is_output" : 1 + } + } + ], + "transform_params" : { + "conv2d_193_input" : { + "bias_a" : 0, + "bias_g" : 0, + "bias_r" : 0, + "bias_b" : 0, + "center_mean" : 0, + "is_network_bgr" : 0, + "scale" : 1 + } + }, + "storage" : "model.espresso.weights", + 
"metadata_in_weights" : [ + + ], + "properties" : { + + }, + "analyses" : { + + }, + "format_version" : 200 +} \ No newline at end of file diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape new file mode 100644 index 000000000..b091a65cd --- /dev/null +++ b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape @@ -0,0 +1,102 @@ +{ + "layer_shapes" : { + "sequential_35\/dense_70\/Relu" : { + "k" : 1, + "w" : 32, + "n" : 1, + "_rank" : 2, + "h" : 1 + }, + "max_pool_1" : { + "k" : 64, + "w" : 28, + "n" : 1, + "_rank" : 4, + "h" : 28 + }, + "transpose_17" : { + "k" : 13, + "w" : 32, + "n" : 1, + "_rank" : 4, + "h" : 13 + }, + "conv2d_193_input" : { + "k" : 3, + "w" : 120, + "n" : 1, + "_rank" : 4, + "h" : 120 + }, + "sequential_35\/conv2d_194\/Relu" : { + "k" : 32, + "w" : 116, + "n" : 1, + "_rank" : 4, + "h" : 116 + }, + "sequential_35\/activation_67\/Relu" : { + "k" : 32, + "w" : 26, + "n" : 1, + "_rank" : 4, + "h" : 26 + }, + "max_pool_0" : { + "k" : 32, + "w" : 58, + "n" : 1, + "_rank" : 4, + "h" : 58 + }, + "sequential_35\/activation_66\/Relu" : { + "k" : 64, + "w" : 56, + "n" : 1, + "_rank" : 4, + "h" : 56 + }, + "sequential_35\/conv2d_193\/Relu" : { + "k" : 16, + "w" : 118, + "n" : 1, + "_rank" : 4, + "h" : 118 + }, + "max_pool_2" : { + "k" : 32, + "w" : 13, + "n" : 1, + "_rank" : 4, + "h" : 13 + }, + "sequential_35\/conv2d_196\/BiasAdd" : { + "k" : 128, + "w" : 26, + "n" : 1, + "_rank" : 4, + "h" : 26 + }, + "sequential_35\/dense_71\/BiasAdd" : { + "k" : 1, + "w" : 2, + "n" : 1, + "_rank" : 2, + "h" : 1 + }, + "sequential_35\/flatten_35\/Reshape" : { + "k" : 1, + "w" : 5408, + "n" : 1, + "_rank" : 2, + "h" : 1 + }, + "Identity" : { + "k" : 1, + "w" : 2, + "n" : 1, + "_rank" : 2, + "h" : 1 + } + } +} \ No newline at end of file diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights new file mode 100644 index 000000000..b2e449c7b Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin new file mode 100644 index 000000000..ce4f3bc74 Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin new file mode 100644 index 000000000..43f653279 Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Contents.json b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Contents.json new file mode 100644 index 000000000..e39cf2a55 --- /dev/null +++ b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "Loader.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Loader@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "Loader@3x.png", + "idiom" : 
"universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader.png b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader.png new file mode 100644 index 000000000..e6d0524dc Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@2x.png b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@2x.png new file mode 100644 index 000000000..b8444aec0 Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@2x.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@3x.png b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@3x.png new file mode 100644 index 000000000..e95d4492b Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@3x.png differ diff --git a/Tests/Mocks/NetworkingMocks.swift b/Tests/Mocks/NetworkingMocks.swift index e72c113c5..eb8d1fef3 100644 --- a/Tests/Mocks/NetworkingMocks.swift +++ b/Tests/Mocks/NetworkingMocks.swift @@ -124,6 +124,7 @@ class MockSmileIdentityService: SmileIDServiceable { callbackUrl _: String?, sandboxResult _: Int?, allowNewEnroll _: Bool?, + failureReason: FailureReason?, metadata _: Metadata ) async throws -> SmartSelfieResponse { if MockHelper.shouldFail { @@ -154,6 +155,7 @@ class MockSmileIdentityService: SmileIDServiceable { partnerParams _: [String: String]?, callbackUrl _: String?, sandboxResult _: Int?, + failureReason: FailureReason?, metadata _: Metadata ) async throws -> SmartSelfieResponse { if MockHelper.shouldFail {