diff --git a/CHANGELOG.md b/CHANGELOG.md index a91f037f1..e156661d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ ## Unreleased * Fixed missing idType on Document Verification Jobs +* Introduced screens for the new Enhanced Selfie Capture Enrollment and Authentication Products. ## 10.2.17 diff --git a/Example/Podfile.lock b/Example/Podfile.lock index f9273bacb..aa431d6b5 100644 --- a/Example/Podfile.lock +++ b/Example/Podfile.lock @@ -51,7 +51,7 @@ SPEC CHECKSUMS: lottie-ios: fcb5e73e17ba4c983140b7d21095c834b3087418 netfox: 9d5cc727fe7576c4c7688a2504618a156b7d44b7 Sentry: f8374b5415bc38dfb5645941b3ae31230fbeae57 - SmileID: 44fef36001a02aa7362368e8a3f1127c03751166 + SmileID: dc04628f6e1572fc6e407649bfd05f91647ed947 SwiftLint: 3fe909719babe5537c552ee8181c0031392be933 ZIPFoundation: b8c29ea7ae353b309bc810586181fd073cb3312c diff --git a/Example/SmileID.xcodeproj/project.pbxproj b/Example/SmileID.xcodeproj/project.pbxproj index 68a4cfcec..dc75cae2d 100644 --- a/Example/SmileID.xcodeproj/project.pbxproj +++ b/Example/SmileID.xcodeproj/project.pbxproj @@ -7,7 +7,6 @@ objects = { /* Begin PBXBuildFile section */ - 1B229387BA34905BADB7776E /* Pods_SmileID_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 718422487EFECF403E12E64E /* Pods_SmileID_Example.framework */; }; 1E59E33E2BA1E64C00D2BAD2 /* PartnerParamsTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E59E33D2BA1E64C00D2BAD2 /* PartnerParamsTests.swift */; }; 1E60ED372A29C306002695FF /* HomeViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E60ED322A29C306002695FF /* HomeViewController.swift */; }; 1E60ED382A29C306002695FF /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E60ED332A29C306002695FF /* Constants.swift */; }; @@ -28,6 +27,7 @@ 20343AEF2C206BDF003536F5 /* JobListItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20343AEE2C206BDF003536F5 /* JobListItem.swift */; }; 20343AF22C206CEC003536F5 /* JobData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20343AF12C206CEC003536F5 /* JobData.swift */; }; 20343AFA2C2086D3003536F5 /* EmptyStateView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20343AF92C2086D3003536F5 /* EmptyStateView.swift */; }; + 204C95A12CDA455600A07386 /* FaceValidatorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 204C95A02CDA455600A07386 /* FaceValidatorTests.swift */; }; 205FB4A52C29AF1500FDE64F /* Date+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 205FB4A42C29AF1500FDE64F /* Date+Extensions.swift */; }; 205FB4A72C2C32A500FDE64F /* JobItemModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 205FB4A62C2C32A500FDE64F /* JobItemModel.swift */; }; 2089720E2C2624EE00D07D68 /* DataStoreClient.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2089720D2C2624EE00D07D68 /* DataStoreClient.swift */; }; @@ -39,7 +39,7 @@ 20B6D5EC2C21CE660023D51C /* DataStoreError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20B6D5EB2C21CE660023D51C /* DataStoreError.swift */; }; 20C360C82C454C130008DBDE /* RootViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20C360C72C454C130008DBDE /* RootViewModel.swift */; }; 20DFA0EC2C21917100AC2AE7 /* View+TextSelection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20DFA0EB2C21917100AC2AE7 /* View+TextSelection.swift */; }; - 20F3D6F32C25F4D700B32751 /* (null) in Sources */ = {isa = PBXBuildFile; }; + 20F3D6F32C25F4D700B32751 /* BuildFile in Sources */ = {isa = PBXBuildFile; }; 20F3D6F62C25F5C100B32751 /* SmileID.xcdatamodeld in Sources */ = {isa
= PBXBuildFile; fileRef = 20F3D6F42C25F5C100B32751 /* SmileID.xcdatamodeld */; }; 5829A8C02BC7429A001C1E7E /* PrivacyInfo.xcprivacy in Resources */ = {isa = PBXBuildFile; fileRef = 5829A8BF2BC7429A001C1E7E /* PrivacyInfo.xcprivacy */; }; 585BE4882AC7748E0091DDD8 /* RestartableTimerTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 585BE4872AC7748E0091DDD8 /* RestartableTimerTest.swift */; }; @@ -48,7 +48,7 @@ 607FACDB1AFB9204008FA782 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 607FACD91AFB9204008FA782 /* Main.storyboard */; }; 607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDE1AFB9204008FA782 /* LaunchScreen.xib */; }; 620F1E982B69194900185CD2 /* AlertView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 620F1E972B69194900185CD2 /* AlertView.swift */; }; - 620F1E9A2B691ABB00185CD2 /* (null) in Resources */ = {isa = PBXBuildFile; }; + 620F1E9A2B691ABB00185CD2 /* BuildFile in Resources */ = {isa = PBXBuildFile; }; 624777D02B0CDC9F00952842 /* EnhancedKycWithIdInputScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 624777CF2B0CDC9F00952842 /* EnhancedKycWithIdInputScreen.swift */; }; 62F6766F2B0D173600417419 /* EnhancedKycWithIdInputScreenViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62F6766E2B0D173600417419 /* EnhancedKycWithIdInputScreenViewModel.swift */; }; 62F676712B0E00E800417419 /* EnhancedKycResultDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62F676702B0E00E800417419 /* EnhancedKycResultDelegate.swift */; }; @@ -64,6 +64,7 @@ 6AC98B6FFA753C5463F7216F /* SmileConfigEntryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6AC984E484EEF69069C705C7 /* SmileConfigEntryView.swift */; }; 6AC98C0E9305B4B3EB66ED35 /* Util.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6AC980584C522B17A099E098 /* Util.swift */; }; 6AC98F5682012E19C815AE70 /* DocumentSelectorViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6AC9893915EBA33F6984A6D9 /* DocumentSelectorViewModel.swift */; }; + 6CD90C5E078F661252CEA1E7 /* Pods_SmileID_Tests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E8D6FA74D72803CA654BE8 /* Pods_SmileID_Tests.framework */; }; 918321EA2A52E36A00D6FB7F /* URLSessionRestServiceClientTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 918321E02A52E36A00D6FB7F /* URLSessionRestServiceClientTests.swift */; }; 918321EB2A52E36A00D6FB7F /* XCTestExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 918321E12A52E36A00D6FB7F /* XCTestExtension.swift */; }; 918321EC2A52E36A00D6FB7F /* DependencyContainerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 918321E32A52E36A00D6FB7F /* DependencyContainerTests.swift */; }; @@ -72,7 +73,7 @@ 91CB21A52AC10C61005AEBF5 /* NavigationBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91CB21A42AC10C61005AEBF5 /* NavigationBar.swift */; }; 91D9FBC42AB481FE00A8D36B /* PollingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91D9FBC32AB481FE00A8D36B /* PollingTests.swift */; }; 91D9FBD52AB8AB4700A8D36B /* CalculateSignatureTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91D9FBD42AB8AB4700A8D36B /* CalculateSignatureTests.swift */; }; - CAF00E3C75F6ADCC1E5AE60A /* Pods_SmileID_Tests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = AF55180EE707E0648C395190 /* Pods_SmileID_Tests.framework */; }; + F4581E5752199E232AA7876E /* Pods_SmileID_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 9CFB9F504DFC73EC007239DF /* 
Pods_SmileID_Example.framework */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -86,7 +87,6 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ - 0C27F5BE0A9BAAD20932D47D /* Pods-SmileID_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.debug.xcconfig"; sourceTree = ""; }; 1E59E33D2BA1E64C00D2BAD2 /* PartnerParamsTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PartnerParamsTests.swift; sourceTree = ""; }; 1E60ED322A29C306002695FF /* HomeViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = HomeViewController.swift; sourceTree = ""; }; 1E60ED332A29C306002695FF /* Constants.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; @@ -106,6 +106,7 @@ 20343AEE2C206BDF003536F5 /* JobListItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JobListItem.swift; sourceTree = ""; }; 20343AF12C206CEC003536F5 /* JobData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JobData.swift; sourceTree = ""; }; 20343AF92C2086D3003536F5 /* EmptyStateView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmptyStateView.swift; sourceTree = ""; }; + 204C95A02CDA455600A07386 /* FaceValidatorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceValidatorTests.swift; sourceTree = ""; }; 205FB4A42C29AF1500FDE64F /* Date+Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Date+Extensions.swift"; sourceTree = ""; }; 205FB4A62C2C32A500FDE64F /* JobItemModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JobItemModel.swift; sourceTree = ""; }; 2089720D2C2624EE00D07D68 /* DataStoreClient.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DataStoreClient.swift; sourceTree = ""; }; @@ -118,8 +119,7 @@ 20C360C72C454C130008DBDE /* RootViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RootViewModel.swift; sourceTree = ""; }; 20DFA0EB2C21917100AC2AE7 /* View+TextSelection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "View+TextSelection.swift"; sourceTree = ""; }; 20F3D6F52C25F5C100B32751 /* SmileID.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = SmileID.xcdatamodel; sourceTree = ""; }; - 3736E6936005080B51DC930F /* Pods-SmileID_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.release.xcconfig"; sourceTree = ""; }; - 38D85EC9740D09436F10E980 /* Pods-SmileID_Tests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.debug.xcconfig"; sourceTree = ""; }; + 52A42996691DF0E59A2D2FA8 /* Pods-SmileID_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = 
"Pods-SmileID_Tests.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.release.xcconfig"; sourceTree = ""; }; 5829A8BF2BC7429A001C1E7E /* PrivacyInfo.xcprivacy */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = PrivacyInfo.xcprivacy; sourceTree = ""; }; 585BE4872AC7748E0091DDD8 /* RestartableTimerTest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RestartableTimerTest.swift; sourceTree = ""; }; 58C5F1D72B05925800A6080C /* BiometricKycWithIdInputScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BiometricKycWithIdInputScreen.swift; sourceTree = ""; }; @@ -147,7 +147,7 @@ 6AC9893915EBA33F6984A6D9 /* DocumentSelectorViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DocumentSelectorViewModel.swift; sourceTree = ""; }; 6AC98BA00298258573CBCBD4 /* ScannerViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ScannerViewController.swift; sourceTree = ""; }; 6AC98BC49871655D87C7DEE3 /* SettingsViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SettingsViewModel.swift; sourceTree = ""; }; - 718422487EFECF403E12E64E /* Pods_SmileID_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 85FA486B80497097116C39A5 /* Pods-SmileID_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.release.xcconfig"; sourceTree = ""; }; 918321E02A52E36A00D6FB7F /* URLSessionRestServiceClientTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = URLSessionRestServiceClientTests.swift; sourceTree = ""; }; 918321E12A52E36A00D6FB7F /* XCTestExtension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = XCTestExtension.swift; path = ../../Tests/XCTestExtension.swift; sourceTree = ""; }; 918321E32A52E36A00D6FB7F /* DependencyContainerTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DependencyContainerTests.swift; sourceTree = ""; }; @@ -158,9 +158,11 @@ 91D9FBD42AB8AB4700A8D36B /* CalculateSignatureTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CalculateSignatureTests.swift; sourceTree = ""; }; 94E7560A47E255DD8215C183 /* README.md */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 9755B6A19CF28DE212F24C83 /* SmileID.podspec */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = SmileID.podspec; path = ../SmileID.podspec; sourceTree = ""; xcLanguageSpecificationIdentifier = xcode.lang.ruby; }; - AF55180EE707E0648C395190 /* Pods_SmileID_Tests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Tests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 9CFB9F504DFC73EC007239DF /* Pods_SmileID_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Example.framework; 
sourceTree = BUILT_PRODUCTS_DIR; }; + A55DCB8BFFF66B290B1883E6 /* Pods-SmileID_Tests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.debug.xcconfig"; sourceTree = ""; }; C8CD2E3DB817D8C6334E9240 /* LICENSE */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = LICENSE; path = ../LICENSE; sourceTree = ""; }; - EC1A4BF74BF20D4385E0CEA1 /* Pods-SmileID_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.release.xcconfig"; sourceTree = ""; }; + D7E8D6FA74D72803CA654BE8 /* Pods_SmileID_Tests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Tests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + ED17F9F64A5C52AE70E8BC55 /* Pods-SmileID_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.debug.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -168,7 +170,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 1B229387BA34905BADB7776E /* Pods_SmileID_Example.framework in Frameworks */, + F4581E5752199E232AA7876E /* Pods_SmileID_Example.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -176,7 +178,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - CAF00E3C75F6ADCC1E5AE60A /* Pods_SmileID_Tests.framework in Frameworks */, + 6CD90C5E078F661252CEA1E7 /* Pods_SmileID_Tests.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -297,7 +299,7 @@ 607FACE81AFB9204008FA782 /* Tests */, 607FACD11AFB9204008FA782 /* Products */, 828BF541E068101B2E6ED55F /* Pods */, - E88F96F2DEFAFF10CCF982E2 /* Frameworks */, + EC80851519D319A69037C387 /* Frameworks */, ); sourceTree = ""; }; @@ -359,6 +361,7 @@ 918321E12A52E36A00D6FB7F /* XCTestExtension.swift */, 607FACE91AFB9204008FA782 /* Supporting Files */, 585BE4872AC7748E0091DDD8 /* RestartableTimerTest.swift */, + 204C95A02CDA455600A07386 /* FaceValidatorTests.swift */, ); path = Tests; sourceTree = ""; @@ -403,10 +406,10 @@ 828BF541E068101B2E6ED55F /* Pods */ = { isa = PBXGroup; children = ( - 0C27F5BE0A9BAAD20932D47D /* Pods-SmileID_Example.debug.xcconfig */, - 3736E6936005080B51DC930F /* Pods-SmileID_Example.release.xcconfig */, - 38D85EC9740D09436F10E980 /* Pods-SmileID_Tests.debug.xcconfig */, - EC1A4BF74BF20D4385E0CEA1 /* Pods-SmileID_Tests.release.xcconfig */, + ED17F9F64A5C52AE70E8BC55 /* Pods-SmileID_Example.debug.xcconfig */, + 85FA486B80497097116C39A5 /* Pods-SmileID_Example.release.xcconfig */, + A55DCB8BFFF66B290B1883E6 /* Pods-SmileID_Tests.debug.xcconfig */, + 52A42996691DF0E59A2D2FA8 /* Pods-SmileID_Tests.release.xcconfig */, ); path = Pods; sourceTree = ""; @@ -443,11 +446,11 @@ path = ../../Tests/Mocks; sourceTree = ""; }; - E88F96F2DEFAFF10CCF982E2 /* Frameworks */ = { + EC80851519D319A69037C387 /* Frameworks */ = { isa = PBXGroup; children = ( - 718422487EFECF403E12E64E /* Pods_SmileID_Example.framework */, - AF55180EE707E0648C395190 /* Pods_SmileID_Tests.framework */, + 9CFB9F504DFC73EC007239DF /* 
Pods_SmileID_Example.framework */, + D7E8D6FA74D72803CA654BE8 /* Pods_SmileID_Tests.framework */, ); name = Frameworks; sourceTree = ""; @@ -459,13 +462,13 @@ isa = PBXNativeTarget; buildConfigurationList = 607FACEF1AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SmileID_Example" */; buildPhases = ( - A8844648E240C4012E2F4931 /* [CP] Check Pods Manifest.lock */, + 3F18CA39137BF686F4A7D046 /* [CP] Check Pods Manifest.lock */, 607FACCC1AFB9204008FA782 /* Sources */, 917D79282AA8024400FA6624 /* SwiftLint */, 607FACCD1AFB9204008FA782 /* Frameworks */, 607FACCE1AFB9204008FA782 /* Resources */, C0BE335FFECD4DF6892309F3 /* Upload Debug Symbols to Sentry */, - E71DEE69CE1BFE014013BDFC /* [CP] Embed Pods Frameworks */, + A24D0B35FBC0DF1AECE43CC6 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -480,7 +483,7 @@ isa = PBXNativeTarget; buildConfigurationList = 607FACF21AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SmileID_Tests" */; buildPhases = ( - E594EEEB073850BE36B39940 /* [CP] Check Pods Manifest.lock */, + ABA257918BF63D6014CDE6F4 /* [CP] Check Pods Manifest.lock */, 607FACE11AFB9204008FA782 /* Sources */, 607FACE21AFB9204008FA782 /* Frameworks */, 607FACE31AFB9204008FA782 /* Resources */, @@ -546,7 +549,7 @@ buildActionMask = 2147483647; files = ( 1EFAB3172A375265008E3C13 /* Images.xcassets in Resources */, - 620F1E9A2B691ABB00185CD2 /* (null) in Resources */, + 620F1E9A2B691ABB00185CD2 /* BuildFile in Resources */, 607FACDB1AFB9204008FA782 /* Main.storyboard in Resources */, 5829A8C02BC7429A001C1E7E /* PrivacyInfo.xcprivacy in Resources */, 607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */, @@ -563,7 +566,7 @@ /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ - 917D79282AA8024400FA6624 /* SwiftLint */ = { + 3F18CA39137BF686F4A7D046 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -571,17 +574,21 @@ inputFileListPaths = ( ); inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", ); - name = SwiftLint; + name = "[CP] Check Pods Manifest.lock"; outputFileListPaths = ( ); outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-SmileID_Example-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "# Type a script or drag a script file from your workspace to insert its path.\ncd ../Sources\n\"${PODS_ROOT}/SwiftLint/swiftlint\"\n"; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; }; - A8844648E240C4012E2F4931 /* [CP] Check Pods Manifest.lock */ = { + 917D79282AA8024400FA6624 /* SwiftLint */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -589,36 +596,49 @@ inputFileListPaths = ( ); inputPaths = ( - "${PODS_PODFILE_DIR_PATH}/Podfile.lock", - "${PODS_ROOT}/Manifest.lock", ); - name = "[CP] Check Pods Manifest.lock"; + name = SwiftLint; outputFileListPaths = ( ); outputPaths = ( - "$(DERIVED_FILE_DIR)/Pods-SmileID_Example-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; - showEnvVarsInLog = 0; + shellScript = "# Type a script or drag a script file from your workspace to insert its path.\ncd ../Sources\n\"${PODS_ROOT}/SwiftLint/swiftlint\"\n"; }; - C0BE335FFECD4DF6892309F3 /* Upload Debug Symbols to Sentry */ = { + A24D0B35FBC0DF1AECE43CC6 /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputPaths = ( - "${DWARF_DSYM_FOLDER_PATH}/${DWARF_DSYM_FILE_NAME}/Contents/Resources/DWARF/${TARGET_NAME}", + "${PODS_ROOT}/Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example-frameworks.sh", + "${BUILT_PRODUCTS_DIR}/ArkanaKeys/ArkanaKeys.framework", + "${BUILT_PRODUCTS_DIR}/ArkanaKeysInterfaces/ArkanaKeysInterfaces.framework", + "${BUILT_PRODUCTS_DIR}/FingerprintJS/FingerprintJS.framework", + "${BUILT_PRODUCTS_DIR}/Sentry/Sentry.framework", + "${BUILT_PRODUCTS_DIR}/SmileID/SmileID.framework", + "${BUILT_PRODUCTS_DIR}/ZIPFoundation/ZIPFoundation.framework", + "${BUILT_PRODUCTS_DIR}/lottie-ios/Lottie.framework", + "${BUILT_PRODUCTS_DIR}/netfox/netfox.framework", ); - name = "Upload Debug Symbols to Sentry"; + name = "[CP] Embed Pods Frameworks"; outputPaths = ( + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ArkanaKeys.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ArkanaKeysInterfaces.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/FingerprintJS.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Sentry.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SmileID.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ZIPFoundation.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Lottie.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/netfox.framework", ); - runOnlyForDeploymentPostprocessing = 1; + runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "# This script is responsable to upload debug symbols and source context for Sentry.\nif which sentry-cli >/dev/null; then\nexport SENTRY_ORG=smile-identity\nexport SENTRY_PROJECT=ios\nERROR=$(sentry-cli debug-files upload --include-sources \"$DWARF_DSYM_FOLDER_PATH\" 2>&1 >/dev/null)\nif [ ! $? 
-eq 0 ]; then\necho \"warning: sentry-cli - $ERROR\"\nfi\nelse\necho \"warning: sentry-cli not installed, download from https://github.com/getsentry/sentry-cli/releases\"\nfi\n"; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example-frameworks.sh\"\n"; + showEnvVarsInLog = 0; }; - E594EEEB073850BE36B39940 /* [CP] Check Pods Manifest.lock */ = { + ABA257918BF63D6014CDE6F4 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -640,37 +660,20 @@ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; - E71DEE69CE1BFE014013BDFC /* [CP] Embed Pods Frameworks */ = { + C0BE335FFECD4DF6892309F3 /* Upload Debug Symbols to Sentry */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputPaths = ( - "${PODS_ROOT}/Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example-frameworks.sh", - "${BUILT_PRODUCTS_DIR}/ArkanaKeys/ArkanaKeys.framework", - "${BUILT_PRODUCTS_DIR}/ArkanaKeysInterfaces/ArkanaKeysInterfaces.framework", - "${BUILT_PRODUCTS_DIR}/FingerprintJS/FingerprintJS.framework", - "${BUILT_PRODUCTS_DIR}/Sentry/Sentry.framework", - "${BUILT_PRODUCTS_DIR}/SmileID/SmileID.framework", - "${BUILT_PRODUCTS_DIR}/ZIPFoundation/ZIPFoundation.framework", - "${BUILT_PRODUCTS_DIR}/lottie-ios/Lottie.framework", - "${BUILT_PRODUCTS_DIR}/netfox/netfox.framework", + "${DWARF_DSYM_FOLDER_PATH}/${DWARF_DSYM_FILE_NAME}/Contents/Resources/DWARF/${TARGET_NAME}", ); - name = "[CP] Embed Pods Frameworks"; + name = "Upload Debug Symbols to Sentry"; outputPaths = ( - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ArkanaKeys.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ArkanaKeysInterfaces.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/FingerprintJS.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Sentry.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SmileID.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ZIPFoundation.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Lottie.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/netfox.framework", ); - runOnlyForDeploymentPostprocessing = 0; + runOnlyForDeploymentPostprocessing = 1; shellPath = /bin/sh; - shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example-frameworks.sh\"\n"; - showEnvVarsInLog = 0; + shellScript = "# This script is responsable to upload debug symbols and source context for Sentry.\nif which sentry-cli >/dev/null; then\nexport SENTRY_ORG=smile-identity\nexport SENTRY_PROJECT=ios\nERROR=$(sentry-cli debug-files upload --include-sources \"$DWARF_DSYM_FOLDER_PATH\" 2>&1 >/dev/null)\nif [ ! $? 
-eq 0 ]; then\necho \"warning: sentry-cli - $ERROR\"\nfi\nelse\necho \"warning: sentry-cli not installed, download from https://github.com/getsentry/sentry-cli/releases\"\nfi\n"; }; /* End PBXShellScriptBuildPhase section */ @@ -695,7 +698,7 @@ 1ED53F6D2A2F28590020BEFB /* SmileTextField.swift in Sources */, 91CB21A52AC10C61005AEBF5 /* NavigationBar.swift in Sources */, 1ED53F6B2A2F28590020BEFB /* ProductCell.swift in Sources */, - 20F3D6F32C25F4D700B32751 /* (null) in Sources */, + 20F3D6F32C25F4D700B32751 /* BuildFile in Sources */, 1E60ED382A29C306002695FF /* Constants.swift in Sources */, 624777D02B0CDC9F00952842 /* EnhancedKycWithIdInputScreen.swift in Sources */, 1ED53F712A2F28590020BEFB /* EnterUserIDView.swift in Sources */, @@ -744,6 +747,7 @@ 585BE4882AC7748E0091DDD8 /* RestartableTimerTest.swift in Sources */, 58C7118C2A69DE920062BBFB /* EnhancedKycTest.swift in Sources */, 918321EC2A52E36A00D6FB7F /* DependencyContainerTests.swift in Sources */, + 204C95A12CDA455600A07386 /* FaceValidatorTests.swift in Sources */, 918321EA2A52E36A00D6FB7F /* URLSessionRestServiceClientTests.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -881,13 +885,13 @@ }; 607FACF01AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 0C27F5BE0A9BAAD20932D47D /* Pods-SmileID_Example.debug.xcconfig */; + baseConfigurationReference = ED17F9F64A5C52AE70E8BC55 /* Pods-SmileID_Example.debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_IDENTITY = "Apple Development"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Development"; CODE_SIGN_STYLE = Manual; - CURRENT_PROJECT_VERSION = 36; + CURRENT_PROJECT_VERSION = 40; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEVELOPMENT_TEAM = ""; "DEVELOPMENT_TEAM[sdk=iphoneos*]" = 99P7YGX9Q6; @@ -897,7 +901,7 @@ INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.developer-tools"; IPHONEOS_DEPLOYMENT_TARGET = 14.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; - MARKETING_VERSION = 1.3.0; + MARKETING_VERSION = 1.3.1; MODULE_NAME = ExampleApp; PRODUCT_BUNDLE_IDENTIFIER = "com.smileidentity.example-ios"; PRODUCT_NAME = "Smile ID"; @@ -914,13 +918,13 @@ }; 607FACF11AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 3736E6936005080B51DC930F /* Pods-SmileID_Example.release.xcconfig */; + baseConfigurationReference = 85FA486B80497097116C39A5 /* Pods-SmileID_Example.release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_IDENTITY = "Apple Distribution"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Distribution"; CODE_SIGN_STYLE = Manual; - CURRENT_PROJECT_VERSION = 36; + CURRENT_PROJECT_VERSION = 40; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEVELOPMENT_TEAM = ""; "DEVELOPMENT_TEAM[sdk=iphoneos*]" = 99P7YGX9Q6; @@ -930,7 +934,7 @@ INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.developer-tools"; IPHONEOS_DEPLOYMENT_TARGET = 14.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; - MARKETING_VERSION = 1.3.0; + MARKETING_VERSION = 1.3.1; MODULE_NAME = ExampleApp; PRODUCT_BUNDLE_IDENTIFIER = "com.smileidentity.example-ios"; PRODUCT_NAME = "Smile ID"; @@ -946,7 +950,7 @@ }; 607FACF31AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 38D85EC9740D09436F10E980 /* Pods-SmileID_Tests.debug.xcconfig */; + baseConfigurationReference = A55DCB8BFFF66B290B1883E6 /* Pods-SmileID_Tests.debug.xcconfig */; 
buildSettings = { CLANG_ENABLE_MODULES = YES; CODE_SIGN_IDENTITY = "Apple Development"; @@ -979,7 +983,7 @@ }; 607FACF41AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = EC1A4BF74BF20D4385E0CEA1 /* Pods-SmileID_Tests.release.xcconfig */; + baseConfigurationReference = 52A42996691DF0E59A2D2FA8 /* Pods-SmileID_Tests.release.xcconfig */; buildSettings = { CLANG_ENABLE_MODULES = YES; CODE_SIGN_IDENTITY = "Apple Distribution"; diff --git a/Example/SmileID/BiometricKYC/BiometricKycWithIdInputScreen.swift b/Example/SmileID/BiometricKYC/BiometricKycWithIdInputScreen.swift index 2e52e4892..ecdf4ab24 100644 --- a/Example/SmileID/BiometricKYC/BiometricKycWithIdInputScreen.swift +++ b/Example/SmileID/BiometricKYC/BiometricKycWithIdInputScreen.swift @@ -6,7 +6,7 @@ struct BiometricKycWithIdInputScreen: View { let delegate: BiometricKycResultDelegate @State private var selectedCountry: CountryInfo? - @ObservedObject var viewModel: BiometricKycWithIdInputScreenViewModel + @StateObject var viewModel: BiometricKycWithIdInputScreenViewModel var body: some View { switch viewModel.step { @@ -19,25 +19,27 @@ struct BiometricKycWithIdInputScreen: View { } .frame(maxWidth: .infinity) case .idTypeSelection(let countryList): - SearchableDropdownSelector( - items: countryList, - selectedItem: selectedCountry, - itemDisplayName: { $0.name }, - onItemSelected: { selectedCountry = $0 } - ) - if let selectedCountry = selectedCountry { - RadioGroupSelector( - title: "Select ID Type", - items: selectedCountry.availableIdTypes, - itemDisplayName: { $0.label }, - onItemSelected: { idType in - viewModel.onIdTypeSelected( - country: selectedCountry.countryCode, - idType: idType.idTypeKey, - requiredFields: idType.requiredFields ?? [] - ) - } + VStack { + SearchableDropdownSelector( + items: countryList, + selectedItem: selectedCountry, + itemDisplayName: { $0.name }, + onItemSelected: { selectedCountry = $0 } ) + if let selectedCountry = selectedCountry { + RadioGroupSelector( + title: "Select ID Type", + items: selectedCountry.availableIdTypes, + itemDisplayName: { $0.label }, + onItemSelected: { idType in + viewModel.onIdTypeSelected( + country: selectedCountry.countryCode, + idType: idType.idTypeKey, + requiredFields: idType.requiredFields ?? [] + ) + } + ) + } } case .consent(let country, let idType, let requiredFields): SmileID.consentScreen( diff --git a/Example/SmileID/EnhancedKYC/EnhancedKycWithIdInputScreen.swift b/Example/SmileID/EnhancedKYC/EnhancedKycWithIdInputScreen.swift index b72daadfc..c0f1ff2db 100644 --- a/Example/SmileID/EnhancedKYC/EnhancedKycWithIdInputScreen.swift +++ b/Example/SmileID/EnhancedKYC/EnhancedKycWithIdInputScreen.swift @@ -18,25 +18,27 @@ struct EnhancedKycWithIdInputScreen: View { } .frame(maxWidth: .infinity) case .idTypeSelection(let countryList): - SearchableDropdownSelector( - items: countryList, - selectedItem: selectedCountry, - itemDisplayName: { $0.name }, - onItemSelected: { selectedCountry = $0 } - ) - if let selectedCountry = selectedCountry { - RadioGroupSelector( - title: "Select ID Type", - items: selectedCountry.availableIdTypes, - itemDisplayName: { $0.label }, - onItemSelected: { idType in - viewModel.onIdTypeSelected( - country: selectedCountry.countryCode, - idType: idType.idTypeKey, - requiredFields: idType.requiredFields ?? 
[] - ) - } + VStack { + SearchableDropdownSelector( + items: countryList, + selectedItem: selectedCountry, + itemDisplayName: { $0.name }, + onItemSelected: { selectedCountry = $0 } ) + if let selectedCountry = selectedCountry { + RadioGroupSelector( + title: "Select ID Type", + items: selectedCountry.availableIdTypes, + itemDisplayName: { $0.label }, + onItemSelected: { idType in + viewModel.onIdTypeSelected( + country: selectedCountry.countryCode, + idType: idType.idTypeKey, + requiredFields: idType.requiredFields ?? [] + ) + } + ) + } } case .consent(let country, let idType, let requiredFields): SmileID.consentScreen( diff --git a/Example/SmileID/Home/HomeView.swift b/Example/SmileID/Home/HomeView.swift index 7988f37f9..086b7fb42 100644 --- a/Example/SmileID/Home/HomeView.swift +++ b/Example/SmileID/Home/HomeView.swift @@ -7,19 +7,19 @@ struct HomeView: View { @StateObject var viewModel: HomeViewModel init(config: Config) { - _viewModel = StateObject(wrappedValue: HomeViewModel(config: config)) + _viewModel = StateObject(wrappedValue: HomeViewModel(config: config)) } + let columns = [GridItem(.flexible()), GridItem(.flexible())] + var body: some View { NavigationView { VStack(spacing: 24) { Text("Test Our Products") .font(SmileID.theme.header2) .foregroundColor(.black) - - MyVerticalGrid( - maxColumns: 2, - items: [ + ScrollView(showsIndicators: false) { + LazyVGrid(columns: columns) { ProductCell( image: "smart_selfie_enroll", name: "SmartSelfie™ Enrollment", @@ -28,17 +28,17 @@ struct HomeView: View { }, content: { SmileID.smartSelfieEnrollmentScreen( - userId: viewModel.smartSelfieEnrollmentUserId, + userId: viewModel.newUserId, jobId: viewModel.newJobId, allowAgentMode: true, delegate: SmartSelfieEnrollmentDelegate( - userId: viewModel.smartSelfieEnrollmentUserId, + userId: viewModel.newUserId, onEnrollmentSuccess: viewModel.onSmartSelfieEnrollment, onError: viewModel.didError ) ) } - ), + ) ProductCell( image: "smart_selfie_authentication", name: "SmartSelfie™ Authentication", @@ -47,11 +47,45 @@ struct HomeView: View { }, content: { SmartSelfieAuthWithUserIdEntry( - initialUserId: viewModel.smartSelfieEnrollmentUserId, + initialUserId: viewModel.lastSelfieEnrollmentUserId ?? "", + delegate: viewModel + ) + } + ) + ProductCell( + image: "smart_selfie_enroll", + name: "SmartSelfie™ Enrollment (Enhanced)", + onClick: { + viewModel.onProductClicked() + }, + content: { + SmileID.smartSelfieEnrollmentScreen( + userId: viewModel.newUserId, + jobId: viewModel.newJobId, + allowAgentMode: true, + useStrictMode: true, + delegate: SmartSelfieEnrollmentDelegate( + userId: viewModel.newUserId, + onEnrollmentSuccess: viewModel.onSmartSelfieEnrollment, + onError: viewModel.didError + ) + ) + } + ) + ProductCell( + image: "smart_selfie_authentication", + name: "SmartSelfie™ Authentication (Enhanced)", + onClick: { + viewModel.onProductClicked() + }, + content: { + SmartSelfieAuthWithUserIdEntry( + initialUserId: viewModel.lastSelfieEnrollmentUserId ?? 
"", + useStrictMode: true, delegate: viewModel ) } - ), + ) ProductCell( image: "enhanced_kyc", name: "Enhanced KYC", @@ -67,7 +101,7 @@ struct HomeView: View { ) ) } - ), + ) ProductCell( image: "biometric", name: "Biometric KYC", @@ -83,7 +117,7 @@ struct HomeView: View { ) ) } - ), + ) ProductCell( image: "document", name: "\nDocument Verification", @@ -97,7 +131,7 @@ struct HomeView: View { delegate: viewModel ) } - ), + ) ProductCell( image: "enhanced_doc_v", name: "Enhanced Document Verification", @@ -112,10 +146,8 @@ struct HomeView: View { ) } ) - ].map { - AnyView($0) } - ) + } Text("Partner \(viewModel.partnerId) - Version \(version) - Build \(build)") .font(SmileID.theme.body) @@ -164,7 +196,9 @@ struct SmartSelfieEnrollmentDelegate: SmartSelfieResultDelegate { private struct SmartSelfieAuthWithUserIdEntry: View { let initialUserId: String + var useStrictMode: Bool = false let delegate: SmartSelfieResultDelegate + @State private var userId: String? var body: some View { @@ -172,6 +206,7 @@ private struct SmartSelfieAuthWithUserIdEntry: View { SmileID.smartSelfieAuthenticationScreen( userId: userId, allowAgentMode: true, + useStrictMode: useStrictMode, delegate: delegate ) } else { @@ -262,9 +297,9 @@ private struct MyVerticalGrid: View { ScrollView { VStack(alignment: .leading, spacing: 8) { let numRows = (items.count + maxColumns - 1) / maxColumns - ForEach(0 ..< numRows) { rowIndex in + ForEach(0 ..< numRows, id: \.self) { rowIndex in HStack(spacing: 16) { - ForEach(0 ..< maxColumns) { columnIndex in + ForEach(0 ..< maxColumns, id: \.self) { columnIndex in let itemIndex = rowIndex * maxColumns + columnIndex let width = geo.size.width / CGFloat(maxColumns) if itemIndex < items.count { diff --git a/Example/SmileID/Home/HomeViewModel.swift b/Example/SmileID/Home/HomeViewModel.swift index 709b464cf..6828faf80 100644 --- a/Example/SmileID/Home/HomeViewModel.swift +++ b/Example/SmileID/Home/HomeViewModel.swift @@ -19,7 +19,10 @@ class HomeViewModel: ObservableObject, @Published var partnerId: String var networkMonitor = NetworkMonitor.shared - @Published private(set) var smartSelfieEnrollmentUserId = generateUserId() + var lastSelfieEnrollmentUserId: String? { + guard let value = UIPasteboard.general.string else { return nil } + return value.hasPrefix("user-") ? value : nil + } @Published private(set) var newUserId: String = generateUserId() @Published private(set) var newJobId: String = generateJobId() diff --git a/Example/SmileID/Home/ProductCell.swift b/Example/SmileID/Home/ProductCell.swift index 0633a18b0..6e6da271b 100644 --- a/Example/SmileID/Home/ProductCell.swift +++ b/Example/SmileID/Home/ProductCell.swift @@ -1,18 +1,18 @@ import SmileID import SwiftUI -struct ProductCell: View { +struct ProductCell: View { let image: String let name: String let onClick: (() -> Void)? - @ViewBuilder let content: () -> any View + @ViewBuilder let content: () -> Content @State private var isPresented: Bool = false init( image: String, name: String, onClick: (() -> Void)? 
= nil, - @ViewBuilder content: @escaping () -> any View + @ViewBuilder content: @escaping () -> Content ) { self.image = image self.name = name @@ -41,8 +41,24 @@ struct ProductCell: View { .frame(maxWidth: .infinity) .background(SmileID.theme.accent) .cornerRadius(8) - .sheet(isPresented: $isPresented, content: { AnyView(content()) - }) + .fullScreenCover( + isPresented: $isPresented, + content: { + NavigationView { + content() + .toolbar { + ToolbarItem(placement: .cancellationAction) { + Button { + isPresented = false + } label: { + Text(SmileIDResourcesHelper.localizedString(for: "Action.Cancel")) + .foregroundColor(SmileID.theme.accent) + } + } + } + } + } + ) } ) } diff --git a/Example/SmileID/Info.plist b/Example/SmileID/Info.plist index 09bc02c40..b42aec4ca 100644 --- a/Example/SmileID/Info.plist +++ b/Example/SmileID/Info.plist @@ -2,6 +2,8 @@ + ITSAppUsesNonExemptEncryption + CFBundleDevelopmentRegion en CFBundleExecutable diff --git a/Example/SmileID/WelcomeScreen.swift b/Example/SmileID/WelcomeScreen.swift index 0fa83dd0c..d4ae0e37a 100644 --- a/Example/SmileID/WelcomeScreen.swift +++ b/Example/SmileID/WelcomeScreen.swift @@ -24,7 +24,7 @@ struct WelcomeScreen: View { .padding(.vertical) Text("To begin testing, you need to add a configuration from the Smile Portal") - .font(EpilogueFont.regular(with: 16)) + .font(DMSansFont.regular(with: 16)) .foregroundColor(SmileID.theme.onLight) .padding(.vertical) diff --git a/Example/Tests/FaceValidatorTests.swift b/Example/Tests/FaceValidatorTests.swift new file mode 100644 index 000000000..bde035c08 --- /dev/null +++ b/Example/Tests/FaceValidatorTests.swift @@ -0,0 +1,147 @@ +import XCTest + +@testable import SmileID + +class FaceValidatorTests: XCTestCase { + var faceValidator: FaceValidator! + var mockDelegate: MockFaceValidatorDelegate! 
+ + override func setUp() { + super.setUp() + faceValidator = FaceValidator() + mockDelegate = MockFaceValidatorDelegate() + faceValidator.delegate = mockDelegate + let guideFrame: CGRect = .init(x: 30, y: 100, width: 250, height: 350) + faceValidator.setLayoutGuideFrame(with: guideFrame) + } + + override func tearDown() { + faceValidator = nil + mockDelegate = nil + super.tearDown() + } + + func testValidateWithValidFace() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), + faceQuality: 0.5, + brightness: 100 + ) + + XCTAssertTrue(result.faceInBounds) + XCTAssertTrue(result.hasDetectedValidFace) + XCTAssertNil(result.userInstruction) + } + + func testValidateWithFaceTooSmall() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 100, height: 100), + faceQuality: 0.5, + brightness: 100 + ) + + XCTAssertFalse(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .moveCloser) + } + + func testValidateWithFaceTooLarge() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 250, height: 250), + faceQuality: 0.5, + brightness: 100 + ) + + XCTAssertFalse(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .moveBack) + } + + func testValidateWithFaceOffCentre() { + let result = performValidation( + faceBoundingBox: CGRect(x: 125, y: 164, width: 190, height: 190), + faceQuality: 0.5, + brightness: 100 + ) + + XCTAssertFalse(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .headInFrame) + } + + func testValidateWithPoorBrightness() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), + faceQuality: 0.5, + brightness: 35 + ) + + XCTAssertTrue(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .goodLight) + } + + func testValidateWithPoorFaceQuality() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), + faceQuality: 0.2, + brightness: 70 + ) + + XCTAssertTrue(result.faceInBounds) + XCTAssertFalse(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .goodLight) + } + + func testValidateWithLivenessTask() { + let result = performValidation( + faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), + faceQuality: 0.3, + brightness: 100, + livenessTask: .lookLeft + ) + + XCTAssertTrue(result.faceInBounds) + XCTAssertTrue(result.hasDetectedValidFace) + XCTAssertEqual(result.userInstruction, .lookLeft) + } +} + +// MARK: - Helpers +extension FaceValidatorTests { + func performValidation( + faceBoundingBox: CGRect, + faceQuality: Float, + brightness: Int, + livenessTask: LivenessTask?
= nil + ) -> FaceValidationResult { + let faceGeometry = FaceGeometryData( + boundingBox: faceBoundingBox, + roll: 0, + yaw: 0, + pitch: 0, + direction: .none + ) + faceValidator.validate( + faceGeometry: faceGeometry, + faceQuality: faceQuality, + brightness: brightness, + currentLivenessTask: livenessTask + ) + + guard let mockValidationResult = mockDelegate.validationResult else { + XCTFail("Validation result should not be nil") + return FaceValidationResult(userInstruction: nil, hasDetectedValidFace: false, faceInBounds: false) + } + return mockValidationResult + } +} + +// MARK: - Mocks +class MockFaceValidatorDelegate: FaceValidatorDelegate { + var validationResult: FaceValidationResult? + + func updateValidationResult(_ result: FaceValidationResult) { + self.validationResult = result + } +} diff --git a/SmileID.podspec b/SmileID.podspec index 9e78b83c8..9f3375edc 100644 --- a/SmileID.podspec +++ b/SmileID.podspec @@ -13,6 +13,6 @@ Pod::Spec.new do |s| s.swift_version = '5.5' s.source_files = 'Sources/SmileID/Classes/**/*' s.resource_bundles = { - 'SmileID_SmileID' => ['Sources/SmileID/Resources/**/*.{storyboard,storyboardc,xib,nib,xcassets,json,png,ttf,lproj,xcprivacy}'] + 'SmileID_SmileID' => ['Sources/SmileID/Resources/**/*.{storyboard,storyboardc,xib,nib,xcassets,json,png,ttf,lproj,xcprivacy,mlmodelc,lottie}'] } end diff --git a/Sources/SmileID/Classes/Camera/CameraManager.swift b/Sources/SmileID/Classes/Camera/CameraManager.swift index f22a21652..8a5407e1e 100644 --- a/Sources/SmileID/Classes/Camera/CameraManager.swift +++ b/Sources/SmileID/Classes/Camera/CameraManager.swift @@ -1,5 +1,5 @@ -import Foundation import AVFoundation +import Foundation import SwiftUI class CameraManager: NSObject, ObservableObject { @@ -21,7 +21,9 @@ class CameraManager: NSObject, ObservableObject { @Published var sampleBuffer: CVPixelBuffer? @Published var capturedImage: Data? - var sampleBufferPublisher: Published<CVPixelBuffer?>.Publisher { $sampleBuffer } + var sampleBufferPublisher: Published<CVPixelBuffer?>.Publisher { + $sampleBuffer + } var capturedImagePublisher: Published<Data?>.Publisher { $capturedImage } let videoOutputQueue = DispatchQueue( label: "com.smileidentity.videooutput", @@ -35,6 +37,8 @@ (session.inputs.first as? AVCaptureDeviceInput)?.device.position } + private(set) var cameraName: String?
+ // Used to queue and then resume tasks while waiting for Camera permissions private let sessionQueue = DispatchQueue(label: "com.smileidentity.ios") private let videoOutput = AVCaptureVideoDataOutput() @@ -46,7 +50,8 @@ class CameraManager: NSObject, ObservableObject { self.orientation = orientation super.init() sessionQueue.async { - self.videoOutput.setSampleBufferDelegate(self, queue: self.videoOutputQueue) + self.videoOutput.setSampleBufferDelegate( + self, queue: self.videoOutputQueue) } checkPermissions() } @@ -58,28 +63,28 @@ class CameraManager: NSObject, ObservableObject { } private func checkPermissions() { - switch AVCaptureDevice.authorizationStatus(for: .video) { - case .notDetermined: - sessionQueue.suspend() - AVCaptureDevice.requestAccess(for: .video) { authorized in - if !authorized { - self.status = .unauthorized - self.set(error: .deniedAuthorization) - } - self.sessionQueue.resume() + switch AVCaptureDevice.authorizationStatus(for: .video) { + case .notDetermined: + sessionQueue.suspend() + AVCaptureDevice.requestAccess(for: .video) { authorized in + if !authorized { + self.status = .unauthorized + self.set(error: .deniedAuthorization) + } + self.sessionQueue.resume() + } + case .restricted: + status = .unauthorized + set(error: .restrictedAuthorization) + case .denied: + status = .unauthorized + set(error: .deniedAuthorization) + case .authorized: + break + @unknown default: + status = .unauthorized + set(error: .unknownAuthorization) } - case .restricted: - status = .unauthorized - set(error: .restrictedAuthorization) - case .denied: - status = .unauthorized - set(error: .deniedAuthorization) - case .authorized: - break - @unknown default: - status = .unauthorized - set(error: .unknownAuthorization) - } } private func addCameraInput(position: AVCaptureDevice.Position) { @@ -89,6 +94,8 @@ class CameraManager: NSObject, ObservableObject { return } + getCameraName(for: camera) + do { let cameraInput = try AVCaptureDeviceInput(device: camera) if session.canAddInput(cameraInput) { @@ -103,14 +110,29 @@ class CameraManager: NSObject, ObservableObject { } } - private func getCameraForPosition(_ position: AVCaptureDevice.Position) -> AVCaptureDevice? { + private func getCameraName(for camera: AVCaptureDevice) { + var manufacturer: String + if #available(iOS 14.0, *) { + manufacturer = camera.manufacturer + } else { + manufacturer = "Apple Inc." + } + cameraName = + "\(manufacturer) \(camera.localizedName) \(camera.deviceType.rawValue)" + } + + private func getCameraForPosition(_ position: AVCaptureDevice.Position) + -> AVCaptureDevice? 
{ switch position { case .front: - return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + return AVCaptureDevice.default( + .builtInWideAngleCamera, for: .video, position: .front) case .back: - return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) + return AVCaptureDevice.default( + .builtInWideAngleCamera, for: .video, position: .back) default: - return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + return AVCaptureDevice.default( + .builtInWideAngleCamera, for: .video, position: .front) } } @@ -121,7 +143,10 @@ class CameraManager: NSObject, ObservableObject { session.addOutput(photoOutput) session.addOutput(videoOutput) videoOutput.videoSettings = - [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA] + [ + kCVPixelBufferPixelFormatTypeKey as String: + kCVPixelFormatType_32BGRA + ] if orientation == .portrait { let videoConnection = videoOutput.connection(with: .video) videoConnection?.videoOrientation = .portrait @@ -136,7 +161,8 @@ class CameraManager: NSObject, ObservableObject { checkPermissions() sessionQueue.async { [self] in if !session.isRunning { - if let currentInput = session.inputs.first as? AVCaptureDeviceInput { + if let currentInput = session.inputs.first + as? AVCaptureDeviceInput { session.removeInput(currentInput) } addCameraInput(position: position) @@ -144,7 +170,8 @@ class CameraManager: NSObject, ObservableObject { session.startRunning() } else { session.beginConfiguration() - if let currentInput = session.inputs.first as? AVCaptureDeviceInput { + if let currentInput = session.inputs.first + as? AVCaptureDeviceInput { session.removeInput(currentInput) } addCameraInput(position: position) @@ -169,7 +196,9 @@ class CameraManager: NSObject, ObservableObject { } internal func capturePhoto() { - guard let connection = photoOutput.connection(with: .video), connection.isEnabled, connection.isActive else { + guard let connection = photoOutput.connection(with: .video), + connection.isEnabled, connection.isActive + else { set(error: .cameraUnavailable) print("Camera unavailable") return @@ -186,7 +215,8 @@ extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate { didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection ) { - guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } + guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) + else { return } self.sampleBuffer = imageBuffer } } diff --git a/Sources/SmileID/Classes/Camera/CameraViewController.swift b/Sources/SmileID/Classes/Camera/CameraViewController.swift index f52bf3996..d96b773d7 100644 --- a/Sources/SmileID/Classes/Camera/CameraViewController.swift +++ b/Sources/SmileID/Classes/Camera/CameraViewController.swift @@ -2,7 +2,9 @@ import UIKit import Vision import AVFoundation -class PreviewView: UIViewController { +class CameraViewController: UIViewController { + var faceDetector: EnhancedFaceDetector? + var previewLayer: AVCaptureVideoPreviewLayer? private weak var cameraManager: CameraManager? 
@@ -17,6 +19,7 @@ class PreviewView: UIViewController { override func viewDidLoad() { super.viewDidLoad() + faceDetector?.viewDelegate = self configurePreviewLayer() } @@ -34,7 +37,16 @@ class PreviewView: UIViewController { } } -extension PreviewView: RectangleDetectionDelegate { +extension CameraViewController: FaceDetectorViewDelegate { + func convertFromMetadataToPreviewRect(rect: CGRect) -> CGRect { + guard let previewLayer = previewLayer else { + return CGRect.zero + } + return previewLayer.layerRectConverted(fromMetadataOutputRect: rect) + } +} + +extension CameraViewController: RectangleDetectionDelegate { func didDetectQuad( quad: Quadrilateral?, _ imageSize: CGSize, diff --git a/Sources/SmileID/Classes/Consent/OrchestratedConsentScreen.swift b/Sources/SmileID/Classes/Consent/OrchestratedConsentScreen.swift index 9224c27d9..430276f79 100644 --- a/Sources/SmileID/Classes/Consent/OrchestratedConsentScreen.swift +++ b/Sources/SmileID/Classes/Consent/OrchestratedConsentScreen.swift @@ -74,7 +74,7 @@ public struct ConsentScreen: View { .padding(16) VStack(spacing: 16) { - ForEach(0.. Bool { guard let detectedRect = detectedRect else { return false } - + // Sometimes, the bounding box is out of frame. This cannot be considered centered // We check only left and right because the document should always fill the width but may // not fill the height if detectedRect.topLeft.x < tolerance || detectedRect.topRight.x > imageWidth - tolerance { return false } - + let imageCenterX = imageWidth / 2 let imageCenterY = imageHeight / 2 - + let rectCenterX = (detectedRect.topLeft.x + detectedRect.topRight.x) / 2 let rectCenterY = (detectedRect.topLeft.y + detectedRect.bottomLeft.y) / 2 - + let deltaX = abs(imageCenterX - rectCenterX) let deltaY = abs(imageCenterY - rectCenterY) - + let isCenteredHorizontally = deltaX < tolerance let isCenteredVertically = deltaY < tolerance - + return isCenteredHorizontally && isCenteredVertically } - + func openSettings() { guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return } UIApplication.shared.open(settingsURL) diff --git a/Sources/SmileID/Classes/FaceDetector/CGImage+CVPixelBuffer.swift b/Sources/SmileID/Classes/FaceDetector/CGImage+CVPixelBuffer.swift new file mode 100644 index 000000000..a0a729c0a --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/CGImage+CVPixelBuffer.swift @@ -0,0 +1,17 @@ +import CoreGraphics +import CoreImage +import VideoToolbox + +extension CGImage { + /** + Creates a new CGImage from a CVPixelBuffer. + + - Note: Not all CVPixelBuffer pixel formats support conversion into a + CGImage-compatible pixel format. + */ + public static func create(pixelBuffer: CVPixelBuffer) -> CGImage? { + var cgImage: CGImage? 
+ VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage) + return cgImage + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/EnhancedFaceDetector.swift b/Sources/SmileID/Classes/FaceDetector/EnhancedFaceDetector.swift new file mode 100644 index 000000000..2806a250d --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/EnhancedFaceDetector.swift @@ -0,0 +1,225 @@ +import AVFoundation +import Combine +import UIKit +import Vision + +enum FaceDetectorError: Error { + case unableToLoadSelfieModel + case invalidSelfieModelOutput + case noFaceDetected + case multipleFacesDetected + case unableToCropImage +} + +protocol FaceDetectorViewDelegate: NSObjectProtocol { + func convertFromMetadataToPreviewRect(rect: CGRect) -> CGRect +} + +protocol FaceDetectorResultDelegate: AnyObject { + func faceDetector( + _ detector: EnhancedFaceDetector, + didDetectFace faceGeometry: FaceGeometryData, + withFaceQuality faceQuality: Float, + brightness: Int + ) + func faceDetector(_ detector: EnhancedFaceDetector, didFailWithError error: Error) +} + +class EnhancedFaceDetector: NSObject { + private var selfieQualityModel: SelfieQualityDetector? + + private let cropSize = (width: 120, height: 120) + private let faceMovementThreshold: CGFloat = 0.15 + + private var sequenceHandler = VNSequenceRequestHandler() + + weak var viewDelegate: FaceDetectorViewDelegate? + weak var resultDelegate: FaceDetectorResultDelegate? + + override init() { + super.init() + selfieQualityModel = createImageClassifier() + } + + private func createImageClassifier() -> SelfieQualityDetector? { + do { + let modelConfiguration = MLModelConfiguration() + let coreMLModel = try SelfieQualityDetector(configuration: modelConfiguration) + return coreMLModel + } catch { + return nil + } + } + + /// Run Face Capture quality and Face Bounding Box and roll/pitch/yaw tracking + func processImageBuffer(_ imageBuffer: CVPixelBuffer) { + let detectFaceRectanglesRequest = VNDetectFaceRectanglesRequest() + let detectCaptureQualityRequest = VNDetectFaceCaptureQualityRequest() + + do { + try sequenceHandler.perform( + [detectFaceRectanglesRequest, detectCaptureQualityRequest], + on: imageBuffer, + orientation: .leftMirrored + ) + guard let faceDetections = detectFaceRectanglesRequest.results, + let faceQualityObservations = detectCaptureQualityRequest.results, + let faceObservation = faceDetections.first, + let faceQualityObservation = faceQualityObservations.first + else { + self.resultDelegate?.faceDetector( + self, didFailWithError: FaceDetectorError.noFaceDetected) + return + } + + guard faceDetections.count == 1 else { + self.resultDelegate?.faceDetector(self, didFailWithError: FaceDetectorError.multipleFacesDetected) + return + } + + let convertedBoundingBox = + self.viewDelegate?.convertFromMetadataToPreviewRect( + rect: faceObservation.boundingBox) ?? .zero + + let uiImage = UIImage(pixelBuffer: imageBuffer) + let brightness = self.calculateBrightness(uiImage) + + let faceGeometryData: FaceGeometryData + if #available(iOS 15.0, *) { + faceGeometryData = FaceGeometryData( + boundingBox: convertedBoundingBox, + roll: faceObservation.roll ?? 0.0, + yaw: faceObservation.yaw ?? 0.0, + pitch: faceObservation.pitch ?? 0.0, + direction: faceDirection(faceObservation: faceObservation) + ) + } else { // Fallback on earlier versions + faceGeometryData = FaceGeometryData( + boundingBox: convertedBoundingBox, + roll: faceObservation.roll ?? 0.0, + yaw: faceObservation.yaw ?? 
0.0, + pitch: 0.0, + direction: faceDirection(faceObservation: faceObservation) + ) + } + self.resultDelegate? + .faceDetector( + self, + didDetectFace: faceGeometryData, + withFaceQuality: faceQualityObservation.faceCaptureQuality ?? 0.0, + brightness: brightness + ) + } catch { + self.resultDelegate?.faceDetector(self, didFailWithError: error) + } + } + + func selfieQualityRequest(imageBuffer: CVPixelBuffer) throws -> SelfieQualityData { + guard let selfieQualityModel else { + throw FaceDetectorError.unableToLoadSelfieModel + } + let input = SelfieQualityDetectorInput(conv2d_193_input: imageBuffer) + + let prediction = try selfieQualityModel.prediction(input: input) + let output = prediction.Identity + + guard output.shape.count == 2, + output.shape[0] == 1, + output.shape[1] == 2 + else { + throw FaceDetectorError.invalidSelfieModelOutput + } + + let passScore = output[0].floatValue + let failScore = output[1].floatValue + + let selfieQualityData = SelfieQualityData( + failed: failScore, + passed: passScore + ) + return selfieQualityData + } + + private func cropImageToFace( + _ image: UIImage? + ) throws -> CVPixelBuffer { + guard let image, let cgImage = image.cgImage else { + throw FaceDetectorError.unableToCropImage + } + + let request = VNDetectFaceRectanglesRequest() + let handler = VNImageRequestHandler(cgImage: cgImage, options: [:]) + + try handler.perform([request]) + + guard let results = request.results, + let face = results.first + else { + throw FaceDetectorError.noFaceDetected + } + + let boundingBox = face.boundingBox + + let size = CGSize( + width: boundingBox.width * image.size.width, + height: boundingBox.height * image.size.height + ) + let origin = CGPoint( + x: boundingBox.minX * image.size.width, + y: (1 - boundingBox.minY) * image.size.height - size.height + ) + + let faceRect = CGRect(origin: origin, size: size) + + guard let croppedCGImage = cgImage.cropping(to: faceRect) else { + throw FaceDetectorError.unableToCropImage + } + + let croppedImage = UIImage(cgImage: croppedCGImage) + guard + let resizedImage = croppedImage.pixelBuffer( + width: cropSize.width, height: cropSize.height) + else { + throw FaceDetectorError.unableToCropImage + } + + return resizedImage + } + + private func calculateBrightness(_ image: UIImage?) 
-> Int { + guard let image, let cgImage = image.cgImage, + let imageData = cgImage.dataProvider?.data, + let dataPointer = CFDataGetBytePtr(imageData) + else { + return 0 + } + + let bytesPerPixel = cgImage.bitsPerPixel / cgImage.bitsPerComponent + let dataLength = CFDataGetLength(imageData) + var result = 0.0 + for index in stride(from: 0, to: dataLength, by: bytesPerPixel) { + let red = dataPointer[index] + let green = dataPointer[index + 1] + let blue = dataPointer[index + 2] + result += 0.299 * Double(red) + 0.587 * Double(green) + 0.114 * Double(blue) + } + let pixelsCount = dataLength / bytesPerPixel + let brightness = Int(result) / pixelsCount + return brightness + } + + private func faceDirection(faceObservation: VNFaceObservation) -> FaceDirection { + guard let yaw = faceObservation.yaw?.doubleValue else { + return .none + } + let yawInRadians = CGFloat(yaw) + + if yawInRadians > faceMovementThreshold { + return .right + } else if yawInRadians < -faceMovementThreshold { + return .left + } else { + return .none + } + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift b/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift index 675261cd5..1c569250e 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift +++ b/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift @@ -1,7 +1,30 @@ import Foundation -struct FaceGeometryModel: Equatable { +struct FaceGeometryData: Equatable { let boundingBox: CGRect let roll: NSNumber let yaw: NSNumber + let pitch: NSNumber + let direction: FaceDirection +} + +enum FaceDirection { + case left + case right + case none +} + +struct FaceQualityData { + let quality: Float +} + +struct SelfieQualityData { + let failed: Float + let passed: Float +} + +extension SelfieQualityData { + static var zero: SelfieQualityData { + return SelfieQualityData(failed: 0, passed: 0) + } } diff --git a/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift new file mode 100644 index 000000000..1721bfcaa --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift @@ -0,0 +1,145 @@ +import Foundation + +protocol FaceValidatorDelegate: AnyObject { + func updateValidationResult(_ result: FaceValidationResult) +} + +struct FaceValidationResult { + let userInstruction: SelfieCaptureInstruction? + let hasDetectedValidFace: Bool + let faceInBounds: Bool +} + +final class FaceValidator { + weak var delegate: FaceValidatorDelegate? + private var faceLayoutGuideFrame: CGRect = .zero + + // MARK: Constants + private let faceQualityThreshold: Float = 0.25 + private let luminanceThreshold: ClosedRange<Int> = 40...200 + private let selfiefaceBoundsMultiplier: CGFloat = 1.5 + private let livenessfaceBoundsMultiplier: CGFloat = 2.2 + private let faceBoundsThreshold: CGFloat = 50 + + init() {} + + func setLayoutGuideFrame(with frame: CGRect) { + self.faceLayoutGuideFrame = frame + } + + func validate( + faceGeometry: FaceGeometryData, + faceQuality: Float, + brightness: Int, + currentLivenessTask: LivenessTask?
+ ) { + // check face bounds + let faceBoundsState = checkFaceSizeAndPosition( + using: faceGeometry.boundingBox, + shouldCheckCentering: currentLivenessTask == nil + ) + let isAcceptableBounds = faceBoundsState == .detectedFaceAppropriateSizeAndPosition + + // check brightness + let isAcceptableBrightness = luminanceThreshold.contains(brightness) + + // check face quality + let isAcceptableFaceQuality = checkFaceQuality(faceQuality) + + // check that face is ready for capture + let hasDetectedValidFace = checkValidFace( + isAcceptableBounds, + isAcceptableBrightness, + isAcceptableFaceQuality + ) + + // determine what instruction/animation to display to users + let userInstruction = userInstruction( + from: faceBoundsState, + detectedValidFace: hasDetectedValidFace, + isAcceptableBrightness: isAcceptableBrightness, + isAcceptableFaceQuality: isAcceptableFaceQuality, + livenessTask: currentLivenessTask + ) + + let validationResult = FaceValidationResult( + userInstruction: userInstruction, + hasDetectedValidFace: hasDetectedValidFace, + faceInBounds: isAcceptableBounds + ) + delegate?.updateValidationResult(validationResult) + } + + private func userInstruction( + from faceBoundsState: FaceBoundsState, + detectedValidFace: Bool, + isAcceptableBrightness: Bool, + isAcceptableFaceQuality: Bool, + livenessTask: LivenessTask? + ) -> SelfieCaptureInstruction? { + if detectedValidFace { + if let livenessTask { + switch livenessTask { + case .lookLeft: + return .lookLeft + case .lookRight: + return .lookRight + case .lookUp: + return .lookUp + } + } + return nil + } else if !isAcceptableFaceQuality || !isAcceptableBrightness { + return .goodLight + } else if faceBoundsState == .detectedFaceOffCentre + || faceBoundsState == .detectedFaceNotWithinFrame { + return .headInFrame + } else if faceBoundsState == .detectedFaceTooSmall { + return .moveCloser + } else if faceBoundsState == .detectedFaceTooLarge { + return .moveBack + } + return nil + } + + // MARK: Validation Checks + private func checkFaceSizeAndPosition( + using boundingBox: CGRect, + shouldCheckCentering: Bool + ) -> FaceBoundsState { + let maxFaceWidth = faceLayoutGuideFrame.width - 20 + let faceBoundsMultiplier = shouldCheckCentering ? 
selfiefaceBoundsMultiplier : livenessfaceBoundsMultiplier + let minFaceWidth = faceLayoutGuideFrame.width / faceBoundsMultiplier + + // check how far/close face is + if boundingBox.width > maxFaceWidth { + return .detectedFaceTooLarge + } else if boundingBox.width < minFaceWidth { + return .detectedFaceTooSmall + } + + // check that face is centered for selfie capture only + if shouldCheckCentering { + let horizontalOffset = abs(boundingBox.midX - faceLayoutGuideFrame.midX) + let verticalOffset = abs(boundingBox.midY - faceLayoutGuideFrame.midY) + + if horizontalOffset > faceBoundsThreshold || verticalOffset > faceBoundsThreshold { + return .detectedFaceOffCentre + } + } + + return .detectedFaceAppropriateSizeAndPosition + } + + private func checkFaceQuality(_ value: Float) -> Bool { + return value >= faceQualityThreshold + } + + private func checkValidFace( + _ isAcceptableBounds: Bool, + _ isAcceptableBrightness: Bool, + _ isAcceptableFaceQuality: Bool + ) -> Bool { + return isAcceptableBounds && isAcceptableBrightness && isAcceptableFaceQuality + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift new file mode 100644 index 000000000..0c5e22a7a --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift @@ -0,0 +1,194 @@ +import Foundation +import Vision + +/// Represents the different tasks in an active liveness check. +enum LivenessTask { + case lookLeft + case lookRight + case lookUp + + static var numberOfFramesToCapture: Int { + if #available(iOS 15.0, *) { + return 2 + } else { + return 3 + } + } +} + +protocol LivenessCheckManagerDelegate: AnyObject { + func didCompleteLivenessTask() + func didCompleteLivenessChallenge() + func livenessChallengeTimeout() +} + +class LivenessCheckManager: ObservableObject { + /// The sequence of liveness tasks to be performed. + private var livenessTaskSequence: [LivenessTask] = [] + /// The index pointing to the current task in the sequence. + private var currentTaskIndex: Int = 0 + + weak var delegate: LivenessCheckManagerDelegate? + + // MARK: Constants + /// The minimum threshold for yaw (left-right head movement) + private let minYawAngleThreshold: CGFloat = 0.15 + /// The maximum threshold for yaw (left-right head movement) + private let maxYawAngleThreshold: CGFloat = 0.25 + /// The minimum threshold for pitch (up-down head movement) + private let minPitchAngleThreshold: CGFloat = 0.15 + /// The maximum threshold for pitch (up-down head movement) + private let maxPitchAngleThreshold: CGFloat = 0.30 + /// The timeout duration for each task in seconds. + private let taskTimeoutDuration: TimeInterval = 120 + + // MARK: Face Orientation Properties + @Published var lookLeftProgress: CGFloat = 0.0 + @Published var lookRightProgress: CGFloat = 0.0 + @Published var lookUpProgress: CGFloat = 0.0 + + /// The current liveness task. + private(set) var currentTask: LivenessTask? { + didSet { + if currentTask != nil { + resetTaskTimer() + } else { + stopTaskTimer() + } + } + } + /// The timer used for task timeout. + private var taskTimer: Timer? + private var elapsedTime: TimeInterval = 0.0 + + /// Initializes the LivenessCheckManager with a shuffled set of tasks. + init() { + if #available(iOS 15.0, *) { + livenessTaskSequence = [.lookLeft, .lookRight, .lookUp].shuffled() + } else { + livenessTaskSequence = [.lookLeft, .lookRight].shuffled() + } + } + + /// Cleans up resources when the manager is no longer needed. 
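+ /// The timer closure captures the manager weakly, so this mainly keeps an orphaned timer from continuing to fire on the main run loop.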
+ deinit { + stopTaskTimer() + } + + /// Resets the task timer to the initial timeout duration. + private func resetTaskTimer() { + guard taskTimer == nil else { return } + DispatchQueue.main.async { + self.taskTimer = Timer.scheduledTimer( + withTimeInterval: 1.0, + repeats: true + ) { [weak self] _ in + self?.taskTimerFired() + } + } + } + + private func taskTimerFired() { + self.elapsedTime += 1 + if self.elapsedTime == self.taskTimeoutDuration { + self.handleTaskTimeout() + } + } + + /// Stops the current task timer. + private func stopTaskTimer() { + guard taskTimer != nil else { return } + taskTimer?.invalidate() + taskTimer = nil + } + + /// Handles the timeout event for a task. + private func handleTaskTimeout() { + stopTaskTimer() + delegate?.livenessChallengeTimeout() + } + + /// Advances to the next task in the sequence + /// - Returns: `true` if there is a next task, `false` if all tasks are completed. + private func advanceToNextTask() -> Bool { + guard currentTaskIndex < livenessTaskSequence.count - 1 else { + return false + } + currentTaskIndex += 1 + currentTask = livenessTaskSequence[currentTaskIndex] + return true + } + + /// Sets the initial task for the liveness check. + func initiateLivenessCheck() { + currentTask = livenessTaskSequence[currentTaskIndex] + } + + /// Processes face geometry data and checks for task completion + /// - Parameter faceGeometry: The current face geometry data. + func processFaceGeometry(_ faceGeometry: FaceGeometryData) { + let yawValue = CGFloat(faceGeometry.yaw.doubleValue) + let pitchValue = CGFloat(faceGeometry.pitch.doubleValue) + updateFaceOrientationValues(yawValue, pitchValue) + } + + /// Updates the face orientation values based on the given face geometry. + /// - Parameter faceGeometry: The current face geometry data. + private func updateFaceOrientationValues( + _ yawValue: CGFloat, + _ pitchValue: CGFloat + ) { + guard let currentTask = currentTask else { return } + + switch currentTask { + case .lookLeft: + if yawValue < -minYawAngleThreshold { + let progress = + yawValue + .normalized(min: -minYawAngleThreshold, max: -maxYawAngleThreshold) + lookLeftProgress = min(max(lookLeftProgress, progress), 1.0) + if lookLeftProgress == 1.0 { + completeCurrentTask() + } + } + case .lookRight: + if yawValue > minYawAngleThreshold { + let progress = + yawValue + .normalized(min: minYawAngleThreshold, max: maxYawAngleThreshold) + lookRightProgress = min(max(lookRightProgress, progress), 1.0) + if lookRightProgress == 1.0 { + completeCurrentTask() + } + } + case .lookUp: + if pitchValue < -minPitchAngleThreshold { + let progress = + pitchValue + .normalized(min: -minPitchAngleThreshold, max: -maxPitchAngleThreshold) + lookUpProgress = min(max(lookUpProgress, progress), 1.0) + if lookUpProgress == 1.0 { + completeCurrentTask() + } + } + } + } + + /// Completes the current task and moves to the next one. + /// If all tasks are completed, it signals the completion of the liveness challenge. 
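+ /// Progress is latched (it never decreases) and capped at 1.0, so each task completes exactly once even if the head keeps moving.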
+ private func completeCurrentTask() { + delegate?.didCompleteLivenessTask() + + if !advanceToNextTask() { + // Liveness challenge complete + delegate?.didCompleteLivenessChallenge() + self.currentTask = nil + } + } +} + +extension CGFloat { + func normalized(min: CGFloat, max: CGFloat) -> CGFloat { + return (self - min) / (max - min) + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift b/Sources/SmileID/Classes/FaceDetector/Models.swift similarity index 75% rename from Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift rename to Sources/SmileID/Classes/FaceDetector/Models.swift index 83687b18c..6566942c1 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift +++ b/Sources/SmileID/Classes/FaceDetector/Models.swift @@ -1,19 +1,15 @@ import Foundation enum FaceDetectionState: Equatable { - case sceneUnstable - case finalFrame - case multipleFacesDetected case faceDetected case noFaceDetected case faceDetectionErrored - case smileFrame } -enum FaceObservation<T, E: Error>: Equatable { +enum FaceObservation<T> { case faceFound(T) case faceNotFound - case errored(E) + case errored(Error) } enum FaceBoundsState { @@ -21,6 +17,7 @@ case detectedFaceTooSmall case detectedFaceTooLarge case detectedFaceOffCentre + case detectedFaceNotWithinFrame case detectedFaceAppropriateSizeAndPosition } diff --git a/Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift b/Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift new file mode 100644 index 000000000..e87c214c6 --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift @@ -0,0 +1,307 @@ +// swiftlint:disable all +// +// SelfieQualityDetector.swift +// +// This file was automatically generated and should not be edited. +// + +import CoreML + + +/// Model Prediction Input Type +@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *) +class SelfieQualityDetectorInput : MLFeatureProvider { + + /// conv2d_193_input as color (kCVPixelFormatType_32BGRA) image buffer, 120 pixels wide by 120 pixels high + var conv2d_193_input: CVPixelBuffer + + var featureNames: Set<String> { + get { + return ["conv2d_193_input"] + } + } + + func featureValue(for featureName: String) -> MLFeatureValue? { + if (featureName == "conv2d_193_input") { + return MLFeatureValue(pixelBuffer: conv2d_193_input) + } + return nil + } + + init(conv2d_193_input: CVPixelBuffer) { + self.conv2d_193_input = conv2d_193_input + } + + convenience init(conv2d_193_inputWith conv2d_193_input: CGImage) throws { + self.init(conv2d_193_input: try MLFeatureValue(cgImage: conv2d_193_input, pixelsWide: 120, pixelsHigh: 120, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!) + } + + convenience init(conv2d_193_inputAt conv2d_193_input: URL) throws { + self.init(conv2d_193_input: try MLFeatureValue(imageAt: conv2d_193_input, pixelsWide: 120, pixelsHigh: 120, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!) + } + + func setConv2d_193_input(with conv2d_193_input: CGImage) throws { + self.conv2d_193_input = try MLFeatureValue(cgImage: conv2d_193_input, pixelsWide: 120, pixelsHigh: 120, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue! + } + + func setConv2d_193_input(with conv2d_193_input: URL) throws { + self.conv2d_193_input = try MLFeatureValue(imageAt: conv2d_193_input, pixelsWide: 120, pixelsHigh: 120, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!
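+ // Force-unwrapping imageBufferValue follows Xcode's generated-model convention: a feature value created with an image pixel format always carries a pixel buffer.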
+ } + +} + + +/// Model Prediction Output Type +@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *) +class SelfieQualityDetectorOutput : MLFeatureProvider { + + /// Source provided by CoreML + private let provider : MLFeatureProvider + + /// Identity as multidimensional array of floats + var Identity: MLMultiArray { + return self.provider.featureValue(for: "Identity")!.multiArrayValue! + } + + /// Identity as multidimensional array of floats + @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *) + var IdentityShapedArray: MLShapedArray<Float32> { + return MLShapedArray<Float32>(self.Identity) + } + + var featureNames: Set<String> { + return self.provider.featureNames + } + + func featureValue(for featureName: String) -> MLFeatureValue? { + return self.provider.featureValue(for: featureName) + } + + init(Identity: MLMultiArray) { + self.provider = try! MLDictionaryFeatureProvider(dictionary: ["Identity" : MLFeatureValue(multiArray: Identity)]) + } + + init(features: MLFeatureProvider) { + self.provider = features + } +} + + +/// Class for model loading and prediction +@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *) +class SelfieQualityDetector { + let model: MLModel + + /// URL of model assuming it was installed in the same bundle as this class + class var urlOfModelInThisBundle : URL { + let bundle = SmileIDResourcesHelper.bundle + return bundle.url(forResource: "SelfieQualityDetector", withExtension:"mlmodelc")! + } + + /** + Construct SelfieQualityDetector instance with an existing MLModel object. + + Usually the application does not use this initializer unless it makes a subclass of SelfieQualityDetector. + Such application may want to use `MLModel(contentsOfURL:configuration:)` and `SelfieQualityDetector.urlOfModelInThisBundle` to create a MLModel object to pass-in. + + - parameters: + - model: MLModel object + */ + init(model: MLModel) { + self.model = model + } + + /** + Construct SelfieQualityDetector instance by automatically loading the model from the app's bundle. + */ + @available(*, deprecated, message: "Use init(configuration:) instead and handle errors appropriately.") + convenience init() { + try! self.init(contentsOf: type(of:self).urlOfModelInThisBundle) + } + + /** + Construct a model with configuration + + - parameters: + - configuration: the desired model configuration + + - throws: an NSError object that describes the problem + */ + convenience init(configuration: MLModelConfiguration) throws { + try self.init(contentsOf: type(of:self).urlOfModelInThisBundle, configuration: configuration) + } + + /** + Construct SelfieQualityDetector instance with explicit path to mlmodelc file + - parameters: + - modelURL: the file url of the model + + - throws: an NSError object that describes the problem + */ + convenience init(contentsOf modelURL: URL) throws { + try self.init(model: MLModel(contentsOf: modelURL)) + } + + /** + Construct a model with URL of the .mlmodelc directory and configuration + + - parameters: + - modelURL: the file url of the model + - configuration: the desired model configuration + + - throws: an NSError object that describes the problem + */ + convenience init(contentsOf modelURL: URL, configuration: MLModelConfiguration) throws { + try self.init(model: MLModel(contentsOf: modelURL, configuration: configuration)) + } + + /** + Construct SelfieQualityDetector instance asynchronously with optional configuration. + + Model loading may take time when the model content is not immediately available (e.g. encrypted model).
Use this factory method especially when the caller is on the main thread. + + - parameters: + - configuration: the desired model configuration + - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully + */ + @available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) + class func load(configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<SelfieQualityDetector, Error>) -> Void) { + return self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration, completionHandler: handler) + } + + /** + Construct SelfieQualityDetector instance asynchronously with optional configuration. + + Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread. + + - parameters: + - configuration: the desired model configuration + */ + @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *) + class func load(configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> SelfieQualityDetector { + return try await self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration) + } + + /** + Construct SelfieQualityDetector instance asynchronously with URL of the .mlmodelc directory with optional configuration. + + Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread. + + - parameters: + - modelURL: the URL to the model + - configuration: the desired model configuration + - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully + */ + @available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) + class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<SelfieQualityDetector, Error>) -> Void) { + MLModel.load(contentsOf: modelURL, configuration: configuration) { result in + switch result { + case .failure(let error): + handler(.failure(error)) + case .success(let model): + handler(.success(SelfieQualityDetector(model: model))) + } + } + } + + /** + Construct SelfieQualityDetector instance asynchronously with URL of the .mlmodelc directory with optional configuration. + + Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+ + - parameters: + - modelURL: the URL to the model + - configuration: the desired model configuration + */ + @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *) + class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> SelfieQualityDetector { + let model = try await MLModel.load(contentsOf: modelURL, configuration: configuration) + return SelfieQualityDetector(model: model) + } + + /** + Make a prediction using the structured interface + + - parameters: + - input: the input to the prediction as SelfieQualityDetectorInput + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as SelfieQualityDetectorOutput + */ + func prediction(input: SelfieQualityDetectorInput) throws -> SelfieQualityDetectorOutput { + return try self.prediction(input: input, options: MLPredictionOptions()) + } + + /** + Make a prediction using the structured interface + + - parameters: + - input: the input to the prediction as SelfieQualityDetectorInput + - options: prediction options + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as SelfieQualityDetectorOutput + */ + func prediction(input: SelfieQualityDetectorInput, options: MLPredictionOptions) throws -> SelfieQualityDetectorOutput { + let outFeatures = try model.prediction(from: input, options:options) + return SelfieQualityDetectorOutput(features: outFeatures) + } + + /** + Make an asynchronous prediction using the structured interface + + - parameters: + - input: the input to the prediction as SelfieQualityDetectorInput + - options: prediction options + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as SelfieQualityDetectorOutput + */ + @available(macOS 14.0, iOS 17.0, tvOS 17.0, watchOS 10.0, *) + func prediction(input: SelfieQualityDetectorInput, options: MLPredictionOptions = MLPredictionOptions()) async throws -> SelfieQualityDetectorOutput { + let outFeatures = try await model.prediction(from: input, options:options) + return SelfieQualityDetectorOutput(features: outFeatures) + } + + /** + Make a prediction using the convenience interface + + - parameters: + - conv2d_193_input as color (kCVPixelFormatType_32BGRA) image buffer, 120 pixels wide by 120 pixels high + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as SelfieQualityDetectorOutput + */ + func prediction(conv2d_193_input: CVPixelBuffer) throws -> SelfieQualityDetectorOutput { + let input_ = SelfieQualityDetectorInput(conv2d_193_input: conv2d_193_input) + return try self.prediction(input: input_) + } + + /** + Make a batch prediction using the structured interface + + - parameters: + - inputs: the inputs to the prediction as [SelfieQualityDetectorInput] + - options: prediction options + + - throws: an NSError object that describes the problem + + - returns: the result of the prediction as [SelfieQualityDetectorOutput] + */ + func predictions(inputs: [SelfieQualityDetectorInput], options: MLPredictionOptions = MLPredictionOptions()) throws -> [SelfieQualityDetectorOutput] { + let batchIn = MLArrayBatchProvider(array: inputs) + let batchOut = try model.predictions(from: batchIn, options: options) + var results : [SelfieQualityDetectorOutput] = [] + results.reserveCapacity(inputs.count) + for i in 0.. CVPixelBuffer? 
{ + return pixelBuffer(width: width, height: height, + pixelFormatType: kCVPixelFormatType_32ARGB, + colorSpace: CGColorSpaceCreateDeviceRGB(), + alphaInfo: .noneSkipFirst) + } + + /** + Resizes the image to `width` x `height` and converts it to a `CVPixelBuffer` + with the specified pixel format, color space, and alpha channel. + */ + public func pixelBuffer(width: Int, height: Int, + pixelFormatType: OSType, + colorSpace: CGColorSpace, + alphaInfo: CGImageAlphaInfo) -> CVPixelBuffer? { + var maybePixelBuffer: CVPixelBuffer? + let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, + kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] + let status = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + pixelFormatType, + attrs as CFDictionary, + &maybePixelBuffer) + + guard status == kCVReturnSuccess, let pixelBuffer = maybePixelBuffer else { + return nil + } + + let flags = CVPixelBufferLockFlags(rawValue: 0) + guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(pixelBuffer, flags) else { + return nil + } + defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, flags) } + + guard let context = CGContext(data: CVPixelBufferGetBaseAddress(pixelBuffer), + width: width, + height: height, + bitsPerComponent: 8, + bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), + space: colorSpace, + bitmapInfo: alphaInfo.rawValue) + else { + return nil + } + + UIGraphicsPushContext(context) + context.translateBy(x: 0, y: CGFloat(height)) + context.scaleBy(x: 1, y: -1) + self.draw(in: CGRect(x: 0, y: 0, width: width, height: height)) + UIGraphicsPopContext() + + return pixelBuffer + } +} + +extension UIImage { + /** + Creates a new UIImage from a CVPixelBuffer. + + - Note: Not all CVPixelBuffer pixel formats support conversion into a + CGImage-compatible pixel format. + */ + public convenience init?(pixelBuffer: CVPixelBuffer) { + if let cgImage = CGImage.create(pixelBuffer: pixelBuffer) { + self.init(cgImage: cgImage) + } else { + return nil + } + } +} diff --git a/Sources/SmileID/Classes/Helpers/Backport.swift b/Sources/SmileID/Classes/Helpers/Backport.swift new file mode 100644 index 000000000..118f4b594 --- /dev/null +++ b/Sources/SmileID/Classes/Helpers/Backport.swift @@ -0,0 +1,52 @@ +// swiftlint:disable all +import SwiftUI +import ObjectiveC + +/// Provides a convenient method for backporting API, +/// including types, functions, properties, property wrappers and more. +/// +/// To backport a SwiftUI Label for example, you could apply the +/// following extension: +/// +/// extension Backport where Content == Any { +/// public struct Label { } +/// } +/// +/// Now if we want to provide further extensions to our backport type, +/// we need to ensure we retain the `Content == Any` generic requirement: +/// +/// extension Backport.Label where Content == Any, Title == Text, Icon == Image { +/// public init(_ title: S, systemName: String) { } +/// } +/// +/// In addition to types, we can also provide backports for properties +/// and methods: +/// +/// extension Backport.Label where Content: View { +/// func onChange(of value: Value, perform action: (Value) -> Void) -> some View { +/// // `content` provides access to the extended type +/// content.modifier(OnChangeModifier(value, action)) +/// } +/// } +/// +public struct Backport<Wrapped> { + /// The underlying content this backport represents. + public let wrapped: Wrapped + + /// Initializes a new Backport for the specified content.
+ /// - Parameter content: The content (type) that's being backported + public init(_ wrapped: Wrapped) { + self.wrapped = wrapped + } +} + +public extension Backport where Wrapped == Any { + init(_ wrapped: Wrapped) { + self.wrapped = wrapped + } +} + +public extension NSObjectProtocol { + /// Wraps an `NSObject` that can be extended to provide backport functionality. + var backport: Backport<Self> { .init(self) } +} diff --git a/Sources/SmileID/Classes/Helpers/DMSansFont.swift b/Sources/SmileID/Classes/Helpers/DMSansFont.swift new file mode 100644 index 000000000..e4ea80da5 --- /dev/null +++ b/Sources/SmileID/Classes/Helpers/DMSansFont.swift @@ -0,0 +1,36 @@ +import SwiftUI + +enum DMSans: String, CaseIterable { + case regular = "DMSans-Regular" + case medium = "DMSans-Medium" + case bold = "DMSans-Bold" +} + +public struct DMSansFont: FontType { + public static var medium: Font { + medium(with: SmileIDResourcesHelper.pointSize) + } + + public static var bold: Font { + bold(with: SmileIDResourcesHelper.pointSize) + } + + public static var pointSize: CGFloat { + SmileIDResourcesHelper.pointSize + } + + public static func regular(with size: CGFloat) -> Font { + SmileIDResourcesHelper.loadFontIfNeeded(name: DMSans.regular.rawValue) + return Font.custom(DMSans.regular.rawValue, size: size) + } + + public static func medium(with size: CGFloat) -> Font { + SmileIDResourcesHelper.loadFontIfNeeded(name: DMSans.medium.rawValue) + return Font.custom(DMSans.medium.rawValue, size: size) + } + + public static func bold(with size: CGFloat) -> Font { + SmileIDResourcesHelper.loadFontIfNeeded(name: DMSans.bold.rawValue) + return Font.custom(DMSans.bold.rawValue, size: size) + } +} diff --git a/Sources/SmileID/Classes/Helpers/DeviceRotationViewModifier.swift b/Sources/SmileID/Classes/Helpers/DeviceRotationViewModifier.swift new file mode 100644 index 000000000..4ca0cc07c --- /dev/null +++ b/Sources/SmileID/Classes/Helpers/DeviceRotationViewModifier.swift @@ -0,0 +1,19 @@ +import SwiftUI + +struct DeviceRotationViewModifier: ViewModifier { + let action: (UIDeviceOrientation) -> Void + + func body(content: Content) -> some View { + content + .onAppear() + .onReceive(NotificationCenter.default.publisher(for: UIDevice.orientationDidChangeNotification)) { _ in + action(UIDevice.current.orientation) + } + } +} + +extension View { + func onRotate(perform action: @escaping (UIDeviceOrientation) -> Void) -> some View { + self.modifier(DeviceRotationViewModifier(action: action)) + } +} diff --git a/Sources/SmileID/Classes/Helpers/HapticManager.swift b/Sources/SmileID/Classes/Helpers/HapticManager.swift new file mode 100644 index 000000000..fd1cf28b3 --- /dev/null +++ b/Sources/SmileID/Classes/Helpers/HapticManager.swift @@ -0,0 +1,23 @@ +import UIKit + +class HapticManager { + static let shared = HapticManager() + + private init() {} + + // MARK: Notification Feedback + + /// Triggers a notification haptic feedback + /// - Parameter type: The notification type (success, warning, error) + func notification(type: UINotificationFeedbackGenerator.FeedbackType) { + let generator = UINotificationFeedbackGenerator() + generator.notificationOccurred(type) + } + + // MARK: Impact Feedback + + func impact(style: UIImpactFeedbackGenerator.FeedbackStyle) { + let generator = UIImpactFeedbackGenerator(style: style) + generator.impactOccurred() + } +} diff --git a/Sources/SmileID/Classes/Helpers/NavigationHelper.swift b/Sources/SmileID/Classes/Helpers/NavigationHelper.swift index 0936b1d9e..fb8e26e6a 100644 ---
a/Sources/SmileID/Classes/Helpers/NavigationHelper.swift +++ b/Sources/SmileID/Classes/Helpers/NavigationHelper.swift @@ -16,3 +16,14 @@ extension View { } } } + +public struct ModalModeKey: EnvironmentKey { + public static let defaultValue = Binding<Bool>.constant(false) +} + +extension EnvironmentValues { + public var modalMode: Binding<Bool> { + get { self[ModalModeKey.self] } + set { self[ModalModeKey.self] = newValue } + } +} diff --git a/Sources/SmileID/Classes/Helpers/SmileIDResourcesHelper.swift b/Sources/SmileID/Classes/Helpers/SmileIDResourcesHelper.swift index ef89fb622..9d6597e8c 100644 --- a/Sources/SmileID/Classes/Helpers/SmileIDResourcesHelper.swift +++ b/Sources/SmileID/Classes/Helpers/SmileIDResourcesHelper.swift @@ -76,6 +76,9 @@ public class SmileIDResourcesHelper { public static var ConsentContactDetails = SmileIDResourcesHelper.image("ConsentContactDetails")! public static var ConsentDocumentInfo = SmileIDResourcesHelper.image("ConsentDocumentInfo")! public static var ConsentPersonalInfo = SmileIDResourcesHelper.image("ConsentPersonalInfo")! + public static var Loader = SmileIDResourcesHelper.image("Loader")! + public static var Checkmark = SmileIDResourcesHelper.image("Checkmark")! + public static var Xmark = SmileIDResourcesHelper.image("Xmark")! /// Size of font. public static let pointSize: CGFloat = 16 diff --git a/Sources/SmileID/Classes/Helpers/StateObject.swift b/Sources/SmileID/Classes/Helpers/StateObject.swift new file mode 100644 index 000000000..098f2392f --- /dev/null +++ b/Sources/SmileID/Classes/Helpers/StateObject.swift @@ -0,0 +1,151 @@ +// swiftlint:disable all +import Combine +import SwiftUI + +@available(iOS, deprecated: 14.0) +@available(macOS, deprecated: 11.0) +@available(tvOS, deprecated: 14.0) +@available(watchOS, deprecated: 7.0) +public extension Backport where Wrapped: ObservableObject { + + /// A property wrapper type that instantiates an observable object. + /// + /// Create a state object in a ``SwiftUI/View``, ``SwiftUI/App``, or + /// ``SwiftUI/Scene`` by applying the `@Backport.StateObject` attribute to a property + /// declaration and providing an initial value that conforms to the + /// <doc://com.apple.documentation/documentation/Combine/ObservableObject> + /// protocol: + /// + /// @Backport.StateObject var model = DataModel() + /// + /// SwiftUI creates a new instance of the object only once for each instance of + /// the structure that declares the object. When published properties of the + /// observable object change, SwiftUI updates the parts of any view that depend + /// on those properties: + /// + /// Text(model.title) // Updates the view any time `title` changes. + /// + /// You can pass the state object into a property that has the + /// ``SwiftUI/ObservedObject`` attribute. You can alternatively add the object + /// to the environment of a view hierarchy by applying the + /// ``SwiftUI/View/environmentObject(_:)`` modifier: + /// + /// ContentView() + /// .environmentObject(model) + /// + /// If you create an environment object as shown in the code above, you can + /// read the object inside `ContentView` or any of its descendants + /// using the ``SwiftUI/EnvironmentObject`` attribute: + /// + /// @EnvironmentObject var model: DataModel + /// + /// Get a ``SwiftUI/Binding`` to one of the state object's properties using the + /// `$` operator. Use a binding when you want to create a two-way connection to + /// one of the object's properties.
For example, you can let a + /// ``SwiftUI/Toggle`` control a Boolean value called `isEnabled` stored in the + /// model: + /// + /// Toggle("Enabled", isOn: $model.isEnabled) + @propertyWrapper struct StateObject: DynamicProperty { + private final class Wrapper: ObservableObject { + private var subject = PassthroughSubject<Void, Never>() + + var value: Wrapped? { + didSet { + cancellable = nil + cancellable = value?.objectWillChange + .sink { [subject] _ in subject.send() } + } + } + + private var cancellable: AnyCancellable? + + var objectWillChange: AnyPublisher<Void, Never> { + subject.eraseToAnyPublisher() + } + } + + @State private var state = Wrapper() + + @ObservedObject private var observedObject = Wrapper() + + private var thunk: () -> Wrapped + + /// The underlying value referenced by the state object. + /// + /// The wrapped value property provides primary access to the value's data. + /// However, you don't access `wrappedValue` directly. Instead, use the + /// property variable created with the `@Backport.StateObject` attribute: + /// + /// @Backport.StateObject var contact = Contact() + /// + /// var body: some View { + /// Text(contact.name) // Accesses contact's wrapped value. + /// } + /// + /// When you change a property of the wrapped value, you can access the new + /// value immediately. However, SwiftUI updates views displaying the value + /// asynchronously, so the user interface might not update immediately. + public var wrappedValue: Wrapped { + if let object = state.value { + return object + } else { + let object = thunk() + state.value = object + return object + } + } + + /// A projection of the state object that creates bindings to its + /// properties. + /// + /// Use the projected value to pass a binding value down a view hierarchy. + /// To get the projected value, prefix the property variable with `$`. For + /// example, you can get a binding to a model's `isEnabled` Boolean so that + /// a ``SwiftUI/Toggle`` view can control the value: + /// + /// struct MyView: View { + /// @Backport.StateObject var model = DataModel() + /// + /// var body: some View { + /// Toggle("Enabled", isOn: $model.isEnabled) + /// } + /// } + public var projectedValue: ObservedObject<Wrapped>.Wrapper { + ObservedObject(wrappedValue: wrappedValue).projectedValue + } + + /// Creates a new state object with an initial wrapped value. + /// + /// You don’t call this initializer directly. Instead, declare a property + /// with the `@Backport.StateObject` attribute in a ``SwiftUI/View``, + /// ``SwiftUI/App``, or ``SwiftUI/Scene``, and provide an initial value: + /// + /// struct MyView: View { + /// @Backport.StateObject var model = DataModel() + /// + /// // ... + /// } + /// + /// SwiftUI creates only one instance of the state object for each + /// container instance that you declare. In the code above, SwiftUI + /// creates `model` only the first time it initializes a particular instance + /// of `MyView`. On the other hand, each different instance of `MyView` + /// receives a distinct copy of the data model. + /// + /// - Parameter thunk: An initial value for the state object.
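+ /// The @autoclosure keeps this expression from being evaluated until the wrapped value is first read, matching SwiftUI's own StateObject semantics.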
+ public init(wrappedValue thunk: @autoclosure @escaping () -> Wrapped) { + self.thunk = thunk + } + + public mutating func update() { + if state.value == nil { + state.value = thunk() + } + if observedObject.value !== state.value { + observedObject.value = state.value + } + } + } + +} diff --git a/Sources/SmileID/Classes/Helpers/Theme.swift b/Sources/SmileID/Classes/Helpers/Theme.swift index e3a73b020..40ba54432 100644 --- a/Sources/SmileID/Classes/Helpers/Theme.swift +++ b/Sources/SmileID/Classes/Helpers/Theme.swift @@ -68,19 +68,23 @@ public extension SmileIdTheme { // TO-DO: Rename fonts when Kwame comes up with a naming convention var header1: Font { - EpilogueFont.bold(with: 32) + DMSansFont.bold(with: 24) } var header2: Font { - EpilogueFont.bold(with: 20) + DMSansFont.bold(with: 20) + } + + var header3: Font { + DMSansFont.medium(with: 20) } var header4: Font { - EpilogueFont.bold(with: 16) + DMSansFont.medium(with: 16) } var header5: Font { - EpilogueFont.medium(with: 12) + DMSansFont.medium(with: 12) } var button: Font { @@ -88,7 +92,7 @@ public extension SmileIdTheme { } var body: Font { - EpilogueFont.medium(with: 14) + DMSansFont.regular(with: 16) } } diff --git a/Sources/SmileID/Classes/Networking/Models/FailureReason.swift b/Sources/SmileID/Classes/Networking/Models/FailureReason.swift new file mode 100644 index 000000000..f68bcd028 --- /dev/null +++ b/Sources/SmileID/Classes/Networking/Models/FailureReason.swift @@ -0,0 +1,17 @@ +import Foundation + +public enum FailureReason: Encodable { + case mobileActiveLivenessTimeout + + private enum CodingKeys: String, CodingKey { + case mobileActiveLivenessTimeout = "mobile_active_liveness_timed_out" + } + + public func encode(to encoder: any Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + switch self { + case .mobileActiveLivenessTimeout: + try container.encode(true, forKey: .mobileActiveLivenessTimeout) + } + } +} diff --git a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift index 06ff2d7c1..5ad912cad 100644 --- a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift +++ b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift @@ -11,10 +11,11 @@ public struct Metadata: Codable { Metadata(items: [ .sdk, .sdkVersion, + .activeLivenessVersion, .clientIP, .fingerprint, .deviceModel, - .deviceOS, + .deviceOS ]) } @@ -49,15 +50,34 @@ public class Metadatum: Codable { } public static let sdk = Metadatum(name: "sdk", value: "iOS") - public static let sdkVersion = Metadatum(name: "sdk_version", value: SmileID.version) - public static let clientIP = Metadatum(name: "client_ip", value: getIPAddress(useIPv4: true)) - public static let fingerprint = Metadatum(name: "fingerprint", value: SmileID.deviceId) - public static let deviceModel = Metadatum(name: "device_model", value: UIDevice.current.modelName) - public static let deviceOS = Metadatum(name: "device_os", value: UIDevice.current.systemVersion) + public static let sdkVersion = Metadatum( + name: "sdk_version", value: SmileID.version) + public static let activeLivenessVersion = Metadatum( + name: "active_liveness_version", value: "1.0.0") + public static let clientIP = Metadatum( + name: "client_ip", value: getIPAddress(useIPv4: true)) + public static let fingerprint = Metadatum( + name: "fingerprint", value: SmileID.deviceId) + public static let deviceModel = Metadatum( + name: "device_model", value: UIDevice.current.modelName) + public static let deviceOS = Metadatum( + 
name: "device_os", value: UIDevice.current.systemVersion) + + public class ActiveLivenessType: Metadatum { + public init(livenessType: LivenessType) { + super.init( + name: "active_liveness_type", value: livenessType.rawValue) + } + + public required init(from decoder: Decoder) throws { + try super.init(from: decoder) + } + } public class SelfieImageOrigin: Metadatum { public init(cameraFacing: CameraFacingValue) { - super.init(name: "selfie_image_origin", value: cameraFacing.rawValue) + super.init( + name: "selfie_image_origin", value: cameraFacing.rawValue) } public required init(from decoder: Decoder) throws { @@ -67,7 +87,9 @@ public class Metadatum: Codable { public class SelfieCaptureDuration: Metadatum { public init(duration: TimeInterval) { - super.init(name: "selfie_capture_duration_ms", value: String(Int(duration * 1000))) + super.init( + name: "selfie_capture_duration_ms", + value: String(Int(duration * 1000))) } public required init(from decoder: Decoder) throws { @@ -77,7 +99,8 @@ public class Metadatum: Codable { public class DocumentFrontImageOrigin: Metadatum { public init(origin: DocumentImageOriginValue) { - super.init(name: "document_front_image_origin", value: origin.rawValue) + super.init( + name: "document_front_image_origin", value: origin.rawValue) } public required init(from decoder: Decoder) throws { @@ -87,7 +110,8 @@ public class Metadatum: Codable { public class DocumentBackImageOrigin: Metadatum { public init(origin: DocumentImageOriginValue) { - super.init(name: "document_back_image_origin", value: origin.rawValue) + super.init( + name: "document_back_image_origin", value: origin.rawValue) } public required init(from decoder: Decoder) throws { @@ -97,7 +121,8 @@ public class Metadatum: Codable { public class DocumentFrontCaptureRetries: Metadatum { public init(retries: Int) { - super.init(name: "document_front_capture_retries", value: String(retries)) + super.init( + name: "document_front_capture_retries", value: String(retries)) } public required init(from decoder: Decoder) throws { @@ -107,7 +132,8 @@ public class Metadatum: Codable { public class DocumentBackCaptureRetries: Metadatum { public init(retries: Int) { - super.init(name: "document_back_capture_retries", value: String(retries)) + super.init( + name: "document_back_capture_retries", value: String(retries)) } public required init(from decoder: Decoder) throws { @@ -117,7 +143,9 @@ public class Metadatum: Codable { public class DocumentFrontCaptureDuration: Metadatum { public init(duration: TimeInterval) { - super.init(name: "document_front_capture_duration_ms", value: String(Int(duration * 1000))) + super.init( + name: "document_front_capture_duration_ms", + value: String(Int(duration * 1000))) } public required init(from decoder: Decoder) throws { @@ -127,7 +155,9 @@ public class Metadatum: Codable { public class DocumentBackCaptureDuration: Metadatum { public init(duration: TimeInterval) { - super.init(name: "document_back_capture_duration_ms", value: String(Int(duration * 1000))) + super.init( + name: "document_back_capture_duration_ms", + value: String(Int(duration * 1000))) } public required init(from decoder: Decoder) throws { @@ -136,6 +166,11 @@ public class Metadatum: Codable { } } +public enum LivenessType: String, Codable { + case headPose = "head_pose" + case smile = "smile" +} + public enum DocumentImageOriginValue: String { case gallery case cameraAutoCapture = "camera_auto_capture" @@ -170,20 +205,21 @@ func getIPAddress(useIPv4: Bool) -> String { if addrFamily == UInt8(AF_INET) || 
addrFamily == UInt8(AF_INET6) { let name = String(cString: interface.ifa_name) if name == "en0" || name == "en1" || name == "pdp_ip0" - || name == "pdp_ip1" || name == "pdp_ip2" || name == "pdp_ip3" - { + || name == "pdp_ip1" || name == "pdp_ip2" || name == "pdp_ip3" { var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST)) - getnameinfo(interface.ifa_addr, socklen_t(interface.ifa_addr.pointee.sa_len), - &hostname, socklen_t(hostname.count), - nil, socklen_t(0), NI_NUMERICHOST) + getnameinfo( + interface.ifa_addr, + socklen_t(interface.ifa_addr.pointee.sa_len), + &hostname, socklen_t(hostname.count), + nil, socklen_t(0), NI_NUMERICHOST) address = String(cString: hostname) - if (useIPv4 && addrFamily == UInt8(AF_INET)) || - (!useIPv4 && addrFamily == UInt8(AF_INET6)) - { + if (useIPv4 && addrFamily == UInt8(AF_INET)) + || (!useIPv4 && addrFamily == UInt8(AF_INET6)) { if !useIPv4 { if let percentIndex = address.firstIndex(of: "%") { - address = String(address[..(to path: PathType, with body: T) async throws -> U - + /// Get service call to a particular path /// - Parameters: /// - path: Endpoint to execute the GET call. func get(to path: PathType) async throws -> U - + // POST service call to make a multipart request. /// - Parameters: /// - path: Endpoint to execute the POST call. @@ -31,9 +31,10 @@ protocol ServiceRunnable { callbackUrl: String?, sandboxResult: Int?, allowNewEnroll: Bool?, + failureReason: FailureReason?, metadata: Metadata ) async throws -> SmartSelfieResponse - + /// PUT service call to a particular path with a body. /// - Parameters: /// - data: Data to be uploaded @@ -53,7 +54,7 @@ extension ServiceRunnable { } return URL(string: SmileID.config.prodLambdaUrl) } - + func post( to path: PathType, with body: T @@ -66,7 +67,7 @@ extension ServiceRunnable { ) return try await serviceClient.send(request: request) } - + func get(to path: PathType) async throws -> U { let request = try createRestRequest( path: path, @@ -74,7 +75,7 @@ extension ServiceRunnable { ) return try await serviceClient.send(request: request) } - + func multipart( to path: PathType, signature: String, @@ -86,6 +87,7 @@ extension ServiceRunnable { callbackUrl: String? = nil, sandboxResult: Int? = nil, allowNewEnroll: Bool? = nil, + failureReason: FailureReason? 
= nil, metadata: Metadata = Metadata.default() ) async throws -> SmartSelfieResponse { let boundary = generateBoundary() @@ -108,14 +110,15 @@ extension ServiceRunnable { callbackUrl: callbackUrl?.nilIfEmpty(), sandboxResult: sandboxResult, allowNewEnroll: allowNewEnroll, + failureReason: failureReason, metadata: metadata, boundary: boundary ) ) - + return try await serviceClient.multipart(request: request) } - + private func createMultiPartRequest( url: PathType, method: RestMethod, @@ -126,15 +129,15 @@ extension ServiceRunnable { guard var baseURL = baseURL?.absoluteString else { throw URLError(.badURL) } - + if let range = baseURL.range(of: "/v1/", options: .backwards) { baseURL.removeSubrange(range) } - + guard let url = URL(string: baseURL)?.appendingPathComponent(path) else { throw URLError(.badURL) } - + let request = RestRequest( url: url, method: method, @@ -143,7 +146,7 @@ extension ServiceRunnable { ) return request } - + func upload( data: Data, to url: String, @@ -157,7 +160,7 @@ extension ServiceRunnable { ) return try await serviceClient.upload(request: uploadRequest) } - + private func createUploadRequest( url: String, method: RestMethod, @@ -176,7 +179,7 @@ extension ServiceRunnable { ) return request } - + private func createRestRequest( path: PathType, method: RestMethod, @@ -188,7 +191,7 @@ extension ServiceRunnable { guard let url = baseURL?.appendingPathComponent(path) else { throw URLError(.badURL) } - + do { let request = try RestRequest( url: url, @@ -202,7 +205,7 @@ extension ServiceRunnable { throw error } } - + private func createRestRequest( path: PathType, method: RestMethod, @@ -212,7 +215,7 @@ extension ServiceRunnable { guard let url = baseURL?.appendingPathComponent(path) else { throw URLError(.badURL) } - + let request = RestRequest( url: url, method: method, @@ -220,11 +223,11 @@ extension ServiceRunnable { ) return request } - + func generateBoundary() -> String { return UUID().uuidString } - + // swiftlint:disable line_length cyclomatic_complexity func createMultiPartRequestData( selfieImage: MultipartBody, @@ -234,67 +237,75 @@ extension ServiceRunnable { callbackUrl: String?, sandboxResult: Int?, allowNewEnroll: Bool?, + failureReason: FailureReason?, metadata: Metadata = Metadata.default(), boundary: String ) -> Data { let lineBreak = "\r\n" var body = Data() - + // Append parameters if available if let parameters = partnerParams { if let boundaryData = "--\(boundary)\(lineBreak)".data(using: .utf8), - let dispositionData = "Content-Disposition: form-data; name=\"partner_params\"\(lineBreak)".data(using: .utf8), - let contentTypeData = "Content-Type: application/json\(lineBreak + lineBreak)".data(using: .utf8), - let lineBreakData = lineBreak.data(using: .utf8) { + let dispositionData = "Content-Disposition: form-data; name=\"partner_params\"\(lineBreak)".data( + using: .utf8), + let contentTypeData = "Content-Type: application/json\(lineBreak + lineBreak)".data(using: .utf8), + let lineBreakData = lineBreak.data(using: .utf8) { body.append(boundaryData) body.append(dispositionData) body.append(contentTypeData) - + if let jsonData = try? JSONSerialization.data(withJSONObject: parameters, options: []) { body.append(jsonData) body.append(lineBreakData) } } } - + // Append userId if available if let userId = userId { if let valueData = "\(userId)\(lineBreak)".data(using: .utf8) { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"user_id\"\(lineBreak + lineBreak)".data(using: .utf8)!) 
+ body.append( + "Content-Disposition: form-data; name=\"user_id\"\(lineBreak + lineBreak)".data(using: .utf8)!) body.append(valueData) } } - + // Append callbackUrl if available if let callbackUrl = callbackUrl { if let valueData = "\(callbackUrl)\(lineBreak)".data(using: .utf8) { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"callback_url\"\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"callback_url\"\(lineBreak + lineBreak)".data(using: .utf8)!) body.append(valueData) } } - + // Append sandboxResult if available if let sandboxResult = sandboxResult { let sandboxResultString = "\(sandboxResult)" if let valueData = "\(sandboxResultString)\(lineBreak)".data(using: .utf8) { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"sandbox_result\"\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"sandbox_result\"\(lineBreak + lineBreak)".data( + using: .utf8)!) body.append(valueData) } } - + // Append allowNewEnroll if available if let allowNewEnroll = allowNewEnroll { let allowNewEnrollString = "\(allowNewEnroll)" if let valueData = "\(allowNewEnrollString)\(lineBreak)".data(using: .utf8) { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"allow_new_enroll\"\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"allow_new_enroll\"\(lineBreak + lineBreak)".data( + using: .utf8)!) body.append(valueData) } } - + // Append metadata let encoder = JSONEncoder() if let metadataData = try? encoder.encode(metadata.items) { @@ -304,23 +315,37 @@ extension ServiceRunnable { body.append(metadataData) body.append(lineBreak.data(using: .utf8)!) } - + // Append liveness media files for item in livenessImages { body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"\("liveness_images")\"; filename=\"\(item.filename)\"\(lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"\("liveness_images")\"; filename=\"\(item.filename)\"\(lineBreak)" + .data(using: .utf8)!) body.append("Content-Type: \(item.mimeType)\(lineBreak + lineBreak)".data(using: .utf8)!) body.append(item.data) body.append(lineBreak.data(using: .utf8)!) } - + // Append selfie media file body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append("Content-Disposition: form-data; name=\"\("selfie_image")\"; filename=\"\(selfieImage.filename)\"\(lineBreak)".data(using: .utf8)!) + body.append( + "Content-Disposition: form-data; name=\"\("selfie_image")\"; filename=\"\(selfieImage.filename)\"\(lineBreak)" + .data(using: .utf8)!) body.append("Content-Type: \(selfieImage.mimeType)\(lineBreak + lineBreak)".data(using: .utf8)!) body.append(selfieImage.data) body.append(lineBreak.data(using: .utf8)!) - + + // Append failure reason if available + if let failureReason, + let failureReasonData = try? encoder.encode(failureReason) { + body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) + body.append("Content-Disposition: form-data; name=\"failure_reason\"\(lineBreak)".data(using: .utf8)!) + body.append("Content-Type: application/json\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append(failureReasonData) + body.append(lineBreak.data(using: .utf8)!) 
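+ // The encoded part is {"mobile_active_liveness_timed_out": true} (see FailureReason); the backend uses it to flag timed-out active-liveness attempts.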
+        }
+
         // Append final boundary
         body.append("--\(boundary)--\(lineBreak)".data(using: .utf8)!)
         return body
diff --git a/Sources/SmileID/Classes/Networking/SmileIDService.swift b/Sources/SmileID/Classes/Networking/SmileIDService.swift
index ec9176774..a5661aed8 100644
--- a/Sources/SmileID/Classes/Networking/SmileIDService.swift
+++ b/Sources/SmileID/Classes/Networking/SmileIDService.swift
@@ -24,6 +24,7 @@
         callbackUrl: String?,
         sandboxResult: Int?,
         allowNewEnroll: Bool?,
+        failureReason: FailureReason?,
         metadata: Metadata
     ) async throws -> SmartSelfieResponse
 
@@ -38,6 +39,7 @@
         partnerParams: [String: String]?,
         callbackUrl: String?,
         sandboxResult: Int?,
+        failureReason: FailureReason?,
         metadata: Metadata
     ) async throws -> SmartSelfieResponse
 
@@ -222,6 +224,7 @@
         callbackUrl: String? = SmileID.callbackUrl,
         sandboxResult: Int? = nil,
         allowNewEnroll: Bool? = nil,
+        failureReason: FailureReason? = nil,
         metadata: Metadata = Metadata.default()
     ) async throws -> SmartSelfieResponse {
         try await multipart(
@@ -235,6 +238,7 @@
             callbackUrl: callbackUrl,
             sandboxResult: sandboxResult,
             allowNewEnroll: allowNewEnroll,
+            failureReason: failureReason,
             metadata: metadata
         )
     }
@@ -248,6 +252,7 @@
         partnerParams: [String: String]? = nil,
         callbackUrl: String? = SmileID.callbackUrl,
         sandboxResult: Int? = nil,
+        failureReason: FailureReason? = nil,
         metadata: Metadata = Metadata.default()
     ) async throws -> SmartSelfieResponse {
         try await multipart(
@@ -260,6 +265,7 @@
             partnerParams: partnerParams,
             callbackUrl: callbackUrl,
             sandboxResult: sandboxResult,
+            failureReason: failureReason,
             metadata: metadata
         )
     }
diff --git a/Sources/SmileID/Classes/SelfieCapture/CaptureGuideAnimation.swift b/Sources/SmileID/Classes/SelfieCapture/CaptureGuideAnimation.swift
new file mode 100644
index 000000000..97d81a074
--- /dev/null
+++ b/Sources/SmileID/Classes/SelfieCapture/CaptureGuideAnimation.swift
@@ -0,0 +1,52 @@
+import Foundation
+
+enum CaptureGuideAnimation: Equatable {
+    case goodLight
+    case headInFrame
+    case moveBack
+    case moveCloser
+    case lookRight
+    case lookLeft
+    case lookUp
+    case turnPhoneUp
+
+    var fileName: String {
+        switch self {
+        case .goodLight:
+            return "light_animation_with_bg"
+        case .headInFrame:
+            return "positioning_with_bg"
+        case .moveBack:
+            return "positioning_with_bg"
+        case .moveCloser:
+            return "positioning_with_bg"
+        case .lookRight:
+            return "headdirection_with_bg"
+        case .lookLeft:
+            return "headdirection_with_bg"
+        case .lookUp:
+            return "headdirection_with_bg"
+        case .turnPhoneUp:
+            return "device_orientation"
+        }
+    }
+
+    var animationProgressRange: ClosedRange<CGFloat> {
+        switch self {
+        case .headInFrame:
+            return 0...0.28
+        case .moveBack:
+            return 0.38...0.67
+        case .moveCloser:
+            return 0.73...1.0
+        case .lookRight:
+            return 0...0.4
+        case .lookLeft:
+            return 0.4...0.64
+        case .lookUp:
+            return 0.64...1.0
+        default:
+            return 0...1.0
+        }
+    }
+}
diff --git a/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift
new file mode 100644
index 000000000..105a5bb7e
--- /dev/null
+++ b/Sources/SmileID/Classes/SelfieCapture/EnhancedSmartSelfieViewModel.swift
@@ -0,0 +1,590 @@
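+// View model for the enhanced selfie capture flow: feeds camera frames through
+// face detection and validation, drives the active liveness tasks (look
+// left/right/up), and submits the captured selfie and liveness images as a
+// SmartSelfie job.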
+import ARKit
+import Combine
+import CoreMotion
+import SwiftUI
+
+public class EnhancedSmartSelfieViewModel: ObservableObject {
+    // MARK: Dependencies
+    private let motionManager = CMMotionManager()
+    let cameraManager = CameraManager(orientation: .portrait)
+    let faceDetector = EnhancedFaceDetector()
+    private let faceValidator = FaceValidator()
+    var livenessCheckManager = LivenessCheckManager()
+    private var subscribers = Set<AnyCancellable>()
+    private var guideAnimationDelayTimer: Timer?
+    private let metadataTimerStart = MonotonicTime()
+
+    // MARK: Private Properties
+    private var motionDeviceOrientation: UIDeviceOrientation = UIDevice.current
+        .orientation
+    private var unlockedDeviceOrientation: UIDeviceOrientation {
+        UIDevice.current.orientation
+    }
+    private var faceLayoutGuideFrame = CGRect(
+        x: 0, y: 0, width: 250, height: 350)
+    private var elapsedGuideAnimationDelay: TimeInterval = 0
+    private var currentFrameBuffer: CVPixelBuffer?
+    var selfieImage: UIImage?
+    private var selfieImageURL: URL? {
+        didSet {
+            DispatchQueue.main.async {
+                self.selfieCaptured = self.selfieImage != nil
+            }
+        }
+    }
+    private var livenessImages: [URL] = []
+    private var hasDetectedValidFace: Bool = false
+    private var isCapturingLivenessImages = false
+    private var shouldBeginLivenessChallenge: Bool {
+        hasDetectedValidFace && selfieImage != nil
+            && livenessCheckManager.currentTask != nil
+    }
+    private var shouldSubmitJob: Bool {
+        selfieImage != nil && livenessImages.count == numLivenessImages
+    }
+    private var submissionTask: Task<Void, Error>?
+    private var failureReason: FailureReason?
+    private var apiResponse: SmartSelfieResponse?
+    private var error: Error?
+    @Published public var errorMessageRes: String?
+    @Published public var errorMessage: String?
+
+    // MARK: Constants
+    private let livenessImageSize = 320
+    private let selfieImageSize = 640
+    private let numLivenessImages = 6
+    private let guideAnimationDelayTime: TimeInterval = 3
+
+    // MARK: UI Properties
+    @Published var unauthorizedAlert: AlertState?
+    @Published private(set) var userInstruction: SelfieCaptureInstruction?
+ @Published private(set) var faceInBounds: Bool = false + @Published private(set) var selfieCaptured: Bool = false + @Published private(set) var showGuideAnimation: Bool = false + @Published private(set) var selfieCaptureState: SelfieCaptureState = + .capturingSelfie + + // MARK: Injected Properties + private let isEnroll: Bool + private let userId: String + private let jobId: String + private let allowNewEnroll: Bool + private let skipApiSubmission: Bool + private let extraPartnerParams: [String: String] + private let useStrictMode: Bool + private let onResult: SmartSelfieResultDelegate + private var localMetadata: LocalMetadata + + enum SelfieCaptureState: Equatable { + case capturingSelfie + case processing(ProcessingState) + + var title: String { + switch self { + case .capturingSelfie: + return "Instructions.Capturing" + case let .processing(processingState): + return processingState.title + } + } + } + + public init( + isEnroll: Bool, + userId: String, + jobId: String, + allowNewEnroll: Bool, + skipApiSubmission: Bool, + extraPartnerParams: [String: String], + useStrictMode: Bool, + onResult: SmartSelfieResultDelegate, + localMetadata: LocalMetadata + ) { + self.isEnroll = isEnroll + self.userId = userId + self.jobId = jobId + self.allowNewEnroll = allowNewEnroll + self.skipApiSubmission = skipApiSubmission + self.extraPartnerParams = extraPartnerParams + self.useStrictMode = useStrictMode + self.onResult = onResult + self.localMetadata = localMetadata + self.initialSetup() + } + + deinit { + subscribers.removeAll() + stopGuideAnimationDelayTimer() + invalidateSubmissionTask() + motionManager.stopDeviceMotionUpdates() + } + + private func initialSetup() { + self.faceValidator.delegate = self + self.faceDetector.resultDelegate = self + self.livenessCheckManager.delegate = self + + self.faceValidator.setLayoutGuideFrame(with: faceLayoutGuideFrame) + + livenessCheckManager.$lookLeftProgress + .merge( + with: livenessCheckManager.$lookRightProgress, + livenessCheckManager.$lookUpProgress + ) + .receive(on: DispatchQueue.main) + .sink { [weak self] _ in + DispatchQueue.main.async { + self?.resetGuideAnimationDelayTimer() + } + } + .store(in: &subscribers) + + if cameraManager.session.canSetSessionPreset(.vga640x480) { + cameraManager.session.sessionPreset = .vga640x480 + } + cameraManager.$status + .receive(on: DispatchQueue.main) + .filter { $0 == .unauthorized } + .map { _ in AlertState.cameraUnauthorized } + .sink { [weak self] alert in self?.unauthorizedAlert = alert } + .store(in: &subscribers) + + cameraManager.sampleBufferPublisher + .receive(on: DispatchQueue.main) + .throttle( + for: 0.35, + scheduler: DispatchQueue.global(qos: .userInitiated), + latest: true + ) + // Drop the first ~2 seconds to allow the user to settle in + .dropFirst(5) + .compactMap { $0 } + .sink { [weak self] imageBuffer in + self?.handleCameraImageBuffer(imageBuffer) + } + .store(in: &subscribers) + + if motionManager.isDeviceMotionAvailable { + motionManager.startDeviceMotionUpdates(to: OperationQueue()) { [weak self] deviceMotion, _ in + guard let gravity = deviceMotion?.gravity else { return } + if abs(gravity.y) < abs(gravity.x) { + self?.motionDeviceOrientation = + gravity.x > 0 ? .landscapeRight : .landscapeLeft + } else { + self?.motionDeviceOrientation = + gravity.y > 0 ? .portraitUpsideDown : .portrait + } + } + } + } + + private func handleCameraImageBuffer(_ imageBuffer: CVPixelBuffer) { + let currentOrientation: UIDeviceOrientation = + motionManager.isDeviceMotionAvailable + ? 
motionDeviceOrientation : unlockedDeviceOrientation + if currentOrientation == .portrait { + analyzeFrame(imageBuffer: imageBuffer) + } else { + DispatchQueue.main.async { + self.faceInBounds = false + self.publishUserInstruction(.turnPhoneUp) + } + } + } + + private func analyzeFrame(imageBuffer: CVPixelBuffer) { + currentFrameBuffer = imageBuffer + faceDetector.processImageBuffer(imageBuffer) + if hasDetectedValidFace && selfieImage == nil { + captureSelfieImage(imageBuffer) + HapticManager.shared.notification(type: .success) + livenessCheckManager.initiateLivenessCheck() + } + } + + // MARK: Actions + func perform(action: SelfieViewModelAction) { + switch action { + case let .windowSizeDetected(windowRect, safeAreaInsets): + handleWindowSizeChanged(to: windowRect, edgeInsets: safeAreaInsets) + case .onViewAppear: + handleViewAppeared() + case .cancelSelfieCapture: + handleCancelSelfieCapture() + case .retryJobSubmission: + handleSubmission() + case .openApplicationSettings: + openSettings() + case let .handleError(error): + handleError(error) + } + } + + private func publishUserInstruction( + _ instruction: SelfieCaptureInstruction? + ) { + if self.userInstruction != instruction { + self.userInstruction = instruction + self.resetGuideAnimationDelayTimer() + } + } +} + +// MARK: Action Handlers +extension EnhancedSmartSelfieViewModel { + private func resetGuideAnimationDelayTimer() { + elapsedGuideAnimationDelay = 0 + showGuideAnimation = false + guard guideAnimationDelayTimer == nil else { return } + guideAnimationDelayTimer = Timer.scheduledTimer( + withTimeInterval: 1, + repeats: true + ) { _ in + self.elapsedGuideAnimationDelay += 1 + if self.elapsedGuideAnimationDelay == self.guideAnimationDelayTime { + self.showGuideAnimation = true + self.stopGuideAnimationDelayTimer() + } + } + } + + private func stopGuideAnimationDelayTimer() { + guard guideAnimationDelayTimer != nil else { return } + guideAnimationDelayTimer?.invalidate() + guideAnimationDelayTimer = nil + } + + private func handleViewAppeared() { + cameraManager.switchCamera(to: .front) + resetGuideAnimationDelayTimer() + resetSelfieCaptureState() + } + + private func resetSelfieCaptureState() { + selfieImage = nil + livenessImages = [] + selfieCaptureState = .capturingSelfie + failureReason = nil + resetSelfieCaptureMetadata() + } + + private func handleWindowSizeChanged( + to rect: CGSize, edgeInsets: EdgeInsets + ) { + let topPadding: CGFloat = edgeInsets.top + 100 + faceLayoutGuideFrame = CGRect( + x: (rect.width / 2) - faceLayoutGuideFrame.width / 2, + y: topPadding, + width: faceLayoutGuideFrame.width, + height: faceLayoutGuideFrame.height + ) + faceValidator.setLayoutGuideFrame(with: faceLayoutGuideFrame) + } + + private func captureSelfieImage(_ pixelBuffer: CVPixelBuffer) { + do { + guard + let imageData = ImageUtils.resizePixelBufferToHeight( + pixelBuffer, + height: selfieImageSize, + orientation: .up + ), + let uiImage = UIImage(data: imageData) + else { + throw SmileIDError.unknown("Error resizing selfie image") + } + self.selfieImage = flipImageForPreview(uiImage) + self.selfieImageURL = try LocalStorage.createSelfieFile( + jobId: jobId, selfieFile: imageData) + } catch { + handleError(error) + } + } + + private func flipImageForPreview(_ image: UIImage) -> UIImage? 
{ + guard let cgImage = image.cgImage else { return nil } + + let contextSize = CGSize( + width: image.size.width, height: image.size.height) + UIGraphicsBeginImageContextWithOptions(contextSize, false, 1.0) + defer { + UIGraphicsEndImageContext() + } + guard let context = UIGraphicsGetCurrentContext() else { + return nil + } + + // Apply a 180° counterclockwise rotation + // Translate the context to the center before rotating + // to ensure the image rotates around its center + context.translateBy(x: contextSize.width / 2, y: contextSize.height / 2) + context.rotate(by: -.pi) + + // Draw the image + context.draw( + cgImage, + in: CGRect( + x: -image.size.width / 2, y: -image.size.height / 2, + width: image.size.width, height: image.size.height) + ) + + // Get the new UIImage from the context + let correctedImage = UIGraphicsGetImageFromCurrentImageContext() + + return correctedImage + } + + private func captureLivenessImage(_ pixelBuffer: CVPixelBuffer) { + do { + guard + let imageData = ImageUtils.resizePixelBufferToHeight( + pixelBuffer, + height: livenessImageSize, + orientation: .up + ) + else { + throw SmileIDError.unknown("Error resizing liveness image") + } + let imageUrl = try LocalStorage.createLivenessFile( + jobId: jobId, livenessFile: imageData) + livenessImages.append(imageUrl) + } catch { + handleError(error) + } + } + + private func handleError(_ error: Error) { + debugPrint(error.localizedDescription) + } + + private func handleSubmission() { + DispatchQueue.main.async { + self.selfieCaptureState = .processing(.inProgress) + } + guard submissionTask == nil else { return } + submissionTask = Task { + try await submitJob() + } + } + + private func openSettings() { + guard let settingsURL = URL(string: UIApplication.openSettingsURLString) + else { return } + UIApplication.shared.open(settingsURL) + } + + private func handleCancelSelfieCapture() { + invalidateSubmissionTask() + UIApplication.shared.windows.first?.rootViewController?.dismiss(animated: true) + } + + private func dismissSelfieCapture() { + UIApplication.shared.windows.first?.rootViewController?.dismiss( + animated: true, + completion: { [weak self] in + guard let self else { return } + self.onFinished(callback: self.onResult) + } + ) + } +} + +// MARK: FaceDetectorResultDelegate Methods +extension EnhancedSmartSelfieViewModel: FaceDetectorResultDelegate { + func faceDetector( + _ detector: EnhancedFaceDetector, + didDetectFace faceGeometry: FaceGeometryData, + withFaceQuality faceQuality: Float, + brightness: Int + ) { + faceValidator + .validate( + faceGeometry: faceGeometry, + faceQuality: faceQuality, + brightness: brightness, + currentLivenessTask: self.livenessCheckManager.currentTask + ) + if shouldBeginLivenessChallenge && !isCapturingLivenessImages { + livenessCheckManager.processFaceGeometry(faceGeometry) + } + } + + func faceDetector( + _ detector: EnhancedFaceDetector, didFailWithError error: Error + ) { + DispatchQueue.main.async { + self.publishUserInstruction(.headInFrame) + } + } +} + +// MARK: FaceValidatorDelegate Methods +extension EnhancedSmartSelfieViewModel: FaceValidatorDelegate { + func updateValidationResult(_ result: FaceValidationResult) { + DispatchQueue.main.async { + self.faceInBounds = result.faceInBounds + self.hasDetectedValidFace = result.hasDetectedValidFace + self.publishUserInstruction(result.userInstruction) + } + } +} + +// MARK: LivenessCheckManagerDelegate Methods +extension EnhancedSmartSelfieViewModel: LivenessCheckManagerDelegate { + func didCompleteLivenessTask() { + 
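+        // The active liveness task has completed, so burst-capture the
+        // required number of frames from the camera buffer as liveness images.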
+        isCapturingLivenessImages = true
+        let capturedFrames = 0
+        captureNextFrame(capturedFrames: capturedFrames)
+    }
+
+    private func captureNextFrame(capturedFrames: Int) {
+        let maxFrames = LivenessTask.numberOfFramesToCapture
+        guard capturedFrames < maxFrames,
+            let currentFrame = currentFrameBuffer
+        else {
+            return
+        }
+
+        captureLivenessImage(currentFrame)
+        let nextCapturedFrames = capturedFrames + 1
+        if nextCapturedFrames < maxFrames {
+            DispatchQueue.main.asyncAfter(deadline: .now() + 0.4) { [weak self] in
+                self?.captureNextFrame(capturedFrames: nextCapturedFrames)
+            }
+        } else {
+            isCapturingLivenessImages = false
+            HapticManager.shared.notification(type: .success)
+        }
+    }
+
+    func didCompleteLivenessChallenge() {
+        DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
+            self.cameraManager.pauseSession()
+            self.handleSubmission()
+        }
+    }
+
+    func livenessChallengeTimeout() {
+        let remainingImages = numLivenessImages - livenessImages.count
+        let count = remainingImages > 0 ? remainingImages : 0
+        for _ in 0..<count {
+            if let currentFrame = currentFrameBuffer {
+                captureLivenessImage(currentFrame)
+            }
+        }
+        handleSubmission()
+    }
+
+    private func getJobType() -> JobType {
+        return isEnroll ? JobType.smartSelfieEnrollment : JobType.smartSelfieAuthentication
+    }
+
+    private func createAuthRequest(jobType: JobType) -> AuthenticationRequest {
+        return AuthenticationRequest(
+            jobType: jobType,
+            enrollment: isEnroll,
+            jobId: jobId,
+            userId: userId
+        )
+    }
+
+    private func saveOfflineMode(jobType: JobType) throws {
+        try LocalStorage.saveOfflineJob(
+            jobId: jobId,
+            userId: userId,
+            jobType: jobType,
+            enrollment: isEnroll,
+            allowNewEnroll: allowNewEnroll,
+            localMetadata: localMetadata,
+            partnerParams: extraPartnerParams
+        )
+    }
+
+    private func prepareImagesForSubmission() throws -> (MultipartBody, [MultipartBody]) {
+        guard let smartSelfieImage = createMultipartBody(from: selfieImageUrl) else {
+            throw SmileIDError.fileNotFound("Could not create multipart body for file")
+        }
+
+        let smartSelfieLivenessImages = livenessImages.compactMap {
+            createMultipartBody(from: $0)
+        }
+        guard smartSelfieLivenessImages.count == numLivenessImages else {
+            throw SmileIDError.unknown("Liveness image count mismatch")
+        }
+
+        return (smartSelfieImage, smartSelfieLivenessImages)
+    }
+
+    private func createMultipartBody(from fileURL: URL?) -> MultipartBody? {
+        guard let fileURL = fileURL,
+            let imageData = try? Data(contentsOf: fileURL)
+        else {
+            return nil
+        }
+        return MultipartBody(
+            withImage: imageData,
+            forKey: fileURL.lastPathComponent,
+            forName: fileURL.lastPathComponent
+        )
+    }
+
+    private func submitJobRequest(
+        authResponse: AuthenticationResponse,
+        smartSelfieImage: MultipartBody,
+        smartSelfieLivenessImages: [MultipartBody],
+        failureReason: FailureReason?
+    ) async throws -> SmartSelfieResponse {
+        if isEnroll {
+            return try await SmileID.api
+                .doSmartSelfieEnrollment(
+                    signature: authResponse.signature,
+                    timestamp: authResponse.timestamp,
+                    selfieImage: smartSelfieImage,
+                    livenessImages: smartSelfieLivenessImages,
+                    userId: userId,
+                    partnerParams: extraPartnerParams,
+                    callbackUrl: SmileID.callbackUrl,
+                    sandboxResult: nil,
+                    allowNewEnroll: allowNewEnroll,
+                    failureReason: failureReason,
+                    metadata: localMetadata.metadata
+                )
+        } else {
+            return try await SmileID.api
+                .doSmartSelfieAuthentication(
+                    signature: authResponse.signature,
+                    timestamp: authResponse.timestamp,
+                    userId: userId,
+                    selfieImage: smartSelfieImage,
+                    livenessImages: smartSelfieLivenessImages,
+                    partnerParams: extraPartnerParams,
+                    callbackUrl: SmileID.callbackUrl,
+                    sandboxResult: nil,
+                    failureReason: failureReason,
+                    metadata: localMetadata.metadata
+                )
+        }
+    }
+
+    private func updateLocalStorageAfterSuccess() throws {
+        // Move the job to the submitted jobs directory for record-keeping
+        try LocalStorage.moveToSubmittedJobs(jobId: self.jobId)
+
+        // Update the references to the submitted selfie and liveness images
+        self.selfieImageUrl = try LocalStorage.getFileByType(
+            jobId: jobId,
+            fileType: FileType.selfie,
+            submitted: true
+        )
+        self.livenessImages =
+            try LocalStorage.getFilesByType(
+                jobId: jobId,
+                fileType: FileType.liveness,
+                submitted: true
+            ) ?? []
+    }
+
+    private func handleJobSubmissionFailure(_ smileIDError: SmileIDError) {
+        do {
+            let didMove = try LocalStorage.handleOfflineJobFailure(jobId: self.jobId, error: smileIDError)
+            if didMove {
+                self.selfieImageUrl = try LocalStorage.getFileByType(jobId: jobId, fileType: .selfie, submitted: true)
+                self.livenessImages =
+                    try LocalStorage.getFilesByType(jobId: jobId, fileType: .liveness, submitted: true) ?? []
+            }
+        } catch {
+            let (errorMessageRes, errorMessage) = toErrorMessage(error: smileIDError)
+            self.delegate?
+                .submissionDidFail(
+                    with: error,
+                    errorMessage: errorMessage,
+                    errorMessageRes: errorMessageRes,
+                    updatedSelfieImageUrl: selfieImageUrl,
+                    updatedLivenessImages: livenessImages
+                )
+            return
+        }
+
+        if SmileID.allowOfflineMode, SmileIDError.isNetworkFailure(error: smileIDError) {
+            self.delegate?
+                .submissionDidFail(
+                    with: smileIDError,
+                    errorMessage: nil,
+                    errorMessageRes: "Offline.Message",
+                    updatedSelfieImageUrl: selfieImageUrl,
+                    updatedLivenessImages: livenessImages
+                )
+        } else {
+            let (errorMessageRes, errorMessage) = toErrorMessage(error: smileIDError)
+            self.delegate?
+                .submissionDidFail(
+                    with: smileIDError,
+                    errorMessage: errorMessage,
+                    errorMessageRes: errorMessageRes,
+                    updatedSelfieImageUrl: selfieImageUrl,
+                    updatedLivenessImages: livenessImages
+                )
+        }
+    }
+}
diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
index c62ef7b6e..cd2451694 100644
--- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
+++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
@@ -13,9 +13,9 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     private let minFaceAreaThreshold = 0.125
     private let maxFaceAreaThreshold = 0.25
     private let faceRotationThreshold = 0.03
-    private let faceRollThreshold = 0.025 // roll has a smaller range than yaw
+    private let faceRollThreshold = 0.025  // roll has a smaller range than yaw
     private let numLivenessImages = 7
-    private let numTotalSteps = 8 // numLivenessImages + 1 selfie image
+    private let numTotalSteps = 8  // numLivenessImages + 1 selfie image
     private let livenessImageSize = 320
     private let selfieImageSize = 640
 
@@ -35,14 +35,18 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     var previousHeadPitch = Double.infinity
     var previousHeadYaw = Double.infinity
     var isSmiling = false
-    var currentlyUsingArKit: Bool { ARFaceTrackingConfiguration.isSupported && !useBackCamera }
+    var currentlyUsingArKit: Bool {
+        ARFaceTrackingConfiguration.isSupported && !useBackCamera
+    }
 
     var selfieImage: URL?
     var livenessImages: [URL] = []
     var apiResponse: SmartSelfieResponse?
     var error: Error?
 
-    private let arKitFramePublisher = PassthroughSubject<CVPixelBuffer?, Never>()
+    private let arKitFramePublisher = PassthroughSubject<
+        CVPixelBuffer?, Never
+    >()
     private var subscribers = Set<AnyCancellable>()
 
     // UI Properties
@@ -78,25 +82,35 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
         self.extraPartnerParams = extraPartnerParams
         self.localMetadata = localMetadata
 
+        if cameraManager.session.canSetSessionPreset(.vga640x480) {
+            cameraManager.session.sessionPreset = .vga640x480
+        }
         cameraManager.$status
             .receive(on: DispatchQueue.main)
             .filter { $0 == .unauthorized }
             .map { _ in AlertState.cameraUnauthorized }
-            .sink { alert in self.unauthorizedAlert = alert }
+            .sink { [weak self] alert in self?.unauthorizedAlert = alert }
             .store(in: &subscribers)
 
         cameraManager.sampleBufferPublisher
             .merge(with: arKitFramePublisher)
-            .throttle(for: 0.35, scheduler: DispatchQueue.global(qos: .userInitiated), latest: true)
+            .throttle(
+                for: 0.35, scheduler: DispatchQueue.global(qos: .userInitiated),
+                latest: true
+            )
             // Drop the first ~2 seconds to allow the user to settle in
             .dropFirst(5)
            .compactMap { $0 }
-            .sink(receiveValue: analyzeImage)
+            .sink { [weak self] imageBuffer in
+                self?.analyzeImage(image: imageBuffer)
+            }
             .store(in: &subscribers)
 
         localMetadata.addMetadata(
-            useBackCamera ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera)
-                : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera))
+            useBackCamera
+                ? 
Metadatum.SelfieImageOrigin(cameraFacing: .backCamera) + : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera) + ) } let metadataTimerStart = MonotonicTime() @@ -114,20 +128,25 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { } do { - try faceDetector.detect(imageBuffer: image) { [self] request, error in + try faceDetector.detect(imageBuffer: image) { [weak self] request, error in + guard let self else { return } if let error { - print("Error analyzing image: \(error.localizedDescription)") + print( + "Error analyzing image: \(error.localizedDescription)") self.error = error return } - guard let results = request.results as? [VNFaceObservation] else { + guard let results = request.results as? [VNFaceObservation] + else { print("Did not receive the expected [VNFaceObservation]") return } if results.count == 0 { - DispatchQueue.main.async { self.directive = "Instructions.UnableToDetectFace" } + DispatchQueue.main.async { + self.directive = "Instructions.UnableToDetectFace" + } // If no faces are detected for a while, reset the state if elapsedtime > noFaceResetDelay { DispatchQueue.main.async { @@ -144,7 +163,9 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { // Ensure only 1 face is in frame if results.count > 1 { - DispatchQueue.main.async { self.directive = "Instructions.MultipleFaces" } + DispatchQueue.main.async { + self.directive = "Instructions.MultipleFaces" + } return } @@ -165,31 +186,44 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { || boundingBox.maxX > maxFaceCenteredThreshold || boundingBox.maxY > maxFaceCenteredThreshold { - DispatchQueue.main.async { self.directive = "Instructions.PutFaceInOval" } + DispatchQueue.main.async { + self.directive = "Instructions.PutFaceInOval" + } return } // image's area is equal to 1. so (bbox area / image area) == bbox area let faceFillRatio = boundingBox.width * boundingBox.height if faceFillRatio < minFaceAreaThreshold { - DispatchQueue.main.async { self.directive = "Instructions.MoveCloser" } + DispatchQueue.main.async { + self.directive = "Instructions.MoveCloser" + } return } if faceFillRatio > maxFaceAreaThreshold { - DispatchQueue.main.async { self.directive = "Instructions.MoveFarther" } + DispatchQueue.main.async { + self.directive = "Instructions.MoveFarther" + } return } - if let quality = face.faceCaptureQuality, quality < faceCaptureQualityThreshold { - DispatchQueue.main.async { self.directive = "Instructions.Quality" } + if let quality = face.faceCaptureQuality, + quality < faceCaptureQualityThreshold + { + DispatchQueue.main.async { + self.directive = "Instructions.Quality" + } return } - let userNeedsToSmile = livenessImages.count > numLivenessImages / 2 + let userNeedsToSmile = + livenessImages.count > numLivenessImages / 2 - DispatchQueue.main.async { [self] in - directive = userNeedsToSmile ? "Instructions.Smile" : "Instructions.Capturing" + DispatchQueue.main.async { + self.directive = + userNeedsToSmile + ? "Instructions.Smile" : "Instructions.Capturing" } // TODO: Use mouth deformation as an alternate signal for non-ARKit capture @@ -200,36 +234,50 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { // Perform the rotation checks *after* changing directive to Capturing -- we don't // want to explicitly tell the user to move their head if !hasFaceRotatedEnough(face: face) { - print("Not enough face rotation between captures. Waiting...") + print( + "Not enough face rotation between captures. 
Waiting...") return } - let orientation = currentlyUsingArKit ? CGImagePropertyOrientation.right : .up + let orientation = + currentlyUsingArKit ? CGImagePropertyOrientation.right : .up lastAutoCaptureTime = Date() do { if livenessImages.count < numLivenessImages { - guard let imageData = ImageUtils.resizePixelBufferToHeight( - image, - height: livenessImageSize, - orientation: orientation - ) else { - throw SmileIDError.unknown("Error resizing liveness image") + guard + let imageData = + ImageUtils.resizePixelBufferToHeight( + image, + height: livenessImageSize, + orientation: orientation + ) + else { + throw SmileIDError.unknown( + "Error resizing liveness image") } - let imageUrl = try LocalStorage.createLivenessFile(jobId: jobId, livenessFile: imageData) + let imageUrl = try LocalStorage.createLivenessFile( + jobId: jobId, livenessFile: imageData) livenessImages.append(imageUrl) DispatchQueue.main.async { - self.captureProgress = Double(self.livenessImages.count) / Double(self.numTotalSteps) + self.captureProgress = + Double(self.livenessImages.count) + / Double(self.numTotalSteps) } } else { shouldAnalyzeImages = false - guard let imageData = ImageUtils.resizePixelBufferToHeight( - image, - height: selfieImageSize, - orientation: orientation - ) else { - throw SmileIDError.unknown("Error resizing selfie image") + guard + let imageData = + ImageUtils.resizePixelBufferToHeight( + image, + height: selfieImageSize, + orientation: orientation + ) + else { + throw SmileIDError.unknown( + "Error resizing selfie image") } - let selfieImage = try LocalStorage.createSelfieFile(jobId: jobId, selfieFile: imageData) + let selfieImage = try LocalStorage.createSelfieFile( + jobId: jobId, selfieFile: imageData) self.selfieImage = selfieImage DispatchQueue.main.async { self.captureProgress = 1 @@ -250,14 +298,16 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { } func hasFaceRotatedEnough(face: VNFaceObservation) -> Bool { - guard let roll = face.roll?.doubleValue, let yaw = face.yaw?.doubleValue else { + guard let roll = face.roll?.doubleValue, let yaw = face.yaw?.doubleValue + else { print("Roll and yaw unexpectedly nil") return true } var didPitchChange = false if #available(iOS 15, *) { if let pitch = face.pitch?.doubleValue { - didPitchChange = abs(pitch - previousHeadPitch) > faceRotationThreshold + didPitchChange = + abs(pitch - previousHeadPitch) > faceRotationThreshold } } let rollDelta = abs(roll - previousHeadRoll) @@ -269,7 +319,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { self.previousHeadPitch = face.pitch?.doubleValue ?? Double.infinity } - return didPitchChange || rollDelta > faceRollThreshold || yawDelta > faceRotationThreshold + return didPitchChange || rollDelta > faceRollThreshold + || yawDelta > faceRotationThreshold } func onSmiling(isSmiling: Bool) { @@ -284,7 +335,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { cameraManager.switchCamera(to: useBackCamera ? .back : .front) localMetadata.metadata.removeAllOfType(Metadatum.SelfieImageOrigin.self) localMetadata.addMetadata( - useBackCamera ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera) + useBackCamera + ? 
Metadatum.SelfieImageOrigin(cameraFacing: .backCamera) : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera)) } @@ -299,7 +351,10 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { shouldAnalyzeImages = true cleanUpSelfieCapture() localMetadata.metadata.removeAllOfType(Metadatum.SelfieImageOrigin.self) - localMetadata.metadata.removeAllOfType(Metadatum.SelfieCaptureDuration.self) + localMetadata.metadata.removeAllOfType( + Metadatum.ActiveLivenessType.self) + localMetadata.metadata.removeAllOfType( + Metadatum.SelfieCaptureDuration.self) } func cleanUpSelfieCapture() { @@ -321,7 +376,11 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { } public func submitJob() { - localMetadata.addMetadata(Metadatum.SelfieCaptureDuration(duration: metadataTimerStart.elapsedTime())) + localMetadata.addMetadata( + Metadatum.SelfieCaptureDuration( + duration: metadataTimerStart.elapsedTime())) + localMetadata.addMetadata( + Metadatum.ActiveLivenessType(livenessType: LivenessType.smile)) if skipApiSubmission { DispatchQueue.main.async { self.processingState = .success } return @@ -329,10 +388,14 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { DispatchQueue.main.async { self.processingState = .inProgress } Task { do { - guard let selfieImage, livenessImages.count == numLivenessImages else { + guard let selfieImage, livenessImages.count == numLivenessImages + else { throw SmileIDError.unknown("Selfie capture failed") } - let jobType = isEnroll ? JobType.smartSelfieEnrollment : JobType.smartSelfieAuthentication + let jobType = + isEnroll + ? JobType.smartSelfieEnrollment + : JobType.smartSelfieAuthentication let authRequest = AuthenticationRequest( jobType: jobType, enrollment: isEnroll, @@ -350,15 +413,18 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { partnerParams: extraPartnerParams ) } - let authResponse = try await SmileID.api.authenticate(request: authRequest) + let authResponse = try await SmileID.api.authenticate( + request: authRequest) var smartSelfieLivenessImages = [MultipartBody]() var smartSelfieImage: MultipartBody? - if let selfie = try? Data(contentsOf: selfieImage), let media = MultipartBody( - withImage: selfie, - forKey: selfieImage.lastPathComponent, - forName: selfieImage.lastPathComponent - ) { + if let selfie = try? 
Data(contentsOf: selfieImage), + let media = MultipartBody( + withImage: selfie, + forKey: selfieImage.lastPathComponent, + forName: selfieImage.lastPathComponent + ) + { smartSelfieImage = media } if !livenessImages.isEmpty { @@ -373,40 +439,44 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { return nil } - smartSelfieLivenessImages.append(contentsOf: livenessImageInfos.compactMap { $0 }) + smartSelfieLivenessImages.append( + contentsOf: livenessImageInfos.compactMap { $0 }) } guard let smartSelfieImage = smartSelfieImage, - smartSelfieLivenessImages.count == numLivenessImages + smartSelfieLivenessImages.count == numLivenessImages else { throw SmileIDError.unknown("Selfie capture failed") } - let response = if isEnroll { - try await SmileID.api.doSmartSelfieEnrollment( - signature: authResponse.signature, - timestamp: authResponse.timestamp, - selfieImage: smartSelfieImage, - livenessImages: smartSelfieLivenessImages, - userId: userId, - partnerParams: extraPartnerParams, - callbackUrl: SmileID.callbackUrl, - sandboxResult: nil, - allowNewEnroll: allowNewEnroll, - metadata: localMetadata.metadata - ) - } else { - try await SmileID.api.doSmartSelfieAuthentication( - signature: authResponse.signature, - timestamp: authResponse.timestamp, - userId: userId, - selfieImage: smartSelfieImage, - livenessImages: smartSelfieLivenessImages, - partnerParams: extraPartnerParams, - callbackUrl: SmileID.callbackUrl, - sandboxResult: nil, - metadata: localMetadata.metadata - ) - } + let response = + if isEnroll { + try await SmileID.api.doSmartSelfieEnrollment( + signature: authResponse.signature, + timestamp: authResponse.timestamp, + selfieImage: smartSelfieImage, + livenessImages: smartSelfieLivenessImages, + userId: userId, + partnerParams: extraPartnerParams, + callbackUrl: SmileID.callbackUrl, + sandboxResult: nil, + allowNewEnroll: allowNewEnroll, + failureReason: nil, + metadata: localMetadata.metadata + ) + } else { + try await SmileID.api.doSmartSelfieAuthentication( + signature: authResponse.signature, + timestamp: authResponse.timestamp, + userId: userId, + selfieImage: smartSelfieImage, + livenessImages: smartSelfieLivenessImages, + partnerParams: extraPartnerParams, + callbackUrl: SmileID.callbackUrl, + sandboxResult: nil, + failureReason: nil, + metadata: localMetadata.metadata + ) + } apiResponse = response do { try LocalStorage.moveToSubmittedJobs(jobId: self.jobId) @@ -415,11 +485,12 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { fileType: FileType.selfie, submitted: true ) - self.livenessImages = try LocalStorage.getFilesByType( - jobId: jobId, - fileType: FileType.liveness, - submitted: true - ) ?? [] + self.livenessImages = + try LocalStorage.getFilesByType( + jobId: jobId, + fileType: FileType.liveness, + submitted: true + ) ?? [] } catch { print("Error moving job to submitted directory: \(error)") self.error = error @@ -437,25 +508,29 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { fileType: FileType.selfie, submitted: true ) - self.livenessImages = try LocalStorage.getFilesByType( - jobId: jobId, - fileType: FileType.liveness, - submitted: true - ) ?? [] + self.livenessImages = + try LocalStorage.getFilesByType( + jobId: jobId, + fileType: FileType.liveness, + submitted: true + ) ?? 
[] } } catch { print("Error moving job to submitted directory: \(error)") self.error = error return } - if SmileID.allowOfflineMode, SmileIDError.isNetworkFailure(error: error) { + if SmileID.allowOfflineMode, + SmileIDError.isNetworkFailure(error: error) + { DispatchQueue.main.async { self.errorMessageRes = "Offline.Message" self.processingState = .success } } else { print("Error submitting job: \(error)") - let (errorMessageRes, errorMessage) = toErrorMessage(error: error) + let (errorMessageRes, errorMessage) = toErrorMessage( + error: error) self.error = error self.errorMessageRes = errorMessageRes self.errorMessage = errorMessage @@ -471,11 +546,14 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { public func onFinished(callback: SmartSelfieResultDelegate) { if let selfieImage = selfieImage, - let selfiePath = getRelativePath(from: selfieImage), - livenessImages.count == numLivenessImages, - !livenessImages.contains(where: { getRelativePath(from: $0) == nil }) + let selfiePath = getRelativePath(from: selfieImage), + livenessImages.count == numLivenessImages, + !livenessImages.contains(where: { getRelativePath(from: $0) == nil } + ) { - let livenessImagesPaths = livenessImages.compactMap { getRelativePath(from: $0) } + let livenessImagesPaths = livenessImages.compactMap { + getRelativePath(from: $0) + } callback.didSucceed( selfieImage: selfiePath, @@ -490,7 +568,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { } func openSettings() { - guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return } + guard let settingsURL = URL(string: UIApplication.openSettingsURLString) + else { return } UIApplication.shared.open(settingsURL) } } diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift new file mode 100644 index 000000000..2bec66da1 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift @@ -0,0 +1,15 @@ +import SwiftUI + +enum SelfieViewModelAction { + // View Setup Actions + case onViewAppear + case windowSizeDetected(CGSize, EdgeInsets) + + // Job Submission Actions + case cancelSelfieCapture + case retryJobSubmission + + // Others + case openApplicationSettings + case handleError(Error) +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/ArcProgressView.swift b/Sources/SmileID/Classes/SelfieCapture/View/ArcProgressView.swift new file mode 100644 index 000000000..ffd20e2a9 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/ArcProgressView.swift @@ -0,0 +1,96 @@ +import SwiftUI + +struct ArcProgressView: View { + // Configuration Properties + var strokeLineWidth: CGFloat = 12 + var arcSize: CGSize = .init(width: 270, height: 270) + var progressTrackColor: Color = SmileID.theme.onDark + var progressFillColor: Color = SmileID.theme.success + + // View Properties + var position: Position + var progress: CGFloat + var totalSteps: Int = 10 + var minValue: CGFloat = 0 + var maxValue: CGFloat = 1.0 + var clockwise: Bool = false + + enum Position { case top, right, left } + + var body: some View { + ZStack { + ArcShape(clockwise: clockwise, position: position) + .stroke( + style: StrokeStyle( + lineWidth: strokeLineWidth, + lineCap: .round, + lineJoin: .round + ) + ) + .foregroundColor(progressTrackColor) + .frame(width: arcSize.width, height: arcSize.height) + ArcShape(clockwise: clockwise, position: position) + .trim(from: 0.0, to: normalizedProgress) + .stroke( + style: 
StrokeStyle( + lineWidth: strokeLineWidth, + lineCap: .round, + lineJoin: .round + ) + ) + .animation(.linear, value: normalizedProgress) + .foregroundColor(progressFillColor) + .frame(width: arcSize.width, height: arcSize.height) + } + } + + private var normalizedProgress: CGFloat { + (progress - minValue) / (maxValue - minValue) + } + + private var remainingSteps: Int { + return max(0, totalSteps - Int(progress)) + } +} + +struct ArcShape: Shape { + var clockwise: Bool = false + var position: ArcProgressView.Position + + func path(in rect: CGRect) -> Path { + var path = Path() + + // Position-dependent values + let startAngle: CGFloat = 180 + let endAngle: CGFloat + let radius: CGFloat + let horizontalOffset: CGFloat + let verticalOffset: CGFloat + + switch position { + case .top: + endAngle = 120 + radius = rect.width / 2 + horizontalOffset = 0 + verticalOffset = 0 + case .right, .left: + endAngle = 150 + radius = rect.width + horizontalOffset = -(radius - rect.width / 2) + verticalOffset = 0 + } + + path.addArc( + center: CGPoint( + x: rect.midX - horizontalOffset, + y: rect.midY - verticalOffset + ), + radius: radius, + startAngle: Angle(degrees: startAngle), + endAngle: Angle(degrees: clockwise ? endAngle : -endAngle), + clockwise: clockwise + ) + + return path + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift b/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift index 8833a4a15..98da6d547 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift @@ -3,16 +3,21 @@ import AVFoundation import Vision struct CameraView: UIViewControllerRepresentable { - typealias UIViewType = PreviewView - let preview: PreviewView + typealias UIViewType = CameraViewController + let cameraViewController: CameraViewController - init(cameraManager: CameraManager) { - preview = PreviewView(cameraManager: cameraManager) + init( + cameraManager: CameraManager, + selfieViewModel: EnhancedSmartSelfieViewModel? 
= nil + ) { + let controller = CameraViewController(cameraManager: cameraManager) + controller.faceDetector = selfieViewModel?.faceDetector + cameraViewController = controller } - func makeUIViewController(context: Context) -> PreviewView { - preview + func makeUIViewController(context: Context) -> CameraViewController { + cameraViewController } - func updateUIViewController(_ uiViewController: PreviewView, context: Context) {} + func updateUIViewController(_ uiViewController: CameraViewController, context: Context) {} } diff --git a/Sources/SmileID/Classes/SelfieCapture/View/CircularProgressView.swift b/Sources/SmileID/Classes/SelfieCapture/View/CircularProgressView.swift new file mode 100644 index 000000000..863eb7aaa --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/CircularProgressView.swift @@ -0,0 +1,20 @@ +import SwiftUI + +struct CircularProgressView: View { + @State private var rotationAngle: Double = 0.0 + + var body: some View { + Image(uiImage: SmileIDResourcesHelper.Loader) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(width: 48, height: 48) + .rotationEffect(Angle(degrees: rotationAngle)) + .onAppear { + DispatchQueue.main.async { + withAnimation(.linear(duration: 1.0).repeatForever(autoreverses: false)) { + rotationAngle = 360 + } + } + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/EnhancedSelfieCaptureScreen.swift b/Sources/SmileID/Classes/SelfieCapture/View/EnhancedSelfieCaptureScreen.swift new file mode 100644 index 000000000..f79d7ebb9 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/EnhancedSelfieCaptureScreen.swift @@ -0,0 +1,142 @@ +import SwiftUI + +public struct EnhancedSelfieCaptureScreen: View { + @Backport.StateObject var viewModel: EnhancedSmartSelfieViewModel + let showAttribution: Bool + + private let faceShape = FaceShape() + private(set) var originalBrightness = UIScreen.main.brightness + private let cameraContainerHeight: CGFloat = 480 + + public var body: some View { + GeometryReader { proxy in + VStack(spacing: 10) { + switch viewModel.selfieCaptureState { + case .capturingSelfie: + ZStack { + CameraView( + cameraManager: viewModel.cameraManager, + selfieViewModel: viewModel + ) + .cornerRadius(40) + .frame(height: cameraContainerHeight) + + RoundedRectangle(cornerRadius: 40) + .fill(SmileID.theme.tertiary.opacity(0.8)) + .reverseMask(alignment: .top) { + faceShape + .frame(width: 250, height: 350) + .padding(.top, 50) + } + .frame(height: cameraContainerHeight) + VStack { + ZStack { + FaceBoundingArea( + faceInBounds: viewModel.faceInBounds, + selfieCaptured: viewModel.selfieCaptured, + showGuideAnimation: viewModel.showGuideAnimation, + guideAnimation: viewModel.userInstruction?.guideAnimation + ) + if let currentLivenessTask = viewModel.livenessCheckManager.currentTask, + viewModel.faceInBounds { + LivenessGuidesView( + currentLivenessTask: currentLivenessTask, + topArcProgress: $viewModel.livenessCheckManager.lookUpProgress, + rightArcProgress: $viewModel.livenessCheckManager.lookRightProgress, + leftArcProgress: $viewModel.livenessCheckManager.lookLeftProgress + ) + } + } + .padding(.top, 50) + Spacer() + if let userInstruction = viewModel.userInstruction { + UserInstructionsView( + instruction: userInstruction.instruction + ) + } + } + } + .selfieCaptureFrameBackground(cameraContainerHeight) + if showAttribution { + Image(uiImage: SmileIDResourcesHelper.SmileEmblem) + } + case let .processing(processingState): + ZStack { + if let selfieImage = viewModel.selfieImage { + 
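+                        // Keep the captured selfie on screen as a static
+                        // preview while the submission is processing.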
SelfiePreviewView(image: selfieImage) + } + RoundedRectangle(cornerRadius: 40) + .fill(SmileID.theme.tertiary.opacity(0.8)) + .reverseMask(alignment: .top) { + faceShape + .frame(width: 250, height: 350) + .padding(.top, 50) + } + .frame(height: cameraContainerHeight) + VStack { + Spacer() + UserInstructionsView( + instruction: processingState.title, + message: processingState == .error ? getErrorSubtitle( + errorMessageRes: viewModel.errorMessageRes, + errorMessage: viewModel.errorMessage + ) : nil + ) + } + SubmissionStatusView(processState: processingState) + .padding(.bottom, 40) + } + .selfieCaptureFrameBackground(cameraContainerHeight) + if showAttribution { + Image(uiImage: SmileIDResourcesHelper.SmileEmblem) + } + } + Spacer() + SelfieActionsView( + captureState: viewModel.selfieCaptureState, + retryAction: { viewModel.perform(action: .retryJobSubmission) }, + cancelAction: { + viewModel.perform(action: .cancelSelfieCapture) + } + ) + } + .padding(.vertical, 20) + .navigationBarHidden(true) + .onAppear { + UIScreen.main.brightness = 1 + UIApplication.shared.isIdleTimerDisabled = true + viewModel.perform(action: .windowSizeDetected(proxy.size, proxy.safeAreaInsets)) + viewModel.perform(action: .onViewAppear) + } + .onDisappear { + UIScreen.main.brightness = originalBrightness + UIApplication.shared.isIdleTimerDisabled = false + viewModel.cameraManager.pauseSession() + } + .alert(item: $viewModel.unauthorizedAlert) { alert in + Alert( + title: Text(alert.title), + message: Text(alert.message ?? ""), + primaryButton: .default( + Text( + SmileIDResourcesHelper.localizedString( + for: "Camera.Unauthorized.PrimaryAction")), + action: { + viewModel.perform(action: .openApplicationSettings) + } + ), + secondaryButton: .cancel() + ) + } + } + } +} + +extension View { + func selfieCaptureFrameBackground(_ containerHeight: CGFloat) -> some View { + self + .shadow(color: .black.opacity(0.25), radius: 4, x: 0, y: 4) + .frame(height: containerHeight) + .padding(.horizontal) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingArea.swift b/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingArea.swift new file mode 100644 index 000000000..5e0b1cbd6 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingArea.swift @@ -0,0 +1,50 @@ +import Lottie +import SwiftUI + +struct FaceBoundingArea: View { + var faceInBounds: Bool + var selfieCaptured: Bool + var showGuideAnimation: Bool + var guideAnimation: CaptureGuideAnimation? + + private let faceShape = FaceShape() + @State private var playbackMode: LottiePlaybackMode = .paused + + var body: some View { + ZStack { + // Face Bounds Indicator + faceShape + .stroke( + faceInBounds ? selfieCaptured ? 
.clear : SmileID.theme.success : SmileID.theme.error, + style: StrokeStyle(lineWidth: 8) + ) + .frame(width: 260, height: 360) + + if let guideAnimation = guideAnimation, + showGuideAnimation { + LottieView { + try await DotLottieFile + .named( + guideAnimation.fileName, + bundle: SmileIDResourcesHelper.bundle + ) + } + .playbackMode(playbackMode) + .frame(width: 224, height: 224) + .onAppear { + playbackMode = getPlaybackMode(guideAnimation) + } + } + } + } + + private func getPlaybackMode(_ animation: CaptureGuideAnimation) -> LottiePlaybackMode { + return .playing( + .fromProgress( + animation.animationProgressRange.lowerBound, + toProgress: animation.animationProgressRange.upperBound, + loopMode: .autoReverse + ) + ) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/FaceShapedProgressIndicator.swift b/Sources/SmileID/Classes/SelfieCapture/View/FaceShapedProgressIndicator.swift index f139dda96..6c2aa9e96 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/FaceShapedProgressIndicator.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/FaceShapedProgressIndicator.swift @@ -4,7 +4,7 @@ import SwiftUI struct FaceShapedProgressIndicator: View { let progress: Double private let strokeWidth = 10 - private let faceShape = FaceShape().scale(x: 0.8, y: 0.6).offset(y: -50) + private let faceShape = FaceShape().scale(x: 0.8, y: 0.55).offset(y: -50) private let bgColor = Color.white.opacity(0.8) var body: some View { bgColor diff --git a/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift b/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift new file mode 100644 index 000000000..dd3f33e62 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift @@ -0,0 +1,13 @@ +import SwiftUI + +struct LayoutGuideView: View { + let layoutGuideFrame: CGRect + + var body: some View { + VStack { + Ellipse() + .stroke(.blue) + .frame(width: layoutGuideFrame.width, height: layoutGuideFrame.height) + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift b/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift new file mode 100644 index 000000000..81dccba45 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift @@ -0,0 +1,62 @@ +import Lottie +import SwiftUI + +public struct LivenessCaptureInstructionsView: View { + @State private var showSelfieCaptureView: Bool = false + + private let showAttribution: Bool + private let viewModel: EnhancedSmartSelfieViewModel + + public init(showAttribution: Bool, viewModel: EnhancedSmartSelfieViewModel) { + self.showAttribution = showAttribution + self.viewModel = viewModel + } + + public var body: some View { + VStack { + ZStack { + LottieView { + try await DotLottieFile + .named( + "instruction_screen_with_side_bar", + bundle: SmileIDResourcesHelper.bundle + ) + } + .playing(loopMode: .loop) + .frame(width: 235, height: 235) + } + .padding(.top, 100) + Spacer() + Text(SmileIDResourcesHelper.localizedString(for: "Instructions.SelfieCapture")) + .multilineTextAlignment(.center) + .font(SmileID.theme.header4) + .lineSpacing(4) + .foregroundColor(SmileID.theme.tertiary) + + Spacer() + + VStack(spacing: 20) { + NavigationLink( + destination: EnhancedSelfieCaptureScreen( + viewModel: viewModel, + showAttribution: showAttribution + ), + isActive: $showSelfieCaptureView + ) { EmptyView() } + + SmileButton( + title: "Action.GetStarted", + clicked: { + self.showSelfieCaptureView = true + } + ) + + if 
showAttribution { + Image(uiImage: SmileIDResourcesHelper.SmileEmblem) + } + } + } + .padding(.horizontal, 24) + .padding(.bottom, 40) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/LivenessGuidesView.swift b/Sources/SmileID/Classes/SelfieCapture/View/LivenessGuidesView.swift new file mode 100644 index 000000000..1e7f2a307 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/LivenessGuidesView.swift @@ -0,0 +1,28 @@ +import SwiftUI + +struct LivenessGuidesView: View { + var currentLivenessTask: LivenessTask + @Binding var topArcProgress: CGFloat + @Binding var rightArcProgress: CGFloat + @Binding var leftArcProgress: CGFloat + + var body: some View { + ZStack { + ArcProgressView(position: .top, progress: topArcProgress) + .rotationEffect(Angle(degrees: 60)) + .opacity(currentLivenessTask == .lookUp ? 1.0 : 0.0) + .animation(.easeInOut(duration: 0.2), value: currentLivenessTask) + .padding(.bottom, 120) + + ArcProgressView(position: .right, progress: rightArcProgress, clockwise: true) + .rotationEffect(Angle(degrees: -155)) + .opacity(currentLivenessTask == .lookRight ? 1.0 : 0.0) + .animation(.easeInOut(duration: 0.2), value: currentLivenessTask) + + ArcProgressView(position: .left, progress: leftArcProgress) + .rotationEffect(Angle(degrees: -25)) + .opacity(currentLivenessTask == .lookLeft ? 1.0 : 0.0) + .animation(.easeInOut(duration: 0.2), value: currentLivenessTask) + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedEnhancedSelfieCaptureScreen.swift b/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedEnhancedSelfieCaptureScreen.swift new file mode 100644 index 000000000..6cd1b67cd --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedEnhancedSelfieCaptureScreen.swift @@ -0,0 +1,56 @@ +import Foundation +import SwiftUI + +/// Orchestrates the selfie capture flow - navigates between instructions, requesting permissions, +/// showing camera view, and displaying processing screen +public struct OrchestratedEnhancedSelfieCaptureScreen: View { + public let allowAgentMode: Bool + public let showAttribution: Bool + public let showInstructions: Bool + public let onResult: SmartSelfieResultDelegate + private let viewModel: EnhancedSmartSelfieViewModel + + public init( + userId: String, + jobId: String, + isEnroll: Bool, + allowNewEnroll: Bool, + allowAgentMode: Bool, + showAttribution: Bool, + showInstructions: Bool, + useStrictMode: Bool, + extraPartnerParams: [String: String], + skipApiSubmission: Bool, + onResult: SmartSelfieResultDelegate + ) { + self.allowAgentMode = allowAgentMode + self.showAttribution = showAttribution + self.showInstructions = showInstructions + self.onResult = onResult + self.viewModel = EnhancedSmartSelfieViewModel( + isEnroll: isEnroll, + userId: userId, + jobId: jobId, + allowNewEnroll: allowNewEnroll, + skipApiSubmission: skipApiSubmission, + extraPartnerParams: extraPartnerParams, + useStrictMode: useStrictMode, + onResult: onResult, + localMetadata: LocalMetadata() + ) + } + + public var body: some View { + if showInstructions { + LivenessCaptureInstructionsView( + showAttribution: showAttribution, + viewModel: viewModel + ) + } else { + EnhancedSelfieCaptureScreen( + viewModel: viewModel, + showAttribution: showAttribution + ) + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreen.swift b/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreen.swift index fd608cb47..ff0981853 100644 --- 
a/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreen.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreen.swift @@ -67,8 +67,8 @@ public struct OrchestratedSelfieCaptureScreen: View { for: "Confirmation.Failure" ), errorSubtitle: getErrorSubtitle( - errorMessageRes: $viewModel.errorMessageRes.wrappedValue, - errorMessage: $viewModel.errorMessage.wrappedValue + errorMessageRes: viewModel.errorMessageRes, + errorMessage: viewModel.errorMessage ), errorIcon: SmileIDResourcesHelper.Scan, continueButtonText: SmileIDResourcesHelper.localizedString( @@ -105,8 +105,8 @@ public struct OrchestratedSelfieCaptureScreen: View { ) } else { SelfieCaptureScreen( - allowAgentMode: allowAgentMode, - viewModel: viewModel + viewModel: viewModel, + allowAgentMode: allowAgentMode ) .onAppear { viewModel.updateLocalMetadata(localMetadata) diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfieActionsView.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfieActionsView.swift new file mode 100644 index 000000000..0d3374bd6 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfieActionsView.swift @@ -0,0 +1,39 @@ +import SwiftUI + +struct SelfieActionsView: View { + var captureState: EnhancedSmartSelfieViewModel.SelfieCaptureState + var retryAction: () -> Void + var cancelAction: () -> Void + + var body: some View { + VStack { + switch captureState { + case .capturingSelfie: + cancelButton + case .processing(let processingState): + switch processingState { + case .inProgress: + cancelButton + case .success: + EmptyView() + case .error: + SmileButton(title: "Confirmation.Retry") { + retryAction() + } + cancelButton + } + } + } + .padding(.horizontal, 65) + } + + var cancelButton: some View { + Button { + cancelAction() + } label: { + Text(SmileIDResourcesHelper.localizedString(for: "Action.Cancel")) + .font(SmileID.theme.button) + .foregroundColor(SmileID.theme.error) + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreen.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreen.swift index 11c8bfad8..2e4f29ea0 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreen.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreen.swift @@ -4,13 +4,8 @@ import SwiftUI /// The actual selfie capture screen, which shows the camera preview and the progress indicator public struct SelfieCaptureScreen: View { + @Backport.StateObject var viewModel: SelfieViewModel let allowAgentMode: Bool - @ObservedObject var viewModel: SelfieViewModel - - public init(allowAgentMode: Bool, viewModel: SelfieViewModel) { - self.allowAgentMode = allowAgentMode - self.viewModel = viewModel - } public var body: some View { ZStack { diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfiePreviewView.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfiePreviewView.swift new file mode 100644 index 000000000..fe824010d --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfiePreviewView.swift @@ -0,0 +1,13 @@ +import SwiftUI + +struct SelfiePreviewView: View { + var image: UIImage + + var body: some View { + Image(uiImage: image) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(height: 480) + .clipShape(.rect(cornerRadius: 40)) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SubmissionStatusView.swift b/Sources/SmileID/Classes/SelfieCapture/View/SubmissionStatusView.swift new file mode 100644 index 000000000..e590d85a8 --- /dev/null +++ 
b/Sources/SmileID/Classes/SelfieCapture/View/SubmissionStatusView.swift @@ -0,0 +1,25 @@ +import SwiftUI + +struct SubmissionStatusView: View { + var processState: ProcessingState + + var body: some View { + switch processState { + case .inProgress: + CircularProgressView() + .frame(width: 48, height: 48) + case .success: + StatusImage(SmileIDResourcesHelper.Checkmark) + case .error: + StatusImage(SmileIDResourcesHelper.Xmark) + } + } + + // swiftlint:disable identifier_name + @ViewBuilder func StatusImage(_ uiImage: UIImage) -> some View { + Image(uiImage: uiImage) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(width: 48, height: 48) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift b/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift new file mode 100644 index 000000000..79836a437 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift @@ -0,0 +1,25 @@ +import SwiftUI + +struct UserInstructionsView: View { + var instruction: String + var message: String? + + var body: some View { + VStack { + Spacer(minLength: 0) + Text(SmileIDResourcesHelper.localizedString(for: instruction)) + .font(SmileID.theme.header2) + .foregroundColor(SmileID.theme.onDark) + .multilineTextAlignment(.center) + .lineLimit(2) + .minimumScaleFactor(0.8) + if let message = message { + Text(message) + .multilineTextAlignment(.center) + .font(SmileID.theme.header5) + .foregroundColor(SmileID.theme.onDark) + } + } + .padding(20) + } +} diff --git a/Sources/SmileID/Classes/SmileID.swift b/Sources/SmileID/Classes/SmileID.swift index 1f48c71c0..50497fafd 100644 --- a/Sources/SmileID/Classes/SmileID.swift +++ b/Sources/SmileID/Classes/SmileID.swift @@ -164,7 +164,8 @@ public class SmileID { if !jobIds.contains(jobId) { throw SmileIDError.invalidJobId } - guard let authRequestFile = try? LocalStorage.fetchAuthenticationRequestFile(jobId: jobId) else { + guard let authRequestFile = try? LocalStorage.fetchAuthenticationRequestFile(jobId: jobId) + else { throw SmileIDError.fileNotFound("Authentication Request file is missing") } guard let prepUploadFile = try? LocalStorage.fetchPrepUploadFile(jobId: jobId) else { @@ -180,7 +181,8 @@ public class SmileID { ) let authResponse = try await SmileID.api.authenticate(request: authRequest) let prepUploadRequest = PrepUploadRequest( - partnerParams: authResponse.partnerParams.copy(extras: prepUploadFile.partnerParams.extras), + partnerParams: authResponse.partnerParams.copy( + extras: prepUploadFile.partnerParams.extras), // TODO: - Fix when Michael changes this to boolean allowNewEnroll: String(prepUploadFile.allowNewEnroll), timestamp: authResponse.timestamp, @@ -203,7 +205,8 @@ public class SmileID { } let allFiles: [URL] do { - let livenessFiles = try LocalStorage.getFilesByType(jobId: jobId, fileType: .liveness) ?? [] + let livenessFiles = + try LocalStorage.getFilesByType(jobId: jobId, fileType: .liveness) ?? [] let additionalFiles = try [ LocalStorage.getFileByType(jobId: jobId, fileType: .selfie), LocalStorage.getFileByType(jobId: jobId, fileType: .documentFront), @@ -293,29 +296,46 @@ public class SmileID { /// - skipApiSubmission: Whether to skip api submission to SmileID and return only captured images /// - extraPartnerParams: Custom values specific to partners /// - delegate: Callback to be invoked when the SmartSelfie™ Enrollment is complete. 
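Worth noting about UserInstructionsView above: the `instruction` argument is a localization key resolved through SmileIDResourcesHelper, while the optional `message` is rendered verbatim. For example, with one of the keys added to Localizable.strings further down:

UserInstructionsView(
    instruction: "Instructions.TurnHeadLeft", // resolved to "Turn your head to the left"
    message: nil                              // optional secondary line, shown as-is when non-nil
)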
- public class func smartSelfieEnrollmentScreen( + @ViewBuilder public class func smartSelfieEnrollmentScreen( userId: String = generateUserId(), jobId: String = generateJobId(), allowNewEnroll: Bool = false, allowAgentMode: Bool = false, showAttribution: Bool = true, showInstructions: Bool = true, + useStrictMode: Bool = false, skipApiSubmission: Bool = false, extraPartnerParams: [String: String] = [:], delegate: SmartSelfieResultDelegate ) -> some View { - OrchestratedSelfieCaptureScreen( - userId: userId, - jobId: jobId, - isEnroll: true, - allowNewEnroll: allowNewEnroll, - allowAgentMode: allowAgentMode, - showAttribution: showAttribution, - showInstructions: showInstructions, - extraPartnerParams: extraPartnerParams, - skipApiSubmission: skipApiSubmission, - onResult: delegate - ) + if useStrictMode { + OrchestratedEnhancedSelfieCaptureScreen( + userId: userId, + jobId: jobId, + isEnroll: true, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + useStrictMode: useStrictMode, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: skipApiSubmission, + onResult: delegate + ) + } else { + OrchestratedSelfieCaptureScreen( + userId: userId, + jobId: jobId, + isEnroll: true, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: skipApiSubmission, + onResult: delegate + ) + } } /// Perform a SmartSelfie™ Authentication @@ -339,29 +359,45 @@ public class SmileID { /// - skipApiSubmission: Whether to skip api submission to SmileID and return only captured images /// - extraPartnerParams: Custom values specific to partners /// - delegate: Callback to be invoked when the SmartSelfie™ Authentication is complete. 
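The upshot of the enrollment change above is that a single public entry point now fans out to the enhanced flow when useStrictMode is true. A hedged integration sketch, where MyResultDelegate is a hypothetical type conforming to the SDK's SmartSelfieResultDelegate protocol:

struct EnrollmentHostView: View {
    // Hypothetical conformer to SmartSelfieResultDelegate.
    let delegate = MyResultDelegate()

    var body: some View {
        SmileID.smartSelfieEnrollmentScreen(
            userId: "user-1234", // illustrative; defaults to generateUserId()
            useStrictMode: true, // routes to OrchestratedEnhancedSelfieCaptureScreen
            delegate: delegate
        )
    }
}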
- public class func smartSelfieAuthenticationScreen( + @ViewBuilder public class func smartSelfieAuthenticationScreen( userId: String, jobId: String = generateJobId(), allowNewEnroll: Bool = false, allowAgentMode: Bool = false, showAttribution: Bool = true, showInstructions: Bool = true, - skipApiSubmission: Bool = false, + useStrictMode: Bool = false, extraPartnerParams: [String: String] = [:], delegate: SmartSelfieResultDelegate ) -> some View { - OrchestratedSelfieCaptureScreen( - userId: userId, - jobId: jobId, - isEnroll: false, - allowNewEnroll: allowNewEnroll, - allowAgentMode: allowAgentMode, - showAttribution: showAttribution, - showInstructions: showInstructions, - extraPartnerParams: extraPartnerParams, - skipApiSubmission: skipApiSubmission, - onResult: delegate - ) + if useStrictMode { + OrchestratedEnhancedSelfieCaptureScreen( + userId: userId, + jobId: jobId, + isEnroll: false, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + useStrictMode: useStrictMode, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: false, + onResult: delegate + ) + } else { + OrchestratedSelfieCaptureScreen( + userId: userId, + jobId: jobId, + isEnroll: false, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: false, + onResult: delegate + ) + } } /// Perform a Document Verification diff --git a/Sources/SmileID/Classes/Util.swift b/Sources/SmileID/Classes/Util.swift index d2d612c34..bb2c23b3a 100644 --- a/Sources/SmileID/Classes/Util.swift +++ b/Sources/SmileID/Classes/Util.swift @@ -24,6 +24,21 @@ public extension View { } } +extension View { + @inlinable func reverseMask<Mask: View>( + alignment: Alignment = .center, + @ViewBuilder _ mask: () -> Mask + ) -> some View { + self.mask( + ZStack(alignment: alignment) { + Rectangle() + mask() + .blendMode(.destinationOut) + } + ) + } +} + private struct StackedShape<Bottom: Shape, Top: Shape>: Shape { var bottom: Bottom var top: Top @@ -64,6 +79,10 @@ func toErrorMessage(error: SmileIDError) -> (String, String?) { return (error.localizedDescription, nil) case let .httpError(_, message): return ("", message) + case let .fileNotFound(message): + return (message, nil) + case let .unknown(message): + return (message, nil) default: return ("Confirmation.FailureReason", nil) } diff --git a/Sources/SmileID/Classes/Views/ProcessingScreen.swift b/Sources/SmileID/Classes/Views/ProcessingScreen.swift index c59b3d7ed..ae56f456b 100644 --- a/Sources/SmileID/Classes/Views/ProcessingScreen.swift +++ b/Sources/SmileID/Classes/Views/ProcessingScreen.swift @@ -1,10 +1,18 @@ import Foundation import SwiftUI -public enum ProcessingState { +public enum ProcessingState: Equatable { case inProgress case success case error + + var title: String { + switch self { + case .inProgress: return "ProcessingState.Submitting" + case .success: return "ProcessingState.Successful" + case .error: return "ProcessingState.Failed" + } + } } /// This screen represents a generic Processing state. It has 3 sub-states: In Progress, Success, and Error.
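The new reverseMask helper above inverts a mask via .destinationOut blending: the Rectangle keeps every pixel, and whatever the builder returns is punched out of it. A sketch of the typical use, dimming everything around a cutout; the oval dimensions are illustrative:

Color.black.opacity(0.6)
    .reverseMask {
        // The ellipse becomes a transparent hole in the dimmed overlay.
        Ellipse().frame(width: 260, height: 360)
    }
    .edgesIgnoringSafeArea(.all)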
diff --git a/Sources/SmileID/Classes/Views/SearchableDropdownSelector.swift b/Sources/SmileID/Classes/Views/SearchableDropdownSelector.swift index 4e8ae8b83..92c8952a0 100644 --- a/Sources/SmileID/Classes/Views/SearchableDropdownSelector.swift +++ b/Sources/SmileID/Classes/Views/SearchableDropdownSelector.swift @@ -36,11 +36,12 @@ public struct SearchableDropdownSelector<T: Identifiable>: View { Spacer() Text(itemDisplayName(selectedItem)) .foregroundColor(SmileID.theme.accent) - .onTapGesture { onItemSelected(nil) } Spacer() Image(systemName: "arrowtriangle.down.circle.fill") .foregroundColor(SmileID.theme.accent) } + .contentShape(.rect) + .onTapGesture { onItemSelected(nil) } } else { ZStack(alignment: .leading) { Image(systemName: "magnifyingglass") diff --git a/Sources/SmileID/Resources/Fonts/DMSans-Bold.ttf b/Sources/SmileID/Resources/Fonts/DMSans-Bold.ttf new file mode 100644 index 000000000..4f5412dc8 Binary files /dev/null and b/Sources/SmileID/Resources/Fonts/DMSans-Bold.ttf differ diff --git a/Sources/SmileID/Resources/Fonts/DMSans-Medium.ttf b/Sources/SmileID/Resources/Fonts/DMSans-Medium.ttf new file mode 100644 index 000000000..841d31d03 Binary files /dev/null and b/Sources/SmileID/Resources/Fonts/DMSans-Medium.ttf differ diff --git a/Sources/SmileID/Resources/Fonts/DMSans-Regular.ttf b/Sources/SmileID/Resources/Fonts/DMSans-Regular.ttf new file mode 100644 index 000000000..07266ae18 Binary files /dev/null and b/Sources/SmileID/Resources/Fonts/DMSans-Regular.ttf differ diff --git a/Sources/SmileID/Resources/Localization/en.lproj/Localizable.strings b/Sources/SmileID/Resources/Localization/en.lproj/Localizable.strings index 44456b528..252dfa138 100644 --- a/Sources/SmileID/Resources/Localization/en.lproj/Localizable.strings +++ b/Sources/SmileID/Resources/Localization/en.lproj/Localizable.strings @@ -14,6 +14,7 @@ "Instructions.Action" = "I'm Ready"; "Instructions.UnableToDetectFace" = "Unable to detect face. Please ensure your face is in the oval"; "Instructions.MoveCloser" = "Move closer"; +"Instructions.MoveBack" = "Move back"; "Instructions.MoveFarther" = "Move farther away"; "Instructions.UnknownError" = "We cannot take your photo right now"; "Instructions.MultipleFaces" = "Ensure only one face is visible"; @@ -21,7 +22,14 @@ "Instructions.Capturing" = "Capturing…"; "Instructions.PutFaceInOval" = "Position your face in the oval"; "Instructions.Quality" = "Move to well lit area and clear face of obstructions"; +"Instructions.Brightness" = "Move to a well-lit room"; "Instructions.Start" = "Put your face inside the oval frame and wait until it turns green"; +"Instructions.SelfieCapture" = "Position your head in the camera view. Then move in the direction that is indicated."; +"Instructions.PositionHeadInView" = "Position your head in view"; +"Instructions.TurnHeadLeft" = "Turn your head to the left"; +"Instructions.TurnHeadRight" = "Turn your head to the right"; +"Instructions.TurnHeadUp" = "Turn your head slightly up"; +"Instructions.TurnPhoneUp" = "Turn your phone to portrait mode"; "Instructions.Document.Front.Header" = "Submit Front of ID"; "Instructions.Document.Front.Callout" = "We'll use it to verify your identity. Please follow the instructions below"; "Instructions.Document.GoodLightBody" = "Make sure your ID image is taken in a well-lit environment.
Ensure the ID is clear and visible."; @@ -36,6 +44,9 @@ "Confirmation.ProcessingSelfie" = "Processing Selfie"; "Confirmation.Time" = "Just a few more seconds"; "Confirmation.SelfieCaptureComplete" = "Selfie Capture Complete"; +"ProcessingState.Submitting" = "Submitting"; +"ProcessingState.Successful" = "Successful"; +"ProcessingState.Failed" = "Failed"; "Confirmation.SuccessBody" = "Your selfie was successfully taken. You can now proceed."; "Confirmation.Continue" = "Continue"; "Confirmation.Failure" = "Your selfie failed to process"; @@ -45,6 +56,9 @@ "Action.TakePhoto" = "Take Photo"; "Action.UploadPhoto" = "Upload Photo"; "Action.Skip" = "Skip back of ID"; +"Action.GetStarted" = "Get Started"; +"Action.Cancel" = "Cancel"; +"Action.Done" = "Done"; "Camera.AgentMode" = "Agent Mode"; "Camera.Unauthorized.Title" = "Allow access to your camera"; @@ -112,6 +126,7 @@ "Si.Error.Message.2211" = ""; "Si.Error.Message.2212" = ""; "Si.Error.Message.2213" = ""; +"Si.Error.Message.2214" = ""; "Si.Error.Message.2413" = ""; "Si.Error.Message.2215" = ""; "Si.Error.Message.2216" = ""; diff --git a/Sources/SmileID/Resources/LottieFiles/device_orientation.lottie b/Sources/SmileID/Resources/LottieFiles/device_orientation.lottie new file mode 100644 index 000000000..7bcdd86e9 Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/device_orientation.lottie differ diff --git a/Sources/SmileID/Resources/LottieFiles/headdirection_with_bg.lottie b/Sources/SmileID/Resources/LottieFiles/headdirection_with_bg.lottie new file mode 100644 index 000000000..53c54f9db Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/headdirection_with_bg.lottie differ diff --git a/Sources/SmileID/Resources/LottieFiles/instruction_screen_with_side_bar.lottie b/Sources/SmileID/Resources/LottieFiles/instruction_screen_with_side_bar.lottie new file mode 100644 index 000000000..792d9345e Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/instruction_screen_with_side_bar.lottie differ diff --git a/Sources/SmileID/Resources/LottieFiles/light_animation_with_bg.lottie b/Sources/SmileID/Resources/LottieFiles/light_animation_with_bg.lottie new file mode 100644 index 000000000..56784698c Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/light_animation_with_bg.lottie differ diff --git a/Sources/SmileID/Resources/LottieFiles/positioning_with_bg.lottie b/Sources/SmileID/Resources/LottieFiles/positioning_with_bg.lottie new file mode 100644 index 000000000..3097c92c3 Binary files /dev/null and b/Sources/SmileID/Resources/LottieFiles/positioning_with_bg.lottie differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin new file mode 100644 index 000000000..577fd6917 Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/coremldata.bin new file mode 100644 index 000000000..df8a17152 Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/coremldata.bin differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/metadata.json b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/metadata.json new file mode 100644 index 000000000..a3266749a --- /dev/null +++ 
b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/metadata.json @@ -0,0 +1,64 @@ +[ + { + "metadataOutputVersion" : "3.0", + "storagePrecision" : "Float32", + "outputSchema" : [ + { + "hasShapeFlexibility" : "0", + "isOptional" : "0", + "dataType" : "Float32", + "formattedType" : "MultiArray (Float32)", + "shortDescription" : "", + "shape" : "[]", + "name" : "Identity", + "type" : "MultiArray" + } + ], + "modelParameters" : [ + + ], + "specificationVersion" : 4, + "computePrecision" : "Float16", + "isUpdatable" : "0", + "availability" : { + "macOS" : "10.15", + "tvOS" : "13.0", + "visionOS" : "1.0", + "watchOS" : "6.0", + "iOS" : "13.0", + "macCatalyst" : "13.0" + }, + "neuralNetworkLayerTypeHistogram" : { + "ActivationReLU" : 5, + "ReshapeStatic" : 1, + "Transpose" : 1, + "SoftmaxND" : 1, + "Convolution" : 5, + "InnerProduct" : 2, + "PoolingMax" : 3 + }, + "modelType" : { + "name" : "MLModelType_neuralNetwork" + }, + "userDefinedMetadata" : { + "com.github.apple.coremltools.version" : "7.2", + "com.github.apple.coremltools.source" : "tensorflow==2.15.0" + }, + "generatedClassName" : "SelfieQualityDetector", + "inputSchema" : [ + { + "height" : "120", + "colorspace" : "RGB", + "isOptional" : "0", + "width" : "120", + "isColor" : "1", + "formattedType" : "Image (Color 120 × 120)", + "hasSizeFlexibility" : "0", + "type" : "Image", + "shortDescription" : "", + "name" : "conv2d_193_input" + } + ], + "method" : "predict" + } +] \ No newline at end of file diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.net b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.net new file mode 100644 index 000000000..736b9233a --- /dev/null +++ b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.net @@ -0,0 +1,327 @@ +{ + "layers" : [ + { + "pad_r" : 0, + "fused_relu" : 1, + "fused_tanh" : 0, + "debug_info" : "sequential_35\/conv2d_193\/BiasAdd", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/conv2d_193\/Relu", + "blob_weights" : 3, + "K" : 3, + "blob_biases" : 1, + "name" : "sequential_35\/conv2d_193\/BiasAdd", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 16, + "bottom" : "conv2d_193_input", + "weights" : { + + }, + "Nx" : 3, + "pad_mode" : 0, + "pad_value" : 0, + "Ny" : 3, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "fused_relu" : 1, + "fused_tanh" : 0, + "debug_info" : "sequential_35\/conv2d_194\/BiasAdd", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/conv2d_194\/Relu", + "blob_weights" : 7, + "K" : 16, + "blob_biases" : 5, + "name" : "sequential_35\/conv2d_194\/BiasAdd", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 32, + "bottom" : "sequential_35\/conv2d_193\/Relu", + "weights" : { + + }, + "Nx" : 3, + "pad_mode" : 0, + "pad_value" : 0, + "Ny" : 3, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "debug_info" : "max_pool_0", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "size_x" : 2, + "top" : "max_pool_0", + "top_shape_style" : 0, + "stride_x" : 2, + "avg_or_max" : 1, + "average_count_exclude_padding" : 1, + "type" : "pool", + "name" : "max_pool_0", + "pad_t" : 0, + "stride_y" : 2, + "bottom" : "sequential_35\/conv2d_194\/Relu", + "weights" : { + + }, + "pad_mode" : 2, + "size_y" : 2, + "pad_value" : 0 + }, + { + "pad_r" : 0, + "fused_relu" : 1, + "fused_tanh" : 0, + "debug_info" : 
"sequential_35\/batch_normalization_66\/FusedBatchNormV3_nchw", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/activation_66\/Relu", + "blob_weights" : 11, + "K" : 32, + "blob_biases" : 9, + "name" : "sequential_35\/batch_normalization_66\/FusedBatchNormV3_nchw", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 64, + "bottom" : "max_pool_0", + "weights" : { + + }, + "Nx" : 3, + "pad_mode" : 0, + "pad_value" : 0, + "Ny" : 3, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "debug_info" : "max_pool_1", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "size_x" : 2, + "top" : "max_pool_1", + "top_shape_style" : 0, + "stride_x" : 2, + "avg_or_max" : 1, + "average_count_exclude_padding" : 1, + "type" : "pool", + "name" : "max_pool_1", + "pad_t" : 0, + "stride_y" : 2, + "bottom" : "sequential_35\/activation_66\/Relu", + "weights" : { + + }, + "pad_mode" : 2, + "size_y" : 2, + "pad_value" : 0 + }, + { + "pad_r" : 0, + "fused_relu" : 0, + "fused_tanh" : 0, + "debug_info" : "sequential_35\/conv2d_196\/BiasAdd", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/conv2d_196\/BiasAdd", + "blob_weights" : 15, + "K" : 64, + "blob_biases" : 13, + "name" : "sequential_35\/conv2d_196\/BiasAdd", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 128, + "bottom" : "max_pool_1", + "weights" : { + + }, + "Nx" : 3, + "pad_mode" : 0, + "pad_value" : 0, + "Ny" : 3, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "fused_relu" : 1, + "fused_tanh" : 0, + "debug_info" : "sequential_35\/batch_normalization_67\/FusedBatchNormV3_nchw", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "top" : "sequential_35\/activation_67\/Relu", + "blob_weights" : 19, + "K" : 128, + "blob_biases" : 17, + "name" : "sequential_35\/batch_normalization_67\/FusedBatchNormV3_nchw", + "has_batch_norm" : 0, + "type" : "convolution", + "n_groups" : 1, + "pad_t" : 0, + "has_biases" : 1, + "C" : 32, + "bottom" : "sequential_35\/conv2d_196\/BiasAdd", + "weights" : { + + }, + "Nx" : 1, + "pad_mode" : 1, + "pad_value" : 0, + "Ny" : 1, + "n_parallel" : 1 + }, + { + "pad_r" : 0, + "debug_info" : "max_pool_2", + "pad_fill_mode" : 0, + "pad_b" : 0, + "pad_l" : 0, + "size_x" : 2, + "top" : "max_pool_2", + "top_shape_style" : 0, + "stride_x" : 2, + "avg_or_max" : 1, + "average_count_exclude_padding" : 1, + "type" : "pool", + "name" : "max_pool_2", + "pad_t" : 0, + "stride_y" : 2, + "bottom" : "sequential_35\/activation_67\/Relu", + "weights" : { + + }, + "pad_mode" : 2, + "size_y" : 2, + "pad_value" : 0 + }, + { + "axis_h" : 0, + "axis_w" : 2, + "bottom" : "max_pool_2", + "axis_k" : 1, + "axis_n" : 3, + "axis_seq" : 4, + "weights" : { + + }, + "debug_info" : "transpose_17", + "top" : "transpose_17", + "type" : "transpose", + "name" : "transpose_17" + }, + { + "name" : "sequential_35\/flatten_35\/Reshape", + "weights" : { + + }, + "dst_w" : 5408, + "version" : 1, + "dst_n" : 1, + "dst_nd_rank" : 2, + "type" : "reshape", + "dst_h" : -1, + "mode" : 0, + "dynamic_shape" : false, + "bottom" : "transpose_17", + "debug_info" : "sequential_35\/flatten_35\/Reshape", + "dst_seq" : 1, + "dst_k" : 1, + "top" : "sequential_35\/flatten_35\/Reshape" + }, + { + "nB" : 5408, + "top" : "sequential_35\/dense_70\/Relu", + "has_biases" : 1, + "weights" : { + + }, + "nC" : 32, + "blob_weights" : 23, + "type" : "inner_product", + "has_relu" : 1, + "bottom" : "sequential_35\/flatten_35\/Reshape", 
+ "blob_biases" : 21, + "has_tanh" : 0, + "debug_info" : "sequential_35\/dense_70\/BiasAdd", + "name" : "sequential_35\/dense_70\/BiasAdd", + "has_prelu" : 0 + }, + { + "nB" : 32, + "top" : "sequential_35\/dense_71\/BiasAdd", + "has_biases" : 1, + "weights" : { + + }, + "nC" : 2, + "blob_weights" : 27, + "type" : "inner_product", + "has_relu" : 0, + "bottom" : "sequential_35\/dense_70\/Relu", + "blob_biases" : 25, + "has_tanh" : 0, + "debug_info" : "sequential_35\/dense_71\/BiasAdd", + "name" : "sequential_35\/dense_71\/BiasAdd", + "has_prelu" : 0 + }, + { + "bottom" : "sequential_35\/dense_71\/BiasAdd", + "weights" : { + + }, + "nd_axis" : -1, + "debug_info" : "sequential_35\/dense_71\/Softmax", + "top" : "Identity", + "type" : "softmax_nd", + "name" : "sequential_35\/dense_71\/Softmax", + "attributes" : { + "is_output" : 1 + } + } + ], + "transform_params" : { + "conv2d_193_input" : { + "bias_a" : 0, + "bias_g" : 0, + "bias_r" : 0, + "bias_b" : 0, + "center_mean" : 0, + "is_network_bgr" : 0, + "scale" : 1 + } + }, + "storage" : "model.espresso.weights", + "metadata_in_weights" : [ + + ], + "properties" : { + + }, + "analyses" : { + + }, + "format_version" : 200 +} \ No newline at end of file diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape new file mode 100644 index 000000000..b091a65cd --- /dev/null +++ b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape @@ -0,0 +1,102 @@ +{ + "layer_shapes" : { + "sequential_35\/dense_70\/Relu" : { + "k" : 1, + "w" : 32, + "n" : 1, + "_rank" : 2, + "h" : 1 + }, + "max_pool_1" : { + "k" : 64, + "w" : 28, + "n" : 1, + "_rank" : 4, + "h" : 28 + }, + "transpose_17" : { + "k" : 13, + "w" : 32, + "n" : 1, + "_rank" : 4, + "h" : 13 + }, + "conv2d_193_input" : { + "k" : 3, + "w" : 120, + "n" : 1, + "_rank" : 4, + "h" : 120 + }, + "sequential_35\/conv2d_194\/Relu" : { + "k" : 32, + "w" : 116, + "n" : 1, + "_rank" : 4, + "h" : 116 + }, + "sequential_35\/activation_67\/Relu" : { + "k" : 32, + "w" : 26, + "n" : 1, + "_rank" : 4, + "h" : 26 + }, + "max_pool_0" : { + "k" : 32, + "w" : 58, + "n" : 1, + "_rank" : 4, + "h" : 58 + }, + "sequential_35\/activation_66\/Relu" : { + "k" : 64, + "w" : 56, + "n" : 1, + "_rank" : 4, + "h" : 56 + }, + "sequential_35\/conv2d_193\/Relu" : { + "k" : 16, + "w" : 118, + "n" : 1, + "_rank" : 4, + "h" : 118 + }, + "max_pool_2" : { + "k" : 32, + "w" : 13, + "n" : 1, + "_rank" : 4, + "h" : 13 + }, + "sequential_35\/conv2d_196\/BiasAdd" : { + "k" : 128, + "w" : 26, + "n" : 1, + "_rank" : 4, + "h" : 26 + }, + "sequential_35\/dense_71\/BiasAdd" : { + "k" : 1, + "w" : 2, + "n" : 1, + "_rank" : 2, + "h" : 1 + }, + "sequential_35\/flatten_35\/Reshape" : { + "k" : 1, + "w" : 5408, + "n" : 1, + "_rank" : 2, + "h" : 1 + }, + "Identity" : { + "k" : 1, + "w" : 2, + "n" : 1, + "_rank" : 2, + "h" : 1 + } + } +} \ No newline at end of file diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights new file mode 100644 index 000000000..b2e449c7b Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin new file mode 100644 index 
000000000..ce4f3bc74 Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin differ diff --git a/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin new file mode 100644 index 000000000..43f653279 Binary files /dev/null and b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark.png b/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark.png new file mode 100644 index 000000000..3aaf7483e Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark@2x.png b/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark@2x.png new file mode 100644 index 000000000..f69cf9dc7 Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark@2x.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark@3x.png b/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark@3x.png new file mode 100644 index 000000000..ca06bbb8a Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Checkmark@3x.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Contents.json b/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Contents.json new file mode 100644 index 000000000..91ff57529 --- /dev/null +++ b/Sources/SmileID/Resources/Media.xcassets/Checkmark.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "Checkmark.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Checkmark@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "Checkmark@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Contents.json b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Contents.json new file mode 100644 index 000000000..e39cf2a55 --- /dev/null +++ b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "Loader.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Loader@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "Loader@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader.png b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader.png new file mode 100644 index 000000000..e6d0524dc Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@2x.png b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@2x.png new file mode 100644 index 000000000..b8444aec0 Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@2x.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@3x.png 
b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@3x.png new file mode 100644 index 000000000..e95d4492b Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Loader.imageset/Loader@3x.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Contents.json b/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Contents.json new file mode 100644 index 000000000..4d1aa468c --- /dev/null +++ b/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "filename" : "Xmark.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Xmark@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "Xmark@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark.png b/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark.png new file mode 100644 index 000000000..9f1fe162a Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark@2x.png b/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark@2x.png new file mode 100644 index 000000000..f20432035 Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark@2x.png differ diff --git a/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark@3x.png b/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark@3x.png new file mode 100644 index 000000000..717aa86dc Binary files /dev/null and b/Sources/SmileID/Resources/Media.xcassets/Xmark.imageset/Xmark@3x.png differ diff --git a/Tests/Mocks/NetworkingMocks.swift b/Tests/Mocks/NetworkingMocks.swift index e72c113c5..eb8d1fef3 100644 --- a/Tests/Mocks/NetworkingMocks.swift +++ b/Tests/Mocks/NetworkingMocks.swift @@ -124,6 +124,7 @@ class MockSmileIdentityService: SmileIDServiceable { callbackUrl _: String?, sandboxResult _: Int?, allowNewEnroll _: Bool?, + failureReason: FailureReason?, metadata _: Metadata ) async throws -> SmartSelfieResponse { if MockHelper.shouldFail { @@ -154,6 +155,7 @@ class MockSmileIdentityService: SmileIDServiceable { partnerParams _: [String: String]?, callbackUrl _: String?, sandboxResult _: Int?, + failureReason: FailureReason?, metadata _: Metadata ) async throws -> SmartSelfieResponse { if MockHelper.shouldFail {