diff --git a/Example/SmileID.xcodeproj/project.pbxproj b/Example/SmileID.xcodeproj/project.pbxproj index d6ff32768..39ddd3e2b 100644 --- a/Example/SmileID.xcodeproj/project.pbxproj +++ b/Example/SmileID.xcodeproj/project.pbxproj @@ -7,7 +7,6 @@ objects = { /* Begin PBXBuildFile section */ - 0070E03EA5C2B707E67804D3 /* Pods_SmileID_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0A98AE7C7B30F859CBC4DA6E /* Pods_SmileID_Example.framework */; }; 1E59E33E2BA1E64C00D2BAD2 /* PartnerParamsTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E59E33D2BA1E64C00D2BAD2 /* PartnerParamsTests.swift */; }; 1E60ED372A29C306002695FF /* HomeViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E60ED322A29C306002695FF /* HomeViewController.swift */; }; 1E60ED382A29C306002695FF /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E60ED332A29C306002695FF /* Constants.swift */; }; @@ -39,7 +38,7 @@ 20B6D5EC2C21CE660023D51C /* DataStoreError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20B6D5EB2C21CE660023D51C /* DataStoreError.swift */; }; 20C360C82C454C130008DBDE /* RootViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20C360C72C454C130008DBDE /* RootViewModel.swift */; }; 20DFA0EC2C21917100AC2AE7 /* View+TextSelection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20DFA0EB2C21917100AC2AE7 /* View+TextSelection.swift */; }; - 20F3D6F32C25F4D700B32751 /* (null) in Sources */ = {isa = PBXBuildFile; }; + 20F3D6F32C25F4D700B32751 /* BuildFile in Sources */ = {isa = PBXBuildFile; }; 20F3D6F62C25F5C100B32751 /* SmileID.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 20F3D6F42C25F5C100B32751 /* SmileID.xcdatamodeld */; }; 5829A8C02BC7429A001C1E7E /* PrivacyInfo.xcprivacy in Resources */ = {isa = PBXBuildFile; fileRef = 5829A8BF2BC7429A001C1E7E /* PrivacyInfo.xcprivacy */; }; 585BE4882AC7748E0091DDD8 /* RestartableTimerTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 585BE4872AC7748E0091DDD8 /* RestartableTimerTest.swift */; }; @@ -48,7 +47,7 @@ 607FACDB1AFB9204008FA782 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 607FACD91AFB9204008FA782 /* Main.storyboard */; }; 607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDE1AFB9204008FA782 /* LaunchScreen.xib */; }; 620F1E982B69194900185CD2 /* AlertView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 620F1E972B69194900185CD2 /* AlertView.swift */; }; - 620F1E9A2B691ABB00185CD2 /* (null) in Resources */ = {isa = PBXBuildFile; }; + 620F1E9A2B691ABB00185CD2 /* BuildFile in Resources */ = {isa = PBXBuildFile; }; 624777D02B0CDC9F00952842 /* EnhancedKycWithIdInputScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 624777CF2B0CDC9F00952842 /* EnhancedKycWithIdInputScreen.swift */; }; 62F6766F2B0D173600417419 /* EnhancedKycWithIdInputScreenViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62F6766E2B0D173600417419 /* EnhancedKycWithIdInputScreenViewModel.swift */; }; 62F676712B0E00E800417419 /* EnhancedKycResultDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62F676702B0E00E800417419 /* EnhancedKycResultDelegate.swift */; }; @@ -72,7 +71,8 @@ 91CB21A52AC10C61005AEBF5 /* NavigationBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91CB21A42AC10C61005AEBF5 /* NavigationBar.swift */; }; 91D9FBC42AB481FE00A8D36B /* PollingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91D9FBC32AB481FE00A8D36B /* PollingTests.swift */; }; 91D9FBD52AB8AB4700A8D36B /* 
CalculateSignatureTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91D9FBD42AB8AB4700A8D36B /* CalculateSignatureTests.swift */; }; - F703CAEEB3CEB5ADB5C99D44 /* Pods_SmileID_Tests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 287986BB9E93D632523CC13A /* Pods_SmileID_Tests.framework */; }; + CF6C5B9C611F1E82DC393276 /* Pods_SmileID_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D1BD6BC67E3ED75E00105F7B /* Pods_SmileID_Example.framework */; }; + D852C426DDFC5A53E45164A7 /* Pods_SmileID_Tests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B7429B58F2AFC96534131127 /* Pods_SmileID_Tests.framework */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -86,7 +86,7 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ - 0A98AE7C7B30F859CBC4DA6E /* Pods_SmileID_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 0374B5DE135D00E9C63222D2 /* Pods-SmileID_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.release.xcconfig"; sourceTree = ""; }; 1E59E33D2BA1E64C00D2BAD2 /* PartnerParamsTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PartnerParamsTests.swift; sourceTree = ""; }; 1E60ED322A29C306002695FF /* HomeViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = HomeViewController.swift; sourceTree = ""; }; 1E60ED332A29C306002695FF /* Constants.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; @@ -118,8 +118,6 @@ 20C360C72C454C130008DBDE /* RootViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RootViewModel.swift; sourceTree = ""; }; 20DFA0EB2C21917100AC2AE7 /* View+TextSelection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "View+TextSelection.swift"; sourceTree = ""; }; 20F3D6F52C25F5C100B32751 /* SmileID.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = SmileID.xcdatamodel; sourceTree = ""; }; - 262BF9A8643DF9220FD233E3 /* Pods-SmileID_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.release.xcconfig"; sourceTree = ""; }; - 287986BB9E93D632523CC13A /* Pods_SmileID_Tests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Tests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 5829A8BF2BC7429A001C1E7E /* PrivacyInfo.xcprivacy */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = PrivacyInfo.xcprivacy; sourceTree = ""; }; 585BE4872AC7748E0091DDD8 /* RestartableTimerTest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RestartableTimerTest.swift; sourceTree = ""; }; 58C5F1D72B05925800A6080C /* BiometricKycWithIdInputScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BiometricKycWithIdInputScreen.swift; sourceTree = ""; }; @@ -135,7 +133,7 @@ 
624777CF2B0CDC9F00952842 /* EnhancedKycWithIdInputScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnhancedKycWithIdInputScreen.swift; sourceTree = ""; }; 62F6766E2B0D173600417419 /* EnhancedKycWithIdInputScreenViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnhancedKycWithIdInputScreenViewModel.swift; sourceTree = ""; }; 62F676702B0E00E800417419 /* EnhancedKycResultDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnhancedKycResultDelegate.swift; sourceTree = ""; }; - 67420F8D15457A4FC46AFB84 /* Pods-SmileID_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.debug.xcconfig"; sourceTree = ""; }; + 6A35EC7327A28317BD3691DB /* Pods-SmileID_Tests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.debug.xcconfig"; sourceTree = ""; }; 6AC980584C522B17A099E098 /* Util.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Util.swift; sourceTree = ""; }; 6AC980D3CB9C357AD1B13D80 /* IdInfoInputScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = IdInfoInputScreen.swift; sourceTree = ""; }; 6AC982147640002B81F72DEC /* SettingsView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SettingsView.swift; sourceTree = ""; }; @@ -158,9 +156,11 @@ 91D9FBD42AB8AB4700A8D36B /* CalculateSignatureTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CalculateSignatureTests.swift; sourceTree = ""; }; 94E7560A47E255DD8215C183 /* README.md */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 9755B6A19CF28DE212F24C83 /* SmileID.podspec */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = SmileID.podspec; path = ../SmileID.podspec; sourceTree = ""; xcLanguageSpecificationIdentifier = xcode.lang.ruby; }; + B7429B58F2AFC96534131127 /* Pods_SmileID_Tests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Tests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; C8CD2E3DB817D8C6334E9240 /* LICENSE */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = LICENSE; path = ../LICENSE; sourceTree = ""; }; - F7E93DC564F4962974BFA3CB /* Pods-SmileID_Tests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.debug.xcconfig"; sourceTree = ""; }; - FE4071A6FC696DDB1E5382FA /* Pods-SmileID_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.release.xcconfig"; sourceTree = ""; }; + D1BD6BC67E3ED75E00105F7B /* Pods_SmileID_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SmileID_Example.framework; 
sourceTree = BUILT_PRODUCTS_DIR; }; + D6155C4F90D148251B9B9077 /* Pods-SmileID_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Tests.release.xcconfig"; path = "Target Support Files/Pods-SmileID_Tests/Pods-SmileID_Tests.release.xcconfig"; sourceTree = ""; }; + F88A959ED467AA6482744E51 /* Pods-SmileID_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SmileID_Example.debug.xcconfig"; path = "Target Support Files/Pods-SmileID_Example/Pods-SmileID_Example.debug.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -168,7 +168,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 0070E03EA5C2B707E67804D3 /* Pods_SmileID_Example.framework in Frameworks */, + CF6C5B9C611F1E82DC393276 /* Pods_SmileID_Example.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -176,7 +176,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - F703CAEEB3CEB5ADB5C99D44 /* Pods_SmileID_Tests.framework in Frameworks */, + D852C426DDFC5A53E45164A7 /* Pods_SmileID_Tests.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -297,7 +297,7 @@ 607FACE81AFB9204008FA782 /* Tests */, 607FACD11AFB9204008FA782 /* Products */, 828BF541E068101B2E6ED55F /* Pods */, - 61C8F125453D3EB83C903E19 /* Frameworks */, + B5C101CE2863A6DEA0960B58 /* Frameworks */, ); sourceTree = ""; }; @@ -381,15 +381,6 @@ name = "Podspec Metadata"; sourceTree = ""; }; - 61C8F125453D3EB83C903E19 /* Frameworks */ = { - isa = PBXGroup; - children = ( - 0A98AE7C7B30F859CBC4DA6E /* Pods_SmileID_Example.framework */, - 287986BB9E93D632523CC13A /* Pods_SmileID_Tests.framework */, - ); - name = Frameworks; - sourceTree = ""; - }; 624777CC2B0CDC5200952842 /* EnhancedKYC */ = { isa = PBXGroup; children = ( @@ -412,10 +403,10 @@ 828BF541E068101B2E6ED55F /* Pods */ = { isa = PBXGroup; children = ( - 67420F8D15457A4FC46AFB84 /* Pods-SmileID_Example.debug.xcconfig */, - 262BF9A8643DF9220FD233E3 /* Pods-SmileID_Example.release.xcconfig */, - F7E93DC564F4962974BFA3CB /* Pods-SmileID_Tests.debug.xcconfig */, - FE4071A6FC696DDB1E5382FA /* Pods-SmileID_Tests.release.xcconfig */, + F88A959ED467AA6482744E51 /* Pods-SmileID_Example.debug.xcconfig */, + 0374B5DE135D00E9C63222D2 /* Pods-SmileID_Example.release.xcconfig */, + 6A35EC7327A28317BD3691DB /* Pods-SmileID_Tests.debug.xcconfig */, + D6155C4F90D148251B9B9077 /* Pods-SmileID_Tests.release.xcconfig */, ); path = Pods; sourceTree = ""; @@ -452,6 +443,15 @@ path = ../../Tests/Mocks; sourceTree = ""; }; + B5C101CE2863A6DEA0960B58 /* Frameworks */ = { + isa = PBXGroup; + children = ( + D1BD6BC67E3ED75E00105F7B /* Pods_SmileID_Example.framework */, + B7429B58F2AFC96534131127 /* Pods_SmileID_Tests.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -459,13 +459,13 @@ isa = PBXNativeTarget; buildConfigurationList = 607FACEF1AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SmileID_Example" */; buildPhases = ( - A9F06B7D2BE86D450711DA74 /* [CP] Check Pods Manifest.lock */, + BF04E59E0BBAEF08E0F6670F /* [CP] Check Pods Manifest.lock */, 607FACCC1AFB9204008FA782 /* Sources */, 917D79282AA8024400FA6624 /* SwiftLint */, 607FACCD1AFB9204008FA782 /* Frameworks */, 607FACCE1AFB9204008FA782 /* Resources */, - 24C14E01D54D394A2B3921B7 /* [CP] Embed 
Pods Frameworks */, C0BE335FFECD4DF6892309F3 /* Upload Debug Symbols to Sentry */, + 100910DDFF47A40C3D218CFE /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -480,7 +480,7 @@ isa = PBXNativeTarget; buildConfigurationList = 607FACF21AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SmileID_Tests" */; buildPhases = ( - BD877CF0071F75996FF65D7F /* [CP] Check Pods Manifest.lock */, + 0A095CF74E534671ED6B5831 /* [CP] Check Pods Manifest.lock */, 607FACE11AFB9204008FA782 /* Sources */, 607FACE21AFB9204008FA782 /* Frameworks */, 607FACE31AFB9204008FA782 /* Resources */, @@ -546,7 +546,7 @@ buildActionMask = 2147483647; files = ( 1EFAB3172A375265008E3C13 /* Images.xcassets in Resources */, - 620F1E9A2B691ABB00185CD2 /* (null) in Resources */, + 620F1E9A2B691ABB00185CD2 /* BuildFile in Resources */, 607FACDB1AFB9204008FA782 /* Main.storyboard in Resources */, 5829A8C02BC7429A001C1E7E /* PrivacyInfo.xcprivacy in Resources */, 607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */, @@ -563,7 +563,29 @@ /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ - 24C14E01D54D394A2B3921B7 /* [CP] Embed Pods Frameworks */ = { + 0A095CF74E534671ED6B5831 /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-SmileID_Tests-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; + 100910DDFF47A40C3D218CFE /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -611,7 +633,7 @@ shellPath = /bin/sh; shellScript = "# Type a script or drag a script file from your workspace to insert its path.\ncd ../Sources\n\"${PODS_ROOT}/SwiftLint/swiftlint\"\n"; }; - A9F06B7D2BE86D450711DA74 /* [CP] Check Pods Manifest.lock */ = { + BF04E59E0BBAEF08E0F6670F /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -633,28 +655,6 @@ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; - BD877CF0071F75996FF65D7F /* [CP] Check Pods Manifest.lock */ = { - isa = PBXShellScriptBuildPhase; - buildActionMask = 2147483647; - files = ( - ); - inputFileListPaths = ( - ); - inputPaths = ( - "${PODS_PODFILE_DIR_PATH}/Podfile.lock", - "${PODS_ROOT}/Manifest.lock", - ); - name = "[CP] Check Pods Manifest.lock"; - outputFileListPaths = ( - ); - outputPaths = ( - "$(DERIVED_FILE_DIR)/Pods-SmileID_Tests-checkManifestLockResult.txt", - ); - runOnlyForDeploymentPostprocessing = 0; - shellPath = /bin/sh; - shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; - showEnvVarsInLog = 0; - }; C0BE335FFECD4DF6892309F3 /* Upload Debug Symbols to Sentry */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; @@ -693,7 +693,7 @@ 1ED53F6D2A2F28590020BEFB /* SmileTextField.swift in Sources */, 91CB21A52AC10C61005AEBF5 /* NavigationBar.swift in Sources */, 1ED53F6B2A2F28590020BEFB /* ProductCell.swift in Sources */, - 20F3D6F32C25F4D700B32751 /* (null) in Sources */, + 20F3D6F32C25F4D700B32751 /* BuildFile in Sources */, 1E60ED382A29C306002695FF /* Constants.swift in Sources */, 624777D02B0CDC9F00952842 /* EnhancedKycWithIdInputScreen.swift in Sources */, 1ED53F712A2F28590020BEFB /* EnterUserIDView.swift in Sources */, @@ -879,7 +879,7 @@ }; 607FACF01AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 67420F8D15457A4FC46AFB84 /* Pods-SmileID_Example.debug.xcconfig */; + baseConfigurationReference = F88A959ED467AA6482744E51 /* Pods-SmileID_Example.debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_IDENTITY = "Apple Development"; @@ -912,7 +912,7 @@ }; 607FACF11AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 262BF9A8643DF9220FD233E3 /* Pods-SmileID_Example.release.xcconfig */; + baseConfigurationReference = 0374B5DE135D00E9C63222D2 /* Pods-SmileID_Example.release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_IDENTITY = "Apple Development"; @@ -944,7 +944,7 @@ }; 607FACF31AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = F7E93DC564F4962974BFA3CB /* Pods-SmileID_Tests.debug.xcconfig */; + baseConfigurationReference = 6A35EC7327A28317BD3691DB /* Pods-SmileID_Tests.debug.xcconfig */; buildSettings = { CLANG_ENABLE_MODULES = YES; CODE_SIGN_IDENTITY = "Apple Development"; @@ -976,7 +976,7 @@ }; 607FACF41AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = FE4071A6FC696DDB1E5382FA /* Pods-SmileID_Tests.release.xcconfig */; + baseConfigurationReference = D6155C4F90D148251B9B9077 /* Pods-SmileID_Tests.release.xcconfig */; buildSettings = { CLANG_ENABLE_MODULES = YES; CODE_SIGN_IDENTITY = "Apple Development"; diff --git a/ML/ImageClassifier.swift b/ML/ImageClassifier.swift deleted file mode 100644 index 4117ab05e..000000000 --- a/ML/ImageClassifier.swift +++ 
/dev/null @@ -1,117 +0,0 @@ -import CoreML -import Vision - -/// An enum representing possible errors during image classification -enum ImageClassifierError: Error { - case preprocessingFailed - case classificationFailed - case invalidOutputFormat - case imageConversionFailed - case faceCroppingFailed -} - -/// A structure representing the image quality check result -struct ImageQualityResult { - let passed: Bool - let confidence: Float - - var description: String { - return passed ? "Passed" : "Failed" - } -} - -/// A class that performs image classification to determine selfie quality using a Core ML Model -class ModelImageClassifier { - let cropSize = (width: 120, height: 120) - - init() {} - - /// Classifies an image using the Core ML Model - /// - Parameter image: The input image as a UIImage - /// - Returns: A result containing classifiction confidence. - func classify(imageBuffer: CVPixelBuffer) async throws -> ImageQualityResult { - do { - guard let image = UIImage(pixelBuffer: imageBuffer) else { - throw ImageClassifierError.preprocessingFailed - } - let croppedImage = try await cropToFace(image: image) - guard let convertedImage = croppedImage.pixelBuffer(width: cropSize.width, height: cropSize.height) else { - throw ImageClassifierError.preprocessingFailed - } - return try performClassification(imageBuffer: convertedImage) - } catch { - throw error - } - } - - /// Crops the input image to the region of the first face in the image. - /// - Parameter image: The original input image that should have a face. - /// - Returns: A cropped image of the detected face in the input image. - private func cropToFace(image: UIImage) async throws -> UIImage { - guard let cgImage = image.cgImage else { - throw ImageClassifierError.faceCroppingFailed - } - - let request = VNDetectFaceRectanglesRequest() - let handler = VNImageRequestHandler(cgImage: cgImage, options: [:]) - - try handler.perform([request]) - - guard let results = request.results, - let face = results.first else { - throw ImageClassifierError.faceCroppingFailed - } - - let boundingBox = face.boundingBox - - let size = CGSize( - width: boundingBox.width * image.size.width, - height: boundingBox.height * image.size.height - ) - let origin = CGPoint( - x: boundingBox.minX * image.size.width, - y: (1 - boundingBox.minY) * image.size.height - size.height - ) - - let faceRect = CGRect(origin: origin, size: size) - - guard let croppedImage = cgImage.cropping(to: faceRect) else { - throw ImageClassifierError.faceCroppingFailed - } - - return UIImage(cgImage: croppedImage) - } - - /// Performs the actual classification using the `ImageQualityCP20` model - /// - Parameter mlMultiArray: The processed input image as a MultiArray - /// - Returns: The ImageQualityResult - private func performClassification(imageBuffer: CVPixelBuffer) throws -> ImageQualityResult { - let modelConfiguration = MLModelConfiguration() - let model = try SelfieQualityDetector(configuration: modelConfiguration) - - let input = SelfieQualityDetectorInput(conv2d_193_input: imageBuffer) - - let prediction = try model.prediction(input: input) - let output = prediction.Identity - return try processModelOuput(output) - } - - /// Processes the model's output to determine the final classification - /// - Parameter output: The MLMultiArray output from the model - /// - Returns: The ImageQualityResult - private func processModelOuput(_ output: MLMultiArray) throws -> ImageQualityResult { - guard output.shape.count == 2, - output.shape[0] == 1, - output.shape[1] == 2 else { - 
throw ImageClassifierError.invalidOutputFormat - } - - let failScore = output[0].floatValue - let passScore = output[1].floatValue - - let passed = passScore > failScore - let confidence = passed ? passScore : failScore - - return ImageQualityResult(passed: passed, confidence: confidence) - } -} diff --git a/SmileID.xcodeproj/project.pbxproj b/SmileID.xcodeproj/project.pbxproj index 60c1081c3..014e6eb71 100644 --- a/SmileID.xcodeproj/project.pbxproj +++ b/SmileID.xcodeproj/project.pbxproj @@ -64,6 +64,7 @@ 1F33A24780AE0C89E89EDB3A /* NavigationBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62D2BD3354878490B62F04FB /* NavigationBar.swift */; }; 1F57B39849B01BB6E26B7F3B /* DocumentCaptureInstructionsScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 540FE72848132B16535C7281 /* DocumentCaptureInstructionsScreen.swift */; }; 1F76F767069D4436C8AF6074 /* Injected.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5EB66DD1BDD5A4C66AC800B3 /* Injected.swift */; }; + 20991A4E2C8625EE00C0B13E /* SelfieQualityDetector.mlmodelc in Resources */ = {isa = PBXBuildFile; fileRef = 20991A4C2C8625EE00C0B13E /* SelfieQualityDetector.mlmodelc */; }; 216916E16C0F6C9CBD698C92 /* ImageExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0C1C11D7A28442BD26144875 /* ImageExtensions.swift */; }; 2228D942A74ACE036BCC6AC3 /* OrchestratedDocumentVerificationViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A389AF9529CF6ADA5A11D4D6 /* OrchestratedDocumentVerificationViewModel.swift */; }; 225873F4B9CFD28079C4E610 /* FaceShape.swift in Sources */ = {isa = PBXBuildFile; fileRef = 60101EA1F67FE523A8D10E2F /* FaceShape.swift */; }; @@ -760,6 +761,7 @@ 204E25AA87E9519F31645909 /* DocumentCaptureScreen.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = DocumentCaptureScreen.swift; path = Sources/SmileID/Classes/DocumentVerification/View/DocumentCaptureScreen.swift; sourceTree = ""; }; 2057FBF6B6724BE33B6CC727 /* DocumentCaptureScreen.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = DocumentCaptureScreen.swift; path = Sources/SmileID/Classes/DocumentVerification/View/DocumentCaptureScreen.swift; sourceTree = ""; }; 208563C91ED929072F239DE9 /* NavigationHelper.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = NavigationHelper.swift; path = Sources/SmileID/Classes/Helpers/NavigationHelper.swift; sourceTree = ""; }; + 20991A4C2C8625EE00C0B13E /* SelfieQualityDetector.mlmodelc */ = {isa = PBXFileReference; lastKnownFileType = wrapper; path = SelfieQualityDetector.mlmodelc; sourceTree = ""; }; 209BE3605849ED55515BE4BF /* OrchestratedDocumentVerificationViewModel.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = OrchestratedDocumentVerificationViewModel.swift; path = Sources/SmileID/Classes/DocumentVerification/Model/OrchestratedDocumentVerificationViewModel.swift; sourceTree = ""; }; 20BD76F142CABEAE07EF1F67 /* SmileIDService.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = SmileIDService.swift; path = Sources/SmileID/Classes/Networking/SmileIDService.swift; sourceTree = ""; }; 20CAE1873FDA57190CC2F915 /* HTTPHeader.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = HTTPHeader.swift; path = Sources/SmileID/Classes/Networking/HTTPHeader.swift; sourceTree = ""; }; @@ -6522,6 +6524,7 @@ 
1EEFC2BF2B58412300B8A934 /* Resources */ = { isa = PBXGroup; children = ( + 20991A4D2C8625EE00C0B13E /* ML */, 5829A8C42BC8494F001C1E7E /* PrivacyInfo.xcprivacy */, 1EEFC2C02B58412300B8A934 /* Media.xcassets */, 1EEFC2C12B58412300B8A934 /* Fonts */, @@ -6598,6 +6601,14 @@ path = DependencyContainer; sourceTree = ""; }; + 20991A4D2C8625EE00C0B13E /* ML */ = { + isa = PBXGroup; + children = ( + 20991A4C2C8625EE00C0B13E /* SelfieQualityDetector.mlmodelc */, + ); + path = ML; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXHeadersBuildPhase section */ @@ -6730,6 +6741,7 @@ 5829A8C52BC8494F001C1E7E /* PrivacyInfo.xcprivacy in Resources */, 1EEFC3342B58412400B8A934 /* Localizable.strings in Resources */, 1EEFC3312B58412400B8A934 /* Media.xcassets in Resources */, + 20991A4E2C8625EE00C0B13E /* SelfieQualityDetector.mlmodelc in Resources */, 1EEFC3332B58412400B8A934 /* Epilogue-Bold.ttf in Resources */, 1EEFC2E92B58412300B8A934 /* ARViewController.xib in Resources */, 1EEFC3322B58412400B8A934 /* Epilogue-Medium.ttf in Resources */, diff --git a/Sources/SmileID/Classes/Camera/CameraManager.swift b/Sources/SmileID/Classes/Camera/CameraManager.swift index f22a21652..439f31d95 100644 --- a/Sources/SmileID/Classes/Camera/CameraManager.swift +++ b/Sources/SmileID/Classes/Camera/CameraManager.swift @@ -42,7 +42,9 @@ class CameraManager: NSObject, ObservableObject { @Published private(set) var status = Status.unconfigured private var orientation: Orientation - init(orientation: Orientation) { + static let shared: CameraManager = CameraManager(orientation: .portrait) + + private init(orientation: Orientation) { self.orientation = orientation super.init() sessionQueue.async { diff --git a/Sources/SmileID/Classes/Camera/CameraViewController.swift b/Sources/SmileID/Classes/Camera/CameraViewController.swift index f52bf3996..74072078e 100644 --- a/Sources/SmileID/Classes/Camera/CameraViewController.swift +++ b/Sources/SmileID/Classes/Camera/CameraViewController.swift @@ -2,7 +2,9 @@ import UIKit import Vision import AVFoundation -class PreviewView: UIViewController { +class CameraViewController: UIViewController { + var faceDetector: FaceDetectorV2? + var previewLayer: AVCaptureVideoPreviewLayer? private weak var cameraManager: CameraManager? @@ -17,6 +19,7 @@ class PreviewView: UIViewController { override func viewDidLoad() { super.viewDidLoad() + faceDetector?.viewDelegate = self configurePreviewLayer() } @@ -34,7 +37,16 @@ class PreviewView: UIViewController { } } -extension PreviewView: RectangleDetectionDelegate { +extension CameraViewController: FaceDetectorDelegate { + func convertFromMetadataToPreviewRect(rect: CGRect) -> CGRect { + guard let previewLayer = previewLayer else { + return CGRect.zero + } + return previewLayer.layerRectConverted(fromMetadataOutputRect: rect) + } +} + +extension CameraViewController: RectangleDetectionDelegate { func didDetectQuad( quad: Quadrilateral?, _ imageSize: CGSize, diff --git a/Sources/SmileID/Classes/DocumentVerification/Model/DocumentCaptureViewModel.swift b/Sources/SmileID/Classes/DocumentVerification/Model/DocumentCaptureViewModel.swift index 24c14d091..8a7afea69 100644 --- a/Sources/SmileID/Classes/DocumentVerification/Model/DocumentCaptureViewModel.swift +++ b/Sources/SmileID/Classes/DocumentVerification/Model/DocumentCaptureViewModel.swift @@ -35,7 +35,7 @@ class DocumentCaptureViewModel: ObservableObject { @Published var documentImageToConfirm: Data? @Published var captureError: Error? 
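Note on the CameraManager change above: making the initializer private and exposing a static shared instance means the selfie and document flows now drive one capture session instead of each allocating their own AVCaptureSession. A minimal sketch of the resulting access pattern (the assert line is illustrative, not part of the diff):

    // With the private initializer, every call site resolves to the
    // same instance, i.e. one shared capture session app-wide.
    let selfieCamera = CameraManager.shared
    let documentCamera = CameraManager.shared
    assert(selfieCamera === documentCamera) // always passes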
@Published var isCapturing = false - @Published var cameraManager = CameraManager(orientation: .portrait) + var cameraManager = CameraManager.shared init(knownAspectRatio: Double? = nil) { self.knownAspectRatio = knownAspectRatio diff --git a/ML/CGImage+CVPixelBuffer.swift b/Sources/SmileID/Classes/FaceDetector/CGImage+CVPixelBuffer.swift similarity index 100% rename from ML/CGImage+CVPixelBuffer.swift rename to Sources/SmileID/Classes/FaceDetector/CGImage+CVPixelBuffer.swift diff --git a/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift b/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift new file mode 100644 index 000000000..49bc7e0b8 --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift @@ -0,0 +1,199 @@ +import AVFoundation +import Combine +import UIKit +import Vision + +protocol FaceDetectorDelegate: NSObjectProtocol { + func convertFromMetadataToPreviewRect(rect: CGRect) -> CGRect +} + +class FaceDetectorV2: NSObject { + private var selfieQualityModel: SelfieQualityDetector? + + private let cropSize = (width: 120, height: 120) + private let faceMovementThreshold: CGFloat = 0.15 + + var sequenceHandler = VNSequenceRequestHandler() + var currentFrameBuffer: CVPixelBuffer? + + weak var selfieViewModel: SelfieViewModelV2? + weak var viewDelegate: FaceDetectorDelegate? + + override init() { + super.init() + selfieQualityModel = createImageClassifier() + } + + private func createImageClassifier() -> SelfieQualityDetector? { + do { + let modelConfiguration = MLModelConfiguration() + let coreMLModel = try SelfieQualityDetector(configuration: modelConfiguration) + return coreMLModel + } catch { + return nil + } + } + + /// Run Face Capture quality and Face Bounding Box and roll/pitch/yaw tracking + func detect(_ imageBuffer: CVPixelBuffer) { + currentFrameBuffer = imageBuffer + + let detectFaceRectanglesRequest = VNDetectFaceRectanglesRequest( + completionHandler: detectedFaceRectangles + ) + + let detectCaptureQualityRequest = VNDetectFaceCaptureQualityRequest( + completionHandler: detectedFaceQualityRequest + ) + + do { + try sequenceHandler.perform( + [ + detectFaceRectanglesRequest, + detectCaptureQualityRequest + ], + on: imageBuffer, + orientation: .leftMirrored + ) + } catch { + selfieViewModel?.perform(action: .handleError(error)) + } + + do { + guard let image = UIImage(pixelBuffer: imageBuffer) else { + return + } + guard let croppedImage = try cropToFace(image: image) else { + return + } + guard let convertedImage = croppedImage.pixelBuffer(width: cropSize.width, height: cropSize.height) else { + return + } + selfieQualityRequest(imageBuffer: convertedImage) + } catch { + selfieViewModel?.perform(action: .handleError(error)) + } + } + + func selfieQualityRequest(imageBuffer: CVPixelBuffer) { + guard let selfieViewModel, + let selfieQualityModel else { return } + do { + let input = SelfieQualityDetectorInput(conv2d_193_input: imageBuffer) + + let prediction = try selfieQualityModel.prediction(input: input) + let output = prediction.Identity + + guard output.shape.count == 2, + output.shape[0] == 1, + output.shape[1] == 2 else { + return + } + + let passScore = output[0].floatValue + let failScore = output[1].floatValue + + let selfieQualityModel = SelfieQualityModel( + failed: failScore, + passed: passScore + ) + selfieViewModel.perform(action: .selfieQualityObservationDetected(selfieQualityModel)) + } catch { + selfieViewModel.perform(action: .handleError(error)) + } + } + + private func cropToFace(image: UIImage) throws -> UIImage? 
{ + guard let cgImage = image.cgImage else { + return nil + } + + let request = VNDetectFaceRectanglesRequest() + let handler = VNImageRequestHandler(cgImage: cgImage, options: [:]) + + try handler.perform([request]) + + guard let results = request.results, + let face = results.first else { + return nil + } + + let boundingBox = face.boundingBox + + let size = CGSize( + width: boundingBox.width * image.size.width, + height: boundingBox.height * image.size.height + ) + let origin = CGPoint( + x: boundingBox.minX * image.size.width, + y: (1 - boundingBox.minY) * image.size.height - size.height + ) + + let faceRect = CGRect(origin: origin, size: size) + + guard let croppedImage = cgImage.cropping(to: faceRect) else { + return nil + } + + return UIImage(cgImage: croppedImage) + } +} + +// MARK: - Private methods +extension FaceDetectorV2 { + func detectedFaceRectangles(request: VNRequest, error: Error?) { + guard let selfieViewModel = selfieViewModel, + let viewDelegate = viewDelegate else { return } + + guard let results = request.results as? [VNFaceObservation], + let result = results.first else { + selfieViewModel.perform(action: .noFaceDetected) + return + } + + let convertedBoundingBox = viewDelegate.convertFromMetadataToPreviewRect(rect: result.boundingBox) + + if #available(iOS 15.0, *) { + let faceObservationModel = FaceGeometryModel( + boundingBox: convertedBoundingBox, + roll: result.roll ?? 0.0, + yaw: result.yaw ?? 0.0, + pitch: result.pitch ?? 0.0, + direction: faceDirection(faceObservation: result) + ) + selfieViewModel.perform(action: .faceObservationDetected(faceObservationModel)) + } else { + // Fallback on earlier versions + } + } + + private func faceDirection(faceObservation: VNFaceObservation) -> FaceDirection { + guard let yaw = faceObservation.yaw?.doubleValue else { + return .none + } + let yawInRadians = CGFloat(yaw) + + if yawInRadians > faceMovementThreshold { + return .right + } else if yawInRadians < -faceMovementThreshold { + return .left + } else { + return .none + } + } + + func detectedFaceQualityRequest(request: VNRequest, error: Error?) { + guard let selfieViewModel = selfieViewModel else { return } + + guard let results = request.results as? [VNFaceObservation], + let result = results.first else { + selfieViewModel.perform(action: .noFaceDetected) + return + } + + let faceQualityModel = FaceQualityModel( + quality: result.faceCaptureQuality ?? 
0.0 + ) + selfieViewModel.perform(action: .faceQualityObservationDetected(faceQualityModel)) + } +} diff --git a/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift b/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift index 675261cd5..9075c71b4 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift +++ b/Sources/SmileID/Classes/FaceDetector/FaceGeometryModel.swift @@ -4,4 +4,27 @@ struct FaceGeometryModel: Equatable { let boundingBox: CGRect let roll: NSNumber let yaw: NSNumber + let pitch: NSNumber + let direction: FaceDirection +} + +enum FaceDirection { + case left + case right + case none +} + +struct FaceQualityModel { + let quality: Float +} + +struct SelfieQualityModel { + let failed: Float + let passed: Float +} + +extension SelfieQualityModel { + static var zero: SelfieQualityModel { + return SelfieQualityModel(failed: 0, passed: 0) + } } diff --git a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift new file mode 100644 index 000000000..62264e054 --- /dev/null +++ b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift @@ -0,0 +1,120 @@ +import Foundation +import Vision + +/// Represents the different tasks in an active liveness check. +enum LivenessTask { + case lookLeft + case lookRight + case lookUp + + /// The user-friendly instruction for each task. + var instruction: String { + switch self { + case .lookLeft: + return "Look Left" + case .lookRight: + return "Look Right" + case .lookUp: + return "Look Up" + } + } +} + +class LivenessCheckManager: ObservableObject { + /// The sequence of liveness tasks to be performed. + private var livenessTaskSequence: [LivenessTask] = [] + /// The index pointing to the current task in the sequence. + private var currentTaskIndex: Int = 0 + /// The view model associated with the selfie capture process. + weak var selfieViewModel: SelfieViewModelV2? + /// A closure to trigger photo capture during the liveness check. + var captureImage: (() -> Void)? + + // MARK: Constants + + /// The minimum threshold for yaw (left-right head movement) + private let minYawAngleThreshold: CGFloat = 0.15 + /// The maximum threshold for yaw (left-right head movement) + private let maxYawAngleThreshold: CGFloat = 0.6 + /// The minimum threshold for pitch (up-down head movement) + private let minPitchAngleThreshold: CGFloat = 0.15 + /// The maximum threshold for pitch (up-down head movement) + private let maxPitchAngleThreshold: CGFloat = 0.6 + + // MARK: Face Orientation Properties + @Published private(set) var yawAngle: Double = 0.0 + @Published private(set) var rollAngle: Double = 0.0 + @Published private(set) var pitchAngle: Double = 0.0 + @Published private(set) var faceDirection: FaceDirection = .none + + /// Initializes the LivenessCheckManager with a shuffled set of tasks. + init() { + livenessTaskSequence = [.lookLeft, .lookRight, .lookUp].shuffled() + } + + /// The current liveness task. + private(set) var currentTask: LivenessTask? + + /// Advances to the next task in the sequence + /// - Returns: `true` if there is a next task, `false` if all tasks are completed. + private func advanceToNextTask() -> Bool { + guard currentTaskIndex < livenessTaskSequence.count - 1 else { + return false + } + currentTaskIndex += 1 + currentTask = livenessTaskSequence[currentTaskIndex] + return true + } + + /// Sets the initial task for the liveness check. 
+ func initiateLivenessCheck() { + currentTask = livenessTaskSequence[currentTaskIndex] + } + + /// Processes face geometry data and checks for task completion + /// - Parameter faceGeometry: The current face geometry data. + func processFaceGeometry(_ faceGeometry: FaceGeometryModel) { + updateFaceOrientationValues(from: faceGeometry) + + guard let currentTask = currentTask else { return } + + switch currentTask { + case .lookLeft: + let yawValue = CGFloat(faceGeometry.yaw.doubleValue) + if yawValue < -minYawAngleThreshold { + completeCurrentTask() + } + case .lookRight: + let yawValue = CGFloat(faceGeometry.yaw.doubleValue) + if yawValue > minYawAngleThreshold { + completeCurrentTask() + } + case .lookUp: + let pitchValue = CGFloat(faceGeometry.pitch.doubleValue) + if pitchValue < -minPitchAngleThreshold { + completeCurrentTask() + } + } + } + + /// Updates the face orientation values based on the given face geometry. + /// - Parameter faceGeometry: The current face geometry data. + private func updateFaceOrientationValues(from faceGeometry: FaceGeometryModel) { + yawAngle = faceGeometry.yaw.doubleValue + rollAngle = faceGeometry.roll.doubleValue + pitchAngle = faceGeometry.pitch.doubleValue + } + + /// Completes the current task and moves to the next one. + /// If all tasks are completed, it signals the completion of the liveness challenge. + private func completeCurrentTask() { + captureImage?() + captureImage?() + + if !advanceToNextTask() { + // Liveness challenge complete + selfieViewModel?.perform(action: .activeLivenessCompleted) + self.currentTask = nil + } + } +}
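Worked example of the threshold checks in LivenessCheckManager.processFaceGeometry above, using the constants this diff defines (minYawAngleThreshold = minPitchAngleThreshold = 0.15, in radians). The sample readings are hypothetical; the sign conventions follow the code, where looking left yields a negative yaw and looking up a negative pitch:

    let minYawAngleThreshold: CGFloat = 0.15
    let minPitchAngleThreshold: CGFloat = 0.15

    let yaw: CGFloat = -0.22                          // hypothetical reading while looking left
    let lookLeftDone = yaw < -minYawAngleThreshold    // true, so .lookLeft completes

    let pitch: CGFloat = -0.18                        // hypothetical reading while looking up
    let lookUpDone = pitch < -minPitchAngleThreshold  // true, so .lookUp completes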
diff --git a/Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift b/Sources/SmileID/Classes/FaceDetector/Models.swift similarity index 75% rename from Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift rename to Sources/SmileID/Classes/FaceDetector/Models.swift index 83687b18c..0d0bdd2ac 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceDetectionState.swift +++ b/Sources/SmileID/Classes/FaceDetector/Models.swift @@ -1,19 +1,15 @@ import Foundation enum FaceDetectionState: Equatable { - case sceneUnstable - case finalFrame - case multipleFacesDetected case faceDetected case noFaceDetected case faceDetectionErrored - case smileFrame } -enum FaceObservation<T, E: Error>: Equatable { +enum FaceObservation<T> { case faceFound(T) case faceNotFound - case errored(E) + case errored(Error) } enum FaceBoundsState { diff --git a/ML/SelfieQualityDetector.swift b/Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift similarity index 99% rename from ML/SelfieQualityDetector.swift rename to Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift index 43dc6a736..e87c214c6 100644 --- a/ML/SelfieQualityDetector.swift +++ b/Sources/SmileID/Classes/FaceDetector/SelfieQualityDetector.swift @@ -94,7 +94,7 @@ class SelfieQualityDetector { /// URL of model assuming it was installed in the same bundle as this class class var urlOfModelInThisBundle : URL { - let bundle = Bundle(for: self) + let bundle = SmileIDResourcesHelper.bundle return bundle.url(forResource: "SelfieQualityDetector", withExtension:"mlmodelc")! } diff --git a/ML/UIImage+CVPixelBuffer.swift b/Sources/SmileID/Classes/FaceDetector/UIImage+CVPixelBuffer.swift similarity index 100% rename from ML/UIImage+CVPixelBuffer.swift rename to Sources/SmileID/Classes/FaceDetector/UIImage+CVPixelBuffer.swift diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift index 1feb000e8..e3d96093b 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift @@ -26,7 +26,7 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate { private let extraPartnerParams: [String: String] private let faceDetector = FaceDetector() - var cameraManager = CameraManager(orientation: .portrait) + var cameraManager = CameraManager.shared var shouldAnalyzeImages = true var lastAutoCaptureTime = Date() var previousHeadRoll = Double.infinity diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift new file mode 100644 index 000000000..13c24f5ee --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift @@ -0,0 +1,19 @@ +import Foundation + +enum SelfieViewModelAction { + // View Setup Actions + case windowSizeDetected(CGRect) + + // Face Detection Actions + case noFaceDetected + case faceObservationDetected(FaceGeometryModel) + case faceQualityObservationDetected(FaceQualityModel) + case selfieQualityObservationDetected(SelfieQualityModel) + case activeLivenessCompleted + case activeLivenessTimeout + + // Others + case toggleDebugMode + case openApplicationSettings + case handleError(Error) +}
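The new SelfieViewModelAction enum above gives the face detector and liveness manager a single entry point into the view model: they emit actions rather than mutating state, and perform(action:) (defined in SelfieViewModelV2.swift below) publishes the results on the main queue. A hedged sketch of the call pattern; the quality value is an arbitrary sample:

    // Producers report events as actions; the view model owns all state changes.
    viewModel.perform(action: .noFaceDetected)
    viewModel.perform(action: .faceQualityObservationDetected(FaceQualityModel(quality: 0.35)))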
diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index 53b7d7139..5592a0a1a 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -3,7 +3,71 @@ import Combine import Foundation public class SelfieViewModelV2: ObservableObject { + // MARK: Dependencies + let cameraManager = CameraManager.shared + let faceDetector = FaceDetectorV2() + let activeLiveness = LivenessCheckManager() + private var subscribers = Set<AnyCancellable>() + + var selfieImage: URL? + var livenessImages: [URL] = [] + + // MARK: Publishers + @Published private(set) var debugEnabled: Bool + @Published var unauthorizedAlert: AlertState? + @Published var directive: String = "Instructions.Start" + + // MARK: Publishers for Vision data + @Published private(set) var hasDetectedValidFace: Bool + @Published private(set) var hasCompletedLivenessChallenge: Bool + @Published private(set) var faceDetectedState: FaceDetectionState { + didSet { + determineDirective() + } + } + @Published private(set) var faceGeometryState: FaceObservation<FaceGeometryModel> { + didSet { + processUpdatedFaceGeometry() + } + } + @Published private(set) var faceQualityState: FaceObservation<FaceQualityModel> { + didSet { + processUpdatedFaceQuality() + } + } + @Published private(set) var selfieQualityState: FaceObservation<SelfieQualityModel> { + didSet { + processUpdatedSelfieQuality() + } + } + @Published private(set) var isAcceptableBounds: FaceBoundsState { + didSet { + calculateDetectedFaceValidity() + } + } + @Published private(set) var isAcceptableFaceQuality: Bool { + didSet { + calculateDetectedFaceValidity() + } + } + @Published private(set) var isAcceptableSelfieQuality: Bool { + didSet { + calculateDetectedFaceValidity() + } + } + @Published private(set) var boundingXDelta: CGFloat = .zero + @Published private(set) var boundingYDelta: CGFloat = .zero + + // MARK: Constants + private let livenessImageSize = 320 + private let selfieImageSize = 640 + // MARK: UI Properties + @Published private(set) var faceQualityValue: Double = 0.0 + @Published private(set) var selfieQualityValue: SelfieQualityModel = .zero + var faceLayoutGuideFrame = CGRect(x: 0, y: 0, width: 200, height: 300) + + // MARK: Private Properties private let isEnroll: Bool private let userId: String private let jobId: String @@ -11,15 +75,6 @@ public class SelfieViewModelV2: ObservableObject { private let skipApiSubmission: Bool private let extraPartnerParams: [String: String] private let useStrictMode: Bool - private let faceDetector = FaceDetector() - - var cameraManager = CameraManager(orientation: .portrait) - - private var subscribers = Set<AnyCancellable>() - - // UI Properties - @Published var unauthorizedAlert: AlertState?
- @Published var directive: String = "Instructions.Start" public init( isEnroll: Bool, @@ -38,6 +93,24 @@ public class SelfieViewModelV2: ObservableObject { self.extraPartnerParams = extraPartnerParams self.useStrictMode = useStrictMode + hasDetectedValidFace = false + hasCompletedLivenessChallenge = false + faceDetectedState = .noFaceDetected + faceGeometryState = .faceNotFound + faceQualityState = .faceNotFound + selfieQualityState = .faceNotFound + isAcceptableBounds = .unknown + isAcceptableFaceQuality = false + isAcceptableSelfieQuality = false + + #if DEBUG + debugEnabled = true + #else + debugEnabled = false + #endif + + self.faceDetector.selfieViewModel = self + cameraManager.$status .receive(on: DispatchQueue.main) .filter { $0 == .unauthorized } @@ -46,7 +119,6 @@ public class SelfieViewModelV2: ObservableObject { .store(in: &subscribers) cameraManager.sampleBufferPublisher - .throttle(for: 0.35, scheduler: DispatchQueue.global(qos: .userInitiated), latest: true) // Drop the first ~2 seconds to allow the user to settle in .dropFirst(5) .compactMap { $0 } @@ -54,10 +126,233 @@ public class SelfieViewModelV2: ObservableObject { .store(in: &subscribers) } - func analyzeImage(image: CVPixelBuffer) {} + private func analyzeImage(imageBuffer: CVPixelBuffer) { + faceDetector.detect(imageBuffer) + if hasDetectedValidFace && selfieImage == nil { + captureSelfieImage(imageBuffer) + activeLiveness.initiateLivenessCheck() + } + + activeLiveness.captureImage = { [weak self] in + self?.captureLivenessImage(imageBuffer) + } + } + + // MARK: Actions + func perform(action: SelfieViewModelAction) { + switch action { + case let .windowSizeDetected(windowRect): + handleWindowSizeChanged(toRect: windowRect) + case .noFaceDetected: + publishNoFaceObserved() + case let .faceObservationDetected(faceObservation): + publishFaceObservation(faceObservation) + case let .faceQualityObservationDetected(faceQualityObservation): + publishFaceQualityObservation(faceQualityObservation) + case let .selfieQualityObservationDetected(selfieQualityObservation): + publishSelfieQualityObservation(selfieQualityObservation) + case .activeLivenessCompleted: + // Completed at this stage: submit the images. + return + case .activeLivenessTimeout: + // Submit with forced failure reason here. 
+ return + case .toggleDebugMode: + toggleDebugMode() + case .openApplicationSettings: + openSettings() + case let .handleError(error): + handleError(error) + } + } + + // MARK: Action Handlers + private func handleWindowSizeChanged(toRect: CGRect) { + faceLayoutGuideFrame = CGRect( + x: toRect.midX - faceLayoutGuideFrame.width / 2, + y: toRect.midY - faceLayoutGuideFrame.height / 2, + width: faceLayoutGuideFrame.width, + height: faceLayoutGuideFrame.height + ) + } + + private func publishNoFaceObserved() { + DispatchQueue.main.async { [self] in + faceDetectedState = .noFaceDetected + faceGeometryState = .faceNotFound + } + } + + private func publishFaceObservation(_ faceGeometryModel: FaceGeometryModel) { + DispatchQueue.main.async { [self] in + faceDetectedState = .faceDetected + faceGeometryState = .faceFound(faceGeometryModel) + } + } + + private func publishFaceQualityObservation(_ faceQualityModel: FaceQualityModel) { + DispatchQueue.main.async { [self] in + faceDetectedState = .faceDetected + faceQualityState = .faceFound(faceQualityModel) + } + } + + private func publishSelfieQualityObservation(_ selfieQualityModel: SelfieQualityModel) { + DispatchQueue.main.async { [self] in + faceDetectedState = .faceDetected + selfieQualityState = .faceFound(selfieQualityModel) + } + } + + private func captureSelfieImage(_ pixelBuffer: CVPixelBuffer) { + do { + guard let imageData = ImageUtils.resizePixelBufferToHeight( + pixelBuffer, + height: selfieImageSize, + orientation: .up + ) else { + throw SmileIDError.unknown("Error resizing selfie image") + } + let selfieImage = try LocalStorage.createSelfieFile(jobId: jobId, selfieFile: imageData) + self.selfieImage = selfieImage + } catch { + handleError(error) + } + } + + private func captureLivenessImage(_ pixelBuffer: CVPixelBuffer) { + do { + guard let imageData = ImageUtils.resizePixelBufferToHeight( + pixelBuffer, + height: livenessImageSize, + orientation: .up + ) else { + throw SmileIDError.unknown("Error resizing liveness image") + } + let imageUrl = try LocalStorage.createLivenessFile(jobId: jobId, livenessFile: imageData) + livenessImages.append(imageUrl) + } catch { + handleError(error) + } + } + + private func handleError(_ error: Error) { + print(error.localizedDescription) + } - func openSettings() { + private func openSettings() { guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return } UIApplication.shared.open(settingsURL) } + + private func toggleDebugMode() { + debugEnabled.toggle() + } +} + +// MARK: Helpers +extension SelfieViewModelV2 { + func determineDirective() { + switch faceDetectedState { + case .faceDetected: + if hasDetectedValidFace { + if let currentLivenessTask = activeLiveness.currentTask { + directive = currentLivenessTask.instruction + } else { + directive = "" + } + } else if isAcceptableBounds == .detectedFaceTooSmall { + directive = "Please bring your face closer to the camera" + } else if isAcceptableBounds == .detectedFaceTooLarge { + directive = "Please hold the camera further from your face" + } else if isAcceptableBounds == .detectedFaceOffCentre { + directive = "Please move your face to the center of the frame" + } else if !isAcceptableSelfieQuality { + directive = "Image quality is too low" + } else { + directive = "We cannot take your photo right now" + } + case .noFaceDetected: + directive = "Please look at the camera" + case .faceDetectionErrored: + directive = "An unexpected error occurred" + } + } + + func processUpdatedFaceGeometry() { + switch faceGeometryState { +
case let .faceFound(faceGeometryModel): + let boundingBox = faceGeometryModel.boundingBox + updateAcceptableBounds(using: boundingBox) + if hasDetectedValidFace && + selfieImage != nil && + activeLiveness.currentTask != nil { + activeLiveness.processFaceGeometry(faceGeometryModel) + } + case .faceNotFound: + invalidateFaceGeometryState() + case let .errored(error): + print(error.localizedDescription) + invalidateFaceGeometryState() + } + } + + func invalidateFaceGeometryState() { + // This is where we reset all the face geometry values. + isAcceptableBounds = .unknown + } + + func updateAcceptableBounds(using boundingBox: CGRect) { + boundingXDelta = abs(boundingBox.midX - faceLayoutGuideFrame.midX) + boundingYDelta = abs(boundingBox.midY - faceLayoutGuideFrame.midY) + + if boundingBox.width > 1.2 * faceLayoutGuideFrame.width { + isAcceptableBounds = .detectedFaceTooLarge + } else if boundingBox.width * 1.2 < faceLayoutGuideFrame.width { + isAcceptableBounds = .detectedFaceTooSmall + } else { + if abs(boundingBox.midX - faceLayoutGuideFrame.midX) > 50 { + isAcceptableBounds = .detectedFaceOffCentre + } else if abs(boundingBox.midY - faceLayoutGuideFrame.midY) > 50 { + isAcceptableBounds = .detectedFaceOffCentre + } else { + isAcceptableBounds = .detectedFaceAppropriateSizeAndPosition + } + } + } + + func processUpdatedFaceQuality() { + switch faceQualityState { + case let .faceFound(faceQualityModel): + // Check acceptable range here. + isAcceptableFaceQuality = faceQualityModel.quality > 0.2 + faceQualityValue = Double(faceQualityModel.quality) + case .faceNotFound: + isAcceptableFaceQuality = false + case let .errored(error): + print(error.localizedDescription) + isAcceptableFaceQuality = false + } + } + + func processUpdatedSelfieQuality() { + switch selfieQualityState { + case let .faceFound(selfieQualityModel): + // Check acceptable range here. + isAcceptableSelfieQuality = selfieQualityModel.passed > 0.5 + selfieQualityValue = selfieQualityModel + case .faceNotFound: + isAcceptableSelfieQuality = false + case let .errored(error): + print(error.localizedDescription) + isAcceptableSelfieQuality = false + } + } + + func calculateDetectedFaceValidity() { + hasDetectedValidFace = + isAcceptableBounds == .detectedFaceAppropriateSizeAndPosition && + isAcceptableFaceQuality && + isAcceptableSelfieQuality + } } diff --git a/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift b/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift index 8833a4a15..aa04685b2 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/CameraView.swift @@ -3,16 +3,21 @@ import AVFoundation import Vision struct CameraView: UIViewControllerRepresentable { - typealias UIViewType = PreviewView - let preview: PreviewView + typealias UIViewType = CameraViewController + let cameraViewController: CameraViewController - init(cameraManager: CameraManager) { - preview = PreviewView(cameraManager: cameraManager) + init( + cameraManager: CameraManager, + selfieViewModel: SelfieViewModelV2? 
= nil + ) { + let controller = CameraViewController(cameraManager: cameraManager) + controller.faceDetector = selfieViewModel?.faceDetector + cameraViewController = controller } - func makeUIViewController(context: Context) -> PreviewView { - preview + func makeUIViewController(context: Context) -> CameraViewController { + cameraViewController } - func updateUIViewController(_ uiViewController: PreviewView, context: Context) {} + func updateUIViewController(_ uiViewController: CameraViewController, context: Context) {} } diff --git a/Sources/SmileID/Classes/SelfieCapture/View/CapturedImagesView.swift b/Sources/SmileID/Classes/SelfieCapture/View/CapturedImagesView.swift new file mode 100644 index 000000000..8a04c6162 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/CapturedImagesView.swift @@ -0,0 +1,61 @@ +import SwiftUI + +struct CapturedImagesView: View { + var model: SelfieViewModelV2 + @State private var images: [UIImage] = [] + + var body: some View { + NavigationView { + ScrollView(.vertical, showsIndicators: false) { + VStack(alignment: .leading) { + if let selfieURL = model.selfieImage, + let selfieImage = loadImage(from: selfieURL) { + Image(uiImage: selfieImage) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(width: 148, height: 320) + } else { + Text("No selfie image") + .font(.title) + } + if !images.isEmpty { + ScrollView(.horizontal, showsIndicators: false) { + HStack { + ForEach(images, id: \.self) { image in + Image(uiImage: image) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(width: 148, height: 320) + } + } + } + } else { + Text("No liveness images") + .font(.title) + } + Spacer() + } + .foregroundColor(.primary) + .padding() + .frame(maxWidth: .infinity) + .navigationBarTitle(Text("Captured Images"), displayMode: .inline) + .onAppear { + loadImages() + } + } + } + } + + private func loadImages() { + images = model.livenessImages.compactMap { + loadImage(from: $0) + } + } + + private func loadImage(from url: URL) -> UIImage? { + guard let imageData = try? 
Data(contentsOf: url) else { + return nil + } + return UIImage(data: imageData) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingBoxView.swift b/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingBoxView.swift new file mode 100644 index 000000000..1290e2c2b --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/FaceBoundingBoxView.swift @@ -0,0 +1,23 @@ +import SwiftUI + +struct FaceBoundingBoxView: View { + @ObservedObject private(set) var model: SelfieViewModelV2 + + var body: some View { + switch model.faceGeometryState { + case let .faceFound(faceGeometryModel): + Rectangle() + .path(in: CGRect( + x: faceGeometryModel.boundingBox.origin.x, + y: faceGeometryModel.boundingBox.origin.y, + width: faceGeometryModel.boundingBox.width, + height: faceGeometryModel.boundingBox.height + )) + .stroke(Color.yellow, lineWidth: 2.0) + case .faceNotFound: + Rectangle().fill(Color.clear) + case .errored: + Rectangle().fill(Color.clear) + } + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/FaceLayoutGuideView.swift b/Sources/SmileID/Classes/SelfieCapture/View/FaceLayoutGuideView.swift new file mode 100644 index 000000000..b5745027b --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/FaceLayoutGuideView.swift @@ -0,0 +1,16 @@ +import SwiftUI + +struct FaceLayoutGuideView: View { + @ObservedObject private(set) var model: SelfieViewModelV2 + + var body: some View { + Rectangle() + .path(in: CGRect( + x: model.faceLayoutGuideFrame.minX, + y: model.faceLayoutGuideFrame.minY, + width: model.faceLayoutGuideFrame.width, + height: model.faceLayoutGuideFrame.height + )) + .stroke(Color.red) + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift b/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift new file mode 100644 index 000000000..21d699e09 --- /dev/null +++ b/Sources/SmileID/Classes/SelfieCapture/View/LayoutGuideView.swift @@ -0,0 +1,19 @@ +import SwiftUI + +struct LayoutGuideView: View { + let layoutGuideFrame: CGRect + + var body: some View { + VStack { + Ellipse() + .stroke(.blue) + .frame(width: layoutGuideFrame.width, height: layoutGuideFrame.height) + } + } +} + +#Preview { + LayoutGuideView( + layoutGuideFrame: CGRect(x: 0, y: 0, width: 200, height: 300) + ) +} diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift index 67794fed2..cc0cf214c 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift @@ -4,104 +4,131 @@ import Lottie public struct SelfieCaptureScreenV2: View { @ObservedObject var viewModel: SelfieViewModelV2 let showAttribution: Bool - - @State private var playbackMode: LottiePlaybackMode = LottiePlaybackMode.paused + @State private var showImages: Bool = false public var body: some View { - VStack(spacing: 40) { - Text(SmileIDResourcesHelper.localizedString(for: viewModel.directive)) - .font(SmileID.theme.header2) - .foregroundColor(.primary) - + GeometryReader { proxy in ZStack { - RoundedRectangle(cornerRadius: 25) - .stroke(SmileID.theme.onLight, lineWidth: 20.0) - CameraView(cameraManager: viewModel.cameraManager) - .clipShape(.rect(cornerRadius: 25)) - .onAppear { - viewModel.cameraManager.switchCamera(to: .front) + CameraView(cameraManager: viewModel.cameraManager, selfieViewModel: viewModel) + .onAppear { + viewModel.cameraManager.switchCamera(to: .front) + } + 
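// Overlay stack: the layout guide, the optional debug view, and the
+                // user instructions are drawn above the camera preview.
+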
LayoutGuideView(layoutGuideFrame: viewModel.faceLayoutGuideFrame)
+
+                if viewModel.debugEnabled {
+                    DebugView()
+                }
+                VStack {
+                    UserInstructionsView(model: viewModel)
+                    Spacer()
+                }
+            }
+            .edgesIgnoringSafeArea(.all)
+            .onAppear {
+                viewModel.perform(action: .windowSizeDetected(proxy.frame(in: .global)))
+            }
+            .alert(item: $viewModel.unauthorizedAlert) { alert in
+                Alert(
+                    title: Text(alert.title),
+                    message: Text(alert.message ?? ""),
+                    primaryButton: .default(
+                        Text(SmileIDResourcesHelper.localizedString(for: "Camera.Unauthorized.PrimaryAction")),
+                        action: {
+                            viewModel.perform(action: .openApplicationSettings)
                         }
-                CornerShapes()
-                RoundedRectangle(cornerRadius: 25)
-                    .foregroundColor(.white.opacity(0.8))
-                    .cutout(Ellipse().scale(x: 0.8, y: 0.8))
+                    ),
+                    secondaryButton: .cancel()
+                )
             }
-        .frame(width: 300, height: 400)
-
-        if showAttribution {
-            Image(uiImage: SmileIDResourcesHelper.SmileEmblem)
+            .sheet(isPresented: $showImages) {
+                CapturedImagesView(model: viewModel)
             }
         }
-        .frame(maxWidth: .infinity, maxHeight: .infinity)
-        .alert(item: $viewModel.unauthorizedAlert) { alert in
-            Alert(
-                title: Text(alert.title),
-                message: Text(alert.message ?? ""),
-                primaryButton: .default(
-                    Text(SmileIDResourcesHelper.localizedString(for: "Camera.Unauthorized.PrimaryAction")),
-                    action: {
-                        viewModel.openSettings()
+    }
+
+    // swiftlint:disable identifier_name
+    @ViewBuilder func DebugView() -> some View {
+        ZStack {
+            FaceBoundingBoxView(model: viewModel)
+            FaceLayoutGuideView(model: viewModel)
+            VStack(spacing: 0) {
+                Spacer()
+                Text("xDelta: \(viewModel.boundingXDelta)")
+                Text("yDelta: \(viewModel.boundingYDelta)")
+                switch viewModel.isAcceptableBounds {
+                case .unknown:
+                    Text("Bounds - Unknown")
+                case .detectedFaceTooSmall:
+                    Text("Bounds - Face too small")
+                case .detectedFaceTooLarge:
+                    Text("Bounds - Face too large")
+                case .detectedFaceOffCentre:
+                    Text("Bounds - Face off Centre")
+                case .detectedFaceAppropriateSizeAndPosition:
+                    Text("Bounds - Appropriate Size and Position")
+                }
+                Divider()
+                Text("Yaw: \(viewModel.activeLiveness.yawAngle)")
+                Text("Roll: \(viewModel.activeLiveness.rollAngle)")
+                Text("Pitch: \(viewModel.activeLiveness.pitchAngle)")
+                Text("Quality: \(viewModel.faceQualityValue)")
+                Text("Fail: \(viewModel.selfieQualityValue.failed) | Pass: \(viewModel.selfieQualityValue.passed)")
+                    .font(.subheadline.weight(.medium))
+                    .padding(5)
+                    .background(Color.yellow)
+                    .clipShape(.rect(cornerRadius: 5))
+                    .padding(.bottom, 10)
+                HStack {
+                    switch viewModel.activeLiveness.faceDirection {
+                    case .left:
+                        Text("Looking Left")
+                    case .right:
+                        Text("Looking Right")
+                    case .none:
+                        Text("Looking Straight")
+                    }
+                    Spacer()
+                    Button {
+                        showImages = true
+                    } label: {
+                        RoundedRectangle(cornerRadius: 5)
+                            .fill(.yellow)
+                            .frame(width: 50, height: 50)
+                            .overlay(
+                                Text("\(viewModel.livenessImages.count + (viewModel.selfieImage != nil ?
1 : 0))")
+                                    .fontWeight(.medium)
+                                    .foregroundColor(.primary)
+                            )
                     }
-                ),
-                secondaryButton: .cancel()
-            )
+                }
+            }
+            .font(.footnote)
+            .foregroundColor(.white)
+            .padding(.bottom, 40)
+            .padding(.horizontal)
         }
     }

     // swiftlint:disable identifier_name
-    @ViewBuilder func CornerShapes() -> some View {
+    @ViewBuilder func CameraOverlayView() -> some View {
         VStack {
             HStack {
-                // Top Left Corner
-                CornerShape()
-                    .stroke(SmileID.theme.success, style: StrokeStyle(lineWidth: 5))
-                    .frame(width: 40, height: 40)
-                    .rotationEffect(.degrees(90))
-                    .offset(x: -2.0, y: -2.0)
-                Spacer()
-                // Top Right Corner
-                CornerShape()
-                    .stroke(SmileID.theme.success, style: StrokeStyle(lineWidth: 5))
-                    .frame(width: 40, height: 40)
-                    .rotationEffect(.degrees(180))
-                    .offset(x: 2.0, y: -2.0)
+                Text(SmileIDResourcesHelper.localizedString(for: viewModel.directive))
+                    .font(SmileID.theme.header2)
+                    .foregroundColor(.primary)
+                    .padding(.bottom)
             }
+            .background(Color.black)
             Spacer()
             HStack {
-                // Bottom Left Corner
-                CornerShape()
-                    .stroke(SmileID.theme.success, style: StrokeStyle(lineWidth: 5))
-                    .frame(width: 40, height: 40)
-                    .offset(x: -2.0, y: 2.0)
-                Spacer()
-                // Bottom Right Corner
-                CornerShape()
-                    .stroke(SmileID.theme.success, style: StrokeStyle(lineWidth: 5))
-                    .frame(width: 40, height: 40)
-                    .rotationEffect(.degrees(270))
-                    .offset(x: 2.0, y: 2.0)
+                Button {
+                    viewModel.perform(action: .toggleDebugMode)
+                } label: {
+                    Image(systemName: "ladybug")
+                        .font(.title)
+                }
+                .buttonStyle(.plain)
             }
         }
     }
 }
-
-struct CornerShape: Shape {
-    let width: CGFloat = 40
-    let height: CGFloat = 40
-    let cornerRadius: CGFloat = 25
-
-    func path(in rect: CGRect) -> Path {
-        var path = Path()
-        path.move(to: CGPoint(x: 0, y: 0))
-        path.addLine(to: CGPoint(x: 0, y: height - cornerRadius))
-        path.addArc(
-            center: CGPoint(x: cornerRadius, y: height - cornerRadius),
-            radius: cornerRadius,
-            startAngle: .degrees(180),
-            endAngle: .degrees(90),
-            clockwise: true
-        )
-        path.addLine(to: CGPoint(x: width, y: height))
-        return path
-    }
-}
diff --git a/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift b/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift
new file mode 100644
index 000000000..265b6cf21
--- /dev/null
+++ b/Sources/SmileID/Classes/SelfieCapture/View/UserInstructionsView.swift
@@ -0,0 +1,12 @@
+import SwiftUI
+
+struct UserInstructionsView: View {
+    @ObservedObject var model: SelfieViewModelV2
+
+    var body: some View {
+        Text(model.directive)
+            .font(.title)
+            .foregroundColor(.white)
+            .padding()
+    }
+}
diff --git a/Sources/SmileID/Classes/Util.swift b/Sources/SmileID/Classes/Util.swift
index 3e0e6e2f9..a05784767 100644
--- a/Sources/SmileID/Classes/Util.swift
+++ b/Sources/SmileID/Classes/Util.swift
@@ -24,6 +24,21 @@
     }
 }

+extension View {
+    @inlinable func reverseMask<Mask: View>(
+        alignment: Alignment = .center,
+        @ViewBuilder _ mask: () -> Mask
+    ) -> some View {
+        self.mask(
+            ZStack(alignment: alignment) {
+                Rectangle()
+                mask()
+                    .blendMode(.destinationOut)
+            }
+        )
+    }
+}
+
 private struct StackedShape<Bottom: Shape, Top: Shape>: Shape {
     var bottom: Bottom
     var top: Top
diff --git a/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin
similarity index 100%
rename from ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin
rename to Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/analytics/coremldata.bin
diff --git a/ML/SelfieQualityDetector.mlmodelc/coremldata.bin
b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/coremldata.bin similarity index 100% rename from ML/SelfieQualityDetector.mlmodelc/coremldata.bin rename to Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/coremldata.bin diff --git a/ML/SelfieQualityDetector.mlmodelc/metadata.json b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/metadata.json similarity index 100% rename from ML/SelfieQualityDetector.mlmodelc/metadata.json rename to Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/metadata.json diff --git a/ML/SelfieQualityDetector.mlmodelc/model.espresso.net b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.net similarity index 100% rename from ML/SelfieQualityDetector.mlmodelc/model.espresso.net rename to Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.net diff --git a/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape similarity index 100% rename from ML/SelfieQualityDetector.mlmodelc/model.espresso.shape rename to Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.shape diff --git a/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights similarity index 100% rename from ML/SelfieQualityDetector.mlmodelc/model.espresso.weights rename to Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model.espresso.weights diff --git a/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin similarity index 100% rename from ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin rename to Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/model/coremldata.bin diff --git a/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin b/Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin similarity index 100% rename from ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin rename to Sources/SmileID/Resources/ML/SelfieQualityDetector.mlmodelc/neural_network_optionals/coremldata.bin