Fix selfie capture fallback implementation (#47)
* Calculate face guide frame correctly

* Calculate faceshape frame properly

* Calculate displayed crop rect based on orientation

* Setup AVCaptureSession in the right sequence

* Remove manual face bounding box conversion and delegate it to `AVCaptureVideoPreviewLayer`

* Set correct orientation for face detection sequence handler

* Don't scale down ARKit face bounding box

* Fix agent mode toggle animation

* Set correct orientation

* Add `allowAgentMode` and `showAttribution` flags to public interfaces

* Add agent mode flag to UI

* Enable agent mode on the sample app

* Add pre-smile check
JubrilO authored Jul 18, 2023
1 parent 0139b0c commit 861b7f5
Showing 13 changed files with 123 additions and 126 deletions.
4 changes: 2 additions & 2 deletions Example/SmileID.xcodeproj/project.pbxproj
@@ -611,7 +611,7 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 19;
CURRENT_PROJECT_VERSION = 22;
DEVELOPMENT_TEAM = 99P7YGX9Q6;
"DEVELOPMENT_TEAM[sdk=iphoneos*]" = 99P7YGX9Q6;
INFOPLIST_FILE = SmileID/Info.plist;
@@ -638,7 +638,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Manual;
CURRENT_PROJECT_VERSION = 19;
CURRENT_PROJECT_VERSION = 22;
DEVELOPMENT_TEAM = "";
"DEVELOPMENT_TEAM[sdk=iphoneos*]" = 99P7YGX9Q6;
INFOPLIST_FILE = SmileID/Info.plist;
1 change: 1 addition & 0 deletions Example/SmileID/EnterUserIDView.swift
@@ -23,6 +23,7 @@ struct EnterUserIDView: View {
SmileTextField(field: $userId, placeholder: "User ID")
.multilineTextAlignment(.center)
NavigationLink(destination: SmileID.smartSelfieAuthenticationScreen(userId: userId,
allowAgentMode: true,
delegate: viewModel)
.navigationBarBackButtonHidden(true), isActive: $goToAuth ) {
}
1 change: 1 addition & 0 deletions Example/SmileID/HomeView.swift
@@ -19,6 +19,7 @@ struct HomeView: View {
})
.sheet(isPresented: $viewModel.presentSmartSelfieEnrollment,
content: { SmileID.smartSelfieEnrollmentScreen(userId: viewModel.generateUserID(),
allowAgentMode: true,
delegate: viewModel) })
Button(action: { self.viewModel.handleSmartSelfieAuthTap() },
label: {
6 changes: 4 additions & 2 deletions Example/SmileID/HomeViewController.swift
@@ -30,7 +30,8 @@ class HomeViewController: UIViewController, SmartSelfieResultDelegate {
userID = UUID().uuidString
currentJob = .smartSelfieEnrollment
let smartSelfieRegistrationScreen = SmileID.smartSelfieEnrollmentScreen(userId: userID,
delegate: self)
allowAgentMode: true,
delegate: self)
cameraVC = UIHostingController(rootView: smartSelfieRegistrationScreen)
cameraVC?.modalPresentationStyle = .fullScreen
navigationController?.present(cameraVC!, animated: true)
@@ -42,7 +43,8 @@ class HomeViewController: UIViewController, SmartSelfieResultDelegate {

func smartSelfieAuthenticationScreen(userID: String) {
let smartSelfieAuthenticationScreen = SmileID.smartSelfieAuthenticationScreen(userId: userID,
delegate: self)
allowAgentMode: true,
delegate: self)
cameraVC = UIHostingController(rootView: smartSelfieAuthenticationScreen)
cameraVC?.modalPresentationStyle = .fullScreen
navigationController?.present(cameraVC!, animated: true)
45 changes: 20 additions & 25 deletions Sources/SmileID/Classes/Camera/CameraManager.swift
@@ -2,7 +2,7 @@ import Foundation
import AVFoundation
import SwiftUI

protocol CameraManageable: AnyObject {
protocol CameraManageable: AnyObject {
func switchCamera(to position: AVCaptureDevice.Position)
func pauseSession()
func resumeSession()
@@ -84,19 +84,6 @@ class CameraManager: NSObject, ObservableObject, CameraManageable {
}
}

private func configureCaptureSession() {
guard status == .unconfigured else {
return
}
session.beginConfiguration()
defer {
session.commitConfiguration()
}

addCameraInput(position: .front)
configureVideoOutput()
}

private func addCameraInput(position: AVCaptureDevice.Position) {
guard let camera = getCameraForPosition(position) else {
set(error: .cameraUnavailable)
@@ -130,14 +117,14 @@ class CameraManager: NSObject, ObservableObject, CameraManageable {
}

private func configureVideoOutput() {
session.removeOutput(videoOutput)
if session.canAddOutput(videoOutput) {
session.addOutput(videoOutput)
videoOutput.videoSettings =
[kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]

let videoConnection = videoOutput.connection(with: .video)
videoConnection?.videoOrientation = .portrait
videoConnection?.isVideoMirrored = true
} else {
set(error: .cannotAddOutput)
status = .failed
@@ -148,15 +135,21 @@
self.checkPermissions()
sessionQueue.async { [self] in
if !self.session.isRunning {
self.session.startRunning()
}
self.session.beginConfiguration()
defer { self.session.commitConfiguration() }
if let currentInput = self.session.inputs.first as? AVCaptureDeviceInput {
self.session.removeInput(currentInput)
if let currentInput = self.session.inputs.first as? AVCaptureDeviceInput {
self.session.removeInput(currentInput)
}
self.addCameraInput(position: position)
self.configureVideoOutput()
session.startRunning()
} else {
self.session.beginConfiguration()
if let currentInput = self.session.inputs.first as? AVCaptureDeviceInput {
self.session.removeInput(currentInput)
}
self.addCameraInput(position: position)
self.configureVideoOutput()
self.session.commitConfiguration()
}
self.configureVideoOutput()
self.addCameraInput(position: position)
}
}

@@ -179,8 +172,10 @@ extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput,
didOutput sampleBuffer: CMSampleBuffer,
from connection: AVCaptureConnection) {
if let buffer = sampleBuffer.imageBuffer {
self.sampleBuffer = buffer
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}

self.sampleBuffer = imageBuffer
}
}
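Not part of the commit: a minimal sketch of the AVCaptureSession reconfiguration order the hunks above move toward — wrap input/output changes in beginConfiguration/commitConfiguration and only call startRunning once the session is not already running. The type, queue label, and device lookup here are illustrative, not the SDK's API.

import AVFoundation
import Foundation

// Sketch only: the general reconfiguration pattern, with illustrative names.
final class SessionSwitcher {
    private let session = AVCaptureSession()
    private let sessionQueue = DispatchQueue(label: "com.example.sessionQueue")

    func switchCamera(to position: AVCaptureDevice.Position) {
        sessionQueue.async { [self] in
            session.beginConfiguration()
            // Remove the existing camera input before adding the new one.
            if let currentInput = session.inputs.first as? AVCaptureDeviceInput {
                session.removeInput(currentInput)
            }
            if let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position),
               let input = try? AVCaptureDeviceInput(device: device),
               session.canAddInput(input) {
                session.addInput(input)
            }
            session.commitConfiguration()

            // Start the session only after the configuration has been committed.
            if !session.isRunning {
                session.startRunning()
            }
        }
    }
}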
27 changes: 4 additions & 23 deletions Sources/SmileID/Classes/Camera/CameraViewController.swift
@@ -17,20 +17,16 @@ class PreviewView: UIViewController {
fatalError("init(coder:) has not been implemented")
}

override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if layedOutSubviews == false {
configurePreviewLayer()
layedOutSubviews = true
}
override func viewDidLoad() {
super.viewDidLoad()
configurePreviewLayer()
}

func configurePreviewLayer() {
guard let session = cameraManager?.session else { return }
previewLayer = AVCaptureVideoPreviewLayer(session: session)
previewLayer?.videoGravity = .resizeAspectFill
previewLayer?.frame = view.bounds
previewLayer?.connection?.videoOrientation = .portrait
view.layer.addSublayer(previewLayer!)
}
}
@@ -40,21 +36,6 @@ extension PreviewView: FaceDetectorDelegate {
guard let previewLayer = previewLayer else {
return .zero
}


let normalizedRect = cameraManager?.cameraPositon == .back ? rect : CGRect(x: rect.origin.y,
y: rect.origin.x,
width: rect.height,
height: rect.width)

let transformedRect = previewLayer.layerRectConverted(fromMetadataOutputRect: normalizedRect)

let mirroredRect = CGRect(x: previewLayer.bounds.width - transformedRect.origin.x - transformedRect.width,
y: previewLayer.bounds.height - transformedRect.origin.y - transformedRect.height,
width: transformedRect.width,
height: transformedRect.height)

return mirroredRect

return previewLayer.layerRectConverted(fromMetadataOutputRect: rect)
}
}
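Not part of the commit: a sketch of what the deleted conversion code is replaced by. `AVCaptureVideoPreviewLayer.layerRectConverted(fromMetadataOutputRect:)` already accounts for videoGravity, orientation and mirroring, so the manual swap-and-mirror math is unnecessary. The controller name below is illustrative.

import AVFoundation
import UIKit

// Sketch only: preview-layer setup in viewDidLoad plus the delegated rect conversion.
final class FacePreviewController: UIViewController {
    private let session = AVCaptureSession()
    private var previewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()
        let layer = AVCaptureVideoPreviewLayer(session: session)
        layer.videoGravity = .resizeAspectFill
        layer.frame = view.bounds
        layer.connection?.videoOrientation = .portrait
        view.layer.addSublayer(layer)
        previewLayer = layer
    }

    // The layer knows its gravity, orientation and mirroring, so no manual flipping is needed.
    func convertFromMetadataToPreviewRect(rect: CGRect) -> CGRect {
        previewLayer?.layerRectConverted(fromMetadataOutputRect: rect) ?? .zero
    }
}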
27 changes: 2 additions & 25 deletions Sources/SmileID/Classes/FaceDetector/FaceDetector.swift
@@ -16,29 +16,7 @@ class FaceDetector: NSObject, ARSCNViewDelegate {
func detectFaces(imageBuffer: CVImageBuffer) {
let detectCaptureQualityRequest = VNDetectFaceCaptureQualityRequest(completionHandler:
detectedFaceQualityRequest)
let detectFaceRectanglesRequest = VNDetectFaceRectanglesRequest { [self] request, _ in
guard let results = request.results as? [VNFaceObservation], let viewDelegate = viewDelegate else {
model?.perform(action: .noFaceDetected)
return
}

if results.count > 1 {
model?.perform(action: .multipleFacesDetected)
return
}
guard let result = results.first, !result.boundingBox.isNaN else {
model?.perform(action: .noFaceDetected)
return
}
let convertedBoundingBox = viewDelegate.convertFromMetadataToPreviewRect(rect: result.boundingBox)

let faceObservationModel = FaceGeometryModel(
boundingBox: convertedBoundingBox,
roll: result.roll ?? 0,
yaw: result.yaw ?? 0
)
model?.perform(action: .faceObservationDetected(faceObservationModel))
}
let detectFaceRectanglesRequest = VNDetectFaceRectanglesRequest(completionHandler: detectedFaceRectangles)

// Use most recent models or fallback to older versions
if #available(iOS 14.0, *) {
@@ -114,8 +92,7 @@ class FaceDetector: NSObject, ARSCNViewDelegate {
imageBuffer: CVImageBuffer) {
do {
try sequenceHandler.perform(requests,
on: imageBuffer,
orientation: .upMirrored)
on: imageBuffer, orientation: .leftMirrored)
} catch {
print(error.localizedDescription)
}
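Not part of the commit: a sketch of running a Vision face-rectangles request with an explicit orientation, as the hunk above switches to. The correct CGImagePropertyOrientation depends on camera position and device orientation; `.leftMirrored` assumes a front camera delivering portrait frames.

import Vision
import CoreVideo

// Sketch only: face detection on a camera buffer with an explicit orientation.
func detectFaces(in imageBuffer: CVImageBuffer,
                 using sequenceHandler: VNSequenceRequestHandler,
                 completion: @escaping ([VNFaceObservation]) -> Void) {
    let request = VNDetectFaceRectanglesRequest { request, _ in
        completion((request.results as? [VNFaceObservation]) ?? [])
    }
    do {
        try sequenceHandler.perform([request],
                                    on: imageBuffer,
                                    orientation: .leftMirrored)
    } catch {
        print(error.localizedDescription)
    }
}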
16 changes: 8 additions & 8 deletions Sources/SmileID/Classes/Helpers/ImageUtils.swift
@@ -24,24 +24,24 @@ class ImageUtils {
let trueImageSize = CGSize(width: imagewidth, height: screenImageSize.height)

// ratio of the true image width to displayed image width
let ycuttoffregionAgentMode: CGFloat = max(imageHeight, screenImageSize.width) / min(imageHeight, screenImageSize.width)
let xcutoffregion: CGFloat = max(imagewidth, screenImageSize.width) / min(imagewidth, screenImageSize.width)
let ycutoffregion: CGFloat = max(imageHeight, screenImageSize.height) / min(imageHeight, screenImageSize.height)
var ycutoffregion: CGFloat = max(imageHeight, screenImageSize.height) / min(imageHeight, screenImageSize.height)

// if the pixel buffer comes from AVCaptureSession, the y cutoff is not required
if orientation == .up || orientation == .upMirrored {
ycutoffregion = 1
}
// scale down the original buffer to match the size of whats displayed on screen
guard let scaledDownBuffer = resizePixelBuffer(buffer, size: trueImageSize) else { return nil }

// calculate crop rect

let cropL = max(faceGeometry.boundingBox.width, faceGeometry.boundingBox.height)
let cropRect = agentMode ? CGRect(x: faceGeometry.boundingBox.origin.y * ycuttoffregionAgentMode,
y: faceGeometry.boundingBox.origin.y * ycuttoffregionAgentMode,
width: cropL,
height: cropL) : CGRect(x: faceGeometry.boundingBox.origin.x * xcutoffregion,
let cropRect = CGRect(x: faceGeometry.boundingBox.origin.x * xcutoffregion,
y: faceGeometry.boundingBox.origin.y * ycutoffregion,
width: cropL,
height: cropL)
let finalrect = agentMode ? increaseRect(rect: cropRect,
byPercentage: 1.5) : increaseRect(rect: cropRect, byPercentage: 1)
let finalrect = increaseRect(rect: cropRect, byPercentage: 1)

// crop face from the buffer returned in the above operation and return jpg
return cropFace(scaledDownBuffer,
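Not part of the commit: a sketch of the orientation-aware scale factors this hunk computes. Function and parameter names are illustrative; the idea is that buffers coming straight from AVCaptureSession (`.up`/`.upMirrored`) already match the displayed height, so only the x ratio applies.

import CoreGraphics
import ImageIO

// Sketch only: derive a square crop rect for the detected face. The x/y ratios map a
// bounding box measured against the displayed size back onto the buffer.
func faceCropRect(boundingBox: CGRect,
                  imageSize: CGSize,
                  displayedSize: CGSize,
                  orientation: CGImagePropertyOrientation) -> CGRect {
    let xRatio = max(imageSize.width, displayedSize.width) / min(imageSize.width, displayedSize.width)
    var yRatio = max(imageSize.height, displayedSize.height) / min(imageSize.height, displayedSize.height)
    if orientation == .up || orientation == .upMirrored {
        yRatio = 1
    }
    // Use the longer side of the box so the crop stays square.
    let side = max(boundingBox.width, boundingBox.height)
    return CGRect(x: boundingBox.origin.x * xRatio,
                  y: boundingBox.origin.y * yRatio,
                  width: side,
                  height: side)
}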
@@ -85,7 +85,7 @@ class ARViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {
let maxX = allXs.max() ?? 0
let minY = allYs.min() ?? 0
let maxY = allYs.max() ?? 0
let boundingBox = CGRect(x: minX, y: minY, width: (maxX - minX) * 0.8, height: (maxY - minY) * 0.8)
let boundingBox = CGRect(x: minX, y: minY, width: (maxX - minX), height: (maxY - minY))
return boundingBox
}
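Not part of the commit: a sketch of deriving the ARKit face bounding box from projected mesh vertices without the 0.8 shrink factor the hunk removes. The helper name and its input are illustrative; it assumes the vertices have already been projected into view coordinates.

import CoreGraphics

// Sketch only: bounding box of projected ARKit face-mesh vertices at full size.
func faceBoundingBox(from projectedPoints: [CGPoint]) -> CGRect {
    let xs = projectedPoints.map { $0.x }
    let ys = projectedPoints.map { $0.y }
    guard let minX = xs.min(), let maxX = xs.max(),
          let minY = ys.min(), let maxY = ys.max() else { return .zero }
    return CGRect(x: minX, y: minY, width: maxX - minX, height: maxY - minY)
}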

38 changes: 30 additions & 8 deletions Sources/SmileID/Classes/SelfieCapture/View/FaceOverlayView.swift
@@ -1,8 +1,8 @@
import Foundation
import Combine
import SwiftUI

struct FaceOverlayView: View {
@State var agentMode = false
@ObservedObject private(set) var model: SelfieCaptureViewModel
var body: some View {
GeometryReader { geometry in
@@ -19,7 +19,27 @@ struct FaceOverlayView: View {
.blendMode(.destinationOut)
.frame(width: faceWidth,
height: faceHeight)
.background(GeometryReader { localGeometry in // local geometry reader
Color.clear.onReceive(Just(localGeometry.frame(in: .global))) { globalFrame in
if globalFrame.origin.x != model.faceLayoutGuideFrame.origin.x
{
let window = UIApplication
.shared
.connectedScenes
.flatMap { ($0 as? UIWindowScene)?.windows ?? [] }
.last { $0.isKeyWindow }
if let rootView = window {
// Geometry reader's .global returns the frame in the screen's coordinate system.
let safeArea = rootView.screen.bounds.height - geometry.size.height
model.faceLayoutGuideFrame = CGRect(origin: CGPoint(x: globalFrame.origin.x,
y: globalFrame.origin.y - safeArea),
size: globalFrame.size)

}

}
}
})
)
.overlay(FaceShape()
.stroke(SmileID.theme.accent.opacity(0.4),
@@ -41,18 +61,20 @@
.scaleEffect(1.2, anchor: .top)
InstructionsView(model: model)
.padding(.top, -((faceWidth)/2))
HStack(spacing: 10) {
Text("Agent Mode")
.foregroundColor(agentMode ? SmileID.theme.backgroundMain : SmileID.theme.accent)
.font(SmileID.theme.header4)
Toggle("", isOn: $model.agentMode).labelsHidden()
}
if model.allowsAgentMode {
HStack(spacing: 10) {
Text("Agent Mode")
.foregroundColor(model.agentMode ? SmileID.theme.backgroundMain : SmileID.theme.accent)
.font(SmileID.theme.header4)
Toggle("", isOn: $model.agentMode).labelsHidden()
}
.frame(width: 188, height: 46)
.background(agentMode ? SmileID.theme.accent : SmileID.theme.backgroundMain)
.background(model.agentMode ? SmileID.theme.accent : SmileID.theme.backgroundMain)
.cornerRadius(23)
.shadow(radius: 23)
.padding(.bottom, 35)
.animation(.default)
}
}
}
}
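Not part of the commit: a sketch of the GeometryReader-in-background technique the hunk uses to publish the face cutout's global frame to the view model. `FrameReporter` and `onFrameChange` are illustrative names, not SDK API.

import SwiftUI
import Combine

// Sketch only: report a decorated view's frame in the global (screen) coordinate space.
// A clear background GeometryReader measures the view without affecting layout.
struct FrameReporter: ViewModifier {
    let onFrameChange: (CGRect) -> Void

    func body(content: Content) -> some View {
        content.background(
            GeometryReader { proxy in
                Color.clear
                    .onReceive(Just(proxy.frame(in: .global))) { frame in
                        onFrameChange(frame)
                    }
            }
        )
    }
}

// Usage: Ellipse().modifier(FrameReporter(onFrameChange: { frame in /* store the face guide frame */ }))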
21 changes: 4 additions & 17 deletions Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureView.swift
@@ -27,14 +27,10 @@ public struct SelfieCaptureView: View, SelfieViewDelegate {

public var body: some View {
GeometryReader { geometry in
let ovalSize = ovalSize(from: geometry)
ZStack {
if ARFaceTrackingConfiguration.isSupported && viewModel.agentMode == false {
arView
.onAppear {
viewModel.faceLayoutGuideFrame =
CGRect(origin: .zero,
size: ovalSize)
arView?.preview.model = viewModel
viewModel.viewFinderSize = geometry.size
viewModel.selfieViewDelegate = self
@@ -43,13 +39,10 @@ public struct SelfieCaptureView: View, SelfieViewDelegate {
camera
.onAppear {
viewModel.captureResultDelegate = delegate
viewModel.faceLayoutGuideFrame =
CGRect(origin: .zero,
size: ovalSize)
viewModel.viewDelegate = camera!.preview
viewModel.viewFinderSize = geometry.size
viewModel.cameraManager.switchCamera(to: viewModel.agentMode ? .back : .front)
}.scaleEffect(1.2, anchor: .top)
}
}
faceOverlay
switch viewModel.processingState {
@@ -62,9 +55,8 @@ public struct SelfieCaptureView: View, SelfieViewDelegate {
case .error:
ModalPresenter { ErrorView(viewModel: viewModel) }
default:
Text("")
Color.clear
}

}
}
.edgesIgnoringSafeArea(.all)
@@ -106,15 +98,10 @@ struct FaceBoundingBoxView: View {
Rectangle().fill(Color.clear)
case .faceFound(let faceGeometryModel):
Rectangle()
.path(in: CGRect(
x: faceGeometryModel.boundingBox.origin.x,
y: faceGeometryModel.boundingBox.origin.y,
width: faceGeometryModel.boundingBox.width,
height: faceGeometryModel.boundingBox.height
))
.path(in: faceGeometryModel.boundingBox)
.stroke(Color.yellow, lineWidth: 2.0)
case .errored:
Rectangle().fill(Color.clear)
Rectangle().fill(Color.yellow)
}
}
}
