diff --git a/components/FaceScanner/FaceScanner.tsx b/components/FaceScanner/FaceScanner.tsx
index 49dbd11066..9934ded13d 100644
--- a/components/FaceScanner/FaceScanner.tsx
+++ b/components/FaceScanner/FaceScanner.tsx
@@ -28,7 +28,7 @@ import {Theme} from '.././ui/styleUtils';
 import {getRandomInt} from '../../shared/commonUtil';
 import {
   checkBlink,
-  cropEyeAreaFromFace,
+  validateLiveness,
   faceDetectorConfig,
   getFaceBounds,
   imageCaptureConfig,
@@ -60,7 +60,7 @@ export const FaceScanner: React.FC = props => {
   const [screenColor, setScreenColor] = useState('#0000ff');
   const [faceToCompare, setFaceToCompare] = useState(null);
   const [opacity, setOpacity] = useState(1);
-  const [picArray, setPicArray] = useState([]);
+  const [capturedImages, setCapturedImages] = useState([]);
 
   const screenFlashColors = ['#0000FF', '#00FF00', '#FF0000'];
   const MAX_COUNTER = 15;
@@ -88,7 +88,10 @@ export const FaceScanner: React.FC = props => {
       imageCaptureConfig,
     );
 
-    setPicArray([...picArray, {color: screenColor, image: capturedImage}]);
+    setCapturedImages([
+      ...capturedImages,
+      {screenColor: screenColor, capturedImageUri: capturedImage.uri},
+    ]);
 
     if (counter === randomNumToFaceCompare) {
       setFaceToCompare(capturedImage);
@@ -100,27 +103,16 @@ export const FaceScanner: React.FC = props => {
   }
 
   async function handleFacesDetected({faces}) {
-    checkBlink(faces[0]);
-
-    if (counter == MAX_COUNTER) {
-      setCounter(counter + 1);
-      cameraRef.pausePreview();
-
-      setScreenColor('#ffffff');
-      setInfoText(t('faceProcessingInfo'));
-
-      const result = await cropEyeAreaFromFace(
-        picArray,
-        props.vcImage,
-        faceToCompare,
-      );
-      return result ? props.onValid() : props.onInvalid();
-    } else if (faces.length > 0) {
+    if (counter < MAX_COUNTER) {
+      if (faces.length > 1) {
+        setInfoText(t('multipleFacesDetectedGuide'));
+        return;
+      }
+      setInfoText(t('livenessCaptureGuide'));
+      checkBlink(faces[0]);
       const [withinXBounds, withinYBounds, withinYawAngle, withinRollAngle] =
         getFaceBounds(faces[0]);
-      setInfoText(t('faceOutGuide'));
-
       if (
         withinXBounds &&
         withinYBounds &&
         withinYawAngle &&
@@ -135,6 +127,18 @@ export const FaceScanner: React.FC = props => {
         setInfoText(t('faceInGuide'));
         await captureImage(screenColor);
       }
+    } else {
+      cameraRef.pausePreview();
+
+      setScreenColor('#ffffff');
+      setInfoText(t('faceProcessingInfo'));
+
+      const isLiveImage = await validateLiveness(
+        capturedImages,
+        props.vcImage,
+        faceToCompare,
+      );
+      return isLiveImage ? props.onValid() : props.onInvalid();
     }
   }
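A minimal sketch of the shapes the refactored FaceScanner now passes around. The component itself keeps these values untyped; the type alias, the `declare` signature and the typing of `faceToCompare` below are illustrative assumptions, while the field names and the call order come from the diff above.

```ts
// Illustrative only: FaceScanner.tsx keeps these values untyped.
type CapturedImageEntry = {
  screenColor: string; // flash colour shown while this frame was captured
  capturedImageUri: string; // file URI returned by the camera for that frame
};

// Once `counter` passes MAX_COUNTER, the component hands everything to the helper:
declare function validateLiveness(
  capturedImages: CapturedImageEntry[],
  vcImage: string, // face photo carried inside the credential (data URI)
  faceToCompare: {uri: string; base64?: string}, // one randomly kept full capture
): Promise<boolean>;
```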
diff --git a/components/FaceScanner/FaceScannerHelper.ts b/components/FaceScanner/FaceScannerHelper.ts
index 1fb035bff8..3f8884f33f 100644
--- a/components/FaceScanner/FaceScannerHelper.ts
+++ b/components/FaceScanner/FaceScannerHelper.ts
@@ -6,14 +6,15 @@ import ImageEditor from '@react-native-community/image-editor';
 import {ImageType} from 'expo-camera';
 import {getColors} from 'react-native-image-colors';
 import {faceCompare} from '@iriscan/biometric-sdk-react-native';
+import fileStorage from '../../shared/fileStorage';
 
-let FaceCropPicArray: any[] = new Array();
-let EyeCropPicArray: any[] = new Array();
+let croppedFaceImages: any[] = new Array();
+let croppedEyeImages: any[] = new Array();
 let predictedColorResults: any[] = new Array();
 let facePoints;
 let calculatedThreshold;
 let faceCompareOuptut;
-let capturedFaceImage;
+let croppedFaceImage;
 let leftEyeWasClosed = false;
 let rightEyeWasClosed = false;
 let lastBlinkTimestamp = 0;
@@ -148,92 +149,97 @@ export const getEyeColorPredictionResult = async (
   });
 };
 
-export const cropEyeAreaFromFace = async (picArray, vcImage, capturedImage) => {
-  try {
-    await Promise.all(
-      picArray.map(async pic => {
-        facePoints = (
-          await FaceDetector.detectFacesAsync(pic.image.uri, faceDetectorConfig)
-        ).faces[0];
-
-        if (
-          facePoints.leftEyeOpenProbability > eyeOpenProbability &&
-          facePoints.rightEyeOpenProbability > eyeOpenProbability
-        ) {
-          capturedFaceImage = await ImageEditor.cropImage(pic.image.uri, {
-            offset: {
-              x: facePoints.bounds.origin.x,
-              y: facePoints.bounds.origin.y,
-            },
-            size: {
-              width: facePoints.bounds.size.width,
-              height: facePoints.bounds.size.height,
-            },
-          });
-
-          FaceCropPicArray.push({color: pic.color, image: capturedFaceImage});
-        }
-      }),
-    );
+const cropFacePortionFromCapturedImage = async ({
+  screenColor,
+  capturedImageUri,
+}) => {
+  facePoints = (
+    await FaceDetector.detectFacesAsync(capturedImageUri, faceDetectorConfig)
+  ).faces[0];
 
-    await Promise.all(
-      FaceCropPicArray.map(async pic => {
-        let [leftEyeX, leftEyeY, rightEyeX, rightEyeY] =
-          getNormalizedFacePoints(facePoints);
-
-        const leftCroppedImage = await ImageEditor.cropImage(pic.image.uri, {
-          offset: {
-            x: leftEyeX - offsetX,
-            y: leftEyeY - offsetY,
-          },
-          size: {
-            width: offsetX * 2,
-            height: offsetY / 2 - eyeCropHeightConst,
-          },
-        });
-
-        const rightCroppedImage = await ImageEditor.cropImage(pic.image.uri, {
-          offset: {
-            x: rightEyeX - offsetX,
-            y: rightEyeY - offsetY,
-          },
-          size: {
-            width: offsetX * 2,
-            height: offsetY / 2 - eyeCropHeightConst,
-          },
-        });
-
-        EyeCropPicArray.push({
-          color: pic.color,
-          leftEye: leftCroppedImage,
-          rightEye: rightCroppedImage,
-        });
-      }),
-    );
+  if (
+    facePoints.leftEyeOpenProbability > eyeOpenProbability &&
+    facePoints.rightEyeOpenProbability > eyeOpenProbability
+  ) {
+    croppedFaceImage = await ImageEditor.cropImage(capturedImageUri, {
+      offset: {
+        x: facePoints.bounds.origin.x,
+        y: facePoints.bounds.origin.y,
+      },
+      size: {
+        width: facePoints.bounds.size.width,
+        height: facePoints.bounds.size.height,
+      },
+    });
+    croppedFaceImages.push({screenColor, faceImageUri: croppedFaceImage.uri});
+    await fileStorage.removeItemIfExist(capturedImageUri);
+  }
+};
 
-    await Promise.all(
-      EyeCropPicArray.map(async pic => {
-        const leftEyeColors = await getColors(pic.leftEye.uri);
-        const rightEyeColors = await getColors(pic.rightEye.uri);
+const cropEyePortionsFromCroppedFaceImages = async ({
+  screenColor,
+  faceImageUri,
+}) => {
+  let [leftEyeX, leftEyeY, rightEyeX, rightEyeY] =
+    getNormalizedFacePoints(facePoints);
+
+  const leftCroppedImage = await ImageEditor.cropImage(faceImageUri, {
+    offset: {
+      x: leftEyeX - offsetX,
+      y: leftEyeY - offsetY,
+    },
+    size: {
+      width: offsetX * 2,
+      height: offsetY / 2 - eyeCropHeightConst,
+    },
+  });
 
-        const leftRGBAColors = Object.values(leftEyeColors)
-          .filter(filterColor)
-          .map(color => hexRgb(color));
+  const rightCroppedImage = await ImageEditor.cropImage(faceImageUri, {
+    offset: {
+      x: rightEyeX - offsetX,
+      y: rightEyeY - offsetY,
+    },
+    size: {
+      width: offsetX * 2,
+      height: offsetY / 2 - eyeCropHeightConst,
+    },
+  });
 
-        const rightRGBAColors = Object.values(rightEyeColors)
-          .filter(filterColor)
-          .map(color => hexRgb(color));
+  croppedEyeImages.push({
+    screenColor: screenColor,
+    leftEyeUri: leftCroppedImage.uri,
+    rightEyeUri: rightCroppedImage.uri,
+  });
+  await fileStorage.removeItemIfExist(faceImageUri);
+};
 
-        const rgbColor = hexRgb(pic.color);
-        await getEyeColorPredictionResult(leftRGBAColors, rgbColor);
-        await getEyeColorPredictionResult(rightRGBAColors, rgbColor);
-      }),
-    );
-  } catch (err) {
-    console.error('Unable to crop the images::', err);
-    return false;
-  }
+const compareEyeColorsWithScreenColor = async ({
+  screenColor,
+  leftEyeUri,
+  rightEyeUri,
+}) => {
+  const leftEyeColors = await getColors(leftEyeUri);
+  const rightEyeColors = await getColors(rightEyeUri);
+
+  const leftRGBAColors = Object.values(leftEyeColors)
+    .filter(filterColor)
+    .map(color => hexRgb(color));
+
+  const rightRGBAColors = Object.values(rightEyeColors)
+    .filter(filterColor)
+    .map(color => hexRgb(color));
+
+  const rgbColor = hexRgb(screenColor);
+  await getEyeColorPredictionResult(leftRGBAColors, rgbColor);
+  await getEyeColorPredictionResult(rightRGBAColors, rgbColor);
+  await fileStorage.removeItemIfExist(leftEyeUri);
+  await fileStorage.removeItemIfExist(rightEyeUri);
+};
+const calculateThresholdAndDetectFaceLiveness = async (
+  vcImage,
+  randomCapturedImage,
+) => {
   calculatedThreshold =
     predictedColorResults.filter(element => element).length /
     predictedColorResults.length;
@@ -241,17 +247,53 @@
   const matches = rxDataURI.exec(vcImage).groups;
   const vcFace = matches.data;
 
-  faceCompareOuptut = await faceCompare(vcFace, capturedImage.base64);
+  faceCompareOuptut = await faceCompare(vcFace, randomCapturedImage.base64);
 
   if (blinkCounter > 0) {
     calculatedThreshold = calculatedThreshold + blinkConfidenceScore;
   }
-  if (calculatedThreshold > LIVENESS_THRESHOLD && faceCompareOuptut) {
-    return true;
-  } else {
+  return calculatedThreshold > LIVENESS_THRESHOLD && faceCompareOuptut
+    ? true
+    : false;
+};
+
+export const validateLiveness = async (
+  capturedImages,
+  vcImage,
+  randomCapturedImage,
+) => {
+  try {
+    await Promise.all(
+      capturedImages.map(async capturedImage => {
+        await cropFacePortionFromCapturedImage(capturedImage);
+      }),
+    );
+
+    await Promise.all(
+      croppedFaceImages.map(async croppedFaceImage => {
+        await cropEyePortionsFromCroppedFaceImages(croppedFaceImage);
+      }),
+    );
+  } catch (err) {
+    console.error('Unable to crop the images::', err);
+    return false;
+  }
+
+  try {
+    await Promise.all(
+      croppedEyeImages.map(async croppedEyeImage => {
+        await compareEyeColorsWithScreenColor(croppedEyeImage);
+      }),
+    );
+  } catch (err) {
+    console.error(
+      'Error occurred when extracting the colors from eyes and comparing them with screen color::',
+      err,
+    );
     return false;
   }
+  return calculateThresholdAndDetectFaceLiveness(vcImage, randomCapturedImage);
 };
 
 export interface FaceDetectorConfig {
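The verdict computed by calculateThresholdAndDetectFaceLiveness reduces to a small pure function. The restatement below is a sketch (the function and parameter names are illustrative), but the constant and the formula are taken directly from the diff above.

```ts
// Sketch of the final liveness decision, restated without module-level state.
const LIVENESS_THRESHOLD = 0.4;

function isLive(
  predictedColorResults: boolean[], // one entry per eye-colour vs screen-colour comparison
  blinkCounter: number,
  blinkConfidenceScore: number,
  faceCompareOutput: boolean, // result of faceCompare(vcFace, selfie)
): boolean {
  // Fraction of eye crops whose dominant colour matched the screen flash colour.
  let threshold =
    predictedColorResults.filter(result => result).length /
    predictedColorResults.length;
  // A detected blink adds a fixed confidence bonus.
  if (blinkCounter > 0) {
    threshold += blinkConfidenceScore;
  }
  // Live only if the colour evidence clears the threshold AND the face matches the VC photo.
  return threshold > LIVENESS_THRESHOLD && faceCompareOutput;
}
```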
diff --git a/components/FaceScanner/LivenessDetection.tsx b/components/FaceScanner/LivenessDetection.tsx
index d7df5d664a..3067da7a9d 100644
--- a/components/FaceScanner/LivenessDetection.tsx
+++ b/components/FaceScanner/LivenessDetection.tsx
@@ -4,7 +4,6 @@
 import {View, TouchableOpacity} from 'react-native';
 import Spinner from 'react-native-spinkit';
 import {Column, Text} from '.././ui';
 import {Theme} from '.././ui/styleUtils';
-import Svg, {Defs, Mask, Rect, Ellipse} from 'react-native-svg';
 import testIDProps from '../../shared/commonUtil';
 import {FaceDetectorConfig} from './FaceScannerHelper';
@@ -46,20 +45,6 @@ const LivenessDetection: React.FC = ({
         onFacesDetected={handleFacesDetected}
         faceDetectorSettings={faceDetectorConfig}
       />
-
-
-
-
-
-
-
-
-
diff --git a/locales/ara.json b/locales/ara.json
index 160c69f2d2..6e72b17c45 100644
--- a/locales/ara.json
+++ b/locales/ara.json
@@ -27,12 +27,12 @@
   "FaceScanner": {
     "livenessCaptureGuide": "أمسك الهاتف بثبات، وحافظ على تركيز وجهك في المنتصف.",
     "faceProcessingInfo": "يرجى الانتظار بينما نقوم بمعالجة البيانات.",
-    "faceOutGuide": "أبقِ وجهك داخل الشكل البيضاوي!",
     "faceInGuide": "جاري الالتقاط!",
     "cancel": "يلغي",
     "imageCaptureGuide": "أمسك الهاتف بثبات، وحافظ على تركيز وجهك في المنتصف وانقر على ‘التقاط'",
     "capture": "يأسر",
-    "flipCamera": "فليب الكاميرا"
+    "flipCamera": "فليب الكاميرا",
+    "multipleFacesDetectedGuide": "تم اكتشاف وجوه متعددة. تأكد من أنك وحدك في إطار التحقق."
   },
   "OIDcAuth": {
     "title": "مصادقة OIDC",
@@ -725,9 +725,13 @@
     }
   },
   "postFaceCapture": {
-    "captureSuccessMessage": "تم التحقق من الوجه بنجاح! يتم بدء مشاركة بيانات الاعتماد.",
-    "captureFailureTitle": "فشل التحقق من الوجه!",
-    "captureFailureMessage": "يرجى التأكد من أن وجهك مرئي بوضوح وحاول التقاط صورة شخصية مرة أخرى."
+    "successMessage": "تم التحقق من الوجه بنجاح! يتم بدء مشاركة بيانات الاعتماد.",
+    "failureTitle": "فشل التحقق من الوجه!",
+    "failureMessage": "يرجى التأكد من أن وجهك مرئي بوضوح وحاول التقاط صورة شخصية مرة أخرى.",
+    "LivenessDetection": {
+      "retryFailureMessage": "لم نتمكن من مشاركة بطاقتك بسبب فشل اكتشاف النشاط. الرجاء النقر فوق إعادة المحاولة لمحاولة مشاركة بطاقتك مرة أخرى، أو النقر فوق الصفحة الرئيسية للخروج من المشاركة.",
+      "maxRetryFailureMessage": "آسف! لم نتمكن من مشاركة بطاقتك بسبب فشل اكتشاف النشاط. الرجاء معاودة المحاولة في وقت لاحق."
+    }
   },
   "rational": {
     "title": "قم بتشغيل موقعك",
diff --git a/locales/en.json b/locales/en.json
index 14ebd9d7ad..6b72daaef8 100644
--- a/locales/en.json
+++ b/locales/en.json
@@ -27,12 +27,12 @@
   "FaceScanner": {
     "livenessCaptureGuide": "Hold the phone steady, keep your face focused in the centre.",
     "faceProcessingInfo": "Please wait while we process the data.",
-    "faceOutGuide": "Keep your face inside the oval!",
     "faceInGuide": "Capturing in progress!",
     "imageCaptureGuide": "Hold the phone steady, keep your face focused in the centre and click on Capture.",
     "capture": "Capture",
     "cancel": "Cancel",
-    "flipCamera": "Flip Camera"
+    "flipCamera": "Flip Camera",
+    "multipleFacesDetectedGuide": "Multiple faces detected. Make sure only you are in the frame for verification."
   },
   "OIDcAuth": {
     "title": "OIDC Authentication",
@@ -733,9 +733,13 @@
     }
   },
   "postFaceCapture": {
-    "captureSuccessMessage": "Face Verification is successful! Credential sharing is initiated.",
-    "captureFailureTitle": "Face verification failed!",
-    "captureFailureMessage": "Please ensure that your face is clearly visible and try taking selfie again."
+    "successMessage": "Face Verification is successful! Credential sharing is initiated.",
+    "failureTitle": "Face Verification Failed!",
+    "failureMessage": "Please ensure that your face is clearly visible and try taking selfie again.",
+    "LivenessDetection": {
+      "retryFailureMessage": "We were unable to share your card due to a failure in liveness detection. Please click Retry to attempt sharing your card again, or click Home to exit sharing.",
+      "maxRetryFailureMessage": "Sorry! We were unable to share your card due to a failure in liveness detection. Please try again later."
+    }
   },
   "rational": {
     "title": "Turn on your location",
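For reference, the reorganised ScanScreen keys are read by the screens changed later in this diff roughly as follows. This is only a usage sketch assuming the app's existing i18next setup; the key paths are copied from the call sites in this diff, and the separator handling is whatever that setup already configures.

```ts
import i18next from 'i18next';

// Key paths copied from the call sites changed further down in this diff.
const banner = i18next.t('ScanScreen:postFaceCapture:successMessage'); // ScanLayout banner
const title = i18next.t('ScanScreen:postFaceCapture.failureTitle'); // failure overlay title
const retryMessage = i18next.t(
  'ScanScreen:postFaceCapture.LivenessDetection.retryFailureMessage',
);
const maxRetryMessage = i18next.t(
  'ScanScreen:postFaceCapture.LivenessDetection.maxRetryFailureMessage',
);
```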
diff --git a/locales/fil.json b/locales/fil.json
index e4677f1dc9..be253031d6 100644
--- a/locales/fil.json
+++ b/locales/fil.json
@@ -30,12 +30,12 @@
   "FaceScanner": {
     "livenessCaptureGuide": "Hawakan nang matatag ang telepono, panatilihing nakatutok ang iyong mukha sa gitna.",
     "faceProcessingInfo": "Mangyaring maghintay habang pinoproseso namin ang data.",
-    "faceOutGuide": "Panatilihin ang iyong mukha sa loob ng oval!",
     "faceInGuide": "Kasalukuyang kumukuha!",
     "cancel": "Kanselahin",
     "imageCaptureGuide": "Hawakan nang matatag ang telepono, panatilihing nakatutok ang iyong mukha sa gitna at mag-click sa Capture.",
     "capture": "Kunin",
-    "flipCamera": "I-flip ang Camera"
+    "flipCamera": "I-flip ang Camera",
+    "multipleFacesDetectedGuide": "Maramihang mukha ang nakita. Tiyaking ikaw lang ang nasa frame para sa pag-verify."
   },
   "OIDcAuth": {
     "title": "OIDC Authentication",
@@ -724,9 +724,13 @@
     }
   },
   "postFaceCapture": {
-    "captureSuccessMessage": "Matagumpay ang Face Verification! Sinisimulan ang pagbabahagi ng kredensyal.",
-    "captureFailureTitle": "Nabigo ang pag-verify ng mukha!",
-    "captureFailureMessage": "Pakitiyak na malinaw na nakikita ang iyong mukha at subukang mag-selfie muli."
+    "successMessage": "Matagumpay ang Face Verification! Sinisimulan ang pagbabahagi ng kredensyal.",
+    "failureTitle": "Nabigo ang Pag-verify ng Mukha!",
+    "failureMessage": "Pakitiyak na malinaw na nakikita ang iyong mukha at subukang mag-selfie muli.",
+    "LivenessDetection": {
+      "retryFailureMessage": "Hindi namin naibahagi ang iyong card dahil sa pagkabigo sa liveness detection. Paki-click ang Subukang muli upang subukang ibahagi muli ang iyong card, o i-click ang Home upang lumabas sa pagbabahagi.",
+      "maxRetryFailureMessage": "Paumanhin! Hindi namin naibahagi ang iyong card dahil sa pagkabigo sa liveness detection. Subukang muli mamaya."
+    }
   },
   "rational": {
     "title": "I-on ang iyong lokasyon",
diff --git a/locales/hin.json b/locales/hin.json
index 402a65dd25..7ad2b2e2a8 100644
--- a/locales/hin.json
+++ b/locales/hin.json
@@ -30,12 +30,12 @@
   "FaceScanner": {
     "livenessCaptureGuide": "फ़ोन को स्थिर रखें, अपना चेहरा केंद्र में रखें।",
     "faceProcessingInfo": "जब तक हम डेटा संसाधित कर रहे हैं कृपया प्रतीक्षा करें।",
-    "faceOutGuide": "अपना चेहरा अंडाकार के अंदर रखें!",
     "faceInGuide": "कैप्चरिंग प्रगति पर है!",
     "cancel": "रद्द करना",
     "imageCaptureGuide": "फ़ोन को स्थिर रखें, अपना चेहरा केंद्र में रखें और कैप्चर पर क्लिक करें।",
     "capture": "कब्जा",
-    "flipCamera": "कैमरा पलटें"
+    "flipCamera": "कैमरा पलटें",
+    "multipleFacesDetectedGuide": "अनेक चेहरों का पता चला. सुनिश्चित करें कि सत्यापन के लिए केवल आप ही फ्रेम में हैं।"
   },
   "OIDcAuth": {
     "title": "OIDC प्रमाणीकरण",
@@ -727,9 +727,13 @@
     }
   },
   "postFaceCapture": {
-    "captureSuccessMessage": "चेहरा सत्यापन सफल है! क्रेडेंशियल शेयरिंग शुरू की गई है.",
-    "captureFailureTitle": "चेहरे का सत्यापन विफल!",
-    "captureFailureMessage": "कृपया सुनिश्चित करें कि आपका चेहरा स्पष्ट रूप से दिखाई दे रहा है और फिर से सेल्फी लेने का प्रयास करें।"
+    "successMessage": "चेहरा सत्यापन सफल है! क्रेडेंशियल शेयरिंग शुरू की गई है.",
+    "failureTitle": "चेहरा सत्यापन विफल!",
+    "failureMessage": "कृपया सुनिश्चित करें कि आपका चेहरा स्पष्ट रूप से दिखाई दे रहा है और फिर से सेल्फी लेने का प्रयास करें।",
+    "LivenessDetection": {
+      "retryFailureMessage": "लाइवनेस डिटेक्शन में विफलता के कारण हम आपका कार्ड साझा करने में असमर्थ थे। कृपया अपना कार्ड दोबारा साझा करने का प्रयास करने के लिए पुनः प्रयास करें पर क्लिक करें, या साझाकरण से बाहर निकलने के लिए होम पर क्लिक करें।",
+      "maxRetryFailureMessage": "क्षमा करें! लाइवनेस डिटेक्शन में विफलता के कारण हम आपका कार्ड साझा करने में असमर्थ थे। कृपया बाद में पुन: प्रयास करें।"
+    }
   },
   "rational": {
     "title": "अपना स्थान चालू करें",
diff --git a/locales/kan.json b/locales/kan.json
index 9d8074dd4b..a98afd9926 100644
--- a/locales/kan.json
+++ b/locales/kan.json
@@ -30,12 +30,12 @@
   "FaceScanner": {
     "livenessCaptureGuide": "ಫೋನ್ ಅನ್ನು ಸ್ಥಿರವಾಗಿ ಹಿಡಿದುಕೊಳ್ಳಿ, ನಿಮ್ಮ ಮುಖವನ್ನು ಮಧ್ಯದಲ್ಲಿ ಕೇಂದ್ರೀಕರಿಸಿ.",
     "faceProcessingInfo": "ನಾವು ಡೇಟಾವನ್ನು ಪ್ರಕ್ರಿಯೆಗೊಳಿಸುವಾಗ ದಯವಿಟ್ಟು ನಿರೀಕ್ಷಿಸಿ.",
-    "faceOutGuide": "ನಿಮ್ಮ ಮುಖವನ್ನು ಅಂಡಾಕಾರದೊಳಗೆ ಇರಿಸಿ!",
     "faceInGuide": "ಸೆರೆಹಿಡಿಯುವಿಕೆ ಪ್ರಗತಿಯಲ್ಲಿದೆ!",
     "cancel": "ರದ್ದುಮಾಡು",
     "imageCaptureGuide": "ಫೋನ್ ಅನ್ನು ಸ್ಥಿರವಾಗಿ ಹಿಡಿದುಕೊಳ್ಳಿ, ನಿಮ್ಮ ಮುಖವನ್ನು ಮಧ್ಯದಲ್ಲಿ ಕೇಂದ್ರೀಕರಿಸಿ ಮತ್ತು ಕ್ಯಾಪ್ಚರ್ ಕ್ಲಿಕ್ ಮಾಡಿ.",
     "capture": "ಸೆರೆಹಿಡಿಯಿರಿ",
-    "flipCamera": "ಫ್ಲಿಪ್ ಕ್ಯಾಮೆರಾ"
+    "flipCamera": "ಫ್ಲಿಪ್ ಕ್ಯಾಮೆರಾ",
+    "multipleFacesDetectedGuide": "ಬಹು ಮುಖಗಳು ಪತ್ತೆಯಾಗಿವೆ. ಪರಿಶೀಲನೆಗಾಗಿ ನೀವು ಮಾತ್ರ ಫ್ರೇಮ್‌ನಲ್ಲಿದ್ದೀರಿ ಎಂದು ಖಚಿತಪಡಿಸಿಕೊಳ್ಳಿ."
   },
   "OIDcAuth": {
     "title": "OIDC ದೃಢೀಕರಣ",
@@ -725,9 +725,13 @@
     }
   },
   "postFaceCapture": {
-    "captureSuccessMessage": "ಮುಖ ಪರಿಶೀಲನೆ ಯಶಸ್ವಿಯಾಗಿದೆ! ರುಜುವಾತು ಹಂಚಿಕೆಯನ್ನು ಪ್ರಾರಂಭಿಸಲಾಗಿದೆ.",
-    "captureFailureTitle": "ಮುಖ ಪರಿಶೀಲನೆ ವಿಫಲವಾಗಿದೆ!",
-    "captureFailureMessage": "ನಿಮ್ಮ ಮುಖವು ಸ್ಪಷ್ಟವಾಗಿ ಗೋಚರಿಸುತ್ತಿದೆಯೇ ಎಂಬುದನ್ನು ಖಚಿತಪಡಿಸಿಕೊಳ್ಳಿ ಮತ್ತು ಮತ್ತೊಮ್ಮೆ ಸೆಲ್ಫಿ ತೆಗೆದುಕೊಳ್ಳಲು ಪ್ರಯತ್ನಿಸಿ."
+    "successMessage": "ಮುಖ ಪರಿಶೀಲನೆ ಯಶಸ್ವಿಯಾಗಿದೆ! ರುಜುವಾತು ಹಂಚಿಕೆಯನ್ನು ಪ್ರಾರಂಭಿಸಲಾಗಿದೆ.",
+    "failureTitle": "ಮುಖ ಪರಿಶೀಲನೆ ವಿಫಲವಾಗಿದೆ!",
+    "failureMessage": "ನಿಮ್ಮ ಮುಖವು ಸ್ಪಷ್ಟವಾಗಿ ಗೋಚರಿಸುತ್ತಿದೆಯೇ ಎಂಬುದನ್ನು ಖಚಿತಪಡಿಸಿಕೊಳ್ಳಿ ಮತ್ತು ಮತ್ತೊಮ್ಮೆ ಸೆಲ್ಫಿ ತೆಗೆದುಕೊಳ್ಳಲು ಪ್ರಯತ್ನಿಸಿ.",
+    "LivenessDetection": {
+      "retryFailureMessage": "ಲೈವ್‌ನೆಸ್ ಪತ್ತೆಯಲ್ಲಿ ವಿಫಲವಾದ ಕಾರಣ ನಿಮ್ಮ ಕಾರ್ಡ್ ಅನ್ನು ಹಂಚಿಕೊಳ್ಳಲು ನಮಗೆ ಸಾಧ್ಯವಾಗಲಿಲ್ಲ. ನಿಮ್ಮ ಕಾರ್ಡ್ ಅನ್ನು ಮತ್ತೊಮ್ಮೆ ಹಂಚಿಕೊಳ್ಳಲು ಪ್ರಯತ್ನಿಸಲು ಮರುಪ್ರಯತ್ನಿಸಿ ಕ್ಲಿಕ್ ಮಾಡಿ ಅಥವಾ ಹಂಚಿಕೆಯಿಂದ ನಿರ್ಗಮಿಸಲು ಹೋಮ್ ಕ್ಲಿಕ್ ಮಾಡಿ.",
+      "maxRetryFailureMessage": "ಕ್ಷಮಿಸಿ! ಲೈವ್‌ನೆಸ್ ಪತ್ತೆಯಲ್ಲಿ ವಿಫಲವಾದ ಕಾರಣ ನಿಮ್ಮ ಕಾರ್ಡ್ ಅನ್ನು ಹಂಚಿಕೊಳ್ಳಲು ನಮಗೆ ಸಾಧ್ಯವಾಗಲಿಲ್ಲ. ದಯವಿಟ್ಟು ನಂತರ ಮತ್ತೆ ಪ್ರಯತ್ನಿಸಿ."
+    }
   },
   "rational": {
     "title": "ನಿಮ್ಮ ಸ್ಥಳವನ್ನು ಆನ್ ಮಾಡಿ",
diff --git a/locales/tam.json b/locales/tam.json
index b902a6f1ff..440fcce261 100644
--- a/locales/tam.json
+++ b/locales/tam.json
@@ -30,12 +30,12 @@
   "FaceScanner": {
     "livenessCaptureGuide": "மொபைலை நிலையாகப் பிடித்து, உங்கள் முகத்தை மையமாக வைத்துக்கொள்ளவும்.",
     "faceProcessingInfo": "நாங்கள் தரவைச் செயலாக்கும் வரை காத்திருக்கவும்.",
-    "faceOutGuide": "உங்கள் முகத்தை ஓவலின் உள்ளே வைத்திருங்கள்!",
     "faceInGuide": "பிடிப்பு நடைபெறுகிறது!",
     "cancel": "ரத்து செய்",
     "imageCaptureGuide": "மொபைலை நிலையாகப் பிடித்து, உங்கள் முகத்தை மையமாக வைத்து, பிடிப்பு என்பதைக் கிளிக் செய்யவும்.",
     "capture": "பிடிப்பு",
-    "flipCamera": "ஃபிளிப் கேமரா"
+    "flipCamera": "ஃபிளிப் கேமரா",
+    "multipleFacesDetectedGuide": "பல முகங்கள் கண்டறியப்பட்டன. சரிபார்ப்பிற்கான சட்டகத்தில் நீங்கள் மட்டுமே இருக்கிறீர்கள் என்பதை உறுதிப்படுத்தவும்."
   },
   "OIDcAuth": {
     "title": "OIDC அங்கீகாரம்",
@@ -725,9 +725,13 @@
     }
   },
   "postFaceCapture": {
-    "captureSuccessMessage": "முக சரிபார்ப்பு வெற்றிகரமாக உள்ளது! நற்சான்றிதழ் பகிர்வு தொடங்கப்பட்டுள்ளது.",
-    "captureFailureTitle": "முக சரிபார்ப்பு தோல்வியடைந்தது!",
-    "captureFailureMessage": "உங்கள் முகம் தெளிவாகத் தெரியும்படி இருப்பதை உறுதிசெய்து, மீண்டும் செல்ஃபி எடுக்க முயற்சிக்கவும்."
+    "successMessage": "முக சரிபார்ப்பு வெற்றிகரமாக உள்ளது! நற்சான்றிதழ் பகிர்வு தொடங்கப்பட்டுள்ளது.",
+    "failureTitle": "முக சரிபார்ப்பு தோல்வியடைந்தது!",
+    "failureMessage": "உங்கள் முகம் தெளிவாகத் தெரியும்படி இருப்பதை உறுதிசெய்து, மீண்டும் செல்ஃபி எடுக்க முயற்சிக்கவும்.",
+    "LivenessDetection": {
+      "retryFailureMessage": "லைவ்னெஸ் கண்டறிதலில் தோல்வியடைந்ததால், உங்கள் கார்டை எங்களால் பகிர முடியவில்லை. உங்கள் கார்டை மீண்டும் பகிர முயற்சிக்கவும் அல்லது பகிர்விலிருந்து வெளியேற முகப்பு என்பதைக் கிளிக் செய்யவும்.",
+      "maxRetryFailureMessage": "மன்னிக்கவும்! லைவ்னெஸ் கண்டறிதலில் தோல்வியடைந்ததால், உங்கள் கார்டை எங்களால் பகிர முடியவில்லை. பிறகு முயற்சிக்கவும்."
+    }
   },
   "rational": {
     "title": "உங்கள் இருப்பிடத்தை இயக்கவும்",
diff --git a/machines/Issuers/IssuersMachine.typegen.ts b/machines/Issuers/IssuersMachine.typegen.ts
index 78a7eb285a..a9726fbf32 100644
--- a/machines/Issuers/IssuersMachine.typegen.ts
+++ b/machines/Issuers/IssuersMachine.typegen.ts
@@ -181,7 +181,9 @@ export interface Typegen0 {
       | 'error.platform.issuersMachine.performAuthorization:invocation[0]';
     resetVerificationErrorMessage: 'RESET_VERIFY_ERROR';
     sendBackupEvent: 'done.invoke.issuersMachine.storing:invocation[0]';
-    sendDownloadingFailedToVcMeta: 'error.platform.issuersMachine.downloadCredentials:invocation[0]';
+    sendDownloadingFailedToVcMeta:
+      | 'error.platform.issuersMachine.downloadCredentials:invocation[0]'
+      | 'error.platform.issuersMachine.performAuthorization:invocation[0]';
     sendErrorEndEvent: 'error.platform.issuersMachine.verifyingCredential:invocation[0]';
     sendImpressionEvent: 'done.invoke.issuersMachine.displayIssuers:invocation[0]';
     sendSuccessEndEvent: 'done.invoke.issuersMachine.verifyingCredential:invocation[0]';
diff --git a/screens/Scan/ScanLayout.tsx b/screens/Scan/ScanLayout.tsx
index b54e6dfc54..f9a86a1d55 100644
--- a/screens/Scan/ScanLayout.tsx
+++ b/screens/Scan/ScanLayout.tsx
@@ -41,7 +41,7 @@ export const ScanLayout: React.FC = () => {
           }
           onRetry={controller.statusOverlay?.onRetry}
          showBanner={controller.isFaceIdentityVerified}
-          bannerMessage={t('ScanScreen:postFaceCapture:captureSuccessMessage')}
+          bannerMessage={t('ScanScreen:postFaceCapture:successMessage')}
          onBannerClose={controller.CLOSE_BANNER}
          bannerType={BannerStatusType.SUCCESS}
          bannerTestID={'faceVerificationSuccess'}
diff --git a/screens/VerifyIdentityOverlay.tsx b/screens/VerifyIdentityOverlay.tsx
index 1d8a3c404a..4e70507eb8 100644
--- a/screens/VerifyIdentityOverlay.tsx
+++ b/screens/VerifyIdentityOverlay.tsx
@@ -1,4 +1,4 @@
-import React from 'react';
+import React, {useState} from 'react';
 import {FaceScanner} from '../components/FaceScanner/FaceScanner';
 import {Column} from '../components/ui';
 import {Theme} from '../components/ui/styleUtils';
@@ -7,11 +7,13 @@ import {Modal} from '../components/ui/Modal';
 import {useTranslation} from 'react-i18next';
 import {Error} from '../components/ui/Error';
 import {SvgImage} from '../components/ui/svg';
+import {LIVENESS_CHECK_RETRY_LIMIT} from '../shared/constants';
 
 export const VerifyIdentityOverlay: React.FC<
   VerifyIdentityOverlayProps
 > = props => {
   const {t} = useTranslation('VerifyIdentityOverlay');
+  const [retryCount, setRetryCount] = useState(0);
   const credential = props.credential;
 
   const vcImage = props.verifiableCredentialData.face;
@@ -33,6 +35,17 @@
     modalProps.showHeader = false;
   }
 
+  const handlePrimaryButtonEvent = () => {
+    setRetryCount(retryCount + 1);
+    props.onRetryVerification();
+  };
+
+  const faceDetectionFailureMessage = props.isLivenessEnabled
+    ? retryCount < LIVENESS_CHECK_RETRY_LIMIT
+      ? t('ScanScreen:postFaceCapture.LivenessDetection.retryFailureMessage')
+      : t('ScanScreen:postFaceCapture.LivenessDetection.maxRetryFailureMessage')
+    : t('ScanScreen:postFaceCapture.failureMessage');
+
   return (
     <>
@@ -57,12 +70,18 @@
        alignActionsOnEnd
        showClose={false}
        isVisible={props.isInvalidIdentity}
-        title={t('ScanScreen:postFaceCapture.captureFailureTitle')}
-        message={t('ScanScreen:postFaceCapture.captureFailureMessage')}
+        title={t('ScanScreen:postFaceCapture.failureTitle')}
+        message={faceDetectionFailureMessage}
        image={SvgImage.PermissionDenied()}
        primaryButtonTestID={'retry'}
-        primaryButtonText={t('ScanScreen:status.retry')}
-        primaryButtonEvent={props.onRetryVerification}
+        primaryButtonText={
+          (props.isLivenessEnabled &&
+            retryCount < LIVENESS_CHECK_RETRY_LIMIT) ||
+          !props.isLivenessEnabled
+            ? t('ScanScreen:status.retry')
+            : undefined
+        }
+        primaryButtonEvent={handlePrimaryButtonEvent}
        textButtonTestID={'home'}
        textButtonText={t('ScanScreen:status.accepted.home')}
        textButtonEvent={props.onNavigateHome}
diff --git a/shared/constants.ts b/shared/constants.ts
index 27bf69b16c..6ee26eaef2 100644
--- a/shared/constants.ts
+++ b/shared/constants.ts
@@ -14,6 +14,7 @@ export let DEBUG_MODE_ENABLED = DEBUG_MODE === 'true';
 
 export const LIVENESS_CHECK = LIVENESS_DETECTION === 'true';
 export const LIVENESS_THRESHOLD = 0.4;
+export const LIVENESS_CHECK_RETRY_LIMIT = 3;
 
 export const changeCrendetialRegistry = (host: string) =>
   (MIMOTO_BASE_URL = host);
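The retry gating added in VerifyIdentityOverlay.tsx can be summarised as a small pure function. The component keeps the count in React state and reads the limit from shared/constants.ts, so the function below is only an illustrative restatement of the same branching, not code from the PR.

```ts
// Mirrors the primaryButtonText condition added above.
const LIVENESS_CHECK_RETRY_LIMIT = 3;

function retryButtonLabel(
  isLivenessEnabled: boolean,
  retryCount: number,
  retryLabel: string,
): string | undefined {
  // The plain face-capture flow can always be retried; liveness failures
  // only until the retry limit is exhausted.
  if (!isLivenessEnabled || retryCount < LIVENESS_CHECK_RETRY_LIMIT) {
    return retryLabel;
  }
  // Returning undefined is expected to omit the Retry action, leaving only "Home".
  return undefined;
}
```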