diff --git a/HostApp/HostApp/Views/LivenessResultContentView+Result.swift b/HostApp/HostApp/Views/LivenessResultContentView+Result.swift index 749e88a7..3f57982f 100644 --- a/HostApp/HostApp/Views/LivenessResultContentView+Result.swift +++ b/HostApp/HostApp/Views/LivenessResultContentView+Result.swift @@ -14,7 +14,8 @@ extension LivenessResultContentView { let valueTextColor: Color let valueBackgroundColor: Color let auditImage: Data? - + let isLive: Bool + init(livenessResult: LivenessResult) { guard livenessResult.confidenceScore > 0 else { text = "" @@ -22,9 +23,10 @@ extension LivenessResultContentView { valueTextColor = .clear valueBackgroundColor = .clear auditImage = nil + isLive = false return } - + isLive = livenessResult.isLive let truncated = String(format: "%.4f", livenessResult.confidenceScore) value = truncated if livenessResult.isLive { diff --git a/HostApp/HostApp/Views/LivenessResultContentView.swift b/HostApp/HostApp/Views/LivenessResultContentView.swift index b1787015..de2ecff7 100644 --- a/HostApp/HostApp/Views/LivenessResultContentView.swift +++ b/HostApp/HostApp/Views/LivenessResultContentView.swift @@ -17,9 +17,7 @@ struct LivenessResultContentView: View { Text("Result:") Text(result.text) .fontWeight(.semibold) - } - .padding(.bottom, 12) HStack { Text("Liveness confidence score:") @@ -42,6 +40,20 @@ struct LivenessResultContentView: View { .frame(maxWidth: .infinity, idealHeight: 268) .background(Color.secondary.opacity(0.1)) } + + if !result.isLive { + steps() + .padding() + .background( + Rectangle() + .foregroundColor( + .dynamicColors( + light: .hex("#ECECEC"), + dark: .darkGray + ) + ) + .cornerRadius(6)) + } } .padding(.bottom, 16) .onAppear { @@ -54,6 +66,29 @@ struct LivenessResultContentView: View { } } } + + private func steps() -> some View { + func step(number: Int, text: String) -> some View { + HStack(alignment: .top) { + Text("\(number).") + Text(text) + } + } + + return VStack( + alignment: .leading, + spacing: 8 + ) { + Text("Tips to pass the video check:") + .fontWeight(.semibold) + + step(number: 1, text: "Avoid very bright lighting conditions, such as direct sunlight.") + .accessibilityElement(children: .combine) + + step(number: 2, text: "Remove sunglasses, mask, hat, or anything blocking your face.") + .accessibilityElement(children: .combine) + } + } } diff --git a/HostApp/HostApp/Views/LivenessResultView.swift b/HostApp/HostApp/Views/LivenessResultView.swift index 7f75844d..23ba99fe 100644 --- a/HostApp/HostApp/Views/LivenessResultView.swift +++ b/HostApp/HostApp/Views/LivenessResultView.swift @@ -15,7 +15,7 @@ struct LivenessResultView: View { @State var displayingCopiedNotification = false init( - title: String = "Liveness Check", + title: String = "Liveness Result", sessionID: String, onTryAgain: @escaping () -> Void, @ViewBuilder content: () -> Content diff --git a/HostApp/README.md b/HostApp/README.md index 38b60704..1234c14b 100644 --- a/HostApp/README.md +++ b/HostApp/README.md @@ -29,7 +29,7 @@ cd amplify-ui-swift-livenes/HostApp 7. Once signed in and authenticated, the "Create Liveness Session" is enabled. Click the button to generate and get a session id from your backend. -8. Once a session id is created, the Liveness Check screen is displayed. Follow the instructions and click on Begin Check button to begin liveness verification. +8. Once a session id is created, the Liveness Check screen is displayed. Follow the instructions and click on Start video check button to begin liveness verification. 
## Provision AWS Backend Resources diff --git a/Sources/FaceLiveness/AV/CMSampleBuffer+Rotate.swift b/Sources/FaceLiveness/AV/CMSampleBuffer+Rotate.swift deleted file mode 100644 index a5ecedd9..00000000 --- a/Sources/FaceLiveness/AV/CMSampleBuffer+Rotate.swift +++ /dev/null @@ -1,42 +0,0 @@ -// -// Copyright Amazon.com Inc. or its affiliates. -// All Rights Reserved. -// -// SPDX-License-Identifier: Apache-2.0 -// - -import AVFoundation -import CoreImage - -extension CMSampleBuffer { - func rotateRightUpMirrored() -> CVPixelBuffer? { - guard let pixelBuffer = CMSampleBufferGetImageBuffer(self) else { - return nil - } - - var cvPixelBufferPtr: CVPixelBuffer? - - let error = CVPixelBufferCreate( - kCFAllocatorDefault, - CVPixelBufferGetHeight(pixelBuffer), - CVPixelBufferGetWidth(pixelBuffer), - kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, - nil, - &cvPixelBufferPtr - ) - - guard error == kCVReturnSuccess, - let cvPixelBuffer = cvPixelBufferPtr - else { - return nil - } - - let ciImage = CIImage(cvPixelBuffer: pixelBuffer) - .oriented(.right) - .oriented(.upMirrored) - - let context = CIContext(options: nil) - context.render(ciImage, to: cvPixelBuffer) - return cvPixelBuffer - } -} diff --git a/Sources/FaceLiveness/AV/LivenessCaptureSession.swift b/Sources/FaceLiveness/AV/LivenessCaptureSession.swift index 995a09ae..9cd8eccf 100644 --- a/Sources/FaceLiveness/AV/LivenessCaptureSession.swift +++ b/Sources/FaceLiveness/AV/LivenessCaptureSession.swift @@ -11,15 +11,34 @@ import AVFoundation class LivenessCaptureSession { let captureDevice: LivenessCaptureDevice private let captureQueue = DispatchQueue(label: "com.amazonaws.faceliveness.cameracapturequeue") - let outputDelegate: OutputSampleBufferCapturer + let outputDelegate: AVCaptureVideoDataOutputSampleBufferDelegate var captureSession: AVCaptureSession? + + var outputSampleBufferCapturer: OutputSampleBufferCapturer? { + return outputDelegate as? 
OutputSampleBufferCapturer + } - init(captureDevice: LivenessCaptureDevice, outputDelegate: OutputSampleBufferCapturer) { + init(captureDevice: LivenessCaptureDevice, outputDelegate: AVCaptureVideoDataOutputSampleBufferDelegate) { self.captureDevice = captureDevice self.outputDelegate = outputDelegate } func startSession(frame: CGRect) throws -> CALayer { + try startSession() + + guard let captureSession = captureSession else { + throw LivenessCaptureSessionError.captureSessionUnavailable + } + + let previewLayer = previewLayer( + frame: frame, + for: captureSession + ) + + return previewLayer + } + + func startSession() throws { guard let camera = captureDevice.avCaptureDevice else { throw LivenessCaptureSessionError.cameraUnavailable } @@ -44,17 +63,10 @@ class LivenessCaptureSession { captureSession.startRunning() } - let previewLayer = previewLayer( - frame: frame, - for: captureSession - ) - videoOutput.setSampleBufferDelegate( outputDelegate, queue: captureQueue ) - - return previewLayer } func stopRunning() { @@ -83,6 +95,11 @@ class LivenessCaptureSession { _ output: AVCaptureVideoDataOutput, for captureSession: AVCaptureSession ) throws { + if captureSession.canAddOutput(output) { + captureSession.addOutput(output) + } else { + throw LivenessCaptureSessionError.captureSessionOutputUnavailable + } output.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA ] @@ -92,12 +109,6 @@ class LivenessCaptureSession { .forEach { $0.videoOrientation = .portrait } - - if captureSession.canAddOutput(output) { - captureSession.addOutput(output) - } else { - throw LivenessCaptureSessionError.captureSessionOutputUnavailable - } } private func previewLayer( diff --git a/Sources/FaceLiveness/AV/OutputSampleBufferCapturer.swift b/Sources/FaceLiveness/AV/OutputSampleBufferCapturer.swift index 0ec9b65c..23f6defb 100644 --- a/Sources/FaceLiveness/AV/OutputSampleBufferCapturer.swift +++ b/Sources/FaceLiveness/AV/OutputSampleBufferCapturer.swift @@ -24,7 +24,7 @@ class OutputSampleBufferCapturer: NSObject, AVCaptureVideoDataOutputSampleBuffer ) { videoChunker.consume(sampleBuffer) - guard let imageBuffer = sampleBuffer.rotateRightUpMirrored() + guard let imageBuffer = sampleBuffer.imageBuffer else { return } faceDetector.detectFaces(from: imageBuffer) diff --git a/Sources/FaceLiveness/AV/VideoChunker.swift b/Sources/FaceLiveness/AV/VideoChunker.swift index 326e2bc1..7e17e2f3 100644 --- a/Sources/FaceLiveness/AV/VideoChunker.swift +++ b/Sources/FaceLiveness/AV/VideoChunker.swift @@ -34,9 +34,9 @@ final class VideoChunker { func start() { guard state == .pending else { return } - state = .writing assetWriter.startWriting() assetWriter.startSession(atSourceTime: .zero) + state = .writing } func finish(singleFrame: @escaping (UIImage) -> Void) { @@ -49,8 +49,8 @@ final class VideoChunker { func consume(_ buffer: CMSampleBuffer) { if state == .awaitingSingleFrame { - guard let rotated = buffer.rotateRightUpMirrored() else { return } - let singleFrame = singleFrame(from: rotated) + guard let imageBuffer = buffer.imageBuffer else { return } + let singleFrame = singleFrame(from: imageBuffer) provideSingleFrame?(singleFrame) state = .complete } @@ -66,10 +66,10 @@ final class VideoChunker { if assetWriterInput.isReadyForMoreMediaData { let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer).seconds let presentationTime = CMTime(seconds: timestamp - startTimeSeconds, preferredTimescale: 600) - guard let rotated = buffer.rotateRightUpMirrored() else { return } + guard let 
imageBuffer = buffer.imageBuffer else { return } pixelBufferAdaptor.append( - rotated, + imageBuffer, withPresentationTime: presentationTime ) } diff --git a/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift b/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift index c2a38452..d6879848 100644 --- a/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift +++ b/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift @@ -14,53 +14,79 @@ struct DetectedFace { let rightEye: CGPoint let nose: CGPoint let mouth: CGPoint + let rightEar: CGPoint + let leftEar: CGPoint let confidence: Float - func boundingBoxFromLandmarks() -> CGRect { - let eyeCenterX = (leftEye.x + rightEye.x) / 2 - let eyeCenterY = (leftEye.y + rightEye.y) / 2 - - let cx = (nose.x + eyeCenterX) / 2 - let cy = (nose.y + eyeCenterY) / 2 - - let ow = sqrt(pow((leftEye.x - rightEye.x), 2) + pow((leftEye.y - rightEye.y), 2)) * 2 - let oh = 1.618 * ow - let minX = cx - ow / 2 - let minY = cy - oh / 2 - - let rect = CGRect(x: minX, y: minY, width: ow, height: oh) + func boundingBoxFromLandmarks(ovalRect: CGRect) -> CGRect { + let alpha = 2.0 + let gamma = 1.8 + let ow = (alpha * pupilDistance + gamma * faceHeight) / 2 + var cx = (eyeCenterX + nose.x) / 2 + + if ovalRect != CGRect.zero { + let ovalTop = ovalRect.minY + let ovalHeight = ovalRect.maxY - ovalRect.minY + if eyeCenterY > (ovalTop + ovalHeight) / 2 { + cx = eyeCenterX + } + } + + let faceWidth = ow + let faceHeight = 1.618 * faceWidth + let faceBoxBottom = boundingBox.maxY + let faceBoxTop = faceBoxBottom - faceHeight + let faceBoxLeft = min(cx - ow / 2, rightEar.x) + let faceBoxRight = max(cx + ow / 2, leftEar.x) + let width = faceBoxRight - faceBoxLeft + let height = faceBoxBottom - faceBoxTop + let rect = CGRect(x: faceBoxLeft, y: faceBoxTop, width: width, height: height) return rect } var faceDistance: CGFloat { sqrt(pow(rightEye.x - leftEye.x, 2) + pow(rightEye.y - leftEye.y, 2)) } + + var pupilDistance: CGFloat { + sqrt(pow(leftEye.x - rightEye.x, 2) + pow(leftEye.y - rightEye.y, 2)) + } + + var eyeCenterX: CGFloat { + (leftEye.x + rightEye.x) / 2 + } + + var eyeCenterY: CGFloat { + (leftEye.y + rightEye.y) / 2 + } + + var faceHeight: CGFloat { + sqrt(pow(eyeCenterX - mouth.x, 2) + pow(eyeCenterY - mouth.y, 2)) + } func normalize(width: CGFloat, height: CGFloat) -> DetectedFace { - .init( - boundingBox: .init( - x: boundingBox.minX * width, - y: boundingBox.minY * height, - width: boundingBox.width * width, - height: boundingBox.height * height - ), - leftEye: .init( - x: leftEye.x * width, - y: leftEye.y * height - ), - rightEye: .init( - x: rightEye.x * width, - y: rightEye.y * height - ), - nose: .init( - x: nose.x * width, - y: nose.y * height - ), - mouth: .init( - x: mouth.x * width, - y: mouth.y * height - ), + let boundingBox = CGRect( + x: boundingBox.minX * width, + y: boundingBox.minY * height, + width: boundingBox.width * width, + height: boundingBox.height * height + ) + let leftEye = CGPoint(x: leftEye.x * width, y: leftEye.y * height) + let rightEye = CGPoint(x: rightEye.x * width, y: rightEye.y * height) + let nose = CGPoint(x: nose.x * width, y: nose.y * height) + let mouth = CGPoint(x: mouth.x * width, y: mouth.y * height) + let rightEar = CGPoint(x: rightEar.x * width, y: rightEar.y * height) + let leftEar = CGPoint(x: leftEar.x * width, y: leftEar.y * height) + + return DetectedFace( + boundingBox: boundingBox, + leftEye: leftEye, + rightEye: rightEye, + nose: nose, + mouth: mouth, + rightEar: rightEar, + 
leftEar: leftEar, confidence: confidence ) } diff --git a/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift b/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift index 2ea3b297..d9430720 100644 --- a/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift +++ b/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift @@ -157,6 +157,9 @@ extension FaceDetectorShortRange { let leftEye = faceResult[3] let nose = faceResult[4] let mouth = faceResult[5] + let rightEar = faceResult[6] + let leftEar = faceResult[7] + let boundingBox = CGRect( @@ -172,6 +175,8 @@ extension FaceDetectorShortRange { rightEye: .init(x: rightEye.x, y: rightEye.y), nose: .init(x: nose.x, y: nose.y), mouth: .init(x: mouth.x, y: mouth.y), + rightEar: .init(x: rightEar.x, y: rightEar.y), + leftEar: .init(x: leftEar.x, y: leftEar.y), confidence: overlappingConfidenceScore / Float(overlappingOutputs.count) ) diff --git a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_good_fit.imageset/Contents.json b/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_good_fit.imageset/Contents.json deleted file mode 100644 index a46c819a..00000000 --- a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_good_fit.imageset/Contents.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "images" : [ - { - "filename" : "illustration_face_good_fit.png", - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "scale" : "2x" - }, - { - "idiom" : "universal", - "scale" : "3x" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_good_fit.imageset/illustration_face_good_fit.png b/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_good_fit.imageset/illustration_face_good_fit.png deleted file mode 100644 index 80f924b4..00000000 Binary files a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_good_fit.imageset/illustration_face_good_fit.png and /dev/null differ diff --git a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_close.imageset/Contents.json b/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_close.imageset/Contents.json deleted file mode 100644 index aaa99aa6..00000000 --- a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_close.imageset/Contents.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "images" : [ - { - "filename" : "illustration_face_too_close.png", - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "scale" : "2x" - }, - { - "idiom" : "universal", - "scale" : "3x" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_close.imageset/illustration_face_too_close.png b/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_close.imageset/illustration_face_too_close.png deleted file mode 100644 index daedb319..00000000 Binary files a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_close.imageset/illustration_face_too_close.png and /dev/null differ diff --git a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_far.imageset/Contents.json b/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_far.imageset/Contents.json deleted file mode 100644 index 98f6d2b6..00000000 --- 
a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_far.imageset/Contents.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "images" : [ - { - "filename" : "illustration_face_too_far.png", - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "scale" : "2x" - }, - { - "idiom" : "universal", - "scale" : "3x" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_far.imageset/illustration_face_too_far.png b/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_far.imageset/illustration_face_too_far.png deleted file mode 100644 index 30128a7c..00000000 Binary files a/Sources/FaceLiveness/Resources/Assets.xcassets/illustration_face_too_far.imageset/illustration_face_too_far.png and /dev/null differ diff --git a/Sources/FaceLiveness/Resources/Base.lproj/Localizable.strings b/Sources/FaceLiveness/Resources/Base.lproj/Localizable.strings index 8b5b2ea8..afe3aff2 100644 --- a/Sources/FaceLiveness/Resources/Base.lproj/Localizable.strings +++ b/Sources/FaceLiveness/Resources/Base.lproj/Localizable.strings @@ -6,19 +6,12 @@ // "amplify_ui_liveness_get_ready_page_title" = "Liveness Check"; -"amplify_ui_liveness_get_ready_page_description" = "You will go through a face verification process to prove that you are a real person. Your screen's brightness will temporarily be set to 100% for highest accuracy."; "amplify_ui_liveness_get_ready_photosensitivity_title" = "Photosensitivity Warning"; -"amplify_ui_liveness_get_ready_photosensitivity_description" = "This check displays colored lights. Use caution if you are photosensitive."; +"amplify_ui_liveness_get_ready_photosensitivity_description" = "This check flashes different colors. Use caution if you are photosensitive."; "amplify_ui_liveness_get_ready_photosensitivity_icon_a11y" = "Photosensitivity Information"; "amplify_ui_liveness_get_ready_photosensitivity_dialog_title" = "Photosensitivity warning"; -"amplify_ui_liveness_get_ready_photosensitivity_dialog_description" = "A small percentage of individuals may experience epileptic seizures when exposed to colored lights. Use caution if you, or anyone in your family, have an epileptic condition."; -"amplify_ui_liveness_get_ready_steps_title" = "Follow the instructions to complete the check:"; -"amplify_ui_liveness_get_ready_face_not_covered" = "Make sure your face is not covered with sunglasses or a mask."; -"amplify_ui_liveness_get_ready_lighting" = "Move to a well-lit place that is not in direct sunlight."; -"amplify_ui_liveness_get_ready_fit_face" = "When an oval appears, fill the oval with your face in it."; -"amplify_ui_liveness_get_ready_begin_check" = "Begin Check"; -"amplify_ui_liveness_get_ready_good_fit_example" = "Good fit"; -"amplify_ui_liveness_get_ready_too_far_example" = "Too far"; +"amplify_ui_liveness_get_ready_photosensitivity_dialog_description" = "Some people may experience epileptic seizures when exposed to colored lights. 
Use caution if you, or anyone in your family, have an epileptic condition."; +"amplify_ui_liveness_get_ready_begin_check" = "Start video check"; "amplify_ui_liveness_challenge_recording_indicator_label" = "REC"; "amplify_ui_liveness_challenge_instruction_hold_face_during_countdown" = "Hold face position during countdown."; @@ -26,7 +19,7 @@ "amplify_ui_liveness_challenge_instruction_move_face_back" = "Move back"; "amplify_ui_liveness_challenge_instruction_move_face_closer" = "Move closer"; "amplify_ui_liveness_challenge_instruction_move_face_in_front_of_camera" = "Move face in front of camera"; -"amplify_ui_liveness_challenge_instruction_multiple_faces_detected" = "Ensure only one face is in front of camera"; +"amplify_ui_liveness_challenge_instruction_multiple_faces_detected" = "Only one face per check"; "amplify_ui_liveness_challenge_instruction_hold_still" = "Hold still"; "amplify_ui_liveness_challenge_connecting" = "Connecting..."; @@ -39,3 +32,20 @@ "amplify_ui_liveness_camera_setting_alert_not_now_button_text" = "Not Now"; "amplify_ui_liveness_close_button_a11y" = "Close"; + +"amplify_ui_liveness_center_your_face_text" = "Center your face"; +"amplify_ui_liveness_camera_permission_page_title" = "Liveness Check"; +"amplify_ui_liveness_camera_permission_button_title" = "Change Camera Setting"; +"amplify_ui_liveness_camera_permission_button_header" = "Camera is not accessible"; +"amplify_ui_liveness_camera_permission_button_description" = "You may have to go into settings to grant camera permissions and close the app and retry."; + +"amplify_ui_liveness_face_not_prepared_reason_pendingCheck" = " "; +"amplify_ui_liveness_face_not_prepared_reason_not_in_oval" = "Move face to fit in oval"; +"amplify_ui_liveness_face_not_prepared_reason_move_face_closer" = "Move closer"; +"amplify_ui_liveness_face_not_prepared_reason_move_face_right" = "Move face right"; +"amplify_ui_liveness_face_not_prepared_reason_move_face_left" = "Move face left"; +"amplify_ui_liveness_face_not_prepared_reason_move_to_dimmer_area" = "Move to dimmer area"; +"amplify_ui_liveness_face_not_prepared_reason_move_to_brighter_area" = "Move to brighter area"; +"amplify_ui_liveness_face_not_prepared_reason_no_face" = "Move face in front of camera"; +"amplify_ui_liveness_face_not_prepared_reason_multiple_faces" = "Only one face per check"; +"amplify_ui_liveness_face_not_prepared_reason_face_too_close" = "Move face farther away"; diff --git a/Sources/FaceLiveness/Utilities/CGImage+Convert.swift b/Sources/FaceLiveness/Utilities/CGImage+Convert.swift new file mode 100644 index 00000000..7304392a --- /dev/null +++ b/Sources/FaceLiveness/Utilities/CGImage+Convert.swift @@ -0,0 +1,26 @@ +// +// Copyright Amazon.com Inc. or its affiliates. +// All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +import CoreGraphics +import VideoToolbox + +extension CGImage { + static func convert(from cvPixelBuffer: CVPixelBuffer?) -> CGImage? { + guard let pixelBuffer = cvPixelBuffer else { + return nil + } + + var image: CGImage? 
+ VTCreateCGImageFromCVPixelBuffer( + pixelBuffer, + options: nil, + imageOut: &image + ) + + return image + } +} diff --git a/Sources/FaceLiveness/Utilities/Color+Liveness.swift b/Sources/FaceLiveness/Utilities/Color+Liveness.swift index 1b0e6367..6d998b58 100644 --- a/Sources/FaceLiveness/Utilities/Color+Liveness.swift +++ b/Sources/FaceLiveness/Utilities/Color+Liveness.swift @@ -39,12 +39,17 @@ extension Color { ) static let livenessWarningBackground = Color.dynamicColors( - light: .hex("#F5D9BC"), + light: .hex("#B8CEF9"), dark: .hex("#663300") ) static let livenessWarningLabel = Color.dynamicColors( - light: .hex("#663300"), + light: .hex("#002266"), dark: .hex("#EFBF8F") ) + + static let livenessPreviewBorder = Color.dynamicColors( + light: .hex("#AEB3B7"), + dark: .white + ) } diff --git a/Sources/FaceLiveness/Utilities/LivenessLocalizedStrings.swift b/Sources/FaceLiveness/Utilities/LivenessLocalizedStrings.swift index ba45e269..b09bb7eb 100644 --- a/Sources/FaceLiveness/Utilities/LivenessLocalizedStrings.swift +++ b/Sources/FaceLiveness/Utilities/LivenessLocalizedStrings.swift @@ -10,10 +10,7 @@ import SwiftUI enum LocalizedStrings { /// en = "Liveness Check" static let get_ready_page_title = "amplify_ui_liveness_get_ready_page_title".localized() - - /// en = "You will go through a face verification process to prove that you are a real person. Your screen's brightness will temporarily be set to 100% for highest accuracy." - static let get_ready_page_description = "amplify_ui_liveness_get_ready_page_description".localized() - + /// en = "Photosensitivity Warning" static let get_ready_photosensitivity_title = "amplify_ui_liveness_get_ready_photosensitivity_title".localized() @@ -29,27 +26,9 @@ enum LocalizedStrings { /// en = "A small percentage of individuals may experience epileptic seizures when exposed to colored lights. Use caution if you, or anyone in your family, have an epileptic condition." static let get_ready_photosensitivity_dialog_description = "amplify_ui_liveness_get_ready_photosensitivity_dialog_description".localized() - /// en = "Follow the instructions to complete the check:" - static let get_ready_steps_title = "amplify_ui_liveness_get_ready_steps_title".localized() - - /// en = "Make sure your face is not covered with sunglasses or a mask." - static let get_ready_face_not_covered = "amplify_ui_liveness_get_ready_face_not_covered".localized() - - /// en = "Move to a well-lit place that is not in direct sunlight." - static let get_ready_lighting = "amplify_ui_liveness_get_ready_lighting".localized() - - /// en = "When an oval appears, fill the oval with your face in it." - static let get_ready_fit_face = "amplify_ui_liveness_get_ready_fit_face".localized() - - /// en = "Begin Check" + /// en = "Start video check" static let get_ready_begin_check = "amplify_ui_liveness_get_ready_begin_check".localized() - /// en = "Illustration demonstrating good fit of face in oval." - static let get_ready_illustration_good_fit_a11y = "amplify_ui_liveness_get_ready_illustration_good_fit_a11y".localized() - - /// en = "Illustration demonstrating face too far from screen." 
- static let get_ready_illustration_too_far_a11y = "amplify_ui_liveness_get_ready_illustration_too_far_a11y".localized() - /// en = "REC" static let challenge_recording_indicator_label = "amplify_ui_liveness_challenge_recording_indicator_label".localized() @@ -68,7 +47,7 @@ enum LocalizedStrings { /// en = "Hold still" static let challenge_instruction_hold_still = "amplify_ui_liveness_challenge_instruction_hold_still".localized() - /// en = "Ensure only one face is in front of camera" + /// en = "Only one face per check" static let challenge_instruction_multiple_faces_detected = "amplify_ui_liveness_challenge_instruction_multiple_faces_detected".localized() /// en = "Connecting..." @@ -94,11 +73,49 @@ enum LocalizedStrings { /// en = "Close" static let close_button_a11y = "amplify_ui_liveness_close_button_a11y".localized() - - /// en = "Good fit" - static let get_ready_good_fit_example = "amplify_ui_liveness_get_ready_good_fit_example".localized() - /// en = "Too far" - static let get_ready_too_far_example = "amplify_ui_liveness_get_ready_too_far_example".localized() + /// en = "Center your face" + static let preview_center_your_face_text = "amplify_ui_liveness_center_your_face_text".localized() + + /// en = "Liveness check" + static let camera_permission_page_title = "amplify_ui_liveness_camera_permission_page_title".localized() + + /// en = "Change Camera Setting" + static let camera_permission_change_setting_button_title = "amplify_ui_liveness_camera_permission_button_title".localized() + + /// en = "Camera is not accessible" + static let camera_permission_change_setting_header = "amplify_ui_liveness_camera_permission_button_header".localized() + + /// en = "You may have to go into settings to grant camera permissions and close the app and retry" + static let camera_permission_change_setting_description = "amplify_ui_liveness_camera_permission_button_description".localized() + + /// en = "" + static let amplify_ui_liveness_face_not_prepared_reason_pendingCheck = "amplify_ui_liveness_face_not_prepared_reason_pendingCheck".localized() + + /// en = "Move face to fit in oval" + static let amplify_ui_liveness_face_not_prepared_reason_not_in_oval = "amplify_ui_liveness_face_not_prepared_reason_not_in_oval".localized() + + /// en = "Move closer" + static let amplify_ui_liveness_face_not_prepared_reason_move_face_closer = "amplify_ui_liveness_face_not_prepared_reason_move_face_closer".localized() + + /// en = "Move face right" + static let amplify_ui_liveness_face_not_prepared_reason_move_face_right = "amplify_ui_liveness_face_not_prepared_reason_move_face_right".localized() + + /// en = "Move face left" + static let amplify_ui_liveness_face_not_prepared_reason_move_face_left = "amplify_ui_liveness_face_not_prepared_reason_move_face_left".localized() + + /// en = "Move to dimmer area" + static let amplify_ui_liveness_face_not_prepared_reason_move_to_dimmer_area = "amplify_ui_liveness_face_not_prepared_reason_move_to_dimmer_area".localized() + + /// en = "Move to brighter area" + static let amplify_ui_liveness_face_not_prepared_reason_move_to_brighter_area = "amplify_ui_liveness_face_not_prepared_reason_move_to_brighter_area".localized() + + /// en = "Move face in front of camera" + static let amplify_ui_liveness_face_not_prepared_reason_no_face = "amplify_ui_liveness_face_not_prepared_reason_no_face".localized() + + /// en = "Ensure only one face is in front of camera" + static let amplify_ui_liveness_face_not_prepared_reason_multiple_faces = 
"amplify_ui_liveness_face_not_prepared_reason_multiple_faces".localized() + /// en = "Move face farther away" + static let amplify_ui_liveness_face_not_prepared_reason_face_too_close = "amplify_ui_liveness_face_not_prepared_reason_face_too_close".localized() } diff --git a/Sources/FaceLiveness/Views/CameraPermission/CameraPermissionView.swift b/Sources/FaceLiveness/Views/CameraPermission/CameraPermissionView.swift new file mode 100644 index 00000000..e5edbf3f --- /dev/null +++ b/Sources/FaceLiveness/Views/CameraPermission/CameraPermissionView.swift @@ -0,0 +1,79 @@ +// +// Copyright Amazon.com Inc. or its affiliates. +// All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +import SwiftUI + +struct CameraPermissionView: View { + @Binding var displayingCameraPermissionsNeededAlert: Bool + + init( + displayingCameraPermissionsNeededAlert: Binding = .constant(false) + ) { + self._displayingCameraPermissionsNeededAlert = displayingCameraPermissionsNeededAlert + } + + var body: some View { + VStack(alignment: .leading) { + Spacer() + VStack { + Text(LocalizedStrings.camera_permission_change_setting_header) + .font(.title2) + .fontWeight(.medium) + .multilineTextAlignment(.center) + .padding(8) + + Text(LocalizedStrings.camera_permission_change_setting_description) + .multilineTextAlignment(.center) + .padding(8) + } + Spacer() + editPermissionButton + } + .alert(isPresented: $displayingCameraPermissionsNeededAlert) { + Alert( + title: Text(LocalizedStrings.camera_setting_alert_title), + message: Text(LocalizedStrings.camera_setting_alert_message), + primaryButton: .default( + Text(LocalizedStrings.camera_setting_alert_update_setting_button_text).bold(), + action: { + goToSettingsAppPage() + }), + secondaryButton: .default( + Text(LocalizedStrings.camera_setting_alert_not_now_button_text) + ) + ) + } + } + + private func goToSettingsAppPage() { + guard let settingsAppURL = URL(string: UIApplication.openSettingsURLString) + else { return } + UIApplication.shared.open(settingsAppURL, options: [:]) + } + + private var editPermissionButton: some View { + Button( + action: goToSettingsAppPage, + label: { + Text(LocalizedStrings.camera_permission_change_setting_button_title) + .foregroundColor(.livenessPrimaryLabel) + .frame(maxWidth: .infinity) + } + ) + .frame(height: 52) + ._background { Color.livenessPrimaryBackground } + .cornerRadius(14) + .padding([.leading, .trailing]) + .padding(.bottom, 16) + } +} + +struct CameraPermissionView_Previews: PreviewProvider { + static var previews: some View { + CameraPermissionView() + } +} diff --git a/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewView.swift b/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewView.swift new file mode 100644 index 00000000..f78e8df0 --- /dev/null +++ b/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewView.swift @@ -0,0 +1,50 @@ +// +// Copyright Amazon.com Inc. or its affiliates. +// All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +import SwiftUI + +struct CameraPreviewView: View { + private static let previewWidthRatio = 0.6 + private static let previewHeightRatio = 0.55 + private static let previewXPositionRatio = 0.5 + private static let previewYPositionRatio = 0.6 + + @StateObject var model: CameraPreviewViewModel + + init(model: CameraPreviewViewModel = CameraPreviewViewModel()) { + self._model = StateObject(wrappedValue: model) + } + + var body: some View { + ZStack { + ImageFrameView(image: model.currentImageFrame) + .edgesIgnoringSafeArea(.all) + .mask( + GeometryReader { geometry in + Ellipse() + .frame(width: geometry.size.width*Self.previewWidthRatio, + height: geometry.size.height*Self.previewHeightRatio) + .position(x: geometry.size.width*Self.previewXPositionRatio, + y: geometry.size.height*Self.previewYPositionRatio) + }) + GeometryReader { geometry in + Ellipse() + .stroke(Color.livenessPreviewBorder, style: StrokeStyle(lineWidth: 3)) + .frame(width: geometry.size.width*Self.previewWidthRatio, + height: geometry.size.height*Self.previewHeightRatio) + .position(x: geometry.size.width*Self.previewXPositionRatio, + y: geometry.size.height*Self.previewYPositionRatio) + } + } + } +} + +struct CameraPreviewView_Previews: PreviewProvider { + static var previews: some View { + CameraPreviewView() + } +} diff --git a/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewViewModel.swift b/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewViewModel.swift new file mode 100644 index 00000000..eb661451 --- /dev/null +++ b/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewViewModel.swift @@ -0,0 +1,64 @@ +// +// Copyright Amazon.com Inc. or its affiliates. +// All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +import Foundation +import CoreImage +import Combine +import AVFoundation +import Amplify + +class CameraPreviewViewModel: NSObject, ObservableObject { + @Published var currentImageFrame: CGImage? + @Published var buffer: CVPixelBuffer? + + var previewCaptureSession: LivenessCaptureSession? 
+ + override init() { + super.init() + setupSubscriptions() + + let avCaptureDevice = AVCaptureDevice.DiscoverySession( + deviceTypes: [.builtInWideAngleCamera], + mediaType: .video, + position: .front + ).devices.first + + self.previewCaptureSession = LivenessCaptureSession( + captureDevice: .init(avCaptureDevice: avCaptureDevice), + outputDelegate: self + ) + + do { + try self.previewCaptureSession?.startSession() + } catch { + Amplify.Logging.default.error("Error starting preview capture session with error: \(error)") + } + } + + func setupSubscriptions() { + self.$buffer + .receive(on: RunLoop.main) + .compactMap { + return CGImage.convert(from: $0) + } + .assign(to: &$currentImageFrame) + } +} + +extension CameraPreviewViewModel: AVCaptureVideoDataOutputSampleBufferDelegate { + func captureOutput( + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, + from connection: AVCaptureConnection + ) { + if let buffer = sampleBuffer.imageBuffer { + DispatchQueue.main.async { + self.buffer = buffer + } + } + } +} diff --git a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift index 404d261e..00ecb9b7 100644 --- a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift +++ b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift @@ -8,75 +8,36 @@ import SwiftUI struct GetReadyPageView: View { - @Binding var displayingCameraPermissionsNeededAlert: Bool let beginCheckButtonDisabled: Bool let onBegin: () -> Void init( - displayingCameraPermissionsNeededAlert: Binding = .constant(false), onBegin: @escaping () -> Void, beginCheckButtonDisabled: Bool = false ) { - self._displayingCameraPermissionsNeededAlert = displayingCameraPermissionsNeededAlert self.onBegin = onBegin self.beginCheckButtonDisabled = beginCheckButtonDisabled } var body: some View { VStack { - ScrollView { - VStack(alignment: .leading) { - Text(LocalizedStrings.get_ready_page_title) - .font(.system(size: 34, weight: .semibold)) - .accessibilityAddTraits(.isHeader) - .padding(.bottom, 8) - - Text(LocalizedStrings.get_ready_page_description) - .padding(.bottom, 8) - + ZStack { + CameraPreviewView() + VStack { WarningBox( titleText: LocalizedStrings.get_ready_photosensitivity_title, bodyText: LocalizedStrings.get_ready_photosensitivity_description, popoverContent: { photosensitivityWarningPopoverContent } ) .accessibilityElement(children: .combine) - .padding(.bottom, 8) - - Text(LocalizedStrings.get_ready_steps_title) - .fontWeight(.semibold) - .padding(.bottom, 16) - - OvalIllustrationExamples() - .accessibilityHidden(true) - .padding(.bottom) - - steps() - } - .padding() + Text(LocalizedStrings.preview_center_your_face_text) + .font(.title) + .multilineTextAlignment(.center) + Spacer() + }.padding() } - beginCheckButton } - .alert(isPresented: $displayingCameraPermissionsNeededAlert) { - Alert( - title: Text(LocalizedStrings.camera_setting_alert_title), - message: Text(LocalizedStrings.camera_setting_alert_message), - primaryButton: .default( - Text(LocalizedStrings.camera_setting_alert_update_setting_button_text).bold(), - action: { - goToSettingsAppPage() - }), - secondaryButton: .default( - Text(LocalizedStrings.camera_setting_alert_not_now_button_text) - ) - ) - } - } - - private func goToSettingsAppPage() { - guard let settingsAppURL = URL(string: UIApplication.openSettingsURLString) - else { return } - UIApplication.shared.open(settingsAppURL, options: [:]) } private var beginCheckButton: some View { @@ -107,29 +68,6 @@ 
struct GetReadyPageView: View { Spacer() } } - - private func steps() -> some View { - func step(number: Int, text: String) -> some View { - HStack(alignment: .top) { - Text("\(number).") - Text(text) - } - } - - return VStack( - alignment: .leading, - spacing: 16 - ) { - step(number: 1, text: LocalizedStrings.get_ready_fit_face) - .accessibilityElement(children: .combine) - - step(number: 2, text: LocalizedStrings.get_ready_face_not_covered) - .accessibilityElement(children: .combine) - - step(number: 3, text: LocalizedStrings.get_ready_lighting) - .accessibilityElement(children: .combine) - } - } } struct GetReadyPageView_Previews: PreviewProvider { diff --git a/Sources/FaceLiveness/Views/GetReadyPage/ImageFrameView.swift b/Sources/FaceLiveness/Views/GetReadyPage/ImageFrameView.swift new file mode 100644 index 00000000..63052904 --- /dev/null +++ b/Sources/FaceLiveness/Views/GetReadyPage/ImageFrameView.swift @@ -0,0 +1,35 @@ +// +// Copyright Amazon.com Inc. or its affiliates. +// All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +import SwiftUI + +struct ImageFrameView: View { + var image: CGImage? + + var body: some View { + if let image = image { + GeometryReader { geometry in + Image(decorative: image, scale: 1.0, orientation: .upMirrored) + .resizable() + .scaledToFill() + .frame( + width: geometry.size.width, + height: geometry.size.height, + alignment: .center) + .clipped() + } + } else { + Color.black + } + } +} + +struct ImageFrameView_Previews: PreviewProvider { + static var previews: some View { + ImageFrameView() + } +} diff --git a/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationExamples.swift b/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationExamples.swift deleted file mode 100644 index b188cff8..00000000 --- a/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationExamples.swift +++ /dev/null @@ -1,43 +0,0 @@ -// -// Copyright Amazon.com Inc. or its affiliates. -// All Rights Reserved. -// -// SPDX-License-Identifier: Apache-2.0 -// - -import SwiftUI - -struct OvalIllustrationExamples: View { - var body: some View { - HStack(spacing: 16) { - OvalIllustrationView( - icon: .checkmark(backgroundColor: .hex("#365E3D")), - text: { Text(LocalizedStrings.get_ready_good_fit_example) }, - primaryColor: .hex("#365E3D"), - secondaryColor: .hex("#D6F5DB"), - illustration: { Image("illustration_face_good_fit", bundle: .module) } - ) - .accessibilityElement(children: .ignore) - .accessibilityLabel(Text(LocalizedStrings.get_ready_illustration_good_fit_a11y)) - - OvalIllustrationView( - icon: .xmark(backgroundColor: .hex("#660000")), - text: { Text(LocalizedStrings.get_ready_too_far_example) }, - primaryColor: .hex("#660000"), - secondaryColor: .hex("#F5BCBC"), - illustration: { Image("illustration_face_too_far", bundle: .module) } - ) - .accessibilityElement(children: .ignore) - .accessibilityLabel(Text(LocalizedStrings.get_ready_illustration_too_far_a11y)) - - Spacer() - } - } -} - -struct OvalIllustrationExamples_Previews: PreviewProvider { - static var previews: some View { - OvalIllustrationExamples() - .background(Color.purple) - } -} diff --git a/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationIconView.swift b/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationIconView.swift deleted file mode 100644 index 1f1ecbad..00000000 --- a/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationIconView.swift +++ /dev/null @@ -1,47 +0,0 @@ -// -// Copyright Amazon.com Inc. or its affiliates. -// All Rights Reserved. 
-// -// SPDX-License-Identifier: Apache-2.0 -// - -import SwiftUI - -struct OvalIllustrationIconView: View { - let systemName: String - let iconColor: Color - let backgroundColor: Color - - init( - systemName: String, - iconColor: Color = .white, - backgroundColor: Color - ) { - self.systemName = systemName - self.iconColor = iconColor - self.backgroundColor = backgroundColor - } - - var body: some View { - Image(systemName: systemName) - .font(.system(size: 12, weight: .heavy)) - .foregroundColor(iconColor) - .frame(width: 15, height: 15) - .padding(5) - .background(backgroundColor) - } - - static func checkmark(backgroundColor: Color) -> Self { - OvalIllustrationIconView( - systemName: "checkmark", - backgroundColor: backgroundColor - ) - } - - static func xmark(backgroundColor: Color) -> Self { - OvalIllustrationIconView( - systemName: "xmark", - backgroundColor: backgroundColor - ) - } -} diff --git a/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationView.swift b/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationView.swift deleted file mode 100644 index 54e94916..00000000 --- a/Sources/FaceLiveness/Views/GetReadyPage/OvalIllustrationView.swift +++ /dev/null @@ -1,36 +0,0 @@ -// -// Copyright Amazon.com Inc. or its affiliates. -// All Rights Reserved. -// -// SPDX-License-Identifier: Apache-2.0 -// - -import SwiftUI -import Amplify - -struct OvalIllustrationView: View { - let icon: OvalIllustrationIconView - let text: () -> Text - let primaryColor: Color - let secondaryColor: Color - let illustration: () -> Illustration - - var body: some View { - VStack(alignment: .leading) { - ZStack(alignment: .topLeading) { - VStack(alignment: .leading, spacing: 0) { - illustration() - .border(primaryColor, width: 0.8) - - text() - .bold() - .foregroundColor(primaryColor) - .padding(4) - } - .background(secondaryColor) - - icon - } - } - } -} diff --git a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift index a1ae560d..ff02a3d6 100644 --- a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift +++ b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift @@ -43,7 +43,7 @@ struct InstructionContainerView: View { case .awaitingFaceInOvalMatch(let reason, let percentage): InstructionView( - text: .init(reason.rawValue), + text: .init(reason.localizedValue), backgroundColor: .livenessPrimaryBackground, textColor: .livenessPrimaryLabel, font: .title @@ -81,8 +81,10 @@ struct InstructionContainerView: View { .frame(width: 200, height: 30) case .pendingFacePreparedConfirmation(let reason): InstructionView( - text: .init(reason.rawValue), - backgroundColor: .livenessBackground + text: .init(reason.localizedValue), + backgroundColor: .livenessPrimaryBackground, + textColor: .livenessPrimaryLabel, + font: .title ) case .completedDisplayingFreshness: InstructionView( @@ -95,6 +97,13 @@ struct InstructionContainerView: View { argument: LocalizedStrings.challenge_verifying ) } + case .faceMatched: + InstructionView( + text: LocalizedStrings.challenge_instruction_hold_still, + backgroundColor: .livenessPrimaryBackground, + textColor: .livenessPrimaryLabel, + font: .title + ) default: EmptyView() } diff --git a/Sources/FaceLiveness/Views/Liveness/CameraView.swift b/Sources/FaceLiveness/Views/Liveness/CameraView.swift index 6c8214c7..e984bce4 100644 --- a/Sources/FaceLiveness/Views/Liveness/CameraView.swift +++ b/Sources/FaceLiveness/Views/Liveness/CameraView.swift @@ -32,11 +32,3 
@@ struct CameraView: UIViewControllerRepresentable { context: Context ) {} } - -class _CameraViewCoordinator: NSObject { - let livenessViewController: _LivenessViewController - - init(livenessViewController: _LivenessViewController) { - self.livenessViewController = livenessViewController - } -} diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift index 23c03ae0..64098e5f 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift @@ -16,7 +16,7 @@ import Amplify public struct FaceLivenessDetectorView: View { @StateObject var viewModel: FaceLivenessDetectionViewModel @Binding var isPresented: Bool - @State var displayState: DisplayState = .awaitingLivenessSession + @State var displayState: DisplayState = .awaitingCameraPermission @State var displayingCameraPermissionsNeededAlert = false let disableStartView: Bool @@ -114,10 +114,10 @@ public struct FaceLivenessDetectorView: View { self._viewModel = StateObject( wrappedValue: .init( - faceDetector: captureSession.outputDelegate.faceDetector, + faceDetector: captureSession.outputSampleBufferCapturer!.faceDetector, faceInOvalMatching: faceInOvalStateMatching, captureSession: captureSession, - videoChunker: captureSession.outputDelegate.videoChunker, + videoChunker: captureSession.outputSampleBufferCapturer!.videoChunker, closeButtonAction: { onCompletion(.failure(.userCancelled)) }, sessionID: sessionID ) @@ -131,13 +131,14 @@ public struct FaceLivenessDetectorView: View { .onAppear { Task { do { + let newState = disableStartView + ? DisplayState.displayingLiveness + : DisplayState.displayingGetReadyView + guard self.displayState != newState else { return } let session = try await sessionTask.value viewModel.livenessService = session viewModel.registerServiceEvents() - - self.displayState = disableStartView - ? .displayingLiveness - : .displayingGetReadyView + self.displayState = newState } catch { throw FaceLivenessDetectionError.accessDenied } @@ -146,10 +147,17 @@ public struct FaceLivenessDetectorView: View { case .displayingGetReadyView: GetReadyPageView( - displayingCameraPermissionsNeededAlert: $displayingCameraPermissionsNeededAlert, - onBegin: beginButtonTapped, + onBegin: { + guard displayState != .displayingLiveness else { return } + displayState = .displayingLiveness + }, beginCheckButtonDisabled: false ) + .onAppear { + DispatchQueue.main.async { + UIScreen.main.brightness = 1.0 + } + } case .displayingLiveness: _FaceLivenessDetectionView( viewModel: viewModel, @@ -171,13 +179,18 @@ public struct FaceLivenessDetectorView: View { onCompletion(.success(())) case .encounteredUnrecoverableError(let error): let closeCode = error.webSocketCloseCode ?? 
.normalClosure - viewModel.livenessService.closeSocket(with: closeCode) + viewModel.livenessService?.closeSocket(with: closeCode) isPresented = false onCompletion(.failure(mapError(error))) default: break } } + case .awaitingCameraPermission: + CameraPermissionView(displayingCameraPermissionsNeededAlert: $displayingCameraPermissionsNeededAlert) + .onAppear { + checkCameraPermission() + } } } @@ -199,10 +212,7 @@ public struct FaceLivenessDetectorView: View { for: .video, completionHandler: { accessGranted in guard accessGranted == true else { return } - displayState = .displayingLiveness - DispatchQueue.main.async { - UIScreen.main.brightness = 1.0 - } + displayState = .awaitingLivenessSession } ) @@ -211,16 +221,16 @@ public struct FaceLivenessDetectorView: View { private func alertCameraAccessNeeded() { displayingCameraPermissionsNeededAlert = true } - - private func beginButtonTapped() { + + private func checkCameraPermission() { let cameraAuthorizationStatus = AVCaptureDevice.authorizationStatus(for: .video) switch cameraAuthorizationStatus { case .notDetermined: requestCameraPermission() - case .authorized: - displayState = .displayingLiveness case .restricted, .denied: alertCameraAccessNeeded() + case .authorized: + displayState = .awaitingLivenessSession @unknown default: break } @@ -231,6 +241,7 @@ enum DisplayState { case awaitingLivenessSession case displayingGetReadyView case displayingLiveness + case awaitingCameraPermission } enum InstructionState { diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift index 22c33a74..99e92ee2 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift @@ -28,7 +28,7 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { } case .singleFace(let face): var normalizedFace = normalizeFace(face) - normalizedFace.boundingBox = normalizedFace.boundingBoxFromLandmarks() + normalizedFace.boundingBox = normalizedFace.boundingBoxFromLandmarks(ovalRect: ovalRect) switch livenessState.state { case .pendingFacePreparedConfirmation: @@ -111,7 +111,9 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { case .match: self.livenessState.faceMatched() self.faceMatchedTimestamp = Date().timestampMilliseconds - self.livenessViewControllerDelegate?.displayFreshness(colorSequences: colorSequences) + DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { + self.livenessViewControllerDelegate?.displayFreshness(colorSequences: colorSequences) + } let generator = UINotificationFeedbackGenerator() generator.notificationOccurred(.success) self.noFitStartTime = nil diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift index a29622f5..709ac49e 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift @@ -24,7 +24,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { var closeButtonAction: () -> Void let videoChunker: VideoChunker let sessionID: String - var livenessService: LivenessService! + var livenessService: LivenessService? 
let faceDetector: FaceDetector let faceInOvalMatching: FaceInOvalMatching let challengeID: String = UUID().uuidString @@ -90,7 +90,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { } func registerServiceEvents() { - livenessService.register(onComplete: { [weak self] reason in + livenessService?.register(onComplete: { [weak self] reason in self?.stopRecording() switch reason { @@ -106,7 +106,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { } }) - livenessService.register( + livenessService?.register( listener: { [weak self] _sessionConfiguration in self?.sessionConfiguration = _sessionConfiguration }, @@ -115,6 +115,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { } @objc func willResignActive(_ notification: Notification) { + guard self.livenessState.state != .initial else { return } DispatchQueue.main.async { self.stopRecording() self.livenessState.unrecoverableStateEncountered(.viewResignation) @@ -174,7 +175,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { func initializeLivenessStream() { do { - try livenessService.initializeLivenessStream( + try livenessService?.initializeLivenessStream( withSessionID: sessionID, userAgent: UserAgentValues.standard().userAgentString ) @@ -197,7 +198,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { ) do { - try livenessService.send( + try livenessService?.send( .freshness(event: freshnessEvent), eventDate: { .init() } ) @@ -238,7 +239,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { initialClientEvent = _initialClientEvent do { - try livenessService.send( + try livenessService?.send( .initialFaceDetected(event: _initialClientEvent), eventDate: { .init() } ) @@ -270,7 +271,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { ) do { - try livenessService.send( + try livenessService?.send( .final(event: finalClientEvent), eventDate: { .init() } ) @@ -296,7 +297,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { let videoEvent = VideoEvent.init(chunk: data, timestamp: timestamp) do { - try livenessService.send( + try livenessService?.send( .video(event: videoEvent), eventDate: { eventDate } ) @@ -335,7 +336,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { let videoEvent = VideoEvent.init(chunk: data, timestamp: timestamp) do { - try livenessService.send( + try livenessService?.send( .video(event: videoEvent), eventDate: { eventDate } ) diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift b/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift index 48dd1efa..872c7ee6 100644 --- a/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift +++ b/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift @@ -109,17 +109,42 @@ struct LivenessStateMachine { case waitForRecording } - enum FaceNotPreparedReason: String, Equatable { - case pendingCheck = "" - case notInOval = "Move face to fit in oval" - case moveFaceCloser = "Move closer" - case moveFaceRight = "Move face right" - case moveFaceLeft = "Move face left" - case moveToDimmerArea = "Move to dimmer area" - case moveToBrighterArea = "Move to brighter area" - case noFace = "Move face in front of camera" - case multipleFaces = "Ensure only one face is in front of camera" - case faceTooClose = "Move face farther away" + enum FaceNotPreparedReason { + case pendingCheck + case notInOval + case moveFaceCloser + case moveFaceRight + case moveFaceLeft + case moveToDimmerArea + case moveToBrighterArea + case noFace + case multipleFaces + case faceTooClose + + var 
localizedValue: String { + switch self { + case .pendingCheck: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_pendingCheck + case .notInOval: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_not_in_oval + case .moveFaceCloser: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_move_face_closer + case .moveFaceRight: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_move_face_right + case .moveFaceLeft: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_move_face_left + case .moveToDimmerArea: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_move_to_dimmer_area + case .moveToBrighterArea: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_move_to_brighter_area + case .noFace: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_no_face + case .multipleFaces: + return LocalizedStrings.challenge_instruction_multiple_faces_detected + case .faceTooClose: + return LocalizedStrings.amplify_ui_liveness_face_not_prepared_reason_face_too_close + } + } } struct LivenessError: Error, Equatable { diff --git a/Sources/FaceLiveness/Views/ProgressBarView.swift b/Sources/FaceLiveness/Views/ProgressBarView.swift index a1cfc5b2..fbba9108 100644 --- a/Sources/FaceLiveness/Views/ProgressBarView.swift +++ b/Sources/FaceLiveness/Views/ProgressBarView.swift @@ -27,7 +27,7 @@ struct ProgressBarView: View { .foregroundColor(emptyColor) Rectangle() - .cornerRadius(8, corners: [.topLeft, .bottomLeft]) + .cornerRadius(8, corners: .allCorners) .frame( width: min(percentage, 1) * proxy.size.width, height: proxy.size.height - 8 diff --git a/Tests/FaceLivenessTests/DetectedFaceTests.swift b/Tests/FaceLivenessTests/DetectedFaceTests.swift new file mode 100644 index 00000000..4bee8292 --- /dev/null +++ b/Tests/FaceLivenessTests/DetectedFaceTests.swift @@ -0,0 +1,126 @@ +// +// Copyright Amazon.com Inc. or its affiliates. +// All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +import XCTest +@testable import FaceLiveness + + +final class DetectedFaceTests: XCTestCase { + var detectedFace: DetectedFace! + var expectedNormalizeFace: DetectedFace! 
+    let normalizeWidth = 414.0
+    let normalizeHeight = 552.0
+
+    override func setUp() {
+        let boundingBox = CGRect(
+            x: 0.15805082494171963,
+            y: 0.3962942063808441,
+            width: 0.6549023386310235,
+            height: 0.49117204546928406
+        )
+        let leftEye = CGPoint(x: 0.6686329891870315, y: 0.48738187551498413)
+        let rightEye = CGPoint(x: 0.35714725227596134, y: 0.4664449691772461)
+        let nose = CGPoint(x: 0.5283648181467697, y: 0.5319401621818542)
+        let mouth = CGPoint(x: 0.5062596005080024, y: 0.689265251159668)
+        let rightEar = CGPoint(x: 0.1658528943614037, y: 0.5668278932571411)
+        let leftEar = CGPoint(x: 0.7898947484263203, y: 0.5973731875419617)
+        let confidence: Float = 0.94027895
+        detectedFace = DetectedFace(
+            boundingBox: boundingBox,
+            leftEye: leftEye,
+            rightEye: rightEye,
+            nose: nose,
+            mouth: mouth,
+            rightEar: rightEar,
+            leftEar: leftEar,
+            confidence: confidence
+        )
+
+        let normalizedBoundingBox = CGRect(
+            x: 0.15805082494171963 * normalizeWidth,
+            y: 0.3962942063808441 * normalizeHeight,
+            width: 0.6549023386310235 * normalizeWidth,
+            height: 0.49117204546928406 * normalizeHeight
+        )
+        let normalizedLeftEye = CGPoint(
+            x: 0.6686329891870315 * normalizeWidth,
+            y: 0.48738187551498413 * normalizeHeight
+        )
+        let normalizedRightEye = CGPoint(
+            x: 0.35714725227596134 * normalizeWidth,
+            y: 0.4664449691772461 * normalizeHeight
+        )
+        let normalizedNose = CGPoint(
+            x: 0.5283648181467697 * normalizeWidth,
+            y: 0.5319401621818542 * normalizeHeight
+        )
+        let normalizedMouth = CGPoint(
+            x: 0.5062596005080024 * normalizeWidth,
+            y: 0.689265251159668 * normalizeHeight
+        )
+        let normalizedRightEar = CGPoint(
+            x: 0.1658528943614037 * normalizeWidth,
+            y: 0.5668278932571411 * normalizeHeight
+        )
+        let normalizedLeftEar = CGPoint(
+            x: 0.7898947484263203 * normalizeWidth,
+            y: 0.5973731875419617 * normalizeHeight
+        )
+
+        expectedNormalizeFace = DetectedFace(
+            boundingBox: normalizedBoundingBox,
+            leftEye: normalizedLeftEye,
+            rightEye: normalizedRightEye,
+            nose: normalizedNose,
+            mouth: normalizedMouth,
+            rightEar: normalizedRightEar,
+            leftEar: normalizedLeftEar,
+            confidence: confidence
+        )
+    }
+
+    /// Given: A `DetectedFace`
+    /// When: the struct is initialized
+    /// Then: the calculated landmarks are available and calculated as expected
+    func testDetectedFaceLandmarks() {
+        XCTAssertEqual(detectedFace.eyeCenterX, 0.5128901207314964)
+        XCTAssertEqual(detectedFace.eyeCenterY, 0.4769134223461151)
+        XCTAssertEqual(detectedFace.faceDistance, 0.31218859419592454)
+        XCTAssertEqual(detectedFace.pupilDistance, 0.31218859419592454)
+        XCTAssertEqual(detectedFace.faceHeight, 0.21245532000610062)
+    }
+
+    /// Given: A `DetectedFace`
+    /// When: `boundingBoxFromLandmarks` is called
+    /// Then: the calculated bounding box is returned
+    func testDetectedFaceBoundingBoxFromLandmarks() {
+        let ovalRect = CGRect.zero
+        let expectedBoundingBox = CGRect(
+            x: 0.1658528943614037,
+            y: 0.072967669448238516,
+            width: 0.6240418540649166,
+            height: 0.8144985824018897
+        )
+        let boundingBox = detectedFace.boundingBoxFromLandmarks(ovalRect: ovalRect)
+        XCTAssertEqual(boundingBox.origin.x, expectedBoundingBox.origin.x)
+        XCTAssertEqual(boundingBox.origin.y, expectedBoundingBox.origin.y)
+        XCTAssertEqual(boundingBox.width, expectedBoundingBox.width)
+        XCTAssertEqual(boundingBox.height, expectedBoundingBox.height)
+    }
+
+    /// Given: A `DetectedFace`
+    /// When: `normalize` is called with a view dimension
+    /// Then: the normalized face calculates the correct landmark distances
+    func testDetectedFaceNormalize() {
+        let normalizedFace = detectedFace.normalize(width: normalizeWidth, height: normalizeHeight)
+        XCTAssertEqual(normalizedFace.eyeCenterX, expectedNormalizeFace.eyeCenterX)
+        XCTAssertEqual(normalizedFace.eyeCenterY, expectedNormalizeFace.eyeCenterY)
+        XCTAssertEqual(normalizedFace.faceDistance, expectedNormalizeFace.faceDistance)
+        XCTAssertEqual(normalizedFace.pupilDistance, expectedNormalizeFace.pupilDistance)
+        XCTAssertEqual(normalizedFace.faceHeight, expectedNormalizeFace.faceHeight)
+    }
+
+}
diff --git a/Tests/FaceLivenessTests/LivenessTests.swift b/Tests/FaceLivenessTests/LivenessTests.swift
index 39f84f53..da063930 100644
--- a/Tests/FaceLivenessTests/LivenessTests.swift
+++ b/Tests/FaceLivenessTests/LivenessTests.swift
@@ -126,7 +126,9 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
         let rightEye = CGPoint(x: 0.38036393762719456, y: 0.48050540685653687)
         let nose = CGPoint(x: 0.48489856674964926, y: 0.54713362455368042)
         let mouth = CGPoint(x: 0.47411978167652435, y: 0.63170802593231201)
-        let detectedFace = DetectedFace(boundingBox: boundingBox, leftEye: leftEye, rightEye: rightEye, nose: nose, mouth: mouth, confidence: 0.971859633)
+        let leftEar = CGPoint(x: 0.7898947484263203, y: 0.5973731875419617)
+        let rightEar = CGPoint(x: 0.1658528943614037, y: 0.5668278932571411)
+        let detectedFace = DetectedFace(boundingBox: boundingBox, leftEye: leftEye, rightEye: rightEye, nose: nose, mouth: mouth, rightEar: rightEar, leftEar: leftEar, confidence: 0.971859633)

         viewModel.process(newResult: .singleFace(detectedFace))
         try await Task.sleep(seconds: 1)
diff --git a/Tests/IntegrationTestApp/IntegrationTestApp/Extension/MockLivenessCaptureSession.swift b/Tests/IntegrationTestApp/IntegrationTestApp/Extension/MockLivenessCaptureSession.swift
index b7e0d0c2..248cf8a6 100644
--- a/Tests/IntegrationTestApp/IntegrationTestApp/Extension/MockLivenessCaptureSession.swift
+++ b/Tests/IntegrationTestApp/IntegrationTestApp/Extension/MockLivenessCaptureSession.swift
@@ -95,10 +95,10 @@ final class MockLivenessCaptureSession: LivenessCaptureSession {
             sampleTiming: &timingInfo,
             sampleBufferOut: &sampleBuffer)
         if let sampleBuffer = sampleBuffer {
-            self.outputDelegate.videoChunker.consume(sampleBuffer)
-            guard let imageBuffer = sampleBuffer.rotateRightUpMirrored()
+            self.outputSampleBufferCapturer?.videoChunker.consume(sampleBuffer)
+            guard let imageBuffer = sampleBuffer.imageBuffer
             else { return }
-            self.outputDelegate.faceDetector.detectFaces(from: imageBuffer)
+            self.outputSampleBufferCapturer?.faceDetector.detectFaces(from: imageBuffer)
         }
     }
 }
diff --git a/Tests/IntegrationTestApp/IntegrationTestApp/Views/LivenessResultContentView+Result.swift b/Tests/IntegrationTestApp/IntegrationTestApp/Views/LivenessResultContentView+Result.swift
index 749e88a7..3f57982f 100644
--- a/Tests/IntegrationTestApp/IntegrationTestApp/Views/LivenessResultContentView+Result.swift
+++ b/Tests/IntegrationTestApp/IntegrationTestApp/Views/LivenessResultContentView+Result.swift
@@ -14,7 +14,8 @@ extension LivenessResultContentView {
     let valueTextColor: Color
     let valueBackgroundColor: Color
     let auditImage: Data?
-
+    let isLive: Bool
+
     init(livenessResult: LivenessResult) {
         guard livenessResult.confidenceScore > 0 else {
             text = ""
@@ -22,9 +23,10 @@ extension LivenessResultContentView {
             valueTextColor = .clear
             valueBackgroundColor = .clear
             auditImage = nil
+            isLive = false
             return
         }
-
+        isLive = livenessResult.isLive
         let truncated = String(format: "%.4f", livenessResult.confidenceScore)
         value = truncated
         if livenessResult.isLive {
diff --git a/Tests/IntegrationTestApp/IntegrationTestApp/Views/LivenessResultContentView.swift b/Tests/IntegrationTestApp/IntegrationTestApp/Views/LivenessResultContentView.swift
index b1787015..eb2cbbd0 100644
--- a/Tests/IntegrationTestApp/IntegrationTestApp/Views/LivenessResultContentView.swift
+++ b/Tests/IntegrationTestApp/IntegrationTestApp/Views/LivenessResultContentView.swift
@@ -17,6 +17,10 @@ struct LivenessResultContentView: View {
                 Text("Result:")
                 Text(result.text)
                     .fontWeight(.semibold)
+                    .foregroundColor(result.valueTextColor)
+                    .padding(6)
+                    .background(result.valueBackgroundColor)
+                    .cornerRadius(8)
             }
             .padding(.bottom, 12)

@@ -42,6 +46,20 @@ struct LivenessResultContentView: View {
             .frame(maxWidth: .infinity, idealHeight: 268)
             .background(Color.secondary.opacity(0.1))
         }
+
+        if !result.isLive {
+            steps()
+                .padding()
+                .background(
+                    Rectangle()
+                        .foregroundColor(
+                            .dynamicColors(
+                                light: .hex("#ECECEC"),
+                                dark: .darkGray
+                            )
+                        )
+                        .cornerRadius(6))
+        }
     }
     .padding(.bottom, 16)
     .onAppear {
@@ -54,6 +72,31 @@ struct LivenessResultContentView: View {
             }
         }
     }
+
+    private func steps() -> some View {
+        func step(number: Int, text: String) -> some View {
+            HStack(alignment: .top) {
+                Text("\(number).")
+                Text(text)
+            }
+        }
+
+        return VStack(
+            alignment: .leading,
+            spacing: 8
+        ) {
+            Text("Tips to pass the video check:")
+                .fontWeight(.semibold)
+            step(number: 1, text: "Maximize your screen's brightness.")
+                .accessibilityElement(children: .combine)
+
+            step(number: 2, text: "Avoid very bright lighting conditions, such as direct sunlight.")
+                .accessibilityElement(children: .combine)
+
+            step(number: 3, text: "Remove sunglasses, mask, hat, or anything blocking your face.")
+                .accessibilityElement(children: .combine)
+        }
+    }
 }
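
Note on the new `DetectedFaceTests`: the fixture arithmetic above pins down the geometry `DetectedFace` is expected to expose. The sketch below captures those relationships as the assertions imply them; it is a minimal illustration using a hypothetical `Landmarks` stand-in, not the library's actual type or API.

import CoreGraphics

// Hypothetical stand-in for DetectedFace, for illustration only.
struct Landmarks {
    var leftEye: CGPoint
    var rightEye: CGPoint
    var mouth: CGPoint

    // Midpoint of the pupils; for the fixture above this yields
    // eyeCenterX == 0.5128901207314964 and eyeCenterY == 0.4769134223461151.
    var eyeCenter: CGPoint {
        CGPoint(
            x: (leftEye.x + rightEye.x) / 2,
            y: (leftEye.y + rightEye.y) / 2
        )
    }

    // Euclidean distance between the pupils; 0.31218859419592454 for the fixture.
    var pupilDistance: CGFloat {
        let dx = leftEye.x - rightEye.x
        let dy = leftEye.y - rightEye.y
        return (dx * dx + dy * dy).squareRoot()
    }

    // Distance from the eye center to the mouth; 0.21245532000610062 for the fixture.
    var faceHeight: CGFloat {
        let dx = eyeCenter.x - mouth.x
        let dy = eyeCenter.y - mouth.y
        return (dx * dx + dy * dy).squareRoot()
    }

    // Scaling every normalized (0...1) coordinate by the view size reproduces
    // the expectedNormalizeFace fixture built in setUp().
    func normalize(width: CGFloat, height: CGFloat) -> Landmarks {
        func scaled(_ point: CGPoint) -> CGPoint {
            CGPoint(x: point.x * width, y: point.y * height)
        }
        return Landmarks(
            leftEye: scaled(leftEye),
            rightEye: scaled(rightEye),
            mouth: scaled(mouth)
        )
    }
}

Under these definitions the asserted values follow directly, e.g. eyeCenterX = (0.6686329891870315 + 0.35714725227596134) / 2 = 0.5128901207314964. Note also that `testDetectedFaceLandmarks` asserts identical values for `faceDistance` and `pupilDistance`, which suggests both are derived from the same pupil geometry for this fixture.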