Skip to content

Commit

Permalink
Merge branch 'main' into chore/update_localization
Browse files Browse the repository at this point in the history
  • Loading branch information
phantumcode authored Nov 29, 2023
2 parents 90b89fb + ba5e1ae commit 28f3899
Show file tree
Hide file tree
Showing 21 changed files with 287 additions and 103 deletions.
14 changes: 14 additions & 0 deletions HostApp/HostApp/Views/LivenessResultContentView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,20 @@ struct LivenessResultContentView: View {
.frame(maxWidth: .infinity, idealHeight: 268)
.background(Color.secondary.opacity(0.1))
}

if !result.isLive {
steps()
.padding()
.background(
Rectangle()
.foregroundColor(
.dynamicColors(
light: .hex("#ECECEC"),
dark: .darkGray
)
)
.cornerRadius(6))
}
}
.padding(.bottom, 16)
.onAppear {
Expand Down
2 changes: 1 addition & 1 deletion HostApp/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ cd amplify-ui-swift-livenes/HostApp

7. Once signed in and authenticated, the "Create Liveness Session" is enabled. Click the button to generate and get a session id from your backend.

8. Once a session id is created, the Liveness Check screen is displayed. Follow the instructions and click on the Start video check button to begin liveness verification.

## Provision AWS Backend Resources

Expand Down
2 changes: 1 addition & 1 deletion Sources/FaceLiveness/AV/VideoChunker.swift
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,9 @@ final class VideoChunker {

/// Begins writing: transitions the chunker from `.pending` to `.writing`
/// and starts the underlying `AVAssetWriter` session at source time zero.
/// Does nothing if the chunker is not in the `.pending` state.
func start() {
    guard state == .pending else { return }
    assetWriter.startWriting()
    assetWriter.startSession(atSourceTime: .zero)
    // State flips to .writing only after the writer session is live
    // (the captured text duplicated this assignment before and after the
    // writer calls; the post-change position is kept).
    state = .writing
}

func finish(singleFrame: @escaping (UIImage) -> Void) {
Expand Down
98 changes: 62 additions & 36 deletions Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,53 +14,79 @@ struct DetectedFace {
let rightEye: CGPoint
let nose: CGPoint
let mouth: CGPoint
let rightEar: CGPoint
let leftEar: CGPoint

let confidence: Float

/// Estimates a face bounding box from the detected landmarks.
///
/// The box width blends the pupil distance and the eye-to-mouth distance
/// with empirical weights; the bottom edge is anchored to the detector's
/// `boundingBox` and the sides are widened to include both ear landmarks.
/// - Parameter ovalRect: The on-screen oval region; pass `.zero` when no
///   oval is available. When the eye center's y exceeds
///   `(ovalRect.minY + ovalRect.height) / 2`, the box is centered
///   horizontally on the eyes instead of the eye/nose midpoint.
/// - Returns: The estimated face rectangle in the same coordinate space
///   as the landmarks.
func boundingBoxFromLandmarks(ovalRect: CGRect) -> CGRect {
    // Empirical weights for blending the two facial distance measures.
    let alpha = 2.0
    let gamma = 1.8
    let ow = (alpha * pupilDistance + gamma * faceHeight) / 2
    var cx = (eyeCenterX + nose.x) / 2

    if ovalRect != CGRect.zero {
        let ovalTop = ovalRect.minY
        let ovalHeight = ovalRect.maxY - ovalRect.minY
        if eyeCenterY > (ovalTop + ovalHeight) / 2 {
            cx = eyeCenterX
        }
    }

    let faceWidth = ow
    // NOTE: this local shadows the `faceHeight` property with a
    // height estimate derived from the blended width (factor 1.68).
    let faceHeight = 1.68 * faceWidth
    let faceBoxBottom = boundingBox.maxY
    let faceBoxTop = faceBoxBottom - faceHeight
    // NOTE(review): `min` pairs with rightEar and `max` with leftEar,
    // which suggests mirrored landmark coordinates — confirm against the
    // detector's output orientation.
    let faceBoxLeft = min(cx - ow / 2, rightEar.x)
    let faceBoxRight = max(cx + ow / 2, leftEar.x)
    let width = faceBoxRight - faceBoxLeft
    let height = faceBoxBottom - faceBoxTop
    return CGRect(x: faceBoxLeft, y: faceBoxTop, width: width, height: height)
}

/// Euclidean distance between the left and right eye landmarks.
/// NOTE(review): formula is identical to `pupilDistance` — one of the two
/// properties is likely redundant.
var faceDistance: CGFloat {
    sqrt(pow(rightEye.x - leftEye.x, 2) + pow(rightEye.y - leftEye.y, 2))
}

/// Euclidean distance between the two pupils (eye landmarks).
/// Delegates to `faceDistance`, which computes the identical value;
/// the separate name is kept for readability at call sites.
var pupilDistance: CGFloat {
    faceDistance
}

/// Horizontal midpoint between the two eye landmarks.
var eyeCenterX: CGFloat {
    (leftEye.x + rightEye.x) / 2
}

/// Vertical midpoint between the two eye landmarks.
var eyeCenterY: CGFloat {
    (leftEye.y + rightEye.y) / 2
}

/// Euclidean distance from the midpoint between the eyes to the mouth
/// landmark (an eye-to-mouth span, not the full chin-to-hairline height).
var faceHeight: CGFloat {
    sqrt(pow(eyeCenterX - mouth.x, 2) + pow(eyeCenterY - mouth.y, 2))
}

/// Scales the bounding box and every landmark from normalized (0–1)
/// coordinates into pixel coordinates.
/// - Parameters:
///   - width: Factor applied to all x coordinates (e.g. image width).
///   - height: Factor applied to all y coordinates (e.g. image height).
/// - Returns: A new `DetectedFace` with all geometry multiplied by the
///   given dimensions; `confidence` is carried over unchanged.
func normalize(width: CGFloat, height: CGFloat) -> DetectedFace {
    let boundingBox = CGRect(
        x: boundingBox.minX * width,
        y: boundingBox.minY * height,
        width: boundingBox.width * width,
        height: boundingBox.height * height
    )
    let leftEye = CGPoint(x: leftEye.x * width, y: leftEye.y * height)
    let rightEye = CGPoint(x: rightEye.x * width, y: rightEye.y * height)
    let nose = CGPoint(x: nose.x * width, y: nose.y * height)
    let mouth = CGPoint(x: mouth.x * width, y: mouth.y * height)
    let rightEar = CGPoint(x: rightEar.x * width, y: rightEar.y * height)
    let leftEar = CGPoint(x: leftEar.x * width, y: leftEar.y * height)

    return DetectedFace(
        boundingBox: boundingBox,
        leftEye: leftEye,
        rightEye: rightEye,
        nose: nose,
        mouth: mouth,
        rightEar: rightEar,
        leftEar: leftEar,
        confidence: confidence
    )
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,9 @@ extension FaceDetectorShortRange {
let leftEye = faceResult[3]
let nose = faceResult[4]
let mouth = faceResult[5]
let rightEar = faceResult[6]
let leftEar = faceResult[7]



let boundingBox = CGRect(
Expand All @@ -172,6 +175,8 @@ extension FaceDetectorShortRange {
rightEye: .init(x: rightEye.x, y: rightEye.y),
nose: .init(x: nose.x, y: nose.y),
mouth: .init(x: mouth.x, y: mouth.y),
rightEar: .init(x: rightEar.x, y: rightEar.y),
leftEar: .init(x: leftEar.x, y: leftEar.y),
confidence: overlappingConfidenceScore / Float(overlappingOutputs.count)
)

Expand Down
7 changes: 3 additions & 4 deletions Sources/FaceLiveness/Resources/Base.lproj/Localizable.strings
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,15 @@
"amplify_ui_liveness_get_ready_photosensitivity_icon_a11y" = "Photosensitivity Information";
"amplify_ui_liveness_get_ready_photosensitivity_dialog_title" = "Photosensitivity warning";
"amplify_ui_liveness_get_ready_photosensitivity_dialog_description" = "Some people may experience epileptic seizures when exposed to colored lights. Use caution if you, or anyone in your family, have an epileptic condition.";
"amplify_ui_liveness_get_ready_begin_check" = "Start video check";

"amplify_ui_liveness_challenge_recording_indicator_label" = "REC";
"amplify_ui_liveness_challenge_instruction_hold_face_during_countdown" = "Hold face position during countdown.";
"amplify_ui_liveness_challenge_instruction_hold_face_during_freshness" = "Hold face in oval for colored lights.";
"amplify_ui_liveness_challenge_instruction_move_face_back" = "Move back";
"amplify_ui_liveness_challenge_instruction_move_face_closer" = "Move closer";
"amplify_ui_liveness_challenge_instruction_move_face_in_front_of_camera" = "Move face in front of camera";
"amplify_ui_liveness_challenge_instruction_multiple_faces_detected" = "Only one face per check";
"amplify_ui_liveness_challenge_instruction_hold_still" = "Hold still";

"amplify_ui_liveness_challenge_connecting" = "Connecting...";
Expand Down Expand Up @@ -47,6 +47,5 @@
"amplify_ui_liveness_face_not_prepared_reason_move_to_dimmer_area" = "Move to dimmer area";
"amplify_ui_liveness_face_not_prepared_reason_move_to_brighter_area" = "Move to brighter area";
"amplify_ui_liveness_face_not_prepared_reason_no_face" = "Move face in front of camera";
"amplify_ui_liveness_face_not_prepared_reason_multiple_faces" = "Only one face per check";
"amplify_ui_liveness_face_not_prepared_reason_face_too_close" = "Move face farther away";

24 changes: 13 additions & 11 deletions Sources/FaceLiveness/Utilities/CGImage+Convert.swift
Original file line number Diff line number Diff line change
Expand Up @@ -9,16 +9,18 @@ import CoreGraphics
import VideoToolbox

extension CGImage {
    /// Creates a `CGImage` from the given pixel buffer via VideoToolbox.
    /// - Parameter cvPixelBuffer: The source buffer; may be `nil`.
    /// - Returns: The converted image, or `nil` when the buffer is `nil`
    ///   or `VTCreateCGImageFromCVPixelBuffer` produces no image.
    static func convert(from cvPixelBuffer: CVPixelBuffer?) -> CGImage? {
        guard let pixelBuffer = cvPixelBuffer else {
            return nil
        }

        var image: CGImage?
        VTCreateCGImageFromCVPixelBuffer(
            pixelBuffer,
            options: nil,
            imageOut: &image
        )

        return image
    }
}
4 changes: 2 additions & 2 deletions Sources/FaceLiveness/Utilities/LivenessLocalizedStrings.swift
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ enum LocalizedStrings {
/// en = "A small percentage of individuals may experience epileptic seizures when exposed to colored lights. Use caution if you, or anyone in your family, have an epileptic condition."
static let get_ready_photosensitivity_dialog_description = "amplify_ui_liveness_get_ready_photosensitivity_dialog_description".localized()

/// en = "Start video check"
static let get_ready_begin_check = "amplify_ui_liveness_get_ready_begin_check".localized()

/// en = "REC"
Expand All @@ -47,7 +47,7 @@ enum LocalizedStrings {
/// en = "Hold still"
static let challenge_instruction_hold_still = "amplify_ui_liveness_challenge_instruction_hold_still".localized()

/// en = "Only one face per check"
static let challenge_instruction_multiple_faces_detected = "amplify_ui_liveness_challenge_instruction_multiple_faces_detected".localized()

/// en = "Connecting..."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,6 @@ struct CameraPermissionView: View {

var body: some View {
VStack(alignment: .leading) {
Text(LocalizedStrings.camera_permission_page_title)
.font(.system(size: 34, weight: .semibold))
.accessibilityAddTraits(.isHeader)
.padding()
Spacer()
VStack {
Text(LocalizedStrings.camera_permission_change_setting_header)
Expand Down
13 changes: 3 additions & 10 deletions Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ import SwiftUI

struct CameraPreviewView: View {
private static let previewWidthRatio = 0.6
private static let previewHeightRatio = 0.75
private static let previewHeightRatio = 0.55
private static let previewXPositionRatio = 0.5
private static let previewYPositionRatio = 0.55
private static let previewYPositionRatio = 0.6

@StateObject var model: CameraPreviewViewModel

public init(model: CameraPreviewViewModel = CameraPreviewViewModel()) {
init(model: CameraPreviewViewModel = CameraPreviewViewModel()) {
self._model = StateObject(wrappedValue: model)
}

Expand All @@ -39,13 +39,6 @@ struct CameraPreviewView: View {
.position(x: geometry.size.width*Self.previewXPositionRatio,
y: geometry.size.height*Self.previewYPositionRatio)
}
VStack {
Text(LocalizedStrings.preview_center_your_face_text)
.font(.largeTitle)
.multilineTextAlignment(.center)
.padding()
Spacer()
}
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@ class CameraPreviewViewModel: NSObject, ObservableObject {
super.init()
setupSubscriptions()

let avCpatureDevice = AVCaptureDevice.DiscoverySession(
let avCaptureDevice = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera],
mediaType: .video,
position: .front
).devices.first

self.previewCaptureSession = LivenessCaptureSession(
captureDevice: .init(avCaptureDevice: avCpatureDevice),
captureDevice: .init(avCaptureDevice: avCaptureDevice),
outputDelegate: self
)

Expand Down
22 changes: 9 additions & 13 deletions Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -21,25 +21,21 @@ struct GetReadyPageView: View {

var body: some View {
VStack {
VStack(alignment: .leading) {
Text(LocalizedStrings.get_ready_page_title)
.font(.system(size: 34, weight: .semibold))
.accessibilityAddTraits(.isHeader)
.padding(.bottom, 8)

ZStack {
CameraPreviewView()
VStack {
WarningBox(
titleText: LocalizedStrings.get_ready_photosensitivity_title,
bodyText: LocalizedStrings.get_ready_photosensitivity_description,
popoverContent: { photosensitivityWarningPopoverContent }
)
.accessibilityElement(children: .combine)
.padding(.bottom, 8)

CameraPreviewView()
.border(Color.livenessPreviewBorder)
}
.padding()

Text(LocalizedStrings.preview_center_your_face_text)
.font(.title)
.multilineTextAlignment(.center)
Spacer()
}.padding()
}
beginCheckButton
}
}
Expand Down
21 changes: 10 additions & 11 deletions Sources/FaceLiveness/Views/GetReadyPage/ImageFrameView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -12,20 +12,19 @@ struct ImageFrameView: View {

/// Renders the captured camera frame mirrored (`.upMirrored`) and scaled
/// to fill the available space, clipped to the container; shows a black
/// placeholder while no frame is available.
var body: some View {
    if let image = image {
        GeometryReader { geometry in
            Image(decorative: image, scale: 1.0, orientation: .upMirrored)
                .resizable()
                .scaledToFill()
                .frame(
                    width: geometry.size.width,
                    height: geometry.size.height,
                    alignment: .center)
                .clipped()
        }
    } else {
        Color.black
    }
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,13 @@ struct InstructionContainerView: View {
argument: LocalizedStrings.challenge_verifying
)
}
case .faceMatched:
InstructionView(
text: LocalizedStrings.challenge_instruction_hold_still,
backgroundColor: .livenessPrimaryBackground,
textColor: .livenessPrimaryLabel,
font: .title
)
default:
EmptyView()
}
Expand Down
Loading

0 comments on commit 28f3899

Please sign in to comment.