chore: kickoff release
phantumcode authored Dec 4, 2023
2 parents 615ccd8 + eb2e037 commit df30e86
Showing 40 changed files with 749 additions and 464 deletions.
6 changes: 4 additions & 2 deletions HostApp/HostApp/Views/LivenessResultContentView+Result.swift
@@ -14,17 +14,19 @@ extension LivenessResultContentView {
let valueTextColor: Color
let valueBackgroundColor: Color
let auditImage: Data?

let isLive: Bool

init(livenessResult: LivenessResult) {
guard livenessResult.confidenceScore > 0 else {
text = ""
value = ""
valueTextColor = .clear
valueBackgroundColor = .clear
auditImage = nil
isLive = false
return
}

isLive = livenessResult.isLive
let truncated = String(format: "%.4f", livenessResult.confidenceScore)
value = truncated
if livenessResult.isLive {
39 changes: 37 additions & 2 deletions HostApp/HostApp/Views/LivenessResultContentView.swift
@@ -17,9 +17,7 @@ struct LivenessResultContentView: View {
Text("Result:")
Text(result.text)
.fontWeight(.semibold)

}
.padding(.bottom, 12)

HStack {
Text("Liveness confidence score:")
@@ -42,6 +40,20 @@
.frame(maxWidth: .infinity, idealHeight: 268)
.background(Color.secondary.opacity(0.1))
}

if !result.isLive {
steps()
.padding()
.background(
Rectangle()
.foregroundColor(
.dynamicColors(
light: .hex("#ECECEC"),
dark: .darkGray
)
)
.cornerRadius(6))
}
}
.padding(.bottom, 16)
.onAppear {
@@ -54,6 +66,29 @@
}
}
}

private func steps() -> some View {
func step(number: Int, text: String) -> some View {
HStack(alignment: .top) {
Text("\(number).")
Text(text)
}
}

return VStack(
alignment: .leading,
spacing: 8
) {
Text("Tips to pass the video check:")
.fontWeight(.semibold)

step(number: 1, text: "Avoid very bright lighting conditions, such as direct sunlight.")
.accessibilityElement(children: .combine)

step(number: 2, text: "Remove sunglasses, mask, hat, or anything blocking your face.")
.accessibilityElement(children: .combine)
}
}
}
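
A design note on the tips block added above: steps() is rendered only when result.isLive is false (the if !result.isLive branch earlier in the file), and each numbered step applies .accessibilityElement(children: .combine) so VoiceOver reads the number and the tip text as a single element rather than two separate ones.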


2 changes: 1 addition & 1 deletion HostApp/HostApp/Views/LivenessResultView.swift
@@ -15,7 +15,7 @@ struct LivenessResultView<Content: View>: View {
@State var displayingCopiedNotification = false

init(
title: String = "Liveness Check",
title: String = "Liveness Result",
sessionID: String,
onTryAgain: @escaping () -> Void,
@ViewBuilder content: () -> Content
2 changes: 1 addition & 1 deletion HostApp/README.md
@@ -29,7 +29,7 @@ cd amplify-ui-swift-livenes/HostApp

7. Once signed in and authenticated, the "Create Liveness Session" is enabled. Click the button to generate and get a session id from your backend.

8. Once a session id is created, the Liveness Check screen is displayed. Follow the instructions and click on Begin Check button to begin liveness verification.
8. Once a session id is created, the Liveness Check screen is displayed. Follow the instructions and click on Start video check button to begin liveness verification.

## Provision AWS Backend Resources

42 changes: 0 additions & 42 deletions Sources/FaceLiveness/AV/CMSampleBuffer+Rotate.swift

This file was deleted.

41 changes: 26 additions & 15 deletions Sources/FaceLiveness/AV/LivenessCaptureSession.swift
@@ -11,15 +11,34 @@ import AVFoundation
class LivenessCaptureSession {
let captureDevice: LivenessCaptureDevice
private let captureQueue = DispatchQueue(label: "com.amazonaws.faceliveness.cameracapturequeue")
let outputDelegate: OutputSampleBufferCapturer
let outputDelegate: AVCaptureVideoDataOutputSampleBufferDelegate
var captureSession: AVCaptureSession?

var outputSampleBufferCapturer: OutputSampleBufferCapturer? {
return outputDelegate as? OutputSampleBufferCapturer
}

init(captureDevice: LivenessCaptureDevice, outputDelegate: OutputSampleBufferCapturer) {
init(captureDevice: LivenessCaptureDevice, outputDelegate: AVCaptureVideoDataOutputSampleBufferDelegate) {
self.captureDevice = captureDevice
self.outputDelegate = outputDelegate
}

func startSession(frame: CGRect) throws -> CALayer {
try startSession()

guard let captureSession = captureSession else {
throw LivenessCaptureSessionError.captureSessionUnavailable
}

let previewLayer = previewLayer(
frame: frame,
for: captureSession
)

return previewLayer
}

func startSession() throws {
guard let camera = captureDevice.avCaptureDevice
else { throw LivenessCaptureSessionError.cameraUnavailable }

@@ -44,17 +63,10 @@ class LivenessCaptureSession {
captureSession.startRunning()
}

let previewLayer = previewLayer(
frame: frame,
for: captureSession
)

videoOutput.setSampleBufferDelegate(
outputDelegate,
queue: captureQueue
)

return previewLayer
}

func stopRunning() {
@@ -83,6 +95,11 @@
_ output: AVCaptureVideoDataOutput,
for captureSession: AVCaptureSession
) throws {
if captureSession.canAddOutput(output) {
captureSession.addOutput(output)
} else {
throw LivenessCaptureSessionError.captureSessionOutputUnavailable
}
output.videoSettings = [
kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
]
@@ -92,12 +109,6 @@
.forEach {
$0.videoOrientation = .portrait
}

if captureSession.canAddOutput(output) {
captureSession.addOutput(output)
} else {
throw LivenessCaptureSessionError.captureSessionOutputUnavailable
}
}

private func previewLayer(
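
Taken together, the changes above split preview-layer creation out of session startup: startSession(frame:) preserves the old UI behavior, while the new startSession() starts capture with no preview layer. Widening outputDelegate to AVCaptureVideoDataOutputSampleBufferDelegate (with the outputSampleBufferCapturer computed property providing the typed downcast for production code) also makes it easier to inject a lightweight delegate in tests. A minimal caller sketch, assuming a configured captureDevice, an outputDelegate, and a host view already exist in scope (those names are hypothetical):

// Hypothetical caller; LivenessCaptureSession and LivenessCaptureDevice are
// the types from the diff above.
let session = LivenessCaptureSession(
    captureDevice: captureDevice,   // assumed: a configured LivenessCaptureDevice
    outputDelegate: outputDelegate  // any AVCaptureVideoDataOutputSampleBufferDelegate
)

do {
    // UI path: start capture and install the returned preview layer.
    let previewLayer = try session.startSession(frame: view.bounds)
    view.layer.addSublayer(previewLayer)
} catch {
    // e.g. LivenessCaptureSessionError.cameraUnavailable or .captureSessionOutputUnavailable
    print("Unable to start liveness capture: \(error)")
}

// Headless path (e.g. tests): start capture without creating a preview layer.
// try session.startSession()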
2 changes: 1 addition & 1 deletion Sources/FaceLiveness/AV/OutputSampleBufferCapturer.swift
@@ -24,7 +24,7 @@ class OutputSampleBufferCapturer: NSObject, AVCaptureVideoDataOutputSampleBuffer
) {
videoChunker.consume(sampleBuffer)

guard let imageBuffer = sampleBuffer.rotateRightUpMirrored()
guard let imageBuffer = sampleBuffer.imageBuffer
else { return }

faceDetector.detectFaces(from: imageBuffer)
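
This swap pairs with the deletion of CMSampleBuffer+Rotate.swift earlier in the commit: rather than producing a rotated, mirrored copy of every frame, the capturer now reads the buffer's native imageBuffer (CMSampleBuffer.imageBuffer is the standard CoreMedia accessor), with orientation handled instead by the videoOrientation = .portrait applied to the output's connections in LivenessCaptureSession.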
10 changes: 5 additions & 5 deletions Sources/FaceLiveness/AV/VideoChunker.swift
@@ -34,9 +34,9 @@ final class VideoChunker {

func start() {
guard state == .pending else { return }
state = .writing
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: .zero)
state = .writing
}

func finish(singleFrame: @escaping (UIImage) -> Void) {
@@ -49,8 +49,8 @@

func consume(_ buffer: CMSampleBuffer) {
if state == .awaitingSingleFrame {
guard let rotated = buffer.rotateRightUpMirrored() else { return }
let singleFrame = singleFrame(from: rotated)
guard let imageBuffer = buffer.imageBuffer else { return }
let singleFrame = singleFrame(from: imageBuffer)
provideSingleFrame?(singleFrame)
state = .complete
}
@@ -66,10 +66,10 @@
if assetWriterInput.isReadyForMoreMediaData {
let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer).seconds
let presentationTime = CMTime(seconds: timestamp - startTimeSeconds, preferredTimescale: 600)
guard let rotated = buffer.rotateRightUpMirrored() else { return }
guard let imageBuffer = buffer.imageBuffer else { return }

pixelBufferAdaptor.append(
rotated,
imageBuffer,
withPresentationTime: presentationTime
)
}
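
The consume(_:) changes here mirror the OutputSampleBufferCapturer swap: the chunker now appends the buffer's native imageBuffer rather than a rotated copy. As a side note on the unchanged timestamp math, each appended frame is rebased against the chunk's start time so every chunk's timeline begins at zero. A small illustration with made-up values:

import CoreMedia

// Illustrative values only: suppose the chunk started at t = 1043.20 s on the
// device clock and this frame arrived at t = 1043.25 s.
let startTimeSeconds = 1043.20
let timestamp = 1043.25  // CMSampleBufferGetPresentationTimeStamp(buffer).seconds

// Rebased to 0.05 s into the chunk; 600 is the conventional timescale because
// it divides evenly by common frame rates (24, 25, 30, 60 fps).
let presentationTime = CMTime(seconds: timestamp - startTimeSeconds, preferredTimescale: 600)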
98 changes: 62 additions & 36 deletions Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift
@@ -14,53 +14,79 @@ struct DetectedFace {
let rightEye: CGPoint
let nose: CGPoint
let mouth: CGPoint
let rightEar: CGPoint
let leftEar: CGPoint

let confidence: Float

func boundingBoxFromLandmarks() -> CGRect {
let eyeCenterX = (leftEye.x + rightEye.x) / 2
let eyeCenterY = (leftEye.y + rightEye.y) / 2

let cx = (nose.x + eyeCenterX) / 2
let cy = (nose.y + eyeCenterY) / 2

let ow = sqrt(pow((leftEye.x - rightEye.x), 2) + pow((leftEye.y - rightEye.y), 2)) * 2
let oh = 1.618 * ow
let minX = cx - ow / 2
let minY = cy - oh / 2

let rect = CGRect(x: minX, y: minY, width: ow, height: oh)
func boundingBoxFromLandmarks(ovalRect: CGRect) -> CGRect {
let alpha = 2.0
let gamma = 1.8
let ow = (alpha * pupilDistance + gamma * faceHeight) / 2
var cx = (eyeCenterX + nose.x) / 2

if ovalRect != CGRect.zero {
let ovalTop = ovalRect.minY
let ovalHeight = ovalRect.maxY - ovalRect.minY
if eyeCenterY > (ovalTop + ovalHeight) / 2 {
cx = eyeCenterX
}
}

let faceWidth = ow
let faceHeight = 1.618 * faceWidth
let faceBoxBottom = boundingBox.maxY
let faceBoxTop = faceBoxBottom - faceHeight
let faceBoxLeft = min(cx - ow / 2, rightEar.x)
let faceBoxRight = max(cx + ow / 2, leftEar.x)
let width = faceBoxRight - faceBoxLeft
let height = faceBoxBottom - faceBoxTop
let rect = CGRect(x: faceBoxLeft, y: faceBoxTop, width: width, height: height)
return rect
}

var faceDistance: CGFloat {
sqrt(pow(rightEye.x - leftEye.x, 2) + pow(rightEye.y - leftEye.y, 2))
}

var pupilDistance: CGFloat {
sqrt(pow(leftEye.x - rightEye.x, 2) + pow(leftEye.y - rightEye.y, 2))
}

var eyeCenterX: CGFloat {
(leftEye.x + rightEye.x) / 2
}

var eyeCenterY: CGFloat {
(leftEye.y + rightEye.y) / 2
}

var faceHeight: CGFloat {
sqrt(pow(eyeCenterX - mouth.x, 2) + pow(eyeCenterY - mouth.y, 2))
}

func normalize(width: CGFloat, height: CGFloat) -> DetectedFace {
.init(
boundingBox: .init(
x: boundingBox.minX * width,
y: boundingBox.minY * height,
width: boundingBox.width * width,
height: boundingBox.height * height
),
leftEye: .init(
x: leftEye.x * width,
y: leftEye.y * height
),
rightEye: .init(
x: rightEye.x * width,
y: rightEye.y * height
),
nose: .init(
x: nose.x * width,
y: nose.y * height
),
mouth: .init(
x: mouth.x * width,
y: mouth.y * height
),
let boundingBox = CGRect(
x: boundingBox.minX * width,
y: boundingBox.minY * height,
width: boundingBox.width * width,
height: boundingBox.height * height
)
let leftEye = CGPoint(x: leftEye.x * width, y: leftEye.y * height)
let rightEye = CGPoint(x: rightEye.x * width, y: rightEye.y * height)
let nose = CGPoint(x: nose.x * width, y: nose.y * height)
let mouth = CGPoint(x: mouth.x * width, y: mouth.y * height)
let rightEar = CGPoint(x: rightEar.x * width, y: rightEar.y * height)
let leftEar = CGPoint(x: leftEar.x * width, y: leftEar.y * height)

return DetectedFace(
boundingBox: boundingBox,
leftEye: leftEye,
rightEye: rightEye,
nose: nose,
mouth: mouth,
rightEar: rightEar,
leftEar: leftEar,
confidence: confidence
)
}
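
To make the new landmark math concrete, here is a worked example with hypothetical normalized landmark values (not taken from the library), assuming access to the internal DetectedFace type, e.g. from the module's tests. Note the camera frame is mirrored, so the subject's right ear has the smaller x coordinate, matching the min/max usage in boundingBoxFromLandmarks(ovalRect:):

// Hypothetical landmarks in normalized image coordinates (mirrored camera frame).
let face = DetectedFace(
    boundingBox: CGRect(x: 0.30, y: 0.25, width: 0.40, height: 0.50),
    leftEye: CGPoint(x: 0.60, y: 0.40),
    rightEye: CGPoint(x: 0.40, y: 0.40),
    nose: CGPoint(x: 0.50, y: 0.50),
    mouth: CGPoint(x: 0.50, y: 0.60),
    rightEar: CGPoint(x: 0.33, y: 0.45),
    leftEar: CGPoint(x: 0.67, y: 0.45),
    confidence: 0.99
)

// pupilDistance = 0.20 and faceHeight = 0.20, so
// ow = (2.0 * 0.20 + 1.8 * 0.20) / 2 = 0.38 and box height = 1.618 * 0.38 ≈ 0.615.
// The box hangs from boundingBox.maxY (0.75) and widens to include both ears:
// left = min(0.50 - 0.19, 0.33) = 0.31, right = max(0.50 + 0.19, 0.67) = 0.69.
let box = face.boundingBoxFromLandmarks(ovalRect: .zero)
// box ≈ CGRect(x: 0.31, y: 0.135, width: 0.38, height: 0.615)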
