//
//  alignFace.swift
//
//  Face detection and alignment helpers built on the Vision framework.
//
import UIKit
import Vision

extension UIImage {
    /// Detects the first face in the image, crops to its bounding box, and then tries to
    /// rotate the crop so the eyes are level. The completion receives the cropped face and
    /// the rotated crop; either may be nil when detection fails.
    func alignFace(completion: @escaping (UIImage?, UIImage?) -> Void) {
        guard let cgImage = self.cgImage else {
            completion(nil, nil)
            return
        }

        let faceDetectionRequest = VNDetectFaceRectanglesRequest { request, _ in
            guard let face = (request.results as? [VNFaceObservation])?.first else {
                completion(nil, nil)
                return
            }

            // Convert the normalized, bottom-left-origin bounding box into image points.
            let faceRect = face.boundingBox.scaled(to: self.size)
            guard let croppedImage = self.cropped(to: faceRect) else {
                completion(nil, nil)
                return
            }

            // Run landmark detection on the crop to estimate and correct the roll angle.
            croppedImage.detectFaceLandmarks { rotatedImage in
                completion(croppedImage, rotatedImage)
            }
        }

        // `.up` assumes the backing CGImage is already upright.
        let handler = VNImageRequestHandler(cgImage: cgImage, orientation: .up)
        do {
            try handler.perform([faceDetectionRequest])
        } catch {
            completion(nil, nil)
        }
    }

    /// Crops to a rect given in points, converting to the pixel space of the backing CGImage.
    func cropped(to rect: CGRect) -> UIImage? {
        let scaledRect = CGRect(
            x: rect.origin.x * scale,
            y: rect.origin.y * scale,
            width: rect.width * scale,
            height: rect.height * scale
        )
        guard let croppedCgImage = cgImage?.cropping(to: scaledRect) else { return nil }
        return UIImage(cgImage: croppedCgImage, scale: scale, orientation: .up)
    }

    /// Detects eye and nose landmarks, computes the roll angle of the line joining the eyes,
    /// and returns the image rotated so the eyes are level (nil on failure).
    private func detectFaceLandmarks(completion: @escaping (UIImage?) -> Void) {
        guard let cgImage = self.cgImage else {
            completion(nil)
            return
        }

        let request = VNDetectFaceLandmarksRequest { request, _ in
            guard let face = (request.results as? [VNFaceObservation])?.first,
                  let landmarks = face.landmarks,
                  let leftEye = landmarks.leftEye?.normalizedPoints.first,
                  let rightEye = landmarks.rightEye?.normalizedPoints.first,
                  let nose = landmarks.nose?.normalizedPoints.first else {
                completion(nil)
                return
            }

            // Landmark points are normalized to the face bounding box, so convert the box
            // to pixel coordinates first and then map each point into it.
            let imageSize = CGSize(width: cgImage.width, height: cgImage.height)
            let faceBoundingBox = face.boundingBox.scaled(to: imageSize)
            let leftEyePos = self.convertNormalizedPoint(leftEye, to: faceBoundingBox)
            let rightEyePos = self.convertNormalizedPoint(rightEye, to: faceBoundingBox)
            let nosePos = self.convertNormalizedPoint(nose, to: faceBoundingBox)

            // Roll angle between the eyes; rotating by its negative levels them.
            let deltaY = leftEyePos.y - rightEyePos.y
            let deltaX = leftEyePos.x - rightEyePos.x
            let angle = atan2(deltaY, deltaX)

            guard let rotatedImage = self.rotated(by: -angle, around: nosePos) else {
                completion(nil)
                return
            }
            completion(rotatedImage)
        }

        let handler = VNImageRequestHandler(cgImage: cgImage)
        do {
            try handler.perform([request])
        } catch {
            completion(nil)
        }
    }

    /// Maps a point normalized to a face bounding box into that box, flipping y because
    /// Vision uses a bottom-left origin while the image coordinates here are top-left.
    private func convertNormalizedPoint(_ point: CGPoint, to rect: CGRect) -> CGPoint {
        return CGPoint(
            x: rect.origin.x + point.x * rect.width,
            y: rect.origin.y + (1 - point.y) * rect.height
        )
    }

    /// Returns the image rotated by `angle` (radians), drawn into a canvas large enough to
    /// hold the rotated bounds, pivoting around `center`.
    func rotated(by angle: CGFloat, around center: CGPoint) -> UIImage? {
        // The bounding box of the original size after rotation determines the canvas size.
        let rotatedRect = CGRect(origin: .zero, size: self.size)
            .applying(CGAffineTransform(rotationAngle: angle))
        let newSize = CGSize(width: rotatedRect.width, height: rotatedRect.height)

        UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
        defer { UIGraphicsEndImageContext() }
        guard let context = UIGraphicsGetCurrentContext() else { return nil }

        // Rotate around the canvas center; the extra .pi flips the drawing by 180°.
        context.translateBy(x: newSize.width / 2, y: newSize.height / 2)
        context.rotate(by: angle + .pi)

        // Offset the draw rect by the pivot point so the rotation is applied around `center`.
        self.draw(in: CGRect(
            x: -self.size.width / 2 + (center.x - self.size.width / 2),
            y: -self.size.height / 2 + (center.y - self.size.height / 2),
            width: self.size.width,
            height: self.size.height
        ))

        return UIGraphicsGetImageFromCurrentImageContext()
    }
}

extension CGRect {
    /// Converts a Vision normalized rect (origin at bottom-left) into a rect in the
    /// coordinate space of an image of the given size (origin at top-left).
    func scaled(to size: CGSize) -> CGRect {
        return CGRect(
            x: self.origin.x * size.width,
            y: (1 - self.origin.y - self.height) * size.height,
            width: self.width * size.width,
            height: self.height * size.height
        )
    }
}
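
// Example usage: a minimal sketch added for illustration, not part of the original file.
// The "portrait" asset name and the image view parameter are hypothetical placeholders;
// the dispatch back to the main queue assumes the result is used to update UI.
func demoAlignFace(into imageView: UIImageView) {
    guard let portrait = UIImage(named: "portrait") else { return }
    portrait.alignFace { cropped, aligned in
        // `cropped` is the detected face region; `aligned` is the roll-corrected crop,
        // or nil when landmark detection fails.
        DispatchQueue.main.async {
            imageView.image = aligned ?? cropped
        }
    }
}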