-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathBodyTrackingAR_v2.swift
More file actions
112 lines (93 loc) · 3.84 KB
/
BodyTrackingAR_v2.swift
File metadata and controls
112 lines (93 loc) · 3.84 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
import ARKit
import Vision
import UIKit
import RealityKit
/// Runs 2D human-body-pose detection (Vision) on ARKit camera frames and draws
/// the detected skeleton as a green line overlay on top of an `ARView`.
///
/// Usage: create with the target `ARView`, then call `process(frame:)` once per
/// `ARFrame` (e.g. from `session(_:didUpdate:)`).
final class BodyTrackingAR {
    weak var arView: ARView?
    /// Reused Vision request for 2D human body joint detection.
    private let bodyRequest = VNDetectHumanBodyPoseRequest()
    /// Serial queue so Vision inference never blocks the ARKit/UI threads.
    private let processingQueue = DispatchQueue(label: "com.example.bodytracking.queue", qos: .userInitiated)
    /// Minimum spacing between processed frames (~6–7 fps) to limit CPU load.
    private let minInterval: TimeInterval = 0.15
    /// Timestamp (in `ARFrame.timestamp` time base) of the last processed frame.
    private var lastProcessed: TimeInterval = 0
    // 2D layer the skeleton is drawn into, added on top of the AR view.
    private var overlayLayer = CAShapeLayer()

    init(arView: ARView) {
        self.arView = arView
        setupOverlay()
    }

    /// Configures the overlay layer (green strokes, no fill) and attaches it to
    /// the AR view's layer tree. Call on the main thread.
    private func setupOverlay() {
        guard let arView = arView else { return }
        overlayLayer.frame = arView.bounds
        overlayLayer.strokeColor = UIColor.green.cgColor
        overlayLayer.lineWidth = 3
        overlayLayer.fillColor = UIColor.clear.cgColor
        arView.layer.addSublayer(overlayLayer)
    }

    /// Entry point: throttles incoming frames, then runs the Vision request off
    /// the main thread and draws (or clears) the skeleton overlay.
    func process(frame: ARFrame) {
        // FIX: throttle with the frame's monotonic timestamp instead of
        // wall-clock Date(), which can jump (NTP sync, manual clock change)
        // and break the rate limit in both directions.
        let timestamp = frame.timestamp
        if timestamp - lastProcessed < minInterval { return }
        lastProcessed = timestamp
        let pixelBuffer = frame.capturedImage
        // FIX: arViewInterfaceOrientation() reads UIApplication.connectedScenes,
        // a main-thread-only UIKit API, but the original called it from the
        // background processing queue. Read the orientation on the main thread
        // first, then hand off to the processing queue for Vision work.
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            let orientation = CGImagePropertyOrientation(interfaceOrientation:
                arViewInterfaceOrientation() ?? .portrait)
            self.processingQueue.async { [weak self] in
                guard let self = self else { return }
                let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: orientation, options: [:])
                do {
                    try handler.perform([self.bodyRequest])
                    guard let result = self.bodyRequest.results?.first else {
                        // No body detected: remove any previously drawn skeleton.
                        DispatchQueue.main.async { self.clearOverlay() }
                        return
                    }
                    DispatchQueue.main.async {
                        self.drawBodyPose(result)
                    }
                } catch {
                    print("Vision error: \(error)")
                }
            }
        }
    }

    /// Draws the detected skeleton as line segments on the overlay layer.
    /// Must be called on the main thread (touches CALayer geometry).
    private func drawBodyPose(_ observation: VNHumanBodyPoseObservation) {
        guard let arView = arView,
              let recognizedPoints = try? observation.recognizedPoints(.all) else { return }
        // FIX: keep the overlay in sync with the view — the original set the
        // frame once at setup, so the skeleton drifted after rotation/resize.
        if overlayLayer.frame != arView.bounds {
            overlayLayer.frame = arView.bounds
        }
        let size = arView.bounds.size
        let path = UIBezierPath()
        // Joints of interest and the bone segments connecting them.
        let joints: [VNHumanBodyPoseObservation.JointName] = [
            .neck, .root,
            .rightShoulder, .rightElbow, .rightWrist,
            .leftShoulder, .leftElbow, .leftWrist,
            .rightHip, .rightKnee, .rightAnkle,
            .leftHip, .leftKnee, .leftAnkle
        ]
        let connections: [(VNHumanBodyPoseObservation.JointName, VNHumanBodyPoseObservation.JointName)] = [
            (.neck, .rightShoulder), (.neck, .leftShoulder),
            (.rightShoulder, .rightElbow), (.rightElbow, .rightWrist),
            (.leftShoulder, .leftElbow), (.leftElbow, .leftWrist),
            (.root, .rightHip), (.root, .leftHip),
            (.rightHip, .rightKnee), (.rightKnee, .rightAnkle),
            (.leftHip, .leftKnee), (.leftKnee, .leftAnkle),
            (.neck, .root)
        ]
        // Maps a Vision joint (normalized, origin bottom-left) into view
        // coordinates (origin top-left); drops low-confidence detections.
        // NOTE(review): this maps normalized coordinates straight onto the view
        // bounds and ignores the camera image's aspect-fill crop, so joints may
        // be slightly offset from the video — verify if precise alignment matters.
        func point(for joint: VNHumanBodyPoseObservation.JointName) -> CGPoint? {
            guard let p = recognizedPoints[joint], p.confidence > 0.2 else { return nil }
            return CGPoint(
                x: CGFloat(p.location.x) * size.width,
                y: (1 - CGFloat(p.location.y)) * size.height
            )
        }
        for (a, b) in connections {
            if let pa = point(for: a), let pb = point(for: b) {
                path.move(to: pa)
                path.addLine(to: pb)
            }
        }
        // Swap in the new skeleton path.
        overlayLayer.path = path.cgPath
    }

    /// Removes the currently drawn skeleton, if any. Main thread only.
    private func clearOverlay() {
        overlayLayer.path = nil
    }
}
/// Returns the app's current interface orientation, or `nil` when no window
/// scene is available.
///
/// FIX: `UIApplication.shared.connectedScenes` is main-thread-only UIKit state,
/// and it is an unordered `Set<UIScene>` that may contain non-window or inactive
/// scenes — the original grabbed `.first` blindly and had no thread guard. This
/// version hops to the main thread when called from a background queue and
/// prefers a foreground-active `UIWindowScene`, falling back to any window scene.
private func arViewInterfaceOrientation() -> UIInterfaceOrientation? {
    func read() -> UIInterfaceOrientation? {
        let windowScenes = UIApplication.shared.connectedScenes
            .compactMap { $0 as? UIWindowScene }
        let scene = windowScenes.first { $0.activationState == .foregroundActive }
            ?? windowScenes.first
        return scene?.interfaceOrientation
    }
    if Thread.isMainThread {
        return read()
    }
    // Safe here: nothing on the main queue waits on our processing queue.
    return DispatchQueue.main.sync(execute: read)
}