-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
104 lines (74 loc) · 2.28 KB
/
main.py
File metadata and controls
104 lines (74 loc) · 2.28 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
from os import stat
import cv2
import torch
import time
import serial
# Serial link to the microcontroller that drives the warning hardware
# (LEDs, buzzer, vibration motor); single-character commands are written to it.
# NOTE(review): port path is macOS-style — confirm it matches the deployed device.
dev = serial.Serial("/dev/tty.usbmodem1101", baudrate=19200)
# Model
model_path = 'best.pt'  # YOLOv5 weights file, loaded from the local 'yolov5' checkout
model_conf = 0.65       # confidence threshold applied by the model to its detections
model = torch.hub.load(
    'yolov5', 'custom', path=model_path, source='local')
model.conf = model_conf
def goToEnaged():
    """Signal the engaged state: alarms off ('3'), then blue LED on ('4')."""
    for command in (b'3', b'4'):  # '3' = alarms off, '4' = blue on
        dev.write(command)
def firstDistracted():
    """Level-1 distraction warning: red LED on ('1'), blue LED off ('5')."""
    for command in (b'1', b'5'):  # '1' = red on, '5' = blue off
        dev.write(command)
def secondDistracted():
    """Level-2 distraction warning: buzzer on ('2'), blue LED off ('5')."""
    for command in (b'2', b'5'):  # '2' = buzzer on, '5' = blue off
        dev.write(command)
def thirdDistracted():
    """Level-3 distraction warning: vibration on ('0'), blue LED off ('5')."""
    for command in (b'0', b'5'):  # '0' = vibrate on, '5' = blue off
        dev.write(command)
def video():
    """Run the webcam loop: classify driver attention each frame with the
    YOLOv5 model and escalate warnings the longer distraction persists.

    Label convention (model classes): 0 - engaged, 1 - distracted.
    Press 'q' in the preview window to quit.
    """
    states = []          # recent [label, timestamp] detections from the last 1 s
    timeDistracted = 0   # accumulated seconds of continuous distraction
    video = cv2.VideoCapture(1)
    try:
        ret, frame = video.read()
        while ret:
            # BUG FIX: cv2.resize's third positional parameter is `dst`, not the
            # interpolation flag — it must be passed by keyword.
            frame = cv2.resize(frame, (640, 640), interpolation=cv2.INTER_AREA)
            frameForModel = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)  # model wants RGB
            results = model(frameForModel)
            currentTime = time.time()
            for _, r in results.pandas().xyxyn[0].iterrows():
                if r.confidence > 0.1:
                    # r.values[6] is the predicted class label of this detection row
                    states.append([r.values[6], currentTime])
            # Keep only detections from the last second.
            states = [s for s in states if s[1] > currentTime - 1]
            # Fraction of recent detections labelled distracted; an empty window
            # counts as engaged. (Replaces the biased `len(states) + 1` divisor,
            # which could never exceed 0.5 with a single detection.)
            predict = (sum(int(s[0]) for s in states) / len(states)) if states else 0
            if predict > 0.5 and len(states) >= 2:
                # Add the gap between the two newest detections; the length
                # guard prevents an IndexError on states[-2].
                timeDistracted = timeDistracted + (states[-1][1] - states[-2][1])
            else:
                timeDistracted = 0
                # goToEnaged()
                print('good driving')
            # BUG FIX: use elif so only the highest warning level fires —
            # the original independent `if`s printed all three past 5 s.
            if timeDistracted > 5:
                # thirdDistracted()
                print('3rd dist')
            elif timeDistracted > 3:
                # secondDistracted()
                print('2nd dist')
            elif timeDistracted > 1:
                # firstDistracted()
                print('1st dist')
            cv2.imshow("Video", frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
            ret, frame = video.read()
    finally:
        # Always release the camera and close the preview window on exit.
        video.release()
        cv2.destroyAllWindows()
def test():
    """Exercise each hardware indicator state in sequence, pausing between steps."""
    sequence = (
        (goToEnaged, 5),
        (firstDistracted, 1),
        (secondDistracted, 1),
        (thirdDistracted, 3),
        (goToEnaged, 1),
    )
    for action, pause in sequence:
        action()
        time.sleep(pause)
# Guard the entry point so importing this module doesn't open the camera.
if __name__ == "__main__":
    # test()  # uncomment to exercise the indicator hardware sequence instead
    video()