forked from programmersA4/HMM
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpred.py
More file actions
67 lines (57 loc) · 2.56 KB
/
pred.py
File metadata and controls
67 lines (57 loc) · 2.56 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
# import torch
def get_pred(results):
    """Print per-image detection summaries and return per-class counts.

    Args:
        results: a YOLOv5-style results object exposing ``imgs`` (list of
            HxWxC images), ``pred`` (list of Nx6 tensors laid out as
            x1, y1, x2, y2, confidence, class) and ``names`` (class index ->
            class name mapping).
            NOTE(review): layout assumed from the yolov5 convention hinted at
            in the commented-out torch.hub loader below — confirm.

    Returns:
        dict mapping lower-cased class name -> detection count. As in the
        original, only the LAST image's counts survive when several images
        are passed; an empty dict is returned when there are no images
        (the original raised UnboundLocalError in that case).
    """
    # Default up front so zero input images returns {} instead of crashing.
    detected_objs = {}
    for i, (im, pred) in enumerate(zip(results.imgs, results.pred)):
        img_info = f'image {i + 1}/{len(results.pred)}: {im.shape[0]}x{im.shape[1]} '
        print(img_info)
        detected_objs = {}  # re-init per image: last image's counts are returned
        if pred.shape[0]:
            for c in pred[:, -1].unique():
                n = (pred[:, -1] == c).sum()  # detections for this class
                obj = results.names[int(c)].lower()
                detected_objs[obj] = int(n)
            # print each detection's label with its confidence
            for *box, conf, cls in reversed(pred):  # xyxy, confidence, class
                label = f'{results.names[int(cls)]} {conf:.2f}'
                print(label)
    return detected_objs
def check_reuse(results):
    """Flag classes whose box center lies inside a detected 'Mobile phone' box.

    Splits each image's detections into mobile-phone boxes (``mboxes``) and
    all other boxes (``cboxes``, with the class id appended), then marks a
    class as detected when its box center falls inside any phone box.

    NOTE(review): the containment math treats boxes as (x, y, w, h), while
    the unpacking comment says the tensors are xyxy — confirm which layout
    the model actually emits.

    Args:
        results: a YOLOv5-style results object exposing ``imgs``, ``pred``
            (list of Nx6 tensors: box coords, confidence, class) and
            ``names`` (class index -> class name mapping).

    Returns:
        dict mapping lower-cased class name -> True for classes found inside
        a phone box; empty dict when nothing matches or there are no images
        (the original raised UnboundLocalError on zero images).
    """
    # Default up front so zero input images returns {} instead of crashing.
    detected_objs = {}
    for i, (im, pred) in enumerate(zip(results.imgs, results.pred)):
        img_info = f'image {i + 1}/{len(results.pred)}: {im.shape[0]}x{im.shape[1]} '
        print(img_info)
        detected_objs = {}
        if pred.shape[0]:
            mboxes = []  # mobile-phone boxes
            cboxes = []  # every other box, class id appended
            for *box, conf, cls in reversed(pred):  # xyxy, confidence, class
                label = results.names[int(cls)]
                bbox = list(map(float, box))
                if label == 'Mobile phone':
                    mboxes.append(bbox)
                else:
                    bbox.append(cls)
                    cboxes.append(bbox)
            print()
            print('mboxes', mboxes)
            print('cboxes', cboxes)
            for cx, cy, cw, ch, cls in cboxes:
                for mx, my, mw, mh in mboxes:
                    center_x = cx + cw / 2
                    center_y = cy + ch / 2
                    # BUG FIX: original tested `center_y < mh`, missing the
                    # `my +` offset, so the vertical check was wrong for any
                    # phone box not anchored at the top of the image.
                    if mx < center_x < mx + mw and my < center_y < my + mh:
                        obj = results.names[int(cls)].lower()
                        detected_objs[obj] = True
    return detected_objs
# if __name__=="__main__":
# fruit = torch.hub.load('taehyun-learn/yolov5', 'custom', path='yolov5m_fruit.pt')
# # fruit = torch.hub.load('JJ-HH/yolov5', 'custom', path='yolov5m_fruit.pt')
# infered = fruit('static/images/auth/img0002.jpg')
# infered.save(save_dir='static/images/infered')
# print()
# print(infered.files[0])
# # detected = get_pred(infered)
# # print(detected)