forked from v1xerunt/DocTr
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: utils.py
More file actions
48 lines (37 loc) · 1.31 KB
/
utils.py
File metadata and controls
48 lines (37 loc) · 1.31 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import numpy as np
import torch
def get_device():
    """Return the torch device to run on: CUDA when available, else CPU."""
    return torch.device('cuda' if torch.cuda.is_available() else 'cpu')
def com_sim(y_true, y_pred, k=10):
    """Average cosine-similarity coverage of predicted embeddings.

    For each sample, the first ``k`` predicted embeddings (all of the first
    ``len(gt)`` when ``k == -1``) are L2-normalized and matched against the
    L2-normalized ground-truth embeddings; each prediction's score is its
    best cosine similarity to any ground-truth row, and the sample score is
    the mean of those. Returns the mean sample score across all samples.

    Args:
        y_true: sequence of (N_gt, D) ground-truth embedding arrays.
        y_pred: sequence of (K, D) predicted embedding arrays, same length.
        k: number of predictions to keep per sample; -1 keeps len(gt).

    Returns:
        Scalar mean score over all samples.
    """
    per_sample = []
    for gt, rec in zip(y_true, y_pred):
        # Truncate predictions: top-k, or match the ground-truth count.
        cutoff = len(gt) if k == -1 else k
        rec = rec[:cutoff]
        # Row-wise unit vectors so the dot product is cosine similarity.
        # NOTE(review): assumes no zero-norm rows — would produce NaN.
        gt_unit = gt / np.linalg.norm(gt, axis=1, keepdims=True)
        rec_unit = rec / np.linalg.norm(rec, axis=1, keepdims=True)
        sims = rec_unit @ gt_unit.T          # (cutoff, N_gt) cosine matrix
        per_sample.append(np.max(sims, axis=1).mean())
    return np.mean(per_sample)
def max_sim(y_true, y_pred):
    """Per-prediction best cosine similarity against the ground truth.

    For each sample, L2-normalizes both the ground-truth rows and the
    predicted rows, then records, for every predicted row, its maximum
    cosine similarity to any ground-truth row.

    Args:
        y_true: sequence of (N_gt, D) ground-truth embedding arrays.
        y_pred: sequence of (K, D) predicted embedding arrays, same length.

    Returns:
        ``np.array`` of the per-sample max-similarity vectors, with
        singleton axes squeezed out (a scalar when there is one sample
        with one prediction).
    """
    per_sample = []
    for gt, rec in zip(y_true, y_pred):
        # Unit-normalize rows so dot products are cosine similarities.
        # NOTE(review): assumes no zero-norm rows — would produce NaN.
        gt_unit = gt / np.linalg.norm(gt, axis=1, keepdims=True)
        rec_unit = rec / np.linalg.norm(rec, axis=1, keepdims=True)
        per_sample.append((rec_unit @ gt_unit.T).max(axis=1))
    return np.array(per_sample).squeeze()