-
Notifications
You must be signed in to change notification settings - Fork 9
Expand file tree
/
Copy pathgoodness_measure.py
More file actions
90 lines (67 loc) · 2.78 KB
/
goodness_measure.py
File metadata and controls
90 lines (67 loc) · 2.78 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
from data_sets import DataSets
from robustness_measure import Measure
from abc import ABCMeta, abstractmethod
import numpy as np
class RankData:
    """Confusion-matrix view of one feature ranking.

    Features are assumed to be laid out so that the first
    ``n_significant`` indices are the truly significant ones; the top
    ``n_indices`` positions of the ranking are treated as "selected".
    """

    def __init__(self, features_rank, n_significant_features, n_indices):
        self.features_rank = features_rank
        # Indices ordered from highest rank value to lowest.
        self.sorted_indices = np.argsort(features_rank)[::-1]
        self.n_significant = n_significant_features
        self.n_indices = n_indices

    def __len__(self):
        # Total number of ranked features.
        return len(self.features_rank)

    @property
    def true_positive(self):
        # Selected features that really are significant.
        selected = self.sorted_indices[:self.n_indices]
        return (selected < self.n_significant).sum()

    @property
    def false_positive(self):
        # Selected features that are not significant.
        selected = self.sorted_indices[:self.n_indices]
        return (selected >= self.n_significant).sum()

    @property
    def true_negative(self):
        # Rejected features that indeed are not significant.
        rejected = self.sorted_indices[self.n_indices:]
        return (rejected >= self.n_significant).sum()

    @property
    def false_negative(self):
        # Rejected features that actually are significant.
        rejected = self.sorted_indices[self.n_indices:]
        return (rejected < self.n_significant).sum()
class GoodnessMeasure(Measure, metaclass=ABCMeta):
    """Abstract base for goodness measures evaluated against the known
    ground-truth significant features of a data set.

    Subclasses implement :meth:`goodness`, which scores a single
    :class:`RankData` instance.
    """

    def __init__(self, data_set_name, n_indices=None):
        """
        :param data_set_name: name passed to DataSets.load_features_labels
            to retrieve the ground-truth feature labels (may return None).
        :param n_indices: how many top-ranked features count as "selected";
            defaults to the number of significant features.
        """
        super().__init__()
        feature_probe_labels = DataSets.load_features_labels(data_set_name)
        if feature_probe_labels is None:
            # No ground truth available: measures() degrades to 0.
            self.n_significant_features = None
        else:
            # Count of features labelled significant (label == 1).
            # (Was np.sum([...]): summing a 1-element list holding the
            # boolean array — same value, needlessly obscure.)
            self.n_significant_features = int(np.sum(feature_probe_labels == 1))
        self.n_indices = self.n_significant_features if n_indices is None else n_indices

    def measures(self, features_ranks):
        """Score every column of ``features_ranks`` with :meth:`goodness`.

        :param features_ranks: 2-D array, one ranking per column.
        :return: 1-D np.ndarray of goodness values, or scalar 0 when the
            data set has no ground-truth labels (NOTE(review): scalar vs
            array return is inconsistent but preserved — callers may rely
            on its truthiness).
        """
        if not self.n_significant_features:
            return 0
        # .T is a no-op on a 1-D column; kept in case matrix inputs occur.
        return np.array([
            self.goodness(
                RankData(features_ranks[:, i].T, self.n_significant_features, self.n_indices)
            )
            for i in range(features_ranks.shape[1])
        ])

    @abstractmethod
    def goodness(self, data: RankData):
        """Score a single ranking; implemented by subclasses."""
        pass
class Dummy(GoodnessMeasure):
    """Trivial measure for testing: goodness is simply the first entry
    of the raw ranking, ignoring the confusion-matrix machinery."""

    def __init__(self, *args, n_significant_features=None, **kwargs):
        super().__init__(*args, **kwargs)
        # Allow the test to override the label-derived count explicitly.
        if n_significant_features is not None:
            self.n_significant_features = n_significant_features

    def goodness(self, data: RankData):
        # Echo the first rank value unchanged.
        first_rank = data.features_rank[0]
        return first_rank
class Accuracy(GoodnessMeasure):
    """Fraction of all features that are classified correctly:
    (TP + TN) / total."""

    def goodness(self, data: RankData):
        correct = data.true_positive + data.true_negative
        return correct / len(data)
class Precision(GoodnessMeasure):
    """Share of the significant features recovered in the selection.

    NOTE(review): divides by ``n_significant`` rather than ``n_indices``,
    so this matches textbook precision only when the two coincide (the
    default); otherwise it behaves like recall — confirm intent.
    """

    def goodness(self, data: RankData):
        recovered = data.true_positive
        return recovered / data.n_significant
class XPrecision(GoodnessMeasure):
    """Exponentially-weighted precision: the ranking is split into
    consecutive chunks of ``n_significant`` indices, and each chunk's
    hit-rate is discounted by a factor of 0.5 per chunk."""

    def goodness(self, data: RankData):
        decay = 0.5
        n = data.n_significant
        # Number of full chunks of size n in the ranking.
        n_chunks = data.sorted_indices.shape[0] // n
        total = 0
        for chunk_idx in range(n_chunks):
            chunk = data.sorted_indices[chunk_idx * n:(chunk_idx + 1) * n]
            # Fraction of this chunk that is truly significant.
            hit_rate = (chunk < n).sum() / n
            total += decay ** chunk_idx * hit_rate
        return total