-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcostfuncs.py
More file actions
38 lines (31 loc) · 1.08 KB
/
costfuncs.py
File metadata and controls
38 lines (31 loc) · 1.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
'''Copyright (c) 2016 Jason Bunk
Covered by LICENSE.txt, which contains the "MIT License (Expat)".
'''
import numpy as np
# Classification accuracy is not a "cost function", but it's another useful training metric
def ClassificationAccuracy(ypred, ytrue):
	"""Fraction of samples whose highest-scoring prediction matches the label.

	ypred: (batch, nclasses) array of scores/probabilities.
	ytrue: (batch, nclasses) one-hot label array.
	Returns the mean of the true-label indicator at each predicted class,
	which for one-hot labels equals classification accuracy.
	"""
	predicted_classes = np.argmax(ypred, axis=1)
	row_indices = np.arange(ytrue.shape[0])
	return np.mean(ytrue[row_indices, predicted_classes])
class MSE:
	"""Mean-squared-error cost: 0.5 * sum((a - y)^2) averaged over the batch."""
	@staticmethod
	def forward(aa, yy):
		"""Return the scalar MSE cost for activations aa vs targets yy."""
		residual = aa - yy
		# (|d|)^2 == d^2, so squaring the residual directly is equivalent.
		return 0.5 * np.sum(residual * residual) / float(aa.shape[0])
	@staticmethod
	def backward(aa, yy):
		"""Return dCost/da, i.e. the residual scaled by 1/batchsize."""
		return (aa - yy) / float(aa.shape[0])
class SoftmaxCrossEntropy:
	"""Softmax activation fused with cross-entropy cost.

	Expects aa (logits) and yy (one-hot targets) of shape (batch, nclasses).
	"""
	@staticmethod
	def probs(aa):
		"""Row-wise softmax of the logits aa, shape-preserving.

		BUGFIX: the max used for numerical stabilization is now taken
		per-row (axis=1) instead of globally over the whole batch.
		Softmax is row-wise, so subtracting any per-row constant leaves
		the result unchanged mathematically, but a global max lets a row
		whose logits are all far below the batch max underflow every
		exp() to zero, producing 0/0 -> NaN. The per-row max guarantees
		each row's largest exponent is exp(0) = 1.
		"""
		maxaa = np.amax(aa, axis=1, keepdims=True)
		allexp = np.exp(aa - maxaa)
		allexpsum = np.sum(allexp, axis=1, keepdims=True)
		return allexp / allexpsum
	@staticmethod
	def forward(aa, yy):
		"""Mean cross-entropy cost: -sum(y * log(softmax(a))) / batchsize."""
		softmaxed = SoftmaxCrossEntropy.probs(aa)
		softmaxmult = np.multiply(yy, np.log(softmaxed))
		return -1 * np.sum(softmaxmult) / float(aa.shape[0])
	@staticmethod
	def backward(aa, yy):
		"""Gradient of the fused softmax + cross-entropy: (softmax(a) - y) / batchsize."""
		softmaxed = SoftmaxCrossEntropy.probs(aa)
		return np.subtract(softmaxed, yy) / float(aa.shape[0])