From 5a75299af97cb9e76582647bd9717265bd4e8fbe Mon Sep 17 00:00:00 2001
From: Harrison
Date: Sun, 6 Jun 2021 08:54:56 -0500
Subject: [PATCH 1/3] Create p008-Categorical-Cross-Entropy-Loss-applied.py

---
 ...-Categorical-Cross-Entropy-Loss-applied.py | 73 +++++++++++++++++++
 1 file changed, 73 insertions(+)
 create mode 100644 Python/p008-Categorical-Cross-Entropy-Loss-applied.py

diff --git a/Python/p008-Categorical-Cross-Entropy-Loss-applied.py b/Python/p008-Categorical-Cross-Entropy-Loss-applied.py
new file mode 100644
index 0000000..5b0e31a
--- /dev/null
+++ b/Python/p008-Categorical-Cross-Entropy-Loss-applied.py
@@ -0,0 +1,73 @@
+"""
+Applying Categorical Cross Entropy loss to our NNFS framework
+Associated with YT NNFS tutorial: https://www.youtube.com/watch?v=levekYbxauw&list=PLQVvvaa0QuDcjD5BAw2DxE6OF2tius3V3&index=8
+"""
+
+import numpy as np
+import nnfs
+from nnfs.datasets import spiral_data
+
+
+nnfs.init()
+
+class Layer_Dense:
+    def __init__(self, n_inputs, n_neurons):
+        # Small random weights and zero biases; one weight column per neuron.
+        self.weights = 0.01 * np.random.randn(n_inputs, n_neurons)
+        self.biases = np.zeros((1, n_neurons))
+    def forward(self, inputs):
+        self.output = np.dot(inputs, self.weights) + self.biases
+
+
+class Activation_ReLU:
+    def forward(self, inputs):
+        self.output = np.maximum(0, inputs)
+
+class Activation_Softmax:
+    def forward(self, inputs):
+        # Subtract the row max before exponentiating to keep exp() from overflowing.
+        exp_values = np.exp(inputs - np.max(inputs, axis=1, keepdims=True))
+        probabilities = exp_values / np.sum(exp_values, axis=1, keepdims=True)
+        self.output = probabilities
+
+class Loss:
+    def calculate(self, output, y):
+        sample_losses = self.forward(output, y)
+        data_loss = np.mean(sample_losses)
+        return data_loss
+
+class Loss_CategoricalCrossentropy(Loss):
+    def forward(self, y_pred, y_true):
+        samples = len(y_pred)
+        # Clip so log() never sees exactly 0 or 1.
+        y_pred_clipped = np.clip(y_pred, 1e-7, 1-1e-7)
+
+        if len(y_true.shape) == 1:
+            # Sparse labels: pick the confidence at each true class index.
+            correct_confidences = y_pred_clipped[range(samples), y_true]
+
+        elif len(y_true.shape) == 2:
+            # One-hot labels: the row-wise product keeps only the true-class confidence.
+            correct_confidences = np.sum(y_pred_clipped*y_true, axis=1)
+
+        negative_log_likelihoods = -np.log(correct_confidences)
+        return negative_log_likelihoods
+
+
+
+
+X, y = spiral_data(samples=100, classes=3)
+
+dense1 = Layer_Dense(2,3)
+activation1 = Activation_ReLU()
+
+dense2 = Layer_Dense(3, 3)
+activation2 = Activation_Softmax()
+
+dense1.forward(X)
+activation1.forward(dense1.output)
+
+dense2.forward(activation1.output)
+activation2.forward(dense2.output)
+
+print(activation2.output[:5])
+
+loss_function = Loss_CategoricalCrossentropy()
+loss = loss_function.calculate(activation2.output, y)
+
+print("Loss:", loss)

From 44c9822520894954ad41c228b6951dcbde2993a9 Mon Sep 17 00:00:00 2001
From: Roberto Tomás Collins
Date: Sun, 27 Jun 2021 11:34:09 -0400
Subject: [PATCH 2/3] p008 swift

---
 ...tegorical-Cross-Entropy-Loss-applied.swift | 366 ++++++++++++++++++
 1 file changed, 366 insertions(+)
 create mode 100644 Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift

diff --git a/Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift b/Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift
new file mode 100644
index 0000000..4c14fae
--- /dev/null
+++ b/Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift
@@ -0,0 +1,366 @@
+import GameKit
+import Foundation
+
+let (X, y) = NNfS.spiral_data(points: 100, classes: 3)
+
+protocol Layer {
+    var output: [[Double]] { get }
+    var weights: [[Double]] { get }
+    var biases: [Double] { get }
+    func forward(inputs:[[Double]])
+}
+
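+// A quick usage sketch of the Layer protocol above (illustrative, not part of
+// the original port): like the Python Layer_Dense, a conforming layer stores
+// its forward-pass result in `output` instead of returning it.
+//
+//     let layer = DenseLayer(n_inputs: 2, n_neurons: 3)
+//     layer.forward(inputs: [[1.0, 2.0]])   // one sample with 2 features
+//     layer.output                          // one row of 3 pre-activations
+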
+class DenseLayer: Layer {
+    public var output: [[Double]]
+    public var weights: [[Double]]
+    public var biases: [Double]
+
+    init(n_inputs: Int, n_neurons: Int) {
+        // Small Gaussian weights and zero biases, mirroring the Python layer.
+        weights = (0..<n_inputs).map { _ in
+            (0..<n_neurons).map { _ in 0.01 * Double(NNfS.rd.nextUniform()) }
+        }
+        biases = [Double](repeating: 0.0, count: n_neurons)
+        output = []
+    }
+
+    func forward(inputs:[[Double]]) {
+        output = inputs.dot(weights)!.add(biases)!
+    }
+}
+
+class ReLU {
+    public var output: [[Double]] = []
+
+    func forward(inputs:[[Double]]) {
+        output = inputs.map { row in row.map { max(0.0, $0) } }
+    }
+}
+
+class SoftMax {
+    public var output: [[Double]] = []
+
+    func forward(inputs:[[Double]]) {
+        // Subtract the row max before exponentiating to avoid overflow.
+        let exp_values = inputs.map { row -> [Double] in
+            guard let max_value = row.max() else {
+                fatalError("invalid inputs")
+            }
+            return row.map { value in
+                exp(value - max_value)
+            }
+        }
+
+        self.output = exp_values.map { row in row.div(row.reduce(0,+)) }
+    }
+}
+
+protocol Loss {
+    func forward(y_pred:[[Double]], y_true:[Int]) -> [Double]
+    func forward(y_pred:[[Double]], y_true:[[Int]]) -> [Double]
+}
+
+extension Loss {
+    func calculate(output:[[Double]], y:[Int]) -> Double {
+        let sample_losses = self.forward(y_pred: output, y_true: y)
+        let data_loss = sample_losses.mean
+        return data_loss
+    }
+    func calculatedoub(output:[[Double]], y:[[Int]]) -> Double {
+        let sample_losses = self.forward(y_pred: output, y_true: y)
+        let data_loss = sample_losses.mean
+        return data_loss
+    }
+}
+
+class Loss_CategoricalCrossentropy: Loss {
+    // Sparse labels: per-sample loss is -log(confidence at the true class index).
+    public func forward(y_pred:[[Double]], y_true:[Int]) -> [Double] {
+        let samples = y_pred.count
+        let y_pred_clipped = y_pred.clip(to:1e-7...1-1e-7)
+
+        let correct_confidences = zip(Array(0..<samples), y_true).map{ (x,y) in
+            y_pred_clipped[x][y]
+        }
+
+        let negative_log_likelihoods = correct_confidences.map{ el in -log(el) }
+        return negative_log_likelihoods
+    }
+
+    // One-hot labels: the row-wise product keeps only the true-class confidence,
+    // matching np.sum(y_pred_clipped * y_true, axis=1) in the Python version.
+    public func forward(y_pred:[[Double]], y_true:[[Int]]) -> [Double] {
+        let y_pred_clipped = y_pred.clip(to:1e-7...1-1e-7)
+
+        let correct_confidences: [Double] = zip(y_pred_clipped, y_true).map { (row, hot) in
+            zip(row, hot).reduce(0.0) { $0 + $1.0 * Double($1.1) }
+        }
+
+        let negative_log_likelihoods = correct_confidences.map{ el in -log(el) }
+        return negative_log_likelihoods
+    }
+}
+
+let layer1 = DenseLayer(n_inputs: 2, n_neurons: 3)
+let activation1 = ReLU()
+
+let layer2 = DenseLayer(n_inputs: 3, n_neurons: 3)
+let activation2 = SoftMax()
+
+layer1.forward(inputs: X)
+activation1.forward(inputs: layer1.output)
+
+layer2.forward(inputs: activation1.output)
+activation2.forward(inputs: layer2.output)
+
+print(activation2.output.prefix(5))
+
+let loss_function = Loss_CategoricalCrossentropy()
+let loss = loss_function.calculate(output: activation2.output, y: y.map(Int.init))
+
+print("Loss:", loss)
+
+public class NNfS {
+    static public let rs = GKLinearCongruentialRandomSource(seed: 0)
+    static public let rd = GKGaussianDistribution(randomSource: rs, mean: 0, deviation: Float(UInt8.max))
+
+    // https://cs231n.github.io/neural-networks-case-study/
+    static public func spiral_data(points:Int, classes:Int) -> ([[Double]], [UInt8]) {
+        let height = points * classes
+        var X:[[Double]] = [[Double]](count: height, generating: { _ in [Double](repeating: 0.0, count: 2) })
+        var y:[UInt8] = [UInt8](repeating: 0, count: points*classes)
+
+        for classNumber in 0..<classes {
+            let ix = (points * classNumber)..<(points * (classNumber + 1))
+            // Radius grows linearly along each arm; theta sweeps the arm with a
+            // little Gaussian noise, as in the cs231n spiral example.
+            let r = Array(count: points, generating: { n in Double(n)/Double(points) })
+            let tl = Array(count: points, generating: { n -> Double in Double(n)/Double(points) * 4.0 + Double(classNumber) })
+            let tr = Array(count: points, generating: { _ in Double(rd.nextUniform()) * 0.2 })
+            let t = tl.add(tr)!
+
+            let rSin:[Double] = r.mul(sin(radians: t.mul(2.5)))!
+            let rCos:[Double] = r.mul(cos(radians: t.mul(2.5)))!
+            let xSub = rSin.concatHorizontal(rCos)
+
+            X.replaceSubrange( ix, with: xSub )
+            y.replaceSubrange( ix, with: Array(repeating: UInt8(classNumber), count: ix.count) )
+        }
+
+        return (X, y)
+    }
+}
+
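+// Hand-checked illustration of the loss pipeline above (not part of the
+// original port): for a softmax row [0.7, 0.2, 0.1] with true class 0 the
+// sample loss is -log(0.7) ≈ 0.3567, and `calculate` reports the mean of
+// the per-sample losses.
+//
+//     Loss_CategoricalCrossentropy().calculate(output: [[0.7, 0.2, 0.1]], y: [0])
+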
+extension Array {
+    /// Create a new Array whose values are generated by the given closure.
+    /// - Parameters:
+    ///   - count: The number of elements to generate
+    ///   - elementGenerator: The closure that generates the elements.
+    ///                       The index into which the element will be
+    ///                       inserted is passed into the closure.
+    public init(count: Int, generating elementGenerator: (Int) -> Element) {
+        self = (0..<count).map(elementGenerator)
+    }
+}
+
+extension Comparable {
+    func clamped(to limits: ClosedRange<Self>) -> Self {
+        return min(max(self, limits.lowerBound), limits.upperBound)
+    }
+}
+
+extension Array where Element: BinaryInteger {
+    var mean: Double {
+        if self.isEmpty {
+            return 0.0
+        } else {
+            let sum = self.reduce(0, +)
+            return Double(sum) / Double(self.count)
+        }
+    }
+}
+
+extension Array where Element: BinaryFloatingPoint {
+    var mean: Double {
+        if self.isEmpty {
+            return 0.0
+        } else {
+            let sum = self.reduce(0, +)
+            return Double(sum) / Double(self.count)
+        }
+    }
+}
+
+extension Array where Element == Double {
+    public func clip (to limits: ClosedRange<Double>) -> Self {
+        return self.map{ el in
+            el.clamped(to: limits)
+        }
+    }
+
+    public func dot (_ rval: [Double]) -> Double? {
+        if self.count != rval.count { return nil }
+        return zip(self, rval).reduce(0.0, { (sum, tuple) -> Double in
+            sum + tuple.0 * tuple.1
+        })
+    }
+
+    public func concatHorizontal(_ rval: [[Double]]) -> [[Double]] {
+        return self.T.concatHorizontal(rval)
+    }
+
+    public func concatHorizontal(_ rval: [Double]) -> [[Double]] {
+        return self.T.concatHorizontal(rval.T)
+    }
+
+    /// Treat the array as a column vector: [a, b] becomes [[a], [b]].
+    public var T: [[Double]] {
+        get {
+            return self.reduce(into: []){ $0.append([$1]) }
+        }
+    }
+
+    public func add(_ rval: [Double]) -> [Double]? {
+        if self.count != rval.count { return nil }
+
+        return zip(self, rval).map(+)
+    }
+
+    public func mul(_ rval: [Double]) -> [Double]? {
+        if self.count != rval.count { return nil }
+
+        return zip(self, rval).map(*)
+    }
+
+    public func div(_ rval: [Double]) -> [Double]? {
+        if self.count != rval.count { return nil }
+
+        return zip(self, rval).map(/)
+    }
+
+    public func mul(_ rval: Double) -> [Double] {
+        return self.map{ lval in lval * rval }
+    }
+
+    public func div(_ rval: Double) -> [Double] {
+        return self.map{ lval in lval / rval }
+    }
+}
+
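+// Quick hand-checked examples of the vector helpers above (illustrative only):
+//
+//     [1.0, 2.0].dot([3.0, 4.0])     // Optional(11.0)
+//     [1.0, 2.0].T                   // [[1.0], [2.0]] (column vector)
+//     [1.0, 2.0].add([2.0, 2.0])     // Optional([3.0, 4.0])
+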
+extension Array where Element == [Double] {
+    public func clip (to limits: ClosedRange<Double>) -> Self {
+        return self.map{ row in
+            row.map { el in
+                el.clamped(to: limits)
+            }
+        }
+    }
+
+    public func dot (_ rval: [Double]) -> [Double]? {
+        let rc = rval.count
+        if self.contains(where: { $0.count != rc }) { return nil }
+
+        return self.map { (lval) -> Double in lval.dot(rval)! }
+    }
+
+    public func dot (_ rval: [[Double]]) -> [[Double]]? {
+        if rval.count != self[0].count { return nil }
+
+        // Transpose the right operand once; each output row is then a set of
+        // row-by-column dot products.
+        let rt = rval.T
+        return self.map{ row -> [Double] in rt.dot(row)! }
+    }
+
+    public func add(_ rval: [Double]) -> [[Double]]? {
+        if self[0].count != rval.count { return nil }
+
+        return self.map{ row in zip(row, rval).map(+) }
+    }
+
+    /// Build the transpose element by element; (a, b) always indexes
+    /// (row, column) of self regardless of which dimension is larger.
+    public var T: [[Double]] {
+        get {
+            var out: [[Double]] = []
+            let x = self.count
+            let y = self[0].count
+            let N = x > y ? x : y
+            let M = y < x ? y : x
+            for n in 0..<N {
+                for m in 0..<M {
+                    let a = x > y ? n : m
+                    let b = y < x ? m : n
+                    if out.count <= b {
+                        out.append([])
+                    }
+                    out[b].append(self[a][b])
+                }
+            }
+            return out
+        }
+    }
+
+    public func concatHorizontal(_ rval: [[Double]]) -> [[Double]] {
+        return zip(self.indices, self).map{ (index, row) in
+            var next = row
+            next.append(contentsOf: rval[index])
+            return next
+        }
+    }
+
+    public func elmul(_ rval: [[Double]]) -> [[Double]]? {
+        if rval.count != self.count || rval[0].count != self[0].count { return nil }
+
+        return zip(self, rval).map{ (lrow, rrow) in lrow.mul(rrow)! }
+    }
+
+    public func eldiv(_ rval: [[Double]]) -> [[Double]]? {
+        if rval.count != self.count || rval[0].count != self[0].count { return nil }
+
+        return zip(self, rval).map{ (lrow, rrow) in lrow.div(rrow)! }
+    }
+
+    public func mul(_ rval: [Double]) -> [[Double]]? {
+        if rval.count != self[0].count { return nil }
+
+        return self.map{ row in row.mul(rval)! }
+    }
+
+    public func div(_ rval: [Double]) -> [[Double]]? {
+        if rval.count != self[0].count { return nil }
+
+        return self.map{ row in row.div(rval)! }
+    }
+
+    public func mul(_ rval: Double) -> [[Double]] {
+        return self.map{ row in row.mul(rval) }
+    }
+
+    public func div(_ rval: Double) -> [[Double]] {
+        return self.map{ row in row.div(rval) }
+    }
+}
+
+public func sin(radians: [[Double]]) -> [[Double]] {
+    return radians.map{ val in sin(radians: val) }
+}
+
+public func cos(radians: [[Double]]) -> [[Double]] {
+    return radians.map{ val in cos(radians: val) }
+}
+
+public func sin(radians: [Double]) -> [Double] {
+    return radians.map(sin)
+}
+
+public func cos(radians: [Double]) -> [Double] {
+    return radians.map(cos)
+}

From 4c66887b0978eca03c1e586de9d6471adcebdbd9 Mon Sep 17 00:00:00 2001
From: Roberto Tomás Collins
Date: Sun, 27 Jun 2021 11:35:48 -0400
Subject: [PATCH 3/3] removed debugging-related rename

---
 Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift b/Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift
index 4c14fae..27fefc6 100644
--- a/Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift
+++ b/Swift/p008-Categorical-Cross-Entropy-Loss-applied.swift
@@ -78,7 +78,7 @@ extension Loss {
         let data_loss = sample_losses.mean
         return data_loss
     }
-    func calculatedoub(output:[[Double]], y:[[Int]]) -> Double {
+    func calculate(output:[[Double]], y:[[Int]]) -> Double {
         let sample_losses = self.forward(y_pred: output, y_true: y)
         let data_loss = sample_losses.mean
         return data_loss