From 9c51f0a0982671f7e06523fbf1bf858a75305805 Mon Sep 17 00:00:00 2001
From: SLASHLogin
Date: Wed, 30 Jun 2021 21:42:22 +0200
Subject: [PATCH 1/3] p.7 in Rust

---
 Rust/p007-Categorical-Cross-Entropy-Loss.rs | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
 create mode 100644 Rust/p007-Categorical-Cross-Entropy-Loss.rs

diff --git a/Rust/p007-Categorical-Cross-Entropy-Loss.rs b/Rust/p007-Categorical-Cross-Entropy-Loss.rs
new file mode 100644
index 0000000..754aa81
--- /dev/null
+++ b/Rust/p007-Categorical-Cross-Entropy-Loss.rs
@@ -0,0 +1,14 @@
+fn main() {
+    let softmax_output: [f64; 3] = [0.7, 0.1, 0.2];
+
+    let target_output: [f64; 3] = [1., 0., 0.];
+
+    let loss = -(softmax_output[0].ln() * target_output[0]
+        + softmax_output[1].ln() * target_output[1]
+        + softmax_output[2].ln() * target_output[2]);
+
+    println!("{}", loss);
+
+    println!("{}", -0.7_f64.ln());
+    println!("{}", -0.5_f64.ln());
+}

From c693309641cb19836d2896d0a31a315980a7c20c Mon Sep 17 00:00:00 2001
From: SLASHLogin
Date: Fri, 2 Jul 2021 20:33:57 +0200
Subject: [PATCH 2/3] p. 8 in Rust

---
 ...-Categorical-Cross-Entropy-Loss-applied.rs | 137 ++++++++++++++++++
 1 file changed, 137 insertions(+)
 create mode 100644 Rust/p008-Categorical-Cross-Entropy-Loss-applied.rs

diff --git a/Rust/p008-Categorical-Cross-Entropy-Loss-applied.rs b/Rust/p008-Categorical-Cross-Entropy-Loss-applied.rs
new file mode 100644
index 0000000..0505dd9
--- /dev/null
+++ b/Rust/p008-Categorical-Cross-Entropy-Loss-applied.rs
@@ -0,0 +1,137 @@
+use ndarray::{Array, Array1, Array2, Axis, Dimension, Ix1, Ix2};
+use ndarray_rand::RandomExt;
+use rand_distr::Normal;
+
+fn main() {
+    let (x, y) = spiral_data(100, 3);
+
+    let mut dense1 = LayerDense::new(2, 3);
+    let mut dense2 = LayerDense::new(3, 3);
+
+    dense1.forward(x);
+    let activation1 = activation_relu(dense1.outputs.unwrap());
+
+    dense2.forward(activation1);
+    let activation2 = softmax(dense2.outputs.unwrap());
+
+    println!("{:?}", activation2);
+
+    let loss = CategoricalCrossentropy::calculate(activation2, y).unwrap();
+
+    println!("Loss: {}", loss);
+}
+
+#[derive(Debug)]
+struct LayerDense {
+    weights: Array2<f64>,
+    biases: Array2<f64>,
+    outputs: Option<Array2<f64>>,
+}
+
+impl LayerDense {
+    fn new(n_inputs: usize, n_neurons: usize) -> Self {
+        let weights = Array::random((n_inputs, n_neurons), Normal::new(0.0, 1.0).unwrap());
+        let biases = Array::zeros((1, n_neurons));
+        LayerDense {
+            weights,
+            biases,
+            outputs: None,
+        }
+    }
+
+    fn forward(&mut self, inputs: Array2<f64>) {
+        self.outputs = Some(inputs.dot(&self.weights) + &self.biases);
+    }
+}
+
+fn activation_relu(input: Array2<f64>) -> Array2<f64> {
+    input.map(|x| x.max(0.0))
+}
+
+fn softmax(input: Array2<f64>) -> Array2<f64> {
+    let mut output = Array2::zeros(input.raw_dim());
+    for (in_row, mut out_row) in input.axis_iter(Axis(0)).zip(output.axis_iter_mut(Axis(0))) {
+        let mut max = 0.0;
+        for col in in_row.iter() {
+            if col > &max {
+                max = *col;
+            }
+        }
+        let exp = in_row.map(|x| (x - max).exp());
+        let sum = exp.sum();
+        out_row.assign(&(exp / sum));
+    }
+    output
+}
+
+trait Loss<D: Dimension> {
+    fn forward(output: Array2<f64>, y: Array<f64, D>) -> Array1<f64>;
+
+    fn calculate(output: Array2<f64>, y: Array<f64, D>) -> Option<f64> {
+        let sample_losses = Self::forward(output, y);
+        sample_losses.mean()
+    }
+}
+
+struct CategoricalCrossentropy {}
+
+impl Loss<Ix1> for CategoricalCrossentropy {
+    fn forward(output: Array2<f64>, y: Array<f64, Ix1>) -> Array1<f64> {
+        y.iter()
+            .zip(output.outer_iter())
+            .map(|(&targ_idx, distribution)| {
+                -distribution[targ_idx as usize].clamp(1e-7, 1.0 - 1e-7).ln()
+            })
+            .collect()
+    }
+}
+
+impl Loss<Ix2> for CategoricalCrossentropy {
+    fn forward(output: Array2<f64>, y: Array<f64, Ix2>) -> Array1<f64> {
+        let sum = (y * output).sum_axis(Axis(1));
+
+        sum.iter()
+            .map(|&confidence| -confidence.clamp(1e-7, 1.0 - 1e-7).ln())
+            .collect()
+    }
+}
+
+type X = Array2<f64>;
+type Y = Array1<f64>;
+
+pub fn spiral_data(points: usize, classes: usize) -> (X, Y) {
+    let mut y: Array1<f64> = Array::zeros(points * classes);
+    let mut x = Vec::with_capacity(points * classes * 2);
+
+    for class_number in 0..classes {
+        let r = Array::linspace(0.0, 1.0, points);
+        let t = (Array::linspace(
+            (class_number * 4) as f64,
+            ((class_number + 1) * 4) as f64,
+            points,
+        ) + Array::random(points, Normal::new(0.0, 1.0).unwrap()) * 0.2)
+            * 2.5;
+        let r2 = r.clone();
+        let mut c = Vec::new();
+        for (x, y) in (r * t.map(|x| (x).sin()))
+            .into_raw_vec()
+            .iter()
+            .zip((r2 * t.map(|x| (x).cos())).into_raw_vec().iter())
+        {
+            c.push(*x);
+            c.push(*y);
+        }
+        for (ix, n) in
+            ((points * class_number)..(points * (class_number + 1))).zip((0..).step_by(2))
+        {
+            x.push(c[n]);
+            x.push(c[n + 1]);
+            y[ix] = class_number as f64;
+        }
+    }
+    (
+        ndarray::ArrayBase::from_shape_vec((points * classes, 2), x).unwrap(),
+        y,
+    )
+}
+

From 820752ab39d7132946bfe022c2f5d9650dee3a3f Mon Sep 17 00:00:00 2001
From: SLASHLogin
Date: Fri, 2 Jul 2021 20:35:27 +0200
Subject: [PATCH 3/3] Clippy

---
 Rust/p007-Categorical-Cross-Entropy-Loss.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Rust/p007-Categorical-Cross-Entropy-Loss.rs b/Rust/p007-Categorical-Cross-Entropy-Loss.rs
index 754aa81..8136871 100644
--- a/Rust/p007-Categorical-Cross-Entropy-Loss.rs
+++ b/Rust/p007-Categorical-Cross-Entropy-Loss.rs
@@ -9,6 +9,6 @@ fn main() {
 
     println!("{}", loss);
 
-    println!("{}", -0.7_f64.ln());
-    println!("{}", -0.5_f64.ln());
+    println!("{}", -(0.7_f64.ln()));
+    println!("{}", -(0.5_f64.ln()));
 }
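Note on the two Loss impls in p008 (not part of the patches themselves): for a one-hot target vector, categorical cross-entropy reduces to -ln of the predicted confidence at the true class, which is what p007 computes by hand. The Ix1 impl takes sparse class indices (the encoding spiral_data produces), the Ix2 impl takes one-hot rows, and both should yield the same mean loss for equivalent targets. Below is a minimal sketch of calling both, assuming the trait and impls from the p008 patch are in scope; the function name demo and the extra sample rows are only illustrative.

// Sketch only: assumes the Loss trait and both CategoricalCrossentropy impls
// from p008 are in scope, together with ndarray's array! macro.
use ndarray::{array, Array1, Array2};

fn demo() {
    // Three samples over three classes; the first row matches the p007 example.
    let softmax_outputs: Array2<f64> = array![
        [0.7, 0.1, 0.2],
        [0.1, 0.5, 0.4],
        [0.02, 0.9, 0.08]
    ];

    // Sparse targets (one class index per sample) dispatch to the Ix1 impl.
    let sparse: Array1<f64> = array![0., 1., 1.];
    let loss_sparse = CategoricalCrossentropy::calculate(softmax_outputs.clone(), sparse);

    // One-hot targets (one row per sample) dispatch to the Ix2 impl.
    let one_hot: Array2<f64> = array![[1., 0., 0.], [0., 1., 0.], [0., 1., 0.]];
    let loss_one_hot = CategoricalCrossentropy::calculate(softmax_outputs, one_hot);

    // Same targets in two encodings: both means should be
    // -(ln 0.7 + ln 0.5 + ln 0.9) / 3, roughly 0.385.
    println!("{:?} {:?}", loss_sparse, loss_one_hot);
}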