diff --git a/index.html b/index.html
index 3f6060d6..28cb45e0 100644
--- a/index.html
+++ b/index.html
@@ -98,6 +98,8 @@
Tinker With a Neural Network R
+
+
diff --git a/src/nn.ts b/src/nn.ts
index e92a13de..f597421f 100644
--- a/src/nn.ts
+++ b/src/nn.ts
@@ -134,6 +134,22 @@ export class Activations {
output: x => x,
der: x => 1
};
+ /**
+ * Gaussian bump on the pre-activation z: exp(-z^2). Output peaks at 1 when
+ * z = 0 and decays toward 0 as |z| grows; the derivative is -2z * exp(-z^2).
+ */
+ public static GAUSSIAN_Z: ActivationFunction = {
+ output: x => Math.exp(-x * x),
+ der: x => {
+ let o = Math.exp(-x * x);
+ return -2 * x * o;
+ }
+ };
+ /**
+ * Binary (Heaviside) step: 1 if pre-activation z >= 0, else 0.
+ * Derivative is 0 (no gradient through z); only layers above step units
+ * receive useful updates from the output error signal.
+ */
+ public static STEP: ActivationFunction = {
+ output: x => (x >= 0 ? 1 : 0),
+ der: () => 0
+ };
}
/** Build-in regularization functions */
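A quick numeric sanity check of the two new activations (not part of the patch); it assumes nn.ts is importable as a module from a sibling script, and the values in the comments are the expected outputs:

import * as nn from "./nn";

// GAUSSIAN_Z: exp(-z^2) peaks at 1 for z = 0 and decays toward 0 as |z| grows.
console.log(nn.Activations.GAUSSIAN_Z.output(0));  // 1
console.log(nn.Activations.GAUSSIAN_Z.output(2));  // ~0.0183

// Central-difference check of the analytic derivative -2z * exp(-z^2).
const z = 0.7;
const h = 1e-6;
const numeric =
    (nn.Activations.GAUSSIAN_Z.output(z + h) -
     nn.Activations.GAUSSIAN_Z.output(z - h)) / (2 * h);
console.log(Math.abs(numeric - nn.Activations.GAUSSIAN_Z.der(z)) < 1e-6);  // true

// STEP: hard 0/1 output on the pre-activation, zero gradient everywhere.
console.log(nn.Activations.STEP.output(-0.1), nn.Activations.STEP.output(0));  // 0 1
console.log(nn.Activations.STEP.der(5));  // 0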
diff --git a/src/state.ts b/src/state.ts
index 42dc8154..56edf502 100644
--- a/src/state.ts
+++ b/src/state.ts
@@ -24,7 +24,9 @@ export let activations: {[key: string]: nn.ActivationFunction} = {
"relu": nn.Activations.RELU,
"tanh": nn.Activations.TANH,
"sigmoid": nn.Activations.SIGMOID,
- "linear": nn.Activations.LINEAR
+ "linear": nn.Activations.LINEAR,
+ "step": nn.Activations.STEP,
+ "gaussian_z": nn.Activations.GAUSSIAN_Z
};
/** A map between names and regularization functions. */
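With the map extended, the new functions resolve from the same string keys the UI controls and serialized state already use. A minimal usage sketch; the resolveActivation helper and the tanh fallback are illustrative, not part of state.ts:

import * as nn from "./nn";
import {activations} from "./state";

// Look up an activation by its serialized name; fall back to tanh for unknown keys.
function resolveActivation(name: string): nn.ActivationFunction {
  return activations[name] || nn.Activations.TANH;
}

resolveActivation("step").output(0.3);    // 1
resolveActivation("gaussian_z").der(-1);  // 2/e, about 0.7358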